--- /dev/null
+/*
+ * libmm-player
+ *
+ * Copyright (c) 2000 - 2011 Samsung Electronics Co., Ltd. All rights reserved.
+ *
+ * Contact: JongHyuk Choi <jhchoi.choi@samsung.com>, YeJin Cho <cho.yejin@samsung.com>,
+ * Seungbae Shin <seungbae.shin@samsung.com>, YoungHwan An <younghwan_.an@samsung.com>
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+/*===========================================================================================
+| |
+| INCLUDE FILES |
+| |
+========================================================================================== */
+#include <dlog.h>
+#include <mm_error.h>
+#include <mm_attrs_private.h>
+
+#include "mm_player_gst.h"
+#include "mm_player_priv.h"
+#include "mm_player_attrs.h"
+#include "mm_player_utils.h"
+
+/*===========================================================================================
+| |
+| LOCAL DEFINITIONS AND DECLARATIONS FOR MODULE |
+| |
+========================================================================================== */
+
+/*---------------------------------------------------------------------------
+| GLOBAL CONSTANT DEFINITIONS: |
+---------------------------------------------------------------------------*/
+
+/*---------------------------------------------------------------------------
+| IMPORTED VARIABLE DECLARATIONS: |
+---------------------------------------------------------------------------*/
+
+/*---------------------------------------------------------------------------
+| IMPORTED FUNCTION DECLARATIONS: |
+---------------------------------------------------------------------------*/
+
+/*---------------------------------------------------------------------------
+| LOCAL #defines: |
+---------------------------------------------------------------------------*/
+
+/*---------------------------------------------------------------------------
+| LOCAL CONSTANT DEFINITIONS: |
+---------------------------------------------------------------------------*/
+
+/*---------------------------------------------------------------------------
+| LOCAL DATA TYPE DEFINITIONS: |
+---------------------------------------------------------------------------*/
+
+/*---------------------------------------------------------------------------
+| GLOBAL VARIABLE DEFINITIONS: |
+---------------------------------------------------------------------------*/
+
+/*---------------------------------------------------------------------------
+| LOCAL VARIABLE DEFINITIONS: |
+---------------------------------------------------------------------------*/
+
+/*---------------------------------------------------------------------------
+| LOCAL FUNCTION PROTOTYPES: |
+---------------------------------------------------------------------------*/
+
+/*===========================================================================================
+| |
+| FUNCTION DEFINITIONS |
+| |
+========================================================================================== */
+
+/* NOTE : decide gstreamer state whether there is some playable track or not. */
+/* Translate a GError from the GST_STREAM_ERROR domain into an MM_ERROR_PLAYER_*
+ * code.  The mapping is refined by the posting element's factory klass string
+ * and by substrings of the error message.  When an audio input-selector is in
+ * use, errors posted from a non-active track are swallowed (MM_ERROR_NONE) so
+ * the active track keeps playing. */
+static gint
+__mmplayer_gst_transform_gsterror(mm_player_t* player, GstMessage * message, GError* error)
+{
+ gchar *src_element_name = NULL;
+ GstElement *src_element = NULL;
+ GstElementFactory *factory = NULL;
+ const gchar* klass = NULL;
+
+ MMPLAYER_FENTER();
+
+ MMPLAYER_RETURN_VAL_IF_FAIL(message, MM_ERROR_INVALID_ARGUMENT);
+ MMPLAYER_RETURN_VAL_IF_FAIL(message->src, MM_ERROR_INVALID_ARGUMENT);
+ MMPLAYER_RETURN_VAL_IF_FAIL(error, MM_ERROR_INVALID_ARGUMENT);
+ MMPLAYER_RETURN_VAL_IF_FAIL(player &&
+ player->pipeline &&
+ player->pipeline->mainbin, MM_ERROR_PLAYER_NOT_INITIALIZED);
+
+ /* identify the element that posted the error and its factory klass */
+ src_element = GST_ELEMENT_CAST(message->src);
+ if (!src_element)
+ goto INTERNAL_ERROR;
+
+ src_element_name = GST_ELEMENT_NAME(src_element);
+ if (!src_element_name)
+ goto INTERNAL_ERROR;
+
+ factory = gst_element_get_factory(src_element);
+ if (!factory)
+ goto INTERNAL_ERROR;
+
+ klass = gst_element_factory_get_metadata(factory, GST_ELEMENT_METADATA_KLASS);
+ if (!klass)
+ goto INTERNAL_ERROR;
+
+ LOGD("error code=%d, msg=%s, src element=%s, class=%s\n",
+ error->code, error->message, src_element_name, klass);
+
+ /* check whether the error is posted from not-activated track or not */
+ if (player->pipeline->mainbin[MMPLAYER_M_A_INPUT_SELECTOR].gst) {
+ int msg_src_pos = 0;
+ gint active_pad_index = player->selector[MM_PLAYER_TRACK_TYPE_AUDIO].active_pad_index;
+ LOGD("current active pad index -%d", active_pad_index);
+
+ if (src_element_name) {
+ int idx = 0;
+
+ if (player->audio_decoders) {
+ GList *adec = player->audio_decoders;
+ for (; adec ; adec = g_list_next(adec)) {
+ gchar *name = adec->data;
+
+ LOGD("found audio decoder name = %s", name);
+ /* the list position of the decoder whose name contains the
+ * message source's name is taken as the erroring track index */
+ if (g_strrstr(name, src_element_name)) {
+ msg_src_pos = idx;
+ break;
+ }
+ idx++;
+ }
+ }
+ LOGD("active pad = %d, error src index = %d", active_pad_index, msg_src_pos);
+ }
+
+ /* an error from a deactivated track must not interrupt the active one */
+ if (active_pad_index != msg_src_pos) {
+ LOGD("skip error because error is posted from no activated track");
+ return MM_ERROR_NONE;
+ }
+ }
+
+ switch (error->code) {
+ case GST_STREAM_ERROR_DECODE:
+ {
+ /* Demuxer can't parse one track because it's corrupted.
+ * So, the decoder for it is not linked.
+ * But, it has one playable track.
+ */
+ if (g_strrstr(klass, "Demux")) {
+ if (player->can_support_codec == FOUND_PLUGIN_VIDEO) {
+ return MM_ERROR_PLAYER_AUDIO_CODEC_NOT_FOUND;
+ } else if (player->can_support_codec == FOUND_PLUGIN_AUDIO) {
+ return MM_ERROR_PLAYER_VIDEO_CODEC_NOT_FOUND;
+ } else {
+ if (player->pipeline->audiobin) // PCM
+ return MM_ERROR_PLAYER_VIDEO_CODEC_NOT_FOUND;
+ else
+ goto CODEC_NOT_FOUND;
+ }
+ }
+ return MM_ERROR_PLAYER_INVALID_STREAM;
+ }
+ break; /* not reached: every path above returns */
+
+ case GST_STREAM_ERROR_CODEC_NOT_FOUND:
+ case GST_STREAM_ERROR_TYPE_NOT_FOUND:
+ case GST_STREAM_ERROR_WRONG_TYPE:
+ {
+ /* a failure from the subtitle parser is reported separately */
+ if (src_element == player->pipeline->mainbin[MMPLAYER_M_SUBPARSE].gst) {
+ LOGE("Not supported subtitle.");
+ return MM_ERROR_PLAYER_NOT_SUPPORTED_SUBTITLE;
+ }
+ return MM_ERROR_PLAYER_NOT_SUPPORTED_FORMAT;
+ }
+
+ case GST_STREAM_ERROR_FAILED:
+ {
+ /* Decoder Custom Message */
+ if (strstr(error->message, "ongoing")) {
+ /* NOTE(review): strncasecmp() returns 0 on a match, so this first
+ * branch runs when klass is NOT "audio" (and the second when it is
+ * NOT "video").  Confirm these inverted checks are intentional. */
+ if (strncasecmp(klass, "audio", 5)) {
+ if ((player->can_support_codec & FOUND_PLUGIN_VIDEO)) {
+ LOGD("Video can keep playing.\n");
+ return MM_ERROR_PLAYER_AUDIO_CODEC_NOT_FOUND;
+ } else
+ goto CODEC_NOT_FOUND;
+
+ } else if (strncasecmp(klass, "video", 5)) {
+ if ((player->can_support_codec & FOUND_PLUGIN_AUDIO)) {
+ LOGD("Audio can keep playing.\n");
+ return MM_ERROR_PLAYER_VIDEO_CODEC_NOT_FOUND;
+ } else
+ goto CODEC_NOT_FOUND;
+ }
+ }
+ return MM_ERROR_PLAYER_NOT_SUPPORTED_FORMAT;
+ }
+ break; /* not reached */
+
+ case GST_STREAM_ERROR_DECRYPT:
+ case GST_STREAM_ERROR_DECRYPT_NOKEY:
+ {
+ LOGE("decryption error, [%s] failed, reason : [%s]\n", src_element_name, error->message);
+
+ /* refine the DRM error from well-known message substrings */
+ if (strstr(error->message, "rights expired"))
+ return MM_ERROR_PLAYER_DRM_EXPIRED;
+ else if (strstr(error->message, "no rights"))
+ return MM_ERROR_PLAYER_DRM_NO_LICENSE;
+ else if (strstr(error->message, "has future rights"))
+ return MM_ERROR_PLAYER_DRM_FUTURE_USE;
+ else if (strstr(error->message, "opl violation"))
+ return MM_ERROR_PLAYER_DRM_OUTPUT_PROTECTION;
+ return MM_ERROR_PLAYER_DRM_NOT_AUTHORIZED;
+ }
+ break; /* not reached */
+
+ default:
+ break;
+ }
+
+ MMPLAYER_FLEAVE();
+
+ return MM_ERROR_PLAYER_INVALID_STREAM;
+
+INTERNAL_ERROR:
+ return MM_ERROR_PLAYER_INTERNAL;
+
+CODEC_NOT_FOUND:
+ LOGD("not found any available codec. Player should be destroyed.\n");
+ return MM_ERROR_PLAYER_CODEC_NOT_FOUND;
+}
+
+/* Translate a GST_CORE_ERROR domain code into an MM_ERROR_PLAYER_* code.
+ * Only GST_CORE_ERROR_MISSING_PLUGIN gets a dedicated translation; every
+ * other core error (state change, seek, negotiation, ...) collapses to
+ * MM_ERROR_PLAYER_INVALID_STREAM. */
+gint
+__mmplayer_gst_handle_core_error(mm_player_t* player, int code)
+{
+ MMPLAYER_FENTER();
+
+ MMPLAYER_RETURN_VAL_IF_FAIL(player, MM_ERROR_PLAYER_NOT_INITIALIZED);
+
+ /* a missing plugin means the media format itself cannot be handled */
+ if (code == GST_CORE_ERROR_MISSING_PLUGIN)
+ return MM_ERROR_PLAYER_NOT_SUPPORTED_FORMAT;
+
+ MMPLAYER_FLEAVE();
+
+ return MM_ERROR_PLAYER_INVALID_STREAM;
+}
+
+/* Translate a GST_LIBRARY_ERROR domain code into an MM_ERROR_PLAYER_* code.
+ * Every library error value, known or unknown, maps to
+ * MM_ERROR_PLAYER_INVALID_STREAM. */
+gint
+__mmplayer_gst_handle_library_error(mm_player_t* player, int code)
+{
+ MMPLAYER_FENTER();
+
+ MMPLAYER_RETURN_VAL_IF_FAIL(player, MM_ERROR_PLAYER_NOT_INITIALIZED);
+
+ /* FAILED, TOO_LAZY, INIT, SHUTDOWN, SETTINGS, ENCODE, ...: no
+ * per-code distinction is useful at the player level */
+ (void)code;
+
+ MMPLAYER_FLEAVE();
+
+ return MM_ERROR_PLAYER_INVALID_STREAM;
+}
+
+/* Translate a GST_RESOURCE_ERROR domain code into an MM_ERROR_PLAYER_* code.
+ * For streaming sources the read/open failures map to streaming errors; for
+ * local playback from external storage the storage state is refreshed so a
+ * removed-card condition can be detected by the caller. */
+gint
+__mmplayer_gst_handle_resource_error(mm_player_t* player, int code, GstMessage * message)
+{
+ gint trans_err = MM_ERROR_NONE;
+
+ MMPLAYER_FENTER();
+
+ MMPLAYER_RETURN_VAL_IF_FAIL(player, MM_ERROR_PLAYER_NOT_INITIALIZED);
+
+ switch (code) {
+ case GST_RESOURCE_ERROR_NO_SPACE_LEFT:
+ trans_err = MM_ERROR_PLAYER_NO_FREE_SPACE;
+ break;
+ case GST_RESOURCE_ERROR_NOT_FOUND:
+ case GST_RESOURCE_ERROR_OPEN_READ:
+ if (MMPLAYER_IS_HTTP_STREAMING(player) || MMPLAYER_IS_HTTP_LIVE_STREAMING(player)
+ || MMPLAYER_IS_RTSP_STREAMING(player)) {
+ trans_err = MM_ERROR_PLAYER_STREAMING_CONNECTION_FAIL;
+ break;
+ }
+ /* fall through: local(non-streaming) playback is handled like READ */
+ case GST_RESOURCE_ERROR_READ:
+ if (MMPLAYER_IS_HTTP_STREAMING(player) || MMPLAYER_IS_HTTP_LIVE_STREAMING(player)
+ || MMPLAYER_IS_RTSP_STREAMING(player)) {
+ trans_err = MM_ERROR_PLAYER_STREAMING_FAIL;
+ break;
+ } else if (message != NULL && message->src != NULL) {
+ storage_state_e storage_state = STORAGE_STATE_UNMOUNTABLE;
+ MMPlayerPathType path_type = MMPLAYER_PATH_MAX;
+
+ /* decide whether the failing source is the media or the subtitle path */
+ if (message->src == (GstObject *)player->pipeline->mainbin[MMPLAYER_M_SRC].gst)
+ path_type = MMPLAYER_PATH_VOD;
+ else if (message->src == (GstObject *)player->pipeline->mainbin[MMPLAYER_M_SUBSRC].gst)
+ path_type = MMPLAYER_PATH_TEXT;
+
+ if (path_type != MMPLAYER_PATH_MAX && player->storage_info[path_type].type == STORAGE_TYPE_EXTERNAL) {
+ /* check storage state */
+ storage_get_state(player->storage_info[path_type].id, &storage_state);
+ player->storage_info[path_type].state = storage_state;
+ LOGW("path %d, storage state %d:%d", path_type, player->storage_info[path_type].id, storage_state);
+ }
+ } /* fall through */
+ case GST_RESOURCE_ERROR_WRITE:
+ case GST_RESOURCE_ERROR_FAILED:
+ case GST_RESOURCE_ERROR_SEEK:
+ case GST_RESOURCE_ERROR_TOO_LAZY:
+ case GST_RESOURCE_ERROR_BUSY:
+ case GST_RESOURCE_ERROR_OPEN_WRITE:
+ case GST_RESOURCE_ERROR_OPEN_READ_WRITE:
+ case GST_RESOURCE_ERROR_CLOSE:
+ case GST_RESOURCE_ERROR_SYNC:
+ case GST_RESOURCE_ERROR_SETTINGS:
+ default:
+ trans_err = MM_ERROR_PLAYER_INTERNAL;
+ break;
+ }
+
+ MMPLAYER_FLEAVE();
+
+ return trans_err;
+}
+
+/* Translate a GError from the GST_STREAM_ERROR domain into an
+ * MM_ERROR_PLAYER_* code.  Codes whose meaning depends on the posting
+ * element are delegated to __mmplayer_gst_transform_gsterror(); everything
+ * else is reported as an invalid stream. */
+gint
+__mmplayer_gst_handle_stream_error(mm_player_t* player, GError* error, GstMessage * message)
+{
+ gint ret = MM_ERROR_PLAYER_INVALID_STREAM;
+
+ MMPLAYER_FENTER();
+
+ MMPLAYER_RETURN_VAL_IF_FAIL(player, MM_ERROR_PLAYER_NOT_INITIALIZED);
+ MMPLAYER_RETURN_VAL_IF_FAIL(error, MM_ERROR_INVALID_ARGUMENT);
+ MMPLAYER_RETURN_VAL_IF_FAIL(message, MM_ERROR_INVALID_ARGUMENT);
+
+ switch (error->code) {
+ case GST_STREAM_ERROR_FAILED:
+ case GST_STREAM_ERROR_TYPE_NOT_FOUND:
+ case GST_STREAM_ERROR_DECODE:
+ case GST_STREAM_ERROR_WRONG_TYPE:
+ case GST_STREAM_ERROR_DECRYPT:
+ case GST_STREAM_ERROR_DECRYPT_NOKEY:
+ case GST_STREAM_ERROR_CODEC_NOT_FOUND:
+ ret = __mmplayer_gst_transform_gsterror(player, message, error);
+ break;
+ default:
+ /* NOT_IMPLEMENTED, TOO_LAZY, ENCODE, DEMUX, MUX, FORMAT, ... */
+ break;
+ }
+
+ MMPLAYER_FLEAVE();
+
+ return ret;
+}
+
+gboolean
+__mmplayer_handle_gst_error(mm_player_t* player, GstMessage * message, GError* error)
+{
+ MMMessageParamType msg_param;
+ gchar *msg_src_element;
+
+ MMPLAYER_FENTER();
+
+ MMPLAYER_RETURN_VAL_IF_FAIL(player, FALSE);
+ MMPLAYER_RETURN_VAL_IF_FAIL(error, FALSE);
+
+ /* NOTE : do somthing necessary inside of __gst_handle_XXX_error. not here */
+
+ memset(&msg_param, 0, sizeof(MMMessageParamType));
+
+ if (error->domain == GST_CORE_ERROR) {
+ msg_param.code = __mmplayer_gst_handle_core_error(player, error->code);
+ } else if (error->domain == GST_LIBRARY_ERROR) {
+ msg_param.code = __mmplayer_gst_handle_library_error(player, error->code);
+ } else if (error->domain == GST_RESOURCE_ERROR) {
+ msg_param.code = __mmplayer_gst_handle_resource_error(player, error->code, message);
+ } else if (error->domain == GST_STREAM_ERROR) {
+ msg_param.code = __mmplayer_gst_handle_stream_error(player, error, message);
+ } else {
+ LOGW("This error domain is not defined.\n");
+
+ /* we treat system error as an internal error */
+ msg_param.code = MM_ERROR_PLAYER_INVALID_STREAM;
+ }
+
+ if (message->src) {
+ msg_src_element = GST_ELEMENT_NAME(GST_ELEMENT_CAST(message->src));
+
+ msg_param.data = (void *) error->message;
+
+ LOGE("-Msg src : [%s] Domain : [%s] Error : [%s] Code : [%d] is tranlated to error code : [0x%x]\n",
+ msg_src_element, g_quark_to_string(error->domain), error->message, error->code, msg_param.code);
+ }
+
+ /* no error */
+ if (msg_param.code == MM_ERROR_NONE)
+ return TRUE;
+
+ /* skip error to avoid duplicated posting */
+ if (((player->storage_info[MMPLAYER_PATH_VOD].type == STORAGE_TYPE_EXTERNAL) &&
+ (player->storage_info[MMPLAYER_PATH_VOD].state <= STORAGE_STATE_REMOVED)) ||
+ ((player->storage_info[MMPLAYER_PATH_TEXT].type == STORAGE_TYPE_EXTERNAL) &&
+ (player->storage_info[MMPLAYER_PATH_TEXT].state <= STORAGE_STATE_REMOVED))) {
+
+ /* The error will be handled by mused.
+ * @ref _mmplayer_manage_external_storage_state() */
+
+ LOGW("storage is removed, skip error post");
+ return TRUE;
+ }
+
+ /* post error to application */
+ if (!player->msg_posted) {
+ MMPLAYER_POST_MSG(player, MM_MESSAGE_ERROR, &msg_param);
+ /* don't post more if one was sent already */
+ player->msg_posted = TRUE;
+ } else
+ LOGD("skip error post because it's sent already.\n");
+
+ MMPLAYER_FLEAVE();
+
+ return TRUE;
+}
+
+static gboolean
+__mmplayer_handle_streaming_error(mm_player_t* player, GstMessage * message)
+{
+ LOGD("\n");
+ MMMessageParamType msg_param;
+ gchar *msg_src_element = NULL;
+ GstStructure *s = NULL;
+ guint error_id = 0;
+ gchar *error_string = NULL;
+
+ MMPLAYER_FENTER();
+
+ MMPLAYER_RETURN_VAL_IF_FAIL(player, FALSE);
+ MMPLAYER_RETURN_VAL_IF_FAIL(message, FALSE);
+
+ s = gst_structure_copy(gst_message_get_structure(message));
+
+
+ if (!gst_structure_get_uint(s, "error_id", &error_id))
+ error_id = MMPLAYER_STREAMING_ERROR_NONE;
+
+ switch (error_id) {
+ case MMPLAYER_STREAMING_ERROR_UNSUPPORTED_AUDIO:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_UNSUPPORTED_AUDIO;
+ break;
+ case MMPLAYER_STREAMING_ERROR_UNSUPPORTED_VIDEO:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_UNSUPPORTED_VIDEO;
+ break;
+ case MMPLAYER_STREAMING_ERROR_CONNECTION_FAIL:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_CONNECTION_FAIL;
+ break;
+ case MMPLAYER_STREAMING_ERROR_DNS_FAIL:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_DNS_FAIL;
+ break;
+ case MMPLAYER_STREAMING_ERROR_SERVER_DISCONNECTED:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_SERVER_DISCONNECTED;
+ break;
+ case MMPLAYER_STREAMING_ERROR_BAD_SERVER:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_BAD_SERVER;
+ break;
+ case MMPLAYER_STREAMING_ERROR_INVALID_PROTOCOL:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_INVALID_PROTOCOL;
+ break;
+ case MMPLAYER_STREAMING_ERROR_INVALID_URL:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_INVALID_URL;
+ break;
+ case MMPLAYER_STREAMING_ERROR_UNEXPECTED_MSG:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_UNEXPECTED_MSG;
+ break;
+ case MMPLAYER_STREAMING_ERROR_OUT_OF_MEMORIES:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_OUT_OF_MEMORIES;
+ break;
+ case MMPLAYER_STREAMING_ERROR_RTSP_TIMEOUT:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_RTSP_TIMEOUT;
+ break;
+ case MMPLAYER_STREAMING_ERROR_BAD_REQUEST:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_BAD_REQUEST;
+ break;
+ case MMPLAYER_STREAMING_ERROR_NOT_AUTHORIZED:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_NOT_AUTHORIZED;
+ break;
+ case MMPLAYER_STREAMING_ERROR_PAYMENT_REQUIRED:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_PAYMENT_REQUIRED;
+ break;
+ case MMPLAYER_STREAMING_ERROR_FORBIDDEN:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_FORBIDDEN;
+ break;
+ case MMPLAYER_STREAMING_ERROR_CONTENT_NOT_FOUND:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_CONTENT_NOT_FOUND;
+ break;
+ case MMPLAYER_STREAMING_ERROR_METHOD_NOT_ALLOWED:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_METHOD_NOT_ALLOWED;
+ break;
+ case MMPLAYER_STREAMING_ERROR_NOT_ACCEPTABLE:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_NOT_ACCEPTABLE;
+ break;
+ case MMPLAYER_STREAMING_ERROR_PROXY_AUTHENTICATION_REQUIRED:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_PROXY_AUTHENTICATION_REQUIRED;
+ break;
+ case MMPLAYER_STREAMING_ERROR_SERVER_TIMEOUT:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_SERVER_TIMEOUT;
+ break;
+ case MMPLAYER_STREAMING_ERROR_GONE:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_GONE;
+ break;
+ case MMPLAYER_STREAMING_ERROR_LENGTH_REQUIRED:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_LENGTH_REQUIRED;
+ break;
+ case MMPLAYER_STREAMING_ERROR_PRECONDITION_FAILED:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_PRECONDITION_FAILED;
+ break;
+ case MMPLAYER_STREAMING_ERROR_REQUEST_ENTITY_TOO_LARGE:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_REQUEST_ENTITY_TOO_LARGE;
+ break;
+ case MMPLAYER_STREAMING_ERROR_REQUEST_URI_TOO_LARGE:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_REQUEST_URI_TOO_LARGE;
+ break;
+ case MMPLAYER_STREAMING_ERROR_UNSUPPORTED_MEDIA_TYPE:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_UNSUPPORTED_MEDIA_TYPE;
+ break;
+ case MMPLAYER_STREAMING_ERROR_PARAMETER_NOT_UNDERSTOOD:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_PARAMETER_NOT_UNDERSTOOD;
+ break;
+ case MMPLAYER_STREAMING_ERROR_CONFERENCE_NOT_FOUND:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_CONFERENCE_NOT_FOUND;
+ break;
+ case MMPLAYER_STREAMING_ERROR_NOT_ENOUGH_BANDWIDTH:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_NOT_ENOUGH_BANDWIDTH;
+ break;
+ case MMPLAYER_STREAMING_ERROR_NO_SESSION_ID:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_NO_SESSION_ID;
+ break;
+ case MMPLAYER_STREAMING_ERROR_METHOD_NOT_VALID_IN_THIS_STATE:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_METHOD_NOT_VALID_IN_THIS_STATE;
+ break;
+ case MMPLAYER_STREAMING_ERROR_HEADER_FIELD_NOT_VALID_FOR_SOURCE:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_HEADER_FIELD_NOT_VALID_FOR_SOURCE;
+ break;
+ case MMPLAYER_STREAMING_ERROR_INVALID_RANGE:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_INVALID_RANGE;
+ break;
+ case MMPLAYER_STREAMING_ERROR_PARAMETER_IS_READONLY:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_PARAMETER_IS_READONLY;
+ break;
+ case MMPLAYER_STREAMING_ERROR_AGGREGATE_OP_NOT_ALLOWED:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_AGGREGATE_OP_NOT_ALLOWED;
+ break;
+ case MMPLAYER_STREAMING_ERROR_ONLY_AGGREGATE_OP_ALLOWED:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_ONLY_AGGREGATE_OP_ALLOWED;
+ break;
+ case MMPLAYER_STREAMING_ERROR_BAD_TRANSPORT:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_BAD_TRANSPORT;
+ break;
+ case MMPLAYER_STREAMING_ERROR_DESTINATION_UNREACHABLE:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_DESTINATION_UNREACHABLE;
+ break;
+ case MMPLAYER_STREAMING_ERROR_INTERNAL_SERVER_ERROR:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_INTERNAL_SERVER_ERROR;
+ break;
+ case MMPLAYER_STREAMING_ERROR_NOT_IMPLEMENTED:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_NOT_IMPLEMENTED;
+ break;
+ case MMPLAYER_STREAMING_ERROR_BAD_GATEWAY:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_BAD_GATEWAY;
+ break;
+ case MMPLAYER_STREAMING_ERROR_SERVICE_UNAVAILABLE:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_SERVICE_UNAVAILABLE;
+ break;
+ case MMPLAYER_STREAMING_ERROR_GATEWAY_TIME_OUT:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_GATEWAY_TIME_OUT;
+ break;
+ case MMPLAYER_STREAMING_ERROR_RTSP_VERSION_NOT_SUPPORTED:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_RTSP_VERSION_NOT_SUPPORTED;
+ break;
+ case MMPLAYER_STREAMING_ERROR_OPTION_NOT_SUPPORTED:
+ msg_param.code = MM_ERROR_PLAYER_STREAMING_OPTION_NOT_SUPPORTED;
+ break;
+ default:
+ {
+ gst_structure_free(s);
+ return MM_ERROR_PLAYER_STREAMING_FAIL;
+ }
+ }
+
+ error_string = g_strdup(gst_structure_get_string(s, "error_string"));
+ if (error_string)
+ msg_param.data = (void *) error_string;
+
+ if (message->src) {
+ msg_src_element = GST_ELEMENT_NAME(GST_ELEMENT_CAST(message->src));
+
+ LOGE("-Msg src : [%s] Code : [%x] Error : [%s] \n",
+ msg_src_element, msg_param.code, (char*)msg_param.data);
+ }
+
+ /* post error to application */
+ if (!player->msg_posted) {
+ MMPLAYER_POST_MSG(player, MM_MESSAGE_ERROR, &msg_param);
+
+ /* don't post more if one was sent already */
+ player->msg_posted = TRUE;
+ } else
+ LOGD("skip error post because it's sent already.\n");
+
+ gst_structure_free(s);
+ MMPLAYER_FLEAVE();
+ g_free(error_string);
+
+ return TRUE;
+
+}
+
+/* Populate the spherical-video (360) metadata struct from a GstTagList.
+ * Each gst_tag_list_get_*() call writes its output only when the tag is
+ * present, so fields keep their prior values for absent tags.
+ * NOTE(review): the string getters return newly-allocated copies — the
+ * metadata owner is presumably responsible for freeing
+ * stitching_software / projection_type_string / stereo_mode_string; confirm
+ * against the struct's cleanup path. */
+static void
+__mmplayer_get_metadata_360_from_tags(GstTagList *tags, mm_player_spherical_metadata_t *metadata)
+{
+ gst_tag_list_get_int(tags, "is_spherical", &metadata->is_spherical);
+ gst_tag_list_get_int(tags, "is_stitched", &metadata->is_stitched);
+ gst_tag_list_get_string(tags, "stitching_software",
+ &metadata->stitching_software);
+ gst_tag_list_get_string(tags, "projection_type",
+ &metadata->projection_type_string);
+ gst_tag_list_get_string(tags, "stereo_mode", &metadata->stereo_mode_string);
+ gst_tag_list_get_int(tags, "source_count", &metadata->source_count);
+ gst_tag_list_get_int(tags, "init_view_heading",
+ &metadata->init_view_heading);
+ gst_tag_list_get_int(tags, "init_view_pitch", &metadata->init_view_pitch);
+ gst_tag_list_get_int(tags, "init_view_roll", &metadata->init_view_roll);
+ gst_tag_list_get_int(tags, "timestamp", &metadata->timestamp);
+ gst_tag_list_get_int(tags, "full_pano_width_pixels",
+ &metadata->full_pano_width_pixels);
+ gst_tag_list_get_int(tags, "full_pano_height_pixels",
+ &metadata->full_pano_height_pixels);
+ gst_tag_list_get_int(tags, "cropped_area_image_width",
+ &metadata->cropped_area_image_width);
+ gst_tag_list_get_int(tags, "cropped_area_image_height",
+ &metadata->cropped_area_image_height);
+ gst_tag_list_get_int(tags, "cropped_area_left",
+ &metadata->cropped_area_left);
+ gst_tag_list_get_int(tags, "cropped_area_top", &metadata->cropped_area_top);
+ gst_tag_list_get_int(tags, "ambisonic_type", &metadata->ambisonic_type);
+ gst_tag_list_get_int(tags, "ambisonic_format", &metadata->ambisonic_format);
+ gst_tag_list_get_int(tags, "ambisonic_order", &metadata->ambisonic_order);
+}
+
+static gboolean
+__mmplayer_gst_extract_tag_from_msg(mm_player_t* player, GstMessage* msg)
+{
+
+/* macro for better code readability */
+#define MMPLAYER_UPDATE_TAG_STRING(gsttag, attribute, playertag) \
+if (gst_tag_list_get_string(tag_list, gsttag, &string)) {\
+ if (string != NULL) { \
+ SECURE_LOGD("update tag string : %s\n", string); \
+ if (strlen(string) > MM_MAX_STRING_LENGTH) { \
+ char *new_string = malloc(MM_MAX_STRING_LENGTH); \
+ strncpy(new_string, string, MM_MAX_STRING_LENGTH-1); \
+ new_string[MM_MAX_STRING_LENGTH-1] = '\0'; \
+ mm_attrs_set_string_by_name(attribute, playertag, new_string); \
+ g_free(new_string); \
+ new_string = NULL; \
+ } else { \
+ mm_attrs_set_string_by_name(attribute, playertag, string); \
+ } \
+ g_free(string); \
+ string = NULL; \
+ } \
+}
+
+#define MMPLAYER_UPDATE_TAG_IMAGE(gsttag, attribute, playertag) \
+do { \
+ GstSample *sample = NULL;\
+ if (gst_tag_list_get_sample_index(tag_list, gsttag, index, &sample)) {\
+ GstMapInfo info = GST_MAP_INFO_INIT;\
+ buffer = gst_sample_get_buffer(sample);\
+ if (!gst_buffer_map(buffer, &info, GST_MAP_READ)) {\
+ LOGD("failed to get image data from tag");\
+ gst_sample_unref(sample);\
+ return FALSE;\
+ } \
+ SECURE_LOGD("update album cover data : %p, size : %d\n", info.data, info.size);\
+ MMPLAYER_FREEIF(player->album_art);\
+ player->album_art = (gchar *)g_malloc(info.size);\
+ if (player->album_art) {\
+ memcpy(player->album_art, info.data, info.size);\
+ mm_attrs_set_data_by_name(attribute, playertag, (void *)player->album_art, info.size);\
+ if (MMPLAYER_IS_HTTP_LIVE_STREAMING(player)) {\
+ msg_param.data = (void *)player->album_art;\
+ msg_param.size = info.size;\
+ MMPLAYER_POST_MSG(player, MM_MESSAGE_IMAGE_BUFFER, &msg_param);\
+ SECURE_LOGD("post message image buffer data : %p, size : %d\n", info.data, info.size);\
+ } \
+ } \
+ gst_buffer_unmap(buffer, &info);\
+ gst_sample_unref(sample);\
+ } \
+} while (0)
+
+#define MMPLAYER_UPDATE_TAG_UINT(gsttag, attribute, playertag) \
+do { \
+ if (gst_tag_list_get_uint(tag_list, gsttag, &v_uint)) { \
+ if (v_uint) { \
+ int i = 0; \
+ gchar *tag_list_str = NULL; \
+ MMPlayerTrackType track_type = MM_PLAYER_TRACK_TYPE_AUDIO; \
+ if (strstr(GST_OBJECT_NAME(msg->src), "audio")) \
+ track_type = MM_PLAYER_TRACK_TYPE_AUDIO; \
+ else if (strstr(GST_OBJECT_NAME(msg->src), "video")) \
+ track_type = MM_PLAYER_TRACK_TYPE_VIDEO; \
+ else \
+ track_type = MM_PLAYER_TRACK_TYPE_TEXT; \
+ if (!strncmp(gsttag, GST_TAG_BITRATE, strlen(GST_TAG_BITRATE))) { \
+ if (track_type == MM_PLAYER_TRACK_TYPE_AUDIO) \
+ mm_attrs_set_int_by_name(attribute, "content_audio_bitrate", v_uint); \
+ player->bitrate[track_type] = v_uint; \
+ player->total_bitrate = 0; \
+ for (i = 0; i < MM_PLAYER_STREAM_COUNT_MAX; i++) \
+ player->total_bitrate += player->bitrate[i]; \
+ mm_attrs_set_int_by_name(attribute, playertag, player->total_bitrate); \
+ SECURE_LOGD("update bitrate %d[bps] of stream #%d.\n", v_uint, (int)track_type); \
+ } else if (!strncmp(gsttag, GST_TAG_MAXIMUM_BITRATE, strlen(GST_TAG_MAXIMUM_BITRATE))) { \
+ player->maximum_bitrate[track_type] = v_uint; \
+ player->total_maximum_bitrate = 0; \
+ for (i = 0; i < MM_PLAYER_STREAM_COUNT_MAX; i++) \
+ player->total_maximum_bitrate += player->maximum_bitrate[i]; \
+ mm_attrs_set_int_by_name(attribute, playertag, player->total_maximum_bitrate);\
+ SECURE_LOGD("update maximum bitrate %d[bps] of stream #%d\n", v_uint, (int)track_type);\
+ } else { \
+ mm_attrs_set_int_by_name(attribute, playertag, v_uint); \
+ } \
+ v_uint = 0;\
+ g_free(tag_list_str); \
+ } \
+ } \
+} while (0)
+
+#define MMPLAYER_UPDATE_TAG_DATE(gsttag, attribute, playertag) \
+if (gst_tag_list_get_date(tag_list, gsttag, &date)) {\
+ if (date != NULL) {\
+ string = g_strdup_printf("%d", g_date_get_year(date));\
+ mm_attrs_set_string_by_name(attribute, playertag, string);\
+ SECURE_LOGD("metainfo year : %s\n", string);\
+ MMPLAYER_FREEIF(string);\
+ g_date_free(date);\
+ } \
+}
+
+#define MMPLAYER_UPDATE_TAG_DATE_TIME(gsttag, attribute, playertag) \
+if (gst_tag_list_get_date_time(tag_list, gsttag, &datetime)) {\
+ if (datetime != NULL) {\
+ string = g_strdup_printf("%d", gst_date_time_get_year(datetime));\
+ mm_attrs_set_string_by_name(attribute, playertag, string);\
+ SECURE_LOGD("metainfo year : %s\n", string);\
+ MMPLAYER_FREEIF(string);\
+ gst_date_time_unref(datetime);\
+ } \
+}
+
+#define MMPLAYER_UPDATE_TAG_UINT64(gsttag, attribute, playertag) \
+if (gst_tag_list_get_uint64(tag_list, gsttag, &v_uint64)) {\
+ if (v_uint64) {\
+ /* FIXIT : don't know how to store date */\
+ g_assert(1);\
+ v_uint64 = 0;\
+ } \
+}
+
+#define MMPLAYER_UPDATE_TAG_DOUBLE(gsttag, attribute, playertag) \
+if (gst_tag_list_get_double(tag_list, gsttag, &v_double)) {\
+ if (v_double) {\
+ /* FIXIT : don't know how to store date */\
+ g_assert(1);\
+ v_double = 0;\
+ } \
+}
+
+ /* function start */
+ GstTagList* tag_list = NULL;
+
+ MMHandleType attrs = 0;
+
+ char *string = NULL;
+ guint v_uint = 0;
+ GDate *date = NULL;
+ GstDateTime *datetime = NULL;
+ /* album cover */
+ GstBuffer *buffer = NULL;
+ gint index = 0;
+ MMMessageParamType msg_param = {0, };
+
+ /* currently not used. but those are needed for above macro */
+ //guint64 v_uint64 = 0;
+ //gdouble v_double = 0;
+
+ MMPLAYER_RETURN_VAL_IF_FAIL(player && msg, FALSE);
+
+ attrs = MMPLAYER_GET_ATTRS(player);
+
+ MMPLAYER_RETURN_VAL_IF_FAIL(attrs, FALSE);
+
+ /* get tag list from gst message */
+ gst_message_parse_tag(msg, &tag_list);
+
+ /* store tags to player attributes */
+ MMPLAYER_UPDATE_TAG_STRING(GST_TAG_TITLE, attrs, "tag_title");
+ /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_TITLE_SORTNAME, ?, ?); */
+ MMPLAYER_UPDATE_TAG_STRING(GST_TAG_ARTIST, attrs, "tag_artist");
+ /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_ARTIST_SORTNAME, ?, ?); */
+ MMPLAYER_UPDATE_TAG_STRING(GST_TAG_ALBUM, attrs, "tag_album");
+ /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_ALBUM_SORTNAME, ?, ?); */
+ MMPLAYER_UPDATE_TAG_STRING(GST_TAG_COMPOSER, attrs, "tag_author");
+ MMPLAYER_UPDATE_TAG_DATE(GST_TAG_DATE, attrs, "tag_date");
+ MMPLAYER_UPDATE_TAG_DATE_TIME(GST_TAG_DATE_TIME, attrs, "tag_date");
+ MMPLAYER_UPDATE_TAG_STRING(GST_TAG_GENRE, attrs, "tag_genre");
+ /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_COMMENT, ?, ?); */
+ /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_EXTENDED_COMMENT, ?, ?); */
+ MMPLAYER_UPDATE_TAG_UINT(GST_TAG_TRACK_NUMBER, attrs, "tag_track_num");
+ /* MMPLAYER_UPDATE_TAG_UINT(GST_TAG_TRACK_COUNT, ?, ?); */
+ /* MMPLAYER_UPDATE_TAG_UINT(GST_TAG_ALBUM_VOLUME_NUMBER, ?, ?); */
+ /* MMPLAYER_UPDATE_TAG_UINT(GST_TAG_ALBUM_VOLUME_COUNT, ?, ?); */
+ /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_LOCATION, ?, ?); */
+ MMPLAYER_UPDATE_TAG_STRING(GST_TAG_DESCRIPTION, attrs, "tag_description");
+ /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_VERSION, ?, ?); */
+ /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_ISRC, ?, ?); */
+ /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_ORGANIZATION, ?, ?); */
+ MMPLAYER_UPDATE_TAG_STRING(GST_TAG_COPYRIGHT, attrs, "tag_copyright");
+ /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_COPYRIGHT_URI, ?, ?); */
+ /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_CONTACT, ?, ?); */
+ /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_LICENSE, ?, ?); */
+ /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_LICENSE_URI, ?, ?); */
+ /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_PERFORMER, ?, ?); */
+ /* MMPLAYER_UPDATE_TAG_UINT64(GST_TAG_DURATION, ?, ?); */
+ /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_CODEC, ?, ?); */
+ MMPLAYER_UPDATE_TAG_STRING(GST_TAG_VIDEO_CODEC, attrs, "content_video_codec");
+ MMPLAYER_UPDATE_TAG_STRING(GST_TAG_AUDIO_CODEC, attrs, "content_audio_codec");
+ MMPLAYER_UPDATE_TAG_UINT(GST_TAG_BITRATE, attrs, "content_bitrate");
+ MMPLAYER_UPDATE_TAG_UINT(GST_TAG_MAXIMUM_BITRATE, attrs, "content_max_bitrate");
+ MMPLAYER_UPDATE_TAG_LOCK(player);
+ MMPLAYER_UPDATE_TAG_IMAGE(GST_TAG_IMAGE, attrs, "tag_album_cover");
+ MMPLAYER_UPDATE_TAG_UNLOCK(player);
+ /* MMPLAYER_UPDATE_TAG_UINT(GST_TAG_NOMINAL_BITRATE, ?, ?); */
+ /* MMPLAYER_UPDATE_TAG_UINT(GST_TAG_MINIMUM_BITRATE, ?, ?); */
+ /* MMPLAYER_UPDATE_TAG_UINT(GST_TAG_SERIAL, ?, ?); */
+ /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_ENCODER, ?, ?); */
+ /* MMPLAYER_UPDATE_TAG_UINT(GST_TAG_ENCODER_VERSION, ?, ?); */
+ /* MMPLAYER_UPDATE_TAG_DOUBLE(GST_TAG_TRACK_GAIN, ?, ?); */
+ /* MMPLAYER_UPDATE_TAG_DOUBLE(GST_TAG_TRACK_PEAK, ?, ?); */
+ /* MMPLAYER_UPDATE_TAG_DOUBLE(GST_TAG_ALBUM_GAIN, ?, ?); */
+ /* MMPLAYER_UPDATE_TAG_DOUBLE(GST_TAG_ALBUM_PEAK, ?, ?); */
+ /* MMPLAYER_UPDATE_TAG_DOUBLE(GST_TAG_REFERENCE_LEVEL, ?, ?); */
+ /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_LANGUAGE_CODE, ?, ?); */
+ /* MMPLAYER_UPDATE_TAG_DOUBLE(GST_TAG_BEATS_PER_MINUTE, ?, ?); */
+ MMPLAYER_UPDATE_TAG_STRING(GST_TAG_IMAGE_ORIENTATION, attrs, "content_video_orientation");
+
+ if (strstr(GST_OBJECT_NAME(msg->src), "demux")) {
+ if (player->video360_metadata.is_spherical == -1) {
+ __mmplayer_get_metadata_360_from_tags(tag_list, &player->video360_metadata);
+ mm_attrs_set_int_by_name(attrs, "content_video_is_spherical",
+ player->video360_metadata.is_spherical);
+ if (player->video360_metadata.is_spherical == 1) {
+ LOGD("This is spherical content for 360 playback.");
+ player->is_content_spherical = TRUE;
+ } else {
+ LOGD("This is not spherical content");
+ player->is_content_spherical = FALSE;
+ }
+
+ if (player->video360_metadata.projection_type_string) {
+ if (!strcmp(player->video360_metadata.projection_type_string, "equirectangular")) {
+ player->video360_metadata.projection_type = VIDEO360_PROJECTION_TYPE_EQUIRECTANGULAR;
+ } else {
+ LOGE("Projection %s: code not implemented.\n", player->video360_metadata.projection_type_string);
+ player->is_content_spherical = player->is_video360_enabled = FALSE;
+ }
+ }
+
+ if (player->video360_metadata.stereo_mode_string) {
+ if (!strcmp(player->video360_metadata.stereo_mode_string, "mono")) {
+ player->video360_metadata.stereo_mode = VIDEO360_MODE_MONOSCOPIC;
+ } else if (!strcmp(player->video360_metadata.stereo_mode_string, "left-right")) {
+ player->video360_metadata.stereo_mode = VIDEO360_MODE_STEREOSCOPIC_LEFT_RIGHT;
+ } else if (!strcmp(player->video360_metadata.stereo_mode_string, "top-bottom")) {
+ player->video360_metadata.stereo_mode = VIDEO360_MODE_STEREOSCOPIC_TOP_BOTTOM;
+ } else {
+ LOGE("Stereo mode %s: code not implemented.\n", player->video360_metadata.stereo_mode_string);
+ player->is_content_spherical = player->is_video360_enabled = FALSE;
+ }
+ }
+ }
+ }
+
+ if (mmf_attrs_commit(attrs))
+ LOGE("failed to commit.\n");
+
+ gst_tag_list_free(tag_list);
+
+ return TRUE;
+}
+
+/* if retval is FALSE, it will be dropped for perfomance. */
+static gboolean
+__mmplayer_gst_check_useful_message(mm_player_t *player, GstMessage * message)
+{
+ gboolean retval = FALSE;
+
+ if (!(player->pipeline && player->pipeline->mainbin)) {
+ LOGE("player pipeline handle is null");
+ return TRUE;
+ }
+
+ switch (GST_MESSAGE_TYPE(message)) {
+ case GST_MESSAGE_TAG:
+ case GST_MESSAGE_EOS:
+ case GST_MESSAGE_ERROR:
+ case GST_MESSAGE_WARNING:
+ case GST_MESSAGE_CLOCK_LOST:
+ case GST_MESSAGE_NEW_CLOCK:
+ case GST_MESSAGE_ELEMENT:
+ case GST_MESSAGE_DURATION_CHANGED:
+ case GST_MESSAGE_ASYNC_START:
+ retval = TRUE;
+ break;
+ case GST_MESSAGE_ASYNC_DONE:
+ case GST_MESSAGE_STATE_CHANGED:
+ /* we only handle messages from pipeline */
+ if ((message->src == (GstObject *)player->pipeline->mainbin[MMPLAYER_M_PIPE].gst) && (!player->gapless.reconfigure))
+ retval = TRUE;
+ else
+ retval = FALSE;
+ break;
+ case GST_MESSAGE_BUFFERING:
+ {
+ gint buffer_percent = 0;
+
+ retval = TRUE;
+ gst_message_parse_buffering(message, &buffer_percent);
+ if (buffer_percent != MAX_BUFFER_PERCENT) {
+ LOGD("[%s] buffering msg %d%%!!\n", GST_OBJECT_NAME(GST_MESSAGE_SRC(message)), buffer_percent);
+ break;
+ }
+
+ if (!MMPLAYER_CMD_TRYLOCK(player)) {
+ LOGW("can't get cmd lock, send msg to bus");
+ break;
+ }
+
+ if ((player->streamer) && (player->streamer->buffering_state & MM_PLAYER_BUFFERING_IN_PROGRESS)) {
+ LOGD("[%s] Buffering DONE is detected !!\n", GST_OBJECT_NAME(GST_MESSAGE_SRC(message)));
+ player->streamer->buffering_state |= MM_PLAYER_BUFFERING_COMPLETE;
+ }
+
+ MMPLAYER_CMD_UNLOCK(player);
+
+ break;
+ }
+ default:
+ retval = FALSE;
+ break;
+ }
+
+ return retval;
+}
+
+static void
+__mmplayer_update_buffer_setting(mm_player_t *player, GstMessage *buffering_msg)
+{
+ MMHandleType attrs = 0;
+ guint64 data_size = 0;
+ gchar* path = NULL;
+ gint64 pos_nsec = 0;
+ struct stat sb;
+
+ MMPLAYER_RETURN_IF_FAIL(player && player->pipeline && player->pipeline->mainbin);
+
+ __mmplayer_gst_get_position(player, MM_PLAYER_POS_FORMAT_TIME, &pos_nsec); /* to update player->last_position */
+
+ attrs = MMPLAYER_GET_ATTRS(player);
+ if (!attrs) {
+ LOGE("fail to get attributes.\n");
+ return;
+ }
+
+ if (!MMPLAYER_IS_STREAMING(player) && (player->can_support_codec & FOUND_PLUGIN_VIDEO)) {
+ mm_attrs_get_string_by_name(attrs, "profile_uri", &path);
+
+ if (stat(path, &sb) == 0)
+ data_size = (guint64)sb.st_size;
+ } else if (MMPLAYER_IS_HTTP_STREAMING(player))
+ data_size = player->http_content_size;
+
+ __mm_player_streaming_buffering(player->streamer, buffering_msg, data_size, player->last_position, player->duration);
+ __mm_player_streaming_sync_property(player->streamer, player->pipeline->mainbin[MMPLAYER_M_AUTOPLUG].gst);
+
+ return;
+}
+
/* Drives playback state transitions in reaction to buffering progress.
 *
 * When buffering has completed, the pipeline is moved toward the target
 * state (resume to PLAYING or settle in PAUSED).  While buffering is in
 * progress, the pipeline is paused so the clock stops and sink elements
 * do not drop data.  RTSP streams are never paused here because pausing
 * makes the RTSP server stop sending data.
 *
 * Returns MM_ERROR_NONE on normal handling, or
 * MM_ERROR_PLAYER_INVALID_STATE when the message should be ignored
 * (no player/streamer, or live RTSP streaming). */
static int
__mmplayer_handle_buffering_message(mm_player_t* player)
{
	int ret = MM_ERROR_NONE;
	MMPlayerStateType prev_state = MM_PLAYER_STATE_NONE;
	MMPlayerStateType current_state = MM_PLAYER_STATE_NONE;
	MMPlayerStateType target_state = MM_PLAYER_STATE_NONE;
	MMPlayerStateType pending_state = MM_PLAYER_STATE_NONE;

	/* live RTSP has no meaningful buffering control; bail out early */
	if (!player || !player->streamer || (MMPLAYER_IS_LIVE_STREAMING(player) && MMPLAYER_IS_RTSP_STREAMING(player))) {
		LOGW("do nothing for buffering msg\n");
		ret = MM_ERROR_PLAYER_INVALID_STATE;
		goto exit;
	}

	prev_state = MMPLAYER_PREV_STATE(player);
	current_state = MMPLAYER_CURRENT_STATE(player);
	target_state = MMPLAYER_TARGET_STATE(player);
	pending_state = MMPLAYER_PENDING_STATE(player);

	LOGD("player state : prev %s, current %s, pending %s, target %s, buffering state 0x%X",
		MMPLAYER_STATE_GET_NAME(prev_state),
		MMPLAYER_STATE_GET_NAME(current_state),
		MMPLAYER_STATE_GET_NAME(pending_state),
		MMPLAYER_STATE_GET_NAME(target_state),
		player->streamer->buffering_state);

	if (!(player->streamer->buffering_state & MM_PLAYER_BUFFERING_IN_PROGRESS)) {
		/* NOTE : if buffering has done, player has to go to target state. */
		switch (target_state) {
		case MM_PLAYER_STATE_PAUSED:
		{
			switch (pending_state) {
			case MM_PLAYER_STATE_PLAYING:
				/* target is PAUSED but a transition to PLAYING is in
				 * flight; issue a pause to converge on the target */
				__mmplayer_gst_pause(player, TRUE);
				break;

			case MM_PLAYER_STATE_PAUSED:
				LOGD("player is already going to paused state, there is nothing to do.\n");
				break;

			case MM_PLAYER_STATE_NONE:
			case MM_PLAYER_STATE_NULL:
			case MM_PLAYER_STATE_READY:
			default:
				LOGW("invalid pending state [%s].\n", MMPLAYER_STATE_GET_NAME(pending_state));
				break;
			}
		}
			break;

		case MM_PLAYER_STATE_PLAYING:
		{
			switch (pending_state) {
			case MM_PLAYER_STATE_NONE:
			{
				/* no transition pending: resume only if not already playing */
				if (current_state != MM_PLAYER_STATE_PLAYING)
					__mmplayer_gst_resume(player, TRUE);
			}
				break;

			case MM_PLAYER_STATE_PAUSED:
				/* NOTE: It should be worked as asynchronously.
				 * Because, buffering can be completed during autoplugging when pipeline would try to go playing state directly.
				 */
				if (current_state == MM_PLAYER_STATE_PLAYING) {
					/* NOTE: If the current state is PLAYING, it means, async __mmplayer_gst_pause() is not completed yet.
					 * The current state should be changed to paused purposely to prevent state conflict.
					 */
					MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_PAUSED);
				}
				__mmplayer_gst_resume(player, TRUE);
				break;

			case MM_PLAYER_STATE_PLAYING:
				LOGD("player is already going to playing state, there is nothing to do.\n");
				break;

			case MM_PLAYER_STATE_NULL:
			case MM_PLAYER_STATE_READY:
			default:
				LOGW("invalid pending state [%s].\n", MMPLAYER_STATE_GET_NAME(pending_state));
				break;
			}
		}
			break;

		case MM_PLAYER_STATE_NULL:
		case MM_PLAYER_STATE_READY:
		case MM_PLAYER_STATE_NONE:
		default:
			LOGW("invalid target state [%s].\n", MMPLAYER_STATE_GET_NAME(target_state));
			break;
		}
	} else {
		/* NOTE : during the buffering, pause the player for stopping pipeline clock.
		 * it's for stopping the pipeline clock to prevent dropping the data in sink element.
		 */
		switch (pending_state) {
		case MM_PLAYER_STATE_NONE:
		{
			if (current_state != MM_PLAYER_STATE_PAUSED) {
				/* rtsp streaming pause makes rtsp server stop sending data. */
				if (!MMPLAYER_IS_RTSP_STREAMING(player)) {
					LOGD("set pause state during buffering\n");
					__mmplayer_gst_pause(player, TRUE);
				}
			}
		}
			break;

		case MM_PLAYER_STATE_PLAYING:
			/* rtsp streaming pause makes rtsp server stop sending data. */
			if (!MMPLAYER_IS_RTSP_STREAMING(player))
				__mmplayer_gst_pause(player, TRUE);
			break;

		case MM_PLAYER_STATE_PAUSED:
			/* already pausing; nothing to do */
			break;

		case MM_PLAYER_STATE_NULL:
		case MM_PLAYER_STATE_READY:
		default:
			LOGW("invalid pending state [%s].\n", MMPLAYER_STATE_GET_NAME(pending_state));
			break;
		}
	}

exit:
	return ret;
}
+
+static VariantData *
+__mmplayer_adaptive_var_info(const VariantData *self, gpointer user_data)
+{
+ VariantData *var_info = NULL;
+ g_return_val_if_fail(self != NULL, NULL);
+
+ var_info = g_new0(VariantData, 1);
+ if (!var_info) return NULL;
+ var_info->bandwidth = self->bandwidth;
+ var_info->width = self->width;
+ var_info->height = self->height;
+ return var_info;
+}
+
+static gboolean
+__mmplayer_gst_handle_duration(mm_player_t* player, GstMessage* msg)
+{
+ gint64 bytes = 0;
+
+ MMPLAYER_FENTER();
+
+ MMPLAYER_RETURN_VAL_IF_FAIL(player, FALSE);
+ MMPLAYER_RETURN_VAL_IF_FAIL(msg, FALSE);
+
+ if ((MMPLAYER_IS_HTTP_STREAMING(player)) &&
+ (msg->src) && (msg->src == (GstObject *)player->pipeline->mainbin[MMPLAYER_M_SRC].gst)) {
+ LOGD("msg src : [%s]", GST_ELEMENT_NAME(GST_ELEMENT_CAST(msg->src)));
+
+ if (gst_element_query_duration(GST_ELEMENT_CAST(msg->src), GST_FORMAT_BYTES, &bytes)) {
+ LOGD("data total size of http content: %"G_GINT64_FORMAT, bytes);
+ player->http_content_size = (bytes > 0) ? (bytes) : (0);
+ }
+ } else {
+ /* handling audio clip which has vbr. means duration is keep changing */
+ __mmplayer_update_content_attrs(player, ATTR_DURATION);
+ }
+
+ MMPLAYER_FLEAVE();
+
+ return TRUE;
+}
+
+static gboolean
+__mmplayer_eos_timer_cb(gpointer u_data)
+{
+ mm_player_t* player = NULL;
+ MMHandleType attrs = 0;
+ int count = 0;
+
+ MMPLAYER_RETURN_VAL_IF_FAIL(u_data, FALSE);
+
+ player = (mm_player_t*) u_data;
+ attrs = MMPLAYER_GET_ATTRS(player);
+
+ mm_attrs_get_int_by_name(attrs, "profile_play_count", &count);
+
+ if (count == -1) {
+ gint ret_value = 0;
+ ret_value = __mmplayer_gst_set_position(player, MM_PLAYER_POS_FORMAT_TIME, 0, TRUE);
+ if (ret_value != MM_ERROR_NONE)
+ LOGE("seeking to 0 failed in repeat play");
+ } else {
+ /* posting eos */
+ MMPLAYER_POST_MSG(player, MM_MESSAGE_END_OF_STREAM, NULL);
+ }
+
+ /* we are returning FALSE as we need only one posting */
+ return FALSE;
+}
+
+static void
+__mmplayer_handle_eos_delay(mm_player_t* player, int delay_in_ms)
+{
+ MMPLAYER_RETURN_IF_FAIL(player);
+
+ /* post now if delay is zero */
+ if (delay_in_ms == 0 || player->set_mode.pcm_extraction) {
+ LOGD("eos delay is zero. posting EOS now\n");
+ MMPLAYER_POST_MSG(player, MM_MESSAGE_END_OF_STREAM, NULL);
+
+ if (player->set_mode.pcm_extraction)
+ __mmplayer_cancel_eos_timer(player);
+
+ return;
+ }
+
+ /* cancel if existing */
+ __mmplayer_cancel_eos_timer(player);
+
+ /* init new timeout */
+ /* NOTE : consider give high priority to this timer */
+ LOGD("posting EOS message after [%d] msec\n", delay_in_ms);
+
+ player->eos_timer = g_timeout_add(delay_in_ms,
+ __mmplayer_eos_timer_cb, player);
+
+ player->context.global_default = g_main_context_default();
+ LOGD("global default context = %p, eos timer id = %d", player->context.global_default, player->eos_timer);
+
+ /* check timer is valid. if not, send EOS now */
+ if (player->eos_timer == 0) {
+ LOGW("creating timer for delayed EOS has failed. sending EOS now\n");
+ MMPLAYER_POST_MSG(player, MM_MESSAGE_END_OF_STREAM, NULL);
+ }
+}
+
+static int __mmplayer_gst_pending_seek(mm_player_t* player)
+{
+ MMPlayerStateType current_state = MM_PLAYER_STATE_NONE;
+ int ret = MM_ERROR_NONE;
+
+ MMPLAYER_FENTER();
+
+ MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED);
+
+ if (!player->pending_seek.is_pending) {
+ LOGD("pending seek is not reserved. nothing to do.\n");
+ return ret;
+ }
+
+ /* check player state if player could pending seek or not. */
+ current_state = MMPLAYER_CURRENT_STATE(player);
+
+ if (current_state != MM_PLAYER_STATE_PAUSED && current_state != MM_PLAYER_STATE_PLAYING) {
+ LOGW("try to pending seek in %s state, try next time. \n",
+ MMPLAYER_STATE_GET_NAME(current_state));
+ return ret;
+ }
+
+ LOGD("trying to play from(%"G_GINT64_FORMAT") pending position\n", player->pending_seek.pos);
+
+ ret = __mmplayer_gst_set_position(player, player->pending_seek.format, player->pending_seek.pos, FALSE);
+
+ if (MM_ERROR_NONE != ret)
+ LOGE("failed to seek pending postion. just keep staying current position.\n");
+
+ player->pending_seek.is_pending = FALSE;
+
+ MMPLAYER_FLEAVE();
+
+ return ret;
+}
+
+static void
+__mmplayer_gst_handle_async(mm_player_t* player, gboolean async, enum MMPlayerSinkType type)
+{
+ MMPlayerGstElement *videobin = NULL, *audiobin = NULL, *textbin = NULL;
+
+ MMPLAYER_RETURN_IF_FAIL(player && player->pipeline);
+
+ audiobin = player->pipeline->audiobin; /* can be null */
+ videobin = player->pipeline->videobin; /* can be null */
+ textbin = player->pipeline->textbin; /* can be null */
+
+ LOGD("Async will be set to %d about 0x%X type sink", async, type);
+
+ if ((type & MMPLAYER_AUDIO_SINK) && audiobin && audiobin[MMPLAYER_A_SINK].gst)
+ g_object_set(audiobin[MMPLAYER_A_SINK].gst, "async", async, NULL);
+
+ if ((type & MMPLAYER_VIDEO_SINK) && videobin && videobin[MMPLAYER_V_SINK].gst)
+ g_object_set(videobin[MMPLAYER_V_SINK].gst, "async", async, NULL);
+
+ if ((type & MMPLAYER_TEXT_SINK) && textbin && textbin[MMPLAYER_T_FAKE_SINK].gst)
+ g_object_set(textbin[MMPLAYER_T_FAKE_SINK].gst, "async", async, NULL);
+
+ return;
+}
+
+static void
+__mmplayer_drop_subtitle(mm_player_t* player, gboolean is_drop)
+{
+ MMPlayerGstElement *textbin;
+ MMPLAYER_FENTER();
+
+ MMPLAYER_RETURN_IF_FAIL(player &&
+ player->pipeline &&
+ player->pipeline->textbin);
+
+ MMPLAYER_RETURN_IF_FAIL(player->pipeline->textbin[MMPLAYER_T_IDENTITY].gst);
+
+ textbin = player->pipeline->textbin;
+
+ if (is_drop) {
+ LOGD("Drop subtitle text after getting EOS\n");
+
+ __mmplayer_gst_handle_async(player, FALSE, MMPLAYER_TEXT_SINK);
+ g_object_set(textbin[MMPLAYER_T_IDENTITY].gst, "drop-probability", (gfloat)1.0, NULL);
+
+ player->is_subtitle_force_drop = TRUE;
+ } else {
+ if (player->is_subtitle_force_drop == TRUE) {
+ LOGD("Enable subtitle data path without drop\n");
+
+ g_object_set(textbin[MMPLAYER_T_IDENTITY].gst, "drop-probability", (gfloat)0.0, NULL);
+ __mmplayer_gst_handle_async(player, TRUE, MMPLAYER_TEXT_SINK);
+
+ LOGD("non-connected with external display");
+
+ player->is_subtitle_force_drop = FALSE;
+ }
+ }
+}
+
+
+#if 0
+#endif
+
+int
+__mmplayer_gst_set_state(mm_player_t* player, GstElement * element, GstState state, gboolean async, gint timeout)
+{
+ GstState element_state = GST_STATE_VOID_PENDING;
+ GstState element_pending_state = GST_STATE_VOID_PENDING;
+ GstStateChangeReturn ret = GST_STATE_CHANGE_FAILURE;
+
+ MMPLAYER_FENTER();
+
+ MMPLAYER_RETURN_VAL_IF_FAIL(player, MM_ERROR_PLAYER_NOT_INITIALIZED);
+ MMPLAYER_RETURN_VAL_IF_FAIL(element, MM_ERROR_INVALID_ARGUMENT);
+
+ LOGD("setting [%s] element state to : %s\n", GST_ELEMENT_NAME(element), gst_element_state_get_name(state));
+
+ /* set state */
+ ret = gst_element_set_state(element, state);
+
+ if (ret == GST_STATE_CHANGE_FAILURE) {
+ LOGE("failed to set [%s] state\n", GST_ELEMENT_NAME(element));
+
+ /* dump state of all element */
+ __mmplayer_dump_pipeline_state(player);
+
+ return MM_ERROR_PLAYER_INTERNAL;
+ }
+
+ /* return here so state transition to be done in async mode */
+ if (async) {
+ LOGD("async state transition. not waiting for state complete.\n");
+ return MM_ERROR_NONE;
+ }
+
+ /* wait for state transition */
+ ret = gst_element_get_state(element, &element_state, &element_pending_state, timeout * GST_SECOND);
+
+ if (ret == GST_STATE_CHANGE_FAILURE || (state != element_state)) {
+ LOGE("failed to change [%s] element state to [%s] within %d sec\n",
+ GST_ELEMENT_NAME(element),
+ gst_element_state_get_name(state), timeout);
+
+ LOGE(" [%s] state : %s pending : %s \n",
+ GST_ELEMENT_NAME(element),
+ gst_element_state_get_name(element_state),
+ gst_element_state_get_name(element_pending_state));
+
+ /* dump state of all element */
+ __mmplayer_dump_pipeline_state(player);
+
+ return MM_ERROR_PLAYER_INTERNAL;
+ }
+
+ LOGD("[%s] element state has changed\n", GST_ELEMENT_NAME(element));
+
+ MMPLAYER_FLEAVE();
+
+ return MM_ERROR_NONE;
+}
+
+void
+__mmplayer_gst_callback(GstMessage *msg, gpointer data)
+{
+ mm_player_t* player = (mm_player_t*)(data);
+
+ MMPLAYER_RETURN_IF_FAIL(player);
+ MMPLAYER_RETURN_IF_FAIL(msg && GST_IS_MESSAGE(msg));
+
+ switch (GST_MESSAGE_TYPE(msg)) {
+ case GST_MESSAGE_UNKNOWN:
+ LOGD("unknown message received\n");
+ break;
+
+ case GST_MESSAGE_EOS:
+ {
+ MMHandleType attrs = 0;
+ gint count = 0;
+
+ LOGD("GST_MESSAGE_EOS received\n");
+
+ /* NOTE : EOS event is comming multiple time. watch out it */
+ /* check state. we only process EOS when pipeline state goes to PLAYING */
+ if (!(player->cmd == MMPLAYER_COMMAND_START || player->cmd == MMPLAYER_COMMAND_RESUME)) {
+ LOGD("EOS received on non-playing state. ignoring it\n");
+ break;
+ }
+
+ if (player->pipeline) {
+ if (player->pipeline->textbin)
+ __mmplayer_drop_subtitle(player, TRUE);
+
+ if ((player->audio_stream_cb) && (player->set_mode.pcm_extraction) && (!player->audio_stream_render_cb_ex)) {
+ GstPad *pad = NULL;
+
+ pad = gst_element_get_static_pad(player->pipeline->audiobin[MMPLAYER_A_SINK].gst, "sink");
+
+ LOGD("release audio callback\n");
+
+ /* release audio callback */
+ gst_pad_remove_probe(pad, player->audio_cb_probe_id);
+ player->audio_cb_probe_id = 0;
+ /* audio callback should be free because it can be called even though probe remove.*/
+ player->audio_stream_cb = NULL;
+ player->audio_stream_cb_user_param = NULL;
+
+ }
+ }
+ if ((player->audio_stream_render_cb_ex) && (!player->audio_stream_sink_sync))
+ __mmplayer_audio_stream_clear_buffer(player, TRUE);
+
+ /* rewind if repeat count is greater then zero */
+ /* get play count */
+ attrs = MMPLAYER_GET_ATTRS(player);
+
+ if (attrs) {
+ mm_attrs_get_int_by_name(attrs, "profile_play_count", &count);
+
+ LOGD("play count: %d, playback rate: %f\n", count, player->playback_rate);
+
+ if (count == -1 || player->playback_rate < 0.0) /* default value is 1 */ {
+ if (player->playback_rate < 0.0) {
+ player->resumed_by_rewind = TRUE;
+ _mmplayer_set_mute((MMHandleType)player, 0);
+ MMPLAYER_POST_MSG(player, MM_MESSAGE_RESUMED_BY_REW, NULL);
+ }
+
+ __mmplayer_handle_eos_delay(player, player->ini.delay_before_repeat);
+
+ /* initialize */
+ player->sent_bos = FALSE;
+
+ /* not posting eos when repeating */
+ break;
+ }
+ }
+
+ if (player->pipeline)
+ MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-status-eos");
+
+ /* post eos message to application */
+ __mmplayer_handle_eos_delay(player, player->ini.eos_delay);
+
+ /* reset last position */
+ player->last_position = 0;
+ }
+ break;
+
+ case GST_MESSAGE_ERROR:
+ {
+ GError *error = NULL;
+ gchar* debug = NULL;
+
+ /* generating debug info before returning error */
+ MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-status-error");
+
+ /* get error code */
+ gst_message_parse_error(msg, &error, &debug);
+
+ if (gst_structure_has_name(gst_message_get_structure(msg), "streaming_error")) {
+ /* Note : the streaming error from the streaming source is handled
+ * using __mmplayer_handle_streaming_error.
+ */
+ __mmplayer_handle_streaming_error(player, msg);
+
+ /* dump state of all element */
+ __mmplayer_dump_pipeline_state(player);
+ } else {
+ /* traslate gst error code to msl error code. then post it
+ * to application if needed
+ */
+ __mmplayer_handle_gst_error(player, msg, error);
+
+ if (debug)
+ LOGE("error debug : %s", debug);
+ }
+
+ if (MMPLAYER_IS_HTTP_PD(player))
+ _mmplayer_unrealize_pd_downloader((MMHandleType)player);
+
+ MMPLAYER_FREEIF(debug);
+ g_error_free(error);
+ }
+ break;
+
+ case GST_MESSAGE_WARNING:
+ {
+ char* debug = NULL;
+ GError* error = NULL;
+
+ gst_message_parse_warning(msg, &error, &debug);
+
+ LOGD("warning : %s\n", error->message);
+ LOGD("debug : %s\n", debug);
+
+ MMPLAYER_POST_MSG(player, MM_MESSAGE_WARNING, NULL);
+
+ MMPLAYER_FREEIF(debug);
+ g_error_free(error);
+ }
+ break;
+
+ case GST_MESSAGE_TAG:
+ {
+ LOGD("GST_MESSAGE_TAG\n");
+ if (!__mmplayer_gst_extract_tag_from_msg(player, msg))
+ LOGW("failed to extract tags from gstmessage\n");
+ }
+ break;
+
+ case GST_MESSAGE_BUFFERING:
+ {
+ MMMessageParamType msg_param = {0, };
+ int bRet = MM_ERROR_NONE;
+
+ if (!(player->pipeline && player->pipeline->mainbin)) {
+ LOGE("Pipeline is not initialized");
+ break;
+ }
+
+ if (!MMPLAYER_IS_STREAMING(player))
+ break;
+
+ if (player->pd_mode == MM_PLAYER_PD_MODE_URI) {
+ if (!MMPLAYER_CMD_TRYLOCK(player)) {
+ /* skip the playback control by buffering msg while user request is handled. */
+ gint per = 0;
+
+ LOGW("[PD mode] can't get cmd lock, only post buffering msg");
+
+ gst_message_parse_buffering(msg, &per);
+ LOGD("[PD mode][%s] buffering %d %%....", GST_OBJECT_NAME(GST_MESSAGE_SRC(msg)), per);
+
+ msg_param.connection.buffering = per;
+ MMPLAYER_POST_MSG(player, MM_MESSAGE_BUFFERING, &msg_param);
+ break;
+ }
+ } else {
+ MMPLAYER_CMD_LOCK(player);
+ }
+
+ if (!player->streamer) {
+ LOGW("Pipeline is shutting down");
+ MMPLAYER_CMD_UNLOCK(player);
+ break;
+ }
+
+ /* ignore the remained buffering message till getting 100% msg */
+ if (player->streamer->buffering_state == MM_PLAYER_BUFFERING_COMPLETE) {
+ gint buffer_percent = 0;
+
+ gst_message_parse_buffering(msg, &buffer_percent);
+
+ if (buffer_percent == MAX_BUFFER_PERCENT) {
+ LOGD("Ignored all the previous buffering msg!(got %d%%)\n", buffer_percent);
+ player->streamer->buffering_state = MM_PLAYER_BUFFERING_DEFAULT;
+ }
+ MMPLAYER_CMD_UNLOCK(player);
+ break;
+ }
+
+ /* ignore the remained buffering message */
+ if (player->streamer->buffering_state == MM_PLAYER_BUFFERING_ABORT) {
+ gint buffer_percent = 0;
+
+ gst_message_parse_buffering(msg, &buffer_percent);
+
+ LOGD("interrupted buffering -last posted %d %%, new per %d %%",
+ player->streamer->buffering_percent, buffer_percent);
+
+ if (player->streamer->buffering_percent > buffer_percent || buffer_percent <= 0) {
+ player->streamer->buffering_state = MM_PLAYER_BUFFERING_DEFAULT;
+ player->streamer->buffering_req.is_pre_buffering = FALSE;
+
+ LOGD("interrupted buffering - need to enter the buffering mode again - %d %%", buffer_percent);
+ } else {
+ LOGD("interrupted buffering - ignored the remained buffering msg!");
+ MMPLAYER_CMD_UNLOCK(player);
+ break;
+ }
+ }
+
+ __mmplayer_update_buffer_setting(player, msg);
+
+ bRet = __mmplayer_handle_buffering_message(player); /* playback control */
+
+ if (bRet == MM_ERROR_NONE) {
+ msg_param.connection.buffering = player->streamer->buffering_percent;
+ MMPLAYER_POST_MSG(player, MM_MESSAGE_BUFFERING, &msg_param);
+
+ if (MMPLAYER_IS_RTSP_STREAMING(player) &&
+ player->pending_resume &&
+ (player->streamer->buffering_percent >= MAX_BUFFER_PERCENT)) {
+
+ player->is_external_subtitle_added_now = FALSE;
+ player->pending_resume = FALSE;
+ _mmplayer_resume((MMHandleType)player);
+ }
+
+ if (MMPLAYER_IS_RTSP_STREAMING(player) &&
+ (player->streamer->buffering_percent >= MAX_BUFFER_PERCENT)) {
+
+ if (player->seek_state == MMPLAYER_SEEK_IN_PROGRESS) {
+ if (MMPLAYER_TARGET_STATE(player) == MM_PLAYER_STATE_PAUSED) {
+ player->seek_state = MMPLAYER_SEEK_NONE;
+ MMPLAYER_POST_MSG(player, MM_MESSAGE_SEEK_COMPLETED, NULL);
+ } else if (MMPLAYER_TARGET_STATE(player) == MM_PLAYER_STATE_PLAYING) {
+ /* Considering the async state trasition in case of RTSP.
+ After getting state change gst msg, seek cmpleted msg will be posted. */
+ player->seek_state = MMPLAYER_SEEK_COMPLETED;
+ }
+ }
+ }
+ } else if (bRet == MM_ERROR_PLAYER_INVALID_STATE) {
+ if (!player->streamer) {
+ LOGW("player->streamer is NULL, so discarding the buffering percent update\n");
+ MMPLAYER_CMD_UNLOCK(player);
+ break;
+ }
+
+ if ((MMPLAYER_IS_LIVE_STREAMING(player)) && (MMPLAYER_IS_RTSP_STREAMING(player))) {
+
+ LOGD("player->last_position=%"G_GINT64_FORMAT" , player->streamer->buffering_percent=%d \n",
+ GST_TIME_AS_SECONDS(player->last_position), player->streamer->buffering_percent);
+
+ if ((GST_TIME_AS_SECONDS(player->last_position) <= 0) && (MMPLAYER_CURRENT_STATE(player) == MM_PLAYER_STATE_PAUSED)) {
+ msg_param.connection.buffering = player->streamer->buffering_percent;
+ MMPLAYER_POST_MSG(player, MM_MESSAGE_BUFFERING, &msg_param);
+ } else {
+ LOGD("Not updating Buffering Message for Live RTSP case !!!\n");
+ }
+ } else {
+ msg_param.connection.buffering = player->streamer->buffering_percent;
+ MMPLAYER_POST_MSG(player, MM_MESSAGE_BUFFERING, &msg_param);
+ }
+ }
+ MMPLAYER_CMD_UNLOCK(player);
+ }
+ break;
+
+ case GST_MESSAGE_STATE_CHANGED:
+ {
+ MMPlayerGstElement *mainbin;
+ const GValue *voldstate, *vnewstate, *vpending;
+ GstState oldstate = GST_STATE_NULL;
+ GstState newstate = GST_STATE_NULL;
+ GstState pending = GST_STATE_NULL;
+
+ if (!(player->pipeline && player->pipeline->mainbin)) {
+ LOGE("player pipeline handle is null");
+ break;
+ }
+
+ mainbin = player->pipeline->mainbin;
+
+ /* we only handle messages from pipeline */
+ if (msg->src != (GstObject *)mainbin[MMPLAYER_M_PIPE].gst)
+ break;
+
+ /* get state info from msg */
+ voldstate = gst_structure_get_value(gst_message_get_structure(msg), "old-state");
+ vnewstate = gst_structure_get_value(gst_message_get_structure(msg), "new-state");
+ vpending = gst_structure_get_value(gst_message_get_structure(msg), "pending-state");
+
+ if (!voldstate || !vnewstate) {
+ LOGE("received msg has wrong format.");
+ break;
+ }
+
+ oldstate = (GstState)voldstate->data[0].v_int;
+ newstate = (GstState)vnewstate->data[0].v_int;
+ if (vpending)
+ pending = (GstState)vpending->data[0].v_int;
+
+ LOGD("state changed [%s] : %s ---> %s final : %s\n",
+ GST_OBJECT_NAME(GST_MESSAGE_SRC(msg)),
+ gst_element_state_get_name((GstState)oldstate),
+ gst_element_state_get_name((GstState)newstate),
+ gst_element_state_get_name((GstState)pending));
+
+ if (newstate == GST_STATE_PLAYING) {
+ if ((MMPLAYER_IS_RTSP_STREAMING(player)) && (player->pending_seek.is_pending)) {
+
+ int retVal = MM_ERROR_NONE;
+ LOGD("trying to play from (%"G_GINT64_FORMAT") pending position\n", player->pending_seek.pos);
+
+ retVal = __mmplayer_gst_set_position(player, player->pending_seek.format, player->pending_seek.pos, TRUE);
+
+ if (MM_ERROR_NONE != retVal)
+ LOGE("failed to seek pending postion. just keep staying current position.\n");
+
+ player->pending_seek.is_pending = FALSE;
+ }
+ }
+
+ if (oldstate == newstate) {
+ LOGD("pipeline reports state transition to old state");
+ break;
+ }
+
+ switch (newstate) {
+ case GST_STATE_VOID_PENDING:
+ break;
+
+ case GST_STATE_NULL:
+ break;
+
+ case GST_STATE_READY:
+ break;
+
+ case GST_STATE_PAUSED:
+ {
+ gboolean prepare_async = FALSE;
+
+ if (!player->audio_cb_probe_id && player->set_mode.pcm_extraction && !player->audio_stream_render_cb_ex)
+ __mmplayer_configure_audio_callback(player);
+
+ if (!player->sent_bos && oldstate == GST_STATE_READY) {
+ // managed prepare async case
+ mm_attrs_get_int_by_name(player->attrs, "profile_prepare_async", &prepare_async);
+ LOGD("checking prepare mode for async transition - %d", prepare_async);
+ }
+
+ if (MMPLAYER_IS_STREAMING(player) || MMPLAYER_IS_MS_BUFF_SRC(player) || prepare_async) {
+ MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_PAUSED);
+
+ if (MMPLAYER_IS_STREAMING(player) && (player->streamer))
+ __mm_player_streaming_set_content_bitrate(player->streamer,
+ player->total_maximum_bitrate, player->total_bitrate);
+
+ if (player->pending_seek.is_pending) {
+ LOGW("trying to do pending seek");
+ MMPLAYER_CMD_LOCK(player);
+ __mmplayer_gst_pending_seek(player);
+ MMPLAYER_CMD_UNLOCK(player);
+ }
+ }
+ }
+ break;
+
+ case GST_STATE_PLAYING:
+ {
+ if (MMPLAYER_IS_STREAMING(player)) {
+ // managed prepare async case when buffering is completed
+ // pending state should be reset otherwise, it's still playing even though it's resumed after bufferging.
+ if ((MMPLAYER_CURRENT_STATE(player) != MM_PLAYER_STATE_PLAYING) ||
+ (MMPLAYER_PENDING_STATE(player) == MM_PLAYER_STATE_PLAYING))
+ MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_PLAYING);
+
+ if (MMPLAYER_IS_RTSP_STREAMING(player) && (MMPLAYER_IS_LIVE_STREAMING(player))) {
+
+ LOGD("Current Buffering Percent = %d", player->streamer->buffering_percent);
+ if (player->streamer->buffering_percent < 100) {
+
+ MMMessageParamType msg_param = {0, };
+ LOGW("Posting Buffering Completed Message to Application !!!");
+
+ msg_param.connection.buffering = 100;
+ MMPLAYER_POST_MSG(player, MM_MESSAGE_BUFFERING, &msg_param);
+ }
+ }
+ }
+
+ if (player->gapless.stream_changed) {
+ __mmplayer_update_content_attrs(player, ATTR_ALL);
+ player->gapless.stream_changed = FALSE;
+ }
+
+ if (player->seek_state == MMPLAYER_SEEK_COMPLETED) {
+ player->seek_state = MMPLAYER_SEEK_NONE;
+ MMPLAYER_POST_MSG(player, MM_MESSAGE_SEEK_COMPLETED, NULL);
+ }
+ }
+ break;
+
+ default:
+ break;
+ }
+ }
+ break;
+
+ case GST_MESSAGE_CLOCK_LOST:
+ {
+ GstClock *clock = NULL;
+ gboolean need_new_clock = FALSE;
+
+ gst_message_parse_clock_lost(msg, &clock);
+ LOGD("GST_MESSAGE_CLOCK_LOST : %s\n", (clock ? GST_OBJECT_NAME(clock) : "NULL"));
+
+ if (!player->videodec_linked)
+ need_new_clock = TRUE;
+ else if (!player->ini.use_system_clock)
+ need_new_clock = TRUE;
+
+ if (need_new_clock) {
+ LOGD("Provide clock is TRUE, do pause->resume\n");
+ __mmplayer_gst_pause(player, FALSE);
+ __mmplayer_gst_resume(player, FALSE);
+ }
+ }
+ break;
+
+ case GST_MESSAGE_NEW_CLOCK:
+ {
+ GstClock *clock = NULL;
+ gst_message_parse_new_clock(msg, &clock);
+ LOGD("GST_MESSAGE_NEW_CLOCK : %s\n", (clock ? GST_OBJECT_NAME(clock) : "NULL"));
+ }
+ break;
+
+ case GST_MESSAGE_ELEMENT:
+ {
+ const gchar *structure_name;
+ gint count = 0, idx = 0;
+ MMHandleType attrs = 0;
+
+ attrs = MMPLAYER_GET_ATTRS(player);
+ if (!attrs) {
+ LOGE("cannot get content attribute");
+ break;
+ }
+
+ if (gst_message_get_structure(msg) == NULL)
+ break;
+
+ structure_name = gst_structure_get_name(gst_message_get_structure(msg));
+ if (!structure_name)
+ break;
+
+ LOGD("GST_MESSAGE_ELEMENT %s from %s", structure_name, GST_OBJECT_NAME(GST_MESSAGE_SRC(msg)));
+
+ if (!strcmp(structure_name, "adaptive-streaming-variant")) {
+ const GValue *var_info = NULL;
+
+ var_info = gst_structure_get_value(gst_message_get_structure(msg), "video-variant-info");
+ if (var_info != NULL) {
+ if (player->adaptive_info.var_list)
+ g_list_free_full(player->adaptive_info.var_list, g_free);
+
+ /* share addr or copy the list */
+ player->adaptive_info.var_list =
+ g_list_copy_deep((GList *)g_value_get_pointer(var_info), (GCopyFunc)__mmplayer_adaptive_var_info, NULL);
+
+ count = g_list_length(player->adaptive_info.var_list);
+ if (count > 0) {
+ VariantData *temp = NULL;
+
+ /* print out for debug */
+ LOGD("num of variant_info %d", count);
+ for (idx = 0; idx < count; idx++) {
+ temp = g_list_nth_data(player->adaptive_info.var_list, idx);
+ if (temp)
+ LOGD("variant(%d) [b]%d [w]%d [h]%d ", idx, temp->bandwidth, temp->width, temp->height);
+ }
+ }
+ }
+ }
+
+ if (!strcmp(structure_name, "prepare-decode-buffers")) {
+ gint num_buffers = 0;
+ gint extra_num_buffers = 0;
+
+ if (gst_structure_get_int(gst_message_get_structure(msg), "num_buffers", &num_buffers)) {
+ player->video_num_buffers = num_buffers;
+ LOGD("video_num_buffers : %d", player->video_num_buffers);
+ }
+
+ if (gst_structure_get_int(gst_message_get_structure(msg), "extra_num_buffers", &extra_num_buffers)) {
+ player->video_extra_num_buffers = extra_num_buffers;
+ LOGD("num_of_vout_extra num buffers : %d", extra_num_buffers);
+ }
+ break;
+ }
+
+ if (!strcmp(structure_name, "Language_list")) {
+ const GValue *lang_list = NULL;
+ lang_list = gst_structure_get_value(gst_message_get_structure(msg), "lang_list");
+ if (lang_list != NULL) {
+ count = g_list_length((GList *)g_value_get_pointer(lang_list));
+ if (count > 1)
+ LOGD("Total audio tracks(from parser) = %d \n", count);
+ }
+ }
+
+ if (!strcmp(structure_name, "Ext_Sub_Language_List")) {
+ const GValue *lang_list = NULL;
+ MMPlayerLangStruct *temp = NULL;
+
+ lang_list = gst_structure_get_value(gst_message_get_structure(msg), "lang_list");
+ if (lang_list != NULL) {
+ count = g_list_length((GList *)g_value_get_pointer(lang_list));
+ if (count) {
+ MMPLAYER_SUBTITLE_INFO_LOCK(player);
+ player->subtitle_language_list = (GList *)g_value_get_pointer(lang_list);
+ mm_attrs_set_int_by_name(attrs, "content_text_track_num", (gint)count);
+ if (mmf_attrs_commit(attrs))
+ LOGE("failed to commit.\n");
+ LOGD("Total subtitle tracks = %d \n", count);
+
+ while (count) {
+ temp = g_list_nth_data(player->subtitle_language_list, count - 1);
+ if (temp)
+ LOGD("value of lang_key is %s and lang_code is %s",
+ temp->language_key, temp->language_code);
+ count--;
+ }
+ MMPLAYER_SUBTITLE_INFO_SIGNAL(player);
+ MMPLAYER_SUBTITLE_INFO_UNLOCK(player);
+ }
+ }
+ }
+
+ /* custom message */
+ if (!strcmp(structure_name, "audio_codec_not_supported")) {
+ MMMessageParamType msg_param = {0,};
+ msg_param.code = MM_ERROR_PLAYER_AUDIO_CODEC_NOT_FOUND;
+ MMPLAYER_POST_MSG(player, MM_MESSAGE_ERROR, &msg_param);
+ }
+
+ /* custom message for RTSP attribute :
+ RTSP case, buffer is not come from server before PLAYING state. However,we have to get attribute after PAUSE state chaged.
+ sdp which has contents info is received when rtsp connection is opened.
+ extract duration ,codec info , resolution from sdp and get it by GstMessage */
+ if (!strcmp(structure_name, "rtspsrc_properties")) {
+
+ gchar *audio_codec = NULL;
+ gchar *video_codec = NULL;
+ gchar *video_frame_size = NULL;
+
+ gst_structure_get(gst_message_get_structure(msg), "rtsp_duration", G_TYPE_UINT64, &player->duration, NULL);
+ LOGD("rtsp duration : %"G_GINT64_FORMAT" msec", GST_TIME_AS_MSECONDS(player->duration));
+ player->streaming_type = __mmplayer_get_stream_service_type(player);
+
+ gst_structure_get(gst_message_get_structure(msg), "rtsp_audio_codec", G_TYPE_STRING, &audio_codec, NULL);
+ LOGD("rtsp_audio_codec : %s", audio_codec);
+ if (audio_codec)
+ mm_attrs_set_string_by_name(player->attrs, "content_audio_codec", audio_codec);
+
+ gst_structure_get(gst_message_get_structure(msg), "rtsp_video_codec", G_TYPE_STRING, &video_codec, NULL);
+ LOGD("rtsp_video_codec : %s", video_codec);
+ if (video_codec)
+ mm_attrs_set_string_by_name(player->attrs, "content_video_codec", video_codec);
+
+ gst_structure_get(gst_message_get_structure(msg), "rtsp_video_frame_size", G_TYPE_STRING, &video_frame_size, NULL);
+ LOGD("rtsp_video_frame_size : %s", video_frame_size);
+ if (video_frame_size) {
+
+ char *seperator = strchr(video_frame_size, '-');
+ if (seperator) {
+
+ char video_width[10] = {0,};
+ int frame_size_len = strlen(video_frame_size);
+ int separtor_len = strlen(seperator);
+
+ strncpy(video_width, video_frame_size, (frame_size_len - separtor_len));
+ mm_attrs_set_int_by_name(attrs, "content_video_width", atoi(video_width));
+
+ seperator++;
+ mm_attrs_set_int_by_name(attrs, "content_video_height", atoi(seperator));
+ }
+ }
+
+ if (mmf_attrs_commit(attrs))
+ LOGE("failed to commit.\n");
+ }
+ }
+ break;
+
+ case GST_MESSAGE_DURATION_CHANGED:
+ {
+ LOGD("GST_MESSAGE_DURATION_CHANGED\n");
+ if (!__mmplayer_gst_handle_duration(player, msg))
+ LOGW("failed to update duration");
+ }
+
+ break;
+
+ case GST_MESSAGE_ASYNC_START:
+ LOGD("GST_MESSAGE_ASYNC_START : %s\n", GST_ELEMENT_NAME(GST_MESSAGE_SRC(msg)));
+ break;
+
+ case GST_MESSAGE_ASYNC_DONE:
+ {
+ MMPlayerGstElement *mainbin;
+
+ if (!(player->pipeline && player->pipeline->mainbin)) {
+ LOGE("player pipeline handle is null");
+ break;
+ }
+
+ mainbin = player->pipeline->mainbin;
+
+ LOGD("GST_MESSAGE_ASYNC_DONE : %s\n", GST_ELEMENT_NAME(GST_MESSAGE_SRC(msg)));
+
+ /* we only handle messages from pipeline */
+ if (msg->src != (GstObject *)mainbin[MMPLAYER_M_PIPE].gst)
+ break;
+
+ if (player->seek_state == MMPLAYER_SEEK_IN_PROGRESS) {
+ if (MMPLAYER_TARGET_STATE(player) == MM_PLAYER_STATE_PAUSED) {
+ player->seek_state = MMPLAYER_SEEK_NONE;
+ MMPLAYER_POST_MSG(player, MM_MESSAGE_SEEK_COMPLETED, NULL);
+ } else if (MMPLAYER_TARGET_STATE(player) == MM_PLAYER_STATE_PLAYING) {
+ if (mainbin[MMPLAYER_M_AUTOPLUG].gst) {
+ LOGD("sync %s state(%s) with parent state(%s)",
+ GST_ELEMENT_NAME(mainbin[MMPLAYER_M_AUTOPLUG].gst),
+ gst_element_state_get_name(GST_STATE(mainbin[MMPLAYER_M_AUTOPLUG].gst)),
+ gst_element_state_get_name(GST_STATE(mainbin[MMPLAYER_M_PIPE].gst)));
+
+ /* In case of streaming, pause is required before finishing seeking by buffering.
+ After completing the seek(during buffering), the player and sink elems has paused state but others in playing state.
+ Because the buffering state is controlled according to the state transition for force resume,
+ the decodebin state should be paused as player state. */
+ gst_element_sync_state_with_parent(mainbin[MMPLAYER_M_AUTOPLUG].gst);
+ }
+
+ if ((MMPLAYER_IS_HTTP_STREAMING(player)) &&
+ (player->streamer) &&
+ (player->streamer->streaming_buffer_type == BUFFER_TYPE_MUXED) &&
+ !(player->streamer->buffering_state & MM_PLAYER_BUFFERING_IN_PROGRESS)) {
+ GstQuery *query = NULL;
+ gboolean busy = FALSE;
+ gint percent = 0;
+
+ if (player->streamer->buffer_handle[BUFFER_TYPE_MUXED].buffer) {
+ query = gst_query_new_buffering(GST_FORMAT_PERCENT);
+ if (gst_element_query(player->streamer->buffer_handle[BUFFER_TYPE_MUXED].buffer, query))
+ gst_query_parse_buffering_percent(query, &busy, &percent);
+ gst_query_unref(query);
+
+ LOGD("buffered percent(%s): %d\n",
+ GST_ELEMENT_NAME(player->streamer->buffer_handle[BUFFER_TYPE_MUXED].buffer), percent);
+ }
+
+ if (percent >= 100)
+ __mmplayer_handle_buffering_message(player);
+ }
+
+ player->seek_state = MMPLAYER_SEEK_COMPLETED;
+ }
+ }
+ }
+ break;
+
+ #if 0 /* delete unnecessary logs */
+ case GST_MESSAGE_REQUEST_STATE: LOGD("GST_MESSAGE_REQUEST_STATE\n"); break;
+ case GST_MESSAGE_STEP_START: LOGD("GST_MESSAGE_STEP_START\n"); break;
+ case GST_MESSAGE_QOS: LOGD("GST_MESSAGE_QOS\n"); break;
+ case GST_MESSAGE_PROGRESS: LOGD("GST_MESSAGE_PROGRESS\n"); break;
+ case GST_MESSAGE_ANY: LOGD("GST_MESSAGE_ANY\n"); break;
+ case GST_MESSAGE_INFO: LOGD("GST_MESSAGE_STATE_DIRTY\n"); break;
+ case GST_MESSAGE_STATE_DIRTY: LOGD("GST_MESSAGE_STATE_DIRTY\n"); break;
+ case GST_MESSAGE_STEP_DONE: LOGD("GST_MESSAGE_STEP_DONE\n"); break;
+ case GST_MESSAGE_CLOCK_PROVIDE: LOGD("GST_MESSAGE_CLOCK_PROVIDE\n"); break;
+ case GST_MESSAGE_STRUCTURE_CHANGE: LOGD("GST_MESSAGE_STRUCTURE_CHANGE\n"); break;
+ case GST_MESSAGE_STREAM_STATUS: LOGD("GST_MESSAGE_STREAM_STATUS\n"); break;
+ case GST_MESSAGE_APPLICATION: LOGD("GST_MESSAGE_APPLICATION\n"); break;
+ case GST_MESSAGE_SEGMENT_START: LOGD("GST_MESSAGE_SEGMENT_START\n"); break;
+ case GST_MESSAGE_SEGMENT_DONE: LOGD("GST_MESSAGE_SEGMENT_DONE\n"); break;
+ case GST_MESSAGE_LATENCY: LOGD("GST_MESSAGE_LATENCY\n"); break;
+ #endif
+
+ default:
+ break;
+ }
+
+ /* should not call 'gst_message_unref(msg)' */
+ return;
+}
+
/* Bus sync handler, invoked in the GStreamer streaming thread context.
 *
 * Filters bus messages before they reach the async bus watcher:
 * returns GST_BUS_PASS to forward a message, GST_BUS_DROP to consume it.
 * Every dropped message is unreffed here, since a sync handler owns
 * messages it drops.
 *
 * @param bus     the pipeline bus (unused directly)
 * @param message the posted message
 * @param data    mm_player_t handle
 * @return GST_BUS_PASS or GST_BUS_DROP
 */
GstBusSyncReply
__mmplayer_bus_sync_callback(GstBus * bus, GstMessage * message, gpointer data)
{
	mm_player_t *player = (mm_player_t *)data;
	GstBusSyncReply reply = GST_BUS_DROP;

	if (!(player->pipeline && player->pipeline->mainbin)) {
		LOGE("player pipeline handle is null");
		/* pipeline is being torn down; let the async watcher deal with it */
		return GST_BUS_PASS;
	}

	/* drop (and release) messages the player does not care about */
	if (!__mmplayer_gst_check_useful_message(player, message)) {
		gst_message_unref(message);
		return GST_BUS_DROP;
	}

	switch (GST_MESSAGE_TYPE(message)) {
	case GST_MESSAGE_STATE_CHANGED:
		/* post directly for fast launch */
		if (player->sync_handler) {
			__mmplayer_gst_callback(message, player);
			reply = GST_BUS_DROP;
		} else
			reply = GST_BUS_PASS;
		break;
	case GST_MESSAGE_TAG:
		/* tags are extracted synchronously; the message itself is dropped below */
		__mmplayer_gst_extract_tag_from_msg(player, message);

		#if 0 // debug
		{
			GstTagList *tags = NULL;

			gst_message_parse_tag(message, &tags);
			if (tags) {
				LOGE("TAGS received from element \"%s\".\n",
					GST_STR_NULL(GST_ELEMENT_NAME(GST_MESSAGE_SRC(message))));

				gst_tag_list_foreach(tags, print_tag, NULL);
				gst_tag_list_free(tags);
				tags = NULL;
			}
			break;
		}
		#endif
		break;

	case GST_MESSAGE_DURATION_CHANGED:
		/* update duration right away; message is still dropped */
		__mmplayer_gst_handle_duration(player, message);
		break;
	case GST_MESSAGE_ASYNC_DONE:
		/* NOTE:Don't call gst_callback directly
		 * because previous frame can be showed even though this message is received for seek.
		 */
	default:
		reply = GST_BUS_PASS;
		break;
	}

	/* dropped messages are owned here and must be released */
	if (reply == GST_BUS_DROP)
		gst_message_unref(message);

	return reply;
}
+
+int __mmplayer_gst_start(mm_player_t* player)
+{
+ int ret = MM_ERROR_NONE;
+ gboolean async = FALSE;
+
+ MMPLAYER_FENTER();
+
+ MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED);
+
+ /* NOTE : if SetPosition was called before Start. do it now */
+ /* streaming doesn't support it. so it should be always sync */
+ /* !!create one more api to check if there is pending seek rather than checking variables */
+ if (player->pending_seek.is_pending && !MMPLAYER_IS_STREAMING(player)) {
+ MMPLAYER_TARGET_STATE(player) = MM_PLAYER_STATE_PAUSED;
+ ret = __mmplayer_gst_pause(player, FALSE);
+ if (ret != MM_ERROR_NONE) {
+ LOGE("failed to set state to PAUSED for pending seek");
+ return ret;
+ }
+
+ MMPLAYER_TARGET_STATE(player) = MM_PLAYER_STATE_PLAYING;
+ if (__mmplayer_gst_pending_seek(player) != MM_ERROR_NONE)
+ LOGW("failed to seek pending postion. starting from the begin of content");
+ }
+
+ LOGD("current state before doing transition");
+ MMPLAYER_PENDING_STATE(player) = MM_PLAYER_STATE_PLAYING;
+ MMPLAYER_PRINT_STATE(player);
+
+ /* set pipeline state to PLAYING */
+ ret = __mmplayer_gst_set_state(player,
+ player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_STATE_PLAYING, async, MMPLAYER_STATE_CHANGE_TIMEOUT(player));
+
+ if (ret == MM_ERROR_NONE) {
+ MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_PLAYING);
+ } else {
+ LOGE("failed to set state to PLAYING");
+ return ret;
+ }
+
+ /* generating debug info before returning error */
+ MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-status-start");
+
+ MMPLAYER_FLEAVE();
+
+ return ret;
+}
+
/* Stop playback: move the pipeline back to PAUSED, rewind to position 0
 * where applicable, then set the player state to READY.
 *
 * @param player  player handle with a valid pipeline/mainbin
 * @return MM_ERROR_NONE on success, MM_ERROR_PLAYER_* otherwise
 */
int __mmplayer_gst_stop(mm_player_t* player)
{
	GstStateChangeReturn change_ret = GST_STATE_CHANGE_SUCCESS;
	MMHandleType attrs = 0;
	gboolean rewind = FALSE;
	gint timeout = 0;
	int ret = MM_ERROR_NONE;

	MMPLAYER_FENTER();

	MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED);
	MMPLAYER_RETURN_VAL_IF_FAIL(player->pipeline->mainbin, MM_ERROR_PLAYER_NOT_INITIALIZED);

	LOGD("current state before doing transition");
	MMPLAYER_PENDING_STATE(player) = MM_PLAYER_STATE_READY;
	MMPLAYER_PRINT_STATE(player);

	attrs = MMPLAYER_GET_ATTRS(player);
	if (!attrs) {
		LOGE("cannot get content attribute\n");
		return MM_ERROR_PLAYER_INTERNAL;
	}

	/* Just set state to PAUSED and then rewind. it's usual player behavior. */
	timeout = MMPLAYER_STATE_CHANGE_TIMEOUT(player);

	/* rewind only for local playback, or VOD streaming with a linked video decoder */
	if ((!MMPLAYER_IS_STREAMING(player) && !MMPLAYER_IS_MS_BUFF_SRC(player)) ||
		(player->streaming_type == STREAMING_SERVICE_VOD && player->videodec_linked))
		rewind = TRUE;

	if (player->es_player_push_mode || MMPLAYER_IS_HTTP_PD(player)) {
		/* disable the async state transition because there could be no data in the pipeline */
		__mmplayer_gst_handle_async(player, FALSE, MMPLAYER_SINK_ALL);
	}

	/* set gst state */
	ret = __mmplayer_gst_set_state(player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_STATE_PAUSED, FALSE, timeout);

	if (player->es_player_push_mode || MMPLAYER_IS_HTTP_PD(player)) {
		/* enable the async state transition as default operation */
		__mmplayer_gst_handle_async(player, TRUE, MMPLAYER_SINK_ALL);
	}

	/* return if set_state has failed */
	if (ret != MM_ERROR_NONE) {
		LOGE("failed to set state.\n");
		return ret;
	}

	/* rewind: flush-seek back to position 0 keeping the current playback rate */
	if (rewind) {
		if (!__mmplayer_gst_seek(player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, player->playback_rate,
				GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH, GST_SEEK_TYPE_SET, 0,
				GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE)) {
			LOGW("failed to rewind\n");
			ret = MM_ERROR_PLAYER_SEEK;
		}
	}

	/* initialize */
	player->sent_bos = FALSE;

	if (player->es_player_push_mode) //for cloudgame
		timeout = 0;

	/* wait for seek to complete */
	change_ret = gst_element_get_state(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, NULL, NULL, timeout * GST_SECOND);
	if (change_ret == GST_STATE_CHANGE_SUCCESS || change_ret == GST_STATE_CHANGE_NO_PREROLL) {
		MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_READY);
	} else {
		LOGE("fail to stop player.\n");
		ret = MM_ERROR_PLAYER_INTERNAL;
		__mmplayer_dump_pipeline_state(player);
	}

	/* generate dot file if enabled */
	MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-status-stop");

	MMPLAYER_FLEAVE();

	return ret;
}
+
+int __mmplayer_gst_pause(mm_player_t* player, gboolean async)
+{
+ int ret = MM_ERROR_NONE;
+
+ MMPLAYER_FENTER();
+
+ MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED);
+ MMPLAYER_RETURN_VAL_IF_FAIL(player->pipeline->mainbin, MM_ERROR_PLAYER_NOT_INITIALIZED);
+
+ LOGD("current state before doing transition");
+ MMPLAYER_PENDING_STATE(player) = MM_PLAYER_STATE_PAUSED;
+ MMPLAYER_PRINT_STATE(player);
+
+ /* set pipeline status to PAUSED */
+ ret = __mmplayer_gst_set_state(player,
+ player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_STATE_PAUSED, async, MMPLAYER_STATE_CHANGE_TIMEOUT(player));
+
+ if (FALSE == async) {
+ if (ret != MM_ERROR_NONE) {
+ GstMessage *msg = NULL;
+ GTimer *timer = NULL;
+ gdouble MAX_TIMEOUT_SEC = 3;
+
+ LOGE("failed to set state to PAUSED");
+
+ if (!player->bus_watcher) {
+ LOGE("there is no bus msg thread. pipeline is shutting down.");
+ return ret;
+ }
+
+ if (player->msg_posted) {
+ LOGE("error msg is already posted.");
+ return ret;
+ }
+
+ timer = g_timer_new();
+ g_timer_start(timer);
+
+ GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst));
+
+ do {
+ msg = gst_bus_timed_pop(bus, 100 * GST_MSECOND);
+ if (msg) {
+ if (GST_MESSAGE_TYPE(msg) == GST_MESSAGE_ERROR) {
+ GError *error = NULL;
+
+ /* parse error code */
+ gst_message_parse_error(msg, &error, NULL);
+
+ if (gst_structure_has_name(gst_message_get_structure(msg), "streaming_error")) {
+ /* Note : the streaming error from the streaming source is handled
+ * using __mmplayer_handle_streaming_error.
+ */
+ __mmplayer_handle_streaming_error(player, msg);
+
+ } else if (error) {
+ LOGE("paring error posted from bus, domain : %s, code : %d", g_quark_to_string(error->domain), error->code);
+
+ if (error->domain == GST_STREAM_ERROR)
+ ret = __mmplayer_gst_handle_stream_error(player, error, msg);
+ else if (error->domain == GST_RESOURCE_ERROR)
+ ret = __mmplayer_gst_handle_resource_error(player, error->code, NULL);
+ else if (error->domain == GST_LIBRARY_ERROR)
+ ret = __mmplayer_gst_handle_library_error(player, error->code);
+ else if (error->domain == GST_CORE_ERROR)
+ ret = __mmplayer_gst_handle_core_error(player, error->code);
+
+ g_error_free(error);
+ }
+ player->msg_posted = TRUE;
+ }
+ gst_message_unref(msg);
+ }
+ } while (!player->msg_posted && (g_timer_elapsed(timer, NULL) < MAX_TIMEOUT_SEC));
+ /* clean */
+ gst_object_unref(bus);
+ g_timer_stop(timer);
+ g_timer_destroy(timer);
+
+ return ret;
+
+ } else if ((!MMPLAYER_IS_RTSP_STREAMING(player)) && (!player->video_stream_cb) &&
+ (!player->pipeline->videobin) && (!player->pipeline->audiobin)) {
+
+ return MM_ERROR_PLAYER_CODEC_NOT_FOUND;
+
+ } else {
+ MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_PAUSED);
+ }
+ }
+
+ /* generate dot file before returning error */
+ MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-status-pause");
+
+ MMPLAYER_FLEAVE();
+
+ return ret;
+}
+
+int __mmplayer_gst_resume(mm_player_t* player, gboolean async)
+{
+ int ret = MM_ERROR_NONE;
+ gint timeout = 0;
+
+ MMPLAYER_FENTER();
+
+ MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline,
+ MM_ERROR_PLAYER_NOT_INITIALIZED);
+
+ LOGD("current state before doing transition");
+ MMPLAYER_PENDING_STATE(player) = MM_PLAYER_STATE_PLAYING;
+ MMPLAYER_PRINT_STATE(player);
+
+ if (async)
+ LOGD("do async state transition to PLAYING");
+
+ /* set pipeline state to PLAYING */
+ timeout = MMPLAYER_STATE_CHANGE_TIMEOUT(player);
+
+ ret = __mmplayer_gst_set_state(player,
+ player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_STATE_PLAYING, async, timeout);
+ if (ret != MM_ERROR_NONE) {
+ LOGE("failed to set state to PLAYING");
+ goto EXIT;
+ } else {
+ if (async == FALSE)
+ MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_PLAYING);
+ }
+
+EXIT:
+ /* generate dot file */
+ MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-status-resume");
+
+ MMPLAYER_FLEAVE();
+
+ return ret;
+}
+
+/* sending event to one of sinkelements */
+gboolean
+__mmplayer_gst_send_event_to_sink(mm_player_t* player, GstEvent* event)
+{
+ GstEvent * event2 = NULL;
+ GList *sinks = NULL;
+ gboolean res = FALSE;
+ MMPLAYER_FENTER();
+
+ MMPLAYER_RETURN_VAL_IF_FAIL(player, FALSE);
+ MMPLAYER_RETURN_VAL_IF_FAIL(event, FALSE);
+
+ /* While adding subtitles in live feeds seek is getting called.
+ Adding defensive check in framework layer.*/
+ if (GST_EVENT_TYPE(event) == GST_EVENT_SEEK) {
+ if (MMPLAYER_IS_LIVE_STREAMING(player)) {
+ LOGE("Should not send seek event during live playback");
+ return TRUE;
+ }
+ }
+
+ if (player->play_subtitle)
+ event2 = gst_event_copy((const GstEvent *)event);
+
+ sinks = player->sink_elements;
+ while (sinks) {
+ GstElement *sink = GST_ELEMENT_CAST(sinks->data);
+
+ if (GST_IS_ELEMENT(sink)) {
+ /* keep ref to the event */
+ gst_event_ref(event);
+
+ if ((res = gst_element_send_event(sink, event))) {
+ LOGD("sending event[%s] to sink element [%s] success!\n",
+ GST_EVENT_TYPE_NAME(event), GST_ELEMENT_NAME(sink));
+
+ /* rtsp case, asyn_done is not called after seek during pause state */
+ if (MMPLAYER_IS_RTSP_STREAMING(player)) {
+ if (GST_EVENT_TYPE(event) == GST_EVENT_SEEK) {
+ if (MMPLAYER_TARGET_STATE(player) == MM_PLAYER_STATE_PAUSED) {
+ LOGD("RTSP seek completed, after pause state..\n");
+ player->seek_state = MMPLAYER_SEEK_NONE;
+ MMPLAYER_POST_MSG(player, MM_MESSAGE_SEEK_COMPLETED, NULL);
+ }
+
+ }
+ }
+
+ if (MMPLAYER_IS_MS_BUFF_SRC(player)) {
+ sinks = g_list_next(sinks);
+ continue;
+ } else {
+ break;
+ }
+ }
+
+ LOGD("sending event[%s] to sink element [%s] failed. try with next one.\n",
+ GST_EVENT_TYPE_NAME(event), GST_ELEMENT_NAME(sink));
+ }
+
+ sinks = g_list_next(sinks);
+ }
+
+ /* Note : Textbin is not linked to the video or audio bin.
+ * It needs to send the event to the text sink seperatelly.
+ */
+ if (player->play_subtitle && player->pipeline) {
+ GstElement *text_sink = GST_ELEMENT_CAST(player->pipeline->textbin[MMPLAYER_T_FAKE_SINK].gst);
+
+ if (GST_IS_ELEMENT(text_sink)) {
+ /* keep ref to the event */
+ gst_event_ref(event2);
+
+ if ((res = gst_element_send_event(text_sink, event2)))
+ LOGD("sending event[%s] to subtitle sink element [%s] success!\n",
+ GST_EVENT_TYPE_NAME(event2), GST_ELEMENT_NAME(text_sink));
+ else
+ LOGE("sending event[%s] to subtitle sink element [%s] failed!\n",
+ GST_EVENT_TYPE_NAME(event2), GST_ELEMENT_NAME(text_sink));
+
+ gst_event_unref(event2);
+ }
+ }
+
+ gst_event_unref(event);
+
+ MMPLAYER_FLEAVE();
+
+ return res;
+}
+
+gboolean
+__mmplayer_gst_seek(mm_player_t* player, GstElement * element, gdouble rate,
+ GstFormat format, GstSeekFlags flags, GstSeekType cur_type,
+ gint64 cur, GstSeekType stop_type, gint64 stop)
+{
+ GstEvent* event = NULL;
+ gboolean result = FALSE;
+
+ MMPLAYER_FENTER();
+
+ MMPLAYER_RETURN_VAL_IF_FAIL(player, FALSE);
+
+ if (player->pipeline && player->pipeline->textbin)
+ __mmplayer_drop_subtitle(player, FALSE);
+
+ event = gst_event_new_seek(rate, format, flags, cur_type,
+ cur, stop_type, stop);
+
+ result = __mmplayer_gst_send_event_to_sink(player, event);
+
+ MMPLAYER_FLEAVE();
+
+ return result;
+}
+
/* Seek to the requested position.
 *
 * Handles time- and percent-based positions, pending/deferred seeks for
 * states that cannot seek yet, and several streaming special cases
 * (RTSP property-based seek, HTTP flush before pause, buffering reset).
 *
 * @param player           player handle with a valid pipeline
 * @param format           MM_PLAYER_POS_FORMAT_TIME or _PERCENT
 * @param position         target position (nsec for TIME, 0..100 for PERCENT)
 * @param internal_called  TRUE when invoked internally (does not enter
 *                         SEEK_IN_PROGRESS state and skips buffering reset)
 * @return MM_ERROR_NONE, MM_ERROR_PLAYER_DOING_SEEK, MM_ERROR_PLAYER_NO_OP,
 *         MM_ERROR_INVALID_ARGUMENT or MM_ERROR_PLAYER_SEEK
 */
int
__mmplayer_gst_set_position(mm_player_t* player, int format, gint64 position, gboolean internal_called)
{
	gint64 dur_nsec = 0;
	gint64 pos_nsec = 0;
	gboolean ret = TRUE;
	gboolean accurated = FALSE;
	GstSeekFlags seek_flags = GST_SEEK_FLAG_FLUSH;

	MMPLAYER_FENTER();
	MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED);
	MMPLAYER_RETURN_VAL_IF_FAIL(!MMPLAYER_IS_LIVE_STREAMING(player), MM_ERROR_PLAYER_NO_OP);

	/* only PLAYING/PAUSED can seek right away; otherwise store as pending */
	if (MMPLAYER_CURRENT_STATE(player) != MM_PLAYER_STATE_PLAYING
		&& MMPLAYER_CURRENT_STATE(player) != MM_PLAYER_STATE_PAUSED)
		goto PENDING;

	if (!MMPLAYER_IS_MS_BUFF_SRC(player)) {
		/* check duration */
		/* NOTE : duration cannot be zero except live streaming.
		 * Since some element could have some timing problemn with quering duration, try again.
		 */
		if (player->duration == 0) {
			if (!gst_element_query_duration(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_FORMAT_TIME, &dur_nsec)) {
				/* For RTSP Streaming , duration is not returned in READY state. So seek to the previous position does not work properly.
				 * Added a patch to postpone the actual seek when state changes to PLAY. Sending a fake SEEK_COMPLETED event to finish the current request. */
				if ((MMPLAYER_IS_RTSP_STREAMING(player)) && (__mmplayer_get_stream_service_type(player) == STREAMING_SERVICE_VOD)) {
					player->pending_seek.is_pending = TRUE;
					player->pending_seek.format = format;
					player->pending_seek.pos = position;
					player->seek_state = MMPLAYER_SEEK_NONE;
					MMPLAYER_POST_MSG(player, MM_MESSAGE_SEEK_COMPLETED, NULL);
					return MM_ERROR_NONE;
				} else {
					goto SEEK_ERROR;
				}
			}
			player->duration = dur_nsec;
		}
	}
	LOGD("playback rate: %f\n", player->playback_rate);

	/* accurate seek trades speed for precision; key-unit seek is the default */
	mm_attrs_get_int_by_name(player->attrs, "accurate_seek", &accurated);
	if (accurated)
		seek_flags |= GST_SEEK_FLAG_ACCURATE;
	else
		seek_flags |= GST_SEEK_FLAG_KEY_UNIT;

	/* do seek */
	switch (format) {
	case MM_PLAYER_POS_FORMAT_TIME:
	{
		if (!MMPLAYER_IS_MS_BUFF_SRC(player)) {
			GstQuery *query = NULL;
			gboolean seekable = FALSE;

			/* check position is valid or not */
			if (position > player->duration)
				goto INVALID_ARGS;

			query = gst_query_new_seeking(GST_FORMAT_TIME);
			if (gst_element_query(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, query)) {
				gst_query_parse_seeking(query, NULL, &seekable, NULL, NULL);
				gst_query_unref(query);

				if (!seekable) {
					LOGW("non-seekable content");
					player->seek_state = MMPLAYER_SEEK_NONE;
					return MM_ERROR_PLAYER_NO_OP;
				}
			} else {
				LOGW("failed to get seeking query");
				gst_query_unref(query); /* keep seeking operation */
			}

			LOGD("seeking to(%"G_GINT64_FORMAT") nsec, duration is %"G_GINT64_FORMAT" nsec\n", position, player->duration);

			/* For rtspsrc stack , npt-start value coming from server is used for finding the current position.
			   But when a rtsp clip (especially from Youtube Desktop View) is paused and kept for sometime,npt-start is still increasing.
			   This causes problem is position calculation during normal pause resume scenarios also.
			   Currently during seek , we are sending the current position to rtspsrc module for position saving for later use. */
			if ((MMPLAYER_IS_RTSP_STREAMING(player)) &&
				(__mmplayer_get_stream_service_type(player) == STREAMING_SERVICE_VOD)) {
				if (!gst_element_query_position(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_FORMAT_TIME, &pos_nsec))
					LOGW("getting current position failed in seek\n");

				player->last_position = pos_nsec;
				g_object_set(player->pipeline->mainbin[MMPLAYER_M_SRC].gst, "resume-position", player->last_position, NULL);
			}

			/* reject overlapping seeks */
			if (player->seek_state != MMPLAYER_SEEK_NONE) {
				LOGD("not completed seek");
				return MM_ERROR_PLAYER_DOING_SEEK;
			}
		}

		if (!internal_called)
			player->seek_state = MMPLAYER_SEEK_IN_PROGRESS;

		/* audio-only HTTP streaming: flush at the current position and pause
		 * before the real seek, to avoid stale data in the pipeline */
		if ((MMPLAYER_IS_HTTP_STREAMING(player)) && (!player->videodec_linked)) {
			gint64 cur_time = 0;

			/* get current position */
			gst_element_query_position(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_FORMAT_TIME, &cur_time);

			/* flush */
			GstEvent *event = gst_event_new_seek(1.0,
				GST_FORMAT_TIME,
				(GstSeekFlags)GST_SEEK_FLAG_FLUSH,
				GST_SEEK_TYPE_SET, cur_time,
				GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
			if (event)
				__mmplayer_gst_send_event_to_sink(player, event);

			if (!MMPLAYER_IS_RTSP_STREAMING(player))
				__mmplayer_gst_pause(player, FALSE);
		}

		pos_nsec = position;

		/* rtsp streaming case, there is no sink after READY TO PAUSE state(no preroll state change).
		   that's why set position through property. */
		if ((MMPLAYER_IS_RTSP_STREAMING(player)) &&
			(MMPLAYER_CURRENT_STATE(player) == MM_PLAYER_STATE_PAUSED) &&
			(MMPLAYER_PREV_STATE(player) == MM_PLAYER_STATE_READY) &&
			(!player->videodec_linked) && (!player->audiodec_linked)) {

			g_object_set(player->pipeline->mainbin[MMPLAYER_M_SRC].gst, "pending-start-position", pos_nsec, NULL);
			LOGD("[%s] set position =%"GST_TIME_FORMAT,
				GST_ELEMENT_NAME(player->pipeline->mainbin[MMPLAYER_M_SRC].gst), GST_TIME_ARGS(pos_nsec));
			player->seek_state = MMPLAYER_SEEK_NONE;
			MMPLAYER_POST_MSG(player, MM_MESSAGE_SEEK_COMPLETED, NULL);
		} else {
			ret = __mmplayer_gst_seek(player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, player->playback_rate,
				GST_FORMAT_TIME, seek_flags,
				GST_SEEK_TYPE_SET, pos_nsec, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
		}

		if (!ret) {
			LOGE("failed to set position.");
			goto SEEK_ERROR;
		}
	}
	break;

	case MM_PLAYER_POS_FORMAT_PERCENT:
	{
		LOGD("seeking to %"G_GINT64_FORMAT"%%", position);

		if (player->seek_state != MMPLAYER_SEEK_NONE) {
			LOGD("not completed seek");
			return MM_ERROR_PLAYER_DOING_SEEK;
		}

		if (!internal_called)
			player->seek_state = MMPLAYER_SEEK_IN_PROGRESS;

		/* FIXIT : why don't we use 'GST_FORMAT_PERCENT' */
		pos_nsec = (gint64)((position * player->duration) / 100);
		ret = __mmplayer_gst_seek(player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, player->playback_rate,
			GST_FORMAT_TIME, seek_flags,
			GST_SEEK_TYPE_SET, pos_nsec, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
		if (!ret) {
			LOGE("failed to set position. pos[%"G_GINT64_FORMAT"] dur[%"G_GINT64_FORMAT"] ", pos_nsec, player->duration);
			goto SEEK_ERROR;
		}
	}
	break;

	default:
		goto INVALID_ARGS;
	}

	/* NOTE : store last seeking point to overcome some bad operation
	 * (returning zero when getting current position) of some elements
	 */
	player->last_position = pos_nsec;

	/* MSL should guarante playback rate when seek is selected during trick play of fast forward. */
	if (player->playback_rate > 1.0)
		_mmplayer_set_playspeed((MMHandleType)player, player->playback_rate, FALSE);

	if ((!internal_called) &&
		(player->streamer) && (player->streamer->buffering_state & MM_PLAYER_BUFFERING_IN_PROGRESS)) {
		LOGD("buffering should be reset after seeking");
		player->streamer->buffering_state = MM_PLAYER_BUFFERING_ABORT;
		player->streamer->buffering_percent = 100; /* after seeking, new per can be non-zero. */
	}

	MMPLAYER_FLEAVE();
	return MM_ERROR_NONE;

PENDING:
	/* remember the request; it will be executed once playback can seek */
	player->pending_seek.is_pending = TRUE;
	player->pending_seek.format = format;
	player->pending_seek.pos = position;

	LOGW("player current-state : %s, pending-state : %s, just preserve pending position(%"G_GINT64_FORMAT").\n",
		MMPLAYER_STATE_GET_NAME(MMPLAYER_CURRENT_STATE(player)),
		MMPLAYER_STATE_GET_NAME(MMPLAYER_PENDING_STATE(player)),
		player->pending_seek.pos);

	return MM_ERROR_NONE;

INVALID_ARGS:
	LOGE("invalid arguments, position: %"G_GINT64_FORMAT" dur : %"G_GINT64_FORMAT" format : %d \n", position, player->duration, format);
	return MM_ERROR_INVALID_ARGUMENT;

SEEK_ERROR:
	player->seek_state = MMPLAYER_SEEK_NONE;
	return MM_ERROR_PLAYER_SEEK;
}
+
/* Get the current playback position.
 *
 * @param player    player handle with a valid pipeline/mainbin
 * @param format    MM_PLAYER_POS_FORMAT_TIME (nsec) or _PERCENT (0..100)
 * @param position  [out] position in the requested format
 * @return MM_ERROR_NONE, or MM_ERROR_PLAYER_INTERNAL on unknown format
 */
int
__mmplayer_gst_get_position(mm_player_t* player, int format, gint64* position)
{
#define TRICKPLAY_OFFSET GST_MSECOND

	MMPlayerStateType current_state = MM_PLAYER_STATE_NONE;
	gint64 pos_nsec = 0;
	gboolean ret = TRUE;

	MMPLAYER_RETURN_VAL_IF_FAIL(player && position && player->pipeline && player->pipeline->mainbin,
		MM_ERROR_PLAYER_NOT_INITIALIZED);

	current_state = MMPLAYER_CURRENT_STATE(player);

	/* NOTE : query position except paused state to overcome some bad operation
	 * please refer to below comments in details
	 */
	if (current_state != MM_PLAYER_STATE_PAUSED)
		ret = gst_element_query_position(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_FORMAT_TIME, &pos_nsec);

	/* NOTE : get last point to overcome some bad operation of some elements
	 *(returning zero when getting current position in paused state
	 * and when failed to get postion during seeking
	 */
	if ((current_state == MM_PLAYER_STATE_PAUSED) || (!ret)) {
		LOGD("pos_nsec = %"GST_TIME_FORMAT" and ret = %d and state = %d", GST_TIME_ARGS(pos_nsec), ret, current_state);

		/* during reverse trick play, nudge the reported position backwards */
		if (player->playback_rate < 0.0)
			pos_nsec = player->last_position - TRICKPLAY_OFFSET;
		else
			pos_nsec = player->last_position;

		/* when the query itself failed, fall back to the raw last position
		 * (this overrides the trickplay offset applied just above);
		 * otherwise cache the (possibly offset) value as the new last position */
		if (!ret)
			pos_nsec = player->last_position;
		else
			player->last_position = pos_nsec;

		LOGD("returning last point : %"GST_TIME_FORMAT, GST_TIME_ARGS(pos_nsec));

	} else {
		/* clamp to duration to hide elements reporting past-the-end values */
		if (player->duration > 0 && pos_nsec > player->duration)
			pos_nsec = player->duration;

		player->last_position = pos_nsec;
	}

	switch (format) {
	case MM_PLAYER_POS_FORMAT_TIME:
		*position = pos_nsec;
		break;

	case MM_PLAYER_POS_FORMAT_PERCENT:
	{
		if (player->duration <= 0) {
			LOGD("duration is [%"G_GINT64_FORMAT"], so returning position 0\n", player->duration);
			*position = 0;
		} else {
			LOGD("position is [%"G_GINT64_FORMAT"] nsec , duration is [%"G_GINT64_FORMAT"] nsec", pos_nsec, player->duration);
			*position = (gint64)(pos_nsec * 100 / player->duration);
		}
		break;
	}
	default:
		return MM_ERROR_PLAYER_INTERNAL;
	}

	return MM_ERROR_NONE;
}
+
/* Get the buffered range for HTTP streaming, expressed in percent of the
 * total content. start = current playback position; stop = estimated end
 * of the buffered data (from queue2 stats, multiqueue size, or bitrate).
 *
 * @param player     player handle with a valid pipeline/mainbin
 * @param format     must be MM_PLAYER_POS_FORMAT_PERCENT
 * @param start_pos  [out] buffered range start, percent (0..100)
 * @param stop_pos   [out] buffered range end, percent (start..100)
 * @return MM_ERROR_NONE (also on soft failures with 0/0 output),
 *         MM_ERROR_PLAYER_NO_OP or MM_ERROR_INVALID_ARGUMENT
 */
int __mmplayer_gst_get_buffer_position(mm_player_t* player, int format, unsigned long* start_pos, unsigned long* stop_pos)
{
#define STREAMING_IS_FINISHED 0
#define BUFFERING_MAX_PER 100
#define DEFAULT_PER_VALUE -1
#define CHECK_PERCENT_VALUE(a, min, max)(((a) > (min)) ? (((a) < (max)) ? (a) : (max)) : (min))

	MMPlayerGstElement *mainbin = NULL;
	gint start_per = DEFAULT_PER_VALUE, stop_per = DEFAULT_PER_VALUE;
	gint64 buffered_total = 0;
	gint64 position = 0;
	gint buffered_sec = -1;
	GstBufferingMode mode = GST_BUFFERING_STREAM;
	gint64 content_size_time = player->duration;
	guint64 content_size_bytes = player->http_content_size;

	MMPLAYER_RETURN_VAL_IF_FAIL(player &&
		player->pipeline &&
		player->pipeline->mainbin,
		MM_ERROR_PLAYER_NOT_INITIALIZED);

	MMPLAYER_RETURN_VAL_IF_FAIL(start_pos && stop_pos, MM_ERROR_INVALID_ARGUMENT);

	*start_pos = 0;
	*stop_pos = 0;

	if (!MMPLAYER_IS_HTTP_STREAMING(player)) {
		/* and rtsp is not ready yet. */
		LOGW("it's only used for http streaming case.\n");
		return MM_ERROR_PLAYER_NO_OP;
	}

	if (format != MM_PLAYER_POS_FORMAT_PERCENT) {
		LOGW("Time format is not supported yet.\n");
		return MM_ERROR_INVALID_ARGUMENT;
	}

	/* both duration and byte size are needed for a percent estimate */
	if (content_size_time <= 0 || content_size_bytes <= 0) {
		LOGW("there is no content size.");
		return MM_ERROR_NONE;
	}

	if (__mmplayer_gst_get_position(player, MM_PLAYER_POS_FORMAT_TIME, &position) != MM_ERROR_NONE) {
		LOGW("fail to get current position.");
		return MM_ERROR_NONE;
	}

	LOGD("pos %"G_GINT64_FORMAT" msec, dur %d sec, len %"G_GUINT64_FORMAT" bytes",
		GST_TIME_AS_MSECONDS(position), (guint)GST_TIME_AS_SECONDS(content_size_time), content_size_bytes);

	mainbin = player->pipeline->mainbin;
	/* range start: current playback position as percent of duration */
	start_per = (gint)(floor(100 *(gdouble)position / (gdouble)content_size_time));

	/* preferred source of the range end: buffering stats from queue2 */
	if (mainbin[MMPLAYER_M_MUXED_S_BUFFER].gst) {
		GstQuery *query = NULL;
		gint byte_in_rate = 0, byte_out_rate = 0;
		gint64 estimated_total = 0;

		query = gst_query_new_buffering(GST_FORMAT_BYTES);
		if (!query || !gst_element_query(mainbin[MMPLAYER_M_MUXED_S_BUFFER].gst, query)) {
			LOGW("fail to get buffering query from queue2");
			if (query)
				gst_query_unref(query);
			return MM_ERROR_NONE;
		}

		gst_query_parse_buffering_stats(query, &mode, &byte_in_rate, &byte_out_rate, NULL);
		LOGD("mode %d, in_rate %d, out_rate %d", mode, byte_in_rate, byte_out_rate);

		if (mode == GST_BUFFERING_STREAM) {
			/* using only queue in case of push mode(ts / mp3) */
			if (gst_element_query_position(mainbin[MMPLAYER_M_SRC].gst,
					GST_FORMAT_BYTES, &buffered_total)) {
				LOGD("buffered_total %"G_GINT64_FORMAT, buffered_total);
				stop_per = 100 * buffered_total / content_size_bytes;
			}
		} else {
			/* GST_BUFFERING_TIMESHIFT or GST_BUFFERING_DOWNLOAD */
			guint idx = 0;
			guint num_of_ranges = 0;
			gint64 start_byte = 0, stop_byte = 0;

			gst_query_parse_buffering_range(query, NULL, NULL, NULL, &estimated_total);
			if (estimated_total != STREAMING_IS_FINISHED) {
				/* buffered size info from queue2 */
				num_of_ranges = gst_query_get_n_buffering_ranges(query);
				for (idx = 0; idx < num_of_ranges; idx++) {
					gst_query_parse_nth_buffering_range(query, idx, &start_byte, &stop_byte);
					LOGD("range %d, %"G_GINT64_FORMAT" ~ %"G_GUINT64_FORMAT, idx, start_byte, stop_byte);

					buffered_total += (stop_byte - start_byte);
				}
			} else
				/* download already finished: everything is buffered */
				stop_per = BUFFERING_MAX_PER;
		}
		gst_query_unref(query);
	}

	/* fallback: estimate the range end from buffered bytes and average bitrate */
	if (stop_per == DEFAULT_PER_VALUE) {
		guint dur_sec = (guint)(content_size_time/GST_SECOND);
		if (dur_sec > 0) {
			guint avg_byterate = (guint)(content_size_bytes/dur_sec);

			/* buffered size info from multiqueue */
			if (mainbin[MMPLAYER_M_DEMUXED_S_BUFFER].gst) {
				guint curr_size_bytes = 0;
				g_object_get(G_OBJECT(mainbin[MMPLAYER_M_DEMUXED_S_BUFFER].gst),
					"curr-size-bytes", &curr_size_bytes, NULL);
				LOGD("curr_size_bytes of multiqueue = %d", curr_size_bytes);
				buffered_total += curr_size_bytes;
			}

			/* convert buffered bytes to seconds, preferring measured average
			 * byterate, then maximum bitrate, then total bitrate */
			if (avg_byterate > 0)
				buffered_sec = (gint)(ceil((gdouble)buffered_total/(gdouble)avg_byterate));
			else if (player->total_maximum_bitrate > 0)
				buffered_sec = (gint)(ceil((gdouble)GET_BIT_FROM_BYTE(buffered_total)/(gdouble)player->total_maximum_bitrate));
			else if (player->total_bitrate > 0)
				buffered_sec = (gint)(ceil((gdouble)GET_BIT_FROM_BYTE(buffered_total)/(gdouble)player->total_bitrate));

			if (buffered_sec >= 0)
				stop_per = start_per +(gint)(ceil)(100*(gdouble)buffered_sec/(gdouble)dur_sec);
		}
	}

	/* clamp to sane percent values; stop can never precede start */
	*start_pos = CHECK_PERCENT_VALUE(start_per, 0, 100);
	*stop_pos = CHECK_PERCENT_VALUE(stop_per, *start_pos, 100);

	LOGD("buffered info: %"G_GINT64_FORMAT" bytes, %d sec, per %lu~%lu\n",
		buffered_total, buffered_sec, *start_pos, *stop_pos);

	return MM_ERROR_NONE;
}
+
#include "mm_player_utils.h"
#include "mm_player_tracks.h"
#include "mm_player_360.h"
+#include "mm_player_gst.h"
#include <system_info.h>
#include <sound_manager.h>
static void __mmplayer_gst_rtp_no_more_pads(GstElement *element, gpointer data);
static void __mmplayer_gst_rtp_dynamic_pad(GstElement *element, GstPad *pad, gpointer data);
-static MMStreamingType __mmplayer_get_stream_service_type(mm_player_t* player);
static gboolean __mmplayer_update_subtitle(GstElement* object, GstBuffer *buffer, GstPad *pad, gpointer data);
static void __mmplayer_release_misc(mm_player_t* player);
static void __mmplayer_release_misc_post(mm_player_t* player);
static gboolean __mmplayer_init_gstreamer(mm_player_t* player);
-static GstBusSyncReply __mmplayer_bus_sync_callback(GstBus * bus, GstMessage * message, gpointer data);
-static void __mmplayer_gst_callback(GstMessage *msg, gpointer data);
-static gboolean __mmplayer_gst_extract_tag_from_msg(mm_player_t* player, GstMessage *msg);
-static gboolean __mmplayer_gst_handle_duration(mm_player_t* player, GstMessage* msg);
static gboolean __mmplayer_gst_remove_fakesink(mm_player_t* player, MMPlayerGstElement* fakesink);
static GstPadProbeReturn __mmplayer_audio_stream_probe(GstPad *pad, GstPadProbeInfo *info, gpointer u_data);
static void __mmplayer_video_stream_decoded_preroll_cb(GstElement* object, GstBuffer *buffer, GstPad *pad, gpointer data);
static int __mmplayer_change_selector_pad(mm_player_t* player, MMPlayerTrackType type, int index);
static gboolean __mmplayer_check_subtitle(mm_player_t* player);
-static gboolean __mmplayer_handle_streaming_error(mm_player_t* player, GstMessage * message);
-static void __mmplayer_handle_eos_delay(mm_player_t* player, int delay_in_ms);
-static void __mmplayer_cancel_eos_timer(mm_player_t* player);
-static gboolean __mmplayer_eos_timer_cb(gpointer u_data);
static int __mmplayer_handle_missed_plugin(mm_player_t* player);
static int __mmplayer_check_not_supported_codec(mm_player_t* player, const gchar* factory_class, const gchar* mime);
-static gboolean __mmplayer_configure_audio_callback(mm_player_t* player);
static void __mmplayer_add_sink(mm_player_t* player, GstElement* sink);
static void __mmplayer_del_sink(mm_player_t* player, GstElement* sink);
static void __mmplayer_release_signal_connection(mm_player_t* player, MMPlayerSignalType type);
static gpointer __mmplayer_next_play_thread(gpointer data);
-static gboolean _mmplayer_update_content_attrs(mm_player_t* player, enum content_attr_flag flag);
-
static gboolean __mmplayer_add_dump_buffer_probe(mm_player_t *player, GstElement *element);
static GstPadProbeReturn __mmplayer_dump_buffer_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer u_data);
static void __mmplayer_release_dump_list(GList *dump_list);
-
static int __gst_realize(mm_player_t* player);
static int __gst_unrealize(mm_player_t* player);
-static int __gst_start(mm_player_t* player);
-static int __gst_stop(mm_player_t* player);
-static int __gst_pause(mm_player_t* player, gboolean async);
-static int __gst_resume(mm_player_t* player, gboolean async);
-static gboolean __gst_seek(mm_player_t* player, GstElement * element, gdouble rate,
- GstFormat format, GstSeekFlags flags, GstSeekType cur_type,
- gint64 cur, GstSeekType stop_type, gint64 stop);
-static int __gst_pending_seek(mm_player_t* player);
-
-static int __gst_set_position(mm_player_t* player, int format, gint64 position, gboolean internal_called);
-static int __gst_get_position(mm_player_t* player, int format, gint64 *position);
-static int __gst_get_buffer_position(mm_player_t* player, int format, unsigned long* start_pos, unsigned long* stop_pos);
static int __gst_adjust_subtitle_position(mm_player_t* player, int format, int position);
static int __gst_set_message_callback(mm_player_t* player, MMMessageCallback callback, gpointer user_param);
-
-static gboolean __gst_send_event_to_sink(mm_player_t* player, GstEvent* event);
-
static gboolean __mmplayer_can_extract_pcm(mm_player_t* player);
/* util */
-static gboolean __is_ms_buff_src(mm_player_t* player);
-static gboolean __has_suffix(mm_player_t * player, const gchar * suffix);
-
static int __mmplayer_realize_streaming_ext(mm_player_t* player);
static int __mmplayer_unrealize_streaming_ext(mm_player_t *player);
static int __mmplayer_start_streaming_ext(mm_player_t *player);
static int __mmplayer_destroy_streaming_ext(mm_player_t* player);
static int __mmplayer_do_change_videosink(mm_player_t* player, const int dec_index, const char *videosink_element, MMDisplaySurfaceType surface_type, void *display_overlay);
-
static gboolean __mmplayer_verify_next_play_path(mm_player_t *player);
static void __mmplayer_activate_next_source(mm_player_t *player, GstState target);
static void __mmplayer_check_pipeline(mm_player_t* player);
static gboolean __mmplayer_deactivate_selector(mm_player_t *player, MMPlayerTrackType type);
static void __mmplayer_deactivate_old_path(mm_player_t *player);
-
-static void __mmplayer_update_buffer_setting(mm_player_t *player, GstMessage *buffering_msg);
static GstElement *__mmplayer_element_create_and_link(mm_player_t *player, GstPad* pad, const char* name);
-
static int __mmplayer_gst_create_plain_text_elements(mm_player_t* player);
static guint32 _mmplayer_convert_fourcc_string_to_value(const gchar* format_name);
static void __gst_appsrc_feed_audio_data(GstElement *element, guint size, gpointer user_data);
static gboolean __gst_seek_video_data(GstElement * appsrc, guint64 position, gpointer user_data);
static gboolean __gst_seek_subtitle_data(GstElement * appsrc, guint64 position, gpointer user_data);
static void __mmplayer_gst_caps_notify_cb(GstPad * pad, GParamSpec * unused, gpointer data);
-static void __mmplayer_audio_stream_clear_buffer(mm_player_t* player, gboolean send_all);
static void __mmplayer_audio_stream_send_data(mm_player_t* player, mm_player_audio_stream_buff_t *a_buffer);
static void __mmplayer_initialize_storage_info(mm_player_t* player, MMPlayerPathType path_type);
-static void __mmplayer_get_metadata_360_from_tags(GstTagList *tags, mm_player_spherical_metadata_t *metadata);
static int __resource_release_cb(mm_resource_manager_h rm, mm_resource_manager_res_h res, void *user_data);
-static void __mmplayer_gst_handle_async(mm_player_t* player, gboolean async, enum MMPlayerSinkType type);
/*===========================================================================================
| |
/* This function should be called after the pipeline goes PAUSED or higher
state. */
gboolean
-_mmplayer_update_content_attrs(mm_player_t* player, enum content_attr_flag flag)
+__mmplayer_update_content_attrs(mm_player_t* player, enum content_attr_flag flag)
{
static gboolean has_duration = FALSE;
static gboolean has_video_attrs = FALSE;
return TRUE;
}
-static MMStreamingType __mmplayer_get_stream_service_type(mm_player_t* player)
+MMStreamingType __mmplayer_get_stream_service_type(mm_player_t* player)
{
MMStreamingType streaming_type = STREAMING_SERVICE_NONE;
/* rtsp case, get content attrs by GstMessage */
if (!MMPLAYER_IS_RTSP_STREAMING(player)) {
/* it's first time to update all content attrs. */
- _mmplayer_update_content_attrs(player, ATTR_ALL);
+ __mmplayer_update_content_attrs(player, ATTR_ALL);
}
}
/* NOTE : giving ATTR_MISSING_ONLY may have dependency with
* c-api since c-api doesn't use _start() anymore. It may not work propery with
* legacy mmfw-player api */
- _mmplayer_update_content_attrs(player, ATTR_MISSING_ONLY);
+ __mmplayer_update_content_attrs(player, ATTR_MISSING_ONLY);
}
if ((player->cmd == MMPLAYER_COMMAND_START) || (player->cmd == MMPLAYER_COMMAND_RESUME)) {
return;
}
-static gpointer __mmplayer_next_play_thread(gpointer data)
+int
+__mmplayer_check_state(mm_player_t* player, enum PlayerCommandState command)
{
- mm_player_t* player = (mm_player_t*) data;
- MMPlayerGstElement *mainbin = NULL;
+ MMPlayerStateType current_state = MM_PLAYER_STATE_NUM;
+ MMPlayerStateType pending_state = MM_PLAYER_STATE_NUM;
- MMPLAYER_RETURN_VAL_IF_FAIL(player, NULL);
-
- MMPLAYER_NEXT_PLAY_THREAD_LOCK(player);
- while (!player->next_play_thread_exit) {
- LOGD("next play thread started. waiting for signal.\n");
- MMPLAYER_NEXT_PLAY_THREAD_WAIT(player);
+ MMPLAYER_RETURN_VAL_IF_FAIL(player, MM_ERROR_PLAYER_NOT_INITIALIZED);
- LOGD("reconfigure pipeline for gapless play.\n");
+ //LOGD("incoming command : %d \n", command);
- if (player->next_play_thread_exit) {
- if (player->gapless.reconfigure) {
- player->gapless.reconfigure = false;
- MMPLAYER_PLAYBACK_UNLOCK(player);
- }
- LOGD("exiting gapless play thread\n");
- break;
- }
+ current_state = MMPLAYER_CURRENT_STATE(player);
+ pending_state = MMPLAYER_PENDING_STATE(player);
- mainbin = player->pipeline->mainbin;
+ MMPLAYER_PRINT_STATE(player);
- MMPLAYER_RELEASE_ELEMENT(player, mainbin, MMPLAYER_M_MUXED_S_BUFFER);
- MMPLAYER_RELEASE_ELEMENT(player, mainbin, MMPLAYER_M_ID3DEMUX);
- MMPLAYER_RELEASE_ELEMENT(player, mainbin, MMPLAYER_M_AUTOPLUG);
- MMPLAYER_RELEASE_ELEMENT(player, mainbin, MMPLAYER_M_TYPEFIND);
- MMPLAYER_RELEASE_ELEMENT(player, mainbin, MMPLAYER_M_SRC);
+ switch (command) {
+ case MMPLAYER_COMMAND_CREATE:
+ {
+ MMPLAYER_TARGET_STATE(player) = MM_PLAYER_STATE_NULL;
- __mmplayer_activate_next_source(player, GST_STATE_PLAYING);
+ if (current_state == MM_PLAYER_STATE_NULL ||
+ current_state == MM_PLAYER_STATE_READY ||
+ current_state == MM_PLAYER_STATE_PAUSED ||
+ current_state == MM_PLAYER_STATE_PLAYING)
+ goto NO_OP;
}
- MMPLAYER_NEXT_PLAY_THREAD_UNLOCK(player);
+ break;
- return NULL;
-}
+ case MMPLAYER_COMMAND_DESTROY:
+ {
+ /* destroy can be called anytime */
-static void
-__mmplayer_update_buffer_setting(mm_player_t *player, GstMessage *buffering_msg)
-{
- MMHandleType attrs = 0;
- guint64 data_size = 0;
- gchar* path = NULL;
- gint64 pos_nsec = 0;
- struct stat sb;
+ MMPLAYER_TARGET_STATE(player) = MM_PLAYER_STATE_NONE;
+ }
+ break;
- MMPLAYER_RETURN_IF_FAIL(player && player->pipeline && player->pipeline->mainbin);
+ case MMPLAYER_COMMAND_REALIZE:
+ {
+ MMPLAYER_TARGET_STATE(player) = MM_PLAYER_STATE_READY;
- __gst_get_position(player, MM_PLAYER_POS_FORMAT_TIME, &pos_nsec); /* to update player->last_position */
+ if (pending_state != MM_PLAYER_STATE_NONE) {
+ goto INVALID_STATE;
+ } else {
+ /* need ready state to realize */
+ if (current_state == MM_PLAYER_STATE_READY)
+ goto NO_OP;
- attrs = MMPLAYER_GET_ATTRS(player);
- if (!attrs) {
- LOGE("fail to get attributes.\n");
- return;
+ if (current_state != MM_PLAYER_STATE_NULL)
+ goto INVALID_STATE;
+ }
}
+ break;
- if (!MMPLAYER_IS_STREAMING(player) && (player->can_support_codec & FOUND_PLUGIN_VIDEO)) {
- mm_attrs_get_string_by_name(attrs, "profile_uri", &path);
+ case MMPLAYER_COMMAND_UNREALIZE:
+ {
+ MMPLAYER_TARGET_STATE(player) = MM_PLAYER_STATE_NULL;
+
+ if (current_state == MM_PLAYER_STATE_NULL)
+ goto NO_OP;
+ }
+ break;
- if (stat(path, &sb) == 0)
- data_size = (guint64)sb.st_size;
- } else if (MMPLAYER_IS_HTTP_STREAMING(player))
- data_size = player->http_content_size;
+ case MMPLAYER_COMMAND_START:
+ {
+ MMPLAYER_TARGET_STATE(player) = MM_PLAYER_STATE_PLAYING;
- __mm_player_streaming_buffering(player->streamer, buffering_msg, data_size, player->last_position, player->duration);
- __mm_player_streaming_sync_property(player->streamer, player->pipeline->mainbin[MMPLAYER_M_AUTOPLUG].gst);
+ if (pending_state == MM_PLAYER_STATE_NONE) {
+ if (current_state == MM_PLAYER_STATE_PLAYING)
+ goto NO_OP;
+ else if (current_state != MM_PLAYER_STATE_READY &&
+ current_state != MM_PLAYER_STATE_PAUSED)
+ goto INVALID_STATE;
+ } else if (pending_state == MM_PLAYER_STATE_PLAYING) {
+ goto ALREADY_GOING;
+ } else if (pending_state == MM_PLAYER_STATE_PAUSED) {
+ LOGD("player is going to paused state, just change the pending state as playing");
+ } else
+ goto INVALID_STATE;
+ }
+ break;
- return;
-}
+ case MMPLAYER_COMMAND_STOP:
+ {
+ MMPLAYER_TARGET_STATE(player) = MM_PLAYER_STATE_READY;
-static int
-__mmplayer_handle_buffering_message(mm_player_t* player)
-{
- int ret = MM_ERROR_NONE;
- MMPlayerStateType prev_state = MM_PLAYER_STATE_NONE;
- MMPlayerStateType current_state = MM_PLAYER_STATE_NONE;
- MMPlayerStateType target_state = MM_PLAYER_STATE_NONE;
- MMPlayerStateType pending_state = MM_PLAYER_STATE_NONE;
+ if (current_state == MM_PLAYER_STATE_READY)
+ goto NO_OP;
- if (!player || !player->streamer || (MMPLAYER_IS_LIVE_STREAMING(player) && MMPLAYER_IS_RTSP_STREAMING(player))) {
- LOGW("do nothing for buffering msg\n");
- ret = MM_ERROR_PLAYER_INVALID_STATE;
- goto exit;
+ /* need playing/paused state to stop */
+ if (current_state != MM_PLAYER_STATE_PLAYING &&
+ current_state != MM_PLAYER_STATE_PAUSED)
+ goto INVALID_STATE;
}
+ break;
- prev_state = MMPLAYER_PREV_STATE(player);
- current_state = MMPLAYER_CURRENT_STATE(player);
- target_state = MMPLAYER_TARGET_STATE(player);
- pending_state = MMPLAYER_PENDING_STATE(player);
+ case MMPLAYER_COMMAND_PAUSE:
+ {
+ if (MMPLAYER_IS_LIVE_STREAMING(player))
+ goto NO_OP;
- LOGD("player state : prev %s, current %s, pending %s, target %s, buffering state 0x%X",
- MMPLAYER_STATE_GET_NAME(prev_state),
- MMPLAYER_STATE_GET_NAME(current_state),
- MMPLAYER_STATE_GET_NAME(pending_state),
- MMPLAYER_STATE_GET_NAME(target_state),
- player->streamer->buffering_state);
-
- if (!(player->streamer->buffering_state & MM_PLAYER_BUFFERING_IN_PROGRESS)) {
- /* NOTE : if buffering has done, player has to go to target state. */
- switch (target_state) {
- case MM_PLAYER_STATE_PAUSED:
- {
- switch (pending_state) {
- case MM_PLAYER_STATE_PLAYING:
- __gst_pause(player, TRUE);
- break;
+ if (player->seek_state == MMPLAYER_SEEK_IN_PROGRESS)
+ goto NOT_COMPLETED_SEEK;
- case MM_PLAYER_STATE_PAUSED:
- LOGD("player is already going to paused state, there is nothing to do.\n");
- break;
+ MMPLAYER_TARGET_STATE(player) = MM_PLAYER_STATE_PAUSED;
- case MM_PLAYER_STATE_NONE:
- case MM_PLAYER_STATE_NULL:
- case MM_PLAYER_STATE_READY:
- default:
- LOGW("invalid pending state [%s].\n", MMPLAYER_STATE_GET_NAME(pending_state));
- break;
- }
- }
- break;
+ if (pending_state == MM_PLAYER_STATE_NONE) {
+ if (current_state == MM_PLAYER_STATE_PAUSED)
+ goto NO_OP;
+ else if (current_state != MM_PLAYER_STATE_PLAYING && current_state != MM_PLAYER_STATE_READY) // support loading state of browser
+ goto INVALID_STATE;
+ } else if (pending_state == MM_PLAYER_STATE_PAUSED) {
+ goto ALREADY_GOING;
+ } else if (pending_state == MM_PLAYER_STATE_PLAYING) {
+ if (current_state == MM_PLAYER_STATE_PAUSED)
+ LOGD("player is PAUSED going to PLAYING, just change the pending state as PAUSED");
+ else
+ goto INVALID_STATE;
+ }
+ }
+ break;
- case MM_PLAYER_STATE_PLAYING:
- {
- switch (pending_state) {
- case MM_PLAYER_STATE_NONE:
- {
- if (current_state != MM_PLAYER_STATE_PLAYING)
- __gst_resume(player, TRUE);
- }
- break;
+ case MMPLAYER_COMMAND_RESUME:
+ {
+ if (player->seek_state == MMPLAYER_SEEK_IN_PROGRESS)
+ goto NOT_COMPLETED_SEEK;
- case MM_PLAYER_STATE_PAUSED:
- /* NOTE: It should be worked as asynchronously.
- * Because, buffering can be completed during autoplugging when pipeline would try to go playing state directly.
- */
- if (current_state == MM_PLAYER_STATE_PLAYING) {
- /* NOTE: If the current state is PLAYING, it means, async __gst_pause() is not completed yet.
- * The current state should be changed to paused purposely to prevent state conflict.
- */
- MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_PAUSED);
- }
- __gst_resume(player, TRUE);
- break;
+ MMPLAYER_TARGET_STATE(player) = MM_PLAYER_STATE_PLAYING;
- case MM_PLAYER_STATE_PLAYING:
- LOGD("player is already going to playing state, there is nothing to do.\n");
- break;
+ if (pending_state == MM_PLAYER_STATE_NONE) {
+ if (current_state == MM_PLAYER_STATE_PLAYING)
+ goto NO_OP;
+ else if (current_state != MM_PLAYER_STATE_PAUSED)
+ goto INVALID_STATE;
+ } else if (pending_state == MM_PLAYER_STATE_PLAYING) {
+ goto ALREADY_GOING;
+ } else if (pending_state == MM_PLAYER_STATE_PAUSED) {
+ LOGD("player is going to paused state, just change the pending state as playing");
+ } else
+ goto INVALID_STATE;
+ }
+ break;
- case MM_PLAYER_STATE_NULL:
- case MM_PLAYER_STATE_READY:
- default:
- LOGW("invalid pending state [%s].\n", MMPLAYER_STATE_GET_NAME(pending_state));
- break;
- }
- }
- break;
+ default:
+ break;
+ }
+ player->cmd = command;
- case MM_PLAYER_STATE_NULL:
- case MM_PLAYER_STATE_READY:
- case MM_PLAYER_STATE_NONE:
- default:
- LOGW("invalid target state [%s].\n", MMPLAYER_STATE_GET_NAME(target_state));
- break;
- }
- } else {
- /* NOTE : during the buffering, pause the player for stopping pipeline clock.
- * it's for stopping the pipeline clock to prevent dropping the data in sink element.
- */
- switch (pending_state) {
- case MM_PLAYER_STATE_NONE:
- {
- if (current_state != MM_PLAYER_STATE_PAUSED) {
- /* rtsp streaming pause makes rtsp server stop sending data. */
- if (!MMPLAYER_IS_RTSP_STREAMING(player)) {
- LOGD("set pause state during buffering\n");
- __gst_pause(player, TRUE);
- }
- }
- }
- break;
+ return MM_ERROR_NONE;
- case MM_PLAYER_STATE_PLAYING:
- /* rtsp streaming pause makes rtsp server stop sending data. */
- if (!MMPLAYER_IS_RTSP_STREAMING(player))
- __gst_pause(player, TRUE);
- break;
+INVALID_STATE:
+ LOGW("since player is in wrong state(%s). it's not able to apply the command(%d)",
+ MMPLAYER_STATE_GET_NAME(current_state), command);
+ return MM_ERROR_PLAYER_INVALID_STATE;
- case MM_PLAYER_STATE_PAUSED:
- break;
+NOT_COMPLETED_SEEK:
+ LOGW("not completed seek");
+ return MM_ERROR_PLAYER_DOING_SEEK;
- case MM_PLAYER_STATE_NULL:
- case MM_PLAYER_STATE_READY:
- default:
- LOGW("invalid pending state [%s].\n", MMPLAYER_STATE_GET_NAME(pending_state));
- break;
- }
- }
+NO_OP:
+ LOGW("player is in the desired state(%s). doing noting", MMPLAYER_STATE_GET_NAME(current_state));
+ return MM_ERROR_PLAYER_NO_OP;
-exit:
- return ret;
+ALREADY_GOING:
+ LOGW("player is already going to %s, doing nothing", MMPLAYER_STATE_GET_NAME(pending_state));
+ return MM_ERROR_PLAYER_NO_OP;
}
-static void
-__mmplayer_drop_subtitle(mm_player_t* player, gboolean is_drop)
+static gpointer __mmplayer_next_play_thread(gpointer data)
{
- MMPlayerGstElement *textbin;
- MMPLAYER_FENTER();
-
- MMPLAYER_RETURN_IF_FAIL(player &&
- player->pipeline &&
- player->pipeline->textbin);
-
- MMPLAYER_RETURN_IF_FAIL(player->pipeline->textbin[MMPLAYER_T_IDENTITY].gst);
+ mm_player_t* player = (mm_player_t*) data;
+ MMPlayerGstElement *mainbin = NULL;
- textbin = player->pipeline->textbin;
+ MMPLAYER_RETURN_VAL_IF_FAIL(player, NULL);
- if (is_drop) {
- LOGD("Drop subtitle text after getting EOS\n");
+ MMPLAYER_NEXT_PLAY_THREAD_LOCK(player);
+ while (!player->next_play_thread_exit) {
+ LOGD("next play thread started. waiting for signal.\n");
+ MMPLAYER_NEXT_PLAY_THREAD_WAIT(player);
- __mmplayer_gst_handle_async(player, FALSE, MMPLAYER_TEXT_SINK);
- g_object_set(textbin[MMPLAYER_T_IDENTITY].gst, "drop-probability", (gfloat)1.0, NULL);
+ LOGD("reconfigure pipeline for gapless play.\n");
- player->is_subtitle_force_drop = TRUE;
- } else {
- if (player->is_subtitle_force_drop == TRUE) {
- LOGD("Enable subtitle data path without drop\n");
+ if (player->next_play_thread_exit) {
+ if (player->gapless.reconfigure) {
+ player->gapless.reconfigure = false;
+ MMPLAYER_PLAYBACK_UNLOCK(player);
+ }
+ LOGD("exiting gapless play thread\n");
+ break;
+ }
- g_object_set(textbin[MMPLAYER_T_IDENTITY].gst, "drop-probability", (gfloat)0.0, NULL);
- __mmplayer_gst_handle_async(player, TRUE, MMPLAYER_TEXT_SINK);
+ mainbin = player->pipeline->mainbin;
- LOGD("non-connected with external display");
+ MMPLAYER_RELEASE_ELEMENT(player, mainbin, MMPLAYER_M_MUXED_S_BUFFER);
+ MMPLAYER_RELEASE_ELEMENT(player, mainbin, MMPLAYER_M_ID3DEMUX);
+ MMPLAYER_RELEASE_ELEMENT(player, mainbin, MMPLAYER_M_AUTOPLUG);
+ MMPLAYER_RELEASE_ELEMENT(player, mainbin, MMPLAYER_M_TYPEFIND);
+ MMPLAYER_RELEASE_ELEMENT(player, mainbin, MMPLAYER_M_SRC);
- player->is_subtitle_force_drop = FALSE;
- }
+ __mmplayer_activate_next_source(player, GST_STATE_PLAYING);
}
+ MMPLAYER_NEXT_PLAY_THREAD_UNLOCK(player);
+
+ return NULL;
}
-static VariantData *
-__mmplayer_adaptive_var_info(const VariantData *self, gpointer user_data)
+static void
+__mmplayer_remove_g_source_from_context(GMainContext *context, guint source_id)
{
- VariantData *var_info = NULL;
- g_return_val_if_fail(self != NULL, NULL);
+ GSource *source = NULL;
+
+ MMPLAYER_FENTER();
+
+ source = g_main_context_find_source_by_id(context, source_id);
+
+ if (source != NULL) {
+ LOGW("context: %p, source id: %d, source: %p", context, source_id, source);
+ g_source_destroy(source);
+ }
- var_info = g_new0(VariantData, 1);
- if (!var_info) return NULL;
- var_info->bandwidth = self->bandwidth;
- var_info->width = self->width;
- var_info->height = self->height;
- return var_info;
+ MMPLAYER_FLEAVE();
}
-void _mmplayer_bus_msg_thread_destroy(MMHandleType hplayer)
+void __mmplayer_bus_msg_thread_destroy(MMHandleType hplayer)
{
mm_player_t* player = (mm_player_t*)hplayer;
GstMessage *msg = NULL;
}
static void
-__mmplayer_gst_callback(GstMessage *msg, gpointer data)
+__mmplayer_gst_rtp_no_more_pads(GstElement *element, gpointer data)
{
- mm_player_t* player = (mm_player_t*)(data);
+ mm_player_t* player = (mm_player_t*) data;
- MMPLAYER_RETURN_IF_FAIL(player);
- MMPLAYER_RETURN_IF_FAIL(msg && GST_IS_MESSAGE(msg));
+ MMPLAYER_FENTER();
- switch (GST_MESSAGE_TYPE(msg)) {
- case GST_MESSAGE_UNKNOWN:
- LOGD("unknown message received\n");
- break;
+ /* NOTE : we can remove fakesink here if there's no rtp_dynamic_pad. because whenever
+ * we connect autoplugging element to the pad which is just added to rtspsrc, we increase
+ * num_dynamic_pad. and this is no-more-pad situation which means no more pad will be added.
+ * So we can say this: if num_dynamic_pad is zero, it must be one of the following:
- case GST_MESSAGE_EOS:
- {
- MMHandleType attrs = 0;
- gint count = 0;
+ * [1] audio and video will be dumped with filesink.
+ * [2] autoplugging is done by just using pad caps.
+ * [3] typefinding has happened in audio but audiosink is created already before no-more-pad signal
+ * and the video will be dumped via filesink.
+ */
+ if (player->num_dynamic_pad == 0) {
+ LOGD("it seems pad caps is directely used for autoplugging. removing fakesink now\n");
- LOGD("GST_MESSAGE_EOS received\n");
+ if (!__mmplayer_gst_remove_fakesink(player,
+ &player->pipeline->mainbin[MMPLAYER_M_SRC_FAKESINK]))
+ /* NOTE : __mmplayer_pipeline_complete() can be called several times, because the
+ * signaling mechanisms (pad-added, no-more-pad, new-decoded-pad) of the various
+ * source elements are not the same. To overcome this situation, this function is
+ * called from several places and several times. Therefore, this is not an error case.
+ */
+ return;
+ }
- /* NOTE : EOS event is comming multiple time. watch out it */
- /* check state. we only process EOS when pipeline state goes to PLAYING */
- if (!(player->cmd == MMPLAYER_COMMAND_START || player->cmd == MMPLAYER_COMMAND_RESUME)) {
- LOGD("EOS received on non-playing state. ignoring it\n");
- break;
- }
+ /* create dot before error-return. for debugging */
+ MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-no-more-pad");
- if (player->pipeline) {
- if (player->pipeline->textbin)
- __mmplayer_drop_subtitle(player, TRUE);
+ player->no_more_pad = TRUE;
- if ((player->audio_stream_cb) && (player->set_mode.pcm_extraction) && (!player->audio_stream_render_cb_ex)) {
- GstPad *pad = NULL;
+ MMPLAYER_FLEAVE();
+}
- pad = gst_element_get_static_pad(player->pipeline->audiobin[MMPLAYER_A_SINK].gst, "sink");
+static gboolean
+__mmplayer_gst_remove_fakesink(mm_player_t* player, MMPlayerGstElement* fakesink)
+{
+ GstElement* parent = NULL;
- LOGD("release audio callback\n");
+ MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, FALSE);
- /* release audio callback */
- gst_pad_remove_probe(pad, player->audio_cb_probe_id);
- player->audio_cb_probe_id = 0;
- /* audio callback should be free because it can be called even though probe remove.*/
- player->audio_stream_cb = NULL;
- player->audio_stream_cb_user_param = NULL;
+ /* if we have no fakesink, this means we are using decodebin, which doesn't
+ * need an extra fakesink */
+ MMPLAYER_RETURN_VAL_IF_FAIL(fakesink, TRUE);
- }
- }
- if ((player->audio_stream_render_cb_ex) && (!player->audio_stream_sink_sync))
- __mmplayer_audio_stream_clear_buffer(player, TRUE);
+ /* lock */
+ MMPLAYER_FSINK_LOCK(player);
- /* rewind if repeat count is greater then zero */
- /* get play count */
- attrs = MMPLAYER_GET_ATTRS(player);
+ if (!fakesink->gst)
+ goto ERROR;
- if (attrs) {
- mm_attrs_get_int_by_name(attrs, "profile_play_count", &count);
+ /* get parent of fakesink */
+ parent = (GstElement*)gst_object_get_parent((GstObject*)fakesink->gst);
+ if (!parent) {
+ LOGD("fakesink already removed\n");
+ goto ERROR;
+ }
- LOGD("play count: %d, playback rate: %f\n", count, player->playback_rate);
+ gst_element_set_locked_state(fakesink->gst, TRUE);
- if (count == -1 || player->playback_rate < 0.0) /* default value is 1 */ {
- if (player->playback_rate < 0.0) {
- player->resumed_by_rewind = TRUE;
- _mmplayer_set_mute((MMHandleType)player, 0);
- MMPLAYER_POST_MSG(player, MM_MESSAGE_RESUMED_BY_REW, NULL);
- }
+ /* setting the state to NULL never returns async
+ * so no need to wait for completion of state transition
+ */
+ if (GST_STATE_CHANGE_FAILURE == gst_element_set_state(fakesink->gst, GST_STATE_NULL))
+ LOGE("fakesink state change failure!\n");
+ /* FIXIT : should I return here? or try to proceed to next? */
+ /* return FALSE; */
- __mmplayer_handle_eos_delay(player, player->ini.delay_before_repeat);
+ /* remove fakesink from it's parent */
+ if (!gst_bin_remove(GST_BIN(parent), fakesink->gst)) {
+ LOGE("failed to remove fakesink\n");
- /* initialize */
- player->sent_bos = FALSE;
+ gst_object_unref(parent);
- /* not posting eos when repeating */
- break;
- }
- }
+ goto ERROR;
+ }
- if (player->pipeline)
- MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-status-eos");
+ gst_object_unref(parent);
- /* post eos message to application */
- __mmplayer_handle_eos_delay(player, player->ini.eos_delay);
+ LOGD("state-holder removed\n");
- /* reset last position */
- player->last_position = 0;
- }
- break;
+ gst_element_set_locked_state(fakesink->gst, FALSE);
- case GST_MESSAGE_ERROR:
- {
- GError *error = NULL;
- gchar* debug = NULL;
+ MMPLAYER_FSINK_UNLOCK(player);
+ return TRUE;
- /* generating debug info before returning error */
- MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-status-error");
+ERROR:
+ if (fakesink->gst)
+ gst_element_set_locked_state(fakesink->gst, FALSE);
- /* get error code */
- gst_message_parse_error(msg, &error, &debug);
+ MMPLAYER_FSINK_UNLOCK(player);
+ return FALSE;
+}
- if (gst_structure_has_name(gst_message_get_structure(msg), "streaming_error")) {
- /* Note : the streaming error from the streaming source is handled
- * using __mmplayer_handle_streaming_error.
- */
- __mmplayer_handle_streaming_error(player, msg);
- /* dump state of all element */
- __mmplayer_dump_pipeline_state(player);
- } else {
- /* traslate gst error code to msl error code. then post it
- * to application if needed
- */
- __mmplayer_handle_gst_error(player, msg, error);
-
- if (debug)
- LOGE("error debug : %s", debug);
- }
-
- if (MMPLAYER_IS_HTTP_PD(player))
- _mmplayer_unrealize_pd_downloader((MMHandleType)player);
-
- MMPLAYER_FREEIF(debug);
- g_error_free(error);
- }
- break;
-
- case GST_MESSAGE_WARNING:
- {
- char* debug = NULL;
- GError* error = NULL;
+static void
+__mmplayer_gst_rtp_dynamic_pad(GstElement *element, GstPad *pad, gpointer data)
+{
+ GstPad *sinkpad = NULL;
+ GstCaps* caps = NULL;
+ GstElement* new_element = NULL;
+ GstStructure* str = NULL;
+ const gchar* name = NULL;
- gst_message_parse_warning(msg, &error, &debug);
+ mm_player_t* player = (mm_player_t*) data;
- LOGD("warning : %s\n", error->message);
- LOGD("debug : %s\n", debug);
+ MMPLAYER_FENTER();
- MMPLAYER_POST_MSG(player, MM_MESSAGE_WARNING, NULL);
+ MMPLAYER_RETURN_IF_FAIL(element && pad);
+ MMPLAYER_RETURN_IF_FAIL(player &&
+ player->pipeline &&
+ player->pipeline->mainbin);
- MMPLAYER_FREEIF(debug);
- g_error_free(error);
- }
- break;
- case GST_MESSAGE_TAG:
- {
- LOGD("GST_MESSAGE_TAG\n");
- if (!__mmplayer_gst_extract_tag_from_msg(player, msg))
- LOGW("failed to extract tags from gstmessage\n");
- }
- break;
+ /* payload type is recognizable. increase num_dynamic_pad and wait for sinkbin creation.
+ * num_dynamic_pad will be decreased after creating a sinkbin.
+ */
+ player->num_dynamic_pad++;
+ LOGD("stream count inc : %d\n", player->num_dynamic_pad);
- case GST_MESSAGE_BUFFERING:
- {
- MMMessageParamType msg_param = {0, };
- int bRet = MM_ERROR_NONE;
+ caps = gst_pad_query_caps(pad, NULL);
- if (!(player->pipeline && player->pipeline->mainbin)) {
- LOGE("Pipeline is not initialized");
- break;
- }
+ MMPLAYER_CHECK_NULL(caps);
- if (!MMPLAYER_IS_STREAMING(player))
- break;
+ /* clear previous result*/
+ player->have_dynamic_pad = FALSE;
- if (player->pd_mode == MM_PLAYER_PD_MODE_URI) {
- if (!MMPLAYER_CMD_TRYLOCK(player)) {
- /* skip the playback control by buffering msg while user request is handled. */
- gint per = 0;
+ str = gst_caps_get_structure(caps, 0);
- LOGW("[PD mode] can't get cmd lock, only post buffering msg");
+ if (!str) {
+ LOGE("cannot get structure from caps.\n");
+ goto ERROR;
+ }
- gst_message_parse_buffering(msg, &per);
- LOGD("[PD mode][%s] buffering %d %%....", GST_OBJECT_NAME(GST_MESSAGE_SRC(msg)), per);
+ name = gst_structure_get_name(str);
+ if (!name) {
+ LOGE("cannot get mimetype from structure.\n");
+ goto ERROR;
+ }
- msg_param.connection.buffering = per;
- MMPLAYER_POST_MSG(player, MM_MESSAGE_BUFFERING, &msg_param);
- break;
- }
- } else {
- MMPLAYER_CMD_LOCK(player);
- }
+ if (strstr(name, "video")) {
+ gint stype = 0;
+ mm_attrs_get_int_by_name(player->attrs, "display_surface_type", &stype);
- if (!player->streamer) {
- LOGW("Pipeline is shutting down");
- MMPLAYER_CMD_UNLOCK(player);
- break;
+ if (stype == MM_DISPLAY_SURFACE_NULL || stype == MM_DISPLAY_SURFACE_REMOTE) {
+ if (player->v_stream_caps) {
+ gst_caps_unref(player->v_stream_caps);
+ player->v_stream_caps = NULL;
}
- /* ignore the remained buffering message till getting 100% msg */
- if (player->streamer->buffering_state == MM_PLAYER_BUFFERING_COMPLETE) {
- gint buffer_percent = 0;
-
- gst_message_parse_buffering(msg, &buffer_percent);
+ new_element = gst_element_factory_make("fakesink", NULL);
+ player->num_dynamic_pad--;
+ goto NEW_ELEMENT;
+ }
+ }
- if (buffer_percent == MAX_BUFFER_PERCENT) {
- LOGD("Ignored all the previous buffering msg!(got %d%%)\n", buffer_percent);
- player->streamer->buffering_state = MM_PLAYER_BUFFERING_DEFAULT;
- }
- MMPLAYER_CMD_UNLOCK(player);
- break;
- }
+ /* clear previous result*/
+ player->have_dynamic_pad = FALSE;
- /* ignore the remained buffering message */
- if (player->streamer->buffering_state == MM_PLAYER_BUFFERING_ABORT) {
- gint buffer_percent = 0;
+ if (!__mmplayer_try_to_plug_decodebin(player, pad, caps)) {
+ LOGE("failed to autoplug for caps");
+ goto ERROR;
+ }
- gst_message_parse_buffering(msg, &buffer_percent);
+ /* check if there's dynamic pad*/
+ if (player->have_dynamic_pad) {
+ LOGE("using pad caps assums there's no dynamic pad !\n");
+ goto ERROR;
+ }
- LOGD("interrupted buffering -last posted %d %%, new per %d %%",
- player->streamer->buffering_percent, buffer_percent);
+ gst_caps_unref(caps);
+ caps = NULL;
- if (player->streamer->buffering_percent > buffer_percent || buffer_percent <= 0) {
- player->streamer->buffering_state = MM_PLAYER_BUFFERING_DEFAULT;
- player->streamer->buffering_req.is_pre_buffering = FALSE;
+NEW_ELEMENT:
- LOGD("interrupted buffering - need to enter the buffering mode again - %d %%", buffer_percent);
- } else {
- LOGD("interrupted buffering - ignored the remained buffering msg!");
- MMPLAYER_CMD_UNLOCK(player);
- break;
- }
- }
+ /* execute new_element if created */
+ if (new_element) {
+ LOGD("adding new element to pipeline\n");
- __mmplayer_update_buffer_setting(player, msg);
+ /* set state to READY before add to bin */
+ MMPLAYER_ELEMENT_SET_STATE(new_element, GST_STATE_READY);
- bRet = __mmplayer_handle_buffering_message(player); /* playback control */
+ /* add new element to the pipeline */
+ if (FALSE == gst_bin_add(GST_BIN(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst), new_element)) {
+ LOGE("failed to add autoplug element to bin\n");
+ goto ERROR;
+ }
- if (bRet == MM_ERROR_NONE) {
- msg_param.connection.buffering = player->streamer->buffering_percent;
- MMPLAYER_POST_MSG(player, MM_MESSAGE_BUFFERING, &msg_param);
+ /* get pad from element */
+ sinkpad = gst_element_get_static_pad(GST_ELEMENT(new_element), "sink");
+ if (!sinkpad) {
+ LOGE("failed to get sinkpad from autoplug element\n");
+ goto ERROR;
+ }
- if (MMPLAYER_IS_RTSP_STREAMING(player) &&
- player->pending_resume &&
- (player->streamer->buffering_percent >= MAX_BUFFER_PERCENT)) {
+ /* link it */
+ if (GST_PAD_LINK_OK != gst_pad_link(pad, sinkpad)) {
+ LOGE("failed to link autoplug element\n");
+ goto ERROR;
+ }
- player->is_external_subtitle_added_now = FALSE;
- player->pending_resume = FALSE;
- _mmplayer_resume((MMHandleType)player);
- }
+ gst_object_unref(sinkpad);
+ sinkpad = NULL;
- if (MMPLAYER_IS_RTSP_STREAMING(player) &&
- (player->streamer->buffering_percent >= MAX_BUFFER_PERCENT)) {
-
- if (player->seek_state == MMPLAYER_SEEK_IN_PROGRESS) {
- if (MMPLAYER_TARGET_STATE(player) == MM_PLAYER_STATE_PAUSED) {
- player->seek_state = MMPLAYER_SEEK_NONE;
- MMPLAYER_POST_MSG(player, MM_MESSAGE_SEEK_COMPLETED, NULL);
- } else if (MMPLAYER_TARGET_STATE(player) == MM_PLAYER_STATE_PLAYING) {
- /* Considering the async state trasition in case of RTSP.
- After getting state change gst msg, seek cmpleted msg will be posted. */
- player->seek_state = MMPLAYER_SEEK_COMPLETED;
- }
- }
- }
- } else if (bRet == MM_ERROR_PLAYER_INVALID_STATE) {
- if (!player->streamer) {
- LOGW("player->streamer is NULL, so discarding the buffering percent update\n");
- MMPLAYER_CMD_UNLOCK(player);
- break;
- }
+ /* run. setting PLAYING here since streamming source is live source */
+ MMPLAYER_ELEMENT_SET_STATE(new_element, GST_STATE_PLAYING);
+ }
- if ((MMPLAYER_IS_LIVE_STREAMING(player)) && (MMPLAYER_IS_RTSP_STREAMING(player))) {
+ if (caps)
+ gst_caps_unref(caps);
- LOGD("player->last_position=%"G_GINT64_FORMAT" , player->streamer->buffering_percent=%d \n",
- GST_TIME_AS_SECONDS(player->last_position), player->streamer->buffering_percent);
+ MMPLAYER_FLEAVE();
- if ((GST_TIME_AS_SECONDS(player->last_position) <= 0) && (MMPLAYER_CURRENT_STATE(player) == MM_PLAYER_STATE_PAUSED)) {
- msg_param.connection.buffering = player->streamer->buffering_percent;
- MMPLAYER_POST_MSG(player, MM_MESSAGE_BUFFERING, &msg_param);
- } else {
- LOGD("Not updating Buffering Message for Live RTSP case !!!\n");
- }
- } else {
- msg_param.connection.buffering = player->streamer->buffering_percent;
- MMPLAYER_POST_MSG(player, MM_MESSAGE_BUFFERING, &msg_param);
- }
- }
- MMPLAYER_CMD_UNLOCK(player);
- }
- break;
+ return;
- case GST_MESSAGE_STATE_CHANGED:
- {
- MMPlayerGstElement *mainbin;
- const GValue *voldstate, *vnewstate, *vpending;
- GstState oldstate = GST_STATE_NULL;
- GstState newstate = GST_STATE_NULL;
- GstState pending = GST_STATE_NULL;
-
- if (!(player->pipeline && player->pipeline->mainbin)) {
- LOGE("player pipeline handle is null");
- break;
- }
+STATE_CHANGE_FAILED:
+ERROR:
+ /* FIXIT : take care if new_element has already added to pipeline */
+ if (new_element)
+ gst_object_unref(GST_OBJECT(new_element));
- mainbin = player->pipeline->mainbin;
+ if (sinkpad)
+ gst_object_unref(GST_OBJECT(sinkpad));
- /* we only handle messages from pipeline */
- if (msg->src != (GstObject *)mainbin[MMPLAYER_M_PIPE].gst)
- break;
+ if (caps)
+ gst_caps_unref(caps);
- /* get state info from msg */
- voldstate = gst_structure_get_value(gst_message_get_structure(msg), "old-state");
- vnewstate = gst_structure_get_value(gst_message_get_structure(msg), "new-state");
- vpending = gst_structure_get_value(gst_message_get_structure(msg), "pending-state");
+ /* FIXIT : how to inform this error to MSL ????? */
+ /* FIXIT : I think we'd better to use g_idle_add() to destroy pipeline and
+ * then post an error to application
+ */
+}
- if (!voldstate || !vnewstate) {
- LOGE("received msg has wrong format.");
- break;
- }
+static GstPadProbeReturn
+__mmplayer_gst_selector_blocked(GstPad* pad, GstPadProbeInfo *info, gpointer data)
+{
+ LOGD("pad(%s:%s) is blocked", GST_DEBUG_PAD_NAME(pad));
+ return GST_PAD_PROBE_OK;
+}
- oldstate = (GstState)voldstate->data[0].v_int;
- newstate = (GstState)vnewstate->data[0].v_int;
- if (vpending)
- pending = (GstState)vpending->data[0].v_int;
+static GstPadProbeReturn
+__mmplayer_gst_selector_event_probe(GstPad * pad, GstPadProbeInfo * info, gpointer data)
+{
+ GstPadProbeReturn ret = GST_PAD_PROBE_OK;
+ GstEvent *event = GST_PAD_PROBE_INFO_DATA(info);
+ mm_player_t* player = (mm_player_t*)data;
+ GstCaps* caps = NULL;
+ GstStructure* str = NULL;
+ const gchar* name = NULL;
+ MMPlayerTrackType stream_type = MM_PLAYER_TRACK_TYPE_VIDEO;
- LOGD("state changed [%s] : %s ---> %s final : %s\n",
- GST_OBJECT_NAME(GST_MESSAGE_SRC(msg)),
- gst_element_state_get_name((GstState)oldstate),
- gst_element_state_get_name((GstState)newstate),
- gst_element_state_get_name((GstState)pending));
- if (newstate == GST_STATE_PLAYING) {
- if ((MMPLAYER_IS_RTSP_STREAMING(player)) && (player->pending_seek.is_pending)) {
+ if (GST_EVENT_IS_DOWNSTREAM(event)) {
+ if (GST_EVENT_TYPE(event) != GST_EVENT_STREAM_START &&
+ GST_EVENT_TYPE(event) != GST_EVENT_FLUSH_STOP &&
+ GST_EVENT_TYPE(event) != GST_EVENT_SEGMENT &&
+ GST_EVENT_TYPE(event) != GST_EVENT_EOS)
+ return ret;
+ } else if (GST_EVENT_IS_UPSTREAM(event)) {
+ if (GST_EVENT_TYPE(event) != GST_EVENT_QOS)
+ return ret;
+ }
- int retVal = MM_ERROR_NONE;
- LOGD("trying to play from (%"G_GINT64_FORMAT") pending position\n", player->pending_seek.pos);
+ caps = gst_pad_query_caps(pad, NULL);
+ if (!caps) {
+ LOGE("failed to get caps from pad[%s:%s]", GST_DEBUG_PAD_NAME(pad));
+ return ret;
+ }
- retVal = __gst_set_position(player, player->pending_seek.format, player->pending_seek.pos, TRUE);
+ str = gst_caps_get_structure(caps, 0);
+ if (!str) {
+ LOGE("failed to get structure from caps");
+ goto ERROR;
+ }
- if (MM_ERROR_NONE != retVal)
- LOGE("failed to seek pending postion. just keep staying current position.\n");
+ name = gst_structure_get_name(str);
+ if (!name) {
+ LOGE("failed to get name from str");
+ goto ERROR;
+ }
- player->pending_seek.is_pending = FALSE;
- }
- }
+ if (strstr(name, "audio")) {
+ stream_type = MM_PLAYER_TRACK_TYPE_AUDIO;
+ } else if (strstr(name, "video")) {
+ stream_type = MM_PLAYER_TRACK_TYPE_VIDEO;
+ } else {
+ /* text track is not supportable */
+ LOGE("invalid name %s", name);
+ goto ERROR;
+ }
- if (oldstate == newstate) {
- LOGD("pipeline reports state transition to old state");
- break;
+ switch (GST_EVENT_TYPE(event)) {
+ case GST_EVENT_EOS:
+ {
+ /* in case of gapless, drop eos event not to send it to sink */
+ if (player->gapless.reconfigure && !player->msg_posted) {
+ LOGD("[%d] %s:%s EOS received but will be drop", stream_type, GST_DEBUG_PAD_NAME(pad));
+ ret = GST_PAD_PROBE_DROP;
}
+ break;
+ }
+ case GST_EVENT_STREAM_START:
+ {
+ gint64 stop_running_time = 0;
+ gint64 position_running_time = 0;
+ gint64 position = 0;
+ gint idx = 0;
- switch (newstate) {
- case GST_STATE_VOID_PENDING:
- break;
+ for (idx = MM_PLAYER_TRACK_TYPE_AUDIO; idx < MM_PLAYER_TRACK_TYPE_TEXT; idx++) {
+ if ((player->gapless.update_segment[idx] == TRUE) ||
+ !(player->selector[idx].event_probe_id)) {
+ /* LOGW("[%d] skip", idx); */
+ continue;
+ }
- case GST_STATE_NULL:
- break;
+ if (GST_CLOCK_TIME_IS_VALID(player->gapless.segment[idx].stop)) {
+ stop_running_time =
+ gst_segment_to_running_time(&player->gapless.segment[idx],
+ GST_FORMAT_TIME, player->gapless.segment[idx].stop);
+ } else if (GST_CLOCK_TIME_IS_VALID(player->gapless.segment[idx].duration)) {
+ stop_running_time =
+ gst_segment_to_running_time(&player->gapless.segment[idx],
+ GST_FORMAT_TIME, player->gapless.segment[idx].duration);
+ } else {
+ LOGD("duration: %"GST_TIME_FORMAT, GST_TIME_ARGS(player->duration));
+ stop_running_time =
+ gst_segment_to_running_time(&player->gapless.segment[idx],
+ GST_FORMAT_TIME, player->duration);
+ }
- case GST_STATE_READY:
- break;
+ position_running_time =
+ gst_segment_to_running_time(&player->gapless.segment[idx],
+ GST_FORMAT_TIME, player->gapless.segment[idx].position);
- case GST_STATE_PAUSED:
- {
- gboolean prepare_async = FALSE;
+ LOGD("[type:%d] time info %" GST_TIME_FORMAT " , %"
+ GST_TIME_FORMAT" , %" GST_TIME_FORMAT,
+ idx,
+ GST_TIME_ARGS(stop_running_time),
+ GST_TIME_ARGS(position_running_time),
+ GST_TIME_ARGS(gst_segment_to_running_time(&player->gapless.segment[idx],
+ GST_FORMAT_TIME, player->gapless.segment[idx].start)));
- if (!player->audio_cb_probe_id && player->set_mode.pcm_extraction && !player->audio_stream_render_cb_ex)
- __mmplayer_configure_audio_callback(player);
+ position_running_time = MAX(position_running_time, stop_running_time);
+ position_running_time -= gst_segment_to_running_time(&player->gapless.segment[idx],
+ GST_FORMAT_TIME, player->gapless.segment[idx].start);
+ position_running_time = MAX(0, position_running_time);
+ position = MAX(position, position_running_time);
+ }
- if (!player->sent_bos && oldstate == GST_STATE_READY) {
- // managed prepare async case
- mm_attrs_get_int_by_name(player->attrs, "profile_prepare_async", &prepare_async);
- LOGD("checking prepare mode for async transition - %d", prepare_async);
- }
+ if (position != 0) {
+ LOGD("[%d]GST_EVENT_STREAM_START: start_time from %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT,
+ stream_type, GST_TIME_ARGS(player->gapless.start_time[stream_type]),
+ GST_TIME_ARGS(player->gapless.start_time[stream_type] + position));
- if (MMPLAYER_IS_STREAMING(player) || MMPLAYER_IS_MS_BUFF_SRC(player) || prepare_async) {
- MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_PAUSED);
+ player->gapless.start_time[stream_type] += position;
+ }
+ break;
+ }
+ case GST_EVENT_FLUSH_STOP:
+ {
+ LOGD("[%d] GST_EVENT_FLUSH_STOP", stream_type);
+ gst_segment_init(&player->gapless.segment[stream_type], GST_FORMAT_UNDEFINED);
+ player->gapless.start_time[stream_type] = 0;
+ break;
+ }
+ case GST_EVENT_SEGMENT:
+ {
+ GstSegment segment;
+ GstEvent *tmpev;
- if (MMPLAYER_IS_STREAMING(player) && (player->streamer))
- __mm_player_streaming_set_content_bitrate(player->streamer,
- player->total_maximum_bitrate, player->total_bitrate);
+ LOGD("[%d] GST_EVENT_SEGMENT", stream_type);
+ gst_event_copy_segment(event, &segment);
- if (player->pending_seek.is_pending) {
- LOGW("trying to do pending seek");
- MMPLAYER_CMD_LOCK(player);
- __gst_pending_seek(player);
- MMPLAYER_CMD_UNLOCK(player);
- }
- }
- }
- break;
+ if (segment.format == GST_FORMAT_TIME) {
+ LOGD("segment base:%" GST_TIME_FORMAT ", offset:%" GST_TIME_FORMAT
+ ", start:%" GST_TIME_FORMAT ", stop: %" GST_TIME_FORMAT
+ ", time: %" GST_TIME_FORMAT ", pos: %" GST_TIME_FORMAT ", dur: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS(segment.base), GST_TIME_ARGS(segment.offset),
+ GST_TIME_ARGS(segment.start), GST_TIME_ARGS(segment.stop),
+ GST_TIME_ARGS(segment.time), GST_TIME_ARGS(segment.position), GST_TIME_ARGS(segment.duration));
- case GST_STATE_PLAYING:
- {
- if (MMPLAYER_IS_STREAMING(player)) {
- // managed prepare async case when buffering is completed
- // pending state should be reset otherwise, it's still playing even though it's resumed after bufferging.
- if ((MMPLAYER_CURRENT_STATE(player) != MM_PLAYER_STATE_PLAYING) ||
- (MMPLAYER_PENDING_STATE(player) == MM_PLAYER_STATE_PLAYING))
- MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_PLAYING);
+ /* keep the all the segment ev to cover the seeking */
+ gst_segment_copy_into(&segment, &player->gapless.segment[stream_type]);
+ player->gapless.update_segment[stream_type] = TRUE;
- if (MMPLAYER_IS_RTSP_STREAMING(player) && (MMPLAYER_IS_LIVE_STREAMING(player))) {
+ if (!player->gapless.running)
+ break;
- LOGD("Current Buffering Percent = %d", player->streamer->buffering_percent);
- if (player->streamer->buffering_percent < 100) {
+ player->gapless.segment[stream_type].base = player->gapless.start_time[stream_type];
- MMMessageParamType msg_param = {0, };
- LOGW("Posting Buffering Completed Message to Application !!!");
+ LOGD("[%d] new base: %" GST_TIME_FORMAT, stream_type, GST_TIME_ARGS(player->gapless.segment[stream_type].base));
- msg_param.connection.buffering = 100;
- MMPLAYER_POST_MSG(player, MM_MESSAGE_BUFFERING, &msg_param);
- }
- }
- }
+ tmpev = gst_event_new_segment(&player->gapless.segment[stream_type]);
+ gst_event_set_seqnum(tmpev, gst_event_get_seqnum(event));
+ gst_event_unref(event);
+ GST_PAD_PROBE_INFO_DATA(info) = tmpev;
+ }
+ break;
+ }
+ case GST_EVENT_QOS:
+ {
+ gdouble proportion = 0.0;
+ GstClockTimeDiff diff = 0;
+ GstClockTime timestamp = 0;
+ gint64 running_time_diff = -1;
+ GstQOSType type = 0;
+ GstEvent *tmpev = NULL;
- if (player->gapless.stream_changed) {
- _mmplayer_update_content_attrs(player, ATTR_ALL);
- player->gapless.stream_changed = FALSE;
- }
+ running_time_diff = player->gapless.segment[stream_type].base;
- if (player->seek_state == MMPLAYER_SEEK_COMPLETED) {
- player->seek_state = MMPLAYER_SEEK_NONE;
- MMPLAYER_POST_MSG(player, MM_MESSAGE_SEEK_COMPLETED, NULL);
- }
- }
+ if (running_time_diff <= 0) /* don't need to adjust */
break;
- default:
+ gst_event_parse_qos(event, &type, &proportion, &diff, ×tamp);
+ gst_event_unref(event);
+
+ if (timestamp < running_time_diff) {
+ LOGW("QOS event from previous group");
+ ret = GST_PAD_PROBE_DROP;
break;
}
- }
- break;
-
- case GST_MESSAGE_CLOCK_LOST:
- {
- GstClock *clock = NULL;
- gboolean need_new_clock = FALSE;
- gst_message_parse_clock_lost(msg, &clock);
- LOGD("GST_MESSAGE_CLOCK_LOST : %s\n", (clock ? GST_OBJECT_NAME(clock) : "NULL"));
-
- if (!player->videodec_linked)
- need_new_clock = TRUE;
- else if (!player->ini.use_system_clock)
- need_new_clock = TRUE;
+ LOGD("[%d] Adjusting QOS event: %" GST_TIME_FORMAT
+ " - %" GST_TIME_FORMAT " = %" GST_TIME_FORMAT,
+ stream_type, GST_TIME_ARGS(timestamp),
+ GST_TIME_ARGS(running_time_diff),
+ GST_TIME_ARGS(timestamp - running_time_diff));
- if (need_new_clock) {
- LOGD("Provide clock is TRUE, do pause->resume\n");
- __gst_pause(player, FALSE);
- __gst_resume(player, FALSE);
- }
- }
- break;
+ timestamp -= running_time_diff;
- case GST_MESSAGE_NEW_CLOCK:
- {
- GstClock *clock = NULL;
- gst_message_parse_new_clock(msg, &clock);
- LOGD("GST_MESSAGE_NEW_CLOCK : %s\n", (clock ? GST_OBJECT_NAME(clock) : "NULL"));
+ /* That case is invalid for QoS events */
+ if (diff < 0 && -diff > timestamp) {
+ LOGW("QOS event from previous group");
+ ret = GST_PAD_PROBE_DROP;
+ break;
}
- break;
- case GST_MESSAGE_ELEMENT:
- {
- const gchar *structure_name;
- gint count = 0, idx = 0;
- MMHandleType attrs = 0;
+ tmpev = gst_event_new_qos(GST_QOS_TYPE_UNDERFLOW, proportion, diff, timestamp);
+ GST_PAD_PROBE_INFO_DATA(info) = tmpev;
- attrs = MMPLAYER_GET_ATTRS(player);
- if (!attrs) {
- LOGE("cannot get content attribute");
- break;
- }
+ break;
+ }
+ default:
+ break;
+ }
- if (gst_message_get_structure(msg) == NULL)
- break;
+ERROR:
+ if (caps)
+ gst_caps_unref(caps);
+ return ret;
+}
- structure_name = gst_structure_get_name(gst_message_get_structure(msg));
- if (!structure_name)
- break;
+static void
+__mmplayer_gst_decode_pad_added(GstElement *elem, GstPad *pad, gpointer data)
+{
+ mm_player_t* player = NULL;
+ GstElement* pipeline = NULL;
+ GstElement* selector = NULL;
+ GstElement* fakesink = NULL;
+ GstCaps* caps = NULL;
+ GstStructure* str = NULL;
+ const gchar* name = NULL;
+ GstPad* sinkpad = NULL;
+ GstPad* srcpad = NULL;
+ gboolean first_track = FALSE;
- LOGD("GST_MESSAGE_ELEMENT %s from %s", structure_name, GST_OBJECT_NAME(GST_MESSAGE_SRC(msg)));
+ enum MainElementID elemId = MMPLAYER_M_NUM;
+ MMPlayerTrackType stream_type = MM_PLAYER_TRACK_TYPE_AUDIO;
- if (!strcmp(structure_name, "adaptive-streaming-variant")) {
- const GValue *var_info = NULL;
+ /* check handles */
+ player = (mm_player_t*)data;
- var_info = gst_structure_get_value(gst_message_get_structure(msg), "video-variant-info");
- if (var_info != NULL) {
- if (player->adaptive_info.var_list)
- g_list_free_full(player->adaptive_info.var_list, g_free);
+ MMPLAYER_RETURN_IF_FAIL(elem && pad);
+ MMPLAYER_RETURN_IF_FAIL(player && player->pipeline && player->pipeline->mainbin);
- /* share addr or copy the list */
- player->adaptive_info.var_list =
- g_list_copy_deep((GList *)g_value_get_pointer(var_info), (GCopyFunc)__mmplayer_adaptive_var_info, NULL);
+ //LOGD("pad-added signal handling\n");
- count = g_list_length(player->adaptive_info.var_list);
- if (count > 0) {
- VariantData *temp = NULL;
+ pipeline = player->pipeline->mainbin[MMPLAYER_M_PIPE].gst;
- /* print out for debug */
- LOGD("num of variant_info %d", count);
- for (idx = 0; idx < count; idx++) {
- temp = g_list_nth_data(player->adaptive_info.var_list, idx);
- if (temp)
- LOGD("variant(%d) [b]%d [w]%d [h]%d ", idx, temp->bandwidth, temp->width, temp->height);
- }
- }
- }
- }
+ /* get mimetype from caps */
+ caps = gst_pad_query_caps(pad, NULL);
+ if (!caps) {
+ LOGE("cannot get caps from pad.\n");
+ goto ERROR;
+ }
- if (!strcmp(structure_name, "prepare-decode-buffers")) {
- gint num_buffers = 0;
- gint extra_num_buffers = 0;
+ str = gst_caps_get_structure(caps, 0);
+ if (!str) {
+ LOGE("cannot get structure from caps.\n");
+ goto ERROR;
+ }
- if (gst_structure_get_int(gst_message_get_structure(msg), "num_buffers", &num_buffers)) {
- player->video_num_buffers = num_buffers;
- LOGD("video_num_buffers : %d", player->video_num_buffers);
- }
+ name = gst_structure_get_name(str);
+ if (!name) {
+ LOGE("cannot get mimetype from structure.\n");
+ goto ERROR;
+ }
- if (gst_structure_get_int(gst_message_get_structure(msg), "extra_num_buffers", &extra_num_buffers)) {
- player->video_extra_num_buffers = extra_num_buffers;
- LOGD("num_of_vout_extra num buffers : %d", extra_num_buffers);
- }
- break;
- }
+ MMPLAYER_LOG_GST_CAPS_TYPE(caps);
+ //LOGD("detected mimetype : %s\n", name);
- if (!strcmp(structure_name, "Language_list")) {
- const GValue *lang_list = NULL;
- lang_list = gst_structure_get_value(gst_message_get_structure(msg), "lang_list");
- if (lang_list != NULL) {
- count = g_list_length((GList *)g_value_get_pointer(lang_list));
- if (count > 1)
- LOGD("Total audio tracks(from parser) = %d \n", count);
- }
- }
+ if (strstr(name, "video")) {
+ gint stype = 0;
- if (!strcmp(structure_name, "Ext_Sub_Language_List")) {
- const GValue *lang_list = NULL;
- MMPlayerLangStruct *temp = NULL;
-
- lang_list = gst_structure_get_value(gst_message_get_structure(msg), "lang_list");
- if (lang_list != NULL) {
- count = g_list_length((GList *)g_value_get_pointer(lang_list));
- if (count) {
- MMPLAYER_SUBTITLE_INFO_LOCK(player);
- player->subtitle_language_list = (GList *)g_value_get_pointer(lang_list);
- mm_attrs_set_int_by_name(attrs, "content_text_track_num", (gint)count);
- if (mmf_attrs_commit(attrs))
- LOGE("failed to commit.\n");
- LOGD("Total subtitle tracks = %d \n", count);
-
- while (count) {
- temp = g_list_nth_data(player->subtitle_language_list, count - 1);
- if (temp)
- LOGD("value of lang_key is %s and lang_code is %s",
- temp->language_key, temp->language_code);
- count--;
- }
- MMPLAYER_SUBTITLE_INFO_SIGNAL(player);
- MMPLAYER_SUBTITLE_INFO_UNLOCK(player);
- }
- }
- }
+ mm_attrs_set_int_by_name(player->attrs, "content_video_found", TRUE);
+ mm_attrs_get_int_by_name(player->attrs, "display_surface_type", &stype);
- /* custom message */
- if (!strcmp(structure_name, "audio_codec_not_supported")) {
- MMMessageParamType msg_param = {0,};
- msg_param.code = MM_ERROR_PLAYER_AUDIO_CODEC_NOT_FOUND;
- MMPLAYER_POST_MSG(player, MM_MESSAGE_ERROR, &msg_param);
- }
+ /* don't make video because of not required, and not support multiple track */
+ if (stype == MM_DISPLAY_SURFACE_NULL) {
+ LOGD("no video sink by null surface");
- /* custom message for RTSP attribute :
- RTSP case, buffer is not come from server before PLAYING state. However,we have to get attribute after PAUSE state chaged.
- sdp which has contents info is received when rtsp connection is opened.
- extract duration ,codec info , resolution from sdp and get it by GstMessage */
- if (!strcmp(structure_name, "rtspsrc_properties")) {
+ gchar *caps_str = gst_caps_to_string(caps);
+ if (caps_str && (strstr(caps_str, "ST12") || strstr(caps_str, "SN12") ||
+ strstr(caps_str, "SN21") || strstr(caps_str, "S420") || strstr(caps_str, "SR32")))
+ player->set_mode.video_zc = TRUE;
- gchar *audio_codec = NULL;
- gchar *video_codec = NULL;
- gchar *video_frame_size = NULL;
+ MMPLAYER_FREEIF(caps_str);
- gst_structure_get(gst_message_get_structure(msg), "rtsp_duration", G_TYPE_UINT64, &player->duration, NULL);
- LOGD("rtsp duration : %"G_GINT64_FORMAT" msec", GST_TIME_AS_MSECONDS(player->duration));
- player->streaming_type = __mmplayer_get_stream_service_type(player);
+ if (player->v_stream_caps) {
+ gst_caps_unref(player->v_stream_caps);
+ player->v_stream_caps = NULL;
+ }
- gst_structure_get(gst_message_get_structure(msg), "rtsp_audio_codec", G_TYPE_STRING, &audio_codec, NULL);
- LOGD("rtsp_audio_codec : %s", audio_codec);
- if (audio_codec)
- mm_attrs_set_string_by_name(player->attrs, "content_audio_codec", audio_codec);
+ LOGD("create fakesink instead of videobin");
- gst_structure_get(gst_message_get_structure(msg), "rtsp_video_codec", G_TYPE_STRING, &video_codec, NULL);
- LOGD("rtsp_video_codec : %s", video_codec);
- if (video_codec)
- mm_attrs_set_string_by_name(player->attrs, "content_video_codec", video_codec);
+ /* fake sink */
+ fakesink = gst_element_factory_make("fakesink", NULL);
+ if (fakesink == NULL) {
+ LOGE("ERROR : fakesink create error\n");
+ goto ERROR;
+ }
- gst_structure_get(gst_message_get_structure(msg), "rtsp_video_frame_size", G_TYPE_STRING, &video_frame_size, NULL);
- LOGD("rtsp_video_frame_size : %s", video_frame_size);
- if (video_frame_size) {
+ if (player->ini.set_dump_element_flag)
+ __mmplayer_add_dump_buffer_probe(player, fakesink);
- char *seperator = strchr(video_frame_size, '-');
- if (seperator) {
+ player->video_fakesink = fakesink;
- char video_width[10] = {0,};
- int frame_size_len = strlen(video_frame_size);
- int separtor_len = strlen(seperator);
+ /* store it as it's sink element */
+ __mmplayer_add_sink(player, player->video_fakesink);
- strncpy(video_width, video_frame_size, (frame_size_len - separtor_len));
- mm_attrs_set_int_by_name(attrs, "content_video_width", atoi(video_width));
+ gst_bin_add(GST_BIN(pipeline), fakesink);
- seperator++;
- mm_attrs_set_int_by_name(attrs, "content_video_height", atoi(seperator));
- }
- }
+ // link
+ sinkpad = gst_element_get_static_pad(fakesink, "sink");
- if (mmf_attrs_commit(attrs))
- LOGE("failed to commit.\n");
- }
+ if (GST_PAD_LINK_OK != gst_pad_link(pad, sinkpad)) {
+ LOGW("failed to link fakesink\n");
+ gst_object_unref(GST_OBJECT(fakesink));
+ goto ERROR;
}
- break;
-
- case GST_MESSAGE_DURATION_CHANGED:
- {
- LOGD("GST_MESSAGE_DURATION_CHANGED\n");
- if (!__mmplayer_gst_handle_duration(player, msg))
- LOGW("failed to update duration");
- }
- break;
+ if (stype == MM_DISPLAY_SURFACE_REMOTE) {
+ MMPLAYER_SIGNAL_CONNECT(player, sinkpad, MM_PLAYER_SIGNAL_TYPE_VIDEOBIN,
+ "notify::caps", G_CALLBACK(__mmplayer_gst_caps_notify_cb), player);
+ }
- case GST_MESSAGE_ASYNC_START:
- LOGD("GST_MESSAGE_ASYNC_START : %s\n", GST_ELEMENT_NAME(GST_MESSAGE_SRC(msg)));
- break;
+ if (player->set_mode.media_packet_video_stream) {
+ g_object_set(G_OBJECT(fakesink), "signal-handoffs", TRUE, NULL);
- case GST_MESSAGE_ASYNC_DONE:
- {
- MMPlayerGstElement *mainbin;
+ MMPLAYER_SIGNAL_CONNECT(player,
+ G_OBJECT(fakesink),
+ MM_PLAYER_SIGNAL_TYPE_VIDEOBIN,
+ "handoff",
+ G_CALLBACK(__mmplayer_video_stream_decoded_render_cb),
+ (gpointer)player);
- if (!(player->pipeline && player->pipeline->mainbin)) {
- LOGE("player pipeline handle is null");
- break;
+ MMPLAYER_SIGNAL_CONNECT(player,
+ G_OBJECT(fakesink),
+ MM_PLAYER_SIGNAL_TYPE_VIDEOBIN,
+ "preroll-handoff",
+ G_CALLBACK(__mmplayer_video_stream_decoded_preroll_cb),
+ (gpointer)player);
}
- mainbin = player->pipeline->mainbin;
-
- LOGD("GST_MESSAGE_ASYNC_DONE : %s\n", GST_ELEMENT_NAME(GST_MESSAGE_SRC(msg)));
+ g_object_set(G_OBJECT(fakesink), "async", TRUE, "sync", TRUE, NULL);
+ gst_element_set_state(fakesink, GST_STATE_PAUSED);
+ goto DONE;
+ }
- /* we only handle messages from pipeline */
- if (msg->src != (GstObject *)mainbin[MMPLAYER_M_PIPE].gst)
- break;
+ if (MMPLAYER_IS_MS_BUFF_SRC(player)) {
+ __mmplayer_gst_decode_callback(elem, pad, player);
+ goto DONE;
+ }
- if (player->seek_state == MMPLAYER_SEEK_IN_PROGRESS) {
- if (MMPLAYER_TARGET_STATE(player) == MM_PLAYER_STATE_PAUSED) {
- player->seek_state = MMPLAYER_SEEK_NONE;
- MMPLAYER_POST_MSG(player, MM_MESSAGE_SEEK_COMPLETED, NULL);
- } else if (MMPLAYER_TARGET_STATE(player) == MM_PLAYER_STATE_PLAYING) {
- if (mainbin[MMPLAYER_M_AUTOPLUG].gst) {
- LOGD("sync %s state(%s) with parent state(%s)",
- GST_ELEMENT_NAME(mainbin[MMPLAYER_M_AUTOPLUG].gst),
- gst_element_state_get_name(GST_STATE(mainbin[MMPLAYER_M_AUTOPLUG].gst)),
- gst_element_state_get_name(GST_STATE(mainbin[MMPLAYER_M_PIPE].gst)));
-
- /* In case of streaming, pause is required before finishing seeking by buffering.
- After completing the seek(during buffering), the player and sink elems has paused state but others in playing state.
- Because the buffering state is controlled according to the state transition for force resume,
- the decodebin state should be paused as player state. */
- gst_element_sync_state_with_parent(mainbin[MMPLAYER_M_AUTOPLUG].gst);
- }
+ LOGD("video selector \n");
+ elemId = MMPLAYER_M_V_INPUT_SELECTOR;
+ stream_type = MM_PLAYER_TRACK_TYPE_VIDEO;
+ } else {
+ if (strstr(name, "audio")) {
+ gint samplerate = 0;
+ gint channels = 0;
- if ((MMPLAYER_IS_HTTP_STREAMING(player)) &&
- (player->streamer) &&
- (player->streamer->streaming_buffer_type == BUFFER_TYPE_MUXED) &&
- !(player->streamer->buffering_state & MM_PLAYER_BUFFERING_IN_PROGRESS)) {
- GstQuery *query = NULL;
- gboolean busy = FALSE;
- gint percent = 0;
-
- if (player->streamer->buffer_handle[BUFFER_TYPE_MUXED].buffer) {
- query = gst_query_new_buffering(GST_FORMAT_PERCENT);
- if (gst_element_query(player->streamer->buffer_handle[BUFFER_TYPE_MUXED].buffer, query))
- gst_query_parse_buffering_percent(query, &busy, &percent);
- gst_query_unref(query);
-
- LOGD("buffered percent(%s): %d\n",
- GST_ELEMENT_NAME(player->streamer->buffer_handle[BUFFER_TYPE_MUXED].buffer), percent);
- }
+ if (MMPLAYER_IS_MS_BUFF_SRC(player)) {
+ __mmplayer_gst_decode_callback(elem, pad, player);
+ goto DONE;
+ }
- if (percent >= 100)
- __mmplayer_handle_buffering_message(player);
- }
+ LOGD("audio selector \n");
+ elemId = MMPLAYER_M_A_INPUT_SELECTOR;
+ stream_type = MM_PLAYER_TRACK_TYPE_AUDIO;
- player->seek_state = MMPLAYER_SEEK_COMPLETED;
- }
- }
- }
- break;
-
- #if 0 /* delete unnecessary logs */
- case GST_MESSAGE_REQUEST_STATE: LOGD("GST_MESSAGE_REQUEST_STATE\n"); break;
- case GST_MESSAGE_STEP_START: LOGD("GST_MESSAGE_STEP_START\n"); break;
- case GST_MESSAGE_QOS: LOGD("GST_MESSAGE_QOS\n"); break;
- case GST_MESSAGE_PROGRESS: LOGD("GST_MESSAGE_PROGRESS\n"); break;
- case GST_MESSAGE_ANY: LOGD("GST_MESSAGE_ANY\n"); break;
- case GST_MESSAGE_INFO: LOGD("GST_MESSAGE_STATE_DIRTY\n"); break;
- case GST_MESSAGE_STATE_DIRTY: LOGD("GST_MESSAGE_STATE_DIRTY\n"); break;
- case GST_MESSAGE_STEP_DONE: LOGD("GST_MESSAGE_STEP_DONE\n"); break;
- case GST_MESSAGE_CLOCK_PROVIDE: LOGD("GST_MESSAGE_CLOCK_PROVIDE\n"); break;
- case GST_MESSAGE_STRUCTURE_CHANGE: LOGD("GST_MESSAGE_STRUCTURE_CHANGE\n"); break;
- case GST_MESSAGE_STREAM_STATUS: LOGD("GST_MESSAGE_STREAM_STATUS\n"); break;
- case GST_MESSAGE_APPLICATION: LOGD("GST_MESSAGE_APPLICATION\n"); break;
- case GST_MESSAGE_SEGMENT_START: LOGD("GST_MESSAGE_SEGMENT_START\n"); break;
- case GST_MESSAGE_SEGMENT_DONE: LOGD("GST_MESSAGE_SEGMENT_DONE\n"); break;
- case GST_MESSAGE_LATENCY: LOGD("GST_MESSAGE_LATENCY\n"); break;
- #endif
-
- default:
- break;
- }
+ gst_structure_get_int(str, "rate", &samplerate);
+ gst_structure_get_int(str, "channels", &channels);
- /* should not call 'gst_message_unref(msg)' */
- return;
-}
+ if ((channels > 0 && samplerate == 0)) {//exclude audio decoding
+ /* fake sink */
+ fakesink = gst_element_factory_make("fakesink", NULL);
+ if (fakesink == NULL) {
+ LOGE("ERROR : fakesink create error\n");
+ goto ERROR;
+ }
-static gboolean
-__mmplayer_gst_handle_duration(mm_player_t* player, GstMessage* msg)
-{
- gint64 bytes = 0;
+ gst_bin_add(GST_BIN(pipeline), fakesink);
- MMPLAYER_FENTER();
+ /* link */
+ sinkpad = gst_element_get_static_pad(fakesink, "sink");
- MMPLAYER_RETURN_VAL_IF_FAIL(player, FALSE);
- MMPLAYER_RETURN_VAL_IF_FAIL(msg, FALSE);
+ if (GST_PAD_LINK_OK != gst_pad_link(pad, sinkpad)) {
+ LOGW("failed to link fakesink\n");
+ gst_object_unref(GST_OBJECT(fakesink));
+ goto ERROR;
+ }
- if ((MMPLAYER_IS_HTTP_STREAMING(player)) &&
- (msg->src) && (msg->src == (GstObject *)player->pipeline->mainbin[MMPLAYER_M_SRC].gst)) {
- LOGD("msg src : [%s]", GST_ELEMENT_NAME(GST_ELEMENT_CAST(msg->src)));
+ g_object_set(G_OBJECT(fakesink), "sync", TRUE, NULL);
+ gst_element_set_state(fakesink, GST_STATE_PAUSED);
- if (gst_element_query_duration(GST_ELEMENT_CAST(msg->src), GST_FORMAT_BYTES, &bytes)) {
- LOGD("data total size of http content: %"G_GINT64_FORMAT, bytes);
- player->http_content_size = (bytes > 0) ? (bytes) : (0);
+ goto DONE;
+ }
+ } else if (strstr(name, "text")) {
+ LOGD("text selector \n");
+ elemId = MMPLAYER_M_T_INPUT_SELECTOR;
+ stream_type = MM_PLAYER_TRACK_TYPE_TEXT;
+ } else {
+ LOGE("wrong elem id \n");
+ goto ERROR;
}
- } else {
- /* handling audio clip which has vbr. means duration is keep changing */
- _mmplayer_update_content_attrs(player, ATTR_DURATION);
}
- MMPLAYER_FLEAVE();
-
- return TRUE;
-}
-
-static void __mmplayer_get_metadata_360_from_tags(GstTagList *tags,
- mm_player_spherical_metadata_t *metadata) {
- gst_tag_list_get_int(tags, "is_spherical", &metadata->is_spherical);
- gst_tag_list_get_int(tags, "is_stitched", &metadata->is_stitched);
- gst_tag_list_get_string(tags, "stitching_software",
- &metadata->stitching_software);
- gst_tag_list_get_string(tags, "projection_type",
- &metadata->projection_type_string);
- gst_tag_list_get_string(tags, "stereo_mode", &metadata->stereo_mode_string);
- gst_tag_list_get_int(tags, "source_count", &metadata->source_count);
- gst_tag_list_get_int(tags, "init_view_heading",
- &metadata->init_view_heading);
- gst_tag_list_get_int(tags, "init_view_pitch", &metadata->init_view_pitch);
- gst_tag_list_get_int(tags, "init_view_roll", &metadata->init_view_roll);
- gst_tag_list_get_int(tags, "timestamp", &metadata->timestamp);
- gst_tag_list_get_int(tags, "full_pano_width_pixels",
- &metadata->full_pano_width_pixels);
- gst_tag_list_get_int(tags, "full_pano_height_pixels",
- &metadata->full_pano_height_pixels);
- gst_tag_list_get_int(tags, "cropped_area_image_width",
- &metadata->cropped_area_image_width);
- gst_tag_list_get_int(tags, "cropped_area_image_height",
- &metadata->cropped_area_image_height);
- gst_tag_list_get_int(tags, "cropped_area_left",
- &metadata->cropped_area_left);
- gst_tag_list_get_int(tags, "cropped_area_top", &metadata->cropped_area_top);
- gst_tag_list_get_int(tags, "ambisonic_type", &metadata->ambisonic_type);
- gst_tag_list_get_int(tags, "ambisonic_format", &metadata->ambisonic_format);
- gst_tag_list_get_int(tags, "ambisonic_order", &metadata->ambisonic_order);
-}
+ selector = player->pipeline->mainbin[elemId].gst;
+ if (selector == NULL) {
+ selector = gst_element_factory_make("input-selector", NULL);
+ LOGD("Creating input-selector\n");
+ if (selector == NULL) {
+ LOGE("ERROR : input-selector create error\n");
+ goto ERROR;
+ }
+ g_object_set(selector, "sync-streams", TRUE, NULL);
-static gboolean
-__mmplayer_gst_extract_tag_from_msg(mm_player_t* player, GstMessage* msg)
-{
-
-/* macro for better code readability */
-#define MMPLAYER_UPDATE_TAG_STRING(gsttag, attribute, playertag) \
-if (gst_tag_list_get_string(tag_list, gsttag, &string)) {\
- if (string != NULL) { \
- SECURE_LOGD("update tag string : %s\n", string); \
- if (strlen(string) > MM_MAX_STRING_LENGTH) { \
- char *new_string = malloc(MM_MAX_STRING_LENGTH); \
- strncpy(new_string, string, MM_MAX_STRING_LENGTH-1); \
- new_string[MM_MAX_STRING_LENGTH-1] = '\0'; \
- mm_attrs_set_string_by_name(attribute, playertag, new_string); \
- g_free(new_string); \
- new_string = NULL; \
- } else { \
- mm_attrs_set_string_by_name(attribute, playertag, string); \
- } \
- g_free(string); \
- string = NULL; \
- } \
-}
+ player->pipeline->mainbin[elemId].id = elemId;
+ player->pipeline->mainbin[elemId].gst = selector;
-#define MMPLAYER_UPDATE_TAG_IMAGE(gsttag, attribute, playertag) \
-do { \
- GstSample *sample = NULL;\
- if (gst_tag_list_get_sample_index(tag_list, gsttag, index, &sample)) {\
- GstMapInfo info = GST_MAP_INFO_INIT;\
- buffer = gst_sample_get_buffer(sample);\
- if (!gst_buffer_map(buffer, &info, GST_MAP_READ)) {\
- LOGD("failed to get image data from tag");\
- gst_sample_unref(sample);\
- return FALSE;\
- } \
- SECURE_LOGD("update album cover data : %p, size : %d\n", info.data, info.size);\
- MMPLAYER_FREEIF(player->album_art);\
- player->album_art = (gchar *)g_malloc(info.size);\
- if (player->album_art) {\
- memcpy(player->album_art, info.data, info.size);\
- mm_attrs_set_data_by_name(attribute, playertag, (void *)player->album_art, info.size);\
- if (MMPLAYER_IS_HTTP_LIVE_STREAMING(player)) {\
- msg_param.data = (void *)player->album_art;\
- msg_param.size = info.size;\
- MMPLAYER_POST_MSG(player, MM_MESSAGE_IMAGE_BUFFER, &msg_param);\
- SECURE_LOGD("post message image buffer data : %p, size : %d\n", info.data, info.size);\
- } \
- } \
- gst_buffer_unmap(buffer, &info);\
- gst_sample_unref(sample);\
- } \
-} while (0)
-
-#define MMPLAYER_UPDATE_TAG_UINT(gsttag, attribute, playertag) \
-do { \
- if (gst_tag_list_get_uint(tag_list, gsttag, &v_uint)) { \
- if (v_uint) { \
- int i = 0; \
- gchar *tag_list_str = NULL; \
- MMPlayerTrackType track_type = MM_PLAYER_TRACK_TYPE_AUDIO; \
- if (strstr(GST_OBJECT_NAME(msg->src), "audio")) \
- track_type = MM_PLAYER_TRACK_TYPE_AUDIO; \
- else if (strstr(GST_OBJECT_NAME(msg->src), "video")) \
- track_type = MM_PLAYER_TRACK_TYPE_VIDEO; \
- else \
- track_type = MM_PLAYER_TRACK_TYPE_TEXT; \
- if (!strncmp(gsttag, GST_TAG_BITRATE, strlen(GST_TAG_BITRATE))) { \
- if (track_type == MM_PLAYER_TRACK_TYPE_AUDIO) \
- mm_attrs_set_int_by_name(attribute, "content_audio_bitrate", v_uint); \
- player->bitrate[track_type] = v_uint; \
- player->total_bitrate = 0; \
- for (i = 0; i < MM_PLAYER_STREAM_COUNT_MAX; i++) \
- player->total_bitrate += player->bitrate[i]; \
- mm_attrs_set_int_by_name(attribute, playertag, player->total_bitrate); \
- SECURE_LOGD("update bitrate %d[bps] of stream #%d.\n", v_uint, (int)track_type); \
- } else if (!strncmp(gsttag, GST_TAG_MAXIMUM_BITRATE, strlen(GST_TAG_MAXIMUM_BITRATE))) { \
- player->maximum_bitrate[track_type] = v_uint; \
- player->total_maximum_bitrate = 0; \
- for (i = 0; i < MM_PLAYER_STREAM_COUNT_MAX; i++) \
- player->total_maximum_bitrate += player->maximum_bitrate[i]; \
- mm_attrs_set_int_by_name(attribute, playertag, player->total_maximum_bitrate);\
- SECURE_LOGD("update maximum bitrate %d[bps] of stream #%d\n", v_uint, (int)track_type);\
- } else { \
- mm_attrs_set_int_by_name(attribute, playertag, v_uint); \
- } \
- v_uint = 0;\
- g_free(tag_list_str); \
- } \
- } \
-} while (0)
-
-#define MMPLAYER_UPDATE_TAG_DATE(gsttag, attribute, playertag) \
-if (gst_tag_list_get_date(tag_list, gsttag, &date)) {\
- if (date != NULL) {\
- string = g_strdup_printf("%d", g_date_get_year(date));\
- mm_attrs_set_string_by_name(attribute, playertag, string);\
- SECURE_LOGD("metainfo year : %s\n", string);\
- MMPLAYER_FREEIF(string);\
- g_date_free(date);\
- } \
-}
+ first_track = TRUE;
+ // player->selector[stream_type].active_pad_index = DEFAULT_TRACK; // default
-#define MMPLAYER_UPDATE_TAG_DATE_TIME(gsttag, attribute, playertag) \
-if (gst_tag_list_get_date_time(tag_list, gsttag, &datetime)) {\
- if (datetime != NULL) {\
- string = g_strdup_printf("%d", gst_date_time_get_year(datetime));\
- mm_attrs_set_string_by_name(attribute, playertag, string);\
- SECURE_LOGD("metainfo year : %s\n", string);\
- MMPLAYER_FREEIF(string);\
- gst_date_time_unref(datetime);\
- } \
-}
+ srcpad = gst_element_get_static_pad(selector, "src");
-#define MMPLAYER_UPDATE_TAG_UINT64(gsttag, attribute, playertag) \
-if (gst_tag_list_get_uint64(tag_list, gsttag, &v_uint64)) {\
- if (v_uint64) {\
- /* FIXIT : don't know how to store date */\
- g_assert(1);\
- v_uint64 = 0;\
- } \
-}
+ LOGD("blocking %s:%s", GST_DEBUG_PAD_NAME(srcpad));
+ player->selector[stream_type].block_id = gst_pad_add_probe(srcpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
+ __mmplayer_gst_selector_blocked, NULL, NULL);
+ player->selector[stream_type].event_probe_id = gst_pad_add_probe(srcpad, GST_PAD_PROBE_TYPE_EVENT_BOTH|GST_PAD_PROBE_TYPE_EVENT_FLUSH,
+ __mmplayer_gst_selector_event_probe, player, NULL);
-#define MMPLAYER_UPDATE_TAG_DOUBLE(gsttag, attribute, playertag) \
-if (gst_tag_list_get_double(tag_list, gsttag, &v_double)) {\
- if (v_double) {\
- /* FIXIT : don't know how to store date */\
- g_assert(1);\
- v_double = 0;\
- } \
-}
+ gst_element_set_state(selector, GST_STATE_PAUSED);
+ gst_bin_add(GST_BIN(pipeline), selector);
+ } else
+ LOGD("input-selector is already created.\n");
- /* function start */
- GstTagList* tag_list = NULL;
+ // link
+ LOGD("Calling request pad with selector %p \n", selector);
+ sinkpad = gst_element_get_request_pad(selector, "sink_%u");
- MMHandleType attrs = 0;
+ LOGD("got pad %s:%s from selector", GST_DEBUG_PAD_NAME(sinkpad));
- char *string = NULL;
- guint v_uint = 0;
- GDate *date = NULL;
- GstDateTime *datetime = NULL;
- /* album cover */
- GstBuffer *buffer = NULL;
- gint index = 0;
- MMMessageParamType msg_param = {0, };
+ if (GST_PAD_LINK_OK != gst_pad_link(pad, sinkpad)) {
+ LOGW("failed to link selector\n");
+ gst_object_unref(GST_OBJECT(selector));
+ goto ERROR;
+ }
- /* currently not used. but those are needed for above macro */
- //guint64 v_uint64 = 0;
- //gdouble v_double = 0;
+ if (first_track) {
+ LOGD("this is first track --> active track \n");
+ g_object_set(selector, "active-pad", sinkpad, NULL);
+ }
- MMPLAYER_RETURN_VAL_IF_FAIL(player && msg, FALSE);
+ _mmplayer_track_update_info(player, stream_type, sinkpad);
- attrs = MMPLAYER_GET_ATTRS(player);
- MMPLAYER_RETURN_VAL_IF_FAIL(attrs, FALSE);
-
- /* get tag list from gst message */
- gst_message_parse_tag(msg, &tag_list);
-
- /* store tags to player attributes */
- MMPLAYER_UPDATE_TAG_STRING(GST_TAG_TITLE, attrs, "tag_title");
- /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_TITLE_SORTNAME, ?, ?); */
- MMPLAYER_UPDATE_TAG_STRING(GST_TAG_ARTIST, attrs, "tag_artist");
- /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_ARTIST_SORTNAME, ?, ?); */
- MMPLAYER_UPDATE_TAG_STRING(GST_TAG_ALBUM, attrs, "tag_album");
- /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_ALBUM_SORTNAME, ?, ?); */
- MMPLAYER_UPDATE_TAG_STRING(GST_TAG_COMPOSER, attrs, "tag_author");
- MMPLAYER_UPDATE_TAG_DATE(GST_TAG_DATE, attrs, "tag_date");
- MMPLAYER_UPDATE_TAG_DATE_TIME(GST_TAG_DATE_TIME, attrs, "tag_date");
- MMPLAYER_UPDATE_TAG_STRING(GST_TAG_GENRE, attrs, "tag_genre");
- /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_COMMENT, ?, ?); */
- /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_EXTENDED_COMMENT, ?, ?); */
- MMPLAYER_UPDATE_TAG_UINT(GST_TAG_TRACK_NUMBER, attrs, "tag_track_num");
- /* MMPLAYER_UPDATE_TAG_UINT(GST_TAG_TRACK_COUNT, ?, ?); */
- /* MMPLAYER_UPDATE_TAG_UINT(GST_TAG_ALBUM_VOLUME_NUMBER, ?, ?); */
- /* MMPLAYER_UPDATE_TAG_UINT(GST_TAG_ALBUM_VOLUME_COUNT, ?, ?); */
- /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_LOCATION, ?, ?); */
- MMPLAYER_UPDATE_TAG_STRING(GST_TAG_DESCRIPTION, attrs, "tag_description");
- /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_VERSION, ?, ?); */
- /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_ISRC, ?, ?); */
- /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_ORGANIZATION, ?, ?); */
- MMPLAYER_UPDATE_TAG_STRING(GST_TAG_COPYRIGHT, attrs, "tag_copyright");
- /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_COPYRIGHT_URI, ?, ?); */
- /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_CONTACT, ?, ?); */
- /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_LICENSE, ?, ?); */
- /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_LICENSE_URI, ?, ?); */
- /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_PERFORMER, ?, ?); */
- /* MMPLAYER_UPDATE_TAG_UINT64(GST_TAG_DURATION, ?, ?); */
- /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_CODEC, ?, ?); */
- MMPLAYER_UPDATE_TAG_STRING(GST_TAG_VIDEO_CODEC, attrs, "content_video_codec");
- MMPLAYER_UPDATE_TAG_STRING(GST_TAG_AUDIO_CODEC, attrs, "content_audio_codec");
- MMPLAYER_UPDATE_TAG_UINT(GST_TAG_BITRATE, attrs, "content_bitrate");
- MMPLAYER_UPDATE_TAG_UINT(GST_TAG_MAXIMUM_BITRATE, attrs, "content_max_bitrate");
- MMPLAYER_UPDATE_TAG_LOCK(player);
- MMPLAYER_UPDATE_TAG_IMAGE(GST_TAG_IMAGE, attrs, "tag_album_cover");
- MMPLAYER_UPDATE_TAG_UNLOCK(player);
- /* MMPLAYER_UPDATE_TAG_UINT(GST_TAG_NOMINAL_BITRATE, ?, ?); */
- /* MMPLAYER_UPDATE_TAG_UINT(GST_TAG_MINIMUM_BITRATE, ?, ?); */
- /* MMPLAYER_UPDATE_TAG_UINT(GST_TAG_SERIAL, ?, ?); */
- /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_ENCODER, ?, ?); */
- /* MMPLAYER_UPDATE_TAG_UINT(GST_TAG_ENCODER_VERSION, ?, ?); */
- /* MMPLAYER_UPDATE_TAG_DOUBLE(GST_TAG_TRACK_GAIN, ?, ?); */
- /* MMPLAYER_UPDATE_TAG_DOUBLE(GST_TAG_TRACK_PEAK, ?, ?); */
- /* MMPLAYER_UPDATE_TAG_DOUBLE(GST_TAG_ALBUM_GAIN, ?, ?); */
- /* MMPLAYER_UPDATE_TAG_DOUBLE(GST_TAG_ALBUM_PEAK, ?, ?); */
- /* MMPLAYER_UPDATE_TAG_DOUBLE(GST_TAG_REFERENCE_LEVEL, ?, ?); */
- /* MMPLAYER_UPDATE_TAG_STRING(GST_TAG_LANGUAGE_CODE, ?, ?); */
- /* MMPLAYER_UPDATE_TAG_DOUBLE(GST_TAG_BEATS_PER_MINUTE, ?, ?); */
- MMPLAYER_UPDATE_TAG_STRING(GST_TAG_IMAGE_ORIENTATION, attrs, "content_video_orientation");
-
- if (strstr(GST_OBJECT_NAME(msg->src), "demux")) {
- if (player->video360_metadata.is_spherical == -1) {
- __mmplayer_get_metadata_360_from_tags(tag_list, &player->video360_metadata);
- mm_attrs_set_int_by_name(attrs, "content_video_is_spherical",
- player->video360_metadata.is_spherical);
- if (player->video360_metadata.is_spherical == 1) {
- LOGD("This is spherical content for 360 playback.");
- player->is_content_spherical = TRUE;
- } else {
- LOGD("This is not spherical content");
- player->is_content_spherical = FALSE;
- }
+DONE:
+ERROR:
- if (player->video360_metadata.projection_type_string) {
- if (!strcmp(player->video360_metadata.projection_type_string, "equirectangular")) {
- player->video360_metadata.projection_type = VIDEO360_PROJECTION_TYPE_EQUIRECTANGULAR;
- } else {
- LOGE("Projection %s: code not implemented.\n", player->video360_metadata.projection_type_string);
- player->is_content_spherical = player->is_video360_enabled = FALSE;
- }
- }
+ if (caps)
+ gst_caps_unref(caps);
- if (player->video360_metadata.stereo_mode_string) {
- if (!strcmp(player->video360_metadata.stereo_mode_string, "mono")) {
- player->video360_metadata.stereo_mode = VIDEO360_MODE_MONOSCOPIC;
- } else if (!strcmp(player->video360_metadata.stereo_mode_string, "left-right")) {
- player->video360_metadata.stereo_mode = VIDEO360_MODE_STEREOSCOPIC_LEFT_RIGHT;
- } else if (!strcmp(player->video360_metadata.stereo_mode_string, "top-bottom")) {
- player->video360_metadata.stereo_mode = VIDEO360_MODE_STEREOSCOPIC_TOP_BOTTOM;
- } else {
- LOGE("Stereo mode %s: code not implemented.\n", player->video360_metadata.stereo_mode_string);
- player->is_content_spherical = player->is_video360_enabled = FALSE;
- }
- }
- }
+ if (sinkpad) {
+ gst_object_unref(GST_OBJECT(sinkpad));
+ sinkpad = NULL;
}
- if (mmf_attrs_commit(attrs))
- LOGE("failed to commit.\n");
-
- gst_tag_list_free(tag_list);
+ if (srcpad) {
+ gst_object_unref(GST_OBJECT(srcpad));
+ srcpad = NULL;
+ }
- return TRUE;
+ return;
}
-static void
-__mmplayer_gst_rtp_no_more_pads(GstElement *element, gpointer data)
+static void __mmplayer_handle_text_decode_path(mm_player_t* player, GstElement* text_selector)
{
- mm_player_t* player = (mm_player_t*) data;
+ GstPad* srcpad = NULL;
+ MMHandleType attrs = 0;
+ gint active_index = 0;
- MMPLAYER_FENTER();
+ // [link] input-selector :: textbin
+ srcpad = gst_element_get_static_pad(text_selector, "src");
+ if (!srcpad) {
+ LOGE("failed to get srcpad from selector\n");
+ return;
+ }
- /* NOTE : we can remove fakesink here if there's no rtp_dynamic_pad. because whenever
- * we connect autoplugging element to the pad which is just added to rtspsrc, we increase
- * num_dynamic_pad. and this is no-more-pad situation which means no more pad will be added.
- * So we can say this. if num_dynamic_pad is zero, it must be one of followings
+ LOGD("got pad %s:%s from text selector\n", GST_DEBUG_PAD_NAME(srcpad));
- * [1] audio and video will be dumped with filesink.
- * [2] autoplugging is done by just using pad caps.
- * [3] typefinding has happend in audio but audiosink is created already before no-more-pad signal
- * and the video will be dumped via filesink.
- */
- if (player->num_dynamic_pad == 0) {
- LOGD("it seems pad caps is directely used for autoplugging. removing fakesink now\n");
+ active_index = player->selector[MM_PLAYER_TRACK_TYPE_TEXT].active_pad_index;
+ if ((active_index != DEFAULT_TRACK) &&
+ (__mmplayer_change_selector_pad(player, MM_PLAYER_TRACK_TYPE_TEXT, active_index) != MM_ERROR_NONE)) {
+ LOGW("failed to change text track\n");
+ player->selector[MM_PLAYER_TRACK_TYPE_TEXT].active_pad_index = DEFAULT_TRACK;
+ }
- if (!__mmplayer_gst_remove_fakesink(player,
- &player->pipeline->mainbin[MMPLAYER_M_SRC_FAKESINK]))
- /* NOTE : __mmplayer_pipeline_complete() can be called several time. because
- * signaling mechanism(pad-added, no-more-pad, new-decoded-pad) from various
- * source element are not same. To overcome this situation, this function will called
- * several places and several times. Therefore, this is not an error case.
- */
- return;
+ player->no_more_pad = TRUE;
+ __mmplayer_gst_decode_callback(text_selector, srcpad, player);
+
+ LOGD("unblocking %s:%s", GST_DEBUG_PAD_NAME(srcpad));
+ if (player->selector[MM_PLAYER_TRACK_TYPE_TEXT].block_id) {
+ gst_pad_remove_probe(srcpad, player->selector[MM_PLAYER_TRACK_TYPE_TEXT].block_id);
+ player->selector[MM_PLAYER_TRACK_TYPE_TEXT].block_id = 0;
}
- /* create dot before error-return. for debugging */
- MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-no-more-pad");
+ LOGD("Total text tracks = %d \n", player->selector[MM_PLAYER_TRACK_TYPE_TEXT].total_track_num);
- player->no_more_pad = TRUE;
+ if (player->selector[MM_PLAYER_TRACK_TYPE_TEXT].total_track_num > 0)
+ player->has_closed_caption = TRUE;
- MMPLAYER_FLEAVE();
+ attrs = MMPLAYER_GET_ATTRS(player);
+ if (attrs) {
+ mm_attrs_set_int_by_name(attrs, "content_text_track_num", (gint)player->selector[MM_PLAYER_TRACK_TYPE_TEXT].total_track_num);
+ if (mmf_attrs_commit(attrs))
+ LOGE("failed to commit.\n");
+ } else
+ LOGE("cannot get content attribute");
+
+ if (srcpad) {
+ gst_object_unref(GST_OBJECT(srcpad));
+ srcpad = NULL;
+ }
}
-static gboolean
-__mmplayer_gst_remove_fakesink(mm_player_t* player, MMPlayerGstElement* fakesink)
+static void
+__mmplayer_gst_deinterleave_pad_added(GstElement *elem, GstPad *pad, gpointer data)
{
- GstElement* parent = NULL;
-
- MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, FALSE);
+ mm_player_t* player = (mm_player_t*)data;
+ GstElement* selector = NULL;
+ GstElement* queue = NULL;
- /* if we have no fakesink. this meas we are using decodebin which doesn'
- t need to add extra fakesink */
- MMPLAYER_RETURN_VAL_IF_FAIL(fakesink, TRUE);
+ GstPad* srcpad = NULL;
+ GstPad* sinkpad = NULL;
+ GstCaps* caps = NULL;
+ gchar* caps_str = NULL;
- /* lock */
- MMPLAYER_FSINK_LOCK(player);
+ MMPLAYER_FENTER();
+ MMPLAYER_RETURN_IF_FAIL(player && player->pipeline && player->pipeline->mainbin);
- if (!fakesink->gst)
- goto ERROR;
+ caps = gst_pad_get_current_caps(pad);
+ caps_str = gst_caps_to_string(caps);
+ LOGD("deinterleave new caps : %s\n", caps_str);
+ MMPLAYER_FREEIF(caps_str);
+ gst_caps_unref(caps);
- /* get parent of fakesink */
- parent = (GstElement*)gst_object_get_parent((GstObject*)fakesink->gst);
- if (!parent) {
- LOGD("fakesink already removed\n");
+ if ((queue = __mmplayer_element_create_and_link(player, pad, "queue")) == NULL) {
+ LOGE("ERROR : queue create error\n");
goto ERROR;
}
- gst_element_set_locked_state(fakesink->gst, TRUE);
-
- /* setting the state to NULL never returns async
- * so no need to wait for completion of state transiton
- */
- if (GST_STATE_CHANGE_FAILURE == gst_element_set_state(fakesink->gst, GST_STATE_NULL))
- LOGE("fakesink state change failure!\n");
- /* FIXIT : should I return here? or try to proceed to next? */
- /* return FALSE; */
-
- /* remove fakesink from it's parent */
- if (!gst_bin_remove(GST_BIN(parent), fakesink->gst)) {
- LOGE("failed to remove fakesink\n");
+ g_object_set(G_OBJECT(queue),
+ "max-size-buffers", 10,
+ "max-size-bytes", 0,
+ "max-size-time", (guint64)0,
+ NULL);
- gst_object_unref(parent);
+ selector = player->pipeline->mainbin[MMPLAYER_M_A_SELECTOR].gst;
+ if (!selector) {
+ LOGE("there is no audio channel selector.\n");
goto ERROR;
}
- gst_object_unref(parent);
+ srcpad = gst_element_get_static_pad(queue, "src");
+ sinkpad = gst_element_get_request_pad(selector, "sink_%u");
- LOGD("state-holder removed\n");
+ LOGD("link(%s:%s - %s:%s)\n", GST_DEBUG_PAD_NAME(srcpad), GST_DEBUG_PAD_NAME(sinkpad));
- gst_element_set_locked_state(fakesink->gst, FALSE);
+ if (GST_PAD_LINK_OK != gst_pad_link(srcpad, sinkpad)) {
+ LOGW("failed to link deinterleave - selector\n");
+ goto ERROR;
+ }
- MMPLAYER_FSINK_UNLOCK(player);
- return TRUE;
+ gst_element_set_state(queue, GST_STATE_PAUSED);
+ player->audio_mode.total_track_num++;
ERROR:
- if (fakesink->gst)
- gst_element_set_locked_state(fakesink->gst, FALSE);
- MMPLAYER_FSINK_UNLOCK(player);
- return FALSE;
-}
+ if (srcpad) {
+ gst_object_unref(GST_OBJECT(srcpad));
+ srcpad = NULL;
+ }
+
+ if (sinkpad) {
+ gst_object_unref(GST_OBJECT(sinkpad));
+ sinkpad = NULL;
+ }
+ MMPLAYER_FLEAVE();
+ return;
+}
static void
-__mmplayer_gst_rtp_dynamic_pad(GstElement *element, GstPad *pad, gpointer data)
+__mmplayer_gst_deinterleave_no_more_pads(GstElement *elem, gpointer data)
{
- GstPad *sinkpad = NULL;
- GstCaps* caps = NULL;
- GstElement* new_element = NULL;
- GstStructure* str = NULL;
- const gchar* name = NULL;
-
- mm_player_t* player = (mm_player_t*) data;
+ mm_player_t* player = NULL;
+ GstElement* selector = NULL;
+ GstPad* sinkpad = NULL;
+ gint active_index = 0;
+ gchar* change_pad_name = NULL;
+ GstCaps* caps = NULL; // no need to unref
+ gint default_audio_ch = 0;
MMPLAYER_FENTER();
+ player = (mm_player_t*) data;
- MMPLAYER_RETURN_IF_FAIL(element && pad);
- MMPLAYER_RETURN_IF_FAIL(player &&
- player->pipeline &&
- player->pipeline->mainbin);
+ selector = player->pipeline->mainbin[MMPLAYER_M_A_SELECTOR].gst;
+ if (!selector) {
+ LOGE("there is no audio channel selector.\n");
+ goto ERROR;
+ }
- /* payload type is recognizable. increase num_dynamic and wait for sinkbin creation.
- * num_dynamic_pad will decreased after creating a sinkbin.
- */
- player->num_dynamic_pad++;
- LOGD("stream count inc : %d\n", player->num_dynamic_pad);
+ active_index = player->audio_mode.active_pad_index;
- caps = gst_pad_query_caps(pad, NULL);
+ if (active_index != default_audio_ch) {
+ gint audio_ch = default_audio_ch;
- MMPLAYER_CHECK_NULL(caps);
+ /*To get the new pad from the selector*/
+ change_pad_name = g_strdup_printf("sink%d", active_index);
+ if (change_pad_name != NULL) {
+ sinkpad = gst_element_get_static_pad(selector, change_pad_name);
+ if (sinkpad != NULL) {
+ LOGD("Set Active Pad - %s:%s\n", GST_DEBUG_PAD_NAME(sinkpad));
+ g_object_set(selector, "active-pad", sinkpad, NULL);
- /* clear previous result*/
- player->have_dynamic_pad = FALSE;
+ audio_ch = active_index;
- str = gst_caps_get_structure(caps, 0);
+ caps = gst_pad_get_current_caps(sinkpad);
+ MMPLAYER_LOG_GST_CAPS_TYPE(caps);
- if (!str) {
- LOGE("cannot get structure from caps.\n");
- goto ERROR;
- }
+ __mmplayer_set_audio_attrs(player, caps);
+ gst_caps_unref(caps);
+ }
+ MMPLAYER_FREEIF(change_pad_name);
+ }
- name = gst_structure_get_name(str);
- if (!name) {
- LOGE("cannot get mimetype from structure.\n");
- goto ERROR;
+ player->audio_mode.active_pad_index = audio_ch;
+ LOGD("audio LR info(0:stereo) = %d\n", player->audio_mode.active_pad_index);
}
- if (strstr(name, "video")) {
- gint stype = 0;
- mm_attrs_get_int_by_name(player->attrs, "display_surface_type", &stype);
+ERROR:
- if (stype == MM_DISPLAY_SURFACE_NULL || stype == MM_DISPLAY_SURFACE_REMOTE) {
- if (player->v_stream_caps) {
- gst_caps_unref(player->v_stream_caps);
- player->v_stream_caps = NULL;
- }
+ if (sinkpad)
+ gst_object_unref(sinkpad);
- new_element = gst_element_factory_make("fakesink", NULL);
- player->num_dynamic_pad--;
- goto NEW_ELEMENT;
- }
- }
+ MMPLAYER_FLEAVE();
+ return;
+}
- /* clear previous result*/
- player->have_dynamic_pad = FALSE;
+static void
+__mmplayer_gst_build_deinterleave_path(GstElement *elem, GstPad *pad, gpointer data)
+{
+ mm_player_t* player = NULL;
+ MMPlayerGstElement *mainbin = NULL;
- if (!__mmplayer_try_to_plug_decodebin(player, pad, caps)) {
- LOGE("failed to autoplug for caps");
+ GstElement* tee = NULL;
+ GstElement* stereo_queue = NULL;
+ GstElement* mono_queue = NULL;
+ GstElement* conv = NULL;
+ GstElement* filter = NULL;
+ GstElement* deinterleave = NULL;
+ GstElement* selector = NULL;
+
+ GstPad* srcpad = NULL;
+ GstPad* selector_srcpad = NULL;
+ GstPad* sinkpad = NULL;
+ GstCaps* caps = NULL;
+ gulong block_id = 0;
+
+ MMPLAYER_FENTER();
+
+ /* check handles */
+ player = (mm_player_t*) data;
+
+ MMPLAYER_RETURN_IF_FAIL(elem && pad);
+ MMPLAYER_RETURN_IF_FAIL(player && player->pipeline && player->pipeline->mainbin);
+
+ mainbin = player->pipeline->mainbin;
+
+ /* tee */
+ if ((tee = __mmplayer_element_create_and_link(player, pad, "tee")) == NULL) {
+ LOGE("ERROR : tee create error\n");
goto ERROR;
}
- /* check if there's dynamic pad*/
- if (player->have_dynamic_pad) {
- LOGE("using pad caps assums there's no dynamic pad !\n");
+ mainbin[MMPLAYER_M_A_TEE].id = MMPLAYER_M_A_TEE;
+ mainbin[MMPLAYER_M_A_TEE].gst = tee;
+
+ gst_element_set_state(tee, GST_STATE_PAUSED);
+
+ /* queue */
+ srcpad = gst_element_get_request_pad(tee, "src_%u");
+ if ((stereo_queue = __mmplayer_element_create_and_link(player, srcpad, "queue")) == NULL) {
+ LOGE("ERROR : stereo queue create error\n");
goto ERROR;
}
- gst_caps_unref(caps);
- caps = NULL;
+ g_object_set(G_OBJECT(stereo_queue),
+ "max-size-buffers", 10,
+ "max-size-bytes", 0,
+ "max-size-time", (guint64)0,
+ NULL);
-NEW_ELEMENT:
+ player->pipeline->mainbin[MMPLAYER_M_A_Q1].id = MMPLAYER_M_A_Q1;
+ player->pipeline->mainbin[MMPLAYER_M_A_Q1].gst = stereo_queue;
- /* excute new_element if created*/
- if (new_element) {
- LOGD("adding new element to pipeline\n");
+ if (srcpad) {
+ gst_object_unref(GST_OBJECT(srcpad));
+ srcpad = NULL;
+ }
- /* set state to READY before add to bin */
- MMPLAYER_ELEMENT_SET_STATE(new_element, GST_STATE_READY);
+ srcpad = gst_element_get_request_pad(tee, "src_%u");
- /* add new element to the pipeline */
- if (FALSE == gst_bin_add(GST_BIN(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst), new_element)) {
- LOGE("failed to add autoplug element to bin\n");
- goto ERROR;
- }
+ if ((mono_queue = __mmplayer_element_create_and_link(player, srcpad, "queue")) == NULL) {
+ LOGE("ERROR : mono queue create error\n");
+ goto ERROR;
+ }
- /* get pad from element */
- sinkpad = gst_element_get_static_pad(GST_ELEMENT(new_element), "sink");
- if (!sinkpad) {
- LOGE("failed to get sinkpad from autoplug element\n");
- goto ERROR;
- }
+ g_object_set(G_OBJECT(mono_queue),
+ "max-size-buffers", 10,
+ "max-size-bytes", 0,
+ "max-size-time", (guint64)0,
+ NULL);
- /* link it */
- if (GST_PAD_LINK_OK != gst_pad_link(pad, sinkpad)) {
- LOGE("failed to link autoplug element\n");
- goto ERROR;
- }
+ player->pipeline->mainbin[MMPLAYER_M_A_Q2].id = MMPLAYER_M_A_Q2;
+ player->pipeline->mainbin[MMPLAYER_M_A_Q2].gst = mono_queue;
- gst_object_unref(sinkpad);
- sinkpad = NULL;
+ gst_element_set_state(stereo_queue, GST_STATE_PAUSED);
+ gst_element_set_state(mono_queue, GST_STATE_PAUSED);
- /* run. setting PLAYING here since streamming source is live source */
- MMPLAYER_ELEMENT_SET_STATE(new_element, GST_STATE_PLAYING);
+ /* audioconvert */
+ srcpad = gst_element_get_static_pad(mono_queue, "src");
+ if ((conv = __mmplayer_element_create_and_link(player, srcpad, "audioconvert")) == NULL) {
+ LOGE("ERROR : audioconvert create error\n");
+ goto ERROR;
}
- if (caps)
- gst_caps_unref(caps);
+ player->pipeline->mainbin[MMPLAYER_M_A_CONV].id = MMPLAYER_M_A_CONV;
+ player->pipeline->mainbin[MMPLAYER_M_A_CONV].gst = conv;
- MMPLAYER_FLEAVE();
+ /* caps filter */
+ if (srcpad) {
+ gst_object_unref(GST_OBJECT(srcpad));
+ srcpad = NULL;
+ }
+ srcpad = gst_element_get_static_pad(conv, "src");
- return;
+ if ((filter = __mmplayer_element_create_and_link(player, srcpad, "capsfilter")) == NULL) {
+ LOGE("ERROR : capsfilter create error\n");
+ goto ERROR;
+ }
-STATE_CHANGE_FAILED:
-ERROR:
- /* FIXIT : take care if new_element has already added to pipeline */
- if (new_element)
- gst_object_unref(GST_OBJECT(new_element));
+ player->pipeline->mainbin[MMPLAYER_M_A_FILTER].id = MMPLAYER_M_A_FILTER;
+ player->pipeline->mainbin[MMPLAYER_M_A_FILTER].gst = filter;
- if (sinkpad)
- gst_object_unref(GST_OBJECT(sinkpad));
+ caps = gst_caps_from_string("audio/x-raw-int, "
+ "width = (int) 16, "
+ "depth = (int) 16, "
+ "channels = (int) 2");
- if (caps)
- gst_caps_unref(caps);
+ g_object_set(GST_ELEMENT(player->pipeline->mainbin[MMPLAYER_M_A_FILTER].gst), "caps", caps, NULL);
+ gst_caps_unref(caps);
- /* FIXIT : how to inform this error to MSL ????? */
- /* FIXIT : I think we'd better to use g_idle_add() to destroy pipeline and
- * then post an error to application
- */
-}
+ gst_element_set_state(conv, GST_STATE_PAUSED);
+ gst_element_set_state(filter, GST_STATE_PAUSED);
-static GstPadProbeReturn
-__mmplayer_gst_selector_blocked(GstPad* pad, GstPadProbeInfo *info, gpointer data)
-{
- LOGD("pad(%s:%s) is blocked", GST_DEBUG_PAD_NAME(pad));
- return GST_PAD_PROBE_OK;
-}
+ /* deinterleave */
+ if (srcpad) {
+ gst_object_unref(GST_OBJECT(srcpad));
+ srcpad = NULL;
+ }
+ srcpad = gst_element_get_static_pad(filter, "src");
-static GstPadProbeReturn
-__mmplayer_gst_selector_event_probe(GstPad * pad, GstPadProbeInfo * info, gpointer data)
-{
- GstPadProbeReturn ret = GST_PAD_PROBE_OK;
- GstEvent *event = GST_PAD_PROBE_INFO_DATA(info);
- mm_player_t* player = (mm_player_t*)data;
- GstCaps* caps = NULL;
- GstStructure* str = NULL;
- const gchar* name = NULL;
- MMPlayerTrackType stream_type = MM_PLAYER_TRACK_TYPE_VIDEO;
+ if ((deinterleave = __mmplayer_element_create_and_link(player, srcpad, "deinterleave")) == NULL) {
+ LOGE("ERROR : deinterleave create error\n");
+ goto ERROR;
+ }
+ g_object_set(deinterleave, "keep-positions", TRUE, NULL);
- if (GST_EVENT_IS_DOWNSTREAM(event)) {
- if (GST_EVENT_TYPE(event) != GST_EVENT_STREAM_START &&
- GST_EVENT_TYPE(event) != GST_EVENT_FLUSH_STOP &&
- GST_EVENT_TYPE(event) != GST_EVENT_SEGMENT &&
- GST_EVENT_TYPE(event) != GST_EVENT_EOS)
- return ret;
- } else if (GST_EVENT_IS_UPSTREAM(event)) {
- if (GST_EVENT_TYPE(event) != GST_EVENT_QOS)
- return ret;
- }
+ MMPLAYER_SIGNAL_CONNECT(player, deinterleave, MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "pad-added",
+ G_CALLBACK(__mmplayer_gst_deinterleave_pad_added), player);
- caps = gst_pad_query_caps(pad, NULL);
- if (!caps) {
- LOGE("failed to get caps from pad[%s:%s]", GST_DEBUG_PAD_NAME(pad));
- return ret;
- }
+ MMPLAYER_SIGNAL_CONNECT(player, deinterleave, MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "no-more-pads",
+ G_CALLBACK(__mmplayer_gst_deinterleave_no_more_pads), player);
- str = gst_caps_get_structure(caps, 0);
- if (!str) {
- LOGE("failed to get structure from caps");
+ player->pipeline->mainbin[MMPLAYER_M_A_DEINTERLEAVE].id = MMPLAYER_M_A_DEINTERLEAVE;
+ player->pipeline->mainbin[MMPLAYER_M_A_DEINTERLEAVE].gst = deinterleave;
+
+ /* selector */
+ selector = gst_element_factory_make("input-selector", "audio-channel-selector");
+ if (selector == NULL) {
+ LOGE("ERROR : audio-selector create error\n");
goto ERROR;
}
- name = gst_structure_get_name(str);
- if (!name) {
- LOGE("failed to get name from str");
- goto ERROR;
+ g_object_set(selector, "sync-streams", TRUE, NULL);
+ gst_bin_add(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), selector);
+
+ player->pipeline->mainbin[MMPLAYER_M_A_SELECTOR].id = MMPLAYER_M_A_SELECTOR;
+ player->pipeline->mainbin[MMPLAYER_M_A_SELECTOR].gst = selector;
+
+ selector_srcpad = gst_element_get_static_pad(selector, "src");
+
+ LOGD("blocking %s:%s", GST_DEBUG_PAD_NAME(selector_srcpad));
+ block_id =
+ gst_pad_add_probe(selector_srcpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
+ __mmplayer_gst_selector_blocked, NULL, NULL);
+
+ if (srcpad) {
+ gst_object_unref(GST_OBJECT(srcpad));
+ srcpad = NULL;
}
- if (strstr(name, "audio")) {
- stream_type = MM_PLAYER_TRACK_TYPE_AUDIO;
- } else if (strstr(name, "video")) {
- stream_type = MM_PLAYER_TRACK_TYPE_VIDEO;
- } else {
- /* text track is not supportable */
- LOGE("invalid name %s", name);
+ srcpad = gst_element_get_static_pad(stereo_queue, "src");
+ sinkpad = gst_element_get_request_pad(selector, "sink_%u");
+
+ if (GST_PAD_LINK_OK != gst_pad_link(srcpad, sinkpad)) {
+ LOGW("failed to link queue_stereo - selector\n");
goto ERROR;
}
- switch (GST_EVENT_TYPE(event)) {
- case GST_EVENT_EOS:
- {
- /* in case of gapless, drop eos event not to send it to sink */
- if (player->gapless.reconfigure && !player->msg_posted) {
- LOGD("[%d] %s:%s EOS received but will be drop", stream_type, GST_DEBUG_PAD_NAME(pad));
- ret = GST_PAD_PROBE_DROP;
- }
- break;
- }
- case GST_EVENT_STREAM_START:
- {
- gint64 stop_running_time = 0;
- gint64 position_running_time = 0;
- gint64 position = 0;
- gint idx = 0;
-
- for (idx = MM_PLAYER_TRACK_TYPE_AUDIO; idx < MM_PLAYER_TRACK_TYPE_TEXT; idx++) {
- if ((player->gapless.update_segment[idx] == TRUE) ||
- !(player->selector[idx].event_probe_id)) {
- /* LOGW("[%d] skip", idx); */
- continue;
- }
+ player->audio_mode.total_track_num++;
- if (GST_CLOCK_TIME_IS_VALID(player->gapless.segment[idx].stop)) {
- stop_running_time =
- gst_segment_to_running_time(&player->gapless.segment[idx],
- GST_FORMAT_TIME, player->gapless.segment[idx].stop);
- } else if (GST_CLOCK_TIME_IS_VALID(player->gapless.segment[idx].duration)) {
- stop_running_time =
- gst_segment_to_running_time(&player->gapless.segment[idx],
- GST_FORMAT_TIME, player->gapless.segment[idx].duration);
- } else {
- LOGD("duration: %"GST_TIME_FORMAT, GST_TIME_ARGS(player->duration));
- stop_running_time =
- gst_segment_to_running_time(&player->gapless.segment[idx],
- GST_FORMAT_TIME, player->duration);
- }
+ g_object_set(selector, "active-pad", sinkpad, NULL);
+ gst_element_set_state(deinterleave, GST_STATE_PAUSED);
+ gst_element_set_state(selector, GST_STATE_PAUSED);
- position_running_time =
- gst_segment_to_running_time(&player->gapless.segment[idx],
- GST_FORMAT_TIME, player->gapless.segment[idx].position);
+ __mmplayer_gst_decode_callback(selector, selector_srcpad, player);
- LOGD("[type:%d] time info %" GST_TIME_FORMAT " , %"
- GST_TIME_FORMAT" , %" GST_TIME_FORMAT,
- idx,
- GST_TIME_ARGS(stop_running_time),
- GST_TIME_ARGS(position_running_time),
- GST_TIME_ARGS(gst_segment_to_running_time(&player->gapless.segment[idx],
- GST_FORMAT_TIME, player->gapless.segment[idx].start)));
+ERROR:
- position_running_time = MAX(position_running_time, stop_running_time);
- position_running_time -= gst_segment_to_running_time(&player->gapless.segment[idx],
- GST_FORMAT_TIME, player->gapless.segment[idx].start);
- position_running_time = MAX(0, position_running_time);
- position = MAX(position, position_running_time);
- }
+ LOGD("unblocking %s:%s", GST_DEBUG_PAD_NAME(selector_srcpad));
+ if (block_id != 0) {
+ gst_pad_remove_probe(selector_srcpad, block_id);
+ block_id = 0;
+ }
- if (position != 0) {
- LOGD("[%d]GST_EVENT_STREAM_START: start_time from %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT,
- stream_type, GST_TIME_ARGS(player->gapless.start_time[stream_type]),
- GST_TIME_ARGS(player->gapless.start_time[stream_type] + position));
+ if (sinkpad) {
+ gst_object_unref(GST_OBJECT(sinkpad));
+ sinkpad = NULL;
+ }
- player->gapless.start_time[stream_type] += position;
- }
- break;
- }
- case GST_EVENT_FLUSH_STOP:
- {
- LOGD("[%d] GST_EVENT_FLUSH_STOP", stream_type);
- gst_segment_init(&player->gapless.segment[stream_type], GST_FORMAT_UNDEFINED);
- player->gapless.start_time[stream_type] = 0;
- break;
- }
- case GST_EVENT_SEGMENT:
- {
- GstSegment segment;
- GstEvent *tmpev;
+ if (srcpad) {
+ gst_object_unref(GST_OBJECT(srcpad));
+ srcpad = NULL;
+ }
- LOGD("[%d] GST_EVENT_SEGMENT", stream_type);
- gst_event_copy_segment(event, &segment);
+ if (selector_srcpad) {
+ gst_object_unref(GST_OBJECT(selector_srcpad));
+ selector_srcpad = NULL;
+ }
- if (segment.format == GST_FORMAT_TIME) {
- LOGD("segment base:%" GST_TIME_FORMAT ", offset:%" GST_TIME_FORMAT
- ", start:%" GST_TIME_FORMAT ", stop: %" GST_TIME_FORMAT
- ", time: %" GST_TIME_FORMAT ", pos: %" GST_TIME_FORMAT ", dur: %" GST_TIME_FORMAT,
- GST_TIME_ARGS(segment.base), GST_TIME_ARGS(segment.offset),
- GST_TIME_ARGS(segment.start), GST_TIME_ARGS(segment.stop),
- GST_TIME_ARGS(segment.time), GST_TIME_ARGS(segment.position), GST_TIME_ARGS(segment.duration));
+ MMPLAYER_FLEAVE();
+ return;
+}
- /* keep the all the segment ev to cover the seeking */
- gst_segment_copy_into(&segment, &player->gapless.segment[stream_type]);
- player->gapless.update_segment[stream_type] = TRUE;
+static void
+__mmplayer_gst_decode_no_more_pads(GstElement *elem, gpointer data)
+{
+ mm_player_t* player = NULL;
+ GstPad* srcpad = NULL;
+ GstElement* video_selector = NULL;
+ GstElement* audio_selector = NULL;
+ GstElement* text_selector = NULL;
+ MMHandleType attrs = 0;
+ gint active_index = 0;
+ gint64 dur_bytes = 0L;
- if (!player->gapless.running)
- break;
+ player = (mm_player_t*) data;
- player->gapless.segment[stream_type].base = player->gapless.start_time[stream_type];
+ LOGD("no-more-pad signal handling\n");
- LOGD("[%d] new base: %" GST_TIME_FORMAT, stream_type, GST_TIME_ARGS(player->gapless.segment[stream_type].base));
+ if ((player->cmd == MMPLAYER_COMMAND_DESTROY) ||
+ (player->cmd == MMPLAYER_COMMAND_UNREALIZE)) {
+ LOGW("no need to go more");
- tmpev = gst_event_new_segment(&player->gapless.segment[stream_type]);
- gst_event_set_seqnum(tmpev, gst_event_get_seqnum(event));
- gst_event_unref(event);
- GST_PAD_PROBE_INFO_DATA(info) = tmpev;
- }
- break;
+ if (player->gapless.reconfigure) {
+ player->gapless.reconfigure = FALSE;
+ MMPLAYER_PLAYBACK_UNLOCK(player);
}
- case GST_EVENT_QOS:
- {
- gdouble proportion = 0.0;
- GstClockTimeDiff diff = 0;
- GstClockTime timestamp = 0;
- gint64 running_time_diff = -1;
- GstQOSType type = 0;
- GstEvent *tmpev = NULL;
- running_time_diff = player->gapless.segment[stream_type].base;
+ return;
+ }
- if (running_time_diff <= 0) /* don't need to adjust */
- break;
+ if ((!MMPLAYER_IS_HTTP_PD(player)) &&
+ (MMPLAYER_IS_HTTP_STREAMING(player)) &&
+ (!player->pipeline->mainbin[MMPLAYER_M_DEMUXED_S_BUFFER].gst) &&
+ (player->pipeline->mainbin[MMPLAYER_M_MUXED_S_BUFFER].gst)) {
+ #define ESTIMATED_BUFFER_UNIT (1*1024*1024)
- gst_event_parse_qos(event, &type, &proportion, &diff, ×tamp);
- gst_event_unref(event);
+ if (NULL == player->streamer) {
+ LOGW("invalid state for buffering");
+ goto ERROR;
+ }
- if (timestamp < running_time_diff) {
- LOGW("QOS event from previous group");
- ret = GST_PAD_PROBE_DROP;
- break;
- }
+ gint init_buffering_time = player->streamer->buffering_req.prebuffer_time;
+ guint buffer_bytes = (guint)(init_buffering_time/1000) * ESTIMATED_BUFFER_UNIT;
- LOGD("[%d] Adjusting QOS event: %" GST_TIME_FORMAT
- " - %" GST_TIME_FORMAT " = %" GST_TIME_FORMAT,
- stream_type, GST_TIME_ARGS(timestamp),
- GST_TIME_ARGS(running_time_diff),
- GST_TIME_ARGS(timestamp - running_time_diff));
+ buffer_bytes = MAX(buffer_bytes, player->streamer->buffer_handle[BUFFER_TYPE_MUXED].buffering_bytes);
+ LOGD("[Decodebin2] set use-buffering on Q2(pre buffer time: %d ms, buffer size : %d)\n", init_buffering_time, buffer_bytes);
- timestamp -= running_time_diff;
+ init_buffering_time = (init_buffering_time != 0) ? (init_buffering_time) : (player->ini.http_buffering_time);
- /* That case is invalid for QoS events */
- if (diff < 0 && -diff > timestamp) {
- LOGW("QOS event from previous group");
- ret = GST_PAD_PROBE_DROP;
- break;
+ if (!gst_element_query_duration(player->pipeline->mainbin[MMPLAYER_M_SRC].gst, GST_FORMAT_BYTES, &dur_bytes))
+ LOGE("fail to get duration.\n");
+
+ /* there is no mq, enable use-buffering on queue2 (ex) wav streaming
+ * use file information was already set on Q2 when it was created. */
+ __mm_player_streaming_set_queue2(player->streamer,
+ player->pipeline->mainbin[MMPLAYER_M_MUXED_S_BUFFER].gst,
+ TRUE, /* use_buffering */
+ buffer_bytes,
+ init_buffering_time,
+ 1.0, /* low percent */
+ player->ini.http_buffering_limit, /* high percent */
+ MUXED_BUFFER_TYPE_MAX, /* use previous buffer type setting */
+ NULL,
+ ((dur_bytes > 0) ? ((guint64)dur_bytes) : 0));
+ }
+
+ video_selector = player->pipeline->mainbin[MMPLAYER_M_V_INPUT_SELECTOR].gst;
+ audio_selector = player->pipeline->mainbin[MMPLAYER_M_A_INPUT_SELECTOR].gst;
+ text_selector = player->pipeline->mainbin[MMPLAYER_M_T_INPUT_SELECTOR].gst;
+ if (video_selector) {
+ // [link] input-selector :: videobin
+ srcpad = gst_element_get_static_pad(video_selector, "src");
+ if (!srcpad) {
+ LOGE("failed to get srcpad from video selector\n");
+ goto ERROR;
+ }
+
+ LOGD("got pad %s:%s from video selector\n", GST_DEBUG_PAD_NAME(srcpad));
+ if (!text_selector && !audio_selector)
+ player->no_more_pad = TRUE;
+
+ __mmplayer_gst_decode_callback(video_selector, srcpad, player);
+
+ LOGD("unblocking %s:%s", GST_DEBUG_PAD_NAME(srcpad));
+ if (player->selector[MM_PLAYER_TRACK_TYPE_VIDEO].block_id) {
+ gst_pad_remove_probe(srcpad, player->selector[MM_PLAYER_TRACK_TYPE_VIDEO].block_id);
+ player->selector[MM_PLAYER_TRACK_TYPE_VIDEO].block_id = 0;
+ }
+ }
+
+ if (audio_selector) {
+ active_index = player->selector[MM_PLAYER_TRACK_TYPE_AUDIO].active_pad_index;
+ if ((active_index != DEFAULT_TRACK) &&
+ (__mmplayer_change_selector_pad(player, MM_PLAYER_TRACK_TYPE_AUDIO, active_index) != MM_ERROR_NONE)) {
+ LOGW("failed to change audio track\n");
+ player->selector[MM_PLAYER_TRACK_TYPE_AUDIO].active_pad_index = DEFAULT_TRACK;
+ }
+
+ // [link] input-selector :: audiobin
+ srcpad = gst_element_get_static_pad(audio_selector, "src");
+ if (!srcpad) {
+ LOGE("failed to get srcpad from selector\n");
+ goto ERROR;
+ }
+
+ LOGD("got pad %s:%s from selector\n", GST_DEBUG_PAD_NAME(srcpad));
+ if (!text_selector)
+ player->no_more_pad = TRUE;
+
+ if ((player->use_deinterleave == TRUE) && (player->max_audio_channels >= 2)) {
+ LOGD("unblocking %s:%s", GST_DEBUG_PAD_NAME(srcpad));
+ if (player->selector[MM_PLAYER_TRACK_TYPE_AUDIO].block_id) {
+ gst_pad_remove_probe(srcpad, player->selector[MM_PLAYER_TRACK_TYPE_AUDIO].block_id);
+ player->selector[MM_PLAYER_TRACK_TYPE_AUDIO].block_id = 0;
}
- tmpev = gst_event_new_qos(GST_QOS_TYPE_UNDERFLOW, proportion, diff, timestamp);
- GST_PAD_PROBE_INFO_DATA(info) = tmpev;
+ __mmplayer_gst_build_deinterleave_path(audio_selector, srcpad, player);
+ } else {
+ __mmplayer_gst_decode_callback(audio_selector, srcpad, player);
- break;
+ LOGD("unblocking %s:%s", GST_DEBUG_PAD_NAME(srcpad));
+ if (player->selector[MM_PLAYER_TRACK_TYPE_AUDIO].block_id) {
+ gst_pad_remove_probe(srcpad, player->selector[MM_PLAYER_TRACK_TYPE_AUDIO].block_id);
+ player->selector[MM_PLAYER_TRACK_TYPE_AUDIO].block_id = 0;
+ }
}
- default:
- break;
+
+ LOGD("Total audio tracks = %d \n", player->selector[MM_PLAYER_TRACK_TYPE_AUDIO].total_track_num);
+
+ attrs = MMPLAYER_GET_ATTRS(player);
+ if (attrs) {
+ mm_attrs_set_int_by_name(attrs, "content_audio_track_num", (gint)player->selector[MM_PLAYER_TRACK_TYPE_AUDIO].total_track_num);
+ if (mmf_attrs_commit(attrs))
+ LOGE("failed to commit.\n");
+ } else
+ LOGE("cannot get content attribute");
+ } else {
+ if ((player->pipeline->audiobin) && (player->pipeline->audiobin[MMPLAYER_A_BIN].gst)) {
+ LOGD("There is no audio track : remove audiobin");
+
+ __mmplayer_release_signal_connection(player, MM_PLAYER_SIGNAL_TYPE_AUDIOBIN);
+ __mmplayer_del_sink(player, player->pipeline->audiobin[MMPLAYER_A_SINK].gst);
+
+ MMPLAYER_RELEASE_ELEMENT(player, player->pipeline->audiobin, MMPLAYER_A_BIN);
+ MMPLAYER_FREEIF(player->pipeline->audiobin);
+ }
+
+ if (player->num_dynamic_pad == 0)
+ __mmplayer_pipeline_complete(NULL, player);
+ }
+
+ if (!MMPLAYER_IS_MS_BUFF_SRC(player)) {
+ if (text_selector)
+ __mmplayer_handle_text_decode_path(player, text_selector);
}
+ MMPLAYER_FLEAVE();
+
ERROR:
- if (caps)
- gst_caps_unref(caps);
- return ret;
+ if (srcpad) {
+ gst_object_unref(GST_OBJECT(srcpad));
+ srcpad = NULL;
+ }
+
+ if (player->gapless.reconfigure) {
+ player->gapless.reconfigure = FALSE;
+ MMPLAYER_PLAYBACK_UNLOCK(player);
+ }
}
static void
-__mmplayer_gst_decode_pad_added(GstElement *elem, GstPad *pad, gpointer data)
+__mmplayer_gst_decode_callback(GstElement *elem, GstPad *pad, gpointer data)
{
mm_player_t* player = NULL;
+ MMHandleType attrs = 0;
GstElement* pipeline = NULL;
- GstElement* selector = NULL;
- GstElement* fakesink = NULL;
GstCaps* caps = NULL;
+ gchar* caps_str = NULL;
GstStructure* str = NULL;
const gchar* name = NULL;
GstPad* sinkpad = NULL;
- GstPad* srcpad = NULL;
- gboolean first_track = FALSE;
-
- enum MainElementID elemId = MMPLAYER_M_NUM;
- MMPlayerTrackType stream_type = MM_PLAYER_TRACK_TYPE_AUDIO;
+ GstElement* sinkbin = NULL;
+ gboolean reusing = FALSE;
+ GstElement *text_selector = NULL;
/* check handles */
- player = (mm_player_t*)data;
+ player = (mm_player_t*) data;
MMPLAYER_RETURN_IF_FAIL(elem && pad);
MMPLAYER_RETURN_IF_FAIL(player && player->pipeline && player->pipeline->mainbin);
- //LOGD("pad-added signal handling\n");
-
pipeline = player->pipeline->mainbin[MMPLAYER_M_PIPE].gst;
+ attrs = MMPLAYER_GET_ATTRS(player);
+ if (!attrs) {
+ LOGE("cannot get content attribute\n");
+ goto ERROR;
+ }
+
/* get mimetype from caps */
caps = gst_pad_query_caps(pad, NULL);
if (!caps) {
LOGE("cannot get caps from pad.\n");
goto ERROR;
}
+ caps_str = gst_caps_to_string(caps);
str = gst_caps_get_structure(caps, 0);
if (!str) {
goto ERROR;
}
- MMPLAYER_LOG_GST_CAPS_TYPE(caps);
//LOGD("detected mimetype : %s\n", name);
- if (strstr(name, "video")) {
- gint stype = 0;
-
- mm_attrs_set_int_by_name(player->attrs, "content_video_found", TRUE);
- mm_attrs_get_int_by_name(player->attrs, "display_surface_type", &stype);
+ if (strstr(name, "audio")) {
+ if (player->pipeline->audiobin == NULL) {
+ if (MM_ERROR_NONE != __mmplayer_gst_create_audio_pipeline(player)) {
+ LOGE("failed to create audiobin. continuing without audio\n");
+ goto ERROR;
+ }
- /* don't make video because of not required, and not support multiple track */
- if (stype == MM_DISPLAY_SURFACE_NULL) {
- LOGD("no video sink by null surface");
+ sinkbin = player->pipeline->audiobin[MMPLAYER_A_BIN].gst;
+ LOGD("creating audiosink bin success\n");
+ } else {
+ reusing = TRUE;
+ sinkbin = player->pipeline->audiobin[MMPLAYER_A_BIN].gst;
+ LOGD("reusing audiobin\n");
+ __mmplayer_update_content_attrs(player, ATTR_AUDIO);
+ }
- gchar *caps_str = gst_caps_to_string(caps);
- if (caps_str && (strstr(caps_str, "ST12") || strstr(caps_str, "SN12") ||
- strstr(caps_str, "SN21") || strstr(caps_str, "S420") || strstr(caps_str, "SR32")))
- player->set_mode.video_zc = TRUE;
+ if (player->selector[MM_PLAYER_TRACK_TYPE_AUDIO].total_track_num <= 0) // should not update if content have multi audio tracks
+ mm_attrs_set_int_by_name(attrs, "content_audio_track_num", 1);
- MMPLAYER_FREEIF(caps_str);
+ player->audiosink_linked = 1;
- if (player->v_stream_caps) {
- gst_caps_unref(player->v_stream_caps);
- player->v_stream_caps = NULL;
- }
+ sinkpad = gst_element_get_static_pad(GST_ELEMENT(sinkbin), "sink");
+ if (!sinkpad) {
+ LOGE("failed to get pad from sinkbin\n");
+ goto ERROR;
+ }
+ } else if (strstr(name, "video")) {
+ if (caps_str && (strstr(caps_str, "ST12") || strstr(caps_str, "SN12") ||
+ strstr(caps_str, "SN21") || strstr(caps_str, "S420") || strstr(caps_str, "SR32")))
+ player->set_mode.video_zc = TRUE;
- LOGD("create fakesink instead of videobin");
+ if (player->pipeline->videobin == NULL) {
+ /* NOTE : not make videobin because application does not want to play it even though file has video stream. */
+ /* get video surface type */
+ int surface_type = 0;
+ mm_attrs_get_int_by_name(player->attrs, "display_surface_type", &surface_type);
+ LOGD("display_surface_type(%d)\n", surface_type);
- /* fake sink */
- fakesink = gst_element_factory_make("fakesink", NULL);
- if (fakesink == NULL) {
- LOGE("ERROR : fakesink create error\n");
+ if (surface_type == MM_DISPLAY_SURFACE_NULL) {
+ LOGD("not make videobin because it dose not want\n");
goto ERROR;
}
- if (player->ini.set_dump_element_flag)
- __mmplayer_add_dump_buffer_probe(player, fakesink);
-
- player->video_fakesink = fakesink;
-
- /* store it as it's sink element */
- __mmplayer_add_sink(player, player->video_fakesink);
-
- gst_bin_add(GST_BIN(pipeline), fakesink);
-
- // link
- sinkpad = gst_element_get_static_pad(fakesink, "sink");
+ if (surface_type == MM_DISPLAY_SURFACE_OVERLAY) {
+ /* mark video overlay for acquire */
+ if (player->video_overlay_resource == NULL) {
+ if (mm_resource_manager_mark_for_acquire(player->resource_manager,
+ MM_RESOURCE_MANAGER_RES_TYPE_VIDEO_OVERLAY,
+ MM_RESOURCE_MANAGER_RES_VOLUME_FULL,
+ &player->video_overlay_resource)
+ != MM_RESOURCE_MANAGER_ERROR_NONE) {
+ LOGE("could not mark video_overlay resource for acquire\n");
+ goto ERROR;
+ }
+ }
+ }
- if (GST_PAD_LINK_OK != gst_pad_link(pad, sinkpad)) {
- LOGW("failed to link fakesink\n");
- gst_object_unref(GST_OBJECT(fakesink));
+ player->interrupted_by_resource = FALSE;
+ /* acquire resources for video overlay */
+ if (mm_resource_manager_commit(player->resource_manager) !=
+ MM_RESOURCE_MANAGER_ERROR_NONE) {
+ LOGE("could not acquire resources for video playing\n");
goto ERROR;
}
- if (stype == MM_DISPLAY_SURFACE_REMOTE) {
- MMPLAYER_SIGNAL_CONNECT(player, sinkpad, MM_PLAYER_SIGNAL_TYPE_VIDEOBIN,
- "notify::caps", G_CALLBACK(__mmplayer_gst_caps_notify_cb), player);
+ if (MM_ERROR_NONE != __mmplayer_gst_create_video_pipeline(player, caps, surface_type)) {
+ LOGE("failed to create videobin. continuing without video\n");
+ goto ERROR;
}
- if (player->set_mode.media_packet_video_stream) {
- g_object_set(G_OBJECT(fakesink), "signal-handoffs", TRUE, NULL);
+ sinkbin = player->pipeline->videobin[MMPLAYER_V_BIN].gst;
+ LOGD("creating videosink bin success\n");
+ } else {
+ reusing = TRUE;
+ sinkbin = player->pipeline->videobin[MMPLAYER_V_BIN].gst;
+ LOGD("re-using videobin\n");
+ __mmplayer_update_content_attrs(player, ATTR_VIDEO);
+ }
- MMPLAYER_SIGNAL_CONNECT(player,
- G_OBJECT(fakesink),
- MM_PLAYER_SIGNAL_TYPE_VIDEOBIN,
- "handoff",
- G_CALLBACK(__mmplayer_video_stream_decoded_render_cb),
- (gpointer)player);
+ player->videosink_linked = 1;
- MMPLAYER_SIGNAL_CONNECT(player,
- G_OBJECT(fakesink),
- MM_PLAYER_SIGNAL_TYPE_VIDEOBIN,
- "preroll-handoff",
- G_CALLBACK(__mmplayer_video_stream_decoded_preroll_cb),
- (gpointer)player);
+ sinkpad = gst_element_get_static_pad(GST_ELEMENT(sinkbin), "sink");
+ if (!sinkpad) {
+ LOGE("failed to get pad from sinkbin\n");
+ goto ERROR;
+ }
+ } else if (strstr(name, "text")) {
+ if (player->pipeline->textbin == NULL) {
+ MMPlayerGstElement* mainbin = NULL;
+
+ if (MM_ERROR_NONE != __mmplayer_gst_create_text_sink_bin(player)) {
+ LOGE("failed to create text sink bin. continuing without text\n");
+ goto ERROR;
}
- g_object_set(G_OBJECT(fakesink), "async", TRUE, "sync", TRUE, NULL);
- gst_element_set_state(fakesink, GST_STATE_PAUSED);
- goto DONE;
- }
+ sinkbin = player->pipeline->textbin[MMPLAYER_T_BIN].gst;
+ LOGD("creating textsink bin success\n");
- if (MMPLAYER_IS_MS_BUFF_SRC(player)) {
- __mmplayer_gst_decode_callback(elem, pad, player);
- goto DONE;
- }
+ /* FIXIT : track number shouldn't be hardcoded */
+ mm_attrs_set_int_by_name(attrs, "content_text_track_num", 1);
- LOGD("video selector \n");
- elemId = MMPLAYER_M_V_INPUT_SELECTOR;
- stream_type = MM_PLAYER_TRACK_TYPE_VIDEO;
- } else {
- if (strstr(name, "audio")) {
- gint samplerate = 0;
- gint channels = 0;
+ player->textsink_linked = 1;
+ LOGI("player->textsink_linked set to 1\n");
- if (MMPLAYER_IS_MS_BUFF_SRC(player)) {
- __mmplayer_gst_decode_callback(elem, pad, player);
- goto DONE;
+ sinkpad = gst_element_get_static_pad(GST_ELEMENT(sinkbin), "text_sink");
+ if (!sinkpad) {
+ LOGE("failed to get pad from sinkbin\n");
+ goto ERROR;
}
- LOGD("audio selector \n");
- elemId = MMPLAYER_M_A_INPUT_SELECTOR;
- stream_type = MM_PLAYER_TRACK_TYPE_AUDIO;
-
- gst_structure_get_int(str, "rate", &samplerate);
- gst_structure_get_int(str, "channels", &channels);
+ mainbin = player->pipeline->mainbin;
- if ((channels > 0 && samplerate == 0)) {//exclude audio decoding
- /* fake sink */
- fakesink = gst_element_factory_make("fakesink", NULL);
- if (fakesink == NULL) {
- LOGE("ERROR : fakesink create error\n");
+ if (!mainbin[MMPLAYER_M_T_INPUT_SELECTOR].gst) {
+ /* input selector */
+ text_selector = gst_element_factory_make("input-selector", "subtitle_inselector");
+ if (!text_selector) {
+ LOGE("failed to create subtitle input selector element\n");
goto ERROR;
}
+ g_object_set(text_selector, "sync-streams", TRUE, NULL);
- gst_bin_add(GST_BIN(pipeline), fakesink);
+ mainbin[MMPLAYER_M_T_INPUT_SELECTOR].id = MMPLAYER_M_T_INPUT_SELECTOR;
+ mainbin[MMPLAYER_M_T_INPUT_SELECTOR].gst = text_selector;
- /* link */
- sinkpad = gst_element_get_static_pad(fakesink, "sink");
+ /* warm up */
+ if (GST_STATE_CHANGE_FAILURE == gst_element_set_state(text_selector, GST_STATE_READY)) {
+ LOGE("failed to set state(READY) to sinkbin\n");
+ goto ERROR;
+ }
- if (GST_PAD_LINK_OK != gst_pad_link(pad, sinkpad)) {
- LOGW("failed to link fakesink\n");
- gst_object_unref(GST_OBJECT(fakesink));
+ if (!gst_bin_add(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), text_selector)) {
+ LOGW("failed to add subtitle input selector\n");
goto ERROR;
}
- g_object_set(G_OBJECT(fakesink), "sync", TRUE, NULL);
- gst_element_set_state(fakesink, GST_STATE_PAUSED);
+ LOGD("created element input-selector");
- goto DONE;
+ } else {
+ LOGD("already having subtitle input selector");
+ text_selector = mainbin[MMPLAYER_M_T_INPUT_SELECTOR].gst;
}
- } else if (strstr(name, "text")) {
- LOGD("text selector \n");
- elemId = MMPLAYER_M_T_INPUT_SELECTOR;
- stream_type = MM_PLAYER_TRACK_TYPE_TEXT;
} else {
- LOGE("wrong elem id \n");
- goto ERROR;
- }
- }
-
- selector = player->pipeline->mainbin[elemId].gst;
- if (selector == NULL) {
- selector = gst_element_factory_make("input-selector", NULL);
- LOGD("Creating input-selector\n");
- if (selector == NULL) {
- LOGE("ERROR : input-selector create error\n");
- goto ERROR;
- }
- g_object_set(selector, "sync-streams", TRUE, NULL);
+ if (!player->textsink_linked) {
+ LOGD("re-using textbin\n");
- player->pipeline->mainbin[elemId].id = elemId;
- player->pipeline->mainbin[elemId].gst = selector;
+ reusing = TRUE;
+ sinkbin = player->pipeline->textbin[MMPLAYER_T_BIN].gst;
- first_track = TRUE;
- // player->selector[stream_type].active_pad_index = DEFAULT_TRACK; // default
+ player->textsink_linked = 1;
+ LOGI("player->textsink_linked set to 1\n");
+ } else
+ LOGD("ignoring internal subtutle since external subtitle is available");
+ }
+ } else {
+ LOGW("unknown type of elementary stream!ignoring it...\n");
+ goto ERROR;
+ }
- srcpad = gst_element_get_static_pad(selector, "src");
+ if (sinkbin) {
+ if (!reusing) {
+ /* warm up */
+ if (GST_STATE_CHANGE_FAILURE == gst_element_set_state(sinkbin, GST_STATE_READY)) {
+ LOGE("failed to set state(READY) to sinkbin\n");
+ goto ERROR;
+ }
- LOGD("blocking %s:%s", GST_DEBUG_PAD_NAME(srcpad));
- player->selector[stream_type].block_id = gst_pad_add_probe(srcpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
- __mmplayer_gst_selector_blocked, NULL, NULL);
- player->selector[stream_type].event_probe_id = gst_pad_add_probe(srcpad, GST_PAD_PROBE_TYPE_EVENT_BOTH|GST_PAD_PROBE_TYPE_EVENT_FLUSH,
- __mmplayer_gst_selector_event_probe, player, NULL);
+ /* Added for multi audio support to avoid adding audio bin again*/
+ /* add */
+ if (FALSE == gst_bin_add(GST_BIN(pipeline), sinkbin)) {
+ LOGE("failed to add sinkbin to pipeline\n");
+ goto ERROR;
+ }
+ }
- gst_element_set_state(selector, GST_STATE_PAUSED);
- gst_bin_add(GST_BIN(pipeline), selector);
- } else
- LOGD("input-selector is already created.\n");
+ /* link */
+ if (GST_PAD_LINK_OK != gst_pad_link(pad, sinkpad)) {
+ LOGE("failed to get pad from sinkbin\n");
+ goto ERROR;
+ }
- // link
- LOGD("Calling request pad with selector %p \n", selector);
- sinkpad = gst_element_get_request_pad(selector, "sink_%u");
+ if (!reusing) {
+ /* run */
+ if (GST_STATE_CHANGE_FAILURE == gst_element_set_state(sinkbin, GST_STATE_PAUSED)) {
+ LOGE("failed to set state(PAUSED) to sinkbin\n");
+ goto ERROR;
+ }
- LOGD("got pad %s:%s from selector", GST_DEBUG_PAD_NAME(sinkpad));
+ if (text_selector) {
+ if (GST_STATE_CHANGE_FAILURE == gst_element_set_state(text_selector, GST_STATE_PAUSED)) {
+ LOGE("failed to set state(PAUSED) to sinkbin\n");
+ goto ERROR;
+ }
+ }
+ }
- if (GST_PAD_LINK_OK != gst_pad_link(pad, sinkpad)) {
- LOGW("failed to link selector\n");
- gst_object_unref(GST_OBJECT(selector));
- goto ERROR;
+ gst_object_unref(sinkpad);
+ sinkpad = NULL;
}
- if (first_track) {
- LOGD("this is first track --> active track \n");
- g_object_set(selector, "active-pad", sinkpad, NULL);
- }
+ LOGD("[handle: %p] linking sink bin success", player);
- _mmplayer_track_update_info(player, stream_type, sinkpad);
+ /* FIXIT : we cannot hold callback for 'no-more-pad' signal because signal was emitted in
+ * streaming task. if the task blocked, then buffer will not flow to the next element
+ *(autoplugging element). so this is special hack for streaming. please try to remove it
+ */
+ /* dec stream count. we can remove fakesink if it's zero */
+ if (player->num_dynamic_pad)
+ player->num_dynamic_pad--;
+ LOGD("no more pads: %d stream count dec : %d(num of dynamic pad)\n", player->no_more_pad, player->num_dynamic_pad);
+
+ if ((player->no_more_pad) && (player->num_dynamic_pad == 0))
+ __mmplayer_pipeline_complete(NULL, player);
-DONE:
ERROR:
+ MMPLAYER_FREEIF(caps_str);
+
if (caps)
gst_caps_unref(caps);
- if (sinkpad) {
+ if (sinkpad)
gst_object_unref(GST_OBJECT(sinkpad));
- sinkpad = NULL;
- }
- if (srcpad) {
- gst_object_unref(GST_OBJECT(srcpad));
- srcpad = NULL;
- }
+ /* flushing out new attributes */
+ if (mmf_attrs_commit(attrs))
+ LOGE("failed to comit attributes\n");
return;
}
-static void __mmplayer_handle_text_decode_path(mm_player_t* player, GstElement* text_selector)
+static gboolean
+__mmplayer_get_property_value_for_rotation(mm_player_t* player, int rotation_angle, int *value)
{
- GstPad* srcpad = NULL;
- MMHandleType attrs = 0;
- gint active_index = 0;
+ int pro_value = 0; // in the case of exception, default will be returned.
+ int dest_angle = rotation_angle;
+ int rotation_type = -1;
- // [link] input-selector :: textbin
- srcpad = gst_element_get_static_pad(text_selector, "src");
- if (!srcpad) {
- LOGE("failed to get srcpad from selector\n");
- return;
- }
+ MMPLAYER_RETURN_VAL_IF_FAIL(player, FALSE);
+ MMPLAYER_RETURN_VAL_IF_FAIL(value, FALSE);
+ MMPLAYER_RETURN_VAL_IF_FAIL(rotation_angle >= 0, FALSE);
- LOGD("got pad %s:%s from text selector\n", GST_DEBUG_PAD_NAME(srcpad));
+ if (rotation_angle >= 360)
+ dest_angle = rotation_angle - 360;
- active_index = player->selector[MM_PLAYER_TRACK_TYPE_TEXT].active_pad_index;
- if ((active_index != DEFAULT_TRACK) &&
- (__mmplayer_change_selector_pad(player, MM_PLAYER_TRACK_TYPE_TEXT, active_index) != MM_ERROR_NONE)) {
- LOGW("failed to change text track\n");
- player->selector[MM_PLAYER_TRACK_TYPE_TEXT].active_pad_index = DEFAULT_TRACK;
+ /* check if supported or not */
+ if (dest_angle % 90) {
+ LOGD("not supported rotation angle = %d", rotation_angle);
+ return FALSE;
}
- player->no_more_pad = TRUE;
- __mmplayer_gst_decode_callback(text_selector, srcpad, player);
-
- LOGD("unblocking %s:%s", GST_DEBUG_PAD_NAME(srcpad));
- if (player->selector[MM_PLAYER_TRACK_TYPE_TEXT].block_id) {
- gst_pad_remove_probe(srcpad, player->selector[MM_PLAYER_TRACK_TYPE_TEXT].block_id);
- player->selector[MM_PLAYER_TRACK_TYPE_TEXT].block_id = 0;
- }
-
- LOGD("Total text tracks = %d \n", player->selector[MM_PLAYER_TRACK_TYPE_TEXT].total_track_num);
+ /*
+ * tizenwlsink (A)
+ * custom_convert - none (B)
+ * videoflip - none (C)
+ */
+ if (player->set_mode.video_zc) {
+ if (player->pipeline->videobin[MMPLAYER_V_CONV].gst) // B
+ rotation_type = ROTATION_USING_CUSTOM;
+ else // A
+ rotation_type = ROTATION_USING_SINK;
+ } else {
+ int surface_type = 0;
+ rotation_type = ROTATION_USING_FLIP;
- if (player->selector[MM_PLAYER_TRACK_TYPE_TEXT].total_track_num > 0)
- player->has_closed_caption = TRUE;
+ mm_attrs_get_int_by_name(player->attrs, "display_surface_type", &surface_type);
+ LOGD("check display surface type attribute: %d", surface_type);
- attrs = MMPLAYER_GET_ATTRS(player);
- if (attrs) {
- mm_attrs_set_int_by_name(attrs, "content_text_track_num", (gint)player->selector[MM_PLAYER_TRACK_TYPE_TEXT].total_track_num);
- if (mmf_attrs_commit(attrs))
- LOGE("failed to commit.\n");
- } else
- LOGE("cannot get content attribute");
+ if (surface_type == MM_DISPLAY_SURFACE_OVERLAY)
+ rotation_type = ROTATION_USING_SINK;
+ else
+ rotation_type = ROTATION_USING_FLIP; //C
- if (srcpad) {
- gst_object_unref(GST_OBJECT(srcpad));
- srcpad = NULL;
+ LOGD("using %d type for rotation", rotation_type);
}
-}
-static void
-__mmplayer_gst_deinterleave_pad_added(GstElement *elem, GstPad *pad, gpointer data)
-{
- mm_player_t* player = (mm_player_t*)data;
- GstElement* selector = NULL;
- GstElement* queue = NULL;
+ /* get property value for setting */
+ switch (rotation_type) {
+ case ROTATION_USING_SINK: // tizenwlsink
+ {
+ switch (dest_angle) {
+ case 0:
+ break;
+ case 90:
+ pro_value = 3; // clockwise 90
+ break;
+ case 180:
+ pro_value = 2;
+ break;
+ case 270:
+ pro_value = 1; // counter-clockwise 90
+ break;
+ }
+ }
+ break;
+ case ROTATION_USING_CUSTOM:
+ {
+ gchar *ename = NULL;
+ ename = GST_OBJECT_NAME(gst_element_get_factory(player->pipeline->videobin[MMPLAYER_V_CONV].gst));
- GstPad* srcpad = NULL;
- GstPad* sinkpad = NULL;
- GstCaps* caps = NULL;
- gchar* caps_str = NULL;
+ if (g_strrstr(ename, "fimcconvert")) {
+ switch (dest_angle) {
+ case 0:
+ break;
+ case 90:
+ pro_value = 90; // clockwise 90
+ break;
+ case 180:
+ pro_value = 180;
+ break;
+ case 270:
+ pro_value = 270; // counter-clockwise 90
+ break;
+ }
+ }
+ }
+ break;
+ case ROTATION_USING_FLIP: // videoflip
+ {
+ switch (dest_angle) {
+ case 0:
+ break;
+ case 90:
+ pro_value = 1; // clockwise 90
+ break;
+ case 180:
+ pro_value = 2;
+ break;
+ case 270:
+ pro_value = 3; // counter-clockwise 90
+ break;
+ }
+ }
+ break;
+ }
- MMPLAYER_FENTER();
- MMPLAYER_RETURN_IF_FAIL(player && player->pipeline && player->pipeline->mainbin);
+ LOGD("setting rotation property value : %d, used rotation type : %d", pro_value, rotation_type);
- caps = gst_pad_get_current_caps(pad);
- caps_str = gst_caps_to_string(caps);
- LOGD("deinterleave new caps : %s\n", caps_str);
- MMPLAYER_FREEIF(caps_str);
- gst_caps_unref(caps);
+ *value = pro_value;
- if ((queue = __mmplayer_element_create_and_link(player, pad, "queue")) == NULL) {
- LOGE("ERROR : queue create error\n");
- goto ERROR;
- }
+ return TRUE;
+}
- g_object_set(G_OBJECT(queue),
- "max-size-buffers", 10,
- "max-size-bytes", 0,
- "max-size-time", (guint64)0,
- NULL);
+int
+__mmplayer_video_param_check_video_sink_bin(mm_player_t* player)
+{
+ /* check video sinkbin is created */
+ MMPLAYER_RETURN_VAL_IF_FAIL(player &&
+ player->pipeline &&
+ player->pipeline->videobin &&
+ player->pipeline->videobin[MMPLAYER_V_BIN].gst &&
+ player->pipeline->videobin[MMPLAYER_V_SINK].gst,
+ MM_ERROR_PLAYER_NOT_INITIALIZED);
- selector = player->pipeline->mainbin[MMPLAYER_M_A_SELECTOR].gst;
+ return MM_ERROR_NONE;
+}
- if (!selector) {
- LOGE("there is no audio channel selector.\n");
- goto ERROR;
- }
+int
+__mmplayer_get_video_angle(mm_player_t* player, int *user_angle, int *org_angle)
+{
+ int user_angle_type = 0;
+ gchar *org_orient = NULL;
+ MMHandleType attrs = MMPLAYER_GET_ATTRS(player);
- srcpad = gst_element_get_static_pad(queue, "src");
- sinkpad = gst_element_get_request_pad(selector, "sink_%u");
+ if (!attrs) {
+ LOGE("cannot get content attribute");
+ return MM_ERROR_PLAYER_INTERNAL;
+ }
- LOGD("link(%s:%s - %s:%s)\n", GST_DEBUG_PAD_NAME(srcpad), GST_DEBUG_PAD_NAME(sinkpad));
+ if (user_angle) {
+ /* update user rotation */
+ mm_attrs_get_int_by_name(attrs, "display_rotation", &user_angle_type);
- if (GST_PAD_LINK_OK != gst_pad_link(srcpad, sinkpad)) {
- LOGW("failed to link deinterleave - selector\n");
- goto ERROR;
+ /* get angle with user type */
+ switch (user_angle_type) {
+ case MM_DISPLAY_ROTATION_NONE:
+ *user_angle = 0;
+ break;
+ case MM_DISPLAY_ROTATION_90: /* counter-clockwise 90 */
+ *user_angle = 270;
+ break;
+ case MM_DISPLAY_ROTATION_180:
+ *user_angle = 180;
+ break;
+ case MM_DISPLAY_ROTATION_270: /* clockwise 90 */
+ *user_angle = 90;
+ break;
+ default:
+ LOGW("wrong angle type : %d", user_angle_type);
+ break;
+ }
+ LOGD("check user angle: %d", *user_angle);
}
- gst_element_set_state(queue, GST_STATE_PAUSED);
- player->audio_mode.total_track_num++;
-
-ERROR:
+ if (org_angle) {
+ /* get original orientation */
+ mm_attrs_get_string_by_name(attrs, "content_video_orientation", &org_orient);
- if (srcpad) {
- gst_object_unref(GST_OBJECT(srcpad));
- srcpad = NULL;
- }
+ if (org_orient) {
+ if (!strcmp(org_orient, "rotate-90"))
+ *org_angle = 90;
+ else if (!strcmp(org_orient, "rotate-180"))
+ *org_angle = 180;
+ else if (!strcmp(org_orient, "rotate-270"))
+ *org_angle = 270;
+ else
+ LOGD("original rotation is %s", org_orient);
+ } else {
+ LOGD("content_video_orientation get fail");
+ }
- if (sinkpad) {
- gst_object_unref(GST_OBJECT(sinkpad));
- sinkpad = NULL;
+ LOGD("check orientation: %d", *org_angle);
}
- MMPLAYER_FLEAVE();
- return;
+ return MM_ERROR_NONE;
}
-static void
-__mmplayer_gst_deinterleave_no_more_pads(GstElement *elem, gpointer data)
+void
+__mmplayer_video_param_set_display_rotation(mm_player_t* player)
{
- mm_player_t* player = NULL;
- GstElement* selector = NULL;
- GstPad* sinkpad = NULL;
- gint active_index = 0;
- gchar* change_pad_name = NULL;
- GstCaps* caps = NULL; // no need to unref
- gint default_audio_ch = 0;
-
+ int rotation_value = 0;
+ int org_angle = 0; // current supported angle values are 0, 90, 180, 270
+ int user_angle = 0;
MMPLAYER_FENTER();
- player = (mm_player_t*) data;
- selector = player->pipeline->mainbin[MMPLAYER_M_A_SELECTOR].gst;
+ /* check video sinkbin is created */
+ if (MM_ERROR_NONE != __mmplayer_video_param_check_video_sink_bin(player))
+ return;
- if (!selector) {
- LOGE("there is no audio channel selector.\n");
- goto ERROR;
- }
+ __mmplayer_get_video_angle(player, &user_angle, &org_angle);
- active_index = player->audio_mode.active_pad_index;
+ /* get rotation value to set */
+ __mmplayer_get_property_value_for_rotation(player, org_angle+user_angle, &rotation_value);
+ g_object_set(player->pipeline->videobin[MMPLAYER_V_SINK].gst, "rotate", rotation_value, NULL);
+ LOGD("set video param : rotate %d", rotation_value);
+}
- if (active_index != default_audio_ch) {
- gint audio_ch = default_audio_ch;
+void
+__mmplayer_video_param_set_display_visible(mm_player_t* player)
+{
+ MMHandleType attrs = 0;
+ int visible = 0;
+ MMPLAYER_FENTER();
- /*To get the new pad from the selector*/
- change_pad_name = g_strdup_printf("sink%d", active_index);
- if (change_pad_name != NULL) {
- sinkpad = gst_element_get_static_pad(selector, change_pad_name);
- if (sinkpad != NULL) {
- LOGD("Set Active Pad - %s:%s\n", GST_DEBUG_PAD_NAME(sinkpad));
- g_object_set(selector, "active-pad", sinkpad, NULL);
+ /* check video sinkbin is created */
+ if (MM_ERROR_NONE != __mmplayer_video_param_check_video_sink_bin(player))
+ return;
- audio_ch = active_index;
+ attrs = MMPLAYER_GET_ATTRS(player);
+ MMPLAYER_RETURN_IF_FAIL(attrs);
- caps = gst_pad_get_current_caps(sinkpad);
- MMPLAYER_LOG_GST_CAPS_TYPE(caps);
+ mm_attrs_get_int_by_name(attrs, "display_visible", &visible);
+ g_object_set(player->pipeline->videobin[MMPLAYER_V_SINK].gst, "visible", visible, NULL);
+ LOGD("set video param : visible %d", visible);
+}
- __mmplayer_set_audio_attrs(player, caps);
- gst_caps_unref(caps);
- }
- MMPLAYER_FREEIF(change_pad_name);
- }
+void
+__mmplayer_video_param_set_display_method(mm_player_t* player)
+{
+ MMHandleType attrs = 0;
+ int display_method = 0;
+ MMPLAYER_FENTER();
- player->audio_mode.active_pad_index = audio_ch;
- LOGD("audio LR info(0:stereo) = %d\n", player->audio_mode.active_pad_index);
- }
+ /* check video sinkbin is created */
+ if (MM_ERROR_NONE != __mmplayer_video_param_check_video_sink_bin(player))
+ return;
-ERROR:
+ attrs = MMPLAYER_GET_ATTRS(player);
+ MMPLAYER_RETURN_IF_FAIL(attrs);
- if (sinkpad)
- gst_object_unref(sinkpad);
-
- MMPLAYER_FLEAVE();
- return;
+ mm_attrs_get_int_by_name(attrs, "display_method", &display_method);
+ g_object_set(player->pipeline->videobin[MMPLAYER_V_SINK].gst, "display-geometry-method", display_method, NULL);
+ LOGD("set video param : method %d", display_method);
}
-static void
-__mmplayer_gst_build_deinterleave_path(GstElement *elem, GstPad *pad, gpointer data)
+void
+__mmplayer_video_param_set_roi_area(mm_player_t* player)
{
- mm_player_t* player = NULL;
- MMPlayerGstElement *mainbin = NULL;
-
- GstElement* tee = NULL;
- GstElement* stereo_queue = NULL;
- GstElement* mono_queue = NULL;
- GstElement* conv = NULL;
- GstElement* filter = NULL;
- GstElement* deinterleave = NULL;
- GstElement* selector = NULL;
-
- GstPad* srcpad = NULL;
- GstPad* selector_srcpad = NULL;
- GstPad* sinkpad = NULL;
- GstCaps* caps = NULL;
- gulong block_id = 0;
-
+ MMHandleType attrs = 0;
+ void *handle = NULL;
+ /*set wl_display*/
+ int win_roi_x = 0;
+ int win_roi_y = 0;
+ int win_roi_width = 0;
+ int win_roi_height = 0;
MMPLAYER_FENTER();
- /* check handles */
- player = (mm_player_t*) data;
-
- MMPLAYER_RETURN_IF_FAIL(elem && pad);
- MMPLAYER_RETURN_IF_FAIL(player && player->pipeline && player->pipeline->mainbin);
+ /* check video sinkbin is created */
+ if (MM_ERROR_NONE != __mmplayer_video_param_check_video_sink_bin(player))
+ return;
- mainbin = player->pipeline->mainbin;
+ attrs = MMPLAYER_GET_ATTRS(player);
+ MMPLAYER_RETURN_IF_FAIL(attrs);
- /* tee */
- if ((tee = __mmplayer_element_create_and_link(player, pad, "tee")) == NULL) {
- LOGE("ERROR : tee create error\n");
- goto ERROR;
- }
+ mm_attrs_get_data_by_name(attrs, "display_overlay", &handle);
- mainbin[MMPLAYER_M_A_TEE].id = MMPLAYER_M_A_TEE;
- mainbin[MMPLAYER_M_A_TEE].gst = tee;
+ if (handle) {
+ /* It should be set after setting window */
+ mm_attrs_get_int_by_name(attrs, "display_win_roi_x", &win_roi_x);
+ mm_attrs_get_int_by_name(attrs, "display_win_roi_y", &win_roi_y);
+ mm_attrs_get_int_by_name(attrs, "display_win_roi_width", &win_roi_width);
+ mm_attrs_get_int_by_name(attrs, "display_win_roi_height", &win_roi_height);
- gst_element_set_state(tee, GST_STATE_PAUSED);
+ /* After setting window handle, set display roi area */
+ gst_video_overlay_set_display_roi_area(
+ GST_VIDEO_OVERLAY(player->pipeline->videobin[MMPLAYER_V_SINK].gst),
+ win_roi_x, win_roi_y, win_roi_width, win_roi_height);
+ LOGD("set video param : roi area : x(%d) y(%d) width(%d) height(%d)",
+ win_roi_x, win_roi_y, win_roi_width, win_roi_height);
- /* queue */
- srcpad = gst_element_get_request_pad(tee, "src_%u");
- if ((stereo_queue = __mmplayer_element_create_and_link(player, srcpad, "queue")) == NULL) {
- LOGE("ERROR : stereo queue create error\n");
- goto ERROR;
}
+}
+void
+__mmplayer_video_param_set_display_overlay(mm_player_t* player)
+{
+ MMHandleType attrs = 0;
+ void *handle = NULL;
- g_object_set(G_OBJECT(stereo_queue),
- "max-size-buffers", 10,
- "max-size-bytes", 0,
- "max-size-time", (guint64)0,
- NULL);
-
- player->pipeline->mainbin[MMPLAYER_M_A_Q1].id = MMPLAYER_M_A_Q1;
- player->pipeline->mainbin[MMPLAYER_M_A_Q1].gst = stereo_queue;
+ /* check video sinkbin is created */
+ if (MM_ERROR_NONE != __mmplayer_video_param_check_video_sink_bin(player))
+ return;
- if (srcpad) {
- gst_object_unref(GST_OBJECT(srcpad));
- srcpad = NULL;
- }
+ attrs = MMPLAYER_GET_ATTRS(player);
+ MMPLAYER_RETURN_IF_FAIL(attrs);
- srcpad = gst_element_get_request_pad(tee, "src_%u");
+ /* common case if using overlay surface */
+ mm_attrs_get_data_by_name(attrs, "display_overlay", &handle);
- if ((mono_queue = __mmplayer_element_create_and_link(player, srcpad, "queue")) == NULL) {
- LOGE("ERROR : mono queue create error\n");
- goto ERROR;
- }
+ if (handle) {
+ /* default is using wl_surface_id */
+ unsigned int wl_surface_id = 0;
+ wl_surface_id = *(int*)handle;
+ LOGD("set video param : wl_surface_id %d %p", wl_surface_id, *(int*)handle);
+ gst_video_overlay_set_wl_window_wl_surface_id(
+ GST_VIDEO_OVERLAY(player->pipeline->videobin[MMPLAYER_V_SINK].gst),
+ *(int*)handle);
+ } else
+ /* FIXIT : is it error case? */
+ LOGW("still we don't have a window handle on player attribute. create it's own surface.");
+}
- g_object_set(G_OBJECT(mono_queue),
- "max-size-buffers", 10,
- "max-size-bytes", 0,
- "max-size-time", (guint64)0,
- NULL);
- player->pipeline->mainbin[MMPLAYER_M_A_Q2].id = MMPLAYER_M_A_Q2;
- player->pipeline->mainbin[MMPLAYER_M_A_Q2].gst = mono_queue;
+int
+__mmplayer_update_wayland_videosink_video_param(mm_player_t* player, char *param_name)
+{
+ bool update_all_param = FALSE;
+ MMPLAYER_FENTER();
- gst_element_set_state(stereo_queue, GST_STATE_PAUSED);
- gst_element_set_state(mono_queue, GST_STATE_PAUSED);
+ /* check video sinkbin is created */
+ if (MM_ERROR_NONE != __mmplayer_video_param_check_video_sink_bin(player))
+ return MM_ERROR_PLAYER_NOT_INITIALIZED;
- /* audioconvert */
- srcpad = gst_element_get_static_pad(mono_queue, "src");
- if ((conv = __mmplayer_element_create_and_link(player, srcpad, "audioconvert")) == NULL) {
- LOGE("ERROR : audioconvert create error\n");
- goto ERROR;
+ if (strcmp(player->ini.videosink_element_overlay, "tizenwlsink")) {
+ LOGE("can not find tizenwlsink");
+ return MM_ERROR_PLAYER_INTERNAL;
}
- player->pipeline->mainbin[MMPLAYER_M_A_CONV].id = MMPLAYER_M_A_CONV;
- player->pipeline->mainbin[MMPLAYER_M_A_CONV].gst = conv;
-
- /* caps filter */
- if (srcpad) {
- gst_object_unref(GST_OBJECT(srcpad));
- srcpad = NULL;
- }
- srcpad = gst_element_get_static_pad(conv, "src");
+ LOGD("param_name : %s", param_name);
+ if (!g_strcmp0(param_name, "update_all_param"))
+ update_all_param = TRUE;
- if ((filter = __mmplayer_element_create_and_link(player, srcpad, "capsfilter")) == NULL) {
- LOGE("ERROR : capsfilter create error\n");
- goto ERROR;
- }
+ if (update_all_param || !g_strcmp0(param_name, "display_overlay"))
+ __mmplayer_video_param_set_display_overlay(player);
+ if (update_all_param || !g_strcmp0(param_name, "display_method"))
+ __mmplayer_video_param_set_display_method(player);
+ if (update_all_param || !g_strcmp0(param_name, "display_visible"))
+ __mmplayer_video_param_set_display_visible(player);
+ if (update_all_param || !g_strcmp0(param_name, "display_rotation"))
+ __mmplayer_video_param_set_display_rotation(player);
+ if (update_all_param || !g_strcmp0(param_name, "display_win_roi_x"))
+ __mmplayer_video_param_set_roi_area(player);
- player->pipeline->mainbin[MMPLAYER_M_A_FILTER].id = MMPLAYER_M_A_FILTER;
- player->pipeline->mainbin[MMPLAYER_M_A_FILTER].gst = filter;
+ return MM_ERROR_NONE;
+}
- caps = gst_caps_from_string("audio/x-raw-int, "
- "width = (int) 16, "
- "depth = (int) 16, "
- "channels = (int) 2");
+int
+_mmplayer_update_video_param(mm_player_t* player, char *param_name)
+{
+ MMHandleType attrs = 0;
+ int surface_type = 0;
+ int ret = MM_ERROR_NONE;
- g_object_set(GST_ELEMENT(player->pipeline->mainbin[MMPLAYER_M_A_FILTER].gst), "caps", caps, NULL);
- gst_caps_unref(caps);
+ MMPLAYER_FENTER();
- gst_element_set_state(conv, GST_STATE_PAUSED);
- gst_element_set_state(filter, GST_STATE_PAUSED);
+ /* check video sinkbin is created */
+ if (MM_ERROR_NONE != __mmplayer_video_param_check_video_sink_bin(player))
+ return MM_ERROR_PLAYER_NOT_INITIALIZED;
- /* deinterleave */
- if (srcpad) {
- gst_object_unref(GST_OBJECT(srcpad));
- srcpad = NULL;
+ attrs = MMPLAYER_GET_ATTRS(player);
+ if (!attrs) {
+ LOGE("cannot get content attribute");
+ return MM_ERROR_PLAYER_INTERNAL;
}
- srcpad = gst_element_get_static_pad(filter, "src");
+ LOGD("param_name : %s", param_name);
- if ((deinterleave = __mmplayer_element_create_and_link(player, srcpad, "deinterleave")) == NULL) {
- LOGE("ERROR : deinterleave create error\n");
- goto ERROR;
+ /* update display surface */
+ mm_attrs_get_int_by_name(attrs, "display_surface_type", &surface_type);
+ LOGD("check display surface type attribute: %d", surface_type);
+
+ /* configuring display */
+ switch (surface_type) {
+ case MM_DISPLAY_SURFACE_OVERLAY:
+ {
+ ret = __mmplayer_update_wayland_videosink_video_param(player, param_name);
+ if (ret != MM_ERROR_NONE)
+ return ret;
+ }
+ break;
}
- g_object_set(deinterleave, "keep-positions", TRUE, NULL);
+ MMPLAYER_FLEAVE();
- MMPLAYER_SIGNAL_CONNECT(player, deinterleave, MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "pad-added",
- G_CALLBACK(__mmplayer_gst_deinterleave_pad_added), player);
+ return MM_ERROR_NONE;
+}
- MMPLAYER_SIGNAL_CONNECT(player, deinterleave, MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "no-more-pads",
- G_CALLBACK(__mmplayer_gst_deinterleave_no_more_pads), player);
+int
+_mmplayer_set_audio_only(MMHandleType hplayer, bool audio_only)
+{
+ gboolean disable_overlay = FALSE;
+ mm_player_t* player = (mm_player_t*) hplayer;
+ int ret = MM_ERROR_NONE;
- player->pipeline->mainbin[MMPLAYER_M_A_DEINTERLEAVE].id = MMPLAYER_M_A_DEINTERLEAVE;
- player->pipeline->mainbin[MMPLAYER_M_A_DEINTERLEAVE].gst = deinterleave;
+ MMPLAYER_FENTER();
+ MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED);
+ MMPLAYER_RETURN_VAL_IF_FAIL(player->pipeline->videobin &&
+ player->pipeline->videobin[MMPLAYER_V_SINK].gst,
+ MM_ERROR_PLAYER_NO_OP); /* invalid op */
- /* selector */
- selector = gst_element_factory_make("input-selector", "audio-channel-selector");
- if (selector == NULL) {
- LOGE("ERROR : audio-selector create error\n");
- goto ERROR;
+ if (!g_object_class_find_property(G_OBJECT_GET_CLASS(player->pipeline->videobin[MMPLAYER_V_SINK].gst), "disable-overlay")) {
+ LOGW("Display control is not supported");
+ return MM_ERROR_PLAYER_INTERNAL;
}
- g_object_set(selector, "sync-streams", TRUE, NULL);
- gst_bin_add(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), selector);
+ g_object_get(player->pipeline->videobin[MMPLAYER_V_SINK].gst, "disable-overlay", &disable_overlay, NULL);
- player->pipeline->mainbin[MMPLAYER_M_A_SELECTOR].id = MMPLAYER_M_A_SELECTOR;
- player->pipeline->mainbin[MMPLAYER_M_A_SELECTOR].gst = selector;
+ if (audio_only == (bool)disable_overlay) {
+ LOGE("It's the same with current setting: (%d)", audio_only);
+ return MM_ERROR_NONE;
+ }
- selector_srcpad = gst_element_get_static_pad(selector, "src");
+ if (audio_only) {
+ LOGE("disable overlay");
+ g_object_set(player->pipeline->videobin[MMPLAYER_V_SINK].gst, "disable-overlay", TRUE, NULL);
- LOGD("blocking %s:%s", GST_DEBUG_PAD_NAME(selector_srcpad));
- block_id =
- gst_pad_add_probe(selector_srcpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
- __mmplayer_gst_selector_blocked, NULL, NULL);
+ /* release overlay resource */
+ if (player->video_overlay_resource != NULL) {
+ ret = mm_resource_manager_mark_for_release(player->resource_manager,
+ player->video_overlay_resource);
+ if (ret != MM_RESOURCE_MANAGER_ERROR_NONE) {
+ LOGE("failed to mark overlay resource for release, ret(0x%x)\n", ret);
+ goto ERROR;
+ }
+ player->video_overlay_resource = NULL;
+ }
- if (srcpad) {
- gst_object_unref(GST_OBJECT(srcpad));
- srcpad = NULL;
- }
+ ret = mm_resource_manager_commit(player->resource_manager);
+ if (ret != MM_RESOURCE_MANAGER_ERROR_NONE) {
+ LOGE("failed to commit acquiring of overlay resource, ret(0x%x)\n", ret);
+ goto ERROR;
+ }
+ } else {
+ /* mark video overlay for acquire */
+ if (player->video_overlay_resource == NULL) {
+ ret = mm_resource_manager_mark_for_acquire(player->resource_manager,
+ MM_RESOURCE_MANAGER_RES_TYPE_VIDEO_OVERLAY,
+ MM_RESOURCE_MANAGER_RES_VOLUME_FULL,
+ &player->video_overlay_resource);
+ if (ret != MM_RESOURCE_MANAGER_ERROR_NONE) {
+ LOGE("could not prepare for video_overlay resource\n");
+ goto ERROR;
+ }
+ }
- srcpad = gst_element_get_static_pad(stereo_queue, "src");
- sinkpad = gst_element_get_request_pad(selector, "sink_%u");
+ player->interrupted_by_resource = FALSE;
+ /* acquire resources for video overlay */
+ ret = mm_resource_manager_commit(player->resource_manager);
+ if (ret != MM_RESOURCE_MANAGER_ERROR_NONE) {
+ LOGE("could not acquire resources for video playing\n");
+ goto ERROR;
+ }
- if (GST_PAD_LINK_OK != gst_pad_link(srcpad, sinkpad)) {
- LOGW("failed to link queue_stereo - selector\n");
- goto ERROR;
+ LOGD("enable overlay");
+ __mmplayer_video_param_set_display_overlay(player);
+ g_object_set(player->pipeline->videobin[MMPLAYER_V_SINK].gst, "disable-overlay", FALSE, NULL);
}
- player->audio_mode.total_track_num++;
+ERROR:
+ MMPLAYER_FLEAVE();
+ return MM_ERROR_NONE;
+}
- g_object_set(selector, "active-pad", sinkpad, NULL);
- gst_element_set_state(deinterleave, GST_STATE_PAUSED);
- gst_element_set_state(selector, GST_STATE_PAUSED);
+int
+_mmplayer_get_audio_only(MMHandleType hplayer, bool *paudio_only)
+{
+ mm_player_t* player = (mm_player_t*) hplayer;
+ gboolean disable_overlay = FALSE;
- __mmplayer_gst_decode_callback(selector, selector_srcpad, player);
+ MMPLAYER_FENTER();
-ERROR:
+ MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED);
+ MMPLAYER_RETURN_VAL_IF_FAIL(paudio_only, MM_ERROR_INVALID_ARGUMENT);
+ MMPLAYER_RETURN_VAL_IF_FAIL(player->pipeline->videobin &&
+ player->pipeline->videobin[MMPLAYER_V_SINK].gst,
+ MM_ERROR_PLAYER_NO_OP); /* invalid op */
- LOGD("unblocking %s:%s", GST_DEBUG_PAD_NAME(selector_srcpad));
- if (block_id != 0) {
- gst_pad_remove_probe(selector_srcpad, block_id);
- block_id = 0;
+ if (!g_object_class_find_property(G_OBJECT_GET_CLASS(player->pipeline->videobin[MMPLAYER_V_SINK].gst), "disable-overlay")) {
+ LOGW("Display control is not supported");
+ return MM_ERROR_PLAYER_INTERNAL;
}
- if (sinkpad) {
- gst_object_unref(GST_OBJECT(sinkpad));
- sinkpad = NULL;
- }
+ g_object_get(player->pipeline->videobin[MMPLAYER_V_SINK].gst, "disable-overlay", &disable_overlay, NULL);
- if (srcpad) {
- gst_object_unref(GST_OBJECT(srcpad));
- srcpad = NULL;
- }
+ *paudio_only = (bool)(disable_overlay);
- if (selector_srcpad) {
- gst_object_unref(GST_OBJECT(selector_srcpad));
- selector_srcpad = NULL;
- }
+ LOGD("audio_only : %d", *paudio_only);
MMPLAYER_FLEAVE();
- return;
+
+ return MM_ERROR_NONE;
}
-static void
-__mmplayer_gst_decode_no_more_pads(GstElement *elem, gpointer data)
+static int
+__mmplayer_gst_element_link_bucket(GList* element_bucket)
{
- mm_player_t* player = NULL;
- GstPad* srcpad = NULL;
- GstElement* video_selector = NULL;
- GstElement* audio_selector = NULL;
- GstElement* text_selector = NULL;
- MMHandleType attrs = 0;
- gint active_index = 0;
- gint64 dur_bytes = 0L;
+ GList* bucket = element_bucket;
+ MMPlayerGstElement* element = NULL;
+ MMPlayerGstElement* prv_element = NULL;
+ gint successful_link_count = 0;
- player = (mm_player_t*) data;
+ MMPLAYER_FENTER();
- LOGD("no-more-pad signal handling\n");
+ MMPLAYER_RETURN_VAL_IF_FAIL(element_bucket, -1);
- if ((player->cmd == MMPLAYER_COMMAND_DESTROY) ||
- (player->cmd == MMPLAYER_COMMAND_UNREALIZE)) {
- LOGW("no need to go more");
+ prv_element = (MMPlayerGstElement*)bucket->data;
+ bucket = bucket->next;
- if (player->gapless.reconfigure) {
- player->gapless.reconfigure = FALSE;
- MMPLAYER_PLAYBACK_UNLOCK(player);
+ for (; bucket; bucket = bucket->next) {
+ element = (MMPlayerGstElement*)bucket->data;
+
+ if (element && element->gst) {
+ /* If next element is audio appsrc then make a separate audio pipeline */
+ if (!strcmp(GST_ELEMENT_NAME(GST_ELEMENT(element->gst)), "audio_appsrc") ||
+ !strcmp(GST_ELEMENT_NAME(GST_ELEMENT(element->gst)), "subtitle_appsrc")) {
+ prv_element = element;
+ continue;
+ }
+
+ if (prv_element && prv_element->gst) {
+ if (gst_element_link(GST_ELEMENT(prv_element->gst), GST_ELEMENT(element->gst))) {
+ LOGD("linking [%s] to [%s] success\n",
+ GST_ELEMENT_NAME(GST_ELEMENT(prv_element->gst)),
+ GST_ELEMENT_NAME(GST_ELEMENT(element->gst)));
+ successful_link_count++;
+ } else {
+ LOGD("linking [%s] to [%s] failed\n",
+ GST_ELEMENT_NAME(GST_ELEMENT(prv_element->gst)),
+ GST_ELEMENT_NAME(GST_ELEMENT(element->gst)));
+ return -1;
+ }
+ }
}
- return;
+ prv_element = element;
}
- if ((!MMPLAYER_IS_HTTP_PD(player)) &&
- (MMPLAYER_IS_HTTP_STREAMING(player)) &&
- (!player->pipeline->mainbin[MMPLAYER_M_DEMUXED_S_BUFFER].gst) &&
- (player->pipeline->mainbin[MMPLAYER_M_MUXED_S_BUFFER].gst)) {
- #define ESTIMATED_BUFFER_UNIT (1*1024*1024)
+ MMPLAYER_FLEAVE();
- if (NULL == player->streamer) {
- LOGW("invalid state for buffering");
- goto ERROR;
- }
+ return successful_link_count;
+}
- gint init_buffering_time = player->streamer->buffering_req.prebuffer_time;
- guint buffer_bytes = (guint)(init_buffering_time/1000) * ESTIMATED_BUFFER_UNIT;
+static int
+__mmplayer_gst_element_add_bucket_to_bin(GstBin* bin, GList* element_bucket)
+{
+ GList* bucket = element_bucket;
+ MMPlayerGstElement* element = NULL;
+ int successful_add_count = 0;
- buffer_bytes = MAX(buffer_bytes, player->streamer->buffer_handle[BUFFER_TYPE_MUXED].buffering_bytes);
- LOGD("[Decodebin2] set use-buffering on Q2(pre buffer time: %d ms, buffer size : %d)\n", init_buffering_time, buffer_bytes);
+ MMPLAYER_FENTER();
- init_buffering_time = (init_buffering_time != 0) ? (init_buffering_time) : (player->ini.http_buffering_time);
+ MMPLAYER_RETURN_VAL_IF_FAIL(element_bucket, 0);
+ MMPLAYER_RETURN_VAL_IF_FAIL(bin, 0);
- if (!gst_element_query_duration(player->pipeline->mainbin[MMPLAYER_M_SRC].gst, GST_FORMAT_BYTES, &dur_bytes))
- LOGE("fail to get duration.\n");
+ for (; bucket; bucket = bucket->next) {
+ element = (MMPlayerGstElement*)bucket->data;
- /* there is no mq, enable use-buffering on queue2 (ex) wav streaming
- * use file information was already set on Q2 when it was created. */
- __mm_player_streaming_set_queue2(player->streamer,
- player->pipeline->mainbin[MMPLAYER_M_MUXED_S_BUFFER].gst,
- TRUE, /* use_buffering */
- buffer_bytes,
- init_buffering_time,
- 1.0, /* low percent */
- player->ini.http_buffering_limit, /* high percent */
- MUXED_BUFFER_TYPE_MAX, /* use previous buffer type setting */
- NULL,
- ((dur_bytes > 0) ? ((guint64)dur_bytes) : 0));
+ if (element && element->gst) {
+ if (!gst_bin_add(bin, GST_ELEMENT(element->gst))) {
+ LOGD("__mmplayer_gst_element_link_bucket : Adding element [%s] to bin [%s] failed\n",
+ GST_ELEMENT_NAME(GST_ELEMENT(element->gst)),
+ GST_ELEMENT_NAME(GST_ELEMENT(bin)));
+ return 0;
+ }
+ successful_add_count++;
+ }
}
- video_selector = player->pipeline->mainbin[MMPLAYER_M_V_INPUT_SELECTOR].gst;
- audio_selector = player->pipeline->mainbin[MMPLAYER_M_A_INPUT_SELECTOR].gst;
- text_selector = player->pipeline->mainbin[MMPLAYER_M_T_INPUT_SELECTOR].gst;
- if (video_selector) {
- // [link] input-selector :: videobin
- srcpad = gst_element_get_static_pad(video_selector, "src");
- if (!srcpad) {
- LOGE("failed to get srcpad from video selector\n");
- goto ERROR;
- }
+ MMPLAYER_FLEAVE();
- LOGD("got pad %s:%s from video selector\n", GST_DEBUG_PAD_NAME(srcpad));
- if (!text_selector && !audio_selector)
- player->no_more_pad = TRUE;
+ return successful_add_count;
+}
- __mmplayer_gst_decode_callback(video_selector, srcpad, player);
+static void __mmplayer_gst_caps_notify_cb(GstPad * pad, GParamSpec * unused, gpointer data)
+{
+ mm_player_t* player = (mm_player_t*) data;
+ GstCaps *caps = NULL;
+ GstStructure *str = NULL;
+ const char *name;
- LOGD("unblocking %s:%s", GST_DEBUG_PAD_NAME(srcpad));
- if (player->selector[MM_PLAYER_TRACK_TYPE_VIDEO].block_id) {
- gst_pad_remove_probe(srcpad, player->selector[MM_PLAYER_TRACK_TYPE_VIDEO].block_id);
- player->selector[MM_PLAYER_TRACK_TYPE_VIDEO].block_id = 0;
- }
- }
+ MMPLAYER_FENTER();
- if (audio_selector) {
- active_index = player->selector[MM_PLAYER_TRACK_TYPE_AUDIO].active_pad_index;
- if ((active_index != DEFAULT_TRACK) &&
- (__mmplayer_change_selector_pad(player, MM_PLAYER_TRACK_TYPE_AUDIO, active_index) != MM_ERROR_NONE)) {
- LOGW("failed to change audio track\n");
- player->selector[MM_PLAYER_TRACK_TYPE_AUDIO].active_pad_index = DEFAULT_TRACK;
- }
+ MMPLAYER_RETURN_IF_FAIL(pad)
+ MMPLAYER_RETURN_IF_FAIL(unused)
+ MMPLAYER_RETURN_IF_FAIL(data)
- // [link] input-selector :: audiobin
- srcpad = gst_element_get_static_pad(audio_selector, "src");
- if (!srcpad) {
- LOGE("failed to get srcpad from selector\n");
- goto ERROR;
- }
+ caps = gst_pad_get_current_caps(pad);
+ if (!caps)
+ return;
- LOGD("got pad %s:%s from selector\n", GST_DEBUG_PAD_NAME(srcpad));
- if (!text_selector)
- player->no_more_pad = TRUE;
+ str = gst_caps_get_structure(caps, 0);
+ if (!str)
+ goto ERROR;
- if ((player->use_deinterleave == TRUE) && (player->max_audio_channels >= 2)) {
- LOGD("unblocking %s:%s", GST_DEBUG_PAD_NAME(srcpad));
- if (player->selector[MM_PLAYER_TRACK_TYPE_AUDIO].block_id) {
- gst_pad_remove_probe(srcpad, player->selector[MM_PLAYER_TRACK_TYPE_AUDIO].block_id);
- player->selector[MM_PLAYER_TRACK_TYPE_AUDIO].block_id = 0;
- }
-
- __mmplayer_gst_build_deinterleave_path(audio_selector, srcpad, player);
- } else {
- __mmplayer_gst_decode_callback(audio_selector, srcpad, player);
+ name = gst_structure_get_name(str);
+ if (!name)
+ goto ERROR;
- LOGD("unblocking %s:%s", GST_DEBUG_PAD_NAME(srcpad));
- if (player->selector[MM_PLAYER_TRACK_TYPE_AUDIO].block_id) {
- gst_pad_remove_probe(srcpad, player->selector[MM_PLAYER_TRACK_TYPE_AUDIO].block_id);
- player->selector[MM_PLAYER_TRACK_TYPE_AUDIO].block_id = 0;
- }
- }
+ LOGD("name = %s\n", name);
- LOGD("Total audio tracks = %d \n", player->selector[MM_PLAYER_TRACK_TYPE_AUDIO].total_track_num);
+ if (strstr(name, "audio")) {
+ __mmplayer_update_content_attrs(player, ATTR_AUDIO);
- attrs = MMPLAYER_GET_ATTRS(player);
- if (attrs) {
- mm_attrs_set_int_by_name(attrs, "content_audio_track_num", (gint)player->selector[MM_PLAYER_TRACK_TYPE_AUDIO].total_track_num);
- if (mmf_attrs_commit(attrs))
- LOGE("failed to commit.\n");
- } else
- LOGE("cannot get content attribute");
- } else {
- if ((player->pipeline->audiobin) && (player->pipeline->audiobin[MMPLAYER_A_BIN].gst)) {
- LOGD("There is no audio track : remove audiobin");
+ if (player->audio_stream_changed_cb) {
+ LOGE("call the audio stream changed cb\n");
+ player->audio_stream_changed_cb(player->audio_stream_changed_cb_user_param);
+ }
+ } else if (strstr(name, "video")) {
+ if ((name = gst_structure_get_string(str, "format")))
+ player->set_mode.video_zc = name[0] == 'S';
- __mmplayer_release_signal_connection(player, MM_PLAYER_SIGNAL_TYPE_AUDIOBIN);
- __mmplayer_del_sink(player, player->pipeline->audiobin[MMPLAYER_A_SINK].gst);
+ __mmplayer_update_content_attrs(player, ATTR_VIDEO);
- MMPLAYER_RELEASE_ELEMENT(player, player->pipeline->audiobin, MMPLAYER_A_BIN);
- MMPLAYER_FREEIF(player->pipeline->audiobin);
+ if (player->video_stream_changed_cb) {
+ LOGE("call the video stream changed cb\n");
+ player->video_stream_changed_cb(player->video_stream_changed_cb_user_param);
}
+ } else
+ goto ERROR;
- if (player->num_dynamic_pad == 0)
- __mmplayer_pipeline_complete(NULL, player);
- }
+ERROR:
- if (!MMPLAYER_IS_MS_BUFF_SRC(player)) {
- if (text_selector)
- __mmplayer_handle_text_decode_path(player, text_selector);
- }
+ gst_caps_unref(caps);
MMPLAYER_FLEAVE();
-ERROR:
- if (srcpad) {
- gst_object_unref(GST_OBJECT(srcpad));
- srcpad = NULL;
- }
-
- if (player->gapless.reconfigure) {
- player->gapless.reconfigure = FALSE;
- MMPLAYER_PLAYBACK_UNLOCK(player);
- }
+ return;
}
-static void
-__mmplayer_gst_decode_callback(GstElement *elem, GstPad *pad, gpointer data)
-{
- mm_player_t* player = NULL;
- MMHandleType attrs = 0;
- GstElement* pipeline = NULL;
- GstCaps* caps = NULL;
- gchar* caps_str = NULL;
- GstStructure* str = NULL;
- const gchar* name = NULL;
- GstPad* sinkpad = NULL;
- GstElement* sinkbin = NULL;
- gboolean reusing = FALSE;
- GstElement *text_selector = NULL;
- /* check handles */
- player = (mm_player_t*) data;
- MMPLAYER_RETURN_IF_FAIL(elem && pad);
- MMPLAYER_RETURN_IF_FAIL(player && player->pipeline && player->pipeline->mainbin);
/**
 * This function is to create audio pipeline for playing.
 *
 * @param player [in] handle of player
 *
 * @return This function returns zero on success.
 * @remark
 * @see __mmplayer_gst_create_midi_pipeline, __mmplayer_gst_create_video_pipeline
 */
/* macro for code readability. just for sinkbin-creation functions */
/* Creates a GStreamer element via x_factory, stores it in x_bin[x_id],
 * optionally attaches a dump-buffer probe (per ini flag) and appends it
 * to the local `element_bucket` list. On creation failure it jumps to a
 * caller-provided ERROR label, so it may only be used in functions that
 * define one.
 * NOTE(review): the trailing ';' after "while (0)" defeats the
 * do-while(0) idiom (breaks use inside un-braced if/else); call sites
 * elsewhere in this file may rely on the embedded semicolon, so it is
 * left unchanged here — verify all call sites before removing it. */
#define MMPLAYER_CREATE_ELEMENT(x_bin, x_id, x_factory, x_name, x_add_bucket, x_player) \
do {\
	x_bin[x_id].id = x_id;\
	x_bin[x_id].gst = gst_element_factory_make(x_factory, x_name);\
	if (!x_bin[x_id].gst) {\
		LOGE("failed to create %s \n", x_factory);\
		goto ERROR;\
	} else {\
		if (x_player->ini.set_dump_element_flag)\
			__mmplayer_add_dump_buffer_probe(x_player, x_bin[x_id].gst);\
	} \
	if (x_add_bucket)\
		element_bucket = g_list_append(element_bucket, &x_bin[x_id]);\
} while (0);
- pipeline = player->pipeline->mainbin[MMPLAYER_M_PIPE].gst;
+void
+__mmplayer_audio_stream_clear_buffer(mm_player_t* player, gboolean send_all)
+{
+ GList *l = NULL;
- attrs = MMPLAYER_GET_ATTRS(player);
- if (!attrs) {
- LOGE("cannot get content attribute\n");
- goto ERROR;
- }
+ MMPLAYER_FENTER();
+ MMPLAYER_RETURN_IF_FAIL(player);
- /* get mimetype from caps */
- caps = gst_pad_query_caps(pad, NULL);
- if (!caps) {
- LOGE("cannot get caps from pad.\n");
- goto ERROR;
+ if (player->audio_stream_buff_list) {
+ for (l = g_list_first(player->audio_stream_buff_list); l; l = g_list_next(l)) {
+ mm_player_audio_stream_buff_t *tmp = (mm_player_audio_stream_buff_t *)l->data;
+ if (tmp) {
+ if (send_all) {
+ LOGD("[%"G_GUINT64_FORMAT"] send remained data.", tmp->channel_mask);
+ __mmplayer_audio_stream_send_data(player, tmp);
+ }
+ if (tmp->pcm_data)
+ g_free(tmp->pcm_data);
+ g_free(tmp);
+ }
+ }
+ g_list_free(player->audio_stream_buff_list);
+ player->audio_stream_buff_list = NULL;
}
- caps_str = gst_caps_to_string(caps);
- str = gst_caps_get_structure(caps, 0);
- if (!str) {
- LOGE("cannot get structure from caps.\n");
- goto ERROR;
- }
+ MMPLAYER_FLEAVE();
+}
- name = gst_structure_get_name(str);
- if (!name) {
- LOGE("cannot get mimetype from structure.\n");
- goto ERROR;
- }
/* Deliver one buffered PCM chunk to the application's extended
 * audio-stream render callback (audio_stream_render_cb_ex).
 * The internal buffer fields are copied into the public
 * MMPlayerAudioStreamDataType; @a_buffer keeps ownership of pcm_data. */
static void
__mmplayer_audio_stream_send_data(mm_player_t* player, mm_player_audio_stream_buff_t *a_buffer)
{
	MMPlayerAudioStreamDataType audio_stream = { 0, };

	MMPLAYER_FENTER();
	MMPLAYER_RETURN_IF_FAIL(player && player->audio_stream_render_cb_ex);

	/* translate the internal buffer into the public stream-data type;
	 * note: a_buffer is not NULL-checked here — callers guarantee it */
	audio_stream.bitrate = a_buffer->bitrate;
	audio_stream.channel = a_buffer->channel;
	audio_stream.depth = a_buffer->depth;
	audio_stream.is_little_endian = a_buffer->is_little_endian;
	audio_stream.channel_mask = a_buffer->channel_mask;
	audio_stream.data_size = a_buffer->data_size;
	audio_stream.data = a_buffer->pcm_data;

	/* LOGD("[%"G_GUINT64_FORMAT"] send data size:%d, %p", audio_stream.channel_mask, audio_stream.data_size, player->audio_stream_cb_user_param); */
	player->audio_stream_render_cb_ex(&audio_stream, player->audio_stream_cb_user_param);

	MMPLAYER_FLEAVE();
}
- player->audiosink_linked = 1;
+static void
+__mmplayer_audio_stream_decoded_render_cb(GstElement* object, GstBuffer *buffer, GstPad *pad, gpointer data)
+{
+ mm_player_t* player = (mm_player_t*) data;
- sinkpad = gst_element_get_static_pad(GST_ELEMENT(sinkbin), "sink");
- if (!sinkpad) {
- LOGE("failed to get pad from sinkbin\n");
- goto ERROR;
- }
- } else if (strstr(name, "video")) {
- if (caps_str && (strstr(caps_str, "ST12") || strstr(caps_str, "SN12") ||
- strstr(caps_str, "SN21") || strstr(caps_str, "S420") || strstr(caps_str, "SR32")))
- player->set_mode.video_zc = TRUE;
+ gint channel = 0;
+ gint rate = 0;
+ gint depth = 0;
+ gint endianness = 0;
+ guint64 channel_mask = 0;
+ void *a_data = NULL;
+ gint a_size = 0;
+ mm_player_audio_stream_buff_t *a_buffer = NULL;
+ GstMapInfo mapinfo = GST_MAP_INFO_INIT;
+ GList *l = NULL;
- if (player->pipeline->videobin == NULL) {
- /* NOTE : not make videobin because application dose not want to play it even though file has video stream. */
- /* get video surface type */
- int surface_type = 0;
- mm_attrs_get_int_by_name(player->attrs, "display_surface_type", &surface_type);
- LOGD("display_surface_type(%d)\n", surface_type);
+ MMPLAYER_FENTER();
+ MMPLAYER_RETURN_IF_FAIL(player && player->audio_stream_render_cb_ex);
- if (surface_type == MM_DISPLAY_SURFACE_NULL) {
- LOGD("not make videobin because it dose not want\n");
- goto ERROR;
- }
+ gst_buffer_map(buffer, &mapinfo, GST_MAP_READ);
+ a_data = mapinfo.data;
+ a_size = mapinfo.size;
- if (surface_type == MM_DISPLAY_SURFACE_OVERLAY) {
- /* mark video overlay for acquire */
- if (player->video_overlay_resource == NULL) {
- if (mm_resource_manager_mark_for_acquire(player->resource_manager,
- MM_RESOURCE_MANAGER_RES_TYPE_VIDEO_OVERLAY,
- MM_RESOURCE_MANAGER_RES_VOLUME_FULL,
- &player->video_overlay_resource)
- != MM_RESOURCE_MANAGER_ERROR_NONE) {
- LOGE("could not mark video_overlay resource for acquire\n");
- goto ERROR;
- }
- }
- }
+ GstCaps *caps = gst_pad_get_current_caps(pad);
+ GstStructure *structure = gst_caps_get_structure(caps, 0);
- player->interrupted_by_resource = FALSE;
- /* acquire resources for video overlay */
- if (mm_resource_manager_commit(player->resource_manager) !=
- MM_RESOURCE_MANAGER_ERROR_NONE) {
- LOGE("could not acquire resources for video playing\n");
- goto ERROR;
- }
+ /* MMPLAYER_LOG_GST_CAPS_TYPE(caps); */
+ gst_structure_get_int(structure, "rate", &rate);
+ gst_structure_get_int(structure, "channels", &channel);
+ gst_structure_get_int(structure, "depth", &depth);
+ gst_structure_get_int(structure, "endianness", &endianness);
+ gst_structure_get(structure, "channel-mask", GST_TYPE_BITMASK, &channel_mask, NULL);
+ gst_caps_unref(GST_CAPS(caps));
- if (MM_ERROR_NONE != __mmplayer_gst_create_video_pipeline(player, caps, surface_type)) {
- LOGE("failed to create videobin. continuing without video\n");
- goto ERROR;
- }
-
- sinkbin = player->pipeline->videobin[MMPLAYER_V_BIN].gst;
- LOGD("creating videosink bin success\n");
- } else {
- reusing = TRUE;
- sinkbin = player->pipeline->videobin[MMPLAYER_V_BIN].gst;
- LOGD("re-using videobin\n");
- _mmplayer_update_content_attrs(player, ATTR_VIDEO);
- }
-
- player->videosink_linked = 1;
-
- sinkpad = gst_element_get_static_pad(GST_ELEMENT(sinkbin), "sink");
- if (!sinkpad) {
- LOGE("failed to get pad from sinkbin\n");
- goto ERROR;
- }
- } else if (strstr(name, "text")) {
- if (player->pipeline->textbin == NULL) {
- MMPlayerGstElement* mainbin = NULL;
-
- if (MM_ERROR_NONE != __mmplayer_gst_create_text_sink_bin(player)) {
- LOGE("failed to create text sink bin. continuing without text\n");
- goto ERROR;
- }
-
- sinkbin = player->pipeline->textbin[MMPLAYER_T_BIN].gst;
- LOGD("creating textsink bin success\n");
-
- /* FIXIT : track number shouldn't be hardcoded */
- mm_attrs_set_int_by_name(attrs, "content_text_track_num", 1);
-
- player->textsink_linked = 1;
- LOGI("player->textsink_linked set to 1\n");
-
- sinkpad = gst_element_get_static_pad(GST_ELEMENT(sinkbin), "text_sink");
- if (!sinkpad) {
- LOGE("failed to get pad from sinkbin\n");
- goto ERROR;
- }
-
- mainbin = player->pipeline->mainbin;
-
- if (!mainbin[MMPLAYER_M_T_INPUT_SELECTOR].gst) {
- /* input selector */
- text_selector = gst_element_factory_make("input-selector", "subtitle_inselector");
- if (!text_selector) {
- LOGE("failed to create subtitle input selector element\n");
- goto ERROR;
- }
- g_object_set(text_selector, "sync-streams", TRUE, NULL);
-
- mainbin[MMPLAYER_M_T_INPUT_SELECTOR].id = MMPLAYER_M_T_INPUT_SELECTOR;
- mainbin[MMPLAYER_M_T_INPUT_SELECTOR].gst = text_selector;
-
- /* warm up */
- if (GST_STATE_CHANGE_FAILURE == gst_element_set_state(text_selector, GST_STATE_READY)) {
- LOGE("failed to set state(READY) to sinkbin\n");
- goto ERROR;
- }
+ /* When sync is false, use the buffer list to batch data. *
+ * The number of buffer-list entries depends on the number of audio channels */
+ if (player->audio_stream_buff_list) {
+ for (l = g_list_first(player->audio_stream_buff_list); l; l = g_list_next(l)) {
+ mm_player_audio_stream_buff_t *tmp = (mm_player_audio_stream_buff_t *)l->data;
+ if (tmp) {
+ if (channel_mask == tmp->channel_mask) {
+ /* LOGD("[%"G_GUINT64_FORMAT"] total: %d, data: %d, buffer: %d", channel_mask, tmp->data_size, a_size, tmp->buff_size); */
+ if (tmp->data_size + a_size < tmp->buff_size) {
+ memcpy(tmp->pcm_data + tmp->data_size, a_data, a_size);
+ tmp->data_size += a_size;
+ } else {
+ /* send data to client */
+ __mmplayer_audio_stream_send_data(player, tmp);
- if (!gst_bin_add(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), text_selector)) {
- LOGW("failed to add subtitle input selector\n");
- goto ERROR;
+ if (a_size > tmp->buff_size) {
+ LOGD("[%"G_GUINT64_FORMAT"] adj buffer size %d -> %d", channel_mask, tmp->buff_size, a_size);
+ tmp->pcm_data = g_realloc(tmp->pcm_data, a_size);
+ if (tmp->pcm_data == NULL) {
+ LOGE("failed to realloc data.");
+ goto DONE;
+ }
+ tmp->buff_size = a_size;
+ }
+ memset(tmp->pcm_data, 0x00, tmp->buff_size);
+ memcpy(tmp->pcm_data, a_data, a_size);
+ tmp->data_size = a_size;
+ }
+ goto DONE;
}
-
- LOGD("created element input-selector");
-
} else {
- LOGD("already having subtitle input selector");
- text_selector = mainbin[MMPLAYER_M_T_INPUT_SELECTOR].gst;
+ LOGE("data is empty in list.");
+ goto DONE;
}
- } else {
- if (!player->textsink_linked) {
- LOGD("re-using textbin\n");
+ }
+ }
- reusing = TRUE;
- sinkbin = player->pipeline->textbin[MMPLAYER_T_BIN].gst;
+ /* create new audio stream data */
+ a_buffer = (mm_player_audio_stream_buff_t*)g_malloc0(sizeof(mm_player_audio_stream_buff_t));
+ if (a_buffer == NULL) {
+ LOGE("failed to alloc data.");
+ goto DONE;
+ }
+ a_buffer->bitrate = rate;
+ a_buffer->channel = channel;
+ a_buffer->depth = depth;
+ a_buffer->is_little_endian = (endianness == 1234 ? 1 : 0);
+ a_buffer->channel_mask = channel_mask;
+ a_buffer->data_size = a_size;
- player->textsink_linked = 1;
- LOGI("player->textsink_linked set to 1\n");
- } else
- LOGD("ignoring internal subtutle since external subtitle is available");
+ if (!player->audio_stream_sink_sync) {
+ /* If sync is FALSE, use buffer list to reduce the IPC. */
+ a_buffer->buff_size = (a_size > player->ini.pcm_buffer_size) ? (a_size) : (player->ini.pcm_buffer_size);
+ a_buffer->pcm_data = g_malloc(a_buffer->buff_size);
+ if (a_buffer->pcm_data == NULL) {
+ LOGE("failed to alloc data.");
+ g_free(a_buffer);
+ goto DONE;
}
+ memcpy(a_buffer->pcm_data, a_data, a_size);
+ /* LOGD("new [%"G_GUINT64_FORMAT"] total:%d buff:%d", channel_mask, a_buffer->data_size, a_buffer->buff_size); */
+ player->audio_stream_buff_list = g_list_append(player->audio_stream_buff_list, a_buffer);
} else {
- LOGW("unknown type of elementary stream!ignoring it...\n");
- goto ERROR;
+ /* If sync is TRUE, send data directly. */
+ a_buffer->pcm_data = a_data;
+ __mmplayer_audio_stream_send_data(player, a_buffer);
+ g_free(a_buffer);
}
- if (sinkbin) {
- if (!reusing) {
- /* warm up */
- if (GST_STATE_CHANGE_FAILURE == gst_element_set_state(sinkbin, GST_STATE_READY)) {
- LOGE("failed to set state(READY) to sinkbin\n");
- goto ERROR;
- }
+DONE:
+ gst_buffer_unmap(buffer, &mapinfo);
+ MMPLAYER_FLEAVE();
+}
- /* Added for multi audio support to avoid adding audio bin again*/
- /* add */
- if (FALSE == gst_bin_add(GST_BIN(pipeline), sinkbin)) {
- LOGE("failed to add sinkbin to pipeline\n");
- goto ERROR;
- }
- }
+static void
+__mmplayer_gst_audio_deinterleave_pad_added(GstElement *elem, GstPad *pad, gpointer data)
+{
+ mm_player_t* player = (mm_player_t*)data;
+ MMPlayerGstElement* audiobin = player->pipeline->audiobin;
+ GstPad* sinkpad = NULL;
+ GstElement *queue = NULL, *sink = NULL;
- /* link */
- if (GST_PAD_LINK_OK != gst_pad_link(pad, sinkpad)) {
- LOGE("failed to get pad from sinkbin\n");
- goto ERROR;
- }
+ MMPLAYER_FENTER();
+ MMPLAYER_RETURN_IF_FAIL(player && player->pipeline && player->pipeline->mainbin);
- if (!reusing) {
- /* run */
- if (GST_STATE_CHANGE_FAILURE == gst_element_set_state(sinkbin, GST_STATE_PAUSED)) {
- LOGE("failed to set state(PAUSED) to sinkbin\n");
- goto ERROR;
- }
+ queue = gst_element_factory_make("queue", NULL);
+ if (queue == NULL) {
+ LOGD("fail make queue\n");
+ goto ERROR;
+ }
- if (text_selector) {
- if (GST_STATE_CHANGE_FAILURE == gst_element_set_state(text_selector, GST_STATE_PAUSED)) {
- LOGE("failed to set state(PAUSED) to sinkbin\n");
- goto ERROR;
- }
- }
- }
+ sink = gst_element_factory_make("fakesink", NULL);
+ if (sink == NULL) {
+ LOGD("fail make fakesink\n");
+ goto ERROR;
+ }
- gst_object_unref(sinkpad);
- sinkpad = NULL;
+ gst_bin_add_many(GST_BIN(audiobin[MMPLAYER_A_BIN].gst), queue, sink, NULL);
+
+ if (!gst_element_link_pads_full(queue, "src", sink, "sink", GST_PAD_LINK_CHECK_NOTHING)) {
+ LOGW("failed to link queue & sink\n");
+ goto ERROR;
}
- LOGD("[handle: %p] linking sink bin success", player);
+ sinkpad = gst_element_get_static_pad(queue, "sink");
- /* FIXIT : we cannot hold callback for 'no-more-pad' signal because signal was emitted in
- * streaming task. if the task blocked, then buffer will not flow to the next element
- *(autoplugging element). so this is special hack for streaming. please try to remove it
- */
- /* dec stream count. we can remove fakesink if it's zero */
- if (player->num_dynamic_pad)
- player->num_dynamic_pad--;
+ if (GST_PAD_LINK_OK != gst_pad_link(pad, sinkpad)) {
+ LOGW("failed to link [%s:%s] to queue\n", GST_DEBUG_PAD_NAME(pad));
+ goto ERROR;
+ }
- LOGD("no more pads: %d stream count dec : %d(num of dynamic pad)\n", player->no_more_pad, player->num_dynamic_pad);
+ LOGE("player->audio_stream_sink_sync: %d\n", player->audio_stream_sink_sync);
- if ((player->no_more_pad) && (player->num_dynamic_pad == 0))
- __mmplayer_pipeline_complete(NULL, player);
+ gst_object_unref(sinkpad);
+ g_object_set(sink, "sync", player->audio_stream_sink_sync, NULL);
+ g_object_set(sink, "signal-handoffs", TRUE, NULL);
-ERROR:
+ gst_element_set_state(sink, GST_STATE_PAUSED);
+ gst_element_set_state(queue, GST_STATE_PAUSED);
- MMPLAYER_FREEIF(caps_str);
+ MMPLAYER_SIGNAL_CONNECT(player,
+ G_OBJECT(sink),
+ MM_PLAYER_SIGNAL_TYPE_AUDIOBIN,
+ "handoff",
+ G_CALLBACK(__mmplayer_audio_stream_decoded_render_cb),
+ (gpointer)player);
- if (caps)
- gst_caps_unref(caps);
+ MMPLAYER_FLEAVE();
+ return;
- if (sinkpad)
+ERROR:
+ LOGE("__mmplayer_gst_audio_deinterleave_pad_added ERROR\n");
+ if (queue) {
+ gst_object_unref(GST_OBJECT(queue));
+ queue = NULL;
+ }
+ if (sink) {
+ gst_object_unref(GST_OBJECT(sink));
+ sink = NULL;
+ }
+ if (sinkpad) {
gst_object_unref(GST_OBJECT(sinkpad));
-
- /* flusing out new attributes */
- if (mmf_attrs_commit(attrs))
- LOGE("failed to comit attributes\n");
+ sinkpad = NULL;
+ }
return;
}
-static gboolean
-__mmplayer_get_property_value_for_rotation(mm_player_t* player, int rotation_angle, int *value)
+/* Apply audio-sink properties taken from player attributes:
+ * - "stream-properties": sound-stream routing info built from
+ *   media.role / media.parent_id / media.focus_id
+ * - "latency": low / mid / high latency mode
+ * Callers invoke this only for the pulsesink element (see pipeline setup). */
+void __mmplayer_gst_set_audiosink_property(mm_player_t* player, MMHandleType attrs)
 {
- int pro_value = 0; // in the case of expection, default will be returned.
- int dest_angle = rotation_angle;
- int rotation_type = -1;
-
- MMPLAYER_RETURN_VAL_IF_FAIL(player, FALSE);
- MMPLAYER_RETURN_VAL_IF_FAIL(value, FALSE);
- MMPLAYER_RETURN_VAL_IF_FAIL(rotation_angle >= 0, FALSE);
-
- if (rotation_angle >= 360)
- dest_angle = rotation_angle - 360;
+ #define MAX_PROPS_LEN 128
+ gint latency_mode = 0;
+ gchar *stream_type = NULL;
+ gchar *latency = NULL;
+ gint stream_id = 0;
+ gchar stream_props[MAX_PROPS_LEN] = {0,};
+ GstStructure *props = NULL;
- /* chech if supported or not */
- if (dest_angle % 90) {
- LOGD("not supported rotation angle = %d", rotation_angle);
- return FALSE;
- }
+ /* Stream routing properties:
+ * they are set after player creation through attributes,
+ * but can not be changed during playing.
+ */
+ MMPLAYER_FENTER();
+ mm_attrs_get_int_by_name(attrs, "sound_stream_index", &stream_id);
+ mm_attrs_get_string_by_name(attrs, "sound_stream_type", &stream_type);
- /*
- * tizenwlsink (A)
- * custom_convert - none (B)
- * videoflip - none (C)
- */
- if (player->set_mode.video_zc) {
- if (player->pipeline->videobin[MMPLAYER_V_CONV].gst) // B
- rotation_type = ROTATION_USING_CUSTOM;
- else // A
- rotation_type = ROTATION_USING_SINK;
+ if (!stream_type) {
+ LOGE("stream_type is null.\n");
 } else {
- int surface_type = 0;
- rotation_type = ROTATION_USING_FLIP;
-
- mm_attrs_get_int_by_name(player->attrs, "display_surface_type", &surface_type);
- LOGD("check display surface type attribute: %d", surface_type);
-
- if (surface_type == MM_DISPLAY_SURFACE_OVERLAY)
- rotation_type = ROTATION_USING_SINK;
+ /* include media.focus_id only when a focus id has been assigned */
+ if (player->sound.focus_id)
+ snprintf(stream_props, sizeof(stream_props)-1, "props,media.role=%s, media.parent_id=%d, media.focus_id=%d",
+ stream_type, stream_id, player->sound.focus_id);
 else
- rotation_type = ROTATION_USING_FLIP; //C
-
- LOGD("using %d type for rotation", rotation_type);
+ snprintf(stream_props, sizeof(stream_props)-1, "props,media.role=%s, media.parent_id=%d",
+ stream_type, stream_id);
+ props = gst_structure_from_string(stream_props, NULL);
+ g_object_set(player->pipeline->audiobin[MMPLAYER_A_SINK].gst, "stream-properties", props, NULL);
+ LOGI("stream_type[%s], stream_id[%d], focus_id[%d], result[%s].\n",
+ stream_type, stream_id, player->sound.focus_id, stream_props);
+ gst_structure_free(props);
 }
- /* get property value for setting */
- switch (rotation_type) {
- case ROTATION_USING_SINK: // tizenwlsink
- {
- switch (dest_angle) {
- case 0:
- break;
- case 90:
- pro_value = 3; // clockwise 90
- break;
- case 180:
- pro_value = 2;
- break;
- case 270:
- pro_value = 1; // counter-clockwise 90
- break;
- }
- }
- break;
- case ROTATION_USING_CUSTOM:
- {
- gchar *ename = NULL;
- ename = GST_OBJECT_NAME(gst_element_get_factory(player->pipeline->videobin[MMPLAYER_V_CONV].gst));
+ mm_attrs_get_int_by_name(attrs, "sound_latency_mode", &latency_mode);
- if (g_strrstr(ename, "fimcconvert")) {
- switch (dest_angle) {
- case 0:
- break;
- case 90:
- pro_value = 90; // clockwise 90
- break;
- case 180:
- pro_value = 180;
- break;
- case 270:
- pro_value = 270; // counter-clockwise 90
- break;
- }
- }
- }
+ /* NOTE(review): no default case - latency stays NULL for any other
+ * mode value and is then passed to g_object_set/LOGD below; confirm
+ * the sink accepts a NULL "latency" value. */
+ switch (latency_mode) {
+ case AUDIO_LATENCY_MODE_LOW:
+ latency = g_strndup("low", 3);
 break;
- case ROTATION_USING_FLIP: // videoflip
- {
- switch (dest_angle) {
- case 0:
- break;
- case 90:
- pro_value = 1; // clockwise 90
- break;
- case 180:
- pro_value = 2;
- break;
- case 270:
- pro_value = 3; // counter-clockwise 90
- break;
- }
- }
+ case AUDIO_LATENCY_MODE_MID:
+ latency = g_strndup("mid", 3);
 break;
- }
-
- LOGD("setting rotation property value : %d, used rotation type : %d", pro_value, rotation_type);
+ case AUDIO_LATENCY_MODE_HIGH:
+ latency = g_strndup("high", 4);
+ break;
+ };
- *value = pro_value;
+ g_object_set(player->pipeline->audiobin[MMPLAYER_A_SINK].gst,
+ "latency", latency,
+ NULL);
- return TRUE;
-}
+ LOGD("audiosink property - latency=%s \n", latency);
-int
-__mmplayer_video_param_check_video_sink_bin(mm_player_t* player)
-{
- /* check video sinkbin is created */
- MMPLAYER_RETURN_VAL_IF_FAIL(player &&
- player->pipeline &&
- player->pipeline->videobin &&
- player->pipeline->videobin[MMPLAYER_V_BIN].gst &&
- player->pipeline->videobin[MMPLAYER_V_SINK].gst,
- MM_ERROR_PLAYER_NOT_INITIALIZED);
+ g_free(latency);
- return MM_ERROR_NONE;
+ MMPLAYER_FLEAVE();
 }
-void
-__mmplayer_video_param_set_display_rotation(mm_player_t* player)
+static int
+__mmplayer_gst_create_audio_pipeline(mm_player_t* player)
{
- int rotation_value = 0;
- int org_angle = 0; // current supported angle values are 0, 90, 180, 270
- int user_angle = 0;
+ MMPlayerGstElement* first_element = NULL;
+ MMPlayerGstElement* audiobin = NULL;
+ MMHandleType attrs = 0;
+ GstPad *pad = NULL;
+ GstPad *ghostpad = NULL;
+ GList* element_bucket = NULL;
+ gboolean link_audio_sink_now = TRUE;
+ int i = 0;
+ GstCaps *acaps;
+
MMPLAYER_FENTER();
- /* check video sinkbin is created */
- if (MM_ERROR_NONE != __mmplayer_video_param_check_video_sink_bin(player))
- return;
+ MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED);
- __mmplayer_get_video_angle(player, &user_angle, &org_angle);
+ /* alloc handles */
+ audiobin = (MMPlayerGstElement*)g_malloc0(sizeof(MMPlayerGstElement) * MMPLAYER_A_NUM);
+ if (!audiobin) {
+ LOGE("failed to allocate memory for audiobin\n");
+ return MM_ERROR_PLAYER_NO_FREE_SPACE;
+ }
- /* get rotation value to set */
- __mmplayer_get_property_value_for_rotation(player, org_angle+user_angle, &rotation_value);
- g_object_set(player->pipeline->videobin[MMPLAYER_V_SINK].gst, "rotate", rotation_value, NULL);
- LOGD("set video param : rotate %d", rotation_value);
-}
+ attrs = MMPLAYER_GET_ATTRS(player);
-void
-__mmplayer_video_param_set_display_visible(mm_player_t* player)
-{
- MMHandleType attrs = 0;
- int visible = 0;
- MMPLAYER_FENTER();
+ /* create bin */
+ audiobin[MMPLAYER_A_BIN].id = MMPLAYER_A_BIN;
+ audiobin[MMPLAYER_A_BIN].gst = gst_bin_new("audiobin");
+ if (!audiobin[MMPLAYER_A_BIN].gst) {
+ LOGE("failed to create audiobin\n");
+ goto ERROR;
+ }
- /* check video sinkbin is created */
- if (MM_ERROR_NONE != __mmplayer_video_param_check_video_sink_bin(player))
- return;
+ /* take it */
+ player->pipeline->audiobin = audiobin;
- attrs = MMPLAYER_GET_ATTRS(player);
- MMPLAYER_RETURN_IF_FAIL(attrs);
+ player->set_mode.pcm_extraction = __mmplayer_can_extract_pcm(player);
- mm_attrs_get_int_by_name(attrs, "display_visible", &visible);
- g_object_set(player->pipeline->videobin[MMPLAYER_V_SINK].gst, "visible", visible, NULL);
- LOGD("set video param : visible %d", visible);
-}
+ /* Adding audiotp plugin for reverse trickplay feature */
+// MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_TP, "audiotp", "audio trickplay", TRUE, player);
-void
-__mmplayer_video_param_set_display_method(mm_player_t* player)
-{
- MMHandleType attrs = 0;
- int display_method = 0;
- MMPLAYER_FENTER();
+ /* converter */
+ MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_CONV, "audioconvert", "audio converter", TRUE, player);
- /* check video sinkbin is created */
- if (MM_ERROR_NONE != __mmplayer_video_param_check_video_sink_bin(player))
- return;
+ /* replaygain volume */
+ MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_RGVOL, "rgvolume", "audio rgvolume", TRUE, player);
+ if (player->sound.rg_enable)
+ g_object_set(G_OBJECT(audiobin[MMPLAYER_A_RGVOL].gst), "enable-rgvolume", TRUE, NULL);
+ else
+ g_object_set(G_OBJECT(audiobin[MMPLAYER_A_RGVOL].gst), "enable-rgvolume", FALSE, NULL);
- attrs = MMPLAYER_GET_ATTRS(player);
- MMPLAYER_RETURN_IF_FAIL(attrs);
+ /* resampler */
+ MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_RESAMPLER, player->ini.audioresampler_element, "audio resampler", TRUE, player);
- mm_attrs_get_int_by_name(attrs, "display_method", &display_method);
- g_object_set(player->pipeline->videobin[MMPLAYER_V_SINK].gst, "display-geometry-method", display_method, NULL);
- LOGD("set video param : method %d", display_method);
-}
+ if (player->set_mode.pcm_extraction) {
+ // pcm extraction only and no sound output
+ if (player->audio_stream_render_cb_ex) {
+ char *caps_str = NULL;
+ GstCaps* caps = NULL;
+ gchar *format = NULL;
-void
-__mmplayer_video_param_set_roi_area(mm_player_t* player)
-{
- MMHandleType attrs = 0;
- void *handle = NULL;
- /*set wl_display*/
- int win_roi_x = 0;
- int win_roi_y = 0;
- int win_roi_width = 0;
- int win_roi_height = 0;
- MMPLAYER_FENTER();
+ /* capsfilter */
+ MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_CAPS_DEFAULT, "capsfilter", "audio capsfilter", TRUE, player);
- /* check video sinkbin is created */
- if (MM_ERROR_NONE != __mmplayer_video_param_check_video_sink_bin(player))
- return;
-
- attrs = MMPLAYER_GET_ATTRS(player);
- MMPLAYER_RETURN_IF_FAIL(attrs);
+ mm_attrs_get_string_by_name(player->attrs, "pcm_audioformat", &format);
- mm_attrs_get_data_by_name(attrs, "display_overlay", &handle);
+ LOGD("contents : format: %s samplerate : %d pcm_channel: %d", format, player->pcm_samplerate, player->pcm_channel);
- if (handle) {
- /* It should be set after setting window */
- mm_attrs_get_int_by_name(attrs, "display_win_roi_x", &win_roi_x);
- mm_attrs_get_int_by_name(attrs, "display_win_roi_y", &win_roi_y);
- mm_attrs_get_int_by_name(attrs, "display_win_roi_width", &win_roi_width);
- mm_attrs_get_int_by_name(attrs, "display_win_roi_height", &win_roi_height);
+ caps = gst_caps_new_simple("audio/x-raw",
+ "format", G_TYPE_STRING, format,
+ "rate", G_TYPE_INT, player->pcm_samplerate,
+ "channels", G_TYPE_INT, player->pcm_channel,
+ NULL);
+ caps_str = gst_caps_to_string(caps);
+ LOGD("new caps : %s\n", caps_str);
- /* After setting window handle, set display roi area */
- gst_video_overlay_set_display_roi_area(
- GST_VIDEO_OVERLAY(player->pipeline->videobin[MMPLAYER_V_SINK].gst),
- win_roi_x, win_roi_y, win_roi_width, win_roi_height);
- LOGD("set video param : roi area : x(%d) y(%d) width(%d) height(%d)",
- win_roi_x, win_roi_y, win_roi_width, win_roi_height);
+ g_object_set(GST_ELEMENT(audiobin[MMPLAYER_A_CAPS_DEFAULT].gst), "caps", caps, NULL);
- }
-}
-void
-__mmplayer_video_param_set_display_overlay(mm_player_t* player)
-{
- MMHandleType attrs = 0;
- void *handle = NULL;
+ /* clean */
+ gst_caps_unref(caps);
+ MMPLAYER_FREEIF(caps_str);
- /* check video sinkbin is created */
- if (MM_ERROR_NONE != __mmplayer_video_param_check_video_sink_bin(player))
- return;
+ MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_DEINTERLEAVE, "deinterleave", "deinterleave", TRUE, player);
- attrs = MMPLAYER_GET_ATTRS(player);
- MMPLAYER_RETURN_IF_FAIL(attrs);
+ g_object_set(G_OBJECT(audiobin[MMPLAYER_A_DEINTERLEAVE].gst), "keep-positions", TRUE, NULL);
+ /* raw pad handling signal */
+ MMPLAYER_SIGNAL_CONNECT(player,
+ (audiobin[MMPLAYER_A_DEINTERLEAVE].gst),
+ MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "pad-added",
+ G_CALLBACK(__mmplayer_gst_audio_deinterleave_pad_added), player);
+ } else {
+ int dst_samplerate = 0;
+ int dst_channels = 0;
+ int dst_depth = 0;
+ char *caps_str = NULL;
+ GstCaps* caps = NULL;
- /* common case if using overlay surface */
- mm_attrs_get_data_by_name(attrs, "display_overlay", &handle);
+ /* get conf. values */
+ mm_attrs_multiple_get(player->attrs,
+ NULL,
+ "pcm_extraction_samplerate", &dst_samplerate,
+ "pcm_extraction_channels", &dst_channels,
+ "pcm_extraction_depth", &dst_depth,
+ NULL);
- if (handle) {
- /* default is using wl_surface_id */
- unsigned int wl_surface_id = 0;
- wl_surface_id = *(int*)handle;
- LOGD("set video param : wl_surface_id %d %p", wl_surface_id, *(int*)handle);
- gst_video_overlay_set_wl_window_wl_surface_id(
- GST_VIDEO_OVERLAY(player->pipeline->videobin[MMPLAYER_V_SINK].gst),
- *(int*)handle);
- } else
- /* FIXIT : is it error case? */
- LOGW("still we don't have a window handle on player attribute. create it's own surface.");
-}
+ /* capsfilter */
+ MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_CAPS_DEFAULT, "capsfilter", "audio capsfilter", TRUE, player);
+ caps = gst_caps_new_simple("audio/x-raw",
+ "rate", G_TYPE_INT, dst_samplerate,
+ "channels", G_TYPE_INT, dst_channels,
+ "depth", G_TYPE_INT, dst_depth,
+ NULL);
+ caps_str = gst_caps_to_string(caps);
+ LOGD("new caps : %s\n", caps_str);
+ g_object_set(GST_ELEMENT(audiobin[MMPLAYER_A_CAPS_DEFAULT].gst), "caps", caps, NULL);
-int
-__mmplayer_update_wayland_videosink_video_param(mm_player_t* player, char *param_name)
-{
- bool update_all_param = FALSE;
- MMPLAYER_FENTER();
+ /* clean */
+ gst_caps_unref(caps);
+ MMPLAYER_FREEIF(caps_str);
- /* check video sinkbin is created */
- if (MM_ERROR_NONE != __mmplayer_video_param_check_video_sink_bin(player))
- return MM_ERROR_PLAYER_NOT_INITIALIZED;
+ /* fake sink */
+ MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_SINK, "fakesink", "fakesink", TRUE, player);
- if (strcmp(player->ini.videosink_element_overlay, "tizenwlsink")) {
- LOGE("can not find tizenwlsink");
- return MM_ERROR_PLAYER_INTERNAL;
- }
+ /* set sync */
+ g_object_set(G_OBJECT(audiobin[MMPLAYER_A_SINK].gst), "sync", FALSE, NULL);
+ }
+ } else {
+ // normal playback
+ //GstCaps* caps = NULL;
+ gint channels = 0;
- LOGD("param_name : %s", param_name);
- if (!g_strcmp0(param_name, "update_all_param"))
- update_all_param = TRUE;
+ /* for logical volume control */
+ MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_VOL, "volume", "volume", TRUE, player);
+ g_object_set(G_OBJECT(audiobin[MMPLAYER_A_VOL].gst), "volume", player->sound.volume, NULL);
- if (update_all_param || !g_strcmp0(param_name, "display_overlay"))
- __mmplayer_video_param_set_display_overlay(player);
- if (update_all_param || !g_strcmp0(param_name, "display_method"))
- __mmplayer_video_param_set_display_method(player);
- if (update_all_param || !g_strcmp0(param_name, "display_visible"))
- __mmplayer_video_param_set_display_visible(player);
- if (update_all_param || !g_strcmp0(param_name, "display_rotation"))
- __mmplayer_video_param_set_display_rotation(player);
- if (update_all_param || !g_strcmp0(param_name, "display_win_roi_x"))
- __mmplayer_video_param_set_roi_area(player);
+ if (player->sound.mute) {
+ LOGD("mute enabled\n");
+ g_object_set(G_OBJECT(audiobin[MMPLAYER_A_VOL].gst), "mute", player->sound.mute, NULL);
+ }
- return MM_ERROR_NONE;
-}
+#if 0
+ /*capsfilter */
+ MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_CAPS_DEFAULT, "capsfilter", "audiocapsfilter", TRUE, player);
+ caps = gst_caps_from_string("audio/x-raw-int, "
+ "endianness = (int) LITTLE_ENDIAN, "
+ "signed = (boolean) true, "
+ "width = (int) 16, "
+ "depth = (int) 16");
+ g_object_set(GST_ELEMENT(audiobin[MMPLAYER_A_CAPS_DEFAULT].gst), "caps", caps, NULL);
+ gst_caps_unref(caps);
+#endif
-int
-_mmplayer_update_video_param(mm_player_t* player, char *param_name)
-{
- MMHandleType attrs = 0;
- int surface_type = 0;
- int ret = MM_ERROR_NONE;
+ /* check if multi-channels */
+ if (player->pipeline->mainbin && player->pipeline->mainbin[MMPLAYER_M_DEMUX].gst) {
+ GstPad *srcpad = NULL;
+ GstCaps *caps = NULL;
- MMPLAYER_FENTER();
+ if ((srcpad = gst_element_get_static_pad(player->pipeline->mainbin[MMPLAYER_M_DEMUX].gst, "src"))) {
+ if ((caps = gst_pad_query_caps(srcpad, NULL))) {
+ //MMPLAYER_LOG_GST_CAPS_TYPE(caps);
+ GstStructure *str = gst_caps_get_structure(caps, 0);
+ if (str)
+ gst_structure_get_int(str, "channels", &channels);
+ gst_caps_unref(caps);
+ }
+ gst_object_unref(srcpad);
+ }
+ }
- /* check video sinkbin is created */
- if (MM_ERROR_NONE != __mmplayer_video_param_check_video_sink_bin(player))
- return MM_ERROR_PLAYER_NOT_INITIALIZED;
+ /* audio effect element. if audio effect is enabled */
+ if ((strcmp(player->ini.audioeffect_element, ""))
+ && (channels <= 2)
+ && (player->ini.use_audio_effect_preset || player->ini.use_audio_effect_custom)) {
+ MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_FILTER, player->ini.audioeffect_element, "audio effect filter", TRUE, player);
- attrs = MMPLAYER_GET_ATTRS(player);
- if (!attrs) {
- LOGE("cannot get content attribute");
- return MM_ERROR_PLAYER_INTERNAL;
- }
- LOGD("param_name : %s", param_name);
+ LOGD("audio effect config. bypass = %d, effect type = %d", player->bypass_audio_effect, player->audio_effect_info.effect_type);
- /* update display surface */
- mm_attrs_get_int_by_name(attrs, "display_surface_type", &surface_type);
- LOGD("check display surface type attribute: %d", surface_type);
+ if ((!player->bypass_audio_effect)
+ && (player->ini.use_audio_effect_preset || player->ini.use_audio_effect_custom)) {
+ if (MM_AUDIO_EFFECT_TYPE_CUSTOM == player->audio_effect_info.effect_type) {
+ if (!_mmplayer_audio_effect_custom_apply(player))
+ LOGI("apply audio effect(custom) setting success\n");
+ }
+ }
- /* configuring display */
- switch (surface_type) {
- case MM_DISPLAY_SURFACE_OVERLAY:
- {
- ret = __mmplayer_update_wayland_videosink_video_param(player, param_name);
- if (ret != MM_ERROR_NONE)
- return ret;
+ if ((strcmp(player->ini.audioeffect_element_custom, ""))
+ && (player->set_mode.rich_audio))
+ MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_FILTER_SEC, player->ini.audioeffect_element_custom, "audio effect filter custom", TRUE, player);
}
- break;
- }
-
- MMPLAYER_FLEAVE();
- return MM_ERROR_NONE;
-}
+ /* create audio sink */
+ LOGD("360 spherical %d, channels %d, ambisonic type %d, format %d, order %d",
+ player->is_content_spherical, channels, player->video360_metadata.ambisonic_type,
+ player->video360_metadata.ambisonic_format, player->video360_metadata.ambisonic_order);
-int
-_mmplayer_set_audio_only(MMHandleType hplayer, bool audio_only)
-{
- gboolean disable_overlay = FALSE;
- mm_player_t* player = (mm_player_t*) hplayer;
- int ret = MM_ERROR_NONE;
+ /* Note: qtdemux converts audio metadata defaults to openalsink defaults. */
+ if (player->is_360_feature_enabled &&
+ player->is_content_spherical &&
+ channels == 4 &&
+ player->video360_metadata.ambisonic_type == MMFILE_AMBISONIC_TYPE_PERIPHONIC &&
+ player->video360_metadata.ambisonic_format == MMFILE_AMBISONIC_FORMAT_AMB &&
+ player->video360_metadata.ambisonic_order == MMFILE_AMBISONIC_ORDER_FOA) {
- MMPLAYER_FENTER();
- MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED);
- MMPLAYER_RETURN_VAL_IF_FAIL(player->pipeline->videobin &&
- player->pipeline->videobin[MMPLAYER_V_SINK].gst,
- MM_ERROR_PLAYER_NO_OP); /* invalid op */
+ strncpy(player->ini.audiosink_element, "openalsink", PLAYER_INI_MAX_STRLEN - 1);
- if (!g_object_class_find_property(G_OBJECT_GET_CLASS(player->pipeline->videobin[MMPLAYER_V_SINK].gst), "disable-overlay")) {
- LOGW("Display control is not supported");
- return MM_ERROR_PLAYER_INTERNAL;
- }
+ MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_CONV_BFORMAT, "audioconvert", "audio-converter-bformat", link_audio_sink_now, player);
- g_object_get(player->pipeline->videobin[MMPLAYER_V_SINK].gst, "disable-overlay", &disable_overlay, NULL);
+ MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_CAPS_360, "capsfilter", "audio-caps-filter", link_audio_sink_now, player);
+ acaps = gst_caps_from_string(SPATIAL_AUDIO_CAPS);
+ g_object_set(G_OBJECT(audiobin[MMPLAYER_A_CAPS_360].gst), "caps", acaps, NULL);
+ gst_caps_unref(acaps);
- if (audio_only == (bool)disable_overlay) {
- LOGE("It's the same with current setting: (%d)", audio_only);
- return MM_ERROR_NONE;
- }
+ MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_SINK, "openalsink", "audiosink", link_audio_sink_now, player);
+ g_object_set(G_OBJECT(audiobin[MMPLAYER_A_SINK].gst), "source-ambisonics-type", 1, NULL);
+ sound_manager_create_stream_information(SOUND_STREAM_TYPE_MEDIA, NULL, NULL, &stream_info);
+ g_object_set(G_OBJECT(audiobin[MMPLAYER_A_SINK].gst), "stream-info", stream_info, NULL);
- if (audio_only) {
- LOGE("disable overlay");
- g_object_set(player->pipeline->videobin[MMPLAYER_V_SINK].gst, "disable-overlay", TRUE, NULL);
+ player->is_openal_plugin_used = TRUE;
- /* release overlay resource */
- if (player->video_overlay_resource != NULL) {
- ret = mm_resource_manager_mark_for_release(player->resource_manager,
- player->video_overlay_resource);
- if (ret != MM_RESOURCE_MANAGER_ERROR_NONE) {
- LOGE("failed to mark overlay resource for release, ret(0x%x)\n", ret);
- goto ERROR;
+ if (player->video360_yaw_radians <= M_PI &&
+ player->video360_yaw_radians >= -M_PI &&
+ player->video360_pitch_radians <= M_PI_2 &&
+ player->video360_pitch_radians >= -M_PI_2) {
+ g_object_set(G_OBJECT(audiobin[MMPLAYER_A_SINK].gst),
+ "source-orientation-y", (int) (player->video360_yaw_radians * 180.0 / M_PI),
+ "source-orientation-x", (int) (player->video360_pitch_radians * 180.0 / M_PI), NULL);
+ } else if (player->video360_metadata.init_view_heading || player->video360_metadata.init_view_pitch) {
+ g_object_set(G_OBJECT(audiobin[MMPLAYER_A_SINK].gst),
+ "source-orientation-y", player->video360_metadata.init_view_heading,
+ "source-orientation-x", player->video360_metadata.init_view_pitch, NULL);
}
- player->video_overlay_resource = NULL;
+ } else {
+ if (player->is_360_feature_enabled && player->is_content_spherical)
+ LOGW("Audio track isn't of the ambisonic type and can't be played back as a spatial sound.\n");
+ MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_SINK, player->ini.audiosink_element, "audiosink", link_audio_sink_now, player);
}
- ret = mm_resource_manager_commit(player->resource_manager);
- if (ret != MM_RESOURCE_MANAGER_ERROR_NONE) {
- LOGE("failed to commit acquiring of overlay resource, ret(0x%x)\n", ret);
- goto ERROR;
- }
- } else {
- /* mark video overlay for acquire */
- if (player->video_overlay_resource == NULL) {
- ret = mm_resource_manager_mark_for_acquire(player->resource_manager,
- MM_RESOURCE_MANAGER_RES_TYPE_VIDEO_OVERLAY,
- MM_RESOURCE_MANAGER_RES_VOLUME_FULL,
- &player->video_overlay_resource);
- if (ret != MM_RESOURCE_MANAGER_ERROR_NONE) {
- LOGE("could not prepare for video_overlay resource\n");
- goto ERROR;
- }
- }
+ /* qos on */
+ g_object_set(G_OBJECT(audiobin[MMPLAYER_A_SINK].gst), "qos", TRUE, NULL); /* qos on */
+ g_object_set(G_OBJECT(audiobin[MMPLAYER_A_SINK].gst), "slave-method", GST_AUDIO_BASE_SINK_SLAVE_NONE, NULL);
- player->interrupted_by_resource = FALSE;
- /* acquire resources for video overlay */
- ret = mm_resource_manager_commit(player->resource_manager);
- if (ret != MM_RESOURCE_MANAGER_ERROR_NONE) {
- LOGE("could not acquire resources for video playing\n");
- goto ERROR;
+
+ if ((MMPLAYER_IS_RTSP_STREAMING(player)) ||
+ (player->videodec_linked && player->ini.use_system_clock)) {
+ LOGD("system clock will be used.\n");
+ g_object_set(G_OBJECT(audiobin[MMPLAYER_A_SINK].gst), "provide-clock", FALSE, NULL);
}
- LOGD("enable overlay");
- __mmplayer_video_param_set_display_overlay(player);
- g_object_set(player->pipeline->videobin[MMPLAYER_V_SINK].gst, "disable-overlay", FALSE, NULL);
+ if (g_strrstr(player->ini.audiosink_element, "pulsesink"))
+ __mmplayer_gst_set_audiosink_property(player, attrs);
}
-ERROR:
- MMPLAYER_FLEAVE();
- return MM_ERROR_NONE;
-}
+ if (audiobin[MMPLAYER_A_SINK].gst) {
+ GstPad *sink_pad = NULL;
+ sink_pad = gst_element_get_static_pad(audiobin[MMPLAYER_A_SINK].gst, "sink");
+ MMPLAYER_SIGNAL_CONNECT(player, sink_pad, MM_PLAYER_SIGNAL_TYPE_AUDIOBIN,
+ "notify::caps", G_CALLBACK(__mmplayer_gst_caps_notify_cb), player);
+ gst_object_unref(GST_OBJECT(sink_pad));
+ }
-int
-_mmplayer_get_audio_only(MMHandleType hplayer, bool *paudio_only)
-{
- mm_player_t* player = (mm_player_t*) hplayer;
- gboolean disable_overlay = FALSE;
+ __mmplayer_add_sink(player, audiobin[MMPLAYER_A_SINK].gst);
- MMPLAYER_FENTER();
+ /* adding created elements to bin */
+ LOGD("adding created elements to bin\n");
+ if (!__mmplayer_gst_element_add_bucket_to_bin(GST_BIN(audiobin[MMPLAYER_A_BIN].gst), element_bucket)) {
+ LOGE("failed to add elements\n");
+ goto ERROR;
+ }
- MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED);
- MMPLAYER_RETURN_VAL_IF_FAIL(paudio_only, MM_ERROR_INVALID_ARGUMENT);
- MMPLAYER_RETURN_VAL_IF_FAIL(player->pipeline->videobin &&
- player->pipeline->videobin[MMPLAYER_V_SINK].gst,
- MM_ERROR_PLAYER_NO_OP); /* invalid op */
+ /* linking elements in the bucket by added order. */
+ LOGD("Linking elements in the bucket by added order.\n");
+ if (__mmplayer_gst_element_link_bucket(element_bucket) == -1) {
+ LOGE("failed to link elements\n");
+ goto ERROR;
+ }
- if (!g_object_class_find_property(G_OBJECT_GET_CLASS(player->pipeline->videobin[MMPLAYER_V_SINK].gst), "disable-overlay")) {
- LOGW("Display control is not supported");
- return MM_ERROR_PLAYER_INTERNAL;
+ /* get first element's sinkpad for creating ghostpad */
+ first_element = (MMPlayerGstElement *)element_bucket->data;
+ if (!first_element) {
+ LOGE("failed to get first elem\n");
+ goto ERROR;
}
- g_object_get(player->pipeline->videobin[MMPLAYER_V_SINK].gst, "disable-overlay", &disable_overlay, NULL);
+ pad = gst_element_get_static_pad(GST_ELEMENT(first_element->gst), "sink");
+ if (!pad) {
+ LOGE("failed to get pad from first element of audiobin\n");
+ goto ERROR;
+ }
- *paudio_only = (bool)(disable_overlay);
+ ghostpad = gst_ghost_pad_new("sink", pad);
+ if (!ghostpad) {
+ LOGE("failed to create ghostpad\n");
+ goto ERROR;
+ }
- LOGD("audio_only : %d", *paudio_only);
+ if (FALSE == gst_element_add_pad(audiobin[MMPLAYER_A_BIN].gst, ghostpad)) {
+ LOGE("failed to add ghostpad to audiobin\n");
+ goto ERROR;
+ }
+
+ gst_object_unref(pad);
+ g_list_free(element_bucket);
MMPLAYER_FLEAVE();
return MM_ERROR_NONE;
-}
-static int
-__mmplayer_gst_element_link_bucket(GList* element_bucket)
-{
- GList* bucket = element_bucket;
- MMPlayerGstElement* element = NULL;
- MMPlayerGstElement* prv_element = NULL;
- gint successful_link_count = 0;
+ERROR:
- MMPLAYER_FENTER();
+ LOGD("ERROR : releasing audiobin\n");
- MMPLAYER_RETURN_VAL_IF_FAIL(element_bucket, -1);
+ if (pad)
+ gst_object_unref(GST_OBJECT(pad));
- prv_element = (MMPlayerGstElement*)bucket->data;
- bucket = bucket->next;
+ if (ghostpad)
+ gst_object_unref(GST_OBJECT(ghostpad));
- for (; bucket; bucket = bucket->next) {
- element = (MMPlayerGstElement*)bucket->data;
+ if (element_bucket)
+ g_list_free(element_bucket);
- if (element && element->gst) {
- /* If next element is audio appsrc then make a separate audio pipeline */
- if (!strcmp(GST_ELEMENT_NAME(GST_ELEMENT(element->gst)), "audio_appsrc") ||
- !strcmp(GST_ELEMENT_NAME(GST_ELEMENT(element->gst)), "subtitle_appsrc")) {
- prv_element = element;
- continue;
- }
+ /* release element which are not added to bin */
+ for (i = 1; i < MMPLAYER_A_NUM; i++) {
+ /* NOTE : skip bin */
+ if (audiobin[i].gst) {
+ GstObject* parent = NULL;
+ parent = gst_element_get_parent(audiobin[i].gst);
- if (prv_element && prv_element->gst) {
- if (gst_element_link(GST_ELEMENT(prv_element->gst), GST_ELEMENT(element->gst))) {
- LOGD("linking [%s] to [%s] success\n",
- GST_ELEMENT_NAME(GST_ELEMENT(prv_element->gst)),
- GST_ELEMENT_NAME(GST_ELEMENT(element->gst)));
- successful_link_count++;
- } else {
- LOGD("linking [%s] to [%s] failed\n",
- GST_ELEMENT_NAME(GST_ELEMENT(prv_element->gst)),
- GST_ELEMENT_NAME(GST_ELEMENT(element->gst)));
- return -1;
- }
- }
+ if (!parent) {
+ gst_object_unref(GST_OBJECT(audiobin[i].gst));
+ audiobin[i].gst = NULL;
+ } else
+ gst_object_unref(GST_OBJECT(parent));
}
-
- prv_element = element;
}
- MMPLAYER_FLEAVE();
+ /* release audiobin with it's childs */
+ if (audiobin[MMPLAYER_A_BIN].gst)
+ gst_object_unref(GST_OBJECT(audiobin[MMPLAYER_A_BIN].gst));
- return successful_link_count;
+ MMPLAYER_FREEIF(audiobin);
+
+ player->pipeline->audiobin = NULL;
+
+ return MM_ERROR_PLAYER_INTERNAL;
}
-static int
-__mmplayer_gst_element_add_bucket_to_bin(GstBin* bin, GList* element_bucket)
+static GstPadProbeReturn
+__mmplayer_audio_stream_probe(GstPad *pad, GstPadProbeInfo *info, gpointer u_data)
{
- GList* bucket = element_bucket;
- MMPlayerGstElement* element = NULL;
- int successful_add_count = 0;
+ mm_player_t* player = (mm_player_t*) u_data;
+ GstBuffer *pad_buffer = gst_pad_probe_info_get_buffer(info);
+ GstMapInfo probe_info = GST_MAP_INFO_INIT;
- MMPLAYER_FENTER();
+ gst_buffer_map(pad_buffer, &probe_info, GST_MAP_READ);
- MMPLAYER_RETURN_VAL_IF_FAIL(element_bucket, 0);
- MMPLAYER_RETURN_VAL_IF_FAIL(bin, 0);
+ if (player->audio_stream_cb && probe_info.size && probe_info.data)
+ player->audio_stream_cb((void *)probe_info.data, probe_info.size, player->audio_stream_cb_user_param);
- for (; bucket; bucket = bucket->next) {
- element = (MMPlayerGstElement*)bucket->data;
+ return GST_PAD_PROBE_OK;
+}
- if (element && element->gst) {
- if (!gst_bin_add(bin, GST_ELEMENT(element->gst))) {
- LOGD("__mmplayer_gst_element_link_bucket : Adding element [%s] to bin [%s] failed\n",
- GST_ELEMENT_NAME(GST_ELEMENT(element->gst)),
- GST_ELEMENT_NAME(GST_ELEMENT(bin)));
- return 0;
+static guint32 _mmplayer_convert_fourcc_string_to_value(const gchar* format_name)
+{
+ return format_name[0] | (format_name[1] << 8) | (format_name[2] << 16) | (format_name[3] << 24);
+}
+
+int _mmplayer_video_stream_release_bo(mm_player_t* player, void* bo)
+{
+ int ret = MM_ERROR_NONE;
+ GList *l = NULL;
+ MMPLAYER_RETURN_VAL_IF_FAIL(player, MM_ERROR_PLAYER_NOT_INITIALIZED);
+ MMPLAYER_RETURN_VAL_IF_FAIL(bo, MM_ERROR_INVALID_ARGUMENT);
+
+ MMPLAYER_VIDEO_BO_LOCK(player);
+
+ if (player->video_bo_list) {
+ for (l = g_list_first(player->video_bo_list); l; l = g_list_next(l)) {
+ mm_player_video_bo_info_t* tmp = (mm_player_video_bo_info_t *)l->data;
+ if (tmp && tmp->bo == bo) {
+ tmp->using = FALSE;
+ LOGD("release bo %p", bo);
+ tbm_bo_unref(tmp->bo);
+ MMPLAYER_VIDEO_BO_UNLOCK(player);
+ MMPLAYER_VIDEO_BO_SIGNAL(player);
+ return ret;
}
- successful_add_count++;
}
+ } else {
+ /* hw codec is running or the list was reset for DRC. */
+ LOGW("there is no bo list.");
}
+ MMPLAYER_VIDEO_BO_UNLOCK(player);
- MMPLAYER_FLEAVE();
-
- return successful_add_count;
+ LOGW("failed to find bo %p", bo);
+ return ret;
}
-static void __mmplayer_gst_caps_notify_cb(GstPad * pad, GParamSpec * unused, gpointer data)
+static void
+__mmplayer_video_stream_destroy_bo_list(mm_player_t* player)
{
- mm_player_t* player = (mm_player_t*) data;
- GstCaps *caps = NULL;
- GstStructure *str = NULL;
- const char *name;
+ GList *l = NULL;
MMPLAYER_FENTER();
+ MMPLAYER_RETURN_IF_FAIL(player);
- MMPLAYER_RETURN_IF_FAIL(pad)
- MMPLAYER_RETURN_IF_FAIL(unused)
- MMPLAYER_RETURN_IF_FAIL(data)
-
- caps = gst_pad_get_current_caps(pad);
- if (!caps)
- return;
-
- str = gst_caps_get_structure(caps, 0);
- if (!str)
- goto ERROR;
-
- name = gst_structure_get_name(str);
- if (!name)
- goto ERROR;
-
- LOGD("name = %s\n", name);
-
- if (strstr(name, "audio")) {
- _mmplayer_update_content_attrs(player, ATTR_AUDIO);
-
- if (player->audio_stream_changed_cb) {
- LOGE("call the audio stream changed cb\n");
- player->audio_stream_changed_cb(player->audio_stream_changed_cb_user_param);
+ MMPLAYER_VIDEO_BO_LOCK(player);
+ if (player->video_bo_list) {
+ LOGD("destroy video_bo_list : %d", g_list_length(player->video_bo_list));
+ for (l = g_list_first(player->video_bo_list); l; l = g_list_next(l)) {
+ mm_player_video_bo_info_t* tmp = (mm_player_video_bo_info_t *)l->data;
+ if (tmp) {
+ if (tmp->bo)
+ tbm_bo_unref(tmp->bo);
+ g_free(tmp);
+ }
}
- } else if (strstr(name, "video")) {
- if ((name = gst_structure_get_string(str, "format")))
- player->set_mode.video_zc = name[0] == 'S';
+ g_list_free(player->video_bo_list);
+ player->video_bo_list = NULL;
+ }
+ player->video_bo_size = 0;
+ MMPLAYER_VIDEO_BO_UNLOCK(player);
- _mmplayer_update_content_attrs(player, ATTR_VIDEO);
+ MMPLAYER_FLEAVE();
+ return;
+}
- if (player->video_stream_changed_cb) {
- LOGE("call the video stream changed cb\n");
- player->video_stream_changed_cb(player->video_stream_changed_cb_user_param);
- }
- } else
- goto ERROR;
+static void*
+__mmplayer_video_stream_get_bo(mm_player_t* player, int size)
+{
+ GList *l = NULL;
+ MMPLAYER_RETURN_VAL_IF_FAIL(player, NULL);
+ gboolean ret = TRUE;
-ERROR:
+ /* check DRC, if it is, destroy the prev bo list to create again */
+ if (player->video_bo_size != size) {
+ LOGD("video size is changed: %d -> %d", player->video_bo_size, size);
+ __mmplayer_video_stream_destroy_bo_list(player);
+ player->video_bo_size = size;
+ }
- gst_caps_unref(caps);
+ MMPLAYER_VIDEO_BO_LOCK(player);
- MMPLAYER_FLEAVE();
+ if ((!player->video_bo_list) ||
+ (g_list_length(player->video_bo_list) < player->ini.num_of_video_bo)) {
- return;
-}
+ /* create bo list */
+ int idx = 0;
+ LOGD("Create bo list for decoded video stream(num:%d)", player->ini.num_of_video_bo);
+ if (player->video_bo_list) {
+ /* if bo list did not created all, try it again. */
+ idx = g_list_length(player->video_bo_list);
+ LOGD("bo list exist(len: %d)", idx);
+ }
+ for (; idx < player->ini.num_of_video_bo; idx++) {
+ mm_player_video_bo_info_t* bo_info = g_new(mm_player_video_bo_info_t, 1);
+ if (!bo_info) {
+ LOGE("Fail to alloc bo_info.");
+ break;
+ }
+ bo_info->bo = tbm_bo_alloc(player->bufmgr, size, TBM_BO_DEFAULT);
+ if (!bo_info->bo) {
+ LOGE("Fail to tbm_bo_alloc.");
+ g_free(bo_info);
+ break;
+ }
+ bo_info->using = FALSE;
+ player->video_bo_list = g_list_append(player->video_bo_list, bo_info);
+ }
-/**
- * This function is to create audio pipeline for playing.
- *
- * @param player [in] handle of player
- *
- * @return This function returns zero on success.
- * @remark
- * @see __mmplayer_gst_create_midi_pipeline, __mmplayer_gst_create_video_pipeline
- */
-/* macro for code readability. just for sinkbin-creation functions */
-#define MMPLAYER_CREATE_ELEMENT(x_bin, x_id, x_factory, x_name, x_add_bucket, x_player) \
-do {\
- x_bin[x_id].id = x_id;\
- x_bin[x_id].gst = gst_element_factory_make(x_factory, x_name);\
- if (!x_bin[x_id].gst) {\
- LOGE("failed to create %s \n", x_factory);\
- goto ERROR;\
- } else {\
- if (x_player->ini.set_dump_element_flag)\
- __mmplayer_add_dump_buffer_probe(x_player, x_bin[x_id].gst);\
- } \
- if (x_add_bucket)\
- element_bucket = g_list_append(element_bucket, &x_bin[x_id]);\
-} while (0);
+ /* update video num buffers */
+ player->video_num_buffers = idx;
+ if (idx == player->ini.num_of_video_bo)
+ player->video_extra_num_buffers = player->ini.num_of_video_bo/2;
-static void
-__mmplayer_audio_stream_clear_buffer(mm_player_t* player, gboolean send_all)
-{
- GList *l = NULL;
+ if (idx == 0) {
+ MMPLAYER_VIDEO_BO_UNLOCK(player);
+ return NULL;
+ }
- MMPLAYER_FENTER();
- MMPLAYER_RETURN_IF_FAIL(player);
+ LOGD("Num of video buffers(%d/%d)", player->video_num_buffers, player->video_extra_num_buffers);
+ }
- if (player->audio_stream_buff_list) {
- for (l = g_list_first(player->audio_stream_buff_list); l; l = g_list_next(l)) {
- mm_player_audio_stream_buff_t *tmp = (mm_player_audio_stream_buff_t *)l->data;
- if (tmp) {
- if (send_all) {
- LOGD("[%"G_GUINT64_FORMAT"] send remained data.", tmp->channel_mask);
- __mmplayer_audio_stream_send_data(player, tmp);
- }
- if (tmp->pcm_data)
- g_free(tmp->pcm_data);
- g_free(tmp);
+ while (TRUE) {
+ /* get bo from list*/
+ for (l = g_list_first(player->video_bo_list); l; l = g_list_next(l)) {
+ mm_player_video_bo_info_t* tmp = (mm_player_video_bo_info_t *)l->data;
+ if (tmp && (tmp->using == FALSE)) {
+ LOGD("found bo %p to use", tmp->bo);
+ tmp->using = TRUE;
+ MMPLAYER_VIDEO_BO_UNLOCK(player);
+ return tbm_bo_ref(tmp->bo);
}
}
- g_list_free(player->audio_stream_buff_list);
- player->audio_stream_buff_list = NULL;
- }
+ if (!ret) {
+ LOGE("failed to get bo in %d timeout", player->ini.video_bo_timeout);
+ MMPLAYER_VIDEO_BO_UNLOCK(player);
+ return NULL;
+ }
- MMPLAYER_FLEAVE();
+ if (player->ini.video_bo_timeout <= 0) {
+ MMPLAYER_VIDEO_BO_WAIT(player);
+ } else {
+ gint64 timeout = g_get_monotonic_time() + player->ini.video_bo_timeout*G_TIME_SPAN_SECOND;
+ ret = MMPLAYER_VIDEO_BO_WAIT_UNTIL(player, timeout);
+ }
+ continue;
+ }
}
static void
-__mmplayer_audio_stream_send_data(mm_player_t* player, mm_player_audio_stream_buff_t *a_buffer)
+__mmplayer_video_stream_decoded_preroll_cb(GstElement* object, GstBuffer *buffer, GstPad *pad, gpointer data)
{
- MMPlayerAudioStreamDataType audio_stream = { 0, };
-
+ mm_player_t* player = (mm_player_t*)data;
MMPLAYER_FENTER();
- MMPLAYER_RETURN_IF_FAIL(player && player->audio_stream_render_cb_ex);
+ MMPLAYER_RETURN_IF_FAIL(player && player->video_stream_cb);
- audio_stream.bitrate = a_buffer->bitrate;
- audio_stream.channel = a_buffer->channel;
- audio_stream.depth = a_buffer->depth;
- audio_stream.is_little_endian = a_buffer->is_little_endian;
- audio_stream.channel_mask = a_buffer->channel_mask;
- audio_stream.data_size = a_buffer->data_size;
- audio_stream.data = a_buffer->pcm_data;
+ /* send prerolled pkt */
+ player->video_stream_prerolled = FALSE;
- /* LOGD("[%"G_GUINT64_FORMAT"] send data size:%d, %p", audio_stream.channel_mask, audio_stream.data_size, player->audio_stream_cb_user_param); */
- player->audio_stream_render_cb_ex(&audio_stream, player->audio_stream_cb_user_param);
+ __mmplayer_video_stream_decoded_render_cb(object, buffer, pad, data);
- MMPLAYER_FLEAVE();
+ /* not to send prerolled pkt again */
+ player->video_stream_prerolled = TRUE;
}
static void
-__mmplayer_audio_stream_decoded_render_cb(GstElement* object, GstBuffer *buffer, GstPad *pad, gpointer data)
+__mmplayer_video_stream_decoded_render_cb(GstElement* object, GstBuffer *buffer, GstPad *pad, gpointer data)
{
- mm_player_t* player = (mm_player_t*) data;
-
- gint channel = 0;
- gint rate = 0;
- gint depth = 0;
- gint endianness = 0;
- guint64 channel_mask = 0;
- void *a_data = NULL;
- gint a_size = 0;
- mm_player_audio_stream_buff_t *a_buffer = NULL;
+ mm_player_t* player = (mm_player_t*)data;
+ GstCaps *caps = NULL;
+ MMPlayerVideoStreamDataType *stream = NULL;
+ MMVideoBuffer *video_buffer = NULL;
+ GstMemory *dataBlock = NULL;
+ GstMemory *metaBlock = NULL;
GstMapInfo mapinfo = GST_MAP_INFO_INIT;
- GList *l = NULL;
+ GstStructure *structure = NULL;
+ const gchar *string_format = NULL;
+ unsigned int fourcc = 0;
MMPLAYER_FENTER();
- MMPLAYER_RETURN_IF_FAIL(player && player->audio_stream_render_cb_ex);
+ MMPLAYER_RETURN_IF_FAIL(player && player->video_stream_cb);
- gst_buffer_map(buffer, &mapinfo, GST_MAP_READ);
- a_data = mapinfo.data;
- a_size = mapinfo.size;
+ if (player->video_stream_prerolled) {
+ player->video_stream_prerolled = FALSE;
+ LOGD("skip the prerolled pkt not to send it again");
+ return;
+ }
- GstCaps *caps = gst_pad_get_current_caps(pad);
- GstStructure *structure = gst_caps_get_structure(caps, 0);
+ caps = gst_pad_get_current_caps(pad);
+ if (caps == NULL) {
+ LOGE("Caps is NULL.");
+ return;
+ }
/* MMPLAYER_LOG_GST_CAPS_TYPE(caps); */
- gst_structure_get_int(structure, "rate", &rate);
- gst_structure_get_int(structure, "channels", &channel);
- gst_structure_get_int(structure, "depth", &depth);
- gst_structure_get_int(structure, "endianness", &endianness);
- gst_structure_get(structure, "channel-mask", GST_TYPE_BITMASK, &channel_mask, NULL);
- gst_caps_unref(GST_CAPS(caps));
- /* In case of the sync is false, use buffer list. *
- * The num of buffer list depends on the num of audio channels */
- if (player->audio_stream_buff_list) {
- for (l = g_list_first(player->audio_stream_buff_list); l; l = g_list_next(l)) {
- mm_player_audio_stream_buff_t *tmp = (mm_player_audio_stream_buff_t *)l->data;
- if (tmp) {
- if (channel_mask == tmp->channel_mask) {
- /* LOGD("[%"G_GUINT64_FORMAT"] total: %d, data: %d, buffer: %d", channel_mask, tmp->data_size, a_size, tmp->buff_size); */
- if (tmp->data_size + a_size < tmp->buff_size) {
- memcpy(tmp->pcm_data + tmp->data_size, a_data, a_size);
- tmp->data_size += a_size;
- } else {
- /* send data to client */
- __mmplayer_audio_stream_send_data(player, tmp);
+ /* clear stream data structure */
+ stream = (MMPlayerVideoStreamDataType *)g_malloc0(sizeof(MMPlayerVideoStreamDataType));
+ if (!stream) {
+ LOGE("failed to alloc mem for video data");
+ return;
+ }
- if (a_size > tmp->buff_size) {
- LOGD("[%"G_GUINT64_FORMAT"] adj buffer size %d -> %d", channel_mask, tmp->buff_size, a_size);
- tmp->pcm_data = g_realloc(tmp->pcm_data, a_size);
- if (tmp->pcm_data == NULL) {
- LOGE("failed to realloc data.");
- goto DONE;
- }
- tmp->buff_size = a_size;
- }
- memset(tmp->pcm_data, 0x00, tmp->buff_size);
- memcpy(tmp->pcm_data, a_data, a_size);
- tmp->data_size = a_size;
- }
- goto DONE;
- }
- } else {
- LOGE("data is empty in list.");
- goto DONE;
- }
- }
- }
-
- /* create new audio stream data */
- a_buffer = (mm_player_audio_stream_buff_t*)g_malloc0(sizeof(mm_player_audio_stream_buff_t));
- if (a_buffer == NULL) {
- LOGE("failed to alloc data.");
- goto DONE;
- }
- a_buffer->bitrate = rate;
- a_buffer->channel = channel;
- a_buffer->depth = depth;
- a_buffer->is_little_endian = (endianness == 1234 ? 1 : 0);
- a_buffer->channel_mask = channel_mask;
- a_buffer->data_size = a_size;
-
- if (!player->audio_stream_sink_sync) {
- /* If sync is FALSE, use buffer list to reduce the IPC. */
- a_buffer->buff_size = (a_size > player->ini.pcm_buffer_size) ? (a_size) : (player->ini.pcm_buffer_size);
- a_buffer->pcm_data = g_malloc(a_buffer->buff_size);
- if (a_buffer->pcm_data == NULL) {
- LOGE("failed to alloc data.");
- g_free(a_buffer);
- goto DONE;
- }
- memcpy(a_buffer->pcm_data, a_data, a_size);
- /* LOGD("new [%"G_GUINT64_FORMAT"] total:%d buff:%d", channel_mask, a_buffer->data_size, a_buffer->buff_size); */
- player->audio_stream_buff_list = g_list_append(player->audio_stream_buff_list, a_buffer);
- } else {
- /* If sync is TRUE, send data directly. */
- a_buffer->pcm_data = a_data;
- __mmplayer_audio_stream_send_data(player, a_buffer);
- g_free(a_buffer);
- }
-
-DONE:
- gst_buffer_unmap(buffer, &mapinfo);
- MMPLAYER_FLEAVE();
-}
-
-static void
-__mmplayer_gst_audio_deinterleave_pad_added(GstElement *elem, GstPad *pad, gpointer data)
-{
- mm_player_t* player = (mm_player_t*)data;
- MMPlayerGstElement* audiobin = player->pipeline->audiobin;
- GstPad* sinkpad = NULL;
- GstElement *queue = NULL, *sink = NULL;
-
- MMPLAYER_FENTER();
- MMPLAYER_RETURN_IF_FAIL(player && player->pipeline && player->pipeline->mainbin);
-
- queue = gst_element_factory_make("queue", NULL);
- if (queue == NULL) {
- LOGD("fail make queue\n");
- goto ERROR;
- }
-
- sink = gst_element_factory_make("fakesink", NULL);
- if (sink == NULL) {
- LOGD("fail make fakesink\n");
- goto ERROR;
- }
-
- gst_bin_add_many(GST_BIN(audiobin[MMPLAYER_A_BIN].gst), queue, sink, NULL);
-
- if (!gst_element_link_pads_full(queue, "src", sink, "sink", GST_PAD_LINK_CHECK_NOTHING)) {
- LOGW("failed to link queue & sink\n");
- goto ERROR;
- }
-
- sinkpad = gst_element_get_static_pad(queue, "sink");
-
- if (GST_PAD_LINK_OK != gst_pad_link(pad, sinkpad)) {
- LOGW("failed to link [%s:%s] to queue\n", GST_DEBUG_PAD_NAME(pad));
- goto ERROR;
- }
-
- LOGE("player->audio_stream_sink_sync: %d\n", player->audio_stream_sink_sync);
-
- gst_object_unref(sinkpad);
- g_object_set(sink, "sync", player->audio_stream_sink_sync, NULL);
- g_object_set(sink, "signal-handoffs", TRUE, NULL);
-
- gst_element_set_state(sink, GST_STATE_PAUSED);
- gst_element_set_state(queue, GST_STATE_PAUSED);
-
- MMPLAYER_SIGNAL_CONNECT(player,
- G_OBJECT(sink),
- MM_PLAYER_SIGNAL_TYPE_AUDIOBIN,
- "handoff",
- G_CALLBACK(__mmplayer_audio_stream_decoded_render_cb),
- (gpointer)player);
-
- MMPLAYER_FLEAVE();
- return;
-
-ERROR:
- LOGE("__mmplayer_gst_audio_deinterleave_pad_added ERROR\n");
- if (queue) {
- gst_object_unref(GST_OBJECT(queue));
- queue = NULL;
- }
- if (sink) {
- gst_object_unref(GST_OBJECT(sink));
- sink = NULL;
- }
- if (sinkpad) {
- gst_object_unref(GST_OBJECT(sinkpad));
- sinkpad = NULL;
- }
-
- return;
-}
-
-void __mmplayer_gst_set_audiosink_property(mm_player_t* player, MMHandleType attrs)
-{
- #define MAX_PROPS_LEN 128
- gint latency_mode = 0;
- gchar *stream_type = NULL;
- gchar *latency = NULL;
- gint stream_id = 0;
- gchar stream_props[MAX_PROPS_LEN] = {0,};
- GstStructure *props = NULL;
-
- /* set volume table
- * It should be set after player creation through attribute.
- * But, it can not be changed during playing.
- */
- MMPLAYER_FENTER();
- mm_attrs_get_int_by_name(attrs, "sound_stream_index", &stream_id);
- mm_attrs_get_string_by_name(attrs, "sound_stream_type", &stream_type);
-
- if (!stream_type) {
- LOGE("stream_type is null.\n");
- } else {
- if (player->sound.focus_id)
- snprintf(stream_props, sizeof(stream_props)-1, "props,media.role=%s, media.parent_id=%d, media.focus_id=%d",
- stream_type, stream_id, player->sound.focus_id);
- else
- snprintf(stream_props, sizeof(stream_props)-1, "props,media.role=%s, media.parent_id=%d",
- stream_type, stream_id);
- props = gst_structure_from_string(stream_props, NULL);
- g_object_set(player->pipeline->audiobin[MMPLAYER_A_SINK].gst, "stream-properties", props, NULL);
- LOGI("stream_type[%s], stream_id[%d], focus_id[%d], result[%s].\n",
- stream_type, stream_id, player->sound.focus_id, stream_props);
- gst_structure_free(props);
- }
-
- mm_attrs_get_int_by_name(attrs, "sound_latency_mode", &latency_mode);
-
- switch (latency_mode) {
- case AUDIO_LATENCY_MODE_LOW:
- latency = g_strndup("low", 3);
- break;
- case AUDIO_LATENCY_MODE_MID:
- latency = g_strndup("mid", 3);
- break;
- case AUDIO_LATENCY_MODE_HIGH:
- latency = g_strndup("high", 4);
- break;
- };
-
- g_object_set(player->pipeline->audiobin[MMPLAYER_A_SINK].gst,
- "latency", latency,
- NULL);
-
- LOGD("audiosink property - latency=%s \n", latency);
-
- g_free(latency);
-
- MMPLAYER_FLEAVE();
-}
-
-static int
-__mmplayer_gst_create_audio_pipeline(mm_player_t* player)
-{
- MMPlayerGstElement* first_element = NULL;
- MMPlayerGstElement* audiobin = NULL;
- MMHandleType attrs = 0;
- GstPad *pad = NULL;
- GstPad *ghostpad = NULL;
- GList* element_bucket = NULL;
- gboolean link_audio_sink_now = TRUE;
- int i = 0;
- GstCaps *acaps;
-
- MMPLAYER_FENTER();
-
- MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED);
-
- /* alloc handles */
- audiobin = (MMPlayerGstElement*)g_malloc0(sizeof(MMPlayerGstElement) * MMPLAYER_A_NUM);
- if (!audiobin) {
- LOGE("failed to allocate memory for audiobin\n");
- return MM_ERROR_PLAYER_NO_FREE_SPACE;
- }
-
- attrs = MMPLAYER_GET_ATTRS(player);
-
- /* create bin */
- audiobin[MMPLAYER_A_BIN].id = MMPLAYER_A_BIN;
- audiobin[MMPLAYER_A_BIN].gst = gst_bin_new("audiobin");
- if (!audiobin[MMPLAYER_A_BIN].gst) {
- LOGE("failed to create audiobin\n");
- goto ERROR;
- }
-
- /* take it */
- player->pipeline->audiobin = audiobin;
-
- player->set_mode.pcm_extraction = __mmplayer_can_extract_pcm(player);
-
- /* Adding audiotp plugin for reverse trickplay feature */
-// MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_TP, "audiotp", "audio trickplay", TRUE, player);
-
- /* converter */
- MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_CONV, "audioconvert", "audio converter", TRUE, player);
-
- /* replaygain volume */
- MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_RGVOL, "rgvolume", "audio rgvolume", TRUE, player);
- if (player->sound.rg_enable)
- g_object_set(G_OBJECT(audiobin[MMPLAYER_A_RGVOL].gst), "enable-rgvolume", TRUE, NULL);
- else
- g_object_set(G_OBJECT(audiobin[MMPLAYER_A_RGVOL].gst), "enable-rgvolume", FALSE, NULL);
-
- /* resampler */
- MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_RESAMPLER, player->ini.audioresampler_element, "audio resampler", TRUE, player);
-
- if (player->set_mode.pcm_extraction) {
- // pcm extraction only and no sound output
- if (player->audio_stream_render_cb_ex) {
- char *caps_str = NULL;
- GstCaps* caps = NULL;
- gchar *format = NULL;
-
- /* capsfilter */
- MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_CAPS_DEFAULT, "capsfilter", "audio capsfilter", TRUE, player);
-
- mm_attrs_get_string_by_name(player->attrs, "pcm_audioformat", &format);
-
- LOGD("contents : format: %s samplerate : %d pcm_channel: %d", format, player->pcm_samplerate, player->pcm_channel);
-
- caps = gst_caps_new_simple("audio/x-raw",
- "format", G_TYPE_STRING, format,
- "rate", G_TYPE_INT, player->pcm_samplerate,
- "channels", G_TYPE_INT, player->pcm_channel,
- NULL);
- caps_str = gst_caps_to_string(caps);
- LOGD("new caps : %s\n", caps_str);
-
- g_object_set(GST_ELEMENT(audiobin[MMPLAYER_A_CAPS_DEFAULT].gst), "caps", caps, NULL);
-
- /* clean */
- gst_caps_unref(caps);
- MMPLAYER_FREEIF(caps_str);
-
- MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_DEINTERLEAVE, "deinterleave", "deinterleave", TRUE, player);
-
- g_object_set(G_OBJECT(audiobin[MMPLAYER_A_DEINTERLEAVE].gst), "keep-positions", TRUE, NULL);
- /* raw pad handling signal */
- MMPLAYER_SIGNAL_CONNECT(player,
- (audiobin[MMPLAYER_A_DEINTERLEAVE].gst),
- MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "pad-added",
- G_CALLBACK(__mmplayer_gst_audio_deinterleave_pad_added), player);
- } else {
- int dst_samplerate = 0;
- int dst_channels = 0;
- int dst_depth = 0;
- char *caps_str = NULL;
- GstCaps* caps = NULL;
-
- /* get conf. values */
- mm_attrs_multiple_get(player->attrs,
- NULL,
- "pcm_extraction_samplerate", &dst_samplerate,
- "pcm_extraction_channels", &dst_channels,
- "pcm_extraction_depth", &dst_depth,
- NULL);
-
- /* capsfilter */
- MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_CAPS_DEFAULT, "capsfilter", "audio capsfilter", TRUE, player);
- caps = gst_caps_new_simple("audio/x-raw",
- "rate", G_TYPE_INT, dst_samplerate,
- "channels", G_TYPE_INT, dst_channels,
- "depth", G_TYPE_INT, dst_depth,
- NULL);
- caps_str = gst_caps_to_string(caps);
- LOGD("new caps : %s\n", caps_str);
-
- g_object_set(GST_ELEMENT(audiobin[MMPLAYER_A_CAPS_DEFAULT].gst), "caps", caps, NULL);
-
- /* clean */
- gst_caps_unref(caps);
- MMPLAYER_FREEIF(caps_str);
-
- /* fake sink */
- MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_SINK, "fakesink", "fakesink", TRUE, player);
-
- /* set sync */
- g_object_set(G_OBJECT(audiobin[MMPLAYER_A_SINK].gst), "sync", FALSE, NULL);
- }
- } else {
- // normal playback
- //GstCaps* caps = NULL;
- gint channels = 0;
-
- /* for logical volume control */
- MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_VOL, "volume", "volume", TRUE, player);
- g_object_set(G_OBJECT(audiobin[MMPLAYER_A_VOL].gst), "volume", player->sound.volume, NULL);
-
- if (player->sound.mute) {
- LOGD("mute enabled\n");
- g_object_set(G_OBJECT(audiobin[MMPLAYER_A_VOL].gst), "mute", player->sound.mute, NULL);
- }
-
-#if 0
- /*capsfilter */
- MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_CAPS_DEFAULT, "capsfilter", "audiocapsfilter", TRUE, player);
- caps = gst_caps_from_string("audio/x-raw-int, "
- "endianness = (int) LITTLE_ENDIAN, "
- "signed = (boolean) true, "
- "width = (int) 16, "
- "depth = (int) 16");
- g_object_set(GST_ELEMENT(audiobin[MMPLAYER_A_CAPS_DEFAULT].gst), "caps", caps, NULL);
- gst_caps_unref(caps);
-#endif
-
- /* check if multi-channels */
- if (player->pipeline->mainbin && player->pipeline->mainbin[MMPLAYER_M_DEMUX].gst) {
- GstPad *srcpad = NULL;
- GstCaps *caps = NULL;
-
- if ((srcpad = gst_element_get_static_pad(player->pipeline->mainbin[MMPLAYER_M_DEMUX].gst, "src"))) {
- if ((caps = gst_pad_query_caps(srcpad, NULL))) {
- //MMPLAYER_LOG_GST_CAPS_TYPE(caps);
- GstStructure *str = gst_caps_get_structure(caps, 0);
- if (str)
- gst_structure_get_int(str, "channels", &channels);
- gst_caps_unref(caps);
- }
- gst_object_unref(srcpad);
- }
- }
-
- /* audio effect element. if audio effect is enabled */
- if ((strcmp(player->ini.audioeffect_element, ""))
- && (channels <= 2)
- && (player->ini.use_audio_effect_preset || player->ini.use_audio_effect_custom)) {
- MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_FILTER, player->ini.audioeffect_element, "audio effect filter", TRUE, player);
-
- LOGD("audio effect config. bypass = %d, effect type = %d", player->bypass_audio_effect, player->audio_effect_info.effect_type);
-
- if ((!player->bypass_audio_effect)
- && (player->ini.use_audio_effect_preset || player->ini.use_audio_effect_custom)) {
- if (MM_AUDIO_EFFECT_TYPE_CUSTOM == player->audio_effect_info.effect_type) {
- if (!_mmplayer_audio_effect_custom_apply(player))
- LOGI("apply audio effect(custom) setting success\n");
- }
- }
-
- if ((strcmp(player->ini.audioeffect_element_custom, ""))
- && (player->set_mode.rich_audio))
- MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_FILTER_SEC, player->ini.audioeffect_element_custom, "audio effect filter custom", TRUE, player);
- }
-
- /* create audio sink */
- LOGD("360 spherical %d, channels %d, ambisonic type %d, format %d, order %d",
- player->is_content_spherical, channels, player->video360_metadata.ambisonic_type,
- player->video360_metadata.ambisonic_format, player->video360_metadata.ambisonic_order);
-
- /* Note: qtdemux converts audio metadata defaults to openalsink defaults. */
- if (player->is_360_feature_enabled &&
- player->is_content_spherical &&
- channels == 4 &&
- player->video360_metadata.ambisonic_type == MMFILE_AMBISONIC_TYPE_PERIPHONIC &&
- player->video360_metadata.ambisonic_format == MMFILE_AMBISONIC_FORMAT_AMB &&
- player->video360_metadata.ambisonic_order == MMFILE_AMBISONIC_ORDER_FOA) {
-
- strncpy(player->ini.audiosink_element, "openalsink", PLAYER_INI_MAX_STRLEN - 1);
-
- MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_CONV_BFORMAT, "audioconvert", "audio-converter-bformat", link_audio_sink_now, player);
-
- MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_CAPS_360, "capsfilter", "audio-caps-filter", link_audio_sink_now, player);
- acaps = gst_caps_from_string(SPATIAL_AUDIO_CAPS);
- g_object_set(G_OBJECT(audiobin[MMPLAYER_A_CAPS_360].gst), "caps", acaps, NULL);
- gst_caps_unref(acaps);
-
- MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_SINK, "openalsink", "audiosink", link_audio_sink_now, player);
- g_object_set(G_OBJECT(audiobin[MMPLAYER_A_SINK].gst), "source-ambisonics-type", 1, NULL);
- sound_manager_create_stream_information(SOUND_STREAM_TYPE_MEDIA, NULL, NULL, &stream_info);
- g_object_set(G_OBJECT(audiobin[MMPLAYER_A_SINK].gst), "stream-info", stream_info, NULL);
-
- player->is_openal_plugin_used = TRUE;
-
- if (player->video360_yaw_radians <= M_PI &&
- player->video360_yaw_radians >= -M_PI &&
- player->video360_pitch_radians <= M_PI_2 &&
- player->video360_pitch_radians >= -M_PI_2) {
- g_object_set(G_OBJECT(audiobin[MMPLAYER_A_SINK].gst),
- "source-orientation-y", (int) (player->video360_yaw_radians * 180.0 / M_PI),
- "source-orientation-x", (int) (player->video360_pitch_radians * 180.0 / M_PI), NULL);
- } else if (player->video360_metadata.init_view_heading || player->video360_metadata.init_view_pitch) {
- g_object_set(G_OBJECT(audiobin[MMPLAYER_A_SINK].gst),
- "source-orientation-y", player->video360_metadata.init_view_heading,
- "source-orientation-x", player->video360_metadata.init_view_pitch, NULL);
- }
- } else {
- if (player->is_360_feature_enabled && player->is_content_spherical)
- LOGW("Audio track isn't of the ambisonic type and can't be played back as a spatial sound.\n");
- MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_SINK, player->ini.audiosink_element, "audiosink", link_audio_sink_now, player);
- }
-
- /* qos on */
- g_object_set(G_OBJECT(audiobin[MMPLAYER_A_SINK].gst), "qos", TRUE, NULL); /* qos on */
- g_object_set(G_OBJECT(audiobin[MMPLAYER_A_SINK].gst), "slave-method", GST_AUDIO_BASE_SINK_SLAVE_NONE, NULL);
-
-
- if ((MMPLAYER_IS_RTSP_STREAMING(player)) ||
- (player->videodec_linked && player->ini.use_system_clock)) {
- LOGD("system clock will be used.\n");
- g_object_set(G_OBJECT(audiobin[MMPLAYER_A_SINK].gst), "provide-clock", FALSE, NULL);
- }
-
- if (g_strrstr(player->ini.audiosink_element, "pulsesink"))
- __mmplayer_gst_set_audiosink_property(player, attrs);
- }
-
- if (audiobin[MMPLAYER_A_SINK].gst) {
- GstPad *sink_pad = NULL;
- sink_pad = gst_element_get_static_pad(audiobin[MMPLAYER_A_SINK].gst, "sink");
- MMPLAYER_SIGNAL_CONNECT(player, sink_pad, MM_PLAYER_SIGNAL_TYPE_AUDIOBIN,
- "notify::caps", G_CALLBACK(__mmplayer_gst_caps_notify_cb), player);
- gst_object_unref(GST_OBJECT(sink_pad));
- }
-
- __mmplayer_add_sink(player, audiobin[MMPLAYER_A_SINK].gst);
-
- /* adding created elements to bin */
- LOGD("adding created elements to bin\n");
- if (!__mmplayer_gst_element_add_bucket_to_bin(GST_BIN(audiobin[MMPLAYER_A_BIN].gst), element_bucket)) {
- LOGE("failed to add elements\n");
- goto ERROR;
- }
-
- /* linking elements in the bucket by added order. */
- LOGD("Linking elements in the bucket by added order.\n");
- if (__mmplayer_gst_element_link_bucket(element_bucket) == -1) {
- LOGE("failed to link elements\n");
- goto ERROR;
- }
-
- /* get first element's sinkpad for creating ghostpad */
- first_element = (MMPlayerGstElement *)element_bucket->data;
- if (!first_element) {
- LOGE("failed to get first elem\n");
- goto ERROR;
- }
-
- pad = gst_element_get_static_pad(GST_ELEMENT(first_element->gst), "sink");
- if (!pad) {
- LOGE("failed to get pad from first element of audiobin\n");
- goto ERROR;
- }
-
- ghostpad = gst_ghost_pad_new("sink", pad);
- if (!ghostpad) {
- LOGE("failed to create ghostpad\n");
- goto ERROR;
- }
-
- if (FALSE == gst_element_add_pad(audiobin[MMPLAYER_A_BIN].gst, ghostpad)) {
- LOGE("failed to add ghostpad to audiobin\n");
- goto ERROR;
- }
-
- gst_object_unref(pad);
-
- g_list_free(element_bucket);
- MMPLAYER_FLEAVE();
-
- return MM_ERROR_NONE;
-
-ERROR:
-
- LOGD("ERROR : releasing audiobin\n");
-
- if (pad)
- gst_object_unref(GST_OBJECT(pad));
-
- if (ghostpad)
- gst_object_unref(GST_OBJECT(ghostpad));
-
- if (element_bucket)
- g_list_free(element_bucket);
-
- /* release element which are not added to bin */
- for (i = 1; i < MMPLAYER_A_NUM; i++) {
- /* NOTE : skip bin */
- if (audiobin[i].gst) {
- GstObject* parent = NULL;
- parent = gst_element_get_parent(audiobin[i].gst);
-
- if (!parent) {
- gst_object_unref(GST_OBJECT(audiobin[i].gst));
- audiobin[i].gst = NULL;
- } else
- gst_object_unref(GST_OBJECT(parent));
- }
- }
-
- /* release audiobin with it's childs */
- if (audiobin[MMPLAYER_A_BIN].gst)
- gst_object_unref(GST_OBJECT(audiobin[MMPLAYER_A_BIN].gst));
-
- MMPLAYER_FREEIF(audiobin);
-
- player->pipeline->audiobin = NULL;
-
- return MM_ERROR_PLAYER_INTERNAL;
-}
-
-static GstPadProbeReturn
-__mmplayer_audio_stream_probe(GstPad *pad, GstPadProbeInfo *info, gpointer u_data)
-{
- mm_player_t* player = (mm_player_t*) u_data;
- GstBuffer *pad_buffer = gst_pad_probe_info_get_buffer(info);
- GstMapInfo probe_info = GST_MAP_INFO_INIT;
-
- gst_buffer_map(pad_buffer, &probe_info, GST_MAP_READ);
-
- if (player->audio_stream_cb && probe_info.size && probe_info.data)
- player->audio_stream_cb((void *)probe_info.data, probe_info.size, player->audio_stream_cb_user_param);
-
- return GST_PAD_PROBE_OK;
-}
-
-static guint32 _mmplayer_convert_fourcc_string_to_value(const gchar* format_name)
-{
- return format_name[0] | (format_name[1] << 8) | (format_name[2] << 16) | (format_name[3] << 24);
-}
-
-int _mmplayer_video_stream_release_bo(mm_player_t* player, void* bo)
-{
- int ret = MM_ERROR_NONE;
- GList *l = NULL;
- MMPLAYER_RETURN_VAL_IF_FAIL(player, MM_ERROR_PLAYER_NOT_INITIALIZED);
- MMPLAYER_RETURN_VAL_IF_FAIL(bo, MM_ERROR_INVALID_ARGUMENT);
-
- MMPLAYER_VIDEO_BO_LOCK(player);
-
- if (player->video_bo_list) {
- for (l = g_list_first(player->video_bo_list); l; l = g_list_next(l)) {
- mm_player_video_bo_info_t* tmp = (mm_player_video_bo_info_t *)l->data;
- if (tmp && tmp->bo == bo) {
- tmp->using = FALSE;
- LOGD("release bo %p", bo);
- tbm_bo_unref(tmp->bo);
- MMPLAYER_VIDEO_BO_UNLOCK(player);
- MMPLAYER_VIDEO_BO_SIGNAL(player);
- return ret;
- }
- }
- } else {
- /* hw codec is running or the list was reset for DRC. */
- LOGW("there is no bo list.");
- }
- MMPLAYER_VIDEO_BO_UNLOCK(player);
-
- LOGW("failed to find bo %p", bo);
- return ret;
-}
-
-static void
-__mmplayer_video_stream_destroy_bo_list(mm_player_t* player)
-{
- GList *l = NULL;
-
- MMPLAYER_FENTER();
- MMPLAYER_RETURN_IF_FAIL(player);
-
- MMPLAYER_VIDEO_BO_LOCK(player);
- if (player->video_bo_list) {
- LOGD("destroy video_bo_list : %d", g_list_length(player->video_bo_list));
- for (l = g_list_first(player->video_bo_list); l; l = g_list_next(l)) {
- mm_player_video_bo_info_t* tmp = (mm_player_video_bo_info_t *)l->data;
- if (tmp) {
- if (tmp->bo)
- tbm_bo_unref(tmp->bo);
- g_free(tmp);
- }
- }
- g_list_free(player->video_bo_list);
- player->video_bo_list = NULL;
- }
- player->video_bo_size = 0;
- MMPLAYER_VIDEO_BO_UNLOCK(player);
-
- MMPLAYER_FLEAVE();
- return;
-}
-
-static void*
-__mmplayer_video_stream_get_bo(mm_player_t* player, int size)
-{
- GList *l = NULL;
- MMPLAYER_RETURN_VAL_IF_FAIL(player, NULL);
- gboolean ret = TRUE;
-
- /* check DRC, if it is, destroy the prev bo list to create again */
- if (player->video_bo_size != size) {
- LOGD("video size is changed: %d -> %d", player->video_bo_size, size);
- __mmplayer_video_stream_destroy_bo_list(player);
- player->video_bo_size = size;
- }
-
- MMPLAYER_VIDEO_BO_LOCK(player);
-
- if ((!player->video_bo_list) ||
- (g_list_length(player->video_bo_list) < player->ini.num_of_video_bo)) {
-
- /* create bo list */
- int idx = 0;
- LOGD("Create bo list for decoded video stream(num:%d)", player->ini.num_of_video_bo);
-
- if (player->video_bo_list) {
- /* if bo list did not created all, try it again. */
- idx = g_list_length(player->video_bo_list);
- LOGD("bo list exist(len: %d)", idx);
- }
-
- for (; idx < player->ini.num_of_video_bo; idx++) {
- mm_player_video_bo_info_t* bo_info = g_new(mm_player_video_bo_info_t, 1);
- if (!bo_info) {
- LOGE("Fail to alloc bo_info.");
- break;
- }
- bo_info->bo = tbm_bo_alloc(player->bufmgr, size, TBM_BO_DEFAULT);
- if (!bo_info->bo) {
- LOGE("Fail to tbm_bo_alloc.");
- g_free(bo_info);
- break;
- }
- bo_info->using = FALSE;
- player->video_bo_list = g_list_append(player->video_bo_list, bo_info);
- }
-
- /* update video num buffers */
- player->video_num_buffers = idx;
- if (idx == player->ini.num_of_video_bo)
- player->video_extra_num_buffers = player->ini.num_of_video_bo/2;
-
- if (idx == 0) {
- MMPLAYER_VIDEO_BO_UNLOCK(player);
- return NULL;
- }
-
- LOGD("Num of video buffers(%d/%d)", player->video_num_buffers, player->video_extra_num_buffers);
- }
-
- while (TRUE) {
- /* get bo from list*/
- for (l = g_list_first(player->video_bo_list); l; l = g_list_next(l)) {
- mm_player_video_bo_info_t* tmp = (mm_player_video_bo_info_t *)l->data;
- if (tmp && (tmp->using == FALSE)) {
- LOGD("found bo %p to use", tmp->bo);
- tmp->using = TRUE;
- MMPLAYER_VIDEO_BO_UNLOCK(player);
- return tbm_bo_ref(tmp->bo);
- }
- }
- if (!ret) {
- LOGE("failed to get bo in %d timeout", player->ini.video_bo_timeout);
- MMPLAYER_VIDEO_BO_UNLOCK(player);
- return NULL;
- }
-
- if (player->ini.video_bo_timeout <= 0) {
- MMPLAYER_VIDEO_BO_WAIT(player);
- } else {
- gint64 timeout = g_get_monotonic_time() + player->ini.video_bo_timeout*G_TIME_SPAN_SECOND;
- ret = MMPLAYER_VIDEO_BO_WAIT_UNTIL(player, timeout);
- }
- continue;
- }
-}
-
-static void
-__mmplayer_video_stream_decoded_preroll_cb(GstElement* object, GstBuffer *buffer, GstPad *pad, gpointer data)
-{
- mm_player_t* player = (mm_player_t*)data;
- MMPLAYER_FENTER();
- MMPLAYER_RETURN_IF_FAIL(player && player->video_stream_cb);
-
- /* send prerolled pkt */
- player->video_stream_prerolled = FALSE;
-
- __mmplayer_video_stream_decoded_render_cb(object, buffer, pad, data);
-
- /* not to send prerolled pkt again */
- player->video_stream_prerolled = TRUE;
-}
-
-static void
-__mmplayer_video_stream_decoded_render_cb(GstElement* object, GstBuffer *buffer, GstPad *pad, gpointer data)
-{
- mm_player_t* player = (mm_player_t*)data;
- GstCaps *caps = NULL;
- MMPlayerVideoStreamDataType *stream = NULL;
- MMVideoBuffer *video_buffer = NULL;
- GstMemory *dataBlock = NULL;
- GstMemory *metaBlock = NULL;
- GstMapInfo mapinfo = GST_MAP_INFO_INIT;
- GstStructure *structure = NULL;
- const gchar *string_format = NULL;
- unsigned int fourcc = 0;
-
- MMPLAYER_FENTER();
- MMPLAYER_RETURN_IF_FAIL(player && player->video_stream_cb);
-
- if (player->video_stream_prerolled) {
- player->video_stream_prerolled = FALSE;
- LOGD("skip the prerolled pkt not to send it again");
- return;
- }
-
- caps = gst_pad_get_current_caps(pad);
- if (caps == NULL) {
- LOGE("Caps is NULL.");
- return;
- }
-
- /* MMPLAYER_LOG_GST_CAPS_TYPE(caps); */
-
- /* clear stream data structure */
- stream = (MMPlayerVideoStreamDataType *)g_malloc0(sizeof(MMPlayerVideoStreamDataType));
- if (!stream) {
- LOGE("failed to alloc mem for video data");
- return;
- }
-
- structure = gst_caps_get_structure(caps, 0);
- gst_structure_get_int(structure, "width", &(stream->width));
- gst_structure_get_int(structure, "height", &(stream->height));
- string_format = gst_structure_get_string(structure, "format");
- if (string_format)
- fourcc = _mmplayer_convert_fourcc_string_to_value(string_format);
- stream->format = util_get_pixtype(fourcc);
- gst_caps_unref(caps);
- caps = NULL;
-
- __mmplayer_get_video_angle(player, NULL, &stream->orientation);
-
- /*
- LOGD("Call video steramCb, data[%p], Width[%d],Height[%d], Format[%d]",
- GST_BUFFER_DATA(buffer), stream.width, stream.height, stream.format);
- */
-
- if (stream->width == 0 || stream->height == 0 || stream->format == MM_PIXEL_FORMAT_INVALID) {
- LOGE("Wrong condition!!");
- goto ERROR;
- }
-
- /* set size and timestamp */
- dataBlock = gst_buffer_peek_memory(buffer, 0);
- stream->length_total = gst_memory_get_sizes(dataBlock, NULL, NULL);
- stream->timestamp = (unsigned int)(GST_TIME_AS_MSECONDS(GST_BUFFER_PTS(buffer))); /* nano sec -> mili sec */
-
- /* check zero-copy */
- if (player->set_mode.video_zc &&
- player->set_mode.media_packet_video_stream &&
- gst_buffer_n_memory(buffer) > 1) {
- metaBlock = gst_buffer_peek_memory(buffer, 1);
- gst_memory_map(metaBlock, &mapinfo, GST_MAP_READ);
- video_buffer = (MMVideoBuffer *)mapinfo.data;
- }
-
- if (video_buffer) { /* hw codec */
- /* set tbm bo */
- if (video_buffer->type == MM_VIDEO_BUFFER_TYPE_TBM_BO) {
- int i = 0;
-
- /* copy pointer of tbm bo, stride, elevation */
- while (i < MM_VIDEO_BUFFER_PLANE_MAX && video_buffer->handle.bo[i]) {
- stream->bo[i] = tbm_bo_ref(video_buffer->handle.bo[i]);
- i++;
- }
- } else {
- LOGE("Not support video buffer format");
- goto ERROR;
- }
- memcpy(stream->stride, video_buffer->stride_width,
- sizeof(int) * MM_VIDEO_BUFFER_PLANE_MAX);
- memcpy(stream->elevation, video_buffer->stride_height,
- sizeof(int) * MM_VIDEO_BUFFER_PLANE_MAX);
-
- /* will be released, by calling _mm_player_video_stream_internal_buffer_unref() */
- stream->internal_buffer = gst_buffer_ref(buffer);
- } else { /* sw codec */
- int i = 0;
- int j = 0;
- int k = 0;
- int ret = TBM_SURFACE_ERROR_NONE;
- int src_stride[MM_PLAYER_IMGB_MPLANE_MAX] = { 0, };
- int src_offset[MM_PLAYER_IMGB_MPLANE_MAX] = { 0, };
- int size = 0;
- unsigned char *src = NULL;
- unsigned char *dest = NULL;
- tbm_bo_handle thandle;
- tbm_surface_h surface;
- tbm_surface_info_s info;
- gboolean gst_ret;
-
- gst_ret = gst_memory_map(dataBlock, &mapinfo, GST_MAP_READWRITE);
- if (!gst_ret) {
- LOGE("fail to gst_memory_map");
- goto ERROR;
- }
-
-
- if (stream->format == MM_PIXEL_FORMAT_I420) {
- surface = tbm_surface_create(stream->width, stream->height, TBM_FORMAT_YUV420);
-
- ret = tbm_surface_get_info(surface, &info);
-
- if (ret != TBM_SURFACE_ERROR_NONE) {
- tbm_surface_destroy(surface);
- goto ERROR;
- }
- tbm_surface_destroy(surface);
-
- src_stride[0] = GST_ROUND_UP_4(stream->width);
- src_stride[1] = src_stride[2] = GST_ROUND_UP_4(stream->width>>1);
- src_offset[1] = src_stride[0] * GST_ROUND_UP_2(stream->height);
- src_offset[2] = src_offset[1] + (src_stride[1] * (GST_ROUND_UP_2(stream->height)>>1));
- stream->stride[0] = info.planes[0].stride;
- stream->elevation[0] = info.planes[0].size / info.planes[0].stride;
- stream->stride[1] = info.planes[1].stride;
- stream->elevation[1] = info.planes[1].size / info.planes[1].stride;
- stream->stride[2] = info.planes[2].stride;
- stream->elevation[2] = info.planes[2].size / info.planes[2].stride;
- size = info.planes[0].size + info.planes[1].size + info.planes[2].size;
- } else if (stream->format == MM_PIXEL_FORMAT_RGBA) {
- stream->stride[0] = stream->width * 4;
- stream->elevation[0] = stream->height;
- size = stream->stride[0] * stream->height;
- } else {
- LOGE("Not support format %d", stream->format);
- goto ERROR;
- }
-
- stream->bo[0] = __mmplayer_video_stream_get_bo(player, size);
- if (!stream->bo[0]) {
- LOGE("Fail to tbm_bo_alloc!!");
- goto ERROR;
- }
-
- thandle = tbm_bo_map(stream->bo[0], TBM_DEVICE_CPU, TBM_OPTION_WRITE);
- if (thandle.ptr && mapinfo.data) {
- if (stream->format == MM_PIXEL_FORMAT_I420) {
- for (i = 0; i < 3; i++) {
- src = mapinfo.data + src_offset[i];
- dest = thandle.ptr + info.planes[i].offset;
-
- if (i > 0) k = 1;
- for (j = 0; j < stream->height>>k; j++) {
- memcpy(dest, src, stream->width>>k);
- src += src_stride[i];
- dest += stream->stride[i];
- }
- }
- } else if (stream->format == MM_PIXEL_FORMAT_RGBA) {
- memcpy(thandle.ptr, mapinfo.data, size);
- } else {
- LOGE("Not support format %d", stream->format);
- goto ERROR;
- }
- } else {
- LOGE("data pointer is wrong. dest : %p, src : %p",
- thandle.ptr, mapinfo.data);
- goto ERROR;
- }
- tbm_bo_unmap(stream->bo[0]);
- }
-
- if (player->video_stream_cb) { /* This has been already checked at the entry */
- if (!player->video_stream_cb(stream, player->video_stream_cb_user_param)) {
- LOGE("failed to send video stream data.");
- goto ERROR;
- }
- }
-
- if (metaBlock)
- gst_memory_unmap(metaBlock, &mapinfo);
- else
- gst_memory_unmap(dataBlock, &mapinfo);
-
- return;
-
-ERROR:
- LOGE("release video stream resource.");
- if (metaBlock) {
- int i = 0;
- for (i = 0 ; i < MM_VIDEO_BUFFER_PLANE_MAX ; i++) {
- if (stream->bo[i])
- tbm_bo_unref(stream->bo[i]);
- }
- gst_memory_unmap(metaBlock, &mapinfo);
-
- /* unref gst buffer */
- if (stream->internal_buffer)
- gst_buffer_unref(stream->internal_buffer);
- } else if (dataBlock) {
- if (stream->bo[0])
- _mmplayer_video_stream_release_bo(player, stream->bo[0]);
- gst_memory_unmap(dataBlock, &mapinfo);
- }
-
- g_free(stream);
- return;
-}
-
-static int
-__mmplayer_gst_create_video_filters(mm_player_t* player, GList** bucket)
-{
- gchar* video_csc = "videoconvert"; /* default colorspace converter */
- GList* element_bucket = NULL;
-
- MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline && player->pipeline->videobin, MM_ERROR_PLAYER_NOT_INITIALIZED);
-
- MMPLAYER_FENTER();
-
- if (player->set_mode.video_zc || (player->is_360_feature_enabled && player->is_content_spherical)) {
- LOGD("do not need to add video filters.");
- return MM_ERROR_NONE;
- }
-
- /* in case of sw codec except 360 playback,
- * if libav video decoder is selected, videoconvert is required to render the shm wl-buffer which support RGB only via tizenwlsink. */
- MMPLAYER_CREATE_ELEMENT(player->pipeline->videobin, MMPLAYER_V_CONV, video_csc, "video converter", TRUE, player);
- LOGD("using video converter: %s", video_csc);
-
- /* set video rotator */
- MMPLAYER_CREATE_ELEMENT(player->pipeline->videobin, MMPLAYER_V_FLIP, "videoflip", "video rotator", TRUE, player);
-
- *bucket = element_bucket;
- MMPLAYER_FLEAVE();
- return MM_ERROR_NONE;
-
-ERROR: /* refer MMPLAYER_CREATE_ELEMENT */
- g_list_free(element_bucket);
-
- *bucket = NULL;
- MMPLAYER_FLEAVE();
- return MM_ERROR_PLAYER_INTERNAL;
-}
-
-/**
- * This function is to create video pipeline.
- *
- * @param player [in] handle of player
- * caps [in] src caps of decoder
- * surface_type [in] surface type for video rendering
- *
- * @return This function returns zero on success.
- * @remark
- * @see __mmplayer_gst_create_audio_pipeline, __mmplayer_gst_create_midi_pipeline
- */
-/**
- * VIDEO PIPELINE
- * - video overlay surface(arm/x86) : tizenwlsink
- */
-static int
-__mmplayer_gst_create_video_pipeline(mm_player_t* player, GstCaps* caps, MMDisplaySurfaceType surface_type)
-{
- GstPad *pad = NULL;
- MMHandleType attrs;
- GList*element_bucket = NULL;
- MMPlayerGstElement* first_element = NULL;
- MMPlayerGstElement* videobin = NULL;
- gchar *videosink_element = NULL;
-
- MMPLAYER_FENTER();
-
- MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED);
-
- /* alloc handles */
- videobin = (MMPlayerGstElement*)g_malloc0(sizeof(MMPlayerGstElement) * MMPLAYER_V_NUM);
- if (!videobin)
- return MM_ERROR_PLAYER_NO_FREE_SPACE;
-
- player->pipeline->videobin = videobin;
-
- attrs = MMPLAYER_GET_ATTRS(player);
- if (!attrs) {
- LOGE("cannot get content attribute");
- return MM_ERROR_PLAYER_INTERNAL;
- }
-
- /* create bin */
- videobin[MMPLAYER_V_BIN].id = MMPLAYER_V_BIN;
- videobin[MMPLAYER_V_BIN].gst = gst_bin_new("videobin");
- if (!videobin[MMPLAYER_V_BIN].gst) {
- LOGE("failed to create videobin");
- goto ERROR;
- }
-
- int enable_video_decoded_cb = 0;
- mm_attrs_get_int_by_name(player->attrs, "enable_video_decoded_cb", &enable_video_decoded_cb);
-
- if (player->is_360_feature_enabled && player->is_content_spherical) {
- LOGD("video360 elem will be added.");
-
- MMPLAYER_CREATE_ELEMENT(videobin, MMPLAYER_V_360, "video360",
- "video-360", TRUE, player);
-
- /* Set spatial media metadata and/or user settings to the element.
- * */
- g_object_set(G_OBJECT(videobin[MMPLAYER_V_360].gst),
- "projection-type", player->video360_metadata.projection_type, NULL);
-
- g_object_set(G_OBJECT(videobin[MMPLAYER_V_360].gst),
- "stereo-mode", player->video360_metadata.stereo_mode, NULL);
-
- if (player->video360_metadata.full_pano_width_pixels &&
- player->video360_metadata.full_pano_height_pixels &&
- player->video360_metadata.cropped_area_image_width &&
- player->video360_metadata.cropped_area_image_height) {
- g_object_set(G_OBJECT(videobin[MMPLAYER_V_360].gst),
- "projection-bounds-top", player->video360_metadata.cropped_area_top,
- "projection-bounds-bottom", player->video360_metadata.full_pano_height_pixels -
- player->video360_metadata.cropped_area_top - player->video360_metadata.cropped_area_image_height,
- "projection-bounds-left", player->video360_metadata.cropped_area_left,
- "projection-bounds-right", player->video360_metadata.full_pano_width_pixels -
- player->video360_metadata.cropped_area_left - player->video360_metadata.cropped_area_image_width,
- NULL);
- }
-
- if (player->video360_horizontal_fov && player->video360_vertical_fov) {
- g_object_set(G_OBJECT(videobin[MMPLAYER_V_360].gst),
- "horizontal-fov", player->video360_horizontal_fov,
- "vertical-fov", player->video360_vertical_fov, NULL);
- }
-
- if (player->video360_zoom <= VIDEO360_MAX_ZOOM && player->video360_zoom > 1.0f) {
- g_object_set(G_OBJECT(videobin[MMPLAYER_V_360].gst),
- "zoom", 1.0f / player->video360_zoom, NULL);
- }
-
- if (player->video360_yaw_radians <= M_PI &&
- player->video360_yaw_radians >= -M_PI &&
- player->video360_pitch_radians <= M_PI_2 &&
- player->video360_pitch_radians >= -M_PI_2) {
- g_object_set(G_OBJECT(videobin[MMPLAYER_V_360].gst),
- "pose-yaw", (int) (player->video360_yaw_radians * 180.0 / M_PI),
- "pose-pitch", (int) (player->video360_pitch_radians * 180.0 / M_PI), NULL);
- } else if (player->video360_metadata.init_view_heading || player->video360_metadata.init_view_pitch) {
- g_object_set(G_OBJECT(videobin[MMPLAYER_V_360].gst),
- "pose-yaw", player->video360_metadata.init_view_heading,
- "pose-pitch", player->video360_metadata.init_view_pitch, NULL);
- }
-
- g_object_set(G_OBJECT(videobin[MMPLAYER_V_360].gst),
- "passthrough", !player->is_video360_enabled, NULL);
- }
-
- /* set video sink */
- switch (surface_type) {
- case MM_DISPLAY_SURFACE_OVERLAY:
- if (__mmplayer_gst_create_video_filters(player, &element_bucket) != MM_ERROR_NONE)
- goto ERROR;
- if (strlen(player->ini.videosink_element_overlay) > 0)
- videosink_element = player->ini.videosink_element_overlay;
- else
- goto ERROR;
- break;
- case MM_DISPLAY_SURFACE_NULL:
- if (strlen(player->ini.videosink_element_fake) > 0)
- videosink_element = player->ini.videosink_element_fake;
- else
- goto ERROR;
- break;
- case MM_DISPLAY_SURFACE_REMOTE:
- if (strlen(player->ini.videosink_element_fake) > 0)
- videosink_element = player->ini.videosink_element_fake;
- else
- goto ERROR;
- break;
- default:
- LOGE("unidentified surface type");
- goto ERROR;
- }
- LOGD("surface_type %d, selected videosink name: %s", surface_type, videosink_element);
-
- MMPLAYER_CREATE_ELEMENT(videobin, MMPLAYER_V_SINK, videosink_element, "videosink", TRUE, player);
-
- /* additional setting for sink plug-in */
- switch (surface_type) {
- case MM_DISPLAY_SURFACE_OVERLAY:
- {
- bool use_tbm = (player->set_mode.video_zc || (player->is_360_feature_enabled && player->is_content_spherical));
- if (!use_tbm) {
- LOGD("selected videosink name: %s", videosink_element);
-
- /* support shard memory with S/W codec on HawkP */
- if (strncmp(videosink_element, "tizenwlsink", strlen(videosink_element)) == 0) {
- g_object_set(player->pipeline->videobin[MMPLAYER_V_SINK].gst,
- "use-tbm", use_tbm, NULL);
- }
- } else {
- if (attrs) {
- int gapless = 0;
-
- mm_attrs_get_int_by_name(attrs, "gapless_mode", &gapless);
-
- if (gapless > 0) {
- LOGD("disable last-sample");
- g_object_set(player->pipeline->videobin[MMPLAYER_V_SINK].gst, "enable-last-sample", FALSE, NULL);
- }
- }
- }
- if (player->set_mode.media_packet_video_stream) {
- int enable = 0;
- mm_attrs_get_int_by_name(player->attrs, "enable_video_decoded_cb", &enable);
- if (enable)
- g_object_set(G_OBJECT(player->pipeline->videobin[MMPLAYER_V_SINK].gst), "signal-handoffs", TRUE, NULL);
-
- MMPLAYER_SIGNAL_CONNECT(player,
- G_OBJECT(player->pipeline->videobin[MMPLAYER_V_SINK].gst),
- MM_PLAYER_SIGNAL_TYPE_VIDEOBIN,
- "handoff",
- G_CALLBACK(__mmplayer_video_stream_decoded_render_cb),
- (gpointer)player);
-
- MMPLAYER_SIGNAL_CONNECT(player,
- G_OBJECT(player->pipeline->videobin[MMPLAYER_V_SINK].gst),
- MM_PLAYER_SIGNAL_TYPE_VIDEOBIN,
- "preroll-handoff",
- G_CALLBACK(__mmplayer_video_stream_decoded_preroll_cb),
- (gpointer)player);
- }
- break;
- }
- case MM_DISPLAY_SURFACE_REMOTE:
- {
- if (player->set_mode.media_packet_video_stream) {
- LOGE("add data probe at videosink");
- g_object_set(G_OBJECT(player->pipeline->videobin[MMPLAYER_V_SINK].gst),
- "sync", TRUE, "signal-handoffs", TRUE, NULL);
-
- MMPLAYER_SIGNAL_CONNECT(player,
- G_OBJECT(player->pipeline->videobin[MMPLAYER_V_SINK].gst),
- MM_PLAYER_SIGNAL_TYPE_VIDEOBIN,
- "handoff",
- G_CALLBACK(__mmplayer_video_stream_decoded_render_cb),
- (gpointer)player);
-
- MMPLAYER_SIGNAL_CONNECT(player,
- G_OBJECT(player->pipeline->videobin[MMPLAYER_V_SINK].gst),
- MM_PLAYER_SIGNAL_TYPE_VIDEOBIN,
- "preroll-handoff",
- G_CALLBACK(__mmplayer_video_stream_decoded_preroll_cb),
- (gpointer)player);
- if (attrs) {
- int gapless = 0;
-
- mm_attrs_get_int_by_name(attrs, "gapless_mode", &gapless);
-
- if (gapless > 0) {
- LOGD("disable last-sample");
- g_object_set(player->pipeline->videobin[MMPLAYER_V_SINK].gst, "enable-last-sample", FALSE, NULL);
- }
- }
- }
- break;
- }
- default:
- break;
- }
-
- if (_mmplayer_update_video_param(player, "update_all_param") != MM_ERROR_NONE)
- goto ERROR;
-
- if (videobin[MMPLAYER_V_SINK].gst) {
- GstPad *sink_pad = NULL;
- sink_pad = gst_element_get_static_pad(videobin[MMPLAYER_V_SINK].gst, "sink");
- if (sink_pad) {
- MMPLAYER_SIGNAL_CONNECT(player, sink_pad, MM_PLAYER_SIGNAL_TYPE_VIDEOBIN,
- "notify::caps", G_CALLBACK(__mmplayer_gst_caps_notify_cb), player);
- gst_object_unref(GST_OBJECT(sink_pad));
- } else
- LOGW("failed to get sink pad from videosink\n");
- }
-
- /* store it as it's sink element */
- __mmplayer_add_sink(player, videobin[MMPLAYER_V_SINK].gst);
-
- /* adding created elements to bin */
- if (!__mmplayer_gst_element_add_bucket_to_bin(GST_BIN(videobin[MMPLAYER_V_BIN].gst), element_bucket)) {
- LOGE("failed to add elements\n");
- goto ERROR;
- }
-
- /* Linking elements in the bucket by added order */
- if (__mmplayer_gst_element_link_bucket(element_bucket) == -1) {
- LOGE("failed to link elements\n");
- goto ERROR;
- }
-
- /* get first element's sinkpad for creating ghostpad */
- if (element_bucket)
- first_element = (MMPlayerGstElement *)element_bucket->data;
- if (!first_element) {
- LOGE("failed to get first element from bucket\n");
- goto ERROR;
- }
-
- pad = gst_element_get_static_pad(GST_ELEMENT(first_element->gst), "sink");
- if (!pad) {
- LOGE("failed to get pad from first element\n");
- goto ERROR;
- }
-
- /* create ghostpad */
- player->ghost_pad_for_videobin = gst_ghost_pad_new("sink", pad);
- if (FALSE == gst_element_add_pad(videobin[MMPLAYER_V_BIN].gst, player->ghost_pad_for_videobin)) {
- LOGE("failed to add ghostpad to videobin\n");
- goto ERROR;
- }
- gst_object_unref(pad);
-
- /* done. free allocated variables */
- if (element_bucket)
- g_list_free(element_bucket);
-
- MMPLAYER_FLEAVE();
-
- return MM_ERROR_NONE;
-
-ERROR:
- LOGE("ERROR : releasing videobin\n");
-
- g_list_free(element_bucket);
-
- if (pad)
- gst_object_unref(GST_OBJECT(pad));
-
- /* release videobin with it's childs */
- if (videobin[MMPLAYER_V_BIN].gst)
- gst_object_unref(GST_OBJECT(videobin[MMPLAYER_V_BIN].gst));
-
-
- MMPLAYER_FREEIF(videobin);
-
- player->pipeline->videobin = NULL;
-
- return MM_ERROR_PLAYER_INTERNAL;
-}
-
-static int __mmplayer_gst_create_plain_text_elements(mm_player_t* player)
-{
- GList *element_bucket = NULL;
- MMPlayerGstElement *textbin = player->pipeline->textbin;
-
- MMPLAYER_CREATE_ELEMENT(textbin, MMPLAYER_T_QUEUE, "queue", "text_queue", TRUE, player);
- MMPLAYER_CREATE_ELEMENT(textbin, MMPLAYER_T_IDENTITY, "identity", "text_identity", TRUE, player);
- g_object_set(G_OBJECT(textbin[MMPLAYER_T_IDENTITY].gst),
- "signal-handoffs", FALSE,
- NULL);
-
- MMPLAYER_CREATE_ELEMENT(textbin, MMPLAYER_T_FAKE_SINK, "fakesink", "text_fakesink", TRUE, player);
- MMPLAYER_SIGNAL_CONNECT(player,
- G_OBJECT(textbin[MMPLAYER_T_FAKE_SINK].gst),
- MM_PLAYER_SIGNAL_TYPE_TEXTBIN,
- "handoff",
- G_CALLBACK(__mmplayer_update_subtitle),
- (gpointer)player);
-
- g_object_set(G_OBJECT(textbin[MMPLAYER_T_FAKE_SINK].gst), "sync", TRUE, NULL);
- g_object_set(G_OBJECT(textbin[MMPLAYER_T_FAKE_SINK].gst), "signal-handoffs", TRUE, NULL);
-
- if (!player->play_subtitle) {
- LOGD("add textbin sink as sink element of whole pipeline.\n");
- __mmplayer_add_sink(player, GST_ELEMENT(textbin[MMPLAYER_T_FAKE_SINK].gst));
- }
-
- /* adding created elements to bin */
- LOGD("adding created elements to bin\n");
- if (!__mmplayer_gst_element_add_bucket_to_bin(GST_BIN(textbin[MMPLAYER_T_BIN].gst), element_bucket)) {
- LOGE("failed to add elements\n");
- goto ERROR;
- }
-
- /* unset sink flag from textbin. not to hold eos when video data is shorter than subtitle */
- GST_OBJECT_FLAG_UNSET(textbin[MMPLAYER_T_BIN].gst, GST_ELEMENT_FLAG_SINK);
- GST_OBJECT_FLAG_UNSET(textbin[MMPLAYER_T_FAKE_SINK].gst, GST_ELEMENT_FLAG_SINK);
-
- /* linking elements in the bucket by added order. */
- LOGD("Linking elements in the bucket by added order.\n");
- if (__mmplayer_gst_element_link_bucket(element_bucket) == -1) {
- LOGE("failed to link elements\n");
- goto ERROR;
- }
-
- /* done. free allocated variables */
- g_list_free(element_bucket);
-
- if (textbin[MMPLAYER_T_QUEUE].gst) {
- GstPad *pad = NULL;
- GstPad *ghostpad = NULL;
-
- pad = gst_element_get_static_pad(GST_ELEMENT(textbin[MMPLAYER_T_QUEUE].gst), "sink");
- if (!pad) {
- LOGE("failed to get sink pad of text queue");
- goto ERROR;
- }
-
- ghostpad = gst_ghost_pad_new("text_sink", pad);
- gst_object_unref(pad);
-
- if (!ghostpad) {
- LOGE("failed to create ghostpad of textbin\n");
- goto ERROR;
- }
-
- if (!gst_element_add_pad(textbin[MMPLAYER_T_BIN].gst, ghostpad)) {
- LOGE("failed to add ghostpad to textbin\n");
- gst_object_unref(ghostpad);
- goto ERROR;
- }
- }
-
- return MM_ERROR_NONE;
-
-ERROR:
- g_list_free(element_bucket);
-
- if (!player->play_subtitle && textbin[MMPLAYER_T_FAKE_SINK].gst) {
- LOGE("remove textbin sink from sink list");
- __mmplayer_del_sink(player, textbin[MMPLAYER_T_FAKE_SINK].gst);
- }
-
- /* release element at __mmplayer_gst_create_text_sink_bin */
- return MM_ERROR_PLAYER_INTERNAL;
-}
-
-static int __mmplayer_gst_create_text_sink_bin(mm_player_t* player)
-{
- MMPlayerGstElement *textbin = NULL;
- GList *element_bucket = NULL;
- int surface_type = 0;
- gint i = 0;
-
- MMPLAYER_FENTER();
-
- MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED);
-
- /* alloc handles */
- textbin = (MMPlayerGstElement*)g_malloc0(sizeof(MMPlayerGstElement) * MMPLAYER_T_NUM);
- if (!textbin) {
- LOGE("failed to allocate memory for textbin\n");
- return MM_ERROR_PLAYER_NO_FREE_SPACE;
- }
-
- /* create bin */
- textbin[MMPLAYER_T_BIN].id = MMPLAYER_T_BIN;
- textbin[MMPLAYER_T_BIN].gst = gst_bin_new("textbin");
- if (!textbin[MMPLAYER_T_BIN].gst) {
- LOGE("failed to create textbin\n");
- goto ERROR;
- }
-
- /* take it */
- player->pipeline->textbin = textbin;
-
- /* fakesink */
- mm_attrs_get_int_by_name(player->attrs, "display_surface_type", &surface_type);
- LOGD("surface type for subtitle : %d", surface_type);
- switch (surface_type) {
- case MM_DISPLAY_SURFACE_OVERLAY:
- case MM_DISPLAY_SURFACE_NULL:
- case MM_DISPLAY_SURFACE_REMOTE:
- if (__mmplayer_gst_create_plain_text_elements(player) != MM_ERROR_NONE) {
- LOGE("failed to make plain text elements\n");
- goto ERROR;
- }
- break;
- default:
- goto ERROR;
- break;
- }
-
- MMPLAYER_FLEAVE();
-
- return MM_ERROR_NONE;
-
-ERROR:
-
- LOGD("ERROR : releasing textbin\n");
-
- g_list_free(element_bucket);
-
- /* release signal */
- __mmplayer_release_signal_connection(player, MM_PLAYER_SIGNAL_TYPE_TEXTBIN);
-
- /* release element which are not added to bin */
- for (i = 1; i < MMPLAYER_T_NUM; i++) {
- /* NOTE : skip bin */
- if (textbin[i].gst) {
- GstObject* parent = NULL;
- parent = gst_element_get_parent(textbin[i].gst);
-
- if (!parent) {
- gst_object_unref(GST_OBJECT(textbin[i].gst));
- textbin[i].gst = NULL;
- } else {
- gst_object_unref(GST_OBJECT(parent));
- }
- }
- }
-
- /* release textbin with it's childs */
- if (textbin[MMPLAYER_T_BIN].gst)
- gst_object_unref(GST_OBJECT(textbin[MMPLAYER_T_BIN].gst));
-
- MMPLAYER_FREEIF(player->pipeline->textbin);
- player->pipeline->textbin = NULL;
-
- MMPLAYER_FLEAVE();
- return MM_ERROR_PLAYER_INTERNAL;
-}
-
-
-static int
-__mmplayer_gst_create_text_pipeline(mm_player_t* player)
-{
- MMPlayerGstElement* mainbin = NULL;
- MMPlayerGstElement* textbin = NULL;
- MMHandleType attrs = 0;
- GstElement *subsrc = NULL;
- GstElement *subparse = NULL;
- gchar *subtitle_uri = NULL;
- const gchar *charset = NULL;
- GstPad *pad = NULL;
-
- MMPLAYER_FENTER();
-
- /* get mainbin */
- MMPLAYER_RETURN_VAL_IF_FAIL(player &&
- player->pipeline &&
- player->pipeline->mainbin, MM_ERROR_PLAYER_NOT_INITIALIZED);
-
- mainbin = player->pipeline->mainbin;
-
- attrs = MMPLAYER_GET_ATTRS(player);
- if (!attrs) {
- LOGE("cannot get content attribute\n");
- return MM_ERROR_PLAYER_INTERNAL;
- }
-
- mm_attrs_get_string_by_name(attrs, "subtitle_uri", &subtitle_uri);
- if (!subtitle_uri || strlen(subtitle_uri) < 1) {
- LOGE("subtitle uri is not proper filepath.\n");
- return MM_ERROR_PLAYER_INVALID_URI;
- }
-
- if (!util_get_storage_info(subtitle_uri, &player->storage_info[MMPLAYER_PATH_TEXT])) {
- LOGE("failed to get storage info of subtitle path");
- return MM_ERROR_PLAYER_INVALID_URI;
- }
-
- SECURE_LOGD("subtitle file path is [%s].\n", subtitle_uri);
-
- MMPLAYER_SUBTITLE_INFO_LOCK(player);
- player->subtitle_language_list = NULL;
- MMPLAYER_SUBTITLE_INFO_UNLOCK(player);
-
- /* create the subtitle source */
- subsrc = gst_element_factory_make("filesrc", "subtitle_source");
- if (!subsrc) {
- LOGE("failed to create filesrc element\n");
- goto ERROR;
- }
- g_object_set(G_OBJECT(subsrc), "location", subtitle_uri, NULL);
+ structure = gst_caps_get_structure(caps, 0);
+ gst_structure_get_int(structure, "width", &(stream->width));
+ gst_structure_get_int(structure, "height", &(stream->height));
+ string_format = gst_structure_get_string(structure, "format");
+ if (string_format)
+ fourcc = _mmplayer_convert_fourcc_string_to_value(string_format);
+ stream->format = util_get_pixtype(fourcc);
+ gst_caps_unref(caps);
+ caps = NULL;
- mainbin[MMPLAYER_M_SUBSRC].id = MMPLAYER_M_SUBSRC;
- mainbin[MMPLAYER_M_SUBSRC].gst = subsrc;
+ __mmplayer_get_video_angle(player, NULL, &stream->orientation);
- if (!gst_bin_add(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), subsrc)) {
- LOGW("failed to add queue\n");
- gst_object_unref(mainbin[MMPLAYER_M_SUBSRC].gst);
- mainbin[MMPLAYER_M_SUBSRC].gst = NULL;
- mainbin[MMPLAYER_M_SUBSRC].id = MMPLAYER_M_NUM;
- goto ERROR;
- }
+ /*
+ LOGD("Call video steramCb, data[%p], Width[%d],Height[%d], Format[%d]",
+ GST_BUFFER_DATA(buffer), stream.width, stream.height, stream.format);
+ */
- /* subparse */
- subparse = gst_element_factory_make("subparse", "subtitle_parser");
- if (!subparse) {
- LOGE("failed to create subparse element\n");
+ if (stream->width == 0 || stream->height == 0 || stream->format == MM_PIXEL_FORMAT_INVALID) {
+ LOGE("Wrong condition!!");
goto ERROR;
}
- charset = util_get_charset(subtitle_uri);
- if (charset) {
- LOGD("detected charset is %s\n", charset);
- g_object_set(G_OBJECT(subparse), "subtitle-encoding", charset, NULL);
- }
-
- mainbin[MMPLAYER_M_SUBPARSE].id = MMPLAYER_M_SUBPARSE;
- mainbin[MMPLAYER_M_SUBPARSE].gst = subparse;
-
- if (!gst_bin_add(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), subparse)) {
- LOGW("failed to add subparse\n");
- gst_object_unref(mainbin[MMPLAYER_M_SUBPARSE].gst);
- mainbin[MMPLAYER_M_SUBPARSE].gst = NULL;
- mainbin[MMPLAYER_M_SUBPARSE].id = MMPLAYER_M_NUM;
- goto ERROR;
- }
+ /* set size and timestamp */
+ dataBlock = gst_buffer_peek_memory(buffer, 0);
+ stream->length_total = gst_memory_get_sizes(dataBlock, NULL, NULL);
+ stream->timestamp = (unsigned int)(GST_TIME_AS_MSECONDS(GST_BUFFER_PTS(buffer))); /* nano sec -> mili sec */
- if (!gst_element_link_pads(subsrc, "src", subparse, "sink")) {
- LOGW("failed to link subsrc and subparse\n");
- goto ERROR;
+ /* check zero-copy */
+ if (player->set_mode.video_zc &&
+ player->set_mode.media_packet_video_stream &&
+ gst_buffer_n_memory(buffer) > 1) {
+ metaBlock = gst_buffer_peek_memory(buffer, 1);
+ gst_memory_map(metaBlock, &mapinfo, GST_MAP_READ);
+ video_buffer = (MMVideoBuffer *)mapinfo.data;
}
- player->play_subtitle = TRUE;
- player->adjust_subtitle_pos = 0;
-
- LOGD("play subtitle using subtitle file\n");
+ if (video_buffer) { /* hw codec */
+ /* set tbm bo */
+ if (video_buffer->type == MM_VIDEO_BUFFER_TYPE_TBM_BO) {
+ int i = 0;
- if (player->pipeline->textbin == NULL) {
- if (MM_ERROR_NONE != __mmplayer_gst_create_text_sink_bin(player)) {
- LOGE("failed to create text sink bin. continuing without text\n");
+ /* copy pointer of tbm bo, stride, elevation */
+ while (i < MM_VIDEO_BUFFER_PLANE_MAX && video_buffer->handle.bo[i]) {
+ stream->bo[i] = tbm_bo_ref(video_buffer->handle.bo[i]);
+ i++;
+ }
+ } else {
+ LOGE("Not support video buffer format");
goto ERROR;
}
+ memcpy(stream->stride, video_buffer->stride_width,
+ sizeof(int) * MM_VIDEO_BUFFER_PLANE_MAX);
+ memcpy(stream->elevation, video_buffer->stride_height,
+ sizeof(int) * MM_VIDEO_BUFFER_PLANE_MAX);
- textbin = player->pipeline->textbin;
-
- if (!gst_bin_add(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), GST_ELEMENT(textbin[MMPLAYER_T_BIN].gst))) {
- LOGW("failed to add textbin\n");
-
- /* release signal */
- __mmplayer_release_signal_connection(player, MM_PLAYER_SIGNAL_TYPE_TEXTBIN);
+ /* will be released, by calling _mm_player_video_stream_internal_buffer_unref() */
+ stream->internal_buffer = gst_buffer_ref(buffer);
+ } else { /* sw codec */
+ int i = 0;
+ int j = 0;
+ int k = 0;
+ int ret = TBM_SURFACE_ERROR_NONE;
+ int src_stride[MM_PLAYER_IMGB_MPLANE_MAX] = { 0, };
+ int src_offset[MM_PLAYER_IMGB_MPLANE_MAX] = { 0, };
+ int size = 0;
+ unsigned char *src = NULL;
+ unsigned char *dest = NULL;
+ tbm_bo_handle thandle;
+ tbm_surface_h surface;
+ tbm_surface_info_s info;
+ gboolean gst_ret;
- /* release textbin with it's childs */
- gst_object_unref(GST_OBJECT(textbin[MMPLAYER_T_BIN].gst));
- MMPLAYER_FREEIF(player->pipeline->textbin);
- player->pipeline->textbin = textbin = NULL;
+ gst_ret = gst_memory_map(dataBlock, &mapinfo, GST_MAP_READWRITE);
+ if (!gst_ret) {
+ LOGE("fail to gst_memory_map");
goto ERROR;
}
- LOGD("link text input selector and textbin ghost pad");
-
- player->textsink_linked = 1;
- player->external_text_idx = 0;
- LOGI("player->textsink_linked set to 1\n");
- } else {
- textbin = player->pipeline->textbin;
- LOGD("text bin has been created. reuse it.");
- player->external_text_idx = 1;
- }
-
- if (!gst_element_link_pads(subparse, "src", textbin[MMPLAYER_T_BIN].gst, "text_sink")) {
- LOGW("failed to link subparse and textbin\n");
- goto ERROR;
- }
-
- pad = gst_element_get_static_pad(textbin[MMPLAYER_T_FAKE_SINK].gst, "sink");
- if (!pad) {
- LOGE("failed to get sink pad from textsink to probe data");
- goto ERROR;
- }
-
- gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER,
- __mmplayer_subtitle_adjust_position_probe, player, NULL);
-
- gst_object_unref(pad);
- pad = NULL;
-
- /* create dot. for debugging */
- MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-with-subtitle");
- MMPLAYER_FLEAVE();
-
- return MM_ERROR_NONE;
-
-ERROR:
- /* release text pipeline resource */
- player->textsink_linked = 0;
-
- /* release signal */
- __mmplayer_release_signal_connection(player, MM_PLAYER_SIGNAL_TYPE_TEXTBIN);
-
- if (player->pipeline->textbin) {
- LOGE("remove textbin");
-
- /* release textbin with it's childs */
- MMPLAYER_RELEASE_ELEMENT(player, player->pipeline->textbin, MMPLAYER_T_BIN);
- MMPLAYER_FREEIF(player->pipeline->textbin);
- player->pipeline->textbin = NULL;
-
- }
-
- /* release subtitle elem */
- MMPLAYER_RELEASE_ELEMENT(player, player->pipeline->mainbin, MMPLAYER_M_SUBSRC);
- MMPLAYER_RELEASE_ELEMENT(player, player->pipeline->mainbin, MMPLAYER_M_SUBPARSE);
- return MM_ERROR_PLAYER_INTERNAL;
-}
+ if (stream->format == MM_PIXEL_FORMAT_I420) {
+ surface = tbm_surface_create(stream->width, stream->height, TBM_FORMAT_YUV420);
-gboolean
-__mmplayer_update_subtitle(GstElement* object, GstBuffer *buffer, GstPad *pad, gpointer data)
-{
- mm_player_t* player = (mm_player_t*) data;
- MMMessageParamType msg = {0, };
- GstClockTime duration = 0;
- gpointer text = NULL;
- guint text_size = 0;
- gboolean ret = TRUE;
- GstMapInfo mapinfo = GST_MAP_INFO_INIT;
+ ret = tbm_surface_get_info(surface, &info);
- MMPLAYER_FENTER();
+ if (ret != TBM_SURFACE_ERROR_NONE) {
+ tbm_surface_destroy(surface);
+ goto ERROR;
+ }
+ tbm_surface_destroy(surface);
- MMPLAYER_RETURN_VAL_IF_FAIL(player, FALSE);
- MMPLAYER_RETURN_VAL_IF_FAIL(buffer, FALSE);
+ src_stride[0] = GST_ROUND_UP_4(stream->width);
+ src_stride[1] = src_stride[2] = GST_ROUND_UP_4(stream->width>>1);
+ src_offset[1] = src_stride[0] * GST_ROUND_UP_2(stream->height);
+ src_offset[2] = src_offset[1] + (src_stride[1] * (GST_ROUND_UP_2(stream->height)>>1));
+ stream->stride[0] = info.planes[0].stride;
+ stream->elevation[0] = info.planes[0].size / info.planes[0].stride;
+ stream->stride[1] = info.planes[1].stride;
+ stream->elevation[1] = info.planes[1].size / info.planes[1].stride;
+ stream->stride[2] = info.planes[2].stride;
+ stream->elevation[2] = info.planes[2].size / info.planes[2].stride;
+ size = info.planes[0].size + info.planes[1].size + info.planes[2].size;
+ } else if (stream->format == MM_PIXEL_FORMAT_RGBA) {
+ stream->stride[0] = stream->width * 4;
+ stream->elevation[0] = stream->height;
+ size = stream->stride[0] * stream->height;
+ } else {
+ LOGE("Not support format %d", stream->format);
+ goto ERROR;
+ }
- if (player->is_subtitle_force_drop) {
- LOGW("subtitle is dropped forcedly.");
- return ret;
- }
+ stream->bo[0] = __mmplayer_video_stream_get_bo(player, size);
+ if (!stream->bo[0]) {
+ LOGE("Fail to tbm_bo_alloc!!");
+ goto ERROR;
+ }
- gst_buffer_map(buffer, &mapinfo, GST_MAP_READ);
- text = mapinfo.data;
- text_size = mapinfo.size;
- duration = GST_BUFFER_DURATION(buffer);
+ thandle = tbm_bo_map(stream->bo[0], TBM_DEVICE_CPU, TBM_OPTION_WRITE);
+ if (thandle.ptr && mapinfo.data) {
+ if (stream->format == MM_PIXEL_FORMAT_I420) {
+ for (i = 0; i < 3; i++) {
+ src = mapinfo.data + src_offset[i];
+ dest = thandle.ptr + info.planes[i].offset;
- if (player->set_mode.subtitle_off) {
- LOGD("subtitle is OFF.\n");
- return TRUE;
+ if (i > 0) k = 1;
+ for (j = 0; j < stream->height>>k; j++) {
+ memcpy(dest, src, stream->width>>k);
+ src += src_stride[i];
+ dest += stream->stride[i];
+ }
+ }
+ } else if (stream->format == MM_PIXEL_FORMAT_RGBA) {
+ memcpy(thandle.ptr, mapinfo.data, size);
+ } else {
+ LOGE("Not support format %d", stream->format);
+ goto ERROR;
+ }
+ } else {
+ LOGE("data pointer is wrong. dest : %p, src : %p",
+ thandle.ptr, mapinfo.data);
+ goto ERROR;
+ }
+ tbm_bo_unmap(stream->bo[0]);
}
- if (!text || (text_size == 0)) {
- LOGD("There is no subtitle to be displayed.\n");
- return TRUE;
+ if (player->video_stream_cb) { /* This has been already checked at the entry */
+ if (!player->video_stream_cb(stream, player->video_stream_cb_user_param)) {
+ LOGE("failed to send video stream data.");
+ goto ERROR;
+ }
}
- msg.data = (void *) text;
- msg.subtitle.duration = GST_TIME_AS_MSECONDS(duration);
+ if (metaBlock)
+ gst_memory_unmap(metaBlock, &mapinfo);
+ else
+ gst_memory_unmap(dataBlock, &mapinfo);
- LOGD("update subtitle : [%ld msec] %s\n'", msg.subtitle.duration, (char*)msg.data);
+ return;
- MMPLAYER_POST_MSG(player, MM_MESSAGE_UPDATE_SUBTITLE, &msg);
- gst_buffer_unmap(buffer, &mapinfo);
+ERROR:
+ LOGE("release video stream resource.");
+ if (metaBlock) {
+ int i = 0;
+ for (i = 0 ; i < MM_VIDEO_BUFFER_PLANE_MAX ; i++) {
+ if (stream->bo[i])
+ tbm_bo_unref(stream->bo[i]);
+ }
+ gst_memory_unmap(metaBlock, &mapinfo);
- MMPLAYER_FLEAVE();
+ /* unref gst buffer */
+ if (stream->internal_buffer)
+ gst_buffer_unref(stream->internal_buffer);
+ } else if (dataBlock) {
+ if (stream->bo[0])
+ _mmplayer_video_stream_release_bo(player, stream->bo[0]);
+ gst_memory_unmap(dataBlock, &mapinfo);
+ }
- return ret;
+ g_free(stream);
+ return;
}
-static GstPadProbeReturn
-__mmplayer_subtitle_adjust_position_probe(GstPad *pad, GstPadProbeInfo *info, gpointer u_data)
+static int
+__mmplayer_gst_create_video_filters(mm_player_t* player, GList** bucket)
{
- mm_player_t *player = (mm_player_t *) u_data;
- GstClockTime cur_timestamp = 0;
- gint64 adjusted_timestamp = 0;
- GstBuffer *buffer = gst_pad_probe_info_get_buffer(info);
-
- MMPLAYER_RETURN_VAL_IF_FAIL(player, FALSE);
-
- if (player->set_mode.subtitle_off) {
- LOGD("subtitle is OFF.\n");
- return TRUE;
- }
-
- if (player->adjust_subtitle_pos == 0) {
- LOGD("nothing to do");
- return TRUE;
- }
-
- cur_timestamp = GST_BUFFER_TIMESTAMP(buffer);
- adjusted_timestamp = (gint64) cur_timestamp +((gint64) player->adjust_subtitle_pos * G_GINT64_CONSTANT(1000000));
-
- if (adjusted_timestamp < 0) {
- LOGD("adjusted_timestamp under zero");
- MMPLAYER_FLEAVE();
- return FALSE;
- }
+ gchar* video_csc = "videoconvert"; /* default colorspace converter */
+ GList* element_bucket = NULL;
- GST_BUFFER_TIMESTAMP(buffer) = (GstClockTime) adjusted_timestamp;
- LOGD("buffer timestamp changed %" GST_TIME_FORMAT " -> %" GST_TIME_FORMAT "",
- GST_TIME_ARGS(cur_timestamp),
- GST_TIME_ARGS(GST_BUFFER_TIMESTAMP(buffer)));
+ MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline && player->pipeline->videobin, MM_ERROR_PLAYER_NOT_INITIALIZED);
- return GST_PAD_PROBE_OK;
-}
-static int __gst_adjust_subtitle_position(mm_player_t* player, int format, int position)
-{
MMPLAYER_FENTER();
- /* check player and subtitlebin are created */
- MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED);
- MMPLAYER_RETURN_VAL_IF_FAIL(player->play_subtitle, MM_ERROR_NOT_SUPPORT_API);
-
- if (position == 0) {
- LOGD("nothing to do\n");
- MMPLAYER_FLEAVE();
+ if (player->set_mode.video_zc || (player->is_360_feature_enabled && player->is_content_spherical)) {
+ LOGD("do not need to add video filters.");
return MM_ERROR_NONE;
}
- switch (format) {
- case MM_PLAYER_POS_FORMAT_TIME:
- {
- /* check current postion */
- player->adjust_subtitle_pos = position;
-
- LOGD("save adjust_subtitle_pos in player") ;
- }
- break;
+ /* in case of sw codec except 360 playback,
+ * if libav video decoder is selected, videoconvert is required to render the shm wl-buffer which support RGB only via tizenwlsink. */
+ MMPLAYER_CREATE_ELEMENT(player->pipeline->videobin, MMPLAYER_V_CONV, video_csc, "video converter", TRUE, player);
+ LOGD("using video converter: %s", video_csc);
- default:
- {
- LOGW("invalid format.\n");
- MMPLAYER_FLEAVE();
- return MM_ERROR_INVALID_ARGUMENT;
- }
- }
+ /* set video rotator */
+ MMPLAYER_CREATE_ELEMENT(player->pipeline->videobin, MMPLAYER_V_FLIP, "videoflip", "video rotator", TRUE, player);
+ *bucket = element_bucket;
MMPLAYER_FLEAVE();
-
return MM_ERROR_NONE;
+
+ERROR: /* refer MMPLAYER_CREATE_ELEMENT */
+ g_list_free(element_bucket);
+
+ *bucket = NULL;
+ MMPLAYER_FLEAVE();
+ return MM_ERROR_PLAYER_INTERNAL;
}
-static void
-__gst_appsrc_feed_data_mem(GstElement *element, guint size, gpointer user_data)
+/**
+ * This function is to create video pipeline.
+ *
+ * @param player [in] handle of player
+ * caps [in] src caps of decoder
+ * surface_type [in] surface type for video rendering
+ *
+ * @return This function returns zero on success.
+ * @remark
+ * @see __mmplayer_gst_create_audio_pipeline, __mmplayer_gst_create_midi_pipeline
+ */
+/**
+ * VIDEO PIPELINE
+ * - video overlay surface(arm/x86) : tizenwlsink
+ */
+static int
+__mmplayer_gst_create_video_pipeline(mm_player_t* player, GstCaps* caps, MMDisplaySurfaceType surface_type)
{
- GstElement *appsrc = element;
- MMPlayerInputBuffer *buf = (MMPlayerInputBuffer *)user_data;
- GstBuffer *buffer = NULL;
- GstFlowReturn ret = GST_FLOW_OK;
- gint len = size;
+ GstPad *pad = NULL;
+ MMHandleType attrs;
+ GList*element_bucket = NULL;
+ MMPlayerGstElement* first_element = NULL;
+ MMPlayerGstElement* videobin = NULL;
+ gchar *videosink_element = NULL;
- MMPLAYER_RETURN_IF_FAIL(element);
- MMPLAYER_RETURN_IF_FAIL(buf);
+ MMPLAYER_FENTER();
- buffer = gst_buffer_new();
+ MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED);
- if (buf->offset >= buf->len) {
- LOGD("call eos appsrc\n");
- g_signal_emit_by_name(appsrc, "end-of-stream", &ret);
- return;
+ /* alloc handles */
+ videobin = (MMPlayerGstElement*)g_malloc0(sizeof(MMPlayerGstElement) * MMPLAYER_V_NUM);
+ if (!videobin)
+ return MM_ERROR_PLAYER_NO_FREE_SPACE;
+
+ player->pipeline->videobin = videobin;
+
+ attrs = MMPLAYER_GET_ATTRS(player);
+ if (!attrs) {
+ LOGE("cannot get content attribute");
+ return MM_ERROR_PLAYER_INTERNAL;
}
- if (buf->len - buf->offset < size)
- len = buf->len - buf->offset;
+ /* create bin */
+ videobin[MMPLAYER_V_BIN].id = MMPLAYER_V_BIN;
+ videobin[MMPLAYER_V_BIN].gst = gst_bin_new("videobin");
+ if (!videobin[MMPLAYER_V_BIN].gst) {
+ LOGE("failed to create videobin");
+ goto ERROR;
+ }
- gst_buffer_insert_memory(buffer, -1, gst_memory_new_wrapped(0, (guint8 *)(buf->buf + buf->offset), len, 0, len, NULL, NULL));
- GST_BUFFER_OFFSET(buffer) = (guint64)buf->offset;
- GST_BUFFER_OFFSET_END(buffer) = (guint64)(buf->offset + len);
+ int enable_video_decoded_cb = 0;
+ mm_attrs_get_int_by_name(player->attrs, "enable_video_decoded_cb", &enable_video_decoded_cb);
- //LOGD("feed buffer %p, offset %u-%u length %u", buffer, buf->offset, (buf->offset+len), len);
- g_signal_emit_by_name(appsrc, "push-buffer", buffer, &ret);
+ if (player->is_360_feature_enabled && player->is_content_spherical) {
+ LOGD("video360 elem will be added.");
- buf->offset += len;
-}
+ MMPLAYER_CREATE_ELEMENT(videobin, MMPLAYER_V_360, "video360",
+ "video-360", TRUE, player);
-static gboolean
-__gst_appsrc_seek_data_mem(GstElement *element, guint64 size, gpointer user_data)
-{
- MMPlayerInputBuffer *buf = (MMPlayerInputBuffer *)user_data;
+ /* Set spatial media metadata and/or user settings to the element.
+ * */
+ g_object_set(G_OBJECT(videobin[MMPLAYER_V_360].gst),
+ "projection-type", player->video360_metadata.projection_type, NULL);
- MMPLAYER_RETURN_VAL_IF_FAIL(buf, FALSE);
+ g_object_set(G_OBJECT(videobin[MMPLAYER_V_360].gst),
+ "stereo-mode", player->video360_metadata.stereo_mode, NULL);
- buf->offset = (int)size;
+ if (player->video360_metadata.full_pano_width_pixels &&
+ player->video360_metadata.full_pano_height_pixels &&
+ player->video360_metadata.cropped_area_image_width &&
+ player->video360_metadata.cropped_area_image_height) {
+ g_object_set(G_OBJECT(videobin[MMPLAYER_V_360].gst),
+ "projection-bounds-top", player->video360_metadata.cropped_area_top,
+ "projection-bounds-bottom", player->video360_metadata.full_pano_height_pixels -
+ player->video360_metadata.cropped_area_top - player->video360_metadata.cropped_area_image_height,
+ "projection-bounds-left", player->video360_metadata.cropped_area_left,
+ "projection-bounds-right", player->video360_metadata.full_pano_width_pixels -
+ player->video360_metadata.cropped_area_left - player->video360_metadata.cropped_area_image_width,
+ NULL);
+ }
- return TRUE;
-}
+ if (player->video360_horizontal_fov && player->video360_vertical_fov) {
+ g_object_set(G_OBJECT(videobin[MMPLAYER_V_360].gst),
+ "horizontal-fov", player->video360_horizontal_fov,
+ "vertical-fov", player->video360_vertical_fov, NULL);
+ }
-static GstBusSyncReply
-__mmplayer_bus_sync_callback(GstBus * bus, GstMessage * message, gpointer data)
-{
- mm_player_t *player = (mm_player_t *)data;
- GstBusSyncReply reply = GST_BUS_DROP;
+ if (player->video360_zoom <= VIDEO360_MAX_ZOOM && player->video360_zoom > 1.0f) {
+ g_object_set(G_OBJECT(videobin[MMPLAYER_V_360].gst),
+ "zoom", 1.0f / player->video360_zoom, NULL);
+ }
- if (!(player->pipeline && player->pipeline->mainbin)) {
- LOGE("player pipeline handle is null");
- return GST_BUS_PASS;
- }
+ if (player->video360_yaw_radians <= M_PI &&
+ player->video360_yaw_radians >= -M_PI &&
+ player->video360_pitch_radians <= M_PI_2 &&
+ player->video360_pitch_radians >= -M_PI_2) {
+ g_object_set(G_OBJECT(videobin[MMPLAYER_V_360].gst),
+ "pose-yaw", (int) (player->video360_yaw_radians * 180.0 / M_PI),
+ "pose-pitch", (int) (player->video360_pitch_radians * 180.0 / M_PI), NULL);
+ } else if (player->video360_metadata.init_view_heading || player->video360_metadata.init_view_pitch) {
+ g_object_set(G_OBJECT(videobin[MMPLAYER_V_360].gst),
+ "pose-yaw", player->video360_metadata.init_view_heading,
+ "pose-pitch", player->video360_metadata.init_view_pitch, NULL);
+ }
- if (!__mmplayer_check_useful_message(player, message)) {
- gst_message_unref(message);
- return GST_BUS_DROP;
+ g_object_set(G_OBJECT(videobin[MMPLAYER_V_360].gst),
+ "passthrough", !player->is_video360_enabled, NULL);
}
- switch (GST_MESSAGE_TYPE(message)) {
- case GST_MESSAGE_STATE_CHANGED:
- /* post directly for fast launch */
- if (player->sync_handler) {
- __mmplayer_gst_callback(message, player);
- reply = GST_BUS_DROP;
- } else
- reply = GST_BUS_PASS;
+ /* set video sink */
+ switch (surface_type) {
+ case MM_DISPLAY_SURFACE_OVERLAY:
+ if (__mmplayer_gst_create_video_filters(player, &element_bucket) != MM_ERROR_NONE)
+ goto ERROR;
+ if (strlen(player->ini.videosink_element_overlay) > 0)
+ videosink_element = player->ini.videosink_element_overlay;
+ else
+ goto ERROR;
break;
- case GST_MESSAGE_TAG:
- __mmplayer_gst_extract_tag_from_msg(player, message);
-
- #if 0 // debug
- {
- GstTagList *tags = NULL;
-
- gst_message_parse_tag(message, &tags);
- if (tags) {
- LOGE("TAGS received from element \"%s\".\n",
- GST_STR_NULL(GST_ELEMENT_NAME(GST_MESSAGE_SRC(message))));
-
- gst_tag_list_foreach(tags, print_tag, NULL);
- gst_tag_list_free(tags);
- tags = NULL;
- }
- break;
- }
- #endif
+ case MM_DISPLAY_SURFACE_NULL:
+ if (strlen(player->ini.videosink_element_fake) > 0)
+ videosink_element = player->ini.videosink_element_fake;
+ else
+ goto ERROR;
break;
-
- case GST_MESSAGE_DURATION_CHANGED:
- __mmplayer_gst_handle_duration(player, message);
+ case MM_DISPLAY_SURFACE_REMOTE:
+ if (strlen(player->ini.videosink_element_fake) > 0)
+ videosink_element = player->ini.videosink_element_fake;
+ else
+ goto ERROR;
break;
- case GST_MESSAGE_ASYNC_DONE:
- /* NOTE:Don't call gst_callback directly
- * because previous frame can be showed even though this message is received for seek.
- */
default:
- reply = GST_BUS_PASS;
- break;
+ LOGE("unidentified surface type");
+ goto ERROR;
}
+ LOGD("surface_type %d, selected videosink name: %s", surface_type, videosink_element);
- if (reply == GST_BUS_DROP)
- gst_message_unref(message);
-
- return reply;
-}
+ MMPLAYER_CREATE_ELEMENT(videobin, MMPLAYER_V_SINK, videosink_element, "videosink", TRUE, player);
-static gboolean
-__mmplayer_gst_create_decoder(mm_player_t *player,
- MMPlayerTrackType track,
- GstPad* srcpad,
- enum MainElementID elemId,
- const gchar* name)
-{
- gboolean ret = TRUE;
- GstPad *sinkpad = NULL;
+ /* additional setting for sink plug-in */
+ switch (surface_type) {
+ case MM_DISPLAY_SURFACE_OVERLAY:
+ {
+ bool use_tbm = (player->set_mode.video_zc || (player->is_360_feature_enabled && player->is_content_spherical));
+ if (!use_tbm) {
+ LOGD("selected videosink name: %s", videosink_element);
- MMPLAYER_FENTER();
+ /* support shard memory with S/W codec on HawkP */
+ if (strncmp(videosink_element, "tizenwlsink", strlen(videosink_element)) == 0) {
+ g_object_set(player->pipeline->videobin[MMPLAYER_V_SINK].gst,
+ "use-tbm", use_tbm, NULL);
+ }
+ } else {
+ if (attrs) {
+ int gapless = 0;
- MMPLAYER_RETURN_VAL_IF_FAIL(player &&
- player->pipeline &&
- player->pipeline->mainbin, FALSE);
- MMPLAYER_RETURN_VAL_IF_FAIL((track == MM_PLAYER_TRACK_TYPE_AUDIO || track == MM_PLAYER_TRACK_TYPE_VIDEO), FALSE);
- MMPLAYER_RETURN_VAL_IF_FAIL(srcpad, FALSE);
- MMPLAYER_RETURN_VAL_IF_FAIL((player->pipeline->mainbin[elemId].gst == NULL), FALSE);
+ mm_attrs_get_int_by_name(attrs, "gapless_mode", &gapless);
- GstElement *decodebin = NULL;
- GstCaps *dec_caps = NULL;
+ if (gapless > 0) {
+ LOGD("disable last-sample");
+ g_object_set(player->pipeline->videobin[MMPLAYER_V_SINK].gst, "enable-last-sample", FALSE, NULL);
+ }
+ }
+ }
+ if (player->set_mode.media_packet_video_stream) {
+ int enable = 0;
+ mm_attrs_get_int_by_name(player->attrs, "enable_video_decoded_cb", &enable);
+ if (enable)
+ g_object_set(G_OBJECT(player->pipeline->videobin[MMPLAYER_V_SINK].gst), "signal-handoffs", TRUE, NULL);
- /* create decodebin */
- decodebin = gst_element_factory_make("decodebin", name);
+ MMPLAYER_SIGNAL_CONNECT(player,
+ G_OBJECT(player->pipeline->videobin[MMPLAYER_V_SINK].gst),
+ MM_PLAYER_SIGNAL_TYPE_VIDEOBIN,
+ "handoff",
+ G_CALLBACK(__mmplayer_video_stream_decoded_render_cb),
+ (gpointer)player);
- if (!decodebin) {
- LOGE("error : fail to create decodebin for %d decoder\n", track);
- ret = FALSE;
- goto ERROR;
+ MMPLAYER_SIGNAL_CONNECT(player,
+ G_OBJECT(player->pipeline->videobin[MMPLAYER_V_SINK].gst),
+ MM_PLAYER_SIGNAL_TYPE_VIDEOBIN,
+ "preroll-handoff",
+ G_CALLBACK(__mmplayer_video_stream_decoded_preroll_cb),
+ (gpointer)player);
+ }
+ break;
}
+ case MM_DISPLAY_SURFACE_REMOTE:
+ {
+ if (player->set_mode.media_packet_video_stream) {
+ LOGE("add data probe at videosink");
+ g_object_set(G_OBJECT(player->pipeline->videobin[MMPLAYER_V_SINK].gst),
+ "sync", TRUE, "signal-handoffs", TRUE, NULL);
- /* raw pad handling signal */
- MMPLAYER_SIGNAL_CONNECT(player, G_OBJECT(decodebin), MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "pad-added",
- G_CALLBACK(__mmplayer_gst_decode_pad_added), player);
+ MMPLAYER_SIGNAL_CONNECT(player,
+ G_OBJECT(player->pipeline->videobin[MMPLAYER_V_SINK].gst),
+ MM_PLAYER_SIGNAL_TYPE_VIDEOBIN,
+ "handoff",
+ G_CALLBACK(__mmplayer_video_stream_decoded_render_cb),
+ (gpointer)player);
- /* This signal is emitted whenever decodebin finds a new stream. It is emitted
- before looking for any elements that can handle that stream.*/
- MMPLAYER_SIGNAL_CONNECT(player, G_OBJECT(decodebin), MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "autoplug-select",
- G_CALLBACK(__mmplayer_gst_decode_autoplug_select), player);
+ MMPLAYER_SIGNAL_CONNECT(player,
+ G_OBJECT(player->pipeline->videobin[MMPLAYER_V_SINK].gst),
+ MM_PLAYER_SIGNAL_TYPE_VIDEOBIN,
+ "preroll-handoff",
+ G_CALLBACK(__mmplayer_video_stream_decoded_preroll_cb),
+ (gpointer)player);
+ if (attrs) {
+ int gapless = 0;
- /* This signal is emitted when a element is added to the bin.*/
- MMPLAYER_SIGNAL_CONNECT(player, G_OBJECT(decodebin), MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "element-added",
- G_CALLBACK(__mmplayer_gst_element_added), player);
+ mm_attrs_get_int_by_name(attrs, "gapless_mode", &gapless);
- if (!gst_bin_add(GST_BIN(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst), decodebin)) {
- LOGE("failed to add new decodebin\n");
- ret = FALSE;
- goto ERROR;
+ if (gapless > 0) {
+ LOGD("disable last-sample");
+ g_object_set(player->pipeline->videobin[MMPLAYER_V_SINK].gst, "enable-last-sample", FALSE, NULL);
+ }
+ }
+ }
+ break;
}
-
- dec_caps = gst_pad_query_caps(srcpad, NULL);
- if (dec_caps) {
- //LOGD("got pad %s:%s , dec_caps %" GST_PTR_FORMAT, GST_DEBUG_PAD_NAME(srcpad), dec_caps);
- g_object_set(G_OBJECT(decodebin), "sink-caps", dec_caps, NULL);
- gst_caps_unref(dec_caps);
+ default:
+ break;
}
- player->pipeline->mainbin[elemId].id = elemId;
- player->pipeline->mainbin[elemId].gst = decodebin;
-
- sinkpad = gst_element_get_static_pad(decodebin, "sink");
+ if (_mmplayer_update_video_param(player, "update_all_param") != MM_ERROR_NONE)
+ goto ERROR;
- if (GST_PAD_LINK_OK != gst_pad_link(srcpad, sinkpad)) {
- LOGW("failed to link [%s:%s] to decoder\n", GST_DEBUG_PAD_NAME(srcpad));
- gst_object_unref(GST_OBJECT(decodebin));
+ if (videobin[MMPLAYER_V_SINK].gst) {
+ GstPad *sink_pad = NULL;
+ sink_pad = gst_element_get_static_pad(videobin[MMPLAYER_V_SINK].gst, "sink");
+ if (sink_pad) {
+ MMPLAYER_SIGNAL_CONNECT(player, sink_pad, MM_PLAYER_SIGNAL_TYPE_VIDEOBIN,
+ "notify::caps", G_CALLBACK(__mmplayer_gst_caps_notify_cb), player);
+ gst_object_unref(GST_OBJECT(sink_pad));
+ } else
+ LOGW("failed to get sink pad from videosink\n");
}
- if (GST_STATE_CHANGE_FAILURE == gst_element_sync_state_with_parent(decodebin))
- LOGE("failed to sync second level decodebin state with parent\n");
-
- LOGD("Total num of %d tracks = %d \n", track, player->selector[track].total_track_num);
+ /* store it as it's sink element */
+ __mmplayer_add_sink(player, videobin[MMPLAYER_V_SINK].gst);
-ERROR:
- if (sinkpad) {
- gst_object_unref(GST_OBJECT(sinkpad));
- sinkpad = NULL;
+ /* adding created elements to bin */
+ if (!__mmplayer_gst_element_add_bucket_to_bin(GST_BIN(videobin[MMPLAYER_V_BIN].gst), element_bucket)) {
+ LOGE("failed to add elements\n");
+ goto ERROR;
}
- MMPLAYER_FLEAVE();
- return ret;
-}
-
-/**
- * This function is to create audio or video pipeline for playing.
- *
- * @param player [in] handle of player
- *
- * @return This function returns zero on success.
- * @remark
- * @see
- */
-static int
-__mmplayer_gst_create_pipeline(mm_player_t* player)
-{
- GstBus *bus = NULL;
- MMPlayerGstElement *mainbin = NULL;
- MMHandleType attrs = 0;
- GstElement* element = NULL;
- GstElement* elem_src_audio = NULL;
- GstElement* elem_src_subtitle = NULL;
- GstElement* es_video_queue = NULL;
- GstElement* es_audio_queue = NULL;
- GstElement* es_subtitle_queue = NULL;
- GList* element_bucket = NULL;
- gboolean need_state_holder = TRUE;
- gint i = 0;
-#ifdef SW_CODEC_ONLY
- int surface_type = 0;
-#endif
- MMPLAYER_FENTER();
+ /* Linking elements in the bucket by added order */
+ if (__mmplayer_gst_element_link_bucket(element_bucket) == -1) {
+ LOGE("failed to link elements\n");
+ goto ERROR;
+ }
- MMPLAYER_RETURN_VAL_IF_FAIL(player, MM_ERROR_PLAYER_NOT_INITIALIZED);
+ /* get first element's sinkpad for creating ghostpad */
+ if (element_bucket)
+ first_element = (MMPlayerGstElement *)element_bucket->data;
+ if (!first_element) {
+ LOGE("failed to get first element from bucket\n");
+ goto ERROR;
+ }
- /* get profile attribute */
- attrs = MMPLAYER_GET_ATTRS(player);
- if (!attrs) {
- LOGE("cannot get content attribute\n");
- goto INIT_ERROR;
+ pad = gst_element_get_static_pad(GST_ELEMENT(first_element->gst), "sink");
+ if (!pad) {
+ LOGE("failed to get pad from first element\n");
+ goto ERROR;
}
- /* create pipeline handles */
- if (player->pipeline) {
- LOGW("pipeline should be released before create new one\n");
- goto INIT_ERROR;
+ /* create ghostpad */
+ player->ghost_pad_for_videobin = gst_ghost_pad_new("sink", pad);
+ if (FALSE == gst_element_add_pad(videobin[MMPLAYER_V_BIN].gst, player->ghost_pad_for_videobin)) {
+ LOGE("failed to add ghostpad to videobin\n");
+ goto ERROR;
}
+ gst_object_unref(pad);
- player->video360_metadata.is_spherical = -1;
- player->is_openal_plugin_used = FALSE;
+ /* done. free allocated variables */
+ if (element_bucket)
+ g_list_free(element_bucket);
- player->pipeline = (MMPlayerGstPipelineInfo*) g_malloc0(sizeof(MMPlayerGstPipelineInfo));
- if (player->pipeline == NULL)
- goto INIT_ERROR;
+ MMPLAYER_FLEAVE();
- memset(player->pipeline, 0, sizeof(MMPlayerGstPipelineInfo)); /* g_malloc0 did this job already */
+ return MM_ERROR_NONE;
- /* create mainbin */
- mainbin = (MMPlayerGstElement*) g_malloc0(sizeof(MMPlayerGstElement) * MMPLAYER_M_NUM);
- if (mainbin == NULL)
- goto INIT_ERROR;
+ERROR:
+ LOGE("ERROR : releasing videobin\n");
- memset(mainbin, 0, sizeof(MMPlayerGstElement) * MMPLAYER_M_NUM); /* g_malloc0 did this job already */
+ g_list_free(element_bucket);
- /* create pipeline */
- mainbin[MMPLAYER_M_PIPE].id = MMPLAYER_M_PIPE;
- mainbin[MMPLAYER_M_PIPE].gst = gst_pipeline_new("player");
- if (!mainbin[MMPLAYER_M_PIPE].gst) {
- LOGE("failed to create pipeline\n");
- goto INIT_ERROR;
- }
- player->demux_pad_index = 0;
- player->subtitle_language_list = NULL;
+ if (pad)
+ gst_object_unref(GST_OBJECT(pad));
- player->is_subtitle_force_drop = FALSE;
- player->last_multiwin_status = FALSE;
+ /* release videobin with it's childs */
+ if (videobin[MMPLAYER_V_BIN].gst)
+ gst_object_unref(GST_OBJECT(videobin[MMPLAYER_V_BIN].gst));
- _mmplayer_track_initialize(player);
- __mmplayer_initialize_storage_info(player, MMPLAYER_PATH_MAX);
- /* create source element */
- switch (player->profile.uri_type) {
- /* rtsp streamming */
- case MM_PLAYER_URI_TYPE_URL_RTSP:
- {
- gchar *user_agent;
+ MMPLAYER_FREEIF(videobin);
- element = gst_element_factory_make("rtspsrc", "rtsp source");
+ player->pipeline->videobin = NULL;
- if (!element) {
- LOGE("failed to create streaming source element\n");
- break;
- }
+ return MM_ERROR_PLAYER_INTERNAL;
+}
- /* make it zero */
- user_agent = NULL;
+static int __mmplayer_gst_create_plain_text_elements(mm_player_t* player)
+{
+ GList *element_bucket = NULL;
+ MMPlayerGstElement *textbin = player->pipeline->textbin;
- /* get attribute */
- mm_attrs_get_string_by_name(attrs, "streaming_user_agent", &user_agent);
+ MMPLAYER_CREATE_ELEMENT(textbin, MMPLAYER_T_QUEUE, "queue", "text_queue", TRUE, player);
+ MMPLAYER_CREATE_ELEMENT(textbin, MMPLAYER_T_IDENTITY, "identity", "text_identity", TRUE, player);
+ g_object_set(G_OBJECT(textbin[MMPLAYER_T_IDENTITY].gst),
+ "signal-handoffs", FALSE,
+ NULL);
- SECURE_LOGD("user_agent : %s\n", user_agent);
+ MMPLAYER_CREATE_ELEMENT(textbin, MMPLAYER_T_FAKE_SINK, "fakesink", "text_fakesink", TRUE, player);
+ MMPLAYER_SIGNAL_CONNECT(player,
+ G_OBJECT(textbin[MMPLAYER_T_FAKE_SINK].gst),
+ MM_PLAYER_SIGNAL_TYPE_TEXTBIN,
+ "handoff",
+ G_CALLBACK(__mmplayer_update_subtitle),
+ (gpointer)player);
- /* setting property to streaming source */
- g_object_set(G_OBJECT(element), "location", player->profile.uri, NULL);
- if (user_agent)
- g_object_set(G_OBJECT(element), "user-agent", user_agent, NULL);
+ g_object_set(G_OBJECT(textbin[MMPLAYER_T_FAKE_SINK].gst), "sync", TRUE, NULL);
+ g_object_set(G_OBJECT(textbin[MMPLAYER_T_FAKE_SINK].gst), "signal-handoffs", TRUE, NULL);
- MMPLAYER_SIGNAL_CONNECT(player, G_OBJECT(element), MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "pad-added",
- G_CALLBACK(__mmplayer_gst_rtp_dynamic_pad), player);
- MMPLAYER_SIGNAL_CONNECT(player, G_OBJECT(element), MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "no-more-pads",
- G_CALLBACK(__mmplayer_gst_rtp_no_more_pads), player);
+ if (!player->play_subtitle) {
+ LOGD("add textbin sink as sink element of whole pipeline.\n");
+ __mmplayer_add_sink(player, GST_ELEMENT(textbin[MMPLAYER_T_FAKE_SINK].gst));
+ }
+
+ /* adding created elements to bin */
+ LOGD("adding created elements to bin\n");
+ if (!__mmplayer_gst_element_add_bucket_to_bin(GST_BIN(textbin[MMPLAYER_T_BIN].gst), element_bucket)) {
+ LOGE("failed to add elements\n");
+ goto ERROR;
+ }
+
+ /* unset sink flag from textbin. not to hold eos when video data is shorter than subtitle */
+ GST_OBJECT_FLAG_UNSET(textbin[MMPLAYER_T_BIN].gst, GST_ELEMENT_FLAG_SINK);
+ GST_OBJECT_FLAG_UNSET(textbin[MMPLAYER_T_FAKE_SINK].gst, GST_ELEMENT_FLAG_SINK);
+
+ /* linking elements in the bucket by added order. */
+ LOGD("Linking elements in the bucket by added order.\n");
+ if (__mmplayer_gst_element_link_bucket(element_bucket) == -1) {
+ LOGE("failed to link elements\n");
+ goto ERROR;
+ }
+
+ /* done. free allocated variables */
+ g_list_free(element_bucket);
+
+ if (textbin[MMPLAYER_T_QUEUE].gst) {
+ GstPad *pad = NULL;
+ GstPad *ghostpad = NULL;
+
+ pad = gst_element_get_static_pad(GST_ELEMENT(textbin[MMPLAYER_T_QUEUE].gst), "sink");
+ if (!pad) {
+ LOGE("failed to get sink pad of text queue");
+ goto ERROR;
}
- break;
- /* http streaming*/
- case MM_PLAYER_URI_TYPE_URL_HTTP:
- {
- gchar *user_agent, *cookies, **cookie_list;
- gint http_timeout = DEFAULT_HTTP_TIMEOUT;
- user_agent = cookies = NULL;
- cookie_list = NULL;
- gint mode = MM_PLAYER_PD_MODE_NONE;
+ ghostpad = gst_ghost_pad_new("text_sink", pad);
+ gst_object_unref(pad);
- mm_attrs_get_int_by_name(attrs, "pd_mode", &mode);
+ if (!ghostpad) {
+ LOGE("failed to create ghostpad of textbin\n");
+ goto ERROR;
+ }
- player->pd_mode = mode;
+ if (!gst_element_add_pad(textbin[MMPLAYER_T_BIN].gst, ghostpad)) {
+ LOGE("failed to add ghostpad to textbin\n");
+ gst_object_unref(ghostpad);
+ goto ERROR;
+ }
+ }
- LOGD("http playback, PD mode : %d\n", player->pd_mode);
+ return MM_ERROR_NONE;
- if (!MMPLAYER_IS_HTTP_PD(player)) {
- element = gst_element_factory_make(player->ini.httpsrc_element, "http_streaming_source");
- if (!element) {
- LOGE("failed to create http streaming source element[%s].\n", player->ini.httpsrc_element);
- break;
- }
- LOGD("using http streamming source [%s].\n", player->ini.httpsrc_element);
+ERROR:
+ g_list_free(element_bucket);
- /* get attribute */
- mm_attrs_get_string_by_name(attrs, "streaming_cookie", &cookies);
- mm_attrs_get_string_by_name(attrs, "streaming_user_agent", &user_agent);
+ if (!player->play_subtitle && textbin[MMPLAYER_T_FAKE_SINK].gst) {
+ LOGE("remove textbin sink from sink list");
+ __mmplayer_del_sink(player, textbin[MMPLAYER_T_FAKE_SINK].gst);
+ }
- if (player->ini.http_timeout != DEFAULT_HTTP_TIMEOUT) {
- LOGD("get timeout from ini\n");
- http_timeout = player->ini.http_timeout;
- }
+ /* release element at __mmplayer_gst_create_text_sink_bin */
+ return MM_ERROR_PLAYER_INTERNAL;
+}
- /* get attribute */
- SECURE_LOGD("location : %s\n", player->profile.uri);
- SECURE_LOGD("cookies : %s\n", cookies);
- SECURE_LOGD("user_agent : %s\n", user_agent);
- LOGD("timeout : %d\n", http_timeout);
+static int __mmplayer_gst_create_text_sink_bin(mm_player_t* player)
+{
+ MMPlayerGstElement *textbin = NULL;
+ GList *element_bucket = NULL;
+ int surface_type = 0;
+ gint i = 0;
- /* setting property to streaming source */
- g_object_set(G_OBJECT(element), "location", player->profile.uri, NULL);
- g_object_set(G_OBJECT(element), "timeout", http_timeout, NULL);
- g_object_set(G_OBJECT(element), "blocksize", (unsigned long)(64*1024), NULL);
+ MMPLAYER_FENTER();
- /* parsing cookies */
- if ((cookie_list = util_get_cookie_list((const char*)cookies))) {
- g_object_set(G_OBJECT(element), "cookies", cookie_list, NULL);
- g_strfreev(cookie_list);
- }
- if (user_agent)
- g_object_set(G_OBJECT(element), "user-agent", user_agent, NULL);
+ MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED);
- if (MMPLAYER_URL_HAS_DASH_SUFFIX(player))
- LOGW("it's dash. and it's still experimental feature.");
- } else {
- // progressive download
- gchar* location = NULL;
+ /* alloc handles */
+ textbin = (MMPlayerGstElement*)g_malloc0(sizeof(MMPlayerGstElement) * MMPLAYER_T_NUM);
+ if (!textbin) {
+ LOGE("failed to allocate memory for textbin\n");
+ return MM_ERROR_PLAYER_NO_FREE_SPACE;
+ }
- if (player->pd_mode == MM_PLAYER_PD_MODE_URI) {
- gchar *path = NULL;
+ /* create bin */
+ textbin[MMPLAYER_T_BIN].id = MMPLAYER_T_BIN;
+ textbin[MMPLAYER_T_BIN].gst = gst_bin_new("textbin");
+ if (!textbin[MMPLAYER_T_BIN].gst) {
+ LOGE("failed to create textbin\n");
+ goto ERROR;
+ }
- mm_attrs_get_string_by_name(attrs, "pd_location", &path);
+ /* take it */
+ player->pipeline->textbin = textbin;
- MMPLAYER_FREEIF(player->pd_file_save_path);
+ /* fakesink */
+ mm_attrs_get_int_by_name(player->attrs, "display_surface_type", &surface_type);
+ LOGD("surface type for subtitle : %d", surface_type);
+ switch (surface_type) {
+ case MM_DISPLAY_SURFACE_OVERLAY:
+ case MM_DISPLAY_SURFACE_NULL:
+ case MM_DISPLAY_SURFACE_REMOTE:
+ if (__mmplayer_gst_create_plain_text_elements(player) != MM_ERROR_NONE) {
+ LOGE("failed to make plain text elements\n");
+ goto ERROR;
+ }
+ break;
+ default:
+ goto ERROR;
+ break;
+ }
- LOGD("PD Location : %s\n", path);
+ MMPLAYER_FLEAVE();
- if (path) {
- if (!util_get_storage_info(path, &player->storage_info[MMPLAYER_PATH_VOD])) {
- LOGE("failed to get storage info");
- break;
- }
- player->pd_file_save_path = g_strdup(path);
- } else {
- LOGE("can't find pd location so, it should be set \n");
- break;
- }
- }
+ return MM_ERROR_NONE;
- element = gst_element_factory_make("pdpushsrc", "PD pushsrc");
- if (!element) {
- LOGE("failed to create PD push source element[%s].\n", "pdpushsrc");
- break;
- }
+ERROR:
- if (player->pd_mode == MM_PLAYER_PD_MODE_URI)
- g_object_set(G_OBJECT(element), "location", player->pd_file_save_path, NULL);
- else
- g_object_set(G_OBJECT(element), "location", player->profile.uri, NULL);
- g_object_get(element, "location", &location, NULL);
- LOGD("PD_LOCATION [%s].\n", location);
- if (location)
- g_free(location);
- }
- }
- break;
+	LOGE("ERROR : releasing textbin\n");
- /* file source */
- case MM_PLAYER_URI_TYPE_FILE:
- {
- LOGD("using filesrc for 'file://' handler.\n");
- if (!util_get_storage_info(player->profile.uri, &player->storage_info[MMPLAYER_PATH_VOD])) {
- LOGE("failed to get storage info");
- break;
- }
+ g_list_free(element_bucket);
- element = gst_element_factory_make("filesrc", "source");
- if (!element) {
- LOGE("failed to create filesrc\n");
- break;
- }
+ /* release signal */
+ __mmplayer_release_signal_connection(player, MM_PLAYER_SIGNAL_TYPE_TEXTBIN);
- g_object_set(G_OBJECT(element), "location", (player->profile.uri)+7, NULL); /* uri+7 -> remove "file:// */
- }
- break;
+ /* release element which are not added to bin */
+ for (i = 1; i < MMPLAYER_T_NUM; i++) {
+ /* NOTE : skip bin */
+ if (textbin[i].gst) {
+ GstObject* parent = NULL;
+ parent = gst_element_get_parent(textbin[i].gst);
- case MM_PLAYER_URI_TYPE_SS:
- {
- gint http_timeout = DEFAULT_HTTP_TIMEOUT;
- element = gst_element_factory_make("souphttpsrc", "http streaming source");
- if (!element) {
- LOGE("failed to create http streaming source element[%s]", player->ini.httpsrc_element);
- break;
+ if (!parent) {
+ gst_object_unref(GST_OBJECT(textbin[i].gst));
+ textbin[i].gst = NULL;
+ } else {
+ gst_object_unref(GST_OBJECT(parent));
}
+ }
+ }
- if (player->ini.http_timeout != DEFAULT_HTTP_TIMEOUT) {
- LOGD("get timeout from ini\n");
- http_timeout = player->ini.http_timeout;
- }
+	/* release textbin along with its children */
+ if (textbin[MMPLAYER_T_BIN].gst)
+ gst_object_unref(GST_OBJECT(textbin[MMPLAYER_T_BIN].gst));
- /* setting property to streaming source */
- g_object_set(G_OBJECT(element), "location", player->profile.uri, NULL);
- g_object_set(G_OBJECT(element), "timeout", http_timeout, NULL);
- }
- break;
- case MM_PLAYER_URI_TYPE_MS_BUFF:
- {
- LOGD("MS buff src is selected\n");
+ MMPLAYER_FREEIF(player->pipeline->textbin);
+ player->pipeline->textbin = NULL;
- if (player->v_stream_caps) {
- element = gst_element_factory_make("appsrc", "video_appsrc");
- if (!element) {
- LOGF("failed to create video app source element[appsrc].\n");
- break;
- }
+ MMPLAYER_FLEAVE();
+ return MM_ERROR_PLAYER_INTERNAL;
+}
- if (player->a_stream_caps) {
- elem_src_audio = gst_element_factory_make("appsrc", "audio_appsrc");
- if (!elem_src_audio) {
- LOGF("failed to create audio app source element[appsrc].\n");
- break;
- }
- }
- } else if (player->a_stream_caps) {
- /* no video, only audio pipeline*/
- element = gst_element_factory_make("appsrc", "audio_appsrc");
- if (!element) {
- LOGF("failed to create audio app source element[appsrc].\n");
- break;
- }
- }
- if (player->s_stream_caps) {
- elem_src_subtitle = gst_element_factory_make("appsrc", "subtitle_appsrc");
- if (!elem_src_subtitle) {
- LOGF("failed to create subtitle app source element[appsrc].\n");
- break;
- }
- }
+static int
+__mmplayer_gst_create_text_pipeline(mm_player_t* player)
+{
+ MMPlayerGstElement* mainbin = NULL;
+ MMPlayerGstElement* textbin = NULL;
+ MMHandleType attrs = 0;
+ GstElement *subsrc = NULL;
+ GstElement *subparse = NULL;
+ gchar *subtitle_uri = NULL;
+ const gchar *charset = NULL;
+ GstPad *pad = NULL;
- LOGD("setting app sources properties.\n");
- LOGD("location : %s\n", player->profile.uri);
+ MMPLAYER_FENTER();
- if (player->v_stream_caps && element) {
- g_object_set(G_OBJECT(element), "format", GST_FORMAT_TIME,
- "blocksize", (guint)1048576, /* size of many video frames are larger than default blocksize as 4096 */
- "caps", player->v_stream_caps, NULL);
+ /* get mainbin */
+ MMPLAYER_RETURN_VAL_IF_FAIL(player &&
+ player->pipeline &&
+ player->pipeline->mainbin, MM_ERROR_PLAYER_NOT_INITIALIZED);
- if (player->media_stream_buffer_max_size[MM_PLAYER_STREAM_TYPE_VIDEO] > 0)
- g_object_set(G_OBJECT(element), "max-bytes", player->media_stream_buffer_max_size[MM_PLAYER_STREAM_TYPE_VIDEO], NULL);
- if (player->media_stream_buffer_min_percent[MM_PLAYER_STREAM_TYPE_VIDEO] > 0)
- g_object_set(G_OBJECT(element), "min-percent", player->media_stream_buffer_min_percent[MM_PLAYER_STREAM_TYPE_VIDEO], NULL);
+ mainbin = player->pipeline->mainbin;
- /*Fix Seek External Demuxer: set audio and video appsrc as seekable */
- gst_app_src_set_stream_type((GstAppSrc*)G_OBJECT(element), GST_APP_STREAM_TYPE_SEEKABLE);
- MMPLAYER_SIGNAL_CONNECT(player, element, MM_PLAYER_SIGNAL_TYPE_OTHERS, "seek-data",
- G_CALLBACK(__gst_seek_video_data), player);
+ attrs = MMPLAYER_GET_ATTRS(player);
+ if (!attrs) {
+ LOGE("cannot get content attribute\n");
+ return MM_ERROR_PLAYER_INTERNAL;
+ }
- if (player->a_stream_caps && elem_src_audio) {
- g_object_set(G_OBJECT(elem_src_audio), "format", GST_FORMAT_TIME,
- "caps", player->a_stream_caps, NULL);
+ mm_attrs_get_string_by_name(attrs, "subtitle_uri", &subtitle_uri);
+ if (!subtitle_uri || strlen(subtitle_uri) < 1) {
+		LOGE("subtitle uri is not a proper filepath.\n");
+ return MM_ERROR_PLAYER_INVALID_URI;
+ }
- if (player->media_stream_buffer_max_size[MM_PLAYER_STREAM_TYPE_AUDIO] > 0)
- g_object_set(G_OBJECT(elem_src_audio), "max-bytes", player->media_stream_buffer_max_size[MM_PLAYER_STREAM_TYPE_AUDIO], NULL);
- if (player->media_stream_buffer_min_percent[MM_PLAYER_STREAM_TYPE_AUDIO] > 0)
- g_object_set(G_OBJECT(elem_src_audio), "min-percent", player->media_stream_buffer_min_percent[MM_PLAYER_STREAM_TYPE_AUDIO], NULL);
+ if (!util_get_storage_info(subtitle_uri, &player->storage_info[MMPLAYER_PATH_TEXT])) {
+ LOGE("failed to get storage info of subtitle path");
+ return MM_ERROR_PLAYER_INVALID_URI;
+ }
- /*Fix Seek External Demuxer: set audio and video appsrc as seekable */
- gst_app_src_set_stream_type((GstAppSrc*)G_OBJECT(elem_src_audio), GST_APP_STREAM_TYPE_SEEKABLE);
- MMPLAYER_SIGNAL_CONNECT(player, elem_src_audio, MM_PLAYER_SIGNAL_TYPE_OTHERS, "seek-data",
- G_CALLBACK(__gst_seek_audio_data), player);
- }
- } else if (player->a_stream_caps && element) {
- g_object_set(G_OBJECT(element), "format", GST_FORMAT_TIME,
- "caps", player->a_stream_caps, NULL);
+ SECURE_LOGD("subtitle file path is [%s].\n", subtitle_uri);
- if (player->media_stream_buffer_max_size[MM_PLAYER_STREAM_TYPE_AUDIO] > 0)
- g_object_set(G_OBJECT(element), "max-bytes", player->media_stream_buffer_max_size[MM_PLAYER_STREAM_TYPE_AUDIO], NULL);
- if (player->media_stream_buffer_min_percent[MM_PLAYER_STREAM_TYPE_AUDIO] > 0)
- g_object_set(G_OBJECT(element), "min-percent", player->media_stream_buffer_min_percent[MM_PLAYER_STREAM_TYPE_AUDIO], NULL);
+ MMPLAYER_SUBTITLE_INFO_LOCK(player);
+ player->subtitle_language_list = NULL;
+ MMPLAYER_SUBTITLE_INFO_UNLOCK(player);
- /*Fix Seek External Demuxer: set audio and video appsrc as seekable */
- gst_app_src_set_stream_type((GstAppSrc*)G_OBJECT(element), GST_APP_STREAM_TYPE_SEEKABLE);
- MMPLAYER_SIGNAL_CONNECT(player, element, MM_PLAYER_SIGNAL_TYPE_OTHERS, "seek-data",
- G_CALLBACK(__gst_seek_audio_data), player);
- }
+ /* create the subtitle source */
+ subsrc = gst_element_factory_make("filesrc", "subtitle_source");
+ if (!subsrc) {
+ LOGE("failed to create filesrc element\n");
+ goto ERROR;
+ }
+ g_object_set(G_OBJECT(subsrc), "location", subtitle_uri, NULL);
- if (player->s_stream_caps && elem_src_subtitle) {
- g_object_set(G_OBJECT(elem_src_subtitle), "format", GST_FORMAT_TIME,
- "caps", player->s_stream_caps, NULL);
+ mainbin[MMPLAYER_M_SUBSRC].id = MMPLAYER_M_SUBSRC;
+ mainbin[MMPLAYER_M_SUBSRC].gst = subsrc;
- if (player->media_stream_buffer_max_size[MM_PLAYER_STREAM_TYPE_TEXT] > 0)
- g_object_set(G_OBJECT(elem_src_subtitle), "max-bytes", player->media_stream_buffer_max_size[MM_PLAYER_STREAM_TYPE_TEXT], NULL);
- if (player->media_stream_buffer_min_percent[MM_PLAYER_STREAM_TYPE_TEXT] > 0)
- g_object_set(G_OBJECT(elem_src_subtitle), "min-percent", player->media_stream_buffer_min_percent[MM_PLAYER_STREAM_TYPE_TEXT], NULL);
+ if (!gst_bin_add(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), subsrc)) {
+		LOGW("failed to add subsrc\n");
+ gst_object_unref(mainbin[MMPLAYER_M_SUBSRC].gst);
+ mainbin[MMPLAYER_M_SUBSRC].gst = NULL;
+ mainbin[MMPLAYER_M_SUBSRC].id = MMPLAYER_M_NUM;
+ goto ERROR;
+ }
- gst_app_src_set_stream_type((GstAppSrc*)G_OBJECT(elem_src_subtitle), GST_APP_STREAM_TYPE_SEEKABLE);
+ /* subparse */
+ subparse = gst_element_factory_make("subparse", "subtitle_parser");
+ if (!subparse) {
+ LOGE("failed to create subparse element\n");
+ goto ERROR;
+ }
- MMPLAYER_SIGNAL_CONNECT(player, elem_src_subtitle, MM_PLAYER_SIGNAL_TYPE_OTHERS, "seek-data",
- G_CALLBACK(__gst_seek_subtitle_data), player);
- }
+ charset = util_get_charset(subtitle_uri);
+ if (charset) {
+ LOGD("detected charset is %s\n", charset);
+ g_object_set(G_OBJECT(subparse), "subtitle-encoding", charset, NULL);
+ }
- if (player->v_stream_caps && element) {
- MMPLAYER_SIGNAL_CONNECT(player, element, MM_PLAYER_SIGNAL_TYPE_OTHERS, "need-data",
- G_CALLBACK(__gst_appsrc_feed_video_data), player);
- MMPLAYER_SIGNAL_CONNECT(player, element, MM_PLAYER_SIGNAL_TYPE_OTHERS, "enough-data",
- G_CALLBACK(__gst_appsrc_enough_video_data), player);
+ mainbin[MMPLAYER_M_SUBPARSE].id = MMPLAYER_M_SUBPARSE;
+ mainbin[MMPLAYER_M_SUBPARSE].gst = subparse;
- if (player->a_stream_caps && elem_src_audio) {
- MMPLAYER_SIGNAL_CONNECT(player, elem_src_audio, MM_PLAYER_SIGNAL_TYPE_OTHERS, "need-data",
- G_CALLBACK(__gst_appsrc_feed_audio_data), player);
- MMPLAYER_SIGNAL_CONNECT(player, elem_src_audio, MM_PLAYER_SIGNAL_TYPE_OTHERS, "enough-data",
- G_CALLBACK(__gst_appsrc_enough_audio_data), player);
- }
- } else if (player->a_stream_caps && element) {
- MMPLAYER_SIGNAL_CONNECT(player, element, MM_PLAYER_SIGNAL_TYPE_OTHERS, "need-data",
- G_CALLBACK(__gst_appsrc_feed_audio_data), player);
- MMPLAYER_SIGNAL_CONNECT(player, element, MM_PLAYER_SIGNAL_TYPE_OTHERS, "enough-data",
- G_CALLBACK(__gst_appsrc_enough_audio_data), player);
- }
+ if (!gst_bin_add(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), subparse)) {
+ LOGW("failed to add subparse\n");
+ gst_object_unref(mainbin[MMPLAYER_M_SUBPARSE].gst);
+ mainbin[MMPLAYER_M_SUBPARSE].gst = NULL;
+ mainbin[MMPLAYER_M_SUBPARSE].id = MMPLAYER_M_NUM;
+ goto ERROR;
+ }
- if (player->s_stream_caps && elem_src_subtitle)
- MMPLAYER_SIGNAL_CONNECT(player, elem_src_subtitle, MM_PLAYER_SIGNAL_TYPE_OTHERS, "need-data",
- G_CALLBACK(__gst_appsrc_feed_subtitle_data), player);
+ if (!gst_element_link_pads(subsrc, "src", subparse, "sink")) {
+ LOGW("failed to link subsrc and subparse\n");
+ goto ERROR;
+ }
- need_state_holder = FALSE;
+ player->play_subtitle = TRUE;
+ player->adjust_subtitle_pos = 0;
- mm_attrs_set_int_by_name(attrs, "profile_prepare_async", TRUE);
- if (mmf_attrs_commit(attrs)) /* return -1 if error */
- LOGE("failed to commit\n");
+ LOGD("play subtitle using subtitle file\n");
+
+ if (player->pipeline->textbin == NULL) {
+ if (MM_ERROR_NONE != __mmplayer_gst_create_text_sink_bin(player)) {
+ LOGE("failed to create text sink bin. continuing without text\n");
+ goto ERROR;
}
- break;
- /* appsrc */
- case MM_PLAYER_URI_TYPE_MEM:
- {
- guint64 stream_type = GST_APP_STREAM_TYPE_RANDOM_ACCESS;
- LOGD("mem src is selected\n");
+ textbin = player->pipeline->textbin;
- element = gst_element_factory_make("appsrc", "mem-source");
- if (!element) {
- LOGE("failed to create appsrc element\n");
- break;
- }
+ if (!gst_bin_add(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), GST_ELEMENT(textbin[MMPLAYER_T_BIN].gst))) {
+ LOGW("failed to add textbin\n");
- g_object_set(element, "stream-type", stream_type, NULL);
- g_object_set(element, "size", player->profile.input_mem.len, NULL);
- g_object_set(element, "blocksize", (guint64)20480, NULL);
+ /* release signal */
+ __mmplayer_release_signal_connection(player, MM_PLAYER_SIGNAL_TYPE_TEXTBIN);
- MMPLAYER_SIGNAL_CONNECT(player, element, MM_PLAYER_SIGNAL_TYPE_OTHERS, "seek-data",
- G_CALLBACK(__gst_appsrc_seek_data_mem), &player->profile.input_mem);
- MMPLAYER_SIGNAL_CONNECT(player, element, MM_PLAYER_SIGNAL_TYPE_OTHERS, "need-data",
- G_CALLBACK(__gst_appsrc_feed_data_mem), &player->profile.input_mem);
+		/* release textbin along with its children */
+ gst_object_unref(GST_OBJECT(textbin[MMPLAYER_T_BIN].gst));
+ MMPLAYER_FREEIF(player->pipeline->textbin);
+ player->pipeline->textbin = textbin = NULL;
+ goto ERROR;
}
- break;
- case MM_PLAYER_URI_TYPE_URL:
- break;
- case MM_PLAYER_URI_TYPE_TEMP:
- break;
+ LOGD("link text input selector and textbin ghost pad");
- case MM_PLAYER_URI_TYPE_NONE:
- default:
- break;
+ player->textsink_linked = 1;
+ player->external_text_idx = 0;
+ LOGI("player->textsink_linked set to 1\n");
+ } else {
+ textbin = player->pipeline->textbin;
+ LOGD("text bin has been created. reuse it.");
+ player->external_text_idx = 1;
}
- /* check source element is OK */
- if (!element) {
- LOGE("no source element was created.\n");
- goto INIT_ERROR;
+ if (!gst_element_link_pads(subparse, "src", textbin[MMPLAYER_T_BIN].gst, "text_sink")) {
+ LOGW("failed to link subparse and textbin\n");
+ goto ERROR;
}
- /* take source element */
- mainbin[MMPLAYER_M_SRC].id = MMPLAYER_M_SRC;
- mainbin[MMPLAYER_M_SRC].gst = element;
- element_bucket = g_list_append(element_bucket, &mainbin[MMPLAYER_M_SRC]);
-
- if ((MMPLAYER_IS_STREAMING(player)) && (player->streamer == NULL)) {
- player->streamer = __mm_player_streaming_create();
- __mm_player_streaming_initialize(player->streamer);
+ pad = gst_element_get_static_pad(textbin[MMPLAYER_T_FAKE_SINK].gst, "sink");
+ if (!pad) {
+ LOGE("failed to get sink pad from textsink to probe data");
+ goto ERROR;
}
- if (MMPLAYER_IS_HTTP_PD(player)) {
- gint pre_buffering_time = player->streamer->buffering_req.prebuffer_time;
+ gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER,
+ __mmplayer_subtitle_adjust_position_probe, player, NULL);
- LOGD("Picked queue2 element(pre buffer : %d ms)....\n", pre_buffering_time);
- element = gst_element_factory_make("queue2", "queue2");
- if (!element) {
- LOGE("failed to create http streaming buffer element\n");
- goto INIT_ERROR;
- }
+ gst_object_unref(pad);
+ pad = NULL;
- /* take it */
- mainbin[MMPLAYER_M_MUXED_S_BUFFER].id = MMPLAYER_M_MUXED_S_BUFFER;
- mainbin[MMPLAYER_M_MUXED_S_BUFFER].gst = element;
- element_bucket = g_list_append(element_bucket, &mainbin[MMPLAYER_M_MUXED_S_BUFFER]);
+ /* create dot. for debugging */
+ MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-with-subtitle");
+ MMPLAYER_FLEAVE();
- pre_buffering_time = (pre_buffering_time > 0) ? (pre_buffering_time) : (player->ini.http_buffering_time);
+ return MM_ERROR_NONE;
- player->streamer->is_pd_mode = TRUE;
+ERROR:
+ /* release text pipeline resource */
+ player->textsink_linked = 0;
+
+ /* release signal */
+ __mmplayer_release_signal_connection(player, MM_PLAYER_SIGNAL_TYPE_TEXTBIN);
+
+ if (player->pipeline->textbin) {
+ LOGE("remove textbin");
+
+ /* release textbin with it's childs */
+ MMPLAYER_RELEASE_ELEMENT(player, player->pipeline->textbin, MMPLAYER_T_BIN);
+ MMPLAYER_FREEIF(player->pipeline->textbin);
+ player->pipeline->textbin = NULL;
- __mm_player_streaming_set_queue2(player->streamer,
- element,
- TRUE,
- player->ini.http_max_size_bytes, // + PLAYER_PD_EXT_MAX_SIZE_BYTE,
- pre_buffering_time,
- 1.0,
- player->ini.http_buffering_limit,
- MUXED_BUFFER_TYPE_MEM_QUEUE,
- NULL,
- 0);
}
- if (MMPLAYER_IS_MS_BUFF_SRC(player)) {
- if (player->v_stream_caps) {
- es_video_queue = gst_element_factory_make("queue2", "video_queue");
- if (!es_video_queue) {
- LOGE("create es_video_queue for es player failed\n");
- goto INIT_ERROR;
- }
- g_object_set(G_OBJECT(es_video_queue), "max-size-buffers", 2, NULL);
- mainbin[MMPLAYER_M_V_BUFFER].id = MMPLAYER_M_V_BUFFER;
- mainbin[MMPLAYER_M_V_BUFFER].gst = es_video_queue;
- element_bucket = g_list_append(element_bucket, &mainbin[MMPLAYER_M_V_BUFFER]);
- /* Adding audio appsrc to bucket */
- if (player->a_stream_caps && elem_src_audio) {
- mainbin[MMPLAYER_M_2ND_SRC].id = MMPLAYER_M_2ND_SRC;
- mainbin[MMPLAYER_M_2ND_SRC].gst = elem_src_audio;
- element_bucket = g_list_append(element_bucket, &mainbin[MMPLAYER_M_2ND_SRC]);
+ /* release subtitle elem */
+ MMPLAYER_RELEASE_ELEMENT(player, player->pipeline->mainbin, MMPLAYER_M_SUBSRC);
+ MMPLAYER_RELEASE_ELEMENT(player, player->pipeline->mainbin, MMPLAYER_M_SUBPARSE);
- es_audio_queue = gst_element_factory_make("queue2", "audio_queue");
- if (!es_audio_queue) {
- LOGE("create es_audio_queue for es player failed\n");
- goto INIT_ERROR;
- }
- g_object_set(G_OBJECT(es_audio_queue), "max-size-buffers", 2, NULL);
+ return MM_ERROR_PLAYER_INTERNAL;
+}
- mainbin[MMPLAYER_M_A_BUFFER].id = MMPLAYER_M_A_BUFFER;
- mainbin[MMPLAYER_M_A_BUFFER].gst = es_audio_queue;
- element_bucket = g_list_append(element_bucket, &mainbin[MMPLAYER_M_A_BUFFER]);
- }
- } else if (player->a_stream_caps) {
- /* Only audio stream, no video */
- es_audio_queue = gst_element_factory_make("queue2", "audio_queue");
- if (!es_audio_queue) {
- LOGE("create es_audio_queue for es player failed\n");
- goto INIT_ERROR;
- }
- mainbin[MMPLAYER_M_A_BUFFER].id = MMPLAYER_M_A_BUFFER;
- mainbin[MMPLAYER_M_A_BUFFER].gst = es_audio_queue;
- element_bucket = g_list_append(element_bucket, &mainbin[MMPLAYER_M_A_BUFFER]);
- }
+gboolean
+__mmplayer_update_subtitle(GstElement* object, GstBuffer *buffer, GstPad *pad, gpointer data)
+{
+ mm_player_t* player = (mm_player_t*) data;
+ MMMessageParamType msg = {0, };
+ GstClockTime duration = 0;
+ gpointer text = NULL;
+ guint text_size = 0;
+ gboolean ret = TRUE;
+ GstMapInfo mapinfo = GST_MAP_INFO_INIT;
- if (player->s_stream_caps && elem_src_subtitle) {
- mainbin[MMPLAYER_M_SUBSRC].id = MMPLAYER_M_SUBSRC;
- mainbin[MMPLAYER_M_SUBSRC].gst = elem_src_subtitle;
- element_bucket = g_list_append(element_bucket, &mainbin[MMPLAYER_M_SUBSRC]);
+ MMPLAYER_FENTER();
- es_subtitle_queue = gst_element_factory_make("queue2", "subtitle_queue");
- if (!es_subtitle_queue) {
- LOGE("create es_subtitle_queue for es player failed\n");
- goto INIT_ERROR;
- }
- mainbin[MMPLAYER_M_S_BUFFER].id = MMPLAYER_M_V_BUFFER;
- mainbin[MMPLAYER_M_S_BUFFER].gst = es_subtitle_queue;
- element_bucket = g_list_append(element_bucket, &mainbin[MMPLAYER_M_S_BUFFER]);
- }
+ MMPLAYER_RETURN_VAL_IF_FAIL(player, FALSE);
+ MMPLAYER_RETURN_VAL_IF_FAIL(buffer, FALSE);
+
+ if (player->is_subtitle_force_drop) {
+		LOGW("subtitle is dropped forcibly.");
+ return ret;
}
- /* create autoplugging element if src element is not a rtsp src */
- if ((player->profile.uri_type != MM_PLAYER_URI_TYPE_URL_RTSP) &&
- (player->profile.uri_type != MM_PLAYER_URI_TYPE_MS_BUFF)) {
- element = NULL;
- enum MainElementID elemId = MMPLAYER_M_NUM;
+ gst_buffer_map(buffer, &mapinfo, GST_MAP_READ);
+ text = mapinfo.data;
+ text_size = mapinfo.size;
+ duration = GST_BUFFER_DURATION(buffer);
- if (((MMPLAYER_IS_HTTP_PD(player)) ||
- (!MMPLAYER_IS_HTTP_STREAMING(player)))) {
- elemId = MMPLAYER_M_AUTOPLUG;
- element = __mmplayer_create_decodebin(player);
- if (element) {
- /* default size of mq in decodebin is 2M
- * but it can cause blocking issue during seeking depends on content. */
- g_object_set(G_OBJECT(element), "max-size-bytes", (5*1024*1024), NULL);
- }
- need_state_holder = FALSE;
- } else {
- elemId = MMPLAYER_M_TYPEFIND;
- element = gst_element_factory_make("typefind", "typefinder");
- MMPLAYER_SIGNAL_CONNECT(player, element, MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "have-type",
- G_CALLBACK(__mmplayer_typefind_have_type), (gpointer)player);
- }
+ if (player->set_mode.subtitle_off) {
+ LOGD("subtitle is OFF.\n");
+ return TRUE;
+ }
- /* check autoplug element is OK */
- if (!element) {
- LOGE("can not create element(%d)\n", elemId);
- goto INIT_ERROR;
- }
+ if (!text || (text_size == 0)) {
+ LOGD("There is no subtitle to be displayed.\n");
+ return TRUE;
+ }
+
+ msg.data = (void *) text;
+ msg.subtitle.duration = GST_TIME_AS_MSECONDS(duration);
+
+	LOGD("update subtitle : [%ld msec] %s\n", msg.subtitle.duration, (char*)msg.data);
+
+ MMPLAYER_POST_MSG(player, MM_MESSAGE_UPDATE_SUBTITLE, &msg);
+ gst_buffer_unmap(buffer, &mapinfo);
+
+ MMPLAYER_FLEAVE();
+
+ return ret;
+}
+
+static GstPadProbeReturn
+__mmplayer_subtitle_adjust_position_probe(GstPad *pad, GstPadProbeInfo *info, gpointer u_data)
+{
+ mm_player_t *player = (mm_player_t *) u_data;
+ GstClockTime cur_timestamp = 0;
+ gint64 adjusted_timestamp = 0;
+ GstBuffer *buffer = gst_pad_probe_info_get_buffer(info);
- mainbin[elemId].id = elemId;
- mainbin[elemId].gst = element;
+ MMPLAYER_RETURN_VAL_IF_FAIL(player, FALSE);
- element_bucket = g_list_append(element_bucket, &mainbin[elemId]);
+ if (player->set_mode.subtitle_off) {
+ LOGD("subtitle is OFF.\n");
+ return TRUE;
}
- /* add elements to pipeline */
- if (!__mmplayer_gst_element_add_bucket_to_bin(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), element_bucket)) {
- LOGE("Failed to add elements to pipeline\n");
- goto INIT_ERROR;
+ if (player->adjust_subtitle_pos == 0) {
+ LOGD("nothing to do");
+ return TRUE;
}
+ cur_timestamp = GST_BUFFER_TIMESTAMP(buffer);
+ adjusted_timestamp = (gint64) cur_timestamp +((gint64) player->adjust_subtitle_pos * G_GINT64_CONSTANT(1000000));
- /* linking elements in the bucket by added order. */
- if (__mmplayer_gst_element_link_bucket(element_bucket) == -1) {
- LOGE("Failed to link some elements\n");
- goto INIT_ERROR;
+ if (adjusted_timestamp < 0) {
+ LOGD("adjusted_timestamp under zero");
+ MMPLAYER_FLEAVE();
+ return FALSE;
}
+ GST_BUFFER_TIMESTAMP(buffer) = (GstClockTime) adjusted_timestamp;
+ LOGD("buffer timestamp changed %" GST_TIME_FORMAT " -> %" GST_TIME_FORMAT "",
+ GST_TIME_ARGS(cur_timestamp),
+ GST_TIME_ARGS(GST_BUFFER_TIMESTAMP(buffer)));
- /* create fakesink element for keeping the pipeline state PAUSED. if needed */
- if (need_state_holder) {
- /* create */
- mainbin[MMPLAYER_M_SRC_FAKESINK].id = MMPLAYER_M_SRC_FAKESINK;
- mainbin[MMPLAYER_M_SRC_FAKESINK].gst = gst_element_factory_make("fakesink", "state-holder");
+ return GST_PAD_PROBE_OK;
+}
+static int __gst_adjust_subtitle_position(mm_player_t* player, int format, int position)
+{
+ MMPLAYER_FENTER();
- if (!mainbin[MMPLAYER_M_SRC_FAKESINK].gst) {
- LOGE("fakesink element could not be created\n");
- goto INIT_ERROR;
- }
- GST_OBJECT_FLAG_UNSET(mainbin[MMPLAYER_M_SRC_FAKESINK].gst, GST_ELEMENT_FLAG_SINK);
+ /* check player and subtitlebin are created */
+ MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED);
+ MMPLAYER_RETURN_VAL_IF_FAIL(player->play_subtitle, MM_ERROR_NOT_SUPPORT_API);
- /* take ownership of fakesink. we are reusing it */
- gst_object_ref(mainbin[MMPLAYER_M_SRC_FAKESINK].gst);
+ if (position == 0) {
+ LOGD("nothing to do\n");
+ MMPLAYER_FLEAVE();
+ return MM_ERROR_NONE;
+ }
- /* add */
- if (FALSE == gst_bin_add(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst),
- mainbin[MMPLAYER_M_SRC_FAKESINK].gst)) {
- LOGE("failed to add fakesink to bin\n");
- goto INIT_ERROR;
+ switch (format) {
+ case MM_PLAYER_POS_FORMAT_TIME:
+ {
+		/* check the current position */
+ player->adjust_subtitle_pos = position;
+
+		LOGD("save adjust_subtitle_pos in player");
}
- }
+ break;
- /* now we have completed mainbin. take it */
- player->pipeline->mainbin = mainbin;
+ default:
+ {
+ LOGW("invalid format.\n");
+ MMPLAYER_FLEAVE();
+ return MM_ERROR_INVALID_ARGUMENT;
+ }
+ }
- if (MMPLAYER_IS_MS_BUFF_SRC(player)) {
- GstPad *srcpad = NULL;
+ MMPLAYER_FLEAVE();
- if (mainbin[MMPLAYER_M_V_BUFFER].gst) {
- srcpad = gst_element_get_static_pad(mainbin[MMPLAYER_M_V_BUFFER].gst, "src");
- if (srcpad) {
- __mmplayer_gst_create_decoder(player,
- MM_PLAYER_TRACK_TYPE_VIDEO,
- srcpad,
- MMPLAYER_M_AUTOPLUG_V_DEC,
- "video_decodebin");
+ return MM_ERROR_NONE;
+}
- gst_object_unref(GST_OBJECT(srcpad));
- srcpad = NULL;
- }
- }
+static void
+__gst_appsrc_feed_data_mem(GstElement *element, guint size, gpointer user_data)
+{
+ GstElement *appsrc = element;
+ MMPlayerInputBuffer *buf = (MMPlayerInputBuffer *)user_data;
+ GstBuffer *buffer = NULL;
+ GstFlowReturn ret = GST_FLOW_OK;
+ gint len = size;
- if ((player->a_stream_caps) && (mainbin[MMPLAYER_M_A_BUFFER].gst)) {
- srcpad = gst_element_get_static_pad(mainbin[MMPLAYER_M_A_BUFFER].gst, "src");
- if (srcpad) {
- __mmplayer_gst_create_decoder(player,
- MM_PLAYER_TRACK_TYPE_AUDIO,
- srcpad,
- MMPLAYER_M_AUTOPLUG_A_DEC,
- "audio_decodebin");
+ MMPLAYER_RETURN_IF_FAIL(element);
+ MMPLAYER_RETURN_IF_FAIL(buf);
- gst_object_unref(GST_OBJECT(srcpad));
- srcpad = NULL;
- } // else error
- } // else error
+ buffer = gst_buffer_new();
- if (mainbin[MMPLAYER_M_S_BUFFER].gst)
- __mmplayer_try_to_plug_decodebin(player, gst_element_get_static_pad(mainbin[MMPLAYER_M_S_BUFFER].gst, "src"), player->s_stream_caps);
+ if (buf->offset >= buf->len) {
+ LOGD("call eos appsrc\n");
+ g_signal_emit_by_name(appsrc, "end-of-stream", &ret);
+ return;
}
- /* Note : check whether subtitle atrribute uri is set. If uri is set, then try to play subtitle file */
- if (__mmplayer_check_subtitle(player)) {
- if (MM_ERROR_NONE != __mmplayer_gst_create_text_pipeline(player))
- LOGE("fail to create text pipeline");
- }
+ if (buf->len - buf->offset < size)
+ len = buf->len - buf->offset;
- /* connect bus callback */
- bus = gst_pipeline_get_bus(GST_PIPELINE(mainbin[MMPLAYER_M_PIPE].gst));
- if (!bus) {
- LOGE("cannot get bus from pipeline.\n");
- goto INIT_ERROR;
- }
+ gst_buffer_insert_memory(buffer, -1, gst_memory_new_wrapped(0, (guint8 *)(buf->buf + buf->offset), len, 0, len, NULL, NULL));
+ GST_BUFFER_OFFSET(buffer) = (guint64)buf->offset;
+ GST_BUFFER_OFFSET_END(buffer) = (guint64)(buf->offset + len);
- player->bus_watcher = gst_bus_add_watch(bus, (GstBusFunc)__mmplayer_gst_msg_push, player);
+ //LOGD("feed buffer %p, offset %u-%u length %u", buffer, buf->offset, (buf->offset+len), len);
+ g_signal_emit_by_name(appsrc, "push-buffer", buffer, &ret);
- player->context.thread_default = g_main_context_get_thread_default();
+ buf->offset += len;
+}
- if (player->context.thread_default == NULL) {
- player->context.thread_default = g_main_context_default();
- LOGD("thread-default context is the global default context");
- }
- LOGW("bus watcher thread context = %p, watcher : %d", player->context.thread_default, player->bus_watcher);
+static gboolean
+__gst_appsrc_seek_data_mem(GstElement *element, guint64 size, gpointer user_data)
+{
+ MMPlayerInputBuffer *buf = (MMPlayerInputBuffer *)user_data;
- /* set sync handler to get tag synchronously */
- gst_bus_set_sync_handler(bus, __mmplayer_bus_sync_callback, player, NULL);
+ MMPLAYER_RETURN_VAL_IF_FAIL(buf, FALSE);
- /* finished */
- gst_object_unref(GST_OBJECT(bus));
- g_list_free(element_bucket);
+ buf->offset = (int)size;
- /* create gst bus_msb_cb thread */
- g_mutex_init(&player->bus_msg_thread_mutex);
- g_cond_init(&player->bus_msg_thread_cond);
- player->bus_msg_thread_exit = FALSE;
- player->bus_msg_thread =
- g_thread_try_new("gst_bus_msg_thread", __mmplayer_gst_bus_msg_thread, (gpointer)player, NULL);
- if (!player->bus_msg_thread) {
- LOGE("failed to create gst BUS msg thread");
- g_mutex_clear(&player->bus_msg_thread_mutex);
- g_cond_clear(&player->bus_msg_thread_cond);
- goto INIT_ERROR;
+ return TRUE;
+}
+
+static gboolean
+__mmplayer_gst_create_decoder(mm_player_t *player,
+ MMPlayerTrackType track,
+ GstPad* srcpad,
+ enum MainElementID elemId,
+ const gchar* name)
+{
+ gboolean ret = TRUE;
+ GstPad *sinkpad = NULL;
+
+ MMPLAYER_FENTER();
+
+ MMPLAYER_RETURN_VAL_IF_FAIL(player &&
+ player->pipeline &&
+ player->pipeline->mainbin, FALSE);
+ MMPLAYER_RETURN_VAL_IF_FAIL((track == MM_PLAYER_TRACK_TYPE_AUDIO || track == MM_PLAYER_TRACK_TYPE_VIDEO), FALSE);
+ MMPLAYER_RETURN_VAL_IF_FAIL(srcpad, FALSE);
+ MMPLAYER_RETURN_VAL_IF_FAIL((player->pipeline->mainbin[elemId].gst == NULL), FALSE);
+
+ GstElement *decodebin = NULL;
+ GstCaps *dec_caps = NULL;
+
+ /* create decodebin */
+ decodebin = gst_element_factory_make("decodebin", name);
+
+ if (!decodebin) {
+ LOGE("error : fail to create decodebin for %d decoder\n", track);
+ ret = FALSE;
+ goto ERROR;
}
- MMPLAYER_FLEAVE();
+ /* raw pad handling signal */
+ MMPLAYER_SIGNAL_CONNECT(player, G_OBJECT(decodebin), MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "pad-added",
+ G_CALLBACK(__mmplayer_gst_decode_pad_added), player);
- return MM_ERROR_NONE;
+ /* This signal is emitted whenever decodebin finds a new stream. It is emitted
+ before looking for any elements that can handle that stream.*/
+ MMPLAYER_SIGNAL_CONNECT(player, G_OBJECT(decodebin), MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "autoplug-select",
+ G_CALLBACK(__mmplayer_gst_decode_autoplug_select), player);
-INIT_ERROR:
- __mmplayer_gst_destroy_pipeline(player);
- g_list_free(element_bucket);
+ /* This signal is emitted when a element is added to the bin.*/
+ MMPLAYER_SIGNAL_CONNECT(player, G_OBJECT(decodebin), MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "element-added",
+ G_CALLBACK(__mmplayer_gst_element_added), player);
- if (mainbin) {
- /* release element which are not added to bin */
- for (i = 1; i < MMPLAYER_M_NUM; i++) {
- /* NOTE : skip pipeline */
- if (mainbin[i].gst) {
- GstObject* parent = NULL;
- parent = gst_element_get_parent(mainbin[i].gst);
+ if (!gst_bin_add(GST_BIN(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst), decodebin)) {
+ LOGE("failed to add new decodebin\n");
+ ret = FALSE;
+ goto ERROR;
+ }
- if (!parent) {
- gst_object_unref(GST_OBJECT(mainbin[i].gst));
- mainbin[i].gst = NULL;
- } else
- gst_object_unref(GST_OBJECT(parent));
- }
- }
+ dec_caps = gst_pad_query_caps(srcpad, NULL);
+ if (dec_caps) {
+ //LOGD("got pad %s:%s , dec_caps %" GST_PTR_FORMAT, GST_DEBUG_PAD_NAME(srcpad), dec_caps);
+ g_object_set(G_OBJECT(decodebin), "sink-caps", dec_caps, NULL);
+ gst_caps_unref(dec_caps);
+ }
- /* release pipeline with it's childs */
- if (mainbin[MMPLAYER_M_PIPE].gst)
- gst_object_unref(GST_OBJECT(mainbin[MMPLAYER_M_PIPE].gst));
+ player->pipeline->mainbin[elemId].id = elemId;
+ player->pipeline->mainbin[elemId].gst = decodebin;
- MMPLAYER_FREEIF(mainbin);
+ sinkpad = gst_element_get_static_pad(decodebin, "sink");
+
+ if (GST_PAD_LINK_OK != gst_pad_link(srcpad, sinkpad)) {
+ LOGW("failed to link [%s:%s] to decoder\n", GST_DEBUG_PAD_NAME(srcpad));
+ /* decodebin's floating ref was sunk by gst_bin_add(); unreffing it here
+  * would steal the bin's reference. Report failure instead. */
+ ret = FALSE;
+ goto ERROR;
}
- MMPLAYER_FREEIF(player->pipeline);
- return MM_ERROR_PLAYER_INTERNAL;
-}
-
-static void
-__mmplayer_reset_gapless_state(mm_player_t* player)
-{
- MMPLAYER_FENTER();
- MMPLAYER_RETURN_IF_FAIL(player
- && player->pipeline
- && player->pipeline->audiobin
- && player->pipeline->audiobin[MMPLAYER_A_BIN].gst);
+ if (GST_STATE_CHANGE_FAILURE == gst_element_sync_state_with_parent(decodebin))
+ LOGE("failed to sync second level decodebin state with parent\n");
- memset(&player->gapless, 0, sizeof(mm_player_gapless_t));
+ LOGD("Total num of %d tracks = %d \n", track, player->selector[track].total_track_num);
+ERROR:
+ if (sinkpad) {
+ gst_object_unref(GST_OBJECT(sinkpad));
+ sinkpad = NULL;
+ }
MMPLAYER_FLEAVE();
- return;
+
+ return ret;
}
+/**
+ * This function is to create audio or video pipeline for playing.
+ *
+ * @param player [in] handle of player
+ *
+ * @return This function returns zero on success.
+ * @remark
+ * @see
+ */
static int
-__mmplayer_gst_destroy_pipeline(mm_player_t* player)
+__mmplayer_gst_create_pipeline(mm_player_t* player)
{
- gint timeout = 0;
- int ret = MM_ERROR_NONE;
-
+ GstBus *bus = NULL;
+ MMPlayerGstElement *mainbin = NULL;
+ MMHandleType attrs = 0;
+ GstElement* element = NULL;
+ GstElement* elem_src_audio = NULL;
+ GstElement* elem_src_subtitle = NULL;
+ GstElement* es_video_queue = NULL;
+ GstElement* es_audio_queue = NULL;
+ GstElement* es_subtitle_queue = NULL;
+ GList* element_bucket = NULL;
+ gboolean need_state_holder = TRUE;
+ gint i = 0;
+#ifdef SW_CODEC_ONLY
+ int surface_type = 0;
+#endif
MMPLAYER_FENTER();
- MMPLAYER_RETURN_VAL_IF_FAIL(player, MM_ERROR_INVALID_HANDLE);
-
- /* cleanup stuffs */
- MMPLAYER_FREEIF(player->type);
- player->have_dynamic_pad = FALSE;
- player->no_more_pad = FALSE;
- player->num_dynamic_pad = 0;
- player->demux_pad_index = 0;
- player->use_deinterleave = FALSE;
- player->max_audio_channels = 0;
- player->video_share_api_delta = 0;
- player->video_share_clock_delta = 0;
- player->video_hub_download_mode = 0;
-
- MMPLAYER_SUBTITLE_INFO_LOCK(player);
- player->subtitle_language_list = NULL;
- MMPLAYER_SUBTITLE_INFO_UNLOCK(player);
+ MMPLAYER_RETURN_VAL_IF_FAIL(player, MM_ERROR_PLAYER_NOT_INITIALIZED);
- __mmplayer_reset_gapless_state(player);
+ /* get profile attribute */
+ attrs = MMPLAYER_GET_ATTRS(player);
+ if (!attrs) {
+ LOGE("cannot get content attribute\n");
+ goto INIT_ERROR;
+ }
- if (player->streamer) {
- __mm_player_streaming_deinitialize(player->streamer);
- __mm_player_streaming_destroy(player->streamer);
- player->streamer = NULL;
+ /* create pipeline handles */
+ if (player->pipeline) {
+ LOGW("pipeline should be released before create new one\n");
+ goto INIT_ERROR;
}
- /* cleanup unlinked mime type */
- MMPLAYER_FREEIF(player->unlinked_audio_mime);
- MMPLAYER_FREEIF(player->unlinked_video_mime);
- MMPLAYER_FREEIF(player->unlinked_demuxer_mime);
+ player->video360_metadata.is_spherical = -1;
+ player->is_openal_plugin_used = FALSE;
- /* cleanup running stuffs */
- __mmplayer_cancel_eos_timer(player);
+ player->pipeline = (MMPlayerGstPipelineInfo*) g_malloc0(sizeof(MMPlayerGstPipelineInfo));
+ if (player->pipeline == NULL)
+ goto INIT_ERROR;
- /* cleanup gst stuffs */
- if (player->pipeline) {
- MMPlayerGstElement* mainbin = player->pipeline->mainbin;
- GstTagList* tag_list = player->pipeline->tag_list;
+ memset(player->pipeline, 0, sizeof(MMPlayerGstPipelineInfo)); /* g_malloc0 did this job already */
- /* first we need to disconnect all signal hander */
- __mmplayer_release_signal_connection(player, MM_PLAYER_SIGNAL_TYPE_ALL);
+ /* create mainbin */
+ mainbin = (MMPlayerGstElement*) g_malloc0(sizeof(MMPlayerGstElement) * MMPLAYER_M_NUM);
+ if (mainbin == NULL)
+ goto INIT_ERROR;
- if (mainbin) {
- MMPlayerGstElement* audiobin = player->pipeline->audiobin;
- MMPlayerGstElement* videobin = player->pipeline->videobin;
- MMPlayerGstElement* textbin = player->pipeline->textbin;
- GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(mainbin[MMPLAYER_M_PIPE].gst));
- gst_bus_set_sync_handler(bus, NULL, NULL, NULL);
- gst_object_unref(bus);
+ memset(mainbin, 0, sizeof(MMPlayerGstElement) * MMPLAYER_M_NUM); /* g_malloc0 did this job already */
- timeout = MMPLAYER_STATE_CHANGE_TIMEOUT(player);
- ret = __mmplayer_gst_set_state(player, mainbin[MMPLAYER_M_PIPE].gst, GST_STATE_NULL, FALSE, timeout);
- if (ret != MM_ERROR_NONE) {
- LOGE("fail to change state to NULL\n");
- return MM_ERROR_PLAYER_INTERNAL;
- }
+ /* create pipeline */
+ mainbin[MMPLAYER_M_PIPE].id = MMPLAYER_M_PIPE;
+ mainbin[MMPLAYER_M_PIPE].gst = gst_pipeline_new("player");
+ if (!mainbin[MMPLAYER_M_PIPE].gst) {
+ LOGE("failed to create pipeline\n");
+ goto INIT_ERROR;
+ }
+ player->demux_pad_index = 0;
+ player->subtitle_language_list = NULL;
- LOGW("succeeded in changing state to NULL\n");
+ player->is_subtitle_force_drop = FALSE;
+ player->last_multiwin_status = FALSE;
- gst_object_unref(GST_OBJECT(mainbin[MMPLAYER_M_PIPE].gst));
+ _mmplayer_track_initialize(player);
+ __mmplayer_initialize_storage_info(player, MMPLAYER_PATH_MAX);
- /* free fakesink */
- if (mainbin[MMPLAYER_M_SRC_FAKESINK].gst)
- gst_object_unref(GST_OBJECT(mainbin[MMPLAYER_M_SRC_FAKESINK].gst));
+ /* create source element */
+ switch (player->profile.uri_type) {
+ /* rtsp streamming */
+ case MM_PLAYER_URI_TYPE_URL_RTSP:
+ {
+ gchar *user_agent;
- /* free avsysaudiosink
- avsysaudiosink should be unref when destory pipeline just after start play with BT.
- Because audiosink is created but never added to bin, and therefore it will not be unref when pipeline is destroyed.
- */
- MMPLAYER_FREEIF(audiobin);
- MMPLAYER_FREEIF(videobin);
- MMPLAYER_FREEIF(textbin);
- MMPLAYER_FREEIF(mainbin);
- }
+ element = gst_element_factory_make("rtspsrc", "rtsp source");
- if (tag_list)
- gst_tag_list_free(tag_list);
+ if (!element) {
+ LOGE("failed to create streaming source element\n");
+ break;
+ }
- MMPLAYER_FREEIF(player->pipeline);
- }
- MMPLAYER_FREEIF(player->album_art);
+ /* make it zero */
+ user_agent = NULL;
- if (player->v_stream_caps) {
- gst_caps_unref(player->v_stream_caps);
- player->v_stream_caps = NULL;
- }
- if (player->a_stream_caps) {
- gst_caps_unref(player->a_stream_caps);
- player->a_stream_caps = NULL;
- }
+ /* get attribute */
+ mm_attrs_get_string_by_name(attrs, "streaming_user_agent", &user_agent);
- if (player->s_stream_caps) {
- gst_caps_unref(player->s_stream_caps);
- player->s_stream_caps = NULL;
- }
- _mmplayer_track_destroy(player);
+ SECURE_LOGD("user_agent : %s\n", user_agent);
- if (player->sink_elements)
- g_list_free(player->sink_elements);
- player->sink_elements = NULL;
+ /* setting property to streaming source */
+ g_object_set(G_OBJECT(element), "location", player->profile.uri, NULL);
+ if (user_agent)
+ g_object_set(G_OBJECT(element), "user-agent", user_agent, NULL);
- if (player->bufmgr) {
- tbm_bufmgr_deinit(player->bufmgr);
- player->bufmgr = NULL;
- }
+ MMPLAYER_SIGNAL_CONNECT(player, G_OBJECT(element), MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "pad-added",
+ G_CALLBACK(__mmplayer_gst_rtp_dynamic_pad), player);
+ MMPLAYER_SIGNAL_CONNECT(player, G_OBJECT(element), MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "no-more-pads",
+ G_CALLBACK(__mmplayer_gst_rtp_no_more_pads), player);
+ }
+ break;
- LOGW("finished destroy pipeline\n");
+ /* http streaming*/
+ case MM_PLAYER_URI_TYPE_URL_HTTP:
+ {
+ gchar *user_agent, *cookies, **cookie_list;
+ gint http_timeout = DEFAULT_HTTP_TIMEOUT;
+ user_agent = cookies = NULL;
+ cookie_list = NULL;
+ gint mode = MM_PLAYER_PD_MODE_NONE;
- MMPLAYER_FLEAVE();
+ mm_attrs_get_int_by_name(attrs, "pd_mode", &mode);
- return ret;
-}
+ player->pd_mode = mode;
-static void __mmplayer_gst_handle_async(mm_player_t* player, gboolean async, enum MMPlayerSinkType type)
-{
- MMPlayerGstElement *videobin = NULL, *audiobin = NULL, *textbin = NULL;
+ LOGD("http playback, PD mode : %d\n", player->pd_mode);
+
+ if (!MMPLAYER_IS_HTTP_PD(player)) {
+ element = gst_element_factory_make(player->ini.httpsrc_element, "http_streaming_source");
+ if (!element) {
+ LOGE("failed to create http streaming source element[%s].\n", player->ini.httpsrc_element);
+ break;
+ }
+ LOGD("using http streamming source [%s].\n", player->ini.httpsrc_element);
- MMPLAYER_RETURN_IF_FAIL(player && player->pipeline);
+ /* get attribute */
+ mm_attrs_get_string_by_name(attrs, "streaming_cookie", &cookies);
+ mm_attrs_get_string_by_name(attrs, "streaming_user_agent", &user_agent);
- audiobin = player->pipeline->audiobin; /* can be null */
- videobin = player->pipeline->videobin; /* can be null */
- textbin = player->pipeline->textbin; /* can be null */
+ if (player->ini.http_timeout != DEFAULT_HTTP_TIMEOUT) {
+ LOGD("get timeout from ini\n");
+ http_timeout = player->ini.http_timeout;
+ }
- LOGD("Async will be set to %d about 0x%X type sink", async, type);
+ /* get attribute */
+ SECURE_LOGD("location : %s\n", player->profile.uri);
+ SECURE_LOGD("cookies : %s\n", cookies);
+ SECURE_LOGD("user_agent : %s\n", user_agent);
+ LOGD("timeout : %d\n", http_timeout);
- if ((type & MMPLAYER_AUDIO_SINK) && audiobin && audiobin[MMPLAYER_A_SINK].gst)
- g_object_set(audiobin[MMPLAYER_A_SINK].gst, "async", async, NULL);
+ /* setting property to streaming source */
+ g_object_set(G_OBJECT(element), "location", player->profile.uri, NULL);
+ g_object_set(G_OBJECT(element), "timeout", http_timeout, NULL);
+ g_object_set(G_OBJECT(element), "blocksize", (guint)(64 * 1024), NULL); /* "blocksize" is a guint property; wider types break varargs */
- if ((type & MMPLAYER_VIDEO_SINK) && videobin && videobin[MMPLAYER_V_SINK].gst)
- g_object_set(videobin[MMPLAYER_V_SINK].gst, "async", async, NULL);
+ /* parsing cookies */
+ if ((cookie_list = util_get_cookie_list((const char*)cookies))) {
+ g_object_set(G_OBJECT(element), "cookies", cookie_list, NULL);
+ g_strfreev(cookie_list);
+ }
+ if (user_agent)
+ g_object_set(G_OBJECT(element), "user-agent", user_agent, NULL);
- if ((type & MMPLAYER_TEXT_SINK) && textbin && textbin[MMPLAYER_T_FAKE_SINK].gst)
- g_object_set(textbin[MMPLAYER_T_FAKE_SINK].gst, "async", async, NULL);
+ if (MMPLAYER_URL_HAS_DASH_SUFFIX(player))
+ LOGW("it's dash. and it's still experimental feature.");
+ } else {
+ // progressive download
+ gchar* location = NULL;
- return;
-}
+ if (player->pd_mode == MM_PLAYER_PD_MODE_URI) {
+ gchar *path = NULL;
-static int __gst_realize(mm_player_t* player)
-{
- gint timeout = 0;
- int ret = MM_ERROR_NONE;
+ mm_attrs_get_string_by_name(attrs, "pd_location", &path);
- MMPLAYER_FENTER();
+ MMPLAYER_FREEIF(player->pd_file_save_path);
- MMPLAYER_RETURN_VAL_IF_FAIL(player, MM_ERROR_PLAYER_NOT_INITIALIZED);
+ LOGD("PD Location : %s\n", path);
- MMPLAYER_PENDING_STATE(player) = MM_PLAYER_STATE_READY;
+ if (path) {
+ if (!util_get_storage_info(path, &player->storage_info[MMPLAYER_PATH_VOD])) {
+ LOGE("failed to get storage info");
+ break;
+ }
+ player->pd_file_save_path = g_strdup(path);
+ } else {
+ LOGE("can't find pd location so, it should be set \n");
+ break;
+ }
+ }
- ret = __mmplayer_gst_create_pipeline(player);
- if (ret) {
- LOGE("failed to create pipeline\n");
- return ret;
- }
+ element = gst_element_factory_make("pdpushsrc", "PD pushsrc");
+ if (!element) {
+ LOGE("failed to create PD push source element[%s].\n", "pdpushsrc");
+ break;
+ }
- /* set pipeline state to READY */
- /* NOTE : state change to READY must be performed sync. */
- timeout = MMPLAYER_STATE_CHANGE_TIMEOUT(player);
- ret = __mmplayer_gst_set_state(player,
- player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_STATE_READY, FALSE, timeout);
+ if (player->pd_mode == MM_PLAYER_PD_MODE_URI)
+ g_object_set(G_OBJECT(element), "location", player->pd_file_save_path, NULL);
+ else
+ g_object_set(G_OBJECT(element), "location", player->profile.uri, NULL);
+ g_object_get(element, "location", &location, NULL);
+ LOGD("PD_LOCATION [%s].\n", location);
+ if (location)
+ g_free(location);
+ }
+ }
+ break;
- if (ret != MM_ERROR_NONE) {
- /* return error if failed to set state */
- LOGE("failed to set READY state");
- return ret;
- }
+ /* file source */
+ case MM_PLAYER_URI_TYPE_FILE:
+ {
+ LOGD("using filesrc for 'file://' handler.\n");
+ if (!util_get_storage_info(player->profile.uri, &player->storage_info[MMPLAYER_PATH_VOD])) {
+ LOGE("failed to get storage info");
+ break;
+ }
- MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_READY);
+ element = gst_element_factory_make("filesrc", "source");
+ if (!element) {
+ LOGE("failed to create filesrc\n");
+ break;
+ }
- /* create dot before error-return. for debugging */
- MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-status-realize");
+ g_object_set(G_OBJECT(element), "location", (player->profile.uri)+7, NULL); /* uri+7 -> remove "file:// */
+ }
+ break;
- MMPLAYER_FLEAVE();
+ case MM_PLAYER_URI_TYPE_SS:
+ {
+ gint http_timeout = DEFAULT_HTTP_TIMEOUT;
+ element = gst_element_factory_make("souphttpsrc", "http streaming source");
+ if (!element) {
+ LOGE("failed to create http streaming source element[%s]", player->ini.httpsrc_element);
+ break;
+ }
- return ret;
-}
+ if (player->ini.http_timeout != DEFAULT_HTTP_TIMEOUT) {
+ LOGD("get timeout from ini\n");
+ http_timeout = player->ini.http_timeout;
+ }
-static int __gst_unrealize(mm_player_t* player)
-{
- int ret = MM_ERROR_NONE;
+ /* setting property to streaming source */
+ g_object_set(G_OBJECT(element), "location", player->profile.uri, NULL);
+ g_object_set(G_OBJECT(element), "timeout", http_timeout, NULL);
+ }
+ break;
+ case MM_PLAYER_URI_TYPE_MS_BUFF:
+ {
+ LOGD("MS buff src is selected\n");
- MMPLAYER_FENTER();
+ if (player->v_stream_caps) {
+ element = gst_element_factory_make("appsrc", "video_appsrc");
+ if (!element) {
+ LOGF("failed to create video app source element[appsrc].\n");
+ break;
+ }
- MMPLAYER_RETURN_VAL_IF_FAIL(player, MM_ERROR_PLAYER_NOT_INITIALIZED);
+ if (player->a_stream_caps) {
+ elem_src_audio = gst_element_factory_make("appsrc", "audio_appsrc");
+ if (!elem_src_audio) {
+ LOGF("failed to create audio app source element[appsrc].\n");
+ break;
+ }
+ }
+ } else if (player->a_stream_caps) {
+ /* no video, only audio pipeline*/
+ element = gst_element_factory_make("appsrc", "audio_appsrc");
+ if (!element) {
+ LOGF("failed to create audio app source element[appsrc].\n");
+ break;
+ }
+ }
- MMPLAYER_PENDING_STATE(player) = MM_PLAYER_STATE_NULL;
- MMPLAYER_PRINT_STATE(player);
+ if (player->s_stream_caps) {
+ elem_src_subtitle = gst_element_factory_make("appsrc", "subtitle_appsrc");
+ if (!elem_src_subtitle) {
+ LOGF("failed to create subtitle app source element[appsrc].\n");
+ break;
+ }
+ }
- /* release miscellaneous information */
- __mmplayer_release_misc(player);
+ LOGD("setting app sources properties.\n");
+ LOGD("location : %s\n", player->profile.uri);
- /* destroy pipeline */
- ret = __mmplayer_gst_destroy_pipeline(player);
- if (ret != MM_ERROR_NONE) {
- LOGE("failed to destory pipeline\n");
- return ret;
- }
+ if (player->v_stream_caps && element) {
+ g_object_set(G_OBJECT(element), "format", GST_FORMAT_TIME,
+ "blocksize", (guint)1048576, /* size of many video frames are larger than default blocksize as 4096 */
+ "caps", player->v_stream_caps, NULL);
- /* release miscellaneous information.
- these info needs to be released after pipeline is destroyed. */
- __mmplayer_release_misc_post(player);
+ if (player->media_stream_buffer_max_size[MM_PLAYER_STREAM_TYPE_VIDEO] > 0)
+ g_object_set(G_OBJECT(element), "max-bytes", player->media_stream_buffer_max_size[MM_PLAYER_STREAM_TYPE_VIDEO], NULL);
+ if (player->media_stream_buffer_min_percent[MM_PLAYER_STREAM_TYPE_VIDEO] > 0)
+ g_object_set(G_OBJECT(element), "min-percent", player->media_stream_buffer_min_percent[MM_PLAYER_STREAM_TYPE_VIDEO], NULL);
- MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_NULL);
+ /*Fix Seek External Demuxer: set audio and video appsrc as seekable */
+ gst_app_src_set_stream_type((GstAppSrc*)G_OBJECT(element), GST_APP_STREAM_TYPE_SEEKABLE);
+ MMPLAYER_SIGNAL_CONNECT(player, element, MM_PLAYER_SIGNAL_TYPE_OTHERS, "seek-data",
+ G_CALLBACK(__gst_seek_video_data), player);
- MMPLAYER_FLEAVE();
+ if (player->a_stream_caps && elem_src_audio) {
+ g_object_set(G_OBJECT(elem_src_audio), "format", GST_FORMAT_TIME,
+ "caps", player->a_stream_caps, NULL);
- return ret;
-}
+ if (player->media_stream_buffer_max_size[MM_PLAYER_STREAM_TYPE_AUDIO] > 0)
+ g_object_set(G_OBJECT(elem_src_audio), "max-bytes", player->media_stream_buffer_max_size[MM_PLAYER_STREAM_TYPE_AUDIO], NULL);
+ if (player->media_stream_buffer_min_percent[MM_PLAYER_STREAM_TYPE_AUDIO] > 0)
+ g_object_set(G_OBJECT(elem_src_audio), "min-percent", player->media_stream_buffer_min_percent[MM_PLAYER_STREAM_TYPE_AUDIO], NULL);
-static int __gst_pending_seek(mm_player_t* player)
-{
- MMPlayerStateType current_state = MM_PLAYER_STATE_NONE;
- int ret = MM_ERROR_NONE;
+ /*Fix Seek External Demuxer: set audio and video appsrc as seekable */
+ gst_app_src_set_stream_type((GstAppSrc*)G_OBJECT(elem_src_audio), GST_APP_STREAM_TYPE_SEEKABLE);
+ MMPLAYER_SIGNAL_CONNECT(player, elem_src_audio, MM_PLAYER_SIGNAL_TYPE_OTHERS, "seek-data",
+ G_CALLBACK(__gst_seek_audio_data), player);
+ }
+ } else if (player->a_stream_caps && element) {
+ g_object_set(G_OBJECT(element), "format", GST_FORMAT_TIME,
+ "caps", player->a_stream_caps, NULL);
- MMPLAYER_FENTER();
+ if (player->media_stream_buffer_max_size[MM_PLAYER_STREAM_TYPE_AUDIO] > 0)
+ g_object_set(G_OBJECT(element), "max-bytes", player->media_stream_buffer_max_size[MM_PLAYER_STREAM_TYPE_AUDIO], NULL);
+ if (player->media_stream_buffer_min_percent[MM_PLAYER_STREAM_TYPE_AUDIO] > 0)
+ g_object_set(G_OBJECT(element), "min-percent", player->media_stream_buffer_min_percent[MM_PLAYER_STREAM_TYPE_AUDIO], NULL);
- MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED);
+ /*Fix Seek External Demuxer: set audio and video appsrc as seekable */
+ gst_app_src_set_stream_type((GstAppSrc*)G_OBJECT(element), GST_APP_STREAM_TYPE_SEEKABLE);
+ MMPLAYER_SIGNAL_CONNECT(player, element, MM_PLAYER_SIGNAL_TYPE_OTHERS, "seek-data",
+ G_CALLBACK(__gst_seek_audio_data), player);
+ }
- if (!player->pending_seek.is_pending) {
- LOGD("pending seek is not reserved. nothing to do.\n");
- return ret;
- }
+ if (player->s_stream_caps && elem_src_subtitle) {
+ g_object_set(G_OBJECT(elem_src_subtitle), "format", GST_FORMAT_TIME,
+ "caps", player->s_stream_caps, NULL);
- /* check player state if player could pending seek or not. */
- current_state = MMPLAYER_CURRENT_STATE(player);
+ if (player->media_stream_buffer_max_size[MM_PLAYER_STREAM_TYPE_TEXT] > 0)
+ g_object_set(G_OBJECT(elem_src_subtitle), "max-bytes", player->media_stream_buffer_max_size[MM_PLAYER_STREAM_TYPE_TEXT], NULL);
+ if (player->media_stream_buffer_min_percent[MM_PLAYER_STREAM_TYPE_TEXT] > 0)
+ g_object_set(G_OBJECT(elem_src_subtitle), "min-percent", player->media_stream_buffer_min_percent[MM_PLAYER_STREAM_TYPE_TEXT], NULL);
- if (current_state != MM_PLAYER_STATE_PAUSED && current_state != MM_PLAYER_STATE_PLAYING) {
- LOGW("try to pending seek in %s state, try next time. \n",
- MMPLAYER_STATE_GET_NAME(current_state));
- return ret;
- }
+ gst_app_src_set_stream_type((GstAppSrc*)G_OBJECT(elem_src_subtitle), GST_APP_STREAM_TYPE_SEEKABLE);
- LOGD("trying to play from(%"G_GINT64_FORMAT") pending position\n", player->pending_seek.pos);
+ MMPLAYER_SIGNAL_CONNECT(player, elem_src_subtitle, MM_PLAYER_SIGNAL_TYPE_OTHERS, "seek-data",
+ G_CALLBACK(__gst_seek_subtitle_data), player);
+ }
- ret = __gst_set_position(player, player->pending_seek.format, player->pending_seek.pos, FALSE);
+ if (player->v_stream_caps && element) {
+ MMPLAYER_SIGNAL_CONNECT(player, element, MM_PLAYER_SIGNAL_TYPE_OTHERS, "need-data",
+ G_CALLBACK(__gst_appsrc_feed_video_data), player);
+ MMPLAYER_SIGNAL_CONNECT(player, element, MM_PLAYER_SIGNAL_TYPE_OTHERS, "enough-data",
+ G_CALLBACK(__gst_appsrc_enough_video_data), player);
- if (MM_ERROR_NONE != ret)
- LOGE("failed to seek pending postion. just keep staying current position.\n");
+ if (player->a_stream_caps && elem_src_audio) {
+ MMPLAYER_SIGNAL_CONNECT(player, elem_src_audio, MM_PLAYER_SIGNAL_TYPE_OTHERS, "need-data",
+ G_CALLBACK(__gst_appsrc_feed_audio_data), player);
+ MMPLAYER_SIGNAL_CONNECT(player, elem_src_audio, MM_PLAYER_SIGNAL_TYPE_OTHERS, "enough-data",
+ G_CALLBACK(__gst_appsrc_enough_audio_data), player);
+ }
+ } else if (player->a_stream_caps && element) {
+ MMPLAYER_SIGNAL_CONNECT(player, element, MM_PLAYER_SIGNAL_TYPE_OTHERS, "need-data",
+ G_CALLBACK(__gst_appsrc_feed_audio_data), player);
+ MMPLAYER_SIGNAL_CONNECT(player, element, MM_PLAYER_SIGNAL_TYPE_OTHERS, "enough-data",
+ G_CALLBACK(__gst_appsrc_enough_audio_data), player);
+ }
- player->pending_seek.is_pending = FALSE;
+ if (player->s_stream_caps && elem_src_subtitle)
+ MMPLAYER_SIGNAL_CONNECT(player, elem_src_subtitle, MM_PLAYER_SIGNAL_TYPE_OTHERS, "need-data",
+ G_CALLBACK(__gst_appsrc_feed_subtitle_data), player);
- MMPLAYER_FLEAVE();
+ need_state_holder = FALSE;
- return ret;
-}
+ mm_attrs_set_int_by_name(attrs, "profile_prepare_async", TRUE);
+ if (mmf_attrs_commit(attrs)) /* return -1 if error */
+ LOGE("failed to commit\n");
+ }
+ break;
+ /* appsrc */
+ case MM_PLAYER_URI_TYPE_MEM:
+ {
+ GstAppStreamType stream_type = GST_APP_STREAM_TYPE_RANDOM_ACCESS; /* enum property must be passed int-sized through g_object_set varargs */
-static int __gst_start(mm_player_t* player)
-{
- int ret = MM_ERROR_NONE;
- gboolean async = FALSE;
+ LOGD("mem src is selected\n");
- MMPLAYER_FENTER();
+ element = gst_element_factory_make("appsrc", "mem-source");
+ if (!element) {
+ LOGE("failed to create appsrc element\n");
+ break;
+ }
- MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED);
+ g_object_set(element, "stream-type", stream_type, NULL);
+ g_object_set(element, "size", (gint64)player->profile.input_mem.len, NULL); /* appsrc "size" is gint64 */
+ g_object_set(element, "blocksize", (guint)20480, NULL); /* basesrc "blocksize" is guint */
- /* NOTE : if SetPosition was called before Start. do it now */
- /* streaming doesn't support it. so it should be always sync */
- /* !!create one more api to check if there is pending seek rather than checking variables */
- if (player->pending_seek.is_pending && !MMPLAYER_IS_STREAMING(player)) {
- MMPLAYER_TARGET_STATE(player) = MM_PLAYER_STATE_PAUSED;
- ret = __gst_pause(player, FALSE);
- if (ret != MM_ERROR_NONE) {
- LOGE("failed to set state to PAUSED for pending seek");
- return ret;
+ MMPLAYER_SIGNAL_CONNECT(player, element, MM_PLAYER_SIGNAL_TYPE_OTHERS, "seek-data",
+ G_CALLBACK(__gst_appsrc_seek_data_mem), &player->profile.input_mem);
+ MMPLAYER_SIGNAL_CONNECT(player, element, MM_PLAYER_SIGNAL_TYPE_OTHERS, "need-data",
+ G_CALLBACK(__gst_appsrc_feed_data_mem), &player->profile.input_mem);
}
+ break;
+ case MM_PLAYER_URI_TYPE_URL:
+ break;
- MMPLAYER_TARGET_STATE(player) = MM_PLAYER_STATE_PLAYING;
- if (__gst_pending_seek(player) != MM_ERROR_NONE)
- LOGW("failed to seek pending postion. starting from the begin of content");
- }
-
- LOGD("current state before doing transition");
- MMPLAYER_PENDING_STATE(player) = MM_PLAYER_STATE_PLAYING;
- MMPLAYER_PRINT_STATE(player);
+ case MM_PLAYER_URI_TYPE_TEMP:
+ break;
- /* set pipeline state to PLAYING */
- ret = __mmplayer_gst_set_state(player,
- player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_STATE_PLAYING, async, MMPLAYER_STATE_CHANGE_TIMEOUT(player));
+ case MM_PLAYER_URI_TYPE_NONE:
+ default:
+ break;
+ }
- if (ret == MM_ERROR_NONE) {
- MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_PLAYING);
- } else {
- LOGE("failed to set state to PLAYING");
- return ret;
+ /* check source element is OK */
+ if (!element) {
+ LOGE("no source element was created.\n");
+ goto INIT_ERROR;
}
- /* generating debug info before returning error */
- MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-status-start");
+ /* take source element */
+ mainbin[MMPLAYER_M_SRC].id = MMPLAYER_M_SRC;
+ mainbin[MMPLAYER_M_SRC].gst = element;
+ element_bucket = g_list_append(element_bucket, &mainbin[MMPLAYER_M_SRC]);
- MMPLAYER_FLEAVE();
+ if ((MMPLAYER_IS_STREAMING(player)) && (player->streamer == NULL)) {
+ player->streamer = __mm_player_streaming_create();
+ __mm_player_streaming_initialize(player->streamer);
+ }
- return ret;
-}
+ if (MMPLAYER_IS_HTTP_PD(player)) {
+ gint pre_buffering_time = player->streamer->buffering_req.prebuffer_time;
-static int __gst_stop(mm_player_t* player)
-{
- GstStateChangeReturn change_ret = GST_STATE_CHANGE_SUCCESS;
- MMHandleType attrs = 0;
- gboolean rewind = FALSE;
- gint timeout = 0;
- int ret = MM_ERROR_NONE;
+ LOGD("Picked queue2 element(pre buffer : %d ms)....\n", pre_buffering_time);
+ element = gst_element_factory_make("queue2", "queue2");
+ if (!element) {
+ LOGE("failed to create http streaming buffer element\n");
+ goto INIT_ERROR;
+ }
- MMPLAYER_FENTER();
+ /* take it */
+ mainbin[MMPLAYER_M_MUXED_S_BUFFER].id = MMPLAYER_M_MUXED_S_BUFFER;
+ mainbin[MMPLAYER_M_MUXED_S_BUFFER].gst = element;
+ element_bucket = g_list_append(element_bucket, &mainbin[MMPLAYER_M_MUXED_S_BUFFER]);
- MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED);
- MMPLAYER_RETURN_VAL_IF_FAIL(player->pipeline->mainbin, MM_ERROR_PLAYER_NOT_INITIALIZED);
+ pre_buffering_time = (pre_buffering_time > 0) ? (pre_buffering_time) : (player->ini.http_buffering_time);
- LOGD("current state before doing transition");
- MMPLAYER_PENDING_STATE(player) = MM_PLAYER_STATE_READY;
- MMPLAYER_PRINT_STATE(player);
+ player->streamer->is_pd_mode = TRUE;
- attrs = MMPLAYER_GET_ATTRS(player);
- if (!attrs) {
- LOGE("cannot get content attribute\n");
- return MM_ERROR_PLAYER_INTERNAL;
+ __mm_player_streaming_set_queue2(player->streamer,
+ element,
+ TRUE,
+ player->ini.http_max_size_bytes, // + PLAYER_PD_EXT_MAX_SIZE_BYTE,
+ pre_buffering_time,
+ 1.0,
+ player->ini.http_buffering_limit,
+ MUXED_BUFFER_TYPE_MEM_QUEUE,
+ NULL,
+ 0);
}
+ if (MMPLAYER_IS_MS_BUFF_SRC(player)) {
+ if (player->v_stream_caps) {
+ es_video_queue = gst_element_factory_make("queue2", "video_queue");
+ if (!es_video_queue) {
+ LOGE("create es_video_queue for es player failed\n");
+ goto INIT_ERROR;
+ }
+ g_object_set(G_OBJECT(es_video_queue), "max-size-buffers", 2, NULL);
+ mainbin[MMPLAYER_M_V_BUFFER].id = MMPLAYER_M_V_BUFFER;
+ mainbin[MMPLAYER_M_V_BUFFER].gst = es_video_queue;
+ element_bucket = g_list_append(element_bucket, &mainbin[MMPLAYER_M_V_BUFFER]);
- /* Just set state to PAUESED and the rewind. it's usual player behavior. */
- timeout = MMPLAYER_STATE_CHANGE_TIMEOUT(player);
+ /* Adding audio appsrc to bucket */
+ if (player->a_stream_caps && elem_src_audio) {
+ mainbin[MMPLAYER_M_2ND_SRC].id = MMPLAYER_M_2ND_SRC;
+ mainbin[MMPLAYER_M_2ND_SRC].gst = elem_src_audio;
+ element_bucket = g_list_append(element_bucket, &mainbin[MMPLAYER_M_2ND_SRC]);
- if ((!MMPLAYER_IS_STREAMING(player) && !MMPLAYER_IS_MS_BUFF_SRC(player)) ||
- (player->streaming_type == STREAMING_SERVICE_VOD && player->videodec_linked))
- rewind = TRUE;
+ es_audio_queue = gst_element_factory_make("queue2", "audio_queue");
+ if (!es_audio_queue) {
+ LOGE("create es_audio_queue for es player failed\n");
+ goto INIT_ERROR;
+ }
+ g_object_set(G_OBJECT(es_audio_queue), "max-size-buffers", 2, NULL);
- if (player->es_player_push_mode || MMPLAYER_IS_HTTP_PD(player)) {
- /* disable the async state transition because there could be no data in the pipeline */
- __mmplayer_gst_handle_async(player, FALSE, MMPLAYER_SINK_ALL);
- }
+ mainbin[MMPLAYER_M_A_BUFFER].id = MMPLAYER_M_A_BUFFER;
+ mainbin[MMPLAYER_M_A_BUFFER].gst = es_audio_queue;
+ element_bucket = g_list_append(element_bucket, &mainbin[MMPLAYER_M_A_BUFFER]);
+ }
+ } else if (player->a_stream_caps) {
+ /* Only audio stream, no video */
+ es_audio_queue = gst_element_factory_make("queue2", "audio_queue");
+ if (!es_audio_queue) {
+ LOGE("create es_audio_queue for es player failed\n");
+ goto INIT_ERROR;
+ }
+ mainbin[MMPLAYER_M_A_BUFFER].id = MMPLAYER_M_A_BUFFER;
+ mainbin[MMPLAYER_M_A_BUFFER].gst = es_audio_queue;
+ element_bucket = g_list_append(element_bucket, &mainbin[MMPLAYER_M_A_BUFFER]);
+ }
- /* set gst state */
- ret = __mmplayer_gst_set_state(player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_STATE_PAUSED, FALSE, timeout);
+ if (player->s_stream_caps && elem_src_subtitle) {
+ mainbin[MMPLAYER_M_SUBSRC].id = MMPLAYER_M_SUBSRC;
+ mainbin[MMPLAYER_M_SUBSRC].gst = elem_src_subtitle;
+ element_bucket = g_list_append(element_bucket, &mainbin[MMPLAYER_M_SUBSRC]);
- if (player->es_player_push_mode || MMPLAYER_IS_HTTP_PD(player)) {
- /* enable the async state transition as default operation */
- __mmplayer_gst_handle_async(player, TRUE, MMPLAYER_SINK_ALL);
+ es_subtitle_queue = gst_element_factory_make("queue2", "subtitle_queue");
+ if (!es_subtitle_queue) {
+ LOGE("create es_subtitle_queue for es player failed\n");
+ goto INIT_ERROR;
+ }
+			mainbin[MMPLAYER_M_S_BUFFER].id = MMPLAYER_M_S_BUFFER; /* was MMPLAYER_M_V_BUFFER: copy-paste bug; slot id must match its own index like the video/audio cases above */
+ mainbin[MMPLAYER_M_S_BUFFER].gst = es_subtitle_queue;
+ element_bucket = g_list_append(element_bucket, &mainbin[MMPLAYER_M_S_BUFFER]);
+ }
}
- /* return if set_state has failed */
- if (ret != MM_ERROR_NONE) {
- LOGE("failed to set state.\n");
- return ret;
- }
+ /* create autoplugging element if src element is not a rtsp src */
+ if ((player->profile.uri_type != MM_PLAYER_URI_TYPE_URL_RTSP) &&
+ (player->profile.uri_type != MM_PLAYER_URI_TYPE_MS_BUFF)) {
+ element = NULL;
+ enum MainElementID elemId = MMPLAYER_M_NUM;
- /* rewind */
- if (rewind) {
- if (!__gst_seek(player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, player->playback_rate,
- GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH, GST_SEEK_TYPE_SET, 0,
- GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE)) {
- LOGW("failed to rewind\n");
- ret = MM_ERROR_PLAYER_SEEK;
+ if (((MMPLAYER_IS_HTTP_PD(player)) ||
+ (!MMPLAYER_IS_HTTP_STREAMING(player)))) {
+ elemId = MMPLAYER_M_AUTOPLUG;
+ element = __mmplayer_create_decodebin(player);
+ if (element) {
+ /* default size of mq in decodebin is 2M
+ * but it can cause blocking issue during seeking depends on content. */
+ g_object_set(G_OBJECT(element), "max-size-bytes", (5*1024*1024), NULL);
+ }
+ need_state_holder = FALSE;
+ } else {
+ elemId = MMPLAYER_M_TYPEFIND;
+ element = gst_element_factory_make("typefind", "typefinder");
+ MMPLAYER_SIGNAL_CONNECT(player, element, MM_PLAYER_SIGNAL_TYPE_AUTOPLUG, "have-type",
+ G_CALLBACK(__mmplayer_typefind_have_type), (gpointer)player);
}
- }
- /* initialize */
- player->sent_bos = FALSE;
+ /* check autoplug element is OK */
+ if (!element) {
+ LOGE("can not create element(%d)\n", elemId);
+ goto INIT_ERROR;
+ }
- if (player->es_player_push_mode) //for cloudgame
- timeout = 0;
+ mainbin[elemId].id = elemId;
+ mainbin[elemId].gst = element;
- /* wait for seek to complete */
- change_ret = gst_element_get_state(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, NULL, NULL, timeout * GST_SECOND);
- if (change_ret == GST_STATE_CHANGE_SUCCESS || change_ret == GST_STATE_CHANGE_NO_PREROLL) {
- MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_READY);
- } else {
- LOGE("fail to stop player.\n");
- ret = MM_ERROR_PLAYER_INTERNAL;
- __mmplayer_dump_pipeline_state(player);
+ element_bucket = g_list_append(element_bucket, &mainbin[elemId]);
}
- /* generate dot file if enabled */
- MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-status-stop");
+ /* add elements to pipeline */
+ if (!__mmplayer_gst_element_add_bucket_to_bin(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst), element_bucket)) {
+ LOGE("Failed to add elements to pipeline\n");
+ goto INIT_ERROR;
+ }
- MMPLAYER_FLEAVE();
- return ret;
-}
+ /* linking elements in the bucket by added order. */
+ if (__mmplayer_gst_element_link_bucket(element_bucket) == -1) {
+ LOGE("Failed to link some elements\n");
+ goto INIT_ERROR;
+ }
-int __gst_pause(mm_player_t* player, gboolean async)
-{
- int ret = MM_ERROR_NONE;
- MMPLAYER_FENTER();
+ /* create fakesink element for keeping the pipeline state PAUSED. if needed */
+ if (need_state_holder) {
+ /* create */
+ mainbin[MMPLAYER_M_SRC_FAKESINK].id = MMPLAYER_M_SRC_FAKESINK;
+ mainbin[MMPLAYER_M_SRC_FAKESINK].gst = gst_element_factory_make("fakesink", "state-holder");
- MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED);
- MMPLAYER_RETURN_VAL_IF_FAIL(player->pipeline->mainbin, MM_ERROR_PLAYER_NOT_INITIALIZED);
+ if (!mainbin[MMPLAYER_M_SRC_FAKESINK].gst) {
+ LOGE("fakesink element could not be created\n");
+ goto INIT_ERROR;
+ }
+ GST_OBJECT_FLAG_UNSET(mainbin[MMPLAYER_M_SRC_FAKESINK].gst, GST_ELEMENT_FLAG_SINK);
- LOGD("current state before doing transition");
- MMPLAYER_PENDING_STATE(player) = MM_PLAYER_STATE_PAUSED;
- MMPLAYER_PRINT_STATE(player);
+ /* take ownership of fakesink. we are reusing it */
+ gst_object_ref(mainbin[MMPLAYER_M_SRC_FAKESINK].gst);
- /* set pipeline status to PAUSED */
- ret = __mmplayer_gst_set_state(player,
- player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_STATE_PAUSED, async, MMPLAYER_STATE_CHANGE_TIMEOUT(player));
+ /* add */
+ if (FALSE == gst_bin_add(GST_BIN(mainbin[MMPLAYER_M_PIPE].gst),
+ mainbin[MMPLAYER_M_SRC_FAKESINK].gst)) {
+ LOGE("failed to add fakesink to bin\n");
+ goto INIT_ERROR;
+ }
+ }
- if (FALSE == async) {
- if (ret != MM_ERROR_NONE) {
- GstMessage *msg = NULL;
- GTimer *timer = NULL;
- gdouble MAX_TIMEOUT_SEC = 3;
+ /* now we have completed mainbin. take it */
+ player->pipeline->mainbin = mainbin;
- LOGE("failed to set state to PAUSED");
+ if (MMPLAYER_IS_MS_BUFF_SRC(player)) {
+ GstPad *srcpad = NULL;
- if (!player->bus_watcher) {
- LOGE("there is no bus msg thread. pipeline is shutting down.");
- return ret;
- }
+ if (mainbin[MMPLAYER_M_V_BUFFER].gst) {
+ srcpad = gst_element_get_static_pad(mainbin[MMPLAYER_M_V_BUFFER].gst, "src");
+ if (srcpad) {
+ __mmplayer_gst_create_decoder(player,
+ MM_PLAYER_TRACK_TYPE_VIDEO,
+ srcpad,
+ MMPLAYER_M_AUTOPLUG_V_DEC,
+ "video_decodebin");
- if (player->msg_posted) {
- LOGE("error msg is already posted.");
- return ret;
+ gst_object_unref(GST_OBJECT(srcpad));
+ srcpad = NULL;
}
+ }
- timer = g_timer_new();
- g_timer_start(timer);
-
- GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst));
+ if ((player->a_stream_caps) && (mainbin[MMPLAYER_M_A_BUFFER].gst)) {
+ srcpad = gst_element_get_static_pad(mainbin[MMPLAYER_M_A_BUFFER].gst, "src");
+ if (srcpad) {
+ __mmplayer_gst_create_decoder(player,
+ MM_PLAYER_TRACK_TYPE_AUDIO,
+ srcpad,
+ MMPLAYER_M_AUTOPLUG_A_DEC,
+ "audio_decodebin");
- do {
- msg = gst_bus_timed_pop(bus, 100 * GST_MSECOND);
- if (msg) {
- if (GST_MESSAGE_TYPE(msg) == GST_MESSAGE_ERROR) {
- GError *error = NULL;
+ gst_object_unref(GST_OBJECT(srcpad));
+ srcpad = NULL;
+ } // else error
+ } // else error
- /* parse error code */
- gst_message_parse_error(msg, &error, NULL);
+ if (mainbin[MMPLAYER_M_S_BUFFER].gst)
+ __mmplayer_try_to_plug_decodebin(player, gst_element_get_static_pad(mainbin[MMPLAYER_M_S_BUFFER].gst, "src"), player->s_stream_caps);
+ }
- if (gst_structure_has_name(gst_message_get_structure(msg), "streaming_error")) {
- /* Note : the streaming error from the streaming source is handled
- * using __mmplayer_handle_streaming_error.
- */
- __mmplayer_handle_streaming_error(player, msg);
+	/* Note : check whether subtitle attribute uri is set. If uri is set, then try to play subtitle file */
+ if (__mmplayer_check_subtitle(player)) {
+ if (MM_ERROR_NONE != __mmplayer_gst_create_text_pipeline(player))
+ LOGE("fail to create text pipeline");
+ }
- } else if (error) {
- LOGE("paring error posted from bus, domain : %s, code : %d", g_quark_to_string(error->domain), error->code);
+ /* connect bus callback */
+ bus = gst_pipeline_get_bus(GST_PIPELINE(mainbin[MMPLAYER_M_PIPE].gst));
+ if (!bus) {
+ LOGE("cannot get bus from pipeline.\n");
+ goto INIT_ERROR;
+ }
- if (error->domain == GST_STREAM_ERROR)
- ret = __gst_handle_stream_error(player, error, msg);
- else if (error->domain == GST_RESOURCE_ERROR)
- ret = __gst_handle_resource_error(player, error->code, NULL);
- else if (error->domain == GST_LIBRARY_ERROR)
- ret = __gst_handle_library_error(player, error->code);
- else if (error->domain == GST_CORE_ERROR)
- ret = __gst_handle_core_error(player, error->code);
+ player->bus_watcher = gst_bus_add_watch(bus, (GstBusFunc)__mmplayer_gst_msg_push, player);
- g_error_free(error);
- }
- player->msg_posted = TRUE;
- }
- gst_message_unref(msg);
- }
- } while (!player->msg_posted && (g_timer_elapsed(timer, NULL) < MAX_TIMEOUT_SEC));
- /* clean */
- gst_object_unref(bus);
- g_timer_stop(timer);
- g_timer_destroy(timer);
+ player->context.thread_default = g_main_context_get_thread_default();
- return ret;
+ if (player->context.thread_default == NULL) {
+ player->context.thread_default = g_main_context_default();
+ LOGD("thread-default context is the global default context");
+ }
+ LOGW("bus watcher thread context = %p, watcher : %d", player->context.thread_default, player->bus_watcher);
- } else if ((!MMPLAYER_IS_RTSP_STREAMING(player)) && (!player->video_stream_cb) &&
- (!player->pipeline->videobin) && (!player->pipeline->audiobin)) {
+ /* set sync handler to get tag synchronously */
+ gst_bus_set_sync_handler(bus, __mmplayer_bus_sync_callback, player, NULL);
- return MM_ERROR_PLAYER_CODEC_NOT_FOUND;
+ /* finished */
+ gst_object_unref(GST_OBJECT(bus));
+ g_list_free(element_bucket);
- } else {
- MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_PAUSED);
- }
+	/* create gst bus_msg_cb thread */
+ g_mutex_init(&player->bus_msg_thread_mutex);
+ g_cond_init(&player->bus_msg_thread_cond);
+ player->bus_msg_thread_exit = FALSE;
+ player->bus_msg_thread =
+ g_thread_try_new("gst_bus_msg_thread", __mmplayer_gst_bus_msg_thread, (gpointer)player, NULL);
+ if (!player->bus_msg_thread) {
+ LOGE("failed to create gst BUS msg thread");
+ g_mutex_clear(&player->bus_msg_thread_mutex);
+ g_cond_clear(&player->bus_msg_thread_cond);
+ goto INIT_ERROR;
}
- /* generate dot file before returning error */
- MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-status-pause");
-
MMPLAYER_FLEAVE();
- return ret;
-}
+ return MM_ERROR_NONE;
-int __gst_resume(mm_player_t* player, gboolean async)
-{
- int ret = MM_ERROR_NONE;
- gint timeout = 0;
+INIT_ERROR:
+ __mmplayer_gst_destroy_pipeline(player);
+ g_list_free(element_bucket);
- MMPLAYER_FENTER();
+ if (mainbin) {
+ /* release element which are not added to bin */
+ for (i = 1; i < MMPLAYER_M_NUM; i++) {
+ /* NOTE : skip pipeline */
+ if (mainbin[i].gst) {
+ GstObject* parent = NULL;
+ parent = gst_element_get_parent(mainbin[i].gst);
- MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline,
- MM_ERROR_PLAYER_NOT_INITIALIZED);
+ if (!parent) {
+ gst_object_unref(GST_OBJECT(mainbin[i].gst));
+ mainbin[i].gst = NULL;
+ } else
+ gst_object_unref(GST_OBJECT(parent));
+ }
+ }
- LOGD("current state before doing transition");
- MMPLAYER_PENDING_STATE(player) = MM_PLAYER_STATE_PLAYING;
- MMPLAYER_PRINT_STATE(player);
+ /* release pipeline with it's childs */
+ if (mainbin[MMPLAYER_M_PIPE].gst)
+ gst_object_unref(GST_OBJECT(mainbin[MMPLAYER_M_PIPE].gst));
- if (async)
- LOGD("do async state transition to PLAYING");
+ MMPLAYER_FREEIF(mainbin);
+ }
- /* set pipeline state to PLAYING */
- timeout = MMPLAYER_STATE_CHANGE_TIMEOUT(player);
+ MMPLAYER_FREEIF(player->pipeline);
+ return MM_ERROR_PLAYER_INTERNAL;
+}
- ret = __mmplayer_gst_set_state(player,
- player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_STATE_PLAYING, async, timeout);
- if (ret != MM_ERROR_NONE) {
- LOGE("failed to set state to PLAYING");
- goto EXIT;
- } else {
- if (async == FALSE)
- MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_PLAYING);
- }
+static void
+__mmplayer_reset_gapless_state(mm_player_t* player)
+{
+ MMPLAYER_FENTER();
+ MMPLAYER_RETURN_IF_FAIL(player
+ && player->pipeline
+ && player->pipeline->audiobin
+ && player->pipeline->audiobin[MMPLAYER_A_BIN].gst);
-EXIT:
- /* generate dot file */
- MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-status-resume");
+ memset(&player->gapless, 0, sizeof(mm_player_gapless_t));
MMPLAYER_FLEAVE();
-
- return ret;
+ return;
}
static int
-__gst_set_position(mm_player_t* player, int format, gint64 position, gboolean internal_called)
+__mmplayer_gst_destroy_pipeline(mm_player_t* player)
{
- gint64 dur_nsec = 0;
- gint64 pos_nsec = 0;
- gboolean ret = TRUE;
- gboolean accurated = FALSE;
- GstSeekFlags seek_flags = GST_SEEK_FLAG_FLUSH;
+ gint timeout = 0;
+ int ret = MM_ERROR_NONE;
MMPLAYER_FENTER();
- MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline, MM_ERROR_PLAYER_NOT_INITIALIZED);
- MMPLAYER_RETURN_VAL_IF_FAIL(!MMPLAYER_IS_LIVE_STREAMING(player), MM_ERROR_PLAYER_NO_OP);
-
- if (MMPLAYER_CURRENT_STATE(player) != MM_PLAYER_STATE_PLAYING
- && MMPLAYER_CURRENT_STATE(player) != MM_PLAYER_STATE_PAUSED)
- goto PENDING;
-
- if (!MMPLAYER_IS_MS_BUFF_SRC(player)) {
- /* check duration */
- /* NOTE : duration cannot be zero except live streaming.
- * Since some element could have some timing problemn with quering duration, try again.
- */
- if (player->duration == 0) {
- if (!gst_element_query_duration(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_FORMAT_TIME, &dur_nsec)) {
- /* For RTSP Streaming , duration is not returned in READY state. So seek to the previous position does not work properly.
- * Added a patch to postpone the actual seek when state changes to PLAY. Sending a fake SEEK_COMPLETED event to finish the current request. */
- if ((MMPLAYER_IS_RTSP_STREAMING(player)) && (__mmplayer_get_stream_service_type(player) == STREAMING_SERVICE_VOD)) {
- player->pending_seek.is_pending = TRUE;
- player->pending_seek.format = format;
- player->pending_seek.pos = position;
- player->seek_state = MMPLAYER_SEEK_NONE;
- MMPLAYER_POST_MSG(player, MM_MESSAGE_SEEK_COMPLETED, NULL);
- return MM_ERROR_NONE;
- } else {
- goto SEEK_ERROR;
- }
- }
- player->duration = dur_nsec;
- }
- }
- LOGD("playback rate: %f\n", player->playback_rate);
-
- mm_attrs_get_int_by_name(player->attrs, "accurate_seek", &accurated);
- if (accurated)
- seek_flags |= GST_SEEK_FLAG_ACCURATE;
- else
- seek_flags |= GST_SEEK_FLAG_KEY_UNIT;
-
- /* do seek */
- switch (format) {
- case MM_PLAYER_POS_FORMAT_TIME:
- {
- if (!MMPLAYER_IS_MS_BUFF_SRC(player)) {
- GstQuery *query = NULL;
- gboolean seekable = FALSE;
-
- /* check position is valid or not */
- if (position > player->duration)
- goto INVALID_ARGS;
-
- query = gst_query_new_seeking(GST_FORMAT_TIME);
- if (gst_element_query(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, query)) {
- gst_query_parse_seeking(query, NULL, &seekable, NULL, NULL);
- gst_query_unref(query);
-
- if (!seekable) {
- LOGW("non-seekable content");
- player->seek_state = MMPLAYER_SEEK_NONE;
- return MM_ERROR_PLAYER_NO_OP;
- }
- } else {
- LOGW("failed to get seeking query");
- gst_query_unref(query); /* keep seeking operation */
- }
- LOGD("seeking to(%"G_GINT64_FORMAT") nsec, duration is %"G_GINT64_FORMAT" nsec\n", position, player->duration);
+ MMPLAYER_RETURN_VAL_IF_FAIL(player, MM_ERROR_INVALID_HANDLE);
- /* For rtspsrc stack , npt-start value coming from server is used for finding the current position.
- But when a rtsp clip (especially from Youtube Desktop View) is paused and kept for sometime,npt-start is still increasing.
- This causes problem is position calculation during normal pause resume scenarios also.
- Currently during seek , we are sending the current position to rtspsrc module for position saving for later use. */
- if ((MMPLAYER_IS_RTSP_STREAMING(player)) &&
- (__mmplayer_get_stream_service_type(player) == STREAMING_SERVICE_VOD)) {
- if (!gst_element_query_position(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_FORMAT_TIME, &pos_nsec))
- LOGW("getting current position failed in seek\n");
+ /* cleanup stuffs */
+ MMPLAYER_FREEIF(player->type);
+ player->have_dynamic_pad = FALSE;
+ player->no_more_pad = FALSE;
+ player->num_dynamic_pad = 0;
+ player->demux_pad_index = 0;
+ player->use_deinterleave = FALSE;
+ player->max_audio_channels = 0;
+ player->video_share_api_delta = 0;
+ player->video_share_clock_delta = 0;
+ player->video_hub_download_mode = 0;
- player->last_position = pos_nsec;
- g_object_set(player->pipeline->mainbin[MMPLAYER_M_SRC].gst, "resume-position", player->last_position, NULL);
- }
+ MMPLAYER_SUBTITLE_INFO_LOCK(player);
+ player->subtitle_language_list = NULL;
+ MMPLAYER_SUBTITLE_INFO_UNLOCK(player);
- if (player->seek_state != MMPLAYER_SEEK_NONE) {
- LOGD("not completed seek");
- return MM_ERROR_PLAYER_DOING_SEEK;
- }
- }
+ __mmplayer_reset_gapless_state(player);
- if (!internal_called)
- player->seek_state = MMPLAYER_SEEK_IN_PROGRESS;
+ if (player->streamer) {
+ __mm_player_streaming_deinitialize(player->streamer);
+ __mm_player_streaming_destroy(player->streamer);
+ player->streamer = NULL;
+ }
- if ((MMPLAYER_IS_HTTP_STREAMING(player)) && (!player->videodec_linked)) {
- gint64 cur_time = 0;
+ /* cleanup unlinked mime type */
+ MMPLAYER_FREEIF(player->unlinked_audio_mime);
+ MMPLAYER_FREEIF(player->unlinked_video_mime);
+ MMPLAYER_FREEIF(player->unlinked_demuxer_mime);
- /* get current position */
- gst_element_query_position(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_FORMAT_TIME, &cur_time);
+ /* cleanup running stuffs */
+ __mmplayer_cancel_eos_timer(player);
- /* flush */
- GstEvent *event = gst_event_new_seek(1.0,
- GST_FORMAT_TIME,
- (GstSeekFlags)GST_SEEK_FLAG_FLUSH,
- GST_SEEK_TYPE_SET, cur_time,
- GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
- if (event)
- __gst_send_event_to_sink(player, event);
+ /* cleanup gst stuffs */
+ if (player->pipeline) {
+ MMPlayerGstElement* mainbin = player->pipeline->mainbin;
+ GstTagList* tag_list = player->pipeline->tag_list;
- if (!MMPLAYER_IS_RTSP_STREAMING(player))
- __gst_pause(player, FALSE);
- }
+		/* first we need to disconnect all signal handlers */
+ __mmplayer_release_signal_connection(player, MM_PLAYER_SIGNAL_TYPE_ALL);
- pos_nsec = position;
+ if (mainbin) {
+ MMPlayerGstElement* audiobin = player->pipeline->audiobin;
+ MMPlayerGstElement* videobin = player->pipeline->videobin;
+ MMPlayerGstElement* textbin = player->pipeline->textbin;
+ GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(mainbin[MMPLAYER_M_PIPE].gst));
+ gst_bus_set_sync_handler(bus, NULL, NULL, NULL);
+ gst_object_unref(bus);
- /* rtsp streaming case, there is no sink after READY TO PAUSE state(no preroll state change).
- that's why set position through property. */
- if ((MMPLAYER_IS_RTSP_STREAMING(player)) &&
- (MMPLAYER_CURRENT_STATE(player) == MM_PLAYER_STATE_PAUSED) &&
- (MMPLAYER_PREV_STATE(player) == MM_PLAYER_STATE_READY) &&
- (!player->videodec_linked) && (!player->audiodec_linked)) {
+ timeout = MMPLAYER_STATE_CHANGE_TIMEOUT(player);
+ ret = __mmplayer_gst_set_state(player, mainbin[MMPLAYER_M_PIPE].gst, GST_STATE_NULL, FALSE, timeout);
+ if (ret != MM_ERROR_NONE) {
+ LOGE("fail to change state to NULL\n");
+ return MM_ERROR_PLAYER_INTERNAL;
+ }
- g_object_set(player->pipeline->mainbin[MMPLAYER_M_SRC].gst, "pending-start-position", pos_nsec, NULL);
- LOGD("[%s] set position =%"GST_TIME_FORMAT,
- GST_ELEMENT_NAME(player->pipeline->mainbin[MMPLAYER_M_SRC].gst), GST_TIME_ARGS(pos_nsec));
- player->seek_state = MMPLAYER_SEEK_NONE;
- MMPLAYER_POST_MSG(player, MM_MESSAGE_SEEK_COMPLETED, NULL);
- } else {
- ret = __gst_seek(player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, player->playback_rate,
- GST_FORMAT_TIME, seek_flags,
- GST_SEEK_TYPE_SET, pos_nsec, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
- }
+ LOGW("succeeded in changing state to NULL\n");
- if (!ret) {
- LOGE("failed to set position.");
- goto SEEK_ERROR;
- }
- }
- break;
+ gst_object_unref(GST_OBJECT(mainbin[MMPLAYER_M_PIPE].gst));
- case MM_PLAYER_POS_FORMAT_PERCENT:
- {
- LOGD("seeking to %"G_GINT64_FORMAT"%%", position);
+ /* free fakesink */
+ if (mainbin[MMPLAYER_M_SRC_FAKESINK].gst)
+ gst_object_unref(GST_OBJECT(mainbin[MMPLAYER_M_SRC_FAKESINK].gst));
- if (player->seek_state != MMPLAYER_SEEK_NONE) {
- LOGD("not completed seek");
- return MM_ERROR_PLAYER_DOING_SEEK;
+ /* free avsysaudiosink
+		   avsysaudiosink should be unreffed when destroying the pipeline just after starting playback with BT.
+ Because audiosink is created but never added to bin, and therefore it will not be unref when pipeline is destroyed.
+ */
+ MMPLAYER_FREEIF(audiobin);
+ MMPLAYER_FREEIF(videobin);
+ MMPLAYER_FREEIF(textbin);
+ MMPLAYER_FREEIF(mainbin);
}
- if (!internal_called)
- player->seek_state = MMPLAYER_SEEK_IN_PROGRESS;
+ if (tag_list)
+ gst_tag_list_free(tag_list);
- /* FIXIT : why don't we use 'GST_FORMAT_PERCENT' */
- pos_nsec = (gint64)((position * player->duration) / 100);
- ret = __gst_seek(player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, player->playback_rate,
- GST_FORMAT_TIME, seek_flags,
- GST_SEEK_TYPE_SET, pos_nsec, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
- if (!ret) {
- LOGE("failed to set position. pos[%"G_GINT64_FORMAT"] dur[%"G_GINT64_FORMAT"] ", pos_nsec, player->duration);
- goto SEEK_ERROR;
- }
+ MMPLAYER_FREEIF(player->pipeline);
}
- break;
+ MMPLAYER_FREEIF(player->album_art);
- default:
- goto INVALID_ARGS;
+ if (player->v_stream_caps) {
+ gst_caps_unref(player->v_stream_caps);
+ player->v_stream_caps = NULL;
}
-
- /* NOTE : store last seeking point to overcome some bad operation
- * (returning zero when getting current position) of some elements
- */
- player->last_position = pos_nsec;
-
- /* MSL should guarante playback rate when seek is selected during trick play of fast forward. */
- if (player->playback_rate > 1.0)
- _mmplayer_set_playspeed((MMHandleType)player, player->playback_rate, FALSE);
-
- if ((!internal_called) &&
- (player->streamer) && (player->streamer->buffering_state & MM_PLAYER_BUFFERING_IN_PROGRESS)) {
- LOGD("buffering should be reset after seeking");
- player->streamer->buffering_state = MM_PLAYER_BUFFERING_ABORT;
- player->streamer->buffering_percent = 100; /* after seeking, new per can be non-zero. */
+ if (player->a_stream_caps) {
+ gst_caps_unref(player->a_stream_caps);
+ player->a_stream_caps = NULL;
}
- MMPLAYER_FLEAVE();
- return MM_ERROR_NONE;
-
-PENDING:
- player->pending_seek.is_pending = TRUE;
- player->pending_seek.format = format;
- player->pending_seek.pos = position;
-
- LOGW("player current-state : %s, pending-state : %s, just preserve pending position(%"G_GINT64_FORMAT").\n",
- MMPLAYER_STATE_GET_NAME(MMPLAYER_CURRENT_STATE(player)),
- MMPLAYER_STATE_GET_NAME(MMPLAYER_PENDING_STATE(player)),
- player->pending_seek.pos);
-
- return MM_ERROR_NONE;
-
-INVALID_ARGS:
- LOGE("invalid arguments, position: %"G_GINT64_FORMAT" dur : %"G_GINT64_FORMAT" format : %d \n", position, player->duration, format);
- return MM_ERROR_INVALID_ARGUMENT;
-
-SEEK_ERROR:
- player->seek_state = MMPLAYER_SEEK_NONE;
- return MM_ERROR_PLAYER_SEEK;
-}
-
-#define TRICKPLAY_OFFSET GST_MSECOND
-
-static int
-__gst_get_position(mm_player_t* player, int format, gint64* position)
-{
- MMPlayerStateType current_state = MM_PLAYER_STATE_NONE;
- gint64 pos_nsec = 0;
- gboolean ret = TRUE;
-
- MMPLAYER_RETURN_VAL_IF_FAIL(player && position && player->pipeline && player->pipeline->mainbin,
- MM_ERROR_PLAYER_NOT_INITIALIZED);
-
- current_state = MMPLAYER_CURRENT_STATE(player);
-
- /* NOTE : query position except paused state to overcome some bad operation
- * please refer to below comments in details
- */
- if (current_state != MM_PLAYER_STATE_PAUSED)
- ret = gst_element_query_position(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_FORMAT_TIME, &pos_nsec);
-
- /* NOTE : get last point to overcome some bad operation of some elements
- *(returning zero when getting current position in paused state
- * and when failed to get postion during seeking
- */
- if ((current_state == MM_PLAYER_STATE_PAUSED) || (!ret)) {
- LOGD("pos_nsec = %"GST_TIME_FORMAT" and ret = %d and state = %d", GST_TIME_ARGS(pos_nsec), ret, current_state);
-
- if (player->playback_rate < 0.0)
- pos_nsec = player->last_position - TRICKPLAY_OFFSET;
- else
- pos_nsec = player->last_position;
-
- if (!ret)
- pos_nsec = player->last_position;
- else
- player->last_position = pos_nsec;
-
- LOGD("returning last point : %"GST_TIME_FORMAT, GST_TIME_ARGS(pos_nsec));
+ if (player->s_stream_caps) {
+ gst_caps_unref(player->s_stream_caps);
+ player->s_stream_caps = NULL;
+ }
+ _mmplayer_track_destroy(player);
- } else {
- if (player->duration > 0 && pos_nsec > player->duration)
- pos_nsec = player->duration;
+ if (player->sink_elements)
+ g_list_free(player->sink_elements);
+ player->sink_elements = NULL;
- player->last_position = pos_nsec;
+ if (player->bufmgr) {
+ tbm_bufmgr_deinit(player->bufmgr);
+ player->bufmgr = NULL;
}
- switch (format) {
- case MM_PLAYER_POS_FORMAT_TIME:
- *position = pos_nsec;
- break;
+ LOGW("finished destroy pipeline\n");
- case MM_PLAYER_POS_FORMAT_PERCENT:
- {
- if (player->duration <= 0) {
- LOGD("duration is [%"G_GINT64_FORMAT"], so returning position 0\n", player->duration);
- *position = 0;
- } else {
- LOGD("position is [%"G_GINT64_FORMAT"] nsec , duration is [%"G_GINT64_FORMAT"] nsec", pos_nsec, player->duration);
- *position = (gint64)(pos_nsec * 100 / player->duration);
- }
- break;
- }
- default:
- return MM_ERROR_PLAYER_INTERNAL;
- }
+ MMPLAYER_FLEAVE();
- return MM_ERROR_NONE;
+ return ret;
}
-
-static int __gst_get_buffer_position(mm_player_t* player, int format, unsigned long* start_pos, unsigned long* stop_pos)
+static int __gst_realize(mm_player_t* player)
{
-#define STREAMING_IS_FINISHED 0
-#define BUFFERING_MAX_PER 100
-#define DEFAULT_PER_VALUE -1
-#define CHECK_PERCENT_VALUE(a, min, max)(((a) > (min)) ? (((a) < (max)) ? (a) : (max)) : (min))
-
- MMPlayerGstElement *mainbin = NULL;
- gint start_per = DEFAULT_PER_VALUE, stop_per = DEFAULT_PER_VALUE;
- gint64 buffered_total = 0;
- gint64 position = 0;
- gint buffered_sec = -1;
- GstBufferingMode mode = GST_BUFFERING_STREAM;
- gint64 content_size_time = player->duration;
- guint64 content_size_bytes = player->http_content_size;
+ gint timeout = 0;
+ int ret = MM_ERROR_NONE;
- MMPLAYER_RETURN_VAL_IF_FAIL(player &&
- player->pipeline &&
- player->pipeline->mainbin,
- MM_ERROR_PLAYER_NOT_INITIALIZED);
+ MMPLAYER_FENTER();
- MMPLAYER_RETURN_VAL_IF_FAIL(start_pos && stop_pos, MM_ERROR_INVALID_ARGUMENT);
+ MMPLAYER_RETURN_VAL_IF_FAIL(player, MM_ERROR_PLAYER_NOT_INITIALIZED);
- *start_pos = 0;
- *stop_pos = 0;
+ MMPLAYER_PENDING_STATE(player) = MM_PLAYER_STATE_READY;
- if (!MMPLAYER_IS_HTTP_STREAMING(player)) {
- /* and rtsp is not ready yet. */
- LOGW("it's only used for http streaming case.\n");
- return MM_ERROR_PLAYER_NO_OP;
+ ret = __mmplayer_gst_create_pipeline(player);
+ if (ret) {
+ LOGE("failed to create pipeline\n");
+ return ret;
}
- if (format != MM_PLAYER_POS_FORMAT_PERCENT) {
- LOGW("Time format is not supported yet.\n");
- return MM_ERROR_INVALID_ARGUMENT;
- }
+ /* set pipeline state to READY */
+ /* NOTE : state change to READY must be performed sync. */
+ timeout = MMPLAYER_STATE_CHANGE_TIMEOUT(player);
+ ret = __mmplayer_gst_set_state(player,
+ player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_STATE_READY, FALSE, timeout);
- if (content_size_time <= 0 || content_size_bytes <= 0) {
- LOGW("there is no content size.");
- return MM_ERROR_NONE;
+ if (ret != MM_ERROR_NONE) {
+ /* return error if failed to set state */
+ LOGE("failed to set READY state");
+ return ret;
}
- if (__gst_get_position(player, MM_PLAYER_POS_FORMAT_TIME, &position) != MM_ERROR_NONE) {
- LOGW("fail to get current position.");
- return MM_ERROR_NONE;
- }
+ MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_READY);
- LOGD("pos %"G_GINT64_FORMAT" msec, dur %d sec, len %"G_GUINT64_FORMAT" bytes",
- GST_TIME_AS_MSECONDS(position), (guint)GST_TIME_AS_SECONDS(content_size_time), content_size_bytes);
+ /* create dot before error-return. for debugging */
+ MMPLAYER_GENERATE_DOT_IF_ENABLED(player, "pipeline-status-realize");
- mainbin = player->pipeline->mainbin;
- start_per = (gint)(floor(100 *(gdouble)position / (gdouble)content_size_time));
-
- if (mainbin[MMPLAYER_M_MUXED_S_BUFFER].gst) {
- GstQuery *query = NULL;
- gint byte_in_rate = 0, byte_out_rate = 0;
- gint64 estimated_total = 0;
-
- query = gst_query_new_buffering(GST_FORMAT_BYTES);
- if (!query || !gst_element_query(mainbin[MMPLAYER_M_MUXED_S_BUFFER].gst, query)) {
- LOGW("fail to get buffering query from queue2");
- if (query)
- gst_query_unref(query);
- return MM_ERROR_NONE;
- }
+ MMPLAYER_FLEAVE();
- gst_query_parse_buffering_stats(query, &mode, &byte_in_rate, &byte_out_rate, NULL);
- LOGD("mode %d, in_rate %d, out_rate %d", mode, byte_in_rate, byte_out_rate);
+ return ret;
+}
- if (mode == GST_BUFFERING_STREAM) {
- /* using only queue in case of push mode(ts / mp3) */
- if (gst_element_query_position(mainbin[MMPLAYER_M_SRC].gst,
- GST_FORMAT_BYTES, &buffered_total)) {
- LOGD("buffered_total %"G_GINT64_FORMAT, buffered_total);
- stop_per = 100 * buffered_total / content_size_bytes;
- }
- } else {
- /* GST_BUFFERING_TIMESHIFT or GST_BUFFERING_DOWNLOAD */
- guint idx = 0;
- guint num_of_ranges = 0;
- gint64 start_byte = 0, stop_byte = 0;
-
- gst_query_parse_buffering_range(query, NULL, NULL, NULL, &estimated_total);
- if (estimated_total != STREAMING_IS_FINISHED) {
- /* buffered size info from queue2 */
- num_of_ranges = gst_query_get_n_buffering_ranges(query);
- for (idx = 0; idx < num_of_ranges; idx++) {
- gst_query_parse_nth_buffering_range(query, idx, &start_byte, &stop_byte);
- LOGD("range %d, %"G_GINT64_FORMAT" ~ %"G_GUINT64_FORMAT, idx, start_byte, stop_byte);
-
- buffered_total += (stop_byte - start_byte);
- }
- } else
- stop_per = BUFFERING_MAX_PER;
- }
- gst_query_unref(query);
- }
+static int __gst_unrealize(mm_player_t* player)
+{
+ int ret = MM_ERROR_NONE;
- if (stop_per == DEFAULT_PER_VALUE) {
- guint dur_sec = (guint)(content_size_time/GST_SECOND);
- if (dur_sec > 0) {
- guint avg_byterate = (guint)(content_size_bytes/dur_sec);
+ MMPLAYER_FENTER();
- /* buffered size info from multiqueue */
- if (mainbin[MMPLAYER_M_DEMUXED_S_BUFFER].gst) {
- guint curr_size_bytes = 0;
- g_object_get(G_OBJECT(mainbin[MMPLAYER_M_DEMUXED_S_BUFFER].gst),
- "curr-size-bytes", &curr_size_bytes, NULL);
- LOGD("curr_size_bytes of multiqueue = %d", curr_size_bytes);
- buffered_total += curr_size_bytes;
- }
+ MMPLAYER_RETURN_VAL_IF_FAIL(player, MM_ERROR_PLAYER_NOT_INITIALIZED);
- if (avg_byterate > 0)
- buffered_sec = (gint)(ceil((gdouble)buffered_total/(gdouble)avg_byterate));
- else if (player->total_maximum_bitrate > 0)
- buffered_sec = (gint)(ceil((gdouble)GET_BIT_FROM_BYTE(buffered_total)/(gdouble)player->total_maximum_bitrate));
- else if (player->total_bitrate > 0)
- buffered_sec = (gint)(ceil((gdouble)GET_BIT_FROM_BYTE(buffered_total)/(gdouble)player->total_bitrate));
+ MMPLAYER_PENDING_STATE(player) = MM_PLAYER_STATE_NULL;
+ MMPLAYER_PRINT_STATE(player);
- if (buffered_sec >= 0)
- stop_per = start_per +(gint)(ceil)(100*(gdouble)buffered_sec/(gdouble)dur_sec);
- }
+ /* release miscellaneous information */
+ __mmplayer_release_misc(player);
+
+ /* destroy pipeline */
+ ret = __mmplayer_gst_destroy_pipeline(player);
+ if (ret != MM_ERROR_NONE) {
+ LOGE("failed to destory pipeline\n");
+ return ret;
}
- *start_pos = CHECK_PERCENT_VALUE(start_per, 0, 100);
- *stop_pos = CHECK_PERCENT_VALUE(stop_per, *start_pos, 100);
+ /* release miscellaneous information.
+ these info needs to be released after pipeline is destroyed. */
+ __mmplayer_release_misc_post(player);
+
+ MMPLAYER_SET_STATE(player, MM_PLAYER_STATE_NULL);
- LOGD("buffered info: %"G_GINT64_FORMAT" bytes, %d sec, per %lu~%lu\n",
- buffered_total, buffered_sec, *start_pos, *stop_pos);
+ MMPLAYER_FLEAVE();
- return MM_ERROR_NONE;
+ return ret;
}
static int
return MM_ERROR_NONE;
}
-static int __mmfplayer_parse_profile(const char *uri, void *param, MMPlayerParseProfile* data)
+static int __mmplayer_parse_profile(const char *uri, void *param, MMPlayerParseProfile* data)
{
int ret = MM_ERROR_PLAYER_INVALID_URI;
char *path = NULL;
return ret;
}
-gboolean
+static gboolean
__mmplayer_can_do_interrupt(mm_player_t *player)
{
if (!player || !player->pipeline || !player->attrs) {
mm_attrs_get_data_by_name(attrs, "profile_user_param", ¶m);
if (player->profile.uri_type == MM_PLAYER_URI_TYPE_NONE) {
- ret = __mmfplayer_parse_profile((const char*)uri, param, &player->profile);
+ ret = __mmplayer_parse_profile((const char*)uri, param, &player->profile);
if (ret != MM_ERROR_NONE) {
LOGE("failed to parse profile\n");
MMPLAYER_CMD_UNLOCK(player);
/* destroy the gst bus msg thread which is created during realize.
this funct have to be called before getting cmd lock. */
- _mmplayer_bus_msg_thread_destroy(player);
+ __mmplayer_bus_msg_thread_destroy(player);
MMPLAYER_CMD_LOCK(player);
/* check current state */
}
/* start pipeline */
- ret = __gst_start(player);
+ ret = __mmplayer_gst_start(player);
if (ret != MM_ERROR_NONE)
LOGE("failed to start player.\n");
player->seek_state = MMPLAYER_SEEK_NONE;
/* stop pipeline */
- ret = __gst_stop(player);
+ ret = __mmplayer_gst_stop(player);
if (ret != MM_ERROR_NONE)
LOGE("failed to stop player.\n");
}
/* pause pipeline */
- ret = __gst_pause(player, async);
+ ret = __mmplayer_gst_pause(player, async);
if (ret != MM_ERROR_NONE)
LOGE("failed to pause player. ret : 0x%x\n", ret);
/* check current state */
MMPLAYER_CHECK_STATE(player, MMPLAYER_COMMAND_RESUME);
- ret = __gst_resume(player, async);
+ ret = __mmplayer_gst_resume(player, async);
if (ret != MM_ERROR_NONE)
LOGE("failed to resume player.\n");
stop = pos_nsec;
}
- if (!__gst_seek(player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst,
+ if (!__mmplayer_gst_seek(player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst,
player->playback_rate,
GST_FORMAT_TIME,
(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE),
/* check pipline building state */
__mmplayer_check_pipeline(player);
- ret = __gst_set_position(player, format, position, FALSE);
+ ret = __mmplayer_gst_set_position(player, format, position, FALSE);
MMPLAYER_FLEAVE();
MMPLAYER_RETURN_VAL_IF_FAIL(player, MM_ERROR_PLAYER_NOT_INITIALIZED);
- ret = __gst_get_position(player, format, position);
+ ret = __mmplayer_gst_get_position(player, format, position);
return ret;
}
MMPLAYER_RETURN_VAL_IF_FAIL(player, MM_ERROR_PLAYER_NOT_INITIALIZED);
- ret = __gst_get_buffer_position(player, format, start_pos, stop_pos);
+ ret = __mmplayer_gst_get_buffer_position(player, format, start_pos, stop_pos);
return ret;
}
continue;
}
- if (__mmfplayer_parse_profile((const char*)uri, NULL, &profile) != MM_ERROR_NONE) {
+ if (__mmplayer_parse_profile((const char*)uri, NULL, &profile) != MM_ERROR_NONE) {
LOGE("failed to parse profile\n");
continue;
}
mm_attrs_get_string_by_name(attrs, "profile_uri", &uri);
- if (__mmfplayer_parse_profile((const char*)uri, NULL, &player->profile) != MM_ERROR_NONE) {
+ if (__mmplayer_parse_profile((const char*)uri, NULL, &player->profile) != MM_ERROR_NONE) {
LOGE("failed to parse profile\n");
msg_param.code = MM_ERROR_PLAYER_INVALID_URI;
goto ERROR;
return;
}
-static gboolean __mmplayer_configure_audio_callback(mm_player_t* player)
+gboolean __mmplayer_configure_audio_callback(mm_player_t* player)
{
MMPLAYER_FENTER();
MMPLAYER_RETURN_VAL_IF_FAIL(player, FALSE);
return TRUE;
}
-static gboolean
-__mmplayer_handle_streaming_error(mm_player_t* player, GstMessage * message)
-{
- LOGD("\n");
- MMMessageParamType msg_param;
- gchar *msg_src_element = NULL;
- GstStructure *s = NULL;
- guint error_id = 0;
- gchar *error_string = NULL;
-
- MMPLAYER_FENTER();
-
- MMPLAYER_RETURN_VAL_IF_FAIL(player, FALSE);
- MMPLAYER_RETURN_VAL_IF_FAIL(message, FALSE);
-
- s = gst_structure_copy(gst_message_get_structure(message));
-
-
- if (!gst_structure_get_uint(s, "error_id", &error_id))
- error_id = MMPLAYER_STREAMING_ERROR_NONE;
-
- switch (error_id) {
- case MMPLAYER_STREAMING_ERROR_UNSUPPORTED_AUDIO:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_UNSUPPORTED_AUDIO;
- break;
- case MMPLAYER_STREAMING_ERROR_UNSUPPORTED_VIDEO:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_UNSUPPORTED_VIDEO;
- break;
- case MMPLAYER_STREAMING_ERROR_CONNECTION_FAIL:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_CONNECTION_FAIL;
- break;
- case MMPLAYER_STREAMING_ERROR_DNS_FAIL:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_DNS_FAIL;
- break;
- case MMPLAYER_STREAMING_ERROR_SERVER_DISCONNECTED:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_SERVER_DISCONNECTED;
- break;
- case MMPLAYER_STREAMING_ERROR_BAD_SERVER:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_BAD_SERVER;
- break;
- case MMPLAYER_STREAMING_ERROR_INVALID_PROTOCOL:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_INVALID_PROTOCOL;
- break;
- case MMPLAYER_STREAMING_ERROR_INVALID_URL:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_INVALID_URL;
- break;
- case MMPLAYER_STREAMING_ERROR_UNEXPECTED_MSG:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_UNEXPECTED_MSG;
- break;
- case MMPLAYER_STREAMING_ERROR_OUT_OF_MEMORIES:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_OUT_OF_MEMORIES;
- break;
- case MMPLAYER_STREAMING_ERROR_RTSP_TIMEOUT:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_RTSP_TIMEOUT;
- break;
- case MMPLAYER_STREAMING_ERROR_BAD_REQUEST:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_BAD_REQUEST;
- break;
- case MMPLAYER_STREAMING_ERROR_NOT_AUTHORIZED:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_NOT_AUTHORIZED;
- break;
- case MMPLAYER_STREAMING_ERROR_PAYMENT_REQUIRED:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_PAYMENT_REQUIRED;
- break;
- case MMPLAYER_STREAMING_ERROR_FORBIDDEN:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_FORBIDDEN;
- break;
- case MMPLAYER_STREAMING_ERROR_CONTENT_NOT_FOUND:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_CONTENT_NOT_FOUND;
- break;
- case MMPLAYER_STREAMING_ERROR_METHOD_NOT_ALLOWED:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_METHOD_NOT_ALLOWED;
- break;
- case MMPLAYER_STREAMING_ERROR_NOT_ACCEPTABLE:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_NOT_ACCEPTABLE;
- break;
- case MMPLAYER_STREAMING_ERROR_PROXY_AUTHENTICATION_REQUIRED:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_PROXY_AUTHENTICATION_REQUIRED;
- break;
- case MMPLAYER_STREAMING_ERROR_SERVER_TIMEOUT:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_SERVER_TIMEOUT;
- break;
- case MMPLAYER_STREAMING_ERROR_GONE:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_GONE;
- break;
- case MMPLAYER_STREAMING_ERROR_LENGTH_REQUIRED:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_LENGTH_REQUIRED;
- break;
- case MMPLAYER_STREAMING_ERROR_PRECONDITION_FAILED:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_PRECONDITION_FAILED;
- break;
- case MMPLAYER_STREAMING_ERROR_REQUEST_ENTITY_TOO_LARGE:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_REQUEST_ENTITY_TOO_LARGE;
- break;
- case MMPLAYER_STREAMING_ERROR_REQUEST_URI_TOO_LARGE:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_REQUEST_URI_TOO_LARGE;
- break;
- case MMPLAYER_STREAMING_ERROR_UNSUPPORTED_MEDIA_TYPE:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_UNSUPPORTED_MEDIA_TYPE;
- break;
- case MMPLAYER_STREAMING_ERROR_PARAMETER_NOT_UNDERSTOOD:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_PARAMETER_NOT_UNDERSTOOD;
- break;
- case MMPLAYER_STREAMING_ERROR_CONFERENCE_NOT_FOUND:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_CONFERENCE_NOT_FOUND;
- break;
- case MMPLAYER_STREAMING_ERROR_NOT_ENOUGH_BANDWIDTH:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_NOT_ENOUGH_BANDWIDTH;
- break;
- case MMPLAYER_STREAMING_ERROR_NO_SESSION_ID:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_NO_SESSION_ID;
- break;
- case MMPLAYER_STREAMING_ERROR_METHOD_NOT_VALID_IN_THIS_STATE:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_METHOD_NOT_VALID_IN_THIS_STATE;
- break;
- case MMPLAYER_STREAMING_ERROR_HEADER_FIELD_NOT_VALID_FOR_SOURCE:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_HEADER_FIELD_NOT_VALID_FOR_SOURCE;
- break;
- case MMPLAYER_STREAMING_ERROR_INVALID_RANGE:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_INVALID_RANGE;
- break;
- case MMPLAYER_STREAMING_ERROR_PARAMETER_IS_READONLY:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_PARAMETER_IS_READONLY;
- break;
- case MMPLAYER_STREAMING_ERROR_AGGREGATE_OP_NOT_ALLOWED:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_AGGREGATE_OP_NOT_ALLOWED;
- break;
- case MMPLAYER_STREAMING_ERROR_ONLY_AGGREGATE_OP_ALLOWED:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_ONLY_AGGREGATE_OP_ALLOWED;
- break;
- case MMPLAYER_STREAMING_ERROR_BAD_TRANSPORT:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_BAD_TRANSPORT;
- break;
- case MMPLAYER_STREAMING_ERROR_DESTINATION_UNREACHABLE:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_DESTINATION_UNREACHABLE;
- break;
- case MMPLAYER_STREAMING_ERROR_INTERNAL_SERVER_ERROR:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_INTERNAL_SERVER_ERROR;
- break;
- case MMPLAYER_STREAMING_ERROR_NOT_IMPLEMENTED:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_NOT_IMPLEMENTED;
- break;
- case MMPLAYER_STREAMING_ERROR_BAD_GATEWAY:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_BAD_GATEWAY;
- break;
- case MMPLAYER_STREAMING_ERROR_SERVICE_UNAVAILABLE:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_SERVICE_UNAVAILABLE;
- break;
- case MMPLAYER_STREAMING_ERROR_GATEWAY_TIME_OUT:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_GATEWAY_TIME_OUT;
- break;
- case MMPLAYER_STREAMING_ERROR_RTSP_VERSION_NOT_SUPPORTED:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_RTSP_VERSION_NOT_SUPPORTED;
- break;
- case MMPLAYER_STREAMING_ERROR_OPTION_NOT_SUPPORTED:
- msg_param.code = MM_ERROR_PLAYER_STREAMING_OPTION_NOT_SUPPORTED;
- break;
- default:
- {
- gst_structure_free(s);
- return MM_ERROR_PLAYER_STREAMING_FAIL;
- }
- }
-
- error_string = g_strdup(gst_structure_get_string(s, "error_string"));
- if (error_string)
- msg_param.data = (void *) error_string;
-
- if (message->src) {
- msg_src_element = GST_ELEMENT_NAME(GST_ELEMENT_CAST(message->src));
-
- LOGE("-Msg src : [%s] Code : [%x] Error : [%s] \n",
- msg_src_element, msg_param.code, (char*)msg_param.data);
- }
-
- /* post error to application */
- if (!player->msg_posted) {
- MMPLAYER_POST_MSG(player, MM_MESSAGE_ERROR, &msg_param);
-
- /* don't post more if one was sent already */
- player->msg_posted = TRUE;
- } else
- LOGD("skip error post because it's sent already.\n");
-
- gst_structure_free(s);
- MMPLAYER_FLEAVE();
- g_free(error_string);
-
- return TRUE;
-
-}
-
-static void
-__mmplayer_handle_eos_delay(mm_player_t* player, int delay_in_ms)
-{
- MMPLAYER_RETURN_IF_FAIL(player);
-
- /* post now if delay is zero */
- if (delay_in_ms == 0 || player->set_mode.pcm_extraction) {
- LOGD("eos delay is zero. posting EOS now\n");
- MMPLAYER_POST_MSG(player, MM_MESSAGE_END_OF_STREAM, NULL);
-
- if (player->set_mode.pcm_extraction)
- __mmplayer_cancel_eos_timer(player);
-
- return;
- }
-
- /* cancel if existing */
- __mmplayer_cancel_eos_timer(player);
-
- /* init new timeout */
- /* NOTE : consider give high priority to this timer */
- LOGD("posting EOS message after [%d] msec\n", delay_in_ms);
-
- player->eos_timer = g_timeout_add(delay_in_ms,
- __mmplayer_eos_timer_cb, player);
-
- player->context.global_default = g_main_context_default();
- LOGD("global default context = %p, eos timer id = %d", player->context.global_default, player->eos_timer);
-
- /* check timer is valid. if not, send EOS now */
- if (player->eos_timer == 0) {
- LOGW("creating timer for delayed EOS has failed. sending EOS now\n");
- MMPLAYER_POST_MSG(player, MM_MESSAGE_END_OF_STREAM, NULL);
- }
-}
-
-static void
+void
__mmplayer_cancel_eos_timer(mm_player_t* player)
{
MMPLAYER_RETURN_IF_FAIL(player);
return;
}
-static gboolean
-__mmplayer_eos_timer_cb(gpointer u_data)
-{
- mm_player_t* player = NULL;
- MMHandleType attrs = 0;
- int count = 0;
-
- MMPLAYER_RETURN_VAL_IF_FAIL(u_data, FALSE);
-
- player = (mm_player_t*) u_data;
- attrs = MMPLAYER_GET_ATTRS(player);
-
- mm_attrs_get_int_by_name(attrs, "profile_play_count", &count);
-
- if (count == -1) {
- gint ret_value = 0;
- ret_value = __gst_set_position(player, MM_PLAYER_POS_FORMAT_TIME, 0, TRUE);
- if (ret_value != MM_ERROR_NONE)
- LOGE("seeking to 0 failed in repeat play");
- } else {
- /* posting eos */
- MMPLAYER_POST_MSG(player, MM_MESSAGE_END_OF_STREAM, NULL);
- }
-
- /* we are returning FALSE as we need only one posting */
- return FALSE;
-}
-
-/* sending event to one of sinkelements */
-static gboolean
-__gst_send_event_to_sink(mm_player_t* player, GstEvent* event)
-{
- GstEvent * event2 = NULL;
- GList *sinks = NULL;
- gboolean res = FALSE;
- MMPLAYER_FENTER();
-
- MMPLAYER_RETURN_VAL_IF_FAIL(player, FALSE);
- MMPLAYER_RETURN_VAL_IF_FAIL(event, FALSE);
-
- /* While adding subtitles in live feeds seek is getting called.
- Adding defensive check in framework layer.*/
- if (GST_EVENT_TYPE(event) == GST_EVENT_SEEK) {
- if (MMPLAYER_IS_LIVE_STREAMING(player)) {
- LOGE("Should not send seek event during live playback");
- return TRUE;
- }
- }
-
- if (player->play_subtitle)
- event2 = gst_event_copy((const GstEvent *)event);
-
- sinks = player->sink_elements;
- while (sinks) {
- GstElement *sink = GST_ELEMENT_CAST(sinks->data);
-
- if (GST_IS_ELEMENT(sink)) {
- /* keep ref to the event */
- gst_event_ref(event);
-
- if ((res = gst_element_send_event(sink, event))) {
- LOGD("sending event[%s] to sink element [%s] success!\n",
- GST_EVENT_TYPE_NAME(event), GST_ELEMENT_NAME(sink));
-
- /* rtsp case, asyn_done is not called after seek during pause state */
- if (MMPLAYER_IS_RTSP_STREAMING(player)) {
- if (GST_EVENT_TYPE(event) == GST_EVENT_SEEK) {
- if (MMPLAYER_TARGET_STATE(player) == MM_PLAYER_STATE_PAUSED) {
- LOGD("RTSP seek completed, after pause state..\n");
- player->seek_state = MMPLAYER_SEEK_NONE;
- MMPLAYER_POST_MSG(player, MM_MESSAGE_SEEK_COMPLETED, NULL);
- }
-
- }
- }
-
- if (MMPLAYER_IS_MS_BUFF_SRC(player)) {
- sinks = g_list_next(sinks);
- continue;
- } else {
- break;
- }
- }
-
- LOGD("sending event[%s] to sink element [%s] failed. try with next one.\n",
- GST_EVENT_TYPE_NAME(event), GST_ELEMENT_NAME(sink));
- }
-
- sinks = g_list_next(sinks);
- }
-
- /* Note : Textbin is not linked to the video or audio bin.
- * It needs to send the event to the text sink seperatelly.
- */
- if (player->play_subtitle && player->pipeline) {
- GstElement *text_sink = GST_ELEMENT_CAST(player->pipeline->textbin[MMPLAYER_T_FAKE_SINK].gst);
-
- if (GST_IS_ELEMENT(text_sink)) {
- /* keep ref to the event */
- gst_event_ref(event2);
-
- if ((res = gst_element_send_event(text_sink, event2)))
- LOGD("sending event[%s] to subtitle sink element [%s] success!\n",
- GST_EVENT_TYPE_NAME(event2), GST_ELEMENT_NAME(text_sink));
- else
- LOGE("sending event[%s] to subtitle sink element [%s] failed!\n",
- GST_EVENT_TYPE_NAME(event2), GST_ELEMENT_NAME(text_sink));
-
- gst_event_unref(event2);
- }
- }
-
- gst_event_unref(event);
-
- MMPLAYER_FLEAVE();
-
- return res;
-}
-
static void
__mmplayer_add_sink(mm_player_t* player, GstElement* sink)
{
MMPLAYER_FLEAVE();
}
-static gboolean
-__gst_seek(mm_player_t* player, GstElement * element, gdouble rate,
- GstFormat format, GstSeekFlags flags, GstSeekType cur_type,
- gint64 cur, GstSeekType stop_type, gint64 stop)
-{
- GstEvent* event = NULL;
- gboolean result = FALSE;
-
- MMPLAYER_FENTER();
-
- MMPLAYER_RETURN_VAL_IF_FAIL(player, FALSE);
-
- if (player->pipeline && player->pipeline->textbin)
- __mmplayer_drop_subtitle(player, FALSE);
-
- event = gst_event_new_seek(rate, format, flags, cur_type,
- cur, stop_type, stop);
-
- result = __gst_send_event_to_sink(player, event);
-
- MMPLAYER_FLEAVE();
-
- return result;
-}
-
/* NOTE : be careful with calling this api. please refer to below glib comment
* glib comment : Note that there is a bug in GObject that makes this function much
* less useful than it might seem otherwise. Once gobject is disposed, the callback
gint64 position = 0;
LOGD("do get/set position for new videosink plugin");
- if (__gst_get_position(player, MM_PLAYER_POS_FORMAT_TIME, &position)) {
+ if (__mmplayer_gst_get_position(player, MM_PLAYER_POS_FORMAT_TIME, &position)) {
LOGE("failed to get position");
return MM_ERROR_PLAYER_INTERNAL;
}
#ifdef SINKCHANGE_WITH_ACCURATE_SEEK
/* accurate seek */
- if (__gst_set_position(player, MM_PLAYER_POS_FORMAT_TIME, position, TRUE)) {
+ if (__mmplayer_gst_set_position(player, MM_PLAYER_POS_FORMAT_TIME, position, TRUE)) {
LOGE("failed to set position");
return MM_ERROR_PLAYER_INTERNAL;
}
#else
/* key unit seek */
- ret = __gst_seek(player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, 1.0,
+ ret = __mmplayer_gst_seek(player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, 1.0,
GST_FORMAT_TIME, (GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT),
GST_SEEK_TYPE_SET, position,
GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
LOGD("seek time = %"G_GINT64_FORMAT", rate = %f", time, player->playback_rate);
event = gst_event_new_seek(player->playback_rate, GST_FORMAT_TIME, (GstSeekFlags)(GST_SEEK_FLAG_FLUSH), GST_SEEK_TYPE_SET, time, GST_SEEK_TYPE_NONE, -1);
if (event) {
- __gst_send_event_to_sink(player, event);
+ __mmplayer_gst_send_event_to_sink(player, event);
} else {
result = MM_ERROR_PLAYER_INTERNAL;
LOGE("gst_event_new_seek failed"); /* pipeline will got error and can not be recovered */
if (current_state == GST_STATE_PLAYING) {
event = gst_event_new_seek(player->playback_rate, GST_FORMAT_TIME, (GstSeekFlags)(GST_SEEK_FLAG_SEGMENT | GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_SKIP), GST_SEEK_TYPE_SET, time, GST_SEEK_TYPE_NONE, -1);
if (event) {
- __gst_send_event_to_sink(player, event);
+ __mmplayer_gst_send_event_to_sink(player, event);
} else {
result = MM_ERROR_PLAYER_INTERNAL;
goto EXIT;
return MM_ERROR_NONE;
}
-gboolean
-__is_ms_buff_src(mm_player_t* player)
-{
- MMPLAYER_RETURN_VAL_IF_FAIL(player, FALSE);
-
- return (player->profile.uri_type == MM_PLAYER_URI_TYPE_MS_BUFF) ? TRUE : FALSE;
-}
-
-gboolean
-__has_suffix(mm_player_t* player, const gchar* suffix)
-{
- MMPLAYER_RETURN_VAL_IF_FAIL(player, FALSE);
- MMPLAYER_RETURN_VAL_IF_FAIL(suffix, FALSE);
-
- gboolean ret = FALSE;
- gchar* t_url = g_ascii_strdown(player->profile.uri, -1);
- gchar* t_suffix = g_ascii_strdown(suffix, -1);
-
- if (g_str_has_suffix(player->profile.uri, suffix))
- ret = TRUE;
-
- MMPLAYER_FREEIF(t_url);
- MMPLAYER_FREEIF(t_suffix);
-
- return ret;
-}
-
int
_mmplayer_set_video_hub_download_mode(MMHandleType hplayer, bool mode)
{