#define DEFAULT_PCM_OUT_SAMPLERATE 44100
#define DEFAULT_PCM_OUT_CHANNEL 2
+#define MQ_UNLINKED_CACHE_TIME (500 * GST_MSECOND)
+
/*---------------------------------------------------------------------------
| LOCAL CONSTANT DEFINITIONS: |
---------------------------------------------------------------------------*/
}
/* Pad-block probe callback: logs the blocked pad and returns GST_PAD_PROBE_OK
 * so the probe stays installed (the pad remains blocked until the probe is
 * explicitly removed via the stored block_id). Renamed from
 * __mmplayer_gst_selector_blocked as part of the selector -> combiner rework. */
static GstPadProbeReturn
-__mmplayer_gst_selector_blocked(GstPad *pad, GstPadProbeInfo *info, gpointer data)
+__mmplayer_gst_combiner_blocked(GstPad *pad, GstPadProbeInfo *info, gpointer data)
{
LOGD("pad(%s:%s) is blocked", GST_DEBUG_PAD_NAME(pad));
return GST_PAD_PROBE_OK;
}
static GstPadProbeReturn
-__mmplayer_gst_selector_event_probe(GstPad *pad, GstPadProbeInfo *info, gpointer data)
+__mmplayer_gst_combiner_event_probe(GstPad *pad, GstPadProbeInfo *info, gpointer data)
{
GstPadProbeReturn ret = GST_PAD_PROBE_OK;
GstEvent *event = GST_PAD_PROBE_INFO_DATA(info);
return ret;
MMPLAYER_GST_GET_CAPS_INFO_FROM_PAD(pad, caps, str, name, caps_ret);
- if (!caps_ret)
+ if (!caps_ret) {
+ GstStream *stream = NULL;
+
+ if (GST_EVENT_TYPE(event) != GST_EVENT_STREAM_START)
+ goto ERROR;
+
+ gst_event_parse_stream (event, &stream);
+ if (stream == NULL) {
+ LOGW ("Got a STREAM_START event without a GstStream");
+ goto ERROR;
+ }
+
+ name = gst_stream_type_get_name(gst_stream_get_stream_type(stream));
+ gst_object_unref (stream);
+ }
+
+ if (!name)
goto ERROR;
if (strstr(name, "audio")) {
case GST_EVENT_EOS:
{
/* in case of gapless, drop eos event not to send it to sink */
+ MMPLAYER_RECONFIGURE_LOCK(player);
if (player->gapless.reconfigure && !player->msg_posted) {
LOGD("[%d] %s:%s EOS received but will be drop", stream_type, GST_DEBUG_PAD_NAME(pad));
ret = GST_PAD_PROBE_DROP;
}
+ MMPLAYER_RECONFIGURE_UNLOCK(player);
break;
}
case GST_EVENT_STREAM_START:
}
static GstElement *
-__mmplayer_gst_make_concat(mmplayer_t *player, main_element_id_e elem_idx)
+__mmplayer_gst_make_concat(mmplayer_t *player, main_element_id_e elem_idx, mmplayer_track_type_e stream_type)
{
GstElement *pipeline = NULL;
- GstElement *concat = NULL;
+ g_autoptr(GstElement) concat = NULL;
+ g_autoptr(GstPad) srcpad = NULL;
MMPLAYER_FENTER();
MMPLAYER_RETURN_VAL_IF_FAIL(player && player->pipeline && player->pipeline->mainbin, NULL);
return NULL;
}
+ srcpad = gst_element_get_static_pad(concat, "src");
+
+ LOGD("blocking %s:%s", GST_DEBUG_PAD_NAME(srcpad));
+ player->track[stream_type].block_id = gst_pad_add_probe(srcpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
+ __mmplayer_gst_combiner_blocked, NULL, NULL);
+ player->track[stream_type].event_probe_id = gst_pad_add_probe(srcpad, GST_PAD_PROBE_TYPE_EVENT_BOTH|GST_PAD_PROBE_TYPE_EVENT_FLUSH,
+ __mmplayer_gst_combiner_event_probe, player, NULL);
+
+
gst_element_set_state(concat, GST_STATE_PAUSED);
pipeline = player->pipeline->mainbin[MMPLAYER_M_PIPE].gst;
if (!gst_bin_add(GST_BIN(pipeline), concat)) {
LOGE("failed to add concat to pipeline");
gst_element_set_state(concat, GST_STATE_NULL);
- gst_object_unref(GST_OBJECT(concat));
return NULL;
}
player->pipeline->mainbin[elem_idx].gst = concat;
MMPLAYER_FLEAVE();
- return concat;
+ return g_steal_pointer(&concat);
}
static GstElement *
LOGD("blocking %s:%s", GST_DEBUG_PAD_NAME(srcpad));
player->track[stream_type].block_id = gst_pad_add_probe(srcpad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
- __mmplayer_gst_selector_blocked, NULL, NULL);
+ __mmplayer_gst_combiner_blocked, NULL, NULL);
player->track[stream_type].event_probe_id = gst_pad_add_probe(srcpad, GST_PAD_PROBE_TYPE_EVENT_BOTH|GST_PAD_PROBE_TYPE_EVENT_FLUSH,
- __mmplayer_gst_selector_event_probe, player, NULL);
+ __mmplayer_gst_combiner_event_probe, player, NULL);
gst_element_set_state(selector, GST_STATE_PAUSED);
if (MMPLAYER_USE_DECODEBIN(player))
combiner = __mmplayer_gst_make_selector(player, elem_idx, stream_type);
else
- combiner = __mmplayer_gst_make_concat(player, elem_idx);
+ combiner = __mmplayer_gst_make_concat(player, elem_idx, stream_type);
if (!combiner)
goto ERROR;
LOGI("subtitle duration is invalid, subtitle duration change "
"GST_CLOCK_TIME_NONE -> %" GST_TIME_FORMAT, GST_TIME_ARGS(duration));
}
- msg.subtitle.duration = GST_TIME_AS_MSECONDS(duration);
+ msg.subtitle.duration = (unsigned long)GST_TIME_AS_MSECONDS(duration);
- LOGD("update subtitle : [%ld msec] %s", msg.subtitle.duration, (char *)msg.data);
+ LOGD("update subtitle : [%lu msec] %s", msg.subtitle.duration, (char *)msg.data);
MMPLAYER_POST_MSG(player, MM_MESSAGE_UPDATE_SUBTITLE, &msg);
gst_buffer_unmap(buffer, &mapinfo);
gint64 adjusted_timestamp = 0;
GstBuffer *buffer = gst_pad_probe_info_get_buffer(info);
- MMPLAYER_RETURN_VAL_IF_FAIL(player, FALSE);
+ MMPLAYER_RETURN_VAL_IF_FAIL(player, GST_PAD_PROBE_DROP);
- if (player->set_mode.subtitle_off) {
- LOGD("subtitle is OFF.");
- return TRUE;
- }
+ if (player->set_mode.subtitle_off)
+ return GST_PAD_PROBE_OK;
- if (player->adjust_subtitle_pos == 0) {
- LOGD("nothing to do");
- return TRUE;
- }
+ if (player->adjust_subtitle_pos == 0)
+ return GST_PAD_PROBE_OK;
cur_timestamp = GST_BUFFER_TIMESTAMP(buffer);
adjusted_timestamp = (gint64)cur_timestamp + ((gint64)player->adjust_subtitle_pos * G_GINT64_CONSTANT(1000000));
if (adjusted_timestamp < 0) {
LOGD("adjusted_timestamp under zero");
MMPLAYER_FLEAVE();
- return FALSE;
+ return GST_PAD_PROBE_DROP;
}
GST_BUFFER_TIMESTAMP(buffer) = (GstClockTime) adjusted_timestamp;
MMPLAYER_RETURN_VAL_IF_FAIL(player, MM_ERROR_INVALID_HANDLE);
/* cleanup stuffs */
- MMPLAYER_FREEIF(player->type);
+ MMPLAYER_FREEIF(player->type_caps_str);
player->no_more_pad = FALSE;
player->num_dynamic_pad = 0;
}
MMPLAYER_FREEIF(player->album_art);
- if (player->type_caps) {
- gst_caps_unref(player->type_caps);
- player->type_caps = NULL;
- }
-
if (player->v_stream_caps) {
gst_caps_unref(player->v_stream_caps);
player->v_stream_caps = NULL;
MMPLAYER_RETURN_VAL_IF_FAIL(player, MM_ERROR_PLAYER_NOT_INITIALIZED);
MMPLAYER_RETURN_VAL_IF_FAIL(duration, MM_ERROR_COMMON_INVALID_ARGUMENT);
- if (g_strrstr(player->type, "video/mpegts"))
+ if (g_strrstr(player->type_caps_str, "video/mpegts"))
__mmplayer_update_duration_value(player);
*duration = player->duration;
__mmplayer_update_content_type_info(mmplayer_t *player)
{
MMPLAYER_FENTER();
- MMPLAYER_RETURN_IF_FAIL(player && player->type);
+ MMPLAYER_RETURN_IF_FAIL(player && player->type_caps_str);
- if (__mmplayer_is_midi_type(player->type)) {
+ if (__mmplayer_is_midi_type(player->type_caps_str)) {
player->bypass_audio_effect = TRUE;
return;
}
return;
}
- if (g_strrstr(player->type, "application/x-hls")) {
+ if (g_strrstr(player->type_caps_str, "application/x-hls")) {
/* If it can't know exact type when it parses uri because of redirection case,
* it will be fixed by typefinder or when doing autoplugging.
*/
player->profile.uri_type = MM_PLAYER_URI_TYPE_HLS;
player->streamer->is_adaptive_streaming = TRUE;
- } else if (g_strrstr(player->type, "application/dash+xml")) {
+ } else if (g_strrstr(player->type_caps_str, "application/dash+xml")) {
player->profile.uri_type = MM_PLAYER_URI_TYPE_DASH;
player->streamer->is_adaptive_streaming = TRUE;
}
/* in case of TS, fixed buffering mode should be used because player can not get exact duration time */
- if ((player->streamer->is_adaptive_streaming) || (g_strrstr(player->type, "video/mpegts"))) {
+ if ((player->streamer->is_adaptive_streaming) || (g_strrstr(player->type_caps_str, "video/mpegts"))) {
player->streamer->buffering_req.mode = MM_PLAYER_BUFFERING_MODE_FIXED;
if (player->streamer->buffering_req.rebuffer_time <= MIN_BUFFERING_TIME) { /* if user did not set the rebuffer value */
MMPLAYER_RETURN_IF_FAIL(player && tf && caps);
- /* store type string */
- if (player->type_caps) {
- gst_caps_unref(player->type_caps);
- player->type_caps = NULL;
- }
-
- player->type_caps = gst_caps_copy(caps);
- MMPLAYER_LOG_GST_CAPS_TYPE(player->type_caps);
-
- MMPLAYER_FREEIF(player->type);
- player->type = gst_caps_to_string(caps);
- if (player->type)
- LOGD("[handle: %p] media type %s found, probability %d%% / %d",
- player, player->type, probability, gst_caps_get_size(caps));
+ MMPLAYER_FREEIF(player->type_caps_str);
+ player->type_caps_str = gst_caps_to_string(caps);
+ LOGD("[handle: %p] media type %s found, probability %d%% / %d",
+ player, player->type_caps_str, probability, gst_caps_get_size(caps));
if ((!MMPLAYER_IS_RTSP_STREAMING(player)) &&
- (g_strrstr(player->type, "audio/x-raw-int"))) {
+ (g_strrstr(player->type_caps_str, "audio/x-raw-int"))) {
LOGE("not support media format");
if (player->msg_posted == FALSE) {
if (!_mmplayer_gst_create_decoder(player, pad, caps)) {
gboolean async = FALSE;
- LOGE("failed to autoplug %s", player->type);
+ LOGE("failed to autoplug %s", player->type_caps_str);
mm_attrs_get_int_by_name(player->attrs, "profile_prepare_async", &async);
/* NOTE : in case of ts streaming, player could not get the correct duration info *
* skip the pull mode(file or ring buffering) setting. */
if (dur_bytes > 0) {
- if (!g_strrstr(player->type, "video/mpegts")) {
+ if (!g_strrstr(player->type_caps_str, "video/mpegts")) {
type = MUXED_BUFFER_TYPE_MEM_RING_BUFFER;
player->streamer->ring_buffer_size = player->ini.http_ring_buffer_size;
}
return FALSE;
}
+ if (!MMPLAYER_USE_DECODEBIN(player)) {
+ if (player->pipeline->mainbin[MMPLAYER_M_AUTOPLUG].gst)
+ g_object_set(G_OBJECT(player->pipeline->mainbin[MMPLAYER_M_AUTOPLUG].gst),
+ "uri", profile.uri, NULL);
+ }
+
SECURE_LOGD("next playback uri: %s", uri);
return TRUE;
}
}
num_of_uri = g_list_length(player->uri_info.uri_list);
+ if (!MMPLAYER_USE_DECODEBIN(player))
+ player->gapless.running = TRUE;
LOGD("repeat count = %d, num_of_list = %d", count, num_of_uri);
LOGD("there is no next uri and no repeat");
goto ERROR;
}
+
+ if (!MMPLAYER_USE_DECODEBIN(player)) {
+ if (player->pipeline->mainbin[MMPLAYER_M_AUTOPLUG].gst)
+ g_object_set(G_OBJECT(player->pipeline->mainbin[MMPLAYER_M_AUTOPLUG].gst),
+ "uri", player->profile.uri, NULL);
+ }
+
LOGD("looping cnt %d", count);
} else {
/* gapless playback path */
this function need to be updated according to the supported media format
@see player->ini.audio_offload_media_format */
- if (__mmplayer_is_only_mp3_type(player->type)) {
+ if (__mmplayer_is_only_mp3_type(player->type_caps_str)) {
LOGD("offload supportable media format type");
return TRUE;
}
MMPLAYER_FENTER();
MMPLAYER_RETURN_VAL_IF_FAIL(player && player->attrs, FALSE);
- LOGD("current stream : %s, sink: %s", player->type, player->ini.audio_offload_sink_element);
+ LOGD("current stream : %s, sink: %s", player->type_caps_str, player->ini.audio_offload_sink_element);
if (!__mmplayer_is_offload_supported_type(player))
goto DONE;
LOGD("[handle: %p] found new element [%s] to link", player, factory_name);
/* store type string */
- if (player->type == NULL) {
- player->type = gst_caps_to_string(caps);
+ if (player->type_caps_str == NULL) {
+ player->type_caps_str = gst_caps_to_string(caps);
__mmplayer_update_content_type_info(player);
}
gint stype = 0;
gint width = 0;
GstStructure *str = NULL;
- mm_attrs_get_int_by_name(player->attrs, "display_surface_type", &stype);
- /* don't make video because of not required */
- if ((stype == MM_DISPLAY_SURFACE_NULL) &&
- (!player->set_mode.video_export)) {
- LOGD("no need video decoding, expose pad");
- result = GST_AUTOPLUG_SELECT_EXPOSE;
- goto DONE;
+ /* parsebin in adaptivedemux get error if there is no parser */
+ if ((!g_strrstr(GST_ELEMENT_NAME(bin), "parsebin")) ||
+ ((!MMPLAYER_IS_HTTP_LIVE_STREAMING(player)) && (!MMPLAYER_IS_DASH_STREAMING(player)))) {
+ mm_attrs_get_int_by_name(player->attrs, "display_surface_type", &stype);
+
+ /* don't make video because of not required */
+ if ((stype == MM_DISPLAY_SURFACE_NULL) &&
+ (!player->set_mode.video_export)) {
+ LOGD("no need video decoding, expose pad");
+ result = GST_AUTOPLUG_SELECT_EXPOSE;
+ goto DONE;
+ }
}
/* get w/h for omx state-tune */
if (!__mmplayer_verify_gapless_play_path(player)) {
LOGD("decoding is finished.");
- MMPLAYER_CMD_UNLOCK(player);
- return;
+ if (MMPLAYER_USE_DECODEBIN(player)) {
+ MMPLAYER_CMD_UNLOCK(player);
+ return;
+ }
}
- _mmplayer_set_reconfigure_state(player, TRUE);
- MMPLAYER_CMD_UNLOCK(player);
-
- MMPLAYER_POST_MSG(player, MM_MESSAGE_FLUSH_BUFFER, NULL);
- __mmplayer_deactivate_old_path(player);
+ if (MMPLAYER_USE_DECODEBIN(player)) {
+ _mmplayer_set_reconfigure_state(player, TRUE);
+ MMPLAYER_CMD_UNLOCK(player);
+ MMPLAYER_POST_MSG(player, MM_MESSAGE_FLUSH_BUFFER, NULL);
+ __mmplayer_deactivate_old_path(player);
+ } else {
+ player->gapless.update_segment[MM_PLAYER_TRACK_TYPE_AUDIO] = FALSE;
+ player->gapless.update_segment[MM_PLAYER_TRACK_TYPE_VIDEO] = FALSE;
+ MMPLAYER_CMD_UNLOCK(player);
+ }
MMPLAYER_FLEAVE();
}
player->pipeline->mainbin[MMPLAYER_M_ADAPTIVE_DEMUX].id = MMPLAYER_M_ADAPTIVE_DEMUX;
player->pipeline->mainbin[MMPLAYER_M_ADAPTIVE_DEMUX].gst = element;
- MMPLAYER_FREEIF(player->type);
+ MMPLAYER_FREEIF(player->type_caps_str);
if (g_strrstr(factory_name, "hlsdemux")) {
- player->type = g_strdup("application/x-hls");
+ player->type_caps_str = g_strdup("application/x-hls");
player->profile.uri_type = MM_PLAYER_URI_TYPE_HLS;
} else if (g_strrstr(factory_name, "dashdemux")) {
- player->type = g_strdup("application/dash+xml");
+ player->type_caps_str = g_strdup("application/dash+xml");
player->profile.uri_type = MM_PLAYER_URI_TYPE_DASH;
} else {
LOGE("not supported type");
player->streamer->buffering_req.prebuffer_time);
g_object_set(player->pipeline->mainbin[MMPLAYER_M_ADAPTIVE_DEMUX].gst,
- "max-bandwidth", player->adaptive_info.limit.bandwidth,
+ "max-bitrate", player->adaptive_info.limit.bandwidth,
"max-video-width", player->adaptive_info.limit.width,
"max-video-height", player->adaptive_info.limit.height,
"low-watermark-time", (guint64)(player->streamer->buffering_req.prebuffer_time * GST_MSECOND),
if (g_strrstr(factory_name, "mpegaudioparse")) {
if ((MMPLAYER_IS_HTTP_STREAMING(player)) &&
- (__mmplayer_is_only_mp3_type(player->type))) {
+ (__mmplayer_is_only_mp3_type(player->type_caps_str))) {
LOGD("[mpegaudioparse] set streaming pull mode.");
g_object_set(G_OBJECT(element), "http-pull-mp3dec", TRUE, NULL);
}
} else if ((player->pipeline->mainbin[MMPLAYER_M_DEMUX].gst) &&
(g_strrstr(GST_ELEMENT_NAME(element), "multiqueue"))) {
+
LOGD("plugged element is multiqueue. take it %s", GST_ELEMENT_NAME(element));
+ /* set mq unlinked cache size to avoid not-linked error */
+ gboolean sync_by_running_time = FALSE;
+ g_object_get(G_OBJECT(element), "sync-by-running-time", &sync_by_running_time, NULL);
+ if (sync_by_running_time)
+ g_object_set(G_OBJECT(element), "unlinked-cache-time", MQ_UNLINKED_CACHE_TIME, NULL);
+
player->pipeline->mainbin[MMPLAYER_M_DEMUXED_S_BUFFER].id = MMPLAYER_M_DEMUXED_S_BUFFER;
player->pipeline->mainbin[MMPLAYER_M_DEMUXED_S_BUFFER].gst = element;
/* get subtitle attribute */
attrs = MMPLAYER_GET_ATTRS(player);
- if (!attrs)
- return FALSE;
+ MMPLAYER_RETURN_VAL_IF_FAIL(attrs, FALSE);
mm_attrs_get_string_by_name(attrs, "subtitle_uri", &subtitle_uri);
- if (!subtitle_uri || !strlen(subtitle_uri))
+ if (!subtitle_uri || (strlen(subtitle_uri) == 0)) {
+ MMPLAYER_FLEAVE();
return FALSE;
+ }
SECURE_LOGD("subtitle uri is %s[%zu]", subtitle_uri, strlen(subtitle_uri));
- player->is_external_subtitle_present = TRUE;
MMPLAYER_FLEAVE();
LOGD("seek time = %"G_GINT64_FORMAT", rate = %f", time, player->playback_rate);
event = gst_event_new_seek(player->playback_rate, GST_FORMAT_TIME, (GstSeekFlags)(GST_SEEK_FLAG_FLUSH), GST_SEEK_TYPE_SET, time, GST_SEEK_TYPE_NONE, -1);
- if (event) {
- _mmplayer_gst_send_event_to_sink(player, event);
- } else {
+ if (!event) {
result = MM_ERROR_PLAYER_INTERNAL;
LOGE("gst_event_new_seek failed"); /* pipeline will got error and can not be recovered */
goto ERROR;
}
+ if (player->seek_state == MMPLAYER_SEEK_IN_PROGRESS) {
+ GstElement *text_sink = GST_ELEMENT_CAST(player->pipeline->textbin[MMPLAYER_T_FAKE_SINK].gst);
+ if (GST_IS_ELEMENT(text_sink)) {
+ if (gst_element_send_event(text_sink, event))
+ LOGD("sending event[%s] to subtitle sink element [%s] success!",
+ GST_EVENT_TYPE_NAME(event), GST_ELEMENT_NAME(text_sink));
+ else
+ LOGE("sending event[%s] to subtitle sink element [%s] failed!",
+ GST_EVENT_TYPE_NAME(event), GST_ELEMENT_NAME(text_sink));
+ }
+ } else {
+ _mmplayer_gst_send_event_to_sink(player, event);
+ }
+
/* sync state with current pipeline */
gst_element_sync_state_with_parent(textbin[MMPLAYER_T_BIN].gst);
gst_element_sync_state_with_parent(mainbin[MMPLAYER_M_SUBPARSE].gst);
}
MMPLAYER_SUBTITLE_INFO_UNLOCK(player);
}
+ player->is_external_subtitle_present = TRUE;
MMPLAYER_FLEAVE();
return result;
player = (mmplayer_t *)hplayer;
MMPLAYER_RETURN_VAL_IF_FAIL(player, MM_ERROR_PLAYER_NOT_INITIALIZED);
+ MMPLAYER_RETURN_VAL_IF_FAIL(type < MM_PLAYER_TRACK_TYPE_MAX, MM_ERROR_PLAYER_NOT_INITIALIZED);
if (!player->pipeline) {
LOGE("Track %d pre setting -> %d", type, index);
if (player->pipeline && player->pipeline->mainbin && player->pipeline->mainbin[MMPLAYER_M_ADAPTIVE_DEMUX].gst) {
LOGD("update max limit of %s", GST_ELEMENT_NAME(player->pipeline->mainbin[MMPLAYER_M_ADAPTIVE_DEMUX].gst));
g_object_set(player->pipeline->mainbin[MMPLAYER_M_ADAPTIVE_DEMUX].gst,
- "max-bandwidth", bandwidth, "max-video-width", width, "max-video-height", height, NULL);
+ "max-bitrate", bandwidth, "max-video-width", width, "max-video-height", height, NULL);
/* FIXME: seek to current position for applying new variant limitation */
}
* So, if it's not set yet, fill it with found data.
*/
if (!audio_codec) {
- if (g_strrstr(player->type, "audio/midi"))
+ if (g_strrstr(player->type_caps_str, "audio/midi"))
audio_codec = "MIDI";
- else if (g_strrstr(player->type, "audio/x-amr"))
+ else if (g_strrstr(player->type_caps_str, "audio/x-amr"))
audio_codec = "AMR";
- else if (g_strrstr(player->type, "audio/mpeg")
- && !g_strrstr(player->type, "mpegversion=(int)1"))
+ else if (g_strrstr(player->type_caps_str, "audio/mpeg")
+ && !g_strrstr(player->type_caps_str, "mpegversion=(int)1"))
audio_codec = "AAC";
else
audio_codec = "unknown";