if ( ret == GST_STATE_CHANGE_FAILURE )
{
debug_error("failed to set [%s] state to [%d]\n", GST_ELEMENT_NAME(element), state);
+
+ /* dump the state of all elements */
+ __mmplayer_dump_pipeline_state( player );
+
return MM_ERROR_PLAYER_INTERNAL;
}
gst_element_state_get_name(state), timeout );
debug_error(" [%s] state : %s pending : %s \n",
- GST_ELEMENT_NAME(element),
- gst_element_state_get_name(element_state),
+ GST_ELEMENT_NAME(element),
+ gst_element_state_get_name(element_state),
gst_element_state_get_name(element_pending_state) );
+ /* dump the state of all elements */
+ __mmplayer_dump_pipeline_state( player );
+
return MM_ERROR_PLAYER_INTERNAL;
}
}
else
{
- debug_warning("failed to get negitiated caps from videosink");
+ debug_log("no negitiated caps from videosink");
}
gst_object_unref( pad );
pad = NULL;
}
else
{
- debug_warning("failed to get pad from videosink");
+ debug_log("no videosink sink pad");
}
}
debug_fenter();
return_val_if_fail ( player &&
- player->pipeline &&
- player->pipeline->mainbin &&
- player->pipeline->mainbin[MMPLAYER_M_SRC].gst,
- FALSE );
+ player->pipeline &&
+ player->pipeline->mainbin &&
+ player->pipeline->mainbin[MMPLAYER_M_SRC].gst,
+ FALSE );
/* streaming service type if streaming */
if ( ! MMPLAYER_IS_STREAMING(player) );
}
mm_attrs_set_int_by_name ( attrs, "streaming_type", streaming_type );
- /* validate all */
- if ( mmf_attrs_commit ( attrs ) )
- {
- debug_warning("updating streaming service type failed. pause/resume/seek may not working properly if stream is live stream\n");
- return FALSE;
- }
debug_fleave();
case MM_PLAYER_STATE_PLAYING:
{
/* non-managed prepare case, should be updated */
- if ( ! player->need_update_content_dur)
+ if ( ! player->need_update_content_dur )
{
player->need_update_content_dur = TRUE;
_mmplayer_update_content_attrs ( player );
if ( !player->msg_cb )
{
- debug_warning("no msg callback. can't post\n");
+ debug_warning("no msg callback. can't post msg now\n");
return FALSE;
}
MMPLAYER_POST_MSG( player, MM_MESSAGE_RESUMED_BY_REW, NULL );
}
- ret_value = __gst_seek( player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, 1.0,
+ ret_value = __gst_seek( player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, player->playback_rate,
GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH, GST_SEEK_TYPE_SET,
0, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
/* FIXIT : track number shouldn't be hardcoded */
mm_attrs_set_int_by_name(attrs, "content_audio_track_num", 1);
-
player->audiosink_linked = 1;
- debug_msg("player->audsink_linked set to 1\n");
sinkpad = gst_element_get_static_pad( GST_ELEMENT(sinkbin), "sink" );
if ( !sinkpad )
{
if (player->pipeline->videobin == NULL)
{
- /* NOTE : not make videobin because application dose not want to play it even though file has video stream.
- */
-
+ /* NOTE : do not create the videobin because the application does not want to play video even though the file has a video stream. */
/* get video surface type */
int surface_type = 0;
mm_attrs_get_int_by_name (player->attrs, "display_surface_type", &surface_type);
/* FIXIT : track number shouldn't be hardcoded */
mm_attrs_set_int_by_name(attrs, "content_video_track_num", 1);
-
player->videosink_linked = 1;
- debug_msg("player->videosink_linked set to 1\n");
sinkpad = gst_element_get_static_pad( GST_ELEMENT(sinkbin), "sink" );
if ( !sinkpad )
sinkpad = NULL;
}
- /* update track number attributes */
- if ( mmf_attrs_commit ( attrs ) )
- debug_error("failed to commit attrs\n");
-
debug_log("linking sink bin success\n");
/* FIXIT : we cannot hold callback for 'no-more-pad' signal because signal was emitted in
{
int pro_value = 0; // in the case of expection, default will be returned.
int dest_angle = rotation_angle;
- char *element_name = NULL;
int rotation_using_type = -1;
#define ROTATION_USING_X 0
- #define ROTATION_USING_FLIP 1
+ #define ROTATION_USING_FIMC 1
+ #define ROTATION_USING_FLIP 2
return_val_if_fail(player, FALSE);
return_val_if_fail(value, FALSE);
if (player->use_video_stream)
{
- rotation_using_type = ROTATION_USING_FLIP;
+ if (player->is_nv12_tiled)
+ {
+ rotation_using_type = ROTATION_USING_FIMC;
+ }
+ else
+ {
+ rotation_using_type = ROTATION_USING_FLIP;
+ }
}
else
{
rotation_using_type = ROTATION_USING_X;
break;
case MM_DISPLAY_SURFACE_EVAS:
+ if (player->is_nv12_tiled && !strcmp(PLAYER_INI()->videosink_element_evas,"evasimagesink"))
+ {
+ rotation_using_type = ROTATION_USING_FIMC;
+ }
+ else if (!player->is_nv12_tiled)
+ {
+ rotation_using_type = ROTATION_USING_FLIP;
+ }
+ else
+ {
+ debug_error("it should not be here..");
+ return FALSE;
+ }
+ break;
default:
rotation_using_type = ROTATION_USING_FLIP;
break;
}
}
break;
+ case ROTATION_USING_FIMC: // fimcconvert
+ {
+ switch (dest_angle)
+ {
+ case 0:
+ break;
+ case 90:
+ pro_value = 90; // clockwise 90
+ break;
+ case 180:
+ pro_value = 180;
+ break;
+ case 270:
+ pro_value = 270; // counter-clockwise 90
+ break;
+ }
+ }
+ break;
case ROTATION_USING_FLIP: // videoflip
{
switch (dest_angle)
/* check video stream callback is used */
if( player->use_video_stream )
{
- debug_log("using video stream callback with memsink. player handle : [%p]", player);
+ if (player->is_nv12_tiled)
+ {
+ gchar *ename = NULL;
+ int width = 0;
+ int height = 0;
+
+ mm_attrs_get_int_by_name(attrs, "display_width", &width);
+ mm_attrs_get_int_by_name(attrs, "display_height", &height);
+
+ /* resize video frame with requested values for fimcconvert */
+ ename = GST_PLUGIN_FEATURE_NAME(gst_element_get_factory(player->pipeline->videobin[MMPLAYER_V_CONV].gst));
- /* apply roate */
- g_object_set(player->pipeline->videobin[MMPLAYER_V_FLIP].gst, "method", rotation_value, NULL);
+ if (g_strrstr(ename, "fimcconvert"))
+ {
+ if (width)
+ g_object_set(player->pipeline->videobin[MMPLAYER_V_CONV].gst, "dst-width", width, NULL);
+
+ if (height)
+ g_object_set(player->pipeline->videobin[MMPLAYER_V_CONV].gst, "dst-height", height, NULL);
+
+ g_object_set(player->pipeline->videobin[MMPLAYER_V_CONV].gst, "rotate", rotation_value, NULL);
+ debug_log("updating fimcconvert - r[%d], w[%d], h[%d]", rotation_value, width, height);
+ }
+ else
+ {
+ debug_error("no available video converter");
+ return MM_ERROR_PLAYER_INTERNAL;
+ }
+ }
+ else
+ {
+ debug_log("using video stream callback with memsink. player handle : [%p]", player);
+ g_object_set(player->pipeline->videobin[MMPLAYER_V_FLIP].gst, "method", rotation_value, NULL);
+ }
+
return MM_ERROR_NONE;
}
mm_attrs_get_data_by_name(attrs, "display_overlay", &xid);
if ( xid )
{
+#define GST_VAAPI_DISPLAY_TYPE_X11 1
+ if (!strncmp(PLAYER_INI()->videosink_element_x,"vaapisink", strlen("vaapisink"))){
+ debug_log("set video param: vaapisink display %d", GST_VAAPI_DISPLAY_TYPE_X11);
+ g_object_set(player->pipeline->videobin[MMPLAYER_V_SINK].gst,
+ "display", GST_VAAPI_DISPLAY_TYPE_X11,
+ NULL);
+ }
+
debug_log("set video param : xid %d", *(int*)xid);
gst_x_overlay_set_xwindow_id( GST_X_OVERLAY( player->pipeline->videobin[MMPLAYER_V_SINK].gst ), *(int*)xid );
}
debug_log("set video param : visible %d", visible);
debug_log("set video param : force aspect ratio %d", force_aspect_ratio);
}
+
+ /* if vaapisink */
+ if (!strncmp(PLAYER_INI()->videosink_element_x, "vaapisink", strlen("vaapisink")))
+ {
+ g_object_set(player->pipeline->videobin[MMPLAYER_V_SINK].gst,
+ "rotation", rotation_value,
+ NULL);
+ debug_log("set video param: vaapisink rotation %d", rotation_value);
+ }
}
break;
case MM_DISPLAY_SURFACE_EVAS:
return MM_ERROR_PLAYER_INTERNAL;
}
+ /* if evasimagesink */
+ if (!strcmp(PLAYER_INI()->videosink_element_evas,"evasimagesink") && player->is_nv12_tiled)
+ {
+ int width = 0;
+ int height = 0;
+ int no_scaling = !scaling;
+
+ mm_attrs_get_int_by_name(attrs, "display_width", &width);
+ mm_attrs_get_int_by_name(attrs, "display_height", &height);
+
+ /* NOTE: fimcconvert does not preserve the index of src buffers from the upstream src plugin; the decoder delivers frame information in output buffers with no guaranteed ordering */
+ g_object_set(player->pipeline->videobin[MMPLAYER_V_CONV].gst, "dst-buffer-num", 5, NULL);
+
+ if (no_scaling)
+ {
+ /* request no scaling from fimcconvert; the media source's original width and height will be passed through to the sink plugin */
+ g_object_set(player->pipeline->videobin[MMPLAYER_V_CONV].gst,
+ "dst-width", 0, /* setting 0, output video width will be media src's width */
+ "dst-height", 0, /* setting 0, output video height will be media src's height */
+ NULL);
+ }
+ else
+ {
+ /* scaling order to fimcconvert */
+ if (width)
+ {
+ g_object_set(player->pipeline->videobin[MMPLAYER_V_CONV].gst, "dst-width", width, NULL);
+ }
+ if (height)
+ {
+ g_object_set(player->pipeline->videobin[MMPLAYER_V_CONV].gst, "dst-height", height, NULL);
+ }
+ debug_log("set video param : video frame scaling down to width(%d) height(%d)", width, height);
+ }
+ debug_log("set video param : display_evas_do_scaling %d", scaling);
+ }
+
/* if evaspixmapsink */
if (!strcmp(PLAYER_INI()->videosink_element_evas,"evaspixmapsink"))
{
"display-geometry-method", display_method,
"draw-borders", FALSE,
"visible", visible,
+ "rotate", rotation_value,
"pixmap-id-callback", pixmap_id_cb,
"pixmap-id-callback-userdata", pixmap_id_cb_user_data,
NULL );
if ( ! player->is_sound_extraction )
{
GstCaps* caps = NULL;
- guint channels = 0;
+ gint channels = 0;
/* for logical volume control */
MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_VOL, "volume", "volume", TRUE);
/*capsfilter */
MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_CAPS_DEFAULT, "capsfilter", "audiocapsfilter", TRUE);
- caps = gst_caps_from_string( "audio/x-raw-int, "
- "endianness = (int) LITTLE_ENDIAN, "
- "signed = (boolean) true, "
- "width = (int) 16, "
- "depth = (int) 16" );
+ caps = gst_caps_from_string( "audio/x-raw-int, "
+ "endianness = (int) LITTLE_ENDIAN, "
+ "signed = (boolean) true, "
+ "width = (int) 16, "
+ "depth = (int) 16" );
g_object_set (GST_ELEMENT(audiobin[MMPLAYER_A_CAPS_DEFAULT].gst), "caps", caps, NULL );
gst_caps_unref( caps );
GstPad *srcpad = NULL;
GstCaps *caps = NULL;
- if (srcpad = gst_element_get_static_pad(player->pipeline->mainbin[MMPLAYER_M_DEMUX].gst, "src"))
+ if ((srcpad = gst_element_get_static_pad(player->pipeline->mainbin[MMPLAYER_M_DEMUX].gst, "src")))
{
- if (caps = gst_pad_get_caps(srcpad))
+ if ((caps = gst_pad_get_caps(srcpad)))
{
MMPLAYER_LOG_GST_CAPS_TYPE(caps);
GstStructure *str = gst_caps_get_structure(caps, 0);
/* get conf. values */
mm_attrs_multiple_get(player->attrs,
- NULL,
- "pcm_extraction_samplerate", &dst_samplerate,
- "pcm_extraction_channels", &dst_channels,
- "pcm_extraction_depth", &dst_depth,
- NULL);
+ NULL,
+ "pcm_extraction_samplerate", &dst_samplerate,
+ "pcm_extraction_channels", &dst_channels,
+ "pcm_extraction_depth", &dst_depth,
+ NULL);
/* capsfilter */
MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_CAPS_DEFAULT, "capsfilter", "audiocapsfilter", TRUE);
caps = gst_caps_new_simple ("audio/x-raw-int",
- "rate", G_TYPE_INT, dst_samplerate,
- "channels", G_TYPE_INT, dst_channels,
- "depth", G_TYPE_INT, dst_depth,
+ "rate", G_TYPE_INT, dst_samplerate,
+ "channels", G_TYPE_INT, dst_channels,
+ "depth", G_TYPE_INT, dst_depth,
NULL);
caps_type = gst_caps_to_string(caps);
debug_log("resampler new caps : %s\n", caps_type);
- g_object_set (GST_ELEMENT(audiobin[MMPLAYER_A_CAPS_DEFAULT].gst), "caps", caps, NULL );
+ g_object_set (GST_ELEMENT(audiobin[MMPLAYER_A_CAPS_DEFAULT].gst), "caps", caps, NULL );
- /* clean */
- gst_caps_unref( caps );
- MMPLAYER_FREEIF( caps_type );
+ /* clean */
+ gst_caps_unref( caps );
+ MMPLAYER_FREEIF( caps_type );
/* fake sink */
MMPLAYER_CREATE_ELEMENT(audiobin, MMPLAYER_A_SINK, "fakesink", "fakesink", TRUE);
g_list_free(element_bucket);
mm_attrs_set_int_by_name(attrs, "content_audio_found", TRUE);
- if ( mmf_attrs_commit ( attrs ) ) /* return -1 if error */
- debug_error("failed to commit attribute ""content_audio_found"".\n");
debug_fleave();
/**
* VIDEO PIPELINE
* - x surface (arm/x86) : videoflip ! xvimagesink
- * - evas surface (arm) : ffmpegcolorspace ! videoflip ! evasimagesink
+ * - evas surface (arm) : fimcconvert ! evasimagesink
* - evas surface (x86) : videoconvertor ! videoflip ! evasimagesink
*/
static int
if( player->use_video_stream ) // video stream callback, so send raw video data to application
{
GstStructure *str = NULL;
- guint32 fourcc = 0;
gint ret = 0;
- gint width = 0; //width of video
- gint height = 0; //height of video
- GstCaps* video_caps = NULL;
debug_log("using memsink\n");
/* first, create colorspace convert */
- if (strlen(PLAYER_INI()->name_of_video_converter) > 0)
+ if (player->is_nv12_tiled)
{
+ vconv_factory = "fimcconvert";
+ }
+ else // get video converter from player ini file
+ {
+ if (strlen(PLAYER_INI()->name_of_video_converter) > 0)
+ {
vconv_factory = PLAYER_INI()->name_of_video_converter;
+ }
}
if (vconv_factory)
MMPLAYER_CREATE_ELEMENT(videobin, MMPLAYER_V_CONV, vconv_factory, "video converter", TRUE);
}
- /* rotator, scaler and capsfilter */
- MMPLAYER_CREATE_ELEMENT(videobin, MMPLAYER_V_FLIP, "videoflip", "video rotator", TRUE);
- MMPLAYER_CREATE_ELEMENT(videobin, MMPLAYER_V_SCALE, "videoscale", "video scaler", TRUE);
- MMPLAYER_CREATE_ELEMENT(videobin, MMPLAYER_V_CAPS, "capsfilter", "videocapsfilter", TRUE);
-
- /* get video stream caps parsed by demuxer */
- str = gst_caps_get_structure (player->v_stream_caps, 0);
- if ( !str )
+ if ( !player->is_nv12_tiled)
{
- debug_error("cannot get structure");
- goto ERROR;
- }
+ gint width = 0; //width of video
+ gint height = 0; //height of video
+ GstCaps* video_caps = NULL;
- mm_attrs_get_int_by_name(attrs, "display_width", &width);
- mm_attrs_get_int_by_name(attrs, "display_height", &height);
- if (!width || !height)
- {
- /* we set width/height of original media's size to capsfilter for scaling video */
- ret = gst_structure_get_int (str, "width", &width);
- if ( !ret )
+ /* rotator, scaler and capsfilter */
+ if (strncmp(PLAYER_INI()->videosink_element_x, "vaapisink", strlen("vaapisink"))){
+ MMPLAYER_CREATE_ELEMENT(videobin, MMPLAYER_V_FLIP, "videoflip", "video rotator", TRUE);
+ MMPLAYER_CREATE_ELEMENT(videobin, MMPLAYER_V_SCALE, "videoscale", "video scaler", TRUE);
+ MMPLAYER_CREATE_ELEMENT(videobin, MMPLAYER_V_CAPS, "capsfilter", "videocapsfilter", TRUE);
+ }
+
+ /* get video stream caps parsed by demuxer */
+ str = gst_caps_get_structure (player->v_stream_caps, 0);
+ if ( !str )
{
- debug_error("cannot get width");
+ debug_error("cannot get structure");
goto ERROR;
}
- ret = gst_structure_get_int(str, "height", &height);
- if ( !ret )
- {
- debug_error("cannot get height");
- goto ERROR;
+ mm_attrs_get_int_by_name(attrs, "display_width", &width);
+ mm_attrs_get_int_by_name(attrs, "display_height", &height);
+ if (!width || !height) {
+ /* we set width/height of original media's size to capsfilter for scaling video */
+ ret = gst_structure_get_int (str, "width", &width);
+ if ( !ret )
+ {
+ debug_error("cannot get width");
+ goto ERROR;
+ }
+
+ ret = gst_structure_get_int(str, "height", &height);
+ if ( !ret )
+ {
+ debug_error("cannot get height");
+ goto ERROR;
+ }
}
- }
- video_caps = gst_caps_new_simple( "video/x-raw-rgb",
- "width", G_TYPE_INT, width,
- "height", G_TYPE_INT, height,
- NULL);
+ video_caps = gst_caps_new_simple( "video/x-raw-rgb",
+ "width", G_TYPE_INT, width,
+ "height", G_TYPE_INT, height,
+ NULL);
- g_object_set (GST_ELEMENT(videobin[MMPLAYER_V_CAPS].gst), "caps", video_caps, NULL );
+ g_object_set (GST_ELEMENT(videobin[MMPLAYER_V_CAPS].gst), "caps", video_caps, NULL );
- gst_caps_unref( video_caps );
+ gst_caps_unref( video_caps );
+ }
/* finally, create video sink. output will be BGRA8888. */
MMPLAYER_CREATE_ELEMENT(videobin, MMPLAYER_V_SINK, "avsysmemsink", "videosink", TRUE);
if (strlen(PLAYER_INI()->name_of_video_converter) > 0)
{
vconv_factory = PLAYER_INI()->name_of_video_converter;
+
+ if ( (player->is_nv12_tiled && (surface_type == MM_DISPLAY_SURFACE_EVAS) &&
+ !strcmp(PLAYER_INI()->videosink_element_evas, "evasimagesink") ) )
+ {
+ vconv_factory = "fimcconvert";
+ }
+ else if (player->is_nv12_tiled)
+ {
+ vconv_factory = NULL;
+ }
+
if (vconv_factory)
{
MMPLAYER_CREATE_ELEMENT(videobin, MMPLAYER_V_CONV, vconv_factory, "video converter", TRUE);
}
}
+ if (strncmp(PLAYER_INI()->videosink_element_x,"vaapisink", strlen("vaapisink"))){
/* set video rotator */
- MMPLAYER_CREATE_ELEMENT(videobin, MMPLAYER_V_FLIP, "videoflip", "video rotator", TRUE);
+ if ( !player->is_nv12_tiled )
+ MMPLAYER_CREATE_ELEMENT(videobin, MMPLAYER_V_FLIP, "videoflip", "video rotator", TRUE);
/* videoscaler */
#if !defined(__arm__)
MMPLAYER_CREATE_ELEMENT(videobin, MMPLAYER_V_SCALE, "videoscale", "videoscaler", TRUE);
#endif
+ }
/* set video sink */
switch (surface_type)
if (pixmap_id_cb) /* this is used for the videoTextue(canvasTexture) overlay */
{
videosink_element = PLAYER_INI()->videosink_element_x;
- debug_warning("video texture usage");
}
else
{
g_list_free(element_bucket);
mm_attrs_set_int_by_name(attrs, "content_video_found", TRUE);
- if ( mmf_attrs_commit ( attrs ) ) /* return -1 if error */
- debug_error("failed to commit attribute ""content_video_found"".\n");
debug_fleave();
return MM_ERROR_PLAYER_INTERNAL;
}
-static int __mmplayer_gst_create_text_pipeline(mm_player_t* player)
+static int __mmplayer_gst_create_text_pipeline(mm_player_t* player)
{
MMPlayerGstElement* first_element = NULL;
MMPlayerGstElement* textbin = NULL;
if ( ret != MM_ERROR_NONE )
{
/* return error if failed to set state */
- debug_error("failed to set state PAUSED (live : READY).\n");
-
- /* dump state of all element */
- __mmplayer_dump_pipeline_state( player );
-
+ debug_error("failed to set READY state");
return ret;
}
else
debug_log("current state before doing transition");
MMPLAYER_PENDING_STATE(player) = MM_PLAYER_STATE_PLAYING;
- MMPLAYER_PRINT_STATE(player);
+ MMPLAYER_PRINT_STATE(player);
/* set pipeline state to PLAYING */
ret = __mmplayer_gst_set_state(player,
else
{
debug_error("failed to set state to PLAYING");
-
- /* dump state of all element */
- __mmplayer_dump_pipeline_state( player );
-
return ret;
}
- /* FIXIT : analyze so called "async problem" */
- /* set async off */
- __gst_set_async_state_change( player, FALSE );
-
/* generating debug info before returning error */
MMPLAYER_GENERATE_DOT_IF_ENABLED ( player, "pipeline-status-start" );
if ( ret != MM_ERROR_NONE )
{
debug_error("failed to set state.\n");
-
- /* dump state of all element. don't care it success or not */
- __mmplayer_dump_pipeline_state( player );
-
return ret;
}
/* rewind */
if ( rewind )
{
- if ( ! __gst_seek( player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, 1.0,
+ if ( ! __gst_seek( player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, player->playback_rate,
GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH, GST_SEEK_TYPE_SET, 0,
GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE) )
{
{
debug_error("fail to stop player.\n");
ret = MM_ERROR_PLAYER_INTERNAL;
+ __mmplayer_dump_pipeline_state(player);
}
/* generate dot file if enabled */
ret = __mmplayer_gst_set_state(player,
player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_STATE_PAUSED, async, MMPLAYER_STATE_CHANGE_TIMEOUT(player));
- if ( ret != MM_ERROR_NONE )
+ if ( FALSE == async && ret != MM_ERROR_NONE )
{
- debug_error("failed to set state to PAUSED\n");
+ GstMessage *msg = NULL;
+ GTimer *timer = NULL;
+ gdouble MAX_TIMEOUT_SEC = 3;
- /* dump state of all element */
- __mmplayer_dump_pipeline_state( player );
+ debug_error("failed to set state to PAUSED");
+
+ timer = g_timer_new();
+ g_timer_start(timer);
+
+ GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst));
+ /* check if gst error posted or not */
+ do
+ {
+ if (msg = gst_bus_timed_pop(bus, GST_SECOND /2))
+ {
+ if (GST_MESSAGE_TYPE(msg) == GST_MESSAGE_ERROR)
+ {
+ GError *error = NULL;
+ /* parse error code */
+ gst_message_parse_error(msg, &error, NULL);
+
+ if ( error->domain == GST_STREAM_ERROR )
+ {
+ ret = __gst_handle_stream_error( player, error, msg );
+ player->posted_msg = TRUE;
+ break;
+ }
+ else if ( error->domain == GST_RESOURCE_ERROR )
+ {
+ ret = __gst_handle_resource_error( player, error->code );
+ player->posted_msg = TRUE;
+ break;
+ }
+ }
+ gst_message_unref(msg);
+ }
+ } while (g_timer_elapsed(timer, NULL) < MAX_TIMEOUT_SEC);
+
+ /* clean */
+ gst_object_unref(bus);
+ g_timer_stop (timer);
+ g_timer_destroy (timer);
return ret;
}
{
int ret = MM_ERROR_NONE;
gint timeout = 0;
+ GstBus *bus = NULL;
debug_fenter();
if ( async )
debug_log("do async state transition to PLAYING.\n");
+ /* clean bus sync handler because it's not needed any more */
+ bus = gst_pipeline_get_bus (GST_PIPELINE(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst));
+ gst_bus_set_sync_handler (bus, NULL, NULL);
+ gst_object_unref(bus);
+
/* set pipeline state to PLAYING */
timeout = MMPLAYER_STATE_CHANGE_TIMEOUT(player);
+
ret = __mmplayer_gst_set_state(player,
player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, GST_STATE_PLAYING, async, timeout );
if (ret != MM_ERROR_NONE)
{
debug_error("failed to set state to PLAYING\n");
- /* dump state of all element */
- __mmplayer_dump_pipeline_state( player );
-
return ret;
}
else
{
if (async == FALSE)
- {
+ {
MMPLAYER_SET_STATE ( player, MM_PLAYER_STATE_PLAYING );
}
}
player->doing_seek = TRUE;
pos_nsec = position * G_GINT64_CONSTANT(1000000);
- ret = __gst_seek ( player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, 1.0,
+ ret = __gst_seek ( player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, player->playback_rate,
GST_FORMAT_TIME, ( GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE ),
GST_SEEK_TYPE_SET, pos_nsec, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE );
if ( !ret )
/* FIXIT : why don't we use 'GST_FORMAT_PERCENT' */
pos_nsec = (gint64) ( ( position * player->duration ) / 100 );
- ret = __gst_seek ( player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, 1.0,
+ ret = __gst_seek ( player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, player->playback_rate,
GST_FORMAT_TIME, ( GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE ),
GST_SEEK_TYPE_SET, pos_nsec, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE );
if ( !ret )
}
else
{
- data->uri_type = MM_PLAYER_URI_TYPE_FILE;
+ data->uri_type = MM_PLAYER_URI_TYPE_FILE;
}
ret = TRUE;
}
return FALSE;
}
+
gboolean _asm_lazy_pause(gpointer *data)
{
mm_player_t* player = (mm_player_t*)data;
return FALSE;
}
+
ASM_cb_result_t
__mmplayer_asm_callback(int handle, ASM_event_sources_t event_src, ASM_sound_commands_t command, unsigned int sound_status, void* cb_data)
{
debug_log("sound extraction is working...so, asm command is ignored.\n");
return result;
}
-
+
player->sm.by_asm_cb = 1; // it should be enabled for player state transition with called application command
player->sm.event_src = event_src;
{
/* can use video overlay simultaneously */
/* video resource conflict */
- if(player->pipeline->videobin)
+ if(player->pipeline->videobin)
{
if (PLAYER_INI()->multiple_codec_supported)
{
case ASM_COMMAND_STOP: // notification case
{
- debug_log("Got msg from asm to stop");
+ debug_warning("Got msg from asm to stop");
result = _mmplayer_stop((MMHandleType)player);
if (result != MM_ERROR_NONE)
player->sm.by_asm_cb = 0; // reset because no message any more from asm
}
break;
-
+
case ASM_COMMAND_PAUSE:
{
- debug_log("Got msg from asm to Pause");
-
+ debug_warning("Got msg from asm to Pause");
+
if(event_src == ASM_EVENT_SOURCE_CALL_START
|| event_src == ASM_EVENT_SOURCE_ALARM_START
|| event_src == ASM_EVENT_SOURCE_MEDIA)
{
//hold 0.7 second to excute "fadedown mute" effect
- debug_log ("do fade down->pause->undo fade down");
-
+ debug_warning ("do fade down->pause->undo fade down");
+
__mmplayer_do_sound_fadedown(player, MM_PLAYER_FADEOUT_TIME_DEFAULT);
-
+
result = _mmplayer_pause((MMHandleType)player);
if (result != MM_ERROR_NONE)
{
g_object_set( player->pipeline->audiobin[MMPLAYER_A_SINK].gst, "mute", 2, NULL);
player->lazy_pause_event_id = g_timeout_add(LAZY_PAUSE_TIMEOUT_MSEC, (GSourceFunc)_asm_lazy_pause, (gpointer)player);
- debug_log ("set lazy pause timer (id=[%d], timeout=[%d ms])", player->lazy_pause_event_id, LAZY_PAUSE_TIMEOUT_MSEC);
+ debug_warning ("set lazy pause timer (id=[%d], timeout=[%d ms])", player->lazy_pause_event_id, LAZY_PAUSE_TIMEOUT_MSEC);
}
else
{
cb_res = ASM_CB_RES_PAUSE;
}
break;
-
+
case ASM_COMMAND_RESUME:
{
- debug_log("Got msg from asm to Resume. So, application can resume. code (%d) \n", event_src);
+ debug_warning("Got msg from asm to Resume. So, application can resume. code (%d) \n", event_src);
player->sm.by_asm_cb = 0;
//ASM server is single thread daemon. So use g_idle_add() to post resume msg
g_idle_add((GSourceFunc)_asm_postmsg, (gpointer)player);
_mmplayer_create_player(MMHandleType handle) // @
{
mm_player_t* player = MM_PLAYER_CAST(handle);
- gint i;
debug_fenter();
}
/* withdraw asm */
- if ( MM_ERROR_NONE != _mmplayer_asm_deregister(&player->sm) )
+ if ( MM_ERROR_NONE != _mmplayer_asm_unregister(&player->sm) )
{
debug_error("failed to deregister asm server\n");
return MM_ERROR_PLAYER_INTERNAL;
}
else
{
- __mmplayer_realize_streaming_ext(player);
+ ret = __mmplayer_realize_streaming_ext(player);
}
debug_fleave();
/* set player state if success */
if ( MM_ERROR_NONE == ret )
{
- ret = _mmplayer_asm_set_state(hplayer, ASM_STATE_STOP);
- if ( ret )
- {
- debug_error("failed to set asm state to STOP\n");
- return ret;
+ if (player->sm.state != ASM_STATE_STOP) {
+ ret = _mmplayer_asm_set_state(hplayer, ASM_STATE_STOP);
+ if ( ret )
+ {
+ debug_error("failed to set asm state to STOP\n");
+ return ret;
+ }
}
}
* ( returning zero when getting current position in paused state) of some
* elements
*/
- ret = gst_element_query_position(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, &fmt, &pos_msec);
- if ( ! ret )
- debug_warning("getting current position failed in paused\n");
+ if ( !gst_element_query_position(player->pipeline->mainbin[MMPLAYER_M_PIPE].gst, &fmt, &pos_msec))
+ debug_warning("getting current position failed in paused\n");
player->last_position = pos_msec;
}
{
mm_player_t* player = (mm_player_t*)hplayer;
int ret = MM_ERROR_NONE;
+ gboolean async = FALSE;
debug_fenter();
__mmplayer_set_play_count( player, infinity );
if ( (!__gst_seek( player, player->pipeline->mainbin[MMPLAYER_M_PIPE].gst,
- 1.0,
+ player->playback_rate,
GST_FORMAT_TIME,
( GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE ),
GST_SEEK_TYPE_SET, start_pos,
static int
__mmplayer_set_pcm_extraction(mm_player_t* player)
{
- guint64 start_nsec = 0;
- guint64 end_nsec = 0;
- guint64 dur_nsec = 0;
- guint64 dur_msec = 0;
+ gint64 start_nsec = 0;
+ gint64 end_nsec = 0;
+ gint64 dur_nsec = 0;
+ gint64 dur_msec = 0;
GstFormat fmt = GST_FORMAT_TIME;
int required_start = 0;
int required_end = 0;
return TRUE;
}
- debug_log("not midi.\n");
-
return FALSE;
}
/* try to plug */
if ( ! __mmplayer_try_to_plug( player, pad, caps ) )
{
+ gboolean async = FALSE;
+
debug_error("failed to autoplug for type : %s\n", player->type);
+ mm_attrs_get_int_by_name(player->attrs, "profile_prepare_async", &async);
- if ( ( PLAYER_INI()->async_start ) &&
- ( player->posted_msg == FALSE ) )
+ if (async && player->posted_msg == FALSE)
{
__mmplayer_post_missed_plugin( player );
}
debug_fenter();
- return_val_if_fail( player &&
- player->pipeline &&
- player->pipeline->mainbin,
- FALSE );
-
+ return_val_if_fail( player && player->pipeline && player->pipeline->mainbin, FALSE );
mainbin = player->pipeline->mainbin;
mime = gst_structure_get_name(gst_caps_get_structure(caps, 0));
/* return if we got raw output */
- if(g_str_has_prefix(mime, "video/x-raw") || g_str_has_prefix(mime, "audio/x-raw") || g_str_has_prefix(mime, "text/plain") ||g_str_has_prefix(mime, "text/x-pango-markup"))
+ if(g_str_has_prefix(mime, "video/x-raw") || g_str_has_prefix(mime, "audio/x-raw")
+ || g_str_has_prefix(mime, "video/x-surface")
+ || g_str_has_prefix(mime, "text/plain") ||g_str_has_prefix(mime, "text/x-pango-markup"))
{
element = (GstElement*)gst_pad_get_parent(pad);
-
-
/* NOTE : When no decoder has added during autoplugging. like a simple wave playback.
* No queue will be added. I think it can caused breaking sound when playing raw audio
* frames but there's no different. Decodebin also doesn't add with those wav fils.
* has linked. if so, we need to add queue for quality of output. note that
* decodebin also has same problem.
*/
-
klass = gst_element_factory_get_klass( gst_element_get_factory(element) );
/* add queue if needed */
- if( (g_strrstr(klass, "Demux") ||
- g_strrstr(klass, "Depayloader") ||
- g_strrstr(klass, "Parse")) && !g_str_has_prefix(mime, "text"))
+ if( (g_strrstr(klass, "Demux") || g_strrstr(klass, "Depayloader")
+ || g_strrstr(klass, "Parse")) && !g_str_has_prefix(mime, "text"))
{
debug_log("adding raw queue\n");
item = player->factories;
for(; item != NULL ; item = item->next)
{
-
GstElementFactory *factory = GST_ELEMENT_FACTORY(item->data);
const GList *pads;
gint idx = 0;
if ( skip ) continue;
-
/* check factory class for filtering */
klass = gst_element_factory_get_klass(GST_ELEMENT_FACTORY(factory));
*/
if ( g_strrstr(klass, "Codec/Decoder/Image") )
{
- debug_log("player doesn't need [%s] so, skipping it\n",
- GST_PLUGIN_FEATURE_NAME (factory) );
-
+ debug_log("player doesn't need [%s] so, skipping it\n", GST_PLUGIN_FEATURE_NAME (factory) );
continue;
}
-
/* check pad compatability */
for(pads = gst_element_factory_get_static_pad_templates(factory);
pads != NULL; pads=pads->next)
- {
+ {
GstStaticPadTemplate *temp1 = pads->data;
GstCaps* static_caps = NULL;
}
/* store specific handles for futher control */
- if(g_strrstr(klass, "Demux") || g_strrstr(klass, "Parse"))
- {
+ if(g_strrstr(klass, "Demux") || g_strrstr(klass, "Parse"))
+ {
/* FIXIT : first value will be overwritten if there's more
* than 1 demuxer/parser
*/
}
else if(g_strrstr(klass, "Decoder") && __mmplayer_link_decoder(player,pad))
{
- if(mainbin[MMPLAYER_M_DEC1].gst == NULL)
- {
- debug_log("plugged element is decoder. take it[MMPLAYER_M_DEC1]\n");
- mainbin[MMPLAYER_M_DEC1].id = MMPLAYER_M_DEC1;
- mainbin[MMPLAYER_M_DEC1].gst = new_element;
- }
- else if(mainbin[MMPLAYER_M_DEC2].gst == NULL)
- {
- debug_log("plugged element is decoder. take it[MMPLAYER_M_DEC2]\n");
- mainbin[MMPLAYER_M_DEC2].id = MMPLAYER_M_DEC2;
- mainbin[MMPLAYER_M_DEC2].gst = new_element;
- }
-
+ if(mainbin[MMPLAYER_M_DEC1].gst == NULL)
+ {
+ debug_log("plugged element is decoder. take it[MMPLAYER_M_DEC1]\n");
+ mainbin[MMPLAYER_M_DEC1].id = MMPLAYER_M_DEC1;
+ mainbin[MMPLAYER_M_DEC1].gst = new_element;
+ }
+ else if(mainbin[MMPLAYER_M_DEC2].gst == NULL)
+ {
+ debug_log("plugged element is decoder. take it[MMPLAYER_M_DEC2]\n");
+ mainbin[MMPLAYER_M_DEC2].id = MMPLAYER_M_DEC2;
+ mainbin[MMPLAYER_M_DEC2].gst = new_element;
+ }
/* NOTE : IF one codec is found, add it to supported_codec and remove from
* missing plugin. Both of them are used to check what's supported codec
* before returning result of play start. And, missing plugin should be
return FALSE;
-
ERROR:
-
/* release */
if ( queue )
gst_object_unref( queue );
mm_attrs_set_int_by_name(player->attrs, "content_audio_channels", channels);
debug_log("audio samplerate : %d channels : %d", samplerate, channels);
-
- /* validate all */
- if ( mmf_attrs_commit ( player->attrs ) )
- {
- debug_error("failed to update attributes");
- return;
- }
}
else if (g_strrstr(name, "video"))
{
gboolean is_drm = FALSE;
gboolean sound_extraction = FALSE;
- debug_fenter();
-
return_val_if_fail ( player, FALSE );
attrs = MMPLAYER_GET_ATTRS(player);
if ( ! sound_extraction || is_drm )
{
- debug_log("pcm extraction param.. is drm = %d, extraction mode = %d", is_drm, sound_extraction);
+ debug_log("checking pcm extraction mode : %d, drm : %d", sound_extraction, is_drm);
return FALSE;
}
- debug_fleave();
-
return TRUE;
}
/* post error to application */
if ( ! player->posted_msg )
{
- if (msg_param.code == MM_MESSAGE_DRM_NOT_AUTHORIZED )
- {
- MMPLAYER_POST_MSG( player, MM_MESSAGE_DRM_NOT_AUTHORIZED, NULL );
- }
- else
- {
- MMPLAYER_POST_MSG( player, MM_MESSAGE_ERROR, &msg_param );
- }
-
+ MMPLAYER_POST_MSG( player, MM_MESSAGE_ERROR, &msg_param );
/* don't post more if one was sent already */
player->posted_msg = TRUE;
}
break;
case GST_RESOURCE_ERROR_NOT_FOUND:
case GST_RESOURCE_ERROR_OPEN_READ:
- if ( MMPLAYER_IS_HTTP_STREAMING(player) || MMPLAYER_IS_HTTP_LIVE_STREAMING ( player ) )
+ if ( MMPLAYER_IS_HTTP_STREAMING(player) || MMPLAYER_IS_HTTP_LIVE_STREAMING ( player )
+ || MMPLAYER_IS_RTSP_STREAMING(player))
{
trans_err = MM_ERROR_PLAYER_STREAMING_CONNECTION_FAIL;
break;
}
case GST_RESOURCE_ERROR_READ:
- if ( MMPLAYER_IS_HTTP_STREAMING(player) || MMPLAYER_IS_HTTP_LIVE_STREAMING ( player ))
+ if ( MMPLAYER_IS_HTTP_STREAMING(player) || MMPLAYER_IS_HTTP_LIVE_STREAMING ( player )
+ || MMPLAYER_IS_RTSP_STREAMING(player))
{
trans_err = MM_ERROR_PLAYER_STREAMING_FAIL;
break;
}
- case GST_RESOURCE_ERROR_SEEK:
+ case GST_RESOURCE_ERROR_WRITE:
case GST_RESOURCE_ERROR_FAILED:
+ trans_err = MM_ERROR_PLAYER_INTERNAL;
+ break;
+
+ case GST_RESOURCE_ERROR_SEEK:
case GST_RESOURCE_ERROR_TOO_LAZY:
case GST_RESOURCE_ERROR_BUSY:
case GST_RESOURCE_ERROR_OPEN_WRITE:
case GST_RESOURCE_ERROR_OPEN_READ_WRITE:
case GST_RESOURCE_ERROR_CLOSE:
- case GST_RESOURCE_ERROR_WRITE:
case GST_RESOURCE_ERROR_SYNC:
case GST_RESOURCE_ERROR_SETTINGS:
default:
/* Decoder Custom Message */
if ( strstr(error->message, "ongoing") )
{
- if ( strcasestr(klass, "audio") )
+ if ( strncasecmp(klass, "audio", 5) )
{
if ( ( player->can_support_codec & FOUND_PLUGIN_VIDEO ) )
{
}
}
- else if ( strcasestr(klass, "video") )
+ else if ( strncasecmp(klass, "video", 5) )
{
if ( ( player->can_support_codec & FOUND_PLUGIN_AUDIO ) )
{
case GST_STREAM_ERROR_DECRYPT_NOKEY:
{
debug_error("decryption error, [%s] failed, reason : [%s]\n", src_element_name, error->message);
- return MM_MESSAGE_DRM_NOT_AUTHORIZED;
+
+ if ( strstr(error->message, "rights expired") )
+ {
+ return MM_ERROR_PLAYER_DRM_EXPIRED;
+ }
+ else if ( strstr(error->message, "no rights") )
+ {
+ return MM_ERROR_PLAYER_DRM_NO_LICENSE;
+ }
+ else if ( strstr(error->message, "has future rights") )
+ {
+ return MM_ERROR_PLAYER_DRM_FUTURE_USE;
+ }
+
+ return MM_ERROR_PLAYER_DRM_NOT_AUTHORIZED;
}
break;
return "INVAID";
}
}
+
gboolean
__is_rtsp_streaming ( mm_player_t* player )
{