gboolean last,
gpointer user_data);
+/* Callback used to poll the bus for EOS (and errors) */
+static int _eos_timer_fct (void *data);
GstElement *
make_queue ()
if (!ev->pipeline)
goto failure_pipeline;
- ev->bus = gst_pipeline_get_bus (GST_PIPELINE (ev->pipeline));
- if (!ev->bus)
+ ev->eos_bus = gst_pipeline_get_bus (GST_PIPELINE (ev->pipeline));
+ if (!ev->eos_bus)
goto failure_bus;
/* We allocate the sinks lists */
failure_audio_sinks:
ecore_list_destroy (ev->video_sinks);
failure_video_sinks:
- gst_object_unref (GST_OBJECT (ev->bus));
+ gst_object_unref (GST_OBJECT (ev->eos_bus));
failure_bus:
/* this call is not really necessary */
gst_element_set_state (ev->pipeline, GST_STATE_NULL);
gst_element_set_state (ev->pipeline, GST_STATE_NULL);
gst_object_unref (GST_OBJECT (ev->pipeline));
- gst_object_unref (GST_OBJECT (ev->bus));
+ gst_object_unref (GST_OBJECT (ev->eos_bus));
gst_deinit ();
ecore_list_destroy (ev->video_sinks);
Emotion_Gstreamer_Video *ev;
ev = (Emotion_Gstreamer_Video *)video;
- printf ("Open file gstreamer... %s\n", file);
/* Evas Object */
ev->obj = obj;
/* we clear the sink lists */
ecore_list_clear (ev->video_sinks);
ecore_list_clear (ev->audio_sinks);
+
+ /* shutdown eos */
+ if (ev->eos_timer) {
+ ecore_timer_del (ev->eos_timer);
+ ev->eos_timer = NULL;
+ }
}
static void
ev = (Emotion_Gstreamer_Video *)video;
gst_element_set_state (ev->pipeline, GST_STATE_PLAYING);
ev->play = 1;
+
+ /* eos */
+ ev->eos_timer = ecore_timer_add (0.1, _eos_timer_fct, ev);
}
static void
gst_element_set_state (ev->pipeline, GST_STATE_PAUSED);
ev->play = 0;
+
+ /* shutdown eos */
+ if (ev->eos_timer) {
+ ecore_timer_del (ev->eos_timer);
+ ev->eos_timer = NULL;
+ }
}
static void
ev = (Emotion_Gstreamer_Video *)user_data;
caps = gst_pad_get_caps (new_pad);
str = gst_caps_to_string (caps);
-/* g_print ("New pad : %s\n", str); */
/* video stream */
if (g_str_has_prefix (str, "video/")) {
Emotion_Video_Sink *vsink;
return bin;
}
-static gboolean
-_bus_call (GstBus *bus,
- GstMessage *msg,
- gpointer data)
-{
- Emotion_Gstreamer_Video *ev;
-
- ev = (Emotion_Gstreamer_Video *)data;
- if (!ev) return 0;
-
- switch (GST_MESSAGE_TYPE (msg)) {
- case GST_MESSAGE_EOS:
- g_print ("End-of-stream\n");
- break;
- case GST_MESSAGE_ERROR: {
- gchar *debug;
- GError *err;
-
- gst_message_parse_error (msg, &err, &debug);
- g_free (debug);
-
- g_print ("Error: %s\n", err->message);
- g_error_free (err);
-
- break;
- }
- default:
- break;
- }
-
- return TRUE;
-}
-
static int
_cdda_pipeline_build (void *video, const char * device, unsigned int track)
{
gint64 time;
gst_query_parse_duration (query, NULL, &time);
- g_print (" duration : %" GST_TIME_FORMAT "\n\n", GST_TIME_ARGS (time));
asink->length_time = (double)time / (double)GST_SECOND;
}
gst_query_unref (query);
GstElement *visbin;
g_snprintf (buf, 128, "visbin%d", index);
- g_print ("vis : %s\n", buf);
visbin = gst_bin_get_by_name (GST_BIN (ev->pipeline), buf);
if (visbin) {
GstPad *srcpad;
G_CALLBACK (cb_handoff), ev);
}
}
-
- gst_bus_add_watch (ev->bus, _bus_call, ev);
return 1;
return 0;
}
+
+/* Ecore timer callback: drain ERROR/EOS messages from the pipeline bus.
+ * Defined static to match the forward declaration above. */
+static int
+_eos_timer_fct (void *data)
+{
+   Emotion_Gstreamer_Video *ev;
+   GstMessage *msg;
+   int keep_timer = 1; /* 1 = re-arm the timer, 0 = cancel it */
+
+   ev = (Emotion_Gstreamer_Video *)data;
+   /* Non-blocking poll (timeout 0): handle every pending ERROR/EOS message. */
+   while ((msg = gst_bus_poll (ev->eos_bus, GST_MESSAGE_ERROR | GST_MESSAGE_EOS, 0))) {
+      switch (GST_MESSAGE_TYPE(msg)) {
+      case GST_MESSAGE_ERROR: {
+         gchar *debug;
+         GError *err;
+
+         gst_message_parse_error (msg, &err, &debug);
+         g_free (debug);
+
+         g_print ("Error: %s\n", err->message);
+         g_error_free (err);
+
+         break;
+      }
+      case GST_MESSAGE_EOS:
+         /* Cancel the timer by returning 0 rather than calling
+          * ecore_timer_del() on the timer from inside its own callback:
+          * ecore frees a timer whose callback returns 0, so deleting it
+          * here as well risks a double free. Just forget the handle. */
+         ev->eos_timer = NULL;
+         keep_timer = 0;
+         ev->play = 0;
+         _emotion_decode_stop(ev->obj);
+         _emotion_playback_finished(ev->obj);
+         break;
+      default:
+         break;
+      }
+      gst_message_unref (msg);
+   }
+   return keep_timer;
+}