emotion: add webcam detection, clean up code and add timing detection.
[profile/ivi/emotion.git] / src / modules / gstreamer / emotion_gstreamer.c
1 #include <unistd.h>
2 #include <fcntl.h>
3
4 #include <Eina.h>
5
6 #include "emotion_private.h"
7 #include "emotion_gstreamer.h"
8 #include "Emotion.h"
9
10 int _emotion_gstreamer_log_domain = -1;
11
12 /* Callbacks to get the eos */
13 static Eina_Bool  _eos_timer_fct   (void *data);
14 static void _for_each_tag    (GstTagList const* list, gchar const* tag, void *data);
15 static void _free_metadata   (Emotion_Gstreamer_Metadata *m);
16
17 /* Interface */
18
19 static unsigned char  em_init                     (Evas_Object     *obj,
20                                                    void           **emotion_video,
21                                                    Emotion_Module_Options *opt);
22
23 static int            em_shutdown                 (void           *video);
24
25 static unsigned char  em_file_open                (const char     *file,
26                                                    Evas_Object     *obj,
27                                                    void            *video);
28
29 static void           em_file_close               (void            *video);
30
31 static void           em_play                     (void            *video,
32                                                    double           pos);
33
34 static void           em_stop                     (void            *video);
35
36 static void           em_size_get                 (void            *video,
37                                                    int             *width,
38                                                    int             *height);
39
40 static void           em_pos_set                  (void            *video,
41                                                    double           pos);
42
43
44 static double         em_len_get                  (void            *video);
45
46 static int            em_fps_num_get              (void            *video);
47
48 static int            em_fps_den_get              (void            *video);
49
50 static double         em_fps_get                  (void            *video);
51
52 static double         em_pos_get                  (void            *video);
53
54 static void           em_vis_set                  (void            *video,
55                                                    Emotion_Vis      vis);
56
57 static Emotion_Vis    em_vis_get                  (void            *video);
58
59 static Eina_Bool      em_vis_supported            (void            *video,
60                                                    Emotion_Vis      vis);
61
62 static double         em_ratio_get                (void            *video);
63
64 static int            em_video_handled            (void            *video);
65
66 static int            em_audio_handled            (void            *video);
67
68 static int            em_seekable                 (void            *video);
69
70 static void           em_frame_done               (void            *video);
71
72 static Emotion_Format em_format_get               (void            *video);
73
74 static void           em_video_data_size_get      (void            *video,
75                                                    int             *w,
76                                                    int             *h);
77
78 static int            em_yuv_rows_get             (void            *video,
79                                                    int              w,
80                                                    int              h,
81                                                    unsigned char  **yrows,
82                                                    unsigned char  **urows,
83                                                    unsigned char  **vrows);
84
85 static int            em_bgra_data_get            (void            *video,
86                                                    unsigned char  **bgra_data);
87
88 static void           em_event_feed               (void            *video,
89                                                    int              event);
90
91 static void           em_event_mouse_button_feed  (void            *video,
92                                                    int              button,
93                                                    int              x,
94                                                    int              y);
95
96 static void           em_event_mouse_move_feed    (void            *video,
97                                                    int              x,
98                                                    int              y);
99
100 static int            em_video_channel_count      (void             *video);
101
102 static void           em_video_channel_set        (void             *video,
103                                                    int               channel);
104
105 static int            em_video_channel_get        (void             *video);
106
107 static const char    *em_video_channel_name_get   (void             *video,
108                                                    int               channel);
109
110 static void           em_video_channel_mute_set   (void             *video,
111                                                    int               mute);
112
113 static int            em_video_channel_mute_get   (void             *video);
114
115 static int            em_audio_channel_count      (void             *video);
116
117 static void           em_audio_channel_set        (void             *video,
118                                                    int               channel);
119
120 static int            em_audio_channel_get        (void             *video);
121
122 static const char    *em_audio_channel_name_get   (void             *video,
123                                                    int               channel);
124
125 static void           em_audio_channel_mute_set   (void             *video,
126                                                    int               mute);
127
128 static int            em_audio_channel_mute_get   (void             *video);
129
130 static void           em_audio_channel_volume_set (void             *video,
131                                                    double             vol);
132
133 static double         em_audio_channel_volume_get (void             *video);
134
135 static int            em_spu_channel_count        (void             *video);
136
137 static void           em_spu_channel_set          (void             *video,
138                                                    int               channel);
139
140 static int            em_spu_channel_get          (void             *video);
141
142 static const char    *em_spu_channel_name_get     (void             *video,
143                                                    int               channel);
144
145 static void           em_spu_channel_mute_set     (void             *video,
146                                                    int               mute);
147
148 static int            em_spu_channel_mute_get     (void             *video);
149
150 static int            em_chapter_count            (void             *video);
151
152 static void           em_chapter_set              (void             *video,
153                                                    int               chapter);
154
155 static int            em_chapter_get              (void             *video);
156
157 static const char    *em_chapter_name_get         (void             *video,
158                                                    int               chapter);
159
160 static void           em_speed_set                (void             *video,
161                                                    double            speed);
162
163 static double         em_speed_get                (void             *video);
164
165 static int            em_eject                    (void             *video);
166
167 static const char    *em_meta_get                 (void             *video,
168                                                    int               meta);
169
170 /* Module interface */
171
172 static Emotion_Video_Module em_module =
173 {
174    em_init, /* init */
175    em_shutdown, /* shutdown */
176    em_file_open, /* file_open */
177    em_file_close, /* file_close */
178    em_play, /* play */
179    em_stop, /* stop */
180    em_size_get, /* size_get */
181    em_pos_set, /* pos_set */
182    em_len_get, /* len_get */
183    em_fps_num_get, /* fps_num_get */
184    em_fps_den_get, /* fps_den_get */
185    em_fps_get, /* fps_get */
186    em_pos_get, /* pos_get */
187    em_vis_set, /* vis_set */
188    em_vis_get, /* vis_get */
189    em_vis_supported, /* vis_supported */
190    em_ratio_get, /* ratio_get */
191    em_video_handled, /* video_handled */
192    em_audio_handled, /* audio_handled */
193    em_seekable, /* seekable */
194    em_frame_done, /* frame_done */
195    em_format_get, /* format_get */
196    em_video_data_size_get, /* video_data_size_get */
197    em_yuv_rows_get, /* yuv_rows_get */
198    em_bgra_data_get, /* bgra_data_get */
199    em_event_feed, /* event_feed */
200    em_event_mouse_button_feed, /* event_mouse_button_feed */
201    em_event_mouse_move_feed, /* event_mouse_move_feed */
202    em_video_channel_count, /* video_channel_count */
203    em_video_channel_set, /* video_channel_set */
204    em_video_channel_get, /* video_channel_get */
205    em_video_channel_name_get, /* video_channel_name_get */
206    em_video_channel_mute_set, /* video_channel_mute_set */
207    em_video_channel_mute_get, /* video_channel_mute_get */
208    em_audio_channel_count, /* audio_channel_count */
209    em_audio_channel_set, /* audio_channel_set */
210    em_audio_channel_get, /* audio_channel_get */
211    em_audio_channel_name_get, /* audio_channel_name_get */
212    em_audio_channel_mute_set, /* audio_channel_mute_set */
213    em_audio_channel_mute_get, /* audio_channel_mute_get */
214    em_audio_channel_volume_set, /* audio_channel_volume_set */
215    em_audio_channel_volume_get, /* audio_channel_volume_get */
216    em_spu_channel_count, /* spu_channel_count */
217    em_spu_channel_set, /* spu_channel_set */
218    em_spu_channel_get, /* spu_channel_get */
219    em_spu_channel_name_get, /* spu_channel_name_get */
220    em_spu_channel_mute_set, /* spu_channel_mute_set */
221    em_spu_channel_mute_get, /* spu_channel_mute_get */
222    em_chapter_count, /* chapter_count */
223    em_chapter_set, /* chapter_set */
224    em_chapter_get, /* chapter_get */
225    em_chapter_name_get, /* chapter_name_get */
226    em_speed_set, /* speed_set */
227    em_speed_get, /* speed_get */
228    em_eject, /* eject */
229    em_meta_get, /* meta_get */
230    NULL /* handle */
231 };
232
233 static Emotion_Video_Stream *
234 emotion_video_stream_new(Emotion_Gstreamer_Video *ev)
235 {
236    Emotion_Video_Stream *vstream;
237
238    if (!ev) return NULL;
239
240    vstream = (Emotion_Video_Stream *)calloc(1, sizeof(Emotion_Video_Stream));
241    if (!vstream) return NULL;
242
243    ev->video_streams = eina_list_append(ev->video_streams, vstream);
244    if (eina_error_get())
245      {
246         free(vstream);
247         return NULL;
248      }
249    return vstream;
250 }
251
252 static void
253 emotion_video_stream_free(Emotion_Gstreamer_Video *ev, Emotion_Video_Stream *vstream)
254 {
255    if (!ev || !vstream) return;
256
257    ev->video_streams = eina_list_remove(ev->video_streams, vstream);
258    free(vstream);
259 }
260
261 static const char *
262 emotion_visualization_element_name_get(Emotion_Vis visualisation)
263 {
264    switch (visualisation)
265      {
266       case EMOTION_VIS_NONE:
267          return NULL;
268       case EMOTION_VIS_GOOM:
269          return "goom";
270       case EMOTION_VIS_LIBVISUAL_BUMPSCOPE:
271          return "libvisual_bumpscope";
272       case EMOTION_VIS_LIBVISUAL_CORONA:
273          return "libvisual_corona";
274       case EMOTION_VIS_LIBVISUAL_DANCING_PARTICLES:
275          return "libvisual_dancingparticles";
276       case EMOTION_VIS_LIBVISUAL_GDKPIXBUF:
277          return "libvisual_gdkpixbuf";
278       case EMOTION_VIS_LIBVISUAL_G_FORCE:
279          return "libvisual_G-Force";
280       case EMOTION_VIS_LIBVISUAL_GOOM:
281          return "libvisual_goom";
282       case EMOTION_VIS_LIBVISUAL_INFINITE:
283          return "libvisual_infinite";
284       case EMOTION_VIS_LIBVISUAL_JAKDAW:
285          return "libvisual_jakdaw";
286       case EMOTION_VIS_LIBVISUAL_JESS:
287          return "libvisual_jess";
288       case EMOTION_VIS_LIBVISUAL_LV_ANALYSER:
289          return "libvisual_lv_analyzer";
290       case EMOTION_VIS_LIBVISUAL_LV_FLOWER:
291          return "libvisual_lv_flower";
292       case EMOTION_VIS_LIBVISUAL_LV_GLTEST:
293          return "libvisual_lv_gltest";
294       case EMOTION_VIS_LIBVISUAL_LV_SCOPE:
295          return "libvisual_lv_scope";
296       case EMOTION_VIS_LIBVISUAL_MADSPIN:
297          return "libvisual_madspin";
298       case EMOTION_VIS_LIBVISUAL_NEBULUS:
299          return "libvisual_nebulus";
300       case EMOTION_VIS_LIBVISUAL_OINKSIE:
301          return "libvisual_oinksie";
302       case EMOTION_VIS_LIBVISUAL_PLASMA:
303          return "libvisual_plazma";
304       default:
305          return "goom";
306      }
307 }
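/* The table above maps Emotion's visualization enum onto GStreamer element
 * names: "goom" plus the various "libvisual_*" wrappers.  None of these are
 * guaranteed to be installed; em_vis_supported() below probes for the element
 * with gst_element_factory_find() before a caller relies on one of them. */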
308
309 static unsigned char
310 em_init(Evas_Object            *obj,
311         void                  **emotion_video,
312         Emotion_Module_Options *opt __UNUSED__)
313 {
314    Emotion_Gstreamer_Video *ev;
315    GError                  *error = NULL;
316
317    if (!emotion_video)
318      return 0;
319
320    ev = calloc(1, sizeof(Emotion_Gstreamer_Video));
321    if (!ev) return 0;
322
323    ev->obj = obj;
324
325    /* Initialization of gstreamer */
326    if (!gst_init_check(NULL, NULL, &error))
327      goto failure;
328
329    /* Default values */
330    ev->ratio = 1.0;
331    ev->vis = EMOTION_VIS_NONE;
332    ev->volume = 0.8;
333
334    *emotion_video = ev;
335
336    return 1;
337
338 failure:
339    free(ev);
340
341    return 0;
342 }
343
344 static int
345 em_shutdown(void *video)
346 {
347    Emotion_Gstreamer_Video *ev;
348    Emotion_Audio_Stream *astream;
349    Emotion_Video_Stream *vstream;
350
351    ev = (Emotion_Gstreamer_Video *)video;
352    if (!ev)
353      return 0;
354
355    if (ev->pipeline)
356      {
357        gst_element_set_state(ev->pipeline, GST_STATE_NULL);
358        gst_object_unref(ev->pipeline);
359        ev->pipeline = NULL;
360      }
361
362    EINA_LIST_FREE(ev->audio_streams, astream)
363      free(astream);
364    EINA_LIST_FREE(ev->video_streams, vstream)
365      free(vstream);
366
367    free(ev);
368
369    return 1;
370 }
371
372
373 static unsigned char
374 em_file_open(const char   *file,
375              Evas_Object  *obj,
376              void         *video)
377 {
378    Emotion_Gstreamer_Video *ev;
379    Eina_Strbuf *sbuf = NULL;
380    const char *uri;
381    double start, end;
382    int i;
383
384    ev = (Emotion_Gstreamer_Video *)video;
385
386    if (!file) return EINA_FALSE;
387    if (strstr(file, "://") == NULL)
388      {
389         sbuf = eina_strbuf_new();
390         eina_strbuf_append(sbuf, "file://");
391         if (strncmp(file, "./", 2) == 0)
392           file += 2;
393         if (strstr(file, ":/") != NULL)
394           { /* We absolutely need file:///C:/ under Windows, so adding it here */
395              eina_strbuf_append(sbuf, "/");
396           }
397         else if (*file != '/')
398           {
399              char tmp[PATH_MAX];
400
401              if (getcwd(tmp, PATH_MAX))
402                {
403                   eina_strbuf_append(sbuf, tmp);
404                   eina_strbuf_append(sbuf, "/");
405                }
406           }
407         eina_strbuf_append(sbuf, file);
408      }
409
410    start = ecore_time_get();
411    uri = sbuf ? eina_strbuf_string_get(sbuf) : file;
412    DBG("setting file to '%s'", uri);
413    ev->pipeline = gstreamer_video_sink_new(ev, obj, uri);
414    if (sbuf) eina_strbuf_free(sbuf);
415    end = ecore_time_get();
416    DBG("Pipeline creation: %f", end - start);
417
418    if (!ev->pipeline)
419      return EINA_FALSE;
420
421    start = ecore_time_get();
422    ev->eos_bus = gst_pipeline_get_bus(GST_PIPELINE(ev->pipeline));
423    if (!ev->eos_bus)
424      {
425         ERR("could not get the bus");
426         return EINA_FALSE;
427      }
428    end = ecore_time_get();
429    DBG("Get the bus: %f", end - start);
430
431    /* Evas Object */
432    ev->obj = obj;
433
434    ev->position = 0.0;
435
436    g_object_get(G_OBJECT(ev->pipeline),
437                 "n-audio", &ev->audio_stream_nbr,
438                 "n-video", &ev->video_stream_nbr,
439                 NULL);
440
441    if ((ev->video_stream_nbr == 0) && (ev->audio_stream_nbr == 0))
442      {
443         ERR("No audio or video stream found");
444         gst_object_unref(ev->pipeline);
445         ev->pipeline = NULL;
446         return EINA_FALSE;
447      }
448
449    /* video stream */
450
451    start = ecore_time_get();
452    for (i = 0; i < ev->video_stream_nbr; i++)
453      {
454         Emotion_Video_Stream *vstream;
455         GstPad       *pad;
456         GstCaps      *caps;
457         GstStructure *structure;
458         GstQuery     *query;
459         const GValue *val;
460         gchar        *str;
461
462         gdouble length_time = 0.0;
463         gint width;
464         gint height;
465         gint fps_num;
466         gint fps_den;
467         guint32 fourcc = 0;
468
469         g_signal_emit_by_name(ev->pipeline, "get-video-pad", i, &pad);
470         if (!pad)
471           continue;
472
473         caps = gst_pad_get_negotiated_caps(pad);
474         if (!caps)
475           goto unref_pad_v;
476         structure = gst_caps_get_structure(caps, 0);
477         if (!gst_structure_get_int(structure, "width", &width))
478           goto unref_caps_v;
479         if (!gst_structure_get_int(structure, "height", &height))
480           goto unref_caps_v;
481         if (!gst_structure_get_fraction(structure, "framerate", &fps_num, &fps_den))
482           goto unref_caps_v;
483
484         str = gst_caps_to_string(caps);
485         if (g_str_has_prefix(str, "video/x-raw-yuv"))
486           {
487              val = gst_structure_get_value(structure, "format");
488              fourcc = gst_value_get_fourcc(val);
489           }
490         else if (g_str_has_prefix(str, "video/x-raw-rgb"))
491           fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
492         else
493           { g_free(str); goto unref_caps_v; } /* free the caps string before bailing out */
494         g_free(str); /* only needed for the prefix checks above */
495
496         query = gst_query_new_duration(GST_FORMAT_TIME);
497         if (gst_pad_peer_query(pad, query))
498           {
499              gint64 t;
500
501              gst_query_parse_duration(query, NULL, &t);
502              length_time = (double)t / (double)GST_SECOND;
503           }
504         else
505           goto unref_query_v;
506
507         vstream = emotion_video_stream_new(ev);
508         if (!vstream) goto unref_query_v;
509
510         vstream->length_time = length_time;
511         vstream->width = width;
512         vstream->height = height;
513         vstream->fps_num = fps_num;
514         vstream->fps_den = fps_den;
515         vstream->fourcc = fourcc;
516         vstream->index = i;
517
518      unref_query_v:
519         gst_query_unref(query);
520      unref_caps_v:
521         gst_caps_unref(caps);
522      unref_pad_v:
523         gst_object_unref(pad);
524      }
525    end = ecore_time_get();
526    DBG("Get video streams: %f", end - start);
527
528    /* Audio streams */
529
530    start = ecore_time_get();
531    for (i = 0; i < ev->audio_stream_nbr; i++)
532      {
533         Emotion_Audio_Stream *astream;
534         GstPad       *pad;
535         GstCaps      *caps;
536         GstStructure *structure;
537         GstQuery     *query;
538
539         gdouble length_time = 0.0;
540         gint channels;
541         gint samplerate;
542
543         g_signal_emit_by_name(ev->pipeline, "get-audio-pad", i, &pad);
544         if (!pad)
545           continue;
546
547         caps = gst_pad_get_negotiated_caps(pad);
548         if (!caps)
549           goto unref_pad_a;
550         structure = gst_caps_get_structure(caps, 0);
551
552         if (!gst_structure_get_int(structure, "channels", &channels))
553           goto unref_caps_a;
554         if (!gst_structure_get_int(structure, "rate", &samplerate))
555           goto unref_caps_a;
556
557         query = gst_query_new_duration(GST_FORMAT_TIME);
558         if (gst_pad_peer_query(pad, query))
559           {
560              gint64 t;
561
562              gst_query_parse_duration(query, NULL, &t);
563              length_time = (double)t / (double)GST_SECOND;
564           }
565         else
566           goto unref_query_a;
567
568         astream = calloc(1, sizeof(Emotion_Audio_Stream));
569         if (!astream) goto unref_query_a; /* don't leak the query/caps/pad refs */
570         ev->audio_streams = eina_list_append(ev->audio_streams, astream);
571         if (eina_error_get())
572           {
573              free(astream);
574              goto unref_query_a;
575           }
576
577         astream->length_time = length_time;
578         astream->channels = channels;
579         astream->samplerate = samplerate;
580
581      unref_query_a:
582         gst_query_unref(query);
583      unref_caps_a:
584         gst_caps_unref(caps);
585      unref_pad_a:
586         gst_object_unref(pad);
587      }
588    end = ecore_time_get();
589    DBG("Get audio streams: %f", end - start);
590
591    /* Visualization sink */
592
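/* Audio-only media gets a synthetic video stream here: playbin2 renders a
 * visualization when a "vis-plugin" element is set and the VIS flag is raised
 * on its "flags" property.  A minimal standalone sketch of the same idea
 * (assuming a playbin2 element named 'play' already exists):
 *
 *   GstElement *goom = gst_element_factory_make("goom", NULL);
 *   gint flags;
 *   g_object_set(G_OBJECT(play), "vis-plugin", goom, NULL);
 *   g_object_get(G_OBJECT(play), "flags", &flags, NULL);
 *   g_object_set(G_OBJECT(play), "flags", flags | (1 << 3), NULL);  // GST_PLAY_FLAG_VIS
 */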
593    start = ecore_time_get();
594    if (ev->video_stream_nbr == 0)
595      {
596         GstElement *vis = NULL;
597         Emotion_Video_Stream *vstream;
598         Emotion_Audio_Stream *astream;
599         gint flags;
600         const char *vis_name;
601
602         if (!(vis_name = emotion_visualization_element_name_get(ev->vis)))
603           {
604              ERR("could not get visualization element name for %d", ev->vis);
605              goto finalize;
606           }
607
608         astream = eina_list_data_get(ev->audio_streams);
609
610         vis = gst_element_factory_make(vis_name, "vissink");
611         vstream = emotion_video_stream_new(ev);
612         if (!vstream)
613           {
614              ERR("could not create visualization stream");
615              goto finalize;
616           }
617         vstream->length_time = astream->length_time;
618         vstream->width = 320;
619         vstream->height = 200;
620         vstream->fps_num = 25;
621         vstream->fps_den = 1;
622         vstream->fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
623
624         g_object_set(G_OBJECT(ev->pipeline), "vis-plugin", vis, NULL);
625         g_object_get(G_OBJECT(ev->pipeline), "flags", &flags, NULL);
626         flags |= 0x00000008; /* GST_PLAY_FLAG_VIS */
627         g_object_set(G_OBJECT(ev->pipeline), "flags", flags, NULL);
628      }
629    end = ecore_time_get();
630    DBG("Get visualization streams: %f", end - start);
631
632  finalize:
633
634    ev->video_stream_nbr = eina_list_count(ev->video_streams);
635    ev->audio_stream_nbr = eina_list_count(ev->audio_streams);
636
637    if (ev->video_stream_nbr == 1)
638      {
639        Emotion_Video_Stream *vstream;
640
641        vstream = eina_list_data_get(ev->video_streams);
642        ev->ratio = (double)vstream->width / (double)vstream->height;
643        _emotion_frame_resize(ev->obj, vstream->width, vstream->height, ev->ratio);
644      }
645
646    {
647      /* recap of what was found: */
648      Emotion_Video_Stream *vstream;
649      Emotion_Audio_Stream *astream;
650
651      vstream = eina_list_data_get(ev->video_streams);
652      if (vstream)
653        {
654          DBG("video size=%dx%d, fps=%d/%d, "
655              "fourcc=%"GST_FOURCC_FORMAT", length=%"GST_TIME_FORMAT,
656              vstream->width, vstream->height, vstream->fps_num, vstream->fps_den,
657              GST_FOURCC_ARGS(vstream->fourcc),
658              GST_TIME_ARGS((guint64)(vstream->length_time * GST_SECOND)));
659        }
660
661      astream = eina_list_data_get(ev->audio_streams);
662      if (astream)
663        {
664          DBG("audio channels=%d, rate=%d, length=%"GST_TIME_FORMAT,
665              astream->channels, astream->samplerate,
666              GST_TIME_ARGS((guint64)(astream->length_time * GST_SECOND)));
667        }
668    }
669
670    if (ev->metadata)
671      _free_metadata(ev->metadata);
672    ev->metadata = calloc(1, sizeof(Emotion_Gstreamer_Metadata));
673
674    start = ecore_time_get();
675    em_audio_channel_volume_set(ev, ev->volume);
676
677    _eos_timer_fct(ev);
678    _emotion_open_done(ev->obj);
679    end = ecore_time_get();
680    DBG("Last stuff: %f", end - start);
681
682    return 1;
683 }
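/* The ecore_time_get() pairs above are the timing instrumentation added by
 * this commit: each DBG() reports how long one step of the open sequence took
 * (pipeline creation, bus lookup, stream discovery, ...).  To actually see
 * them, raise the log level of this module's domain; something like the
 * following should work with eina's default log setup (your_app is a
 * placeholder):
 *
 *   EINA_LOG_LEVELS="emotion-gstreamer:4" ./your_app    (4 = EINA_LOG_LEVEL_DBG)
 */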
684
685 static void
686 em_file_close(void *video)
687 {
688    Emotion_Gstreamer_Video *ev;
689    Emotion_Audio_Stream *astream;
690    Emotion_Video_Stream *vstream;
691
692    ev = (Emotion_Gstreamer_Video *)video;
693    if (!ev)
694      return;
695
696    if (ev->eos_bus)
697      {
698         gst_object_unref(GST_OBJECT(ev->eos_bus));
699         ev->eos_bus = NULL;
700      }
701
702    /* we clear the stream lists */
703    EINA_LIST_FREE(ev->audio_streams, astream)
704      free(astream);
705    EINA_LIST_FREE(ev->video_streams, vstream)
706      free(vstream);
707
708    /* shutdown eos */
709    if (ev->eos_timer)
710      {
711         ecore_timer_del(ev->eos_timer);
712         ev->eos_timer = NULL;
713      }
714
715    if (ev->metadata)
716      {
717         _free_metadata(ev->metadata);
718         ev->metadata = NULL;
719      }
720 }
721
722 static void
723 em_play(void   *video,
724         double  pos __UNUSED__)
725 {
726    Emotion_Gstreamer_Video *ev;
727
728    ev = (Emotion_Gstreamer_Video *)video;
729    gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
730    ev->play = 1;
731    ev->play_started = 1;
732
733    /* eos */
734    ev->eos_timer = ecore_timer_add(0.1, _eos_timer_fct, ev);
735 }
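/* End-of-stream handling is poll based: rather than installing a GstBus
 * watch, em_play() arms this 0.1s ecore timer and _eos_timer_fct() (at the
 * bottom of this file) drains the bus with gst_bus_poll() on every tick,
 * dispatching ERROR, EOS, TAG and ASYNC_DONE messages to Emotion. */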
736
737 static void
738 em_stop(void *video)
739 {
740    Emotion_Gstreamer_Video *ev;
741
742    ev = (Emotion_Gstreamer_Video *)video;
743
744    /* shutdown eos */
745    if (ev->eos_timer)
746      {
747         ecore_timer_del(ev->eos_timer);
748         ev->eos_timer = NULL;
749      }
750
751    gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
752    ev->play = 0;
753 }
754
755 static void
756 em_size_get(void  *video,
757             int   *width,
758             int   *height)
759 {
760    Emotion_Gstreamer_Video *ev;
761    Emotion_Video_Stream      *vstream;
762
763    ev = (Emotion_Gstreamer_Video *)video;
764
765    vstream = (Emotion_Video_Stream *)eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
766    if (vstream)
767      {
768         if (width) *width = vstream->width;
769         if (height) *height = vstream->height;
770      }
771    else
772      {
773         if (width) *width = 0;
774         if (height) *height = 0;
775      }
776 }
777
778 static void
779 em_pos_set(void   *video,
780            double  pos)
781 {
782    Emotion_Gstreamer_Video *ev;
783    gboolean res;
784
785    ev = (Emotion_Gstreamer_Video *)video;
786
787    res = gst_element_seek(ev->pipeline, 1.0,
788                           GST_FORMAT_TIME,
789                           GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH,
790                           GST_SEEK_TYPE_SET,
791                           (gint64)(pos * (double)GST_SECOND),
792                           GST_SEEK_TYPE_NONE, -1);
793 }
794
795 static double
796 em_len_get(void *video)
797 {
798    Emotion_Gstreamer_Video *ev;
799    Emotion_Video_Stream *vstream;
800    Emotion_Audio_Stream *astream;
801    Eina_List *l;
802    GstFormat fmt;
803    gint64 val;
804    gboolean ret;
805
806    ev = video;
807    fmt = GST_FORMAT_TIME;
808    ret = gst_element_query_duration(ev->pipeline, &fmt, &val);
809    if (!ret)
810      goto fallback;
811
812    if (fmt != GST_FORMAT_TIME)
813      {
814         DBG("requested duration in time, but got %s instead.",
815             gst_format_get_name(fmt));
816         goto fallback;
817      }
818
819    if (val <= 0.0)
820      goto fallback;
821
822    return val / 1000000000.0;
823
824  fallback:
825    EINA_LIST_FOREACH(ev->audio_streams, l, astream)
826      if (astream->length_time >= 0)
827        return astream->length_time;
828
829    EINA_LIST_FOREACH(ev->video_streams, l, vstream)
830      if (vstream->length_time >= 0)
831        return vstream->length_time;
832
833    return 0.0;
834 }
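/* Length lookup strategy: ask the pipeline for a GST_FORMAT_TIME duration
 * first, and only if that query fails (or answers in another format) fall
 * back to the per-stream lengths cached during em_file_open(). */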
835
836 static int
837 em_fps_num_get(void *video)
838 {
839    Emotion_Gstreamer_Video *ev;
840    Emotion_Video_Stream      *vstream;
841
842    ev = (Emotion_Gstreamer_Video *)video;
843
844    vstream = (Emotion_Video_Stream *)eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
845    if (vstream)
846      return vstream->fps_num;
847
848    return 0;
849 }
850
851 static int
852 em_fps_den_get(void *video)
853 {
854    Emotion_Gstreamer_Video *ev;
855    Emotion_Video_Stream      *vstream;
856
857    ev = (Emotion_Gstreamer_Video *)video;
858
859    vstream = (Emotion_Video_Stream *)eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
860    if (vstream)
861      return vstream->fps_den;
862
863    return 1;
864 }
865
866 static double
867 em_fps_get(void *video)
868 {
869    Emotion_Gstreamer_Video *ev;
870    Emotion_Video_Stream      *vstream;
871
872    ev = (Emotion_Gstreamer_Video *)video;
873
874    vstream = (Emotion_Video_Stream *)eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
875    if (vstream)
876      return (double)vstream->fps_num / (double)vstream->fps_den;
877
878    return 0.0;
879 }
880
881 static double
882 em_pos_get(void *video)
883 {
884    Emotion_Gstreamer_Video *ev;
885    GstFormat fmt;
886    gint64 val;
887    gboolean ret;
888
889    ev = video;
890    fmt = GST_FORMAT_TIME;
891    ret = gst_element_query_position(ev->pipeline, &fmt, &val);
892    if (!ret)
893      return ev->position;
894
895    if (fmt != GST_FORMAT_TIME)
896      {
897         ERR("requested position in time, but got %s instead.",
898             gst_format_get_name(fmt));
899         return ev->position;
900      }
901
902    ev->position = val / 1000000000.0;
903    return ev->position;
904 }
905
906 static void
907 em_vis_set(void *video,
908            Emotion_Vis vis)
909 {
910    Emotion_Gstreamer_Video *ev;
911
912    ev = (Emotion_Gstreamer_Video *)video;
913
914    if (ev->vis == vis) return;
915    ev->vis = vis;
916 }
917
918 static Emotion_Vis
919 em_vis_get(void *video)
920 {
921    Emotion_Gstreamer_Video *ev;
922
923    ev = (Emotion_Gstreamer_Video *)video;
924
925    return ev->vis;
926 }
927
928 static Eina_Bool
929 em_vis_supported(void *ef __UNUSED__, Emotion_Vis vis)
930 {
931    const char *name;
932    GstElementFactory *factory;
933
934    if (vis == EMOTION_VIS_NONE)
935      return EINA_TRUE;
936
937    name = emotion_visualization_element_name_get(vis);
938    if (!name)
939      return EINA_FALSE;
940
941    factory = gst_element_factory_find(name);
942    if (!factory)
943      return EINA_FALSE;
944
945    gst_object_unref(factory);
946    return EINA_TRUE;
947 }
948
949 static double
950 em_ratio_get(void *video)
951 {
952    Emotion_Gstreamer_Video *ev;
953
954    ev = (Emotion_Gstreamer_Video *)video;
955
956    return ev->ratio;
957 }
958
959 static int
960 em_video_handled(void *video)
961 {
962    Emotion_Gstreamer_Video *ev;
963
964    ev = (Emotion_Gstreamer_Video *)video;
965
966    if (!eina_list_count(ev->video_streams))
967      return 0;
968
969    return 1;
970 }
971
972 static int
973 em_audio_handled(void *video)
974 {
975    Emotion_Gstreamer_Video *ev;
976
977    ev = (Emotion_Gstreamer_Video *)video;
978
979    if (!eina_list_count(ev->audio_streams))
980      return 0;
981
982    return 1;
983 }
984
985 static int
986 em_seekable(void *video __UNUSED__)
987 {
988    return 1;
989 }
990
991 static void
992 em_frame_done(void *video __UNUSED__)
993 {
994 }
995
996 static Emotion_Format
997 em_format_get(void *video)
998 {
999    Emotion_Gstreamer_Video *ev;
1000    Emotion_Video_Stream      *vstream;
1001
1002    ev = (Emotion_Gstreamer_Video *)video;
1003
1004    vstream = (Emotion_Video_Stream *)eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
1005    if (vstream)
1006      {
1007         switch (vstream->fourcc)
1008           {
1009            case GST_MAKE_FOURCC('I', '4', '2', '0'):
1010               return EMOTION_FORMAT_I420;
1011            case GST_MAKE_FOURCC('Y', 'V', '1', '2'):
1012               return EMOTION_FORMAT_YV12;
1013            case GST_MAKE_FOURCC('Y', 'U', 'Y', '2'):
1014               return EMOTION_FORMAT_YUY2;
1015            case GST_MAKE_FOURCC('A', 'R', 'G', 'B'):
1016               return EMOTION_FORMAT_BGRA;
1017            default:
1018               return EMOTION_FORMAT_NONE;
1019           }
1020      }
1021    return EMOTION_FORMAT_NONE;
1022 }
1023
1024 static void
1025 em_video_data_size_get(void *video, int *w, int *h)
1026 {
1027    Emotion_Gstreamer_Video *ev;
1028    Emotion_Video_Stream      *vstream;
1029
1030    ev = (Emotion_Gstreamer_Video *)video;
1031
1032    vstream = (Emotion_Video_Stream *)eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
1033    if (vstream)
1034      {
1035         *w = vstream->width;
1036         *h = vstream->height;
1037      }
1038    else
1039      {
1040         *w = 0;
1041         *h = 0;
1042      }
1043 }
1044
1045 static int
1046 em_yuv_rows_get(void           *video __UNUSED__,
1047                 int             w __UNUSED__,
1048                 int             h __UNUSED__,
1049                 unsigned char **yrows __UNUSED__,
1050                 unsigned char **urows __UNUSED__,
1051                 unsigned char **vrows __UNUSED__)
1052 {
1053    return 0;
1054 }
1055
1056 static int
1057 em_bgra_data_get(void *video __UNUSED__, unsigned char **bgra_data __UNUSED__)
1058 {
1059    return 0;
1060 }
1061
1062 static void
1063 em_event_feed(void *video __UNUSED__, int event __UNUSED__)
1064 {
1065 }
1066
1067 static void
1068 em_event_mouse_button_feed(void *video __UNUSED__, int button __UNUSED__, int x __UNUSED__, int y __UNUSED__)
1069 {
1070 }
1071
1072 static void
1073 em_event_mouse_move_feed(void *video __UNUSED__, int x __UNUSED__, int y __UNUSED__)
1074 {
1075 }
1076
1077 /* Video channels */
1078 static int
1079 em_video_channel_count(void *video)
1080 {
1081    Emotion_Gstreamer_Video *ev;
1082
1083    ev = (Emotion_Gstreamer_Video *)video;
1084
1085    return eina_list_count(ev->video_streams);
1086 }
1087
1088 static void
1089 em_video_channel_set(void *video __UNUSED__,
1090                      int   channel __UNUSED__)
1091 {
1092 #if 0
1093    Emotion_Gstreamer_Video *ev;
1094
1095    ev = (Emotion_Gstreamer_Video *)video;
1096
1097    if (channel < 0) channel = 0;
1098 #endif
1099    /* FIXME: to be done... */
1100 }
1101
1102 static int
1103 em_video_channel_get(void *video)
1104 {
1105    Emotion_Gstreamer_Video *ev;
1106
1107    ev = (Emotion_Gstreamer_Video *)video;
1108
1109    return ev->video_stream_nbr;
1110 }
1111
1112 static const char *
1113 em_video_channel_name_get(void *video __UNUSED__,
1114                           int   channel __UNUSED__)
1115 {
1116    return NULL;
1117 }
1118
1119 static void
1120 em_video_channel_mute_set(void *video,
1121                           int   mute)
1122 {
1123    Emotion_Gstreamer_Video *ev;
1124
1125    ev = (Emotion_Gstreamer_Video *)video;
1126
1127    ev->video_mute = mute;
1128 }
1129
1130 static int
1131 em_video_channel_mute_get(void *video)
1132 {
1133    Emotion_Gstreamer_Video *ev;
1134
1135    ev = (Emotion_Gstreamer_Video *)video;
1136
1137    return ev->video_mute;
1138 }
1139
1140 /* Audio channels */
1141
1142 static int
1143 em_audio_channel_count(void *video)
1144 {
1145    Emotion_Gstreamer_Video *ev;
1146
1147    ev = (Emotion_Gstreamer_Video *)video;
1148
1149    return eina_list_count(ev->audio_streams);
1150 }
1151
1152 static void
1153 em_audio_channel_set(void *video __UNUSED__,
1154                      int   channel __UNUSED__)
1155 {
1156 #if 0
1157    Emotion_Gstreamer_Video *ev;
1158
1159    ev = (Emotion_Gstreamer_Video *)video;
1160
1161    if (channel < -1) channel = -1;
1162 #endif
1163    /* FIXME: to be done... */
1164 }
1165
1166 static int
1167 em_audio_channel_get(void *video)
1168 {
1169    Emotion_Gstreamer_Video *ev;
1170
1171    ev = (Emotion_Gstreamer_Video *)video;
1172
1173    return ev->audio_stream_nbr;
1174 }
1175
1176 static const char *
1177 em_audio_channel_name_get(void *video __UNUSED__,
1178                           int   channel __UNUSED__)
1179 {
1180    return NULL;
1181 }
1182
1183 #define GST_PLAY_FLAG_AUDIO (1 << 1)
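/* Only the AUDIO bit is needed here.  For reference, playbin2's GstPlayFlags
 * (GStreamer 0.10) also define VIDEO (1 << 0), TEXT (1 << 2) and VIS (1 << 3);
 * the VIS bit is the 0x00000008 OR-ed into "flags" in em_file_open() above. */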
1184
1185 static void
1186 em_audio_channel_mute_set(void *video,
1187                           int   mute)
1188 {
1189    Emotion_Gstreamer_Video *ev;
1190    /* int flags; -- only needed by the disabled flag-toggling code below */
1191
1192    ev = (Emotion_Gstreamer_Video *)video;
1193
1194    if (ev->audio_mute == mute)
1195      return;
1196
1197    ev->audio_mute = mute;
1198
1199    g_object_set(G_OBJECT(ev->pipeline), "mute", !!mute, NULL);
1200    /* This code should stop the decoding of only the audio stream, but it stops everything :"( */
1201    /* g_object_get(G_OBJECT(ev->pipeline), "flags", &flags, NULL); */
1202    /* if (mute) */
1203    /*   flags &= ~GST_PLAY_FLAG_AUDIO; */
1204    /* else */
1205    /*   flags |= GST_PLAY_FLAG_AUDIO; */
1206    /* g_object_set(G_OBJECT(ev->pipeline), "flags", flags, NULL); */
1207    /* g_object_get(G_OBJECT(ev->pipeline), "flags", &flags, NULL); */
1208    /* fprintf(stderr, "flags-n: %x\n", flags); */
1209 }
1210
1211 static int
1212 em_audio_channel_mute_get(void *video)
1213 {
1214    Emotion_Gstreamer_Video *ev;
1215
1216    ev = (Emotion_Gstreamer_Video *)video;
1217
1218    return ev->audio_mute;
1219 }
1220
1221 static void
1222 em_audio_channel_volume_set(void  *video,
1223                             double vol)
1224 {
1225    Emotion_Gstreamer_Video *ev;
1226
1227    ev = (Emotion_Gstreamer_Video *)video;
1228
1229    if (vol < 0.0)
1230      vol = 0.0;
1231    if (vol > 1.0)
1232      vol = 1.0;
1233    ev->volume = vol;
1234    g_object_set(G_OBJECT(ev->pipeline), "volume", vol, NULL);
1235 }
1236
1237 static double
1238 em_audio_channel_volume_get(void *video)
1239 {
1240    Emotion_Gstreamer_Video *ev;
1241
1242    ev = (Emotion_Gstreamer_Video *)video;
1243
1244    return ev->volume;
1245 }
1246
1247 /* spu stuff */
1248
1249 static int
1250 em_spu_channel_count(void *video __UNUSED__)
1251 {
1252    return 0;
1253 }
1254
1255 static void
1256 em_spu_channel_set(void *video __UNUSED__, int channel __UNUSED__)
1257 {
1258 }
1259
1260 static int
1261 em_spu_channel_get(void *video __UNUSED__)
1262 {
1263    return 1;
1264 }
1265
1266 static const char *
1267 em_spu_channel_name_get(void *video __UNUSED__, int channel __UNUSED__)
1268 {
1269    return NULL;
1270 }
1271
1272 static void
1273 em_spu_channel_mute_set(void *video __UNUSED__, int mute __UNUSED__)
1274 {
1275 }
1276
1277 static int
1278 em_spu_channel_mute_get(void *video __UNUSED__)
1279 {
1280    return 0;
1281 }
1282
1283 static int
1284 em_chapter_count(void *video __UNUSED__)
1285 {
1286    return 0;
1287 }
1288
1289 static void
1290 em_chapter_set(void *video __UNUSED__, int chapter __UNUSED__)
1291 {
1292 }
1293
1294 static int
1295 em_chapter_get(void *video __UNUSED__)
1296 {
1297    return 0;
1298 }
1299
1300 static const char *
1301 em_chapter_name_get(void *video __UNUSED__, int chapter __UNUSED__)
1302 {
1303    return NULL;
1304 }
1305
1306 static void
1307 em_speed_set(void *video __UNUSED__, double speed __UNUSED__)
1308 {
1309 }
1310
1311 static double
1312 em_speed_get(void *video __UNUSED__)
1313 {
1314    return 1.0;
1315 }
1316
1317 static int
1318 em_eject(void *video __UNUSED__)
1319 {
1320    return 1;
1321 }
1322
1323 static const char *
1324 em_meta_get(void *video, int meta)
1325 {
1326    Emotion_Gstreamer_Video *ev;
1327    const char *str = NULL;
1328
1329    ev = (Emotion_Gstreamer_Video *)video;
1330
1331    if (!ev || !ev->metadata) return NULL;
1332    switch (meta)
1333      {
1334       case META_TRACK_TITLE:
1335          str = ev->metadata->title;
1336          break;
1337       case META_TRACK_ARTIST:
1338          str = ev->metadata->artist;
1339          break;
1340       case  META_TRACK_ALBUM:
1341          str = ev->metadata->album;
1342          break;
1343       case META_TRACK_YEAR:
1344          str = ev->metadata->year;
1345          break;
1346       case META_TRACK_GENRE:
1347          str = ev->metadata->genre;
1348          break;
1349       case META_TRACK_COMMENT:
1350          str = ev->metadata->comment;
1351          break;
1352       case META_TRACK_DISCID:
1353          str = ev->metadata->disc_id;
1354          break;
1355       default:
1356          break;
1357      }
1358
1359    return str;
1360 }
1361
1362 static Eina_Bool
1363 module_open(Evas_Object           *obj,
1364             const Emotion_Video_Module **module,
1365             void                 **video,
1366             Emotion_Module_Options *opt)
1367 {
1368    if (!module)
1369      return EINA_FALSE;
1370
1371    if (_emotion_gstreamer_log_domain < 0)
1372      {
1373         eina_threads_init();
1374         eina_log_threads_enable();
1375         _emotion_gstreamer_log_domain = eina_log_domain_register
1376           ("emotion-gstreamer", EINA_COLOR_LIGHTCYAN);
1377         if (_emotion_gstreamer_log_domain < 0)
1378           {
1379              EINA_LOG_CRIT("Could not register log domain 'emotion-gstreamer'");
1380              return EINA_FALSE;
1381           }
1382      }
1383
1384    if (!em_module.init(obj, video, opt))
1385      return EINA_FALSE;
1386
1387    eina_threads_init();
1388
1389    *module = &em_module;
1390    return EINA_TRUE;
1391 }
1392
1393 static void
1394 module_close(Emotion_Video_Module *module __UNUSED__,
1395              void                 *video)
1396 {
1397    em_module.shutdown(video);
1398
1399    eina_threads_shutdown();
1400 }
1401
1402 Eina_Bool
1403 gstreamer_module_init(void)
1404 {
1405    GError *error = NULL;
1406
1407    if (!gst_init_check(0, NULL, &error))
1408      {
1409         EINA_LOG_CRIT("Could not init GStreamer");
1410         return EINA_FALSE;
1411      }
1412
1413    if (gst_plugin_register_static(GST_VERSION_MAJOR, GST_VERSION_MINOR,
1414                                   "emotion-sink",
1415                                   "video sink plugin for Emotion",
1416                                   gstreamer_plugin_init,
1417                                   VERSION,
1418                                   "LGPL",
1419                                   "Enlightenment",
1420                                   PACKAGE,
1421                                   "http://www.enlightenment.org/") == FALSE)
1422      {
1423         EINA_LOG_CRIT("Could not load static gstreamer video sink for Emotion.");
1424         return EINA_FALSE;
1425      }
1426
1427    return _emotion_module_register("gstreamer", module_open, module_close);
1428 }
1429
1430 void
1431 gstreamer_module_shutdown(void)
1432 {
1433    _emotion_module_unregister("gstreamer");
1434
1435    gst_deinit();
1436 }
1437
1438 #ifndef EMOTION_STATIC_BUILD_GSTREAMER
1439
1440 EINA_MODULE_INIT(gstreamer_module_init);
1441 EINA_MODULE_SHUTDOWN(gstreamer_module_shutdown);
1442
1443 #endif
1444
1445 static void
1446 _for_each_tag(GstTagList const* list,
1447                     gchar const* tag,
1448                     void *data)
1449 {
1450    Emotion_Gstreamer_Video *ev;
1451    int i;
1452    int count;
1453
1454
1455    ev = (Emotion_Gstreamer_Video*)data;
1456
1457    if (!ev || !ev->metadata) return;
1458
1459    count = gst_tag_list_get_tag_size(list, tag);
1460
1461    for (i = 0; i < count; i++)
1462      {
1463         if (!strcmp(tag, GST_TAG_TITLE))
1464           {
1465              char *str;
1466              if (ev->metadata->title) g_free(ev->metadata->title);
1467              if (gst_tag_list_get_string(list, GST_TAG_TITLE, &str))
1468                ev->metadata->title = str;
1469              else
1470                ev->metadata->title = NULL;
1471              break;
1472           }
1473         if (!strcmp(tag, GST_TAG_ALBUM))
1474           {
1475              gchar *str;
1476              if (ev->metadata->album) g_free(ev->metadata->album);
1477              if (gst_tag_list_get_string(list, GST_TAG_ALBUM, &str))
1478                ev->metadata->album = str;
1479              else
1480                ev->metadata->album = NULL;
1481              break;
1482           }
1483         if (!strcmp(tag, GST_TAG_ARTIST))
1484           {
1485              gchar *str;
1486              if (ev->metadata->artist) g_free( ev->metadata->artist);
1487              if (gst_tag_list_get_string(list, GST_TAG_ARTIST, &str))
1488                ev->metadata->artist = str;
1489              else
1490                ev->metadata->artist = NULL;
1491              break;
1492           }
1493         if (!strcmp(tag, GST_TAG_GENRE))
1494           {
1495              gchar *str;
1496              if (ev->metadata->genre) g_free( ev->metadata->genre);
1497              if (gst_tag_list_get_string(list, GST_TAG_GENRE, &str))
1498                ev->metadata->genre = str;
1499              else
1500                ev->metadata->genre = NULL;
1501              break;
1502           }
1503         if (!strcmp(tag, GST_TAG_COMMENT))
1504           {
1505              gchar *str;
1506              if (ev->metadata->comment) g_free(ev->metadata->comment);
1507              if (gst_tag_list_get_string(list, GST_TAG_COMMENT, &str))
1508                ev->metadata->comment = str;
1509              else
1510                ev->metadata->comment = NULL;
1511              break;
1512           }
1513         if (!strcmp(tag, GST_TAG_DATE))
1514           {
1515              gchar *str;
1516              const GValue *date;
1517              if (ev->metadata->year) g_free(ev->metadata->year);
1518              date = gst_tag_list_get_value_index(list, GST_TAG_DATE, 0);
1519              if (date)
1520                str = g_strdup_value_contents(date);
1521              else
1522                str = NULL;
1523              ev->metadata->year = str;
1524              break;
1525           }
1526
1527         if (!strcmp(tag, GST_TAG_TRACK_NUMBER))
1528           {
1529              gchar *str;
1530              const GValue *track;
1531              if (ev->metadata->count) g_free( ev->metadata->count);
1532              track = gst_tag_list_get_value_index(list, GST_TAG_TRACK_NUMBER, 0);
1533              if (track)
1534                str = g_strdup_value_contents(track);
1535              else
1536                str = NULL;
1537              ev->metadata->count = str;
1538              break;
1539           }
1540
1541 #ifdef GST_TAG_CDDA_CDDB_DISCID
1542         if (!strcmp(tag, GST_TAG_CDDA_CDDB_DISCID))
1543           {
1544              gchar *str;
1545              const GValue *discid;
1546              if (ev->metadata->disc_id) g_free(ev->metadata->disc_id);
1547              discid = gst_tag_list_get_value_index(list, GST_TAG_CDDA_CDDB_DISCID, 0);
1548              if (discid)
1549                str = g_strdup_value_contents(discid);
1550              else
1551                str = NULL;
1552              ev->metadata->disc_id = str;
1553              break;
1554           }
1555 #endif
1556      }
1557
1558 }
1559
1560 static void
1561 _free_metadata(Emotion_Gstreamer_Metadata *m)
1562 {
1563   if (!m) return;
1564
1565   if (m->title)   g_free(m->title);
1566   if (m->album)   g_free(m->album);
1567   if (m->artist)  g_free(m->artist);
1568   if (m->genre)   g_free(m->genre);
1569   if (m->comment) g_free(m->comment);
1570   if (m->year)    g_free(m->year);
1571   if (m->count)   g_free(m->count);
1572   if (m->disc_id) g_free(m->disc_id);
1573
1574   free(m);
1575 }
1576
1577 static Eina_Bool
1578 _eos_timer_fct(void *data)
1579 {
1580    Emotion_Gstreamer_Video *ev;
1581    GstMessage              *msg;
1582
1583    ev = (Emotion_Gstreamer_Video *)data;
1584    if (ev->play_started)
1585      {
1586         _emotion_playback_started(ev->obj);
1587         ev->play_started = 0;
1588      }
1589    while ((msg = gst_bus_poll(ev->eos_bus, GST_MESSAGE_ERROR | GST_MESSAGE_EOS | GST_MESSAGE_TAG | GST_MESSAGE_ASYNC_DONE, 0)))
1590      {
1591         switch (GST_MESSAGE_TYPE(msg))
1592           {
1593            case GST_MESSAGE_ERROR:
1594                 {
1595                    gchar *debug;
1596                    GError *err;
1597
1598                    gst_message_parse_error(msg, &err, &debug);
1599                    g_free(debug);
1600
1601                    ERR("Error: %s", err->message);
1602                    g_error_free(err);
1603
1604                    break;
1605                 }
1606            case GST_MESSAGE_EOS:
1607               if (ev->eos_timer)
1608                 {
1609                    ecore_timer_del(ev->eos_timer);
1610                    ev->eos_timer = NULL;
1611                 }
1612               ev->play = 0;
1613               _emotion_decode_stop(ev->obj);
1614               _emotion_playback_finished(ev->obj);
1615               break;
1616            case GST_MESSAGE_TAG:
1617                 {
1618                    GstTagList *new_tags;
1619                    gst_message_parse_tag(msg, &new_tags);
1620                    if (new_tags)
1621                      {
1622                         gst_tag_list_foreach(new_tags, (GstTagForeachFunc)_for_each_tag, ev);
1623                         gst_tag_list_free(new_tags);
1624                      }
1625                    break;
1626                 }
1627            case GST_MESSAGE_ASYNC_DONE:
1628               _emotion_seek_done(ev->obj);
1629               break;
1630            default:
1631               ERR("bus says: %s [%i]",
1632                   GST_MESSAGE_SRC_NAME(msg),
1633                   GST_MESSAGE_TYPE(msg));
1634               break;
1635           }
1636         gst_message_unref(msg);
1637      }
1638    return EINA_TRUE;
1639 }