38abf57dd2ffcc4fbd33955c48a56637e3c98158
[profile/ivi/emotion.git] / src / modules / gstreamer / emotion_gstreamer.c
1 #ifdef HAVE_CONFIG_H
2 # include "config.h"
3 #endif
4
5 #ifdef HAVE_UNISTD_H
6 # include <unistd.h>
7 #endif
8 #include <fcntl.h>
9
10 #include <Eina.h>
11 #include <Evas.h>
12 #include <Ecore.h>
13
14 #define HTTP_STREAM 0
15 #define RTSP_STREAM 1
16 #include <glib.h>
17 #include <gst/gst.h>
18 #include <glib-object.h>
19 #include <gst/video/gstvideosink.h>
20 #include <gst/video/video.h>
21
22 #ifdef HAVE_ECORE_X
23 # include <Ecore_X.h>
24 # include <Ecore_Evas.h>
25 # ifdef HAVE_XOVERLAY_H
26 #  include <gst/interfaces/xoverlay.h>
27 # endif
28 #endif
29
30 #include "Emotion.h"
31 #include "emotion_private.h"
32 #include "emotion_gstreamer.h"
33
34 Eina_Bool window_manager_video = EINA_FALSE;
35 int _emotion_gstreamer_log_domain = -1;
36 Eina_Bool debug_fps = EINA_FALSE;
37 Eina_Bool _ecore_x_available = EINA_FALSE;
38
39 /* Callbacks to get the eos */
40 static void _for_each_tag    (GstTagList const* list, gchar const* tag, void *data);
41 static void _free_metadata   (Emotion_Gstreamer_Metadata *m);
42
43 /* Interface */
44
45 static unsigned char  em_init                     (Evas_Object     *obj,
46                                                    void           **emotion_video,
47                                                    Emotion_Module_Options *opt);
48
49 static unsigned char  em_file_open                (const char     *file,
50                                                    Evas_Object     *obj,
51                                                    void            *video);
52
53 static void           em_file_close               (void            *video);
54
55 static void           em_play                     (void            *video,
56                                                    double           pos);
57
58 static void           em_stop                     (void            *video);
59
60 static void           em_size_get                 (void            *video,
61                                                    int             *width,
62                                                    int             *height);
63
64 static void           em_pos_set                  (void            *video,
65                                                    double           pos);
66
67
68 static double         em_len_get                  (void            *video);
69
70 static double         em_buffer_size_get          (void            *video);
71
72 static int            em_fps_num_get              (void            *video);
73
74 static int            em_fps_den_get              (void            *video);
75
76 static double         em_fps_get                  (void            *video);
77
78 static double         em_pos_get                  (void            *video);
79
80 static void           em_vis_set                  (void            *video,
81                                                    Emotion_Vis      vis);
82
83 static Emotion_Vis    em_vis_get                  (void            *video);
84
85 static Eina_Bool      em_vis_supported            (void            *video,
86                                                    Emotion_Vis      vis);
87
88 static double         em_ratio_get                (void            *video);
89
90 static int            em_video_handled            (void            *video);
91
92 static int            em_audio_handled            (void            *video);
93
94 static int            em_seekable                 (void            *video);
95
96 static void           em_frame_done               (void            *video);
97
98 static Emotion_Format em_format_get               (void            *video);
99
100 static void           em_video_data_size_get      (void            *video,
101                                                    int             *w,
102                                                    int             *h);
103
104 static int            em_yuv_rows_get             (void            *video,
105                                                    int              w,
106                                                    int              h,
107                                                    unsigned char  **yrows,
108                                                    unsigned char  **urows,
109                                                    unsigned char  **vrows);
110
111 static int            em_bgra_data_get            (void            *video,
112                                                    unsigned char  **bgra_data);
113
114 static void           em_event_feed               (void            *video,
115                                                    int              event);
116
117 static void           em_event_mouse_button_feed  (void            *video,
118                                                    int              button,
119                                                    int              x,
120                                                    int              y);
121
122 static void           em_event_mouse_move_feed    (void            *video,
123                                                    int              x,
124                                                    int              y);
125
126 static int            em_video_channel_count      (void             *video);
127
128 static void           em_video_channel_set        (void             *video,
129                                                    int               channel);
130
131 static int            em_video_channel_get        (void             *video);
132
133 static const char    *em_video_channel_name_get   (void             *video,
134                                                    int               channel);
135
136 static void           em_video_channel_mute_set   (void             *video,
137                                                    int               mute);
138
139 static int            em_video_channel_mute_get   (void             *video);
140
141 static int            em_audio_channel_count      (void             *video);
142
143 static void           em_audio_channel_set        (void             *video,
144                                                    int               channel);
145
146 static int            em_audio_channel_get        (void             *video);
147
148 static const char    *em_audio_channel_name_get   (void             *video,
149                                                    int               channel);
150
151 static void           em_audio_channel_mute_set   (void             *video,
152                                                    int               mute);
153
154 static int            em_audio_channel_mute_get   (void             *video);
155
156 static void           em_audio_channel_volume_set (void             *video,
157                                                    double             vol);
158
159 static double         em_audio_channel_volume_get (void             *video);
160
161 static int            em_spu_channel_count        (void             *video);
162
163 static void           em_spu_channel_set          (void             *video,
164                                                    int               channel);
165
166 static int            em_spu_channel_get          (void             *video);
167
168 static const char    *em_spu_channel_name_get     (void             *video,
169                                                    int               channel);
170
171 static void           em_spu_channel_mute_set     (void             *video,
172                                                    int               mute);
173
174 static int            em_spu_channel_mute_get     (void             *video);
175
176 static int            em_chapter_count            (void             *video);
177
178 static void           em_chapter_set              (void             *video,
179                                                    int               chapter);
180
181 static int            em_chapter_get              (void             *video);
182
183 static const char    *em_chapter_name_get         (void             *video,
184                                                    int               chapter);
185
186 static void           em_speed_set                (void             *video,
187                                                    double            speed);
188
189 static double         em_speed_get                (void             *video);
190
191 static int            em_eject                    (void             *video);
192
193 static const char    *em_meta_get                 (void             *video,
194                                                    int               meta);
195
196 static void           em_priority_set             (void             *video,
197                                                    Eina_Bool         pri);
198 static Eina_Bool      em_priority_get             (void             *video);
199
200 static GstBusSyncReply _eos_sync_fct(GstBus *bus,
201                                      GstMessage *message,
202                                      gpointer data);
203
204 /* Module interface */
205
/* Function table handed to the emotion core; slot order must match the
 * Emotion_Video_Module layout declared in emotion_private.h. */
static Emotion_Video_Module em_module =
{
   em_init, /* init */
   em_shutdown, /* shutdown */
   em_file_open, /* file_open */
   em_file_close, /* file_close */
   em_play, /* play */
   em_stop, /* stop */
   em_size_get, /* size_get */
   em_pos_set, /* pos_set */
   em_len_get, /* len_get */
   em_buffer_size_get, /* buffer_size_get */
   em_fps_num_get, /* fps_num_get */
   em_fps_den_get, /* fps_den_get */
   em_fps_get, /* fps_get */
   em_pos_get, /* pos_get */
   em_vis_set, /* vis_set */
   em_vis_get, /* vis_get */
   em_vis_supported, /* vis_supported */
   em_ratio_get, /* ratio_get */
   em_video_handled, /* video_handled */
   em_audio_handled, /* audio_handled */
   em_seekable, /* seekable */
   em_frame_done, /* frame_done */
   em_format_get, /* format_get */
   em_video_data_size_get, /* video_data_size_get */
   em_yuv_rows_get, /* yuv_rows_get */
   em_bgra_data_get, /* bgra_data_get */
   em_event_feed, /* event_feed */
   em_event_mouse_button_feed, /* event_mouse_button_feed */
   em_event_mouse_move_feed, /* event_mouse_move_feed */
   em_video_channel_count, /* video_channel_count */
   em_video_channel_set, /* video_channel_set */
   em_video_channel_get, /* video_channel_get */
   em_video_channel_name_get, /* video_channel_name_get */
   em_video_channel_mute_set, /* video_channel_mute_set */
   em_video_channel_mute_get, /* video_channel_mute_get */
   em_audio_channel_count, /* audio_channel_count */
   em_audio_channel_set, /* audio_channel_set */
   em_audio_channel_get, /* audio_channel_get */
   em_audio_channel_name_get, /* audio_channel_name_get */
   em_audio_channel_mute_set, /* audio_channel_mute_set */
   em_audio_channel_mute_get, /* audio_channel_mute_get */
   em_audio_channel_volume_set, /* audio_channel_volume_set */
   em_audio_channel_volume_get, /* audio_channel_volume_get */
   em_spu_channel_count, /* spu_channel_count */
   em_spu_channel_set, /* spu_channel_set */
   em_spu_channel_get, /* spu_channel_get */
   em_spu_channel_name_get, /* spu_channel_name_get */
   em_spu_channel_mute_set, /* spu_channel_mute_set */
   em_spu_channel_mute_get, /* spu_channel_mute_get */
   em_chapter_count, /* chapter_count */
   em_chapter_set, /* chapter_set */
   em_chapter_get, /* chapter_get */
   em_chapter_name_get, /* chapter_name_get */
   em_speed_set, /* speed_set */
   em_speed_get, /* speed_get */
   em_eject, /* eject */
   em_meta_get, /* meta_get */
   em_priority_set, /* priority_set */
   em_priority_get, /* priority_get */
   NULL /* handle */
};
269
270 static int priority_overide = 0;
271
272 static Emotion_Video_Stream *
273 emotion_video_stream_new(Emotion_Gstreamer_Video *ev)
274 {
275    Emotion_Video_Stream *vstream;
276
277    if (!ev) return NULL;
278
279    vstream = (Emotion_Video_Stream *)calloc(1, sizeof(Emotion_Video_Stream));
280    if (!vstream) return NULL;
281
282    ev->video_streams = eina_list_append(ev->video_streams, vstream);
283    if (eina_error_get())
284      {
285         free(vstream);
286         return NULL;
287      }
288    return vstream;
289 }
290
291 static const char *
292 emotion_visualization_element_name_get(Emotion_Vis visualisation)
293 {
294    switch (visualisation)
295      {
296       case EMOTION_VIS_NONE:
297          return NULL;
298       case EMOTION_VIS_GOOM:
299          return "goom";
300       case EMOTION_VIS_LIBVISUAL_BUMPSCOPE:
301          return "libvisual_bumpscope";
302       case EMOTION_VIS_LIBVISUAL_CORONA:
303          return "libvisual_corona";
304       case EMOTION_VIS_LIBVISUAL_DANCING_PARTICLES:
305          return "libvisual_dancingparticles";
306       case EMOTION_VIS_LIBVISUAL_GDKPIXBUF:
307          return "libvisual_gdkpixbuf";
308       case EMOTION_VIS_LIBVISUAL_G_FORCE:
309          return "libvisual_G-Force";
310       case EMOTION_VIS_LIBVISUAL_GOOM:
311          return "libvisual_goom";
312       case EMOTION_VIS_LIBVISUAL_INFINITE:
313          return "libvisual_infinite";
314       case EMOTION_VIS_LIBVISUAL_JAKDAW:
315          return "libvisual_jakdaw";
316       case EMOTION_VIS_LIBVISUAL_JESS:
317          return "libvisual_jess";
318       case EMOTION_VIS_LIBVISUAL_LV_ANALYSER:
319          return "libvisual_lv_analyzer";
320       case EMOTION_VIS_LIBVISUAL_LV_FLOWER:
321          return "libvisual_lv_flower";
322       case EMOTION_VIS_LIBVISUAL_LV_GLTEST:
323          return "libvisual_lv_gltest";
324       case EMOTION_VIS_LIBVISUAL_LV_SCOPE:
325          return "libvisual_lv_scope";
326       case EMOTION_VIS_LIBVISUAL_MADSPIN:
327          return "libvisual_madspin";
328       case EMOTION_VIS_LIBVISUAL_NEBULUS:
329          return "libvisual_nebulus";
330       case EMOTION_VIS_LIBVISUAL_OINKSIE:
331          return "libvisual_oinksie";
332       case EMOTION_VIS_LIBVISUAL_PLASMA:
333          return "libvisual_plazma";
334       default:
335          return "goom";
336      }
337 }
338
339 static unsigned char
340 em_init(Evas_Object            *obj,
341         void                  **emotion_video,
342         Emotion_Module_Options *opt __UNUSED__)
343 {
344    Emotion_Gstreamer_Video *ev;
345    GError                  *error;
346
347    if (!emotion_video)
348      return 0;
349
350    ev = calloc(1, sizeof(Emotion_Gstreamer_Video));
351    if (!ev) return 0;
352
353    ev->obj = obj;
354
355    /* Initialization of gstreamer */
356    if (!gst_init_check(NULL, NULL, &error))
357      goto failure;
358
359    /* Default values */
360    ev->ratio = 1.0;
361    ev->vis = EMOTION_VIS_NONE;
362    ev->volume = 0.8;
363    ev->play_started = 0;
364    ev->delete_me = EINA_FALSE;
365    ev->threads = NULL;
366
367    *emotion_video = ev;
368
369    return 1;
370
371 failure:
372    free(ev);
373
374    return 0;
375 }
376
/* Release every resource hanging off ev (pending buffer, bus, metadata,
 * last frame, pipeline, pads, X window, stream lists) so the object can be
 * reused by a later em_file_open() or freed by em_shutdown(). ev itself is
 * not freed here. */
static void
em_cleanup(Emotion_Gstreamer_Video *ev)
{
   Emotion_Audio_Stream *astream;
   Emotion_Video_Stream *vstream;

   if (ev->send)
     {
        emotion_gstreamer_buffer_free(ev->send);
        ev->send = NULL;
     }

   if (ev->eos_bus)
     {
        gst_object_unref(GST_OBJECT(ev->eos_bus));
        ev->eos_bus = NULL;
     }

   if (ev->metadata)
     {
        _free_metadata(ev->metadata);
        ev->metadata = NULL;
     }

   if (ev->last_buffer)
     {
        gst_buffer_unref(ev->last_buffer);
        ev->last_buffer = NULL;
     }

   /* NOTE(review): 'stream' acts as a latch here — the video surface is
    * detached only while it is EINA_FALSE, then it is set to EINA_TRUE.
    * Confirm the flag's meaning against its other users. */
   if (!ev->stream)
     {
        evas_object_image_video_surface_set(emotion_object_image_get(ev->obj), NULL);
        ev->stream = EINA_TRUE;
     }

   if (ev->pipeline)
     {
       /* NOTE(review): called with a NULL uri — presumably resets the sink
        * bindings before the pipeline is dropped; confirm in
        * gstreamer_video_sink_new(). */
       gstreamer_video_sink_new(ev, ev->obj, NULL);

       /* Break the sink's back-references before tearing the pipeline down. */
       g_object_set(G_OBJECT(ev->esink), "ev", NULL, NULL);
       g_object_set(G_OBJECT(ev->esink), "evas-object", NULL, NULL);
       gst_element_set_state(ev->pipeline, GST_STATE_NULL);
       gst_object_unref(ev->pipeline);

       ev->pipeline = NULL;
       ev->sink = NULL;

       if (ev->teepad) gst_object_unref(ev->teepad);
       ev->teepad = NULL;
       if (ev->xvpad) gst_object_unref(ev->xvpad);
       ev->xvpad = NULL;

#ifdef HAVE_ECORE_X
       fprintf(stderr, "destroying window: %i\n", ev->win);
       if (ev->win) ecore_x_window_free(ev->win);
       ev->win = 0;
#endif
     }

   /* Drop the per-stream bookkeeping records. */
   EINA_LIST_FREE(ev->audio_streams, astream)
     free(astream);
   EINA_LIST_FREE(ev->video_streams, vstream)
     free(vstream);
}
442
443 int
444 em_shutdown(void *video)
445 {
446    Emotion_Gstreamer_Video *ev;
447
448    ev = (Emotion_Gstreamer_Video *)video;
449    if (!ev)
450      return 0;
451
452    if (ev->threads)
453      {
454         Ecore_Thread *t;
455
456         EINA_LIST_FREE(ev->threads, t)
457           ecore_thread_cancel(t);
458
459         ev->delete_me = EINA_TRUE;
460         return EINA_FALSE;
461      }
462
463    if (ev->in != ev->out)
464      {
465         ev->delete_me = EINA_TRUE;
466         return EINA_FALSE;
467      }
468
469    em_cleanup(ev);
470
471    free(ev);
472
473    return 1;
474 }
475
476
/* Open a media file: promote a bare path to a file:// URI if needed, build
 * the playback pipeline and install the EOS bus handler. Returns 1 on
 * success, EINA_FALSE on any failure. */
static unsigned char
em_file_open(const char   *file,
             Evas_Object  *obj,
             void         *video)
{
   Emotion_Gstreamer_Video *ev;
   Eina_Strbuf *sbuf = NULL;
   const char *uri;

   ev = (Emotion_Gstreamer_Video *)video;

   if (!file) return EINA_FALSE;
   /* No scheme in the name: build a file:// URI for GStreamer. */
   if (strstr(file, "://") == NULL)
     {
        sbuf = eina_strbuf_new();
        eina_strbuf_append(sbuf, "file://");
        if (strncmp(file, "./", 2) == 0)
          file += 2;
        if (strstr(file, ":/") != NULL)
          { /* We absolutely need file:///C:/ under Windows, so adding it here */
             eina_strbuf_append(sbuf, "/");
          }
        else if (*file != '/')
          {
             char tmp[PATH_MAX];

             /* Relative path: make it absolute against the cwd. */
             if (getcwd(tmp, PATH_MAX))
               {
                  eina_strbuf_append(sbuf, tmp);
                  eina_strbuf_append(sbuf, "/");
               }
          }
        eina_strbuf_append(sbuf, file);
     }

   ev->play_started = 0;
   ev->pipeline_parsed = 0;

   uri = sbuf ? eina_strbuf_string_get(sbuf) : file;
   DBG("setting file to '%s'", uri);
   ev->pipeline = gstreamer_video_sink_new(ev, obj, uri);
   if (sbuf) eina_strbuf_free(sbuf);

   if (!ev->pipeline)
     return EINA_FALSE;

   ev->eos_bus = gst_pipeline_get_bus(GST_PIPELINE(ev->pipeline));
   if (!ev->eos_bus)
     {
        ERR("could not get the bus");
        /* NOTE(review): the pipeline is left allocated on this path —
         * presumably em_file_close()/em_cleanup() reclaims it; confirm the
         * caller's error handling. */
        return EINA_FALSE;
     }

   /* Catch EOS and other messages directly from the bus. */
   gst_bus_set_sync_handler(ev->eos_bus, _eos_sync_fct, ev);

   /* Evas Object */
   ev->obj = obj;

   ev->position = 0.0;

   return 1;
}
539
540 static void
541 em_file_close(void *video)
542 {
543    Emotion_Gstreamer_Video *ev;
544
545    ev = (Emotion_Gstreamer_Video *)video;
546    if (!ev)
547      return;
548
549    if (ev->threads)
550      {
551         Ecore_Thread *t;
552
553         EINA_LIST_FREE(ev->threads, t)
554           ecore_thread_cancel(t);
555      }
556
557    em_cleanup(ev);
558
559    ev->pipeline_parsed = EINA_FALSE;
560    ev->play_started = 0;
561 }
562
563 static void
564 em_play(void   *video,
565         double  pos __UNUSED__)
566 {
567    Emotion_Gstreamer_Video *ev;
568
569    ev = (Emotion_Gstreamer_Video *)video;
570    if (!ev->pipeline) return ;
571
572    if (ev->pipeline_parsed)
573      gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
574    ev->play = 1;
575    ev->play_started = 1;
576 }
577
578 static void
579 em_stop(void *video)
580 {
581    Emotion_Gstreamer_Video *ev;
582
583    ev = (Emotion_Gstreamer_Video *)video;
584
585    if (!ev->pipeline) return ;
586
587    if (ev->pipeline_parsed)
588      gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
589    ev->play = 0;
590 }
591
592 static void
593 em_size_get(void  *video,
594             int   *width,
595             int   *height)
596 {
597    Emotion_Gstreamer_Video *ev;
598    Emotion_Video_Stream      *vstream;
599
600    ev = (Emotion_Gstreamer_Video *)video;
601
602    if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
603      goto on_error;
604
605    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
606    if (vstream)
607      {
608         if (width) *width = vstream->width;
609         if (height) *height = vstream->height;
610
611         return ;
612      }
613
614  on_error:
615    if (width) *width = 0;
616    if (height) *height = 0;
617 }
618
619 static void
620 em_pos_set(void   *video,
621            double  pos)
622 {
623    Emotion_Gstreamer_Video *ev;
624
625    ev = (Emotion_Gstreamer_Video *)video;
626
627    if (!ev->pipeline) return ;
628
629    if (ev->play)
630      gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
631
632    gst_element_seek(ev->pipeline, 1.0,
633                           GST_FORMAT_TIME,
634                           GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH,
635                           GST_SEEK_TYPE_SET,
636                           (gint64)(pos * (double)GST_SECOND),
637                           GST_SEEK_TYPE_NONE, -1);
638
639    if (ev->play)
640      gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
641 }
642
643 static double
644 em_len_get(void *video)
645 {
646    Emotion_Gstreamer_Video *ev;
647    Emotion_Video_Stream *vstream;
648    Emotion_Audio_Stream *astream;
649    Eina_List *l;
650    GstFormat fmt;
651    gint64 val;
652    gboolean ret;
653
654    ev = video;
655    fmt = GST_FORMAT_TIME;
656
657    if (!ev->pipeline) return 0.0;
658
659    ret = gst_element_query_duration(ev->pipeline, &fmt, &val);
660    if (!ret)
661      goto fallback;
662
663    if (fmt != GST_FORMAT_TIME)
664      {
665         DBG("requrested duration in time, but got %s instead.",
666             gst_format_get_name(fmt));
667         goto fallback;
668      }
669
670    if (val <= 0.0)
671      goto fallback;
672
673    return val / 1000000000.0;
674
675  fallback:
676    if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
677      return 0.0;
678
679    EINA_LIST_FOREACH(ev->audio_streams, l, astream)
680      if (astream->length_time >= 0)
681        return astream->length_time;
682
683    EINA_LIST_FOREACH(ev->video_streams, l, vstream)
684      if (vstream->length_time >= 0)
685        return vstream->length_time;
686
687    return 0.0;
688 }
689
690 static double
691 em_buffer_size_get(void *video)
692 {
693    Emotion_Gstreamer_Video *ev;
694
695    GstQuery *query;
696    gboolean busy;
697    gint percent;
698
699    ev = video;
700
701    if (!ev->pipeline) return 0.0;
702
703    query = gst_query_new_buffering(GST_FORMAT_DEFAULT);
704    if (gst_element_query(ev->pipeline, query))
705      gst_query_parse_buffering_percent(query, &busy, &percent);
706    else
707      percent = 100;
708
709    gst_query_unref(query);
710    return ((float)(percent)) / 100.0;
711 }
712
713 static int
714 em_fps_num_get(void *video)
715 {
716    Emotion_Gstreamer_Video *ev;
717    Emotion_Video_Stream      *vstream;
718
719    ev = (Emotion_Gstreamer_Video *)video;
720
721    if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
722      return 0;
723
724    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
725    if (vstream)
726      return vstream->fps_num;
727
728    return 0;
729 }
730
731 static int
732 em_fps_den_get(void *video)
733 {
734    Emotion_Gstreamer_Video *ev;
735    Emotion_Video_Stream      *vstream;
736
737    ev = (Emotion_Gstreamer_Video *)video;
738
739    if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
740      return 1;
741
742    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
743    if (vstream)
744      return vstream->fps_den;
745
746    return 1;
747 }
748
749 static double
750 em_fps_get(void *video)
751 {
752    Emotion_Gstreamer_Video *ev;
753    Emotion_Video_Stream      *vstream;
754
755    ev = (Emotion_Gstreamer_Video *)video;
756
757    if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
758      return 0.0;
759
760    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
761    if (vstream)
762      return (double)vstream->fps_num / (double)vstream->fps_den;
763
764    return 0.0;
765 }
766
767 static double
768 em_pos_get(void *video)
769 {
770    Emotion_Gstreamer_Video *ev;
771    GstFormat fmt;
772    gint64 val;
773    gboolean ret;
774
775    ev = video;
776    fmt = GST_FORMAT_TIME;
777
778    if (!ev->pipeline) return 0.0;
779
780    ret = gst_element_query_position(ev->pipeline, &fmt, &val);
781    if (!ret)
782      return ev->position;
783
784    if (fmt != GST_FORMAT_TIME)
785      {
786         ERR("requrested position in time, but got %s instead.",
787             gst_format_get_name(fmt));
788         return ev->position;
789      }
790
791    ev->position = val / 1000000000.0;
792    return ev->position;
793 }
794
795 static void
796 em_vis_set(void *video,
797            Emotion_Vis vis)
798 {
799    Emotion_Gstreamer_Video *ev;
800
801    ev = (Emotion_Gstreamer_Video *)video;
802
803    ev->vis = vis;
804 }
805
806 static Emotion_Vis
807 em_vis_get(void *video)
808 {
809    Emotion_Gstreamer_Video *ev;
810
811    ev = (Emotion_Gstreamer_Video *)video;
812
813    return ev->vis;
814 }
815
816 static Eina_Bool
817 em_vis_supported(void *ef __UNUSED__, Emotion_Vis vis)
818 {
819    const char *name;
820    GstElementFactory *factory;
821
822    if (vis == EMOTION_VIS_NONE)
823      return EINA_TRUE;
824
825    name = emotion_visualization_element_name_get(vis);
826    if (!name)
827      return EINA_FALSE;
828
829    factory = gst_element_factory_find(name);
830    if (!factory)
831      return EINA_FALSE;
832
833    gst_object_unref(factory);
834    return EINA_TRUE;
835 }
836
837 static double
838 em_ratio_get(void *video)
839 {
840    Emotion_Gstreamer_Video *ev;
841
842    ev = (Emotion_Gstreamer_Video *)video;
843
844    return ev->ratio;
845 }
846
847 static int
848 em_video_handled(void *video)
849 {
850    Emotion_Gstreamer_Video *ev;
851
852    ev = (Emotion_Gstreamer_Video *)video;
853
854    _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
855
856    if (!eina_list_count(ev->video_streams))
857      return 0;
858
859    return 1;
860 }
861
862 static int
863 em_audio_handled(void *video)
864 {
865    Emotion_Gstreamer_Video *ev;
866
867    ev = (Emotion_Gstreamer_Video *)video;
868
869    _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
870
871    if (!eina_list_count(ev->audio_streams))
872      return 0;
873
874    return 1;
875 }
876
/* All media handled by this backend is reported as seekable. */
static int
em_seekable(void *video __UNUSED__)
{
   return 1;
}
882
/* Frame-consumed notification: nothing to do in this backend. */
static void
em_frame_done(void *video __UNUSED__)
{
}
887
888 static Emotion_Format
889 em_format_get(void *video)
890 {
891    Emotion_Gstreamer_Video *ev;
892    Emotion_Video_Stream    *vstream;
893
894    ev = (Emotion_Gstreamer_Video *)video;
895
896    if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
897      return EMOTION_FORMAT_NONE;
898
899    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
900    if (vstream)
901      {
902         switch (vstream->fourcc)
903           {
904            case GST_MAKE_FOURCC('I', '4', '2', '0'):
905               return EMOTION_FORMAT_I420;
906            case GST_MAKE_FOURCC('Y', 'V', '1', '2'):
907               return EMOTION_FORMAT_YV12;
908            case GST_MAKE_FOURCC('Y', 'U', 'Y', '2'):
909               return EMOTION_FORMAT_YUY2;
910            case GST_MAKE_FOURCC('A', 'R', 'G', 'B'):
911               return EMOTION_FORMAT_BGRA;
912            default:
913               return EMOTION_FORMAT_NONE;
914           }
915      }
916    return EMOTION_FORMAT_NONE;
917 }
918
919 static void
920 em_video_data_size_get(void *video, int *w, int *h)
921 {
922    Emotion_Gstreamer_Video *ev;
923    Emotion_Video_Stream    *vstream;
924
925    ev = (Emotion_Gstreamer_Video *)video;
926
927    if (ev->pipeline && (!ev->video_stream_nbr || !ev->video_streams))
928      if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
929        goto on_error;
930
931    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
932    if (vstream)
933      {
934         *w = vstream->width;
935         *h = vstream->height;
936
937         return ;
938      }
939
940  on_error:
941    *w = 0;
942    *h = 0;
943 }
944
/* Direct YUV row access is not provided by this backend (frames are
 * delivered through the Evas video surface instead); always 0. */
static int
em_yuv_rows_get(void           *video __UNUSED__,
                int             w __UNUSED__,
                int             h __UNUSED__,
                unsigned char **yrows __UNUSED__,
                unsigned char **urows __UNUSED__,
                unsigned char **vrows __UNUSED__)
{
   return 0;
}
955
/* Direct BGRA pixel access is not provided by this backend; always 0. */
static int
em_bgra_data_get(void *video __UNUSED__, unsigned char **bgra_data __UNUSED__)
{
   return 0;
}
961
/* Generic input events (e.g. DVD menu navigation) are not handled. */
static void
em_event_feed(void *video __UNUSED__, int event __UNUSED__)
{
}
966
/* Mouse-button events are not handled by this backend. */
static void
em_event_mouse_button_feed(void *video __UNUSED__, int button __UNUSED__, int x __UNUSED__, int y __UNUSED__)
{
}
971
/* Mouse-move events are not handled by this backend. */
static void
em_event_mouse_move_feed(void *video __UNUSED__, int x __UNUSED__, int y __UNUSED__)
{
}
976
977 /* Video channels */
978 static int
979 em_video_channel_count(void *video)
980 {
981    Emotion_Gstreamer_Video *ev;
982
983    ev = (Emotion_Gstreamer_Video *)video;
984
985    _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
986
987    return eina_list_count(ev->video_streams);
988 }
989
/* Selecting a video channel is not implemented for this backend. */
static void
em_video_channel_set(void *video __UNUSED__,
                     int   channel __UNUSED__)
{
#if 0
   Emotion_Gstreamer_Video *ev;

   ev = (Emotion_Gstreamer_Video *)video;

   if (channel < 0) channel = 0;
#endif
   /* FIXME: to do... */
}
1003
1004 static int
1005 em_video_channel_get(void *video)
1006 {
1007    Emotion_Gstreamer_Video *ev;
1008
1009    ev = (Emotion_Gstreamer_Video *)video;
1010
1011    _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
1012
1013    return ev->video_stream_nbr;
1014 }
1015
/* Video channels carry no names in this backend; always NULL. */
static const char *
em_video_channel_name_get(void *video __UNUSED__,
                          int   channel __UNUSED__)
{
   return NULL;
}
1022
1023 static void
1024 em_video_channel_mute_set(void *video,
1025                           int   mute)
1026 {
1027    Emotion_Gstreamer_Video *ev;
1028
1029    ev = (Emotion_Gstreamer_Video *)video;
1030
1031    ev->video_mute = mute;
1032 }
1033
1034 static int
1035 em_video_channel_mute_get(void *video)
1036 {
1037    Emotion_Gstreamer_Video *ev;
1038
1039    ev = (Emotion_Gstreamer_Video *)video;
1040
1041    return ev->video_mute;
1042 }
1043
1044 /* Audio channels */
1045
1046 static int
1047 em_audio_channel_count(void *video)
1048 {
1049    Emotion_Gstreamer_Video *ev;
1050
1051    ev = (Emotion_Gstreamer_Video *)video;
1052
1053    _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
1054
1055    return eina_list_count(ev->audio_streams);
1056 }
1057
/* Selecting an audio channel is not implemented for this backend. */
static void
em_audio_channel_set(void *video __UNUSED__,
                     int   channel __UNUSED__)
{
#if 0
   Emotion_Gstreamer_Video *ev;

   ev = (Emotion_Gstreamer_Video *)video;

   if (channel < -1) channel = -1;
#endif
   /* FIXME: to do... */
}
1071
1072 static int
1073 em_audio_channel_get(void *video)
1074 {
1075    Emotion_Gstreamer_Video *ev;
1076
1077    ev = (Emotion_Gstreamer_Video *)video;
1078
1079    _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
1080
1081    return ev->audio_stream_nbr;
1082 }
1083
/* Audio stream names are not available through this backend. */
static const char *
em_audio_channel_name_get(void *video __UNUSED__,
                          int   channel __UNUSED__)
{
   return NULL;
}
1090
1091 #define GST_PLAY_FLAG_AUDIO (1 << 1)
1092
1093 static void
1094 em_audio_channel_mute_set(void *video,
1095                           int   mute)
1096 {
1097    /* NOTE: at first I wanted to completly shutdown the audio path on mute,
1098       but that's not possible as the audio sink could be the clock source
1099       for the pipeline (at least that's the case on some of the hardware
1100       I have been tested emotion on.
1101     */
1102    Emotion_Gstreamer_Video *ev;
1103
1104    ev = (Emotion_Gstreamer_Video *)video;
1105
1106    if (!ev->pipeline) return ;
1107
1108    ev->audio_mute = mute;
1109
1110    g_object_set(G_OBJECT(ev->pipeline), "mute", !!mute, NULL);
1111 }
1112
1113 static int
1114 em_audio_channel_mute_get(void *video)
1115 {
1116    Emotion_Gstreamer_Video *ev;
1117
1118    ev = (Emotion_Gstreamer_Video *)video;
1119
1120    return ev->audio_mute;
1121 }
1122
1123 static void
1124 em_audio_channel_volume_set(void  *video,
1125                             double vol)
1126 {
1127    Emotion_Gstreamer_Video *ev;
1128
1129    ev = (Emotion_Gstreamer_Video *)video;
1130
1131    if (!ev->pipeline) return ;
1132
1133    if (vol < 0.0)
1134      vol = 0.0;
1135    if (vol > 1.0)
1136      vol = 1.0;
1137    ev->volume = vol;
1138    g_object_set(G_OBJECT(ev->pipeline), "volume", vol, NULL);
1139 }
1140
1141 static double
1142 em_audio_channel_volume_get(void *video)
1143 {
1144    Emotion_Gstreamer_Video *ev;
1145
1146    ev = (Emotion_Gstreamer_Video *)video;
1147
1148    return ev->volume;
1149 }
1150
1151 /* spu stuff */
1152
/* SPU (subpicture/subtitle) support is not implemented: no channels. */
static int
em_spu_channel_count(void *video __UNUSED__)
{
   return 0;
}
1158
/* SPU channel selection: no-op, SPU is not supported. */
static void
em_spu_channel_set(void *video __UNUSED__, int channel __UNUSED__)
{
}
1163
/* Report the active SPU channel.
 * NOTE(review): returns 1 although em_spu_channel_count() reports 0 —
 * confirm whether 0 or -1 would be the correct "no SPU" answer. */
static int
em_spu_channel_get(void *video __UNUSED__)
{
   return 1;
}
1169
/* SPU channel names are not available (SPU not supported). */
static const char *
em_spu_channel_name_get(void *video __UNUSED__, int channel __UNUSED__)
{
   return NULL;
}
1175
/* SPU mute: no-op, SPU is not supported. */
static void
em_spu_channel_mute_set(void *video __UNUSED__, int mute __UNUSED__)
{
}
1180
/* SPU mute state: always reported unmuted (SPU not supported). */
static int
em_spu_channel_mute_get(void *video __UNUSED__)
{
   return 0;
}
1186
/* Chapter navigation is not supported: no chapters. */
static int
em_chapter_count(void *video __UNUSED__)
{
   return 0;
}
1192
/* Chapter selection: no-op, chapters are not supported. */
static void
em_chapter_set(void *video __UNUSED__, int chapter __UNUSED__)
{
}
1197
/* Current chapter: always 0, chapters are not supported. */
static int
em_chapter_get(void *video __UNUSED__)
{
   return 0;
}
1203
/* Chapter names are not available (chapters not supported). */
static const char *
em_chapter_name_get(void *video __UNUSED__, int chapter __UNUSED__)
{
   return NULL;
}
1209
/* Playback speed control is not implemented: no-op. */
static void
em_speed_set(void *video __UNUSED__, double speed __UNUSED__)
{
}
1214
/* Playback speed is always reported as normal (1.0). */
static double
em_speed_get(void *video __UNUSED__)
{
   return 1.0;
}
1220
/* Eject is not applicable to this backend; report success. */
static int
em_eject(void *video __UNUSED__)
{
   return 1;
}
1226
1227 static const char *
1228 em_meta_get(void *video, int meta)
1229 {
1230    Emotion_Gstreamer_Video *ev;
1231    const char *str = NULL;
1232
1233    ev = (Emotion_Gstreamer_Video *)video;
1234
1235    if (!ev || !ev->metadata) return NULL;
1236    switch (meta)
1237      {
1238       case META_TRACK_TITLE:
1239          str = ev->metadata->title;
1240          break;
1241       case META_TRACK_ARTIST:
1242          str = ev->metadata->artist;
1243          break;
1244       case  META_TRACK_ALBUM:
1245          str = ev->metadata->album;
1246          break;
1247       case META_TRACK_YEAR:
1248          str = ev->metadata->year;
1249          break;
1250       case META_TRACK_GENRE:
1251          str = ev->metadata->genre;
1252          break;
1253       case META_TRACK_COMMENT:
1254          str = ev->metadata->comment;
1255          break;
1256       case META_TRACK_DISCID:
1257          str = ev->metadata->disc_id;
1258          break;
1259       default:
1260          break;
1261      }
1262
1263    return str;
1264 }
1265
/* Request hardware-accelerated (priority) rendering for this stream. */
static void
em_priority_set(void *video, Eina_Bool pri)
{
   Emotion_Gstreamer_Video *ev;

   ev = video;
   if (priority_overide > 3) return ; /* after too many failed pipeline creations, stop wasting time and stay on canvas rendering */
   ev->priority = pri;
}
1275
/* Report whether hardware-accelerated rendering is actually in use.
 * NOTE(review): returns ev->stream rather than ev->priority — looks
 * intentional (the request is only honored once a hw stream is active)
 * but confirm against callers before relying on it. */
static Eina_Bool
em_priority_get(void *video)
{
   Emotion_Gstreamer_Video *ev;

   ev = video;
   return ev->stream;
}
1284
1285 #ifdef HAVE_ECORE_X
1286 static Eina_Bool
1287 _ecore_event_x_destroy(void *data __UNUSED__, int type __UNUSED__, void *event __UNUSED__)
1288 {
1289    Ecore_X_Event_Window_Destroy *ev = event;
1290
1291    fprintf(stderr, "killed window: %x (%x)\n", ev->win, ev->event_win);
1292
1293    return EINA_TRUE;
1294 }
1295 #endif
1296
/* Module entry point for one Evas_Object: set up logging, initialize
 * the backend instance and (under X11) probe the window manager for
 * the E video-overlay hints needed for hardware-accelerated rendering. */
static Eina_Bool
module_open(Evas_Object           *obj,
            const Emotion_Video_Module **module,
            void                 **video,
            Emotion_Module_Options *opt)
{
#ifdef HAVE_ECORE_X
   Ecore_X_Window *roots;
   int num;
#endif

   if (!module)
     return EINA_FALSE;

   /* One-time log-domain registration, shared by all instances. */
   if (_emotion_gstreamer_log_domain < 0)
     {
        eina_threads_init();
        eina_log_threads_enable();
        _emotion_gstreamer_log_domain = eina_log_domain_register
          ("emotion-gstreamer", EINA_COLOR_LIGHTCYAN);
        if (_emotion_gstreamer_log_domain < 0)
          {
             EINA_LOG_CRIT("Could not register log domain 'emotion-gstreamer'");
             return EINA_FALSE;
          }
     }

   if (!em_module.init(obj, video, opt))
     return EINA_FALSE;

#ifdef HAVE_ECORE_X
   /* NOTE(review): the handler is registered before ecore_x_init()
    * below — confirm ECORE_X_EVENT_WINDOW_DESTROY is valid here. */
   ecore_event_handler_add(ECORE_X_EVENT_WINDOW_DESTROY, _ecore_event_x_destroy, NULL);
#endif

   if (getenv("EMOTION_FPS_DEBUG")) debug_fps = EINA_TRUE;

   /* NOTE(review): on the very first open eina_threads_init() has
    * already been called in the log-domain branch above, yet
    * module_close() calls eina_threads_shutdown() only once — confirm
    * the init/shutdown refcount balances. */
   eina_threads_init();

#ifdef HAVE_ECORE_X
   if (ecore_x_init(NULL) > 0)
     {
        _ecore_x_available = EINA_TRUE;
     }

   /* Check if the window manager is able to handle our special Xv window. */
   roots = _ecore_x_available ? ecore_x_window_root_list(&num) : NULL;
   if (roots && num > 0)
     {
        Ecore_X_Window  win, twin;
        int nwins;

        /* _NET_SUPPORTING_WM_CHECK: a compliant WM publishes a child
         * window whose property points back to itself. */
        nwins = ecore_x_window_prop_window_get(roots[0],
                                               ECORE_X_ATOM_NET_SUPPORTING_WM_CHECK,
                                               &win, 1);
        if (nwins > 0)
          {
             nwins = ecore_x_window_prop_window_get(win,
                                                    ECORE_X_ATOM_NET_SUPPORTING_WM_CHECK,
                                                    &twin, 1);
             if (nwins > 0 && twin == win)
               {
                  Ecore_X_Atom *supported;
                  int supported_num;
                  int i;

                  /* The WM must advertise BOTH E_VIDEO_PARENT and
                   * E_VIDEO_POSITION before we hand it video windows. */
                  if (ecore_x_netwm_supported_get(roots[0], &supported, &supported_num))
                    {
                       Eina_Bool parent = EINA_FALSE;
                       Eina_Bool video_position = EINA_FALSE;

                       for (i = 0; i < supported_num; ++i)
                         {
                            if (supported[i] == ECORE_X_ATOM_E_VIDEO_PARENT)
                              parent = EINA_TRUE;
                            else if (supported[i] == ECORE_X_ATOM_E_VIDEO_POSITION)
                              video_position = EINA_TRUE;
                            if (parent && video_position)
                              break;
                         }

                       if (parent && video_position)
                         {
                            window_manager_video = EINA_TRUE;
                         }
                    }
               }
          }
     }
   free(roots);
#endif

   *module = &em_module;
   return EINA_TRUE;
}
1391
/* Tear down one backend instance and release the X and eina_threads
 * references taken in module_open(). */
static void
module_close(Emotion_Video_Module *module __UNUSED__,
             void                 *video)
{
   em_module.shutdown(video);

#ifdef HAVE_ECORE_X
   if (_ecore_x_available)
     {
        ecore_x_shutdown();
     }
#endif

   eina_threads_shutdown();
}
1407
1408 Eina_Bool
1409 gstreamer_module_init(void)
1410 {
1411    GError *error;
1412
1413    if (!gst_init_check(0, NULL, &error))
1414      {
1415         EINA_LOG_CRIT("Could not init GStreamer");
1416         return EINA_FALSE;
1417      }
1418
1419    if (gst_plugin_register_static(GST_VERSION_MAJOR, GST_VERSION_MINOR,
1420                                   "emotion-sink",
1421                                   "video sink plugin for Emotion",
1422                                   gstreamer_plugin_init,
1423                                   VERSION,
1424                                   "LGPL",
1425                                   "Enlightenment",
1426                                   PACKAGE,
1427                                   "http://www.enlightenment.org/") == FALSE)
1428      {
1429         EINA_LOG_CRIT("Could not load static gstreamer video sink for Emotion.");
1430         return EINA_FALSE;
1431      }
1432
1433    return _emotion_module_register("gstreamer", module_open, module_close);
1434 }
1435
/* Module exit: unregister the backend and shut GStreamer down. */
void
gstreamer_module_shutdown(void)
{
   _emotion_module_unregister("gstreamer");

   gst_deinit();
}
1443
1444 #ifndef EMOTION_STATIC_BUILD_GSTREAMER
1445
1446 EINA_MODULE_INIT(gstreamer_module_init);
1447 EINA_MODULE_SHUTDOWN(gstreamer_module_shutdown);
1448
1449 #endif
1450
/* gst_tag_list_foreach() callback: copy the tags we care about into
 * ev->metadata, freeing and replacing any previous value.
 * NOTE(review): every matching branch ends in 'break', so the loop
 * body effectively runs at most once even when
 * gst_tag_list_get_tag_size() reports several values — only the first
 * value of each tag is kept; confirm this is intended. */
static void
_for_each_tag(GstTagList const* list,
                    gchar const* tag,
                    void *data)
{
   Emotion_Gstreamer_Video *ev;
   int i;
   int count;


   ev = (Emotion_Gstreamer_Video*)data;

   if (!ev || !ev->metadata) return;

   count = gst_tag_list_get_tag_size(list, tag);

   for (i = 0; i < count; i++)
     {
        if (!strcmp(tag, GST_TAG_TITLE))
          {
             char *str;
             g_free(ev->metadata->title);
             if (gst_tag_list_get_string(list, GST_TAG_TITLE, &str))
               ev->metadata->title = str;
             else
               ev->metadata->title = NULL;
             break;
          }
        if (!strcmp(tag, GST_TAG_ALBUM))
          {
             gchar *str;
             g_free(ev->metadata->album);
             if (gst_tag_list_get_string(list, GST_TAG_ALBUM, &str))
               ev->metadata->album = str;
             else
               ev->metadata->album = NULL;
             break;
          }
        if (!strcmp(tag, GST_TAG_ARTIST))
          {
             gchar *str;
             g_free(ev->metadata->artist);
             if (gst_tag_list_get_string(list, GST_TAG_ARTIST, &str))
               ev->metadata->artist = str;
             else
               ev->metadata->artist = NULL;
             break;
          }
        if (!strcmp(tag, GST_TAG_GENRE))
          {
             gchar *str;
             g_free(ev->metadata->genre);
             if (gst_tag_list_get_string(list, GST_TAG_GENRE, &str))
               ev->metadata->genre = str;
             else
               ev->metadata->genre = NULL;
             break;
          }
        if (!strcmp(tag, GST_TAG_COMMENT))
          {
             gchar *str;
             g_free(ev->metadata->comment);
             if (gst_tag_list_get_string(list, GST_TAG_COMMENT, &str))
               ev->metadata->comment = str;
             else
               ev->metadata->comment = NULL;
             break;
          }
        /* GST_TAG_DATE holds a GDate, not a string: stringify the
         * boxed value instead of using gst_tag_list_get_string(). */
        if (!strcmp(tag, GST_TAG_DATE))
          {
             gchar *str;
             const GValue *date;
             g_free(ev->metadata->year);
             date = gst_tag_list_get_value_index(list, GST_TAG_DATE, 0);
             if (date)
               str = g_strdup_value_contents(date);
             else
               str = NULL;
             ev->metadata->year = str;
             break;
          }

        if (!strcmp(tag, GST_TAG_TRACK_NUMBER))
          {
             gchar *str;
             const GValue *track;
             g_free(ev->metadata->count);
             track = gst_tag_list_get_value_index(list, GST_TAG_TRACK_NUMBER, 0);
             if (track)
               str = g_strdup_value_contents(track);
             else
               str = NULL;
             ev->metadata->count = str;
             break;
          }

#ifdef GST_TAG_CDDA_CDDB_DISCID
        if (!strcmp(tag, GST_TAG_CDDA_CDDB_DISCID))
          {
             gchar *str;
             const GValue *discid;
             g_free(ev->metadata->disc_id);
             discid = gst_tag_list_get_value_index(list, GST_TAG_CDDA_CDDB_DISCID, 0);
             if (discid)
               str = g_strdup_value_contents(discid);
             else
               str = NULL;
             ev->metadata->disc_id = str;
             break;
          }
#endif
     }

}
1565
1566 static void
1567 _free_metadata(Emotion_Gstreamer_Metadata *m)
1568 {
1569   if (!m) return;
1570
1571   g_free(m->title);
1572   g_free(m->album);
1573   g_free(m->artist);
1574   g_free(m->genre);
1575   g_free(m->comment);
1576   g_free(m->year);
1577   g_free(m->count);
1578   g_free(m->disc_id);
1579
1580   free(m);
1581 }
1582
/* Idler callback: rebuild the GStreamer pipeline after a hardware sink
 * failure so playback can resume with canvas rendering. Scheduled from
 * _eos_main_fct() on GST_MESSAGE_ERROR. */
static Eina_Bool
_em_restart_stream(void *data)
{
   Emotion_Gstreamer_Video *ev;

   ev = data;

   ev->pipeline = gstreamer_video_sink_new(ev, ev->obj, ev->uri);

   if (ev->pipeline)
     {
        ev->eos_bus = gst_pipeline_get_bus(GST_PIPELINE(ev->pipeline));
        if (!ev->eos_bus)
          {
             ERR("could not get the bus");
             /* EINA_FALSE == ECORE_CALLBACK_CANCEL: do not re-run. */
             return EINA_FALSE;
          }

        /* Route bus messages through our streaming-thread handler. */
        gst_bus_set_sync_handler(ev->eos_bus, _eos_sync_fct, ev);
     }

   return ECORE_CALLBACK_CANCEL;
}
1606
/* Main-loop side of bus message handling: messages forwarded from the
 * streaming-thread sync handler (_eos_sync_fct) are dispatched here,
 * where it is safe to call back into Emotion/Evas. Consumes @data. */
static void
_eos_main_fct(void *data)
{
   Emotion_Gstreamer_Message *send;
   Emotion_Gstreamer_Video *ev;
   GstMessage              *msg;

   send = data;
   ev = send->ev;
   msg = send->msg;

   /* Flush a pending "playback started" before handling the message,
    * unless the instance is being torn down. */
   if (ev->play_started && !ev->delete_me)
     {
        _emotion_playback_started(ev->obj);
        ev->play_started = 0;
     }

   switch (GST_MESSAGE_TYPE(msg))
     {
      case GST_MESSAGE_EOS:
         if (!ev->delete_me)
           {
              ev->play = 0;
              _emotion_decode_stop(ev->obj);
              _emotion_playback_finished(ev->obj);
           }
         break;
      case GST_MESSAGE_TAG:
         if (!ev->delete_me)
           {
              GstTagList *new_tags;
              gst_message_parse_tag(msg, &new_tags);
              if (new_tags)
                {
                   /* Harvest metadata into ev->metadata. */
                   gst_tag_list_foreach(new_tags,
                                        (GstTagForeachFunc)_for_each_tag,
                                        ev);
                   gst_tag_list_free(new_tags);
                }
           }
         break;
      case GST_MESSAGE_ASYNC_DONE:
         if (!ev->delete_me) _emotion_seek_done(ev->obj);
         break;
      case GST_MESSAGE_STREAM_STATUS:
         break;
      case GST_MESSAGE_ERROR:
         /* Only forwarded for xvimagesink failures (see _eos_sync_fct):
          * drop the hardware pipeline and retry with canvas rendering. */
         em_cleanup(ev);

         if (ev->priority)
           {
             ERR("Switching back to canvas rendering.");
             ev->priority = EINA_FALSE;
             priority_overide++;

             ecore_idler_add(_em_restart_stream, ev);
           }
         break;
      default:
         ERR("bus say: %s [%i - %s]",
             GST_MESSAGE_SRC_NAME(msg),
             GST_MESSAGE_TYPE(msg),
             GST_MESSAGE_TYPE_NAME(msg));
         break;
     }

   emotion_gstreamer_message_free(send);
}
1675
/* Bus sync handler, called from GStreamer's streaming thread: log
 * state/warning messages locally and forward the messages that need
 * Emotion callbacks to the main loop via _eos_main_fct(). Always drops
 * the message from the bus (ownership is transferred to the
 * Emotion_Gstreamer_Message when forwarded). */
static GstBusSyncReply
_eos_sync_fct(GstBus *bus __UNUSED__, GstMessage *msg, gpointer data)
{
   Emotion_Gstreamer_Video *ev = data;
   Emotion_Gstreamer_Message *send;

   switch (GST_MESSAGE_TYPE(msg))
     {
      case GST_MESSAGE_EOS:
      case GST_MESSAGE_TAG:
      case GST_MESSAGE_ASYNC_DONE:
      case GST_MESSAGE_STREAM_STATUS:
         INF("bus say: %s [%i - %s]",
             GST_MESSAGE_SRC_NAME(msg),
             GST_MESSAGE_TYPE(msg),
             GST_MESSAGE_TYPE_NAME(msg));
         send = emotion_gstreamer_message_alloc(ev, msg);

         if (send) ecore_main_loop_thread_safe_call_async(_eos_main_fct, send);

         break;

      case GST_MESSAGE_STATE_CHANGED:
        {
           GstState old_state, new_state;

           gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
           INF("Element %s changed state from %s to %s.",
               GST_OBJECT_NAME(msg->src),
               gst_element_state_get_name(old_state),
               gst_element_state_get_name(new_state));
           break;
        }
      case GST_MESSAGE_ERROR:
        {
           GError *error;
           gchar *debug;

           gst_message_parse_error(msg, &error, &debug);
           ERR("ERROR from element %s: %s", GST_OBJECT_NAME(msg->src), error->message);
           ERR("Debugging info: %s", (debug) ? debug : "none");
           g_error_free(error);
           g_free(debug);

           /* Only xvimagesink failures are recoverable (fall back to
            * canvas rendering) — forward those to the main loop. */
           if (strncmp(GST_OBJECT_NAME(msg->src), "xvimagesink", 11) == 0)
             {
                send = emotion_gstreamer_message_alloc(ev, msg);

                if (send) ecore_main_loop_thread_safe_call_async(_eos_main_fct, send);
             }
           break;
        }
      case GST_MESSAGE_WARNING:
        {
           GError *error;
           gchar *debug;

           gst_message_parse_warning(msg, &error, &debug);
           WRN("WARNING from element %s: %s", GST_OBJECT_NAME(msg->src), error->message);
           WRN("Debugging info: %s", (debug) ? debug : "none");
           g_error_free(error);
           g_free(debug);
           break;
        }
      default:
         WRN("bus say: %s [%i - %s]",
             GST_MESSAGE_SRC_NAME(msg),
             GST_MESSAGE_TYPE(msg),
             GST_MESSAGE_TYPE_NAME(msg));
         break;
     }

   gst_message_unref(msg);

   return GST_BUS_DROP;
}
1752
1753 Eina_Bool
1754 _emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev,
1755                                         Eina_Bool force)
1756 {
1757    gboolean res;
1758    int i;
1759
1760    if (ev->pipeline_parsed)
1761      return EINA_TRUE;
1762
1763    if (force && ev->threads)
1764      {
1765         Ecore_Thread *t;
1766
1767         EINA_LIST_FREE(ev->threads, t)
1768           ecore_thread_cancel(t);
1769      }
1770
1771    if (ev->threads)
1772      return EINA_FALSE;
1773
1774    res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
1775    if (res == GST_STATE_CHANGE_NO_PREROLL)
1776      {
1777        gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
1778
1779        res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
1780      }
1781
1782    /** NOTE: you need to set: GST_DEBUG_DUMP_DOT_DIR=/tmp EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp' */
1783    /** then call dot -Tpng -oemotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
1784    if (getenv("EMOTION_GSTREAMER_DOT"))
1785      GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline),
1786                                        GST_DEBUG_GRAPH_SHOW_ALL,
1787                                        getenv("EMOTION_GSTREAMER_DOT"));
1788
1789    if (!(res == GST_STATE_CHANGE_SUCCESS
1790          || res == GST_STATE_CHANGE_NO_PREROLL))
1791      {
1792         ERR("Unable to get GST_CLOCK_TIME_NONE.");
1793         return EINA_FALSE;
1794      }
1795
1796    g_object_get(G_OBJECT(ev->pipeline),
1797                 "n-audio", &ev->audio_stream_nbr,
1798                 "n-video", &ev->video_stream_nbr,
1799                 NULL);
1800
1801    if ((ev->video_stream_nbr == 0) && (ev->audio_stream_nbr == 0))
1802      {
1803         ERR("No audio nor video stream found");
1804         return EINA_FALSE;
1805      }
1806
1807    /* video stream */
1808    for (i = 0; i < ev->video_stream_nbr; i++)
1809      {
1810         Emotion_Video_Stream *vstream;
1811         GstPad       *pad = NULL;
1812         GstCaps      *caps;
1813         GstStructure *structure;
1814         GstQuery     *query;
1815         const GValue *val;
1816         gchar        *str;
1817
1818         gdouble length_time = 0.0;
1819         gint width;
1820         gint height;
1821         gint fps_num;
1822         gint fps_den;
1823         guint32 fourcc = 0;
1824
1825         g_signal_emit_by_name(ev->pipeline, "get-video-pad", i, &pad);
1826         if (!pad)
1827           continue;
1828
1829         caps = gst_pad_get_negotiated_caps(pad);
1830         if (!caps)
1831           goto unref_pad_v;
1832         structure = gst_caps_get_structure(caps, 0);
1833         str = gst_caps_to_string(caps);
1834
1835         if (!gst_structure_get_int(structure, "width", &width))
1836           goto unref_caps_v;
1837         if (!gst_structure_get_int(structure, "height", &height))
1838           goto unref_caps_v;
1839         if (!gst_structure_get_fraction(structure, "framerate", &fps_num, &fps_den))
1840           goto unref_caps_v;
1841
1842         if (g_str_has_prefix(str, "video/x-raw-yuv"))
1843           {
1844              val = gst_structure_get_value(structure, "format");
1845              fourcc = gst_value_get_fourcc(val);
1846           }
1847         else if (g_str_has_prefix(str, "video/x-raw-rgb"))
1848           fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
1849         else
1850           goto unref_caps_v;
1851
1852         query = gst_query_new_duration(GST_FORMAT_TIME);
1853         if (gst_pad_peer_query(pad, query))
1854           {
1855              gint64 t;
1856
1857              gst_query_parse_duration(query, NULL, &t);
1858              length_time = (double)t / (double)GST_SECOND;
1859           }
1860         else
1861           goto unref_query_v;
1862
1863         vstream = emotion_video_stream_new(ev);
1864         if (!vstream) goto unref_query_v;
1865
1866         vstream->length_time = length_time;
1867         vstream->width = width;
1868         vstream->height = height;
1869         vstream->fps_num = fps_num;
1870         vstream->fps_den = fps_den;
1871         vstream->fourcc = fourcc;
1872         vstream->index = i;
1873
1874      unref_query_v:
1875         gst_query_unref(query);
1876      unref_caps_v:
1877         gst_caps_unref(caps);
1878      unref_pad_v:
1879         gst_object_unref(pad);
1880      }
1881
1882    /* Audio streams */
1883    for (i = 0; i < ev->audio_stream_nbr; i++)
1884      {
1885         Emotion_Audio_Stream *astream;
1886         GstPad       *pad;
1887         GstCaps      *caps;
1888         GstStructure *structure;
1889         GstQuery     *query;
1890
1891         gdouble length_time = 0.0;
1892         gint channels;
1893         gint samplerate;
1894
1895         g_signal_emit_by_name(ev->pipeline, "get-audio-pad", i, &pad);
1896         if (!pad)
1897           continue;
1898
1899         caps = gst_pad_get_negotiated_caps(pad);
1900         if (!caps)
1901           goto unref_pad_a;
1902         structure = gst_caps_get_structure(caps, 0);
1903
1904         if (!gst_structure_get_int(structure, "channels", &channels))
1905           goto unref_caps_a;
1906         if (!gst_structure_get_int(structure, "rate", &samplerate))
1907           goto unref_caps_a;
1908
1909         query = gst_query_new_duration(GST_FORMAT_TIME);
1910         if (gst_pad_peer_query(pad, query))
1911           {
1912              gint64 t;
1913
1914              gst_query_parse_duration(query, NULL, &t);
1915              length_time = (double)t / (double)GST_SECOND;
1916           }
1917         else
1918           goto unref_query_a;
1919
1920         astream = calloc(1, sizeof(Emotion_Audio_Stream));
1921         if (!astream) continue;
1922         ev->audio_streams = eina_list_append(ev->audio_streams, astream);
1923         if (eina_error_get())
1924           {
1925              free(astream);
1926              continue;
1927           }
1928
1929         astream->length_time = length_time;
1930         astream->channels = channels;
1931         astream->samplerate = samplerate;
1932
1933      unref_query_a:
1934         gst_query_unref(query);
1935      unref_caps_a:
1936         gst_caps_unref(caps);
1937      unref_pad_a:
1938         gst_object_unref(pad);
1939      }
1940
1941    /* Visualization sink */
1942    if (ev->video_stream_nbr == 0)
1943      {
1944         GstElement *vis = NULL;
1945         Emotion_Video_Stream *vstream;
1946         Emotion_Audio_Stream *astream;
1947         gint flags;
1948         const char *vis_name;
1949
1950         if (!(vis_name = emotion_visualization_element_name_get(ev->vis)))
1951           {
1952              WRN("pb vis name %d", ev->vis);
1953              goto finalize;
1954           }
1955
1956         astream = eina_list_data_get(ev->audio_streams);
1957
1958         vis = gst_element_factory_make(vis_name, "vissink");
1959         vstream = emotion_video_stream_new(ev);
1960         if (!vstream)
1961           goto finalize;
1962         else
1963           DBG("could not create visualization stream");
1964
1965         vstream->length_time = astream->length_time;
1966         vstream->width = 320;
1967         vstream->height = 200;
1968         vstream->fps_num = 25;
1969         vstream->fps_den = 1;
1970         vstream->fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
1971
1972         g_object_set(G_OBJECT(ev->pipeline), "vis-plugin", vis, NULL);
1973         g_object_get(G_OBJECT(ev->pipeline), "flags", &flags, NULL);
1974         flags |= 0x00000008;
1975         g_object_set(G_OBJECT(ev->pipeline), "flags", flags, NULL);
1976      }
1977
1978  finalize:
1979
1980    ev->video_stream_nbr = eina_list_count(ev->video_streams);
1981    ev->audio_stream_nbr = eina_list_count(ev->audio_streams);
1982
1983    if (ev->video_stream_nbr == 1)
1984      {
1985        Emotion_Video_Stream *vstream;
1986
1987        vstream = eina_list_data_get(ev->video_streams);
1988        ev->ratio = (double)vstream->width / (double)vstream->height;
1989        _emotion_frame_resize(ev->obj, vstream->width, vstream->height, ev->ratio);
1990      }
1991
1992    {
1993      /* on recapitule : */
1994      Emotion_Video_Stream *vstream;
1995      Emotion_Audio_Stream *astream;
1996
1997      vstream = eina_list_data_get(ev->video_streams);
1998      if (vstream)
1999        {
2000          DBG("video size=%dx%d, fps=%d/%d, "
2001              "fourcc=%"GST_FOURCC_FORMAT", length=%"GST_TIME_FORMAT,
2002              vstream->width, vstream->height, vstream->fps_num, vstream->fps_den,
2003              GST_FOURCC_ARGS(vstream->fourcc),
2004              GST_TIME_ARGS((guint64)(vstream->length_time * GST_SECOND)));
2005        }
2006
2007      astream = eina_list_data_get(ev->audio_streams);
2008      if (astream)
2009        {
2010          DBG("audio channels=%d, rate=%d, length=%"GST_TIME_FORMAT,
2011              astream->channels, astream->samplerate,
2012              GST_TIME_ARGS((guint64)(astream->length_time * GST_SECOND)));
2013        }
2014    }
2015
2016    if (ev->metadata)
2017      _free_metadata(ev->metadata);
2018    ev->metadata = calloc(1, sizeof(Emotion_Gstreamer_Metadata));
2019
2020    ev->pipeline_parsed = EINA_TRUE;
2021
2022    em_audio_channel_volume_set(ev, ev->volume);
2023    em_audio_channel_mute_set(ev, ev->audio_mute);
2024
2025    if (ev->play_started)
2026      {
2027         _emotion_playback_started(ev->obj);
2028         ev->play_started = 0;
2029      }
2030
2031    _emotion_open_done(ev->obj);
2032
2033    return EINA_TRUE;
2034 }