1 #ifdef HAVE_CONFIG_H
2 # include "config.h"
3 #endif
4
5 #ifdef HAVE_UNISTD_H
6 # include <unistd.h>
7 #endif
8 #include <fcntl.h>
9
10 #include <Eina.h>
11 #include <Evas.h>
12 #include <Ecore.h>
13
14 #define HTTP_STREAM 0
15 #define RTSP_STREAM 1
16 #include <glib.h>
17 #include <gst/gst.h>
18 #include <glib-object.h>
19 #include <gst/video/gstvideosink.h>
20 #include <gst/video/video.h>
21
22 #ifdef HAVE_ECORE_X
23 # include <Ecore_X.h>
24 # include <Ecore_Evas.h>
25 # ifdef HAVE_XOVERLAY_H
26 #  include <gst/interfaces/xoverlay.h>
27 # endif
28 #endif
29
30 #include "Emotion.h"
31 #include "emotion_private.h"
32 #include "emotion_gstreamer.h"
33
34 Eina_Bool window_manager_video = EINA_FALSE;
35 int _emotion_gstreamer_log_domain = -1;
36 Eina_Bool debug_fps = EINA_FALSE;
37 Eina_Bool _ecore_x_available = EINA_FALSE;
38
39 /* Callbacks to handle tags and metadata */
40 static void _for_each_tag    (GstTagList const* list, gchar const* tag, void *data);
41 static void _free_metadata   (Emotion_Gstreamer_Metadata *m);
42
43 /* Interface */
44
45 static unsigned char  em_init                     (Evas_Object     *obj,
46                                                    void           **emotion_video,
47                                                    Emotion_Module_Options *opt);
48
49 static unsigned char  em_file_open                (const char     *file,
50                                                    Evas_Object     *obj,
51                                                    void            *video);
52
53 static void           em_file_close               (void            *video);
54
55 static void           em_play                     (void            *video,
56                                                    double           pos);
57
58 static void           em_stop                     (void            *video);
59
60 static void           em_size_get                 (void            *video,
61                                                    int             *width,
62                                                    int             *height);
63
64 static void           em_pos_set                  (void            *video,
65                                                    double           pos);
66
67
68 static double         em_len_get                  (void            *video);
69
70 static double         em_buffer_size_get          (void            *video);
71
72 static int            em_fps_num_get              (void            *video);
73
74 static int            em_fps_den_get              (void            *video);
75
76 static double         em_fps_get                  (void            *video);
77
78 static double         em_pos_get                  (void            *video);
79
80 static void           em_vis_set                  (void            *video,
81                                                    Emotion_Vis      vis);
82
83 static Emotion_Vis    em_vis_get                  (void            *video);
84
85 static Eina_Bool      em_vis_supported            (void            *video,
86                                                    Emotion_Vis      vis);
87
88 static double         em_ratio_get                (void            *video);
89
90 static int            em_video_handled            (void            *video);
91
92 static int            em_audio_handled            (void            *video);
93
94 static int            em_seekable                 (void            *video);
95
96 static void           em_frame_done               (void            *video);
97
98 static Emotion_Format em_format_get               (void            *video);
99
100 static void           em_video_data_size_get      (void            *video,
101                                                    int             *w,
102                                                    int             *h);
103
104 static int            em_yuv_rows_get             (void            *video,
105                                                    int              w,
106                                                    int              h,
107                                                    unsigned char  **yrows,
108                                                    unsigned char  **urows,
109                                                    unsigned char  **vrows);
110
111 static int            em_bgra_data_get            (void            *video,
112                                                    unsigned char  **bgra_data);
113
114 static void           em_event_feed               (void            *video,
115                                                    int              event);
116
117 static void           em_event_mouse_button_feed  (void            *video,
118                                                    int              button,
119                                                    int              x,
120                                                    int              y);
121
122 static void           em_event_mouse_move_feed    (void            *video,
123                                                    int              x,
124                                                    int              y);
125
126 static int            em_video_channel_count      (void             *video);
127
128 static void           em_video_channel_set        (void             *video,
129                                                    int               channel);
130
131 static int            em_video_channel_get        (void             *video);
132
133 static const char    *em_video_channel_name_get   (void             *video,
134                                                    int               channel);
135
136 static void           em_video_channel_mute_set   (void             *video,
137                                                    int               mute);
138
139 static int            em_video_channel_mute_get   (void             *video);
140
141 static int            em_audio_channel_count      (void             *video);
142
143 static void           em_audio_channel_set        (void             *video,
144                                                    int               channel);
145
146 static int            em_audio_channel_get        (void             *video);
147
148 static const char    *em_audio_channel_name_get   (void             *video,
149                                                    int               channel);
150
151 static void           em_audio_channel_mute_set   (void             *video,
152                                                    int               mute);
153
154 static int            em_audio_channel_mute_get   (void             *video);
155
156 static void           em_audio_channel_volume_set (void             *video,
157                                                    double             vol);
158
159 static double         em_audio_channel_volume_get (void             *video);
160
161 static int            em_spu_channel_count        (void             *video);
162
163 static void           em_spu_channel_set          (void             *video,
164                                                    int               channel);
165
166 static int            em_spu_channel_get          (void             *video);
167
168 static const char    *em_spu_channel_name_get     (void             *video,
169                                                    int               channel);
170
171 static void           em_spu_channel_mute_set     (void             *video,
172                                                    int               mute);
173
174 static int            em_spu_channel_mute_get     (void             *video);
175
176 static int            em_chapter_count            (void             *video);
177
178 static void           em_chapter_set              (void             *video,
179                                                    int               chapter);
180
181 static int            em_chapter_get              (void             *video);
182
183 static const char    *em_chapter_name_get         (void             *video,
184                                                    int               chapter);
185
186 static void           em_speed_set                (void             *video,
187                                                    double            speed);
188
189 static double         em_speed_get                (void             *video);
190
191 static int            em_eject                    (void             *video);
192
193 static const char    *em_meta_get                 (void             *video,
194                                                    int               meta);
195
196 static void           em_priority_set             (void             *video,
197                                                    Eina_Bool         pri);
198 static Eina_Bool      em_priority_get             (void             *video);
199
200 static GstBusSyncReply _eos_sync_fct(GstBus *bus,
201                                      GstMessage *message,
202                                      gpointer data);
203
204 /* Module interface */
205
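/* Function table wiring the em_* implementations below into the generic
 * Emotion video module API; the final NULL entry ("handle") is a feature
 * this backend does not provide. */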
206 static Emotion_Video_Module em_module =
207 {
208    em_init, /* init */
209    em_shutdown, /* shutdown */
210    em_file_open, /* file_open */
211    em_file_close, /* file_close */
212    em_play, /* play */
213    em_stop, /* stop */
214    em_size_get, /* size_get */
215    em_pos_set, /* pos_set */
216    em_len_get, /* len_get */
217    em_buffer_size_get, /* buffer_size_get */
218    em_fps_num_get, /* fps_num_get */
219    em_fps_den_get, /* fps_den_get */
220    em_fps_get, /* fps_get */
221    em_pos_get, /* pos_get */
222    em_vis_set, /* vis_set */
223    em_vis_get, /* vis_get */
224    em_vis_supported, /* vis_supported */
225    em_ratio_get, /* ratio_get */
226    em_video_handled, /* video_handled */
227    em_audio_handled, /* audio_handled */
228    em_seekable, /* seekable */
229    em_frame_done, /* frame_done */
230    em_format_get, /* format_get */
231    em_video_data_size_get, /* video_data_size_get */
232    em_yuv_rows_get, /* yuv_rows_get */
233    em_bgra_data_get, /* bgra_data_get */
234    em_event_feed, /* event_feed */
235    em_event_mouse_button_feed, /* event_mouse_button_feed */
236    em_event_mouse_move_feed, /* event_mouse_move_feed */
237    em_video_channel_count, /* video_channel_count */
238    em_video_channel_set, /* video_channel_set */
239    em_video_channel_get, /* video_channel_get */
240    em_video_channel_name_get, /* video_channel_name_get */
241    em_video_channel_mute_set, /* video_channel_mute_set */
242    em_video_channel_mute_get, /* video_channel_mute_get */
243    em_audio_channel_count, /* audio_channel_count */
244    em_audio_channel_set, /* audio_channel_set */
245    em_audio_channel_get, /* audio_channel_get */
246    em_audio_channel_name_get, /* audio_channel_name_get */
247    em_audio_channel_mute_set, /* audio_channel_mute_set */
248    em_audio_channel_mute_get, /* audio_channel_mute_get */
249    em_audio_channel_volume_set, /* audio_channel_volume_set */
250    em_audio_channel_volume_get, /* audio_channel_volume_get */
251    em_spu_channel_count, /* spu_channel_count */
252    em_spu_channel_set, /* spu_channel_set */
253    em_spu_channel_get, /* spu_channel_get */
254    em_spu_channel_name_get, /* spu_channel_name_get */
255    em_spu_channel_mute_set, /* spu_channel_mute_set */
256    em_spu_channel_mute_get, /* spu_channel_mute_get */
257    em_chapter_count, /* chapter_count */
258    em_chapter_set, /* chapter_set */
259    em_chapter_get, /* chapter_get */
260    em_chapter_name_get, /* chapter_name_get */
261    em_speed_set, /* speed_set */
262    em_speed_get, /* speed_get */
263    em_eject, /* eject */
264    em_meta_get, /* meta_get */
265    em_priority_set, /* priority_set */
266    em_priority_get, /* priority_get */
267    NULL /* handle */
268 };
269
270 static int priority_overide = 0;
271
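/* Allocate a zeroed video stream descriptor and append it to the
 * per-object stream list; returns NULL on allocation or list failure. */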
272 static Emotion_Video_Stream *
273 emotion_video_stream_new(Emotion_Gstreamer_Video *ev)
274 {
275    Emotion_Video_Stream *vstream;
276
277    if (!ev) return NULL;
278
279    vstream = (Emotion_Video_Stream *)calloc(1, sizeof(Emotion_Video_Stream));
280    if (!vstream) return NULL;
281
282    ev->video_streams = eina_list_append(ev->video_streams, vstream);
283    if (eina_error_get())
284      {
285         free(vstream);
286         return NULL;
287      }
288    return vstream;
289 }
290
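/* Map an Emotion_Vis value to a GStreamer visualization element factory
 * name; unknown values fall back to "goom". */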
291 static const char *
292 emotion_visualization_element_name_get(Emotion_Vis visualisation)
293 {
294    switch (visualisation)
295      {
296       case EMOTION_VIS_NONE:
297          return NULL;
298       case EMOTION_VIS_GOOM:
299          return "goom";
300       case EMOTION_VIS_LIBVISUAL_BUMPSCOPE:
301          return "libvisual_bumpscope";
302       case EMOTION_VIS_LIBVISUAL_CORONA:
303          return "libvisual_corona";
304       case EMOTION_VIS_LIBVISUAL_DANCING_PARTICLES:
305          return "libvisual_dancingparticles";
306       case EMOTION_VIS_LIBVISUAL_GDKPIXBUF:
307          return "libvisual_gdkpixbuf";
308       case EMOTION_VIS_LIBVISUAL_G_FORCE:
309          return "libvisual_G-Force";
310       case EMOTION_VIS_LIBVISUAL_GOOM:
311          return "libvisual_goom";
312       case EMOTION_VIS_LIBVISUAL_INFINITE:
313          return "libvisual_infinite";
314       case EMOTION_VIS_LIBVISUAL_JAKDAW:
315          return "libvisual_jakdaw";
316       case EMOTION_VIS_LIBVISUAL_JESS:
317          return "libvisual_jess";
318       case EMOTION_VIS_LIBVISUAL_LV_ANALYSER:
319          return "libvisual_lv_analyzer";
320       case EMOTION_VIS_LIBVISUAL_LV_FLOWER:
321          return "libvisual_lv_flower";
322       case EMOTION_VIS_LIBVISUAL_LV_GLTEST:
323          return "libvisual_lv_gltest";
324       case EMOTION_VIS_LIBVISUAL_LV_SCOPE:
325          return "libvisual_lv_scope";
326       case EMOTION_VIS_LIBVISUAL_MADSPIN:
327          return "libvisual_madspin";
328       case EMOTION_VIS_LIBVISUAL_NEBULUS:
329          return "libvisual_nebulus";
330       case EMOTION_VIS_LIBVISUAL_OINKSIE:
331          return "libvisual_oinksie";
332       case EMOTION_VIS_LIBVISUAL_PLASMA:
333          return "libvisual_plazma";
334       default:
335          return "goom";
336      }
337 }
338
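/* Module init: bring GStreamer up and allocate the per-object state with
 * its defaults (ratio 1.0, volume 0.8, no visualization). */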
339 static unsigned char
340 em_init(Evas_Object            *obj,
341         void                  **emotion_video,
342         Emotion_Module_Options *opt __UNUSED__)
343 {
344    Emotion_Gstreamer_Video *ev;
345    GError                  *error = NULL;
346
347    if (!emotion_video)
348      return 0;
349
350    ev = calloc(1, sizeof(Emotion_Gstreamer_Video));
351    if (!ev) return 0;
352
353    ev->obj = obj;
354
355    /* Initialization of gstreamer */
356    if (!gst_init_check(NULL, NULL, &error))
357      goto failure;
358
359    /* Default values */
360    ev->ratio = 1.0;
361    ev->vis = EMOTION_VIS_NONE;
362    ev->volume = 0.8;
363    ev->play_started = 0;
364    ev->delete_me = EINA_FALSE;
365    ev->threads = NULL;
366
367    *emotion_video = ev;
368
369    return 1;
370
371 failure:
372    free(ev);
373
374    return 0;
375 }
376
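/* Release everything attached to the playback object: the pending send
 * buffer, the EOS bus, metadata, the last buffer, the pipeline and its
 * sinks, the optional Xv window, and the audio/video stream lists. */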
377 static void
378 em_cleanup(Emotion_Gstreamer_Video *ev)
379 {
380    Emotion_Audio_Stream *astream;
381    Emotion_Video_Stream *vstream;
382
383    if (ev->send)
384      {
385         emotion_gstreamer_buffer_free(ev->send);
386         ev->send = NULL;
387      }
388
389    if (ev->eos_bus)
390      {
391         gst_object_unref(GST_OBJECT(ev->eos_bus));
392         ev->eos_bus = NULL;
393      }
394
395    if (ev->metadata)
396      {
397         _free_metadata(ev->metadata);
398         ev->metadata = NULL;
399      }
400
401    if (ev->last_buffer)
402      {
403         gst_buffer_unref(ev->last_buffer);
404         ev->last_buffer = NULL;
405      }
406
407    if (!ev->stream)
408      {
409         evas_object_image_video_surface_set(emotion_object_image_get(ev->obj), NULL);
410         ev->stream = EINA_TRUE;
411      }
412
413    if (ev->pipeline)
414      {
415        gstreamer_video_sink_new(ev, ev->obj, NULL);
416
417        g_object_set(G_OBJECT(ev->esink), "ev", NULL, NULL);
418        g_object_set(G_OBJECT(ev->esink), "evas-object", NULL, NULL);
419        gst_element_set_state(ev->pipeline, GST_STATE_NULL);
420        gst_object_unref(ev->pipeline);
421
422        ev->pipeline = NULL;
423        ev->sink = NULL;
424
425        if (ev->teepad) gst_object_unref(ev->teepad);
426        ev->teepad = NULL;
427        if (ev->xvpad) gst_object_unref(ev->xvpad);
428        ev->xvpad = NULL;
429
430 #ifdef HAVE_ECORE_X
431        fprintf(stderr, "destroying window: %i\n", ev->win);
432        if (ev->win) ecore_x_window_free(ev->win);
433        ev->win = 0;
434 #endif
435      }
436
437    EINA_LIST_FREE(ev->audio_streams, astream)
438      free(astream);
439    EINA_LIST_FREE(ev->video_streams, vstream)
440      free(vstream);
441 }
442
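/* Shutdown is deferred (via delete_me) while worker threads or in-flight
 * frames are still pending; otherwise everything is cleaned up and freed. */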
443 int
444 em_shutdown(void *video)
445 {
446    Emotion_Gstreamer_Video *ev;
447
448    ev = (Emotion_Gstreamer_Video *)video;
449    if (!ev)
450      return 0;
451
452    if (ev->threads)
453      {
454         Ecore_Thread *t;
455
456         EINA_LIST_FREE(ev->threads, t)
457           ecore_thread_cancel(t);
458
459         ev->delete_me = EINA_TRUE;
460         return EINA_FALSE;
461      }
462
463    if (ev->in != ev->out)
464      {
465         ev->delete_me = EINA_TRUE;
466         return EINA_FALSE;
467      }
468
469    em_cleanup(ev);
470
471    free(ev);
472
473    return 1;
474 }
475
476
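/* Turn plain local paths (absolute, relative or Windows drive paths) into
 * file:// URIs, build the playback pipeline for the target and install the
 * synchronous bus handler used for EOS, tag and error messages. */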
477 static unsigned char
478 em_file_open(const char   *file,
479              Evas_Object  *obj,
480              void         *video)
481 {
482    Emotion_Gstreamer_Video *ev;
483    Eina_Strbuf *sbuf = NULL;
484    const char *uri;
485
486    ev = (Emotion_Gstreamer_Video *)video;
487
488    if (!file) return EINA_FALSE;
489    if (strstr(file, "://") == NULL)
490      {
491         sbuf = eina_strbuf_new();
492         eina_strbuf_append(sbuf, "file://");
493         if (strncmp(file, "./", 2) == 0)
494           file += 2;
495         if (strstr(file, ":/") != NULL)
496           { /* We absolutely need file:///C:/ under Windows, so adding it here */
497              eina_strbuf_append(sbuf, "/");
498           }
499         else if (*file != '/')
500           {
501              char tmp[PATH_MAX];
502
503              if (getcwd(tmp, PATH_MAX))
504                {
505                   eina_strbuf_append(sbuf, tmp);
506                   eina_strbuf_append(sbuf, "/");
507                }
508           }
509         eina_strbuf_append(sbuf, file);
510      }
511
512    ev->play_started = 0;
513    ev->pipeline_parsed = 0;
514
515    uri = sbuf ? eina_strbuf_string_get(sbuf) : file;
516    DBG("setting file to '%s'", uri);
517    ev->pipeline = gstreamer_video_sink_new(ev, obj, uri);
518    if (sbuf) eina_strbuf_free(sbuf);
519
520    if (!ev->pipeline)
521      return EINA_FALSE;
522
523    ev->eos_bus = gst_pipeline_get_bus(GST_PIPELINE(ev->pipeline));
524    if (!ev->eos_bus)
525      {
526         ERR("could not get the bus");
527         return EINA_FALSE;
528      }
529
530    gst_bus_set_sync_handler(ev->eos_bus, _eos_sync_fct, ev);
531
532    /* Evas Object */
533    ev->obj = obj;
534
535    ev->position = 0.0;
536
537    return 1;
538 }
539
540 static void
541 em_file_close(void *video)
542 {
543    Emotion_Gstreamer_Video *ev;
544
545    ev = (Emotion_Gstreamer_Video *)video;
546    if (!ev)
547      return;
548
549    if (ev->threads)
550      {
551         Ecore_Thread *t;
552
553         EINA_LIST_FREE(ev->threads, t)
554           ecore_thread_cancel(t);
555      }
556
557    em_cleanup(ev);
558
559    ev->pipeline_parsed = EINA_FALSE;
560    ev->play_started = 0;
561 }
562
563 static void
564 em_play(void   *video,
565         double  pos __UNUSED__)
566 {
567    Emotion_Gstreamer_Video *ev;
568
569    ev = (Emotion_Gstreamer_Video *)video;
570    if (!ev->pipeline) return ;
571
572    if (ev->pipeline_parsed)
573      gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
574    ev->play = 1;
575    ev->play_started = 1;
576 }
577
578 static void
579 em_stop(void *video)
580 {
581    Emotion_Gstreamer_Video *ev;
582
583    ev = (Emotion_Gstreamer_Video *)video;
584
585    if (!ev->pipeline) return ;
586
587    if (ev->pipeline_parsed)
588      gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
589    ev->play = 0;
590 }
591
592 static void
593 em_size_get(void  *video,
594             int   *width,
595             int   *height)
596 {
597    Emotion_Gstreamer_Video *ev;
598    Emotion_Video_Stream      *vstream;
599
600    ev = (Emotion_Gstreamer_Video *)video;
601
602    if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
603      goto on_error;
604
605    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
606    if (vstream)
607      {
608         if (width) *width = vstream->width;
609         if (height) *height = vstream->height;
610
611         return ;
612      }
613
614  on_error:
615    if (width) *width = 0;
616    if (height) *height = 0;
617 }
618
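/* Seek with a flushing, accurate seek in GST_FORMAT_TIME; a playing
 * pipeline is paused around the seek and resumed afterwards. */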
619 static void
620 em_pos_set(void   *video,
621            double  pos)
622 {
623    Emotion_Gstreamer_Video *ev;
624    gboolean res;
625
626    ev = (Emotion_Gstreamer_Video *)video;
627
628    if (!ev->pipeline) return ;
629
630    if (ev->play)
631      res = gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
632
633    res = gst_element_seek(ev->pipeline, 1.0,
634                           GST_FORMAT_TIME,
635                           GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH,
636                           GST_SEEK_TYPE_SET,
637                           (gint64)(pos * (double)GST_SECOND),
638                           GST_SEEK_TYPE_NONE, -1);
639
640    if (ev->play)
641      res = gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
642 }
643
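/* Duration in seconds: query the pipeline directly and fall back to the
 * per-stream lengths collected while parsing the pipeline. */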
644 static double
645 em_len_get(void *video)
646 {
647    Emotion_Gstreamer_Video *ev;
648    Emotion_Video_Stream *vstream;
649    Emotion_Audio_Stream *astream;
650    Eina_List *l;
651    GstFormat fmt;
652    gint64 val;
653    gboolean ret;
654
655    ev = video;
656    fmt = GST_FORMAT_TIME;
657
658    if (!ev->pipeline) return 0.0;
659
660    ret = gst_element_query_duration(ev->pipeline, &fmt, &val);
661    if (!ret)
662      goto fallback;
663
664    if (fmt != GST_FORMAT_TIME)
665      {
666         DBG("requested duration in time, but got %s instead.",
667             gst_format_get_name(fmt));
668         goto fallback;
669      }
670
671    if (val <= 0.0)
672      goto fallback;
673
674    return val / 1000000000.0;
675
676  fallback:
677    if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
678      return 0.0;
679
680    EINA_LIST_FOREACH(ev->audio_streams, l, astream)
681      if (astream->length_time >= 0)
682        return astream->length_time;
683
684    EINA_LIST_FOREACH(ev->video_streams, l, vstream)
685      if (vstream->length_time >= 0)
686        return vstream->length_time;
687
688    return 0.0;
689 }
690
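/* Buffering level as 0.0..1.0 from a GStreamer buffering query; when the
 * query is not handled the stream is assumed to be fully buffered. */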
691 static double
692 em_buffer_size_get(void *video)
693 {
694    Emotion_Gstreamer_Video *ev;
695
696    GstQuery *query;
697    gboolean busy;
698    gint percent;
699
700    ev = video;
701
702    if (!ev->pipeline) return 0.0;
703
704    query = gst_query_new_buffering(GST_FORMAT_DEFAULT);
705    if (gst_element_query(ev->pipeline, query))
706      gst_query_parse_buffering_percent(query, &busy, &percent);
707    else
708      percent = 100;
709
710    gst_query_unref(query);
711    return ((float)(percent)) / 100.0;
712 }
713
714 static int
715 em_fps_num_get(void *video)
716 {
717    Emotion_Gstreamer_Video *ev;
718    Emotion_Video_Stream      *vstream;
719
720    ev = (Emotion_Gstreamer_Video *)video;
721
722    if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
723      return 0;
724
725    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
726    if (vstream)
727      return vstream->fps_num;
728
729    return 0;
730 }
731
732 static int
733 em_fps_den_get(void *video)
734 {
735    Emotion_Gstreamer_Video *ev;
736    Emotion_Video_Stream      *vstream;
737
738    ev = (Emotion_Gstreamer_Video *)video;
739
740    if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
741      return 1;
742
743    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
744    if (vstream)
745      return vstream->fps_den;
746
747    return 1;
748 }
749
750 static double
751 em_fps_get(void *video)
752 {
753    Emotion_Gstreamer_Video *ev;
754    Emotion_Video_Stream      *vstream;
755
756    ev = (Emotion_Gstreamer_Video *)video;
757
758    if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
759      return 0.0;
760
761    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
762    if (vstream)
763      return (double)vstream->fps_num / (double)vstream->fps_den;
764
765    return 0.0;
766 }
767
768 static double
769 em_pos_get(void *video)
770 {
771    Emotion_Gstreamer_Video *ev;
772    GstFormat fmt;
773    gint64 val;
774    gboolean ret;
775
776    ev = video;
777    fmt = GST_FORMAT_TIME;
778
779    if (!ev->pipeline) return 0.0;
780
781    ret = gst_element_query_position(ev->pipeline, &fmt, &val);
782    if (!ret)
783      return ev->position;
784
785    if (fmt != GST_FORMAT_TIME)
786      {
787         ERR("requested position in time, but got %s instead.",
788             gst_format_get_name(fmt));
789         return ev->position;
790      }
791
792    ev->position = val / 1000000000.0;
793    return ev->position;
794 }
795
796 static void
797 em_vis_set(void *video,
798            Emotion_Vis vis)
799 {
800    Emotion_Gstreamer_Video *ev;
801
802    ev = (Emotion_Gstreamer_Video *)video;
803
804    ev->vis = vis;
805 }
806
807 static Emotion_Vis
808 em_vis_get(void *video)
809 {
810    Emotion_Gstreamer_Video *ev;
811
812    ev = (Emotion_Gstreamer_Video *)video;
813
814    return ev->vis;
815 }
816
817 static Eina_Bool
818 em_vis_supported(void *ef __UNUSED__, Emotion_Vis vis)
819 {
820    const char *name;
821    GstElementFactory *factory;
822
823    if (vis == EMOTION_VIS_NONE)
824      return EINA_TRUE;
825
826    name = emotion_visualization_element_name_get(vis);
827    if (!name)
828      return EINA_FALSE;
829
830    factory = gst_element_factory_find(name);
831    if (!factory)
832      return EINA_FALSE;
833
834    gst_object_unref(factory);
835    return EINA_TRUE;
836 }
837
838 static double
839 em_ratio_get(void *video)
840 {
841    Emotion_Gstreamer_Video *ev;
842
843    ev = (Emotion_Gstreamer_Video *)video;
844
845    return ev->ratio;
846 }
847
848 static int
849 em_video_handled(void *video)
850 {
851    Emotion_Gstreamer_Video *ev;
852
853    ev = (Emotion_Gstreamer_Video *)video;
854
855    _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
856
857    if (!eina_list_count(ev->video_streams))
858      return 0;
859
860    return 1;
861 }
862
863 static int
864 em_audio_handled(void *video)
865 {
866    Emotion_Gstreamer_Video *ev;
867
868    ev = (Emotion_Gstreamer_Video *)video;
869
870    _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
871
872    if (!eina_list_count(ev->audio_streams))
873      return 0;
874
875    return 1;
876 }
877
878 static int
879 em_seekable(void *video __UNUSED__)
880 {
881    return 1;
882 }
883
884 static void
885 em_frame_done(void *video __UNUSED__)
886 {
887 }
888
889 static Emotion_Format
890 em_format_get(void *video)
891 {
892    Emotion_Gstreamer_Video *ev;
893    Emotion_Video_Stream    *vstream;
894
895    ev = (Emotion_Gstreamer_Video *)video;
896
897    if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
898      return EMOTION_FORMAT_NONE;
899
900    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
901    if (vstream)
902      {
903         switch (vstream->fourcc)
904           {
905            case GST_MAKE_FOURCC('I', '4', '2', '0'):
906               return EMOTION_FORMAT_I420;
907            case GST_MAKE_FOURCC('Y', 'V', '1', '2'):
908               return EMOTION_FORMAT_YV12;
909            case GST_MAKE_FOURCC('Y', 'U', 'Y', '2'):
910               return EMOTION_FORMAT_YUY2;
911            case GST_MAKE_FOURCC('A', 'R', 'G', 'B'):
912               return EMOTION_FORMAT_BGRA;
913            default:
914               return EMOTION_FORMAT_NONE;
915           }
916      }
917    return EMOTION_FORMAT_NONE;
918 }
919
920 static void
921 em_video_data_size_get(void *video, int *w, int *h)
922 {
923    Emotion_Gstreamer_Video *ev;
924    Emotion_Video_Stream    *vstream;
925
926    ev = (Emotion_Gstreamer_Video *)video;
927
928    if (ev->pipeline && (!ev->video_stream_nbr || !ev->video_streams))
929      if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
930        goto on_error;
931
932    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
933    if (vstream)
934      {
935         *w = vstream->width;
936         *h = vstream->height;
937
938         return ;
939      }
940
941  on_error:
942    *w = 0;
943    *h = 0;
944 }
945
946 static int
947 em_yuv_rows_get(void           *video __UNUSED__,
948                 int             w __UNUSED__,
949                 int             h __UNUSED__,
950                 unsigned char **yrows __UNUSED__,
951                 unsigned char **urows __UNUSED__,
952                 unsigned char **vrows __UNUSED__)
953 {
954    return 0;
955 }
956
957 static int
958 em_bgra_data_get(void *video __UNUSED__, unsigned char **bgra_data __UNUSED__)
959 {
960    return 0;
961 }
962
963 static void
964 em_event_feed(void *video __UNUSED__, int event __UNUSED__)
965 {
966 }
967
968 static void
969 em_event_mouse_button_feed(void *video __UNUSED__, int button __UNUSED__, int x __UNUSED__, int y __UNUSED__)
970 {
971 }
972
973 static void
974 em_event_mouse_move_feed(void *video __UNUSED__, int x __UNUSED__, int y __UNUSED__)
975 {
976 }
977
978 /* Video channels */
979 static int
980 em_video_channel_count(void *video)
981 {
982    Emotion_Gstreamer_Video *ev;
983
984    ev = (Emotion_Gstreamer_Video *)video;
985
986    _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
987
988    return eina_list_count(ev->video_streams);
989 }
990
991 static void
992 em_video_channel_set(void *video __UNUSED__,
993                      int   channel __UNUSED__)
994 {
995 #if 0
996    Emotion_Gstreamer_Video *ev;
997
998    ev = (Emotion_Gstreamer_Video *)video;
999
1000    if (channel < 0) channel = 0;
1001 #endif
1002    /* FIXME: to be done... */
1003 }
1004
1005 static int
1006 em_video_channel_get(void *video)
1007 {
1008    Emotion_Gstreamer_Video *ev;
1009
1010    ev = (Emotion_Gstreamer_Video *)video;
1011
1012    _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
1013
1014    return ev->video_stream_nbr;
1015 }
1016
1017 static const char *
1018 em_video_channel_name_get(void *video __UNUSED__,
1019                           int   channel __UNUSED__)
1020 {
1021    return NULL;
1022 }
1023
1024 static void
1025 em_video_channel_mute_set(void *video,
1026                           int   mute)
1027 {
1028    Emotion_Gstreamer_Video *ev;
1029
1030    ev = (Emotion_Gstreamer_Video *)video;
1031
1032    ev->video_mute = mute;
1033 }
1034
1035 static int
1036 em_video_channel_mute_get(void *video)
1037 {
1038    Emotion_Gstreamer_Video *ev;
1039
1040    ev = (Emotion_Gstreamer_Video *)video;
1041
1042    return ev->video_mute;
1043 }
1044
1045 /* Audio channels */
1046
1047 static int
1048 em_audio_channel_count(void *video)
1049 {
1050    Emotion_Gstreamer_Video *ev;
1051
1052    ev = (Emotion_Gstreamer_Video *)video;
1053
1054    _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
1055
1056    return eina_list_count(ev->audio_streams);
1057 }
1058
1059 static void
1060 em_audio_channel_set(void *video __UNUSED__,
1061                      int   channel __UNUSED__)
1062 {
1063 #if 0
1064    Emotion_Gstreamer_Video *ev;
1065
1066    ev = (Emotion_Gstreamer_Video *)video;
1067
1068    if (channel < -1) channel = -1;
1069 #endif
1070    /* FIXME: to be done... */
1071 }
1072
1073 static int
1074 em_audio_channel_get(void *video)
1075 {
1076    Emotion_Gstreamer_Video *ev;
1077
1078    ev = (Emotion_Gstreamer_Video *)video;
1079
1080    _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
1081
1082    return ev->audio_stream_nbr;
1083 }
1084
1085 static const char *
1086 em_audio_channel_name_get(void *video __UNUSED__,
1087                           int   channel __UNUSED__)
1088 {
1089    return NULL;
1090 }
1091
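/* Matches the audio bit of playbin2's GstPlayFlags. */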
1092 #define GST_PLAY_FLAG_AUDIO (1 << 1)
1093
1094 static void
1095 em_audio_channel_mute_set(void *video,
1096                           int   mute)
1097 {
1098    /* NOTE: at first I wanted to completely shut down the audio path on mute,
1099       but that's not possible as the audio sink could be the clock source
1100       for the pipeline (at least that's the case on some of the hardware
1101       I have tested emotion on).
1102     */
1103    Emotion_Gstreamer_Video *ev;
1104
1105    ev = (Emotion_Gstreamer_Video *)video;
1106
1107    if (!ev->pipeline) return ;
1108
1109    ev->audio_mute = mute;
1110
1111    g_object_set(G_OBJECT(ev->pipeline), "mute", !!mute, NULL);
1112 }
1113
1114 static int
1115 em_audio_channel_mute_get(void *video)
1116 {
1117    Emotion_Gstreamer_Video *ev;
1118
1119    ev = (Emotion_Gstreamer_Video *)video;
1120
1121    return ev->audio_mute;
1122 }
1123
1124 static void
1125 em_audio_channel_volume_set(void  *video,
1126                             double vol)
1127 {
1128    Emotion_Gstreamer_Video *ev;
1129
1130    ev = (Emotion_Gstreamer_Video *)video;
1131
1132    if (!ev->pipeline) return ;
1133
1134    if (vol < 0.0)
1135      vol = 0.0;
1136    if (vol > 1.0)
1137      vol = 1.0;
1138    ev->volume = vol;
1139    g_object_set(G_OBJECT(ev->pipeline), "volume", vol, NULL);
1140 }
1141
1142 static double
1143 em_audio_channel_volume_get(void *video)
1144 {
1145    Emotion_Gstreamer_Video *ev;
1146
1147    ev = (Emotion_Gstreamer_Video *)video;
1148
1149    return ev->volume;
1150 }
1151
1152 /* spu stuff */
1153
1154 static int
1155 em_spu_channel_count(void *video __UNUSED__)
1156 {
1157    return 0;
1158 }
1159
1160 static void
1161 em_spu_channel_set(void *video __UNUSED__, int channel __UNUSED__)
1162 {
1163 }
1164
1165 static int
1166 em_spu_channel_get(void *video __UNUSED__)
1167 {
1168    return 1;
1169 }
1170
1171 static const char *
1172 em_spu_channel_name_get(void *video __UNUSED__, int channel __UNUSED__)
1173 {
1174    return NULL;
1175 }
1176
1177 static void
1178 em_spu_channel_mute_set(void *video __UNUSED__, int mute __UNUSED__)
1179 {
1180 }
1181
1182 static int
1183 em_spu_channel_mute_get(void *video __UNUSED__)
1184 {
1185    return 0;
1186 }
1187
1188 static int
1189 em_chapter_count(void *video __UNUSED__)
1190 {
1191    return 0;
1192 }
1193
1194 static void
1195 em_chapter_set(void *video __UNUSED__, int chapter __UNUSED__)
1196 {
1197 }
1198
1199 static int
1200 em_chapter_get(void *video __UNUSED__)
1201 {
1202    return 0;
1203 }
1204
1205 static const char *
1206 em_chapter_name_get(void *video __UNUSED__, int chapter __UNUSED__)
1207 {
1208    return NULL;
1209 }
1210
1211 static void
1212 em_speed_set(void *video __UNUSED__, double speed __UNUSED__)
1213 {
1214 }
1215
1216 static double
1217 em_speed_get(void *video __UNUSED__)
1218 {
1219    return 1.0;
1220 }
1221
1222 static int
1223 em_eject(void *video __UNUSED__)
1224 {
1225    return 1;
1226 }
1227
1228 static const char *
1229 em_meta_get(void *video, int meta)
1230 {
1231    Emotion_Gstreamer_Video *ev;
1232    const char *str = NULL;
1233
1234    ev = (Emotion_Gstreamer_Video *)video;
1235
1236    if (!ev || !ev->metadata) return NULL;
1237    switch (meta)
1238      {
1239       case META_TRACK_TITLE:
1240          str = ev->metadata->title;
1241          break;
1242       case META_TRACK_ARTIST:
1243          str = ev->metadata->artist;
1244          break;
1245       case  META_TRACK_ALBUM:
1246          str = ev->metadata->album;
1247          break;
1248       case META_TRACK_YEAR:
1249          str = ev->metadata->year;
1250          break;
1251       case META_TRACK_GENRE:
1252          str = ev->metadata->genre;
1253          break;
1254       case META_TRACK_COMMENT:
1255          str = ev->metadata->comment;
1256          break;
1257       case META_TRACK_DISCID:
1258          str = ev->metadata->disc_id;
1259          break;
1260       default:
1261          break;
1262      }
1263
1264    return str;
1265 }
1266
1267 static void
1268 em_priority_set(void *video, Eina_Bool pri)
1269 {
1270    Emotion_Gstreamer_Video *ev;
1271
1272    ev = video;
1273    if (priority_overide > 3) return ; /* If we failed too many times to create that pipeline, let's not waste our time anymore */
1274    ev->priority = pri;
1275 }
1276
1277 static Eina_Bool
1278 em_priority_get(void *video)
1279 {
1280    Emotion_Gstreamer_Video *ev;
1281
1282    ev = video;
1283    return ev->stream;
1284 }
1285
1286 #ifdef HAVE_ECORE_X
1287 static Eina_Bool
1288 _ecore_event_x_destroy(void *data __UNUSED__, int type __UNUSED__, void *event)
1289 {
1290    Ecore_X_Event_Window_Destroy *ev = event;
1291
1292    fprintf(stderr, "killed window: %x (%x)\n", ev->win, ev->event_win);
1293
1294    return EINA_TRUE;
1295 }
1296 #endif
1297
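/* Module entry point: register the log domain, initialize the backend and,
 * when X11 is available, check whether the window manager advertises
 * E_VIDEO_PARENT/E_VIDEO_POSITION so the special Xv video window can be
 * used for overlay rendering. */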
1298 static Eina_Bool
1299 module_open(Evas_Object           *obj,
1300             const Emotion_Video_Module **module,
1301             void                 **video,
1302             Emotion_Module_Options *opt)
1303 {
1304 #ifdef HAVE_ECORE_X
1305    Ecore_X_Window *roots;
1306    int num;
1307 #endif
1308
1309    if (!module)
1310      return EINA_FALSE;
1311
1312    if (_emotion_gstreamer_log_domain < 0)
1313      {
1314         eina_threads_init();
1315         eina_log_threads_enable();
1316         _emotion_gstreamer_log_domain = eina_log_domain_register
1317           ("emotion-gstreamer", EINA_COLOR_LIGHTCYAN);
1318         if (_emotion_gstreamer_log_domain < 0)
1319           {
1320              EINA_LOG_CRIT("Could not register log domain 'emotion-gstreamer'");
1321              return EINA_FALSE;
1322           }
1323      }
1324
1325    if (!em_module.init(obj, video, opt))
1326      return EINA_FALSE;
1327
1328 #ifdef HAVE_ECORE_X
1329    ecore_event_handler_add(ECORE_X_EVENT_WINDOW_DESTROY, _ecore_event_x_destroy, NULL);
1330 #endif
1331
1332    if (getenv("EMOTION_FPS_DEBUG")) debug_fps = EINA_TRUE;
1333
1334    eina_threads_init();
1335
1336 #ifdef HAVE_ECORE_X
1337    if (ecore_x_init(NULL) > 0)
1338      {
1339         _ecore_x_available = EINA_TRUE;
1340      }
1341
1342    /* Check if the window manager is able to handle our special Xv window. */
1343    roots = _ecore_x_available ? ecore_x_window_root_list(&num) : NULL;
1344    if (roots && num > 0)
1345      {
1346         Ecore_X_Window  win, twin;
1347         int nwins;
1348
1349         nwins = ecore_x_window_prop_window_get(roots[0],
1350                                                ECORE_X_ATOM_NET_SUPPORTING_WM_CHECK,
1351                                                &win, 1);
1352         if (nwins > 0)
1353           {
1354              nwins = ecore_x_window_prop_window_get(win,
1355                                                     ECORE_X_ATOM_NET_SUPPORTING_WM_CHECK,
1356                                                     &twin, 1);
1357              if (nwins > 0 && twin == win)
1358                {
1359                   Ecore_X_Atom *supported;
1360                   int supported_num;
1361                   int i;
1362
1363                   if (ecore_x_netwm_supported_get(roots[0], &supported, &supported_num))
1364                     {
1365                        Eina_Bool parent = EINA_FALSE;
1366                        Eina_Bool video_position = EINA_FALSE;
1367
1368                        for (i = 0; i < supported_num; ++i)
1369                          {
1370                             if (supported[i] == ECORE_X_ATOM_E_VIDEO_PARENT)
1371                               parent = EINA_TRUE;
1372                             else if (supported[i] == ECORE_X_ATOM_E_VIDEO_POSITION)
1373                               video_position = EINA_TRUE;
1374                             if (parent && video_position)
1375                               break;
1376                          }
1377
1378                        if (parent && video_position)
1379                          {
1380                             window_manager_video = EINA_TRUE;
1381                          }
1382                     }
1383                }
1384           }
1385      }
1386    free(roots);
1387 #endif
1388
1389    *module = &em_module;
1390    return EINA_TRUE;
1391 }
1392
1393 static void
1394 module_close(Emotion_Video_Module *module __UNUSED__,
1395              void                 *video)
1396 {
1397    em_module.shutdown(video);
1398
1399 #ifdef HAVE_ECORE_X
1400    if (_ecore_x_available)
1401      {
1402         ecore_x_shutdown();
1403      }
1404 #endif
1405
1406    eina_threads_shutdown();
1407 }
1408
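/* Initialize GStreamer, register the static "emotion-sink" plugin and
 * register this backend with Emotion under the name "gstreamer". */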
1409 Eina_Bool
1410 gstreamer_module_init(void)
1411 {
1412    GError *error = NULL;
1413
1414    if (!gst_init_check(NULL, NULL, &error))
1415      {
1416         EINA_LOG_CRIT("Could not init GStreamer");
1417         return EINA_FALSE;
1418      }
1419
1420    if (gst_plugin_register_static(GST_VERSION_MAJOR, GST_VERSION_MINOR,
1421                                   "emotion-sink",
1422                                   "video sink plugin for Emotion",
1423                                   gstreamer_plugin_init,
1424                                   VERSION,
1425                                   "LGPL",
1426                                   "Enlightenment",
1427                                   PACKAGE,
1428                                   "http://www.enlightenment.org/") == FALSE)
1429      {
1430         EINA_LOG_CRIT("Could not load static gstreamer video sink for Emotion.");
1431         return EINA_FALSE;
1432      }
1433
1434    return _emotion_module_register("gstreamer", module_open, module_close);
1435 }
1436
1437 void
1438 gstreamer_module_shutdown(void)
1439 {
1440    _emotion_module_unregister("gstreamer");
1441
1442    gst_deinit();
1443 }
1444
1445 #ifndef EMOTION_STATIC_BUILD_GSTREAMER
1446
1447 EINA_MODULE_INIT(gstreamer_module_init);
1448 EINA_MODULE_SHUTDOWN(gstreamer_module_shutdown);
1449
1450 #endif
1451
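/* GstTagForeachFunc: copy the tags we care about into ev->metadata,
 * freeing any previously stored value first. */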
1452 static void
1453 _for_each_tag(GstTagList const* list,
1454                     gchar const* tag,
1455                     void *data)
1456 {
1457    Emotion_Gstreamer_Video *ev;
1458    int i;
1459    int count;
1460
1461
1462    ev = (Emotion_Gstreamer_Video*)data;
1463
1464    if (!ev || !ev->metadata) return;
1465
1466    count = gst_tag_list_get_tag_size(list, tag);
1467
1468    for (i = 0; i < count; i++)
1469      {
1470         if (!strcmp(tag, GST_TAG_TITLE))
1471           {
1472              char *str;
1473              g_free(ev->metadata->title);
1474              if (gst_tag_list_get_string(list, GST_TAG_TITLE, &str))
1475                ev->metadata->title = str;
1476              else
1477                ev->metadata->title = NULL;
1478              break;
1479           }
1480         if (!strcmp(tag, GST_TAG_ALBUM))
1481           {
1482              gchar *str;
1483              g_free(ev->metadata->album);
1484              if (gst_tag_list_get_string(list, GST_TAG_ALBUM, &str))
1485                ev->metadata->album = str;
1486              else
1487                ev->metadata->album = NULL;
1488              break;
1489           }
1490         if (!strcmp(tag, GST_TAG_ARTIST))
1491           {
1492              gchar *str;
1493              g_free(ev->metadata->artist);
1494              if (gst_tag_list_get_string(list, GST_TAG_ARTIST, &str))
1495                ev->metadata->artist = str;
1496              else
1497                ev->metadata->artist = NULL;
1498              break;
1499           }
1500         if (!strcmp(tag, GST_TAG_GENRE))
1501           {
1502              gchar *str;
1503              g_free(ev->metadata->genre);
1504              if (gst_tag_list_get_string(list, GST_TAG_GENRE, &str))
1505                ev->metadata->genre = str;
1506              else
1507                ev->metadata->genre = NULL;
1508              break;
1509           }
1510         if (!strcmp(tag, GST_TAG_COMMENT))
1511           {
1512              gchar *str;
1513              g_free(ev->metadata->comment);
1514              if (gst_tag_list_get_string(list, GST_TAG_COMMENT, &str))
1515                ev->metadata->comment = str;
1516              else
1517                ev->metadata->comment = NULL;
1518              break;
1519           }
1520         if (!strcmp(tag, GST_TAG_DATE))
1521           {
1522              gchar *str;
1523              const GValue *date;
1524              g_free(ev->metadata->year);
1525              date = gst_tag_list_get_value_index(list, GST_TAG_DATE, 0);
1526              if (date)
1527                str = g_strdup_value_contents(date);
1528              else
1529                str = NULL;
1530              ev->metadata->year = str;
1531              break;
1532           }
1533
1534         if (!strcmp(tag, GST_TAG_TRACK_NUMBER))
1535           {
1536              gchar *str;
1537              const GValue *track;
1538              g_free(ev->metadata->count);
1539              track = gst_tag_list_get_value_index(list, GST_TAG_TRACK_NUMBER, 0);
1540              if (track)
1541                str = g_strdup_value_contents(track);
1542              else
1543                str = NULL;
1544              ev->metadata->count = str;
1545              break;
1546           }
1547
1548 #ifdef GST_TAG_CDDA_CDDB_DISCID
1549         if (!strcmp(tag, GST_TAG_CDDA_CDDB_DISCID))
1550           {
1551              gchar *str;
1552              const GValue *discid;
1553              g_free(ev->metadata->disc_id);
1554              discid = gst_tag_list_get_value_index(list, GST_TAG_CDDA_CDDB_DISCID, 0);
1555              if (discid)
1556                str = g_strdup_value_contents(discid);
1557              else
1558                str = NULL;
1559              ev->metadata->disc_id = str;
1560              break;
1561           }
1562 #endif
1563      }
1564
1565 }
1566
1567 static void
1568 _free_metadata(Emotion_Gstreamer_Metadata *m)
1569 {
1570   if (!m) return;
1571
1572   g_free(m->title);
1573   g_free(m->album);
1574   g_free(m->artist);
1575   g_free(m->genre);
1576   g_free(m->comment);
1577   g_free(m->year);
1578   g_free(m->count);
1579   g_free(m->disc_id);
1580
1581   free(m);
1582 }
1583
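/* Idler callback: rebuild the pipeline from the stored URI after an error
 * forced us out of priority (overlay) rendering. */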
1584 static Eina_Bool
1585 _em_restart_stream(void *data)
1586 {
1587    Emotion_Gstreamer_Video *ev;
1588
1589    ev = data;
1590
1591    ev->pipeline = gstreamer_video_sink_new(ev, ev->obj, ev->uri);
1592
1593    if (ev->pipeline)
1594      {
1595         ev->eos_bus = gst_pipeline_get_bus(GST_PIPELINE(ev->pipeline));
1596         if (!ev->eos_bus)
1597           {
1598              ERR("could not get the bus");
1599              return EINA_FALSE;
1600           }
1601
1602         gst_bus_set_sync_handler(ev->eos_bus, _eos_sync_fct, ev);
1603      }
1604
1605    return ECORE_CALLBACK_CANCEL;
1606 }
1607
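/* Runs in the main loop: handle bus messages forwarded by the sync handler
 * (EOS, tags, seek completion, errors) and update the Emotion object. */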
1608 static void
1609 _eos_main_fct(void *data)
1610 {
1611    Emotion_Gstreamer_Message *send;
1612    Emotion_Gstreamer_Video *ev;
1613    GstMessage              *msg;
1614
1615    send = data;
1616    ev = send->ev;
1617    msg = send->msg;
1618
1619    if (ev->play_started && !ev->delete_me)
1620      {
1621         _emotion_playback_started(ev->obj);
1622         ev->play_started = 0;
1623      }
1624
1625    switch (GST_MESSAGE_TYPE(msg))
1626      {
1627       case GST_MESSAGE_EOS:
1628          if (!ev->delete_me)
1629            {
1630               ev->play = 0;
1631               _emotion_decode_stop(ev->obj);
1632               _emotion_playback_finished(ev->obj);
1633            }
1634          break;
1635       case GST_MESSAGE_TAG:
1636          if (!ev->delete_me)
1637            {
1638               GstTagList *new_tags;
1639               gst_message_parse_tag(msg, &new_tags);
1640               if (new_tags)
1641                 {
1642                    gst_tag_list_foreach(new_tags,
1643                                         (GstTagForeachFunc)_for_each_tag,
1644                                         ev);
1645                    gst_tag_list_free(new_tags);
1646                 }
1647            }
1648          break;
1649       case GST_MESSAGE_ASYNC_DONE:
1650          if (!ev->delete_me) _emotion_seek_done(ev->obj);
1651          break;
1652       case GST_MESSAGE_STREAM_STATUS:
1653          break;
1654       case GST_MESSAGE_ERROR:
1655          em_cleanup(ev);
1656
1657          if (ev->priority)
1658            {
1659              ERR("Switching back to canvas rendering.");
1660              ev->priority = EINA_FALSE;
1661              priority_overide++;
1662
1663              ecore_idler_add(_em_restart_stream, ev);
1664            }
1665          break;
1666       default:
1667          ERR("bus say: %s [%i - %s]",
1668              GST_MESSAGE_SRC_NAME(msg),
1669              GST_MESSAGE_TYPE(msg),
1670              GST_MESSAGE_TYPE_NAME(msg));
1671          break;
1672      }
1673
1674    emotion_gstreamer_message_free(send);
1675 }
1676
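/* Bus sync handler, called from a GStreamer streaming thread: forward the
 * interesting messages to the main loop through
 * ecore_main_loop_thread_safe_call_async() and drop them from the bus. */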
1677 static GstBusSyncReply
1678 _eos_sync_fct(GstBus *bus __UNUSED__, GstMessage *msg, gpointer data)
1679 {
1680    Emotion_Gstreamer_Video *ev = data;
1681    Emotion_Gstreamer_Message *send;
1682
1683    switch (GST_MESSAGE_TYPE(msg))
1684      {
1685       case GST_MESSAGE_EOS:
1686       case GST_MESSAGE_TAG:
1687       case GST_MESSAGE_ASYNC_DONE:
1688       case GST_MESSAGE_STREAM_STATUS:
1689          INF("bus say: %s [%i - %s]",
1690              GST_MESSAGE_SRC_NAME(msg),
1691              GST_MESSAGE_TYPE(msg),
1692              GST_MESSAGE_TYPE_NAME(msg));
1693          send = emotion_gstreamer_message_alloc(ev, msg);
1694
1695          if (send) ecore_main_loop_thread_safe_call_async(_eos_main_fct, send);
1696
1697          break;
1698
1699       case GST_MESSAGE_STATE_CHANGED:
1700         {
1701            GstState old_state, new_state;
1702
1703            gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
1704            INF("Element %s changed state from %s to %s.",
1705                GST_OBJECT_NAME(msg->src),
1706                gst_element_state_get_name(old_state),
1707                gst_element_state_get_name(new_state));
1708            break;
1709         }
1710       case GST_MESSAGE_ERROR:
1711         {
1712            GError *error;
1713            gchar *debug;
1714
1715            gst_message_parse_error(msg, &error, &debug);
1716            ERR("ERROR from element %s: %s", GST_OBJECT_NAME(msg->src), error->message);
1717            ERR("Debugging info: %s", (debug) ? debug : "none");
1718            g_error_free(error);
1719            g_free(debug);
1720
1721            if (strncmp(GST_OBJECT_NAME(msg->src), "xvimagesink", 11) == 0)
1722              {
1723                 send = emotion_gstreamer_message_alloc(ev, msg);
1724
1725                 if (send) ecore_main_loop_thread_safe_call_async(_eos_main_fct, send);
1726              }
1727            break;
1728         }
1729       case GST_MESSAGE_WARNING:
1730         {
1731            GError *error;
1732            gchar *debug;
1733
1734            gst_message_parse_warning(msg, &error, &debug);
1735            WRN("WARNING from element %s: %s", GST_OBJECT_NAME(msg->src), error->message);
1736            WRN("Debugging info: %s", (debug) ? debug : "none");
1737            g_error_free(error);
1738            g_free(debug);
1739            break;
1740         }
1741       default:
1742          WRN("bus say: %s [%i - %s]",
1743              GST_MESSAGE_SRC_NAME(msg),
1744              GST_MESSAGE_TYPE(msg),
1745              GST_MESSAGE_TYPE_NAME(msg));
1746          break;
1747      }
1748
1749    gst_message_unref(msg);
1750
1751    return GST_BUS_DROP;
1752 }
1753
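/* Wait for the pipeline to preroll, then walk its video and audio pads to
 * record size, framerate, fourcc, channels, sample rate and duration for
 * every stream; when there is no video stream, a visualization branch is
 * set up so something can still be rendered. */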
1754 Eina_Bool
1755 _emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev,
1756                                         Eina_Bool force)
1757 {
1758    GstStateChangeReturn res;
1759    int i;
1760
1761    if (ev->pipeline_parsed)
1762      return EINA_TRUE;
1763
1764    if (force && ev->threads)
1765      {
1766         Ecore_Thread *t;
1767
1768         EINA_LIST_FREE(ev->threads, t)
1769           ecore_thread_cancel(t);
1770      }
1771
1772    if (ev->threads)
1773      return EINA_FALSE;
1774
1775    res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
1776    if (res == GST_STATE_CHANGE_NO_PREROLL)
1777      {
1778        gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
1779
1780        res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
1781      }
1782
1783    /** NOTE: set GST_DEBUG_DUMP_DOT_DIR=/tmp and EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT dot file in '/tmp', */
1784    /** then call: dot -Tpng -o emotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
1785    if (getenv("EMOTION_GSTREAMER_DOT"))
1786      GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline),
1787                                        GST_DEBUG_GRAPH_SHOW_ALL,
1788                                        getenv("EMOTION_GSTREAMER_DOT"));
1789
1790    if (!(res == GST_STATE_CHANGE_SUCCESS
1791          || res == GST_STATE_CHANGE_NO_PREROLL))
1792      {
1793         ERR("Unable to get GST_CLOCK_TIME_NONE.");
1794         return EINA_FALSE;
1795      }
1796
1797    g_object_get(G_OBJECT(ev->pipeline),
1798                 "n-audio", &ev->audio_stream_nbr,
1799                 "n-video", &ev->video_stream_nbr,
1800                 NULL);
1801
1802    if ((ev->video_stream_nbr == 0) && (ev->audio_stream_nbr == 0))
1803      {
1804         ERR("No audio nor video stream found");
1805         return EINA_FALSE;
1806      }
1807
1808    /* video stream */
1809    for (i = 0; i < ev->video_stream_nbr; i++)
1810      {
1811         Emotion_Video_Stream *vstream;
1812         GstPad       *pad = NULL;
1813         GstCaps      *caps;
1814         GstStructure *structure;
1815         GstQuery     *query;
1816         const GValue *val;
1817         gchar        *str;
1818
1819         gdouble length_time = 0.0;
1820         gint width;
1821         gint height;
1822         gint fps_num;
1823         gint fps_den;
1824         guint32 fourcc = 0;
1825
1826         g_signal_emit_by_name(ev->pipeline, "get-video-pad", i, &pad);
1827         if (!pad)
1828           continue;
1829
1830         caps = gst_pad_get_negotiated_caps(pad);
1831         if (!caps)
1832           goto unref_pad_v;
1833         structure = gst_caps_get_structure(caps, 0);
1834         str = gst_caps_to_string(caps);
1835
1836         if (!gst_structure_get_int(structure, "width", &width))
1837           goto unref_caps_v;
1838         if (!gst_structure_get_int(structure, "height", &height))
1839           goto unref_caps_v;
1840         if (!gst_structure_get_fraction(structure, "framerate", &fps_num, &fps_den))
1841           goto unref_caps_v;
1842
1843         if (g_str_has_prefix(str, "video/x-raw-yuv"))
1844           {
1845              val = gst_structure_get_value(structure, "format");
1846              fourcc = gst_value_get_fourcc(val);
1847           }
1848         else if (g_str_has_prefix(str, "video/x-raw-rgb"))
1849           fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
1850         else
1851           goto unref_caps_v;
1852
1853         query = gst_query_new_duration(GST_FORMAT_TIME);
1854         if (gst_pad_peer_query(pad, query))
1855           {
1856              gint64 t;
1857
1858              gst_query_parse_duration(query, NULL, &t);
1859              length_time = (double)t / (double)GST_SECOND;
1860           }
1861         else
1862           goto unref_query_v;
1863
1864         vstream = emotion_video_stream_new(ev);
1865         if (!vstream) goto unref_query_v;
1866
1867         vstream->length_time = length_time;
1868         vstream->width = width;
1869         vstream->height = height;
1870         vstream->fps_num = fps_num;
1871         vstream->fps_den = fps_den;
1872         vstream->fourcc = fourcc;
1873         vstream->index = i;
1874
1875      unref_query_v:
1876         gst_query_unref(query);
1877      unref_caps_v:
1878         gst_caps_unref(caps);
1879      unref_pad_v:
1880         gst_object_unref(pad);
1881      }
1882
1883    /* Audio streams */
1884    for (i = 0; i < ev->audio_stream_nbr; i++)
1885      {
1886         Emotion_Audio_Stream *astream;
1887         GstPad       *pad;
1888         GstCaps      *caps;
1889         GstStructure *structure;
1890         GstQuery     *query;
1891
1892         gdouble length_time = 0.0;
1893         gint channels;
1894         gint samplerate;
1895
1896         g_signal_emit_by_name(ev->pipeline, "get-audio-pad", i, &pad);
1897         if (!pad)
1898           continue;
1899
1900         caps = gst_pad_get_negotiated_caps(pad);
1901         if (!caps)
1902           goto unref_pad_a;
1903         structure = gst_caps_get_structure(caps, 0);
1904
1905         if (!gst_structure_get_int(structure, "channels", &channels))
1906           goto unref_caps_a;
1907         if (!gst_structure_get_int(structure, "rate", &samplerate))
1908           goto unref_caps_a;
1909
1910         query = gst_query_new_duration(GST_FORMAT_TIME);
1911         if (gst_pad_peer_query(pad, query))
1912           {
1913              gint64 t;
1914
1915              gst_query_parse_duration(query, NULL, &t);
1916              length_time = (double)t / (double)GST_SECOND;
1917           }
1918         else
1919           goto unref_query_a;
1920
1921         astream = calloc(1, sizeof(Emotion_Audio_Stream));
1922         if (!astream) continue;
1923         ev->audio_streams = eina_list_append(ev->audio_streams, astream);
1924         if (eina_error_get())
1925           {
1926              free(astream);
1927              continue;
1928           }
1929
1930         astream->length_time = length_time;
1931         astream->channels = channels;
1932         astream->samplerate = samplerate;
1933
1934      unref_query_a:
1935         gst_query_unref(query);
1936      unref_caps_a:
1937         gst_caps_unref(caps);
1938      unref_pad_a:
1939         gst_object_unref(pad);
1940      }
1941
1942    /* Visualization sink */
1943    if (ev->video_stream_nbr == 0)
1944      {
1945         GstElement *vis = NULL;
1946         Emotion_Video_Stream *vstream;
1947         Emotion_Audio_Stream *astream;
1948         gint flags;
1949         const char *vis_name;
1950
1951         if (!(vis_name = emotion_visualization_element_name_get(ev->vis)))
1952           {
1953              WRN("pb vis name %d", ev->vis);
1954              goto finalize;
1955           }
1956
1957         astream = eina_list_data_get(ev->audio_streams);
1958
1959         vis = gst_element_factory_make(vis_name, "vissink");
1960         vstream = emotion_video_stream_new(ev);
1961         if (!vstream)
1962           {
1963              DBG("could not create visualization stream");
1964              goto finalize;
1965           }

1966         vstream->length_time = astream->length_time;
1967         vstream->width = 320;
1968         vstream->height = 200;
1969         vstream->fps_num = 25;
1970         vstream->fps_den = 1;
1971         vstream->fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
1972
1973         g_object_set(G_OBJECT(ev->pipeline), "vis-plugin", vis, NULL);
1974         g_object_get(G_OBJECT(ev->pipeline), "flags", &flags, NULL);
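        /* 0x00000008 matches the visualization bit of playbin2's
         * GstPlayFlags: enable the vis branch in the pipeline. */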
1975         flags |= 0x00000008;
1976         g_object_set(G_OBJECT(ev->pipeline), "flags", flags, NULL);
1977      }
1978
1979  finalize:
1980
1981    ev->video_stream_nbr = eina_list_count(ev->video_streams);
1982    ev->audio_stream_nbr = eina_list_count(ev->audio_streams);
1983
1984    if (ev->video_stream_nbr == 1)
1985      {
1986        Emotion_Video_Stream *vstream;
1987
1988        vstream = eina_list_data_get(ev->video_streams);
1989        ev->ratio = (double)vstream->width / (double)vstream->height;
1990        _emotion_frame_resize(ev->obj, vstream->width, vstream->height, ev->ratio);
1991      }
1992
1993    {
1994      /* to recap: */
1995      Emotion_Video_Stream *vstream;
1996      Emotion_Audio_Stream *astream;
1997
1998      vstream = eina_list_data_get(ev->video_streams);
1999      if (vstream)
2000        {
2001          DBG("video size=%dx%d, fps=%d/%d, "
2002              "fourcc=%"GST_FOURCC_FORMAT", length=%"GST_TIME_FORMAT,
2003              vstream->width, vstream->height, vstream->fps_num, vstream->fps_den,
2004              GST_FOURCC_ARGS(vstream->fourcc),
2005              GST_TIME_ARGS((guint64)(vstream->length_time * GST_SECOND)));
2006        }
2007
2008      astream = eina_list_data_get(ev->audio_streams);
2009      if (astream)
2010        {
2011          DBG("audio channels=%d, rate=%d, length=%"GST_TIME_FORMAT,
2012              astream->channels, astream->samplerate,
2013              GST_TIME_ARGS((guint64)(astream->length_time * GST_SECOND)));
2014        }
2015    }
2016
2017    if (ev->metadata)
2018      _free_metadata(ev->metadata);
2019    ev->metadata = calloc(1, sizeof(Emotion_Gstreamer_Metadata));
2020
2021    ev->pipeline_parsed = EINA_TRUE;
2022
2023    em_audio_channel_volume_set(ev, ev->volume);
2024    em_audio_channel_mute_set(ev, ev->audio_mute);
2025
2026    if (ev->play_started)
2027      {
2028         _emotion_playback_started(ev->obj);
2029         ev->play_started = 0;
2030      }
2031
2032    _emotion_open_done(ev->obj);
2033
2034    return EINA_TRUE;
2035 }