1 #include <unistd.h>
2 #include <fcntl.h>
3
4 #include <Eina.h>
5
6 #include "emotion_private.h"
7 #include "emotion_gstreamer.h"
8 #include "Emotion.h"
9
10 int _emotion_gstreamer_log_domain = -1;
11
12 /* Tag callback and metadata helpers */
13 static void _for_each_tag    (GstTagList const* list, gchar const* tag, void *data);
14 static void _free_metadata   (Emotion_Gstreamer_Metadata *m);
15
16 /* Interface */
17
18 static unsigned char  em_init                     (Evas_Object     *obj,
19                                                    void           **emotion_video,
20                                                    Emotion_Module_Options *opt);
21
22 static int            em_shutdown                 (void           *video);
23
24 static unsigned char  em_file_open                (const char     *file,
25                                                    Evas_Object     *obj,
26                                                    void            *video);
27
28 static void           em_file_close               (void            *video);
29
30 static void           em_play                     (void            *video,
31                                                    double           pos);
32
33 static void           em_stop                     (void            *video);
34
35 static void           em_size_get                 (void            *video,
36                                                    int             *width,
37                                                    int             *height);
38
39 static void           em_pos_set                  (void            *video,
40                                                    double           pos);
41
42
43 static double         em_len_get                  (void            *video);
44
45 static int            em_fps_num_get              (void            *video);
46
47 static int            em_fps_den_get              (void            *video);
48
49 static double         em_fps_get                  (void            *video);
50
51 static double         em_pos_get                  (void            *video);
52
53 static void           em_vis_set                  (void            *video,
54                                                    Emotion_Vis      vis);
55
56 static Emotion_Vis    em_vis_get                  (void            *video);
57
58 static Eina_Bool      em_vis_supported            (void            *video,
59                                                    Emotion_Vis      vis);
60
61 static double         em_ratio_get                (void            *video);
62
63 static int            em_video_handled            (void            *video);
64
65 static int            em_audio_handled            (void            *video);
66
67 static int            em_seekable                 (void            *video);
68
69 static void           em_frame_done               (void            *video);
70
71 static Emotion_Format em_format_get               (void            *video);
72
73 static void           em_video_data_size_get      (void            *video,
74                                                    int             *w,
75                                                    int             *h);
76
77 static int            em_yuv_rows_get             (void            *video,
78                                                    int              w,
79                                                    int              h,
80                                                    unsigned char  **yrows,
81                                                    unsigned char  **urows,
82                                                    unsigned char  **vrows);
83
84 static int            em_bgra_data_get            (void            *video,
85                                                    unsigned char  **bgra_data);
86
87 static void           em_event_feed               (void            *video,
88                                                    int              event);
89
90 static void           em_event_mouse_button_feed  (void            *video,
91                                                    int              button,
92                                                    int              x,
93                                                    int              y);
94
95 static void           em_event_mouse_move_feed    (void            *video,
96                                                    int              x,
97                                                    int              y);
98
99 static int            em_video_channel_count      (void             *video);
100
101 static void           em_video_channel_set        (void             *video,
102                                                    int               channel);
103
104 static int            em_video_channel_get        (void             *video);
105
106 static const char    *em_video_channel_name_get   (void             *video,
107                                                    int               channel);
108
109 static void           em_video_channel_mute_set   (void             *video,
110                                                    int               mute);
111
112 static int            em_video_channel_mute_get   (void             *video);
113
114 static int            em_audio_channel_count      (void             *video);
115
116 static void           em_audio_channel_set        (void             *video,
117                                                    int               channel);
118
119 static int            em_audio_channel_get        (void             *video);
120
121 static const char    *em_audio_channel_name_get   (void             *video,
122                                                    int               channel);
123
124 static void           em_audio_channel_mute_set   (void             *video,
125                                                    int               mute);
126
127 static int            em_audio_channel_mute_get   (void             *video);
128
129 static void           em_audio_channel_volume_set (void             *video,
130                                                    double             vol);
131
132 static double         em_audio_channel_volume_get (void             *video);
133
134 static int            em_spu_channel_count        (void             *video);
135
136 static void           em_spu_channel_set          (void             *video,
137                                                    int               channel);
138
139 static int            em_spu_channel_get          (void             *video);
140
141 static const char    *em_spu_channel_name_get     (void             *video,
142                                                    int               channel);
143
144 static void           em_spu_channel_mute_set     (void             *video,
145                                                    int               mute);
146
147 static int            em_spu_channel_mute_get     (void             *video);
148
149 static int            em_chapter_count            (void             *video);
150
151 static void           em_chapter_set              (void             *video,
152                                                    int               chapter);
153
154 static int            em_chapter_get              (void             *video);
155
156 static const char    *em_chapter_name_get         (void             *video,
157                                                    int               chapter);
158
159 static void           em_speed_set                (void             *video,
160                                                    double            speed);
161
162 static double         em_speed_get                (void             *video);
163
164 static int            em_eject                    (void             *video);
165
166 static const char    *em_meta_get                 (void             *video,
167                                                    int               meta);
168
169 static GstBusSyncReply _eos_sync_fct(GstBus *bus,
170                                      GstMessage *message,
171                                      gpointer data);
172
173 /* Module interface */
174
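/* Backend vtable handed to the Emotion core; the initializers are positional,
 * so they must match the field order of Emotion_Video_Module. */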
175 static Emotion_Video_Module em_module =
176 {
177    em_init, /* init */
178    em_shutdown, /* shutdown */
179    em_file_open, /* file_open */
180    em_file_close, /* file_close */
181    em_play, /* play */
182    em_stop, /* stop */
183    em_size_get, /* size_get */
184    em_pos_set, /* pos_set */
185    em_len_get, /* len_get */
186    em_fps_num_get, /* fps_num_get */
187    em_fps_den_get, /* fps_den_get */
188    em_fps_get, /* fps_get */
189    em_pos_get, /* pos_get */
190    em_vis_set, /* vis_set */
191    em_vis_get, /* vis_get */
192    em_vis_supported, /* vis_supported */
193    em_ratio_get, /* ratio_get */
194    em_video_handled, /* video_handled */
195    em_audio_handled, /* audio_handled */
196    em_seekable, /* seekable */
197    em_frame_done, /* frame_done */
198    em_format_get, /* format_get */
199    em_video_data_size_get, /* video_data_size_get */
200    em_yuv_rows_get, /* yuv_rows_get */
201    em_bgra_data_get, /* bgra_data_get */
202    em_event_feed, /* event_feed */
203    em_event_mouse_button_feed, /* event_mouse_button_feed */
204    em_event_mouse_move_feed, /* event_mouse_move_feed */
205    em_video_channel_count, /* video_channel_count */
206    em_video_channel_set, /* video_channel_set */
207    em_video_channel_get, /* video_channel_get */
208    em_video_channel_name_get, /* video_channel_name_get */
209    em_video_channel_mute_set, /* video_channel_mute_set */
210    em_video_channel_mute_get, /* video_channel_mute_get */
211    em_audio_channel_count, /* audio_channel_count */
212    em_audio_channel_set, /* audio_channel_set */
213    em_audio_channel_get, /* audio_channel_get */
214    em_audio_channel_name_get, /* audio_channel_name_get */
215    em_audio_channel_mute_set, /* audio_channel_mute_set */
216    em_audio_channel_mute_get, /* audio_channel_mute_get */
217    em_audio_channel_volume_set, /* audio_channel_volume_set */
218    em_audio_channel_volume_get, /* audio_channel_volume_get */
219    em_spu_channel_count, /* spu_channel_count */
220    em_spu_channel_set, /* spu_channel_set */
221    em_spu_channel_get, /* spu_channel_get */
222    em_spu_channel_name_get, /* spu_channel_name_get */
223    em_spu_channel_mute_set, /* spu_channel_mute_set */
224    em_spu_channel_mute_get, /* spu_channel_mute_get */
225    em_chapter_count, /* chapter_count */
226    em_chapter_set, /* chapter_set */
227    em_chapter_get, /* chapter_get */
228    em_chapter_name_get, /* chapter_name_get */
229    em_speed_set, /* speed_set */
230    em_speed_get, /* speed_get */
231    em_eject, /* eject */
232    em_meta_get, /* meta_get */
233    NULL /* handle */
234 };
235
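/* Allocate a video stream descriptor and append it to ev->video_streams.
 * Returns NULL if the allocation or the list append fails. */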
236 static Emotion_Video_Stream *
237 emotion_video_stream_new(Emotion_Gstreamer_Video *ev)
238 {
239    Emotion_Video_Stream *vstream;
240
241    if (!ev) return NULL;
242
243    vstream = (Emotion_Video_Stream *)calloc(1, sizeof(Emotion_Video_Stream));
244    if (!vstream) return NULL;
245
246    ev->video_streams = eina_list_append(ev->video_streams, vstream);
247    if (eina_error_get())
248      {
249         free(vstream);
250         return NULL;
251      }
252    return vstream;
253 }
254
255 static void
256 emotion_video_stream_free(Emotion_Gstreamer_Video *ev, Emotion_Video_Stream *vstream)
257 {
258    if (!ev || !vstream) return;
259
260    ev->video_streams = eina_list_remove(ev->video_streams, vstream);
261    free(vstream);
262 }
263
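/* Map an Emotion_Vis value to the GStreamer element factory name used for
 * audio visualization; EMOTION_VIS_NONE maps to NULL. */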
264 static const char *
265 emotion_visualization_element_name_get(Emotion_Vis visualisation)
266 {
267    switch (visualisation)
268      {
269       case EMOTION_VIS_NONE:
270          return NULL;
271       case EMOTION_VIS_GOOM:
272          return "goom";
273       case EMOTION_VIS_LIBVISUAL_BUMPSCOPE:
274          return "libvisual_bumpscope";
275       case EMOTION_VIS_LIBVISUAL_CORONA:
276          return "libvisual_corona";
277       case EMOTION_VIS_LIBVISUAL_DANCING_PARTICLES:
278          return "libvisual_dancingparticles";
279       case EMOTION_VIS_LIBVISUAL_GDKPIXBUF:
280          return "libvisual_gdkpixbuf";
281       case EMOTION_VIS_LIBVISUAL_G_FORCE:
282          return "libvisual_G-Force";
283       case EMOTION_VIS_LIBVISUAL_GOOM:
284          return "libvisual_goom";
285       case EMOTION_VIS_LIBVISUAL_INFINITE:
286          return "libvisual_infinite";
287       case EMOTION_VIS_LIBVISUAL_JAKDAW:
288          return "libvisual_jakdaw";
289       case EMOTION_VIS_LIBVISUAL_JESS:
290          return "libvisual_jess";
291       case EMOTION_VIS_LIBVISUAL_LV_ANALYSER:
292          return "libvisual_lv_analyzer";
293       case EMOTION_VIS_LIBVISUAL_LV_FLOWER:
294          return "libvisual_lv_flower";
295       case EMOTION_VIS_LIBVISUAL_LV_GLTEST:
296          return "libvisual_lv_gltest";
297       case EMOTION_VIS_LIBVISUAL_LV_SCOPE:
298          return "libvisual_lv_scope";
299       case EMOTION_VIS_LIBVISUAL_MADSPIN:
300          return "libvisual_madspin";
301       case EMOTION_VIS_LIBVISUAL_NEBULUS:
302          return "libvisual_nebulus";
303       case EMOTION_VIS_LIBVISUAL_OINKSIE:
304          return "libvisual_oinksie";
305       case EMOTION_VIS_LIBVISUAL_PLASMA:
306          return "libvisual_plazma";
307       default:
308          return "goom";
309      }
310 }
311
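/* Module init: allocate the per-object state, run gst_init_check() and set
 * the default ratio/volume/visualization values. */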
312 static unsigned char
313 em_init(Evas_Object            *obj,
314         void                  **emotion_video,
315         Emotion_Module_Options *opt __UNUSED__)
316 {
317    Emotion_Gstreamer_Video *ev;
318    GError                  *error = NULL;
319
320    if (!emotion_video)
321      return 0;
322
323    ev = calloc(1, sizeof(Emotion_Gstreamer_Video));
324    if (!ev) return 0;
325
326    ev->obj = obj;
327
328    /* Initialization of gstreamer */
329    if (!gst_init_check(NULL, NULL, &error))
330      goto failure;
331
332    /* Default values */
333    ev->ratio = 1.0;
334    ev->vis = EMOTION_VIS_NONE;
335    ev->volume = 0.8;
336    ev->play_started = 0;
337
338    *emotion_video = ev;
339
340    return 1;
341
342 failure:
343    free(ev);
344
345    return 0;
346 }
347
348 static int
349 em_shutdown(void *video)
350 {
351    Emotion_Gstreamer_Video *ev;
352    Emotion_Audio_Stream *astream;
353    Emotion_Video_Stream *vstream;
354
355    ev = (Emotion_Gstreamer_Video *)video;
356    if (!ev)
357      return 0;
358
359    if (ev->thread)
360      {
361         ecore_thread_cancel(ev->thread);
362         ev->thread = NULL;
363      }
364
365    if (ev->eos_bus)
366      {
367         gst_object_unref(GST_OBJECT(ev->eos_bus));
368         ev->eos_bus = NULL;
369      }
370
371    if (ev->pipeline)
372      {
373        gst_element_set_state(ev->pipeline, GST_STATE_NULL);
374        gst_object_unref(ev->pipeline);
375        ev->pipeline = NULL;
376      }
377
378    EINA_LIST_FREE(ev->audio_streams, astream)
379      free(astream);
380    EINA_LIST_FREE(ev->video_streams, vstream)
381      free(vstream);
382
383    free(ev);
384
385    return 1;
386 }
387
388
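/* Open a media file: paths without a scheme are turned into file:// URIs
 * (e.g. "./movie.avi" -> "file://<cwd>/movie.avi", "C:/movie.avi" ->
 * "file:///C:/movie.avi"), then the playback pipeline is created and the
 * bus sync handler installed. */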
389 static unsigned char
390 em_file_open(const char   *file,
391              Evas_Object  *obj,
392              void         *video)
393 {
394    Emotion_Gstreamer_Video *ev;
395    Eina_Strbuf *sbuf = NULL;
396    const char *uri;
397
398    ev = (Emotion_Gstreamer_Video *)video;
399
400    if (!file) return EINA_FALSE;
401    if (strstr(file, "://") == NULL)
402      {
403         sbuf = eina_strbuf_new();
404         eina_strbuf_append(sbuf, "file://");
405         if (strncmp(file, "./", 2) == 0)
406           file += 2;
407         if (strstr(file, ":/") != NULL)
408           { /* We absolutely need file:///C:/ under Windows, so adding it here */
409              eina_strbuf_append(sbuf, "/");
410           }
411         else if (*file != '/')
412           {
413              char tmp[PATH_MAX];
414
415              if (getcwd(tmp, PATH_MAX))
416                {
417                   eina_strbuf_append(sbuf, tmp);
418                   eina_strbuf_append(sbuf, "/");
419                }
420           }
421         eina_strbuf_append(sbuf, file);
422      }
423
424    ev->play_started = 0;
425
426    uri = sbuf ? eina_strbuf_string_get(sbuf) : file;
427    DBG("setting file to '%s'", uri);
428    ev->pipeline = gstreamer_video_sink_new(ev, obj, uri);
429    if (sbuf) eina_strbuf_free(sbuf);
430
431    if (!ev->pipeline)
432      return EINA_FALSE;
433
434    ev->eos_bus = gst_pipeline_get_bus(GST_PIPELINE(ev->pipeline));
435    if (!ev->eos_bus)
436      {
437         ERR("could not get the bus");
438         return EINA_FALSE;
439      }
440
441    gst_bus_set_sync_handler(ev->eos_bus, _eos_sync_fct, ev);
442
443    /* Evas Object */
444    ev->obj = obj;
445
446    ev->position = 0.0;
447
448    return EINA_TRUE;
449 }
450
451 static void
452 em_file_close(void *video)
453 {
454    Emotion_Gstreamer_Video *ev;
455    Emotion_Audio_Stream *astream;
456    Emotion_Video_Stream *vstream;
457
458    ev = (Emotion_Gstreamer_Video *)video;
459    if (!ev)
460      return;
461
462    if (ev->eos_bus)
463      {
464         gst_object_unref(GST_OBJECT(ev->eos_bus));
465         ev->eos_bus = NULL;
466      }
467
468    if (ev->thread)
469      {
470         ecore_thread_cancel(ev->thread);
471         ev->thread = NULL;
472      }
473
474    if (ev->pipeline)
475      {
476         gst_element_set_state(ev->pipeline, GST_STATE_NULL);
477         gst_object_unref(ev->pipeline);
478         ev->pipeline = NULL;
479      }
480
481    /* we clear the stream lists */
482    EINA_LIST_FREE(ev->audio_streams, astream)
483      free(astream);
484    EINA_LIST_FREE(ev->video_streams, vstream)
485      free(vstream);
486    ev->pipeline_parsed = EINA_FALSE;
487    ev->play_started = 0;
488
489    /* shutdown eos */
490    if (ev->metadata)
491      {
492         _free_metadata(ev->metadata);
493         ev->metadata = NULL;
494      }
495 }
496
497 static void
498 em_play(void   *video,
499         double  pos __UNUSED__)
500 {
501    Emotion_Gstreamer_Video *ev;
502
503    ev = (Emotion_Gstreamer_Video *)video;
504    if (!ev->pipeline) return ;
505
506    gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
507    ev->play = 1;
508    ev->play_started = 1;
509 }
510
511 static void
512 em_stop(void *video)
513 {
514    Emotion_Gstreamer_Video *ev;
515
516    ev = (Emotion_Gstreamer_Video *)video;
517
518    if (!ev->pipeline) return ;
519  
520    gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
521    ev->play = 0;
522 }
523
524 static void
525 em_size_get(void  *video,
526             int   *width,
527             int   *height)
528 {
529    Emotion_Gstreamer_Video *ev;
530    Emotion_Video_Stream      *vstream;
531
532    ev = (Emotion_Gstreamer_Video *)video;
533
534    if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
535      goto on_error;
536
537    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
538    if (vstream)
539      {
540         if (width) *width = vstream->width;
541         if (height) *height = vstream->height;
542
543         return ;
544      }
545
546  on_error:
547    if (width) *width = 0;
548    if (height) *height = 0;
549 }
550
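/* Seek: pause if currently playing, issue a flushing, accurate seek in
 * GST_FORMAT_TIME (pos is in seconds), then resume playback. */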
551 static void
552 em_pos_set(void   *video,
553            double  pos)
554 {
555    Emotion_Gstreamer_Video *ev;
556    gboolean res;
557
558    ev = (Emotion_Gstreamer_Video *)video;
559
560    if (!ev->pipeline) return ;
561
562    if (ev->play)
563      res = gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
564
565    res = gst_element_seek(ev->pipeline, 1.0,
566                           GST_FORMAT_TIME,
567                           GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH,
568                           GST_SEEK_TYPE_SET,
569                           (gint64)(pos * (double)GST_SECOND),
570                           GST_SEEK_TYPE_NONE, -1);
571
572    if (ev->play)
573      res = gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
574 }
575
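/* Duration in seconds: query the pipeline in GST_FORMAT_TIME and fall back
 * to the per-stream lengths gathered while parsing the pipeline. */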
576 static double
577 em_len_get(void *video)
578 {
579    Emotion_Gstreamer_Video *ev;
580    Emotion_Video_Stream *vstream;
581    Emotion_Audio_Stream *astream;
582    Eina_List *l;
583    GstFormat fmt;
584    gint64 val;
585    gboolean ret;
586
587    ev = video;
588    fmt = GST_FORMAT_TIME;
589
590    if (!ev->pipeline) return 0.0;
591
592    ret = gst_element_query_duration(ev->pipeline, &fmt, &val);
593    if (!ret)
594      goto fallback;
595
596    if (fmt != GST_FORMAT_TIME)
597      {
598         DBG("requested duration in time, but got %s instead.",
599             gst_format_get_name(fmt));
600         goto fallback;
601      }
602
603    if (val <= 0.0)
604      goto fallback;
605
606    return val / 1000000000.0;
607
608  fallback:
609    if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
610      return 0.0;
611
612    EINA_LIST_FOREACH(ev->audio_streams, l, astream)
613      if (astream->length_time >= 0)
614        return astream->length_time;
615
616    EINA_LIST_FOREACH(ev->video_streams, l, vstream)
617      if (vstream->length_time >= 0)
618        return vstream->length_time;
619
620    return 0.0;
621 }
622
623 static int
624 em_fps_num_get(void *video)
625 {
626    Emotion_Gstreamer_Video *ev;
627    Emotion_Video_Stream      *vstream;
628
629    ev = (Emotion_Gstreamer_Video *)video;
630
631    if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
632      return 0;
633
634    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
635    if (vstream)
636      return vstream->fps_num;
637
638    return 0;
639 }
640
641 static int
642 em_fps_den_get(void *video)
643 {
644    Emotion_Gstreamer_Video *ev;
645    Emotion_Video_Stream      *vstream;
646
647    ev = (Emotion_Gstreamer_Video *)video;
648
649    if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
650      return 1;
651
652    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
653    if (vstream)
654      return vstream->fps_den;
655
656    return 1;
657 }
658
659 static double
660 em_fps_get(void *video)
661 {
662    Emotion_Gstreamer_Video *ev;
663    Emotion_Video_Stream      *vstream;
664
665    ev = (Emotion_Gstreamer_Video *)video;
666
667    if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
668      return 0.0;
669
670    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
671    if (vstream)
672      return (double)vstream->fps_num / (double)vstream->fps_den;
673
674    return 0.0;
675 }
676
677 static double
678 em_pos_get(void *video)
679 {
680    Emotion_Gstreamer_Video *ev;
681    GstFormat fmt;
682    gint64 val;
683    gboolean ret;
684
685    ev = video;
686    fmt = GST_FORMAT_TIME;
687
688    if (!ev->pipeline) return 0.0;
689
690    ret = gst_element_query_position(ev->pipeline, &fmt, &val);
691    if (!ret)
692      return ev->position;
693
694    if (fmt != GST_FORMAT_TIME)
695      {
696         ERR("requested position in time, but got %s instead.",
697             gst_format_get_name(fmt));
698         return ev->position;
699      }
700
701    ev->position = val / 1000000000.0;
702    return ev->position;
703 }
704
705 static void
706 em_vis_set(void *video,
707            Emotion_Vis vis)
708 {
709    Emotion_Gstreamer_Video *ev;
710
711    ev = (Emotion_Gstreamer_Video *)video;
712
713    ev->vis = vis;
714 }
715
716 static Emotion_Vis
717 em_vis_get(void *video)
718 {
719    Emotion_Gstreamer_Video *ev;
720
721    ev = (Emotion_Gstreamer_Video *)video;
722
723    return ev->vis;
724 }
725
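/* A visualization is supported when the matching element factory exists. */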
726 static Eina_Bool
727 em_vis_supported(void *ef __UNUSED__, Emotion_Vis vis)
728 {
729    const char *name;
730    GstElementFactory *factory;
731
732    if (vis == EMOTION_VIS_NONE)
733      return EINA_TRUE;
734
735    name = emotion_visualization_element_name_get(vis);
736    if (!name)
737      return EINA_FALSE;
738
739    factory = gst_element_factory_find(name);
740    if (!factory)
741      return EINA_FALSE;
742
743    gst_object_unref(factory);
744    return EINA_TRUE;
745 }
746
747 static double
748 em_ratio_get(void *video)
749 {
750    Emotion_Gstreamer_Video *ev;
751
752    ev = (Emotion_Gstreamer_Video *)video;
753
754    return ev->ratio;
755 }
756
757 static int
758 em_video_handled(void *video)
759 {
760    Emotion_Gstreamer_Video *ev;
761
762    ev = (Emotion_Gstreamer_Video *)video;
763
764    _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
765
766    if (!eina_list_count(ev->video_streams))
767      return 0;
768
769    return 1;
770 }
771
772 static int
773 em_audio_handled(void *video)
774 {
775    Emotion_Gstreamer_Video *ev;
776
777    ev = (Emotion_Gstreamer_Video *)video;
778
779    _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
780
781    if (!eina_list_count(ev->audio_streams))
782      return 0;
783
784    return 1;
785 }
786
787 static int
788 em_seekable(void *video __UNUSED__)
789 {
790    return 1;
791 }
792
793 static void
794 em_frame_done(void *video __UNUSED__)
795 {
796 }
797
798 static Emotion_Format
799 em_format_get(void *video)
800 {
801    Emotion_Gstreamer_Video *ev;
802    Emotion_Video_Stream    *vstream;
803
804    ev = (Emotion_Gstreamer_Video *)video;
805
806    if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
807      return EMOTION_FORMAT_NONE;
808
809    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
810    if (vstream)
811      {
812         switch (vstream->fourcc)
813           {
814            case GST_MAKE_FOURCC('I', '4', '2', '0'):
815               return EMOTION_FORMAT_I420;
816            case GST_MAKE_FOURCC('Y', 'V', '1', '2'):
817               return EMOTION_FORMAT_YV12;
818            case GST_MAKE_FOURCC('Y', 'U', 'Y', '2'):
819               return EMOTION_FORMAT_YUY2;
820            case GST_MAKE_FOURCC('A', 'R', 'G', 'B'):
821               return EMOTION_FORMAT_BGRA;
822            default:
823               return EMOTION_FORMAT_NONE;
824           }
825      }
826    return EMOTION_FORMAT_NONE;
827 }
828
829 static void
830 em_video_data_size_get(void *video, int *w, int *h)
831 {
832    Emotion_Gstreamer_Video *ev;
833    Emotion_Video_Stream    *vstream;
834
835    ev = (Emotion_Gstreamer_Video *)video;
836
837    if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
838      goto on_error;
839
840    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
841    if (vstream)
842      {
843         *w = vstream->width;
844         *h = vstream->height;
845
846         return ;
847      }
848
849  on_error:
850    *w = 0;
851    *h = 0;
852 }
853
854 static int
855 em_yuv_rows_get(void           *video __UNUSED__,
856                 int             w __UNUSED__,
857                 int             h __UNUSED__,
858                 unsigned char **yrows __UNUSED__,
859                 unsigned char **urows __UNUSED__,
860                 unsigned char **vrows __UNUSED__)
861 {
862    return 0;
863 }
864
865 static int
866 em_bgra_data_get(void *video __UNUSED__, unsigned char **bgra_data __UNUSED__)
867 {
868    return 0;
869 }
870
871 static void
872 em_event_feed(void *video __UNUSED__, int event __UNUSED__)
873 {
874 }
875
876 static void
877 em_event_mouse_button_feed(void *video __UNUSED__, int button __UNUSED__, int x __UNUSED__, int y __UNUSED__)
878 {
879 }
880
881 static void
882 em_event_mouse_move_feed(void *video __UNUSED__, int x __UNUSED__, int y __UNUSED__)
883 {
884 }
885
886 /* Video channels */
887 static int
888 em_video_channel_count(void *video)
889 {
890    Emotion_Gstreamer_Video *ev;
891
892    ev = (Emotion_Gstreamer_Video *)video;
893
894    _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
895
896    return eina_list_count(ev->video_streams);
897 }
898
899 static void
900 em_video_channel_set(void *video __UNUSED__,
901                      int   channel __UNUSED__)
902 {
903 #if 0
904    Emotion_Gstreamer_Video *ev;
905
906    ev = (Emotion_Gstreamer_Video *)video;
907
908    if (channel < 0) channel = 0;
909 #endif
910    /* FIXME: to be done... */
911 }
912
913 static int
914 em_video_channel_get(void *video)
915 {
916    Emotion_Gstreamer_Video *ev;
917
918    ev = (Emotion_Gstreamer_Video *)video;
919
920    _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
921
922    return ev->video_stream_nbr;
923 }
924
925 static const char *
926 em_video_channel_name_get(void *video __UNUSED__,
927                           int   channel __UNUSED__)
928 {
929    return NULL;
930 }
931
932 static void
933 em_video_channel_mute_set(void *video,
934                           int   mute)
935 {
936    Emotion_Gstreamer_Video *ev;
937
938    ev = (Emotion_Gstreamer_Video *)video;
939
940    ev->video_mute = mute;
941 }
942
943 static int
944 em_video_channel_mute_get(void *video)
945 {
946    Emotion_Gstreamer_Video *ev;
947
948    ev = (Emotion_Gstreamer_Video *)video;
949
950    return ev->video_mute;
951 }
952
953 /* Audio channels */
954
955 static int
956 em_audio_channel_count(void *video)
957 {
958    Emotion_Gstreamer_Video *ev;
959
960    ev = (Emotion_Gstreamer_Video *)video;
961
962    _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
963
964    return eina_list_count(ev->audio_streams);
965 }
966
967 static void
968 em_audio_channel_set(void *video __UNUSED__,
969                      int   channel __UNUSED__)
970 {
971 #if 0
972    Emotion_Gstreamer_Video *ev;
973
974    ev = (Emotion_Gstreamer_Video *)video;
975
976    if (channel < -1) channel = -1;
977 #endif
978    /* FIXME: to be done... */
979 }
980
981 static int
982 em_audio_channel_get(void *video)
983 {
984    Emotion_Gstreamer_Video *ev;
985
986    ev = (Emotion_Gstreamer_Video *)video;
987
988    _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
989
990    return ev->audio_stream_nbr;
991 }
992
993 static const char *
994 em_audio_channel_name_get(void *video __UNUSED__,
995                           int   channel __UNUSED__)
996 {
997    return NULL;
998 }
999
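/* playbin2 "flags" bit for audio decoding, kept for the disabled per-stream
 * mute code below; muting currently just toggles the "mute" property. */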
1000 #define GST_PLAY_FLAG_AUDIO (1 << 1)
1001
1002 static void
1003 em_audio_channel_mute_set(void *video,
1004                           int   mute)
1005 {
1006    Emotion_Gstreamer_Video *ev;
1007    int flags;
1008
1009    ev = (Emotion_Gstreamer_Video *)video;
1010
1011    if (!ev->pipeline) return ;
1012
1013    ev->audio_mute = mute;
1014
1015    g_object_set(G_OBJECT(ev->pipeline), "mute", !!mute, NULL);
1016    /* This code should stop decoding only the audio stream, but everything stops :( */
1017    /* g_object_get(G_OBJECT(ev->pipeline), "flags", &flags, NULL); */
1018    /* if (mute) */
1019    /*   flags &= ~GST_PLAY_FLAG_AUDIO; */
1020    /* else */
1021    /*   flags |= GST_PLAY_FLAG_AUDIO; */
1022    /* g_object_set(G_OBJECT(ev->pipeline), "flags", flags, NULL); */
1023    /* g_object_get(G_OBJECT(ev->pipeline), "flags", &flags, NULL); */
1024    /* fprintf(stderr, "flags-n: %x\n", flags); */
1025 }
1026
1027 static int
1028 em_audio_channel_mute_get(void *video)
1029 {
1030    Emotion_Gstreamer_Video *ev;
1031
1032    ev = (Emotion_Gstreamer_Video *)video;
1033
1034    return ev->audio_mute;
1035 }
1036
1037 static void
1038 em_audio_channel_volume_set(void  *video,
1039                             double vol)
1040 {
1041    Emotion_Gstreamer_Video *ev;
1042
1043    ev = (Emotion_Gstreamer_Video *)video;
1044
1045    if (!ev->pipeline) return ;
1046
1047    if (vol < 0.0)
1048      vol = 0.0;
1049    if (vol > 1.0)
1050      vol = 1.0;
1051    ev->volume = vol;
1052    g_object_set(G_OBJECT(ev->pipeline), "volume", vol, NULL);
1053 }
1054
1055 static double
1056 em_audio_channel_volume_get(void *video)
1057 {
1058    Emotion_Gstreamer_Video *ev;
1059
1060    ev = (Emotion_Gstreamer_Video *)video;
1061
1062    return ev->volume;
1063 }
1064
1065 /* spu stuff */
1066
1067 static int
1068 em_spu_channel_count(void *video __UNUSED__)
1069 {
1070    return 0;
1071 }
1072
1073 static void
1074 em_spu_channel_set(void *video __UNUSED__, int channel __UNUSED__)
1075 {
1076 }
1077
1078 static int
1079 em_spu_channel_get(void *video __UNUSED__)
1080 {
1081    return 1;
1082 }
1083
1084 static const char *
1085 em_spu_channel_name_get(void *video __UNUSED__, int channel __UNUSED__)
1086 {
1087    return NULL;
1088 }
1089
1090 static void
1091 em_spu_channel_mute_set(void *video __UNUSED__, int mute __UNUSED__)
1092 {
1093 }
1094
1095 static int
1096 em_spu_channel_mute_get(void *video __UNUSED__)
1097 {
1098    return 0;
1099 }
1100
1101 static int
1102 em_chapter_count(void *video __UNUSED__)
1103 {
1104    return 0;
1105 }
1106
1107 static void
1108 em_chapter_set(void *video __UNUSED__, int chapter __UNUSED__)
1109 {
1110 }
1111
1112 static int
1113 em_chapter_get(void *video __UNUSED__)
1114 {
1115    return 0;
1116 }
1117
1118 static const char *
1119 em_chapter_name_get(void *video __UNUSED__, int chapter __UNUSED__)
1120 {
1121    return NULL;
1122 }
1123
1124 static void
1125 em_speed_set(void *video __UNUSED__, double speed __UNUSED__)
1126 {
1127 }
1128
1129 static double
1130 em_speed_get(void *video __UNUSED__)
1131 {
1132    return 1.0;
1133 }
1134
1135 static int
1136 em_eject(void *video __UNUSED__)
1137 {
1138    return 1;
1139 }
1140
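/* Return the cached metadata string for the requested field; the cache is
 * filled by _for_each_tag() when a GST_MESSAGE_TAG arrives. */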
1141 static const char *
1142 em_meta_get(void *video, int meta)
1143 {
1144    Emotion_Gstreamer_Video *ev;
1145    const char *str = NULL;
1146
1147    ev = (Emotion_Gstreamer_Video *)video;
1148
1149    if (!ev || !ev->metadata) return NULL;
1150    switch (meta)
1151      {
1152       case META_TRACK_TITLE:
1153          str = ev->metadata->title;
1154          break;
1155       case META_TRACK_ARTIST:
1156          str = ev->metadata->artist;
1157          break;
1158       case  META_TRACK_ALBUM:
1159          str = ev->metadata->album;
1160          break;
1161       case META_TRACK_YEAR:
1162          str = ev->metadata->year;
1163          break;
1164       case META_TRACK_GENRE:
1165          str = ev->metadata->genre;
1166          break;
1167       case META_TRACK_COMMENT:
1168          str = ev->metadata->comment;
1169          break;
1170       case META_TRACK_DISCID:
1171          str = ev->metadata->disc_id;
1172          break;
1173       default:
1174          break;
1175      }
1176
1177    return str;
1178 }
1179
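/* Module glue called by the Emotion core: register the log domain on first
 * use, initialise the backend and hand back the em_module vtable. */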
1180 static Eina_Bool
1181 module_open(Evas_Object           *obj,
1182             const Emotion_Video_Module **module,
1183             void                 **video,
1184             Emotion_Module_Options *opt)
1185 {
1186    if (!module)
1187      return EINA_FALSE;
1188
1189    if (_emotion_gstreamer_log_domain < 0)
1190      {
1191         eina_threads_init();
1192         eina_log_threads_enable();
1193         _emotion_gstreamer_log_domain = eina_log_domain_register
1194           ("emotion-gstreamer", EINA_COLOR_LIGHTCYAN);
1195         if (_emotion_gstreamer_log_domain < 0)
1196           {
1197              EINA_LOG_CRIT("Could not register log domain 'emotion-gstreamer'");
1198              return EINA_FALSE;
1199           }
1200      }
1201
1202    if (!em_module.init(obj, video, opt))
1203      return EINA_FALSE;
1204
1205    eina_threads_init();
1206
1207    *module = &em_module;
1208    return EINA_TRUE;
1209 }
1210
1211 static void
1212 module_close(Emotion_Video_Module *module __UNUSED__,
1213              void                 *video)
1214 {
1215    em_module.shutdown(video);
1216
1217    eina_threads_shutdown();
1218 }
1219
1220 Eina_Bool
1221 gstreamer_module_init(void)
1222 {
1223    GError *error = NULL;
1224
1225    if (!gst_init_check(NULL, NULL, &error))
1226      {
1227         EINA_LOG_CRIT("Could not init GStreamer");
1228         return EINA_FALSE;
1229      }
1230
1231    if (gst_plugin_register_static(GST_VERSION_MAJOR, GST_VERSION_MINOR,
1232                                   "emotion-sink",
1233                                   "video sink plugin for Emotion",
1234                                   gstreamer_plugin_init,
1235                                   VERSION,
1236                                   "LGPL",
1237                                   "Enlightenment",
1238                                   PACKAGE,
1239                                   "http://www.enlightenment.org/") == FALSE)
1240      {
1241         EINA_LOG_CRIT("Could not load static gstreamer video sink for Emotion.");
1242         return EINA_FALSE;
1243      }
1244
1245    return _emotion_module_register("gstreamer", module_open, module_close);
1246 }
1247
1248 void
1249 gstreamer_module_shutdown(void)
1250 {
1251    _emotion_module_unregister("gstreamer");
1252
1253    gst_deinit();
1254 }
1255
1256 #ifndef EMOTION_STATIC_BUILD_GSTREAMER
1257
1258 EINA_MODULE_INIT(gstreamer_module_init);
1259 EINA_MODULE_SHUTDOWN(gstreamer_module_shutdown);
1260
1261 #endif
1262
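/* GstTagForeachFunc: copy the tags Emotion exposes (title, artist, album,
 * genre, comment, date, track number, disc id) into ev->metadata; the
 * strings are owned by the metadata struct and released in _free_metadata(). */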
1263 static void
1264 _for_each_tag(GstTagList const* list,
1265                     gchar const* tag,
1266                     void *data)
1267 {
1268    Emotion_Gstreamer_Video *ev;
1269    int i;
1270    int count;
1271
1272
1273    ev = (Emotion_Gstreamer_Video*)data;
1274
1275    if (!ev || !ev->metadata) return;
1276
1277    count = gst_tag_list_get_tag_size(list, tag);
1278
1279    for (i = 0; i < count; i++)
1280      {
1281         if (!strcmp(tag, GST_TAG_TITLE))
1282           {
1283              char *str;
1284              if (ev->metadata->title) g_free(ev->metadata->title);
1285              if (gst_tag_list_get_string(list, GST_TAG_TITLE, &str))
1286                ev->metadata->title = str;
1287              else
1288                ev->metadata->title = NULL;
1289              break;
1290           }
1291         if (!strcmp(tag, GST_TAG_ALBUM))
1292           {
1293              gchar *str;
1294              if (ev->metadata->album) g_free(ev->metadata->album);
1295              if (gst_tag_list_get_string(list, GST_TAG_ALBUM, &str))
1296                ev->metadata->album = str;
1297              else
1298                ev->metadata->album = NULL;
1299              break;
1300           }
1301         if (!strcmp(tag, GST_TAG_ARTIST))
1302           {
1303              gchar *str;
1304              if (ev->metadata->artist) g_free( ev->metadata->artist);
1305              if (gst_tag_list_get_string(list, GST_TAG_ARTIST, &str))
1306                ev->metadata->artist = str;
1307              else
1308                ev->metadata->artist = NULL;
1309              break;
1310           }
1311         if (!strcmp(tag, GST_TAG_GENRE))
1312           {
1313              gchar *str;
1314              if (ev->metadata->genre) g_free( ev->metadata->genre);
1315              if (gst_tag_list_get_string(list, GST_TAG_GENRE, &str))
1316                ev->metadata->genre = str;
1317              else
1318                ev->metadata->genre = NULL;
1319              break;
1320           }
1321         if (!strcmp(tag, GST_TAG_COMMENT))
1322           {
1323              gchar *str;
1324              if (ev->metadata->comment) g_free(ev->metadata->comment);
1325              if (gst_tag_list_get_string(list, GST_TAG_COMMENT, &str))
1326                ev->metadata->comment = str;
1327              else
1328                ev->metadata->comment = NULL;
1329              break;
1330           }
1331         if (!strcmp(tag, GST_TAG_DATE))
1332           {
1333              gchar *str;
1334              const GValue *date;
1335              if (ev->metadata->year) g_free(ev->metadata->year);
1336              date = gst_tag_list_get_value_index(list, GST_TAG_DATE, 0);
1337              if (date)
1338                str = g_strdup_value_contents(date);
1339              else
1340                str = NULL;
1341              ev->metadata->year = str;
1342              break;
1343           }
1344
1345         if (!strcmp(tag, GST_TAG_TRACK_NUMBER))
1346           {
1347              gchar *str;
1348              const GValue *track;
1349              if (ev->metadata->count) g_free( ev->metadata->count);
1350              track = gst_tag_list_get_value_index(list, GST_TAG_TRACK_NUMBER, 0);
1351              if (track)
1352                str = g_strdup_value_contents(track);
1353              else
1354                str = NULL;
1355              ev->metadata->count = str;
1356              break;
1357           }
1358
1359 #ifdef GST_TAG_CDDA_CDDB_DISCID
1360         if (!strcmp(tag, GST_TAG_CDDA_CDDB_DISCID))
1361           {
1362              gchar *str;
1363              const GValue *discid;
1364              if (ev->metadata->disc_id) g_free(ev->metadata->disc_id);
1365              discid = gst_tag_list_get_value_index(list, GST_TAG_CDDA_CDDB_DISCID, 0);
1366              if (discid)
1367                str = g_strdup_value_contents(discid);
1368              else
1369                str = NULL;
1370              ev->metadata->disc_id = str;
1371              break;
1372           }
1373 #endif
1374      }
1375
1376 }
1377
1378 static void
1379 _free_metadata(Emotion_Gstreamer_Metadata *m)
1380 {
1381   if (!m) return;
1382
1383   if (m->title)   g_free(m->title);
1384   if (m->album)   g_free(m->album);
1385   if (m->artist)  g_free(m->artist);
1386   if (m->genre)   g_free(m->genre);
1387   if (m->comment) g_free(m->comment);
1388   if (m->year)    g_free(m->year);
1389   if (m->count)   g_free(m->count);
1390   if (m->disc_id) g_free(m->disc_id);
1391
1392   free(m);
1393 }
1394
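/* Runs in the main loop (dispatched via ecore_main_loop_thread_safe_call)
 * and translates bus messages into Emotion callbacks. */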
1395 static void
1396 _eos_main_fct(void *data)
1397 {
1398    Emotion_Gstreamer_Message *send;
1399    Emotion_Gstreamer_Video *ev;
1400    GstMessage              *msg;
1401
1402    send = data;
1403    ev = send->ev;
1404    msg = send->msg;
1405
1406    if (ev->play_started)
1407      {
1408         _emotion_playback_started(ev->obj);
1409         ev->play_started = 0;
1410      }
1411
1412    switch (GST_MESSAGE_TYPE(msg))
1413      {
1414       case GST_MESSAGE_ERROR:
1415         {
1416            gchar *debug;
1417            GError *err;
1418
1419            gst_message_parse_error(msg, &err, &debug);
1420            g_free(debug);
1421
1422            ERR("Error: %s", err->message);
1423            g_error_free(err);
1424
1425            break;
1426         }
1427       case GST_MESSAGE_EOS:
1428          ev->play = 0;
1429          _emotion_decode_stop(ev->obj);
1430          _emotion_playback_finished(ev->obj);
1431          break;
1432       case GST_MESSAGE_TAG:
1433         {
1434            GstTagList *new_tags;
1435            gst_message_parse_tag(msg, &new_tags);
1436            if (new_tags)
1437              {
1438                 gst_tag_list_foreach(new_tags,
1439                                      (GstTagForeachFunc)_for_each_tag,
1440                                      ev);
1441                 gst_tag_list_free(new_tags);
1442              }
1443            break;
1444         }
1445       case GST_MESSAGE_ASYNC_DONE:
1446          _emotion_seek_done(ev->obj);
1447          break;
1448       default:
1449          ERR("bus say: %s [%i]",
1450              GST_MESSAGE_SRC_NAME(msg),
1451              GST_MESSAGE_TYPE(msg));
1452          break;
1453      }
1454
1455    emotion_gstreamer_message_free(send);
1456 }
1457
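/* Bus sync handler: called from a GStreamer streaming thread, so the
 * interesting messages are forwarded to the main loop and every message is
 * dropped here. */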
1458 static GstBusSyncReply
1459 _eos_sync_fct(GstBus *bus, GstMessage *msg, gpointer data)
1460 {
1461    Emotion_Gstreamer_Video *ev = data;
1462    Emotion_Gstreamer_Message *send;
1463
1464    switch (GST_MESSAGE_TYPE(msg))
1465      {
1466       case GST_MESSAGE_ERROR:
1467       case GST_MESSAGE_EOS:
1468       case GST_MESSAGE_TAG:
1469       case GST_MESSAGE_ASYNC_DONE:
1470          send = emotion_gstreamer_message_alloc(ev, msg);
1471
1472          if (send) ecore_main_loop_thread_safe_call(_eos_main_fct, send);
1473
1474          break;
1475
1476       default:
1477          WRN("bus say: %s [%i]",
1478              GST_MESSAGE_SRC_NAME(msg),
1479              GST_MESSAGE_TYPE(msg));
1480          break;
1481      }
1482
1483    return GST_BUS_DROP;
1484 }
1485
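/* Wait for the pipeline to preroll, then walk the negotiated audio/video
 * pads to build the stream lists, set up the visualization branch when
 * there is no video, and report size, volume and metadata back to Emotion. */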
1486 Eina_Bool
1487 _emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev,
1488                                         Eina_Bool force)
1489 {
1490    GstStateChangeReturn res;
1491    int i;
1492
1493    if (ev->pipeline_parsed)
1494      return EINA_TRUE;
1495
1496    if (force && ev->thread)
1497      {
1498         ecore_thread_cancel(ev->thread);
1499         ev->thread = NULL;
1500      }
1501
1502    if (ev->thread)
1503      return EINA_FALSE;
1504
1505    res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
1506    if (res == GST_STATE_CHANGE_NO_PREROLL)
1507      gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
1508
1509    res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
1510    if (!(res == GST_STATE_CHANGE_SUCCESS
1511          || res == GST_STATE_CHANGE_NO_PREROLL))
1512      {
1513         /** NOTE: you need to set: GST_DEBUG_DUMP_DOT_DIR=/tmp EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp' */
1514         /** then call dot -Tpng -oemotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
1515         if (getenv("EMOTION_GSTREAMER_DOT"))
1516           GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline),
1517                                             GST_DEBUG_GRAPH_SHOW_ALL,
1518                                             getenv("EMOTION_GSTREAMER_DOT"));
1519
1520         ERR("Unable to get the pipeline into a stable state.");
1521         return EINA_FALSE;
1522      }
1523
1524    g_object_get(G_OBJECT(ev->pipeline),
1525                 "n-audio", &ev->audio_stream_nbr,
1526                 "n-video", &ev->video_stream_nbr,
1527                 NULL);
1528
1529    if ((ev->video_stream_nbr == 0) && (ev->audio_stream_nbr == 0))
1530      {
1531         ERR("No audio or video stream found");
1532         return EINA_FALSE;
1533      }
1534
1535    /* video stream */
1536    for (i = 0; i < ev->video_stream_nbr; i++)
1537      {
1538         Emotion_Video_Stream *vstream;
1539         GstPad       *pad = NULL;
1540         GstCaps      *caps;
1541         GstStructure *structure;
1542         GstQuery     *query;
1543         const GValue *val;
1544         gchar        *str;
1545
1546         gdouble length_time = 0.0;
1547         gint width;
1548         gint height;
1549         gint fps_num;
1550         gint fps_den;
1551         guint32 fourcc = 0;
1552
1553         g_signal_emit_by_name(ev->pipeline, "get-video-pad", i, &pad);
1554         if (!pad)
1555           continue;
1556
1557         caps = gst_pad_get_negotiated_caps(pad);
1558         if (!caps)
1559           goto unref_pad_v;
1560         structure = gst_caps_get_structure(caps, 0);
1561         str = gst_caps_to_string(caps);
1562
1563         if (!gst_structure_get_int(structure, "width", &width))
1564           goto unref_caps_v;
1565         if (!gst_structure_get_int(structure, "height", &height))
1566           goto unref_caps_v;
1567         if (!gst_structure_get_fraction(structure, "framerate", &fps_num, &fps_den))
1568           goto unref_caps_v;
1569
1570         if (g_str_has_prefix(str, "video/x-raw-yuv"))
1571           {
1572              val = gst_structure_get_value(structure, "format");
1573              fourcc = gst_value_get_fourcc(val);
1574           }
1575         else if (g_str_has_prefix(str, "video/x-raw-rgb"))
1576           fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
1577         else
1578           goto unref_caps_v;
1579
1580         query = gst_query_new_duration(GST_FORMAT_TIME);
1581         if (gst_pad_peer_query(pad, query))
1582           {
1583              gint64 t;
1584
1585              gst_query_parse_duration(query, NULL, &t);
1586              length_time = (double)t / (double)GST_SECOND;
1587           }
1588         else
1589           goto unref_query_v;
1590
1591         vstream = emotion_video_stream_new(ev);
1592         if (!vstream) goto unref_query_v;
1593
1594         vstream->length_time = length_time;
1595         vstream->width = width;
1596         vstream->height = height;
1597         vstream->fps_num = fps_num;
1598         vstream->fps_den = fps_den;
1599         vstream->fourcc = fourcc;
1600         vstream->index = i;
1601
1602      unref_query_v:
1603         gst_query_unref(query);
1604      unref_caps_v:
             g_free(str);  /* the string returned by gst_caps_to_string() must be freed */
1605         gst_caps_unref(caps);
1606      unref_pad_v:
1607         gst_object_unref(pad);
1608      }
1609
1610    /* Audio streams */
1611    for (i = 0; i < ev->audio_stream_nbr; i++)
1612      {
1613         Emotion_Audio_Stream *astream;
1614         GstPad       *pad;
1615         GstCaps      *caps;
1616         GstStructure *structure;
1617         GstQuery     *query;
1618
1619         gdouble length_time = 0.0;
1620         gint channels;
1621         gint samplerate;
1622
1623         g_signal_emit_by_name(ev->pipeline, "get-audio-pad", i, &pad);
1624         if (!pad)
1625           continue;
1626
1627         caps = gst_pad_get_negotiated_caps(pad);
1628         if (!caps)
1629           goto unref_pad_a;
1630         structure = gst_caps_get_structure(caps, 0);
1631
1632         if (!gst_structure_get_int(structure, "channels", &channels))
1633           goto unref_caps_a;
1634         if (!gst_structure_get_int(structure, "rate", &samplerate))
1635           goto unref_caps_a;
1636
1637         query = gst_query_new_duration(GST_FORMAT_TIME);
1638         if (gst_pad_peer_query(pad, query))
1639           {
1640              gint64 t;
1641
1642              gst_query_parse_duration(query, NULL, &t);
1643              length_time = (double)t / (double)GST_SECOND;
1644           }
1645         else
1646           goto unref_query_a;
1647
1648         astream = calloc(1, sizeof(Emotion_Audio_Stream));
1649         if (!astream) goto unref_query_a;
1650         ev->audio_streams = eina_list_append(ev->audio_streams, astream);
1651         if (eina_error_get())
1652           {
1653              free(astream);
1654              goto unref_query_a;
1655           }
1656
1657         astream->length_time = length_time;
1658         astream->channels = channels;
1659         astream->samplerate = samplerate;
1660
1661      unref_query_a:
1662         gst_query_unref(query);
1663      unref_caps_a:
1664         gst_caps_unref(caps);
1665      unref_pad_a:
1666         gst_object_unref(pad);
1667      }
1668
1669    /* Visualization sink */
1670    if (ev->video_stream_nbr == 0)
1671      {
1672         GstElement *vis = NULL;
1673         Emotion_Video_Stream *vstream;
1674         Emotion_Audio_Stream *astream;
1675         gint flags;
1676         const char *vis_name;
1677
1678         if (!(vis_name = emotion_visualization_element_name_get(ev->vis)))
1679           {
1680              WRN("no visualization element name for %d", ev->vis);
1681              goto finalize;
1682           }
1683
1684         astream = eina_list_data_get(ev->audio_streams);
1685
1686         vis = gst_element_factory_make(vis_name, "vissink");
1687         vstream = emotion_video_stream_new(ev);
1688         if (!vstream)
1689           {
1690              DBG("could not create visualization stream");
1691              goto finalize;
1692           }
1693         vstream->length_time = astream->length_time;
1694         vstream->width = 320;
1695         vstream->height = 200;
1696         vstream->fps_num = 25;
1697         vstream->fps_den = 1;
1698         vstream->fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
1699
1700         g_object_set(G_OBJECT(ev->pipeline), "vis-plugin", vis, NULL);
1701         g_object_get(G_OBJECT(ev->pipeline), "flags", &flags, NULL);
1702         flags |= 0x00000008;
1703         g_object_set(G_OBJECT(ev->pipeline), "flags", flags, NULL);
1704      }
1705
1706  finalize:
1707
1708    ev->video_stream_nbr = eina_list_count(ev->video_streams);
1709    ev->audio_stream_nbr = eina_list_count(ev->audio_streams);
1710
1711    if (ev->video_stream_nbr == 1)
1712      {
1713        Emotion_Video_Stream *vstream;
1714
1715        vstream = eina_list_data_get(ev->video_streams);
1716        ev->ratio = (double)vstream->width / (double)vstream->height;
1717        _emotion_frame_resize(ev->obj, vstream->width, vstream->height, ev->ratio);
1718      }
1719
1720    {
1721      /* recap of what was found: */
1722      Emotion_Video_Stream *vstream;
1723      Emotion_Audio_Stream *astream;
1724
1725      vstream = eina_list_data_get(ev->video_streams);
1726      if (vstream)
1727        {
1728          DBG("video size=%dx%d, fps=%d/%d, "
1729              "fourcc=%"GST_FOURCC_FORMAT", length=%"GST_TIME_FORMAT,
1730              vstream->width, vstream->height, vstream->fps_num, vstream->fps_den,
1731              GST_FOURCC_ARGS(vstream->fourcc),
1732              GST_TIME_ARGS((guint64)(vstream->length_time * GST_SECOND)));
1733        }
1734
1735      astream = eina_list_data_get(ev->audio_streams);
1736      if (astream)
1737        {
1738          DBG("audio channels=%d, rate=%d, length=%"GST_TIME_FORMAT,
1739              astream->channels, astream->samplerate,
1740              GST_TIME_ARGS((guint64)(astream->length_time * GST_SECOND)));
1741        }
1742    }
1743
1744    if (ev->metadata)
1745      _free_metadata(ev->metadata);
1746    ev->metadata = calloc(1, sizeof(Emotion_Gstreamer_Metadata));
1747
1748    em_audio_channel_volume_set(ev, ev->volume);
1749    em_audio_channel_mute_set(ev, ev->audio_mute);
1750
1751    if (ev->play_started)
1752      {
1753         _emotion_playback_started(ev->obj);
1754         ev->play_started = 0;
1755      }
1756
1757    _emotion_open_done(ev->obj);
1758    ev->pipeline_parsed = EINA_TRUE;
1759
1760    return EINA_TRUE;
1761 }