/* src/modules/gstreamer/emotion_gstreamer.c */
1 #include <unistd.h>
2 #include <fcntl.h>
3
4 #include <Eina.h>
5
6 #include "emotion_private.h"
7 #include "emotion_gstreamer.h"
8 #include "Emotion.h"
9
10 int _emotion_gstreamer_log_domain = -1;
11
13 /* Callbacks and helpers for tag / metadata handling */
13 static void _for_each_tag    (GstTagList const* list, gchar const* tag, void *data);
14 static void _free_metadata   (Emotion_Gstreamer_Metadata *m);
15
16 /* Interface */
17
18 static unsigned char  em_init                     (Evas_Object     *obj,
19                                                    void           **emotion_video,
20                                                    Emotion_Module_Options *opt);
21
22 static int            em_shutdown                 (void           *video);
23
24 static unsigned char  em_file_open                (const char     *file,
25                                                    Evas_Object     *obj,
26                                                    void            *video);
27
28 static void           em_file_close               (void            *video);
29
30 static void           em_play                     (void            *video,
31                                                    double           pos);
32
33 static void           em_stop                     (void            *video);
34
35 static void           em_size_get                 (void            *video,
36                                                    int             *width,
37                                                    int             *height);
38
39 static void           em_pos_set                  (void            *video,
40                                                    double           pos);
41
42
43 static double         em_len_get                  (void            *video);
44
45 static int            em_fps_num_get              (void            *video);
46
47 static int            em_fps_den_get              (void            *video);
48
49 static double         em_fps_get                  (void            *video);
50
51 static double         em_pos_get                  (void            *video);
52
53 static void           em_vis_set                  (void            *video,
54                                                    Emotion_Vis      vis);
55
56 static Emotion_Vis    em_vis_get                  (void            *video);
57
58 static Eina_Bool      em_vis_supported            (void            *video,
59                                                    Emotion_Vis      vis);
60
61 static double         em_ratio_get                (void            *video);
62
63 static int            em_video_handled            (void            *video);
64
65 static int            em_audio_handled            (void            *video);
66
67 static int            em_seekable                 (void            *video);
68
69 static void           em_frame_done               (void            *video);
70
71 static Emotion_Format em_format_get               (void            *video);
72
73 static void           em_video_data_size_get      (void            *video,
74                                                    int             *w,
75                                                    int             *h);
76
77 static int            em_yuv_rows_get             (void            *video,
78                                                    int              w,
79                                                    int              h,
80                                                    unsigned char  **yrows,
81                                                    unsigned char  **urows,
82                                                    unsigned char  **vrows);
83
84 static int            em_bgra_data_get            (void            *video,
85                                                    unsigned char  **bgra_data);
86
87 static void           em_event_feed               (void            *video,
88                                                    int              event);
89
90 static void           em_event_mouse_button_feed  (void            *video,
91                                                    int              button,
92                                                    int              x,
93                                                    int              y);
94
95 static void           em_event_mouse_move_feed    (void            *video,
96                                                    int              x,
97                                                    int              y);
98
99 static int            em_video_channel_count      (void             *video);
100
101 static void           em_video_channel_set        (void             *video,
102                                                    int               channel);
103
104 static int            em_video_channel_get        (void             *video);
105
106 static const char    *em_video_channel_name_get   (void             *video,
107                                                    int               channel);
108
109 static void           em_video_channel_mute_set   (void             *video,
110                                                    int               mute);
111
112 static int            em_video_channel_mute_get   (void             *video);
113
114 static int            em_audio_channel_count      (void             *video);
115
116 static void           em_audio_channel_set        (void             *video,
117                                                    int               channel);
118
119 static int            em_audio_channel_get        (void             *video);
120
121 static const char    *em_audio_channel_name_get   (void             *video,
122                                                    int               channel);
123
124 static void           em_audio_channel_mute_set   (void             *video,
125                                                    int               mute);
126
127 static int            em_audio_channel_mute_get   (void             *video);
128
129 static void           em_audio_channel_volume_set (void             *video,
130                                                    double             vol);
131
132 static double         em_audio_channel_volume_get (void             *video);
133
134 static int            em_spu_channel_count        (void             *video);
135
136 static void           em_spu_channel_set          (void             *video,
137                                                    int               channel);
138
139 static int            em_spu_channel_get          (void             *video);
140
141 static const char    *em_spu_channel_name_get     (void             *video,
142                                                    int               channel);
143
144 static void           em_spu_channel_mute_set     (void             *video,
145                                                    int               mute);
146
147 static int            em_spu_channel_mute_get     (void             *video);
148
149 static int            em_chapter_count            (void             *video);
150
151 static void           em_chapter_set              (void             *video,
152                                                    int               chapter);
153
154 static int            em_chapter_get              (void             *video);
155
156 static const char    *em_chapter_name_get         (void             *video,
157                                                    int               chapter);
158
159 static void           em_speed_set                (void             *video,
160                                                    double            speed);
161
162 static double         em_speed_get                (void             *video);
163
164 static int            em_eject                    (void             *video);
165
166 static const char    *em_meta_get                 (void             *video,
167                                                    int               meta);
168
169 static GstBusSyncReply _eos_sync_fct(GstBus *bus,
170                                      GstMessage *message,
171                                      gpointer data);
172
173 /* Module interface */
174
175 static Emotion_Video_Module em_module =
176 {
177    em_init, /* init */
178    em_shutdown, /* shutdown */
179    em_file_open, /* file_open */
180    em_file_close, /* file_close */
181    em_play, /* play */
182    em_stop, /* stop */
183    em_size_get, /* size_get */
184    em_pos_set, /* pos_set */
185    em_len_get, /* len_get */
186    em_fps_num_get, /* fps_num_get */
187    em_fps_den_get, /* fps_den_get */
188    em_fps_get, /* fps_get */
189    em_pos_get, /* pos_get */
190    em_vis_set, /* vis_set */
191    em_vis_get, /* vis_get */
192    em_vis_supported, /* vis_supported */
193    em_ratio_get, /* ratio_get */
194    em_video_handled, /* video_handled */
195    em_audio_handled, /* audio_handled */
196    em_seekable, /* seekable */
197    em_frame_done, /* frame_done */
198    em_format_get, /* format_get */
199    em_video_data_size_get, /* video_data_size_get */
200    em_yuv_rows_get, /* yuv_rows_get */
201    em_bgra_data_get, /* bgra_data_get */
202    em_event_feed, /* event_feed */
203    em_event_mouse_button_feed, /* event_mouse_button_feed */
204    em_event_mouse_move_feed, /* event_mouse_move_feed */
205    em_video_channel_count, /* video_channel_count */
206    em_video_channel_set, /* video_channel_set */
207    em_video_channel_get, /* video_channel_get */
208    em_video_channel_name_get, /* video_channel_name_get */
209    em_video_channel_mute_set, /* video_channel_mute_set */
210    em_video_channel_mute_get, /* video_channel_mute_get */
211    em_audio_channel_count, /* audio_channel_count */
212    em_audio_channel_set, /* audio_channel_set */
213    em_audio_channel_get, /* audio_channel_get */
214    em_audio_channel_name_get, /* audio_channel_name_get */
215    em_audio_channel_mute_set, /* audio_channel_mute_set */
216    em_audio_channel_mute_get, /* audio_channel_mute_get */
217    em_audio_channel_volume_set, /* audio_channel_volume_set */
218    em_audio_channel_volume_get, /* audio_channel_volume_get */
219    em_spu_channel_count, /* spu_channel_count */
220    em_spu_channel_set, /* spu_channel_set */
221    em_spu_channel_get, /* spu_channel_get */
222    em_spu_channel_name_get, /* spu_channel_name_get */
223    em_spu_channel_mute_set, /* spu_channel_mute_set */
224    em_spu_channel_mute_get, /* spu_channel_mute_get */
225    em_chapter_count, /* chapter_count */
226    em_chapter_set, /* chapter_set */
227    em_chapter_get, /* chapter_get */
228    em_chapter_name_get, /* chapter_name_get */
229    em_speed_set, /* speed_set */
230    em_speed_get, /* speed_get */
231    em_eject, /* eject */
232    em_meta_get, /* meta_get */
233    NULL /* handle */
234 };
235
236 static Emotion_Video_Stream *
237 emotion_video_stream_new(Emotion_Gstreamer_Video *ev)
238 {
239    Emotion_Video_Stream *vstream;
240
241    if (!ev) return NULL;
242
243    vstream = (Emotion_Video_Stream *)calloc(1, sizeof(Emotion_Video_Stream));
244    if (!vstream) return NULL;
245
246    ev->video_streams = eina_list_append(ev->video_streams, vstream);
247    if (eina_error_get())
248      {
249         free(vstream);
250         return NULL;
251      }
252    return vstream;
253 }
254
255 static void
256 emotion_video_stream_free(Emotion_Gstreamer_Video *ev, Emotion_Video_Stream *vstream)
257 {
258    if (!ev || !vstream) return;
259
260    ev->video_streams = eina_list_remove(ev->video_streams, vstream);
261    free(vstream);
262 }
263
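/* Map an Emotion_Vis identifier to the name of the GStreamer element that
 * implements it. EMOTION_VIS_NONE maps to NULL; unknown values fall back
 * to "goom". */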
264 static const char *
265 emotion_visualization_element_name_get(Emotion_Vis visualisation)
266 {
267    switch (visualisation)
268      {
269       case EMOTION_VIS_NONE:
270          return NULL;
271       case EMOTION_VIS_GOOM:
272          return "goom";
273       case EMOTION_VIS_LIBVISUAL_BUMPSCOPE:
274          return "libvisual_bumpscope";
275       case EMOTION_VIS_LIBVISUAL_CORONA:
276          return "libvisual_corona";
277       case EMOTION_VIS_LIBVISUAL_DANCING_PARTICLES:
278          return "libvisual_dancingparticles";
279       case EMOTION_VIS_LIBVISUAL_GDKPIXBUF:
280          return "libvisual_gdkpixbuf";
281       case EMOTION_VIS_LIBVISUAL_G_FORCE:
282          return "libvisual_G-Force";
283       case EMOTION_VIS_LIBVISUAL_GOOM:
284          return "libvisual_goom";
285       case EMOTION_VIS_LIBVISUAL_INFINITE:
286          return "libvisual_infinite";
287       case EMOTION_VIS_LIBVISUAL_JAKDAW:
288          return "libvisual_jakdaw";
289       case EMOTION_VIS_LIBVISUAL_JESS:
290          return "libvisual_jess";
291       case EMOTION_VIS_LIBVISUAL_LV_ANALYSER:
292          return "libvisual_lv_analyzer";
293       case EMOTION_VIS_LIBVISUAL_LV_FLOWER:
294          return "libvisual_lv_flower";
295       case EMOTION_VIS_LIBVISUAL_LV_GLTEST:
296          return "libvisual_lv_gltest";
297       case EMOTION_VIS_LIBVISUAL_LV_SCOPE:
298          return "libvisual_lv_scope";
299       case EMOTION_VIS_LIBVISUAL_MADSPIN:
300          return "libvisual_madspin";
301       case EMOTION_VIS_LIBVISUAL_NEBULUS:
302          return "libvisual_nebulus";
303       case EMOTION_VIS_LIBVISUAL_OINKSIE:
304          return "libvisual_oinksie";
305       case EMOTION_VIS_LIBVISUAL_PLASMA:
306          return "libvisual_plazma";
307       default:
308          return "goom";
309      }
310 }
311
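/* Allocate the per-object Emotion_Gstreamer_Video state, initialize
 * GStreamer and set the defaults (ratio 1.0, no visualization, volume 0.8).
 * Returns 1 on success, 0 on failure. */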
312 static unsigned char
313 em_init(Evas_Object            *obj,
314         void                  **emotion_video,
315         Emotion_Module_Options *opt __UNUSED__)
316 {
317    Emotion_Gstreamer_Video *ev;
318    GError                  *error = NULL;
319
320    if (!emotion_video)
321      return 0;
322
323    ev = calloc(1, sizeof(Emotion_Gstreamer_Video));
324    if (!ev) return 0;
325
326    ev->obj = obj;
327
328    /* Initialization of gstreamer */
329    if (!gst_init_check(NULL, NULL, &error))
330      goto failure;
331
332    /* Default values */
333    ev->ratio = 1.0;
334    ev->vis = EMOTION_VIS_NONE;
335    ev->volume = 0.8;
336
337    *emotion_video = ev;
338
339    return 1;
340
341 failure:
342    free(ev);
343
344    return 0;
345 }
346
347 static int
348 em_shutdown(void *video)
349 {
350    Emotion_Gstreamer_Video *ev;
351    Emotion_Audio_Stream *astream;
352    Emotion_Video_Stream *vstream;
353
354    ev = (Emotion_Gstreamer_Video *)video;
355    if (!ev)
356      return 0;
357
358    if (ev->thread)
359      {
360         ecore_thread_cancel(ev->thread);
361         ev->thread = NULL;
362      }
363
364    if (ev->pipeline)
365      {
366        gst_element_set_state(ev->pipeline, GST_STATE_NULL);
367        gst_object_unref(ev->pipeline);
368        ev->pipeline = NULL;
369      }
370
371    EINA_LIST_FREE(ev->audio_streams, astream)
372      free(astream);
373    EINA_LIST_FREE(ev->video_streams, vstream)
374      free(vstream);
375
376    free(ev);
377
378    return 1;
379 }
380
381
382 static unsigned char
383 em_file_open(const char   *file,
384              Evas_Object  *obj,
385              void         *video)
386 {
387    Emotion_Gstreamer_Video *ev;
388    Eina_Strbuf *sbuf = NULL;
389    const char *uri;
390
391    ev = (Emotion_Gstreamer_Video *)video;
392
393    if (!file) return EINA_FALSE;
394    if (strstr(file, "://") == NULL)
395      {
396         sbuf = eina_strbuf_new();
397         eina_strbuf_append(sbuf, "file://");
398         if (strncmp(file, "./", 2) == 0)
399           file += 2;
400         if (strstr(file, ":/") != NULL)
401           { /* We absolutely need file:///C:/ under Windows, so adding it here */
402              eina_strbuf_append(sbuf, "/");
403           }
404         else if (*file != '/')
405           {
406              char tmp[PATH_MAX];
407
408              if (getcwd(tmp, PATH_MAX))
409                {
410                   eina_strbuf_append(sbuf, tmp);
411                   eina_strbuf_append(sbuf, "/");
412                }
413           }
414         eina_strbuf_append(sbuf, file);
415      }
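   /* Illustrative examples of the URI construction above (paths are
    * hypothetical): "./movie.ogv" with a cwd of "/home/user" becomes
    * "file:///home/user/movie.ogv", "C:/Videos/movie.avi" becomes
    * "file:///C:/Videos/movie.avi", and anything already containing
    * "://" (e.g. "http://example.com/a.ogv") is used as is. */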
416
417    uri = sbuf ? eina_strbuf_string_get(sbuf) : file;
418    DBG("setting file to '%s'", uri);
419    ev->pipeline = gstreamer_video_sink_new(ev, obj, uri);
420    if (sbuf) eina_strbuf_free(sbuf);
421
422    if (!ev->pipeline)
423      return EINA_FALSE;
424
425    ev->eos_bus = gst_pipeline_get_bus(GST_PIPELINE(ev->pipeline));
426    if (!ev->eos_bus)
427      {
428         ERR("could not get the bus");
429         return EINA_FALSE;
430      }
431
432    gst_bus_set_sync_handler(ev->eos_bus, _eos_sync_fct, ev);
433
434    /* Evas Object */
435    ev->obj = obj;
436
437    ev->position = 0.0;
438
439    return EINA_TRUE;
440 }
441
442 static void
443 em_file_close(void *video)
444 {
445    Emotion_Gstreamer_Video *ev;
446    Emotion_Audio_Stream *astream;
447    Emotion_Video_Stream *vstream;
448
449    ev = (Emotion_Gstreamer_Video *)video;
450    if (!ev)
451      return;
452
453    if (ev->eos_bus)
454      {
455         gst_object_unref(GST_OBJECT(ev->eos_bus));
456         ev->eos_bus = NULL;
457      }
458
459    if (ev->thread)
460      {
461         ecore_thread_cancel(ev->thread);
462         ev->thread = NULL;
463      }
464
465    if (ev->pipeline)
466      {
467         gst_element_set_state(ev->pipeline, GST_STATE_NULL);
468         gst_object_unref(ev->pipeline);
469         ev->pipeline = NULL;
470      }
471
472    /* we clear the stream lists */
473    EINA_LIST_FREE(ev->audio_streams, astream)
474      free(astream);
475    EINA_LIST_FREE(ev->video_streams, vstream)
476      free(vstream);
477    ev->pipeline_parsed = EINA_FALSE;
478
479    /* shutdown eos */
480    if (ev->metadata)
481      {
482         _free_metadata(ev->metadata);
483         ev->metadata = NULL;
484      }
485 }
486
487 static void
488 em_play(void   *video,
489         double  pos __UNUSED__)
490 {
491    Emotion_Gstreamer_Video *ev;
492
493    ev = (Emotion_Gstreamer_Video *)video;
494    if (!ev->pipeline) return ;
495
496    gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
497    ev->play = 1;
498    ev->play_started = 1;
499 }
500
501 static void
502 em_stop(void *video)
503 {
504    Emotion_Gstreamer_Video *ev;
505
506    ev = (Emotion_Gstreamer_Video *)video;
507
508    if (!ev->pipeline) return ;
509  
510    gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
511    ev->play = 0;
512 }
513
514 static void
515 em_size_get(void  *video,
516             int   *width,
517             int   *height)
518 {
519    Emotion_Gstreamer_Video *ev;
520    Emotion_Video_Stream      *vstream;
521
522    ev = (Emotion_Gstreamer_Video *)video;
523
524    if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
525      goto on_error;
526
527    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
528    if (vstream)
529      {
530         if (width) *width = vstream->width;
531         if (height) *height = vstream->height;
532
533         return ;
534      }
535
536  on_error:
537    if (width) *width = 0;
538    if (height) *height = 0;
539 }
540
541 static void
542 em_pos_set(void   *video,
543            double  pos)
544 {
545    Emotion_Gstreamer_Video *ev;
546    gboolean res;
547
548    ev = (Emotion_Gstreamer_Video *)video;
549
550    if (!ev->pipeline) return ;
551
552    if (ev->play)
553      res = gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
554
555    res = gst_element_seek(ev->pipeline, 1.0,
556                           GST_FORMAT_TIME,
557                           GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH,
558                           GST_SEEK_TYPE_SET,
559                           (gint64)(pos * (double)GST_SECOND),
560                           GST_SEEK_TYPE_NONE, -1);
561
562    if (ev->play)
563      res = gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
564 }
565
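/* Query the pipeline duration in GST_FORMAT_TIME; if that fails, fall back
 * to the per-stream lengths collected while parsing the pipeline. Returns
 * the length in seconds, or 0.0 if unknown. */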
566 static double
567 em_len_get(void *video)
568 {
569    Emotion_Gstreamer_Video *ev;
570    Emotion_Video_Stream *vstream;
571    Emotion_Audio_Stream *astream;
572    Eina_List *l;
573    GstFormat fmt;
574    gint64 val;
575    gboolean ret;
576
577    ev = video;
578    fmt = GST_FORMAT_TIME;
579
580    if (!ev->pipeline) return 0.0;
581
582    ret = gst_element_query_duration(ev->pipeline, &fmt, &val);
583    if (!ret)
584      goto fallback;
585
586    if (fmt != GST_FORMAT_TIME)
587      {
588         DBG("requested duration in time, but got %s instead.",
589             gst_format_get_name(fmt));
590         goto fallback;
591      }
592
593    if (val <= 0.0)
594      goto fallback;
595
596    return val / 1000000000.0;
597
598  fallback:
599    if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
600      return 0.0;
601
602    EINA_LIST_FOREACH(ev->audio_streams, l, astream)
603      if (astream->length_time >= 0)
604        return astream->length_time;
605
606    EINA_LIST_FOREACH(ev->video_streams, l, vstream)
607      if (vstream->length_time >= 0)
608        return vstream->length_time;
609
610    return 0.0;
611 }
612
613 static int
614 em_fps_num_get(void *video)
615 {
616    Emotion_Gstreamer_Video *ev;
617    Emotion_Video_Stream      *vstream;
618
619    ev = (Emotion_Gstreamer_Video *)video;
620
621    if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
622      return 0;
623
624    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
625    if (vstream)
626      return vstream->fps_num;
627
628    return 0;
629 }
630
631 static int
632 em_fps_den_get(void *video)
633 {
634    Emotion_Gstreamer_Video *ev;
635    Emotion_Video_Stream      *vstream;
636
637    ev = (Emotion_Gstreamer_Video *)video;
638
639    if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
640      return 1;
641
642    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
643    if (vstream)
644      return vstream->fps_den;
645
646    return 1;
647 }
648
649 static double
650 em_fps_get(void *video)
651 {
652    Emotion_Gstreamer_Video *ev;
653    Emotion_Video_Stream      *vstream;
654
655    ev = (Emotion_Gstreamer_Video *)video;
656
657    if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
658      return 0.0;
659
660    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
661    if (vstream)
662      return (double)vstream->fps_num / (double)vstream->fps_den;
663
664    return 0.0;
665 }
666
667 static double
668 em_pos_get(void *video)
669 {
670    Emotion_Gstreamer_Video *ev;
671    GstFormat fmt;
672    gint64 val;
673    gboolean ret;
674
675    ev = video;
676    fmt = GST_FORMAT_TIME;
677
678    if (!ev->pipeline) return 0.0;
679
680    ret = gst_element_query_position(ev->pipeline, &fmt, &val);
681    if (!ret)
682      return ev->position;
683
684    if (fmt != GST_FORMAT_TIME)
685      {
686         ERR("requested position in time, but got %s instead.",
687             gst_format_get_name(fmt));
688         return ev->position;
689      }
690
691    ev->position = val / 1000000000.0;
692    return ev->position;
693 }
694
695 static void
696 em_vis_set(void *video,
697            Emotion_Vis vis)
698 {
699    Emotion_Gstreamer_Video *ev;
700
701    ev = (Emotion_Gstreamer_Video *)video;
702
703    ev->vis = vis;
704 }
705
706 static Emotion_Vis
707 em_vis_get(void *video)
708 {
709    Emotion_Gstreamer_Video *ev;
710
711    ev = (Emotion_Gstreamer_Video *)video;
712
713    return ev->vis;
714 }
715
716 static Eina_Bool
717 em_vis_supported(void *ef __UNUSED__, Emotion_Vis vis)
718 {
719    const char *name;
720    GstElementFactory *factory;
721
722    if (vis == EMOTION_VIS_NONE)
723      return EINA_TRUE;
724
725    name = emotion_visualization_element_name_get(vis);
726    if (!name)
727      return EINA_FALSE;
728
729    factory = gst_element_factory_find(name);
730    if (!factory)
731      return EINA_FALSE;
732
733    gst_object_unref(factory);
734    return EINA_TRUE;
735 }
736
737 static double
738 em_ratio_get(void *video)
739 {
740    Emotion_Gstreamer_Video *ev;
741
742    ev = (Emotion_Gstreamer_Video *)video;
743
744    return ev->ratio;
745 }
746
747 static int
748 em_video_handled(void *video)
749 {
750    Emotion_Gstreamer_Video *ev;
751
752    ev = (Emotion_Gstreamer_Video *)video;
753
754    _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
755
756    if (!eina_list_count(ev->video_streams))
757      return 0;
758
759    return 1;
760 }
761
762 static int
763 em_audio_handled(void *video)
764 {
765    Emotion_Gstreamer_Video *ev;
766
767    ev = (Emotion_Gstreamer_Video *)video;
768
769    _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
770
771    if (!eina_list_count(ev->audio_streams))
772      return 0;
773
774    return 1;
775 }
776
777 static int
778 em_seekable(void *video __UNUSED__)
779 {
780    return 1;
781 }
782
783 static void
784 em_frame_done(void *video __UNUSED__)
785 {
786 }
787
788 static Emotion_Format
789 em_format_get(void *video)
790 {
791    Emotion_Gstreamer_Video *ev;
792    Emotion_Video_Stream    *vstream;
793
794    ev = (Emotion_Gstreamer_Video *)video;
795
796    if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
797      return EMOTION_FORMAT_NONE;
798
799    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
800    if (vstream)
801      {
802         switch (vstream->fourcc)
803           {
804            case GST_MAKE_FOURCC('I', '4', '2', '0'):
805               return EMOTION_FORMAT_I420;
806            case GST_MAKE_FOURCC('Y', 'V', '1', '2'):
807               return EMOTION_FORMAT_YV12;
808            case GST_MAKE_FOURCC('Y', 'U', 'Y', '2'):
809               return EMOTION_FORMAT_YUY2;
810            case GST_MAKE_FOURCC('A', 'R', 'G', 'B'):
811               return EMOTION_FORMAT_BGRA;
812            default:
813               return EMOTION_FORMAT_NONE;
814           }
815      }
816    return EMOTION_FORMAT_NONE;
817 }
818
819 static void
820 em_video_data_size_get(void *video, int *w, int *h)
821 {
822    Emotion_Gstreamer_Video *ev;
823    Emotion_Video_Stream    *vstream;
824
825    ev = (Emotion_Gstreamer_Video *)video;
826
827    if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
828      goto on_error;
829
830    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
831    if (vstream)
832      {
833         *w = vstream->width;
834         *h = vstream->height;
835
836         return ;
837      }
838
839  on_error:
840    *w = 0;
841    *h = 0;
842 }
843
844 static int
845 em_yuv_rows_get(void           *video __UNUSED__,
846                 int             w __UNUSED__,
847                 int             h __UNUSED__,
848                 unsigned char **yrows __UNUSED__,
849                 unsigned char **urows __UNUSED__,
850                 unsigned char **vrows __UNUSED__)
851 {
852    return 0;
853 }
854
855 static int
856 em_bgra_data_get(void *video __UNUSED__, unsigned char **bgra_data __UNUSED__)
857 {
858    return 0;
859 }
860
861 static void
862 em_event_feed(void *video __UNUSED__, int event __UNUSED__)
863 {
864 }
865
866 static void
867 em_event_mouse_button_feed(void *video __UNUSED__, int button __UNUSED__, int x __UNUSED__, int y __UNUSED__)
868 {
869 }
870
871 static void
872 em_event_mouse_move_feed(void *video __UNUSED__, int x __UNUSED__, int y __UNUSED__)
873 {
874 }
875
876 /* Video channels */
877 static int
878 em_video_channel_count(void *video)
879 {
880    Emotion_Gstreamer_Video *ev;
881
882    ev = (Emotion_Gstreamer_Video *)video;
883
884    _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
885
886    return eina_list_count(ev->video_streams);
887 }
888
889 static void
890 em_video_channel_set(void *video __UNUSED__,
891                      int   channel __UNUSED__)
892 {
893 #if 0
894    Emotion_Gstreamer_Video *ev;
895
896    ev = (Emotion_Gstreamer_Video *)video;
897
898    if (channel < 0) channel = 0;
899 #endif
900    /* FIXME: to be done... */
901 }
902
903 static int
904 em_video_channel_get(void *video)
905 {
906    Emotion_Gstreamer_Video *ev;
907
908    ev = (Emotion_Gstreamer_Video *)video;
909
910    _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
911
912    return ev->video_stream_nbr;
913 }
914
915 static const char *
916 em_video_channel_name_get(void *video __UNUSED__,
917                           int   channel __UNUSED__)
918 {
919    return NULL;
920 }
921
922 static void
923 em_video_channel_mute_set(void *video,
924                           int   mute)
925 {
926    Emotion_Gstreamer_Video *ev;
927
928    ev = (Emotion_Gstreamer_Video *)video;
929
930    ev->video_mute = mute;
931 }
932
933 static int
934 em_video_channel_mute_get(void *video)
935 {
936    Emotion_Gstreamer_Video *ev;
937
938    ev = (Emotion_Gstreamer_Video *)video;
939
940    return ev->video_mute;
941 }
942
943 /* Audio channels */
944
945 static int
946 em_audio_channel_count(void *video)
947 {
948    Emotion_Gstreamer_Video *ev;
949
950    ev = (Emotion_Gstreamer_Video *)video;
951
952    _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
953
954    return eina_list_count(ev->audio_streams);
955 }
956
957 static void
958 em_audio_channel_set(void *video __UNUSED__,
959                      int   channel __UNUSED__)
960 {
961 #if 0
962    Emotion_Gstreamer_Video *ev;
963
964    ev = (Emotion_Gstreamer_Video *)video;
965
966    if (channel < -1) channel = -1;
967 #endif
968    /* FIXME: to be done... */
969 }
970
971 static int
972 em_audio_channel_get(void *video)
973 {
974    Emotion_Gstreamer_Video *ev;
975
976    ev = (Emotion_Gstreamer_Video *)video;
977
978    _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
979
980    return ev->audio_stream_nbr;
981 }
982
983 static const char *
984 em_audio_channel_name_get(void *video __UNUSED__,
985                           int   channel __UNUSED__)
986 {
987    return NULL;
988 }
989
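/* Bit of playbin2's "flags" property that keeps the audio decode branch
 * enabled; only referenced by the commented-out code below. */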
990 #define GST_PLAY_FLAG_AUDIO (1 << 1)
991
992 static void
993 em_audio_channel_mute_set(void *video,
994                           int   mute)
995 {
996    Emotion_Gstreamer_Video *ev;
997    int flags;
998
999    ev = (Emotion_Gstreamer_Video *)video;
1000
1001    if (!ev->pipeline) return ;
1002
1003    ev->audio_mute = mute;
1004
1005    g_object_set(G_OBJECT(ev->pipeline), "mute", !!mute, NULL);
1006    /* This code should stop decoding only the audio stream, but it stops everything :( */
1007    /* g_object_get(G_OBJECT(ev->pipeline), "flags", &flags, NULL); */
1008    /* if (mute) */
1009    /*   flags &= ~GST_PLAY_FLAG_AUDIO; */
1010    /* else */
1011    /*   flags |= GST_PLAY_FLAG_AUDIO; */
1012    /* g_object_set(G_OBJECT(ev->pipeline), "flags", flags, NULL); */
1013    /* g_object_get(G_OBJECT(ev->pipeline), "flags", &flags, NULL); */
1014    /* fprintf(stderr, "flags-n: %x\n", flags); */
1015 }
1016
1017 static int
1018 em_audio_channel_mute_get(void *video)
1019 {
1020    Emotion_Gstreamer_Video *ev;
1021
1022    ev = (Emotion_Gstreamer_Video *)video;
1023
1024    return ev->audio_mute;
1025 }
1026
1027 static void
1028 em_audio_channel_volume_set(void  *video,
1029                             double vol)
1030 {
1031    Emotion_Gstreamer_Video *ev;
1032
1033    ev = (Emotion_Gstreamer_Video *)video;
1034
1035    if (!ev->pipeline) return ;
1036
1037    if (vol < 0.0)
1038      vol = 0.0;
1039    if (vol > 1.0)
1040      vol = 1.0;
1041    ev->volume = vol;
1042    g_object_set(G_OBJECT(ev->pipeline), "volume", vol, NULL);
1043 }
1044
1045 static double
1046 em_audio_channel_volume_get(void *video)
1047 {
1048    Emotion_Gstreamer_Video *ev;
1049
1050    ev = (Emotion_Gstreamer_Video *)video;
1051
1052    return ev->volume;
1053 }
1054
1055 /* spu stuff */
1056
1057 static int
1058 em_spu_channel_count(void *video __UNUSED__)
1059 {
1060    return 0;
1061 }
1062
1063 static void
1064 em_spu_channel_set(void *video __UNUSED__, int channel __UNUSED__)
1065 {
1066 }
1067
1068 static int
1069 em_spu_channel_get(void *video __UNUSED__)
1070 {
1071    return 1;
1072 }
1073
1074 static const char *
1075 em_spu_channel_name_get(void *video __UNUSED__, int channel __UNUSED__)
1076 {
1077    return NULL;
1078 }
1079
1080 static void
1081 em_spu_channel_mute_set(void *video __UNUSED__, int mute __UNUSED__)
1082 {
1083 }
1084
1085 static int
1086 em_spu_channel_mute_get(void *video __UNUSED__)
1087 {
1088    return 0;
1089 }
1090
1091 static int
1092 em_chapter_count(void *video __UNUSED__)
1093 {
1094    return 0;
1095 }
1096
1097 static void
1098 em_chapter_set(void *video __UNUSED__, int chapter __UNUSED__)
1099 {
1100 }
1101
1102 static int
1103 em_chapter_get(void *video __UNUSED__)
1104 {
1105    return 0;
1106 }
1107
1108 static const char *
1109 em_chapter_name_get(void *video __UNUSED__, int chapter __UNUSED__)
1110 {
1111    return NULL;
1112 }
1113
1114 static void
1115 em_speed_set(void *video __UNUSED__, double speed __UNUSED__)
1116 {
1117 }
1118
1119 static double
1120 em_speed_get(void *video __UNUSED__)
1121 {
1122    return 1.0;
1123 }
1124
1125 static int
1126 em_eject(void *video __UNUSED__)
1127 {
1128    return 1;
1129 }
1130
1131 static const char *
1132 em_meta_get(void *video, int meta)
1133 {
1134    Emotion_Gstreamer_Video *ev;
1135    const char *str = NULL;
1136
1137    ev = (Emotion_Gstreamer_Video *)video;
1138
1139    if (!ev || !ev->metadata) return NULL;
1140    switch (meta)
1141      {
1142       case META_TRACK_TITLE:
1143          str = ev->metadata->title;
1144          break;
1145       case META_TRACK_ARTIST:
1146          str = ev->metadata->artist;
1147          break;
1148       case  META_TRACK_ALBUM:
1149          str = ev->metadata->album;
1150          break;
1151       case META_TRACK_YEAR:
1152          str = ev->metadata->year;
1153          break;
1154       case META_TRACK_GENRE:
1155          str = ev->metadata->genre;
1156          break;
1157       case META_TRACK_COMMENT:
1158          str = ev->metadata->comment;
1159          break;
1160       case META_TRACK_DISCID:
1161          str = ev->metadata->disc_id;
1162          break;
1163       default:
1164          break;
1165      }
1166
1167    return str;
1168 }
1169
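/* Engine entry point called by emotion when this module is selected:
 * register the "emotion-gstreamer" log domain on first use, initialize the
 * backend through em_init() and hand back the module vtable. */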
1170 static Eina_Bool
1171 module_open(Evas_Object           *obj,
1172             const Emotion_Video_Module **module,
1173             void                 **video,
1174             Emotion_Module_Options *opt)
1175 {
1176    if (!module)
1177      return EINA_FALSE;
1178
1179    if (_emotion_gstreamer_log_domain < 0)
1180      {
1181         eina_threads_init();
1182         eina_log_threads_enable();
1183         _emotion_gstreamer_log_domain = eina_log_domain_register
1184           ("emotion-gstreamer", EINA_COLOR_LIGHTCYAN);
1185         if (_emotion_gstreamer_log_domain < 0)
1186           {
1187              EINA_LOG_CRIT("Could not register log domain 'emotion-gstreamer'");
1188              return EINA_FALSE;
1189           }
1190      }
1191
1192    if (!em_module.init(obj, video, opt))
1193      return EINA_FALSE;
1194
1195    eina_threads_init();
1196
1197    *module = &em_module;
1198    return EINA_TRUE;
1199 }
1200
1201 static void
1202 module_close(Emotion_Video_Module *module __UNUSED__,
1203              void                 *video)
1204 {
1205    em_module.shutdown(video);
1206
1207    eina_threads_shutdown();
1208 }
1209
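/* Module init: initialize GStreamer, register the static "emotion-sink"
 * video sink plugin and register this backend with emotion under the name
 * "gstreamer". */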
1210 Eina_Bool
1211 gstreamer_module_init(void)
1212 {
1213    GError *error = NULL;
1214
1215    if (!gst_init_check(NULL, NULL, &error))
1216      {
1217         EINA_LOG_CRIT("Could not init GStreamer");
1218         return EINA_FALSE;
1219      }
1220
1221    if (gst_plugin_register_static(GST_VERSION_MAJOR, GST_VERSION_MINOR,
1222                                   "emotion-sink",
1223                                   "video sink plugin for Emotion",
1224                                   gstreamer_plugin_init,
1225                                   VERSION,
1226                                   "LGPL",
1227                                   "Enlightenment",
1228                                   PACKAGE,
1229                                   "http://www.enlightenment.org/") == FALSE)
1230      {
1231         EINA_LOG_CRIT("Could not load static gstreamer video sink for Emotion.");
1232         return EINA_FALSE;
1233      }
1234
1235    return _emotion_module_register("gstreamer", module_open, module_close);
1236 }
1237
1238 void
1239 gstreamer_module_shutdown(void)
1240 {
1241    _emotion_module_unregister("gstreamer");
1242
1243    gst_deinit();
1244 }
1245
1246 #ifndef EMOTION_STATIC_BUILD_GSTREAMER
1247
1248 EINA_MODULE_INIT(gstreamer_module_init);
1249 EINA_MODULE_SHUTDOWN(gstreamer_module_shutdown);
1250
1251 #endif
1252
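/* GstTagForeachFunc: copy the tags we care about (title, album, artist,
 * genre, comment, date, track number, disc id) into ev->metadata, freeing
 * any previously stored value. */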
1253 static void
1254 _for_each_tag(GstTagList const* list,
1255                     gchar const* tag,
1256                     void *data)
1257 {
1258    Emotion_Gstreamer_Video *ev;
1259    int i;
1260    int count;
1261
1262
1263    ev = (Emotion_Gstreamer_Video*)data;
1264
1265    if (!ev || !ev->metadata) return;
1266
1267    count = gst_tag_list_get_tag_size(list, tag);
1268
1269    for (i = 0; i < count; i++)
1270      {
1271         if (!strcmp(tag, GST_TAG_TITLE))
1272           {
1273              char *str;
1274              if (ev->metadata->title) g_free(ev->metadata->title);
1275              if (gst_tag_list_get_string(list, GST_TAG_TITLE, &str))
1276                ev->metadata->title = str;
1277              else
1278                ev->metadata->title = NULL;
1279              break;
1280           }
1281         if (!strcmp(tag, GST_TAG_ALBUM))
1282           {
1283              gchar *str;
1284              if (ev->metadata->album) g_free(ev->metadata->album);
1285              if (gst_tag_list_get_string(list, GST_TAG_ALBUM, &str))
1286                ev->metadata->album = str;
1287              else
1288                ev->metadata->album = NULL;
1289              break;
1290           }
1291         if (!strcmp(tag, GST_TAG_ARTIST))
1292           {
1293              gchar *str;
1294              if (ev->metadata->artist) g_free( ev->metadata->artist);
1295              if (gst_tag_list_get_string(list, GST_TAG_ARTIST, &str))
1296                ev->metadata->artist = str;
1297              else
1298                ev->metadata->artist = NULL;
1299              break;
1300           }
1301         if (!strcmp(tag, GST_TAG_GENRE))
1302           {
1303              gchar *str;
1304              if (ev->metadata->genre) g_free( ev->metadata->genre);
1305              if (gst_tag_list_get_string(list, GST_TAG_GENRE, &str))
1306                ev->metadata->genre = str;
1307              else
1308                ev->metadata->genre = NULL;
1309              break;
1310           }
1311         if (!strcmp(tag, GST_TAG_COMMENT))
1312           {
1313              gchar *str;
1314              if (ev->metadata->comment) g_free(ev->metadata->comment);
1315              if (gst_tag_list_get_string(list, GST_TAG_COMMENT, &str))
1316                ev->metadata->comment = str;
1317              else
1318                ev->metadata->comment = NULL;
1319              break;
1320           }
1321         if (!strcmp(tag, GST_TAG_DATE))
1322           {
1323              gchar *str;
1324              const GValue *date;
1325              if (ev->metadata->year) g_free(ev->metadata->year);
1326              date = gst_tag_list_get_value_index(list, GST_TAG_DATE, 0);
1327              if (date)
1328                str = g_strdup_value_contents(date);
1329              else
1330                str = NULL;
1331              ev->metadata->year = str;
1332              break;
1333           }
1334
1335         if (!strcmp(tag, GST_TAG_TRACK_NUMBER))
1336           {
1337              gchar *str;
1338              const GValue *track;
1339              if (ev->metadata->count) g_free( ev->metadata->count);
1340              track = gst_tag_list_get_value_index(list, GST_TAG_TRACK_NUMBER, 0);
1341              if (track)
1342                str = g_strdup_value_contents(track);
1343              else
1344                str = NULL;
1345              ev->metadata->count = str;
1346              break;
1347           }
1348
1349 #ifdef GST_TAG_CDDA_CDDB_DISCID
1350         if (!strcmp(tag, GST_TAG_CDDA_CDDB_DISCID))
1351           {
1352              gchar *str;
1353              const GValue *discid;
1354              if (ev->metadata->disc_id) g_free(ev->metadata->disc_id);
1355              discid = gst_tag_list_get_value_index(list, GST_TAG_CDDA_CDDB_DISCID, 0);
1356              if (discid)
1357                str = g_strdup_value_contents(discid);
1358              else
1359                str = NULL;
1360              ev->metadata->disc_id = str;
1361              break;
1362           }
1363 #endif
1364      }
1365
1366 }
1367
1368 static void
1369 _free_metadata(Emotion_Gstreamer_Metadata *m)
1370 {
1371   if (!m) return;
1372
1373   if (m->title)   g_free(m->title);
1374   if (m->album)   g_free(m->album);
1375   if (m->artist)  g_free(m->artist);
1376   if (m->genre)   g_free(m->genre);
1377   if (m->comment) g_free(m->comment);
1378   if (m->year)    g_free(m->year);
1379   if (m->count)   g_free(m->count);
1380   if (m->disc_id) g_free(m->disc_id);
1381
1382   free(m);
1383 }
1384
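/* Runs in the main loop (dispatched from _eos_sync_fct): handle error, EOS,
 * tag and async-done messages carried by the Emotion_Gstreamer_Message. */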
1385 static void
1386 _eos_main_fct(void *data)
1387 {
1388    Emotion_Gstreamer_Message *send;
1389    Emotion_Gstreamer_Video *ev;
1390    GstMessage              *msg;
1391
1392    send = data;
1393    ev = send->ev;
1394    msg = send->msg;
1395
1396    if (ev->play_started)
1397      {
1398         _emotion_playback_started(ev->obj);
1399         ev->play_started = 0;
1400      }
1401
1402    switch (GST_MESSAGE_TYPE(msg))
1403      {
1404       case GST_MESSAGE_ERROR:
1405         {
1406            gchar *debug;
1407            GError *err;
1408
1409            gst_message_parse_error(msg, &err, &debug);
1410            g_free(debug);
1411
1412            ERR("Error: %s", err->message);
1413            g_error_free(err);
1414
1415            break;
1416         }
1417       case GST_MESSAGE_EOS:
1418          ev->play = 0;
1419          _emotion_decode_stop(ev->obj);
1420          _emotion_playback_finished(ev->obj);
1421          break;
1422       case GST_MESSAGE_TAG:
1423         {
1424            GstTagList *new_tags;
1425            gst_message_parse_tag(msg, &new_tags);
1426            if (new_tags)
1427              {
1428                 gst_tag_list_foreach(new_tags,
1429                                      (GstTagForeachFunc)_for_each_tag,
1430                                      ev);
1431                 gst_tag_list_free(new_tags);
1432              }
1433            break;
1434         }
1435       case GST_MESSAGE_ASYNC_DONE:
1436          _emotion_seek_done(ev->obj);
1437          break;
1438       default:
1439          ERR("bus says: %s [%i]",
1440              GST_MESSAGE_SRC_NAME(msg),
1441              GST_MESSAGE_TYPE(msg));
1442          break;
1443      }
1444
1445    emotion_gstreamer_message_free(send);
1446 }
1447
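/* Bus sync handler, called from GStreamer's streaming threads: forward the
 * interesting messages to the main loop through
 * ecore_main_loop_thread_safe_call() and always drop them from the bus. */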
1448 static GstBusSyncReply
1449 _eos_sync_fct(GstBus *bus, GstMessage *msg, gpointer data)
1450 {
1451    Emotion_Gstreamer_Video *ev = data;
1452    Emotion_Gstreamer_Message *send;
1453
1454    switch (GST_MESSAGE_TYPE(msg))
1455      {
1456       case GST_MESSAGE_ERROR:
1457       case GST_MESSAGE_EOS:
1458       case GST_MESSAGE_TAG:
1459       case GST_MESSAGE_ASYNC_DONE:
1460          send = emotion_gstreamer_message_alloc(ev, msg);
1461
1462          if (send) ecore_main_loop_thread_safe_call(_eos_main_fct, send);
1463
1464          break;
1465
1466       default:
1467          WRN("bus says: %s [%i]",
1468              GST_MESSAGE_SRC_NAME(msg),
1469              GST_MESSAGE_TYPE(msg));
1470          break;
1471      }
1472
1473    return GST_BUS_DROP;
1474 }
1475
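/* Inspect the playbin2 pipeline once it has prerolled: count the audio and
 * video streams, read size, framerate, fourcc and duration from each pad's
 * negotiated caps, set up a visualization stream for audio-only media and
 * finally report the open as done to emotion. */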
1476 Eina_Bool
1477 _emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev,
1478                                         Eina_Bool force)
1479 {
1480    GstStateChangeReturn res;
1481    int i;
1482
1483    if (ev->pipeline_parsed)
1484      return EINA_TRUE;
1485
1486    if (force && ev->thread)
1487      {
1488         ecore_thread_cancel(ev->thread);
1489         ev->thread = NULL;
1490      }
1491
1492    if (ev->thread)
1493      return EINA_FALSE;
1494
1495    res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
1496    if (!(res == GST_STATE_CHANGE_SUCCESS
1497          || res == GST_STATE_CHANGE_NO_PREROLL))
1498      {
1499         /** NOTE: set GST_DEBUG_DUMP_DOT_DIR=/tmp and EMOTION_ENGINE=gstreamer to have the $EMOTION_GSTREAMER_DOT file saved in '/tmp' */
1500         /** then run: dot -Tpng -o emotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
1501         if (getenv("EMOTION_GSTREAMER_DOT"))
1502           GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline),
1503                                             GST_DEBUG_GRAPH_SHOW_ALL,
1504                                             getenv("EMOTION_GSTREAMER_DOT"));
1505
1506         ERR("Unable to get the pipeline to a stable state.");
1507         return EINA_FALSE;
1508      }
1509
1510    g_object_get(G_OBJECT(ev->pipeline),
1511                 "n-audio", &ev->audio_stream_nbr,
1512                 "n-video", &ev->video_stream_nbr,
1513                 NULL);
1514
1515    if ((ev->video_stream_nbr == 0) && (ev->audio_stream_nbr == 0))
1516      {
1517         ERR("No audio nor video stream found");
1518         return EINA_FALSE;
1519      }
1520
1521    /* video stream */
1522    for (i = 0; i < ev->video_stream_nbr; i++)
1523      {
1524         Emotion_Video_Stream *vstream;
1525         GstPad       *pad = NULL;
1526         GstCaps      *caps;
1527         GstStructure *structure;
1528         GstQuery     *query;
1529         const GValue *val;
1530         gchar        *str;
1531
1532         gdouble length_time = 0.0;
1533         gint width;
1534         gint height;
1535         gint fps_num;
1536         gint fps_den;
1537         guint32 fourcc = 0;
1538
1539         g_signal_emit_by_name(ev->pipeline, "get-video-pad", i, &pad);
1540         if (!pad)
1541           continue;
1542
1543         caps = gst_pad_get_negotiated_caps(pad);
1544         if (!caps)
1545           goto unref_pad_v;
1546         structure = gst_caps_get_structure(caps, 0);
1547         str = gst_caps_to_string(caps);
1548
1549         if (!gst_structure_get_int(structure, "width", &width))
1550           goto unref_caps_v;
1551         if (!gst_structure_get_int(structure, "height", &height))
1552           goto unref_caps_v;
1553         if (!gst_structure_get_fraction(structure, "framerate", &fps_num, &fps_den))
1554           goto unref_caps_v;
1555
1556         if (g_str_has_prefix(str, "video/x-raw-yuv"))
1557           {
1558              val = gst_structure_get_value(structure, "format");
1559              fourcc = gst_value_get_fourcc(val);
1560           }
1561         else if (g_str_has_prefix(str, "video/x-raw-rgb"))
1562           fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
1563         else
1564           goto unref_caps_v;
1565
1566         query = gst_query_new_duration(GST_FORMAT_TIME);
1567         if (gst_pad_peer_query(pad, query))
1568           {
1569              gint64 t;
1570
1571              gst_query_parse_duration(query, NULL, &t);
1572              length_time = (double)t / (double)GST_SECOND;
1573           }
1574         else
1575           goto unref_query_v;
1576
1577         vstream = emotion_video_stream_new(ev);
1578         if (!vstream) goto unref_query_v;
1579
1580         vstream->length_time = length_time;
1581         vstream->width = width;
1582         vstream->height = height;
1583         vstream->fps_num = fps_num;
1584         vstream->fps_den = fps_den;
1585         vstream->fourcc = fourcc;
1586         vstream->index = i;
1587
1588      unref_query_v:
1589         gst_query_unref(query);
1590      unref_caps_v:
        g_free(str); /* the string returned by gst_caps_to_string() would otherwise leak */
1591         gst_caps_unref(caps);
1592      unref_pad_v:
1593         gst_object_unref(pad);
1594      }
1595
1596    /* Audio streams */
1597    for (i = 0; i < ev->audio_stream_nbr; i++)
1598      {
1599         Emotion_Audio_Stream *astream;
1600         GstPad       *pad;
1601         GstCaps      *caps;
1602         GstStructure *structure;
1603         GstQuery     *query;
1604
1605         gdouble length_time = 0.0;
1606         gint channels;
1607         gint samplerate;
1608
1609         g_signal_emit_by_name(ev->pipeline, "get-audio-pad", i, &pad);
1610         if (!pad)
1611           continue;
1612
1613         caps = gst_pad_get_negotiated_caps(pad);
1614         if (!caps)
1615           goto unref_pad_a;
1616         structure = gst_caps_get_structure(caps, 0);
1617
1618         if (!gst_structure_get_int(structure, "channels", &channels))
1619           goto unref_caps_a;
1620         if (!gst_structure_get_int(structure, "rate", &samplerate))
1621           goto unref_caps_a;
1622
1623         query = gst_query_new_duration(GST_FORMAT_TIME);
1624         if (gst_pad_peer_query(pad, query))
1625           {
1626              gint64 t;
1627
1628              gst_query_parse_duration(query, NULL, &t);
1629              length_time = (double)t / (double)GST_SECOND;
1630           }
1631         else
1632           goto unref_query_a;
1633
1634         astream = calloc(1, sizeof(Emotion_Audio_Stream));
1635         if (!astream) continue;
1636         ev->audio_streams = eina_list_append(ev->audio_streams, astream);
1637         if (eina_error_get())
1638           {
1639              free(astream);
1640              continue;
1641           }
1642
1643         astream->length_time = length_time;
1644         astream->channels = channels;
1645         astream->samplerate = samplerate;
1646
1647      unref_query_a:
1648         gst_query_unref(query);
1649      unref_caps_a:
1650         gst_caps_unref(caps);
1651      unref_pad_a:
1652         gst_object_unref(pad);
1653      }
1654
1655    /* Visualization sink */
1656    if (ev->video_stream_nbr == 0)
1657      {
1658         GstElement *vis = NULL;
1659         Emotion_Video_Stream *vstream;
1660         Emotion_Audio_Stream *astream;
1661         gint flags;
1662         const char *vis_name;
1663
1664         if (!(vis_name = emotion_visualization_element_name_get(ev->vis)))
1665           {
1666              WRN("no visualization element name for %d", ev->vis);
1667              goto finalize;
1668           }
1669
1670         astream = eina_list_data_get(ev->audio_streams);
1671
1672         vis = gst_element_factory_make(vis_name, "vissink");
1673         vstream = emotion_video_stream_new(ev);
1674         if (!vstream)
1675           {
1676              WRN("could not create visualization stream");
1677              goto finalize;
                }
1678
1679         vstream->length_time = astream->length_time;
1680         vstream->width = 320;
1681         vstream->height = 200;
1682         vstream->fps_num = 25;
1683         vstream->fps_den = 1;
1684         vstream->fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
1685
1686         g_object_set(G_OBJECT(ev->pipeline), "vis-plugin", vis, NULL);
1687         g_object_get(G_OBJECT(ev->pipeline), "flags", &flags, NULL);
1688         flags |= 0x00000008; /* playbin2's GST_PLAY_FLAG_VIS: enable the visualization branch */
1689         g_object_set(G_OBJECT(ev->pipeline), "flags", flags, NULL);
1690      }
1691
1692  finalize:
1693
1694    ev->video_stream_nbr = eina_list_count(ev->video_streams);
1695    ev->audio_stream_nbr = eina_list_count(ev->audio_streams);
1696
1697    if (ev->video_stream_nbr == 1)
1698      {
1699        Emotion_Video_Stream *vstream;
1700
1701        vstream = eina_list_data_get(ev->video_streams);
1702        ev->ratio = (double)vstream->width / (double)vstream->height;
1703        _emotion_frame_resize(ev->obj, vstream->width, vstream->height, ev->ratio);
1704      }
1705
1706    {
1707      /* recap of what was detected: */
1708      Emotion_Video_Stream *vstream;
1709      Emotion_Audio_Stream *astream;
1710
1711      vstream = eina_list_data_get(ev->video_streams);
1712      if (vstream)
1713        {
1714          DBG("video size=%dx%d, fps=%d/%d, "
1715              "fourcc=%"GST_FOURCC_FORMAT", length=%"GST_TIME_FORMAT,
1716              vstream->width, vstream->height, vstream->fps_num, vstream->fps_den,
1717              GST_FOURCC_ARGS(vstream->fourcc),
1718              GST_TIME_ARGS((guint64)(vstream->length_time * GST_SECOND)));
1719        }
1720
1721      astream = eina_list_data_get(ev->audio_streams);
1722      if (astream)
1723        {
1724          DBG("audio channels=%d, rate=%d, length=%"GST_TIME_FORMAT,
1725              astream->channels, astream->samplerate,
1726              GST_TIME_ARGS((guint64)(astream->length_time * GST_SECOND)));
1727        }
1728    }
1729
1730    if (ev->metadata)
1731      _free_metadata(ev->metadata);
1732    ev->metadata = calloc(1, sizeof(Emotion_Gstreamer_Metadata));
1733
1734    em_audio_channel_volume_set(ev, ev->volume);
1735    em_audio_channel_mute_set(ev, ev->audio_mute);
1736
1737    if (ev->play_started)
1738      {
1739         _emotion_playback_started(ev->obj);
1740         ev->play_started = 0;
1741      }
1742
1743    _emotion_open_done(ev->obj);
1744    ev->pipeline_parsed = EINA_TRUE;
1745
1746    return EINA_TRUE;
1747 }