6 #include "emotion_private.h"
7 #include "emotion_gstreamer.h"
/* Eina log domain used by this module's DBG/ERR/WRN macros;
 * registered in module_open(), -1 means "not registered yet". */
10 int _emotion_gstreamer_log_domain = -1;
12 /* Callbacks used to receive bus messages (EOS, tags, errors) and to
 * release the metadata cache. */
13 static void _for_each_tag (GstTagList const* list, gchar const* tag, void *data);
14 static void _free_metadata (Emotion_Gstreamer_Metadata *m);
18 static unsigned char em_init (Evas_Object *obj,
20 Emotion_Module_Options *opt);
22 static int em_shutdown (void *video);
24 static unsigned char em_file_open (const char *file,
28 static void em_file_close (void *video);
30 static void em_play (void *video,
33 static void em_stop (void *video);
35 static void em_size_get (void *video,
39 static void em_pos_set (void *video,
43 static double em_len_get (void *video);
45 static int em_fps_num_get (void *video);
47 static int em_fps_den_get (void *video);
49 static double em_fps_get (void *video);
51 static double em_pos_get (void *video);
53 static void em_vis_set (void *video,
56 static Emotion_Vis em_vis_get (void *video);
58 static Eina_Bool em_vis_supported (void *video,
61 static double em_ratio_get (void *video);
63 static int em_video_handled (void *video);
65 static int em_audio_handled (void *video);
67 static int em_seekable (void *video);
69 static void em_frame_done (void *video);
71 static Emotion_Format em_format_get (void *video);
73 static void em_video_data_size_get (void *video,
77 static int em_yuv_rows_get (void *video,
80 unsigned char **yrows,
81 unsigned char **urows,
82 unsigned char **vrows);
84 static int em_bgra_data_get (void *video,
85 unsigned char **bgra_data);
87 static void em_event_feed (void *video,
90 static void em_event_mouse_button_feed (void *video,
95 static void em_event_mouse_move_feed (void *video,
99 static int em_video_channel_count (void *video);
101 static void em_video_channel_set (void *video,
104 static int em_video_channel_get (void *video);
106 static const char *em_video_channel_name_get (void *video,
109 static void em_video_channel_mute_set (void *video,
112 static int em_video_channel_mute_get (void *video);
114 static int em_audio_channel_count (void *video);
116 static void em_audio_channel_set (void *video,
119 static int em_audio_channel_get (void *video);
121 static const char *em_audio_channel_name_get (void *video,
124 static void em_audio_channel_mute_set (void *video,
127 static int em_audio_channel_mute_get (void *video);
129 static void em_audio_channel_volume_set (void *video,
132 static double em_audio_channel_volume_get (void *video);
134 static int em_spu_channel_count (void *video);
136 static void em_spu_channel_set (void *video,
139 static int em_spu_channel_get (void *video);
141 static const char *em_spu_channel_name_get (void *video,
144 static void em_spu_channel_mute_set (void *video,
147 static int em_spu_channel_mute_get (void *video);
149 static int em_chapter_count (void *video);
151 static void em_chapter_set (void *video,
154 static int em_chapter_get (void *video);
156 static const char *em_chapter_name_get (void *video,
159 static void em_speed_set (void *video,
162 static double em_speed_get (void *video);
164 static int em_eject (void *video);
166 static const char *em_meta_get (void *video,
169 static GstBusSyncReply _eos_sync_fct(GstBus *bus,
173 /* Module interface */
/* Backend v-table: wires the em_* callbacks of this file into Emotion's
 * Emotion_Video_Module interface.  Entry order must match the struct
 * declaration.  NOTE(review): the opening brace and the first entries
 * (init, play, stop, ...) are elided in this extract — confirm against
 * the full file. */
175 static Emotion_Video_Module em_module =
178 em_shutdown, /* shutdown */
179 em_file_open, /* file_open */
180 em_file_close, /* file_close */
183 em_size_get, /* size_get */
184 em_pos_set, /* pos_set */
185 em_len_get, /* len_get */
186 em_fps_num_get, /* fps_num_get */
187 em_fps_den_get, /* fps_den_get */
188 em_fps_get, /* fps_get */
189 em_pos_get, /* pos_get */
190 em_vis_set, /* vis_set */
191 em_vis_get, /* vis_get */
192 em_vis_supported, /* vis_supported */
193 em_ratio_get, /* ratio_get */
194 em_video_handled, /* video_handled */
195 em_audio_handled, /* audio_handled */
196 em_seekable, /* seekable */
197 em_frame_done, /* frame_done */
198 em_format_get, /* format_get */
199 em_video_data_size_get, /* video_data_size_get */
200 em_yuv_rows_get, /* yuv_rows_get */
201 em_bgra_data_get, /* bgra_data_get */
202 em_event_feed, /* event_feed */
203 em_event_mouse_button_feed, /* event_mouse_button_feed */
204 em_event_mouse_move_feed, /* event_mouse_move_feed */
205 em_video_channel_count, /* video_channel_count */
206 em_video_channel_set, /* video_channel_set */
207 em_video_channel_get, /* video_channel_get */
208 em_video_channel_name_get, /* video_channel_name_get */
209 em_video_channel_mute_set, /* video_channel_mute_set */
210 em_video_channel_mute_get, /* video_channel_mute_get */
211 em_audio_channel_count, /* audio_channel_count */
212 em_audio_channel_set, /* audio_channel_set */
213 em_audio_channel_get, /* audio_channel_get */
214 em_audio_channel_name_get, /* audio_channel_name_get */
215 em_audio_channel_mute_set, /* audio_channel_mute_set */
216 em_audio_channel_mute_get, /* audio_channel_mute_get */
217 em_audio_channel_volume_set, /* audio_channel_volume_set */
218 em_audio_channel_volume_get, /* audio_channel_volume_get */
219 em_spu_channel_count, /* spu_channel_count */
220 em_spu_channel_set, /* spu_channel_set */
221 em_spu_channel_get, /* spu_channel_get */
222 em_spu_channel_name_get, /* spu_channel_name_get */
223 em_spu_channel_mute_set, /* spu_channel_mute_set */
224 em_spu_channel_mute_get, /* spu_channel_mute_get */
225 em_chapter_count, /* chapter_count */
226 em_chapter_set, /* chapter_set */
227 em_chapter_get, /* chapter_get */
228 em_chapter_name_get, /* chapter_name_get */
229 em_speed_set, /* speed_set */
230 em_speed_get, /* speed_get */
231 em_eject, /* eject */
232 em_meta_get, /* meta_get */
/* Allocate a zero-filled Emotion_Video_Stream and append it to
 * ev->video_streams.  Returns NULL when ev is NULL or allocation fails.
 * NOTE(review): the eina_error_get() failure branch and the final
 * "return vstream;" are elided in this extract. */
236 static Emotion_Video_Stream *
237 emotion_video_stream_new(Emotion_Gstreamer_Video *ev)
239 Emotion_Video_Stream *vstream;
241 if (!ev) return NULL;
243 vstream = (Emotion_Video_Stream *)calloc(1, sizeof(Emotion_Video_Stream));
244 if (!vstream) return NULL;
246 ev->video_streams = eina_list_append(ev->video_streams, vstream);
/* eina_list_append() reports failure via Eina's error state, not its
 * return value — hence the explicit check. */
247 if (eina_error_get())
/* Detach a stream record from ev->video_streams and release it.
 * NOTE(review): the free() of vstream itself is elided here. */
256 emotion_video_stream_free(Emotion_Gstreamer_Video *ev, Emotion_Video_Stream *vstream)
258 if (!ev || !vstream) return;
260 ev->video_streams = eina_list_remove(ev->video_streams, vstream);
/* Map an Emotion_Vis enum value to the corresponding GStreamer element
 * name.  The strings are the element names registered by GStreamer's
 * goom/libvisual plugins; some spellings deliberately differ from the
 * enum names (e.g. "plazma", "lv_analyzer") — presumably matching the
 * installed plugin registry; confirm against gst-inspect. */
265 emotion_visualization_element_name_get(Emotion_Vis visualisation)
267 switch (visualisation)
269 case EMOTION_VIS_NONE:
271 case EMOTION_VIS_GOOM:
273 case EMOTION_VIS_LIBVISUAL_BUMPSCOPE:
274 return "libvisual_bumpscope";
275 case EMOTION_VIS_LIBVISUAL_CORONA:
276 return "libvisual_corona";
277 case EMOTION_VIS_LIBVISUAL_DANCING_PARTICLES:
278 return "libvisual_dancingparticles";
279 case EMOTION_VIS_LIBVISUAL_GDKPIXBUF:
280 return "libvisual_gdkpixbuf";
281 case EMOTION_VIS_LIBVISUAL_G_FORCE:
282 return "libvisual_G-Force";
283 case EMOTION_VIS_LIBVISUAL_GOOM:
284 return "libvisual_goom";
285 case EMOTION_VIS_LIBVISUAL_INFINITE:
286 return "libvisual_infinite";
287 case EMOTION_VIS_LIBVISUAL_JAKDAW:
288 return "libvisual_jakdaw";
289 case EMOTION_VIS_LIBVISUAL_JESS:
290 return "libvisual_jess";
291 case EMOTION_VIS_LIBVISUAL_LV_ANALYSER:
292 return "libvisual_lv_analyzer";
293 case EMOTION_VIS_LIBVISUAL_LV_FLOWER:
294 return "libvisual_lv_flower";
295 case EMOTION_VIS_LIBVISUAL_LV_GLTEST:
296 return "libvisual_lv_gltest";
297 case EMOTION_VIS_LIBVISUAL_LV_SCOPE:
298 return "libvisual_lv_scope";
299 case EMOTION_VIS_LIBVISUAL_MADSPIN:
300 return "libvisual_madspin";
301 case EMOTION_VIS_LIBVISUAL_NEBULUS:
302 return "libvisual_nebulus";
303 case EMOTION_VIS_LIBVISUAL_OINKSIE:
304 return "libvisual_oinksie";
305 case EMOTION_VIS_LIBVISUAL_PLASMA:
306 return "libvisual_plazma";
/* Module init callback: allocate the per-instance Emotion_Gstreamer_Video
 * state, initialize GStreamer, and set defaults (no visualization,
 * playback not started).  NOTE(review): error handling and the final
 * assignment of *emotion_video are elided in this extract. */
313 em_init(Evas_Object *obj,
314 void **emotion_video,
315 Emotion_Module_Options *opt __UNUSED__)
317 Emotion_Gstreamer_Video *ev;
323 ev = calloc(1, sizeof(Emotion_Gstreamer_Video));
328 /* Initialization of gstreamer */
/* gst_init_check() reports failure via GError instead of aborting. */
329 if (!gst_init_check(NULL, NULL, &error))
334 ev->vis = EMOTION_VIS_NONE;
336 ev->play_started = 0;
/* Module shutdown callback: cancel the worker thread, drop the bus and
 * pipeline references (pipeline set to NULL state first), then free the
 * audio/video stream lists.  NOTE(review): the per-stream free bodies
 * and the final return are elided here. */
349 em_shutdown(void *video)
351 Emotion_Gstreamer_Video *ev;
352 Emotion_Audio_Stream *astream;
353 Emotion_Video_Stream *vstream;
355 ev = (Emotion_Gstreamer_Video *)video;
361 ecore_thread_cancel(ev->thread);
367 gst_object_unref(GST_OBJECT(ev->eos_bus));
/* NULL state releases the pipeline's resources before the unref. */
373 gst_element_set_state(ev->pipeline, GST_STATE_NULL);
374 gst_object_unref(ev->pipeline);
378 EINA_LIST_FREE(ev->audio_streams, astream)
380 EINA_LIST_FREE(ev->video_streams, vstream)
/* Open a media file: normalize the path into a URI (prefixing "file://"
 * and resolving relative paths via getcwd), build the sink pipeline via
 * gstreamer_video_sink_new(), and install the synchronous bus handler
 * that forwards EOS/error/tag messages to the main loop. */
390 em_file_open(const char *file,
394 Emotion_Gstreamer_Video *ev;
395 Eina_Strbuf *sbuf = NULL;
398 ev = (Emotion_Gstreamer_Video *)video;
400 if (!file) return EINA_FALSE;
/* No scheme present — treat the argument as a local path and build a
 * file:// URI out of it. */
401 if (strstr(file, "://") == NULL)
403 sbuf = eina_strbuf_new();
404 eina_strbuf_append(sbuf, "file://");
405 if (strncmp(file, "./", 2) == 0)
407 if (strstr(file, ":/") != NULL)
408 { /* We absolutely need file:///C:/ under Windows, so adding it here */
409 eina_strbuf_append(sbuf, "/");
/* Relative path: make it absolute with the current working directory. */
411 else if (*file != '/')
415 if (getcwd(tmp, PATH_MAX))
417 eina_strbuf_append(sbuf, tmp);
418 eina_strbuf_append(sbuf, "/");
421 eina_strbuf_append(sbuf, file);
424 ev->play_started = 0;
426 uri = sbuf ? eina_strbuf_string_get(sbuf) : file;
427 DBG("setting file to '%s'", uri);
428 ev->pipeline = gstreamer_video_sink_new(ev, obj, uri);
429 if (sbuf) eina_strbuf_free(sbuf);
434 ev->eos_bus = gst_pipeline_get_bus(GST_PIPELINE(ev->pipeline));
437 ERR("could not get the bus");
/* _eos_sync_fct runs on the streaming thread and relays selected
 * messages to the main loop (see _eos_sync_fct below). */
441 gst_bus_set_sync_handler(ev->eos_bus, _eos_sync_fct, ev);
/* Close the current file: release the bus, cancel the worker thread,
 * tear down the pipeline, clear stream lists, and reset parse/playback
 * state plus cached metadata. */
452 em_file_close(void *video)
454 Emotion_Gstreamer_Video *ev;
455 Emotion_Audio_Stream *astream;
456 Emotion_Video_Stream *vstream;
458 ev = (Emotion_Gstreamer_Video *)video;
464 gst_object_unref(GST_OBJECT(ev->eos_bus));
470 ecore_thread_cancel(ev->thread);
476 gst_element_set_state(ev->pipeline, GST_STATE_NULL);
477 gst_object_unref(ev->pipeline);
481 /* we clear the stream lists */
482 EINA_LIST_FREE(ev->audio_streams, astream)
484 EINA_LIST_FREE(ev->video_streams, vstream)
/* Force a re-parse of stream info on the next open. */
486 ev->pipeline_parsed = EINA_FALSE;
487 ev->play_started = 0;
492 _free_metadata(ev->metadata);
/* em_play: switch the pipeline to PLAYING; the 'pos' argument is unused
 * (seeking is done separately via em_pos_set).  NOTE(review): the
 * function header line is elided in this extract. */
499 double pos __UNUSED__)
501 Emotion_Gstreamer_Video *ev;
503 ev = (Emotion_Gstreamer_Video *)video;
504 if (!ev->pipeline) return ;
506 gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
/* Remember that playback started so the bus handler can emit the
 * "playback started" signal exactly once. */
508 ev->play_started = 1;
/* em_stop: pause the pipeline (Emotion "stop" maps to GStreamer PAUSED,
 * keeping the prerolled frame). */
514 Emotion_Gstreamer_Video *ev;
516 ev = (Emotion_Gstreamer_Video *)video;
518 if (!ev->pipeline) return ;
520 gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
/* em_size_get: report the current video stream's dimensions, or 0x0 when
 * no parsed video stream is available.  Streams are 1-indexed by
 * video_stream_nbr, hence the "- 1" for eina_list_nth. */
525 em_size_get(void *video,
529 Emotion_Gstreamer_Video *ev;
530 Emotion_Video_Stream *vstream;
532 ev = (Emotion_Gstreamer_Video *)video;
534 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
537 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
540 if (width) *width = vstream->width;
541 if (height) *height = vstream->height;
547 if (width) *width = 0;
548 if (height) *height = 0;
/* em_pos_set: accurate flushing seek to 'pos' seconds.  The pipeline is
 * paused around the seek and resumed afterwards. */
552 em_pos_set(void *video,
555 Emotion_Gstreamer_Video *ev;
558 ev = (Emotion_Gstreamer_Video *)video;
560 if (!ev->pipeline) return ;
563 res = gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
565 res = gst_element_seek(ev->pipeline, 1.0,
567 GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH,
569 (gint64)(pos * (double)GST_SECOND),
570 GST_SEEK_TYPE_NONE, -1);
573 res = gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
/* em_len_get: media duration in seconds.  First tries a pipeline-level
 * duration query; on failure falls back to the cached per-stream
 * length_time values gathered during pipeline parsing. */
577 em_len_get(void *video)
579 Emotion_Gstreamer_Video *ev;
580 Emotion_Video_Stream *vstream;
581 Emotion_Audio_Stream *astream;
588 fmt = GST_FORMAT_TIME;
590 if (!ev->pipeline) return 0.0;
592 ret = gst_element_query_duration(ev->pipeline, &fmt, &val);
/* The query may answer in a different format; only TIME is usable. */
596 if (fmt != GST_FORMAT_TIME)
598 DBG("requrested duration in time, but got %s instead.",
599 gst_format_get_name(fmt));
/* Nanoseconds -> seconds. */
606 return val / 1000000000.0;
609 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
/* Fallback: first stream with a known (non-negative) length wins. */
612 EINA_LIST_FOREACH(ev->audio_streams, l, astream)
613 if (astream->length_time >= 0)
614 return astream->length_time;
616 EINA_LIST_FOREACH(ev->video_streams, l, vstream)
617 if (vstream->length_time >= 0)
618 return vstream->length_time;
/* em_fps_num_get: framerate numerator of the current video stream
 * (streams are 1-indexed by video_stream_nbr). */
624 em_fps_num_get(void *video)
626 Emotion_Gstreamer_Video *ev;
627 Emotion_Video_Stream *vstream;
629 ev = (Emotion_Gstreamer_Video *)video;
631 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
634 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
636 return vstream->fps_num;
/* em_fps_den_get: framerate denominator of the current video stream. */
642 em_fps_den_get(void *video)
644 Emotion_Gstreamer_Video *ev;
645 Emotion_Video_Stream *vstream;
647 ev = (Emotion_Gstreamer_Video *)video;
649 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
652 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
654 return vstream->fps_den;
/* em_fps_get: framerate as a double (num/den) of the current video
 * stream. */
660 em_fps_get(void *video)
662 Emotion_Gstreamer_Video *ev;
663 Emotion_Video_Stream *vstream;
665 ev = (Emotion_Gstreamer_Video *)video;
667 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
670 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
672 return (double)vstream->fps_num / (double)vstream->fps_den;
/* em_pos_get: current playback position in seconds via a TIME-format
 * position query; the result is cached in ev->position. */
678 em_pos_get(void *video)
680 Emotion_Gstreamer_Video *ev;
686 fmt = GST_FORMAT_TIME;
688 if (!ev->pipeline) return 0.0;
690 ret = gst_element_query_position(ev->pipeline, &fmt, &val);
694 if (fmt != GST_FORMAT_TIME)
696 ERR("requrested position in time, but got %s instead.",
697 gst_format_get_name(fmt));
/* Nanoseconds -> seconds. */
701 ev->position = val / 1000000000.0;
/* em_vis_set: store the requested visualization type on the instance
 * (applied later during pipeline parsing — assignment elided here). */
706 em_vis_set(void *video,
709 Emotion_Gstreamer_Video *ev;
711 ev = (Emotion_Gstreamer_Video *)video;
/* em_vis_get: return the currently selected visualization type. */
717 em_vis_get(void *video)
719 Emotion_Gstreamer_Video *ev;
721 ev = (Emotion_Gstreamer_Video *)video;
/* em_vis_supported: a visualization is supported when the corresponding
 * GStreamer element factory exists in the registry.  The factory
 * reference obtained from gst_element_factory_find() is released. */
727 em_vis_supported(void *ef __UNUSED__, Emotion_Vis vis)
730 GstElementFactory *factory;
732 if (vis == EMOTION_VIS_NONE)
735 name = emotion_visualization_element_name_get(vis);
739 factory = gst_element_factory_find(name);
743 gst_object_unref(factory);
/* em_ratio_get: return the cached display aspect ratio (set when the
 * pipeline is parsed). */
748 em_ratio_get(void *video)
750 Emotion_Gstreamer_Video *ev;
752 ev = (Emotion_Gstreamer_Video *)video;
/* em_video_handled: non-zero when at least one video stream was found
 * after parsing the pipeline. */
758 em_video_handled(void *video)
760 Emotion_Gstreamer_Video *ev;
762 ev = (Emotion_Gstreamer_Video *)video;
764 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
766 if (!eina_list_count(ev->video_streams))
/* em_audio_handled: non-zero when at least one audio stream was found
 * after parsing the pipeline. */
773 em_audio_handled(void *video)
775 Emotion_Gstreamer_Video *ev;
777 ev = (Emotion_Gstreamer_Video *)video;
779 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
781 if (!eina_list_count(ev->audio_streams))
/* em_seekable / em_frame_done: trivial stubs (bodies elided). */
788 em_seekable(void *video __UNUSED__)
794 em_frame_done(void *video __UNUSED__)
/* em_format_get: translate the current video stream's FOURCC into the
 * Emotion pixel-format enum; EMOTION_FORMAT_NONE when there is no
 * parsed video stream or the FOURCC is unrecognized. */
798 static Emotion_Format
799 em_format_get(void *video)
801 Emotion_Gstreamer_Video *ev;
802 Emotion_Video_Stream *vstream;
804 ev = (Emotion_Gstreamer_Video *)video;
806 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
807 return EMOTION_FORMAT_NONE;
809 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
812 switch (vstream->fourcc)
814 case GST_MAKE_FOURCC('I', '4', '2', '0'):
815 return EMOTION_FORMAT_I420;
816 case GST_MAKE_FOURCC('Y', 'V', '1', '2'):
817 return EMOTION_FORMAT_YV12;
818 case GST_MAKE_FOURCC('Y', 'U', 'Y', '2'):
819 return EMOTION_FORMAT_YUY2;
/* RGB caps were mapped to an 'ARGB' FOURCC during pipeline parsing;
 * Emotion exposes that as BGRA. */
820 case GST_MAKE_FOURCC('A', 'R', 'G', 'B'):
821 return EMOTION_FORMAT_BGRA;
823 return EMOTION_FORMAT_NONE;
826 return EMOTION_FORMAT_NONE;
/* em_video_data_size_get: write the current video stream's dimensions
 * into *w/*h (the *w assignment is elided in this extract). */
830 em_video_data_size_get(void *video, int *w, int *h)
832 Emotion_Gstreamer_Video *ev;
833 Emotion_Video_Stream *vstream;
835 ev = (Emotion_Gstreamer_Video *)video;
837 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
840 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
844 *h = vstream->height;
/* The following callbacks are unimplemented stubs in this backend
 * (frame data is delivered through the sink instead). */
855 em_yuv_rows_get(void *video __UNUSED__,
858 unsigned char **yrows __UNUSED__,
859 unsigned char **urows __UNUSED__,
860 unsigned char **vrows __UNUSED__)
866 em_bgra_data_get(void *video __UNUSED__, unsigned char **bgra_data __UNUSED__)
872 em_event_feed(void *video __UNUSED__, int event __UNUSED__)
877 em_event_mouse_button_feed(void *video __UNUSED__, int button __UNUSED__, int x __UNUSED__, int y __UNUSED__)
882 em_event_mouse_move_feed(void *video __UNUSED__, int x __UNUSED__, int y __UNUSED__)
/* em_video_channel_count: number of video streams found after parsing. */
888 em_video_channel_count(void *video)
890 Emotion_Gstreamer_Video *ev;
892 ev = (Emotion_Gstreamer_Video *)video;
894 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
896 return eina_list_count(ev->video_streams);
/* em_video_channel_set: clamp the requested channel; actual channel
 * switching is not implemented yet (see FIXME). */
900 em_video_channel_set(void *video __UNUSED__,
901 int channel __UNUSED__)
904 Emotion_Gstreamer_Video *ev;
906 ev = (Emotion_Gstreamer_Video *)video;
908 if (channel < 0) channel = 0;
910 /* FIXME: to do... */
/* em_video_channel_get: index of the currently selected video stream. */
914 em_video_channel_get(void *video)
916 Emotion_Gstreamer_Video *ev;
918 ev = (Emotion_Gstreamer_Video *)video;
920 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
922 return ev->video_stream_nbr;
/* em_video_channel_name_get: no channel names available (stub). */
926 em_video_channel_name_get(void *video __UNUSED__,
927 int channel __UNUSED__)
/* em_video_channel_mute_set: record the mute flag on the instance. */
933 em_video_channel_mute_set(void *video,
936 Emotion_Gstreamer_Video *ev;
938 ev = (Emotion_Gstreamer_Video *)video;
940 ev->video_mute = mute;
/* em_video_channel_mute_get: return the recorded mute flag. */
944 em_video_channel_mute_get(void *video)
946 Emotion_Gstreamer_Video *ev;
948 ev = (Emotion_Gstreamer_Video *)video;
950 return ev->video_mute;
/* em_audio_channel_count: number of audio streams found after parsing. */
956 em_audio_channel_count(void *video)
958 Emotion_Gstreamer_Video *ev;
960 ev = (Emotion_Gstreamer_Video *)video;
962 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
964 return eina_list_count(ev->audio_streams);
/* em_audio_channel_set: clamp the requested channel (-1 means "default");
 * actual switching is not implemented yet (see FIXME). */
968 em_audio_channel_set(void *video __UNUSED__,
969 int channel __UNUSED__)
972 Emotion_Gstreamer_Video *ev;
974 ev = (Emotion_Gstreamer_Video *)video;
976 if (channel < -1) channel = -1;
978 /* FIXME: to do... */
/* em_audio_channel_get: index of the currently selected audio stream. */
982 em_audio_channel_get(void *video)
984 Emotion_Gstreamer_Video *ev;
986 ev = (Emotion_Gstreamer_Video *)video;
988 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
990 return ev->audio_stream_nbr;
/* em_audio_channel_name_get: no channel names available (stub). */
994 em_audio_channel_name_get(void *video __UNUSED__,
995 int channel __UNUSED__)
/* playbin2 "flags" bit enabling audio decoding (used by the disabled
 * experiment in em_audio_channel_mute_set below). */
1000 #define GST_PLAY_FLAG_AUDIO (1 << 1)
/* em_audio_channel_mute_set: record the flag and toggle the pipeline's
 * "mute" property.  The commented-out block below was an attempt to
 * disable only audio decoding via the playbin "flags" property; it is
 * kept as documentation of why that approach was abandoned. */
1003 em_audio_channel_mute_set(void *video,
1006 Emotion_Gstreamer_Video *ev;
1009 ev = (Emotion_Gstreamer_Video *)video;
1011 if (!ev->pipeline) return ;
1013 ev->audio_mute = mute;
1015 g_object_set(G_OBJECT(ev->pipeline), "mute", !!mute, NULL);
1016 /* This code should stop the decoding of only the audio stream, but everything stop :"( */
1017 /* g_object_get(G_OBJECT(ev->pipeline), "flags", &flags, NULL); */
1019 /* flags &= ~GST_PLAY_FLAG_AUDIO; */
1021 /* flags |= GST_PLAY_FLAG_AUDIO; */
1022 /* g_object_set(G_OBJECT(ev->pipeline), "flags", flags, NULL); */
1023 /* g_object_get(G_OBJECT(ev->pipeline), "flags", &flags, NULL); */
1024 /* fprintf(stderr, "flags-n: %x\n", flags); */
/* em_audio_channel_mute_get: return the recorded mute flag. */
1028 em_audio_channel_mute_get(void *video)
1030 Emotion_Gstreamer_Video *ev;
1032 ev = (Emotion_Gstreamer_Video *)video;
1034 return ev->audio_mute;
/* em_audio_channel_volume_set: forward the volume to the pipeline's
 * "volume" property (clamping of the input is elided in this extract). */
1038 em_audio_channel_volume_set(void *video,
1041 Emotion_Gstreamer_Video *ev;
1043 ev = (Emotion_Gstreamer_Video *)video;
1045 if (!ev->pipeline) return ;
1052 g_object_set(G_OBJECT(ev->pipeline), "volume", vol, NULL);
/* em_audio_channel_volume_get: return the cached volume (body elided). */
1056 em_audio_channel_volume_get(void *video)
1058 Emotion_Gstreamer_Video *ev;
1060 ev = (Emotion_Gstreamer_Video *)video;
/* SPU (subtitle), chapter, speed and eject support are not implemented
 * in this backend — the following are all stubs. */
1068 em_spu_channel_count(void *video __UNUSED__)
1074 em_spu_channel_set(void *video __UNUSED__, int channel __UNUSED__)
1079 em_spu_channel_get(void *video __UNUSED__)
1085 em_spu_channel_name_get(void *video __UNUSED__, int channel __UNUSED__)
1091 em_spu_channel_mute_set(void *video __UNUSED__, int mute __UNUSED__)
1096 em_spu_channel_mute_get(void *video __UNUSED__)
1102 em_chapter_count(void *video __UNUSED__)
1108 em_chapter_set(void *video __UNUSED__, int chapter __UNUSED__)
1113 em_chapter_get(void *video __UNUSED__)
1119 em_chapter_name_get(void *video __UNUSED__, int chapter __UNUSED__)
1125 em_speed_set(void *video __UNUSED__, double speed __UNUSED__)
1130 em_speed_get(void *video __UNUSED__)
1136 em_eject(void *video __UNUSED__)
/* em_meta_get: return the cached metadata string for the requested
 * META_TRACK_* key, or NULL when no metadata has been collected.  The
 * strings are owned by ev->metadata (filled by _for_each_tag); the
 * caller must not free them.  NOTE(review): the switch header, break
 * statements and final return are elided in this extract. */
1142 em_meta_get(void *video, int meta)
1144 Emotion_Gstreamer_Video *ev;
1145 const char *str = NULL;
1147 ev = (Emotion_Gstreamer_Video *)video;
1149 if (!ev || !ev->metadata) return NULL;
1152 case META_TRACK_TITLE:
1153 str = ev->metadata->title;
1155 case META_TRACK_ARTIST:
1156 str = ev->metadata->artist;
1158 case META_TRACK_ALBUM:
1159 str = ev->metadata->album;
1161 case META_TRACK_YEAR:
1162 str = ev->metadata->year;
1164 case META_TRACK_GENRE:
1165 str = ev->metadata->genre;
1167 case META_TRACK_COMMENT:
1168 str = ev->metadata->comment;
1170 case META_TRACK_DISCID:
1171 str = ev->metadata->disc_id;
/* module_open: entry point called by Emotion's core when the gstreamer
 * backend is selected.  Registers the Eina log domain on first use,
 * initializes the backend instance via em_module.init(), and hands the
 * v-table back through *module. */
1181 module_open(Evas_Object *obj,
1182 const Emotion_Video_Module **module,
1184 Emotion_Module_Options *opt)
/* Lazily register the log domain exactly once (it starts at -1). */
1189 if (_emotion_gstreamer_log_domain < 0)
1191 eina_threads_init();
1192 eina_log_threads_enable();
1193 _emotion_gstreamer_log_domain = eina_log_domain_register
1194 ("emotion-gstreamer", EINA_COLOR_LIGHTCYAN);
1195 if (_emotion_gstreamer_log_domain < 0)
1197 EINA_LOG_CRIT("Could not register log domain 'emotion-gstreamer'");
1202 if (!em_module.init(obj, video, opt))
1205 eina_threads_init();
1207 *module = &em_module;
/* module_close: tear down the backend instance and balance the
 * eina_threads_init() done in module_open. */
1212 module_close(Emotion_Video_Module *module __UNUSED__,
1215 em_module.shutdown(video);
1217 eina_threads_shutdown();
/* gstreamer_module_init: library constructor — initialize GStreamer,
 * register the static Emotion video-sink plugin, then register this
 * backend with Emotion's module registry under the name "gstreamer". */
1221 gstreamer_module_init(void)
1225 if (!gst_init_check(0, NULL, &error))
1227 EINA_LOG_CRIT("Could not init GStreamer");
1231 if (gst_plugin_register_static(GST_VERSION_MAJOR, GST_VERSION_MINOR,
1233 "video sink plugin for Emotion",
1234 gstreamer_plugin_init,
1239 "http://www.enlightenment.org/") == FALSE)
1241 EINA_LOG_CRIT("Could not load static gstreamer video sink for Emotion.");
1245 return _emotion_module_register("gstreamer", module_open, module_close);
/* gstreamer_module_shutdown: unregister the backend from Emotion. */
1249 gstreamer_module_shutdown(void)
1251 _emotion_module_unregister("gstreamer");
/* When built as a shared module (not statically linked into Emotion),
 * expose init/shutdown through Eina's module loader. */
1256 #ifndef EMOTION_STATIC_BUILD_GSTREAMER
1258 EINA_MODULE_INIT(gstreamer_module_init);
1259 EINA_MODULE_SHUTDOWN(gstreamer_module_shutdown);
/* _for_each_tag: GstTagForeachFunc invoked for every tag in a taglist
 * delivered on the bus.  Copies the tags this backend cares about into
 * ev->metadata, freeing any previous value first.  Strings returned by
 * gst_tag_list_get_string()/g_strdup_value_contents() are owned by the
 * metadata struct and released in _free_metadata(). */
1264 _for_each_tag(GstTagList const* list,
1268 Emotion_Gstreamer_Video *ev;
1273 ev = (Emotion_Gstreamer_Video*)data;
1275 if (!ev || !ev->metadata) return;
1277 count = gst_tag_list_get_tag_size(list, tag);
1279 for (i = 0; i < count; i++)
1281 if (!strcmp(tag, GST_TAG_TITLE))
/* Replace any previously stored value to avoid leaking it. */
1284 if (ev->metadata->title) g_free(ev->metadata->title);
1285 if (gst_tag_list_get_string(list, GST_TAG_TITLE, &str))
1286 ev->metadata->title = str;
1288 ev->metadata->title = NULL;
1291 if (!strcmp(tag, GST_TAG_ALBUM))
1294 if (ev->metadata->album) g_free(ev->metadata->album);
1295 if (gst_tag_list_get_string(list, GST_TAG_ALBUM, &str))
1296 ev->metadata->album = str;
1298 ev->metadata->album = NULL;
1301 if (!strcmp(tag, GST_TAG_ARTIST))
1304 if (ev->metadata->artist) g_free( ev->metadata->artist);
1305 if (gst_tag_list_get_string(list, GST_TAG_ARTIST, &str))
1306 ev->metadata->artist = str;
1308 ev->metadata->artist = NULL;
1311 if (!strcmp(tag, GST_TAG_GENRE))
1314 if (ev->metadata->genre) g_free( ev->metadata->genre);
1315 if (gst_tag_list_get_string(list, GST_TAG_GENRE, &str))
1316 ev->metadata->genre = str;
1318 ev->metadata->genre = NULL;
1321 if (!strcmp(tag, GST_TAG_COMMENT))
1324 if (ev->metadata->comment) g_free(ev->metadata->comment);
1325 if (gst_tag_list_get_string(list, GST_TAG_COMMENT, &str))
1326 ev->metadata->comment = str;
1328 ev->metadata->comment = NULL;
/* DATE is a GValue (GDate), not a string — stringify its contents. */
1331 if (!strcmp(tag, GST_TAG_DATE))
1335 if (ev->metadata->year) g_free(ev->metadata->year);
1336 date = gst_tag_list_get_value_index(list, GST_TAG_DATE, 0);
1338 str = g_strdup_value_contents(date);
1341 ev->metadata->year = str;
/* Track number is numeric; stored stringified in metadata->count. */
1345 if (!strcmp(tag, GST_TAG_TRACK_NUMBER))
1348 const GValue *track;
1349 if (ev->metadata->count) g_free( ev->metadata->count);
1350 track = gst_tag_list_get_value_index(list, GST_TAG_TRACK_NUMBER, 0);
1352 str = g_strdup_value_contents(track);
1355 ev->metadata->count = str;
/* CDDB disc id tag only exists in sufficiently new GStreamer headers. */
1359 #ifdef GST_TAG_CDDA_CDDB_DISCID
1360 if (!strcmp(tag, GST_TAG_CDDA_CDDB_DISCID))
1363 const GValue *discid;
1364 if (ev->metadata->disc_id) g_free(ev->metadata->disc_id);
1365 discid = gst_tag_list_get_value_index(list, GST_TAG_CDDA_CDDB_DISCID, 0);
1367 str = g_strdup_value_contents(discid);
1370 ev->metadata->disc_id = str;
/* _eos_main_fct: runs in the main loop (scheduled by _eos_sync_fct via
 * ecore_main_loop_thread_safe_call) and dispatches a bus message to the
 * matching Emotion core signal: error logging, EOS -> playback finished,
 * TAG -> metadata update, ASYNC_DONE -> seek done.  Frees the message
 * wrapper when done. */
1396 _eos_main_fct(void *data)
1398 Emotion_Gstreamer_Message *send;
1399 Emotion_Gstreamer_Video *ev;
/* Emit "playback started" exactly once per em_play(). */
1406 if (ev->play_started)
1408 _emotion_playback_started(ev->obj);
1409 ev->play_started = 0;
1412 switch (GST_MESSAGE_TYPE(msg))
1414 case GST_MESSAGE_ERROR:
1419 gst_message_parse_error(msg, &err, &debug);
1422 ERR("Error: %s", err->message);
1427 case GST_MESSAGE_EOS:
1429 _emotion_decode_stop(ev->obj);
1430 _emotion_playback_finished(ev->obj);
1432 case GST_MESSAGE_TAG:
1434 GstTagList *new_tags;
1435 gst_message_parse_tag(msg, &new_tags);
/* Walk every tag, copying the interesting ones into ev->metadata. */
1438 gst_tag_list_foreach(new_tags,
1439 (GstTagForeachFunc)_for_each_tag,
1441 gst_tag_list_free(new_tags);
1445 case GST_MESSAGE_ASYNC_DONE:
1446 _emotion_seek_done(ev->obj);
1449 ERR("bus say: %s [%i]",
1450 GST_MESSAGE_SRC_NAME(msg),
1451 GST_MESSAGE_TYPE(msg));
1455 emotion_gstreamer_message_free(send);
/* _eos_sync_fct: synchronous bus handler — runs on GStreamer's streaming
 * thread, so it must not touch Evas/Emotion directly.  Interesting
 * messages are wrapped and relayed to the main loop; everything is
 * dropped afterwards (GST_BUS_DROP) since it was fully handled here. */
1458 static GstBusSyncReply
1459 _eos_sync_fct(GstBus *bus, GstMessage *msg, gpointer data)
1461 Emotion_Gstreamer_Video *ev = data;
1462 Emotion_Gstreamer_Message *send;
1464 switch (GST_MESSAGE_TYPE(msg))
1466 case GST_MESSAGE_ERROR:
1467 case GST_MESSAGE_EOS:
1468 case GST_MESSAGE_TAG:
1469 case GST_MESSAGE_ASYNC_DONE:
1470 send = emotion_gstreamer_message_alloc(ev, msg);
1472 if (send) ecore_main_loop_thread_safe_call(_eos_main_fct, send);
1477 WRN("bus say: %s [%i]",
1478 GST_MESSAGE_SRC_NAME(msg),
1479 GST_MESSAGE_TYPE(msg));
1483 return GST_BUS_DROP;
/* _emotion_gstreamer_video_pipeline_parse: preroll the pipeline and
 * collect stream information (dimensions, framerate, fourcc, duration,
 * audio channels/rate) into ev->video_streams / ev->audio_streams.
 * Runs at most once per open (guarded by ev->pipeline_parsed) unless
 * 'force' is set.  Also sets up the visualization sink for audio-only
 * media, caches the aspect ratio, resets metadata, and re-applies
 * volume/mute.  NOTE(review): several declarations, braces and error
 * paths are elided in this extract. */
1487 _emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev,
/* Already parsed — nothing to do (early-out branch elided). */
1493 if (ev->pipeline_parsed)
1496 if (force && ev->thread)
1498 ecore_thread_cancel(ev->thread);
/* Block until the pipeline reaches its pending state.  Live sources
 * answer NO_PREROLL and must be pushed to PLAYING to produce data. */
1505 res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
1506 if (res == GST_STATE_CHANGE_NO_PREROLL)
1507 gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
1509 res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
1510 if (!(res == GST_STATE_CHANGE_SUCCESS
1511 || res == GST_STATE_CHANGE_NO_PREROLL))
1513 /** NOTE: you need to set: GST_DEBUG_DUMP_DOT_DIR=/tmp EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp' */
1514 /** then call dot -Tpng -oemotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
1515 if (getenv("EMOTION_GSTREAMER_DOT"))
1516 GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline),
1517 GST_DEBUG_GRAPH_SHOW_ALL,
1518 getenv("EMOTION_GSTREAMER_DOT"));
1520 ERR("Unable to get GST_CLOCK_TIME_NONE.");
/* Ask playbin how many streams of each kind it discovered. */
1524 g_object_get(G_OBJECT(ev->pipeline),
1525 "n-audio", &ev->audio_stream_nbr,
1526 "n-video", &ev->video_stream_nbr,
1529 if ((ev->video_stream_nbr == 0) && (ev->audio_stream_nbr == 0))
1531 ERR("No audio nor video stream found");
/* --- Video streams: read negotiated caps off each video pad. --- */
1536 for (i = 0; i < ev->video_stream_nbr; i++)
1538 Emotion_Video_Stream *vstream;
1541 GstStructure *structure;
1546 gdouble length_time = 0.0;
1553 g_signal_emit_by_name(ev->pipeline, "get-video-pad", i, &pad);
1557 caps = gst_pad_get_negotiated_caps(pad);
1560 structure = gst_caps_get_structure(caps, 0);
1561 str = gst_caps_to_string(caps);
1563 if (!gst_structure_get_int(structure, "width", &width))
1565 if (!gst_structure_get_int(structure, "height", &height))
1567 if (!gst_structure_get_fraction(structure, "framerate", &fps_num, &fps_den))
/* YUV caps carry an explicit fourcc; RGB caps are normalized to a
 * synthetic 'ARGB' fourcc (matched again in em_format_get). */
1570 if (g_str_has_prefix(str, "video/x-raw-yuv"))
1572 val = gst_structure_get_value(structure, "format");
1573 fourcc = gst_value_get_fourcc(val);
1575 else if (g_str_has_prefix(str, "video/x-raw-rgb"))
1576 fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
/* Per-stream duration via a TIME query on the pad's peer. */
1580 query = gst_query_new_duration(GST_FORMAT_TIME);
1581 if (gst_pad_peer_query(pad, query))
1585 gst_query_parse_duration(query, NULL, &t);
1586 length_time = (double)t / (double)GST_SECOND;
1591 vstream = emotion_video_stream_new(ev);
1592 if (!vstream) goto unref_query_v;
1594 vstream->length_time = length_time;
1595 vstream->width = width;
1596 vstream->height = height;
1597 vstream->fps_num = fps_num;
1598 vstream->fps_den = fps_den;
1599 vstream->fourcc = fourcc;
1603 gst_query_unref(query);
1605 gst_caps_unref(caps);
1607 gst_object_unref(pad);
/* --- Audio streams: channels/rate from each audio pad's caps. --- */
1611 for (i = 0; i < ev->audio_stream_nbr; i++)
1613 Emotion_Audio_Stream *astream;
1616 GstStructure *structure;
1619 gdouble length_time = 0.0;
1623 g_signal_emit_by_name(ev->pipeline, "get-audio-pad", i, &pad);
1627 caps = gst_pad_get_negotiated_caps(pad);
1630 structure = gst_caps_get_structure(caps, 0);
1632 if (!gst_structure_get_int(structure, "channels", &channels))
1634 if (!gst_structure_get_int(structure, "rate", &samplerate))
1637 query = gst_query_new_duration(GST_FORMAT_TIME);
1638 if (gst_pad_peer_query(pad, query))
1642 gst_query_parse_duration(query, NULL, &t);
1643 length_time = (double)t / (double)GST_SECOND;
1648 astream = calloc(1, sizeof(Emotion_Audio_Stream));
1649 if (!astream) continue;
1650 ev->audio_streams = eina_list_append(ev->audio_streams, astream);
/* eina_list_append() signals failure via Eina's error state. */
1651 if (eina_error_get())
1657 astream->length_time = length_time;
1658 astream->channels = channels;
1659 astream->samplerate = samplerate;
1662 gst_query_unref(query);
1664 gst_caps_unref(caps);
1666 gst_object_unref(pad);
1669 /* Visualization sink */
/* Audio-only media: synthesize a video stream backed by a visualization
 * element so Emotion still has frames to show. */
1670 if (ev->video_stream_nbr == 0)
1672 GstElement *vis = NULL;
1673 Emotion_Video_Stream *vstream;
1674 Emotion_Audio_Stream *astream;
1676 const char *vis_name;
1678 if (!(vis_name = emotion_visualization_element_name_get(ev->vis)))
1680 WRN("pb vis name %d", ev->vis);
1684 astream = eina_list_data_get(ev->audio_streams);
1686 vis = gst_element_factory_make(vis_name, "vissink");
1687 vstream = emotion_video_stream_new(ev);
1691 DBG("could not create visualization stream");
/* Fixed 320x200@25fps ARGB canvas for the visualization output. */
1693 vstream->length_time = astream->length_time;
1694 vstream->width = 320;
1695 vstream->height = 200;
1696 vstream->fps_num = 25;
1697 vstream->fps_den = 1;
1698 vstream->fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
1700 g_object_set(G_OBJECT(ev->pipeline), "vis-plugin", vis, NULL);
1701 g_object_get(G_OBJECT(ev->pipeline), "flags", &flags, NULL);
/* 0x8 is presumably playbin's GST_PLAY_FLAG_VIS bit — confirm against
 * the playbin flags enum. */
1702 flags |= 0x00000008;
1703 g_object_set(G_OBJECT(ev->pipeline), "flags", flags, NULL);
/* Recompute counts from what was actually collected (allocation may
 * have skipped streams, and a vis stream may have been added). */
1708 ev->video_stream_nbr = eina_list_count(ev->video_streams);
1709 ev->audio_stream_nbr = eina_list_count(ev->audio_streams);
1711 if (ev->video_stream_nbr == 1)
1713 Emotion_Video_Stream *vstream;
1715 vstream = eina_list_data_get(ev->video_streams);
1716 ev->ratio = (double)vstream->width / (double)vstream->height;
1717 _emotion_frame_resize(ev->obj, vstream->width, vstream->height, ev->ratio);
1721 /* recap: log what was found */
1722 Emotion_Video_Stream *vstream;
1723 Emotion_Audio_Stream *astream;
1725 vstream = eina_list_data_get(ev->video_streams);
1728 DBG("video size=%dx%d, fps=%d/%d, "
1729 "fourcc=%"GST_FOURCC_FORMAT", length=%"GST_TIME_FORMAT,
1730 vstream->width, vstream->height, vstream->fps_num, vstream->fps_den,
1731 GST_FOURCC_ARGS(vstream->fourcc),
1732 GST_TIME_ARGS((guint64)(vstream->length_time * GST_SECOND)));
1735 astream = eina_list_data_get(ev->audio_streams);
1738 DBG("audio channels=%d, rate=%d, length=%"GST_TIME_FORMAT,
1739 astream->channels, astream->samplerate,
1740 GST_TIME_ARGS((guint64)(astream->length_time * GST_SECOND)));
/* Start metadata collection from a clean slate. */
1745 _free_metadata(ev->metadata);
1746 ev->metadata = calloc(1, sizeof(Emotion_Gstreamer_Metadata));
/* Re-apply user settings that predate the pipeline. */
1748 em_audio_channel_volume_set(ev, ev->volume);
1749 em_audio_channel_mute_set(ev, ev->audio_mute);
1751 if (ev->play_started)
1753 _emotion_playback_started(ev->obj);
1754 ev->play_started = 0;
1757 _emotion_open_done(ev->obj);
1758 ev->pipeline_parsed = EINA_TRUE;