18 #include <glib-object.h>
19 #include <gst/video/gstvideosink.h>
20 #include <gst/video/video.h>
24 # include <Ecore_Evas.h>
25 # ifdef HAVE_XOVERLAY_H
26 # include <gst/interfaces/xoverlay.h>
31 #include "emotion_private.h"
32 #include "emotion_gstreamer.h"
/* Module-wide state shared by every gstreamer Emotion player instance. */
34 Eina_Bool window_manager_video = EINA_FALSE; /* set in module_open() when the WM advertises the E video Xv atoms */
35 int _emotion_gstreamer_log_domain = -1; /* eina log domain, registered on first module_open() */
36 Eina_Bool debug_fps = EINA_FALSE; /* enabled by the EMOTION_FPS_DEBUG environment variable */
37 Eina_Bool _ecore_x_available = EINA_FALSE; /* EINA_TRUE once ecore_x_init() succeeds */
39 /* Callbacks used to collect stream metadata and to handle end-of-stream. */
40 static void _for_each_tag (GstTagList const* list, gchar const* tag, void *data);
41 static void _free_metadata (Emotion_Gstreamer_Metadata *m);
/* Forward declarations of the backend API; these are wired into the
 * em_module vtable below. */
45 static unsigned char em_init (Evas_Object *obj,
47 Emotion_Module_Options *opt);
49 static unsigned char em_file_open (const char *file,
53 static void em_file_close (void *video);
55 static void em_play (void *video,
58 static void em_stop (void *video);
60 static void em_size_get (void *video,
64 static void em_pos_set (void *video,
68 static double em_len_get (void *video);
70 static double em_buffer_size_get (void *video);
72 static int em_fps_num_get (void *video);
74 static int em_fps_den_get (void *video);
76 static double em_fps_get (void *video);
78 static double em_pos_get (void *video);
80 static void em_vis_set (void *video,
83 static Emotion_Vis em_vis_get (void *video);
85 static Eina_Bool em_vis_supported (void *video,
88 static double em_ratio_get (void *video);
90 static int em_video_handled (void *video);
92 static int em_audio_handled (void *video);
94 static int em_seekable (void *video);
96 static void em_frame_done (void *video);
98 static Emotion_Format em_format_get (void *video);
100 static void em_video_data_size_get (void *video,
104 static int em_yuv_rows_get (void *video,
107 unsigned char **yrows,
108 unsigned char **urows,
109 unsigned char **vrows);
111 static int em_bgra_data_get (void *video,
112 unsigned char **bgra_data);
114 static void em_event_feed (void *video,
117 static void em_event_mouse_button_feed (void *video,
122 static void em_event_mouse_move_feed (void *video,
126 static int em_video_channel_count (void *video);
128 static void em_video_channel_set (void *video,
131 static int em_video_channel_get (void *video);
133 static const char *em_video_channel_name_get (void *video,
136 static void em_video_channel_mute_set (void *video,
139 static int em_video_channel_mute_get (void *video);
141 static int em_audio_channel_count (void *video);
143 static void em_audio_channel_set (void *video,
146 static int em_audio_channel_get (void *video);
148 static const char *em_audio_channel_name_get (void *video,
151 static void em_audio_channel_mute_set (void *video,
154 static int em_audio_channel_mute_get (void *video);
156 static void em_audio_channel_volume_set (void *video,
159 static double em_audio_channel_volume_get (void *video);
161 static int em_spu_channel_count (void *video);
163 static void em_spu_channel_set (void *video,
166 static int em_spu_channel_get (void *video);
168 static const char *em_spu_channel_name_get (void *video,
171 static void em_spu_channel_mute_set (void *video,
174 static int em_spu_channel_mute_get (void *video);
176 static int em_chapter_count (void *video);
178 static void em_chapter_set (void *video,
181 static int em_chapter_get (void *video);
183 static const char *em_chapter_name_get (void *video,
186 static void em_speed_set (void *video,
189 static double em_speed_get (void *video);
191 static int em_eject (void *video);
193 static const char *em_meta_get (void *video,
196 static void em_priority_set (void *video,
198 static Eina_Bool em_priority_get (void *video);
/* Synchronous GstBus handler (runs on a streaming thread). */
200 static GstBusSyncReply _eos_sync_fct(GstBus *bus,
204 /* Module interface */
/* Vtable of backend callbacks handed to Emotion core via module_open().
 * NOTE(review): the initializer order must match the field order of
 * Emotion_Video_Module exactly -- verify against emotion_private.h. */
206 static Emotion_Video_Module em_module =
209 em_shutdown, /* shutdown */
210 em_file_open, /* file_open */
211 em_file_close, /* file_close */
214 em_size_get, /* size_get */
215 em_pos_set, /* pos_set */
216 em_len_get, /* len_get */
217 em_buffer_size_get, /* buffer_size_get */
218 em_fps_num_get, /* fps_num_get */
219 em_fps_den_get, /* fps_den_get */
220 em_fps_get, /* fps_get */
221 em_pos_get, /* pos_get */
222 em_vis_set, /* vis_set */
223 em_vis_get, /* vis_get */
224 em_vis_supported, /* vis_supported */
225 em_ratio_get, /* ratio_get */
226 em_video_handled, /* video_handled */
227 em_audio_handled, /* audio_handled */
228 em_seekable, /* seekable */
229 em_frame_done, /* frame_done */
230 em_format_get, /* format_get */
231 em_video_data_size_get, /* video_data_size_get */
232 em_yuv_rows_get, /* yuv_rows_get */
233 em_bgra_data_get, /* bgra_data_get */
234 em_event_feed, /* event_feed */
235 em_event_mouse_button_feed, /* event_mouse_button_feed */
236 em_event_mouse_move_feed, /* event_mouse_move_feed */
237 em_video_channel_count, /* video_channel_count */
238 em_video_channel_set, /* video_channel_set */
239 em_video_channel_get, /* video_channel_get */
240 em_video_channel_name_get, /* video_channel_name_get */
241 em_video_channel_mute_set, /* video_channel_mute_set */
242 em_video_channel_mute_get, /* video_channel_mute_get */
243 em_audio_channel_count, /* audio_channel_count */
244 em_audio_channel_set, /* audio_channel_set */
245 em_audio_channel_get, /* audio_channel_get */
246 em_audio_channel_name_get, /* audio_channel_name_get */
247 em_audio_channel_mute_set, /* audio_channel_mute_set */
248 em_audio_channel_mute_get, /* audio_channel_mute_get */
249 em_audio_channel_volume_set, /* audio_channel_volume_set */
250 em_audio_channel_volume_get, /* audio_channel_volume_get */
251 em_spu_channel_count, /* spu_channel_count */
252 em_spu_channel_set, /* spu_channel_set */
253 em_spu_channel_get, /* spu_channel_get */
254 em_spu_channel_name_get, /* spu_channel_name_get */
255 em_spu_channel_mute_set, /* spu_channel_mute_set */
256 em_spu_channel_mute_get, /* spu_channel_mute_get */
257 em_chapter_count, /* chapter_count */
258 em_chapter_set, /* chapter_set */
259 em_chapter_get, /* chapter_get */
260 em_chapter_name_get, /* chapter_name_get */
261 em_speed_set, /* speed_set */
262 em_speed_get, /* speed_get */
263 em_eject, /* eject */
264 em_meta_get, /* meta_get */
265 em_priority_set, /* priority_set */
266 em_priority_get, /* priority_get */
/* Count of consecutive pipeline-creation failures; em_priority_set()
 * gives up once it exceeds 3.  ("overide" spelling kept as-is.) */
270 static int priority_overide = 0;
/* Allocate a zeroed video-stream descriptor and append it to
 * ev->video_streams.  Returns NULL when ev is NULL or allocation fails. */
272 static Emotion_Video_Stream *
273 emotion_video_stream_new(Emotion_Gstreamer_Video *ev)
275 Emotion_Video_Stream *vstream;
277 if (!ev) return NULL;
279 vstream = (Emotion_Video_Stream *)calloc(1, sizeof(Emotion_Video_Stream));
280 if (!vstream) return NULL;
282 ev->video_streams = eina_list_append(ev->video_streams, vstream);
/* NOTE(review): the eina_error_get() branch body is not visible in this
 * excerpt -- presumably it frees vstream and returns NULL; confirm. */
283 if (eina_error_get())
/* Map an Emotion_Vis value to the matching GStreamer visualization
 * element factory name.
 * NOTE(review): spellings such as "libvisual_lv_analyzer" and
 * "libvisual_plazma" intentionally match the real libvisual plugin
 * names even where they differ from the enum spelling -- do not "fix". */
292 emotion_visualization_element_name_get(Emotion_Vis visualisation)
294 switch (visualisation)
296 case EMOTION_VIS_NONE:
298 case EMOTION_VIS_GOOM:
300 case EMOTION_VIS_LIBVISUAL_BUMPSCOPE:
301 return "libvisual_bumpscope";
302 case EMOTION_VIS_LIBVISUAL_CORONA:
303 return "libvisual_corona";
304 case EMOTION_VIS_LIBVISUAL_DANCING_PARTICLES:
305 return "libvisual_dancingparticles";
306 case EMOTION_VIS_LIBVISUAL_GDKPIXBUF:
307 return "libvisual_gdkpixbuf";
308 case EMOTION_VIS_LIBVISUAL_G_FORCE:
309 return "libvisual_G-Force";
310 case EMOTION_VIS_LIBVISUAL_GOOM:
311 return "libvisual_goom";
312 case EMOTION_VIS_LIBVISUAL_INFINITE:
313 return "libvisual_infinite";
314 case EMOTION_VIS_LIBVISUAL_JAKDAW:
315 return "libvisual_jakdaw";
316 case EMOTION_VIS_LIBVISUAL_JESS:
317 return "libvisual_jess";
318 case EMOTION_VIS_LIBVISUAL_LV_ANALYSER:
319 return "libvisual_lv_analyzer";
320 case EMOTION_VIS_LIBVISUAL_LV_FLOWER:
321 return "libvisual_lv_flower";
322 case EMOTION_VIS_LIBVISUAL_LV_GLTEST:
323 return "libvisual_lv_gltest";
324 case EMOTION_VIS_LIBVISUAL_LV_SCOPE:
325 return "libvisual_lv_scope";
326 case EMOTION_VIS_LIBVISUAL_MADSPIN:
327 return "libvisual_madspin";
328 case EMOTION_VIS_LIBVISUAL_NEBULUS:
329 return "libvisual_nebulus";
330 case EMOTION_VIS_LIBVISUAL_OINKSIE:
331 return "libvisual_oinksie";
332 case EMOTION_VIS_LIBVISUAL_PLASMA:
333 return "libvisual_plazma";
/* Backend init: allocate the per-player Emotion_Gstreamer_Video state,
 * make sure GStreamer itself is initialized, and hand the handle back
 * to the caller through emotion_video. */
340 em_init(Evas_Object *obj,
341 void **emotion_video,
342 Emotion_Module_Options *opt __UNUSED__)
344 Emotion_Gstreamer_Video *ev;
350 ev = calloc(1, sizeof(Emotion_Gstreamer_Video));
355 /* Initialization of gstreamer */
356 if (!gst_init_check(NULL, NULL, &error))
361 ev->vis = EMOTION_VIS_NONE; /* no visualization by default */
363 ev->play_started = 0;
364 ev->delete_me = EINA_FALSE;
/* Tear down everything em_init()/em_file_open() created: pending
 * buffers, the bus, cached metadata, the pipeline and its pads, any
 * Xv window, and the recorded stream descriptors. */
378 em_cleanup(Emotion_Gstreamer_Video *ev)
380 Emotion_Audio_Stream *astream;
381 Emotion_Video_Stream *vstream;
385 emotion_gstreamer_buffer_free(ev->send)
391 gst_object_unref(GST_OBJECT(ev->eos_bus));
397 _free_metadata(ev->metadata);
403 gst_buffer_unref(ev->last_buffer);
404 ev->last_buffer = NULL;
/* detach any video surface still bound to the Evas image */
409 evas_object_image_video_surface_set(emotion_object_image_get(ev->obj), NULL);
410 ev->stream = EINA_TRUE;
415 gstreamer_video_sink_new(ev, ev->obj, NULL);
/* unhook the sink from this player before dropping the pipeline */
417 g_object_set(G_OBJECT(ev->esink), "ev", NULL, NULL);
418 g_object_set(G_OBJECT(ev->esink), "evas-object", NULL, NULL);
419 gst_element_set_state(ev->pipeline, GST_STATE_NULL);
420 gst_object_unref(ev->pipeline);
425 if (ev->teepad) gst_object_unref(ev->teepad);
427 if (ev->xvpad) gst_object_unref(ev->xvpad);
431 fprintf(stderr, "destroying window: %i\n", ev->win);
432 if (ev->win) ecore_x_window_free(ev->win);
/* free the per-stream descriptors collected at parse time */
437 EINA_LIST_FREE(ev->audio_streams, astream)
439 EINA_LIST_FREE(ev->video_streams, vstream)
/* Backend shutdown: cancel outstanding decode threads and mark the
 * player delete_me so destruction is deferred while threads or queued
 * frames (ev->in != ev->out) are still in flight. */
444 em_shutdown(void *video)
446 Emotion_Gstreamer_Video *ev;
448 ev = (Emotion_Gstreamer_Video *)video;
456 EINA_LIST_FREE(ev->threads, t)
457 ecore_thread_cancel(t);
459 ev->delete_me = EINA_TRUE;
463 if (ev->in != ev->out) /* frames still queued between threads */
465 ev->delete_me = EINA_TRUE;
/* Open a media source.  Bare paths are turned into file:// URIs
 * (handling "./" prefixes, relative paths via getcwd(), and Windows
 * drive letters), then the playback pipeline and its bus watcher are
 * set up. */
478 em_file_open(const char *file,
482 Emotion_Gstreamer_Video *ev;
483 Eina_Strbuf *sbuf = NULL;
486 ev = (Emotion_Gstreamer_Video *)video;
488 if (!file) return EINA_FALSE;
489 if (strstr(file, "://") == NULL) /* no scheme given: build a file:// URI */
491 sbuf = eina_strbuf_new();
492 eina_strbuf_append(sbuf, "file://");
493 if (strncmp(file, "./", 2) == 0)
495 if (strstr(file, ":/") != NULL)
496 { /* We absolutely need file:///C:/ under Windows, so adding it here */
497 eina_strbuf_append(sbuf, "/");
499 else if (*file != '/') /* relative path: prefix the cwd */
503 if (getcwd(tmp, PATH_MAX))
505 eina_strbuf_append(sbuf, tmp);
506 eina_strbuf_append(sbuf, "/");
509 eina_strbuf_append(sbuf, file);
512 ev->play_started = 0;
513 ev->pipeline_parsed = 0;
515 uri = sbuf ? eina_strbuf_string_get(sbuf) : file;
516 DBG("setting file to '%s'", uri);
517 ev->pipeline = gstreamer_video_sink_new(ev, obj, uri);
518 if (sbuf) eina_strbuf_free(sbuf);
523 ev->eos_bus = gst_pipeline_get_bus(GST_PIPELINE(ev->pipeline));
526 ERR("could not get the bus");
/* watch bus messages synchronously so EOS/errors reach the main loop */
530 gst_bus_set_sync_handler(ev->eos_bus, _eos_sync_fct, ev);
/* Close the current media source: cancel decode threads and reset the
 * parsed/playing flags so the player can be reused. */
541 em_file_close(void *video)
543 Emotion_Gstreamer_Video *ev;
545 ev = (Emotion_Gstreamer_Video *)video;
553 EINA_LIST_FREE(ev->threads, t)
554 ecore_thread_cancel(t);
559 ev->pipeline_parsed = EINA_FALSE;
560 ev->play_started = 0;
/* em_play: start playback; 'pos' is unused (seeking is em_pos_set()). */
565 double pos __UNUSED__)
567 Emotion_Gstreamer_Video *ev;
569 ev = (Emotion_Gstreamer_Video *)video;
570 if (!ev->pipeline) return ;
572 if (ev->pipeline_parsed)
573 gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
575 ev->play_started = 1;
/* em_stop: "stop" by pausing the pipeline so state survives for resume. */
581 Emotion_Gstreamer_Video *ev;
583 ev = (Emotion_Gstreamer_Video *)video;
585 if (!ev->pipeline) return ;
587 if (ev->pipeline_parsed)
588 gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
/* Report the current video stream's frame size; 0x0 when the pipeline
 * cannot be parsed or no video stream exists. */
593 em_size_get(void *video,
597 Emotion_Gstreamer_Video *ev;
598 Emotion_Video_Stream *vstream;
600 ev = (Emotion_Gstreamer_Video *)video;
602 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
/* streams are stored 0-based, video_stream_nbr is 1-based */
605 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
608 if (width) *width = vstream->width;
609 if (height) *height = vstream->height;
615 if (width) *width = 0;
616 if (height) *height = 0;
/* Seek to 'pos' (seconds): pause, perform a flushing accurate seek to
 * the nanosecond target, then set the pipeline playing again. */
620 em_pos_set(void *video,
623 Emotion_Gstreamer_Video *ev;
625 ev = (Emotion_Gstreamer_Video *)video;
627 if (!ev->pipeline) return ;
630 gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
632 gst_element_seek(ev->pipeline, 1.0,
634 GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH,
636 (gint64)(pos * (double)GST_SECOND), /* seconds -> nanoseconds */
637 GST_SEEK_TYPE_NONE, -1); /* leave the stop position unchanged */
640 gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
/* Total stream length in seconds: query the pipeline's duration in
 * GST_FORMAT_TIME, falling back to the per-stream length_time values
 * recorded at parse time. */
644 em_len_get(void *video)
646 Emotion_Gstreamer_Video *ev;
647 Emotion_Video_Stream *vstream;
648 Emotion_Audio_Stream *astream;
655 fmt = GST_FORMAT_TIME;
657 if (!ev->pipeline) return 0.0;
659 ret = gst_element_query_duration(ev->pipeline, &fmt, &val);
663 if (fmt != GST_FORMAT_TIME) /* some element answered in another format */
665 DBG("requrested duration in time, but got %s instead.", /* NOTE(review): "requrested" typo in message text */
666 gst_format_get_name(fmt));
673 return val / 1000000000.0; /* nanoseconds -> seconds */
676 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
679 EINA_LIST_FOREACH(ev->audio_streams, l, astream)
680 if (astream->length_time >= 0)
681 return astream->length_time;
683 EINA_LIST_FOREACH(ev->video_streams, l, vstream)
684 if (vstream->length_time >= 0)
685 return vstream->length_time;
/* Buffering level as a 0.0..1.0 fraction of the buffering query. */
691 em_buffer_size_get(void *video)
693 Emotion_Gstreamer_Video *ev;
701 if (!ev->pipeline) return 0.0;
703 query = gst_query_new_buffering(GST_FORMAT_DEFAULT);
704 if (gst_element_query(ev->pipeline, query))
705 gst_query_parse_buffering_percent(query, &busy, &percent);
709 gst_query_unref(query);
710 return ((float)(percent)) / 100.0;
/* Frame-rate numerator of the current video stream. */
714 em_fps_num_get(void *video)
716 Emotion_Gstreamer_Video *ev;
717 Emotion_Video_Stream *vstream;
719 ev = (Emotion_Gstreamer_Video *)video;
721 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
724 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
726 return vstream->fps_num;
/* Frame-rate denominator of the current video stream. */
732 em_fps_den_get(void *video)
734 Emotion_Gstreamer_Video *ev;
735 Emotion_Video_Stream *vstream;
737 ev = (Emotion_Gstreamer_Video *)video;
739 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
742 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
744 return vstream->fps_den;
/* Frame rate as a double (fps_num / fps_den). */
750 em_fps_get(void *video)
752 Emotion_Gstreamer_Video *ev;
753 Emotion_Video_Stream *vstream;
755 ev = (Emotion_Gstreamer_Video *)video;
757 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
760 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
762 return (double)vstream->fps_num / (double)vstream->fps_den;
/* Current playback position in seconds (also cached in ev->position). */
768 em_pos_get(void *video)
770 Emotion_Gstreamer_Video *ev;
776 fmt = GST_FORMAT_TIME;
778 if (!ev->pipeline) return 0.0;
780 ret = gst_element_query_position(ev->pipeline, &fmt, &val);
784 if (fmt != GST_FORMAT_TIME)
786 ERR("requrested position in time, but got %s instead.", /* NOTE(review): "requrested" typo in message text */
787 gst_format_get_name(fmt));
791 ev->position = val / 1000000000.0; /* nanoseconds -> seconds */
/* Record the requested visualization type on the player. */
796 em_vis_set(void *video,
799 Emotion_Gstreamer_Video *ev;
801 ev = (Emotion_Gstreamer_Video *)video;
/* Return the currently selected visualization type. */
807 em_vis_get(void *video)
809 Emotion_Gstreamer_Video *ev;
811 ev = (Emotion_Gstreamer_Video *)video;
/* A visualization is supported when its GStreamer element factory can
 * be found; EMOTION_VIS_NONE is handled as a special case up front. */
817 em_vis_supported(void *ef __UNUSED__, Emotion_Vis vis)
820 GstElementFactory *factory;
822 if (vis == EMOTION_VIS_NONE)
825 name = emotion_visualization_element_name_get(vis);
829 factory = gst_element_factory_find(name);
833 gst_object_unref(factory);
/* Aspect ratio of the video (ev->ratio, computed at parse time). */
838 em_ratio_get(void *video)
840 Emotion_Gstreamer_Video *ev;
842 ev = (Emotion_Gstreamer_Video *)video;
/* Non-zero when at least one video stream was found after parsing. */
848 em_video_handled(void *video)
850 Emotion_Gstreamer_Video *ev;
852 ev = (Emotion_Gstreamer_Video *)video;
854 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
856 if (!eina_list_count(ev->video_streams))
/* Non-zero when at least one audio stream was found after parsing. */
863 em_audio_handled(void *video)
865 Emotion_Gstreamer_Video *ev;
867 ev = (Emotion_Gstreamer_Video *)video;
869 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
871 if (!eina_list_count(ev->audio_streams))
/* Seekability query and per-frame completion hook (trivial here:
 * all parameters are __UNUSED__). */
878 em_seekable(void *video __UNUSED__)
884 em_frame_done(void *video __UNUSED__)
888 static Emotion_Format
/* Map the current video stream's fourcc to an Emotion pixel format. */
889 em_format_get(void *video)
891 Emotion_Gstreamer_Video *ev;
892 Emotion_Video_Stream *vstream;
894 ev = (Emotion_Gstreamer_Video *)video;
896 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
897 return EMOTION_FORMAT_NONE;
899 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
902 switch (vstream->fourcc)
904 case GST_MAKE_FOURCC('I', '4', '2', '0'):
905 return EMOTION_FORMAT_I420;
906 case GST_MAKE_FOURCC('Y', 'V', '1', '2'):
907 return EMOTION_FORMAT_YV12;
908 case GST_MAKE_FOURCC('Y', 'U', 'Y', '2'):
909 return EMOTION_FORMAT_YUY2;
910 case GST_MAKE_FOURCC('A', 'R', 'G', 'B'):
911 return EMOTION_FORMAT_BGRA;
913 return EMOTION_FORMAT_NONE; /* unknown fourcc */
916 return EMOTION_FORMAT_NONE; /* no video stream */
/* Report the decoded frame size of the current video stream. */
920 em_video_data_size_get(void *video, int *w, int *h)
922 Emotion_Gstreamer_Video *ev;
923 Emotion_Video_Stream *vstream;
925 ev = (Emotion_Gstreamer_Video *)video;
/* parse lazily if the stream list has not been populated yet */
927 if (ev->pipeline && (!ev->video_stream_nbr || !ev->video_streams))
928 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
931 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
935 *h = vstream->height;
/* Raw-data accessors and input-event hooks: not used by this backend
 * (every parameter is marked __UNUSED__). */
946 em_yuv_rows_get(void *video __UNUSED__,
949 unsigned char **yrows __UNUSED__,
950 unsigned char **urows __UNUSED__,
951 unsigned char **vrows __UNUSED__)
957 em_bgra_data_get(void *video __UNUSED__, unsigned char **bgra_data __UNUSED__)
963 em_event_feed(void *video __UNUSED__, int event __UNUSED__)
968 em_event_mouse_button_feed(void *video __UNUSED__, int button __UNUSED__, int x __UNUSED__, int y __UNUSED__)
973 em_event_mouse_move_feed(void *video __UNUSED__, int x __UNUSED__, int y __UNUSED__)
/* Number of video streams found by the pipeline parse. */
979 em_video_channel_count(void *video)
981 Emotion_Gstreamer_Video *ev;
983 ev = (Emotion_Gstreamer_Video *)video;
985 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
987 return eina_list_count(ev->video_streams);
/* Select a video channel (clamped to >= 0); actual switching is not
 * implemented yet. */
991 em_video_channel_set(void *video __UNUSED__,
992 int channel __UNUSED__)
995 Emotion_Gstreamer_Video *ev;
997 ev = (Emotion_Gstreamer_Video *)video;
999 if (channel < 0) channel = 0;
1001 /* FIXME: to be done... */
/* Currently selected video channel index. */
1005 em_video_channel_get(void *video)
1007 Emotion_Gstreamer_Video *ev;
1009 ev = (Emotion_Gstreamer_Video *)video;
1011 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
1013 return ev->video_stream_nbr;
/* Channel names are not provided by this backend. */
1017 em_video_channel_name_get(void *video __UNUSED__,
1018 int channel __UNUSED__)
/* Store the video-mute flag on the player. */
1024 em_video_channel_mute_set(void *video,
1027 Emotion_Gstreamer_Video *ev;
1029 ev = (Emotion_Gstreamer_Video *)video;
1031 ev->video_mute = mute;
/* Return the stored video-mute flag. */
1035 em_video_channel_mute_get(void *video)
1037 Emotion_Gstreamer_Video *ev;
1039 ev = (Emotion_Gstreamer_Video *)video;
1041 return ev->video_mute;
1044 /* Audio channels */
/* Number of audio streams found by the pipeline parse. */
1047 em_audio_channel_count(void *video)
1049 Emotion_Gstreamer_Video *ev;
1051 ev = (Emotion_Gstreamer_Video *)video;
1053 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
1055 return eina_list_count(ev->audio_streams);
/* Select an audio channel (clamped to >= -1); actual switching is not
 * implemented yet. */
1059 em_audio_channel_set(void *video __UNUSED__,
1060 int channel __UNUSED__)
1063 Emotion_Gstreamer_Video *ev;
1065 ev = (Emotion_Gstreamer_Video *)video;
1067 if (channel < -1) channel = -1;
1069 /* FIXME: to be done... */
/* Currently selected audio channel index. */
1073 em_audio_channel_get(void *video)
1075 Emotion_Gstreamer_Video *ev;
1077 ev = (Emotion_Gstreamer_Video *)video;
1079 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
1081 return ev->audio_stream_nbr;
/* Channel names are not provided by this backend. */
1085 em_audio_channel_name_get(void *video __UNUSED__,
1086 int channel __UNUSED__)
/* Pipeline "flags" bit enabling the audio path. */
1091 #define GST_PLAY_FLAG_AUDIO (1 << 1)
/* Mute by toggling the pipeline's "mute" property rather than tearing
 * the audio path down -- see the note below. */
1094 em_audio_channel_mute_set(void *video,
1097 /* NOTE: at first I wanted to completely shut down the audio path on mute,
1098 but that's not possible as the audio sink could be the clock source
1099 for the pipeline (at least that's the case on some of the hardware
1100 I have tested emotion on).
1102 Emotion_Gstreamer_Video *ev;
1104 ev = (Emotion_Gstreamer_Video *)video;
1106 if (!ev->pipeline) return ;
1108 ev->audio_mute = mute;
1110 g_object_set(G_OBJECT(ev->pipeline), "mute", !!mute, NULL);
/* Return the stored audio-mute flag. */
1114 em_audio_channel_mute_get(void *video)
1116 Emotion_Gstreamer_Video *ev;
1118 ev = (Emotion_Gstreamer_Video *)video;
1120 return ev->audio_mute;
/* Set playback volume through the pipeline's "volume" property. */
1124 em_audio_channel_volume_set(void *video,
1127 Emotion_Gstreamer_Video *ev;
1129 ev = (Emotion_Gstreamer_Video *)video;
1131 if (!ev->pipeline) return ;
1138 g_object_set(G_OBJECT(ev->pipeline), "volume", vol, NULL);
/* Return the current volume. */
1142 em_audio_channel_volume_get(void *video)
1144 Emotion_Gstreamer_Video *ev;
1146 ev = (Emotion_Gstreamer_Video *)video;
/* SPU (subtitle), chapter, playback-speed and eject controls are not
 * supported by this backend; all parameters are marked __UNUSED__. */
1154 em_spu_channel_count(void *video __UNUSED__)
1160 em_spu_channel_set(void *video __UNUSED__, int channel __UNUSED__)
1165 em_spu_channel_get(void *video __UNUSED__)
1171 em_spu_channel_name_get(void *video __UNUSED__, int channel __UNUSED__)
1177 em_spu_channel_mute_set(void *video __UNUSED__, int mute __UNUSED__)
1182 em_spu_channel_mute_get(void *video __UNUSED__)
1188 em_chapter_count(void *video __UNUSED__)
1194 em_chapter_set(void *video __UNUSED__, int chapter __UNUSED__)
1199 em_chapter_get(void *video __UNUSED__)
1205 em_chapter_name_get(void *video __UNUSED__, int chapter __UNUSED__)
1211 em_speed_set(void *video __UNUSED__, double speed __UNUSED__)
1216 em_speed_get(void *video __UNUSED__)
1222 em_eject(void *video __UNUSED__)
/* Return the cached metadata string for the requested 'meta' field
 * (filled in by _for_each_tag()), or NULL when unavailable.  The
 * returned string stays owned by ev->metadata. */
1228 em_meta_get(void *video, int meta)
1230 Emotion_Gstreamer_Video *ev;
1231 const char *str = NULL;
1233 ev = (Emotion_Gstreamer_Video *)video;
1235 if (!ev || !ev->metadata) return NULL;
1238 case META_TRACK_TITLE:
1239 str = ev->metadata->title;
1241 case META_TRACK_ARTIST:
1242 str = ev->metadata->artist;
1244 case META_TRACK_ALBUM:
1245 str = ev->metadata->album;
1247 case META_TRACK_YEAR:
1248 str = ev->metadata->year;
1250 case META_TRACK_GENRE:
1251 str = ev->metadata->genre;
1253 case META_TRACK_COMMENT:
1254 str = ev->metadata->comment;
1256 case META_TRACK_DISCID:
1257 str = ev->metadata->disc_id;
/* Request the high-priority (Xv) rendering path; permanently gives up
 * after repeated pipeline failures counted in priority_overide. */
1267 em_priority_set(void *video, Eina_Bool pri)
1269 Emotion_Gstreamer_Video *ev;
1272 if (priority_overide > 3) return ; /* If we failed too often to create that pipeline, let's not waste our time anymore */
/* Return whether the priority path is currently active. */
1277 em_priority_get(void *video)
1279 Emotion_Gstreamer_Video *ev;
/* Debug hook: log X window destruction events on stderr. */
1287 _ecore_event_x_destroy(void *data __UNUSED__, int type __UNUSED__, void *event __UNUSED__)
1289 Ecore_X_Event_Window_Destroy *ev = event;
1291 fprintf(stderr, "killed window: %x (%x)\n", ev->win, ev->event_win);
/* Emotion plugin entry point: register the log domain (once), init the
 * backend state, bring up ecore_x when available, and probe whether the
 * window manager supports the E video-overlay atoms needed for the
 * special Xv window path. */
1298 module_open(Evas_Object *obj,
1299 const Emotion_Video_Module **module,
1301 Emotion_Module_Options *opt)
1304 Ecore_X_Window *roots;
1311 if (_emotion_gstreamer_log_domain < 0)
1313 eina_threads_init();
1314 eina_log_threads_enable();
1315 _emotion_gstreamer_log_domain = eina_log_domain_register
1316 ("emotion-gstreamer", EINA_COLOR_LIGHTCYAN);
1317 if (_emotion_gstreamer_log_domain < 0)
1319 EINA_LOG_CRIT("Could not register log domain 'emotion-gstreamer'");
1324 if (!em_module.init(obj, video, opt))
1328 ecore_event_handler_add(ECORE_X_EVENT_WINDOW_DESTROY, _ecore_event_x_destroy, NULL);
1331 if (getenv("EMOTION_FPS_DEBUG")) debug_fps = EINA_TRUE;
1333 eina_threads_init();
1336 if (ecore_x_init(NULL) > 0)
1338 _ecore_x_available = EINA_TRUE;
1341 /* Check if the window manager is able to handle our special Xv window. */
1342 roots = _ecore_x_available ? ecore_x_window_root_list(&num) : NULL;
1343 if (roots && num > 0)
1345 Ecore_X_Window win, twin;
/* follow _NET_SUPPORTING_WM_CHECK: a compliant WM publishes a window
 * whose own property points back at itself */
1348 nwins = ecore_x_window_prop_window_get(roots[0],
1349 ECORE_X_ATOM_NET_SUPPORTING_WM_CHECK,
1353 nwins = ecore_x_window_prop_window_get(win,
1354 ECORE_X_ATOM_NET_SUPPORTING_WM_CHECK,
1356 if (nwins > 0 && twin == win)
1358 Ecore_X_Atom *supported;
1362 if (ecore_x_netwm_supported_get(roots[0], &supported, &supported_num))
1364 Eina_Bool parent = EINA_FALSE;
1365 Eina_Bool video_position = EINA_FALSE;
1367 for (i = 0; i < supported_num; ++i)
1369 if (supported[i] == ECORE_X_ATOM_E_VIDEO_PARENT)
1371 else if (supported[i] == ECORE_X_ATOM_E_VIDEO_POSITION)
1372 video_position = EINA_TRUE;
1373 if (parent && video_position)
/* WM advertises both atoms: it can manage our video windows */
1377 if (parent && video_position)
1379 window_manager_video = EINA_TRUE;
1388 *module = &em_module;
/* Plugin exit point: shut the backend down and release X/eina refs. */
1393 module_close(Emotion_Video_Module *module __UNUSED__,
1396 em_module.shutdown(video);
1399 if (_ecore_x_available)
1405 eina_threads_shutdown();
/* Register the static Evas video-sink plugin with GStreamer and the
 * "gstreamer" backend with Emotion core. */
1409 gstreamer_module_init(void)
1413 if (!gst_init_check(0, NULL, &error))
1415 EINA_LOG_CRIT("Could not init GStreamer");
1419 if (gst_plugin_register_static(GST_VERSION_MAJOR, GST_VERSION_MINOR,
1421 "video sink plugin for Emotion",
1422 gstreamer_plugin_init,
1427 "http://www.enlightenment.org/") == FALSE)
1429 EINA_LOG_CRIT("Could not load static gstreamer video sink for Emotion.");
1433 return _emotion_module_register("gstreamer", module_open, module_close);
/* Unregister the backend from Emotion core. */
1437 gstreamer_module_shutdown(void)
1439 _emotion_module_unregister("gstreamer");
/* When built as a shared module, hook init/shutdown into eina's module
 * loader. */
1444 #ifndef EMOTION_STATIC_BUILD_GSTREAMER
1446 EINA_MODULE_INIT(gstreamer_module_init);
1447 EINA_MODULE_SHUTDOWN(gstreamer_module_shutdown);
/* GstTagForeachFunc: copy the tags we care about (title, album, artist,
 * genre, comment, date, track number, CDDB disc id) into ev->metadata,
 * freeing any previous value first.  The strings produced by
 * gst_tag_list_get_string()/g_strdup_value_contents() become owned by
 * the metadata struct and are released in _free_metadata(). */
1452 _for_each_tag(GstTagList const* list,
1456 Emotion_Gstreamer_Video *ev;
1461 ev = (Emotion_Gstreamer_Video*)data;
1463 if (!ev || !ev->metadata) return;
1465 count = gst_tag_list_get_tag_size(list, tag);
1467 for (i = 0; i < count; i++)
1469 if (!strcmp(tag, GST_TAG_TITLE))
1472 g_free(ev->metadata->title); /* drop any previous value */
1473 if (gst_tag_list_get_string(list, GST_TAG_TITLE, &str))
1474 ev->metadata->title = str;
1476 ev->metadata->title = NULL;
1479 if (!strcmp(tag, GST_TAG_ALBUM))
1482 g_free(ev->metadata->album);
1483 if (gst_tag_list_get_string(list, GST_TAG_ALBUM, &str))
1484 ev->metadata->album = str;
1486 ev->metadata->album = NULL;
1489 if (!strcmp(tag, GST_TAG_ARTIST))
1492 g_free(ev->metadata->artist);
1493 if (gst_tag_list_get_string(list, GST_TAG_ARTIST, &str))
1494 ev->metadata->artist = str;
1496 ev->metadata->artist = NULL;
1499 if (!strcmp(tag, GST_TAG_GENRE))
1502 g_free(ev->metadata->genre);
1503 if (gst_tag_list_get_string(list, GST_TAG_GENRE, &str))
1504 ev->metadata->genre = str;
1506 ev->metadata->genre = NULL;
1509 if (!strcmp(tag, GST_TAG_COMMENT))
1512 g_free(ev->metadata->comment);
1513 if (gst_tag_list_get_string(list, GST_TAG_COMMENT, &str))
1514 ev->metadata->comment = str;
1516 ev->metadata->comment = NULL;
1519 if (!strcmp(tag, GST_TAG_DATE))
1523 g_free(ev->metadata->year);
/* GST_TAG_DATE is a boxed value, not a plain string: stringify it */
1524 date = gst_tag_list_get_value_index(list, GST_TAG_DATE, 0);
1526 str = g_strdup_value_contents(date);
1529 ev->metadata->year = str;
1533 if (!strcmp(tag, GST_TAG_TRACK_NUMBER))
1536 const GValue *track;
1537 g_free(ev->metadata->count);
1538 track = gst_tag_list_get_value_index(list, GST_TAG_TRACK_NUMBER, 0);
1540 str = g_strdup_value_contents(track);
1543 ev->metadata->count = str;
1547 #ifdef GST_TAG_CDDA_CDDB_DISCID
1548 if (!strcmp(tag, GST_TAG_CDDA_CDDB_DISCID))
1551 const GValue *discid;
1552 g_free(ev->metadata->disc_id);
1553 discid = gst_tag_list_get_value_index(list, GST_TAG_CDDA_CDDB_DISCID, 0);
1555 str = g_strdup_value_contents(discid);
1558 ev->metadata->disc_id = str;
/* Destructor for the metadata collected by _for_each_tag(). */
1567 _free_metadata(Emotion_Gstreamer_Metadata *m)
/* Idler callback: rebuild the pipeline from the saved URI after a sink
 * failure and re-attach the synchronous bus handler.  One-shot. */
1584 _em_restart_stream(void *data)
1586 Emotion_Gstreamer_Video *ev;
1590 ev->pipeline = gstreamer_video_sink_new(ev, ev->obj, ev->uri);
1594 ev->eos_bus = gst_pipeline_get_bus(GST_PIPELINE(ev->pipeline));
1597 ERR("could not get the bus");
1601 gst_bus_set_sync_handler(ev->eos_bus, _eos_sync_fct, ev);
1604 return ECORE_CALLBACK_CANCEL; /* run only once */
/* Main-loop side of bus handling: messages forwarded by _eos_sync_fct()
 * are unpacked here, where calling Emotion/Evas APIs is safe. */
1608 _eos_main_fct(void *data)
1610 Emotion_Gstreamer_Message *send;
1611 Emotion_Gstreamer_Video *ev;
/* deliver a deferred "playback started" before handling the message */
1618 if (ev->play_started && !ev->delete_me)
1620 _emotion_playback_started(ev->obj);
1621 ev->play_started = 0;
1624 switch (GST_MESSAGE_TYPE(msg))
1626 case GST_MESSAGE_EOS:
1630 _emotion_decode_stop(ev->obj);
1631 _emotion_playback_finished(ev->obj);
1634 case GST_MESSAGE_TAG:
1637 GstTagList *new_tags;
1638 gst_message_parse_tag(msg, &new_tags);
1641 gst_tag_list_foreach(new_tags,
1642 (GstTagForeachFunc)_for_each_tag,
1644 gst_tag_list_free(new_tags);
1648 case GST_MESSAGE_ASYNC_DONE:
1649 if (!ev->delete_me) _emotion_seek_done(ev->obj);
1651 case GST_MESSAGE_STREAM_STATUS:
1653 case GST_MESSAGE_ERROR:
/* sink failed: fall back to canvas rendering and rebuild the
 * pipeline from an idler */
1658 ERR("Switching back to canvas rendering.");
1659 ev->priority = EINA_FALSE;
1662 ecore_idler_add(_em_restart_stream, ev);
1666 ERR("bus say: %s [%i - %s]",
1667 GST_MESSAGE_SRC_NAME(msg),
1668 GST_MESSAGE_TYPE(msg),
1669 GST_MESSAGE_TYPE_NAME(msg));
1673 emotion_gstreamer_message_free(send);
1676 static GstBusSyncReply
/* Synchronous bus callback: per gst_bus_set_sync_handler() semantics it
 * runs on a GStreamer streaming thread, so it must not touch
 * Evas/Emotion directly.  Interesting messages are wrapped and pushed
 * to _eos_main_fct() on the main loop; every message is consumed here
 * (GST_BUS_DROP). */
1677 _eos_sync_fct(GstBus *bus __UNUSED__, GstMessage *msg, gpointer data)
1679 Emotion_Gstreamer_Video *ev = data;
1680 Emotion_Gstreamer_Message *send;
1682 switch (GST_MESSAGE_TYPE(msg))
1684 case GST_MESSAGE_EOS:
1685 case GST_MESSAGE_TAG:
1686 case GST_MESSAGE_ASYNC_DONE:
1687 case GST_MESSAGE_STREAM_STATUS:
1688 INF("bus say: %s [%i - %s]",
1689 GST_MESSAGE_SRC_NAME(msg),
1690 GST_MESSAGE_TYPE(msg),
1691 GST_MESSAGE_TYPE_NAME(msg));
1692 send = emotion_gstreamer_message_alloc(ev, msg);
1694 if (send) ecore_main_loop_thread_safe_call_async(_eos_main_fct, send);
1698 case GST_MESSAGE_STATE_CHANGED:
1700 GstState old_state, new_state;
1702 gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
1703 INF("Element %s changed state from %s to %s.",
1704 GST_OBJECT_NAME(msg->src),
1705 gst_element_state_get_name(old_state),
1706 gst_element_state_get_name(new_state));
1709 case GST_MESSAGE_ERROR:
1714 gst_message_parse_error(msg, &error, &debug);
1715 ERR("ERROR from element %s: %s", GST_OBJECT_NAME(msg->src), error->message);
1716 ERR("Debugging info: %s", (debug) ? debug : "none");
1717 g_error_free(error);
/* only errors from the Xv sink are forwarded to trigger the restart */
1720 if (strncmp(GST_OBJECT_NAME(msg->src), "xvimagesink", 11) == 0)
1722 send = emotion_gstreamer_message_alloc(ev, msg);
1724 if (send) ecore_main_loop_thread_safe_call_async(_eos_main_fct, send);
1728 case GST_MESSAGE_WARNING:
1733 gst_message_parse_warning(msg, &error, &debug);
1734 WRN("WARNING from element %s: %s", GST_OBJECT_NAME(msg->src), error->message);
1735 WRN("Debugging info: %s", (debug) ? debug : "none");
1736 g_error_free(error);
1741 WRN("bus say: %s [%i - %s]",
1742 GST_MESSAGE_SRC_NAME(msg),
1743 GST_MESSAGE_TYPE(msg),
1744 GST_MESSAGE_TYPE_NAME(msg));
1748 gst_message_unref(msg);
1750 return GST_BUS_DROP;
1754 _emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev,
1760 if (ev->pipeline_parsed)
1763 if (force && ev->threads)
1767 EINA_LIST_FREE(ev->threads, t)
1768 ecore_thread_cancel(t);
1774 res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
1775 if (res == GST_STATE_CHANGE_NO_PREROLL)
1777 gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
1779 res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
1782 /** NOTE: you need to set: GST_DEBUG_DUMP_DOT_DIR=/tmp EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp' */
1783 /** then call dot -Tpng -oemotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
1784 if (getenv("EMOTION_GSTREAMER_DOT"))
1785 GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline),
1786 GST_DEBUG_GRAPH_SHOW_ALL,
1787 getenv("EMOTION_GSTREAMER_DOT"));
1789 if (!(res == GST_STATE_CHANGE_SUCCESS
1790 || res == GST_STATE_CHANGE_NO_PREROLL))
1792 ERR("Unable to get GST_CLOCK_TIME_NONE.");
1796 g_object_get(G_OBJECT(ev->pipeline),
1797 "n-audio", &ev->audio_stream_nbr,
1798 "n-video", &ev->video_stream_nbr,
1801 if ((ev->video_stream_nbr == 0) && (ev->audio_stream_nbr == 0))
1803 ERR("No audio nor video stream found");
1808 for (i = 0; i < ev->video_stream_nbr; i++)
1810 Emotion_Video_Stream *vstream;
1813 GstStructure *structure;
1818 gdouble length_time = 0.0;
1825 g_signal_emit_by_name(ev->pipeline, "get-video-pad", i, &pad);
1829 caps = gst_pad_get_negotiated_caps(pad);
1832 structure = gst_caps_get_structure(caps, 0);
1833 str = gst_caps_to_string(caps);
1835 if (!gst_structure_get_int(structure, "width", &width))
1837 if (!gst_structure_get_int(structure, "height", &height))
1839 if (!gst_structure_get_fraction(structure, "framerate", &fps_num, &fps_den))
1842 if (g_str_has_prefix(str, "video/x-raw-yuv"))
1844 val = gst_structure_get_value(structure, "format");
1845 fourcc = gst_value_get_fourcc(val);
1847 else if (g_str_has_prefix(str, "video/x-raw-rgb"))
1848 fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
1852 query = gst_query_new_duration(GST_FORMAT_TIME);
1853 if (gst_pad_peer_query(pad, query))
1857 gst_query_parse_duration(query, NULL, &t);
1858 length_time = (double)t / (double)GST_SECOND;
1863 vstream = emotion_video_stream_new(ev);
1864 if (!vstream) goto unref_query_v;
1866 vstream->length_time = length_time;
1867 vstream->width = width;
1868 vstream->height = height;
1869 vstream->fps_num = fps_num;
1870 vstream->fps_den = fps_den;
1871 vstream->fourcc = fourcc;
1875 gst_query_unref(query);
1877 gst_caps_unref(caps);
1879 gst_object_unref(pad);
1883 for (i = 0; i < ev->audio_stream_nbr; i++)
1885 Emotion_Audio_Stream *astream;
1888 GstStructure *structure;
1891 gdouble length_time = 0.0;
1895 g_signal_emit_by_name(ev->pipeline, "get-audio-pad", i, &pad);
1899 caps = gst_pad_get_negotiated_caps(pad);
1902 structure = gst_caps_get_structure(caps, 0);
1904 if (!gst_structure_get_int(structure, "channels", &channels))
1906 if (!gst_structure_get_int(structure, "rate", &samplerate))
1909 query = gst_query_new_duration(GST_FORMAT_TIME);
1910 if (gst_pad_peer_query(pad, query))
1914 gst_query_parse_duration(query, NULL, &t);
1915 length_time = (double)t / (double)GST_SECOND;
1920 astream = calloc(1, sizeof(Emotion_Audio_Stream));
1921 if (!astream) continue;
1922 ev->audio_streams = eina_list_append(ev->audio_streams, astream);
1923 if (eina_error_get())
1929 astream->length_time = length_time;
1930 astream->channels = channels;
1931 astream->samplerate = samplerate;
1934 gst_query_unref(query);
1936 gst_caps_unref(caps);
1938 gst_object_unref(pad);
1941 /* Visualization sink */
1942 if (ev->video_stream_nbr == 0)
1944 GstElement *vis = NULL;
1945 Emotion_Video_Stream *vstream;
1946 Emotion_Audio_Stream *astream;
1948 const char *vis_name;
1950 if (!(vis_name = emotion_visualization_element_name_get(ev->vis)))
1952 WRN("pb vis name %d", ev->vis);
1956 astream = eina_list_data_get(ev->audio_streams);
1958 vis = gst_element_factory_make(vis_name, "vissink");
1959 vstream = emotion_video_stream_new(ev);
1963 DBG("could not create visualization stream");
1965 vstream->length_time = astream->length_time;
1966 vstream->width = 320;
1967 vstream->height = 200;
1968 vstream->fps_num = 25;
1969 vstream->fps_den = 1;
1970 vstream->fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
1972 g_object_set(G_OBJECT(ev->pipeline), "vis-plugin", vis, NULL);
1973 g_object_get(G_OBJECT(ev->pipeline), "flags", &flags, NULL);
1974 flags |= 0x00000008;
1975 g_object_set(G_OBJECT(ev->pipeline), "flags", flags, NULL);
1980 ev->video_stream_nbr = eina_list_count(ev->video_streams);
1981 ev->audio_stream_nbr = eina_list_count(ev->audio_streams);
1983 if (ev->video_stream_nbr == 1)
1985 Emotion_Video_Stream *vstream;
1987 vstream = eina_list_data_get(ev->video_streams);
1988 ev->ratio = (double)vstream->width / (double)vstream->height;
1989 _emotion_frame_resize(ev->obj, vstream->width, vstream->height, ev->ratio);
1993 /* on recapitule : */
1994 Emotion_Video_Stream *vstream;
1995 Emotion_Audio_Stream *astream;
1997 vstream = eina_list_data_get(ev->video_streams);
2000 DBG("video size=%dx%d, fps=%d/%d, "
2001 "fourcc=%"GST_FOURCC_FORMAT", length=%"GST_TIME_FORMAT,
2002 vstream->width, vstream->height, vstream->fps_num, vstream->fps_den,
2003 GST_FOURCC_ARGS(vstream->fourcc),
2004 GST_TIME_ARGS((guint64)(vstream->length_time * GST_SECOND)));
2007 astream = eina_list_data_get(ev->audio_streams);
2010 DBG("audio channels=%d, rate=%d, length=%"GST_TIME_FORMAT,
2011 astream->channels, astream->samplerate,
2012 GST_TIME_ARGS((guint64)(astream->length_time * GST_SECOND)));
2017 _free_metadata(ev->metadata);
2018 ev->metadata = calloc(1, sizeof(Emotion_Gstreamer_Metadata));
2020 ev->pipeline_parsed = EINA_TRUE;
2022 em_audio_channel_volume_set(ev, ev->volume);
2023 em_audio_channel_mute_set(ev, ev->audio_mute);
2025 if (ev->play_started)
2027 _emotion_playback_started(ev->obj);
2028 ev->play_started = 0;
2031 _emotion_open_done(ev->obj);