18 #include <glib-object.h>
19 #include <gst/video/gstvideosink.h>
20 #include <gst/video/video.h>
24 # include <Ecore_Evas.h>
25 # ifdef HAVE_XOVERLAY_H
26 # include <gst/interfaces/xoverlay.h>
31 #include "emotion_private.h"
32 #include "emotion_gstreamer.h"
/* Module-wide state shared across this backend. */
34 Eina_Bool window_manager_video = EINA_FALSE; /* EINA_TRUE when the WM advertises E_VIDEO_PARENT + E_VIDEO_POSITION (set in module_open) */
35 int _emotion_gstreamer_log_domain = -1; /* eina log domain id; registered lazily in module_open */
36 Eina_Bool debug_fps = EINA_FALSE; /* enabled via the EMOTION_FPS_DEBUG environment variable */
37 Eina_Bool _ecore_x_available = EINA_FALSE; /* set when ecore_x_init() succeeds in module_open */
39 /* Callbacks to get the eos */
40 static void _for_each_tag (GstTagList const* list, gchar const* tag, void *data);
41 static void _free_metadata (Emotion_Gstreamer_Metadata *m);
/* Emotion video-module entry points. All take the opaque void *video,
 * which is the Emotion_Gstreamer_Video * created by em_init(). */
45 static unsigned char em_init (Evas_Object *obj,
47 Emotion_Module_Options *opt);
49 static unsigned char em_file_open (const char *file,
53 static void em_file_close (void *video);
55 static void em_play (void *video,
58 static void em_stop (void *video);
60 static void em_size_get (void *video,
64 static void em_pos_set (void *video,
68 static double em_len_get (void *video);
70 static double em_buffer_size_get (void *video);
72 static int em_fps_num_get (void *video);
74 static int em_fps_den_get (void *video);
76 static double em_fps_get (void *video);
78 static double em_pos_get (void *video);
80 static void em_vis_set (void *video,
83 static Emotion_Vis em_vis_get (void *video);
85 static Eina_Bool em_vis_supported (void *video,
88 static double em_ratio_get (void *video);
90 static int em_video_handled (void *video);
92 static int em_audio_handled (void *video);
94 static int em_seekable (void *video);
96 static void em_frame_done (void *video);
98 static Emotion_Format em_format_get (void *video);
100 static void em_video_data_size_get (void *video,
/* Frame-data accessors (unused stubs in this backend — see definitions below). */
104 static int em_yuv_rows_get (void *video,
107 unsigned char **yrows,
108 unsigned char **urows,
109 unsigned char **vrows);
111 static int em_bgra_data_get (void *video,
112 unsigned char **bgra_data);
114 static void em_event_feed (void *video,
117 static void em_event_mouse_button_feed (void *video,
122 static void em_event_mouse_move_feed (void *video,
/* Video channel (stream) selection/introspection. */
126 static int em_video_channel_count (void *video);
128 static void em_video_channel_set (void *video,
131 static int em_video_channel_get (void *video);
133 static const char *em_video_channel_name_get (void *video,
136 static void em_video_channel_mute_set (void *video,
139 static int em_video_channel_mute_get (void *video);
/* Audio channel selection/volume/mute. */
141 static int em_audio_channel_count (void *video);
143 static void em_audio_channel_set (void *video,
146 static int em_audio_channel_get (void *video);
148 static const char *em_audio_channel_name_get (void *video,
151 static void em_audio_channel_mute_set (void *video,
154 static int em_audio_channel_mute_get (void *video);
156 static void em_audio_channel_volume_set (void *video,
159 static double em_audio_channel_volume_get (void *video);
/* SPU (subtitle) and chapter APIs — stub implementations below. */
161 static int em_spu_channel_count (void *video);
163 static void em_spu_channel_set (void *video,
166 static int em_spu_channel_get (void *video);
168 static const char *em_spu_channel_name_get (void *video,
171 static void em_spu_channel_mute_set (void *video,
174 static int em_spu_channel_mute_get (void *video);
176 static int em_chapter_count (void *video);
178 static void em_chapter_set (void *video,
181 static int em_chapter_get (void *video);
183 static const char *em_chapter_name_get (void *video,
186 static void em_speed_set (void *video,
189 static double em_speed_get (void *video);
191 static int em_eject (void *video);
193 static const char *em_meta_get (void *video,
196 static void em_priority_set (void *video,
198 static Eina_Bool em_priority_get (void *video);
/* Synchronous GstBus handler: runs on the streaming thread and forwards
 * interesting messages to the main loop (see _eos_sync_fct below). */
200 static GstBusSyncReply _eos_sync_fct(GstBus *bus,
204 /* Module interface */
/* Function-pointer table handed to the Emotion core via module_open();
 * one slot per backend capability, in the order Emotion expects. */
206 static Emotion_Video_Module em_module =
209 em_shutdown, /* shutdown */
210 em_file_open, /* file_open */
211 em_file_close, /* file_close */
214 em_size_get, /* size_get */
215 em_pos_set, /* pos_set */
216 em_len_get, /* len_get */
217 em_buffer_size_get, /* buffer_size_get */
218 em_fps_num_get, /* fps_num_get */
219 em_fps_den_get, /* fps_den_get */
220 em_fps_get, /* fps_get */
221 em_pos_get, /* pos_get */
222 em_vis_set, /* vis_set */
223 em_vis_get, /* vis_get */
224 em_vis_supported, /* vis_supported */
225 em_ratio_get, /* ratio_get */
226 em_video_handled, /* video_handled */
227 em_audio_handled, /* audio_handled */
228 em_seekable, /* seekable */
229 em_frame_done, /* frame_done */
230 em_format_get, /* format_get */
231 em_video_data_size_get, /* video_data_size_get */
232 em_yuv_rows_get, /* yuv_rows_get */
233 em_bgra_data_get, /* bgra_data_get */
234 em_event_feed, /* event_feed */
235 em_event_mouse_button_feed, /* event_mouse_button_feed */
236 em_event_mouse_move_feed, /* event_mouse_move_feed */
237 em_video_channel_count, /* video_channel_count */
238 em_video_channel_set, /* video_channel_set */
239 em_video_channel_get, /* video_channel_get */
240 em_video_channel_name_get, /* video_channel_name_get */
241 em_video_channel_mute_set, /* video_channel_mute_set */
242 em_video_channel_mute_get, /* video_channel_mute_get */
243 em_audio_channel_count, /* audio_channel_count */
244 em_audio_channel_set, /* audio_channel_set */
245 em_audio_channel_get, /* audio_channel_get */
246 em_audio_channel_name_get, /* audio_channel_name_get */
247 em_audio_channel_mute_set, /* audio_channel_mute_set */
248 em_audio_channel_mute_get, /* audio_channel_mute_get */
249 em_audio_channel_volume_set, /* audio_channel_volume_set */
250 em_audio_channel_volume_get, /* audio_channel_volume_get */
251 em_spu_channel_count, /* spu_channel_count */
252 em_spu_channel_set, /* spu_channel_set */
253 em_spu_channel_get, /* spu_channel_get */
254 em_spu_channel_name_get, /* spu_channel_name_get */
255 em_spu_channel_mute_set, /* spu_channel_mute_set */
256 em_spu_channel_mute_get, /* spu_channel_mute_get */
257 em_chapter_count, /* chapter_count */
258 em_chapter_set, /* chapter_set */
259 em_chapter_get, /* chapter_get */
260 em_chapter_name_get, /* chapter_name_get */
261 em_speed_set, /* speed_set */
262 em_speed_get, /* speed_get */
263 em_eject, /* eject */
264 em_meta_get, /* meta_get */
265 em_priority_set, /* priority_set */
266 em_priority_get, /* priority_get */
/* Failure counter for priority (Xv) pipelines; once it exceeds 3,
 * em_priority_set() stops retrying (sic: "overide" is the historical
 * spelling; renaming would touch code, not just comments). */
270 static int priority_overide = 0;
/* Allocate a zeroed Emotion_Video_Stream and append it to ev->video_streams.
 * Returns the new stream, or NULL when ev is NULL or allocation fails.
 * Uses eina_error_get() after the append to detect list-append failure. */
272 static Emotion_Video_Stream *
273 emotion_video_stream_new(Emotion_Gstreamer_Video *ev)
275 Emotion_Video_Stream *vstream;
277 if (!ev) return NULL;
279 vstream = (Emotion_Video_Stream *)calloc(1, sizeof(Emotion_Video_Stream));
280 if (!vstream) return NULL;
282 ev->video_streams = eina_list_append(ev->video_streams, vstream);
283 if (eina_error_get())
/* Map an Emotion_Vis enum value to the name of the GStreamer element
 * factory implementing that visualization (consumed by em_vis_supported()
 * and by the visualization setup in the pipeline parser).
 * NOTE: several factory names intentionally differ in spelling from the
 * enum, e.g. LV_ANALYSER -> "libvisual_lv_analyzer" and
 * PLASMA -> "libvisual_plazma" — these match the upstream plugin names. */
292 emotion_visualization_element_name_get(Emotion_Vis visualisation)
294 switch (visualisation)
296 case EMOTION_VIS_NONE:
298 case EMOTION_VIS_GOOM:
300 case EMOTION_VIS_LIBVISUAL_BUMPSCOPE:
301 return "libvisual_bumpscope";
302 case EMOTION_VIS_LIBVISUAL_CORONA:
303 return "libvisual_corona";
304 case EMOTION_VIS_LIBVISUAL_DANCING_PARTICLES:
305 return "libvisual_dancingparticles";
306 case EMOTION_VIS_LIBVISUAL_GDKPIXBUF:
307 return "libvisual_gdkpixbuf";
308 case EMOTION_VIS_LIBVISUAL_G_FORCE:
309 return "libvisual_G-Force";
310 case EMOTION_VIS_LIBVISUAL_GOOM:
311 return "libvisual_goom";
312 case EMOTION_VIS_LIBVISUAL_INFINITE:
313 return "libvisual_infinite";
314 case EMOTION_VIS_LIBVISUAL_JAKDAW:
315 return "libvisual_jakdaw";
316 case EMOTION_VIS_LIBVISUAL_JESS:
317 return "libvisual_jess";
318 case EMOTION_VIS_LIBVISUAL_LV_ANALYSER:
319 return "libvisual_lv_analyzer";
320 case EMOTION_VIS_LIBVISUAL_LV_FLOWER:
321 return "libvisual_lv_flower";
322 case EMOTION_VIS_LIBVISUAL_LV_GLTEST:
323 return "libvisual_lv_gltest";
324 case EMOTION_VIS_LIBVISUAL_LV_SCOPE:
325 return "libvisual_lv_scope";
326 case EMOTION_VIS_LIBVISUAL_MADSPIN:
327 return "libvisual_madspin";
328 case EMOTION_VIS_LIBVISUAL_NEBULUS:
329 return "libvisual_nebulus";
330 case EMOTION_VIS_LIBVISUAL_OINKSIE:
331 return "libvisual_oinksie";
332 case EMOTION_VIS_LIBVISUAL_PLASMA:
333 return "libvisual_plazma";
/* Backend init: allocate the per-object Emotion_Gstreamer_Video state,
 * initialize GStreamer (gst_init_check), and set conservative defaults
 * (no visualization, playback not started). The allocated state is what
 * every other em_* entry point receives as its void *video. */
340 em_init(Evas_Object *obj,
341 void **emotion_video,
342 Emotion_Module_Options *opt __UNUSED__)
344 Emotion_Gstreamer_Video *ev;
350 ev = calloc(1, sizeof(Emotion_Gstreamer_Video));
355 /* Initialization of gstreamer */
356 if (!gst_init_check(NULL, NULL, &error))
361 ev->vis = EMOTION_VIS_NONE;
363 ev->play_started = 0;
364 ev->delete_me = EINA_FALSE;
/* Tear down all GStreamer/X resources owned by ev: pending buffer, bus,
 * metadata, last frame, the video surface binding, the sink/pipeline and
 * its pads, the Xv window, and finally both stream lists. */
378 em_cleanup(Emotion_Gstreamer_Video *ev)
380 Emotion_Audio_Stream *astream;
381 Emotion_Video_Stream *vstream;
385 emotion_gstreamer_buffer_free(ev->send);
391 gst_object_unref(GST_OBJECT(ev->eos_bus));
397 _free_metadata(ev->metadata);
403 gst_buffer_unref(ev->last_buffer);
404 ev->last_buffer = NULL;
/* Detach the evas image from the video surface before the sink dies. */
409 evas_object_image_video_surface_set(emotion_object_image_get(ev->obj), NULL);
410 ev->stream = EINA_TRUE;
415 gstreamer_video_sink_new(ev, ev->obj, NULL);
/* Clear back-references held by the evas sink so it cannot touch ev
 * after we drop the pipeline. */
417 g_object_set(G_OBJECT(ev->esink), "ev", NULL, NULL);
418 g_object_set(G_OBJECT(ev->esink), "evas-object", NULL, NULL);
419 gst_element_set_state(ev->pipeline, GST_STATE_NULL);
420 gst_object_unref(ev->pipeline);
425 if (ev->teepad) gst_object_unref(ev->teepad);
427 if (ev->xvpad) gst_object_unref(ev->xvpad);
431 fprintf(stderr, "destroying window: %i\n", ev->win);
432 if (ev->win) ecore_x_window_free(ev->win);
/* Free every remaining stream descriptor. */
437 EINA_LIST_FREE(ev->audio_streams, astream)
439 EINA_LIST_FREE(ev->video_streams, vstream)
/* Module-level teardown for one video handle. Cancels any outstanding
 * worker threads and marks the handle for deletion; when in-flight work
 * remains (ev->in != ev->out) the actual free is deferred until the
 * pending callbacks drain. */
444 em_shutdown(void *video)
446 Emotion_Gstreamer_Video *ev;
448 ev = (Emotion_Gstreamer_Video *)video;
456 EINA_LIST_FREE(ev->threads, t)
457 ecore_thread_cancel(t);
459 ev->delete_me = EINA_TRUE;
463 if (ev->in != ev->out)
465 ev->delete_me = EINA_TRUE;
/* Open a media file: normalize the path into a URI (prepending "file://"
 * and, when relative, the cwd; Windows drive paths like C:/ get an extra
 * leading '/'), build the playback pipeline via gstreamer_video_sink_new(),
 * and install the synchronous bus handler (_eos_sync_fct). */
478 em_file_open(const char *file,
482 Emotion_Gstreamer_Video *ev;
483 Eina_Strbuf *sbuf = NULL;
486 ev = (Emotion_Gstreamer_Video *)video;
488 if (!file) return EINA_FALSE;
/* No scheme ("://") present: treat as a local path and build a file:// URI. */
489 if (strstr(file, "://") == NULL)
491 sbuf = eina_strbuf_new();
492 eina_strbuf_append(sbuf, "file://");
493 if (strncmp(file, "./", 2) == 0)
495 if (strstr(file, ":/") != NULL)
496 { /* We absolutely need file:///C:/ under Windows, so adding it here */
497 eina_strbuf_append(sbuf, "/");
499 else if (*file != '/')
/* Relative path: prefix the current working directory. */
503 if (getcwd(tmp, PATH_MAX))
505 eina_strbuf_append(sbuf, tmp);
506 eina_strbuf_append(sbuf, "/");
509 eina_strbuf_append(sbuf, file);
512 ev->play_started = 0;
513 ev->pipeline_parsed = 0;
515 uri = sbuf ? eina_strbuf_string_get(sbuf) : file;
516 DBG("setting file to '%s'", uri);
517 ev->pipeline = gstreamer_video_sink_new(ev, obj, uri);
518 if (sbuf) eina_strbuf_free(sbuf);
523 ev->eos_bus = gst_pipeline_get_bus(GST_PIPELINE(ev->pipeline));
526 ERR("could not get the bus");
/* Bus messages are intercepted on the streaming thread and relayed to
 * the main loop by _eos_sync_fct. */
530 gst_bus_set_sync_handler(ev->eos_bus, _eos_sync_fct, ev);
/* Close the current file: cancel worker threads and reset parse/playback
 * state so a subsequent em_file_open starts clean. */
541 em_file_close(void *video)
543 Emotion_Gstreamer_Video *ev;
545 ev = (Emotion_Gstreamer_Video *)video;
553 EINA_LIST_FREE(ev->threads, t)
554 ecore_thread_cancel(t);
559 ev->pipeline_parsed = EINA_FALSE;
560 ev->play_started = 0;
/* em_play (signature truncated in this view): start playback at the
 * current position (the pos argument is unused). If the pipeline has
 * already been parsed, switch it to PLAYING immediately; otherwise just
 * record play_started so the parse step starts playback later. */
565 double pos __UNUSED__)
567 Emotion_Gstreamer_Video *ev;
569 ev = (Emotion_Gstreamer_Video *)video;
570 if (!ev->pipeline) return ;
572 if (ev->pipeline_parsed)
573 gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
575 ev->play_started = 1;
/* em_stop: pause the pipeline (PAUSED keeps the preroll, unlike NULL). */
581 Emotion_Gstreamer_Video *ev;
583 ev = (Emotion_Gstreamer_Video *)video;
585 if (!ev->pipeline) return ;
587 if (ev->pipeline_parsed)
588 gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
/* Report the current video stream's dimensions; both out-params are
 * optional. Falls back to 0x0 when the pipeline cannot be parsed or no
 * video stream is selected (video_stream_nbr is 1-based, hence the -1). */
593 em_size_get(void *video,
597 Emotion_Gstreamer_Video *ev;
598 Emotion_Video_Stream *vstream;
600 ev = (Emotion_Gstreamer_Video *)video;
602 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
605 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
608 if (width) *width = vstream->width;
609 if (height) *height = vstream->height;
615 if (width) *width = 0;
616 if (height) *height = 0;
/* Seek to an absolute position (seconds). The pipeline is paused around
 * the flushing, accurate seek and then resumed. */
620 em_pos_set(void *video,
623 Emotion_Gstreamer_Video *ev;
626 ev = (Emotion_Gstreamer_Video *)video;
628 if (!ev->pipeline) return ;
631 res = gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
633 res = gst_element_seek(ev->pipeline, 1.0,
635 GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH,
637 (gint64)(pos * (double)GST_SECOND),
638 GST_SEEK_TYPE_NONE, -1);
641 res = gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
/* Total media length in seconds. Primary source: a TIME duration query on
 * the pipeline (nanoseconds / 1e9). If that fails, fall back to the first
 * stream that recorded a non-negative length_time during pipeline parse.
 * NOTE(review): "requrested" typo lives in the runtime DBG string and is
 * left untouched here. */
645 em_len_get(void *video)
647 Emotion_Gstreamer_Video *ev;
648 Emotion_Video_Stream *vstream;
649 Emotion_Audio_Stream *astream;
656 fmt = GST_FORMAT_TIME;
658 if (!ev->pipeline) return 0.0;
660 ret = gst_element_query_duration(ev->pipeline, &fmt, &val);
664 if (fmt != GST_FORMAT_TIME)
666 DBG("requrested duration in time, but got %s instead.",
667 gst_format_get_name(fmt));
674 return val / 1000000000.0;
677 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
680 EINA_LIST_FOREACH(ev->audio_streams, l, astream)
681 if (astream->length_time >= 0)
682 return astream->length_time;
684 EINA_LIST_FOREACH(ev->video_streams, l, vstream)
685 if (vstream->length_time >= 0)
686 return vstream->length_time;
/* Buffering level as a 0.0-1.0 fraction, via a GStreamer buffering query. */
692 em_buffer_size_get(void *video)
694 Emotion_Gstreamer_Video *ev;
702 if (!ev->pipeline) return 0.0;
704 query = gst_query_new_buffering(GST_FORMAT_DEFAULT);
705 if (gst_element_query(ev->pipeline, query))
706 gst_query_parse_buffering_percent(query, &busy, &percent);
710 gst_query_unref(query);
711 return ((float)(percent)) / 100.0;
/* Framerate numerator of the current video stream (0 on failure paths
 * elided from this view). */
715 em_fps_num_get(void *video)
717 Emotion_Gstreamer_Video *ev;
718 Emotion_Video_Stream *vstream;
720 ev = (Emotion_Gstreamer_Video *)video;
722 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
725 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
727 return vstream->fps_num;
/* Framerate denominator of the current video stream. */
733 em_fps_den_get(void *video)
735 Emotion_Gstreamer_Video *ev;
736 Emotion_Video_Stream *vstream;
738 ev = (Emotion_Gstreamer_Video *)video;
740 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
743 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
745 return vstream->fps_den;
/* Framerate as a double (num/den) of the current video stream. */
751 em_fps_get(void *video)
753 Emotion_Gstreamer_Video *ev;
754 Emotion_Video_Stream *vstream;
756 ev = (Emotion_Gstreamer_Video *)video;
758 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
761 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
763 return (double)vstream->fps_num / (double)vstream->fps_den;
/* Current playback position in seconds, from a TIME position query;
 * also caches it in ev->position. NOTE(review): "requrested" typo in the
 * runtime ERR string, deliberately not modified here. */
769 em_pos_get(void *video)
771 Emotion_Gstreamer_Video *ev;
777 fmt = GST_FORMAT_TIME;
779 if (!ev->pipeline) return 0.0;
781 ret = gst_element_query_position(ev->pipeline, &fmt, &val);
785 if (fmt != GST_FORMAT_TIME)
787 ERR("requrested position in time, but got %s instead.",
788 gst_format_get_name(fmt));
792 ev->position = val / 1000000000.0;
/* Record the requested visualization (applied when the pipeline is
 * (re)built; body partially elided in this view). */
797 em_vis_set(void *video,
800 Emotion_Gstreamer_Video *ev;
802 ev = (Emotion_Gstreamer_Video *)video;
/* Return the currently configured visualization. */
808 em_vis_get(void *video)
810 Emotion_Gstreamer_Video *ev;
812 ev = (Emotion_Gstreamer_Video *)video;
/* A visualization is supported iff a GStreamer element factory with the
 * mapped name exists. EMOTION_VIS_NONE is handled up front; the factory
 * reference obtained from gst_element_factory_find() is released. */
818 em_vis_supported(void *ef __UNUSED__, Emotion_Vis vis)
821 GstElementFactory *factory;
823 if (vis == EMOTION_VIS_NONE)
826 name = emotion_visualization_element_name_get(vis);
830 factory = gst_element_factory_find(name);
834 gst_object_unref(factory);
/* Aspect ratio of the current video (value computed during pipeline
 * parse; return elided from this view). */
839 em_ratio_get(void *video)
841 Emotion_Gstreamer_Video *ev;
843 ev = (Emotion_Gstreamer_Video *)video;
/* Non-zero when at least one video stream was discovered. */
849 em_video_handled(void *video)
851 Emotion_Gstreamer_Video *ev;
853 ev = (Emotion_Gstreamer_Video *)video;
855 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
857 if (!eina_list_count(ev->video_streams))
/* Non-zero when at least one audio stream was discovered. */
864 em_audio_handled(void *video)
866 Emotion_Gstreamer_Video *ev;
868 ev = (Emotion_Gstreamer_Video *)video;
870 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
872 if (!eina_list_count(ev->audio_streams))
/* Seekability is reported unconditionally (no per-stream probing here). */
879 em_seekable(void *video __UNUSED__)
/* Frame-done notification: intentionally a no-op in this backend. */
885 em_frame_done(void *video __UNUSED__)
/* Translate the current video stream's FOURCC into an Emotion_Format.
 * Unknown FOURCCs, parse failure, or a missing stream yield
 * EMOTION_FORMAT_NONE. */
889 static Emotion_Format
890 em_format_get(void *video)
892 Emotion_Gstreamer_Video *ev;
893 Emotion_Video_Stream *vstream;
895 ev = (Emotion_Gstreamer_Video *)video;
897 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
898 return EMOTION_FORMAT_NONE;
900 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
903 switch (vstream->fourcc)
905 case GST_MAKE_FOURCC('I', '4', '2', '0'):
906 return EMOTION_FORMAT_I420;
907 case GST_MAKE_FOURCC('Y', 'V', '1', '2'):
908 return EMOTION_FORMAT_YV12;
909 case GST_MAKE_FOURCC('Y', 'U', 'Y', '2'):
910 return EMOTION_FORMAT_YUY2;
/* ARGB caps are mapped to Emotion's BGRA format (byte-order naming
 * differs between the two APIs). */
911 case GST_MAKE_FOURCC('A', 'R', 'G', 'B'):
912 return EMOTION_FORMAT_BGRA;
914 return EMOTION_FORMAT_NONE;
917 return EMOTION_FORMAT_NONE;
/* Report the decoded frame size. Re-parses the pipeline when no stream
 * info is available yet; failure paths elided from this view. */
921 em_video_data_size_get(void *video, int *w, int *h)
923 Emotion_Gstreamer_Video *ev;
924 Emotion_Video_Stream *vstream;
926 ev = (Emotion_Gstreamer_Video *)video;
928 if (ev->pipeline && (!ev->video_stream_nbr || !ev->video_streams))
929 if (!_emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE))
932 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
936 *h = vstream->height;
/* The following entry points are deliberate no-op stubs in this backend:
 * frame data is delivered through the evas video sink, not pulled via
 * these accessors, and input events are not forwarded to the pipeline. */
947 em_yuv_rows_get(void *video __UNUSED__,
950 unsigned char **yrows __UNUSED__,
951 unsigned char **urows __UNUSED__,
952 unsigned char **vrows __UNUSED__)
958 em_bgra_data_get(void *video __UNUSED__, unsigned char **bgra_data __UNUSED__)
964 em_event_feed(void *video __UNUSED__, int event __UNUSED__)
969 em_event_mouse_button_feed(void *video __UNUSED__, int button __UNUSED__, int x __UNUSED__, int y __UNUSED__)
974 em_event_mouse_move_feed(void *video __UNUSED__, int x __UNUSED__, int y __UNUSED__)
/* Number of discovered video streams. */
980 em_video_channel_count(void *video)
982 Emotion_Gstreamer_Video *ev;
984 ev = (Emotion_Gstreamer_Video *)video;
986 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
988 return eina_list_count(ev->video_streams);
/* Select a video channel — clamped to >= 0 but otherwise unimplemented
 * (see the FIXME below). */
992 em_video_channel_set(void *video __UNUSED__,
993 int channel __UNUSED__)
996 Emotion_Gstreamer_Video *ev;
998 ev = (Emotion_Gstreamer_Video *)video;
1000 if (channel < 0) channel = 0;
1002 /* FIXME: a faire... */
/* Currently selected video channel index. */
1006 em_video_channel_get(void *video)
1008 Emotion_Gstreamer_Video *ev;
1010 ev = (Emotion_Gstreamer_Video *)video;
1012 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
1014 return ev->video_stream_nbr;
/* Channel names are not available from this backend. */
1018 em_video_channel_name_get(void *video __UNUSED__,
1019 int channel __UNUSED__)
/* Store the video-mute flag (consumed elsewhere in the backend). */
1025 em_video_channel_mute_set(void *video,
1028 Emotion_Gstreamer_Video *ev;
1030 ev = (Emotion_Gstreamer_Video *)video;
1032 ev->video_mute = mute;
1036 em_video_channel_mute_get(void *video)
1038 Emotion_Gstreamer_Video *ev;
1040 ev = (Emotion_Gstreamer_Video *)video;
1042 return ev->video_mute;
1045 /* Audio channels */
/* Number of discovered audio streams. */
1048 em_audio_channel_count(void *video)
1050 Emotion_Gstreamer_Video *ev;
1052 ev = (Emotion_Gstreamer_Video *)video;
1054 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
1056 return eina_list_count(ev->audio_streams);
/* Select an audio channel — clamped to >= -1 but otherwise unimplemented
 * (see the FIXME below). -1 conventionally means "default". */
1060 em_audio_channel_set(void *video __UNUSED__,
1061 int channel __UNUSED__)
1064 Emotion_Gstreamer_Video *ev;
1066 ev = (Emotion_Gstreamer_Video *)video;
1068 if (channel < -1) channel = -1;
1070 /* FIXME: a faire... */
/* Currently selected audio channel index. */
1074 em_audio_channel_get(void *video)
1076 Emotion_Gstreamer_Video *ev;
1078 ev = (Emotion_Gstreamer_Video *)video;
1080 _emotion_gstreamer_video_pipeline_parse(ev, EINA_FALSE);
1082 return ev->audio_stream_nbr;
/* Audio channel names are not available from this backend. */
1086 em_audio_channel_name_get(void *video __UNUSED__,
1087 int channel __UNUSED__)
1092 #define GST_PLAY_FLAG_AUDIO (1 << 1)
/* Mute by setting playbin2's "mute" property (kept alive rather than
 * tearing the audio path down — see the NOTE below). */
1095 em_audio_channel_mute_set(void *video,
1098 /* NOTE: at first I wanted to completly shutdown the audio path on mute,
1099 but that's not possible as the audio sink could be the clock source
1100 for the pipeline (at least that's the case on some of the hardware
1101 I have been tested emotion on.
1103 Emotion_Gstreamer_Video *ev;
1105 ev = (Emotion_Gstreamer_Video *)video;
1107 if (!ev->pipeline) return ;
1109 ev->audio_mute = mute;
1111 g_object_set(G_OBJECT(ev->pipeline), "mute", !!mute, NULL);
1115 em_audio_channel_mute_get(void *video)
1117 Emotion_Gstreamer_Video *ev;
1119 ev = (Emotion_Gstreamer_Video *)video;
1121 return ev->audio_mute;
/* Volume is forwarded to the pipeline's "volume" property. */
1125 em_audio_channel_volume_set(void *video,
1128 Emotion_Gstreamer_Video *ev;
1130 ev = (Emotion_Gstreamer_Video *)video;
1132 if (!ev->pipeline) return ;
1139 g_object_set(G_OBJECT(ev->pipeline), "volume", vol, NULL);
1143 em_audio_channel_volume_get(void *video)
1145 Emotion_Gstreamer_Video *ev;
1147 ev = (Emotion_Gstreamer_Video *)video;
/* SPU (subtitle), chapter, speed, and eject entry points are all
 * unimplemented stubs in this backend; bodies elided in this view. */
1155 em_spu_channel_count(void *video __UNUSED__)
1161 em_spu_channel_set(void *video __UNUSED__, int channel __UNUSED__)
1166 em_spu_channel_get(void *video __UNUSED__)
1172 em_spu_channel_name_get(void *video __UNUSED__, int channel __UNUSED__)
1178 em_spu_channel_mute_set(void *video __UNUSED__, int mute __UNUSED__)
1183 em_spu_channel_mute_get(void *video __UNUSED__)
1189 em_chapter_count(void *video __UNUSED__)
1195 em_chapter_set(void *video __UNUSED__, int chapter __UNUSED__)
1200 em_chapter_get(void *video __UNUSED__)
1206 em_chapter_name_get(void *video __UNUSED__, int chapter __UNUSED__)
1212 em_speed_set(void *video __UNUSED__, double speed __UNUSED__)
1217 em_speed_get(void *video __UNUSED__)
1223 em_eject(void *video __UNUSED__)
/* Return a metadata field collected by _for_each_tag. The returned
 * pointer is owned by ev->metadata; callers must not free it. */
1229 em_meta_get(void *video, int meta)
1231 Emotion_Gstreamer_Video *ev;
1232 const char *str = NULL;
1234 ev = (Emotion_Gstreamer_Video *)video;
1236 if (!ev || !ev->metadata) return NULL;
1239 case META_TRACK_TITLE:
1240 str = ev->metadata->title;
1242 case META_TRACK_ARTIST:
1243 str = ev->metadata->artist;
1245 case META_TRACK_ALBUM:
1246 str = ev->metadata->album;
1248 case META_TRACK_YEAR:
1249 str = ev->metadata->year;
1251 case META_TRACK_GENRE:
1252 str = ev->metadata->genre;
1254 case META_TRACK_COMMENT:
1255 str = ev->metadata->comment;
1257 case META_TRACK_DISCID:
1258 str = ev->metadata->disc_id;
/* Request the Xv (priority) rendering path. Gives up permanently after
 * repeated pipeline failures (priority_overide incremented elsewhere). */
1268 em_priority_set(void *video, Eina_Bool pri)
1270 Emotion_Gstreamer_Video *ev;
1273 if (priority_overide > 3) return ; /* If we failed to much to create that pipeline, let's don't wast our time anymore */
1278 em_priority_get(void *video)
1280 Emotion_Gstreamer_Video *ev;
/* X11 window-destroy event logger, installed in module_open. */
1288 _ecore_event_x_destroy(void *data __UNUSED__, int type __UNUSED__, void *event __UNUSED__)
1290 Ecore_X_Event_Window_Destroy *ev = event;
1292 fprintf(stderr, "killed window: %x (%x)\n", ev->win, ev->event_win);
/* Emotion module entry point: register the log domain (once), run
 * em_module.init for this object, hook X window-destroy logging, probe
 * ecore_x, and detect whether the window manager supports the special
 * Xv video window (_NET_SUPPORTING_WM_CHECK handshake + E_VIDEO_PARENT /
 * E_VIDEO_POSITION atoms). On success *module points at em_module. */
1299 module_open(Evas_Object *obj,
1300 const Emotion_Video_Module **module,
1302 Emotion_Module_Options *opt)
1305 Ecore_X_Window *roots;
1312 if (_emotion_gstreamer_log_domain < 0)
1314 eina_threads_init();
1315 eina_log_threads_enable();
1316 _emotion_gstreamer_log_domain = eina_log_domain_register
1317 ("emotion-gstreamer", EINA_COLOR_LIGHTCYAN);
1318 if (_emotion_gstreamer_log_domain < 0)
1320 EINA_LOG_CRIT("Could not register log domain 'emotion-gstreamer'");
1325 if (!em_module.init(obj, video, opt))
1329 ecore_event_handler_add(ECORE_X_EVENT_WINDOW_DESTROY, _ecore_event_x_destroy, NULL);
1332 if (getenv("EMOTION_FPS_DEBUG")) debug_fps = EINA_TRUE;
1334 eina_threads_init();
1337 if (ecore_x_init(NULL) > 0)
1339 _ecore_x_available = EINA_TRUE;
1342 /* Check if the window manager is able to handle our special Xv window. */
1343 roots = _ecore_x_available ? ecore_x_window_root_list(&num) : NULL;
1344 if (roots && num > 0)
1346 Ecore_X_Window win, twin;
/* Standard EWMH check: the root's _NET_SUPPORTING_WM_CHECK window must
 * point back at itself for the WM claims to be trustworthy. */
1349 nwins = ecore_x_window_prop_window_get(roots[0],
1350 ECORE_X_ATOM_NET_SUPPORTING_WM_CHECK,
1354 nwins = ecore_x_window_prop_window_get(win,
1355 ECORE_X_ATOM_NET_SUPPORTING_WM_CHECK,
1357 if (nwins > 0 && twin == win)
1359 Ecore_X_Atom *supported;
1363 if (ecore_x_netwm_supported_get(roots[0], &supported, &supported_num))
1365 Eina_Bool parent = EINA_FALSE;
1366 Eina_Bool video_position = EINA_FALSE;
1368 for (i = 0; i < supported_num; ++i)
1370 if (supported[i] == ECORE_X_ATOM_E_VIDEO_PARENT)
1372 else if (supported[i] == ECORE_X_ATOM_E_VIDEO_POSITION)
1373 video_position = EINA_TRUE;
1374 if (parent && video_position)
1378 if (parent && video_position)
1380 window_manager_video = EINA_TRUE;
1389 *module = &em_module;
/* Counterpart to module_open: shut the handle down and release the
 * ecore_x/eina_threads references taken above. */
1394 module_close(Emotion_Video_Module *module __UNUSED__,
1397 em_module.shutdown(video);
1400 if (_ecore_x_available)
1406 eina_threads_shutdown();
/* Plugin-level init: bring up GStreamer, register the static evas video
 * sink plugin (gstreamer_plugin_init), then register this backend with
 * Emotion under the name "gstreamer". */
1410 gstreamer_module_init(void)
1414 if (!gst_init_check(0, NULL, &error))
1416 EINA_LOG_CRIT("Could not init GStreamer");
1420 if (gst_plugin_register_static(GST_VERSION_MAJOR, GST_VERSION_MINOR,
1422 "video sink plugin for Emotion",
1423 gstreamer_plugin_init,
1428 "http://www.enlightenment.org/") == FALSE)
1430 EINA_LOG_CRIT("Could not load static gstreamer video sink for Emotion.");
1434 return _emotion_module_register("gstreamer", module_open, module_close);
/* Plugin-level teardown: unregister from Emotion. */
1438 gstreamer_module_shutdown(void)
1440 _emotion_module_unregister("gstreamer");
/* When built as a shared module, export the eina module hooks. */
1445 #ifndef EMOTION_STATIC_BUILD_GSTREAMER
1447 EINA_MODULE_INIT(gstreamer_module_init);
1448 EINA_MODULE_SHUTDOWN(gstreamer_module_shutdown);
/* GstTagForeachFunc: copy recognized tags into ev->metadata. For each
 * string tag the previous value is g_free'd before being replaced; the
 * strings returned by gst_tag_list_get_string / g_strdup_value_contents
 * are owned by ev->metadata afterwards (freed in _free_metadata).
 * data is the Emotion_Gstreamer_Video passed to gst_tag_list_foreach. */
1453 _for_each_tag(GstTagList const* list,
1457 Emotion_Gstreamer_Video *ev;
1462 ev = (Emotion_Gstreamer_Video*)data;
1464 if (!ev || !ev->metadata) return;
1466 count = gst_tag_list_get_tag_size(list, tag);
1468 for (i = 0; i < count; i++)
1470 if (!strcmp(tag, GST_TAG_TITLE))
1473 g_free(ev->metadata->title);
1474 if (gst_tag_list_get_string(list, GST_TAG_TITLE, &str))
1475 ev->metadata->title = str;
1477 ev->metadata->title = NULL;
1480 if (!strcmp(tag, GST_TAG_ALBUM))
1483 g_free(ev->metadata->album);
1484 if (gst_tag_list_get_string(list, GST_TAG_ALBUM, &str))
1485 ev->metadata->album = str;
1487 ev->metadata->album = NULL;
1490 if (!strcmp(tag, GST_TAG_ARTIST))
1493 g_free(ev->metadata->artist);
1494 if (gst_tag_list_get_string(list, GST_TAG_ARTIST, &str))
1495 ev->metadata->artist = str;
1497 ev->metadata->artist = NULL;
1500 if (!strcmp(tag, GST_TAG_GENRE))
1503 g_free(ev->metadata->genre);
1504 if (gst_tag_list_get_string(list, GST_TAG_GENRE, &str))
1505 ev->metadata->genre = str;
1507 ev->metadata->genre = NULL;
1510 if (!strcmp(tag, GST_TAG_COMMENT))
1513 g_free(ev->metadata->comment);
1514 if (gst_tag_list_get_string(list, GST_TAG_COMMENT, &str))
1515 ev->metadata->comment = str;
1517 ev->metadata->comment = NULL;
/* Non-string tags (date, track number, disc id) are stringified via
 * g_strdup_value_contents on the raw GValue. */
1520 if (!strcmp(tag, GST_TAG_DATE))
1524 g_free(ev->metadata->year)
1525 date = gst_tag_list_get_value_index(list, GST_TAG_DATE, 0);
1527 str = g_strdup_value_contents(date);
1530 ev->metadata->year = str;
1534 if (!strcmp(tag, GST_TAG_TRACK_NUMBER))
1537 const GValue *track;
1538 g_free(ev->metadata->count);
1539 track = gst_tag_list_get_value_index(list, GST_TAG_TRACK_NUMBER, 0);
1541 str = g_strdup_value_contents(track);
1544 ev->metadata->count = str;
/* Disc id tag only exists on newer GStreamer versions, hence the guard. */
1548 #ifdef GST_TAG_CDDA_CDDB_DISCID
1549 if (!strcmp(tag, GST_TAG_CDDA_CDDB_DISCID))
1552 const GValue *discid;
1553 g_free(ev->metadata->disc_id);
1554 discid = gst_tag_list_get_value_index(list, GST_TAG_CDDA_CDDB_DISCID, 0);
1556 str = g_strdup_value_contents(discid);
1559 ev->metadata->disc_id = str;
/* Free an Emotion_Gstreamer_Metadata and all its owned strings
 * (body elided from this view). */
1568 _free_metadata(Emotion_Gstreamer_Metadata *m)
/* Idler callback: rebuild the pipeline from the stored URI after a fatal
 * error (scheduled by _eos_main_fct on GST_MESSAGE_ERROR) and re-install
 * the sync bus handler. Always returns ECORE_CALLBACK_CANCEL (one-shot). */
1585 _em_restart_stream(void *data)
1587 Emotion_Gstreamer_Video *ev;
1591 ev->pipeline = gstreamer_video_sink_new(ev, ev->obj, ev->uri);
1595 ev->eos_bus = gst_pipeline_get_bus(GST_PIPELINE(ev->pipeline));
1598 ERR("could not get the bus");
1602 gst_bus_set_sync_handler(ev->eos_bus, _eos_sync_fct, ev);
1605 return ECORE_CALLBACK_CANCEL;
/* Main-loop half of the bus handling: consumes the Emotion_Gstreamer_Message
 * queued by _eos_sync_fct and dispatches on the wrapped GstMessage.
 * Runs on the main thread (delivered via ecore_main_loop_thread_safe_call_async),
 * so it may safely call _emotion_* callbacks on ev->obj. */
1609 _eos_main_fct(void *data)
1611 Emotion_Gstreamer_Message *send;
1612 Emotion_Gstreamer_Video *ev;
/* Deliver a deferred "playback started" exactly once, unless the handle
 * is already being torn down. */
1619 if (ev->play_started && !ev->delete_me)
1621 _emotion_playback_started(ev->obj);
1622 ev->play_started = 0;
1625 switch (GST_MESSAGE_TYPE(msg))
1627 case GST_MESSAGE_EOS:
1631 _emotion_decode_stop(ev->obj);
1632 _emotion_playback_finished(ev->obj);
1635 case GST_MESSAGE_TAG:
1638 GstTagList *new_tags;
1639 gst_message_parse_tag(msg, &new_tags);
1642 gst_tag_list_foreach(new_tags,
1643 (GstTagForeachFunc)_for_each_tag,
1645 gst_tag_list_free(new_tags);
1649 case GST_MESSAGE_ASYNC_DONE:
1650 if (!ev->delete_me) _emotion_seek_done(ev->obj);
1652 case GST_MESSAGE_STREAM_STATUS:
/* Error path: drop back from the Xv priority path to canvas rendering
 * and schedule a one-shot pipeline rebuild on the idler. */
1654 case GST_MESSAGE_ERROR:
1659 ERR("Switching back to canvas rendering.");
1660 ev->priority = EINA_FALSE;
1663 ecore_idler_add(_em_restart_stream, ev);
1667 ERR("bus say: %s [%i - %s]",
1668 GST_MESSAGE_SRC_NAME(msg),
1669 GST_MESSAGE_TYPE(msg),
1670 GST_MESSAGE_TYPE_NAME(msg));
1674 emotion_gstreamer_message_free(send);
/* Synchronous bus handler — runs on GStreamer streaming threads, so it
 * must not touch Evas/Emotion directly. Interesting messages are wrapped
 * (emotion_gstreamer_message_alloc) and marshalled to the main loop via
 * ecore_main_loop_thread_safe_call_async -> _eos_main_fct; everything is
 * logged and the message is dropped (GST_BUS_DROP) after unref. */
1677 static GstBusSyncReply
1678 _eos_sync_fct(GstBus *bus __UNUSED__, GstMessage *msg, gpointer data)
1680 Emotion_Gstreamer_Video *ev = data;
1681 Emotion_Gstreamer_Message *send;
1683 switch (GST_MESSAGE_TYPE(msg))
1685 case GST_MESSAGE_EOS:
1686 case GST_MESSAGE_TAG:
1687 case GST_MESSAGE_ASYNC_DONE:
1688 case GST_MESSAGE_STREAM_STATUS:
1689 INF("bus say: %s [%i - %s]",
1690 GST_MESSAGE_SRC_NAME(msg),
1691 GST_MESSAGE_TYPE(msg),
1692 GST_MESSAGE_TYPE_NAME(msg));
1693 send = emotion_gstreamer_message_alloc(ev, msg);
1695 if (send) ecore_main_loop_thread_safe_call_async(_eos_main_fct, send);
1699 case GST_MESSAGE_STATE_CHANGED:
1701 GstState old_state, new_state;
1703 gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
1704 INF("Element %s changed state from %s to %s.",
1705 GST_OBJECT_NAME(msg->src),
1706 gst_element_state_get_name(old_state),
1707 gst_element_state_get_name(new_state));
1710 case GST_MESSAGE_ERROR:
1715 gst_message_parse_error(msg, &error, &debug);
1716 ERR("ERROR from element %s: %s", GST_OBJECT_NAME(msg->src), error->message);
1717 ERR("Debugging info: %s", (debug) ? debug : "none");
1718 g_error_free(error);
/* Only errors originating in the Xv sink are forwarded to the main
 * loop (where _eos_main_fct falls back to canvas rendering). */
1721 if (strncmp(GST_OBJECT_NAME(msg->src), "xvimagesink", 11) == 0)
1723 send = emotion_gstreamer_message_alloc(ev, msg);
1725 if (send) ecore_main_loop_thread_safe_call_async(_eos_main_fct, send);
1729 case GST_MESSAGE_WARNING:
1734 gst_message_parse_warning(msg, &error, &debug);
1735 WRN("WARNING from element %s: %s", GST_OBJECT_NAME(msg->src), error->message);
1736 WRN("Debugging info: %s", (debug) ? debug : "none");
1737 g_error_free(error);
1742 WRN("bus say: %s [%i - %s]",
1743 GST_MESSAGE_SRC_NAME(msg),
1744 GST_MESSAGE_TYPE(msg),
1745 GST_MESSAGE_TYPE_NAME(msg));
1749 gst_message_unref(msg);
1751 return GST_BUS_DROP;
/* Preroll the pipeline (blocking) and harvest stream information from it.
 * Idempotent: returns early when pipeline_parsed is already set; `force`
 * additionally cancels outstanding worker threads first. Populates
 * ev->video_streams / ev->audio_streams (size, fps, fourcc, duration,
 * channels, samplerate), sets up a visualization stream for audio-only
 * media, computes the aspect ratio, resets metadata, and finally applies
 * the cached volume/mute and fires the deferred open/playback callbacks. */
1755 _emotion_gstreamer_video_pipeline_parse(Emotion_Gstreamer_Video *ev,
1761 if (ev->pipeline_parsed)
1764 if (force && ev->threads)
1768 EINA_LIST_FREE(ev->threads, t)
1769 ecore_thread_cancel(t);
/* Block until the state change settles; NO_PREROLL means a live source,
 * which needs PLAYING before it will produce data. */
1775 res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
1776 if (res == GST_STATE_CHANGE_NO_PREROLL)
1778 gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
1780 res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
1783 /** NOTE: you need to set: GST_DEBUG_DUMP_DOT_DIR=/tmp EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp' */
1784 /** then call dot -Tpng -oemotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
1785 if (getenv("EMOTION_GSTREAMER_DOT"))
1786 GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline),
1787 GST_DEBUG_GRAPH_SHOW_ALL,
1788 getenv("EMOTION_GSTREAMER_DOT"));
1790 if (!(res == GST_STATE_CHANGE_SUCCESS
1791 || res == GST_STATE_CHANGE_NO_PREROLL))
1793 ERR("Unable to get GST_CLOCK_TIME_NONE.");
/* playbin2 exposes stream counts as "n-audio" / "n-video" properties. */
1797 g_object_get(G_OBJECT(ev->pipeline),
1798 "n-audio", &ev->audio_stream_nbr,
1799 "n-video", &ev->video_stream_nbr,
1802 if ((ev->video_stream_nbr == 0) && (ev->audio_stream_nbr == 0))
1804 ERR("No audio nor video stream found");
/* --- Video streams: read negotiated caps off each video pad. --- */
1809 for (i = 0; i < ev->video_stream_nbr; i++)
1811 Emotion_Video_Stream *vstream;
1814 GstStructure *structure;
1819 gdouble length_time = 0.0;
1826 g_signal_emit_by_name(ev->pipeline, "get-video-pad", i, &pad);
1830 caps = gst_pad_get_negotiated_caps(pad);
1833 structure = gst_caps_get_structure(caps, 0);
1834 str = gst_caps_to_string(caps);
1836 if (!gst_structure_get_int(structure, "width", &width))
1838 if (!gst_structure_get_int(structure, "height", &height))
1840 if (!gst_structure_get_fraction(structure, "framerate", &fps_num, &fps_den))
/* YUV caps carry an explicit fourcc; RGB caps are normalized to ARGB. */
1843 if (g_str_has_prefix(str, "video/x-raw-yuv"))
1845 val = gst_structure_get_value(structure, "format");
1846 fourcc = gst_value_get_fourcc(val);
1848 else if (g_str_has_prefix(str, "video/x-raw-rgb"))
1849 fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
1853 query = gst_query_new_duration(GST_FORMAT_TIME);
1854 if (gst_pad_peer_query(pad, query))
1858 gst_query_parse_duration(query, NULL, &t);
1859 length_time = (double)t / (double)GST_SECOND;
1864 vstream = emotion_video_stream_new(ev);
1865 if (!vstream) goto unref_query_v;
1867 vstream->length_time = length_time;
1868 vstream->width = width;
1869 vstream->height = height;
1870 vstream->fps_num = fps_num;
1871 vstream->fps_den = fps_den;
1872 vstream->fourcc = fourcc;
1876 gst_query_unref(query);
1878 gst_caps_unref(caps);
1880 gst_object_unref(pad);
/* --- Audio streams: channels + samplerate from each audio pad. --- */
1884 for (i = 0; i < ev->audio_stream_nbr; i++)
1886 Emotion_Audio_Stream *astream;
1889 GstStructure *structure;
1892 gdouble length_time = 0.0;
1896 g_signal_emit_by_name(ev->pipeline, "get-audio-pad", i, &pad);
1900 caps = gst_pad_get_negotiated_caps(pad);
1903 structure = gst_caps_get_structure(caps, 0);
1905 if (!gst_structure_get_int(structure, "channels", &channels))
1907 if (!gst_structure_get_int(structure, "rate", &samplerate))
1910 query = gst_query_new_duration(GST_FORMAT_TIME);
1911 if (gst_pad_peer_query(pad, query))
1915 gst_query_parse_duration(query, NULL, &t);
1916 length_time = (double)t / (double)GST_SECOND;
1921 astream = calloc(1, sizeof(Emotion_Audio_Stream));
1922 if (!astream) continue;
1923 ev->audio_streams = eina_list_append(ev->audio_streams, astream);
1924 if (eina_error_get())
1930 astream->length_time = length_time;
1931 astream->channels = channels;
1932 astream->samplerate = samplerate;
1935 gst_query_unref(query);
1937 gst_caps_unref(caps);
1939 gst_object_unref(pad);
1942 /* Visualization sink */
/* Audio-only media: synthesize a 320x200@25fps ARGB video stream driven
 * by the configured visualization plugin, and enable playbin2's vis flag
 * (0x00000008 is GST_PLAY_FLAG_VIS). */
1943 if (ev->video_stream_nbr == 0)
1945 GstElement *vis = NULL;
1946 Emotion_Video_Stream *vstream;
1947 Emotion_Audio_Stream *astream;
1949 const char *vis_name;
1951 if (!(vis_name = emotion_visualization_element_name_get(ev->vis)))
1953 WRN("pb vis name %d", ev->vis);
1957 astream = eina_list_data_get(ev->audio_streams);
1959 vis = gst_element_factory_make(vis_name, "vissink");
1960 vstream = emotion_video_stream_new(ev);
1964 DBG("could not create visualization stream");
1966 vstream->length_time = astream->length_time;
1967 vstream->width = 320;
1968 vstream->height = 200;
1969 vstream->fps_num = 25;
1970 vstream->fps_den = 1;
1971 vstream->fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
1973 g_object_set(G_OBJECT(ev->pipeline), "vis-plugin", vis, NULL);
1974 g_object_get(G_OBJECT(ev->pipeline), "flags", &flags, NULL);
1975 flags |= 0x00000008;
1976 g_object_set(G_OBJECT(ev->pipeline), "flags", flags, NULL);
/* Re-derive counts from what was actually collected above. */
1981 ev->video_stream_nbr = eina_list_count(ev->video_streams);
1982 ev->audio_stream_nbr = eina_list_count(ev->audio_streams);
1984 if (ev->video_stream_nbr == 1)
1986 Emotion_Video_Stream *vstream;
1988 vstream = eina_list_data_get(ev->video_streams);
1989 ev->ratio = (double)vstream->width / (double)vstream->height;
1990 _emotion_frame_resize(ev->obj, vstream->width, vstream->height, ev->ratio);
1994 /* on recapitule : */
1995 Emotion_Video_Stream *vstream;
1996 Emotion_Audio_Stream *astream;
1998 vstream = eina_list_data_get(ev->video_streams);
2001 DBG("video size=%dx%d, fps=%d/%d, "
2002 "fourcc=%"GST_FOURCC_FORMAT", length=%"GST_TIME_FORMAT,
2003 vstream->width, vstream->height, vstream->fps_num, vstream->fps_den,
2004 GST_FOURCC_ARGS(vstream->fourcc),
2005 GST_TIME_ARGS((guint64)(vstream->length_time * GST_SECOND)));
2008 astream = eina_list_data_get(ev->audio_streams);
2011 DBG("audio channels=%d, rate=%d, length=%"GST_TIME_FORMAT,
2012 astream->channels, astream->samplerate,
2013 GST_TIME_ARGS((guint64)(astream->length_time * GST_SECOND)));
/* Fresh metadata container for the new pipeline; old one is released. */
2018 _free_metadata(ev->metadata);
2019 ev->metadata = calloc(1, sizeof(Emotion_Gstreamer_Metadata));
2021 ev->pipeline_parsed = EINA_TRUE;
/* Apply cached audio settings now that the pipeline exists. */
2023 em_audio_channel_volume_set(ev, ev->volume);
2024 em_audio_channel_mute_set(ev, ev->audio_mute);
2026 if (ev->play_started)
2028 _emotion_playback_started(ev->obj);
2029 ev->play_started = 0;
2032 _emotion_open_done(ev->obj);