3 #include "emotion_gstreamer.h"
/* Pad template: the always-present "sink" pad accepts planar/packed YUV
 * (I420, YV12, YUY2) and the BGR-ordered RGB variants (BGRx, BGR, BGRA)
 * — exactly the formats handled in set_caps()/main_render() below. */
5 static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE("sink",
6 GST_PAD_SINK, GST_PAD_ALWAYS,
7 GST_STATIC_CAPS(GST_VIDEO_CAPS_YUV("{ I420, YV12, YUY2 }") ";"
8 GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_BGR ";" GST_VIDEO_CAPS_BGRA));
/* Debug category used by the ERR()/etc. logging macros in this file. */
10 GST_DEBUG_CATEGORY_STATIC(evas_video_sink_debug);
11 #define GST_CAT_DEFAULT evas_video_sink_debug
/* Signal id table indexed by the signal enum (REPAINT_REQUESTED /
 * LAST_SIGNAL are declared elsewhere); filled in class_init(). */
26 static guint evas_video_sink_signals[LAST_SIGNAL] = { 0, };
/* GStreamer type boilerplate: registers the debug category while the
 * EvasVideoSink GObject type is created.
 * NOTE(review): the continuation lines of these two macros are not
 * visible in this chunk. */
28 #define _do_init(bla) \
29 GST_DEBUG_CATEGORY_INIT(evas_video_sink_debug, \
34 GST_BOILERPLATE_FULL(EvasVideoSink,
/* Forward declarations for helpers defined later in this file. */
41 static void unlock_buffer_mutex(EvasVideoSinkPrivate* priv);
43 static void evas_video_sink_main_render(void *data);
/* GObject base_init: registers the sink pad template and the element
 * metadata (name, classification, description, author) on the class. */
46 evas_video_sink_base_init(gpointer g_class)
48 GstElementClass* element_class;
50 element_class = GST_ELEMENT_CLASS(g_class);
51 gst_element_class_add_pad_template(element_class, gst_static_pad_template_get(&sinktemplate));
52 gst_element_class_set_details_simple(element_class, "Evas video sink",
53 "Sink/Video", "Sends video data from a GStreamer pipeline to an Evas object",
54 "Vincent Torri <vtorri@univ-evry.fr>");
/* Instance init: allocate the private struct and set safe defaults.
 * The streaming thread and the Ecore main loop later synchronise on the
 * data_cond/buffer_mutex pair created here. */
58 evas_video_sink_init(EvasVideoSink* sink, EvasVideoSinkClass* klass __UNUSED__)
60 EvasVideoSinkPrivate* priv;
63 sink->priv = priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, EVAS_TYPE_VIDEO_SINK, EvasVideoSinkPrivate);
65 priv->last_buffer = NULL;
/* Format is unknown until set_caps(); ARGB8888 is the Evas default. */
68 priv->gformat = GST_VIDEO_FORMAT_UNKNOWN;
69 priv->eformat = EVAS_COLORSPACE_ARGB8888;
70 priv->data_cond = g_cond_new();
71 priv->buffer_mutex = g_mutex_new();
72 priv->unlocked = EINA_FALSE;
76 /**** Object methods ****/
/* GObject setter: only "evas-object" is writable.  The target Evas
 * image object is swapped under buffer_mutex so the render path never
 * observes a half-updated pointer. */
79 evas_video_sink_set_property(GObject * object, guint prop_id,
80 const GValue * value, GParamSpec * pspec)
83 EvasVideoSinkPrivate* priv;
85 sink = EVAS_VIDEO_SINK (object);
89 case PROP_EVAS_OBJECT:
90 g_mutex_lock(priv->buffer_mutex);
91 priv->o = g_value_get_pointer (value);
92 g_mutex_unlock(priv->buffer_mutex);
/* Unknown property id: emit the standard GObject warning and log. */
95 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
96 ERR("invalid property");
102 evas_video_sink_get_property(GObject * object, guint prop_id,
103 GValue * value, GParamSpec * pspec)
106 EvasVideoSinkPrivate* priv;
108 sink = EVAS_VIDEO_SINK (object);
112 case PROP_EVAS_OBJECT:
113 g_mutex_lock(priv->buffer_mutex);
114 g_value_set_pointer (value, priv->o);
115 g_mutex_unlock(priv->buffer_mutex);
118 g_mutex_lock(priv->buffer_mutex);
119 g_value_set_int(value, priv->width);
120 g_mutex_unlock(priv->buffer_mutex);
123 g_mutex_lock(priv->buffer_mutex);
124 g_value_set_int (value, priv->height);
125 g_mutex_unlock(priv->buffer_mutex);
128 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
129 ERR("invalide property");
/* GObject dispose: free the synchronisation primitives, drop the cached
 * last frame, then chain up.  dispose may run more than once, hence the
 * pointer tests and clears.
 * NOTE(review): data_cond is freed but the line clearing it to NULL is
 * not visible in this chunk — confirm it exists. */
135 evas_video_sink_dispose(GObject* object)
138 EvasVideoSinkPrivate* priv;
140 sink = EVAS_VIDEO_SINK(object);
143 if (priv->buffer_mutex) {
144 g_mutex_free(priv->buffer_mutex);
145 priv->buffer_mutex = 0;
148 if (priv->data_cond) {
149 g_cond_free(priv->data_cond);
/* Release the reference kept on the most recently rendered frame. */
153 if (priv->last_buffer) {
154 gst_buffer_unref(priv->last_buffer);
155 priv->last_buffer = NULL;
158 G_OBJECT_CLASS(parent_class)->dispose(object);
162 /**** BaseSink methods ****/
164 gboolean evas_video_sink_set_caps(GstBaseSink *bsink, GstCaps *caps)
167 EvasVideoSinkPrivate* priv;
168 GstVideoFormat format;
172 sink = EVAS_VIDEO_SINK(bsink);
175 if (G_UNLIKELY(!gst_video_format_parse_caps(caps, &format, &width, &height))) {
176 ERR("Unable to parse caps.");
181 priv->height = height;
183 printf("%p format :", priv->o);
186 case GST_VIDEO_FORMAT_I420: priv->eformat = EVAS_COLORSPACE_YCBCR422P601_PL;
189 case GST_VIDEO_FORMAT_YV12: priv->eformat = EVAS_COLORSPACE_YCBCR422P601_PL;
192 case GST_VIDEO_FORMAT_YUY2: priv->eformat = EVAS_COLORSPACE_YCBCR422601_PL;
195 case GST_VIDEO_FORMAT_BGR: priv->eformat = EVAS_COLORSPACE_ARGB8888;
198 case GST_VIDEO_FORMAT_BGRx: priv->eformat = EVAS_COLORSPACE_ARGB8888;
201 case GST_VIDEO_FORMAT_BGRA: priv->eformat = EVAS_COLORSPACE_ARGB8888;
205 ERR("unsupported : %d\n", format);
208 priv->gformat = format;
/* GstBaseSink::start — clear the "unlocked" flag under buffer_mutex so
 * a fresh streaming run can block in render() again.
 * NOTE(review): the lines computing the gboolean return value (and any
 * check of priv->o) are not visible in this chunk. */
214 evas_video_sink_start(GstBaseSink* base_sink)
216 EvasVideoSinkPrivate* priv;
219 priv = EVAS_VIDEO_SINK(base_sink)->priv;
220 g_mutex_lock(priv->buffer_mutex);
224 priv->unlocked = EINA_FALSE;
225 g_mutex_unlock(priv->buffer_mutex);
230 evas_video_sink_stop(GstBaseSink* base_sink)
232 EvasVideoSinkPrivate* priv = EVAS_VIDEO_SINK(base_sink)->priv;
234 unlock_buffer_mutex(priv);
239 evas_video_sink_unlock(GstBaseSink* object)
243 sink = EVAS_VIDEO_SINK(object);
245 unlock_buffer_mutex(sink->priv);
247 return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock,
252 evas_video_sink_unlock_stop(GstBaseSink* object)
255 EvasVideoSinkPrivate* priv;
257 sink = EVAS_VIDEO_SINK(object);
260 g_mutex_lock(priv->buffer_mutex);
261 priv->unlocked = FALSE;
262 g_mutex_unlock(priv->buffer_mutex);
264 return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock_stop,
/* GstBaseSink::preroll — hand the preroll frame to the Ecore main loop.
 * The EINA_TRUE flag marks the buffer as preroll, so the main-loop
 * callback will not signal data_cond for it (nobody is waiting). */
269 evas_video_sink_preroll(GstBaseSink* bsink, GstBuffer* buffer)
271 Emotion_Gstreamer_Buffer *send;
272 EvasVideoSinkPrivate *priv;
275 sink = EVAS_VIDEO_SINK(bsink);
278 send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_TRUE);
/* Rendering must happen on the main loop: Evas is not thread safe. */
281 ecore_main_loop_thread_safe_call(evas_video_sink_main_render, send);
287 evas_video_sink_render(GstBaseSink* bsink, GstBuffer* buffer)
289 Emotion_Gstreamer_Buffer *send;
290 EvasVideoSinkPrivate *priv;
294 sink = EVAS_VIDEO_SINK(bsink);
297 g_mutex_lock(priv->buffer_mutex);
299 if (priv->unlocked) {
301 g_mutex_unlock(priv->buffer_mutex);
305 send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_FALSE);
306 if (!send) return GST_FLOW_ERROR;
308 ecore_main_loop_thread_safe_call(evas_video_sink_main_render, send);
310 g_cond_wait(priv->data_cond, priv->buffer_mutex);
311 g_mutex_unlock(priv->buffer_mutex);
/* Main-loop callback: paints one decoded frame onto the Evas image
 * object.  Runs in the Ecore main loop because Evas is not thread safe;
 * for non-preroll frames it signals data_cond at the end so the
 * streaming thread blocked in render() can continue.
 * NOTE(review): several declarations (buffer, preroll, w, h, x, y, i,
 * pos, alpha) and the exit_point label/cleanup are on lines not visible
 * in this chunk. */
316 static void evas_video_sink_main_render(void *data)
318 Emotion_Gstreamer_Buffer *send;
319 Emotion_Gstreamer_Video *ev;
320 Emotion_Video_Stream *vstream;
321 EvasVideoSinkPrivate* priv;
323 unsigned char *evas_data;
324 const guint8 *gst_data;
326 GstFormat fmt = GST_FORMAT_TIME;
334 if (!priv) goto exit_point;
336 buffer = send->frame;
337 preroll = send->preroll;
/* A flush/unlock happened while this call was queued: drop the frame. */
339 if (priv->unlocked) goto exit_point;
341 gst_data = GST_BUFFER_DATA(buffer);
342 if (!gst_data) goto exit_point;
344 ev = evas_object_data_get(priv->o, "_emotion_gstreamer_video");
345 if (!ev) goto exit_point;
347 _emotion_gstreamer_video_pipeline_parse(ev, EINA_TRUE);
349 // This prevents a race condition when data is still in the pipe
350 // but the buffer size has changed because of a request from
351 // emotion smart (like on a file set).
352 evas_object_image_size_get(priv->o, &w, &h);
353 if (w != priv->width || h != priv->height)
356 evas_object_image_size_set(priv->o, priv->width, priv->height);
357 evas_object_image_alpha_set(priv->o, 0);
358 evas_object_image_colorspace_set(priv->o, priv->eformat);
360 evas_data = (unsigned char *)evas_object_image_data_get(priv->o, 1);
/* Copy/convert the frame into the Evas buffer.  RGB variants are
 * copied pixel by pixel; planar/packed YUV only builds a table of row
 * pointers into gst_data (Evas converts on the fly). */
362 // Evas's BGRA has pre-multiplied alpha while GStreamer's doesn't.
363 // Here we convert to Evas's BGRA.
364 switch (priv->gformat)
366 case GST_VIDEO_FORMAT_BGR:
368 unsigned char *evas_tmp;
372 evas_tmp = evas_data;
373 /* FIXME: could this be optimized ? */
374 for (x = 0; x < priv->height; x++) {
375 for (y = 0; y < priv->width; y++) {
376 evas_tmp[0] = gst_data[0];
377 evas_tmp[1] = gst_data[1];
378 evas_tmp[2] = gst_data[2];
387 // Evas's BGRA has pre-multiplied alpha while GStreamer's doesn't.
388 // Here we convert to Evas's BGRA.
389 case GST_VIDEO_FORMAT_BGRx:
391 unsigned char *evas_tmp;
395 evas_tmp = evas_data;
396 /* FIXME: could this be optimized ? */
397 for (x = 0; x < priv->height; x++) {
398 for (y = 0; y < priv->width; y++) {
399 evas_tmp[0] = gst_data[0];
400 evas_tmp[1] = gst_data[1];
401 evas_tmp[2] = gst_data[2];
410 // Evas's BGRA has pre-multiplied alpha while GStreamer's doesn't.
411 // Here we convert to Evas's BGRA.
412 case GST_VIDEO_FORMAT_BGRA:
414 unsigned char *evas_tmp;
419 evas_tmp = evas_data;
420 /* FIXME: could this be optimized ? */
421 for (x = 0; x < priv->height; x++) {
422 for (y = 0; y < priv->width; y++) {
/* Pre-multiply each channel by the (hidden) per-pixel alpha read from
 * gst_data — NOTE(review): the line reading `alpha` is not visible. */
424 evas_tmp[0] = (gst_data[0] * alpha) / 255;
425 evas_tmp[1] = (gst_data[1] * alpha) / 255;
426 evas_tmp[2] = (gst_data[2] * alpha) / 255;
/* I420: Y plane rows, then U rows at offset h*w, then V rows at
 * h*w + (h/2)*(w/2). */
435 case GST_VIDEO_FORMAT_I420:
438 const unsigned char **rows;
440 evas_object_image_pixels_dirty_set(priv->o, 1);
441 rows = (const unsigned char **)evas_data;
443 for (i = 0; i < priv->height; i++)
444 rows[i] = &gst_data[i * priv->width];
446 rows += priv->height;
447 for (i = 0; i < (priv->height / 2); i++)
448 rows[i] = &gst_data[priv->height * priv->width + i * (priv->width / 2)];
450 rows += priv->height / 2;
451 for (i = 0; i < (priv->height / 2); i++)
452 rows[i] = &gst_data[priv->height * priv->width + priv->height * (priv->width /4) + i * (priv->width / 2)];
/* YV12 stores V before U, so the U/V offsets are swapped relative to
 * the I420 case above; the rows table stays in Y/Cb/Cr order. */
456 case GST_VIDEO_FORMAT_YV12:
459 const unsigned char **rows;
461 evas_object_image_pixels_dirty_set(priv->o, 1);
463 rows = (const unsigned char **)evas_data;
465 for (i = 0; i < priv->height; i++)
466 rows[i] = &gst_data[i * priv->width];
468 rows += priv->height;
469 for (i = 0; i < (priv->height / 2); i++)
470 rows[i] = &gst_data[priv->height * priv->width + priv->height * (priv->width /4) + i * (priv->width / 2)];
472 rows += priv->height / 2;
473 for (i = 0; i < (priv->height / 2); i++)
474 rows[i] = &gst_data[priv->height * priv->width + i * (priv->width / 2)];
/* YUY2 is packed 4:2:2 — 2 bytes per pixel, a single row table. */
478 case GST_VIDEO_FORMAT_YUY2:
481 const unsigned char **rows;
483 evas_object_image_pixels_dirty_set(priv->o, 1);
485 rows = (const unsigned char **)evas_data;
487 for (i = 0; i < priv->height; i++)
488 rows[i] = &gst_data[i * priv->width * 2];
/* Hand the pixels back to Evas and mark the whole area dirty. */
493 evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
494 evas_object_image_data_set(priv->o, evas_data);
495 evas_object_image_pixels_dirty_set(priv->o, 0);
497 _emotion_frame_new(ev->obj);
/* video_stream_nbr is 1-based, hence the -1.
 * NOTE(review): vstream is dereferenced below; a NULL check is
 * presumably on a line not visible in this chunk — confirm. */
499 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
501 gst_element_query_position(ev->pipeline, &fmt, &pos);
502 ev->position = (double)pos / (double)GST_SECOND;
504 vstream->width = priv->width;
505 vstream->height = priv->height;
506 ev->ratio = (double) priv->width / (double) priv->height;
508 _emotion_video_pos_update(ev->obj, ev->position, vstream->length_time);
509 _emotion_frame_resize(ev->obj, priv->width, priv->height, ev->ratio);
/* Keep a ref on this frame: for YUV formats Evas still reads the row
 * pointers into gst_data until the next frame replaces them. */
511 if (priv->last_buffer) gst_buffer_unref(priv->last_buffer);
512 priv->last_buffer = gst_buffer_ref(buffer);
515 emotion_gstreamer_buffer_free(send);
/* Preroll frames: nobody waits on data_cond, so return immediately. */
517 if (preroll) return ;
519 g_mutex_lock(priv->buffer_mutex);
521 if (priv->unlocked) {
522 g_mutex_unlock(priv->buffer_mutex);
/* Wake the streaming thread blocked in render(). */
526 g_cond_signal(priv->data_cond);
527 g_mutex_unlock(priv->buffer_mutex);
/* Raise the unlocked flag and wake any thread blocked on data_cond.
 * Used by stop()/unlock() to abort a render() waiting for a paint. */
531 unlock_buffer_mutex(EvasVideoSinkPrivate* priv)
533 g_mutex_lock(priv->buffer_mutex);
535 priv->unlocked = EINA_TRUE;
536 g_cond_signal(priv->data_cond);
537 g_mutex_unlock(priv->buffer_mutex);
/* Custom GClosure marshaller for the "repaint-requested" signal:
 * void (*)(gpointer instance, GstMiniObject *buffer, gpointer data),
 * honouring G_CCLOSURE_SWAP_DATA like the stock g_cclosure marshallers. */
541 marshal_VOID__MINIOBJECT(GClosure * closure, GValue * return_value __UNUSED__,
542 guint n_param_values, const GValue * param_values,
543 gpointer invocation_hint __UNUSED__, gpointer marshal_data)
545 typedef void (*marshalfunc_VOID__MINIOBJECT) (gpointer obj, gpointer arg1, gpointer data2);
546 marshalfunc_VOID__MINIOBJECT callback;
548 gpointer data1, data2;
550 cc = (GCClosure *) closure;
552 g_return_if_fail(n_param_values == 2);
/* Swapped closures exchange the instance and the user data. */
554 if (G_CCLOSURE_SWAP_DATA(closure)) {
555 data1 = closure->data;
556 data2 = g_value_peek_pointer(param_values + 0);
558 data1 = g_value_peek_pointer(param_values + 0);
559 data2 = closure->data;
/* marshal_data overrides the closure callback when set (class closure). */
561 callback = (marshalfunc_VOID__MINIOBJECT) (marshal_data ? marshal_data : cc->callback);
563 callback(data1, gst_value_get_mini_object(param_values + 1), data2);
/* Class init: install the properties ("evas-object" pointer, read-only
 * "width"/"height"), hook the GObject and GstBaseSink virtuals, and
 * create the "repaint-requested" action signal carrying a GstBuffer. */
567 evas_video_sink_class_init(EvasVideoSinkClass* klass)
569 GObjectClass* gobject_class;
570 GstBaseSinkClass* gstbase_sink_class;
572 gobject_class = G_OBJECT_CLASS(klass);
573 gstbase_sink_class = GST_BASE_SINK_CLASS(klass);
575 g_type_class_add_private(klass, sizeof(EvasVideoSinkPrivate));
577 gobject_class->set_property = evas_video_sink_set_property;
578 gobject_class->get_property = evas_video_sink_get_property;
580 g_object_class_install_property (gobject_class, PROP_EVAS_OBJECT,
581 g_param_spec_pointer ("evas-object", "Evas Object",
582 "The Evas object where the display of the video will be done",
585 g_object_class_install_property (gobject_class, PROP_WIDTH,
586 g_param_spec_int ("width", "Width",
587 "The width of the video",
588 0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
590 g_object_class_install_property (gobject_class, PROP_HEIGHT,
591 g_param_spec_int ("height", "Height",
592 "The height of the video",
593 0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
595 gobject_class->dispose = evas_video_sink_dispose;
/* GstBaseSink virtuals implemented above. */
597 gstbase_sink_class->set_caps = evas_video_sink_set_caps;
598 gstbase_sink_class->stop = evas_video_sink_stop;
599 gstbase_sink_class->start = evas_video_sink_start;
600 gstbase_sink_class->unlock = evas_video_sink_unlock;
601 gstbase_sink_class->unlock_stop = evas_video_sink_unlock_stop;
602 gstbase_sink_class->render = evas_video_sink_render;
603 gstbase_sink_class->preroll = evas_video_sink_preroll;
605 evas_video_sink_signals[REPAINT_REQUESTED] = g_signal_new("repaint-requested",
606 G_TYPE_FROM_CLASS(klass),
607 (GSignalFlags)(G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
611 marshal_VOID__MINIOBJECT,
612 G_TYPE_NONE, 1, GST_TYPE_BUFFER);
/* Plugin entry point: registers the EvasVideoSink element type with
 * GStreamer.  NOTE(review): the element-name and rank arguments of
 * gst_element_register() are on lines not visible in this chunk. */
616 gstreamer_plugin_init (GstPlugin * plugin)
618 return gst_element_register (plugin,
621 EVAS_TYPE_VIDEO_SINK);
/* Ecore_Thread worker: drives the pipeline to PAUSED off the main loop
 * so state negotiation does not block the UI. */
625 _emotion_gstreamer_pause(void *data, Ecore_Thread *thread __UNUSED__)
627 Emotion_Gstreamer_Video *ev = data;
629 gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
/* Ecore_Thread cancel/cleanup callback.
 * NOTE(review): the body past the declaration is on lines not visible
 * in this chunk. */
633 _emotion_gstreamer_cancel(void *data, Ecore_Thread *thread __UNUSED__)
635 Emotion_Gstreamer_Video *ev = data;
/* Ecore_Thread end callback: parse the now-PAUSED pipeline, then reuse
 * the cancel path for the common cleanup. */
641 _emotion_gstreamer_end(void *data, Ecore_Thread *thread)
643 _emotion_gstreamer_video_pipeline_parse(data, EINA_TRUE);
644 _emotion_gstreamer_cancel(data, thread);
/* Builds the playbin2 + emotion-sink pipeline for the given URI and
 * starts an Ecore thread that pre-rolls it (pause worker above).
 * NOTE(review): the declarations, error-branch braces and return
 * statements are on lines not visible in this chunk; the trailing
 * gst_object_unref(playbin) appears to be the error-path cleanup. */
648 gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
655 GstStateChangeReturn res;
657 obj = emotion_object_image_get(o);
660 ERR("Not Evas_Object specified");
664 playbin = gst_element_factory_make("playbin2", "playbin");
667 ERR("Unable to create 'playbin' GstElement.");
671 sink = gst_element_factory_make("emotion-sink", "sink");
674 ERR("Unable to create 'emotion-sink' GstElement.");
/* Wire the sink into playbin and point it at the target Evas object. */
678 g_object_set(G_OBJECT(playbin), "video-sink", sink, NULL);
679 g_object_set(G_OBJECT(playbin), "uri", uri, NULL);
680 g_object_set(G_OBJECT(sink), "evas-object", obj, NULL);
682 ev->pipeline = playbin;
683 ev->thread = ecore_thread_run(_emotion_gstreamer_pause,
684 _emotion_gstreamer_end,
685 _emotion_gstreamer_cancel,
688 /** NOTE: you need to set: GST_DEBUG_DUMP_DOT_DIR=/tmp EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp' */
689 /** then call dot -Tpng -oemotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
690 if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(playbin), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
692 evas_object_data_set(obj, "_emotion_gstreamer_video", ev);
697 gst_object_unref(playbin);