3 #include "emotion_gstreamer.h"
/* Static sink pad template: this sink accepts planar/packed YUV
 * (I420, YV12, YUY2) and the BGR family of RGB caps.
 * NOTE(review): keep this list in sync with the format switch in
 * evas_video_sink_set_caps() and the colorspace handling in
 * evas_video_sink_main_render(). */
5 static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE("sink",
6 GST_PAD_SINK, GST_PAD_ALWAYS,
7 GST_STATIC_CAPS(GST_VIDEO_CAPS_YUV("{ I420, YV12, YUY2 }") ";"
8 GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_BGR ";" GST_VIDEO_CAPS_BGRA));
/* Per-element GStreamer debug category; GST_CAT_DEFAULT routes this
 * file's GST_DEBUG/ERR-style logging into it. */
10 GST_DEBUG_CATEGORY_STATIC(evas_video_sink_debug);
11 #define GST_CAT_DEFAULT evas_video_sink_debug
/* Signal id table, indexed by the signal enum (REPAINT_REQUESTED, ...,
 * LAST_SIGNAL — enum declaration elided in this excerpt). */
27 static guint evas_video_sink_signals[LAST_SIGNAL] = { 0, };
/* _do_init is passed to GST_BOILERPLATE_FULL so the debug category is
 * registered when the type is. Continuation lines are elided here. */
29 #define _do_init(bla) \
30 GST_DEBUG_CATEGORY_INIT(evas_video_sink_debug, \
/* GStreamer 0.10 boilerplate: generates evas_video_sink_get_type() and
 * wires up base_init/class_init/init (remaining arguments elided). */
35 GST_BOILERPLATE_FULL(EvasVideoSink,
/* Forward declarations for helpers defined later in this file. */
42 static void unlock_buffer_mutex(EvasVideoSinkPrivate* priv);
44 static void evas_video_sink_main_render(void *data);
/* GObject base_init: registers the static sink pad template and the
 * element metadata (name, classification, description, author) on the
 * element class. Called once per class by GST_BOILERPLATE_FULL. */
47 evas_video_sink_base_init(gpointer g_class)
49 GstElementClass* element_class;
51 element_class = GST_ELEMENT_CLASS(g_class);
52 gst_element_class_add_pad_template(element_class, gst_static_pad_template_get(&sinktemplate));
53 gst_element_class_set_details_simple(element_class, "Evas video sink",
54 "Sink/Video", "Sends video data from a GStreamer pipeline to an Evas object",
55 "Vincent Torri <vtorri@univ-evry.fr>");
/* Per-instance init: allocates the private struct and puts every field
 * into a known "no stream yet" state, then creates the mutex/condition
 * pair used to hand frames to the main loop (freed in dispose). */
59 evas_video_sink_init(EvasVideoSink* sink, EvasVideoSinkClass* klass __UNUSED__)
61 EvasVideoSinkPrivate* priv;
64 sink->priv = priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, EVAS_TYPE_VIDEO_SINK, EvasVideoSinkPrivate);
66 priv->last_buffer = NULL;
/* No negotiated caps yet; eformat defaults to ARGB until set_caps runs. */
69 priv->gformat = GST_VIDEO_FORMAT_UNKNOWN;
70 priv->eformat = EVAS_COLORSPACE_ARGB8888;
/* The condition is bound to the mutex; render() waits on it until the
 * main-loop render callback signals completion. */
71 eina_lock_new(&priv->m);
72 eina_condition_new(&priv->c, &priv->m);
73 priv->unlocked = EINA_FALSE;
77 /**** Object methods ****/
/* EVAS_CALLBACK_FREE handler installed in set_property(PROP_EVAS_OBJECT):
 * the Evas object backing the sink is being deleted, so drop our
 * reference to it under the lock.
 * NOTE(review): lines are elided in this excerpt — presumably 'priv' is
 * recovered from 'data' and priv->o is cleared between the lock
 * take/release; confirm against the full source. */
79 _cleanup_priv(void *data, Evas *e __UNUSED__, Evas_Object *obj, void *event_info __UNUSED__)
81 EvasVideoSinkPrivate* priv;
85 eina_lock_take(&priv->m);
88 eina_lock_release(&priv->m);
/* GObject property setter. All writes to priv are done under priv->m
 * because the streaming thread reads these fields concurrently.
 * PROP_EVAS_OBJECT also re-registers the FREE callback so a dying Evas
 * object cannot leave a dangling priv->o behind. */
92 evas_video_sink_set_property(GObject * object, guint prop_id,
93 const GValue * value, GParamSpec * pspec)
96 EvasVideoSinkPrivate* priv;
98 sink = EVAS_VIDEO_SINK (object);
102 case PROP_EVAS_OBJECT:
103 eina_lock_take(&priv->m);
/* Swap the target Evas object: detach the cleanup hook from the old
 * one before attaching it to the new one. */
104 evas_object_event_callback_del(priv->o, EVAS_CALLBACK_FREE, _cleanup_priv);
105 priv->o = g_value_get_pointer (value);
106 evas_object_event_callback_add(priv->o, EVAS_CALLBACK_FREE, _cleanup_priv, priv);
107 eina_lock_release(&priv->m);
/* PROP_EV case (label elided in this excerpt): stores the owning
 * Emotion_Gstreamer_Video handle. */
110 eina_lock_take(&priv->m);
111 priv->ev = g_value_get_pointer (value);
112 eina_lock_release(&priv->m);
/* default: unknown property id. */
115 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
116 ERR("invalid property");
/* GObject property getter, mirror of evas_video_sink_set_property().
 * Every read is taken under priv->m because the streaming thread may be
 * updating width/height/o/ev concurrently. */
122 evas_video_sink_get_property(GObject * object, guint prop_id,
123 GValue * value, GParamSpec * pspec)
126 EvasVideoSinkPrivate* priv;
128 sink = EVAS_VIDEO_SINK (object);
132 case PROP_EVAS_OBJECT:
133 eina_lock_take(&priv->m);
134 g_value_set_pointer (value, priv->o);
135 eina_lock_release(&priv->m);
/* PROP_WIDTH case (label elided in this excerpt). */
138 eina_lock_take(&priv->m);
139 g_value_set_int(value, priv->width);
140 eina_lock_release(&priv->m);
/* PROP_HEIGHT case (label elided in this excerpt). */
143 eina_lock_take(&priv->m);
144 g_value_set_int (value, priv->height);
145 eina_lock_release(&priv->m);
/* PROP_EV case (label elided in this excerpt). */
148 eina_lock_take(&priv->m);
149 g_value_set_pointer (value, priv->ev);
150 eina_lock_release(&priv->m);
/* default: unknown property id.
 * Fixed typo in the log message ("invalide" -> "invalid"), matching the
 * wording used by the setter. */
153 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
154 ERR("invalid property");
/* GObject dispose: releases the synchronization primitives and the
 * cached last buffer, then chains up to the parent class.
 * NOTE(review): the mutex is freed before the condition that was bound
 * to it in evas_video_sink_init(); eina appears to tolerate this, but
 * freeing the condition first would be the safer order — confirm
 * against the Eina thread documentation. */
160 evas_video_sink_dispose(GObject* object)
163 EvasVideoSinkPrivate* priv;
165 sink = EVAS_VIDEO_SINK(object);
168 eina_lock_free(&priv->m);
169 eina_condition_free(&priv->c);
/* Drop the reference kept by main_render on the last displayed frame. */
171 if (priv->last_buffer) {
172 gst_buffer_unref(priv->last_buffer);
173 priv->last_buffer = NULL;
176 G_OBJECT_CLASS(parent_class)->dispose(object);
180 /**** BaseSink methods ****/
/* GstBaseSink::set_caps — parses the negotiated caps into width/height
 * and maps the GStreamer video format onto the Evas colorspace used
 * when pushing pixels in evas_video_sink_main_render().
 * Returns FALSE (elided here) when the caps cannot be parsed or the
 * format is not one of those advertised by the pad template. */
182 gboolean evas_video_sink_set_caps(GstBaseSink *bsink, GstCaps *caps)
185 EvasVideoSinkPrivate* priv;
186 GstVideoFormat format;
190 sink = EVAS_VIDEO_SINK(bsink);
193 if (G_UNLIKELY(!gst_video_format_parse_caps(caps, &format, &width, &height))) {
194 ERR("Unable to parse caps.");
199 priv->height = height;
/* NOTE(review): leftover debug printf — should probably go through the
 * GST debug category (or be removed) rather than stdout. */
201 printf("%p format :", priv->o);
/* Map GstVideoFormat -> Evas_Colorspace. YUV planar formats use the
 * 601 planar colorspace, YUY2 the packed one, all BGR variants ARGB. */
204 case GST_VIDEO_FORMAT_I420: priv->eformat = EVAS_COLORSPACE_YCBCR422P601_PL;
207 case GST_VIDEO_FORMAT_YV12: priv->eformat = EVAS_COLORSPACE_YCBCR422P601_PL;
210 case GST_VIDEO_FORMAT_YUY2: priv->eformat = EVAS_COLORSPACE_YCBCR422601_PL;
213 case GST_VIDEO_FORMAT_BGR: priv->eformat = EVAS_COLORSPACE_ARGB8888;
216 case GST_VIDEO_FORMAT_BGRx: priv->eformat = EVAS_COLORSPACE_ARGB8888;
219 case GST_VIDEO_FORMAT_BGRA: priv->eformat = EVAS_COLORSPACE_ARGB8888;
223 ERR("unsupported : %d\n", format);
226 priv->gformat = format;
/* GstBaseSink::start — clears the "unlocked" flag under the lock so
 * render() will block normally again when streaming begins. */
232 evas_video_sink_start(GstBaseSink* base_sink)
234 EvasVideoSinkPrivate* priv;
237 priv = EVAS_VIDEO_SINK(base_sink)->priv;
238 eina_lock_take(&priv->m);
242 priv->unlocked = EINA_FALSE;
243 eina_lock_release(&priv->m);
/* GstBaseSink::stop — wakes any streaming thread blocked in render()
 * so the pipeline can shut down without deadlocking. */
248 evas_video_sink_stop(GstBaseSink* base_sink)
250 EvasVideoSinkPrivate* priv = EVAS_VIDEO_SINK(base_sink)->priv;
252 unlock_buffer_mutex(priv);
/* GstBaseSink::unlock — called on flush/state change; releases the
 * blocked render() thread, then chains to the parent implementation
 * (trailing arguments of the chain-up call elided in this excerpt). */
257 evas_video_sink_unlock(GstBaseSink* object)
261 sink = EVAS_VIDEO_SINK(object);
263 unlock_buffer_mutex(sink->priv);
265 return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock,
/* GstBaseSink::unlock_stop — the flush is over; clear the "unlocked"
 * flag under the lock so render() blocks normally again, then chain up
 * (trailing arguments of the chain-up call elided in this excerpt). */
270 evas_video_sink_unlock_stop(GstBaseSink* object)
273 EvasVideoSinkPrivate* priv;
275 sink = EVAS_VIDEO_SINK(object);
278 eina_lock_take(&priv->m);
/* Use EINA_FALSE for consistency: every other write to priv->unlocked
 * in this file uses the EINA_ constants (same value as glib's FALSE). */
279 priv->unlocked = EINA_FALSE;
280 eina_lock_release(&priv->m);
282 return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock_stop,
/* GstBaseSink::preroll — wraps the buffer and posts it to the Ecore
 * main loop for rendering. Unlike render(), it does NOT wait for the
 * main loop to finish (the EINA_TRUE flag marks it as a preroll). */
287 evas_video_sink_preroll(GstBaseSink* bsink, GstBuffer* buffer)
289 Emotion_Gstreamer_Buffer *send;
290 EvasVideoSinkPrivate *priv;
293 sink = EVAS_VIDEO_SINK(bsink);
296 send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_TRUE);
299 ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
/* GstBaseSink::render — called on the streaming thread for each frame.
 * Posts the buffer to the Ecore main loop (which owns all Evas calls)
 * and blocks on priv->c until main_render signals completion, or until
 * unlock_buffer_mutex() sets priv->unlocked during a flush/stop. */
305 evas_video_sink_render(GstBaseSink* bsink, GstBuffer* buffer)
307 Emotion_Gstreamer_Buffer *send;
308 EvasVideoSinkPrivate *priv;
311 sink = EVAS_VIDEO_SINK(bsink);
314 eina_lock_take(&priv->m);
/* Flushing/stopping: bail out without queueing (return elided). */
316 if (priv->unlocked) {
318 eina_lock_release(&priv->m);
322 send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_FALSE);
/* Allocation-failure path (the guarding 'if' is elided in this
 * excerpt — presumably 'if (!send)'). */
324 eina_lock_release(&priv->m);
325 return GST_FLOW_ERROR;
328 ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
/* Wait (mutex held, released atomically by the wait) until the main
 * loop has consumed the frame. */
330 eina_condition_wait(&priv->c);
331 eina_lock_release(&priv->m);
/* Runs on the Ecore main loop (posted from render()/preroll()): copies
 * or maps one GstBuffer into the Evas image object, updates stream
 * metadata on the Emotion object, then signals the condition so the
 * blocked streaming thread in render() can continue.
 * NOTE(review): many lines (declarations of buffer/preroll/w/h/x/y/i,
 * case 'break's, closing braces) are elided in this excerpt; the
 * comments below describe only what is visible. */
337 evas_video_sink_main_render(void *data)
339 Emotion_Gstreamer_Buffer *send;
340 Emotion_Gstreamer_Video *ev = NULL;
341 Emotion_Video_Stream *vstream;
342 EvasVideoSinkPrivate* priv;
344 unsigned char *evas_data;
345 const guint8 *gst_data;
346 GstFormat fmt = GST_FORMAT_TIME;
/* Guard clauses: without a priv, a target Evas object, buffer data or
 * an Emotion handle there is nothing to draw. */
354 if (!priv) goto exit_point;
355 if (!priv->o) goto exit_point;
357 buffer = send->frame;
358 preroll = send->preroll;
360 if (priv->unlocked) goto exit_point;
362 gst_data = GST_BUFFER_DATA(buffer);
363 if (!gst_data) goto exit_point;
366 if (!ev) goto exit_point;
368 _emotion_gstreamer_video_pipeline_parse(ev, EINA_TRUE);
370 // This prevent a race condition when data are still in the pipe
371 // but the buffer size as changed because of a request from
372 // emotion smart (like on a file set).
373 evas_object_image_size_get(priv->o, &w, &h);
374 if (w != priv->width || h != priv->height)
/* (Re)configure the image object to the negotiated geometry/colorspace
 * before touching its pixel data. */
377 evas_object_image_size_set(priv->o, priv->width, priv->height);
378 evas_object_image_alpha_set(priv->o, 0);
379 evas_object_image_colorspace_set(priv->o, priv->eformat);
381 evas_data = (unsigned char *)evas_object_image_data_get(priv->o, 1);
383 // Evas's BGRA has pre-multiplied alpha while GStreamer's doesn't.
384 // Here we convert to Evas's BGRA.
385 switch (priv->gformat)
/* BGR: copy 3 bytes per pixel into Evas's 4-byte layout (the alpha
 * byte / pointer advances are elided in this excerpt). */
387 case GST_VIDEO_FORMAT_BGR:
389 unsigned char *evas_tmp;
393 evas_tmp = evas_data;
394 /* FIXME: could this be optimized ? */
395 for (x = 0; x < priv->height; x++) {
396 for (y = 0; y < priv->width; y++) {
397 evas_tmp[0] = gst_data[0];
398 evas_tmp[1] = gst_data[1];
399 evas_tmp[2] = gst_data[2];
408 // Evas's BGRA has pre-multiplied alpha while GStreamer's doesn't.
409 // Here we convert to Evas's BGRA.
/* BGRx: same copy as BGR but the source already has a 4th (ignored)
 * byte per pixel. */
410 case GST_VIDEO_FORMAT_BGRx:
412 unsigned char *evas_tmp;
416 evas_tmp = evas_data;
417 /* FIXME: could this be optimized ? */
418 for (x = 0; x < priv->height; x++) {
419 for (y = 0; y < priv->width; y++) {
420 evas_tmp[0] = gst_data[0];
421 evas_tmp[1] = gst_data[1];
422 evas_tmp[2] = gst_data[2];
431 // Evas's BGRA has pre-multiplied alpha while GStreamer's doesn't.
432 // Here we convert to Evas's BGRA.
/* BGRA: pre-multiply each channel by alpha while copying ('alpha'
 * assignment elided in this excerpt). */
433 case GST_VIDEO_FORMAT_BGRA:
435 unsigned char *evas_tmp;
440 evas_tmp = evas_data;
441 /* FIXME: could this be optimized ? */
442 for (x = 0; x < priv->height; x++) {
443 for (y = 0; y < priv->width; y++) {
445 evas_tmp[0] = (gst_data[0] * alpha) / 255;
446 evas_tmp[1] = (gst_data[1] * alpha) / 255;
447 evas_tmp[2] = (gst_data[2] * alpha) / 255;
/* I420: no copy — Evas planar colorspaces take a table of row
 * pointers (Y rows, then U rows, then V rows) into the gst buffer. */
456 case GST_VIDEO_FORMAT_I420:
459 const unsigned char **rows;
461 evas_object_image_pixels_dirty_set(priv->o, 1);
462 rows = (const unsigned char **)evas_data;
464 for (i = 0; i < priv->height; i++)
465 rows[i] = &gst_data[i * priv->width];
467 rows += priv->height;
468 for (i = 0; i < (priv->height / 2); i++)
469 rows[i] = &gst_data[priv->height * priv->width + i * (priv->width / 2)];
471 rows += priv->height / 2;
472 for (i = 0; i < (priv->height / 2); i++)
473 rows[i] = &gst_data[priv->height * priv->width + priv->height * (priv->width /4) + i * (priv->width / 2)];
/* YV12: same as I420 but with the U and V planes swapped in memory. */
477 case GST_VIDEO_FORMAT_YV12:
480 const unsigned char **rows;
482 evas_object_image_pixels_dirty_set(priv->o, 1);
484 rows = (const unsigned char **)evas_data;
486 for (i = 0; i < priv->height; i++)
487 rows[i] = &gst_data[i * priv->width];
489 rows += priv->height;
490 for (i = 0; i < (priv->height / 2); i++)
491 rows[i] = &gst_data[priv->height * priv->width + priv->height * (priv->width /4) + i * (priv->width / 2)];
493 rows += priv->height / 2;
494 for (i = 0; i < (priv->height / 2); i++)
495 rows[i] = &gst_data[priv->height * priv->width + i * (priv->width / 2)];
/* YUY2: packed 4:2:2, 2 bytes per pixel, one row pointer per line. */
499 case GST_VIDEO_FORMAT_YUY2:
502 const unsigned char **rows;
504 evas_object_image_pixels_dirty_set(priv->o, 1);
506 rows = (const unsigned char **)evas_data;
508 for (i = 0; i < priv->height; i++)
509 rows[i] = &gst_data[i * priv->width * 2];
/* Publish the new pixels and mark the whole image dirty. */
514 evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
515 evas_object_image_data_set(priv->o, evas_data);
516 evas_object_image_pixels_dirty_set(priv->o, 0);
518 _emotion_frame_new(ev->obj);
/* video_stream_nbr is 1-based, hence the -1 when indexing the list. */
520 vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
522 gst_element_query_position(ev->pipeline, &fmt, &pos);
523 ev->position = (double)pos / (double)GST_SECOND;
525 vstream->width = priv->width;
526 vstream->height = priv->height;
527 ev->ratio = (double) priv->width / (double) priv->height;
529 _emotion_video_pos_update(ev->obj, ev->position, vstream->length_time);
530 _emotion_frame_resize(ev->obj, priv->width, priv->height, ev->ratio);
/* Keep a ref on this buffer until the next frame replaces it, because
 * Evas may still be reading the row pointers set up above. */
532 if (priv->last_buffer) gst_buffer_unref(priv->last_buffer);
533 priv->last_buffer = gst_buffer_ref(buffer);
536 emotion_gstreamer_buffer_free(send);
/* Preroll frames do not block the streaming thread, so there is no
 * waiter to wake up. */
538 if (preroll || !priv->o || !ev) return ;
540 eina_lock_take(&priv->m);
542 eina_condition_signal(&priv->c);
544 eina_lock_release(&priv->m);
/* Marks the sink as unlocked and signals the condition so a streaming
 * thread blocked in evas_video_sink_render() wakes up immediately.
 * Used by stop() and unlock() to avoid shutdown deadlocks. */
548 unlock_buffer_mutex(EvasVideoSinkPrivate* priv)
550 eina_lock_take(&priv->m);
551 priv->unlocked = EINA_TRUE;
553 eina_condition_signal(&priv->c);
554 eina_lock_release(&priv->m);
/* Custom GClosure marshaller for signals of signature
 * void (*)(object, GstMiniObject *, user_data) — needed because glib's
 * stock marshallers do not know about GstMiniObject. Handles the
 * G_SIGNAL_TYPE_STATIC_SCOPE-style swapped-data convention. */
558 marshal_VOID__MINIOBJECT(GClosure * closure, GValue * return_value __UNUSED__,
559 guint n_param_values, const GValue * param_values,
560 gpointer invocation_hint __UNUSED__, gpointer marshal_data)
562 typedef void (*marshalfunc_VOID__MINIOBJECT) (gpointer obj, gpointer arg1, gpointer data2);
563 marshalfunc_VOID__MINIOBJECT callback;
565 gpointer data1, data2;
567 cc = (GCClosure *) closure;
/* param_values[0] = instance, param_values[1] = the mini object. */
569 g_return_if_fail(n_param_values == 2);
/* Swapped closures pass user data as the instance argument. */
571 if (G_CCLOSURE_SWAP_DATA(closure)) {
572 data1 = closure->data;
573 data2 = g_value_peek_pointer(param_values + 0);
575 data1 = g_value_peek_pointer(param_values + 0);
576 data2 = closure->data;
/* marshal_data overrides the closure callback when set (class-closure
 * override mechanism). */
578 callback = (marshalfunc_VOID__MINIOBJECT) (marshal_data ? marshal_data : cc->callback);
580 callback(data1, gst_value_get_mini_object(param_values + 1), data2);
/* Class init: installs the GObject properties, wires every GstBaseSink
 * virtual method to the implementations above, and registers the
 * "repaint-requested" action signal. */
584 evas_video_sink_class_init(EvasVideoSinkClass* klass)
586 GObjectClass* gobject_class;
587 GstBaseSinkClass* gstbase_sink_class;
589 gobject_class = G_OBJECT_CLASS(klass);
590 gstbase_sink_class = GST_BASE_SINK_CLASS(klass);
592 g_type_class_add_private(klass, sizeof(EvasVideoSinkPrivate));
594 gobject_class->set_property = evas_video_sink_set_property;
595 gobject_class->get_property = evas_video_sink_get_property;
/* "evas-object": the Evas_Object the frames are rendered into
 * (param flags line elided in this excerpt). */
597 g_object_class_install_property (gobject_class, PROP_EVAS_OBJECT,
598 g_param_spec_pointer ("evas-object", "Evas Object",
599 "The Evas object where the display of the video will be done",
/* "width"/"height": read-only negotiated video geometry. */
602 g_object_class_install_property (gobject_class, PROP_WIDTH,
603 g_param_spec_int ("width", "Width",
604 "The width of the video",
605 0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
607 g_object_class_install_property (gobject_class, PROP_HEIGHT,
608 g_param_spec_int ("height", "Height",
609 "The height of the video",
610 0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
/* "ev": back-pointer to the owning Emotion_Gstreamer_Video.
 * Fixed typo in the blurb ("THe" -> "The"). */
611 g_object_class_install_property (gobject_class, PROP_EV,
612 g_param_spec_pointer ("ev", "Emotion_Gstreamer_Video",
613 "The internal data of the emotion object",
616 gobject_class->dispose = evas_video_sink_dispose;
618 gstbase_sink_class->set_caps = evas_video_sink_set_caps;
619 gstbase_sink_class->stop = evas_video_sink_stop;
620 gstbase_sink_class->start = evas_video_sink_start;
621 gstbase_sink_class->unlock = evas_video_sink_unlock;
622 gstbase_sink_class->unlock_stop = evas_video_sink_unlock_stop;
623 gstbase_sink_class->render = evas_video_sink_render;
624 gstbase_sink_class->preroll = evas_video_sink_preroll;
/* Action signal carrying the GstBuffer to repaint (some arguments of
 * g_signal_new are elided in this excerpt). */
626 evas_video_sink_signals[REPAINT_REQUESTED] = g_signal_new("repaint-requested",
627 G_TYPE_FROM_CLASS(klass),
628 (GSignalFlags)(G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
632 marshal_VOID__MINIOBJECT,
633 G_TYPE_NONE, 1, GST_TYPE_BUFFER);
/* GStreamer plugin entry point: registers the EvasVideoSink element
 * type (element name/rank arguments elided in this excerpt). */
637 gstreamer_plugin_init (GstPlugin * plugin)
639 return gst_element_register (plugin,
642 EVAS_TYPE_VIDEO_SINK);
/* Ecore_Thread worker: moves the pipeline to PAUSED off the main loop
 * (state changes can block). Bails out if the thread was cancelled or
 * the pipeline is already gone. */
646 _emotion_gstreamer_pause(void *data, Ecore_Thread *thread)
648 Emotion_Gstreamer_Video *ev = data;
650 if (ecore_thread_check(thread) || !ev->pipeline) return ;
652 gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
/* Ecore_Thread cancel callback: removes the thread from the tracking
 * list and, if this was the last pending work and deletion was
 * requested, finalizes the object (the call on the 'if' branch is
 * elided in this excerpt). */
656 _emotion_gstreamer_cancel(void *data, Ecore_Thread *thread)
658 Emotion_Gstreamer_Video *ev = data;
660 ev->threads = eina_list_remove(ev->threads, thread);
662 if (ev->in == ev->out && ev->threads == NULL && ev->delete_me)
/* Ecore_Thread end callback: the PAUSED transition finished — parse the
 * pipeline's streams, then reuse the cancel path for list cleanup. */
667 _emotion_gstreamer_end(void *data, Ecore_Thread *thread)
669 _emotion_gstreamer_video_pipeline_parse(data, EINA_TRUE);
670 _emotion_gstreamer_cancel(data, thread);
/* Builds the playback pipeline for one media uri: playbin2 with an
 * emotion-sink as video-sink, wired to the Emotion object's Evas image.
 * The PAUSED transition is kicked off asynchronously on an Ecore
 * thread. Error paths (elided 'goto's) fall through to the unref at the
 * bottom. NOTE(review): several lines, including the return statements,
 * are elided in this excerpt. */
674 gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
682 obj = emotion_object_image_get(o);
685 ERR("Not Evas_Object specified");
689 playbin = gst_element_factory_make("playbin2", "playbin");
692 ERR("Unable to create 'playbin' GstElement.");
696 sink = gst_element_factory_make("emotion-sink", "sink");
699 ERR("Unable to create 'emotion-sink' GstElement.");
/* Hand the sink its render target and its owning emotion handle. */
703 g_object_set(G_OBJECT(sink), "evas-object", obj, NULL);
704 g_object_set(G_OBJECT(sink), "ev", ev, NULL);
706 g_object_set(G_OBJECT(playbin), "video-sink", sink, NULL);
707 g_object_set(G_OBJECT(playbin), "uri", uri, NULL);
709 ev->pipeline = playbin;
/* Track the worker thread so shutdown can wait for / cancel it. */
711 ev->threads = eina_list_append(ev->threads,
712 ecore_thread_run(_emotion_gstreamer_pause,
713 _emotion_gstreamer_end,
714 _emotion_gstreamer_cancel,
717 /** NOTE: you need to set: GST_DEBUG_DUMP_DOT_DIR=/tmp EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp' */
718 /** then call dot -Tpng -oemotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
719 if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(playbin), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
/* Error-path cleanup: drop the pipeline on failure. */
724 gst_object_unref(playbin);