#include "emotion_gstreamer.h"
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE("sink",
                                                                   GST_PAD_SINK, GST_PAD_ALWAYS,
                                                                   GST_STATIC_CAPS(GST_VIDEO_CAPS_YUV("{ I420, YV12, YUY2, NV12, ST12, TM12 }") ";"
                                                                                   GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_BGR ";" GST_VIDEO_CAPS_BGRA));
GST_DEBUG_CATEGORY_STATIC(evas_video_sink_debug);
#define GST_CAT_DEFAULT evas_video_sink_debug

static guint evas_video_sink_signals[LAST_SIGNAL] = { 0, };
#define _do_init(bla)                                   \
  GST_DEBUG_CATEGORY_INIT(evas_video_sink_debug,        \
                          "emotion-sink",               \
                          0,                            \
                          "emotion video sink")

GST_BOILERPLATE_FULL(EvasVideoSink,
                     evas_video_sink,
                     GstVideoSink,
                     GST_TYPE_VIDEO_SINK,
                     _do_init);
static void unlock_buffer_mutex(EvasVideoSinkPrivate* priv);
static void evas_video_sink_main_render(void *data);
static void
evas_video_sink_base_init(gpointer g_class)
{
   GstElementClass* element_class;

   element_class = GST_ELEMENT_CLASS(g_class);
   gst_element_class_add_pad_template(element_class, gst_static_pad_template_get(&sinktemplate));
   gst_element_class_set_details_simple(element_class, "Evas video sink",
                                        "Sink/Video", "Sends video data from a GStreamer pipeline to an Evas object",
                                        "Vincent Torri <vtorri@univ-evry.fr>");
}
static void
evas_video_sink_init(EvasVideoSink* sink, EvasVideoSinkClass* klass __UNUSED__)
{
   EvasVideoSinkPrivate* priv;

   sink->priv = priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, EVAS_TYPE_VIDEO_SINK, EvasVideoSinkPrivate);
   priv->o = NULL;
   priv->last_buffer = NULL;
   priv->width = 0;
   priv->height = 0;
   priv->gformat = GST_VIDEO_FORMAT_UNKNOWN;
   priv->eformat = EVAS_COLORSPACE_ARGB8888;
   eina_lock_new(&priv->m);
   eina_condition_new(&priv->c, &priv->m);
   priv->unlocked = EINA_FALSE;
}
/**** Object methods ****/
static void
_cleanup_priv(void *data, Evas *e __UNUSED__, Evas_Object *obj, void *event_info __UNUSED__)
{
   EvasVideoSinkPrivate* priv;

   priv = data;

   eina_lock_take(&priv->m);
   if (priv->o == obj)
     priv->o = NULL;
   eina_lock_release(&priv->m);
}
static void
evas_video_sink_set_property(GObject * object, guint prop_id,
                             const GValue * value, GParamSpec * pspec)
{
   EvasVideoSink* sink;
   EvasVideoSinkPrivate* priv;

   sink = EVAS_VIDEO_SINK (object);
   priv = sink->priv;

   switch (prop_id) {
    case PROP_EVAS_OBJECT:
      eina_lock_take(&priv->m);
      evas_object_event_callback_del(priv->o, EVAS_CALLBACK_FREE, _cleanup_priv);
      priv->o = g_value_get_pointer (value);
      evas_object_event_callback_add(priv->o, EVAS_CALLBACK_FREE, _cleanup_priv, priv);
      eina_lock_release(&priv->m);
      break;

    case PROP_EV:
      eina_lock_take(&priv->m);
      priv->ev = g_value_get_pointer (value);
      eina_lock_release(&priv->m);
      break;

    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      ERR("invalid property");
      break;
   }
}
static void
evas_video_sink_get_property(GObject * object, guint prop_id,
                             GValue * value, GParamSpec * pspec)
{
   EvasVideoSink* sink;
   EvasVideoSinkPrivate* priv;

   sink = EVAS_VIDEO_SINK (object);
   priv = sink->priv;

   switch (prop_id) {
    case PROP_EVAS_OBJECT:
      eina_lock_take(&priv->m);
      g_value_set_pointer (value, priv->o);
      eina_lock_release(&priv->m);
      break;

    case PROP_WIDTH:
      eina_lock_take(&priv->m);
      g_value_set_int(value, priv->width);
      eina_lock_release(&priv->m);
      break;

    case PROP_HEIGHT:
      eina_lock_take(&priv->m);
      g_value_set_int (value, priv->height);
      eina_lock_release(&priv->m);
      break;

    case PROP_EV:
      eina_lock_take(&priv->m);
      g_value_set_pointer (value, priv->ev);
      eina_lock_release(&priv->m);
      break;

    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      ERR("invalid property");
      break;
   }
}
static void
evas_video_sink_dispose(GObject* object)
{
   EvasVideoSink* sink;
   EvasVideoSinkPrivate* priv;

   sink = EVAS_VIDEO_SINK(object);
   priv = sink->priv;

   eina_lock_free(&priv->m);
   eina_condition_free(&priv->c);

   if (priv->last_buffer) {
      gst_buffer_unref(priv->last_buffer);
      priv->last_buffer = NULL;
   }

   G_OBJECT_CLASS(parent_class)->dispose(object);
}
/**** BaseSink methods ****/
gboolean evas_video_sink_set_caps(GstBaseSink *bsink, GstCaps *caps)
{
   EvasVideoSink* sink;
   EvasVideoSinkPrivate* priv;
   GstStructure *structure;
   GstVideoFormat format;
   guint32 fourcc;
   int width, height;

   sink = EVAS_VIDEO_SINK(bsink);
   priv = sink->priv;

   if (!gst_video_format_parse_caps(caps, &format, &width, &height))
     {
        ERR("Unable to parse caps.");
        return FALSE;
     }

   priv->width = width;
   priv->height = height;

   structure = gst_caps_get_structure(caps, 0);

   if (gst_structure_get_fourcc(structure, "format", &fourcc))
     {
        switch (fourcc)
          {
           case GST_MAKE_FOURCC('I', '4', '2', '0'):
              priv->eformat = EVAS_COLORSPACE_YCBCR422P601_PL;
              break;
           case GST_MAKE_FOURCC('Y', 'V', '1', '2'):
              priv->eformat = EVAS_COLORSPACE_YCBCR422P601_PL;
              break;
           case GST_MAKE_FOURCC('Y', 'U', 'Y', '2'):
              priv->eformat = EVAS_COLORSPACE_YCBCR422601_PL;
              break;
           case GST_MAKE_FOURCC('N', 'V', '1', '2'):
              priv->eformat = EVAS_COLORSPACE_YCBCR420NV12601_PL;
              break;
           case GST_MAKE_FOURCC('S', 'T', '1', '2'):
           case GST_MAKE_FOURCC('T', 'M', '1', '2'):
              priv->eformat = EVAS_COLORSPACE_YCBCR420TM12601_PL;
              break;
          }
     }
   else
     {
        switch (format)
          {
           case GST_VIDEO_FORMAT_BGR: priv->eformat = EVAS_COLORSPACE_ARGB8888;
              break;
           case GST_VIDEO_FORMAT_BGRx: priv->eformat = EVAS_COLORSPACE_ARGB8888;
              break;
           case GST_VIDEO_FORMAT_BGRA: priv->eformat = EVAS_COLORSPACE_ARGB8888;
              break;
           default:
              ERR("unsupported : %d\n", format);
              return FALSE;
          }
     }
   priv->gformat = format;

   return TRUE;
}
static gboolean
evas_video_sink_start(GstBaseSink* base_sink)
{
   EvasVideoSinkPrivate* priv;
   gboolean res = TRUE;

   priv = EVAS_VIDEO_SINK(base_sink)->priv;
   eina_lock_take(&priv->m);
   if (!priv->o)
     res = FALSE;
   else
     priv->unlocked = EINA_FALSE;
   eina_lock_release(&priv->m);
   return res;
}
static gboolean
evas_video_sink_stop(GstBaseSink* base_sink)
{
   EvasVideoSinkPrivate* priv = EVAS_VIDEO_SINK(base_sink)->priv;

   unlock_buffer_mutex(priv);
   return TRUE;
}
static gboolean
evas_video_sink_unlock(GstBaseSink* object)
{
   EvasVideoSink* sink;

   sink = EVAS_VIDEO_SINK(object);

   unlock_buffer_mutex(sink->priv);

   return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock,
                                       (object), TRUE);
}
static gboolean
evas_video_sink_unlock_stop(GstBaseSink* object)
{
   EvasVideoSink* sink;
   EvasVideoSinkPrivate* priv;

   sink = EVAS_VIDEO_SINK(object);
   priv = sink->priv;

   eina_lock_take(&priv->m);
   priv->unlocked = EINA_FALSE;
   eina_lock_release(&priv->m);

   return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock_stop,
                                       (object), TRUE);
}
static GstFlowReturn
evas_video_sink_preroll(GstBaseSink* bsink, GstBuffer* buffer)
{
   Emotion_Gstreamer_Buffer *send;
   EvasVideoSinkPrivate *priv;
   EvasVideoSink *sink;

   sink = EVAS_VIDEO_SINK(bsink);
   priv = sink->priv;

   send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_TRUE);
   if (send)
     ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);

   return GST_FLOW_OK;
}
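
/* render() runs in the GStreamer streaming thread: it wraps the incoming
 * GstBuffer, hands it off to the Evas main loop and then blocks on the
 * condition variable until the main-loop side has consumed the frame
 * (or until unlock_buffer_mutex() aborts the wait). */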
static GstFlowReturn
evas_video_sink_render(GstBaseSink* bsink, GstBuffer* buffer)
{
   Emotion_Gstreamer_Buffer *send;
   EvasVideoSinkPrivate *priv;
   EvasVideoSink *sink;

   sink = EVAS_VIDEO_SINK(bsink);
   priv = sink->priv;

   eina_lock_take(&priv->m);

   if (priv->unlocked) {
      eina_lock_release(&priv->m);
      return GST_FLOW_OK;
   }

   send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_FALSE);
   if (!send) {
      eina_lock_release(&priv->m);
      return GST_FLOW_ERROR;
   }

   ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);

   eina_condition_wait(&priv->c);
   eina_lock_release(&priv->m);

   return GST_FLOW_OK;
}
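
/* main_render() is the Ecore main-loop half of the hand-off: it copies or
 * maps the GstBuffer into the Evas image object according to the negotiated
 * format, updates the emotion object (frame, position, size, ratio) and
 * finally signals the streaming thread waiting in render(). */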
static void
evas_video_sink_main_render(void *data)
{
   Emotion_Gstreamer_Buffer *send;
   Emotion_Gstreamer_Video *ev = NULL;
   Emotion_Video_Stream *vstream;
   EvasVideoSinkPrivate* priv;
   GstBuffer* buffer;
   unsigned char *evas_data;
   const guint8 *gst_data;
   GstFormat fmt = GST_FORMAT_TIME;
   Evas_Coord w, h;
   gint64 pos;
   Eina_Bool preroll = EINA_FALSE;

   send = data;

   priv = send->sink;
   if (!priv) goto exit_point;
   if (!priv->o) goto exit_point;

   buffer = send->frame;
   preroll = send->preroll;

   if (priv->unlocked) goto exit_point;

   gst_data = GST_BUFFER_DATA(buffer);
   if (!gst_data) goto exit_point;

   ev = send->ev;
   if (!ev) goto exit_point;

   _emotion_gstreamer_video_pipeline_parse(ev, EINA_TRUE);

   // This prevents a race condition when data is still in the pipe
   // but the buffer size has changed because of a request from the
   // emotion smart object (e.g. on a file set).
   evas_object_image_size_get(priv->o, &w, &h);
   if (w != priv->width || h != priv->height)
     goto exit_point;

   evas_object_image_size_set(priv->o, priv->width, priv->height);
   evas_object_image_alpha_set(priv->o, 0);
   evas_object_image_colorspace_set(priv->o, priv->eformat);

   evas_data = (unsigned char *)evas_object_image_data_get(priv->o, 1);

   // Evas's BGRA has pre-multiplied alpha while GStreamer's doesn't.
   // Here we convert to Evas's BGRA.
   switch (priv->gformat)
     {
      case GST_VIDEO_FORMAT_BGR:
        {
           unsigned char *evas_tmp;
           int x;
           int y;

           evas_tmp = evas_data;
           /* FIXME: could this be optimized ? */
           for (x = 0; x < priv->height; x++) {
              for (y = 0; y < priv->width; y++) {
                 evas_tmp[0] = gst_data[0];
                 evas_tmp[1] = gst_data[1];
                 evas_tmp[2] = gst_data[2];
                 evas_tmp[3] = 255;
                 gst_data += 3;
                 evas_tmp += 4;
              }
           }
           break;
        }
        // Evas's BGRA has pre-multiplied alpha while GStreamer's doesn't.
        // Here we convert to Evas's BGRA.
      case GST_VIDEO_FORMAT_BGRx:
        {
           unsigned char *evas_tmp;
           int x;
           int y;

           evas_tmp = evas_data;
           /* FIXME: could this be optimized ? */
           for (x = 0; x < priv->height; x++) {
              for (y = 0; y < priv->width; y++) {
                 evas_tmp[0] = gst_data[0];
                 evas_tmp[1] = gst_data[1];
                 evas_tmp[2] = gst_data[2];
                 evas_tmp[3] = 255;
                 gst_data += 4;
                 evas_tmp += 4;
              }
           }
           break;
        }
        // Evas's BGRA has pre-multiplied alpha while GStreamer's doesn't.
        // Here we convert to Evas's BGRA.
      case GST_VIDEO_FORMAT_BGRA:
        {
           unsigned char *evas_tmp;
           int x;
           int y;
           unsigned char alpha;

           evas_tmp = evas_data;
           /* FIXME: could this be optimized ? */
           for (x = 0; x < priv->height; x++) {
              for (y = 0; y < priv->width; y++) {
                 alpha = gst_data[3];
                 evas_tmp[0] = (gst_data[0] * alpha) / 255;
                 evas_tmp[1] = (gst_data[1] * alpha) / 255;
                 evas_tmp[2] = (gst_data[2] * alpha) / 255;
                 evas_tmp[3] = alpha;
                 gst_data += 4;
                 evas_tmp += 4;
              }
           }
           break;
        }
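
      /* For the YUV formats below no pixel data is copied: Evas expects an
       * array of row pointers in the image data, so each case only fills
       * that table with pointers into the GstBuffer planes. */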
      case GST_VIDEO_FORMAT_I420:
        {
           int i;
           const unsigned char **rows;

           evas_object_image_pixels_dirty_set(priv->o, 1);
           rows = (const unsigned char **)evas_data;

           for (i = 0; i < priv->height; i++)
             rows[i] = &gst_data[i * priv->width];

           rows += priv->height;
           for (i = 0; i < (priv->height / 2); i++)
             rows[i] = &gst_data[priv->height * priv->width + i * (priv->width / 2)];

           rows += priv->height / 2;
           for (i = 0; i < (priv->height / 2); i++)
             rows[i] = &gst_data[priv->height * priv->width + priv->height * (priv->width / 4) + i * (priv->width / 2)];
           break;
        }
      case GST_VIDEO_FORMAT_YV12:
        {
           int i;
           const unsigned char **rows;

           evas_object_image_pixels_dirty_set(priv->o, 1);

           rows = (const unsigned char **)evas_data;

           for (i = 0; i < priv->height; i++)
             rows[i] = &gst_data[i * priv->width];

           rows += priv->height;
           for (i = 0; i < (priv->height / 2); i++)
             rows[i] = &gst_data[priv->height * priv->width + priv->height * (priv->width / 4) + i * (priv->width / 2)];

           rows += priv->height / 2;
           for (i = 0; i < (priv->height / 2); i++)
             rows[i] = &gst_data[priv->height * priv->width + i * (priv->width / 2)];
           break;
        }
      case GST_VIDEO_FORMAT_YUY2:
        {
           int i;
           const unsigned char **rows;

           evas_object_image_pixels_dirty_set(priv->o, 1);

           rows = (const unsigned char **)evas_data;

           for (i = 0; i < priv->height; i++)
             rows[i] = &gst_data[i * priv->width * 2];
           break;
        }
     }
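
   /* NV12 and the Samsung tiled ST12/TM12 layouts are dispatched on the
    * Evas colorspace negotiated in set_caps() rather than on the
    * GStreamer format. */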
   switch (priv->eformat)
     {
      case EVAS_COLORSPACE_YCBCR420NV12601_PL:
        {
           int i;
           const unsigned char **rows;

           evas_object_image_pixels_dirty_set(priv->o, 1);

           rows = (const unsigned char **)evas_data;

           for (i = 0; i < priv->height; i++)
             rows[i] = &gst_data[i * priv->width];

           rows += priv->height;
           for (i = 0; i < (priv->height / 2); i++)
             rows[i] = &gst_data[priv->height * priv->width + i * priv->width];
           break;
        }
      case EVAS_COLORSPACE_YCBCR420TM12601_PL:
        {
           int i;
           const unsigned char **rows;

           evas_object_image_pixels_dirty_set(priv->o, 1);

           rows = (const unsigned char **)evas_data;

           for (i = 0; i < (priv->height / 32) / 2; i++)
             rows[i] = &gst_data[i * priv->width * 2 * 32];
           if ((priv->height / 32) % 2)
             rows[i] = &gst_data[i * priv->width * 2 * 32];

           rows += priv->height;
           for (i = 0; i < ((priv->height / 2) / 32) / 2; ++i)
             rows[i] = &gst_data[priv->height * priv->width + i * (priv->width / 2) * 2 * 16];
           break;
        }
     }
   evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
   evas_object_image_data_set(priv->o, evas_data);
   evas_object_image_pixels_dirty_set(priv->o, 0);

   _emotion_frame_new(ev->obj);

   vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);

   gst_element_query_position(ev->pipeline, &fmt, &pos);
   ev->position = (double)pos / (double)GST_SECOND;

   vstream->width = priv->width;
   vstream->height = priv->height;
   ev->ratio = (double) priv->width / (double) priv->height;

   _emotion_video_pos_update(ev->obj, ev->position, vstream->length_time);
   _emotion_frame_resize(ev->obj, priv->width, priv->height, ev->ratio);

   if (priv->last_buffer) gst_buffer_unref(priv->last_buffer);
   priv->last_buffer = gst_buffer_ref(buffer);
 exit_point:
   emotion_gstreamer_buffer_free(send);

   if (preroll || !priv->o || !ev) return;

   eina_lock_take(&priv->m);

   eina_condition_signal(&priv->c);

   eina_lock_release(&priv->m);
}
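
/* Called from stop()/unlock(): marks the sink as unlocked and wakes up a
 * streaming thread that may be blocked in evas_video_sink_render(). */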
static void
unlock_buffer_mutex(EvasVideoSinkPrivate* priv)
{
   eina_lock_take(&priv->m);
   priv->unlocked = EINA_TRUE;

   eina_condition_signal(&priv->c);
   eina_lock_release(&priv->m);
}
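
/* Custom GClosure marshaller for the "repaint-requested" signal, whose
 * single argument is a GstMiniObject (the GstBuffer to repaint from). */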
static void
marshal_VOID__MINIOBJECT(GClosure * closure, GValue * return_value __UNUSED__,
                         guint n_param_values, const GValue * param_values,
                         gpointer invocation_hint __UNUSED__, gpointer marshal_data)
{
   typedef void (*marshalfunc_VOID__MINIOBJECT) (gpointer obj, gpointer arg1, gpointer data2);
   marshalfunc_VOID__MINIOBJECT callback;
   GCClosure *cc;
   gpointer data1, data2;

   cc = (GCClosure *) closure;

   g_return_if_fail(n_param_values == 2);

   if (G_CCLOSURE_SWAP_DATA(closure)) {
      data1 = closure->data;
      data2 = g_value_peek_pointer(param_values + 0);
   } else {
      data1 = g_value_peek_pointer(param_values + 0);
      data2 = closure->data;
   }

   callback = (marshalfunc_VOID__MINIOBJECT) (marshal_data ? marshal_data : cc->callback);

   callback(data1, gst_value_get_mini_object(param_values + 1), data2);
}
static void
evas_video_sink_class_init(EvasVideoSinkClass* klass)
{
   GObjectClass* gobject_class;
   GstBaseSinkClass* gstbase_sink_class;

   gobject_class = G_OBJECT_CLASS(klass);
   gstbase_sink_class = GST_BASE_SINK_CLASS(klass);

   g_type_class_add_private(klass, sizeof(EvasVideoSinkPrivate));

   gobject_class->set_property = evas_video_sink_set_property;
   gobject_class->get_property = evas_video_sink_get_property;

   g_object_class_install_property (gobject_class, PROP_EVAS_OBJECT,
                                    g_param_spec_pointer ("evas-object", "Evas Object",
                                                          "The Evas object where the display of the video will be done",
                                                          G_PARAM_READWRITE));

   g_object_class_install_property (gobject_class, PROP_WIDTH,
                                    g_param_spec_int ("width", "Width",
                                                      "The width of the video",
                                                      0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));

   g_object_class_install_property (gobject_class, PROP_HEIGHT,
                                    g_param_spec_int ("height", "Height",
                                                      "The height of the video",
                                                      0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));

   g_object_class_install_property (gobject_class, PROP_EV,
                                    g_param_spec_pointer ("ev", "Emotion_Gstreamer_Video",
                                                          "The internal data of the emotion object",
                                                          G_PARAM_READWRITE));

   gobject_class->dispose = evas_video_sink_dispose;

   gstbase_sink_class->set_caps = evas_video_sink_set_caps;
   gstbase_sink_class->stop = evas_video_sink_stop;
   gstbase_sink_class->start = evas_video_sink_start;
   gstbase_sink_class->unlock = evas_video_sink_unlock;
   gstbase_sink_class->unlock_stop = evas_video_sink_unlock_stop;
   gstbase_sink_class->render = evas_video_sink_render;
   gstbase_sink_class->preroll = evas_video_sink_preroll;

   evas_video_sink_signals[REPAINT_REQUESTED] = g_signal_new("repaint-requested",
                                                             G_TYPE_FROM_CLASS(klass),
                                                             (GSignalFlags)(G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
                                                             0,
                                                             NULL,
                                                             NULL,
                                                             marshal_VOID__MINIOBJECT,
                                                             G_TYPE_NONE, 1, GST_TYPE_BUFFER);
}
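
/* Registers the sink element with GStreamer so that it can be instantiated
 * by name (see gstreamer_video_sink_new() below). */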
static gboolean
gstreamer_plugin_init (GstPlugin * plugin)
{
   return gst_element_register (plugin,
                                "emotion-sink",
                                GST_RANK_NONE,
                                EVAS_TYPE_VIDEO_SINK);
}
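
/* The three callbacks below run the initial pipeline state change in an
 * Ecore thread: _emotion_gstreamer_pause() performs the blocking PAUSED
 * transition off the main loop, while the _end()/_cancel() callbacks run
 * back on the main loop to update the thread list and finish or abort
 * the startup. */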
static void
_emotion_gstreamer_pause(void *data, Ecore_Thread *thread)
{
   Emotion_Gstreamer_Video *ev = data;

   if (ecore_thread_check(thread) || !ev->pipeline) return;

   gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
}
static void
_emotion_gstreamer_cancel(void *data, Ecore_Thread *thread)
{
   Emotion_Gstreamer_Video *ev = data;

   ev->threads = eina_list_remove(ev->threads, thread);

   if (ev->in == ev->out && ev->threads == NULL && ev->delete_me)
     em_shutdown(ev);
}
static void
_emotion_gstreamer_end(void *data, Ecore_Thread *thread)
{
   Emotion_Gstreamer_Video *ev = data;

   ev->threads = eina_list_remove(ev->threads, thread);

   if (ev->play)
     {
        gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
        ev->play_started = 1;
     }

   if (ev->in == ev->out && ev->threads == NULL && ev->delete_me)
     em_shutdown(ev);
   else
     _emotion_gstreamer_video_pipeline_parse(data, EINA_TRUE);
}
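
/* Builds the playback pipeline for an emotion object: a playbin2 whose
 * video-sink is an emotion-sink bound to the object's Evas image, then
 * kicks off the asynchronous PAUSED transition in an Ecore thread. */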
Eina_Bool
gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
                         Evas_Object *o,
                         const char *uri)
{
   Evas_Object *obj;
   GstElement *playbin;
   GstElement *sink = NULL;

   obj = emotion_object_image_get(o);
   if (!obj)
     {
        ERR("No Evas_Object specified");
        return EINA_FALSE;
     }

   playbin = gst_element_factory_make("playbin2", "playbin");
   if (!playbin)
     {
        ERR("Unable to create 'playbin' GstElement.");
        return EINA_FALSE;
     }

   sink = gst_element_factory_make("emotion-sink", "sink");
   if (!sink)
     {
        ERR("Unable to create 'emotion-sink' GstElement.");
        goto unref_pipeline;
     }

   g_object_set(G_OBJECT(sink), "evas-object", obj, NULL);
   g_object_set(G_OBJECT(sink), "ev", ev, NULL);

   g_object_set(G_OBJECT(playbin), "video-sink", sink, NULL);
   g_object_set(G_OBJECT(playbin), "uri", uri, NULL);

   ev->pipeline = playbin;

   ev->threads = eina_list_append(ev->threads,
                                  ecore_thread_run(_emotion_gstreamer_pause,
                                                   _emotion_gstreamer_end,
                                                   _emotion_gstreamer_cancel,
                                                   ev));

   /* NOTE: set GST_DEBUG_DUMP_DOT_DIR=/tmp and EMOTION_ENGINE=gstreamer to save
    * the $EMOTION_GSTREAMER_DOT dot file in '/tmp', then render it with:
    * dot -Tpng -o emotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
   if (getenv("EMOTION_GSTREAMER_DOT"))
     GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(playbin),
                                       GST_DEBUG_GRAPH_SHOW_ALL,
                                       getenv("EMOTION_GSTREAMER_DOT"));

   return EINA_TRUE;

 unref_pipeline:
   gst_object_unref(playbin);

   return EINA_FALSE;
}