3 #include "emotion_gstreamer.h"
/* Static sink pad template: accepts planar/packed YUV (I420, YV12, YUY2,
 * NV12 and the Samsung tiled ST12/TM12 variants) plus BGRx/BGR/BGRA RGB.
 * Must stay in sync with the format switches in evas_video_sink_set_caps()
 * and evas_video_sink_main_render(). */
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE("sink",
   GST_PAD_SINK, GST_PAD_ALWAYS,
   GST_STATIC_CAPS(GST_VIDEO_CAPS_YUV("{ I420, YV12, YUY2, NV12, ST12, TM12 }") ";"
                   GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_BGR ";" GST_VIDEO_CAPS_BGRA));
/* Per-element GStreamer debug category for this sink. */
GST_DEBUG_CATEGORY_STATIC(evas_video_sink_debug);
#define GST_CAT_DEFAULT evas_video_sink_debug

/* Signal id table, indexed by the signal enum (REPAINT_REQUESTED, ...). */
static guint evas_video_sink_signals[LAST_SIGNAL] = { 0, };

/* Type-registration hook: initialize the debug category when the GType is
 * created (GStreamer 0.10 boilerplate).
 * NOTE(review): the macro body is truncated in this excerpt. */
#define _do_init(bla) \
   GST_DEBUG_CATEGORY_INIT(evas_video_sink_debug, \

/* GStreamer 0.10 boilerplate: generates the GType plumbing for EvasVideoSink
 * (truncated in this excerpt). */
GST_BOILERPLATE_FULL(EvasVideoSink,

/* Forward declarations for helpers used before their definition. */
static void unlock_buffer_mutex(EvasVideoSinkPrivate* priv);
static void evas_video_sink_main_render(void *data);
/* GObject base init: register the sink pad template and the element
 * metadata (name, classification, description, author) on the class. */
evas_video_sink_base_init(gpointer g_class)
   GstElementClass* element_class;

   element_class = GST_ELEMENT_CLASS(g_class);
   gst_element_class_add_pad_template(element_class, gst_static_pad_template_get(&sinktemplate));
   gst_element_class_set_details_simple(element_class, "Evas video sink",
         "Sink/Video", "Sends video data from a GStreamer pipeline to an Evas object",
         "Vincent Torri <vtorri@univ-evry.fr>");
/* Instance init: fetch the private struct, reset cached frame state and
 * create the lock/condition pair used to hand frames from the streaming
 * thread to the Ecore main loop. */
evas_video_sink_init(EvasVideoSink* sink, EvasVideoSinkClass* klass __UNUSED__)
   EvasVideoSinkPrivate* priv;

   sink->priv = priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, EVAS_TYPE_VIDEO_SINK, EvasVideoSinkPrivate);
   priv->last_buffer = NULL; /* last rendered GstBuffer, kept referenced */
   priv->gformat = GST_VIDEO_FORMAT_UNKNOWN;
   priv->eformat = EVAS_COLORSPACE_ARGB8888;
   eina_lock_new(&priv->m);
   eina_condition_new(&priv->c, &priv->m); /* condition is bound to priv->m */
   priv->unlocked = EINA_FALSE; /* set to EINA_TRUE to abort a waiting render */
77 /**** Object methods ****/
/* EVAS_CALLBACK_FREE handler: the Evas object we render into is being
 * deleted.  Runs under priv->m; the fragment between take/release is not
 * visible here — presumably it clears priv->o so the render path stops
 * touching the dead object (TODO confirm against the full source). */
_cleanup_priv(void *data, Evas *e __UNUSED__, Evas_Object *obj, void *event_info __UNUSED__)
   EvasVideoSinkPrivate* priv;

   eina_lock_take(&priv->m);
   eina_lock_release(&priv->m);
/* GObject property setter.  Only "evas-object" and "ev" are writable; both
 * stores happen under priv->m because the streaming thread reads them. */
evas_video_sink_set_property(GObject * object, guint prop_id,
                             const GValue * value, GParamSpec * pspec)
   EvasVideoSinkPrivate* priv;

   sink = EVAS_VIDEO_SINK (object);

      case PROP_EVAS_OBJECT:
         eina_lock_take(&priv->m);
         /* Move the FREE callback from the old target object to the new one
          * so _cleanup_priv() always tracks the current priv->o. */
         evas_object_event_callback_del(priv->o, EVAS_CALLBACK_FREE, _cleanup_priv);
         priv->o = g_value_get_pointer (value);
         evas_object_event_callback_add(priv->o, EVAS_CALLBACK_FREE, _cleanup_priv, priv);
         eina_lock_release(&priv->m);

         eina_lock_take(&priv->m);
         priv->ev = g_value_get_pointer (value);
         eina_lock_release(&priv->m);

         G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
         ERR("invalid property");
122 evas_video_sink_get_property(GObject * object, guint prop_id,
123 GValue * value, GParamSpec * pspec)
126 EvasVideoSinkPrivate* priv;
128 sink = EVAS_VIDEO_SINK (object);
132 case PROP_EVAS_OBJECT:
133 eina_lock_take(&priv->m);
134 g_value_set_pointer (value, priv->o);
135 eina_lock_release(&priv->m);
138 eina_lock_take(&priv->m);
139 g_value_set_int(value, priv->width);
140 eina_lock_release(&priv->m);
143 eina_lock_take(&priv->m);
144 g_value_set_int (value, priv->height);
145 eina_lock_release(&priv->m);
148 eina_lock_take(&priv->m);
149 g_value_set_pointer (value, priv->ev);
150 eina_lock_release(&priv->m);
153 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
154 ERR("invalide property");
/* GObject dispose: tear down the lock/condition and drop the cached last
 * frame, then chain up to the parent class.
 * NOTE(review): GObject dispose may run more than once; freeing the
 * lock/condition here (rather than in finalize) assumes a single run —
 * verify against the full source. */
evas_video_sink_dispose(GObject* object)
   EvasVideoSinkPrivate* priv;

   sink = EVAS_VIDEO_SINK(object);

   eina_lock_free(&priv->m);
   eina_condition_free(&priv->c);

   if (priv->last_buffer) {
      gst_buffer_unref(priv->last_buffer);
      priv->last_buffer = NULL;

   G_OBJECT_CLASS(parent_class)->dispose(object);
180 /**** BaseSink methods ****/
/* GstBaseSink::set_caps — parse the negotiated caps into priv->width,
 * priv->height, the GStreamer format (priv->gformat) and the matching Evas
 * colorspace (priv->eformat).  Fails on unparsable or unsupported caps.
 * (Fragmentary view: switch headers, breaks and returns are not visible.) */
gboolean evas_video_sink_set_caps(GstBaseSink *bsink, GstCaps *caps)
   EvasVideoSinkPrivate* priv;
   GstStructure *structure;
   GstVideoFormat format;

   sink = EVAS_VIDEO_SINK(bsink);

   structure = gst_caps_get_structure(caps, 0);

   if (!((gst_structure_get_int(structure, "width", &priv->width)
          && gst_structure_get_int(structure, "height", &priv->height))))

   /* YUV caps carry a fourcc: map each supported fourcc to an Evas
    * colorspace directly. */
   if (gst_structure_get_fourcc(structure, "format", &fourcc))
        case GST_MAKE_FOURCC('I', '4', '2', '0'):
           priv->eformat = EVAS_COLORSPACE_YCBCR422P601_PL;
        case GST_MAKE_FOURCC('Y', 'V', '1', '2'):
           /* YV12 shares the planar 4:2:0 colorspace with I420; the plane
            * order difference is handled in evas_video_sink_main_render(). */
           priv->eformat = EVAS_COLORSPACE_YCBCR422P601_PL;
        case GST_MAKE_FOURCC('Y', 'U', 'Y', '2'):
           priv->eformat = EVAS_COLORSPACE_YCBCR422601_PL;
        case GST_MAKE_FOURCC('N', 'V', '1', '2'):
           priv->eformat = EVAS_COLORSPACE_YCBCR420NV12601_PL;
        case GST_MAKE_FOURCC('S', 'T', '1', '2'):
        case GST_MAKE_FOURCC('T', 'M', '1', '2'):
           /* ST12/TM12 (Samsung tiled NV12 variants) share one colorspace. */
           priv->eformat = EVAS_COLORSPACE_YCBCR420TM12601_PL;

   /* Non-fourcc (RGB) caps: let gst_video_format_parse_caps() decide. */
   if (!gst_video_format_parse_caps(caps, &format, &priv->width, &priv->height))
        ERR("Unable to parse caps.");

   /* All BGR variants land in Evas as ARGB8888; main_render() does the
    * per-pixel byte shuffling and alpha premultiplication. */
        case GST_VIDEO_FORMAT_BGR: priv->eformat = EVAS_COLORSPACE_ARGB8888;
        case GST_VIDEO_FORMAT_BGRx: priv->eformat = EVAS_COLORSPACE_ARGB8888;
        case GST_VIDEO_FORMAT_BGRA: priv->eformat = EVAS_COLORSPACE_ARGB8888;
           ERR("unsupported : %d\n", format);

   priv->gformat = format;
/* GstBaseSink::start — reset the unlocked flag under the mutex so a fresh
 * streaming run can block in render() again. */
evas_video_sink_start(GstBaseSink* base_sink)
   EvasVideoSinkPrivate* priv;

   priv = EVAS_VIDEO_SINK(base_sink)->priv;
   eina_lock_take(&priv->m);
   priv->unlocked = EINA_FALSE;
   eina_lock_release(&priv->m);
/* GstBaseSink::stop — wake any render() blocked on the condition so the
 * streaming thread can shut down cleanly. */
evas_video_sink_stop(GstBaseSink* base_sink)
   EvasVideoSinkPrivate* priv = EVAS_VIDEO_SINK(base_sink)->priv;

   unlock_buffer_mutex(priv);
/* GstBaseSink::unlock — called on flush/state change; abort any blocked
 * render() and then chain up to the parent implementation. */
evas_video_sink_unlock(GstBaseSink* object)
   sink = EVAS_VIDEO_SINK(object);

   unlock_buffer_mutex(sink->priv);

   return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock,
297 evas_video_sink_unlock_stop(GstBaseSink* object)
300 EvasVideoSinkPrivate* priv;
302 sink = EVAS_VIDEO_SINK(object);
305 eina_lock_take(&priv->m);
306 priv->unlocked = FALSE;
307 eina_lock_release(&priv->m);
309 return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock_stop,
/* GstBaseSink::preroll — hand the first buffer to the main loop for display.
 * Unlike render(), preroll does NOT wait for the main loop to finish
 * (the allocated Emotion_Gstreamer_Buffer is flagged preroll = EINA_TRUE). */
evas_video_sink_preroll(GstBaseSink* bsink, GstBuffer* buffer)
   Emotion_Gstreamer_Buffer *send;
   EvasVideoSinkPrivate *priv;

   sink = EVAS_VIDEO_SINK(bsink);

   send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_TRUE);

   /* Schedule the actual Evas upload on the Ecore main loop thread. */
   ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
/* GstBaseSink::render — push one frame to the main loop and block until it
 * has been consumed (eina_condition_wait), throttling the streaming thread
 * to the display.  Aborts immediately if unlock() flagged priv->unlocked. */
evas_video_sink_render(GstBaseSink* bsink, GstBuffer* buffer)
   Emotion_Gstreamer_Buffer *send;
   EvasVideoSinkPrivate *priv;

   sink = EVAS_VIDEO_SINK(bsink);

   eina_lock_take(&priv->m);

   if (priv->unlocked) {
      /* Flushing/stopping: give up without dispatching the frame. */
      eina_lock_release(&priv->m);

   send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_FALSE);
      /* Allocation failed (fragment: the if() line is not visible here). */
      eina_lock_release(&priv->m);
      return GST_FLOW_ERROR;

   ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);

   /* Wait for evas_video_sink_main_render() to signal completion; priv->m
    * is released while waiting and re-taken before returning. */
   eina_condition_wait(&priv->c);
   eina_lock_release(&priv->m);
/* Runs on the Ecore main loop thread.  Copies (or zero-copy row-points) the
 * GstBuffer passed through the Emotion_Gstreamer_Buffer into the Evas image
 * object, converting from the negotiated GStreamer format to the Evas
 * colorspace, then updates position/size bookkeeping and finally signals
 * the streaming thread blocked in evas_video_sink_render().
 * (Fragmentary view: braces, switch headers, breaks and some locals are not
 * visible in this excerpt.) */
evas_video_sink_main_render(void *data)
   Emotion_Gstreamer_Buffer *send;
   Emotion_Gstreamer_Video *ev = NULL;
   Emotion_Video_Stream *vstream;
   EvasVideoSinkPrivate* priv;
   unsigned char *evas_data;
   const guint8 *gst_data;
   GstFormat fmt = GST_FORMAT_TIME;

   /* Bail out early if the sink or its target object is already gone. */
   if (!priv) goto exit_point;
   if (!priv->o) goto exit_point;

   buffer = send->frame;
   preroll = send->preroll;

   if (priv->unlocked) goto exit_point;

   gst_data = GST_BUFFER_DATA(buffer);
   if (!gst_data) goto exit_point;

   if (!ev) goto exit_point;

   _emotion_gstreamer_video_pipeline_parse(ev, EINA_TRUE);

   // This prevents a race condition when data is still in the pipe
   // but the buffer size has changed because of a request from
   // emotion smart (like on a file set).
   evas_object_image_size_get(priv->o, &w, &h);
   if (w != priv->width || h != priv->height)

   evas_object_image_size_set(priv->o, priv->width, priv->height);
   evas_object_image_alpha_set(priv->o, 0);
   evas_object_image_colorspace_set(priv->o, priv->eformat);

   evas_data = (unsigned char *)evas_object_image_data_get(priv->o, 1);

   // Evas's BGRA has pre-multiplied alpha while GStreamer's doesn't.
   // Here we convert to Evas's BGRA.
   switch (priv->gformat)
      case GST_VIDEO_FORMAT_BGR:
           unsigned char *evas_tmp;

           evas_tmp = evas_data;
           /* FIXME: could this be optimized ? */
           /* BGR -> BGRA copy; the alpha-fill/pointer-advance lines are
            * outside this excerpt. */
           for (x = 0; x < priv->height; x++) {
              for (y = 0; y < priv->width; y++) {
                 evas_tmp[0] = gst_data[0];
                 evas_tmp[1] = gst_data[1];
                 evas_tmp[2] = gst_data[2];

      // Evas's BGRA has pre-multiplied alpha while GStreamer's doesn't.
      // Here we convert to Evas's BGRA.
      case GST_VIDEO_FORMAT_BGRx:
           unsigned char *evas_tmp;

           evas_tmp = evas_data;
           /* FIXME: could this be optimized ? */
           /* BGRx -> BGRA copy; padding byte is skipped (advance lines not
            * visible here). */
           for (x = 0; x < priv->height; x++) {
              for (y = 0; y < priv->width; y++) {
                 evas_tmp[0] = gst_data[0];
                 evas_tmp[1] = gst_data[1];
                 evas_tmp[2] = gst_data[2];

      // Evas's BGRA has pre-multiplied alpha while GStreamer's doesn't.
      // Here we convert to Evas's BGRA.
      case GST_VIDEO_FORMAT_BGRA:
           unsigned char *evas_tmp;

           evas_tmp = evas_data;
           /* FIXME: could this be optimized ? */
           /* Premultiply each channel by the frame's alpha (alpha read is
            * outside this excerpt). */
           for (x = 0; x < priv->height; x++) {
              for (y = 0; y < priv->width; y++) {
                 evas_tmp[0] = (gst_data[0] * alpha) / 255;
                 evas_tmp[1] = (gst_data[1] * alpha) / 255;
                 evas_tmp[2] = (gst_data[2] * alpha) / 255;

      case GST_VIDEO_FORMAT_I420:
           const unsigned char **rows;

           evas_object_image_pixels_dirty_set(priv->o, 1);
           /* Zero-copy: the Evas planar-YUV data area is a table of row
            * pointers; point them straight into the GstBuffer.
            * Layout: Y plane (h rows of w), then U, then V (h/2 rows of
            * w/2 each). */
           rows = (const unsigned char **)evas_data;

           for (i = 0; i < priv->height; i++)
             rows[i] = &gst_data[i * priv->width];

           rows += priv->height;
           for (i = 0; i < (priv->height / 2); i++)
             rows[i] = &gst_data[priv->height * priv->width + i * (priv->width / 2)];

           rows += priv->height / 2;
           for (i = 0; i < (priv->height / 2); i++)
             rows[i] = &gst_data[priv->height * priv->width + priv->height * (priv->width /4) + i * (priv->width / 2)];

      case GST_VIDEO_FORMAT_YV12:
           const unsigned char **rows;

           evas_object_image_pixels_dirty_set(priv->o, 1);

           /* Same as I420 but with the chroma planes swapped in the buffer
            * (V before U), so the second/third row tables point at the
            * opposite offsets. */
           rows = (const unsigned char **)evas_data;

           for (i = 0; i < priv->height; i++)
             rows[i] = &gst_data[i * priv->width];

           rows += priv->height;
           for (i = 0; i < (priv->height / 2); i++)
             rows[i] = &gst_data[priv->height * priv->width + priv->height * (priv->width /4) + i * (priv->width / 2)];

           rows += priv->height / 2;
           for (i = 0; i < (priv->height / 2); i++)
             rows[i] = &gst_data[priv->height * priv->width + i * (priv->width / 2)];

      case GST_VIDEO_FORMAT_YUY2:
           const unsigned char **rows;

           evas_object_image_pixels_dirty_set(priv->o, 1);

           /* Packed 4:2:2 — one row table, 2 bytes per pixel. */
           rows = (const unsigned char **)evas_data;

           for (i = 0; i < priv->height; i++)
             rows[i] = &gst_data[i * priv->width * 2];

   /* Formats without a GstVideoFormat enum value (NV12/tiled) are keyed on
    * the Evas colorspace instead. */
   switch (priv->eformat)
      case EVAS_COLORSPACE_YCBCR420NV12601_PL:
           const unsigned char **rows;

           evas_object_image_pixels_dirty_set(priv->o, 1);

           /* NV12: Y plane rows, then interleaved UV plane rows (full
            * width, half height). */
           rows = (const unsigned char **)evas_data;

           for (i = 0; i < priv->height; i++)
             rows[i] = &gst_data[i * priv->width];

           rows += priv->height;
           for (i = 0; i < (priv->height / 2); i++)
             rows[i] = &gst_data[priv->height * priv->width + i * priv->width];

      case EVAS_COLORSPACE_YCBCR420TM12601_PL:
           const unsigned char **rows;

           evas_object_image_pixels_dirty_set(priv->o, 1);

           /* Tiled (TM12) layout: rows point at 32-line macro-tile pairs;
            * the odd trailing tile is handled after the loop.  NOTE(review):
            * intricate offset math inherited as-is — verify against the
            * Samsung tiled-NV12 layout if touched. */
           rows = (const unsigned char **)evas_data;

           for (i = 0; i < (priv->height / 32) / 2; i++)
             rows[i] = &gst_data[i * priv->width * 2 * 32];

           if ((priv->height / 32) % 2)
             rows[i] = &gst_data[i * priv->width * 2 * 32];

           rows += priv->height;
           for (i = 0; i < ((priv->height / 2) / 32) / 2; ++i)
             rows[i] = &gst_data[priv->height * priv->width + i * (priv->width / 2) * 2 * 16];

   /* Push the new pixels to Evas and mark the frame done. */
   evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
   evas_object_image_data_set(priv->o, evas_data);
   evas_object_image_pixels_dirty_set(priv->o, 0);

   _emotion_frame_new(ev->obj);

   vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);

   gst_element_query_position(ev->pipeline, &fmt, &pos);
   ev->position = (double)pos / (double)GST_SECOND;

   vstream->width = priv->width;
   vstream->height = priv->height;
   ev->ratio = (double) priv->width / (double) priv->height;

   _emotion_video_pos_update(ev->obj, ev->position, vstream->length_time);
   _emotion_frame_resize(ev->obj, priv->width, priv->height, ev->ratio);

   /* Keep the buffer alive until the next frame replaces it. */
   if (priv->last_buffer) gst_buffer_unref(priv->last_buffer);
   priv->last_buffer = gst_buffer_ref(buffer);

   emotion_gstreamer_buffer_free(send);

   /* Only a blocked render() needs the wake-up; preroll never waits. */
   if (preroll || !priv->o || !ev) return ;

   eina_lock_take(&priv->m);
   eina_condition_signal(&priv->c);
   eina_lock_release(&priv->m);
/* Flag the sink as unlocked and wake any streaming thread blocked in
 * evas_video_sink_render() so it can observe priv->unlocked and bail out. */
unlock_buffer_mutex(EvasVideoSinkPrivate* priv)
   eina_lock_take(&priv->m);
   priv->unlocked = EINA_TRUE;

   eina_condition_signal(&priv->c);
   eina_lock_release(&priv->m);
/* Custom GClosure marshaller for signals of signature
 * void (*)(gpointer instance, GstMiniObject *arg, gpointer user_data),
 * used by the "repaint-requested" signal (GValue cannot hold a
 * GstMiniObject directly in GStreamer 0.10, hence the hand-rolled code). */
marshal_VOID__MINIOBJECT(GClosure * closure, GValue * return_value __UNUSED__,
                         guint n_param_values, const GValue * param_values,
                         gpointer invocation_hint __UNUSED__, gpointer marshal_data)
   typedef void (*marshalfunc_VOID__MINIOBJECT) (gpointer obj, gpointer arg1, gpointer data2);
   marshalfunc_VOID__MINIOBJECT callback;

   gpointer data1, data2;

   cc = (GCClosure *) closure;

   g_return_if_fail(n_param_values == 2);

   /* Standard g_cclosure swap handling: instance and user_data trade
    * places when G_CONNECT_SWAPPED was used. */
   if (G_CCLOSURE_SWAP_DATA(closure)) {
      data1 = closure->data;
      data2 = g_value_peek_pointer(param_values + 0);
      data1 = g_value_peek_pointer(param_values + 0);
      data2 = closure->data;

   callback = (marshalfunc_VOID__MINIOBJECT) (marshal_data ? marshal_data : cc->callback);

   callback(data1, gst_value_get_mini_object(param_values + 1), data2);
654 evas_video_sink_class_init(EvasVideoSinkClass* klass)
656 GObjectClass* gobject_class;
657 GstBaseSinkClass* gstbase_sink_class;
659 gobject_class = G_OBJECT_CLASS(klass);
660 gstbase_sink_class = GST_BASE_SINK_CLASS(klass);
662 g_type_class_add_private(klass, sizeof(EvasVideoSinkPrivate));
664 gobject_class->set_property = evas_video_sink_set_property;
665 gobject_class->get_property = evas_video_sink_get_property;
667 g_object_class_install_property (gobject_class, PROP_EVAS_OBJECT,
668 g_param_spec_pointer ("evas-object", "Evas Object",
669 "The Evas object where the display of the video will be done",
672 g_object_class_install_property (gobject_class, PROP_WIDTH,
673 g_param_spec_int ("width", "Width",
674 "The width of the video",
675 0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
677 g_object_class_install_property (gobject_class, PROP_HEIGHT,
678 g_param_spec_int ("height", "Height",
679 "The height of the video",
680 0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
681 g_object_class_install_property (gobject_class, PROP_EV,
682 g_param_spec_pointer ("ev", "Emotion_Gstreamer_Video",
683 "THe internal data of the emotion object",
686 gobject_class->dispose = evas_video_sink_dispose;
688 gstbase_sink_class->set_caps = evas_video_sink_set_caps;
689 gstbase_sink_class->stop = evas_video_sink_stop;
690 gstbase_sink_class->start = evas_video_sink_start;
691 gstbase_sink_class->unlock = evas_video_sink_unlock;
692 gstbase_sink_class->unlock_stop = evas_video_sink_unlock_stop;
693 gstbase_sink_class->render = evas_video_sink_render;
694 gstbase_sink_class->preroll = evas_video_sink_preroll;
696 evas_video_sink_signals[REPAINT_REQUESTED] = g_signal_new("repaint-requested",
697 G_TYPE_FROM_CLASS(klass),
698 (GSignalFlags)(G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
702 marshal_VOID__MINIOBJECT,
703 G_TYPE_NONE, 1, GST_TYPE_BUFFER);
/* Plugin entry point: register the EvasVideoSink element type with
 * GStreamer so "emotion-sink" can be created by factory name. */
gstreamer_plugin_init (GstPlugin * plugin)
   return gst_element_register (plugin,
                                EVAS_TYPE_VIDEO_SINK);
/* Ecore_Thread worker: move the pipeline to PAUSED off the main loop.
 * Bails out if the thread was cancelled or the pipeline is already gone. */
_emotion_gstreamer_pause(void *data, Ecore_Thread *thread)
   Emotion_Gstreamer_Video *ev = data;

   if (ecore_thread_check(thread) || !ev->pipeline) return ;

   gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
/* Ecore_Thread cancel callback: drop the thread from the tracking list and,
 * if this was the last outstanding work and deletion was requested, finish
 * tearing the object down (the teardown call is outside this excerpt). */
_emotion_gstreamer_cancel(void *data, Ecore_Thread *thread)
   Emotion_Gstreamer_Video *ev = data;

   ev->threads = eina_list_remove(ev->threads, thread);

   if (ev->in == ev->out && ev->threads == NULL && ev->delete_me)
/* Ecore_Thread end callback (main loop): the PAUSED transition finished.
 * If playback was requested meanwhile, push the pipeline to PLAYING; then
 * handle deferred deletion and re-parse the pipeline state. */
_emotion_gstreamer_end(void *data, Ecore_Thread *thread)
   Emotion_Gstreamer_Video *ev = data;

   ev->threads = eina_list_remove(ev->threads, thread);

   gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
   ev->play_started = 1;

   if (ev->in == ev->out && ev->threads == NULL && ev->delete_me)

   _emotion_gstreamer_video_pipeline_parse(data, EINA_TRUE);
/* Build the playback pipeline: create a playbin2, create our emotion-sink,
 * point the sink at the emotion object's Evas image, hook the sink and URI
 * into playbin, and kick off the async PAUSED transition on a worker
 * thread.  Error paths unref the playbin (cleanup labels are outside this
 * excerpt). */
gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
   obj = emotion_object_image_get(o);
      ERR("Not Evas_Object specified");

   playbin = gst_element_factory_make("playbin2", "playbin");
      ERR("Unable to create 'playbin' GstElement.");

   sink = gst_element_factory_make("emotion-sink", "sink");
      ERR("Unable to create 'emotion-sink' GstElement.");

   /* Hand the render target and emotion backend handle to the sink. */
   g_object_set(G_OBJECT(sink), "evas-object", obj, NULL);
   g_object_set(G_OBJECT(sink), "ev", ev, NULL);

   g_object_set(G_OBJECT(playbin), "video-sink", sink, NULL);
   g_object_set(G_OBJECT(playbin), "uri", uri, NULL);

   ev->pipeline = playbin;

   /* The PAUSED preroll can block, so run it on an Ecore worker thread and
    * track it so teardown can wait for it. */
   ev->threads = eina_list_append(ev->threads,
                                  ecore_thread_run(_emotion_gstreamer_pause,
                                                   _emotion_gstreamer_end,
                                                   _emotion_gstreamer_cancel,

   /** NOTE: you need to set: GST_DEBUG_DUMP_DOT_DIR=/tmp EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp' */
   /** then call dot -Tpng -oemotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
   if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(playbin), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));

   gst_object_unref(playbin);