#include "emotion_gstreamer.h"
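/* Sink pad template: accept the planar/packed YUV formats listed below plus BGRx, BGR and BGRA. */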
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE("sink",
GST_PAD_SINK, GST_PAD_ALWAYS,
GST_STATIC_CAPS(GST_VIDEO_CAPS_YUV("{ I420, YV12, YUY2, NV12, ST12, TM12 }") ";"
GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_BGR ";" GST_VIDEO_CAPS_BGRA));
GST_DEBUG_CATEGORY_STATIC(evas_video_sink_debug);
#define GST_CAT_DEFAULT evas_video_sink_debug
static guint evas_video_sink_signals[LAST_SIGNAL] = { 0, };
#define _do_init(bla) \
GST_DEBUG_CATEGORY_INIT(evas_video_sink_debug, \
GST_BOILERPLATE_FULL(EvasVideoSink,
static void unlock_buffer_mutex(EvasVideoSinkPrivate* priv);
static void evas_video_sink_main_render(void *data);
static void evas_video_sink_samsung_main_render(void *data);
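/* GstElement boilerplate: register the sink pad template and the element details. */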
evas_video_sink_base_init(gpointer g_class)
GstElementClass* element_class;
element_class = GST_ELEMENT_CLASS(g_class);
gst_element_class_add_pad_template(element_class, gst_static_pad_template_get(&sinktemplate));
gst_element_class_set_details_simple(element_class, "Evas video sink",
"Sink/Video", "Sends video data from a GStreamer pipeline to an Evas object",
"Vincent Torri <vtorri@univ-evry.fr>");
evas_video_sink_init(EvasVideoSink* sink, EvasVideoSinkClass* klass __UNUSED__)
EvasVideoSinkPrivate* priv;
sink->priv = priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, EVAS_TYPE_VIDEO_SINK, EvasVideoSinkPrivate);
priv->eformat = EVAS_COLORSPACE_ARGB8888;
priv->samsung = EINA_FALSE;
eina_lock_new(&priv->m);
eina_condition_new(&priv->c, &priv->m);
priv->unlocked = EINA_FALSE;
/**** Object methods ****/
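/* EVAS_CALLBACK_FREE handler: the cached private state is cleaned up under the lock when the tracked Evas_Object is deleted. */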
_cleanup_priv(void *data, Evas *e __UNUSED__, Evas_Object *obj, void *event_info __UNUSED__)
EvasVideoSinkPrivate* priv;
eina_lock_take(&priv->m);
eina_lock_release(&priv->m);
evas_video_sink_set_property(GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
EvasVideoSinkPrivate* priv;
sink = EVAS_VIDEO_SINK (object);
case PROP_EVAS_OBJECT:
eina_lock_take(&priv->m);
evas_object_event_callback_del(priv->o, EVAS_CALLBACK_FREE, _cleanup_priv);
priv->o = g_value_get_pointer (value);
INF("sink set Evas_Object %p.", priv->o);
evas_object_event_callback_add(priv->o, EVAS_CALLBACK_FREE, _cleanup_priv, priv);
eina_lock_release(&priv->m);
eina_lock_take(&priv->m);
priv->ev = g_value_get_pointer (value);
priv->ev->samsung = EINA_TRUE;
eina_lock_release(&priv->m);
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
ERR("invalid property");
evas_video_sink_get_property(GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec)
EvasVideoSinkPrivate* priv;
sink = EVAS_VIDEO_SINK (object);
case PROP_EVAS_OBJECT:
INF("sink get property.");
eina_lock_take(&priv->m);
g_value_set_pointer(value, priv->o);
eina_lock_release(&priv->m);
INF("sink get width.");
eina_lock_take(&priv->m);
g_value_set_int(value, priv->width);
eina_lock_release(&priv->m);
INF("sink get height.");
eina_lock_take(&priv->m);
g_value_set_int (value, priv->height);
eina_lock_release(&priv->m);
eina_lock_take(&priv->m);
g_value_set_pointer (value, priv->ev);
eina_lock_release(&priv->m);
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
159 ERR("invalide property");
evas_video_sink_dispose(GObject* object)
EvasVideoSinkPrivate* priv;
sink = EVAS_VIDEO_SINK(object);
eina_lock_free(&priv->m);
eina_condition_free(&priv->c);
G_OBJECT_CLASS(parent_class)->dispose(object);
/**** BaseSink methods ****/
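/* Caps negotiation: read width, height and the fourcc (or, as a fallback, the GstVideoFormat) from the caps and pick the matching colorspace conversion function. */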
gboolean evas_video_sink_set_caps(GstBaseSink *bsink, GstCaps *caps)
EvasVideoSinkPrivate* priv;
GstStructure *structure;
GstVideoFormat format;
sink = EVAS_VIDEO_SINK(bsink);
structure = gst_caps_get_structure(caps, 0);
if (gst_structure_get_int(structure, "width", (int*) &priv->width)
&& gst_structure_get_int(structure, "height", (int*) &priv->height)
&& gst_structure_get_fourcc(structure, "format", &fourcc))
priv->source_height = priv->height;
for (i = 0; colorspace_fourcc_convertion[i].name != NULL; ++i)
if (fourcc == colorspace_fourcc_convertion[i].fourcc)
fprintf(stderr, "Found '%s'\n", colorspace_fourcc_convertion[i].name);
priv->eformat = colorspace_fourcc_convertion[i].eformat;
priv->func = colorspace_fourcc_convertion[i].func;
if (colorspace_fourcc_convertion[i].force_height)
priv->height = (priv->height >> 1) << 1;
priv->ev->kill_buffer = EINA_TRUE;
if (fourcc == GST_MAKE_FOURCC('S', 'T', '1', '2'))
fprintf(stderr, "Found '%s'\n", "ST12");
priv->eformat = EVAS_COLORSPACE_YCBCR420TM12601_PL;
priv->samsung = EINA_TRUE;
priv->ev->samsung = EINA_TRUE;
priv->ev->kill_buffer = EINA_TRUE;
INF("fallback code !");
if (!gst_video_format_parse_caps(caps, &format, (int*) &priv->width, (int*) &priv->height))
ERR("Unable to parse caps.");
priv->source_height = priv->height;
for (i = 0; colorspace_format_convertion[i].name != NULL; ++i)
if (format == colorspace_format_convertion[i].format)
fprintf(stderr, "Found '%s'\n", colorspace_format_convertion[i].name);
priv->eformat = colorspace_format_convertion[i].eformat;
priv->func = colorspace_format_convertion[i].func;
priv->ev->kill_buffer = EINA_FALSE;
ERR("unsupported : %d\n", format);
evas_video_sink_start(GstBaseSink* base_sink)
EvasVideoSinkPrivate* priv;
priv = EVAS_VIDEO_SINK(base_sink)->priv;
eina_lock_take(&priv->m);
priv->unlocked = EINA_FALSE;
eina_lock_release(&priv->m);
evas_video_sink_stop(GstBaseSink* base_sink)
EvasVideoSinkPrivate* priv = EVAS_VIDEO_SINK(base_sink)->priv;
unlock_buffer_mutex(priv);
evas_video_sink_unlock(GstBaseSink* object)
sink = EVAS_VIDEO_SINK(object);
unlock_buffer_mutex(sink->priv);
return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock,
evas_video_sink_unlock_stop(GstBaseSink* object)
EvasVideoSinkPrivate* priv;
sink = EVAS_VIDEO_SINK(object);
INF("sink unlock stop");
eina_lock_take(&priv->m);
priv->unlocked = FALSE;
eina_lock_release(&priv->m);
return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock_stop,
evas_video_sink_preroll(GstBaseSink* bsink, GstBuffer* buffer)
Emotion_Gstreamer_Buffer *send;
EvasVideoSinkPrivate *priv;
INF("sink preroll %p [%i]", GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
sink = EVAS_VIDEO_SINK(bsink);
if (GST_BUFFER_SIZE(buffer) <= 0 && !priv->samsung)
send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_TRUE);
GstStructure *structure;
gboolean is_multiplane = FALSE;
caps = GST_BUFFER_CAPS(buffer);
structure = gst_caps_get_structure (caps, 0);
gst_structure_get_boolean(structure, "multiplane", &is_multiplane);
gst_caps_unref(caps);
priv->func = _evas_video_st12_multiplane;
priv->func = _evas_video_st12;
ecore_main_loop_thread_safe_call_async(evas_video_sink_samsung_main_render, send);
ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
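/* Render callback: wrap the buffer in an Emotion_Gstreamer_Buffer, hand it to the Ecore main loop and block on the condition until the frame has been pushed to the Evas object. */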
evas_video_sink_render(GstBaseSink* bsink, GstBuffer* buffer)
Emotion_Gstreamer_Buffer *send;
EvasVideoSinkPrivate *priv;
INF("sink render %p", buffer);
sink = EVAS_VIDEO_SINK(bsink);
eina_lock_take(&priv->m);
if (priv->unlocked) {
eina_lock_release(&priv->m);
send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_FALSE);
eina_lock_release(&priv->m);
return GST_FLOW_ERROR;
GstStructure *structure;
gboolean is_multiplane = FALSE;
caps = GST_BUFFER_CAPS(buffer);
structure = gst_caps_get_structure (caps, 0);
gst_structure_get_boolean(structure, "multiplane", &is_multiplane);
gst_caps_unref(caps);
priv->func = _evas_video_st12_multiplane;
priv->func = _evas_video_st12;
ecore_main_loop_thread_safe_call_async(evas_video_sink_samsung_main_render, send);
ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
eina_condition_wait(&priv->c);
eina_lock_release(&priv->m);
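/* Print the frame count and measured FPS every half second when debug_fps is enabled. */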
_update_emotion_fps(Emotion_Gstreamer_Video *ev)
if (!debug_fps) return ;
tim = ecore_time_get();
if (ev->rlapse == 0.0)
ev->flapse = ev->frames;
else if ((tim - ev->rlapse) >= 0.5)
printf("FRAME: %i, FPS: %3.1f\n",
(ev->frames - ev->flapse) / (tim - ev->rlapse));
ev->flapse = ev->frames;
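/* Main-loop render path for Samsung ST12/multi-plane buffers: stride and elevation come from the vendor buffer structure and drive the Evas image size. */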
evas_video_sink_samsung_main_render(void *data)
Emotion_Gstreamer_Buffer *send;
Emotion_Video_Stream *vstream;
EvasVideoSinkPrivate* priv;
unsigned char *evas_data;
const guint8 *gst_data;
GstFormat fmt = GST_FORMAT_TIME;
int stride, elevation;
if (!send) goto exit_point;
buffer = send->frame;
preroll = send->preroll;
if (!priv || !priv->o || priv->unlocked)
emotion_gstreamer_buffer_free(send->ev->send);
send->ev->send = NULL;
if (!send->ev->stream && !send->force)
send->ev->send = send;
_emotion_frame_new(send->ev->obj);
_emotion_gstreamer_video_pipeline_parse(send->ev, EINA_TRUE);
/* Get the stride to compute the right size, then fill the object properly */
/* Y plane is in [0], UV plane in [1] */
if (priv->func == _evas_video_st12_multiplane)
const GstMultiPlaneImageBuffer *mp_buf = (const GstMultiPlaneImageBuffer *) buffer;
stride = mp_buf->stride[0];
elevation = mp_buf->elevation[0];
priv->width = mp_buf->width[0];
priv->height = mp_buf->height[0];
gst_data = (const guint8 *) mp_buf;
const SCMN_IMGB *imgb = (const SCMN_IMGB *) GST_BUFFER_MALLOCDATA(buffer);
stride = imgb->stride[0];
elevation = imgb->elevation[0];
priv->width = imgb->width[0];
priv->height = imgb->height[0];
gst_data = (const guint8 *) imgb;
evas_object_geometry_get(priv->o, NULL, NULL, &w, &h);
send->ev->fill.width = (double) stride / priv->width;
send->ev->fill.height = (double) elevation / priv->height;
evas_object_image_alpha_set(priv->o, 0);
evas_object_image_colorspace_set(priv->o, priv->eformat);
evas_object_image_size_set(priv->o, stride, elevation);
_update_emotion_fps(send->ev);
evas_data = evas_object_image_data_get(priv->o, 1);
priv->func(evas_data, gst_data, stride, elevation, elevation);
WRN("No way to decode %x colorspace !", priv->eformat);
evas_object_image_data_set(priv->o, evas_data);
evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
evas_object_image_pixels_dirty_set(priv->o, 0);
if (!preroll && send->ev->play_started)
_emotion_playback_started(send->ev->obj);
send->ev->play_started = 0;
_emotion_frame_new(send->ev->obj);
vstream = eina_list_nth(send->ev->video_streams, send->ev->video_stream_nbr - 1);
gst_element_query_position(send->ev->pipeline, &fmt, &pos);
send->ev->position = (double)pos / (double)GST_SECOND;
vstream->width = priv->width;
vstream->height = priv->height;
_emotion_video_pos_update(send->ev->obj, send->ev->position, vstream->length_time);
send->ev->ratio = (double) priv->width / (double) priv->height;
_emotion_frame_refill(send->ev->obj, send->ev->fill.width, send->ev->fill.height);
_emotion_frame_resize(send->ev->obj, priv->width, priv->height, send->ev->ratio);
buffer = gst_buffer_ref(buffer);
if (send->ev->last_buffer) gst_buffer_unref(send->ev->last_buffer);
send->ev->last_buffer = buffer;
emotion_gstreamer_buffer_free(send);
if (preroll || !priv->o) return ;
eina_condition_signal(&priv->c);
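/* Generic main-loop render path: convert the GstBuffer into the Evas image data, update the emotion playback state and wake up the waiting streaming thread. */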
evas_video_sink_main_render(void *data)
Emotion_Gstreamer_Buffer *send;
Emotion_Gstreamer_Video *ev = NULL;
Emotion_Video_Stream *vstream;
EvasVideoSinkPrivate* priv;
unsigned char *evas_data;
GstFormat fmt = GST_FORMAT_TIME;
if (!send) goto exit_point;
buffer = send->frame;
preroll = send->preroll;
if (!priv || !priv->o || priv->unlocked)
if (ev->send && send != ev->send)
emotion_gstreamer_buffer_free(ev->send);
if (!ev->stream && !send->force)
_emotion_frame_new(ev->obj);
evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
_emotion_gstreamer_video_pipeline_parse(ev, EINA_TRUE);
INF("sink main render [%i, %i] (source height: %i)", priv->width, priv->height, priv->source_height);
evas_object_image_alpha_set(priv->o, 0);
evas_object_image_colorspace_set(priv->o, priv->eformat);
evas_object_image_size_set(priv->o, priv->width, priv->height);
evas_data = evas_object_image_data_get(priv->o, 1);
priv->func(evas_data, GST_BUFFER_DATA(buffer), priv->width, priv->source_height, priv->height);
WRN("No way to decode %x colorspace !", priv->eformat);
evas_object_image_data_set(priv->o, evas_data);
evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
evas_object_image_pixels_dirty_set(priv->o, 0);
_update_emotion_fps(ev);
if (!preroll && ev->play_started)
_emotion_playback_started(ev->obj);
ev->play_started = 0;
_emotion_frame_new(ev->obj);
gst_element_query_position(ev->pipeline, &fmt, &pos);
ev->position = (double)pos / (double)GST_SECOND;
vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
vstream->width = priv->width;
vstream->height = priv->height;
_emotion_video_pos_update(ev->obj, ev->position, vstream->length_time);
ev->ratio = (double) priv->width / (double) priv->height;
_emotion_frame_resize(ev->obj, priv->width, priv->height, ev->ratio);
buffer = gst_buffer_ref(buffer);
if (ev->last_buffer) gst_buffer_unref(ev->last_buffer);
ev->last_buffer = buffer;
emotion_gstreamer_buffer_free(send);
if (preroll || !priv->o) return ;
eina_condition_signal(&priv->c);
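/* Mark the sink as unlocked and wake any streaming thread blocked in evas_video_sink_render(). */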
unlock_buffer_mutex(EvasVideoSinkPrivate* priv)
priv->unlocked = EINA_TRUE;
eina_condition_signal(&priv->c);
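/* Custom GClosure marshaller for the "repaint-requested" signal, whose single argument is a GstMiniObject (the buffer). */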
marshal_VOID__MINIOBJECT(GClosure * closure, GValue * return_value __UNUSED__,
guint n_param_values, const GValue * param_values,
gpointer invocation_hint __UNUSED__, gpointer marshal_data)
typedef void (*marshalfunc_VOID__MINIOBJECT) (gpointer obj, gpointer arg1, gpointer data2);
marshalfunc_VOID__MINIOBJECT callback;
gpointer data1, data2;
cc = (GCClosure *) closure;
g_return_if_fail(n_param_values == 2);
if (G_CCLOSURE_SWAP_DATA(closure)) {
data1 = closure->data;
data2 = g_value_peek_pointer(param_values + 0);
data1 = g_value_peek_pointer(param_values + 0);
data2 = closure->data;
callback = (marshalfunc_VOID__MINIOBJECT) (marshal_data ? marshal_data : cc->callback);
callback(data1, gst_value_get_mini_object(param_values + 1), data2);
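/*
 * Wire up the GObject properties, the GstBaseSink virtual methods and the
 * "repaint-requested" action signal. A consumer would typically attach to the
 * signal with something like (illustrative only, on_repaint is hypothetical):
 *   g_signal_connect(sink, "repaint-requested", G_CALLBACK(on_repaint), user_data);
 */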
evas_video_sink_class_init(EvasVideoSinkClass* klass)
GObjectClass* gobject_class;
GstBaseSinkClass* gstbase_sink_class;
gobject_class = G_OBJECT_CLASS(klass);
gstbase_sink_class = GST_BASE_SINK_CLASS(klass);
g_type_class_add_private(klass, sizeof(EvasVideoSinkPrivate));
gobject_class->set_property = evas_video_sink_set_property;
gobject_class->get_property = evas_video_sink_get_property;
g_object_class_install_property (gobject_class, PROP_EVAS_OBJECT,
g_param_spec_pointer ("evas-object", "Evas Object",
"The Evas object where the display of the video will be done",
g_object_class_install_property (gobject_class, PROP_WIDTH,
g_param_spec_int ("width", "Width",
"The width of the video",
0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_HEIGHT,
g_param_spec_int ("height", "Height",
"The height of the video",
0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_EV,
g_param_spec_pointer ("ev", "Emotion_Gstreamer_Video",
747 "THe internal data of the emotion object",
gobject_class->dispose = evas_video_sink_dispose;
gstbase_sink_class->set_caps = evas_video_sink_set_caps;
gstbase_sink_class->stop = evas_video_sink_stop;
gstbase_sink_class->start = evas_video_sink_start;
gstbase_sink_class->unlock = evas_video_sink_unlock;
gstbase_sink_class->unlock_stop = evas_video_sink_unlock_stop;
gstbase_sink_class->render = evas_video_sink_render;
gstbase_sink_class->preroll = evas_video_sink_preroll;
evas_video_sink_signals[REPAINT_REQUESTED] = g_signal_new("repaint-requested",
G_TYPE_FROM_CLASS(klass),
(GSignalFlags)(G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
marshal_VOID__MINIOBJECT,
G_TYPE_NONE, 1, GST_TYPE_BUFFER);
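/* Plugin entry point: register the Evas video sink element (created later through the "emotion-sink" factory) with GStreamer. */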
gstreamer_plugin_init (GstPlugin * plugin)
return gst_element_register (plugin,
EVAS_TYPE_VIDEO_SINK);
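/* Ecore_Thread worker: move the pipeline to PAUSED; live sources that report GST_STATE_CHANGE_NO_PREROLL are set to PLAYING instead. */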
_emotion_gstreamer_pause(void *data, Ecore_Thread *thread)
Emotion_Gstreamer_Video *ev = data;
if (ecore_thread_check(thread) || !ev->pipeline) return ;
gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
if (res == GST_STATE_CHANGE_NO_PREROLL)
gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
_emotion_gstreamer_cancel(void *data, Ecore_Thread *thread)
Emotion_Gstreamer_Video *ev = data;
ev->threads = eina_list_remove(ev->threads, thread);
if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
if (ev->in == ev->out && ev->delete_me)
_emotion_gstreamer_end(void *data, Ecore_Thread *thread)
Emotion_Gstreamer_Video *ev = data;
ev->threads = eina_list_remove(ev->threads, thread);
gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
ev->play_started = 1;
if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
if (ev->in == ev->out && ev->delete_me)
_emotion_gstreamer_video_pipeline_parse(data, EINA_TRUE);
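/* Evas_Video_Surface callbacks used when the video is delegated to the Xv overlay window. */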
_video_resize(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__,
Evas_Coord w, Evas_Coord h)
Emotion_Gstreamer_Video *ev = data;
ecore_x_window_resize(ev->win, w, h);
fprintf(stderr, "resize: %i, %i\n", w, h);
_video_move(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__,
Evas_Coord x, Evas_Coord y)
Emotion_Gstreamer_Video *ev = data;
fprintf(stderr, "move: %i, %i\n", x, y);
pos[0] = x; pos[1] = y;
ecore_x_window_prop_card32_set(ev->win, ECORE_X_ATOM_E_VIDEO_POSITION, pos, 2);
/* It is a much better idea to always feed the XvImageSink and let it handle optimizing the rendering, as we do */
_block_pad_unlink_cb(GstPad *pad, gboolean blocked, gpointer user_data)
Emotion_Gstreamer_Video *ev = user_data;
gst_pad_unlink(ev->teepad, ev->xvpad);
gev = gst_event_new_eos();
gst_pad_send_event(ev->xvpad, gev);
gst_pad_set_blocked_async(pad, FALSE, _block_pad_unlink_cb, NULL);
_block_pad_link_cb(GstPad *pad, gboolean blocked, gpointer user_data)
Emotion_Gstreamer_Video *ev = user_data;
gst_pad_link(ev->teepad, ev->xvpad);
gst_element_set_state(ev->xvsink, GST_STATE_PLAYING);
gst_element_set_state(ev->xvsink, GST_STATE_PAUSED);
gst_pad_set_blocked_async(pad, FALSE, _block_pad_link_cb, NULL);
_video_show(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__)
Emotion_Gstreamer_Video *ev = data;
fprintf(stderr, "show xv\n");
ecore_x_window_show(ev->win);
/* gst_pad_set_blocked_async(ev->teepad, TRUE, _block_pad_link_cb, ev); */
_video_hide(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__)
Emotion_Gstreamer_Video *ev = data;
fprintf(stderr, "hide xv\n");
ecore_x_window_hide(ev->win);
/* gst_pad_set_blocked_async(ev->teepad, TRUE, _block_pad_unlink_cb, ev); */
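/* Re-render the pending frame into the Evas object by forcing the software render path. */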
_video_update_pixels(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__)
Emotion_Gstreamer_Video *ev = data;
Emotion_Gstreamer_Buffer *send;
if (!ev->send) return ;
send->force = EINA_TRUE;
evas_video_sink_main_render(send);
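/*
 * Build the playback pipeline. Roughly (the Xv branch only when a suitable
 * X11 engine and window manager support are available):
 *
 *   playbin2 "video-sink" -> bin [ tee -> queue -> emotion-sink
 *                                      -> fakeeos ( queue -> xvimagesink ) ]
 */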
gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
GstElement *bin = NULL;
GstElement *esink = NULL;
GstElement *xvsink = NULL;
GstElement *tee = NULL;
GstElement *queue = NULL;
#if defined HAVE_ECORE_X && defined HAVE_XOVERLAY_H
obj = emotion_object_image_get(o);
952 ERR("Not Evas_Object specified");
launch = emotion_webcam_custom_get(uri);
GError *error = NULL;
playbin = gst_parse_bin_from_description(launch, 1, &error);
ERR("Unable to set up command '%s': got error '%s'.", launch, error->message);
WRN("Got recoverable error '%s' for command '%s'.", error->message, launch);
playbin = gst_element_factory_make("playbin2", "playbin");
ERR("Unable to create 'playbin' GstElement.");
bin = gst_bin_new(NULL);
ERR("Unable to create GstBin !");
tee = gst_element_factory_make("tee", NULL);
ERR("Unable to create 'tee' GstElement.");
#if defined HAVE_ECORE_X && defined HAVE_XOVERLAY_H
if (window_manager_video)
engines = evas_render_method_list();
engine = eina_list_nth(engines, evas_output_method_get(evas_object_evas_get(obj)) - 1);
if (ev->priority && engine && strstr(engine, "_x11") != NULL)
Evas_Coord x, y, w, h;
Ecore_X_Window parent;
evas_object_geometry_get(obj, &x, &y, &w, &h);
ee = ecore_evas_ecore_evas_get(evas_object_evas_get(obj));
/* Here we really need the help of the window manager; this code will change when we update E17. */
parent = (Ecore_X_Window) ecore_evas_window_get(ee);
fprintf(stderr, "parent: %x\n", parent);
win = ecore_x_window_new(0, x, y, w, h);
fprintf(stderr, "creating window: %x [%i, %i, %i, %i]\n", win, x, y, w, h);
Ecore_X_Window_State state[] = { ECORE_X_WINDOW_STATE_SKIP_TASKBAR, ECORE_X_WINDOW_STATE_SKIP_PAGER };
ecore_x_netwm_window_state_set(win, state, 2);
ecore_x_window_hide(win);
xvsink = gst_element_factory_make("xvimagesink", NULL);
unsigned int pos[2];
#ifdef HAVE_X_OVERLAY_SET
gst_x_overlay_set_window_handle(GST_X_OVERLAY(xvsink), win);
gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(xvsink), win);
ecore_x_window_prop_card32_set(win, ECORE_X_ATOM_E_VIDEO_PARENT, &parent, 1);
pos[0] = x; pos[1] = y;
ecore_x_window_prop_card32_set(win, ECORE_X_ATOM_E_VIDEO_POSITION, pos, 2);
fprintf(stderr, "destroying win: %x\n", win);
ecore_x_window_free(win);
evas_render_method_list_free(engines);
# warning "no ecore_x or xoverlay"
esink = gst_element_factory_make("emotion-sink", "sink");
ERR("Unable to create 'emotion-sink' GstElement.");
goto unref_pipeline;
g_object_set(G_OBJECT(esink), "evas-object", obj, NULL);
g_object_set(G_OBJECT(esink), "ev", ev, NULL);
evas_object_image_pixels_get_callback_set(obj, NULL, NULL);
/* We need a queue to force each video sink to run in its own thread */
queue = gst_element_factory_make("queue", NULL);
ERR("Unable to create 'queue' GstElement.");
goto unref_pipeline;
gst_bin_add_many(GST_BIN(bin), tee, queue, esink, NULL);
gst_element_link_many(queue, esink, NULL);
/* link both sinks to the GstTee */
pad = gst_element_get_pad(queue, "sink");
teepad = gst_element_get_request_pad(tee, "src%d");
gst_pad_link(teepad, pad);
gst_object_unref(pad);
gst_object_unref(teepad);
GstElement *fakeeos;
queue = gst_element_factory_make("queue", NULL);
fakeeos = GST_ELEMENT(GST_BIN(g_object_new(GST_TYPE_FAKEEOS_BIN, "name", "eosbin", NULL)));
if (queue && fakeeos)
gst_bin_add_many(GST_BIN(bin), fakeeos, NULL);
gst_bin_add_many(GST_BIN(fakeeos), queue, xvsink, NULL);
gst_element_link_many(queue, xvsink, NULL);
queue_pad = gst_element_get_pad(queue, "sink");
gst_element_add_pad(fakeeos, gst_ghost_pad_new("sink", queue_pad));
pad = gst_element_get_pad(fakeeos, "sink");
teepad = gst_element_get_request_pad(tee, "src%d");
gst_pad_link(teepad, pad);
ev->teepad = teepad;
if (fakeeos) gst_object_unref(fakeeos);
if (queue) gst_object_unref(queue);
gst_object_unref(xvsink);
teepad = gst_element_get_pad(tee, "sink");
gst_element_add_pad(bin, gst_ghost_pad_new("sink", teepad));
gst_object_unref(teepad);
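/* Local copies of the playbin2 GstPlayFlags bits used below; the enum is not exposed by a public header. */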
#define GST_PLAY_FLAG_NATIVE_VIDEO (1 << 6)
#define GST_PLAY_FLAG_DOWNLOAD (1 << 7)
#define GST_PLAY_FLAG_AUDIO (1 << 1)
#define GST_PLAY_FLAG_NATIVE_AUDIO (1 << 5)
g_object_set(G_OBJECT(playbin), "sink", bin, NULL);
g_object_get(G_OBJECT(playbin), "flags", &flags, NULL);
g_object_set(G_OBJECT(playbin), "flags", flags | GST_PLAY_FLAG_NATIVE_VIDEO | GST_PLAY_FLAG_DOWNLOAD | GST_PLAY_FLAG_NATIVE_AUDIO, NULL);
g_object_set(G_OBJECT(playbin), "video-sink", bin, NULL);
g_object_set(G_OBJECT(playbin), "uri", uri, NULL);
evas_object_image_pixels_get_callback_set(obj, NULL, NULL);
ev->stream = EINA_TRUE;
Evas_Video_Surface video;
video.version = EVAS_VIDEO_SURFACE_VERSION;
video.parent = NULL;
video.move = _video_move;
video.resize = _video_resize;
video.show = _video_show;
video.hide = _video_hide;
video.update_pixels = _video_update_pixels;
evas_object_image_video_surface_set(obj, &video);
ev->stream = EINA_FALSE;
eina_stringshare_replace(&ev->uri, uri);
ev->pipeline = playbin;
ev->xvsink = xvsink;
ev->threads = eina_list_append(ev->threads,
ecore_thread_run(_emotion_gstreamer_pause,
_emotion_gstreamer_end,
_emotion_gstreamer_cancel,
/** NOTE: set GST_DEBUG_DUMP_DOT_DIR=/tmp and EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp', */
/** then run: dot -Tpng -o emotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(playbin), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
gst_object_unref(xvsink);
gst_object_unref(esink);
gst_object_unref(tee);
gst_object_unref(bin);
gst_object_unref(playbin);