emotion: call the right render function.
[profile/ivi/emotion.git] / src/modules/gstreamer/emotion_sink.c
1 #ifdef HAVE_CONFIG_H
2 # include "config.h"
3 #endif
4
5 #include <Eina.h>
6 #include <Evas.h>
7 #include <Ecore.h>
8
9 #define HTTP_STREAM 0
10 #define RTSP_STREAM 1
11 #include <glib.h>
12 #include <gst/gst.h>
13 #include <glib-object.h>
14 #include <gst/video/gstvideosink.h>
15 #include <gst/video/video.h>
16
17 #ifdef HAVE_ECORE_X
18 # include <Ecore_X.h>
19 # include <Ecore_Evas.h>
20 # ifdef HAVE_XOVERLAY_H
21 #  include <gst/interfaces/xoverlay.h>
22 # endif
23 #endif
24
25 #include "Emotion.h"
26 #include "emotion_private.h"
27 #include "emotion_gstreamer.h"
28
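/* Static sink pad template: the caps this sink accepts, i.e. a set of YUV fourccs
 * (including the Samsung tiled ST12/TM12 variants) plus BGRx, BGR and BGRA. */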
29 static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE("sink",
30                                                                    GST_PAD_SINK, GST_PAD_ALWAYS,
31                                                                    GST_STATIC_CAPS(GST_VIDEO_CAPS_YUV("{ I420, YV12, YUY2, NV12, ST12, TM12 }") ";"
32                                                                                    GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_BGR ";" GST_VIDEO_CAPS_BGRA));
33
34 GST_DEBUG_CATEGORY_STATIC(evas_video_sink_debug);
35 #define GST_CAT_DEFAULT evas_video_sink_debug
36
37 enum {
38   REPAINT_REQUESTED,
39   LAST_SIGNAL
40 };
41
42 enum {
43   PROP_0,
44   PROP_EVAS_OBJECT,
45   PROP_WIDTH,
46   PROP_HEIGHT,
47   PROP_EV,
48   PROP_LAST
49 };
50
51 static guint evas_video_sink_signals[LAST_SIGNAL] = { 0, };
52
53 #define _do_init(bla)                                   \
54   GST_DEBUG_CATEGORY_INIT(evas_video_sink_debug,        \
55                           "emotion-sink",               \
56                           0,                            \
57                           "emotion video sink")
58
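/* GStreamer 0.10 boilerplate: registers the EvasVideoSink GType (derived from
 * GstVideoSink) and runs _do_init() at type registration time to set up the
 * "emotion-sink" debug category. */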
59 GST_BOILERPLATE_FULL(EvasVideoSink,
60                      evas_video_sink,
61                      GstVideoSink,
62                      GST_TYPE_VIDEO_SINK,
63                      _do_init);
64
65
66 static void unlock_buffer_mutex(EvasVideoSinkPrivate* priv);
67 static void evas_video_sink_main_render(void *data);
68 static void evas_video_sink_samsung_main_render(void *data);
69
70 static void
71 evas_video_sink_base_init(gpointer g_class)
72 {
73    GstElementClass* element_class;
74
75    element_class = GST_ELEMENT_CLASS(g_class);
76    gst_element_class_add_pad_template(element_class, gst_static_pad_template_get(&sinktemplate));
77    gst_element_class_set_details_simple(element_class, "Evas video sink",
78                                         "Sink/Video", "Sends video data from a GStreamer pipeline to an Evas object",
79                                         "Vincent Torri <vtorri@univ-evry.fr>");
80 }
81
82 static void
83 evas_video_sink_init(EvasVideoSink* sink, EvasVideoSinkClass* klass __UNUSED__)
84 {
85    EvasVideoSinkPrivate* priv;
86
87    INF("sink init");
88    sink->priv = priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, EVAS_TYPE_VIDEO_SINK, EvasVideoSinkPrivate);
89    priv->o = NULL;
90    priv->width = 0;
91    priv->height = 0;
92    priv->func = NULL;
93    priv->eformat = EVAS_COLORSPACE_ARGB8888;
94    priv->samsung = EINA_FALSE;
95    eina_lock_new(&priv->m);
96    eina_condition_new(&priv->c, &priv->m);
97    priv->unlocked = EINA_FALSE;
98 }
99
100 /**** Object methods ****/
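/* EVAS_CALLBACK_FREE handler: if the Evas object we render into is deleted,
 * forget about it so later renders do not touch a dead object. */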
101 static void
102 _cleanup_priv(void *data, Evas *e __UNUSED__, Evas_Object *obj, void *event_info __UNUSED__)
103 {
104    EvasVideoSinkPrivate* priv;
105
106    priv = data;
107
108    eina_lock_take(&priv->m);
109    if (priv->o == obj)
110      priv->o = NULL;
111    eina_lock_release(&priv->m);
112 }
113
114 static void
115 evas_video_sink_set_property(GObject * object, guint prop_id,
116                              const GValue * value, GParamSpec * pspec)
117 {
118    EvasVideoSink* sink;
119    EvasVideoSinkPrivate* priv;
120
121    sink = EVAS_VIDEO_SINK (object);
122    priv = sink->priv;
123
124    switch (prop_id) {
125     case PROP_EVAS_OBJECT:
126        eina_lock_take(&priv->m);
127        evas_object_event_callback_del(priv->o, EVAS_CALLBACK_FREE, _cleanup_priv);
128        priv->o = g_value_get_pointer (value);
129        INF("sink set Evas_Object %p.", priv->o);
130        evas_object_event_callback_add(priv->o, EVAS_CALLBACK_FREE, _cleanup_priv, priv);
131        eina_lock_release(&priv->m);
132        break;
133     case PROP_EV:
134        INF("sink set ev.");
135        eina_lock_take(&priv->m);
136        priv->ev = g_value_get_pointer (value);
137        if (priv->ev)
138          priv->ev->samsung = EINA_TRUE;
139        eina_lock_release(&priv->m);
140        break;
141     default:
142        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
143        ERR("invalid property");
144        break;
145    }
146 }
147
148 static void
149 evas_video_sink_get_property(GObject * object, guint prop_id,
150                              GValue * value, GParamSpec * pspec)
151 {
152    EvasVideoSink* sink;
153    EvasVideoSinkPrivate* priv;
154
155    sink = EVAS_VIDEO_SINK (object);
156    priv = sink->priv;
157
158    switch (prop_id) {
159     case PROP_EVAS_OBJECT:
160        INF("sink get property.");
161        eina_lock_take(&priv->m);
162        g_value_set_pointer(value, priv->o);
163        eina_lock_release(&priv->m);
164        break;
165     case PROP_WIDTH:
166        INF("sink get width.");
167        eina_lock_take(&priv->m);
168        g_value_set_int(value, priv->width);
169        eina_lock_release(&priv->m);
170        break;
171     case PROP_HEIGHT:
172        INF("sink get height.");
173        eina_lock_take(&priv->m);
174        g_value_set_int (value, priv->height);
175        eina_lock_release(&priv->m);
176        break;
177     case PROP_EV:
178        INF("sink get ev.");
179        eina_lock_take(&priv->m);
180        g_value_set_pointer (value, priv->ev);
181        eina_lock_release(&priv->m);
182        break;
183     default:
184        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
185        ERR("invalid property");
186        break;
187    }
188 }
189
190 static void
191 evas_video_sink_dispose(GObject* object)
192 {
193    EvasVideoSink* sink;
194    EvasVideoSinkPrivate* priv;
195
196    INF("dispose.");
197
198    sink = EVAS_VIDEO_SINK(object);
199    priv = sink->priv;
200
201    eina_lock_free(&priv->m);
202    eina_condition_free(&priv->c);
203
204    G_OBJECT_CLASS(parent_class)->dispose(object);
205 }
206
207
208 /**** BaseSink methods ****/
209
210 gboolean evas_video_sink_set_caps(GstBaseSink *bsink, GstCaps *caps)
211 {
212    EvasVideoSink* sink;
213    EvasVideoSinkPrivate* priv;
214    GstStructure *structure;
215    GstVideoFormat format;
216    guint32 fourcc;
217    unsigned int i;
218
219    sink = EVAS_VIDEO_SINK(bsink);
220    priv = sink->priv;
221
222    structure = gst_caps_get_structure(caps, 0);
223
224    if (gst_structure_get_int(structure, "width", (int*) &priv->width)
225        && gst_structure_get_int(structure, "height", (int*) &priv->height)
226        && gst_structure_get_fourcc(structure, "format", &fourcc))
227      {
228         priv->source_height = priv->height;
229
230         for (i = 0; colorspace_fourcc_convertion[i].name != NULL; ++i)
231           if (fourcc == colorspace_fourcc_convertion[i].fourcc)
232             {
233                fprintf(stderr, "Found '%s'\n", colorspace_fourcc_convertion[i].name);
234                priv->eformat = colorspace_fourcc_convertion[i].eformat;
235                priv->func = colorspace_fourcc_convertion[i].func;
236                if (colorspace_fourcc_convertion[i].force_height)
237                  {
238                     priv->height = (priv->height >> 1) << 1;
239                  }
240                if (priv->ev)
241                  priv->ev->kill_buffer = EINA_TRUE;
242                return TRUE;
243             }
244
245         if (fourcc == GST_MAKE_FOURCC('S', 'T', '1', '2'))
246           {
247              fprintf(stderr, "Found '%s'\n", "ST12");
248              priv->eformat = EVAS_COLORSPACE_YCBCR420TM12601_PL;
249              priv->samsung = EINA_TRUE;
250              priv->func = NULL;
251              if (priv->ev)
252                {
253                   priv->ev->samsung = EINA_TRUE;
254                   priv->ev->kill_buffer = EINA_TRUE;
255                }
256              return TRUE;
257           }
258      }
259
260    INF("fallback code!");
261    if (!gst_video_format_parse_caps(caps, &format, (int*) &priv->width, (int*) &priv->height))
262      {
263         ERR("Unable to parse caps.");
264         return FALSE;
265      }
266
267    priv->source_height = priv->height;
268
269    for (i = 0; colorspace_format_convertion[i].name != NULL; ++i)
270      if (format == colorspace_format_convertion[i].format)
271        {
272           fprintf(stderr, "Found '%s'\n", colorspace_format_convertion[i].name);
273           priv->eformat = colorspace_format_convertion[i].eformat;
274           priv->func = colorspace_format_convertion[i].func;
275           if (priv->ev)
276             priv->ev->kill_buffer = EINA_FALSE;
277           return TRUE;
278        }
279
280    ERR("unsupported format: %d\n", format);
281    return FALSE;
282 }
283
284 static gboolean
285 evas_video_sink_start(GstBaseSink* base_sink)
286 {
287    EvasVideoSinkPrivate* priv;
288    gboolean res = TRUE;
289
290    INF("sink start");
291
292    priv = EVAS_VIDEO_SINK(base_sink)->priv;
293    eina_lock_take(&priv->m);
294    if (!priv->o)
295      res = FALSE;
296    else
297      priv->unlocked = EINA_FALSE;
298    eina_lock_release(&priv->m);
299    return res;
300 }
301
302 static gboolean
303 evas_video_sink_stop(GstBaseSink* base_sink)
304 {
305    EvasVideoSinkPrivate* priv = EVAS_VIDEO_SINK(base_sink)->priv;
306
307    INF("sink stop");
308
309    unlock_buffer_mutex(priv);
310    return TRUE;
311 }
312
313 static gboolean
314 evas_video_sink_unlock(GstBaseSink* object)
315 {
316    EvasVideoSink* sink;
317
318    INF("sink unlock");
319
320    sink = EVAS_VIDEO_SINK(object);
321
322    unlock_buffer_mutex(sink->priv);
323
324    return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock,
325                                        (object), TRUE);
326 }
327
328 static gboolean
329 evas_video_sink_unlock_stop(GstBaseSink* object)
330 {
331    EvasVideoSink* sink;
332    EvasVideoSinkPrivate* priv;
333
334    sink = EVAS_VIDEO_SINK(object);
335    priv = sink->priv;
336
337    INF("sink unlock stop");
338
339    eina_lock_take(&priv->m);
340    priv->unlocked = EINA_FALSE;
341    eina_lock_release(&priv->m);
342
343    return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock_stop,
344                                        (object), TRUE);
345 }
346
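/* preroll/render do not touch Evas directly from the streaming thread: they wrap the
 * buffer in an Emotion_Gstreamer_Buffer and hand it to the Ecore main loop, where
 * evas_video_sink_main_render() or the Samsung variant does the actual upload. */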
347 static GstFlowReturn
348 evas_video_sink_preroll(GstBaseSink* bsink, GstBuffer* buffer)
349 {
350    Emotion_Gstreamer_Buffer *send;
351    EvasVideoSinkPrivate *priv;
352    EvasVideoSink *sink;
353
354    INF("sink preroll %p [%i]", GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
355
356    sink = EVAS_VIDEO_SINK(bsink);
357    priv = sink->priv;
358
359    if (GST_BUFFER_SIZE(buffer) <= 0 && !priv->samsung)
360      {
361         WRN("empty buffer");
362         return GST_FLOW_OK;
363      }
364
365    send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_TRUE);
366
367    if (send)
368      {
369         if (priv->samsung)
370           {
371              if (!priv->func)
372                {
373                   GstStructure *structure;
374                   GstCaps *caps;
375                   gboolean is_multiplane = FALSE;
376
377                   caps = GST_BUFFER_CAPS(buffer);
378                   structure = gst_caps_get_structure (caps, 0);
379                   gst_structure_get_boolean(structure, "multiplane", &is_multiplane);
380                   gst_caps_unref(caps);
381
382                   if (is_multiplane)
383                     priv->func = _evas_video_st12_multiplane;
384                   else
385                     priv->func = _evas_video_st12;
386                }
387
388              ecore_main_loop_thread_safe_call_async(evas_video_sink_samsung_main_render, send);
389           }
390         else
391           ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
392      }
393
394    return GST_FLOW_OK;
395 }
396
397 static GstFlowReturn
398 evas_video_sink_render(GstBaseSink* bsink, GstBuffer* buffer)
399 {
400    Emotion_Gstreamer_Buffer *send;
401    EvasVideoSinkPrivate *priv;
402    EvasVideoSink *sink;
403
404    INF("sink render %p", buffer);
405
406    sink = EVAS_VIDEO_SINK(bsink);
407    priv = sink->priv;
408
409    eina_lock_take(&priv->m);
410
411    if (priv->unlocked) {
412       ERR("sink unlocked, dropping buffer");
413       eina_lock_release(&priv->m);
414       return GST_FLOW_OK;
415    }
416
417    send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_FALSE);
418    if (!send) {
419       eina_lock_release(&priv->m);
420       return GST_FLOW_ERROR;
421    }
422
423    if (priv->samsung)
424      {
425         if (!priv->func)
426           {
427              GstStructure *structure;
428              GstCaps *caps;
429              gboolean is_multiplane = FALSE;
430
431              caps = GST_BUFFER_CAPS(buffer);
432              structure = gst_caps_get_structure (caps, 0);
433              gst_structure_get_boolean(structure, "multiplane", &is_multiplane);
434              gst_caps_unref(caps);
435
436              if (is_multiplane)
437                priv->func = _evas_video_st12_multiplane;
438              else
439                priv->func = _evas_video_st12;
440           }
441
442         ecore_main_loop_thread_safe_call_async(evas_video_sink_samsung_main_render, send);
443      }
444    else
445      ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
446
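   /* Unlike preroll, render blocks here until the main-loop callback (or an unlock)
    * signals the condition, so the streaming thread cannot outrun the display. */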
447    eina_condition_wait(&priv->c);
448    eina_lock_release(&priv->m);
449
450    return GST_FLOW_OK;
451 }
452
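/* Debug helper: when FPS debugging is enabled (debug_fps), print the frame count and
 * the measured frame rate roughly every half second. */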
453 static void
454 _update_emotion_fps(Emotion_Gstreamer_Video *ev)
455 {
456    double tim;
457
458    if (!debug_fps) return ;
459
460    tim = ecore_time_get();
461    ev->frames++;
462
463    if (ev->rlapse == 0.0)
464      {
465         ev->rlapse = tim;
466         ev->flapse = ev->frames;
467      }
468    else if ((tim - ev->rlapse) >= 0.5)
469      {
470         printf("FRAME: %i, FPS: %3.1f\n",
471                ev->frames,
472                (ev->frames - ev->flapse) / (tim - ev->rlapse));
473         ev->rlapse = tim;
474         ev->flapse = ev->frames;
475      }
476 }
477
478 static void
479 evas_video_sink_samsung_main_render(void *data)
480 {
481    Emotion_Gstreamer_Buffer *send;
482    Emotion_Video_Stream *vstream;
483    EvasVideoSinkPrivate *priv = NULL;
484    GstBuffer* buffer;
485    unsigned char *evas_data;
486    const guint8 *gst_data;
487    GstFormat fmt = GST_FORMAT_TIME;
488    gint64 pos;
489    Eina_Bool preroll = EINA_FALSE;
490    int stride, elevation;
491    Evas_Coord w, h;
492
493    send = data;
494
495    if (!send) goto exit_point;
496
497    priv = send->sink;
498    buffer = send->frame;
499    preroll = send->preroll;
500
501    /* frame after cleanup */
502    if (!preroll && !send->ev->last_buffer)
503      {
504         priv = NULL;
505         goto exit_point;
506      }
507
508    if (!priv || !priv->o || priv->unlocked)
509      goto exit_point;
510
511    if (send->ev->send)
512      {
513         emotion_gstreamer_buffer_free(send->ev->send);
514         send->ev->send = NULL;
515      }
516
517    if (!send->ev->stream && !send->force)
518      {
519         send->ev->send = send;
520         _emotion_frame_new(send->ev->obj);
521         goto exit_stream;
522      }
523
524    _emotion_gstreamer_video_pipeline_parse(send->ev, EINA_TRUE);
525
526    /* Get the stride to compute the right size and then fill the object properly. */
527    /* The Y plane is in [0] and the UV plane in [1]. */
528    if (priv->func == _evas_video_st12_multiplane)
529      {
530         const GstMultiPlaneImageBuffer *mp_buf = (const GstMultiPlaneImageBuffer *) buffer;
531
532         stride = mp_buf->stride[0];
533         elevation = mp_buf->elevation[0];
534         priv->width = mp_buf->width[0];
535         priv->height = mp_buf->height[0];
536
537         gst_data = (const guint8 *) mp_buf;
538      }
539    else
540      {
541         const SCMN_IMGB *imgb = (const SCMN_IMGB *) GST_BUFFER_MALLOCDATA(buffer);
542
543         stride = imgb->stride[0];
544         elevation = imgb->elevation[0];
545         priv->width = imgb->width[0];
546         priv->height = imgb->height[0];
547
548         gst_data = (const guint8 *) imgb;
549      }
550
551    evas_object_geometry_get(priv->o, NULL, NULL, &w, &h);
552
553    send->ev->fill.width = (double) stride / priv->width;
554    send->ev->fill.height = (double) elevation / priv->height;
555
556    evas_object_image_alpha_set(priv->o, 0);
557    evas_object_image_colorspace_set(priv->o, priv->eformat);
558    evas_object_image_size_set(priv->o, stride, elevation);
559
560    _update_emotion_fps(send->ev);
561
562    evas_data = evas_object_image_data_get(priv->o, 1);
563
564    if (priv->func)
565      priv->func(evas_data, gst_data, stride, elevation, elevation);
566    else
567      WRN("No way to decode colorspace %x!", priv->eformat);
568
569    evas_object_image_data_set(priv->o, evas_data);
570    evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
571    evas_object_image_pixels_dirty_set(priv->o, 0);
572
573    if (!preroll && send->ev->play_started)
574      {
575         _emotion_playback_started(send->ev->obj);
576         send->ev->play_started = 0;
577      }
578
579    if (!send->force)
580      {
581         _emotion_frame_new(send->ev->obj);
582      }
583
584    vstream = eina_list_nth(send->ev->video_streams, send->ev->video_stream_nbr - 1);
585
586    gst_element_query_position(send->ev->pipeline, &fmt, &pos);
587    send->ev->position = (double)pos / (double)GST_SECOND;
588
589    if (vstream)
590      {
591         vstream->width = priv->width;
592         vstream->height = priv->height;
593
594         _emotion_video_pos_update(send->ev->obj, send->ev->position, vstream->length_time);
595      }
596
597    send->ev->ratio = (double) priv->width / (double) priv->height;
598    _emotion_frame_refill(send->ev->obj, send->ev->fill.width, send->ev->fill.height);
599    _emotion_frame_resize(send->ev->obj, priv->width, priv->height, send->ev->ratio);
600
601    buffer = gst_buffer_ref(buffer);
602    if (send->ev->last_buffer) gst_buffer_unref(send->ev->last_buffer);
603    send->ev->last_buffer = buffer;
604
605  exit_point:
606    emotion_gstreamer_buffer_free(send);
607
608  exit_stream:
609    if (priv)
610      {
611         if (preroll || !priv->o) return;
612         
613         if (!priv->unlocked)
614           eina_condition_signal(&priv->c);
615      }
616 }
617
618 static void
619 evas_video_sink_main_render(void *data)
620 {
621    Emotion_Gstreamer_Buffer *send;
622    Emotion_Gstreamer_Video *ev = NULL;
623    Emotion_Video_Stream *vstream;
624    EvasVideoSinkPrivate *priv = NULL;
625    GstBuffer *buffer;
626    unsigned char *evas_data;
627    GstFormat fmt = GST_FORMAT_TIME;
628    gint64 pos;
629    Eina_Bool preroll = EINA_FALSE;
630
631    send = data;
632
633    if (!send) goto exit_point;
634
635    priv = send->sink;
636    buffer = send->frame;
637    preroll = send->preroll;
638    ev = send->ev;
639
640    /* frame after cleanup */
641    if (!preroll && !ev->last_buffer)
642      {
643         priv = NULL;
644         goto exit_point;
645      }
646
647    if (!priv || !priv->o || priv->unlocked)
648      goto exit_point;
649
650    if (ev->send && send != ev->send)
651      {
652         emotion_gstreamer_buffer_free(ev->send);
653         ev->send = NULL;
654      }
655
656    if (!ev->stream && !send->force)
657      {
658         ev->send = send;
659         _emotion_frame_new(ev->obj);
660         evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
661         goto exit_stream;
662      }
663
664    _emotion_gstreamer_video_pipeline_parse(ev, EINA_TRUE);
665
666    INF("sink main render [%i, %i] (source height: %i)", priv->width, priv->height, priv->source_height);
667
668    evas_object_image_alpha_set(priv->o, 0);
669    evas_object_image_colorspace_set(priv->o, priv->eformat);
670    evas_object_image_size_set(priv->o, priv->width, priv->height);
671
672    evas_data = evas_object_image_data_get(priv->o, 1);
673
674    if (priv->func)
675      priv->func(evas_data, GST_BUFFER_DATA(buffer), priv->width, priv->source_height, priv->height);
676    else
677      WRN("No way to decode colorspace %x!", priv->eformat);
678
679    evas_object_image_data_set(priv->o, evas_data);
680    evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
681    evas_object_image_pixels_dirty_set(priv->o, 0);
682
683    _update_emotion_fps(ev);
684
685    if (!preroll && ev->play_started)
686      {
687         _emotion_playback_started(ev->obj);
688         ev->play_started = 0;
689      }
690
691    if (!send->force)
692      {
693         _emotion_frame_new(ev->obj);
694      }
695
696    gst_element_query_position(ev->pipeline, &fmt, &pos);
697    ev->position = (double)pos / (double)GST_SECOND;
698
699    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
700
701    if (vstream)
702      {
703        vstream->width = priv->width;
704        vstream->height = priv->height;
705        _emotion_video_pos_update(ev->obj, ev->position, vstream->length_time);
706      }
707
708    ev->ratio = (double) priv->width / (double) priv->height;
709
710    _emotion_frame_resize(ev->obj, priv->width, priv->height, ev->ratio);
711
712    buffer = gst_buffer_ref(buffer);
713    if (ev->last_buffer) gst_buffer_unref(ev->last_buffer);
714    ev->last_buffer = buffer;
715
716  exit_point:
717    emotion_gstreamer_buffer_free(send);
718
719  exit_stream:
720    if (priv)
721      {
722         if (preroll || !priv->o) return;
723         
724         if (!priv->unlocked)
725           eina_condition_signal(&priv->c);
726      }
727 }
728
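/* Mark the sink as unlocked and wake any render call blocked on the condition, so
 * stop/unlock never deadlocks against a frame waiting for the main loop. */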
729 static void
730 unlock_buffer_mutex(EvasVideoSinkPrivate* priv)
731 {
732    priv->unlocked = EINA_TRUE;
733
734    eina_condition_signal(&priv->c);
735 }
736
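/* Custom GClosure marshaller for the "repaint-requested" signal: it passes the
 * GstMiniObject (here a GstBuffer) carried by the signal straight to the callback. */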
737 static void
738 marshal_VOID__MINIOBJECT(GClosure * closure, GValue * return_value __UNUSED__,
739                          guint n_param_values, const GValue * param_values,
740                          gpointer invocation_hint __UNUSED__, gpointer marshal_data)
741 {
742    typedef void (*marshalfunc_VOID__MINIOBJECT) (gpointer obj, gpointer arg1, gpointer data2);
743    marshalfunc_VOID__MINIOBJECT callback;
744    GCClosure *cc;
745    gpointer data1, data2;
746
747    cc = (GCClosure *) closure;
748
749    g_return_if_fail(n_param_values == 2);
750
751    if (G_CCLOSURE_SWAP_DATA(closure)) {
752       data1 = closure->data;
753       data2 = g_value_peek_pointer(param_values + 0);
754    } else {
755       data1 = g_value_peek_pointer(param_values + 0);
756       data2 = closure->data;
757    }
758    callback = (marshalfunc_VOID__MINIOBJECT) (marshal_data ? marshal_data : cc->callback);
759
760    callback(data1, gst_value_get_mini_object(param_values + 1), data2);
761 }
762
763 static void
764 evas_video_sink_class_init(EvasVideoSinkClass* klass)
765 {
766    GObjectClass* gobject_class;
767    GstBaseSinkClass* gstbase_sink_class;
768
769    gobject_class = G_OBJECT_CLASS(klass);
770    gstbase_sink_class = GST_BASE_SINK_CLASS(klass);
771
772    g_type_class_add_private(klass, sizeof(EvasVideoSinkPrivate));
773
774    gobject_class->set_property = evas_video_sink_set_property;
775    gobject_class->get_property = evas_video_sink_get_property;
776
777    g_object_class_install_property (gobject_class, PROP_EVAS_OBJECT,
778                                     g_param_spec_pointer ("evas-object", "Evas Object",
779                                                           "The Evas object on which the video will be displayed",
780                                                           G_PARAM_READWRITE));
781
782    g_object_class_install_property (gobject_class, PROP_WIDTH,
783                                     g_param_spec_int ("width", "Width",
784                                                       "The width of the video",
785                                                       0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
786
787    g_object_class_install_property (gobject_class, PROP_HEIGHT,
788                                     g_param_spec_int ("height", "Height",
789                                                       "The height of the video",
790                                                       0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
791    g_object_class_install_property (gobject_class, PROP_EV,
792                                     g_param_spec_pointer ("ev", "Emotion_Gstreamer_Video",
793                                                           "The internal data of the emotion object",
794                                                           G_PARAM_READWRITE));
795
796    gobject_class->dispose = evas_video_sink_dispose;
797
798    gstbase_sink_class->set_caps = evas_video_sink_set_caps;
799    gstbase_sink_class->stop = evas_video_sink_stop;
800    gstbase_sink_class->start = evas_video_sink_start;
801    gstbase_sink_class->unlock = evas_video_sink_unlock;
802    gstbase_sink_class->unlock_stop = evas_video_sink_unlock_stop;
803    gstbase_sink_class->render = evas_video_sink_render;
804    gstbase_sink_class->preroll = evas_video_sink_preroll;
805
806    evas_video_sink_signals[REPAINT_REQUESTED] = g_signal_new("repaint-requested",
807                                                              G_TYPE_FROM_CLASS(klass),
808                                                              (GSignalFlags)(G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
809                                                              0,
810                                                              0,
811                                                              0,
812                                                              marshal_VOID__MINIOBJECT,
813                                                              G_TYPE_NONE, 1, GST_TYPE_BUFFER);
814 }
815
816 gboolean
817 gstreamer_plugin_init (GstPlugin * plugin)
818 {
819    return gst_element_register (plugin,
820                                 "emotion-sink",
821                                 GST_RANK_NONE,
822                                 EVAS_TYPE_VIDEO_SINK);
823 }
824
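/* Runs in an Ecore_Thread: bring the pipeline to PAUSED and wait for the state change
 * to complete; live sources answer GST_STATE_CHANGE_NO_PREROLL, in which case we move
 * straight to PLAYING instead. */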
825 static void
826 _emotion_gstreamer_pause(void *data, Ecore_Thread *thread)
827 {
828    Emotion_Gstreamer_Video *ev = data;
829    GstStateChangeReturn res;
830
831    if (ecore_thread_check(thread) || !ev->pipeline) return ;
832
833    gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
834    res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
835    if (res == GST_STATE_CHANGE_NO_PREROLL)
836      {
837         gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
838         gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
839      }
840 }
841
842 static void
843 _emotion_gstreamer_cancel(void *data, Ecore_Thread *thread)
844 {
845    Emotion_Gstreamer_Video *ev = data;
846
847    ev->threads = eina_list_remove(ev->threads, thread);
848
849    if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
850
851    if (ev->in == ev->out && ev->delete_me)
852      em_shutdown(ev);
853 }
854
855 static void
856 _emotion_gstreamer_end(void *data, Ecore_Thread *thread)
857 {
858    Emotion_Gstreamer_Video *ev = data;
859
860    ev->threads = eina_list_remove(ev->threads, thread);
861
862    if (ev->play)
863      {
864         gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
865         ev->play_started = 1;
866      }
867
868    if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
869
870    if (ev->in == ev->out && ev->delete_me)
871      em_shutdown(ev);
872    else
873      _emotion_gstreamer_video_pipeline_parse(data, EINA_TRUE);
874 }
875
876 static void
877 _video_resize(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__,
878               Evas_Coord w, Evas_Coord h)
879 {
880 #ifdef HAVE_ECORE_X
881    Emotion_Gstreamer_Video *ev = data;
882
883    ecore_x_window_resize(ev->win, w, h);
884    fprintf(stderr, "resize: %i, %i\n", w, h);
885 #endif
886 }
887
888 static void
889 _video_move(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__,
890             Evas_Coord x, Evas_Coord y)
891 {
892 #ifdef HAVE_ECORE_X
893    Emotion_Gstreamer_Video *ev = data;
894    unsigned int pos[2];
895
896    fprintf(stderr, "move: %i, %i\n", x, y);
897    pos[0] = x; pos[1] = y;
898    ecore_x_window_prop_card32_set(ev->win, ECORE_X_ATOM_E_VIDEO_POSITION, pos, 2);
899 #endif
900 }
901
902 #if 0
903 /* Much better idea to always feed the XvImageSink and let it handle optimizing the rendering as we do. */
904 static void
905 _block_pad_unlink_cb(GstPad *pad, gboolean blocked, gpointer user_data)
906 {
907    if (blocked)
908      {
909         Emotion_Gstreamer_Video *ev = user_data;
910         GstEvent *gev;
911
912         gst_pad_unlink(ev->xvteepad, ev->xvpad);
913         gev = gst_event_new_eos();
914         gst_pad_send_event(ev->xvpad, gev);
915         gst_pad_set_blocked_async(pad, FALSE, _block_pad_unlink_cb, NULL);
916      }
917 }
918
919 static void
920 _block_pad_link_cb(GstPad *pad, gboolean blocked, gpointer user_data)
921 {
922    if (blocked)
923      {
924         Emotion_Gstreamer_Video *ev = user_data;
925
926         gst_pad_link(ev->xvteepad, ev->xvpad);
927         if (ev->play)
928           gst_element_set_state(ev->xvsink, GST_STATE_PLAYING);
929         else
930           gst_element_set_state(ev->xvsink, GST_STATE_PAUSED);
931         gst_pad_set_blocked_async(pad, FALSE, _block_pad_link_cb, NULL);
932      }
933 }
934 #endif
935
936 static void
937 _video_show(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__)
938 {
939 #ifdef HAVE_ECORE_X
940    Emotion_Gstreamer_Video *ev = data;
941
942    fprintf(stderr, "show xv\n");
943    ecore_x_window_show(ev->win);
944 #endif
945    /* gst_pad_set_blocked_async(ev->xvteepad, TRUE, _block_pad_link_cb, ev); */
946 }
947
948 static void
949 _video_hide(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__)
950 {
951 #ifdef HAVE_ECORE_X
952    Emotion_Gstreamer_Video *ev = data;
953
954    fprintf(stderr, "hide xv\n");
955    ecore_x_window_hide(ev->win);
956 #endif
957    /* gst_pad_set_blocked_async(ev->xvteepad, TRUE, _block_pad_unlink_cb, ev); */
958 }
959
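/* Evas_Video_Surface update_pixels callback: force a software render of the pending
 * buffer (ev->send) so Evas gets actual pixel data when it asks for it. */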
960 static void
961 _video_update_pixels(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__)
962 {
963    Emotion_Gstreamer_Video *ev = data;
964    Emotion_Gstreamer_Buffer *send;
965    EvasVideoSinkPrivate *priv = NULL;
966
967    if (!ev->send) return ;
968
969    send = ev->send;
970    priv = send->sink;
971    send->force = EINA_TRUE;
972    ev->send = NULL;
973
974    if (priv->samsung)
975       evas_video_sink_samsung_main_render(send);
976    else
977       evas_video_sink_main_render(send);
978 }
979
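/* EVAS_CALLBACK_RESIZE handler on the emotion image object: when the object becomes
 * much smaller than the source video, insert a fimcconvert + capsfilter pair in front
 * of the emotion queue so the video is scaled down before reaching the sink; on later
 * resizes only the capsfilter caps are updated. */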
980 static void
981 _image_resize(void *data, Evas *e __UNUSED__, Evas_Object *obj, void *event_info __UNUSED__)
982 {
983    Emotion_Gstreamer_Video *ev = data;
984    Evas_Coord width, height;
985    int image_area, src_area;
986    double ratio;
987
988    GstElementFactory *cfactory = NULL;
989    GstElement *convert = NULL, *filter = NULL, *queue = NULL;
990    GstPad *pad = NULL, *teepad = NULL;
991    GstCaps *caps = NULL;
992    Eina_List *l, *engines;
993    const char *ename, *engine = NULL;
994
995    evas_object_geometry_get(obj, NULL, NULL, &width, &height);
996    image_area = width * height;
997    src_area = ev->src_width * ev->src_height;
998    ratio = (double)image_area / (double)src_area;
999
1000    // when the image is much smaller than the original video size,
1001    // add a fimcconvert element to the pipeline
1002    if (ratio < 0.8 && ev->stream && !ev->convert)
1003      {
1004         cfactory = gst_element_factory_find("fimcconvert");
1005         if (!cfactory) return;
1006
1007         convert = gst_element_factory_create(cfactory, NULL);
1008         if (!convert) return;
1009
1010         // add a capsfilter to limit the size and formats based on the rendering backend
1011         filter = gst_element_factory_make("capsfilter", "fimccapsfilter");
1012         if (!filter)
1013           {
1014              gst_object_unref(convert);
1015              return;
1016           }
1017
1018         engines = evas_render_method_list();
1019         EINA_LIST_FOREACH(engines, l, ename)
1020           {
1021              if (evas_render_method_lookup(ename) ==
1022                  evas_output_method_get(evas_object_evas_get(obj)))
1023                {
1024                   engine = ename;
1025                   break;
1026                }
1027           }
1028
1029         if (strstr(engine, "software") != NULL)
1030           {
1031              caps = gst_caps_new_simple("video/x-raw-rgb",
1032                                         "width", G_TYPE_INT, width,
1033                                         "height", G_TYPE_INT, height,
1034                                         NULL);
1035           }
1036         else if (strstr(engine, "gl") != NULL)
1037           {
1038              caps = gst_caps_new_simple("video/x-raw-yuv",
1039                                         "width", G_TYPE_INT, width,
1040                                         "height", G_TYPE_INT, height,
1041                                         NULL);
1042           }
1043         g_object_set(G_OBJECT(filter), "caps", caps, NULL);
1044         gst_caps_unref(caps);
1045
1046         // add new elements to the pipeline
1047         queue = gst_bin_get_by_name(GST_BIN(ev->sink), "equeue");
1048         gst_element_unlink(ev->tee, queue);
1049         gst_element_release_request_pad(ev->tee, ev->eteepad);
1050         gst_object_unref(ev->eteepad);
1051
1052         gst_bin_add_many(GST_BIN(ev->sink), convert, filter, NULL);
1053         gst_element_link_many(ev->tee, convert, filter, queue, NULL);
1054
1055         pad = gst_element_get_pad(convert, "sink");
1056         teepad = gst_element_get_request_pad(ev->tee, "src%d");
1057         gst_pad_link(teepad, pad);
1058         gst_object_unref(pad);
1059
1060         gst_element_sync_state_with_parent(convert);
1061         gst_element_sync_state_with_parent(filter);
1062
1063         ev->eteepad = teepad;
1064         ev->convert = convert;
1065         evas_render_method_list_free(engines);
1066
1067         INF("add fimcconvert element. video size: %dx%d. emotion object size: %dx%d",
1068             ev->src_width, ev->src_height, width, height);
1069      }
1070    // set the size on the capsfilter again when the image is resized
1071    else if (ev->convert)
1072      {
1073         filter = gst_bin_get_by_name(GST_BIN(ev->sink), "fimccapsfilter");
1074
1075         engines = evas_render_method_list();
1076         EINA_LIST_FOREACH(engines, l, ename)
1077           {
1078              if (evas_render_method_lookup(ename) ==
1079                  evas_output_method_get(evas_object_evas_get(obj)))
1080                {
1081                   engine = ename;
1082                   break;
1083                }
1084           }
1085
1086         if (strstr(engine, "software") != NULL)
1087           {
1088              caps = gst_caps_new_simple("video/x-raw-rgb",
1089                                         "width", G_TYPE_INT, width,
1090                                         "height", G_TYPE_INT, height,
1091                                         NULL);
1092           }
1093         else if (strstr(engine, "gl") != NULL)
1094           {
1095              caps = gst_caps_new_simple("video/x-raw-yuv",
1096                                         "width", G_TYPE_INT, width,
1097                                         "height", G_TYPE_INT, height,
1098                                         NULL);
1099           }
1100
1101         g_object_set(G_OBJECT(filter), "caps", caps, NULL);
1102         gst_caps_unref(caps);
1103         evas_render_method_list_free(engines);
1104
1105         INF("set capsfilter size again. video size: %dx%d. emotion object size: %dx%d",
1106             ev->src_width, ev->src_height, width, height);
1107      }
1108 }
1109
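/* Build the playback pipeline: playbin2 (or a custom launch line for webcams) feeding
 * a private bin of the shape  tee ! queue ! emotion-sink , with an optional parallel
 * tee ! fakeeos ( queue ! xvimagesink )  branch when a window-manager driven Xv
 * overlay can be used. */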
1110 GstElement *
1111 gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
1112                          Evas_Object *o,
1113                          const char *uri)
1114 {
1115    GstElement *playbin;
1116    GstElement *bin = NULL;
1117    GstElement *esink = NULL;
1118    GstElement *xvsink = NULL;
1119    GstElement *tee = NULL;
1120    GstElement *queue = NULL;
1121    Evas_Object *obj;
1122    GstPad *pad;
1123    GstPad *teepad;
1124    int flags;
1125    const char *launch;
1126 #if defined HAVE_ECORE_X && defined HAVE_XOVERLAY_H
1127    const char *engine = NULL;
1128    Eina_List *engines;
1129 #endif
1130
1131    obj = emotion_object_image_get(o);
1132    if (!obj)
1133      {
1134         ERR("No Evas_Object specified");
1135         return NULL;
1136      }
1137
1138    if (!uri)
1139      return NULL;
1140
1141    launch = emotion_webcam_custom_get(uri);
1142    if (launch)
1143      {
1144         GError *error = NULL;
1145
1146         playbin = gst_parse_bin_from_description(launch, 1, &error);
1147         if (!playbin)
1148           {
1149              ERR("Unable to set up command '%s': got error '%s'.", launch, error->message);
1150              g_error_free(error);
1151              return NULL;
1152           }
1153         if (error)
1154           {
1155              WRN("got recoverable error '%s' for command '%s'.", error->message, launch);
1156              g_error_free(error);
1157           }
1158      }
1159    else
1160      {
1161         playbin = gst_element_factory_make("playbin2", "playbin");
1162         if (!playbin)
1163           {
1164              ERR("Unable to create 'playbin2' GstElement.");
1165              return NULL;
1166           }
1167      }
1168
1169    bin = gst_bin_new(NULL);
1170    if (!bin)
1171      {
1172        ERR("Unable to create GstBin!");
1173        goto unref_pipeline;
1174      }
1175
1176    tee = gst_element_factory_make("tee", NULL);
1177    if (!tee)
1178      {
1179        ERR("Unable to create 'tee' GstElement.");
1180        goto unref_pipeline;
1181      }
1182
1183 #if defined HAVE_ECORE_X && defined HAVE_XOVERLAY_H
1184    if (window_manager_video)
1185      {
1186         Eina_List *l;
1187         const char *ename;
1188         
1189         engines = evas_render_method_list();
1190
1191         EINA_LIST_FOREACH(engines, l, ename)
1192           {
1193              if (evas_render_method_lookup(ename) == 
1194                  evas_output_method_get(evas_object_evas_get(obj)))
1195                {
1196                   engine = ename;
1197                   break;
1198                }
1199           }
1200
1201        if (ev->priority && engine && strstr(engine, "_x11") != NULL)
1202          {
1203            Ecore_Evas *ee;
1204            Evas_Coord x, y, w, h;
1205            Ecore_X_Window win;
1206            Ecore_X_Window parent;
1207
1208            evas_object_geometry_get(obj, &x, &y, &w, &h);
1209
1210            ee = ecore_evas_ecore_evas_get(evas_object_evas_get(obj));
1211
1212            if (w < 4) w = 4;
1213            if (h < 2) h = 2;
1214
1215            /* Here we really need the help of the window manager; this code will change when we update E17. */
1216            parent = (Ecore_X_Window) ecore_evas_window_get(ee);
1217            fprintf(stderr, "parent: %x\n", parent);
1218
1219            win = ecore_x_window_new(0, x, y, w, h);
1220            fprintf(stderr, "creating window: %x [%i, %i, %i, %i]\n", win, x, y, w, h);
1221            if (win)
1222              {
1223                Ecore_X_Window_State state[] = { ECORE_X_WINDOW_STATE_SKIP_TASKBAR, ECORE_X_WINDOW_STATE_SKIP_PAGER };
1224
1225                ecore_x_netwm_window_state_set(win, state, 2);
1226                ecore_x_window_hide(win);
1227                xvsink = gst_element_factory_make("xvimagesink", NULL);
1228                if (xvsink)
1229                  {
1230                    unsigned int pos[2];
1231
1232 #ifdef HAVE_X_OVERLAY_SET
1233                    gst_x_overlay_set_window_handle(GST_X_OVERLAY(xvsink), win);
1234 #else
1235                    gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(xvsink), win);
1236 #endif
1237                    ev->win = win;
1238
1239                    ecore_x_window_prop_card32_set(win, ECORE_X_ATOM_E_VIDEO_PARENT, &parent, 1);
1240
1241                    pos[0] = x; pos[1] = y;
1242                    ecore_x_window_prop_card32_set(win, ECORE_X_ATOM_E_VIDEO_POSITION, pos, 2);
1243                  }
1244                else
1245                  {
1246                    fprintf(stderr, "destroying win: %x\n", win);
1247                    ecore_x_window_free(win);
1248                  }
1249              }
1250          }
1251        evas_render_method_list_free(engines);
1252      }
1253 #else
1254 # warning "missing: ecore_x OR xoverlay"
1255 #endif
1256
1257    esink = gst_element_factory_make("emotion-sink", "sink");
1258    if (!esink)
1259      {
1260         ERR("Unable to create 'emotion-sink' GstElement.");
1261         goto unref_pipeline;
1262      }
1263
1264    g_object_set(G_OBJECT(esink), "evas-object", obj, NULL);
1265    g_object_set(G_OBJECT(esink), "ev", ev, NULL);
1266
1267    evas_object_image_pixels_get_callback_set(obj, NULL, NULL);
1268    evas_object_event_callback_add(obj, EVAS_CALLBACK_RESIZE, _image_resize, ev);
1269
1270    /* We need a queue to force each video sink to run in its own thread */
1271    queue = gst_element_factory_make("queue", "equeue");
1272    if (!queue)
1273      {
1274         ERR("Unable to create 'queue' GstElement.");
1275         goto unref_pipeline;
1276      }
1277
1278    gst_bin_add_many(GST_BIN(bin), tee, queue, esink, NULL);
1279    gst_element_link_many(queue, esink, NULL);
1280
1281    /* link both sinks to the GstTee */
1282    pad = gst_element_get_pad(queue, "sink");
1283    teepad = gst_element_get_request_pad(tee, "src%d");
1284    gst_pad_link(teepad, pad);
1285    gst_object_unref(pad);
1286
1287    ev->eteepad = teepad;
1288
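   /* If an Xv sink was created above, give it its own queue and wrap both in the custom
    * fakeeos bin (ghost pad on the queue sink), then hook that bin to a second request
    * pad of the tee. */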
1289    if (xvsink)
1290      {
1291         GstElement *fakeeos;
1292
1293         queue = gst_element_factory_make("queue", "xvqueue");
1294         fakeeos = GST_ELEMENT(GST_BIN(g_object_new(GST_TYPE_FAKEEOS_BIN, "name", "eosbin", NULL)));
1295         if (queue && fakeeos)
1296           {
1297              GstPad *queue_pad;
1298
1299              gst_bin_add_many(GST_BIN(bin), fakeeos, NULL);
1300
1301              gst_bin_add_many(GST_BIN(fakeeos), queue, xvsink, NULL);
1302              gst_element_link_many(queue, xvsink, NULL);
1303              queue_pad = gst_element_get_pad(queue, "sink");
1304              gst_element_add_pad(fakeeos, gst_ghost_pad_new("sink", queue_pad));
1305
1306              pad = gst_element_get_pad(fakeeos, "sink");
1307              teepad = gst_element_get_request_pad(tee, "src%d");
1308              gst_pad_link(teepad, pad);
1309
1310              xvsink = fakeeos;
1311
1312              ev->xvteepad = teepad;
1313              ev->xvpad = pad;
1314           }
1315         else
1316           {
1317              if (fakeeos) gst_object_unref(fakeeos);
1318              if (queue) gst_object_unref(queue);
1319              gst_object_unref(xvsink);
1320              xvsink = NULL;
1321           }
1322      }
1323
1324    teepad = gst_element_get_pad(tee, "sink");
1325    gst_element_add_pad(bin, gst_ghost_pad_new("sink", teepad));
1326    gst_object_unref(teepad);
1327
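/* Local copies of the playbin2 GstPlayFlags bits used below; GStreamer 0.10 does not
 * install a public header for them. */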
1328 #define GST_PLAY_FLAG_NATIVE_VIDEO  (1 << 6)
1329 #define GST_PLAY_FLAG_DOWNLOAD      (1 << 7)
1330 #define GST_PLAY_FLAG_AUDIO         (1 << 1)
1331 #define GST_PLAY_FLAG_NATIVE_AUDIO  (1 << 5)
1332
1333    if (launch)
1334      {
1335         g_object_set(G_OBJECT(playbin), "sink", bin, NULL);
1336      }
1337    else
1338      {
1339         g_object_get(G_OBJECT(playbin), "flags", &flags, NULL);
1340         g_object_set(G_OBJECT(playbin), "flags", flags | GST_PLAY_FLAG_NATIVE_VIDEO | GST_PLAY_FLAG_DOWNLOAD | GST_PLAY_FLAG_NATIVE_AUDIO, NULL);
1341         g_object_set(G_OBJECT(playbin), "video-sink", bin, NULL);
1342         g_object_set(G_OBJECT(playbin), "uri", uri, NULL);
1343      }
1344
1345    evas_object_image_pixels_get_callback_set(obj, NULL, NULL);
1346
1347    ev->stream = EINA_TRUE;
1348
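   /* When the Xv overlay is in use, expose an Evas_Video_Surface so Evas controls the
    * overlay window (move/resize/show/hide) and can pull pixels back through
    * _video_update_pixels(); ev->stream is cleared so the software path only uploads
    * frames when explicitly forced. */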
1349    if (xvsink)
1350      {
1351         Evas_Video_Surface video;
1352
1353         video.version = EVAS_VIDEO_SURFACE_VERSION;
1354         video.data = ev;
1355         video.parent = NULL;
1356         video.move = _video_move;
1357         video.resize = _video_resize;
1358         video.show = _video_show;
1359         video.hide = _video_hide;
1360         video.update_pixels = _video_update_pixels;
1361
1362         evas_object_image_video_surface_set(obj, &video);
1363         ev->stream = EINA_FALSE;
1364      }
1365
1366    eina_stringshare_replace(&ev->uri, uri);
1367    ev->pipeline = playbin;
1368    ev->sink = bin;
1369    ev->esink = esink;
1370    ev->xvsink = xvsink;
1371    ev->tee = tee;
1372    ev->threads = eina_list_append(ev->threads,
1373                                   ecore_thread_run(_emotion_gstreamer_pause,
1374                                                    _emotion_gstreamer_end,
1375                                                    _emotion_gstreamer_cancel,
1376                                                    ev));
1377
1378    /** NOTE: set GST_DEBUG_DUMP_DOT_DIR=/tmp and EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp', */
1379    /** then run: dot -Tpng -o emotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
1380    if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(playbin), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
1381
1382    return playbin;
1383
1384  unref_pipeline:
1385    if (xvsink) gst_object_unref(xvsink);
1386    if (esink) gst_object_unref(esink);
1387    if (tee) gst_object_unref(tee);
1388    if (bin) gst_object_unref(bin);
1389    gst_object_unref(playbin);
1390    return NULL;
1391 }