433d8dca508adc9d02f1c59c18d457b764ee2a3d
[profile/ivi/emotion.git] / src / modules / gstreamer / emotion_sink.c
1 #ifdef HAVE_CONFIG_H
2 # include "config.h"
3 #endif
4
5 #include <Eina.h>
6 #include <Evas.h>
7 #include <Ecore.h>
8
9 #define HTTP_STREAM 0
10 #define RTSP_STREAM 1
11 #include <glib.h>
12 #include <gst/gst.h>
13 #include <glib-object.h>
14 #include <gst/video/gstvideosink.h>
15 #include <gst/video/video.h>
16
17 #ifdef HAVE_ECORE_X
18 # include <Ecore_X.h>
19 # include <Ecore_Evas.h>
20 # ifdef HAVE_XOVERLAY_H
21 #  include <gst/interfaces/xoverlay.h>
22 # endif
23 #endif
24
25 #include "Emotion.h"
26 #include "emotion_private.h"
27 #include "emotion_gstreamer.h"
28
/* Caps accepted on the sink pad: packed/planar YUV fourccs (including the
 * Samsung-specific tiled ST12/TM12 formats) plus raw BGRx/BGR/BGRA. */
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE("sink",
                                                                   GST_PAD_SINK, GST_PAD_ALWAYS,
                                                                   GST_STATIC_CAPS(GST_VIDEO_CAPS_YUV("{ I420, YV12, YUY2, NV12, ST12, TM12 }") ";"
                                                                                   GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_BGR ";" GST_VIDEO_CAPS_BGRA));

GST_DEBUG_CATEGORY_STATIC(evas_video_sink_debug);
#define GST_CAT_DEFAULT evas_video_sink_debug

/* Signals emitted by the sink. */
enum {
  REPAINT_REQUESTED,
  LAST_SIGNAL
};

/* GObject property ids (see class_init for the matching param specs). */
enum {
  PROP_0,
  PROP_EVAS_OBJECT,
  PROP_WIDTH,
  PROP_HEIGHT,
  PROP_EV,
  PROP_LAST
};

static guint evas_video_sink_signals[LAST_SIGNAL] = { 0, };

/* Registers the "emotion-sink" debug category; run by GST_BOILERPLATE_FULL. */
#define _do_init(bla)                                   \
  GST_DEBUG_CATEGORY_INIT(evas_video_sink_debug,        \
                          "emotion-sink",               \
                          0,                            \
                          "emotion video sink")

/* Declares the EvasVideoSink GObject type, derived from GstVideoSink. */
GST_BOILERPLATE_FULL(EvasVideoSink,
                     evas_video_sink,
                     GstVideoSink,
                     GST_TYPE_VIDEO_SINK,
                     _do_init);
64
65
66 static void unlock_buffer_mutex(EvasVideoSinkPrivate* priv);
67 static void evas_video_sink_main_render(void *data);
68 static void evas_video_sink_samsung_main_render(void *data);
69
70 static void
71 evas_video_sink_base_init(gpointer g_class)
72 {
73    GstElementClass* element_class;
74
75    element_class = GST_ELEMENT_CLASS(g_class);
76    gst_element_class_add_pad_template(element_class, gst_static_pad_template_get(&sinktemplate));
77    gst_element_class_set_details_simple(element_class, "Evas video sink",
78                                         "Sink/Video", "Sends video data from a GStreamer pipeline to an Evas object",
79                                         "Vincent Torri <vtorri@univ-evry.fr>");
80 }
81
static void
evas_video_sink_init(EvasVideoSink* sink, EvasVideoSinkClass* klass __UNUSED__)
{
   EvasVideoSinkPrivate* priv;

   INF("sink init");
   /* Allocate the private structure and reset every field to a known state. */
   sink->priv = priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, EVAS_TYPE_VIDEO_SINK, EvasVideoSinkPrivate);
   priv->o = NULL;
   priv->width = 0;
   priv->height = 0;
   priv->func = NULL;
   priv->eformat = EVAS_COLORSPACE_ARGB8888;
   priv->samsung = EINA_FALSE;
   /* Lock/condition pair synchronizing the GStreamer streaming thread with
    * the Ecore main-loop render callbacks (see evas_video_sink_render()). */
   eina_lock_new(&priv->m);
   eina_condition_new(&priv->c, &priv->m);
   priv->unlocked = EINA_FALSE;
}
99
100 /**** Object methods ****/
101 static void
102 _cleanup_priv(void *data, Evas *e __UNUSED__, Evas_Object *obj, void *event_info __UNUSED__)
103 {
104    EvasVideoSinkPrivate* priv;
105
106    priv = data;
107
108    eina_lock_take(&priv->m);
109    if (priv->o == obj)
110      priv->o = NULL;
111    eina_lock_release(&priv->m);
112 }
113
114 static void
115 evas_video_sink_set_property(GObject * object, guint prop_id,
116                              const GValue * value, GParamSpec * pspec)
117 {
118    EvasVideoSink* sink;
119    EvasVideoSinkPrivate* priv;
120
121    sink = EVAS_VIDEO_SINK (object);
122    priv = sink->priv;
123
124    switch (prop_id) {
125     case PROP_EVAS_OBJECT:
126        eina_lock_take(&priv->m);
127        evas_object_event_callback_del(priv->o, EVAS_CALLBACK_FREE, _cleanup_priv);
128        priv->o = g_value_get_pointer (value);
129        INF("sink set Evas_Object %p.", priv->o);
130        evas_object_event_callback_add(priv->o, EVAS_CALLBACK_FREE, _cleanup_priv, priv);
131        eina_lock_release(&priv->m);
132        break;
133     case PROP_EV:
134        INF("sink set ev.");
135        eina_lock_take(&priv->m);
136        priv->ev = g_value_get_pointer (value);
137        if (priv->ev)
138          priv->ev->samsung = EINA_TRUE;
139        eina_lock_release(&priv->m);
140        break;
141     default:
142        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
143        ERR("invalid property");
144        break;
145    }
146 }
147
148 static void
149 evas_video_sink_get_property(GObject * object, guint prop_id,
150                              GValue * value, GParamSpec * pspec)
151 {
152    EvasVideoSink* sink;
153    EvasVideoSinkPrivate* priv;
154
155    sink = EVAS_VIDEO_SINK (object);
156    priv = sink->priv;
157
158    switch (prop_id) {
159     case PROP_EVAS_OBJECT:
160        INF("sink get property.");
161        eina_lock_take(&priv->m);
162        g_value_set_pointer(value, priv->o);
163        eina_lock_release(&priv->m);
164        break;
165     case PROP_WIDTH:
166        INF("sink get width.");
167        eina_lock_take(&priv->m);
168        g_value_set_int(value, priv->width);
169        eina_lock_release(&priv->m);
170        break;
171     case PROP_HEIGHT:
172        INF("sink get height.");
173        eina_lock_take(&priv->m);
174        g_value_set_int (value, priv->height);
175        eina_lock_release(&priv->m);
176        break;
177     case PROP_EV:
178        INF("sink get ev.");
179        eina_lock_take(&priv->m);
180        g_value_set_pointer (value, priv->ev);
181        eina_lock_release(&priv->m);
182        break;
183     default:
184        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
185        ERR("invalide property");
186        break;
187    }
188 }
189
190 static void
191 evas_video_sink_dispose(GObject* object)
192 {
193    EvasVideoSink* sink;
194    EvasVideoSinkPrivate* priv;
195
196    INF("dispose.");
197
198    sink = EVAS_VIDEO_SINK(object);
199    priv = sink->priv;
200
201    eina_lock_free(&priv->m);
202    eina_condition_free(&priv->c);
203
204    G_OBJECT_CLASS(parent_class)->dispose(object);
205 }
206
207
208 /**** BaseSink methods ****/
209
/* GstBaseSink::set_caps: pick the Evas colorspace and the pixel-convert
 * function matching the negotiated caps.  Returns TRUE when the format
 * is supported.  Note: writes priv->width/height/eformat/func as a side
 * effect, so callers observe the new geometry immediately. */
gboolean evas_video_sink_set_caps(GstBaseSink *bsink, GstCaps *caps)
{
   EvasVideoSink* sink;
   EvasVideoSinkPrivate* priv;
   GstStructure *structure;
   GstVideoFormat format;
   guint32 fourcc;
   unsigned int i;

   sink = EVAS_VIDEO_SINK(bsink);
   priv = sink->priv;

   structure = gst_caps_get_structure(caps, 0);

   /* Fast path: YUV caps expose width/height/fourcc directly. */
   if (gst_structure_get_int(structure, "width", (int*) &priv->width)
       && gst_structure_get_int(structure, "height", (int*) &priv->height)
       && gst_structure_get_fourcc(structure, "format", &fourcc))
     {
        priv->source_height = priv->height;

        /* Match the fourcc against the conversion table to select the
         * Evas colorspace and the copy/convert callback. */
        for (i = 0; colorspace_fourcc_convertion[i].name != NULL; ++i)
          if (fourcc == colorspace_fourcc_convertion[i].fourcc)
            {
               fprintf(stderr, "Found '%s'\n", colorspace_fourcc_convertion[i].name);
               priv->eformat = colorspace_fourcc_convertion[i].eformat;
               priv->func = colorspace_fourcc_convertion[i].func;
               if (colorspace_fourcc_convertion[i].force_height)
                 {
                    /* Some converters need an even line count; round down. */
                    priv->height = (priv->height >> 1) << 1;
                 }
               if (priv->ev)
                 priv->ev->kill_buffer = EINA_TRUE;
               return TRUE;
            }

        /* Samsung tiled ST12: the convert function is chosen later,
         * per-buffer, once we know whether the stream is multi-plane
         * (see the preroll/render callbacks). */
        if (fourcc == GST_MAKE_FOURCC('S', 'T', '1', '2'))
          {
             fprintf(stderr, "Found '%s'\n", "ST12");
             priv->eformat = EVAS_COLORSPACE_YCBCR420TM12601_PL;
             priv->samsung = EINA_TRUE;
             priv->func = NULL;
             if (priv->ev)
               {
                  priv->ev->samsung = EINA_TRUE;
                  priv->ev->kill_buffer = EINA_TRUE;
               }
             return TRUE;
          }
     }

   /* Fallback: let GStreamer parse the caps (covers the RGB variants). */
   INF("fallback code !");
   if (!gst_video_format_parse_caps(caps, &format, (int*) &priv->width, (int*) &priv->height))
     {
        ERR("Unable to parse caps.");
        return FALSE;
     }

   priv->source_height = priv->height;

   for (i = 0; colorspace_format_convertion[i].name != NULL; ++i)
     if (format == colorspace_format_convertion[i].format)
       {
          fprintf(stderr, "Found '%s'\n", colorspace_format_convertion[i].name);
          priv->eformat = colorspace_format_convertion[i].eformat;
          priv->func = colorspace_format_convertion[i].func;
          if (priv->ev)
            priv->ev->kill_buffer = EINA_FALSE;
          return TRUE;
       }

   ERR("unsupported : %d\n", format);
   return FALSE;
}
283
284 static gboolean
285 evas_video_sink_start(GstBaseSink* base_sink)
286 {
287    EvasVideoSinkPrivate* priv;
288    gboolean res = TRUE;
289
290    INF("sink start");
291
292    priv = EVAS_VIDEO_SINK(base_sink)->priv;
293    eina_lock_take(&priv->m);
294    if (!priv->o)
295      res = FALSE;
296    else
297      priv->unlocked = EINA_FALSE;
298    eina_lock_release(&priv->m);
299    return res;
300 }
301
302 static gboolean
303 evas_video_sink_stop(GstBaseSink* base_sink)
304 {
305    EvasVideoSinkPrivate* priv = EVAS_VIDEO_SINK(base_sink)->priv;
306
307    INF("sink stop");
308
309    unlock_buffer_mutex(priv);
310    return TRUE;
311 }
312
313 static gboolean
314 evas_video_sink_unlock(GstBaseSink* object)
315 {
316    EvasVideoSink* sink;
317
318    INF("sink unlock");
319
320    sink = EVAS_VIDEO_SINK(object);
321
322    unlock_buffer_mutex(sink->priv);
323
324    return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock,
325                                        (object), TRUE);
326 }
327
328 static gboolean
329 evas_video_sink_unlock_stop(GstBaseSink* object)
330 {
331    EvasVideoSink* sink;
332    EvasVideoSinkPrivate* priv;
333
334    sink = EVAS_VIDEO_SINK(object);
335    priv = sink->priv;
336
337    INF("sink unlock stop");
338
339    eina_lock_take(&priv->m);
340    priv->unlocked = FALSE;
341    eina_lock_release(&priv->m);
342
343    return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock_stop,
344                                        (object), TRUE);
345 }
346
347 static GstFlowReturn
348 evas_video_sink_preroll(GstBaseSink* bsink, GstBuffer* buffer)
349 {
350    Emotion_Gstreamer_Buffer *send;
351    EvasVideoSinkPrivate *priv;
352    EvasVideoSink *sink;
353
354    INF("sink preroll %p [%i]", GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
355
356    sink = EVAS_VIDEO_SINK(bsink);
357    priv = sink->priv;
358
359    if (GST_BUFFER_SIZE(buffer) <= 0 && !priv->samsung)
360      {
361         WRN("empty buffer");
362         return GST_FLOW_OK;
363      }
364
365    send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_TRUE);
366
367    if (send)
368      {
369         if (priv->samsung)
370           {
371              if (!priv->func)
372                {
373                   GstStructure *structure;
374                   GstCaps *caps;
375                   gboolean is_multiplane = FALSE;
376
377                   caps = GST_BUFFER_CAPS(buffer);
378                   structure = gst_caps_get_structure (caps, 0);
379                   gst_structure_get_boolean(structure, "multiplane", &is_multiplane);
380                   gst_caps_unref(caps);
381
382                   if (is_multiplane)
383                     priv->func = _evas_video_st12_multiplane;
384                   else
385                     priv->func = _evas_video_st12;
386                }
387
388              ecore_main_loop_thread_safe_call_async(evas_video_sink_samsung_main_render, send);
389           }
390         else
391           ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
392      }
393
394    return GST_FLOW_OK;
395 }
396
397 static GstFlowReturn
398 evas_video_sink_render(GstBaseSink* bsink, GstBuffer* buffer)
399 {
400    Emotion_Gstreamer_Buffer *send;
401    EvasVideoSinkPrivate *priv;
402    EvasVideoSink *sink;
403
404    INF("sink render %p", buffer);
405
406    sink = EVAS_VIDEO_SINK(bsink);
407    priv = sink->priv;
408
409    eina_lock_take(&priv->m);
410
411    if (priv->unlocked) {
412       ERR("LOCKED");
413       eina_lock_release(&priv->m);
414       return GST_FLOW_OK;
415    }
416
417    send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_FALSE);
418    if (!send) {
419       eina_lock_release(&priv->m);
420       return GST_FLOW_ERROR;
421    }
422
423    if (priv->samsung)
424      {
425         if (!priv->func)
426           {
427              GstStructure *structure;
428              GstCaps *caps;
429              gboolean is_multiplane = FALSE;
430
431              caps = GST_BUFFER_CAPS(buffer);
432              structure = gst_caps_get_structure (caps, 0);
433              gst_structure_get_boolean(structure, "multiplane", &is_multiplane);
434              gst_caps_unref(caps);
435
436              if (is_multiplane)
437                priv->func = _evas_video_st12_multiplane;
438              else
439                priv->func = _evas_video_st12;
440           }
441
442         ecore_main_loop_thread_safe_call_async(evas_video_sink_samsung_main_render, send);
443      }
444    else
445      ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
446
447    eina_condition_wait(&priv->c);
448    eina_lock_release(&priv->m);
449
450    return GST_FLOW_OK;
451 }
452
453 static void
454 _update_emotion_fps(Emotion_Gstreamer_Video *ev)
455 {
456    double tim;
457
458    if (!debug_fps) return ;
459
460    tim = ecore_time_get();
461    ev->frames++;
462
463    if (ev->rlapse == 0.0)
464      {
465         ev->rlapse = tim;
466         ev->flapse = ev->frames;
467      }
468    else if ((tim - ev->rlapse) >= 0.5)
469      {
470         printf("FRAME: %i, FPS: %3.1f\n",
471                ev->frames,
472                (ev->frames - ev->flapse) / (tim - ev->rlapse));
473         ev->rlapse = tim;
474         ev->flapse = ev->frames;
475      }
476 }
477
/* Main-loop callback pushing a Samsung ST12/TM12 frame into the Evas
 * object.  Runs on the Ecore main loop; must signal priv->c on the
 * non-preroll path so the streaming thread blocked in render() resumes. */
static void
evas_video_sink_samsung_main_render(void *data)
{
   Emotion_Gstreamer_Buffer *send;
   Emotion_Video_Stream *vstream;
   EvasVideoSinkPrivate *priv = NULL;
   GstBuffer* buffer;
   unsigned char *evas_data;
   const guint8 *gst_data;
   GstFormat fmt = GST_FORMAT_TIME;
   gint64 pos;
   Eina_Bool preroll = EINA_FALSE;
   int stride, elevation;
   Evas_Coord w, h;

   send = data;

   if (!send) goto exit_point;

   priv = send->sink;
   buffer = send->frame;
   preroll = send->preroll;

   /* frame after cleanup */
   if (!preroll && !send->ev->last_buffer)
     {
        /* priv is NULLed so the exit path neither signals nor returns early. */
        priv = NULL;
        goto exit_point;
     }

   if (!priv || !priv->o || priv->unlocked)
     goto exit_point;

   /* Drop any previously deferred frame; this one supersedes it. */
   if (send->ev->send)
     {
        emotion_gstreamer_buffer_free(send->ev->send);
        send->ev->send = NULL;
     }

   /* No stream info yet and not forced: defer the frame (ownership moves
    * to ev->send, so jump past the free at exit_point). */
   if (!send->ev->stream && !send->force)
     {
        send->ev->send = send;
        _emotion_frame_new(send->ev->obj);
        goto exit_stream;
     }

   _emotion_gstreamer_video_pipeline_parse(send->ev, EINA_TRUE);

   /* Getting stride to compute the right size and then fill the object properly */
   /* Y => [0] and UV in [1] */
   if (priv->func == _evas_video_st12_multiplane)
     {
        const GstMultiPlaneImageBuffer *mp_buf = (const GstMultiPlaneImageBuffer *) buffer;

        stride = mp_buf->stride[0];
        elevation = mp_buf->elevation[0];
        priv->width = mp_buf->width[0];
        priv->height = mp_buf->height[0];

        gst_data = (const guint8 *) mp_buf;
     }
   else
     {
        const SCMN_IMGB *imgb = (const SCMN_IMGB *) GST_BUFFER_MALLOCDATA(buffer);

        stride = imgb->stride[0];
        elevation = imgb->elevation[0];
        priv->width = imgb->width[0];
        priv->height = imgb->height[0];

        gst_data = (const guint8 *) imgb;
     }

   evas_object_geometry_get(priv->o, NULL, NULL, &w, &h);

   /* Fill ratios account for the padded stride/elevation vs visible size. */
   send->ev->fill.width = (double) stride / priv->width;
   send->ev->fill.height = (double) elevation / priv->height;

   evas_object_image_alpha_set(priv->o, 0);
   evas_object_image_colorspace_set(priv->o, priv->eformat);
   evas_object_image_size_set(priv->o, stride, elevation);

   _update_emotion_fps(send->ev);

   evas_data = evas_object_image_data_get(priv->o, 1);

   /* Convert/copy the frame into the Evas image buffer. */
   if (priv->func)
     priv->func(evas_data, gst_data, stride, elevation, elevation);
   else
     WRN("No way to decode %x colorspace !", priv->eformat);

   evas_object_image_data_set(priv->o, evas_data);
   evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
   evas_object_image_pixels_dirty_set(priv->o, 0);

   if (!preroll && send->ev->play_started)
     {
        _emotion_playback_started(send->ev->obj);
        send->ev->play_started = 0;
     }

   if (!send->force)
     {
        _emotion_frame_new(send->ev->obj);
     }

   vstream = eina_list_nth(send->ev->video_streams, send->ev->video_stream_nbr - 1);

   gst_element_query_position(send->ev->pipeline, &fmt, &pos);
   send->ev->position = (double)pos / (double)GST_SECOND;

   if (vstream)
     {
        vstream->width = priv->width;
        vstream->height = priv->height;

        _emotion_video_pos_update(send->ev->obj, send->ev->position, vstream->length_time);
     }

   send->ev->ratio = (double) priv->width / (double) priv->height;
   _emotion_frame_refill(send->ev->obj, send->ev->fill.width, send->ev->fill.height);
   _emotion_frame_resize(send->ev->obj, priv->width, priv->height, send->ev->ratio);

   /* Keep a reference to the displayed buffer until the next frame. */
   buffer = gst_buffer_ref(buffer);
   if (send->ev->last_buffer) gst_buffer_unref(send->ev->last_buffer);
   send->ev->last_buffer = buffer;

 exit_point:
   emotion_gstreamer_buffer_free(send);

 exit_stream:
   if (priv)
     {
        if (preroll || !priv->o) return;

        /* Wake the streaming thread waiting in evas_video_sink_render(). */
        if (!priv->unlocked)
          eina_condition_signal(&priv->c);
     }
}
617
/* Main-loop callback pushing a regular (non-Samsung) frame into the Evas
 * object.  Runs on the Ecore main loop; must signal priv->c on the
 * non-preroll path so the streaming thread blocked in render() resumes. */
static void
evas_video_sink_main_render(void *data)
{
   Emotion_Gstreamer_Buffer *send;
   Emotion_Gstreamer_Video *ev = NULL;
   Emotion_Video_Stream *vstream;
   EvasVideoSinkPrivate *priv = NULL;
   GstBuffer *buffer;
   unsigned char *evas_data;
   GstFormat fmt = GST_FORMAT_TIME;
   gint64 pos;
   Eina_Bool preroll = EINA_FALSE;

   send = data;

   if (!send) goto exit_point;

   priv = send->sink;
   buffer = send->frame;
   preroll = send->preroll;
   ev = send->ev;

   /* frame after cleanup */
   if (!preroll && !ev->last_buffer)
     {
        /* priv is NULLed so the exit path neither signals nor returns early. */
        priv = NULL;
        goto exit_point;
     }

   if (!priv || !priv->o || priv->unlocked)
     goto exit_point;

   /* Drop a previously deferred frame unless it is this very one. */
   if (ev->send && send != ev->send)
     {
        emotion_gstreamer_buffer_free(ev->send);
        ev->send = NULL;
     }

   /* No stream info yet and not forced: defer the frame (ownership moves
    * to ev->send, so jump past the free at exit_point). */
   if (!ev->stream && !send->force)
     {
        ev->send = send;
        _emotion_frame_new(ev->obj);
        evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
        goto exit_stream;
     }

   _emotion_gstreamer_video_pipeline_parse(ev, EINA_TRUE);

   INF("sink main render [%i, %i] (source height: %i)", priv->width, priv->height, priv->source_height);

   evas_object_image_alpha_set(priv->o, 0);
   evas_object_image_colorspace_set(priv->o, priv->eformat);
   evas_object_image_size_set(priv->o, priv->width, priv->height);

   evas_data = evas_object_image_data_get(priv->o, 1);

   /* Convert/copy the frame into the Evas image buffer. */
   if (priv->func)
     priv->func(evas_data, GST_BUFFER_DATA(buffer), priv->width, priv->source_height, priv->height);
   else
     WRN("No way to decode %x colorspace !", priv->eformat);

   evas_object_image_data_set(priv->o, evas_data);
   evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
   evas_object_image_pixels_dirty_set(priv->o, 0);

   _update_emotion_fps(ev);

   if (!preroll && ev->play_started)
     {
        _emotion_playback_started(ev->obj);
        ev->play_started = 0;
     }

   if (!send->force)
     {
        _emotion_frame_new(ev->obj);
     }

   gst_element_query_position(ev->pipeline, &fmt, &pos);
   ev->position = (double)pos / (double)GST_SECOND;

   vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);

   if (vstream)
     {
       vstream->width = priv->width;
       vstream->height = priv->height;
       _emotion_video_pos_update(ev->obj, ev->position, vstream->length_time);
     }

   ev->ratio = (double) priv->width / (double) priv->height;

   _emotion_frame_resize(ev->obj, priv->width, priv->height, ev->ratio);

   /* Keep a reference to the displayed buffer until the next frame. */
   buffer = gst_buffer_ref(buffer);
   if (ev->last_buffer) gst_buffer_unref(ev->last_buffer);
   ev->last_buffer = buffer;

 exit_point:
   emotion_gstreamer_buffer_free(send);

 exit_stream:
   if (priv)
     {
        if (preroll || !priv->o) return;

        /* Wake the streaming thread waiting in evas_video_sink_render(). */
        if (!priv->unlocked)
          eina_condition_signal(&priv->c);
     }
}
728
729 static void
730 unlock_buffer_mutex(EvasVideoSinkPrivate* priv)
731 {
732    priv->unlocked = EINA_TRUE;
733
734    eina_condition_signal(&priv->c);
735 }
736
/* Custom GClosure marshaller for the "repaint-requested" signal:
 * invokes a void (*)(gpointer obj, gpointer mini_object, gpointer data)
 * callback with the GstMiniObject (the buffer) as argument. */
static void
marshal_VOID__MINIOBJECT(GClosure * closure, GValue * return_value __UNUSED__,
                         guint n_param_values, const GValue * param_values,
                         gpointer invocation_hint __UNUSED__, gpointer marshal_data)
{
   typedef void (*marshalfunc_VOID__MINIOBJECT) (gpointer obj, gpointer arg1, gpointer data2);
   marshalfunc_VOID__MINIOBJECT callback;
   GCClosure *cc;
   gpointer data1, data2;

   cc = (GCClosure *) closure;

   /* Expect exactly: instance + one GstMiniObject parameter. */
   g_return_if_fail(n_param_values == 2);

   /* Standard glib convention: swap instance and user data when the
    * closure was created with g_signal_connect_swapped(). */
   if (G_CCLOSURE_SWAP_DATA(closure)) {
      data1 = closure->data;
      data2 = g_value_peek_pointer(param_values + 0);
   } else {
      data1 = g_value_peek_pointer(param_values + 0);
      data2 = closure->data;
   }
   callback = (marshalfunc_VOID__MINIOBJECT) (marshal_data ? marshal_data : cc->callback);

   callback(data1, gst_value_get_mini_object(param_values + 1), data2);
}
762
763 static void
764 evas_video_sink_class_init(EvasVideoSinkClass* klass)
765 {
766    GObjectClass* gobject_class;
767    GstBaseSinkClass* gstbase_sink_class;
768
769    gobject_class = G_OBJECT_CLASS(klass);
770    gstbase_sink_class = GST_BASE_SINK_CLASS(klass);
771
772    g_type_class_add_private(klass, sizeof(EvasVideoSinkPrivate));
773
774    gobject_class->set_property = evas_video_sink_set_property;
775    gobject_class->get_property = evas_video_sink_get_property;
776
777    g_object_class_install_property (gobject_class, PROP_EVAS_OBJECT,
778                                     g_param_spec_pointer ("evas-object", "Evas Object",
779                                                           "The Evas object where the display of the video will be done",
780                                                           G_PARAM_READWRITE));
781
782    g_object_class_install_property (gobject_class, PROP_WIDTH,
783                                     g_param_spec_int ("width", "Width",
784                                                       "The width of the video",
785                                                       0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
786
787    g_object_class_install_property (gobject_class, PROP_HEIGHT,
788                                     g_param_spec_int ("height", "Height",
789                                                       "The height of the video",
790                                                       0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
791    g_object_class_install_property (gobject_class, PROP_EV,
792                                     g_param_spec_pointer ("ev", "Emotion_Gstreamer_Video",
793                                                           "THe internal data of the emotion object",
794                                                           G_PARAM_READWRITE));
795
796    gobject_class->dispose = evas_video_sink_dispose;
797
798    gstbase_sink_class->set_caps = evas_video_sink_set_caps;
799    gstbase_sink_class->stop = evas_video_sink_stop;
800    gstbase_sink_class->start = evas_video_sink_start;
801    gstbase_sink_class->unlock = evas_video_sink_unlock;
802    gstbase_sink_class->unlock_stop = evas_video_sink_unlock_stop;
803    gstbase_sink_class->render = evas_video_sink_render;
804    gstbase_sink_class->preroll = evas_video_sink_preroll;
805
806    evas_video_sink_signals[REPAINT_REQUESTED] = g_signal_new("repaint-requested",
807                                                              G_TYPE_FROM_CLASS(klass),
808                                                              (GSignalFlags)(G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
809                                                              0,
810                                                              0,
811                                                              0,
812                                                              marshal_VOID__MINIOBJECT,
813                                                              G_TYPE_NONE, 1, GST_TYPE_BUFFER);
814 }
815
816 gboolean
817 gstreamer_plugin_init (GstPlugin * plugin)
818 {
819    return gst_element_register (plugin,
820                                 "emotion-sink",
821                                 GST_RANK_NONE,
822                                 EVAS_TYPE_VIDEO_SINK);
823 }
824
825 static void
826 _emotion_gstreamer_pause(void *data, Ecore_Thread *thread)
827 {
828    Emotion_Gstreamer_Video *ev = data;
829    gboolean res;
830
831    if (ecore_thread_check(thread) || !ev->pipeline) return ;
832
833    gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
834    res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
835    if (res == GST_STATE_CHANGE_NO_PREROLL)
836      {
837         gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
838         gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
839      }
840 }
841
842 static void
843 _emotion_gstreamer_cancel(void *data, Ecore_Thread *thread)
844 {
845    Emotion_Gstreamer_Video *ev = data;
846
847    ev->threads = eina_list_remove(ev->threads, thread);
848
849    if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
850
851    if (ev->in == ev->out && ev->delete_me)
852      em_shutdown(ev);
853 }
854
855 static void
856 _emotion_gstreamer_end(void *data, Ecore_Thread *thread)
857 {
858    Emotion_Gstreamer_Video *ev = data;
859
860    ev->threads = eina_list_remove(ev->threads, thread);
861
862    if (ev->play)
863      {
864         gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
865         ev->play_started = 1;
866      }
867
868    if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
869
870    if (ev->in == ev->out && ev->delete_me)
871      em_shutdown(ev);
872    else
873      _emotion_gstreamer_video_pipeline_parse(data, EINA_TRUE);
874 }
875
876 static void
877 _video_resize(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__,
878               Evas_Coord w, Evas_Coord h)
879 {
880 #ifdef HAVE_ECORE_X
881    Emotion_Gstreamer_Video *ev = data;
882
883    ecore_x_window_resize(ev->win, w, h);
884    fprintf(stderr, "resize: %i, %i\n", w, h);
885 #endif
886 }
887
888 static void
889 _video_move(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__,
890             Evas_Coord x, Evas_Coord y)
891 {
892 #ifdef HAVE_ECORE_X
893    Emotion_Gstreamer_Video *ev = data;
894    unsigned int pos[2];
895
896    fprintf(stderr, "move: %i, %i\n", x, y);
897    pos[0] = x; pos[1] = y;
898    ecore_x_window_prop_card32_set(ev->win, ECORE_X_ATOM_E_VIDEO_POSITION, pos, 2);
899 #endif
900 }
901
#if 0
/* Much better idea to always feed the XvImageSink and let him handle optimizing the rendering as we do */
/* NOTE(review): dead code kept for reference — pad block/unblock helpers
 * that used to detach/re-attach the Xv branch of the pipeline. */
static void
_block_pad_unlink_cb(GstPad *pad, gboolean blocked, gpointer user_data)
{
   if (blocked)
     {
        Emotion_Gstreamer_Video *ev = user_data;
        GstEvent *gev;

        /* Detach the Xv branch and push EOS so the sink drains cleanly. */
        gst_pad_unlink(ev->xvteepad, ev->xvpad);
        gev = gst_event_new_eos();
        gst_pad_send_event(ev->xvpad, gev);
        gst_pad_set_blocked_async(pad, FALSE, _block_pad_unlink_cb, NULL);
     }
}

static void
_block_pad_link_cb(GstPad *pad, gboolean blocked, gpointer user_data)
{
   if (blocked)
     {
        Emotion_Gstreamer_Video *ev = user_data;

        /* Re-attach the Xv branch and match the sink state to playback. */
        gst_pad_link(ev->xvteepad, ev->xvpad);
        if (ev->play)
          gst_element_set_state(ev->xvsink, GST_STATE_PLAYING);
        else
          gst_element_set_state(ev->xvsink, GST_STATE_PAUSED);
        gst_pad_set_blocked_async(pad, FALSE, _block_pad_link_cb, NULL);
     }
}
#endif
935
936 static void
937 _video_show(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__)
938 {
939 #ifdef HAVE_ECORE_X
940    Emotion_Gstreamer_Video *ev = data;
941
942    fprintf(stderr, "show xv\n");
943    ecore_x_window_show(ev->win);
944 #endif
945    /* gst_pad_set_blocked_async(ev->xvteepad, TRUE, _block_pad_link_cb, ev); */
946 }
947
948 static void
949 _video_hide(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__)
950 {
951 #ifdef HAVE_ECORE_X
952    Emotion_Gstreamer_Video *ev = data;
953
954    fprintf(stderr, "hide xv\n");
955    ecore_x_window_hide(ev->win);
956 #endif
957    /* gst_pad_set_blocked_async(ev->xvteepad, TRUE, _block_pad_unlink_cb, ev); */
958 }
959
960 static void
961 _video_update_pixels(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__)
962 {
963    Emotion_Gstreamer_Video *ev = data;
964    Emotion_Gstreamer_Buffer *send;
965
966    if (!ev->send) return ;
967
968    send = ev->send;
969    send->force = EINA_TRUE;
970    ev->send = NULL;
971    evas_video_sink_main_render(send);
972 }
973
974 static void
975 _image_resize(void *data, Evas *e __UNUSED__, Evas_Object *obj, void *event_info __UNUSED__)
976 {
977    Emotion_Gstreamer_Video *ev = data;
978    Evas_Coord width, height;
979    int image_area, src_area;
980    double ratio;
981
982    GstElementFactory *cfactory = NULL;
983    GstElement *convert = NULL, *filter = NULL, *queue = NULL;
984    GstPad *pad = NULL, *teepad = NULL;
985    GstCaps *caps = NULL;
986    Eina_List *l, *engines;
987    const char *ename, *engine = NULL;
988
989    evas_object_geometry_get(obj, NULL, NULL, &width, &height);
990    image_area = width * height;
991    src_area = ev->src_width * ev->src_height;
992    ratio = (double)image_area / (double)src_area;
993
994    // when an image is much smaller than original video size,
995    // add fimcconvert element to the pipeline
996    if (ratio < 0.8 && ev->stream && !ev->convert)
997      {
998         cfactory = gst_element_factory_find("fimcconvert");
999         if (!cfactory) return;
1000
1001         convert = gst_element_factory_create(cfactory, NULL);
1002         if (!convert) return;
1003
1004         // add capsfilter to limit size and formats based on the backend
1005         filter = gst_element_factory_make("capsfilter", "fimccapsfilter");
1006         if (!filter)
1007           {
1008              gst_object_unref(convert);
1009              return;
1010           }
1011
1012         engines = evas_render_method_list();
1013         EINA_LIST_FOREACH(engines, l, ename)
1014           {
1015              if (evas_render_method_lookup(ename) ==
1016                  evas_output_method_get(evas_object_evas_get(obj)))
1017                {
1018                   engine = ename;
1019                   break;
1020                }
1021           }
1022
1023         if (strstr(engine, "software") != NULL)
1024           {
1025              caps = gst_caps_new_simple("video/x-raw-rgb",
1026                                         "width", G_TYPE_INT, width,
1027                                         "height", G_TYPE_INT, height,
1028                                         NULL);
1029           }
1030         else if (strstr(engine, "gl") != NULL)
1031           {
1032              caps = gst_caps_new_simple("video/x-raw-yuv",
1033                                         "width", G_TYPE_INT, width,
1034                                         "height", G_TYPE_INT, height,
1035                                         NULL);
1036           }
1037         g_object_set(G_OBJECT(filter), "caps", caps, NULL);
1038         gst_caps_unref(caps);
1039
1040         // add new elements to the pipeline
1041         queue = gst_bin_get_by_name(GST_BIN(ev->sink), "equeue");
1042         gst_element_unlink(ev->tee, queue);
1043         gst_element_release_request_pad(ev->tee, ev->eteepad);
1044         gst_object_unref(ev->eteepad);
1045
1046         gst_bin_add_many(GST_BIN(ev->sink), convert, filter, NULL);
1047         gst_element_link_many(ev->tee, convert, filter, queue, NULL);
1048
1049         pad = gst_element_get_pad(convert, "sink");
1050         teepad = gst_element_get_request_pad(ev->tee, "src%d");
1051         gst_pad_link(teepad, pad);
1052         gst_object_unref(pad);
1053
1054         gst_element_sync_state_with_parent(convert);
1055         gst_element_sync_state_with_parent(filter);
1056
1057         ev->eteepad = teepad;
1058         ev->convert = convert;
1059         evas_render_method_list_free(engines);
1060
1061         INF("add fimcconvert element. video size: %dx%d. emotion object size: %dx%d",
1062             ev->src_width, ev->src_height, width, height);
1063      }
1064    // set size again to the capsfilter when the image is resized
1065    else if (ev->convert)
1066      {
1067         filter = gst_bin_get_by_name(GST_BIN(ev->sink), "fimccapsfilter");
1068
1069         engines = evas_render_method_list();
1070         EINA_LIST_FOREACH(engines, l, ename)
1071           {
1072              if (evas_render_method_lookup(ename) ==
1073                  evas_output_method_get(evas_object_evas_get(obj)))
1074                {
1075                   engine = ename;
1076                   break;
1077                }
1078           }
1079
1080         if (strstr(engine, "software") != NULL)
1081           {
1082              caps = gst_caps_new_simple("video/x-raw-rgb",
1083                                         "width", G_TYPE_INT, width,
1084                                         "height", G_TYPE_INT, height,
1085                                         NULL);
1086           }
1087         else if (strstr(engine, "gl") != NULL)
1088           {
1089              caps = gst_caps_new_simple("video/x-raw-yuv",
1090                                         "width", G_TYPE_INT, width,
1091                                         "height", G_TYPE_INT, height,
1092                                         NULL);
1093           }
1094
1095         g_object_set(G_OBJECT(filter), "caps", caps, NULL);
1096         gst_caps_unref(caps);
1097         evas_render_method_list_free(engines);
1098
1099         INF("set capsfilter size again:. video size: %dx%d. emotion object size: %dx%d",
1100             ev->src_width, ev->src_height, width, height);
1101      }
1102 }
1103
1104 GstElement *
1105 gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
1106                          Evas_Object *o,
1107                          const char *uri)
1108 {
1109    GstElement *playbin;
1110    GstElement *bin = NULL;
1111    GstElement *esink = NULL;
1112    GstElement *xvsink = NULL;
1113    GstElement *tee = NULL;
1114    GstElement *queue = NULL;
1115    Evas_Object *obj;
1116    GstPad *pad;
1117    GstPad *teepad;
1118    int flags;
1119    const char *launch;
1120 #if defined HAVE_ECORE_X && defined HAVE_XOVERLAY_H
1121    const char *engine = NULL;
1122    Eina_List *engines;
1123 #endif
1124
1125    obj = emotion_object_image_get(o);
1126    if (!obj)
1127      {
1128         ERR("Not Evas_Object specified");
1129         return NULL;
1130      }
1131
1132    if (!uri)
1133      return NULL;
1134
1135    launch = emotion_webcam_custom_get(uri);
1136    if (launch)
1137      {
1138         GError *error = NULL;
1139
1140         playbin = gst_parse_bin_from_description(launch, 1, &error);
1141         if (!playbin)
1142           {
1143              ERR("Unable to setup command : '%s' got error '%s'.", launch, error->message);
1144              g_error_free(error);
1145              return NULL;
1146           }
1147         if (error)
1148           {
1149              WRN("got recoverable error '%s' for command : '%s'.", error->message, launch);
1150              g_error_free(error);
1151           }
1152      }
1153    else
1154      {
1155         playbin = gst_element_factory_make("playbin2", "playbin");
1156         if (!playbin)
1157           {
1158              ERR("Unable to create 'playbin' GstElement.");
1159              return NULL;
1160           }
1161      }
1162
1163    bin = gst_bin_new(NULL);
1164    if (!bin)
1165      {
1166        ERR("Unable to create GstBin !");
1167        goto unref_pipeline;
1168      }
1169
1170    tee = gst_element_factory_make("tee", NULL);
1171    if (!tee)
1172      {
1173        ERR("Unable to create 'tee' GstElement.");
1174        goto unref_pipeline;
1175      }
1176
1177 #if defined HAVE_ECORE_X && defined HAVE_XOVERLAY_H
1178    if (window_manager_video)
1179      {
1180         Eina_List *l;
1181         const char *ename;
1182         
1183         engines = evas_render_method_list();
1184
1185         EINA_LIST_FOREACH(engines, l, ename)
1186           {
1187              if (evas_render_method_lookup(ename) == 
1188                  evas_output_method_get(evas_object_evas_get(obj)))
1189                {
1190                   engine = ename;
1191                   break;
1192                }
1193           }
1194
1195        if (ev->priority && engine && strstr(engine, "_x11") != NULL)
1196          {
1197            Ecore_Evas *ee;
1198            Evas_Coord x, y, w, h;
1199            Ecore_X_Window win;
1200            Ecore_X_Window parent;
1201
1202            evas_object_geometry_get(obj, &x, &y, &w, &h);
1203
1204            ee = ecore_evas_ecore_evas_get(evas_object_evas_get(obj));
1205
1206            if (w < 4) w = 4;
1207            if (h < 2) h = 2;
1208
1209            /* Here we really need to have the help of the window manager, this code will change when we update E17. */
1210            parent = (Ecore_X_Window) ecore_evas_window_get(ee);
1211            fprintf(stderr, "parent: %x\n", parent);
1212
1213            win = ecore_x_window_new(0, x, y, w, h);
1214            fprintf(stderr, "creating window: %x [%i, %i, %i, %i]\n", win, x, y, w, h);
1215            if (win)
1216              {
1217                Ecore_X_Window_State state[] = { ECORE_X_WINDOW_STATE_SKIP_TASKBAR, ECORE_X_WINDOW_STATE_SKIP_PAGER };
1218
1219                ecore_x_netwm_window_state_set(win, state, 2);
1220                ecore_x_window_hide(win);
1221                xvsink = gst_element_factory_make("xvimagesink", NULL);
1222                if (xvsink)
1223                  {
1224                    unsigned int pos[2];
1225
1226 #ifdef HAVE_X_OVERLAY_SET
1227                    gst_x_overlay_set_window_handle(GST_X_OVERLAY(xvsink), win);
1228 #else
1229                    gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(xvsink), win);
1230 #endif
1231                    ev->win = win;
1232
1233                    ecore_x_window_prop_card32_set(win, ECORE_X_ATOM_E_VIDEO_PARENT, &parent, 1);
1234
1235                    pos[0] = x; pos[1] = y;
1236                    ecore_x_window_prop_card32_set(win, ECORE_X_ATOM_E_VIDEO_POSITION, pos, 2);
1237                  }
1238                else
1239                  {
1240                    fprintf(stderr, "destroying win: %x\n", win);
1241                    ecore_x_window_free(win);
1242                  }
1243              }
1244          }
1245        evas_render_method_list_free(engines);
1246      }
1247 #else
1248 # warning "missing: ecore_x OR xoverlay"
1249 #endif
1250
1251    esink = gst_element_factory_make("emotion-sink", "sink");
1252    if (!esink)
1253      {
1254         ERR("Unable to create 'emotion-sink' GstElement.");
1255         goto unref_pipeline;
1256      }
1257
1258    g_object_set(G_OBJECT(esink), "evas-object", obj, NULL);
1259    g_object_set(G_OBJECT(esink), "ev", ev, NULL);
1260
1261    evas_object_image_pixels_get_callback_set(obj, NULL, NULL);
1262    evas_object_event_callback_add(obj, EVAS_CALLBACK_RESIZE, _image_resize, ev);
1263
1264    /* We need queue to force each video sink to be in its own thread */
1265    queue = gst_element_factory_make("queue", "equeue");
1266    if (!queue)
1267      {
1268         ERR("Unable to create 'queue' GstElement.");
1269         goto unref_pipeline;
1270      }
1271
1272    gst_bin_add_many(GST_BIN(bin), tee, queue, esink, NULL);
1273    gst_element_link_many(queue, esink, NULL);
1274
1275    /* link both sink to GstTee */
1276    pad = gst_element_get_pad(queue, "sink");
1277    teepad = gst_element_get_request_pad(tee, "src%d");
1278    gst_pad_link(teepad, pad);
1279    gst_object_unref(pad);
1280
1281    ev->eteepad = teepad;
1282
1283    if (xvsink)
1284      {
1285         GstElement *fakeeos;
1286
1287         queue = gst_element_factory_make("queue", "xvqueue");
1288         fakeeos = GST_ELEMENT(GST_BIN(g_object_new(GST_TYPE_FAKEEOS_BIN, "name", "eosbin", NULL)));
1289         if (queue && fakeeos)
1290           {
1291              GstPad *queue_pad;
1292
1293              gst_bin_add_many(GST_BIN(bin), fakeeos, NULL);
1294
1295              gst_bin_add_many(GST_BIN(fakeeos), queue, xvsink, NULL);
1296              gst_element_link_many(queue, xvsink, NULL);
1297              queue_pad = gst_element_get_pad(queue, "sink");
1298              gst_element_add_pad(fakeeos, gst_ghost_pad_new("sink", queue_pad));
1299
1300              pad = gst_element_get_pad(fakeeos, "sink");
1301              teepad = gst_element_get_request_pad(tee, "src%d");
1302              gst_pad_link(teepad, pad);
1303
1304              xvsink = fakeeos;
1305
1306              ev->xvteepad = teepad;
1307              ev->xvpad = pad;
1308           }
1309         else
1310           {
1311              if (fakeeos) gst_object_unref(fakeeos);
1312              if (queue) gst_object_unref(queue);
1313              gst_object_unref(xvsink);
1314              xvsink = NULL;
1315           }
1316      }
1317
1318    teepad = gst_element_get_pad(tee, "sink");
1319    gst_element_add_pad(bin, gst_ghost_pad_new("sink", teepad));
1320    gst_object_unref(teepad);
1321
1322 #define GST_PLAY_FLAG_NATIVE_VIDEO  (1 << 6)
1323 #define GST_PLAY_FLAG_DOWNLOAD      (1 << 7)
1324 #define GST_PLAY_FLAG_AUDIO         (1 << 1)
1325 #define GST_PLAY_FLAG_NATIVE_AUDIO  (1 << 5)
1326
1327    if (launch)
1328      {
1329         g_object_set(G_OBJECT(playbin), "sink", bin, NULL);
1330      }
1331    else
1332      {
1333         g_object_get(G_OBJECT(playbin), "flags", &flags, NULL);
1334         g_object_set(G_OBJECT(playbin), "flags", flags | GST_PLAY_FLAG_NATIVE_VIDEO | GST_PLAY_FLAG_DOWNLOAD | GST_PLAY_FLAG_NATIVE_AUDIO, NULL);
1335         g_object_set(G_OBJECT(playbin), "video-sink", bin, NULL);
1336         g_object_set(G_OBJECT(playbin), "uri", uri, NULL);
1337      }
1338
1339    evas_object_image_pixels_get_callback_set(obj, NULL, NULL);
1340
1341    ev->stream = EINA_TRUE;
1342
1343    if (xvsink)
1344      {
1345         Evas_Video_Surface video;
1346
1347         video.version = EVAS_VIDEO_SURFACE_VERSION;
1348         video.data = ev;
1349         video.parent = NULL;
1350         video.move = _video_move;
1351         video.resize = _video_resize;
1352         video.show = _video_show;
1353         video.hide = _video_hide;
1354         video.update_pixels = _video_update_pixels;
1355
1356         evas_object_image_video_surface_set(obj, &video);
1357         ev->stream = EINA_FALSE;
1358      }
1359
1360    eina_stringshare_replace(&ev->uri, uri);
1361    ev->pipeline = playbin;
1362    ev->sink = bin;
1363    ev->esink = esink;
1364    ev->xvsink = xvsink;
1365    ev->tee = tee;
1366    ev->threads = eina_list_append(ev->threads,
1367                                   ecore_thread_run(_emotion_gstreamer_pause,
1368                                                    _emotion_gstreamer_end,
1369                                                    _emotion_gstreamer_cancel,
1370                                                    ev));
1371
1372    /** NOTE: you need to set: GST_DEBUG_DUMP_DOT_DIR=/tmp EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp' */
1373    /** then call dot -Tpng -oemotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
1374    if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(playbin), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
1375
1376    return playbin;
1377
1378  unref_pipeline:
1379    gst_object_unref(xvsink);
1380    gst_object_unref(esink);
1381    gst_object_unref(tee);
1382    gst_object_unref(bin);
1383    gst_object_unref(playbin);
1384    return NULL;
1385 }