emotion: fix recursive call.
[profile/ivi/emotion.git] / src / modules / gstreamer / emotion_sink.c
1 #include "emotion_gstreamer.h"
2
3 static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE("sink",
4                                                                    GST_PAD_SINK, GST_PAD_ALWAYS,
5                                                                    GST_STATIC_CAPS(GST_VIDEO_CAPS_YUV("{ I420, YV12, YUY2, NV12, ST12, TM12 }") ";"
6                                                                                    GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_BGR ";" GST_VIDEO_CAPS_BGRA));
7
8 GST_DEBUG_CATEGORY_STATIC(evas_video_sink_debug);
9 #define GST_CAT_DEFAULT evas_video_sink_debug
10
11 enum {
12   REPAINT_REQUESTED,
13   LAST_SIGNAL
14 };
15
16 enum {
17   PROP_0,
18   PROP_EVAS_OBJECT,
19   PROP_WIDTH,
20   PROP_HEIGHT,
21   PROP_EV,
22   PROP_LAST
23 };
24
25 static guint evas_video_sink_signals[LAST_SIGNAL] = { 0, };
26
27 #define _do_init(bla)                                   \
28   GST_DEBUG_CATEGORY_INIT(evas_video_sink_debug,        \
29                           "emotion-sink",               \
30                           0,                            \
31                           "emotion video sink")
32
33 GST_BOILERPLATE_FULL(EvasVideoSink,
34                      evas_video_sink,
35                      GstVideoSink,
36                      GST_TYPE_VIDEO_SINK,
37                      _do_init);
38
39
40 static void unlock_buffer_mutex(EvasVideoSinkPrivate* priv);
41 static void evas_video_sink_main_render(void *data);
42 static void evas_video_sink_samsung_main_render(void *data);
43
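/* Copy a packed BGR/BGRx frame into the Evas ARGB8888 image data: for each of the
 * w * output_height pixels, copy the B, G and R bytes, force the alpha byte to 255
 * and advance the source by 'step' bytes per pixel (3 for BGR, 4 for BGRx). */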
44 static void
45 _evas_video_bgrx_step(unsigned char *evas_data, const unsigned char *gst_data,
46                       unsigned int w, unsigned int h __UNUSED__, unsigned int output_height, unsigned int step)
47 {
48    unsigned int x;
49    unsigned int y;
50
51    for (y = 0; y < output_height; ++y)
52      {
53         for (x = 0; x < w; x++)
54           {
55              evas_data[0] = gst_data[0];
56              evas_data[1] = gst_data[1];
57              evas_data[2] = gst_data[2];
58              evas_data[3] = 255;
59              gst_data += step;
60              evas_data += 4;
61           }
62      }
63 }
64
65 static void
66 _evas_video_bgr(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height)
67 {
68    _evas_video_bgrx_step(evas_data, gst_data, w, h, output_height, 3);
69 }
70
71 static void
72 _evas_video_bgrx(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height)
73 {
74    _evas_video_bgrx_step(evas_data, gst_data, w, h, output_height, 4);
75 }
76
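/* Same idea for BGRA input, except that each color channel is premultiplied by its
 * alpha value, since Evas expects premultiplied ARGB8888 data. */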
77 static void
78 _evas_video_bgra(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h __UNUSED__, unsigned int output_height)
79 {
80    unsigned int x;
81    unsigned int y;
82
83    for (y = 0; y < output_height; ++y)
84      {
85         unsigned char alpha;
86
87         for (x = 0; x < w; ++x)
88           {
89              alpha = gst_data[3];
90              evas_data[0] = (gst_data[0] * alpha) / 255;
91              evas_data[1] = (gst_data[1] * alpha) / 255;
92              evas_data[2] = (gst_data[2] * alpha) / 255;
93              evas_data[3] = alpha;
94              gst_data += 4;
95              evas_data += 4;
96           }
97      }
98 }
99
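/* For the planar YUV colorspaces Evas does not want the pixels copied: the image
 * data is a table of row pointers (all Y rows, then all Cb rows, then all Cr rows)
 * pointing straight into the GStreamer buffer. _evas_video_i420() fills that table
 * for I420 (Y, U, V plane order); _evas_video_yv12() below does the same with the
 * two chroma planes swapped. */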
100 static void
101 _evas_video_i420(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h __UNUSED__, unsigned int output_height)
102 {
103    const unsigned char **rows;
104    unsigned int i, j;
105    unsigned int rh;
106
107    rh = output_height;
108
109    rows = (const unsigned char **)evas_data;
110
111    for (i = 0; i < rh; i++)
112      rows[i] = &gst_data[i * w];
113
114    for (j = 0; j < (rh / 2); j++, i++)
115      rows[i] = &gst_data[h * w + j * (w / 2)];
116
117    for (j = 0; j < (rh / 2); j++, i++)
118      rows[i] = &gst_data[h * w + rh * (w / 4) + j * (w / 2)];
119 }
120
121 static void
122 _evas_video_yv12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h __UNUSED__, unsigned int output_height)
123 {
124    const unsigned char **rows;
125    unsigned int i, j;
126    unsigned int rh;
127
128    rh = output_height;
129
130    rows = (const unsigned char **)evas_data;
131
132    for (i = 0; i < rh; i++)
133      rows[i] = &gst_data[i * w];
134
135    for (j = 0; j < (rh / 2); j++, i++)
136      rows[i] = &gst_data[h * w + rh * (w / 4) + j * (w / 2)];
137
138    for (j = 0; j < (rh / 2); j++, i++)
139      rows[i] = &gst_data[h * w + j * (w / 2)];
140 }
141
142 static void
143 _evas_video_yuy2(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h __UNUSED__, unsigned int output_height)
144 {
145    const unsigned char **rows;
146    unsigned int i;
147
148    rows = (const unsigned char **)evas_data;
149
150    for (i = 0; i < output_height; i++)
151      rows[i] = &gst_data[i * w * 2];
152 }
153
154 static void
155 _evas_video_nv12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h __UNUSED__, unsigned int output_height)
156 {
157    const unsigned char **rows;
158    unsigned int i, j;
159    unsigned int rh;
160
161    rh = output_height;
162
163    rows = (const unsigned char **)evas_data;
164
165    for (i = 0; i < rh; i++)
166      rows[i] = &gst_data[i * w];
167
168    for (j = 0; j < (rh / 2); j++, i++)
169      rows[i] = &gst_data[rh * w + j * w];
170 }
171
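/* TM12/ST12 are Samsung tiled NV12 variants. Here the row pointer table appears to
 * be filled per 32-line tile band of the Y and chroma planes rather than per
 * scanline, matching the tiled Evas colorspace it is mapped to below
 * (EVAS_COLORSPACE_YCBCR420TM12601_PL). */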
172 static void
173 _evas_video_mt12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height __UNUSED__)
174 {
175    const unsigned char **rows;
176    unsigned int i;
177    unsigned int j;
178
179    rows = (const unsigned char **)evas_data;
180
181    for (i = 0; i < (h / 32) / 2; i++)
182      rows[i] = &gst_data[i * w * 2 * 32];
183
184    if ((h / 32) % 2)
185      {
186         rows[i] = &gst_data[i * w * 2 * 32];
187         i++;
188      }
189
190    for (j = 0; j < ((h / 2) / 32) / 2; ++j, ++i)
191      rows[i] = &gst_data[h * w + j * (w / 2) * 2 * 16];
192 }
193
194 static void
195 _evas_video_st12_multiplane(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height __UNUSED__)
196 {
197    const GstMultiPlaneImageBuffer *mp_buf = (const GstMultiPlaneImageBuffer *) gst_data;
198    const unsigned char **rows;
199    unsigned int i;
200    unsigned int j;
201
202    rows = (const unsigned char **)evas_data;
203
204    for (i = 0; i < (h / 32) / 2; i++)
205      rows[i] = mp_buf->uaddr[0] + i * w * 2 * 32;
206    if ((h / 32) % 2)
207      {
208         rows[i] = mp_buf->uaddr[0] + i * w * 2 * 32;
209         i++;
210      }
211
212    for (j = 0; j < ((h / 2) / 16) / 2; j++, i++)
213      {
214        rows[i] = mp_buf->uaddr[1] + j * w * 2 * 16 * 2;
215      }
216    if (((h / 2) / 16) % 2)
217      rows[i] = mp_buf->uaddr[1] + j * w * 2 * 16 * 2;
218 }
219
220 static void
221 _evas_video_st12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w __UNUSED__, unsigned int h, unsigned int output_height __UNUSED__)
222 {
223    const SCMN_IMGB *imgb = (const SCMN_IMGB *) gst_data;
224    const unsigned char **rows;
225    unsigned int i, j;
226
227    rows = (const unsigned char **)evas_data;
228
229    for (i = 0; i < (h / 32) / 2; i++)
230      rows[i] = imgb->uaddr[0] + i * imgb->stride[0] * 2 * 32;
231    if ((h / 32) % 2)
232      {
233         rows[i] = imgb->uaddr[0] + i * imgb->stride[0] * 2 * 32;
234         i++;
235      }
236
237    for (j = 0; j < (unsigned int) imgb->elevation[1] / 32 / 2; j++, i++)
238      rows[i] = imgb->uaddr[1] + j * imgb->stride[1] * 32 * 2;
239    if ((imgb->elevation[1] / 32) % 2)
240      rows[i++] = imgb->uaddr[1] + j * imgb->stride[1] * 32 * 2;
241 }
242
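/* Lookup tables used by evas_video_sink_set_caps() to map a GStreamer fourcc or
 * GstVideoFormat to the matching Evas colorspace and conversion callback. */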
243 static const struct {
244    const char *name;
245    guint32 fourcc;
246    Evas_Colorspace eformat;
247    Evas_Video_Convert_Cb func;
248    Eina_Bool force_height;
249 } colorspace_fourcc_convertion[] = {
250   { "I420", GST_MAKE_FOURCC('I', '4', '2', '0'), EVAS_COLORSPACE_YCBCR422P601_PL, _evas_video_i420, EINA_TRUE },
251   { "YV12", GST_MAKE_FOURCC('Y', 'V', '1', '2'), EVAS_COLORSPACE_YCBCR422P601_PL, _evas_video_yv12, EINA_TRUE },
252   { "YUY2", GST_MAKE_FOURCC('Y', 'U', 'Y', '2'), EVAS_COLORSPACE_YCBCR422601_PL, _evas_video_yuy2, EINA_FALSE },
253   { "NV12", GST_MAKE_FOURCC('N', 'V', '1', '2'), EVAS_COLORSPACE_YCBCR420NV12601_PL, _evas_video_nv12, EINA_TRUE },
254   { "TM12", GST_MAKE_FOURCC('T', 'M', '1', '2'), EVAS_COLORSPACE_YCBCR420TM12601_PL, _evas_video_mt12, EINA_TRUE }
255 };
256
257 static const struct {
258    const char *name;
259    GstVideoFormat format;
260    Evas_Colorspace eformat;
261    Evas_Video_Convert_Cb func;
262 } colorspace_format_convertion[] = {
263   { "BGR", GST_VIDEO_FORMAT_BGR, EVAS_COLORSPACE_ARGB8888, _evas_video_bgr },
264   { "BGRx", GST_VIDEO_FORMAT_BGRx, EVAS_COLORSPACE_ARGB8888, _evas_video_bgrx },
265   { "BGRA", GST_VIDEO_FORMAT_BGRA, EVAS_COLORSPACE_ARGB8888, _evas_video_bgra }
266 };
267
268 static void
269 evas_video_sink_base_init(gpointer g_class)
270 {
271    GstElementClass* element_class;
272
273    element_class = GST_ELEMENT_CLASS(g_class);
274    gst_element_class_add_pad_template(element_class, gst_static_pad_template_get(&sinktemplate));
275    gst_element_class_set_details_simple(element_class, "Evas video sink",
276                                         "Sink/Video", "Sends video data from a GStreamer pipeline to an Evas object",
277                                         "Vincent Torri <vtorri@univ-evry.fr>");
278 }
279
280 static void
281 evas_video_sink_init(EvasVideoSink* sink, EvasVideoSinkClass* klass __UNUSED__)
282 {
283    EvasVideoSinkPrivate* priv;
284
285    INF("sink init");
286    sink->priv = priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, EVAS_TYPE_VIDEO_SINK, EvasVideoSinkPrivate);
287    priv->o = NULL;
288    priv->width = 0;
289    priv->height = 0;
290    priv->func = NULL;
291    priv->eformat = EVAS_COLORSPACE_ARGB8888;
292    priv->samsung = EINA_FALSE;
293    eina_lock_new(&priv->m);
294    eina_condition_new(&priv->c, &priv->m);
295    priv->unlocked = EINA_FALSE;
296 }
297
298 /**** Object methods ****/
299 static void
300 _cleanup_priv(void *data, Evas *e __UNUSED__, Evas_Object *obj, void *event_info __UNUSED__)
301 {
302    EvasVideoSinkPrivate* priv;
303
304    priv = data;
305
306    eina_lock_take(&priv->m);
307    if (priv->o == obj)
308      priv->o = NULL;
309    eina_lock_release(&priv->m);
310 }
311
312 static void
313 evas_video_sink_set_property(GObject * object, guint prop_id,
314                              const GValue * value, GParamSpec * pspec)
315 {
316    EvasVideoSink* sink;
317    EvasVideoSinkPrivate* priv;
318
319    sink = EVAS_VIDEO_SINK (object);
320    priv = sink->priv;
321
322    switch (prop_id) {
323     case PROP_EVAS_OBJECT:
324        eina_lock_take(&priv->m);
325        evas_object_event_callback_del(priv->o, EVAS_CALLBACK_FREE, _cleanup_priv);
326        priv->o = g_value_get_pointer (value);
327        INF("sink set Evas_Object %p.", priv->o);
328        evas_object_event_callback_add(priv->o, EVAS_CALLBACK_FREE, _cleanup_priv, priv);
329        eina_lock_release(&priv->m);
330        break;
331     case PROP_EV:
332        INF("sink set ev.");
333        eina_lock_take(&priv->m);
334        priv->ev = g_value_get_pointer (value);
335        if (priv->ev)
336          priv->ev->samsung = EINA_TRUE;
337        eina_lock_release(&priv->m);
338        break;
339     default:
340        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
341        ERR("invalid property");
342        break;
343    }
344 }
345
346 static void
347 evas_video_sink_get_property(GObject * object, guint prop_id,
348                              GValue * value, GParamSpec * pspec)
349 {
350    EvasVideoSink* sink;
351    EvasVideoSinkPrivate* priv;
352
353    sink = EVAS_VIDEO_SINK (object);
354    priv = sink->priv;
355
356    switch (prop_id) {
357     case PROP_EVAS_OBJECT:
358        INF("sink get property.");
359        eina_lock_take(&priv->m);
360        g_value_set_pointer(value, priv->o);
361        eina_lock_release(&priv->m);
362        break;
363     case PROP_WIDTH:
364        INF("sink get width.");
365        eina_lock_take(&priv->m);
366        g_value_set_int(value, priv->width);
367        eina_lock_release(&priv->m);
368        break;
369     case PROP_HEIGHT:
370        INF("sink get height.");
371        eina_lock_take(&priv->m);
372        g_value_set_int (value, priv->height);
373        eina_lock_release(&priv->m);
374        break;
375     case PROP_EV:
376        INF("sink get ev.");
377        eina_lock_take(&priv->m);
378        g_value_set_pointer (value, priv->ev);
379        eina_lock_release(&priv->m);
380        break;
381     default:
382        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
383        ERR("invalide property");
384        break;
385    }
386 }
387
388 static void
389 evas_video_sink_dispose(GObject* object)
390 {
391    EvasVideoSink* sink;
392    EvasVideoSinkPrivate* priv;
393
394    INF("dispose.");
395
396    sink = EVAS_VIDEO_SINK(object);
397    priv = sink->priv;
398
399    eina_lock_free(&priv->m);
400    eina_condition_free(&priv->c);
401
402    G_OBJECT_CLASS(parent_class)->dispose(object);
403 }
404
405
406 /**** BaseSink methods ****/
407
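/* Caps negotiation: first try to match a YUV fourcc from the table above (with the
 * Samsung ST12 format handled as a special case), then fall back to
 * gst_video_format_parse_caps() for the RGB variants. */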
408 gboolean evas_video_sink_set_caps(GstBaseSink *bsink, GstCaps *caps)
409 {
410    EvasVideoSink* sink;
411    EvasVideoSinkPrivate* priv;
412    GstStructure *structure;
413    GstVideoFormat format;
414    guint32 fourcc;
415    unsigned int i;
416
417    sink = EVAS_VIDEO_SINK(bsink);
418    priv = sink->priv;
419
420    structure = gst_caps_get_structure(caps, 0);
421
422    if (gst_structure_get_int(structure, "width", (int*) &priv->width)
423        && gst_structure_get_int(structure, "height", (int*) &priv->height)
424        && gst_structure_get_fourcc(structure, "format", &fourcc))
425      {
426         priv->source_height = priv->height;
427
428         for (i = 0; i < sizeof (colorspace_fourcc_convertion) / sizeof (colorspace_fourcc_convertion[0]); ++i)
429           if (fourcc == colorspace_fourcc_convertion[i].fourcc)
430             {
431                fprintf(stderr, "Found '%s'\n", colorspace_fourcc_convertion[i].name);
432                priv->eformat = colorspace_fourcc_convertion[i].eformat;
433                priv->func = colorspace_fourcc_convertion[i].func;
434                if (colorspace_fourcc_convertion[i].force_height)
435                  {
436                     priv->height = (priv->height >> 1) << 1;
437                  }
438                if (priv->ev)
439                  priv->ev->kill_buffer = EINA_TRUE;
440                return TRUE;
441             }
442
443         if (fourcc == GST_MAKE_FOURCC('S', 'T', '1', '2'))
444           {
445              fprintf(stderr, "Found '%s'\n", "ST12");
446              priv->eformat = EVAS_COLORSPACE_YCBCR420TM12601_PL;
447              priv->samsung = EINA_TRUE;
448              priv->func = NULL;
449              if (priv->ev)
450                {
451                   priv->ev->samsung = EINA_TRUE;
452                   priv->ev->kill_buffer = EINA_TRUE;
453                }
454              return TRUE;
455           }
456      }
457
458    INF("fallback code !");
459    if (!gst_video_format_parse_caps(caps, &format, (int*) &priv->width, (int*) &priv->height))
460      {
461         ERR("Unable to parse caps.");
462         return FALSE;
463      }
464
465    priv->source_height = priv->height;
466
467    for (i = 0; i < sizeof (colorspace_format_convertion) / sizeof (colorspace_format_convertion[0]); ++i)
468      if (format == colorspace_format_convertion[i].format)
469        {
470           fprintf(stderr, "Found '%s'\n", colorspace_format_convertion[i].name);
471           priv->eformat = colorspace_format_convertion[i].eformat;
472           priv->func = colorspace_format_convertion[i].func;
473           if (priv->ev)
474             priv->ev->kill_buffer = EINA_FALSE;
475           return TRUE;
476        }
477
478    ERR("unsupported : %d\n", format);
479    return FALSE;
480 }
481
482 static gboolean
483 evas_video_sink_start(GstBaseSink* base_sink)
484 {
485    EvasVideoSinkPrivate* priv;
486    gboolean res = TRUE;
487
488    INF("sink start");
489
490    priv = EVAS_VIDEO_SINK(base_sink)->priv;
491    eina_lock_take(&priv->m);
492    if (!priv->o)
493      res = FALSE;
494    else
495      priv->unlocked = EINA_FALSE;
496    eina_lock_release(&priv->m);
497    return res;
498 }
499
500 static gboolean
501 evas_video_sink_stop(GstBaseSink* base_sink)
502 {
503    EvasVideoSinkPrivate* priv = EVAS_VIDEO_SINK(base_sink)->priv;
504
505    INF("sink stop");
506
507    unlock_buffer_mutex(priv);
508    return TRUE;
509 }
510
511 static gboolean
512 evas_video_sink_unlock(GstBaseSink* object)
513 {
514    EvasVideoSink* sink;
515
516    INF("sink unlock");
517
518    sink = EVAS_VIDEO_SINK(object);
519
520    unlock_buffer_mutex(sink->priv);
521
522    return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock,
523                                        (object), TRUE);
524 }
525
526 static gboolean
527 evas_video_sink_unlock_stop(GstBaseSink* object)
528 {
529    EvasVideoSink* sink;
530    EvasVideoSinkPrivate* priv;
531
532    sink = EVAS_VIDEO_SINK(object);
533    priv = sink->priv;
534
535    INF("sink unlock stop");
536
537    eina_lock_take(&priv->m);
538    priv->unlocked = EINA_FALSE;
539    eina_lock_release(&priv->m);
540
541    return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock_stop,
542                                        (object), TRUE);
543 }
544
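/* Preroll: hand the buffer over to the Evas main loop asynchronously so the frame
 * shows up while the pipeline is paused, lazily picking the ST12 converter from the
 * "multiplane" caps field. Unlike render() below, this does not wait for the main
 * loop to consume the frame. */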
545 static GstFlowReturn
546 evas_video_sink_preroll(GstBaseSink* bsink, GstBuffer* buffer)
547 {
548    Emotion_Gstreamer_Buffer *send;
549    EvasVideoSinkPrivate *priv;
550    EvasVideoSink *sink;
551
552    INF("sink preroll %p [%i]", GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
553
554    sink = EVAS_VIDEO_SINK(bsink);
555    priv = sink->priv;
556
557    if (GST_BUFFER_SIZE(buffer) <= 0 && !priv->samsung)
558      {
559         WRN("empty buffer");
560         return GST_FLOW_OK;
561      }
562
563    send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_TRUE);
564
565    if (send)
566      {
567         if (priv->samsung)
568           {
569              if (!priv->func)
570                {
571                   GstStructure *structure;
572                   GstCaps *caps;
573                   gboolean is_multiplane = FALSE;
574
575                   caps = gst_buffer_get_caps(buffer); /* take a reference so the unref below is balanced */
576                   structure = gst_caps_get_structure(caps, 0);
577                   gst_structure_get_boolean(structure, "multiplane", &is_multiplane);
578                   gst_caps_unref(caps);
579
580                   if (is_multiplane)
581                     priv->func = _evas_video_st12_multiplane;
582                   else
583                     priv->func = _evas_video_st12;
584                }
585
586              ecore_main_loop_thread_safe_call_async(evas_video_sink_samsung_main_render, send);
587           }
588         else
589           ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
590      }
591
592    return GST_FLOW_OK;
593 }
594
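/* Render: wrap the buffer in an Emotion_Gstreamer_Buffer, push it to the Evas main
 * loop and block on the condition until the main loop side has painted it (or
 * unlock() was requested), keeping the streaming thread in step with Evas. */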
595 static GstFlowReturn
596 evas_video_sink_render(GstBaseSink* bsink, GstBuffer* buffer)
597 {
598    Emotion_Gstreamer_Buffer *send;
599    EvasVideoSinkPrivate *priv;
600    EvasVideoSink *sink;
601
602    INF("sink render %p", buffer);
603
604    sink = EVAS_VIDEO_SINK(bsink);
605    priv = sink->priv;
606
607    eina_lock_take(&priv->m);
608
609    if (priv->unlocked) {
610       ERR("LOCKED");
611       eina_lock_release(&priv->m);
612       return GST_FLOW_OK;
613    }
614
615    send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_FALSE);
616    if (!send) {
617       eina_lock_release(&priv->m);
618       return GST_FLOW_ERROR;
619    }
620
621    if (priv->samsung)
622      {
623         if (!priv->func)
624           {
625              GstStructure *structure;
626              GstCaps *caps;
627              gboolean is_multiplane = FALSE;
628
629              caps = gst_buffer_get_caps(buffer); /* take a reference so the unref below is balanced */
630              structure = gst_caps_get_structure(caps, 0);
631              gst_structure_get_boolean(structure, "multiplane", &is_multiplane);
632              gst_caps_unref(caps);
633
634              if (is_multiplane)
635                priv->func = _evas_video_st12_multiplane;
636              else
637                priv->func = _evas_video_st12;
638           }
639
640         ecore_main_loop_thread_safe_call_async(evas_video_sink_samsung_main_render, send);
641      }
642    else
643      ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
644
645    eina_condition_wait(&priv->c);
646    eina_lock_release(&priv->m);
647
648    return GST_FLOW_OK;
649 }
650
651 static void
652 _update_emotion_fps(Emotion_Gstreamer_Video *ev)
653 {
654    double tim;
655
656    if (!debug_fps) return ;
657
658    tim = ecore_time_get();
659    ev->frames++;
660
661    if (ev->rlapse == 0.0)
662      {
663         ev->rlapse = tim;
664         ev->flapse = ev->frames;
665      }
666    else if ((tim - ev->rlapse) >= 0.5)
667      {
668         printf("FRAME: %i, FPS: %3.1f\n",
669                ev->frames,
670                (ev->frames - ev->flapse) / (tim - ev->rlapse));
671         ev->rlapse = tim;
672         ev->flapse = ev->frames;
673      }
674 }
675
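/* Runs in the Ecore main loop: pulls the stride/elevation and plane pointers out of
 * the Samsung buffer metadata (GstMultiPlaneImageBuffer or SCMN_IMGB), resizes the
 * Evas image to stride x elevation and lets the converter fill the row pointer
 * table, then updates the playback bookkeeping. */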
676 static void
677 evas_video_sink_samsung_main_render(void *data)
678 {
679    Emotion_Gstreamer_Buffer *send;
680    Emotion_Video_Stream *vstream;
681    EvasVideoSinkPrivate* priv;
682    GstBuffer* buffer;
683    unsigned char *evas_data;
684    const guint8 *gst_data;
685    GstFormat fmt = GST_FORMAT_TIME;
686    gint64 pos;
687    Eina_Bool preroll;
688    int stride, elevation;
689    Evas_Coord w, h;
690
691    send = data;
692
693    if (!send) return ;  /* nothing to free and 'priv'/'preroll' are not set yet */
694
695    priv = send->sink;
696    buffer = send->frame;
697    preroll = send->preroll;
698
699    if (!priv || !priv->o || priv->unlocked)
700      goto exit_point;
701
702    if (send->ev->send)
703      {
704         emotion_gstreamer_buffer_free(send->ev->send);
705         send->ev->send = NULL;
706      }
707
708    if (!send->ev->stream && !send->force)
709      {
710         send->ev->send = send;
711         goto exit_stream;
712      }
713
714    _emotion_gstreamer_video_pipeline_parse(send->ev, EINA_TRUE);
715
716    /* Get the stride so we can compute the right size and then fill the object properly */
717    /* Y lives in plane [0] and the interleaved UV data in plane [1] */
718    if (priv->func == _evas_video_st12_multiplane)
719      {
720         const GstMultiPlaneImageBuffer *mp_buf = (const GstMultiPlaneImageBuffer *) buffer;
721
722         stride = mp_buf->stride[0];
723         elevation = mp_buf->elevation[0];
724         priv->width = mp_buf->width[0];
725         priv->height = mp_buf->height[0];
726
727         gst_data = (const guint8 *) mp_buf;
728      }
729    else
730      {
731         const SCMN_IMGB *imgb = (const SCMN_IMGB *) GST_BUFFER_MALLOCDATA(buffer);
732
733         stride = imgb->stride[0];
734         elevation = imgb->elevation[0];
735         priv->width = imgb->width[0];
736         priv->height = imgb->height[0];
737
738         gst_data = (const guint8 *) imgb;
739      }
740
741    evas_object_geometry_get(priv->o, NULL, NULL, &w, &h);
742
743    send->ev->fill.width = (double) stride / priv->width;
744    send->ev->fill.height = (double) elevation / priv->height;
745
746    evas_object_image_alpha_set(priv->o, 0);
747    evas_object_image_colorspace_set(priv->o, priv->eformat);
748    evas_object_image_size_set(priv->o, stride, elevation);
749
750    _update_emotion_fps(send->ev);
751
752    evas_data = evas_object_image_data_get(priv->o, 1);
753
754    if (priv->func)
755      priv->func(evas_data, gst_data, stride, elevation, elevation);
756    else
757      WRN("No way to decode %x colorspace !", priv->eformat);
758
759    evas_object_image_data_set(priv->o, evas_data);
760    evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
761    evas_object_image_pixels_dirty_set(priv->o, 0);
762
763    if (!preroll && send->ev->play_started)
764      {
765         _emotion_playback_started(send->ev->obj);
766         send->ev->play_started = 0;
767      }
768
769    _emotion_frame_new(send->ev->obj);
770
771    vstream = eina_list_nth(send->ev->video_streams, send->ev->video_stream_nbr - 1);
772
773    gst_element_query_position(send->ev->pipeline, &fmt, &pos);
774    send->ev->position = (double)pos / (double)GST_SECOND;
775
776    if (vstream)
777      {
778         vstream->width = priv->width;
779         vstream->height = priv->height;
780
781         _emotion_video_pos_update(send->ev->obj, send->ev->position, vstream->length_time);
782      }
783
784    send->ev->ratio = (double) priv->width / (double) priv->height;
785    _emotion_frame_refill(send->ev->obj, send->ev->fill.width, send->ev->fill.height);
786    _emotion_frame_resize(send->ev->obj, priv->width, priv->height, send->ev->ratio);
787
788    buffer = gst_buffer_ref(buffer);
789    if (send->ev->last_buffer) gst_buffer_unref(send->ev->last_buffer);
790    send->ev->last_buffer = buffer;
791
792  exit_point:
793    emotion_gstreamer_buffer_free(send);
794
795  exit_stream:
796    if (preroll || !priv || !priv->o) return ;
797
798    if (!priv->unlocked)
799      eina_condition_signal(&priv->c);
800 }
801
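/* Runs in the Ecore main loop: converts or maps the frame into the Evas image
 * object, updates position/size/ratio bookkeeping and keeps a reference to the
 * buffer just displayed (so data Evas may still reference stays alive). */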
802 static void
803 evas_video_sink_main_render(void *data)
804 {
805    Emotion_Gstreamer_Buffer *send;
806    Emotion_Gstreamer_Video *ev = NULL;
807    Emotion_Video_Stream *vstream;
808    EvasVideoSinkPrivate* priv;
809    GstBuffer* buffer;
810    unsigned char *evas_data;
811    GstFormat fmt = GST_FORMAT_TIME;
812    gint64 pos;
813    Eina_Bool preroll;
814
815    send = data;
816
817    if (!send) return ;  /* nothing to free and 'priv'/'preroll' are not set yet */
818
819    priv = send->sink;
820    buffer = send->frame;
821    preroll = send->preroll;
822    ev = send->ev;
823
824    if (!priv || !priv->o || priv->unlocked)
825      goto exit_point;
826
827    if (!ev->stream && !send->force)
828      {
829        if (ev->send && send != ev->send)
830           emotion_gstreamer_buffer_free(ev->send);
831         ev->send = send;
832         evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
833         goto exit_stream;
834      }
835
836    _emotion_gstreamer_video_pipeline_parse(ev, EINA_TRUE);
837
838    INF("sink main render [%i, %i] (source height: %i)", priv->width, priv->height, priv->source_height);
839
840    evas_object_image_alpha_set(priv->o, 0);
841    evas_object_image_colorspace_set(priv->o, priv->eformat);
842    evas_object_image_size_set(priv->o, priv->width, priv->height);
843
844    evas_data = evas_object_image_data_get(priv->o, 1);
845
846    if (priv->func)
847      priv->func(evas_data, GST_BUFFER_DATA(buffer), priv->width, priv->source_height, priv->height);
848    else
849      WRN("No way to decode %x colorspace !", priv->eformat);
850
851    evas_object_image_data_set(priv->o, evas_data);
852    evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
853    evas_object_image_pixels_dirty_set(priv->o, 0);
854
855    if (!preroll && ev->play_started)
856      {
857         _emotion_playback_started(ev->obj);
858         ev->play_started = 0;
859      }
860
861    _emotion_frame_new(ev->obj);
862
863    gst_element_query_position(ev->pipeline, &fmt, &pos);
864    ev->position = (double)pos / (double)GST_SECOND;
865
866    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
867
868    if (vstream)
869      {
870        vstream->width = priv->width;
871        vstream->height = priv->height;
872        _emotion_video_pos_update(ev->obj, ev->position, vstream->length_time);
873      }
874
875    ev->ratio = (double) priv->width / (double) priv->height;
876
877    _emotion_frame_resize(ev->obj, priv->width, priv->height, ev->ratio);
878
879    buffer = gst_buffer_ref(buffer);
880    if (ev->last_buffer) gst_buffer_unref(ev->last_buffer);
881    ev->last_buffer = buffer;
882
883  exit_point:
884    emotion_gstreamer_buffer_free(send);
885
886  exit_stream:
887    if (preroll || !priv || !priv->o) return ;
888
889    if (!priv->unlocked)
890      eina_condition_signal(&priv->c);
891 }
892
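/* Mark the sink as unlocked and wake up a streaming thread that may be blocked in
 * evas_video_sink_render(); used by the stop() and unlock() vmethods. */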
893 static void
894 unlock_buffer_mutex(EvasVideoSinkPrivate* priv)
895 {
896    priv->unlocked = EINA_TRUE;
897
898    eina_condition_signal(&priv->c);
899 }
900
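/* Custom marshaller for the "repaint-requested" signal, whose single argument is a
 * GstMiniObject (the buffer) rather than a GObject. */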
901 static void
902 marshal_VOID__MINIOBJECT(GClosure * closure, GValue * return_value __UNUSED__,
903                          guint n_param_values, const GValue * param_values,
904                          gpointer invocation_hint __UNUSED__, gpointer marshal_data)
905 {
906    typedef void (*marshalfunc_VOID__MINIOBJECT) (gpointer obj, gpointer arg1, gpointer data2);
907    marshalfunc_VOID__MINIOBJECT callback;
908    GCClosure *cc;
909    gpointer data1, data2;
910
911    cc = (GCClosure *) closure;
912
913    g_return_if_fail(n_param_values == 2);
914
915    if (G_CCLOSURE_SWAP_DATA(closure)) {
916       data1 = closure->data;
917       data2 = g_value_peek_pointer(param_values + 0);
918    } else {
919       data1 = g_value_peek_pointer(param_values + 0);
920       data2 = closure->data;
921    }
922    callback = (marshalfunc_VOID__MINIOBJECT) (marshal_data ? marshal_data : cc->callback);
923
924    callback(data1, gst_value_get_mini_object(param_values + 1), data2);
925 }
926
927 static void
928 evas_video_sink_class_init(EvasVideoSinkClass* klass)
929 {
930    GObjectClass* gobject_class;
931    GstBaseSinkClass* gstbase_sink_class;
932
933    gobject_class = G_OBJECT_CLASS(klass);
934    gstbase_sink_class = GST_BASE_SINK_CLASS(klass);
935
936    g_type_class_add_private(klass, sizeof(EvasVideoSinkPrivate));
937
938    gobject_class->set_property = evas_video_sink_set_property;
939    gobject_class->get_property = evas_video_sink_get_property;
940
941    g_object_class_install_property (gobject_class, PROP_EVAS_OBJECT,
942                                     g_param_spec_pointer ("evas-object", "Evas Object",
943                                                           "The Evas object where the display of the video will be done",
944                                                           G_PARAM_READWRITE));
945
946    g_object_class_install_property (gobject_class, PROP_WIDTH,
947                                     g_param_spec_int ("width", "Width",
948                                                       "The width of the video",
949                                                       0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
950
951    g_object_class_install_property (gobject_class, PROP_HEIGHT,
952                                     g_param_spec_int ("height", "Height",
953                                                       "The height of the video",
954                                                       0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
955    g_object_class_install_property (gobject_class, PROP_EV,
956                                     g_param_spec_pointer ("ev", "Emotion_Gstreamer_Video",
957                                                           "The internal data of the emotion object",
958                                                           G_PARAM_READWRITE));
959
960    gobject_class->dispose = evas_video_sink_dispose;
961
962    gstbase_sink_class->set_caps = evas_video_sink_set_caps;
963    gstbase_sink_class->stop = evas_video_sink_stop;
964    gstbase_sink_class->start = evas_video_sink_start;
965    gstbase_sink_class->unlock = evas_video_sink_unlock;
966    gstbase_sink_class->unlock_stop = evas_video_sink_unlock_stop;
967    gstbase_sink_class->render = evas_video_sink_render;
968    gstbase_sink_class->preroll = evas_video_sink_preroll;
969
970    evas_video_sink_signals[REPAINT_REQUESTED] = g_signal_new("repaint-requested",
971                                                              G_TYPE_FROM_CLASS(klass),
972                                                              (GSignalFlags)(G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
973                                                              0,
974                                                              0,
975                                                              0,
976                                                              marshal_VOID__MINIOBJECT,
977                                                              G_TYPE_NONE, 1, GST_TYPE_BUFFER);
978 }
979
980 gboolean
981 gstreamer_plugin_init (GstPlugin * plugin)
982 {
983    return gst_element_register (plugin,
984                                 "emotion-sink",
985                                 GST_RANK_NONE,
986                                 EVAS_TYPE_VIDEO_SINK);
987 }
988
989 static void
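/* Ecore_Thread callbacks: _emotion_gstreamer_pause() brings the pipeline to PAUSED
 * off the main loop (falling back to PLAYING for live sources that return
 * GST_STATE_CHANGE_NO_PREROLL), while _end()/_cancel() below do the main-loop side
 * bookkeeping once the thread finishes or is cancelled. */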
990 _emotion_gstreamer_pause(void *data, Ecore_Thread *thread)
991 {
992    Emotion_Gstreamer_Video *ev = data;
993    GstStateChangeReturn res;
994
995    if (ecore_thread_check(thread) || !ev->pipeline) return ;
996
997    gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
998    res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
999    if (res == GST_STATE_CHANGE_NO_PREROLL)
1000      {
1001         gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
1002         gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
1003      }
1004 }
1005
1006 static void
1007 _emotion_gstreamer_cancel(void *data, Ecore_Thread *thread)
1008 {
1009    Emotion_Gstreamer_Video *ev = data;
1010
1011    ev->threads = eina_list_remove(ev->threads, thread);
1012
1013    if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
1014
1015    if (ev->in == ev->out && ev->delete_me)
1016      em_shutdown(ev);
1017 }
1018
1019 static void
1020 _emotion_gstreamer_end(void *data, Ecore_Thread *thread)
1021 {
1022    Emotion_Gstreamer_Video *ev = data;
1023
1024    ev->threads = eina_list_remove(ev->threads, thread);
1025
1026    if (ev->play)
1027      {
1028         gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
1029         ev->play_started = 1;
1030      }
1031
1032    if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
1033
1034    if (ev->in == ev->out && ev->delete_me)
1035      em_shutdown(ev);
1036    else
1037      _emotion_gstreamer_video_pipeline_parse(data, EINA_TRUE);
1038 }
1039
1040 static void
1041 _video_resize(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__,
1042               Evas_Coord w, Evas_Coord h)
1043 {
1044    Emotion_Gstreamer_Video *ev = data;
1045
1046 #ifdef HAVE_ECORE_X
1047    ecore_x_window_resize(ev->win, w, h);
1048 #endif
1049    fprintf(stderr, "resize: %i, %i\n", w, h);
1050 }
1051
1052 static void
1053 _video_move(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__,
1054             Evas_Coord x, Evas_Coord y)
1055 {
1056    Emotion_Gstreamer_Video *ev = data;
1057 #ifdef HAVE_ECORE_X
1058    unsigned int pos[2];
1059
1060    fprintf(stderr, "move: %i, %i\n", x, y);
1061    pos[0] = x; pos[1] = y;
1062    ecore_x_window_prop_card32_set(ev->win, ECORE_X_ATOM_E_VIDEO_POSITION, pos, 2);
1063 #endif
1064 }
1065
1066 #if 0
1067 /* It is much better to always feed the XvImageSink and let it handle optimizing the rendering, as we do now. */
1068 static void
1069 _block_pad_unlink_cb(GstPad *pad, gboolean blocked, gpointer user_data)
1070 {
1071    if (blocked)
1072      {
1073         Emotion_Gstreamer_Video *ev = user_data;
1074         GstEvent *gev;
1075
1076         gst_pad_unlink(ev->teepad, ev->xvpad);
1077         gev = gst_event_new_eos();
1078         gst_pad_send_event(ev->xvpad, gev);
1079         gst_pad_set_blocked_async(pad, FALSE, _block_pad_unlink_cb, NULL);
1080      }
1081 }
1082
1083 static void
1084 _block_pad_link_cb(GstPad *pad, gboolean blocked, gpointer user_data)
1085 {
1086    if (blocked)
1087      {
1088         Emotion_Gstreamer_Video *ev = user_data;
1089
1090         gst_pad_link(ev->teepad, ev->xvpad);
1091         if (ev->play)
1092           gst_element_set_state(ev->xvsink, GST_STATE_PLAYING);
1093         else
1094           gst_element_set_state(ev->xvsink, GST_STATE_PAUSED);
1095         gst_pad_set_blocked_async(pad, FALSE, _block_pad_link_cb, NULL);
1096      }
1097 }
1098 #endif
1099
1100 static void
1101 _video_show(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__)
1102 {
1103    Emotion_Gstreamer_Video *ev = data;
1104
1105    fprintf(stderr, "show xv\n");
1106 #ifdef HAVE_ECORE_X
1107    ecore_x_window_show(ev->win);
1108 #endif
1109    /* gst_pad_set_blocked_async(ev->teepad, TRUE, _block_pad_link_cb, ev); */
1110 }
1111
1112 static void
1113 _video_hide(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__)
1114 {
1115    Emotion_Gstreamer_Video *ev = data;
1116
1117    fprintf(stderr, "hide xv\n");
1118 #ifdef HAVE_ECORE_X
1119    ecore_x_window_hide(ev->win);
1120 #endif
1121    /* gst_pad_set_blocked_async(ev->teepad, TRUE, _block_pad_unlink_cb, ev); */
1122 }
1123
1124 static void
1125 _video_update_pixels(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__)
1126 {
1127    Emotion_Gstreamer_Video *ev = data;
1128    Emotion_Gstreamer_Buffer *send;
1129
1130    if (!ev->send) return ;
1131
1132    send = ev->send;
1133    send->force = EINA_TRUE;
1134    ev->send = NULL;
1135    evas_video_sink_main_render(send);
1136 }
1137
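/* Build the playback pipeline: a playbin2 whose video-sink is a custom bin of the
 * form tee -> queue -> emotion-sink, plus, when a usable X11 engine is detected, a
 * second branch tee -> fakeeos bin (queue -> xvimagesink) bound to a child X window
 * that the window manager is expected to position (see the comments below). */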
1138 GstElement *
1139 gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
1140                          Evas_Object *o,
1141                          const char *uri)
1142 {
1143    GstElement *playbin;
1144    GstElement *bin = NULL;
1145    GstElement *esink = NULL;
1146    GstElement *xvsink = NULL;
1147    GstElement *tee = NULL;
1148    GstElement *queue = NULL;
1149    Evas_Object *obj;
1150    GstPad *pad;
1151    GstPad *teepad;
1152    int flags;
1153 #if defined HAVE_ECORE_X && defined HAVE_XOVERLAY_H
1154    const char *engine;
1155    Eina_List *engines;
1156 #endif
1157
1158    obj = emotion_object_image_get(o);
1159    if (!obj)
1160      {
1161         ERR("Not Evas_Object specified");
1162         return NULL;
1163      }
1164
1165    if (!uri)
1166      return NULL;
1167
1168    playbin = gst_element_factory_make("playbin2", "playbin");
1169    if (!playbin)
1170      {
1171         ERR("Unable to create 'playbin' GstElement.");
1172         return NULL;
1173      }
1174
1175    bin = gst_bin_new(NULL);
1176    if (!bin)
1177      {
1178        ERR("Unable to create GstBin !");
1179        goto unref_pipeline;
1180      }
1181
1182    tee = gst_element_factory_make("tee", NULL);
1183    if (!tee)
1184      {
1185        ERR("Unable to create 'tee' GstElement.");
1186        goto unref_pipeline;
1187      }
1188
1189 #if defined HAVE_ECORE_X && defined HAVE_XOVERLAY_H
1190    if (window_manager_video)
1191      {
1192        engines = evas_render_method_list();
1193
1194        engine = eina_list_nth(engines, evas_output_method_get(evas_object_evas_get(obj)) - 1);
1195
1196        if (ev->priority && engine && strstr(engine, "_x11") != NULL)
1197          {
1198            Ecore_Evas *ee;
1199            Evas_Coord x, y, w, h;
1200            Ecore_X_Window win;
1201            Ecore_X_Window parent;
1202
1203            evas_object_geometry_get(obj, &x, &y, &w, &h);
1204
1205            ee = ecore_evas_ecore_evas_get(evas_object_evas_get(obj));
1206
1207            if (w < 4) w = 4;
1208            if (h < 2) h = 2;
1209
1210            /* Here we really need the help of the window manager; this code will change when we update E17. */
1211            parent = (Ecore_X_Window) ecore_evas_window_get(ee);
1212            fprintf(stderr, "parent: %x\n", parent);
1213
1214            win = ecore_x_window_new(0, x, y, w, h);
1215            fprintf(stderr, "creating window: %x [%i, %i, %i, %i]\n", win, x, y, w, h);
1216            if (win)
1217              {
1218                Ecore_X_Window_State state[] = { ECORE_X_WINDOW_STATE_SKIP_TASKBAR, ECORE_X_WINDOW_STATE_SKIP_PAGER };
1219
1220                ecore_x_netwm_window_state_set(win, state, 2);
1221                ecore_x_window_hide(win);
1222                xvsink = gst_element_factory_make("xvimagesink", NULL);
1223                if (xvsink)
1224                  {
1225                    unsigned int pos[2];
1226
1227 #ifdef HAVE_X_OVERLAY_SET
1228                    gst_x_overlay_set_window_handle(GST_X_OVERLAY(xvsink), win);
1229 #else
1230                    gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(xvsink), win);
1231 #endif
1232                    ev->win = win;
1233
1234                    ecore_x_window_prop_card32_set(win, ECORE_X_ATOM_E_VIDEO_PARENT, &parent, 1);
1235
1236                    pos[0] = x; pos[1] = y;
1237                    ecore_x_window_prop_card32_set(win, ECORE_X_ATOM_E_VIDEO_POSITION, pos, 2);
1238                  }
1239                else
1240                  {
1241                    fprintf(stderr, "destroying win: %x\n", win);
1242                    ecore_x_window_free(win);
1243                  }
1244              }
1245          }
1246        evas_render_method_list_free(engines);
1247      }
1248 #else
1249 # warning "no ecore_x or xoverlay"
1250 #endif
1251
1252    esink = gst_element_factory_make("emotion-sink", "sink");
1253    if (!esink)
1254      {
1255         ERR("Unable to create 'emotion-sink' GstElement.");
1256         goto unref_pipeline;
1257      }
1258
1259    g_object_set(G_OBJECT(esink), "evas-object", obj, NULL);
1260    g_object_set(G_OBJECT(esink), "ev", ev, NULL);
1261
1262    evas_object_image_pixels_get_callback_set(obj, NULL, NULL);
1263
1264    /* We need a queue to force each video sink to run in its own thread */
1265    queue = gst_element_factory_make("queue", NULL);
1266    if (!queue)
1267      {
1268         ERR("Unable to create 'queue' GstElement.");
1269         goto unref_pipeline;
1270      }
1271
1272    gst_bin_add_many(GST_BIN(bin), tee, queue, esink, NULL);
1273    gst_element_link_many(queue, esink, NULL);
1274
1275    /* link both sink to GstTee */
1276    pad = gst_element_get_pad(queue, "sink");
1277    teepad = gst_element_get_request_pad(tee, "src%d");
1278    gst_pad_link(teepad, pad);
1279    gst_object_unref(pad);
1280    gst_object_unref(teepad);
1281
1282    if (xvsink)
1283      {
1284         GstElement *fakeeos;
1285
1286         queue = gst_element_factory_make("queue", NULL);
1287         fakeeos = GST_ELEMENT(GST_BIN(g_object_new(GST_TYPE_FAKEEOS_BIN, "name", "eosbin", NULL)));
1288         if (queue && fakeeos)
1289           {
1290              GstPad *queue_pad;
1291
1292              gst_bin_add_many(GST_BIN(bin), fakeeos, NULL);
1293
1294              gst_bin_add_many(GST_BIN(fakeeos), queue, xvsink, NULL);
1295              gst_element_link_many(queue, xvsink, NULL);
1296              queue_pad = gst_element_get_pad(queue, "sink");
1297              gst_element_add_pad(fakeeos, gst_ghost_pad_new("sink", queue_pad));
1298
1299              pad = gst_element_get_pad(fakeeos, "sink");
1300              teepad = gst_element_get_request_pad(tee, "src%d");
1301              gst_pad_link(teepad, pad);
1302
1303              xvsink = fakeeos;
1304
1305              ev->teepad = teepad;
1306              ev->xvpad = pad;
1307           }
1308         else
1309           {
1310              if (fakeeos) gst_object_unref(fakeeos);
1311              if (queue) gst_object_unref(queue);
1312              gst_object_unref(xvsink);
1313              xvsink = NULL;
1314           }
1315      }
1316
1317    teepad = gst_element_get_pad(tee, "sink");
1318    gst_element_add_pad(bin, gst_ghost_pad_new("sink", teepad));
1319    gst_object_unref(teepad);
1320
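/* Local copy of the playbin2 GstPlayFlags bits we need (the enum is not exposed in
 * a public header in GStreamer 0.10). */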
1321 #define GST_PLAY_FLAG_NATIVE_VIDEO  (1 << 6)
1322 #define GST_PLAY_FLAG_DOWNLOAD      (1 << 7)
1323 #define GST_PLAY_FLAG_AUDIO         (1 << 1)
1324 #define GST_PLAY_FLAG_NATIVE_AUDIO  (1 << 5)
1325
1326    g_object_get(G_OBJECT(playbin), "flags", &flags, NULL);
1327    g_object_set(G_OBJECT(playbin), "flags", flags | GST_PLAY_FLAG_NATIVE_VIDEO | GST_PLAY_FLAG_DOWNLOAD | GST_PLAY_FLAG_NATIVE_AUDIO, NULL);
1328    g_object_set(G_OBJECT(playbin), "video-sink", bin, NULL);
1329    g_object_set(G_OBJECT(playbin), "uri", uri, NULL);
1330
1331    evas_object_image_pixels_get_callback_set(obj, NULL, NULL);
1332
1333    ev->stream = EINA_TRUE;
1334
1335    if (xvsink)
1336      {
1337         Evas_Video_Surface video;
1338
1339         video.version = EVAS_VIDEO_SURFACE_VERSION;
1340         video.data = ev;
1341         video.parent = NULL;
1342         video.move = _video_move;
1343         video.resize = _video_resize;
1344         video.show = _video_show;
1345         video.hide = _video_hide;
1346         video.update_pixels = _video_update_pixels;
1347
1348         evas_object_image_video_surface_set(obj, &video);
1349         ev->stream = EINA_FALSE;
1350      }
1351
1352    eina_stringshare_replace(&ev->uri, uri);
1353    ev->pipeline = playbin;
1354    ev->sink = bin;
1355    ev->esink = esink;
1356    ev->xvsink = xvsink;
1357    ev->tee = tee;
1358    ev->threads = eina_list_append(ev->threads,
1359                                   ecore_thread_run(_emotion_gstreamer_pause,
1360                                                    _emotion_gstreamer_end,
1361                                                    _emotion_gstreamer_cancel,
1362                                                    ev));
1363
1364    /** NOTE: set GST_DEBUG_DUMP_DOT_DIR=/tmp, EMOTION_ENGINE=gstreamer and EMOTION_GSTREAMER_DOT to a file name to dump the pipeline graph in '/tmp', */
1365    /** then render it with: dot -Tpng -o emotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
1366    if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(playbin), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
1367
1368    return playbin;
1369
1370  unref_pipeline: /* any of these may still be NULL depending on which step failed */
1371    if (xvsink) gst_object_unref(xvsink);
1372    if (esink) gst_object_unref(esink);
1373    if (tee) gst_object_unref(tee);
1374    if (bin) gst_object_unref(bin);
1375    gst_object_unref(playbin);
1376    return NULL;
1377 }