emotion: handle Samsung S5PC110 and S5PC210.
[profile/ivi/emotion.git] src/modules/gstreamer/emotion_sink.c
#include <Ecore.h>

#include "emotion_gstreamer.h"

static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE("sink",
                                                                   GST_PAD_SINK, GST_PAD_ALWAYS,
                                                                   GST_STATIC_CAPS(GST_VIDEO_CAPS_YUV("{ I420, YV12, YUY2, NV12, ST12, TM12 }") ";"
                                                                                   GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_BGR ";" GST_VIDEO_CAPS_BGRA));

GST_DEBUG_CATEGORY_STATIC(evas_video_sink_debug);
#define GST_CAT_DEFAULT evas_video_sink_debug

enum {
  REPAINT_REQUESTED,
  LAST_SIGNAL
};

enum {
  PROP_0,
  PROP_EVAS_OBJECT,
  PROP_WIDTH,
  PROP_HEIGHT,
  PROP_EV,
  PROP_LAST
};

static guint evas_video_sink_signals[LAST_SIGNAL] = { 0, };

#define _do_init(bla)                                   \
  GST_DEBUG_CATEGORY_INIT(evas_video_sink_debug,        \
                          "emotion-sink",               \
                          0,                            \
                          "emotion video sink")

GST_BOILERPLATE_FULL(EvasVideoSink,
                     evas_video_sink,
                     GstVideoSink,
                     GST_TYPE_VIDEO_SINK,
                     _do_init);


static void unlock_buffer_mutex(EvasVideoSinkPrivate* priv);
static void evas_video_sink_main_render(void *data);
static void evas_video_sink_samsung_main_render(void *data);

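/* Packed RGB converters: copy GStreamer BGR/BGRx data into the Evas
 * ARGB8888 buffer, forcing the alpha byte to 255. 'step' is the source
 * pixel size in bytes (3 for BGR, 4 for BGRx). */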
static void
_evas_video_bgrx_step(unsigned char *evas_data, const unsigned char *gst_data,
                      unsigned int w, unsigned int h, unsigned int step)
{
   unsigned int x;
   unsigned int y;

   for (y = 0; y < h; ++y)
     {
        for (x = 0; x < w; x++)
          {
             evas_data[0] = gst_data[0];
             evas_data[1] = gst_data[1];
             evas_data[2] = gst_data[2];
             evas_data[3] = 255;
             gst_data += step;
             evas_data += 4;
          }
     }
}

static void
_evas_video_bgr(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h)
{
   _evas_video_bgrx_step(evas_data, gst_data, w, h, 3);
}

static void
_evas_video_bgrx(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h)
{
   _evas_video_bgrx_step(evas_data, gst_data, w, h, 4);
}

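/* BGRA source: Evas expects premultiplied ARGB8888, so each color channel
 * is multiplied by the alpha value while copying. */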
static void
_evas_video_bgra(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h)
{
   unsigned int x;
   unsigned int y;

   for (y = 0; y < h; ++y)
     {
        unsigned char alpha;

        for (x = 0; x < w; ++x)
          {
             alpha = gst_data[3];
             evas_data[0] = (gst_data[0] * alpha) / 255;
             evas_data[1] = (gst_data[1] * alpha) / 255;
             evas_data[2] = (gst_data[2] * alpha) / 255;
             evas_data[3] = alpha;
             gst_data += 4;
             evas_data += 4;
          }
     }
}

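/* Planar YUV converters: for the EVAS_COLORSPACE_YCBCR* colorspaces Evas
 * takes a table of row pointers rather than pixel data, so the functions
 * below only fill that table with pointers into the GStreamer buffer;
 * no pixels are copied. */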
static void
_evas_video_i420(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h)
{
   const unsigned char **rows;
   unsigned int i;

   rows = (const unsigned char **)evas_data;

   for (i = 0; i < h; i++)
     rows[i] = &gst_data[i * w];

   rows += h;
   for (i = 0; i < (h / 2); i++)
     rows[i] = &gst_data[h * w + i * (w / 2)];

   rows += h / 2;
   for (i = 0; i < (h / 2); i++)
     rows[i] = &gst_data[h * w + h * (w / 4) + i * (w / 2)];
}

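/* YV12 is the same layout as I420 with the U and V planes swapped, hence
 * the reversed plane offsets below. */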
static void
_evas_video_yv12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h)
{
   const unsigned char **rows;
   unsigned int i;

   rows = (const unsigned char **)evas_data;

   for (i = 0; i < h; i++)
     rows[i] = &gst_data[i * w];

   rows += h;
   for (i = 0; i < (h / 2); i++)
     rows[i] = &gst_data[h * w + h * (w / 4) + i * (w / 2)];

   rows += h / 2;
   for (i = 0; i < (h / 2); i++)
     rows[i] = &gst_data[h * w + i * (w / 2)];
}

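/* YUY2 is packed 4:2:2 (2 bytes per pixel), so a single table of h row
 * pointers is enough. */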
static void
_evas_video_yuy2(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h)
{
   const unsigned char **rows;
   unsigned int i;

   rows = (const unsigned char **)evas_data;

   for (i = 0; i < h; i++)
     rows[i] = &gst_data[i * w * 2];
}

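/* NV12: a full-size Y plane followed by a half-height plane of interleaved
 * UV samples; one pointer per Y row, then one per UV row. */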
static void
_evas_video_nv12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h)
{
   const unsigned char **rows;
   unsigned int i, j;

   rows = (const unsigned char **)evas_data;

   for (i = 0; i < h; i++)
     rows[i] = &gst_data[i * w];

   rows += h;
   for (j = 0; j < (h / 2); j++, i++)
     rows[i] = &gst_data[h * w + j * w];
}

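/* TM12/ST12 appear to be the tiled NV12 variants produced by the Samsung
 * S5PC110/S5PC210 codecs: planes are laid out as 32-line tiles rather than
 * scanlines, so each table entry below points at a block of tile rows
 * (entries are w * 2 * 32 bytes apart for luma) instead of a single pixel
 * row. */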
static void
_evas_video_mt12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h)
{
   const unsigned char **rows;
   unsigned int i;
   unsigned int j;

   rows = (const unsigned char **)evas_data;

   for (i = 0; i < (h / 32) / 2; i++)
     rows[i] = &gst_data[i * w * 2 * 32];

   if ((h / 32) % 2)
     {
        rows[i] = &gst_data[i * w * 2 * 32];
        i++;
     }

   rows += h;
   for (j = 0; j < ((h / 2) / 32) / 2; ++j, ++i)
     rows[i] = &gst_data[h * w + j * (w / 2) * 2 * 16];
}

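/* Same tiled layout, but here the decoder hands over a
 * GstMultiPlaneImageBuffer whose uaddr[] array holds the address of each
 * plane (uaddr[0] = Y, uaddr[1] = interleaved UV). */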
static void
_evas_video_st12_multiplane(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h)
{
   const GstMultiPlaneImageBuffer *mp_buf = (const GstMultiPlaneImageBuffer *) gst_data;
   const unsigned char **rows;
   unsigned int i;
   unsigned int j;

   rows = (const unsigned char **)evas_data;

   for (i = 0; i < (h / 32) / 2; i++)
     rows[i] = mp_buf->uaddr[0] + i * w * 2 * 32;
   if ((h / 32) % 2)
     {
        rows[i] = mp_buf->uaddr[0] + i * w * 2 * 32;
        i++;
     }

   for (j = 0; j < ((h / 2) / 16) / 2; j++, i++)
     rows[i] = mp_buf->uaddr[1] + j * w * 2 * 16;
   if (((h / 2) / 16) % 2)
     rows[i] = mp_buf->uaddr[1] + j * w * 2 * 16; /* chroma lives in uaddr[1], not uaddr[0] */
}

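/* Single-plane variant: the buffer carries a Samsung SCMN_IMGB descriptor
 * (per-plane uaddr[], stride[], width[], height[] and elevation[]) instead
 * of raw pixels. */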
static void
_evas_video_st12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h)
{
   const SCMN_IMGB *imgb = (const SCMN_IMGB *) gst_data;
   const unsigned char **rows;
   unsigned int i, j;

   rows = (const unsigned char **)evas_data;

   for (i = 0; i < (h / 32) / 2; i++)
     rows[i] = imgb->uaddr[0] + i * w * 2 * 32;
   if ((h / 32) % 2)
     {
        rows[i] = imgb->uaddr[0] + i * w * 2 * 32;
        i++;
     }

   for (j = 0; j < ((h / 2) / 16) / 2; j++, i++)
     rows[i] = imgb->uaddr[1] + j * w * 2 * 16;
   if (((h / 2) / 16) % 2)
     rows[i] = imgb->uaddr[1] + j * w * 2 * 16;
}

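/* Lookup tables mapping negotiated caps onto an Evas colorspace and the
 * matching converter above: fourccs for video/x-raw-yuv, GstVideoFormat
 * values for the RGB fallback path. */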
static const struct {
   guint32 fourcc;
   Evas_Colorspace eformat;
   Evas_Video_Convert_Cb func;
} colorspace_fourcc_convertion[] = {
  { GST_MAKE_FOURCC('I', '4', '2', '0'), EVAS_COLORSPACE_YCBCR422P601_PL, _evas_video_i420 },
  { GST_MAKE_FOURCC('Y', 'V', '1', '2'), EVAS_COLORSPACE_YCBCR422P601_PL, _evas_video_yv12 },
  { GST_MAKE_FOURCC('Y', 'U', 'Y', '2'), EVAS_COLORSPACE_YCBCR422601_PL, _evas_video_yuy2 },
  { GST_MAKE_FOURCC('N', 'V', '1', '2'), EVAS_COLORSPACE_YCBCR420NV12601_PL, _evas_video_nv12 },
  { GST_MAKE_FOURCC('T', 'M', '1', '2'), EVAS_COLORSPACE_YCBCR420TM12601_PL, _evas_video_mt12 }
};

static const struct {
   GstVideoFormat format;
   Evas_Colorspace eformat;
   Evas_Video_Convert_Cb func;
} colorspace_format_convertion[] = {
  { GST_VIDEO_FORMAT_BGR, EVAS_COLORSPACE_ARGB8888, _evas_video_bgr },
  { GST_VIDEO_FORMAT_BGRx, EVAS_COLORSPACE_ARGB8888, _evas_video_bgrx },
  { GST_VIDEO_FORMAT_BGRA, EVAS_COLORSPACE_ARGB8888, _evas_video_bgra }
};

static void
evas_video_sink_base_init(gpointer g_class)
{
   GstElementClass* element_class;

   element_class = GST_ELEMENT_CLASS(g_class);
   gst_element_class_add_pad_template(element_class, gst_static_pad_template_get(&sinktemplate));
   gst_element_class_set_details_simple(element_class, "Evas video sink",
                                        "Sink/Video", "Sends video data from a GStreamer pipeline to an Evas object",
                                        "Vincent Torri <vtorri@univ-evry.fr>");
}

static void
evas_video_sink_init(EvasVideoSink* sink, EvasVideoSinkClass* klass __UNUSED__)
{
   EvasVideoSinkPrivate* priv;

   INF("sink init");
   sink->priv = priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, EVAS_TYPE_VIDEO_SINK, EvasVideoSinkPrivate);
   priv->o = NULL;
   priv->last_buffer = NULL;
   priv->width = 0;
   priv->height = 0;
   priv->func = NULL;
   priv->eformat = EVAS_COLORSPACE_ARGB8888;
   priv->samsung = EINA_FALSE;
   eina_lock_new(&priv->m);
   eina_condition_new(&priv->c, &priv->m);
   priv->unlocked = EINA_FALSE;
}


/**** Object methods ****/
static void
_cleanup_priv(void *data, Evas *e __UNUSED__, Evas_Object *obj, void *event_info __UNUSED__)
{
   EvasVideoSinkPrivate* priv;

   priv = data;

   eina_lock_take(&priv->m);
   if (priv->o == obj)
     priv->o = NULL;
   eina_lock_release(&priv->m);
}

static void
evas_video_sink_set_property(GObject * object, guint prop_id,
                             const GValue * value, GParamSpec * pspec)
{
   EvasVideoSink* sink;
   EvasVideoSinkPrivate* priv;

   sink = EVAS_VIDEO_SINK (object);
   priv = sink->priv;

   switch (prop_id) {
    case PROP_EVAS_OBJECT:
       eina_lock_take(&priv->m);
       evas_object_event_callback_del(priv->o, EVAS_CALLBACK_FREE, _cleanup_priv);
       priv->o = g_value_get_pointer (value);
       INF("sink set Evas_Object %p.", priv->o);
       evas_object_event_callback_add(priv->o, EVAS_CALLBACK_FREE, _cleanup_priv, priv);
       eina_lock_release(&priv->m);
       break;
    case PROP_EV:
       INF("sink set ev.");
       eina_lock_take(&priv->m);
       priv->ev = g_value_get_pointer (value);
       eina_lock_release(&priv->m);
       break;
    default:
       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
       ERR("invalid property");
       break;
   }
}

static void
evas_video_sink_get_property(GObject * object, guint prop_id,
                             GValue * value, GParamSpec * pspec)
{
   EvasVideoSink* sink;
   EvasVideoSinkPrivate* priv;

   sink = EVAS_VIDEO_SINK (object);
   priv = sink->priv;

   switch (prop_id) {
    case PROP_EVAS_OBJECT:
       INF("sink get property.");
       eina_lock_take(&priv->m);
       g_value_set_pointer(value, priv->o);
       eina_lock_release(&priv->m);
       break;
    case PROP_WIDTH:
       INF("sink get width.");
       eina_lock_take(&priv->m);
       g_value_set_int(value, priv->width);
       eina_lock_release(&priv->m);
       break;
    case PROP_HEIGHT:
       INF("sink get height.");
       eina_lock_take(&priv->m);
       g_value_set_int (value, priv->height);
       eina_lock_release(&priv->m);
       break;
    case PROP_EV:
       INF("sink get ev.");
       eina_lock_take(&priv->m);
       g_value_set_pointer (value, priv->ev);
       eina_lock_release(&priv->m);
       break;
    default:
       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
       ERR("invalid property");
       break;
   }
}

static void
evas_video_sink_dispose(GObject* object)
{
   EvasVideoSink* sink;
   EvasVideoSinkPrivate* priv;

   INF("dispose.");

   sink = EVAS_VIDEO_SINK(object);
   priv = sink->priv;

   eina_lock_free(&priv->m);
   eina_condition_free(&priv->c);

   if (priv->last_buffer) {
      gst_buffer_unref(priv->last_buffer);
      priv->last_buffer = NULL;
   }

   G_OBJECT_CLASS(parent_class)->dispose(object);
}


/**** BaseSink methods ****/

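/* Caps negotiation: try the YUV fourcc table first, then the Samsung ST12
 * special case (the actual converter is picked later in preroll/render from
 * the buffer caps), and finally let gst_video_format_parse_caps() handle
 * the RGB formats. */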
gboolean evas_video_sink_set_caps(GstBaseSink *bsink, GstCaps *caps)
{
   EvasVideoSink* sink;
   EvasVideoSinkPrivate* priv;
   GstStructure *structure;
   GstVideoFormat format;
   guint32 fourcc;
   unsigned int i;

   sink = EVAS_VIDEO_SINK(bsink);
   priv = sink->priv;

   structure = gst_caps_get_structure(caps, 0);

   if (gst_structure_get_int(structure, "width", &priv->width)
       && gst_structure_get_int(structure, "height", &priv->height)
       && gst_structure_get_fourcc(structure, "format", &fourcc))
     {
        for (i = 0; i < sizeof (colorspace_fourcc_convertion) / sizeof (colorspace_fourcc_convertion[0]); ++i)
          if (fourcc == colorspace_fourcc_convertion[i].fourcc)
            {
               priv->eformat = colorspace_fourcc_convertion[i].eformat;
               priv->func = colorspace_fourcc_convertion[i].func;
               return TRUE;
            }

        if (fourcc == GST_MAKE_FOURCC('S', 'T', '1', '2'))
          {
             priv->eformat = EVAS_COLORSPACE_YCBCR420TM12601_PL;
             priv->samsung = EINA_TRUE;
             priv->func = NULL;
             /* accept here: the generic parser below does not know ST12 */
             return TRUE;
          }
     }

   INF("fallback code !");
   if (!gst_video_format_parse_caps(caps, &format, &priv->width, &priv->height))
     {
        ERR("Unable to parse caps.");
        return FALSE;
     }

   for (i = 0; i < sizeof (colorspace_format_convertion) / sizeof (colorspace_format_convertion[0]); ++i)
     if (format == colorspace_format_convertion[i].format)
       {
          priv->eformat = colorspace_format_convertion[i].eformat;
          priv->func = colorspace_format_convertion[i].func;
          return TRUE;
       }

   ERR("unsupported format: %d", format);
   return FALSE;
}

static gboolean
evas_video_sink_start(GstBaseSink* base_sink)
{
   EvasVideoSinkPrivate* priv;
   gboolean res = TRUE;

   INF("sink start");

   priv = EVAS_VIDEO_SINK(base_sink)->priv;
   eina_lock_take(&priv->m);
   if (!priv->o)
     res = FALSE;
   else
     priv->unlocked = EINA_FALSE;
   eina_lock_release(&priv->m);
   return res;
}

static gboolean
evas_video_sink_stop(GstBaseSink* base_sink)
{
   EvasVideoSinkPrivate* priv = EVAS_VIDEO_SINK(base_sink)->priv;

   INF("sink stop");

   unlock_buffer_mutex(priv);
   return TRUE;
}

static gboolean
evas_video_sink_unlock(GstBaseSink* object)
{
   EvasVideoSink* sink;

   INF("sink unlock");

   sink = EVAS_VIDEO_SINK(object);

   unlock_buffer_mutex(sink->priv);

   return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock,
                                       (object), TRUE);
}

static gboolean
evas_video_sink_unlock_stop(GstBaseSink* object)
{
   EvasVideoSink* sink;
   EvasVideoSinkPrivate* priv;

   sink = EVAS_VIDEO_SINK(object);
   priv = sink->priv;

   INF("sink unlock stop");

   eina_lock_take(&priv->m);
   priv->unlocked = EINA_FALSE;
   eina_lock_release(&priv->m);

   return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock_stop,
                                       (object), TRUE);
}

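/* preroll() and render() run in GStreamer's streaming thread, while Evas
 * objects may only be touched from the main loop. The buffer is therefore
 * wrapped in an Emotion_Gstreamer_Buffer and handed over with
 * ecore_main_loop_thread_safe_call_async(). render() additionally blocks on
 * the condition variable until the main loop has consumed the frame;
 * preroll() returns immediately. */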
static GstFlowReturn
evas_video_sink_preroll(GstBaseSink* bsink, GstBuffer* buffer)
{
   Emotion_Gstreamer_Buffer *send;
   EvasVideoSinkPrivate *priv;
   EvasVideoSink *sink;

   INF("sink preroll %p [%i]", GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));

   sink = EVAS_VIDEO_SINK(bsink);
   priv = sink->priv;

   if (GST_BUFFER_SIZE(buffer) <= 0 && !priv->samsung)
     {
        WRN("empty buffer");
        return GST_FLOW_OK;
     }

   send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_TRUE);

   if (send)
     {
        if (priv->samsung)
          {
             if (!priv->func)
               {
                  GstStructure *structure;
                  GstCaps *caps;
                  gboolean is_multiplane = FALSE;

                  caps = GST_BUFFER_CAPS(buffer);
                  structure = gst_caps_get_structure (caps, 0);
                  gst_structure_get_boolean(structure, "multiplane", &is_multiplane);

                  if (is_multiplane)
                    priv->func = _evas_video_st12_multiplane;
                  else
                    priv->func = _evas_video_st12;
               }

             ecore_main_loop_thread_safe_call_async(evas_video_sink_samsung_main_render, send);
          }
        else
          ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
     }

   return GST_FLOW_OK;
}

static GstFlowReturn
evas_video_sink_render(GstBaseSink* bsink, GstBuffer* buffer)
{
   Emotion_Gstreamer_Buffer *send;
   EvasVideoSinkPrivate *priv;
   EvasVideoSink *sink;

   INF("sink render %p [%i]", GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));

   sink = EVAS_VIDEO_SINK(bsink);
   priv = sink->priv;

   eina_lock_take(&priv->m);

   if (priv->unlocked) {
      ERR("LOCKED");
      eina_lock_release(&priv->m);
      return GST_FLOW_OK;
   }

   send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_FALSE);
   if (!send) {
      eina_lock_release(&priv->m);
      return GST_FLOW_ERROR;
   }

   if (priv->samsung)
     {
        if (!priv->func)
          {
             GstStructure *structure;
             GstCaps *caps;
             gboolean is_multiplane = FALSE;

             caps = GST_BUFFER_CAPS(buffer);
             structure = gst_caps_get_structure (caps, 0);
             gst_structure_get_boolean(structure, "multiplane", &is_multiplane);

             if (is_multiplane)
               priv->func = _evas_video_st12_multiplane;
             else
               priv->func = _evas_video_st12;
          }

        ecore_main_loop_thread_safe_call_async(evas_video_sink_samsung_main_render, send);
     }
   else
     ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);

   eina_condition_wait(&priv->c);
   eina_lock_release(&priv->m);

   return GST_FLOW_OK;
}

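/* Samsung path, executed in the main loop: the buffer carries a descriptor
 * (GstMultiPlaneImageBuffer or SCMN_IMGB) rather than raw pixels, so the
 * real width/height/stride/elevation are read from it. The Evas image is
 * sized to the padded stride x elevation buffer and the fill is scaled so
 * that only the visible width x height area is shown. */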
static void
evas_video_sink_samsung_main_render(void *data)
{
   Emotion_Gstreamer_Buffer *send;
   Emotion_Gstreamer_Video *ev = NULL;
   Emotion_Video_Stream *vstream;
   EvasVideoSinkPrivate* priv;
   GstBuffer* buffer;
   unsigned char *evas_data;
   const guint8 *gst_data;
   GstFormat fmt = GST_FORMAT_TIME;
   gint64 pos;
   Eina_Bool preroll;
   int stride, elevation;
   Evas_Coord w, h;

   send = data;

   if (!send) goto exit_point;

   priv = send->sink;
   buffer = send->frame;
   preroll = send->preroll;
   /* cache ev: send is freed at exit_point and must not be used after that */
   ev = send->ev;

   if (!priv || !priv->o || priv->unlocked || !ev)
     goto exit_point;

   _emotion_gstreamer_video_pipeline_parse(send->ev, EINA_TRUE);

   /* Getting stride to compute the right size and then fill the object properly */
   /* Y => [0] and UV in [1] */
   if (priv->func == _evas_video_st12_multiplane)
     {
        const GstMultiPlaneImageBuffer *mp_buf = (const GstMultiPlaneImageBuffer *) buffer;

        stride = mp_buf->stride[0];
        elevation = mp_buf->elevation[0];
        priv->width = mp_buf->width[0];
        priv->height = mp_buf->height[0];

        gst_data = (const guint8 *) mp_buf;
     }
   else
     {
        const SCMN_IMGB *imgb = (const SCMN_IMGB *) GST_BUFFER_MALLOCDATA(buffer);

        stride = imgb->stride[0];
        elevation = imgb->elevation[0];
        priv->width = imgb->width[0];
        priv->height = imgb->height[0];

        gst_data = (const guint8 *) imgb;
     }

   INF("sink main render [%i, %i] - [%i, %i]", priv->width, priv->height, stride, elevation);

   evas_object_image_alpha_set(priv->o, 0);
   evas_object_image_colorspace_set(priv->o, priv->eformat);
   evas_object_image_size_set(priv->o, stride, elevation);
   evas_object_geometry_get(priv->o, NULL, NULL, &w, &h);
   evas_object_image_fill_set(priv->o, 0, 0, stride * w / priv->width, elevation * h / priv->height);

   evas_data = evas_object_image_data_get(priv->o, 1);

   if (priv->func)
     priv->func(evas_data, gst_data, stride, elevation);
   else
     WRN("No way to decode %x colorspace !", priv->eformat);

   evas_object_image_data_set(priv->o, evas_data);
   evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
   evas_object_image_pixels_dirty_set(priv->o, 0);

   _emotion_frame_new(send->ev->obj);

   vstream = eina_list_nth(send->ev->video_streams, send->ev->video_stream_nbr - 1);

   gst_element_query_position(send->ev->pipeline, &fmt, &pos);
   send->ev->position = (double)pos / (double)GST_SECOND;

   vstream->width = priv->width;
   vstream->height = priv->height;
   send->ev->ratio = (double) priv->width / (double) priv->height;

   _emotion_video_pos_update(send->ev->obj, send->ev->position, vstream->length_time);
   _emotion_frame_resize(send->ev->obj, priv->width, priv->height, send->ev->ratio);

   if (priv->last_buffer) gst_buffer_unref(priv->last_buffer);
   priv->last_buffer = gst_buffer_ref(buffer);

 exit_point:
   emotion_gstreamer_buffer_free(send);

   if (preroll || !priv->o || !ev) return;

   eina_lock_take(&priv->m);
   if (!priv->unlocked)
     eina_condition_signal(&priv->c);

   eina_lock_release(&priv->m);
}

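/* Generic path, executed in the main loop: the negotiated width/height are
 * used directly and the converter fills the Evas image from
 * GST_BUFFER_DATA(); afterwards the frame/position bookkeeping is updated
 * and render() is woken up through the condition variable. */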
static void
evas_video_sink_main_render(void *data)
{
   Emotion_Gstreamer_Buffer *send;
   Emotion_Gstreamer_Video *ev = NULL;
   Emotion_Video_Stream *vstream;
   EvasVideoSinkPrivate* priv;
   GstBuffer* buffer;
   unsigned char *evas_data;
   GstFormat fmt = GST_FORMAT_TIME;
   gint64 pos;
   Eina_Bool preroll;

   send = data;

   if (!send) goto exit_point;

   priv = send->sink;
   buffer = send->frame;
   preroll = send->preroll;
   ev = send->ev;

   if (!priv || !priv->o || priv->unlocked || !ev)
     goto exit_point;

   _emotion_gstreamer_video_pipeline_parse(ev, EINA_TRUE);

   INF("sink main render [%i, %i]", priv->width, priv->height);

   evas_object_image_alpha_set(priv->o, 0);
   evas_object_image_colorspace_set(priv->o, priv->eformat);
   evas_object_image_size_set(priv->o, priv->width, priv->height);

   evas_data = evas_object_image_data_get(priv->o, 1);

   if (priv->func)
     priv->func(evas_data, GST_BUFFER_DATA(buffer), priv->width, priv->height);
   else
     WRN("No way to decode %x colorspace !", priv->eformat);

   evas_object_image_data_set(priv->o, evas_data);
   evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
   evas_object_image_pixels_dirty_set(priv->o, 0);

   _emotion_frame_new(ev->obj);

   vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);

   gst_element_query_position(ev->pipeline, &fmt, &pos);
   ev->position = (double)pos / (double)GST_SECOND;

   vstream->width = priv->width;
   vstream->height = priv->height;
   ev->ratio = (double) priv->width / (double) priv->height;

   _emotion_video_pos_update(ev->obj, ev->position, vstream->length_time);
   _emotion_frame_resize(ev->obj, priv->width, priv->height, ev->ratio);

   if (priv->last_buffer) gst_buffer_unref(priv->last_buffer);
   priv->last_buffer = gst_buffer_ref(buffer);

 exit_point:
   emotion_gstreamer_buffer_free(send);

   if (preroll || !priv->o || !ev) return;

   eina_lock_take(&priv->m);
   if (!priv->unlocked)
     eina_condition_signal(&priv->c);

   eina_lock_release(&priv->m);
}

static void
unlock_buffer_mutex(EvasVideoSinkPrivate* priv)
{
   eina_lock_take(&priv->m);
   priv->unlocked = EINA_TRUE;

   eina_condition_signal(&priv->c);
   eina_lock_release(&priv->m);
}

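/* Custom GClosure marshaller used by the "repaint-requested" signal: it
 * forwards the single GstMiniObject argument (the GstBuffer) to the
 * callback, since the stock GLib marshallers do not cover this case. */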
static void
marshal_VOID__MINIOBJECT(GClosure * closure, GValue * return_value __UNUSED__,
                         guint n_param_values, const GValue * param_values,
                         gpointer invocation_hint __UNUSED__, gpointer marshal_data)
{
   typedef void (*marshalfunc_VOID__MINIOBJECT) (gpointer obj, gpointer arg1, gpointer data2);
   marshalfunc_VOID__MINIOBJECT callback;
   GCClosure *cc;
   gpointer data1, data2;

   cc = (GCClosure *) closure;

   g_return_if_fail(n_param_values == 2);

   if (G_CCLOSURE_SWAP_DATA(closure)) {
      data1 = closure->data;
      data2 = g_value_peek_pointer(param_values + 0);
   } else {
      data1 = g_value_peek_pointer(param_values + 0);
      data2 = closure->data;
   }
   callback = (marshalfunc_VOID__MINIOBJECT) (marshal_data ? marshal_data : cc->callback);

   callback(data1, gst_value_get_mini_object(param_values + 1), data2);
}

static void
evas_video_sink_class_init(EvasVideoSinkClass* klass)
{
   GObjectClass* gobject_class;
   GstBaseSinkClass* gstbase_sink_class;

   gobject_class = G_OBJECT_CLASS(klass);
   gstbase_sink_class = GST_BASE_SINK_CLASS(klass);

   g_type_class_add_private(klass, sizeof(EvasVideoSinkPrivate));

   gobject_class->set_property = evas_video_sink_set_property;
   gobject_class->get_property = evas_video_sink_get_property;

   g_object_class_install_property (gobject_class, PROP_EVAS_OBJECT,
                                    g_param_spec_pointer ("evas-object", "Evas Object",
                                                          "The Evas object where the display of the video will be done",
                                                          G_PARAM_READWRITE));

   g_object_class_install_property (gobject_class, PROP_WIDTH,
                                    g_param_spec_int ("width", "Width",
                                                      "The width of the video",
                                                      0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));

   g_object_class_install_property (gobject_class, PROP_HEIGHT,
                                    g_param_spec_int ("height", "Height",
                                                      "The height of the video",
                                                      0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
   g_object_class_install_property (gobject_class, PROP_EV,
                                    g_param_spec_pointer ("ev", "Emotion_Gstreamer_Video",
                                                          "The internal data of the emotion object",
                                                          G_PARAM_READWRITE));

   gobject_class->dispose = evas_video_sink_dispose;

   gstbase_sink_class->set_caps = evas_video_sink_set_caps;
   gstbase_sink_class->stop = evas_video_sink_stop;
   gstbase_sink_class->start = evas_video_sink_start;
   gstbase_sink_class->unlock = evas_video_sink_unlock;
   gstbase_sink_class->unlock_stop = evas_video_sink_unlock_stop;
   gstbase_sink_class->render = evas_video_sink_render;
   gstbase_sink_class->preroll = evas_video_sink_preroll;

   evas_video_sink_signals[REPAINT_REQUESTED] = g_signal_new("repaint-requested",
                                                             G_TYPE_FROM_CLASS(klass),
                                                             (GSignalFlags)(G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
                                                             0,
                                                             0,
                                                             0,
                                                             marshal_VOID__MINIOBJECT,
                                                             G_TYPE_NONE, 1, GST_TYPE_BUFFER);
}

gboolean
gstreamer_plugin_init (GstPlugin * plugin)
{
   return gst_element_register (plugin,
                                "emotion-sink",
                                GST_RANK_NONE,
                                EVAS_TYPE_VIDEO_SINK);
}

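/* ecore_thread callbacks: setting the pipeline to PAUSED can take a while
 * during pre-roll, so it is done from a worker thread. The end/cancel
 * callbacks run back in the main loop, where they finish the setup (start
 * playback if requested) or tear everything down if the object was deleted
 * in the meantime. */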
static void
_emotion_gstreamer_pause(void *data, Ecore_Thread *thread)
{
   Emotion_Gstreamer_Video *ev = data;

   if (ecore_thread_check(thread) || !ev->pipeline) return;

   gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
}

static void
_emotion_gstreamer_cancel(void *data, Ecore_Thread *thread)
{
   Emotion_Gstreamer_Video *ev = data;

   ev->threads = eina_list_remove(ev->threads, thread);

   if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));

   if (ev->in == ev->out && ev->threads == NULL && ev->delete_me)
     em_shutdown(ev);
}

static void
_emotion_gstreamer_end(void *data, Ecore_Thread *thread)
{
   Emotion_Gstreamer_Video *ev = data;

   ev->threads = eina_list_remove(ev->threads, thread);

   if (ev->play)
     {
        gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
        ev->play_started = 1;
     }

   if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));

   if (ev->in == ev->out && ev->threads == NULL && ev->delete_me)
     em_shutdown(ev);
   else
     _emotion_gstreamer_video_pipeline_parse(data, EINA_TRUE);
}

GstElement *
gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
                         Evas_Object *o,
                         const char *uri)
{
   GstElement *playbin;
   GstElement *sink;
   Evas_Object *obj;
   int flags;

   obj = emotion_object_image_get(o);
   if (!obj)
     {
        ERR("No Evas_Object specified");
        return NULL;
     }

   playbin = gst_element_factory_make("playbin2", "playbin");
   if (!playbin)
     {
        ERR("Unable to create 'playbin2' GstElement.");
        return NULL;
     }

   sink = gst_element_factory_make("emotion-sink", "sink");
   if (!sink)
     {
        ERR("Unable to create 'emotion-sink' GstElement.");
        goto unref_pipeline;
     }

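/* Local copies of playbin2's GstPlayFlags bits (the enum is not exposed in
 * the public headers): NATIVE_VIDEO keeps playbin2 from inserting
 * conversion elements, so the sink sees the decoder's own format (e.g.
 * ST12); DOWNLOAD and BUFFERING enable progressive download and buffering
 * for network streams. */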
#define GST_PLAY_FLAG_NATIVE_VIDEO  (1 << 6)
#define GST_PLAY_FLAG_DOWNLOAD      (1 << 7)
#define GST_PLAY_FLAG_BUFFERING     (1 << 8)

   g_object_set(G_OBJECT(sink), "evas-object", obj, NULL);
   g_object_set(G_OBJECT(sink), "ev", ev, NULL);

   g_object_get(G_OBJECT(playbin), "flags", &flags, NULL);
   g_object_set(G_OBJECT(playbin), "flags", flags | GST_PLAY_FLAG_NATIVE_VIDEO | GST_PLAY_FLAG_DOWNLOAD | GST_PLAY_FLAG_BUFFERING, NULL);
   g_object_set(G_OBJECT(playbin), "video-sink", sink, NULL);
   g_object_set(G_OBJECT(playbin), "uri", uri, NULL);

   ev->pipeline = playbin;
   ev->sink = sink;
   ev->threads = eina_list_append(ev->threads,
                                  ecore_thread_run(_emotion_gstreamer_pause,
                                                   _emotion_gstreamer_end,
                                                   _emotion_gstreamer_cancel,
                                                   ev));

   /** NOTE: you need to set: GST_DEBUG_DUMP_DOT_DIR=/tmp EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp' */
   /** then call dot -Tpng -oemotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
   if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(playbin), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));

   return playbin;

 unref_pipeline:
   gst_object_unref(playbin);
   return NULL;
}