emotion: properly delay buffer destruction.
profile/ivi/emotion.git: src/modules/gstreamer/emotion_sink.c
1 #include <Ecore.h>
2
3 #include "emotion_gstreamer.h"
4
5 static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE("sink",
6                                                                    GST_PAD_SINK, GST_PAD_ALWAYS,
7                                                                    GST_STATIC_CAPS(GST_VIDEO_CAPS_YUV("{ I420, YV12, YUY2, NV12, ST12, TM12 }") ";"
8                                                                                    GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_BGR ";" GST_VIDEO_CAPS_BGRA));
9
10 GST_DEBUG_CATEGORY_STATIC(evas_video_sink_debug);
11 #define GST_CAT_DEFAULT evas_video_sink_debug
12
13 enum {
14   REPAINT_REQUESTED,
15   LAST_SIGNAL
16 };
17
18 enum {
19   PROP_0,
20   PROP_EVAS_OBJECT,
21   PROP_WIDTH,
22   PROP_HEIGHT,
23   PROP_EV,
24   PROP_LAST
25 };
26
27 static guint evas_video_sink_signals[LAST_SIGNAL] = { 0, };
28
29 #define _do_init(bla)                                   \
30   GST_DEBUG_CATEGORY_INIT(evas_video_sink_debug,        \
31                           "emotion-sink",               \
32                           0,                            \
33                           "emotion video sink")
34
35 GST_BOILERPLATE_FULL(EvasVideoSink,
36                      evas_video_sink,
37                      GstVideoSink,
38                      GST_TYPE_VIDEO_SINK,
39                      _do_init);
40
41
42 static void unlock_buffer_mutex(EvasVideoSinkPrivate* priv);
43 static void evas_video_sink_main_render(void *data);
44 static void evas_video_sink_samsung_main_render(void *data);
45
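/* Packed RGB helpers: copy w x output_height pixels from the GstBuffer into the
 * Evas ARGB8888 buffer, advancing the source by 'step' bytes per pixel and
 * forcing alpha to 255 (BGR and BGRx carry no alpha channel). */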
46 static void
47 _evas_video_bgrx_step(unsigned char *evas_data, const unsigned char *gst_data,
48                       unsigned int w, unsigned int h __UNUSED__, unsigned int output_height, unsigned int step)
49 {
50    unsigned int x;
51    unsigned int y;
52
53    for (y = 0; y < output_height; ++y)
54      {
55         for (x = 0; x < w; x++)
56           {
57              evas_data[0] = gst_data[0];
58              evas_data[1] = gst_data[1];
59              evas_data[2] = gst_data[2];
60              evas_data[3] = 255;
61              gst_data += step;
62              evas_data += 4;
63           }
64      }
65 }
66
67 static void
68 _evas_video_bgr(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height)
69 {
70    _evas_video_bgrx_step(evas_data, gst_data, w, h, output_height, 3);
71 }
72
73 static void
74 _evas_video_bgrx(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height)
75 {
76    _evas_video_bgrx_step(evas_data, gst_data, w, h, output_height, 4);
77 }
78
79 static void
80 _evas_video_bgra(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h __UNUSED__, unsigned int output_height)
81 {
82    unsigned int x;
83    unsigned int y;
84
85    for (y = 0; y < output_height; ++y)
86      {
87         unsigned char alpha;
88
89         for (x = 0; x < w; ++x)
90           {
91              alpha = gst_data[3];
92              evas_data[0] = (gst_data[0] * alpha) / 255;
93              evas_data[1] = (gst_data[1] * alpha) / 255;
94              evas_data[2] = (gst_data[2] * alpha) / 255;
95              evas_data[3] = alpha;
96              gst_data += 4;
97              evas_data += 4;
98           }
99      }
100 }
101
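/* Planar YUV helpers: the Evas planar YCbCr colorspaces take a table of row
 * pointers rather than copied pixels, so these functions only fill 'rows' with
 * pointers into the GstBuffer data (Y plane first, then the chroma planes). */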
102 static void
103 _evas_video_i420(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height)
104 {
105    const unsigned char **rows;
106    unsigned int i, j;
107    unsigned int rh;
108
109    rh = output_height;
110
111    rows = (const unsigned char **)evas_data;
112
113    for (i = 0; i < rh; i++)
114      rows[i] = &gst_data[i * w];
115
116    for (j = 0; j < (rh / 2); j++, i++)
117      rows[i] = &gst_data[h * w + j * (w / 2)];
118
119    for (j = 0; j < (rh / 2); j++, i++)
120      rows[i] = &gst_data[h * w + rh * (w / 4) + j * (w / 2)];
121 }
122
123 static void
124 _evas_video_yv12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height)
125 {
126    const unsigned char **rows;
127    unsigned int i, j;
128    unsigned int rh;
129
130    rh = output_height;
131
132    rows = (const unsigned char **)evas_data;
133
134    for (i = 0; i < rh; i++)
135      rows[i] = &gst_data[i * w];
136
137    for (j = 0; j < (rh / 2); j++, i++)
138      rows[i] = &gst_data[h * w + rh * (w / 4) + j * (w / 2)];
139
140    for (j = 0; j < (rh / 2); j++, i++)
141      rows[i] = &gst_data[h * w + j * (w / 2)];
142 }
143
144 static void
145 _evas_video_yuy2(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h __UNUSED__, unsigned int output_height)
146 {
147    const unsigned char **rows;
148    unsigned int i;
149
150    rows = (const unsigned char **)evas_data;
151
152    for (i = 0; i < output_height; i++)
153      rows[i] = &gst_data[i * w * 2];
154 }
155
156 static void
157 _evas_video_nv12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h __UNUSED__, unsigned int output_height)
158 {
159    const unsigned char **rows;
160    unsigned int i, j;
161    unsigned int rh;
162
163    rh = output_height;
164
165    rows = (const unsigned char **)evas_data;
166
167    for (i = 0; i < rh; i++)
168      rows[i] = &gst_data[i * w];
169
170    for (j = 0; j < (rh / 2); j++, i++)
171      rows[i] = &gst_data[rh * w + j * w];
172 }
173
174 static void
175 _evas_video_mt12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height __UNUSED__)
176 {
177    const unsigned char **rows;
178    unsigned int i;
179    unsigned int j;
180
181    rows = (const unsigned char **)evas_data;
182
183    for (i = 0; i < (h / 32) / 2; i++)
184      rows[i] = &gst_data[i * w * 2 * 32];
185
186    if ((h / 32) % 2)
187      {
188         rows[i] = &gst_data[i * w * 2 * 32];
189         i++;
190      }
191
192    for (j = 0; j < ((h / 2) / 32) / 2; ++j, ++i)
193      rows[i] = &gst_data[h * w + j * (w / 2) * 2 * 16];
194 }
195
196 static void
197 _evas_video_st12_multiplane(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w, unsigned int h, unsigned int output_height __UNUSED__)
198 {
199    const GstMultiPlaneImageBuffer *mp_buf = (const GstMultiPlaneImageBuffer *) gst_data;
200    const unsigned char **rows;
201    unsigned int i;
202    unsigned int j;
203
204    rows = (const unsigned char **)evas_data;
205
206    for (i = 0; i < (h / 32) / 2; i++)
207      rows[i] = mp_buf->uaddr[0] + i * w * 2 * 32;
208    if ((h / 32) % 2)
209      {
210         rows[i] = mp_buf->uaddr[0] + i * w * 2 * 32;
211         i++;
212      }
213
214    for (j = 0; j < ((h / 2) / 16) / 2; j++, i++)
215      {
216        rows[i] = mp_buf->uaddr[1] + j * w * 2 * 16 * 2;
217      }
218    if (((h / 2) / 16) % 2)
219      rows[i] = mp_buf->uaddr[1] + j * w * 2 * 16 * 2;
220 }
221
222 static void
223 _evas_video_st12(unsigned char *evas_data, const unsigned char *gst_data, unsigned int w __UNUSED__, unsigned int h, unsigned int output_height __UNUSED__)
224 {
225    const SCMN_IMGB *imgb = (const SCMN_IMGB *) gst_data;
226    const unsigned char **rows;
227    unsigned int i, j;
228
229    rows = (const unsigned char **)evas_data;
230
231    for (i = 0; i < (h / 32) / 2; i++)
232      rows[i] = imgb->uaddr[0] + i * imgb->stride[0] * 2 * 32;
233    if ((h / 32) % 2)
234      {
235         rows[i] = imgb->uaddr[0] + i * imgb->stride[0] * 2 * 32;
236         i++;
237      }
238
239    for (j = 0; j < (unsigned int) imgb->elevation[1] / 32 / 2; j++, i++)
240      rows[i] = imgb->uaddr[1] + j * imgb->stride[1] * 32 * 2;
241    if ((imgb->elevation[1] / 32) % 2)
242      rows[i++] = imgb->uaddr[1] + j * imgb->stride[1] * 32 * 2;
243 }
244
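/* Map GStreamer fourcc formats to the matching Evas colorspace and conversion
 * callback; entries with force_height set have their height rounded down to an
 * even value in evas_video_sink_set_caps(). */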
245 static const struct {
246    const char *name;
247    guint32 fourcc;
248    Evas_Colorspace eformat;
249    Evas_Video_Convert_Cb func;
250    Eina_Bool force_height;
251 } colorspace_fourcc_convertion[] = {
252   { "I420", GST_MAKE_FOURCC('I', '4', '2', '0'), EVAS_COLORSPACE_YCBCR422P601_PL, _evas_video_i420, EINA_TRUE },
253   { "YV12", GST_MAKE_FOURCC('Y', 'V', '1', '2'), EVAS_COLORSPACE_YCBCR422P601_PL, _evas_video_yv12, EINA_TRUE },
254   { "YUY2", GST_MAKE_FOURCC('Y', 'U', 'Y', '2'), EVAS_COLORSPACE_YCBCR422601_PL, _evas_video_yuy2, EINA_FALSE },
255   { "NV12", GST_MAKE_FOURCC('N', 'V', '1', '2'), EVAS_COLORSPACE_YCBCR420NV12601_PL, _evas_video_nv12, EINA_TRUE },
256   { "TM12", GST_MAKE_FOURCC('T', 'M', '1', '2'), EVAS_COLORSPACE_YCBCR420TM12601_PL, _evas_video_mt12, EINA_TRUE }
257 };
258
259 static const struct {
260    const char *name;
261    GstVideoFormat format;
262    Evas_Colorspace eformat;
263    Evas_Video_Convert_Cb func;
264 } colorspace_format_convertion[] = {
265   { "BGR", GST_VIDEO_FORMAT_BGR, EVAS_COLORSPACE_ARGB8888, _evas_video_bgr },
266   { "BGRx", GST_VIDEO_FORMAT_BGRx, EVAS_COLORSPACE_ARGB8888, _evas_video_bgrx },
267   { "BGRA", GST_VIDEO_FORMAT_BGRA, EVAS_COLORSPACE_ARGB8888, _evas_video_bgra }
268 };
269
270 static void
271 evas_video_sink_base_init(gpointer g_class)
272 {
273    GstElementClass* element_class;
274
275    element_class = GST_ELEMENT_CLASS(g_class);
276    gst_element_class_add_pad_template(element_class, gst_static_pad_template_get(&sinktemplate));
277    gst_element_class_set_details_simple(element_class, "Evas video sink",
278                                         "Sink/Video", "Sends video data from a GStreamer pipeline to an Evas object",
279                                         "Vincent Torri <vtorri@univ-evry.fr>");
280 }
281
282 static void
283 evas_video_sink_init(EvasVideoSink* sink, EvasVideoSinkClass* klass __UNUSED__)
284 {
285    EvasVideoSinkPrivate* priv;
286
287    INF("sink init");
288    sink->priv = priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, EVAS_TYPE_VIDEO_SINK, EvasVideoSinkPrivate);
289    priv->o = NULL;
290    priv->width = 0;
291    priv->height = 0;
292    priv->func = NULL;
293    priv->eformat = EVAS_COLORSPACE_ARGB8888;
294    priv->samsung = EINA_FALSE;
295    eina_lock_new(&priv->m);
296    eina_condition_new(&priv->c, &priv->m);
297    priv->unlocked = EINA_FALSE;
298 }
299
300 /**** Object methods ****/
301 static void
302 _cleanup_priv(void *data, Evas *e __UNUSED__, Evas_Object *obj, void *event_info __UNUSED__)
303 {
304    EvasVideoSinkPrivate* priv;
305
306    priv = data;
307
308    eina_lock_take(&priv->m);
309    if (priv->o == obj)
310      priv->o = NULL;
311    eina_lock_release(&priv->m);
312 }
313
314 static void
315 evas_video_sink_set_property(GObject * object, guint prop_id,
316                              const GValue * value, GParamSpec * pspec)
317 {
318    EvasVideoSink* sink;
319    EvasVideoSinkPrivate* priv;
320
321    sink = EVAS_VIDEO_SINK (object);
322    priv = sink->priv;
323
324    switch (prop_id) {
325     case PROP_EVAS_OBJECT:
326        eina_lock_take(&priv->m);
327        evas_object_event_callback_del(priv->o, EVAS_CALLBACK_FREE, _cleanup_priv);
328        priv->o = g_value_get_pointer (value);
329        INF("sink set Evas_Object %p.", priv->o);
330        evas_object_event_callback_add(priv->o, EVAS_CALLBACK_FREE, _cleanup_priv, priv);
331        eina_lock_release(&priv->m);
332        break;
333     case PROP_EV:
334        INF("sink set ev.");
335        eina_lock_take(&priv->m);
336        priv->ev = g_value_get_pointer (value);
337        if (priv->ev)
338          priv->ev->samsung = EINA_TRUE;
339        eina_lock_release(&priv->m);
340        break;
341     default:
342        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
343        ERR("invalid property");
344        break;
345    }
346 }
347
348 static void
349 evas_video_sink_get_property(GObject * object, guint prop_id,
350                              GValue * value, GParamSpec * pspec)
351 {
352    EvasVideoSink* sink;
353    EvasVideoSinkPrivate* priv;
354
355    sink = EVAS_VIDEO_SINK (object);
356    priv = sink->priv;
357
358    switch (prop_id) {
359     case PROP_EVAS_OBJECT:
360        INF("sink get property.");
361        eina_lock_take(&priv->m);
362        g_value_set_pointer(value, priv->o);
363        eina_lock_release(&priv->m);
364        break;
365     case PROP_WIDTH:
366        INF("sink get width.");
367        eina_lock_take(&priv->m);
368        g_value_set_int(value, priv->width);
369        eina_lock_release(&priv->m);
370        break;
371     case PROP_HEIGHT:
372        INF("sink get height.");
373        eina_lock_take(&priv->m);
374        g_value_set_int (value, priv->height);
375        eina_lock_release(&priv->m);
376        break;
377     case PROP_EV:
378        INF("sink get ev.");
379        eina_lock_take(&priv->m);
380        g_value_set_pointer (value, priv->ev);
381        eina_lock_release(&priv->m);
382        break;
383     default:
384        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
385        ERR("invalid property");
386        break;
387    }
388 }
389
390 static void
391 evas_video_sink_dispose(GObject* object)
392 {
393    EvasVideoSink* sink;
394    EvasVideoSinkPrivate* priv;
395
396    INF("dispose.");
397
398    sink = EVAS_VIDEO_SINK(object);
399    priv = sink->priv;
400
401    eina_condition_free(&priv->c);
402    eina_lock_free(&priv->m);
403
404    G_OBJECT_CLASS(parent_class)->dispose(object);
405 }
406
407
408 /**** BaseSink methods ****/
409
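/* Negotiate caps: read width/height/format from the caps, pick the conversion
 * callback from the tables above, and flag Samsung ST12 buffers for the
 * zero-copy path (their callback is chosen later, once the first buffer tells
 * us whether it is multiplane). */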
410 gboolean evas_video_sink_set_caps(GstBaseSink *bsink, GstCaps *caps)
411 {
412    EvasVideoSink* sink;
413    EvasVideoSinkPrivate* priv;
414    GstStructure *structure;
415    GstVideoFormat format;
416    guint32 fourcc;
417    unsigned int i;
418
419    sink = EVAS_VIDEO_SINK(bsink);
420    priv = sink->priv;
421
422    structure = gst_caps_get_structure(caps, 0);
423
424    if (gst_structure_get_int(structure, "width", (int*) &priv->width)
425        && gst_structure_get_int(structure, "height", (int*) &priv->height)
426        && gst_structure_get_fourcc(structure, "format", &fourcc))
427      {
428         priv->source_height = priv->height;
429
430         for (i = 0; i < sizeof (colorspace_fourcc_convertion) / sizeof (colorspace_fourcc_convertion[0]); ++i)
431           if (fourcc == colorspace_fourcc_convertion[i].fourcc)
432             {
433                fprintf(stderr, "Found '%s'\n", colorspace_fourcc_convertion[i].name);
434                priv->eformat = colorspace_fourcc_convertion[i].eformat;
435                priv->func = colorspace_fourcc_convertion[i].func;
436                if (colorspace_fourcc_convertion[i].force_height)
437                  {
438                     priv->height = (priv->height >> 1) << 1;
439                  }
440                if (priv->ev)
441                  priv->ev->kill_buffer = EINA_TRUE;
442                return TRUE;
443             }
444
445         if (fourcc == GST_MAKE_FOURCC('S', 'T', '1', '2'))
446           {
447              fprintf(stderr, "Found '%s'\n", "ST12");
448              priv->eformat = EVAS_COLORSPACE_YCBCR420TM12601_PL;
449              priv->samsung = EINA_TRUE;
450              priv->func = NULL;
451              if (priv->ev)
452                {
453                   priv->ev->samsung = EINA_TRUE;
454                   priv->ev->kill_buffer = EINA_TRUE;
455                }
456              return TRUE;
457           }
458      }
459
460    INF("fallback code !");
461    if (!gst_video_format_parse_caps(caps, &format, (int*) &priv->width, (int*) &priv->height))
462      {
463         ERR("Unable to parse caps.");
464         return FALSE;
465      }
466
467    priv->source_height = priv->height;
468
469    for (i = 0; i < sizeof (colorspace_format_convertion) / sizeof (colorspace_format_convertion[0]); ++i)
470      if (format == colorspace_format_convertion[i].format)
471        {
472           fprintf(stderr, "Found '%s'\n", colorspace_format_convertion[i].name);
473           priv->eformat = colorspace_format_convertion[i].eformat;
474           priv->func = colorspace_format_convertion[i].func;
475           if (priv->ev)
476             priv->ev->kill_buffer = EINA_FALSE;
477           return TRUE;
478        }
479
480    ERR("unsupported : %d\n", format);
481    return FALSE;
482 }
483
484 static gboolean
485 evas_video_sink_start(GstBaseSink* base_sink)
486 {
487    EvasVideoSinkPrivate* priv;
488    gboolean res = TRUE;
489
490    INF("sink start");
491
492    priv = EVAS_VIDEO_SINK(base_sink)->priv;
493    eina_lock_take(&priv->m);
494    if (!priv->o)
495      res = FALSE;
496    else
497      priv->unlocked = EINA_FALSE;
498    eina_lock_release(&priv->m);
499    return res;
500 }
501
502 static gboolean
503 evas_video_sink_stop(GstBaseSink* base_sink)
504 {
505    EvasVideoSinkPrivate* priv = EVAS_VIDEO_SINK(base_sink)->priv;
506
507    INF("sink stop");
508
509    unlock_buffer_mutex(priv);
510    return TRUE;
511 }
512
513 static gboolean
514 evas_video_sink_unlock(GstBaseSink* object)
515 {
516    EvasVideoSink* sink;
517
518    INF("sink unlock");
519
520    sink = EVAS_VIDEO_SINK(object);
521
522    unlock_buffer_mutex(sink->priv);
523
524    return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock,
525                                        (object), TRUE);
526 }
527
528 static gboolean
529 evas_video_sink_unlock_stop(GstBaseSink* object)
530 {
531    EvasVideoSink* sink;
532    EvasVideoSinkPrivate* priv;
533
534    sink = EVAS_VIDEO_SINK(object);
535    priv = sink->priv;
536
537    INF("sink unlock stop");
538
539    eina_lock_take(&priv->m);
540    priv->unlocked = EINA_FALSE;
541    eina_lock_release(&priv->m);
542
543    return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock_stop,
544                                        (object), TRUE);
545 }
546
547 static GstFlowReturn
548 evas_video_sink_preroll(GstBaseSink* bsink, GstBuffer* buffer)
549 {
550    Emotion_Gstreamer_Buffer *send;
551    EvasVideoSinkPrivate *priv;
552    EvasVideoSink *sink;
553
554    INF("sink preroll %p [%i]", GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
555
556    sink = EVAS_VIDEO_SINK(bsink);
557    priv = sink->priv;
558
559    if (GST_BUFFER_SIZE(buffer) <= 0 && !priv->samsung)
560      {
561         WRN("empty buffer");
562         return GST_FLOW_OK;
563      }
564
565    send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_TRUE);
566
567    if (send)
568      {
569         if (priv->samsung)
570           {
571              if (!priv->func)
572                {
573                   GstStructure *structure;
574                   GstCaps *caps;
575                   gboolean is_multiplane = FALSE;
576
577                   caps = gst_buffer_get_caps(buffer); /* takes a ref, balanced by the unref below */
578                   structure = gst_caps_get_structure (caps, 0);
579                   gst_structure_get_boolean(structure, "multiplane", &is_multiplane);
580                   gst_caps_unref(caps);
581
582                   if (is_multiplane)
583                     priv->func = _evas_video_st12_multiplane;
584                   else
585                     priv->func = _evas_video_st12;
586                }
587
588              ecore_main_loop_thread_safe_call_async(evas_video_sink_samsung_main_render, send);
589           }
590         else
591           ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
592      }
593
594    return GST_FLOW_OK;
595 }
596
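/* Called from the GStreamer streaming thread: wrap the buffer in an
 * Emotion_Gstreamer_Buffer, hand it to the Ecore main loop and block on the
 * condition until the main-loop render (or unlock_buffer_mutex) signals us. */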
597 static GstFlowReturn
598 evas_video_sink_render(GstBaseSink* bsink, GstBuffer* buffer)
599 {
600    Emotion_Gstreamer_Buffer *send;
601    EvasVideoSinkPrivate *priv;
602    EvasVideoSink *sink;
603
604    INF("sink render %p [%i]", GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
605
606    sink = EVAS_VIDEO_SINK(bsink);
607    priv = sink->priv;
608
609    eina_lock_take(&priv->m);
610
611    if (priv->unlocked) {
612       ERR("sink is unlocked, dropping frame");
613       eina_lock_release(&priv->m);
614       return GST_FLOW_OK;
615    }
616
617    send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_FALSE);
618    if (!send) {
619       eina_lock_release(&priv->m);
620       return GST_FLOW_ERROR;
621    }
622
623    if (priv->samsung)
624      {
625         if (!priv->func)
626           {
627              GstStructure *structure;
628              GstCaps *caps;
629              gboolean is_multiplane = FALSE;
630
631              caps = gst_buffer_get_caps(buffer); /* takes a ref, balanced by the unref below */
632              structure = gst_caps_get_structure (caps, 0);
633              gst_structure_get_boolean(structure, "multiplane", &is_multiplane);
634              gst_caps_unref(caps);
635
636              if (is_multiplane)
637                priv->func = _evas_video_st12_multiplane;
638              else
639                priv->func = _evas_video_st12;
640           }
641
642         ecore_main_loop_thread_safe_call_async(evas_video_sink_samsung_main_render, send);
643      }
644    else
645      ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
646
647    eina_condition_wait(&priv->c);
648    eina_lock_release(&priv->m);
649
650    return GST_FLOW_OK;
651 }
652
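/* Main-loop side of the Samsung zero-copy path: stride and elevation are read
 * from the SCMN_IMGB or multi-plane header carried by the buffer and used to
 * size and fill the Evas image. */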
653 static void
654 evas_video_sink_samsung_main_render(void *data)
655 {
656    Emotion_Gstreamer_Buffer *send;
657    Emotion_Video_Stream *vstream;
658    EvasVideoSinkPrivate* priv;
659    GstBuffer* buffer;
660    unsigned char *evas_data;
661    const guint8 *gst_data;
662    GstFormat fmt = GST_FORMAT_TIME;
663    gint64 pos;
664    Eina_Bool preroll;
665    int stride, elevation;
666    Evas_Coord w, h;
667
668    send = data;
669
670    if (!send) goto exit_point;
671
672    priv = send->sink;
673    buffer = send->frame;
674    preroll = send->preroll;
675
676    if (!priv || !priv->o || priv->unlocked || !send->ev)
677      goto exit_point;
678
679    _emotion_gstreamer_video_pipeline_parse(send->ev, EINA_TRUE);
680
681    /* Getting stride to compute the right size and then fill the object properly */
682    /* Y => [0] and UV in [1] */
683    if (priv->func == _evas_video_st12_multiplane)
684      {
685         const GstMultiPlaneImageBuffer *mp_buf = (const GstMultiPlaneImageBuffer *) buffer;
686
687         stride = mp_buf->stride[0];
688         elevation = mp_buf->elevation[0];
689         priv->width = mp_buf->width[0];
690         priv->height = mp_buf->height[0];
691
692         gst_data = (const guint8 *) mp_buf;
693      }
694    else
695      {
696         const SCMN_IMGB *imgb = (const SCMN_IMGB *) GST_BUFFER_MALLOCDATA(buffer);
697
698         stride = imgb->stride[0];
699         elevation = imgb->elevation[0];
700         priv->width = imgb->width[0];
701         priv->height = imgb->height[0];
702
703         gst_data = (const guint8 *) imgb;
704      }
705
706    evas_object_geometry_get(priv->o, NULL, NULL, &w, &h);
707    INF("sink main render [%i, %i] - [%i, %i] => [%i, %i] - [%i, %i]",
708        priv->width, priv->height,
709        stride, elevation,
710        w, h,
711        stride * w / priv->width, elevation * h / priv->height);
712
713    send->ev->fill.width = stride * w / priv->width;
714    send->ev->fill.height = elevation * h / priv->height;
715
716    evas_object_image_alpha_set(priv->o, 0);
717    evas_object_image_colorspace_set(priv->o, priv->eformat);
718    evas_object_image_size_set(priv->o, stride, elevation);
719    evas_object_image_fill_set(priv->o, 0, 0, send->ev->fill.width, send->ev->fill.height);
720
721    evas_data = evas_object_image_data_get(priv->o, 1);
722
723    if (priv->func)
724      priv->func(evas_data, gst_data, stride, elevation, elevation);
725    else
726      WRN("No way to decode %x colorspace !", priv->eformat);
727
728    evas_object_image_data_set(priv->o, evas_data);
729    evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
730    evas_object_image_pixels_dirty_set(priv->o, 0);
731
732    _emotion_frame_new(send->ev->obj);
733
734    vstream = eina_list_nth(send->ev->video_streams, send->ev->video_stream_nbr - 1);
735
736    gst_element_query_position(send->ev->pipeline, &fmt, &pos);
737    send->ev->position = (double)pos / (double)GST_SECOND;
738
739    vstream->width = priv->width;
740    vstream->height = priv->height;
741    send->ev->ratio = (double) priv->width / (double) priv->height;
742
743    _emotion_video_pos_update(send->ev->obj, send->ev->position, vstream->length_time);
744    _emotion_frame_resize(send->ev->obj, priv->width, priv->height, send->ev->ratio);
745
746  exit_point:
747    if (!send) return ;
748    if (preroll || !priv || !priv->o || !send->ev) { emotion_gstreamer_buffer_free(send); return ; }
749    emotion_gstreamer_buffer_free(send);
750
751    eina_lock_take(&priv->m);
752    if (!priv->unlocked)
753      eina_condition_signal(&priv->c);
754
755    eina_lock_release(&priv->m);
756 }
757
758 static void
759 evas_video_sink_main_render(void *data)
760 {
761    Emotion_Gstreamer_Buffer *send;
762    Emotion_Gstreamer_Video *ev = NULL;
763    Emotion_Video_Stream *vstream;
764    EvasVideoSinkPrivate* priv;
765    GstBuffer* buffer;
766    unsigned char *evas_data;
767    GstFormat fmt = GST_FORMAT_TIME;
768    gint64 pos;
769    Eina_Bool preroll;
770
771    send = data;
772
773    if (!send) goto exit_point;
774
775    priv = send->sink;
776    buffer = send->frame;
777    preroll = send->preroll;
778    ev = send->ev;
779
780    if (!priv || !priv->o || priv->unlocked || !ev)
781      goto exit_point;
782
783    _emotion_gstreamer_video_pipeline_parse(ev, EINA_TRUE);
784
785    INF("sink main render [%i, %i] (source height: %i)", priv->width, priv->height, priv->source_height);
786
787    evas_object_image_alpha_set(priv->o, 0);
788    evas_object_image_colorspace_set(priv->o, priv->eformat);
789    evas_object_image_size_set(priv->o, priv->width, priv->height);
790
791    evas_data = evas_object_image_data_get(priv->o, 1);
792
793    if (priv->func)
794      priv->func(evas_data, GST_BUFFER_DATA(buffer), priv->width, priv->source_height, priv->height);
795    else
796      WRN("No way to decode %x colorspace !", priv->eformat);
797
798    evas_object_image_data_set(priv->o, evas_data);
799    evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
800    evas_object_image_pixels_dirty_set(priv->o, 0);
801
802    _emotion_frame_new(ev->obj);
803
804    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
805
806    gst_element_query_position(ev->pipeline, &fmt, &pos);
807    ev->position = (double)pos / (double)GST_SECOND;
808
809    vstream->width = priv->width;
810    vstream->height = priv->height;
811    ev->ratio = (double) priv->width / (double) priv->height;
812
813    _emotion_video_pos_update(ev->obj, ev->position, vstream->length_time);
814    _emotion_frame_resize(ev->obj, priv->width, priv->height, ev->ratio);
815
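   /* Keep a reference on the buffer whose data now backs the Evas image, so it
    * is not recycled while still displayed; it is released from _on_post_clear()
    * once Evas has flushed rendering ("properly delay buffer destruction"). */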
816    if (ev->last_buffer) gst_buffer_unref(ev->last_buffer);
817    ev->last_buffer = gst_buffer_ref(buffer);
818
819  exit_point:
820    if (!send) return ;
821    emotion_gstreamer_buffer_free(send);
822    if (preroll || !priv || !priv->o || !ev) return ;
823
824    eina_lock_take(&priv->m);
825    if (!priv->unlocked)
826      eina_condition_signal(&priv->c);
827
828    eina_lock_release(&priv->m);
829 }
830
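/* Flush/unlock helper: mark the sink as unlocked and wake up a render call
 * blocked on the condition in evas_video_sink_render(). */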
831 static void
832 unlock_buffer_mutex(EvasVideoSinkPrivate* priv)
833 {
834    eina_lock_take(&priv->m);
835    priv->unlocked = EINA_TRUE;
836
837    eina_condition_signal(&priv->c);
838    eina_lock_release(&priv->m);
839 }
840
841 static void
842 marshal_VOID__MINIOBJECT(GClosure * closure, GValue * return_value __UNUSED__,
843                          guint n_param_values, const GValue * param_values,
844                          gpointer invocation_hint __UNUSED__, gpointer marshal_data)
845 {
846    typedef void (*marshalfunc_VOID__MINIOBJECT) (gpointer obj, gpointer arg1, gpointer data2);
847    marshalfunc_VOID__MINIOBJECT callback;
848    GCClosure *cc;
849    gpointer data1, data2;
850
851    cc = (GCClosure *) closure;
852
853    g_return_if_fail(n_param_values == 2);
854
855    if (G_CCLOSURE_SWAP_DATA(closure)) {
856       data1 = closure->data;
857       data2 = g_value_peek_pointer(param_values + 0);
858    } else {
859       data1 = g_value_peek_pointer(param_values + 0);
860       data2 = closure->data;
861    }
862    callback = (marshalfunc_VOID__MINIOBJECT) (marshal_data ? marshal_data : cc->callback);
863
864    callback(data1, gst_value_get_mini_object(param_values + 1), data2);
865 }
866
867 static void
868 evas_video_sink_class_init(EvasVideoSinkClass* klass)
869 {
870    GObjectClass* gobject_class;
871    GstBaseSinkClass* gstbase_sink_class;
872
873    gobject_class = G_OBJECT_CLASS(klass);
874    gstbase_sink_class = GST_BASE_SINK_CLASS(klass);
875
876    g_type_class_add_private(klass, sizeof(EvasVideoSinkPrivate));
877
878    gobject_class->set_property = evas_video_sink_set_property;
879    gobject_class->get_property = evas_video_sink_get_property;
880
881    g_object_class_install_property (gobject_class, PROP_EVAS_OBJECT,
882                                     g_param_spec_pointer ("evas-object", "Evas Object",
883                                                           "The Evas object on which the video will be displayed",
884                                                           G_PARAM_READWRITE));
885
886    g_object_class_install_property (gobject_class, PROP_WIDTH,
887                                     g_param_spec_int ("width", "Width",
888                                                       "The width of the video",
889                                                       0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
890
891    g_object_class_install_property (gobject_class, PROP_HEIGHT,
892                                     g_param_spec_int ("height", "Height",
893                                                       "The height of the video",
894                                                       0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
895    g_object_class_install_property (gobject_class, PROP_EV,
896                                     g_param_spec_pointer ("ev", "Emotion_Gstreamer_Video",
897                                                           "The internal data of the emotion object",
898                                                           G_PARAM_READWRITE));
899
900    gobject_class->dispose = evas_video_sink_dispose;
901
902    gstbase_sink_class->set_caps = evas_video_sink_set_caps;
903    gstbase_sink_class->stop = evas_video_sink_stop;
904    gstbase_sink_class->start = evas_video_sink_start;
905    gstbase_sink_class->unlock = evas_video_sink_unlock;
906    gstbase_sink_class->unlock_stop = evas_video_sink_unlock_stop;
907    gstbase_sink_class->render = evas_video_sink_render;
908    gstbase_sink_class->preroll = evas_video_sink_preroll;
909
910    evas_video_sink_signals[REPAINT_REQUESTED] = g_signal_new("repaint-requested",
911                                                              G_TYPE_FROM_CLASS(klass),
912                                                              (GSignalFlags)(G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
913                                                              0,
914                                                              0,
915                                                              0,
916                                                              marshal_VOID__MINIOBJECT,
917                                                              G_TYPE_NONE, 1, GST_TYPE_BUFFER);
918 }
919
920 gboolean
921 gstreamer_plugin_init (GstPlugin * plugin)
922 {
923    return gst_element_register (plugin,
924                                 "emotion-sink",
925                                 GST_RANK_NONE,
926                                 EVAS_TYPE_VIDEO_SINK);
927 }
928
929 static void
930 _emotion_gstreamer_pause(void *data, Ecore_Thread *thread)
931 {
932    Emotion_Gstreamer_Video *ev = data;
933
934    if (ecore_thread_check(thread) || !ev->pipeline) return ;
935
936    gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
937 }
938
939 static void
940 _emotion_gstreamer_cancel(void *data, Ecore_Thread *thread)
941 {
942    Emotion_Gstreamer_Video *ev = data;
943
944    ev->threads = eina_list_remove(ev->threads, thread);
945
946    if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
947
948    if (ev->in == ev->out && ev->threads == NULL && ev->delete_me)
949      em_shutdown(ev);
950 }
951
952 static void
953 _emotion_gstreamer_end(void *data, Ecore_Thread *thread)
954 {
955    Emotion_Gstreamer_Video *ev = data;
956
957    ev->threads = eina_list_remove(ev->threads, thread);
958
959    if (ev->play)
960      {
961         gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
962         ev->play_started = 1;
963      }
964
965    if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
966
967    if (ev->in == ev->out && ev->threads == NULL && ev->delete_me)
968      em_shutdown(ev);
969    else
970      _emotion_gstreamer_video_pipeline_parse(data, EINA_TRUE);
971 }
972
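/* EVAS_CALLBACK_RENDER_FLUSH_POST handler: once Evas has finished drawing, the
 * previously displayed GstBuffer can finally be unreffed (only when kill_buffer
 * is set, i.e. for colorspaces whose row pointers reference the buffer directly). */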
973 static void
974 _on_post_clear(void *data, Evas *e __UNUSED__, void *event_info __UNUSED__)
975 {
976    Emotion_Gstreamer_Video *ev = data;
977
978    if (!ev->kill_buffer) return ;
979
980    if (ev->last_buffer) gst_buffer_unref(ev->last_buffer);
981    ev->last_buffer = NULL;
982 }
983
984 static void
985 _on_resize_fill(void *data, Evas *e __UNUSED__, Evas_Object *obj, void *event_info __UNUSED__)
986 {
987    Emotion_Gstreamer_Video *ev = data;
988
989    if (ev->samsung)
990      evas_object_image_fill_set(obj, 0, 0, ev->fill.width, ev->fill.height);
991 }
992
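/* Build a playbin2 pipeline with emotion-sink as its video-sink, wire the
 * resize and render-flush callbacks on the Evas object, and move the pipeline
 * to PAUSED from an Ecore thread so the main loop is not blocked. */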
993 GstElement *
994 gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
995                          Evas_Object *o,
996                          const char *uri)
997 {
998    GstElement *playbin;
999    GstElement *sink;
1000    Evas_Object *obj;
1001    int flags;
1002
1003    obj = emotion_object_image_get(o);
1004    if (!obj)
1005      {
1006         ERR("No Evas_Object specified");
1007         return NULL;
1008      }
1009
1010    evas_object_event_callback_del_full(obj, EVAS_CALLBACK_RESIZE, _on_resize_fill, ev);
1011    evas_event_callback_del_full(evas_object_evas_get(obj), EVAS_CALLBACK_RENDER_FLUSH_POST, _on_post_clear, ev);
1012
1013    playbin = gst_element_factory_make("playbin2", "playbin");
1014    if (!playbin)
1015      {
1016         ERR("Unable to create 'playbin2' GstElement.");
1017         return NULL;
1018      }
1019
1020    sink = gst_element_factory_make("emotion-sink", "sink");
1021    if (!sink)
1022      {
1023         ERR("Unable to create 'emotion-sink' GstElement.");
1024         goto unref_pipeline;
1025      }
1026
1027 #define GST_PLAY_FLAG_NATIVE_VIDEO  (1 << 6)
1028 #define GST_PLAY_FLAG_DOWNLOAD      (1 << 7)
1029
1030    g_object_set(G_OBJECT(sink), "evas-object", obj, NULL);
1031    g_object_set(G_OBJECT(sink), "ev", ev, NULL);
1032
1033    evas_object_image_pixels_get_callback_set(obj, NULL, NULL);
1034
1035    g_object_get(G_OBJECT(playbin), "flags", &flags, NULL);
1036    g_object_set(G_OBJECT(playbin), "flags", flags | GST_PLAY_FLAG_NATIVE_VIDEO | GST_PLAY_FLAG_DOWNLOAD, NULL);
1037    g_object_set(G_OBJECT(playbin), "video-sink", sink, NULL);
1038    g_object_set(G_OBJECT(playbin), "uri", uri, NULL);
1039
1040    evas_object_image_pixels_get_callback_set(obj, NULL, NULL);
1041    evas_object_event_callback_add(obj, EVAS_CALLBACK_RESIZE, _on_resize_fill, ev);
1042    evas_event_callback_add(evas_object_evas_get(obj), EVAS_CALLBACK_RENDER_FLUSH_POST, _on_post_clear, ev);
1043
1044    ev->pipeline = playbin;
1045    ev->sink = sink;
1046    ev->threads = eina_list_append(ev->threads,
1047                                   ecore_thread_run(_emotion_gstreamer_pause,
1048                                                    _emotion_gstreamer_end,
1049                                                    _emotion_gstreamer_cancel,
1050                                                    ev));
1051
1052    /** NOTE: you need to set GST_DEBUG_DUMP_DOT_DIR=/tmp and EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp' */
1053    /** then call: dot -Tpng -o emotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
1054    if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(playbin), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
1055
1056    return playbin;
1057
1058  unref_pipeline:
1059    gst_object_unref(playbin);
1060    return NULL;
1061 }