[profile/ivi/emotion.git] / src / modules / gstreamer / emotion_sink.c
1 #ifdef HAVE_CONFIG_H
2 # include "config.h"
3 #endif
4
5 #include <Eina.h>
6 #include <Evas.h>
7 #include <Ecore.h>
8
9 #define HTTP_STREAM 0
10 #define RTSP_STREAM 1
11 #include <glib.h>
12 #include <gst/gst.h>
13 #include <glib-object.h>
14 #include <gst/video/gstvideosink.h>
15 #include <gst/video/video.h>
16
17 #ifdef HAVE_ECORE_X
18 # include <Ecore_X.h>
19 # include <Ecore_Evas.h>
20 # ifdef HAVE_XOVERLAY_H
21 #  include <gst/interfaces/xoverlay.h>
22 # endif
23 #endif
24
25 #include "Emotion.h"
26 #include "emotion_private.h"
27 #include "emotion_gstreamer.h"
28
29 static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE("sink",
30                                                                    GST_PAD_SINK, GST_PAD_ALWAYS,
31                                                                    GST_STATIC_CAPS(GST_VIDEO_CAPS_YUV("{ I420, YV12, YUY2, NV12, ST12, TM12 }") ";"
32                                                                                    GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_BGR ";" GST_VIDEO_CAPS_BGRA));
33
34 GST_DEBUG_CATEGORY_STATIC(evas_video_sink_debug);
35 #define GST_CAT_DEFAULT evas_video_sink_debug
36
37 enum {
38   REPAINT_REQUESTED,
39   LAST_SIGNAL
40 };
41
42 enum {
43   PROP_0,
44   PROP_EVAS_OBJECT,
45   PROP_WIDTH,
46   PROP_HEIGHT,
47   PROP_EV,
48   PROP_LAST
49 };
50
51 static guint evas_video_sink_signals[LAST_SIGNAL] = { 0, };
52
53 #define _do_init(bla)                                   \
54   GST_DEBUG_CATEGORY_INIT(evas_video_sink_debug,        \
55                           "emotion-sink",               \
56                           0,                            \
57                           "emotion video sink")
58
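/* GST_BOILERPLATE_FULL (GStreamer 0.10) generates the GObject/GType boilerplate
 * for EvasVideoSink and runs _do_init() to register the debug category above. */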
59 GST_BOILERPLATE_FULL(EvasVideoSink,
60                      evas_video_sink,
61                      GstVideoSink,
62                      GST_TYPE_VIDEO_SINK,
63                      _do_init);
64
65
66 static void unlock_buffer_mutex(EvasVideoSinkPrivate* priv);
67 static void evas_video_sink_main_render(void *data);
68 static void evas_video_sink_samsung_main_render(void *data);
69
70 static void
71 evas_video_sink_base_init(gpointer g_class)
72 {
73    GstElementClass* element_class;
74
75    element_class = GST_ELEMENT_CLASS(g_class);
76    gst_element_class_add_pad_template(element_class, gst_static_pad_template_get(&sinktemplate));
77    gst_element_class_set_details_simple(element_class, "Evas video sink",
78                                         "Sink/Video", "Sends video data from a GStreamer pipeline to an Evas object",
79                                         "Vincent Torri <vtorri@univ-evry.fr>");
80 }
81
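/* Instance init: clear the private state and create the Eina lock/condition
 * pair used to hand frames from the streaming thread to the main loop. */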
82 static void
83 evas_video_sink_init(EvasVideoSink* sink, EvasVideoSinkClass* klass __UNUSED__)
84 {
85    EvasVideoSinkPrivate* priv;
86
87    INF("sink init");
88    sink->priv = priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, EVAS_TYPE_VIDEO_SINK, EvasVideoSinkPrivate);
89    priv->o = NULL;
90    priv->width = 0;
91    priv->height = 0;
92    priv->func = NULL;
93    priv->eformat = EVAS_COLORSPACE_ARGB8888;
94    priv->samsung = EINA_FALSE;
95    eina_lock_new(&priv->m);
96    eina_condition_new(&priv->c, &priv->m);
97    priv->unlocked = EINA_FALSE;
98 }
99
100 /**** Object methods ****/
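/* EVAS_CALLBACK_FREE handler: if the tracked Evas_Object is deleted behind our
 * back, drop the pointer so later renders become no-ops instead of crashing. */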
101 static void
102 _cleanup_priv(void *data, Evas *e __UNUSED__, Evas_Object *obj, void *event_info __UNUSED__)
103 {
104    EvasVideoSinkPrivate* priv;
105
106    priv = data;
107
108    eina_lock_take(&priv->m);
109    if (priv->o == obj)
110      priv->o = NULL;
111    eina_lock_release(&priv->m);
112 }
113
114 static void
115 evas_video_sink_set_property(GObject * object, guint prop_id,
116                              const GValue * value, GParamSpec * pspec)
117 {
118    EvasVideoSink* sink;
119    EvasVideoSinkPrivate* priv;
120
121    sink = EVAS_VIDEO_SINK (object);
122    priv = sink->priv;
123
124    switch (prop_id) {
125     case PROP_EVAS_OBJECT:
126        eina_lock_take(&priv->m);
127        evas_object_event_callback_del(priv->o, EVAS_CALLBACK_FREE, _cleanup_priv);
128        priv->o = g_value_get_pointer (value);
129        INF("sink set Evas_Object %p.", priv->o);
130        evas_object_event_callback_add(priv->o, EVAS_CALLBACK_FREE, _cleanup_priv, priv);
131        eina_lock_release(&priv->m);
132        break;
133     case PROP_EV:
134        INF("sink set ev.");
135        eina_lock_take(&priv->m);
136        priv->ev = g_value_get_pointer (value);
137        if (priv->ev)
138          priv->ev->samsung = EINA_TRUE;
139        eina_lock_release(&priv->m);
140        break;
141     default:
142        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
143        ERR("invalid property");
144        break;
145    }
146 }
147
148 static void
149 evas_video_sink_get_property(GObject * object, guint prop_id,
150                              GValue * value, GParamSpec * pspec)
151 {
152    EvasVideoSink* sink;
153    EvasVideoSinkPrivate* priv;
154
155    sink = EVAS_VIDEO_SINK (object);
156    priv = sink->priv;
157
158    switch (prop_id) {
159     case PROP_EVAS_OBJECT:
160        INF("sink get property.");
161        eina_lock_take(&priv->m);
162        g_value_set_pointer(value, priv->o);
163        eina_lock_release(&priv->m);
164        break;
165     case PROP_WIDTH:
166        INF("sink get width.");
167        eina_lock_take(&priv->m);
168        g_value_set_int(value, priv->width);
169        eina_lock_release(&priv->m);
170        break;
171     case PROP_HEIGHT:
172        INF("sink get height.");
173        eina_lock_take(&priv->m);
174        g_value_set_int (value, priv->height);
175        eina_lock_release(&priv->m);
176        break;
177     case PROP_EV:
178        INF("sink get ev.");
179        eina_lock_take(&priv->m);
180        g_value_set_pointer (value, priv->ev);
181        eina_lock_release(&priv->m);
182        break;
183     default:
184        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
185        ERR("invalid property");
186        break;
187    }
188 }
189
190 static void
191 evas_video_sink_dispose(GObject* object)
192 {
193    EvasVideoSink* sink;
194    EvasVideoSinkPrivate* priv;
195
196    INF("dispose.");
197
198    sink = EVAS_VIDEO_SINK(object);
199    priv = sink->priv;
200
201    eina_lock_free(&priv->m);
202    eina_condition_free(&priv->c);
203
204    G_OBJECT_CLASS(parent_class)->dispose(object);
205 }
206
207
208 /**** BaseSink methods ****/
209
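/* Caps negotiation: read width/height plus either a YUV fourcc (including the
 * vendor-specific ST12 format used by Samsung hardware decoders) or an RGB
 * format, and pick the matching Evas colorspace and conversion function. */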
210 gboolean evas_video_sink_set_caps(GstBaseSink *bsink, GstCaps *caps)
211 {
212    EvasVideoSink* sink;
213    EvasVideoSinkPrivate* priv;
214    GstStructure *structure;
215    GstVideoFormat format;
216    guint32 fourcc;
217    unsigned int i;
218
219    sink = EVAS_VIDEO_SINK(bsink);
220    priv = sink->priv;
221
222    structure = gst_caps_get_structure(caps, 0);
223
224    if (gst_structure_get_int(structure, "width", (int*) &priv->width)
225        && gst_structure_get_int(structure, "height", (int*) &priv->height)
226        && gst_structure_get_fourcc(structure, "format", &fourcc))
227      {
228         priv->source_height = priv->height;
229
230         for (i = 0; colorspace_fourcc_convertion[i].name != NULL; ++i)
231           if (fourcc == colorspace_fourcc_convertion[i].fourcc)
232             {
233                fprintf(stderr, "Found '%s'\n", colorspace_fourcc_convertion[i].name);
234                priv->eformat = colorspace_fourcc_convertion[i].eformat;
235                priv->func = colorspace_fourcc_convertion[i].func;
236                if (colorspace_fourcc_convertion[i].force_height)
237                  {
238                     priv->height = (priv->height >> 1) << 1;
239                  }
240                if (priv->ev)
241                  priv->ev->kill_buffer = EINA_TRUE;
242                return TRUE;
243             }
244
245         if (fourcc == GST_MAKE_FOURCC('S', 'T', '1', '2'))
246           {
247              fprintf(stderr, "Found '%s'\n", "ST12");
248              priv->eformat = EVAS_COLORSPACE_YCBCR420TM12601_PL;
249              priv->samsung = EINA_TRUE;
250              priv->func = NULL;
251              if (priv->ev)
252                {
253                   priv->ev->samsung = EINA_TRUE;
254                   priv->ev->kill_buffer = EINA_TRUE;
255                }
256              return TRUE;
257           }
258      }
259
260    INF("fallback code!");
261    if (!gst_video_format_parse_caps(caps, &format, (int*) &priv->width, (int*) &priv->height))
262      {
263         ERR("Unable to parse caps.");
264         return FALSE;
265      }
266
267    priv->source_height = priv->height;
268
269    for (i = 0; colorspace_format_convertion[i].name != NULL; ++i)
270      if (format == colorspace_format_convertion[i].format)
271        {
272           fprintf(stderr, "Found '%s'\n", colorspace_format_convertion[i].name);
273           priv->eformat = colorspace_format_convertion[i].eformat;
274           priv->func = colorspace_format_convertion[i].func;
275           if (priv->ev)
276             priv->ev->kill_buffer = EINA_FALSE;
277           return TRUE;
278        }
279
280    ERR("unsupported: %d\n", format);
281    return FALSE;
282 }
283
284 static gboolean
285 evas_video_sink_start(GstBaseSink* base_sink)
286 {
287    EvasVideoSinkPrivate* priv;
288    gboolean res = TRUE;
289
290    INF("sink start");
291
292    priv = EVAS_VIDEO_SINK(base_sink)->priv;
293    eina_lock_take(&priv->m);
294    if (!priv->o)
295      res = FALSE;
296    else
297      priv->unlocked = EINA_FALSE;
298    eina_lock_release(&priv->m);
299    return res;
300 }
301
302 static gboolean
303 evas_video_sink_stop(GstBaseSink* base_sink)
304 {
305    EvasVideoSinkPrivate* priv = EVAS_VIDEO_SINK(base_sink)->priv;
306
307    INF("sink stop");
308
309    unlock_buffer_mutex(priv);
310    return TRUE;
311 }
312
313 static gboolean
314 evas_video_sink_unlock(GstBaseSink* object)
315 {
316    EvasVideoSink* sink;
317
318    INF("sink unlock");
319
320    sink = EVAS_VIDEO_SINK(object);
321
322    unlock_buffer_mutex(sink->priv);
323
324    return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock,
325                                        (object), TRUE);
326 }
327
328 static gboolean
329 evas_video_sink_unlock_stop(GstBaseSink* object)
330 {
331    EvasVideoSink* sink;
332    EvasVideoSinkPrivate* priv;
333
334    sink = EVAS_VIDEO_SINK(object);
335    priv = sink->priv;
336
337    INF("sink unlock stop");
338
339    eina_lock_take(&priv->m);
340    priv->unlocked = EINA_FALSE;
341    eina_lock_release(&priv->m);
342
343    return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock_stop,
344                                        (object), TRUE);
345 }
346
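/* Preroll: push the first decoded frame to the main loop asynchronously so a
 * paused pipeline still shows a picture; unlike render() it does not block. */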
347 static GstFlowReturn
348 evas_video_sink_preroll(GstBaseSink* bsink, GstBuffer* buffer)
349 {
350    Emotion_Gstreamer_Buffer *send;
351    EvasVideoSinkPrivate *priv;
352    EvasVideoSink *sink;
353
354    INF("sink preroll %p [%i]", GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
355
356    sink = EVAS_VIDEO_SINK(bsink);
357    priv = sink->priv;
358
359    if (GST_BUFFER_SIZE(buffer) <= 0 && !priv->samsung)
360      {
361         WRN("empty buffer");
362         return GST_FLOW_OK;
363      }
364
365    send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_TRUE);
366
367    if (send)
368      {
369         if (priv->samsung)
370           {
371              if (!priv->func)
372                {
373                   GstStructure *structure;
374                   GstCaps *caps;
375                   gboolean is_multiplane = FALSE;
376
377                   caps = gst_buffer_get_caps(buffer);
378                   structure = gst_caps_get_structure (caps, 0);
379                   gst_structure_get_boolean(structure, "multiplane", &is_multiplane);
380                   gst_caps_unref(caps);
381
382                   if (is_multiplane)
383                     priv->func = _evas_video_st12_multiplane;
384                   else
385                     priv->func = _evas_video_st12;
386                }
387
388              ecore_main_loop_thread_safe_call_async(evas_video_sink_samsung_main_render, send);
389           }
390         else
391           ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
392      }
393
394    return GST_FLOW_OK;
395 }
396
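/* Render: wrap the buffer in an Emotion_Gstreamer_Buffer, schedule the actual
 * drawing on the Ecore main loop, then wait on the condition until the main
 * loop has consumed the frame (or unlock() releases us). */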
397 static GstFlowReturn
398 evas_video_sink_render(GstBaseSink* bsink, GstBuffer* buffer)
399 {
400    Emotion_Gstreamer_Buffer *send;
401    EvasVideoSinkPrivate *priv;
402    EvasVideoSink *sink;
403
404    INF("sink render %p", buffer);
405
406    sink = EVAS_VIDEO_SINK(bsink);
407    priv = sink->priv;
408
409    eina_lock_take(&priv->m);
410
411    if (priv->unlocked) {
412       ERR("LOCKED");
413       eina_lock_release(&priv->m);
414       return GST_FLOW_OK;
415    }
416
417    send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_FALSE);
418    if (!send) {
419       eina_lock_release(&priv->m);
420       return GST_FLOW_ERROR;
421    }
422
423    if (priv->samsung)
424      {
425         if (!priv->func)
426           {
427              GstStructure *structure;
428              GstCaps *caps;
429              gboolean is_multiplane = FALSE;
430
431              caps = gst_buffer_get_caps(buffer);
432              structure = gst_caps_get_structure (caps, 0);
433              gst_structure_get_boolean(structure, "multiplane", &is_multiplane);
434              gst_caps_unref(caps);
435
436              if (is_multiplane)
437                priv->func = _evas_video_st12_multiplane;
438              else
439                priv->func = _evas_video_st12;
440           }
441
442         ecore_main_loop_thread_safe_call_async(evas_video_sink_samsung_main_render, send);
443      }
444    else
445      ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
446
447    eina_condition_wait(&priv->c);
448    eina_lock_release(&priv->m);
449
450    return GST_FLOW_OK;
451 }
452
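/* Debug helper: when debug_fps is enabled, print the frame count and the
 * measured FPS roughly every half second. */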
453 static void
454 _update_emotion_fps(Emotion_Gstreamer_Video *ev)
455 {
456    double tim;
457
458    if (!debug_fps) return;
459
460    tim = ecore_time_get();
461    ev->frames++;
462
463    if (ev->rlapse == 0.0)
464      {
465         ev->rlapse = tim;
466         ev->flapse = ev->frames;
467      }
468    else if ((tim - ev->rlapse) >= 0.5)
469      {
470         printf("FRAME: %i, FPS: %3.1f\n",
471                ev->frames,
472                (ev->frames - ev->flapse) / (tim - ev->rlapse));
473         ev->rlapse = tim;
474         ev->flapse = ev->frames;
475      }
476 }
477
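/* Main-loop renderer for the Samsung ST12 path: the buffer (or its malloc data)
 * carries either a GstMultiPlaneImageBuffer or an SCMN_IMGB descriptor, so
 * stride/elevation are read from it before filling the Evas_Object image. */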
478 static void
479 evas_video_sink_samsung_main_render(void *data)
480 {
481    Emotion_Gstreamer_Buffer *send;
482    Emotion_Video_Stream *vstream;
483    EvasVideoSinkPrivate *priv = NULL;
484    GstBuffer* buffer;
485    unsigned char *evas_data;
486    const guint8 *gst_data;
487    GstFormat fmt = GST_FORMAT_TIME;
488    gint64 pos;
489    Eina_Bool preroll = EINA_FALSE;
490    int stride, elevation;
491    Evas_Coord w, h;
492
493    send = data;
494
495    if (!send) goto exit_point;
496
497    priv = send->sink;
498    buffer = send->frame;
499    preroll = send->preroll;
500
501    /* frame after cleanup */
502    if (!preroll && !send->ev->last_buffer)
503      {
504         priv = NULL;
505         goto exit_point;
506      }
507
508    if (!priv || !priv->o || priv->unlocked)
509      goto exit_point;
510
511    if (send->ev->send)
512      {
513         emotion_gstreamer_buffer_free(send->ev->send);
514         send->ev->send = NULL;
515      }
516
517    if (!send->ev->stream && !send->force)
518      {
519         send->ev->send = send;
520         _emotion_frame_new(send->ev->obj);
521         goto exit_stream;
522      }
523
524    _emotion_gstreamer_video_pipeline_parse(send->ev, EINA_TRUE);
525
526    /* Get the stride to compute the right size and then fill the object properly */
527    /* Y plane in [0] and UV plane in [1] */
528    if (priv->func == _evas_video_st12_multiplane)
529      {
530         const GstMultiPlaneImageBuffer *mp_buf = (const GstMultiPlaneImageBuffer *) buffer;
531
532         stride = mp_buf->stride[0];
533         elevation = mp_buf->elevation[0];
534         priv->width = mp_buf->width[0];
535         priv->height = mp_buf->height[0];
536
537         gst_data = (const guint8 *) mp_buf;
538      }
539    else
540      {
541         const SCMN_IMGB *imgb = (const SCMN_IMGB *) GST_BUFFER_MALLOCDATA(buffer);
542
543         stride = imgb->stride[0];
544         elevation = imgb->elevation[0];
545         priv->width = imgb->width[0];
546         priv->height = imgb->height[0];
547
548         gst_data = (const guint8 *) imgb;
549      }
550
551    evas_object_geometry_get(priv->o, NULL, NULL, &w, &h);
552
553    send->ev->fill.width = (double) stride / priv->width;
554    send->ev->fill.height = (double) elevation / priv->height;
555
556    evas_object_image_alpha_set(priv->o, 0);
557    evas_object_image_colorspace_set(priv->o, priv->eformat);
558    evas_object_image_size_set(priv->o, stride, elevation);
559
560    _update_emotion_fps(send->ev);
561
562    evas_data = evas_object_image_data_get(priv->o, 1);
563
564    if (priv->func)
565      priv->func(evas_data, gst_data, stride, elevation, elevation);
566    else
567      WRN("No way to decode %x colorspace!", priv->eformat);
568
569    evas_object_image_data_set(priv->o, evas_data);
570    evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
571    evas_object_image_pixels_dirty_set(priv->o, 0);
572
573    if (!preroll && send->ev->play_started)
574      {
575         _emotion_playback_started(send->ev->obj);
576         send->ev->play_started = 0;
577      }
578
579    if (!send->force)
580      {
581         _emotion_frame_new(send->ev->obj);
582      }
583
584    vstream = eina_list_nth(send->ev->video_streams, send->ev->video_stream_nbr - 1);
585
586    gst_element_query_position(send->ev->pipeline, &fmt, &pos);
587    send->ev->position = (double)pos / (double)GST_SECOND;
588
589    if (vstream)
590      {
591         vstream->width = priv->width;
592         vstream->height = priv->height;
593
594         _emotion_video_pos_update(send->ev->obj, send->ev->position, vstream->length_time);
595      }
596
597    send->ev->ratio = (double) priv->width / (double) priv->height;
598    _emotion_frame_refill(send->ev->obj, send->ev->fill.width, send->ev->fill.height);
599    _emotion_frame_resize(send->ev->obj, priv->width, priv->height, send->ev->ratio);
600
601    buffer = gst_buffer_ref(buffer);
602    if (send->ev->last_buffer) gst_buffer_unref(send->ev->last_buffer);
603    send->ev->last_buffer = buffer;
604
605  exit_point:
606    emotion_gstreamer_buffer_free(send);
607
608  exit_stream:
609    if (priv)
610      {
611         if (preroll || !priv->o) return;
612         
613         if (!priv->unlocked)
614           eina_condition_signal(&priv->c);
615      }
616 }
617
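/* Main-loop renderer for the generic path: convert the GstBuffer data with the
 * negotiated colorspace function, update the Evas image, report position and
 * size to emotion, and keep a reference to the buffer as last_buffer. */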
618 static void
619 evas_video_sink_main_render(void *data)
620 {
621    Emotion_Gstreamer_Buffer *send;
622    Emotion_Gstreamer_Video *ev = NULL;
623    Emotion_Video_Stream *vstream;
624    EvasVideoSinkPrivate *priv = NULL;
625    GstBuffer *buffer;
626    unsigned char *evas_data;
627    GstFormat fmt = GST_FORMAT_TIME;
628    gint64 pos;
629    Eina_Bool preroll = EINA_FALSE;
630
631    send = data;
632
633    if (!send) goto exit_point;
634
635    priv = send->sink;
636    buffer = send->frame;
637    preroll = send->preroll;
638    ev = send->ev;
639
640    /* frame after cleanup */
641    if (!preroll && !ev->last_buffer)
642      {
643         priv = NULL;
644         goto exit_point;
645      }
646
647    if (!priv || !priv->o || priv->unlocked)
648      goto exit_point;
649
650    if (ev->send && send != ev->send)
651      {
652         emotion_gstreamer_buffer_free(ev->send);
653         ev->send = NULL;
654      }
655
656    if (!ev->stream && !send->force)
657      {
658         ev->send = send;
659         _emotion_frame_new(ev->obj);
660         evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
661         goto exit_stream;
662      }
663
664    _emotion_gstreamer_video_pipeline_parse(ev, EINA_TRUE);
665
666    INF("sink main render [%i, %i] (source height: %i)", priv->width, priv->height, priv->source_height);
667
668    evas_object_image_alpha_set(priv->o, 0);
669    evas_object_image_colorspace_set(priv->o, priv->eformat);
670    evas_object_image_size_set(priv->o, priv->width, priv->height);
671
672    evas_data = evas_object_image_data_get(priv->o, 1);
673
674    if (priv->func)
675      priv->func(evas_data, GST_BUFFER_DATA(buffer), priv->width, priv->source_height, priv->height);
676    else
677      WRN("No way to decode %x colorspace!", priv->eformat);
678
679    evas_object_image_data_set(priv->o, evas_data);
680    evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
681    evas_object_image_pixels_dirty_set(priv->o, 0);
682
683    _update_emotion_fps(ev);
684
685    if (!preroll && ev->play_started)
686      {
687         _emotion_playback_started(ev->obj);
688         ev->play_started = 0;
689      }
690
691    if (!send->force)
692      {
693         _emotion_frame_new(ev->obj);
694      }
695
696    gst_element_query_position(ev->pipeline, &fmt, &pos);
697    ev->position = (double)pos / (double)GST_SECOND;
698
699    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
700
701    if (vstream)
702      {
703        vstream->width = priv->width;
704        vstream->height = priv->height;
705        _emotion_video_pos_update(ev->obj, ev->position, vstream->length_time);
706      }
707
708    ev->ratio = (double) priv->width / (double) priv->height;
709
710    _emotion_frame_resize(ev->obj, priv->width, priv->height, ev->ratio);
711
712    buffer = gst_buffer_ref(buffer);
713    if (ev->last_buffer) gst_buffer_unref(ev->last_buffer);
714    ev->last_buffer = buffer;
715
716  exit_point:
717    emotion_gstreamer_buffer_free(send);
718
719  exit_stream:
720    if (priv)
721      {
722         if (preroll || !priv->o) return;
723         
724         if (!priv->unlocked)
725           eina_condition_signal(&priv->c);
726      }
727 }
728
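/* Mark the sink as unlocked and wake any streaming thread blocked in render(). */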
729 static void
730 unlock_buffer_mutex(EvasVideoSinkPrivate* priv)
731 {
732    priv->unlocked = EINA_TRUE;
733
734    eina_condition_signal(&priv->c);
735 }
736
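/* Custom marshaller for the "repaint-requested" signal: its single argument is
 * a GstMiniObject (the GstBuffer), which stock g_cclosure marshallers cannot carry. */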
737 static void
738 marshal_VOID__MINIOBJECT(GClosure * closure, GValue * return_value __UNUSED__,
739                          guint n_param_values, const GValue * param_values,
740                          gpointer invocation_hint __UNUSED__, gpointer marshal_data)
741 {
742    typedef void (*marshalfunc_VOID__MINIOBJECT) (gpointer obj, gpointer arg1, gpointer data2);
743    marshalfunc_VOID__MINIOBJECT callback;
744    GCClosure *cc;
745    gpointer data1, data2;
746
747    cc = (GCClosure *) closure;
748
749    g_return_if_fail(n_param_values == 2);
750
751    if (G_CCLOSURE_SWAP_DATA(closure)) {
752       data1 = closure->data;
753       data2 = g_value_peek_pointer(param_values + 0);
754    } else {
755       data1 = g_value_peek_pointer(param_values + 0);
756       data2 = closure->data;
757    }
758    callback = (marshalfunc_VOID__MINIOBJECT) (marshal_data ? marshal_data : cc->callback);
759
760    callback(data1, gst_value_get_mini_object(param_values + 1), data2);
761 }
762
763 static void
764 evas_video_sink_class_init(EvasVideoSinkClass* klass)
765 {
766    GObjectClass* gobject_class;
767    GstBaseSinkClass* gstbase_sink_class;
768
769    gobject_class = G_OBJECT_CLASS(klass);
770    gstbase_sink_class = GST_BASE_SINK_CLASS(klass);
771
772    g_type_class_add_private(klass, sizeof(EvasVideoSinkPrivate));
773
774    gobject_class->set_property = evas_video_sink_set_property;
775    gobject_class->get_property = evas_video_sink_get_property;
776
777    g_object_class_install_property (gobject_class, PROP_EVAS_OBJECT,
778                                     g_param_spec_pointer ("evas-object", "Evas Object",
779                                                           "The Evas object on which the video will be displayed",
780                                                           G_PARAM_READWRITE));
781
782    g_object_class_install_property (gobject_class, PROP_WIDTH,
783                                     g_param_spec_int ("width", "Width",
784                                                       "The width of the video",
785                                                       0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
786
787    g_object_class_install_property (gobject_class, PROP_HEIGHT,
788                                     g_param_spec_int ("height", "Height",
789                                                       "The height of the video",
790                                                       0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
791    g_object_class_install_property (gobject_class, PROP_EV,
792                                     g_param_spec_pointer ("ev", "Emotion_Gstreamer_Video",
793                                                           "The internal data of the emotion object",
794                                                           G_PARAM_READWRITE));
795
796    gobject_class->dispose = evas_video_sink_dispose;
797
798    gstbase_sink_class->set_caps = evas_video_sink_set_caps;
799    gstbase_sink_class->stop = evas_video_sink_stop;
800    gstbase_sink_class->start = evas_video_sink_start;
801    gstbase_sink_class->unlock = evas_video_sink_unlock;
802    gstbase_sink_class->unlock_stop = evas_video_sink_unlock_stop;
803    gstbase_sink_class->render = evas_video_sink_render;
804    gstbase_sink_class->preroll = evas_video_sink_preroll;
805
806    evas_video_sink_signals[REPAINT_REQUESTED] = g_signal_new("repaint-requested",
807                                                              G_TYPE_FROM_CLASS(klass),
808                                                              (GSignalFlags)(G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
809                                                              0,
810                                                              0,
811                                                              0,
812                                                              marshal_VOID__MINIOBJECT,
813                                                              G_TYPE_NONE, 1, GST_TYPE_BUFFER);
814 }
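/* Hypothetical client-side sketch (not part of this file): the signal above can
 * be consumed with something like
 *   g_signal_connect(G_OBJECT(sink), "repaint-requested",
 *                    G_CALLBACK(on_repaint), user_data);
 * where on_repaint(EvasVideoSink *sink, GstBuffer *buffer, gpointer data)
 * receives the buffer marshalled by marshal_VOID__MINIOBJECT. */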
815
816 gboolean
817 gstreamer_plugin_init (GstPlugin * plugin)
818 {
819    return gst_element_register (plugin,
820                                 "emotion-sink",
821                                 GST_RANK_NONE,
822                                 EVAS_TYPE_VIDEO_SINK);
823 }
824
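/* Ecore_Thread worker: move the pipeline to PAUSED and wait for the state
 * change; live sources answering NO_PREROLL are pushed to PLAYING instead. */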
825 static void
826 _emotion_gstreamer_pause(void *data, Ecore_Thread *thread)
827 {
828    Emotion_Gstreamer_Video *ev = data;
829    GstStateChangeReturn res;
830
831    if (ecore_thread_check(thread) || !ev->pipeline) return;
832
833    gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
834    res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
835    if (res == GST_STATE_CHANGE_NO_PREROLL)
836      {
837         gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
838         gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
839      }
840 }
841
842 static void
843 _emotion_gstreamer_cancel(void *data, Ecore_Thread *thread)
844 {
845    Emotion_Gstreamer_Video *ev = data;
846
847    ev->threads = eina_list_remove(ev->threads, thread);
848
849    if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
850
851    if (ev->in == ev->out && ev->delete_me)
852      em_shutdown(ev);
853 }
854
855 static void
856 _emotion_gstreamer_end(void *data, Ecore_Thread *thread)
857 {
858    Emotion_Gstreamer_Video *ev = data;
859
860    ev->threads = eina_list_remove(ev->threads, thread);
861
862    if (ev->play)
863      {
864         gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
865         ev->play_started = 1;
866      }
867
868    if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
869
870    if (ev->in == ev->out && ev->delete_me)
871      em_shutdown(ev);
872    else
873      _emotion_gstreamer_video_pipeline_parse(data, EINA_TRUE);
874 }
875
876 static void
877 _video_resize(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__,
878               Evas_Coord w, Evas_Coord h)
879 {
880 #ifdef HAVE_ECORE_X
881    Emotion_Gstreamer_Video *ev = data;
882
883    ecore_x_window_resize(ev->win, w, h);
884 #endif
885    fprintf(stderr, "resize: %i, %i\n", w, h);
886 }
887
888 static void
889 _video_move(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__,
890             Evas_Coord x, Evas_Coord y)
891 {
892 #ifdef HAVE_ECORE_X
893    Emotion_Gstreamer_Video *ev = data;
894    unsigned int pos[2];
895
896    fprintf(stderr, "move: %i, %i\n", x, y);
897    pos[0] = x; pos[1] = y;
898    ecore_x_window_prop_card32_set(ev->win, ECORE_X_ATOM_E_VIDEO_POSITION, pos, 2);
899 #endif
900 }
901
902 #if 0
903 /* Much better idea to always feed the XvImageSink and let it handle optimizing the rendering as we do */
904 static void
905 _block_pad_unlink_cb(GstPad *pad, gboolean blocked, gpointer user_data)
906 {
907    if (blocked)
908      {
909         Emotion_Gstreamer_Video *ev = user_data;
910         GstEvent *gev;
911
912         gst_pad_unlink(ev->xvteepad, ev->xvpad);
913         gev = gst_event_new_eos();
914         gst_pad_send_event(ev->xvpad, gev);
915         gst_pad_set_blocked_async(pad, FALSE, _block_pad_unlink_cb, NULL);
916      }
917 }
918
919 static void
920 _block_pad_link_cb(GstPad *pad, gboolean blocked, gpointer user_data)
921 {
922    if (blocked)
923      {
924         Emotion_Gstreamer_Video *ev = user_data;
925
926         gst_pad_link(ev->xvteepad, ev->xvpad);
927         if (ev->play)
928           gst_element_set_state(ev->xvsink, GST_STATE_PLAYING);
929         else
930           gst_element_set_state(ev->xvsink, GST_STATE_PAUSED);
931         gst_pad_set_blocked_async(pad, FALSE, _block_pad_link_cb, NULL);
932      }
933 }
934 #endif
935
936 static void
937 _video_show(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__)
938 {
939 #ifdef HAVE_ECORE_X
940    Emotion_Gstreamer_Video *ev = data;
941
942    fprintf(stderr, "show xv\n");
943    ecore_x_window_show(ev->win);
944 #endif
945    /* gst_pad_set_blocked_async(ev->xvteepad, TRUE, _block_pad_link_cb, ev); */
946 }
947
948 static void
949 _video_hide(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__)
950 {
951 #ifdef HAVE_ECORE_X
952    Emotion_Gstreamer_Video *ev = data;
953
954    fprintf(stderr, "hide xv\n");
955    ecore_x_window_hide(ev->win);
956 #endif
957    /* gst_pad_set_blocked_async(ev->xvteepad, TRUE, _block_pad_unlink_cb, ev); */
958 }
959
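/* Evas video-surface callback: if a frame was parked in ev->send (no stream
 * yet), force it through the software render path now. */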
960 static void
961 _video_update_pixels(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__)
962 {
963    Emotion_Gstreamer_Video *ev = data;
964    Emotion_Gstreamer_Buffer *send;
965
966    if (!ev->send) return;
967
968    send = ev->send;
969    send->force = EINA_TRUE;
970    ev->send = NULL;
971    evas_video_sink_main_render(send);
972 }
973
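/* Resize callback on the Evas image: when the displayed area shrinks well below
 * the source size (< 80% of the pixels), try to insert a "fimcconvert" hardware
 * scaler between the tee and the emotion queue so conversion works on fewer pixels. */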
974 static void
975 _image_resize(void *data, Evas *e __UNUSED__, Evas_Object *obj, void *event_info __UNUSED__)
976 {
977    Emotion_Gstreamer_Video *ev = data;
978    Evas_Coord width, height;
979    int image_area, src_area;
980    double ratio;
981
982    evas_object_geometry_get(obj, NULL, NULL, &width, &height);
983    image_area = width * height;
984    src_area = ev->src_width * ev->src_height;
985    ratio = (double)image_area / (double)src_area;
986
987    // when the image is much smaller than the original video size,
988    // add a fimcconvert element to the pipeline
989    if (ratio < 0.8 && !ev->priority && !ev->convert)
990      {
991         GstElementFactory *cfactory = NULL;
992
993         cfactory = gst_element_factory_find("fimcconvert");
994         if (cfactory)
995           {
996              GstElement *convert = NULL;
997
998              convert = gst_element_factory_create(cfactory, NULL);
999              if (convert)
1000                {
1001                   GstElement *queue = NULL;
1002                   GstPad *pad, *teepad;
1003
1004                   queue = gst_bin_get_by_name(GST_BIN(ev->sink), "equeue");
1005                   gst_element_unlink(ev->tee, queue);
1006                   gst_element_release_request_pad(ev->tee, ev->eteepad);
1007                   gst_object_unref(ev->eteepad);
1008
1009                   gst_bin_add(GST_BIN(ev->sink), convert);
1010                   gst_element_link_many(ev->tee, convert, queue, NULL);
1011                   pad = gst_element_get_pad(convert, "sink");
1012                   teepad = gst_element_get_request_pad(ev->tee, "src%d");
1013                   gst_pad_link(teepad, pad);
1014                   gst_object_unref(pad);
1015
1016                   g_object_set(G_OBJECT(convert), "src-width", width, NULL);
1017                   g_object_set(G_OBJECT(convert), "src-height", height, NULL);
1018                   g_object_set(G_OBJECT(convert), "qos", TRUE, NULL);
1019                   gst_element_sync_state_with_parent(convert);
1020
1021                   ev->eteepad = teepad;
1022                   ev->convert = convert;
1023                }
1024           }
1025      }
1026    // TODO: when the image is resized (e.g. rotation), set the size again on fimcconvert
1027    // TODO: fimcconvert has an issue with resetting
1028    //else if (ev->convert)
1029    //  {
1030    //     g_object_set(G_OBJECT(ev->convert), "src-width", w, NULL);
1031    //     g_object_set(G_OBJECT(ev->convert), "src-height", h, NULL);
1032    //  }
1033 }
1034
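/* Build the playback graph (GStreamer 0.10):
 *
 *   playbin2 --video-sink--> [ bin: tee -> queue "equeue" -> emotion-sink ]
 *                                    \--> fakeeos( queue "xvqueue" -> xvimagesink )   (X11 only)
 *
 * The emotion-sink branch always exists; the xvimagesink branch is only added
 * when running on an X11 engine with window-manager video support. */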
1035 GstElement *
1036 gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
1037                          Evas_Object *o,
1038                          const char *uri)
1039 {
1040    GstElement *playbin;
1041    GstElement *bin = NULL;
1042    GstElement *esink = NULL;
1043    GstElement *xvsink = NULL;
1044    GstElement *tee = NULL;
1045    GstElement *queue = NULL;
1046    Evas_Object *obj;
1047    GstPad *pad;
1048    GstPad *teepad;
1049    int flags;
1050    const char *launch;
1051 #if defined HAVE_ECORE_X && defined HAVE_XOVERLAY_H
1052    const char *engine = NULL;
1053    Eina_List *engines;
1054 #endif
1055
1056    obj = emotion_object_image_get(o);
1057    if (!obj)
1058      {
1059         ERR("No Evas_Object specified");
1060         return NULL;
1061      }
1062
1063    if (!uri)
1064      return NULL;
1065
1066    launch = emotion_webcam_custom_get(uri);
1067    if (launch)
1068      {
1069         GError *error = NULL;
1070
1071         playbin = gst_parse_bin_from_description(launch, 1, &error);
1072         if (!playbin)
1073           {
1074              ERR("Unable to setup command : '%s' got error '%s'.", launch, error->message);
1075              g_error_free(error);
1076              return NULL;
1077           }
1078         if (error)
1079           {
1080              WRN("got recoverable error '%s' for command : '%s'.", error->message, launch);
1081              g_error_free(error);
1082           }
1083      }
1084    else
1085      {
1086         playbin = gst_element_factory_make("playbin2", "playbin");
1087         if (!playbin)
1088           {
1089              ERR("Unable to create 'playbin' GstElement.");
1090              return NULL;
1091           }
1092      }
1093
1094    bin = gst_bin_new(NULL);
1095    if (!bin)
1096      {
1097       ERR("Unable to create GstBin!");
1098        goto unref_pipeline;
1099      }
1100
1101    tee = gst_element_factory_make("tee", NULL);
1102    if (!tee)
1103      {
1104        ERR("Unable to create 'tee' GstElement.");
1105        goto unref_pipeline;
1106      }
1107
1108 #if defined HAVE_ECORE_X && defined HAVE_XOVERLAY_H
1109    if (window_manager_video)
1110      {
1111         Eina_List *l;
1112         const char *ename;
1113         
1114         engines = evas_render_method_list();
1115
1116         EINA_LIST_FOREACH(engines, l, ename)
1117           {
1118              if (evas_render_method_lookup(ename) == 
1119                  evas_output_method_get(evas_object_evas_get(obj)))
1120                {
1121                   engine = ename;
1122                   break;
1123                }
1124           }
1125
1126        if (ev->priority && engine && strstr(engine, "_x11") != NULL)
1127          {
1128            Ecore_Evas *ee;
1129            Evas_Coord x, y, w, h;
1130            Ecore_X_Window win;
1131            Ecore_X_Window parent;
1132
1133            evas_object_geometry_get(obj, &x, &y, &w, &h);
1134
1135            ee = ecore_evas_ecore_evas_get(evas_object_evas_get(obj));
1136
1137            if (w < 4) w = 4;
1138            if (h < 2) h = 2;
1139
1140            /* Here we really need the help of the window manager; this code will change when we update E17. */
1141            parent = (Ecore_X_Window) ecore_evas_window_get(ee);
1142            fprintf(stderr, "parent: %x\n", parent);
1143
1144            win = ecore_x_window_new(0, x, y, w, h);
1145            fprintf(stderr, "creating window: %x [%i, %i, %i, %i]\n", win, x, y, w, h);
1146            if (win)
1147              {
1148                Ecore_X_Window_State state[] = { ECORE_X_WINDOW_STATE_SKIP_TASKBAR, ECORE_X_WINDOW_STATE_SKIP_PAGER };
1149
1150                ecore_x_netwm_window_state_set(win, state, 2);
1151                ecore_x_window_hide(win);
1152                xvsink = gst_element_factory_make("xvimagesink", NULL);
1153                if (xvsink)
1154                  {
1155                    unsigned int pos[2];
1156
1157 #ifdef HAVE_X_OVERLAY_SET
1158                    gst_x_overlay_set_window_handle(GST_X_OVERLAY(xvsink), win);
1159 #else
1160                    gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(xvsink), win);
1161 #endif
1162                    ev->win = win;
1163
1164                    ecore_x_window_prop_card32_set(win, ECORE_X_ATOM_E_VIDEO_PARENT, &parent, 1);
1165
1166                    pos[0] = x; pos[1] = y;
1167                    ecore_x_window_prop_card32_set(win, ECORE_X_ATOM_E_VIDEO_POSITION, pos, 2);
1168                  }
1169                else
1170                  {
1171                    fprintf(stderr, "destroying win: %x\n", win);
1172                    ecore_x_window_free(win);
1173                  }
1174              }
1175          }
1176        evas_render_method_list_free(engines);
1177      }
1178 #else
1179 # warning "missing: ecore_x OR xoverlay"
1180 #endif
1181
1182    esink = gst_element_factory_make("emotion-sink", "sink");
1183    if (!esink)
1184      {
1185         ERR("Unable to create 'emotion-sink' GstElement.");
1186         goto unref_pipeline;
1187      }
1188
1189    g_object_set(G_OBJECT(esink), "evas-object", obj, NULL);
1190    g_object_set(G_OBJECT(esink), "ev", ev, NULL);
1191
1192    evas_object_image_pixels_get_callback_set(obj, NULL, NULL);
1193    evas_object_event_callback_add(obj, EVAS_CALLBACK_RESIZE, _image_resize, ev);
1194
1195    /* We need queue to force each video sink to be in its own thread */
1196    queue = gst_element_factory_make("queue", "equeue");
1197    if (!queue)
1198      {
1199         ERR("Unable to create 'queue' GstElement.");
1200         goto unref_pipeline;
1201      }
1202
1203    gst_bin_add_many(GST_BIN(bin), tee, queue, esink, NULL);
1204    gst_element_link_many(queue, esink, NULL);
1205
1206    /* link both sink to GstTee */
1207    pad = gst_element_get_pad(queue, "sink");
1208    teepad = gst_element_get_request_pad(tee, "src%d");
1209    gst_pad_link(teepad, pad);
1210    gst_object_unref(pad);
1211
1212    ev->eteepad = teepad;
1213
1214    if (xvsink)
1215      {
1216         GstElement *fakeeos;
1217
1218         queue = gst_element_factory_make("queue", "xvqueue");
1219         fakeeos = GST_ELEMENT(GST_BIN(g_object_new(GST_TYPE_FAKEEOS_BIN, "name", "eosbin", NULL)));
1220         if (queue && fakeeos)
1221           {
1222              GstPad *queue_pad;
1223
1224              gst_bin_add_many(GST_BIN(bin), fakeeos, NULL);
1225
1226              gst_bin_add_many(GST_BIN(fakeeos), queue, xvsink, NULL);
1227              gst_element_link_many(queue, xvsink, NULL);
1228              queue_pad = gst_element_get_pad(queue, "sink");
1229              gst_element_add_pad(fakeeos, gst_ghost_pad_new("sink", queue_pad));
1230
1231              pad = gst_element_get_pad(fakeeos, "sink");
1232              teepad = gst_element_get_request_pad(tee, "src%d");
1233              gst_pad_link(teepad, pad);
1234
1235              xvsink = fakeeos;
1236
1237              ev->xvteepad = teepad;
1238              ev->xvpad = pad;
1239           }
1240         else
1241           {
1242              if (fakeeos) gst_object_unref(fakeeos);
1243              if (queue) gst_object_unref(queue);
1244              gst_object_unref(xvsink);
1245              xvsink = NULL;
1246           }
1247      }
1248
1249    teepad = gst_element_get_pad(tee, "sink");
1250    gst_element_add_pad(bin, gst_ghost_pad_new("sink", teepad));
1251    gst_object_unref(teepad);
1252
1253 #define GST_PLAY_FLAG_NATIVE_VIDEO  (1 << 6)
1254 #define GST_PLAY_FLAG_DOWNLOAD      (1 << 7)
1255 #define GST_PLAY_FLAG_AUDIO         (1 << 1)
1256 #define GST_PLAY_FLAG_NATIVE_AUDIO  (1 << 5)
1257
1258    if (launch)
1259      {
1260         g_object_set(G_OBJECT(playbin), "sink", bin, NULL);
1261      }
1262    else
1263      {
1264         g_object_get(G_OBJECT(playbin), "flags", &flags, NULL);
1265         g_object_set(G_OBJECT(playbin), "flags", flags | GST_PLAY_FLAG_NATIVE_VIDEO | GST_PLAY_FLAG_DOWNLOAD | GST_PLAY_FLAG_NATIVE_AUDIO, NULL);
1266         g_object_set(G_OBJECT(playbin), "video-sink", bin, NULL);
1267         g_object_set(G_OBJECT(playbin), "uri", uri, NULL);
1268      }
1269
1270    evas_object_image_pixels_get_callback_set(obj, NULL, NULL);
1271
1272    ev->stream = EINA_TRUE;
1273
1274    if (xvsink)
1275      {
1276         Evas_Video_Surface video;
1277
1278         video.version = EVAS_VIDEO_SURFACE_VERSION;
1279         video.data = ev;
1280         video.parent = NULL;
1281         video.move = _video_move;
1282         video.resize = _video_resize;
1283         video.show = _video_show;
1284         video.hide = _video_hide;
1285         video.update_pixels = _video_update_pixels;
1286
1287         evas_object_image_video_surface_set(obj, &video);
1288         ev->stream = EINA_FALSE;
1289      }
1290
1291    eina_stringshare_replace(&ev->uri, uri);
1292    ev->pipeline = playbin;
1293    ev->sink = bin;
1294    ev->esink = esink;
1295    ev->xvsink = xvsink;
1296    ev->tee = tee;
1297    ev->threads = eina_list_append(ev->threads,
1298                                   ecore_thread_run(_emotion_gstreamer_pause,
1299                                                    _emotion_gstreamer_end,
1300                                                    _emotion_gstreamer_cancel,
1301                                                    ev));
1302
1303    /** NOTE: set GST_DEBUG_DUMP_DOT_DIR=/tmp and EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp', */
1304    /** then run: dot -Tpng -o emotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
1305    if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(playbin), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
1306
1307    return playbin;
1308
1309  unref_pipeline:
1310    if (xvsink) gst_object_unref(xvsink);
1311    if (esink) gst_object_unref(esink);
1312    if (tee) gst_object_unref(tee);
1313    if (bin) gst_object_unref(bin);
1314    gst_object_unref(playbin);
1315    return NULL;
1316 }