profile/ivi/emotion.git: src/modules/gstreamer/emotion_sink.c
1 #include "emotion_gstreamer.h"
2
3 static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE("sink",
4                                                                    GST_PAD_SINK, GST_PAD_ALWAYS,
5                                                                    GST_STATIC_CAPS(GST_VIDEO_CAPS_YUV("{ I420, YV12, YUY2, NV12, ST12, TM12 }") ";"
6                                                                                    GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_BGR ";" GST_VIDEO_CAPS_BGRA));
7
8 GST_DEBUG_CATEGORY_STATIC(evas_video_sink_debug);
9 #define GST_CAT_DEFAULT evas_video_sink_debug
10
11 enum {
12   REPAINT_REQUESTED,
13   LAST_SIGNAL
14 };
15
16 enum {
17   PROP_0,
18   PROP_EVAS_OBJECT,
19   PROP_WIDTH,
20   PROP_HEIGHT,
21   PROP_EV,
22   PROP_LAST
23 };
24
25 static guint evas_video_sink_signals[LAST_SIGNAL] = { 0, };
26
27 #define _do_init(bla)                                   \
28   GST_DEBUG_CATEGORY_INIT(evas_video_sink_debug,        \
29                           "emotion-sink",               \
30                           0,                            \
31                           "emotion video sink")
32
33 GST_BOILERPLATE_FULL(EvasVideoSink,
34                      evas_video_sink,
35                      GstVideoSink,
36                      GST_TYPE_VIDEO_SINK,
37                      _do_init);
38
39
40 static void unlock_buffer_mutex(EvasVideoSinkPrivate* priv);
41 static void evas_video_sink_main_render(void *data);
42 static void evas_video_sink_samsung_main_render(void *data);
43
44 static void
45 evas_video_sink_base_init(gpointer g_class)
46 {
47    GstElementClass* element_class;
48
49    element_class = GST_ELEMENT_CLASS(g_class);
50    gst_element_class_add_pad_template(element_class, gst_static_pad_template_get(&sinktemplate));
51    gst_element_class_set_details_simple(element_class, "Evas video sink",
52                                         "Sink/Video", "Sends video data from a GStreamer pipeline to an Evas object",
53                                         "Vincent Torri <vtorri@univ-evry.fr>");
54 }
55
56 static void
57 evas_video_sink_init(EvasVideoSink* sink, EvasVideoSinkClass* klass __UNUSED__)
58 {
59    EvasVideoSinkPrivate* priv;
60
61    INF("sink init");
62    sink->priv = priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, EVAS_TYPE_VIDEO_SINK, EvasVideoSinkPrivate);
63    priv->o = NULL;
64    priv->width = 0;
65    priv->height = 0;
66    priv->func = NULL;
67    priv->eformat = EVAS_COLORSPACE_ARGB8888;
68    priv->samsung = EINA_FALSE;
69    eina_lock_new(&priv->m);
70    eina_condition_new(&priv->c, &priv->m);
71    priv->unlocked = EINA_FALSE;
72 }
73
74 /**** Object methods ****/
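/* Evas "free" callback registered on the target object: when that object is
 * deleted, clear priv->o under the lock so the render paths stop using it. */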
75 static void
76 _cleanup_priv(void *data, Evas *e __UNUSED__, Evas_Object *obj, void *event_info __UNUSED__)
77 {
78    EvasVideoSinkPrivate* priv;
79
80    priv = data;
81
82    eina_lock_take(&priv->m);
83    if (priv->o == obj)
84      priv->o = NULL;
85    eina_lock_release(&priv->m);
86 }
87
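/* GObject property setter: "evas-object" swaps the target Evas_Object and
 * re-registers the EVAS_CALLBACK_FREE cleanup callback on it; "ev" stores the
 * Emotion_Gstreamer_Video back-pointer. Both updates happen under priv->m. */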
88 static void
89 evas_video_sink_set_property(GObject * object, guint prop_id,
90                              const GValue * value, GParamSpec * pspec)
91 {
92    EvasVideoSink* sink;
93    EvasVideoSinkPrivate* priv;
94
95    sink = EVAS_VIDEO_SINK (object);
96    priv = sink->priv;
97
98    switch (prop_id) {
99     case PROP_EVAS_OBJECT:
100        eina_lock_take(&priv->m);
101        evas_object_event_callback_del(priv->o, EVAS_CALLBACK_FREE, _cleanup_priv);
102        priv->o = g_value_get_pointer (value);
103        INF("sink set Evas_Object %p.", priv->o);
104        evas_object_event_callback_add(priv->o, EVAS_CALLBACK_FREE, _cleanup_priv, priv);
105        eina_lock_release(&priv->m);
106        break;
107     case PROP_EV:
108        INF("sink set ev.");
109        eina_lock_take(&priv->m);
110        priv->ev = g_value_get_pointer (value);
111        if (priv->ev)
112          priv->ev->samsung = EINA_TRUE;
113        eina_lock_release(&priv->m);
114        break;
115     default:
116        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
117        ERR("invalid property");
118        break;
119    }
120 }
121
122 static void
123 evas_video_sink_get_property(GObject * object, guint prop_id,
124                              GValue * value, GParamSpec * pspec)
125 {
126    EvasVideoSink* sink;
127    EvasVideoSinkPrivate* priv;
128
129    sink = EVAS_VIDEO_SINK (object);
130    priv = sink->priv;
131
132    switch (prop_id) {
133     case PROP_EVAS_OBJECT:
134        INF("sink get property.");
135        eina_lock_take(&priv->m);
136        g_value_set_pointer(value, priv->o);
137        eina_lock_release(&priv->m);
138        break;
139     case PROP_WIDTH:
140        INF("sink get width.");
141        eina_lock_take(&priv->m);
142        g_value_set_int(value, priv->width);
143        eina_lock_release(&priv->m);
144        break;
145     case PROP_HEIGHT:
146        INF("sink get height.");
147        eina_lock_take(&priv->m);
148        g_value_set_int (value, priv->height);
149        eina_lock_release(&priv->m);
150        break;
151     case PROP_EV:
152        INF("sink get ev.");
153        eina_lock_take(&priv->m);
154        g_value_set_pointer (value, priv->ev);
155        eina_lock_release(&priv->m);
156        break;
157     default:
158        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
159        ERR("invalid property");
160        break;
161    }
162 }
163
164 static void
165 evas_video_sink_dispose(GObject* object)
166 {
167    EvasVideoSink* sink;
168    EvasVideoSinkPrivate* priv;
169
170    INF("dispose.");
171
172    sink = EVAS_VIDEO_SINK(object);
173    priv = sink->priv;
174
175    eina_lock_free(&priv->m);
176    eina_condition_free(&priv->c);
177
178    G_OBJECT_CLASS(parent_class)->dispose(object);
179 }
180
181
182 /**** BaseSink methods ****/
183
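/* Caps negotiation: first try to match the raw YUV fourcc against the
 * colorspace_fourcc_convertion table (with a special case for the Samsung
 * ST12 format), then fall back to gst_video_format_parse_caps() and the
 * colorspace_format_convertion table. The chosen conversion function ends up
 * in priv->func and the Evas colorspace in priv->eformat. */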
184 gboolean evas_video_sink_set_caps(GstBaseSink *bsink, GstCaps *caps)
185 {
186    EvasVideoSink* sink;
187    EvasVideoSinkPrivate* priv;
188    GstStructure *structure;
189    GstVideoFormat format;
190    guint32 fourcc;
191    unsigned int i;
192
193    sink = EVAS_VIDEO_SINK(bsink);
194    priv = sink->priv;
195
196    structure = gst_caps_get_structure(caps, 0);
197
198    if (gst_structure_get_int(structure, "width", (int*) &priv->width)
199        && gst_structure_get_int(structure, "height", (int*) &priv->height)
200        && gst_structure_get_fourcc(structure, "format", &fourcc))
201      {
202         priv->source_height = priv->height;
203
204         for (i = 0; colorspace_fourcc_convertion[i].name != NULL; ++i)
205           if (fourcc == colorspace_fourcc_convertion[i].fourcc)
206             {
207                fprintf(stderr, "Found '%s'\n", colorspace_fourcc_convertion[i].name);
208                priv->eformat = colorspace_fourcc_convertion[i].eformat;
209                priv->func = colorspace_fourcc_convertion[i].func;
210                if (colorspace_fourcc_convertion[i].force_height)
211                  {
212                     priv->height = (priv->height >> 1) << 1;
213                  }
214                if (priv->ev)
215                  priv->ev->kill_buffer = EINA_TRUE;
216                return TRUE;
217             }
218
219         if (fourcc == GST_MAKE_FOURCC('S', 'T', '1', '2'))
220           {
221              fprintf(stderr, "Found '%s'\n", "ST12");
222              priv->eformat = EVAS_COLORSPACE_YCBCR420TM12601_PL;
223              priv->samsung = EINA_TRUE;
224              priv->func = NULL;
225              if (priv->ev)
226                {
227                   priv->ev->samsung = EINA_TRUE;
228                   priv->ev->kill_buffer = EINA_TRUE;
229                }
230              return TRUE;
231           }
232      }
233
234    INF("falling back to generic caps parsing");
235    if (!gst_video_format_parse_caps(caps, &format, (int*) &priv->width, (int*) &priv->height))
236      {
237         ERR("Unable to parse caps.");
238         return FALSE;
239      }
240
241    priv->source_height = priv->height;
242
243    for (i = 0; colorspace_format_convertion[i].name != NULL; ++i)
244      if (format == colorspace_format_convertion[i].format)
245        {
246           fprintf(stderr, "Found '%s'\n", colorspace_format_convertion[i].name);
247           priv->eformat = colorspace_format_convertion[i].eformat;
248           priv->func = colorspace_format_convertion[i].func;
249           if (priv->ev)
250             priv->ev->kill_buffer = EINA_FALSE;
251           return TRUE;
252        }
253
254    ERR("unsupported format: %d", format);
255    return FALSE;
256 }
257
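/* start() refuses to run when no target Evas_Object has been set; stop() and
 * unlock() use unlock_buffer_mutex() to release a streaming thread that may
 * be waiting in render(), and unlock_stop() re-arms the sink afterwards. */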
258 static gboolean
259 evas_video_sink_start(GstBaseSink* base_sink)
260 {
261    EvasVideoSinkPrivate* priv;
262    gboolean res = TRUE;
263
264    INF("sink start");
265
266    priv = EVAS_VIDEO_SINK(base_sink)->priv;
267    eina_lock_take(&priv->m);
268    if (!priv->o)
269      res = FALSE;
270    else
271      priv->unlocked = EINA_FALSE;
272    eina_lock_release(&priv->m);
273    return res;
274 }
275
276 static gboolean
277 evas_video_sink_stop(GstBaseSink* base_sink)
278 {
279    EvasVideoSinkPrivate* priv = EVAS_VIDEO_SINK(base_sink)->priv;
280
281    INF("sink stop");
282
283    unlock_buffer_mutex(priv);
284    return TRUE;
285 }
286
287 static gboolean
288 evas_video_sink_unlock(GstBaseSink* object)
289 {
290    EvasVideoSink* sink;
291
292    INF("sink unlock");
293
294    sink = EVAS_VIDEO_SINK(object);
295
296    unlock_buffer_mutex(sink->priv);
297
298    return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock,
299                                        (object), TRUE);
300 }
301
302 static gboolean
303 evas_video_sink_unlock_stop(GstBaseSink* object)
304 {
305    EvasVideoSink* sink;
306    EvasVideoSinkPrivate* priv;
307
308    sink = EVAS_VIDEO_SINK(object);
309    priv = sink->priv;
310
311    INF("sink unlock stop");
312
313    eina_lock_take(&priv->m);
314    priv->unlocked = EINA_FALSE;
315    eina_lock_release(&priv->m);
316
317    return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock_stop,
318                                        (object), TRUE);
319 }
320
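/* Preroll: hand the buffer to the Ecore main loop asynchronously so a frame
 * can be displayed while the pipeline is still paused; empty buffers are
 * ignored except in the Samsung case. */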
321 static GstFlowReturn
322 evas_video_sink_preroll(GstBaseSink* bsink, GstBuffer* buffer)
323 {
324    Emotion_Gstreamer_Buffer *send;
325    EvasVideoSinkPrivate *priv;
326    EvasVideoSink *sink;
327
328    INF("sink preroll %p [%i]", GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
329
330    sink = EVAS_VIDEO_SINK(bsink);
331    priv = sink->priv;
332
333    if (GST_BUFFER_SIZE(buffer) <= 0 && !priv->samsung)
334      {
335         WRN("empty buffer");
336         return GST_FLOW_OK;
337      }
338
339    send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_TRUE);
340
341    if (send)
342      {
343         if (priv->samsung)
344           {
345              if (!priv->func)
346                {
347                   GstStructure *structure;
348                   GstCaps *caps;
349                   gboolean is_multiplane = FALSE;
350
351                   caps = GST_BUFFER_CAPS(buffer);
352                   structure = gst_caps_get_structure (caps, 0);
353                   gst_structure_get_boolean(structure, "multiplane", &is_multiplane);
354                   gst_caps_unref(caps);
355
356                   if (is_multiplane)
357                     priv->func = _evas_video_st12_multiplane;
358                   else
359                     priv->func = _evas_video_st12;
360                }
361
362              ecore_main_loop_thread_safe_call_async(evas_video_sink_samsung_main_render, send);
363           }
364         else
365           ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
366      }
367
368    return GST_FLOW_OK;
369 }
370
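/* Render: queue the buffer for the Ecore main loop and block on priv->c until
 * the main-loop renderer (or unlock_buffer_mutex()) signals, which keeps the
 * GStreamer streaming thread in step with Evas. */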
371 static GstFlowReturn
372 evas_video_sink_render(GstBaseSink* bsink, GstBuffer* buffer)
373 {
374    Emotion_Gstreamer_Buffer *send;
375    EvasVideoSinkPrivate *priv;
376    EvasVideoSink *sink;
377
378    INF("sink render %p", buffer);
379
380    sink = EVAS_VIDEO_SINK(bsink);
381    priv = sink->priv;
382
383    eina_lock_take(&priv->m);
384
385    if (priv->unlocked) {
386       ERR("sink unlocked, dropping render");
387       eina_lock_release(&priv->m);
388       return GST_FLOW_OK;
389    }
390
391    send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_FALSE);
392    if (!send) {
393       eina_lock_release(&priv->m);
394       return GST_FLOW_ERROR;
395    }
396
397    if (priv->samsung)
398      {
399         if (!priv->func)
400           {
401              GstStructure *structure;
402              GstCaps *caps;
403              gboolean is_multiplane = FALSE;
404
405              caps = GST_BUFFER_CAPS(buffer);
406              structure = gst_caps_get_structure (caps, 0);
407              gst_structure_get_boolean(structure, "multiplane", &is_multiplane);
408              gst_caps_unref(caps);
409
410              if (is_multiplane)
411                priv->func = _evas_video_st12_multiplane;
412              else
413                priv->func = _evas_video_st12;
414           }
415
416         ecore_main_loop_thread_safe_call_async(evas_video_sink_samsung_main_render, send);
417      }
418    else
419      ecore_main_loop_thread_safe_call_async(evas_video_sink_main_render, send);
420
421    eina_condition_wait(&priv->c);
422    eina_lock_release(&priv->m);
423
424    return GST_FLOW_OK;
425 }
426
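/* When the debug_fps flag is set, print the decoded frame count and the
 * average frame rate roughly every half second. */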
427 static void
428 _update_emotion_fps(Emotion_Gstreamer_Video *ev)
429 {
430    double tim;
431
432    if (!debug_fps) return ;
433
434    tim = ecore_time_get();
435    ev->frames++;
436
437    if (ev->rlapse == 0.0)
438      {
439         ev->rlapse = tim;
440         ev->flapse = ev->frames;
441      }
442    else if ((tim - ev->rlapse) >= 0.5)
443      {
444         printf("FRAME: %i, FPS: %3.1f\n",
445                ev->frames,
446                (ev->frames - ev->flapse) / (tim - ev->rlapse));
447         ev->rlapse = tim;
448         ev->flapse = ev->frames;
449      }
450 }
451
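/* Main-loop renderer for Samsung ST12/TM12 buffers: the pixel data arrives
 * wrapped in either a GstMultiPlaneImageBuffer or an SCMN_IMGB structure, so
 * width, height, stride and elevation are taken from there before the buffer
 * is converted into the Evas image object. */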
452 static void
453 evas_video_sink_samsung_main_render(void *data)
454 {
455    Emotion_Gstreamer_Buffer *send;
456    Emotion_Video_Stream *vstream;
457    EvasVideoSinkPrivate* priv;
458    GstBuffer* buffer;
459    unsigned char *evas_data;
460    const guint8 *gst_data;
461    GstFormat fmt = GST_FORMAT_TIME;
462    gint64 pos;
463    Eina_Bool preroll;
464    int stride, elevation;
465    Evas_Coord w, h;
466
467    send = data;
468
469    if (!send) return ;
470
471    priv = send->sink;
472    buffer = send->frame;
473    preroll = send->preroll;
474
475    if (!priv || !priv->o || priv->unlocked)
476      goto exit_point;
477
478    if (send->ev->send)
479      {
480         emotion_gstreamer_buffer_free(send->ev->send);
481         send->ev->send = NULL;
482      }
483
484    if (!send->ev->stream && !send->force)
485      {
486         send->ev->send = send;
487         _emotion_frame_new(send->ev->obj);
488         goto exit_stream;
489      }
490
491    _emotion_gstreamer_video_pipeline_parse(send->ev, EINA_TRUE);
492
493    /* Getting stride to compute the right size and then fill the object properly */
494    /* Y => [0] and UV in [1] */
495    if (priv->func == _evas_video_st12_multiplane)
496      {
497         const GstMultiPlaneImageBuffer *mp_buf = (const GstMultiPlaneImageBuffer *) buffer;
498
499         stride = mp_buf->stride[0];
500         elevation = mp_buf->elevation[0];
501         priv->width = mp_buf->width[0];
502         priv->height = mp_buf->height[0];
503
504         gst_data = (const guint8 *) mp_buf;
505      }
506    else
507      {
508         const SCMN_IMGB *imgb = (const SCMN_IMGB *) GST_BUFFER_MALLOCDATA(buffer);
509
510         stride = imgb->stride[0];
511         elevation = imgb->elevation[0];
512         priv->width = imgb->width[0];
513         priv->height = imgb->height[0];
514
515         gst_data = (const guint8 *) imgb;
516      }
517
518    evas_object_geometry_get(priv->o, NULL, NULL, &w, &h);
519
520    send->ev->fill.width = (double) stride / priv->width;
521    send->ev->fill.height = (double) elevation / priv->height;
522
523    evas_object_image_alpha_set(priv->o, 0);
524    evas_object_image_colorspace_set(priv->o, priv->eformat);
525    evas_object_image_size_set(priv->o, stride, elevation);
526
527    _update_emotion_fps(send->ev);
528
529    evas_data = evas_object_image_data_get(priv->o, 1);
530
531    if (priv->func)
532      priv->func(evas_data, gst_data, stride, elevation, elevation);
533    else
534      WRN("No way to decode colorspace %x!", priv->eformat);
535
536    evas_object_image_data_set(priv->o, evas_data);
537    evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
538    evas_object_image_pixels_dirty_set(priv->o, 0);
539
540    if (!preroll && send->ev->play_started)
541      {
542         _emotion_playback_started(send->ev->obj);
543         send->ev->play_started = 0;
544      }
545
546    if (!send->force)
547      {
548         _emotion_frame_new(send->ev->obj);
549      }
550
551    vstream = eina_list_nth(send->ev->video_streams, send->ev->video_stream_nbr - 1);
552
553    gst_element_query_position(send->ev->pipeline, &fmt, &pos);
554    send->ev->position = (double)pos / (double)GST_SECOND;
555
556    if (vstream)
557      {
558         vstream->width = priv->width;
559         vstream->height = priv->height;
560
561         _emotion_video_pos_update(send->ev->obj, send->ev->position, vstream->length_time);
562      }
563
564    send->ev->ratio = (double) priv->width / (double) priv->height;
565    _emotion_frame_refill(send->ev->obj, send->ev->fill.width, send->ev->fill.height);
566    _emotion_frame_resize(send->ev->obj, priv->width, priv->height, send->ev->ratio);
567
568    buffer = gst_buffer_ref(buffer);
569    if (send->ev->last_buffer) gst_buffer_unref(send->ev->last_buffer);
570    send->ev->last_buffer = buffer;
571
572  exit_point:
573    emotion_gstreamer_buffer_free(send);
574
575  exit_stream:
576    if (preroll || !priv->o) return ;
577
578    if (!priv->unlocked)
579      eina_condition_signal(&priv->c);
580 }
581
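/* Main-loop renderer for regular buffers: convert GST_BUFFER_DATA() into the
 * Evas image object with the colorspace function chosen in set_caps(), then
 * update position, aspect ratio and stream geometry. */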
582 static void
583 evas_video_sink_main_render(void *data)
584 {
585    Emotion_Gstreamer_Buffer *send;
586    Emotion_Gstreamer_Video *ev = NULL;
587    Emotion_Video_Stream *vstream;
588    EvasVideoSinkPrivate* priv;
589    GstBuffer* buffer;
590    unsigned char *evas_data;
591    GstFormat fmt = GST_FORMAT_TIME;
592    gint64 pos;
593    Eina_Bool preroll;
594
595    send = data;
596
597    if (!send) return ;
598
599    priv = send->sink;
600    buffer = send->frame;
601    preroll = send->preroll;
602    ev = send->ev;
603
604    if (!priv || !priv->o || priv->unlocked)
605      goto exit_point;
606
607    if (ev->send && send != ev->send)
608      {
609         emotion_gstreamer_buffer_free(ev->send);
610         ev->send = NULL;
611      }
612
613    if (!ev->stream && !send->force)
614      {
615         ev->send = send;
616         _emotion_frame_new(ev->obj);
617         evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
618         goto exit_stream;
619      }
620
621    _emotion_gstreamer_video_pipeline_parse(ev, EINA_TRUE);
622
623    INF("sink main render [%i, %i] (source height: %i)", priv->width, priv->height, priv->source_height);
624
625    evas_object_image_alpha_set(priv->o, 0);
626    evas_object_image_colorspace_set(priv->o, priv->eformat);
627    evas_object_image_size_set(priv->o, priv->width, priv->height);
628
629    evas_data = evas_object_image_data_get(priv->o, 1);
630
631    if (priv->func)
632      priv->func(evas_data, GST_BUFFER_DATA(buffer), priv->width, priv->source_height, priv->height);
633    else
634      WRN("No way to decode colorspace %x!", priv->eformat);
635
636    evas_object_image_data_set(priv->o, evas_data);
637    evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
638    evas_object_image_pixels_dirty_set(priv->o, 0);
639
640    _update_emotion_fps(ev);
641
642    if (!preroll && ev->play_started)
643      {
644         _emotion_playback_started(ev->obj);
645         ev->play_started = 0;
646      }
647
648    if (!send->force)
649      {
650         _emotion_frame_new(ev->obj);
651      }
652
653    gst_element_query_position(ev->pipeline, &fmt, &pos);
654    ev->position = (double)pos / (double)GST_SECOND;
655
656    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
657
658    if (vstream)
659      {
660        vstream->width = priv->width;
661        vstream->height = priv->height;
662        _emotion_video_pos_update(ev->obj, ev->position, vstream->length_time);
663      }
664
665    ev->ratio = (double) priv->width / (double) priv->height;
666
667    _emotion_frame_resize(ev->obj, priv->width, priv->height, ev->ratio);
668
669    buffer = gst_buffer_ref(buffer);
670    if (ev->last_buffer) gst_buffer_unref(ev->last_buffer);
671    ev->last_buffer = buffer;
672
673  exit_point:
674    emotion_gstreamer_buffer_free(send);
675
676  exit_stream:
677    if (preroll || !priv->o) return ;
678
679    if (!priv->unlocked)
680      eina_condition_signal(&priv->c);
681 }
682
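/* Mark the sink as unlocked and wake up a streaming thread that may be
 * blocked in evas_video_sink_render() waiting on priv->c. */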
683 static void
684 unlock_buffer_mutex(EvasVideoSinkPrivate* priv)
685 {
686    priv->unlocked = EINA_TRUE;
687
688    eina_condition_signal(&priv->c);
689 }
690
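/* Custom GClosure marshaller for the "repaint-requested" signal, whose single
 * argument is a GstMiniObject (the GstBuffer to repaint). */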
691 static void
692 marshal_VOID__MINIOBJECT(GClosure * closure, GValue * return_value __UNUSED__,
693                          guint n_param_values, const GValue * param_values,
694                          gpointer invocation_hint __UNUSED__, gpointer marshal_data)
695 {
696    typedef void (*marshalfunc_VOID__MINIOBJECT) (gpointer obj, gpointer arg1, gpointer data2);
697    marshalfunc_VOID__MINIOBJECT callback;
698    GCClosure *cc;
699    gpointer data1, data2;
700
701    cc = (GCClosure *) closure;
702
703    g_return_if_fail(n_param_values == 2);
704
705    if (G_CCLOSURE_SWAP_DATA(closure)) {
706       data1 = closure->data;
707       data2 = g_value_peek_pointer(param_values + 0);
708    } else {
709       data1 = g_value_peek_pointer(param_values + 0);
710       data2 = closure->data;
711    }
712    callback = (marshalfunc_VOID__MINIOBJECT) (marshal_data ? marshal_data : cc->callback);
713
714    callback(data1, gst_value_get_mini_object(param_values + 1), data2);
715 }
716
717 static void
718 evas_video_sink_class_init(EvasVideoSinkClass* klass)
719 {
720    GObjectClass* gobject_class;
721    GstBaseSinkClass* gstbase_sink_class;
722
723    gobject_class = G_OBJECT_CLASS(klass);
724    gstbase_sink_class = GST_BASE_SINK_CLASS(klass);
725
726    g_type_class_add_private(klass, sizeof(EvasVideoSinkPrivate));
727
728    gobject_class->set_property = evas_video_sink_set_property;
729    gobject_class->get_property = evas_video_sink_get_property;
730
731    g_object_class_install_property (gobject_class, PROP_EVAS_OBJECT,
732                                     g_param_spec_pointer ("evas-object", "Evas Object",
733                                                           "The Evas object where the video will be displayed",
734                                                           G_PARAM_READWRITE));
735
736    g_object_class_install_property (gobject_class, PROP_WIDTH,
737                                     g_param_spec_int ("width", "Width",
738                                                       "The width of the video",
739                                                       0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
740
741    g_object_class_install_property (gobject_class, PROP_HEIGHT,
742                                     g_param_spec_int ("height", "Height",
743                                                       "The height of the video",
744                                                       0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
745    g_object_class_install_property (gobject_class, PROP_EV,
746                                     g_param_spec_pointer ("ev", "Emotion_Gstreamer_Video",
747                                                           "The internal data of the Emotion object",
748                                                           G_PARAM_READWRITE));
749
750    gobject_class->dispose = evas_video_sink_dispose;
751
752    gstbase_sink_class->set_caps = evas_video_sink_set_caps;
753    gstbase_sink_class->stop = evas_video_sink_stop;
754    gstbase_sink_class->start = evas_video_sink_start;
755    gstbase_sink_class->unlock = evas_video_sink_unlock;
756    gstbase_sink_class->unlock_stop = evas_video_sink_unlock_stop;
757    gstbase_sink_class->render = evas_video_sink_render;
758    gstbase_sink_class->preroll = evas_video_sink_preroll;
759
760    evas_video_sink_signals[REPAINT_REQUESTED] = g_signal_new("repaint-requested",
761                                                              G_TYPE_FROM_CLASS(klass),
762                                                              (GSignalFlags)(G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
763                                                              0,
764                                                              0,
765                                                              0,
766                                                              marshal_VOID__MINIOBJECT,
767                                                              G_TYPE_NONE, 1, GST_TYPE_BUFFER);
768 }
769
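/* Register the sink with GStreamer under the factory name "emotion-sink".
 * Once registered it can be created like any other element, e.g.
 * (illustrative sketch, not code from this file):
 *
 *   GstElement *sink = gst_element_factory_make("emotion-sink", NULL);
 *   g_object_set(G_OBJECT(sink), "evas-object", obj, "ev", ev, NULL);
 *
 * which is essentially what gstreamer_video_sink_new() does below. */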
770 gboolean
771 gstreamer_plugin_init (GstPlugin * plugin)
772 {
773    return gst_element_register (plugin,
774                                 "emotion-sink",
775                                 GST_RANK_NONE,
776                                 EVAS_TYPE_VIDEO_SINK);
777 }
778
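/* Ecore_Thread worker: move the pipeline to PAUSED and wait for the state
 * change to complete; GST_STATE_CHANGE_NO_PREROLL (a live source) means the
 * pipeline has to be set to PLAYING directly instead. */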
779 static void
780 _emotion_gstreamer_pause(void *data, Ecore_Thread *thread)
781 {
782    Emotion_Gstreamer_Video *ev = data;
783    GstStateChangeReturn res;
784
785    if (ecore_thread_check(thread) || !ev->pipeline) return ;
786
787    gst_element_set_state(ev->pipeline, GST_STATE_PAUSED);
788    res = gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
789    if (res == GST_STATE_CHANGE_NO_PREROLL)
790      {
791         gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
792         gst_element_get_state(ev->pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
793      }
794 }
795
796 static void
797 _emotion_gstreamer_cancel(void *data, Ecore_Thread *thread)
798 {
799    Emotion_Gstreamer_Video *ev = data;
800
801    ev->threads = eina_list_remove(ev->threads, thread);
802
803    if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
804
805    if (ev->in == ev->out && ev->delete_me)
806      em_shutdown(ev);
807 }
808
809 static void
810 _emotion_gstreamer_end(void *data, Ecore_Thread *thread)
811 {
812    Emotion_Gstreamer_Video *ev = data;
813
814    ev->threads = eina_list_remove(ev->threads, thread);
815
816    if (ev->play)
817      {
818         gst_element_set_state(ev->pipeline, GST_STATE_PLAYING);
819         ev->play_started = 1;
820      }
821
822    if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(ev->pipeline), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
823
824    if (ev->in == ev->out && ev->delete_me)
825      em_shutdown(ev);
826    else
827      _emotion_gstreamer_video_pipeline_parse(data, EINA_TRUE);
828 }
829
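/* Evas_Video_Surface callbacks used when the xvimagesink overlay window is
 * active: resize the X window directly, publish its position through the
 * ECORE_X_ATOM_E_VIDEO_POSITION property for the window manager, and show or
 * hide it together with the Evas object. */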
830 static void
831 _video_resize(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__,
832               Evas_Coord w, Evas_Coord h)
833 {
834    Emotion_Gstreamer_Video *ev = data;
835
836 #ifdef HAVE_ECORE_X
837    ecore_x_window_resize(ev->win, w, h);
838 #endif
839    fprintf(stderr, "resize: %i, %i\n", w, h);
840 }
841
842 static void
843 _video_move(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__,
844             Evas_Coord x, Evas_Coord y)
845 {
846    Emotion_Gstreamer_Video *ev = data;
847 #ifdef HAVE_ECORE_X
848    unsigned int pos[2];
849
850    fprintf(stderr, "move: %i, %i\n", x, y);
851    pos[0] = x; pos[1] = y;
852    ecore_x_window_prop_card32_set(ev->win, ECORE_X_ATOM_E_VIDEO_POSITION, pos, 2);
853 #endif
854 }
855
856 #if 0
857 /* Much better idea to always feed the XvImageSink and let it handle the rendering optimization, as we do */
858 static void
859 _block_pad_unlink_cb(GstPad *pad, gboolean blocked, gpointer user_data)
860 {
861    if (blocked)
862      {
863         Emotion_Gstreamer_Video *ev = user_data;
864         GstEvent *gev;
865
866         gst_pad_unlink(ev->teepad, ev->xvpad);
867         gev = gst_event_new_eos();
868         gst_pad_send_event(ev->xvpad, gev);
869         gst_pad_set_blocked_async(pad, FALSE, _block_pad_unlink_cb, NULL);
870      }
871 }
872
873 static void
874 _block_pad_link_cb(GstPad *pad, gboolean blocked, gpointer user_data)
875 {
876    if (blocked)
877      {
878         Emotion_Gstreamer_Video *ev = user_data;
879
880         gst_pad_link(ev->teepad, ev->xvpad);
881         if (ev->play)
882           gst_element_set_state(ev->xvsink, GST_STATE_PLAYING);
883         else
884           gst_element_set_state(ev->xvsink, GST_STATE_PAUSED);
885         gst_pad_set_blocked_async(pad, FALSE, _block_pad_link_cb, NULL);
886      }
887 }
888 #endif
889
890 static void
891 _video_show(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__)
892 {
893    Emotion_Gstreamer_Video *ev = data;
894
895    fprintf(stderr, "show xv\n");
896 #ifdef HAVE_ECORE_X
897    ecore_x_window_show(ev->win);
898 #endif
899    /* gst_pad_set_blocked_async(ev->teepad, TRUE, _block_pad_link_cb, ev); */
900 }
901
902 static void
903 _video_hide(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__)
904 {
905    Emotion_Gstreamer_Video *ev = data;
906
907    fprintf(stderr, "hide xv\n");
908 #ifdef HAVE_ECORE_X
909    ecore_x_window_hide(ev->win);
910 #endif
911    /* gst_pad_set_blocked_async(ev->teepad, TRUE, _block_pad_unlink_cb, ev); */
912 }
913
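/* Called when Evas needs the actual pixels of the video surface: force the
 * pending buffer (ev->send) through the software render path. */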
914 static void
915 _video_update_pixels(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__)
916 {
917    Emotion_Gstreamer_Video *ev = data;
918    Emotion_Gstreamer_Buffer *send;
919
920    if (!ev->send) return ;
921
922    send = ev->send;
923    send->force = EINA_TRUE;
924    ev->send = NULL;
925    evas_video_sink_main_render(send);
926 }
927
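/* Build the playback pipeline for one Emotion object: either a custom webcam
 * launch line or playbin2, with a private bin of tee -> queue -> emotion-sink
 * and, when an X11 engine and window-manager support are available, a second
 * tee branch feeding an xvimagesink wrapped in the fake-EOS bin. */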
928 GstElement *
929 gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
930                          Evas_Object *o,
931                          const char *uri)
932 {
933    GstElement *playbin;
934    GstElement *bin = NULL;
935    GstElement *esink = NULL;
936    GstElement *xvsink = NULL;
937    GstElement *tee = NULL;
938    GstElement *queue = NULL;
939    Evas_Object *obj;
940    GstPad *pad;
941    GstPad *teepad;
942    int flags;
943    const char *launch;
944 #if defined HAVE_ECORE_X && defined HAVE_XOVERLAY_H
945    const char *engine;
946    Eina_List *engines;
947 #endif
948
949    obj = emotion_object_image_get(o);
950    if (!obj)
951      {
952         ERR("No Evas_Object specified");
953         return NULL;
954      }
955
956    if (!uri)
957      return NULL;
958
959    launch = emotion_webcam_custom_get(uri);
960    if (launch)
961      {
962         GError *error = NULL;
963
964         playbin = gst_parse_bin_from_description(launch, 1, &error);
965         if (!playbin)
966           {
967              ERR("Unable to set up command '%s': got error '%s'.", launch, error->message);
968              g_error_free(error);
969              return NULL;
970           }
971         if (error)
972           {
973              WRN("Got recoverable error '%s' for command '%s'.", error->message, launch);
974              g_error_free(error);
975           }
976      }
977    else
978      {
979         playbin = gst_element_factory_make("playbin2", "playbin");
980         if (!playbin)
981           {
982              ERR("Unable to create 'playbin2' GstElement.");
983              return NULL;
984           }
985      }
986
987    bin = gst_bin_new(NULL);
988    if (!bin)
989      {
990        ERR("Unable to create GstBin!");
991        goto unref_pipeline;
992      }
993
994    tee = gst_element_factory_make("tee", NULL);
995    if (!tee)
996      {
997        ERR("Unable to create 'tee' GstElement.");
998        goto unref_pipeline;
999      }
1000
1001 #if defined HAVE_ECORE_X && defined HAVE_XOVERLAY_H
1002    if (window_manager_video)
1003      {
1004        engines = evas_render_method_list();
1005
1006        engine = eina_list_nth(engines, evas_output_method_get(evas_object_evas_get(obj)) - 1);
1007
1008        if (ev->priority && engine && strstr(engine, "_x11") != NULL)
1009          {
1010            Ecore_Evas *ee;
1011            Evas_Coord x, y, w, h;
1012            Ecore_X_Window win;
1013            Ecore_X_Window parent;
1014
1015            evas_object_geometry_get(obj, &x, &y, &w, &h);
1016
1017            ee = ecore_evas_ecore_evas_get(evas_object_evas_get(obj));
1018
1019            if (w < 4) w = 4;
1020            if (h < 2) h = 2;
1021
1022            /* Here we really need the help of the window manager; this code will change when we update E17. */
1023            parent = (Ecore_X_Window) ecore_evas_window_get(ee);
1024            fprintf(stderr, "parent: %x\n", parent);
1025
1026            win = ecore_x_window_new(0, x, y, w, h);
1027            fprintf(stderr, "creating window: %x [%i, %i, %i, %i]\n", win, x, y, w, h);
1028            if (win)
1029              {
1030                Ecore_X_Window_State state[] = { ECORE_X_WINDOW_STATE_SKIP_TASKBAR, ECORE_X_WINDOW_STATE_SKIP_PAGER };
1031
1032                ecore_x_netwm_window_state_set(win, state, 2);
1033                ecore_x_window_hide(win);
1034                xvsink = gst_element_factory_make("xvimagesink", NULL);
1035                if (xvsink)
1036                  {
1037                    unsigned int pos[2];
1038
1039 #ifdef HAVE_X_OVERLAY_SET
1040                    gst_x_overlay_set_window_handle(GST_X_OVERLAY(xvsink), win);
1041 #else
1042                    gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(xvsink), win);
1043 #endif
1044                    ev->win = win;
1045
1046                    ecore_x_window_prop_card32_set(win, ECORE_X_ATOM_E_VIDEO_PARENT, &parent, 1);
1047
1048                    pos[0] = x; pos[1] = y;
1049                    ecore_x_window_prop_card32_set(win, ECORE_X_ATOM_E_VIDEO_POSITION, pos, 2);
1050                  }
1051                else
1052                  {
1053                    fprintf(stderr, "destroying win: %x\n", win);
1054                    ecore_x_window_free(win);
1055                  }
1056              }
1057          }
1058        evas_render_method_list_free(engines);
1059      }
1060 #else
1061 # warning "no ecore_x or xoverlay"
1062 #endif
1063
1064    esink = gst_element_factory_make("emotion-sink", "sink");
1065    if (!esink)
1066      {
1067         ERR("Unable to create 'emotion-sink' GstElement.");
1068         goto unref_pipeline;
1069      }
1070
1071    g_object_set(G_OBJECT(esink), "evas-object", obj, NULL);
1072    g_object_set(G_OBJECT(esink), "ev", ev, NULL);
1073
1074    evas_object_image_pixels_get_callback_set(obj, NULL, NULL);
1075
1076    /* We need queue to force each video sink to be in its own thread */
1077    queue = gst_element_factory_make("queue", NULL);
1078    if (!queue)
1079      {
1080         ERR("Unable to create 'queue' GstElement.");
1081         goto unref_pipeline;
1082      }
1083
1084    gst_bin_add_many(GST_BIN(bin), tee, queue, esink, NULL);
1085    gst_element_link_many(queue, esink, NULL);
1086
1087    /* link both sink to GstTee */
1088    pad = gst_element_get_pad(queue, "sink");
1089    teepad = gst_element_get_request_pad(tee, "src%d");
1090    gst_pad_link(teepad, pad);
1091    gst_object_unref(pad);
1092    gst_object_unref(teepad);
1093
1094    if (xvsink)
1095      {
1096         GstElement *fakeeos;
1097
1098         queue = gst_element_factory_make("queue", NULL);
1099         fakeeos = GST_ELEMENT(GST_BIN(g_object_new(GST_TYPE_FAKEEOS_BIN, "name", "eosbin", NULL)));
1100         if (queue && fakeeos)
1101           {
1102              GstPad *queue_pad;
1103
1104              gst_bin_add_many(GST_BIN(bin), fakeeos, NULL);
1105
1106              gst_bin_add_many(GST_BIN(fakeeos), queue, xvsink, NULL);
1107              gst_element_link_many(queue, xvsink, NULL);
1108              queue_pad = gst_element_get_pad(queue, "sink");
1109              gst_element_add_pad(fakeeos, gst_ghost_pad_new("sink", queue_pad));
1110
1111              pad = gst_element_get_pad(fakeeos, "sink");
1112              teepad = gst_element_get_request_pad(tee, "src%d");
1113              gst_pad_link(teepad, pad);
1114
1115              xvsink = fakeeos;
1116
1117              ev->teepad = teepad;
1118              ev->xvpad = pad;
1119           }
1120         else
1121           {
1122              if (fakeeos) gst_object_unref(fakeeos);
1123              if (queue) gst_object_unref(queue);
1124              gst_object_unref(xvsink);
1125              xvsink = NULL;
1126           }
1127      }
1128
1129    teepad = gst_element_get_pad(tee, "sink");
1130    gst_element_add_pad(bin, gst_ghost_pad_new("sink", teepad));
1131    gst_object_unref(teepad);
1132
1133 #define GST_PLAY_FLAG_NATIVE_VIDEO  (1 << 6)
1134 #define GST_PLAY_FLAG_DOWNLOAD      (1 << 7)
1135 #define GST_PLAY_FLAG_AUDIO         (1 << 1)
1136 #define GST_PLAY_FLAG_NATIVE_AUDIO  (1 << 5)
1137
1138    if (launch)
1139      {
1140         g_object_set(G_OBJECT(playbin), "sink", bin, NULL);
1141      }
1142    else
1143      {
1144         g_object_get(G_OBJECT(playbin), "flags", &flags, NULL);
1145         g_object_set(G_OBJECT(playbin), "flags", flags | GST_PLAY_FLAG_NATIVE_VIDEO | GST_PLAY_FLAG_DOWNLOAD | GST_PLAY_FLAG_NATIVE_AUDIO, NULL);
1146         g_object_set(G_OBJECT(playbin), "video-sink", bin, NULL);
1147         g_object_set(G_OBJECT(playbin), "uri", uri, NULL);
1148      }
1149
1150    evas_object_image_pixels_get_callback_set(obj, NULL, NULL);
1151
1152    ev->stream = EINA_TRUE;
1153
1154    if (xvsink)
1155      {
1156         Evas_Video_Surface video;
1157
1158         video.version = EVAS_VIDEO_SURFACE_VERSION;
1159         video.data = ev;
1160         video.parent = NULL;
1161         video.move = _video_move;
1162         video.resize = _video_resize;
1163         video.show = _video_show;
1164         video.hide = _video_hide;
1165         video.update_pixels = _video_update_pixels;
1166
1167         evas_object_image_video_surface_set(obj, &video);
1168         ev->stream = EINA_FALSE;
1169      }
1170
1171    eina_stringshare_replace(&ev->uri, uri);
1172    ev->pipeline = playbin;
1173    ev->sink = bin;
1174    ev->esink = esink;
1175    ev->xvsink = xvsink;
1176    ev->tee = tee;
1177    ev->threads = eina_list_append(ev->threads,
1178                                   ecore_thread_run(_emotion_gstreamer_pause,
1179                                                    _emotion_gstreamer_end,
1180                                                    _emotion_gstreamer_cancel,
1181                                                    ev));
1182
1183    /** NOTE: you need to set: GST_DEBUG_DUMP_DOT_DIR=/tmp EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp' */
1184    /** then call: dot -Tpng -o emotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
1185    if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(playbin), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
1186
1187    return playbin;
1188
1189  unref_pipeline:
1190    if (xvsink) gst_object_unref(xvsink);
1191    if (esink) gst_object_unref(esink);
1192    if (tee) gst_object_unref(tee);
1193    if (bin) gst_object_unref(bin);
1194    gst_object_unref(playbin);
1195    return NULL;
1196 }