emotion: fix bugs, add YUY2 support.
[profile/ivi/emotion.git] / src / modules / gstreamer / emotion_sink.c
1 #include <glib.h>
2 #include <gst/gst.h>
3 #include <gst/video/video.h>
4 #include <gst/video/gstvideosink.h>
5
6 #include <Ecore.h>
7
8 #include "emotion_gstreamer.h"
9
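/* Caps accepted by the sink: planar YUV (I420, YV12), packed YUY2 and the
 * BGRx/BGR/BGRA variants; set_caps() below maps each of these to an Evas colorspace. */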
10 static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE("sink",
11                                                                    GST_PAD_SINK, GST_PAD_ALWAYS,
12                                                                    GST_STATIC_CAPS(GST_VIDEO_CAPS_YUV("{ I420, YV12, YUY2 }") ";"
13                                                                                    GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_BGR ";" GST_VIDEO_CAPS_BGRA));
14
15 GST_DEBUG_CATEGORY_STATIC(evas_video_sink_debug);
16 #define GST_CAT_DEFAULT evas_video_sink_debug
17
18 enum {
19   REPAINT_REQUESTED,
20   LAST_SIGNAL
21 };
22
23 enum {
24   PROP_0,
25   PROP_EVAS_OBJECT,
26   PROP_WIDTH,
27   PROP_HEIGHT,
28   PROP_LAST,
29 };
30
31 static guint evas_video_sink_signals[LAST_SIGNAL] = { 0, };
32
33 struct _EvasVideoSinkPrivate {
34    Evas_Object *o;
35    Ecore_Pipe *p;
36
37    int width;
38    int height;
39    Evas_Colorspace eformat;
40    GstVideoFormat gformat;
41
42    GMutex* buffer_mutex;
43    GCond* data_cond;
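   /* render() pushes a GstBuffer pointer through the Ecore_Pipe and then blocks
    * on data_cond until the main-loop handler has consumed the frame; the
    * condition and the flags below are protected by buffer_mutex. */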
44
45    GstBuffer *last_buffer; /* Keep a reference to the last pushed buffer: Evas doesn't copy YUV data, it keeps the row pointers we hand it */
46
47    // If this is TRUE, all processing should finish ASAP.
48    // This is necessary because there could be a race between
49    // unlock() and render(), where unlock() wins, signals the
50    // GCond, and render() then tries to render a frame even though
51    // everything else has already stopped. That would deadlock,
52    // because render() holds the stream lock.
53    //
54    // Protected by the buffer mutex
55    Eina_Bool unlocked : 1;
56    Eina_Bool preroll : 1;
57 };
58
59 #define _do_init(bla)                                   \
60   GST_DEBUG_CATEGORY_INIT(evas_video_sink_debug,        \
61                           "emotion-sink",               \
62                           0,                            \
63                           "emotion video sink")
64
65 GST_BOILERPLATE_FULL(EvasVideoSink,
66                      evas_video_sink,
67                      GstVideoSink,
68                      GST_TYPE_VIDEO_SINK,
69                      _do_init);
70
71
72 static void unlock_buffer_mutex(EvasVideoSinkPrivate* priv);
73
74 static void evas_video_sink_render_handler(void *data, void *buf, unsigned int len);
75
76 static void
77 evas_video_sink_base_init(gpointer g_class)
78 {
79    GstElementClass* element_class;
80
81    element_class = GST_ELEMENT_CLASS(g_class);
82    gst_element_class_add_pad_template(element_class, gst_static_pad_template_get(&sinktemplate));
83    gst_element_class_set_details_simple(element_class, "Evas video sink",
84                                         "Sink/Video", "Sends video data from a GStreamer pipeline to an Evas object",
85                                         "Vincent Torri <vtorri@univ-evry.fr>");
86 }
87
88 static void
89 evas_video_sink_init(EvasVideoSink* sink, EvasVideoSinkClass* klass __UNUSED__)
90 {
91    EvasVideoSinkPrivate* priv;
92
93    INF("sink init");
94    sink->priv = priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, EVAS_TYPE_VIDEO_SINK, EvasVideoSinkPrivate);
95    priv->o = NULL;
96    priv->p = ecore_pipe_add(evas_video_sink_render_handler, sink);
97    priv->last_buffer = NULL;
98    priv->width = 0;
99    priv->height = 0;
100    priv->gformat = GST_VIDEO_FORMAT_UNKNOWN;
101    priv->eformat = EVAS_COLORSPACE_ARGB8888;
102    priv->data_cond = g_cond_new();
103    priv->buffer_mutex = g_mutex_new();
104    priv->preroll = EINA_FALSE;
105    priv->unlocked = EINA_FALSE;
106 }
107
108
109 /**** Object methods ****/
110
111 static void
112 evas_video_sink_set_property(GObject * object, guint prop_id,
113                              const GValue * value, GParamSpec * pspec)
114 {
115    EvasVideoSink* sink;
116    EvasVideoSinkPrivate* priv;
117
118    sink = EVAS_VIDEO_SINK (object);
119    priv = sink->priv;
120
121    switch (prop_id) {
122     case PROP_EVAS_OBJECT:
123        g_mutex_lock(priv->buffer_mutex);
124        priv->o = g_value_get_pointer (value);
125        g_mutex_unlock(priv->buffer_mutex);
126        break;
127     default:
128        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
129        ERR("invalid property");
130        break;
131    }
132 }
133
134 static void
135 evas_video_sink_get_property(GObject * object, guint prop_id,
136                              GValue * value, GParamSpec * pspec)
137 {
138    EvasVideoSink* sink;
139    EvasVideoSinkPrivate* priv;
140
141    sink = EVAS_VIDEO_SINK (object);
142    priv = sink->priv;
143
144    switch (prop_id) {
145     case PROP_EVAS_OBJECT:
146        g_mutex_lock(priv->buffer_mutex);
147        g_value_set_pointer (value, priv->o);
148        g_mutex_unlock(priv->buffer_mutex);
149        break;
150     case PROP_WIDTH:
151        g_mutex_lock(priv->buffer_mutex);
152        g_value_set_int(value, priv->width);
153        g_mutex_unlock(priv->buffer_mutex);
154        break;
155     case PROP_HEIGHT:
156        g_mutex_lock(priv->buffer_mutex);
157        g_value_set_int (value, priv->height);
158        g_mutex_unlock(priv->buffer_mutex);
159        break;
160     default:
161        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
162        ERR("invalid property");
163        break;
164    }
165 }
166
167 static void
168 evas_video_sink_dispose(GObject* object)
169 {
170    EvasVideoSink* sink;
171    EvasVideoSinkPrivate* priv;
172
173    sink = EVAS_VIDEO_SINK(object);
174    priv = sink->priv;
175
176    if (priv->buffer_mutex) {
177       g_mutex_free(priv->buffer_mutex);
178       priv->buffer_mutex = NULL;
179    }
180
181    if (priv->data_cond) {
182       g_cond_free(priv->data_cond);
183       priv->data_cond = NULL;
184    }
185
186    if (priv->p) {
187       ecore_pipe_del(priv->p);
188       priv->p = NULL;
189    }
190
191    if (priv->last_buffer) {
192       gst_buffer_unref(priv->last_buffer);
193       priv->last_buffer = NULL;
194    }
195
196    G_OBJECT_CLASS(parent_class)->dispose(object);
197 }
198
199
200 /**** BaseSink methods ****/
201
202 gboolean evas_video_sink_set_caps(GstBaseSink *bsink, GstCaps *caps)
203 {
204    EvasVideoSink* sink;
205    EvasVideoSinkPrivate* priv;
206    GstVideoFormat format;
207    int width;
208    int height;
209
210    sink = EVAS_VIDEO_SINK(bsink);
211    priv = sink->priv;
212
213    if (G_UNLIKELY(!gst_video_format_parse_caps(caps, &format, &width, &height))) {
214       ERR("Unable to parse caps.");
215       return FALSE;
216    }
217
218    priv->width = width;
219    priv->height = height;
220
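   /* Map the negotiated GStreamer format onto the Evas colorspace used by the
    * render handler: planar I420/YV12 become YCBCR422P601_PL, packed YUY2
    * becomes YCBCR422601_PL, and the BGR/BGRx/BGRA variants are all uploaded
    * as ARGB8888. */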
221    printf("%p format :", priv->o);
222    switch (format)
223      {
224       case GST_VIDEO_FORMAT_I420: priv->eformat = EVAS_COLORSPACE_YCBCR422P601_PL;
225          printf ("I420\n");
226          break;
227       case GST_VIDEO_FORMAT_YV12: priv->eformat = EVAS_COLORSPACE_YCBCR422P601_PL;
228          printf ("YV12\n");
229          break;
230       case GST_VIDEO_FORMAT_YUY2: priv->eformat = EVAS_COLORSPACE_YCBCR422601_PL;
231          printf("YUY2\n");
232          break;
233       case GST_VIDEO_FORMAT_BGR: priv->eformat = EVAS_COLORSPACE_ARGB8888;
234          printf ("BGR\n");
235          break;
236       case GST_VIDEO_FORMAT_BGRx: priv->eformat = EVAS_COLORSPACE_ARGB8888;
237          printf ("BGRx\n");
238          break;
239       case GST_VIDEO_FORMAT_BGRA: priv->eformat = EVAS_COLORSPACE_ARGB8888;
240          printf ("BGRA\n");
241          break;
242       default:
243          ERR("unsupported format: %d", format);
244          return FALSE;
245      }
246    priv->gformat = format;
247
248    return TRUE;
249 }
250
251 static gboolean
252 evas_video_sink_start(GstBaseSink* base_sink)
253 {
254    EvasVideoSinkPrivate* priv;
255    gboolean res = TRUE;
256
257    priv = EVAS_VIDEO_SINK(base_sink)->priv;
258    g_mutex_lock(priv->buffer_mutex);
259    if (!priv->o)
260      res = FALSE;
261    else
262      {
263         if (!priv->p)
264           res = FALSE;
265         else
266           {
267              priv->unlocked = EINA_FALSE;
268           }
269      }
270    g_mutex_unlock(priv->buffer_mutex);
271    return res;
272 }
273
274 static gboolean
275 evas_video_sink_stop(GstBaseSink* base_sink)
276 {
277    EvasVideoSinkPrivate* priv = EVAS_VIDEO_SINK(base_sink)->priv;
278
279    unlock_buffer_mutex(priv);
280    return TRUE;
281 }
282
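/* Called when GstBaseSink flushes the stream: wake up any render() call that is
 * blocked on data_cond so it can bail out instead of deadlocking. */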
283 static gboolean
284 evas_video_sink_unlock(GstBaseSink* object)
285 {
286    EvasVideoSink* sink;
287
288    sink = EVAS_VIDEO_SINK(object);
289
290    unlock_buffer_mutex(sink->priv);
291
292    return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock,
293                                        (object), TRUE);
294 }
295
296 static gboolean
297 evas_video_sink_unlock_stop(GstBaseSink* object)
298 {
299    EvasVideoSink* sink;
300    EvasVideoSinkPrivate* priv;
301
302    sink = EVAS_VIDEO_SINK(object);
303    priv = sink->priv;
304
305    g_mutex_lock(priv->buffer_mutex);
306    priv->unlocked = EINA_FALSE;
307    g_mutex_unlock(priv->buffer_mutex);
308
309    return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock_stop,
310                                        (object), TRUE);
311 }
312
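/* Both preroll and render hand the frame to the main loop through the
 * Ecore_Pipe; the difference is that preroll does not wait for the handler,
 * while render blocks on data_cond until the frame has been consumed. */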
313 static GstFlowReturn
314 evas_video_sink_preroll(GstBaseSink* bsink, GstBuffer* buffer)
315 {
316    GstBuffer *send;
317    EvasVideoSink* sink;
318    EvasVideoSinkPrivate* priv;
319
320    sink = EVAS_VIDEO_SINK(bsink);
321    priv = sink->priv;
322
323    send = gst_buffer_ref(buffer);
324
325    priv->preroll = EINA_TRUE;
326
327    if (!ecore_pipe_write(priv->p, &send, sizeof(buffer))) gst_buffer_unref(send);
328    return GST_FLOW_OK;
329 }
330
331 static GstFlowReturn
332 evas_video_sink_render(GstBaseSink* bsink, GstBuffer* buffer)
333 {
334    GstBuffer *send;
335    EvasVideoSink* sink;
336    EvasVideoSinkPrivate* priv;
337    Eina_Bool ret;
338
339    sink = EVAS_VIDEO_SINK(bsink);
340    priv = sink->priv;
341
342    g_mutex_lock(priv->buffer_mutex);
343
344    if (priv->unlocked) {
345       ERR("sink unlocked, dropping frame");
346       g_mutex_unlock(priv->buffer_mutex);
347       return GST_FLOW_OK;
348    }
349
350    priv->preroll = EINA_FALSE;
351
352    send = gst_buffer_ref(buffer);
353    ret = ecore_pipe_write(priv->p, &send, sizeof(buffer));
354    if (!ret)
355      { gst_buffer_unref(send); g_mutex_unlock(priv->buffer_mutex); return GST_FLOW_ERROR; }
356
357    g_cond_wait(priv->data_cond, priv->buffer_mutex);
358    g_mutex_unlock(priv->buffer_mutex);
359
360    return GST_FLOW_OK;
361 }
362
363 static void evas_video_sink_render_handler(void *data,
364                                            void *buf,
365                                            unsigned int len)
366 {
367    Emotion_Gstreamer_Video *ev;
368    Emotion_Video_Stream *vstream;
369    EvasVideoSink* sink;
370    EvasVideoSinkPrivate* priv;
371    GstBuffer* buffer;
372    unsigned char *evas_data;
373    const guint8 *gst_data;
374    GstQuery *query;
375    GstFormat fmt = GST_FORMAT_TIME;
376    Evas_Coord w, h;
377    gint64 pos;
378
379    sink = (EvasVideoSink *)data;
380    priv = sink->priv;
381
382    buffer = *((GstBuffer **)buf);
383
384    if (priv->unlocked)
385      goto exit_point;
386
387    gst_data = GST_BUFFER_DATA(buffer);
388    if (!gst_data) goto exit_point;
389
390    // This prevents a race condition when data is still in the pipe
391    // but the buffer size has changed because of a request from
392    // the emotion smart object (like on a file set).
393    evas_object_image_size_get(priv->o, &w, &h);
394    if (w != priv->width || h != priv->height)
395      goto exit_point;
396
397    ev = evas_object_data_get(priv->o, "_emotion_gstreamer_video");
398    if (!ev) goto exit_point;
399
400    evas_object_image_size_set(priv->o, priv->width, priv->height);
401    evas_object_image_alpha_set(priv->o, 0);
402    evas_object_image_colorspace_set(priv->o, priv->eformat);
403
404    evas_data = (unsigned char *)evas_object_image_data_get(priv->o, 1);
405
406    // Convert the frame to the layout Evas expects for the negotiated
407    // colorspace; the BGRA case also pre-multiplies the colors by alpha.
408    switch (priv->gformat)
409      {
410       case GST_VIDEO_FORMAT_BGR:
411         {
412            unsigned char *evas_tmp;
413            int x;
414            int y;
415
416            evas_tmp = evas_data;
417            /* FIXME: could this be optimized ? */
418            for (x = 0; x < priv->height; x++) {
419               for (y = 0; y < priv->width; y++) {
420                  evas_tmp[0] = gst_data[0];
421                  evas_tmp[1] = gst_data[1];
422                  evas_tmp[2] = gst_data[2];
423                  evas_tmp[3] = 255;
424                  gst_data += 3;
425                  evas_tmp += 4;
426               }
427            }
428            break;
429         }
430
431         // BGRx carries no alpha channel; expand it to Evas's BGRA with
432         // the alpha byte forced to fully opaque.
433       case GST_VIDEO_FORMAT_BGRx:
434         {
435            unsigned char *evas_tmp;
436            int x;
437            int y;
438
439            evas_tmp = evas_data;
440            /* FIXME: could this be optimized ? */
441            for (x = 0; x < priv->height; x++) {
442               for (y = 0; y < priv->width; y++) {
443                  evas_tmp[0] = gst_data[0];
444                  evas_tmp[1] = gst_data[1];
445                  evas_tmp[2] = gst_data[2];
446                  evas_tmp[3] = 255;
447                  gst_data += 4;
448                  evas_tmp += 4;
449               }
450            }
451            break;
452         }
453
454         // Evas's BGRA has pre-multiplied alpha while GStreamer's doesn't,
455         // so multiply each color channel by its alpha value here.
456       case GST_VIDEO_FORMAT_BGRA:
457         {
458            unsigned char *evas_tmp;
459            int x;
460            int y;
461            unsigned char alpha;
462
463            evas_tmp = evas_data;
464            /* FIXME: could this be optimized ? */
465            for (x = 0; x < priv->height; x++) {
466               for (y = 0; y < priv->width; y++) {
467                  alpha = gst_data[3];
468                  evas_tmp[0] = (gst_data[0] * alpha) / 255;
469                  evas_tmp[1] = (gst_data[1] * alpha) / 255;
470                  evas_tmp[2] = (gst_data[2] * alpha) / 255;
471                  evas_tmp[3] = alpha;
472                  gst_data += 4;
473                  evas_tmp += 4;
474               }
475            }
476            break;
477         }
478
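         // For the YUV colorspaces Evas is not given pixel data but an array of
         // row pointers: height luma rows followed by the chroma rows. The
         // pointers below point straight into the GstBuffer, which is why
         // last_buffer keeps a reference to it.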
479       case GST_VIDEO_FORMAT_I420:
480         {
481            int i;
482            const unsigned char **rows;
483
484            evas_object_image_pixels_dirty_set(priv->o, 1);
485            rows = (const unsigned char **)evas_data;
486
487            for (i = 0; i < priv->height; i++)
488              rows[i] = &gst_data[i * priv->width];
489
490            rows += priv->height;
491            for (i = 0; i < (priv->height / 2); i++)
492              rows[i] = &gst_data[priv->height * priv->width + i * (priv->width / 2)];
493
494            rows += priv->height / 2;
495            for (i = 0; i < (priv->height / 2); i++)
496              rows[i] = &gst_data[priv->height * priv->width + priv->height * (priv->width /4) + i * (priv->width / 2)];
497            break;
498         }
499
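         // YV12 has the same layout as I420 with the U and V planes swapped, so
         // the two chroma row blocks are filled in the opposite order.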
500       case GST_VIDEO_FORMAT_YV12:
501         {
502            int i;
503            const unsigned char **rows;
504
505            evas_object_image_pixels_dirty_set(priv->o, 1);
506
507            rows = (const unsigned char **)evas_data;
508
509            for (i = 0; i < priv->height; i++)
510              rows[i] = &gst_data[i * priv->width];
511
512            rows += priv->height;
513            for (i = 0; i < (priv->height / 2); i++)
514              rows[i] = &gst_data[priv->height * priv->width + priv->height * (priv->width /4) + i * (priv->width / 2)];
515
516            rows += priv->height / 2;
517            for (i = 0; i < (priv->height / 2); i++)
518              rows[i] = &gst_data[priv->height * priv->width + i * (priv->width / 2)];
519            break;
520         }
521
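         // YUY2 is packed 4:2:2 (one Y0 U Y1 V group per two pixels), so each
         // row is width * 2 bytes and a single set of row pointers is enough.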
522       case GST_VIDEO_FORMAT_YUY2:
523         {
524            int i;
525            const unsigned char **rows;
526
527            evas_object_image_pixels_dirty_set(priv->o, 1);
528
529            rows = (const unsigned char **)evas_data;
530
531            for (i = 0; i < priv->height; i++)
532              rows[i] = &gst_data[i * priv->width * 2];
533            break;
534         }
535      }
536
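   // Hand the pixels back to Evas and mark the whole frame as updated so it
   // gets redrawn with this buffer.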
537    evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
538    evas_object_image_data_set(priv->o, evas_data);
539    evas_object_image_pixels_dirty_set(priv->o, 0);
540
541    _emotion_frame_new(ev->obj);
542
543    vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);
544
545    gst_element_query_position(ev->pipeline, &fmt, &pos);
546    ev->position = (double)pos / (double)GST_SECOND;
547
548    vstream->width = priv->width;
549    vstream->height = priv->height;
550    ev->ratio = (double) priv->width / (double) priv->height;
551
552    _emotion_video_pos_update(ev->obj, ev->position, vstream->length_time);
553    _emotion_frame_resize(ev->obj, priv->width, priv->height, ev->ratio);
554
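   // Keep a reference to this buffer (Evas may still read from it) and drop the
   // previous one; then wake up render(), which is blocked on data_cond, unless
   // this was only a preroll.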
555  exit_point:
556    if (priv->last_buffer) gst_buffer_unref(priv->last_buffer);
557    priv->last_buffer = buffer;
558
559    if (priv->preroll) return;
560
561    g_mutex_lock(priv->buffer_mutex);
562
563    if (priv->unlocked) {
564       g_mutex_unlock(priv->buffer_mutex);
565       return;
566    }
567
568    g_cond_signal(priv->data_cond);
569    g_mutex_unlock(priv->buffer_mutex);
570 }
571
572 static void
573 unlock_buffer_mutex(EvasVideoSinkPrivate* priv)
574 {
575    g_mutex_lock(priv->buffer_mutex);
576
577    priv->unlocked = EINA_TRUE;
578    g_cond_signal(priv->data_cond);
579    g_mutex_unlock(priv->buffer_mutex);
580 }
581
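/* Custom marshaller for the "repaint-requested" signal: its single argument is
 * a GstBuffer (a GstMiniObject), extracted with gst_value_get_mini_object(). */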
582 static void
583 marshal_VOID__MINIOBJECT(GClosure * closure, GValue * return_value __UNUSED__,
584                          guint n_param_values, const GValue * param_values,
585                          gpointer invocation_hint __UNUSED__, gpointer marshal_data)
586 {
587    typedef void (*marshalfunc_VOID__MINIOBJECT) (gpointer obj, gpointer arg1, gpointer data2);
588    marshalfunc_VOID__MINIOBJECT callback;
589    GCClosure *cc;
590    gpointer data1, data2;
591
592    cc = (GCClosure *) closure;
593
594    g_return_if_fail(n_param_values == 2);
595
596    if (G_CCLOSURE_SWAP_DATA(closure)) {
597       data1 = closure->data;
598       data2 = g_value_peek_pointer(param_values + 0);
599    } else {
600       data1 = g_value_peek_pointer(param_values + 0);
601       data2 = closure->data;
602    }
603    callback = (marshalfunc_VOID__MINIOBJECT) (marshal_data ? marshal_data : cc->callback);
604
605    callback(data1, gst_value_get_mini_object(param_values + 1), data2);
606 }
607
608 static void
609 evas_video_sink_class_init(EvasVideoSinkClass* klass)
610 {
611    GObjectClass* gobject_class;
612    GstBaseSinkClass* gstbase_sink_class;
613
614    gobject_class = G_OBJECT_CLASS(klass);
615    gstbase_sink_class = GST_BASE_SINK_CLASS(klass);
616
617    g_type_class_add_private(klass, sizeof(EvasVideoSinkPrivate));
618
619    gobject_class->set_property = evas_video_sink_set_property;
620    gobject_class->get_property = evas_video_sink_get_property;
621
622    g_object_class_install_property (gobject_class, PROP_EVAS_OBJECT,
623                                     g_param_spec_pointer ("evas-object", "Evas Object",
624                                                           "The Evas object where the display of the video will be done",
625                                                           G_PARAM_READWRITE));
626
627    g_object_class_install_property (gobject_class, PROP_WIDTH,
628                                     g_param_spec_int ("width", "Width",
629                                                       "The width of the video",
630                                                       0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
631
632    g_object_class_install_property (gobject_class, PROP_HEIGHT,
633                                     g_param_spec_int ("height", "Height",
634                                                       "The height of the video",
635                                                       0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
636
637    gobject_class->dispose = evas_video_sink_dispose;
638
639    gstbase_sink_class->set_caps = evas_video_sink_set_caps;
640    gstbase_sink_class->stop = evas_video_sink_stop;
641    gstbase_sink_class->start = evas_video_sink_start;
642    gstbase_sink_class->unlock = evas_video_sink_unlock;
643    gstbase_sink_class->unlock_stop = evas_video_sink_unlock_stop;
644    gstbase_sink_class->render = evas_video_sink_render;
645    gstbase_sink_class->preroll = evas_video_sink_preroll;
646
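   /* "repaint-requested" is declared with a single GstBuffer argument and is
    * dispatched through marshal_VOID__MINIOBJECT above. */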
647    evas_video_sink_signals[REPAINT_REQUESTED] = g_signal_new("repaint-requested",
648                                                              G_TYPE_FROM_CLASS(klass),
649                                                              (GSignalFlags)(G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
650                                                              0,
651                                                              0,
652                                                              0,
653                                                              marshal_VOID__MINIOBJECT,
654                                                              G_TYPE_NONE, 1, GST_TYPE_BUFFER);
655 }
656
657 gboolean
658 gstreamer_plugin_init (GstPlugin * plugin)
659 {
660    return gst_element_register (plugin,
661                                 "emotion-sink",
662                                 GST_RANK_NONE,
663                                 EVAS_TYPE_VIDEO_SINK);
664 }
665
666 GstElement *
667 gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
668                          Evas_Object *o,
669                          const char *uri)
670 {
671    GstElement *playbin;
672    GstElement *sink;
673    Evas_Object *obj;
674    GstStateChangeReturn res;
675    double start, end;
676
677    obj = _emotion_image_get(o);
678    if (!obj)
679      {
680         ERR("No Evas_Object specified");
681         return NULL;
682      }
683
684    start = ecore_time_get();
685    playbin = gst_element_factory_make("playbin2", "playbin");
686    if (!playbin)
687      {
688         ERR("Unable to create 'playbin2' GstElement.");
689         return NULL;
690      }
691    end = ecore_time_get();
692    DBG("Playbin2: %f", end - start);
693
694    start = ecore_time_get();
695    sink = gst_element_factory_make("emotion-sink", "sink");
696    if (!sink)
697      {
698         ERR("Unable to create 'emotion-sink' GstElement.");
699         goto unref_pipeline;
700      }
701
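   /* Wire everything together: playbin2 renders video through the emotion-sink,
    * and the sink pushes its frames to the Evas_Object taken from the emotion
    * object. */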
702    g_object_set(G_OBJECT(playbin), "video-sink", sink, NULL);
703    g_object_set(G_OBJECT(playbin), "uri", uri, NULL);
704    g_object_set(G_OBJECT(sink), "evas-object", obj, NULL);
705
706    end = ecore_time_get();
707
708    DBG("emotion-sink: %f", end - start);
709
710    start = ecore_time_get();
711    /* res = gst_element_set_state(playbin, GST_STATE_PLAYING); */
712    res = gst_element_set_state(playbin, GST_STATE_PAUSED);
713    if (res == GST_STATE_CHANGE_FAILURE)
714      {
715         ERR("Unable to set GST_STATE_PAUSED.");
716         goto unref_pipeline;
717      }
718    end = ecore_time_get();
719    DBG("Pause pipeline: %f", end - start);
720
721    start = ecore_time_get();
722    res = gst_element_get_state(playbin, NULL, NULL, GST_CLOCK_TIME_NONE);
723    if (res != GST_STATE_CHANGE_SUCCESS)
724      {
725         /** NOTE: set GST_DEBUG_DUMP_DOT_DIR=/tmp and EMOTION_ENGINE=gstreamer to dump the pipeline as $EMOTION_GSTREAMER_DOT.dot in '/tmp', */
726         /** then run: dot -Tpng -o emotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
727         if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(playbin), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
728
729         ERR("Unable to reach the PAUSED state.");
730         goto unref_pipeline;
731      }
732    end = ecore_time_get();
733    DBG("No time: %f", end - start);
734
735    /** NOTE: set GST_DEBUG_DUMP_DOT_DIR=/tmp and EMOTION_ENGINE=gstreamer to dump the pipeline as $EMOTION_GSTREAMER_DOT.dot in '/tmp', */
736    /** then run: dot -Tpng -o emotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
737    if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(playbin), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));
738
739    evas_object_data_set(obj, "_emotion_gstreamer_video", ev);
740
741    return playbin;
742
743  unref_pipeline:
744    gst_object_unref(playbin);
745    return NULL;
746 }