emotion: use new ecore infrastructure and remove some race conditions.
[profile/ivi/emotion.git] / src/modules/gstreamer/emotion_sink.c
#include <Ecore.h>

#include "emotion_gstreamer.h"

static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE("sink",
                                                                   GST_PAD_SINK, GST_PAD_ALWAYS,
                                                                   GST_STATIC_CAPS(GST_VIDEO_CAPS_YUV("{ I420, YV12, YUY2 }") ";"
                                                                                   GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_BGR ";" GST_VIDEO_CAPS_BGRA));

GST_DEBUG_CATEGORY_STATIC(evas_video_sink_debug);
#define GST_CAT_DEFAULT evas_video_sink_debug

enum {
  REPAINT_REQUESTED,
  LAST_SIGNAL
};

enum {
  PROP_0,
  PROP_EVAS_OBJECT,
  PROP_WIDTH,
  PROP_HEIGHT,
  PROP_LAST,
};

static guint evas_video_sink_signals[LAST_SIGNAL] = { 0, };

#define _do_init(bla)                                   \
  GST_DEBUG_CATEGORY_INIT(evas_video_sink_debug,        \
                          "emotion-sink",               \
                          0,                            \
                          "emotion video sink")

GST_BOILERPLATE_FULL(EvasVideoSink,
                     evas_video_sink,
                     GstVideoSink,
                     GST_TYPE_VIDEO_SINK,
                     _do_init);


static void unlock_buffer_mutex(EvasVideoSinkPrivate* priv);

static void evas_video_sink_main_render(void *data);

static void
evas_video_sink_base_init(gpointer g_class)
{
   GstElementClass* element_class;

   element_class = GST_ELEMENT_CLASS(g_class);
   gst_element_class_add_pad_template(element_class, gst_static_pad_template_get(&sinktemplate));
   gst_element_class_set_details_simple(element_class, "Evas video sink",
                                        "Sink/Video", "Sends video data from a GStreamer pipeline to an Evas object",
                                        "Vincent Torri <vtorri@univ-evry.fr>");
}

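/* Instance init: allocate the private structure and create the mutex and
 * condition used to hand frames from the GStreamer streaming thread over to
 * the Ecore main loop. */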
static void
evas_video_sink_init(EvasVideoSink* sink, EvasVideoSinkClass* klass __UNUSED__)
{
   EvasVideoSinkPrivate* priv;

   INF("sink init");
   sink->priv = priv = G_TYPE_INSTANCE_GET_PRIVATE(sink, EVAS_TYPE_VIDEO_SINK, EvasVideoSinkPrivate);
   priv->o = NULL;
   priv->last_buffer = NULL;
   priv->width = 0;
   priv->height = 0;
   priv->gformat = GST_VIDEO_FORMAT_UNKNOWN;
   priv->eformat = EVAS_COLORSPACE_ARGB8888;
   priv->data_cond = g_cond_new();
   priv->buffer_mutex = g_mutex_new();
   priv->unlocked = EINA_FALSE;
}


/**** Object methods ****/

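/* GObject property accessors. The Evas_Object is the only writable property;
 * width and height are exposed read-only once the caps are known. */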
static void
evas_video_sink_set_property(GObject * object, guint prop_id,
                             const GValue * value, GParamSpec * pspec)
{
   EvasVideoSink* sink;
   EvasVideoSinkPrivate* priv;

   sink = EVAS_VIDEO_SINK (object);
   priv = sink->priv;

   switch (prop_id) {
    case PROP_EVAS_OBJECT:
       g_mutex_lock(priv->buffer_mutex);
       priv->o = g_value_get_pointer (value);
       g_mutex_unlock(priv->buffer_mutex);
       break;
    default:
       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
       ERR("invalid property");
       break;
   }
}

static void
evas_video_sink_get_property(GObject * object, guint prop_id,
                             GValue * value, GParamSpec * pspec)
{
   EvasVideoSink* sink;
   EvasVideoSinkPrivate* priv;

   sink = EVAS_VIDEO_SINK (object);
   priv = sink->priv;

   switch (prop_id) {
    case PROP_EVAS_OBJECT:
       g_mutex_lock(priv->buffer_mutex);
       g_value_set_pointer (value, priv->o);
       g_mutex_unlock(priv->buffer_mutex);
       break;
    case PROP_WIDTH:
       g_mutex_lock(priv->buffer_mutex);
       g_value_set_int(value, priv->width);
       g_mutex_unlock(priv->buffer_mutex);
       break;
    case PROP_HEIGHT:
       g_mutex_lock(priv->buffer_mutex);
       g_value_set_int (value, priv->height);
       g_mutex_unlock(priv->buffer_mutex);
       break;
    default:
       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
       ERR("invalid property");
       break;
   }
}

static void
evas_video_sink_dispose(GObject* object)
{
   EvasVideoSink* sink;
   EvasVideoSinkPrivate* priv;

   sink = EVAS_VIDEO_SINK(object);
   priv = sink->priv;

   if (priv->buffer_mutex) {
      g_mutex_free(priv->buffer_mutex);
      priv->buffer_mutex = 0;
   }

   if (priv->data_cond) {
      g_cond_free(priv->data_cond);
      priv->data_cond = 0;
   }

   if (priv->last_buffer) {
      gst_buffer_unref(priv->last_buffer);
      priv->last_buffer = NULL;
   }

   G_OBJECT_CLASS(parent_class)->dispose(object);
}


/**** BaseSink methods ****/

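/* Negotiate caps: extract width/height and map the GStreamer video format to
 * the matching Evas colorspace. Unsupported formats are refused. */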
gboolean evas_video_sink_set_caps(GstBaseSink *bsink, GstCaps *caps)
{
   EvasVideoSink* sink;
   EvasVideoSinkPrivate* priv;
   GstVideoFormat format;
   int width;
   int height;

   sink = EVAS_VIDEO_SINK(bsink);
   priv = sink->priv;

   if (G_UNLIKELY(!gst_video_format_parse_caps(caps, &format, &width, &height))) {
      ERR("Unable to parse caps.");
      return FALSE;
   }

   priv->width = width;
   priv->height = height;

   printf("%p format :", priv->o);
   switch (format)
     {
      case GST_VIDEO_FORMAT_I420: priv->eformat = EVAS_COLORSPACE_YCBCR422P601_PL;
         printf ("I420\n");
         break;
      case GST_VIDEO_FORMAT_YV12: priv->eformat = EVAS_COLORSPACE_YCBCR422P601_PL;
         printf ("YV12\n");
         break;
      case GST_VIDEO_FORMAT_YUY2: priv->eformat = EVAS_COLORSPACE_YCBCR422601_PL;
         printf("YUY2\n");
         break;
      case GST_VIDEO_FORMAT_BGR: priv->eformat = EVAS_COLORSPACE_ARGB8888;
         printf ("BGR\n");
         break;
      case GST_VIDEO_FORMAT_BGRx: priv->eformat = EVAS_COLORSPACE_ARGB8888;
         printf ("BGRx\n");
         break;
      case GST_VIDEO_FORMAT_BGRA: priv->eformat = EVAS_COLORSPACE_ARGB8888;
         printf ("BGRA\n");
         break;
      default:
         ERR("unsupported : %d\n", format);
         return FALSE;
     }
   priv->gformat = format;

   return TRUE;
}

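/* Start/stop and unlock handling. start() refuses to run without an attached
 * Evas_Object; stop() and unlock() wake any streaming thread blocked in
 * render() so the pipeline can flush or shut down without deadlocking. */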
static gboolean
evas_video_sink_start(GstBaseSink* base_sink)
{
   EvasVideoSinkPrivate* priv;
   gboolean res = TRUE;

   priv = EVAS_VIDEO_SINK(base_sink)->priv;
   g_mutex_lock(priv->buffer_mutex);
   if (!priv->o)
     res = FALSE;
   else
     priv->unlocked = EINA_FALSE;
   g_mutex_unlock(priv->buffer_mutex);
   return res;
}

static gboolean
evas_video_sink_stop(GstBaseSink* base_sink)
{
   EvasVideoSinkPrivate* priv = EVAS_VIDEO_SINK(base_sink)->priv;

   unlock_buffer_mutex(priv);
   return TRUE;
}

static gboolean
evas_video_sink_unlock(GstBaseSink* object)
{
   EvasVideoSink* sink;

   sink = EVAS_VIDEO_SINK(object);

   unlock_buffer_mutex(sink->priv);

   return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock,
                                       (object), TRUE);
}

static gboolean
evas_video_sink_unlock_stop(GstBaseSink* object)
{
   EvasVideoSink* sink;
   EvasVideoSinkPrivate* priv;

   sink = EVAS_VIDEO_SINK(object);
   priv = sink->priv;

   g_mutex_lock(priv->buffer_mutex);
   priv->unlocked = FALSE;
   g_mutex_unlock(priv->buffer_mutex);

   return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SINK_CLASS, unlock_stop,
                                       (object), TRUE);
}

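/* Preroll frames are pushed to the Ecore main loop asynchronously; the
 * streaming thread does not wait for them to be drawn. */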
static GstFlowReturn
evas_video_sink_preroll(GstBaseSink* bsink, GstBuffer* buffer)
{
   Emotion_Gstreamer_Buffer *send;
   EvasVideoSinkPrivate *priv;
   EvasVideoSink *sink;

   sink = EVAS_VIDEO_SINK(bsink);
   priv = sink->priv;

   send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_TRUE);

   if (send)
     ecore_main_loop_thread_safe_call(evas_video_sink_main_render, send);

   return GST_FLOW_OK;
}

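/* render() hands the frame to the main loop through
 * ecore_main_loop_thread_safe_call() and then blocks on data_cond until
 * evas_video_sink_main_render() has consumed it, unless the sink has been
 * unlocked in the meantime. */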
static GstFlowReturn
evas_video_sink_render(GstBaseSink* bsink, GstBuffer* buffer)
{
   Emotion_Gstreamer_Buffer *send;
   EvasVideoSinkPrivate *priv;
   EvasVideoSink *sink;

   sink = EVAS_VIDEO_SINK(bsink);
   priv = sink->priv;

   g_mutex_lock(priv->buffer_mutex);

   if (priv->unlocked) {
      ERR("LOCKED");
      g_mutex_unlock(priv->buffer_mutex);
      return GST_FLOW_OK;
   }

   send = emotion_gstreamer_buffer_alloc(priv, buffer, EINA_FALSE);
   if (!send) {
      g_mutex_unlock(priv->buffer_mutex);
      return GST_FLOW_ERROR;
   }

   ecore_main_loop_thread_safe_call(evas_video_sink_main_render, send);

   g_cond_wait(priv->data_cond, priv->buffer_mutex);
   g_mutex_unlock(priv->buffer_mutex);

   return GST_FLOW_OK;
}

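/* Runs in the Ecore main loop (called through
 * ecore_main_loop_thread_safe_call()). Copies or maps the GstBuffer into the
 * Evas image object, updates the emotion position/ratio bookkeeping, keeps a
 * reference on the last buffer and finally wakes the streaming thread waiting
 * in render(). */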
static void evas_video_sink_main_render(void *data)
{
   Emotion_Gstreamer_Buffer *send;
   Emotion_Gstreamer_Video *ev;
   Emotion_Video_Stream *vstream;
   EvasVideoSinkPrivate* priv;
   GstBuffer* buffer;
   unsigned char *evas_data;
   const guint8 *gst_data;
   GstFormat fmt = GST_FORMAT_TIME;
   Evas_Coord w, h;
   gint64 pos;
   Eina_Bool preroll;

   send = data;

   priv = send->sink;
   buffer = send->frame;
   preroll = send->preroll;

   if (!priv) goto exit_point;

   if (priv->unlocked) goto exit_point;

   gst_data = GST_BUFFER_DATA(buffer);
   if (!gst_data) goto exit_point;

   // This prevents a race condition when data is still in the pipeline
   // but the buffer size has changed because of a request from the
   // emotion smart object (like on a file set).
   evas_object_image_size_get(priv->o, &w, &h);
   if (w != priv->width || h != priv->height)
     goto exit_point;

   ev = evas_object_data_get(priv->o, "_emotion_gstreamer_video");
   if (!ev) goto exit_point;

   evas_object_image_size_set(priv->o, priv->width, priv->height);
   evas_object_image_alpha_set(priv->o, 0);
   evas_object_image_colorspace_set(priv->o, priv->eformat);

   evas_data = (unsigned char *)evas_object_image_data_get(priv->o, 1);

   // GStreamer hands us straight (non pre-multiplied) data, while Evas
   // expects pre-multiplied BGRA. Convert each supported format to Evas's
   // BGRA layout.
   switch (priv->gformat)
     {
      case GST_VIDEO_FORMAT_BGR:
        {
           unsigned char *evas_tmp;
           int x;
           int y;

           evas_tmp = evas_data;
           /* FIXME: could this be optimized ? */
           for (x = 0; x < priv->height; x++) {
              for (y = 0; y < priv->width; y++) {
                 evas_tmp[0] = gst_data[0];
                 evas_tmp[1] = gst_data[1];
                 evas_tmp[2] = gst_data[2];
                 evas_tmp[3] = 255;
                 gst_data += 3;
                 evas_tmp += 4;
              }
           }
           break;
        }

      // BGRx: drop the padding byte and set alpha to 255.
      case GST_VIDEO_FORMAT_BGRx:
        {
           unsigned char *evas_tmp;
           int x;
           int y;

           evas_tmp = evas_data;
           /* FIXME: could this be optimized ? */
           for (x = 0; x < priv->height; x++) {
              for (y = 0; y < priv->width; y++) {
                 evas_tmp[0] = gst_data[0];
                 evas_tmp[1] = gst_data[1];
                 evas_tmp[2] = gst_data[2];
                 evas_tmp[3] = 255;
                 gst_data += 4;
                 evas_tmp += 4;
              }
           }
           break;
        }

      // BGRA: Evas's BGRA has pre-multiplied alpha while GStreamer's doesn't,
      // so multiply each component by the alpha value.
      case GST_VIDEO_FORMAT_BGRA:
        {
           unsigned char *evas_tmp;
           int x;
           int y;
           unsigned char alpha;

           evas_tmp = evas_data;
           /* FIXME: could this be optimized ? */
           for (x = 0; x < priv->height; x++) {
              for (y = 0; y < priv->width; y++) {
                 alpha = gst_data[3];
                 evas_tmp[0] = (gst_data[0] * alpha) / 255;
                 evas_tmp[1] = (gst_data[1] * alpha) / 255;
                 evas_tmp[2] = (gst_data[2] * alpha) / 255;
                 evas_tmp[3] = alpha;
                 gst_data += 4;
                 evas_tmp += 4;
              }
           }
           break;
        }

      case GST_VIDEO_FORMAT_I420:
        {
           int i;
           const unsigned char **rows;

           evas_object_image_pixels_dirty_set(priv->o, 1);
           rows = (const unsigned char **)evas_data;

           for (i = 0; i < priv->height; i++)
             rows[i] = &gst_data[i * priv->width];

           rows += priv->height;
           for (i = 0; i < (priv->height / 2); i++)
             rows[i] = &gst_data[priv->height * priv->width + i * (priv->width / 2)];

           rows += priv->height / 2;
           for (i = 0; i < (priv->height / 2); i++)
             rows[i] = &gst_data[priv->height * priv->width + priv->height * (priv->width / 4) + i * (priv->width / 2)];
           break;
        }

      case GST_VIDEO_FORMAT_YV12:
        {
           int i;
           const unsigned char **rows;

           evas_object_image_pixels_dirty_set(priv->o, 1);

           rows = (const unsigned char **)evas_data;

           for (i = 0; i < priv->height; i++)
             rows[i] = &gst_data[i * priv->width];

           rows += priv->height;
           for (i = 0; i < (priv->height / 2); i++)
             rows[i] = &gst_data[priv->height * priv->width + priv->height * (priv->width / 4) + i * (priv->width / 2)];

           rows += priv->height / 2;
           for (i = 0; i < (priv->height / 2); i++)
             rows[i] = &gst_data[priv->height * priv->width + i * (priv->width / 2)];
           break;
        }

      case GST_VIDEO_FORMAT_YUY2:
        {
           int i;
           const unsigned char **rows;

           evas_object_image_pixels_dirty_set(priv->o, 1);

           rows = (const unsigned char **)evas_data;

           for (i = 0; i < priv->height; i++)
             rows[i] = &gst_data[i * priv->width * 2];
           break;
        }
     }

   evas_object_image_data_update_add(priv->o, 0, 0, priv->width, priv->height);
   evas_object_image_data_set(priv->o, evas_data);
   evas_object_image_pixels_dirty_set(priv->o, 0);

   _emotion_frame_new(ev->obj);

   vstream = eina_list_nth(ev->video_streams, ev->video_stream_nbr - 1);

   gst_element_query_position(ev->pipeline, &fmt, &pos);
   ev->position = (double)pos / (double)GST_SECOND;

   vstream->width = priv->width;
   vstream->height = priv->height;
   ev->ratio = (double) priv->width / (double) priv->height;

   _emotion_video_pos_update(ev->obj, ev->position, vstream->length_time);
   _emotion_frame_resize(ev->obj, priv->width, priv->height, ev->ratio);

   if (priv->last_buffer) gst_buffer_unref(priv->last_buffer);
   priv->last_buffer = gst_buffer_ref(buffer);

 exit_point:
   emotion_gstreamer_buffer_free(send);

   if (preroll || !priv) return;

   g_mutex_lock(priv->buffer_mutex);

   if (priv->unlocked) {
      g_mutex_unlock(priv->buffer_mutex);
      return;
   }

   g_cond_signal(priv->data_cond);
   g_mutex_unlock(priv->buffer_mutex);
}

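/* Mark the sink as unlocked and wake up a streaming thread that may be
 * blocked in render(), so flushing and shutdown cannot deadlock. */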
static void
unlock_buffer_mutex(EvasVideoSinkPrivate* priv)
{
   g_mutex_lock(priv->buffer_mutex);

   priv->unlocked = EINA_TRUE;
   g_cond_signal(priv->data_cond);
   g_mutex_unlock(priv->buffer_mutex);
}

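/* Custom GClosure marshaller for the repaint-requested signal: the stock
 * g_cclosure marshallers cannot pass a GstMiniObject (here a GstBuffer) as
 * the single signal argument. */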
static void
marshal_VOID__MINIOBJECT(GClosure * closure, GValue * return_value __UNUSED__,
                         guint n_param_values, const GValue * param_values,
                         gpointer invocation_hint __UNUSED__, gpointer marshal_data)
{
   typedef void (*marshalfunc_VOID__MINIOBJECT) (gpointer obj, gpointer arg1, gpointer data2);
   marshalfunc_VOID__MINIOBJECT callback;
   GCClosure *cc;
   gpointer data1, data2;

   cc = (GCClosure *) closure;

   g_return_if_fail(n_param_values == 2);

   if (G_CCLOSURE_SWAP_DATA(closure)) {
      data1 = closure->data;
      data2 = g_value_peek_pointer(param_values + 0);
   } else {
      data1 = g_value_peek_pointer(param_values + 0);
      data2 = closure->data;
   }
   callback = (marshalfunc_VOID__MINIOBJECT) (marshal_data ? marshal_data : cc->callback);

   callback(data1, gst_value_get_mini_object(param_values + 1), data2);
}

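/* Class init: install the GObject properties, hook up the GstBaseSink
 * virtual methods and declare the repaint-requested action signal. */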
static void
evas_video_sink_class_init(EvasVideoSinkClass* klass)
{
   GObjectClass* gobject_class;
   GstBaseSinkClass* gstbase_sink_class;

   gobject_class = G_OBJECT_CLASS(klass);
   gstbase_sink_class = GST_BASE_SINK_CLASS(klass);

   g_type_class_add_private(klass, sizeof(EvasVideoSinkPrivate));

   gobject_class->set_property = evas_video_sink_set_property;
   gobject_class->get_property = evas_video_sink_get_property;

   g_object_class_install_property (gobject_class, PROP_EVAS_OBJECT,
                                    g_param_spec_pointer ("evas-object", "Evas Object",
                                                          "The Evas object where the display of the video will be done",
                                                          G_PARAM_READWRITE));

   g_object_class_install_property (gobject_class, PROP_WIDTH,
                                    g_param_spec_int ("width", "Width",
                                                      "The width of the video",
                                                      0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));

   g_object_class_install_property (gobject_class, PROP_HEIGHT,
                                    g_param_spec_int ("height", "Height",
                                                      "The height of the video",
                                                      0, 65536, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));

   gobject_class->dispose = evas_video_sink_dispose;

   gstbase_sink_class->set_caps = evas_video_sink_set_caps;
   gstbase_sink_class->stop = evas_video_sink_stop;
   gstbase_sink_class->start = evas_video_sink_start;
   gstbase_sink_class->unlock = evas_video_sink_unlock;
   gstbase_sink_class->unlock_stop = evas_video_sink_unlock_stop;
   gstbase_sink_class->render = evas_video_sink_render;
   gstbase_sink_class->preroll = evas_video_sink_preroll;

   evas_video_sink_signals[REPAINT_REQUESTED] = g_signal_new("repaint-requested",
                                                             G_TYPE_FROM_CLASS(klass),
                                                             (GSignalFlags)(G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
                                                             0,
                                                             0,
                                                             0,
                                                             marshal_VOID__MINIOBJECT,
                                                             G_TYPE_NONE, 1, GST_TYPE_BUFFER);
}
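
/* Illustrative sketch (not part of this file): an application holding a
 * reference to the sink could listen for repaint requests like this, assuming
 * a callback of its own named on_repaint():
 *
 *   static void on_repaint(EvasVideoSink *sink, GstBuffer *buffer, gpointer data);
 *
 *   g_signal_connect(G_OBJECT(sink), "repaint-requested",
 *                    G_CALLBACK(on_repaint), NULL);
 */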

gboolean
gstreamer_plugin_init (GstPlugin * plugin)
{
   return gst_element_register (plugin,
                                "emotion-sink",
                                GST_RANK_NONE,
                                EVAS_TYPE_VIDEO_SINK);
}

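/* Build the playback pipeline for emotion: create a playbin2, plug this sink
 * in as its video-sink, attach the Evas_Object, then bring the pipeline to
 * PAUSED and wait for the state change before handing it back. */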
GstElement *
gstreamer_video_sink_new(Emotion_Gstreamer_Video *ev,
                         Evas_Object *o,
                         const char *uri)
{
   GstElement *playbin;
   GstElement *sink;
   Evas_Object *obj;
   GstStateChangeReturn res;
   double start, end;

   obj = _emotion_image_get(o);
   if (!obj)
     {
        ERR("No Evas_Object specified");
        return NULL;
     }

   start = ecore_time_get();
   playbin = gst_element_factory_make("playbin2", "playbin");
   if (!playbin)
     {
        ERR("Unable to create 'playbin2' GstElement.");
        return NULL;
     }
   end = ecore_time_get();
   DBG("Playbin2: %f", end - start);

   start = ecore_time_get();
   sink = gst_element_factory_make("emotion-sink", "sink");
   if (!sink)
     {
        ERR("Unable to create 'emotion-sink' GstElement.");
        goto unref_pipeline;
     }

   g_object_set(G_OBJECT(playbin), "video-sink", sink, NULL);
   g_object_set(G_OBJECT(playbin), "uri", uri, NULL);
   g_object_set(G_OBJECT(sink), "evas-object", obj, NULL);

   end = ecore_time_get();

   DBG("emotion-sink: %f", end - start);

   start = ecore_time_get();
   /* res = gst_element_set_state(playbin, GST_STATE_PLAYING); */
   res = gst_element_set_state(playbin, GST_STATE_PAUSED);
   if (res == GST_STATE_CHANGE_FAILURE)
     {
        ERR("Unable to set GST_STATE_PAUSED.");
        goto unref_pipeline;
     }
   end = ecore_time_get();
   DBG("Pause pipeline: %f", end - start);

   start = ecore_time_get();
   res = gst_element_get_state(playbin, NULL, NULL, GST_CLOCK_TIME_NONE);
   if (res != GST_STATE_CHANGE_SUCCESS)
     {
        /** NOTE: set GST_DEBUG_DUMP_DOT_DIR=/tmp and EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp', */
        /** then call: dot -Tpng -oemotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
        if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(playbin), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));

        ERR("Unable to reach the GST_STATE_PAUSED state.");
        goto unref_pipeline;
     }
   end = ecore_time_get();
   DBG("No time: %f", end - start);

   /** NOTE: set GST_DEBUG_DUMP_DOT_DIR=/tmp and EMOTION_ENGINE=gstreamer to save the $EMOTION_GSTREAMER_DOT file in '/tmp', */
   /** then call: dot -Tpng -oemotion_pipeline.png /tmp/$TIMESTAMP-$EMOTION_GSTREAMER_DOT.dot */
   if (getenv("EMOTION_GSTREAMER_DOT")) GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(playbin), GST_DEBUG_GRAPH_SHOW_ALL, getenv("EMOTION_GSTREAMER_DOT"));

   evas_object_data_set(obj, "_emotion_gstreamer_video", ev);

   return playbin;

 unref_pipeline:
   gst_object_unref(playbin);
   return NULL;
}
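
/*
 * Usage sketch (illustrative only, not part of this module): emotion calls
 * gstreamer_video_sink_new() itself, but the sink element can also be created
 * by hand once the plugin is registered, assuming an existing Evas_Object
 * *img and a playbin2 element named playbin:
 *
 *   GstElement *vsink;
 *
 *   vsink = gst_element_factory_make("emotion-sink", "sink");
 *   g_object_set(G_OBJECT(vsink), "evas-object", img, NULL);
 *   g_object_set(G_OBJECT(playbin), "video-sink", vsink, NULL);
 *
 * The "evas-object" property must point to the Evas image object that will
 * receive the frames, as done above with _emotion_image_get().
 */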