/*
 * vim:ts=8:sw=3:sts=8:noexpandtab:cino=>5n-3f0^-2{2
 */
#include <unistd.h>
#include <fcntl.h>
#include <stdlib.h>   /* malloc(), calloc(), free() */
#include <string.h>   /* memcpy() */

#include "emotion_private.h"
#include "emotion_gstreamer.h"
#include "emotion_gstreamer_pipeline.h"

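/* Set the pipeline to GST_STATE_PAUSED and block until the state change
 * completes. Returns TRUE on success, FALSE otherwise. */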
gboolean
emotion_pipeline_pause(GstElement *pipeline)
{
   GstStateChangeReturn res;

   res = gst_element_set_state(pipeline, GST_STATE_PAUSED);
   if (res == GST_STATE_CHANGE_FAILURE)
     {
        g_print("Emotion-Gstreamer ERROR: could not pause\n");
        return FALSE;
     }

   res = gst_element_get_state(pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
   if (res != GST_STATE_CHANGE_SUCCESS)
     {
        g_print("Emotion-Gstreamer ERROR: could not complete pause\n");
        return FALSE;
     }

   return TRUE;
}

/* Send the video frame to the evas object */
void
cb_handoff(GstElement *fakesink,
           GstBuffer  *buffer,
           GstPad     *pad,
           gpointer    user_data)
{
   GstQuery *query;
   GstPad *peer;
   void *buf[2];

   Emotion_Gstreamer_Video *ev = (Emotion_Gstreamer_Video *)user_data;
   if (!ev)
     return;

   if (!ev->video_mute)
     {
        /* Lazily allocate the frame copy buffer on the first handoff. */
        if (!ev->obj_data)
          ev->obj_data = malloc(GST_BUFFER_SIZE(buffer));

        memcpy(ev->obj_data, GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
        buf[0] = GST_BUFFER_DATA(buffer);
        buf[1] = buffer;
        /* Wake up the main loop; the frame is picked up by the pipe handler. */
        ecore_pipe_write(ev->pipe, buf, sizeof(buf));
     }
   else
     {
        Emotion_Audio_Sink *asink;
        asink = (Emotion_Audio_Sink *)eina_list_nth(ev->audio_sinks, ev->audio_sink_nbr);
        if (asink)
          _emotion_video_pos_update(ev->obj, ev->position, asink->length_time);
     }

   /* Update the current playback position from the upstream pad. */
   peer = gst_pad_get_peer(pad);
   query = gst_query_new_position(GST_FORMAT_TIME);
   if (peer && gst_pad_query(peer, query))
     {
        gint64 position;

        gst_query_parse_position(query, NULL, &position);
        ev->position = (double)position / (double)GST_SECOND;
     }
   gst_query_unref(query);
   if (peer) gst_object_unref(peer);
}

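/* "new-decoded-pad" callback of decodebin: create a fakesink (video) or a
 * tee + audio/visualization bin (audio) for each decoded stream and link
 * the new pad to it. */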
void
file_new_decoded_pad_cb(GstElement *decodebin,
                        GstPad     *new_pad,
                        gboolean    last,
                        gpointer    user_data)
{
   Emotion_Gstreamer_Video *ev;
   GstCaps *caps;
   gchar   *str;
   unsigned int index;

   ev = (Emotion_Gstreamer_Video *)user_data;
   caps = gst_pad_get_caps(new_pad);
   str = gst_caps_to_string(caps);
   /* video stream */
   if (g_str_has_prefix(str, "video/"))
     {
        Emotion_Video_Sink *vsink;
        GstElement         *queue;
        GstPad             *videopad;

        vsink = (Emotion_Video_Sink *)calloc(1, sizeof(Emotion_Video_Sink));
        if (!vsink) return;
        ev->video_sinks = eina_list_append(ev->video_sinks, vsink);
        if (!eina_list_data_find(ev->video_sinks, vsink))
          {
             free(vsink);
             return;
          }

        queue = gst_element_factory_make("queue", NULL);
        vsink->sink = gst_element_factory_make("fakesink", "videosink");
        gst_bin_add_many(GST_BIN(ev->pipeline), queue, vsink->sink, NULL);
        gst_element_link(queue, vsink->sink);
        videopad = gst_element_get_pad(queue, "sink");
        gst_pad_link(new_pad, videopad);
        gst_object_unref(videopad);
        /* width/height are only known once emotion_video_sink_fill() has
         * parsed the negotiated caps; guard against a division by zero. */
        if ((eina_list_count(ev->video_sinks) == 1) && (vsink->height > 0))
          ev->ratio = (double)vsink->width / (double)vsink->height;
        gst_element_set_state(queue, GST_STATE_PAUSED);
        gst_element_set_state(vsink->sink, GST_STATE_PAUSED);
     }
   /* audio stream */
   else if (g_str_has_prefix(str, "audio/"))
     {
        Emotion_Audio_Sink *asink;
        GstPad             *audiopad;

        asink = (Emotion_Audio_Sink *)calloc(1, sizeof(Emotion_Audio_Sink));
        if (!asink) return;
        ev->audio_sinks = eina_list_append(ev->audio_sinks, asink);
        if (!eina_list_data_find(ev->audio_sinks, asink))
          {
             free(asink);
             return;
          }

        /* Audio sink indices are 1-based: emotion_audio_bin_create() only
         * builds a real audio output for index 1 and the matching
         * visualization sink is looked up as "vissink1". */
        for (index = 1; asink != eina_list_nth(ev->audio_sinks, index - 1); index++)
          ;
        asink->sink = emotion_audio_sink_create(ev, index);
        gst_bin_add(GST_BIN(ev->pipeline), asink->sink);
        audiopad = gst_element_get_pad(asink->sink, "sink");
        gst_pad_link(new_pad, audiopad);
        gst_object_unref(audiopad);
        gst_element_set_state(asink->sink, GST_STATE_PAUSED);
     }

   gst_caps_unref(caps);
   g_free(str);
}

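/* Allocate a zeroed Emotion_Video_Sink and append it to ev->video_sinks. */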
Emotion_Video_Sink *
emotion_video_sink_new(Emotion_Gstreamer_Video *ev)
{
   Emotion_Video_Sink *vsink;

   if (!ev) return NULL;

   vsink = (Emotion_Video_Sink *)calloc(1, sizeof(Emotion_Video_Sink));
   if (!vsink) return NULL;

   ev->video_sinks = eina_list_append(ev->video_sinks, vsink);
   if (!eina_list_data_find(ev->video_sinks, vsink))
     {
        free(vsink);
        return NULL;
     }
   return vsink;
}

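/* Remove the given sink from ev->video_sinks and free it. */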
void
emotion_video_sink_free(Emotion_Gstreamer_Video *ev, Emotion_Video_Sink *vsink)
{
   if (!ev || !vsink) return;

   ev->video_sinks = eina_list_remove(ev->video_sinks, vsink);
   free(vsink);
}

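/* Build a video sink that renders the audio visualization: it reuses the
 * "vissink1" fakesink created inside the audio sink bin and reports a fixed
 * 320x200 ARGB stream at 25 fps. */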
Emotion_Video_Sink *
emotion_visualization_sink_create(Emotion_Gstreamer_Video *ev, Emotion_Audio_Sink *asink)
{
   Emotion_Video_Sink *vsink;

   if (!ev) return NULL;

   vsink = emotion_video_sink_new(ev);
   if (!vsink) return NULL;

   vsink->sink = gst_bin_get_by_name(GST_BIN(asink->sink), "vissink1");
   if (!vsink->sink)
     {
        emotion_video_sink_free(ev, vsink);
        return NULL;
     }
   vsink->width = 320;
   vsink->height = 200;
   ev->ratio = (double)vsink->width / (double)vsink->height;
   vsink->fps_num = 25;
   vsink->fps_den = 1;
   vsink->fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
   vsink->length_time = asink->length_time;

   g_object_set(G_OBJECT(vsink->sink), "sync", TRUE, NULL);
   g_object_set(G_OBJECT(vsink->sink), "signal-handoffs", TRUE, NULL);
   g_signal_connect(G_OBJECT(vsink->sink),
                    "handoff",
                    G_CALLBACK(cb_handoff), ev);
   return vsink;
}

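/* Drain the pipeline bus and return the CDDA track count advertised in a
 * GST_TAG_TRACK_COUNT tag message (0 if none was found). */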
int
emotion_pipeline_cdda_track_count_get(void *video)
{
   Emotion_Gstreamer_Video *ev;
   GstBus                  *bus;
   guint                    tracks_count = 0;
   gboolean                 done;

   ev = (Emotion_Gstreamer_Video *)video;
   if (!ev) return tracks_count;

   done = FALSE;
   bus = gst_element_get_bus(ev->pipeline);
   if (!bus) return tracks_count;

   while (!done)
     {
        GstMessage *message;

        message = gst_bus_pop(bus);
        if (message == NULL)
          /* All messages read, we're done */
          break;

        switch (GST_MESSAGE_TYPE(message))
          {
           case GST_MESSAGE_TAG:
                {
                   GstTagList *tags;

                   gst_message_parse_tag(message, &tags);

                   gst_tag_list_get_uint(tags, GST_TAG_TRACK_COUNT, &tracks_count);
                   gst_tag_list_free(tags);
                   if (tracks_count) done = TRUE;
                   break;
                }
           case GST_MESSAGE_ERROR:
           default:
              break;
          }
        gst_message_unref(message);
     }

   gst_object_unref(GST_OBJECT(bus));

   return tracks_count;
}

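/* Map an Emotion_Vis value to the matching GStreamer element name
 * (NULL for EMOTION_VIS_NONE). */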
const char *
emotion_visualization_element_name_get(Emotion_Vis visualisation)
{
   switch (visualisation)
     {
      case EMOTION_VIS_NONE:
         return NULL;
      case EMOTION_VIS_GOOM:
         return "goom";
      case EMOTION_VIS_LIBVISUAL_BUMPSCOPE:
         return "libvisual_bumpscope";
      case EMOTION_VIS_LIBVISUAL_CORONA:
         return "libvisual_corona";
      case EMOTION_VIS_LIBVISUAL_DANCING_PARTICLES:
         return "libvisual_dancingparticles";
      case EMOTION_VIS_LIBVISUAL_GDKPIXBUF:
         return "libvisual_gdkpixbuf";
      case EMOTION_VIS_LIBVISUAL_G_FORCE:
         return "libvisual_G-Force";
      case EMOTION_VIS_LIBVISUAL_GOOM:
         return "libvisual_goom";
      case EMOTION_VIS_LIBVISUAL_INFINITE:
         return "libvisual_infinite";
      case EMOTION_VIS_LIBVISUAL_JAKDAW:
         return "libvisual_jakdaw";
      case EMOTION_VIS_LIBVISUAL_JESS:
         return "libvisual_jess";
      case EMOTION_VIS_LIBVISUAL_LV_ANALYSER:
         return "libvisual_lv_analyzer";
      case EMOTION_VIS_LIBVISUAL_LV_FLOWER:
         return "libvisual_lv_flower";
      case EMOTION_VIS_LIBVISUAL_LV_GLTEST:
         return "libvisual_lv_gltest";
      case EMOTION_VIS_LIBVISUAL_LV_SCOPE:
         return "libvisual_lv_scope";
      case EMOTION_VIS_LIBVISUAL_MADSPIN:
         return "libvisual_madspin";
      case EMOTION_VIS_LIBVISUAL_NEBULUS:
         return "libvisual_nebulus";
      case EMOTION_VIS_LIBVISUAL_OINKSIE:
         return "libvisual_oinksie";
      case EMOTION_VIS_LIBVISUAL_PLASMA:
         return "libvisual_plazma";
      default:
         return "goom";
     }
}

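/* Build "visbin<index>": queue ! audioconvert ! <visualization> !
 * ffmpegcolorspace ! fakesink, capped to 320x200 32bpp RGB, with a ghost
 * "sink" pad on the queue. Returns NULL if visualization is disabled or an
 * element could not be created. */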
static GstElement *
emotion_visualization_bin_create(Emotion_Gstreamer_Video *ev, int index)
{
   const char *vis_name;
   char buf[64];
   GstElement *vis, *visbin, *queue, *conv, *cspace, *sink;
   GstPad *vispad;
   GstCaps *caps;

   if (ev->vis == EMOTION_VIS_NONE)
     return NULL;

   vis_name = emotion_visualization_element_name_get(ev->vis);
   if (!vis_name)
     return NULL;

   g_snprintf(buf, sizeof(buf), "vis%d", index);
   vis = gst_element_factory_make(vis_name, buf);
   if (!vis)
     return NULL;

   g_snprintf(buf, sizeof(buf), "visbin%d", index);
   visbin = gst_bin_new(buf);

   queue = gst_element_factory_make("queue", NULL);
   conv = gst_element_factory_make("audioconvert", NULL);
   cspace = gst_element_factory_make("ffmpegcolorspace", NULL);
   g_snprintf(buf, sizeof(buf), "vissink%d", index);
   sink = gst_element_factory_make("fakesink", buf);

   if ((!visbin) || (!queue) || (!conv) || (!cspace) || (!sink))
     goto error;

   gst_bin_add_many(GST_BIN(visbin), queue, conv, vis, cspace, sink, NULL);
   gst_element_link_many(queue, conv, vis, cspace, NULL);
   caps = gst_caps_new_simple("video/x-raw-rgb",
                              "bpp", G_TYPE_INT, 32,
                              "width", G_TYPE_INT, 320,
                              "height", G_TYPE_INT, 200,
                              NULL);
   gst_element_link_filtered(cspace, sink, caps);
   gst_caps_unref(caps);

   vispad = gst_element_get_pad(queue, "sink");
   gst_element_add_pad(visbin, gst_ghost_pad_new("sink", vispad));
   gst_object_unref(vispad);

   return visbin;

 error:
   if (vis)
     gst_object_unref(vis);
   if (visbin)
     gst_object_unref(visbin);
   if (queue)
     gst_object_unref(queue);
   if (conv)
     gst_object_unref(conv);
   if (cspace)
     gst_object_unref(cspace);
   if (sink)
     gst_object_unref(sink);

   return NULL;
}

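/* Build the audio output bin: queue ! audioconvert ! audioresample !
 * volume ! sink, with a ghost "sink" pad on the queue. Only the first
 * audio stream (index 1) gets a real autoaudiosink; the others are muted
 * with a fakesink. */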
static GstElement *
emotion_audio_bin_create(Emotion_Gstreamer_Video *ev, int index)
{
   GstElement *audiobin, *queue, *conv, *resample, *volume, *sink;
   GstPad *audiopad;
   double vol;

   audiobin = gst_bin_new(NULL);
   queue = gst_element_factory_make("queue", NULL);
   conv = gst_element_factory_make("audioconvert", NULL);
   resample = gst_element_factory_make("audioresample", NULL);
   volume = gst_element_factory_make("volume", "volume");

   if (index == 1)
     sink = gst_element_factory_make("autoaudiosink", NULL);
   else
     /* XXX hack: use a proper mixer element here */
     sink = gst_element_factory_make("fakesink", NULL);

   if ((!audiobin) || (!queue) || (!conv) || (!resample) || (!volume) || (!sink))
     goto error;

   g_object_get(volume, "volume", &vol, NULL);
   ev->volume = vol;

   gst_bin_add_many(GST_BIN(audiobin),
                    queue, conv, resample, volume, sink, NULL);
   gst_element_link_many(queue, conv, resample, volume, sink, NULL);

   audiopad = gst_element_get_pad(queue, "sink");
   gst_element_add_pad(audiobin, gst_ghost_pad_new("sink", audiopad));
   gst_object_unref(audiopad);

   return audiobin;

 error:
   if (audiobin)
     gst_object_unref(audiobin);
   if (queue)
     gst_object_unref(queue);
   if (conv)
     gst_object_unref(conv);
   if (resample)
     gst_object_unref(resample);
   if (volume)
     gst_object_unref(volume);
   if (sink)
     gst_object_unref(sink);

   return NULL;
}


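/* Build the complete audio sink: a tee feeding the audio output bin and,
 * when enabled, the visualization bin, all wrapped in a bin exposing a
 * single ghost "sink" pad. */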
GstElement *
emotion_audio_sink_create(Emotion_Gstreamer_Video *ev, int index)
{
   gchar       buf[128];
   GstElement *bin;
   GstElement *audiobin;
   GstElement *visbin = NULL;
   GstElement *tee;
   GstPad     *teepad;
   GstPad     *binpad;

   audiobin = emotion_audio_bin_create(ev, index);
   if (!audiobin)
     return NULL;

   bin = gst_bin_new(NULL);
   if (!bin)
     {
        gst_object_unref(audiobin);
        return NULL;
     }

   g_snprintf(buf, 128, "tee%d", index);
   tee = gst_element_factory_make("tee", buf);

   visbin = emotion_visualization_bin_create(ev, index);

   /* visbin may be NULL (visualization disabled); add it separately so a
    * NULL element does not terminate the gst_bin_add_many() list early. */
   gst_bin_add_many(GST_BIN(bin), tee, audiobin, NULL);
   if (visbin)
     gst_bin_add(GST_BIN(bin), visbin);

   binpad = gst_element_get_pad(audiobin, "sink");
   teepad = gst_element_get_request_pad(tee, "src%d");
   gst_pad_link(teepad, binpad);
   gst_object_unref(teepad);
   gst_object_unref(binpad);

   if (visbin)
     {
        binpad = gst_element_get_pad(visbin, "sink");
        teepad = gst_element_get_request_pad(tee, "src%d");
        gst_pad_link(teepad, binpad);
        gst_object_unref(teepad);
        gst_object_unref(binpad);
     }

   teepad = gst_element_get_pad(tee, "sink");
   gst_element_add_pad(bin, gst_ghost_pad_new("sink", teepad));
   gst_object_unref(teepad);

   return bin;
}

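/* Walk the decoder source pads once the pipeline is paused and fill the
 * previously created audio/video sinks with the negotiated caps, creating
 * or unlinking visualization bins as needed. */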
void
emotion_streams_sinks_get(Emotion_Gstreamer_Video *ev, GstElement *decoder)
{
   GstIterator *it;
   Eina_List   *alist;
   Eina_List   *vlist;
   gpointer     data;

   alist = ev->audio_sinks;
   vlist = ev->video_sinks;

   it = gst_element_iterate_src_pads(decoder);
   while (gst_iterator_next(it, &data) == GST_ITERATOR_OK)
     {
        GstPad  *pad;
        GstCaps *caps;
        gchar   *str;

        pad = GST_PAD(data);

        caps = gst_pad_get_caps(pad);
        str = gst_caps_to_string(caps);
        g_print("caps !! %s\n", str);

        /* video stream */
        if (g_str_has_prefix(str, "video/"))
          {
             Emotion_Video_Sink *vsink;

             vsink = (Emotion_Video_Sink *)eina_list_data_get(vlist);
             vlist = eina_list_next(vlist);

             emotion_video_sink_fill(vsink, pad, caps);
          }
        /* audio stream */
        else if (g_str_has_prefix(str, "audio/"))
          {
             Emotion_Audio_Sink *asink;
             unsigned int index;

             asink = (Emotion_Audio_Sink *)eina_list_data_get(alist);
             alist = eina_list_next(alist);

             emotion_audio_sink_fill(asink, pad, caps);

             /* 1-based position of asink in the list, matching the index
              * used when the audio sink bins were created. */
             for (index = 1; asink != eina_list_nth(ev->audio_sinks, index - 1); index++)
               ;

             if (eina_list_count(ev->video_sinks) == 0)
               {
                  if (index == 1)
                    {
                       Emotion_Video_Sink *vsink;

                       vsink = emotion_visualization_sink_create(ev, asink);
                       if (!vsink) goto finalize;
                    }
               }
             else
               {
                  gchar       buf[128];
                  GstElement *visbin;

                  g_snprintf(buf, 128, "visbin%d", index);
                  visbin = gst_bin_get_by_name(GST_BIN(ev->pipeline), buf);
                  if (visbin)
                    {
                       GstPad *srcpad;
                       GstPad *sinkpad;

                       sinkpad = gst_element_get_pad(visbin, "sink");
                       srcpad = gst_pad_get_peer(sinkpad);
                       gst_pad_unlink(srcpad, sinkpad);

                       gst_object_unref(srcpad);
                       gst_object_unref(sinkpad);
                       /* gst_bin_get_by_name() returned a new reference */
                       gst_object_unref(visbin);
                    }
               }
          }
finalize:
        gst_caps_unref(caps);
        g_free(str);
        gst_object_unref(pad);
     }
   gst_iterator_free(it);
}

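/* Fill the video sink (size, framerate, fourcc, duration) from the
 * negotiated caps and a duration query on the pad. */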
void
emotion_video_sink_fill(Emotion_Video_Sink *vsink, GstPad *pad, GstCaps *caps)
{
   GstStructure *structure;
   GstQuery     *query;
   const GValue *val;
   gchar        *str;

   structure = gst_caps_get_structure(caps, 0);
   str = gst_caps_to_string(caps);

   gst_structure_get_int(structure, "width", &vsink->width);
   gst_structure_get_int(structure, "height", &vsink->height);

   vsink->fps_num = 1;
   vsink->fps_den = 1;
   val = gst_structure_get_value(structure, "framerate");
   if (val)
     {
        vsink->fps_num = gst_value_get_fraction_numerator(val);
        vsink->fps_den = gst_value_get_fraction_denominator(val);
     }
   if (g_str_has_prefix(str, "video/x-raw-yuv"))
     {
        val = gst_structure_get_value(structure, "format");
        vsink->fourcc = gst_value_get_fourcc(val);
     }
   else if (g_str_has_prefix(str, "video/x-raw-rgb"))
     vsink->fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
   else
     vsink->fourcc = 0;

   query = gst_query_new_duration(GST_FORMAT_TIME);
   if (gst_pad_query(pad, query))
     {
        gint64 time;

        gst_query_parse_duration(query, NULL, &time);
        vsink->length_time = (double)time / (double)GST_SECOND;
     }
   g_free(str);
   gst_query_unref(query);
}

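/* Fill the audio sink (channels, sample rate, duration) from the
 * negotiated caps and a duration query on the pad. */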
void
emotion_audio_sink_fill(Emotion_Audio_Sink *asink, GstPad *pad, GstCaps *caps)
{
   GstStructure *structure;
   GstQuery     *query;

   structure = gst_caps_get_structure(caps, 0);

   gst_structure_get_int(structure, "channels", &asink->channels);
   gst_structure_get_int(structure, "rate", &asink->samplerate);

   query = gst_query_new_duration(GST_FORMAT_TIME);
   if (gst_pad_query(pad, query))
     {
        gint64 time;

        gst_query_parse_duration(query, NULL, &time);
        asink->length_time = (double)time / (double)GST_SECOND;
     }
   gst_query_unref(query);
}