1 # Playback tutorial 1: Playbin usage
5 We have already worked with the `playbin` element, which is capable of
6 building a complete playback pipeline without much work on our side.
7 This tutorial shows how to further customize `playbin` in case its
8 default values do not suit our particular needs.
12 - How to find out how many streams a file contains, and how to switch
15 - How to gather information regarding each stream.
19 More often than not, multiple audio, video and subtitle streams can be
found embedded in a single file. The most common case is that of regular
21 movies, which contain one video and one audio stream (Stereo or 5.1
22 audio tracks are considered a single stream). It is also increasingly
23 common to find movies with one video and multiple audio streams, to
24 account for different languages. In this case, the user selects one
25 audio stream, and the application will only play that one, ignoring the
28 To be able to select the appropriate stream, the user needs to know
29 certain information about them, for example, their language. This
30 information is embedded in the streams in the form of “metadata”
31 (annexed data), and this tutorial shows how to retrieve it.
33 Subtitles can also be embedded in a file, along with audio and video,
34 but they are dealt with in more detail in [Playback tutorial 2: Subtitle
35 management]. Finally, multiple video streams can also be found in a
single file, for example, in DVDs with multiple angles of the same scene,
37 but they are somewhat rare.
39 > ![information] Embedding multiple streams inside a single file is
> called “multiplexing” or “muxing”, and such a file is then known as a
41 > “container”. Common container formats are Matroska (.mkv), Quicktime
42 > (.qt, .mov, .mp4), Ogg (.ogg) or Webm (.webm).
44 > Retrieving the individual streams from within the container is called
45 > “demultiplexing” or “demuxing”.
47 The following code recovers the amount of streams in the file, their
48 associated metadata, and allows switching the audio stream while the
51 ## The multilingual player
53 Copy this code into a text file named `playback-tutorial-1.c` (or find
54 it in the GStreamer installation).
56 **playback-tutorial-1.c**
61 /* Structure to contain all our information, so we can pass it around */
62 typedef struct _CustomData {
63 GstElement *playbin; /* Our one and only element */
65 gint n_video; /* Number of embedded video streams */
66 gint n_audio; /* Number of embedded audio streams */
67 gint n_text; /* Number of embedded subtitle streams */
69 gint current_video; /* Currently playing video stream */
70 gint current_audio; /* Currently playing audio stream */
71 gint current_text; /* Currently playing subtitle stream */
73 GMainLoop *main_loop; /* GLib's Main Loop */
78 GST_PLAY_FLAG_VIDEO = (1 << 0), /* We want video output */
79 GST_PLAY_FLAG_AUDIO = (1 << 1), /* We want audio output */
80 GST_PLAY_FLAG_TEXT = (1 << 2) /* We want subtitle output */
83 /* Forward definition for the message and keyboard processing functions */
84 static gboolean handle_message (GstBus *bus, GstMessage *msg, CustomData *data);
85 static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data);
87 int main(int argc, char *argv[]) {
90 GstStateChangeReturn ret;
94 /* Initialize GStreamer */
95 gst_init (&argc, &argv);
97 /* Create the elements */
98 data.playbin = gst_element_factory_make ("playbin", "playbin");
101 g_printerr ("Not all elements could be created.\n");
105 /* Set the URI to play */
106 g_object_set (data.playbin, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_cropped_multilingual.webm", NULL);
108 /* Set flags to show Audio and Video but ignore Subtitles */
109 g_object_get (data.playbin, "flags", &flags, NULL);
110 flags |= GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_AUDIO;
111 flags &= ~GST_PLAY_FLAG_TEXT;
112 g_object_set (data.playbin, "flags", flags, NULL);
114 /* Set connection speed. This will affect some internal decisions of playbin */
115 g_object_set (data.playbin, "connection-speed", 56, NULL);
117 /* Add a bus watch, so we get notified when a message arrives */
118 bus = gst_element_get_bus (data.playbin);
119 gst_bus_add_watch (bus, (GstBusFunc)handle_message, &data);
121 /* Add a keyboard watch so we get notified of keystrokes */
123 io_stdin = g_io_channel_win32_new_fd (fileno (stdin));
125 io_stdin = g_io_channel_unix_new (fileno (stdin));
127 g_io_add_watch (io_stdin, G_IO_IN, (GIOFunc)handle_keyboard, &data);
130 ret = gst_element_set_state (data.playbin, GST_STATE_PLAYING);
131 if (ret == GST_STATE_CHANGE_FAILURE) {
132 g_printerr ("Unable to set the pipeline to the playing state.\n");
133 gst_object_unref (data.playbin);
137 /* Create a GLib Main Loop and set it to run */
138 data.main_loop = g_main_loop_new (NULL, FALSE);
139 g_main_loop_run (data.main_loop);
142 g_main_loop_unref (data.main_loop);
143 g_io_channel_unref (io_stdin);
144 gst_object_unref (bus);
145 gst_element_set_state (data.playbin, GST_STATE_NULL);
146 gst_object_unref (data.playbin);
150 /* Extract some metadata from the streams and print it on the screen */
151 static void analyze_streams (CustomData *data) {
157 /* Read some properties */
158 g_object_get (data->playbin, "n-video", &data->n_video, NULL);
159 g_object_get (data->playbin, "n-audio", &data->n_audio, NULL);
160 g_object_get (data->playbin, "n-text", &data->n_text, NULL);
162 g_print ("%d video stream(s), %d audio stream(s), %d text stream(s)\n",
163 data->n_video, data->n_audio, data->n_text);
166 for (i = 0; i < data->n_video; i++) {
168 /* Retrieve the stream's video tags */
169 g_signal_emit_by_name (data->playbin, "get-video-tags", i, &tags);
171 g_print ("video stream %d:\n", i);
172 gst_tag_list_get_string (tags, GST_TAG_VIDEO_CODEC, &str);
173 g_print (" codec: %s\n", str ? str : "unknown");
175 gst_tag_list_free (tags);
180 for (i = 0; i < data->n_audio; i++) {
182 /* Retrieve the stream's audio tags */
183 g_signal_emit_by_name (data->playbin, "get-audio-tags", i, &tags);
185 g_print ("audio stream %d:\n", i);
186 if (gst_tag_list_get_string (tags, GST_TAG_AUDIO_CODEC, &str)) {
187 g_print (" codec: %s\n", str);
190 if (gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &str)) {
191 g_print (" language: %s\n", str);
194 if (gst_tag_list_get_uint (tags, GST_TAG_BITRATE, &rate)) {
195 g_print (" bitrate: %d\n", rate);
197 gst_tag_list_free (tags);
202 for (i = 0; i < data->n_text; i++) {
204 /* Retrieve the stream's subtitle tags */
205 g_signal_emit_by_name (data->playbin, "get-text-tags", i, &tags);
207 g_print ("subtitle stream %d:\n", i);
208 if (gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &str)) {
209 g_print (" language: %s\n", str);
212 gst_tag_list_free (tags);
216 g_object_get (data->playbin, "current-video", &data->current_video, NULL);
217 g_object_get (data->playbin, "current-audio", &data->current_audio, NULL);
218 g_object_get (data->playbin, "current-text", &data->current_text, NULL);
221 g_print ("Currently playing video stream %d, audio stream %d and text stream %d\n",
222 data->current_video, data->current_audio, data->current_text);
223 g_print ("Type any number and hit ENTER to select a different audio stream\n");
226 /* Process messages from GStreamer */
227 static gboolean handle_message (GstBus *bus, GstMessage *msg, CustomData *data) {
231 switch (GST_MESSAGE_TYPE (msg)) {
232 case GST_MESSAGE_ERROR:
233 gst_message_parse_error (msg, &err, &debug_info);
234 g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
235 g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
236 g_clear_error (&err);
238 g_main_loop_quit (data->main_loop);
240 case GST_MESSAGE_EOS:
241 g_print ("End-Of-Stream reached.\n");
242 g_main_loop_quit (data->main_loop);
244 case GST_MESSAGE_STATE_CHANGED: {
245 GstState old_state, new_state, pending_state;
246 gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
247 if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin)) {
248 if (new_state == GST_STATE_PLAYING) {
249 /* Once we are in the playing state, analyze the streams */
250 analyze_streams (data);
256 /* We want to keep receiving messages */
260 /* Process keyboard input */
261 static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data) {
264 if (g_io_channel_read_line (source, &str, NULL, NULL, NULL) == G_IO_STATUS_NORMAL) {
265 int index = g_ascii_strtoull (str, NULL, 0);
266 if (index < 0 || index >= data->n_audio) {
267 g_printerr ("Index out of bounds\n");
269 /* If the input was a valid audio stream index, set the current audio stream */
270 g_print ("Setting current audio stream to %d\n", index);
271 g_object_set (data->playbin, "current-audio", index, NULL);
279 > ![information] If you need help to compile this code, refer to the
280 > **Building the tutorials** section for your platform: [Mac] or
281 > [Windows] or use this specific command on Linux:
283 > `` gcc playback-tutorial-1.c -o playback-tutorial-1 `pkg-config --cflags --libs gstreamer-1.0` ``
285 > If you need help to run this code, refer to the **Running the
286 > tutorials** section for your platform: [Mac OS X], [Windows][1], for
287 > [iOS] or for [android].
289 > This tutorial opens a window and displays a movie, with accompanying
290 > audio. The media is fetched from the Internet, so the window might take
291 > a few seconds to appear, depending on your connection speed. The number
292 > of audio streams is shown in the terminal, and the user can switch from
293 > one to another by entering a number and pressing enter. A small delay is
296 > Bear in mind that there is no latency management (buffering), so on slow
297 > connections, the movie might stop after a few seconds. See how [Tutorial
298 > 12: Live streaming] solves this issue.
300 > Required libraries: `gstreamer-1.0`
305 /* Structure to contain all our information, so we can pass it around */
306 typedef struct _CustomData {
307 GstElement *playbin; /* Our one and only element */
309 gint n_video; /* Number of embedded video streams */
310 gint n_audio; /* Number of embedded audio streams */
311 gint n_text; /* Number of embedded subtitle streams */
313 gint current_video; /* Currently playing video stream */
314 gint current_audio; /* Currently playing audio stream */
315 gint current_text; /* Currently playing subtitle stream */
317 GMainLoop *main_loop; /* GLib's Main Loop */
321 We start, as usual, putting all our variables in a structure, so we can
322 pass it around to functions. For this tutorial, we need the amount of
323 streams of each type, and the currently playing one. Also, we are going
324 to use a different mechanism to wait for messages that allows
325 interactivity, so we need a GLib's main loop object.
330 GST_PLAY_FLAG_VIDEO = (1 << 0), /* We want video output */
331 GST_PLAY_FLAG_AUDIO = (1 << 1), /* We want audio output */
332 GST_PLAY_FLAG_TEXT = (1 << 2) /* We want subtitle output */
336 Later we are going to set some of `playbin`'s flags. We would like to
337 have a handy enum that allows manipulating these flags easily, but since
338 `playbin` is a plug-in and not a part of the GStreamer core, this enum
339 is not available to us. The “trick” is simply to declare this enum in
340 our code, as it appears in the `playbin` documentation: `GstPlayFlags`.
341 GObject allows introspection, so the possible values for these flags can
342 be retrieved at runtime without using this trick, but in a far more
346 /* Forward definition for the message and keyboard processing functions */
347 static gboolean handle_message (GstBus *bus, GstMessage *msg, CustomData *data);
348 static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data);
351 Forward declarations for the two callbacks we will be using.
352 `handle_message` for the GStreamer messages, as we have already seen,
353 and `handle_keyboard` for key strokes, since this tutorial is
354 introducing a limited amount of interactivity.
356 We skip over the creation of the pipeline, the instantiation of
357 `playbin` and pointing it to our test media through the `uri`
358 property. `playbin` is in itself a pipeline, and in this case it is the
359 only element in the pipeline, so we skip completely the creation of the
360 pipeline, and use directly the `playbin` element.
362 We focus on some of the other properties of `playbin`, though:
365 /* Set flags to show Audio and Video but ignore Subtitles */
366 g_object_get (data.playbin, "flags", &flags, NULL);
367 flags |= GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_AUDIO;
368 flags &= ~GST_PLAY_FLAG_TEXT;
369 g_object_set (data.playbin, "flags", flags, NULL);
372 `playbin`'s behavior can be changed through its `flags` property, which
373 can have any combination of `GstPlayFlags`. The most interesting values
376 | Flag | Description |
377 |---------------------------|------------------------------------------------------------------------------------------------------------------------------------|
378 | GST_PLAY_FLAG_VIDEO | Enable video rendering. If this flag is not set, there will be no video output. |
379 | GST_PLAY_FLAG_AUDIO | Enable audio rendering. If this flag is not set, there will be no audio output. |
380 | GST_PLAY_FLAG_TEXT | Enable subtitle rendering. If this flag is not set, subtitles will not be shown in the video output. |
381 | GST_PLAY_FLAG_VIS | Enable rendering of visualisations when there is no video stream. Playback tutorial 6: Audio visualization goes into more details. |
382 | GST_PLAY_FLAG_DOWNLOAD | See Basic tutorial 12: Streaming and Playback tutorial 4: Progressive streaming. |
383 | GST_PLAY_FLAG_BUFFERING | See Basic tutorial 12: Streaming and Playback tutorial 4: Progressive streaming. |
384 | GST_PLAY_FLAG_DEINTERLACE | If the video content was interlaced, this flag instructs playbin to deinterlace it before displaying it. |
386 In our case, for demonstration purposes, we are enabling audio and video
387 and disabling subtitles, leaving the rest of flags to their default
388 values (this is why we read the current value of the flags with
389 `g_object_get()` before overwriting it with `g_object_set()`).
392 /* Set connection speed. This will affect some internal decisions of playbin */
393 g_object_set (data.playbin, "connection-speed", 56, NULL);
396 This property is not really useful in this example.
397 `connection-speed` informs `playbin` of the maximum speed of our network
398 connection, so, in case multiple versions of the requested media are
399 available in the server, `playbin` chooses the most appropriate. This is
400 mostly used in combination with streaming protocols like `mms` or
We have set all these properties one by one, but we could have set all of
404 them with a single call to `g_object_set()`:
407 g_object_set (data.playbin, "uri", "https://www.freedesktop.org/software/gstreamer-sdk/data/media/sintel_cropped_multilingual.webm", "flags", flags, "connection-speed", 56, NULL);
410 This is why `g_object_set()` requires a NULL as the last parameter.
413 /* Add a keyboard watch so we get notified of keystrokes */
415 io_stdin = g_io_channel_win32_new_fd (fileno (stdin));
417 io_stdin = g_io_channel_unix_new (fileno (stdin));
419 g_io_add_watch (io_stdin, G_IO_IN, (GIOFunc)handle_keyboard, &data);
422 These lines connect a callback function to the standard input (the
423 keyboard). The mechanism shown here is specific to GLib, and not really
424 related to GStreamer, so there is no point in going into much depth.
425 Applications normally have their own way of handling user input, and
426 GStreamer has little to do with it besides the Navigation interface
427 discussed briefly in [Tutorial 17: DVD playback].
430 /* Create a GLib Main Loop and set it to run */
431 data.main_loop = g_main_loop_new (NULL, FALSE);
432 g_main_loop_run (data.main_loop);
435 To allow interactivity, we will no longer poll the GStreamer bus
manually. Instead, we create a `GMainLoop` (GLib main loop) and set it
437 running with `g_main_loop_run()`. This function blocks and will not
438 return until `g_main_loop_quit()` is issued. In the meantime, it will
439 call the callbacks we have registered at the appropriate
440 times: `handle_message` when a message appears on the bus, and
441 `handle_keyboard` when the user presses any key.
443 There is nothing new in handle\_message, except that when the pipeline
444 moves to the PLAYING state, it will call the `analyze_streams` function:
447 /* Extract some metadata from the streams and print it on the screen */
448 static void analyze_streams (CustomData *data) {
454 /* Read some properties */
455 g_object_get (data->playbin, "n-video", &data->n_video, NULL);
456 g_object_get (data->playbin, "n-audio", &data->n_audio, NULL);
457 g_object_get (data->playbin, "n-text", &data->n_text, NULL);
460 As the comment says, this function just gathers information from the
461 media and prints it on the screen. The number of video, audio and
462 subtitle streams is directly available through the `n-video`,
463 `n-audio` and `n-text` properties.
466 for (i = 0; i < data->n_video; i++) {
468 /* Retrieve the stream's video tags */
469 g_signal_emit_by_name (data->playbin, "get-video-tags", i, &tags);
471 g_print ("video stream %d:\n", i);
472 gst_tag_list_get_string (tags, GST_TAG_VIDEO_CODEC, &str);
473 g_print (" codec: %s\n", str ? str : "unknown");
475 gst_tag_list_free (tags);
480 Now, for each stream, we want to retrieve its metadata. Metadata is
481 stored as tags in a `GstTagList` structure, which is a list of data
482 pieces identified by a name. The `GstTagList` associated with a stream
483 can be recovered with `g_signal_emit_by_name()`, and then individual
484 tags are extracted with the `gst_tag_list_get_*` functions
485 like `gst_tag_list_get_string()` for example.
488 > This rather unintuitive way of retrieving the tag list
489 > is called an Action Signal. Action signals are emitted by the
490 > application to a specific element, which then performs an action and
491 > returns a result. They behave like a dynamic function call, in which
492 > methods of a class are identified by their name (the signal's name)
> instead of their memory address. These signals are listed in the
494 > documentation along with the regular signals, and are tagged “Action”.
495 > See `playbin`, for example.
497 `playbin` defines 3 action signals to retrieve metadata:
`get-video-tags`, `get-audio-tags` and `get-text-tags`. The name of the
499 tags is standardized, and the list can be found in the `GstTagList`
500 documentation. In this example we are interested in the
501 `GST_TAG_LANGUAGE_CODE` of the streams and their `GST_TAG_*_CODEC`
502 (audio, video or text).
505 g_object_get (data->playbin, "current-video", &data->current_video, NULL);
506 g_object_get (data->playbin, "current-audio", &data->current_audio, NULL);
507 g_object_get (data->playbin, "current-text", &data->current_text, NULL);
510 Once we have extracted all the metadata we want, we get the streams that
511 are currently selected through 3 more properties of `playbin`:
512 `current-video`, `current-audio` and `current-text`.
514 It is interesting to always check the currently selected streams and
515 never make any assumption. Multiple internal conditions can make
516 `playbin` behave differently in different executions. Also, the order in
517 which the streams are listed can change from one run to another, so
518 checking the metadata to identify one particular stream becomes crucial.
521 /* Process keyboard input */
522 static gboolean handle_keyboard (GIOChannel *source, GIOCondition cond, CustomData *data) {
525 if (g_io_channel_read_line (source, &str, NULL, NULL, NULL) == G_IO_STATUS_NORMAL) {
526 int index = g_ascii_strtoull (str, NULL, 0);
527 if (index < 0 || index >= data->n_audio) {
528 g_printerr ("Index out of bounds\n");
530 /* If the input was a valid audio stream index, set the current audio stream */
531 g_print ("Setting current audio stream to %d\n", index);
532 g_object_set (data->playbin, "current-audio", index, NULL);
540 Finally, we allow the user to switch the running audio stream. This very
541 basic function just reads a string from the standard input (the
542 keyboard), interprets it as a number, and tries to set the
543 `current-audio` property of `playbin` (which previously we have only
546 Bear in mind that the switch is not immediate. Some of the previously
547 decoded audio will still be flowing through the pipeline, while the new
548 stream becomes active and is decoded. The delay depends on the
549 particular multiplexing of the streams in the container, and the length
550 `playbin` has selected for its internal queues (which depends on the
553 If you execute the tutorial, you will be able to switch from one
554 language to another while the movie is running by pressing 0, 1 or 2
555 (and ENTER). This concludes this tutorial.
559 This tutorial has shown:
561 - A few more of `playbin`'s properties: `flags`, `connection-speed`,
562 `n-video`, `n-audio`, `n-text`, `current-video`, `current-audio` and
565 - How to retrieve the list of tags associated with a stream
566 with `g_signal_emit_by_name()`.
568 - How to retrieve a particular tag from the list with
`gst_tag_list_get_string()` or `gst_tag_list_get_uint()`
571 - How to switch the current audio simply by writing to the
572 `current-audio` property.
574 The next playback tutorial shows how to handle subtitles, either
575 embedded in the container or in an external file.
577 Remember that attached to this page you should find the complete source
578 code of the tutorial and any accessory files needed to build it.
580 It has been a pleasure having you here, and see you soon!
582 [Playback tutorial 2: Subtitle management]: tutorials/playback/subtitle-management.md
583 [information]: images/icons/emoticons/information.png
584 [Mac]: installing/on-mac-osx.md
585 [Windows]: installing/on-windows.md
586 [Mac OS X]: installing/on-mac-osx.md#building-the-tutorials
587 [1]: installing/on-windows.md#running-the-tutorials
588 [iOS]: installing/for-ios-development.md#building-the-tutorials
589 [android]: installing/for-android-development.md#building-the-tutorials