2 title: Playback Components
7 GStreamer includes several higher-level components to simplify an
8 application developer's life. All of the components discussed here (for
9 now) are targeted at media playback. The idea of each of these
10 components is to integrate as closely as possible with a GStreamer
11 pipeline, but to hide the complexity of media type detection and several
12 other rather complex topics that have been discussed in [Advanced
13 GStreamer concepts](manual/advanced/index.md).
15 We currently recommend people to use either playbin (see
16 [Playbin](#playbin)) or decodebin (see [Decodebin](#decodebin)),
17 depending on their needs. Playbin is the recommended solution for
18 everything related to simple playback of media that should just work.
19 Decodebin is a more flexible autoplugger that could be used to add more
20 advanced features, such as playlist support, crossfading of audio tracks
21 and so on. Its programming interface is more low-level than that of
26 Playbin is an element that can be created using the standard GStreamer
27 API (e.g. `gst_element_factory_make ()`). The factory is conveniently
28 called “playbin”. By being a `GstPipeline` (and thus a `GstElement`),
29 playbin automatically supports all of the features of this class,
30 including error handling, tag support, state handling, getting stream
31 positions, seeking, and so on.
33 Setting up a playbin pipeline is as simple as creating an instance of
34 the playbin element, setting a file location using the “uri” property on
35 playbin, and then setting the element to the `GST_STATE_PLAYING` state
36 (the location has to be a valid URI, so “\<protocol\>://\<location\>”,
37 e.g. file:///tmp/my.ogg or http://www.example.org/stream.ogg).
38 Internally, playbin will set up a pipeline to play back the media
44 [.. my_bus_callback goes here ..]
55 gst_init (&argc, &argv);
56 loop = g_main_loop_new (NULL, FALSE);
58 /* make sure we have a URI */
60 g_print ("Usage: %s <URI>\n", argv[0]);
65 play = gst_element_factory_make ("playbin", "play");
66 g_object_set (G_OBJECT (play), "uri", argv[1], NULL);
68 bus = gst_pipeline_get_bus (GST_PIPELINE (play));
69 gst_bus_add_watch (bus, my_bus_callback, loop);
70 gst_object_unref (bus);
72 gst_element_set_state (play, GST_STATE_PLAYING);
75 g_main_loop_run (loop);
78 gst_element_set_state (play, GST_STATE_NULL);
79 gst_object_unref (GST_OBJECT (play));
86 Playbin has several features that have been discussed previously:
88 - Settable video and audio output (using the “video-sink” and
89 “audio-sink” properties).
91 - Mostly controllable and trackable as a `GstElement`, including error
92 handling, eos handling, tag handling, state handling (through the
93 `GstBus`), media position handling and seeking.
95 - Buffers network sources, with buffer fullness notifications being
96 passed through the `GstBus`.
98 - Supports visualizations for audio-only media.
100 - Supports subtitles, both in the media as well as from separate
101 files. For separate subtitle files, use the “suburi” property.
103 - Supports stream selection and disabling. If your media has multiple
104 audio or subtitle tracks, you can dynamically choose which one to
105 play back, or decide to turn it off altogether (which is especially
106 useful to turn off subtitles). For each of those, use the
107 “current-text” and other related properties.
109 For convenience, it is possible to test “playbin” on the commandline,
110 using the command “gst-launch-1.0 playbin uri=file:///path/to/file”.
114 Decodebin is the actual autoplugger backend of playbin, which was
115 discussed in the previous section. Decodebin will, in short, accept
116 input from a source that is linked to its sinkpad and will try to detect
117 the media type contained in the stream, and set up decoder routines for
118 each of those. It will automatically select decoders. For each decoded
119 stream, it will emit the “pad-added” signal, to let the client know
120 about the newly found decoded stream. For unknown streams (which might
121 be the whole stream), it will emit the “unknown-type” signal. The
122 application is then responsible for reporting the error to the user.
129 [.. my_bus_callback goes here ..]
133 GstElement *pipeline, *audio;
136 cb_newpad (GstElement *decodebin,
145 audiopad = gst_element_get_static_pad (audio, "sink");
146 if (GST_PAD_IS_LINKED (audiopad)) {
147 g_object_unref (audiopad);
151 /* check media type */
152 caps = gst_pad_query_caps (pad, NULL);
153 str = gst_caps_get_structure (caps, 0);
154 if (!g_strrstr (gst_structure_get_name (str), "audio")) {
155 gst_caps_unref (caps);
156 gst_object_unref (audiopad);
159 gst_caps_unref (caps);
162 gst_pad_link (pad, audiopad);
164 g_object_unref (audiopad);
172 GstElement *src, *dec, *conv, *sink;
177 gst_init (&argc, &argv);
178 loop = g_main_loop_new (NULL, FALSE);
180 /* make sure we have input */
182 g_print ("Usage: %s <filename>\n", argv[0]);
187 pipeline = gst_pipeline_new ("pipeline");
189 bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
190 gst_bus_add_watch (bus, my_bus_callback, loop);
191 gst_object_unref (bus);
193 src = gst_element_factory_make ("filesrc", "source");
194 g_object_set (G_OBJECT (src), "location", argv[1], NULL);
195 dec = gst_element_factory_make ("decodebin", "decoder");
196 g_signal_connect (dec, "pad-added", G_CALLBACK (cb_newpad), NULL);
197 gst_bin_add_many (GST_BIN (pipeline), src, dec, NULL);
198 gst_element_link (src, dec);
200 /* create audio output */
201 audio = gst_bin_new ("audiobin");
202 conv = gst_element_factory_make ("audioconvert", "aconv");
203 audiopad = gst_element_get_static_pad (conv, "sink");
204 sink = gst_element_factory_make ("alsasink", "sink");
205 gst_bin_add_many (GST_BIN (audio), conv, sink, NULL);
206 gst_element_link (conv, sink);
207 gst_element_add_pad (audio,
208 gst_ghost_pad_new ("sink", audiopad));
209 gst_object_unref (audiopad);
210 gst_bin_add (GST_BIN (pipeline), audio);
213 gst_element_set_state (pipeline, GST_STATE_PLAYING);
214 g_main_loop_run (loop);
217 gst_element_set_state (pipeline, GST_STATE_NULL);
218 gst_object_unref (GST_OBJECT (pipeline));
226 Decodebin, similar to playbin, supports the following features:
228 - Can decode an unlimited number of contained streams to decoded
231 - Is handled as a `GstElement` in all ways, including tag or error
232 forwarding and state handling.
234 Although decodebin is a good autoplugger, there's a whole lot of things
235 that it does not do and is not intended to do:
237 - Taking care of input streams with a known media type (e.g. a DVD, an
240 - Selection of streams (e.g. which audio track to play in case of
241 multi-language media streams).
243 - Overlaying subtitles over a decoded video stream.
245 Decodebin can be easily tested on the commandline, e.g. by using the
246 command `gst-launch-1.0 filesrc location=file.ogg ! decodebin
247 ! audioconvert ! audioresample ! autoaudiosink`.
251 The uridecodebin element is very similar to decodebin, only that it
252 automatically plugs a source plugin based on the protocol of the URI
255 Uridecodebin will also automatically insert buffering elements when the
256 URI is a slow network source. The buffering element will post BUFFERING
257 messages that the application needs to handle as explained in
258 [Buffering](manual/advanced/buffering.md). The following properties can be used
259 to configure the buffering method:
261 - The buffer-size property allows you to configure a maximum size in
262 bytes for the buffer element.
264 - The buffer-duration property allows you to configure a maximum size
265 in time for the buffer element. The time will be estimated based on
266 the bitrate of the network.
268 - With the download property you can enable the download buffering
269 method as described in [Download
270 buffering](manual/advanced/buffering.md#download-buffering). Setting this
271 option to TRUE will only enable download buffering for selected
272 formats such as quicktime, flash video, avi and webm.
274 - You can also enable buffering on the parsed/demuxed data with the
275 use-buffering property. This is interesting to enable buffering on
276 slower random access media such as a network file server.
278 URIDecodebin can be easily tested on the commandline, e.g. by using the
279 command `gst-launch-1.0 uridecodebin uri=file:///file.ogg
280 ! audioconvert ! audioresample ! autoaudiosink`.
284 The playsink element is a powerful sink element. It has request pads for
285 raw decoded audio, video and text and it will configure itself to play
286 the media streams. It has the following features:
288 - It exposes GstStreamVolume, GstVideoOverlay, GstNavigation and
289 GstColorBalance interfaces and automatically plugs software elements
290 to implement the interfaces when needed.
292 - It will automatically plug conversion elements.
294 - Can optionally render visualizations when there is no video input.
296 - Configurable sink elements.
298 - Configurable audio/video sync offset to fine-tune synchronization in
301 - Support for taking a snapshot of the last video frame.
303 Below is an example of how you can use playsink. We use a uridecodebin
304 element to decode into raw audio and video streams which we then link to
305 the playsink request pads. We only link the first audio and video pads,
306 you could use an input-selector to link all pads.
314 [.. my_bus_callback goes here ..]
320 GstElement *pipeline, *sink;
323 cb_pad_added (GstElement *dec,
330 GstPadTemplate *templ;
331 GstElementClass *klass;
333 /* check media type */
334 caps = gst_pad_query_caps (pad, NULL);
335 str = gst_caps_get_structure (caps, 0);
336 name = gst_structure_get_name (str);
338 klass = GST_ELEMENT_GET_CLASS (sink);
340 if (g_str_has_prefix (name, "audio")) {
341 templ = gst_element_class_get_pad_template (klass, "audio_sink");
342 } else if (g_str_has_prefix (name, "video")) {
343 templ = gst_element_class_get_pad_template (klass, "video_sink");
344 } else if (g_str_has_prefix (name, "text")) {
345 templ = gst_element_class_get_pad_template (klass, "text_sink");
353 sinkpad = gst_element_request_pad (sink, templ, NULL, NULL);
355 if (!gst_pad_is_linked (sinkpad))
356 gst_pad_link (pad, sinkpad);
358 gst_object_unref (sinkpad);
371 gst_init (&argc, &argv);
372 loop = g_main_loop_new (NULL, FALSE);
374 /* make sure we have input */
376 g_print ("Usage: %s <uri>\n", argv[0]);
381 pipeline = gst_pipeline_new ("pipeline");
383 bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
384 gst_bus_add_watch (bus, my_bus_callback, loop);
385 gst_object_unref (bus);
387 dec = gst_element_factory_make ("uridecodebin", "source");
388 g_object_set (G_OBJECT (dec), "uri", argv[1], NULL);
389 g_signal_connect (dec, "pad-added", G_CALLBACK (cb_pad_added), NULL);
391 /* create audio output */
392 sink = gst_element_factory_make ("playsink", "sink");
393 gst_util_set_object_arg (G_OBJECT (sink), "flags",
394 "soft-colorbalance+soft-volume+vis+text+audio+video");
395 gst_bin_add_many (GST_BIN (pipeline), dec, sink, NULL);
398 gst_element_set_state (pipeline, GST_STATE_PLAYING);
399 g_main_loop_run (loop);
402 gst_element_set_state (pipeline, GST_STATE_NULL);
403 gst_object_unref (GST_OBJECT (pipeline));
412 This example will show audio and video depending on what you give it.
413 Try this example on an audio file and you will see that it shows
414 visualizations. You can change the visualization at runtime by changing
415 the vis-plugin property.