responsible for reporting the error to the user.
</para>
<programlisting><!-- example-begin decodebin.c a -->
-#include <gst/gst.h>
+<![CDATA[
+#include <gst/gst.h>
+]]>
<!-- example-end decodebin.c a -->
-[.. my_bus_callback goes here ..]<!-- example-begin decodebin.c b --><!--
+[.. my_bus_callback goes here ..]<!-- example-begin decodebin.c b -->
+<!--
static gboolean
my_bus_callback (GstBus *bus,
GstMessage *message,
}
--><!-- example-end decodebin.c b -->
<!-- example-begin decodebin.c c -->
+<![CDATA[
GstElement *pipeline, *audio;
static void
GstBus *bus;
/* init GStreamer */
- gst_init (&argc, &argv);
+ gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
/* make sure we have input */
if (argc != 2) {
- g_print ("Usage: %s <filename>\n", argv[0]);
+ g_print ("Usage: %s <filename>\n", argv[0]);
return -1;
}
return 0;
}
+]]>
<!-- example-end decodebin.c c --></programlisting>
-
<para>
Decodebin, similar to playbin, supports the following features:
</para>
! audioconvert ! audioresample ! autoaudiosink</command>.
</para>
</sect1>
+
+ <sect1 id="section-components-playsink">
+ <title>Playsink</title>
+ <para>
+      The playsink element is a powerful sink element. It has request pads
+      for raw decoded audio, video and text, and it will configure itself to
+      play the media streams. It has the following features:
+ </para>
+ <itemizedlist>
+ <listitem>
+ <para>
+ It exposes GstStreamVolume, GstVideoOverlay, GstNavigation and
+ GstColorBalance interfaces and automatically plugs software
+ elements to implement the interfaces when needed.
+ </para>
+ </listitem>
+ <listitem>
+ <para>
+ It will automatically plug conversion elements.
+ </para>
+ </listitem>
+ <listitem>
+ <para>
+        It can optionally render visualizations when there is no video input.
+ </para>
+ </listitem>
+ <listitem>
+ <para>
+ Configurable sink elements.
+ </para>
+ </listitem>
+ <listitem>
+ <para>
+ Configurable audio/video sync offset to fine-tune synchronization
+ in badly muxed files.
+ </para>
+ </listitem>
+ <listitem>
+ <para>
+ Support for taking a snapshot of the last video frame.
+ </para>
+ </listitem>
+ </itemizedlist>
+ <para>
+ Below is an example of how you can use playsink. We use a uridecodebin
+ element to decode into raw audio and video streams which we then link
+    to the playsink request pads. We only link the first audio and video
+    pads; you could use an input-selector to link all pads.
+ </para>
+ <programlisting>
+<!-- example-begin playsink.c a -->
+<![CDATA[
+#include <gst/gst.h>
+]]>
+<!-- example-end playsink.c a -->
+[.. my_bus_callback goes here ..]
+<!-- example-begin playsink.c b -->
+<!--
+/* Bus watch callback: prints errors and quits the main loop when an
+ * ERROR or EOS message arrives. Returning TRUE keeps the watch
+ * installed so further messages are delivered. (data is the
+ * GMainLoop installed by main().) */
+static gboolean
+my_bus_callback (GstBus *bus,
+    GstMessage *message,
+    gpointer data)
+{
+  GMainLoop *loop = data;
+
+  switch (GST_MESSAGE_TYPE (message)) {
+    case GST_MESSAGE_ERROR: {
+      GError *err;
+      gchar *debug;
+
+      /* parse_error gives us owned copies; free both after use */
+      gst_message_parse_error (message, &err, &debug);
+      g_print ("Error: %s\n", err->message);
+      g_error_free (err);
+      g_free (debug);
+
+      g_main_loop_quit (loop);
+      break;
+    }
+    case GST_MESSAGE_EOS:
+      /* end-of-stream */
+      g_main_loop_quit (loop);
+      break;
+    default:
+      /* unhandled message */
+      break;
+  }
+
+  /* remove message from the queue */
+  return TRUE;
+}
+-->
+<!-- example-end playsink.c b -->
+<!-- example-begin playsink.c c -->
+<![CDATA[
+/* Pipeline and playsink; shared with main() below. */
+GstElement *pipeline, *sink;
+
+/* "pad-added" handler for uridecodebin: inspects the media type of the
+ * new decoded pad, requests the matching playsink pad (audio_sink,
+ * video_sink or text_sink) and links them. Pads of any other media
+ * type are ignored. */
+static void
+cb_pad_added (GstElement *dec,
+    GstPad *pad,
+    gpointer data)
+{
+  GstCaps *caps;
+  GstStructure *str;
+  const gchar *name;
+  GstPadTemplate *templ;
+  GstElementClass *klass;
+
+  /* check media type */
+  caps = gst_pad_query_caps (pad, NULL);
+  str = gst_caps_get_structure (caps, 0);
+  name = gst_structure_get_name (str);
+
+  klass = GST_ELEMENT_GET_CLASS (sink);
+
+  if (g_str_has_prefix (name, "audio")) {
+    templ = gst_element_class_get_pad_template (klass, "audio_sink");
+  } else if (g_str_has_prefix (name, "video")) {
+    templ = gst_element_class_get_pad_template (klass, "video_sink");
+  } else if (g_str_has_prefix (name, "text")) {
+    templ = gst_element_class_get_pad_template (klass, "text_sink");
+  } else {
+    templ = NULL;
+  }
+
+  /* gst_pad_query_caps() returned a reference we own; name points into
+   * the caps, so release them only after the last use of name */
+  gst_caps_unref (caps);
+
+  if (templ) {
+    GstPad *sinkpad;
+
+    sinkpad = gst_element_request_pad (sink, templ, NULL, NULL);
+
+    if (!gst_pad_is_linked (sinkpad))
+      gst_pad_link (pad, sinkpad);
+
+    gst_object_unref (sinkpad);
+  }
+}
+
+/* Decode the URI given on the command line with uridecodebin and hand
+ * the raw streams to playsink. Returns 0 on success, -1 on bad usage. */
+gint
+main (gint argc,
+    gchar *argv[])
+{
+  GMainLoop *loop;
+  GstElement *dec;
+  GstBus *bus;
+
+  /* init GStreamer */
+  gst_init (&argc, &argv);
+  loop = g_main_loop_new (NULL, FALSE);
+
+  /* make sure we have input */
+  if (argc != 2) {
+    g_print ("Usage: %s <uri>\n", argv[0]);
+    return -1;
+  }
+
+  /* setup */
+  pipeline = gst_pipeline_new ("pipeline");
+
+  /* watch the bus so my_bus_callback can quit the loop on error/EOS */
+  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
+  gst_bus_add_watch (bus, my_bus_callback, loop);
+  gst_object_unref (bus);
+
+  dec = gst_element_factory_make ("uridecodebin", "source");
+  g_object_set (G_OBJECT (dec), "uri", argv[1], NULL);
+  g_signal_connect (dec, "pad-added", G_CALLBACK (cb_pad_added), NULL);
+
+  /* create the playsink; cb_pad_added links decoded streams to it */
+  sink = gst_element_factory_make ("playsink", "sink");
+  gst_util_set_object_arg (G_OBJECT (sink), "flags",
+      "soft-colorbalance+soft-volume+vis+text+audio+video");
+  gst_bin_add_many (GST_BIN (pipeline), dec, sink, NULL);
+
+  /* run */
+  gst_element_set_state (pipeline, GST_STATE_PLAYING);
+  g_main_loop_run (loop);
+
+  /* cleanup */
+  gst_element_set_state (pipeline, GST_STATE_NULL);
+  gst_object_unref (GST_OBJECT (pipeline));
+  g_main_loop_unref (loop);
+
+  return 0;
+}
+]]>
+<!-- example-end playsink.c c -->
+ </programlisting>
+ <para>
+ This example will show audio and video depending on what you
+ give it. Try this example on an audio file and you will see that
+ it shows visualizations. You can change the visualization at runtime by
+ changing the vis-plugin property.
+ </para>
+ </sect1>
</chapter>