\r
/* Structure to contain all our information, so we can pass it around */\r
typedef struct _CustomData {\r
- GstElement *playbin2; /* Our one and only element */\r
+ GstElement *playbin; /* Our one and only element */\r
gboolean playing; /* Are we in the PLAYING state? */\r
gboolean terminate; /* Should we terminate execution? */\r
gboolean seek_enabled; /* Is seeking enabled for this media? */\r
gst_init (&argc, &argv);\r
\r
/* Create the elements */\r
- data.playbin2 = gst_element_factory_make ("playbin2", "playbin2");\r
+ data.playbin = gst_element_factory_make ("playbin", "playbin");\r
\r
- if (!data.playbin2) {\r
+ if (!data.playbin) {\r
g_printerr ("Not all elements could be created.\n");\r
return -1;\r
}\r
\r
/* Set the URI to play */\r
- g_object_set (data.playbin2, "uri", "http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);\r
+ g_object_set (data.playbin, "uri", "http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);\r
\r
/* Start playing */\r
- ret = gst_element_set_state (data.playbin2, GST_STATE_PLAYING);\r
+ ret = gst_element_set_state (data.playbin, GST_STATE_PLAYING);\r
if (ret == GST_STATE_CHANGE_FAILURE) {\r
g_printerr ("Unable to set the pipeline to the playing state.\n");\r
- gst_object_unref (data.playbin2);\r
+ gst_object_unref (data.playbin);\r
return -1;\r
}\r
\r
/* Listen to the bus */\r
- bus = gst_element_get_bus (data.playbin2);\r
+ bus = gst_element_get_bus (data.playbin);\r
do {\r
msg = gst_bus_timed_pop_filtered (bus, 100 * GST_MSECOND,\r
GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS | GST_MESSAGE_DURATION);\r
} else {\r
/* We got no message, this means the timeout expired */\r
if (data.playing) {\r
- GstFormat fmt = GST_FORMAT_TIME;\r
gint64 current = -1;\r
\r
/* Query the current position of the stream */\r
- if (!gst_element_query_position (data.playbin2, &fmt, &current)) {\r
+ if (!gst_element_query_position (data.playbin, GST_FORMAT_TIME, &current)) {\r
g_printerr ("Could not query current position.\n");\r
}\r
\r
/* If we didn't know it yet, query the stream duration */\r
if (!GST_CLOCK_TIME_IS_VALID (data.duration)) {\r
- if (!gst_element_query_duration (data.playbin2, &fmt, &data.duration)) {\r
+ if (!gst_element_query_duration (data.playbin, GST_FORMAT_TIME, &data.duration)) {\r
g_printerr ("Could not query current duration.\n");\r
}\r
}\r
/* If seeking is enabled, we have not done it yet, and the time is right, seek */\r
if (data.seek_enabled && !data.seek_done && current > 10 * GST_SECOND) {\r
g_print ("\nReached 10s, performing seek...\n");\r
- gst_element_seek_simple (data.playbin2, GST_FORMAT_TIME,\r
+ gst_element_seek_simple (data.playbin, GST_FORMAT_TIME,\r
GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT, 30 * GST_SECOND);\r
data.seek_done = TRUE;\r
}\r
\r
/* Free resources */\r
gst_object_unref (bus);\r
- gst_element_set_state (data.playbin2, GST_STATE_NULL);\r
- gst_object_unref (data.playbin2);\r
+ gst_element_set_state (data.playbin, GST_STATE_NULL);\r
+ gst_object_unref (data.playbin);\r
return 0;\r
}\r
\r
case GST_MESSAGE_STATE_CHANGED: {\r
GstState old_state, new_state, pending_state;\r
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);\r
- if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin2)) {\r
+ if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin)) {\r
g_print ("Pipeline state changed from %s to %s:\n",\r
gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));\r
\r
GstQuery *query;\r
gint64 start, end;\r
query = gst_query_new_seeking (GST_FORMAT_TIME);\r
- if (gst_element_query (data->playbin2, query)) {\r
+ if (gst_element_query (data->playbin, query)) {\r
gst_query_parse_seeking (query, NULL, &data->seek_enabled, &start, &end);\r
if (data->seek_enabled) {\r
g_print ("Seeking is ENABLED from %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT "\n",\r
#include <gtk/gtk.h>
#include <gst/gst.h>
-#include <gst/interfaces/xoverlay.h>
+#include <gst/video/videooverlay.h>
#include <gdk/gdk.h>
#if defined (GDK_WINDOWING_X11)
/* Structure to contain all our information, so we can pass it around */
typedef struct _CustomData {
- GstElement *playbin2; /* Our one and only pipeline */
+ GstElement *playbin; /* Our one and only pipeline */
GtkWidget *slider; /* Slider widget to keep track of current position */
GtkWidget *streams_list; /* Text widget to display info about the streams */
#elif defined (GDK_WINDOWING_X11)
window_handle = GDK_WINDOW_XID (window);
#endif
- /* Pass it to playbin2, which implements XOverlay and will forward it to the video sink */
- gst_x_overlay_set_window_handle (GST_X_OVERLAY (data->playbin2), window_handle);
+ /* Pass it to playbin, which implements GstVideoOverlay and will forward it to the video sink */
+ gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (data->playbin), window_handle);
}
/* This function is called when the PLAY button is clicked */
static void play_cb (GtkButton *button, CustomData *data) {
- gst_element_set_state (data->playbin2, GST_STATE_PLAYING);
+ gst_element_set_state (data->playbin, GST_STATE_PLAYING);
}
/* This function is called when the PAUSE button is clicked */
static void pause_cb (GtkButton *button, CustomData *data) {
- gst_element_set_state (data->playbin2, GST_STATE_PAUSED);
+ gst_element_set_state (data->playbin, GST_STATE_PAUSED);
}
/* This function is called when the STOP button is clicked */
static void stop_cb (GtkButton *button, CustomData *data) {
- gst_element_set_state (data->playbin2, GST_STATE_READY);
+ gst_element_set_state (data->playbin, GST_STATE_READY);
}
/* This function is called when the main window is closed */
/* This function is called everytime the video window needs to be redrawn (due to damage/exposure,
* rescaling, etc). GStreamer takes care of this in the PAUSED and PLAYING states, otherwise,
* we simply draw a black rectangle to avoid garbage showing up. */
-static gboolean expose_cb (GtkWidget *widget, GdkEventExpose *event, CustomData *data) {
+static gboolean draw_cb (GtkWidget *widget, cairo_t *cr, CustomData *data) {
if (data->state < GST_STATE_PAUSED) {
GtkAllocation allocation;
- GdkWindow *window = gtk_widget_get_window (widget);
- cairo_t *cr;
/* Cairo is a 2D graphics library which we use here to clean the video window.
* It is used by GStreamer for other reasons, so it will always be available to us. */
gtk_widget_get_allocation (widget, &allocation);
- cr = gdk_cairo_create (window);
cairo_set_source_rgb (cr, 0, 0, 0);
cairo_rectangle (cr, 0, 0, allocation.width, allocation.height);
cairo_fill (cr);
- cairo_destroy (cr);
}
return FALSE;
* new position here. */
static void slider_cb (GtkRange *range, CustomData *data) {
gdouble value = gtk_range_get_value (GTK_RANGE (data->slider));
- gst_element_seek_simple (data->playbin2, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT,
+ gst_element_seek_simple (data->playbin, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT,
(gint64)(value * GST_SECOND));
}
video_window = gtk_drawing_area_new ();
gtk_widget_set_double_buffered (video_window, FALSE);
g_signal_connect (video_window, "realize", G_CALLBACK (realize_cb), data);
- g_signal_connect (video_window, "expose_event", G_CALLBACK (expose_cb), data);
+ g_signal_connect (video_window, "draw", G_CALLBACK (draw_cb), data);
play_button = gtk_button_new_from_stock (GTK_STOCK_MEDIA_PLAY);
g_signal_connect (G_OBJECT (play_button), "clicked", G_CALLBACK (play_cb), data);
stop_button = gtk_button_new_from_stock (GTK_STOCK_MEDIA_STOP);
g_signal_connect (G_OBJECT (stop_button), "clicked", G_CALLBACK (stop_cb), data);
- data->slider = gtk_hscale_new_with_range (0, 100, 1);
+ data->slider = gtk_scale_new_with_range (GTK_ORIENTATION_HORIZONTAL, 0, 100, 1);
gtk_scale_set_draw_value (GTK_SCALE (data->slider), 0);
data->slider_update_signal_id = g_signal_connect (G_OBJECT (data->slider), "value-changed", G_CALLBACK (slider_cb), data);
data->streams_list = gtk_text_view_new ();
gtk_text_view_set_editable (GTK_TEXT_VIEW (data->streams_list), FALSE);
- controls = gtk_hbox_new (FALSE, 0);
+ controls = gtk_box_new (GTK_ORIENTATION_HORIZONTAL, 0);
gtk_box_pack_start (GTK_BOX (controls), play_button, FALSE, FALSE, 2);
gtk_box_pack_start (GTK_BOX (controls), pause_button, FALSE, FALSE, 2);
gtk_box_pack_start (GTK_BOX (controls), stop_button, FALSE, FALSE, 2);
gtk_box_pack_start (GTK_BOX (controls), data->slider, TRUE, TRUE, 2);
- main_hbox = gtk_hbox_new (FALSE, 0);
+ main_hbox = gtk_box_new (GTK_ORIENTATION_HORIZONTAL, 0);
gtk_box_pack_start (GTK_BOX (main_hbox), video_window, TRUE, TRUE, 0);
gtk_box_pack_start (GTK_BOX (main_hbox), data->streams_list, FALSE, FALSE, 2);
- main_box = gtk_vbox_new (FALSE, 0);
+ main_box = gtk_box_new (GTK_ORIENTATION_VERTICAL, 0);
gtk_box_pack_start (GTK_BOX (main_box), main_hbox, TRUE, TRUE, 0);
gtk_box_pack_start (GTK_BOX (main_box), controls, FALSE, FALSE, 0);
gtk_container_add (GTK_CONTAINER (main_window), main_box);
/* This function is called periodically to refresh the GUI */
static gboolean refresh_ui (CustomData *data) {
- GstFormat fmt = GST_FORMAT_TIME;
gint64 current = -1;
/* We do not want to update anything unless we are in the PAUSED or PLAYING states */
/* If we didn't know it yet, query the stream duration */
if (!GST_CLOCK_TIME_IS_VALID (data->duration)) {
- if (!gst_element_query_duration (data->playbin2, &fmt, &data->duration)) {
+ if (!gst_element_query_duration (data->playbin, GST_FORMAT_TIME, &data->duration)) {
g_printerr ("Could not query current duration.\n");
} else {
/* Set the range of the slider to the clip duration, in SECONDS */
}
}
- if (gst_element_query_position (data->playbin2, &fmt, &current)) {
+ if (gst_element_query_position (data->playbin, GST_FORMAT_TIME, &current)) {
/* Block the "value-changed" signal, so the slider_cb function is not called
* (which would trigger a seek the user has not requested) */
g_signal_handler_block (data->slider, data->slider_update_signal_id);
}
/* This function is called when new metadata is discovered in the stream */
-static void tags_cb (GstElement *playbin2, gint stream, CustomData *data) {
+static void tags_cb (GstElement *playbin, gint stream, CustomData *data) {
/* We are possibly in a GStreamer working thread, so we notify the main
* thread of this event through a message in the bus */
- gst_element_post_message (playbin2,
- gst_message_new_application (GST_OBJECT (playbin2),
- gst_structure_new ("tags-changed", NULL)));
+ gst_element_post_message (playbin,
+ gst_message_new_application (GST_OBJECT (playbin),
+ gst_structure_new_empty ("tags-changed")));
}
/* This function is called when an error message is posted on the bus */
g_free (debug_info);
/* Set the pipeline to READY (which stops playback) */
- gst_element_set_state (data->playbin2, GST_STATE_READY);
+ gst_element_set_state (data->playbin, GST_STATE_READY);
}
/* This function is called when an End-Of-Stream message is posted on the bus.
* We just set the pipeline to READY (which stops playback) */
static void eos_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
g_print ("End-Of-Stream reached.\n");
- gst_element_set_state (data->playbin2, GST_STATE_READY);
+ gst_element_set_state (data->playbin, GST_STATE_READY);
}
/* This function is called when the pipeline changes states. We use it to
static void state_changed_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
- if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin2)) {
+ if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data->playbin)) {
data->state = new_state;
g_print ("State set to %s\n", gst_element_state_get_name (new_state));
if (old_state == GST_STATE_READY && new_state == GST_STATE_PAUSED) {
gtk_text_buffer_set_text (text, "", -1);
/* Read some properties */
- g_object_get (data->playbin2, "n-video", &n_video, NULL);
- g_object_get (data->playbin2, "n-audio", &n_audio, NULL);
- g_object_get (data->playbin2, "n-text", &n_text, NULL);
+ g_object_get (data->playbin, "n-video", &n_video, NULL);
+ g_object_get (data->playbin, "n-audio", &n_audio, NULL);
+ g_object_get (data->playbin, "n-text", &n_text, NULL);
for (i = 0; i < n_video; i++) {
tags = NULL;
/* Retrieve the stream's video tags */
- g_signal_emit_by_name (data->playbin2, "get-video-tags", i, &tags);
+ g_signal_emit_by_name (data->playbin, "get-video-tags", i, &tags);
if (tags) {
total_str = g_strdup_printf ("video stream %d:\n", i);
gtk_text_buffer_insert_at_cursor (text, total_str, -1);
for (i = 0; i < n_audio; i++) {
tags = NULL;
/* Retrieve the stream's audio tags */
- g_signal_emit_by_name (data->playbin2, "get-audio-tags", i, &tags);
+ g_signal_emit_by_name (data->playbin, "get-audio-tags", i, &tags);
if (tags) {
total_str = g_strdup_printf ("\naudio stream %d:\n", i);
gtk_text_buffer_insert_at_cursor (text, total_str, -1);
for (i = 0; i < n_text; i++) {
tags = NULL;
/* Retrieve the stream's subtitle tags */
- g_signal_emit_by_name (data->playbin2, "get-text-tags", i, &tags);
+ g_signal_emit_by_name (data->playbin, "get-text-tags", i, &tags);
if (tags) {
total_str = g_strdup_printf ("\nsubtitle stream %d:\n", i);
gtk_text_buffer_insert_at_cursor (text, total_str, -1);
/* This function is called when an "application" message is posted on the bus.
* Here we retrieve the message posted by the tags_cb callback */
static void application_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
- if (g_strcmp0 (gst_structure_get_name (msg->structure), "tags-changed") == 0) {
+ if (g_strcmp0 (gst_structure_get_name (gst_message_get_structure (msg)), "tags-changed") == 0) {
/* If the message is the "tags-changed" (only one we are currently issuing), update
* the stream info GUI */
analyze_streams (data);
data.duration = GST_CLOCK_TIME_NONE;
/* Create the elements */
- data.playbin2 = gst_element_factory_make ("playbin2", "playbin2");
+ data.playbin = gst_element_factory_make ("playbin", "playbin");
- if (!data.playbin2) {
+ if (!data.playbin) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Set the URI to play */
- g_object_set (data.playbin2, "uri", "http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
+ g_object_set (data.playbin, "uri", "http://docs.gstreamer.com/media/sintel_trailer-480p.webm", NULL);
- /* Connect to interesting signals in playbin2 */
- g_signal_connect (G_OBJECT (data.playbin2), "video-tags-changed", (GCallback) tags_cb, &data);
- g_signal_connect (G_OBJECT (data.playbin2), "audio-tags-changed", (GCallback) tags_cb, &data);
- g_signal_connect (G_OBJECT (data.playbin2), "text-tags-changed", (GCallback) tags_cb, &data);
+ /* Connect to interesting signals in playbin */
+ g_signal_connect (G_OBJECT (data.playbin), "video-tags-changed", (GCallback) tags_cb, &data);
+ g_signal_connect (G_OBJECT (data.playbin), "audio-tags-changed", (GCallback) tags_cb, &data);
+ g_signal_connect (G_OBJECT (data.playbin), "text-tags-changed", (GCallback) tags_cb, &data);
/* Create the GUI */
create_ui (&data);
/* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
- bus = gst_element_get_bus (data.playbin2);
+ bus = gst_element_get_bus (data.playbin);
gst_bus_add_signal_watch (bus);
g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, &data);
g_signal_connect (G_OBJECT (bus), "message::eos", (GCallback)eos_cb, &data);
gst_object_unref (bus);
/* Start playing */
- ret = gst_element_set_state (data.playbin2, GST_STATE_PLAYING);
+ ret = gst_element_set_state (data.playbin, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
- gst_object_unref (data.playbin2);
+ gst_object_unref (data.playbin);
return -1;
}
gtk_main ();
/* Free resources */
- gst_element_set_state (data.playbin2, GST_STATE_NULL);
- gst_object_unref (data.playbin2);
+ gst_element_set_state (data.playbin, GST_STATE_NULL);
+ gst_object_unref (data.playbin);
return 0;
}
#include <gst/gst.h>
+#include <gst/audio/audio.h>
#include <string.h>
#define CHUNK_SIZE 1024 /* Amount of bytes we are sending in each buffer */
#define SAMPLE_RATE 44100 /* Samples per second we are sending */
-#define AUDIO_CAPS "audio/x-raw-int,channels=1,rate=%d,signed=(boolean)true,width=16,depth=16,endianness=BYTE_ORDER"
/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
GstBuffer *buffer;
GstFlowReturn ret;
int i;
+ GstMapInfo map;
gint16 *raw;
gint num_samples = CHUNK_SIZE / 2; /* Because each sample is 16 bits */
gfloat freq;
GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale (CHUNK_SIZE, GST_SECOND, SAMPLE_RATE);
/* Generate some psychodelic waveforms */
- raw = (gint16 *)GST_BUFFER_DATA (buffer);
+ gst_buffer_map (buffer, &map, GST_MAP_WRITE);
+ raw = (gint16 *)map.data;
data->c += data->d;
data->d -= data->c / 1000;
freq = 1100 + 1000 * data->d;
data->b -= data->a / freq;
raw[i] = (gint16)(500 * data->a);
}
+ gst_buffer_unmap (buffer, &map);
data->num_samples += num_samples;
/* Push the buffer into the appsrc */
}
/* The appsink has received a buffer */
-static void new_buffer (GstElement *sink, CustomData *data) {
- GstBuffer *buffer;
+static void new_sample (GstElement *sink, CustomData *data) {
+ GstSample *sample;
/* Retrieve the buffer */
- g_signal_emit_by_name (sink, "pull-buffer", &buffer);
- if (buffer) {
+ g_signal_emit_by_name (sink, "pull-sample", &sample);
+ if (sample) {
/* The only thing we do in this example is print a * to indicate a received buffer */
g_print ("*");
- gst_buffer_unref (buffer);
+ gst_sample_unref (sample);
}
}
GstPadTemplate *tee_src_pad_template;
GstPad *tee_audio_pad, *tee_video_pad, *tee_app_pad;
GstPad *queue_audio_pad, *queue_video_pad, *queue_app_pad;
- gchar *audio_caps_text;
+ GstAudioInfo info;
GstCaps *audio_caps;
GstBus *bus;
data.video_queue = gst_element_factory_make ("queue", "video_queue");
data.audio_convert2 = gst_element_factory_make ("audioconvert", "audio_convert2");
data.visual = gst_element_factory_make ("wavescope", "visual");
- data.video_convert = gst_element_factory_make ("ffmpegcolorspace", "csp");
+ data.video_convert = gst_element_factory_make ("videoconvert", "video_convert");
data.video_sink = gst_element_factory_make ("autovideosink", "video_sink");
data.app_queue = gst_element_factory_make ("queue", "app_queue");
data.app_sink = gst_element_factory_make ("appsink", "app_sink");
g_object_set (data.visual, "shader", 0, "style", 0, NULL);
/* Configure appsrc */
- audio_caps_text = g_strdup_printf (AUDIO_CAPS, SAMPLE_RATE);
- audio_caps = gst_caps_from_string (audio_caps_text);
- g_object_set (data.app_source, "caps", audio_caps, NULL);
+ gst_audio_info_set_format (&info, GST_AUDIO_FORMAT_S16, SAMPLE_RATE, 1, NULL);
+ audio_caps = gst_audio_info_to_caps (&info);
+ g_object_set (data.app_source, "caps", audio_caps, "format", GST_FORMAT_TIME, NULL);
g_signal_connect (data.app_source, "need-data", G_CALLBACK (start_feed), &data);
g_signal_connect (data.app_source, "enough-data", G_CALLBACK (stop_feed), &data);
/* Configure appsink */
g_object_set (data.app_sink, "emit-signals", TRUE, "caps", audio_caps, NULL);
- g_signal_connect (data.app_sink, "new-buffer", G_CALLBACK (new_buffer), &data);
+ g_signal_connect (data.app_sink, "new-sample", G_CALLBACK (new_sample), &data);
gst_caps_unref (audio_caps);
- g_free (audio_caps_text);
/* Link all elements that can be automatically linked because they have "Always" pads */
gst_bin_add_many (GST_BIN (data.pipeline), data.app_source, data.tee, data.audio_queue, data.audio_convert1, data.audio_resample,
}
/* Manually link the Tee, which has "Request" pads */
- tee_src_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (data.tee), "src%d");
+ tee_src_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (data.tee), "src_%u");
tee_audio_pad = gst_element_request_pad (data.tee, tee_src_pad_template, NULL, NULL);
g_print ("Obtained request pad %s for audio branch.\n", gst_pad_get_name (tee_audio_pad));
queue_audio_pad = gst_element_get_static_pad (data.audio_queue, "sink");