fi
fi
fi
- ], , [
- AC_SUBST(HAVE_XSHM)
- AC_SUBST(XSHM_LIBS)
+ ], , [
+ AC_SUBST(HAVE_XSHM)
+ AC_SUBST(XSHM_LIBS)
])
-dnl v4l/v4l2 checks have been moved down because they require X
-
-dnl *** Video 4 Linux ***
-dnl for information about the header/define, see sys/v4l/gstv4lelement.h
-dnl renamed to GST_V4L in accordance with V4L2 below
-translit(dnm, m, l) AM_CONDITIONAL(USE_GST_V4L, true)
-AG_GST_CHECK_FEATURE(GST_V4L, [Video 4 Linux], video4linux, [
- AC_CHECK_DECL(VID_TYPE_MPEG_ENCODER, HAVE_GST_V4L="yes", HAVE_GST_V4L="no", [
-#include <sys/types.h>
-#define _LINUX_TIME_H
-#define __user
-#include <linux/videodev.h>
- ])
-
- dnl we can build v4l without Xv, but then we won't have XOverlay support
- if test "x$HAVE_GST_V4L" = "xyes" -a "x$HAVE_XVIDEO" != "xyes"
- then
- AC_MSG_NOTICE([NO XVIDEO FOUND, VIDEO4LINUX WILL BE BUILT])
- AC_MSG_NOTICE([WITHOUT XOVERLAY SUPPORT])
- fi
-])
-
-# Optional gudev for device probing
-AC_ARG_WITH([gudev],
- AC_HELP_STRING([--with-gudev],
- [device detection with gudev]),
- [],
- [with_gudev=check])
-if test x$HAVE_GST_V4L = xyes; then
- if test x$with_gudev != xno; then
- PKG_CHECK_MODULES(GUDEV, [ gudev-1.0 >= 143 ],
- [ have_gudev=yes
- AC_DEFINE(HAVE_GUDEV, 1,
- [Whether gudev is available for device detection])
- AC_DEFINE([G_UDEV_API_IS_SUBJECT_TO_CHANGE], 1, [I know the API is subject to change.])
- ], [
- have_gudev=no
- ])
- else
- have_gudev=no
- fi
-fi
-AC_SUBST(GUDEV_CFLAGS)
-AC_SUBST(GUDEV_LIBS)
-
dnl *** ext plug-ins ***
dnl keep this list sorted alphabetically !
if (pipe (ret->pfd) == -1)
goto error;
- ret->rec_mutex = g_new (GStaticRecMutex, 1);
- g_static_rec_mutex_init (ret->rec_mutex);
-
- ret->task_mutex = g_new (GStaticRecMutex, 1);
- g_static_rec_mutex_init (ret->task_mutex);
+ #if !GLIB_CHECK_VERSION (2, 31, 0)
+ g_static_rec_mutex_init (&ret->rec_mutex);
+ g_static_rec_mutex_init (&ret->task_mutex);
+ #else
+ g_rec_mutex_init (&ret->rec_mutex);
+ g_rec_mutex_init (&ret->task_mutex);
+ #endif
- ret->task = gst_task_create (task_monitor_alsa, ret);
+ ret->task = gst_task_new (task_monitor_alsa, ret);
- gst_task_set_lock (ret->task, ret->task_mutex);
+ gst_task_set_lock (ret->task, &ret->task_mutex);
ret->device = g_strdup (device);
ret->dir = dir;
riff \
app
- noinst_HEADERS = gettext.h gst-i18n-plugin.h
+ noinst_HEADERS = gettext.h gst-i18n-plugin.h glib-compat-private.h
# dependencies:
-audio: interfaces pbutils
-
-cdda: tag
+audio: interfaces tag
riff: tag audio
#include <string.h>
+#include "gstapp-marshal.h"
#include "gstappsink.h"
+#include "gst/glib-compat-private.h"
+
struct _GstAppSinkPrivate
{
GstCaps *caps;
--- /dev/null
+/* GStreamer
+ * Copyright (C) 1999,2000 Erik Walthinsen <omega@cse.ogi.edu>
+ * 2005 Wim Taymans <wim@fluendo.com>
+ *
+ * gstaudiobasesink.c:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+/**
+ * SECTION:gstaudiobasesink
+ * @short_description: Base class for audio sinks
+ * @see_also: #GstAudioSink, #GstAudioRingBuffer.
+ *
+ * This is the base class for audio sinks. Subclasses need to implement the
+ * ::create_ringbuffer vmethod. This base class will then take care of
+ * writing samples to the ringbuffer, synchronisation, clipping and flushing.
+ *
+ * Last reviewed on 2006-09-27 (0.10.12)
+ */
+
+#include <string.h>
+
+/* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex
+ * with newer GLib versions (>= 2.31.0) */
+#define GLIB_DISABLE_DEPRECATION_WARNINGS
+#include "gstaudiobasesink.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_audio_base_sink_debug);
+#define GST_CAT_DEFAULT gst_audio_base_sink_debug
+
+#define GST_AUDIO_BASE_SINK_GET_PRIVATE(obj) \
+ (G_TYPE_INSTANCE_GET_PRIVATE ((obj), GST_TYPE_AUDIO_BASE_SINK, GstAudioBaseSinkPrivate))
+
+struct _GstAudioBaseSinkPrivate
+{
+ /* upstream latency */
+ GstClockTime us_latency;
+ /* the clock slaving algorithm in use */
+ GstAudioBaseSinkSlaveMethod slave_method;
+ /* running average of clock skew */
+ GstClockTimeDiff avg_skew;
+ /* the number of samples we aligned last time */
+ gint64 last_align;
+
+ gboolean sync_latency;
+
+ GstClockTime eos_time;
+
+ /* number of microseconds we allow clock slaving to drift
+ * before resyncing */
+ guint64 drift_tolerance;
+
+ /* number of nanoseconds we allow timestamps to drift
+ * before resyncing */
+ GstClockTime alignment_threshold;
+
+ /* time of the previous detected discont candidate */
+ GstClockTime discont_time;
+
+ /* number of nanoseconds to wait until creating a discontinuity */
+ GstClockTime discont_wait;
+};
+
+/* BaseAudioSink signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+/* FIXME: 0.11, store the buffer_time and latency_time in nanoseconds */
+#define DEFAULT_BUFFER_TIME ((200 * GST_MSECOND) / GST_USECOND)
+#define DEFAULT_LATENCY_TIME ((10 * GST_MSECOND) / GST_USECOND)
+#define DEFAULT_PROVIDE_CLOCK TRUE
+#define DEFAULT_SLAVE_METHOD GST_AUDIO_BASE_SINK_SLAVE_SKEW
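+
+/* with the defaults above the ring buffer is typically laid out as roughly
+ * buffer-time / latency-time = 200ms / 10ms = 20 segments of 10ms each;
+ * the exact layout is computed when the caps are parsed */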
+
+/* FIXME, enable pull mode when clock slaving and trick modes are figured out */
+#define DEFAULT_CAN_ACTIVATE_PULL FALSE
+
+/* when timestamps drift for more than 40ms we resync. This should
+ * be enough to compensate for timestamp rounding errors. */
+#define DEFAULT_ALIGNMENT_THRESHOLD (40 * GST_MSECOND)
+
+/* when clock slaving drifts by more than 40ms we resync. This is
+ * a reasonable default */
+#define DEFAULT_DRIFT_TOLERANCE ((40 * GST_MSECOND) / GST_USECOND)
+
+/* allow for one second before resyncing to see if the timestamps drift will
+ * fix itself, or is a permanent offset */
+#define DEFAULT_DISCONT_WAIT (1 * GST_SECOND)
+
+enum
+{
+ PROP_0,
+
+ PROP_BUFFER_TIME,
+ PROP_LATENCY_TIME,
+ PROP_PROVIDE_CLOCK,
+ PROP_SLAVE_METHOD,
+ PROP_CAN_ACTIVATE_PULL,
+ PROP_ALIGNMENT_THRESHOLD,
+ PROP_DRIFT_TOLERANCE,
+ PROP_DISCONT_WAIT,
+
+ PROP_LAST
+};
+
+GType
+gst_audio_base_sink_slave_method_get_type (void)
+{
+ static volatile gsize slave_method_type = 0;
+ static const GEnumValue slave_method[] = {
+ {GST_AUDIO_BASE_SINK_SLAVE_RESAMPLE, "GST_AUDIO_BASE_SINK_SLAVE_RESAMPLE",
+ "resample"},
+ {GST_AUDIO_BASE_SINK_SLAVE_SKEW, "GST_AUDIO_BASE_SINK_SLAVE_SKEW", "skew"},
+ {GST_AUDIO_BASE_SINK_SLAVE_NONE, "GST_AUDIO_BASE_SINK_SLAVE_NONE", "none"},
+ {0, NULL, NULL},
+ };
+
+ if (g_once_init_enter (&slave_method_type)) {
+ GType tmp =
+ g_enum_register_static ("GstAudioBaseSinkSlaveMethod", slave_method);
+ g_once_init_leave (&slave_method_type, tmp);
+ }
+
+ return (GType) slave_method_type;
+}
+
+
+#define _do_init \
+ GST_DEBUG_CATEGORY_INIT (gst_audio_base_sink_debug, "audiobasesink", 0, "audiobasesink element");
+#define gst_audio_base_sink_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstAudioBaseSink, gst_audio_base_sink,
+ GST_TYPE_BASE_SINK, _do_init);
+
+static void gst_audio_base_sink_dispose (GObject * object);
+
+static void gst_audio_base_sink_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_audio_base_sink_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+#if 0
+static GstStateChangeReturn gst_audio_base_sink_async_play (GstBaseSink *
+ basesink);
+#endif
+static GstStateChangeReturn gst_audio_base_sink_change_state (GstElement *
+ element, GstStateChange transition);
+static gboolean gst_audio_base_sink_activate_pull (GstBaseSink * basesink,
+ gboolean active);
+static gboolean gst_audio_base_sink_query (GstElement * element, GstQuery *
+ query);
+
+static GstClock *gst_audio_base_sink_provide_clock (GstElement * elem);
+static GstClockTime gst_audio_base_sink_get_time (GstClock * clock,
+ GstAudioBaseSink * sink);
+static void gst_audio_base_sink_callback (GstAudioRingBuffer * rbuf,
+ guint8 * data, guint len, gpointer user_data);
+
+static GstFlowReturn gst_audio_base_sink_preroll (GstBaseSink * bsink,
+ GstBuffer * buffer);
+static GstFlowReturn gst_audio_base_sink_render (GstBaseSink * bsink,
+ GstBuffer * buffer);
+static gboolean gst_audio_base_sink_event (GstBaseSink * bsink,
+ GstEvent * event);
+static GstFlowReturn gst_audio_base_sink_wait_eos (GstBaseSink * bsink,
+ GstEvent * event);
+static void gst_audio_base_sink_get_times (GstBaseSink * bsink,
+ GstBuffer * buffer, GstClockTime * start, GstClockTime * end);
+static gboolean gst_audio_base_sink_setcaps (GstBaseSink * bsink,
+ GstCaps * caps);
+static void gst_audio_base_sink_fixate (GstBaseSink * bsink, GstCaps * caps);
+
+static gboolean gst_audio_base_sink_query_pad (GstBaseSink * bsink,
+ GstQuery * query);
+
+
+/* static guint gst_audio_base_sink_signals[LAST_SIGNAL] = { 0 }; */
+
+static void
+gst_audio_base_sink_class_init (GstAudioBaseSinkClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstBaseSinkClass *gstbasesink_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstbasesink_class = (GstBaseSinkClass *) klass;
+
+ g_type_class_add_private (klass, sizeof (GstAudioBaseSinkPrivate));
+
+ gobject_class->set_property = gst_audio_base_sink_set_property;
+ gobject_class->get_property = gst_audio_base_sink_get_property;
+ gobject_class->dispose = gst_audio_base_sink_dispose;
+
+ g_object_class_install_property (gobject_class, PROP_BUFFER_TIME,
+ g_param_spec_int64 ("buffer-time", "Buffer Time",
+ "Size of audio buffer in microseconds", 1,
+ G_MAXINT64, DEFAULT_BUFFER_TIME,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_LATENCY_TIME,
+ g_param_spec_int64 ("latency-time", "Latency Time",
+ "Audio latency in microseconds", 1,
+ G_MAXINT64, DEFAULT_LATENCY_TIME,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_PROVIDE_CLOCK,
+ g_param_spec_boolean ("provide-clock", "Provide Clock",
+ "Provide a clock to be used as the global pipeline clock",
+ DEFAULT_PROVIDE_CLOCK, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_SLAVE_METHOD,
+ g_param_spec_enum ("slave-method", "Slave Method",
+ "Algorithm to use to match the rate of the masterclock",
+ GST_TYPE_AUDIO_BASE_SINK_SLAVE_METHOD, DEFAULT_SLAVE_METHOD,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_CAN_ACTIVATE_PULL,
+ g_param_spec_boolean ("can-activate-pull", "Allow Pull Scheduling",
+ "Allow pull-based scheduling", DEFAULT_CAN_ACTIVATE_PULL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstAudioBaseSink:drift-tolerance
+ *
+ * Controls the amount of time in microseconds that clocks are allowed
+ * to drift before resynchronisation happens.
+ *
+ * Since: 0.10.26
+ */
+ g_object_class_install_property (gobject_class, PROP_DRIFT_TOLERANCE,
+ g_param_spec_int64 ("drift-tolerance", "Drift Tolerance",
+ "Tolerance for clock drift in microseconds", 1,
+ G_MAXINT64, DEFAULT_DRIFT_TOLERANCE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstAudioBaseSink:alignment-threshold
+ *
+ * Controls the amount of time in nanoseconds that timestamps are allowed
+ * to drift from their ideal time before choosing not to align them.
+ *
+ * Since: 0.10.36
+ */
+ g_object_class_install_property (gobject_class, PROP_ALIGNMENT_THRESHOLD,
+ g_param_spec_uint64 ("alignment-threshold", "Alignment Threshold",
+ "Timestamp alignment threshold in nanoseconds", 1,
+ G_MAXUINT64 - 1, DEFAULT_ALIGNMENT_THRESHOLD,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstAudioBaseSink:discont-wait
+ *
+ * A window of time in nanoseconds to wait before creating a discontinuity as
+ * a result of breaching the drift-tolerance.
+ *
+ * Since: 0.10.36
+ */
+ g_object_class_install_property (gobject_class, PROP_DISCONT_WAIT,
+ g_param_spec_uint64 ("discont-wait", "Discont Wait",
+ "Window of time in nanoseconds to wait before "
+ "creating a discontinuity", 0,
+ G_MAXUINT64 - 1, DEFAULT_DISCONT_WAIT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_audio_base_sink_change_state);
+ gstelement_class->provide_clock =
+ GST_DEBUG_FUNCPTR (gst_audio_base_sink_provide_clock);
+ gstelement_class->query = GST_DEBUG_FUNCPTR (gst_audio_base_sink_query);
+
+ gstbasesink_class->fixate = GST_DEBUG_FUNCPTR (gst_audio_base_sink_fixate);
+ gstbasesink_class->set_caps = GST_DEBUG_FUNCPTR (gst_audio_base_sink_setcaps);
+ gstbasesink_class->event = GST_DEBUG_FUNCPTR (gst_audio_base_sink_event);
+ gstbasesink_class->wait_eos =
+ GST_DEBUG_FUNCPTR (gst_audio_base_sink_wait_eos);
+ gstbasesink_class->get_times =
+ GST_DEBUG_FUNCPTR (gst_audio_base_sink_get_times);
+ gstbasesink_class->preroll = GST_DEBUG_FUNCPTR (gst_audio_base_sink_preroll);
+ gstbasesink_class->render = GST_DEBUG_FUNCPTR (gst_audio_base_sink_render);
+ gstbasesink_class->query = GST_DEBUG_FUNCPTR (gst_audio_base_sink_query_pad);
+ gstbasesink_class->activate_pull =
+ GST_DEBUG_FUNCPTR (gst_audio_base_sink_activate_pull);
+
+  /* ref class from a thread-safe context to work around a missing bit of
+ * thread-safety in GObject */
+ g_type_class_ref (GST_TYPE_AUDIO_CLOCK);
+ g_type_class_ref (GST_TYPE_AUDIO_RING_BUFFER);
+
+}
+
+static void
+gst_audio_base_sink_init (GstAudioBaseSink * audiobasesink)
+{
+ GstBaseSink *basesink;
+
+ audiobasesink->priv = GST_AUDIO_BASE_SINK_GET_PRIVATE (audiobasesink);
+
+ audiobasesink->buffer_time = DEFAULT_BUFFER_TIME;
+ audiobasesink->latency_time = DEFAULT_LATENCY_TIME;
+ audiobasesink->priv->slave_method = DEFAULT_SLAVE_METHOD;
+ audiobasesink->priv->drift_tolerance = DEFAULT_DRIFT_TOLERANCE;
+ audiobasesink->priv->alignment_threshold = DEFAULT_ALIGNMENT_THRESHOLD;
+ audiobasesink->priv->discont_wait = DEFAULT_DISCONT_WAIT;
+
+ audiobasesink->provided_clock = gst_audio_clock_new ("GstAudioSinkClock",
+ (GstAudioClockGetTimeFunc) gst_audio_base_sink_get_time, audiobasesink,
+ NULL);
+
+ basesink = GST_BASE_SINK_CAST (audiobasesink);
+ basesink->can_activate_push = TRUE;
+ basesink->can_activate_pull = DEFAULT_CAN_ACTIVATE_PULL;
+
+ gst_base_sink_set_last_sample_enabled (basesink, FALSE);
+ if (DEFAULT_PROVIDE_CLOCK)
+ GST_OBJECT_FLAG_SET (basesink, GST_ELEMENT_FLAG_PROVIDE_CLOCK);
+ else
+ GST_OBJECT_FLAG_UNSET (basesink, GST_ELEMENT_FLAG_PROVIDE_CLOCK);
+}
+
+static void
+gst_audio_base_sink_dispose (GObject * object)
+{
+ GstAudioBaseSink *sink;
+
+ sink = GST_AUDIO_BASE_SINK (object);
+
+ if (sink->provided_clock) {
+ gst_audio_clock_invalidate (sink->provided_clock);
+ gst_object_unref (sink->provided_clock);
+ sink->provided_clock = NULL;
+ }
+
+ if (sink->ringbuffer) {
+ gst_object_unparent (GST_OBJECT_CAST (sink->ringbuffer));
+ sink->ringbuffer = NULL;
+ }
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+
+static GstClock *
+gst_audio_base_sink_provide_clock (GstElement * elem)
+{
+ GstAudioBaseSink *sink;
+ GstClock *clock;
+
+ sink = GST_AUDIO_BASE_SINK (elem);
+
+ /* we have no ringbuffer (must be NULL state) */
+ if (sink->ringbuffer == NULL)
+ goto wrong_state;
+
+ if (!gst_audio_ring_buffer_is_acquired (sink->ringbuffer))
+ goto wrong_state;
+
+ GST_OBJECT_LOCK (sink);
+ if (!GST_OBJECT_FLAG_IS_SET (sink, GST_ELEMENT_FLAG_PROVIDE_CLOCK))
+ goto clock_disabled;
+
+ clock = GST_CLOCK_CAST (gst_object_ref (sink->provided_clock));
+ GST_OBJECT_UNLOCK (sink);
+
+ return clock;
+
+ /* ERRORS */
+wrong_state:
+ {
+ GST_DEBUG_OBJECT (sink, "ringbuffer not acquired");
+ return NULL;
+ }
+clock_disabled:
+ {
+ GST_DEBUG_OBJECT (sink, "clock provide disabled");
+ GST_OBJECT_UNLOCK (sink);
+ return NULL;
+ }
+}
+
+static gboolean
+gst_audio_base_sink_query_pad (GstBaseSink * bsink, GstQuery * query)
+{
+ gboolean res = FALSE;
+ GstAudioBaseSink *basesink;
+
+ basesink = GST_AUDIO_BASE_SINK (bsink);
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CONVERT:
+ {
+ GstFormat src_fmt, dest_fmt;
+ gint64 src_val, dest_val;
+
+ GST_LOG_OBJECT (basesink, "query convert");
+
+ if (basesink->ringbuffer) {
+ gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, NULL);
+ res =
+ gst_audio_ring_buffer_convert (basesink->ringbuffer, src_fmt,
+ src_val, dest_fmt, &dest_val);
+ if (res) {
+ gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
+ }
+ }
+ break;
+ }
+ default:
+ res = GST_BASE_SINK_CLASS (parent_class)->query (bsink, query);
+ break;
+ }
+ return res;
+}
+
+static gboolean
+gst_audio_base_sink_query (GstElement * element, GstQuery * query)
+{
+ gboolean res = FALSE;
+ GstAudioBaseSink *basesink;
+
+ basesink = GST_AUDIO_BASE_SINK (element);
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_LATENCY:
+ {
+ gboolean live, us_live;
+ GstClockTime min_l, max_l;
+
+ GST_DEBUG_OBJECT (basesink, "latency query");
+
+ /* ask parent first, it will do an upstream query for us. */
+ if ((res =
+ gst_base_sink_query_latency (GST_BASE_SINK_CAST (basesink), &live,
+ &us_live, &min_l, &max_l))) {
+ GstClockTime base_latency, min_latency, max_latency;
+
+ /* we and upstream are both live, adjust the min_latency */
+ if (live && us_live) {
+ GstAudioRingBufferSpec *spec;
+
+ GST_OBJECT_LOCK (basesink);
+ if (!basesink->ringbuffer || !basesink->ringbuffer->spec.info.rate) {
+ GST_OBJECT_UNLOCK (basesink);
+
+ GST_DEBUG_OBJECT (basesink,
+ "we are not yet negotiated, can't report latency yet");
+ res = FALSE;
+ goto done;
+ }
+ spec = &basesink->ringbuffer->spec;
+
+ basesink->priv->us_latency = min_l;
+
+ base_latency =
+ gst_util_uint64_scale_int (spec->seglatency * spec->segsize,
+ GST_SECOND, spec->info.rate * spec->info.bpf);
+ GST_OBJECT_UNLOCK (basesink);
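+ /* e.g. with the default 20 segments of 10ms each, base_latency works
+ * out to roughly 200ms, the duration of the whole ring buffer */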
+
+ /* we cannot go lower than the buffer size and the min peer latency */
+ min_latency = base_latency + min_l;
+ /* the max latency is the max of the peer, we can delay an infinite
+ * amount of time. */
+ max_latency = (max_l == -1) ? -1 : (base_latency + max_l);
+
+ GST_DEBUG_OBJECT (basesink,
+ "peer min %" GST_TIME_FORMAT ", our min latency: %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (min_l),
+ GST_TIME_ARGS (min_latency));
+ GST_DEBUG_OBJECT (basesink,
+ "peer max %" GST_TIME_FORMAT ", our max latency: %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (max_l),
+ GST_TIME_ARGS (max_latency));
+ } else {
+ GST_DEBUG_OBJECT (basesink,
+ "peer or we are not live, don't care about latency");
+ min_latency = min_l;
+ max_latency = max_l;
+ }
+ gst_query_set_latency (query, live, min_latency, max_latency);
+ }
+ break;
+ }
+ case GST_QUERY_CONVERT:
+ {
+ GstFormat src_fmt, dest_fmt;
+ gint64 src_val, dest_val;
+
+ GST_LOG_OBJECT (basesink, "query convert");
+
+ if (basesink->ringbuffer) {
+ gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, NULL);
+ res =
+ gst_audio_ring_buffer_convert (basesink->ringbuffer, src_fmt,
+ src_val, dest_fmt, &dest_val);
+ if (res) {
+ gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
+ }
+ }
+ break;
+ }
+ default:
+ res = GST_ELEMENT_CLASS (parent_class)->query (element, query);
+ break;
+ }
+
+done:
+ return res;
+}
+
+
+static GstClockTime
+gst_audio_base_sink_get_time (GstClock * clock, GstAudioBaseSink * sink)
+{
+ guint64 raw, samples;
+ guint delay;
+ GstClockTime result;
+
+ if (sink->ringbuffer == NULL || sink->ringbuffer->spec.info.rate == 0)
+ return GST_CLOCK_TIME_NONE;
+
+ /* our processed samples are always increasing */
+ raw = samples = gst_audio_ring_buffer_samples_done (sink->ringbuffer);
+
+ /* the number of samples not yet processed, this is still queued in the
+ * device (i.e. not yet played, in the playback case). */
+ delay = gst_audio_ring_buffer_delay (sink->ringbuffer);
+
+ if (G_LIKELY (samples >= delay))
+ samples -= delay;
+ else
+ samples = 0;
+
+ result = gst_util_uint64_scale_int (samples, GST_SECOND,
+ sink->ringbuffer->spec.info.rate);
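+ /* e.g. 44100 processed samples minus a 441 sample device delay at
+ * 44100Hz gives a clock time of roughly 0.99s */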
+
+ GST_DEBUG_OBJECT (sink,
+ "processed samples: raw %" G_GUINT64_FORMAT ", delay %u, real %"
+ G_GUINT64_FORMAT ", time %" GST_TIME_FORMAT,
+ raw, delay, samples, GST_TIME_ARGS (result));
+
+ return result;
+}
+
+/**
+ * gst_audio_base_sink_set_provide_clock:
+ * @sink: a #GstAudioBaseSink
+ * @provide: new state
+ *
+ * Controls whether @sink will provide a clock or not. If @provide is %TRUE,
+ * gst_element_provide_clock() will return a clock that reflects the datarate
+ * of @sink. If @provide is %FALSE, gst_element_provide_clock() will return %NULL.
+ *
+ * Since: 0.10.16
+ */
+void
+gst_audio_base_sink_set_provide_clock (GstAudioBaseSink * sink,
+ gboolean provide)
+{
+ g_return_if_fail (GST_IS_AUDIO_BASE_SINK (sink));
+
+ GST_OBJECT_LOCK (sink);
+ if (provide)
+ GST_OBJECT_FLAG_SET (sink, GST_ELEMENT_FLAG_PROVIDE_CLOCK);
+ else
+ GST_OBJECT_FLAG_UNSET (sink, GST_ELEMENT_FLAG_PROVIDE_CLOCK);
+ GST_OBJECT_UNLOCK (sink);
+}
+
+/**
+ * gst_audio_base_sink_get_provide_clock:
+ * @sink: a #GstAudioBaseSink
+ *
+ * Queries whether @sink will provide a clock or not. See also
+ * gst_audio_base_sink_set_provide_clock().
+ *
+ * Returns: %TRUE if @sink will provide a clock.
+ *
+ * Since: 0.10.16
+ */
+gboolean
+gst_audio_base_sink_get_provide_clock (GstAudioBaseSink * sink)
+{
+ gboolean result;
+
+ g_return_val_if_fail (GST_IS_AUDIO_BASE_SINK (sink), FALSE);
+
+ GST_OBJECT_LOCK (sink);
+ result = GST_OBJECT_FLAG_IS_SET (sink, GST_ELEMENT_FLAG_PROVIDE_CLOCK);
+ GST_OBJECT_UNLOCK (sink);
+
+ return result;
+}
+
+/**
+ * gst_audio_base_sink_set_slave_method:
+ * @sink: a #GstAudioBaseSink
+ * @method: the new slave method
+ *
+ * Controls how clock slaving will be performed in @sink.
+ *
+ * Since: 0.10.16
+ */
+void
+gst_audio_base_sink_set_slave_method (GstAudioBaseSink * sink,
+ GstAudioBaseSinkSlaveMethod method)
+{
+ g_return_if_fail (GST_IS_AUDIO_BASE_SINK (sink));
+
+ GST_OBJECT_LOCK (sink);
+ sink->priv->slave_method = method;
+ GST_OBJECT_UNLOCK (sink);
+}
+
+/**
+ * gst_audio_base_sink_get_slave_method:
+ * @sink: a #GstAudioBaseSink
+ *
+ * Get the current slave method used by @sink.
+ *
+ * Returns: The current slave method used by @sink.
+ *
+ * Since: 0.10.16
+ */
+GstAudioBaseSinkSlaveMethod
+gst_audio_base_sink_get_slave_method (GstAudioBaseSink * sink)
+{
+ GstAudioBaseSinkSlaveMethod result;
+
+ g_return_val_if_fail (GST_IS_AUDIO_BASE_SINK (sink), -1);
+
+ GST_OBJECT_LOCK (sink);
+ result = sink->priv->slave_method;
+ GST_OBJECT_UNLOCK (sink);
+
+ return result;
+}
+
+
+/**
+ * gst_audio_base_sink_set_drift_tolerance:
+ * @sink: a #GstAudioBaseSink
+ * @drift_tolerance: the new drift tolerance in microseconds
+ *
+ * Controls the sink's drift tolerance.
+ *
+ * Since: 0.10.31
+ */
+void
+gst_audio_base_sink_set_drift_tolerance (GstAudioBaseSink * sink,
+ gint64 drift_tolerance)
+{
+ g_return_if_fail (GST_IS_AUDIO_BASE_SINK (sink));
+
+ GST_OBJECT_LOCK (sink);
+ sink->priv->drift_tolerance = drift_tolerance;
+ GST_OBJECT_UNLOCK (sink);
+}
+
+/**
+ * gst_audio_base_sink_get_drift_tolerance:
+ * @sink: a #GstAudioBaseSink
+ *
+ * Get the current drift tolerance, in microseconds, used by @sink.
+ *
+ * Returns: The current drift tolerance used by @sink.
+ *
+ * Since: 0.10.31
+ */
+gint64
+gst_audio_base_sink_get_drift_tolerance (GstAudioBaseSink * sink)
+{
+ gint64 result;
+
+ g_return_val_if_fail (GST_IS_AUDIO_BASE_SINK (sink), -1);
+
+ GST_OBJECT_LOCK (sink);
+ result = sink->priv->drift_tolerance;
+ GST_OBJECT_UNLOCK (sink);
+
+ return result;
+}
+
+/**
+ * gst_audio_base_sink_set_alignment_threshold:
+ * @sink: a #GstAudioBaseSink
+ * @alignment_threshold: the new alignment threshold in nanoseconds
+ *
+ * Controls the sink's alignment threshold.
+ *
+ * Since: 0.10.36
+ */
+void
+gst_audio_base_sink_set_alignment_threshold (GstAudioBaseSink * sink,
+ GstClockTime alignment_threshold)
+{
+ g_return_if_fail (GST_IS_AUDIO_BASE_SINK (sink));
+
+ GST_OBJECT_LOCK (sink);
+ sink->priv->alignment_threshold = alignment_threshold;
+ GST_OBJECT_UNLOCK (sink);
+}
+
+/**
+ * gst_audio_base_sink_get_alignment_threshold:
+ * @sink: a #GstAudioBaseSink
+ *
+ * Get the current alignment threshold, in nanoseconds, used by @sink.
+ *
+ * Returns: The current alignment threshold used by @sink.
+ *
+ * Since: 0.10.36
+ */
+GstClockTime
+gst_audio_base_sink_get_alignment_threshold (GstAudioBaseSink * sink)
+{
+ gint64 result;
+
+ g_return_val_if_fail (GST_IS_AUDIO_BASE_SINK (sink), -1);
+
+ GST_OBJECT_LOCK (sink);
+ result = sink->priv->alignment_threshold;
+ GST_OBJECT_UNLOCK (sink);
+
+ return result;
+}
+
+/**
+ * gst_audio_base_sink_set_discont_wait:
+ * @sink: a #GstAudioBaseSink
+ * @discont_wait: the new discont wait in nanoseconds
+ *
+ * Controls how long the sink will wait before creating a discontinuity.
+ *
+ * Since: 0.10.36
+ */
+void
+gst_audio_base_sink_set_discont_wait (GstAudioBaseSink * sink,
+ GstClockTime discont_wait)
+{
+ g_return_if_fail (GST_IS_AUDIO_BASE_SINK (sink));
+
+ GST_OBJECT_LOCK (sink);
+ sink->priv->discont_wait = discont_wait;
+ GST_OBJECT_UNLOCK (sink);
+}
+
+/**
+ * gst_audio_base_sink_get_discont_wait:
+ * @sink: a #GstAudioBaseSink
+ *
+ * Get the current discont wait, in nanoseconds, used by @sink.
+ *
+ * Returns: The current discont wait used by @sink.
+ *
+ * Since: 0.10.36
+ */
+GstClockTime
+gst_audio_base_sink_get_discont_wait (GstAudioBaseSink * sink)
+{
+ GstClockTime result;
+
+ g_return_val_if_fail (GST_IS_AUDIO_BASE_SINK (sink), -1);
+
+ GST_OBJECT_LOCK (sink);
+ result = sink->priv->discont_wait;
+ GST_OBJECT_UNLOCK (sink);
+
+ return result;
+}
+
+static void
+gst_audio_base_sink_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstAudioBaseSink *sink;
+
+ sink = GST_AUDIO_BASE_SINK (object);
+
+ switch (prop_id) {
+ case PROP_BUFFER_TIME:
+ sink->buffer_time = g_value_get_int64 (value);
+ break;
+ case PROP_LATENCY_TIME:
+ sink->latency_time = g_value_get_int64 (value);
+ break;
+ case PROP_PROVIDE_CLOCK:
+ gst_audio_base_sink_set_provide_clock (sink, g_value_get_boolean (value));
+ break;
+ case PROP_SLAVE_METHOD:
+ gst_audio_base_sink_set_slave_method (sink, g_value_get_enum (value));
+ break;
+ case PROP_CAN_ACTIVATE_PULL:
+ GST_BASE_SINK (sink)->can_activate_pull = g_value_get_boolean (value);
+ break;
+ case PROP_DRIFT_TOLERANCE:
+ gst_audio_base_sink_set_drift_tolerance (sink, g_value_get_int64 (value));
+ break;
+ case PROP_ALIGNMENT_THRESHOLD:
+ gst_audio_base_sink_set_alignment_threshold (sink,
+ g_value_get_uint64 (value));
+ break;
+ case PROP_DISCONT_WAIT:
+ gst_audio_base_sink_set_discont_wait (sink, g_value_get_uint64 (value));
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_audio_base_sink_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstAudioBaseSink *sink;
+
+ sink = GST_AUDIO_BASE_SINK (object);
+
+ switch (prop_id) {
+ case PROP_BUFFER_TIME:
+ g_value_set_int64 (value, sink->buffer_time);
+ break;
+ case PROP_LATENCY_TIME:
+ g_value_set_int64 (value, sink->latency_time);
+ break;
+ case PROP_PROVIDE_CLOCK:
+ g_value_set_boolean (value, gst_audio_base_sink_get_provide_clock (sink));
+ break;
+ case PROP_SLAVE_METHOD:
+ g_value_set_enum (value, gst_audio_base_sink_get_slave_method (sink));
+ break;
+ case PROP_CAN_ACTIVATE_PULL:
+ g_value_set_boolean (value, GST_BASE_SINK (sink)->can_activate_pull);
+ break;
+ case PROP_DRIFT_TOLERANCE:
+ g_value_set_int64 (value, gst_audio_base_sink_get_drift_tolerance (sink));
+ break;
+ case PROP_ALIGNMENT_THRESHOLD:
+ g_value_set_uint64 (value,
+ gst_audio_base_sink_get_alignment_threshold (sink));
+ break;
+ case PROP_DISCONT_WAIT:
+ g_value_set_uint64 (value, gst_audio_base_sink_get_discont_wait (sink));
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static gboolean
+gst_audio_base_sink_setcaps (GstBaseSink * bsink, GstCaps * caps)
+{
+ GstAudioBaseSink *sink = GST_AUDIO_BASE_SINK (bsink);
+ GstAudioRingBufferSpec *spec;
+ GstClockTime now;
+ GstClockTime crate_num, crate_denom;
+
+ if (!sink->ringbuffer)
+ return FALSE;
+
+ spec = &sink->ringbuffer->spec;
+
+ GST_DEBUG_OBJECT (sink, "release old ringbuffer");
+
+ /* get current time, updates the last_time. When the subclass has a clock that
+ * restarts from 0 when a new format is negotiated, it will call
+ * gst_audio_clock_reset() which will use this last_time to create an offset
+ * so that time from the clock keeps on increasing monotonically. */
+ now = gst_clock_get_time (sink->provided_clock);
+
+ GST_DEBUG_OBJECT (sink, "time was %" GST_TIME_FORMAT, GST_TIME_ARGS (now));
+
+ /* release old ringbuffer */
+ gst_audio_ring_buffer_pause (sink->ringbuffer);
+ gst_audio_ring_buffer_activate (sink->ringbuffer, FALSE);
+ gst_audio_ring_buffer_release (sink->ringbuffer);
+
+ GST_DEBUG_OBJECT (sink, "parse caps");
+
+ spec->buffer_time = sink->buffer_time;
+ spec->latency_time = sink->latency_time;
+
+ /* parse new caps */
+ if (!gst_audio_ring_buffer_parse_caps (spec, caps))
+ goto parse_error;
+
+ gst_audio_ring_buffer_debug_spec_buff (spec);
+
+ GST_DEBUG_OBJECT (sink, "acquire ringbuffer");
+ if (!gst_audio_ring_buffer_acquire (sink->ringbuffer, spec))
+ goto acquire_error;
+
+ if (bsink->pad_mode == GST_PAD_MODE_PUSH) {
+ GST_DEBUG_OBJECT (sink, "activate ringbuffer");
+ gst_audio_ring_buffer_activate (sink->ringbuffer, TRUE);
+ }
+
+ /* due to possible changes in the spec we should recalibrate the clock */
+ gst_clock_get_calibration (sink->provided_clock, NULL, NULL,
+ &crate_num, &crate_denom);
+ gst_clock_set_calibration (sink->provided_clock,
+ gst_clock_get_internal_time (sink->provided_clock), now, crate_num,
+ crate_denom);
+
+ /* calculate actual latency and buffer times.
+ * FIXME: In 0.11, store the latency_time internally in ns */
+ spec->latency_time = gst_util_uint64_scale (spec->segsize,
+ (GST_SECOND / GST_USECOND), spec->info.rate * spec->info.bpf);
+
+ spec->buffer_time = spec->segtotal * spec->latency_time;
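+ /* e.g. a 1764 byte segment at 44100Hz stereo S16 (4 bytes per frame) is
+ * 441 samples, i.e. 10000us; with 20 segments buffer_time then becomes
+ * 200000us */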
+
+ gst_audio_ring_buffer_debug_spec_buff (spec);
+
+ return TRUE;
+
+ /* ERRORS */
+parse_error:
+ {
+ GST_DEBUG_OBJECT (sink, "could not parse caps");
+ GST_ELEMENT_ERROR (sink, STREAM, FORMAT,
+ (NULL), ("cannot parse audio format."));
+ return FALSE;
+ }
+acquire_error:
+ {
+ GST_DEBUG_OBJECT (sink, "could not acquire ringbuffer");
+ return FALSE;
+ }
+}
+
+static void
+gst_audio_base_sink_fixate (GstBaseSink * bsink, GstCaps * caps)
+{
+ GstStructure *s;
+ gint width, depth;
+
+ s = gst_caps_get_structure (caps, 0);
+
+ /* fields for all formats */
+ gst_structure_fixate_field_nearest_int (s, "rate", 44100);
+ gst_structure_fixate_field_nearest_int (s, "channels", 2);
+ gst_structure_fixate_field_nearest_int (s, "width", 16);
+
+ /* fields for int */
+ if (gst_structure_has_field (s, "depth")) {
+ gst_structure_get_int (s, "width", &width);
+ /* round width to nearest multiple of 8 for the depth */
+ depth = GST_ROUND_UP_8 (width);
+ gst_structure_fixate_field_nearest_int (s, "depth", depth);
+ }
+ if (gst_structure_has_field (s, "signed"))
+ gst_structure_fixate_field_boolean (s, "signed", TRUE);
+ if (gst_structure_has_field (s, "endianness"))
+ gst_structure_fixate_field_nearest_int (s, "endianness", G_BYTE_ORDER);
+
+ GST_BASE_SINK_CLASS (parent_class)->fixate (bsink, caps);
+}
+
+static void
+gst_audio_base_sink_get_times (GstBaseSink * bsink, GstBuffer * buffer,
+ GstClockTime * start, GstClockTime * end)
+{
+ /* our clock sync is a bit too much for the base class to handle so
+ * we implement it ourselves. */
+ *start = GST_CLOCK_TIME_NONE;
+ *end = GST_CLOCK_TIME_NONE;
+}
+
+/* This waits for the drain to happen and can be canceled */
+static gboolean
+gst_audio_base_sink_drain (GstAudioBaseSink * sink)
+{
+ if (!sink->ringbuffer)
+ return TRUE;
+ if (!sink->ringbuffer->spec.info.rate)
+ return TRUE;
+
+ /* if PLAYING is interrupted,
+ * arrange to have clock running when going to PLAYING again */
+ g_atomic_int_set (&sink->eos_rendering, 1);
+
+ /* need to start playback before we can drain, but only when
+ * we have successfully negotiated a format and thus acquired the
+ * ringbuffer. */
+ if (gst_audio_ring_buffer_is_acquired (sink->ringbuffer))
+ gst_audio_ring_buffer_start (sink->ringbuffer);
+
+ if (sink->priv->eos_time != -1) {
+ GST_DEBUG_OBJECT (sink,
+ "last sample time %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (sink->priv->eos_time));
+
+ /* wait for the EOS time to be reached, this is the time when the last
+ * sample is played. */
+ gst_base_sink_wait_eos (GST_BASE_SINK (sink), sink->priv->eos_time, NULL);
+
+ GST_DEBUG_OBJECT (sink, "drained audio");
+ }
+ g_atomic_int_set (&sink->eos_rendering, 0);
+ return TRUE;
+}
+
+static GstFlowReturn
+gst_audio_base_sink_wait_eos (GstBaseSink * bsink, GstEvent * event)
+{
+ GstAudioBaseSink *sink = GST_AUDIO_BASE_SINK (bsink);
+ GstFlowReturn ret;
+
+ ret = GST_BASE_SINK_CLASS (parent_class)->event (bsink, event);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ /* now wait till we played everything */
+ gst_audio_base_sink_drain (sink);
+
+ return ret;
+}
+
+static gboolean
+gst_audio_base_sink_event (GstBaseSink * bsink, GstEvent * event)
+{
+ GstAudioBaseSink *sink = GST_AUDIO_BASE_SINK (bsink);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_FLUSH_START:
+ if (sink->ringbuffer)
+ gst_audio_ring_buffer_set_flushing (sink->ringbuffer, TRUE);
+ break;
+ case GST_EVENT_FLUSH_STOP:
+ /* always resync on sample after a flush */
+ sink->priv->avg_skew = -1;
+ sink->next_sample = -1;
+ sink->priv->eos_time = -1;
+ sink->priv->discont_time = -1;
+ if (sink->ringbuffer)
+ gst_audio_ring_buffer_set_flushing (sink->ringbuffer, FALSE);
+ break;
+ default:
+ break;
+ }
+ return GST_BASE_SINK_CLASS (parent_class)->event (bsink, event);
+}
+
+static GstFlowReturn
+gst_audio_base_sink_preroll (GstBaseSink * bsink, GstBuffer * buffer)
+{
+ GstAudioBaseSink *sink = GST_AUDIO_BASE_SINK (bsink);
+
+ if (!gst_audio_ring_buffer_is_acquired (sink->ringbuffer))
+ goto wrong_state;
+
+ /* we don't really do anything when prerolling. We could make a
+ * property to play this buffer to have some sort of scrubbing
+ * support. */
+ return GST_FLOW_OK;
+
+wrong_state:
+ {
+ GST_DEBUG_OBJECT (sink, "ringbuffer in wrong state");
+ GST_ELEMENT_ERROR (sink, STREAM, FORMAT, (NULL), ("sink not negotiated."));
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+}
+
+static guint64
+gst_audio_base_sink_get_offset (GstAudioBaseSink * sink)
+{
+ guint64 sample;
+ gint writeseg, segdone, sps;
+ gint diff;
+
+ /* assume we can append to the previous sample */
+ sample = sink->next_sample;
+ /* no previous sample, try to insert at position 0 */
+ if (sample == -1)
+ sample = 0;
+
+ sps = sink->ringbuffer->samples_per_seg;
+
+ /* figure out the segment and the offset inside the segment where
+ * the sample should be written. */
+ writeseg = sample / sps;
+
+ /* get the currently processed segment */
+ segdone = g_atomic_int_get (&sink->ringbuffer->segdone)
+ - sink->ringbuffer->segbase;
+
+ /* see how far away it is from the write segment */
+ diff = writeseg - segdone;
+ if (diff < 0) {
+ /* sample would be dropped, position to next playable position */
+ sample = (segdone + 1) * sps;
+ }
+
+ return sample;
+}
+
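+/* convert an external (pipeline clock) time to internal (audio clock) time
+ * using the current calibration, roughly
+ * cinternal + (external - cexternal) * crate_denom / crate_num, never going
+ * below 0. e.g. with cinternal = 0, cexternal = 1 * GST_SECOND and a
+ * calibrated rate of 1000/1001, an external time of 2 * GST_SECOND maps to
+ * roughly 1.001s of internal time */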
+static GstClockTime
+clock_convert_external (GstClockTime external, GstClockTime cinternal,
+ GstClockTime cexternal, GstClockTime crate_num, GstClockTime crate_denom)
+{
+ /* adjust for rate and speed */
+ if (external >= cexternal) {
+ external =
+ gst_util_uint64_scale (external - cexternal, crate_denom, crate_num);
+ external += cinternal;
+ } else {
+ external =
+ gst_util_uint64_scale (cexternal - external, crate_denom, crate_num);
+ if (cinternal > external)
+ external = cinternal - external;
+ else
+ external = 0;
+ }
+ return external;
+}
+
+/* algorithm to calculate sample positions that will result in resampling to
+ * match the clock rate of the master */
+static void
+gst_audio_base_sink_resample_slaving (GstAudioBaseSink * sink,
+ GstClockTime render_start, GstClockTime render_stop,
+ GstClockTime * srender_start, GstClockTime * srender_stop)
+{
+ GstClockTime cinternal, cexternal;
+ GstClockTime crate_num, crate_denom;
+
+ /* FIXME, we can sample and add observations here or use the timeouts on the
+ * clock. No idea which one is better or more stable. The timeout seems more
+ * arbitrary but this one seems more demanding and does not work when there is
+ * no data coming into the sink. */
+#if 0
+ GstClockTime etime, itime;
+ gdouble r_squared;
+
+ /* sample clocks and figure out clock skew */
+ etime = gst_clock_get_time (GST_ELEMENT_CLOCK (sink));
+ itime = gst_audio_clock_get_time (sink->provided_clock);
+
+ /* add new observation */
+ gst_clock_add_observation (sink->provided_clock, itime, etime, &r_squared);
+#endif
+
+ /* get calibration parameters to compensate for speed and offset differences
+ * when we are slaved */
+ gst_clock_get_calibration (sink->provided_clock, &cinternal, &cexternal,
+ &crate_num, &crate_denom);
+
+ GST_DEBUG_OBJECT (sink, "internal %" GST_TIME_FORMAT " external %"
+ GST_TIME_FORMAT " %" G_GUINT64_FORMAT "/%" G_GUINT64_FORMAT " = %f",
+ GST_TIME_ARGS (cinternal), GST_TIME_ARGS (cexternal), crate_num,
+ crate_denom, gst_guint64_to_gdouble (crate_num) /
+ gst_guint64_to_gdouble (crate_denom));
+
+ if (crate_num == 0)
+ crate_denom = crate_num = 1;
+
+ /* bring external time to internal time */
+ render_start = clock_convert_external (render_start, cinternal, cexternal,
+ crate_num, crate_denom);
+ render_stop = clock_convert_external (render_stop, cinternal, cexternal,
+ crate_num, crate_denom);
+
+ GST_DEBUG_OBJECT (sink,
+ "after slaving: start %" GST_TIME_FORMAT " - stop %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (render_start), GST_TIME_ARGS (render_stop));
+
+ *srender_start = render_start;
+ *srender_stop = render_stop;
+}
+
+/* algorithm to calculate sample positions that will result in changing the
+ * playout pointer to match the clock rate of the master */
+static void
+gst_audio_base_sink_skew_slaving (GstAudioBaseSink * sink,
+ GstClockTime render_start, GstClockTime render_stop,
+ GstClockTime * srender_start, GstClockTime * srender_stop)
+{
+ GstClockTime cinternal, cexternal, crate_num, crate_denom;
+ GstClockTime etime, itime;
+ GstClockTimeDiff skew, mdrift, mdrift2;
+ gint driftsamples;
+ gint64 last_align;
+
+ /* get calibration parameters to compensate for offsets */
+ gst_clock_get_calibration (sink->provided_clock, &cinternal, &cexternal,
+ &crate_num, &crate_denom);
+
+ /* sample clocks and figure out clock skew */
+ etime = gst_clock_get_time (GST_ELEMENT_CLOCK (sink));
+ itime = gst_audio_clock_get_time (sink->provided_clock);
+ itime = gst_audio_clock_adjust (sink->provided_clock, itime);
+
+ GST_DEBUG_OBJECT (sink,
+ "internal %" GST_TIME_FORMAT " external %" GST_TIME_FORMAT
+ " cinternal %" GST_TIME_FORMAT " cexternal %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (itime), GST_TIME_ARGS (etime),
+ GST_TIME_ARGS (cinternal), GST_TIME_ARGS (cexternal));
+
+ /* make sure we never go below 0 */
+ etime = etime > cexternal ? etime - cexternal : 0;
+ itime = itime > cinternal ? itime - cinternal : 0;
+
+ /* do itime - etime.
+ * positive value means external clock goes slower
+ * negative value means external clock goes faster */
+ skew = GST_CLOCK_DIFF (etime, itime);
+ if (sink->priv->avg_skew == -1) {
+ /* first observation */
+ sink->priv->avg_skew = skew;
+ } else {
+ /* next observations use a moving average */
+ sink->priv->avg_skew = (31 * sink->priv->avg_skew + skew) / 32;
+ }
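+ /* this running average is effectively an exponential moving average that
+ * gives the newest observation a weight of 1/32, so a single outlier only
+ * moves the average slightly */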
+
+ GST_DEBUG_OBJECT (sink, "internal %" GST_TIME_FORMAT " external %"
+ GST_TIME_FORMAT " skew %" G_GINT64_FORMAT " avg %" G_GINT64_FORMAT,
+ GST_TIME_ARGS (itime), GST_TIME_ARGS (etime), skew, sink->priv->avg_skew);
+
+ /* the max drift we allow */
+ mdrift = sink->priv->drift_tolerance * 1000;
+ mdrift2 = mdrift / 2;
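+ /* drift_tolerance is in microseconds, the factor 1000 brings it to
+ * nanoseconds; with the default of 40000us this gives mdrift = 40ms and
+ * mdrift2 = 20ms */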
+
+ /* adjust playout pointer based on skew */
+ if (sink->priv->avg_skew > mdrift2) {
+ /* master is running slower, move internal time forward */
+ GST_WARNING_OBJECT (sink,
+ "correct clock skew %" G_GINT64_FORMAT " > %" G_GINT64_FORMAT,
+ sink->priv->avg_skew, mdrift2);
+ cexternal = cexternal > mdrift ? cexternal - mdrift : 0;
+ sink->priv->avg_skew -= mdrift;
+
+ driftsamples = (sink->ringbuffer->spec.info.rate * mdrift) / GST_SECOND;
+ last_align = sink->priv->last_align;
+
+ /* if we were aligning in the wrong direction or we aligned more than what we
+ * will correct, resync */
+ if (last_align < 0 || last_align > driftsamples)
+ sink->next_sample = -1;
+
+ GST_DEBUG_OBJECT (sink,
+ "last_align %" G_GINT64_FORMAT " driftsamples %u, next %"
+ G_GUINT64_FORMAT, last_align, driftsamples, sink->next_sample);
+
+ gst_clock_set_calibration (sink->provided_clock, cinternal, cexternal,
+ crate_num, crate_denom);
+ } else if (sink->priv->avg_skew < -mdrift2) {
+ /* master is running faster, move external time forwards */
+ GST_WARNING_OBJECT (sink,
+ "correct clock skew %" G_GINT64_FORMAT " < %" G_GINT64_FORMAT,
+ sink->priv->avg_skew, -mdrift2);
+ cexternal += mdrift;
+ sink->priv->avg_skew += mdrift;
+
+ driftsamples = (sink->ringbuffer->spec.info.rate * mdrift) / GST_SECOND;
+ last_align = sink->priv->last_align;
+
+ /* if we were aligning in the wrong direction or we aligned more than what we
+ * will correct, resync */
+ if (last_align > 0 || -last_align > driftsamples)
+ sink->next_sample = -1;
+
+ GST_DEBUG_OBJECT (sink,
+ "last_align %" G_GINT64_FORMAT " driftsamples %u, next %"
+ G_GUINT64_FORMAT, last_align, driftsamples, sink->next_sample);
+
+ gst_clock_set_calibration (sink->provided_clock, cinternal, cexternal,
+ crate_num, crate_denom);
+ }
+
+ /* convert, ignoring speed */
+ render_start = clock_convert_external (render_start, cinternal, cexternal,
+ crate_num, crate_denom);
+ render_stop = clock_convert_external (render_stop, cinternal, cexternal,
+ crate_num, crate_denom);
+
+ *srender_start = render_start;
+ *srender_stop = render_stop;
+}
+
+/* apply the clock offset but do no slaving otherwise */
+static void
+gst_audio_base_sink_none_slaving (GstAudioBaseSink * sink,
+ GstClockTime render_start, GstClockTime render_stop,
+ GstClockTime * srender_start, GstClockTime * srender_stop)
+{
+ GstClockTime cinternal, cexternal, crate_num, crate_denom;
+
+ /* get calibration parameters to compensate for offsets */
+ gst_clock_get_calibration (sink->provided_clock, &cinternal, &cexternal,
+ &crate_num, &crate_denom);
+
+ /* convert, ignoring speed */
+ render_start = clock_convert_external (render_start, cinternal, cexternal,
+ crate_num, crate_denom);
+ render_stop = clock_convert_external (render_stop, cinternal, cexternal,
+ crate_num, crate_denom);
+
+ *srender_start = render_start;
+ *srender_stop = render_stop;
+}
+
+/* converts render_start and render_stop to their slaved values */
+static void
+gst_audio_base_sink_handle_slaving (GstAudioBaseSink * sink,
+ GstClockTime render_start, GstClockTime render_stop,
+ GstClockTime * srender_start, GstClockTime * srender_stop)
+{
+ switch (sink->priv->slave_method) {
+ case GST_AUDIO_BASE_SINK_SLAVE_RESAMPLE:
+ gst_audio_base_sink_resample_slaving (sink, render_start, render_stop,
+ srender_start, srender_stop);
+ break;
+ case GST_AUDIO_BASE_SINK_SLAVE_SKEW:
+ gst_audio_base_sink_skew_slaving (sink, render_start, render_stop,
+ srender_start, srender_stop);
+ break;
+ case GST_AUDIO_BASE_SINK_SLAVE_NONE:
+ gst_audio_base_sink_none_slaving (sink, render_start, render_stop,
+ srender_start, srender_stop);
+ break;
+ default:
+ g_warning ("unknown slaving method %d", sink->priv->slave_method);
+ break;
+ }
+}
+
+/* must be called with LOCK */
+static GstFlowReturn
+gst_audio_base_sink_sync_latency (GstBaseSink * bsink, GstMiniObject * obj)
+{
+ GstClock *clock;
+ GstClockReturn status;
+ GstClockTime time, render_delay;
+ GstFlowReturn ret;
+ GstAudioBaseSink *sink;
+ GstClockTime itime, etime;
+ GstClockTime rate_num, rate_denom;
+ GstClockTimeDiff jitter;
+
+ sink = GST_AUDIO_BASE_SINK (bsink);
+
+ clock = GST_ELEMENT_CLOCK (sink);
+ if (G_UNLIKELY (clock == NULL))
+ goto no_clock;
+
+ /* we provided the global clock, don't need to do anything special */
+ if (clock == sink->provided_clock)
+ goto no_slaving;
+
+ GST_OBJECT_UNLOCK (sink);
+
+ do {
+ GST_DEBUG_OBJECT (sink, "checking preroll");
+
+ ret = gst_base_sink_do_preroll (bsink, obj);
+ if (ret != GST_FLOW_OK)
+ goto flushing;
+
+ GST_OBJECT_LOCK (sink);
+ time = sink->priv->us_latency;
+ GST_OBJECT_UNLOCK (sink);
+
+ /* Render delay is added onto our own latency, and needs
+ * to be subtracted as well */
+ render_delay = gst_base_sink_get_render_delay (bsink);
+
+ if (G_LIKELY (time > render_delay))
+ time -= render_delay;
+ else
+ time = 0;
+
+ /* preroll done, we can sync since we are in PLAYING now. */
+ GST_DEBUG_OBJECT (sink, "possibly waiting for clock to reach %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (time));
+
+ /* wait for the clock, this can be interrupted because we got shut down or
+ * we PAUSED. */
+ status = gst_base_sink_wait_clock (bsink, time, &jitter);
+
+ GST_DEBUG_OBJECT (sink, "clock returned %d %" GST_TIME_FORMAT, status,
+ GST_TIME_ARGS (jitter));
+
+ /* invalid time, no clock or sync disabled, just continue then */
+ if (status == GST_CLOCK_BADTIME)
+ break;
+
+ /* waiting could have been interrupted and we can be flushing now */
+ if (G_UNLIKELY (bsink->flushing))
+ goto flushing;
+
+ /* retry if we got unscheduled, which means we did not reach the timeout
+ * yet. If some other error occurs, we continue. */
+ } while (status == GST_CLOCK_UNSCHEDULED);
+
+ GST_OBJECT_LOCK (sink);
+ GST_DEBUG_OBJECT (sink, "latency synced");
+
+ /* when we prerolled in time, we can accurately set the calibration,
+ * our internal clock should exactly have been the latency (== the running
+ * time of the external clock) */
+ etime = GST_ELEMENT_CAST (sink)->base_time + time;
+ itime = gst_audio_clock_get_time (sink->provided_clock);
+ itime = gst_audio_clock_adjust (sink->provided_clock, itime);
+
+ if (status == GST_CLOCK_EARLY) {
+ /* when we prerolled late, we have to take into account the lateness */
+ GST_DEBUG_OBJECT (sink, "late preroll, adding jitter");
+ etime += jitter;
+ }
+
+ /* start ringbuffer so we can start slaving right away when we need to */
+ gst_audio_ring_buffer_start (sink->ringbuffer);
+
+ GST_DEBUG_OBJECT (sink,
+ "internal time: %" GST_TIME_FORMAT " external time: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (itime), GST_TIME_ARGS (etime));
+
+ /* copy the original calibrated rate but update the internal and external
+ * times. */
+ gst_clock_get_calibration (sink->provided_clock, NULL, NULL, &rate_num,
+ &rate_denom);
+ gst_clock_set_calibration (sink->provided_clock, itime, etime,
+ rate_num, rate_denom);
+
+ switch (sink->priv->slave_method) {
+ case GST_AUDIO_BASE_SINK_SLAVE_RESAMPLE:
+ /* only set as master when we are resampling */
+ GST_DEBUG_OBJECT (sink, "Setting clock as master");
+ gst_clock_set_master (sink->provided_clock, clock);
+ break;
+ case GST_AUDIO_BASE_SINK_SLAVE_SKEW:
+ case GST_AUDIO_BASE_SINK_SLAVE_NONE:
+ default:
+ break;
+ }
+
+ sink->priv->avg_skew = -1;
+ sink->next_sample = -1;
+ sink->priv->eos_time = -1;
+ sink->priv->discont_time = -1;
+
+ return GST_FLOW_OK;
+
+ /* ERRORS */
+no_clock:
+ {
+ GST_DEBUG_OBJECT (sink, "we have no clock");
+ return GST_FLOW_OK;
+ }
+no_slaving:
+ {
+ GST_DEBUG_OBJECT (sink, "we are not slaved");
+ return GST_FLOW_OK;
+ }
+flushing:
+ {
+ GST_DEBUG_OBJECT (sink, "we are flushing");
+ GST_OBJECT_LOCK (sink);
+ return GST_FLOW_WRONG_STATE;
+ }
+}
+
+static gint64
+gst_audio_base_sink_get_alignment (GstAudioBaseSink * sink,
+ GstClockTime sample_offset)
+{
+ GstAudioRingBuffer *ringbuf = sink->ringbuffer;
+ gint64 align;
+ gint64 sample_diff;
+ gint64 max_sample_diff;
+ gint segdone = g_atomic_int_get (&ringbuf->segdone) - ringbuf->segbase;
+ gint64 samples_done = segdone * ringbuf->samples_per_seg;
+ gint64 headroom = sample_offset - samples_done;
+ gboolean allow_align = TRUE;
+ gboolean discont = FALSE;
+ gint rate;
+
+ /* now try to align the sample to the previous one, first see how big the
+ * difference is. */
+ if (sample_offset >= sink->next_sample)
+ sample_diff = sample_offset - sink->next_sample;
+ else
+ sample_diff = sink->next_sample - sample_offset;
+
+ rate = GST_AUDIO_INFO_RATE (&ringbuf->spec.info);
+
+ /* calculate the max allowed drift in units of samples. */
+ max_sample_diff = gst_util_uint64_scale_int (sink->priv->alignment_threshold,
+ rate, GST_SECOND);
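+ /* e.g. with the default 40ms alignment threshold at 44100Hz this allows
+ * up to 1764 samples of drift before a resync is considered */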
+
+ /* calc align with previous sample */
+ align = sink->next_sample - sample_offset;
+
+ /* don't align if it means writing behind the read-segment */
+ if (sample_diff > headroom && align < 0)
+ allow_align = FALSE;
+
+ if (G_UNLIKELY (sample_diff >= max_sample_diff)) {
+ /* wait before deciding to make a discontinuity */
+ if (sink->priv->discont_wait > 0) {
+ GstClockTime time = gst_util_uint64_scale_int (sample_offset,
+ GST_SECOND, rate);
+ if (sink->priv->discont_time == -1) {
+ /* discont candidate */
+ sink->priv->discont_time = time;
+ } else if (time - sink->priv->discont_time >= sink->priv->discont_wait) {
+ /* discont_wait expired, discontinuity detected */
+ discont = TRUE;
+ sink->priv->discont_time = -1;
+ }
+ } else {
+ discont = TRUE;
+ }
+ } else if (G_UNLIKELY (sink->priv->discont_time != -1)) {
+ /* we have had a discont, but are now back on track! */
+ sink->priv->discont_time = -1;
+ }
+
+ if (G_LIKELY (!discont && allow_align)) {
+ GST_DEBUG_OBJECT (sink,
+ "align with prev sample, ABS (%" G_GINT64_FORMAT ") < %"
+ G_GINT64_FORMAT, align, max_sample_diff);
+ } else {
+ gint64 diff_s G_GNUC_UNUSED;
+
+ /* calculate sample diff in seconds for error message */
+ diff_s = gst_util_uint64_scale_int (sample_diff, GST_SECOND, rate);
+
+ /* timestamps drifted apart from previous samples too much, we need to
+ * resync. We log this as an element warning. */
+ GST_WARNING_OBJECT (sink,
+ "Unexpected discontinuity in audio timestamps of "
+ "%s%" GST_TIME_FORMAT ", resyncing",
+ sample_offset > sink->next_sample ? "+" : "-", GST_TIME_ARGS (diff_s));
+ align = 0;
+ }
+
+ return align;
+}
+
+static GstFlowReturn
+gst_audio_base_sink_render (GstBaseSink * bsink, GstBuffer * buf)
+{
+ guint64 in_offset;
+ GstClockTime time, stop, render_start, render_stop, sample_offset;
+ GstClockTimeDiff sync_offset, ts_offset;
+ GstAudioBaseSinkClass *bclass;
+ GstAudioBaseSink *sink;
+ GstAudioRingBuffer *ringbuf;
+ gint64 diff, align;
+ guint64 ctime, cstop;
+ gsize offset;
+ guint8 *data;
+ gsize size;
+ guint samples, written;
+ gint bpf, rate;
+ gint accum;
+ gint out_samples;
+ GstClockTime base_time, render_delay, latency;
+ GstClock *clock;
+ gboolean sync, slaved, align_next;
+ GstFlowReturn ret;
+ GstSegment clip_seg;
+ gint64 time_offset;
+ GstBuffer *out = NULL;
+
+ sink = GST_AUDIO_BASE_SINK (bsink);
+ bclass = GST_AUDIO_BASE_SINK_GET_CLASS (sink);
+
+ ringbuf = sink->ringbuffer;
+
+ /* can't do anything when we don't have the device */
+ if (G_UNLIKELY (!gst_audio_ring_buffer_is_acquired (ringbuf)))
+ goto wrong_state;
+
+ /* Wait for upstream latency before starting the ringbuffer, we do this so
+ * that we can align the first sample of the ringbuffer to the base_time +
+ * latency. */
+ GST_OBJECT_LOCK (sink);
+ base_time = GST_ELEMENT_CAST (sink)->base_time;
+ if (G_UNLIKELY (sink->priv->sync_latency)) {
+ ret = gst_audio_base_sink_sync_latency (bsink, GST_MINI_OBJECT_CAST (buf));
+ GST_OBJECT_UNLOCK (sink);
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
+ goto sync_latency_failed;
+ /* only do this once until we are set back to PLAYING */
+ sink->priv->sync_latency = FALSE;
+ } else {
+ GST_OBJECT_UNLOCK (sink);
+ }
+
+ /* Before we go on, let's see if we need to payload the data. If yes, we also
+ * need to unref the output buffer before leaving. */
+ if (bclass->payload) {
+ out = bclass->payload (sink, buf);
+
+ if (!out)
+ goto payload_failed;
+
+ buf = out;
+ }
+
+ bpf = GST_AUDIO_INFO_BPF (&ringbuf->spec.info);
+ rate = GST_AUDIO_INFO_RATE (&ringbuf->spec.info);
+
+ size = gst_buffer_get_size (buf);
+ if (G_UNLIKELY (size % bpf) != 0)
+ goto wrong_size;
+
+ samples = size / bpf;
+ out_samples = samples;
+
+ in_offset = GST_BUFFER_OFFSET (buf);
+ time = GST_BUFFER_TIMESTAMP (buf);
+
+ GST_DEBUG_OBJECT (sink,
+ "time %" GST_TIME_FORMAT ", offset %" G_GUINT64_FORMAT ", start %"
+ GST_TIME_FORMAT ", samples %u", GST_TIME_ARGS (time), in_offset,
+ GST_TIME_ARGS (bsink->segment.start), samples);
+
+ offset = 0;
+
+ /* if not valid timestamp or we can't clip or sync, try to play
+ * sample ASAP */
+ if (!GST_CLOCK_TIME_IS_VALID (time)) {
+ render_start = gst_audio_base_sink_get_offset (sink);
+ render_stop = render_start + samples;
+ GST_DEBUG_OBJECT (sink, "Buffer of size %" G_GSIZE_FORMAT " has no time."
+ " Using render_start=%" G_GUINT64_FORMAT, size, render_start);
+ /* we don't have a start so we don't know stop either */
+ stop = -1;
+ goto no_sync;
+ }
+
+ /* let's calc stop based on the number of samples in the buffer instead
+ * of trusting the DURATION */
+ stop = time + gst_util_uint64_scale_int (samples, GST_SECOND, rate);
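+ /* e.g. a buffer of 441 samples at 44100Hz makes stop 10ms after time */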
+
+ /* prepare the clipping segment. Since we will be subtracting ts-offset and
+ * device-delay later we scale the start and stop with those values so that we
+ * can correctly clip them */
+ clip_seg.format = GST_FORMAT_TIME;
+ clip_seg.start = bsink->segment.start;
+ clip_seg.stop = bsink->segment.stop;
+ clip_seg.duration = -1;
+
+ /* the sync offset is the combination of ts-offset and device-delay */
+ latency = gst_base_sink_get_latency (bsink);
+ ts_offset = gst_base_sink_get_ts_offset (bsink);
+ render_delay = gst_base_sink_get_render_delay (bsink);
+ sync_offset = ts_offset - render_delay + latency;
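+ /* e.g. a ts-offset of 5ms, a render delay of 2ms and a latency of 200ms
+ * give a sync offset of 203ms, which is added to the running time below */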
+
+ GST_DEBUG_OBJECT (sink,
+ "sync-offset %" G_GINT64_FORMAT ", render-delay %" GST_TIME_FORMAT
+ ", ts-offset %" G_GINT64_FORMAT, sync_offset,
+ GST_TIME_ARGS (render_delay), ts_offset);
+
+ /* compensate for ts-offset and device-delay; when negative we need to
+ * clip. */
+ if (sync_offset < 0) {
+ clip_seg.start += -sync_offset;
+ if (clip_seg.stop != -1)
+ clip_seg.stop += -sync_offset;
+ }
+
+ /* samples should be rendered based on their timestamp. All samples
+ * arriving before the segment.start or after segment.stop are to be
+ * thrown away. All samples should also be clipped to the segment
+ * boundaries */
+ if (!gst_segment_clip (&clip_seg, GST_FORMAT_TIME, time, stop, &ctime,
+ &cstop))
+ goto out_of_segment;
+
+ /* see if some clipping happened */
+ diff = ctime - time;
+ if (diff > 0) {
+ /* bring clipped time to samples */
+ diff = gst_util_uint64_scale_int (diff, rate, GST_SECOND);
+ GST_DEBUG_OBJECT (sink, "clipping start to %" GST_TIME_FORMAT " %"
+ G_GUINT64_FORMAT " samples", GST_TIME_ARGS (ctime), diff);
+ samples -= diff;
+ offset += diff * bpf;
+ time = ctime;
+ }
+ diff = stop - cstop;
+ if (diff > 0) {
+ /* bring clipped time to samples */
+ diff = gst_util_uint64_scale_int (diff, rate, GST_SECOND);
+ GST_DEBUG_OBJECT (sink, "clipping stop to %" GST_TIME_FORMAT " %"
+ G_GUINT64_FORMAT " samples", GST_TIME_ARGS (cstop), diff);
+ samples -= diff;
+ stop = cstop;
+ }
+
+ /* figure out how to sync */
+ if ((clock = GST_ELEMENT_CLOCK (bsink)))
+ sync = bsink->sync;
+ else
+ sync = FALSE;
+
+ if (!sync) {
+ /* no sync needed, play sample ASAP */
+ render_start = gst_audio_base_sink_get_offset (sink);
+ render_stop = render_start + samples;
+ GST_DEBUG_OBJECT (sink,
+ "no sync needed. Using render_start=%" G_GUINT64_FORMAT, render_start);
+ goto no_sync;
+ }
+
+ /* bring buffer start and stop times to running time */
+ render_start =
+ gst_segment_to_running_time (&bsink->segment, GST_FORMAT_TIME, time);
+ render_stop =
+ gst_segment_to_running_time (&bsink->segment, GST_FORMAT_TIME, stop);
+
+ GST_DEBUG_OBJECT (sink,
+ "running: start %" GST_TIME_FORMAT " - stop %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (render_start), GST_TIME_ARGS (render_stop));
+
+ /* store the time of the last sample, we'll use this to perform sync on the
+ * last sample when draining the buffer */
+ if (bsink->segment.rate >= 0.0) {
+ sink->priv->eos_time = render_stop;
+ } else {
+ sink->priv->eos_time = render_start;
+ }
+
+ /* compensate for ts-offset and delay; we know this will not underflow because we
+ * clipped above. */
+ GST_DEBUG_OBJECT (sink,
+ "compensating for sync-offset %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (sync_offset));
+ render_start += sync_offset;
+ render_stop += sync_offset;
+
+ GST_DEBUG_OBJECT (sink, "adding base_time %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (base_time));
+
+ /* add base time to sync against the clock */
+ render_start += base_time;
+ render_stop += base_time;
+
+ GST_DEBUG_OBJECT (sink,
+ "after compensation: start %" GST_TIME_FORMAT " - stop %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (render_start), GST_TIME_ARGS (render_stop));
+
+ if ((slaved = clock != sink->provided_clock)) {
+ /* handle clock slaving */
+ gst_audio_base_sink_handle_slaving (sink, render_start, render_stop,
+ &render_start, &render_stop);
+ } else {
+ /* no slaving needed but we need to adapt to the clock calibration
+ * parameters */
+ gst_audio_base_sink_none_slaving (sink, render_start, render_stop,
+ &render_start, &render_stop);
+ }
+
+ GST_DEBUG_OBJECT (sink,
+ "final timestamps: start %" GST_TIME_FORMAT " - stop %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (render_start), GST_TIME_ARGS (render_stop));
+
+ /* bring to position in the ringbuffer */
+ time_offset = GST_AUDIO_CLOCK_CAST (sink->provided_clock)->time_offset;
+ GST_DEBUG_OBJECT (sink,
+ "time offset %" GST_TIME_FORMAT, GST_TIME_ARGS (time_offset));
+ if (render_start > time_offset)
+ render_start -= time_offset;
+ else
+ render_start = 0;
+ if (render_stop > time_offset)
+ render_stop -= time_offset;
+ else
+ render_stop = 0;
+
+ /* in some clock slaving cases, all late samples end up at 0 first,
+   * and subsequent ones align with that until the threshold is exceeded,
+   * and then sync back to 0 and so on, so avoid that altogether */
+ if (G_UNLIKELY (render_start == 0 && render_stop == 0))
+ goto too_late;
+
+ /* and bring the time to the rate corrected offset in the buffer */
+ render_start = gst_util_uint64_scale_int (render_start, rate, GST_SECOND);
+ render_stop = gst_util_uint64_scale_int (render_stop, rate, GST_SECOND);
+
+ /* positive playback rate, first sample is render_start, negative rate, first
+ * sample is render_stop. When no rate conversion is active, render exactly
+ * the amount of input samples to avoid aligning to rounding errors. */
+ if (bsink->segment.rate >= 0.0) {
+ sample_offset = render_start;
+ if (bsink->segment.rate == 1.0)
+ render_stop = sample_offset + samples;
+ } else {
+ sample_offset = render_stop;
+ if (bsink->segment.rate == -1.0)
+ render_start = sample_offset + samples;
+ }
+
+ /* always resync after a discont */
+ if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) {
+ GST_DEBUG_OBJECT (sink, "resync after discont");
+ goto no_align;
+ }
+
+ /* resync when we don't know what to align the sample with */
+ if (G_UNLIKELY (sink->next_sample == -1)) {
+ GST_DEBUG_OBJECT (sink,
+ "no align possible: no previous sample position known");
+ goto no_align;
+ }
+
+ align = gst_audio_base_sink_get_alignment (sink, sample_offset);
+ sink->priv->last_align = align;
+
+ /* apply alignment */
+ render_start += align;
+
+ /* only align stop if we are not slaved to resample */
+ if (slaved && sink->priv->slave_method == GST_AUDIO_BASE_SINK_SLAVE_RESAMPLE) {
+ GST_DEBUG_OBJECT (sink, "no stop time align needed: we are slaved");
+ goto no_align;
+ }
+ render_stop += align;
+
+no_align:
+ /* number of target samples is difference between start and stop */
+ out_samples = render_stop - render_start;
+
+no_sync:
+ /* we render the first or last sample first, depending on the rate */
+ if (bsink->segment.rate >= 0.0)
+ sample_offset = render_start;
+ else
+ sample_offset = render_stop;
+
+ GST_DEBUG_OBJECT (sink, "rendering at %" G_GUINT64_FORMAT " %d/%d",
+ sample_offset, samples, out_samples);
+
+ /* we need to accumulate over different runs for when we get interrupted */
+ accum = 0;
+ align_next = TRUE;
+ data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
+ do {
+ written =
+ gst_audio_ring_buffer_commit (ringbuf, &sample_offset,
+ data + offset, samples, out_samples, &accum);
+
+ GST_DEBUG_OBJECT (sink, "wrote %u of %u", written, samples);
+ /* if we wrote all, we're done */
+ if (written == samples)
+ break;
+
+ /* else something interrupted us and we wait for preroll. */
+ if ((ret = gst_base_sink_wait_preroll (bsink)) != GST_FLOW_OK)
+ goto stopping;
+
+ /* if we got interrupted, we cannot assume that the next sample should
+ * be aligned to this one */
+ align_next = FALSE;
+
+ /* update the output samples. FIXME, this will just skip them when pausing
+ * during trick mode */
+ if (out_samples > written) {
+ out_samples -= written;
+ accum = 0;
+ } else
+ break;
+
+ samples -= written;
+ offset += written * bpf;
+ } while (TRUE);
+ gst_buffer_unmap (buf, data, size);
+
+ if (align_next)
+ sink->next_sample = sample_offset;
+ else
+ sink->next_sample = -1;
+
+ GST_DEBUG_OBJECT (sink, "next sample expected at %" G_GUINT64_FORMAT,
+ sink->next_sample);
+
+ if (GST_CLOCK_TIME_IS_VALID (stop) && stop >= bsink->segment.stop) {
+ GST_DEBUG_OBJECT (sink,
+ "start playback because we are at the end of segment");
+ gst_audio_ring_buffer_start (ringbuf);
+ }
+
+ ret = GST_FLOW_OK;
+
+done:
+ if (out)
+ gst_buffer_unref (out);
+
+ return ret;
+
+ /* SPECIAL cases */
+out_of_segment:
+ {
+ GST_DEBUG_OBJECT (sink,
+ "dropping sample out of segment time %" GST_TIME_FORMAT ", start %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (time),
+ GST_TIME_ARGS (bsink->segment.start));
+ ret = GST_FLOW_OK;
+ goto done;
+ }
+too_late:
+ {
+ GST_DEBUG_OBJECT (sink, "dropping late sample");
+ return GST_FLOW_OK;
+ }
+ /* ERRORS */
+payload_failed:
+ {
+ GST_ELEMENT_ERROR (sink, STREAM, FORMAT, (NULL), ("failed to payload."));
+ ret = GST_FLOW_ERROR;
+ goto done;
+ }
+wrong_state:
+ {
+ GST_DEBUG_OBJECT (sink, "ringbuffer not negotiated");
+ GST_ELEMENT_ERROR (sink, STREAM, FORMAT, (NULL), ("sink not negotiated."));
+ ret = GST_FLOW_NOT_NEGOTIATED;
+ goto done;
+ }
+wrong_size:
+ {
+ GST_DEBUG_OBJECT (sink, "wrong size");
+ GST_ELEMENT_ERROR (sink, STREAM, WRONG_TYPE,
+ (NULL), ("sink received buffer of wrong size."));
+ ret = GST_FLOW_ERROR;
+ goto done;
+ }
+stopping:
+ {
+ GST_DEBUG_OBJECT (sink, "preroll got interrupted: %d (%s)", ret,
+ gst_flow_get_name (ret));
+ gst_buffer_unmap (buf, data, size);
+ goto done;
+ }
+sync_latency_failed:
+ {
+ GST_DEBUG_OBJECT (sink, "failed waiting for latency");
+ goto done;
+ }
+}
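+
+/* A worked example of the scheduling above, assuming ts-offset = 0,
+ * render-delay = 20ms and latency = 50ms: sync_offset = 0 - 20ms + 50ms =
+ * 30ms, so a sample with (clipped) running time T is targeted at clock time
+ * base_time + T + 30ms, before the clock-slaving correction and the audio
+ * clock's time_offset are applied and the result is converted to a sample
+ * position in the ringbuffer. */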
+
+/**
+ * gst_audio_base_sink_create_ringbuffer:
+ * @sink: a #GstAudioBaseSink.
+ *
+ * Create and return the #GstAudioRingBuffer for @sink. This function will call the
+ * ::create_ringbuffer vmethod and will set @sink as the parent of the returned
+ * buffer (see gst_object_set_parent()).
+ *
+ * Returns: The new ringbuffer of @sink.
+ */
+GstAudioRingBuffer *
+gst_audio_base_sink_create_ringbuffer (GstAudioBaseSink * sink)
+{
+ GstAudioBaseSinkClass *bclass;
+ GstAudioRingBuffer *buffer = NULL;
+
+ bclass = GST_AUDIO_BASE_SINK_GET_CLASS (sink);
+ if (bclass->create_ringbuffer)
+ buffer = bclass->create_ringbuffer (sink);
+
+ if (buffer)
+ gst_object_set_parent (GST_OBJECT (buffer), GST_OBJECT (sink));
+
+ return buffer;
+}
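+
+#if 0
+/* A minimal sketch of a subclass providing the ::create_ringbuffer vmethod
+ * used above; MY_TYPE_RING_BUFFER and my_sink_create_ringbuffer are
+ * hypothetical placeholders for the subclass's own ringbuffer type. */
+static GstAudioRingBuffer *
+my_sink_create_ringbuffer (GstAudioBaseSink * sink)
+{
+  /* the base class takes ownership via gst_object_set_parent() */
+  return g_object_new (MY_TYPE_RING_BUFFER, NULL);
+}
+#endif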
+
+static void
+gst_audio_base_sink_callback (GstAudioRingBuffer * rbuf, guint8 * data,
+ guint len, gpointer user_data)
+{
+ GstBaseSink *basesink;
+ GstAudioBaseSink *sink;
+ GstBuffer *buf;
+ GstFlowReturn ret;
+ gsize size;
+
+ basesink = GST_BASE_SINK (user_data);
+ sink = GST_AUDIO_BASE_SINK (user_data);
+
+ GST_PAD_STREAM_LOCK (basesink->sinkpad);
+
+ /* would be nice to arrange for pad_alloc_buffer to return data -- as it is we
+ will copy twice, once into data, once into DMA */
+ GST_LOG_OBJECT (basesink, "pulling %u bytes offset %" G_GUINT64_FORMAT
+ " to fill audio buffer", len, basesink->offset);
+ ret =
+ gst_pad_pull_range (basesink->sinkpad, basesink->segment.position, len,
+ &buf);
+
+ if (ret != GST_FLOW_OK) {
+ if (ret == GST_FLOW_EOS)
+ goto eos;
+ else
+ goto error;
+ }
+
+ GST_BASE_SINK_PREROLL_LOCK (basesink);
+ if (basesink->flushing)
+ goto flushing;
+
+ /* complete preroll and wait for PLAYING */
+ ret = gst_base_sink_do_preroll (basesink, GST_MINI_OBJECT_CAST (buf));
+ if (ret != GST_FLOW_OK)
+ goto preroll_error;
+
+ size = gst_buffer_get_size (buf);
+
+ if (len != size) {
+ GST_INFO_OBJECT (basesink,
+ "got different size than requested from sink pad: %u"
+ " != %" G_GSIZE_FORMAT, len, size);
+ len = MIN (size, len);
+ }
+
+ basesink->segment.position += len;
+
+ gst_buffer_extract (buf, 0, data, len);
+ GST_BASE_SINK_PREROLL_UNLOCK (basesink);
+
+ GST_PAD_STREAM_UNLOCK (basesink->sinkpad);
+
+ return;
+
+error:
+ {
+ GST_WARNING_OBJECT (basesink, "Got flow '%s' but can't return it: %d",
+ gst_flow_get_name (ret), ret);
+ gst_audio_ring_buffer_pause (rbuf);
+ GST_PAD_STREAM_UNLOCK (basesink->sinkpad);
+ return;
+ }
+eos:
+ {
+ /* FIXME: this is not quite correct; we'll be called endlessly until
+ * the sink gets shut down; maybe we should set a flag somewhere, or
+ * set segment.stop and segment.duration to the last sample or so */
+ GST_DEBUG_OBJECT (sink, "EOS");
+ gst_audio_base_sink_drain (sink);
+ gst_audio_ring_buffer_pause (rbuf);
+ gst_element_post_message (GST_ELEMENT_CAST (sink),
+ gst_message_new_eos (GST_OBJECT_CAST (sink)));
+    GST_PAD_STREAM_UNLOCK (basesink->sinkpad);
+    return;
+  }
+flushing:
+ {
+ GST_DEBUG_OBJECT (sink, "we are flushing");
+ gst_audio_ring_buffer_pause (rbuf);
+ GST_BASE_SINK_PREROLL_UNLOCK (basesink);
+ GST_PAD_STREAM_UNLOCK (basesink->sinkpad);
+ return;
+ }
+preroll_error:
+ {
+ GST_DEBUG_OBJECT (sink, "error %s", gst_flow_get_name (ret));
+ gst_audio_ring_buffer_pause (rbuf);
+ GST_BASE_SINK_PREROLL_UNLOCK (basesink);
+ GST_PAD_STREAM_UNLOCK (basesink->sinkpad);
+ return;
+ }
+}
+
+static gboolean
+gst_audio_base_sink_activate_pull (GstBaseSink * basesink, gboolean active)
+{
+ gboolean ret;
+ GstAudioBaseSink *sink = GST_AUDIO_BASE_SINK (basesink);
+
+ if (active) {
+ GST_DEBUG_OBJECT (basesink, "activating pull");
+
+ gst_audio_ring_buffer_set_callback (sink->ringbuffer,
+ gst_audio_base_sink_callback, sink);
+
+ ret = gst_audio_ring_buffer_activate (sink->ringbuffer, TRUE);
+ } else {
+ GST_DEBUG_OBJECT (basesink, "deactivating pull");
+ gst_audio_ring_buffer_set_callback (sink->ringbuffer, NULL, NULL);
+ ret = gst_audio_ring_buffer_activate (sink->ringbuffer, FALSE);
+ }
+
+ return ret;
+}
+
+#if 0
+/* should be called with the LOCK */
+static GstStateChangeReturn
+gst_audio_base_sink_async_play (GstBaseSink * basesink)
+{
+ GstAudioBaseSink *sink;
+
+ sink = GST_AUDIO_BASE_SINK (basesink);
+
+ GST_DEBUG_OBJECT (sink, "ringbuffer may start now");
+ sink->priv->sync_latency = TRUE;
+ gst_audio_ring_buffer_may_start (sink->ringbuffer, TRUE);
+ if (basesink->pad_mode == GST_PAD_MODE_PULL) {
+    /* we always start the ringbuffer in pull mode immediately */
+ gst_audio_ring_buffer_start (sink->ringbuffer);
+ }
+
+ return GST_STATE_CHANGE_SUCCESS;
+}
+#endif
+
+static GstStateChangeReturn
+gst_audio_base_sink_change_state (GstElement * element,
+ GstStateChange transition)
+{
+ GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
+ GstAudioBaseSink *sink = GST_AUDIO_BASE_SINK (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ if (sink->ringbuffer == NULL) {
+ gst_audio_clock_reset (GST_AUDIO_CLOCK (sink->provided_clock), 0);
+ sink->ringbuffer = gst_audio_base_sink_create_ringbuffer (sink);
+ }
+ if (!gst_audio_ring_buffer_open_device (sink->ringbuffer))
+ goto open_failed;
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ sink->next_sample = -1;
+ sink->priv->last_align = -1;
+ sink->priv->eos_time = -1;
+ sink->priv->discont_time = -1;
+ gst_audio_ring_buffer_set_flushing (sink->ringbuffer, FALSE);
+ gst_audio_ring_buffer_may_start (sink->ringbuffer, FALSE);
+
+ /* Only post clock-provide messages if this is the clock that
+       * we've created. If the subclass has overridden it, the subclass
+       * should post these messages whenever necessary */
+ if (sink->provided_clock && GST_IS_AUDIO_CLOCK (sink->provided_clock) &&
+ GST_AUDIO_CLOCK_CAST (sink->provided_clock)->func ==
+ (GstAudioClockGetTimeFunc) gst_audio_base_sink_get_time)
+ gst_element_post_message (element,
+ gst_message_new_clock_provide (GST_OBJECT_CAST (element),
+ sink->provided_clock, TRUE));
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+ {
+ gboolean eos;
+
+ GST_OBJECT_LOCK (sink);
+ GST_DEBUG_OBJECT (sink, "ringbuffer may start now");
+ sink->priv->sync_latency = TRUE;
+ eos = GST_BASE_SINK (sink)->eos;
+ GST_OBJECT_UNLOCK (sink);
+
+ gst_audio_ring_buffer_may_start (sink->ringbuffer, TRUE);
+ if (GST_BASE_SINK_CAST (sink)->pad_mode == GST_PAD_MODE_PULL ||
+ g_atomic_int_get (&sink->eos_rendering) || eos) {
+        /* we always start the ringbuffer in pull mode immediately */
+ /* sync rendering on eos needs running clock,
+ * and others need running clock when finished rendering eos */
+ gst_audio_ring_buffer_start (sink->ringbuffer);
+ }
+ break;
+ }
+ case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+ /* ringbuffer cannot start anymore */
+ gst_audio_ring_buffer_may_start (sink->ringbuffer, FALSE);
+ gst_audio_ring_buffer_pause (sink->ringbuffer);
+
+ GST_OBJECT_LOCK (sink);
+ sink->priv->sync_latency = FALSE;
+ GST_OBJECT_UNLOCK (sink);
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ /* Only post clock-lost messages if this is the clock that
+       * we've created. If the subclass has overridden it, the subclass
+       * should post these messages whenever necessary */
+ if (sink->provided_clock && GST_IS_AUDIO_CLOCK (sink->provided_clock) &&
+ GST_AUDIO_CLOCK_CAST (sink->provided_clock)->func ==
+ (GstAudioClockGetTimeFunc) gst_audio_base_sink_get_time)
+ gst_element_post_message (element,
+ gst_message_new_clock_lost (GST_OBJECT_CAST (element),
+ sink->provided_clock));
+
+ /* make sure we unblock before calling the parent state change
+ * so it can grab the STREAM_LOCK */
+ gst_audio_ring_buffer_set_flushing (sink->ringbuffer, TRUE);
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+ /* stop slaving ourselves to the master, if any */
+ gst_clock_set_master (sink->provided_clock, NULL);
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_audio_ring_buffer_activate (sink->ringbuffer, FALSE);
+ gst_audio_ring_buffer_release (sink->ringbuffer);
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+      /* we release again here because the acquire happens when setting the
+ * caps, which happens before we commit the state to PAUSED and thus the
+ * PAUSED->READY state change (see above, where we release the ringbuffer)
+ * might not be called when we get here. */
+ gst_audio_ring_buffer_activate (sink->ringbuffer, FALSE);
+ gst_audio_ring_buffer_release (sink->ringbuffer);
+ gst_audio_ring_buffer_close_device (sink->ringbuffer);
+ GST_OBJECT_LOCK (sink);
+ gst_object_unparent (GST_OBJECT_CAST (sink->ringbuffer));
+ sink->ringbuffer = NULL;
+ GST_OBJECT_UNLOCK (sink);
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+
+ /* ERRORS */
+open_failed:
+ {
+ /* subclass must post a meaningful error message */
+ GST_DEBUG_OBJECT (sink, "open failed");
+ return GST_STATE_CHANGE_FAILURE;
+ }
+}
--- /dev/null
+/* GStreamer
+ * Copyright (C) 2005 Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+/**
+ * SECTION:gstaudioringbuffer
+ * @short_description: Base class for audio ringbuffer implementations
+ * @see_also: #GstAudioBaseSink, #GstAudioSink
+ *
+ * <refsect2>
+ * <para>
+ * This object is the base class for audio ringbuffers used by the base
+ * audio source and sink classes.
+ * </para>
+ * <para>
+ * The ringbuffer abstracts a circular buffer of data. One reader and
+ * one writer can operate on the data from different threads in a lockfree
+ * manner. The base class is sufficiently flexible to be used as an
+ * abstraction for DMA based ringbuffers as well as pure software
+ * implementations.
+ * </para>
+ * </refsect2>
+ *
+ * Last reviewed on 2006-02-02 (0.10.4)
+ */
+
+#include <string.h>
+
+#include "gstaudioringbuffer.h"
+
+#include "gst/glib-compat-private.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_audio_ring_buffer_debug);
+#define GST_CAT_DEFAULT gst_audio_ring_buffer_debug
+
+static void gst_audio_ring_buffer_dispose (GObject * object);
+static void gst_audio_ring_buffer_finalize (GObject * object);
+
+static gboolean gst_audio_ring_buffer_pause_unlocked (GstAudioRingBuffer * buf);
+static void default_clear_all (GstAudioRingBuffer * buf);
+static guint default_commit (GstAudioRingBuffer * buf, guint64 * sample,
+ guchar * data, gint in_samples, gint out_samples, gint * accum);
+
+/* ringbuffer abstract base class */
+G_DEFINE_ABSTRACT_TYPE (GstAudioRingBuffer, gst_audio_ring_buffer,
+ GST_TYPE_OBJECT);
+
+static void
+gst_audio_ring_buffer_class_init (GstAudioRingBufferClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstAudioRingBufferClass *gstaudioringbuffer_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstaudioringbuffer_class = (GstAudioRingBufferClass *) klass;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_ring_buffer_debug, "ringbuffer", 0,
+ "ringbuffer class");
+
+ gobject_class->dispose = gst_audio_ring_buffer_dispose;
+ gobject_class->finalize = gst_audio_ring_buffer_finalize;
+
+ gstaudioringbuffer_class->clear_all = GST_DEBUG_FUNCPTR (default_clear_all);
+ gstaudioringbuffer_class->commit = GST_DEBUG_FUNCPTR (default_commit);
+}
+
+static void
+gst_audio_ring_buffer_init (GstAudioRingBuffer * ringbuffer)
+{
+ ringbuffer->open = FALSE;
+ ringbuffer->acquired = FALSE;
+ ringbuffer->state = GST_AUDIO_RING_BUFFER_STATE_STOPPED;
+ ringbuffer->cond = g_cond_new ();
+ ringbuffer->waiting = 0;
+ ringbuffer->empty_seg = NULL;
+ ringbuffer->flushing = TRUE;
+}
+
+static void
+gst_audio_ring_buffer_dispose (GObject * object)
+{
+ GstAudioRingBuffer *ringbuffer = GST_AUDIO_RING_BUFFER (object);
+
+ gst_caps_replace (&ringbuffer->spec.caps, NULL);
+
+ G_OBJECT_CLASS (gst_audio_ring_buffer_parent_class)->dispose (G_OBJECT
+ (ringbuffer));
+}
+
+static void
+gst_audio_ring_buffer_finalize (GObject * object)
+{
+ GstAudioRingBuffer *ringbuffer = GST_AUDIO_RING_BUFFER (object);
+
+ g_cond_free (ringbuffer->cond);
+ g_free (ringbuffer->empty_seg);
+
+ G_OBJECT_CLASS (gst_audio_ring_buffer_parent_class)->finalize (G_OBJECT
+ (ringbuffer));
+}
+
+#ifndef GST_DISABLE_GST_DEBUG
+static const gchar *format_type_names[] = {
+ "raw",
+ "mu law",
+ "a law",
+ "ima adpcm",
+ "mpeg",
+ "gsm",
+ "iec958",
+ "ac3",
+ "eac3",
+ "dts"
+};
+#endif
+
+/**
+ * gst_audio_ring_buffer_debug_spec_caps:
+ * @spec: the spec to debug
+ *
+ * Print debug info about the parsed caps in @spec to the debug log.
+ */
+void
+gst_audio_ring_buffer_debug_spec_caps (GstAudioRingBufferSpec * spec)
+{
+#if 0
+ gint i, bytes;
+#endif
+
+ GST_DEBUG ("spec caps: %p %" GST_PTR_FORMAT, spec->caps, spec->caps);
+ GST_DEBUG ("parsed caps: type: %d, '%s'", spec->type,
+ format_type_names[spec->type]);
+#if 0
+ GST_DEBUG ("parsed caps: width: %d", spec->width);
+ GST_DEBUG ("parsed caps: sign: %d", spec->sign);
+ GST_DEBUG ("parsed caps: bigend: %d", spec->bigend);
+ GST_DEBUG ("parsed caps: rate: %d", spec->rate);
+ GST_DEBUG ("parsed caps: channels: %d", spec->channels);
+ GST_DEBUG ("parsed caps: sample bytes: %d", spec->bytes_per_sample);
+ bytes = (spec->width >> 3) * spec->channels;
+ for (i = 0; i < bytes; i++) {
+ GST_DEBUG ("silence byte %d: %02x", i, spec->silence_sample[i]);
+ }
+#endif
+}
+
+/**
+ * gst_audio_ring_buffer_debug_spec_buff:
+ * @spec: the spec to debug
+ *
+ * Print debug info about the buffer sizes in @spec to the debug log.
+ */
+void
+gst_audio_ring_buffer_debug_spec_buff (GstAudioRingBufferSpec * spec)
+{
+ gint bpf = GST_AUDIO_INFO_BPF (&spec->info);
+
+ GST_DEBUG ("acquire ringbuffer: buffer time: %" G_GINT64_FORMAT " usec",
+ spec->buffer_time);
+ GST_DEBUG ("acquire ringbuffer: latency time: %" G_GINT64_FORMAT " usec",
+ spec->latency_time);
+ GST_DEBUG ("acquire ringbuffer: total segments: %d", spec->segtotal);
+ GST_DEBUG ("acquire ringbuffer: latency segments: %d", spec->seglatency);
+ GST_DEBUG ("acquire ringbuffer: segment size: %d bytes = %d samples",
+ spec->segsize, spec->segsize / bpf);
+ GST_DEBUG ("acquire ringbuffer: buffer size: %d bytes = %d samples",
+ spec->segsize * spec->segtotal, spec->segsize * spec->segtotal / bpf);
+}
+
+/**
+ * gst_audio_ring_buffer_parse_caps:
+ * @spec: a spec
+ * @caps: a #GstCaps
+ *
+ * Parse @caps into @spec.
+ *
+ * Returns: TRUE if the caps could be parsed.
+ */
+gboolean
+gst_audio_ring_buffer_parse_caps (GstAudioRingBufferSpec * spec, GstCaps * caps)
+{
+ const gchar *mimetype;
+ GstStructure *structure;
+ gint i;
+ GstAudioInfo info;
+
+ structure = gst_caps_get_structure (caps, 0);
+ gst_audio_info_init (&info);
+
+ /* we have to differentiate between int and float formats */
+ mimetype = gst_structure_get_name (structure);
+
+ if (g_str_equal (mimetype, "audio/x-raw")) {
+ if (!gst_audio_info_from_caps (&info, caps))
+ goto parse_error;
+
+ spec->type = GST_BUFTYPE_RAW;
+ } else if (g_str_equal (mimetype, "audio/x-alaw")) {
+ /* extract the needed information from the cap */
+ if (!(gst_structure_get_int (structure, "rate", &info.rate) &&
+ gst_structure_get_int (structure, "channels", &info.channels)))
+ goto parse_error;
+
+ spec->type = GST_BUFTYPE_A_LAW;
+ info.bpf = info.channels;
+ } else if (g_str_equal (mimetype, "audio/x-mulaw")) {
+ /* extract the needed information from the cap */
+ if (!(gst_structure_get_int (structure, "rate", &info.rate) &&
+ gst_structure_get_int (structure, "channels", &info.channels)))
+ goto parse_error;
+
+ spec->type = GST_BUFTYPE_MU_LAW;
+ info.bpf = info.channels;
+ } else if (g_str_equal (mimetype, "audio/x-iec958")) {
+ /* extract the needed information from the cap */
+ if (!(gst_structure_get_int (structure, "rate", &info.rate)))
+ goto parse_error;
+
+ spec->type = GST_BUFTYPE_IEC958;
+ info.bpf = 4;
+ } else if (g_str_equal (mimetype, "audio/x-ac3")) {
+ /* extract the needed information from the cap */
+ if (!(gst_structure_get_int (structure, "rate", &info.rate)))
+ goto parse_error;
+
+ spec->type = GST_BUFTYPE_AC3;
+ info.bpf = 4;
+ } else if (g_str_equal (mimetype, "audio/x-eac3")) {
+ /* extract the needed information from the cap */
+ if (!(gst_structure_get_int (structure, "rate", &info.rate)))
+ goto parse_error;
+
+ spec->type = GST_BUFTYPE_EAC3;
+ info.bpf = 16;
+ } else if (g_str_equal (mimetype, "audio/x-dts")) {
+ /* extract the needed information from the cap */
+ if (!(gst_structure_get_int (structure, "rate", &info.rate)))
+ goto parse_error;
+
+ spec->type = GST_BUFTYPE_DTS;
+ info.bpf = 4;
+ } else if (g_str_equal (mimetype, "audio/mpeg") &&
+ gst_structure_get_int (structure, "mpegaudioversion", &i) &&
+ (i == 1 || i == 2)) {
+ /* Now we know this is MPEG-1 or MPEG-2 (non AAC) */
+ /* extract the needed information from the cap */
+ if (!(gst_structure_get_int (structure, "rate", &info.rate)))
+ goto parse_error;
+
+ spec->type = GST_BUFTYPE_MPEG;
+ info.bpf = 4;
+ } else {
+ goto parse_error;
+ }
+
+ gst_caps_replace (&spec->caps, caps);
+
+ g_return_val_if_fail (spec->latency_time != 0, FALSE);
+
+ /* calculate suggested segsize and segtotal. segsize should be one unit
+ * of 'latency_time' samples, scaling for the fact that latency_time is
+ * currently stored in microseconds (FIXME: in 0.11) */
+ spec->segsize = gst_util_uint64_scale (info.rate * info.bpf,
+ spec->latency_time, GST_SECOND / GST_USECOND);
+ /* Round to an integer number of samples */
+ spec->segsize -= spec->segsize % info.bpf;
+
+ spec->segtotal = spec->buffer_time / spec->latency_time;
+ /* leave the latency undefined now, implementations can change it but if it's
+ * not changed, we assume the same value as segtotal */
+ spec->seglatency = -1;
+
+ spec->info = info;
+
+ gst_audio_ring_buffer_debug_spec_caps (spec);
+ gst_audio_ring_buffer_debug_spec_buff (spec);
+
+ return TRUE;
+
+ /* ERRORS */
+parse_error:
+ {
+ GST_DEBUG ("could not parse caps");
+ return FALSE;
+ }
+}
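+
+#if 0
+/* A minimal sketch of filling a spec from caps. The latency_time and
+ * buffer_time values (in microseconds) are arbitrary example numbers and
+ * must be set before parsing; the caps fields assume 16-bit interleaved
+ * stereo raw audio as handled by the audio/x-raw branch above. */
+static gboolean
+example_fill_spec (GstAudioRingBufferSpec * spec)
+{
+  GstCaps *caps;
+  gboolean res;
+
+  spec->latency_time = 10000;   /* 10 ms segments */
+  spec->buffer_time = 200000;   /* 200 ms of buffering */
+
+  caps = gst_caps_new_simple ("audio/x-raw",
+      "format", G_TYPE_STRING, "S16LE",
+      "rate", G_TYPE_INT, 44100, "channels", G_TYPE_INT, 2, NULL);
+  res = gst_audio_ring_buffer_parse_caps (spec, caps);
+  gst_caps_unref (caps);
+
+  return res;
+}
+#endif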
+
+/**
+ * gst_audio_ring_buffer_convert:
+ * @buf: the #GstAudioRingBuffer
+ * @src_fmt: the source format
+ * @src_val: the source value
+ * @dest_fmt: the destination format
+ * @dest_val: a location to store the converted value
+ *
+ * Convert @src_val in @src_fmt to the equivalent value in @dest_fmt. The result
+ * will be put in @dest_val.
+ *
+ * Returns: TRUE if the conversion succeeded.
+ *
+ * Since: 0.10.22.
+ */
+gboolean
+gst_audio_ring_buffer_convert (GstAudioRingBuffer * buf,
+ GstFormat src_fmt, gint64 src_val, GstFormat dest_fmt, gint64 * dest_val)
+{
+ gboolean res;
+
+ GST_OBJECT_LOCK (buf);
+ res =
+ gst_audio_info_convert (&buf->spec.info, src_fmt, src_val, dest_fmt,
+ dest_val);
+ GST_OBJECT_UNLOCK (buf);
+
+ return res;
+}
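+
+#if 0
+/* Sketch: convert a byte count to a clock time with the parsed spec, for
+ * example to report how much queued data corresponds to in time. */
+static GstClockTime
+example_bytes_to_time (GstAudioRingBuffer * buf, gint64 bytes)
+{
+  gint64 time = 0;
+
+  if (!gst_audio_ring_buffer_convert (buf, GST_FORMAT_BYTES, bytes,
+          GST_FORMAT_TIME, &time))
+    return GST_CLOCK_TIME_NONE;
+
+  return (GstClockTime) time;
+}
+#endif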
+
+/**
+ * gst_audio_ring_buffer_set_callback:
+ * @buf: the #GstAudioRingBuffer to set the callback on
+ * @cb: the callback to set
+ * @user_data: user data passed to the callback
+ *
+ * Sets the given callback function on the buffer. This function
+ * will be called every time a segment has been written to a device.
+ *
+ * MT safe.
+ */
+void
+gst_audio_ring_buffer_set_callback (GstAudioRingBuffer * buf,
+ GstAudioRingBufferCallback cb, gpointer user_data)
+{
+ g_return_if_fail (GST_IS_AUDIO_RING_BUFFER (buf));
+
+ GST_OBJECT_LOCK (buf);
+ buf->callback = cb;
+ buf->cb_data = user_data;
+ GST_OBJECT_UNLOCK (buf);
+}
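+
+#if 0
+/* Sketch of a callback matching the GstAudioRingBufferCallback signature,
+ * as installed by gst_audio_base_sink_activate_pull(): for a playback
+ * ringbuffer it must fill @data with exactly @len bytes of audio. Writing
+ * zeroes is only real silence for signed sample formats. */
+static void
+example_fill (GstAudioRingBuffer * rbuf, guint8 * data, guint len,
+    gpointer user_data)
+{
+  memset (data, 0, len);
+}
+#endif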
+
+
+/**
+ * gst_audio_ring_buffer_open_device:
+ * @buf: the #GstAudioRingBuffer
+ *
+ * Open the audio device associated with the ring buffer. Does not perform any
+ * setup on the device. You must open the device before acquiring the ring
+ * buffer.
+ *
+ * Returns: TRUE if the device could be opened, FALSE on error.
+ *
+ * MT safe.
+ */
+gboolean
+gst_audio_ring_buffer_open_device (GstAudioRingBuffer * buf)
+{
+ gboolean res = TRUE;
+ GstAudioRingBufferClass *rclass;
+
+ g_return_val_if_fail (GST_IS_AUDIO_RING_BUFFER (buf), FALSE);
+
+ GST_DEBUG_OBJECT (buf, "opening device");
+
+ GST_OBJECT_LOCK (buf);
+ if (G_UNLIKELY (buf->open))
+ goto was_opened;
+
+ buf->open = TRUE;
+
+ /* if this fails, something is wrong in this file */
+ g_assert (!buf->acquired);
+
+ rclass = GST_AUDIO_RING_BUFFER_GET_CLASS (buf);
+ if (G_LIKELY (rclass->open_device))
+ res = rclass->open_device (buf);
+
+ if (G_UNLIKELY (!res))
+ goto open_failed;
+
+ GST_DEBUG_OBJECT (buf, "opened device");
+
+done:
+ GST_OBJECT_UNLOCK (buf);
+
+ return res;
+
+ /* ERRORS */
+was_opened:
+ {
+ GST_DEBUG_OBJECT (buf, "Device for ring buffer already open");
+ g_warning ("Device for ring buffer %p already open, fix your code", buf);
+ res = TRUE;
+ goto done;
+ }
+open_failed:
+ {
+ buf->open = FALSE;
+ GST_DEBUG_OBJECT (buf, "failed opening device");
+ goto done;
+ }
+}
+
+/**
+ * gst_audio_ring_buffer_close_device:
+ * @buf: the #GstAudioRingBuffer
+ *
+ * Close the audio device associated with the ring buffer. The ring buffer
+ * should already have been released via gst_audio_ring_buffer_release().
+ *
+ * Returns: TRUE if the device could be closed, FALSE on error.
+ *
+ * MT safe.
+ */
+gboolean
+gst_audio_ring_buffer_close_device (GstAudioRingBuffer * buf)
+{
+ gboolean res = TRUE;
+ GstAudioRingBufferClass *rclass;
+
+ g_return_val_if_fail (GST_IS_AUDIO_RING_BUFFER (buf), FALSE);
+
+ GST_DEBUG_OBJECT (buf, "closing device");
+
+ GST_OBJECT_LOCK (buf);
+ if (G_UNLIKELY (!buf->open))
+ goto was_closed;
+
+ if (G_UNLIKELY (buf->acquired))
+ goto was_acquired;
+
+ buf->open = FALSE;
+
+ rclass = GST_AUDIO_RING_BUFFER_GET_CLASS (buf);
+ if (G_LIKELY (rclass->close_device))
+ res = rclass->close_device (buf);
+
+ if (G_UNLIKELY (!res))
+ goto close_error;
+
+ GST_DEBUG_OBJECT (buf, "closed device");
+
+done:
+ GST_OBJECT_UNLOCK (buf);
+
+ return res;
+
+ /* ERRORS */
+was_closed:
+ {
+ GST_DEBUG_OBJECT (buf, "Device for ring buffer already closed");
+ g_warning ("Device for ring buffer %p already closed, fix your code", buf);
+ res = TRUE;
+ goto done;
+ }
+was_acquired:
+ {
+ GST_DEBUG_OBJECT (buf, "Resources for ring buffer still acquired");
+ g_critical ("Resources for ring buffer %p still acquired", buf);
+ res = FALSE;
+ goto done;
+ }
+close_error:
+ {
+ buf->open = TRUE;
+ GST_DEBUG_OBJECT (buf, "error closing device");
+ goto done;
+ }
+}
+
+/**
+ * gst_audio_ring_buffer_device_is_open:
+ * @buf: the #GstAudioRingBuffer
+ *
+ * Checks the status of the device associated with the ring buffer.
+ *
+ * Returns: TRUE if the device was open, FALSE if it was closed.
+ *
+ * MT safe.
+ */
+gboolean
+gst_audio_ring_buffer_device_is_open (GstAudioRingBuffer * buf)
+{
+ gboolean res = TRUE;
+
+ g_return_val_if_fail (GST_IS_AUDIO_RING_BUFFER (buf), FALSE);
+
+ GST_OBJECT_LOCK (buf);
+ res = buf->open;
+ GST_OBJECT_UNLOCK (buf);
+
+ return res;
+}
+
+/**
+ * gst_audio_ring_buffer_acquire:
+ * @buf: the #GstAudioRingBuffer to acquire
+ * @spec: the specs of the buffer
+ *
+ * Allocate the resources for the ringbuffer. This function fills
+ * in the data pointer of the ring buffer with a valid #GstBuffer
+ * to which samples can be written.
+ *
+ * Returns: TRUE if the device could be acquired, FALSE on error.
+ *
+ * MT safe.
+ */
+gboolean
+gst_audio_ring_buffer_acquire (GstAudioRingBuffer * buf,
+ GstAudioRingBufferSpec * spec)
+{
+ gboolean res = FALSE;
+ GstAudioRingBufferClass *rclass;
+ gint segsize, bpf;
+
+ g_return_val_if_fail (GST_IS_AUDIO_RING_BUFFER (buf), FALSE);
+
+ GST_DEBUG_OBJECT (buf, "acquiring device %p", buf);
+
+ GST_OBJECT_LOCK (buf);
+ if (G_UNLIKELY (!buf->open))
+ goto not_opened;
+
+ if (G_UNLIKELY (buf->acquired))
+ goto was_acquired;
+
+ buf->acquired = TRUE;
+
+ rclass = GST_AUDIO_RING_BUFFER_GET_CLASS (buf);
+ if (G_LIKELY (rclass->acquire))
+ res = rclass->acquire (buf, spec);
+
+ if (G_UNLIKELY (!res))
+ goto acquire_failed;
+
+ if (G_UNLIKELY ((bpf = buf->spec.info.bpf) == 0))
+ goto invalid_bpf;
+
+  /* if the seglatency was overwritten with something other than -1, use it, else
+ * assume segtotal as the latency */
+ if (buf->spec.seglatency == -1)
+ buf->spec.seglatency = buf->spec.segtotal;
+
+ segsize = buf->spec.segsize;
+
+ buf->samples_per_seg = segsize / bpf;
+
+ /* create an empty segment */
+ g_free (buf->empty_seg);
+ buf->empty_seg = g_malloc (segsize);
+
+ if (buf->spec.type == GST_BUFTYPE_RAW) {
+ gst_audio_format_fill_silence (buf->spec.info.finfo, buf->empty_seg,
+ segsize);
+ } else {
+ /* FIXME, non-raw formats get 0 as the empty sample */
+ memset (buf->empty_seg, 0, segsize);
+ }
+ GST_DEBUG_OBJECT (buf, "acquired device");
+
+done:
+ GST_OBJECT_UNLOCK (buf);
+
+ return res;
+
+ /* ERRORS */
+not_opened:
+ {
+ GST_DEBUG_OBJECT (buf, "device not opened");
+ g_critical ("Device for %p not opened", buf);
+ res = FALSE;
+ goto done;
+ }
+was_acquired:
+ {
+ res = TRUE;
+ GST_DEBUG_OBJECT (buf, "device was acquired");
+ goto done;
+ }
+acquire_failed:
+ {
+ buf->acquired = FALSE;
+ GST_DEBUG_OBJECT (buf, "failed to acquire device");
+ goto done;
+ }
+invalid_bpf:
+ {
+ g_warning
+ ("invalid bytes_per_frame from acquire ringbuffer %p, fix the element",
+ buf);
+ buf->acquired = FALSE;
+ res = FALSE;
+ goto done;
+ }
+}
+
+/**
+ * gst_audio_ring_buffer_release:
+ * @buf: the #GstAudioRingBuffer to release
+ *
+ * Free the resources of the ringbuffer.
+ *
+ * Returns: TRUE if the device could be released, FALSE on error.
+ *
+ * MT safe.
+ */
+gboolean
+gst_audio_ring_buffer_release (GstAudioRingBuffer * buf)
+{
+ gboolean res = FALSE;
+ GstAudioRingBufferClass *rclass;
+
+ g_return_val_if_fail (GST_IS_AUDIO_RING_BUFFER (buf), FALSE);
+
+ GST_DEBUG_OBJECT (buf, "releasing device");
+
+ gst_audio_ring_buffer_stop (buf);
+
+ GST_OBJECT_LOCK (buf);
+ if (G_UNLIKELY (!buf->acquired))
+ goto was_released;
+
+ buf->acquired = FALSE;
+
+ /* if this fails, something is wrong in this file */
+ g_assert (buf->open == TRUE);
+
+ rclass = GST_AUDIO_RING_BUFFER_GET_CLASS (buf);
+ if (G_LIKELY (rclass->release))
+ res = rclass->release (buf);
+
+ /* signal any waiters */
+ GST_DEBUG_OBJECT (buf, "signal waiter");
+ GST_AUDIO_RING_BUFFER_SIGNAL (buf);
+
+ if (G_UNLIKELY (!res))
+ goto release_failed;
+
+ g_free (buf->empty_seg);
+ buf->empty_seg = NULL;
+ GST_DEBUG_OBJECT (buf, "released device");
+
+done:
+ GST_OBJECT_UNLOCK (buf);
+
+ return res;
+
+ /* ERRORS */
+was_released:
+ {
+ res = TRUE;
+ GST_DEBUG_OBJECT (buf, "device was released");
+ goto done;
+ }
+release_failed:
+ {
+ buf->acquired = TRUE;
+ GST_DEBUG_OBJECT (buf, "failed to release device");
+ goto done;
+ }
+}
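+
+#if 0
+/* The typical device/buffer lifecycle as driven by the base sink's state
+ * changes; error handling is omitted in this sketch and @spec is assumed to
+ * have been filled with gst_audio_ring_buffer_parse_caps(). */
+static void
+example_lifecycle (GstAudioRingBuffer * buf, GstAudioRingBufferSpec * spec)
+{
+  gst_audio_ring_buffer_open_device (buf);      /* NULL -> READY */
+  gst_audio_ring_buffer_acquire (buf, spec);    /* when caps are set */
+  gst_audio_ring_buffer_may_start (buf, TRUE);
+  gst_audio_ring_buffer_start (buf);            /* PAUSED -> PLAYING */
+  gst_audio_ring_buffer_stop (buf);
+  gst_audio_ring_buffer_release (buf);          /* PAUSED -> READY */
+  gst_audio_ring_buffer_close_device (buf);     /* READY -> NULL */
+}
+#endif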
+
+/**
+ * gst_audio_ring_buffer_is_acquired:
+ * @buf: the #GstAudioRingBuffer to check
+ *
+ * Check if the ringbuffer is acquired and ready to use.
+ *
+ * Returns: TRUE if the ringbuffer is acquired, FALSE on error.
+ *
+ * MT safe.
+ */
+gboolean
+gst_audio_ring_buffer_is_acquired (GstAudioRingBuffer * buf)
+{
+ gboolean res;
+
+ g_return_val_if_fail (GST_IS_AUDIO_RING_BUFFER (buf), FALSE);
+
+ GST_OBJECT_LOCK (buf);
+ res = buf->acquired;
+ GST_OBJECT_UNLOCK (buf);
+
+ return res;
+}
+
+/**
+ * gst_audio_ring_buffer_activate:
+ * @buf: the #GstAudioRingBuffer to activate
+ * @active: the new mode
+ *
+ * Activate @buf to start or stop pulling data.
+ *
+ * MT safe.
+ *
+ * Returns: TRUE if the device could be activated in the requested mode,
+ * FALSE on error.
+ *
+ * Since: 0.10.22.
+ */
+gboolean
+gst_audio_ring_buffer_activate (GstAudioRingBuffer * buf, gboolean active)
+{
+ gboolean res = FALSE;
+ GstAudioRingBufferClass *rclass;
+
+ g_return_val_if_fail (GST_IS_AUDIO_RING_BUFFER (buf), FALSE);
+
+ GST_DEBUG_OBJECT (buf, "activate device");
+
+ GST_OBJECT_LOCK (buf);
+ if (G_UNLIKELY (active && !buf->acquired))
+ goto not_acquired;
+
+ if (G_UNLIKELY (buf->active == active))
+ goto was_active;
+
+ rclass = GST_AUDIO_RING_BUFFER_GET_CLASS (buf);
+ /* if there is no activate function we assume it was started/released
+ * in the acquire method */
+ if (G_LIKELY (rclass->activate))
+ res = rclass->activate (buf, active);
+ else
+ res = TRUE;
+
+ if (G_UNLIKELY (!res))
+ goto activate_failed;
+
+ buf->active = active;
+
+done:
+ GST_OBJECT_UNLOCK (buf);
+
+ return res;
+
+ /* ERRORS */
+not_acquired:
+ {
+ GST_DEBUG_OBJECT (buf, "device not acquired");
+ g_critical ("Device for %p not acquired", buf);
+ res = FALSE;
+ goto done;
+ }
+was_active:
+ {
+ res = TRUE;
+ GST_DEBUG_OBJECT (buf, "device was active in mode %d", active);
+ goto done;
+ }
+activate_failed:
+ {
+ GST_DEBUG_OBJECT (buf, "failed to activate device");
+ goto done;
+ }
+}
+
+/**
+ * gst_audio_ring_buffer_is_active:
+ * @buf: the #GstAudioRingBuffer
+ *
+ * Check if @buf is activated.
+ *
+ * MT safe.
+ *
+ * Returns: TRUE if the device is active.
+ *
+ * Since: 0.10.22.
+ */
+gboolean
+gst_audio_ring_buffer_is_active (GstAudioRingBuffer * buf)
+{
+ gboolean res;
+
+ g_return_val_if_fail (GST_IS_AUDIO_RING_BUFFER (buf), FALSE);
+
+ GST_OBJECT_LOCK (buf);
+ res = buf->active;
+ GST_OBJECT_UNLOCK (buf);
+
+ return res;
+}
+
+
+/**
+ * gst_audio_ring_buffer_set_flushing:
+ * @buf: the #GstAudioRingBuffer to flush
+ * @flushing: the new mode
+ *
+ * Set the ringbuffer to flushing mode or normal mode.
+ *
+ * MT safe.
+ */
+void
+gst_audio_ring_buffer_set_flushing (GstAudioRingBuffer * buf, gboolean flushing)
+{
+ g_return_if_fail (GST_IS_AUDIO_RING_BUFFER (buf));
+
+ GST_OBJECT_LOCK (buf);
+ buf->flushing = flushing;
+
+ if (flushing) {
+ gst_audio_ring_buffer_pause_unlocked (buf);
+ } else {
+ gst_audio_ring_buffer_clear_all (buf);
+ }
+ GST_OBJECT_UNLOCK (buf);
+}
+
+/**
+ * gst_audio_ring_buffer_start:
+ * @buf: the #GstAudioRingBuffer to start
+ *
+ * Start processing samples from the ringbuffer.
+ *
+ * Returns: TRUE if the device could be started, FALSE on error.
+ *
+ * MT safe.
+ */
+gboolean
+gst_audio_ring_buffer_start (GstAudioRingBuffer * buf)
+{
+ gboolean res = FALSE;
+ GstAudioRingBufferClass *rclass;
+ gboolean resume = FALSE;
+
+ g_return_val_if_fail (GST_IS_AUDIO_RING_BUFFER (buf), FALSE);
+
+ GST_DEBUG_OBJECT (buf, "starting ringbuffer");
+
+ GST_OBJECT_LOCK (buf);
+ if (G_UNLIKELY (buf->flushing))
+ goto flushing;
+
+ if (G_UNLIKELY (!buf->acquired))
+ goto not_acquired;
+
+ if (G_UNLIKELY (g_atomic_int_get (&buf->may_start) == FALSE))
+ goto may_not_start;
+
+ /* if stopped, set to started */
+ res = g_atomic_int_compare_and_exchange (&buf->state,
+ GST_AUDIO_RING_BUFFER_STATE_STOPPED, GST_AUDIO_RING_BUFFER_STATE_STARTED);
+
+ if (!res) {
+ GST_DEBUG_OBJECT (buf, "was not stopped, try paused");
+ /* was not stopped, try from paused */
+ res = g_atomic_int_compare_and_exchange (&buf->state,
+ GST_AUDIO_RING_BUFFER_STATE_PAUSED,
+ GST_AUDIO_RING_BUFFER_STATE_STARTED);
+ if (!res) {
+ /* was not paused either, must be started then */
+ res = TRUE;
+ GST_DEBUG_OBJECT (buf, "was not paused, must have been started");
+ goto done;
+ }
+ resume = TRUE;
+ GST_DEBUG_OBJECT (buf, "resuming");
+ }
+
+ rclass = GST_AUDIO_RING_BUFFER_GET_CLASS (buf);
+ if (resume) {
+ if (G_LIKELY (rclass->resume))
+ res = rclass->resume (buf);
+ } else {
+ if (G_LIKELY (rclass->start))
+ res = rclass->start (buf);
+ }
+
+ if (G_UNLIKELY (!res)) {
+ buf->state = GST_AUDIO_RING_BUFFER_STATE_PAUSED;
+ GST_DEBUG_OBJECT (buf, "failed to start");
+ } else {
+ GST_DEBUG_OBJECT (buf, "started");
+ }
+
+done:
+ GST_OBJECT_UNLOCK (buf);
+
+ return res;
+
+flushing:
+ {
+ GST_DEBUG_OBJECT (buf, "we are flushing");
+ GST_OBJECT_UNLOCK (buf);
+ return FALSE;
+ }
+not_acquired:
+ {
+ GST_DEBUG_OBJECT (buf, "we are not acquired");
+ GST_OBJECT_UNLOCK (buf);
+ return FALSE;
+ }
+may_not_start:
+ {
+ GST_DEBUG_OBJECT (buf, "we may not start");
+ GST_OBJECT_UNLOCK (buf);
+ return FALSE;
+ }
+}
+
+static gboolean
+gst_audio_ring_buffer_pause_unlocked (GstAudioRingBuffer * buf)
+{
+ gboolean res = FALSE;
+ GstAudioRingBufferClass *rclass;
+
+ GST_DEBUG_OBJECT (buf, "pausing ringbuffer");
+
+ /* if started, set to paused */
+ res = g_atomic_int_compare_and_exchange (&buf->state,
+ GST_AUDIO_RING_BUFFER_STATE_STARTED, GST_AUDIO_RING_BUFFER_STATE_PAUSED);
+
+ if (!res)
+ goto not_started;
+
+ /* signal any waiters */
+ GST_DEBUG_OBJECT (buf, "signal waiter");
+ GST_AUDIO_RING_BUFFER_SIGNAL (buf);
+
+ rclass = GST_AUDIO_RING_BUFFER_GET_CLASS (buf);
+ if (G_LIKELY (rclass->pause))
+ res = rclass->pause (buf);
+
+ if (G_UNLIKELY (!res)) {
+ buf->state = GST_AUDIO_RING_BUFFER_STATE_STARTED;
+ GST_DEBUG_OBJECT (buf, "failed to pause");
+ } else {
+ GST_DEBUG_OBJECT (buf, "paused");
+ }
+
+ return res;
+
+not_started:
+ {
+ /* was not started */
+ GST_DEBUG_OBJECT (buf, "was not started");
+ return TRUE;
+ }
+}
+
+/**
+ * gst_audio_ring_buffer_pause:
+ * @buf: the #GstAudioRingBuffer to pause
+ *
+ * Pause processing samples from the ringbuffer.
+ *
+ * Returns: TRUE if the device could be paused, FALSE on error.
+ *
+ * MT safe.
+ */
+gboolean
+gst_audio_ring_buffer_pause (GstAudioRingBuffer * buf)
+{
+ gboolean res = FALSE;
+
+ g_return_val_if_fail (GST_IS_AUDIO_RING_BUFFER (buf), FALSE);
+
+ GST_OBJECT_LOCK (buf);
+ if (G_UNLIKELY (buf->flushing))
+ goto flushing;
+
+ if (G_UNLIKELY (!buf->acquired))
+ goto not_acquired;
+
+ res = gst_audio_ring_buffer_pause_unlocked (buf);
+ GST_OBJECT_UNLOCK (buf);
+
+ return res;
+
+ /* ERRORS */
+flushing:
+ {
+ GST_DEBUG_OBJECT (buf, "we are flushing");
+ GST_OBJECT_UNLOCK (buf);
+ return FALSE;
+ }
+not_acquired:
+ {
+ GST_DEBUG_OBJECT (buf, "not acquired");
+ GST_OBJECT_UNLOCK (buf);
+ return FALSE;
+ }
+}
+
+/**
+ * gst_audio_ring_buffer_stop:
+ * @buf: the #GstAudioRingBuffer to stop
+ *
+ * Stop processing samples from the ringbuffer.
+ *
+ * Returns: TRUE if the device could be stopped, FALSE on error.
+ *
+ * MT safe.
+ */
+gboolean
+gst_audio_ring_buffer_stop (GstAudioRingBuffer * buf)
+{
+ gboolean res = FALSE;
+ GstAudioRingBufferClass *rclass;
+
+ g_return_val_if_fail (GST_IS_AUDIO_RING_BUFFER (buf), FALSE);
+
+ GST_DEBUG_OBJECT (buf, "stopping");
+
+ GST_OBJECT_LOCK (buf);
+
+ /* if started, set to stopped */
+ res = g_atomic_int_compare_and_exchange (&buf->state,
+ GST_AUDIO_RING_BUFFER_STATE_STARTED, GST_AUDIO_RING_BUFFER_STATE_STOPPED);
+
+ if (!res) {
+ GST_DEBUG_OBJECT (buf, "was not started, try paused");
+ /* was not started, try from paused */
+ res = g_atomic_int_compare_and_exchange (&buf->state,
+ GST_AUDIO_RING_BUFFER_STATE_PAUSED,
+ GST_AUDIO_RING_BUFFER_STATE_STOPPED);
+ if (!res) {
+ /* was not paused either, must have been stopped then */
+ res = TRUE;
+ GST_DEBUG_OBJECT (buf, "was not paused, must have been stopped");
+ goto done;
+ }
+ }
+
+ /* signal any waiters */
+ GST_DEBUG_OBJECT (buf, "signal waiter");
+ GST_AUDIO_RING_BUFFER_SIGNAL (buf);
+
+ rclass = GST_AUDIO_RING_BUFFER_GET_CLASS (buf);
+ if (G_LIKELY (rclass->stop))
+ res = rclass->stop (buf);
+
+ if (G_UNLIKELY (!res)) {
+ buf->state = GST_AUDIO_RING_BUFFER_STATE_STARTED;
+ GST_DEBUG_OBJECT (buf, "failed to stop");
+ } else {
+ GST_DEBUG_OBJECT (buf, "stopped");
+ }
+done:
+ GST_OBJECT_UNLOCK (buf);
+
+ return res;
+}
+
+/**
+ * gst_audio_ring_buffer_delay:
+ * @buf: the #GstAudioRingBuffer to query
+ *
+ * Get the number of samples queued in the audio device. This is
+ * usually less than the segment size but can be bigger when the
+ * implementation uses another internal buffer between itself and the
+ * audio device.
+ *
+ * For playback ringbuffers this is the amount of samples transferred from the
+ * ringbuffer to the device but still not played.
+ *
+ * For capture ringbuffers this is the amount of samples in the device that are
+ * not yet transferred to the ringbuffer.
+ *
+ * Returns: The number of samples queued in the audio device.
+ *
+ * MT safe.
+ */
+guint
+gst_audio_ring_buffer_delay (GstAudioRingBuffer * buf)
+{
+ GstAudioRingBufferClass *rclass;
+ guint res;
+
+ g_return_val_if_fail (GST_IS_AUDIO_RING_BUFFER (buf), 0);
+
+ /* buffer must be acquired */
+ if (G_UNLIKELY (!gst_audio_ring_buffer_is_acquired (buf)))
+ goto not_acquired;
+
+ rclass = GST_AUDIO_RING_BUFFER_GET_CLASS (buf);
+ if (G_LIKELY (rclass->delay))
+ res = rclass->delay (buf);
+ else
+ res = 0;
+
+ return res;
+
+not_acquired:
+ {
+ GST_DEBUG_OBJECT (buf, "not acquired");
+ return 0;
+ }
+}
+
+/**
+ * gst_audio_ring_buffer_samples_done:
+ * @buf: the #GstAudioRingBuffer to query
+ *
+ * Get the number of samples that were processed by the ringbuffer
+ * since it was last started. This does not include the number of samples not
+ * yet processed (see gst_audio_ring_buffer_delay()).
+ *
+ * Returns: The number of samples processed by the ringbuffer.
+ *
+ * MT safe.
+ */
+guint64
+gst_audio_ring_buffer_samples_done (GstAudioRingBuffer * buf)
+{
+ gint segdone;
+ guint64 samples;
+
+ g_return_val_if_fail (GST_IS_AUDIO_RING_BUFFER (buf), 0);
+
+ /* get the amount of segments we processed */
+ segdone = g_atomic_int_get (&buf->segdone);
+
+ /* convert to samples */
+ samples = ((guint64) segdone) * buf->samples_per_seg;
+
+ return samples;
+}
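+
+#if 0
+/* Sketch: derive a playback position in time from the processed sample
+ * count minus the samples still queued in the device, assuming the rate
+ * from the parsed spec; this is how a ringbuffer-based audio clock can be
+ * computed. */
+static GstClockTime
+example_play_position (GstAudioRingBuffer * buf)
+{
+  guint64 samples = gst_audio_ring_buffer_samples_done (buf);
+  guint delay = gst_audio_ring_buffer_delay (buf);
+  gint rate = buf->spec.info.rate;
+
+  if (rate <= 0)
+    return GST_CLOCK_TIME_NONE;
+
+  if (samples > delay)
+    samples -= delay;
+  else
+    samples = 0;
+
+  return gst_util_uint64_scale_int (samples, GST_SECOND, rate);
+}
+#endif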
+
+/**
+ * gst_audio_ring_buffer_set_sample:
+ * @buf: the #GstAudioRingBuffer to use
+ * @sample: the sample number to set
+ *
+ * Make sure that the next sample written to the device is
+ * accounted for as being the @sample sample written to the
+ * device. This value will be used in reporting the current
+ * sample position of the ringbuffer.
+ *
+ * This function will also clear the buffer with silence.
+ *
+ * MT safe.
+ */
+void
+gst_audio_ring_buffer_set_sample (GstAudioRingBuffer * buf, guint64 sample)
+{
+ g_return_if_fail (GST_IS_AUDIO_RING_BUFFER (buf));
+
+ if (sample == -1)
+ sample = 0;
+
+ if (G_UNLIKELY (buf->samples_per_seg == 0))
+ return;
+
+ /* FIXME, we assume the ringbuffer can restart at a random
+ * position, round down to the beginning and keep track of
+ * offset when calculating the processed samples. */
+ buf->segbase = buf->segdone - sample / buf->samples_per_seg;
+
+ gst_audio_ring_buffer_clear_all (buf);
+
+ GST_DEBUG_OBJECT (buf, "set sample to %" G_GUINT64_FORMAT ", segbase %d",
+ sample, buf->segbase);
+}
+
+static void
+default_clear_all (GstAudioRingBuffer * buf)
+{
+ gint i;
+
+ /* not fatal, we just are not negotiated yet */
+ if (G_UNLIKELY (buf->spec.segtotal <= 0))
+ return;
+
+ GST_DEBUG_OBJECT (buf, "clear all segments");
+
+ for (i = 0; i < buf->spec.segtotal; i++) {
+ gst_audio_ring_buffer_clear (buf, i);
+ }
+}
+
+/**
+ * gst_audio_ring_buffer_clear_all:
+ * @buf: the #GstAudioRingBuffer to clear
+ *
+ * Fill the ringbuffer with silence.
+ *
+ * MT safe.
+ */
+void
+gst_audio_ring_buffer_clear_all (GstAudioRingBuffer * buf)
+{
+ GstAudioRingBufferClass *rclass;
+
+ g_return_if_fail (GST_IS_AUDIO_RING_BUFFER (buf));
+
+ rclass = GST_AUDIO_RING_BUFFER_GET_CLASS (buf);
+
+ if (G_LIKELY (rclass->clear_all))
+ rclass->clear_all (buf);
+}
+
+
+static gboolean
+wait_segment (GstAudioRingBuffer * buf)
+{
+ gint segments;
+ gboolean wait = TRUE;
+
+ /* buffer must be started now or we deadlock since nobody is reading */
+ if (G_UNLIKELY (g_atomic_int_get (&buf->state) !=
+ GST_AUDIO_RING_BUFFER_STATE_STARTED)) {
+ /* see if we are allowed to start it */
+ if (G_UNLIKELY (g_atomic_int_get (&buf->may_start) == FALSE))
+ goto no_start;
+
+ GST_DEBUG_OBJECT (buf, "start!");
+ segments = g_atomic_int_get (&buf->segdone);
+ gst_audio_ring_buffer_start (buf);
+
+    /* After starting, the writer may have written segments already and then we
+ * don't need to wait anymore */
+ if (G_LIKELY (g_atomic_int_get (&buf->segdone) != segments))
+ wait = FALSE;
+ }
+
+ /* take lock first, then update our waiting flag */
+ GST_OBJECT_LOCK (buf);
+ if (G_UNLIKELY (buf->flushing))
+ goto flushing;
+
+ if (G_UNLIKELY (g_atomic_int_get (&buf->state) !=
+ GST_AUDIO_RING_BUFFER_STATE_STARTED))
+ goto not_started;
+
+ if (G_LIKELY (wait)) {
+ if (g_atomic_int_compare_and_exchange (&buf->waiting, 0, 1)) {
+ GST_DEBUG_OBJECT (buf, "waiting..");
+ GST_AUDIO_RING_BUFFER_WAIT (buf);
+
+ if (G_UNLIKELY (buf->flushing))
+ goto flushing;
+
+ if (G_UNLIKELY (g_atomic_int_get (&buf->state) !=
+ GST_AUDIO_RING_BUFFER_STATE_STARTED))
+ goto not_started;
+ }
+ }
+ GST_OBJECT_UNLOCK (buf);
+
+ return TRUE;
+
+ /* ERROR */
+not_started:
+ {
+ g_atomic_int_compare_and_exchange (&buf->waiting, 1, 0);
+ GST_DEBUG_OBJECT (buf, "stopped processing");
+ GST_OBJECT_UNLOCK (buf);
+ return FALSE;
+ }
+flushing:
+ {
+ g_atomic_int_compare_and_exchange (&buf->waiting, 1, 0);
+ GST_DEBUG_OBJECT (buf, "flushing");
+ GST_OBJECT_UNLOCK (buf);
+ return FALSE;
+ }
+no_start:
+ {
+ GST_DEBUG_OBJECT (buf, "not allowed to start");
+ return FALSE;
+ }
+}
+
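+/* The copy macros below move samples between the caller's buffer and the
+ * ringbuffer memory. The *_UP/*_DOWN variants perform nearest-neighbour
+ * rate conversion with an error accumulator (*accum), Bresenham-style:
+ * each iteration adds the smaller of the two rates to the accumulator and
+ * advances the slower pointer whenever twice the accumulator reaches the
+ * larger rate. The REV_* variants walk the source backwards for reverse
+ * playback. */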
+#define FWD_SAMPLES(s,se,d,de) \
+G_STMT_START { \
+ /* no rate conversion */ \
+ guint towrite = MIN (se + bpf - s, de - d); \
+ /* simple copy */ \
+ if (!skip) \
+ memcpy (d, s, towrite); \
+ in_samples -= towrite / bpf; \
+ out_samples -= towrite / bpf; \
+ s += towrite; \
+ GST_DEBUG ("copy %u bytes", towrite); \
+} G_STMT_END
+
+/* in_samples >= out_samples, rate > 1.0 */
+#define FWD_UP_SAMPLES(s,se,d,de) \
+G_STMT_START { \
+ guint8 *sb = s, *db = d; \
+ while (s <= se && d < de) { \
+ if (!skip) \
+ memcpy (d, s, bpf); \
+ s += bpf; \
+ *accum += outr; \
+ if ((*accum << 1) >= inr) { \
+ *accum -= inr; \
+ d += bpf; \
+ } \
+ } \
+ in_samples -= (s - sb)/bpf; \
+ out_samples -= (d - db)/bpf; \
+ GST_DEBUG ("fwd_up end %d/%d",*accum,*toprocess); \
+} G_STMT_END
+
+/* out_samples > in_samples, for rates smaller than 1.0 */
+#define FWD_DOWN_SAMPLES(s,se,d,de) \
+G_STMT_START { \
+ guint8 *sb = s, *db = d; \
+ while (s <= se && d < de) { \
+ if (!skip) \
+ memcpy (d, s, bpf); \
+ d += bpf; \
+ *accum += inr; \
+ if ((*accum << 1) >= outr) { \
+ *accum -= outr; \
+ s += bpf; \
+ } \
+ } \
+ in_samples -= (s - sb)/bpf; \
+ out_samples -= (d - db)/bpf; \
+ GST_DEBUG ("fwd_down end %d/%d",*accum,*toprocess); \
+} G_STMT_END
+
+#define REV_UP_SAMPLES(s,se,d,de) \
+G_STMT_START { \
+ guint8 *sb = se, *db = d; \
+ while (s <= se && d < de) { \
+ if (!skip) \
+ memcpy (d, se, bpf); \
+ se -= bpf; \
+ *accum += outr; \
+ while (d < de && (*accum << 1) >= inr) { \
+ *accum -= inr; \
+ d += bpf; \
+ } \
+ } \
+ in_samples -= (sb - se)/bpf; \
+ out_samples -= (d - db)/bpf; \
+ GST_DEBUG ("rev_up end %d/%d",*accum,*toprocess); \
+} G_STMT_END
+
+#define REV_DOWN_SAMPLES(s,se,d,de) \
+G_STMT_START { \
+ guint8 *sb = se, *db = d; \
+ while (s <= se && d < de) { \
+ if (!skip) \
+ memcpy (d, se, bpf); \
+ d += bpf; \
+ *accum += inr; \
+ while (s <= se && (*accum << 1) >= outr) { \
+ *accum -= outr; \
+ se -= bpf; \
+ } \
+ } \
+ in_samples -= (sb - se)/bpf; \
+ out_samples -= (d - db)/bpf; \
+ GST_DEBUG ("rev_down end %d/%d",*accum,*toprocess); \
+} G_STMT_END
+
+static guint
+default_commit (GstAudioRingBuffer * buf, guint64 * sample,
+ guchar * data, gint in_samples, gint out_samples, gint * accum)
+{
+ gint segdone;
+ gint segsize, segtotal, bpf, sps;
+ guint8 *dest, *data_end;
+ gint writeseg, sampleoff;
+ gint *toprocess;
+ gint inr, outr;
+ gboolean reverse;
+
+ g_return_val_if_fail (buf->memory != NULL, -1);
+ g_return_val_if_fail (data != NULL, -1);
+
+ dest = buf->memory;
+ segsize = buf->spec.segsize;
+ segtotal = buf->spec.segtotal;
+ bpf = buf->spec.info.bpf;
+ sps = buf->samples_per_seg;
+
+ reverse = out_samples < 0;
+ out_samples = ABS (out_samples);
+
+ if (in_samples >= out_samples)
+ toprocess = &in_samples;
+ else
+ toprocess = &out_samples;
+
+ inr = in_samples - 1;
+ outr = out_samples - 1;
+
+  /* data_end points to the last sample we have to write, not one past it;
+   * this is needed to properly handle reverse playback. */
+ data_end = data + (bpf * inr);
+
+ /* figure out the segment and the offset inside the segment where
+ * the first sample should be written. */
+ writeseg = *sample / sps;
+ sampleoff = (*sample % sps) * bpf;
+
+ /* write out all samples */
+ while (*toprocess > 0) {
+ gint avail;
+ guint8 *d, *d_end;
+ gint ws;
+ gboolean skip;
+
+ while (TRUE) {
+ gint diff;
+
+ /* get the currently processed segment */
+ segdone = g_atomic_int_get (&buf->segdone) - buf->segbase;
+
+ /* see how far away it is from the write segment */
+ diff = writeseg - segdone;
+
+ GST_DEBUG
+ ("pointer at %d, write to %d-%d, diff %d, segtotal %d, segsize %d, base %d",
+ segdone, writeseg, sampleoff, diff, segtotal, segsize, buf->segbase);
+
+ /* segment too far ahead, writer too slow, we need to drop, hopefully UNLIKELY */
+ if (G_UNLIKELY (diff < 0)) {
+ /* we need to drop one segment at a time, pretend we wrote a
+ * segment. */
+ skip = TRUE;
+ break;
+ }
+
+ /* write segment is within writable range, we can break the loop and
+ * start writing the data. */
+ if (diff < segtotal) {
+ skip = FALSE;
+ break;
+ }
+
+ /* else we need to wait for the segment to become writable. */
+ if (!wait_segment (buf))
+ goto not_started;
+ }
+
+ /* we can write now */
+ ws = writeseg % segtotal;
+ avail = MIN (segsize - sampleoff, bpf * out_samples);
+
+ d = dest + (ws * segsize) + sampleoff;
+ d_end = d + avail;
+ *sample += avail / bpf;
+
+ GST_DEBUG_OBJECT (buf, "write @%p seg %d, sps %d, off %d, avail %d",
+ dest + ws * segsize, ws, sps, sampleoff, avail);
+
+ if (G_LIKELY (inr == outr && !reverse)) {
+ /* no rate conversion, simply copy samples */
+ FWD_SAMPLES (data, data_end, d, d_end);
+ } else if (!reverse) {
+ if (inr >= outr)
+ /* forward speed up */
+ FWD_UP_SAMPLES (data, data_end, d, d_end);
+ else
+ /* forward slow down */
+ FWD_DOWN_SAMPLES (data, data_end, d, d_end);
+ } else {
+ if (inr >= outr)
+ /* reverse speed up */
+ REV_UP_SAMPLES (data, data_end, d, d_end);
+ else
+ /* reverse slow down */
+ REV_DOWN_SAMPLES (data, data_end, d, d_end);
+ }
+
+ /* for the next iteration we write to the next segment at the beginning. */
+ writeseg++;
+ sampleoff = 0;
+ }
+ /* we consumed all samples here */
+ data = data_end + bpf;
+
+done:
+ return inr - ((data_end - data) / bpf);
+
+ /* ERRORS */
+not_started:
+ {
+ GST_DEBUG_OBJECT (buf, "stopped processing");
+ goto done;
+ }
+}
+
+/**
+ * gst_audio_ring_buffer_commit:
+ * @buf: the #GstAudioRingBuffer to commit
+ * @sample: the sample position of the data
+ * @data: the data to commit
+ * @in_samples: the number of samples in the data to commit
+ * @out_samples: the number of samples to write to the ringbuffer
+ * @accum: accumulator for rate conversion.
+ *
+ * Commit @in_samples samples pointed to by @data to the ringbuffer @buf.
+ *
+ * @in_samples and @out_samples define the rate conversion to perform on the
+ * samples in @data. For negative rates, @out_samples must be negative and
+ * @in_samples positive.
+ *
+ * When @out_samples is positive, the first sample will be written at position @sample
+ * in the ringbuffer. When @out_samples is negative, the last sample will be written to
+ * @sample in reverse order.
+ *
+ * @out_samples does not need to be a multiple of the segment size of the ringbuffer
+ * although it is recommended for optimal performance.
+ *
+ * @accum will hold a temporary accumulator used in rate conversion and should be
+ * set to 0 when this function is first called. In case the commit operation is
+ * interrupted, one can resume the processing by passing the previously returned
+ * @accum value back to this function.
+ *
+ * MT safe.
+ *
+ * Returns: The number of samples written to the ringbuffer or -1 on error. The
+ * number of samples written can be less than @out_samples when @buf was interrupted
+ * with a flush or stop.
+ *
+ * Since: 0.10.11.
+ */
+guint
+gst_audio_ring_buffer_commit (GstAudioRingBuffer * buf, guint64 * sample,
+ guchar * data, gint in_samples, gint out_samples, gint * accum)
+{
+ GstAudioRingBufferClass *rclass;
+ guint res = -1;
+
+ g_return_val_if_fail (GST_IS_AUDIO_RING_BUFFER (buf), -1);
+
+ if (G_UNLIKELY (in_samples == 0 || out_samples == 0))
+ return in_samples;
+
+ rclass = GST_AUDIO_RING_BUFFER_GET_CLASS (buf);
+
+ if (G_LIKELY (rclass->commit))
+ res = rclass->commit (buf, sample, data, in_samples, out_samples, accum);
+
+ return res;
+}
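+
+#if 0
+/* Sketch: commit a block of samples at a given position with no rate
+ * conversion (in_samples == out_samples). @accum starts at 0 and must be
+ * preserved when resuming an interrupted commit, as done in the base sink's
+ * render loop. */
+static guint
+example_commit (GstAudioRingBuffer * buf, guint8 * data, guint nsamples,
+    guint64 position)
+{
+  guint64 sample = position;
+  gint accum = 0;
+
+  /* the return value can be smaller than nsamples if the buffer was
+   * flushed or stopped; @sample is advanced past the committed samples */
+  return gst_audio_ring_buffer_commit (buf, &sample, data,
+      nsamples, nsamples, &accum);
+}
+#endif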
+
+/**
+ * gst_audio_ring_buffer_read:
+ * @buf: the #GstAudioRingBuffer to read from
+ * @sample: the sample position of the data
+ * @data: where the data should be read
+ * @len: the number of samples in data to read
+ *
+ * Read @len samples from the ringbuffer into the memory pointed
+ * to by @data.
+ * The first sample should be read from position @sample in
+ * the ringbuffer.
+ *
+ * @len does not need to be a multiple of the segment size of the ringbuffer,
+ * although it is recommended for optimal performance.
+ *
+ * Returns: The number of samples read from the ringbuffer or -1 on
+ * error.
+ *
+ * MT safe.
+ */
+guint
+gst_audio_ring_buffer_read (GstAudioRingBuffer * buf, guint64 sample,
+ guchar * data, guint len)
+{
+ gint segdone;
+ gint segsize, segtotal, bpf, sps;
+ guint8 *dest;
+ guint to_read;
+
+ g_return_val_if_fail (GST_IS_AUDIO_RING_BUFFER (buf), -1);
+ g_return_val_if_fail (buf->memory != NULL, -1);
+ g_return_val_if_fail (data != NULL, -1);
+
+ dest = buf->memory;
+ segsize = buf->spec.segsize;
+ segtotal = buf->spec.segtotal;
+ bpf = buf->spec.info.bpf;
+ sps = buf->samples_per_seg;
+
+ to_read = len;
+ /* read enough samples */
+ while (to_read > 0) {
+ gint sampleslen;
+ gint readseg, sampleoff;
+
+ /* figure out the segment and the offset inside the segment where
+ * the sample should be read from. */
+ readseg = sample / sps;
+ sampleoff = (sample % sps);
+
+ while (TRUE) {
+ gint diff;
+
+ /* get the currently processed segment */
+ segdone = g_atomic_int_get (&buf->segdone) - buf->segbase;
+
+ /* see how far away it is from the read segment, normally segdone (where
+ * the hardware is writing) is bigger than readseg (where software is
+ * reading) */
+ diff = segdone - readseg;
+
+ GST_DEBUG
+ ("pointer at %d, sample %" G_GUINT64_FORMAT
+ ", read from %d-%d, to_read %d, diff %d, segtotal %d, segsize %d",
+ segdone, sample, readseg, sampleoff, to_read, diff, segtotal,
+ segsize);
+
+ /* segment too far ahead, reader too slow */
+ if (G_UNLIKELY (diff >= segtotal)) {
+ /* pretend we read an empty segment. */
+ sampleslen = MIN (sps, to_read);
+ memcpy (data, buf->empty_seg, sampleslen * bpf);
+ goto next;
+ }
+
+ /* read segment is within readable range, we can break the loop and
+ * start reading the data. */
+ if (diff > 0)
+ break;
+
+ /* else we need to wait for the segment to become readable. */
+ if (!wait_segment (buf))
+ goto not_started;
+ }
+
+ /* we can read now */
+ readseg = readseg % segtotal;
+ sampleslen = MIN (sps - sampleoff, to_read);
+
+ GST_DEBUG_OBJECT (buf, "read @%p seg %d, off %d, sampleslen %d",
+ dest + readseg * segsize, readseg, sampleoff, sampleslen);
+
+ memcpy (data, dest + (readseg * segsize) + (sampleoff * bpf),
+ (sampleslen * bpf));
+
+ next:
+ to_read -= sampleslen;
+ sample += sampleslen;
+ data += sampleslen * bpf;
+ }
+
+ return len - to_read;
+
+ /* ERRORS */
+not_started:
+ {
+ GST_DEBUG_OBJECT (buf, "stopped processing");
+ return len - to_read;
+ }
+}
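A corresponding capture-side sketch, again assuming an acquired and started ringbuffer and a destination buffer large enough for n_samples frames (names are illustrative):

static guint
pull_audio (GstAudioRingBuffer * buf, guint64 sample, guchar * dest,
    guint n_samples)
{
  guint nread;

  nread = gst_audio_ring_buffer_read (buf, sample, dest, n_samples);
  if (nread < n_samples) {
    /* the ringbuffer was stopped before all requested samples were read */
  }
  return nread;
}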
+
+/**
+ * gst_audio_ring_buffer_prepare_read:
+ * @buf: the #GstAudioRingBuffer to read from
+ * @segment: where the currently readable segment number is stored
+ * @readptr: where a pointer to the readable memory is stored
+ * @len: where the size in bytes of the readable segment is stored
+ *
+ * Returns a pointer to memory where the data from segment @segment
+ * can be found. This function is mostly used by subclasses.
+ *
+ * Returns: FALSE if the buffer is not started, TRUE otherwise.
+ *
+ * MT safe.
+ */
+gboolean
+gst_audio_ring_buffer_prepare_read (GstAudioRingBuffer * buf, gint * segment,
+ guint8 ** readptr, gint * len)
+{
+ guint8 *data;
+ gint segdone;
+
+ g_return_val_if_fail (GST_IS_AUDIO_RING_BUFFER (buf), FALSE);
+
+ if (buf->callback == NULL) {
+ /* push mode, fail when nothing is started */
+ if (g_atomic_int_get (&buf->state) != GST_AUDIO_RING_BUFFER_STATE_STARTED)
+ return FALSE;
+ }
+
+ g_return_val_if_fail (buf->memory != NULL, FALSE);
+ g_return_val_if_fail (segment != NULL, FALSE);
+ g_return_val_if_fail (readptr != NULL, FALSE);
+ g_return_val_if_fail (len != NULL, FALSE);
+
+ data = buf->memory;
+
+ /* get the position of the pointer */
+ segdone = g_atomic_int_get (&buf->segdone);
+
+ *segment = segdone % buf->spec.segtotal;
+ *len = buf->spec.segsize;
+ *readptr = data + *segment * *len;
+
+ GST_LOG ("prepare read from segment %d (real %d) @%p",
+ *segment, segdone, *readptr);
+
+ /* callback to fill the memory with data, for pull based
+ * scheduling. */
+ if (buf->callback)
+ buf->callback (buf, *readptr, *len, buf->cb_data);
+
+ return TRUE;
+}
+
+/**
+ * gst_audio_ring_buffer_advance:
+ * @buf: the #GstAudioRingBuffer to advance
+ * @advance: the number of segments written
+ *
+ * Subclasses should call this function to notify the ringbuffer that
+ * @advance segments have now been processed by the device.
+ *
+ * MT safe.
+ */
+void
+gst_audio_ring_buffer_advance (GstAudioRingBuffer * buf, guint advance)
+{
+ g_return_if_fail (GST_IS_AUDIO_RING_BUFFER (buf));
+
+ /* update counter */
+ g_atomic_int_add (&buf->segdone, advance);
+
+ /* the lock is already taken when the waiting flag is set,
+ * we grab the lock as well to make sure the waiter is actually
+ * waiting for the signal */
+ if (g_atomic_int_compare_and_exchange (&buf->waiting, 1, 0)) {
+ GST_OBJECT_LOCK (buf);
+ GST_DEBUG_OBJECT (buf, "signal waiter");
+ GST_AUDIO_RING_BUFFER_SIGNAL (buf);
+ GST_OBJECT_UNLOCK (buf);
+ }
+}
+
+/**
+ * gst_audio_ring_buffer_clear:
+ * @buf: the #GstAudioRingBuffer to clear
+ * @segment: the segment to clear
+ *
+ * Clear the given segment of the buffer with silence samples.
+ * This function is used by subclasses.
+ *
+ * MT safe.
+ */
+void
+gst_audio_ring_buffer_clear (GstAudioRingBuffer * buf, gint segment)
+{
+ guint8 *data;
+
+ g_return_if_fail (GST_IS_AUDIO_RING_BUFFER (buf));
+
+ /* no data means it's already cleared */
+ if (G_UNLIKELY (buf->memory == NULL))
+ return;
+
+ /* no empty_seg means it's not opened */
+ if (G_UNLIKELY (buf->empty_seg == NULL))
+ return;
+
+ segment %= buf->spec.segtotal;
+
+ data = buf->memory;
+ data += segment * buf->spec.segsize;
+
+ GST_LOG ("clear segment %d @%p", segment, data);
+
+ memcpy (data, buf->empty_seg, buf->spec.segsize);
+}
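gst_audio_ring_buffer_prepare_read(), gst_audio_ring_buffer_clear() and gst_audio_ring_buffer_advance() are typically combined in a subclass's device loop. A hedged sketch of such a loop, where device_write() stands in for whatever blocking write the audio backend provides:

static gboolean
write_one_segment (GstAudioRingBuffer * buf)
{
  gint segment, len;
  guint8 *readptr;

  if (!gst_audio_ring_buffer_prepare_read (buf, &segment, &readptr, &len))
    return FALSE;               /* ringbuffer not started */

  device_write (readptr, len);  /* assumption: blocking write to the device */

  /* reset the segment to silence so it is not played twice, then mark it
   * as processed so that waiting writers are woken up */
  gst_audio_ring_buffer_clear (buf, segment);
  gst_audio_ring_buffer_advance (buf, 1);

  return TRUE;
}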
+
+/**
+ * gst_audio_ring_buffer_may_start:
+ * @buf: the #GstAudioRingBuffer
+ * @allowed: the new value
+ *
+ * Tell the ringbuffer that it is allowed to start playback when
+ * the ringbuffer is filled with samples.
+ *
+ * MT safe.
+ *
+ * Since: 0.10.6
+ */
+void
+gst_audio_ring_buffer_may_start (GstAudioRingBuffer * buf, gboolean allowed)
+{
+ g_return_if_fail (GST_IS_AUDIO_RING_BUFFER (buf));
+
+ GST_LOG_OBJECT (buf, "may start: %d", allowed);
+ g_atomic_int_set (&buf->may_start, allowed);
+}
if (!result)
goto could_not_open;
- buf->data = gst_buffer_new_and_alloc (spec->segtotal * spec->segsize);
- memset (GST_BUFFER_DATA (buf->data), 0, GST_BUFFER_SIZE (buf->data));
+ buf->size = spec->segtotal * spec->segsize;
+ buf->memory = g_malloc0 (buf->size);
- abuf = GST_AUDIORING_BUFFER (buf);
+ abuf = GST_AUDIO_SRC_RING_BUFFER (buf);
abuf->running = TRUE;
+ /* FIXME: handle thread creation failure */
+ #if !GLIB_CHECK_VERSION (2, 31, 0)
src->thread =
g_thread_create ((GThreadFunc) audioringbuffer_thread_func, buf, TRUE,
NULL);
- GST_AUDIO_SRC_RING_BUFFER_WAIT (buf);
+ #else
+ src->thread = g_thread_try_new ("audiosrc-ringbuffer",
+ (GThreadFunc) audioringbuffer_thread_func, buf, NULL);
+ #endif
+
- GST_AUDIORING_BUFFER_WAIT (buf);
++ GST_AUDIO_RING_BUFFER_WAIT (buf);
return result;
#endif
#include "gstrtspconnection.h"
-#include "gstrtspbase64.h"
+ #include "gst/glib-compat-private.h"
+
union gst_sockaddr
{
struct sockaddr sa;
--- /dev/null
- * video_window_xid = GDK_WINDOW_XID (video_window->window);
+/* GStreamer Video Overlay interface
+ * Copyright (C) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * Copyright (C) 2011 Tim-Philipp Müller <tim@centricular.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+/**
+ * SECTION:gstvideooverlay
+ * @short_description: Interface for setting/getting a window system resource
+ * on elements supporting it to configure a window into which to render a
+ * video.
+ *
+ * <refsect2>
+ * <para>
+ * The #GstVideoOverlay interface is used for two main purposes:
+ * <itemizedlist>
+ * <listitem>
+ * <para>
+ * To get a grab on the Window where the video sink element is going to render.
+ * This is achieved by either being informed about the Window identifier that
+ * the video sink element generated, or by forcing the video sink element to use
+ * a specific Window identifier for rendering.
+ * </para>
+ * </listitem>
+ * <listitem>
+ * <para>
+ * To force a redrawing of the latest video frame the video sink element
+ * displayed on the Window. If the #GstPipeline is in #GST_STATE_PAUSED
+ * state, moving the Window around will damage its content. Application
+ * developers will want to handle the Expose events themselves and force the
+ * video sink element to refresh the Window's content.
+ * </para>
+ * </listitem>
+ * </itemizedlist>
+ * </para>
+ * <para>
+ * Using the Window created by the video sink is probably the simplest
+ * scenario; in some cases, though, it might not be flexible enough for
+ * application developers who need to catch events such as mouse moves and
+ * button clicks.
+ * </para>
+ * <para>
+ * Setting a specific Window identifier on the video sink element is the most
+ * flexible solution, but it has some issues: the application needs to set
+ * its Window identifier at the right time to avoid internal Window creation
+ * from the video sink element. To solve this issue a #GstMessage is posted on
+ * the bus to inform the application that it should set the Window identifier
+ * immediately. Here is an example of how to do that correctly:
+ * |[
+ * static GstBusSyncReply
+ * create_window (GstBus * bus, GstMessage * message, GstPipeline * pipeline)
+ * {
+ * // ignore anything but 'prepare-window-handle' element messages
+ * if (!gst_is_video_overlay_prepare_window_handle_message (message))
+ * return GST_BUS_PASS;
+ *
+ * // 'disp', 'root' and 'win' are assumed to be declared and set up
+ * // by the application elsewhere
+ * win = XCreateSimpleWindow (disp, root, 0, 0, 320, 240, 0, 0, 0);
+ *
+ * XSetWindowBackgroundPixmap (disp, win, None);
+ *
+ * XMapRaised (disp, win);
+ *
+ * XSync (disp, FALSE);
+ *
+ * gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (GST_MESSAGE_SRC (message)),
+ * win);
+ *
+ * gst_message_unref (message);
+ *
+ * return GST_BUS_DROP;
+ * }
+ * ...
+ * int
+ * main (int argc, char **argv)
+ * {
+ * ...
+ * bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
+ * gst_bus_set_sync_handler (bus, (GstBusSyncHandler) create_window, pipeline);
+ * ...
+ * }
+ * ]|
+ * </para>
+ * </refsect2>
+ * <refsect2>
+ * <title>Two basic usage scenarios</title>
+ * <para>
+ * There are two basic usage scenarios: in the simplest case, the application
+ * knows exactly what particular element is used for video output, which is
+ * usually the case when the application creates the videosink to use
+ * (e.g. #xvimagesink, #ximagesink, etc.) itself; in this case, the application
+ * can just create the videosink element, create and realize the window to
+ * render the video on and then call gst_video_overlay_set_window_handle() directly
+ * with the XID or native window handle, before starting up the pipeline.
+ * </para>
+ * <para>
+ * In the other and more common case, the application does not know in advance
+ * what GStreamer video sink element will be used for video output. This is
+ * usually the case when an element such as #autovideosink or #gconfvideosink
+ * is used. In this case, the video sink element itself is created
+ * asynchronously from a GStreamer streaming thread some time after the
+ * pipeline has been started up. When that happens, however, the video sink
+ * will need to know right then whether to render onto an already existing
+ * application window or whether to create its own window. This is when it
+ * posts a prepare-window-handle message, and that is also why this message needs
+ * to be handled in a sync bus handler which will be called from the streaming
+ * thread directly (because the video sink will need an answer right then).
+ * </para>
+ * <para>
+ * As response to the prepare-window-handle element message in the bus sync
+ * handler, the application may use gst_video_overlay_set_window_handle() to tell
+ * the video sink to render onto an existing window surface. At this point the
+ * application should already have obtained the window handle / XID, so it
+ * just needs to set it. It is generally not advisable to call any GUI toolkit
+ * functions or window system functions from the streaming thread in which the
+ * prepare-window-handle message is handled, because most GUI toolkits and
+ * windowing systems are not thread-safe at all and a lot of care would be
+ * required to co-ordinate the toolkit and window system calls of the
+ * different threads (Gtk+ users please note: prior to Gtk+ 2.18
+ * GDK_WINDOW_XID() was just a simple structure access, so generally fine to do
+ * within the bus sync handler; this macro was changed to a function call in
+ * Gtk+ 2.18 and later, which is likely to cause problems when called from a
+ * sync handler; see below for a better approach without GDK_WINDOW_XID()
+ * used in the callback).
+ * </para>
+ * </refsect2>
+ * <refsect2>
+ * <title>GstVideoOverlay and Gtk+</title>
+ * <para>
+ * |[
++ * #include <gst/video/videooverlay.h>
+ * #include <gtk/gtk.h>
+ * #ifdef GDK_WINDOWING_X11
+ * #include <gdk/gdkx.h> // for GDK_WINDOW_XID
+ * #endif
+ * ...
+ * static gulong video_window_xid = 0;
+ * ...
+ * static GstBusSyncReply
+ * bus_sync_handler (GstBus * bus, GstMessage * message, gpointer user_data)
+ * {
+ * // ignore anything but 'prepare-window-handle' element messages
+ * if (!gst_is_video_overlay_prepare_window_handle_message (message))
+ * return GST_BUS_PASS;
+ *
+ * if (video_window_xid != 0) {
+ * GstVideoOverlay *xoverlay;
+ *
+ * // GST_MESSAGE_SRC (message) will be the video sink element
+ * xoverlay = GST_VIDEO_OVERLAY (GST_MESSAGE_SRC (message));
+ * gst_video_overlay_set_window_handle (xoverlay, video_window_xid);
+ * } else {
+ * g_warning ("Should have obtained video_window_xid by now!");
+ * }
+ *
+ * gst_message_unref (message);
+ * return GST_BUS_DROP;
+ * }
+ * ...
+ * static void
+ * video_widget_realize_cb (GtkWidget * widget, gpointer data)
+ * {
+ * #if GTK_CHECK_VERSION(2,18,0)
+ * // This is here just for pedagogical purposes; GDK_WINDOW_XID will call
+ * // it as well in newer Gtk+ versions
+ * if (!gdk_window_ensure_native (widget->window))
+ * g_error ("Couldn't create native window needed for GstVideoOverlay!");
+ * #endif
+ *
+ * #ifdef GDK_WINDOWING_X11
- * gtk_widget_realize (window);
++ * video_window_xid = GDK_WINDOW_XID (gtk_widget_get_window (video_window));
+ * #endif
+ * }
+ * ...
+ * int
+ * main (int argc, char **argv)
+ * {
+ * GtkWidget *video_window;
+ * GtkWidget *app_window;
+ * ...
+ * app_window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
+ * ...
+ * video_window = gtk_drawing_area_new ();
+ * g_signal_connect (video_window, "realize",
+ * G_CALLBACK (video_widget_realize_cb), NULL);
+ * gtk_widget_set_double_buffered (video_window, FALSE);
+ * ...
+ * // usually the video_window will not be directly embedded into the
+ * // application window like this, but there will be many other widgets
+ * // and the video window will be embedded in one of them instead
+ * gtk_container_add (GTK_CONTAINER (app_window), video_window);
+ * ...
+ * // show the GUI
+ * gtk_widget_show_all (app_window);
+ *
+ * // realize window now so that the video window gets created and we can
+ * // obtain its XID before the pipeline is started up and the videosink
+ * // asks for the XID of the window to render onto
++ * gtk_widget_realize (video_window);
+ *
+ * // we should have the XID now
+ * g_assert (video_window_xid != 0);
+ * ...
+ * // set up sync handler for setting the xid once the pipeline is started
+ * bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
+ * gst_bus_set_sync_handler (bus, (GstBusSyncHandler) bus_sync_handler, NULL);
+ * gst_object_unref (bus);
+ * ...
+ * gst_element_set_state (pipeline, GST_STATE_PLAYING);
+ * ...
+ * }
+ * ]|
+ * </para>
+ * </refsect2>
+ * <refsect2>
+ * <title>GstVideoOverlay and Qt</title>
+ * <para>
+ * |[
+ * #include <glib.h>
+ * #include <gst/gst.h>
+ * #include <gst/video/videooverlay.h>
+ *
+ * #include <QApplication>
+ * #include <QTimer>
+ * #include <QWidget>
+ *
+ * int main(int argc, char *argv[])
+ * {
+ * if (!g_thread_supported ())
+ * g_thread_init (NULL);
+ *
+ * gst_init (&argc, &argv);
+ * QApplication app(argc, argv);
+ * app.connect(&app, SIGNAL(lastWindowClosed()), &app, SLOT(quit ()));
+ *
+ * // prepare the pipeline
+ *
+ * GstElement *pipeline = gst_pipeline_new ("xvoverlay");
+ * GstElement *src = gst_element_factory_make ("videotestsrc", NULL);
+ * GstElement *sink = gst_element_factory_make ("xvimagesink", NULL);
+ * gst_bin_add_many (GST_BIN (pipeline), src, sink, NULL);
+ * gst_element_link (src, sink);
+ *
+ * // prepare the ui
+ *
+ * QWidget window;
+ * window.resize(320, 240);
+ * window.show();
+ *
+ * WId xwinid = window.winId();
+ * gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (sink), xwinid);
+ *
+ * // run the pipeline
+ *
+ * GstStateChangeReturn sret = gst_element_set_state (pipeline,
+ * GST_STATE_PLAYING);
+ * if (sret == GST_STATE_CHANGE_FAILURE) {
+ * gst_element_set_state (pipeline, GST_STATE_NULL);
+ * gst_object_unref (pipeline);
+ * // Exit application
+ * QTimer::singleShot(0, QApplication::activeWindow(), SLOT(quit()));
+ * }
+ *
+ * int ret = app.exec();
+ *
+ * window.hide();
+ * gst_element_set_state (pipeline, GST_STATE_NULL);
+ * gst_object_unref (pipeline);
+ *
+ * return ret;
+ * }
+ * ]|
+ * </para>
+ * </refsect2>
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "videooverlay.h"
+
+GType
+gst_video_overlay_get_type (void)
+{
+ static GType gst_video_overlay_type = 0;
+
+ if (!gst_video_overlay_type) {
+ static const GTypeInfo gst_video_overlay_info = {
+ sizeof (GstVideoOverlayInterface),
+ NULL,
+ NULL,
+ NULL,
+ NULL,
+ NULL,
+ 0,
+ 0,
+ NULL,
+ };
+
+ gst_video_overlay_type = g_type_register_static (G_TYPE_INTERFACE,
+ "GstVideoOverlay", &gst_video_overlay_info, 0);
+ }
+
+ return gst_video_overlay_type;
+}
+
+/**
+ * gst_video_overlay_set_window_handle:
+ * @overlay: a #GstVideoOverlay to set the window on.
+ * @handle: a handle referencing the window.
+ *
+ * This will call the video overlay's set_window_handle method. You
+ * should use this method to tell a video overlay to display video output in a
+ * specific window (e.g. an XWindow on X11). Passing 0 as the @handle will
+ * tell the overlay to stop using that window and create an internal one.
+ *
+ * Since: 0.10.31
+ */
+void
+gst_video_overlay_set_window_handle (GstVideoOverlay * overlay, guintptr handle)
+{
+ GstVideoOverlayInterface *iface;
+
+ g_return_if_fail (overlay != NULL);
+ g_return_if_fail (GST_IS_VIDEO_OVERLAY (overlay));
+
+ iface = GST_VIDEO_OVERLAY_GET_INTERFACE (overlay);
+
+ if (iface->set_window_handle) {
+ iface->set_window_handle (overlay, handle);
+ }
+}
+
+/**
+ * gst_video_overlay_got_window_handle:
+ * @overlay: a #GstVideoOverlay which got a window
+ * @handle: a platform-specific handle referencing the window
+ *
+ * This will post a "have-window-handle" element message on the bus.
+ *
+ * This function should only be used by video overlay plugin developers.
+ */
+void
+gst_video_overlay_got_window_handle (GstVideoOverlay * overlay, guintptr handle)
+{
+ GstStructure *s;
+ GstMessage *msg;
+
+ g_return_if_fail (overlay != NULL);
+ g_return_if_fail (GST_IS_VIDEO_OVERLAY (overlay));
+
+ GST_LOG_OBJECT (GST_OBJECT (overlay), "window_handle = %p", (gpointer)
+ handle);
+ s = gst_structure_new ("have-window-handle",
+ "window-handle", G_TYPE_UINT64, (guint64) handle, NULL);
+ msg = gst_message_new_element (GST_OBJECT (overlay), s);
+ gst_element_post_message (GST_ELEMENT (overlay), msg);
+}
+
+/**
+ * gst_video_overlay_prepare_window_handle:
+ * @overlay: a #GstVideoOverlay which does not yet have a window handle set
+ *
+ * This will post a "prepare-window-handle" element message on the bus
+ * to give applications an opportunity to call
+ * gst_video_overlay_set_window_handle() before a plugin creates its own
+ * window.
+ *
+ * This function should only be used by video overlay plugin developers.
+ */
+void
+gst_video_overlay_prepare_window_handle (GstVideoOverlay * overlay)
+{
+ GstStructure *s;
+ GstMessage *msg;
+
+ g_return_if_fail (overlay != NULL);
+ g_return_if_fail (GST_IS_VIDEO_OVERLAY (overlay));
+
+ GST_LOG_OBJECT (GST_OBJECT (overlay), "prepare window handle");
+ s = gst_structure_new_empty ("prepare-window-handle");
+ msg = gst_message_new_element (GST_OBJECT (overlay), s);
+ gst_element_post_message (GST_ELEMENT (overlay), msg);
+}
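On the plugin side, gst_video_overlay_prepare_window_handle() and gst_video_overlay_got_window_handle() are usually used together. A hedged sketch, in which the sink type, its window_handle field and create_window() are assumptions and not part of this API:

/* inside a hypothetical video sink, before rendering the first frame */
if (sink->window_handle == 0) {
  /* give the application a chance to provide a window; it may call
   * gst_video_overlay_set_window_handle() from its sync bus handler */
  gst_video_overlay_prepare_window_handle (GST_VIDEO_OVERLAY (sink));
}
if (sink->window_handle == 0) {
  /* no window was provided, so create our own and announce it */
  sink->window_handle = create_window (sink);
  gst_video_overlay_got_window_handle (GST_VIDEO_OVERLAY (sink),
      sink->window_handle);
}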
+
+/**
+ * gst_video_overlay_expose:
+ * @overlay: a #GstVideoOverlay to expose.
+ *
+ * Tell an overlay that it has been exposed. This will redraw the current frame
+ * in the drawable even if the pipeline is PAUSED.
+ */
+void
+gst_video_overlay_expose (GstVideoOverlay * overlay)
+{
+ GstVideoOverlayInterface *iface;
+
+ g_return_if_fail (overlay != NULL);
+ g_return_if_fail (GST_IS_VIDEO_OVERLAY (overlay));
+
+ iface = GST_VIDEO_OVERLAY_GET_INTERFACE (overlay);
+
+ if (iface->expose) {
+ iface->expose (overlay);
+ }
+}
+
+/**
+ * gst_video_overlay_handle_events:
+ * @overlay: a #GstVideoOverlay
+ * @handle_events: a #gboolean indicating if events should be handled or not.
+ *
+ * Tell an overlay that it should handle events from the window system. These
+ * events are forwarded upstream as navigation events. In some window systems,
+ * events are not propagated in the window hierarchy if a client is listening
+ * for them. This method allows you to disable event handling completely
+ * on the video overlay.
+ *
+ * Since: 0.10.12
+ */
+void
+gst_video_overlay_handle_events (GstVideoOverlay * overlay,
+ gboolean handle_events)
+{
+ GstVideoOverlayInterface *iface;
+
+ g_return_if_fail (overlay != NULL);
+ g_return_if_fail (GST_IS_VIDEO_OVERLAY (overlay));
+
+ iface = GST_VIDEO_OVERLAY_GET_INTERFACE (overlay);
+
+ if (iface->handle_events) {
+ iface->handle_events (overlay, handle_events);
+ }
+}
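For example, an application that prefers to receive pointer and keyboard events on its own window, instead of having the sink turn them into navigation events, might simply do the following (assuming sink implements the interface):

gst_video_overlay_handle_events (GST_VIDEO_OVERLAY (sink), FALSE);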
+
+/**
+ * gst_video_overlay_set_render_rectangle:
+ * @overlay: a #GstVideoOverlay
+ * @x: the horizontal offset of the render area inside the window
+ * @y: the vertical offset of the render area inside the window
+ * @width: the width of the render area inside the window
+ * @height: the height of the render area inside the window
+ *
+ * Configure a subregion as a video target within the window set by
+ * gst_video_overlay_set_window_handle(). If this is not used or not supported,
+ * the video will fill the whole area of the window set as the overlay.
+ * By specifying the rectangle, the video can be overlaid onto a specific region
+ * of that window only. After setting the new rectangle one should call
+ * gst_video_overlay_expose() to force a redraw. To unset the region pass -1 for
+ * the @width and @height parameters.
+ *
+ * This method is needed for non-fullscreen video overlay in UI toolkits that
+ * do not support subwindows.
+ *
+ * Returns: %FALSE if not supported by the sink.
+ *
+ * Since: 0.10.29
+ */
+gboolean
+gst_video_overlay_set_render_rectangle (GstVideoOverlay * overlay,
+ gint x, gint y, gint width, gint height)
+{
+ GstVideoOverlayInterface *iface;
+
+ g_return_val_if_fail (overlay != NULL, FALSE);
+ g_return_val_if_fail (GST_IS_VIDEO_OVERLAY (overlay), FALSE);
+ g_return_val_if_fail ((width == -1 && height == -1) ||
+ (width > 0 && height > 0), FALSE);
+
+ iface = GST_VIDEO_OVERLAY_GET_INTERFACE (overlay);
+
+ if (iface->set_render_rectangle) {
+ iface->set_render_rectangle (overlay, x, y, width, height);
+ return TRUE;
+ }
+ return FALSE;
+}
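A short usage sketch, assuming overlay is a video sink that supports render rectangles and that its window is 640x480:

/* render only into the lower-right quarter of the window */
if (gst_video_overlay_set_render_rectangle (overlay, 320, 240, 320, 240))
  gst_video_overlay_expose (overlay);   /* force a redraw of the new region */

/* later: go back to filling the whole window */
gst_video_overlay_set_render_rectangle (overlay, 0, 0, -1, -1);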
+
+/**
+ * gst_is_video_overlay_prepare_window_handle_message:
+ * @msg: a #GstMessage
+ *
+ * Convenience function to check if the given message is a
+ * "prepare-window-handle" message from a #GstVideoOverlay.
+ *
+ * Since: 0.11.2
+ *
+ * Returns: whether @msg is a "prepare-window-handle" message
+ */
+gboolean
+gst_is_video_overlay_prepare_window_handle_message (GstMessage * msg)
+{
+ g_return_val_if_fail (msg != NULL, FALSE);
+
+ if (GST_MESSAGE_TYPE (msg) != GST_MESSAGE_ELEMENT)
+ return FALSE;
+
+ return gst_message_has_name (msg, "prepare-window-handle");
+}
}
static gboolean
- _is_raw_video_pad (GstPad * pad)
+ _is_video_pad (GstPad * pad, gboolean * hw_accelerated)
{
- GstPad *peer = gst_pad_get_peer (pad);
- GstCaps *caps;
+ GstCaps *caps = gst_pad_get_current_caps (pad);
- gboolean raw;
+ gboolean ret;
+ const gchar *name;
+
- if (peer) {
- caps = gst_pad_get_negotiated_caps (peer);
- if (!caps) {
- caps = gst_pad_get_caps_reffed (peer);
- }
- gst_object_unref (peer);
- } else {
- caps = gst_pad_get_caps_reffed (pad);
- }
-
-
+ name = gst_structure_get_name (gst_caps_get_structure (caps, 0));
+ if (g_str_has_prefix (name, "video/x-raw-")) {
+ ret = TRUE;
+ if (hw_accelerated)
+ *hw_accelerated = FALSE;
- raw = _is_raw_video (gst_caps_get_structure (caps, 0));
+ } else if (g_str_has_prefix (name, "video/x-surface")) {
+ ret = TRUE;
+ if (hw_accelerated)
+ *hw_accelerated = TRUE;
+ } else {
+
+ ret = FALSE;
+ if (hw_accelerated)
+ *hw_accelerated = FALSE;
+ }
gst_caps_unref (caps);
/* Send segments to the parser/overlay if necessary. These are not sent
* outside this element because of the proxy pad event function */
if (self->video_segment.format != GST_FORMAT_UNDEFINED) {
- GstEvent *event1, *event2;
+ GstEvent *event1;
- sink = gst_element_get_static_pad (self->pre_colorspace, "sink");
- if (G_UNLIKELY (!sink)) {
- GST_WARNING_OBJECT (self, "Can't get sink pad from " COLORSPACE);
- continue;
+ if (is_video) {
+ sink = gst_element_get_static_pad (self->pre_colorspace, "sink");
+ if (G_UNLIKELY (!sink)) {
+ GST_WARNING_OBJECT (self, "Can't get sink pad from " COLORSPACE);
+ continue;
+ }
+ } else {
+ sink = gst_element_get_static_pad (self->overlay, "video_sink");
+ if (G_UNLIKELY (!sink)) {
+ GST_WARNING_OBJECT (self, "Can't get sink pad from textoverlay");
+ continue;
+ }
}
- _generate_update_newsegment_event (&self->video_segment, &event1,
- &event2);
- GST_DEBUG_OBJECT (self,
- "Pushing video accumulate newsegment event: %" GST_PTR_FORMAT,
- event1->structure);
+ _generate_update_segment_event (&self->video_segment, &event1);
GST_DEBUG_OBJECT (self,
- "Pushing video update newsegment event: %" GST_PTR_FORMAT,
- event2->structure);
+ "Pushing video segment event: %" GST_PTR_FORMAT, event1);
gst_pad_send_event (sink, event1);
- gst_pad_send_event (sink, event2);
gst_object_unref (sink);
}
#include "gstplay-marshal.h"
#include "gstplay-enum.h"
#include "gstrawcaps.h"
+#include "gstplayback.h"
+ #include "gst/glib-compat-private.h"
+
#define GST_TYPE_URI_DECODE_BIN \
(gst_uri_decode_bin_get_type())
#define GST_URI_DECODE_BIN(obj) \
base_class->transform_caps =
GST_DEBUG_FUNCPTR (gst_video_rate_transform_caps);
base_class->transform_ip = GST_DEBUG_FUNCPTR (gst_video_rate_transform_ip);
- base_class->prepare_output_buffer =
- GST_DEBUG_FUNCPTR (gst_video_rate_prepare_output_buffer);
- base_class->event = GST_DEBUG_FUNCPTR (gst_video_rate_event);
+ base_class->sink_event = GST_DEBUG_FUNCPTR (gst_video_rate_sink_event);
base_class->start = GST_DEBUG_FUNCPTR (gst_video_rate_start);
+ base_class->stop = GST_DEBUG_FUNCPTR (gst_video_rate_stop);
base_class->fixate_caps = GST_DEBUG_FUNCPTR (gst_video_rate_fixate_caps);
base_class->query = GST_DEBUG_FUNCPTR (gst_video_rate_query);
/* Debugging category */
#include <gst/gstinfo.h>
+ #include "gst/glib-compat-private.h"
+
GST_DEBUG_CATEGORY_EXTERN (gst_debug_ximagesink);
+GST_DEBUG_CATEGORY_EXTERN (GST_CAT_PERFORMANCE);
#define GST_CAT_DEFAULT gst_debug_ximagesink
typedef struct
/* Debugging category */
#include <gst/gstinfo.h>
-GST_DEBUG_CATEGORY_STATIC (gst_debug_xvimagesink);
+ #include "gst/glib-compat-private.h"
+
+GST_DEBUG_CATEGORY_EXTERN (gst_debug_xvimagesink);
+GST_DEBUG_CATEGORY_EXTERN (GST_CAT_PERFORMANCE);
#define GST_CAT_DEFAULT gst_debug_xvimagesink
-GST_DEBUG_CATEGORY_STATIC (GST_CAT_PERFORMANCE);
typedef struct
{
GIO_SUBDIRS = gio
endif
- SUBDIRS = app audio dynamic $(FT2_SUBDIRS) $(GIO_SUBDIRS) overlay playrec encoding
-SUBDIRS = app audio dynamic fft $(FT2_SUBDIRS) $(GIO_SUBDIRS) overlay playrec v4l encoding
--
- DIST_SUBDIRS = app audio dynamic gio overlay seek snapshot playrec encoding
-DIST_SUBDIRS = app audio dynamic fft gio overlay seek snapshot playrec v4l encoding
++SUBDIRS = app audio dynamic fft $(FT2_SUBDIRS) $(GIO_SUBDIRS) overlay playrec encoding
++DIST_SUBDIRS = app audio dynamic fft gio overlay seek snapshot playrec encoding
include $(top_srcdir)/common/parallel-subdirs.mak
--- /dev/null
+/* GStreamer
+ * Copyright (C) <2010> Stefan Kost <ensonic@users.sf.net>
+ *
+ * gtk-videooverlay: demonstrate overlay handling using gtk
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <glib.h>
+#include <gdk/gdkx.h>
+#include <gtk/gtk.h>
+
+#include <gst/gst.h>
+#include <gst/video/videooverlay.h>
+
+#include <string.h>
+
+static void
+window_closed (GtkWidget * widget, GdkEvent * event, gpointer user_data)
+{
+ GstElement *pipeline = user_data;
+
+ gtk_widget_hide (widget);
+ gst_element_set_state (pipeline, GST_STATE_NULL);
+ gtk_main_quit ();
+}
+
+/* slightly convoluted way to find a working video sink that's not a bin,
+ * one could use autovideosink from gst-plugins-good instead
+ */
+static GstElement *
+find_video_sink (void)
+{
+ GstStateChangeReturn sret;
+ GstElement *sink;
+
+ if ((sink = gst_element_factory_make ("xvimagesink", NULL))) {
+ sret = gst_element_set_state (sink, GST_STATE_READY);
+ if (sret == GST_STATE_CHANGE_SUCCESS)
+ return sink;
+
+ gst_element_set_state (sink, GST_STATE_NULL);
+ gst_object_unref (sink);
+ }
+
+ if ((sink = gst_element_factory_make ("ximagesink", NULL))) {
+ sret = gst_element_set_state (sink, GST_STATE_READY);
+ if (sret == GST_STATE_CHANGE_SUCCESS)
+ return sink;
+
+ gst_element_set_state (sink, GST_STATE_NULL);
+ gst_object_unref (sink);
+ }
+
+ if (strcmp (DEFAULT_VIDEOSINK, "xvimagesink") == 0 ||
+ strcmp (DEFAULT_VIDEOSINK, "ximagesink") == 0)
+ return NULL;
+
+ if ((sink = gst_element_factory_make (DEFAULT_VIDEOSINK, NULL))) {
+ if (GST_IS_BIN (sink)) {
+ gst_object_unref (sink);
+ return NULL;
+ }
+
+ sret = gst_element_set_state (sink, GST_STATE_READY);
+ if (sret == GST_STATE_CHANGE_SUCCESS)
+ return sink;
+
+ gst_element_set_state (sink, GST_STATE_NULL);
+ gst_object_unref (sink);
+ }
+ return NULL;
+}
+
+int
+main (int argc, char **argv)
+{
+ GdkWindow *video_window_xwindow;
+ GtkWidget *window, *video_window;
+ GstElement *pipeline, *src, *sink;
+ gulong embed_xid;
+ GstStateChangeReturn sret;
+
++#if !GLIB_CHECK_VERSION (2, 31, 0)
+ if (!g_thread_supported ())
+ g_thread_init (NULL);
++#endif
+
+ gst_init (&argc, &argv);
+ gtk_init (&argc, &argv);
+
+ /* prepare the pipeline */
+
+ pipeline = gst_pipeline_new ("xvoverlay");
+ src = gst_element_factory_make ("videotestsrc", NULL);
+ sink = find_video_sink ();
+
+ if (sink == NULL)
+ g_error ("Couldn't find a working video sink.");
+
+ gst_bin_add_many (GST_BIN (pipeline), src, sink, NULL);
+ gst_element_link (src, sink);
+
+ /* prepare the ui */
+
+ window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
+ g_signal_connect (G_OBJECT (window), "delete-event",
+ G_CALLBACK (window_closed), (gpointer) pipeline);
+ gtk_window_set_default_size (GTK_WINDOW (window), 320, 240);
+ gtk_window_set_title (GTK_WINDOW (window), "GstXOverlay Gtk+ demo");
+
+ video_window = gtk_drawing_area_new ();
+ gtk_widget_set_double_buffered (video_window, FALSE);
+ gtk_container_add (GTK_CONTAINER (window), video_window);
+ gtk_container_set_border_width (GTK_CONTAINER (window), 16);
+
+ gtk_widget_show_all (window);
+ gtk_widget_realize (window);
+
+ video_window_xwindow = gtk_widget_get_window (video_window);
+ embed_xid = GDK_WINDOW_XID (video_window_xwindow);
+ gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (sink), embed_xid);
+
+ /* run the pipeline */
+
+ sret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
+ if (sret == GST_STATE_CHANGE_FAILURE)
+ gst_element_set_state (pipeline, GST_STATE_NULL);
+ else
+ gtk_main ();
+
+ gst_object_unref (pipeline);
+ return 0;
+}
--- /dev/null
+/* GStreamer
+ * Copyright (C) <2010> Stefan Kost <ensonic@users.sf.net>
+ *
+ * qt-xoverlay: demonstrate overlay handling using qt
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <glib.h>
+#include <gst/gst.h>
+#include <gst/video/videooverlay.h>
+
+#include <QApplication>
+#include <QTimer>
+#include <QWidget>
+
+#include <string.h>          /* for strcmp() */
+
+/* slightly convoluted way to find a working video sink that's not a bin,
+ * one could use autovideosink from gst-plugins-good instead
+ */
+static GstElement *
+find_video_sink (void)
+{
+ GstStateChangeReturn sret;
+ GstElement *sink;
+
+ if ((sink = gst_element_factory_make ("xvimagesink", NULL))) {
+ sret = gst_element_set_state (sink, GST_STATE_READY);
+ if (sret == GST_STATE_CHANGE_SUCCESS)
+ return sink;
+
+ gst_element_set_state (sink, GST_STATE_NULL);
+ gst_object_unref (sink);
+ }
+
+ if ((sink = gst_element_factory_make ("ximagesink", NULL))) {
+ sret = gst_element_set_state (sink, GST_STATE_READY);
+ if (sret == GST_STATE_CHANGE_SUCCESS)
+ return sink;
+
+ gst_element_set_state (sink, GST_STATE_NULL);
+ gst_object_unref (sink);
+ }
+
+ if (strcmp (DEFAULT_VIDEOSINK, "xvimagesink") == 0 ||
+ strcmp (DEFAULT_VIDEOSINK, "ximagesink") == 0)
+ return NULL;
+
+ if ((sink = gst_element_factory_make (DEFAULT_VIDEOSINK, NULL))) {
+ if (GST_IS_BIN (sink)) {
+ gst_object_unref (sink);
+ return NULL;
+ }
+
+ sret = gst_element_set_state (sink, GST_STATE_READY);
+ if (sret == GST_STATE_CHANGE_SUCCESS)
+ return sink;
+
+ gst_element_set_state (sink, GST_STATE_NULL);
+ gst_object_unref (sink);
+ }
+ return NULL;
+}
+
+int main(int argc, char *argv[])
+{
++#if !GLIB_CHECK_VERSION (2, 31, 0)
+ if (!g_thread_supported ())
+ g_thread_init (NULL);
++#endif
+
+ gst_init (&argc, &argv);
+ QApplication app(argc, argv);
+ app.connect(&app, SIGNAL(lastWindowClosed()), &app, SLOT(quit ()));
+
+ /* prepare the pipeline */
+
+ GstElement *pipeline = gst_pipeline_new ("xvoverlay");
+ GstElement *src = gst_element_factory_make ("videotestsrc", NULL);
+ GstElement *sink = find_video_sink ();
+
+ if (sink == NULL)
+ g_error ("Couldn't find a working video sink.");
+
+ gst_bin_add_many (GST_BIN (pipeline), src, sink, NULL);
+ gst_element_link (src, sink);
+
+ /* prepare the ui */
+
+ QWidget window;
+ window.resize(320, 240);
+ window.setWindowTitle("GstXOverlay Qt demo");
+ window.show();
+
+ WId xwinid = window.winId();
+ gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (sink), xwinid);
+
+ /* run the pipeline */
+
+ GstStateChangeReturn sret = gst_element_set_state (pipeline,
+ GST_STATE_PLAYING);
+ if (sret == GST_STATE_CHANGE_FAILURE) {
+ gst_element_set_state (pipeline, GST_STATE_NULL);
+ gst_object_unref (pipeline);
+ /* Exit application */
+ QTimer::singleShot(0, QApplication::activeWindow(), SLOT(quit()));
+ }
+
+ int ret = app.exec();
+
+ window.hide();
+ gst_element_set_state (pipeline, GST_STATE_NULL);
+ gst_object_unref (pipeline);
+
+ return ret;
+}
--- /dev/null
+/* GStreamer
+ * Copyright (C) <2008> Stefan Kost <ensonic@users.sf.net>
+ *
+ * test-videooverlay: test videooverlay custom event handling and subregions
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <stdlib.h>
+#include <string.h>
+#include <math.h>
+
+#include <glib.h>
+#include <gdk/gdkx.h>
+#include <gtk/gtk.h>
+
+#include <gst/gst.h>
+#include <gst/video/videooverlay.h>
+#include <gst/video/gstvideosink.h>
+
+static struct
+{
+ gint w, h;
+ GstVideoOverlay *overlay;
+ GtkWidget *widget;
+ gdouble a, p;
+ GstVideoRectangle rect;
+ gboolean running;
+} anim_state;
+
+static gboolean verbose = FALSE;
+
+static gboolean
+animate_render_rect (gpointer user_data)
+{
+ if (anim_state.running) {
+ GstVideoRectangle *r = &anim_state.rect;
+ gdouble s = sin (3.0 * anim_state.a);
+ gdouble c = cos (2.0 * anim_state.a);
+
+ anim_state.a += anim_state.p;
+ if (anim_state.a > (G_PI + G_PI))
+ anim_state.a -= (G_PI + G_PI);
+
+ r->w = anim_state.w / 2;
+ r->x = (r->w - (r->w / 2)) + c * (r->w / 2);
+ r->h = anim_state.h / 2;
+ r->y = (r->h - (r->h / 2)) + s * (r->h / 2);
+
+ gst_video_overlay_set_render_rectangle (anim_state.overlay, r->x, r->y,
+ r->w, r->h);
+ gtk_widget_queue_draw (anim_state.widget);
+ }
+ return TRUE;
+}
+
+static gboolean
+handle_resize_cb (GtkWidget * widget, GdkEventConfigure * event,
+ gpointer user_data)
+{
+ GtkAllocation allocation;
+
+ gtk_widget_get_allocation (widget, &allocation);
+
+ if (verbose) {
+ g_print ("resize(%p): %dx%d\n", widget, allocation.width,
+ allocation.height);
+ }
+ anim_state.w = allocation.width;
+ anim_state.h = allocation.height;
+ animate_render_rect (NULL);
+
+ return FALSE;
+}
+
+static gboolean
+handle_draw_cb (GtkWidget * widget, cairo_t * cr, gpointer user_data)
+{
+ GstVideoRectangle *r = &anim_state.rect;
+ GtkStyle *style;
+ int width, height;
+
+ width = gtk_widget_get_allocated_width (widget);
+ height = gtk_widget_get_allocated_height (widget);
+
+ style = gtk_widget_get_style (widget);
+
+ gdk_cairo_set_source_color (cr, &style->bg[GTK_STATE_NORMAL]);
+
+ /* we should only redraw outside of the video rect! */
+ cairo_rectangle (cr, 0, 0, r->x, height);
+ cairo_rectangle (cr, r->x + r->w, 0, width - (r->x + r->w), height);
+
+ cairo_rectangle (cr, 0, 0, width, r->y);
+ cairo_rectangle (cr, 0, r->y + r->h, width, height - (r->y + r->h));
+
+ cairo_fill (cr);
+
+ if (verbose) {
+ g_print ("draw(%p)\n", widget);
+ }
+ gst_video_overlay_expose (anim_state.overlay);
+ return FALSE;
+}
+
+static void
+window_closed (GtkWidget * widget, GdkEvent * event, gpointer user_data)
+{
+ GstElement *pipeline = user_data;
+
+ if (verbose) {
+ g_print ("stopping\n");
+ }
+ anim_state.running = FALSE;
+ gtk_widget_hide (widget);
+ gst_element_set_state (pipeline, GST_STATE_NULL);
+ gtk_main_quit ();
+}
+
+gint
+main (gint argc, gchar ** argv)
+{
+ GdkWindow *video_window_xwindow;
+ GtkWidget *window, *video_window;
+ GstElement *pipeline, *src, *sink;
+ GstStateChangeReturn sret;
+ gulong embed_xid = 0;
+ gboolean force_aspect = FALSE, draw_borders = FALSE;
+
++#if !GLIB_CHECK_VERSION (2, 31, 0)
+ if (!g_thread_supported ())
+ g_thread_init (NULL);
++#endif
+
+ gst_init (&argc, &argv);
+ gtk_init (&argc, &argv);
+
+ if (argc) {
+ gint arg;
+ for (arg = 0; arg < argc; arg++) {
+ if (!strcmp (argv[arg], "-a"))
+ force_aspect = TRUE;
+ else if (!strcmp (argv[arg], "-b"))
+ draw_borders = TRUE;
+ else if (!strcmp (argv[arg], "-v"))
+ verbose = TRUE;
+ }
+ }
+
+ /* prepare the pipeline */
+
+ pipeline = gst_pipeline_new ("xvoverlay");
+ src = gst_element_factory_make ("videotestsrc", NULL);
+ sink = gst_element_factory_make ("xvimagesink", NULL);
+ gst_bin_add_many (GST_BIN (pipeline), src, sink, NULL);
+ gst_element_link (src, sink);
+
+ g_object_set (G_OBJECT (sink), "handle-events", FALSE,
+ "force-aspect-ratio", force_aspect, "draw-borders", draw_borders, NULL);
+
+ /* prepare the ui */
+
+ window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
+ g_signal_connect (G_OBJECT (window), "delete-event",
+ G_CALLBACK (window_closed), (gpointer) pipeline);
+ gtk_window_set_default_size (GTK_WINDOW (window), 320, 240);
+
+ video_window = gtk_drawing_area_new ();
+ gtk_widget_set_double_buffered (video_window, FALSE);
+ gtk_container_add (GTK_CONTAINER (window), video_window);
+
+ /* show the gui and play */
+ gtk_widget_show_all (window);
+
+ /* realize window now so that the video window gets created and we can
+ * obtain its XID before the pipeline is started up and the videosink
+ * asks for the XID of the window to render onto */
+ gtk_widget_realize (window);
+
+ video_window_xwindow = gtk_widget_get_window (video_window);
+ embed_xid = GDK_WINDOW_XID (video_window_xwindow);
+ if (verbose) {
+ g_print ("Window realize: got XID %lu\n", embed_xid);
+ }
+
+ /* we know what the video sink is in this case (xvimagesink), so we can
+ * just set it directly here now (instead of waiting for a
+ * prepare-window-handle element message in a sync bus handler and setting
+ * it there) */
+ gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (sink), embed_xid);
+
+ anim_state.overlay = GST_VIDEO_OVERLAY (sink);
+ anim_state.widget = video_window;
+ anim_state.w = 320;
+ anim_state.h = 240;
+ anim_state.a = 0.0;
+ anim_state.p = (G_PI + G_PI) / 200.0;
+
+ handle_resize_cb (video_window, NULL, sink);
+ g_signal_connect (video_window, "configure-event",
+ G_CALLBACK (handle_resize_cb), NULL);
+ g_signal_connect (video_window, "draw", G_CALLBACK (handle_draw_cb), NULL);
+
+ g_timeout_add (50, (GSourceFunc) animate_render_rect, NULL);
+
+ /* run the pipeline */
+ sret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
+ if (sret == GST_STATE_CHANGE_FAILURE)
+ gst_element_set_state (pipeline, GST_STATE_NULL);
+ else {
+ anim_state.running = TRUE;
+ gtk_main ();
+ }
+
+ gst_object_unref (pipeline);
+ return 0;
+}