--- /dev/null
+++ b/ext/pulse/meson.build
+ pulse_sources = [
+ 'gstpulseelement.c',
+ 'plugin.c',
+ 'pulsesink.c',
+ 'pulsesrc.c',
+ 'pulsedeviceprovider.c',
+ 'pulseutil.c',
+ ]
+
+ libpulse_dep = dependency('libpulse', version : '>=2.0', required : get_option('pulse'))
+
+ if libpulse_dep.found()
+ gstpulse = library('gstpulseaudio',
+ pulse_sources,
+ c_args : gst_plugins_good_args,
+ include_directories : [configinc, libsinc],
- dependencies : [gst_dep, gstbase_dep, gstaudio_dep, gstpbutils_dep, libpulse_dep],
++ dependencies : [gst_dep, gstbase_dep, gstaudio_dep, gstpbutils_dep, libpulse_dep, gio_dep],
+ install : true,
+ install_dir : plugins_install_dir,
+ )
+ pkgconfig.generate(gstpulse, install_dir : plugins_pkgconfig_install_dir)
+ plugins += [gstpulse]
+ endif
--- /dev/null
+++ b/ext/pulse/pulsesink.c
+ /*-*- Mode: C; c-basic-offset: 2 -*-*/
+
+ /* GStreamer pulseaudio plugin
+ *
+ * Copyright (c) 2004-2008 Lennart Poettering
+ * (c) 2009 Wim Taymans
+ *
+ * gst-pulse is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of the
+ * License, or (at your option) any later version.
+ *
+ * gst-pulse is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with gst-pulse; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
+ * USA.
+ */
+
+ /**
+ * SECTION:element-pulsesink
+ * @title: pulsesink
+ * @see_also: pulsesrc
+ *
+ * This element outputs audio to a
+ * [PulseAudio sound server](http://www.pulseaudio.org).
+ *
+ * ## Example pipelines
+ * |[
+ * gst-launch-1.0 -v filesrc location=sine.ogg ! oggdemux ! vorbisdec ! audioconvert ! audioresample ! pulsesink
+ * ]| Play an Ogg/Vorbis file.
+ * |[
+ * gst-launch-1.0 -v audiotestsrc ! audioconvert ! volume volume=0.4 ! pulsesink
+ * ]| Play a 440Hz sine wave.
+ * |[
+ * gst-launch-1.0 -v audiotestsrc ! pulsesink stream-properties="props,media.title=test"
+ * ]| Play a sine wave and set a stream property. The property can be checked
+ * with "pactl list".
+ *
+ */
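The stream-properties example above can also be set from application code. A
minimal sketch (error handling omitted; "props" is just the structure name the
gst-launch serialization uses):

    /* Set the same media.title property on pulsesink from C. */
    GstElement *sink = gst_element_factory_make ("pulsesink", NULL);
    GstStructure *props = gst_structure_new ("props",
        "media.title", G_TYPE_STRING, "test", NULL);

    g_object_set (sink, "stream-properties", props, NULL);
    gst_structure_free (props);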
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <string.h>
+ #include <stdio.h>
+
+ #include <gst/base/gstbasesink.h>
+ #include <gst/gsttaglist.h>
+ #include <gst/audio/audio.h>
+ #include <gst/gst-i18n-plugin.h>
+
+ #include <gst/pbutils/pbutils.h> /* only used for GST_PLUGINS_BASE_VERSION_* */
+
+ #include <gst/glib-compat-private.h>
+
+ #include "gstpulseelements.h"
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
++#include <vconf.h>
++#endif /* __TIZEN__ && PCM_DUMP_ENABLE */
+ #include "pulsesink.h"
+ #include "pulseutil.h"
+
+ GST_DEBUG_CATEGORY_EXTERN (pulse_debug);
+ #define GST_CAT_DEFAULT pulse_debug
+
+ #define DEFAULT_SERVER NULL
+ #define DEFAULT_DEVICE NULL
+ #define DEFAULT_CURRENT_DEVICE NULL
+ #define DEFAULT_DEVICE_NAME NULL
+ #define DEFAULT_VOLUME 1.0
+ #define DEFAULT_MUTE FALSE
+ #define MAX_VOLUME 10.0
++#ifdef __TIZEN__
++#define DEFAULT_AUDIO_LATENCY "mid"
++#define DEFAULT_AUTO_RENDER_DELAY FALSE
++#endif /* __TIZEN__ */
+
+ enum
+ {
+ PROP_0,
+ PROP_SERVER,
+ PROP_DEVICE,
+ PROP_CURRENT_DEVICE,
+ PROP_DEVICE_NAME,
+ PROP_VOLUME,
+ PROP_MUTE,
+ PROP_CLIENT_NAME,
+ PROP_STREAM_PROPERTIES,
++#ifdef __TIZEN__
++ PROP_AUDIO_LATENCY,
++ PROP_AUTO_RENDER_DELAY,
++#endif /* __TIZEN__ */
+ PROP_LAST
+ };
+
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
++#define GST_PULSESINK_DUMP_VCONF_KEY "memory/private/sound/pcm_dump"
++#define GST_PULSESINK_DUMP_INPUT_PATH_PREFIX "/tmp/dump_pulsesink_in_"
++#define GST_PULSESINK_DUMP_OUTPUT_PATH_PREFIX "/tmp/dump_pulsesink_out_"
++#define GST_PULSESINK_DUMP_INPUT_FLAG 0x00000400
++#define GST_PULSESINK_DUMP_OUTPUT_FLAG 0x00000800
++#endif /* __TIZEN__ && PCM_DUMP_ENABLE */
++
+ #define GST_TYPE_PULSERING_BUFFER \
+ (gst_pulseringbuffer_get_type())
+ #define GST_PULSERING_BUFFER(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_PULSERING_BUFFER,GstPulseRingBuffer))
+ #define GST_PULSERING_BUFFER_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_PULSERING_BUFFER,GstPulseRingBufferClass))
+ #define GST_PULSERING_BUFFER_GET_CLASS(obj) \
+ (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_PULSERING_BUFFER, GstPulseRingBufferClass))
+ #define GST_PULSERING_BUFFER_CAST(obj) \
+ ((GstPulseRingBuffer *)obj)
+ #define GST_IS_PULSERING_BUFFER(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_PULSERING_BUFFER))
+ #define GST_IS_PULSERING_BUFFER_CLASS(klass)\
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_PULSERING_BUFFER))
+
+ typedef struct _GstPulseRingBuffer GstPulseRingBuffer;
+ typedef struct _GstPulseRingBufferClass GstPulseRingBufferClass;
+
+ typedef struct _GstPulseContext GstPulseContext;
+
+ /* A note on threading.
+ *
+ * We use a pa_threaded_mainloop to interact with the PulseAudio server. This
+ * starts up a separate thread that runs a mainloop to carry back events,
+ * messages and timing updates from the PulseAudio server.
+ *
+ * In most cases, the PulseAudio API we use communicates with the server and
+ * processes replies asynchronously. Operations on PA objects that result in
+ * such communication are protected with a pa_threaded_mainloop_lock() and
+ * pa_threaded_mainloop_unlock(). These guarantee mutual exclusion with the
+ * mainloop thread -- when an iteration of the mainloop thread begins, it first
+ * tries to acquire this lock, and cannot do so if our code also holds that
+ * lock.
+ *
+ * When we need to complete an operation synchronously, we use
+ * pa_threaded_mainloop_wait() and pa_threaded_mainloop_signal(). These work
+ * much like pthread condition variables. pa_threaded_mainloop_wait() is
+ * called with the mainloop lock held. It releases the lock (thereby allowing
+ * the mainloop to execute) and waits until one of our callbacks, run by the
+ * mainloop thread, calls pa_threaded_mainloop_signal(). At the end of that
+ * mainloop iteration, pa_threaded_mainloop_wait() reacquires the mainloop
+ * lock and returns control to the caller.
+ */
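A minimal sketch of that synchronous pattern, assuming a hypothetical drain
helper (the pa_* calls are the real libpulse API; enter with the mainloop
unlocked):

    static void
    my_success_cb (pa_stream * s, int success, void *userdata)
    {
      pa_threaded_mainloop *loop = (pa_threaded_mainloop *) userdata;
      pa_threaded_mainloop_signal (loop, 0);
    }

    static gboolean
    my_drain_sync (pa_threaded_mainloop * loop, pa_stream * stream)
    {
      pa_operation *o;
      gboolean res = FALSE;

      pa_threaded_mainloop_lock (loop);
      if ((o = pa_stream_drain (stream, my_success_cb, loop))) {
        /* _wait() drops the lock so the mainloop thread can run our callback */
        while (pa_operation_get_state (o) == PA_OPERATION_RUNNING)
          pa_threaded_mainloop_wait (loop);
        pa_operation_unref (o);
        res = TRUE;
      }
      pa_threaded_mainloop_unlock (loop);
      return res;
    }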
+
+ /* Store the PA contexts in a hash table to allow easy sharing among
+ * multiple instances of the sink. Keys are "$context_name@$server_name"
+ * strings and values are GstPulseContext pointers.
+ */
+ struct _GstPulseContext
+ {
+ pa_context *context;
+ GSList *ring_buffers;
+ };
+
+ static GHashTable *gst_pulse_shared_contexts = NULL;
+
+ /* Use one static mainloop for all instances. This is needed to make
+ * context sharing work, as the contexts are released when their parent
+ * mainloop is released.
+ */
+ static pa_threaded_mainloop *mainloop = NULL;
+ static guint mainloop_ref_ct = 0;
+
+ /* lock for access to shared resources */
+ static GMutex pa_shared_resource_mutex;
+
+ /* We keep a custom ringbuffer that is backed by memory allocated by
+ * pulseaudio. We must also override the commit function to write into
+ * pulseaudio memory instead. */
+ struct _GstPulseRingBuffer
+ {
+ GstAudioRingBuffer object;
+
+ gchar *context_name;
+ gchar *stream_name;
+
+ pa_context *context;
+ pa_stream *stream;
+ pa_stream *probe_stream;
+
+ pa_format_info *format;
+ guint channels;
+ gboolean is_pcm;
+
+ void *m_data;
+ size_t m_towrite;
+ size_t m_writable;
+ gint64 m_offset;
+ gint64 m_lastoffset;
+
+ gboolean corked:1;
+ gboolean in_commit:1;
+ gboolean paused:1;
+ };
+ struct _GstPulseRingBufferClass
+ {
+ GstAudioRingBufferClass parent_class;
+ };
+
+ static GType gst_pulseringbuffer_get_type (void);
+ static void gst_pulseringbuffer_finalize (GObject * object);
+
+ static GstAudioRingBufferClass *ring_parent_class = NULL;
+
+ static gboolean gst_pulseringbuffer_open_device (GstAudioRingBuffer * buf);
+ static gboolean gst_pulseringbuffer_close_device (GstAudioRingBuffer * buf);
+ static gboolean gst_pulseringbuffer_acquire (GstAudioRingBuffer * buf,
+ GstAudioRingBufferSpec * spec);
+ static gboolean gst_pulseringbuffer_release (GstAudioRingBuffer * buf);
+ static gboolean gst_pulseringbuffer_start (GstAudioRingBuffer * buf);
+ static gboolean gst_pulseringbuffer_pause (GstAudioRingBuffer * buf);
+ static gboolean gst_pulseringbuffer_stop (GstAudioRingBuffer * buf);
+ static void gst_pulseringbuffer_clear (GstAudioRingBuffer * buf);
+ static guint gst_pulseringbuffer_commit (GstAudioRingBuffer * buf,
+ guint64 * sample, guchar * data, gint in_samples, gint out_samples,
+ gint * accum);
++#ifdef __TIZEN__
++static gboolean gst_pulsering_set_corked (GstPulseRingBuffer * pbuf, gboolean corked,
++ gboolean wait);
++#endif
+
+ G_DEFINE_TYPE (GstPulseRingBuffer, gst_pulseringbuffer,
+ GST_TYPE_AUDIO_RING_BUFFER);
+
+ static void
+ gst_pulsesink_init_contexts (void)
+ {
+ g_mutex_init (&pa_shared_resource_mutex);
+ gst_pulse_shared_contexts = g_hash_table_new_full (g_str_hash, g_str_equal,
+ g_free, NULL);
+ }
+
+ static void
+ gst_pulseringbuffer_class_init (GstPulseRingBufferClass * klass)
+ {
+ GObjectClass *gobject_class;
+ GstAudioRingBufferClass *gstringbuffer_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstringbuffer_class = (GstAudioRingBufferClass *) klass;
+
+ ring_parent_class = g_type_class_peek_parent (klass);
+
+ gobject_class->finalize = gst_pulseringbuffer_finalize;
+
+ gstringbuffer_class->open_device =
+ GST_DEBUG_FUNCPTR (gst_pulseringbuffer_open_device);
+ gstringbuffer_class->close_device =
+ GST_DEBUG_FUNCPTR (gst_pulseringbuffer_close_device);
+ gstringbuffer_class->acquire =
+ GST_DEBUG_FUNCPTR (gst_pulseringbuffer_acquire);
+ gstringbuffer_class->release =
+ GST_DEBUG_FUNCPTR (gst_pulseringbuffer_release);
+ gstringbuffer_class->start = GST_DEBUG_FUNCPTR (gst_pulseringbuffer_start);
+ gstringbuffer_class->pause = GST_DEBUG_FUNCPTR (gst_pulseringbuffer_pause);
+ gstringbuffer_class->resume = GST_DEBUG_FUNCPTR (gst_pulseringbuffer_start);
+ gstringbuffer_class->stop = GST_DEBUG_FUNCPTR (gst_pulseringbuffer_stop);
+ gstringbuffer_class->clear_all =
+ GST_DEBUG_FUNCPTR (gst_pulseringbuffer_clear);
+
+ gstringbuffer_class->commit = GST_DEBUG_FUNCPTR (gst_pulseringbuffer_commit);
+ }
+
+ static void
+ gst_pulseringbuffer_init (GstPulseRingBuffer * pbuf)
+ {
+ pbuf->stream_name = NULL;
+ pbuf->context = NULL;
+ pbuf->stream = NULL;
+ pbuf->probe_stream = NULL;
+
+ pbuf->format = NULL;
+ pbuf->channels = 0;
+ pbuf->is_pcm = FALSE;
+
+ pbuf->m_data = NULL;
+ pbuf->m_towrite = 0;
+ pbuf->m_writable = 0;
+ pbuf->m_offset = 0;
+ pbuf->m_lastoffset = 0;
+
+ pbuf->corked = TRUE;
+ pbuf->in_commit = FALSE;
+ pbuf->paused = FALSE;
+ }
+
+ /* Call with the mainloop lock held if wait == TRUE */
+ static void
+ gst_pulse_destroy_stream (pa_stream * stream, gboolean wait)
+ {
+ /* Make sure we don't get any further callbacks */
+ pa_stream_set_write_callback (stream, NULL, NULL);
+ pa_stream_set_underflow_callback (stream, NULL, NULL);
+ pa_stream_set_overflow_callback (stream, NULL, NULL);
+
+ pa_stream_disconnect (stream);
+
+ if (wait)
+ pa_threaded_mainloop_wait (mainloop);
+
+ pa_stream_set_state_callback (stream, NULL, NULL);
+ pa_stream_unref (stream);
+ }
+
+ static void
+ gst_pulsering_destroy_stream (GstPulseRingBuffer * pbuf)
+ {
+ if (pbuf->probe_stream) {
+ gst_pulse_destroy_stream (pbuf->probe_stream, FALSE);
+ pbuf->probe_stream = NULL;
+ }
+
+ if (pbuf->stream) {
+
+ if (pbuf->m_data) {
+ /* drop shm memory buffer */
+ pa_stream_cancel_write (pbuf->stream);
+
+ /* reset internal variables */
+ pbuf->m_data = NULL;
+ pbuf->m_towrite = 0;
+ pbuf->m_writable = 0;
+ pbuf->m_offset = 0;
+ pbuf->m_lastoffset = 0;
+ }
+ if (pbuf->format) {
+ pa_format_info_free (pbuf->format);
+ pbuf->format = NULL;
+ pbuf->channels = 0;
+ pbuf->is_pcm = FALSE;
+ }
+
+ pa_stream_disconnect (pbuf->stream);
+
+ /* Make sure we don't get any further callbacks */
+ pa_stream_set_state_callback (pbuf->stream, NULL, NULL);
+ pa_stream_set_write_callback (pbuf->stream, NULL, NULL);
+ pa_stream_set_underflow_callback (pbuf->stream, NULL, NULL);
+ pa_stream_set_overflow_callback (pbuf->stream, NULL, NULL);
++#ifdef __TIZEN__
++ pa_stream_set_latency_update_callback (pbuf->stream, NULL, NULL);
++ pa_stream_set_suspended_callback (pbuf->stream, NULL, NULL);
++ pa_stream_set_started_callback (pbuf->stream, NULL, NULL);
++ pa_stream_set_event_callback (pbuf->stream, NULL, NULL);
++#endif
+
+ pa_stream_unref (pbuf->stream);
+ pbuf->stream = NULL;
+ }
+
+ g_free (pbuf->stream_name);
+ pbuf->stream_name = NULL;
+ }
+
+ static void
+ gst_pulsering_destroy_context (GstPulseRingBuffer * pbuf)
+ {
+ g_mutex_lock (&pa_shared_resource_mutex);
+
+ GST_DEBUG_OBJECT (pbuf, "destroying ringbuffer %p", pbuf);
+
+ gst_pulsering_destroy_stream (pbuf);
+
+ if (pbuf->context) {
+ pa_context_unref (pbuf->context);
+ pbuf->context = NULL;
+ }
+
+ if (pbuf->context_name) {
+ GstPulseContext *pctx;
+
+ pctx = g_hash_table_lookup (gst_pulse_shared_contexts, pbuf->context_name);
+
+ GST_DEBUG_OBJECT (pbuf, "releasing context with name %s, pbuf=%p, pctx=%p",
+ pbuf->context_name, pbuf, pctx);
+
+ if (pctx) {
+ pctx->ring_buffers = g_slist_remove (pctx->ring_buffers, pbuf);
+ if (pctx->ring_buffers == NULL) {
+ GST_DEBUG_OBJECT (pbuf,
+ "destroying final context with name %s, pbuf=%p, pctx=%p",
+ pbuf->context_name, pbuf, pctx);
+
+ pa_context_disconnect (pctx->context);
+
+ /* Make sure we don't get any further callbacks */
+ pa_context_set_state_callback (pctx->context, NULL, NULL);
+ pa_context_set_subscribe_callback (pctx->context, NULL, NULL);
+
+ g_hash_table_remove (gst_pulse_shared_contexts, pbuf->context_name);
+
+ pa_context_unref (pctx->context);
+ g_slice_free (GstPulseContext, pctx);
+ }
+ }
+ g_free (pbuf->context_name);
+ pbuf->context_name = NULL;
+ }
+ g_mutex_unlock (&pa_shared_resource_mutex);
+ }
+
+ static void
+ gst_pulseringbuffer_finalize (GObject * object)
+ {
+ GstPulseRingBuffer *ringbuffer;
+
+ ringbuffer = GST_PULSERING_BUFFER_CAST (object);
+
+ gst_pulsering_destroy_context (ringbuffer);
+ G_OBJECT_CLASS (ring_parent_class)->finalize (object);
+ }
+
+
+ #define CONTEXT_OK(c) ((c) && PA_CONTEXT_IS_GOOD (pa_context_get_state ((c))))
+ #define STREAM_OK(s) ((s) && PA_STREAM_IS_GOOD (pa_stream_get_state ((s))))
+
+ static gboolean
+ gst_pulsering_is_dead (GstPulseSink * psink, GstPulseRingBuffer * pbuf,
+ gboolean check_stream)
+ {
+ if (!CONTEXT_OK (pbuf->context))
+ goto error;
+
+ if (check_stream && !STREAM_OK (pbuf->stream))
+ goto error;
+
+ return FALSE;
+
+ error:
+ {
+ const gchar *err_str =
+ pbuf->context ? pa_strerror (pa_context_errno (pbuf->context)) : NULL;
+ GST_ELEMENT_ERROR (psink, RESOURCE, FAILED, ("Disconnected: %s",
+ err_str), (NULL));
+ return TRUE;
+ }
+ }
+
+ static void
+ gst_pulsering_context_state_cb (pa_context * c, void *userdata)
+ {
+ pa_context_state_t state;
+ pa_threaded_mainloop *mainloop = (pa_threaded_mainloop *) userdata;
+
+ state = pa_context_get_state (c);
+
+ GST_LOG ("got new context state %d", state);
+
+ switch (state) {
+ case PA_CONTEXT_READY:
+ case PA_CONTEXT_TERMINATED:
+ case PA_CONTEXT_FAILED:
+ GST_LOG ("signaling");
+ pa_threaded_mainloop_signal (mainloop, 0);
+ break;
+
+ case PA_CONTEXT_UNCONNECTED:
+ case PA_CONTEXT_CONNECTING:
+ case PA_CONTEXT_AUTHORIZING:
+ case PA_CONTEXT_SETTING_NAME:
+ break;
+ }
+ }
+
+ static void
+ gst_pulsering_context_subscribe_cb (pa_context * c,
+ pa_subscription_event_type_t t, uint32_t idx, void *userdata)
+ {
+ GstPulseSink *psink;
+ GstPulseContext *pctx = (GstPulseContext *) userdata;
+ GSList *walk;
+
+ if (t != (PA_SUBSCRIPTION_EVENT_SINK_INPUT | PA_SUBSCRIPTION_EVENT_CHANGE) &&
+ t != (PA_SUBSCRIPTION_EVENT_SINK_INPUT | PA_SUBSCRIPTION_EVENT_NEW))
+ return;
+
+ for (walk = pctx->ring_buffers; walk; walk = g_slist_next (walk)) {
+ GstPulseRingBuffer *pbuf = (GstPulseRingBuffer *) walk->data;
+ psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (pbuf));
+
+ GST_LOG_OBJECT (psink, "type %04x, idx %u", t, idx);
+
+ if (!pbuf->stream)
+ continue;
+
+ if (idx != pa_stream_get_index (pbuf->stream))
+ continue;
+
+ if (psink->device && pbuf->is_pcm &&
+ !g_str_equal (psink->device,
+ pa_stream_get_device_name (pbuf->stream))) {
+ /* Underlying sink changed. And this is not a passthrough stream. Let's
+ * see if someone upstream wants to try to renegotiate. */
+ GstEvent *renego;
+
+ g_free (psink->device);
+ psink->device = g_strdup (pa_stream_get_device_name (pbuf->stream));
+
+ GST_INFO_OBJECT (psink, "emitting sink-changed");
+
+ /* FIXME: send reconfigure event instead and let decodebin/playbin
+ * handle that. Also take care of ac3 alignment. See "pulse-format-lost" */
+ renego = gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM,
+ gst_structure_new_empty ("pulse-sink-changed"));
+
+ if (!gst_pad_push_event (GST_BASE_SINK (psink)->sinkpad, renego))
+ GST_DEBUG_OBJECT (psink, "Emitted sink-changed - nobody was listening");
+ }
+
+ /* This event is also triggered when other properties of the stream,
+ * unrelated to the volume, change. However, it is probably cheaper to
+ * signal the change here and check the volume when the GObject property
+ * is read instead of querying it every time. */
+
+ /* inform streaming thread to notify */
+ g_atomic_int_compare_and_exchange (&psink->notify, 0, 1);
+ }
+ }
+
+ /* Called when the device should be opened. At this point we connect to the
+ * server; we should not try to open any streams yet. */
+ static gboolean
+ gst_pulseringbuffer_open_device (GstAudioRingBuffer * buf)
+ {
+ GstPulseSink *psink;
+ GstPulseRingBuffer *pbuf;
+ GstPulseContext *pctx;
+ pa_mainloop_api *api;
+ gboolean need_unlock_shared;
+
+ psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (buf));
+ pbuf = GST_PULSERING_BUFFER_CAST (buf);
+
+ g_assert (!pbuf->stream);
+ g_assert (psink->client_name);
+
+ if (psink->server)
+ pbuf->context_name = g_strdup_printf ("%s@%s", psink->client_name,
+ psink->server);
+ else
+ pbuf->context_name = g_strdup (psink->client_name);
+
+ pa_threaded_mainloop_lock (mainloop);
+
+ g_mutex_lock (&pa_shared_resource_mutex);
+ need_unlock_shared = TRUE;
+
+ pctx = g_hash_table_lookup (gst_pulse_shared_contexts, pbuf->context_name);
+ if (pctx == NULL) {
+ pctx = g_slice_new0 (GstPulseContext);
+
+ /* get the mainloop api and create a context */
+ GST_INFO_OBJECT (psink, "new context with name %s, pbuf=%p, pctx=%p",
+ pbuf->context_name, pbuf, pctx);
+ api = pa_threaded_mainloop_get_api (mainloop);
+ if (!(pctx->context = pa_context_new (api, pbuf->context_name)))
+ goto create_failed;
+
+ pctx->ring_buffers = g_slist_prepend (pctx->ring_buffers, pbuf);
+ g_hash_table_insert (gst_pulse_shared_contexts,
+ g_strdup (pbuf->context_name), (gpointer) pctx);
+ /* register some essential callbacks */
+ pa_context_set_state_callback (pctx->context,
+ gst_pulsering_context_state_cb, mainloop);
+ pa_context_set_subscribe_callback (pctx->context,
+ gst_pulsering_context_subscribe_cb, pctx);
+
+ /* try to connect to the server and wait for completion, we don't want to
+ * autospawn a daemon */
+ GST_LOG_OBJECT (psink, "connect to server %s",
+ GST_STR_NULL (psink->server));
+ if (pa_context_connect (pctx->context, psink->server,
+ PA_CONTEXT_NOAUTOSPAWN, NULL) < 0)
+ goto connect_failed;
+ } else {
+ GST_INFO_OBJECT (psink,
+ "reusing shared context with name %s, pbuf=%p, pctx=%p",
+ pbuf->context_name, pbuf, pctx);
+ pctx->ring_buffers = g_slist_prepend (pctx->ring_buffers, pbuf);
+ }
+
+ g_mutex_unlock (&pa_shared_resource_mutex);
+ need_unlock_shared = FALSE;
+
+ /* context created or shared okay */
+ pbuf->context = pa_context_ref (pctx->context);
+
+ for (;;) {
+ pa_context_state_t state;
+
+ state = pa_context_get_state (pbuf->context);
+
+ GST_LOG_OBJECT (psink, "context state is now %d", state);
+
+ if (!PA_CONTEXT_IS_GOOD (state))
+ goto connect_failed;
+
+ if (state == PA_CONTEXT_READY)
+ break;
+
+ /* Wait until the context is ready */
+ GST_LOG_OBJECT (psink, "waiting..");
+ pa_threaded_mainloop_wait (mainloop);
+ }
+
+ if (pa_context_get_server_protocol_version (pbuf->context) < 22) {
+ /* We need PulseAudio >= 1.0 on the server side for the extended API */
+ goto bad_server_version;
+ }
+
+ GST_LOG_OBJECT (psink, "opened the device");
+
+ pa_threaded_mainloop_unlock (mainloop);
+
+ return TRUE;
+
+ /* ERRORS */
+ unlock_and_fail:
+ {
+ if (need_unlock_shared)
+ g_mutex_unlock (&pa_shared_resource_mutex);
+ gst_pulsering_destroy_context (pbuf);
+ pa_threaded_mainloop_unlock (mainloop);
+ return FALSE;
+ }
+ create_failed:
+ {
+ GST_ELEMENT_ERROR (psink, RESOURCE, FAILED,
+ ("Failed to create context"), (NULL));
+ g_slice_free (GstPulseContext, pctx);
+ goto unlock_and_fail;
+ }
+ connect_failed:
+ {
+ GST_ELEMENT_ERROR (psink, RESOURCE, FAILED, ("Failed to connect: %s",
+ pa_strerror (pa_context_errno (pctx->context))), (NULL));
+ goto unlock_and_fail;
+ }
+ bad_server_version:
+ {
+ GST_ELEMENT_ERROR (psink, RESOURCE, FAILED, ("PulseAudio server version "
+ "is too old."), (NULL));
+ goto unlock_and_fail;
+ }
+ }
+
+ /* close the device */
+ static gboolean
+ gst_pulseringbuffer_close_device (GstAudioRingBuffer * buf)
+ {
+ GstPulseSink *psink;
+ GstPulseRingBuffer *pbuf;
+
+ pbuf = GST_PULSERING_BUFFER_CAST (buf);
+ psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (buf));
+
+ GST_LOG_OBJECT (psink, "closing device");
+
+ pa_threaded_mainloop_lock (mainloop);
+ gst_pulsering_destroy_context (pbuf);
+ pa_threaded_mainloop_unlock (mainloop);
+
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
++ if (psink->dump_fd_input) {
++ fclose(psink->dump_fd_input);
++ psink->dump_fd_input = NULL;
++ }
++#endif /* __TIZEN__ && PCM_DUMP_ENABLE */
++
+ GST_LOG_OBJECT (psink, "closed device");
+
+ return TRUE;
+ }
+
+ static void
+ gst_pulsering_stream_state_cb (pa_stream * s, void *userdata)
+ {
+ GstPulseSink *psink;
+ GstPulseRingBuffer *pbuf;
+ pa_stream_state_t state;
+
+ pbuf = GST_PULSERING_BUFFER_CAST (userdata);
+ psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (pbuf));
+
+ state = pa_stream_get_state (s);
+ GST_LOG_OBJECT (psink, "got new stream state %d", state);
+
+ switch (state) {
+ case PA_STREAM_READY:
+ case PA_STREAM_FAILED:
+ case PA_STREAM_TERMINATED:
+ GST_LOG_OBJECT (psink, "signaling");
+ pa_threaded_mainloop_signal (mainloop, 0);
+ break;
+ case PA_STREAM_UNCONNECTED:
+ case PA_STREAM_CREATING:
+ break;
+ }
+ }
+
+ static void
+ gst_pulsering_stream_request_cb (pa_stream * s, size_t length, void *userdata)
+ {
+ GstPulseSink *psink;
+ GstAudioRingBuffer *rbuf;
+ GstPulseRingBuffer *pbuf;
+
+ rbuf = GST_AUDIO_RING_BUFFER_CAST (userdata);
+ pbuf = GST_PULSERING_BUFFER_CAST (userdata);
+ psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (pbuf));
+
+ GST_LOG_OBJECT (psink, "got request for length %" G_GSIZE_FORMAT, length);
+
+ if (pbuf->in_commit && (length >= rbuf->spec.segsize)) {
+ /* only signal when we are waiting in the commit thread
+ * and got request for at least a segment */
+ pa_threaded_mainloop_signal (mainloop, 0);
+ }
+ }
+
+ static void
+ gst_pulsering_stream_underflow_cb (pa_stream * s, void *userdata)
+ {
+ GstPulseSink *psink;
+ GstPulseRingBuffer *pbuf;
+
+ pbuf = GST_PULSERING_BUFFER_CAST (userdata);
+ psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (pbuf));
+
+ GST_WARNING_OBJECT (psink, "Got underflow");
+ }
+
+ static void
+ gst_pulsering_stream_overflow_cb (pa_stream * s, void *userdata)
+ {
+ GstPulseSink *psink;
+ GstPulseRingBuffer *pbuf;
+
+ pbuf = GST_PULSERING_BUFFER_CAST (userdata);
+ psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (pbuf));
+
+ GST_WARNING_OBJECT (psink, "Got overflow");
+ }
+
+ static void
+ gst_pulsering_stream_latency_cb (pa_stream * s, void *userdata)
+ {
+ GstPulseSink *psink;
+ GstPulseRingBuffer *pbuf;
+ GstAudioRingBuffer *ringbuf;
+ const pa_timing_info *info;
+ pa_usec_t sink_usec;
+
+ info = pa_stream_get_timing_info (s);
+
+ pbuf = GST_PULSERING_BUFFER_CAST (userdata);
+ psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (pbuf));
+ ringbuf = GST_AUDIO_RING_BUFFER (pbuf);
+
+ if (!info) {
+ GST_LOG_OBJECT (psink, "latency update (information unknown)");
+ return;
+ }
+
+ if (!info->read_index_corrupt) {
+ /* Update segdone based on the read index. segdone is of segment
+ * granularity, while the read index is at byte granularity. We take the
+ * ceiling while converting the latter to the former since it is more
+ * conservative to report that we've read more than we have than to report
+ * less. One concern here is that latency updates happen every 100ms, which
+ * means segdone is not updated very often, but increasing the update
+ * frequency would mean more communication overhead. */
+ g_atomic_int_set (&ringbuf->segdone,
+ (int) gst_util_uint64_scale_ceil (info->read_index, 1,
+ ringbuf->spec.segsize));
+ }
+
+ sink_usec = info->configured_sink_usec;
+
+ GST_LOG_OBJECT (psink,
+ "latency_update, %" G_GUINT64_FORMAT ", %d:%" G_GINT64_FORMAT ", %d:%"
+ G_GUINT64_FORMAT ", %" G_GUINT64_FORMAT ", %" G_GUINT64_FORMAT,
+ GST_TIMEVAL_TO_TIME (info->timestamp), info->write_index_corrupt,
+ info->write_index, info->read_index_corrupt, info->read_index,
+ info->sink_usec, sink_usec);
++#ifdef __TIZEN__
++ if (!psink || !psink->auto_render_delay)
++ return;
++
++ if (sink_usec < info->sink_usec)
++ gst_base_sink_set_render_delay (GST_BASE_SINK(psink),
++ (info->sink_usec - sink_usec) * G_GINT64_CONSTANT (1000));
++ else
++ gst_base_sink_set_render_delay (GST_BASE_SINK(psink), 0);
++
++ GST_DEBUG_OBJECT (psink, "Current render delay is %" G_GUINT64_FORMAT,
++ gst_base_sink_get_render_delay (GST_BASE_SINK (psink)));
++#endif
+ }
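The ceiling conversion in the latency callback above is easiest to see with
numbers; a small sketch with hypothetical values:

    /* gst_util_uint64_scale_ceil (v, n, d) computes ceil (v * n / d), so a
     * read_index of 10000 bytes with segsize 4416 reports 3 segments done
     * (2.26 segments, rounded up). */
    static gint
    read_index_to_segdone (guint64 read_index, gint segsize)
    {
      return (gint) gst_util_uint64_scale_ceil (read_index, 1, segsize);
    }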
+
+ static void
+ gst_pulsering_stream_suspended_cb (pa_stream * p, void *userdata)
+ {
+ GstPulseSink *psink;
+ GstPulseRingBuffer *pbuf;
+
+ pbuf = GST_PULSERING_BUFFER_CAST (userdata);
+ psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (pbuf));
+
+ if (pa_stream_is_suspended (p))
+ GST_DEBUG_OBJECT (psink, "stream suspended");
+ else
+ GST_DEBUG_OBJECT (psink, "stream resumed");
+ }
+
+ static void
+ gst_pulsering_stream_started_cb (pa_stream * p, void *userdata)
+ {
+ GstPulseSink *psink;
+ GstPulseRingBuffer *pbuf;
+
+ pbuf = GST_PULSERING_BUFFER_CAST (userdata);
+ psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (pbuf));
+
+ GST_DEBUG_OBJECT (psink, "stream started");
+ }
+
+ static void
+ gst_pulsering_stream_event_cb (pa_stream * p, const char *name,
+ pa_proplist * pl, void *userdata)
+ {
+ GstPulseSink *psink;
+ GstPulseRingBuffer *pbuf;
+
+ pbuf = GST_PULSERING_BUFFER_CAST (userdata);
+ psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (pbuf));
+
+ if (!strcmp (name, PA_STREAM_EVENT_REQUEST_CORK)) {
+ /* the stream wants to PAUSE, post a message for the application. */
+ GST_DEBUG_OBJECT (psink, "got request for CORK");
+ gst_element_post_message (GST_ELEMENT_CAST (psink),
+ gst_message_new_request_state (GST_OBJECT_CAST (psink),
+ GST_STATE_PAUSED));
+
+ } else if (!strcmp (name, PA_STREAM_EVENT_REQUEST_UNCORK)) {
+ GST_DEBUG_OBJECT (psink, "got request for UNCORK");
+ gst_element_post_message (GST_ELEMENT_CAST (psink),
+ gst_message_new_request_state (GST_OBJECT_CAST (psink),
+ GST_STATE_PLAYING));
+ } else if (!strcmp (name, PA_STREAM_EVENT_FORMAT_LOST)) {
+ GstEvent *renego;
+
+ if (g_atomic_int_get (&psink->format_lost)) {
+ /* Duplicate event before we're done reconfiguring, discard */
+ return;
+ }
+
+ GST_DEBUG_OBJECT (psink, "got FORMAT LOST");
+ g_atomic_int_set (&psink->format_lost, 1);
+ psink->format_lost_time = g_ascii_strtoull (pa_proplist_gets (pl,
+ "stream-time"), NULL, 0) * 1000;
+
+ g_free (psink->device);
+ psink->device = g_strdup (pa_proplist_gets (pl, "device"));
+
+ /* FIXME: send reconfigure event instead and let decodebin/playbin
+ * handle that. Also take care of ac3 alignment */
+ renego = gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM,
+ gst_structure_new_empty ("pulse-format-lost"));
+
+ #if 0
+ if (g_str_equal (gst_structure_get_name (st), "audio/x-eac3")) {
+ GstStructure *event_st = gst_structure_new ("ac3parse-set-alignment",
+ "alignment", G_TYPE_STRING, pbin->dbin ? "frame" : "iec61937", NULL);
+
+ if (!gst_pad_push_event (pbin->sinkpad,
+ gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM, event_st)))
+ GST_WARNING_OBJECT (pbin->sinkpad, "Could not update alignment");
+ }
+ #endif
+
+ if (!gst_pad_push_event (GST_BASE_SINK (psink)->sinkpad, renego)) {
+ /* Nobody handled the format change - emit an error */
+ GST_ELEMENT_ERROR (psink, STREAM, FORMAT, ("Sink format changed"),
+ ("Sink format changed"));
+ }
++#ifdef __TIZEN__
++ } else if (!strcmp (name, PA_STREAM_EVENT_POP_TIMEOUT)) {
++ GST_WARNING_OBJECT (psink, "got event [%s], corking stream now", name);
++ gst_pulsering_set_corked (pbuf, TRUE, FALSE);
++#endif
+ } else {
+ GST_DEBUG_OBJECT (psink, "got unknown event %s", name);
+ }
+ }
+
+ /* Called with the mainloop locked */
+ static gboolean
+ gst_pulsering_wait_for_stream_ready (GstPulseSink * psink, pa_stream * stream)
+ {
+ pa_stream_state_t state;
+
+ for (;;) {
+ state = pa_stream_get_state (stream);
+
+ GST_LOG_OBJECT (psink, "stream state is now %d", state);
+
+ if (!PA_STREAM_IS_GOOD (state))
+ return FALSE;
+
+ if (state == PA_STREAM_READY)
+ return TRUE;
+
+ /* Wait until the stream is ready */
+ pa_threaded_mainloop_wait (mainloop);
+ }
+ }
+
+
+ /* This method should create a new stream for the given @spec. No playback
+ * starts yet, so we start in the corked state. */
+ static gboolean
+ gst_pulseringbuffer_acquire (GstAudioRingBuffer * buf,
+ GstAudioRingBufferSpec * spec)
+ {
+ GstPulseSink *psink;
+ GstPulseRingBuffer *pbuf;
+ pa_buffer_attr wanted;
+ const pa_buffer_attr *actual;
+ pa_channel_map channel_map;
+ pa_operation *o = NULL;
++#ifndef __TIZEN__
+ pa_cvolume v;
++#endif
+ pa_cvolume *pv = NULL;
+ pa_stream_flags_t flags;
+ const gchar *name;
+ GstAudioClock *clock;
+ pa_format_info *formats[1];
+ #ifndef GST_DISABLE_GST_DEBUG
+ gchar print_buf[PA_FORMAT_INFO_SNPRINT_MAX];
+ #endif
+
+ psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (buf));
+ pbuf = GST_PULSERING_BUFFER_CAST (buf);
+
+ GST_LOG_OBJECT (psink, "creating sample spec");
+ /* convert the gstreamer sample spec to the pulseaudio format */
+ if (!gst_pulse_fill_format_info (spec, &pbuf->format, &pbuf->channels))
+ goto invalid_spec;
+ pbuf->is_pcm = pa_format_info_is_pcm (pbuf->format);
+
+ pa_threaded_mainloop_lock (mainloop);
+
+ /* we need a context and no stream yet */
+ g_assert (pbuf->context);
+ g_assert (!pbuf->stream);
+
+ /* if we have a probe, disconnect it first so that if we're creating a
+ * compressed stream, it doesn't get blocked by a PCM stream */
+ if (pbuf->probe_stream) {
+ gst_pulse_destroy_stream (pbuf->probe_stream, TRUE);
+ pbuf->probe_stream = NULL;
+ }
+
+ /* enable event notifications */
+ GST_LOG_OBJECT (psink, "subscribing to context events");
+ if (!(o = pa_context_subscribe (pbuf->context,
+ PA_SUBSCRIPTION_MASK_SINK_INPUT, NULL, NULL)))
+ goto subscribe_failed;
+
+ pa_operation_unref (o);
+
+ /* initialize the channel map */
+ if (pbuf->is_pcm && gst_pulse_gst_to_channel_map (&channel_map, spec))
+ pa_format_info_set_channel_map (pbuf->format, &channel_map);
+
+ /* find a good name for the stream */
+ if (psink->stream_name)
+ name = psink->stream_name;
+ else
+ name = "Playback Stream";
+
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
++ if (psink->need_dump_input == TRUE && psink->dump_fd_input == NULL) {
++ char *suffix, *dump_path;
++ GDateTime *time = g_date_time_new_now_local();
++
++ suffix = g_date_time_format(time, "%m%d_%H%M%S");
++ dump_path = g_strdup_printf("%s%dch_%dhz_%s.pcm", GST_PULSESINK_DUMP_INPUT_PATH_PREFIX, pbuf->channels, spec->info.rate, suffix);
++ GST_WARNING_OBJECT(psink, "pulse-sink dumping enabled: dump path [%s]", dump_path);
++ psink->dump_fd_input = fopen(dump_path, "w+");
++
++ g_free(suffix);
++ g_free(dump_path);
++ g_date_time_unref(time);
++ }
++#endif /* __TIZEN__ && PCM_DUMP_ENABLE */
++
+ /* create a stream */
+ formats[0] = pbuf->format;
+ if (!(pbuf->stream = pa_stream_new_extended (pbuf->context, name, formats, 1,
+ psink->proplist)))
+ goto stream_failed;
+
+ /* install essential callbacks */
+ pa_stream_set_state_callback (pbuf->stream,
+ gst_pulsering_stream_state_cb, pbuf);
+ pa_stream_set_write_callback (pbuf->stream,
+ gst_pulsering_stream_request_cb, pbuf);
+ pa_stream_set_underflow_callback (pbuf->stream,
+ gst_pulsering_stream_underflow_cb, pbuf);
+ pa_stream_set_overflow_callback (pbuf->stream,
+ gst_pulsering_stream_overflow_cb, pbuf);
+ pa_stream_set_latency_update_callback (pbuf->stream,
+ gst_pulsering_stream_latency_cb, pbuf);
+ pa_stream_set_suspended_callback (pbuf->stream,
+ gst_pulsering_stream_suspended_cb, pbuf);
+ pa_stream_set_started_callback (pbuf->stream,
+ gst_pulsering_stream_started_cb, pbuf);
+ pa_stream_set_event_callback (pbuf->stream,
+ gst_pulsering_stream_event_cb, pbuf);
+
+ /* buffering requirements. With prebuf set to 0, the stream will not pause
+ * on underrun, so the clock keeps running. */
+ memset (&wanted, 0, sizeof (wanted));
+ wanted.tlength = spec->segtotal * spec->segsize;
+ wanted.maxlength = -1;
+ wanted.prebuf = 0;
+ wanted.minreq = spec->segsize;
+
+ GST_INFO_OBJECT (psink, "tlength: %d", wanted.tlength);
+ GST_INFO_OBJECT (psink, "maxlength: %d", wanted.maxlength);
+ GST_INFO_OBJECT (psink, "prebuf: %d", wanted.prebuf);
+ GST_INFO_OBJECT (psink, "minreq: %d", wanted.minreq);
+
++#ifndef __TIZEN__
+ /* configure volume when we changed it, else we leave the default */
+ if (psink->volume_set) {
+ GST_LOG_OBJECT (psink, "have volume of %f", psink->volume);
+ pv = &v;
+ if (pbuf->is_pcm)
+ gst_pulse_cvolume_from_linear (pv, pbuf->channels, psink->volume);
+ else {
+ GST_DEBUG_OBJECT (psink, "passthrough stream, not setting volume");
+ pv = NULL;
+ }
+ } else {
+ pv = NULL;
+ }
++#endif
+
+ /* construct the flags */
+ flags = PA_STREAM_INTERPOLATE_TIMING | PA_STREAM_AUTO_TIMING_UPDATE |
+ PA_STREAM_ADJUST_LATENCY | PA_STREAM_START_CORKED;
+
++#ifndef __TIZEN__
+ if (psink->mute_set) {
+ if (psink->mute)
+ flags |= PA_STREAM_START_MUTED;
+ else
+ flags |= PA_STREAM_START_UNMUTED;
+ }
++#endif
+
+ /* we always start corked (see flags above) */
+ pbuf->corked = TRUE;
+
+ /* try to connect now */
+ GST_LOG_OBJECT (psink, "connect for playback to device %s",
+ GST_STR_NULL (psink->device));
+ if (pa_stream_connect_playback (pbuf->stream, psink->device,
+ &wanted, flags, pv, NULL) < 0)
+ goto connect_failed;
+
+ /* our clock will now start from 0 again */
+ clock = GST_AUDIO_CLOCK (GST_AUDIO_BASE_SINK (psink)->provided_clock);
+ gst_audio_clock_reset (clock, 0);
+
+ if (!gst_pulsering_wait_for_stream_ready (psink, pbuf->stream))
+ goto connect_failed;
+
+ g_free (psink->device);
+ psink->device = g_strdup (pa_stream_get_device_name (pbuf->stream));
+
+ #ifndef GST_DISABLE_GST_DEBUG
+ pa_format_info_snprint (print_buf, sizeof (print_buf),
+ pa_stream_get_format_info (pbuf->stream));
+ GST_INFO_OBJECT (psink, "negotiated to: %s", print_buf);
+ #endif
+
++#ifdef __TIZEN__
++ {
++ uint32_t idx;
++ if ((idx = pa_stream_get_index (pbuf->stream)) == PA_INVALID_INDEX)
++ goto no_index;
++ if (psink->volume_set)
++ gst_pulse_set_volume_ratio (idx, "out", psink->volume);
++ if (psink->mute)
++ gst_pulse_set_volume_ratio (idx, "out", 0);
++ }
++#endif
+ /* After we have passed the volume off to PA we never want to set it
+ again, since it is PA's job to save/restore volumes. */
+ psink->volume_set = psink->mute_set = FALSE;
+
+ GST_LOG_OBJECT (psink, "stream is acquired now");
+
+ /* get the actual buffering properties now */
+ actual = pa_stream_get_buffer_attr (pbuf->stream);
+
+ GST_INFO_OBJECT (psink, "tlength: %d (wanted: %d)", actual->tlength,
+ wanted.tlength);
+ GST_INFO_OBJECT (psink, "maxlength: %d", actual->maxlength);
+ GST_INFO_OBJECT (psink, "prebuf: %d", actual->prebuf);
+ GST_INFO_OBJECT (psink, "minreq: %d (wanted %d)", actual->minreq,
+ wanted.minreq);
+
+ spec->segsize = actual->minreq;
+ spec->segtotal = actual->tlength / spec->segsize;
+
+ pa_threaded_mainloop_unlock (mainloop);
+
+ return TRUE;
+
+ /* ERRORS */
+ unlock_and_fail:
+ {
+ gst_pulsering_destroy_stream (pbuf);
+ pa_threaded_mainloop_unlock (mainloop);
+
+ return FALSE;
+ }
+ invalid_spec:
+ {
+ GST_ELEMENT_ERROR (psink, RESOURCE, SETTINGS,
+ ("Invalid sample specification."), (NULL));
+ return FALSE;
+ }
+ subscribe_failed:
+ {
+ GST_ELEMENT_ERROR (psink, RESOURCE, FAILED,
+ ("pa_context_subscribe() failed: %s",
+ pa_strerror (pa_context_errno (pbuf->context))), (NULL));
+ goto unlock_and_fail;
+ }
+ stream_failed:
+ {
+ GST_ELEMENT_ERROR (psink, RESOURCE, FAILED,
+ ("Failed to create stream: %s",
+ pa_strerror (pa_context_errno (pbuf->context))), (NULL));
+ goto unlock_and_fail;
+ }
+ connect_failed:
+ {
+ GST_ELEMENT_ERROR (psink, RESOURCE, FAILED,
+ ("Failed to connect stream: %s",
+ pa_strerror (pa_context_errno (pbuf->context))), (NULL));
+ goto unlock_and_fail;
+ }
++#ifdef __TIZEN__
++no_index:
++ {
++ GST_ELEMENT_ERROR (psink, RESOURCE, FAILED,
++ ("Failed to get stream index: %s",
++ pa_strerror (pa_context_errno (pbuf->context))), (NULL));
++ goto unlock_and_fail;
++ }
++#endif
+ }
+
+ /* free the stream that we acquired before */
+ static gboolean
+ gst_pulseringbuffer_release (GstAudioRingBuffer * buf)
+ {
+ GstPulseRingBuffer *pbuf;
+
+ pbuf = GST_PULSERING_BUFFER_CAST (buf);
+
+ pa_threaded_mainloop_lock (mainloop);
+ gst_pulsering_destroy_stream (pbuf);
+ pa_threaded_mainloop_unlock (mainloop);
+
+ {
+ GstPulseSink *psink;
+
+ psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (pbuf));
+ g_atomic_int_set (&psink->format_lost, FALSE);
+ psink->format_lost_time = GST_CLOCK_TIME_NONE;
+ }
+
+ return TRUE;
+ }
+
+ static void
+ gst_pulsering_success_cb (pa_stream * s, int success, void *userdata)
+ {
+ pa_threaded_mainloop_signal (mainloop, 0);
+ }
+
+ /* update the corked state of a stream, must be called with the mainloop
+ * lock */
+ static gboolean
+ gst_pulsering_set_corked (GstPulseRingBuffer * pbuf, gboolean corked,
+ gboolean wait)
+ {
+ pa_operation *o = NULL;
+ GstPulseSink *psink;
+ gboolean res = FALSE;
+
+ psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (pbuf));
+
+ if (g_atomic_int_get (&psink->format_lost)) {
+ /* Sink format changed, stream's gone so fake being paused */
+ return TRUE;
+ }
+
+ GST_DEBUG_OBJECT (psink, "setting corked state to %d", corked);
+ if (pbuf->corked != corked) {
+ if (!(o = pa_stream_cork (pbuf->stream, corked,
+ gst_pulsering_success_cb, pbuf)))
+ goto cork_failed;
+
+ while (wait && pa_operation_get_state (o) == PA_OPERATION_RUNNING) {
+ pa_threaded_mainloop_wait (mainloop);
+ if (gst_pulsering_is_dead (psink, pbuf, TRUE))
+ goto server_dead;
+ }
+ pbuf->corked = corked;
+ } else {
+ GST_DEBUG_OBJECT (psink, "skipping, already in requested state");
+ }
+ res = TRUE;
+
+ cleanup:
+ if (o)
+ pa_operation_unref (o);
+
+ return res;
+
+ /* ERRORS */
+ server_dead:
+ {
+ GST_DEBUG_OBJECT (psink, "the server is dead");
+ goto cleanup;
+ }
+ cork_failed:
+ {
+ GST_ELEMENT_ERROR (psink, RESOURCE, FAILED,
+ ("pa_stream_cork() failed: %s",
+ pa_strerror (pa_context_errno (pbuf->context))), (NULL));
+ goto cleanup;
+ }
+ }
+
+ static void
+ gst_pulseringbuffer_clear (GstAudioRingBuffer * buf)
+ {
+ GstPulseSink *psink;
+ GstPulseRingBuffer *pbuf;
+ pa_operation *o = NULL;
+
+ pbuf = GST_PULSERING_BUFFER_CAST (buf);
+ psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (pbuf));
+
+ pa_threaded_mainloop_lock (mainloop);
+ GST_DEBUG_OBJECT (psink, "clearing");
+ if (pbuf->stream) {
+ /* don't wait for the flush to complete */
+ if ((o = pa_stream_flush (pbuf->stream, NULL, pbuf)))
+ pa_operation_unref (o);
+ }
+ pa_threaded_mainloop_unlock (mainloop);
+ }
+
+ #if 0
+ /* called from pulse thread with the mainloop lock */
+ static void
+ mainloop_enter_defer_cb (pa_mainloop_api * api, void *userdata)
+ {
+ GstPulseSink *pulsesink = GST_PULSESINK (userdata);
+ GstMessage *message;
+ GValue val = { 0 };
+
+ GST_DEBUG_OBJECT (pulsesink, "posting ENTER stream status");
+ message = gst_message_new_stream_status (GST_OBJECT (pulsesink),
+ GST_STREAM_STATUS_TYPE_ENTER, GST_ELEMENT (pulsesink));
+ g_value_init (&val, GST_TYPE_G_THREAD);
+ g_value_set_boxed (&val, g_thread_self ());
+ gst_message_set_stream_status_object (message, &val);
+ g_value_unset (&val);
+
+ gst_element_post_message (GST_ELEMENT (pulsesink), message);
+
+ g_return_if_fail (pulsesink->defer_pending);
+ pulsesink->defer_pending--;
+ pa_threaded_mainloop_signal (mainloop, 0);
+ }
+ #endif
+
+ /* start/resume playback ASAP, we don't uncork here but in the commit method */
+ static gboolean
+ gst_pulseringbuffer_start (GstAudioRingBuffer * buf)
+ {
+ GstPulseSink *psink;
+ GstPulseRingBuffer *pbuf;
+
+ pbuf = GST_PULSERING_BUFFER_CAST (buf);
+ psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (pbuf));
+
+ pa_threaded_mainloop_lock (mainloop);
+
+ GST_DEBUG_OBJECT (psink, "starting");
+ pbuf->paused = FALSE;
+
+ /* EOS needs running clock */
+ if (GST_BASE_SINK_CAST (psink)->eos ||
+ g_atomic_int_get (&GST_AUDIO_BASE_SINK (psink)->eos_rendering))
+ gst_pulsering_set_corked (pbuf, FALSE, FALSE);
+
+ #if 0
+ GST_DEBUG_OBJECT (psink, "scheduling stream status");
+ psink->defer_pending++;
+ pa_mainloop_api_once (pa_threaded_mainloop_get_api (mainloop),
+ mainloop_enter_defer_cb, psink);
+
+ /* Wait for the stream status message to be posted. This needs to be done
+ * synchronously because the callback will take the mainloop lock
+ * (implicitly) and then take the GST_OBJECT_LOCK. Everywhere else, we take
+ * the locks in the reverse order, so not doing this synchronously could
+ * cause a deadlock. */
+ GST_DEBUG_OBJECT (psink, "waiting for stream status (ENTER) to be posted");
+ pa_threaded_mainloop_wait (mainloop);
+ #endif
+
+ pa_threaded_mainloop_unlock (mainloop);
+
+ return TRUE;
+ }
+
+ /* pause/stop playback ASAP */
+ static gboolean
+ gst_pulseringbuffer_pause (GstAudioRingBuffer * buf)
+ {
+ GstPulseSink *psink;
+ GstPulseRingBuffer *pbuf;
+ gboolean res;
+
+ pbuf = GST_PULSERING_BUFFER_CAST (buf);
+ psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (pbuf));
+
+ pa_threaded_mainloop_lock (mainloop);
+ GST_DEBUG_OBJECT (psink, "pausing and corking");
+ /* make sure the commit method stops writing */
+ pbuf->paused = TRUE;
+ res = gst_pulsering_set_corked (pbuf, TRUE, TRUE);
+ if (pbuf->in_commit) {
+ /* we are waiting in a commit, signal */
+ GST_DEBUG_OBJECT (psink, "signal commit");
+ pa_threaded_mainloop_signal (mainloop, 0);
+ }
+ pa_threaded_mainloop_unlock (mainloop);
+
+ return res;
+ }
+
+ #if 0
+ /* called from pulse thread with the mainloop lock */
+ static void
+ mainloop_leave_defer_cb (pa_mainloop_api * api, void *userdata)
+ {
+ GstPulseSink *pulsesink = GST_PULSESINK (userdata);
+ GstMessage *message;
+ GValue val = { 0 };
+
+ GST_DEBUG_OBJECT (pulsesink, "posting LEAVE stream status");
+ message = gst_message_new_stream_status (GST_OBJECT (pulsesink),
+ GST_STREAM_STATUS_TYPE_LEAVE, GST_ELEMENT (pulsesink));
+ g_value_init (&val, GST_TYPE_G_THREAD);
+ g_value_set_boxed (&val, g_thread_self ());
+ gst_message_set_stream_status_object (message, &val);
+ g_value_unset (&val);
+
+ gst_element_post_message (GST_ELEMENT (pulsesink), message);
+
+ g_return_if_fail (pulsesink->defer_pending);
+ pulsesink->defer_pending--;
+ pa_threaded_mainloop_signal (mainloop, 0);
+ }
+ #endif
+
+ /* stop playback, we flush everything. */
+ static gboolean
+ gst_pulseringbuffer_stop (GstAudioRingBuffer * buf)
+ {
+ GstPulseSink *psink;
+ GstPulseRingBuffer *pbuf;
+ gboolean res = FALSE;
+ pa_operation *o = NULL;
+
+ pbuf = GST_PULSERING_BUFFER_CAST (buf);
+ psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (pbuf));
+
+ pa_threaded_mainloop_lock (mainloop);
+
+ pbuf->paused = TRUE;
+ res = gst_pulsering_set_corked (pbuf, TRUE, TRUE);
+
+ /* Inform anyone waiting in a _commit() call that they should wake up */
+ if (pbuf->in_commit) {
+ GST_DEBUG_OBJECT (psink, "signal commit thread");
+ pa_threaded_mainloop_signal (mainloop, 0);
+ }
+ if (g_atomic_int_get (&psink->format_lost)) {
+ /* Don't try to flush, the stream's probably gone by now */
+ res = TRUE;
+ goto cleanup;
+ }
+
+ /* then try to flush, it's not fatal when this fails */
+ GST_DEBUG_OBJECT (psink, "flushing");
+ if ((o = pa_stream_flush (pbuf->stream, gst_pulsering_success_cb, pbuf))) {
+ while (pa_operation_get_state (o) == PA_OPERATION_RUNNING) {
+ GST_DEBUG_OBJECT (psink, "wait for completion");
+ pa_threaded_mainloop_wait (mainloop);
+ if (gst_pulsering_is_dead (psink, pbuf, TRUE))
+ goto server_dead;
+ }
+ GST_DEBUG_OBJECT (psink, "flush completed");
+ }
+ res = TRUE;
+
+ cleanup:
+ if (o) {
+ pa_operation_cancel (o);
+ pa_operation_unref (o);
+ }
+ #if 0
+ GST_DEBUG_OBJECT (psink, "scheduling stream status");
+ psink->defer_pending++;
+ pa_mainloop_api_once (pa_threaded_mainloop_get_api (mainloop),
+ mainloop_leave_defer_cb, psink);
+
+ /* Wait for the stream status message to be posted. This needs to be done
+ * synchronously because the callback will take the mainloop lock
+ * (implicitly) and then take the GST_OBJECT_LOCK. Everywhere else, we take
+ * the locks in the reverse order, so not doing this synchronously could
+ * cause a deadlock. */
+ GST_DEBUG_OBJECT (psink, "waiting for stream status (LEAVE) to be posted");
+ pa_threaded_mainloop_wait (mainloop);
+ #endif
+
+ pa_threaded_mainloop_unlock (mainloop);
+
+ return res;
+
+ /* ERRORS */
+ server_dead:
+ {
+ GST_DEBUG_OBJECT (psink, "the server is dead");
+ goto cleanup;
+ }
+ }
+
+ /* in_samples >= out_samples, rate > 1.0 */
+ #define FWD_UP_SAMPLES(s,se,d,de) \
+ G_STMT_START { \
+ guint8 *sb = s, *db = d; \
+ while (s <= se && d < de) { \
+ memcpy (d, s, bpf); \
+ s += bpf; \
+ *accum += outr; \
+ if ((*accum << 1) >= inr) { \
+ *accum -= inr; \
+ d += bpf; \
+ } \
+ } \
+ in_samples -= (s - sb)/bpf; \
+ out_samples -= (d - db)/bpf; \
+ GST_DEBUG ("fwd_up end %d/%d",*accum,*toprocess); \
+ } G_STMT_END
+
+ /* out_samples > in_samples, for rates smaller than 1.0 */
+ #define FWD_DOWN_SAMPLES(s,se,d,de) \
+ G_STMT_START { \
+ guint8 *sb = s, *db = d; \
+ while (s <= se && d < de) { \
+ memcpy (d, s, bpf); \
+ d += bpf; \
+ *accum += inr; \
+ if ((*accum << 1) >= outr) { \
+ *accum -= outr; \
+ s += bpf; \
+ } \
+ } \
+ in_samples -= (s - sb)/bpf; \
+ out_samples -= (d - db)/bpf; \
+ GST_DEBUG ("fwd_down end %d/%d",*accum,*toprocess); \
+ } G_STMT_END
+
+ #define REV_UP_SAMPLES(s,se,d,de) \
+ G_STMT_START { \
+ guint8 *sb = se, *db = d; \
+ while (s <= se && d < de) { \
+ memcpy (d, se, bpf); \
+ se -= bpf; \
+ *accum += outr; \
+ while (d < de && (*accum << 1) >= inr) { \
+ *accum -= inr; \
+ d += bpf; \
+ } \
+ } \
+ in_samples -= (sb - se)/bpf; \
+ out_samples -= (d - db)/bpf; \
+ GST_DEBUG ("rev_up end %d/%d",*accum,*toprocess); \
+ } G_STMT_END
+
+ #define REV_DOWN_SAMPLES(s,se,d,de) \
+ G_STMT_START { \
+ guint8 *sb = se, *db = d; \
+ while (s <= se && d < de) { \
+ memcpy (d, se, bpf); \
+ d += bpf; \
+ *accum += inr; \
+ while (s <= se && (*accum << 1) >= outr) { \
+ *accum -= outr; \
+ se -= bpf; \
+ } \
+ } \
+ in_samples -= (sb - se)/bpf; \
+ out_samples -= (d - db)/bpf; \
+ GST_DEBUG ("rev_down end %d/%d",*accum,*toprocess); \
+ } G_STMT_END
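These four macros are a Bresenham-style drop/repeat resampler: every iteration
copies one frame, and a fixed-point accumulator decides whether the output (or
input) pointer advances. A standalone sketch of the accumulator alone, with
hypothetical counts:

    /* With inr = in_samples - 1 and outr = out_samples - 1, the test keeps
     * roughly outr/inr of the input frames; the residue left in accum carries
     * the rounding error into the next commit call. */
    #include <stdio.h>

    int
    main (void)
    {
      int inr = 9, outr = 4;    /* ~10 input frames onto ~5 output frames */
      int accum = 0, in, kept = 0;

      for (in = 0; in <= inr; in++) {
        accum += outr;
        if ((accum << 1) >= inr) {  /* same rounding test as FWD_UP_SAMPLES */
          accum -= inr;
          printf ("input frame %d -> output frame %d\n", in, kept++);
        }
      }
      return 0;
    }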
+
+ /* our custom commit function because we write into the buffer of pulseaudio
+ * instead of keeping our own buffer */
+ static guint
+ gst_pulseringbuffer_commit (GstAudioRingBuffer * buf, guint64 * sample,
+ guchar * data, gint in_samples, gint out_samples, gint * accum)
+ {
+ GstPulseSink *psink;
+ GstPulseRingBuffer *pbuf;
+ guint result;
+ guint8 *data_end;
+ gboolean reverse;
+ gint *toprocess;
+ gint inr, outr, bpf;
+ gint64 offset;
+ guint bufsize;
+
+ pbuf = GST_PULSERING_BUFFER_CAST (buf);
+ psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (pbuf));
+
+ /* FIXME post message rather than using a signal (as mixer interface) */
+ if (g_atomic_int_compare_and_exchange (&psink->notify, 1, 0)) {
+ g_object_notify (G_OBJECT (psink), "volume");
+ g_object_notify (G_OBJECT (psink), "mute");
+ g_object_notify (G_OBJECT (psink), "current-device");
+ }
+
+ /* make sure the ringbuffer is started */
+ if (G_UNLIKELY (g_atomic_int_get (&buf->state) !=
+ GST_AUDIO_RING_BUFFER_STATE_STARTED)) {
+ /* see if we are allowed to start it */
+ if (G_UNLIKELY (g_atomic_int_get (&buf->may_start) == FALSE))
+ goto no_start;
+
+ GST_DEBUG_OBJECT (buf, "start!");
+ if (!gst_audio_ring_buffer_start (buf))
+ goto start_failed;
+ }
+
+ pa_threaded_mainloop_lock (mainloop);
+
+ GST_DEBUG_OBJECT (psink, "entering commit");
+ pbuf->in_commit = TRUE;
+
+ bpf = GST_AUDIO_INFO_BPF (&buf->spec.info);
+ bufsize = buf->spec.segsize * buf->spec.segtotal;
+
+ /* our toy resampler for trick modes */
+ reverse = out_samples < 0;
+ out_samples = ABS (out_samples);
+
+ if (in_samples >= out_samples)
+ toprocess = &in_samples;
+ else
+ toprocess = &out_samples;
+
+ inr = in_samples - 1;
+ outr = out_samples - 1;
+
+ GST_DEBUG_OBJECT (psink, "in %d, out %d", inr, outr);
+
+ /* data_end points to the last sample we have to write, not one past it;
+ * this is needed to properly handle reverse playback. */
+ data_end = data + (bpf * inr);
+
+ if (g_atomic_int_get (&psink->format_lost)) {
+ /* Sink format changed, drop the data and hope upstream renegotiates */
+ goto fake_done;
+ }
+
+ if (pbuf->paused)
+ goto was_paused;
+
++#ifdef __TIZEN__
++ /* make sure the clock is running by uncorking before we write */
++ if (pbuf->corked) {
++ if (!gst_pulsering_set_corked (pbuf, FALSE, FALSE))
++ goto uncork_failed;
++ }
++#endif
+ /* offset is in bytes */
+ offset = *sample * bpf;
+
+ while (*toprocess > 0) {
+ size_t avail;
+ guint towrite;
+
+ GST_LOG_OBJECT (psink,
+ "need to write %d samples at offset %" G_GINT64_FORMAT, *toprocess,
+ offset);
+
+ if (offset != pbuf->m_lastoffset)
+ GST_LOG_OBJECT (psink, "discontinuity, offset is %" G_GINT64_FORMAT ", "
+ "last offset was %" G_GINT64_FORMAT, offset, pbuf->m_lastoffset);
+
+ towrite = out_samples * bpf;
+
+ /* Wait for at least segsize bytes to become available */
+ if (towrite > buf->spec.segsize)
+ towrite = buf->spec.segsize;
+
+ if ((pbuf->m_writable < towrite) || (offset != pbuf->m_lastoffset)) {
+ /* if no room left or discontinuity in offset,
+ we need to flush data and get a new buffer */
+
+ /* flush the buffer if possible */
+ if ((pbuf->m_data != NULL) && (pbuf->m_towrite > 0)) {
+
+ GST_LOG_OBJECT (psink,
+ "flushing %u samples at offset %" G_GINT64_FORMAT,
+ (guint) pbuf->m_towrite / bpf, pbuf->m_offset);
+
+ if (pa_stream_write (pbuf->stream, (uint8_t *) pbuf->m_data,
+ pbuf->m_towrite, NULL, pbuf->m_offset, PA_SEEK_ABSOLUTE) < 0) {
+ goto write_failed;
+ }
+ }
+ pbuf->m_towrite = 0;
+ pbuf->m_offset = offset; /* keep track of current offset */
+
+ /* get a buffer to write in for now on */
+ for (;;) {
+ pbuf->m_writable = pa_stream_writable_size (pbuf->stream);
+
+ if (g_atomic_int_get (&psink->format_lost)) {
+ /* Sink format changed, give up and hope upstream renegotiates */
+ goto fake_done;
+ }
+
+ if (pbuf->m_writable == (size_t) - 1)
+ goto writable_size_failed;
+
+ pbuf->m_writable /= bpf;
+ pbuf->m_writable *= bpf; /* handle only complete samples */
+
+ if (pbuf->m_writable >= towrite)
+ break;
+
+ /* see if we need to uncork because we have no free space */
+ if (pbuf->corked) {
+ if (!gst_pulsering_set_corked (pbuf, FALSE, FALSE))
+ goto uncork_failed;
+ }
+
+ /* we can't write segsize bytes, wait a bit */
+ GST_LOG_OBJECT (psink, "waiting for free space");
+ pa_threaded_mainloop_wait (mainloop);
+
+ if (pbuf->paused)
+ goto was_paused;
+ }
+
+ /* Recalculate what we can write in the next chunk */
+ towrite = out_samples * bpf;
+ if (pbuf->m_writable > towrite)
+ pbuf->m_writable = towrite;
+
+ GST_LOG_OBJECT (psink, "requesting %" G_GSIZE_FORMAT " bytes of "
+ "shared memory", pbuf->m_writable);
+
+ if (pa_stream_begin_write (pbuf->stream, &pbuf->m_data,
+ &pbuf->m_writable) < 0) {
+ GST_LOG_OBJECT (psink, "pa_stream_begin_write() failed");
+ goto writable_size_failed;
+ }
+
+ GST_LOG_OBJECT (psink, "got %" G_GSIZE_FORMAT " bytes of shared memory",
+ pbuf->m_writable);
+
+ }
+
+ if (towrite > pbuf->m_writable)
+ towrite = pbuf->m_writable;
+ avail = towrite / bpf;
+
+ GST_LOG_OBJECT (psink, "writing %u samples at offset %" G_GUINT64_FORMAT,
+ (guint) avail, offset);
+
+ /* No trick modes for passthrough streams */
+ if (G_UNLIKELY (!pbuf->is_pcm && (inr != outr || reverse))) {
+ GST_WARNING_OBJECT (psink, "Passthrough stream can't run in trick mode");
+ goto unlock_and_fail;
+ }
+
+ if (G_LIKELY (inr == outr && !reverse)) {
+ /* no rate conversion, simply write out the samples */
+ /* copy the data into internal buffer */
+
+ memcpy ((guint8 *) pbuf->m_data + pbuf->m_towrite, data, towrite);
+ pbuf->m_towrite += towrite;
+ pbuf->m_writable -= towrite;
+
+ data += towrite;
+ in_samples -= avail;
+ out_samples -= avail;
+ } else {
+ guint8 *dest, *d, *d_end;
+
+ /* write into the PulseAudio shm buffer */
+ dest = d = (guint8 *) pbuf->m_data + pbuf->m_towrite;
+ d_end = d + towrite;
+
+ if (!reverse) {
+ if (inr >= outr)
+ /* forward speed up */
+ FWD_UP_SAMPLES (data, data_end, d, d_end);
+ else
+ /* forward slow down */
+ FWD_DOWN_SAMPLES (data, data_end, d, d_end);
+ } else {
+ if (inr >= outr)
+ /* reverse speed up */
+ REV_UP_SAMPLES (data, data_end, d, d_end);
+ else
+ /* reverse slow down */
+ REV_DOWN_SAMPLES (data, data_end, d, d_end);
+ }
+ /* see what we have left to write */
+ towrite = (d - dest);
+ pbuf->m_towrite += towrite;
+ pbuf->m_writable -= towrite;
+
+ avail = towrite / bpf;
+ }
+
+ /* flush the buffer if it's full */
+ if ((pbuf->m_data != NULL) && (pbuf->m_towrite > 0)
+ && (pbuf->m_writable == 0)) {
+ GST_LOG_OBJECT (psink, "flushing %u samples at offset %" G_GINT64_FORMAT,
+ (guint) pbuf->m_towrite / bpf, pbuf->m_offset);
+
+ if (pa_stream_write (pbuf->stream, (uint8_t *) pbuf->m_data,
+ pbuf->m_towrite, NULL, pbuf->m_offset, PA_SEEK_ABSOLUTE) < 0) {
+ goto write_failed;
+ }
+ pbuf->m_towrite = 0;
+ pbuf->m_offset = offset + towrite; /* keep track of current offset */
+ }
+
+ *sample += avail;
+ offset += avail * bpf;
+ pbuf->m_lastoffset = offset;
+
+ /* check if we need to uncork after writing the samples */
+ if (pbuf->corked) {
+ const pa_timing_info *info;
+
+ if ((info = pa_stream_get_timing_info (pbuf->stream))) {
+ GST_LOG_OBJECT (psink,
+ "read_index at %" G_GUINT64_FORMAT ", offset %" G_GINT64_FORMAT,
+ info->read_index, offset);
+
+ /* we uncork when the read_index is too far behind the offset we need
+ * to write to. */
+ if (info->read_index + bufsize <= offset) {
+ if (!gst_pulsering_set_corked (pbuf, FALSE, FALSE))
+ goto uncork_failed;
+ }
+ } else {
+ GST_LOG_OBJECT (psink, "no timing info available yet");
+ }
+ }
+ }
+
+ fake_done:
+ /* we consumed all samples here */
+ data = data_end + bpf;
+
+ pbuf->in_commit = FALSE;
+ pa_threaded_mainloop_unlock (mainloop);
+
+ done:
+ result = inr - ((data_end - data) / bpf);
+ GST_LOG_OBJECT (psink, "wrote %d samples", result);
+
+ return result;
+
+ /* ERRORS */
+ unlock_and_fail:
+ {
+ pbuf->in_commit = FALSE;
+ GST_LOG_OBJECT (psink, "we are reset");
+ pa_threaded_mainloop_unlock (mainloop);
+ goto done;
+ }
+ no_start:
+ {
+ GST_LOG_OBJECT (psink, "we cannot start");
+ return 0;
+ }
+ start_failed:
+ {
+ GST_LOG_OBJECT (psink, "failed to start the ringbuffer");
+ return 0;
+ }
+ uncork_failed:
+ {
+ pbuf->in_commit = FALSE;
+ GST_ERROR_OBJECT (psink, "uncork failed");
+ pa_threaded_mainloop_unlock (mainloop);
+ goto done;
+ }
+ was_paused:
+ {
+ pbuf->in_commit = FALSE;
+ GST_LOG_OBJECT (psink, "we are paused");
+ pa_threaded_mainloop_unlock (mainloop);
+ goto done;
+ }
+ writable_size_failed:
+ {
+ GST_ELEMENT_ERROR (psink, RESOURCE, FAILED,
+ ("pa_stream_writable_size() failed: %s",
+ pa_strerror (pa_context_errno (pbuf->context))), (NULL));
+ goto unlock_and_fail;
+ }
+ write_failed:
+ {
+ GST_ELEMENT_ERROR (psink, RESOURCE, FAILED,
+ ("pa_stream_write() failed: %s",
+ pa_strerror (pa_context_errno (pbuf->context))), (NULL));
+ goto unlock_and_fail;
+ }
+ }
+
+ /* write pending local samples, must be called with the mainloop lock */
+ static void
+ gst_pulsering_flush (GstPulseRingBuffer * pbuf)
+ {
+ GstPulseSink *psink;
+
+ psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (pbuf));
+ GST_DEBUG_OBJECT (psink, "entering flush");
+
+ /* flush the buffer if possible */
+ if (pbuf->stream && (pbuf->m_data != NULL) && (pbuf->m_towrite > 0)) {
+ #ifndef GST_DISABLE_GST_DEBUG
+ gint bpf;
+
+ bpf = (GST_AUDIO_RING_BUFFER_CAST (pbuf))->spec.info.bpf;
+ GST_LOG_OBJECT (psink,
+ "flushing %u samples at offset %" G_GINT64_FORMAT,
+ (guint) pbuf->m_towrite / bpf, pbuf->m_offset);
+ #endif
+
+ if (pa_stream_write (pbuf->stream, (uint8_t *) pbuf->m_data,
+ pbuf->m_towrite, NULL, pbuf->m_offset, PA_SEEK_ABSOLUTE) < 0) {
+ goto write_failed;
+ }
+
+ pbuf->m_offset += pbuf->m_towrite; /* keep track of current offset */
+ pbuf->m_towrite = 0;
+ }
+
+ done:
+ return;
+
+ /* ERRORS */
+ write_failed:
+ {
+ GST_ELEMENT_ERROR (psink, RESOURCE, FAILED,
+ ("pa_stream_write() failed: %s",
+ pa_strerror (pa_context_errno (pbuf->context))), (NULL));
+ goto done;
+ }
+ }
+
+ static void gst_pulsesink_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+ static void gst_pulsesink_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+ static void gst_pulsesink_finalize (GObject * object);
+
+ static gboolean gst_pulsesink_event (GstBaseSink * sink, GstEvent * event);
+ static gboolean gst_pulsesink_query (GstBaseSink * sink, GstQuery * query);
+
+ static GstStateChangeReturn gst_pulsesink_change_state (GstElement * element,
+ GstStateChange transition);
+
+ #define gst_pulsesink_parent_class parent_class
+ G_DEFINE_TYPE_WITH_CODE (GstPulseSink, gst_pulsesink, GST_TYPE_AUDIO_BASE_SINK,
+ gst_pulsesink_init_contexts ();
+ G_IMPLEMENT_INTERFACE (GST_TYPE_STREAM_VOLUME, NULL)
+ );
+ GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (pulsesink, "pulsesink",
+ GST_RANK_PRIMARY + 10, GST_TYPE_PULSESINK, pulse_element_init (plugin));
+
+ static GstAudioRingBuffer *
+ gst_pulsesink_create_ringbuffer (GstAudioBaseSink * sink)
+ {
+ GstAudioRingBuffer *buffer;
+
+ GST_DEBUG_OBJECT (sink, "creating ringbuffer");
+ buffer = g_object_new (GST_TYPE_PULSERING_BUFFER, NULL);
+ GST_DEBUG_OBJECT (sink, "created ringbuffer @%p", buffer);
+
+ return buffer;
+ }
+
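+ /* For the compressed formats below (AC3, E-AC3, DTS, MPEG audio, AAC) each
+ * frame is wrapped into an IEC 61937 burst so it can be passed through to
+ * the device; PCM and anything else is forwarded untouched (just ref'd). */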
+ static GstBuffer *
+ gst_pulsesink_payload (GstAudioBaseSink * sink, GstBuffer * buf)
+ {
+ switch (sink->ringbuffer->spec.type) {
+ case GST_AUDIO_RING_BUFFER_FORMAT_TYPE_AC3:
+ case GST_AUDIO_RING_BUFFER_FORMAT_TYPE_EAC3:
+ case GST_AUDIO_RING_BUFFER_FORMAT_TYPE_DTS:
+ case GST_AUDIO_RING_BUFFER_FORMAT_TYPE_MPEG:
+ case GST_AUDIO_RING_BUFFER_FORMAT_TYPE_MPEG2_AAC:
+ case GST_AUDIO_RING_BUFFER_FORMAT_TYPE_MPEG4_AAC:
+ {
+ /* FIXME: alloc memory from PA if possible */
+ gint framesize = gst_audio_iec61937_frame_size (&sink->ringbuffer->spec);
+ GstBuffer *out;
+ GstMapInfo inmap, outmap;
+ gboolean res;
+
+ if (framesize <= 0)
+ return NULL;
+
+ out = gst_buffer_new_and_alloc (framesize);
+
+ gst_buffer_map (buf, &inmap, GST_MAP_READ);
+ gst_buffer_map (out, &outmap, GST_MAP_WRITE);
+
+ res = gst_audio_iec61937_payload (inmap.data, inmap.size,
+ outmap.data, outmap.size, &sink->ringbuffer->spec, G_BIG_ENDIAN);
+
+ gst_buffer_unmap (buf, &inmap);
+ gst_buffer_unmap (out, &outmap);
+
+ if (!res) {
+ gst_buffer_unref (out);
+ return NULL;
+ }
+
+ gst_buffer_copy_into (out, buf, GST_BUFFER_COPY_METADATA, 0, -1);
+ return out;
+ }
+
+ default:
+ return gst_buffer_ref (buf);
+ }
+ }
+
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
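++/* Appends every buffer that reaches the sink pad to dump_fd_input as raw
++ * bytes, i.e. the dump has whatever PCM layout was negotiated on the pad.
++ * Assuming, say, S16LE stereo at 44100 Hz, the resulting file could be
++ * auditioned with: ffplay -f s16le -ar 44100 -ac 2 <dump-file> */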
++static GstPadProbeReturn
++gst_pulsesink_pad_dump_probe (GstPad * pad, GstPadProbeInfo * info, gpointer data)
++{
++ GstPulseSink *psink = GST_PULSESINK_CAST (data);
++ size_t written = 0;
++ GstBuffer *buffer = GST_PAD_PROBE_INFO_BUFFER (info);
++ GstMapInfo in_map;
++ if (psink->dump_fd_input) {
++ gst_buffer_map(buffer, &in_map, GST_MAP_READ);
++ written = fwrite(in_map.data, 1, in_map.size, psink->dump_fd_input);
++ if (written != in_map.size)
++ GST_WARNING("failed to write!!! ferror=%d", ferror(psink->dump_fd_input));
++ gst_buffer_unmap(buffer, &in_map);
++ }
++ return GST_PAD_PROBE_OK;
++}
++#endif /* __TIZEN__ && PCM_DUMP_ENABLE */
++
+ static void
+ gst_pulsesink_class_init (GstPulseSinkClass * klass)
+ {
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstBaseSinkClass *gstbasesink_class = GST_BASE_SINK_CLASS (klass);
+ GstBaseSinkClass *bc;
+ GstAudioBaseSinkClass *gstaudiosink_class = GST_AUDIO_BASE_SINK_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+ GstCaps *caps;
+ gchar *clientname;
+
+ gobject_class->finalize = gst_pulsesink_finalize;
+ gobject_class->set_property = gst_pulsesink_set_property;
+ gobject_class->get_property = gst_pulsesink_get_property;
+
+ gstbasesink_class->event = GST_DEBUG_FUNCPTR (gst_pulsesink_event);
+ gstbasesink_class->query = GST_DEBUG_FUNCPTR (gst_pulsesink_query);
+
+ /* restore the original basesink pull methods */
+ bc = g_type_class_peek (GST_TYPE_BASE_SINK);
+ gstbasesink_class->activate_pull = GST_DEBUG_FUNCPTR (bc->activate_pull);
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_pulsesink_change_state);
+
+ gstaudiosink_class->create_ringbuffer =
+ GST_DEBUG_FUNCPTR (gst_pulsesink_create_ringbuffer);
+ gstaudiosink_class->payload = GST_DEBUG_FUNCPTR (gst_pulsesink_payload);
+
+ /* Overwrite GObject fields */
+ g_object_class_install_property (gobject_class,
+ PROP_SERVER,
+ g_param_spec_string ("server", "Server",
+ "The PulseAudio server to connect to", DEFAULT_SERVER,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_DEVICE,
+ g_param_spec_string ("device", "Device",
+ "The PulseAudio sink device to connect to", DEFAULT_DEVICE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_CURRENT_DEVICE,
+ g_param_spec_string ("current-device", "Current Device",
+ "The current PulseAudio sink device", DEFAULT_CURRENT_DEVICE,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_DEVICE_NAME,
+ g_param_spec_string ("device-name", "Device name",
+ "Human-readable name of the sound device", DEFAULT_DEVICE_NAME,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_VOLUME,
+ g_param_spec_double ("volume", "Volume",
+ "Linear volume of this stream, 1.0=100%", 0.0, MAX_VOLUME,
+ DEFAULT_VOLUME, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class,
+ PROP_MUTE,
+ g_param_spec_boolean ("mute", "Mute",
+ "Mute state of this stream", DEFAULT_MUTE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstPulseSink:client-name:
+ *
+ * The PulseAudio client name to use.
+ */
+ clientname = gst_pulse_client_name ();
+ g_object_class_install_property (gobject_class,
+ PROP_CLIENT_NAME,
+ g_param_spec_string ("client-name", "Client Name",
+ "The PulseAudio client name to use", clientname,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+ g_free (clientname);
+
+ /**
+ * GstPulseSink:stream-properties:
+ *
+ * List of pulseaudio stream properties. A list of defined properties can be
+ * found in the [pulseaudio api docs](http://0pointer.de/lennart/projects/pulseaudio/doxygen/proplist_8h.html).
+ *
+ * Below is an example for registering as a music application to pulseaudio.
+ * |[
+ * GstStructure *props;
+ *
+ * props = gst_structure_from_string ("props,media.role=music", NULL);
+ * g_object_set (pulse, "stream-properties", props, NULL);
+ * gst_structure_free (props);
+ * ]|
+ */
+ g_object_class_install_property (gobject_class,
+ PROP_STREAM_PROPERTIES,
+ g_param_spec_boxed ("stream-properties", "stream properties",
+ "list of pulseaudio stream properties",
+ GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
++#ifdef __TIZEN__
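++ /**
++ * GstPulseSink:latency:
++ *
++ * Latency profile requested from the Tizen audio backend; one of "low",
++ * "mid" (the default) or "high". For example, a low-latency stream could
++ * be requested with:
++ * |[
++ * gst-launch-1.0 audiotestsrc ! pulsesink latency=low
++ * ]|
++ */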
++ g_object_class_install_property (gobject_class,
++ PROP_AUDIO_LATENCY,
++ g_param_spec_string ("latency", "Audio Backend Latency",
++ "Audio Backend Latency (\"low\": Low Latency, \"mid\": Mid Latency, \"high\": High Latency)",
++ DEFAULT_AUDIO_LATENCY,
++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++
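++ /**
++ * GstPulseSink:auto-render-delay:
++ *
++ * Whether the sink should apply the render delay automatically.
++ * Disabled by default.
++ */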
++ g_object_class_install_property (gobject_class,
++ PROP_AUTO_RENDER_DELAY,
++ g_param_spec_boolean ("auto-render-delay", "Auto Render Delay",
++ "Apply render delay automatically", DEFAULT_AUTO_RENDER_DELAY,
++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++#endif /* __TIZEN__ */
++
+ gst_element_class_set_static_metadata (gstelement_class,
+ "PulseAudio Audio Sink",
+ "Sink/Audio", "Plays audio to a PulseAudio server", "Lennart Poettering");
+
+ caps =
+ gst_pulse_fix_pcm_caps (gst_caps_from_string (PULSE_SINK_TEMPLATE_CAPS));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, caps));
+ gst_caps_unref (caps);
+ }
+
+ static void
+ free_device_info (GstPulseDeviceInfo * device_info)
+ {
+ GList *l;
+
+ g_free (device_info->description);
+
+ for (l = g_list_first (device_info->formats); l; l = g_list_next (l))
+ pa_format_info_free ((pa_format_info *) l->data);
+
+ g_list_free (device_info->formats);
+ }
+
+ /* Returns the current time of the sink ringbuffer. The timing_info is updated
+ * on every data write/flush and every 100ms (PA_STREAM_AUTO_TIMING_UPDATE).
+ */
+ static GstClockTime
+ gst_pulsesink_get_time (GstClock * clock, GstAudioBaseSink * sink)
+ {
+ GstPulseSink *psink;
+ GstPulseRingBuffer *pbuf;
+ pa_usec_t time;
+
+ if (!sink->ringbuffer || !sink->ringbuffer->acquired)
+ return GST_CLOCK_TIME_NONE;
+
+ pbuf = GST_PULSERING_BUFFER_CAST (sink->ringbuffer);
+ psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (pbuf));
+
+ if (g_atomic_int_get (&psink->format_lost)) {
+ /* Stream was lost in a format change, it'll get set up again once
+ * upstream renegotiates */
+ return psink->format_lost_time;
+ }
+
+ pa_threaded_mainloop_lock (mainloop);
+ if (gst_pulsering_is_dead (psink, pbuf, TRUE))
+ goto server_dead;
+
+ /* if we don't have enough data to get a timestamp, just return NONE, which
+ * will return the last reported time */
+ if (pa_stream_get_time (pbuf->stream, &time) < 0) {
+ GST_DEBUG_OBJECT (psink, "could not get time");
+ time = GST_CLOCK_TIME_NONE;
+ } else
+ time *= 1000;
+ pa_threaded_mainloop_unlock (mainloop);
+
+ GST_LOG_OBJECT (psink, "current time is %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (time));
+
+ return time;
+
+ /* ERRORS */
+ server_dead:
+ {
+ GST_DEBUG_OBJECT (psink, "the server is dead");
+ pa_threaded_mainloop_unlock (mainloop);
+
+ return GST_CLOCK_TIME_NONE;
+ }
+ }
+
+ static void
+ gst_pulsesink_sink_info_cb (pa_context * c, const pa_sink_info * i, int eol,
+ void *userdata)
+ {
+ GstPulseDeviceInfo *device_info = (GstPulseDeviceInfo *) userdata;
+ guint8 j;
+
+ if (!i)
+ goto done;
+
+ device_info->description = g_strdup (i->description);
+
+ device_info->formats = NULL;
+ for (j = 0; j < i->n_formats; j++)
+ device_info->formats = g_list_prepend (device_info->formats,
+ pa_format_info_copy (i->formats[j]));
+
+ done:
+ pa_threaded_mainloop_signal (mainloop, 0);
+ }
+
+ /* Call with mainloop lock held */
+ static pa_stream *
+ gst_pulsesink_create_probe_stream (GstPulseSink * psink,
+ GstPulseRingBuffer * pbuf, pa_format_info * format)
+ {
+ pa_format_info *formats[1] = { format };
+ pa_stream *stream;
+ pa_stream_flags_t flags;
+
+ GST_LOG_OBJECT (psink, "Creating probe stream");
+
+ if (!(stream = pa_stream_new_extended (pbuf->context, "pulsesink probe",
+ formats, 1, psink->proplist)))
+ goto error;
+
+ /* construct the flags */
+ flags = PA_STREAM_INTERPOLATE_TIMING | PA_STREAM_AUTO_TIMING_UPDATE |
+ PA_STREAM_ADJUST_LATENCY | PA_STREAM_START_CORKED;
+
+ pa_stream_set_state_callback (stream, gst_pulsering_stream_state_cb, pbuf);
+
+ if (pa_stream_connect_playback (stream, psink->device, NULL, flags, NULL,
+ NULL) < 0)
+ goto error;
+
+ if (!gst_pulsering_wait_for_stream_ready (psink, stream))
+ goto error;
+
+ return stream;
+
+ error:
+ if (stream)
+ pa_stream_unref (stream);
+ return NULL;
+ }
+
+ static GstCaps *
+ gst_pulsesink_query_getcaps (GstPulseSink * psink, GstCaps * filter)
+ {
+ GstPulseRingBuffer *pbuf = NULL;
+ GstPulseDeviceInfo device_info = { NULL, NULL };
+ GstCaps *ret = NULL;
+ GList *i;
+ pa_operation *o = NULL;
+ pa_stream *stream;
+
+ GST_OBJECT_LOCK (psink);
+ pbuf = GST_PULSERING_BUFFER_CAST (GST_AUDIO_BASE_SINK (psink)->ringbuffer);
+ if (pbuf != NULL)
+ gst_object_ref (pbuf);
+ GST_OBJECT_UNLOCK (psink);
+
+ if (!pbuf) {
+ ret = gst_pad_get_pad_template_caps (GST_AUDIO_BASE_SINK_PAD (psink));
+ goto out;
+ }
+
+ GST_OBJECT_LOCK (pbuf);
+ pa_threaded_mainloop_lock (mainloop);
+
+ if (!pbuf->context) {
+ ret = gst_pad_get_pad_template_caps (GST_AUDIO_BASE_SINK_PAD (psink));
+ goto unlock;
+ }
+
+ ret = gst_caps_new_empty ();
+
+ if (pbuf->stream) {
+ /* We're in PAUSED or higher */
+ stream = pbuf->stream;
+
+ } else if (pbuf->probe_stream) {
+ /* We're not paused, but have a cached probe stream */
+ stream = pbuf->probe_stream;
+
+ } else {
+ /* We're not yet in PAUSED and still need to create a probe stream.
+ *
+ * FIXME: PA doesn't accept "any" format. We fix something reasonable since
+ * this is merely a probe. This should eventually be fixed in PA and
+ * hard-coding the format should be dropped. */
+ pa_format_info *format = pa_format_info_new ();
+ format->encoding = PA_ENCODING_PCM;
+ pa_format_info_set_sample_format (format, PA_SAMPLE_S16LE);
+ pa_format_info_set_rate (format, GST_AUDIO_DEF_RATE);
+ pa_format_info_set_channels (format, GST_AUDIO_DEF_CHANNELS);
+
+ pbuf->probe_stream = gst_pulsesink_create_probe_stream (psink, pbuf,
+ format);
+
+ pa_format_info_free (format);
+
+ if (!pbuf->probe_stream) {
+ GST_WARNING_OBJECT (psink, "Could not create probe stream");
+ goto unlock;
+ }
+
+ stream = pbuf->probe_stream;
+ }
+
+ if (!(o = pa_context_get_sink_info_by_name (pbuf->context,
+ pa_stream_get_device_name (stream), gst_pulsesink_sink_info_cb,
+ &device_info)))
+ goto info_failed;
+
+ while (pa_operation_get_state (o) == PA_OPERATION_RUNNING) {
+ pa_threaded_mainloop_wait (mainloop);
+ if (gst_pulsering_is_dead (psink, pbuf, FALSE))
+ goto unlock;
+ }
+
+ for (i = g_list_first (device_info.formats); i; i = g_list_next (i)) {
+ GstCaps *caps = gst_pulse_format_info_to_caps ((pa_format_info *) i->data);
+ if (caps)
+ gst_caps_append (ret, caps);
+ }
+
+ unlock:
+ pa_threaded_mainloop_unlock (mainloop);
+ /* FIXME: this could be freed once the device name has been retrieved */
+ GST_OBJECT_UNLOCK (pbuf);
+
+ if (filter) {
+ GstCaps *tmp = gst_caps_intersect_full (filter, ret,
+ GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (ret);
+ ret = tmp;
+ }
+
+ out:
+ free_device_info (&device_info);
+
+ if (o)
+ pa_operation_unref (o);
+
+ if (pbuf)
+ gst_object_unref (pbuf);
+
+ GST_DEBUG_OBJECT (psink, "caps %" GST_PTR_FORMAT, ret);
+
+ return ret;
+
+ info_failed:
+ {
+ GST_ELEMENT_ERROR (psink, RESOURCE, FAILED,
+ ("pa_context_get_sink_input_info() failed: %s",
+ pa_strerror (pa_context_errno (pbuf->context))), (NULL));
+ goto unlock;
+ }
+ }
+
+ static gboolean
+ gst_pulsesink_query_acceptcaps (GstPulseSink * psink, GstCaps * caps)
+ {
+ GstPulseRingBuffer *pbuf = NULL;
+ GstPulseDeviceInfo device_info = { NULL, NULL };
+ GstCaps *pad_caps;
+ GstStructure *st;
+ gboolean ret = FALSE;
+
+ GstAudioRingBufferSpec spec = { 0 };
+ pa_operation *o = NULL;
+ pa_channel_map channel_map;
+ pa_format_info *format = NULL;
+ guint channels;
+
+ pad_caps = gst_pad_get_pad_template_caps (GST_BASE_SINK_PAD (psink));
+ ret = gst_caps_is_subset (caps, pad_caps);
+ gst_caps_unref (pad_caps);
+
+ GST_DEBUG_OBJECT (psink, "caps %" GST_PTR_FORMAT, caps);
+
+ /* Template caps didn't match */
+ if (!ret)
+ goto done;
+
+ /* If we've not got fixed caps, creating a stream might fail, so let's just
+ * return from here with default acceptcaps behaviour */
+ if (!gst_caps_is_fixed (caps))
+ goto done;
+
+ GST_OBJECT_LOCK (psink);
+ pbuf = GST_PULSERING_BUFFER_CAST (GST_AUDIO_BASE_SINK (psink)->ringbuffer);
+ if (pbuf != NULL)
+ gst_object_ref (pbuf);
+ GST_OBJECT_UNLOCK (psink);
+
+ /* We're still in NULL state */
+ if (pbuf == NULL)
+ goto done;
+
+ GST_OBJECT_LOCK (pbuf);
+ pa_threaded_mainloop_lock (mainloop);
+
+ if (pbuf->context == NULL)
+ goto out;
+
+ ret = FALSE;
+
+ spec.latency_time = GST_AUDIO_BASE_SINK (psink)->latency_time;
+ if (!gst_audio_ring_buffer_parse_caps (&spec, caps))
+ goto out;
+
+ if (!gst_pulse_fill_format_info (&spec, &format, &channels))
+ goto out;
+
+ /* Make sure input is framed (one frame per buffer) and can be payloaded */
+ if (!pa_format_info_is_pcm (format)) {
+ gboolean framed = FALSE, parsed = FALSE;
+ st = gst_caps_get_structure (caps, 0);
+
+ gst_structure_get_boolean (st, "framed", &framed);
+ gst_structure_get_boolean (st, "parsed", &parsed);
+ if ((!framed && !parsed) || gst_audio_iec61937_frame_size (&spec) <= 0)
+ goto out;
+ }
+
+ /* initialize the channel map */
+ if (pa_format_info_is_pcm (format) &&
+ gst_pulse_gst_to_channel_map (&channel_map, &spec))
+ pa_format_info_set_channel_map (format, &channel_map);
+
+ if (pbuf->stream || pbuf->probe_stream) {
+ /* We're already in PAUSED or above, so just reuse this stream to query
+ * sink formats and use those. */
+ GList *i;
+ const char *device_name = pa_stream_get_device_name (pbuf->stream ?
+ pbuf->stream : pbuf->probe_stream);
+
+ if (!(o = pa_context_get_sink_info_by_name (pbuf->context, device_name,
+ gst_pulsesink_sink_info_cb, &device_info)))
+ goto info_failed;
+
+ while (pa_operation_get_state (o) == PA_OPERATION_RUNNING) {
+ pa_threaded_mainloop_wait (mainloop);
+ if (gst_pulsering_is_dead (psink, pbuf, FALSE))
+ goto out;
+ }
+
+ for (i = g_list_first (device_info.formats); i; i = g_list_next (i)) {
+ if (pa_format_info_is_compatible ((pa_format_info *) i->data, format)) {
+ ret = TRUE;
+ break;
+ }
+ }
+ } else {
+ /* We're in READY, let's connect a stream to see if the format is
+ * accepted by whatever sink we're routed to */
+ pbuf->probe_stream = gst_pulsesink_create_probe_stream (psink, pbuf,
+ format);
+ if (pbuf->probe_stream)
+ ret = TRUE;
+ }
+
+ out:
+ if (format)
+ pa_format_info_free (format);
+
+ free_device_info (&device_info);
+
+ if (o)
+ pa_operation_unref (o);
+
+ pa_threaded_mainloop_unlock (mainloop);
+ GST_OBJECT_UNLOCK (pbuf);
+
+ gst_caps_replace (&spec.caps, NULL);
+ gst_object_unref (pbuf);
+
+ done:
+
+ return ret;
+
+ info_failed:
+ {
+ GST_ELEMENT_ERROR (psink, RESOURCE, FAILED,
+ ("pa_context_get_sink_input_info() failed: %s",
+ pa_strerror (pa_context_errno (pbuf->context))), (NULL));
+ goto out;
+ }
+ }
+
+ static void
+ gst_pulsesink_init (GstPulseSink * pulsesink)
+ {
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
++ GstPad *sinkpad = NULL;
++ int vconf_dump = 0;
++#endif /* __TIZEN__ && PCM_DUMP_ENABLE */
++
+ pulsesink->server = NULL;
+ pulsesink->device = NULL;
+ pulsesink->device_info.description = NULL;
+ pulsesink->client_name = gst_pulse_client_name ();
+
+ pulsesink->device_info.formats = NULL;
+
+ pulsesink->volume = DEFAULT_VOLUME;
+ pulsesink->volume_set = FALSE;
+
+ pulsesink->mute = DEFAULT_MUTE;
+ pulsesink->mute_set = FALSE;
+
+ pulsesink->notify = 0;
+
+ g_atomic_int_set (&pulsesink->format_lost, FALSE);
+ pulsesink->format_lost_time = GST_CLOCK_TIME_NONE;
+
+ pulsesink->properties = NULL;
+ pulsesink->proplist = NULL;
++#ifdef __TIZEN__
++ pulsesink->latency = g_strdup (DEFAULT_AUDIO_LATENCY);
++ pulsesink->auto_render_delay = DEFAULT_AUTO_RENDER_DELAY;
++ pulsesink->proplist = pa_proplist_new();
++ pa_proplist_sets(pulsesink->proplist, PA_PROP_MEDIA_TIZEN_AUDIO_LATENCY, pulsesink->latency);
++#ifdef PCM_DUMP_ENABLE
++ if (vconf_get_int(GST_PULSESINK_DUMP_VCONF_KEY, &vconf_dump)) {
++ GST_WARNING("vconf_get_int %s failed", GST_PULSESINK_DUMP_VCONF_KEY);
++ }
++ pulsesink->need_dump_input = vconf_dump & GST_PULSESINK_DUMP_INPUT_FLAG ? TRUE : FALSE;
++ pulsesink->dump_fd_input = NULL;
++ if (pulsesink->need_dump_input) {
++ sinkpad = gst_element_get_static_pad((GstElement *)pulsesink, "sink");
++ if (sinkpad) {
++ gst_pad_add_probe (sinkpad, GST_PAD_PROBE_TYPE_BUFFER, gst_pulsesink_pad_dump_probe, pulsesink, NULL);
++ gst_object_unref (GST_OBJECT(sinkpad));
++ }
++ }
++#endif
++#endif /* __TIZEN__ */
+
+ /* override with a custom clock */
+ if (GST_AUDIO_BASE_SINK (pulsesink)->provided_clock)
+ gst_object_unref (GST_AUDIO_BASE_SINK (pulsesink)->provided_clock);
+
+ GST_AUDIO_BASE_SINK (pulsesink)->provided_clock =
+ gst_audio_clock_new ("GstPulseSinkClock",
+ (GstAudioClockGetTimeFunc) gst_pulsesink_get_time, pulsesink, NULL);
+ }
+
+ static void
+ gst_pulsesink_finalize (GObject * object)
+ {
+ GstPulseSink *pulsesink = GST_PULSESINK_CAST (object);
+
+ g_free (pulsesink->server);
+ g_free (pulsesink->device);
+ g_free (pulsesink->client_name);
+ g_free (pulsesink->current_sink_name);
+
+ free_device_info (&pulsesink->device_info);
+
+ if (pulsesink->properties)
+ gst_structure_free (pulsesink->properties);
+ if (pulsesink->proplist)
+ pa_proplist_free (pulsesink->proplist);
+
++#ifdef __TIZEN__
++ g_free (pulsesink->latency);
++#endif /* __TIZEN__ */
++
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ static void
+ gst_pulsesink_set_volume (GstPulseSink * psink, gdouble volume)
+ {
++#ifndef __TIZEN__
+ pa_cvolume v;
+ pa_operation *o = NULL;
++#endif
+ GstPulseRingBuffer *pbuf;
+ uint32_t idx;
+
++#ifndef __TIZEN__
+ if (!mainloop)
+ goto no_mainloop;
+
+ pa_threaded_mainloop_lock (mainloop);
++#endif
+
+ GST_DEBUG_OBJECT (psink, "setting volume to %f", volume);
+
+ pbuf = GST_PULSERING_BUFFER_CAST (GST_AUDIO_BASE_SINK (psink)->ringbuffer);
+ if (pbuf == NULL || pbuf->stream == NULL)
+ goto no_buffer;
+
+ if ((idx = pa_stream_get_index (pbuf->stream)) == PA_INVALID_INDEX)
+ goto no_index;
+
++#ifndef __TIZEN__
+ if (pbuf->is_pcm)
+ gst_pulse_cvolume_from_linear (&v, pbuf->channels, volume);
+ else
+ /* FIXME: this will eventually be superseded by checks to see if the volume
+ * is readable/writable */
+ goto unlock;
+
+ if (!(o = pa_context_set_sink_input_volume (pbuf->context, idx,
+ &v, NULL, NULL)))
+ goto volume_failed;
+
++#else
++ if (!psink->mute)
++ gst_pulse_set_volume_ratio (idx, "out", volume);
++ psink->volume = volume;
++#endif
++
+ /* We don't really care about the result of this call */
+ unlock:
++#ifndef __TIZEN__
+
+ if (o)
+ pa_operation_unref (o);
+
+ pa_threaded_mainloop_unlock (mainloop);
++#endif
+
+ return;
+
+ /* ERRORS */
++#ifndef __TIZEN__
+ no_mainloop:
+ {
+ psink->volume = volume;
+ psink->volume_set = TRUE;
+
+ GST_DEBUG_OBJECT (psink, "we have no mainloop");
+ return;
+ }
++#endif
+ no_buffer:
+ {
+ psink->volume = volume;
+ psink->volume_set = TRUE;
+
+ GST_DEBUG_OBJECT (psink, "we have no ringbuffer");
+ goto unlock;
+ }
+ no_index:
+ {
+ GST_DEBUG_OBJECT (psink, "we don't have a stream index");
+ goto unlock;
+ }
++#ifndef __TIZEN__
+ volume_failed:
+ {
+ GST_ELEMENT_ERROR (psink, RESOURCE, FAILED,
+ ("pa_stream_set_sink_input_volume() failed: %s",
+ pa_strerror (pa_context_errno (pbuf->context))), (NULL));
+ goto unlock;
+ }
++#endif
+ }
+
+ static void
+ gst_pulsesink_set_mute (GstPulseSink * psink, gboolean mute)
+ {
++#ifndef __TIZEN__
+ pa_operation *o = NULL;
++#endif
+ GstPulseRingBuffer *pbuf;
+ uint32_t idx;
+
++#ifndef __TIZEN__
+ if (!mainloop)
+ goto no_mainloop;
+
+ pa_threaded_mainloop_lock (mainloop);
++#endif
+
+ GST_DEBUG_OBJECT (psink, "setting mute state to %d", mute);
+
+ pbuf = GST_PULSERING_BUFFER_CAST (GST_AUDIO_BASE_SINK (psink)->ringbuffer);
+ if (pbuf == NULL || pbuf->stream == NULL)
+ goto no_buffer;
+
+ if ((idx = pa_stream_get_index (pbuf->stream)) == PA_INVALID_INDEX)
+ goto no_index;
+
++#ifndef __TIZEN__
+ if (!(o = pa_context_set_sink_input_mute (pbuf->context, idx,
+ mute, NULL, NULL)))
+ goto mute_failed;
++#else
++ gst_pulse_set_volume_ratio (idx, "out", mute ? 0 : psink->volume);
++ psink->mute = mute;
++#endif
+
+ /* We don't really care about the result of this call */
+ unlock:
++#ifndef __TIZEN__
+
+ if (o)
+ pa_operation_unref (o);
+
+ pa_threaded_mainloop_unlock (mainloop);
++#endif
+
+ return;
+
+ /* ERRORS */
++#ifndef __TIZEN__
+ no_mainloop:
+ {
+ psink->mute = mute;
+ psink->mute_set = TRUE;
+
+ GST_DEBUG_OBJECT (psink, "we have no mainloop");
+ return;
+ }
++#endif
+ no_buffer:
+ {
+ psink->mute = mute;
+ psink->mute_set = TRUE;
+
+ GST_DEBUG_OBJECT (psink, "we have no ringbuffer");
+ goto unlock;
+ }
+ no_index:
+ {
+ GST_DEBUG_OBJECT (psink, "we don't have a stream index");
+ goto unlock;
+ }
++#ifndef __TIZEN__
+ mute_failed:
+ {
+ GST_ELEMENT_ERROR (psink, RESOURCE, FAILED,
+ ("pa_stream_set_sink_input_mute() failed: %s",
+ pa_strerror (pa_context_errno (pbuf->context))), (NULL));
+ goto unlock;
+ }
++#endif
+ }
+
+ static void
+ gst_pulsesink_sink_input_info_cb (pa_context * c, const pa_sink_input_info * i,
+ int eol, void *userdata)
+ {
+ GstPulseRingBuffer *pbuf;
+ GstPulseSink *psink;
+
+ pbuf = GST_PULSERING_BUFFER_CAST (userdata);
+ psink = GST_PULSESINK_CAST (GST_OBJECT_PARENT (pbuf));
+
+ if (!i)
+ goto done;
+
+ if (!pbuf->stream)
+ goto done;
+
+ /* If the index doesn't match our current stream,
+ * it implies we just recreated the stream (caps change)
+ */
+ if (i->index == pa_stream_get_index (pbuf->stream)) {
+ psink->volume = pa_sw_volume_to_linear (pa_cvolume_max (&i->volume));
+ psink->mute = i->mute;
+ psink->current_sink_idx = i->sink;
+
+ if (psink->volume > MAX_VOLUME) {
+ GST_WARNING_OBJECT (psink, "Clipped volume from %f to %f", psink->volume,
+ MAX_VOLUME);
+ psink->volume = MAX_VOLUME;
+ }
+ }
+
+ done:
+ pa_threaded_mainloop_signal (mainloop, 0);
+ }
+
+ static void
+ gst_pulsesink_get_sink_input_info (GstPulseSink * psink, gdouble * volume,
+ gboolean * mute)
+ {
+ GstPulseRingBuffer *pbuf;
+ pa_operation *o = NULL;
+ uint32_t idx;
+
+ if (!mainloop)
+ goto no_mainloop;
+
+ pa_threaded_mainloop_lock (mainloop);
+
+ pbuf = GST_PULSERING_BUFFER_CAST (GST_AUDIO_BASE_SINK (psink)->ringbuffer);
+ if (pbuf == NULL || pbuf->stream == NULL)
+ goto no_buffer;
+
+ if ((idx = pa_stream_get_index (pbuf->stream)) == PA_INVALID_INDEX)
+ goto no_index;
+
+ if (!(o = pa_context_get_sink_input_info (pbuf->context, idx,
+ gst_pulsesink_sink_input_info_cb, pbuf)))
+ goto info_failed;
+
+ while (pa_operation_get_state (o) == PA_OPERATION_RUNNING) {
+ pa_threaded_mainloop_wait (mainloop);
+ if (gst_pulsering_is_dead (psink, pbuf, TRUE))
+ goto unlock;
+ }
+
+ unlock:
+ if (volume)
+ *volume = psink->volume;
+ if (mute)
+ *mute = psink->mute;
+
+ if (o)
+ pa_operation_unref (o);
+
+ pa_threaded_mainloop_unlock (mainloop);
+
+ return;
+
+ /* ERRORS */
+ no_mainloop:
+ {
+ if (volume)
+ *volume = psink->volume;
+ if (mute)
+ *mute = psink->mute;
+
+ GST_DEBUG_OBJECT (psink, "we have no mainloop");
+ return;
+ }
+ no_buffer:
+ {
+ GST_DEBUG_OBJECT (psink, "we have no ringbuffer");
+ goto unlock;
+ }
+ no_index:
+ {
+ GST_DEBUG_OBJECT (psink, "we don't have a stream index");
+ goto unlock;
+ }
+ info_failed:
+ {
+ GST_ELEMENT_ERROR (psink, RESOURCE, FAILED,
+ ("pa_context_get_sink_input_info() failed: %s",
+ pa_strerror (pa_context_errno (pbuf->context))), (NULL));
+ goto unlock;
+ }
+ }
+
+ static void
+ gst_pulsesink_current_sink_info_cb (pa_context * c, const pa_sink_info * i,
+ int eol, void *userdata)
+ {
+ GstPulseSink *psink;
+
+ psink = GST_PULSESINK_CAST (userdata);
+
+ if (!i)
+ goto done;
+
+ /* If the index doesn't match our current stream,
+ * it implies we just recreated the stream (caps change)
+ */
+ if (i->index == psink->current_sink_idx) {
+ g_free (psink->current_sink_name);
+ psink->current_sink_name = g_strdup (i->name);
+ }
+
+ done:
+ pa_threaded_mainloop_signal (mainloop, 0);
+ }
+
+ static gchar *
+ gst_pulsesink_get_current_device (GstPulseSink * pulsesink)
+ {
+ pa_operation *o = NULL;
+ GstPulseRingBuffer *pbuf;
+ gchar *current_sink;
+
+ if (!mainloop)
+ goto no_mainloop;
+
+ pbuf =
+ GST_PULSERING_BUFFER_CAST (GST_AUDIO_BASE_SINK (pulsesink)->ringbuffer);
+ if (pbuf == NULL || pbuf->stream == NULL)
+ goto no_buffer;
+
+ gst_pulsesink_get_sink_input_info (pulsesink, NULL, NULL);
+
+ pa_threaded_mainloop_lock (mainloop);
+
+ if (!(o = pa_context_get_sink_info_by_index (pbuf->context,
+ pulsesink->current_sink_idx, gst_pulsesink_current_sink_info_cb,
+ pulsesink)))
+ goto info_failed;
+
+ while (pa_operation_get_state (o) == PA_OPERATION_RUNNING) {
+ pa_threaded_mainloop_wait (mainloop);
+ if (gst_pulsering_is_dead (pulsesink, pbuf, TRUE))
+ goto unlock;
+ }
+
+ unlock:
+
+ current_sink = g_strdup (pulsesink->current_sink_name);
+
+ if (o)
+ pa_operation_unref (o);
+
+ pa_threaded_mainloop_unlock (mainloop);
+
+ return current_sink;
+
+ /* ERRORS */
+ no_mainloop:
+ {
+ GST_DEBUG_OBJECT (pulsesink, "we have no mainloop");
+ return NULL;
+ }
+ no_buffer:
+ {
+ GST_DEBUG_OBJECT (pulsesink, "we have no ringbuffer");
+ return NULL;
+ }
+ info_failed:
+ {
+ GST_ELEMENT_ERROR (pulsesink, RESOURCE, FAILED,
+ ("pa_context_get_sink_input_info() failed: %s",
+ pa_strerror (pa_context_errno (pbuf->context))), (NULL));
+ goto unlock;
+ }
+ }
+
+ static gchar *
+ gst_pulsesink_device_description (GstPulseSink * psink)
+ {
+ GstPulseRingBuffer *pbuf;
+ pa_operation *o = NULL;
+ gchar *t;
+
+ if (!mainloop)
+ goto no_mainloop;
+
+ pa_threaded_mainloop_lock (mainloop);
+ pbuf = GST_PULSERING_BUFFER_CAST (GST_AUDIO_BASE_SINK (psink)->ringbuffer);
+ if (pbuf == NULL)
+ goto no_buffer;
+
+ free_device_info (&psink->device_info);
+ if (!(o = pa_context_get_sink_info_by_name (pbuf->context,
+ psink->device, gst_pulsesink_sink_info_cb, &psink->device_info)))
+ goto info_failed;
+
+ while (pa_operation_get_state (o) == PA_OPERATION_RUNNING) {
+ pa_threaded_mainloop_wait (mainloop);
+ if (gst_pulsering_is_dead (psink, pbuf, FALSE))
+ goto unlock;
+ }
+
+ unlock:
+ if (o)
+ pa_operation_unref (o);
+
+ t = g_strdup (psink->device_info.description);
+ pa_threaded_mainloop_unlock (mainloop);
+
+ return t;
+
+ /* ERRORS */
+ no_mainloop:
+ {
+ GST_DEBUG_OBJECT (psink, "we have no mainloop");
+ return NULL;
+ }
+ no_buffer:
+ {
+ GST_DEBUG_OBJECT (psink, "we have no ringbuffer");
+ goto unlock;
+ }
+ info_failed:
+ {
+ GST_ELEMENT_ERROR (psink, RESOURCE, FAILED,
+ ("pa_context_get_sink_info_by_index() failed: %s",
+ pa_strerror (pa_context_errno (pbuf->context))), (NULL));
+ goto unlock;
+ }
+ }
+
+ static void
+ gst_pulsesink_set_stream_device (GstPulseSink * psink, const gchar * device)
+ {
+ pa_operation *o = NULL;
+ GstPulseRingBuffer *pbuf;
+ uint32_t idx;
+
+ if (!mainloop)
+ goto no_mainloop;
+
+ pa_threaded_mainloop_lock (mainloop);
+
+ pbuf = GST_PULSERING_BUFFER_CAST (GST_AUDIO_BASE_SINK (psink)->ringbuffer);
+ if (pbuf == NULL || pbuf->stream == NULL)
+ goto no_buffer;
+
+ if ((idx = pa_stream_get_index (pbuf->stream)) == PA_INVALID_INDEX)
+ goto no_index;
+
+
+ GST_DEBUG_OBJECT (psink, "setting stream device to %s", device);
+
+ if (!(o = pa_context_move_sink_input_by_name (pbuf->context, idx, device,
+ NULL, NULL)))
+ goto move_failed;
+
+ unlock:
+
+ if (o)
+ pa_operation_unref (o);
+
+ pa_threaded_mainloop_unlock (mainloop);
+
+ return;
+
+ /* ERRORS */
+ no_mainloop:
+ {
+ GST_DEBUG_OBJECT (psink, "we have no mainloop");
+ return;
+ }
+ no_buffer:
+ {
+ GST_DEBUG_OBJECT (psink, "we have no ringbuffer");
+ goto unlock;
+ }
+ no_index:
+ {
+ GST_DEBUG_OBJECT (psink, "we don't have a stream index");
+ goto unlock;
+ }
+ move_failed:
+ {
+ GST_ELEMENT_ERROR (psink, RESOURCE, FAILED,
+ ("pa_context_move_sink_input_by_name(%s) failed: %s", device,
+ pa_strerror (pa_context_errno (pbuf->context))), (NULL));
+ goto unlock;
+ }
+ }
+
+
+ static void
+ gst_pulsesink_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+ {
+ GstPulseSink *pulsesink = GST_PULSESINK_CAST (object);
+
+ switch (prop_id) {
+ case PROP_SERVER:
+ g_free (pulsesink->server);
+ pulsesink->server = g_value_dup_string (value);
+ break;
+ case PROP_DEVICE:
+ g_free (pulsesink->device);
+ pulsesink->device = g_value_dup_string (value);
+ gst_pulsesink_set_stream_device (pulsesink, pulsesink->device);
+ break;
+ case PROP_VOLUME:
+ gst_pulsesink_set_volume (pulsesink, g_value_get_double (value));
+ break;
+ case PROP_MUTE:
+ gst_pulsesink_set_mute (pulsesink, g_value_get_boolean (value));
+ break;
+ case PROP_CLIENT_NAME:
+ g_free (pulsesink->client_name);
+ if (!g_value_get_string (value)) {
+ GST_WARNING_OBJECT (pulsesink,
+ "Empty PulseAudio client name not allowed. Resetting to default value");
+ pulsesink->client_name = gst_pulse_client_name ();
+ } else
+ pulsesink->client_name = g_value_dup_string (value);
+ break;
+ case PROP_STREAM_PROPERTIES:
+ if (pulsesink->properties)
+ gst_structure_free (pulsesink->properties);
+ pulsesink->properties =
+ gst_structure_copy (gst_value_get_structure (value));
+ if (pulsesink->proplist)
+ pa_proplist_free (pulsesink->proplist);
+ pulsesink->proplist = gst_pulse_make_proplist (pulsesink->properties);
+ break;
++#ifdef __TIZEN__
++ case PROP_AUDIO_LATENCY:
++ g_free (pulsesink->latency);
++ pulsesink->latency = g_value_dup_string (value);
++ /* setting NULL restores the default latency */
++ if (pulsesink->latency == NULL) {
++ pulsesink->latency = g_strdup (DEFAULT_AUDIO_LATENCY);
++ }
++ if (!pulsesink->proplist) {
++ pulsesink->proplist = pa_proplist_new();
++ }
++ pa_proplist_sets(pulsesink->proplist, PA_PROP_MEDIA_TIZEN_AUDIO_LATENCY, pulsesink->latency);
++ GST_DEBUG_OBJECT(pulsesink, "latency(%s)", pulsesink->latency);
++ break;
++ case PROP_AUTO_RENDER_DELAY:
++ pulsesink->auto_render_delay = g_value_get_boolean (value);
++ GST_DEBUG_OBJECT (pulsesink, "setting auto-render-delay to %d", g_value_get_boolean (value));
++ break;
++#endif /* __TIZEN__ */
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static void
+ gst_pulsesink_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+ {
+
+ GstPulseSink *pulsesink = GST_PULSESINK_CAST (object);
+
+ switch (prop_id) {
+ case PROP_SERVER:
+ g_value_set_string (value, pulsesink->server);
+ break;
+ case PROP_DEVICE:
+ g_value_set_string (value, pulsesink->device);
+ break;
+ case PROP_CURRENT_DEVICE:
+ {
+ gchar *current_device = gst_pulsesink_get_current_device (pulsesink);
+ if (current_device)
+ g_value_take_string (value, current_device);
+ else
+ g_value_set_string (value, "");
+ break;
+ }
+ case PROP_DEVICE_NAME:
+ g_value_take_string (value, gst_pulsesink_device_description (pulsesink));
+ break;
+ case PROP_VOLUME:
+ {
++#ifndef __TIZEN__
+ gdouble volume;
+
+ gst_pulsesink_get_sink_input_info (pulsesink, &volume, NULL);
+ g_value_set_double (value, volume);
++#else
++ g_value_set_double (value, pulsesink->volume);
++#endif
+ break;
+ }
+ case PROP_MUTE:
+ {
++#ifndef __TIZEN__
+ gboolean mute;
+
+ gst_pulsesink_get_sink_input_info (pulsesink, NULL, &mute);
+ g_value_set_boolean (value, mute);
++#else
++ g_value_set_boolean (value, pulsesink->mute);
++#endif
+ break;
+ }
+ case PROP_CLIENT_NAME:
+ g_value_set_string (value, pulsesink->client_name);
+ break;
+ case PROP_STREAM_PROPERTIES:
+ gst_value_set_structure (value, pulsesink->properties);
+ break;
++#ifdef __TIZEN__
++ case PROP_AUDIO_LATENCY:
++ g_value_set_string (value, pulsesink->latency);
++ break;
++ case PROP_AUTO_RENDER_DELAY:
++ g_value_set_boolean (value, pulsesink->auto_render_delay);
++ break;
++#endif /* __TIZEN__ */
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static void
+ gst_pulsesink_change_title (GstPulseSink * psink, const gchar * t)
+ {
+ pa_operation *o = NULL;
+ GstPulseRingBuffer *pbuf;
+
+ pa_threaded_mainloop_lock (mainloop);
+
+ pbuf = GST_PULSERING_BUFFER_CAST (GST_AUDIO_BASE_SINK (psink)->ringbuffer);
+
+ if (pbuf == NULL || pbuf->stream == NULL)
+ goto no_buffer;
+
+ g_free (pbuf->stream_name);
+ pbuf->stream_name = g_strdup (t);
+
+ if (!(o = pa_stream_set_name (pbuf->stream, pbuf->stream_name, NULL, NULL)))
+ goto name_failed;
+
+ /* We're not interested if this operation failed or not */
+ unlock:
+
+ if (o)
+ pa_operation_unref (o);
+ pa_threaded_mainloop_unlock (mainloop);
+
+ return;
+
+ /* ERRORS */
+ no_buffer:
+ {
+ GST_DEBUG_OBJECT (psink, "we have no ringbuffer");
+ goto unlock;
+ }
+ name_failed:
+ {
+ GST_ELEMENT_ERROR (psink, RESOURCE, FAILED,
+ ("pa_stream_set_name() failed: %s",
+ pa_strerror (pa_context_errno (pbuf->context))), (NULL));
+ goto unlock;
+ }
+ }
+
+ static void
+ gst_pulsesink_change_props (GstPulseSink * psink, GstTagList * l)
+ {
+ static const gchar *const map[] = {
+ GST_TAG_TITLE, PA_PROP_MEDIA_TITLE,
+
+ /* might get overridden in the next iteration by GST_TAG_ARTIST */
+ GST_TAG_PERFORMER, PA_PROP_MEDIA_ARTIST,
+
+ GST_TAG_ARTIST, PA_PROP_MEDIA_ARTIST,
+ GST_TAG_LANGUAGE_CODE, PA_PROP_MEDIA_LANGUAGE,
+ GST_TAG_LOCATION, PA_PROP_MEDIA_FILENAME,
+ /* We might add more here later on ... */
+ NULL
+ };
+ pa_proplist *pl = NULL;
+ const gchar *const *t;
+ gboolean empty = TRUE;
+ pa_operation *o = NULL;
+ GstPulseRingBuffer *pbuf;
+
+ pl = pa_proplist_new ();
+
+ for (t = map; *t; t += 2) {
+ gchar *n = NULL;
+
+ if (gst_tag_list_get_string (l, *t, &n)) {
+
+ if (n && *n) {
+ pa_proplist_sets (pl, *(t + 1), n);
+ empty = FALSE;
+ }
+
+ g_free (n);
+ }
+ }
+ if (empty)
+ goto finish;
+
+ pa_threaded_mainloop_lock (mainloop);
+ pbuf = GST_PULSERING_BUFFER_CAST (GST_AUDIO_BASE_SINK (psink)->ringbuffer);
+ if (pbuf == NULL || pbuf->stream == NULL)
+ goto no_buffer;
+
+ /* We're not interested if this operation failed or not */
+ if (!(o = pa_stream_proplist_update (pbuf->stream, PA_UPDATE_REPLACE,
+ pl, NULL, NULL))) {
+ GST_DEBUG_OBJECT (psink, "pa_stream_proplist_update() failed");
+ }
+
+ unlock:
+
+ if (o)
+ pa_operation_unref (o);
+
+ pa_threaded_mainloop_unlock (mainloop);
+
+ finish:
+
+ if (pl)
+ pa_proplist_free (pl);
+
+ return;
+
+ /* ERRORS */
+ no_buffer:
+ {
+ GST_DEBUG_OBJECT (psink, "we have no ringbuffer");
+ goto unlock;
+ }
+ }
+
+ static void
+ gst_pulsesink_flush_ringbuffer (GstPulseSink * psink)
+ {
+ GstPulseRingBuffer *pbuf;
+
+ pa_threaded_mainloop_lock (mainloop);
+
+ pbuf = GST_PULSERING_BUFFER_CAST (GST_AUDIO_BASE_SINK (psink)->ringbuffer);
+
+ if (pbuf == NULL || pbuf->stream == NULL)
+ goto no_buffer;
+
+ gst_pulsering_flush (pbuf);
+
+ /* Uncork if we haven't already (happens when waiting to get enough data
+ * to send out the first time) */
+ if (pbuf->corked)
+ gst_pulsering_set_corked (pbuf, FALSE, FALSE);
+
+ /* We're not interested if this operation failed or not */
+ unlock:
+ pa_threaded_mainloop_unlock (mainloop);
+
+ return;
+
+ /* ERRORS */
+ no_buffer:
+ {
+ GST_DEBUG_OBJECT (psink, "we have no ringbuffer");
+ goto unlock;
+ }
+ }
+
+ static gboolean
+ gst_pulsesink_event (GstBaseSink * sink, GstEvent * event)
+ {
+ GstPulseSink *pulsesink = GST_PULSESINK_CAST (sink);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_TAG:{
+ gchar *title = NULL, *artist = NULL, *location = NULL, *description =
+ NULL, *t = NULL, *buf = NULL;
+ GstTagList *l;
+
+ gst_event_parse_tag (event, &l);
+
+ gst_tag_list_get_string (l, GST_TAG_TITLE, &title);
+ gst_tag_list_get_string (l, GST_TAG_ARTIST, &artist);
+ gst_tag_list_get_string (l, GST_TAG_LOCATION, &location);
+ gst_tag_list_get_string (l, GST_TAG_DESCRIPTION, &description);
+
+ if (!artist)
+ gst_tag_list_get_string (l, GST_TAG_PERFORMER, &artist);
+
+ if (title && artist)
+ /* TRANSLATORS: 'song title' by 'artist name' */
+ t = buf = g_strdup_printf (_("'%s' by '%s'"), g_strstrip (title),
+ g_strstrip (artist));
+ else if (title)
+ t = g_strstrip (title);
+ else if (description)
+ t = g_strstrip (description);
+ else if (location)
+ t = g_strstrip (location);
+
+ if (t)
+ gst_pulsesink_change_title (pulsesink, t);
+
+ g_free (title);
+ g_free (artist);
+ g_free (location);
+ g_free (description);
+ g_free (buf);
+
+ gst_pulsesink_change_props (pulsesink, l);
+
+ break;
+ }
+ case GST_EVENT_GAP:{
+ GstClockTime timestamp, duration;
+
+ gst_event_parse_gap (event, ×tamp, &duration);
+ if (duration == GST_CLOCK_TIME_NONE)
+ gst_pulsesink_flush_ringbuffer (pulsesink);
+ break;
+ }
+ case GST_EVENT_EOS:
+ gst_pulsesink_flush_ringbuffer (pulsesink);
+ break;
+ default:
+ ;
+ }
+
+ return GST_BASE_SINK_CLASS (parent_class)->event (sink, event);
+ }
+
+ static gboolean
+ gst_pulsesink_query (GstBaseSink * sink, GstQuery * query)
+ {
+ GstPulseSink *pulsesink = GST_PULSESINK_CAST (sink);
+ gboolean ret = FALSE;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *caps, *filter;
+
+ gst_query_parse_caps (query, &filter);
+ caps = gst_pulsesink_query_getcaps (pulsesink, filter);
+
+ if (caps) {
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+ ret = TRUE;
+ }
+ break;
+ }
+ case GST_QUERY_ACCEPT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_query_parse_accept_caps (query, &caps);
+ ret = gst_pulsesink_query_acceptcaps (pulsesink, caps);
+ gst_query_set_accept_caps_result (query, ret);
+ ret = TRUE;
+ break;
+ }
+ default:
+ ret = GST_BASE_SINK_CLASS (parent_class)->query (sink, query);
+ break;
+ }
+ return ret;
+ }
+
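+ /* All pulsesink instances share a single PA threaded mainloop: it is created
+ * on the first NULL->READY transition and refcounted via mainloop_ref_ct
+ * under pa_shared_resource_mutex, so the last sink leaving READY tears it
+ * down here. */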
+ static void
+ gst_pulsesink_release_mainloop (GstPulseSink * psink)
+ {
+ if (!mainloop)
+ return;
+
+ pa_threaded_mainloop_lock (mainloop);
+ while (psink->defer_pending) {
+ GST_DEBUG_OBJECT (psink, "waiting for stream status message emission");
+ pa_threaded_mainloop_wait (mainloop);
+ }
+ pa_threaded_mainloop_unlock (mainloop);
+
+ g_mutex_lock (&pa_shared_resource_mutex);
+ mainloop_ref_ct--;
+ if (!mainloop_ref_ct) {
+ GST_INFO_OBJECT (psink, "terminating pa main loop thread");
+ pa_threaded_mainloop_stop (mainloop);
+ pa_threaded_mainloop_free (mainloop);
+ mainloop = NULL;
+ }
+ g_mutex_unlock (&pa_shared_resource_mutex);
+ }
+
+ static GstStateChangeReturn
+ gst_pulsesink_change_state (GstElement * element, GstStateChange transition)
+ {
+ GstPulseSink *pulsesink = GST_PULSESINK (element);
+ GstStateChangeReturn ret;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ g_mutex_lock (&pa_shared_resource_mutex);
+ if (!mainloop_ref_ct) {
+ GST_INFO_OBJECT (element, "new pa main loop thread");
+ if (!(mainloop = pa_threaded_mainloop_new ()))
+ goto mainloop_failed;
+ if (pa_threaded_mainloop_start (mainloop) < 0) {
+ pa_threaded_mainloop_free (mainloop);
+ goto mainloop_start_failed;
+ }
+ mainloop_ref_ct = 1;
+ g_mutex_unlock (&pa_shared_resource_mutex);
+ } else {
+ GST_INFO_OBJECT (element, "reusing pa main loop thread");
+ mainloop_ref_ct++;
+ g_mutex_unlock (&pa_shared_resource_mutex);
+ }
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ gst_element_post_message (element,
+ gst_message_new_clock_provide (GST_OBJECT_CAST (element),
+ GST_AUDIO_BASE_SINK (pulsesink)->provided_clock, TRUE));
+ break;
+
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ if (ret == GST_STATE_CHANGE_FAILURE)
+ goto state_failure;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ /* format_lost is reset in release() in audiobasesink */
+ gst_element_post_message (element,
+ gst_message_new_clock_lost (GST_OBJECT_CAST (element),
+ GST_AUDIO_BASE_SINK (pulsesink)->provided_clock));
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ gst_pulsesink_release_mainloop (pulsesink);
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+
+ /* ERRORS */
+ mainloop_failed:
+ {
+ g_mutex_unlock (&pa_shared_resource_mutex);
+ GST_ELEMENT_ERROR (pulsesink, RESOURCE, FAILED,
+ ("pa_threaded_mainloop_new() failed"), (NULL));
+ return GST_STATE_CHANGE_FAILURE;
+ }
+ mainloop_start_failed:
+ {
+ g_mutex_unlock (&pa_shared_resource_mutex);
+ GST_ELEMENT_ERROR (pulsesink, RESOURCE, FAILED,
+ ("pa_threaded_mainloop_start() failed"), (NULL));
+ return GST_STATE_CHANGE_FAILURE;
+ }
+ state_failure:
+ {
+ if (transition == GST_STATE_CHANGE_NULL_TO_READY) {
+ /* Clear the PA mainloop if audiobasesink failed to open the ring_buffer */
+ g_assert (mainloop);
+ gst_pulsesink_release_mainloop (pulsesink);
+ }
+ return ret;
+ }
+ }
--- /dev/null
+ /*-*- Mode: C; c-basic-offset: 2 -*-*/
+
+ /*
+ * GStreamer pulseaudio plugin
+ *
+ * Copyright (c) 2004-2008 Lennart Poettering
+ *
+ * gst-pulse is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of the
+ * License, or (at your option) any later version.
+ *
+ * gst-pulse is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with gst-pulse; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
+ * USA.
+ */
+
+ #ifndef __GST_PULSESINK_H__
+ #define __GST_PULSESINK_H__
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
++#include <stdio.h>
++#endif
++
+ #include <gst/gst.h>
+ #include <gst/audio/audio.h>
+ #include <gst/audio/gstaudiosink.h>
+
+ #include <pulse/pulseaudio.h>
+ #include <pulse/thread-mainloop.h>
+
+ #include "pulseutil.h"
+
+ G_BEGIN_DECLS
+
+ #define GST_TYPE_PULSESINK (gst_pulsesink_get_type())
+ G_DECLARE_FINAL_TYPE (GstPulseSink, gst_pulsesink, GST, PULSESINK,
+ GstAudioBaseSink)
+ #define GST_PULSESINK_CAST(obj) ((GstPulseSink *)(obj))
+
+ typedef struct _GstPulseDeviceInfo {
+ gchar *description;
+ GList *formats;
+ } GstPulseDeviceInfo;
+
+ struct _GstPulseSink
+ {
+ GstAudioBaseSink sink;
+
+ gchar *server, *device, *stream_name, *client_name;
+ GstPulseDeviceInfo device_info;
+
+ gdouble volume;
+ gboolean volume_set:1;
+ gboolean mute:1;
+ gboolean mute_set:1;
+ guint32 current_sink_idx;
+ gchar *current_sink_name;
+
+ guint defer_pending;
+
+ gint notify; /* atomic */
+
++#ifdef __TIZEN__
++ gchar *latency;
++ gboolean auto_render_delay:1;
++#ifdef PCM_DUMP_ENABLE
++ gint need_dump_input;
++ FILE *dump_fd_input;
++#endif
++#endif /* __TIZEN__ */
++
+ const gchar *pa_version;
+
+ GstStructure *properties;
+ pa_proplist *proplist;
+
+ gint format_lost;
+ GstClockTime format_lost_time;
+ };
+
+ #define PULSE_SINK_TEMPLATE_CAPS \
+ _PULSE_CAPS_PCM \
+ _PULSE_CAPS_AC3 \
+ _PULSE_CAPS_EAC3 \
+ _PULSE_CAPS_DTS \
+ _PULSE_CAPS_MP3 \
+ _PULSE_CAPS_AAC
+
+ G_END_DECLS
+
+ #endif /* __GST_PULSESINK_H__ */
--- /dev/null
-
+ /*
+ * GStreamer pulseaudio plugin
+ *
+ * Copyright (c) 2004-2008 Lennart Poettering
+ *
+ * gst-pulse is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of the
+ * License, or (at your option) any later version.
+ *
+ * gst-pulse is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with gst-pulse; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
+ * USA.
+ */
+
+ /**
+ * SECTION:element-pulsesrc
+ * @title: pulsesrc
+ * @see_also: pulsesink
+ *
+ * This element captures audio from a
+ * [PulseAudio sound server](http://www.pulseaudio.org).
+ *
+ * ## Example pipelines
+ * |[
+ * gst-launch-1.0 -v pulsesrc ! audioconvert ! vorbisenc ! oggmux ! filesink location=alsasrc.ogg
+ * ]| Record from a sound card using pulseaudio and encode to Ogg/Vorbis.
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <string.h>
+ #include <stdio.h>
+
+ #include <gst/base/gstbasesrc.h>
+ #include <gst/gsttaglist.h>
+ #include <gst/audio/audio.h>
+
+ #include "gstpulseelements.h"
+ #include "pulsesrc.h"
+ #include "pulseutil.h"
+
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
++#include <vconf.h>
++#endif
++
+ GST_DEBUG_CATEGORY_EXTERN (pulse_debug);
+ #define GST_CAT_DEFAULT pulse_debug
+
+ #define DEFAULT_SERVER NULL
+ #define DEFAULT_DEVICE NULL
+ #define DEFAULT_CURRENT_DEVICE NULL
+ #define DEFAULT_DEVICE_NAME NULL
+
+ #define DEFAULT_VOLUME 1.0
+ #define DEFAULT_MUTE FALSE
+ #define MAX_VOLUME 10.0
++#ifdef __TIZEN__
++#define DEFAULT_AUDIO_LATENCY "mid"
++#endif /* __TIZEN__ */
+
+ /* See the pulsesink code for notes on how we interact with the PA mainloop
+ * thread. */
+
+ enum
+ {
+ PROP_0,
+ PROP_SERVER,
+ PROP_DEVICE,
+ PROP_DEVICE_NAME,
+ PROP_CURRENT_DEVICE,
+ PROP_CLIENT_NAME,
+ PROP_STREAM_PROPERTIES,
+ PROP_SOURCE_OUTPUT_INDEX,
+ PROP_VOLUME,
+ PROP_MUTE,
++#ifdef __TIZEN__
++ PROP_AUDIO_LATENCY,
++#endif /* __TIZEN__ */
+ PROP_LAST
+ };
+
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
++#define GST_PULSESRC_DUMP_VCONF_KEY "memory/private/sound/pcm_dump"
++#define GST_PULSESRC_DUMP_OUTPUT_PATH_PREFIX "/tmp/dump_pulsesrc_out"
++#define GST_PULSESRC_DUMP_OUTPUT_FLAG 0x00200000U
++#endif
++
+ static void gst_pulsesrc_destroy_stream (GstPulseSrc * pulsesrc);
+ static void gst_pulsesrc_destroy_context (GstPulseSrc * pulsesrc);
+
+ static void gst_pulsesrc_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+ static void gst_pulsesrc_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+ static void gst_pulsesrc_finalize (GObject * object);
+
+ static gboolean gst_pulsesrc_set_corked (GstPulseSrc * psrc, gboolean corked,
+ gboolean wait);
+ static gboolean gst_pulsesrc_open (GstAudioSrc * asrc);
+
+ static gboolean gst_pulsesrc_close (GstAudioSrc * asrc);
+
+ static gboolean gst_pulsesrc_prepare (GstAudioSrc * asrc,
+ GstAudioRingBufferSpec * spec);
+
+ static gboolean gst_pulsesrc_unprepare (GstAudioSrc * asrc);
+
+ static guint gst_pulsesrc_read (GstAudioSrc * asrc, gpointer data,
+ guint length, GstClockTime * timestamp);
+ static guint gst_pulsesrc_delay (GstAudioSrc * asrc);
+
+ static void gst_pulsesrc_reset (GstAudioSrc * src);
+
+ static gboolean gst_pulsesrc_negotiate (GstBaseSrc * basesrc);
+ static gboolean gst_pulsesrc_event (GstBaseSrc * basesrc, GstEvent * event);
+
+ static GstStateChangeReturn gst_pulsesrc_change_state (GstElement *
+ element, GstStateChange transition);
+
+ static GstClockTime gst_pulsesrc_get_time (GstClock * clock, GstPulseSrc * src);
+
+ #define gst_pulsesrc_parent_class parent_class
+ G_DEFINE_TYPE_WITH_CODE (GstPulseSrc, gst_pulsesrc, GST_TYPE_AUDIO_SRC,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_STREAM_VOLUME, NULL));
+ GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (pulsesrc, "pulsesrc",
+ GST_RANK_PRIMARY + 10, GST_TYPE_PULSESRC, pulse_element_init (plugin));
+
+ static void
+ gst_pulsesrc_class_init (GstPulseSrcClass * klass)
+ {
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstAudioSrcClass *gstaudiosrc_class = GST_AUDIO_SRC_CLASS (klass);
+ GstBaseSrcClass *gstbasesrc_class = GST_BASE_SRC_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+ GstCaps *caps;
+ gchar *clientname;
+
+ gobject_class->finalize = gst_pulsesrc_finalize;
+ gobject_class->set_property = gst_pulsesrc_set_property;
+ gobject_class->get_property = gst_pulsesrc_get_property;
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_pulsesrc_change_state);
+
+ gstbasesrc_class->event = GST_DEBUG_FUNCPTR (gst_pulsesrc_event);
+ gstbasesrc_class->negotiate = GST_DEBUG_FUNCPTR (gst_pulsesrc_negotiate);
+
+ gstaudiosrc_class->open = GST_DEBUG_FUNCPTR (gst_pulsesrc_open);
+ gstaudiosrc_class->close = GST_DEBUG_FUNCPTR (gst_pulsesrc_close);
+ gstaudiosrc_class->prepare = GST_DEBUG_FUNCPTR (gst_pulsesrc_prepare);
+ gstaudiosrc_class->unprepare = GST_DEBUG_FUNCPTR (gst_pulsesrc_unprepare);
+ gstaudiosrc_class->read = GST_DEBUG_FUNCPTR (gst_pulsesrc_read);
+ gstaudiosrc_class->delay = GST_DEBUG_FUNCPTR (gst_pulsesrc_delay);
+ gstaudiosrc_class->reset = GST_DEBUG_FUNCPTR (gst_pulsesrc_reset);
+
+ /* Overwrite GObject fields */
+ g_object_class_install_property (gobject_class,
+ PROP_SERVER,
+ g_param_spec_string ("server", "Server",
+ "The PulseAudio server to connect to", DEFAULT_SERVER,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_DEVICE,
+ g_param_spec_string ("device", "Device",
+ "The PulseAudio source device to connect to", DEFAULT_DEVICE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_CURRENT_DEVICE,
+ g_param_spec_string ("current-device", "Current Device",
+ "The current PulseAudio source device", DEFAULT_CURRENT_DEVICE,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_DEVICE_NAME,
+ g_param_spec_string ("device-name", "Device name",
+ "Human-readable name of the sound device", DEFAULT_DEVICE_NAME,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ clientname = gst_pulse_client_name ();
+ /**
+ * GstPulseSrc:client-name:
+ *
+ * The PulseAudio client name to use.
+ */
+ g_object_class_install_property (gobject_class,
+ PROP_CLIENT_NAME,
+ g_param_spec_string ("client-name", "Client Name",
+ "The PulseAudio client_name_to_use", clientname,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+ g_free (clientname);
+
+ /**
+ * GstPulseSrc:stream-properties:
+ *
+ * List of pulseaudio stream properties. A list of defined properties can be
+ * found in the [pulseaudio api docs](http://0pointer.de/lennart/projects/pulseaudio/doxygen/proplist_8h.html).
+ *
+ * Below is an example for registering as a music application to pulseaudio.
+ * |[
+ * GstStructure *props;
+ *
+ * props = gst_structure_from_string ("props,media.role=music", NULL);
+ * g_object_set (pulse, "stream-properties", props, NULL);
+ * gst_structure_free (props);
+ * ]|
+ */
+ g_object_class_install_property (gobject_class,
+ PROP_STREAM_PROPERTIES,
+ g_param_spec_boxed ("stream-properties", "stream properties",
+ "list of pulseaudio stream properties",
+ GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstPulseSrc:source-output-index:
+ *
+ * The index of the PulseAudio source output corresponding to this element.
+ */
+ g_object_class_install_property (gobject_class,
+ PROP_SOURCE_OUTPUT_INDEX,
+ g_param_spec_uint ("source-output-index", "source output index",
+ "The index of the PulseAudio source output corresponding to this "
+ "record stream", 0, G_MAXUINT, PA_INVALID_INDEX,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "PulseAudio Audio Source",
+ "Source/Audio",
+ "Captures audio from a PulseAudio server", "Lennart Poettering");
+
+ caps = gst_pulse_fix_pcm_caps (gst_caps_from_string (_PULSE_CAPS_PCM));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, caps));
+ gst_caps_unref (caps);
+
+ /**
+ * GstPulseSrc:volume:
+ *
+ * The volume of the record stream.
+ */
+ g_object_class_install_property (gobject_class,
+ PROP_VOLUME, g_param_spec_double ("volume", "Volume",
+ "Linear volume of this stream, 1.0=100%",
+ 0.0, MAX_VOLUME, DEFAULT_VOLUME,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstPulseSrc:mute:
+ *
+ * Whether the stream is muted or not.
+ */
+ g_object_class_install_property (gobject_class,
+ PROP_MUTE, g_param_spec_boolean ("mute", "Mute",
+ "Mute state of this stream",
+ DEFAULT_MUTE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++
++#ifdef __TIZEN__
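++ /**
++ * GstPulseSrc:latency:
++ *
++ * Latency profile requested from the Tizen audio backend; one of "low",
++ * "mid" (the default) or "high", e.g.:
++ * |[
++ * gst-launch-1.0 pulsesrc latency=low ! fakesink
++ * ]|
++ */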
++ g_object_class_install_property (gobject_class,
++ PROP_AUDIO_LATENCY,
++ g_param_spec_string ("latency", "Audio Backend Latency",
++ "Audio Backend Latency (\"low\": Low Latency, \"mid\": Mid Latency, \"high\": High Latency)",
++ DEFAULT_AUDIO_LATENCY,
++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++#endif /* __TIZEN__ */
+ }
+
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
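++/* Same dump mechanism as pulsesink, but tapping buffers on their way out of
++ * the src pad into dump_fd_output (presumably created under
++ * GST_PULSESRC_DUMP_OUTPUT_PATH_PREFIX, defined above). */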
++static GstPadProbeReturn
++gst_pulsesrc_pad_dump_probe (GstPad *pad, GstPadProbeInfo * info, gpointer data)
++{
++ GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (data);
++ size_t written = 0;
++ GstBuffer *buffer = GST_PAD_PROBE_INFO_BUFFER (info);
++ GstMapInfo in_map;
++ if (pulsesrc->dump_fd_output) {
++ gst_buffer_map(buffer, &in_map, GST_MAP_READ);
++ written = fwrite(in_map.data, 1, in_map.size, pulsesrc->dump_fd_output);
++ if (written != in_map.size)
++ GST_WARNING("failed to write!!! ferror=%d", ferror(pulsesrc->dump_fd_output));
++ gst_buffer_unmap(buffer, &in_map);
++ }
++ return GST_PAD_PROBE_OK;
++}
++#endif
++
+ static void
+ gst_pulsesrc_init (GstPulseSrc * pulsesrc)
+ {
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
++ GstPad *srcpad = NULL;
++ int vconf_dump = 0;
++#endif
+ pulsesrc->server = NULL;
+ pulsesrc->device = NULL;
+ pulsesrc->client_name = gst_pulse_client_name ();
+ pulsesrc->device_description = NULL;
+
+ pulsesrc->context = NULL;
+ pulsesrc->stream = NULL;
+ pulsesrc->stream_connected = FALSE;
+ pulsesrc->source_output_idx = PA_INVALID_INDEX;
+
+ pulsesrc->read_buffer = NULL;
+ pulsesrc->read_buffer_length = 0;
+
+ pa_sample_spec_init (&pulsesrc->sample_spec);
+
+ pulsesrc->operation_success = FALSE;
+ pulsesrc->paused = TRUE;
+ pulsesrc->in_read = FALSE;
+
+ pulsesrc->volume = DEFAULT_VOLUME;
+ pulsesrc->volume_set = FALSE;
+
+ pulsesrc->mute = DEFAULT_MUTE;
+ pulsesrc->mute_set = FALSE;
+
+ pulsesrc->notify = 0;
+
+ pulsesrc->properties = NULL;
+ pulsesrc->proplist = NULL;
-
++#ifdef __TIZEN__
++ pulsesrc->latency = g_strdup (DEFAULT_AUDIO_LATENCY);
++ pulsesrc->proplist = pa_proplist_new ();
++ pa_proplist_sets (pulsesrc->proplist, PA_PROP_MEDIA_TIZEN_AUDIO_LATENCY,
++ pulsesrc->latency);
++
++#ifdef PCM_DUMP_ENABLE
++ if (vconf_get_int (GST_PULSESRC_DUMP_VCONF_KEY, &vconf_dump)) {
++ GST_WARNING_OBJECT (pulsesrc, "vconf_get_int %s failed",
++ GST_PULSESRC_DUMP_VCONF_KEY);
++ }
++ pulsesrc->need_dump_output =
++ (vconf_dump & GST_PULSESRC_DUMP_OUTPUT_FLAG) ? TRUE : FALSE;
++ pulsesrc->dump_fd_output = NULL;
++ if (pulsesrc->need_dump_output) {
++ srcpad = gst_element_get_static_pad (GST_ELEMENT (pulsesrc), "src");
++ gst_pad_add_probe (srcpad, GST_PAD_PROBE_TYPE_BUFFER,
++ gst_pulsesrc_pad_dump_probe, pulsesrc, NULL);
++ /* gst_element_get_static_pad() returned a new reference, drop it */
++ gst_object_unref (srcpad);
++ }
++#endif /* PCM_DUMP_ENABLE */
++#endif /* __TIZEN__ */
+ /* this should be the default but it isn't yet */
+ gst_audio_base_src_set_slave_method (GST_AUDIO_BASE_SRC (pulsesrc),
+ GST_AUDIO_BASE_SRC_SLAVE_SKEW);
+
+ /* override with a custom clock */
+ if (GST_AUDIO_BASE_SRC (pulsesrc)->clock)
+ gst_object_unref (GST_AUDIO_BASE_SRC (pulsesrc)->clock);
+
+ GST_AUDIO_BASE_SRC (pulsesrc)->clock =
+ gst_audio_clock_new ("GstPulseSrcClock",
+ (GstAudioClockGetTimeFunc) gst_pulsesrc_get_time, pulsesrc, NULL);
+ }
+
+ static void
+ gst_pulsesrc_destroy_stream (GstPulseSrc * pulsesrc)
+ {
+ if (pulsesrc->stream) {
+ pa_stream_disconnect (pulsesrc->stream);
+ pa_stream_unref (pulsesrc->stream);
+ pulsesrc->stream = NULL;
+ pulsesrc->stream_connected = FALSE;
+ pulsesrc->source_output_idx = PA_INVALID_INDEX;
+ g_object_notify (G_OBJECT (pulsesrc), "source-output-index");
+ }
+
+ g_free (pulsesrc->device_description);
+ pulsesrc->device_description = NULL;
+ }
+
+ static void
+ gst_pulsesrc_destroy_context (GstPulseSrc * pulsesrc)
+ {
+
+ gst_pulsesrc_destroy_stream (pulsesrc);
+
+ if (pulsesrc->context) {
+ pa_context_disconnect (pulsesrc->context);
+
+ /* Make sure we don't get any further callbacks */
+ pa_context_set_state_callback (pulsesrc->context, NULL, NULL);
+ pa_context_set_subscribe_callback (pulsesrc->context, NULL, NULL);
+
+ pa_context_unref (pulsesrc->context);
+
+ pulsesrc->context = NULL;
+ }
+ }
+
+ static void
+ gst_pulsesrc_finalize (GObject * object)
+ {
+ GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (object);
+
+ g_free (pulsesrc->server);
+ g_free (pulsesrc->device);
+ g_free (pulsesrc->client_name);
+ g_free (pulsesrc->current_source_name);
+
+ if (pulsesrc->properties)
+ gst_structure_free (pulsesrc->properties);
+ if (pulsesrc->proplist)
+ pa_proplist_free (pulsesrc->proplist);
+
++#ifdef __TIZEN__
++ g_free (pulsesrc->latency);
++#endif /* __TIZEN__ */
++
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ #define CONTEXT_OK(c) ((c) && PA_CONTEXT_IS_GOOD (pa_context_get_state ((c))))
+ #define STREAM_OK(s) ((s) && PA_STREAM_IS_GOOD (pa_stream_get_state ((s))))
+
+ static gboolean
+ gst_pulsesrc_is_dead (GstPulseSrc * pulsesrc, gboolean check_stream)
+ {
+ if (!pulsesrc->stream_connected)
+ return TRUE;
+
+ if (!CONTEXT_OK (pulsesrc->context))
+ goto error;
+
+ if (check_stream && !STREAM_OK (pulsesrc->stream))
+ goto error;
+
+ return FALSE;
+
+ error:
+ {
+ const gchar *err_str = pulsesrc->context ?
+ pa_strerror (pa_context_errno (pulsesrc->context)) : NULL;
+ GST_ELEMENT_ERROR ((pulsesrc), RESOURCE, FAILED, ("Disconnected: %s",
+ err_str), (NULL));
+ return TRUE;
+ }
+ }
+
+ static void
+ gst_pulsesrc_source_info_cb (pa_context * c, const pa_source_info * i, int eol,
+ void *userdata)
+ {
+ GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (userdata);
+
+ if (!i)
+ goto done;
+
+ g_free (pulsesrc->device_description);
+ pulsesrc->device_description = g_strdup (i->description);
+
+ done:
+ pa_threaded_mainloop_signal (pulsesrc->mainloop, 0);
+ }
+
+ static gchar *
+ gst_pulsesrc_device_description (GstPulseSrc * pulsesrc)
+ {
+ pa_operation *o = NULL;
+ gchar *t;
+
+ if (!pulsesrc->mainloop)
+ goto no_mainloop;
+
+ pa_threaded_mainloop_lock (pulsesrc->mainloop);
+
+ if (!(o = pa_context_get_source_info_by_name (pulsesrc->context,
+ pulsesrc->device, gst_pulsesrc_source_info_cb, pulsesrc))) {
+
+ GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
+ ("pa_stream_get_source_info() failed: %s",
+ pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
+ goto unlock;
+ }
+
+ while (pa_operation_get_state (o) == PA_OPERATION_RUNNING) {
+
+ if (gst_pulsesrc_is_dead (pulsesrc, FALSE))
+ goto unlock;
+
+ pa_threaded_mainloop_wait (pulsesrc->mainloop);
+ }
+
+ unlock:
+
+ if (o)
+ pa_operation_unref (o);
+
+ t = g_strdup (pulsesrc->device_description);
+
+ pa_threaded_mainloop_unlock (pulsesrc->mainloop);
+
+ return t;
+
+ no_mainloop:
+ {
+ GST_DEBUG_OBJECT (pulsesrc, "have no mainloop");
+ return NULL;
+ }
+ }
+
+ static void
+ gst_pulsesrc_source_output_info_cb (pa_context * c,
+ const pa_source_output_info * i, int eol, void *userdata)
+ {
+ GstPulseSrc *psrc;
+
+ psrc = GST_PULSESRC_CAST (userdata);
+
+ if (!i)
+ goto done;
+
+ /* If the index doesn't match our current stream,
+ * it implies we just recreated the stream (caps change)
+ */
+ if (i->index == psrc->source_output_idx) {
+ psrc->volume = pa_sw_volume_to_linear (pa_cvolume_max (&i->volume));
+ psrc->mute = i->mute;
+ psrc->current_source_idx = i->source;
+
+ if (G_UNLIKELY (psrc->volume > MAX_VOLUME)) {
+ GST_WARNING_OBJECT (psrc, "Clipped volume from %f to %f",
+ psrc->volume, MAX_VOLUME);
+ psrc->volume = MAX_VOLUME;
+ }
+ }
+
+ done:
+ pa_threaded_mainloop_signal (psrc->mainloop, 0);
+ }
+
+ static void
+ gst_pulsesrc_get_source_output_info (GstPulseSrc * pulsesrc, gdouble * volume,
+ gboolean * mute)
+ {
+ pa_operation *o = NULL;
+
+ if (!pulsesrc->mainloop)
+ goto no_mainloop;
+
+ if (pulsesrc->source_output_idx == PA_INVALID_INDEX)
+ goto no_index;
+
+ pa_threaded_mainloop_lock (pulsesrc->mainloop);
+
+ if (!(o = pa_context_get_source_output_info (pulsesrc->context,
+ pulsesrc->source_output_idx, gst_pulsesrc_source_output_info_cb,
+ pulsesrc)))
+ goto info_failed;
+
+ while (pa_operation_get_state (o) == PA_OPERATION_RUNNING) {
+ pa_threaded_mainloop_wait (pulsesrc->mainloop);
+ if (gst_pulsesrc_is_dead (pulsesrc, TRUE))
+ goto unlock;
+ }
+
+ unlock:
+
+ if (volume)
+ *volume = pulsesrc->volume;
+ if (mute)
+ *mute = pulsesrc->mute;
+
+ if (o)
+ pa_operation_unref (o);
+
+ pa_threaded_mainloop_unlock (pulsesrc->mainloop);
+
+ return;
+
+ /* ERRORS */
+ no_mainloop:
+ {
+ GST_DEBUG_OBJECT (pulsesrc, "we have no mainloop");
+ if (volume)
+ *volume = pulsesrc->volume;
+ if (mute)
+ *mute = pulsesrc->mute;
+ return;
+ }
+ no_index:
+ {
+ GST_DEBUG_OBJECT (pulsesrc, "we don't have a stream index");
+ if (volume)
+ *volume = pulsesrc->volume;
+ if (mute)
+ *mute = pulsesrc->mute;
+ return;
+ }
+ info_failed:
+ {
+ GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
+ ("pa_context_get_source_output_info() failed: %s",
+ pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
+ goto unlock;
+ }
+ }
+
+ static void
+ gst_pulsesrc_current_source_info_cb (pa_context * c, const pa_source_info * i,
+ int eol, void *userdata)
+ {
+ GstPulseSrc *psrc;
+
+ psrc = GST_PULSESRC_CAST (userdata);
+
+ if (!i)
+ goto done;
+
+ /* If the index doesn't match our current stream,
+ * it implies we just recreated the stream (caps change)
+ */
+ if (i->index == psrc->current_source_idx) {
+ g_free (psrc->current_source_name);
+ psrc->current_source_name = g_strdup (i->name);
+ }
+
+ done:
+ pa_threaded_mainloop_signal (psrc->mainloop, 0);
+ }
+
+ static gchar *
+ gst_pulsesrc_get_current_device (GstPulseSrc * pulsesrc)
+ {
+ pa_operation *o = NULL;
+ gchar *current_src;
+
+ if (!pulsesrc->mainloop)
+ goto no_mainloop;
+
+ if (pulsesrc->source_output_idx == PA_INVALID_INDEX)
+ goto no_index;
+
+ gst_pulsesrc_get_source_output_info (pulsesrc, NULL, NULL);
+
+ pa_threaded_mainloop_lock (pulsesrc->mainloop);
+
+
+ if (!(o = pa_context_get_source_info_by_index (pulsesrc->context,
+ pulsesrc->current_source_idx, gst_pulsesrc_current_source_info_cb,
+ pulsesrc)))
+ goto info_failed;
+
+ while (pa_operation_get_state (o) == PA_OPERATION_RUNNING) {
+ pa_threaded_mainloop_wait (pulsesrc->mainloop);
+ if (gst_pulsesrc_is_dead (pulsesrc, TRUE))
+ goto unlock;
+ }
+
+ unlock:
+
+ current_src = g_strdup (pulsesrc->current_source_name);
+
+ if (o)
+ pa_operation_unref (o);
+
+ pa_threaded_mainloop_unlock (pulsesrc->mainloop);
+
+ return current_src;
+
+ /* ERRORS */
+ no_mainloop:
+ {
+ GST_DEBUG_OBJECT (pulsesrc, "we have no mainloop");
+ return NULL;
+ }
+ no_index:
+ {
+ GST_DEBUG_OBJECT (pulsesrc, "we don't have a stream index");
+ return NULL;
+ }
+ info_failed:
+ {
+ GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
+ ("pa_context_get_source_output_info() failed: %s",
+ pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
+ goto unlock;
+ }
+ }
+
+ static void
+ gst_pulsesrc_set_stream_volume (GstPulseSrc * pulsesrc, gdouble volume)
+ {
+ pa_cvolume v;
+ pa_operation *o = NULL;
+
+ if (!pulsesrc->mainloop)
+ goto no_mainloop;
+
+ if (pulsesrc->source_output_idx == PA_INVALID_INDEX)
+ goto no_index;
+
+ pa_threaded_mainloop_lock (pulsesrc->mainloop);
+
+ GST_DEBUG_OBJECT (pulsesrc, "setting volume to %f", volume);
+
+ gst_pulse_cvolume_from_linear (&v, pulsesrc->sample_spec.channels, volume);
+
+ if (!(o = pa_context_set_source_output_volume (pulsesrc->context,
+ pulsesrc->source_output_idx, &v, NULL, NULL)))
+ goto volume_failed;
+
+ /* We don't really care about the result of this call */
+ unlock:
+
+ if (o)
+ pa_operation_unref (o);
+
+ pa_threaded_mainloop_unlock (pulsesrc->mainloop);
+
+ return;
+
+ /* ERRORS */
+ no_mainloop:
+ {
+ pulsesrc->volume = volume;
+ pulsesrc->volume_set = TRUE;
+ GST_DEBUG_OBJECT (pulsesrc, "we have no mainloop");
+ return;
+ }
+ no_index:
+ {
+ pulsesrc->volume = volume;
+ pulsesrc->volume_set = TRUE;
+ GST_DEBUG_OBJECT (pulsesrc, "we don't have a stream index");
+ return;
+ }
+ volume_failed:
+ {
+ GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
+ ("pa_stream_set_source_output_volume() failed: %s",
+ pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
+ goto unlock;
+ }
+ }
+
+ static void
+ gst_pulsesrc_set_stream_mute (GstPulseSrc * pulsesrc, gboolean mute)
+ {
+ pa_operation *o = NULL;
+
+ if (!pulsesrc->mainloop)
+ goto no_mainloop;
+
+ if (pulsesrc->source_output_idx == PA_INVALID_INDEX)
+ goto no_index;
+
+ pa_threaded_mainloop_lock (pulsesrc->mainloop);
+
+ GST_DEBUG_OBJECT (pulsesrc, "setting mute state to %d", mute);
+
+ if (!(o = pa_context_set_source_output_mute (pulsesrc->context,
+ pulsesrc->source_output_idx, mute, NULL, NULL)))
+ goto mute_failed;
+
+ /* We don't really care about the result of this call */
+ unlock:
+
+ if (o)
+ pa_operation_unref (o);
+
+ pa_threaded_mainloop_unlock (pulsesrc->mainloop);
+
+ return;
+
+ /* ERRORS */
+ no_mainloop:
+ {
+ pulsesrc->mute = mute;
+ pulsesrc->mute_set = TRUE;
+ GST_DEBUG_OBJECT (pulsesrc, "we have no mainloop");
+ return;
+ }
+ no_index:
+ {
+ pulsesrc->mute = mute;
+ pulsesrc->mute_set = TRUE;
+ GST_DEBUG_OBJECT (pulsesrc, "we don't have a stream index");
+ return;
+ }
+ mute_failed:
+ {
+ GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
+ ("pa_stream_set_source_output_mute() failed: %s",
+ pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
+ goto unlock;
+ }
+ }
+
+ static void
+ gst_pulsesrc_set_stream_device (GstPulseSrc * pulsesrc, const gchar * device)
+ {
+ pa_operation *o = NULL;
+
+ if (!pulsesrc->mainloop)
+ goto no_mainloop;
+
+ if (pulsesrc->source_output_idx == PA_INVALID_INDEX)
+ goto no_index;
+
+ pa_threaded_mainloop_lock (pulsesrc->mainloop);
+
+ GST_DEBUG_OBJECT (pulsesrc, "setting stream device to %s", device);
+
+ if (!(o = pa_context_move_source_output_by_name (pulsesrc->context,
+ pulsesrc->source_output_idx, device, NULL, NULL)))
+ goto move_failed;
+
+ unlock:
+
+ if (o)
+ pa_operation_unref (o);
+
+ pa_threaded_mainloop_unlock (pulsesrc->mainloop);
+
+ return;
+
+ /* ERRORS */
+ no_mainloop:
+ {
+ GST_DEBUG_OBJECT (pulsesrc, "we have no mainloop");
+ return;
+ }
+ no_index:
+ {
+ GST_DEBUG_OBJECT (pulsesrc, "we don't have a stream index");
+ return;
+ }
+ move_failed:
+ {
+ GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
+ ("pa_context_move_source_output_by_name(%s) failed: %s",
+ device, pa_strerror (pa_context_errno (pulsesrc->context))),
+ (NULL));
+ goto unlock;
+ }
+ }
+
+ static void
+ gst_pulsesrc_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+ {
+
+ GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (object);
+
+ switch (prop_id) {
+ case PROP_SERVER:
+ g_free (pulsesrc->server);
+ pulsesrc->server = g_value_dup_string (value);
+ break;
+ case PROP_DEVICE:
+ g_free (pulsesrc->device);
+ pulsesrc->device = g_value_dup_string (value);
+ gst_pulsesrc_set_stream_device (pulsesrc, pulsesrc->device);
+ break;
+ case PROP_CLIENT_NAME:
+ g_free (pulsesrc->client_name);
+ if (!g_value_get_string (value)) {
+ GST_WARNING_OBJECT (pulsesrc,
+ "Empty PulseAudio client name not allowed. Resetting to default value");
+ pulsesrc->client_name = gst_pulse_client_name ();
+ } else
+ pulsesrc->client_name = g_value_dup_string (value);
+ break;
+ case PROP_STREAM_PROPERTIES:
+ if (pulsesrc->properties)
+ gst_structure_free (pulsesrc->properties);
+ pulsesrc->properties =
+ gst_structure_copy (gst_value_get_structure (value));
+ if (pulsesrc->proplist)
+ pa_proplist_free (pulsesrc->proplist);
+ pulsesrc->proplist = gst_pulse_make_proplist (pulsesrc->properties);
+ break;
+ case PROP_VOLUME:
+ gst_pulsesrc_set_stream_volume (pulsesrc, g_value_get_double (value));
+ break;
+ case PROP_MUTE:
+ gst_pulsesrc_set_stream_mute (pulsesrc, g_value_get_boolean (value));
+ break;
++#ifdef __TIZEN__
++ case PROP_AUDIO_LATENCY:
++ g_free (pulsesrc->latency);
++ pulsesrc->latency = g_value_dup_string (value);
++ /* setting NULL restores the default latency */
++ if (pulsesrc->latency == NULL)
++ pulsesrc->latency = g_strdup (DEFAULT_AUDIO_LATENCY);
++ if (!pulsesrc->proplist)
++ pulsesrc->proplist = pa_proplist_new ();
++ pa_proplist_sets (pulsesrc->proplist, PA_PROP_MEDIA_TIZEN_AUDIO_LATENCY,
++ pulsesrc->latency);
++ GST_DEBUG_OBJECT (pulsesrc, "latency(%s)", pulsesrc->latency);
++ break;
++#endif /* __TIZEN__ */
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static void
+ gst_pulsesrc_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+ {
+
+ GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (object);
+
+ switch (prop_id) {
+ case PROP_SERVER:
+ g_value_set_string (value, pulsesrc->server);
+ break;
+ case PROP_DEVICE:
+ g_value_set_string (value, pulsesrc->device);
+ break;
+ case PROP_CURRENT_DEVICE:
+ {
+ gchar *current_device = gst_pulsesrc_get_current_device (pulsesrc);
+ if (current_device)
+ g_value_take_string (value, current_device);
+ else
+ g_value_set_string (value, "");
+ break;
+ }
+ case PROP_DEVICE_NAME:
+ g_value_take_string (value, gst_pulsesrc_device_description (pulsesrc));
+ break;
+ case PROP_CLIENT_NAME:
+ g_value_set_string (value, pulsesrc->client_name);
+ break;
+ case PROP_STREAM_PROPERTIES:
+ gst_value_set_structure (value, pulsesrc->properties);
+ break;
+ case PROP_SOURCE_OUTPUT_INDEX:
+ g_value_set_uint (value, pulsesrc->source_output_idx);
+ break;
+ case PROP_VOLUME:
+ {
+ gdouble volume;
+ gst_pulsesrc_get_source_output_info (pulsesrc, &volume, NULL);
+ g_value_set_double (value, volume);
+ break;
+ }
+ case PROP_MUTE:
+ {
+ gboolean mute;
+ gst_pulsesrc_get_source_output_info (pulsesrc, NULL, &mute);
+ g_value_set_boolean (value, mute);
+ break;
+ }
++#ifdef __TIZEN__
++ case PROP_AUDIO_LATENCY:
++ g_value_set_string (value, pulsesrc->latency);
++ break;
++#endif /* __TIZEN__ */
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static void
+ gst_pulsesrc_context_state_cb (pa_context * c, void *userdata)
+ {
+ GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (userdata);
+
+ switch (pa_context_get_state (c)) {
+ case PA_CONTEXT_READY:
+ case PA_CONTEXT_TERMINATED:
+ case PA_CONTEXT_FAILED:
+ pa_threaded_mainloop_signal (pulsesrc->mainloop, 0);
+ break;
+
+ case PA_CONTEXT_UNCONNECTED:
+ case PA_CONTEXT_CONNECTING:
+ case PA_CONTEXT_AUTHORIZING:
+ case PA_CONTEXT_SETTING_NAME:
+ break;
+ }
+ }
+
+ static void
+ gst_pulsesrc_stream_state_cb (pa_stream * s, void *userdata)
+ {
+ GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (userdata);
+
+ switch (pa_stream_get_state (s)) {
+
+ case PA_STREAM_READY:
+ case PA_STREAM_FAILED:
+ case PA_STREAM_TERMINATED:
+ pa_threaded_mainloop_signal (pulsesrc->mainloop, 0);
+ break;
+
+ case PA_STREAM_UNCONNECTED:
+ case PA_STREAM_CREATING:
+ break;
+ }
+ }
+
+ static void
+ gst_pulsesrc_stream_request_cb (pa_stream * s, size_t length, void *userdata)
+ {
+ GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (userdata);
+
+ GST_LOG_OBJECT (pulsesrc, "got request for length %" G_GSIZE_FORMAT, length);
+
+ if (pulsesrc->in_read) {
+ /* only signal when reading */
+ pa_threaded_mainloop_signal (pulsesrc->mainloop, 0);
+ }
+ }
+
+ static void
+ gst_pulsesrc_stream_latency_update_cb (pa_stream * s, void *userdata)
+ {
+ const pa_timing_info *info;
+ pa_usec_t source_usec;
+
+ info = pa_stream_get_timing_info (s);
+
+ if (!info) {
+ GST_LOG_OBJECT (GST_PULSESRC_CAST (userdata),
+ "latency update (information unknown)");
+ return;
+ }
+ source_usec = info->configured_source_usec;
+
+ GST_LOG_OBJECT (GST_PULSESRC_CAST (userdata),
+ "latency_update, %" G_GUINT64_FORMAT ", %d:%" G_GINT64_FORMAT ", %d:%"
+ G_GUINT64_FORMAT ", %" G_GUINT64_FORMAT ", %" G_GUINT64_FORMAT,
+ GST_TIMEVAL_TO_TIME (info->timestamp), info->write_index_corrupt,
+ info->write_index, info->read_index_corrupt, info->read_index,
+ info->source_usec, source_usec);
+ }
+
+ static void
+ gst_pulsesrc_stream_underflow_cb (pa_stream * s, void *userdata)
+ {
+ GST_WARNING_OBJECT (GST_PULSESRC_CAST (userdata), "Got underflow");
+ }
+
+ static void
+ gst_pulsesrc_stream_overflow_cb (pa_stream * s, void *userdata)
+ {
+ GST_WARNING_OBJECT (GST_PULSESRC_CAST (userdata), "Got overflow");
+ }
+
+ static void
+ gst_pulsesrc_context_subscribe_cb (pa_context * c,
+ pa_subscription_event_type_t t, uint32_t idx, void *userdata)
+ {
+ GstPulseSrc *psrc = GST_PULSESRC (userdata);
+
+ if (t != (PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT | PA_SUBSCRIPTION_EVENT_CHANGE)
+ && t != (PA_SUBSCRIPTION_EVENT_SOURCE_OUTPUT | PA_SUBSCRIPTION_EVENT_NEW))
+ return;
+
+ if (idx != psrc->source_output_idx)
+ return;
+
+ /* Actually this event is also triggered when other properties of the stream
+ * change that are unrelated to the volume. However it is probably cheaper to
+ * signal the change here and check for the volume when the GObject property
+ * is read instead of querying it always. */
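+ /* The flag set below is consumed at the top of gst_pulsesrc_read(), which
+ * then emits the matching GObject notify signals from the streaming
+ * thread. */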
+
+ /* inform streaming thread to notify */
+ g_atomic_int_compare_and_exchange (&psrc->notify, 0, 1);
+ }
+
+ static gboolean
+ gst_pulsesrc_open (GstAudioSrc * asrc)
+ {
+ GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (asrc);
+
+ pa_threaded_mainloop_lock (pulsesrc->mainloop);
+
+ g_assert (!pulsesrc->context);
+ g_assert (!pulsesrc->stream);
+
+ GST_DEBUG_OBJECT (pulsesrc, "opening device");
+
+ if (!(pulsesrc->context =
+ pa_context_new (pa_threaded_mainloop_get_api (pulsesrc->mainloop),
+ pulsesrc->client_name))) {
+ GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED, ("Failed to create context"),
+ (NULL));
+ goto unlock_and_fail;
+ }
+
+ pa_context_set_state_callback (pulsesrc->context,
+ gst_pulsesrc_context_state_cb, pulsesrc);
+ pa_context_set_subscribe_callback (pulsesrc->context,
+ gst_pulsesrc_context_subscribe_cb, pulsesrc);
+
+ GST_DEBUG_OBJECT (pulsesrc, "connect to server %s",
+ GST_STR_NULL (pulsesrc->server));
+
+ if (pa_context_connect (pulsesrc->context, pulsesrc->server, 0, NULL) < 0) {
+ GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED, ("Failed to connect: %s",
+ pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
+ goto unlock_and_fail;
+ }
+
+ for (;;) {
+ pa_context_state_t state;
+
+ state = pa_context_get_state (pulsesrc->context);
+
+ if (!PA_CONTEXT_IS_GOOD (state)) {
+ GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED, ("Failed to connect: %s",
+ pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
+ goto unlock_and_fail;
+ }
+
+ if (state == PA_CONTEXT_READY)
+ break;
+
+ /* Wait until the context is ready */
+ pa_threaded_mainloop_wait (pulsesrc->mainloop);
+ }
+ GST_DEBUG_OBJECT (pulsesrc, "connected");
+
+ pa_threaded_mainloop_unlock (pulsesrc->mainloop);
+
+ return TRUE;
+
+ /* ERRORS */
+ unlock_and_fail:
+ {
+ gst_pulsesrc_destroy_context (pulsesrc);
+
+ pa_threaded_mainloop_unlock (pulsesrc->mainloop);
+
+ return FALSE;
+ }
+ }
+
+ static gboolean
+ gst_pulsesrc_close (GstAudioSrc * asrc)
+ {
+ GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (asrc);
+
+ pa_threaded_mainloop_lock (pulsesrc->mainloop);
+ gst_pulsesrc_destroy_context (pulsesrc);
+ pa_threaded_mainloop_unlock (pulsesrc->mainloop);
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
++ if (pulsesrc->dump_fd_output) {
++ fclose (pulsesrc->dump_fd_output);
++ pulsesrc->dump_fd_output = NULL;
++ }
++#endif
+ return TRUE;
+ }
+
+ static gboolean
+ gst_pulsesrc_unprepare (GstAudioSrc * asrc)
+ {
+ GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (asrc);
+
+ pa_threaded_mainloop_lock (pulsesrc->mainloop);
+ gst_pulsesrc_destroy_stream (pulsesrc);
+
+ pa_threaded_mainloop_unlock (pulsesrc->mainloop);
+
+ pulsesrc->read_buffer = NULL;
+ pulsesrc->read_buffer_length = 0;
+
+ return TRUE;
+ }
+
+ static guint
+ gst_pulsesrc_read (GstAudioSrc * asrc, gpointer data, guint length,
+ GstClockTime * timestamp)
+ {
+ GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (asrc);
+ size_t sum = 0;
+
+ if (g_atomic_int_compare_and_exchange (&pulsesrc->notify, 1, 0)) {
+ g_object_notify (G_OBJECT (pulsesrc), "volume");
+ g_object_notify (G_OBJECT (pulsesrc), "mute");
+ g_object_notify (G_OBJECT (pulsesrc), "current-device");
+ }
+
+ pa_threaded_mainloop_lock (pulsesrc->mainloop);
+ pulsesrc->in_read = TRUE;
+
+ if (!pulsesrc->stream_connected)
+ goto not_connected;
+
+ if (pulsesrc->paused)
+ goto was_paused;
+
+ while (length > 0) {
+ size_t l;
+
+ GST_LOG_OBJECT (pulsesrc, "reading %u bytes", length);
+
+ /* check if we have a leftover buffer */
+ if (!pulsesrc->read_buffer) {
+ for (;;) {
+ if (gst_pulsesrc_is_dead (pulsesrc, TRUE))
+ goto unlock_and_fail;
+
+ /* read all available data, we keep a pointer to the data and the length
+ * and take from it what we need. */
+ if (pa_stream_peek (pulsesrc->stream, &pulsesrc->read_buffer,
+ &pulsesrc->read_buffer_length) < 0)
+ goto peek_failed;
+
+ GST_LOG_OBJECT (pulsesrc, "have data of %" G_GSIZE_FORMAT " bytes",
+ pulsesrc->read_buffer_length);
+
+ /* if we have data, process it */
+ if (pulsesrc->read_buffer && pulsesrc->read_buffer_length)
+ break;
+
+ /* now wait for more data to become available */
+ GST_LOG_OBJECT (pulsesrc, "waiting for data");
+ pa_threaded_mainloop_wait (pulsesrc->mainloop);
+
+ if (pulsesrc->paused)
+ goto was_paused;
+ }
+ }
+
+ l = pulsesrc->read_buffer_length >
+ length ? length : pulsesrc->read_buffer_length;
+
+ memcpy (data, pulsesrc->read_buffer, l);
+
+ pulsesrc->read_buffer = (const guint8 *) pulsesrc->read_buffer + l;
+ pulsesrc->read_buffer_length -= l;
+
+ data = (guint8 *) data + l;
+ length -= l;
+ sum += l;
+
+ if (pulsesrc->read_buffer_length <= 0) {
+ /* we copied all of the data, drop it now */
+ if (pa_stream_drop (pulsesrc->stream) < 0)
+ goto drop_failed;
+
+ /* reset pointer to data */
+ pulsesrc->read_buffer = NULL;
+ pulsesrc->read_buffer_length = 0;
+ }
+ }
+
+ pulsesrc->in_read = FALSE;
+ pa_threaded_mainloop_unlock (pulsesrc->mainloop);
+
+ return sum;
+
+ /* ERRORS */
+ not_connected:
+ {
+ GST_LOG_OBJECT (pulsesrc, "we are not connected");
+ goto unlock_and_fail;
+ }
+ was_paused:
+ {
+ GST_LOG_OBJECT (pulsesrc, "we are paused");
+ goto unlock_and_fail;
+ }
+ peek_failed:
+ {
+ GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
+ ("pa_stream_peek() failed: %s",
+ pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
+ goto unlock_and_fail;
+ }
+ drop_failed:
+ {
+ GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
+ ("pa_stream_drop() failed: %s",
+ pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
+ goto unlock_and_fail;
+ }
+ unlock_and_fail:
+ {
+ pulsesrc->in_read = FALSE;
+ pa_threaded_mainloop_unlock (pulsesrc->mainloop);
+
+ return (guint) - 1;
+ }
+ }
+
+ /* return the delay in samples */
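+ /* e.g. a reported latency of 10000 us at a sample rate of 44100 Hz comes
+ * out as (10000 * 44100) / 1000000 = 441 samples of delay */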
+ static guint
+ gst_pulsesrc_delay (GstAudioSrc * asrc)
+ {
+ GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (asrc);
+ pa_usec_t t;
+ int negative, res;
+ guint result;
+
+ pa_threaded_mainloop_lock (pulsesrc->mainloop);
+ if (gst_pulsesrc_is_dead (pulsesrc, TRUE))
+ goto server_dead;
+
+ /* get the latency, this can fail when we don't have a latency update yet.
+ * We don't want to wait for latency updates here but we just return 0. */
+ res = pa_stream_get_latency (pulsesrc->stream, &t, &negative);
+
+ pa_threaded_mainloop_unlock (pulsesrc->mainloop);
+
+ if (res < 0) {
+ GST_DEBUG_OBJECT (pulsesrc, "could not get latency");
+ result = 0;
+ } else {
+ if (negative)
+ result = 0;
+ else
+ result = (guint) ((t * pulsesrc->sample_spec.rate) / 1000000LL);
+ }
+ return result;
+
+ /* ERRORS */
+ server_dead:
+ {
+ GST_DEBUG_OBJECT (pulsesrc, "the server is dead");
+ pa_threaded_mainloop_unlock (pulsesrc->mainloop);
+ return 0;
+ }
+ }
+
+ static gboolean
+ gst_pulsesrc_create_stream (GstPulseSrc * pulsesrc, GstCaps ** caps,
+ GstAudioRingBufferSpec * rspec)
+ {
+ pa_channel_map channel_map;
+ const pa_channel_map *m;
+ GstStructure *s;
+ gboolean need_channel_layout = FALSE;
+ GstAudioRingBufferSpec new_spec, *spec = NULL;
+ const gchar *name;
+ int i;
+
+ /* If we already have a stream (renegotiation), free it first */
+ if (pulsesrc->stream)
+ gst_pulsesrc_destroy_stream (pulsesrc);
+
+ if (rspec) {
+ /* Post-negotiation, we already have a ringbuffer spec, so we just need to
+ * use it to create a stream. */
+ spec = rspec;
+
+ /* At this point, we expect the channel-mask to be set in caps, so we just
+ * use that */
+ if (!gst_pulse_gst_to_channel_map (&channel_map, spec))
+ goto invalid_spec;
+
+ } else if (caps) {
+ /* At negotiation time, we get a fixed caps and use it to set up a stream */
+ s = gst_caps_get_structure (*caps, 0);
+ gst_structure_get_int (s, "channels", &new_spec.info.channels);
+ if (!gst_structure_has_field (s, "channel-mask")) {
+ if (new_spec.info.channels == 1) {
+ pa_channel_map_init_mono (&channel_map);
+ } else if (new_spec.info.channels == 2) {
+ pa_channel_map_init_stereo (&channel_map);
+ } else {
+ need_channel_layout = TRUE;
+ gst_structure_set (s, "channel-mask", GST_TYPE_BITMASK,
+ G_GUINT64_CONSTANT (0), NULL);
+ }
+ }
+
+ memset (&new_spec, 0, sizeof (GstAudioRingBufferSpec));
+ new_spec.latency_time = GST_SECOND;
+ if (!gst_audio_ring_buffer_parse_caps (&new_spec, *caps))
+ goto invalid_caps;
+
+ /* Keep the refcount of the caps at 1 to make them writable */
+ gst_caps_unref (new_spec.caps);
+
+ if (!need_channel_layout
+ && !gst_pulse_gst_to_channel_map (&channel_map, &new_spec)) {
+ need_channel_layout = TRUE;
+ gst_structure_set (s, "channel-mask", GST_TYPE_BITMASK,
+ G_GUINT64_CONSTANT (0), NULL);
+ for (i = 0; i < G_N_ELEMENTS (new_spec.info.position); i++)
+ new_spec.info.position[i] = GST_AUDIO_CHANNEL_POSITION_INVALID;
+ }
+
+ spec = &new_spec;
+ } else {
+ /* !rspec && !caps */
+ g_assert_not_reached ();
+ }
+
+ if (!gst_pulse_fill_sample_spec (spec, &pulsesrc->sample_spec))
+ goto invalid_spec;
+
+ pa_threaded_mainloop_lock (pulsesrc->mainloop);
+
+ if (!pulsesrc->context)
+ goto bad_context;
+
+ name = "Record Stream";
+ if (pulsesrc->proplist) {
+ if (!(pulsesrc->stream = pa_stream_new_with_proplist (pulsesrc->context,
+ name, &pulsesrc->sample_spec,
+ (need_channel_layout) ? NULL : &channel_map,
+ pulsesrc->proplist)))
+ goto create_failed;
+
+ } else if (!(pulsesrc->stream = pa_stream_new (pulsesrc->context,
+ name, &pulsesrc->sample_spec,
+ (need_channel_layout) ? NULL : &channel_map)))
+ goto create_failed;
+
+ if (caps) {
+ m = pa_stream_get_channel_map (pulsesrc->stream);
+ gst_pulse_channel_map_to_gst (m, &new_spec);
+ gst_audio_channel_positions_to_valid_order (new_spec.info.position,
+ new_spec.info.channels);
+ gst_caps_unref (*caps);
+ *caps = gst_audio_info_to_caps (&new_spec.info);
+
+ GST_DEBUG_OBJECT (pulsesrc, "Caps are %" GST_PTR_FORMAT, *caps);
+ }
+
+
+ pa_stream_set_state_callback (pulsesrc->stream, gst_pulsesrc_stream_state_cb,
+ pulsesrc);
+ pa_stream_set_read_callback (pulsesrc->stream, gst_pulsesrc_stream_request_cb,
+ pulsesrc);
+ pa_stream_set_underflow_callback (pulsesrc->stream,
+ gst_pulsesrc_stream_underflow_cb, pulsesrc);
+ pa_stream_set_overflow_callback (pulsesrc->stream,
+ gst_pulsesrc_stream_overflow_cb, pulsesrc);
+ pa_stream_set_latency_update_callback (pulsesrc->stream,
+ gst_pulsesrc_stream_latency_update_cb, pulsesrc);
+
+ pa_threaded_mainloop_unlock (pulsesrc->mainloop);
+
+ return TRUE;
+
+ /* ERRORS */
+ invalid_caps:
+ {
+ GST_ELEMENT_ERROR (pulsesrc, RESOURCE, SETTINGS,
+ ("Can't parse caps."), (NULL));
+ goto fail;
+ }
+ invalid_spec:
+ {
+ GST_ELEMENT_ERROR (pulsesrc, RESOURCE, SETTINGS,
+ ("Invalid sample specification."), (NULL));
+ goto fail;
+ }
+ bad_context:
+ {
+ GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED, ("Bad context"), (NULL));
+ goto unlock_and_fail;
+ }
+ create_failed:
+ {
+ GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
+ ("Failed to create stream: %s",
+ pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
+ goto unlock_and_fail;
+ }
+ unlock_and_fail:
+ {
+ gst_pulsesrc_destroy_stream (pulsesrc);
+
+ pa_threaded_mainloop_unlock (pulsesrc->mainloop);
+
+ fail:
+ return FALSE;
+ }
+ }
+
+ static gboolean
+ gst_pulsesrc_event (GstBaseSrc * basesrc, GstEvent * event)
+ {
+ GST_DEBUG_OBJECT (basesrc, "handle event %" GST_PTR_FORMAT, event);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_RECONFIGURE:
+ gst_pad_check_reconfigure (GST_BASE_SRC_PAD (basesrc));
+ break;
+ default:
+ break;
+ }
+ return GST_BASE_SRC_CLASS (parent_class)->event (basesrc, event);
+ }
+
+ /* This is essentially gst_base_src_negotiate_default() but the caps
+ * are guaranteed to have a channel layout for > 2 channels
+ */
+ static gboolean
+ gst_pulsesrc_negotiate (GstBaseSrc * basesrc)
+ {
+ GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (basesrc);
+ GstCaps *thiscaps;
+ GstCaps *caps = NULL;
+ GstCaps *peercaps = NULL;
+ gboolean result = FALSE;
+
+ /* first see what is possible on our source pad */
+ thiscaps = gst_pad_query_caps (GST_BASE_SRC_PAD (basesrc), NULL);
+ GST_DEBUG_OBJECT (basesrc, "caps of src: %" GST_PTR_FORMAT, thiscaps);
+ /* nothing or anything is allowed, we're done */
+ if (thiscaps == NULL || gst_caps_is_any (thiscaps))
+ goto no_nego_needed;
+
+ /* get the peer caps */
+ peercaps = gst_pad_peer_query_caps (GST_BASE_SRC_PAD (basesrc), NULL);
+ GST_DEBUG_OBJECT (basesrc, "caps of peer: %" GST_PTR_FORMAT, peercaps);
+ if (peercaps) {
+ /* get intersection */
+ caps = gst_caps_intersect (thiscaps, peercaps);
+ GST_DEBUG_OBJECT (basesrc, "intersect: %" GST_PTR_FORMAT, caps);
+ gst_caps_unref (thiscaps);
+ gst_caps_unref (peercaps);
+ } else {
+ /* no peer, work with our own caps then */
+ caps = thiscaps;
+ }
+ if (caps) {
+ /* take first (and best, since they are sorted) possibility */
+ caps = gst_caps_truncate (caps);
+
+ /* now fixate */
+ if (!gst_caps_is_empty (caps)) {
+ caps = GST_BASE_SRC_CLASS (parent_class)->fixate (basesrc, caps);
+ GST_DEBUG_OBJECT (basesrc, "fixated to: %" GST_PTR_FORMAT, caps);
+
+ if (gst_caps_is_any (caps)) {
+ /* hmm, still anything, so element can do anything and
+ * nego is not needed */
+ result = TRUE;
+ } else if (gst_caps_is_fixed (caps)) {
+ /* yay, fixed caps, use those then */
+ result = gst_pulsesrc_create_stream (pulsesrc, &caps, NULL);
+ if (result)
+ result = gst_base_src_set_caps (basesrc, caps);
+ }
+ }
+ gst_caps_unref (caps);
+ }
+ return result;
+
+ no_nego_needed:
+ {
+ GST_DEBUG_OBJECT (basesrc, "no negotiation needed");
+ if (thiscaps)
+ gst_caps_unref (thiscaps);
+ return TRUE;
+ }
+ }
+
+ static gboolean
+ gst_pulsesrc_prepare (GstAudioSrc * asrc, GstAudioRingBufferSpec * spec)
+ {
+ pa_buffer_attr wanted;
+ const pa_buffer_attr *actual;
+ GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (asrc);
+ pa_stream_flags_t flags;
+ pa_operation *o;
+ GstAudioClock *clock;
+
+ pa_threaded_mainloop_lock (pulsesrc->mainloop);
+
+ /* bail out if the stream cannot be (re)created, instead of dereferencing
+ * a NULL stream below */
+ if (!pulsesrc->stream &&
+ !gst_pulsesrc_create_stream (pulsesrc, NULL, spec))
+ goto unlock_and_fail;
+
+ {
+ GstAudioRingBufferSpec s = *spec;
+ const pa_channel_map *m;
+
+ m = pa_stream_get_channel_map (pulsesrc->stream);
+ gst_pulse_channel_map_to_gst (m, &s);
+ gst_audio_ring_buffer_set_channel_positions (GST_AUDIO_BASE_SRC
+ (pulsesrc)->ringbuffer, s.info.position);
+ }
+
+ /* enable event notifications */
+ GST_LOG_OBJECT (pulsesrc, "subscribing to context events");
+ if (!(o = pa_context_subscribe (pulsesrc->context,
+ PA_SUBSCRIPTION_MASK_SOURCE_OUTPUT, NULL, NULL))) {
+ GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
+ ("pa_context_subscribe() failed: %s",
+ pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
+ goto unlock_and_fail;
+ }
+
+ pa_operation_unref (o);
+
+ /* There's a bit of a disconnect here between the audio ringbuffer and what
+ * PulseAudio provides. The audio ringbuffer provide a total of buffer_time
+ * worth of buffering, divided into segments of latency_time size. We're
+ * asking PulseAudio to provide a total latency of latency_time, which, with
+ * PA_STREAM_ADJUST_LATENCY, effectively sets itself up as a ringbuffer with
+ * one segment being the hardware buffer, and the other the software buffer.
+ * This segment size is returned as the fragsize.
+ *
+ * Since the two concepts don't map very well, what we do is keep segsize as
+ * it is (unless fragsize is even larger, in which case we use that). We'll
+ * get data from PulseAudio in smaller chunks than we want to pass on as an
+ * element, and we coalesce those chunks in the ringbuffer memory and pass it
+ * on in the expected chunk size. */
+ wanted.maxlength = spec->segsize * spec->segtotal;
+ wanted.tlength = -1;
+ wanted.prebuf = 0;
+ wanted.minreq = -1;
+ wanted.fragsize = spec->segsize;
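+ /* Worked example with hypothetical numbers: 16-bit stereo at 44100 Hz with
+ * a 100 ms segment gives segsize = 17640 bytes; with segtotal = 10 this
+ * requests maxlength = 176400 bytes and fragsize = 17640 bytes. */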
+
+ GST_INFO_OBJECT (pulsesrc, "maxlength: %d", wanted.maxlength);
+ GST_INFO_OBJECT (pulsesrc, "tlength: %d", wanted.tlength);
+ GST_INFO_OBJECT (pulsesrc, "prebuf: %d", wanted.prebuf);
+ GST_INFO_OBJECT (pulsesrc, "minreq: %d", wanted.minreq);
+ GST_INFO_OBJECT (pulsesrc, "fragsize: %d", wanted.fragsize);
+
+ flags = PA_STREAM_INTERPOLATE_TIMING | PA_STREAM_AUTO_TIMING_UPDATE |
+ PA_STREAM_NOT_MONOTONIC | PA_STREAM_ADJUST_LATENCY |
+ PA_STREAM_START_CORKED;
+
+ if (pa_stream_connect_record (pulsesrc->stream, pulsesrc->device, &wanted,
+ flags) < 0) {
+ goto connect_failed;
+ }
+
+ /* our clock will now start from 0 again */
+ clock = GST_AUDIO_CLOCK (GST_AUDIO_BASE_SRC (pulsesrc)->clock);
+ gst_audio_clock_reset (clock, 0);
+
+ pulsesrc->corked = TRUE;
+
+ for (;;) {
+ pa_stream_state_t state;
+
+ state = pa_stream_get_state (pulsesrc->stream);
+
+ if (!PA_STREAM_IS_GOOD (state))
+ goto stream_is_bad;
+
+ if (state == PA_STREAM_READY)
+ break;
+
+ /* Wait until the stream is ready */
+ pa_threaded_mainloop_wait (pulsesrc->mainloop);
+ }
+ pulsesrc->stream_connected = TRUE;
+
+ /* store the source output index so it can be accessed via a property */
+ pulsesrc->source_output_idx = pa_stream_get_index (pulsesrc->stream);
+ g_object_notify (G_OBJECT (pulsesrc), "source-output-index");
+
+ /* Although source output stream muting is supported, there is a bug in
+ * PulseAudio that doesn't allow us to do this at startup, so we mute
+ * manually post-connect. This should be moved back pre-connect once things
+ * are fixed on the PulseAudio side. */
+ if (pulsesrc->mute_set && pulsesrc->mute) {
+ gst_pulsesrc_set_stream_mute (pulsesrc, pulsesrc->mute);
+ pulsesrc->mute_set = FALSE;
+ }
+
+ if (pulsesrc->volume_set) {
+ gst_pulsesrc_set_stream_volume (pulsesrc, pulsesrc->volume);
+ pulsesrc->volume_set = FALSE;
+ }
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
++ if (pulsesrc->need_dump_output) {
++ char *suffix, *dump_path;
++ GDateTime *time = NULL;
++
++ if (pulsesrc->dump_fd_output) {
++ fclose (pulsesrc->dump_fd_output);
++ pulsesrc->dump_fd_output = NULL;
++ }
++ /* name the dump file after the negotiated format and the current time */
++ time = g_date_time_new_now_local ();
++ suffix = g_date_time_format (time, "%m%d_%H%M%S");
++ dump_path = g_strdup_printf ("%s_%dch_%dhz_%s.pcm",
++ GST_PULSESRC_DUMP_OUTPUT_PATH_PREFIX, pulsesrc->sample_spec.channels,
++ pulsesrc->sample_spec.rate, suffix);
++ GST_WARNING_OBJECT (asrc, "pulse-source dumping enabled: dump path [%s]",
++ dump_path);
++ pulsesrc->dump_fd_output = fopen (dump_path, "w+");
++ if (!pulsesrc->dump_fd_output)
++ GST_WARNING_OBJECT (asrc, "failed to open dump file %s", dump_path);
++
++ g_free (suffix);
++ g_free (dump_path);
++ g_date_time_unref (time);
++ }
++#endif
+
+ /* get the actual buffering properties now */
+ actual = pa_stream_get_buffer_attr (pulsesrc->stream);
+
+ GST_INFO_OBJECT (pulsesrc, "maxlength: %d", actual->maxlength);
+ GST_INFO_OBJECT (pulsesrc, "tlength: %d (wanted: %d)",
+ actual->tlength, wanted.tlength);
+ GST_INFO_OBJECT (pulsesrc, "prebuf: %d", actual->prebuf);
+ GST_INFO_OBJECT (pulsesrc, "minreq: %d (wanted %d)", actual->minreq,
+ wanted.minreq);
+ GST_INFO_OBJECT (pulsesrc, "fragsize: %d (wanted %d)",
+ actual->fragsize, wanted.fragsize);
+
+ if (actual->fragsize >= spec->segsize) {
+ spec->segsize = actual->fragsize;
+ } else {
+ /* fragsize is smaller than what we wanted, so let the read function
+ * coalesce the smaller chunks as they come in */
+ }
+
+ /* Fix up the total ringbuffer size based on what we actually got */
+ spec->segtotal = actual->maxlength / spec->segsize;
+ /* Don't buffer less than 2 segments as the ringbuffer can't deal with it */
+ if (spec->segtotal < 2)
+ spec->segtotal = 2;
+
+ if (!pulsesrc->paused) {
+ GST_DEBUG_OBJECT (pulsesrc, "uncorking because we are playing");
+ gst_pulsesrc_set_corked (pulsesrc, FALSE, FALSE);
+ }
+ pa_threaded_mainloop_unlock (pulsesrc->mainloop);
+
+ return TRUE;
+
+ /* ERRORS */
+ connect_failed:
+ {
+ GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
+ ("Failed to connect stream: %s",
+ pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
+ goto unlock_and_fail;
+ }
+ stream_is_bad:
+ {
+ GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
+ ("Failed to connect stream: %s",
+ pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
+ goto unlock_and_fail;
+ }
+ unlock_and_fail:
+ {
+ gst_pulsesrc_destroy_stream (pulsesrc);
+
+ pa_threaded_mainloop_unlock (pulsesrc->mainloop);
+ return FALSE;
+ }
+ }
+
+ static void
+ gst_pulsesrc_success_cb (pa_stream * s, int success, void *userdata)
+ {
+ GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (userdata);
+
+ pulsesrc->operation_success = ! !success;
+ pa_threaded_mainloop_signal (pulsesrc->mainloop, 0);
+ }
+
+ static void
+ gst_pulsesrc_reset (GstAudioSrc * asrc)
+ {
+ GstPulseSrc *pulsesrc = GST_PULSESRC_CAST (asrc);
+ pa_operation *o = NULL;
+
+ pa_threaded_mainloop_lock (pulsesrc->mainloop);
+ GST_DEBUG_OBJECT (pulsesrc, "reset");
+
+ if (gst_pulsesrc_is_dead (pulsesrc, TRUE))
+ goto unlock_and_fail;
+
+ if (!(o =
+ pa_stream_flush (pulsesrc->stream, gst_pulsesrc_success_cb,
+ pulsesrc))) {
+ GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED,
+ ("pa_stream_flush() failed: %s",
+ pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
+ goto unlock_and_fail;
+ }
+
+ pulsesrc->paused = TRUE;
+ /* Inform anyone waiting in the _read() call that it should wake up */
+ if (pulsesrc->in_read) {
+ pa_threaded_mainloop_signal (pulsesrc->mainloop, 0);
+ }
+
+ pulsesrc->operation_success = FALSE;
+ while (pa_operation_get_state (o) == PA_OPERATION_RUNNING) {
+
+ if (gst_pulsesrc_is_dead (pulsesrc, TRUE))
+ goto unlock_and_fail;
+
+ pa_threaded_mainloop_wait (pulsesrc->mainloop);
+ }
+
+ if (!pulsesrc->operation_success) {
+ GST_ELEMENT_ERROR (pulsesrc, RESOURCE, FAILED, ("Flush failed: %s",
+ pa_strerror (pa_context_errno (pulsesrc->context))), (NULL));
+ goto unlock_and_fail;
+ }
+
+ unlock_and_fail:
+
+ if (o) {
+ pa_operation_cancel (o);
+ pa_operation_unref (o);
+ }
+
+ pa_threaded_mainloop_unlock (pulsesrc->mainloop);
+ }
+
+ /* update the corked state of a stream, must be called with the mainloop
+ * lock */
+ static gboolean
+ gst_pulsesrc_set_corked (GstPulseSrc * psrc, gboolean corked, gboolean wait)
+ {
+ pa_operation *o = NULL;
+ gboolean res = FALSE;
+
+ GST_DEBUG_OBJECT (psrc, "setting corked state to %d", corked);
+ if (!psrc->stream_connected)
+ return TRUE;
+
+ if (psrc->corked != corked) {
+ if (!(o = pa_stream_cork (psrc->stream, corked,
+ gst_pulsesrc_success_cb, psrc)))
+ goto cork_failed;
+
+ while (wait && pa_operation_get_state (o) == PA_OPERATION_RUNNING) {
+ pa_threaded_mainloop_wait (psrc->mainloop);
+ if (gst_pulsesrc_is_dead (psrc, TRUE))
+ goto server_dead;
+ }
+ psrc->corked = corked;
+ } else {
+ GST_DEBUG_OBJECT (psrc, "skipping, already in requested state");
+ }
+ res = TRUE;
+
+ cleanup:
+ if (o)
+ pa_operation_unref (o);
+
+ return res;
+
+ /* ERRORS */
+ server_dead:
+ {
+ GST_DEBUG_OBJECT (psrc, "the server is dead");
+ goto cleanup;
+ }
+ cork_failed:
+ {
+ GST_ELEMENT_ERROR (psrc, RESOURCE, FAILED,
+ ("pa_stream_cork() failed: %s",
+ pa_strerror (pa_context_errno (psrc->context))), (NULL));
+ goto cleanup;
+ }
+ }
+
+ /* start/resume playback ASAP */
+ static gboolean
+ gst_pulsesrc_play (GstPulseSrc * psrc)
+ {
+ pa_threaded_mainloop_lock (psrc->mainloop);
+ GST_DEBUG_OBJECT (psrc, "playing");
+ psrc->paused = FALSE;
+ gst_pulsesrc_set_corked (psrc, FALSE, FALSE);
+ pa_threaded_mainloop_unlock (psrc->mainloop);
+
+ return TRUE;
+ }
+
+ /* pause/stop playback ASAP */
+ static gboolean
+ gst_pulsesrc_pause (GstPulseSrc * psrc)
+ {
+ pa_threaded_mainloop_lock (psrc->mainloop);
+ GST_DEBUG_OBJECT (psrc, "pausing");
+ /* make sure the commit method stops writing */
+ psrc->paused = TRUE;
+ if (psrc->in_read) {
+ /* we are waiting in a read, signal */
+ GST_DEBUG_OBJECT (psrc, "signal read");
+ pa_threaded_mainloop_signal (psrc->mainloop, 0);
+ }
+ pa_threaded_mainloop_unlock (psrc->mainloop);
+
+ return TRUE;
+ }
+
+ static GstStateChangeReturn
+ gst_pulsesrc_change_state (GstElement * element, GstStateChange transition)
+ {
+ GstStateChangeReturn ret;
+ GstPulseSrc *this = GST_PULSESRC_CAST (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ if (!(this->mainloop = pa_threaded_mainloop_new ()))
+ goto mainloop_failed;
+ if (pa_threaded_mainloop_start (this->mainloop) < 0) {
+ pa_threaded_mainloop_free (this->mainloop);
+ this->mainloop = NULL;
+ goto mainloop_start_failed;
+ }
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ gst_element_post_message (element,
+ gst_message_new_clock_provide (GST_OBJECT_CAST (element),
+ GST_AUDIO_BASE_SRC (this)->clock, TRUE));
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+ /* uncork and start recording */
+ gst_pulsesrc_play (this);
+ break;
+ case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+ /* stop recording ASAP by corking */
+ pa_threaded_mainloop_lock (this->mainloop);
+ GST_DEBUG_OBJECT (this, "corking");
+ gst_pulsesrc_set_corked (this, TRUE, FALSE);
+ pa_threaded_mainloop_unlock (this->mainloop);
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+ /* now make sure we get out of the _read method */
+ gst_pulsesrc_pause (this);
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ if (this->mainloop)
+ pa_threaded_mainloop_stop (this->mainloop);
+
+ gst_pulsesrc_destroy_context (this);
+
+ if (this->mainloop) {
+ pa_threaded_mainloop_free (this->mainloop);
+ this->mainloop = NULL;
+ }
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ /* our clock is no longer usable once the ringbuffer is released, so let
+ * the pipeline know */
+ gst_element_post_message (element,
+ gst_message_new_clock_lost (GST_OBJECT_CAST (element),
+ GST_AUDIO_BASE_SRC (this)->clock));
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+
+ /* ERRORS */
+ mainloop_failed:
+ {
+ GST_ELEMENT_ERROR (this, RESOURCE, FAILED,
+ ("pa_threaded_mainloop_new() failed"), (NULL));
+ return GST_STATE_CHANGE_FAILURE;
+ }
+ mainloop_start_failed:
+ {
+ GST_ELEMENT_ERROR (this, RESOURCE, FAILED,
+ ("pa_threaded_mainloop_start() failed"), (NULL));
+ return GST_STATE_CHANGE_FAILURE;
+ }
+ }
+
+ static GstClockTime
+ gst_pulsesrc_get_time (GstClock * clock, GstPulseSrc * src)
+ {
+ pa_usec_t time = 0;
+
+ if (src->mainloop == NULL)
+ goto out;
+
+ pa_threaded_mainloop_lock (src->mainloop);
+ if (!src->stream)
+ goto unlock_and_out;
+
+ if (gst_pulsesrc_is_dead (src, TRUE))
+ goto unlock_and_out;
+
+ if (pa_stream_get_time (src->stream, &time) < 0) {
+ GST_DEBUG_OBJECT (src, "could not get time");
+ time = GST_CLOCK_TIME_NONE;
+ } else {
+ time *= 1000;
+ }
+
+
+ unlock_and_out:
+ pa_threaded_mainloop_unlock (src->mainloop);
+
+ out:
+ return time;
+ }
--- /dev/null
+ /*-*- Mode: C; c-basic-offset: 2 -*-*/
+
+ /*
+ * GStreamer pulseaudio plugin
+ *
+ * Copyright (c) 2004-2008 Lennart Poettering
+ *
+ * gst-pulse is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of the
+ * License, or (at your option) any later version.
+ *
+ * gst-pulse is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with gst-pulse; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
+ * USA.
+ */
+
+ #ifndef __GST_PULSESRC_H__
+ #define __GST_PULSESRC_H__
+
+ #include <gst/gst.h>
+ #include <gst/audio/gstaudiosrc.h>
+
+ #include <pulse/pulseaudio.h>
+ #include <pulse/thread-mainloop.h>
+
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
++#include <stdio.h>
++#endif
++
+ G_BEGIN_DECLS
+
+ #define GST_TYPE_PULSESRC (gst_pulsesrc_get_type())
+ G_DECLARE_FINAL_TYPE (GstPulseSrc, gst_pulsesrc, GST, PULSESRC, GstAudioSrc)
+ #define GST_PULSESRC_CAST(obj) ((GstPulseSrc *)(obj))
+
+ struct _GstPulseSrc
+ {
+ GstAudioSrc src;
+
+ gchar *server, *device, *client_name;
+
+ pa_threaded_mainloop *mainloop;
+
+ pa_context *context;
+ pa_stream *stream;
+ guint32 source_output_idx;
+
+ pa_sample_spec sample_spec;
+
+ const void *read_buffer;
+ size_t read_buffer_length;
+
+ gchar *device_description;
+
+ gdouble volume;
+ gboolean volume_set:1;
+ gboolean mute:1;
+ gboolean mute_set:1;
+ guint32 current_source_idx;
+ gchar *current_source_name;
+
+ gint notify; /* atomic */
+
+ gboolean corked:1;
+ gboolean stream_connected:1;
+ gboolean operation_success:1;
+ gboolean paused:1;
+ gboolean in_read:1;
+
++#ifdef __TIZEN__
++ gchar *latency;
++#endif /* __TIZEN__ */
++
+ GstStructure *properties;
+ pa_proplist *proplist;
++
++#if defined(__TIZEN__) && defined(PCM_DUMP_ENABLE)
++ gboolean need_dump_output;
++ FILE *dump_fd_output;
++#endif
+ };
+
+ G_END_DECLS
+
+ #endif /* __GST_PULSESRC_H__ */
--- /dev/null
+ /*
+ * GStreamer pulseaudio plugin
+ *
+ * Copyright (c) 2004-2008 Lennart Poettering
+ *
+ * gst-pulse is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of the
+ * License, or (at your option) any later version.
+ *
+ * gst-pulse is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with gst-pulse; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
+ * USA.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <gst/audio/audio.h>
+
+ #include "pulseutil.h"
+
+ #ifdef HAVE_UNISTD_H
+ # include <unistd.h> /* getpid on UNIX */
+ #endif
+ #ifdef HAVE_PROCESS_H
+ # include <process.h> /* getpid on win32 */
+ #endif
+
+ static const struct
+ {
+ GstAudioChannelPosition gst_pos;
+ pa_channel_position_t pa_pos;
+ } gst_pa_pos_table[] = {
+ {
+ GST_AUDIO_CHANNEL_POSITION_MONO, PA_CHANNEL_POSITION_MONO}, {
+ GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT, PA_CHANNEL_POSITION_FRONT_LEFT}, {
+ GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT, PA_CHANNEL_POSITION_FRONT_RIGHT}, {
+ GST_AUDIO_CHANNEL_POSITION_REAR_CENTER, PA_CHANNEL_POSITION_REAR_CENTER}, {
+ GST_AUDIO_CHANNEL_POSITION_REAR_LEFT, PA_CHANNEL_POSITION_REAR_LEFT}, {
+ GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT, PA_CHANNEL_POSITION_REAR_RIGHT}, {
+ GST_AUDIO_CHANNEL_POSITION_LFE1, PA_CHANNEL_POSITION_LFE}, {
+ GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER, PA_CHANNEL_POSITION_FRONT_CENTER}, {
+ GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER,
+ PA_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER}, {
+ GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER,
+ PA_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER}, {
+ GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT, PA_CHANNEL_POSITION_SIDE_LEFT}, {
+ GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT, PA_CHANNEL_POSITION_SIDE_RIGHT}, {
+ GST_AUDIO_CHANNEL_POSITION_TOP_CENTER, PA_CHANNEL_POSITION_TOP_CENTER}, {
+ GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_LEFT,
+ PA_CHANNEL_POSITION_TOP_FRONT_LEFT}, {
+ GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_RIGHT,
+ PA_CHANNEL_POSITION_TOP_FRONT_RIGHT}, {
+ GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_CENTER,
+ PA_CHANNEL_POSITION_TOP_FRONT_CENTER}, {
+ GST_AUDIO_CHANNEL_POSITION_TOP_REAR_LEFT, PA_CHANNEL_POSITION_TOP_REAR_LEFT}, {
+ GST_AUDIO_CHANNEL_POSITION_TOP_REAR_RIGHT,
+ PA_CHANNEL_POSITION_TOP_REAR_RIGHT}, {
+ GST_AUDIO_CHANNEL_POSITION_TOP_REAR_CENTER,
+ PA_CHANNEL_POSITION_TOP_REAR_CENTER}, {
+ GST_AUDIO_CHANNEL_POSITION_NONE, PA_CHANNEL_POSITION_INVALID}
+ };
+
+ static gboolean
+ gstaudioformat_to_pasampleformat (GstAudioFormat format,
+ pa_sample_format_t * sf)
+ {
+ switch (format) {
+ case GST_AUDIO_FORMAT_U8:
+ *sf = PA_SAMPLE_U8;
+ break;
+ case GST_AUDIO_FORMAT_S16LE:
+ *sf = PA_SAMPLE_S16LE;
+ break;
+ case GST_AUDIO_FORMAT_S16BE:
+ *sf = PA_SAMPLE_S16BE;
+ break;
+ case GST_AUDIO_FORMAT_F32LE:
+ *sf = PA_SAMPLE_FLOAT32LE;
+ break;
+ case GST_AUDIO_FORMAT_F32BE:
+ *sf = PA_SAMPLE_FLOAT32BE;
+ break;
+ case GST_AUDIO_FORMAT_S32LE:
+ *sf = PA_SAMPLE_S32LE;
+ break;
+ case GST_AUDIO_FORMAT_S32BE:
+ *sf = PA_SAMPLE_S32BE;
+ break;
+ case GST_AUDIO_FORMAT_S24LE:
+ *sf = PA_SAMPLE_S24LE;
+ break;
+ case GST_AUDIO_FORMAT_S24BE:
+ *sf = PA_SAMPLE_S24BE;
+ break;
+ case GST_AUDIO_FORMAT_S24_32LE:
+ *sf = PA_SAMPLE_S24_32LE;
+ break;
+ case GST_AUDIO_FORMAT_S24_32BE:
+ *sf = PA_SAMPLE_S24_32BE;
+ break;
+ default:
+ return FALSE;
+ }
+ return TRUE;
+ }
+
+ gboolean
+ gst_pulse_fill_sample_spec (GstAudioRingBufferSpec * spec, pa_sample_spec * ss)
+ {
+ if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_RAW) {
+ if (!gstaudioformat_to_pasampleformat (GST_AUDIO_INFO_FORMAT (&spec->info),
+ &ss->format))
+ return FALSE;
+ } else if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_MU_LAW) {
+ ss->format = PA_SAMPLE_ULAW;
+ } else if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_A_LAW) {
+ ss->format = PA_SAMPLE_ALAW;
+ } else
+ return FALSE;
+
+ ss->channels = GST_AUDIO_INFO_CHANNELS (&spec->info);
+ ss->rate = GST_AUDIO_INFO_RATE (&spec->info);
+
+ if (!pa_sample_spec_valid (ss))
+ return FALSE;
+
+ return TRUE;
+ }
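+
+ /* gst_pulse_fill_sample_spec() maps, for example, raw S16LE stereo at
+ * 44100 Hz to ss->format = PA_SAMPLE_S16LE, ss->channels = 2 and
+ * ss->rate = 44100. */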
+
+ gboolean
+ gst_pulse_fill_format_info (GstAudioRingBufferSpec * spec, pa_format_info ** f,
+ guint * channels)
+ {
+ pa_format_info *format;
+ pa_sample_format_t sf = PA_SAMPLE_INVALID;
+ GstAudioInfo *ainfo = &spec->info;
+
+ format = pa_format_info_new ();
+
+ if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_MU_LAW) {
+ format->encoding = PA_ENCODING_PCM;
+ sf = PA_SAMPLE_ULAW;
+ } else if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_A_LAW) {
+ format->encoding = PA_ENCODING_PCM;
+ sf = PA_SAMPLE_ALAW;
+ } else if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_RAW) {
+ format->encoding = PA_ENCODING_PCM;
+ if (!gstaudioformat_to_pasampleformat (GST_AUDIO_INFO_FORMAT (ainfo), &sf))
+ goto fail;
+ } else if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_AC3) {
+ format->encoding = PA_ENCODING_AC3_IEC61937;
+ } else if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_EAC3) {
+ format->encoding = PA_ENCODING_EAC3_IEC61937;
+ } else if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_DTS) {
+ format->encoding = PA_ENCODING_DTS_IEC61937;
+ } else if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_MPEG) {
+ format->encoding = PA_ENCODING_MPEG_IEC61937;
+ #if PA_CHECK_VERSION(3,99,0)
+ } else if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_MPEG2_AAC) {
+ format->encoding = PA_ENCODING_MPEG2_AAC_IEC61937;
+ } else if (spec->type == GST_AUDIO_RING_BUFFER_FORMAT_TYPE_MPEG4_AAC) {
+ /* HACK. treat MPEG4 AAC as MPEG2 AAC for the moment */
+ format->encoding = PA_ENCODING_MPEG2_AAC_IEC61937;
+ #endif
+ } else {
+ goto fail;
+ }
+
+ if (format->encoding == PA_ENCODING_PCM) {
+ pa_format_info_set_sample_format (format, sf);
+ pa_format_info_set_channels (format, GST_AUDIO_INFO_CHANNELS (ainfo));
+ }
+
+ pa_format_info_set_rate (format, GST_AUDIO_INFO_RATE (ainfo));
+
+ if (!pa_format_info_valid (format))
+ goto fail;
+
+ *f = format;
+ *channels = GST_AUDIO_INFO_CHANNELS (ainfo);
+
+ return TRUE;
+
+ fail:
+ if (format)
+ pa_format_info_free (format);
+ return FALSE;
+ }
+
+ const char *
+ gst_pulse_sample_format_to_caps_format (pa_sample_format_t sf)
+ {
+ switch (sf) {
+ case PA_SAMPLE_U8:
+ return "U8";
+
+ case PA_SAMPLE_S16LE:
+ return "S16LE";
+
+ case PA_SAMPLE_S16BE:
+ return "S16BE";
+
+ case PA_SAMPLE_FLOAT32LE:
+ return "F32LE";
+
+ case PA_SAMPLE_FLOAT32BE:
+ return "F32BE";
+
+ case PA_SAMPLE_S32LE:
+ return "S32LE";
+
+ case PA_SAMPLE_S32BE:
+ return "S32BE";
+
+ case PA_SAMPLE_S24LE:
+ return "S24LE";
+
+ case PA_SAMPLE_S24BE:
+ return "S24BE";
+
+ case PA_SAMPLE_S24_32LE:
+ return "S24_32LE";
+
+ case PA_SAMPLE_S24_32BE:
+ return "S24_32BE";
+
+ default:
+ return NULL;
+ }
+ }
+
+ /* PATH_MAX is not defined everywhere, e.g. on GNU Hurd */
+ #ifndef PATH_MAX
+ #define PATH_MAX 4096
+ #endif
+
+ gchar *
+ gst_pulse_client_name (void)
+ {
+ gchar buf[PATH_MAX];
+
+ const char *c;
+
+ if ((c = g_get_application_name ()))
+ return g_strdup (c);
+ else if (pa_get_binary_name (buf, sizeof (buf)))
+ return g_strdup (buf);
+ else
+ return g_strdup_printf ("GStreamer-pid-%lu", (gulong) getpid ());
+ }
+
+ pa_channel_map *
+ gst_pulse_gst_to_channel_map (pa_channel_map * map,
+ const GstAudioRingBufferSpec * spec)
+ {
+ gint i, j;
+ gint channels;
+ const GstAudioChannelPosition *pos;
+
+ pa_channel_map_init (map);
+
+ channels = GST_AUDIO_INFO_CHANNELS (&spec->info);
+ pos = spec->info.position;
+
+ for (j = 0; j < channels; j++) {
+ for (i = 0; i < G_N_ELEMENTS (gst_pa_pos_table); i++) {
+ if (pos[j] == gst_pa_pos_table[i].gst_pos) {
+ map->map[j] = gst_pa_pos_table[i].pa_pos;
+ break;
+ }
+ }
+ if (i == G_N_ELEMENTS (gst_pa_pos_table))
+ return NULL;
+ }
+
+ if (j != spec->info.channels) {
+ return NULL;
+ }
+
+ map->channels = spec->info.channels;
+
+ if (!pa_channel_map_valid (map)) {
+ return NULL;
+ }
+
+ return map;
+ }
+
+ GstAudioRingBufferSpec *
+ gst_pulse_channel_map_to_gst (const pa_channel_map * map,
+ GstAudioRingBufferSpec * spec)
+ {
+ gint i, j;
+ gboolean invalid = FALSE;
+ gint channels;
+ GstAudioChannelPosition *pos;
+
+ channels = GST_AUDIO_INFO_CHANNELS (&spec->info);
+
+ g_return_val_if_fail (map->channels == channels, NULL);
+
+ pos = spec->info.position;
+
+ for (j = 0; j < channels; j++) {
+ for (i = 0; j < channels && i < G_N_ELEMENTS (gst_pa_pos_table); i++) {
+ if (map->map[j] == gst_pa_pos_table[i].pa_pos) {
+ pos[j] = gst_pa_pos_table[i].gst_pos;
+ break;
+ }
+ }
+ if (i == G_N_ELEMENTS (gst_pa_pos_table))
+ return NULL;
+ }
+
+ if (!invalid
+ && !gst_audio_check_valid_channel_positions (pos, channels, FALSE))
+ invalid = TRUE;
+
+ if (invalid) {
+ for (i = 0; i < channels; i++)
+ pos[i] = GST_AUDIO_CHANNEL_POSITION_NONE;
+ } else {
+ if (pos[0] != GST_AUDIO_CHANNEL_POSITION_NONE)
+ spec->info.flags &= ~GST_AUDIO_FLAG_UNPOSITIONED;
+ }
+
+ return spec;
+ }
+
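+ /* pa_sw_volume_from_linear() maps a linear factor of 1.0 to PA_VOLUME_NORM,
+ * and the same software volume is applied to every channel below. */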
+ void
+ gst_pulse_cvolume_from_linear (pa_cvolume * v, unsigned channels,
+ gdouble volume)
+ {
+ pa_cvolume_set (v, channels, pa_sw_volume_from_linear (volume));
+ }
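+
+ /* For illustration, a minimal sketch (values arbitrary): applying a linear
+  * volume of 0.5 to a stereo stream:
+  *
+  *   pa_cvolume v;
+  *   gst_pulse_cvolume_from_linear (&v, 2, 0.5);
+  *   // both channels now hold pa_sw_volume_from_linear (0.5)
+  */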
+
+ static gboolean
+ make_proplist_item (GQuark field_id, const GValue * value, gpointer user_data)
+ {
+ pa_proplist *p = (pa_proplist *) user_data;
+ gchar *prop_id = (gchar *) g_quark_to_string (field_id);
+
+ /* http://0pointer.de/lennart/projects/pulseaudio/doxygen/proplist_8h.html */
+
+ /* match prop id */
+
+ /* check type */
+ switch (G_VALUE_TYPE (value)) {
+ case G_TYPE_STRING:
+ pa_proplist_sets (p, prop_id, g_value_get_string (value));
+ break;
++#ifdef __TIZEN__
++ case G_TYPE_INT:
++ pa_proplist_setf (p, prop_id, "%d", g_value_get_int (value));
++ break;
++#endif
+ default:
+ GST_WARNING ("unmapped property type %s", G_VALUE_TYPE_NAME (value));
+ break;
+ }
+
+ return TRUE;
+ }
+
+ pa_proplist *
+ gst_pulse_make_proplist (const GstStructure * properties)
+ {
+ pa_proplist *proplist = pa_proplist_new ();
+
+ /* iterate the structure and fill the proplist */
+ gst_structure_foreach (properties, make_proplist_item, proplist);
+ return proplist;
+ }
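+
+ /* For illustration, a sketch (property name and value arbitrary) of how a
+  * stream-properties structure becomes a pa_proplist:
+  *
+  *   GstStructure *s = gst_structure_new ("props",
+  *       "media.title", G_TYPE_STRING, "test", NULL);
+  *   pa_proplist *pl = gst_pulse_make_proplist (s);
+  *   // pa_proplist_gets (pl, "media.title") now returns "test"
+  *   pa_proplist_free (pl);
+  *   gst_structure_free (s);
+  */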
+
+ GstStructure *
+ gst_pulse_make_structure (pa_proplist * properties)
+ {
+ GstStructure *str;
+ void *state = NULL;
+
+ str = gst_structure_new_empty ("pulse-proplist");
+
+ while (TRUE) {
+ const char *key, *val;
+
+ key = pa_proplist_iterate (properties, &state);
+ if (key == NULL)
+ break;
+
+ val = pa_proplist_gets (properties, key);
+
+ gst_structure_set (str, key, G_TYPE_STRING, val, NULL);
+ }
+ return str;
+ }
+
+ static gboolean
+ gst_pulse_format_info_int_prop_to_value (pa_format_info * format,
+ const char *key, GValue * value)
+ {
+ GValue v = { 0, };
+ int i;
+ int *a, n;
+ int min, max;
+
+ if (pa_format_info_get_prop_int (format, key, &i) == 0) {
+ g_value_init (value, G_TYPE_INT);
+ g_value_set_int (value, i);
+
+ } else if (pa_format_info_get_prop_int_array (format, key, &a, &n) == 0) {
+ g_value_init (value, GST_TYPE_LIST);
+ g_value_init (&v, G_TYPE_INT);
+
+ for (i = 0; i < n; i++) {
+ g_value_set_int (&v, a[i]);
+ gst_value_list_append_value (value, &v);
+ }
+
+ pa_xfree (a);
+
+ } else if (pa_format_info_get_prop_int_range (format, key, &min, &max) == 0) {
+ g_value_init (value, GST_TYPE_INT_RANGE);
+ gst_value_set_int_range (value, min, max);
+
+ } else {
+ /* Property not available or is not an int type */
+ return FALSE;
+ }
+
+ return TRUE;
+ }
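+
+ /* For illustration (values arbitrary): depending on how the PulseAudio
+  * property was stored, the resulting GValue differs:
+  *
+  *   int        48000           -> G_TYPE_INT          48000
+  *   int array  [44100, 48000]  -> GST_TYPE_LIST       { 44100, 48000 }
+  *   int range  8000..192000    -> GST_TYPE_INT_RANGE  [ 8000, 192000 ]
+  */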
+
+ GstCaps *
+ gst_pulse_format_info_to_caps (pa_format_info * format)
+ {
+ GstCaps *ret = NULL;
+ GValue v = { 0, };
+ pa_sample_spec ss;
+
+ switch (format->encoding) {
+ case PA_ENCODING_PCM:{
+ char *tmp = NULL;
+
+ pa_format_info_to_sample_spec (format, &ss, NULL);
+
+ if (pa_format_info_get_prop_string (format,
+ PA_PROP_FORMAT_SAMPLE_FORMAT, &tmp)) {
+ /* No specific sample format means any sample format */
+ ret = gst_pulse_fix_pcm_caps (gst_caps_from_string (_PULSE_CAPS_PCM));
+ goto out;
+
+ } else if (ss.format == PA_SAMPLE_ALAW) {
+ ret = gst_caps_from_string (_PULSE_CAPS_ALAW);
+
+ } else if (ss.format == PA_SAMPLE_ULAW) {
+ ret = gst_caps_from_string (_PULSE_CAPS_MULAW);
+
+ } else {
+ /* Linear PCM format */
+ const char *sformat =
+ gst_pulse_sample_format_to_caps_format (ss.format);
+
+ ret = gst_caps_from_string (_PULSE_CAPS_LINEAR);
+
+ if (sformat)
+ gst_caps_set_simple (ret, "format", G_TYPE_STRING, sformat, NULL);
+ }
+
+ pa_xfree (tmp);
+ break;
+ }
+
+ case PA_ENCODING_AC3_IEC61937:
+ ret = gst_caps_from_string (_PULSE_CAPS_AC3);
+ break;
+
+ case PA_ENCODING_EAC3_IEC61937:
+ ret = gst_caps_from_string (_PULSE_CAPS_EAC3);
+ break;
+
+ case PA_ENCODING_DTS_IEC61937:
+ ret = gst_caps_from_string (_PULSE_CAPS_DTS);
+ break;
+
+ case PA_ENCODING_MPEG_IEC61937:
+ ret = gst_caps_from_string (_PULSE_CAPS_MP3);
+ break;
+
+ default:
+ GST_WARNING ("Found a PA format that we don't support yet");
+ goto out;
+ }
+
+ if (gst_pulse_format_info_int_prop_to_value (format, PA_PROP_FORMAT_RATE, &v))
+ gst_caps_set_value (ret, "rate", &v);
+
+ g_value_unset (&v);
+
+ if (gst_pulse_format_info_int_prop_to_value (format, PA_PROP_FORMAT_CHANNELS,
+ &v))
+ gst_caps_set_value (ret, "channels", &v);
+
+ out:
+ return ret;
+ }
+
++#ifdef __TIZEN__
++#include <gio/gio.h>
++#define PA_BUS_NAME "org.pulseaudio.Server"
++#define PA_STREAM_MANAGER_OBJECT_PATH "/org/pulseaudio/StreamManager"
++#define PA_STREAM_MANAGER_INTERFACE "org.pulseaudio.StreamManager"
++#define PA_STREAM_MANAGER_METHOD_NAME_SET_VOLUME_RATIO "SetVolumeRatio"
++void
++gst_pulse_set_volume_ratio (uint32_t stream_index, const char *direction, double ratio)
++{
++ GDBusConnection *conn = NULL;
++ GError *err = NULL;
++ GVariant *result = NULL;
++ const gchar *dbus_ret = NULL;
++
++ conn = g_bus_get_sync (G_BUS_TYPE_SYSTEM, NULL, &err);
++ if (!conn || err) {
++ GST_ERROR ("g_bus_get_sync() error (%s)", err ? err->message : NULL);
++ if (err)
++ g_error_free (err);
++ return;
++ }
++
++ result = g_dbus_connection_call_sync (conn,
++ PA_BUS_NAME,
++ PA_STREAM_MANAGER_OBJECT_PATH,
++ PA_STREAM_MANAGER_INTERFACE,
++ PA_STREAM_MANAGER_METHOD_NAME_SET_VOLUME_RATIO,
++ g_variant_new("(sud)", direction, stream_index, ratio),
++ G_VARIANT_TYPE("(s)"),
++ G_DBUS_CALL_FLAGS_NONE,
++ 1000,
++ NULL,
++ &err);
++ if (!result || err) {
++ GST_ERROR ("g_dbus_connection_call_sync() for SET_VOLUME_RATIO error (%s)", err ? err->message : NULL);
++ if (err)
++ g_error_free (err);
++ goto finish;
++ }
++ g_variant_get (result, "(&s)", &dbus_ret);
++ GST_DEBUG ("SET_VOLUME_RATIO returns value(%s) for stream index(%u), ratio(%f)", dbus_ret, stream_index, ratio);
++
++finish:
++ if (result) /* NULL when the D-Bus call failed */
++ g_variant_unref(result);
++ g_object_unref(conn);
++
++ return;
++}
++#endif
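++
++/* For illustration, a sketch of a caller applying a 50% volume ratio to the
++ * output stream with a (hypothetical) stream index of 7:
++ *
++ *   gst_pulse_set_volume_ratio (7, "out", 0.5);
++ *
++ * The "out" direction string is an assumption about what the Tizen stream
++ * manager expects; the function itself only forwards it over D-Bus. */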
++
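+ /* For illustration: gst_pulse_fix_pcm_caps() below clamps template fields
+  * like "rate = (int) [ 1, MAX ]" from the _PULSE_CAPS_* definitions to
+  * [ 1, PA_RATE_MAX ], since PulseAudio cannot negotiate higher rates. */
+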
+ GstCaps *
+ gst_pulse_fix_pcm_caps (GstCaps * incaps)
+ {
+ GstCaps *outcaps;
+ int i;
+
+ outcaps = gst_caps_make_writable (incaps);
+
+ for (i = 0; i < gst_caps_get_size (outcaps); i++) {
+ GstStructure *st = gst_caps_get_structure (outcaps, i);
+ const gchar *format = gst_structure_get_name (st);
+ const GValue *value;
+ GValue new_value = G_VALUE_INIT;
+ gint min, max, step;
+
+ if (!(g_str_equal (format, "audio/x-raw") ||
+ g_str_equal (format, "audio/x-alaw") ||
+ g_str_equal (format, "audio/x-mulaw")))
+ continue;
+
+ value = gst_structure_get_value (st, "rate");
+
+ if (!GST_VALUE_HOLDS_INT_RANGE (value))
+ continue;
+
+ min = gst_value_get_int_range_min (value);
+ max = gst_value_get_int_range_max (value);
+ step = gst_value_get_int_range_step (value);
+
+ if (min > PA_RATE_MAX)
+ min = PA_RATE_MAX;
+ if (max > PA_RATE_MAX)
+ max = PA_RATE_MAX;
+
+ g_value_init (&new_value, GST_TYPE_INT_RANGE);
+ gst_value_set_int_range_step (&new_value, min, max, step);
+
+ gst_structure_take_value (st, "rate", &new_value);
+ }
+
+ return outcaps;
+ }
--- /dev/null
+ /*
+ * GStreamer pulseaudio plugin
+ *
+ * Copyright (c) 2004-2008 Lennart Poettering
+ *
+ * gst-pulse is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of the
+ * License, or (at your option) any later version.
+ *
+ * gst-pulse is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with gst-pulse; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
+ * USA.
+ */
+
+ #ifndef __GST_PULSEUTIL_H__
+ #define __GST_PULSEUTIL_H__
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <gst/gst.h>
+ #include <pulse/pulseaudio.h>
+ #include <gst/audio/gstaudioringbuffer.h>
+ #include <gst/audio/gstaudiosink.h>
+
+
+ #if (G_BYTE_ORDER == G_LITTLE_ENDIAN)
+ # define _PULSE_FORMATS "{ S16LE, S16BE, F32LE, F32BE, S32LE, S32BE, " \
+ "S24LE, S24BE, S24_32LE, S24_32BE, U8 }"
+ #else
+ # define _PULSE_FORMATS "{ S16BE, S16LE, F32BE, F32LE, S32BE, S32LE, " \
+ "S24BE, S24LE, S24_32BE, S24_32LE, U8 }"
+ #endif
+
+ /* NOTE! that we do NOT actually support rate=MAX. This must be fixed up using
+ * gst_pulse_fix_pcm_caps() before being used. */
+ #define _PULSE_CAPS_LINEAR \
+ "audio/x-raw, " \
+ "format = (string) " _PULSE_FORMATS ", " \
+ "layout = (string) interleaved, " \
+ "rate = (int) [ 1, MAX ], " \
+ "channels = (int) [ 1, 32 ]; "
+ #define _PULSE_CAPS_ALAW \
+ "audio/x-alaw, " \
+ "rate = (int) [ 1, MAX], " \
+ "channels = (int) [ 1, 32 ]; "
+ #define _PULSE_CAPS_MULAW \
+ "audio/x-mulaw, " \
+ "rate = (int) [ 1, MAX], " \
+ "channels = (int) [ 1, 32 ]; "
+
+ #define _PULSE_CAPS_AC3 "audio/x-ac3, framed = (boolean) true; "
+ #define _PULSE_CAPS_EAC3 "audio/x-eac3, framed = (boolean) true; "
+ #define _PULSE_CAPS_DTS "audio/x-dts, framed = (boolean) true, " \
+ "block-size = (int) { 512, 1024, 2048 }; "
+ #define _PULSE_CAPS_MP3 "audio/mpeg, mpegversion = (int) 1, " \
+ "mpegaudioversion = (int) [ 1, 3 ], parsed = (boolean) true;"
+ #define _PULSE_CAPS_AAC "audio/mpeg, mpegversion = (int) { 2, 4 }, " \
+ "framed = (boolean) true, stream-format = (string) adts;"
+
+ #define _PULSE_CAPS_PCM \
+ _PULSE_CAPS_LINEAR \
+ _PULSE_CAPS_ALAW \
+ _PULSE_CAPS_MULAW
+
+
+ gboolean gst_pulse_fill_sample_spec (GstAudioRingBufferSpec * spec,
+ pa_sample_spec * ss);
+ gboolean gst_pulse_fill_format_info (GstAudioRingBufferSpec * spec,
+ pa_format_info ** f, guint * channels);
+ const char * gst_pulse_sample_format_to_caps_format (pa_sample_format_t sf);
+
+ gchar *gst_pulse_client_name (void);
+
+ pa_channel_map *gst_pulse_gst_to_channel_map (pa_channel_map * map,
+ const GstAudioRingBufferSpec * spec);
+
+ GstAudioRingBufferSpec *gst_pulse_channel_map_to_gst (const pa_channel_map * map,
+ GstAudioRingBufferSpec * spec);
+
+ void gst_pulse_cvolume_from_linear (pa_cvolume *v, unsigned channels, gdouble volume);
+
+ pa_proplist *gst_pulse_make_proplist (const GstStructure *properties);
+ GstStructure *gst_pulse_make_structure (pa_proplist *properties);
+
+ GstCaps * gst_pulse_format_info_to_caps (pa_format_info * format);
++
++#ifdef __TIZEN__
++void gst_pulse_set_volume_ratio (uint32_t stream_index, const char *direction, double ratio);
++#endif
+ GstCaps * gst_pulse_fix_pcm_caps (GstCaps * incaps);
+
+ #endif
--- /dev/null
+ /* GStreamer
+ * Copyright (C) 2007-2008 Wouter Cloetens <wouter@mind.be>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ */
+
+ /**
+ * SECTION:element-souphttpsrc
+ * @title: souphttpsrc
+ *
+ * This plugin reads data from a remote location specified by a URI.
+ * Supported protocols are 'http', 'https'.
+ *
+ * An HTTP proxy must be specified by its URL.
+ * If the "http_proxy" environment variable is set, its value is used.
+ * If built with libsoup's GNOME integration features, the GNOME proxy
+ * configuration will be used, or failing that, proxy autodetection.
+ * The #GstSoupHTTPSrc:proxy property can be used to override the default.
+ *
+ * In case the #GstSoupHTTPSrc:iradio-mode property is set and the location is
+ * an HTTP resource, souphttpsrc will send special Icecast HTTP headers to the
+ * server to request additional Icecast meta-information.
+ * If the server is not an Icecast server, it will behave as if the
+ * #GstSoupHTTPSrc:iradio-mode property were not set. If it is, souphttpsrc will
+ * output data with a media type of application/x-icy, in which case you will
+ * need to use the #GstICYDemux element as a follow-up element to extract the Icecast
+ * metadata and to determine the underlying media type.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v souphttpsrc location=https://some.server.org/index.html
+ * ! filesink location=/home/joe/server.html
+ * ]| The above pipeline reads a web page from a server using the HTTPS protocol
+ * and writes it to a local file.
+ * |[
+ * gst-launch-1.0 -v souphttpsrc user-agent="FooPlayer 0.99 beta"
+ * automatic-redirect=false proxy=http://proxy.intranet.local:8080
+ * location=http://music.foobar.com/demo.mp3 ! mpegaudioparse
+ * ! mpg123audiodec ! audioconvert ! audioresample ! autoaudiosink
+ * ]| The above pipeline will read and decode and play an mp3 file from a
+ * web server using the HTTP protocol. If the server sends redirects,
+ * the request fails instead of following the redirect. The specified
+ * HTTP proxy server is used. The User-Agent HTTP request header
+ * is set to a custom string instead of "GStreamer souphttpsrc."
+ * |[
+ * gst-launch-1.0 -v souphttpsrc location=http://10.11.12.13/mjpeg
+ * do-timestamp=true ! multipartdemux
+ * ! image/jpeg,width=640,height=480 ! matroskamux
+ * ! filesink location=mjpeg.mkv
+ * ]| The above pipeline reads a motion JPEG stream from an IP camera
+ * using the HTTP protocol, encoded as mime/multipart image/jpeg
+ * parts, and writes a Matroska motion JPEG file. The width and
+ * height properties are set in the caps to provide the Matroska
+ * multiplexer with the information to set this in the header.
+ * Timestamps are set on the buffers as they arrive from the camera.
+ * These are used by the mime/multipart demultiplexer to emit timestamps
+ * on the JPEG-encoded video frame buffers. This allows the Matroska
+ * multiplexer to timestamp the frames in the resulting file.
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <string.h>
+ #ifdef HAVE_STDLIB_H
+ #include <stdlib.h> /* atoi() */
+ #endif
+ #include <gst/gstelement.h>
+ #include <gst/gst-i18n-plugin.h>
+ #include <libsoup/soup.h>
+ #include "gstsoupelements.h"
+ #include "gstsouphttpsrc.h"
+ #include "gstsouputils.h"
+
+ #include <gst/tag/tag.h>
+
+ GST_DEBUG_CATEGORY_STATIC (souphttpsrc_debug);
+ #define GST_CAT_DEFAULT souphttpsrc_debug
+
+ #define GST_SOUP_SESSION_CONTEXT "gst.soup.session"
+
+ static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+ enum
+ {
+ PROP_0,
+ PROP_LOCATION,
+ PROP_IS_LIVE,
+ PROP_USER_AGENT,
+ PROP_AUTOMATIC_REDIRECT,
+ PROP_PROXY,
+ PROP_USER_ID,
+ PROP_USER_PW,
+ PROP_PROXY_ID,
+ PROP_PROXY_PW,
+ PROP_COOKIES,
+ PROP_IRADIO_MODE,
+ PROP_TIMEOUT,
+ PROP_EXTRA_HEADERS,
+ PROP_SOUP_LOG_LEVEL,
+ PROP_COMPRESS,
+ PROP_KEEP_ALIVE,
+ PROP_SSL_STRICT,
+ PROP_SSL_CA_FILE,
+ PROP_SSL_USE_SYSTEM_CA_FILE,
+ PROP_TLS_DATABASE,
+ PROP_RETRIES,
+ PROP_METHOD,
+ PROP_TLS_INTERACTION,
+ };
+
+ #define DEFAULT_USER_AGENT "GStreamer souphttpsrc " PACKAGE_VERSION " "
+ #define DEFAULT_IRADIO_MODE TRUE
+ #define DEFAULT_SOUP_LOG_LEVEL SOUP_LOGGER_LOG_HEADERS
+ #define DEFAULT_COMPRESS FALSE
+ #define DEFAULT_KEEP_ALIVE TRUE
+ #define DEFAULT_SSL_STRICT TRUE
+ #define DEFAULT_SSL_CA_FILE NULL
+ #define DEFAULT_SSL_USE_SYSTEM_CA_FILE TRUE
+ #define DEFAULT_TLS_DATABASE NULL
+ #define DEFAULT_TLS_INTERACTION NULL
+ #define DEFAULT_TIMEOUT 15
+ #define DEFAULT_RETRIES 3
+ #define DEFAULT_SOUP_METHOD NULL
+
+ #define GROW_BLOCKSIZE_LIMIT 1
+ #define GROW_BLOCKSIZE_COUNT 1
+ #define GROW_BLOCKSIZE_FACTOR 2
+ #define REDUCE_BLOCKSIZE_LIMIT 0.20
+ #define REDUCE_BLOCKSIZE_COUNT 2
+ #define REDUCE_BLOCKSIZE_FACTOR 0.5
+ #define GROW_TIME_LIMIT (1 * GST_SECOND)
+
++#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
++#define DLNA_OP_TIMED_SEEK 0x02
++#define DLNA_OP_BYTE_SEEK 0x01
++#endif
++
+ static void gst_soup_http_src_uri_handler_init (gpointer g_iface,
+ gpointer iface_data);
+ static void gst_soup_http_src_finalize (GObject * gobject);
+ static void gst_soup_http_src_dispose (GObject * gobject);
+
+ static void gst_soup_http_src_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+ static void gst_soup_http_src_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+ static GstStateChangeReturn gst_soup_http_src_change_state (GstElement *
+ element, GstStateChange transition);
+ static void gst_soup_http_src_set_context (GstElement * element,
+ GstContext * context);
+ static GstFlowReturn gst_soup_http_src_create (GstPushSrc * psrc,
+ GstBuffer ** outbuf);
+ static gboolean gst_soup_http_src_start (GstBaseSrc * bsrc);
+ static gboolean gst_soup_http_src_stop (GstBaseSrc * bsrc);
+ static gboolean gst_soup_http_src_get_size (GstBaseSrc * bsrc, guint64 * size);
+ static gboolean gst_soup_http_src_is_seekable (GstBaseSrc * bsrc);
+ static gboolean gst_soup_http_src_do_seek (GstBaseSrc * bsrc,
+ GstSegment * segment);
+ static gboolean gst_soup_http_src_query (GstBaseSrc * bsrc, GstQuery * query);
+ static gboolean gst_soup_http_src_unlock (GstBaseSrc * bsrc);
+ static gboolean gst_soup_http_src_unlock_stop (GstBaseSrc * bsrc);
+ static gboolean gst_soup_http_src_set_location (GstSoupHTTPSrc * src,
+ const gchar * uri, GError ** error);
+ static gboolean gst_soup_http_src_set_proxy (GstSoupHTTPSrc * src,
+ const gchar * uri);
+ static char *gst_soup_http_src_unicodify (const char *str);
+ static gboolean gst_soup_http_src_build_message (GstSoupHTTPSrc * src,
+ const gchar * method);
+ static void gst_soup_http_src_cancel_message (GstSoupHTTPSrc * src);
+ static gboolean gst_soup_http_src_add_range_header (GstSoupHTTPSrc * src,
+ guint64 offset, guint64 stop_offset);
+ static gboolean gst_soup_http_src_session_open (GstSoupHTTPSrc * src);
+ static void gst_soup_http_src_session_close (GstSoupHTTPSrc * src);
+ static GstFlowReturn gst_soup_http_src_parse_status (SoupMessage * msg,
+ GstSoupHTTPSrc * src);
+ static GstFlowReturn gst_soup_http_src_got_headers (GstSoupHTTPSrc * src,
+ SoupMessage * msg);
+ static void gst_soup_http_src_authenticate_cb (SoupSession * session,
+ SoupMessage * msg, SoupAuth * auth, gboolean retrying,
+ GstSoupHTTPSrc * src);
+
+ #define gst_soup_http_src_parent_class parent_class
+ G_DEFINE_TYPE_WITH_CODE (GstSoupHTTPSrc, gst_soup_http_src, GST_TYPE_PUSH_SRC,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER,
+ gst_soup_http_src_uri_handler_init));
+ GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (souphttpsrc, "souphttpsrc",
+ GST_RANK_PRIMARY, GST_TYPE_SOUP_HTTP_SRC, soup_element_init (plugin));
+
+ static void
+ gst_soup_http_src_class_init (GstSoupHTTPSrcClass * klass)
+ {
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstBaseSrcClass *gstbasesrc_class;
+ GstPushSrcClass *gstpushsrc_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstbasesrc_class = (GstBaseSrcClass *) klass;
+ gstpushsrc_class = (GstPushSrcClass *) klass;
+
+ gobject_class->set_property = gst_soup_http_src_set_property;
+ gobject_class->get_property = gst_soup_http_src_get_property;
+ gobject_class->finalize = gst_soup_http_src_finalize;
+ gobject_class->dispose = gst_soup_http_src_dispose;
+
+ g_object_class_install_property (gobject_class,
+ PROP_LOCATION,
+ g_param_spec_string ("location", "Location",
+ "Location to read from", "",
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class,
+ PROP_USER_AGENT,
+ g_param_spec_string ("user-agent", "User-Agent",
+ "Value of the User-Agent HTTP request header field",
+ DEFAULT_USER_AGENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class,
+ PROP_AUTOMATIC_REDIRECT,
+ g_param_spec_boolean ("automatic-redirect", "automatic-redirect",
+ "Automatically follow HTTP redirects (HTTP Status Code 3xx)",
+ TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class,
+ PROP_PROXY,
+ g_param_spec_string ("proxy", "Proxy",
+ "HTTP proxy server URI", "",
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class,
+ PROP_USER_ID,
+ g_param_spec_string ("user-id", "user-id",
+ "HTTP location URI user id for authentication", "",
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_USER_PW,
+ g_param_spec_string ("user-pw", "user-pw",
+ "HTTP location URI user password for authentication", "",
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_PROXY_ID,
+ g_param_spec_string ("proxy-id", "proxy-id",
+ "HTTP proxy URI user id for authentication", "",
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_PROXY_PW,
+ g_param_spec_string ("proxy-pw", "proxy-pw",
+ "HTTP proxy URI user password for authentication", "",
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_COOKIES,
+ g_param_spec_boxed ("cookies", "Cookies", "HTTP request cookies",
+ G_TYPE_STRV, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_IS_LIVE,
+ g_param_spec_boolean ("is-live", "is-live", "Act like a live source",
+ FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_TIMEOUT,
+ g_param_spec_uint ("timeout", "timeout",
+ "Value in seconds to timeout a blocking I/O (0 = No timeout).", 0,
+ 3600, DEFAULT_TIMEOUT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_EXTRA_HEADERS,
+ g_param_spec_boxed ("extra-headers", "Extra Headers",
+ "Extra headers to append to the HTTP request",
+ GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_IRADIO_MODE,
+ g_param_spec_boolean ("iradio-mode", "iradio-mode",
+ "Enable internet radio mode (ask server to send shoutcast/icecast "
+ "metadata interleaved with the actual stream data)",
+ DEFAULT_IRADIO_MODE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstSoupHTTPSrc::http-log-level:
+ *
+ * If set and > 0, captures and dumps HTTP session data as
+ * log messages if log level >= GST_LEVEL_TRACE
+ *
+ * Since: 1.4
+ */
+ g_object_class_install_property (gobject_class, PROP_SOUP_LOG_LEVEL,
+ g_param_spec_enum ("http-log-level", "HTTP log level",
+ "Set log level for soup's HTTP session log",
+ SOUP_TYPE_LOGGER_LOG_LEVEL, DEFAULT_SOUP_LOG_LEVEL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstSoupHTTPSrc::compress:
+ *
+ * If set to %TRUE, souphttpsrc will automatically handle gzip
+ * and deflate Content-Encodings. This does not make much difference
+ * and causes more load for normal media files, but makes a real
+ * difference in size for plaintext files.
+ *
+ * Since: 1.4
+ */
+ g_object_class_install_property (gobject_class, PROP_COMPRESS,
+ g_param_spec_boolean ("compress", "Compress",
+ "Allow compressed content encodings",
+ DEFAULT_COMPRESS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstSoupHTTPSrc::keep-alive:
+ *
+ * If set to %TRUE, souphttpsrc will keep connections alive when going
+ * to the READY state and will only close them when connecting to a
+ * different server or when going to the NULL state.
+ *
+ * Since: 1.4
+ */
+ g_object_class_install_property (gobject_class, PROP_KEEP_ALIVE,
+ g_param_spec_boolean ("keep-alive", "keep-alive",
+ "Use HTTP persistent connections", DEFAULT_KEEP_ALIVE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstSoupHTTPSrc::ssl-strict:
+ *
+ * If set to %TRUE, souphttpsrc will reject all SSL certificates that
+ * are considered invalid.
+ *
+ * Since: 1.4
+ */
+ g_object_class_install_property (gobject_class, PROP_SSL_STRICT,
+ g_param_spec_boolean ("ssl-strict", "SSL Strict",
+ "Strict SSL certificate checking", DEFAULT_SSL_STRICT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstSoupHTTPSrc::ssl-ca-file:
+ *
+ * An SSL anchor CA file that should be used for checking certificates
+ * instead of the system CA file.
+ *
+ * If this property is non-%NULL, #GstSoupHTTPSrc::ssl-use-system-ca-file
+ * value will be ignored.
+ *
+ * Deprecated: Use #GstSoupHTTPSrc::tls-database property instead.
+ * Since: 1.4
+ */
+ g_object_class_install_property (gobject_class, PROP_SSL_CA_FILE,
+ g_param_spec_string ("ssl-ca-file", "SSL CA File",
+ "Location of a SSL anchor CA file to use", DEFAULT_SSL_CA_FILE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstSoupHTTPSrc::ssl-use-system-ca-file:
+ *
+ * If set to %TRUE, souphttpsrc will use the system's CA file for
+ * checking certificates, unless #GstSoupHTTPSrc::ssl-ca-file or
+ * #GstSoupHTTPSrc::tls-database are non-%NULL.
+ *
+ * Since: 1.4
+ */
+ g_object_class_install_property (gobject_class, PROP_SSL_USE_SYSTEM_CA_FILE,
+ g_param_spec_boolean ("ssl-use-system-ca-file", "Use System CA File",
+ "Use system CA file", DEFAULT_SSL_USE_SYSTEM_CA_FILE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstSoupHTTPSrc::tls-database:
+ *
+ * TLS database with anchor certificate authorities used to validate
+ * the server certificate.
+ *
+ * If this property is non-%NULL, #GstSoupHTTPSrc::ssl-use-system-ca-file
+ * and #GstSoupHTTPSrc::ssl-ca-file values will be ignored.
+ *
+ * Since: 1.6
+ */
+ g_object_class_install_property (gobject_class, PROP_TLS_DATABASE,
+ g_param_spec_object ("tls-database", "TLS database",
+ "TLS database with anchor certificate authorities used to validate the server certificate",
+ G_TYPE_TLS_DATABASE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstSoupHTTPSrc::tls-interaction:
+ *
+ * A #GTlsInteraction object to be used when the connection or certificate
+ * database need to interact with the user. This will be used to prompt the
+ * user for passwords or certificate where necessary.
+ *
+ * Since: 1.8
+ */
+ g_object_class_install_property (gobject_class, PROP_TLS_INTERACTION,
+ g_param_spec_object ("tls-interaction", "TLS interaction",
+ "A GTlsInteraction object to be used when the connection or certificate database need to interact with the user.",
+ G_TYPE_TLS_INTERACTION, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstSoupHTTPSrc::retries:
+ *
+ * Maximum number of retries until giving up.
+ *
+ * Since: 1.4
+ */
+ g_object_class_install_property (gobject_class, PROP_RETRIES,
+ g_param_spec_int ("retries", "Retries",
+ "Maximum number of retries until giving up (-1=infinite)", -1,
+ G_MAXINT, DEFAULT_RETRIES,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstSoupHTTPSrc::method:
+ *
+ * The HTTP method to use when making a request.
+ *
+ * Since: 1.6
+ */
+ g_object_class_install_property (gobject_class, PROP_METHOD,
+ g_param_spec_string ("method", "HTTP method",
+ "The HTTP method to use (GET, HEAD, OPTIONS, etc)",
+ DEFAULT_SOUP_METHOD, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_add_static_pad_template (gstelement_class, &srctemplate);
+
+ gst_element_class_set_static_metadata (gstelement_class, "HTTP client source",
+ "Source/Network",
+ "Receive data as a client over the network via HTTP using SOUP",
+ "Wouter Cloetens <wouter@mind.be>");
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_soup_http_src_change_state);
+ gstelement_class->set_context =
+ GST_DEBUG_FUNCPTR (gst_soup_http_src_set_context);
+
+ gstbasesrc_class->start = GST_DEBUG_FUNCPTR (gst_soup_http_src_start);
+ gstbasesrc_class->stop = GST_DEBUG_FUNCPTR (gst_soup_http_src_stop);
+ gstbasesrc_class->unlock = GST_DEBUG_FUNCPTR (gst_soup_http_src_unlock);
+ gstbasesrc_class->unlock_stop =
+ GST_DEBUG_FUNCPTR (gst_soup_http_src_unlock_stop);
+ gstbasesrc_class->get_size = GST_DEBUG_FUNCPTR (gst_soup_http_src_get_size);
+ gstbasesrc_class->is_seekable =
+ GST_DEBUG_FUNCPTR (gst_soup_http_src_is_seekable);
+ gstbasesrc_class->do_seek = GST_DEBUG_FUNCPTR (gst_soup_http_src_do_seek);
+ gstbasesrc_class->query = GST_DEBUG_FUNCPTR (gst_soup_http_src_query);
+
+ gstpushsrc_class->create = GST_DEBUG_FUNCPTR (gst_soup_http_src_create);
+
+ GST_DEBUG_CATEGORY_INIT (souphttpsrc_debug, "souphttpsrc", 0,
+ "SOUP HTTP src");
+ }
+
+ static void
+ gst_soup_http_src_reset (GstSoupHTTPSrc * src)
+ {
+ src->retry_count = 0;
+ src->have_size = FALSE;
+ src->got_headers = FALSE;
+ src->seekable = FALSE;
+ src->read_position = 0;
+ src->request_position = 0;
+ src->stop_position = -1;
+ src->content_size = 0;
+ src->have_body = FALSE;
+
+ src->reduce_blocksize_count = 0;
+ src->increase_blocksize_count = 0;
+ src->last_socket_read_time = 0;
+
++#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
++ if (src->dash_oldest_segment) {
++ g_free (src->dash_oldest_segment);
++ src->dash_oldest_segment = NULL;
++ }
++ if (src->dash_newest_segment) {
++ g_free (src->dash_newest_segment);
++ src->dash_newest_segment = NULL;
++ }
++ src->dlna_opt = 0;
++#endif
++
+ g_cancellable_reset (src->cancellable);
+ g_mutex_lock (&src->mutex);
+ if (src->input_stream) {
+ g_object_unref (src->input_stream);
+ src->input_stream = NULL;
+ }
+ g_mutex_unlock (&src->mutex);
+
+ gst_caps_replace (&src->src_caps, NULL);
+ g_free (src->iradio_name);
+ src->iradio_name = NULL;
+ g_free (src->iradio_genre);
+ src->iradio_genre = NULL;
+ g_free (src->iradio_url);
+ src->iradio_url = NULL;
+ }
+
+ static void
+ gst_soup_http_src_init (GstSoupHTTPSrc * src)
+ {
+ const gchar *proxy;
+
+ g_mutex_init (&src->mutex);
+ g_cond_init (&src->have_headers_cond);
+ src->cancellable = g_cancellable_new ();
+ src->location = NULL;
+ src->redirection_uri = NULL;
+ src->automatic_redirect = TRUE;
+ src->user_agent = g_strdup (DEFAULT_USER_AGENT);
+ src->user_id = NULL;
+ src->user_pw = NULL;
+ src->proxy_id = NULL;
+ src->proxy_pw = NULL;
+ src->cookies = NULL;
+ src->iradio_mode = DEFAULT_IRADIO_MODE;
+ src->session = NULL;
+ src->external_session = NULL;
+ src->forced_external_session = FALSE;
+ src->msg = NULL;
+ src->timeout = DEFAULT_TIMEOUT;
+ src->log_level = DEFAULT_SOUP_LOG_LEVEL;
+ src->compress = DEFAULT_COMPRESS;
+ src->keep_alive = DEFAULT_KEEP_ALIVE;
+ src->ssl_strict = DEFAULT_SSL_STRICT;
+ src->ssl_use_system_ca_file = DEFAULT_SSL_USE_SYSTEM_CA_FILE;
+ src->tls_database = DEFAULT_TLS_DATABASE;
+ src->tls_interaction = DEFAULT_TLS_INTERACTION;
+ src->max_retries = DEFAULT_RETRIES;
+ src->method = DEFAULT_SOUP_METHOD;
+ src->minimum_blocksize = gst_base_src_get_blocksize (GST_BASE_SRC_CAST (src));
++#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
++ src->dash_oldest_segment = NULL;
++ src->dash_newest_segment = NULL;
++ src->received_total = 0;
++ src->dlna_opt = 0;
++#endif
+ proxy = g_getenv ("http_proxy");
+ if (!gst_soup_http_src_set_proxy (src, proxy)) {
+ GST_WARNING_OBJECT (src,
+ "The proxy in the http_proxy env var (\"%s\") cannot be parsed.",
+ proxy);
+ }
+
+ gst_base_src_set_automatic_eos (GST_BASE_SRC (src), FALSE);
+
+ gst_soup_http_src_reset (src);
+ }
+
+ static void
+ gst_soup_http_src_dispose (GObject * gobject)
+ {
+ GstSoupHTTPSrc *src = GST_SOUP_HTTP_SRC (gobject);
+
+ GST_DEBUG_OBJECT (src, "dispose");
+
+ gst_soup_http_src_session_close (src);
+
+ if (src->external_session) {
+ g_object_unref (src->external_session);
+ src->external_session = NULL;
+ }
+
+ G_OBJECT_CLASS (parent_class)->dispose (gobject);
+ }
+
+ static void
+ gst_soup_http_src_finalize (GObject * gobject)
+ {
+ GstSoupHTTPSrc *src = GST_SOUP_HTTP_SRC (gobject);
+
+ GST_DEBUG_OBJECT (src, "finalize");
+
+ g_mutex_clear (&src->mutex);
+ g_cond_clear (&src->have_headers_cond);
+ g_object_unref (src->cancellable);
+ g_free (src->location);
+ g_free (src->redirection_uri);
+ g_free (src->user_agent);
+ if (src->proxy != NULL) {
+ soup_uri_free (src->proxy);
+ }
+ g_free (src->user_id);
+ g_free (src->user_pw);
+ g_free (src->proxy_id);
+ g_free (src->proxy_pw);
+ g_strfreev (src->cookies);
+
+ if (src->extra_headers) {
+ gst_structure_free (src->extra_headers);
+ src->extra_headers = NULL;
+ }
+
+ g_free (src->ssl_ca_file);
+
+ if (src->tls_database)
+ g_object_unref (src->tls_database);
+ g_free (src->method);
+
+ if (src->tls_interaction)
+ g_object_unref (src->tls_interaction);
+
+ G_OBJECT_CLASS (parent_class)->finalize (gobject);
+ }
+
+ static void
+ gst_soup_http_src_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+ GstSoupHTTPSrc *src = GST_SOUP_HTTP_SRC (object);
+
+ switch (prop_id) {
+ case PROP_LOCATION:
+ {
+ const gchar *location;
+
+ location = g_value_get_string (value);
+
+ if (location == NULL) {
+ GST_WARNING ("location property cannot be NULL");
+ goto done;
+ }
+ if (!gst_soup_http_src_set_location (src, location, NULL)) {
+ GST_WARNING ("badly formatted location");
+ goto done;
+ }
+ break;
+ }
+ case PROP_USER_AGENT:
+ g_free (src->user_agent);
+ src->user_agent = g_value_dup_string (value);
+ break;
+ case PROP_IRADIO_MODE:
+ src->iradio_mode = g_value_get_boolean (value);
+ break;
+ case PROP_AUTOMATIC_REDIRECT:
+ src->automatic_redirect = g_value_get_boolean (value);
+ break;
+ case PROP_PROXY:
+ {
+ const gchar *proxy;
+
+ proxy = g_value_get_string (value);
+ if (!gst_soup_http_src_set_proxy (src, proxy)) {
+ GST_WARNING ("badly formatted proxy URI");
+ goto done;
+ }
+ break;
+ }
+ case PROP_COOKIES:
+ g_strfreev (src->cookies);
+ src->cookies = g_strdupv (g_value_get_boxed (value));
+ break;
+ case PROP_IS_LIVE:
+ gst_base_src_set_live (GST_BASE_SRC (src), g_value_get_boolean (value));
+ break;
+ case PROP_USER_ID:
+ g_free (src->user_id);
+ src->user_id = g_value_dup_string (value);
+ break;
+ case PROP_USER_PW:
+ g_free (src->user_pw);
+ src->user_pw = g_value_dup_string (value);
+ break;
+ case PROP_PROXY_ID:
+ g_free (src->proxy_id);
+ src->proxy_id = g_value_dup_string (value);
+ break;
+ case PROP_PROXY_PW:
+ g_free (src->proxy_pw);
+ src->proxy_pw = g_value_dup_string (value);
+ break;
+ case PROP_TIMEOUT:
+ src->timeout = g_value_get_uint (value);
+ break;
+ case PROP_EXTRA_HEADERS:{
+ const GstStructure *s = gst_value_get_structure (value);
+
+ if (src->extra_headers)
+ gst_structure_free (src->extra_headers);
+
+ src->extra_headers = s ? gst_structure_copy (s) : NULL;
+ break;
+ }
+ case PROP_SOUP_LOG_LEVEL:
+ src->log_level = g_value_get_enum (value);
+ break;
+ case PROP_COMPRESS:
+ src->compress = g_value_get_boolean (value);
+ break;
+ case PROP_KEEP_ALIVE:
+ src->keep_alive = g_value_get_boolean (value);
+ break;
+ case PROP_SSL_STRICT:
+ src->ssl_strict = g_value_get_boolean (value);
+ break;
+ case PROP_SSL_CA_FILE:
+ g_free (src->ssl_ca_file);
+ src->ssl_ca_file = g_value_dup_string (value);
+ break;
+ case PROP_SSL_USE_SYSTEM_CA_FILE:
+ src->ssl_use_system_ca_file = g_value_get_boolean (value);
+ break;
+ case PROP_TLS_DATABASE:
+ g_clear_object (&src->tls_database);
+ src->tls_database = g_value_dup_object (value);
+ break;
+ case PROP_TLS_INTERACTION:
+ g_clear_object (&src->tls_interaction);
+ src->tls_interaction = g_value_dup_object (value);
+ break;
+ case PROP_RETRIES:
+ src->max_retries = g_value_get_int (value);
+ break;
+ case PROP_METHOD:
+ g_free (src->method);
+ src->method = g_value_dup_string (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ done:
+ return;
+ }
+
+ static void
+ gst_soup_http_src_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+ {
+ GstSoupHTTPSrc *src = GST_SOUP_HTTP_SRC (object);
+
+ switch (prop_id) {
+ case PROP_LOCATION:
+ g_value_set_string (value, src->location);
+ break;
+ case PROP_USER_AGENT:
+ g_value_set_string (value, src->user_agent);
+ break;
+ case PROP_AUTOMATIC_REDIRECT:
+ g_value_set_boolean (value, src->automatic_redirect);
+ break;
+ case PROP_PROXY:
+ if (src->proxy == NULL)
+ g_value_set_static_string (value, "");
+ else {
+ char *proxy = soup_uri_to_string (src->proxy, FALSE);
+ g_value_set_string (value, proxy);
+ g_free (proxy);
+ }
+ break;
+ case PROP_COOKIES:
+ g_value_set_boxed (value, g_strdupv (src->cookies));
+ break;
+ case PROP_IS_LIVE:
+ g_value_set_boolean (value, gst_base_src_is_live (GST_BASE_SRC (src)));
+ break;
+ case PROP_IRADIO_MODE:
+ g_value_set_boolean (value, src->iradio_mode);
+ break;
+ case PROP_USER_ID:
+ g_value_set_string (value, src->user_id);
+ break;
+ case PROP_USER_PW:
+ g_value_set_string (value, src->user_pw);
+ break;
+ case PROP_PROXY_ID:
+ g_value_set_string (value, src->proxy_id);
+ break;
+ case PROP_PROXY_PW:
+ g_value_set_string (value, src->proxy_pw);
+ break;
+ case PROP_TIMEOUT:
+ g_value_set_uint (value, src->timeout);
+ break;
+ case PROP_EXTRA_HEADERS:
+ gst_value_set_structure (value, src->extra_headers);
+ break;
+ case PROP_SOUP_LOG_LEVEL:
+ g_value_set_enum (value, src->log_level);
+ break;
+ case PROP_COMPRESS:
+ g_value_set_boolean (value, src->compress);
+ break;
+ case PROP_KEEP_ALIVE:
+ g_value_set_boolean (value, src->keep_alive);
+ break;
+ case PROP_SSL_STRICT:
+ g_value_set_boolean (value, src->ssl_strict);
+ break;
+ case PROP_SSL_CA_FILE:
+ g_value_set_string (value, src->ssl_ca_file);
+ break;
+ case PROP_SSL_USE_SYSTEM_CA_FILE:
+ g_value_set_boolean (value, src->ssl_use_system_ca_file);
+ break;
+ case PROP_TLS_DATABASE:
+ g_value_set_object (value, src->tls_database);
+ break;
+ case PROP_TLS_INTERACTION:
+ g_value_set_object (value, src->tls_interaction);
+ break;
+ case PROP_RETRIES:
+ g_value_set_int (value, src->max_retries);
+ break;
+ case PROP_METHOD:
+ g_value_set_string (value, src->method);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static gchar *
+ gst_soup_http_src_unicodify (const gchar * str)
+ {
+ const gchar *env_vars[] = { "GST_ICY_TAG_ENCODING",
+ "GST_TAG_ENCODING", NULL
+ };
+
+ return gst_tag_freeform_string_to_utf8 (str, -1, env_vars);
+ }
+
+ static void
+ gst_soup_http_src_cancel_message (GstSoupHTTPSrc * src)
+ {
+ g_cancellable_cancel (src->cancellable);
+ g_cond_signal (&src->have_headers_cond);
+ }
+
+ static gboolean
+ gst_soup_http_src_add_range_header (GstSoupHTTPSrc * src, guint64 offset,
+ guint64 stop_offset)
+ {
+ gchar buf[64];
+ gint rc;
+
+ soup_message_headers_remove (src->msg->request_headers, "Range");
- if (offset || stop_offset != -1) {
++
++/* These changes are needed to enable seekable content from the server.
++ We have observed that, for a few specific networks (VODAFONE), without the
++ above Range header, YouTube sends non-seekable content to the client. */
++#ifndef TIZEN_FEATURE_SOUP_MODIFICATION
++ if (offset || stop_offset != -1)
++#endif
++ {
+ if (stop_offset != -1) {
+ g_assert (offset != stop_offset);
+
+ rc = g_snprintf (buf, sizeof (buf), "bytes=%" G_GUINT64_FORMAT "-%"
+ G_GUINT64_FORMAT, offset, (stop_offset > 0) ? stop_offset - 1 :
+ stop_offset);
+ } else {
+ rc = g_snprintf (buf, sizeof (buf), "bytes=%" G_GUINT64_FORMAT "-",
+ offset);
+ }
+ if (rc >= sizeof (buf) || rc < 0)
+ return FALSE;
+ soup_message_headers_append (src->msg->request_headers, "Range", buf);
+ }
+ src->read_position = offset;
+ return TRUE;
+ }
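+
+ /* For illustration (offsets arbitrary), the Range header this produces:
+  *
+  *   offset=0,    stop_offset=-1  -> "Range: bytes=0-"
+  *   offset=1000, stop_offset=-1  -> "Range: bytes=1000-"
+  *   offset=0,    stop_offset=500 -> "Range: bytes=0-499"
+  */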
+
+ static gboolean
+ _append_extra_header (GQuark field_id, const GValue * value, gpointer user_data)
+ {
+ GstSoupHTTPSrc *src = GST_SOUP_HTTP_SRC (user_data);
+ const gchar *field_name = g_quark_to_string (field_id);
+ gchar *field_content = NULL;
+
+ if (G_VALUE_TYPE (value) == G_TYPE_STRING) {
+ field_content = g_value_dup_string (value);
+ } else {
+ GValue dest = { 0, };
+
+ g_value_init (&dest, G_TYPE_STRING);
+ if (g_value_transform (value, &dest)) {
+ field_content = g_value_dup_string (&dest);
+ }
+ }
+
+ if (field_content == NULL) {
+ GST_ERROR_OBJECT (src, "extra-headers field '%s' contains no value "
+ "or can't be converted to a string", field_name);
+ return FALSE;
+ }
+
+ GST_DEBUG_OBJECT (src, "Appending extra header: \"%s: %s\"", field_name,
+ field_content);
+ soup_message_headers_append (src->msg->request_headers, field_name,
+ field_content);
+
+ g_free (field_content);
+
+ return TRUE;
+ }
+
+ static gboolean
+ _append_extra_headers (GQuark field_id, const GValue * value,
+ gpointer user_data)
+ {
+ if (G_VALUE_TYPE (value) == GST_TYPE_ARRAY) {
+ guint n = gst_value_array_get_size (value);
+ guint i;
+
+ for (i = 0; i < n; i++) {
+ const GValue *v = gst_value_array_get_value (value, i);
+
+ if (!_append_extra_header (field_id, v, user_data))
+ return FALSE;
+ }
+ } else if (G_VALUE_TYPE (value) == GST_TYPE_LIST) {
+ guint n = gst_value_list_get_size (value);
+ guint i;
+
+ for (i = 0; i < n; i++) {
+ const GValue *v = gst_value_list_get_value (value, i);
+
+ if (!_append_extra_header (field_id, v, user_data))
+ return FALSE;
+ }
+ } else {
+ return _append_extra_header (field_id, value, user_data);
+ }
+
+ return TRUE;
+ }
+
+
+ static gboolean
+ gst_soup_http_src_add_extra_headers (GstSoupHTTPSrc * src)
+ {
+ if (!src->extra_headers)
+ return TRUE;
+
+ return gst_structure_foreach (src->extra_headers, _append_extra_headers, src);
+ }
+
+ static gboolean
+ gst_soup_http_src_session_open (GstSoupHTTPSrc * src)
+ {
+ if (src->session) {
+ GST_DEBUG_OBJECT (src, "Session is already open");
+ return TRUE;
+ }
+
+ if (!src->location) {
+ GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, (_("No URL set.")),
+ ("Missing location property"));
+ return FALSE;
+ }
+
+ if (!src->session) {
+ GstQuery *query;
+ gboolean can_share = (src->timeout == DEFAULT_TIMEOUT)
+ && (src->cookies == NULL)
+ && (src->ssl_strict == DEFAULT_SSL_STRICT)
+ && (src->tls_interaction == NULL) && (src->proxy == NULL)
+ && (src->tls_database == DEFAULT_TLS_DATABASE)
+ && (src->ssl_ca_file == DEFAULT_SSL_CA_FILE)
+ && (src->ssl_use_system_ca_file == DEFAULT_SSL_USE_SYSTEM_CA_FILE);
+
+ query = gst_query_new_context (GST_SOUP_SESSION_CONTEXT);
+ if (gst_pad_peer_query (GST_BASE_SRC_PAD (src), query)) {
+ GstContext *context;
+
+ gst_query_parse_context (query, &context);
+ gst_element_set_context (GST_ELEMENT_CAST (src), context);
+ } else {
+ GstMessage *message;
+
+ message =
+ gst_message_new_need_context (GST_OBJECT_CAST (src),
+ GST_SOUP_SESSION_CONTEXT);
+ gst_element_post_message (GST_ELEMENT_CAST (src), message);
+ }
+ gst_query_unref (query);
+
+ GST_OBJECT_LOCK (src);
+ if (src->external_session && (can_share || src->forced_external_session)) {
+ GST_DEBUG_OBJECT (src, "Using external session %p",
+ src->external_session);
+ src->session = g_object_ref (src->external_session);
+ src->session_is_shared = TRUE;
+ } else {
+ GST_DEBUG_OBJECT (src, "Creating session (can share %d)", can_share);
+
+ /* We explicitly set User-Agent to NULL here and overwrite it per message
+ * to be able to have the same session with different User-Agents per
+ * source */
+ if (src->proxy == NULL) {
+ src->session =
+ soup_session_new_with_options (SOUP_SESSION_USER_AGENT,
+ NULL, SOUP_SESSION_TIMEOUT, src->timeout,
+ SOUP_SESSION_SSL_STRICT, src->ssl_strict,
+ SOUP_SESSION_TLS_INTERACTION, src->tls_interaction, NULL);
+ } else {
+ src->session =
+ soup_session_new_with_options (SOUP_SESSION_PROXY_URI, src->proxy,
+ SOUP_SESSION_TIMEOUT, src->timeout,
+ SOUP_SESSION_SSL_STRICT, src->ssl_strict,
+ SOUP_SESSION_USER_AGENT, NULL,
+ SOUP_SESSION_TLS_INTERACTION, src->tls_interaction, NULL);
+ }
+
+ if (src->session) {
+ gst_soup_util_log_setup (src->session, src->log_level,
+ GST_ELEMENT (src));
+ soup_session_add_feature_by_type (src->session,
+ SOUP_TYPE_CONTENT_DECODER);
+ soup_session_add_feature_by_type (src->session, SOUP_TYPE_COOKIE_JAR);
+
+ if (can_share) {
+ GstContext *context;
+ GstMessage *message;
+ GstStructure *s;
+
+ GST_DEBUG_OBJECT (src, "Sharing session %p", src->session);
+ src->session_is_shared = TRUE;
+
+ /* Unset the limit on the maximum number of allowed connections */
+ g_object_set (src->session, SOUP_SESSION_MAX_CONNS, G_MAXINT,
+ SOUP_SESSION_MAX_CONNS_PER_HOST, G_MAXINT, NULL);
+
+ context = gst_context_new (GST_SOUP_SESSION_CONTEXT, TRUE);
+ s = gst_context_writable_structure (context);
+ gst_structure_set (s, "session", SOUP_TYPE_SESSION, src->session,
+ "force", G_TYPE_BOOLEAN, FALSE, NULL);
+
+ gst_object_ref (src->session);
+ GST_OBJECT_UNLOCK (src);
+ gst_element_set_context (GST_ELEMENT_CAST (src), context);
+ message =
+ gst_message_new_have_context (GST_OBJECT_CAST (src), context);
+ gst_element_post_message (GST_ELEMENT_CAST (src), message);
+ GST_OBJECT_LOCK (src);
+ gst_object_unref (src->session);
+ } else {
+ src->session_is_shared = FALSE;
+ }
+ }
+ }
+
+ if (!src->session) {
+ GST_ELEMENT_ERROR (src, LIBRARY, INIT,
+ (NULL), ("Failed to create session"));
+ GST_OBJECT_UNLOCK (src);
+ return FALSE;
+ }
+
+ g_signal_connect (src->session, "authenticate",
+ G_CALLBACK (gst_soup_http_src_authenticate_cb), src);
+
+ if (!src->session_is_shared) {
+ if (src->tls_database)
+ g_object_set (src->session, "tls-database", src->tls_database, NULL);
+ else if (src->ssl_ca_file)
+ g_object_set (src->session, "ssl-ca-file", src->ssl_ca_file, NULL);
+ else
+ g_object_set (src->session, "ssl-use-system-ca-file",
+ src->ssl_use_system_ca_file, NULL);
+ }
+ GST_OBJECT_UNLOCK (src);
+ } else {
+ GST_DEBUG_OBJECT (src, "Re-using session");
+ }
+
+ return TRUE;
+ }
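+
+ /* For illustration: the context shared above carries a structure of the form
+  *
+  *   gst.soup.session { session = (SoupSession) ..., force = (boolean) false }
+  *
+  * A downstream element that answers the context query with force = true makes
+  * this source adopt that session even if its own properties would normally
+  * prevent sharing (tracked via forced_external_session). */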
+
+ static void
+ gst_soup_http_src_session_close (GstSoupHTTPSrc * src)
+ {
+ GST_DEBUG_OBJECT (src, "Closing session");
+
+ g_mutex_lock (&src->mutex);
+ if (src->msg) {
+ soup_session_cancel_message (src->session, src->msg, SOUP_STATUS_CANCELLED);
+ g_object_unref (src->msg);
+ src->msg = NULL;
+ }
+
+ if (src->session) {
+ if (!src->session_is_shared)
+ soup_session_abort (src->session);
+ g_signal_handlers_disconnect_by_func (src->session,
+ G_CALLBACK (gst_soup_http_src_authenticate_cb), src);
+ g_object_unref (src->session);
+ src->session = NULL;
+ }
+
+ g_mutex_unlock (&src->mutex);
+ }
+
+ static void
+ gst_soup_http_src_authenticate_cb (SoupSession * session, SoupMessage * msg,
+ SoupAuth * auth, gboolean retrying, GstSoupHTTPSrc * src)
+ {
+ /* Might be from another user of the shared session */
+ if (!GST_IS_SOUP_HTTP_SRC (src) || msg != src->msg)
+ return;
+
+ if (!retrying) {
+ /* First time authentication only, if we fail and are called again with retry true fall through */
+ if (msg->status_code == SOUP_STATUS_UNAUTHORIZED) {
+ if (src->user_id && src->user_pw)
+ soup_auth_authenticate (auth, src->user_id, src->user_pw);
+ } else if (msg->status_code == SOUP_STATUS_PROXY_AUTHENTICATION_REQUIRED) {
+ if (src->proxy_id && src->proxy_pw)
+ soup_auth_authenticate (auth, src->proxy_id, src->proxy_pw);
+ }
+ }
+ }
+
+ static void
+ insert_http_header (const gchar * name, const gchar * value, gpointer user_data)
+ {
+ GstStructure *headers = user_data;
+ const GValue *gv;
+
+ if (!g_utf8_validate (name, -1, NULL) || !g_utf8_validate (value, -1, NULL))
+ return;
+
+ gv = gst_structure_get_value (headers, name);
+ if (gv && GST_VALUE_HOLDS_ARRAY (gv)) {
+ GValue v = G_VALUE_INIT;
+
+ g_value_init (&v, G_TYPE_STRING);
+ g_value_set_string (&v, value);
+ gst_value_array_append_value ((GValue *) gv, &v);
+ g_value_unset (&v);
+ } else if (gv && G_VALUE_HOLDS_STRING (gv)) {
+ GValue arr = G_VALUE_INIT;
+ GValue v = G_VALUE_INIT;
+ const gchar *old_value = g_value_get_string (gv);
+
+ g_value_init (&arr, GST_TYPE_ARRAY);
+ g_value_init (&v, G_TYPE_STRING);
+ g_value_set_string (&v, old_value);
+ gst_value_array_append_value (&arr, &v);
+ g_value_set_string (&v, value);
+ gst_value_array_append_value (&arr, &v);
+
+ gst_structure_set_value (headers, name, &arr);
+ g_value_unset (&v);
+ g_value_unset (&arr);
+ } else {
+ gst_structure_set (headers, name, G_TYPE_STRING, value, NULL);
+ }
+ }
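+
+ /* For illustration (header values arbitrary): a header that occurs twice,
+  * e.g.
+  *
+  *   Set-Cookie: a=1
+  *   Set-Cookie: b=2
+  *
+  * is merged into a GstValueArray, so the headers structure posted with the
+  * http-headers message ends up with Set-Cookie = < "a=1", "b=2" >. */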
+
++#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
++static void
++gst_soup_http_src_headers_foreach (const gchar * name, const gchar * val,
++ gpointer src)
++{
++ GST_INFO_OBJECT (src, " %s: %s", name, val);
++
++ if (g_ascii_strcasecmp (name, "Dash-Oldest-Segment") == 0) {
++ if (val) {
++ GstSoupHTTPSrc * tmp = src;
++ tmp->dash_oldest_segment = g_strdup (val);
++ GST_INFO_OBJECT (src, "Dash-Oldest-Segment set as %s ", tmp->dash_oldest_segment);
++ }
++ } else if (g_ascii_strcasecmp (name, "Dash-Newest-Segment") == 0) {
++ if (val) {
++ GstSoupHTTPSrc * tmp = src;
++ tmp->dash_newest_segment = g_strdup (val);
++ GST_INFO_OBJECT (src, "Dash-Newest-Segment set as %s ", tmp->dash_newest_segment);
++ }
++ }
++}
++#endif
++
+ static GstFlowReturn
+ gst_soup_http_src_got_headers (GstSoupHTTPSrc * src, SoupMessage * msg)
+ {
+ const char *value;
+ GstTagList *tag_list;
+ GstBaseSrc *basesrc;
+ guint64 newsize;
+ GHashTable *params = NULL;
+ GstEvent *http_headers_event;
+ GstStructure *http_headers, *headers;
+ const gchar *accept_ranges;
++#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
++ gint64 start = 0, stop = 0, total = 0;
++#endif
+
- GST_INFO_OBJECT (src, "got headers");
-
++ GST_INFO_OBJECT (src, "got headers : %d", msg->status_code);
++#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
++ soup_message_headers_foreach (msg->response_headers,
++ gst_soup_http_src_headers_foreach, src);
++#endif
+ if (msg->status_code == SOUP_STATUS_PROXY_AUTHENTICATION_REQUIRED &&
+ src->proxy_id && src->proxy_pw) {
+ /* wait for authenticate callback */
+ return GST_FLOW_OK;
+ }
+
+ http_headers = gst_structure_new_empty ("http-headers");
+ gst_structure_set (http_headers, "uri", G_TYPE_STRING, src->location,
+ "http-status-code", G_TYPE_UINT, msg->status_code, NULL);
+ if (src->redirection_uri)
+ gst_structure_set (http_headers, "redirection-uri", G_TYPE_STRING,
+ src->redirection_uri, NULL);
+ headers = gst_structure_new_empty ("request-headers");
+ soup_message_headers_foreach (msg->request_headers, insert_http_header,
+ headers);
+ gst_structure_set (http_headers, "request-headers", GST_TYPE_STRUCTURE,
+ headers, NULL);
+ gst_structure_free (headers);
+ headers = gst_structure_new_empty ("response-headers");
+ soup_message_headers_foreach (msg->response_headers, insert_http_header,
+ headers);
+ gst_structure_set (http_headers, "response-headers", GST_TYPE_STRUCTURE,
+ headers, NULL);
+ gst_structure_free (headers);
+
+ gst_element_post_message (GST_ELEMENT_CAST (src),
+ gst_message_new_element (GST_OBJECT_CAST (src),
+ gst_structure_copy (http_headers)));
+
+ if (msg->status_code == SOUP_STATUS_UNAUTHORIZED) {
+ /* force an error */
+ gst_structure_free (http_headers);
+ return gst_soup_http_src_parse_status (msg, src);
+ }
+
+ src->got_headers = TRUE;
+ g_cond_broadcast (&src->have_headers_cond);
+
+ http_headers_event =
+ gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM_STICKY, http_headers);
+ gst_event_replace (&src->http_headers_event, http_headers_event);
+ gst_event_unref (http_headers_event);
+
++#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
++ /* Parse DLNA OP CODE */
++ if ((value = soup_message_headers_get_one
++ (msg->response_headers, "contentFeatures.dlna.org")) != NULL) {
++ gchar **token = NULL;
++ gchar **ptr = NULL;
++
++ GST_DEBUG_OBJECT (src, "DLNA server response");
++
++ token = g_strsplit (value, ";", 0);
++ for (ptr = token ; *ptr ; ptr++) {
++ gchar *tmp = NULL;
++ gchar *op_code = NULL;
++
++ if (!strlen (*ptr))
++ continue;
++
++ tmp = g_ascii_strup (*ptr, strlen (*ptr));
++ if (!strstr (tmp, "DLNA.ORG_OP")) {
++ g_free (tmp);
++ continue;
++ }
++
++ g_free (tmp);
++
++ op_code = strchr (*ptr, '=');
++ if (op_code) {
++ op_code++;
++
++ src->dlna_opt = (atoi (op_code) / 10 << 1) | (atoi (op_code) % 10);
++ GST_DEBUG_OBJECT (src, "dlna op code: %s (0x%X)", op_code, src->dlna_opt);
++ break;
++ }
++ }
++ g_strfreev (token);
++ }
++#endif
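++
++/* For illustration, a (sample) DLNA header and the resulting flags:
++ *
++ * contentFeatures.dlna.org: DLNA.ORG_OP=01;DLNA.ORG_CI=0
++ * -> op_code "01" -> dlna_opt = (0 << 1) | 1 = DLNA_OP_BYTE_SEEK
++ *
++ * The first digit of the op code enables timed seek (DLNA_OP_TIMED_SEEK,
++ * 0x02), the second enables byte seek (DLNA_OP_BYTE_SEEK, 0x01). */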
++
+ /* Parse Content-Length. */
+ if (SOUP_STATUS_IS_SUCCESSFUL (msg->status_code) &&
+ (soup_message_headers_get_encoding (msg->response_headers) ==
+ SOUP_ENCODING_CONTENT_LENGTH)) {
++#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
++ if (msg->status_code == SOUP_STATUS_PARTIAL_CONTENT) {
++ newsize = src->request_position +
++ soup_message_headers_get_content_length (msg->response_headers);
++ } else {
++ if (soup_message_headers_get_content_range(msg->response_headers, &start, &stop, &total) && (total > 0)) {
++ GST_DEBUG_OBJECT (src, "get range header : %" G_GINT64_FORMAT
++ "~%" G_GINT64_FORMAT"/%"G_GINT64_FORMAT, start, stop, total);
++ newsize = (guint64)total;
++ } else {
++ if ((src->have_size) && (src->content_size <= src->request_position)) {
++ newsize = src->content_size;
++ } else {
++ newsize = soup_message_headers_get_content_length (msg->response_headers);
++ }
++ }
++ }
++#else
+ newsize = src->request_position +
+ soup_message_headers_get_content_length (msg->response_headers);
++#endif
+ if (!src->have_size || (src->content_size != newsize)) {
+ src->content_size = newsize;
+ src->have_size = TRUE;
+ src->seekable = TRUE;
+ GST_DEBUG_OBJECT (src, "size = %" G_GUINT64_FORMAT, src->content_size);
+
+ basesrc = GST_BASE_SRC_CAST (src);
+ basesrc->segment.duration = src->content_size;
+ gst_element_post_message (GST_ELEMENT (src),
+ gst_message_new_duration_changed (GST_OBJECT (src)));
+ }
+ }
+
+ /* If the server reports Accept-Ranges: none we don't have to try
+ * doing range requests at all
+ */
+ if ((accept_ranges =
+ soup_message_headers_get_one (msg->response_headers,
+ "Accept-Ranges"))) {
+ if (g_ascii_strcasecmp (accept_ranges, "none") == 0)
+ src->seekable = FALSE;
+ }
++#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
++ else if (src->dlna_opt & DLNA_OP_BYTE_SEEK) {
++ if (src->have_size) {
++ GST_DEBUG_OBJECT (src, "DLNA server is seekable");
++ src->seekable = TRUE;
++ }
++ }
++ /* The Range request header is always included.
++ * @ref gst_soup_http_src_add_range_header() */
++ else if ((msg->status_code == SOUP_STATUS_OK) &&
++ (soup_message_headers_get_content_range (msg->response_headers, &start, &stop, &total) == FALSE)) {
++ GST_DEBUG_OBJECT (src, "there is no accept range header");
++ src->seekable = FALSE;
++ }
++#endif
+
+ /* Icecast stuff */
+ tag_list = gst_tag_list_new_empty ();
+
+ if ((value =
+ soup_message_headers_get_one (msg->response_headers,
+ "icy-metaint")) != NULL) {
+ gint icy_metaint;
+
+ if (g_utf8_validate (value, -1, NULL)) {
+ icy_metaint = atoi (value);
+
+ GST_DEBUG_OBJECT (src, "icy-metaint: %s (parsed: %d)", value,
+ icy_metaint);
+ if (icy_metaint > 0) {
+ if (src->src_caps)
+ gst_caps_unref (src->src_caps);
+
+ src->src_caps = gst_caps_new_simple ("application/x-icy",
+ "metadata-interval", G_TYPE_INT, icy_metaint, NULL);
+
+ gst_base_src_set_caps (GST_BASE_SRC (src), src->src_caps);
+ }
+ }
+ }
+ if ((value =
+ soup_message_headers_get_content_type (msg->response_headers,
+ &params)) != NULL) {
+ if (!g_utf8_validate (value, -1, NULL)) {
+ GST_WARNING_OBJECT (src, "Content-Type is invalid UTF-8");
+ } else if (g_ascii_strcasecmp (value, "audio/L16") == 0) {
+ gint channels = 2;
+ gint rate = 44100;
+ char *param;
+
+ GST_DEBUG_OBJECT (src, "Content-Type: %s", value);
+
+ if (src->src_caps) {
+ gst_caps_unref (src->src_caps);
+ src->src_caps = NULL;
+ }
+
+ param = g_hash_table_lookup (params, "channels");
+ if (param != NULL) {
+ guint64 val = g_ascii_strtoull (param, NULL, 10);
+ if (val < 64)
+ channels = val;
+ else
+ channels = 0;
+ }
+
+ param = g_hash_table_lookup (params, "rate");
+ if (param != NULL) {
+ guint64 val = g_ascii_strtoull (param, NULL, 10);
+ if (val < G_MAXINT)
+ rate = val;
+ else
+ rate = 0;
+ }
+
+ if (rate > 0 && channels > 0) {
+ src->src_caps = gst_caps_new_simple ("audio/x-unaligned-raw",
+ "format", G_TYPE_STRING, "S16BE",
+ "layout", G_TYPE_STRING, "interleaved",
+ "channels", G_TYPE_INT, channels, "rate", G_TYPE_INT, rate, NULL);
+
+ gst_base_src_set_caps (GST_BASE_SRC (src), src->src_caps);
+ }
+ } else {
+ GST_DEBUG_OBJECT (src, "Content-Type: %s", value);
+
+ /* Set the Content-Type field on the caps */
+ if (src->src_caps) {
+ src->src_caps = gst_caps_make_writable (src->src_caps);
+ gst_caps_set_simple (src->src_caps, "content-type", G_TYPE_STRING,
+ value, NULL);
+ gst_base_src_set_caps (GST_BASE_SRC (src), src->src_caps);
+ }
+ }
+ }
+
+ if (params != NULL)
+ g_hash_table_destroy (params);
+
+ if ((value =
+ soup_message_headers_get_one (msg->response_headers,
+ "icy-name")) != NULL) {
+ if (g_utf8_validate (value, -1, NULL)) {
+ g_free (src->iradio_name);
+ src->iradio_name = gst_soup_http_src_unicodify (value);
+ if (src->iradio_name) {
+ gst_tag_list_add (tag_list, GST_TAG_MERGE_REPLACE, GST_TAG_ORGANIZATION,
+ src->iradio_name, NULL);
+ }
+ }
+ }
+ if ((value =
+ soup_message_headers_get_one (msg->response_headers,
+ "icy-genre")) != NULL) {
+ if (g_utf8_validate (value, -1, NULL)) {
+ g_free (src->iradio_genre);
+ src->iradio_genre = gst_soup_http_src_unicodify (value);
+ if (src->iradio_genre) {
+ gst_tag_list_add (tag_list, GST_TAG_MERGE_REPLACE, GST_TAG_GENRE,
+ src->iradio_genre, NULL);
+ }
+ }
+ }
+ if ((value = soup_message_headers_get_one (msg->response_headers, "icy-url"))
+ != NULL) {
+ if (g_utf8_validate (value, -1, NULL)) {
+ g_free (src->iradio_url);
+ src->iradio_url = gst_soup_http_src_unicodify (value);
+ if (src->iradio_url) {
+ gst_tag_list_add (tag_list, GST_TAG_MERGE_REPLACE, GST_TAG_LOCATION,
+ src->iradio_url, NULL);
+ }
+ }
+ }
+ if (!gst_tag_list_is_empty (tag_list)) {
+ GST_DEBUG_OBJECT (src,
+ "calling gst_element_found_tags with %" GST_PTR_FORMAT, tag_list);
+ gst_pad_push_event (GST_BASE_SRC_PAD (src), gst_event_new_tag (tag_list));
+ } else {
+ gst_tag_list_unref (tag_list);
+ }
+
+ /* Handle HTTP errors. */
+ return gst_soup_http_src_parse_status (msg, src);
+ }
+
+ static GstBuffer *
+ gst_soup_http_src_alloc_buffer (GstSoupHTTPSrc * src)
+ {
+ GstBaseSrc *basesrc = GST_BASE_SRC_CAST (src);
+ GstFlowReturn rc;
+ GstBuffer *gstbuf;
+
+ rc = GST_BASE_SRC_CLASS (parent_class)->alloc (basesrc, -1,
+ basesrc->blocksize, &gstbuf);
+ if (G_UNLIKELY (rc != GST_FLOW_OK)) {
+ return NULL;
+ }
+
+ return gstbuf;
+ }
+
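+ /* Post an element error that carries the HTTP status code and any redirect
+ * URI as structured details, so applications can inspect them. */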
+ #define SOUP_HTTP_SRC_ERROR(src,soup_msg,cat,code,error_message) \
+ do { \
+ GST_ELEMENT_ERROR_WITH_DETAILS ((src), cat, code, ("%s", error_message), \
+ ("%s (%d), URL: %s, Redirect to: %s", (soup_msg)->reason_phrase, \
+ (soup_msg)->status_code, (src)->location, GST_STR_NULL ((src)->redirection_uri)), \
+ ("http-status-code", G_TYPE_UINT, (soup_msg)->status_code, \
+ "http-redirect-uri", G_TYPE_STRING, GST_STR_NULL ((src)->redirection_uri), NULL)); \
+ } while(0)
+
+ static GstFlowReturn
+ gst_soup_http_src_parse_status (SoupMessage * msg, GstSoupHTTPSrc * src)
+ {
+ if (msg->method == SOUP_METHOD_HEAD) {
+ if (!SOUP_STATUS_IS_SUCCESSFUL (msg->status_code))
+ GST_DEBUG_OBJECT (src, "Ignoring error %d during HEAD request",
+ msg->status_code);
+ return GST_FLOW_OK;
+ }
+
+ if (SOUP_STATUS_IS_TRANSPORT_ERROR (msg->status_code)) {
+ switch (msg->status_code) {
+ case SOUP_STATUS_CANT_RESOLVE:
+ case SOUP_STATUS_CANT_RESOLVE_PROXY:
+ SOUP_HTTP_SRC_ERROR (src, msg, RESOURCE, NOT_FOUND,
+ _("Could not resolve server name."));
+ return GST_FLOW_ERROR;
+ case SOUP_STATUS_CANT_CONNECT:
+ case SOUP_STATUS_CANT_CONNECT_PROXY:
+ SOUP_HTTP_SRC_ERROR (src, msg, RESOURCE, OPEN_READ,
+ _("Could not establish connection to server."));
+ return GST_FLOW_ERROR;
+ case SOUP_STATUS_SSL_FAILED:
+ SOUP_HTTP_SRC_ERROR (src, msg, RESOURCE, OPEN_READ,
+ _("Secure connection setup failed."));
+ return GST_FLOW_ERROR;
+ case SOUP_STATUS_IO_ERROR:
+ if (src->max_retries == -1 || src->retry_count < src->max_retries)
+ return GST_FLOW_CUSTOM_ERROR;
+ SOUP_HTTP_SRC_ERROR (src, msg, RESOURCE, READ,
+ _("A network error occurred, or the server closed the connection "
+ "unexpectedly."));
+ return GST_FLOW_ERROR;
+ case SOUP_STATUS_MALFORMED:
+ SOUP_HTTP_SRC_ERROR (src, msg, RESOURCE, READ,
+ _("Server sent bad data."));
+ return GST_FLOW_ERROR;
+ case SOUP_STATUS_CANCELLED:
+ /* No error message when interrupted by program. */
+ break;
+ }
+ return GST_FLOW_OK;
+ }
+
+ if (SOUP_STATUS_IS_CLIENT_ERROR (msg->status_code) ||
+ SOUP_STATUS_IS_REDIRECTION (msg->status_code) ||
+ SOUP_STATUS_IS_SERVER_ERROR (msg->status_code)) {
+ const gchar *reason_phrase;
+
+ reason_phrase = msg->reason_phrase;
+ if (reason_phrase && !g_utf8_validate (reason_phrase, -1, NULL)) {
+ GST_ERROR_OBJECT (src, "Invalid UTF-8 in reason");
+ reason_phrase = "(invalid)";
+ }
+
+ /* Report HTTP error. */
+
+ /* when content_size is unknown and we have just finished receiving
+ * a body message, requests that go beyond the content limits will result
+ * in an error. Here we convert those to EOS */
+ if (msg->status_code == SOUP_STATUS_REQUESTED_RANGE_NOT_SATISFIABLE &&
+ src->have_body && (!src->have_size ||
+ (src->request_position >= src->content_size))) {
+ GST_DEBUG_OBJECT (src, "Requested range out of limits and received full "
+ "body, returning EOS");
+ return GST_FLOW_EOS;
+ }
+
+ /* FIXME: reason_phrase is not translated and not suitable for user
+ * error dialog according to libsoup documentation.
+ */
+ if (msg->status_code == SOUP_STATUS_NOT_FOUND) {
+ SOUP_HTTP_SRC_ERROR (src, msg, RESOURCE, NOT_FOUND, (reason_phrase));
+ } else if (msg->status_code == SOUP_STATUS_UNAUTHORIZED
+ || msg->status_code == SOUP_STATUS_PAYMENT_REQUIRED
+ || msg->status_code == SOUP_STATUS_FORBIDDEN
+ || msg->status_code == SOUP_STATUS_PROXY_AUTHENTICATION_REQUIRED) {
+ SOUP_HTTP_SRC_ERROR (src, msg, RESOURCE, NOT_AUTHORIZED, (reason_phrase));
+ } else {
+ SOUP_HTTP_SRC_ERROR (src, msg, RESOURCE, OPEN_READ, (reason_phrase));
+ }
+ return GST_FLOW_ERROR;
+ }
+
+ return GST_FLOW_OK;
+ }
+
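+ /* "restarted" signal handler: runs when libsoup restarts a message (e.g. to
+ * follow a redirect); record the target URI so URI queries can report it. */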
+ static void
+ gst_soup_http_src_restarted_cb (SoupMessage * msg, GstSoupHTTPSrc * src)
+ {
+ if (soup_session_would_redirect (src->session, msg)) {
+ src->redirection_uri =
+ soup_uri_to_string (soup_message_get_uri (msg), FALSE);
+ src->redirection_permanent =
+ (msg->status_code == SOUP_STATUS_MOVED_PERMANENTLY);
+ GST_DEBUG_OBJECT (src, "%u redirect to \"%s\" (permanent %d)",
+ msg->status_code, src->redirection_uri, src->redirection_permanent);
+ }
+ }
+
+ static gboolean
+ gst_soup_http_src_build_message (GstSoupHTTPSrc * src, const gchar * method)
+ {
+ g_return_val_if_fail (src->msg == NULL, FALSE);
+
+ src->msg = soup_message_new (method, src->location);
+ if (!src->msg) {
+ GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ,
+ ("Error parsing URL."), ("URL: %s", src->location));
+ return FALSE;
+ }
+
+ /* Duplicating the defaults of libsoup here. We don't want to set a
+ * User-Agent in the session as each source might have its own User-Agent
+ * set */
+ if (!src->user_agent || !*src->user_agent) {
+ gchar *user_agent =
+ g_strdup_printf ("libsoup/%u.%u.%u", soup_get_major_version (),
+ soup_get_minor_version (), soup_get_micro_version ());
+ soup_message_headers_append (src->msg->request_headers, "User-Agent",
+ user_agent);
+ g_free (user_agent);
+ } else if (g_str_has_suffix (src->user_agent, " ")) {
+ gchar *user_agent = g_strdup_printf ("%slibsoup/%u.%u.%u", src->user_agent,
+ soup_get_major_version (),
+ soup_get_minor_version (), soup_get_micro_version ());
+ soup_message_headers_append (src->msg->request_headers, "User-Agent",
+ user_agent);
+ g_free (user_agent);
+ } else {
+ soup_message_headers_append (src->msg->request_headers, "User-Agent",
+ src->user_agent);
+ }
+
+ if (!src->keep_alive) {
+ soup_message_headers_append (src->msg->request_headers, "Connection",
+ "close");
+ }
+ if (src->iradio_mode) {
+ soup_message_headers_append (src->msg->request_headers, "icy-metadata",
+ "1");
+ }
++
++#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
++ /* These changes are needed to enable seekable content from the server.
++ We have observed that, on a few specific networks (e.g. Vodafone), YouTube
++ serves non-seekable content to the client without the header below. */
++ soup_message_headers_append (src->msg->request_headers, "Accept-Ranges","bytes");
++#endif
++
+ if (src->cookies) {
+ gchar **cookie;
+
+ for (cookie = src->cookies; *cookie != NULL; cookie++) {
+ soup_message_headers_append (src->msg->request_headers, "Cookie",
+ *cookie);
+ }
+
+ soup_message_disable_feature (src->msg, SOUP_TYPE_COOKIE_JAR);
+ }
+
+ if (!src->compress) {
+ soup_message_headers_append (src->msg->request_headers, "Accept-Encoding",
+ "identity");
+ }
+
+ soup_message_set_flags (src->msg, SOUP_MESSAGE_OVERWRITE_CHUNKS |
+ (src->automatic_redirect ? 0 : SOUP_MESSAGE_NO_REDIRECT));
+
+ if (src->automatic_redirect) {
+ g_signal_connect (src->msg, "restarted",
+ G_CALLBACK (gst_soup_http_src_restarted_cb), src);
+ }
+
+ gst_soup_http_src_add_range_header (src, src->request_position,
+ src->stop_position);
+
+ gst_soup_http_src_add_extra_headers (src);
+
++#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
++ soup_message_headers_foreach (src->msg->request_headers,
++ gst_soup_http_src_headers_foreach, src);
++#endif
++
+ return TRUE;
+ }
+
+ /* Lock taken */
+ static GstFlowReturn
+ gst_soup_http_src_send_message (GstSoupHTTPSrc * src)
+ {
+ GstFlowReturn ret;
+ GError *error = NULL;
+
+ g_return_val_if_fail (src->msg != NULL, GST_FLOW_ERROR);
+ g_assert (src->input_stream == NULL);
+
+ src->input_stream =
+ soup_session_send (src->session, src->msg, src->cancellable, &error);
+
+ if (error)
+ GST_DEBUG_OBJECT (src, "Sending message failed: %s", error->message);
+
+ if (g_cancellable_is_cancelled (src->cancellable)) {
+ ret = GST_FLOW_FLUSHING;
+ goto done;
+ }
+
+ ret = gst_soup_http_src_got_headers (src, src->msg);
+ if (ret != GST_FLOW_OK) {
+ goto done;
+ }
+
+ if (!src->input_stream) {
+ GST_DEBUG_OBJECT (src, "Didn't get an input stream: %s", error->message);
+ ret = GST_FLOW_ERROR;
+ goto done;
+ }
+
+ if (SOUP_STATUS_IS_SUCCESSFUL (src->msg->status_code)) {
+ GST_DEBUG_OBJECT (src, "Successfully got a reply");
+ } else {
+ /* FIXME - be more helpful to people debugging */
+ ret = GST_FLOW_ERROR;
+ }
+
+ done:
+ if (error)
+ g_error_free (error);
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_soup_http_src_do_request (GstSoupHTTPSrc * src, const gchar * method)
+ {
+ GstFlowReturn ret;
+
+ if (src->max_retries != -1 && src->retry_count > src->max_retries) {
+ GST_DEBUG_OBJECT (src, "Max retries reached");
+ return GST_FLOW_ERROR;
+ }
+
+ src->retry_count++;
+ /* EOS immediately if we have an empty segment */
+ if (src->request_position == src->stop_position)
+ return GST_FLOW_EOS;
+
+ GST_LOG_OBJECT (src, "Running request for method: %s", method);
+
+ /* Update the position if we are retrying */
+ if (src->msg && src->request_position > 0) {
+ gst_soup_http_src_add_range_header (src, src->request_position,
+ src->stop_position);
+ } else if (src->msg && src->request_position == 0)
+ soup_message_headers_remove (src->msg->request_headers, "Range");
+
+ /* add_range_header() has the side effect of setting read_position to
+ * the requested position. This *needs* to be set regardless of having
+ * a message or not. Failure to do so would result in calculations being
+ * done with a stale/wrong read position. */
+ src->read_position = src->request_position;
+
+ if (!src->msg) {
+ if (!gst_soup_http_src_build_message (src, method)) {
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ if (g_cancellable_is_cancelled (src->cancellable)) {
+ GST_INFO_OBJECT (src, "interrupted");
+ return GST_FLOW_FLUSHING;
+ }
+
+ ret = gst_soup_http_src_send_message (src);
+
+ /* Check if Range header was respected. */
++#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
++ if (ret == GST_FLOW_OK && src->request_position > 0 &&
++ (src->msg->status_code != SOUP_STATUS_PARTIAL_CONTENT) &&
++ (src->request_position < src->content_size)) {
++#else
+ if (ret == GST_FLOW_OK && src->request_position > 0 &&
+ src->msg->status_code != SOUP_STATUS_PARTIAL_CONTENT) {
++#endif
+ src->seekable = FALSE;
+ GST_ELEMENT_ERROR_WITH_DETAILS (src, RESOURCE, SEEK,
+ (_("Server does not support seeking.")),
+ ("Server does not accept Range HTTP header, URL: %s, Redirect to: %s",
+ src->location, GST_STR_NULL (src->redirection_uri)),
+ ("http-status-code", G_TYPE_UINT, src->msg->status_code,
+ "http-redirection-uri", G_TYPE_STRING,
+ GST_STR_NULL (src->redirection_uri), NULL));
+ ret = GST_FLOW_ERROR;
+ }
+
+ return ret;
+ }
+
+ /*
+ * Check if bytes_read is above a certain threshold of the blocksize; if
+ * that happens a few times in a row, increase the blocksize. Do the same in
+ * the opposite direction to reduce the blocksize.
+ */
+ static void
+ gst_soup_http_src_check_update_blocksize (GstSoupHTTPSrc * src,
+ gint64 bytes_read)
+ {
+ guint blocksize = gst_base_src_get_blocksize (GST_BASE_SRC_CAST (src));
+
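+ /* g_get_monotonic_time() returns microseconds; scaling by GST_USECOND
+ * yields nanoseconds, i.e. GstClockTime, which is what GROW_TIME_LIMIT is
+ * compared against. */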
+ gint64 time_since_last_read =
+ g_get_monotonic_time () * GST_USECOND - src->last_socket_read_time;
+
+ GST_LOG_OBJECT (src, "Checking to update blocksize. Read: %" G_GINT64_FORMAT
+ " bytes, blocksize: %u bytes, time since last read: %" GST_TIME_FORMAT,
+ bytes_read, blocksize, GST_TIME_ARGS (time_since_last_read));
+
+ if (bytes_read >= blocksize * GROW_BLOCKSIZE_LIMIT
+ && time_since_last_read <= GROW_TIME_LIMIT) {
+ src->reduce_blocksize_count = 0;
+ src->increase_blocksize_count++;
+
+ if (src->increase_blocksize_count >= GROW_BLOCKSIZE_COUNT) {
+ blocksize *= GROW_BLOCKSIZE_FACTOR;
+ GST_DEBUG_OBJECT (src, "Increased blocksize to %u", blocksize);
+ gst_base_src_set_blocksize (GST_BASE_SRC_CAST (src), blocksize);
+ src->increase_blocksize_count = 0;
+ }
+ } else if (bytes_read < blocksize * REDUCE_BLOCKSIZE_LIMIT
+ || time_since_last_read > GROW_TIME_LIMIT) {
+ src->reduce_blocksize_count++;
+ src->increase_blocksize_count = 0;
+
+ if (src->reduce_blocksize_count >= REDUCE_BLOCKSIZE_COUNT) {
+ blocksize *= REDUCE_BLOCKSIZE_FACTOR;
+ blocksize = MAX (blocksize, src->minimum_blocksize);
+ GST_DEBUG_OBJECT (src, "Decreased blocksize to %u", blocksize);
+ gst_base_src_set_blocksize (GST_BASE_SRC_CAST (src), blocksize);
+ src->reduce_blocksize_count = 0;
+ }
+ } else {
+ src->reduce_blocksize_count = src->increase_blocksize_count = 0;
+ }
+ }
+
+ static void
+ gst_soup_http_src_update_position (GstSoupHTTPSrc * src, gint64 bytes_read)
+ {
+ GstBaseSrc *basesrc = GST_BASE_SRC_CAST (src);
+ guint64 new_position;
+
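+ /* Advance both positions in lockstep; if they already differ, a seek is
+ * pending and request_position must be left untouched. */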
+ new_position = src->read_position + bytes_read;
+ if (G_LIKELY (src->request_position == src->read_position))
+ src->request_position = new_position;
+ src->read_position = new_position;
+
+ if (src->have_size) {
+ if (new_position > src->content_size) {
+ GST_DEBUG_OBJECT (src, "Got position previous estimated content size "
+ "(%" G_GINT64_FORMAT " > %" G_GINT64_FORMAT ")", new_position,
+ src->content_size);
+ src->content_size = new_position;
+ basesrc->segment.duration = src->content_size;
+ gst_element_post_message (GST_ELEMENT (src),
+ gst_message_new_duration_changed (GST_OBJECT (src)));
+ } else if (new_position == src->content_size) {
+ GST_DEBUG_OBJECT (src, "We're EOS now");
+ }
+ }
+ }
+
+ static GstFlowReturn
+ gst_soup_http_src_read_buffer (GstSoupHTTPSrc * src, GstBuffer ** outbuf)
+ {
+ gssize read_bytes;
+ GstMapInfo mapinfo;
+ GstBaseSrc *bsrc;
+ GstFlowReturn ret;
+
+ bsrc = GST_BASE_SRC_CAST (src);
+
+ *outbuf = gst_soup_http_src_alloc_buffer (src);
+ if (!*outbuf) {
+ GST_WARNING_OBJECT (src, "Failed to allocate buffer");
+ return GST_FLOW_ERROR;
+ }
+
+ if (!gst_buffer_map (*outbuf, &mapinfo, GST_MAP_WRITE)) {
+ GST_WARNING_OBJECT (src, "Failed to map buffer");
+ return GST_FLOW_ERROR;
+ }
+
+ read_bytes =
+ g_input_stream_read (src->input_stream, mapinfo.data, mapinfo.size,
+ src->cancellable, NULL);
+ GST_DEBUG_OBJECT (src, "Read %" G_GSSIZE_FORMAT " bytes from http input",
+ read_bytes);
+
+ g_mutex_lock (&src->mutex);
+ if (g_cancellable_is_cancelled (src->cancellable)) {
+ gst_buffer_unmap (*outbuf, &mapinfo);
+ gst_buffer_unref (*outbuf);
+ g_mutex_unlock (&src->mutex);
+ return GST_FLOW_FLUSHING;
+ }
+
+ gst_buffer_unmap (*outbuf, &mapinfo);
+ if (read_bytes > 0) {
+ gst_buffer_set_size (*outbuf, read_bytes);
+ GST_BUFFER_OFFSET (*outbuf) = bsrc->segment.position;
+ ret = GST_FLOW_OK;
+ gst_soup_http_src_update_position (src, read_bytes);
++#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
++ src->received_total += read_bytes;
++#endif
+
+ /* Got some data, reset retry counter */
+ src->retry_count = 0;
+
+ gst_soup_http_src_check_update_blocksize (src, read_bytes);
+
+ src->last_socket_read_time = g_get_monotonic_time () * GST_USECOND;
+
+ /* If we're at the end of a range request, read again to let libsoup
+ * finalize the request. This allows to reuse the connection again later,
+ * otherwise we would have to cancel the message and close the connection
+ */
+ if (bsrc->segment.stop != -1
+ && bsrc->segment.position + read_bytes >= bsrc->segment.stop) {
+ guint8 tmp[128];
+
+ g_object_unref (src->msg);
+ src->msg = NULL;
+ src->have_body = TRUE;
+
+ /* This should return immediately as we're at the end of the range */
+ read_bytes =
+ g_input_stream_read (src->input_stream, tmp, sizeof (tmp),
+ src->cancellable, NULL);
+ if (read_bytes > 0)
+ GST_ERROR_OBJECT (src,
+ "Read %" G_GSIZE_FORMAT " bytes after end of range", read_bytes);
+ }
+ } else {
+ gst_buffer_unref (*outbuf);
+ if (read_bytes < 0 ||
+ (src->have_size && src->read_position < src->content_size)) {
+ /* Maybe the server disconnected, retry */
+ ret = GST_FLOW_CUSTOM_ERROR;
+ } else {
+ g_object_unref (src->msg);
+ src->msg = NULL;
+ ret = GST_FLOW_EOS;
+ src->have_body = TRUE;
+ }
+ }
+ g_mutex_unlock (&src->mutex);
+
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_soup_http_src_create (GstPushSrc * psrc, GstBuffer ** outbuf)
+ {
+ GstSoupHTTPSrc *src;
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstEvent *http_headers_event = NULL;
+
+ src = GST_SOUP_HTTP_SRC (psrc);
+
+ retry:
+ g_mutex_lock (&src->mutex);
+
+ /* Check for pending position change */
+ if (src->request_position != src->read_position) {
+ if (src->input_stream) {
+ g_input_stream_close (src->input_stream, src->cancellable, NULL);
+ g_object_unref (src->input_stream);
+ src->input_stream = NULL;
+ }
+ }
+
+ if (g_cancellable_is_cancelled (src->cancellable)) {
+ ret = GST_FLOW_FLUSHING;
+ g_mutex_unlock (&src->mutex);
+ goto done;
+ }
+
+ /* If we have no open connection to the server, start one */
+ if (!src->input_stream) {
+ *outbuf = NULL;
+ ret =
+ gst_soup_http_src_do_request (src,
+ src->method ? src->method : SOUP_METHOD_GET);
+ http_headers_event = src->http_headers_event;
+ src->http_headers_event = NULL;
+ }
+ g_mutex_unlock (&src->mutex);
+
+ if (ret == GST_FLOW_OK || ret == GST_FLOW_CUSTOM_ERROR) {
+ if (http_headers_event) {
+ gst_pad_push_event (GST_BASE_SRC_PAD (src), http_headers_event);
+ http_headers_event = NULL;
+ }
+ }
+
+ if (ret == GST_FLOW_OK)
+ ret = gst_soup_http_src_read_buffer (src, outbuf);
+
+ done:
+ GST_DEBUG_OBJECT (src, "Returning %d %s", ret, gst_flow_get_name (ret));
+ if (ret != GST_FLOW_OK) {
+ if (http_headers_event)
+ gst_event_unref (http_headers_event);
+
+ g_mutex_lock (&src->mutex);
+ if (src->input_stream) {
+ g_object_unref (src->input_stream);
+ src->input_stream = NULL;
+ }
+ g_mutex_unlock (&src->mutex);
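+ /* GST_FLOW_CUSTOM_ERROR is used internally to signal a retriable failure;
+ * loop back and re-issue the request. */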
+ if (ret == GST_FLOW_CUSTOM_ERROR) {
+ ret = GST_FLOW_OK;
+ goto retry;
+ }
+ }
+
+ if (ret == GST_FLOW_FLUSHING) {
+ g_mutex_lock (&src->mutex);
+ src->retry_count = 0;
+ g_mutex_unlock (&src->mutex);
+ }
+
+ return ret;
+ }
+
+ static gboolean
+ gst_soup_http_src_start (GstBaseSrc * bsrc)
+ {
+ GstSoupHTTPSrc *src = GST_SOUP_HTTP_SRC (bsrc);
+
+ GST_DEBUG_OBJECT (src, "start(\"%s\")", src->location);
+
++#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
++ if (src->dash_oldest_segment) {
++ g_free (src->dash_oldest_segment);
++ src->dash_oldest_segment = NULL;
++ }
++ if (src->dash_newest_segment) {
++ g_free (src->dash_newest_segment);
++ src->dash_newest_segment = NULL;
++ }
++#endif
+ return gst_soup_http_src_session_open (src);
+ }
+
+ static gboolean
+ gst_soup_http_src_stop (GstBaseSrc * bsrc)
+ {
+ GstSoupHTTPSrc *src;
+
+ src = GST_SOUP_HTTP_SRC (bsrc);
+ GST_DEBUG_OBJECT (src, "stop()");
+ if (src->keep_alive && !src->msg && !src->session_is_shared)
+ gst_soup_http_src_cancel_message (src);
+ else
+ gst_soup_http_src_session_close (src);
+
+ gst_soup_http_src_reset (src);
+ return TRUE;
+ }
+
+ static GstStateChangeReturn
+ gst_soup_http_src_change_state (GstElement * element, GstStateChange transition)
+ {
+ GstStateChangeReturn ret;
+ GstSoupHTTPSrc *src;
+
+ src = GST_SOUP_HTTP_SRC (element);
+
+ switch (transition) {
++#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
++ case GST_STATE_CHANGE_PAUSED_TO_READY:
++ GST_WARNING_OBJECT (src, "Last read pos: %" G_GUINT64_FORMAT
++ ", received total: %" G_GUINT64_FORMAT,
++ src->read_position, src->received_total);
++ break;
++#endif
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ gst_soup_http_src_session_close (src);
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ return ret;
+ }
+
+ static void
+ gst_soup_http_src_set_context (GstElement * element, GstContext * context)
+ {
+ GstSoupHTTPSrc *src = GST_SOUP_HTTP_SRC (element);
+
+ if (g_strcmp0 (gst_context_get_context_type (context),
+ GST_SOUP_SESSION_CONTEXT) == 0) {
+ const GstStructure *s = gst_context_get_structure (context);
+
+ GST_OBJECT_LOCK (src);
+ if (src->external_session)
+ g_object_unref (src->external_session);
+ src->external_session = NULL;
+ gst_structure_get (s, "session", SOUP_TYPE_SESSION, &src->external_session,
+ NULL);
+ src->forced_external_session = FALSE;
+ gst_structure_get (s, "force", G_TYPE_BOOLEAN,
+ &src->forced_external_session, NULL);
+
+ GST_DEBUG_OBJECT (src, "Setting external session %p (force: %d)",
+ src->external_session, src->forced_external_session);
+ GST_OBJECT_UNLOCK (src);
+ }
+
+ GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
+ }
+
+ /* Interrupt a blocking request. */
+ static gboolean
+ gst_soup_http_src_unlock (GstBaseSrc * bsrc)
+ {
+ GstSoupHTTPSrc *src;
+
+ src = GST_SOUP_HTTP_SRC (bsrc);
+ GST_DEBUG_OBJECT (src, "unlock()");
+
+ gst_soup_http_src_cancel_message (src);
+ return TRUE;
+ }
+
+ /* Stop interrupting a blocking request. */
+ static gboolean
+ gst_soup_http_src_unlock_stop (GstBaseSrc * bsrc)
+ {
+ GstSoupHTTPSrc *src;
+
+ src = GST_SOUP_HTTP_SRC (bsrc);
+ GST_DEBUG_OBJECT (src, "unlock_stop()");
+
+ g_cancellable_reset (src->cancellable);
+ return TRUE;
+ }
+
+ static gboolean
+ gst_soup_http_src_get_size (GstBaseSrc * bsrc, guint64 * size)
+ {
+ GstSoupHTTPSrc *src;
+
+ src = GST_SOUP_HTTP_SRC (bsrc);
+
+ if (src->have_size) {
+ GST_DEBUG_OBJECT (src, "get_size() = %" G_GUINT64_FORMAT,
+ src->content_size);
+ *size = src->content_size;
+ return TRUE;
+ }
+ GST_DEBUG_OBJECT (src, "get_size() = FALSE");
+ return FALSE;
+ }
+
+ static void
+ gst_soup_http_src_check_seekable (GstSoupHTTPSrc * src)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ /* Special case to check if the server allows range requests
+ * before really starting to get data in the buffer creation
+ * loops.
+ */
+ if (!src->got_headers && GST_STATE (src) >= GST_STATE_PAUSED) {
+ g_mutex_lock (&src->mutex);
+ while (!src->got_headers && !g_cancellable_is_cancelled (src->cancellable)
+ && ret == GST_FLOW_OK) {
+ if ((src->msg && src->msg->method != SOUP_METHOD_HEAD)) {
+ /* wait for the current request to finish */
+ g_cond_wait (&src->have_headers_cond, &src->mutex);
+ } else {
+ if (gst_soup_http_src_session_open (src)) {
+ ret = gst_soup_http_src_do_request (src, SOUP_METHOD_HEAD);
+ }
+ }
+ }
+ g_mutex_unlock (&src->mutex);
+ }
+ }
+
+ static gboolean
+ gst_soup_http_src_is_seekable (GstBaseSrc * bsrc)
+ {
+ GstSoupHTTPSrc *src = GST_SOUP_HTTP_SRC (bsrc);
+
+ gst_soup_http_src_check_seekable (src);
+
+ return src->seekable;
+ }
+
+ static gboolean
+ gst_soup_http_src_do_seek (GstBaseSrc * bsrc, GstSegment * segment)
+ {
+ GstSoupHTTPSrc *src = GST_SOUP_HTTP_SRC (bsrc);
+
+ GST_DEBUG_OBJECT (src, "do_seek(%" G_GUINT64_FORMAT "-%" G_GUINT64_FORMAT
+ ")", segment->start, segment->stop);
+ if (src->read_position == segment->start &&
+ src->request_position == src->read_position &&
+ src->stop_position == segment->stop) {
+ GST_DEBUG_OBJECT (src,
+ "Seek to current read/end position and no seek pending");
+ return TRUE;
+ }
+
+ gst_soup_http_src_check_seekable (src);
+
+ /* If we have no headers we don't know yet if it is seekable or not.
+ * Store the start position and error out later if it isn't */
+ if (src->got_headers && !src->seekable) {
+ GST_WARNING_OBJECT (src, "Not seekable");
+ return FALSE;
+ }
+
+ if (segment->rate < 0.0 || segment->format != GST_FORMAT_BYTES) {
+ GST_WARNING_OBJECT (src, "Invalid seek segment");
+ return FALSE;
+ }
+
+ if (src->have_size && segment->start >= src->content_size) {
+ GST_WARNING_OBJECT (src,
+ "Potentially seeking behind end of file, might EOS immediately");
+ }
+
+ /* Wait for create() to handle the jump in offset. */
+ src->request_position = segment->start;
+ src->stop_position = segment->stop;
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_soup_http_src_query (GstBaseSrc * bsrc, GstQuery * query)
+ {
+ GstSoupHTTPSrc *src = GST_SOUP_HTTP_SRC (bsrc);
+ gboolean ret;
+ GstSchedulingFlags flags;
+ gint minsize, maxsize, align;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_URI:
+ gst_query_set_uri (query, src->location);
+ if (src->redirection_uri != NULL) {
+ gst_query_set_uri_redirection (query, src->redirection_uri);
+ gst_query_set_uri_redirection_permanent (query,
+ src->redirection_permanent);
+ }
+ ret = TRUE;
+ break;
+ default:
+ ret = FALSE;
+ break;
+ }
+
+ if (!ret)
+ ret = GST_BASE_SRC_CLASS (parent_class)->query (bsrc, query);
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_SCHEDULING:
+ gst_query_parse_scheduling (query, &flags, &minsize, &maxsize, &align);
+ flags |= GST_SCHEDULING_FLAG_BANDWIDTH_LIMITED;
++
++#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
++ if (gst_soup_http_src_is_seekable (bsrc)) {
++ GST_DEBUG_OBJECT (src, "set seekable flag");
++ flags |= GST_SCHEDULING_FLAG_SEEKABLE;
++ }
++#endif
+ gst_query_set_scheduling (query, flags, minsize, maxsize, align);
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+ }
+
+ static gboolean
+ gst_soup_http_src_set_location (GstSoupHTTPSrc * src, const gchar * uri,
+ GError ** error)
+ {
+ const char *alt_schemes[] = { "icy://", "icyx://" };
+ guint i;
+
+ if (src->location) {
+ g_free (src->location);
+ src->location = NULL;
+ }
+
+ if (uri == NULL)
+ return FALSE;
+
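+ /* icy:// and icyx:// are plain HTTP with ICY metadata; rewrite the scheme
+ * so libsoup can issue the request. */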
+ for (i = 0; i < G_N_ELEMENTS (alt_schemes); i++) {
+ if (g_str_has_prefix (uri, alt_schemes[i])) {
+ src->location =
+ g_strdup_printf ("http://%s", uri + strlen (alt_schemes[i]));
+ return TRUE;
+ }
+ }
+
+ if (src->redirection_uri) {
+ g_free (src->redirection_uri);
+ src->redirection_uri = NULL;
+ }
+
+ src->location = g_strdup (uri);
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_soup_http_src_set_proxy (GstSoupHTTPSrc * src, const gchar * uri)
+ {
+ if (src->proxy) {
+ soup_uri_free (src->proxy);
+ src->proxy = NULL;
+ }
+
+ if (uri == NULL || *uri == '\0')
+ return TRUE;
+
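+ /* soup_uri_new() requires a scheme; assume http:// when the proxy is given
+ * as a bare host[:port]. */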
+ if (g_strstr_len (uri, -1, "://")) {
+ src->proxy = soup_uri_new (uri);
+ } else {
+ gchar *new_uri = g_strconcat ("http://", uri, NULL);
+
+ src->proxy = soup_uri_new (new_uri);
+ g_free (new_uri);
+ }
+
+ return (src->proxy != NULL);
+ }
+
+ static GstURIType
+ gst_soup_http_src_uri_get_type (GType type)
+ {
+ return GST_URI_SRC;
+ }
+
+ static const gchar *const *
+ gst_soup_http_src_uri_get_protocols (GType type)
+ {
+ static const gchar *protocols[] = { "http", "https", "icy", "icyx", NULL };
+
+ return protocols;
+ }
+
+ static gchar *
+ gst_soup_http_src_uri_get_uri (GstURIHandler * handler)
+ {
+ GstSoupHTTPSrc *src = GST_SOUP_HTTP_SRC (handler);
+
+ /* FIXME: make thread-safe */
+ return g_strdup (src->location);
+ }
+
+ static gboolean
+ gst_soup_http_src_uri_set_uri (GstURIHandler * handler, const gchar * uri,
+ GError ** error)
+ {
+ GstSoupHTTPSrc *src = GST_SOUP_HTTP_SRC (handler);
+
+ return gst_soup_http_src_set_location (src, uri, error);
+ }
+
+ static void
+ gst_soup_http_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
+ {
+ GstURIHandlerInterface *iface = (GstURIHandlerInterface *) g_iface;
+
+ iface->get_type = gst_soup_http_src_uri_get_type;
+ iface->get_protocols = gst_soup_http_src_uri_get_protocols;
+ iface->get_uri = gst_soup_http_src_uri_get_uri;
+ iface->set_uri = gst_soup_http_src_uri_set_uri;
+ }
--- /dev/null
- * Library General Public License for more
+ /* GStreamer
+ * Copyright (C) 2007-2008 Wouter Cloetens <wouter@mind.be>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
++ * Library General Public License for more details.
+ */
+
+ #ifndef __GST_SOUP_HTTP_SRC_H__
+ #define __GST_SOUP_HTTP_SRC_H__
+
+ #include <gst/gst.h>
+ #include <gst/base/gstpushsrc.h>
+ #include <glib.h>
+
+ G_BEGIN_DECLS
+
+ #include <libsoup/soup.h>
+
+ #define GST_TYPE_SOUP_HTTP_SRC \
+ (gst_soup_http_src_get_type())
+ #define GST_SOUP_HTTP_SRC(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_SOUP_HTTP_SRC,GstSoupHTTPSrc))
+ #define GST_SOUP_HTTP_SRC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass), \
+ GST_TYPE_SOUP_HTTP_SRC,GstSoupHTTPSrcClass))
+ #define GST_IS_SOUP_HTTP_SRC(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_SOUP_HTTP_SRC))
+ #define GST_IS_SOUP_HTTP_SRC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_SOUP_HTTP_SRC))
+
+ typedef struct _GstSoupHTTPSrc GstSoupHTTPSrc;
+ typedef struct _GstSoupHTTPSrcClass GstSoupHTTPSrcClass;
+
+ typedef enum {
+ GST_SOUP_HTTP_SRC_SESSION_IO_STATUS_IDLE,
+ GST_SOUP_HTTP_SRC_SESSION_IO_STATUS_QUEUED,
+ GST_SOUP_HTTP_SRC_SESSION_IO_STATUS_RUNNING,
+ GST_SOUP_HTTP_SRC_SESSION_IO_STATUS_CANCELLED,
+ } GstSoupHTTPSrcSessionIOStatus;
+
+ struct _GstSoupHTTPSrc {
+ GstPushSrc element;
+
+ gchar *location; /* Full URI. */
+ gchar *redirection_uri; /* Full URI after redirections. */
+ gboolean redirection_permanent; /* Permanent or temporary redirect? */
+ gchar *user_agent; /* User-Agent HTTP header. */
+ gboolean automatic_redirect; /* Follow redirects. */
+ SoupURI *proxy; /* HTTP proxy URI. */
+ gchar *user_id; /* Authentication user id for location URI. */
+ gchar *user_pw; /* Authentication user password for location URI. */
+ gchar *proxy_id; /* Authentication user id for proxy URI. */
+ gchar *proxy_pw; /* Authentication user password for proxy URI. */
+ gchar **cookies; /* HTTP request cookies. */
+ SoupSession *session; /* Async context. */
+ gboolean session_is_shared;
+ SoupSession *external_session; /* Shared via GstContext */
+ gboolean forced_external_session; /* If session was explicitly set from application */
+ SoupMessage *msg; /* Request message. */
+ gint retry_count; /* Number of retries since we received data */
+ gint max_retries; /* Maximum number of retries */
+ gchar *method; /* HTTP method */
+
+ gboolean got_headers; /* Already received headers from the server */
+ gboolean have_size; /* Received and parsed Content-Length
+ header. */
+ guint64 content_size; /* Value of Content-Length header. */
+ guint64 read_position; /* Current position. */
+ gboolean seekable; /* FALSE if the server does not support
+ Range. */
+ guint64 request_position; /* Seek to this position. */
+ guint64 stop_position; /* Stop at this position. */
+ gboolean have_body; /* Indicates that the end of the message body
+ * has just been signaled. This is used to
+ * decide if an out of range request should be
+ * handled as an error or EOS when the content
+ * size is unknown */
+ gboolean keep_alive; /* Use keep-alive sessions */
+ gboolean ssl_strict;
+ gchar *ssl_ca_file;
+ gboolean ssl_use_system_ca_file;
+ GTlsDatabase *tls_database;
+ GTlsInteraction *tls_interaction;
+
+ GCancellable *cancellable;
+ GInputStream *input_stream;
+
+ gint reduce_blocksize_count;
+ gint increase_blocksize_count;
+ guint minimum_blocksize;
+
+ /* Shoutcast/icecast metadata extraction handling. */
+ gboolean iradio_mode;
+ GstCaps *src_caps;
+ gchar *iradio_name;
+ gchar *iradio_genre;
+ gchar *iradio_url;
+
+ GstStructure *extra_headers;
+
+ SoupLoggerLogLevel log_level; /* Soup HTTP session logger level */
+
+ gboolean compress;
+
+ guint timeout;
+
+ GMutex mutex;
+ GCond have_headers_cond;
+
+ GstEvent *http_headers_event;
+
+ gint64 last_socket_read_time;
++
++#ifdef TIZEN_FEATURE_SOUP_MODIFICATION
++ gchar *dash_oldest_segment;
++ gchar *dash_newest_segment;
++ guint64 received_total; /* temp: for debugging */
++ guint dlna_opt; /* DLNA server option */
++#endif
+ };
+
+ struct _GstSoupHTTPSrcClass {
+ GstPushSrcClass parent_class;
+ };
+
+ GType gst_soup_http_src_get_type (void);
+
+ G_END_DECLS
+
+ #endif /* __GST_SOUP_HTTP_SRC_H__ */
+
--- /dev/null
+ /* VPX
+ * Copyright (C) 2006 David Schleef <ds@schleef.org>
+ * Copyright (C) 2008,2009,2010 Entropy Wave Inc
+ * Copyright (C) 2010-2012 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #if defined(HAVE_VP8_DECODER) || defined(HAVE_VP9_DECODER)
+
+ #include <string.h>
+
+ #include "gstvpxdec.h"
+ #include "gstvp8utils.h"
+
+ #include <gst/video/gstvideometa.h>
+ #include <gst/video/gstvideopool.h>
+
+ GST_DEBUG_CATEGORY_STATIC (gst_vpxdec_debug);
+ #define GST_CAT_DEFAULT gst_vpxdec_debug
+
+ #define DEFAULT_POST_PROCESSING FALSE
+ #define DEFAULT_POST_PROCESSING_FLAGS (VP8_DEBLOCK | VP8_DEMACROBLOCK | VP8_MFQE)
+ #define DEFAULT_DEBLOCKING_LEVEL 4
+ #define DEFAULT_NOISE_LEVEL 0
+ #define DEFAULT_THREADS 0
+ #define DEFAULT_VIDEO_CODEC_TAG NULL
+ #define DEFAULT_CODEC_ALGO NULL
+
+ enum
+ {
+ PROP_0,
+ PROP_POST_PROCESSING,
+ PROP_POST_PROCESSING_FLAGS,
+ PROP_DEBLOCKING_LEVEL,
+ PROP_NOISE_LEVEL,
+ PROP_THREADS
+ };
+
+ #define C_FLAGS(v) ((guint) v)
+ #define GST_VPX_DEC_TYPE_POST_PROCESSING_FLAGS (gst_vpx_dec_post_processing_flags_get_type())
+ static GType
+ gst_vpx_dec_post_processing_flags_get_type (void)
+ {
+ static const GFlagsValue values[] = {
+ {C_FLAGS (VP8_DEBLOCK), "Deblock", "deblock"},
+ {C_FLAGS (VP8_DEMACROBLOCK), "Demacroblock", "demacroblock"},
+ {C_FLAGS (VP8_ADDNOISE), "Add noise", "addnoise"},
+ #ifndef HAVE_VPX_1_8
+ {C_FLAGS (VP8_DEBUG_TXT_FRAME_INFO),
+ "Print frame information",
+ "visualize-frame-info"},
+ {C_FLAGS (VP8_DEBUG_TXT_MBLK_MODES),
+ "Show macroblock mode selection overlaid on image",
+ "visualize-macroblock-modes"},
+ {C_FLAGS (VP8_DEBUG_TXT_DC_DIFF),
+ "Show dc diff for each macro block overlaid on image",
+ "visualize-dc-diff"},
+ {C_FLAGS (VP8_DEBUG_TXT_RATE_INFO),
+ "Print video rate info",
+ "visualize-rate-info"},
+ #endif
+ {C_FLAGS (VP8_MFQE), "Multi-frame quality enhancement", "mfqe"},
+ {0, NULL, NULL}
+ };
+ static GType id = 0;
+
+ if (g_once_init_enter ((gsize *) & id)) {
+ GType _id;
+
+ _id = g_flags_register_static ("GstVPXDecPostProcessingFlags", values);
+
+ g_once_init_leave ((gsize *) & id, _id);
+ }
+
+ return id;
+ }
+
+ #undef C_FLAGS
+
+ static void gst_vpx_dec_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+ static void gst_vpx_dec_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+ static gboolean gst_vpx_dec_start (GstVideoDecoder * decoder);
+ static gboolean gst_vpx_dec_stop (GstVideoDecoder * decoder);
+ static gboolean gst_vpx_dec_set_format (GstVideoDecoder * decoder,
+ GstVideoCodecState * state);
+ static gboolean gst_vpx_dec_flush (GstVideoDecoder * decoder);
+ static GstFlowReturn
+ gst_vpx_dec_handle_frame (GstVideoDecoder * decoder,
+ GstVideoCodecFrame * frame);
+ static gboolean gst_vpx_dec_decide_allocation (GstVideoDecoder * decoder,
+ GstQuery * query);
+
+ static void gst_vpx_dec_image_to_buffer (GstVPXDec * dec,
+ const vpx_image_t * img, GstBuffer * buffer);
+ static GstFlowReturn gst_vpx_dec_open_codec (GstVPXDec * dec,
+ GstVideoCodecFrame * frame);
+ static void gst_vpx_dec_default_send_tags (GstVPXDec * dec);
+ static void gst_vpx_dec_set_stream_info (GstVPXDec * dec,
+ vpx_codec_stream_info_t * stream_info);
+ static void gst_vpx_dec_set_default_format (GstVPXDec * dec, GstVideoFormat fmt,
+ int width, int height);
+ static gboolean gst_vpx_dec_default_frame_format (GstVPXDec * dec,
+ vpx_image_t * img, GstVideoFormat * fmt);
+ static void gst_vpx_dec_handle_resolution_change (GstVPXDec * dec,
+ vpx_image_t * img, GstVideoFormat fmt);
+
+ #define parent_class gst_vpx_dec_parent_class
+ G_DEFINE_TYPE (GstVPXDec, gst_vpx_dec, GST_TYPE_VIDEO_DECODER);
+
+ static void
+ gst_vpx_dec_class_init (GstVPXDecClass * klass)
+ {
+ GObjectClass *gobject_class;
+ GstVideoDecoderClass *base_video_decoder_class;
+
+ gobject_class = G_OBJECT_CLASS (klass);
+ base_video_decoder_class = GST_VIDEO_DECODER_CLASS (klass);
+
+ gobject_class->set_property = gst_vpx_dec_set_property;
+ gobject_class->get_property = gst_vpx_dec_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_POST_PROCESSING,
+ g_param_spec_boolean ("post-processing", "Post Processing",
+ "Enable post processing", DEFAULT_POST_PROCESSING,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_POST_PROCESSING_FLAGS,
+ g_param_spec_flags ("post-processing-flags", "Post Processing Flags",
+ "Flags to control post processing",
+ GST_VPX_DEC_TYPE_POST_PROCESSING_FLAGS, DEFAULT_POST_PROCESSING_FLAGS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_DEBLOCKING_LEVEL,
+ g_param_spec_uint ("deblocking-level", "Deblocking Level",
+ "Deblocking level",
+ 0, 16, DEFAULT_DEBLOCKING_LEVEL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_NOISE_LEVEL,
+ g_param_spec_uint ("noise-level", "Noise Level",
+ "Noise level",
+ 0, 16, DEFAULT_NOISE_LEVEL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_THREADS,
+ g_param_spec_uint ("threads", "Max Threads",
+ "Maximum number of decoding threads",
+ 0, 16, DEFAULT_THREADS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ base_video_decoder_class->start = GST_DEBUG_FUNCPTR (gst_vpx_dec_start);
+ base_video_decoder_class->stop = GST_DEBUG_FUNCPTR (gst_vpx_dec_stop);
+ base_video_decoder_class->flush = GST_DEBUG_FUNCPTR (gst_vpx_dec_flush);
+ base_video_decoder_class->set_format =
+ GST_DEBUG_FUNCPTR (gst_vpx_dec_set_format);
+ base_video_decoder_class->handle_frame =
+ GST_DEBUG_FUNCPTR (gst_vpx_dec_handle_frame);
+ base_video_decoder_class->decide_allocation =
+ GST_DEBUG_FUNCPTR (gst_vpx_dec_decide_allocation);
+
+ klass->video_codec_tag = DEFAULT_VIDEO_CODEC_TAG;
+ klass->codec_algo = DEFAULT_CODEC_ALGO;
+ klass->open_codec = GST_DEBUG_FUNCPTR (gst_vpx_dec_open_codec);
+ klass->send_tags = GST_DEBUG_FUNCPTR (gst_vpx_dec_default_send_tags);
+ klass->set_stream_info = NULL;
+ klass->set_default_format = NULL;
+ klass->handle_resolution_change = NULL;
+ klass->get_frame_format =
+ GST_DEBUG_FUNCPTR (gst_vpx_dec_default_frame_format);
+
+ GST_DEBUG_CATEGORY_INIT (gst_vpxdec_debug, "vpxdec", 0, "VPX Decoder");
+
+ gst_type_mark_as_plugin_api (GST_VPX_DEC_TYPE_POST_PROCESSING_FLAGS, 0);
+ gst_type_mark_as_plugin_api (GST_TYPE_VPX_DEC, 0);
+ }
+
+ static void
+ gst_vpx_dec_init (GstVPXDec * gst_vpx_dec)
+ {
+ GstVideoDecoder *decoder = (GstVideoDecoder *) gst_vpx_dec;
+ GstVPXDecClass *vpxclass = GST_VPX_DEC_GET_CLASS (gst_vpx_dec);
+
+ GST_DEBUG_OBJECT (gst_vpx_dec, "gst_vpx_dec_init");
+ gst_video_decoder_set_packetized (decoder, TRUE);
+ gst_vpx_dec->post_processing = DEFAULT_POST_PROCESSING;
+ gst_vpx_dec->post_processing_flags = DEFAULT_POST_PROCESSING_FLAGS;
+ gst_vpx_dec->deblocking_level = DEFAULT_DEBLOCKING_LEVEL;
+ gst_vpx_dec->noise_level = DEFAULT_NOISE_LEVEL;
+
+ if (vpxclass->get_needs_sync_point) {
+ gst_video_decoder_set_needs_sync_point (GST_VIDEO_DECODER (gst_vpx_dec),
+ vpxclass->get_needs_sync_point (gst_vpx_dec));
+ }
+
+ gst_video_decoder_set_needs_format (decoder, TRUE);
+ gst_video_decoder_set_use_default_pad_acceptcaps (decoder, TRUE);
+ GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (decoder));
+ }
+
+ static void
+ gst_vpx_dec_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+ GstVPXDec *dec;
+
+ g_return_if_fail (GST_IS_VPX_DEC (object));
+ dec = GST_VPX_DEC (object);
+
+ GST_DEBUG_OBJECT (object, "gst_vpx_dec_set_property");
+ switch (prop_id) {
+ case PROP_POST_PROCESSING:
+ dec->post_processing = g_value_get_boolean (value);
+ break;
+ case PROP_POST_PROCESSING_FLAGS:
+ dec->post_processing_flags = g_value_get_flags (value);
+ break;
+ case PROP_DEBLOCKING_LEVEL:
+ dec->deblocking_level = g_value_get_uint (value);
+ break;
+ case PROP_NOISE_LEVEL:
+ dec->noise_level = g_value_get_uint (value);
+ break;
+ case PROP_THREADS:
+ dec->threads = g_value_get_uint (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static void
+ gst_vpx_dec_get_property (GObject * object, guint prop_id, GValue * value,
+ GParamSpec * pspec)
+ {
+ GstVPXDec *dec;
+
+ g_return_if_fail (GST_IS_VPX_DEC (object));
+ dec = GST_VPX_DEC (object);
+
+ switch (prop_id) {
+ case PROP_POST_PROCESSING:
+ g_value_set_boolean (value, dec->post_processing);
+ break;
+ case PROP_POST_PROCESSING_FLAGS:
+ g_value_set_flags (value, dec->post_processing_flags);
+ break;
+ case PROP_DEBLOCKING_LEVEL:
+ g_value_set_uint (value, dec->deblocking_level);
+ break;
+ case PROP_NOISE_LEVEL:
+ g_value_set_uint (value, dec->noise_level);
+ break;
+ case PROP_THREADS:
+ g_value_set_uint (value, dec->threads);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static gboolean
+ gst_vpx_dec_start (GstVideoDecoder * decoder)
+ {
+ GstVPXDec *gst_vpx_dec = GST_VPX_DEC (decoder);
+
+ GST_DEBUG_OBJECT (gst_vpx_dec, "start");
+ gst_vpx_dec->decoder_inited = FALSE;
+ gst_vpx_dec->safe_remap = FALSE;
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_vpx_dec_stop (GstVideoDecoder * base_video_decoder)
+ {
+ GstVPXDec *gst_vpx_dec = GST_VPX_DEC (base_video_decoder);
+
+ GST_DEBUG_OBJECT (gst_vpx_dec, "stop");
+
+ if (gst_vpx_dec->output_state) {
+ gst_video_codec_state_unref (gst_vpx_dec->output_state);
+ gst_vpx_dec->output_state = NULL;
+ }
+
+ if (gst_vpx_dec->input_state) {
+ gst_video_codec_state_unref (gst_vpx_dec->input_state);
+ gst_vpx_dec->input_state = NULL;
+ }
+
+ if (gst_vpx_dec->decoder_inited)
+ vpx_codec_destroy (&gst_vpx_dec->decoder);
+ gst_vpx_dec->decoder_inited = FALSE;
+
+ if (gst_vpx_dec->pool) {
+ gst_buffer_pool_set_active (gst_vpx_dec->pool, FALSE);
+ gst_object_unref (gst_vpx_dec->pool);
+ gst_vpx_dec->pool = NULL;
+ gst_vpx_dec->buf_size = 0;
+ }
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_vpx_dec_set_format (GstVideoDecoder * decoder, GstVideoCodecState * state)
+ {
+ GstVPXDec *gst_vpx_dec = GST_VPX_DEC (decoder);
+
+ GST_DEBUG_OBJECT (gst_vpx_dec, "set_format");
+
+ if (gst_vpx_dec->decoder_inited)
+ vpx_codec_destroy (&gst_vpx_dec->decoder);
+ gst_vpx_dec->decoder_inited = FALSE;
+
+ if (gst_vpx_dec->output_state) {
+ gst_video_codec_state_unref (gst_vpx_dec->output_state);
+ gst_vpx_dec->output_state = NULL;
+ }
+
+ if (gst_vpx_dec->input_state) {
+ gst_video_codec_state_unref (gst_vpx_dec->input_state);
+ }
+
+ gst_vpx_dec->input_state = gst_video_codec_state_ref (state);
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_vpx_dec_flush (GstVideoDecoder * base_video_decoder)
+ {
+ GstVPXDec *decoder;
+
+ GST_DEBUG_OBJECT (base_video_decoder, "flush");
+
+ decoder = GST_VPX_DEC (base_video_decoder);
+
+ if (decoder->output_state) {
+ gst_video_codec_state_unref (decoder->output_state);
+ decoder->output_state = NULL;
+ }
+
+ if (decoder->decoder_inited)
+ vpx_codec_destroy (&decoder->decoder);
+ decoder->decoder_inited = FALSE;
+
+ return TRUE;
+ }
+
+ static void
+ gst_vpx_dec_default_send_tags (GstVPXDec * dec)
+ {
+ GstTagList *list;
+ GstVPXDecClass *vpxclass = GST_VPX_DEC_GET_CLASS (dec);
+
+ if (vpxclass->video_codec_tag == NULL)
+ return;
+
+ list = gst_tag_list_new_empty ();
+ gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
+ GST_TAG_VIDEO_CODEC, vpxclass->video_codec_tag, NULL);
+
+ gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (dec),
+ gst_event_new_tag (list));
+ }
+
+ struct Frame
+ {
+ GstMapInfo info;
+ GstBuffer *buffer;
+ };
+
+ static GstBuffer *
+ gst_vpx_dec_prepare_image (GstVPXDec * dec, const vpx_image_t * img)
+ {
+ gint comp;
+ GstVideoMeta *vmeta;
+ GstBuffer *buffer;
+ struct Frame *frame = img->fb_priv;
+ GstVideoInfo *info = &dec->output_state->info;
+
+ buffer = gst_buffer_ref (frame->buffer);
+
+ /* FIXME: an atomic remap would be preferable, for now we simply
+ * remap the buffer from RW to RO when using a sysmem allocator,
+ * in order to avoid a useless memcpy in GstVideoDecoder.
+ */
+ if (dec->safe_remap) {
+ gst_buffer_unmap (buffer, &frame->info);
+ gst_buffer_map (buffer, &frame->info, GST_MAP_READ);
+ }
+
+ vmeta = gst_buffer_get_video_meta (buffer);
+ vmeta->format = GST_VIDEO_INFO_FORMAT (info);
+ vmeta->width = GST_VIDEO_INFO_WIDTH (info);
+ vmeta->height = GST_VIDEO_INFO_HEIGHT (info);
+ vmeta->n_planes = GST_VIDEO_INFO_N_PLANES (info);
+
+ for (comp = 0; comp < 4; comp++) {
+ vmeta->stride[comp] = img->stride[comp];
+ vmeta->offset[comp] =
+ img->planes[comp] ? img->planes[comp] - frame->info.data : 0;
+ }
+
+ /* FIXME This is a READ/WRITE mapped buffer see bug #754826 */
+
+ return buffer;
+ }
+
+ static int
+ gst_vpx_dec_get_buffer_cb (gpointer priv, gsize min_size,
+ vpx_codec_frame_buffer_t * fb)
+ {
+ GstVPXDec *dec = priv;
+ GstBuffer *buffer = NULL;
+ struct Frame *frame;
+ GstFlowReturn ret;
+
+ if (!dec->pool || dec->buf_size != min_size) {
+ GstBufferPool *pool;
+ GstStructure *config;
+ GstCaps *caps;
+ GstAllocator *allocator;
+ GstAllocationParams params;
+
+ if (dec->pool) {
+ gst_buffer_pool_set_active (dec->pool, FALSE);
+ gst_object_unref (dec->pool);
+ dec->pool = NULL;
+ dec->buf_size = 0;
+ }
+
+ gst_video_decoder_get_allocator (GST_VIDEO_DECODER (dec), &allocator,
+ &params);
+
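+ /* libvpx writes decoded planes straight into this memory, so fall back to
+ * the default system allocator if downstream proposed a custom one whose
+ * memory we cannot safely hand to the decoder. */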
+ if (allocator &&
+ GST_OBJECT_FLAG_IS_SET (allocator, GST_ALLOCATOR_FLAG_CUSTOM_ALLOC)) {
+ gst_object_unref (allocator);
+ allocator = NULL;
+ }
+
+ dec->safe_remap = (allocator == NULL
+ || !g_strcmp0 (allocator->mem_type, GST_ALLOCATOR_SYSMEM));
+
+ pool = gst_buffer_pool_new ();
+ config = gst_buffer_pool_get_config (pool);
+ gst_buffer_pool_config_set_allocator (config, allocator, &params);
+ caps = gst_caps_from_string ("video/internal");
+ gst_buffer_pool_config_set_params (config, caps, min_size, 2, 0);
+ gst_caps_unref (caps);
+ gst_buffer_pool_set_config (pool, config);
+
+ if (allocator)
+ gst_object_unref (allocator);
+
+ if (!gst_buffer_pool_set_active (pool, TRUE)) {
+ GST_WARNING ("Failed to create internal pool");
+ gst_object_unref (pool);
+ return -1;
+ }
+
+ dec->pool = pool;
+ dec->buf_size = min_size;
+ }
+
+ ret = gst_buffer_pool_acquire_buffer (dec->pool, &buffer, NULL);
+ if (ret != GST_FLOW_OK) {
+ GST_WARNING ("Failed to acquire buffer from internal pool.");
+ return -1;
+ }
+
+ /* Add it now, while the buffer is writable */
+ gst_buffer_add_video_meta (buffer, GST_VIDEO_FRAME_FLAG_NONE,
+ GST_VIDEO_FORMAT_ENCODED, 0, 0);
+
+ frame = g_new0 (struct Frame, 1);
+ if (!gst_buffer_map (buffer, &frame->info, GST_MAP_READWRITE)) {
+ gst_buffer_unref (buffer);
+ g_free (frame);
+ GST_WARNING ("Failed to map buffer from internal pool.");
+ return -1;
+ }
+
+ fb->size = frame->info.size;
+ fb->data = frame->info.data;
+ frame->buffer = buffer;
+ fb->priv = frame;
+
+ GST_TRACE_OBJECT (priv, "Allocated buffer %p", frame->buffer);
+
+ return 0;
+ }
+
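+ /* Matching release callback: undo the map and drop the pool buffer
+ * acquired in gst_vpx_dec_get_buffer_cb(). */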
+ static int
+ gst_vpx_dec_release_buffer_cb (gpointer priv, vpx_codec_frame_buffer_t * fb)
+ {
+ struct Frame *frame = fb->priv;
+
+ /* We're sometimes called without a frame */
+ if (!frame)
+ return 0;
+
+ GST_TRACE_OBJECT (priv, "Release buffer %p", frame->buffer);
+
+ gst_buffer_unmap (frame->buffer, &frame->info);
+ gst_buffer_unref (frame->buffer);
+ g_free (frame);
+ fb->priv = NULL;
+
+ return 0;
+ }
+
+ static void
+ gst_vpx_dec_image_to_buffer (GstVPXDec * dec, const vpx_image_t * img,
+ GstBuffer * buffer)
+ {
+ int deststride, srcstride, height, width, line, comp;
+ guint8 *dest, *src;
+ GstVideoFrame frame;
+ GstVideoInfo *info = &dec->output_state->info;
+
+ if (!gst_video_frame_map (&frame, info, buffer, GST_MAP_WRITE)) {
+ GST_ERROR_OBJECT (dec, "Could not map video buffer");
+ return;
+ }
+
+ for (comp = 0; comp < 3; comp++) {
+ dest = GST_VIDEO_FRAME_COMP_DATA (&frame, comp);
+ src = img->planes[comp];
+ width = GST_VIDEO_FRAME_COMP_WIDTH (&frame, comp)
+ * GST_VIDEO_FRAME_COMP_PSTRIDE (&frame, comp);
+ height = GST_VIDEO_FRAME_COMP_HEIGHT (&frame, comp);
+ deststride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, comp);
+ srcstride = img->stride[comp];
+
+ if (srcstride == deststride) {
+ GST_TRACE_OBJECT (dec, "Stride matches. Comp %d: %d, copying full plane",
+ comp, srcstride);
+ memcpy (dest, src, srcstride * height);
+ } else {
+ GST_TRACE_OBJECT (dec, "Stride mismatch. Comp %d: %d != %d, copying "
+ "line by line.", comp, srcstride, deststride);
+ for (line = 0; line < height; line++) {
+ memcpy (dest, src, width);
+ dest += deststride;
+ src += srcstride;
+ }
+ }
+ }
+
+ gst_video_frame_unmap (&frame);
+ }
+
+ static GstFlowReturn
+ gst_vpx_dec_open_codec (GstVPXDec * dec, GstVideoCodecFrame * frame)
+ {
+ int flags = 0;
+ vpx_codec_stream_info_t stream_info;
+ vpx_codec_caps_t caps;
+ vpx_codec_dec_cfg_t cfg;
+ vpx_codec_err_t status;
+ GstMapInfo minfo;
+ GstVPXDecClass *vpxclass = GST_VPX_DEC_GET_CLASS (dec);
+
+ g_return_val_if_fail (vpxclass->codec_algo != NULL, GST_FLOW_ERROR);
+
+ memset (&stream_info, 0, sizeof (stream_info));
+ memset (&cfg, 0, sizeof (cfg));
+ stream_info.sz = sizeof (stream_info);
+
+ if (!gst_buffer_map (frame->input_buffer, &minfo, GST_MAP_READ)) {
+ GST_ERROR_OBJECT (dec, "Failed to map input buffer");
+ return GST_FLOW_ERROR;
+ }
+
+ status = vpx_codec_peek_stream_info (vpxclass->codec_algo,
+ minfo.data, minfo.size, &stream_info);
+
+ gst_buffer_unmap (frame->input_buffer, &minfo);
+
+ if (status != VPX_CODEC_OK) {
+ GST_INFO_OBJECT (dec, "VPX preprocessing error: %s",
+ gst_vpx_error_name (status));
+ return GST_FLOW_CUSTOM_SUCCESS_1;
+ }
+ if (!stream_info.is_kf) {
+ GST_INFO_OBJECT (dec, "No keyframe, skipping");
+ return GST_FLOW_CUSTOM_SUCCESS_1;
+ }
+ if (stream_info.w == 0 || stream_info.h == 0) {
+ /* For VP8 it's possible to signal width or height to be 0, but it does
+ * not make sense to do so. For VP9 it's impossible. Hence, we most likely
+ * have a corrupt stream if width or height is 0. */
+ GST_INFO_OBJECT (dec, "Invalid resolution %d x %d", stream_info.w,
+ stream_info.h);
+ return GST_FLOW_CUSTOM_SUCCESS_1;
+ }
+
+ gst_vpx_dec_set_stream_info (dec, &stream_info);
+ gst_vpx_dec_set_default_format (dec, GST_VIDEO_FORMAT_I420, stream_info.w,
+ stream_info.h);
+
+ cfg.w = stream_info.w;
+ cfg.h = stream_info.h;
+
+ if (dec->threads > 0)
+ cfg.threads = dec->threads;
+ else
+ cfg.threads = g_get_num_processors ();
+
+ caps = vpx_codec_get_caps (vpxclass->codec_algo);
+
+ if (dec->post_processing) {
+ if (!(caps & VPX_CODEC_CAP_POSTPROC)) {
+ GST_WARNING_OBJECT (dec, "Decoder does not support post processing");
+ } else {
+ flags |= VPX_CODEC_USE_POSTPROC;
+ }
+ }
+
+ status =
+ vpx_codec_dec_init (&dec->decoder, vpxclass->codec_algo, &cfg, flags);
+ if (status != VPX_CODEC_OK) {
+ GST_ELEMENT_ERROR (dec, LIBRARY, INIT,
+ ("Failed to initialize VP8 decoder"), ("%s",
+ gst_vpx_error_name (status)));
+ return GST_FLOW_ERROR;
+ }
+
+ if ((caps & VPX_CODEC_CAP_POSTPROC) && dec->post_processing) {
+ vp8_postproc_cfg_t pp_cfg = { 0, };
+
+ pp_cfg.post_proc_flag = dec->post_processing_flags;
+ pp_cfg.deblocking_level = dec->deblocking_level;
+ pp_cfg.noise_level = dec->noise_level;
+
+ status = vpx_codec_control (&dec->decoder, VP8_SET_POSTPROC, &pp_cfg);
+ if (status != VPX_CODEC_OK) {
+ GST_WARNING_OBJECT (dec, "Couldn't set postprocessing settings: %s",
+ gst_vpx_error_name (status));
+ }
+ }
+ vpx_codec_set_frame_buffer_functions (&dec->decoder,
+ gst_vpx_dec_get_buffer_cb, gst_vpx_dec_release_buffer_cb, dec);
+
+ dec->decoder_inited = TRUE;
+
+ return GST_FLOW_OK;
+ }
+
+ static GstFlowReturn
+ gst_vpx_dec_handle_frame (GstVideoDecoder * decoder, GstVideoCodecFrame * frame)
+ {
+ GstVPXDec *dec;
+ GstFlowReturn ret = GST_FLOW_OK;
+ vpx_codec_err_t status;
+ vpx_codec_iter_t iter = NULL;
+ vpx_image_t *img;
+ long decoder_deadline = 0;
+ GstClockTimeDiff deadline;
+ GstMapInfo minfo;
+ GstVPXDecClass *vpxclass;
+ GstVideoFormat fmt;
+
+ GST_LOG_OBJECT (decoder, "handle_frame");
+
+ dec = GST_VPX_DEC (decoder);
+ vpxclass = GST_VPX_DEC_GET_CLASS (dec);
+
+ if (!dec->decoder_inited) {
+ ret = vpxclass->open_codec (dec, frame);
+ if (ret == GST_FLOW_CUSTOM_SUCCESS_1) {
+ GstVideoDecoderRequestSyncPointFlags flags = 0;
+
+ if (gst_video_decoder_get_needs_sync_point (decoder))
+ flags |= GST_VIDEO_DECODER_REQUEST_SYNC_POINT_DISCARD_INPUT;
+
+ gst_video_decoder_request_sync_point (decoder, frame, flags);
+ gst_video_decoder_drop_frame (decoder, frame);
+ return GST_FLOW_OK;
+ } else if (ret != GST_FLOW_OK) {
+ gst_video_codec_frame_unref (frame);
+ return ret;
+ }
+ }
+
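+ /* Map the QoS deadline onto libvpx's soft deadline: 0 disables the
+ * deadline entirely, while an already-missed deadline becomes 1 to request
+ * the fastest possible decode. */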
+ deadline = gst_video_decoder_get_max_decode_time (decoder, frame);
+ if (deadline < 0) {
+ decoder_deadline = 1;
+ } else if (deadline == G_MAXINT64) {
+ decoder_deadline = 0;
+ } else {
+ decoder_deadline = MAX (1, deadline / GST_MSECOND);
+ }
+
+ if (!gst_buffer_map (frame->input_buffer, &minfo, GST_MAP_READ)) {
+ GST_ERROR_OBJECT (dec, "Failed to map input buffer");
+ gst_video_codec_frame_unref (frame);
+ return GST_FLOW_ERROR;
+ }
+
+ status = vpx_codec_decode (&dec->decoder,
+ minfo.data, minfo.size, NULL, decoder_deadline);
+
+ gst_buffer_unmap (frame->input_buffer, &minfo);
+
+ if (status) {
+ GstVideoDecoderRequestSyncPointFlags flags = 0;
+
+ GST_VIDEO_DECODER_ERROR (decoder, 1, LIBRARY, ENCODE,
+ ("Failed to decode frame"), ("%s", gst_vpx_error_name (status)), ret);
+
+ if (gst_video_decoder_get_needs_sync_point (decoder))
+ flags |= GST_VIDEO_DECODER_REQUEST_SYNC_POINT_DISCARD_INPUT;
+
+ gst_video_decoder_request_sync_point (decoder, frame, flags);
+ gst_video_codec_frame_unref (frame);
+ return ret;
+ }
+
+ img = vpx_codec_get_frame (&dec->decoder, &iter);
+ if (img) {
+ if (vpxclass->get_frame_format (dec, img, &fmt) == FALSE) {
++#ifndef __TIZEN__
+ vpx_img_free (img);
++#endif
+ GST_ELEMENT_ERROR (decoder, LIBRARY, ENCODE,
+ ("Failed to decode frame"), ("Unsupported color format %d",
+ img->fmt));
++#ifdef __TIZEN__
++ vpx_img_free (img);
++#endif
+ gst_video_codec_frame_unref (frame);
+ return GST_FLOW_ERROR;
+ }
+
+ if (deadline < 0) {
+ GST_LOG_OBJECT (dec, "Skipping late frame (%f s past deadline)",
+ (double) -deadline / GST_SECOND);
+ gst_video_decoder_drop_frame (decoder, frame);
+ } else {
+ gst_vpx_dec_handle_resolution_change (dec, img, fmt);
+ if (img->fb_priv && dec->have_video_meta) {
+ frame->output_buffer = gst_vpx_dec_prepare_image (dec, img);
+ ret = gst_video_decoder_finish_frame (decoder, frame);
+ } else {
+ ret = gst_video_decoder_allocate_output_frame (decoder, frame);
+
+ if (ret == GST_FLOW_OK) {
+ gst_vpx_dec_image_to_buffer (dec, img, frame->output_buffer);
+ ret = gst_video_decoder_finish_frame (decoder, frame);
+ } else {
+ gst_video_decoder_drop_frame (decoder, frame);
+ }
+ }
+ }
+
+ vpx_img_free (img);
+
+ while ((img = vpx_codec_get_frame (&dec->decoder, &iter))) {
+ GST_WARNING_OBJECT (decoder, "Multiple decoded frames... dropping");
+ vpx_img_free (img);
+ }
+ } else {
+ /* Invisible frame */
+ GST_VIDEO_CODEC_FRAME_SET_DECODE_ONLY (frame);
+ gst_video_decoder_finish_frame (decoder, frame);
+ }
+
+ return ret;
+ }
+
+ static gboolean
+ gst_vpx_dec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query)
+ {
+ GstVPXDec *dec = GST_VPX_DEC (bdec);
+ GstBufferPool *pool;
+ GstStructure *config;
+
+ if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query))
+ return FALSE;
+
+ g_assert (gst_query_get_n_allocation_pools (query) > 0);
+ gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
+ g_assert (pool != NULL);
+
+ config = gst_buffer_pool_get_config (pool);
+ if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
+ gst_buffer_pool_config_add_option (config,
+ GST_BUFFER_POOL_OPTION_VIDEO_META);
+ dec->have_video_meta = TRUE;
+ }
+ gst_buffer_pool_set_config (pool, config);
+ gst_object_unref (pool);
+
+ return TRUE;
+ }
+
+ static void
+ gst_vpx_dec_set_stream_info (GstVPXDec * dec,
+ vpx_codec_stream_info_t * stream_info)
+ {
+ GstVPXDecClass *vpxclass = GST_VPX_DEC_GET_CLASS (dec);
+ if (vpxclass->set_stream_info != NULL) {
+ vpxclass->set_stream_info (dec, stream_info);
+ }
+ }
+
+ static void
+ gst_vpx_dec_set_default_format (GstVPXDec * dec, GstVideoFormat fmt, int width,
+ int height)
+ {
+ GstVPXDecClass *vpxclass = GST_VPX_DEC_GET_CLASS (dec);
+ if (vpxclass->set_default_format != NULL) {
+ vpxclass->set_default_format (dec, fmt, width, height);
+ }
+ }
+
+ static gboolean
+ gst_vpx_dec_default_frame_format (GstVPXDec * dec, vpx_image_t * img,
+ GstVideoFormat * fmt)
+ {
+ if (img->fmt == VPX_IMG_FMT_I420) {
+ *fmt = GST_VIDEO_FORMAT_I420;
+ return TRUE;
+ } else {
+ return FALSE;
+ }
+ }
+
+ static void
+ gst_vpx_dec_handle_resolution_change (GstVPXDec * dec, vpx_image_t * img,
+ GstVideoFormat fmt)
+ {
+ GstVPXDecClass *vpxclass = GST_VPX_DEC_GET_CLASS (dec);
+ if (vpxclass->handle_resolution_change != NULL) {
+ vpxclass->handle_resolution_change (dec, img, fmt);
+ }
+ }
+
+ #endif /* HAVE_VP8_DECODER || HAVE_VP9_DECODER */
--- /dev/null
+ /* GStreamer AAC parser plugin
+ * Copyright (C) 2008 Nokia Corporation. All rights reserved.
+ *
+ * Contact: Stefan Kost <stefan.kost@nokia.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /**
+ * SECTION:element-aacparse
+ * @title: aacparse
+ * @short_description: AAC parser
+ * @see_also: #GstAmrParse
+ *
+ * This is an AAC parser which handles ADIF, ADTS and LOAS stream formats,
+ * as well as correctly framed raw AAC.
+ *
+ * As the ADIF format is not framed, it is not seekable and the stream
+ * duration cannot be determined either. ADTS format AAC clips, however, can
+ * be seeked, and the parser can also estimate the playback position and
+ * clip duration.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 filesrc location=abc.aac ! aacparse ! faad ! audioresample ! audioconvert ! alsasink
+ * ]|
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <string.h>
+
+ #include <gst/base/gstbitreader.h>
+ #include <gst/pbutils/pbutils.h>
+ #include "gstaudioparserselements.h"
+ #include "gstaacparse.h"
+
+
+ static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/mpeg, "
+ "framed = (boolean) true, " "mpegversion = (int) { 2, 4 }, "
+ "stream-format = (string) { raw, adts, adif, loas };"));
+
+ static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/mpeg, mpegversion = (int) { 2, 4 };"));
+
+ GST_DEBUG_CATEGORY_STATIC (aacparse_debug);
+ #define GST_CAT_DEFAULT aacparse_debug
+
+
+ #define ADIF_MAX_SIZE 40 /* Should be enough */
+ #define ADTS_MAX_SIZE 10 /* Should be enough */
+ #define LOAS_MAX_SIZE 3 /* Should be enough */
+ #define RAW_MAX_SIZE 1 /* Correct framing is required */
+
+ #define ADTS_HEADERS_LENGTH 7UL /* Total byte-length of fixed and variable
+ headers prepended during raw to ADTS
+ conversion */
+
++#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION /* to get more accurate duration */
++#define AAC_MAX_ESTIMATE_DURATION_BUF (1024 * 1024) /* use first 1 Mbyte */
++#define AAC_SAMPLE_PER_FRAME 1024
++
++#define AAC_MAX_PULL_RANGE_BUF (1 * 1024 * 1024) /* 1 MByte */
++#define AAC_LARGE_FILE_SIZE (2 * 1024 * 1024) /* 2 MByte */
++#define gst_aac_parse_parent_class parent_class
++#endif
++
+ #define AAC_FRAME_DURATION(parse) (GST_SECOND/parse->frames_per_sec)
+
+ static const gint loas_sample_rate_table[16] = {
+ 96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050,
+ 16000, 12000, 11025, 8000, 7350, 0, 0, 0
+ };
+
+ static const gint loas_channels_table[16] = {
+ 0, 1, 2, 3, 4, 5, 6, 8,
+ 0, 0, 0, 7, 8, 0, 8, 0
+ };
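+ /* Both tables are indexed by 4-bit fields from the AudioSpecificConfig:
+ * samplingFrequencyIndex (ISO/IEC 14496-3 table 1.18) and
+ * channelConfiguration (table 1.19); zero entries are rejected as invalid
+ * by the readers below. */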
+
+ static gboolean gst_aac_parse_start (GstBaseParse * parse);
+ static gboolean gst_aac_parse_stop (GstBaseParse * parse);
+
+ static gboolean gst_aac_parse_sink_setcaps (GstBaseParse * parse,
+ GstCaps * caps);
+ static GstCaps *gst_aac_parse_sink_getcaps (GstBaseParse * parse,
+ GstCaps * filter);
+
+ static GstFlowReturn gst_aac_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize);
+ static GstFlowReturn gst_aac_parse_pre_push_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame);
+ static gboolean gst_aac_parse_src_event (GstBaseParse * parse,
+ GstEvent * event);
+
+ static gboolean gst_aac_parse_read_audio_specific_config (GstAacParse *
+ aacparse, GstBitReader * br, gint * object_type, gint * sample_rate,
+ gint * channels, gint * frame_samples);
+
++#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
++static guint gst_aac_parse_adts_get_fast_frame_len (const guint8 * data);
++/* make full aac(adts) index table when seek */
++static gboolean gst_aac_parse_adts_src_eventfunc (GstBaseParse * parse,
++ GstEvent * event);
++int get_aac_parse_get_adts_frame_length (const unsigned char *data,
++ gint64 offset);
++static gboolean gst_aac_parse_estimate_duration (GstBaseParse * parse);
++static void gst_aac_parse_check_byte_seekability (GstBaseParse * parse);
++#endif
+
+ #define gst_aac_parse_parent_class parent_class
+ G_DEFINE_TYPE (GstAacParse, gst_aac_parse, GST_TYPE_BASE_PARSE);
+ GST_ELEMENT_REGISTER_DEFINE (aacparse, "aacparse",
+ GST_RANK_PRIMARY + 1, GST_TYPE_AAC_PARSE);
+
++#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
++static inline gint
++gst_aac_parse_get_sample_rate_from_index (guint sr_idx)
++{
++ static const guint aac_sample_rates[] = { 96000, 88200, 64000, 48000, 44100,
++ 32000, 24000, 22050, 16000, 12000, 11025, 8000
++ };
++
++ if (sr_idx < G_N_ELEMENTS (aac_sample_rates))
++ return aac_sample_rates[sr_idx];
++ GST_WARNING ("Invalid sample rate index %u", sr_idx);
++ return 0;
++}
++#endif
+ /**
+ * gst_aac_parse_class_init:
+ * @klass: #GstAacParseClass.
+ *
+ */
+ static void
+ gst_aac_parse_class_init (GstAacParseClass * klass)
+ {
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (aacparse_debug, "aacparse", 0,
+ "AAC audio stream parser");
+
+ gst_element_class_add_static_pad_template (element_class, &sink_template);
+ gst_element_class_add_static_pad_template (element_class, &src_template);
+
+ gst_element_class_set_static_metadata (element_class,
+ "AAC audio stream parser", "Codec/Parser/Audio",
+ "Advanced Audio Coding parser", "Stefan Kost <stefan.kost@nokia.com>");
+
+ parse_class->start = GST_DEBUG_FUNCPTR (gst_aac_parse_start);
+ parse_class->stop = GST_DEBUG_FUNCPTR (gst_aac_parse_stop);
+ parse_class->set_sink_caps = GST_DEBUG_FUNCPTR (gst_aac_parse_sink_setcaps);
+ parse_class->get_sink_caps = GST_DEBUG_FUNCPTR (gst_aac_parse_sink_getcaps);
+ parse_class->handle_frame = GST_DEBUG_FUNCPTR (gst_aac_parse_handle_frame);
+ parse_class->pre_push_frame =
+ GST_DEBUG_FUNCPTR (gst_aac_parse_pre_push_frame);
+ parse_class->src_event = GST_DEBUG_FUNCPTR (gst_aac_parse_src_event);
+ }
+
+
+ /**
+ * gst_aac_parse_init:
+ * @aacparse: #GstAacParse.
+ * @klass: #GstAacParseClass.
+ *
+ */
+ static void
+ gst_aac_parse_init (GstAacParse * aacparse)
+ {
+ GST_DEBUG ("initialized");
+ GST_PAD_SET_ACCEPT_INTERSECT (GST_BASE_PARSE_SINK_PAD (aacparse));
+ GST_PAD_SET_ACCEPT_TEMPLATE (GST_BASE_PARSE_SINK_PAD (aacparse));
+
+ aacparse->last_parsed_sample_rate = 0;
+ aacparse->last_parsed_channels = 0;
++#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
++ /* to get more correct duration */
++ aacparse->first_frame = TRUE;
++#endif
+ }
+
+
+ /**
+ * gst_aac_parse_set_src_caps:
+ * @aacparse: #GstAacParse.
+ * @sink_caps: (proposed) caps of sink pad
+ *
+ * Set source pad caps according to current knowledge about the
+ * audio stream.
+ *
+ * Returns: TRUE if caps were successfully set.
+ */
+ static gboolean
+ gst_aac_parse_set_src_caps (GstAacParse * aacparse, GstCaps * sink_caps)
+ {
+ GstStructure *s;
+ GstCaps *src_caps = NULL, *peercaps;
+ gboolean res = FALSE;
+ const gchar *stream_format;
+ guint8 codec_data[2];
+ guint16 codec_data_data;
+ gint sample_rate_idx;
+
+ GST_DEBUG_OBJECT (aacparse, "sink caps: %" GST_PTR_FORMAT, sink_caps);
+ if (sink_caps)
+ src_caps = gst_caps_copy (sink_caps);
+ else
+ src_caps = gst_caps_new_empty_simple ("audio/mpeg");
+
+ gst_caps_set_simple (src_caps, "framed", G_TYPE_BOOLEAN, TRUE,
+ "mpegversion", G_TYPE_INT, aacparse->mpegversion, NULL);
+
+ aacparse->output_header_type = aacparse->header_type;
+ switch (aacparse->header_type) {
+ case DSPAAC_HEADER_NONE:
+ stream_format = "raw";
+ break;
+ case DSPAAC_HEADER_ADTS:
+ stream_format = "adts";
+ break;
+ case DSPAAC_HEADER_ADIF:
+ stream_format = "adif";
+ break;
+ case DSPAAC_HEADER_LOAS:
+ stream_format = "loas";
+ break;
+ default:
+ stream_format = NULL;
+ }
+
+ /* Generate codec data to be able to set profile/level on the caps */
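+ /* The two bytes built below follow the start of an AudioSpecificConfig
+ (ISO/IEC 14496-3): 5 bits audioObjectType, 4 bits
+ samplingFrequencyIndex, 4 bits channelConfiguration, low 3 bits zero */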
+ sample_rate_idx =
+ gst_codec_utils_aac_get_index_from_sample_rate (aacparse->sample_rate);
+ if (sample_rate_idx < 0)
+ goto not_a_known_rate;
+ codec_data_data =
+ (aacparse->object_type << 11) |
+ (sample_rate_idx << 7) | (aacparse->channels << 3);
+ GST_WRITE_UINT16_BE (codec_data, codec_data_data);
+ gst_codec_utils_aac_caps_set_level_and_profile (src_caps, codec_data, 2);
+
+ s = gst_caps_get_structure (src_caps, 0);
+ if (aacparse->sample_rate > 0)
+ gst_structure_set (s, "rate", G_TYPE_INT, aacparse->sample_rate, NULL);
+ if (aacparse->channels > 0)
+ gst_structure_set (s, "channels", G_TYPE_INT, aacparse->channels, NULL);
+ if (stream_format)
+ gst_structure_set (s, "stream-format", G_TYPE_STRING, stream_format, NULL);
+
++#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
++ if (!gst_structure_get_value (s, "codec_data")) {
++ GstBuffer *codec_data_buffer;
++ GST_WARNING("Insert codec_data to src_caps");
++ /* The codec_data data is according to AudioSpecificConfig,
++ ISO/IEC 14496-3, 1.6.2.1 */
++ codec_data_buffer = gst_buffer_new_and_alloc (2);
++ gst_buffer_fill (codec_data_buffer, 0, codec_data, 2);
++ gst_caps_set_simple (src_caps, "codec_data", GST_TYPE_BUFFER, codec_data_buffer, NULL);
++ gst_buffer_unref (codec_data_buffer);
++ }
++#endif
++
+ peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (aacparse), NULL);
+ if (peercaps && !gst_caps_can_intersect (src_caps, peercaps)) {
+ GST_DEBUG_OBJECT (GST_BASE_PARSE (aacparse)->srcpad,
+ "Caps can not intersect");
+ if (aacparse->header_type == DSPAAC_HEADER_ADTS) {
+ GST_DEBUG_OBJECT (GST_BASE_PARSE (aacparse)->srcpad,
+ "Input is ADTS, trying raw");
+ gst_caps_set_simple (src_caps, "stream-format", G_TYPE_STRING, "raw",
+ NULL);
+ if (gst_caps_can_intersect (src_caps, peercaps)) {
+ GstBuffer *codec_data_buffer;
+
+ GST_DEBUG_OBJECT (GST_BASE_PARSE (aacparse)->srcpad,
+ "Caps can intersect, we will drop the ADTS layer");
+ aacparse->output_header_type = DSPAAC_HEADER_NONE;
+
+ /* The codec_data data is according to AudioSpecificConfig,
+ ISO/IEC 14496-3, 1.6.2.1 */
+ codec_data_buffer = gst_buffer_new_and_alloc (2);
+ gst_buffer_fill (codec_data_buffer, 0, codec_data, 2);
+ gst_caps_set_simple (src_caps, "codec_data", GST_TYPE_BUFFER,
+ codec_data_buffer, NULL);
+ gst_buffer_unref (codec_data_buffer);
+ }
+ } else if (aacparse->header_type == DSPAAC_HEADER_NONE) {
+ GST_DEBUG_OBJECT (GST_BASE_PARSE (aacparse)->srcpad,
+ "Input is raw, trying ADTS");
+ gst_caps_set_simple (src_caps, "stream-format", G_TYPE_STRING, "adts",
+ NULL);
+ if (gst_caps_can_intersect (src_caps, peercaps)) {
+ GST_DEBUG_OBJECT (GST_BASE_PARSE (aacparse)->srcpad,
+ "Caps can intersect, we will prepend ADTS headers");
+ aacparse->output_header_type = DSPAAC_HEADER_ADTS;
+ }
+ }
+ }
+ if (peercaps)
+ gst_caps_unref (peercaps);
+
+ aacparse->last_parsed_channels = 0;
+ aacparse->last_parsed_sample_rate = 0;
+
+ GST_DEBUG_OBJECT (aacparse, "setting src caps: %" GST_PTR_FORMAT, src_caps);
+
+ res = gst_pad_set_caps (GST_BASE_PARSE (aacparse)->srcpad, src_caps);
+ gst_caps_unref (src_caps);
+ return res;
+
+ not_a_known_rate:
+ GST_ERROR_OBJECT (aacparse, "Not a known sample rate: %d",
+ aacparse->sample_rate);
+ gst_caps_unref (src_caps);
+ return FALSE;
+ }
+
+
+ /**
+ * gst_aac_parse_sink_setcaps:
+ * @parse: #GstBaseParse.
+ * @caps: GstCaps
+ *
+ * Implementation of "set_sink_caps" vmethod in #GstBaseParse class.
+ *
+ * Returns: TRUE on success.
+ */
+ static gboolean
+ gst_aac_parse_sink_setcaps (GstBaseParse * parse, GstCaps * caps)
+ {
+ GstAacParse *aacparse;
+ GstStructure *structure;
+ gchar *caps_str;
+ const GValue *value;
+
+ aacparse = GST_AAC_PARSE (parse);
+ structure = gst_caps_get_structure (caps, 0);
+ caps_str = gst_caps_to_string (caps);
+
+ GST_DEBUG_OBJECT (aacparse, "setcaps: %s", caps_str);
+ g_free (caps_str);
+
+ /* This is needed at least in case of RTP
+ * Parses the codec_data information to get ObjectType,
+ * number of channels and samplerate */
+ value = gst_structure_get_value (structure, "codec_data");
+ if (value) {
+ GstBuffer *buf = gst_value_get_buffer (value);
+
+ if (buf && gst_buffer_get_size (buf) >= 2) {
+ GstMapInfo map;
+ GstBitReader br;
+
+ if (!gst_buffer_map (buf, &map, GST_MAP_READ))
+ return FALSE;
+ gst_bit_reader_init (&br, map.data, map.size);
+ gst_aac_parse_read_audio_specific_config (aacparse, &br,
+ &aacparse->object_type, &aacparse->sample_rate, &aacparse->channels,
+ &aacparse->frame_samples);
+
+ aacparse->header_type = DSPAAC_HEADER_NONE;
+ aacparse->mpegversion = 4;
+ gst_buffer_unmap (buf, &map);
+
+ GST_DEBUG ("codec_data: object_type=%d, sample_rate=%d, channels=%d, "
+ "samples=%d", aacparse->object_type, aacparse->sample_rate,
+ aacparse->channels, aacparse->frame_samples);
+
+ /* arrange for metadata and get out of the way */
+ gst_aac_parse_set_src_caps (aacparse, caps);
+ if (aacparse->header_type == aacparse->output_header_type)
+ gst_base_parse_set_passthrough (parse, TRUE);
+
+ /* input is already correctly framed */
+ gst_base_parse_set_min_frame_size (parse, RAW_MAX_SIZE);
+ } else {
+ return FALSE;
+ }
+
+ /* caps info overrides */
+ gst_structure_get_int (structure, "rate", &aacparse->sample_rate);
+ gst_structure_get_int (structure, "channels", &aacparse->channels);
+ } else {
+ const gchar *stream_format =
+ gst_structure_get_string (structure, "stream-format");
+
+ if (g_strcmp0 (stream_format, "raw") == 0) {
+ GST_ERROR_OBJECT (parse, "Need codec_data for raw AAC");
+ return FALSE;
+ } else {
+ aacparse->sample_rate = 0;
+ aacparse->channels = 0;
+ aacparse->header_type = DSPAAC_HEADER_NOT_PARSED;
+ gst_base_parse_set_passthrough (parse, FALSE);
+ }
+ }
+ return TRUE;
+ }
+
+
+ /**
+ * gst_aac_parse_adts_get_frame_len:
+ * @data: block of data containing an ADTS header.
+ *
+ * This function calculates ADTS frame length from the given header.
+ *
+ * Returns: size of the ADTS frame.
+ */
+ static inline guint
+ gst_aac_parse_adts_get_frame_len (const guint8 * data)
+ {
+ return ((data[3] & 0x03) << 11) | (data[4] << 3) | ((data[5] & 0xe0) >> 5);
+ }
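+
+ /* For reference: aac_frame_length is the 13-bit field spanning ADTS
+ header bytes 3-5 (the low 2 bits of byte 3, all of byte 4 and the top
+ 3 bits of byte 5), which is exactly what is reassembled above. */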
+
+
+ /**
+ * gst_aac_parse_check_adts_frame:
+ * @aacparse: #GstAacParse.
+ * @data: Data to be checked.
+ * @avail: Amount of data passed.
+ * @framesize: If valid ADTS frame was found, this will be set to tell the
+ * found frame size in bytes.
+ * @needed_data: If frame was not found, this may be set to tell how much
+ * more data is needed in the next round to detect the frame
+ * reliably. This may happen when a frame header candidate
+ * is found but it cannot be guaranteed to be the header without
+ * peeking the following data.
+ *
+ * Check if the given data contains an ADTS frame. The algorithm
+ * examines the ADTS frame header and calculates the frame size. Also, another
+ * consecutive ADTS frame header needs to be present after the found frame.
+ * Otherwise the data is not considered a valid ADTS frame. However, this
+ * "extra check" is omitted when EOS has been received. In this case it is
+ * enough if data[0] contains a valid ADTS header.
+ *
+ * This function may set the #needed_data to indicate that a possible frame
+ * candidate has been found, but more data (#needed_data bytes) is needed to
+ * be absolutely sure. When this situation occurs, FALSE will be returned.
+ *
+ * When a valid frame is detected, this function will use
+ * gst_base_parse_set_min_frame_size() from the #GstBaseParse class
+ * to set the bytes needed for the next frame. This way the next data chunk
+ * is already of the correct size.
+ *
+ * Returns: TRUE if the given data contains a valid ADTS header.
+ */
+ static gboolean
+ gst_aac_parse_check_adts_frame (GstAacParse * aacparse,
+ const guint8 * data, const guint avail, gboolean drain,
+ guint * framesize, guint * needed_data)
+ {
+ guint crc_size;
+
+ *needed_data = 0;
+
+ /* Absolute minimum to perform the ADTS syncword,
+ layer and sampling frequency tests */
+ if (G_UNLIKELY (avail < 3)) {
+ *needed_data = 3;
+ return FALSE;
+ }
+
+ /* Syncword and layer tests */
+ if ((data[0] == 0xff) && ((data[1] & 0xf6) == 0xf0)) {
+
+ /* Sampling frequency test */
+ if (G_UNLIKELY ((data[2] & 0x3C) >> 2 == 15))
+ return FALSE;
+
+ /* This looks like an ADTS frame header but
+ we need at least 6 bytes to proceed */
+ if (G_UNLIKELY (avail < 6)) {
+ *needed_data = 6;
+ return FALSE;
+ }
+
+ *framesize = gst_aac_parse_adts_get_frame_len (data);
+
+ /* If frame has CRC, it needs 2 bytes
+ for it at the end of the header */
+ crc_size = (data[1] & 0x01) ? 0 : 2;
+
+ /* CRC size test */
+ if (*framesize < 7 + crc_size) {
+ *needed_data = 7 + crc_size;
+ return FALSE;
+ }
+
+ /* In EOS mode this is enough. No need to examine the data further.
+ We also relax the check when we have sync, on the assumption that
+ if we're not looking at random data, we have a much higher chance
+ to get the correct sync, and this avoids losing two frames when
+ a single bit corruption happens. */
+ if (drain || !GST_BASE_PARSE_LOST_SYNC (aacparse)) {
+ return TRUE;
+ }
+
+ if (*framesize + ADTS_MAX_SIZE > avail) {
+ /* We have found a possible frame header candidate, but can't be
+ sure since we don't have enough data to check the next frame */
+ GST_DEBUG ("NEED MORE DATA: we need %d, available %d",
+ *framesize + ADTS_MAX_SIZE, avail);
+ *needed_data = *framesize + ADTS_MAX_SIZE;
+ gst_base_parse_set_min_frame_size (GST_BASE_PARSE (aacparse),
+ *framesize + ADTS_MAX_SIZE);
+ return FALSE;
+ }
+
+ if ((data[*framesize] == 0xff) && ((data[*framesize + 1] & 0xf6) == 0xf0)) {
+ guint nextlen = gst_aac_parse_adts_get_frame_len (data + (*framesize));
+
+ GST_LOG ("ADTS frame found, len: %d bytes", *framesize);
+ gst_base_parse_set_min_frame_size (GST_BASE_PARSE (aacparse),
+ nextlen + ADTS_MAX_SIZE);
+ return TRUE;
+ }
+ }
+ return FALSE;
+ }
+
+ static gboolean
+ gst_aac_parse_latm_get_value (GstAacParse * aacparse, GstBitReader * br,
+ guint32 * value)
+ {
+ guint8 bytes, i, byte;
+
+ *value = 0;
+ if (!gst_bit_reader_get_bits_uint8 (br, &bytes, 2))
+ return FALSE;
+ for (i = 0; i <= bytes; ++i) {
+ *value <<= 8;
+ if (!gst_bit_reader_get_bits_uint8 (br, &byte, 8))
+ return FALSE;
+ *value += byte;
+ }
+ return TRUE;
+ }
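+
+ /* This implements LATM's LatmGetValue(): a 2-bit bytesForValue field
+ followed by (bytesForValue + 1) value bytes, most significant byte
+ first. */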
+
+ static gboolean
+ gst_aac_parse_get_audio_object_type (GstAacParse * aacparse, GstBitReader * br,
+ guint8 * audio_object_type)
+ {
+ if (!gst_bit_reader_get_bits_uint8 (br, audio_object_type, 5))
+ return FALSE;
+ if (*audio_object_type == 31) {
+ if (!gst_bit_reader_get_bits_uint8 (br, audio_object_type, 6))
+ return FALSE;
+ *audio_object_type += 32;
+ }
+ GST_LOG_OBJECT (aacparse, "audio object type %u", *audio_object_type);
+ return TRUE;
+ }
+
+ static gboolean
+ gst_aac_parse_get_audio_sample_rate (GstAacParse * aacparse, GstBitReader * br,
+ gint * sample_rate)
+ {
+ guint8 sampling_frequency_index;
+ if (!gst_bit_reader_get_bits_uint8 (br, &sampling_frequency_index, 4))
+ return FALSE;
+ GST_LOG_OBJECT (aacparse, "sampling_frequency_index: %u",
+ sampling_frequency_index);
+ if (sampling_frequency_index == 0xf) {
+ guint32 sampling_rate;
+ if (!gst_bit_reader_get_bits_uint32 (br, &sampling_rate, 24))
+ return FALSE;
+ *sample_rate = sampling_rate;
+ } else {
+ *sample_rate = loas_sample_rate_table[sampling_frequency_index];
+ if (!*sample_rate)
+ return FALSE;
+ }
+ aacparse->last_parsed_sample_rate = *sample_rate;
+ return TRUE;
+ }
+
+ /* See table 1.13 in ISO/IEC 14496-3 */
+ static gboolean
+ gst_aac_parse_read_audio_specific_config (GstAacParse * aacparse,
+ GstBitReader * br, gint * object_type, gint * sample_rate, gint * channels,
+ gint * frame_samples)
+ {
+ guint8 audio_object_type;
+ guint8 G_GNUC_UNUSED extension_audio_object_type;
+ guint8 channel_configuration, extension_channel_configuration;
+ gboolean G_GNUC_UNUSED sbr = FALSE, ps = FALSE;
+
+ if (!gst_aac_parse_get_audio_object_type (aacparse, br, &audio_object_type))
+ return FALSE;
+ if (object_type)
+ *object_type = audio_object_type;
+
+ if (!gst_aac_parse_get_audio_sample_rate (aacparse, br, sample_rate))
+ return FALSE;
+
+ if (!gst_bit_reader_get_bits_uint8 (br, &channel_configuration, 4))
+ return FALSE;
+ *channels = loas_channels_table[channel_configuration];
+ GST_LOG_OBJECT (aacparse, "channel_configuration: %d", channel_configuration);
+ if (!*channels)
+ return FALSE;
+
+ if (audio_object_type == 5 || audio_object_type == 29) {
+ extension_audio_object_type = 5;
+ sbr = TRUE;
+ if (audio_object_type == 29) {
+ ps = TRUE;
+ /* Parametric stereo. If we have a one-channel configuration, we can
+ * override it to stereo */
+ if (*channels == 1)
+ *channels = 2;
+ }
+
+ GST_LOG_OBJECT (aacparse,
+ "Audio object type 5 or 29, so rereading sampling rate (was %d)...",
+ *sample_rate);
+ if (!gst_aac_parse_get_audio_sample_rate (aacparse, br, sample_rate))
+ return FALSE;
+
+ if (!gst_aac_parse_get_audio_object_type (aacparse, br, &audio_object_type))
+ return FALSE;
+
+ if (audio_object_type == 22) {
+ /* extension channel configuration */
+ if (!gst_bit_reader_get_bits_uint8 (br, &extension_channel_configuration,
+ 4))
+ return FALSE;
+ GST_LOG_OBJECT (aacparse, "extension channel_configuration: %d",
+ extension_channel_configuration);
+ *channels = loas_channels_table[extension_channel_configuration];
+ if (!*channels)
+ return FALSE;
+ }
+ } else {
+ extension_audio_object_type = 0;
+ }
+
+ GST_INFO_OBJECT (aacparse, "Parsed AudioSpecificConfig: %d Hz, %d channels",
+ *sample_rate, *channels);
+
+ if (frame_samples && audio_object_type == 23) {
+ guint8 frame_flag;
+ /* Read the Decoder Configuration (GASpecificConfig) if present */
+ /* We only care about the first bit to know what the number of samples
+ * in a frame is */
+ if (!gst_bit_reader_get_bits_uint8 (br, &frame_flag, 1))
+ return FALSE;
+ *frame_samples = frame_flag ? 960 : 1024;
+ }
+
+ /* There's LOTS of stuff next, but we ignore it for now as we have
+ what we want (sample rate and number of channels) */
+ GST_DEBUG_OBJECT (aacparse,
+ "Need more code to parse humongous LOAS data, currently ignored");
+ aacparse->last_parsed_channels = *channels;
+ return TRUE;
+ }
+
+
+ static gboolean
+ gst_aac_parse_read_loas_config (GstAacParse * aacparse, const guint8 * data,
+ guint avail, gint * sample_rate, gint * channels, gint * version)
+ {
+ GstBitReader br;
+ guint8 u8, v, vA;
+
+ /* No version in the bitstream, but the spec has LOAS in the MPEG-4 section */
+ if (version)
+ *version = 4;
+
+ gst_bit_reader_init (&br, data, avail);
+
+ /* skip sync word (11 bits) and size (13 bits) */
+ if (!gst_bit_reader_skip (&br, 11 + 13))
+ return FALSE;
+
+ /* First bit is "use last config" */
+ if (!gst_bit_reader_get_bits_uint8 (&br, &u8, 1))
+ return FALSE;
+ if (u8) {
+ GST_LOG_OBJECT (aacparse, "Frame uses previous config");
+ if (!aacparse->last_parsed_sample_rate || !aacparse->last_parsed_channels) {
+ GST_DEBUG_OBJECT (aacparse,
+ "No previous config to use. We'll look for more data.");
+ return FALSE;
+ }
+ *sample_rate = aacparse->last_parsed_sample_rate;
+ *channels = aacparse->last_parsed_channels;
+ return TRUE;
+ }
+
+ GST_DEBUG_OBJECT (aacparse, "Frame contains new config");
+
+ /* audioMuxVersion */
+ if (!gst_bit_reader_get_bits_uint8 (&br, &v, 1))
+ return FALSE;
+ if (v) {
+ /* audioMuxVersionA */
+ if (!gst_bit_reader_get_bits_uint8 (&br, &vA, 1))
+ return FALSE;
+ } else
+ vA = 0;
+
+ GST_LOG_OBJECT (aacparse, "v %d, vA %d", v, vA);
+ if (vA == 0) {
+ guint8 same_time, subframes, num_program, prog;
+ if (v == 1) {
+ guint32 value;
+ /* taraBufferFullness */
+ if (!gst_aac_parse_latm_get_value (aacparse, &br, &value))
+ return FALSE;
+ }
+ if (!gst_bit_reader_get_bits_uint8 (&br, &same_time, 1))
+ return FALSE;
+ if (!gst_bit_reader_get_bits_uint8 (&br, &subframes, 6))
+ return FALSE;
+ if (!gst_bit_reader_get_bits_uint8 (&br, &num_program, 4))
+ return FALSE;
+ GST_LOG_OBJECT (aacparse, "same_time %d, subframes %d, num_program %d",
+ same_time, subframes, num_program);
+
+ for (prog = 0; prog <= num_program; ++prog) {
+ guint8 num_layer, layer;
+ if (!gst_bit_reader_get_bits_uint8 (&br, &num_layer, 3))
+ return FALSE;
+ GST_LOG_OBJECT (aacparse, "Program %d: %d layers", prog, num_layer);
+
+ for (layer = 0; layer <= num_layer; ++layer) {
+ guint8 use_same_config;
+ if (prog == 0 && layer == 0) {
+ use_same_config = 0;
+ } else {
+ if (!gst_bit_reader_get_bits_uint8 (&br, &use_same_config, 1))
+ return FALSE;
+ }
+ if (!use_same_config) {
+ if (v == 0) {
+ if (!gst_aac_parse_read_audio_specific_config (aacparse, &br, NULL,
+ sample_rate, channels, NULL))
+ return FALSE;
+ } else {
+ guint32 asc_len;
+ if (!gst_aac_parse_latm_get_value (aacparse, &br, &asc_len))
+ return FALSE;
+ if (!gst_aac_parse_read_audio_specific_config (aacparse, &br, NULL,
+ sample_rate, channels, NULL))
+ return FALSE;
+ if (!gst_bit_reader_skip (&br, asc_len))
+ return FALSE;
+ }
+ }
+ }
+ }
+ GST_LOG_OBJECT (aacparse, "More data ignored");
+ } else {
+ GST_WARNING_OBJECT (aacparse, "Spec says \"TBD\"...");
+ return FALSE;
+ }
+ return TRUE;
+ }
+
+ /**
+ * gst_aac_parse_loas_get_frame_len:
+ * @data: block of data containing a LOAS header.
+ *
+ * This function calculates LOAS frame length from the given header.
+ *
+ * Returns: size of the LOAS frame.
+ */
+ static inline guint
+ gst_aac_parse_loas_get_frame_len (const guint8 * data)
+ {
+ return (((data[1] & 0x1f) << 8) | data[2]) + 3;
+ }
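+
+ /* audioMuxLengthBytes is the 13-bit field formed by the low 5 bits of
+ byte 1 and all of byte 2; it counts the bytes following the 3-byte
+ AudioSyncStream header, hence the "+ 3" above. */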
+
+
+ /**
+ * gst_aac_parse_check_loas_frame:
+ * @aacparse: #GstAacParse.
+ * @data: Data to be checked.
+ * @avail: Amount of data passed.
+ * @framesize: If valid LOAS frame was found, this will be set to tell the
+ * found frame size in bytes.
+ * @needed_data: If frame was not found, this may be set to tell how much
+ * more data is needed in the next round to detect the frame
+ * reliably. This may happen when a frame header candidate
+ * is found but it cannot be guaranteed to be the header without
+ * peeking the following data.
+ *
+ * Check if the given data contains a LOAS frame. The algorithm
+ * examines the LOAS frame header and calculates the frame size. Also, another
+ * consecutive LOAS frame header needs to be present after the found frame.
+ * Otherwise the data is not considered a valid LOAS frame. However, this
+ * "extra check" is omitted when EOS has been received. In this case it is
+ * enough if data[0] contains a valid LOAS header.
+ *
+ * This function may set the #needed_data to indicate that a possible frame
+ * candidate has been found, but more data (#needed_data bytes) is needed to
+ * be absolutely sure. When this situation occurs, FALSE will be returned.
+ *
+ * When a valid frame is detected, this function will use
+ * gst_base_parse_set_min_frame_size() from the #GstBaseParse class
+ * to set the bytes needed for the next frame. This way the next data chunk
+ * is already of the correct size.
+ *
+ * LOAS can have three different formats, if I read the spec correctly. Only
+ * one of them is supported here, as the two samples I have use this one.
+ *
+ * Returns: TRUE if the given data contains a valid LOAS header.
+ */
+ static gboolean
+ gst_aac_parse_check_loas_frame (GstAacParse * aacparse,
+ const guint8 * data, const guint avail, gboolean drain,
+ guint * framesize, guint * needed_data)
+ {
+ *needed_data = 0;
+
+ /* 3 byte header */
+ if (G_UNLIKELY (avail < 3)) {
+ *needed_data = 3;
+ return FALSE;
+ }
+
+ if ((data[0] == 0x56) && ((data[1] & 0xe0) == 0xe0)) {
+ *framesize = gst_aac_parse_loas_get_frame_len (data);
+ GST_DEBUG_OBJECT (aacparse, "Found possible %u byte LOAS frame",
+ *framesize);
+
+ /* In EOS mode this is enough. No need to examine the data further.
+ We also relax the check when we have sync, on the assumption that
+ if we're not looking at random data, we have a much higher chance
+ to get the correct sync, and this avoids losing two frames when
+ a single bit corruption happens. */
+ if (drain || !GST_BASE_PARSE_LOST_SYNC (aacparse)) {
+ return TRUE;
+ }
+
+ if (*framesize + LOAS_MAX_SIZE > avail) {
+ /* We have found a possible frame header candidate, but can't be
+ sure since we don't have enough data to check the next frame */
+ GST_DEBUG ("NEED MORE DATA: we need %d, available %d",
+ *framesize + LOAS_MAX_SIZE, avail);
+ *needed_data = *framesize + LOAS_MAX_SIZE;
+ gst_base_parse_set_min_frame_size (GST_BASE_PARSE (aacparse),
+ *framesize + LOAS_MAX_SIZE);
+ return FALSE;
+ }
+
+ if ((data[*framesize] == 0x56) && ((data[*framesize + 1] & 0xe0) == 0xe0)) {
+ guint nextlen = gst_aac_parse_loas_get_frame_len (data + (*framesize));
+
+ GST_LOG ("LOAS frame found, len: %d bytes", *framesize);
+ gst_base_parse_set_min_frame_size (GST_BASE_PARSE (aacparse),
+ nextlen + LOAS_MAX_SIZE);
+ return TRUE;
+ } else {
+ GST_DEBUG_OBJECT (aacparse, "That was a false positive");
+ }
+ }
+ return FALSE;
+ }
+
+ /* caller ensure sufficient data */
+ static inline void
+ gst_aac_parse_parse_adts_header (GstAacParse * aacparse, const guint8 * data,
+ gint * rate, gint * channels, gint * object, gint * version)
+ {
+
+ if (rate) {
+ gint sr_idx = (data[2] & 0x3c) >> 2;
+
+ *rate = gst_codec_utils_aac_get_sample_rate_from_index (sr_idx);
+ }
+ if (channels) {
+ *channels = ((data[2] & 0x01) << 2) | ((data[3] & 0xc0) >> 6);
+ if (*channels == 7)
+ *channels = 8;
+ }
+
+ if (version)
+ *version = (data[1] & 0x08) ? 2 : 4;
+ if (object)
+ *object = ((data[2] & 0xc0) >> 6) + 1;
+ }
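+
+ /* Field positions used above, after the 12-bit syncword: the ID bit
+ (set for MPEG-2) is bit 3 of byte 1; byte 2 carries the 2-bit
+ profile/object type, the 4-bit sampling_frequency_index and the MSB of
+ the 3-bit channel_configuration, whose low 2 bits sit at the top of
+ byte 3. */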
+
+ /**
+ * gst_aac_parse_detect_stream:
+ * @aacparse: #GstAacParse.
+ * @data: A block of data that needs to be examined for stream characteristics.
+ * @avail: Size of the given datablock.
+ * @framesize: If valid stream was found, this will be set to tell the
+ * first frame size in bytes.
+ * @skipsize: If valid stream was found, this will be set to tell the first
+ * audio frame position within the given data.
+ *
+ * Examines the given piece of data and tries to detect its format. It
+ * checks for an "ADIF" header (at the beginning of the clip) and an ADTS
+ * frame header. If the stream is detected, TRUE will be returned and #framesize
+ * is set to indicate the found frame size. Additionally, #skipsize might
+ * be set to indicate the number of bytes that need to be skipped, a.k.a. the
+ * position of the frame inside given data chunk.
+ *
+ * Returns: TRUE on success.
+ */
+ static gboolean
+ gst_aac_parse_detect_stream (GstAacParse * aacparse,
+ const guint8 * data, const guint avail, gboolean drain,
+ guint * framesize, gint * skipsize)
+ {
+ gboolean found = FALSE;
+ guint need_data_adts = 0, need_data_loas;
+ guint i = 0;
+
+ GST_DEBUG_OBJECT (aacparse, "Parsing header data");
+
+ /* FIXME: No need to check for ADIF if we are not in the beginning of the
+ stream */
+
+ /* Can we even parse the header? */
+ if (avail < MAX (ADTS_MAX_SIZE, LOAS_MAX_SIZE)) {
+ GST_DEBUG_OBJECT (aacparse, "Not enough data to check");
+ return FALSE;
+ }
+
+ for (i = 0; i < avail - 4; i++) {
+ if (((data[i] == 0xff) && ((data[i + 1] & 0xf6) == 0xf0)) ||
+ ((data[i] == 0x56) && ((data[i + 1] & 0xe0) == 0xe0)) ||
+ strncmp ((char *) data + i, "ADIF", 4) == 0) {
+ GST_DEBUG_OBJECT (aacparse, "Found signature at offset %u", i);
+ found = TRUE;
+
+ if (i) {
+ /* Trick: tell the parent class that we didn't find the frame yet,
+ but make it skip 'i' amount of bytes. Next time we arrive
+ here we have full frame in the beginning of the data. */
+ *skipsize = i;
+ return FALSE;
+ }
+ break;
+ }
+ }
+ if (!found) {
+ if (i)
+ *skipsize = i;
+ return FALSE;
+ }
+
+ if (gst_aac_parse_check_adts_frame (aacparse, data, avail, drain,
+ framesize, &need_data_adts)) {
+ gint rate, channels;
+
+ GST_INFO ("ADTS ID: %d, framesize: %d", (data[1] & 0x08) >> 3, *framesize);
+
+ gst_aac_parse_parse_adts_header (aacparse, data, &rate, &channels,
+ &aacparse->object_type, &aacparse->mpegversion);
+
+ if (!channels || !framesize) {
+ GST_DEBUG_OBJECT (aacparse, "impossible ADTS configuration");
+ return FALSE;
+ }
+
+ aacparse->header_type = DSPAAC_HEADER_ADTS;
+ gst_base_parse_set_frame_rate (GST_BASE_PARSE (aacparse), rate,
+ aacparse->frame_samples, 2, 2);
+
+ GST_DEBUG ("ADTS: samplerate %d, channels %d, objtype %d, version %d",
+ rate, channels, aacparse->object_type, aacparse->mpegversion);
+
+ gst_base_parse_set_syncable (GST_BASE_PARSE (aacparse), TRUE);
+
+ return TRUE;
+ }
+
+ if (gst_aac_parse_check_loas_frame (aacparse, data, avail, drain,
+ framesize, &need_data_loas)) {
+ gint rate = 0, channels = 0;
+
+ GST_INFO ("LOAS, framesize: %d", *framesize);
+
+ aacparse->header_type = DSPAAC_HEADER_LOAS;
+
+ if (!gst_aac_parse_read_loas_config (aacparse, data, avail, &rate,
+ &channels, &aacparse->mpegversion)) {
+ /* This is pretty normal when skipping data at the start of
+ * random stream (MPEG-TS capture for example) */
+ GST_LOG_OBJECT (aacparse, "Error reading LOAS config");
+ return FALSE;
+ }
+
+ if (rate && channels) {
+ gst_base_parse_set_frame_rate (GST_BASE_PARSE (aacparse), rate,
+ aacparse->frame_samples, 2, 2);
+
+ /* Don't store the sample rate and channels yet -
+ * this is just format detection. */
+ GST_DEBUG ("LOAS: samplerate %d, channels %d, objtype %d, version %d",
+ rate, channels, aacparse->object_type, aacparse->mpegversion);
+ }
+
+ gst_base_parse_set_syncable (GST_BASE_PARSE (aacparse), TRUE);
+
+ return TRUE;
+ }
+
+ if (need_data_adts || need_data_loas) {
+ /* This tells the parent class not to skip any data */
+ *skipsize = 0;
+ return FALSE;
+ }
+
+ if (avail < ADIF_MAX_SIZE)
+ return FALSE;
+
+ if (memcmp (data + i, "ADIF", 4) == 0) {
+ const guint8 *adif;
+ int skip_size = 0;
+ int bitstream_type;
+ int sr_idx;
+ GstCaps *sinkcaps;
+
+ aacparse->header_type = DSPAAC_HEADER_ADIF;
+ aacparse->mpegversion = 4;
+
+ /* Skip the "ADIF" bytes */
+ adif = data + i + 4;
+
+ /* copyright string */
+ if (adif[0] & 0x80)
+ skip_size += 9; /* skip 9 bytes */
+
+ bitstream_type = adif[0 + skip_size] & 0x10;
+ aacparse->bitrate =
+ ((unsigned int) (adif[0 + skip_size] & 0x0f) << 19) |
+ ((unsigned int) adif[1 + skip_size] << 11) |
+ ((unsigned int) adif[2 + skip_size] << 3) |
+ (((unsigned int) adif[3 + skip_size] & 0xe0) >> 5);
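+ /* The final term is shifted down so the last 3 bits of the 23-bit
+ bitrate field (4 + 8 + 8 + 3 bits) land in the low bits of the
+ result instead of overlapping the previous byte's contribution. */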
+
+ /* CBR */
+ if (bitstream_type == 0) {
+ #if 0
+ /* Buffer fullness parsing. Currently not needed... */
+ guint num_elems = 0;
+ guint fullness = 0;
+
+ num_elems = (adif[3 + skip_size] & 0x1e);
+ GST_INFO ("ADIF num_config_elems: %d", num_elems);
+
+ fullness = ((unsigned int) (adif[3 + skip_size] & 0x01) << 19) |
+ ((unsigned int) adif[4 + skip_size] << 11) |
+ ((unsigned int) adif[5 + skip_size] << 3) |
+ ((unsigned int) (adif[6 + skip_size] & 0xe0) >> 5);
+
+ GST_INFO ("ADIF buffer fullness: %d", fullness);
+ #endif
+ aacparse->object_type = ((adif[6 + skip_size] & 0x01) << 1) |
+ ((adif[7 + skip_size] & 0x80) >> 7);
+ sr_idx = (adif[7 + skip_size] & 0x78) >> 3;
+ }
+ /* VBR */
+ else {
+ aacparse->object_type = (adif[4 + skip_size] & 0x18) >> 3;
+ sr_idx = ((adif[4 + skip_size] & 0x07) << 1) |
+ ((adif[5 + skip_size] & 0x80) >> 7);
+ }
+
+ /* FIXME: This gives totally wrong results. Duration calculation cannot
+ be based on this */
+ aacparse->sample_rate =
+ gst_codec_utils_aac_get_sample_rate_from_index (sr_idx);
+
+ /* baseparse is not given any fps,
+ * so it will give up on timestamps, seeking, etc */
+
+ /* FIXME: Can we assume this? */
+ aacparse->channels = 2;
+
+ GST_INFO ("ADIF: br=%d, samplerate=%d, objtype=%d",
+ aacparse->bitrate, aacparse->sample_rate, aacparse->object_type);
+
+ gst_base_parse_set_min_frame_size (GST_BASE_PARSE (aacparse), 512);
+
+ /* arrange for metadata and get out of the way */
+ sinkcaps = gst_pad_get_current_caps (GST_BASE_PARSE_SINK_PAD (aacparse));
+ gst_aac_parse_set_src_caps (aacparse, sinkcaps);
+ if (sinkcaps)
+ gst_caps_unref (sinkcaps);
+
+ /* not syncable, not easily seekable (unless we push data from the start) */
+ gst_base_parse_set_syncable (GST_BASE_PARSE_CAST (aacparse), FALSE);
+ gst_base_parse_set_passthrough (GST_BASE_PARSE_CAST (aacparse), TRUE);
+ gst_base_parse_set_average_bitrate (GST_BASE_PARSE_CAST (aacparse), 0);
+
+ *framesize = avail;
+ return TRUE;
+ }
+
+ /* This should never happen */
+ return FALSE;
+ }
+
+ /**
+ * gst_aac_parse_get_audio_profile_object_type
+ * @aacparse: #GstAacParse.
+ *
+ * Gets the MPEG-2 profile or the MPEG-4 object type value corresponding to the
+ * mpegversion and profile of @aacparse's src pad caps, according to the
+ * values defined by table 1.A.11 in ISO/IEC 14496-3.
+ *
+ * Returns: the profile or object type value corresponding to @aacparse's src
+ * pad caps, if such a value exists; otherwise G_MAXUINT8.
+ */
+ static guint8
+ gst_aac_parse_get_audio_profile_object_type (GstAacParse * aacparse)
+ {
+ GstCaps *srccaps;
+ GstStructure *srcstruct;
+ const gchar *profile;
+ guint8 ret;
+
+ srccaps = gst_pad_get_current_caps (GST_BASE_PARSE_SRC_PAD (aacparse));
+ if (G_UNLIKELY (srccaps == NULL)) {
+ return G_MAXUINT8;
+ }
+
+ srcstruct = gst_caps_get_structure (srccaps, 0);
+ profile = gst_structure_get_string (srcstruct, "profile");
+ if (G_UNLIKELY (profile == NULL)) {
+ gst_caps_unref (srccaps);
+ return G_MAXUINT8;
+ }
+
+ if (g_strcmp0 (profile, "main") == 0) {
+ ret = (guint8) 0U;
+ } else if (g_strcmp0 (profile, "lc") == 0) {
+ ret = (guint8) 1U;
+ } else if (g_strcmp0 (profile, "ssr") == 0) {
+ ret = (guint8) 2U;
+ } else if (g_strcmp0 (profile, "ltp") == 0) {
+ if (G_LIKELY (aacparse->mpegversion == 4))
+ ret = (guint8) 3U;
+ else
+ ret = G_MAXUINT8; /* LTP Object Type allowed only for MPEG-4 */
+ } else {
+ ret = G_MAXUINT8;
+ }
+
+ gst_caps_unref (srccaps);
+ return ret;
+ }
+
+ /**
+ * gst_aac_parse_get_audio_channel_configuration
+ * @num_channels: number of audio channels.
+ *
+ * Gets the Channel Configuration value, as defined by table 1.19 in ISO/IEC
+ * 14496-3, for a given number of audio channels.
+ *
+ * Returns: the Channel Configuration value corresponding to @num_channels, if
+ * such a value exists; otherwise G_MAXUINT8.
+ */
+ static guint8
+ gst_aac_parse_get_audio_channel_configuration (gint num_channels)
+ {
+ if (num_channels >= 1 && num_channels <= 6) /* Mono up to & including 5.1 */
+ return (guint8) num_channels;
+ else if (num_channels == 8) /* 7.1 */
+ return (guint8) 7U;
+ else
+ return G_MAXUINT8;
+
+ /* FIXME: Add support for configurations 11, 12 and 14 from
+ * ISO/IEC 14496-3:2009/PDAM 4 based on the actual channel layout
+ */
+ }
+
+ /**
+ * gst_aac_parse_get_audio_sampling_frequency_index:
+ * @sample_rate: audio sampling rate.
+ *
+ * Gets the Sampling Frequency Index value, as defined by table 1.18 in ISO/IEC
+ * 14496-3, for a given sampling rate.
+ *
+ * Returns: the Sampling Frequency Index value corresponding to @sample_rate,
+ * if such a value exists; otherwise G_MAXUINT8.
+ */
+ static guint8
+ gst_aac_parse_get_audio_sampling_frequency_index (gint sample_rate)
+ {
+ switch (sample_rate) {
+ case 96000:
+ return 0x0U;
+ case 88200:
+ return 0x1U;
+ case 64000:
+ return 0x2U;
+ case 48000:
+ return 0x3U;
+ case 44100:
+ return 0x4U;
+ case 32000:
+ return 0x5U;
+ case 24000:
+ return 0x6U;
+ case 22050:
+ return 0x7U;
+ case 16000:
+ return 0x8U;
+ case 12000:
+ return 0x9U;
+ case 11025:
+ return 0xAU;
+ case 8000:
+ return 0xBU;
+ case 7350:
+ return 0xCU;
+ default:
+ return G_MAXUINT8;
+ }
+ }
+
+ /**
+ * gst_aac_parse_prepend_adts_headers:
+ * @aacparse: #GstAacParse.
+ * @frame: raw AAC frame to which ADTS headers shall be prepended.
+ *
+ * Prepends ADTS headers to a raw AAC audio frame.
+ *
+ * Returns: TRUE if ADTS headers were successfully prepended; FALSE otherwise.
+ */
+ static gboolean
+ gst_aac_parse_prepend_adts_headers (GstAacParse * aacparse,
+ GstBaseParseFrame * frame)
+ {
+ GstMemory *mem;
+ guint8 *adts_headers;
+ gsize buf_size;
+ gsize frame_size;
+ guint8 id, profile, channel_configuration, sampling_frequency_index;
+
+ id = (aacparse->mpegversion == 4) ? 0x0U : 0x1U;
+ profile = gst_aac_parse_get_audio_profile_object_type (aacparse);
+ if (profile == G_MAXUINT8) {
+ GST_ERROR_OBJECT (aacparse, "Unsupported audio profile or object type");
+ return FALSE;
+ }
+ channel_configuration =
+ gst_aac_parse_get_audio_channel_configuration (aacparse->channels);
+ if (channel_configuration == G_MAXUINT8) {
+ GST_ERROR_OBJECT (aacparse, "Unsupported number of channels");
+ return FALSE;
+ }
+ sampling_frequency_index =
+ gst_aac_parse_get_audio_sampling_frequency_index (aacparse->sample_rate);
+ if (sampling_frequency_index == G_MAXUINT8) {
+ GST_ERROR_OBJECT (aacparse, "Unsupported sampling frequency");
+ return FALSE;
+ }
+
+ frame->out_buffer = gst_buffer_copy (frame->buffer);
+ buf_size = gst_buffer_get_size (frame->out_buffer);
+ frame_size = buf_size + ADTS_HEADERS_LENGTH;
+
+ if (G_UNLIKELY (frame_size >= 0x4000)) {
+ GST_ERROR_OBJECT (aacparse, "Frame size is too big for ADTS");
+ return FALSE;
+ }
+
+ adts_headers = (guint8 *) g_malloc0 (ADTS_HEADERS_LENGTH);
+
+ /* Note: no error correction bits are added to the resulting ADTS frames */
+ adts_headers[0] = 0xFFU;
+ adts_headers[1] = 0xF0U | (id << 3) | 0x1U;
+ adts_headers[2] = (profile << 6) | (sampling_frequency_index << 2) | 0x2U |
+ ((channel_configuration & 0x4U) >> 2);
+ adts_headers[3] = ((channel_configuration & 0x3U) << 6) | 0x30U |
+ (guint8) (frame_size >> 11);
+ adts_headers[4] = (guint8) ((frame_size >> 3) & 0x00FF);
+ adts_headers[5] = (guint8) (((frame_size & 0x0007) << 5) + 0x1FU);
+ adts_headers[6] = 0xFCU;
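+
+ /* Layout of the 7 bytes just built: byte 0 plus the top nibble of
+ byte 1 hold the 12-bit syncword; the rest of byte 1 holds ID,
+ layer (00) and protection_absent (1, i.e. no CRC); byte 2 packs
+ profile, sampling_frequency_index, private_bit and the MSB of
+ channel_configuration; byte 3 the remaining channel bits,
+ original/copy, home, the copyright bits and the 2 MSBs of the 13-bit
+ frame length; bytes 4-5 the rest of the frame length plus the start
+ of an all-ones buffer fullness; byte 6 the remaining fullness bits
+ and a zero number_of_raw_data_blocks_in_frame. */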
+
+ mem = gst_memory_new_wrapped (0, adts_headers, ADTS_HEADERS_LENGTH, 0,
+ ADTS_HEADERS_LENGTH, adts_headers, g_free);
+ gst_buffer_prepend_memory (frame->out_buffer, mem);
+
+ return TRUE;
+ }
+
+ /**
+ * gst_aac_parse_handle_frame:
+ * @parse: #GstBaseParse.
+ * @frame: #GstBaseParseFrame.
+ * @skipsize: How much data parent class should skip in order to find the
+ * frame header.
+ *
+ * Implementation of "handle_frame" vmethod in #GstBaseParse class.
+ *
+ * Also determines frame overhead.
+ * ADTS streams have a 7 byte header in each frame. MP4 and ADIF streams don't have
+ * a per-frame header. LOAS has 3 bytes.
+ *
+ * We're making a couple of simplifying assumptions:
+ *
+ * 1. We count Program Configuration Elements rather than searching for them
+ * in the streams to discount them - the overhead is negligible.
+ *
+ * 2. We ignore CRC. This has a worst-case impact of (num_raw_blocks + 1)*16
+ * bits, which should still not be significant enough to warrant the
+ * additional parsing through the headers
+ *
+ * Returns: a #GstFlowReturn.
+ */
+ static GstFlowReturn
+ gst_aac_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize)
+ {
+ GstMapInfo map;
+ GstAacParse *aacparse;
+ gboolean ret = FALSE;
+ gboolean lost_sync;
+ GstBuffer *buffer;
+ guint framesize;
+ gint rate = 0, channels = 0;
+
+ aacparse = GST_AAC_PARSE (parse);
+ buffer = frame->buffer;
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+ *skipsize = -1;
+ lost_sync = GST_BASE_PARSE_LOST_SYNC (parse);
+
+ if (aacparse->header_type == DSPAAC_HEADER_ADIF ||
+ aacparse->header_type == DSPAAC_HEADER_NONE) {
+ /* There is nothing to parse */
+ framesize = map.size;
+ ret = TRUE;
+
+ } else if (aacparse->header_type == DSPAAC_HEADER_NOT_PARSED || lost_sync) {
+
+ ret = gst_aac_parse_detect_stream (aacparse, map.data, map.size,
+ GST_BASE_PARSE_DRAINING (parse), &framesize, skipsize);
+
+ } else if (aacparse->header_type == DSPAAC_HEADER_ADTS) {
+ guint needed_data = 1024;
+
+ ret = gst_aac_parse_check_adts_frame (aacparse, map.data, map.size,
+ GST_BASE_PARSE_DRAINING (parse), &framesize, &needed_data);
+
+ if (!ret && needed_data) {
+ GST_DEBUG ("buffer didn't contain valid frame");
+ *skipsize = 0;
+ gst_base_parse_set_min_frame_size (GST_BASE_PARSE (aacparse),
+ needed_data);
+ }
+
+ } else if (aacparse->header_type == DSPAAC_HEADER_LOAS) {
+ guint needed_data = 1024;
+
+ ret = gst_aac_parse_check_loas_frame (aacparse, map.data,
+ map.size, GST_BASE_PARSE_DRAINING (parse), &framesize, &needed_data);
+
+ if (!ret && needed_data) {
+ GST_DEBUG ("buffer didn't contain valid frame");
+ *skipsize = 0;
+ gst_base_parse_set_min_frame_size (GST_BASE_PARSE (aacparse),
+ needed_data);
+ }
+
+ } else {
+ GST_DEBUG ("buffer didn't contain valid frame");
+ gst_base_parse_set_min_frame_size (GST_BASE_PARSE (aacparse),
+ ADTS_MAX_SIZE);
+ }
+
+ if (G_UNLIKELY (!ret))
+ goto exit;
+
+ if (aacparse->header_type == DSPAAC_HEADER_ADTS) {
+ /* see above */
+ frame->overhead = 7;
+
+ gst_aac_parse_parse_adts_header (aacparse, map.data,
+ &rate, &channels, NULL, NULL);
+
+ GST_LOG_OBJECT (aacparse, "rate: %d, chans: %d", rate, channels);
+
+ if (G_UNLIKELY (rate != aacparse->sample_rate
+ || channels != aacparse->channels)) {
+ aacparse->sample_rate = rate;
+ aacparse->channels = channels;
+
+ if (!gst_aac_parse_set_src_caps (aacparse, NULL)) {
+ /* If linking fails, we need to return appropriate error */
+ ret = GST_FLOW_NOT_LINKED;
+ }
+
+ gst_base_parse_set_frame_rate (GST_BASE_PARSE (aacparse),
+ aacparse->sample_rate, aacparse->frame_samples, 2, 2);
+ }
++#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
++ if (aacparse->first_frame == TRUE) {
++ aacparse->first_frame = FALSE;
++
++ /* a failed estimate is not fatal, just log it */
++ if (!gst_aac_parse_estimate_duration (parse))
++ GST_WARNING_OBJECT (aacparse, "cannot estimate total duration");
++
++ gst_aac_parse_check_byte_seekability (parse);
++ }
++#endif
+ } else if (aacparse->header_type == DSPAAC_HEADER_LOAS) {
+ gboolean setcaps = FALSE;
+
+ /* see above */
+ frame->overhead = 3;
+
+ if (!gst_aac_parse_read_loas_config (aacparse, map.data, map.size, &rate,
+ &channels, NULL) || !rate || !channels) {
+ /* This is pretty normal when skipping data at the start of
+ * random stream (MPEG-TS capture for example) */
+ GST_DEBUG_OBJECT (aacparse, "Error reading LOAS config. Skipping.");
+ /* Since we don't fully parse the LOAS config, we don't know for sure
+ * how much to skip. Just skip 1 to end up to the next marker and
+ * resume parsing from there */
+ *skipsize = 1;
+ goto exit;
+ }
+
+ if (G_UNLIKELY (rate != aacparse->sample_rate
+ || channels != aacparse->channels)) {
+ aacparse->sample_rate = rate;
+ aacparse->channels = channels;
+ setcaps = TRUE;
+ GST_INFO_OBJECT (aacparse, "New LOAS config: %d Hz, %d channels", rate,
+ channels);
+ }
+
+ /* We want to set caps both at start, and when rate/channels change.
+ Since only some LOAS frames have that info, we may receive frames
+ before knowing about rate/channels. */
+ if (setcaps
+ || !gst_pad_has_current_caps (GST_BASE_PARSE_SRC_PAD (aacparse))) {
+ if (!gst_aac_parse_set_src_caps (aacparse, NULL)) {
+ /* If linking fails, we need to return appropriate error */
+ ret = GST_FLOW_NOT_LINKED;
+ }
+
+ gst_base_parse_set_frame_rate (GST_BASE_PARSE (aacparse),
+ aacparse->sample_rate, aacparse->frame_samples, 2, 2);
+ }
+ }
++#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
++ else if (aacparse->header_type == DSPAAC_HEADER_ADIF) {
++ /* to get a more accurate duration */
++ float estimated_duration = 0;
++ gint64 total_file_size;
++ gst_base_parse_get_upstream_size (parse, &total_file_size);
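++ /* duration = (file size in bits) / bitrate; the 1000 in the divisor
++ below and the 1000 applied in the set_duration call cancel out */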
++ estimated_duration =
++ ((total_file_size * 8) / (float) (aacparse->bitrate * 1000)) *
++ GST_SECOND;
++ gst_base_parse_set_duration (parse, GST_FORMAT_TIME,
++ estimated_duration * 1000, 0);
++ }
++#endif
+
+ if (aacparse->header_type == DSPAAC_HEADER_NONE
+ && aacparse->output_header_type == DSPAAC_HEADER_ADTS) {
+ if (!gst_aac_parse_prepend_adts_headers (aacparse, frame)) {
+ GST_ERROR_OBJECT (aacparse, "Failed to prepend ADTS headers to frame");
+ ret = GST_FLOW_ERROR;
+ }
+ }
+
+ exit:
+ gst_buffer_unmap (buffer, &map);
+
+ if (ret) {
+ /* found, skip if needed */
+ if (*skipsize > 0)
+ return GST_FLOW_OK;
+ *skipsize = 0;
+ } else {
+ if (*skipsize < 0)
+ *skipsize = 1;
+ }
+
+ if (ret && framesize <= map.size) {
+ return gst_base_parse_finish_frame (parse, frame, framesize);
+ }
+
+ return GST_FLOW_OK;
+ }
+
+ static GstFlowReturn
+ gst_aac_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
+ {
+ GstAacParse *aacparse = GST_AAC_PARSE (parse);
+
+ if (!aacparse->sent_codec_tag) {
+ GstTagList *taglist;
+ GstCaps *caps;
+
+ /* codec tag */
+ caps = gst_pad_get_current_caps (GST_BASE_PARSE_SRC_PAD (parse));
+ if (caps == NULL) {
+ if (GST_PAD_IS_FLUSHING (GST_BASE_PARSE_SRC_PAD (parse))) {
+ GST_INFO_OBJECT (parse, "Src pad is flushing");
+ return GST_FLOW_FLUSHING;
+ } else {
+ GST_INFO_OBJECT (parse, "Src pad is not negotiated!");
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+ }
+
+ taglist = gst_tag_list_new_empty ();
+ gst_pb_utils_add_codec_description_to_tag_list (taglist,
+ GST_TAG_AUDIO_CODEC, caps);
+ gst_caps_unref (caps);
+
+ gst_base_parse_merge_tags (parse, taglist, GST_TAG_MERGE_REPLACE);
+ gst_tag_list_unref (taglist);
+
+ /* also signals the end of first-frame processing */
+ aacparse->sent_codec_tag = TRUE;
+ }
+
+ /* As a special case, we can remove the ADTS framing and output raw AAC. */
+ if (aacparse->header_type == DSPAAC_HEADER_ADTS
+ && aacparse->output_header_type == DSPAAC_HEADER_NONE) {
+ guint header_size;
+ GstMapInfo map;
+ frame->out_buffer = gst_buffer_make_writable (frame->buffer);
+ frame->buffer = NULL;
+ gst_buffer_map (frame->out_buffer, &map, GST_MAP_READ);
+ header_size = (map.data[1] & 1) ? 7 : 9; /* optional CRC */
+ gst_buffer_unmap (frame->out_buffer, &map);
+ gst_buffer_resize (frame->out_buffer, header_size,
+ gst_buffer_get_size (frame->out_buffer) - header_size);
+ }
+
+ frame->flags |= GST_BASE_PARSE_FRAME_FLAG_CLIP;
+
+ return GST_FLOW_OK;
+ }
+
+
+ /**
+ * gst_aac_parse_start:
+ * @parse: #GstBaseParse.
+ *
+ * Implementation of "start" vmethod in #GstBaseParse class.
+ *
+ * Returns: TRUE if startup succeeded.
+ */
+ static gboolean
+ gst_aac_parse_start (GstBaseParse * parse)
+ {
+ GstAacParse *aacparse;
+
+ aacparse = GST_AAC_PARSE (parse);
+ GST_DEBUG ("start");
+ aacparse->frame_samples = 1024;
+ gst_base_parse_set_min_frame_size (GST_BASE_PARSE (aacparse), ADTS_MAX_SIZE);
+ aacparse->sent_codec_tag = FALSE;
+ aacparse->last_parsed_channels = 0;
+ aacparse->last_parsed_sample_rate = 0;
+ aacparse->object_type = 0;
+ aacparse->bitrate = 0;
+ aacparse->header_type = DSPAAC_HEADER_NOT_PARSED;
+ aacparse->output_header_type = DSPAAC_HEADER_NOT_PARSED;
+ aacparse->channels = 0;
+ aacparse->sample_rate = 0;
+ return TRUE;
+ }
+
+
+ /**
+ * gst_aac_parse_stop:
+ * @parse: #GstBaseParse.
+ *
+ * Implementation of "stop" vmethod in #GstBaseParse class.
+ *
+ * Returns: TRUE if stopping succeeded.
+ */
+ static gboolean
+ gst_aac_parse_stop (GstBaseParse * parse)
+ {
+ GST_DEBUG ("stop");
+ return TRUE;
+ }
+
+ static void
+ remove_fields (GstCaps * caps)
+ {
+ guint i, n;
+
+ n = gst_caps_get_size (caps);
+ for (i = 0; i < n; i++) {
+ GstStructure *s = gst_caps_get_structure (caps, i);
+
+ gst_structure_remove_field (s, "framed");
+ }
+ }
+
+ static void
+ add_conversion_fields (GstCaps * caps)
+ {
+ guint i, n;
+
+ n = gst_caps_get_size (caps);
+ for (i = 0; i < n; i++) {
+ GstStructure *s = gst_caps_get_structure (caps, i);
+
+ if (gst_structure_has_field (s, "stream-format")) {
+ const GValue *v = gst_structure_get_value (s, "stream-format");
+
+ if (G_VALUE_HOLDS_STRING (v)) {
+ const gchar *str = g_value_get_string (v);
+
+ if (strcmp (str, "adts") == 0 || strcmp (str, "raw") == 0) {
+ GValue va = G_VALUE_INIT;
+ GValue vs = G_VALUE_INIT;
+
+ g_value_init (&va, GST_TYPE_LIST);
+ g_value_init (&vs, G_TYPE_STRING);
+ g_value_set_string (&vs, "adts");
+ gst_value_list_append_value (&va, &vs);
+ g_value_set_string (&vs, "raw");
+ gst_value_list_append_value (&va, &vs);
+ gst_structure_set_value (s, "stream-format", &va);
+ g_value_unset (&va);
+ g_value_unset (&vs);
+ }
+ } else if (GST_VALUE_HOLDS_LIST (v)) {
+ gboolean contains_raw = FALSE;
+ gboolean contains_adts = FALSE;
+ guint m = gst_value_list_get_size (v), j;
+
+ for (j = 0; j < m; j++) {
+ const GValue *ve = gst_value_list_get_value (v, j);
+ const gchar *str;
+
+ if (G_VALUE_HOLDS_STRING (ve) && (str = g_value_get_string (ve))) {
+ if (strcmp (str, "adts") == 0)
+ contains_adts = TRUE;
+ else if (strcmp (str, "raw") == 0)
+ contains_raw = TRUE;
+ }
+ }
+
+ if (contains_adts || contains_raw) {
+ GValue va = G_VALUE_INIT;
+ GValue vs = G_VALUE_INIT;
+
+ g_value_init (&va, GST_TYPE_LIST);
+ g_value_init (&vs, G_TYPE_STRING);
+ g_value_copy (v, &va);
+
+ if (!contains_raw) {
+ g_value_set_string (&vs, "raw");
+ gst_value_list_append_value (&va, &vs);
+ }
+ if (!contains_adts) {
+ g_value_set_string (&vs, "adts");
+ gst_value_list_append_value (&va, &vs);
+ }
+
+ gst_structure_set_value (s, "stream-format", &va);
+
+ g_value_unset (&vs);
+ g_value_unset (&va);
+ }
+ }
+ }
+ }
+ }
+
+ static GstCaps *
+ gst_aac_parse_sink_getcaps (GstBaseParse * parse, GstCaps * filter)
+ {
+ GstCaps *peercaps, *templ;
+ GstCaps *res;
+
+ templ = gst_pad_get_pad_template_caps (GST_BASE_PARSE_SINK_PAD (parse));
+
+ if (filter) {
+ GstCaps *fcopy = gst_caps_copy (filter);
+ /* Remove the fields we convert */
+ remove_fields (fcopy);
+ add_conversion_fields (fcopy);
+ peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), fcopy);
+ gst_caps_unref (fcopy);
+ } else
+ peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), NULL);
+
+ if (peercaps) {
+ peercaps = gst_caps_make_writable (peercaps);
+ /* Remove the fields we convert */
+ remove_fields (peercaps);
+ add_conversion_fields (peercaps);
+
+ res = gst_caps_intersect_full (peercaps, templ, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (peercaps);
+ gst_caps_unref (templ);
+ } else {
+ res = templ;
+ }
+
+ if (filter) {
+ GstCaps *intersection;
+
+ intersection =
+ gst_caps_intersect_full (filter, res, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (res);
+ res = intersection;
+ }
+
+ return res;
+ }
+
+ static gboolean
+ gst_aac_parse_src_event (GstBaseParse * parse, GstEvent * event)
+ {
+ GstAacParse *aacparse = GST_AAC_PARSE (parse);
+
+ if (GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP) {
+ aacparse->last_parsed_channels = 0;
+ aacparse->last_parsed_sample_rate = 0;
+ }
++#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
++ GST_DEBUG ("Entering gst_aac_parse_src_event header type = %d",
++ aacparse->header_type);
++ if (aacparse->header_type == DSPAAC_HEADER_ADTS)
++ return gst_aac_parse_adts_src_eventfunc (parse, event);
++#endif
+ return GST_BASE_PARSE_CLASS (parent_class)->src_event (parse, event);
++
++}
++
++#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
++/**
++ * get_aac_parse_get_adts_frame_length:
++ * @data: buffer data containing an ADTS header.
++ * @offset: offset into @data where the header starts.
++ *
++ * Calculates the ADTS frame length from the header at @offset.
++ *
++ * Returns: the frame size in bytes, 0 if the header is larger than the
++ * frame, or -1 if there is no valid syncword at @offset.
++ */
++int
++get_aac_parse_get_adts_frame_length (const unsigned char *data, gint64 offset)
++{
++ const gint adts_header_length_no_crc = 7;
++ const gint adts_header_length_with_crc = 9;
++ gint frame_size = 0;
++ gint protection_absent;
++ gint head_size;
++
++ /* syncword check */
++ if ((data[offset + 0] != 0xff) || ((data[offset + 1] & 0xf6) != 0xf0)) {
++ GST_ERROR ("sync word check failed");
++ return -1;
++ }
++
++ /* protection_absent flag */
++ protection_absent = (data[offset + 1] & 0x01);
++
++ /* frame length */
++ frame_size =
++ (data[offset + 3] & 0x3) << 11 | data[offset + 4] << 3 |
++ data[offset + 5] >> 5;
++
++ /* header size: protection_absent is 0 if there is a CRC */
++ head_size =
++ protection_absent ? adts_header_length_no_crc :
++ adts_header_length_with_crc;
++ if (head_size > frame_size) {
++ GST_ERROR ("returning frame length 0 (frame_size %d < head_size %d)",
++ frame_size, head_size);
++ return 0;
++ }
++
++ return frame_size;
++}
++
++/**
++ * gst_aac_parse_estimate_duration:
++ * @parse: #GstBaseParse.
++ *
++ * Estimates the total stream duration from the frames at the start of
++ * the file.
++ *
++ * Returns: TRUE if the total duration could be estimated
++ */
++static gboolean
++gst_aac_parse_estimate_duration (GstBaseParse * parse)
++{
++ gboolean ret = FALSE;
++ GstFlowReturn res = GST_FLOW_OK;
++ gint64 pull_size = 0, file_size = 0, offset = 0, num_frames = 0, duration = 0;
++ guint sample_rate_index = 0, sample_rate = 0, channel = 0;
++  gint frame_size = 0;
++  guint frame_duration_us = 0, estimated_bitrate = 0;
++ guint lost_sync_count = 0;
++ GstClockTime estimated_duration = GST_CLOCK_TIME_NONE;
++ GstBuffer *buffer = NULL;
++ guint8 *buf = NULL;
++ gint i = 0;
++ GstPadMode pad_mode = GST_PAD_MODE_NONE;
++ GstAacParse *aacparse;
++ gint64 buffer_size = 0;
++ GstMapInfo map;
++
++ aacparse = GST_AAC_PARSE (parse);
++ GST_LOG_OBJECT (aacparse, "gst_aac_parse_estimate_duration enter");
++
++  /* duration can only be estimated when baseparse runs in pull mode */
++  gst_base_parse_get_pad_mode (parse, &pad_mode);
++  if (pad_mode != GST_PAD_MODE_PULL) {
++    GST_INFO_OBJECT (aacparse,
++        "aac parser is not in pull mode; cannot estimate duration");
++ return FALSE;
++ }
++
++ gst_base_parse_get_upstream_size (parse, &file_size);
++
++ if (file_size < ADIF_MAX_SIZE) {
++ GST_ERROR_OBJECT (aacparse, "file size is too short");
++ return FALSE;
++ }
++
++ pull_size = MIN (file_size, AAC_MAX_ESTIMATE_DURATION_BUF);
++
++ res = gst_pad_pull_range (parse->sinkpad, 0, pull_size, &buffer);
++ if (res != GST_FLOW_OK) {
++ GST_ERROR_OBJECT (aacparse, "gst_pad_pull_range failed!");
++ return FALSE;
++ }
++
++ gst_buffer_map (buffer, &map, GST_MAP_READ);
++ buf = map.data;
++ buffer_size = map.size;
++ if (buffer_size != pull_size) {
++ GST_ERROR_OBJECT (aacparse,
++ "We got different buffer_size(%" G_GINT64_FORMAT ") with pull_size(%"
++ G_GINT64_FORMAT ").", buffer_size, pull_size);
++ }
++
++  /* MODIFICATION : defensive code in case the actual buffer_size differs
++   * from pull_size; stop one byte early so buf[i + 1] stays in bounds */
++  for (i = 0; i + 1 < buffer_size; i++) {
++    if ((buf[i] == 0xff) && ((buf[i + 1] & 0xf6) == 0xf0)) {    /* aac sync word */
++ //guint profile = (buf[i+2] >> 6) & 0x3;
++ sample_rate_index = (buf[i + 2] >> 2) & 0xf;
++ sample_rate =
++ gst_aac_parse_get_sample_rate_from_index (sample_rate_index);
++      if (sample_rate == 0) {
++        GST_WARNING_OBJECT (aacparse, "invalid sample rate index %u",
++            sample_rate_index);
++        goto EXIT;
++      }
++ channel = (buf[i + 2] & 0x1) << 2 | (buf[i + 3] >> 6);
++
++ GST_INFO_OBJECT (aacparse, "found sync. aac fs=%d, ch=%d", sample_rate,
++ channel);
++
++      /* count the number of frames */
++      /* MODIFICATION : scan against the size we actually got (buffer_size,
++       * not pull_size) and keep a full 6-byte ADTS header in bounds */
++      while (i + offset + 6 <= buffer_size) {
++ frame_size = get_aac_parse_get_adts_frame_length (buf, i + offset);
++ if (frame_size == 0) {
++ GST_ERROR_OBJECT (aacparse,
++ "framesize error at offset %" G_GINT64_FORMAT, offset);
++ break;
++ } else if (frame_size == -1) {
++ offset++;
++          lost_sync_count++;    /* limit lost-sync scanning to 2 KiB */
++ if (lost_sync_count > (1024 * 2)) {
++ GST_WARNING_OBJECT (aacparse,
++ "lost_sync_count is larger than 2048");
++ goto EXIT;
++ }
++ } else {
++ offset += frame_size;
++ num_frames++;
++ lost_sync_count = 0;
++ }
++ } /* while */
++
++      /* if we pulled the whole file we can calculate the exact duration */
++      /* MODIFICATION : compare the size we actually got (buffer_size) */
++      if (buffer_size == file_size) {
++ gfloat duration_for_one_frame = 0;
++ GstClockTime calculated_duration = GST_CLOCK_TIME_NONE;
++
++        GST_INFO_OBJECT (aacparse,
++            "got the whole file (%" G_GINT64_FORMAT
++            " bytes); computing the accurate total duration instead of estimating",
++            pull_size);
++
++ duration_for_one_frame =
++ (gfloat) AAC_SAMPLE_PER_FRAME / (gfloat) sample_rate;
++ calculated_duration =
++ num_frames * duration_for_one_frame * 1000 * 1000 * 1000;
++
++ GST_INFO_OBJECT (aacparse, "duration_for_one_frame %f ms",
++ duration_for_one_frame);
++ GST_INFO_OBJECT (aacparse, "calculated duration = %" GST_TIME_FORMAT,
++ GST_TIME_ARGS (calculated_duration));
++ /* 0 means disable estimate */
++ gst_base_parse_set_duration (parse, GST_FORMAT_TIME,
++ calculated_duration, 0);
++
++      } else {
++        GST_INFO_OBJECT (aacparse,
++            "got %" G_GINT64_FORMAT " bytes of a %" G_GINT64_FORMAT
++            " byte file; cannot compute an accurate duration, estimating",
++            pull_size, file_size);
++ frame_duration_us =
++ (1024 * 1000000ll + (sample_rate - 1)) / sample_rate;
++ duration = num_frames * frame_duration_us;
++
++ if (duration == 0) {
++ GST_WARNING_OBJECT (aacparse, "Invalid duration");
++ goto EXIT;
++ }
++ estimated_bitrate =
++ (gint) ((gfloat) (offset * 8) / (gfloat) (duration / 1000));
++
++ if (estimated_bitrate == 0) {
++ GST_WARNING_OBJECT (aacparse, "Invalid estimated_bitrate");
++ goto EXIT;
++ }
++ estimated_duration =
++ (GstClockTime) ((file_size * 8) / (estimated_bitrate * 1000)) *
++ GST_SECOND;
++
++ GST_INFO_OBJECT (aacparse, "number of frame = %" G_GINT64_FORMAT,
++ num_frames);
++ GST_INFO_OBJECT (aacparse, "duration = %" G_GINT64_FORMAT,
++ duration / 1000000);
++ GST_INFO_OBJECT (aacparse, "byte = %" G_GINT64_FORMAT, offset);
++ GST_INFO_OBJECT (aacparse, "estimated bitrate = %d bps",
++ estimated_bitrate);
++ GST_INFO_OBJECT (aacparse, "estimated duration = %" GST_TIME_FORMAT,
++ GST_TIME_ARGS (estimated_duration));
++
++ gst_base_parse_set_average_bitrate (parse, estimated_bitrate * 1000);
++ /* set update_interval as duration(sec)/2 */
++ gst_base_parse_set_duration (parse, GST_FORMAT_TIME, estimated_duration,
++ (gint) (duration / 2));
++ }
++
++ break;
++ }
++ }
++ ret = TRUE;
++
++EXIT:
++ gst_buffer_unmap (buffer, &map);
++ gst_buffer_unref (buffer);
++ return ret;
++}
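++
++/* A worked sketch of the estimation arithmetic above (illustrative
++ * numbers): with sample_rate = 44100 and 1024 samples per frame,
++ *   frame_duration_us = (1024 * 1000000 + 44099) / 44100 = 23220 us,
++ * so 1000 counted frames give duration = 23220000 us (~23.2 s). If those
++ * frames span offset = 320000 bytes,
++ *   estimated_bitrate = 320000 * 8 / (23220000 / 1000) = 110 (kbps),
++ * and a 4000000-byte file is reported as
++ *   (4000000 * 8) / (110 * 1000) = 290 seconds of estimated duration. */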
++
++
++/* perform seek in push based mode:
++ find BYTE position to move to based on time and delegate to upstream
++*/
++static gboolean
++gst_aac_audio_parse_do_push_seek (GstBaseParse * parse,
++ GstPad * pad, GstEvent * event)
++{
++ GstAacParse *aacparse = GST_AAC_PARSE (parse);
++ gdouble rate;
++ GstFormat format;
++ GstSeekFlags flags;
++ GstSeekType cur_type, stop_type;
++ gint64 cur, stop;
++ gboolean res;
++ gint64 byte_cur;
++  gint64 estimate_byte;
++ gint32 frame_dur;
++ gint64 upstream_total_bytes = 0;
++ GstFormat fmt = GST_FORMAT_BYTES;
++
++ GST_INFO_OBJECT (parse, "doing aac push-based seek");
++
++ gst_event_parse_seek (event, &rate, &format, &flags, &cur_type, &cur,
++ &stop_type, &stop);
++
++ /* FIXME, always play to the end */
++ stop = -1;
++
++ /* only forward streaming and seeking is possible */
++ if (rate <= 0)
++ goto unsupported_seek;
++
++ if (cur == 0) {
++ /* handle rewind only */
++ cur_type = GST_SEEK_TYPE_SET;
++ byte_cur = 0;
++ stop_type = GST_SEEK_TYPE_NONE;
++ stop = -1;
++ flags |= GST_SEEK_FLAG_FLUSH;
++ } else {
++ /* handle normal seek */
++ cur_type = GST_SEEK_TYPE_SET;
++ stop_type = GST_SEEK_TYPE_NONE;
++ stop = -1;
++ flags |= GST_SEEK_FLAG_FLUSH;
++
++    estimate_byte = (cur / (1000 * 1000)) * aacparse->frame_byte;
++ if (aacparse->sample_rate > 0)
++ frame_dur = (aacparse->spf * 1000) / aacparse->sample_rate;
++ else
++ goto unsupported_seek;
++ if (frame_dur > 0)
++      byte_cur = estimate_byte / frame_dur;
++ else
++ goto unsupported_seek;
++
++ GST_INFO_OBJECT (parse, "frame_byte(%d) spf(%d) rate (%d) ",
++ aacparse->frame_byte, aacparse->spf, aacparse->sample_rate);
++ GST_INFO_OBJECT (parse,
++ "seek cur (%" G_GINT64_FORMAT ") = (%" GST_TIME_FORMAT ") ", cur,
++ GST_TIME_ARGS (cur));
++    GST_INFO_OBJECT (parse,
++        "estimate_byte(%" G_GINT64_FORMAT ") frame_dur (%d)", estimate_byte,
++        frame_dur);
++ }
++
++ /* obtain real upstream total bytes */
++ if (!gst_pad_peer_query_duration (parse->sinkpad, fmt, &upstream_total_bytes))
++ upstream_total_bytes = 0;
++  GST_INFO_OBJECT (aacparse,
++      "gst_pad_peer_query_duration - upstream_total_bytes (%" G_GINT64_FORMAT
++      ")", upstream_total_bytes);
++ aacparse->file_size = upstream_total_bytes;
++
++ if ((byte_cur == -1) || (byte_cur > aacparse->file_size)) {
++ GST_INFO_OBJECT (parse,
++ "[WEB-ERROR] seek cur (%" G_GINT64_FORMAT ") > file_size (%"
++ G_GINT64_FORMAT ") ", cur, aacparse->file_size);
++ goto abort_seek;
++ }
++
++ GST_INFO_OBJECT (parse,
++ "Pushing BYTE seek rate %g, " "start %" G_GINT64_FORMAT ", stop %"
++ G_GINT64_FORMAT, rate, byte_cur, stop);
++
++ if (!(flags & GST_SEEK_FLAG_KEY_UNIT)) {
++ GST_INFO_OBJECT (parse,
++ "Requested seek time: %" GST_TIME_FORMAT ", calculated seek offset: %"
++ G_GUINT64_FORMAT, GST_TIME_ARGS (cur), byte_cur);
++ }
++
++ /* BYTE seek event */
++ event =
++ gst_event_new_seek (rate, GST_FORMAT_BYTES, flags, cur_type, byte_cur,
++ stop_type, stop);
++ res = gst_pad_push_event (parse->sinkpad, event);
++
++ return res;
++
++ /* ERRORS */
++
++abort_seek:
++ {
++ GST_DEBUG_OBJECT (parse,
++ "could not determine byte position to seek to, " "seek aborted.");
++ return FALSE;
++ }
++
++unsupported_seek:
++ {
++ GST_DEBUG_OBJECT (parse, "unsupported seek, seek aborted.");
++ return FALSE;
++ }
++}
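++
++/* Worked example of the byte-position estimate above (illustrative
++ * numbers): seeking to cur = 10 s (10000000000 ns) with frame_byte = 417,
++ * spf = 1024 and sample_rate = 44100 gives
++ *   estimate_byte = (10000000000 / 1000000) * 417 = 4170000,
++ *   frame_dur = (1024 * 1000) / 44100 = 23 (ms per frame, truncated),
++ *   byte_cur = 4170000 / 23 = 181304 bytes,
++ * i.e. roughly 10 s / 23 ms = 434 frames of 417 bytes each. */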
++
++
++static guint
++gst_aac_parse_adts_get_fast_frame_len (const guint8 * data)
++{
++ int length;
++ if ((data[0] == 0xff) && ((data[1] & 0xf6) == 0xf0)) {
++ length =
++ ((data[3] & 0x03) << 11) | (data[4] << 3) | ((data[5] & 0xe0) >> 5);
++ } else {
++ length = 0;
++ }
++ return length;
++}
++
++/**
++ * gst_aac_parse_adts_src_eventfunc:
++ * @parse: #GstBaseParse
++ * @event: the #GstEvent being handled
++ *
++ * Builds a full AAC (ADTS) index table before baseparse handles a seek
++ * event.
++ *
++ * Returns: TRUE on success.
++ */
++static gboolean
++gst_aac_parse_adts_src_eventfunc (GstBaseParse * parse, GstEvent * event)
++{
++ gboolean handled = FALSE;
++ GstAacParse *aacparse = GST_AAC_PARSE (parse);
++
++ switch (GST_EVENT_TYPE (event)) {
++ case GST_EVENT_SEEK:
++ {
++ GstFlowReturn res = GST_FLOW_OK;
++ gint64 base_offset = 0, cur = 0;
++ gint32 frame_count = 1; /* do not add first frame because it is already in index table */
++ gint64 second_count = 0; /* initial 1 second */
++ gint64 total_file_size = 0, start_offset = 0;
++ GstClockTime current_ts = GST_CLOCK_TIME_NONE;
++ GstPadMode pad_mode = GST_PAD_MODE_NONE;
++
++      /* the index table can only be built when baseparse runs in pull mode */
++ gst_base_parse_get_pad_mode (parse, &pad_mode);
++ if (pad_mode != GST_PAD_MODE_PULL) {
++ gboolean ret = FALSE;
++ GstPad *srcpad = parse->srcpad;
++ GST_INFO_OBJECT (aacparse, "aac parser is PUSH MODE.");
++        /* delegate to a push-based byte seek when upstream is byte seekable */
++        if (aacparse->byte_seekable) {
++ ret = gst_aac_audio_parse_do_push_seek (parse, srcpad, event);
++ if (!ret) {
++ GST_INFO_OBJECT (aacparse, "PUSH mode seek() failed, Trying base seek()");
++ goto aac_seek_null_exit;
++ }
++ return ret;
++ }
++ GST_INFO_OBJECT (aacparse, "not support byte seek");
++ goto aac_seek_null_exit;
++ }
++
++ gst_base_parse_get_upstream_size (parse, &total_file_size);
++ gst_base_parse_get_index_last_offset (parse, &start_offset);
++      gst_base_parse_get_index_last_ts (parse, &current_ts);
++
++ if (total_file_size > AAC_LARGE_FILE_SIZE) {
++ gst_base_parse_set_seek_mode (parse, 0);
++ GST_INFO_OBJECT (aacparse, "larger than big size (2MB).");
++ goto aac_seek_null_exit;
++ }
++
++ GST_DEBUG ("gst_aac_parse_adts_src_eventfunc GST_EVENT_SEEK enter");
++
++ if (total_file_size == 0 || start_offset >= total_file_size) {
++ GST_ERROR ("last index offset %" G_GINT64_FORMAT
++ " is larger than file size %" G_GINT64_FORMAT, start_offset,
++ total_file_size);
++ break;
++ }
++
++ gst_event_parse_seek (event, NULL, NULL, NULL, NULL, &cur, NULL, NULL);
++ if (cur <= current_ts) {
++ GST_INFO ("seek to %" GST_TIME_FORMAT " within index table %"
++ GST_TIME_FORMAT ". do not make index table", GST_TIME_ARGS (cur),
++ GST_TIME_ARGS (current_ts));
++ break;
++ } else {
++ GST_INFO ("seek to %" GST_TIME_FORMAT " without index table %"
++ GST_TIME_FORMAT ". make index table", GST_TIME_ARGS (cur),
++ GST_TIME_ARGS (current_ts));
++ }
++
++ GST_INFO ("make AAC(ADTS) Index Table. file_size = %" G_GINT64_FORMAT
++ " last idx offset=%" G_GINT64_FORMAT ", last idx ts=%"
++ GST_TIME_FORMAT, total_file_size, start_offset,
++ GST_TIME_ARGS (current_ts));
++
++ base_offset = start_offset; /* set base by start offset */
++ second_count = current_ts + GST_SECOND; /* 1sec */
++
++ /************************************/
++ /* STEP 0: Setting parse information */
++ /************************************/
++ aacparse->spf = aacparse->frame_samples;
++      aacparse->frame_duration = (aacparse->spf * 1000 * 100) / aacparse->sample_rate;        /* duration per frame in 1/100 msec */
++      aacparse->frame_per_sec = (aacparse->sample_rate) / aacparse->spf;      /* frames per second */
++
++ /************************************/
++ /* STEP 1: MAX_PULL_RANGE_BUF cycle */
++ /************************************/
++ while (total_file_size - base_offset >= AAC_MAX_PULL_RANGE_BUF) {
++ gint64 offset = 0;
++ GstBuffer *buffer = NULL;
++ guint8 *buf = NULL;
++ GstMapInfo map;
++ GST_INFO ("gst_pad_pull_range %d bytes (from %" G_GINT64_FORMAT
++ ") use max size", AAC_MAX_PULL_RANGE_BUF, base_offset);
++        /* gst_pad_pull_range takes a size, not an end offset */
++        res =
++            gst_pad_pull_range (parse->sinkpad, base_offset,
++            AAC_MAX_PULL_RANGE_BUF, &buffer);
++ if (res != GST_FLOW_OK) {
++ GST_ERROR ("gst_pad_pull_range failed!");
++ break;
++ }
++
++ gst_buffer_map (buffer, &map, GST_MAP_READ);
++ buf = map.data;
++ if (buf == NULL) {
++ gst_buffer_unmap (buffer, &map);
++ GST_WARNING ("buffer is NULL in make aac seek table's STEP1");
++ gst_buffer_unref (buffer);
++ goto aac_seek_null_exit;
++ }
++
++        while (offset + 6 <= AAC_MAX_PULL_RANGE_BUF) {  /* keep a full header in bounds */
++ gint frame_size = 0;
++
++ /* make sure the values in the frame header look sane */
++ frame_size = gst_aac_parse_adts_get_fast_frame_len (buf);
++
++ if ((frame_size > 0)
++ && (frame_size < (AAC_MAX_PULL_RANGE_BUF - offset))) {
++            if (current_ts > second_count) {    /* add one index entry per second */
++ gst_base_parse_add_index_entry (parse, base_offset + offset, current_ts, TRUE, TRUE); /* force */
++ GST_DEBUG ("Adding index ts=%" GST_TIME_FORMAT " offset %"
++ G_GINT64_FORMAT, GST_TIME_ARGS (current_ts),
++ base_offset + offset);
++ second_count += GST_SECOND; /* 1sec */
++ }
++
++ current_ts += (aacparse->frame_duration * GST_MSECOND) / 100; /* each frame is (frame_duration) ms */
++ offset += frame_size;
++ buf += frame_size;
++ frame_count++;
++ } else if (frame_size >= (AAC_MAX_PULL_RANGE_BUF - offset)) {
++ GST_DEBUG ("we need refill buffer");
++ break;
++ } else {
++ GST_WARNING ("we lost sync");
++ buf++;
++ offset++;
++ }
++ } /* while */
++
++ base_offset = base_offset + offset;
++
++ gst_buffer_unmap (buffer, &map);
++ gst_buffer_unref (buffer);
++ } /* end MAX buffer cycle */
++
++ /*******************************/
++ /* STEP 2: Remain Buffer cycle */
++ /*******************************/
++ if (total_file_size - base_offset > 0) {
++ gint64 offset = 0;
++ GstBuffer *buffer = NULL;
++ guint8 *buf = NULL;
++ GstMapInfo map;
++
++ GST_INFO ("gst_pad_pull_range %" G_GINT64_FORMAT " bytes (from %"
++ G_GINT64_FORMAT ") use remain_buf size",
++ total_file_size - base_offset, base_offset);
++        /* gst_pad_pull_range takes a size, not an end offset */
++        res =
++            gst_pad_pull_range (parse->sinkpad, base_offset,
++            total_file_size - base_offset, &buffer);
++ if (res != GST_FLOW_OK) {
++ GST_ERROR ("gst_pad_pull_range failed!");
++ break;
++ }
++
++ gst_buffer_map (buffer, &map, GST_MAP_READ);
++ buf = map.data;
++ if (buf == NULL) {
++ gst_buffer_unmap (buffer, &map);
++ GST_WARNING ("buffer is NULL in make aac seek table's STEP2");
++ gst_buffer_unref (buffer);
++ goto aac_seek_null_exit;
++ }
++
++        while (base_offset + offset + 6 <= total_file_size) {  /* keep a full header in bounds */
++ gint frame_size = 0;
++
++ /* make sure the values in the frame header look sane */
++ frame_size = gst_aac_parse_adts_get_fast_frame_len (buf);
++
++ if ((frame_size > 0)
++ && (frame_size <= (total_file_size - (base_offset + offset)))) {
++          if (current_ts > second_count) {      /* add one index entry per second */
++ gst_base_parse_add_index_entry (parse, base_offset + offset, current_ts, TRUE, TRUE); /* force */
++ GST_DEBUG ("Adding index ts=%" GST_TIME_FORMAT " offset %"
++ G_GINT64_FORMAT, GST_TIME_ARGS (current_ts),
++ base_offset + offset);
++ second_count += GST_SECOND; /* 1sec */
++ }
++
++ current_ts += (aacparse->frame_duration * GST_MSECOND) / 100; /* each frame is (frame_duration) ms */
++ offset += frame_size;
++ buf += frame_size;
++ frame_count++;
++ } else if (frame_size == 0) {
++ GST_DEBUG ("Frame size is 0 so, Decoding end..");
++ break;
++ } else {
++ GST_WARNING ("we lost sync");
++ buf++;
++ offset++;
++ }
++ } /* while */
++
++ gst_buffer_unmap (buffer, &map);
++ gst_buffer_unref (buffer);
++ }
++ /* end remain_buf buffer cycle */
++ GST_DEBUG ("gst_aac_parse_adts_src_eventfunc GST_EVENT_SEEK leave");
++ }
++ break;
++
++ default:
++ break;
++ }
++
++aac_seek_null_exit:
++
++ /* call baseparse src_event function to handle event */
++ handled = GST_BASE_PARSE_CLASS (parent_class)->src_event (parse, event);
++ return handled;
++}
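++
++/* Index-table density sketch (illustrative numbers): at 44100 Hz with
++ * spf = 1024, frame_duration = (1024 * 1000 * 100) / 44100 = 2321 (in
++ * 1/100 msec), so current_ts advances by ~23.2 ms per frame and one
++ * forced index entry is added per second, i.e. roughly every 43 frames. */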
++
++static void
++gst_aac_parse_check_byte_seekability (GstBaseParse * parse)
++{
++ GstQuery *query;
++ gboolean seekable = FALSE;
++ GstAacParse *aacparse = GST_AAC_PARSE (parse);
++ GST_LOG_OBJECT (aacparse, "gst_aac_parse_check_byte_seekability enter");
++
++ query = gst_query_new_seeking (GST_FORMAT_BYTES);
++ if (gst_pad_peer_query (parse->sinkpad, query))
++ gst_query_parse_seeking (query, NULL, &seekable, NULL, NULL);
++ else
++ GST_DEBUG_OBJECT (aacparse, "seeking query failed");
++
++ gst_query_unref (query);
++
++ GST_INFO_OBJECT (aacparse, "byte seekable: %d", seekable);
++
++ aacparse->byte_seekable = seekable;
+ }
++#endif /* TIZEN_FEATURE_AACPARSE_MODIFICATION */
--- /dev/null
+ /* GStreamer AAC parser
+ * Copyright (C) 2008 Nokia Corporation. All rights reserved.
+ *
+ * Contact: Stefan Kost <stefan.kost@nokia.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifndef __GST_AAC_PARSE_H__
+ #define __GST_AAC_PARSE_H__
+
+ #include <gst/gst.h>
+ #include <gst/base/gstbaseparse.h>
+
+ G_BEGIN_DECLS
+
+ #define GST_TYPE_AAC_PARSE \
+ (gst_aac_parse_get_type())
+ #define GST_AAC_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_AAC_PARSE, GstAacParse))
+ #define GST_AAC_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_AAC_PARSE, GstAacParseClass))
+ #define GST_IS_AAC_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_AAC_PARSE))
+ #define GST_IS_AAC_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_AAC_PARSE))
+
+
+ /**
+ * GstAacHeaderType:
+ * @DSPAAC_HEADER_NOT_PARSED: Header not parsed yet.
+ * @DSPAAC_HEADER_UNKNOWN: Unknown (not recognized) header.
+ * @DSPAAC_HEADER_ADIF: ADIF header found.
+ * @DSPAAC_HEADER_ADTS: ADTS header found.
+ * @DSPAAC_HEADER_LOAS: LOAS header found.
+ * @DSPAAC_HEADER_NONE: Raw stream, no header.
+ *
+ * Header type enumeration, as stored in #GstAacParse.header_type.
+ */
+ typedef enum {
+ DSPAAC_HEADER_NOT_PARSED,
+ DSPAAC_HEADER_UNKNOWN,
+ DSPAAC_HEADER_ADIF,
+ DSPAAC_HEADER_ADTS,
+ DSPAAC_HEADER_LOAS,
+ DSPAAC_HEADER_NONE
+ } GstAacHeaderType;
+
+
+ typedef struct _GstAacParse GstAacParse;
+ typedef struct _GstAacParseClass GstAacParseClass;
+
+ /**
+ * GstAacParse:
+ *
+ * The opaque GstAacParse data structure.
+ */
+ struct _GstAacParse {
+ GstBaseParse element;
+
+ /* Stream type -related info */
+ gint object_type;
+ gint bitrate;
+ gint sample_rate;
+ gint channels;
+ gint mpegversion;
+ gint frame_samples;
+
++#ifdef TIZEN_FEATURE_AACPARSE_MODIFICATION
++  gboolean first_frame;         /* estimate the duration only once, on the first frame */
++  guint hdr_bitrate;            /* estimated bitrate (bps) */
++  guint spf;                    /* samples per frame (= frame_samples) */
++  guint frame_duration;         /* duration per frame in 1/100 msec */
++  guint frame_per_sec;          /* frames per second */
++  guint bitstream_type;         /* bitstream type - constant or variable */
++ guint adif_header_length;
++ guint num_program_config_elements;
++ guint read_bytes;
++ gint64 file_size;
++ guint frame_byte;
++ gboolean byte_seekable;
++#endif
++
+ GstAacHeaderType header_type;
+ GstAacHeaderType output_header_type;
+
+ gboolean sent_codec_tag;
+
+ gint last_parsed_sample_rate;
+ gint last_parsed_channels;
+ };
+
+ /**
+ * GstAacParseClass:
+ * @parent_class: Element parent class.
+ *
+ * The opaque GstAacParseClass data structure.
+ */
+ struct _GstAacParseClass {
+ GstBaseParseClass parent_class;
+ };
+
+ GType gst_aac_parse_get_type (void);
+
+ G_END_DECLS
+
+ #endif /* __GST_AAC_PARSE_H__ */
--- /dev/null
+ /* GStreamer MPEG audio parser
+ * Copyright (C) 2006-2007 Jan Schmidt <thaytan@mad.scientist.com>
+ * Copyright (C) 2010 Mark Nauwelaerts <mnauw users sf net>
+ * Copyright (C) 2010 Nokia Corporation. All rights reserved.
+ * Contact: Stefan Kost <stefan.kost@nokia.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+ /**
+ * SECTION:element-mpegaudioparse
+ * @title: mpegaudioparse
+ * @short_description: MPEG audio parser
+ * @see_also: #GstAmrParse, #GstAACParse
+ *
+ * Parses and frames mpeg1 audio streams. Provides seeking.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 filesrc location=test.mp3 ! mpegaudioparse ! mpg123audiodec
+ * ! audioconvert ! audioresample ! autoaudiosink
+ * ]|
+ *
+ */
+
+ /* FIXME: we should make the base class (GstBaseParse) aware of the
+ * XING seek table somehow, so it can use it properly for things like
+ * accurate seeks. Currently it can only do a lookup via the convert function,
+ * but then doesn't know what the result represents exactly. One could either
+ * add a vfunc for index lookup, or just make mpegaudioparse populate the
+ * base class's index via the API provided.
+ */
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <string.h>
+
+ #include "gstaudioparserselements.h"
+ #include "gstmpegaudioparse.h"
+ #include <gst/base/gstbytereader.h>
+ #include <gst/pbutils/pbutils.h>
+
+ GST_DEBUG_CATEGORY_STATIC (mpeg_audio_parse_debug);
+ #define GST_CAT_DEFAULT mpeg_audio_parse_debug
+
+ #define MPEG_AUDIO_CHANNEL_MODE_UNKNOWN -1
+ #define MPEG_AUDIO_CHANNEL_MODE_STEREO 0
+ #define MPEG_AUDIO_CHANNEL_MODE_JOINT_STEREO 1
+ #define MPEG_AUDIO_CHANNEL_MODE_DUAL_CHANNEL 2
+ #define MPEG_AUDIO_CHANNEL_MODE_MONO 3
+
+ #define CRC_UNKNOWN -1
+ #define CRC_PROTECTED 0
+ #define CRC_NOT_PROTECTED 1
+
+ #define XING_FRAMES_FLAG 0x0001
+ #define XING_BYTES_FLAG 0x0002
+ #define XING_TOC_FLAG 0x0004
+ #define XING_VBR_SCALE_FLAG 0x0008
+
+ #define MIN_FRAME_SIZE 6
+
++#ifdef TIZEN_FEATURE_MP3PARSE_MODIFICATION
++#define DEFAULT_CHECK_HTTP_SEEK FALSE
++
++/* Property */
++enum
++{
++ PROP_0,
++ PROP_CHECK_HTTP_SEEK
++};
++#endif
++
+ static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/mpeg, "
+ "mpegversion = (int) 1, "
+ "layer = (int) [ 1, 3 ], "
+ "mpegaudioversion = (int) [ 1, 3], "
+ "rate = (int) [ 8000, 48000 ], "
+ "channels = (int) [ 1, 2 ], " "parsed=(boolean) true")
+ );
+
+ static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/mpeg, mpegversion = (int) 1")
+ );
+
+ static void gst_mpeg_audio_parse_finalize (GObject * object);
+
+ static gboolean gst_mpeg_audio_parse_start (GstBaseParse * parse);
+ static gboolean gst_mpeg_audio_parse_stop (GstBaseParse * parse);
++
++#ifdef TIZEN_FEATURE_MP3PARSE_MODIFICATION
++static void gst_mpeg_audio_parse_set_property (GObject * object, guint prop_id,
++ const GValue * value, GParamSpec * pspec);
++static void gst_mpeg_audio_parse_get_property (GObject * object, guint prop_id,
++ GValue * value, GParamSpec * pspec);
++static gboolean gst_mpeg_audio_parse_src_eventfunc (GstBaseParse * parse,
++ GstEvent * event);
++#endif
++
+ static GstFlowReturn gst_mpeg_audio_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize);
+ static GstFlowReturn gst_mpeg_audio_parse_pre_push_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame);
+ static gboolean gst_mpeg_audio_parse_convert (GstBaseParse * parse,
+ GstFormat src_format, gint64 src_value,
+ GstFormat dest_format, gint64 * dest_value);
+ static GstCaps *gst_mpeg_audio_parse_get_sink_caps (GstBaseParse * parse,
+ GstCaps * filter);
+
+ static void gst_mpeg_audio_parse_handle_first_frame (GstMpegAudioParse *
+ mp3parse, GstBuffer * buf);
+
+ #define gst_mpeg_audio_parse_parent_class parent_class
+ G_DEFINE_TYPE (GstMpegAudioParse, gst_mpeg_audio_parse, GST_TYPE_BASE_PARSE);
+ GST_ELEMENT_REGISTER_DEFINE (mpegaudioparse, "mpegaudioparse",
+ GST_RANK_PRIMARY + 2, GST_TYPE_MPEG_AUDIO_PARSE);
+
+ #define GST_TYPE_MPEG_AUDIO_CHANNEL_MODE \
+ (gst_mpeg_audio_channel_mode_get_type())
+
+ static const GEnumValue mpeg_audio_channel_mode[] = {
+ {MPEG_AUDIO_CHANNEL_MODE_UNKNOWN, "Unknown", "unknown"},
+ {MPEG_AUDIO_CHANNEL_MODE_MONO, "Mono", "mono"},
+ {MPEG_AUDIO_CHANNEL_MODE_DUAL_CHANNEL, "Dual Channel", "dual-channel"},
+ {MPEG_AUDIO_CHANNEL_MODE_JOINT_STEREO, "Joint Stereo", "joint-stereo"},
+ {MPEG_AUDIO_CHANNEL_MODE_STEREO, "Stereo", "stereo"},
+ {0, NULL, NULL},
+ };
+
+ static GType
+ gst_mpeg_audio_channel_mode_get_type (void)
+ {
+ static GType mpeg_audio_channel_mode_type = 0;
+
+ if (!mpeg_audio_channel_mode_type) {
+ mpeg_audio_channel_mode_type =
+ g_enum_register_static ("GstMpegAudioChannelMode",
+ mpeg_audio_channel_mode);
+ }
+ return mpeg_audio_channel_mode_type;
+ }
+
+ static const gchar *
+ gst_mpeg_audio_channel_mode_get_nick (gint mode)
+ {
+ guint i;
+ for (i = 0; i < G_N_ELEMENTS (mpeg_audio_channel_mode); i++) {
+ if (mpeg_audio_channel_mode[i].value == mode)
+ return mpeg_audio_channel_mode[i].value_nick;
+ }
+ return NULL;
+ }
+
+ static void
+ gst_mpeg_audio_parse_class_init (GstMpegAudioParseClass * klass)
+ {
+ GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GObjectClass *object_class = G_OBJECT_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (mpeg_audio_parse_debug, "mpegaudioparse", 0,
+ "MPEG1 audio stream parser");
+
+ object_class->finalize = gst_mpeg_audio_parse_finalize;
+
+ parse_class->start = GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_start);
+ parse_class->stop = GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_stop);
+ parse_class->handle_frame =
+ GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_handle_frame);
+ parse_class->pre_push_frame =
+ GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_pre_push_frame);
+ parse_class->convert = GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_convert);
+ parse_class->get_sink_caps =
+ GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_get_sink_caps);
+
++#ifdef TIZEN_FEATURE_MP3PARSE_MODIFICATION
++ object_class->set_property =
++ GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_set_property);
++ object_class->get_property =
++ GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_get_property);
++
++  g_object_class_install_property (object_class, PROP_CHECK_HTTP_SEEK,
++      g_param_spec_boolean ("http-pull-mp3dec", "HTTP pull-mode seek",
++          "Enable/disable seeking in HTTP pull mode for mp3 decoding",
++          DEFAULT_CHECK_HTTP_SEEK, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++  /* TODO : build a full mp3 index table on seek */
++  parse_class->src_event = gst_mpeg_audio_parse_src_eventfunc;
++#endif
++
+ /* register tags */
+ #define GST_TAG_CRC "has-crc"
+ #define GST_TAG_MODE "channel-mode"
+
+ gst_tag_register (GST_TAG_CRC, GST_TAG_FLAG_META, G_TYPE_BOOLEAN,
+ "has crc", "Using CRC", NULL);
+ gst_tag_register (GST_TAG_MODE, GST_TAG_FLAG_ENCODED, G_TYPE_STRING,
+ "channel mode", "MPEG audio channel mode", NULL);
+
+ g_type_class_ref (GST_TYPE_MPEG_AUDIO_CHANNEL_MODE);
+
+ gst_element_class_add_static_pad_template (element_class, &sink_template);
+ gst_element_class_add_static_pad_template (element_class, &src_template);
+
+ gst_element_class_set_static_metadata (element_class, "MPEG1 Audio Parser",
+ "Codec/Parser/Audio",
+ "Parses and frames mpeg1 audio streams (levels 1-3), provides seek",
+ "Jan Schmidt <thaytan@mad.scientist.com>,"
+ "Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>");
+ }
+
+ static void
+ gst_mpeg_audio_parse_reset (GstMpegAudioParse * mp3parse)
+ {
+ mp3parse->channels = -1;
+ mp3parse->rate = -1;
+ mp3parse->sent_codec_tag = FALSE;
+ mp3parse->last_posted_crc = CRC_UNKNOWN;
+ mp3parse->last_posted_channel_mode = MPEG_AUDIO_CHANNEL_MODE_UNKNOWN;
+ mp3parse->freerate = 0;
+
+ mp3parse->hdr_bitrate = 0;
+ mp3parse->bitrate_is_constant = TRUE;
+
+ mp3parse->xing_flags = 0;
+ mp3parse->xing_bitrate = 0;
+ mp3parse->xing_frames = 0;
+ mp3parse->xing_total_time = 0;
+ mp3parse->xing_bytes = 0;
+ mp3parse->xing_vbr_scale = 0;
+ memset (mp3parse->xing_seek_table, 0, sizeof (mp3parse->xing_seek_table));
+ memset (mp3parse->xing_seek_table_inverse, 0,
+ sizeof (mp3parse->xing_seek_table_inverse));
+
+ mp3parse->vbri_bitrate = 0;
+ mp3parse->vbri_frames = 0;
+ mp3parse->vbri_total_time = 0;
+ mp3parse->vbri_bytes = 0;
+ mp3parse->vbri_seek_points = 0;
+ g_free (mp3parse->vbri_seek_table);
+ mp3parse->vbri_seek_table = NULL;
+
+ mp3parse->encoder_delay = 0;
+ mp3parse->encoder_padding = 0;
+ }
+
+ static void
+ gst_mpeg_audio_parse_init (GstMpegAudioParse * mp3parse)
+ {
+ gst_mpeg_audio_parse_reset (mp3parse);
+ GST_PAD_SET_ACCEPT_INTERSECT (GST_BASE_PARSE_SINK_PAD (mp3parse));
+ GST_PAD_SET_ACCEPT_TEMPLATE (GST_BASE_PARSE_SINK_PAD (mp3parse));
+ }
+
+ static void
+ gst_mpeg_audio_parse_finalize (GObject * object)
+ {
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ static gboolean
+ gst_mpeg_audio_parse_start (GstBaseParse * parse)
+ {
+ GstMpegAudioParse *mp3parse = GST_MPEG_AUDIO_PARSE (parse);
+
+ gst_base_parse_set_min_frame_size (GST_BASE_PARSE (mp3parse), MIN_FRAME_SIZE);
+ GST_DEBUG_OBJECT (parse, "starting");
+
+ gst_mpeg_audio_parse_reset (mp3parse);
+
++#ifdef TIZEN_FEATURE_MP3PARSE_MODIFICATION
++  if (mp3parse->http_seek_flag) {
++    /* no accurate seek table is needed in http pull mode */
++    GST_INFO_OBJECT (parse, "http_seek_flag enabled (1)");
++  } else {
++    GST_INFO_OBJECT (parse, "http_seek_flag disabled (0)");
++  }
++#endif
++
+ return TRUE;
+ }
+
++#ifdef TIZEN_FEATURE_MP3PARSE_MODIFICATION
++static void
++gst_mpeg_audio_parse_set_property (GObject * object, guint prop_id,
++ const GValue * value, GParamSpec * pspec)
++{
++ GstMpegAudioParse *mp3parse = GST_MPEG_AUDIO_PARSE (object);
++ GST_INFO_OBJECT (mp3parse, "set_property() START- prop_id(%d)", prop_id);
++ switch (prop_id) {
++ case PROP_CHECK_HTTP_SEEK:
++ mp3parse->http_seek_flag = g_value_get_boolean (value);
++ GST_INFO_OBJECT (mp3parse, "http_seek_flag(%d)",
++ mp3parse->http_seek_flag);
++ break;
++ default:
++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
++ break;
++ }
++}
++
++static void
++gst_mpeg_audio_parse_get_property (GObject * object, guint prop_id,
++ GValue * value, GParamSpec * pspec)
++{
++ GstMpegAudioParse *mp3parse = GST_MPEG_AUDIO_PARSE (object);
++ GST_INFO_OBJECT (mp3parse, "get_property() START- prop_id(%d)", prop_id);
++ switch (prop_id) {
++ case PROP_CHECK_HTTP_SEEK:
++ g_value_set_boolean (value, mp3parse->http_seek_flag);
++ GST_INFO_OBJECT (mp3parse, "http_seek_flag(%d)",
++ mp3parse->http_seek_flag);
++ break;
++ default:
++ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
++ break;
++ }
++}
++#endif
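++
++/* Usage sketch (assumption: application code toggling this Tizen-only
++ * property; "parser" is any mpegaudioparse instance):
++ *
++ *   GstElement *parser = gst_element_factory_make ("mpegaudioparse", NULL);
++ *   g_object_set (parser, "http-pull-mp3dec", TRUE, NULL);
++ */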
++
+ static gboolean
+ gst_mpeg_audio_parse_stop (GstBaseParse * parse)
+ {
+ GstMpegAudioParse *mp3parse = GST_MPEG_AUDIO_PARSE (parse);
+
+ GST_DEBUG_OBJECT (parse, "stopping");
+
+ gst_mpeg_audio_parse_reset (mp3parse);
+
+ return TRUE;
+ }
+
+ static const guint mp3types_bitrates[2][3][16] = {
+ {
+ {0, 32, 64, 96, 128, 160, 192, 224, 256, 288, 320, 352, 384, 416, 448,},
+ {0, 32, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384,},
+ {0, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320,}
+ },
+ {
+ {0, 32, 48, 56, 64, 80, 96, 112, 128, 144, 160, 176, 192, 224, 256,},
+ {0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160,},
+ {0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160,}
+ },
+ };
+
+ static const guint mp3types_freqs[3][3] = { {44100, 48000, 32000},
+ {22050, 24000, 16000},
+ {11025, 12000, 8000}
+ };
+
+ static inline guint
+ mp3_type_frame_length_from_header (GstMpegAudioParse * mp3parse, guint32 header,
+ guint * put_version, guint * put_layer, guint * put_channels,
+ guint * put_bitrate, guint * put_samplerate, guint * put_mode,
+ guint * put_crc)
+ {
+ guint length;
+ gulong mode, samplerate, bitrate, layer, channels, padding, crc;
+ gulong version;
+ gint lsf, mpg25;
+
+ if (header & (1 << 20)) {
+ lsf = (header & (1 << 19)) ? 0 : 1;
+ mpg25 = 0;
+ } else {
+ lsf = 1;
+ mpg25 = 1;
+ }
+
+ version = 1 + lsf + mpg25;
+
+ layer = 4 - ((header >> 17) & 0x3);
+
+ crc = (header >> 16) & 0x1;
+
+ bitrate = (header >> 12) & 0xF;
+ bitrate = mp3types_bitrates[lsf][layer - 1][bitrate] * 1000;
+ if (!bitrate) {
+ GST_LOG_OBJECT (mp3parse, "using freeform bitrate");
+ bitrate = mp3parse->freerate;
+ }
+
+ samplerate = (header >> 10) & 0x3;
+ samplerate = mp3types_freqs[lsf + mpg25][samplerate];
+
+ /* force 0 length if 0 bitrate */
+ padding = (bitrate > 0) ? (header >> 9) & 0x1 : 0;
+
+ mode = (header >> 6) & 0x3;
+ channels = (mode == 3) ? 1 : 2;
+
+ switch (layer) {
+ case 1:
+ length = 4 * ((bitrate * 12) / samplerate + padding);
+ break;
+ case 2:
+ length = (bitrate * 144) / samplerate + padding;
+ break;
+ default:
+ case 3:
+ length = (bitrate * 144) / (samplerate << lsf) + padding;
+ break;
+ }
+
+ GST_DEBUG_OBJECT (mp3parse, "Calculated mp3 frame length of %u bytes",
+ length);
+ GST_DEBUG_OBJECT (mp3parse, "samplerate = %lu, bitrate = %lu, version = %lu, "
+ "layer = %lu, channels = %lu, mode = %s", samplerate, bitrate, version,
+ layer, channels, gst_mpeg_audio_channel_mode_get_nick (mode));
+
+ if (put_version)
+ *put_version = version;
+ if (put_layer)
+ *put_layer = layer;
+ if (put_channels)
+ *put_channels = channels;
+ if (put_bitrate)
+ *put_bitrate = bitrate;
+ if (put_samplerate)
+ *put_samplerate = samplerate;
+ if (put_mode)
+ *put_mode = mode;
+ if (put_crc)
+ *put_crc = crc;
+
+ return length;
+ }
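+
+ /* Worked example (illustrative): the header 0xFFFB9000 is MPEG-1
+ * (lsf = 0), Layer III, bitrate index 9 (128 kbps), samplerate index 0
+ * (44100 Hz), padding 0, so
+ *   length = (128000 * 144) / 44100 + 0 = 417 bytes. */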
+
+ /* Minimum number of consecutive, valid-looking frames to consider
+ * for resyncing */
+ #define MIN_RESYNC_FRAMES 3
+
+ /* Perform extended validation to check that subsequent headers match
+ * the first header given here in important characteristics, to avoid
+ * false sync. We look for a minimum of MIN_RESYNC_FRAMES consecutive
+ * frames to match their major characteristics.
+ *
+ * If at_eos is set to TRUE, we just check that we don't find any invalid
+ * frames in whatever data is available, rather than requiring a full
+ * MIN_RESYNC_FRAMES of data.
+ *
+ * Returns TRUE if we've seen enough data to validate or reject the frame.
+ * If TRUE is returned, then *valid contains TRUE if it validated, or false
+ * if we decided it was false sync.
+ * If FALSE is returned, then *valid contains minimum needed data.
+ */
+ static gboolean
+ gst_mp3parse_validate_extended (GstMpegAudioParse * mp3parse, GstBuffer * buf,
+ guint32 header, int bpf, gboolean at_eos, gint * valid)
+ {
+ guint32 next_header;
+ GstMapInfo map;
+ gboolean res = TRUE;
+ int frames_found = 1;
+ int offset = bpf;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+
+ while (frames_found < MIN_RESYNC_FRAMES) {
+ /* Check if we have enough data for all these frames, plus the next
+ frame header. */
+ if (map.size < offset + 4) {
+ if (at_eos) {
+ /* Running out of data at EOS is fine; just accept it */
+ *valid = TRUE;
+ goto cleanup;
+ } else {
+ *valid = offset + 4;
+ res = FALSE;
+ goto cleanup;
+ }
+ }
+
+ next_header = GST_READ_UINT32_BE (map.data + offset);
+ GST_DEBUG_OBJECT (mp3parse, "At %d: header=%08X, header2=%08X, bpf=%d",
+ offset, (unsigned int) header, (unsigned int) next_header, bpf);
+
+ /* mask the bits which are allowed to differ between frames */
+ #define HDRMASK ~((0xF << 12) /* bitrate */ | \
+ (0x1 << 9) /* padding */ | \
+ (0xf << 4) /* mode|mode extension */ | \
+ (0xf)) /* copyright|emphasis */
+
+ if ((next_header & HDRMASK) != (header & HDRMASK)) {
+ /* If any of the unmasked bits don't match, then it's not valid */
+ GST_DEBUG_OBJECT (mp3parse, "next header doesn't match "
+ "(header=%08X (%08X), header2=%08X (%08X), bpf=%d)",
+ (guint) header, (guint) header & HDRMASK, (guint) next_header,
+ (guint) next_header & HDRMASK, bpf);
+ *valid = FALSE;
+ goto cleanup;
+ } else if (((next_header >> 12) & 0xf) == 0xf) {
+ /* The essential parts were the same, but the bitrate held an
+ invalid value - also reject */
+ GST_DEBUG_OBJECT (mp3parse, "next header invalid (bitrate)");
+ *valid = FALSE;
+ goto cleanup;
+ }
+
+ bpf = mp3_type_frame_length_from_header (mp3parse, next_header,
+ NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+
+ /* if no bitrate, and no freeform rate known, then fail */
+ if (G_UNLIKELY (!bpf)) {
+ GST_DEBUG_OBJECT (mp3parse, "next header invalid (bitrate 0)");
+ *valid = FALSE;
+ goto cleanup;
+ }
+
+ offset += bpf;
+ frames_found++;
+ }
+
+ *valid = TRUE;
+
+ cleanup:
+ gst_buffer_unmap (buf, &map);
+ return res;
+ }
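+
+ /* Note (illustrative): HDRMASK above evaluates to 0xFFFF0D00. For example
+ * the headers 0xFFFB9000 and 0xFFFBB200 differ only in the bitrate and
+ * padding bits, and both mask to 0xFFFB0000, so they are treated as
+ * belonging to the same stream. */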
+
+ static gboolean
+ gst_mpeg_audio_parse_head_check (GstMpegAudioParse * mp3parse,
+ unsigned long head)
+ {
+ GST_DEBUG_OBJECT (mp3parse, "checking mp3 header 0x%08lx", head);
+ /* if it's not a valid sync */
+ if ((head & 0xffe00000) != 0xffe00000) {
+ GST_WARNING_OBJECT (mp3parse, "invalid sync");
+ return FALSE;
+ }
+ /* if it's an invalid MPEG version */
+ if (((head >> 19) & 3) == 0x1) {
+ GST_WARNING_OBJECT (mp3parse, "invalid MPEG version: 0x%lx",
+ (head >> 19) & 3);
+ return FALSE;
+ }
+ /* if it's an invalid layer */
+ if (!((head >> 17) & 3)) {
+ GST_WARNING_OBJECT (mp3parse, "invalid layer: 0x%lx", (head >> 17) & 3);
+ return FALSE;
+ }
+ /* if it's an invalid bitrate */
+ if (((head >> 12) & 0xf) == 0xf) {
+ GST_WARNING_OBJECT (mp3parse, "invalid bitrate: 0x%lx", (head >> 12) & 0xf);
+ return FALSE;
+ }
+ /* if it's an invalid samplerate */
+ if (((head >> 10) & 0x3) == 0x3) {
+ GST_WARNING_OBJECT (mp3parse, "invalid samplerate: 0x%lx",
+ (head >> 10) & 0x3);
+ return FALSE;
+ }
+
+ if ((head & 0x3) == 0x2) {
+ /* Ignore this as there are some files with emphasis 0x2 that can
+ * be played fine. See BGO #537235 */
+ GST_WARNING_OBJECT (mp3parse, "invalid emphasis: 0x%lx", head & 0x3);
+ }
+
+ return TRUE;
+ }
+
+ /* Determines possible freeform frame rate/size by looking for next
+ * header with valid bitrate (0 or otherwise valid) (and sufficiently
+ * matching current header).
+ *
+ * Returns TRUE if we've found such one, and *rate then contains rate
+ * (or *rate contains 0 if decided no freeframe size could be determined).
+ * If not enough data, returns FALSE.
+ */
+ static gboolean
+ gst_mp3parse_find_freerate (GstMpegAudioParse * mp3parse, GstMapInfo * map,
+ guint32 header, gboolean at_eos, gint * _rate)
+ {
+ guint32 next_header;
+ const guint8 *data;
+ guint available;
+ int offset = 4;
+ gulong samplerate, rate, layer, padding;
+ gboolean valid;
+ gint lsf, mpg25;
+
+ available = map->size;
+ data = map->data;
+
+ *_rate = 0;
+
+ /* pick apart header again partially */
+ if (header & (1 << 20)) {
+ lsf = (header & (1 << 19)) ? 0 : 1;
+ mpg25 = 0;
+ } else {
+ lsf = 1;
+ mpg25 = 1;
+ }
+ layer = 4 - ((header >> 17) & 0x3);
+ samplerate = (header >> 10) & 0x3;
+ samplerate = mp3types_freqs[lsf + mpg25][samplerate];
+ padding = (header >> 9) & 0x1;
+
+ for (; offset < available; ++offset) {
+ /* Check if we have enough data for all these frames, plus the next
+ frame header. */
+ if (available < offset + 4) {
+ if (at_eos) {
+ /* Running out of data; failed to determine size */
+ return TRUE;
+ } else {
+ return FALSE;
+ }
+ }
+
+ valid = FALSE;
+ next_header = GST_READ_UINT32_BE (data + offset);
+ if ((next_header & 0xFFE00000) != 0xFFE00000)
+ goto next;
+
+ GST_DEBUG_OBJECT (mp3parse, "At %d: header=%08X, header2=%08X",
+ offset, (unsigned int) header, (unsigned int) next_header);
+
+ if ((next_header & HDRMASK) != (header & HDRMASK)) {
+ /* If any of the unmasked bits don't match, then it's not valid */
+ GST_DEBUG_OBJECT (mp3parse, "next header doesn't match "
+ "(header=%08X (%08X), header2=%08X (%08X))",
+ (guint) header, (guint) header & HDRMASK, (guint) next_header,
+ (guint) next_header & HDRMASK);
+ goto next;
+ } else if (((next_header >> 12) & 0xf) == 0xf) {
+ /* The essential parts were the same, but the bitrate held an
+ invalid value - also reject */
+ GST_DEBUG_OBJECT (mp3parse, "next header invalid (bitrate)");
+ goto next;
+ }
+
+ valid = TRUE;
+
+ next:
+ /* almost accept as free frame */
+ if (layer == 1) {
+ rate = samplerate * (offset - 4 * padding + 4) / 48000;
+ } else {
+ rate = samplerate * (offset - padding + 1) / (144 >> lsf) / 1000;
+ }
+
+ if (valid) {
+ GST_LOG_OBJECT (mp3parse, "calculated rate %lu", rate * 1000);
+ if (rate < 8 || (layer == 3 && rate > 640)) {
+ GST_DEBUG_OBJECT (mp3parse, "rate invalid");
+ if (rate < 8) {
+ /* maybe some hope */
+ continue;
+ } else {
+ GST_DEBUG_OBJECT (mp3parse, "aborting");
+ /* give up */
+ break;
+ }
+ }
+ *_rate = rate * 1000;
+ break;
+ } else {
+ /* avoid indefinite searching */
+ if (rate > 1000) {
+ GST_DEBUG_OBJECT (mp3parse, "exceeded sanity rate; aborting");
+ break;
+ }
+ }
+ }
+
+ return TRUE;
+ }
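+
+ /* Worked example (illustrative): if the next matching header of a
+ * free-format MPEG-1 Layer III stream (44100 Hz, no padding) is found at
+ * offset 417, the loop recovers
+ *   rate = 44100 * (417 - 0 + 1) / (144 >> 0) / 1000 = 128,
+ * i.e. *_rate = 128000 bps. */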
+
+ static GstFlowReturn
+ gst_mpeg_audio_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize)
+ {
+ GstMpegAudioParse *mp3parse = GST_MPEG_AUDIO_PARSE (parse);
+ GstBuffer *buf = frame->buffer;
+ GstByteReader reader;
+ gint off, bpf = 0;
+ gboolean lost_sync, draining, valid, caps_change;
+ guint32 header;
+ guint bitrate, layer, rate, channels, version, mode, crc;
+ GstMapInfo map;
+ gboolean res = FALSE;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ if (G_UNLIKELY (map.size < 6)) {
+ *skipsize = 1;
+ goto cleanup;
+ }
+
+ gst_byte_reader_init (&reader, map.data, map.size);
+
+ off = gst_byte_reader_masked_scan_uint32 (&reader, 0xffe00000, 0xffe00000,
+ 0, map.size);
+
+ GST_LOG_OBJECT (parse, "possible sync at buffer offset %d", off);
+
+ /* didn't find anything that looks like a sync word, skip */
+ if (off < 0) {
+ *skipsize = map.size - 3;
+ goto cleanup;
+ }
+
+ /* possible frame header, but not at offset 0? skip bytes before sync */
+ if (off > 0) {
+ *skipsize = off;
+ goto cleanup;
+ }
+
+ /* make sure the values in the frame header look sane */
+ header = GST_READ_UINT32_BE (map.data);
+ if (!gst_mpeg_audio_parse_head_check (mp3parse, header)) {
+ *skipsize = 1;
+ goto cleanup;
+ }
+
+ GST_LOG_OBJECT (parse, "got frame");
+
+ lost_sync = GST_BASE_PARSE_LOST_SYNC (parse);
+ draining = GST_BASE_PARSE_DRAINING (parse);
+
+ if (G_UNLIKELY (lost_sync))
+ mp3parse->freerate = 0;
+
+ bpf = mp3_type_frame_length_from_header (mp3parse, header,
+ &version, &layer, &channels, &bitrate, &rate, &mode, &crc);
+
+ if (channels != mp3parse->channels || rate != mp3parse->rate ||
+ layer != mp3parse->layer || version != mp3parse->version)
+ caps_change = TRUE;
+ else
+ caps_change = FALSE;
+
+ /* maybe free format */
+ if (bpf == 0) {
+ GST_LOG_OBJECT (mp3parse, "possibly free format");
+ if (lost_sync || mp3parse->freerate == 0) {
+ GST_DEBUG_OBJECT (mp3parse, "finding free format rate");
+ if (!gst_mp3parse_find_freerate (mp3parse, &map, header, draining,
+ &valid)) {
+ /* not enough data */
+ gst_base_parse_set_min_frame_size (parse, valid);
+ *skipsize = 0;
+ goto cleanup;
+ } else {
+ GST_DEBUG_OBJECT (parse, "determined freeform size %d", valid);
+ mp3parse->freerate = valid;
+ }
+ }
+ /* try again */
+ bpf = mp3_type_frame_length_from_header (mp3parse, header,
+ &version, &layer, &channels, &bitrate, &rate, &mode, &crc);
+ if (!bpf) {
+ /* did not come up with valid freeform length, reject after all */
+ *skipsize = 1;
+ goto cleanup;
+ }
+ }
+
+ if (!draining && (lost_sync || caps_change)) {
+ if (!gst_mp3parse_validate_extended (mp3parse, buf, header, bpf, draining,
+ &valid)) {
+ /* not enough data */
+ gst_base_parse_set_min_frame_size (parse, valid);
+ *skipsize = 0;
+ goto cleanup;
+ } else {
+ if (!valid) {
+ *skipsize = off + 2;
+ goto cleanup;
+ }
+ }
+ } else if (draining && lost_sync && caps_change && mp3parse->rate > 0) {
+ /* avoid caps jitter that we can't be sure of */
+ *skipsize = off + 2;
+ goto cleanup;
+ }
+
+ /* restore default minimum */
+ gst_base_parse_set_min_frame_size (parse, MIN_FRAME_SIZE);
+
+ res = TRUE;
+
+ /* metadata handling */
+ if (G_UNLIKELY (caps_change)) {
+ GstCaps *caps = gst_caps_new_simple ("audio/mpeg",
+ "mpegversion", G_TYPE_INT, 1,
+ "mpegaudioversion", G_TYPE_INT, version,
+ "layer", G_TYPE_INT, layer,
+ "rate", G_TYPE_INT, rate,
+ "channels", G_TYPE_INT, channels, "parsed", G_TYPE_BOOLEAN, TRUE, NULL);
+ gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps);
+ gst_caps_unref (caps);
+
+ mp3parse->rate = rate;
+ mp3parse->channels = channels;
+ mp3parse->layer = layer;
+ mp3parse->version = version;
+
+ /* see http://www.codeproject.com/audio/MPEGAudioInfo.asp */
+ if (mp3parse->layer == 1)
+ mp3parse->spf = 384;
+ else if (mp3parse->layer == 2)
+ mp3parse->spf = 1152;
+ else if (mp3parse->version == 1) {
+ mp3parse->spf = 1152;
+ } else {
+ /* MPEG-2 or "2.5" */
+ mp3parse->spf = 576;
+ }
+
+ /* lead_in:
+ * We start pushing 9 frames earlier (29 frames for MPEG2) than
+ * segment start to be able to decode the first frame we want.
+ * 9 (29) frames are the theoretical maximum of frames that contain
+ * data for the current frame (bit reservoir).
+ *
+ * lead_out:
+ * Some mp3 streams have an offset in the timestamps, for which we have to
+ * push the frame *after* the end position in order for the decoder to be
+ * able to decode everything up until the segment.stop position. */
+ gst_base_parse_set_frame_rate (parse, mp3parse->rate, mp3parse->spf,
+ (version == 1) ? 10 : 30, 2);
+ }
+
+ if (mp3parse->hdr_bitrate && mp3parse->hdr_bitrate != bitrate) {
+ mp3parse->bitrate_is_constant = FALSE;
+ }
+ mp3parse->hdr_bitrate = bitrate;
+
+ /* For first frame; check for seek tables and output a codec tag */
+ gst_mpeg_audio_parse_handle_first_frame (mp3parse, buf);
+
+ /* store some frame info for later processing */
+ mp3parse->last_crc = crc;
+ mp3parse->last_mode = mode;
+
+ cleanup:
+ gst_buffer_unmap (buf, &map);
+
+ if (res && bpf <= map.size) {
+ return gst_base_parse_finish_frame (parse, frame, bpf);
+ }
+
+ return GST_FLOW_OK;
+ }
+
+ static void
+ gst_mpeg_audio_parse_handle_first_frame (GstMpegAudioParse * mp3parse,
+ GstBuffer * buf)
+ {
+ const guint32 xing_id = 0x58696e67; /* 'Xing' in hex */
+ const guint32 info_id = 0x496e666f; /* 'Info' in hex - found in LAME CBR files */
+ const guint32 vbri_id = 0x56425249; /* 'VBRI' in hex */
+ const guint32 lame_id = 0x4c414d45; /* 'LAME' in hex */
+ gint offset_xing, offset_vbri;
+ guint64 avail;
+ gint64 upstream_total_bytes = 0;
+ guint32 read_id_xing = 0, read_id_vbri = 0;
+ GstMapInfo map;
+ guint8 *data;
+ guint bitrate;
+
+ if (mp3parse->sent_codec_tag)
+ return;
+
+ /* Check first frame for Xing info */
+ if (mp3parse->version == 1) { /* MPEG-1 file */
+ if (mp3parse->channels == 1)
+ offset_xing = 0x11;
+ else
+ offset_xing = 0x20;
+ } else { /* MPEG-2 header */
+ if (mp3parse->channels == 1)
+ offset_xing = 0x09;
+ else
+ offset_xing = 0x11;
+ }
+
+ /* The VBRI tag is always at offset 0x20 */
+ offset_vbri = 0x20;
+
+ /* Skip the 4 bytes of the MP3 header too */
+ offset_xing += 4;
+ offset_vbri += 4;
+
+ /* Check if we have enough data to read the Xing header */
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ data = map.data;
+ avail = map.size;
+
+ if (avail >= offset_xing + 4) {
+ read_id_xing = GST_READ_UINT32_BE (data + offset_xing);
+ }
+ if (avail >= offset_vbri + 4) {
+ read_id_vbri = GST_READ_UINT32_BE (data + offset_vbri);
+ }
+
+ /* obtain real upstream total bytes */
+ if (!gst_pad_peer_query_duration (GST_BASE_PARSE_SINK_PAD (mp3parse),
+ GST_FORMAT_BYTES, &upstream_total_bytes))
+ upstream_total_bytes = 0;
+
+ if (read_id_xing == xing_id || read_id_xing == info_id) {
+ guint32 xing_flags;
+ guint bytes_needed = offset_xing + 8;
+ gint64 total_bytes;
+ GstClockTime total_time;
+
+ GST_DEBUG_OBJECT (mp3parse, "Found Xing header marker 0x%x", xing_id);
+
+ /* Move data after Xing header */
+ data += offset_xing + 4;
+
+ /* Read 4 base bytes of flags, big-endian */
+ xing_flags = GST_READ_UINT32_BE (data);
+ data += 4;
+ if (xing_flags & XING_FRAMES_FLAG)
+ bytes_needed += 4;
+ if (xing_flags & XING_BYTES_FLAG)
+ bytes_needed += 4;
+ if (xing_flags & XING_TOC_FLAG)
+ bytes_needed += 100;
+ if (xing_flags & XING_VBR_SCALE_FLAG)
+ bytes_needed += 4;
+ if (avail < bytes_needed) {
+ GST_DEBUG_OBJECT (mp3parse,
+ "Not enough data to read Xing header (need %d)", bytes_needed);
+ goto cleanup;
+ }
+
+ GST_DEBUG_OBJECT (mp3parse, "Reading Xing header");
+ mp3parse->xing_flags = xing_flags;
+
+ if (xing_flags & XING_FRAMES_FLAG) {
+ mp3parse->xing_frames = GST_READ_UINT32_BE (data);
+ if (mp3parse->xing_frames == 0) {
+ GST_WARNING_OBJECT (mp3parse,
+ "Invalid number of frames in Xing header");
+ mp3parse->xing_flags &= ~XING_FRAMES_FLAG;
+ } else {
+ mp3parse->xing_total_time = gst_util_uint64_scale (GST_SECOND,
+ (guint64) (mp3parse->xing_frames) * (mp3parse->spf),
+ mp3parse->rate);
+ }
+
+ data += 4;
+ } else {
+ mp3parse->xing_frames = 0;
+ mp3parse->xing_total_time = 0;
+ }
+
+ if (xing_flags & XING_BYTES_FLAG) {
+ mp3parse->xing_bytes = GST_READ_UINT32_BE (data);
+ if (mp3parse->xing_bytes == 0) {
+ GST_WARNING_OBJECT (mp3parse, "Invalid number of bytes in Xing header");
+ mp3parse->xing_flags &= ~XING_BYTES_FLAG;
+ }
+ data += 4;
+ } else {
+ mp3parse->xing_bytes = 0;
+ }
+
+ /* If we know the total size and duration, compute the
+ * total bitrate, rounded to the nearest kbit/sec */
+ if ((total_time = mp3parse->xing_total_time) &&
+ (total_bytes = mp3parse->xing_bytes)) {
+ mp3parse->xing_bitrate = gst_util_uint64_scale (total_bytes,
+ 8 * GST_SECOND, total_time);
+ mp3parse->xing_bitrate += 500;
+ mp3parse->xing_bitrate -= mp3parse->xing_bitrate % 1000;
+ }
+
+ if (xing_flags & XING_TOC_FLAG) {
+ int i, percent = 0;
+ guchar *table = mp3parse->xing_seek_table;
+ guchar old = 0, new;
+ guint first;
+
+ first = data[0];
+ GST_DEBUG_OBJECT (mp3parse,
+ "Subtracting initial offset of %d bytes from Xing TOC", first);
+
+ /* xing seek table: percent time -> 1/256 bytepos */
+ for (i = 0; i < 100; i++) {
+ new = data[i] - first;
+ if (old > new) {
+ GST_WARNING_OBJECT (mp3parse, "Skipping broken Xing TOC");
+ mp3parse->xing_flags &= ~XING_TOC_FLAG;
+ goto skip_toc;
+ }
+ mp3parse->xing_seek_table[i] = old = new;
+ }
+
+ /* build inverse table: 1/256 bytepos -> 1/100 percent time */
+ for (i = 0; i < 256; i++) {
+ while (percent < 99 && table[percent + 1] <= i)
+ percent++;
+
+ if (table[percent] == i) {
+ mp3parse->xing_seek_table_inverse[i] = percent * 100;
+ } else if (percent < 99 && table[percent]) {
+ gdouble fa, fb, fx;
+ gint a = percent, b = percent + 1;
+
+ fa = table[a];
+ fb = table[b];
+ fx = (b - a) / (fb - fa) * (i - fa) + a;
+ mp3parse->xing_seek_table_inverse[i] = (guint16) (fx * 100);
+ } else if (percent == 99) {
+ gdouble fa, fb, fx;
+ gint a = percent, b = 100;
+
+ fa = table[a];
+ fb = 256.0;
+ fx = (b - a) / (fb - fa) * (i - fa) + a;
+ mp3parse->xing_seek_table_inverse[i] = (guint16) (fx * 100);
+ }
+ }
+ skip_toc:
+ data += 100;
+ } else {
+ memset (mp3parse->xing_seek_table, 0, sizeof (mp3parse->xing_seek_table));
+ memset (mp3parse->xing_seek_table_inverse, 0,
+ sizeof (mp3parse->xing_seek_table_inverse));
+ }
+
+ if (xing_flags & XING_VBR_SCALE_FLAG) {
+ mp3parse->xing_vbr_scale = GST_READ_UINT32_BE (data);
+ data += 4;
+ } else
+ mp3parse->xing_vbr_scale = 0;
+
+ GST_DEBUG_OBJECT (mp3parse, "Xing header reported %u frames, time %"
+ GST_TIME_FORMAT ", %u bytes, vbr scale %u", mp3parse->xing_frames,
+ GST_TIME_ARGS (mp3parse->xing_total_time), mp3parse->xing_bytes,
+ mp3parse->xing_vbr_scale);
+
+ /* check for truncated file */
+ if (upstream_total_bytes && mp3parse->xing_bytes &&
+ mp3parse->xing_bytes * 0.8 > upstream_total_bytes) {
+ GST_WARNING_OBJECT (mp3parse, "File appears to have been truncated; "
+ "invalidating Xing header duration and size");
+ mp3parse->xing_flags &= ~XING_BYTES_FLAG;
+ mp3parse->xing_flags &= ~XING_FRAMES_FLAG;
+ }
+
+ /* Optional LAME tag? */
+ if (avail - bytes_needed >= 36 && GST_READ_UINT32_BE (data) == lame_id) {
+ gchar lame_version[10] = { 0, };
+ guint tag_rev;
+ guint32 encoder_delay, encoder_padding;
+
+ memcpy (lame_version, data, 9);
+ data += 9;
+ tag_rev = data[0] >> 4;
+ GST_DEBUG_OBJECT (mp3parse, "Found LAME tag revision %d created by '%s'",
+ tag_rev, lame_version);
+
+ /* Skip all the information we're not interested in */
+ data += 12;
+ /* Encoder delay and end padding */
+ encoder_delay = GST_READ_UINT24_BE (data);
+ encoder_delay >>= 12;
+ encoder_padding = GST_READ_UINT24_BE (data);
+ encoder_padding &= 0x000fff;
+
+ mp3parse->encoder_delay = encoder_delay;
+ mp3parse->encoder_padding = encoder_padding;
+
+ GST_DEBUG_OBJECT (mp3parse, "Encoder delay %u, encoder padding %u",
+ encoder_delay, encoder_padding);
+ }
+ } else if (read_id_vbri == vbri_id) {
+ gint64 total_bytes, total_frames;
+ GstClockTime total_time;
+ guint16 nseek_points;
+
+ GST_DEBUG_OBJECT (mp3parse, "Found VBRI header marker 0x%x", vbri_id);
+
+ if (avail < offset_vbri + 26) {
+ GST_DEBUG_OBJECT (mp3parse,
+ "Not enough data to read VBRI header (need %d)", offset_vbri + 26);
+ goto cleanup;
+ }
+
+ GST_DEBUG_OBJECT (mp3parse, "Reading VBRI header");
+
+ /* Move data after VBRI header */
+ data += offset_vbri + 4;
+
+ if (GST_READ_UINT16_BE (data) != 0x0001) {
+ GST_WARNING_OBJECT (mp3parse,
+ "Unsupported VBRI version 0x%x", GST_READ_UINT16_BE (data));
+ goto cleanup;
+ }
+ data += 2;
+
+ /* Skip encoder delay */
+ data += 2;
+
+ /* Skip quality */
+ data += 2;
+
+ total_bytes = GST_READ_UINT32_BE (data);
+ if (total_bytes != 0)
+ mp3parse->vbri_bytes = total_bytes;
+ data += 4;
+
+ total_frames = GST_READ_UINT32_BE (data);
+ if (total_frames != 0) {
+ mp3parse->vbri_frames = total_frames;
+ mp3parse->vbri_total_time = gst_util_uint64_scale (GST_SECOND,
+ (guint64) (mp3parse->vbri_frames) * (mp3parse->spf), mp3parse->rate);
+ }
+ data += 4;
+
+ /* If we know the total size and duration, compute the
+ * total bitrate, rounded to the nearest kbit/sec */
+ if ((total_time = mp3parse->vbri_total_time) &&
+ (total_bytes = mp3parse->vbri_bytes)) {
+ mp3parse->vbri_bitrate = gst_util_uint64_scale (total_bytes,
+ 8 * GST_SECOND, total_time);
+ mp3parse->vbri_bitrate += 500;
+ mp3parse->vbri_bitrate -= mp3parse->vbri_bitrate % 1000;
+ }
+
+ nseek_points = GST_READ_UINT16_BE (data);
+ data += 2;
+
+ if (nseek_points > 0) {
+ guint scale, seek_bytes, seek_frames;
+ gint i;
+
+ mp3parse->vbri_seek_points = nseek_points;
+
+ scale = GST_READ_UINT16_BE (data);
+ data += 2;
+
+ seek_bytes = GST_READ_UINT16_BE (data);
+ data += 2;
+
+ seek_frames = GST_READ_UINT16_BE (data);
+
+ if (scale == 0 || seek_bytes == 0 || seek_bytes > 4 || seek_frames == 0) {
+ GST_WARNING_OBJECT (mp3parse, "Unsupported VBRI seek table");
+ goto out_vbri;
+ }
+
+ if (avail < offset_vbri + 26 + nseek_points * seek_bytes) {
+ GST_WARNING_OBJECT (mp3parse,
+ "Not enough data to read VBRI seek table (need %d)",
+ offset_vbri + 26 + nseek_points * seek_bytes);
+ goto out_vbri;
+ }
+
+ if (seek_frames * nseek_points < total_frames - seek_frames ||
+ seek_frames * nseek_points > total_frames + seek_frames) {
+ GST_WARNING_OBJECT (mp3parse,
+ "VBRI seek table doesn't cover the complete file");
+ goto out_vbri;
+ }
+
+ data = map.data;
+ data += offset_vbri + 26;
+
+ /* VBRI seek table: frame/seek_frames -> byte */
+ mp3parse->vbri_seek_table = g_new (guint32, nseek_points);
+ if (seek_bytes == 4)
+ for (i = 0; i < nseek_points; i++) {
+ mp3parse->vbri_seek_table[i] = GST_READ_UINT32_BE (data) * scale;
+ data += 4;
+ } else if (seek_bytes == 3)
+ for (i = 0; i < nseek_points; i++) {
+ mp3parse->vbri_seek_table[i] = GST_READ_UINT24_BE (data) * scale;
+ data += 3;
+ } else if (seek_bytes == 2)
+ for (i = 0; i < nseek_points; i++) {
+ mp3parse->vbri_seek_table[i] = GST_READ_UINT16_BE (data) * scale;
+ data += 2;
+ } else /* seek_bytes == 1 */
+ for (i = 0; i < nseek_points; i++) {
+ mp3parse->vbri_seek_table[i] = GST_READ_UINT8 (data) * scale;
+ data += 1;
+ }
+ }
+ out_vbri:
+
+ GST_DEBUG_OBJECT (mp3parse, "VBRI header reported %u frames, time %"
+ GST_TIME_FORMAT ", bytes %u", mp3parse->vbri_frames,
+ GST_TIME_ARGS (mp3parse->vbri_total_time), mp3parse->vbri_bytes);
+
+ /* check for truncated file */
+ if (upstream_total_bytes && mp3parse->vbri_bytes &&
+ mp3parse->vbri_bytes * 0.8 > upstream_total_bytes) {
+ GST_WARNING_OBJECT (mp3parse, "File appears to have been truncated; "
+ "invalidating VBRI header duration and size");
+ mp3parse->vbri_valid = FALSE;
+ } else {
+ mp3parse->vbri_valid = TRUE;
+ }
+ } else {
+ GST_DEBUG_OBJECT (mp3parse,
+ "Xing, LAME or VBRI header not found in first frame");
+ }
+
+ /* set duration if tables provided a valid one */
+ if (mp3parse->xing_flags & XING_FRAMES_FLAG) {
+ gst_base_parse_set_duration (GST_BASE_PARSE (mp3parse), GST_FORMAT_TIME,
+ mp3parse->xing_total_time, 0);
+ }
+ if (mp3parse->vbri_total_time != 0 && mp3parse->vbri_valid) {
+ gst_base_parse_set_duration (GST_BASE_PARSE (mp3parse), GST_FORMAT_TIME,
+ mp3parse->vbri_total_time, 0);
+ }
+
+ /* tell baseclass how nicely we can seek, and a bitrate if one found */
+ /* FIXME: fill index with seek table */
+ #if 0
+ seekable = GST_BASE_PARSE_SEEK_DEFAULT;
+ if ((mp3parse->xing_flags & XING_TOC_FLAG) && mp3parse->xing_bytes &&
+ mp3parse->xing_total_time)
+ seekable = GST_BASE_PARSE_SEEK_TABLE;
+
+ if (mp3parse->vbri_seek_table && mp3parse->vbri_bytes &&
+ mp3parse->vbri_total_time)
+ seekable = GST_BASE_PARSE_SEEK_TABLE;
+ #endif
+
+ if (mp3parse->xing_bitrate)
+ bitrate = mp3parse->xing_bitrate;
+ else if (mp3parse->vbri_bitrate)
+ bitrate = mp3parse->vbri_bitrate;
+ else
+ bitrate = 0;
+
+ gst_base_parse_set_average_bitrate (GST_BASE_PARSE (mp3parse), bitrate);
+
+ cleanup:
+ gst_buffer_unmap (buf, &map);
+ }
+
+ static gboolean
+ gst_mpeg_audio_parse_time_to_bytepos (GstMpegAudioParse * mp3parse,
+ GstClockTime ts, gint64 * bytepos)
+ {
+ gint64 total_bytes;
+ GstClockTime total_time;
+
+ /* If XING seek table exists use this for time->byte conversion */
+ if ((mp3parse->xing_flags & XING_TOC_FLAG) &&
+ (total_bytes = mp3parse->xing_bytes) &&
+ (total_time = mp3parse->xing_total_time)) {
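+ /* The Xing TOC has 100 entries: entry i holds the file position at
+ * i% of the total duration, scaled so that 256 represents the end
+ * of the file. Interpolate linearly between adjacent entries. */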
+ gdouble fa, fb, fx;
+ gdouble percent =
+ CLAMP ((100.0 * gst_util_guint64_to_gdouble (ts)) /
+ gst_util_guint64_to_gdouble (total_time), 0.0, 100.0);
+ gint index = CLAMP (percent, 0, 99);
+
+ fa = mp3parse->xing_seek_table[index];
+ if (index < 99)
+ fb = mp3parse->xing_seek_table[index + 1];
+ else
+ fb = 256.0;
+
+ fx = fa + (fb - fa) * (percent - index);
+
+ *bytepos = (1.0 / 256.0) * fx * total_bytes;
+
+ return TRUE;
+ }
+
+ if (mp3parse->vbri_seek_table && (total_bytes = mp3parse->vbri_bytes) &&
+ (total_time = mp3parse->vbri_total_time)) {
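+ /* VBRI seek points are evenly spaced in time; each table entry is
+ * the byte size of one interval, so summing entries 0..i gives the
+ * byte offset of seek point i. Interpolate within the interval. */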
+ gint i, j;
+ gdouble a, b, fa, fb;
+
+ i = gst_util_uint64_scale (ts, mp3parse->vbri_seek_points - 1, total_time);
+ i = CLAMP (i, 0, mp3parse->vbri_seek_points - 1);
+
+ a = gst_guint64_to_gdouble (gst_util_uint64_scale (i, total_time,
+ mp3parse->vbri_seek_points));
+ fa = 0.0;
+ for (j = i; j >= 0; j--)
+ fa += mp3parse->vbri_seek_table[j];
+
+ if (i + 1 < mp3parse->vbri_seek_points) {
+ b = gst_guint64_to_gdouble (gst_util_uint64_scale (i + 1, total_time,
+ mp3parse->vbri_seek_points));
+ fb = fa + mp3parse->vbri_seek_table[i + 1];
+ } else {
+ b = gst_guint64_to_gdouble (total_time);
+ fb = total_bytes;
+ }
+
+ *bytepos = fa + ((fb - fa) / (b - a)) * (gst_guint64_to_gdouble (ts) - a);
+
+ return TRUE;
+ }
+
+ /* If we have had a constant bit rate (so far), use it directly, as it
+ * may give slightly more accurate results than the base class. */
+ if (mp3parse->bitrate_is_constant && mp3parse->hdr_bitrate) {
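+ /* e.g. at a constant 128 kbps, 10 s of audio occupies
+ * 10 * 128000 / 8 = 160,000 bytes */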
+ *bytepos = gst_util_uint64_scale (ts, mp3parse->hdr_bitrate,
+ 8 * GST_SECOND);
+ return TRUE;
+ }
+
+ return FALSE;
+ }
+
+ static gboolean
+ gst_mpeg_audio_parse_bytepos_to_time (GstMpegAudioParse * mp3parse,
+ gint64 bytepos, GstClockTime * ts)
+ {
+ gint64 total_bytes;
+ GstClockTime total_time;
+
+ /* If XING seek table exists use this for byte->time conversion */
+ if ((mp3parse->xing_flags & XING_TOC_FLAG) &&
+ (total_bytes = mp3parse->xing_bytes) &&
+ (total_time = mp3parse->xing_total_time)) {
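+ /* xing_seek_table_inverse is the inverse mapping built when the Xing
+ * header was parsed: entry i holds the timestamp at i/256 of the file
+ * size, in units of 1/10000 of the total duration. */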
+ gdouble fa, fb, fx;
+ gdouble pos;
+ gint index;
+
+ pos = CLAMP ((bytepos * 256.0) / total_bytes, 0.0, 256.0);
+ index = CLAMP (pos, 0, 255);
+ fa = mp3parse->xing_seek_table_inverse[index];
+ if (index < 255)
+ fb = mp3parse->xing_seek_table_inverse[index + 1];
+ else
+ fb = 10000.0;
+
+ fx = fa + (fb - fa) * (pos - index);
+
+ *ts = (1.0 / 10000.0) * fx * gst_util_guint64_to_gdouble (total_time);
+
+ return TRUE;
+ }
+
+ if (mp3parse->vbri_seek_table &&
+ (total_bytes = mp3parse->vbri_bytes) &&
+ (total_time = mp3parse->vbri_total_time)) {
+ gint i = 0;
+ guint64 sum = 0;
+ gdouble a, b, fa, fb;
+
+ do {
+ sum += mp3parse->vbri_seek_table[i];
+ i++;
+ } while (i + 1 < mp3parse->vbri_seek_points
+ && sum + mp3parse->vbri_seek_table[i] < bytepos);
+ i--;
+
+ a = gst_guint64_to_gdouble (sum);
+ fa = gst_guint64_to_gdouble (gst_util_uint64_scale (i, total_time,
+ mp3parse->vbri_seek_points));
+
+ if (i + 1 < mp3parse->vbri_seek_points) {
+ b = a + mp3parse->vbri_seek_table[i + 1];
+ fb = gst_guint64_to_gdouble (gst_util_uint64_scale (i + 1, total_time,
+ mp3parse->vbri_seek_points));
+ } else {
+ b = total_bytes;
+ fb = gst_guint64_to_gdouble (total_time);
+ }
+
+ *ts = gst_gdouble_to_guint64 (fa + ((fb - fa) / (b - a)) * (bytepos - a));
+
+ return TRUE;
+ }
+
+ /* If we have had a constant bit rate (so far), use it directly, as it
+ * may give slightly more accurate results than the base class. */
+ if (mp3parse->bitrate_is_constant && mp3parse->hdr_bitrate) {
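+ /* the inverse of the time->bytes case: 160,000 bytes at a constant
+ * 128 kbps is 160000 * 8 / 128000 = 10 s */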
+ *ts = gst_util_uint64_scale (bytepos, 8 * GST_SECOND,
+ mp3parse->hdr_bitrate);
+ return TRUE;
+ }
+
+ return FALSE;
+ }
+
+ static gboolean
+ gst_mpeg_audio_parse_convert (GstBaseParse * parse, GstFormat src_format,
+ gint64 src_value, GstFormat dest_format, gint64 * dest_value)
+ {
+ GstMpegAudioParse *mp3parse = GST_MPEG_AUDIO_PARSE (parse);
+ gboolean res = FALSE;
+
+ if (src_format == GST_FORMAT_TIME && dest_format == GST_FORMAT_BYTES)
+ res =
+ gst_mpeg_audio_parse_time_to_bytepos (mp3parse, src_value, dest_value);
+ else if (src_format == GST_FORMAT_BYTES && dest_format == GST_FORMAT_TIME)
+ res = gst_mpeg_audio_parse_bytepos_to_time (mp3parse, src_value,
+ (GstClockTime *) dest_value);
+
+ /* if no tables, fall back to default estimated rate based conversion */
+ if (!res)
+ return gst_base_parse_convert_default (parse, src_format, src_value,
+ dest_format, dest_value);
+
+ return res;
+ }
+
+ static GstFlowReturn
+ gst_mpeg_audio_parse_pre_push_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame)
+ {
+ GstMpegAudioParse *mp3parse = GST_MPEG_AUDIO_PARSE (parse);
+ GstTagList *taglist = NULL;
+
+ /* create a taglist (only if any of the parameters has changed) and
+ * add the tags whose values changed */
+ if (mp3parse->last_posted_crc != mp3parse->last_crc) {
+ gboolean using_crc;
+
+ if (!taglist)
+ taglist = gst_tag_list_new_empty ();
+
+ mp3parse->last_posted_crc = mp3parse->last_crc;
+ if (mp3parse->last_posted_crc == CRC_PROTECTED) {
+ using_crc = TRUE;
+ } else {
+ using_crc = FALSE;
+ }
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_CRC,
+ using_crc, NULL);
+ }
+
+ if (mp3parse->last_posted_channel_mode != mp3parse->last_mode) {
+ if (!taglist)
+ taglist = gst_tag_list_new_empty ();
+
+ mp3parse->last_posted_channel_mode = mp3parse->last_mode;
+
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_MODE,
+ gst_mpeg_audio_channel_mode_get_nick (mp3parse->last_mode), NULL);
+ }
+
+ /* tags are sent late enough in this hook to ensure pending events
+ * have already been pushed downstream */
+ if (taglist != NULL || !mp3parse->sent_codec_tag) {
+ GstCaps *caps;
+
+ if (taglist == NULL)
+ taglist = gst_tag_list_new_empty ();
+
+ /* codec tag */
+ caps = gst_pad_get_current_caps (GST_BASE_PARSE_SRC_PAD (parse));
+ if (G_UNLIKELY (caps == NULL)) {
+ gst_tag_list_unref (taglist);
+
+ if (GST_PAD_IS_FLUSHING (GST_BASE_PARSE_SRC_PAD (parse))) {
+ GST_INFO_OBJECT (parse, "Src pad is flushing");
+ return GST_FLOW_FLUSHING;
+ } else {
+ GST_INFO_OBJECT (parse, "Src pad is not negotiated!");
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+ }
+ gst_pb_utils_add_codec_description_to_tag_list (taglist,
+ GST_TAG_AUDIO_CODEC, caps);
+ gst_caps_unref (caps);
+
+ if (mp3parse->hdr_bitrate > 0 && mp3parse->xing_bitrate == 0 &&
+ mp3parse->vbri_bitrate == 0) {
+ /* We don't have a VBR bitrate, so post the available bitrate as
+ * nominal and let baseparse calculate the real bitrate */
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
+ GST_TAG_NOMINAL_BITRATE, mp3parse->hdr_bitrate, NULL);
+ }
+
+ /* also signals the end of first-frame processing */
+ mp3parse->sent_codec_tag = TRUE;
+ }
+
+ /* if the taglist exists, we need to update it so it gets sent out */
+ if (taglist) {
+ gst_base_parse_merge_tags (parse, taglist, GST_TAG_MERGE_REPLACE);
+ gst_tag_list_unref (taglist);
+ }
+
+ /* usual clipping applies */
+ frame->flags |= GST_BASE_PARSE_FRAME_FLAG_CLIP;
+
+ return GST_FLOW_OK;
+ }
+
+ static void
+ remove_fields (GstCaps * caps)
+ {
+ guint i, n;
+
+ n = gst_caps_get_size (caps);
+ for (i = 0; i < n; i++) {
+ GstStructure *s = gst_caps_get_structure (caps, i);
+
+ gst_structure_remove_field (s, "parsed");
+ }
+ }
+
+ static GstCaps *
+ gst_mpeg_audio_parse_get_sink_caps (GstBaseParse * parse, GstCaps * filter)
+ {
+ GstCaps *peercaps, *templ;
+ GstCaps *res;
+
+ templ = gst_pad_get_pad_template_caps (GST_BASE_PARSE_SINK_PAD (parse));
+ if (filter) {
+ GstCaps *fcopy = gst_caps_copy (filter);
+ /* Remove the fields we convert */
+ remove_fields (fcopy);
+ peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), fcopy);
+ gst_caps_unref (fcopy);
+ } else
+ peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), NULL);
+
+ if (peercaps) {
+ /* Remove the parsed field */
+ peercaps = gst_caps_make_writable (peercaps);
+ remove_fields (peercaps);
+
+ res = gst_caps_intersect_full (peercaps, templ, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (peercaps);
+ gst_caps_unref (templ);
+ } else {
+ res = templ;
+ }
+
+ if (filter) {
+ GstCaps *intersection;
+
+ intersection =
+ gst_caps_intersect_full (filter, res, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (res);
+ res = intersection;
+ }
+
+ return res;
+ }
++
++#ifdef TIZEN_FEATURE_MP3PARSE_MODIFICATION
++/**
++ * gst_mpeg_audio_parse_src_eventfunc:
++ * @parse: the #GstBaseParse
++ * @event: the #GstEvent to handle
++ *
++ * Checks the streaming mode and flags before baseparse handles a seek
++ * event, and disables accurate seeking for HTTP sources.
++ *
++ * Returns: TRUE on success.
++ */
++static gboolean
++gst_mpeg_audio_parse_src_eventfunc (GstBaseParse * parse, GstEvent * event)
++{
++ gboolean handled = FALSE;
++ GstMpegAudioParse *mp3parse;
++ mp3parse = GST_MPEG_AUDIO_PARSE (parse);
++
++ GST_DEBUG_OBJECT (parse, "handling event %d, %s", GST_EVENT_TYPE (event),
++ GST_EVENT_TYPE_NAME (event));
++
++ switch (GST_EVENT_TYPE (event)) {
++ case GST_EVENT_SEEK:
++ {
++ GST_INFO_OBJECT (mp3parse, "GST_EVENT_SEEK enter");
++ if (mp3parse->http_seek_flag) {
++ GST_INFO_OBJECT (mp3parse,
++ "souphttpsrc is in pull mode, so accurate seek mode is disabled");
++ /* gst_base_parse_set_seek_mode() is provided by the Tizen-patched
++ * baseparse; mode 0 disables accurate seeking */
++ gst_base_parse_set_seek_mode (parse, 0);
++ goto mp3_seek_null_exit;
++ }
++ GST_INFO_OBJECT (mp3parse, "GST_EVENT_SEEK leave");
++ break;
++ }
++ default:
++ break;
++ }
++
++mp3_seek_null_exit:
++ /* call baseparse src_event function to handle event */
++ handled = GST_BASE_PARSE_CLASS (parent_class)->src_event (parse, event);
++
++ return handled;
++}
++#endif
--- /dev/null
+ /* GStreamer MPEG audio parser
+ * Copyright (C) 2006-2007 Jan Schmidt <thaytan@mad.scientist.com>
+ * Copyright (C) 2010 Mark Nauwelaerts <mnauw users sf net>
+ * Copyright (C) 2010 Nokia Corporation. All rights reserved.
+ * Contact: Stefan Kost <stefan.kost@nokia.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifndef __GST_MPEG_AUDIO_PARSE_H__
+ #define __GST_MPEG_AUDIO_PARSE_H__
+
+ #include <gst/gst.h>
+ #include <gst/base/gstbaseparse.h>
+
+ G_BEGIN_DECLS
+
+ #define GST_TYPE_MPEG_AUDIO_PARSE \
+ (gst_mpeg_audio_parse_get_type())
+ #define GST_MPEG_AUDIO_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_MPEG_AUDIO_PARSE, GstMpegAudioParse))
+ #define GST_MPEG_AUDIO_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_MPEG_AUDIO_PARSE, GstMpegAudioParseClass))
+ #define GST_IS_MPEG_AUDIO_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_MPEG_AUDIO_PARSE))
+ #define GST_IS_MPEG_AUDIO_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_MPEG_AUDIO_PARSE))
+
+ typedef struct _GstMpegAudioParse GstMpegAudioParse;
+ typedef struct _GstMpegAudioParseClass GstMpegAudioParseClass;
+
+ /**
+ * GstMpegAudioParse:
+ *
+ * The opaque GstMpegAudioParse object
+ */
+ struct _GstMpegAudioParse {
+ GstBaseParse baseparse;
+
+ /*< private >*/
+ gint rate;
+ gint channels;
+ gint layer;
+ gint version;
+
+ GstClockTime max_bitreservoir;
+ /* samples per frame */
+ gint spf;
+
+ gint freerate;
+
+ gboolean sent_codec_tag;
+ guint last_posted_bitrate;
+ gint last_posted_crc, last_crc;
+ guint last_posted_channel_mode, last_mode;
+
+ /* Bitrate from non-vbr headers */
+ guint32 hdr_bitrate;
+ gboolean bitrate_is_constant;
+
+ /* Xing info */
+ guint32 xing_flags;
+ guint32 xing_frames;
+ GstClockTime xing_total_time;
+ guint32 xing_bytes;
+ /* percent -> filepos mapping */
+ guchar xing_seek_table[100];
+ /* filepos -> percent mapping */
+ guint16 xing_seek_table_inverse[256];
+ guint32 xing_vbr_scale;
+ guint xing_bitrate;
+
+ /* VBRI info */
+ guint32 vbri_frames;
+ GstClockTime vbri_total_time;
+ guint32 vbri_bytes;
+ guint vbri_bitrate;
+ guint vbri_seek_points;
+ guint32 *vbri_seek_table;
+ gboolean vbri_valid;
+
+ /* LAME info */
+ guint32 encoder_delay;
+ guint32 encoder_padding;
++#ifdef TIZEN_FEATURE_MP3PARSE_MODIFICATION
++ /* Additional info */
++ gboolean http_seek_flag;
++#endif
+ };
+
+ /**
+ * GstMpegAudioParseClass:
+ * @parent_class: Element parent class.
+ *
+ * The opaque GstMpegAudioParseClass data structure.
+ */
+ struct _GstMpegAudioParseClass {
+ GstBaseParseClass baseparse_class;
+ };
+
+ GType gst_mpeg_audio_parse_get_type (void);
+
+ G_END_DECLS
+
+ #endif /* __GST_MPEG_AUDIO_PARSE_H__ */
--- /dev/null
+ /* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@temple-baptist.com>
+ * Copyright (C) <2006> Nokia Corporation (contact <stefan.kost@nokia.com>)
+ * Copyright (C) <2009-2010> STEricsson <benjamin.gaignard@stericsson.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+ /* Element-Checklist-Version: 5 */
+
+ /**
+ * SECTION:element-avidemux
+ * @title: avidemux
+ *
+ * Demuxes an .avi file into raw or compressed audio and/or video streams.
+ *
+ * This element supports both push and pull-based scheduling, depending on the
+ * capabilities of the upstream elements.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 filesrc location=test.avi ! avidemux name=demux demux.audio_00 ! decodebin ! audioconvert ! audioresample ! autoaudiosink demux.video_00 ! queue ! decodebin ! videoconvert ! videoscale ! autovideosink
+ * ]| Play (parse and decode) an .avi file and try to output it to
+ * an automatically detected soundcard and videosink. If the AVI file contains
+ * compressed audio or video data, this will only work if you have the
+ * right decoder elements/plugins installed.
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <string.h>
+ #include <stdio.h>
+
+ #include "gst/riff/riff-media.h"
+ #include "gstavielements.h"
+ #include "gstavidemux.h"
+ #include "avi-ids.h"
+ #include <gst/gst-i18n-plugin.h>
+ #include <gst/base/gstadapter.h>
+ #include <gst/tag/tag.h>
+
+ #define DIV_ROUND_UP(s,v) (((s) + ((v)-1)) / (v))
+
+ #define GST_AVI_KEYFRAME (1 << 0)
+ #define ENTRY_IS_KEYFRAME(e) ((e)->flags == GST_AVI_KEYFRAME)
+ #define ENTRY_SET_KEYFRAME(e) ((e)->flags = GST_AVI_KEYFRAME)
+ #define ENTRY_UNSET_KEYFRAME(e) ((e)->flags = 0)
+
+
+ GST_DEBUG_CATEGORY_STATIC (avidemux_debug);
+ #define GST_CAT_DEFAULT avidemux_debug
+
+ static GstStaticPadTemplate sink_templ = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-msvideo")
+ );
+
+ #ifndef GST_DISABLE_GST_DEBUG
+ static const char *const snap_types[2][2] = {
+ {"any", "after"},
+ {"before", "nearest"},
+ };
+ #endif
+
+ static void gst_avi_demux_finalize (GObject * object);
+
+ static void gst_avi_demux_reset (GstAviDemux * avi);
+
+ #if 0
+ static const GstEventMask *gst_avi_demux_get_event_mask (GstPad * pad);
+ #endif
+ static gboolean gst_avi_demux_handle_src_event (GstPad * pad,
+ GstObject * parent, GstEvent * event);
+ static gboolean gst_avi_demux_handle_sink_event (GstPad * pad,
+ GstObject * parent, GstEvent * event);
+ static gboolean gst_avi_demux_push_event (GstAviDemux * avi, GstEvent * event);
+
+ #if 0
+ static const GstFormat *gst_avi_demux_get_src_formats (GstPad * pad);
+ #endif
+ static gboolean gst_avi_demux_handle_src_query (GstPad * pad,
+ GstObject * parent, GstQuery * query);
+ static gboolean gst_avi_demux_src_convert (GstPad * pad, GstFormat src_format,
+ gint64 src_value, GstFormat * dest_format, gint64 * dest_value);
+
+ static gboolean gst_avi_demux_do_seek (GstAviDemux * avi, GstSegment * segment,
+ GstSeekFlags flags);
+ static gboolean gst_avi_demux_handle_seek (GstAviDemux * avi, GstPad * pad,
+ GstEvent * event);
+ static gboolean gst_avi_demux_handle_seek_push (GstAviDemux * avi, GstPad * pad,
+ GstEvent * event);
+ static void gst_avi_demux_loop (GstPad * pad);
+ static gboolean gst_avi_demux_sink_activate (GstPad * sinkpad,
+ GstObject * parent);
+ static gboolean gst_avi_demux_sink_activate_mode (GstPad * sinkpad,
+ GstObject * parent, GstPadMode mode, gboolean active);
+ static GstFlowReturn gst_avi_demux_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf);
+ #if 0
+ static void gst_avi_demux_set_index (GstElement * element, GstIndex * index);
+ static GstIndex *gst_avi_demux_get_index (GstElement * element);
+ #endif
+ static GstStateChangeReturn gst_avi_demux_change_state (GstElement * element,
+ GstStateChange transition);
+ static void gst_avi_demux_calculate_durations_from_index (GstAviDemux * avi);
+ static void gst_avi_demux_get_buffer_info (GstAviDemux * avi,
+ GstAviStream * stream, guint entry_n, GstClockTime * timestamp,
+ GstClockTime * ts_end, guint64 * offset, guint64 * offset_end);
+
+ static void gst_avi_demux_parse_idit (GstAviDemux * avi, GstBuffer * buf);
+ static void gst_avi_demux_parse_strd (GstAviDemux * avi, GstBuffer * buf);
+
+ static void parse_tag_value (GstAviDemux * avi, GstTagList * taglist,
+ const gchar * type, guint8 * ptr, guint tsize);
+
+ /* GObject methods */
+
+ #define gst_avi_demux_parent_class parent_class
+ G_DEFINE_TYPE (GstAviDemux, gst_avi_demux, GST_TYPE_ELEMENT);
+ GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (avidemux, "avidemux", GST_RANK_PRIMARY,
+ GST_TYPE_AVI_DEMUX, avi_element_init (plugin));
+
+ static void
+ gst_avi_demux_class_init (GstAviDemuxClass * klass)
+ {
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstPadTemplate *videosrctempl, *audiosrctempl, *subsrctempl, *subpicsrctempl;
+ GstCaps *audcaps, *vidcaps, *subcaps, *subpiccaps;
+
+ GST_DEBUG_CATEGORY_INIT (avidemux_debug, "avidemux",
+ 0, "Demuxer for AVI streams");
+
+ gobject_class->finalize = gst_avi_demux_finalize;
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_avi_demux_change_state);
+ #if 0
+ gstelement_class->set_index = GST_DEBUG_FUNCPTR (gst_avi_demux_set_index);
+ gstelement_class->get_index = GST_DEBUG_FUNCPTR (gst_avi_demux_get_index);
+ #endif
+
+ audcaps = gst_riff_create_audio_template_caps ();
+ gst_caps_append (audcaps, gst_caps_new_empty_simple ("audio/x-avi-unknown"));
+ audiosrctempl = gst_pad_template_new ("audio_%u",
+ GST_PAD_SRC, GST_PAD_SOMETIMES, audcaps);
+
+ vidcaps = gst_riff_create_video_template_caps ();
+ gst_caps_append (vidcaps, gst_riff_create_iavs_template_caps ());
+ gst_caps_append (vidcaps, gst_caps_new_empty_simple ("video/x-avi-unknown"));
+ videosrctempl = gst_pad_template_new ("video_%u",
+ GST_PAD_SRC, GST_PAD_SOMETIMES, vidcaps);
+
+ subcaps = gst_caps_new_empty_simple ("application/x-subtitle-avi");
+ subsrctempl = gst_pad_template_new ("subtitle_%u",
+ GST_PAD_SRC, GST_PAD_SOMETIMES, subcaps);
+ subpiccaps = gst_caps_new_empty_simple ("subpicture/x-xsub");
+ subpicsrctempl = gst_pad_template_new ("subpicture_%u",
+ GST_PAD_SRC, GST_PAD_SOMETIMES, subpiccaps);
+ gst_element_class_add_pad_template (gstelement_class, audiosrctempl);
+ gst_element_class_add_pad_template (gstelement_class, videosrctempl);
+ gst_element_class_add_pad_template (gstelement_class, subsrctempl);
+ gst_element_class_add_pad_template (gstelement_class, subpicsrctempl);
+ gst_element_class_add_static_pad_template (gstelement_class, &sink_templ);
+
+ gst_caps_unref (audcaps);
+ gst_caps_unref (vidcaps);
+ gst_caps_unref (subcaps);
+ gst_caps_unref (subpiccaps);
+
+ gst_element_class_set_static_metadata (gstelement_class, "Avi demuxer",
+ "Codec/Demuxer",
+ "Demultiplex an avi file into audio and video",
+ "Erik Walthinsen <omega@cse.ogi.edu>, "
+ "Wim Taymans <wim.taymans@chello.be>, "
+ "Thijs Vermeir <thijsvermeir@gmail.com>");
+ }
+
+ static void
+ gst_avi_demux_init (GstAviDemux * avi)
+ {
+ avi->sinkpad = gst_pad_new_from_static_template (&sink_templ, "sink");
+ gst_pad_set_activate_function (avi->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_avi_demux_sink_activate));
+ gst_pad_set_activatemode_function (avi->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_avi_demux_sink_activate_mode));
+ gst_pad_set_chain_function (avi->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_avi_demux_chain));
+ gst_pad_set_event_function (avi->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_avi_demux_handle_sink_event));
+ gst_element_add_pad (GST_ELEMENT_CAST (avi), avi->sinkpad);
+
+ avi->adapter = gst_adapter_new ();
+ avi->flowcombiner = gst_flow_combiner_new ();
+
+ gst_avi_demux_reset (avi);
+
+ GST_OBJECT_FLAG_SET (avi, GST_ELEMENT_FLAG_INDEXABLE);
+ }
+
+ static void
+ gst_avi_demux_finalize (GObject * object)
+ {
+ GstAviDemux *avi = GST_AVI_DEMUX (object);
+
+ GST_DEBUG ("AVI: finalize");
+
+ g_object_unref (avi->adapter);
+ gst_flow_combiner_free (avi->flowcombiner);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ static void
+ gst_avi_demux_reset_stream (GstAviDemux * avi, GstAviStream * stream)
+ {
+ g_free (stream->strh);
+ g_free (stream->strf.data);
+ g_free (stream->name);
+ g_free (stream->index);
+ g_free (stream->indexes);
+ if (stream->initdata)
+ gst_buffer_unref (stream->initdata);
+ if (stream->extradata)
+ gst_buffer_unref (stream->extradata);
+ if (stream->rgb8_palette)
+ gst_buffer_unref (stream->rgb8_palette);
+ if (stream->pad) {
+ if (stream->exposed) {
+ gst_pad_set_active (stream->pad, FALSE);
+ gst_element_remove_pad (GST_ELEMENT_CAST (avi), stream->pad);
+ gst_flow_combiner_remove_pad (avi->flowcombiner, stream->pad);
+ } else
+ gst_object_unref (stream->pad);
+ }
+ if (stream->taglist) {
+ gst_tag_list_unref (stream->taglist);
+ stream->taglist = NULL;
+ }
+ memset (stream, 0, sizeof (GstAviStream));
+ }
+
+ static void
+ gst_avi_demux_reset (GstAviDemux * avi)
+ {
+ gint i;
+
+ GST_DEBUG ("AVI: reset");
+
+ for (i = 0; i < avi->num_streams; i++)
+ gst_avi_demux_reset_stream (avi, &avi->stream[i]);
+
+ avi->header_state = GST_AVI_DEMUX_HEADER_TAG_LIST;
+ avi->num_streams = 0;
+ avi->num_v_streams = 0;
+ avi->num_a_streams = 0;
+ avi->num_t_streams = 0;
+ avi->num_sp_streams = 0;
+ avi->main_stream = -1;
+
+ avi->have_group_id = FALSE;
+ avi->group_id = G_MAXUINT;
+
+ avi->state = GST_AVI_DEMUX_START;
+ avi->offset = 0;
+ avi->building_index = FALSE;
+
+ avi->index_offset = 0;
+ g_free (avi->avih);
+ avi->avih = NULL;
+
+ #if 0
+ if (avi->element_index)
+ gst_object_unref (avi->element_index);
+ avi->element_index = NULL;
+ #endif
+
+ if (avi->seg_event) {
+ gst_event_unref (avi->seg_event);
+ avi->seg_event = NULL;
+ }
+ if (avi->seek_event) {
+ gst_event_unref (avi->seek_event);
+ avi->seek_event = NULL;
+ }
+
+ if (avi->globaltags)
+ gst_tag_list_unref (avi->globaltags);
+ avi->globaltags = NULL;
+
+ avi->got_tags = TRUE; /* we always want to push global tags */
+ avi->have_eos = FALSE;
+ avi->seekable = TRUE;
+
+ gst_adapter_clear (avi->adapter);
+
+ gst_segment_init (&avi->segment, GST_FORMAT_TIME);
+ avi->segment_seqnum = 0;
+ }
+
+
+ /* GstElement methods */
+
+ #if 0
+ static const GstFormat *
+ gst_avi_demux_get_src_formats (GstPad * pad)
+ {
+ GstAviStream *stream = gst_pad_get_element_private (pad);
+
+ static const GstFormat src_a_formats[] = {
+ GST_FORMAT_TIME,
+ GST_FORMAT_BYTES,
+ GST_FORMAT_DEFAULT,
+ 0
+ };
+ static const GstFormat src_v_formats[] = {
+ GST_FORMAT_TIME,
+ GST_FORMAT_DEFAULT,
+ 0
+ };
+
+ return (stream->strh->type == GST_RIFF_FCC_auds ?
+ src_a_formats : src_v_formats);
+ }
+ #endif
+
+ /* assumes stream->strf.auds->av_bps != 0 */
+ static inline GstClockTime
+ avi_stream_convert_bytes_to_time_unchecked (GstAviStream * stream,
+ guint64 bytes)
+ {
+ return gst_util_uint64_scale_int (bytes, GST_SECOND,
+ stream->strf.auds->av_bps);
+ }
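+
+ /* e.g. CD audio (44.1 kHz, 16-bit stereo) has av_bps = 176400, so
+ * 176400 bytes correspond to exactly one second */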
+
+ static inline guint64
+ avi_stream_convert_time_to_bytes_unchecked (GstAviStream * stream,
+ GstClockTime time)
+ {
+ return gst_util_uint64_scale_int (time, stream->strf.auds->av_bps,
+ GST_SECOND);
+ }
+
+ /* assumes stream->strh->rate != 0 */
+ static inline GstClockTime
+ avi_stream_convert_frames_to_time_unchecked (GstAviStream * stream,
+ guint64 frames)
+ {
+ return gst_util_uint64_scale (frames, stream->strh->scale * GST_SECOND,
+ stream->strh->rate);
+ }
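+
+ /* one frame lasts scale/rate seconds: e.g. NTSC video with
+ * rate = 30000 and scale = 1001 gives ~33.37 ms per frame */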
+
+ static inline guint64
+ avi_stream_convert_time_to_frames_unchecked (GstAviStream * stream,
+ GstClockTime time)
+ {
+ return gst_util_uint64_scale (time, stream->strh->rate,
+ stream->strh->scale * GST_SECOND);
+ }
+
+ static gboolean
+ gst_avi_demux_src_convert (GstPad * pad,
+ GstFormat src_format,
+ gint64 src_value, GstFormat * dest_format, gint64 * dest_value)
+ {
+ GstAviStream *stream = gst_pad_get_element_private (pad);
+ gboolean res = TRUE;
+
+ GST_LOG_OBJECT (pad,
+ "Received src_format:%s, src_value:%" G_GUINT64_FORMAT
+ ", dest_format:%s", gst_format_get_name (src_format), src_value,
+ gst_format_get_name (*dest_format));
+
+ if (G_UNLIKELY (src_format == *dest_format)) {
+ *dest_value = src_value;
+ goto done;
+ }
+ if (G_UNLIKELY (!stream->strh || !stream->strf.data)) {
+ res = FALSE;
+ goto done;
+ }
+ if (G_UNLIKELY (stream->strh->type == GST_RIFF_FCC_vids &&
+ (src_format == GST_FORMAT_BYTES
+ || *dest_format == GST_FORMAT_BYTES))) {
+ res = FALSE;
+ goto done;
+ }
+
+ switch (src_format) {
+ case GST_FORMAT_TIME:
+ switch (*dest_format) {
+ case GST_FORMAT_BYTES:
+ *dest_value = gst_util_uint64_scale_int (src_value,
+ stream->strf.auds->av_bps, GST_SECOND);
+ break;
+ case GST_FORMAT_DEFAULT:
+ *dest_value =
+ gst_util_uint64_scale_round (src_value, stream->strh->rate,
+ stream->strh->scale * GST_SECOND);
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+ break;
+ case GST_FORMAT_BYTES:
+ switch (*dest_format) {
+ case GST_FORMAT_TIME:
+ if (stream->strf.auds->av_bps != 0) {
+ *dest_value = avi_stream_convert_bytes_to_time_unchecked (stream,
+ src_value);
+ } else
+ res = FALSE;
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+ break;
+ case GST_FORMAT_DEFAULT:
+ switch (*dest_format) {
+ case GST_FORMAT_TIME:
+ *dest_value =
+ avi_stream_convert_frames_to_time_unchecked (stream, src_value);
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+ break;
+ default:
+ res = FALSE;
+ }
+
+ done:
+ GST_LOG_OBJECT (pad,
+ "Returning res:%d dest_format:%s dest_value:%" G_GUINT64_FORMAT, res,
+ gst_format_get_name (*dest_format), *dest_value);
+ return res;
+ }
+
+ static gboolean
+ gst_avi_demux_handle_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
+ {
+ gboolean res = TRUE;
+ GstAviDemux *avi = GST_AVI_DEMUX (parent);
+
+ GstAviStream *stream = gst_pad_get_element_private (pad);
+
+ if (!stream->strh || !stream->strf.data)
+ return gst_pad_query_default (pad, parent, query);
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_POSITION:{
+ gint64 pos = 0;
+
+ GST_DEBUG ("pos query for stream %u: frames %u, bytes %u",
+ stream->num, stream->current_entry, stream->current_total);
+
+ /* FIXME, this looks clumsy */
+ if (stream->strh->type == GST_RIFF_FCC_auds) {
+ if (stream->is_vbr) {
+ /* VBR */
+ pos = avi_stream_convert_frames_to_time_unchecked (stream,
+ stream->current_entry);
+ GST_DEBUG_OBJECT (avi, "VBR convert frame %u, time %"
+ GST_TIME_FORMAT, stream->current_entry, GST_TIME_ARGS (pos));
+ } else if (stream->strf.auds->av_bps != 0) {
+ /* CBR */
+ pos = avi_stream_convert_bytes_to_time_unchecked (stream,
+ stream->current_total);
+ GST_DEBUG_OBJECT (avi,
+ "CBR convert bytes %u, time %" GST_TIME_FORMAT,
+ stream->current_total, GST_TIME_ARGS (pos));
+ } else if (stream->idx_n != 0 && stream->total_bytes != 0) {
+ /* calculate timestamps based on percentage of length */
+ guint64 xlen = avi->avih->us_frame *
+ avi->avih->tot_frames * GST_USECOND;
+
+ pos = gst_util_uint64_scale (xlen, stream->current_total,
+ stream->total_bytes);
+ GST_DEBUG_OBJECT (avi,
+ "CBR perc convert bytes %u, time %" GST_TIME_FORMAT,
+ stream->current_total, GST_TIME_ARGS (pos));
+ } else {
+ /* we don't know */
+ res = FALSE;
+ }
+ } else {
+ if (stream->strh->rate != 0) {
+ pos = gst_util_uint64_scale ((guint64) stream->current_entry *
+ stream->strh->scale, GST_SECOND, (guint64) stream->strh->rate);
+ } else {
+ pos = stream->current_entry * avi->avih->us_frame * GST_USECOND;
+ }
+ }
+ if (res) {
+ GST_DEBUG ("pos query : %" GST_TIME_FORMAT, GST_TIME_ARGS (pos));
+ gst_query_set_position (query, GST_FORMAT_TIME, pos);
+ } else
+ GST_WARNING ("pos query failed");
+ break;
+ }
+ case GST_QUERY_DURATION:
+ {
+ GstFormat fmt;
+ GstClockTime duration;
+
+ /* only act on audio or video streams */
+ if (stream->strh->type != GST_RIFF_FCC_auds &&
+ stream->strh->type != GST_RIFF_FCC_vids &&
+ stream->strh->type != GST_RIFF_FCC_iavs) {
+ res = FALSE;
+ break;
+ }
+
+ /* take stream duration, fall back to avih duration */
+ if ((duration = stream->duration) == -1)
+ if ((duration = stream->hdr_duration) == -1)
+ duration = avi->duration;
+
+ gst_query_parse_duration (query, &fmt, NULL);
+
+ switch (fmt) {
+ case GST_FORMAT_TIME:
+ gst_query_set_duration (query, fmt, duration);
+ break;
+ case GST_FORMAT_DEFAULT:
+ {
+ gint64 dur;
+ GST_DEBUG_OBJECT (query, "total frames is %" G_GUINT32_FORMAT,
+ stream->idx_n);
+
+ if (stream->idx_n > 0)
+ gst_query_set_duration (query, fmt, stream->idx_n);
+ else if (gst_pad_query_convert (pad, GST_FORMAT_TIME,
+ duration, fmt, &dur))
+ gst_query_set_duration (query, fmt, dur);
+ break;
+ }
+ default:
+ res = FALSE;
+ break;
+ }
+ break;
+ }
+ case GST_QUERY_SEEKING:{
+ GstFormat fmt;
+
+ gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
+ if (fmt == GST_FORMAT_TIME) {
+ gboolean seekable = TRUE;
+
+ if (avi->streaming) {
+ seekable = avi->seekable;
+ }
+
+ gst_query_set_seeking (query, GST_FORMAT_TIME, seekable,
+ 0, stream->duration);
+ res = TRUE;
+ }
+ break;
+ }
+ case GST_QUERY_CONVERT:{
+ GstFormat src_fmt, dest_fmt;
+ gint64 src_val, dest_val;
+
+ gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
+ if ((res = gst_avi_demux_src_convert (pad, src_fmt, src_val, &dest_fmt,
+ &dest_val)))
+ gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
+ else
+ res = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+ case GST_QUERY_SEGMENT:
+ {
+ GstFormat format;
+ gint64 start, stop;
+
+ format = avi->segment.format;
+
+ start =
+ gst_segment_to_stream_time (&avi->segment, format,
+ avi->segment.start);
+ if ((stop = avi->segment.stop) == -1)
+ stop = avi->segment.duration;
+ else
+ stop = gst_segment_to_stream_time (&avi->segment, format, stop);
+
+ gst_query_set_segment (query, avi->segment.rate, format, start, stop);
+ res = TRUE;
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+
+ return res;
+ }
+
+ #if 0
+ static const GstEventMask *
+ gst_avi_demux_get_event_mask (GstPad * pad)
+ {
+ static const GstEventMask masks[] = {
+ {GST_EVENT_SEEK, GST_SEEK_METHOD_SET | GST_SEEK_FLAG_KEY_UNIT},
+ {0,}
+ };
+
+ return masks;
+ }
+ #endif
+
+ #if 0
+ static guint64
+ gst_avi_demux_seek_streams (GstAviDemux * avi, guint64 offset, gboolean before)
+ {
+ GstAviStream *stream;
+ GstIndexEntry *entry;
+ gint i;
+ gint64 val, min = offset;
+
+ for (i = 0; i < avi->num_streams; i++) {
+ stream = &avi->stream[i];
+
+ entry = gst_index_get_assoc_entry (avi->element_index, stream->index_id,
+ before ? GST_INDEX_LOOKUP_BEFORE : GST_INDEX_LOOKUP_AFTER,
+ GST_ASSOCIATION_FLAG_NONE, GST_FORMAT_BYTES, offset);
+
+ if (before) {
+ if (entry) {
+ gst_index_entry_assoc_map (entry, GST_FORMAT_BYTES, &val);
+ GST_DEBUG_OBJECT (avi, "stream %d, previous entry at %"
+ G_GUINT64_FORMAT, i, val);
+ if (val < min)
+ min = val;
+ }
+ continue;
+ }
+
+ if (!entry) {
+ GST_DEBUG_OBJECT (avi, "no position for stream %d, assuming at start", i);
+ stream->current_entry = 0;
+ stream->current_total = 0;
+ continue;
+ }
+
+ gst_index_entry_assoc_map (entry, GST_FORMAT_BYTES, &val);
+ GST_DEBUG_OBJECT (avi, "stream %d, next entry at %" G_GUINT64_FORMAT,
+ i, val);
+
+ gst_index_entry_assoc_map (entry, GST_FORMAT_TIME, &val);
+ stream->current_total = val;
+ gst_index_entry_assoc_map (entry, GST_FORMAT_DEFAULT, &val);
+ stream->current_entry = val;
+ }
+
+ return min;
+ }
+ #endif
+
+ static gint
+ gst_avi_demux_index_entry_offset_search (GstAviIndexEntry * entry,
+ guint64 * offset)
+ {
+ if (entry->offset < *offset)
+ return -1;
+ else if (entry->offset > *offset)
+ return 1;
+ return 0;
+ }
+
+ static guint64
+ gst_avi_demux_seek_streams_index (GstAviDemux * avi, guint64 offset,
+ gboolean before)
+ {
+ GstAviStream *stream;
+ GstAviIndexEntry *entry;
+ gint i;
+ gint64 val, min = offset;
+ guint index = 0;
+
+ for (i = 0; i < avi->num_streams; i++) {
+ stream = &avi->stream[i];
+
+ /* compensate for chunk header */
+ offset += 8;
+ entry =
+ gst_util_array_binary_search (stream->index, stream->idx_n,
+ sizeof (GstAviIndexEntry),
+ (GCompareDataFunc) gst_avi_demux_index_entry_offset_search,
+ before ? GST_SEARCH_MODE_BEFORE : GST_SEARCH_MODE_AFTER, &offset, NULL);
+ offset -= 8;
+
+ if (entry)
+ index = entry - stream->index;
+
+ if (before) {
+ if (entry) {
+ val = stream->index[index].offset;
+ GST_DEBUG_OBJECT (avi,
+ "stream %d, previous entry at %" G_GUINT64_FORMAT, i, val);
+ if (val < min)
+ min = val;
+ }
+ continue;
+ }
+
+ if (!entry) {
+ GST_DEBUG_OBJECT (avi, "no position for stream %d, assuming at start", i);
+ stream->current_entry = 0;
+ stream->current_total = 0;
+ continue;
+ }
+
+ val = stream->index[index].offset - 8;
+ GST_DEBUG_OBJECT (avi, "stream %d, next entry at %" G_GUINT64_FORMAT, i,
+ val);
+
+ stream->current_total = stream->index[index].total;
+ stream->current_entry = index;
+ }
+
+ return min;
+ }
+
+ #define GST_AVI_SEEK_PUSH_DISPLACE (4 * GST_SECOND)
+
+ static gboolean
+ gst_avi_demux_handle_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+ {
+ gboolean res = TRUE;
+ GstAviDemux *avi = GST_AVI_DEMUX (parent);
+
+ GST_DEBUG_OBJECT (avi,
+ "have event type %s: %p on sink pad", GST_EVENT_TYPE_NAME (event), event);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEGMENT:
+ {
+ gint64 boffset, offset = 0;
+ GstSegment segment;
+ GstEvent *segment_event;
+
+ /* some debug output */
+ gst_event_copy_segment (event, &segment);
+ GST_DEBUG_OBJECT (avi, "received newsegment %" GST_SEGMENT_FORMAT,
+ &segment);
+
+ /* chain will send initial newsegment after pads have been added */
+ if (avi->state != GST_AVI_DEMUX_MOVI) {
+ GST_DEBUG_OBJECT (avi, "still starting, eating event");
+ goto exit;
+ }
+
+ /* we only expect a BYTE segment, e.g. following a seek */
+ if (segment.format != GST_FORMAT_BYTES) {
+ GST_DEBUG_OBJECT (avi, "unsupported segment format, ignoring");
+ goto exit;
+ }
+
+ if (avi->have_index) {
+ GstAviIndexEntry *entry;
+ guint i = 0, index = 0, k = 0;
+ GstAviStream *stream;
+
+ /* compensate chunk header, stored index offset points after header */
+ boffset = segment.start + 8;
+ /* find which stream we're on */
+ do {
+ stream = &avi->stream[i];
+
+ /* find the index for start bytes offset */
+ entry = gst_util_array_binary_search (stream->index,
+ stream->idx_n, sizeof (GstAviIndexEntry),
+ (GCompareDataFunc) gst_avi_demux_index_entry_offset_search,
+ GST_SEARCH_MODE_AFTER, &boffset, NULL);
+
+ if (entry == NULL)
+ continue;
+ index = entry - stream->index;
+
+ /* we are on the stream with a chunk start offset closest to start */
+ if (!offset || stream->index[index].offset < offset) {
+ offset = stream->index[index].offset;
+ k = i;
+ }
+ /* exact match needs no further searching */
+ if (stream->index[index].offset == boffset)
+ break;
+ } while (++i < avi->num_streams);
+ boffset -= 8;
+ offset -= 8;
+ stream = &avi->stream[k];
+
+ /* so we have no idea what is to come, or where we are */
+ if (!offset) {
+ GST_WARNING_OBJECT (avi, "insufficient index data, forcing EOS");
+ goto eos;
+ }
+
+ /* get the ts corresponding to start offset bytes for the stream */
+ gst_avi_demux_get_buffer_info (avi, stream, index,
+ (GstClockTime *) & segment.time, NULL, NULL, NULL);
+ #if 0
+ } else if (avi->element_index) {
+ GstIndexEntry *entry;
+
+ /* Let's check if we have an index entry for this position */
+ entry = gst_index_get_assoc_entry (avi->element_index, avi->index_id,
+ GST_INDEX_LOOKUP_AFTER, GST_ASSOCIATION_FLAG_NONE,
+ GST_FORMAT_BYTES, segment.start);
+
+ /* we can not go where we have not yet been before ... */
+ if (!entry) {
+ GST_WARNING_OBJECT (avi, "insufficient index data, forcing EOS");
+ goto eos;
+ }
+
+ gst_index_entry_assoc_map (entry, GST_FORMAT_TIME,
+ (gint64 *) & segment.time);
+ gst_index_entry_assoc_map (entry, GST_FORMAT_BYTES, &offset);
+ #endif
+ } else {
+ GST_WARNING_OBJECT (avi, "no index data, forcing EOS");
+ goto eos;
+ }
+
+ segment.format = GST_FORMAT_TIME;
+ segment.start = segment.time;
+ segment.stop = GST_CLOCK_TIME_NONE;
+ segment.position = segment.start;
+
+ /* rescue duration */
+ segment.duration = avi->segment.duration;
+
+ /* set up segment and send downstream */
+ gst_segment_copy_into (&segment, &avi->segment);
+
+ GST_DEBUG_OBJECT (avi, "Pushing newseg %" GST_SEGMENT_FORMAT, &segment);
+ avi->segment_seqnum = gst_event_get_seqnum (event);
+ segment_event = gst_event_new_segment (&segment);
+ gst_event_set_seqnum (segment_event, gst_event_get_seqnum (event));
+ gst_avi_demux_push_event (avi, segment_event);
+
+ GST_DEBUG_OBJECT (avi, "next chunk expected at %" G_GINT64_FORMAT,
+ boffset);
+
+ /* adjust state for streaming thread accordingly */
+ if (avi->have_index)
+ gst_avi_demux_seek_streams_index (avi, offset, FALSE);
+ #if 0
+ else
+ gst_avi_demux_seek_streams (avi, offset, FALSE);
+ #endif
+
+ /* set up streaming thread */
+ g_assert (offset >= boffset);
+ avi->offset = boffset;
+ avi->todrop = offset - boffset;
+
+ exit:
+ gst_event_unref (event);
+ res = TRUE;
+ break;
+ eos:
+ /* set up for EOS */
+ avi->have_eos = TRUE;
+ goto exit;
+ }
+ case GST_EVENT_EOS:
+ {
+ if (avi->state != GST_AVI_DEMUX_MOVI) {
+ gst_event_unref (event);
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX,
+ (NULL), ("got eos and didn't receive a complete header object"));
+ } else if (!gst_avi_demux_push_event (avi, event)) {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX,
+ (NULL), ("got eos but no streams (yet)"));
+ }
+ break;
+ }
+ case GST_EVENT_FLUSH_STOP:
+ {
+ gint i;
+
+ gst_adapter_clear (avi->adapter);
+ avi->have_eos = FALSE;
+ for (i = 0; i < avi->num_streams; i++) {
+ avi->stream[i].discont = TRUE;
+ }
+ /* fall through to default case so that the event gets passed downstream */
+ }
+ default:
+ res = gst_pad_event_default (pad, parent, event);
+ break;
+ }
+
+ return res;
+ }
+
+ static gboolean
+ gst_avi_demux_handle_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+ {
+ gboolean res = TRUE;
+ GstAviDemux *avi = GST_AVI_DEMUX (parent);
+
+ GST_DEBUG_OBJECT (avi,
+ "have event type %s: %p on src pad", GST_EVENT_TYPE_NAME (event), event);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEEK:
+ if (!avi->streaming) {
+ res = gst_avi_demux_handle_seek (avi, pad, event);
+ } else {
+ res = gst_avi_demux_handle_seek_push (avi, pad, event);
+ }
+ gst_event_unref (event);
+ break;
+ default:
+ res = gst_pad_event_default (pad, parent, event);
+ break;
+ }
+
+ return res;
+ }
+
+ /* streaming helper (push) */
+
+ /*
+ * gst_avi_demux_peek_chunk_info:
+ * @avi: Avi object
+ * @tag: holder for tag
+ * @size: holder for tag size
+ *
+ * Peek next chunk info (tag and size)
+ *
+ * Returns: TRUE when the chunk info (tag and size) could be read
+ */
+ static gboolean
+ gst_avi_demux_peek_chunk_info (GstAviDemux * avi, guint32 * tag, guint32 * size)
+ {
+ const guint8 *data = NULL;
+
+ if (gst_adapter_available (avi->adapter) < 8)
+ return FALSE;
+
+ data = gst_adapter_map (avi->adapter, 8);
+ *tag = GST_READ_UINT32_LE (data);
+ *size = GST_READ_UINT32_LE (data + 4);
+ gst_adapter_unmap (avi->adapter);
+
+ return TRUE;
+ }
+
+ /*
+ * gst_avi_demux_peek_chunk:
+ * @avi: Avi object
+ * @tag: holder for tag
+ * @size: holder for tag size
+ *
+ * Peek enough data for one full chunk
+ *
+ * Returns: %TRUE when one full chunk is available
+ */
+ static gboolean
+ gst_avi_demux_peek_chunk (GstAviDemux * avi, guint32 * tag, guint32 * size)
+ {
+ guint32 peek_size = 0;
+ gint available;
+
+ if (!gst_avi_demux_peek_chunk_info (avi, tag, size))
+ goto peek_failed;
+
+ /* size 0 -> an empty data buffer would surprise most callers,
+ * large size -> do not bother trying to squeeze that into the adapter,
+ * so we throw a poor man's exception, which can be caught if the caller
+ * really wants to handle 0-size chunks */
+ if (!(*size) || (*size) >= (1 << 30))
+ goto strange_size;
+
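+ /* RIFF chunk payloads are padded to an even byte count, so round the
+ * size up to the next even number when checking availability */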
+ peek_size = (*size + 1) & ~1;
+ available = gst_adapter_available (avi->adapter);
+
+ GST_DEBUG_OBJECT (avi,
+ "Need to peek chunk of %d bytes to read chunk %" GST_FOURCC_FORMAT
+ ", %d bytes available", *size, GST_FOURCC_ARGS (*tag), available);
+
+ if (available < (8 + peek_size))
+ goto need_more;
+
+ return TRUE;
+
+ /* ERRORS */
+ peek_failed:
+ {
+ GST_INFO_OBJECT (avi, "Failed to peek");
+ return FALSE;
+ }
+ strange_size:
+ {
+ GST_INFO_OBJECT (avi,
+ "Invalid/unexpected chunk size %d for tag %" GST_FOURCC_FORMAT, *size,
+ GST_FOURCC_ARGS (*tag));
+ /* chain should give up */
+ avi->abort_buffering = TRUE;
+ return FALSE;
+ }
+ need_more:
+ {
+ GST_INFO_OBJECT (avi, "need more %d < %" G_GUINT32_FORMAT,
+ available, 8 + peek_size);
+ return FALSE;
+ }
+ }
+
+ /* AVI init */
+
+ /*
+ * gst_avi_demux_parse_file_header:
+ * @element: caller element (used for errors/debug).
+ * @buf: input data to be used for parsing.
+ *
+ * "Open" a RIFF/AVI file. The buffer should be at least 12
+ * bytes long. Takes ownership of @buf.
+ *
+ * Returns: TRUE if the file is a RIFF/AVI file, FALSE otherwise.
+ * Throws an error, caller should error out (fatal).
+ */
+ static gboolean
+ gst_avi_demux_parse_file_header (GstElement * element, GstBuffer * buf)
+ {
+ guint32 doctype;
+ GstClockTime stamp;
+
+ stamp = gst_util_get_timestamp ();
+
+ /* riff_parse posts an error */
+ if (!gst_riff_parse_file_header (element, buf, &doctype))
+ return FALSE;
+
+ if (doctype != GST_RIFF_RIFF_AVI)
+ goto not_avi;
+
+ stamp = gst_util_get_timestamp () - stamp;
+ GST_DEBUG_OBJECT (element, "header parsing took %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (stamp));
+
+ return TRUE;
+
+ /* ERRORS */
+ not_avi:
+ {
+ GST_ELEMENT_ERROR (element, STREAM, WRONG_TYPE, (NULL),
+ ("File is not an AVI file: 0x%" G_GINT32_MODIFIER "x", doctype));
+ return FALSE;
+ }
+ }
+
+ /*
+ * Read AVI file tag when streaming
+ */
+ static GstFlowReturn
+ gst_avi_demux_stream_init_push (GstAviDemux * avi)
+ {
+ if (gst_adapter_available (avi->adapter) >= 12) {
+ GstBuffer *tmp;
+
+ tmp = gst_adapter_take_buffer (avi->adapter, 12);
+
+ GST_DEBUG ("Parsing avi header");
+ if (!gst_avi_demux_parse_file_header (GST_ELEMENT_CAST (avi), tmp)) {
+ return GST_FLOW_ERROR;
+ }
+ GST_DEBUG ("header ok");
+ avi->offset += 12;
+
+ avi->state = GST_AVI_DEMUX_HEADER;
+ }
+ return GST_FLOW_OK;
+ }
+
+ /*
+ * Read AVI file tag
+ */
+ static GstFlowReturn
+ gst_avi_demux_stream_init_pull (GstAviDemux * avi)
+ {
+ GstFlowReturn res;
+ GstBuffer *buf = NULL;
+
+ res = gst_pad_pull_range (avi->sinkpad, avi->offset, 12, &buf);
+ if (res != GST_FLOW_OK)
+ return res;
+ else if (!gst_avi_demux_parse_file_header (GST_ELEMENT_CAST (avi), buf))
+ goto wrong_header;
+
+ avi->offset += 12;
+
+ return GST_FLOW_OK;
+
+ /* ERRORS */
+ wrong_header:
+ {
+ GST_DEBUG_OBJECT (avi, "error parsing file header");
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ /* AVI header handling */
+ /*
+ * gst_avi_demux_parse_avih:
+ * @avi: caller element (used for errors/debug).
+ * @buf: input data to be used for parsing.
+ * @avih: pointer to structure (filled in by function) containing
+ * stream information (such as flags, number of streams, etc.).
+ *
+ * Read 'avih' header. Discards buffer after use.
+ *
+ * Returns: TRUE on success, FALSE otherwise. Throws an error if
+ * the header is invalid. The caller should error out
+ * (fatal).
+ */
+ static gboolean
+ gst_avi_demux_parse_avih (GstAviDemux * avi,
+ GstBuffer * buf, gst_riff_avih ** _avih)
+ {
+ gst_riff_avih *avih;
+ gsize size;
+
+ if (buf == NULL)
+ goto no_buffer;
+
+ size = gst_buffer_get_size (buf);
+ if (size < sizeof (gst_riff_avih))
+ goto avih_too_small;
+
+ avih = g_malloc (size);
+ gst_buffer_extract (buf, 0, avih, size);
+
+ #if (G_BYTE_ORDER == G_BIG_ENDIAN)
+ avih->us_frame = GUINT32_FROM_LE (avih->us_frame);
+ avih->max_bps = GUINT32_FROM_LE (avih->max_bps);
+ avih->pad_gran = GUINT32_FROM_LE (avih->pad_gran);
+ avih->flags = GUINT32_FROM_LE (avih->flags);
+ avih->tot_frames = GUINT32_FROM_LE (avih->tot_frames);
+ avih->init_frames = GUINT32_FROM_LE (avih->init_frames);
+ avih->streams = GUINT32_FROM_LE (avih->streams);
+ avih->bufsize = GUINT32_FROM_LE (avih->bufsize);
+ avih->width = GUINT32_FROM_LE (avih->width);
+ avih->height = GUINT32_FROM_LE (avih->height);
+ avih->scale = GUINT32_FROM_LE (avih->scale);
+ avih->rate = GUINT32_FROM_LE (avih->rate);
+ avih->start = GUINT32_FROM_LE (avih->start);
+ avih->length = GUINT32_FROM_LE (avih->length);
+ #endif
+
+ /* debug stuff */
+ GST_INFO_OBJECT (avi, "avih tag found:");
+ GST_INFO_OBJECT (avi, " us_frame %u", avih->us_frame);
+ GST_INFO_OBJECT (avi, " max_bps %u", avih->max_bps);
+ GST_INFO_OBJECT (avi, " pad_gran %u", avih->pad_gran);
+ GST_INFO_OBJECT (avi, " flags 0x%08x", avih->flags);
+ GST_INFO_OBJECT (avi, " tot_frames %u", avih->tot_frames);
+ GST_INFO_OBJECT (avi, " init_frames %u", avih->init_frames);
+ GST_INFO_OBJECT (avi, " streams %u", avih->streams);
+ GST_INFO_OBJECT (avi, " bufsize %u", avih->bufsize);
+ GST_INFO_OBJECT (avi, " width %u", avih->width);
+ GST_INFO_OBJECT (avi, " height %u", avih->height);
+ GST_INFO_OBJECT (avi, " scale %u", avih->scale);
+ GST_INFO_OBJECT (avi, " rate %u", avih->rate);
+ GST_INFO_OBJECT (avi, " start %u", avih->start);
+ GST_INFO_OBJECT (avi, " length %u", avih->length);
+
+ *_avih = avih;
+ gst_buffer_unref (buf);
+
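+ /* us_frame is microseconds per frame; multiplying by 1000 converts
+ * the total to GStreamer nanoseconds */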
+ if (avih->us_frame != 0 && avih->tot_frames != 0)
+ avi->duration =
+ (guint64) avih->us_frame * (guint64) avih->tot_frames * 1000;
+ else
+ avi->duration = GST_CLOCK_TIME_NONE;
+
+ GST_INFO_OBJECT (avi, " header duration %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (avi->duration));
+
+ return TRUE;
+
+ /* ERRORS */
+ no_buffer:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL), ("No buffer"));
+ return FALSE;
+ }
+ avih_too_small:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("Too small avih (%" G_GSIZE_FORMAT " available, %d needed)",
+ size, (int) sizeof (gst_riff_avih)));
+ gst_buffer_unref (buf);
+ return FALSE;
+ }
+ }
+
+ /*
+ * gst_avi_demux_parse_superindex:
+ * @avi: caller element (used for debugging/errors).
+ * @buf: input data to use for parsing.
+ * @locations: locations in the file (byte-offsets) that contain
+ * the actual indexes (see gst_avi_demux_parse_subindex()).
+ * The array ends with GST_BUFFER_OFFSET_NONE.
+ *
+ * Reads superindex (openDML-2 spec stuff) from the provided data.
+ *
+ * Returns: TRUE on success, FALSE otherwise. Indexes should be skipped
+ * on error, but they are not fatal.
+ */
+ static gboolean
+ gst_avi_demux_parse_superindex (GstAviDemux * avi,
+ GstBuffer * buf, guint64 ** _indexes)
+ {
+ GstMapInfo map;
+ guint8 *data;
+ guint16 bpe = 16;
+ guint32 num, i;
+ guint64 *indexes;
+ gsize size;
+
+ *_indexes = NULL;
+
+ if (buf) {
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
+ } else {
+ data = NULL;
+ size = 0;
+ }
+
+ if (size < 24)
+ goto too_small;
+
+ /* check type of index. The opendml2 specs state that
+ * there should be 4 dwords per array entry. Type can be
+ * either frame or field (and we don't care). */
+ if (GST_READ_UINT16_LE (data) != 4 ||
+ (data[2] & 0xfe) != 0x0 || data[3] != 0x0) {
+ GST_WARNING_OBJECT (avi,
+ "Superindex for stream has unexpected "
+ "size_entry %d (bytes) or flags 0x%02x/0x%02x",
+ GST_READ_UINT16_LE (data), data[2], data[3]);
+ bpe = GST_READ_UINT16_LE (data) * 4;
+ }
+ num = GST_READ_UINT32_LE (&data[4]);
+
+ GST_DEBUG_OBJECT (avi, "got %d indexes", num);
+
+ /* this can't work out well ... */
+ if (num > G_MAXUINT32 >> 1 || bpe < 8) {
+ goto invalid_params;
+ }
+
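+ /* each superindex entry begins with the 64-bit absolute file offset
+ * of a subindex chunk; the entry array starts at byte 24 of the
+ * chunk body */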
+ indexes = g_new (guint64, num + 1);
+ for (i = 0; i < num; i++) {
+ if (size < 24 + bpe * (i + 1))
+ break;
+ indexes[i] = GST_READ_UINT64_LE (&data[24 + bpe * i]);
+ GST_DEBUG_OBJECT (avi, "index %d at %" G_GUINT64_FORMAT, i, indexes[i]);
+ }
+ indexes[i] = GST_BUFFER_OFFSET_NONE;
+ *_indexes = indexes;
+
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+
+ return TRUE;
+
+ /* ERRORS */
+ too_small:
+ {
+ GST_ERROR_OBJECT (avi,
+ "Not enough data to parse superindex (%" G_GSIZE_FORMAT
+ " available, 24 needed)", size);
+ if (buf) {
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ }
+ return FALSE;
+ }
+ invalid_params:
+ {
+ GST_ERROR_OBJECT (avi, "invalid index parameters (num = %d, bpe = %d)",
+ num, bpe);
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ return FALSE;
+ }
+ }
+
+ /* add an entry to the index of a stream. @num should be an estimate of the
+ * total amount of index entries for all streams and is used to dynamically
+ * allocate memory for the index entries. */
+ static inline gboolean
+ gst_avi_demux_add_index (GstAviDemux * avi, GstAviStream * stream,
+ guint num, GstAviIndexEntry * entry)
+ {
+ /* ensure index memory */
+ if (G_UNLIKELY (stream->idx_n >= stream->idx_max)) {
+ guint idx_max = stream->idx_max;
+ GstAviIndexEntry *new_idx;
+
+ /* we need to make some more room */
+ if (idx_max == 0) {
+ /* initial size guess, assume each stream has an equal amount of entries,
+ * overshoot with at least 8K */
+ idx_max = (num / avi->num_streams) + (8192 / sizeof (GstAviIndexEntry));
+ } else {
+ idx_max += 8192 / sizeof (GstAviIndexEntry);
+ GST_DEBUG_OBJECT (avi, "expanded index from %u to %u",
+ stream->idx_max, idx_max);
+ }
+ new_idx = g_try_renew (GstAviIndexEntry, stream->index, idx_max);
+ /* out of memory, if this fails stream->index is untouched. */
+ if (G_UNLIKELY (!new_idx))
+ return FALSE;
+ /* use new index */
+ stream->index = new_idx;
+ stream->idx_max = idx_max;
+ }
+
+ /* update entry total and stream stats. The entry total can be converted to
+ * the timestamp of the entry easily. */
+ if (stream->strh->type == GST_RIFF_FCC_auds) {
+ gint blockalign;
+
+ if (stream->is_vbr) {
+ entry->total = stream->total_blocks;
+ } else {
+ entry->total = stream->total_bytes;
+ }
+ blockalign = stream->strf.auds->blockalign;
+ if (blockalign > 0)
+ stream->total_blocks += DIV_ROUND_UP (entry->size, blockalign);
+ else
+ stream->total_blocks++;
+ } else {
+ if (stream->is_vbr) {
+ entry->total = stream->idx_n;
+ } else {
+ entry->total = stream->total_bytes;
+ }
+ }
+ stream->total_bytes += entry->size;
+ if (ENTRY_IS_KEYFRAME (entry))
+ stream->n_keyframes++;
+
+ /* and add */
+ GST_LOG_OBJECT (avi,
+ "Adding stream %u, index entry %d, kf %d, size %u "
+ ", offset %" G_GUINT64_FORMAT ", total %" G_GUINT64_FORMAT, stream->num,
+ stream->idx_n, ENTRY_IS_KEYFRAME (entry), entry->size, entry->offset,
+ entry->total);
+ stream->index[stream->idx_n++] = *entry;
+
+ return TRUE;
+ }
+
+ /* given @entry_n in @stream, calculate info such as timestamps and
+ * offsets for the entry. */
+ static void
+ gst_avi_demux_get_buffer_info (GstAviDemux * avi, GstAviStream * stream,
+ guint entry_n, GstClockTime * timestamp, GstClockTime * ts_end,
+ guint64 * offset, guint64 * offset_end)
+ {
+ GstAviIndexEntry *entry;
+
+ entry = &stream->index[entry_n];
+
+ if (stream->is_vbr) {
+ /* VBR stream next timestamp */
+ if (stream->strh->type == GST_RIFF_FCC_auds) {
+ if (timestamp)
+ *timestamp =
+ avi_stream_convert_frames_to_time_unchecked (stream, entry->total);
+ if (ts_end) {
+ gint size = 1;
+ if (G_LIKELY (entry_n + 1 < stream->idx_n))
+ size = stream->index[entry_n + 1].total - entry->total;
+ *ts_end = avi_stream_convert_frames_to_time_unchecked (stream,
+ entry->total + size);
+ }
+ } else {
+ if (timestamp)
+ *timestamp =
+ avi_stream_convert_frames_to_time_unchecked (stream, entry_n);
+ if (ts_end)
+ *ts_end = avi_stream_convert_frames_to_time_unchecked (stream,
+ entry_n + 1);
+ }
+ } else if (stream->strh->type == GST_RIFF_FCC_auds) {
+ /* constant rate stream */
+ if (timestamp)
+ *timestamp =
+ avi_stream_convert_bytes_to_time_unchecked (stream, entry->total);
+ if (ts_end)
+ *ts_end = avi_stream_convert_bytes_to_time_unchecked (stream,
+ entry->total + entry->size);
+ }
+ if (stream->strh->type == GST_RIFF_FCC_vids) {
+ /* video offsets are the frame number */
+ if (offset)
+ *offset = entry_n;
+ if (offset_end)
+ *offset_end = entry_n + 1;
+ } else {
+ /* no offsets for audio */
+ if (offset)
+ *offset = -1;
+ if (offset_end)
+ *offset_end = -1;
+ }
+ }
+
+ /* collect and debug stats about the indexes for all streams.
+ * This method is also responsible for filling in the stream duration
+ * as measured by the amount of index entries.
+ *
+ * Returns TRUE if the index is not empty, else FALSE */
+ static gboolean
+ gst_avi_demux_do_index_stats (GstAviDemux * avi)
+ {
+ guint total_idx = 0;
+ guint i;
+ #ifndef GST_DISABLE_GST_DEBUG
+ guint total_max = 0;
+ #endif
+
+ /* get stream stats now */
+ for (i = 0; i < avi->num_streams; i++) {
+ GstAviStream *stream;
+
+ if (G_UNLIKELY (!(stream = &avi->stream[i])))
+ continue;
+ if (G_UNLIKELY (!stream->strh))
+ continue;
+ if (G_UNLIKELY (!stream->index || stream->idx_n == 0))
+ continue;
+
+ /* we are interested in the end_ts of the last entry, which is the total
+ * duration of this stream */
+ gst_avi_demux_get_buffer_info (avi, stream, stream->idx_n - 1,
+ NULL, &stream->idx_duration, NULL, NULL);
+
+ total_idx += stream->idx_n;
+ #ifndef GST_DISABLE_GST_DEBUG
+ total_max += stream->idx_max;
+ #endif
+ GST_INFO_OBJECT (avi, "Stream %d, dur %" GST_TIME_FORMAT ", %6u entries, "
+ "%5u keyframes, entry size = %2u, total size = %10u, allocated %10u",
+ i, GST_TIME_ARGS (stream->idx_duration), stream->idx_n,
+ stream->n_keyframes, (guint) sizeof (GstAviIndexEntry),
+ (guint) (stream->idx_n * sizeof (GstAviIndexEntry)),
+ (guint) (stream->idx_max * sizeof (GstAviIndexEntry)));
+
+ /* knowing all that we do, that also includes avg bitrate */
+ if (!stream->taglist) {
+ stream->taglist = gst_tag_list_new_empty ();
+ }
+ if (stream->total_bytes && stream->idx_duration)
+ gst_tag_list_add (stream->taglist, GST_TAG_MERGE_REPLACE,
+ GST_TAG_BITRATE,
+ (guint) gst_util_uint64_scale (stream->total_bytes * 8,
+ GST_SECOND, stream->idx_duration), NULL);
+ }
+ total_idx *= sizeof (GstAviIndexEntry);
+ #ifndef GST_DISABLE_GST_DEBUG
+ total_max *= sizeof (GstAviIndexEntry);
+ #endif
+ GST_INFO_OBJECT (avi, "%u bytes for index vs %u ideally, %u wasted",
+ total_max, total_idx, total_max - total_idx);
+
+ if (total_idx == 0) {
+ GST_WARNING_OBJECT (avi, "Index is empty !");
+ return FALSE;
+ }
+ return TRUE;
+ }
+
+ /*
+ * gst_avi_demux_parse_subindex:
+ * @avi: Avi object
+ * @stream: stream context the parsed entries belong to.
+ * @buf: input data to use for parsing; the function takes ownership
+ * of @buf and unrefs it. May be NULL, in which case nothing
+ * is parsed and TRUE is returned.
+ *
+ * Reads a subindex (openDML-2 spec stuff) from the provided data and
+ * adds the parsed entries to @stream's index.
+ * The buffer should contain a GST_RIFF_TAG_ix?? chunk.
+ *
+ * Returns: TRUE on success, FALSE otherwise. Errors are fatal, we
+ * throw an error, caller should bail out asap.
+ */
+ static gboolean
+ gst_avi_demux_parse_subindex (GstAviDemux * avi, GstAviStream * stream,
+ GstBuffer * buf)
+ {
+ GstMapInfo map;
+ guint8 *data;
+ guint16 bpe;
+ guint32 num, i;
+ guint64 baseoff;
+
+ if (buf == NULL)
+ return TRUE;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ data = map.data;
+
+ /* check size */
+ if (map.size < 24)
+ goto too_small;
+
+ /* We don't support index-data yet */
+ if (data[3] & 0x80)
+ goto not_implemented;
+
+ /* check type of index. A standard index has 2 dwords per array
+ * entry (offset, size), a field index has 3. Type can be
+ * either frame or field (and we don't care). */
+ bpe = (data[2] & 0x01) ? 12 : 8;
+ if (GST_READ_UINT16_LE (data) != bpe / 4 ||
+ (data[2] & 0xfe) != 0x0 || data[3] != 0x1) {
+ GST_WARNING_OBJECT (avi,
+ "Superindex for stream %d has unexpected "
+ "size_entry %d (bytes) or flags 0x%02x/0x%02x",
+ stream->num, GST_READ_UINT16_LE (data), data[2], data[3]);
+ bpe = GST_READ_UINT16_LE (data) * 4;
+ }
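+ /* what follows matches the OpenDML AVISTDINDEX payload: nEntriesInUse at
+ * offset 4, dwChunkId at 8, qwBaseOffset at 12, a reserved dword at 20,
+ * and the entries themselves from offset 24 on (hence 24 + bpe * i) */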
+ num = GST_READ_UINT32_LE (&data[4]);
+ baseoff = GST_READ_UINT64_LE (&data[12]);
+
+ /* If there's nothing, just return ! */
+ if (num == 0)
+ goto empty_index;
+
+ GST_INFO_OBJECT (avi, "Parsing subindex, nr_entries = %6d", num);
+
+ for (i = 0; i < num; i++) {
+ GstAviIndexEntry entry;
+
+ if (map.size < 24 + bpe * (i + 1))
+ break;
+
+ /* fill in offset and size. The most significant bit of the size
+ * field flags a non-keyframe; it is masked out again below */
+ entry.offset = baseoff + GST_READ_UINT32_LE (&data[24 + bpe * i]);
+ entry.size = GST_READ_UINT32_LE (&data[24 + bpe * i + 4]);
+ /* handle flags */
+ if (stream->strh->type == GST_RIFF_FCC_auds) {
+ /* all audio frames are keyframes */
+ ENTRY_SET_KEYFRAME (&entry);
+ } else {
+ /* else read flags */
+ entry.flags = (entry.size & 0x80000000) ? 0 : GST_AVI_KEYFRAME;
+ }
+ entry.size &= ~0x80000000;
+
+ /* and add */
+ if (G_UNLIKELY (!gst_avi_demux_add_index (avi, stream, num, &entry)))
+ goto out_of_mem;
+ }
+ done:
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+
+ return TRUE;
+
+ /* ERRORS */
+ too_small:
+ {
+ GST_ERROR_OBJECT (avi,
+ "Not enough data to parse subindex (%" G_GSIZE_FORMAT
+ " available, 24 needed)", map.size);
+ goto done; /* continue */
+ }
+ not_implemented:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, NOT_IMPLEMENTED, (NULL),
+ ("Subindex-is-data is not implemented"));
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ return FALSE;
+ }
+ empty_index:
+ {
+ GST_DEBUG_OBJECT (avi, "the index is empty");
+ goto done; /* continue */
+ }
+ out_of_mem:
+ {
+ GST_ELEMENT_ERROR (avi, RESOURCE, NO_SPACE_LEFT, (NULL),
+ ("Cannot allocate memory for %u*%u=%u bytes",
+ (guint) sizeof (GstAviIndexEntry), num,
+ (guint) sizeof (GstAviIndexEntry) * num));
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ return FALSE;
+ }
+ }
+
+ /*
+ * Create and push a flushing seek event upstream
+ */
+ static gboolean
+ perform_seek_to_offset (GstAviDemux * demux, guint64 offset, guint32 seqnum)
+ {
+ GstEvent *event;
+ gboolean res = 0;
+
+ GST_DEBUG_OBJECT (demux, "Seeking to %" G_GUINT64_FORMAT, offset);
+
+ event =
+ gst_event_new_seek (1.0, GST_FORMAT_BYTES,
+ GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE, GST_SEEK_TYPE_SET, offset,
+ GST_SEEK_TYPE_NONE, -1);
+ gst_event_set_seqnum (event, seqnum);
+ res = gst_pad_push_event (demux->sinkpad, event);
+
+ if (res)
+ demux->offset = offset;
+ return res;
+ }
+
+ /*
+ * Read AVI index when streaming
+ */
+ static gboolean
+ gst_avi_demux_read_subindexes_push (GstAviDemux * avi)
+ {
+ guint32 tag = 0, size;
+ GstBuffer *buf = NULL;
+ guint odml_stream;
+
+ GST_DEBUG_OBJECT (avi, "read subindexes for %d streams", avi->num_streams);
+
+ if (avi->odml_subidxs[avi->odml_subidx] != avi->offset)
+ return FALSE;
+
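+ /* not enough data buffered for the chunk header yet; wait for more */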
+ if (!gst_avi_demux_peek_chunk (avi, &tag, &size))
+ return TRUE;
+
+ /* this is the ODML chunk we expect */
+ odml_stream = avi->odml_stream;
+
+ if ((tag != GST_MAKE_FOURCC ('i', 'x', '0' + odml_stream / 10,
+ '0' + odml_stream % 10)) &&
+ (tag != GST_MAKE_FOURCC ('0' + odml_stream / 10,
+ '0' + odml_stream % 10, 'i', 'x'))) {
+ GST_WARNING_OBJECT (avi, "Not an ix## chunk (%" GST_FOURCC_FORMAT ")",
+ GST_FOURCC_ARGS (tag));
+ return FALSE;
+ }
+
+ avi->offset += 8 + GST_ROUND_UP_2 (size);
+ /* flush chunk header so we get just the 'size' payload data */
+ gst_adapter_flush (avi->adapter, 8);
+ buf = gst_adapter_take_buffer (avi->adapter, size);
+
+ if (!gst_avi_demux_parse_subindex (avi, &avi->stream[odml_stream], buf))
+ return FALSE;
+
+ /* we parsed the index, go to next subindex */
+ avi->odml_subidx++;
+
+ if (avi->odml_subidxs[avi->odml_subidx] == GST_BUFFER_OFFSET_NONE) {
+ /* we reached the end of the indexes for this stream, move to the next
+ * stream to handle the first index */
+ avi->odml_stream++;
+ avi->odml_subidx = 0;
+
+ if (avi->odml_stream < avi->num_streams) {
+ /* there are more indexes */
+ avi->odml_subidxs = avi->stream[avi->odml_stream].indexes;
+ } else {
+ /* we're done, get stream stats now */
+ avi->have_index = gst_avi_demux_do_index_stats (avi);
+
+ return TRUE;
+ }
+ }
+
+ /* seek to next index */
+ return perform_seek_to_offset (avi, avi->odml_subidxs[avi->odml_subidx],
+ avi->segment_seqnum);
+ }
+
+ /*
+ * Read AVI index
+ */
+ static void
+ gst_avi_demux_read_subindexes_pull (GstAviDemux * avi)
+ {
+ guint32 tag;
+ GstBuffer *buf;
+ gint i, n;
+
+ GST_DEBUG_OBJECT (avi, "read subindexes for %d streams", avi->num_streams);
+
+ for (n = 0; n < avi->num_streams; n++) {
+ GstAviStream *stream = &avi->stream[n];
+
+ if (stream->indexes == NULL)
+ continue;
+
+ for (i = 0; stream->indexes[i] != GST_BUFFER_OFFSET_NONE; i++) {
+ if (gst_riff_read_chunk (GST_ELEMENT_CAST (avi), avi->sinkpad,
+ &stream->indexes[i], &tag, &buf) != GST_FLOW_OK)
+ continue;
+ else if ((tag != GST_MAKE_FOURCC ('i', 'x', '0' + stream->num / 10,
+ '0' + stream->num % 10)) &&
+ (tag != GST_MAKE_FOURCC ('0' + stream->num / 10,
+ '0' + stream->num % 10, 'i', 'x'))) {
+ /* Some ODML files (created by god knows what muxer) have a ##ix format
+ * instead of the 'official' ix##. They are still valid though. */
+ GST_WARNING_OBJECT (avi, "Not an ix## chunk (%" GST_FOURCC_FORMAT ")",
+ GST_FOURCC_ARGS (tag));
+ gst_buffer_unref (buf);
+ continue;
+ }
+
+ if (!gst_avi_demux_parse_subindex (avi, stream, buf))
+ continue;
+ }
+
+ g_free (stream->indexes);
+ stream->indexes = NULL;
+ }
+ /* get stream stats now */
+ avi->have_index = gst_avi_demux_do_index_stats (avi);
+ }
+
+ /*
+ * gst_avi_demux_riff_parse_vprp:
+ * @element: caller element (used for debugging/error).
+ * @buf: input data to be used for parsing, stripped from header.
+ * @vprp: a pointer (returned by this function) to a filled-in vprp
+ * structure. Caller should free it.
+ *
+ * Parses a video stream's vprp. This function takes ownership of @buf.
+ *
+ * Returns: TRUE if parsing succeeded, otherwise FALSE. The stream
+ * should be skipped on error, but it is not fatal.
+ */
+ static gboolean
+ gst_avi_demux_riff_parse_vprp (GstElement * element,
+ GstBuffer * buf, gst_riff_vprp ** _vprp)
+ {
+ gst_riff_vprp *vprp;
+ gint k;
+ gsize size;
+
+ g_return_val_if_fail (buf != NULL, FALSE);
+ g_return_val_if_fail (_vprp != NULL, FALSE);
+
+ size = gst_buffer_get_size (buf);
+
+ if (size < G_STRUCT_OFFSET (gst_riff_vprp, field_info))
+ goto too_small;
+
+ vprp = g_malloc (size);
+ gst_buffer_extract (buf, 0, vprp, size);
+
+ #if (G_BYTE_ORDER == G_BIG_ENDIAN)
+ vprp->format_token = GUINT32_FROM_LE (vprp->format_token);
+ vprp->standard = GUINT32_FROM_LE (vprp->standard);
+ vprp->vert_rate = GUINT32_FROM_LE (vprp->vert_rate);
+ vprp->hor_t_total = GUINT32_FROM_LE (vprp->hor_t_total);
+ vprp->vert_lines = GUINT32_FROM_LE (vprp->vert_lines);
+ vprp->aspect = GUINT32_FROM_LE (vprp->aspect);
+ vprp->width = GUINT32_FROM_LE (vprp->width);
+ vprp->height = GUINT32_FROM_LE (vprp->height);
+ vprp->fields = GUINT32_FROM_LE (vprp->fields);
+ #endif
+
+ /* size checking */
+ /* calculate fields based on size */
+ k = (size - G_STRUCT_OFFSET (gst_riff_vprp, field_info)) / vprp->fields;
+ if (vprp->fields > k) {
+ GST_WARNING_OBJECT (element,
+ "vprp header indicated %d fields, only %d available", vprp->fields, k);
+ vprp->fields = k;
+ }
+ if (vprp->fields > GST_RIFF_VPRP_VIDEO_FIELDS) {
+ GST_WARNING_OBJECT (element,
+ "vprp header indicated %d fields, at most %d supported", vprp->fields,
+ GST_RIFF_VPRP_VIDEO_FIELDS);
+ vprp->fields = GST_RIFF_VPRP_VIDEO_FIELDS;
+ }
+ #if (G_BYTE_ORDER == G_BIG_ENDIAN)
+ for (k = 0; k < vprp->fields; k++) {
+ gst_riff_vprp_video_field_desc *fd;
+
+ fd = &vprp->field_info[k];
+ fd->compressed_bm_height = GUINT32_FROM_LE (fd->compressed_bm_height);
+ fd->compressed_bm_width = GUINT32_FROM_LE (fd->compressed_bm_width);
+ fd->valid_bm_height = GUINT32_FROM_LE (fd->valid_bm_height);
+ fd->valid_bm_width = GUINT16_FROM_LE (fd->valid_bm_width);
+ fd->valid_bm_x_offset = GUINT16_FROM_LE (fd->valid_bm_x_offset);
+ fd->valid_bm_y_offset = GUINT32_FROM_LE (fd->valid_bm_y_offset);
+ fd->video_x_t_offset = GUINT32_FROM_LE (fd->video_x_t_offset);
+ fd->video_y_start = GUINT32_FROM_LE (fd->video_y_start);
+ }
+ #endif
+
+ /* debug */
+ GST_INFO_OBJECT (element, "vprp tag found in context vids:");
+ GST_INFO_OBJECT (element, " format_token %d", vprp->format_token);
+ GST_INFO_OBJECT (element, " standard %d", vprp->standard);
+ GST_INFO_OBJECT (element, " vert_rate %d", vprp->vert_rate);
+ GST_INFO_OBJECT (element, " hor_t_total %d", vprp->hor_t_total);
+ GST_INFO_OBJECT (element, " vert_lines %d", vprp->vert_lines);
+ GST_INFO_OBJECT (element, " aspect %d:%d", vprp->aspect >> 16,
+ vprp->aspect & 0xffff);
+ GST_INFO_OBJECT (element, " width %d", vprp->width);
+ GST_INFO_OBJECT (element, " height %d", vprp->height);
+ GST_INFO_OBJECT (element, " fields %d", vprp->fields);
+ for (k = 0; k < vprp->fields; k++) {
+ gst_riff_vprp_video_field_desc *fd;
+
+ fd = &(vprp->field_info[k]);
+ GST_INFO_OBJECT (element, " field %u description:", k);
+ GST_INFO_OBJECT (element, " compressed_bm_height %d",
+ fd->compressed_bm_height);
+ GST_INFO_OBJECT (element, " compressed_bm_width %d",
+ fd->compressed_bm_width);
+ GST_INFO_OBJECT (element, " valid_bm_height %d",
+ fd->valid_bm_height);
+ GST_INFO_OBJECT (element, " valid_bm_width %d", fd->valid_bm_width);
+ GST_INFO_OBJECT (element, " valid_bm_x_offset %d",
+ fd->valid_bm_x_offset);
+ GST_INFO_OBJECT (element, " valid_bm_y_offset %d",
+ fd->valid_bm_y_offset);
+ GST_INFO_OBJECT (element, " video_x_t_offset %d",
+ fd->video_x_t_offset);
+ GST_INFO_OBJECT (element, " video_y_start %d", fd->video_y_start);
+ }
+
+ gst_buffer_unref (buf);
+
+ *_vprp = vprp;
+
+ return TRUE;
+
+ /* ERRORS */
+ too_small:
+ {
+ GST_ERROR_OBJECT (element,
+ "Too small vprp (%" G_GSIZE_FORMAT " available, at least %d needed)",
+ size, (int) G_STRUCT_OFFSET (gst_riff_vprp, field_info));
+ gst_buffer_unref (buf);
+ return FALSE;
+ }
+ }
+
+ static void
+ gst_avi_demux_expose_streams (GstAviDemux * avi, gboolean force)
+ {
+ guint i;
+
+ GST_DEBUG_OBJECT (avi, "force : %d", force);
+
+ for (i = 0; i < avi->num_streams; i++) {
+ GstAviStream *stream = &avi->stream[i];
+
+ if (force || stream->idx_n != 0) {
+ GST_LOG_OBJECT (avi, "Adding pad %s", GST_PAD_NAME (stream->pad));
+ gst_element_add_pad ((GstElement *) avi, stream->pad);
+ gst_flow_combiner_add_pad (avi->flowcombiner, stream->pad);
+
+ #if 0
+ if (avi->element_index)
+ gst_index_get_writer_id (avi->element_index,
+ GST_OBJECT_CAST (stream->pad), &stream->index_id);
+ #endif
+
+ stream->exposed = TRUE;
+ if (avi->main_stream == -1)
+ avi->main_stream = i;
+ } else {
+ GST_WARNING_OBJECT (avi, "Stream #%d doesn't have any entry, removing it",
+ i);
+ gst_avi_demux_reset_stream (avi, stream);
+ }
+ }
+ }
+
+ /* buf contains LIST chunk data and is padded here to an even size,
+ * since some buggy files do not account for the padding of chunks
+ * within a LIST in the size of the LIST */
+ static inline void
+ gst_avi_demux_roundup_list (GstAviDemux * avi, GstBuffer ** buf)
+ {
+ gsize size;
+
+ size = gst_buffer_get_size (*buf);
+
+ if (G_UNLIKELY (size & 1)) {
+ GstBuffer *obuf;
+ GstMapInfo map;
+
+ GST_DEBUG_OBJECT (avi, "rounding up dubious list size %" G_GSIZE_FORMAT,
+ size);
+ obuf = gst_buffer_new_and_alloc (size + 1);
+
+ gst_buffer_map (obuf, &map, GST_MAP_WRITE);
+ gst_buffer_extract (*buf, 0, map.data, size);
+ /* assume 0 padding, at least makes outcome deterministic */
+ map.data[size] = 0;
+ gst_buffer_unmap (obuf, &map);
+ gst_buffer_replace (buf, obuf);
+ }
+ }
+
+ static GstCaps *
+ gst_avi_demux_check_caps (GstAviDemux * avi, GstAviStream * stream,
+ GstCaps * caps)
+ {
+ GstStructure *s;
+ const GValue *val;
+ GstBuffer *buf;
+
+ caps = gst_caps_make_writable (caps);
+
+ s = gst_caps_get_structure (caps, 0);
+ if (gst_structure_has_name (s, "video/x-raw")) {
+ stream->is_raw = TRUE;
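+ /* request 32-byte alignment for raw video buffers (presumably to keep
+ * them friendly for SIMD access downstream) */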
+ stream->alignment = 32;
+ if (!gst_structure_has_field (s, "pixel-aspect-ratio"))
+ gst_structure_set (s, "pixel-aspect-ratio", GST_TYPE_FRACTION,
+ 1, 1, NULL);
+ if (gst_structure_has_field_typed (s, "palette_data", GST_TYPE_BUFFER)) {
+ gst_structure_get (s, "palette_data", GST_TYPE_BUFFER,
+ &stream->rgb8_palette, NULL);
+ gst_structure_remove_field (s, "palette_data");
+ return caps;
+ }
+ } else if (gst_structure_has_name (s, "video/x-h264")) {
+ GST_DEBUG_OBJECT (avi, "checking caps %" GST_PTR_FORMAT, caps);
+
+ /* some muxers put invalid bytestream stuff in h264 extra data */
+ val = gst_structure_get_value (s, "codec_data");
+ if (val && (buf = gst_value_get_buffer (val))) {
+ guint8 *data;
+ gint size;
+ GstMapInfo map;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
+ if (size >= 4) {
+ guint32 h = GST_READ_UINT32_BE (data);
+
+ gst_buffer_unmap (buf, &map);
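+ /* a leading 00 00 00 01 (h == 0x01) or 00 00 01 ((h >> 8) == 0x01)
+ * is an Annex-B start code rather than an avcC header */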
+ if (h == 0x01 || (h >> 8) == 0x01) {
+ /* can hardly be valid AVC codec data */
+ GST_DEBUG_OBJECT (avi,
+ "discarding invalid codec_data containing byte-stream");
+ /* so do not pretend to downstream that it is packetized avc */
+ gst_structure_remove_field (s, "codec_data");
+ /* ... but rather properly parsed bytestream */
+ gst_structure_set (s, "stream-format", G_TYPE_STRING, "byte-stream",
+ "alignment", G_TYPE_STRING, "au", NULL);
+ }
+ } else {
+ gst_buffer_unmap (buf, &map);
+ }
+ }
+ }
+
+ return caps;
+ }
+
+ /*
+ * gst_avi_demux_parse_stream:
+ * @avi: calling element (used for debugging/errors).
+ * @buf: input buffer used to parse the stream.
+ *
+ * Parses all subchunks in a strl chunk (which defines a single
+ * stream). Discards the buffer after use. This function will
+ * increment the stream counter internally.
+ *
+ * Returns: whether the stream was identified successfully.
+ * Errors are not fatal, but a FALSE return does indicate
+ * that the stream was skipped.
+ */
+ static gboolean
+ gst_avi_demux_parse_stream (GstAviDemux * avi, GstBuffer * buf)
+ {
+ GstAviStream *stream;
+ GstElementClass *klass;
+ GstPadTemplate *templ;
+ GstBuffer *sub = NULL;
+ guint offset = 4;
+ guint32 tag = 0;
+ gchar *codec_name = NULL, *padname = NULL;
+ const gchar *tag_name;
+ GstCaps *caps = NULL;
+ GstPad *pad;
+ GstElement *element;
+ gboolean got_strh = FALSE, got_strf = FALSE, got_vprp = FALSE;
+ gst_riff_vprp *vprp = NULL;
+ GstEvent *event;
+ gchar *stream_id;
+ GstMapInfo map;
+ gboolean sparse = FALSE;
+
+ element = GST_ELEMENT_CAST (avi);
+
+ GST_DEBUG_OBJECT (avi, "Parsing stream");
+
+ gst_avi_demux_roundup_list (avi, &buf);
+
+ if (avi->num_streams >= GST_AVI_DEMUX_MAX_STREAMS) {
+ GST_WARNING_OBJECT (avi,
+ "maximum no of streams (%d) exceeded, ignoring stream",
+ GST_AVI_DEMUX_MAX_STREAMS);
+ gst_buffer_unref (buf);
+ /* not a fatal error, let's say */
+ return TRUE;
+ }
+
+ stream = &avi->stream[avi->num_streams];
+
+ /* initial settings */
+ stream->idx_duration = GST_CLOCK_TIME_NONE;
+ stream->hdr_duration = GST_CLOCK_TIME_NONE;
+ stream->duration = GST_CLOCK_TIME_NONE;
+
+ while (gst_riff_parse_chunk (element, buf, &offset, &tag, &sub)) {
+ /* sub can be NULL if the chunk is empty */
+ if (sub == NULL) {
+ GST_DEBUG_OBJECT (avi, "ignoring empty chunk %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (tag));
+ continue;
+ }
+ switch (tag) {
+ case GST_RIFF_TAG_strh:
+ {
+ gst_riff_strh *strh;
+
+ if (got_strh) {
+ GST_WARNING_OBJECT (avi, "Ignoring additional strh chunk");
+ break;
+ }
+ if (!gst_riff_parse_strh (element, sub, &stream->strh)) {
+ /* ownership given away */
+ sub = NULL;
+ GST_WARNING_OBJECT (avi, "Failed to parse strh chunk");
+ goto fail;
+ }
+ sub = NULL;
+ strh = stream->strh;
+ /* sanity check; stream header frame rate matches global header
+ * frame duration */
+ if (stream->strh->type == GST_RIFF_FCC_vids) {
+ GstClockTime s_dur;
+ GstClockTime h_dur = avi->avih->us_frame * GST_USECOND;
+
+ s_dur = gst_util_uint64_scale (GST_SECOND, strh->scale, strh->rate);
+ GST_DEBUG_OBJECT (avi, "verifying stream framerate %d/%d, "
+ "frame duration = %d ms", strh->rate, strh->scale,
+ (gint) (s_dur / GST_MSECOND));
+ if (h_dur > (10 * GST_MSECOND) && (s_dur > 10 * h_dur)) {
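+ /* the per-stream rate/scale gives a frame duration more than 10x the
+ * global avih one; assume it is bogus and rebuild rate/scale from the
+ * microseconds-per-frame value in the main header */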
+ strh->rate = GST_SECOND / GST_USECOND;
+ strh->scale = h_dur / GST_USECOND;
+ GST_DEBUG_OBJECT (avi, "correcting stream framerate to %d/%d",
+ strh->rate, strh->scale);
+ }
+ }
+ /* determine duration as indicated by header */
+ stream->hdr_duration = gst_util_uint64_scale ((guint64) strh->length *
+ strh->scale, GST_SECOND, (guint64) strh->rate);
+ GST_INFO ("Stream duration according to header: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (stream->hdr_duration));
+ if (stream->hdr_duration == 0)
+ stream->hdr_duration = GST_CLOCK_TIME_NONE;
+
+ got_strh = TRUE;
+ break;
+ }
+ case GST_RIFF_TAG_strf:
+ {
+ gboolean res = FALSE;
+
+ if (got_strf) {
+ GST_WARNING_OBJECT (avi, "Ignoring additional strf chunk");
+ break;
+ }
+ if (!got_strh) {
+ GST_ERROR_OBJECT (avi, "Found strf chunk before strh chunk");
+ goto fail;
+ }
+ switch (stream->strh->type) {
+ case GST_RIFF_FCC_vids:
+ stream->is_vbr = TRUE;
+ res = gst_riff_parse_strf_vids (element, sub,
+ &stream->strf.vids, &stream->extradata);
+ sub = NULL;
+ GST_DEBUG_OBJECT (element, "marking video as VBR, res %d", res);
+ break;
+ case GST_RIFF_FCC_auds:
+ res =
+ gst_riff_parse_strf_auds (element, sub, &stream->strf.auds,
+ &stream->extradata);
+ sub = NULL;
+ if (!res)
+ break;
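+ /* heuristic: VBR audio carries samplesize 0 and scale > 1 (a chunk
+ * per codec frame); blockalign 1 suggests a plain byte stream, so
+ * treat that as CBR */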
+ stream->is_vbr = (stream->strh->samplesize == 0)
+ && stream->strh->scale > 1
+ && stream->strf.auds->blockalign != 1;
+ GST_DEBUG_OBJECT (element, "marking audio as VBR:%d, res %d",
+ stream->is_vbr, res);
+ /* we need these or we have no way to come up with timestamps */
+ if ((!stream->is_vbr && !stream->strf.auds->av_bps) ||
+ (stream->is_vbr && (!stream->strh->scale ||
+ !stream->strh->rate))) {
+ GST_WARNING_OBJECT (element,
+ "invalid audio header, ignoring stream");
+ goto fail;
+ }
+ /* some more sanity checks */
+ if (stream->is_vbr) {
+ if (stream->strf.auds->blockalign <= 4) {
+ /* that would mean (too) many frames per chunk,
+ * so not likely set as expected */
+ GST_DEBUG_OBJECT (element,
+ "suspicious blockalign %d for VBR audio; "
+ "overriding to 1 frame per chunk",
+ stream->strf.auds->blockalign);
+ /* this should top any likely value */
+ stream->strf.auds->blockalign = (1 << 12);
+ }
+ }
+ break;
+ case GST_RIFF_FCC_iavs:
+ stream->is_vbr = TRUE;
+ res = gst_riff_parse_strf_iavs (element, sub,
+ &stream->strf.iavs, &stream->extradata);
+ sub = NULL;
+ GST_DEBUG_OBJECT (element, "marking iavs as VBR, res %d", res);
+ break;
+ case GST_RIFF_FCC_txts:
+ /* nothing to parse here */
+ stream->is_vbr = (stream->strh->samplesize == 0)
+ && (stream->strh->scale > 1);
+ res = TRUE;
+ break;
+ default:
+ GST_ERROR_OBJECT (avi,
+ "Don´t know how to handle stream type %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (stream->strh->type));
+ break;
+ }
+ if (sub) {
+ gst_buffer_unref (sub);
+ sub = NULL;
+ }
+ if (!res)
+ goto fail;
+ got_strf = TRUE;
+ break;
+ }
+ case GST_RIFF_TAG_vprp:
+ {
+ if (got_vprp) {
+ GST_WARNING_OBJECT (avi, "Ignoring additional vprp chunk");
+ break;
+ }
+ if (!got_strh) {
+ GST_ERROR_OBJECT (avi, "Found vprp chunk before strh chunk");
+ goto fail;
+ }
+ if (!got_strf) {
+ GST_ERROR_OBJECT (avi, "Found vprp chunk before strf chunk");
+ goto fail;
+ }
+
+ if (!gst_avi_demux_riff_parse_vprp (element, sub, &vprp)) {
+ GST_WARNING_OBJECT (avi, "Failed to parse vprp chunk");
+ /* not considered fatal */
+ g_free (vprp);
+ vprp = NULL;
+ } else
+ got_vprp = TRUE;
+ sub = NULL;
+ break;
+ }
+ case GST_RIFF_TAG_strd:
+ if (stream->initdata)
+ gst_buffer_unref (stream->initdata);
+ stream->initdata = sub;
+ if (sub != NULL) {
+ gst_avi_demux_parse_strd (avi, sub);
+ sub = NULL;
+ }
+ break;
+ case GST_RIFF_TAG_strn:
+ {
+ gchar *stream_name = NULL;
+
+ gst_buffer_map (sub, &map, GST_MAP_READ);
+
+ if (avi->globaltags == NULL)
+ avi->globaltags = gst_tag_list_new_empty ();
+ parse_tag_value (avi, avi->globaltags, GST_TAG_TITLE,
+ map.data, map.size);
+
+ if (gst_tag_list_get_string (avi->globaltags, GST_TAG_TITLE,
+ &stream_name)) {
+ GST_DEBUG_OBJECT (avi, "stream name: %s", stream_name);
+ g_free (stream->name);
+ stream->name = stream_name;
+ }
+
+ gst_buffer_unmap (sub, &map);
+ gst_buffer_unref (sub);
+ sub = NULL;
+ }
+ break;
+ case GST_RIFF_IDIT:
+ gst_avi_demux_parse_idit (avi, sub);
+ break;
+ default:
+ if (tag == GST_MAKE_FOURCC ('i', 'n', 'd', 'x') ||
+ tag == GST_MAKE_FOURCC ('i', 'x', '0' + avi->num_streams / 10,
+ '0' + avi->num_streams % 10)) {
+ g_free (stream->indexes);
+ gst_avi_demux_parse_superindex (avi, sub, &stream->indexes);
+ stream->superindex = TRUE;
+ sub = NULL;
+ break;
+ }
+ GST_WARNING_OBJECT (avi,
+ "Unknown stream header tag %" GST_FOURCC_FORMAT ", ignoring",
+ GST_FOURCC_ARGS (tag));
+ /* Only get buffer for debugging if the memdump is needed */
+ if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >= 9) {
+ GstMapInfo map;
+
+ gst_buffer_map (sub, &map, GST_MAP_READ);
+ GST_MEMDUMP_OBJECT (avi, "Unknown stream header tag", map.data,
+ map.size);
+ gst_buffer_unmap (sub, &map);
+ }
+ /* fall-through */
+ case GST_RIFF_TAG_JUNQ:
+ case GST_RIFF_TAG_JUNK:
+ break;
+ }
+ if (sub != NULL) {
+ gst_buffer_unref (sub);
+ sub = NULL;
+ }
+ }
+
+ if (!got_strh) {
+ GST_WARNING_OBJECT (avi, "Failed to find strh chunk");
+ goto fail;
+ }
+
+ if (!got_strf) {
+ GST_WARNING_OBJECT (avi, "Failed to find strf chunk");
+ goto fail;
+ }
+
+ /* get class to figure out the template */
+ klass = GST_ELEMENT_GET_CLASS (avi);
+
+ /* we now have all info, let's set up a pad and a caps and be done */
+ /* create stream name + pad */
+ switch (stream->strh->type) {
+ case GST_RIFF_FCC_vids:{
+ guint32 fourcc;
+
+ fourcc = (stream->strf.vids->compression) ?
+ stream->strf.vids->compression : stream->strh->fcc_handler;
+ caps = gst_riff_create_video_caps (fourcc, stream->strh,
+ stream->strf.vids, stream->extradata, stream->initdata, &codec_name);
+
+ /* DXSB is XSUB, and it is placed inside a vids */
+ if (!caps || (fourcc != GST_MAKE_FOURCC ('D', 'X', 'S', 'B') &&
+ fourcc != GST_MAKE_FOURCC ('D', 'X', 'S', 'A'))) {
+ padname = g_strdup_printf ("video_%u", avi->num_v_streams);
+ templ = gst_element_class_get_pad_template (klass, "video_%u");
+ if (!caps) {
+ caps = gst_caps_new_simple ("video/x-avi-unknown", "fourcc",
+ G_TYPE_INT, fourcc, NULL);
+ } else if (got_vprp && vprp) {
+ guint32 aspect_n, aspect_d;
+ gint n, d;
+
+ aspect_n = vprp->aspect >> 16;
+ aspect_d = vprp->aspect & 0xffff;
+ /* calculate the pixel aspect ratio using w/h and aspect ratio */
+ n = aspect_n * stream->strf.vids->height;
+ d = aspect_d * stream->strf.vids->width;
+ if (n && d)
+ gst_caps_set_simple (caps, "pixel-aspect-ratio", GST_TYPE_FRACTION,
+ n, d, NULL);
+ }
+ caps = gst_avi_demux_check_caps (avi, stream, caps);
+ tag_name = GST_TAG_VIDEO_CODEC;
+ avi->num_v_streams++;
+ } else {
+ padname = g_strdup_printf ("subpicture_%u", avi->num_sp_streams);
+ templ = gst_element_class_get_pad_template (klass, "subpicture_%u");
+ tag_name = NULL;
+ avi->num_sp_streams++;
+ sparse = TRUE;
+ }
+ break;
+ }
+ case GST_RIFF_FCC_auds:{
+ /* FIXME: Do something with the channel reorder map */
+ padname = g_strdup_printf ("audio_%u", avi->num_a_streams);
+ templ = gst_element_class_get_pad_template (klass, "audio_%u");
+ caps = gst_riff_create_audio_caps (stream->strf.auds->format,
+ stream->strh, stream->strf.auds, stream->extradata,
+ stream->initdata, &codec_name, NULL);
+ if (!caps) {
+ caps = gst_caps_new_simple ("audio/x-avi-unknown", "codec_id",
+ G_TYPE_INT, stream->strf.auds->format, NULL);
+ }
+ tag_name = GST_TAG_AUDIO_CODEC;
+ avi->num_a_streams++;
+ break;
+ }
+ case GST_RIFF_FCC_iavs:{
+ guint32 fourcc = stream->strh->fcc_handler;
+
+ padname = g_strdup_printf ("video_%u", avi->num_v_streams);
+ templ = gst_element_class_get_pad_template (klass, "video_%u");
+ caps = gst_riff_create_iavs_caps (fourcc, stream->strh,
+ stream->strf.iavs, stream->extradata, stream->initdata, &codec_name);
+ if (!caps) {
+ caps = gst_caps_new_simple ("video/x-avi-unknown", "fourcc",
+ G_TYPE_INT, fourcc, NULL);
+ }
+ tag_name = GST_TAG_VIDEO_CODEC;
+ avi->num_v_streams++;
+ break;
+ }
+ case GST_RIFF_FCC_txts:{
+ padname = g_strdup_printf ("subtitle_%u", avi->num_t_streams);
+ templ = gst_element_class_get_pad_template (klass, "subtitle_%u");
+ caps = gst_caps_new_empty_simple ("application/x-subtitle-avi");
+ tag_name = NULL;
+ avi->num_t_streams++;
+ sparse = TRUE;
+ break;
+ }
+ default:
+ g_return_val_if_reached (FALSE);
+ }
+
+ /* no caps means no stream */
+ if (!caps) {
+ GST_ERROR_OBJECT (element, "Did not find caps for stream %s", padname);
++#ifdef TIZEN_FEATURE_AVIDEMUX_MODIFICATION
++ g_free (padname);
++#endif
+ goto fail;
+ }
+
+ GST_DEBUG_OBJECT (element, "codec-name=%s", codec_name ? codec_name : "NULL");
+ GST_DEBUG_OBJECT (element, "caps=%" GST_PTR_FORMAT, caps);
+
+ /* set proper settings and add it */
+ if (stream->pad)
+ gst_object_unref (stream->pad);
+ pad = stream->pad = gst_pad_new_from_template (templ, padname);
+ g_free (padname);
+
+ gst_pad_use_fixed_caps (pad);
+ #if 0
+ gst_pad_set_formats_function (pad,
+ GST_DEBUG_FUNCPTR (gst_avi_demux_get_src_formats));
+ gst_pad_set_event_mask_function (pad,
+ GST_DEBUG_FUNCPTR (gst_avi_demux_get_event_mask));
+ #endif
+ gst_pad_set_event_function (pad,
+ GST_DEBUG_FUNCPTR (gst_avi_demux_handle_src_event));
+ gst_pad_set_query_function (pad,
+ GST_DEBUG_FUNCPTR (gst_avi_demux_handle_src_query));
+ #if 0
+ gst_pad_set_convert_function (pad,
+ GST_DEBUG_FUNCPTR (gst_avi_demux_src_convert));
+ #endif
+
+ stream->num = avi->num_streams;
+
+ stream->start_entry = 0;
+ stream->step_entry = 0;
+ stream->stop_entry = 0;
+
+ stream->current_entry = -1;
+ stream->current_total = 0;
+
+ stream->discont = TRUE;
+
+ stream->total_bytes = 0;
+ stream->total_blocks = 0;
+ stream->n_keyframes = 0;
+
+ stream->idx_n = 0;
+ stream->idx_max = 0;
+
+ gst_pad_set_element_private (pad, stream);
+ avi->num_streams++;
+
+ gst_pad_set_active (pad, TRUE);
+ stream_id =
+ gst_pad_create_stream_id_printf (pad, GST_ELEMENT_CAST (avi), "%03u",
+ avi->num_streams);
+
+ event = gst_pad_get_sticky_event (avi->sinkpad, GST_EVENT_STREAM_START, 0);
+ if (event) {
+ if (gst_event_parse_group_id (event, &avi->group_id))
+ avi->have_group_id = TRUE;
+ else
+ avi->have_group_id = FALSE;
+ gst_event_unref (event);
+ } else if (!avi->have_group_id) {
+ avi->have_group_id = TRUE;
+ avi->group_id = gst_util_group_id_next ();
+ }
+
+ event = gst_event_new_stream_start (stream_id);
+ if (avi->have_group_id)
+ gst_event_set_group_id (event, avi->group_id);
+ if (sparse)
+ gst_event_set_stream_flags (event, GST_STREAM_FLAG_SPARSE);
+
+ gst_pad_push_event (pad, event);
+ g_free (stream_id);
+ gst_pad_set_caps (pad, caps);
+ gst_caps_unref (caps);
+
+ /* make tags */
+ if (codec_name && tag_name) {
+ if (!stream->taglist)
+ stream->taglist = gst_tag_list_new_empty ();
+
+ avi->got_tags = TRUE;
+
+ gst_tag_list_add (stream->taglist, GST_TAG_MERGE_APPEND, tag_name,
+ codec_name, NULL);
+ }
+
+ g_free (vprp);
+ g_free (codec_name);
+ gst_buffer_unref (buf);
+
+ return TRUE;
+
+ /* ERRORS */
+ fail:
+ {
+ /* unref any mem that may be in use */
+ if (buf)
+ gst_buffer_unref (buf);
+ if (sub)
+ gst_buffer_unref (sub);
+ g_free (vprp);
+ g_free (codec_name);
+ gst_avi_demux_reset_stream (avi, stream);
+ avi->num_streams++;
+ return FALSE;
+ }
+ }
+
+ /*
+ * gst_avi_demux_parse_odml:
+ * @avi: calling element (used for debug/error).
+ * @buf: input buffer to be used for parsing.
+ *
+ * Read an openDML-2.0 extension header. Fills in the total frame count
+ * in the avi demuxer object when reading succeeds.
+ */
+ static void
+ gst_avi_demux_parse_odml (GstAviDemux * avi, GstBuffer * buf)
+ {
+ guint32 tag = 0;
+ guint offset = 4;
+ GstBuffer *sub = NULL;
+
+ while (gst_riff_parse_chunk (GST_ELEMENT_CAST (avi), buf, &offset, &tag,
+ &sub)) {
+ switch (tag) {
+ case GST_RIFF_TAG_dmlh:{
+ gst_riff_dmlh dmlh, *_dmlh;
+ GstMapInfo map;
+
+ /* sub == NULL is possible and means an empty buffer */
+ if (sub == NULL)
+ goto next;
+
+ gst_buffer_map (sub, &map, GST_MAP_READ);
+
+ /* check size */
+ if (map.size < sizeof (gst_riff_dmlh)) {
+ GST_ERROR_OBJECT (avi,
+ "DMLH entry is too small (%" G_GSIZE_FORMAT " bytes, %d needed)",
+ map.size, (int) sizeof (gst_riff_dmlh));
+ gst_buffer_unmap (sub, &map);
+ goto next;
+ }
+ _dmlh = (gst_riff_dmlh *) map.data;
+ dmlh.totalframes = GST_READ_UINT32_LE (&_dmlh->totalframes);
+ gst_buffer_unmap (sub, &map);
+
+ GST_INFO_OBJECT (avi, "dmlh tag found: totalframes: %u",
+ dmlh.totalframes);
+
+ avi->avih->tot_frames = dmlh.totalframes;
+ goto next;
+ }
+
+ default:
+ GST_WARNING_OBJECT (avi,
+ "Unknown tag %" GST_FOURCC_FORMAT " in ODML header",
+ GST_FOURCC_ARGS (tag));
+ /* Only get buffer for debugging if the memdump is needed */
+ if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >= 9) {
+ GstMapInfo map;
+
+ gst_buffer_map (sub, &map, GST_MAP_READ);
+ GST_MEMDUMP_OBJECT (avi, "Unknown ODML tag", map.data, map.size);
+ gst_buffer_unmap (sub, &map);
+ }
+ /* fall-through */
+ case GST_RIFF_TAG_JUNQ:
+ case GST_RIFF_TAG_JUNK:
+ next:
+ /* skip and move to next chunk */
+ if (sub) {
+ gst_buffer_unref (sub);
+ sub = NULL;
+ }
+ break;
+ }
+ }
+ if (buf)
+ gst_buffer_unref (buf);
+ }
+
+ /* Index helper */
+ static guint
+ gst_avi_demux_index_last (GstAviDemux * avi, GstAviStream * stream)
+ {
+ return stream->idx_n;
+ }
+
+ /* find a previous entry in the index with the given flags */
+ static guint
+ gst_avi_demux_index_prev (GstAviDemux * avi, GstAviStream * stream,
+ guint last, gboolean keyframe)
+ {
+ GstAviIndexEntry *entry;
+ guint i;
+
+ for (i = last; i > 0; i--) {
+ entry = &stream->index[i - 1];
+ if (!keyframe || ENTRY_IS_KEYFRAME (entry)) {
+ return i - 1;
+ }
+ }
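+ /* nothing before @last matched; fall back to the first entry */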
+ return 0;
+ }
+
+ static guint
+ gst_avi_demux_index_next (GstAviDemux * avi, GstAviStream * stream,
+ guint last, gboolean keyframe)
+ {
+ GstAviIndexEntry *entry;
+ gint i;
+
+ for (i = last + 1; i < stream->idx_n; i++) {
+ entry = &stream->index[i];
+ if (!keyframe || ENTRY_IS_KEYFRAME (entry)) {
+ return i;
+ }
+ }
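+ /* nothing after @last matched; clamp to the last entry */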
+ return stream->idx_n - 1;
+ }
+
+ static guint
+ gst_avi_demux_index_entry_search (GstAviIndexEntry * entry, guint64 * total)
+ {
+ if (entry->total < *total)
+ return -1;
+ else if (entry->total > *total)
+ return 1;
+ return 0;
+ }
+
+ /*
+ * gst_avi_demux_index_for_time:
+ * @avi: Avi object
+ * @stream: the stream
+ * @time: a time position
+ * @next: whether to look for the entry before or after @time
+ *
+ * Finds the index entry whose time is less than (or, with @next set,
+ * greater than) or equal to the requested time.
+ * Tries to avoid a binary search when the time can be converted to an
+ * index position directly (for example for video frames with a fixed
+ * duration).
+ *
+ * Returns: the found position in the index.
+ */
+ static guint
+ gst_avi_demux_index_for_time (GstAviDemux * avi,
+ GstAviStream * stream, guint64 time, gboolean next)
+ {
+ guint index = -1;
+ guint64 total;
+
+ GST_LOG_OBJECT (avi, "search time:%" GST_TIME_FORMAT, GST_TIME_ARGS (time));
+
+ /* easy (and common) cases */
+ if (time == 0 || stream->idx_n == 0)
+ return 0;
+ if (time >= stream->idx_duration)
+ return stream->idx_n - 1;
+
+ /* figure out where we need to go. For that we convert the time to an
+ * index entry or we convert it to a total and then do a binary search. */
+ if (stream->is_vbr) {
+ /* VBR stream: convert the time to blocks (audio) or frames (video) */
+ if (stream->strh->type == GST_RIFF_FCC_auds) {
+ total = avi_stream_convert_time_to_frames_unchecked (stream, time);
+ } else {
+ index = avi_stream_convert_time_to_frames_unchecked (stream, time);
+ /* this entry typically undershoots the target time,
+ * so step one entry further when @next is requested */
+ if (next && index != -1) {
+ GstClockTime itime =
+ avi_stream_convert_frames_to_time_unchecked (stream, index);
+ if (itime < time && index + 1 < stream->idx_n)
+ index++;
+ }
+ }
+ } else if (stream->strh->type == GST_RIFF_FCC_auds) {
+ /* constant rate stream */
+ total = avi_stream_convert_time_to_bytes_unchecked (stream, time);
+ } else
+ return -1;
+
+ if (index == -1) {
+ GstAviIndexEntry *entry;
+
+ /* no index, find index with binary search on total */
+ GST_LOG_OBJECT (avi, "binary search for entry with total %"
+ G_GUINT64_FORMAT, total);
+
+ entry = gst_util_array_binary_search (stream->index,
+ stream->idx_n, sizeof (GstAviIndexEntry),
+ (GCompareDataFunc) gst_avi_demux_index_entry_search,
+ next ? GST_SEARCH_MODE_AFTER : GST_SEARCH_MODE_BEFORE, &total, NULL);
+
+ if (entry == NULL) {
+ GST_LOG_OBJECT (avi, "not found, assume index 0");
+ index = 0;
+ } else {
+ index = entry - stream->index;
+ GST_LOG_OBJECT (avi, "found at %u", index);
+ }
+ } else {
+ GST_LOG_OBJECT (avi, "converted time to index %u", index);
+ }
+
+ return index;
+ }
+
+ static inline GstAviStream *
+ gst_avi_demux_stream_for_id (GstAviDemux * avi, guint32 id)
+ {
+ guint stream_nr;
+ GstAviStream *stream;
+
+ /* get the stream for this entry */
+ stream_nr = CHUNKID_TO_STREAMNR (id);
+ if (G_UNLIKELY (stream_nr >= avi->num_streams)) {
+ GST_WARNING_OBJECT (avi,
+ "invalid stream nr %d (0x%08x, %" GST_FOURCC_FORMAT ")", stream_nr, id,
+ GST_FOURCC_ARGS (id));
+ return NULL;
+ }
+ stream = &avi->stream[stream_nr];
+ if (G_UNLIKELY (!stream->strh)) {
+ GST_WARNING_OBJECT (avi, "Unhandled stream %d, skipping", stream_nr);
+ return NULL;
+ }
+ return stream;
+ }
+
+ /*
+ * gst_avi_demux_parse_index:
+ * @avi: calling element (used for debugging/errors).
+ * @buf: buffer containing the full index.
+ *
+ * Read index entries from the provided buffer.
+ * The buffer should contain a GST_RIFF_TAG_idx1 chunk.
+ */
+ static gboolean
+ gst_avi_demux_parse_index (GstAviDemux * avi, GstBuffer * buf)
+ {
+ GstMapInfo map;
+ guint i, num, n;
+ gst_riff_index_entry *index;
+ GstClockTime stamp;
+ GstAviStream *stream;
+ GstAviIndexEntry entry;
+ guint32 id;
+
+ if (!buf)
+ return FALSE;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+
+ stamp = gst_util_get_timestamp ();
+
+ /* see how many items in the index */
+ num = map.size / sizeof (gst_riff_index_entry);
+ if (num == 0)
+ goto empty_list;
+
+ GST_INFO_OBJECT (avi, "Parsing index, nr_entries = %6d", num);
+
+ index = (gst_riff_index_entry *) map.data;
+
+ /* figure out if the index is 0 based or relative to the MOVI start */
+ entry.offset = GST_READ_UINT32_LE (&index[0].offset);
+ if (entry.offset < avi->offset) {
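+ /* the first entry points before our current position (we are already
+ * past the movi list), so the offsets must be relative to the start of
+ * the movi data; index_offset compensates for that below, with +8
+ * skipping the movi LIST tag and size */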
+ avi->index_offset = avi->offset + 8;
+ GST_DEBUG ("index_offset = %" G_GUINT64_FORMAT, avi->index_offset);
+ } else {
+ avi->index_offset = 0;
+ GST_DEBUG ("index is 0 based");
+ }
+
+ for (i = 0, n = 0; i < num; i++) {
+ id = GST_READ_UINT32_LE (&index[i].id);
+ entry.offset = GST_READ_UINT32_LE (&index[i].offset);
+
+ /* some sanity checks */
+ if (G_UNLIKELY (id == GST_RIFF_rec || id == 0 ||
+ (entry.offset == 0 && n > 0)))
+ continue;
+
+ /* get the stream for this entry */
+ stream = gst_avi_demux_stream_for_id (avi, id);
+ if (G_UNLIKELY (!stream))
+ continue;
+
+ /* handle offset and size */
+ entry.offset += avi->index_offset + 8;
+ entry.size = GST_READ_UINT32_LE (&index[i].size);
+
+ /* handle flags */
+ if (stream->strh->type == GST_RIFF_FCC_auds) {
+ /* all audio frames are keyframes */
+ ENTRY_SET_KEYFRAME (&entry);
+ } else if (stream->strh->type == GST_RIFF_FCC_vids &&
+ stream->strf.vids->compression == GST_RIFF_DXSB) {
+ /* all xsub frames are keyframes */
+ ENTRY_SET_KEYFRAME (&entry);
+ } else {
+ guint32 flags;
+ /* else read flags */
+ flags = GST_READ_UINT32_LE (&index[i].flags);
+ if (flags & GST_RIFF_IF_KEYFRAME) {
+ ENTRY_SET_KEYFRAME (&entry);
+ } else {
+ ENTRY_UNSET_KEYFRAME (&entry);
+ }
+ }
+
+ /* and add */
+ if (G_UNLIKELY (!gst_avi_demux_add_index (avi, stream, num, &entry)))
+ goto out_of_mem;
+
+ n++;
+ }
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+
+ /* get stream stats now */
+ avi->have_index = gst_avi_demux_do_index_stats (avi);
+
+ stamp = gst_util_get_timestamp () - stamp;
+ GST_DEBUG_OBJECT (avi, "index parsing took %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (stamp));
+
+ return TRUE;
+
+ /* ERRORS */
+ empty_list:
+ {
+ GST_DEBUG_OBJECT (avi, "empty index");
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ return FALSE;
+ }
+ out_of_mem:
+ {
+ GST_ELEMENT_ERROR (avi, RESOURCE, NO_SPACE_LEFT, (NULL),
+ ("Cannot allocate memory for %u*%u=%u bytes",
+ (guint) sizeof (GstAviIndexEntry), num,
+ (guint) sizeof (GstAviIndexEntry) * num));
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ return FALSE;
+ }
+ }
+
+ /*
+ * gst_avi_demux_stream_index:
+ * @avi: avi demuxer object.
+ *
+ * Seeks to index and reads it.
+ */
+ static void
+ gst_avi_demux_stream_index (GstAviDemux * avi)
+ {
+ GstFlowReturn res;
+ guint64 offset = avi->offset;
+ GstBuffer *buf = NULL;
+ guint32 tag;
+ guint32 size;
+ GstMapInfo map;
+
+ GST_DEBUG ("demux stream index at offset %" G_GUINT64_FORMAT, offset);
+
+ /* get chunk information */
+ res = gst_pad_pull_range (avi->sinkpad, offset, 8, &buf);
+ if (res != GST_FLOW_OK)
+ goto pull_failed;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ if (map.size < 8)
+ goto too_small;
+
+ /* check tag first before blindly trying to read 'size' bytes */
+ tag = GST_READ_UINT32_LE (map.data);
+ size = GST_READ_UINT32_LE (map.data + 4);
+ if (tag == GST_RIFF_TAG_LIST) {
+ /* this is the movi tag */
+ GST_DEBUG_OBJECT (avi, "skip LIST chunk, size %" G_GUINT32_FORMAT,
+ (8 + GST_ROUND_UP_2 (size)));
+ offset += 8 + GST_ROUND_UP_2 (size);
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+
+ buf = NULL;
+ res = gst_pad_pull_range (avi->sinkpad, offset, 8, &buf);
+ if (res != GST_FLOW_OK)
+ goto pull_failed;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ if (map.size < 8)
+ goto too_small;
+
+ tag = GST_READ_UINT32_LE (map.data);
+ size = GST_READ_UINT32_LE (map.data + 4);
+ }
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+
+ if (tag != GST_RIFF_TAG_idx1)
+ goto no_index;
+ if (!size)
+ goto zero_index;
+
+ GST_DEBUG ("index found at offset %" G_GUINT64_FORMAT, offset);
+
+ /* read chunk, advance offset */
+ if (gst_riff_read_chunk (GST_ELEMENT_CAST (avi),
+ avi->sinkpad, &offset, &tag, &buf) != GST_FLOW_OK)
+ return;
+
+ GST_DEBUG ("will parse index chunk size %" G_GSIZE_FORMAT " for tag %"
+ GST_FOURCC_FORMAT, gst_buffer_get_size (buf), GST_FOURCC_ARGS (tag));
+
+ gst_avi_demux_parse_index (avi, buf);
+
+ #ifndef GST_DISABLE_GST_DEBUG
+ /* debug our indexes */
+ {
+ gint i;
+ GstAviStream *stream;
+
+ for (i = 0; i < avi->num_streams; i++) {
+ stream = &avi->stream[i];
+ GST_DEBUG_OBJECT (avi, "stream %u: %u frames, %" G_GINT64_FORMAT " bytes",
+ i, stream->idx_n, stream->total_bytes);
+ }
+ }
+ #endif
+ return;
+
+ /* ERRORS */
+ pull_failed:
+ {
+ GST_DEBUG_OBJECT (avi,
+ "pull range failed: pos=%" G_GUINT64_FORMAT " size=8", offset);
+ return;
+ }
+ too_small:
+ {
+ GST_DEBUG_OBJECT (avi, "Buffer is too small");
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ return;
+ }
+ no_index:
+ {
+ GST_WARNING_OBJECT (avi,
+ "No index data (idx1) after movi chunk, but %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (tag));
+ return;
+ }
+ zero_index:
+ {
+ GST_WARNING_OBJECT (avi, "Empty index data (idx1) after movi chunk");
+ return;
+ }
+ }
+
+ /*
+ * gst_avi_demux_stream_index_push:
+ * @avi: avi demuxer object.
+ *
+ * Read index.
+ */
+ static void
+ gst_avi_demux_stream_index_push (GstAviDemux * avi)
+ {
+ guint64 offset = avi->idx1_offset;
+ GstBuffer *buf;
+ guint32 tag;
+ guint32 size;
+
+ GST_DEBUG ("demux stream index at offset %" G_GUINT64_FORMAT, offset);
+
+ /* get chunk information */
+ if (!gst_avi_demux_peek_chunk (avi, &tag, &size))
+ return;
+
+ /* check tag first before blindly trying to read 'size' bytes */
+ if (tag == GST_RIFF_TAG_LIST) {
+ /* this is the movi tag */
+ GST_DEBUG_OBJECT (avi, "skip LIST chunk, size %" G_GUINT32_FORMAT,
+ (8 + GST_ROUND_UP_2 (size)));
+ avi->idx1_offset = offset + 8 + GST_ROUND_UP_2 (size);
+ /* issue seek to allow chain function to handle it and return! */
+ perform_seek_to_offset (avi, avi->idx1_offset, avi->segment_seqnum);
+ return;
+ }
+
+ if (tag != GST_RIFF_TAG_idx1)
+ goto no_index;
+
+ GST_DEBUG ("index found at offset %" G_GUINT64_FORMAT, offset);
+
+ /* flush chunk header */
+ gst_adapter_flush (avi->adapter, 8);
+ /* read chunk payload */
+ buf = gst_adapter_take_buffer (avi->adapter, size);
+ if (!buf)
+ goto pull_failed;
+ /* advance offset */
+ offset += 8 + GST_ROUND_UP_2 (size);
+
+ GST_DEBUG ("will parse index chunk size %" G_GSIZE_FORMAT " for tag %"
+ GST_FOURCC_FORMAT, gst_buffer_get_size (buf), GST_FOURCC_ARGS (tag));
+
+ avi->offset = avi->first_movi_offset;
+ gst_avi_demux_parse_index (avi, buf);
+
+ #ifndef GST_DISABLE_GST_DEBUG
+ /* debug our indexes */
+ {
+ gint i;
+ GstAviStream *stream;
+
+ for (i = 0; i < avi->num_streams; i++) {
+ stream = &avi->stream[i];
+ GST_DEBUG_OBJECT (avi, "stream %u: %u frames, %" G_GINT64_FORMAT " bytes",
+ i, stream->idx_n, stream->total_bytes);
+ }
+ }
+ #endif
+ return;
+
+ /* ERRORS */
+ pull_failed:
+ {
+ GST_DEBUG_OBJECT (avi,
+ "taking data from adapter failed: pos=%" G_GUINT64_FORMAT " size=%u",
+ offset, size);
+ return;
+ }
+ no_index:
+ {
+ GST_WARNING_OBJECT (avi,
+ "No index data (idx1) after movi chunk, but %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (tag));
+ return;
+ }
+ }
+
+ /*
+ * gst_avi_demux_peek_tag:
+ *
+ * Returns the tag and size of the next chunk
+ */
+ static GstFlowReturn
+ gst_avi_demux_peek_tag (GstAviDemux * avi, guint64 offset, guint32 * tag,
+ guint * size)
+ {
+ GstFlowReturn res;
+ GstBuffer *buf = NULL;
+ GstMapInfo map;
+
+ res = gst_pad_pull_range (avi->sinkpad, offset, 8, &buf);
+ if (res != GST_FLOW_OK)
+ goto pull_failed;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ if (map.size != 8)
+ goto wrong_size;
+
+ *tag = GST_READ_UINT32_LE (map.data);
+ *size = GST_READ_UINT32_LE (map.data + 4);
+
+ GST_LOG_OBJECT (avi, "Tag[%" GST_FOURCC_FORMAT "] (size:%d) %"
+ G_GINT64_FORMAT " -- %" G_GINT64_FORMAT, GST_FOURCC_ARGS (*tag),
+ *size, offset + 8, offset + 8 + (gint64) * size);
+
+ done:
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+
+ return res;
+
+ /* ERRORS */
+ pull_failed:
+ {
+ GST_DEBUG_OBJECT (avi, "pull_ranged returned %s", gst_flow_get_name (res));
+ return res;
+ }
+ wrong_size:
+ {
+ GST_DEBUG_OBJECT (avi, "got %" G_GSIZE_FORMAT " bytes which is <> 8 bytes",
+ map.size);
+ res = GST_FLOW_ERROR;
+ goto done;
+ }
+ }
+
+ /*
+ * gst_avi_demux_next_data_buffer:
+ *
+ * Returns the offset and size of the next data buffer;
+ * @offset is updated to the position of the buffer's payload
+ * (i.e. just past the tag and size fields)
+ */
+ static GstFlowReturn
+ gst_avi_demux_next_data_buffer (GstAviDemux * avi, guint64 * offset,
+ guint32 * tag, guint * size)
+ {
+ guint64 off = *offset;
+ guint _size = 0;
+ GstFlowReturn res;
+
+ do {
+ res = gst_avi_demux_peek_tag (avi, off, tag, &_size);
+ if (res != GST_FLOW_OK)
+ break;
+ if (*tag == GST_RIFF_TAG_LIST || *tag == GST_RIFF_TAG_RIFF)
+ off += 8 + 4; /* skip tag + size + subtag */
+ else {
+ *offset = off + 8;
+ *size = _size;
+ break;
+ }
+ } while (TRUE);
+
+ return res;
+ }
+
+ /*
+ * gst_avi_demux_stream_scan:
+ * @avi: calling element (used for debugging/errors).
+ *
+ * Scan the file for all chunks to "create" a new index.
+ * pull-range based
+ */
+ static gboolean
+ gst_avi_demux_stream_scan (GstAviDemux * avi)
+ {
+ GstFlowReturn res;
+ GstAviStream *stream;
+ guint64 pos = 0;
+ guint64 length;
+ gint64 tmplength;
+ guint32 tag = 0;
+ guint num;
+
+ /* FIXME:
+ * - implement non-seekable source support.
+ */
+ GST_DEBUG_OBJECT (avi, "Creating index");
+
+ /* get the size of the file */
+ if (!gst_pad_peer_query_duration (avi->sinkpad, GST_FORMAT_BYTES, &tmplength))
+ return FALSE;
+ length = tmplength;
+
+ /* guess the total amount of entries we expect */
+ num = 16000;
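+ /* num is only passed to gst_avi_demux_add_index as an expected-total
+ * hint (and used in its error message); it does not limit the scan */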
+
+ while (TRUE) {
+ GstAviIndexEntry entry;
+ guint size = 0;
+
+ /* start reading data buffers to find the id and offset */
+ res = gst_avi_demux_next_data_buffer (avi, &pos, &tag, &size);
+ if (G_UNLIKELY (res != GST_FLOW_OK))
+ break;
+
+ /* get stream */
+ stream = gst_avi_demux_stream_for_id (avi, tag);
+ if (G_UNLIKELY (!stream))
+ goto next;
+
+ /* we can't figure out the keyframes, assume they all are */
+ entry.flags = GST_AVI_KEYFRAME;
+ entry.offset = pos;
+ entry.size = size;
+
+ /* and add to the index of this stream */
+ if (G_UNLIKELY (!gst_avi_demux_add_index (avi, stream, num, &entry)))
+ goto out_of_mem;
+
+ next:
+ /* update position */
+ pos += GST_ROUND_UP_2 (size);
+ if (G_UNLIKELY (pos > length)) {
+ GST_WARNING_OBJECT (avi,
+ "Stopping index lookup since we are further than EOF");
+ break;
+ }
+ }
+
+ /* collect stats */
+ avi->have_index = gst_avi_demux_do_index_stats (avi);
+
+ return TRUE;
+
+ /* ERRORS */
+ out_of_mem:
+ {
+ GST_ELEMENT_ERROR (avi, RESOURCE, NO_SPACE_LEFT, (NULL),
+ ("Cannot allocate memory for %u*%u=%u bytes",
+ (guint) sizeof (GstAviIndexEntry), num,
+ (guint) sizeof (GstAviIndexEntry) * num));
+ return FALSE;
+ }
+ }
+
+ static void
+ gst_avi_demux_calculate_durations_from_index (GstAviDemux * avi)
+ {
+ guint i;
+ GstClockTime total;
+ GstAviStream *stream;
+
+ total = GST_CLOCK_TIME_NONE;
+
+ /* all streams start at a timestamp 0 */
+ for (i = 0; i < avi->num_streams; i++) {
+ GstClockTime duration, hduration;
+ gst_riff_strh *strh;
+
+ stream = &avi->stream[i];
+ if (G_UNLIKELY (!stream || !stream->idx_n || !(strh = stream->strh)))
+ continue;
+
+ /* get header duration for the stream */
+ hduration = stream->hdr_duration;
+ /* index duration calculated during parsing */
+ duration = stream->idx_duration;
+
+ /* now pick a good duration */
+ if (GST_CLOCK_TIME_IS_VALID (duration)) {
+ /* index gave valid duration, use that */
+ GST_INFO ("Stream %p duration according to index: %" GST_TIME_FORMAT,
+ stream, GST_TIME_ARGS (duration));
+ } else {
+ /* fall back to header info to calculate a duration */
+ duration = hduration;
+ }
+ GST_INFO ("Setting duration of stream #%d to %" GST_TIME_FORMAT,
+ i, GST_TIME_ARGS (duration));
+ /* set duration for the stream */
+ stream->duration = duration;
+
+ /* find total duration */
+ if (total == GST_CLOCK_TIME_NONE ||
+ (GST_CLOCK_TIME_IS_VALID (duration) && duration > total))
+ total = duration;
+ }
+
+ if (GST_CLOCK_TIME_IS_VALID (total) && (total > 0)) {
+ /* now update the duration for those streams where we had none */
+ for (i = 0; i < avi->num_streams; i++) {
+ stream = &avi->stream[i];
+
+ if (!GST_CLOCK_TIME_IS_VALID (stream->duration)
+ || stream->duration == 0) {
+ stream->duration = total;
+
+ GST_INFO ("Stream %p duration according to total: %" GST_TIME_FORMAT,
+ stream, GST_TIME_ARGS (total));
+ }
+ }
+ }
+
+ /* and set the total duration in the segment. */
+ GST_INFO ("Setting total duration to: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (total));
+
+ avi->segment.duration = total;
+ }
+
+ /* returns FALSE if there are no pads to deliver event to,
+ * otherwise TRUE (whatever the outcome of event sending),
+ * takes ownership of the event. */
+ static gboolean
+ gst_avi_demux_push_event (GstAviDemux * avi, GstEvent * event)
+ {
+ gboolean result = FALSE;
+ gint i;
+
+ GST_DEBUG_OBJECT (avi, "sending %s event to %d streams",
+ GST_EVENT_TYPE_NAME (event), avi->num_streams);
+
+ for (i = 0; i < avi->num_streams; i++) {
+ GstAviStream *stream = &avi->stream[i];
+
+ if (stream->pad) {
+ result = TRUE;
+ gst_pad_push_event (stream->pad, gst_event_ref (event));
+ }
+ }
+ gst_event_unref (event);
+ return result;
+ }
+
+ static void
+ gst_avi_demux_check_seekability (GstAviDemux * avi)
+ {
+ GstQuery *query;
+ gboolean seekable = FALSE;
+ gint64 start = -1, stop = -1;
+
+ query = gst_query_new_seeking (GST_FORMAT_BYTES);
+ if (!gst_pad_peer_query (avi->sinkpad, query)) {
+ GST_DEBUG_OBJECT (avi, "seeking query failed");
+ goto done;
+ }
+
+ gst_query_parse_seeking (query, NULL, &seekable, &start, &stop);
+
+ /* try harder to query upstream size if we didn't get it the first time */
+ if (seekable && stop == -1) {
+ GST_DEBUG_OBJECT (avi, "doing duration query to fix up unset stop");
+ gst_pad_peer_query_duration (avi->sinkpad, GST_FORMAT_BYTES, &stop);
+ }
+
+ /* if upstream doesn't know the size, it's likely that it's not seekable in
+ * practice even if it technically may be seekable */
+ if (seekable && (start != 0 || stop <= start)) {
+ GST_DEBUG_OBJECT (avi, "seekable but unknown start/stop -> disable");
+ seekable = FALSE;
+ }
+
+ done:
+ GST_INFO_OBJECT (avi, "seekable: %d (%" G_GUINT64_FORMAT " - %"
+ G_GUINT64_FORMAT ")", seekable, start, stop);
+ avi->seekable = seekable;
+
+ gst_query_unref (query);
+ }
+
+ /*
+ * Read AVI headers when streaming
+ */
+ static GstFlowReturn
+ gst_avi_demux_stream_header_push (GstAviDemux * avi)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint32 tag = 0;
+ guint32 ltag = 0;
+ guint32 size = 0;
+ const guint8 *data;
+ GstBuffer *buf = NULL, *sub = NULL;
+ guint offset = 4;
+ gint i;
+ GstTagList *tags = NULL;
+ guint8 fourcc[4];
+
+ GST_DEBUG ("Reading and parsing avi headers: %d", avi->header_state);
+
+ switch (avi->header_state) {
+ case GST_AVI_DEMUX_HEADER_TAG_LIST:
+ again:
+ if (gst_avi_demux_peek_chunk (avi, &tag, &size)) {
+ avi->offset += 8 + GST_ROUND_UP_2 (size);
+ if (tag != GST_RIFF_TAG_LIST)
+ goto header_no_list;
+
+ gst_adapter_flush (avi->adapter, 8);
+ /* Find the 'hdrl' LIST tag */
+ GST_DEBUG ("Reading %d bytes", size);
+ buf = gst_adapter_take_buffer (avi->adapter, size);
+
+ gst_buffer_extract (buf, 0, fourcc, 4);
+
+ if (GST_READ_UINT32_LE (fourcc) != GST_RIFF_LIST_hdrl) {
+ GST_WARNING_OBJECT (avi, "Invalid AVI header (no hdrl at start): %"
+ GST_FOURCC_FORMAT, GST_FOURCC_ARGS (tag));
+ gst_buffer_unref (buf);
+ goto again;
+ }
+
+ /* mind padding */
+ if (size & 1)
+ gst_adapter_flush (avi->adapter, 1);
+
+ GST_DEBUG ("'hdrl' LIST tag found. Parsing next chunk");
+
+ gst_avi_demux_roundup_list (avi, &buf);
+
+ /* the hdrl starts with a 'avih' header */
+ if (!gst_riff_parse_chunk (GST_ELEMENT_CAST (avi), buf, &offset, &tag,
+ &sub))
+ goto header_no_avih;
+
+ if (tag != GST_RIFF_TAG_avih)
+ goto header_no_avih;
+
+ if (!gst_avi_demux_parse_avih (avi, sub, &avi->avih))
+ goto header_wrong_avih;
+
+ GST_DEBUG_OBJECT (avi, "AVI header ok, reading elements from header");
+
+ /* now, read the elements from the header until the end */
+ while (gst_riff_parse_chunk (GST_ELEMENT_CAST (avi), buf, &offset, &tag,
+ &sub)) {
+ /* sub can be NULL on empty tags */
+ if (!sub)
+ continue;
+
+ switch (tag) {
+ case GST_RIFF_TAG_LIST:
+ if (gst_buffer_get_size (sub) < 4)
+ goto next;
+
+ gst_buffer_extract (sub, 0, fourcc, 4);
+
+ switch (GST_READ_UINT32_LE (fourcc)) {
+ case GST_RIFF_LIST_strl:
+ if (!(gst_avi_demux_parse_stream (avi, sub))) {
+ sub = NULL;
+ GST_ELEMENT_WARNING (avi, STREAM, DEMUX, (NULL),
+ ("failed to parse stream, ignoring"));
+ goto next;
+ }
+ sub = NULL;
+ goto next;
+ case GST_RIFF_LIST_odml:
+ gst_avi_demux_parse_odml (avi, sub);
+ sub = NULL;
+ break;
+ default:
+ GST_WARNING_OBJECT (avi,
+ "Unknown list %" GST_FOURCC_FORMAT " in AVI header",
+ GST_FOURCC_ARGS (GST_READ_UINT32_LE (fourcc)));
+ /* fall-through */
+ case GST_RIFF_TAG_JUNQ:
+ case GST_RIFF_TAG_JUNK:
+ goto next;
+ }
+ break;
+ case GST_RIFF_IDIT:
+ gst_avi_demux_parse_idit (avi, sub);
+ goto next;
+ default:
+ GST_WARNING_OBJECT (avi,
+ "Unknown tag %" GST_FOURCC_FORMAT " in AVI header",
+ GST_FOURCC_ARGS (tag));
+ /* Only get buffer for debugging if the memdump is needed */
+ if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >= 9) {
+ GstMapInfo map;
+
+ gst_buffer_map (sub, &map, GST_MAP_READ);
+ GST_MEMDUMP_OBJECT (avi, "Unknown tag", map.data, map.size);
+ gst_buffer_unmap (sub, &map);
+ }
+ /* fall-through */
+ case GST_RIFF_TAG_JUNQ:
+ case GST_RIFF_TAG_JUNK:
+ next:
+ /* move to next chunk */
+ if (sub)
+ gst_buffer_unref (sub);
+ sub = NULL;
+ break;
+ }
+ }
+ gst_buffer_unref (buf);
+ GST_DEBUG ("elements parsed");
+
+ /* check parsed streams */
+ if (avi->num_streams == 0) {
+ goto no_streams;
+ } else if (avi->num_streams != avi->avih->streams) {
+ GST_WARNING_OBJECT (avi,
+ "Stream header mentioned %d streams, but %d available",
+ avi->avih->streams, avi->num_streams);
+ }
+ GST_DEBUG ("Get junk and info next");
+ avi->header_state = GST_AVI_DEMUX_HEADER_INFO;
+ } else {
+ /* Need more data */
+ return ret;
+ }
+ /* fall-through */
+ case GST_AVI_DEMUX_HEADER_INFO:
+ GST_DEBUG_OBJECT (avi, "skipping junk between header and data ...");
+ while (TRUE) {
+ if (gst_adapter_available (avi->adapter) < 12)
+ return GST_FLOW_OK;
+
+ data = gst_adapter_map (avi->adapter, 12);
+ tag = GST_READ_UINT32_LE (data);
+ size = GST_READ_UINT32_LE (data + 4);
+ ltag = GST_READ_UINT32_LE (data + 8);
+ gst_adapter_unmap (avi->adapter);
+
+ if (tag == GST_RIFF_TAG_LIST) {
+ switch (ltag) {
+ case GST_RIFF_LIST_movi:
+ gst_adapter_flush (avi->adapter, 12);
+ if (!avi->first_movi_offset)
+ avi->first_movi_offset = avi->offset;
+ avi->offset += 12;
+ avi->idx1_offset = avi->offset + size - 4;
+ goto skipping_done;
+ case GST_RIFF_LIST_INFO:
+ GST_DEBUG ("Found INFO chunk");
+ if (gst_avi_demux_peek_chunk (avi, &tag, &size)) {
+ GST_DEBUG ("got size %d", size);
+ avi->offset += 12;
+ gst_adapter_flush (avi->adapter, 12);
+ if (size > 4) {
+ buf = gst_adapter_take_buffer (avi->adapter, size - 4);
+ /* mind padding */
+ if (size & 1)
+ gst_adapter_flush (avi->adapter, 1);
+ gst_riff_parse_info (GST_ELEMENT_CAST (avi), buf, &tags);
+ if (tags) {
+ if (avi->globaltags) {
+ gst_tag_list_insert (avi->globaltags, tags,
+ GST_TAG_MERGE_REPLACE);
+ gst_tag_list_unref (tags);
+ } else {
+ avi->globaltags = tags;
+ }
+ }
+ tags = NULL;
+ gst_buffer_unref (buf);
+
+ avi->offset += GST_ROUND_UP_2 (size) - 4;
+ } else {
+ GST_DEBUG ("skipping INFO LIST prefix");
+ }
+ } else {
+ /* Need more data */
+ return GST_FLOW_OK;
+ }
+ break;
+ default:
+ if (gst_avi_demux_peek_chunk (avi, &tag, &size) || size == 0) {
+ /* accept 0 size buffer here */
+ avi->abort_buffering = FALSE;
+ avi->offset += 8 + GST_ROUND_UP_2 (size);
+ gst_adapter_flush (avi->adapter, 8 + GST_ROUND_UP_2 (size));
+ } else {
+ /* Need more data */
+ return GST_FLOW_OK;
+ }
+ break;
+ }
+ } else {
+ if (gst_avi_demux_peek_chunk (avi, &tag, &size) || size == 0) {
+ /* accept 0 size buffer here */
+ avi->abort_buffering = FALSE;
+ avi->offset += 8 + GST_ROUND_UP_2 (size);
+ gst_adapter_flush (avi->adapter, 8 + GST_ROUND_UP_2 (size));
+ } else {
+ /* Need more data */
+ return GST_FLOW_OK;
+ }
+ }
+ }
+ break;
+ default:
+ GST_WARNING ("unhandled header state: %d", avi->header_state);
+ break;
+ }
+ skipping_done:
+
+ GST_DEBUG_OBJECT (avi, "skipping done ... (streams=%u, stream[0].indexes=%p)",
+ avi->num_streams, avi->stream[0].indexes);
+
+ GST_DEBUG ("Found movi chunk. Starting to stream data");
+ avi->state = GST_AVI_DEMUX_MOVI;
+
+ /* no indexes in push mode, but it still sets some variables */
+ gst_avi_demux_calculate_durations_from_index (avi);
+
+ gst_avi_demux_expose_streams (avi, TRUE);
+
+ /* prepare all streams for index 0 */
+ for (i = 0; i < avi->num_streams; i++)
+ avi->stream[i].current_entry = 0;
+
+ /* create initial NEWSEGMENT event */
+ if (avi->seg_event)
+ gst_event_unref (avi->seg_event);
+ avi->seg_event = gst_event_new_segment (&avi->segment);
+ if (avi->segment_seqnum)
+ gst_event_set_seqnum (avi->seg_event, avi->segment_seqnum);
+
+ gst_avi_demux_check_seekability (avi);
+
+ /* at this point we know all the streams, so we can emit the
+ * no-more-pads signal */
+ GST_DEBUG_OBJECT (avi, "signaling no more pads");
+ gst_element_no_more_pads (GST_ELEMENT_CAST (avi));
+
+ return GST_FLOW_OK;
+
+ /* ERRORS */
+ no_streams:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL), ("No streams found"));
+ return GST_FLOW_ERROR;
+ }
+ header_no_list:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("Invalid AVI header (no LIST at start): %"
+ GST_FOURCC_FORMAT, GST_FOURCC_ARGS (tag)));
+ return GST_FLOW_ERROR;
+ }
+ header_no_avih:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("Invalid AVI header (no avih at start): %"
+ GST_FOURCC_FORMAT, GST_FOURCC_ARGS (tag)));
+ if (sub)
+ gst_buffer_unref (sub);
+
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+ header_wrong_avih:
+ {
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ static void
+ gst_avi_demux_add_date_tag (GstAviDemux * avi, gint y, gint m, gint d,
+ gint h, gint min, gint s)
+ {
+ GDate *date;
+ GstDateTime *dt;
+
+ date = g_date_new_dmy (d, m, y);
+ if (!g_date_valid (date)) {
+ /* bogus date */
+ GST_WARNING_OBJECT (avi, "Refusing to add invalid date %d-%d-%d", y, m, d);
+ g_date_free (date);
+ return;
+ }
+
+ dt = gst_date_time_new_local_time (y, m, d, h, min, s);
+
+ if (avi->globaltags == NULL)
+ avi->globaltags = gst_tag_list_new_empty ();
+
+ gst_tag_list_add (avi->globaltags, GST_TAG_MERGE_REPLACE, GST_TAG_DATE, date,
+ NULL);
+ g_date_free (date);
+ if (dt) {
+ gst_tag_list_add (avi->globaltags, GST_TAG_MERGE_REPLACE, GST_TAG_DATE_TIME,
+ dt, NULL);
+ gst_date_time_unref (dt);
+ }
+ }
+
+ static void
+ gst_avi_demux_parse_idit_nums_only (GstAviDemux * avi, gchar * data)
+ {
+ gint y, m, d;
+ gint hr = 0, min = 0, sec = 0;
+ gint ret;
+
+ GST_DEBUG ("data : '%s'", data);
+
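+ /* first try the "YYYY:MM:DD HH:MM:SS" layout; the time part is optional */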
+ ret = sscanf (data, "%d:%d:%d %d:%d:%d", &y, &m, &d, &hr, &min, &sec);
+ if (ret < 3) {
+ /* Attempt YYYY/MM/DD/ HH:MM variant (found in CASIO cameras) */
+ ret = sscanf (data, "%04d/%02d/%02d/ %d:%d", &y, &m, &d, &hr, &min);
+ if (ret < 3) {
+ GST_WARNING_OBJECT (avi, "Failed to parse IDIT tag");
+ return;
+ }
+ }
+ gst_avi_demux_add_date_tag (avi, y, m, d, hr, min, sec);
+ }
+
+ static gint
+ get_month_num (gchar * data, guint size)
+ {
+ if (g_ascii_strncasecmp (data, "jan", 3) == 0) {
+ return 1;
+ } else if (g_ascii_strncasecmp (data, "feb", 3) == 0) {
+ return 2;
+ } else if (g_ascii_strncasecmp (data, "mar", 3) == 0) {
+ return 3;
+ } else if (g_ascii_strncasecmp (data, "apr", 3) == 0) {
+ return 4;
+ } else if (g_ascii_strncasecmp (data, "may", 3) == 0) {
+ return 5;
+ } else if (g_ascii_strncasecmp (data, "jun", 3) == 0) {
+ return 6;
+ } else if (g_ascii_strncasecmp (data, "jul", 3) == 0) {
+ return 7;
+ } else if (g_ascii_strncasecmp (data, "aug", 3) == 0) {
+ return 8;
+ } else if (g_ascii_strncasecmp (data, "sep", 3) == 0) {
+ return 9;
+ } else if (g_ascii_strncasecmp (data, "oct", 3) == 0) {
+ return 10;
+ } else if (g_ascii_strncasecmp (data, "nov", 3) == 0) {
+ return 11;
+ } else if (g_ascii_strncasecmp (data, "dec", 3) == 0) {
+ return 12;
+ }
+
+ return 0;
+ }
+
+ static void
+ gst_avi_demux_parse_idit_text (GstAviDemux * avi, gchar * data)
+ {
+ gint year, month, day;
+ gint hour, min, sec;
+ gint ret;
+ gchar weekday[4];
+ gchar monthstr[4];
+
+ ret = sscanf (data, "%3s %3s %d %d:%d:%d %d", weekday, monthstr, &day, &hour,
+ &min, &sec, &year);
+ if (ret != 7) {
+ GST_WARNING_OBJECT (avi, "Failed to parse IDIT tag");
+ return;
+ }
+ month = get_month_num (monthstr, strlen (monthstr));
+ gst_avi_demux_add_date_tag (avi, year, month, day, hour, min, sec);
+ }
+
+ static void
+ gst_avi_demux_parse_idit (GstAviDemux * avi, GstBuffer * buf)
+ {
+ GstMapInfo map;
+ gchar *ptr;
+ gsize left;
+ gchar *safedata = NULL;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ /*
+ * According to:
+ * http://www.eden-foundation.org/products/code/film_date_stamp/index.html
+ *
+ * This tag could be in one of the below formats
+ * 2005:08:17 11:42:43
+ * THU OCT 26 16:46:04 2006
+ * Mon Mar 3 09:44:56 2008
+ *
+ * FIXME: Our date tag doesn't include hours
+ */
+
+ /* skip any initial whitespace */
+ ptr = (gchar *) map.data;
+ left = map.size;
+
+ while (left > 0 && g_ascii_isspace (ptr[0])) {
+ ptr++;
+ left--;
+ }
+
+ if (left == 0) {
+ goto non_parsable;
+ }
+
+ /* make a safe copy to add a \0 to the end of the string */
+ safedata = g_strndup (ptr, left);
+
+ /* test if the first char is alphabetic or a digit */
+ if (g_ascii_isdigit (ptr[0])) {
+ gst_avi_demux_parse_idit_nums_only (avi, safedata);
+ g_free (safedata);
+ gst_buffer_unmap (buf, &map);
+ return;
+ } else if (g_ascii_isalpha (ptr[0])) {
+ gst_avi_demux_parse_idit_text (avi, safedata);
+ g_free (safedata);
+ gst_buffer_unmap (buf, &map);
+ return;
+ }
+
+ g_free (safedata);
+
+ non_parsable:
+ GST_WARNING_OBJECT (avi, "IDIT tag has no parsable info");
+ gst_buffer_unmap (buf, &map);
+ }
+
+ static void
+ parse_tag_value (GstAviDemux * avi, GstTagList * taglist, const gchar * type,
+ guint8 * ptr, guint tsize)
+ {
+ static const gchar *env_vars[] = { "GST_AVI_TAG_ENCODING",
+ "GST_RIFF_TAG_ENCODING", "GST_TAG_ENCODING", NULL
+ };
+ GType tag_type;
+ gchar *val;
+
+ tag_type = gst_tag_get_type (type);
+ val = gst_tag_freeform_string_to_utf8 ((gchar *) ptr, tsize, env_vars);
+
+ if (val != NULL) {
+ if (tag_type == G_TYPE_STRING) {
+ gst_tag_list_add (taglist, GST_TAG_MERGE_APPEND, type, val, NULL);
+ } else {
+ GValue tag_val = { 0, };
+
+ g_value_init (&tag_val, tag_type);
+ if (gst_value_deserialize (&tag_val, val)) {
+ gst_tag_list_add_value (taglist, GST_TAG_MERGE_APPEND, type, &tag_val);
+ } else {
+ GST_WARNING_OBJECT (avi, "could not deserialize '%s' into a "
+ "tag %s of type %s", val, type, g_type_name (tag_type));
+ }
+ g_value_unset (&tag_val);
+ }
+ g_free (val);
+ } else {
+ GST_WARNING_OBJECT (avi, "could not extract %s tag", type);
+ }
+ }
+
+ static void
+ gst_avi_demux_parse_strd (GstAviDemux * avi, GstBuffer * buf)
+ {
+ GstMapInfo map;
+ guint32 tag;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ if (map.size > 4) {
+ guint8 *ptr = map.data;
+ gsize left = map.size;
+
+ /* parsing based on
+ * http://www.eden-foundation.org/products/code/film_date_stamp/index.html
+ */
+ tag = GST_READ_UINT32_LE (ptr);
+ if ((tag == GST_MAKE_FOURCC ('A', 'V', 'I', 'F')) && (map.size > 98)) {
+ gsize sub_size;
+
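+ /* per the reference above, the FUJIFILM maker block starts at a
+ * fixed offset of 98 bytes into the AVIF chunk */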
+ ptr += 98;
+ left -= 98;
+ if (left >= 10 && !memcmp (ptr, "FUJIFILM", 8)) {
+ GST_MEMDUMP_OBJECT (avi, "fujifilm tag", ptr, MIN (48, left));
+
+ ptr += 10;
+ left -= 10;
+ sub_size = 0;
+ while (sub_size < left && ptr[sub_size])
+ sub_size++;
+
+ if (avi->globaltags == NULL)
+ avi->globaltags = gst_tag_list_new_empty ();
+
+ gst_tag_list_add (avi->globaltags, GST_TAG_MERGE_APPEND,
+ GST_TAG_DEVICE_MANUFACTURER, "FUJIFILM", NULL);
+ parse_tag_value (avi, avi->globaltags, GST_TAG_DEVICE_MODEL, ptr,
+ sub_size);
+
+ while (sub_size < left && ptr[sub_size] == '\0')
+ sub_size++;
+
+ ptr += sub_size;
+ left -= sub_size;
+ sub_size = 0;
+ while (sub_size < left && ptr[sub_size])
+ sub_size++;
+ /* turn "YYYY:MM:DD" into "YYYY-MM-DD" so the string deserializes
+ * as a date/time */
+ if (sub_size > 7) {
+ if (ptr[4] == ':')
+ ptr[4] = '-';
+ if (ptr[7] == ':')
+ ptr[7] = '-';
+ }
+
+ parse_tag_value (avi, avi->globaltags, GST_TAG_DATE_TIME, ptr,
+ sub_size);
+ }
+ }
+ }
+ gst_buffer_unmap (buf, &map);
+ }
+
+ /*
+ * gst_avi_demux_parse_ncdt:
+ * @avi: calling element (used for debugging/errors).
+ * @buf: input data to be used for parsing, with the chunk header
+ * already stripped.
+ * @_taglist: a pointer to a taglist (returned by this function)
+ * containing information about this stream. Set to
+ * NULL if no supported tags were found.
+ *
+ * Parses Nikon metadata from input data.
+ */
+ static void
+ gst_avi_demux_parse_ncdt (GstAviDemux * avi, GstBuffer * buf,
+ GstTagList ** _taglist)
+ {
+ GstMapInfo info;
+ guint8 *ptr;
+ gsize left;
+ guint tsize;
+ guint32 tag;
+ const gchar *type;
+ GstTagList *taglist;
+
+ g_return_if_fail (_taglist != NULL);
+
+ if (!buf) {
+ *_taglist = NULL;
+ return;
+ }
+ gst_buffer_map (buf, &info, GST_MAP_READ);
+
+ taglist = gst_tag_list_new_empty ();
+
+ ptr = info.data;
+ left = info.size;
+
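+ /* the ncdt payload is a sequence of chunks, each one a 4-byte fourcc
+ * followed by a 4-byte little-endian size and the chunk data */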
+ while (left > 8) {
+ tag = GST_READ_UINT32_LE (ptr);
+ tsize = GST_READ_UINT32_LE (ptr + 4);
+
+ GST_MEMDUMP_OBJECT (avi, "tag chunk", ptr, MIN (tsize + 8, left));
+
+ left -= 8;
+ ptr += 8;
+
+ GST_DEBUG_OBJECT (avi, "tag %" GST_FOURCC_FORMAT ", size %u",
+ GST_FOURCC_ARGS (tag), tsize);
+
+ if (tsize > left) {
+ GST_WARNING_OBJECT (avi,
+ "Tagsize %d is larger than available data %" G_GSIZE_FORMAT,
+ tsize, left);
+ tsize = left;
+ }
+
+ /* find out the type of metadata */
+ switch (tag) {
+ case GST_RIFF_LIST_nctg:
+ while (tsize > 4) {
+ guint16 sub_tag = GST_READ_UINT16_LE (ptr);
+ guint16 sub_size = GST_READ_UINT16_LE (ptr + 2);
+
+ tsize -= 4;
+ ptr += 4;
+ left -= 4;
+
+ if (sub_size > tsize)
+ break;
+
+ GST_DEBUG_OBJECT (avi, "sub-tag %u, size %u", sub_tag, sub_size);
+ /* http://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/Nikon.html#NCTG
+ * for some reason the sub_tag has a +2 offset
+ */
+ switch (sub_tag) {
+ case 0x03: /* Make */
+ type = GST_TAG_DEVICE_MANUFACTURER;
+ break;
+ case 0x04: /* Model */
+ type = GST_TAG_DEVICE_MODEL;
+ break;
+ /* TODO: 0x05 is the software version, like V1.0 */
+ case 0x06: /* Software */
+ type = GST_TAG_ENCODER;
+ break;
+ case 0x13: /* CreationDate */
+ type = GST_TAG_DATE_TIME;
+ if (left > 7) {
+ if (ptr[4] == ':')
+ ptr[4] = '-';
+ if (ptr[7] == ':')
+ ptr[7] = '-';
+ }
+ break;
+ default:
+ type = NULL;
+ break;
+ }
+ if (type != NULL && ptr[0] != '\0') {
+ GST_DEBUG_OBJECT (avi, "mapped tag %u to tag %s", sub_tag, type);
+
+ parse_tag_value (avi, taglist, type, ptr, sub_size);
+ }
+
+ ptr += sub_size;
+ tsize -= sub_size;
+ left -= sub_size;
+ }
+ break;
+ default:
+ type = NULL;
+ GST_WARNING_OBJECT (avi,
+ "Unknown ncdt (metadata) tag entry %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (tag));
+ GST_MEMDUMP_OBJECT (avi, "Unknown ncdt", ptr, tsize);
+ break;
+ }
+
+ if (tsize & 1) {
+ tsize++;
+ if (tsize > left)
+ tsize = left;
+ }
+
+ ptr += tsize;
+ left -= tsize;
+ }
+
+ if (!gst_tag_list_is_empty (taglist)) {
+ GST_INFO_OBJECT (avi, "extracted tags: %" GST_PTR_FORMAT, taglist);
+ *_taglist = taglist;
+ } else {
+ *_taglist = NULL;
+ gst_tag_list_unref (taglist);
+ }
+ gst_buffer_unmap (buf, &info);
+
+ return;
+ }
+
+ /*
+ * Read full AVI headers.
+ */
+ static GstFlowReturn
+ gst_avi_demux_stream_header_pull (GstAviDemux * avi)
+ {
+ GstFlowReturn res;
+ GstBuffer *buf, *sub = NULL;
+ guint32 tag;
+ guint offset = 4;
+ GstElement *element = GST_ELEMENT_CAST (avi);
+ GstClockTime stamp;
+ GstTagList *tags = NULL;
+ guint8 fourcc[4];
+
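+ /* take a timestamp so we can log below how long pulling the headers took */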
+ stamp = gst_util_get_timestamp ();
+
+ /* the header consists of a 'hdrl' LIST tag */
+ res = gst_riff_read_chunk (element, avi->sinkpad, &avi->offset, &tag, &buf);
+ if (res != GST_FLOW_OK)
+ goto pull_range_failed;
+ else if (tag != GST_RIFF_TAG_LIST)
+ goto no_list;
+ else if (gst_buffer_get_size (buf) < 4)
+ goto no_header;
+
+ GST_DEBUG_OBJECT (avi, "parsing headers");
+
+ /* Find the 'hdrl' LIST tag */
+ gst_buffer_extract (buf, 0, fourcc, 4);
+ while (GST_READ_UINT32_LE (fourcc) != GST_RIFF_LIST_hdrl) {
+ GST_LOG_OBJECT (avi, "buffer contains %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (GST_READ_UINT32_LE (fourcc)));
+
+ /* Eat up */
+ gst_buffer_unref (buf);
+
+ /* read new chunk */
+ res = gst_riff_read_chunk (element, avi->sinkpad, &avi->offset, &tag, &buf);
+ if (res != GST_FLOW_OK)
+ goto pull_range_failed;
+ else if (tag != GST_RIFF_TAG_LIST)
+ goto no_list;
+ else if (gst_buffer_get_size (buf) < 4)
+ goto no_header;
+ gst_buffer_extract (buf, 0, fourcc, 4);
+ }
+
+ GST_DEBUG_OBJECT (avi, "hdrl LIST tag found");
+
+ gst_avi_demux_roundup_list (avi, &buf);
+
+ /* the hdrl starts with an 'avih' header */
+ if (!gst_riff_parse_chunk (element, buf, &offset, &tag, &sub))
+ goto no_avih;
+ else if (tag != GST_RIFF_TAG_avih)
+ goto no_avih;
+ else if (!gst_avi_demux_parse_avih (avi, sub, &avi->avih))
+ goto invalid_avih;
+
+ GST_DEBUG_OBJECT (avi, "AVI header ok, reading elements from header");
+
+ /* now, read the elements from the header until the end */
+ while (gst_riff_parse_chunk (element, buf, &offset, &tag, &sub)) {
+ GstMapInfo map;
+
+ /* sub can be NULL on empty tags */
+ if (!sub)
+ continue;
+
+ gst_buffer_map (sub, &map, GST_MAP_READ);
+
+ switch (tag) {
+ case GST_RIFF_TAG_LIST:
+ if (map.size < 4)
+ goto next;
+
+ switch (GST_READ_UINT32_LE (map.data)) {
+ case GST_RIFF_LIST_strl:
+ gst_buffer_unmap (sub, &map);
+ /* parse_stream() takes ownership of sub in both the
+ * success and the failure case */
+ if (!(gst_avi_demux_parse_stream (avi, sub)))
+ GST_ELEMENT_WARNING (avi, STREAM, DEMUX, (NULL),
+ ("failed to parse stream, ignoring"));
+ sub = NULL;
+ goto next;
+ case GST_RIFF_LIST_odml:
+ gst_buffer_unmap (sub, &map);
+ gst_avi_demux_parse_odml (avi, sub);
+ sub = NULL;
+ break;
+ case GST_RIFF_LIST_INFO:
+ gst_buffer_unmap (sub, &map);
+ gst_buffer_resize (sub, 4, -1);
+ gst_riff_parse_info (element, sub, &tags);
+ if (tags) {
+ if (avi->globaltags) {
+ gst_tag_list_insert (avi->globaltags, tags,
+ GST_TAG_MERGE_REPLACE);
+ gst_tag_list_unref (tags);
+ } else {
+ avi->globaltags = tags;
+ }
+ }
+ tags = NULL;
+ gst_buffer_unref (sub);
+ sub = NULL;
+ break;
+ case GST_RIFF_LIST_ncdt:
+ gst_buffer_unmap (sub, &map);
+ gst_buffer_resize (sub, 4, -1);
+ gst_avi_demux_parse_ncdt (avi, sub, &tags);
+ if (tags) {
+ if (avi->globaltags) {
+ gst_tag_list_insert (avi->globaltags, tags,
+ GST_TAG_MERGE_REPLACE);
+ gst_tag_list_unref (tags);
+ } else {
+ avi->globaltags = tags;
+ }
+ }
+ tags = NULL;
+ gst_buffer_unref (sub);
+ sub = NULL;
+ break;
+ default:
+ GST_WARNING_OBJECT (avi,
+ "Unknown list %" GST_FOURCC_FORMAT " in AVI header",
+ GST_FOURCC_ARGS (GST_READ_UINT32_LE (map.data)));
+ GST_MEMDUMP_OBJECT (avi, "Unknown list", map.data, map.size);
+ /* fall-through */
+ case GST_RIFF_TAG_JUNQ:
+ case GST_RIFF_TAG_JUNK:
+ goto next;
+ }
+ break;
+ case GST_RIFF_IDIT:
+ gst_avi_demux_parse_idit (avi, sub);
+ goto next;
+ default:
+ GST_WARNING_OBJECT (avi,
+ "Unknown tag %" GST_FOURCC_FORMAT " in AVI header",
+ GST_FOURCC_ARGS (tag));
+ GST_MEMDUMP_OBJECT (avi, "Unknown tag", map.data, map.size);
+ /* fall-through */
+ case GST_RIFF_TAG_JUNQ:
+ case GST_RIFF_TAG_JUNK:
+ next:
+ if (sub) {
+ gst_buffer_unmap (sub, &map);
+ gst_buffer_unref (sub);
+ }
+ sub = NULL;
+ break;
+ }
+ }
+ gst_buffer_unref (buf);
+ GST_DEBUG ("elements parsed");
+
+ /* check parsed streams */
+ if (avi->num_streams == 0)
+ goto no_streams;
+ else if (avi->num_streams != avi->avih->streams) {
+ GST_WARNING_OBJECT (avi,
+ "Stream header mentioned %d streams, but %d available",
+ avi->avih->streams, avi->num_streams);
+ }
+
+ GST_DEBUG_OBJECT (avi, "skipping junk between header and data, offset=%"
+ G_GUINT64_FORMAT, avi->offset);
+
+ /* Now, find the data (i.e. skip all junk between header and data) */
+ do {
+ GstMapInfo map;
+ guint size;
+ guint32 tag, ltag;
+
+ buf = NULL;
+ res = gst_pad_pull_range (avi->sinkpad, avi->offset, 12, &buf);
+ if (res != GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (avi, "pull_range failure while looking for tags");
+ goto pull_range_failed;
+ } else if (gst_buffer_get_size (buf) < 12) {
+ GST_DEBUG_OBJECT (avi,
+ "got %" G_GSIZE_FORMAT " bytes which is less than 12 bytes",
+ gst_buffer_get_size (buf));
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ tag = GST_READ_UINT32_LE (map.data);
+ size = GST_READ_UINT32_LE (map.data + 4);
+ ltag = GST_READ_UINT32_LE (map.data + 8);
+
+ GST_DEBUG ("tag %" GST_FOURCC_FORMAT ", size %u",
+ GST_FOURCC_ARGS (tag), size);
+ GST_MEMDUMP ("Tag content", map.data, map.size);
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+
+ switch (tag) {
+ case GST_RIFF_TAG_LIST:{
+ switch (ltag) {
+ case GST_RIFF_LIST_movi:
+ GST_DEBUG_OBJECT (avi,
+ "Reached the 'movi' tag, we're done with skipping");
+ goto skipping_done;
+ case GST_RIFF_LIST_INFO:
+ res =
+ gst_riff_read_chunk (element, avi->sinkpad, &avi->offset, &tag,
+ &buf);
+ if (res != GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (avi, "couldn't read INFO chunk");
+ goto pull_range_failed;
+ }
+ GST_DEBUG ("got size %" G_GSIZE_FORMAT, gst_buffer_get_size (buf));
+ if (size < 4) {
+ GST_DEBUG ("skipping INFO LIST prefix");
+ avi->offset += (4 - GST_ROUND_UP_2 (size));
+ gst_buffer_unref (buf);
+ continue;
+ }
+
+ sub = gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL, 4, -1);
+ gst_riff_parse_info (element, sub, &tags);
+ if (tags) {
+ if (avi->globaltags) {
+ gst_tag_list_insert (avi->globaltags, tags,
+ GST_TAG_MERGE_REPLACE);
+ gst_tag_list_unref (tags);
+ } else {
+ avi->globaltags = tags;
+ }
+ }
+ tags = NULL;
+ if (sub) {
+ gst_buffer_unref (sub);
+ sub = NULL;
+ }
+ gst_buffer_unref (buf);
+ /* gst_riff_read_chunk() has already advanced avi->offset */
+ break;
+ case GST_RIFF_LIST_ncdt:
+ res =
+ gst_riff_read_chunk (element, avi->sinkpad, &avi->offset, &tag,
+ &buf);
+ if (res != GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (avi, "couldn't read ncdt chunk");
+ goto pull_range_failed;
+ }
+ GST_DEBUG ("got size %" G_GSIZE_FORMAT, gst_buffer_get_size (buf));
+ if (size < 4) {
+ GST_DEBUG ("skipping ncdt LIST prefix");
+ avi->offset += (4 - GST_ROUND_UP_2 (size));
+ gst_buffer_unref (buf);
+ continue;
+ }
+
+ sub = gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL, 4, -1);
+ gst_avi_demux_parse_ncdt (avi, sub, &tags);
+ if (tags) {
+ if (avi->globaltags) {
+ gst_tag_list_insert (avi->globaltags, tags,
+ GST_TAG_MERGE_REPLACE);
+ gst_tag_list_unref (tags);
+ } else {
+ avi->globaltags = tags;
+ }
+ }
+ tags = NULL;
+ if (sub) {
+ gst_buffer_unref (sub);
+ sub = NULL;
+ }
+ gst_buffer_unref (buf);
+ /* gst_riff_read_chunk() has already advanced avi->offset */
+ break;
+ default:
+ GST_WARNING_OBJECT (avi,
+ "Skipping unknown list tag %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (ltag));
+ avi->offset += 8 + GST_ROUND_UP_2 (size);
+ break;
+ }
+ }
+ break;
+ default:
+ GST_WARNING_OBJECT (avi, "Skipping unknown tag %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (tag));
+ /* Fall-through */
+ case GST_MAKE_FOURCC ('J', 'U', 'N', 'Q'):
+ case GST_MAKE_FOURCC ('J', 'U', 'N', 'K'):
+ /* Only get buffer for debugging if the memdump is needed */
+ if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >= 9) {
+ buf = NULL;
+ res = gst_pad_pull_range (avi->sinkpad, avi->offset, size, &buf);
+ if (res != GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (avi, "couldn't read chunk for memdump");
+ goto pull_range_failed;
+ }
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ GST_MEMDUMP ("Junk", map.data, map.size);
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ }
+ avi->offset += 8 + GST_ROUND_UP_2 (size);
+ break;
+ }
+ } while (1);
+ skipping_done:
+
+ GST_DEBUG_OBJECT (avi, "skipping done ... (streams=%u, stream[0].indexes=%p)",
+ avi->num_streams, avi->stream[0].indexes);
+
+ /* create or read stream index (for seeking) */
+ if (avi->stream[0].indexes != NULL) {
+ /* we read a super index already (gst_avi_demux_parse_superindex() ) */
+ gst_avi_demux_read_subindexes_pull (avi);
+ }
+ if (!avi->have_index) {
+ if (avi->avih->flags & GST_RIFF_AVIH_HASINDEX)
+ gst_avi_demux_stream_index (avi);
+
+ /* still no index, scan */
+ if (!avi->have_index) {
+ gst_avi_demux_stream_scan (avi);
+
+ /* still no index.. this is a fatal error for now.
+ * FIXME, we should switch to plain push mode without seeking
+ * instead of failing. */
+ if (!avi->have_index)
+ goto no_index;
+ }
+ }
+ /* use the indexes now to construct nice durations */
+ gst_avi_demux_calculate_durations_from_index (avi);
+
+ gst_avi_demux_expose_streams (avi, FALSE);
+
+ /* do initial seek to the default segment values */
+ gst_avi_demux_do_seek (avi, &avi->segment, 0);
+
+ /* create initial NEWSEGMENT event */
+ if (avi->seg_event)
+ gst_event_unref (avi->seg_event);
+ avi->seg_event = gst_event_new_segment (&avi->segment);
+ if (avi->segment_seqnum)
+ gst_event_set_seqnum (avi->seg_event, avi->segment_seqnum);
+
+ stamp = gst_util_get_timestamp () - stamp;
+ GST_DEBUG_OBJECT (avi, "pulling header took %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (stamp));
+
+ /* at this point we know all the streams, so we can emit the
+ * no-more-pads signal */
+ GST_DEBUG_OBJECT (avi, "signaling no more pads");
+ gst_element_no_more_pads (GST_ELEMENT_CAST (avi));
+
+ return GST_FLOW_OK;
+
+ /* ERRORS */
+ no_list:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("Invalid AVI header (no LIST at start): %"
+ GST_FOURCC_FORMAT, GST_FOURCC_ARGS (tag)));
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+ no_header:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("Invalid AVI header (no hdrl at start): %"
+ GST_FOURCC_FORMAT, GST_FOURCC_ARGS (tag)));
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+ no_avih:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("Invalid AVI header (no avih at start): %"
+ GST_FOURCC_FORMAT, GST_FOURCC_ARGS (tag)));
+ if (sub)
+ gst_buffer_unref (sub);
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+ invalid_avih:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("Invalid AVI header (cannot parse avih at start)"));
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+ no_streams:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL), ("No streams found"));
+ return GST_FLOW_ERROR;
+ }
+ no_index:
+ {
+ GST_WARNING ("file has no index, or the index is too large");
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("Could not get/create index"));
+ return GST_FLOW_ERROR;
+ }
+ pull_range_failed:
+ {
+ if (res == GST_FLOW_FLUSHING)
+ return res;
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("pull_range flow reading header: %s", gst_flow_get_name (res)));
+ return res;
+ }
+ }
+
+ /* move a stream to @index */
+ static void
+ gst_avi_demux_move_stream (GstAviDemux * avi, GstAviStream * stream,
+ GstSegment * segment, guint index)
+ {
+ GST_DEBUG_OBJECT (avi, "Move stream %d to %u", stream->num, index);
+
+ if (segment->rate < 0.0) {
+ guint next_key;
+ /* Because we don't know the frame order we need to push from the previous
+ * keyframe to the next keyframe. If there is a smart decoder downstream it
+ * will notice that too many encoded frames have been sent and return EOS
+ * once enough frames have been decoded to fill the segment. */
+ next_key = gst_avi_demux_index_next (avi, stream, index, TRUE);
+
+ /* FIXME, we go back to 0, we should look at segment.start. We will however
+ * stop earlier when we see a timestamp < segment.start */
+ stream->start_entry = 0;
+ stream->step_entry = index;
+ stream->current_entry = index;
+ stream->stop_entry = next_key;
+
+ GST_DEBUG_OBJECT (avi, "reverse seek: start %u, step %u, stop %u",
+ stream->start_entry, stream->step_entry, stream->stop_entry);
+ } else {
+ stream->start_entry = index;
+ stream->step_entry = index;
+ stream->stop_entry = gst_avi_demux_index_last (avi, stream);
+ }
+ if (stream->current_entry != index) {
+ GST_DEBUG_OBJECT (avi, "Move DISCONT from %u to %u",
+ stream->current_entry, index);
+ stream->current_entry = index;
+ stream->discont = TRUE;
+ }
+
+ /* update the buffer info */
+ gst_avi_demux_get_buffer_info (avi, stream, index,
+ &stream->current_timestamp, &stream->current_ts_end,
+ &stream->current_offset, &stream->current_offset_end);
+
+ GST_DEBUG_OBJECT (avi, "Moved to %u, ts %" GST_TIME_FORMAT
+ ", ts_end %" GST_TIME_FORMAT ", off %" G_GUINT64_FORMAT
+ ", off_end %" G_GUINT64_FORMAT, index,
+ GST_TIME_ARGS (stream->current_timestamp),
+ GST_TIME_ARGS (stream->current_ts_end), stream->current_offset,
+ stream->current_offset_end);
+
+ GST_DEBUG_OBJECT (avi, "Seeking to offset %" G_GUINT64_FORMAT,
+ stream->index[index].offset);
+ }
+
+ /*
+ * Do the actual seeking.
+ */
+ static gboolean
+ gst_avi_demux_do_seek (GstAviDemux * avi, GstSegment * segment,
+ GstSeekFlags flags)
+ {
+ GstClockTime seek_time;
+ gboolean keyframe, before, after;
+ guint i, index;
+ GstAviStream *stream;
+ gboolean next;
+
+ seek_time = segment->position;
+ keyframe = ! !(flags & GST_SEEK_FLAG_KEY_UNIT);
+ before = ! !(flags & GST_SEEK_FLAG_SNAP_BEFORE);
+ after = ! !(flags & GST_SEEK_FLAG_SNAP_AFTER);
+
+ GST_DEBUG_OBJECT (avi, "seek to: %" GST_TIME_FORMAT
+ " keyframe seeking:%d, %s", GST_TIME_ARGS (seek_time), keyframe,
+ snap_types[before ? 1 : 0][after ? 1 : 0]);
+
+ /* FIXME, this code assumes the main stream with keyframes is stream 0,
+ * which is mostly correct... */
+ stream = &avi->stream[avi->main_stream];
+
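+ /* snap in the direction of playback; with a negative rate the
+ * before/after snap flags are mirrored */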
+ next = after && !before;
+ if (segment->rate < 0)
+ next = !next;
+
+ /* get the entry index for the requested position */
+ index = gst_avi_demux_index_for_time (avi, stream, seek_time, next);
+ GST_DEBUG_OBJECT (avi, "Got entry %u", index);
+ if (index == -1)
+ return FALSE;
+
+ /* check if we are already on a keyframe */
+ if (!ENTRY_IS_KEYFRAME (&stream->index[index])) {
+ if (next) {
+ GST_DEBUG_OBJECT (avi, "not keyframe, searching forward");
+ /* now go to the next keyframe, this is where we should start
+ * decoding from. */
+ index = gst_avi_demux_index_next (avi, stream, index, TRUE);
+ GST_DEBUG_OBJECT (avi, "next keyframe at %u", index);
+ } else {
+ GST_DEBUG_OBJECT (avi, "not keyframe, searching back");
+ /* now go to the previous keyframe, this is where we should start
+ * decoding from. */
+ index = gst_avi_demux_index_prev (avi, stream, index, TRUE);
+ GST_DEBUG_OBJECT (avi, "previous keyframe at %u", index);
+ }
+ }
+
+ /* move the main stream to this position */
+ gst_avi_demux_move_stream (avi, stream, segment, index);
+
+ if (keyframe) {
+ /* when seeking to a keyframe, we update the result seek time
+ * to the time of the keyframe. */
+ seek_time = stream->current_timestamp;
+ GST_DEBUG_OBJECT (avi, "keyframe adjusted to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (seek_time));
+ /* the seek time is always the position ... */
+ segment->position = seek_time;
+ /* ... and start and stream time when going forwards,
+ * otherwise only stop time */
+ if (segment->rate > 0.0)
+ segment->start = segment->time = seek_time;
+ else
+ segment->stop = seek_time;
+ }
+
+ /* now set DISCONT and align the other streams */
+ for (i = 0; i < avi->num_streams; i++) {
+ GstAviStream *ostream;
+
+ ostream = &avi->stream[i];
+ if ((ostream == stream) || (ostream->index == NULL))
+ continue;
+
+ /* get the entry index for the requested position */
+ index = gst_avi_demux_index_for_time (avi, ostream, seek_time, FALSE);
+ if (index == -1)
+ continue;
+
+ /* move to previous keyframe */
+ if (!ENTRY_IS_KEYFRAME (&ostream->index[index]))
+ index = gst_avi_demux_index_prev (avi, ostream, index, TRUE);
+
+ gst_avi_demux_move_stream (avi, ostream, segment, index);
+ }
+ GST_DEBUG_OBJECT (avi, "done seek to: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (seek_time));
+
+ return TRUE;
+ }
+
+ /*
+ * Handle seek event in pull mode.
+ */
+ static gboolean
+ gst_avi_demux_handle_seek (GstAviDemux * avi, GstPad * pad, GstEvent * event)
+ {
+ gdouble rate;
+ GstFormat format;
+ GstSeekFlags flags;
+ GstSeekType cur_type = GST_SEEK_TYPE_NONE, stop_type;
+ gint64 cur, stop;
+ gboolean flush;
+ gboolean update;
+ GstSegment seeksegment = { 0, };
+ gint i;
+ guint32 seqnum = 0;
+
+ if (event) {
+ GST_DEBUG_OBJECT (avi, "doing seek with event");
+
+ gst_event_parse_seek (event, &rate, &format, &flags,
+ &cur_type, &cur, &stop_type, &stop);
+ seqnum = gst_event_get_seqnum (event);
+
+ /* we have to have a format as the segment format. Try to convert
+ * if not. */
+ if (format != GST_FORMAT_TIME) {
+ gboolean res = TRUE;
+
+ if (cur_type != GST_SEEK_TYPE_NONE)
+ res = gst_pad_query_convert (pad, format, cur, GST_FORMAT_TIME, &cur);
+ if (res && stop_type != GST_SEEK_TYPE_NONE)
+ res = gst_pad_query_convert (pad, format, stop, GST_FORMAT_TIME, &stop);
+ if (!res)
+ goto no_format;
+
+ format = GST_FORMAT_TIME;
+ }
+ GST_DEBUG_OBJECT (avi,
+ "seek requested: rate %g cur %" GST_TIME_FORMAT " stop %"
+ GST_TIME_FORMAT, rate, GST_TIME_ARGS (cur), GST_TIME_ARGS (stop));
+ /* FIXME: can we do anything with rate!=1.0 */
+ } else {
+ GST_DEBUG_OBJECT (avi, "doing seek without event");
+ flags = 0;
+ rate = 1.0;
+ }
+
+ /* save flush flag */
+ flush = flags & GST_SEEK_FLAG_FLUSH;
+
+ if (flush) {
+ GstEvent *fevent = gst_event_new_flush_start ();
+
+ if (seqnum)
+ gst_event_set_seqnum (fevent, seqnum);
+ /* for a flushing seek, we send a flush_start on all pads. This will
+ * eventually stop streaming with a FLUSHING flow return. We can thus
+ * eventually take the STREAM_LOCK. */
+ GST_DEBUG_OBJECT (avi, "sending flush start");
+ gst_avi_demux_push_event (avi, gst_event_ref (fevent));
+ gst_pad_push_event (avi->sinkpad, fevent);
+ } else {
+ /* a non-flushing seek, we PAUSE the task so that we can take the
+ * STREAM_LOCK */
+ GST_DEBUG_OBJECT (avi, "non flushing seek, pausing task");
+ gst_pad_pause_task (avi->sinkpad);
+ }
+
+ /* wait for streaming to stop */
+ GST_DEBUG_OBJECT (avi, "wait for streaming to stop");
+ GST_PAD_STREAM_LOCK (avi->sinkpad);
+
+ /* copy segment, we need this because we still need the old
+ * segment when we close the current segment. */
+ memcpy (&seeksegment, &avi->segment, sizeof (GstSegment));
+
+ if (event) {
+ GST_DEBUG_OBJECT (avi, "configuring seek");
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
+ cur_type, cur, stop_type, stop, &update);
+ }
+ /* do the seek, seeksegment.position contains the new position, this
+ * actually never fails. */
+ gst_avi_demux_do_seek (avi, &seeksegment, flags);
+
+ if (flush) {
+ GstEvent *fevent = gst_event_new_flush_stop (TRUE);
+
+ if (seqnum)
+ gst_event_set_seqnum (fevent, seqnum);
+
+ GST_DEBUG_OBJECT (avi, "sending flush stop");
+ gst_avi_demux_push_event (avi, gst_event_ref (fevent));
+ gst_pad_push_event (avi->sinkpad, fevent);
+ }
+
+ /* now update the real segment info */
+ memcpy (&avi->segment, &seeksegment, sizeof (GstSegment));
+
+ /* post the SEGMENT_START message when we do segmented playback */
+ if (avi->segment.flags & GST_SEEK_FLAG_SEGMENT) {
+ GstMessage *segment_start_msg =
+ gst_message_new_segment_start (GST_OBJECT_CAST (avi),
+ avi->segment.format, avi->segment.position);
+ if (seqnum)
+ gst_message_set_seqnum (segment_start_msg, seqnum);
+ gst_element_post_message (GST_ELEMENT_CAST (avi), segment_start_msg);
+ }
+
+ /* queue the segment event for the streaming thread. */
+ if (avi->seg_event)
+ gst_event_unref (avi->seg_event);
+ avi->seg_event = gst_event_new_segment (&avi->segment);
+ if (seqnum)
+ gst_event_set_seqnum (avi->seg_event, seqnum);
+ avi->segment_seqnum = seqnum;
+
+ if (!avi->streaming) {
+ gst_pad_start_task (avi->sinkpad, (GstTaskFunction) gst_avi_demux_loop,
+ avi->sinkpad, NULL);
+ }
+ /* reset the last flow and mark discont, seek is always DISCONT */
+ for (i = 0; i < avi->num_streams; i++) {
+ GST_DEBUG_OBJECT (avi, "marking DISCONT");
+ avi->stream[i].discont = TRUE;
+ }
+ /* likewise for the whole new segment */
+ gst_flow_combiner_reset (avi->flowcombiner);
+ GST_PAD_STREAM_UNLOCK (avi->sinkpad);
+
+ return TRUE;
+
+ /* ERRORS */
+ no_format:
+ {
+ GST_DEBUG_OBJECT (avi, "unsupported format given, seek aborted.");
+ return FALSE;
+ }
+ }
+
+ /*
+ * Handle seek event in push mode.
+ */
+ static gboolean
+ avi_demux_handle_seek_push (GstAviDemux * avi, GstPad * pad, GstEvent * event)
+ {
+ gdouble rate;
+ GstFormat format;
+ GstSeekFlags flags;
+ GstSeekType cur_type = GST_SEEK_TYPE_NONE, stop_type;
+ gint64 cur, stop;
+ gboolean keyframe, before, after, next;
+ GstAviStream *stream;
+ guint index;
+ guint n, str_num;
+ guint64 min_offset;
+ GstSegment seeksegment;
+ gboolean update;
+
+ /* check we have the index */
+ if (!avi->have_index) {
+ GST_DEBUG_OBJECT (avi, "no seek index built, seek aborted.");
+ return FALSE;
+ } else {
+ GST_DEBUG_OBJECT (avi, "doing push-based seek with event");
+ }
+
+ gst_event_parse_seek (event, &rate, &format, &flags,
+ &cur_type, &cur, &stop_type, &stop);
+
+ if (format != GST_FORMAT_TIME) {
+ gboolean res = TRUE;
+
+ if (cur_type != GST_SEEK_TYPE_NONE)
+ res = gst_pad_query_convert (pad, format, cur, GST_FORMAT_TIME, &cur);
+ if (res && stop_type != GST_SEEK_TYPE_NONE)
+ res = gst_pad_query_convert (pad, format, stop, GST_FORMAT_TIME, &stop);
+ if (!res) {
+ GST_DEBUG_OBJECT (avi, "unsupported format given, seek aborted.");
+ return FALSE;
+ }
+
+ format = GST_FORMAT_TIME;
+ }
+
+ /* let gst_segment handle any tricky stuff */
+ GST_DEBUG_OBJECT (avi, "configuring seek");
+ memcpy (&seeksegment, &avi->segment, sizeof (GstSegment));
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
+ cur_type, cur, stop_type, stop, &update);
+
+ keyframe = ! !(flags & GST_SEEK_FLAG_KEY_UNIT);
+ cur = seeksegment.position;
+ before = ! !(flags & GST_SEEK_FLAG_SNAP_BEFORE);
+ after = ! !(flags & GST_SEEK_FLAG_SNAP_AFTER);
+
+ GST_DEBUG_OBJECT (avi,
+ "Seek requested: ts %" GST_TIME_FORMAT " stop %" GST_TIME_FORMAT
+ ", kf %u, %s, rate %lf", GST_TIME_ARGS (cur), GST_TIME_ARGS (stop),
+ keyframe, snap_types[before ? 1 : 0][after ? 1 : 0], rate);
+
+ if (rate < 0) {
+ GST_DEBUG_OBJECT (avi, "negative rate seek not supported in push mode");
+ return FALSE;
+ }
+
+ /* FIXME, this code assumes the main stream with keyframes is stream 0,
+ * which is mostly correct... */
+ str_num = avi->main_stream;
+ stream = &avi->stream[str_num];
+
+ next = after && !before;
+ if (seeksegment.rate < 0)
+ next = !next;
+
+ /* get the entry index for the requested position */
+ index = gst_avi_demux_index_for_time (avi, stream, cur, next);
+ GST_DEBUG_OBJECT (avi, "str %u: Found entry %u for %" GST_TIME_FORMAT,
+ str_num, index, GST_TIME_ARGS (cur));
+ if (index == -1)
+ return FALSE;
+
+ /* check if we are already on a keyframe */
+ if (!ENTRY_IS_KEYFRAME (&stream->index[index])) {
+ if (next) {
+ GST_DEBUG_OBJECT (avi, "Entry is not a keyframe - searching forward");
+ /* now go to the next keyframe, this is where we should start
+ * decoding from. */
+ index = gst_avi_demux_index_next (avi, stream, index, TRUE);
+ GST_DEBUG_OBJECT (avi, "Found next keyframe at %u", index);
+ } else {
+ GST_DEBUG_OBJECT (avi, "Entry is not a keyframe - searching back");
+ /* now go to the previous keyframe, this is where we should start
+ * decoding from. */
+ index = gst_avi_demux_index_prev (avi, stream, index, TRUE);
+ GST_DEBUG_OBJECT (avi, "Found previous keyframe at %u", index);
+ }
+ }
+
+ gst_avi_demux_get_buffer_info (avi, stream, index,
+ &stream->current_timestamp, &stream->current_ts_end,
+ &stream->current_offset, &stream->current_offset_end);
+
+ /* re-use cur to be the timestamp of the seek as it _will_ be */
+ cur = stream->current_timestamp;
+
+ min_offset = stream->index[index].offset;
+ avi->seek_kf_offset = min_offset - 8;
+
+ GST_DEBUG_OBJECT (avi,
+ "Seek to: ts %" GST_TIME_FORMAT " (on str %u, idx %u, offset %"
+ G_GUINT64_FORMAT ")", GST_TIME_ARGS (stream->current_timestamp), str_num,
+ index, min_offset);
+
+ for (n = 0; n < avi->num_streams; n++) {
+ GstAviStream *str = &avi->stream[n];
+ guint idx;
+
+ if (n == avi->main_stream)
+ continue;
+
+ /* get the entry index for the requested position */
+ idx = gst_avi_demux_index_for_time (avi, str, cur, FALSE);
+ GST_DEBUG_OBJECT (avi, "str %u: Found entry %u for %" GST_TIME_FORMAT, n,
+ idx, GST_TIME_ARGS (cur));
+ if (idx == -1)
+ continue;
+
+ /* check if we are already on a keyframe */
+ if (!ENTRY_IS_KEYFRAME (&str->index[idx])) {
+ if (next) {
+ GST_DEBUG_OBJECT (avi, "Entry is not a keyframe - searching forward");
+ /* now go to the next keyframe, this is where we should start
+ * decoding from. */
+ idx = gst_avi_demux_index_next (avi, str, idx, TRUE);
+ GST_DEBUG_OBJECT (avi, "Found next keyframe at %u", idx);
+ } else {
+ GST_DEBUG_OBJECT (avi, "Entry is not a keyframe - searching back");
+ /* now go to the previous keyframe, this is where we should start
+ * decoding from. */
+ idx = gst_avi_demux_index_prev (avi, str, idx, TRUE);
+ GST_DEBUG_OBJECT (avi, "Found previous keyframe at %u", idx);
+ }
+ }
+
+ gst_avi_demux_get_buffer_info (avi, str, idx,
+ &str->current_timestamp, &str->current_ts_end,
+ &str->current_offset, &str->current_offset_end);
+
+ if (str->index[idx].offset < min_offset) {
+ min_offset = str->index[idx].offset;
+ GST_DEBUG_OBJECT (avi,
+ "Found an earlier offset at %" G_GUINT64_FORMAT ", str %u",
+ min_offset, n);
+ str_num = n;
+ stream = str;
+ index = idx;
+ }
+ }
+
+ GST_DEBUG_OBJECT (avi,
+ "Seek performed: str %u, offset %" G_GUINT64_FORMAT ", idx %u, ts %"
+ GST_TIME_FORMAT ", ts_end %" GST_TIME_FORMAT ", off %" G_GUINT64_FORMAT
+ ", off_end %" G_GUINT64_FORMAT, str_num, min_offset, index,
+ GST_TIME_ARGS (stream->current_timestamp),
+ GST_TIME_ARGS (stream->current_ts_end), stream->current_offset,
+ stream->current_offset_end);
+
+ /* index data refers to data, not chunk header (for pull mode convenience) */
+ min_offset -= 8;
+ GST_DEBUG_OBJECT (avi, "seeking to chunk at offset %" G_GUINT64_FORMAT,
+ min_offset);
+
+ if (!perform_seek_to_offset (avi, min_offset, gst_event_get_seqnum (event))) {
+ GST_DEBUG_OBJECT (avi, "seek event failed!");
+ return FALSE;
+ }
+
+ return TRUE;
+ }
+
+ /*
+ * Decide whether we can perform the seek right away, or whether the chain
+ * function first has to build the seek index before the seek can be handled.
+ */
+ static gboolean
+ gst_avi_demux_handle_seek_push (GstAviDemux * avi, GstPad * pad,
+ GstEvent * event)
+ {
+ /* check for having parsed index already */
+ if (!avi->have_index) {
+ guint64 offset = 0;
+ gboolean building_index;
+
+ GST_OBJECT_LOCK (avi);
+ /* handle the seek event in the chain function */
+ avi->state = GST_AVI_DEMUX_SEEK;
+
+ /* copy the event */
+ if (avi->seek_event)
+ gst_event_unref (avi->seek_event);
+ avi->seek_event = gst_event_ref (event);
+
+ /* set the building_index flag so that only one thread can setup the
+ * structures for index seeking. */
+ building_index = avi->building_index;
+ if (!building_index) {
+ avi->building_index = TRUE;
+ if (avi->stream[0].indexes) {
+ avi->odml_stream = 0;
+ avi->odml_subidxs = avi->stream[avi->odml_stream].indexes;
+ offset = avi->odml_subidxs[0];
+ } else {
+ offset = avi->idx1_offset;
+ }
+ }
+ GST_OBJECT_UNLOCK (avi);
+
+ if (!building_index) {
+ /* seek to the first subindex or legacy index */
+ GST_INFO_OBJECT (avi,
+ "Seeking to legacy index/first subindex at %" G_GUINT64_FORMAT,
+ offset);
+ return perform_seek_to_offset (avi, offset, gst_event_get_seqnum (event));
+ }
+
+ /* FIXME: we have to always return true so that we don't block the seek
+ * thread.
+ * Note: maybe it is OK to return true if we're still building the index */
+ return TRUE;
+ }
+
+ return avi_demux_handle_seek_push (avi, pad, event);
+ }
+
+ /*
+ * Helper for gst_avi_demux_invert()
+ */
+ static inline void
+ swap_line (guint8 * d1, guint8 * d2, guint8 * tmp, gint bytes)
+ {
+ memcpy (tmp, d1, bytes);
+ memcpy (d1, d2, bytes);
+ memcpy (d2, tmp, bytes);
+ }
+
+
+ #define gst_avi_demux_is_uncompressed(fourcc) \
+ (fourcc == GST_RIFF_DIB || \
+ fourcc == GST_RIFF_rgb || \
+ fourcc == GST_RIFF_RGB || fourcc == GST_RIFF_RAW)
+
+ /*
+ * Invert DIB buffers... Takes existing buffer and
+ * returns either the buffer or a new one (with old
+ * one dereferenced).
+ * FIXME: can't we preallocate tmp? and remember stride, bpp?
+ */
+ static GstBuffer *
+ gst_avi_demux_invert (GstAviStream * stream, GstBuffer * buf)
+ {
+ gint y, w, h;
+ gint bpp, stride;
+ guint8 *tmp = NULL;
+ GstMapInfo map;
+ guint32 fourcc;
+
+ if (stream->strh->type != GST_RIFF_FCC_vids)
+ return buf;
+
+ if (stream->strf.vids == NULL) {
+ GST_WARNING ("Failed to retrieve vids for stream");
+ return buf;
+ }
+
+ fourcc = (stream->strf.vids->compression) ?
+ stream->strf.vids->compression : stream->strh->fcc_handler;
+ if (!gst_avi_demux_is_uncompressed (fourcc)) {
+ return buf; /* ignore non-DIB buffers */
+ }
+
+ /* raw rgb data is stored topdown, but instead of inverting the buffer, */
+ /* some tools just negate the height field in the header (e.g. ffmpeg) */
+ if (((gint32) stream->strf.vids->height) < 0)
+ return buf;
+
+ h = stream->strf.vids->height;
+ w = stream->strf.vids->width;
+ bpp = stream->strf.vids->bit_cnt ? stream->strf.vids->bit_cnt : 8;
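+ /* DIB rows are padded to 4-byte boundaries */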
+ stride = GST_ROUND_UP_4 (w * (bpp / 8));
+
+ buf = gst_buffer_make_writable (buf);
+
+ gst_buffer_map (buf, &map, GST_MAP_READWRITE);
+ if (map.size < (stride * h)) {
+ GST_WARNING ("Buffer is smaller than reported Width x Height x Depth");
+ gst_buffer_unmap (buf, &map);
+ return buf;
+ }
+
+ tmp = g_malloc (stride);
+
+ for (y = 0; y < h / 2; y++) {
+ swap_line (map.data + stride * y, map.data + stride * (h - 1 - y), tmp,
+ stride);
+ }
+
+ g_free (tmp);
+
+ gst_buffer_unmap (buf, &map);
+
+ /* append palette to paletted RGB8 buffer data */
+ if (stream->rgb8_palette != NULL)
+ buf = gst_buffer_append (buf, gst_buffer_ref (stream->rgb8_palette));
+
+ return buf;
+ }
+
+ #if 0
+ static void
+ gst_avi_demux_add_assoc (GstAviDemux * avi, GstAviStream * stream,
+ GstClockTime timestamp, guint64 offset, gboolean keyframe)
+ {
+ /* do not add indefinitely for open-ended streaming */
+ if (G_UNLIKELY (avi->element_index && avi->seekable)) {
+ GST_LOG_OBJECT (avi, "adding association %" GST_TIME_FORMAT "-> %"
+ G_GUINT64_FORMAT, GST_TIME_ARGS (timestamp), offset);
+ gst_index_add_association (avi->element_index, avi->index_id,
+ keyframe ? GST_ASSOCIATION_FLAG_KEY_UNIT :
+ GST_ASSOCIATION_FLAG_DELTA_UNIT, GST_FORMAT_TIME, timestamp,
+ GST_FORMAT_BYTES, offset, NULL);
+ /* current_entry is DEFAULT (frame #) */
+ gst_index_add_association (avi->element_index, stream->index_id,
+ keyframe ? GST_ASSOCIATION_FLAG_KEY_UNIT :
+ GST_ASSOCIATION_FLAG_DELTA_UNIT, GST_FORMAT_TIME, timestamp,
+ GST_FORMAT_BYTES, offset, GST_FORMAT_DEFAULT, stream->current_entry,
+ NULL);
+ }
+ }
+ #endif
+
+ /*
+ * Returns the aggregated GstFlowReturn.
+ */
+ static GstFlowReturn
+ gst_avi_demux_combine_flows (GstAviDemux * avi, GstAviStream * stream,
+ GstFlowReturn ret)
+ {
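+ /* the flow combiner returns EOS only once all pads have returned EOS,
+ * ignores NOT_LINKED while other pads are still active, and passes
+ * fatal errors through as-is */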
+ GST_LOG_OBJECT (avi, "Stream %s:%s flow return: %s",
+ GST_DEBUG_PAD_NAME (stream->pad), gst_flow_get_name (ret));
+ ret = gst_flow_combiner_update_pad_flow (avi->flowcombiner, stream->pad, ret);
+ GST_LOG_OBJECT (avi, "combined to return %s", gst_flow_get_name (ret));
+
+ return ret;
+ }
+
+ /* move @stream to the next position in its index */
+ static GstFlowReturn
+ gst_avi_demux_advance (GstAviDemux * avi, GstAviStream * stream,
+ GstFlowReturn ret)
+ {
+ guint old_entry, new_entry;
+
+ old_entry = stream->current_entry;
+ /* move forwards */
+ new_entry = old_entry + 1;
+
+ /* see if we reached the end */
+ if (new_entry >= stream->stop_entry) {
+ if (avi->segment.rate < 0.0) {
+ if (stream->step_entry == stream->start_entry) {
+ /* we stepped all the way to the start, eos */
+ GST_DEBUG_OBJECT (avi, "reverse reached start %u", stream->start_entry);
+ goto eos;
+ }
+ /* backwards, stop becomes step, find a new step */
+ stream->stop_entry = stream->step_entry;
+ stream->step_entry = gst_avi_demux_index_prev (avi, stream,
+ stream->stop_entry, TRUE);
+
+ GST_DEBUG_OBJECT (avi,
+ "reverse playback jump: start %u, step %u, stop %u",
+ stream->start_entry, stream->step_entry, stream->stop_entry);
+
+ /* and start from the previous keyframe now */
+ new_entry = stream->step_entry;
+ } else {
+ /* EOS */
+ GST_DEBUG_OBJECT (avi, "forward reached stop %u", stream->stop_entry);
+ goto eos;
+ }
+ }
+
+ if (new_entry != old_entry) {
+ stream->current_entry = new_entry;
+ stream->current_total = stream->index[new_entry].total;
+
+ if (new_entry == old_entry + 1) {
+ GST_DEBUG_OBJECT (avi, "moved forwards from %u to %u",
+ old_entry, new_entry);
+ /* we simply moved one step forwards, reuse current info */
+ stream->current_timestamp = stream->current_ts_end;
+ stream->current_offset = stream->current_offset_end;
+ gst_avi_demux_get_buffer_info (avi, stream, new_entry,
+ NULL, &stream->current_ts_end, NULL, &stream->current_offset_end);
+ } else {
+ /* we moved DISCONT, full update */
+ gst_avi_demux_get_buffer_info (avi, stream, new_entry,
+ &stream->current_timestamp, &stream->current_ts_end,
+ &stream->current_offset, &stream->current_offset_end);
+ /* and MARK discont for this stream */
+ stream->discont = TRUE;
+ GST_DEBUG_OBJECT (avi, "Moved from %u to %u, ts %" GST_TIME_FORMAT
+ ", ts_end %" GST_TIME_FORMAT ", off %" G_GUINT64_FORMAT
+ ", off_end %" G_GUINT64_FORMAT, old_entry, new_entry,
+ GST_TIME_ARGS (stream->current_timestamp),
+ GST_TIME_ARGS (stream->current_ts_end), stream->current_offset,
+ stream->current_offset_end);
+ }
+ }
+ return ret;
+
+ /* ERROR */
+ eos:
+ {
+ GST_DEBUG_OBJECT (avi, "we are EOS");
+ /* setting current_timestamp to -1 marks EOS */
+ stream->current_timestamp = -1;
+ return GST_FLOW_EOS;
+ }
+ }
+
+ /* find the stream with the lowest current position when going forwards or with
+ * the highest position when going backwards, this is the stream
+ * we should push from next */
+ static gint
+ gst_avi_demux_find_next (GstAviDemux * avi, gfloat rate)
+ {
+ guint64 min_time, max_time;
+ guint stream_num, i;
+
+ max_time = 0;
+ min_time = G_MAXUINT64;
+ stream_num = -1;
+
+ for (i = 0; i < avi->num_streams; i++) {
+ guint64 position;
+ GstAviStream *stream;
+
+ stream = &avi->stream[i];
+
+ /* ignore streams that finished */
+ if (stream->pad && GST_PAD_LAST_FLOW_RETURN (stream->pad) == GST_FLOW_EOS)
+ continue;
+
+ position = stream->current_timestamp;
+
+ /* position of -1 is EOS */
+ if (position != -1) {
+ if (rate > 0.0 && position < min_time) {
+ min_time = position;
+ stream_num = i;
+ } else if (rate < 0.0 && position >= max_time) {
+ max_time = position;
+ stream_num = i;
+ }
+ }
+ }
+ return stream_num;
+ }
+
+ static GstBuffer *
+ gst_avi_demux_align_buffer (GstAviDemux * demux,
+ GstBuffer * buffer, gsize alignment)
+ {
+ GstMapInfo map;
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+ if (map.size < sizeof (guintptr)) {
+ gst_buffer_unmap (buffer, &map);
+ return buffer;
+ }
+
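+ /* the mask test below assumes alignment is a power of two, matching
+ * GstAllocationParams, where align must be a power of two minus one */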
+ if (((guintptr) map.data) & (alignment - 1)) {
+ GstBuffer *new_buffer;
+ GstAllocationParams params = { 0, alignment - 1, 0, 0, };
+
+ new_buffer = gst_buffer_new_allocate (NULL,
+ gst_buffer_get_size (buffer), &params);
+
+ /* copy the data "by hand" so the requested alignment is kept */
+ gst_buffer_fill (new_buffer, 0, map.data, map.size);
+
+ gst_buffer_copy_into (new_buffer, buffer, GST_BUFFER_COPY_METADATA, 0, -1);
+ GST_DEBUG_OBJECT (demux,
+ "We want output aligned on %" G_GSIZE_FORMAT ", reallocated",
+ alignment);
+
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+
+ return new_buffer;
+ }
+
+ gst_buffer_unmap (buffer, &map);
+ return buffer;
+ }
+
+ static GstFlowReturn
+ gst_avi_demux_loop_data (GstAviDemux * avi)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint stream_num;
+ GstAviStream *stream;
+ gboolean processed = FALSE;
+ GstBuffer *buf;
+ guint64 offset, size;
+ GstClockTime timestamp, duration;
+ guint64 out_offset, out_offset_end;
+ gboolean keyframe;
+ GstAviIndexEntry *entry;
+
+ do {
+ stream_num = gst_avi_demux_find_next (avi, avi->segment.rate);
+
+ /* all are EOS */
+ if (G_UNLIKELY (stream_num == -1)) {
+ GST_DEBUG_OBJECT (avi, "all streams are EOS");
+ goto eos;
+ }
+
+ /* we have the stream now */
+ stream = &avi->stream[stream_num];
+
+ /* skip streams without pads */
+ if (!stream->pad) {
+ GST_DEBUG_OBJECT (avi, "skipping entry from stream %d without pad",
+ stream_num);
+ goto next;
+ }
+
+ /* get the timing info for the entry */
+ timestamp = stream->current_timestamp;
+ duration = stream->current_ts_end - timestamp;
+ out_offset = stream->current_offset;
+ out_offset_end = stream->current_offset_end;
+
+ /* get the entry data info */
+ entry = &stream->index[stream->current_entry];
+ offset = entry->offset;
+ size = entry->size;
+ keyframe = ENTRY_IS_KEYFRAME (entry);
+
+ /* skip empty entries */
+ if (size == 0) {
+ GST_DEBUG_OBJECT (avi, "Skipping entry %u (%" G_GUINT64_FORMAT ", %p)",
+ stream->current_entry, size, stream->pad);
+ goto next;
+ }
+
+ if (avi->segment.rate > 0.0) {
+ /* only check this for forwards playback for now */
+ if (keyframe && GST_CLOCK_TIME_IS_VALID (avi->segment.stop)
+ && (timestamp > avi->segment.stop)) {
+ goto eos_stop;
+ }
+ } else {
+ if (keyframe && GST_CLOCK_TIME_IS_VALID (avi->segment.start)
+ && (timestamp < avi->segment.start))
+ goto eos_stop;
+ }
+
+ GST_LOG ("reading buffer (size=%" G_GUINT64_FORMAT "), stream %d, pos %"
+ G_GUINT64_FORMAT " (0x%" G_GINT64_MODIFIER "x), kf %d", size,
+ stream_num, offset, offset, keyframe);
+
+ /* FIXME, check large chunks and cut them up */
+
+ /* pull in the data */
+ buf = NULL;
+ ret = gst_pad_pull_range (avi->sinkpad, offset, size, &buf);
+ if (ret != GST_FLOW_OK)
+ goto pull_failed;
+
+ /* check for short buffers, this is EOS as well */
+ if (gst_buffer_get_size (buf) < size)
+ goto short_buffer;
+
+ /* invert the picture if needed, and append palette for RGB8P */
+ buf = gst_avi_demux_invert (stream, buf);
+
+ /* mark non-keyframes */
+ if (keyframe || stream->is_raw) {
+ GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
+ GST_BUFFER_PTS (buf) = timestamp;
+ } else {
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
+ GST_BUFFER_PTS (buf) = GST_CLOCK_TIME_NONE;
+ }
+
+ GST_BUFFER_DTS (buf) = timestamp;
+
+ GST_BUFFER_DURATION (buf) = duration;
+ GST_BUFFER_OFFSET (buf) = out_offset;
+ GST_BUFFER_OFFSET_END (buf) = out_offset_end;
+
+ /* mark discont when pending */
+ if (stream->discont) {
+ GST_DEBUG_OBJECT (avi, "setting DISCONT flag");
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
+ stream->discont = FALSE;
+ } else {
+ GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
+ }
+ #if 0
+ gst_avi_demux_add_assoc (avi, stream, timestamp, offset, keyframe);
+ #endif
+
+ /* update current position in the segment */
+ avi->segment.position = timestamp;
+
+ GST_DEBUG_OBJECT (avi, "Pushing buffer of size %" G_GSIZE_FORMAT ", ts %"
+ GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT ", off %" G_GUINT64_FORMAT
+ ", off_end %" G_GUINT64_FORMAT,
+ gst_buffer_get_size (buf), GST_TIME_ARGS (timestamp),
+ GST_TIME_ARGS (duration), out_offset, out_offset_end);
+
+ if (stream->alignment > 1)
+ buf = gst_avi_demux_align_buffer (avi, buf, stream->alignment);
+ ret = gst_pad_push (stream->pad, buf);
+
+ /* mark as processed, we increment the frame and byte counters then
+ * leave the while loop and return the GstFlowReturn */
+ processed = TRUE;
+
+ if (avi->segment.rate < 0) {
+ if (timestamp > avi->segment.stop && ret == GST_FLOW_EOS) {
+ /* In reverse playback we can get a GST_FLOW_EOS when
+ * we are at the end of the segment, so we just need to jump
+ * back to the previous section. */
+ GST_DEBUG_OBJECT (avi, "downstream has reached end of segment");
+ ret = GST_FLOW_OK;
+ }
+ }
+ next:
+ /* move to next item */
+ ret = gst_avi_demux_advance (avi, stream, ret);
+
+ /* combine flows */
+ ret = gst_avi_demux_combine_flows (avi, stream, ret);
+ } while (!processed);
+
+ beach:
+ return ret;
+
+ /* special cases */
+ eos:
+ {
+ GST_DEBUG_OBJECT (avi, "No samples left for any streams - EOS");
+ ret = GST_FLOW_EOS;
+ goto beach;
+ }
+ eos_stop:
+ {
+ GST_LOG_OBJECT (avi, "Found keyframe after segment,"
+ " setting EOS (%" GST_TIME_FORMAT " > %" GST_TIME_FORMAT ")",
+ GST_TIME_ARGS (timestamp), GST_TIME_ARGS (avi->segment.stop));
+ ret = GST_FLOW_EOS;
+ /* move to next stream */
+ goto next;
+ }
+ pull_failed:
+ {
+ GST_DEBUG_OBJECT (avi, "pull range failed: pos=%" G_GUINT64_FORMAT
+ " size=%" G_GUINT64_FORMAT, offset, size);
+ goto beach;
+ }
+ short_buffer:
+ {
+ GST_WARNING_OBJECT (avi, "Short read at offset %" G_GUINT64_FORMAT
+ ", only got %" G_GSIZE_FORMAT "/%" G_GUINT64_FORMAT
+ " bytes (truncated file?)", offset, gst_buffer_get_size (buf), size);
+ gst_buffer_unref (buf);
+ ret = GST_FLOW_EOS;
+ goto beach;
+ }
+ }
+
+ /*
+ * Read data. If we have an index it delegates to
+ * gst_avi_demux_process_next_entry().
+ */
+ static GstFlowReturn
+ gst_avi_demux_stream_data (GstAviDemux * avi)
+ {
+ guint32 tag = 0;
+ guint32 size = 0;
+ gint stream_nr = 0;
+ GstFlowReturn res = GST_FLOW_OK;
+
+ if (G_UNLIKELY (avi->have_eos)) {
+ /* Clean adapter, we're done */
+ gst_adapter_clear (avi->adapter);
+ return GST_FLOW_EOS;
+ }
+
+ if (G_UNLIKELY (avi->todrop)) {
+ guint drop;
+
+ if ((drop = gst_adapter_available (avi->adapter))) {
+ if (drop > avi->todrop)
+ drop = avi->todrop;
+ GST_DEBUG_OBJECT (avi, "Dropping %d bytes", drop);
+ gst_adapter_flush (avi->adapter, drop);
+ avi->todrop -= drop;
+ avi->offset += drop;
+ }
+ }
+
+ /* Iterate until we need more data, so the adapter won't grow too much */
+ while (1) {
+ if (G_UNLIKELY (!gst_avi_demux_peek_chunk_info (avi, &tag, &size))) {
+ return GST_FLOW_OK;
+ }
+
+ GST_DEBUG ("Trying chunk (%" GST_FOURCC_FORMAT "), size %d",
+ GST_FOURCC_ARGS (tag), size);
+
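+ /* data chunks are named "NNxx" with NN the ASCII stream number; "ix.."
+ * chunks are ODML sub-indexes, idx1 is the legacy index, and RIFF/LIST
+ * headers can simply be skipped */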
+ if (G_LIKELY ((tag & 0xff) >= '0' && (tag & 0xff) <= '9' &&
+ ((tag >> 8) & 0xff) >= '0' && ((tag >> 8) & 0xff) <= '9')) {
+ GST_LOG ("Chunk ok");
+ } else if ((tag & 0xffff) == (('x' << 8) | 'i')) {
+ GST_DEBUG ("Found sub-index tag");
+ if (gst_avi_demux_peek_chunk (avi, &tag, &size) || size == 0) {
+ /* accept 0 size buffer here */
+ avi->abort_buffering = FALSE;
+ GST_DEBUG (" skipping %d bytes for now", size);
+ gst_adapter_flush (avi->adapter, 8 + GST_ROUND_UP_2 (size));
+ }
+ return GST_FLOW_OK;
+ } else if (tag == GST_RIFF_TAG_RIFF) {
+ /* RIFF tags can appear in ODML files, just jump over them */
+ if (gst_adapter_available (avi->adapter) >= 12) {
+ GST_DEBUG ("Found RIFF tag, skipping RIFF header");
+ gst_adapter_flush (avi->adapter, 12);
+ continue;
+ }
+ return GST_FLOW_OK;
+ } else if (tag == GST_RIFF_TAG_idx1) {
+ GST_DEBUG ("Found index tag");
+ if (gst_avi_demux_peek_chunk (avi, &tag, &size) || size == 0) {
+ /* accept 0 size buffer here */
+ avi->abort_buffering = FALSE;
+ GST_DEBUG (" skipping %d bytes for now", size);
+ gst_adapter_flush (avi->adapter, 8 + GST_ROUND_UP_2 (size));
+ }
+ return GST_FLOW_OK;
+ } else if (tag == GST_RIFF_TAG_LIST) {
+ /* movi chunks might be grouped in rec list */
+ if (gst_adapter_available (avi->adapter) >= 12) {
+ GST_DEBUG ("Found LIST tag, skipping LIST header");
+ gst_adapter_flush (avi->adapter, 12);
+ continue;
+ }
+ return GST_FLOW_OK;
+ } else if (tag == GST_RIFF_TAG_JUNK || tag == GST_RIFF_TAG_JUNQ) {
+ /* rec list might contain JUNK chunks */
+ GST_DEBUG ("Found JUNK tag");
+ if (gst_avi_demux_peek_chunk (avi, &tag, &size) || size == 0) {
+ /* accept 0 size buffer here */
+ avi->abort_buffering = FALSE;
+ GST_DEBUG (" skipping %d bytes for now", size);
+ gst_adapter_flush (avi->adapter, 8 + GST_ROUND_UP_2 (size));
+ }
+ return GST_FLOW_OK;
+ } else {
+ GST_DEBUG ("No more stream chunks, send EOS");
+ avi->have_eos = TRUE;
+ return GST_FLOW_EOS;
+ }
+
+ if (G_UNLIKELY (!gst_avi_demux_peek_chunk (avi, &tag, &size))) {
+ /* hopefully we can catch a usable chunk later on ... */
+ /* FIXME: should we give up here rather than possibly ending up going
+ * through the whole file? */
+ if (avi->abort_buffering) {
+ avi->abort_buffering = FALSE;
+ if (size) {
+ gst_adapter_flush (avi->adapter, 8);
+ return GST_FLOW_OK;
+ }
+ } else {
+ return GST_FLOW_OK;
+ }
+ }
+ GST_DEBUG ("chunk ID %" GST_FOURCC_FORMAT ", size %u",
+ GST_FOURCC_ARGS (tag), size);
+
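+ /* the two leading ASCII digits of the chunk ID encode the stream number */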
+ stream_nr = CHUNKID_TO_STREAMNR (tag);
+
+ if (G_UNLIKELY (stream_nr < 0 || stream_nr >= avi->num_streams)) {
+ /* recoverable */
+ GST_WARNING ("Invalid stream ID %d (%" GST_FOURCC_FORMAT ")",
+ stream_nr, GST_FOURCC_ARGS (tag));
+ avi->offset += 8 + GST_ROUND_UP_2 (size);
+ gst_adapter_flush (avi->adapter, 8 + GST_ROUND_UP_2 (size));
+ } else {
+ GstAviStream *stream;
+ GstClockTime next_ts = 0;
+ GstBuffer *buf = NULL;
+ #if 0
+ guint64 offset;
+ #endif
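+ /* after a push-mode keyframe seek, chunks of the main stream located
+ * before the target keyframe offset are flushed instead of pushed */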
+ gboolean saw_desired_kf = stream_nr != avi->main_stream
+ || avi->offset >= avi->seek_kf_offset;
+
+ if (stream_nr == avi->main_stream && avi->offset == avi->seek_kf_offset) {
+ GST_DEBUG_OBJECT (avi, "Desired keyframe reached");
+ avi->seek_kf_offset = 0;
+ }
+
+ if (saw_desired_kf) {
+ gst_adapter_flush (avi->adapter, 8);
+ /* get buffer */
+ if (size) {
+ buf = gst_adapter_take_buffer (avi->adapter, GST_ROUND_UP_2 (size));
+ /* patch the size */
+ gst_buffer_resize (buf, 0, size);
+ } else {
+ buf = NULL;
+ }
+ } else {
+ GST_DEBUG_OBJECT (avi,
+ "Desired keyframe not yet reached, flushing chunk");
+ gst_adapter_flush (avi->adapter, 8 + GST_ROUND_UP_2 (size));
+ }
+
+ #if 0
+ offset = avi->offset;
+ #endif
+ avi->offset += 8 + GST_ROUND_UP_2 (size);
+
+ stream = &avi->stream[stream_nr];
+
+ /* set delay (if any)
+ if (stream->strh->init_frames == stream->current_frame &&
+ stream->delay == 0)
+ stream->delay = next_ts;
+ */
+
+ /* parsing of corresponding header may have failed */
+ if (G_UNLIKELY (!stream->pad)) {
+ GST_WARNING_OBJECT (avi, "no pad for stream ID %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (tag));
+ if (buf)
+ gst_buffer_unref (buf);
+ } else {
+ /* get time of this buffer */
+ gst_pad_query_position (stream->pad, GST_FORMAT_TIME,
+ (gint64 *) & next_ts);
+
+ #if 0
+ gst_avi_demux_add_assoc (avi, stream, next_ts, offset, FALSE);
+ #endif
+
+ /* increment our positions */
+ stream->current_entry++;
+ /* as in pull mode, 'total' is either bytes (CBR) or frames (VBR) */
+ if (stream->strh->type == GST_RIFF_FCC_auds && stream->is_vbr) {
+ gint blockalign = stream->strf.auds->blockalign;
+ if (blockalign > 0)
+ stream->current_total += DIV_ROUND_UP (size, blockalign);
+ else
+ stream->current_total++;
+ } else {
+ stream->current_total += size;
+ }
+ GST_LOG_OBJECT (avi, "current entry %u, total %u",
+ stream->current_entry, stream->current_total);
+
+ /* update current position in the segment */
+ avi->segment.position = next_ts;
+
+ if (saw_desired_kf && buf) {
+ GstClockTime dur_ts = 0;
+
+ /* invert the picture if needed, and append palette for RGB8P */
+ buf = gst_avi_demux_invert (stream, buf);
+
+ gst_pad_query_position (stream->pad, GST_FORMAT_TIME,
+ (gint64 *) & dur_ts);
+
+ GST_BUFFER_DTS (buf) = next_ts;
+ GST_BUFFER_PTS (buf) = GST_CLOCK_TIME_NONE;
+ GST_BUFFER_DURATION (buf) = dur_ts - next_ts;
+ if (stream->strh->type == GST_RIFF_FCC_vids) {
+ GST_BUFFER_OFFSET (buf) = stream->current_entry - 1;
+ GST_BUFFER_OFFSET_END (buf) = stream->current_entry;
+ } else {
+ GST_BUFFER_OFFSET (buf) = GST_BUFFER_OFFSET_NONE;
+ GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET_NONE;
+ }
+
+ GST_DEBUG_OBJECT (avi,
+ "Pushing buffer with time=%" GST_TIME_FORMAT ", duration %"
+ GST_TIME_FORMAT ", offset %" G_GUINT64_FORMAT
+ " and size %d over pad %s", GST_TIME_ARGS (next_ts),
+ GST_TIME_ARGS (GST_BUFFER_DURATION (buf)),
+ GST_BUFFER_OFFSET (buf), size, GST_PAD_NAME (stream->pad));
+
+ /* mark discont when pending */
+ if (G_UNLIKELY (stream->discont)) {
+ GST_DEBUG_OBJECT (avi, "Setting DISCONT");
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
+ stream->discont = FALSE;
+ } else {
+ GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
+ }
+
+ if (stream->alignment > 1)
+ buf = gst_avi_demux_align_buffer (avi, buf, stream->alignment);
+ res = gst_pad_push (stream->pad, buf);
+ buf = NULL;
+
+ /* combine flows */
+ res = gst_avi_demux_combine_flows (avi, stream, res);
+ if (G_UNLIKELY (res != GST_FLOW_OK)) {
+ GST_DEBUG ("Push failed; %s", gst_flow_get_name (res));
+ return res;
+ }
+ }
+ }
+ }
+ }
+
+ return res;
+ }
+
+ /*
+ * Send pending tags.
+ */
+ static void
+ push_tag_lists (GstAviDemux * avi)
+ {
+ guint i;
+ GstTagList *tags;
+
+ if (!avi->got_tags)
+ return;
+
+ GST_DEBUG_OBJECT (avi, "Pushing pending tag lists");
+
+ for (i = 0; i < avi->num_streams; i++) {
+ GstAviStream *stream = &avi->stream[i];
+ GstPad *pad = stream->pad;
+
+ tags = stream->taglist;
+
+ if (pad && tags) {
+ GST_DEBUG_OBJECT (pad, "Tags: %" GST_PTR_FORMAT, tags);
+
+ gst_pad_push_event (pad, gst_event_new_tag (tags));
+ stream->taglist = NULL;
+ }
+ }
+
+ if (!(tags = avi->globaltags))
+ tags = gst_tag_list_new_empty ();
+
+ gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_CONTAINER_FORMAT, "AVI", NULL);
+
+ GST_DEBUG_OBJECT (avi, "Global tags: %" GST_PTR_FORMAT, tags);
+ gst_tag_list_set_scope (tags, GST_TAG_SCOPE_GLOBAL);
+ gst_avi_demux_push_event (avi, gst_event_new_tag (tags));
+ avi->globaltags = NULL;
+ avi->got_tags = FALSE;
+ }
+
+ static void
+ gst_avi_demux_loop (GstPad * pad)
+ {
+ GstFlowReturn res;
+ GstAviDemux *avi = GST_AVI_DEMUX (GST_PAD_PARENT (pad));
+
+ switch (avi->state) {
+ case GST_AVI_DEMUX_START:
+ res = gst_avi_demux_stream_init_pull (avi);
+ if (G_UNLIKELY (res != GST_FLOW_OK)) {
+ GST_WARNING ("stream_init flow: %s", gst_flow_get_name (res));
+ goto pause;
+ }
+ avi->state = GST_AVI_DEMUX_HEADER;
+ /* fall-through */
+ case GST_AVI_DEMUX_HEADER:
+ res = gst_avi_demux_stream_header_pull (avi);
+ if (G_UNLIKELY (res != GST_FLOW_OK)) {
+ GST_WARNING ("stream_header flow: %s", gst_flow_get_name (res));
+ goto pause;
+ }
+ avi->state = GST_AVI_DEMUX_MOVI;
+ break;
+ case GST_AVI_DEMUX_MOVI:
+ if (G_UNLIKELY (avi->seg_event)) {
+ gst_avi_demux_push_event (avi, avi->seg_event);
+ avi->seg_event = NULL;
+ }
+ if (G_UNLIKELY (avi->got_tags)) {
+ push_tag_lists (avi);
+ }
+ /* process each index entry in turn */
+ res = gst_avi_demux_loop_data (avi);
+
+ /* pause when error */
+ if (G_UNLIKELY (res != GST_FLOW_OK)) {
+ GST_INFO ("stream_movi flow: %s", gst_flow_get_name (res));
+ goto pause;
+ }
+ break;
+ default:
+ GST_ERROR_OBJECT (avi, "unknown state %d", avi->state);
+ res = GST_FLOW_ERROR;
+ goto pause;
+ }
+
+ return;
+
+ /* ERRORS */
+ pause:{
+
+ gboolean push_eos = FALSE;
+ GST_LOG_OBJECT (avi, "pausing task, reason %s", gst_flow_get_name (res));
+ gst_pad_pause_task (avi->sinkpad);
+
+ if (res == GST_FLOW_EOS) {
+ /* handle end-of-stream/segment */
+ /* so align our position with the end of it, if there is one;
+ * this ensures a subsequent seek will arrive at the correct base/acc time */
+ if (avi->segment.rate > 0.0 &&
+ GST_CLOCK_TIME_IS_VALID (avi->segment.stop))
+ avi->segment.position = avi->segment.stop;
+ else if (avi->segment.rate < 0.0)
+ avi->segment.position = avi->segment.start;
+ if (avi->segment.flags & GST_SEEK_FLAG_SEGMENT) {
+ gint64 stop;
+ GstEvent *event;
+ GstMessage *msg;
+
+ if ((stop = avi->segment.stop) == -1)
+ stop = avi->segment.duration;
+
+ GST_INFO_OBJECT (avi, "sending segment_done");
+
+ msg =
+ gst_message_new_segment_done (GST_OBJECT_CAST (avi),
+ GST_FORMAT_TIME, stop);
+ if (avi->segment_seqnum)
+ gst_message_set_seqnum (msg, avi->segment_seqnum);
+ gst_element_post_message (GST_ELEMENT_CAST (avi), msg);
+
+ event = gst_event_new_segment_done (GST_FORMAT_TIME, stop);
+ if (avi->segment_seqnum)
+ gst_event_set_seqnum (event, avi->segment_seqnum);
+ gst_avi_demux_push_event (avi, event);
+ } else {
+ push_eos = TRUE;
+ }
+ } else if (res == GST_FLOW_NOT_LINKED || res < GST_FLOW_EOS) {
+ /* for fatal errors we post an error message, wrong-state is
+ * not fatal because it happens due to flushes and only means
+ * that we should stop now. */
+ GST_ELEMENT_FLOW_ERROR (avi, res);
+ push_eos = TRUE;
+ }
+ if (push_eos) {
+ GstEvent *event;
+
+ GST_INFO_OBJECT (avi, "sending eos");
+ event = gst_event_new_eos ();
+ if (avi->segment_seqnum)
+ gst_event_set_seqnum (event, avi->segment_seqnum);
+ if (!gst_avi_demux_push_event (avi, event) && (res == GST_FLOW_EOS)) {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX,
+ (NULL), ("got eos but no streams (yet)"));
+ }
+ }
+ }
+ }
+
+
+ static GstFlowReturn
+ gst_avi_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
+ {
+ GstFlowReturn res;
+ GstAviDemux *avi = GST_AVI_DEMUX (parent);
+ gint i;
+
+ if (GST_BUFFER_IS_DISCONT (buf)) {
+ GST_DEBUG_OBJECT (avi, "got DISCONT");
+ gst_adapter_clear (avi->adapter);
+ /* mark all streams DISCONT */
+ for (i = 0; i < avi->num_streams; i++)
+ avi->stream[i].discont = TRUE;
+ }
+
+ GST_DEBUG ("Store %" G_GSIZE_FORMAT " bytes in adapter",
+ gst_buffer_get_size (buf));
+ gst_adapter_push (avi->adapter, buf);
+
+ switch (avi->state) {
+ case GST_AVI_DEMUX_START:
+ if ((res = gst_avi_demux_stream_init_push (avi)) != GST_FLOW_OK) {
+ GST_WARNING ("stream_init flow: %s", gst_flow_get_name (res));
+ break;
+ }
+ break;
+ case GST_AVI_DEMUX_HEADER:
+ if ((res = gst_avi_demux_stream_header_push (avi)) != GST_FLOW_OK) {
+ GST_WARNING ("stream_header flow: %s", gst_flow_get_name (res));
+ break;
+ }
+ break;
+ case GST_AVI_DEMUX_MOVI:
+ if (G_UNLIKELY (avi->seg_event)) {
+ gst_avi_demux_push_event (avi, avi->seg_event);
+ avi->seg_event = NULL;
+ }
+ if (G_UNLIKELY (avi->got_tags)) {
+ push_tag_lists (avi);
+ }
+ res = gst_avi_demux_stream_data (avi);
+ break;
+ case GST_AVI_DEMUX_SEEK:
+ {
+ GstEvent *event;
+
+ res = GST_FLOW_OK;
+
+ /* obtain and parse indexes */
+ if (avi->stream[0].indexes && !gst_avi_demux_read_subindexes_push (avi))
+ /* seek in subindex read function failed */
+ goto index_failed;
+
+ if (!avi->stream[0].indexes && !avi->have_index
+ && avi->avih->flags & GST_RIFF_AVIH_HASINDEX)
+ gst_avi_demux_stream_index_push (avi);
+
+ if (avi->have_index) {
+ /* use the indexes now to construct nice durations */
+ gst_avi_demux_calculate_durations_from_index (avi);
+ } else {
+ /* still parsing indexes */
+ break;
+ }
+
+ GST_OBJECT_LOCK (avi);
+ event = avi->seek_event;
+ avi->seek_event = NULL;
+ GST_OBJECT_UNLOCK (avi);
+
+ /* calculate and perform seek */
+ if (!avi_demux_handle_seek_push (avi, avi->sinkpad, event)) {
+ gst_event_unref (event);
+ goto seek_failed;
+ }
+
+ gst_event_unref (event);
+ avi->state = GST_AVI_DEMUX_MOVI;
+ break;
+ }
+ default:
+ GST_ELEMENT_ERROR (avi, STREAM, FAILED, (NULL),
+ ("Illegal internal state"));
+ res = GST_FLOW_ERROR;
+ break;
+ }
+
+ GST_DEBUG_OBJECT (avi, "state: %d res:%s", avi->state,
+ gst_flow_get_name (res));
+
+ if (G_UNLIKELY (avi->abort_buffering))
+ goto abort_buffering;
+
+ return res;
+
+ /* ERRORS */
+ index_failed:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL), ("failed to read indexes"));
+ return GST_FLOW_ERROR;
+ }
+ seek_failed:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL), ("push mode seek failed"));
+ return GST_FLOW_ERROR;
+ }
+ abort_buffering:
+ {
+ avi->abort_buffering = FALSE;
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL), ("unhandled buffer size"));
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ static gboolean
+ gst_avi_demux_sink_activate (GstPad * sinkpad, GstObject * parent)
+ {
+ GstQuery *query;
+ gboolean pull_mode;
+
+ query = gst_query_new_scheduling ();
+
+ if (!gst_pad_peer_query (sinkpad, query)) {
+ gst_query_unref (query);
+ goto activate_push;
+ }
+
+ pull_mode = gst_query_has_scheduling_mode_with_flags (query,
+ GST_PAD_MODE_PULL, GST_SCHEDULING_FLAG_SEEKABLE);
+ gst_query_unref (query);
+
+ if (!pull_mode)
+ goto activate_push;
+
+ GST_DEBUG_OBJECT (sinkpad, "activating pull");
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PULL, TRUE);
+
+ activate_push:
+ {
+ GST_DEBUG_OBJECT (sinkpad, "activating push");
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PUSH, TRUE);
+ }
+ }
+
+ static gboolean
+ gst_avi_demux_sink_activate_mode (GstPad * sinkpad, GstObject * parent,
+ GstPadMode mode, gboolean active)
+ {
+ gboolean res;
+ GstAviDemux *avi = GST_AVI_DEMUX (parent);
+
+ switch (mode) {
+ case GST_PAD_MODE_PULL:
+ if (active) {
+ avi->streaming = FALSE;
+ res = gst_pad_start_task (sinkpad, (GstTaskFunction) gst_avi_demux_loop,
+ sinkpad, NULL);
+ } else {
+ res = gst_pad_stop_task (sinkpad);
+ }
+ break;
+ case GST_PAD_MODE_PUSH:
+ if (active) {
+ GST_DEBUG ("avi: activating push/chain function");
+ avi->streaming = TRUE;
+ } else {
+ GST_DEBUG ("avi: deactivating push/chain function");
+ }
+ res = TRUE;
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+ return res;
+ }
+
+ #if 0
+ static void
+ gst_avi_demux_set_index (GstElement * element, GstIndex * index)
+ {
+ GstAviDemux *avi = GST_AVI_DEMUX (element);
+
+ GST_OBJECT_LOCK (avi);
+ if (avi->element_index)
+ gst_object_unref (avi->element_index);
+ if (index) {
+ avi->element_index = gst_object_ref (index);
+ } else {
+ avi->element_index = NULL;
+ }
+ GST_OBJECT_UNLOCK (avi);
+ /* object lock might be taken again */
+ if (index)
+ gst_index_get_writer_id (index, GST_OBJECT_CAST (element), &avi->index_id);
+ GST_DEBUG_OBJECT (avi, "Set index %" GST_PTR_FORMAT, avi->element_index);
+ }
+
+ static GstIndex *
+ gst_avi_demux_get_index (GstElement * element)
+ {
+ GstIndex *result = NULL;
+ GstAviDemux *avi = GST_AVI_DEMUX (element);
+
+ GST_OBJECT_LOCK (avi);
+ if (avi->element_index)
+ result = gst_object_ref (avi->element_index);
+ GST_OBJECT_UNLOCK (avi);
+
+ GST_DEBUG_OBJECT (avi, "Returning index %" GST_PTR_FORMAT, result);
+
+ return result;
+ }
+ #endif
+
+ static GstStateChangeReturn
+ gst_avi_demux_change_state (GstElement * element, GstStateChange transition)
+ {
+ GstStateChangeReturn ret;
+ GstAviDemux *avi = GST_AVI_DEMUX (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ avi->streaming = FALSE;
+ gst_segment_init (&avi->segment, GST_FORMAT_TIME);
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ if (ret == GST_STATE_CHANGE_FAILURE)
+ goto done;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ avi->have_index = FALSE;
+ gst_avi_demux_reset (avi);
+ break;
+ default:
+ break;
+ }
+
+ done:
+ return ret;
+ }
--- /dev/null
+ /* GStreamer
+ * Copyright (C) <2007> Julien Moutte <julien@moutte.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /**
+ * SECTION:element-flvdemux
+ * @title: flvdemux
+ *
+ * flvdemux demuxes an FLV file into the different contained streams.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v filesrc location=/path/to/flv ! flvdemux ! audioconvert ! autoaudiosink
+ * ]| This pipeline demuxes an FLV file and outputs the contained raw audio streams.
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include "gstflvelements.h"
+ #include "gstflvdemux.h"
+ #include "gstflvmux.h"
+
+ #include <string.h>
+ #include <stdio.h>
+ #include <gst/base/gstbytereader.h>
+ #include <gst/base/gstbytewriter.h>
+ #include <gst/pbutils/descriptions.h>
+ #include <gst/pbutils/pbutils.h>
+ #include <gst/audio/audio.h>
+ #include <gst/video/video.h>
+ #include <gst/tag/tag.h>
+
+ /* FIXME: don't rely on own GstIndex */
+ #include "gstindex.c"
+ #include "gstmemindex.c"
+ #define GST_ASSOCIATION_FLAG_NONE GST_INDEX_ASSOCIATION_FLAG_NONE
+ #define GST_ASSOCIATION_FLAG_KEY_UNIT GST_INDEX_ASSOCIATION_FLAG_KEY_UNIT
+ #define GST_ASSOCIATION_FLAG_DELTA_UNIT GST_INDEX_ASSOCIATION_FLAG_DELTA_UNIT
+
+ static GstStaticPadTemplate flv_sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-flv")
+ );
+
+ static GstStaticPadTemplate audio_src_template =
+ GST_STATIC_PAD_TEMPLATE ("audio",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS
+ ("audio/x-adpcm, layout = (string) swf, channels = (int) { 1, 2 }, rate = (int) { 5512, 11025, 22050, 44100 }; "
+ "audio/mpeg, mpegversion = (int) 1, layer = (int) 3, channels = (int) { 1, 2 }, rate = (int) { 5512, 8000, 11025, 22050, 44100 }, parsed = (boolean) TRUE; "
+ "audio/mpeg, mpegversion = (int) 4, stream-format = (string) raw, framed = (boolean) TRUE; "
+ "audio/x-nellymoser, channels = (int) { 1, 2 }, rate = (int) { 5512, 8000, 11025, 16000, 22050, 44100 }; "
+ "audio/x-raw, format = (string) { U8, S16LE }, layout = (string) interleaved, channels = (int) { 1, 2 }, rate = (int) { 5512, 11025, 22050, 44100 }; "
+ "audio/x-alaw, channels = (int) { 1, 2 }, rate = (int) 8000; "
+ "audio/x-mulaw, channels = (int) { 1, 2 }, rate = (int) 8000; "
+ "audio/x-speex, channels = (int) 1, rate = (int) 16000;")
+ );
+
+ static GstStaticPadTemplate video_src_template =
+ GST_STATIC_PAD_TEMPLATE ("video",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS ("video/x-flash-video, flvversion=(int) 1; "
+ "video/x-flash-screen; "
+ "video/x-vp6-flash; " "video/x-vp6-alpha; "
+ "video/x-h264, stream-format=avc;")
+ );
+
+ #define gst_flv_demux_parent_class parent_class
+ G_DEFINE_TYPE (GstFlvDemux, gst_flv_demux, GST_TYPE_ELEMENT);
+ GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (flvdemux, "flvdemux",
+ GST_RANK_PRIMARY, GST_TYPE_FLV_DEMUX, flv_element_init (plugin));
+
+ /* 9 bytes of header + 4 bytes of first previous tag size */
+ #define FLV_HEADER_SIZE 13
+ /* 1 byte of tag type + 3 bytes of tag data size */
+ #define FLV_TAG_TYPE_SIZE 4
+
+ /* two seconds - consider dts resynced to another base if the jump exceeds this */
+ #define RESYNC_THRESHOLD 2000
+
+ /* how much stream time to wait for audio tags to appear after we have video, or vice versa */
+ #define NO_MORE_PADS_THRESHOLD (6 * GST_SECOND)
+
+ static gboolean flv_demux_handle_seek_push (GstFlvDemux * demux,
+ GstEvent * event);
+ static gboolean gst_flv_demux_handle_seek_pull (GstFlvDemux * demux,
+ GstEvent * event, gboolean seeking);
+
+ static gboolean gst_flv_demux_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+ static gboolean gst_flv_demux_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+ static gboolean gst_flv_demux_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+
+ static GstIndex *gst_flv_demux_get_index (GstElement * element);
+
+ static void gst_flv_demux_push_tags (GstFlvDemux * demux);
+
+ static void
+ gst_flv_demux_parse_and_add_index_entry (GstFlvDemux * demux, GstClockTime ts,
+ guint64 pos, gboolean keyframe)
+ {
+ GstIndexAssociation associations[2];
+ GstIndex *index;
+ GstIndexEntry *entry;
+
+ GST_LOG_OBJECT (demux,
+ "adding key=%d association %" GST_TIME_FORMAT "-> %" G_GUINT64_FORMAT,
+ keyframe, GST_TIME_ARGS (ts), pos);
+
+ /* if upstream is not seekable there is no point in building an index */
+ if (!demux->upstream_seekable)
+ return;
+
+ index = gst_flv_demux_get_index (GST_ELEMENT (demux));
+
+ if (!index)
+ return;
+
+ /* entry may already have been added before, avoid adding indefinitely */
+ entry = gst_index_get_assoc_entry (index, demux->index_id,
+ GST_INDEX_LOOKUP_EXACT, GST_ASSOCIATION_FLAG_NONE, GST_FORMAT_BYTES, pos);
+
+ if (entry) {
+ #ifndef GST_DISABLE_GST_DEBUG
+ gint64 time = 0;
+ gboolean key;
+
+ gst_index_entry_assoc_map (entry, GST_FORMAT_TIME, &time);
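+ /* '! !' normalizes the flag bit to a gboolean 0/1 */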
+ key = ! !(GST_INDEX_ASSOC_FLAGS (entry) & GST_ASSOCIATION_FLAG_KEY_UNIT);
+ GST_LOG_OBJECT (demux, "position already mapped to time %" GST_TIME_FORMAT
+ ", keyframe %d", GST_TIME_ARGS (time), key);
+ /* there is not really a way to delete the existing one */
+ if (time != ts || key != ! !keyframe)
+ GST_DEBUG_OBJECT (demux, "metadata mismatch");
+ #endif
+ gst_object_unref (index);
+ return;
+ }
+
+ associations[0].format = GST_FORMAT_TIME;
+ associations[0].value = ts;
+ associations[1].format = GST_FORMAT_BYTES;
+ associations[1].value = pos;
+
+ gst_index_add_associationv (index, demux->index_id,
+ (keyframe) ? GST_ASSOCIATION_FLAG_KEY_UNIT :
+ GST_ASSOCIATION_FLAG_DELTA_UNIT, 2,
+ (const GstIndexAssociation *) &associations);
+
+ if (pos > demux->index_max_pos)
+ demux->index_max_pos = pos;
+ if (ts > demux->index_max_time)
+ demux->index_max_time = ts;
+
+ gst_object_unref (index);
+ }
+
+ static gchar *
+ FLV_GET_STRING (GstByteReader * reader)
+ {
+ guint16 string_size = 0;
+ gchar *string = NULL;
+ const guint8 *str = NULL;
+
+ g_return_val_if_fail (reader != NULL, NULL);
+
+ if (G_UNLIKELY (!gst_byte_reader_get_uint16_be (reader, &string_size)))
+ return NULL;
+
+ if (G_UNLIKELY (string_size > gst_byte_reader_get_remaining (reader)))
+ return NULL;
+
+ string = g_try_malloc0 (string_size + 1);
+ if (G_UNLIKELY (!string)) {
+ return NULL;
+ }
+
+ if (G_UNLIKELY (!gst_byte_reader_get_data (reader, string_size, &str))) {
+ g_free (string);
+ return NULL;
+ }
+
+ memcpy (string, str, string_size);
+ /* Check utf-8 validity if it's not an empty string */
+ if (string[0] && !g_utf8_validate (string, string_size, NULL)) {
+ g_free (string);
+ return NULL;
+ }
+
+ return string;
+ }
+
+ static void
+ gst_flv_demux_check_seekability (GstFlvDemux * demux)
+ {
+ GstQuery *query;
+ gint64 start = -1, stop = -1;
+
+ demux->upstream_seekable = FALSE;
+
+ query = gst_query_new_seeking (GST_FORMAT_BYTES);
+ if (!gst_pad_peer_query (demux->sinkpad, query)) {
+ GST_DEBUG_OBJECT (demux, "seeking query failed");
+ gst_query_unref (query);
+ return;
+ }
+
+ gst_query_parse_seeking (query, NULL, &demux->upstream_seekable,
+ &start, &stop);
+
+ gst_query_unref (query);
+
+ /* try harder to query upstream size if we didn't get it the first time */
+ if (demux->upstream_seekable && stop == -1) {
+ GST_DEBUG_OBJECT (demux, "doing duration query to fix up unset stop");
+ gst_pad_peer_query_duration (demux->sinkpad, GST_FORMAT_BYTES, &stop);
+ }
+
+ /* if upstream doesn't know the size, it's likely that it's not seekable in
+ * practice even if it technically may be seekable */
+ if (demux->upstream_seekable && (start != 0 || stop <= start)) {
+ GST_DEBUG_OBJECT (demux, "seekable but unknown start/stop -> disable");
+ demux->upstream_seekable = FALSE;
+ }
+
+ GST_DEBUG_OBJECT (demux, "upstream seekable: %d", demux->upstream_seekable);
+ }
+
+ static GstDateTime *
+ parse_flv_demux_parse_date_string (const gchar * s)
+ {
+ static const gchar months[12][4] = {
+ "Jan", "Feb", "Mar", "Apr",
+ "May", "Jun", "Jul", "Aug",
+ "Sep", "Oct", "Nov", "Dec"
+ };
+ GstDateTime *dt = NULL;
+ gchar **tokens;
+ guint64 d;
+ gchar *endptr, *stripped;
+ gint i, hh, mm, ss;
+ gint year = -1, month = -1, day = -1;
+ gint hour = -1, minute = -1, seconds = -1;
+
+ stripped = g_strstrip (g_strdup (s));
+
+ /* "Fri Oct 15 15:13:16 2004" needs to be parsed */
+ tokens = g_strsplit (stripped, " ", -1);
+
+ g_free (stripped);
+
+ if (g_strv_length (tokens) != 5)
+ goto out;
+
+ /* year */
+ d = g_ascii_strtoull (tokens[4], &endptr, 10);
+ if (d == 0 && *endptr != '\0')
+ goto out;
+
+ year = d;
+
+ /* month */
+ if (strlen (tokens[1]) != 3)
+ goto out;
+ for (i = 0; i < 12; i++) {
+ if (!strcmp (tokens[1], months[i])) {
+ break;
+ }
+ }
+ if (i == 12)
+ goto out;
+
+ month = i + 1;
+
+ /* day */
+ d = g_ascii_strtoull (tokens[2], &endptr, 10);
+ if (d == 0 && *endptr != '\0')
+ goto out;
+
+ day = d;
+
+ /* time */
+ hh = mm = ss = 0;
+ if (sscanf (tokens[3], "%d:%d:%d", &hh, &mm, &ss) < 2)
+ goto out;
+ if (hh >= 0 && hh < 24 && mm >= 0 && mm < 60 && ss >= 0 && ss < 60) {
+ hour = hh;
+ minute = mm;
+ seconds = ss;
+ }
+
+ out:
+
+ if (tokens)
+ g_strfreev (tokens);
+
+ if (year > 0)
+ dt = gst_date_time_new (0.0, year, month, day, hour, minute, seconds);
+
+ return dt;
+ }
+
+ static gboolean
+ gst_flv_demux_parse_metadata_item (GstFlvDemux * demux, GstByteReader * reader,
+ gboolean * end_marker)
+ {
+ gchar *tag_name = NULL;
+ guint8 tag_type = 0;
+
+ /* Initialize the end_marker flag to FALSE */
+ *end_marker = FALSE;
+
+ /* Name of the tag */
+ tag_name = FLV_GET_STRING (reader);
+ if (G_UNLIKELY (!tag_name)) {
+ GST_WARNING_OBJECT (demux, "failed reading tag name");
+ return FALSE;
+ }
+
+ /* What kind of object is that */
+ if (!gst_byte_reader_get_uint8 (reader, &tag_type))
+ goto error;
+
+ GST_DEBUG_OBJECT (demux, "tag name %s, tag type %d", tag_name, tag_type);
+
+ switch (tag_type) {
+ case 0: /* Double */
+ { /* Use a union to read the uint64 and then as a double */
+ gdouble d = 0;
+
+ if (!gst_byte_reader_get_float64_be (reader, &d))
+ goto error;
+
+ GST_DEBUG_OBJECT (demux, "%s => (double) %f", tag_name, d);
+
+ if (!strcmp (tag_name, "duration")) {
+ demux->duration = d * GST_SECOND;
+
+ gst_tag_list_add (demux->taglist, GST_TAG_MERGE_REPLACE,
+ GST_TAG_DURATION, demux->duration, NULL);
+ } else if (!strcmp (tag_name, "AspectRatioX")) {
+ demux->par_x = d;
+ demux->got_par = TRUE;
+ } else if (!strcmp (tag_name, "AspectRatioY")) {
+ demux->par_y = d;
+ demux->got_par = TRUE;
+ } else if (!strcmp (tag_name, "width")) {
+ demux->w = d;
+ } else if (!strcmp (tag_name, "height")) {
+ demux->h = d;
+ } else if (!strcmp (tag_name, "framerate")) {
+ demux->framerate = d;
+ } else if (!strcmp (tag_name, "audiodatarate")) {
+ demux->audio_bitrate = (guint) (d * 1024);
+ gst_tag_list_add (demux->audio_tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_NOMINAL_BITRATE, demux->audio_bitrate, NULL);
+ } else if (!strcmp (tag_name, "videodatarate")) {
+ demux->video_bitrate = (guint) (d * 1024);
+ gst_tag_list_add (demux->video_tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_NOMINAL_BITRATE, demux->video_bitrate, NULL);
+ } else {
+ GST_INFO_OBJECT (demux, "Tag \'%s\' not handled", tag_name);
+ }
+
+ break;
+ }
+ case 1: /* Boolean */
+ {
+ guint8 b = 0;
+
+ if (!gst_byte_reader_get_uint8 (reader, &b))
+ goto error;
+
+ GST_DEBUG_OBJECT (demux, "%s => (boolean) %d", tag_name, b);
+
+ GST_INFO_OBJECT (demux, "Tag \'%s\' not handled", tag_name);
+
+ break;
+ }
+ case 2: /* String */
+ {
+ gchar *s = NULL;
+
+ s = FLV_GET_STRING (reader);
+ if (s == NULL)
+ goto error;
+ if (!strcmp (s, "")) {
+ /* Not strictly an error, just an empty string */
+ g_free (s);
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux, "%s => (string) %s", tag_name, s);
+
+ if (!strcmp (tag_name, "creationdate")) {
+ GstDateTime *dt;
+
+ dt = parse_flv_demux_parse_date_string (s);
+ if (dt == NULL) {
+ GST_DEBUG_OBJECT (demux, "Failed to parse '%s' into datetime", s);
+ } else {
+ gst_tag_list_add (demux->taglist, GST_TAG_MERGE_REPLACE,
+ GST_TAG_DATE_TIME, dt, NULL);
+ gst_date_time_unref (dt);
+ }
+ } else if (!strcmp (tag_name, "creator")) {
+ gst_tag_list_add (demux->taglist, GST_TAG_MERGE_REPLACE,
+ GST_TAG_ARTIST, s, NULL);
+ } else if (!strcmp (tag_name, "title")) {
+ gst_tag_list_add (demux->taglist, GST_TAG_MERGE_REPLACE,
+ GST_TAG_TITLE, s, NULL);
+ } else if (!strcmp (tag_name, "metadatacreator")
+ || !strcmp (tag_name, "encoder")) {
+ gst_tag_list_add (demux->taglist, GST_TAG_MERGE_REPLACE,
+ GST_TAG_ENCODER, s, NULL);
+ } else {
+ GST_INFO_OBJECT (demux, "Tag \'%s\' not handled", tag_name);
+ }
+
+ g_free (s);
+
+ break;
+ }
+ case 3: /* Object */
+ {
+ gboolean end_of_object_marker = FALSE;
+
+ while (!end_of_object_marker) {
+ gboolean ok = gst_flv_demux_parse_metadata_item (demux, reader,
+ &end_of_object_marker);
+
+ if (G_UNLIKELY (!ok)) {
+ GST_WARNING_OBJECT (demux, "failed reading a tag, skipping");
+ goto error;
+ }
+ }
+
+ break;
+ }
+ case 8: /* ECMA array */
+ {
+ guint32 nb_elems = 0;
+ gboolean end_of_object_marker = FALSE;
+
+ if (!gst_byte_reader_get_uint32_be (reader, &nb_elems))
+ goto error;
+
+ GST_DEBUG_OBJECT (demux, "there are approx. %d elements in the array",
+ nb_elems);
+
+ while (!end_of_object_marker) {
+ gboolean ok = gst_flv_demux_parse_metadata_item (demux, reader,
+ &end_of_object_marker);
+
+ if (G_UNLIKELY (!ok)) {
+ GST_WARNING_OBJECT (demux, "failed reading a tag, skipping");
+ goto error;
+ }
+ }
+
+ break;
+ }
+ case 9: /* End marker */
+ {
+ GST_DEBUG_OBJECT (demux, "end marker ?");
+ if (tag_name[0] == '\0') {
+
+ GST_DEBUG_OBJECT (demux, "end marker detected");
+
+ *end_marker = TRUE;
+ }
+
+ break;
+ }
+ case 10: /* Array */
+ {
+ guint32 nb_elems = 0;
+
+ if (!gst_byte_reader_get_uint32_be (reader, &nb_elems))
+ goto error;
+
+ GST_DEBUG_OBJECT (demux, "array has %d elements", nb_elems);
+
+ if (!strcmp (tag_name, "times")) {
+ if (demux->times) {
+ g_array_free (demux->times, TRUE);
+ }
+ demux->times = g_array_new (FALSE, TRUE, sizeof (gdouble));
+ } else if (!strcmp (tag_name, "filepositions")) {
+ if (demux->filepositions) {
+ g_array_free (demux->filepositions, TRUE);
+ }
+ demux->filepositions = g_array_new (FALSE, TRUE, sizeof (gdouble));
+ }
+
+ while (nb_elems--) {
+ guint8 elem_type = 0;
+
+ if (!gst_byte_reader_get_uint8 (reader, &elem_type))
+ goto error;
+
+ switch (elem_type) {
+ case 0:
+ {
+ gdouble d;
+
+ if (!gst_byte_reader_get_float64_be (reader, &d))
+ goto error;
+
+ GST_DEBUG_OBJECT (demux, "element is a double %f", d);
+
+ if (!strcmp (tag_name, "times") && demux->times) {
+ g_array_append_val (demux->times, d);
+ } else if (!strcmp (tag_name, "filepositions") &&
+ demux->filepositions) {
+ g_array_append_val (demux->filepositions, d);
+ }
+ break;
+ }
+ default:
+ GST_WARNING_OBJECT (demux, "unsupported array element type %d",
+ elem_type);
+ }
+ }
+
+ break;
+ }
+ case 11: /* Date */
+ {
+ gdouble d = 0;
+ gint16 i = 0;
+
+ if (!gst_byte_reader_get_float64_be (reader, &d))
+ goto error;
+
+ if (!gst_byte_reader_get_int16_be (reader, &i))
+ goto error;
+
+ GST_DEBUG_OBJECT (demux,
+ "%s => (date as a double) %f, timezone offset %d", tag_name, d, i);
+
+ GST_INFO_OBJECT (demux, "Tag \'%s\' not handled", tag_name);
+
+ break;
+ }
+ default:
+ GST_WARNING_OBJECT (demux, "unsupported tag type %d", tag_type);
+ }
+
+ g_free (tag_name);
+
+ return TRUE;
+
+ error:
+ g_free (tag_name);
+
+ return FALSE;
+ }
+
+ static void
+ gst_flv_demux_clear_tags (GstFlvDemux * demux)
+ {
+ GST_DEBUG_OBJECT (demux, "clearing taglist");
+
+ if (demux->taglist) {
+ gst_tag_list_unref (demux->taglist);
+ }
+ demux->taglist = gst_tag_list_new_empty ();
+ gst_tag_list_set_scope (demux->taglist, GST_TAG_SCOPE_GLOBAL);
+
+ if (demux->audio_tags) {
+ gst_tag_list_unref (demux->audio_tags);
+ }
+ demux->audio_tags = gst_tag_list_new_empty ();
+
+ if (demux->video_tags) {
+ gst_tag_list_unref (demux->video_tags);
+ }
+ demux->video_tags = gst_tag_list_new_empty ();
+ }
+
+ static GstFlowReturn
+ gst_flv_demux_parse_tag_script (GstFlvDemux * demux, GstBuffer * buffer)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstByteReader reader;
+ guint8 type = 0;
+ GstMapInfo map;
+
+ g_return_val_if_fail (gst_buffer_get_size (buffer) >= 7, GST_FLOW_ERROR);
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ gst_byte_reader_init (&reader, map.data, map.size);
+
+ gst_byte_reader_skip_unchecked (&reader, 7);
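+ /* skip the 32-bit timestamp (24-bit base plus 8-bit extension) and the
+ * 24-bit stream id that precede the script payload */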
+
+ GST_LOG_OBJECT (demux, "parsing a script tag");
+
+ if (!gst_byte_reader_get_uint8 (&reader, &type))
+ goto cleanup;
+
+ /* Must be string */
+ if (type == 2) {
+ gchar *function_name;
+ guint i;
+
+ function_name = FLV_GET_STRING (&reader);
+
+ GST_LOG_OBJECT (demux, "function name is %s", GST_STR_NULL (function_name));
+
+ if (function_name != NULL && strcmp (function_name, "onMetaData") == 0) {
+ gboolean end_marker = FALSE;
+ GST_DEBUG_OBJECT (demux, "we have a metadata script object");
+
+ gst_flv_demux_clear_tags (demux);
+
+ if (!gst_byte_reader_get_uint8 (&reader, &type)) {
+ g_free (function_name);
+ goto cleanup;
+ }
+
+ switch (type) {
+ case 8:
+ {
+ guint32 nb_elems = 0;
+
+ /* ECMA array */
+ if (!gst_byte_reader_get_uint32_be (&reader, &nb_elems)) {
+ g_free (function_name);
+ goto cleanup;
+ }
+
+ /* The number of elements is just a hint; some files have
+ nb_elems == 0 and actually contain items. */
+ GST_DEBUG_OBJECT (demux, "there are approx. %d elements in the array",
+ nb_elems);
+ }
+ /* fallthrough to read data */
+ case 3:
+ {
+ /* Object */
+ while (!end_marker) {
+ gboolean ok =
+ gst_flv_demux_parse_metadata_item (demux, &reader, &end_marker);
+
+ if (G_UNLIKELY (!ok)) {
+ GST_WARNING_OBJECT (demux, "failed reading a tag, skipping");
+ break;
+ }
+ }
+ }
+ break;
+ default:
+ GST_DEBUG_OBJECT (demux, "Unhandled script data type : %d", type);
+ g_free (function_name);
+ goto cleanup;
+ }
+
+ gst_flv_demux_push_tags (demux);
+ }
+
+ g_free (function_name);
+
+ if (demux->times && demux->filepositions) {
+ guint num;
+
+ /* If an index was found, insert associations */
+ num = MIN (demux->times->len, demux->filepositions->len);
+ for (i = 0; i < num; i++) {
+ guint64 time, fileposition;
+
+ time = g_array_index (demux->times, gdouble, i) * GST_SECOND;
+ fileposition = g_array_index (demux->filepositions, gdouble, i);
+ gst_flv_demux_parse_and_add_index_entry (demux, time, fileposition,
+ TRUE);
+ }
+ demux->indexed = TRUE;
+ }
+ }
+
+ cleanup:
+ gst_buffer_unmap (buffer, &map);
+
+ return ret;
+ }
+
+ static gboolean
+ have_group_id (GstFlvDemux * demux)
+ {
+ GstEvent *event;
+
+ event = gst_pad_get_sticky_event (demux->sinkpad, GST_EVENT_STREAM_START, 0);
+ if (event) {
+ if (gst_event_parse_group_id (event, &demux->group_id))
+ demux->have_group_id = TRUE;
+ else
+ demux->have_group_id = FALSE;
+ gst_event_unref (event);
+ } else if (!demux->have_group_id) {
+ demux->have_group_id = TRUE;
+ demux->group_id = gst_util_group_id_next ();
+ }
+
+ return demux->have_group_id;
+ }
+
+ static gboolean
+ gst_flv_demux_audio_negotiate (GstFlvDemux * demux, guint32 codec_tag,
+ guint32 rate, guint32 channels, guint32 width)
+ {
+ GstCaps *caps = NULL, *old_caps;
+ gboolean ret = FALSE;
+ guint adjusted_rate = rate;
+ guint adjusted_channels = channels;
+ GstEvent *event;
+ gchar *stream_id;
+
+ switch (codec_tag) {
+ case 1:
+ caps = gst_caps_new_simple ("audio/x-adpcm", "layout", G_TYPE_STRING,
+ "swf", NULL);
+ break;
+ case 2:
+ case 14:
+ caps = gst_caps_new_simple ("audio/mpeg",
+ "mpegversion", G_TYPE_INT, 1, "layer", G_TYPE_INT, 3,
+ "parsed", G_TYPE_BOOLEAN, TRUE, NULL);
+ break;
+ case 0:
+ case 3:
+ {
+ GstAudioFormat format;
+
+ /* Assuming little endian for 0 (aka endianness of the
+ * system on which the file was created) as most people
+ * are probably using little endian machines */
+ format = gst_audio_format_build_integer ((width == 8) ? FALSE : TRUE,
+ G_LITTLE_ENDIAN, width, width);
+
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, gst_audio_format_to_string (format),
+ "layout", G_TYPE_STRING, "interleaved", NULL);
+ break;
+ }
+ case 4:
+ case 5:
+ case 6:
+ caps = gst_caps_new_empty_simple ("audio/x-nellymoser");
+ break;
+ case 10:
+ {
+ GstMapInfo map;
+ if (!demux->audio_codec_data) {
+ GST_DEBUG_OBJECT (demux, "don't have AAC codec data yet");
+ ret = TRUE;
+ goto done;
+ }
+
+ gst_buffer_map (demux->audio_codec_data, &map, GST_MAP_READ);
+
+ /* use codec-data to extract and verify samplerate */
+ if (map.size >= 2) {
+ gint freq_index;
+
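+ /* the AudioSpecificConfig starts with 5 bits of audio object type
+ * followed by 4 bits of sampling frequency index, hence the 0x0780 mask */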
+ freq_index = GST_READ_UINT16_BE (map.data);
+ freq_index = (freq_index & 0x0780) >> 7;
+ adjusted_rate =
+ gst_codec_utils_aac_get_sample_rate_from_index (freq_index);
+
+ if (adjusted_rate && (rate != adjusted_rate)) {
+ GST_LOG_OBJECT (demux, "Ajusting AAC sample rate %d -> %d", rate,
+ adjusted_rate);
+ } else {
+ adjusted_rate = rate;
+ }
+
+ adjusted_channels =
+ gst_codec_utils_aac_get_channels (map.data, map.size);
+
+ if (adjusted_channels && (channels != adjusted_channels)) {
+ GST_LOG_OBJECT (demux, "Ajusting AAC channels %d -> %d", channels,
+ adjusted_channels);
+ } else {
+ adjusted_channels = channels;
+ }
+ }
+ gst_buffer_unmap (demux->audio_codec_data, &map);
+
+ caps = gst_caps_new_simple ("audio/mpeg",
+ "mpegversion", G_TYPE_INT, 4, "framed", G_TYPE_BOOLEAN, TRUE,
+ "stream-format", G_TYPE_STRING, "raw", NULL);
+ break;
+ }
+ case 7:
+ caps = gst_caps_new_empty_simple ("audio/x-alaw");
+ break;
+ case 8:
+ caps = gst_caps_new_empty_simple ("audio/x-mulaw");
+ break;
+ case 11:
+ {
+ GValue streamheader = G_VALUE_INIT;
+ GValue value = G_VALUE_INIT;
+ GstByteWriter w;
+ GstStructure *structure;
+ GstBuffer *buf;
+ GstTagList *tags;
+
+ caps = gst_caps_new_empty_simple ("audio/x-speex");
+ structure = gst_caps_get_structure (caps, 0);
+
+ GST_DEBUG_OBJECT (demux, "generating speex header");
+
+ /* Speex decoder expects streamheader to be { [header], [comment] } */
+ g_value_init (&streamheader, GST_TYPE_ARRAY);
+
+ /* header part */
+ gst_byte_writer_init_with_size (&w, 80, TRUE);
+ gst_byte_writer_put_data (&w, (guint8 *) "Speex ", 8);
+ gst_byte_writer_put_data (&w, (guint8 *) "1.1.12", 7);
+ gst_byte_writer_fill (&w, 0, 13);
+ gst_byte_writer_put_uint32_le (&w, 1); /* version */
+ gst_byte_writer_put_uint32_le (&w, 80); /* header_size */
+ gst_byte_writer_put_uint32_le (&w, 16000); /* rate */
+ gst_byte_writer_put_uint32_le (&w, 1); /* mode: Wideband */
+ gst_byte_writer_put_uint32_le (&w, 4); /* mode_bitstream_version */
+ gst_byte_writer_put_uint32_le (&w, 1); /* nb_channels: 1 */
+ gst_byte_writer_put_uint32_le (&w, -1); /* bitrate */
+ gst_byte_writer_put_uint32_le (&w, 0x50); /* frame_size */
+ gst_byte_writer_put_uint32_le (&w, 0); /* VBR */
+ gst_byte_writer_put_uint32_le (&w, 1); /* frames_per_packet */
+ gst_byte_writer_put_uint32_le (&w, 0); /* extra_headers */
+ gst_byte_writer_put_uint32_le (&w, 0); /* reserved1 */
+ gst_byte_writer_put_uint32_le (&w, 0); /* reserved2 */
+ g_assert (gst_byte_writer_get_size (&w) == 80);
+
+ g_value_init (&value, GST_TYPE_BUFFER);
+ g_value_take_boxed (&value, gst_byte_writer_reset_and_get_buffer (&w));
+ gst_value_array_append_value (&streamheader, &value);
+ g_value_unset (&value);
+
+ /* comment part */
+ g_value_init (&value, GST_TYPE_BUFFER);
+ tags = gst_tag_list_new_empty ();
+ buf = gst_tag_list_to_vorbiscomment_buffer (tags, NULL, 0, "No comments");
+ gst_tag_list_unref (tags);
+ g_value_take_boxed (&value, buf);
+ gst_value_array_append_value (&streamheader, &value);
+ g_value_unset (&value);
+
+ gst_structure_take_value (structure, "streamheader", &streamheader);
+
+ channels = 1;
+ adjusted_rate = 16000;
+ break;
+ }
+ default:
+ GST_WARNING_OBJECT (demux, "unsupported audio codec tag %u", codec_tag);
+ break;
+ }
+
+ if (G_UNLIKELY (!caps)) {
+ GST_WARNING_OBJECT (demux, "failed creating caps for audio pad");
+ goto beach;
+ }
+
+ gst_caps_set_simple (caps, "rate", G_TYPE_INT, adjusted_rate,
+ "channels", G_TYPE_INT, adjusted_channels, NULL);
+
+ if (demux->audio_codec_data) {
+ gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER,
+ demux->audio_codec_data, NULL);
+ }
+
+ old_caps = gst_pad_get_current_caps (demux->audio_pad);
+ if (!old_caps) {
+ stream_id =
+ gst_pad_create_stream_id (demux->audio_pad, GST_ELEMENT_CAST (demux),
+ "audio");
+
+ event = gst_event_new_stream_start (stream_id);
+ if (demux->segment_seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (event, demux->segment_seqnum);
+ if (have_group_id (demux))
+ gst_event_set_group_id (event, demux->group_id);
+ gst_pad_push_event (demux->audio_pad, event);
+ g_free (stream_id);
+ }
+ if (!old_caps || !gst_caps_is_equal (old_caps, caps))
+ ret = gst_pad_set_caps (demux->audio_pad, caps);
+ else
+ ret = TRUE;
+
+ if (old_caps)
+ gst_caps_unref (old_caps);
+
+ done:
+ if (G_LIKELY (ret)) {
+ /* Store the caps we got from tags */
+ demux->audio_codec_tag = codec_tag;
+ demux->rate = rate;
+ demux->channels = channels;
+ demux->width = width;
+
+ if (caps) {
+ GST_DEBUG_OBJECT (demux->audio_pad, "successfully negotiated caps %"
+ GST_PTR_FORMAT, caps);
+
+ gst_flv_demux_push_tags (demux);
+ } else {
+ GST_DEBUG_OBJECT (demux->audio_pad, "delayed setting caps");
+ }
+ } else {
+ GST_WARNING_OBJECT (demux->audio_pad, "failed negotiating caps %"
+ GST_PTR_FORMAT, caps);
+ }
+
+ if (caps)
+ gst_caps_unref (caps);
+
+ beach:
+ return ret;
+ }
+
+ static gboolean
+ gst_flv_demux_push_src_event (GstFlvDemux * demux, GstEvent * event)
+ {
+ gboolean ret = TRUE;
+
+ if (demux->audio_pad)
+ ret |= gst_pad_push_event (demux->audio_pad, gst_event_ref (event));
+
+ if (demux->video_pad)
+ ret |= gst_pad_push_event (demux->video_pad, gst_event_ref (event));
+
+ gst_event_unref (event);
+
+ return ret;
+ }
+
+ static void
+ gst_flv_demux_add_codec_tag (GstFlvDemux * demux, const gchar * tag,
+ GstPad * pad)
+ {
+ if (pad) {
+ GstCaps *caps = gst_pad_get_current_caps (pad);
+
+ if (caps) {
+ gchar *codec_name = gst_pb_utils_get_codec_description (caps);
+
+ if (codec_name) {
+ gst_tag_list_add (demux->taglist, GST_TAG_MERGE_REPLACE,
+ tag, codec_name, NULL);
+ g_free (codec_name);
+ }
+
+ gst_caps_unref (caps);
+ }
+ }
+ }
+
+ static void
+ gst_flv_demux_push_tags (GstFlvDemux * demux)
+ {
+ GstEvent *tag_event;
+
+ gst_flv_demux_add_codec_tag (demux, GST_TAG_AUDIO_CODEC, demux->audio_pad);
+ gst_flv_demux_add_codec_tag (demux, GST_TAG_VIDEO_CODEC, demux->video_pad);
+
+ GST_DEBUG_OBJECT (demux, "pushing %" GST_PTR_FORMAT, demux->taglist);
+
+ tag_event = gst_event_new_tag (gst_tag_list_copy (demux->taglist));
+ if (demux->segment_seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (tag_event, demux->segment_seqnum);
+ gst_flv_demux_push_src_event (demux, tag_event);
+
++#ifdef TIZEN_FEATURE_FLVDEMUX_MODIFICATION
++ GST_DEBUG_OBJECT (demux, "post tag msg %" GST_PTR_FORMAT, demux->taglist);
++
++ /* also post a tag message so the application receives the FLV tags early */
++ gst_element_post_message (GST_ELEMENT_CAST (demux),
++ gst_message_new_tag (GST_OBJECT_CAST (demux),
++ gst_tag_list_copy (demux->taglist)));
++#endif
++
+ if (demux->audio_pad) {
+ GST_DEBUG_OBJECT (demux->audio_pad, "pushing audio %" GST_PTR_FORMAT,
+ demux->audio_tags);
+ tag_event = gst_event_new_tag (gst_tag_list_copy (demux->audio_tags));
+ if (demux->segment_seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (tag_event, demux->segment_seqnum);
+ gst_pad_push_event (demux->audio_pad, tag_event);
+ }
+
+ if (demux->video_pad) {
+ GST_DEBUG_OBJECT (demux->video_pad, "pushing video %" GST_PTR_FORMAT,
+ demux->video_tags);
+ tag_event = gst_event_new_tag (gst_tag_list_copy (demux->video_tags));
+ if (demux->segment_seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (tag_event, demux->segment_seqnum);
+ gst_pad_push_event (demux->video_pad, tag_event);
+ }
+ }
+
+ static gboolean
+ gst_flv_demux_update_resync (GstFlvDemux * demux, guint32 dts, gboolean discont,
+ guint32 * last, GstClockTime * offset)
+ {
+ gboolean ret = FALSE;
+ gint32 ddts = dts - *last;
+ if (!discont && ddts <= -RESYNC_THRESHOLD) {
+ /* Theoretically, we should subtract the duration of the last buffer,
+ but this demuxer sends no durations on buffers; it is unclear whether
+ it cannot know them or simply does not calculate them. */
+ *offset -= ddts * GST_MSECOND;
+ GST_WARNING_OBJECT (demux,
+ "Large dts gap (%" G_GINT32_FORMAT " ms), assuming resync, offset now %"
+ GST_TIME_FORMAT "", ddts, GST_TIME_ARGS (*offset));
+
+ ret = TRUE;
+ }
+ *last = dts;
+
+ return ret;
+ }
+
+ static void
+ gst_flv_demux_sync_streams (GstFlvDemux * demux)
+ {
+ /* Check if the audio or video stream are more than 3s behind the other
+ * stream, and if so send a gap event accordingly */
+
+ if (demux->audio_pad && GST_CLOCK_TIME_IS_VALID (demux->segment.position) &&
+ demux->last_audio_pts * GST_MSECOND + demux->audio_time_offset +
+ 3 * GST_SECOND < demux->segment.position) {
+ GstEvent *event;
+ guint64 start =
+ demux->last_audio_pts * GST_MSECOND + demux->audio_time_offset;
+ guint64 stop = demux->segment.position - 3 * GST_SECOND;
+
+ GST_DEBUG_OBJECT (demux,
+ "Synchronizing audio stream with video stream by advancing time from %"
+ GST_TIME_FORMAT " to %" GST_TIME_FORMAT, GST_TIME_ARGS (start),
+ GST_TIME_ARGS (stop));
+
+ demux->last_audio_pts = (stop - demux->audio_time_offset) / GST_MSECOND;
+
+ event = gst_event_new_gap (start, stop - start);
+ if (demux->segment_seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (event, demux->segment_seqnum);
+ gst_pad_push_event (demux->audio_pad, event);
+ }
+
+ if (demux->video_pad && GST_CLOCK_TIME_IS_VALID (demux->segment.position) &&
+ demux->last_video_dts * GST_MSECOND + demux->video_time_offset +
+ 3 * GST_SECOND < demux->segment.position) {
+ GstEvent *event;
+ guint64 start =
+ demux->last_video_dts * GST_MSECOND + demux->video_time_offset;
+ guint64 stop = demux->segment.position - 3 * GST_SECOND;
+
+ GST_DEBUG_OBJECT (demux,
+ "Synchronizing video stream with audio stream by advancing time from %"
+ GST_TIME_FORMAT " to %" GST_TIME_FORMAT, GST_TIME_ARGS (start),
+ GST_TIME_ARGS (stop));
+
+ demux->last_video_dts = (stop - demux->video_time_offset) / GST_MSECOND;
+
+ event = gst_event_new_gap (start, stop - start);
+ if (demux->segment_seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (event, demux->segment_seqnum);
+ gst_pad_push_event (demux->video_pad, event);
+ }
+ }
+
+ static GstFlowReturn
+ gst_flv_demux_parse_tag_audio (GstFlvDemux * demux, GstBuffer * buffer)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint32 pts = 0, codec_tag = 0, rate = 5512, width = 8, channels = 1;
+ guint32 codec_data = 0, pts_ext = 0;
+ guint8 flags = 0;
+ GstMapInfo map;
+ GstBuffer *outbuf;
+ guint8 *data;
+
+ GST_LOG_OBJECT (demux, "parsing an audio tag");
+
+ if (G_UNLIKELY (!demux->audio_pad && demux->no_more_pads)) {
+ #ifndef GST_DISABLE_DEBUG
+ if (G_UNLIKELY (!demux->no_audio_warned)) {
+ GST_WARNING_OBJECT (demux,
+ "Signaled no-more-pads already but had no audio pad -- ignoring");
+ demux->no_audio_warned = TRUE;
+ }
+ #endif
+ return GST_FLOW_OK;
+ }
+
+ g_return_val_if_fail (gst_buffer_get_size (buffer) == demux->tag_size,
+ GST_FLOW_ERROR);
+
+ /* Error out on tags with too small headers */
+ if (gst_buffer_get_size (buffer) < 11) {
+ GST_ERROR_OBJECT (demux, "Too small tag size (%" G_GSIZE_FORMAT ")",
+ gst_buffer_get_size (buffer));
+ return GST_FLOW_ERROR;
+ }
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = map.data;
+
+ /* Grab information about audio tag */
+ pts = GST_READ_UINT24_BE (data);
+ /* read the pts extension to 32 bits integer */
+ pts_ext = GST_READ_UINT8 (data + 3);
+ /* Combine them */
+ pts |= pts_ext << 24;
+
+ GST_LOG_OBJECT (demux, "pts bytes %02X %02X %02X %02X (%d)", data[0], data[1],
+ data[2], data[3], pts);
+
+ /* Skip the stream id and go directly to the flags */
+ flags = GST_READ_UINT8 (data + 7);
+
+ /* Silently skip buffers with no data */
+ if (map.size == 11)
+ goto beach;
+
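+ /* audio tag flags (FLV spec): bits 7-4 sound format, bits 3-2 sample
+ * rate, bit 1 sample size, bit 0 channel count */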
+ /* Channels */
+ if (flags & 0x01) {
+ channels = 2;
+ }
+ /* Width */
+ if (flags & 0x02) {
+ width = 16;
+ }
+ /* Sampling rate */
+ if ((flags & 0x0C) == 0x0C) {
+ rate = 44100;
+ } else if ((flags & 0x0C) == 0x08) {
+ rate = 22050;
+ } else if ((flags & 0x0C) == 0x04) {
+ rate = 11025;
+ }
+ /* Codec tag */
+ codec_tag = flags >> 4;
+ if (codec_tag == 10) { /* AAC has an extra byte for packet type */
+ codec_data = 2;
+ } else {
+ codec_data = 1;
+ }
+
+ /* codec tags with fixed sample rates: 5 (Nellymoser 8kHz), 14 (MP3 8kHz),
+ * 7/8 (A-law/mu-law at 8kHz), 4 (Nellymoser 16kHz) and 11 (Speex at 16kHz) */
+ if (codec_tag == 5 || codec_tag == 14 || codec_tag == 7 || codec_tag == 8)
+ rate = 8000;
+ else if ((codec_tag == 4) || (codec_tag == 11))
+ rate = 16000;
+
+ GST_LOG_OBJECT (demux, "audio tag with %d channels, %dHz sampling rate, "
+ "%d bits width, codec tag %u (flags %02X)", channels, rate, width,
+ codec_tag, flags);
+
+ if (codec_tag == 10) {
+ guint8 aac_packet_type = GST_READ_UINT8 (data + 8);
+
+ switch (aac_packet_type) {
+ case 0:
+ {
+ /* AudioSpecificConfig data */
+ GST_LOG_OBJECT (demux, "got an AAC codec data packet");
+ if (demux->audio_codec_data) {
+ gst_buffer_unref (demux->audio_codec_data);
+ }
+ demux->audio_codec_data =
+ gst_buffer_copy_region (buffer, GST_BUFFER_COPY_MEMORY,
+ 7 + codec_data, demux->tag_data_size - codec_data);
+
+ /* Use that buffer data in the caps */
+ if (demux->audio_pad)
+ gst_flv_demux_audio_negotiate (demux, codec_tag, rate, channels,
+ width);
+ goto beach;
+ }
+ case 1:
+ if (!demux->audio_codec_data) {
+ GST_ERROR_OBJECT (demux, "got AAC audio packet before codec data");
+ ret = GST_FLOW_OK;
+ goto beach;
+ }
+ /* AAC raw packet */
+ GST_LOG_OBJECT (demux, "got a raw AAC audio packet");
+ break;
+ default:
+ GST_WARNING_OBJECT (demux, "invalid AAC packet type %u",
+ aac_packet_type);
+ }
+ }
+
+ /* If we don't have our audio pad created, then create it. */
+ if (G_UNLIKELY (!demux->audio_pad)) {
+ demux->audio_pad =
+ gst_pad_new_from_template (gst_element_class_get_pad_template
+ (GST_ELEMENT_GET_CLASS (demux), "audio"), "audio");
+ if (G_UNLIKELY (!demux->audio_pad)) {
+ GST_WARNING_OBJECT (demux, "failed creating audio pad");
+ ret = GST_FLOW_ERROR;
+ goto beach;
+ }
+
+ /* Set functions on the pad */
+ gst_pad_set_query_function (demux->audio_pad,
+ GST_DEBUG_FUNCPTR (gst_flv_demux_query));
+ gst_pad_set_event_function (demux->audio_pad,
+ GST_DEBUG_FUNCPTR (gst_flv_demux_src_event));
+
+ gst_pad_use_fixed_caps (demux->audio_pad);
+
+ /* Make it active */
+ gst_pad_set_active (demux->audio_pad, TRUE);
+
+ /* Negotiate caps */
+ if (!gst_flv_demux_audio_negotiate (demux, codec_tag, rate, channels,
+ width)) {
+ gst_object_unref (demux->audio_pad);
+ demux->audio_pad = NULL;
+ ret = GST_FLOW_ERROR;
+ goto beach;
+ }
+ #ifndef GST_DISABLE_GST_DEBUG
+ {
+ GstCaps *caps;
+
+ caps = gst_pad_get_current_caps (demux->audio_pad);
+ GST_DEBUG_OBJECT (demux, "created audio pad with caps %" GST_PTR_FORMAT,
+ caps);
+ if (caps)
+ gst_caps_unref (caps);
+ }
+ #endif
+
+ /* We need to set caps before adding */
+ gst_element_add_pad (GST_ELEMENT (demux),
+ gst_object_ref (demux->audio_pad));
+ gst_flow_combiner_add_pad (demux->flowcombiner, demux->audio_pad);
+
+ /* We only emit no more pads when we have audio and video. Indeed we can
+ * not trust the FLV header to tell us if there will be only audio or
+ * only video and we would just break discovery of some files */
+ if (demux->audio_pad && demux->video_pad) {
+ GST_DEBUG_OBJECT (demux, "emitting no more pads");
+ gst_element_no_more_pads (GST_ELEMENT (demux));
+ demux->no_more_pads = TRUE;
+ }
+ }
+
+ /* Check if caps have changed */
+ if (G_UNLIKELY (rate != demux->rate || channels != demux->channels ||
+ codec_tag != demux->audio_codec_tag || width != demux->width)) {
+ GST_DEBUG_OBJECT (demux, "audio settings have changed, changing caps");
+
+ gst_buffer_replace (&demux->audio_codec_data, NULL);
+
+ /* Negotiate caps */
+ if (!gst_flv_demux_audio_negotiate (demux, codec_tag, rate, channels,
+ width)) {
+ ret = GST_FLOW_ERROR;
+ goto beach;
+ }
+ }
+
+ /* Check if we have anything to push */
+ if (demux->tag_data_size <= codec_data) {
+ GST_LOG_OBJECT (demux, "Nothing left in this tag, returning");
+ goto beach;
+ }
+
+ /* Create buffer from pad */
+ outbuf = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_MEMORY,
+ 7 + codec_data, demux->tag_data_size - codec_data);
+
+ /* detect large pts gaps and treat them as resyncs */
+ if (gst_flv_demux_update_resync (demux, pts, demux->audio_need_discont,
+ &demux->last_audio_pts, &demux->audio_time_offset)) {
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_RESYNC);
+ }
+
+ /* Fill buffer with data */
+ GST_BUFFER_PTS (outbuf) = pts * GST_MSECOND + demux->audio_time_offset;
+ GST_BUFFER_DTS (outbuf) = GST_BUFFER_PTS (outbuf);
+ GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;
+ GST_BUFFER_OFFSET (outbuf) = demux->audio_offset++;
+ GST_BUFFER_OFFSET_END (outbuf) = demux->audio_offset;
+
+ if (demux->duration == GST_CLOCK_TIME_NONE ||
+ demux->duration < GST_BUFFER_TIMESTAMP (outbuf))
+ demux->duration = GST_BUFFER_TIMESTAMP (outbuf);
+
+ /* Only add audio frames to the index if we have no video,
+ * and if the index is not yet complete */
+ if (!demux->has_video && !demux->indexed) {
+ gst_flv_demux_parse_and_add_index_entry (demux,
+ GST_BUFFER_TIMESTAMP (outbuf), demux->cur_tag_offset, TRUE);
+ }
+
+ if (G_UNLIKELY (demux->audio_need_discont)) {
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
+ demux->audio_need_discont = FALSE;
+ }
+
+ demux->segment.position = GST_BUFFER_TIMESTAMP (outbuf);
+
+ /* Do we need a newsegment event ? */
+ if (G_UNLIKELY (demux->audio_need_segment)) {
+ if (!demux->new_seg_event) {
+ GST_DEBUG_OBJECT (demux, "pushing newsegment from %"
+ GST_TIME_FORMAT " to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (demux->segment.position),
+ GST_TIME_ARGS (demux->segment.stop));
+ demux->segment.start = demux->segment.time = demux->segment.position;
+ demux->new_seg_event = gst_event_new_segment (&demux->segment);
+ gst_event_set_seqnum (demux->new_seg_event, demux->segment_seqnum);
+ } else {
+ GST_DEBUG_OBJECT (demux, "pushing pre-generated newsegment event");
+ }
+
+ gst_pad_push_event (demux->audio_pad, gst_event_ref (demux->new_seg_event));
+
+ demux->audio_need_segment = FALSE;
+ }
+
+ GST_LOG_OBJECT (demux,
+ "pushing %" G_GSIZE_FORMAT " bytes buffer at pts %" GST_TIME_FORMAT
+ " with duration %" GST_TIME_FORMAT ", offset %" G_GUINT64_FORMAT,
+ gst_buffer_get_size (outbuf),
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
+ GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)), GST_BUFFER_OFFSET (outbuf));
+
+ if (!GST_CLOCK_TIME_IS_VALID (demux->audio_start)) {
+ demux->audio_start = GST_BUFFER_TIMESTAMP (outbuf);
+ }
+ if (!GST_CLOCK_TIME_IS_VALID (demux->audio_first_ts)) {
+ demux->audio_first_ts = GST_BUFFER_TIMESTAMP (outbuf);
+ }
+
+ if (G_UNLIKELY (!demux->no_more_pads
+ && (GST_CLOCK_DIFF (demux->audio_start,
+ GST_BUFFER_TIMESTAMP (outbuf)) > NO_MORE_PADS_THRESHOLD))) {
+ GST_DEBUG_OBJECT (demux,
+ "Signalling no-more-pads because no video stream was found"
+ " after 6 seconds of audio");
+ gst_element_no_more_pads (GST_ELEMENT_CAST (demux));
+ demux->no_more_pads = TRUE;
+ }
+
+ /* Push downstream */
+ ret = gst_pad_push (demux->audio_pad, outbuf);
+
+ if (G_UNLIKELY (ret != GST_FLOW_OK) &&
+ demux->segment.rate < 0.0 && ret == GST_FLOW_EOS &&
+ demux->segment.position > demux->segment.stop) {
+ /* In reverse playback we can get a GST_FLOW_EOS when
+ * we are at the end of the segment, so we just need to jump
+ * back to the previous section. */
+ GST_DEBUG_OBJECT (demux, "downstream has reached end of segment");
+ demux->audio_done = TRUE;
+ ret = GST_FLOW_OK;
+ goto beach;
+ }
+
+ ret = gst_flow_combiner_update_pad_flow (demux->flowcombiner,
+ demux->audio_pad, ret);
+
+ if (ret == GST_FLOW_OK) {
+ gst_flv_demux_sync_streams (demux);
+ }
+
+ beach:
+ gst_buffer_unmap (buffer, &map);
+
+ return ret;
+ }
+
+ static gboolean
+ gst_flv_demux_video_negotiate (GstFlvDemux * demux, guint32 codec_tag)
+ {
+ gboolean ret = FALSE;
+ GstCaps *caps = NULL, *old_caps;
+ GstEvent *event;
+ gchar *stream_id;
+
+ /* Generate caps for that pad */
+ switch (codec_tag) {
+ case 2:
+ caps =
+ gst_caps_new_simple ("video/x-flash-video", "flvversion", G_TYPE_INT,
+ 1, NULL);
+ break;
+ case 3:
+ caps = gst_caps_new_empty_simple ("video/x-flash-screen");
+ break;
+ case 4:
+ caps = gst_caps_new_empty_simple ("video/x-vp6-flash");
+ break;
+ case 5:
+ caps = gst_caps_new_empty_simple ("video/x-vp6-alpha");
+ break;
+ case 7:
+ if (!demux->video_codec_data) {
+ GST_DEBUG_OBJECT (demux, "don't have h264 codec data yet");
+ ret = TRUE;
+ goto done;
+ }
+ caps =
+ gst_caps_new_simple ("video/x-h264", "stream-format", G_TYPE_STRING,
+ "avc", NULL);
+ break;
+ /* The following two are non-standard but apparently used, see in ffmpeg
+ * https://git.videolan.org/?p=ffmpeg.git;a=blob;f=libavformat/flvdec.c;h=2bf1e059e1cbeeb79e4af9542da23f4560e1cf59;hb=b18d6c58000beed872d6bb1fe7d0fbe75ae26aef#l254
+ * https://git.videolan.org/?p=ffmpeg.git;a=blob;f=libavformat/flvdec.c;h=2bf1e059e1cbeeb79e4af9542da23f4560e1cf59;hb=b18d6c58000beed872d6bb1fe7d0fbe75ae26aef#l282
+ */
+ case 8:
+ caps = gst_caps_new_empty_simple ("video/x-h263");
+ break;
+ case 9:
+ caps =
+ gst_caps_new_simple ("video/mpeg", "mpegversion", G_TYPE_INT, 4,
+ "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
+ break;
+ default:
+ GST_WARNING_OBJECT (demux, "unsupported video codec tag %u", codec_tag);
+ }
+
+ if (G_UNLIKELY (!caps)) {
+ GST_WARNING_OBJECT (demux, "failed creating caps for video pad");
+ goto beach;
+ }
+
+ if (demux->got_par) {
+ gst_caps_set_simple (caps, "pixel-aspect-ratio", GST_TYPE_FRACTION,
+ demux->par_x, demux->par_y, NULL);
+ }
+
+ if (G_LIKELY (demux->w)) {
+ gst_caps_set_simple (caps, "width", G_TYPE_INT, demux->w, NULL);
+ }
+
+ if (G_LIKELY (demux->h)) {
+ gst_caps_set_simple (caps, "height", G_TYPE_INT, demux->h, NULL);
+ }
+
+ if (G_LIKELY (demux->framerate)) {
+ gint num = 0, den = 0;
+
+ gst_video_guess_framerate (GST_SECOND / demux->framerate, &num, &den);
+ GST_DEBUG_OBJECT (demux->video_pad,
+ "fps to be used on caps %f (as a fraction = %d/%d)", demux->framerate,
+ num, den);
+
+ gst_caps_set_simple (caps, "framerate", GST_TYPE_FRACTION, num, den, NULL);
+ }
+
+ if (demux->video_codec_data) {
+ gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER,
+ demux->video_codec_data, NULL);
+ }
+
+ old_caps = gst_pad_get_current_caps (demux->video_pad);
+ if (!old_caps) {
+ stream_id =
+ gst_pad_create_stream_id (demux->video_pad, GST_ELEMENT_CAST (demux),
+ "video");
+ event = gst_event_new_stream_start (stream_id);
+ if (demux->segment_seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (event, demux->segment_seqnum);
+ g_free (stream_id);
+
+ if (have_group_id (demux))
+ gst_event_set_group_id (event, demux->group_id);
+ gst_pad_push_event (demux->video_pad, event);
+ }
+
+ if (!old_caps || !gst_caps_is_equal (old_caps, caps))
+ ret = gst_pad_set_caps (demux->video_pad, caps);
+ else
+ ret = TRUE;
+
+ if (old_caps)
+ gst_caps_unref (old_caps);
+
+ done:
+ if (G_LIKELY (ret)) {
+ /* Store the caps we have set */
+ demux->video_codec_tag = codec_tag;
+
+ if (caps) {
+ GST_DEBUG_OBJECT (demux->video_pad, "successfully negotiated caps %"
+ GST_PTR_FORMAT, caps);
+
+ gst_flv_demux_push_tags (demux);
+ } else {
+ GST_DEBUG_OBJECT (demux->video_pad, "delayed setting caps");
+ }
+ } else {
+ GST_WARNING_OBJECT (demux->video_pad, "failed negotiating caps %"
+ GST_PTR_FORMAT, caps);
+ }
+
+ if (caps)
+ gst_caps_unref (caps);
+
+ beach:
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_flv_demux_parse_tag_video (GstFlvDemux * demux, GstBuffer * buffer)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint32 dts = 0, codec_data = 1, dts_ext = 0;
+ gint32 cts = 0;
+ gboolean keyframe = FALSE;
+ guint8 flags = 0, codec_tag = 0;
+ GstBuffer *outbuf;
+ GstMapInfo map;
+ guint8 *data;
+
+ g_return_val_if_fail (gst_buffer_get_size (buffer) == demux->tag_size,
+ GST_FLOW_ERROR);
+
+ GST_LOG_OBJECT (demux, "parsing a video tag");
+
+ if (G_UNLIKELY (!demux->video_pad && demux->no_more_pads)) {
+ #ifndef GST_DISABLE_DEBUG
+ if (G_UNLIKELY (!demux->no_video_warned)) {
+ GST_WARNING_OBJECT (demux,
+ "Signaled no-more-pads already but had no video pad -- ignoring");
+ demux->no_video_warned = TRUE;
+ }
+ #endif
+ return GST_FLOW_OK;
+ }
+
+ if (gst_buffer_get_size (buffer) < 12) {
+ GST_ERROR_OBJECT (demux, "Too small tag size");
+ return GST_FLOW_ERROR;
+ }
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = map.data;
+
+ /* Grab information about video tag */
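+ /* Tag layout once the 4-byte type/size prefix has been consumed:
+ * data[0..2] dts in ms (24-bit BE), data[3] dts extension (bits 24..31),
+ * data[4..6] stream id (unused), data[7] frame type (high nibble) and
+ * codec tag (low nibble) */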
+ dts = GST_READ_UINT24_BE (data);
+ /* read the dts extension to 32 bits integer */
+ dts_ext = GST_READ_UINT8 (data + 3);
+ /* Combine them */
+ dts |= dts_ext << 24;
+
+ GST_LOG_OBJECT (demux, "dts bytes %02X %02X %02X %02X (%d)", data[0], data[1],
+ data[2], data[3], dts);
+
+ /* Skip the stream id and go directly to the flags */
+ flags = GST_READ_UINT8 (data + 7);
+
+ /* Keyframe */
+ if ((flags >> 4) == 1) {
+ keyframe = TRUE;
+ }
+ /* Codec tag */
+ codec_tag = flags & 0x0F;
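+ /* codec_data counts the header bytes from the flags byte up to the
+ * payload: 1 for plain codecs, 2 for the VP6 variants (one extra
+ * adjustment byte), 5 for H.264 (packet type plus 24-bit cts) */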
+ if (codec_tag == 4 || codec_tag == 5) {
+ codec_data = 2;
+ } else if (codec_tag == 7) {
+ codec_data = 5;
+
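+ /* cts is a signed 24-bit value; the add/xor dance below sign-extends
+ * it into a 32-bit gint32 */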
+ cts = GST_READ_UINT24_BE (data + 9);
+ cts = (cts + 0xff800000) ^ 0xff800000;
+
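+ /* a negative cts with a magnitude larger than dts would yield a
+ * negative PTS; clamping cts to -dts makes the resulting PTS 0 */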
+ if (cts < 0 && ABS (cts) > dts) {
+ GST_ERROR_OBJECT (demux, "Detected a negative composition time offset "
+ "'%d' that would lead to negative PTS, fixing", cts);
+ cts += ABS (cts) - dts;
+ }
+
+ GST_LOG_OBJECT (demux, "got cts %d", cts);
+ }
+
+ GST_LOG_OBJECT (demux, "video tag with codec tag %u, keyframe (%d) "
+ "(flags %02X)", codec_tag, keyframe, flags);
+
+ if (codec_tag == 7) {
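+ /* AVC packet type: 0 = AVCDecoderConfigurationRecord (codec data),
+ * 1 = NALUs; anything else is warned about below */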
+ guint8 avc_packet_type = GST_READ_UINT8 (data + 8);
+
+ switch (avc_packet_type) {
+ case 0:
+ {
+ if (demux->tag_data_size < codec_data) {
+ GST_ERROR_OBJECT (demux, "Got invalid H.264 codec, ignoring.");
+ break;
+ }
+
+ /* AVCDecoderConfigurationRecord data */
+ GST_LOG_OBJECT (demux, "got an H.264 codec data packet");
+ if (demux->video_codec_data) {
+ gst_buffer_unref (demux->video_codec_data);
+ }
+ demux->video_codec_data = gst_buffer_copy_region (buffer,
+ GST_BUFFER_COPY_MEMORY, 7 + codec_data,
+ demux->tag_data_size - codec_data);
+ /* Use that buffer data in the caps */
+ if (demux->video_pad)
+ gst_flv_demux_video_negotiate (demux, codec_tag);
+ goto beach;
+ }
+ case 1:
+ /* H.264 NALU packet */
+ if (!demux->video_codec_data) {
+ GST_ERROR_OBJECT (demux, "got H.264 video packet before codec data");
+ ret = GST_FLOW_OK;
+ goto beach;
+ }
+ GST_LOG_OBJECT (demux, "got a H.264 NALU video packet");
+ break;
+ default:
+ GST_WARNING_OBJECT (demux, "invalid video packet type %u",
+ avc_packet_type);
+ }
+ }
+
+ /* If we don't have our video pad created, then create it. */
+ if (G_UNLIKELY (!demux->video_pad)) {
+ demux->video_pad =
+ gst_pad_new_from_template (gst_element_class_get_pad_template
+ (GST_ELEMENT_GET_CLASS (demux), "video"), "video");
+ if (G_UNLIKELY (!demux->video_pad)) {
+ GST_WARNING_OBJECT (demux, "failed creating video pad");
+ ret = GST_FLOW_ERROR;
+ goto beach;
+ }
+
+ /* Set functions on the pad */
+ gst_pad_set_query_function (demux->video_pad,
+ GST_DEBUG_FUNCPTR (gst_flv_demux_query));
+ gst_pad_set_event_function (demux->video_pad,
+ GST_DEBUG_FUNCPTR (gst_flv_demux_src_event));
+
+ gst_pad_use_fixed_caps (demux->video_pad);
+
+ /* Make it active */
+ gst_pad_set_active (demux->video_pad, TRUE);
+
+ /* Needs to be active before setting caps */
+ if (!gst_flv_demux_video_negotiate (demux, codec_tag)) {
+ gst_object_unref (demux->video_pad);
+ demux->video_pad = NULL;
+ ret = GST_FLOW_ERROR;
+ goto beach;
+ }
+
+ /* When we've set pixel-aspect-ratio we use that boolean to detect a
+ * metadata tag that would come later and trigger a caps change */
+ demux->got_par = FALSE;
+
+ #ifndef GST_DISABLE_GST_DEBUG
+ {
+ GstCaps *caps;
+
+ caps = gst_pad_get_current_caps (demux->video_pad);
+ GST_DEBUG_OBJECT (demux, "created video pad with caps %" GST_PTR_FORMAT,
+ caps);
+ if (caps)
+ gst_caps_unref (caps);
+ }
+ #endif
+
+ /* We need to set caps before adding */
+ gst_element_add_pad (GST_ELEMENT (demux),
+ gst_object_ref (demux->video_pad));
+ gst_flow_combiner_add_pad (demux->flowcombiner, demux->video_pad);
+
+ /* We only emit no more pads when we have audio and video. Indeed we can
+ * not trust the FLV header to tell us if there will be only audio or
+ * only video and we would just break discovery of some files */
+ if (demux->audio_pad && demux->video_pad) {
+ GST_DEBUG_OBJECT (demux, "emitting no more pads");
+ gst_element_no_more_pads (GST_ELEMENT (demux));
+ demux->no_more_pads = TRUE;
+ }
+ }
+
+ /* Check if caps have changed */
+ if (G_UNLIKELY (codec_tag != demux->video_codec_tag || demux->got_par)) {
+ GST_DEBUG_OBJECT (demux, "video settings have changed, changing caps");
+ if (codec_tag != demux->video_codec_tag)
+ gst_buffer_replace (&demux->video_codec_data, NULL);
+
+ if (!gst_flv_demux_video_negotiate (demux, codec_tag)) {
+ ret = GST_FLOW_ERROR;
+ goto beach;
+ }
+
+ /* When we've set pixel-aspect-ratio we use that boolean to detect a
+ * metadata tag that would come later and trigger a caps change */
+ demux->got_par = FALSE;
+ }
+
+ /* Check if we have anything to push */
+ if (demux->tag_data_size <= codec_data) {
+ GST_LOG_OBJECT (demux, "Nothing left in this tag, returning");
+ goto beach;
+ }
+
+ /* Create buffer from pad */
+ outbuf = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_MEMORY,
+ 7 + codec_data, demux->tag_data_size - codec_data);
+
+ /* detect (and deem to be resyncs) large dts gaps */
+ if (gst_flv_demux_update_resync (demux, dts, demux->video_need_discont,
+ &demux->last_video_dts, &demux->video_time_offset)) {
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_RESYNC);
+ }
+
+ /* Fill buffer with data */
+ GST_LOG_OBJECT (demux, "dts %u pts %u cts %d", dts, dts + cts, cts);
+
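+ /* FLV timestamps are in milliseconds; PTS = DTS + composition offset */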
+ GST_BUFFER_PTS (outbuf) =
+ (dts + cts) * GST_MSECOND + demux->video_time_offset;
+ GST_BUFFER_DTS (outbuf) = dts * GST_MSECOND + demux->video_time_offset;
+ GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;
+ GST_BUFFER_OFFSET (outbuf) = demux->video_offset++;
+ GST_BUFFER_OFFSET_END (outbuf) = demux->video_offset;
+
+ if (demux->duration == GST_CLOCK_TIME_NONE ||
+ demux->duration < GST_BUFFER_TIMESTAMP (outbuf))
+ demux->duration = GST_BUFFER_TIMESTAMP (outbuf);
+
+ if (!keyframe)
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
+
+ if (!demux->indexed) {
+ gst_flv_demux_parse_and_add_index_entry (demux,
+ GST_BUFFER_TIMESTAMP (outbuf), demux->cur_tag_offset, keyframe);
+ }
+
+ if (G_UNLIKELY (demux->video_need_discont)) {
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
+ demux->video_need_discont = FALSE;
+ }
+
+ demux->segment.position = GST_BUFFER_TIMESTAMP (outbuf);
+
+ /* Do we need a newsegment event ? */
+ if (G_UNLIKELY (demux->video_need_segment)) {
+ if (!demux->new_seg_event) {
+ GST_DEBUG_OBJECT (demux, "pushing newsegment from %"
+ GST_TIME_FORMAT " to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (demux->segment.position),
+ GST_TIME_ARGS (demux->segment.stop));
+ demux->segment.start = demux->segment.time = demux->segment.position;
+ demux->new_seg_event = gst_event_new_segment (&demux->segment);
+ if (demux->segment_seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (demux->new_seg_event, demux->segment_seqnum);
+ } else {
+ GST_DEBUG_OBJECT (demux, "pushing pre-generated newsegment event");
+ }
+
+ gst_pad_push_event (demux->video_pad, gst_event_ref (demux->new_seg_event));
+
+ demux->video_need_segment = FALSE;
+ }
+
+ GST_LOG_OBJECT (demux,
+ "pushing %" G_GSIZE_FORMAT " bytes buffer at dts %" GST_TIME_FORMAT
+ " with duration %" GST_TIME_FORMAT ", offset %" G_GUINT64_FORMAT
+ ", keyframe (%d)", gst_buffer_get_size (outbuf),
+ GST_TIME_ARGS (GST_BUFFER_DTS (outbuf)),
+ GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)), GST_BUFFER_OFFSET (outbuf),
+ keyframe);
+
+ if (!GST_CLOCK_TIME_IS_VALID (demux->video_start)) {
+ demux->video_start = GST_BUFFER_TIMESTAMP (outbuf);
+ }
+ if (!GST_CLOCK_TIME_IS_VALID (demux->video_first_ts)) {
+ demux->video_first_ts = GST_BUFFER_TIMESTAMP (outbuf);
+ }
+
+ if (G_UNLIKELY (!demux->no_more_pads
+ && (GST_CLOCK_DIFF (demux->video_start,
+ GST_BUFFER_TIMESTAMP (outbuf)) > NO_MORE_PADS_THRESHOLD))) {
+ GST_DEBUG_OBJECT (demux,
+ "Signalling no-more-pads because no audio stream was found"
+ " after 6 seconds of video");
+ gst_element_no_more_pads (GST_ELEMENT_CAST (demux));
+ demux->no_more_pads = TRUE;
+ }
+
+ /* Push downstream */
+ ret = gst_pad_push (demux->video_pad, outbuf);
+
+ if (G_UNLIKELY (ret != GST_FLOW_OK) &&
+ demux->segment.rate < 0.0 && ret == GST_FLOW_EOS &&
+ demux->segment.position > demux->segment.stop) {
+ /* In reverse playback we can get a GST_FLOW_EOS when
+ * we are at the end of the segment, so we just need to jump
+ * back to the previous section. */
+ GST_DEBUG_OBJECT (demux, "downstream has reached end of segment");
+ demux->video_done = TRUE;
+ ret = GST_FLOW_OK;
+ goto beach;
+ }
+
+ ret = gst_flow_combiner_update_pad_flow (demux->flowcombiner,
+ demux->video_pad, ret);
+
+ if (ret == GST_FLOW_OK) {
+ gst_flv_demux_sync_streams (demux);
+ }
+
+ beach:
+ gst_buffer_unmap (buffer, &map);
+ return ret;
+ }
+
+ static GstClockTime
+ gst_flv_demux_parse_tag_timestamp (GstFlvDemux * demux, gboolean index,
+ GstBuffer * buffer, size_t * tag_size)
+ {
+ guint32 dts = 0, dts_ext = 0;
+ guint32 tag_data_size;
+ guint8 type;
+ gboolean keyframe = TRUE;
+ GstClockTime ret = GST_CLOCK_TIME_NONE;
+ GstMapInfo map;
+ guint8 *data;
+ gsize size;
+
+ g_return_val_if_fail (gst_buffer_get_size (buffer) >= 12,
+ GST_CLOCK_TIME_NONE);
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
+
+ type = data[0];
+
+ if (type != 9 && type != 8 && type != 18) {
+ GST_WARNING_OBJECT (demux, "Unsupported tag type %u", data[0]);
+ goto exit;
+ }
+
+ if (type == 9)
+ demux->has_video = TRUE;
+ else if (type == 8)
+ demux->has_audio = TRUE;
+
+ tag_data_size = GST_READ_UINT24_BE (data + 1);
+
+ if (size >= tag_data_size + 11 + 4) {
+ if (GST_READ_UINT32_BE (data + tag_data_size + 11) != tag_data_size + 11) {
+ GST_WARNING_OBJECT (demux, "Invalid tag size");
+ goto exit;
+ }
+ }
+
+ if (tag_size)
+ *tag_size = tag_data_size + 11 + 4;
+
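+ /* skip the tag type byte and 24-bit data size; the timestamp follows */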
+ data += 4;
+
+ GST_LOG_OBJECT (demux, "dts bytes %02X %02X %02X %02X", data[0], data[1],
+ data[2], data[3]);
+
+ /* Grab the timestamp of the tag */
+ dts = GST_READ_UINT24_BE (data);
+ /* read the dts extension to 32 bits integer */
+ dts_ext = GST_READ_UINT8 (data + 3);
+ /* Combine them */
+ dts |= dts_ext << 24;
+
+ if (type == 9) {
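+ /* for video tags, advance to the flags byte: the frame type sits in
+ * its high nibble (1 = keyframe) */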
+ data += 7;
+
+ keyframe = ((data[0] >> 4) == 1);
+ }
+
+ ret = dts * GST_MSECOND;
+ GST_LOG_OBJECT (demux, "dts: %" GST_TIME_FORMAT, GST_TIME_ARGS (ret));
+
+ if (index && !demux->indexed && (type == 9 || (type == 8
+ && !demux->has_video))) {
+ gst_flv_demux_parse_and_add_index_entry (demux, ret, demux->offset,
+ keyframe);
+ }
+
+ if (demux->duration == GST_CLOCK_TIME_NONE || demux->duration < ret)
+ demux->duration = ret;
+
+ exit:
+ gst_buffer_unmap (buffer, &map);
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_flv_demux_parse_tag_type (GstFlvDemux * demux, GstBuffer * buffer)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint8 tag_type = 0;
+ GstMapInfo map;
+
+ g_return_val_if_fail (gst_buffer_get_size (buffer) >= 4, GST_FLOW_ERROR);
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+ tag_type = map.data[0];
+
+ /* A full tag is 1 byte of type + 3 bytes of data size + 7 more header
+ * bytes + the tag data + 4 bytes of previous tag size. Type and size were
+ * just consumed (FLV_TAG_TYPE_SIZE), so what remains to pull is
+ * 7 + data size + 4 = data size + 11 bytes */
+ demux->tag_data_size = GST_READ_UINT24_BE (map.data + 1);
+ demux->tag_size = demux->tag_data_size + 11;
+
+ GST_LOG_OBJECT (demux, "tag data size is %" G_GUINT64_FORMAT,
+ demux->tag_data_size);
+
+ gst_buffer_unmap (buffer, &map);
+
+ switch (tag_type) {
+ case 9:
+ demux->state = FLV_STATE_TAG_VIDEO;
+ demux->has_video = TRUE;
+ break;
+ case 8:
+ demux->state = FLV_STATE_TAG_AUDIO;
+ demux->has_audio = TRUE;
+ break;
+ case 18:
+ demux->state = FLV_STATE_TAG_SCRIPT;
+ break;
+ default:
+ GST_WARNING_OBJECT (demux, "unsupported tag type %u", tag_type);
+ demux->state = FLV_STATE_SKIP;
+ }
+
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_flv_demux_parse_header (GstFlvDemux * demux, GstBuffer * buffer)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstMapInfo map;
+
+ g_return_val_if_fail (gst_buffer_get_size (buffer) >= 9, GST_FLOW_ERROR);
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+ /* Check for the FLV tag */
+ if (map.data[0] == 'F' && map.data[1] == 'L' && map.data[2] == 'V') {
+ GST_DEBUG_OBJECT (demux, "FLV header detected");
+ } else {
+ if (G_UNLIKELY (demux->strict)) {
+ GST_WARNING_OBJECT (demux, "invalid header tag detected");
+ ret = GST_FLOW_EOS;
+ goto beach;
+ }
+ }
+
+ if (map.data[3] == '1') {
+ GST_DEBUG_OBJECT (demux, "FLV version 1 detected");
+ } else {
+ if (G_UNLIKELY (demux->strict)) {
+ GST_WARNING_OBJECT (demux, "invalid header version detected");
+ ret = GST_FLOW_EOS;
+ goto beach;
+ }
+
+ }
+
+ /* Now look at audio/video flags */
+ {
+ guint8 flags = map.data[4];
+
+ demux->has_video = demux->has_audio = FALSE;
+
+ if (flags & 1) {
+ GST_DEBUG_OBJECT (demux, "there is a video stream");
+ demux->has_video = TRUE;
+ }
+ if (flags & 4) {
+ GST_DEBUG_OBJECT (demux, "there is an audio stream");
+ demux->has_audio = TRUE;
+ }
+ }
+
+ /* do a one-time seekability check */
+ gst_flv_demux_check_seekability (demux);
+
+ /* We don't care about the rest */
+ demux->need_header = FALSE;
+
+ beach:
+ gst_buffer_unmap (buffer, &map);
+ return ret;
+ }
+
+ static void
+ gst_flv_demux_flush (GstFlvDemux * demux, gboolean discont)
+ {
+ GST_DEBUG_OBJECT (demux, "flushing queued data in the FLV demuxer");
+
+ gst_adapter_clear (demux->adapter);
+
+ demux->audio_need_discont = TRUE;
+ demux->video_need_discont = TRUE;
+
+ demux->flushing = FALSE;
+
+ /* Only in push mode and if we're not during a seek */
+ if (!demux->random_access && demux->state != FLV_STATE_SEEK) {
+ /* After a flush we expect a tag_type */
+ demux->state = FLV_STATE_TAG_TYPE;
+ /* We reset the offset and will get one from first push */
+ demux->offset = 0;
+ }
+ }
+
+ static void
+ gst_flv_demux_cleanup (GstFlvDemux * demux)
+ {
+ GST_DEBUG_OBJECT (demux, "cleaning up FLV demuxer");
+
+ demux->state = FLV_STATE_HEADER;
+
+ demux->have_group_id = FALSE;
+ demux->group_id = G_MAXUINT;
+
+ demux->flushing = FALSE;
+ demux->need_header = TRUE;
+ demux->audio_need_segment = TRUE;
+ demux->video_need_segment = TRUE;
+ demux->audio_need_discont = TRUE;
+ demux->video_need_discont = TRUE;
+
+ demux->has_audio = FALSE;
+ demux->has_video = FALSE;
+ demux->got_par = FALSE;
+
+ demux->indexed = FALSE;
+ demux->upstream_seekable = FALSE;
+ demux->file_size = 0;
+ demux->segment_seqnum = 0;
+
+ demux->index_max_pos = 0;
+ demux->index_max_time = 0;
+
+ demux->audio_start = demux->video_start = GST_CLOCK_TIME_NONE;
+ demux->last_audio_pts = demux->last_video_dts = 0;
+ demux->audio_time_offset = demux->video_time_offset = 0;
+
+ demux->no_more_pads = FALSE;
+
+ #ifndef GST_DISABLE_DEBUG
+ demux->no_audio_warned = FALSE;
+ demux->no_video_warned = FALSE;
+ #endif
+
+ gst_segment_init (&demux->segment, GST_FORMAT_TIME);
+
+ demux->w = demux->h = 0;
+ demux->framerate = 0.0;
+ demux->par_x = demux->par_y = 1;
+ demux->video_offset = 0;
+ demux->audio_offset = 0;
+ demux->offset = demux->cur_tag_offset = 0;
+ demux->tag_size = demux->tag_data_size = 0;
+ demux->duration = GST_CLOCK_TIME_NONE;
+
+ if (demux->new_seg_event) {
+ gst_event_unref (demux->new_seg_event);
+ demux->new_seg_event = NULL;
+ }
+
+ gst_adapter_clear (demux->adapter);
+
+ if (demux->audio_codec_data) {
+ gst_buffer_unref (demux->audio_codec_data);
+ demux->audio_codec_data = NULL;
+ }
+
+ if (demux->video_codec_data) {
+ gst_buffer_unref (demux->video_codec_data);
+ demux->video_codec_data = NULL;
+ }
+
+ if (demux->audio_pad) {
+ gst_flow_combiner_remove_pad (demux->flowcombiner, demux->audio_pad);
+ gst_element_remove_pad (GST_ELEMENT (demux), demux->audio_pad);
+ gst_object_unref (demux->audio_pad);
+ demux->audio_pad = NULL;
+ }
+
+ if (demux->video_pad) {
+ gst_flow_combiner_remove_pad (demux->flowcombiner, demux->video_pad);
+ gst_element_remove_pad (GST_ELEMENT (demux), demux->video_pad);
+ gst_object_unref (demux->video_pad);
+ demux->video_pad = NULL;
+ }
+
+ if (demux->times) {
+ g_array_free (demux->times, TRUE);
+ demux->times = NULL;
+ }
+
+ if (demux->filepositions) {
+ g_array_free (demux->filepositions, TRUE);
+ demux->filepositions = NULL;
+ }
+
+ demux->video_bitrate = 0;
+ demux->audio_bitrate = 0;
+
+ gst_flv_demux_clear_tags (demux);
+ }
+
+ /*
+ * Create and push a flushing seek event upstream
+ */
+ static gboolean
+ flv_demux_seek_to_offset (GstFlvDemux * demux, guint64 offset)
+ {
+ GstEvent *event;
+ gboolean res = FALSE;
+
+ GST_DEBUG_OBJECT (demux, "Seeking to %" G_GUINT64_FORMAT, offset);
+
+ event =
+ gst_event_new_seek (1.0, GST_FORMAT_BYTES,
+ GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE, GST_SEEK_TYPE_SET, offset,
+ GST_SEEK_TYPE_NONE, -1);
+ if (demux->segment_seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (event, demux->segment_seqnum);
+
+ res = gst_pad_push_event (demux->sinkpad, event);
+
+ if (res)
+ demux->offset = offset;
+ return res;
+ }
+
+ static GstFlowReturn
+ gst_flv_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstFlvDemux *demux = NULL;
+
+ demux = GST_FLV_DEMUX (parent);
+
+ GST_LOG_OBJECT (demux,
+ "received buffer of %" G_GSIZE_FORMAT " bytes at offset %"
+ G_GUINT64_FORMAT, gst_buffer_get_size (buffer),
+ GST_BUFFER_OFFSET (buffer));
+
+ if (G_UNLIKELY (GST_BUFFER_OFFSET (buffer) == 0)) {
+ GST_DEBUG_OBJECT (demux, "beginning of file, expect header");
+ demux->state = FLV_STATE_HEADER;
+ demux->offset = 0;
+ }
+
+ if (G_UNLIKELY (demux->offset == 0 && GST_BUFFER_OFFSET (buffer) != 0)) {
+ GST_DEBUG_OBJECT (demux, "offset was zero, synchronizing with buffer's");
+ demux->offset = GST_BUFFER_OFFSET (buffer);
+ }
+
+ if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT)) {
+ GST_DEBUG_OBJECT (demux, "Discontinuity");
+ gst_adapter_clear (demux->adapter);
+ }
+
+ gst_adapter_push (demux->adapter, buffer);
+
+ if (demux->seeking) {
+ demux->state = FLV_STATE_SEEK;
+ GST_OBJECT_LOCK (demux);
+ demux->seeking = FALSE;
+ GST_OBJECT_UNLOCK (demux);
+ }
+
+ parse:
+ if (G_UNLIKELY (ret != GST_FLOW_OK)) {
+ GST_DEBUG_OBJECT (demux, "got flow return %s", gst_flow_get_name (ret));
+ goto beach;
+ }
+
+ if (G_UNLIKELY (demux->flushing)) {
+ GST_DEBUG_OBJECT (demux, "we are now flushing, exiting parser loop");
+ ret = GST_FLOW_FLUSHING;
+ goto beach;
+ }
+
+ switch (demux->state) {
+ case FLV_STATE_HEADER:
+ {
+ if (gst_adapter_available (demux->adapter) >= FLV_HEADER_SIZE) {
+ GstBuffer *buffer;
+
+ buffer = gst_adapter_take_buffer (demux->adapter, FLV_HEADER_SIZE);
+
+ ret = gst_flv_demux_parse_header (demux, buffer);
+
+ gst_buffer_unref (buffer);
+ demux->offset += FLV_HEADER_SIZE;
+
+ demux->state = FLV_STATE_TAG_TYPE;
+ goto parse;
+ } else {
+ goto beach;
+ }
+ }
+ case FLV_STATE_TAG_TYPE:
+ {
+ if (gst_adapter_available (demux->adapter) >= FLV_TAG_TYPE_SIZE) {
+ GstBuffer *buffer;
+
+ /* Remember the tag offset in bytes */
+ demux->cur_tag_offset = demux->offset;
+
+ buffer = gst_adapter_take_buffer (demux->adapter, FLV_TAG_TYPE_SIZE);
+
+ ret = gst_flv_demux_parse_tag_type (demux, buffer);
+
+ gst_buffer_unref (buffer);
+ demux->offset += FLV_TAG_TYPE_SIZE;
+
+ /* last tag is not an index => no index/don't know where the index is
+ * seek back to the beginning */
+ if (demux->seek_event && demux->state != FLV_STATE_TAG_SCRIPT)
+ goto no_index;
+
+ goto parse;
+ } else {
+ goto beach;
+ }
+ }
+ case FLV_STATE_TAG_VIDEO:
+ {
+ if (gst_adapter_available (demux->adapter) >= demux->tag_size) {
+ GstBuffer *buffer;
+
+ buffer = gst_adapter_take_buffer (demux->adapter, demux->tag_size);
+
+ ret = gst_flv_demux_parse_tag_video (demux, buffer);
+
+ gst_buffer_unref (buffer);
+ demux->offset += demux->tag_size;
+
+ demux->state = FLV_STATE_TAG_TYPE;
+ goto parse;
+ } else {
+ goto beach;
+ }
+ }
+ case FLV_STATE_TAG_AUDIO:
+ {
+ if (gst_adapter_available (demux->adapter) >= demux->tag_size) {
+ GstBuffer *buffer;
+
+ buffer = gst_adapter_take_buffer (demux->adapter, demux->tag_size);
+
+ ret = gst_flv_demux_parse_tag_audio (demux, buffer);
+
+ gst_buffer_unref (buffer);
+ demux->offset += demux->tag_size;
+
+ demux->state = FLV_STATE_TAG_TYPE;
+ goto parse;
+ } else {
+ goto beach;
+ }
+ }
+ case FLV_STATE_TAG_SCRIPT:
+ {
+ if (gst_adapter_available (demux->adapter) >= demux->tag_size) {
+ GstBuffer *buffer;
+
+ buffer = gst_adapter_take_buffer (demux->adapter, demux->tag_size);
+
+ ret = gst_flv_demux_parse_tag_script (demux, buffer);
+
+ gst_buffer_unref (buffer);
+ demux->offset += demux->tag_size;
+
+ demux->state = FLV_STATE_TAG_TYPE;
+
+ /* if there's a seek event we're here for the index so if we don't have it
+ * we seek back to the beginning */
+ if (demux->seek_event) {
+ if (demux->indexed)
+ demux->state = FLV_STATE_SEEK;
+ else
+ goto no_index;
+ }
+
+ goto parse;
+ } else {
+ goto beach;
+ }
+ }
+ case FLV_STATE_SEEK:
+ {
+ GstEvent *event;
+
+ ret = GST_FLOW_OK;
+
+ if (!demux->indexed) {
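+ /* no index yet: we should be on the last 4 bytes of the file, which
+ * hold the previous-tag-size of the final tag (hopefully the index) */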
+ if (demux->offset == demux->file_size - sizeof (guint32)) {
+ guint64 seek_offset;
+ guint8 *data;
+
+ data = gst_adapter_take (demux->adapter, 4);
+ if (!data)
+ goto no_index;
+
+ seek_offset = demux->file_size - sizeof (guint32) -
+ GST_READ_UINT32_BE (data);
+ g_free (data);
+
+ GST_INFO_OBJECT (demux,
+ "Seeking to beginning of last tag at %" G_GUINT64_FORMAT,
+ seek_offset);
+ demux->state = FLV_STATE_TAG_TYPE;
+ flv_demux_seek_to_offset (demux, seek_offset);
+ goto beach;
+ } else
+ goto no_index;
+ }
+
+ GST_OBJECT_LOCK (demux);
+ event = demux->seek_event;
+ demux->seek_event = NULL;
+ GST_OBJECT_UNLOCK (demux);
+
+ /* calculate and perform seek */
+ if (!flv_demux_handle_seek_push (demux, event))
+ goto seek_failed;
+
+ gst_event_unref (event);
+ demux->state = FLV_STATE_TAG_TYPE;
+ goto beach;
+ }
+ case FLV_STATE_SKIP:
+ /* Skip unknown tags (set in _parse_tag_type()) */
+ if (gst_adapter_available (demux->adapter) >= demux->tag_size) {
+ gst_adapter_flush (demux->adapter, demux->tag_size);
+ demux->offset += demux->tag_size;
+ demux->state = FLV_STATE_TAG_TYPE;
+ goto parse;
+ } else {
+ goto beach;
+ }
+ default:
+ GST_DEBUG_OBJECT (demux, "unexpected demuxer state");
+ }
+
+ beach:
+ return ret;
+
+ /* ERRORS */
+ no_index:
+ {
+ GST_OBJECT_LOCK (demux);
+ demux->seeking = FALSE;
+ gst_event_unref (demux->seek_event);
+ demux->seek_event = NULL;
+ GST_OBJECT_UNLOCK (demux);
+ GST_WARNING_OBJECT (demux,
+ "failed to find an index, seeking back to beginning");
+ flv_demux_seek_to_offset (demux, 0);
+ return GST_FLOW_OK;
+ }
+ seek_failed:
+ {
+ GST_ELEMENT_ERROR (demux, STREAM, DEMUX, (NULL), ("seek failed"));
+ return GST_FLOW_ERROR;
+ }
+
+ }
+
+ static GstFlowReturn
+ gst_flv_demux_pull_range (GstFlvDemux * demux, GstPad * pad, guint64 offset,
+ guint size, GstBuffer ** buffer)
+ {
+ GstFlowReturn ret;
+
+ ret = gst_pad_pull_range (pad, offset, size, buffer);
+ if (G_UNLIKELY (ret != GST_FLOW_OK)) {
+ GST_WARNING_OBJECT (demux,
+ "failed when pulling %d bytes from offset %" G_GUINT64_FORMAT ": %s",
+ size, offset, gst_flow_get_name (ret));
+ *buffer = NULL;
+ return ret;
+ }
+
+ if (G_UNLIKELY (*buffer && gst_buffer_get_size (*buffer) != size)) {
+ GST_WARNING_OBJECT (demux,
+ "partial pull got %" G_GSIZE_FORMAT " when expecting %d from offset %"
+ G_GUINT64_FORMAT, gst_buffer_get_size (*buffer), size, offset);
+ gst_buffer_unref (*buffer);
+ ret = GST_FLOW_EOS;
+ *buffer = NULL;
+ return ret;
+ }
+
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_flv_demux_pull_tag (GstPad * pad, GstFlvDemux * demux)
+ {
+ GstBuffer *buffer = NULL;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ /* Store tag offset */
+ demux->cur_tag_offset = demux->offset;
+
+ /* Get the first 4 bytes to identify tag type and size */
+ if (G_UNLIKELY ((ret = gst_flv_demux_pull_range (demux, pad, demux->offset,
+ FLV_TAG_TYPE_SIZE, &buffer)) != GST_FLOW_OK))
+ goto beach;
+
+ /* Identify tag type */
+ ret = gst_flv_demux_parse_tag_type (demux, buffer);
+
+ gst_buffer_unref (buffer);
+
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
+ goto beach;
+
+ /* Jump over tag type + size */
+ demux->offset += FLV_TAG_TYPE_SIZE;
+
+ /* Pull the whole tag */
+ buffer = NULL;
+ if (G_UNLIKELY ((ret = gst_flv_demux_pull_range (demux, pad, demux->offset,
+ demux->tag_size, &buffer)) != GST_FLOW_OK))
+ goto beach;
+
+ switch (demux->state) {
+ case FLV_STATE_TAG_VIDEO:
+ ret = gst_flv_demux_parse_tag_video (demux, buffer);
+ break;
+ case FLV_STATE_TAG_AUDIO:
+ ret = gst_flv_demux_parse_tag_audio (demux, buffer);
+ break;
+ case FLV_STATE_TAG_SCRIPT:
+ ret = gst_flv_demux_parse_tag_script (demux, buffer);
+ break;
+ default:
+ GST_WARNING_OBJECT (demux, "unexpected state %d", demux->state);
+ }
+
+ gst_buffer_unref (buffer);
+
+ /* Jump over that part we've just parsed */
+ demux->offset += demux->tag_size;
+
+ /* Make sure we reinitialize the tag size */
+ demux->tag_size = 0;
+
+ /* Ready for the next tag */
+ demux->state = FLV_STATE_TAG_TYPE;
+
+ if (G_UNLIKELY (ret == GST_FLOW_NOT_LINKED)) {
+ GST_WARNING_OBJECT (demux, "parsing this tag returned not-linked and "
+ "neither video nor audio are linked");
+ }
+
+ beach:
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_flv_demux_pull_header (GstPad * pad, GstFlvDemux * demux)
+ {
+ GstBuffer *buffer = NULL;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ /* Get the first 9 bytes */
+ if (G_UNLIKELY ((ret = gst_flv_demux_pull_range (demux, pad, demux->offset,
+ FLV_HEADER_SIZE, &buffer)) != GST_FLOW_OK))
+ goto beach;
+
+ ret = gst_flv_demux_parse_header (demux, buffer);
+
+ gst_buffer_unref (buffer);
+
+ /* Jump over the header now */
+ demux->offset += FLV_HEADER_SIZE;
+ demux->state = FLV_STATE_TAG_TYPE;
+
+ beach:
+ return ret;
+ }
+
+ static void
+ gst_flv_demux_move_to_offset (GstFlvDemux * demux, gint64 offset,
+ gboolean reset)
+ {
+ demux->offset = offset;
+
+ /* Tell all the streams we moved to a different position (discont) */
+ demux->audio_need_discont = TRUE;
+ demux->video_need_discont = TRUE;
+
+ /* next section setup */
+ demux->from_offset = -1;
+ demux->audio_done = demux->video_done = FALSE;
+ demux->audio_first_ts = demux->video_first_ts = GST_CLOCK_TIME_NONE;
+
+ if (reset) {
+ demux->from_offset = -1;
+ demux->to_offset = G_MAXINT64;
+ }
+
+ /* If we seeked at the beginning of the file parse the header again */
+ if (G_UNLIKELY (!demux->offset)) {
+ demux->state = FLV_STATE_HEADER;
+ } else { /* or parse a tag */
+ demux->state = FLV_STATE_TAG_TYPE;
+ }
+ }
+
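+ /* Reverse playback: when the current section is exhausted, look up the
+ * keyframe preceding from_offset in the index and restart pulling there */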
+ static GstFlowReturn
+ gst_flv_demux_seek_to_prev_keyframe (GstFlvDemux * demux)
+ {
+ GstFlowReturn ret = GST_FLOW_EOS;
+ GstIndex *index;
+ GstIndexEntry *entry = NULL;
+
+ GST_DEBUG_OBJECT (demux,
+ "terminated section started at offset %" G_GINT64_FORMAT,
+ demux->from_offset);
+
+ /* we are done once each stream either produced no data or already
+ * reached a timestamp before the segment start */
+ if ((!GST_CLOCK_TIME_IS_VALID (demux->audio_first_ts) ||
+ demux->audio_first_ts < demux->segment.start) &&
+ (!GST_CLOCK_TIME_IS_VALID (demux->video_first_ts) ||
+ demux->video_first_ts < demux->segment.start))
+ goto done;
+
+ if (demux->from_offset <= 0)
+ goto done;
+
+ GST_DEBUG_OBJECT (demux, "locating previous position");
+
+ index = gst_flv_demux_get_index (GST_ELEMENT (demux));
+
+ /* locate index entry before previous start position */
+ if (index) {
+ entry = gst_index_get_assoc_entry (index, demux->index_id,
+ GST_INDEX_LOOKUP_BEFORE, GST_ASSOCIATION_FLAG_KEY_UNIT,
+ GST_FORMAT_BYTES, demux->from_offset - 1);
+
+ if (entry) {
+ gint64 bytes = 0, time = 0;
+
+ gst_index_entry_assoc_map (entry, GST_FORMAT_BYTES, &bytes);
+ gst_index_entry_assoc_map (entry, GST_FORMAT_TIME, &time);
+
+ GST_DEBUG_OBJECT (demux, "found index entry for %" G_GINT64_FORMAT
+ " at %" GST_TIME_FORMAT ", seeking to %" G_GINT64_FORMAT,
+ demux->offset - 1, GST_TIME_ARGS (time), bytes);
+
+ /* setup for next section */
+ demux->to_offset = demux->from_offset;
+ gst_flv_demux_move_to_offset (demux, bytes, FALSE);
+ ret = GST_FLOW_OK;
+ }
+
+ gst_object_unref (index);
+ }
+
+ done:
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_flv_demux_create_index (GstFlvDemux * demux, gint64 pos, GstClockTime ts)
+ {
+ gint64 size;
+ size_t tag_size;
+ guint64 old_offset;
+ GstBuffer *buffer;
+ GstClockTime tag_time;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ if (!gst_pad_peer_query_duration (demux->sinkpad, GST_FORMAT_BYTES, &size))
+ return GST_FLOW_OK;
+
+ GST_DEBUG_OBJECT (demux, "building index at %" G_GINT64_FORMAT
+ " looking for time %" GST_TIME_FORMAT, pos, GST_TIME_ARGS (ts));
+
+ old_offset = demux->offset;
+ demux->offset = pos;
+
+ buffer = NULL;
+ while ((ret = gst_flv_demux_pull_range (demux, demux->sinkpad, demux->offset,
+ 12, &buffer)) == GST_FLOW_OK) {
+ tag_time =
+ gst_flv_demux_parse_tag_timestamp (demux, TRUE, buffer, &tag_size);
+
+ gst_buffer_unref (buffer);
+ buffer = NULL;
+
+ if (G_UNLIKELY (tag_time == GST_CLOCK_TIME_NONE || tag_time > ts))
+ goto exit;
+
+ demux->offset += tag_size;
+ }
+
+ if (ret == GST_FLOW_EOS) {
+ /* file ran out, so mark we have complete index */
+ demux->indexed = TRUE;
+ ret = GST_FLOW_OK;
+ }
+
+ exit:
+ demux->offset = old_offset;
+
+ return ret;
+ }
+
+ static gint64
+ gst_flv_demux_get_metadata (GstFlvDemux * demux)
+ {
+ gint64 ret = 0, offset;
+ size_t tag_size, size;
+ GstBuffer *buffer = NULL;
+ GstMapInfo map;
+
+ if (!gst_pad_peer_query_duration (demux->sinkpad, GST_FORMAT_BYTES, &offset))
+ goto exit;
+
+ ret = offset;
+ GST_DEBUG_OBJECT (demux, "upstream size: %" G_GINT64_FORMAT, offset);
+ if (G_UNLIKELY (offset < 4))
+ goto exit;
+
+ offset -= 4;
+ if (GST_FLOW_OK != gst_flv_demux_pull_range (demux, demux->sinkpad, offset,
+ 4, &buffer))
+ goto exit;
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ tag_size = GST_READ_UINT32_BE (map.data);
+ gst_buffer_unmap (buffer, &map);
+ GST_DEBUG_OBJECT (demux, "last tag size: %" G_GSIZE_FORMAT, tag_size);
+ gst_buffer_unref (buffer);
+ buffer = NULL;
+
+ if (G_UNLIKELY (offset < tag_size))
+ goto exit;
+
+ offset -= tag_size;
+ if (GST_FLOW_OK != gst_flv_demux_pull_range (demux, demux->sinkpad, offset,
+ 12, &buffer))
+ goto exit;
+
+ /* a consistency check */
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ size = GST_READ_UINT24_BE (map.data + 1);
+ if (size != tag_size - 11) {
+ gst_buffer_unmap (buffer, &map);
+ GST_DEBUG_OBJECT (demux,
+ "tag size %" G_GSIZE_FORMAT ", expected %" G_GSIZE_FORMAT
+ ", corrupt or truncated file", size, tag_size - 11);
+ goto exit;
+ }
+
+ /* try to update duration with timestamp in any case */
+ gst_flv_demux_parse_tag_timestamp (demux, FALSE, buffer, &size);
+
+ /* maybe get some more metadata */
+ if (map.data[0] == 18) {
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+ buffer = NULL;
+ GST_DEBUG_OBJECT (demux, "script tag, pulling it to parse");
+ offset += 4;
+ if (GST_FLOW_OK == gst_flv_demux_pull_range (demux, demux->sinkpad, offset,
+ tag_size, &buffer))
+ gst_flv_demux_parse_tag_script (demux, buffer);
+ } else {
+ gst_buffer_unmap (buffer, &map);
+ }
+
+ exit:
+ if (buffer)
+ gst_buffer_unref (buffer);
+
+ return ret;
+ }
+
+ static void
+ gst_flv_demux_loop (GstPad * pad)
+ {
+ GstFlvDemux *demux = NULL;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ demux = GST_FLV_DEMUX (gst_pad_get_parent (pad));
+
+ /* pull in data */
+ switch (demux->state) {
+ case FLV_STATE_TAG_TYPE:
+ if (demux->from_offset == -1)
+ demux->from_offset = demux->offset;
+ ret = gst_flv_demux_pull_tag (pad, demux);
+ /* if we have seen real data, we probably passed a possible metadata
+ * header located at start. So if we do not yet have an index,
+ * try to pick up metadata (index, duration) at the end */
+ if (G_UNLIKELY (!demux->file_size && !demux->indexed &&
+ (demux->has_video || demux->has_audio)))
+ demux->file_size = gst_flv_demux_get_metadata (demux);
+ break;
+ case FLV_STATE_DONE:
+ ret = GST_FLOW_EOS;
+ break;
+ case FLV_STATE_SEEK:
+ /* seek issued with insufficient index;
+ * scan for index in task thread from current maximum offset to
+ * desired time and then perform seek */
+ /* TODO maybe some buffering message or so to indicate scan progress */
+ ret = gst_flv_demux_create_index (demux, demux->index_max_pos,
+ demux->seek_time);
+ if (ret != GST_FLOW_OK)
+ goto pause;
+ /* position and state arranged by seek,
+ * also unrefs event */
+ gst_flv_demux_handle_seek_pull (demux, demux->seek_event, FALSE);
+ demux->seek_event = NULL;
+ break;
+ default:
+ ret = gst_flv_demux_pull_header (pad, demux);
+ /* index scans start after header */
+ demux->index_max_pos = demux->offset;
+ break;
+ }
+
+ if (demux->segment.rate < 0.0) {
+ /* check end of section */
+ if ((gint64) demux->offset >= demux->to_offset ||
+ demux->segment.position >= demux->segment.stop + 2 * GST_SECOND ||
+ (demux->audio_done && demux->video_done))
+ ret = gst_flv_demux_seek_to_prev_keyframe (demux);
+ } else {
+ /* check EOS condition */
+ if ((demux->segment.stop != -1) &&
+ (demux->segment.position >= demux->segment.stop)) {
+ ret = GST_FLOW_EOS;
+ }
+ }
+
+ /* pause if something went wrong or at end */
+ if (G_UNLIKELY (ret != GST_FLOW_OK) && !(ret == GST_FLOW_NOT_LINKED
+ && !demux->no_more_pads))
+ goto pause;
+
+ gst_object_unref (demux);
+
+ return;
+
+ pause:
+ {
+ const gchar *reason = gst_flow_get_name (ret);
+ GstMessage *message;
+ GstEvent *event;
+
+ GST_LOG_OBJECT (demux, "pausing task, reason %s", reason);
+ gst_pad_pause_task (pad);
+
+ if (ret == GST_FLOW_EOS) {
+ /* handle end-of-stream/segment */
+ /* so align our position with the end of it, if there is one;
+ * this ensures a subsequent seek will arrive at correct base/acc time */
+ if (demux->segment.rate > 0.0 &&
+ GST_CLOCK_TIME_IS_VALID (demux->segment.stop))
+ demux->segment.position = demux->segment.stop;
+ else if (demux->segment.rate < 0.0)
+ demux->segment.position = demux->segment.start;
+
+ /* perform EOS logic */
+ if (!demux->no_more_pads) {
+ gst_element_no_more_pads (GST_ELEMENT_CAST (demux));
+ demux->no_more_pads = TRUE;
+ }
+
+ if (demux->segment.flags & GST_SEGMENT_FLAG_SEGMENT) {
+ gint64 stop;
+
+ /* for segment playback we need to post when (in stream time)
+ * we stopped, this is either stop (when set) or the duration. */
+ if ((stop = demux->segment.stop) == -1)
+ stop = demux->segment.duration;
+
+ if (demux->segment.rate >= 0) {
+ GST_LOG_OBJECT (demux, "Sending segment done, at end of segment");
+ message = gst_message_new_segment_done (GST_OBJECT_CAST (demux),
+ GST_FORMAT_TIME, stop);
+ gst_message_set_seqnum (message, demux->segment_seqnum);
+ gst_element_post_message (GST_ELEMENT_CAST (demux), message);
+ event = gst_event_new_segment_done (GST_FORMAT_TIME, stop);
+ gst_event_set_seqnum (event, demux->segment_seqnum);
+ gst_flv_demux_push_src_event (demux, event);
+ } else { /* Reverse playback */
+ GST_LOG_OBJECT (demux, "Sending segment done, at beginning of "
+ "segment");
+ message = gst_message_new_segment_done (GST_OBJECT_CAST (demux),
+ GST_FORMAT_TIME, demux->segment.start);
+ gst_message_set_seqnum (message, demux->segment_seqnum);
+ gst_element_post_message (GST_ELEMENT_CAST (demux), message);
+ event = gst_event_new_segment_done (GST_FORMAT_TIME,
+ demux->segment.start);
+ gst_event_set_seqnum (event, demux->segment_seqnum);
+ gst_flv_demux_push_src_event (demux, event);
+ }
+ } else {
+ /* normal playback, send EOS to all linked pads */
+ if (!demux->no_more_pads) {
+ gst_element_no_more_pads (GST_ELEMENT (demux));
+ demux->no_more_pads = TRUE;
+ }
+
+ GST_LOG_OBJECT (demux, "Sending EOS, at end of stream");
+ if (!demux->audio_pad && !demux->video_pad) {
+ GST_ELEMENT_ERROR (demux, STREAM, FAILED,
+ ("Internal data stream error."), ("Got EOS before any data"));
+ } else {
+ event = gst_event_new_eos ();
+ if (demux->segment_seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (event, demux->segment_seqnum);
+ if (!gst_flv_demux_push_src_event (demux, event))
+ GST_WARNING_OBJECT (demux, "failed pushing EOS on streams");
+ }
+ }
+ } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
+ GST_ELEMENT_FLOW_ERROR (demux, ret);
+ event = gst_event_new_eos ();
+ if (demux->segment_seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (event, demux->segment_seqnum);
+ gst_flv_demux_push_src_event (demux, event);
+ }
+ gst_object_unref (demux);
+ return;
+ }
+ }
+
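+ /* Map the segment position (time) to a byte offset via the index,
+ * honouring the SNAP_AFTER and KEY_UNIT seek flags; returns 0 when no
+ * index entry is found */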
+ static guint64
+ gst_flv_demux_find_offset (GstFlvDemux * demux, GstSegment * segment,
+ GstSeekFlags seek_flags)
+ {
+ gint64 bytes = 0;
+ gint64 time = 0;
+ GstIndex *index;
+ GstIndexEntry *entry;
+
+ g_return_val_if_fail (segment != NULL, 0);
+
+ time = segment->position;
+
+ index = gst_flv_demux_get_index (GST_ELEMENT (demux));
+
+ if (index) {
+ /* Let's check if we have an index entry for that seek time */
+ entry = gst_index_get_assoc_entry (index, demux->index_id,
+ seek_flags & GST_SEEK_FLAG_SNAP_AFTER ?
+ GST_INDEX_LOOKUP_AFTER : GST_INDEX_LOOKUP_BEFORE,
+ GST_ASSOCIATION_FLAG_KEY_UNIT, GST_FORMAT_TIME, time);
+
+ if (entry) {
+ gst_index_entry_assoc_map (entry, GST_FORMAT_BYTES, &bytes);
+ gst_index_entry_assoc_map (entry, GST_FORMAT_TIME, &time);
+
+ GST_DEBUG_OBJECT (demux, "found index entry for %" GST_TIME_FORMAT
+ " at %" GST_TIME_FORMAT ", seeking to %" G_GINT64_FORMAT,
+ GST_TIME_ARGS (segment->position), GST_TIME_ARGS (time), bytes);
+
+ /* Key frame seeking */
+ if (seek_flags & GST_SEEK_FLAG_KEY_UNIT) {
+ /* Adjust the segment so that the keyframe fits in */
+ segment->start = segment->time = time;
+ segment->position = time;
+ }
+ } else {
+ GST_DEBUG_OBJECT (demux, "no index entry found for %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (segment->start));
+ }
+
+ gst_object_unref (index);
+ }
+
+ return bytes;
+ }
+
+ static gboolean
+ flv_demux_handle_seek_push (GstFlvDemux * demux, GstEvent * event)
+ {
+ GstFormat format;
+ GstSeekFlags flags;
+ GstSeekType start_type, stop_type;
+ gint64 start, stop;
+ gdouble rate;
+ gboolean update, flush, ret;
+ GstSegment seeksegment;
+
+ gst_event_parse_seek (event, &rate, &format, &flags,
+ &start_type, &start, &stop_type, &stop);
+
+ if (format != GST_FORMAT_TIME)
+ goto wrong_format;
+
+ flush = ! !(flags & GST_SEEK_FLAG_FLUSH);
+
+ /* Work on a copy until we are sure the seek succeeded. */
+ memcpy (&seeksegment, &demux->segment, sizeof (GstSegment));
+
+ GST_DEBUG_OBJECT (demux, "segment before configure %" GST_SEGMENT_FORMAT,
+ &demux->segment);
+
+ /* Apply the seek to our segment */
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
+ start_type, start, stop_type, stop, &update);
+
+ GST_DEBUG_OBJECT (demux, "segment configured %" GST_SEGMENT_FORMAT,
+ &seeksegment);
+
+ if (flush || seeksegment.position != demux->segment.position) {
+ /* Do the actual seeking */
+ guint64 offset = gst_flv_demux_find_offset (demux, &seeksegment, flags);
+ GstEvent *seek_event;
+
+ GST_DEBUG_OBJECT (demux, "generating an upstream seek at position %"
+ G_GUINT64_FORMAT, offset);
+ seek_event = gst_event_new_seek (seeksegment.rate, GST_FORMAT_BYTES,
+ flags | GST_SEEK_FLAG_ACCURATE, GST_SEEK_TYPE_SET,
+ offset, GST_SEEK_TYPE_NONE, 0);
+ /* propagate the original seek's seqnum on the upstream byte seek; the
+ * original event remains ours and is unreffed below on success */
+ gst_event_set_seqnum (seek_event, gst_event_get_seqnum (event));
+ ret = gst_pad_push_event (demux->sinkpad, seek_event);
+ if (G_UNLIKELY (!ret)) {
+ GST_WARNING_OBJECT (demux, "upstream seek failed");
+ }
+
+ gst_flow_combiner_reset (demux->flowcombiner);
+ /* Tell all the streams we moved to a different position (discont) */
+ demux->audio_need_discont = TRUE;
+ demux->video_need_discont = TRUE;
+ } else {
+ ret = TRUE;
+ }
+
+ if (ret) {
+ /* Ok seek succeeded, take the newly configured segment */
+ memcpy (&demux->segment, &seeksegment, sizeof (GstSegment));
+
+ /* Tell all the streams a new segment is needed */
+ demux->audio_need_segment = TRUE;
+ demux->video_need_segment = TRUE;
+ /* Clean any potential newsegment event kept for the streams. The first
+ * stream needing a new segment will create a new one. */
+ if (G_UNLIKELY (demux->new_seg_event)) {
+ gst_event_unref (demux->new_seg_event);
+ demux->new_seg_event = NULL;
+ }
+ GST_DEBUG_OBJECT (demux, "preparing newsegment from %"
+ GST_TIME_FORMAT " to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (demux->segment.start),
+ GST_TIME_ARGS (demux->segment.stop));
+ demux->new_seg_event = gst_event_new_segment (&demux->segment);
+ if (demux->segment_seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (demux->new_seg_event, demux->segment_seqnum);
+ gst_event_unref (event);
+ } else {
+ ret = gst_pad_push_event (demux->sinkpad, event);
+ }
+
+ return ret;
+
+ /* ERRORS */
+ wrong_format:
+ {
+ GST_WARNING_OBJECT (demux, "we only support seeking in TIME format");
+ gst_event_unref (event);
+ return FALSE;
+ }
+ }
+
+ static gboolean
+ gst_flv_demux_handle_seek_push (GstFlvDemux * demux, GstEvent * event)
+ {
+ GstFormat format;
+
+ gst_event_parse_seek (event, NULL, &format, NULL, NULL, NULL, NULL, NULL);
+
+ if (format != GST_FORMAT_TIME) {
+ GST_WARNING_OBJECT (demux, "we only support seeking in TIME format");
+ gst_event_unref (event);
+ return FALSE;
+ }
+
+ /* First try upstream */
+ if (gst_pad_push_event (demux->sinkpad, gst_event_ref (event))) {
+ GST_DEBUG_OBJECT (demux, "Upstream successfully seeked");
+ gst_event_unref (event);
+ return TRUE;
+ }
+
+ if (!demux->indexed) {
+ guint64 seek_offset = 0;
+ gboolean building_index;
+
+ GST_OBJECT_LOCK (demux);
+ /* handle the seek in the chain function */
+ demux->seeking = TRUE;
+ demux->state = FLV_STATE_SEEK;
+
+ /* copy the event */
+ gst_event_replace (&demux->seek_event, event);
+
+ /* set the building_index flag so that only one thread can setup the
+ * structures for index seeking. */
+ building_index = demux->building_index;
+ if (!building_index) {
+ demux->building_index = TRUE;
+ if (!demux->file_size
+ && !gst_pad_peer_query_duration (demux->sinkpad, GST_FORMAT_BYTES,
+ &demux->file_size)) {
+ GST_WARNING_OBJECT (demux, "Failed to query upstream file size");
+ GST_OBJECT_UNLOCK (demux);
+ return FALSE;
+ }
+
+ /* We hope the last tag is a script data object containing an index.
+ * The size of that tag is stored in the final 4 bytes of the file, so we
+ * seek to the beginning of the tag, parse it and hopefully obtain an
+ * index */
+ seek_offset = demux->file_size - sizeof (guint32);
+ GST_DEBUG_OBJECT (demux,
+ "File size obtained, seeking to %" G_GUINT64_FORMAT, seek_offset);
+ }
+ GST_OBJECT_UNLOCK (demux);
+
+ if (!building_index) {
+ GST_INFO_OBJECT (demux, "Seeking to last 4 bytes at %" G_GUINT64_FORMAT,
+ seek_offset);
+ return flv_demux_seek_to_offset (demux, seek_offset);
+ }
+
+ /* FIXME: we have to always return true so that we don't block the seek
+ * thread.
+ * Note: maybe it is OK to return true if we're still building the index */
+ return TRUE;
+ }
+
+ return flv_demux_handle_seek_push (demux, event);
+ }
+
+ static gboolean
+ gst_flv_demux_handle_seek_pull (GstFlvDemux * demux, GstEvent * event,
+ gboolean seeking)
+ {
+ GstFormat format;
+ GstSeekFlags flags;
+ GstSeekType start_type, stop_type;
+ gint64 start, stop;
+ gdouble rate;
+ gboolean update, flush, ret = FALSE;
+ GstSegment seeksegment;
+ GstEvent *flush_event;
+ GstMessage *message;
+ guint32 seqnum;
+
+ gst_event_parse_seek (event, &rate, &format, &flags,
+ &start_type, &start, &stop_type, &stop);
+ seqnum = gst_event_get_seqnum (event);
+
+ if (format != GST_FORMAT_TIME)
+ goto wrong_format;
+
+ /* mark seeking thread entering flushing/pausing */
+ GST_OBJECT_LOCK (demux);
+ if (seeking)
+ demux->seeking = seeking;
+ GST_OBJECT_UNLOCK (demux);
+
+ flush = ! !(flags & GST_SEEK_FLAG_FLUSH);
+
+ if (flush) {
+ /* Send flush-start upstream and downstream to make sure data flow and
+ * loops are idle */
+ flush_event = gst_event_new_flush_start ();
+ gst_event_set_seqnum (flush_event, seqnum);
+ gst_flv_demux_push_src_event (demux, flush_event);
+
+ flush_event = gst_event_new_flush_start ();
+ gst_event_set_seqnum (flush_event, seqnum);
+ gst_pad_push_event (demux->sinkpad, flush_event);
+ } else {
+ /* Pause the pulling task */
+ gst_pad_pause_task (demux->sinkpad);
+ }
+
+ /* Take the stream lock */
+ GST_PAD_STREAM_LOCK (demux->sinkpad);
+ demux->segment_seqnum = seqnum;
+
+ if (flush) {
+ /* Stop flushing upstream; we need to pull */
+ flush_event = gst_event_new_flush_stop (TRUE);
+ gst_event_set_seqnum (flush_event, seqnum);
+ gst_pad_push_event (demux->sinkpad, flush_event);
+ }
+
+ /* Work on a copy until we are sure the seek succeeded. */
+ memcpy (&seeksegment, &demux->segment, sizeof (GstSegment));
+
+ GST_DEBUG_OBJECT (demux, "segment before configure %" GST_SEGMENT_FORMAT,
+ &demux->segment);
+
+ /* Apply the seek to our segment */
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
+ start_type, start, stop_type, stop, &update);
+
+ GST_DEBUG_OBJECT (demux, "segment configured %" GST_SEGMENT_FORMAT,
+ &seeksegment);
+
+ if (flush || seeksegment.position != demux->segment.position) {
+ /* Do the actual seeking */
+ /* the index is reliable if it is complete or we do not go too far ahead */
+ if (seeking && !demux->indexed &&
+ seeksegment.position > demux->index_max_time + 10 * GST_SECOND) {
+ GST_DEBUG_OBJECT (demux, "delaying seek to post-scan; "
+ "index only up to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (demux->index_max_time));
+ /* stop flushing for now */
+ if (flush) {
+ flush_event = gst_event_new_flush_stop (TRUE);
+ gst_event_set_seqnum (flush_event, seqnum);
+ gst_flv_demux_push_src_event (demux, flush_event);
+ }
+ /* delegate scanning and index building to task thread to avoid
+ * occupying main (UI) loop */
+ if (demux->seek_event)
+ gst_event_unref (demux->seek_event);
+ demux->seek_event = gst_event_ref (event);
+ demux->seek_time = seeksegment.position;
+ demux->state = FLV_STATE_SEEK;
+ /* do not know about success yet, but we did care and handled it */
+ ret = TRUE;
+ goto exit;
+ }
+
+ /* now index should be as reliable as it can be for current purpose */
+ gst_flv_demux_move_to_offset (demux,
+ gst_flv_demux_find_offset (demux, &seeksegment, flags), TRUE);
+ ret = TRUE;
+ } else {
+ ret = TRUE;
+ }
+
+ if (flush) {
+ /* Stop flushing, the sinks are at time 0 now */
+ flush_event = gst_event_new_flush_stop (TRUE);
+ gst_event_set_seqnum (flush_event, seqnum);
+ gst_flv_demux_push_src_event (demux, flush_event);
+ }
+
+ if (ret) {
+ /* Ok seek succeeded, take the newly configured segment */
+ memcpy (&demux->segment, &seeksegment, sizeof (GstSegment));
+
+ /* Notify about the start of a new segment */
+ if (demux->segment.flags & GST_SEGMENT_FLAG_SEGMENT) {
+ message = gst_message_new_segment_start (GST_OBJECT (demux),
+ demux->segment.format, demux->segment.position);
+ gst_message_set_seqnum (message, seqnum);
+ gst_element_post_message (GST_ELEMENT (demux), message);
+ }
+
+ gst_flow_combiner_reset (demux->flowcombiner);
+ /* Tell all the streams a new segment is needed */
+ demux->audio_need_segment = TRUE;
+ demux->video_need_segment = TRUE;
+ /* Clean any potential newsegment event kept for the streams. The first
+ * stream needing a new segment will create a new one. */
+ if (G_UNLIKELY (demux->new_seg_event)) {
+ gst_event_unref (demux->new_seg_event);
+ demux->new_seg_event = NULL;
+ }
+ GST_DEBUG_OBJECT (demux, "preparing newsegment from %"
+ GST_TIME_FORMAT " to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (demux->segment.start),
+ GST_TIME_ARGS (demux->segment.stop));
+ demux->new_seg_event = gst_event_new_segment (&demux->segment);
+ gst_event_set_seqnum (demux->new_seg_event, seqnum);
+ }
+
+ exit:
+ GST_OBJECT_LOCK (demux);
+ seeking = demux->seeking && !seeking;
+ demux->seeking = FALSE;
+ GST_OBJECT_UNLOCK (demux);
+
+ /* if we detect an external seek having started (and possibly already having
+ * flushed), do not restart task to give it a chance.
+ * Otherwise external one's flushing will take care to pause task */
+ if (seeking) {
+ gst_pad_pause_task (demux->sinkpad);
+ } else {
+ gst_pad_start_task (demux->sinkpad,
+ (GstTaskFunction) gst_flv_demux_loop, demux->sinkpad, NULL);
+ }
+
+ GST_PAD_STREAM_UNLOCK (demux->sinkpad);
+
+ gst_event_unref (event);
+ return ret;
+
+ /* ERRORS */
+ wrong_format:
+ {
+ GST_WARNING_OBJECT (demux, "we only support seeking in TIME format");
+ gst_event_unref (event);
+ return ret;
+ }
+ }
+
+ /* If we can pull that's preferred */
+ static gboolean
+ gst_flv_demux_sink_activate (GstPad * sinkpad, GstObject * parent)
+ {
+ GstQuery *query;
+ gboolean pull_mode;
+
+ query = gst_query_new_scheduling ();
+
+ if (!gst_pad_peer_query (sinkpad, query)) {
+ gst_query_unref (query);
+ goto activate_push;
+ }
+
+ pull_mode = gst_query_has_scheduling_mode_with_flags (query,
+ GST_PAD_MODE_PULL, GST_SCHEDULING_FLAG_SEEKABLE);
+ gst_query_unref (query);
+
+ if (!pull_mode)
+ goto activate_push;
+
+ GST_DEBUG_OBJECT (sinkpad, "activating pull");
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PULL, TRUE);
+
+ activate_push:
+ {
+ GST_DEBUG_OBJECT (sinkpad, "activating push");
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PUSH, TRUE);
+ }
+ }
+
+ static gboolean
+ gst_flv_demux_sink_activate_mode (GstPad * sinkpad, GstObject * parent,
+ GstPadMode mode, gboolean active)
+ {
+ gboolean res;
+ GstFlvDemux *demux;
+
+ demux = GST_FLV_DEMUX (parent);
+
+ switch (mode) {
+ case GST_PAD_MODE_PUSH:
+ demux->random_access = FALSE;
+ res = TRUE;
+ break;
+ case GST_PAD_MODE_PULL:
+ if (active) {
+ demux->random_access = TRUE;
+ demux->segment_seqnum = gst_util_seqnum_next ();
+ res = gst_pad_start_task (sinkpad, (GstTaskFunction) gst_flv_demux_loop,
+ sinkpad, NULL);
+ } else {
+ demux->random_access = FALSE;
+ res = gst_pad_stop_task (sinkpad);
+ }
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+ return res;
+ }
+
+ static gboolean
+ gst_flv_demux_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+ {
+ GstFlvDemux *demux;
+ gboolean ret = FALSE;
+
+ demux = GST_FLV_DEMUX (parent);
+
+ GST_DEBUG_OBJECT (demux, "handling event %s", GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_FLUSH_START:
+ GST_DEBUG_OBJECT (demux, "trying to force chain function to exit");
+ demux->flushing = TRUE;
+ ret = gst_flv_demux_push_src_event (demux, event);
+ break;
+ case GST_EVENT_FLUSH_STOP:
+ GST_DEBUG_OBJECT (demux, "flushing FLV demuxer");
+ gst_flv_demux_flush (demux, TRUE);
+ ret = gst_flv_demux_push_src_event (demux, event);
+ break;
+ case GST_EVENT_EOS:
+ {
+ GstIndex *index;
+
+ GST_DEBUG_OBJECT (demux, "received EOS");
+
+ index = gst_flv_demux_get_index (GST_ELEMENT (demux));
+
+ if (index) {
+ GST_DEBUG_OBJECT (demux, "committing index");
+ gst_index_commit (index, demux->index_id);
+ gst_object_unref (index);
+ }
+
+ if (!demux->audio_pad && !demux->video_pad) {
+ GST_ELEMENT_ERROR (demux, STREAM, FAILED,
+ ("Internal data stream error."), ("Got EOS before any data"));
+ gst_event_unref (event);
+ } else {
+ if (!demux->no_more_pads) {
+ gst_element_no_more_pads (GST_ELEMENT (demux));
+ demux->no_more_pads = TRUE;
+ }
+
+ if (!gst_flv_demux_push_src_event (demux, event))
+ GST_WARNING_OBJECT (demux, "failed pushing EOS on streams");
+ }
+ ret = TRUE;
+ break;
+ }
+ case GST_EVENT_SEGMENT:
+ {
+ GstSegment in_segment;
+
+ GST_DEBUG_OBJECT (demux, "received new segment");
+
+ gst_event_copy_segment (event, &in_segment);
+ demux->segment_seqnum = gst_event_get_seqnum (event);
+
+ if (in_segment.format == GST_FORMAT_TIME) {
+ /* time segment, this is perfect, copy over the values. */
+ memcpy (&demux->segment, &in_segment, sizeof (in_segment));
+
+ GST_DEBUG_OBJECT (demux, "NEWSEGMENT: %" GST_SEGMENT_FORMAT,
+ &demux->segment);
+
+ /* and forward */
+ ret = gst_flv_demux_push_src_event (demux, event);
+ } else {
+ /* non-time format */
+ demux->audio_need_segment = TRUE;
+ demux->video_need_segment = TRUE;
+ ret = TRUE;
+ gst_event_unref (event);
+ if (demux->new_seg_event) {
+ gst_event_unref (demux->new_seg_event);
+ demux->new_seg_event = NULL;
+ }
+ }
+ gst_flow_combiner_reset (demux->flowcombiner);
+ break;
+ }
+ default:
+ ret = gst_pad_event_default (pad, parent, event);
+ break;
+ }
+
+ return ret;
+ }
+
+ static gboolean
+ gst_flv_demux_sink_query (GstPad * pad, GstObject * parent, GstQuery * query)
+ {
+ GstFlvDemux *demux;
+ gboolean ret = FALSE;
+
+ demux = GST_FLV_DEMUX (parent);
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_BITRATE:
+ {
+ guint total_bitrate = 0;
+
+ if (demux->audio_pad) {
+ if (!demux->audio_bitrate) {
+ GST_DEBUG_OBJECT (demux,
+ "Have audio pad but no audio bitrate, can't answer BITRATE query");
+ break;
+ }
+ total_bitrate = demux->audio_bitrate;
+ }
+ if (demux->video_pad) {
+ if (!demux->video_bitrate) {
+ GST_DEBUG_OBJECT (demux,
+ "Have video pad but no video bitrate, can't answer BITRATE query");
+ break;
+ }
+ total_bitrate += demux->video_bitrate;
+ }
+
+ GST_DEBUG_OBJECT (demux,
+ "bitrate query. total_bitrate:%" G_GUINT32_FORMAT, total_bitrate);
+
+ if (total_bitrate) {
+ /* Padding of 2kbit/s for container overhead */
+ gst_query_set_bitrate (query, total_bitrate + 2048);
+ ret = TRUE;
+ }
+ break;
+ }
+ default:
+ ret = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+
+ return ret;
+ }
+
+ static gboolean
+ gst_flv_demux_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
+ {
+ GstFlvDemux *demux;
+ gboolean ret = FALSE;
+
+ demux = GST_FLV_DEMUX (parent);
+
+ GST_DEBUG_OBJECT (demux, "handling event %s", GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEEK:
+ /* Try to push upstream first */
+ gst_event_ref (event);
+ ret = gst_pad_push_event (demux->sinkpad, event);
+ if (ret) {
+ gst_event_unref (event);
+ break;
+ }
+ if (demux->random_access) {
+ ret = gst_flv_demux_handle_seek_pull (demux, event, TRUE);
+ } else {
+ ret = gst_flv_demux_handle_seek_push (demux, event);
+ }
+ break;
+ default:
+ ret = gst_pad_push_event (demux->sinkpad, event);
+ break;
+ }
+
+ return ret;
+ }
+
+ static gboolean
+ gst_flv_demux_query (GstPad * pad, GstObject * parent, GstQuery * query)
+ {
+ gboolean res = TRUE;
+ GstFlvDemux *demux;
+
+ demux = GST_FLV_DEMUX (parent);
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_DURATION:
+ {
+ GstFormat format;
+
+ gst_query_parse_duration (query, &format, NULL);
+
+ /* duration is time only */
+ if (format != GST_FORMAT_TIME) {
+ GST_DEBUG_OBJECT (demux, "duration query only supported for time "
+ "format");
+ res = FALSE;
+ goto beach;
+ }
+
+ /* Try to push upstream first */
+ res = gst_pad_peer_query (demux->sinkpad, query);
+ if (res)
+ goto beach;
+
+ GST_DEBUG_OBJECT (pad, "duration query, replying %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (demux->duration));
+
+ gst_query_set_duration (query, GST_FORMAT_TIME, demux->duration);
+ res = TRUE;
+ break;
+ }
+ case GST_QUERY_POSITION:
+ {
+ GstFormat format;
+
+ gst_query_parse_position (query, &format, NULL);
+
+ /* position is time only */
+ if (format != GST_FORMAT_TIME) {
+ GST_DEBUG_OBJECT (demux, "position query only supported for time "
+ "format");
+ res = FALSE;
+ goto beach;
+ }
+
+ GST_DEBUG_OBJECT (pad, "position query, replying %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (demux->segment.position));
+
+ gst_query_set_position (query, GST_FORMAT_TIME, demux->segment.position);
+
+ break;
+ }
+
+ case GST_QUERY_SEEKING:{
+ GstFormat fmt;
+
+ gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
+
+ /* First ask upstream */
+ if (fmt == GST_FORMAT_TIME && gst_pad_peer_query (demux->sinkpad, query)) {
+ gboolean seekable;
+
+ gst_query_parse_seeking (query, NULL, &seekable, NULL, NULL);
+ if (seekable) {
+ res = TRUE;
+ break;
+ }
+ }
+ res = TRUE;
+ /* FIXME, check index this way is not thread safe */
+ if (fmt != GST_FORMAT_TIME || !demux->index) {
+ gst_query_set_seeking (query, fmt, FALSE, -1, -1);
+ } else if (demux->random_access) {
+ gst_query_set_seeking (query, GST_FORMAT_TIME, TRUE, 0,
+ demux->duration);
+ } else {
+ GstQuery *peerquery = gst_query_new_seeking (GST_FORMAT_BYTES);
+ gboolean seekable = gst_pad_peer_query (demux->sinkpad, peerquery);
+
+ if (seekable)
+ gst_query_parse_seeking (peerquery, NULL, &seekable, NULL, NULL);
+ gst_query_unref (peerquery);
+
+ if (seekable)
+ gst_query_set_seeking (query, GST_FORMAT_TIME, seekable, 0,
+ demux->duration);
+ else
+ gst_query_set_seeking (query, GST_FORMAT_TIME, FALSE, -1, -1);
+ }
+ break;
+ }
+ case GST_QUERY_SEGMENT:
+ {
+ GstFormat format;
+ gint64 start, stop;
+
+ format = demux->segment.format;
+
+ start =
+ gst_segment_to_stream_time (&demux->segment, format,
+ demux->segment.start);
+ if ((stop = demux->segment.stop) == -1)
+ stop = demux->segment.duration;
+ else
+ stop = gst_segment_to_stream_time (&demux->segment, format, stop);
+
+ gst_query_set_segment (query, demux->segment.rate, format, start, stop);
+ res = TRUE;
+ break;
+ }
+ case GST_QUERY_LATENCY:
+ default:
+ res = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+
+ beach:
+
+ return res;
+ }
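+
+ /* The DURATION and POSITION branches above are what the convenience
+ * helpers end up calling; a minimal sketch, assuming a playing pipeline
+ * that contains flvdemux: */
+ #if 0
+ static void
+ example_query_duration_position (GstElement * pipeline)
+ {
+ gint64 duration, position;
+
+ if (gst_element_query_duration (pipeline, GST_FORMAT_TIME, &duration))
+ g_print ("duration: %" GST_TIME_FORMAT "\n", GST_TIME_ARGS (duration));
+ if (gst_element_query_position (pipeline, GST_FORMAT_TIME, &position))
+ g_print ("position: %" GST_TIME_FORMAT "\n", GST_TIME_ARGS (position));
+ }
+ #endif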
+
+ static GstStateChangeReturn
+ gst_flv_demux_change_state (GstElement * element, GstStateChange transition)
+ {
+ GstFlvDemux *demux;
+ GstStateChangeReturn ret;
+
+ demux = GST_FLV_DEMUX (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ /* If this is our own index destroy it as the
+ * old entries might be wrong for the new stream */
+ if (demux->own_index) {
+ gst_object_unref (demux->index);
+ demux->index = NULL;
+ demux->own_index = FALSE;
+ }
+
+ /* If no index was created, generate one */
+ if (G_UNLIKELY (!demux->index)) {
+ GST_DEBUG_OBJECT (demux, "no index provided, creating our own");
+
+ demux->index = g_object_new (gst_mem_index_get_type (), NULL);
+
+ gst_index_get_writer_id (demux->index, GST_OBJECT (demux),
+ &demux->index_id);
+ demux->own_index = TRUE;
+ }
+ gst_flv_demux_cleanup (demux);
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ if (ret == GST_STATE_CHANGE_FAILURE)
+ return ret;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_flv_demux_cleanup (demux);
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+ }
+
+ #if 0
+ static void
+ gst_flv_demux_set_index (GstElement * element, GstIndex * index)
+ {
+ GstFlvDemux *demux = GST_FLV_DEMUX (element);
+ GstIndex *old_index;
+
+ GST_OBJECT_LOCK (demux);
+
+ old_index = demux->index;
+
+ if (index) {
+ demux->index = gst_object_ref (index);
+ demux->own_index = FALSE;
+ } else
+ demux->index = NULL;
+
+ if (old_index)
+ gst_object_unref (old_index);
+
+ if (index)
+ gst_object_ref (index);
+
+ GST_OBJECT_UNLOCK (demux);
+
+ /* object lock might be taken again */
+ if (index)
+ gst_index_get_writer_id (index, GST_OBJECT (element), &demux->index_id);
+
+ GST_DEBUG_OBJECT (demux, "Set index %" GST_PTR_FORMAT, demux->index);
+
+ if (index)
+ gst_object_unref (index);
+ }
+ #endif
+
+ static GstIndex *
+ gst_flv_demux_get_index (GstElement * element)
+ {
+ GstIndex *result = NULL;
+
+ GstFlvDemux *demux = GST_FLV_DEMUX (element);
+
+ GST_OBJECT_LOCK (demux);
+ if (demux->index)
+ result = gst_object_ref (demux->index);
+ GST_OBJECT_UNLOCK (demux);
+
+ return result;
+ }
+
+ static void
+ gst_flv_demux_dispose (GObject * object)
+ {
+ GstFlvDemux *demux = GST_FLV_DEMUX (object);
+
+ GST_DEBUG_OBJECT (demux, "disposing FLV demuxer");
+
+ if (demux->adapter) {
+ gst_adapter_clear (demux->adapter);
+ g_object_unref (demux->adapter);
+ demux->adapter = NULL;
+ }
+
+ if (demux->taglist) {
+ gst_tag_list_unref (demux->taglist);
+ demux->taglist = NULL;
+ }
+
+ if (demux->audio_tags) {
+ gst_tag_list_unref (demux->audio_tags);
+ demux->audio_tags = NULL;
+ }
+
+ if (demux->video_tags) {
+ gst_tag_list_unref (demux->video_tags);
+ demux->video_tags = NULL;
+ }
+
+ if (demux->flowcombiner) {
+ gst_flow_combiner_free (demux->flowcombiner);
+ demux->flowcombiner = NULL;
+ }
+
+ if (demux->new_seg_event) {
+ gst_event_unref (demux->new_seg_event);
+ demux->new_seg_event = NULL;
+ }
+
+ if (demux->audio_codec_data) {
+ gst_buffer_unref (demux->audio_codec_data);
+ demux->audio_codec_data = NULL;
+ }
+
+ if (demux->video_codec_data) {
+ gst_buffer_unref (demux->video_codec_data);
+ demux->video_codec_data = NULL;
+ }
+
+ if (demux->audio_pad) {
+ gst_object_unref (demux->audio_pad);
+ demux->audio_pad = NULL;
+ }
+
+ if (demux->video_pad) {
+ gst_object_unref (demux->video_pad);
+ demux->video_pad = NULL;
+ }
+
+ if (demux->index) {
+ gst_object_unref (demux->index);
+ demux->index = NULL;
+ }
+
+ if (demux->times) {
+ g_array_free (demux->times, TRUE);
+ demux->times = NULL;
+ }
+
+ if (demux->filepositions) {
+ g_array_free (demux->filepositions, TRUE);
+ demux->filepositions = NULL;
+ }
+
+ GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
+ }
+
+ static void
+ gst_flv_demux_class_init (GstFlvDemuxClass * klass)
+ {
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+
+ gobject_class->dispose = gst_flv_demux_dispose;
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_flv_demux_change_state);
+
+ #if 0
+ gstelement_class->set_index = GST_DEBUG_FUNCPTR (gst_flv_demux_set_index);
+ gstelement_class->get_index = GST_DEBUG_FUNCPTR (gst_flv_demux_get_index);
+ #endif
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &flv_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &audio_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &video_src_template);
+ gst_element_class_set_static_metadata (gstelement_class, "FLV Demuxer",
+ "Codec/Demuxer", "Demux FLV feeds into digital streams",
+ "Julien Moutte <julien@moutte.net>");
+ }
+
+ static void
+ gst_flv_demux_init (GstFlvDemux * demux)
+ {
+ demux->sinkpad =
+ gst_pad_new_from_static_template (&flv_sink_template, "sink");
+ GST_PAD_SET_ACCEPT_TEMPLATE (demux->sinkpad);
+ gst_pad_set_event_function (demux->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_flv_demux_sink_event));
+ gst_pad_set_chain_function (demux->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_flv_demux_chain));
+ gst_pad_set_activate_function (demux->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_flv_demux_sink_activate));
+ gst_pad_set_activatemode_function (demux->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_flv_demux_sink_activate_mode));
+ gst_pad_set_query_function (demux->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_flv_demux_sink_query));
+
+ gst_element_add_pad (GST_ELEMENT (demux), demux->sinkpad);
+
+ demux->adapter = gst_adapter_new ();
+ demux->flowcombiner = gst_flow_combiner_new ();
+
+ demux->own_index = FALSE;
+
+ GST_OBJECT_FLAG_SET (demux, GST_ELEMENT_FLAG_INDEXABLE);
+
+ gst_flv_demux_cleanup (demux);
+ }
--- /dev/null
+ /* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+ /*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+
+ #ifndef __FOURCC_H__
+ #define __FOURCC_H__
+
+ #include <gst/gst.h>
+
+ G_BEGIN_DECLS
+
+ #define FOURCC_null 0x0
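+
+ /* GST_MAKE_FOURCC packs the four characters into a guint32 with the
+ * first character in the least significant byte; e.g. FOURCC_avc1 below
+ * is ('a' | 'v' << 8 | 'c' << 16 | '1' << 24) == 0x31637661. */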
+
+ #define FOURCC_2vuy GST_MAKE_FOURCC('2','v','u','y')
+ #define FOURCC_FMP4 GST_MAKE_FOURCC('F','M','P','4')
+ #define FOURCC_H264 GST_MAKE_FOURCC('H','2','6','4')
+ #define FOURCC_H265 GST_MAKE_FOURCC('H','2','6','5')
+ #define FOURCC_MAC3 GST_MAKE_FOURCC('M','A','C','3')
+ #define FOURCC_MAC6 GST_MAKE_FOURCC('M','A','C','6')
+ #define FOURCC_MP4V GST_MAKE_FOURCC('M','P','4','V')
+ #define FOURCC_PICT GST_MAKE_FOURCC('P','I','C','T')
+ #define FOURCC_QDM2 GST_MAKE_FOURCC('Q','D','M','2')
+ #define FOURCC_SVQ3 GST_MAKE_FOURCC('S','V','Q','3')
+ #define FOURCC_VP31 GST_MAKE_FOURCC('V','P','3','1')
+ #define FOURCC_VP80 GST_MAKE_FOURCC('V','P','8','0')
+ #define FOURCC_WRLE GST_MAKE_FOURCC('W','R','L','E')
+ #define FOURCC_XMP_ GST_MAKE_FOURCC('X','M','P','_')
+ #define FOURCC_XVID GST_MAKE_FOURCC('X','V','I','D')
+ #define FOURCC__ART GST_MAKE_FOURCC(0xa9,'A','R','T')
+ #define FOURCC_____ GST_MAKE_FOURCC('-','-','-','-')
+ #define FOURCC___in GST_MAKE_FOURCC(' ',' ','i','n')
+ #define FOURCC___ty GST_MAKE_FOURCC(' ',' ','t','y')
+ #define FOURCC__alb GST_MAKE_FOURCC(0xa9,'a','l','b')
+ #define FOURCC__cpy GST_MAKE_FOURCC(0xa9,'c','p','y')
+ #define FOURCC__day GST_MAKE_FOURCC(0xa9,'d','a','y')
+ #define FOURCC__des GST_MAKE_FOURCC(0xa9,'d','e','s')
+ #define FOURCC__enc GST_MAKE_FOURCC(0xa9,'e','n','c')
+ #define FOURCC__gen GST_MAKE_FOURCC(0xa9, 'g', 'e', 'n')
+ #define FOURCC__grp GST_MAKE_FOURCC(0xa9,'g','r','p')
+ #define FOURCC__inf GST_MAKE_FOURCC(0xa9,'i','n','f')
+ #define FOURCC__lyr GST_MAKE_FOURCC(0xa9,'l','y','r')
+ #define FOURCC__mp3 GST_MAKE_FOURCC('.','m','p','3')
+ #define FOURCC__nam GST_MAKE_FOURCC(0xa9,'n','a','m')
+ #define FOURCC__req GST_MAKE_FOURCC(0xa9,'r','e','q')
+ #define FOURCC__too GST_MAKE_FOURCC(0xa9,'t','o','o')
+ #define FOURCC__wrt GST_MAKE_FOURCC(0xa9,'w','r','t')
+ #define FOURCC_aART GST_MAKE_FOURCC('a','A','R','T')
+ #define FOURCC_ac_3 GST_MAKE_FOURCC('a','c','-','3')
+ #define FOURCC_agsm GST_MAKE_FOURCC('a','g','s','m')
+ #define FOURCC_alac GST_MAKE_FOURCC('a','l','a','c')
+ #define FOURCC_fLaC GST_MAKE_FOURCC('f','L','a','C')
+ #define FOURCC_dfLa GST_MAKE_FOURCC('d','f','L','a')
+ #define FOURCC_alaw GST_MAKE_FOURCC('a','l','a','w')
+ #define FOURCC_alis GST_MAKE_FOURCC('a','l','i','s')
+ #define FOURCC_appl GST_MAKE_FOURCC('a','p','p','l')
+ #define FOURCC_avc1 GST_MAKE_FOURCC('a','v','c','1')
+ #define FOURCC_avc3 GST_MAKE_FOURCC('a','v','c','3')
+ #define FOURCC_avcC GST_MAKE_FOURCC('a','v','c','C')
+ #define FOURCC_c608 GST_MAKE_FOURCC('c','6','0','8')
+ #define FOURCC_c708 GST_MAKE_FOURCC('c','7','0','8')
+ #define FOURCC_ccdp GST_MAKE_FOURCC('c','c','d','p')
+ #define FOURCC_cdat GST_MAKE_FOURCC('c','d','a','t')
+ #define FOURCC_cdt2 GST_MAKE_FOURCC('c','d','t','2')
+ #define FOURCC_clcp GST_MAKE_FOURCC('c','l','c','p')
+ #define FOURCC_clip GST_MAKE_FOURCC('c','l','i','p')
+ #define FOURCC_cmov GST_MAKE_FOURCC('c','m','o','v')
+ #define FOURCC_cmvd GST_MAKE_FOURCC('c','m','v','d')
+ #define FOURCC_co64 GST_MAKE_FOURCC('c','o','6','4')
+ #define FOURCC_covr GST_MAKE_FOURCC('c','o','v','r')
+ #define FOURCC_cpil GST_MAKE_FOURCC('c','p','i','l')
+ #define FOURCC_cprt GST_MAKE_FOURCC('c','p','r','t')
+ #define FOURCC_crgn GST_MAKE_FOURCC('c','r','g','n')
+ #define FOURCC_ctab GST_MAKE_FOURCC('c','t','a','b')
+ #define FOURCC_ctts GST_MAKE_FOURCC('c','t','t','s')
+ #define FOURCC_cslg GST_MAKE_FOURCC('c','s','l','g')
+ #define FOURCC_d263 GST_MAKE_FOURCC('d','2','6','3')
+ #define FOURCC_dac3 GST_MAKE_FOURCC('d','a','c','3')
+ #define FOURCC_damr GST_MAKE_FOURCC('d','a','m','r')
+ #define FOURCC_data GST_MAKE_FOURCC('d','a','t','a')
+ #define FOURCC_dcom GST_MAKE_FOURCC('d','c','o','m')
+ #define FOURCC_desc GST_MAKE_FOURCC('d','e','s','c')
+ #define FOURCC_dhlr GST_MAKE_FOURCC('d','h','l','r')
+ #define FOURCC_dinf GST_MAKE_FOURCC('d','i','n','f')
+ #define FOURCC_disc GST_MAKE_FOURCC('d','i','s','c')
+ #define FOURCC_disk GST_MAKE_FOURCC('d','i','s','k')
+ #define FOURCC_drac GST_MAKE_FOURCC('d','r','a','c')
+ #define FOURCC_dref GST_MAKE_FOURCC('d','r','e','f')
+ #define FOURCC_drmi GST_MAKE_FOURCC('d','r','m','i')
+ #define FOURCC_drms GST_MAKE_FOURCC('d','r','m','s')
+ #define FOURCC_dvcp GST_MAKE_FOURCC('d','v','c','p')
+ #define FOURCC_dvc_ GST_MAKE_FOURCC('d','v','c',' ')
+ #define FOURCC_dv5p GST_MAKE_FOURCC('d','v','5','p')
+ #define FOURCC_dv5n GST_MAKE_FOURCC('d','v','5','n')
+ #define FOURCC_dva1 GST_MAKE_FOURCC('d','v','a','1')
+ #define FOURCC_dvav GST_MAKE_FOURCC('d','v','a','v')
+ #define FOURCC_dvh1 GST_MAKE_FOURCC('d','v','h','1')
+ #define FOURCC_dvhe GST_MAKE_FOURCC('d','v','h','e')
+ #define FOURCC_dvcC GST_MAKE_FOURCC('d','v','c','C')
+ #define FOURCC_edts GST_MAKE_FOURCC('e','d','t','s')
+ #define FOURCC_elst GST_MAKE_FOURCC('e','l','s','t')
+ #define FOURCC_enda GST_MAKE_FOURCC('e','n','d','a')
+ #define FOURCC_esds GST_MAKE_FOURCC('e','s','d','s')
+ #define FOURCC_fmp4 GST_MAKE_FOURCC('f','m','p','4')
+ #define FOURCC_free GST_MAKE_FOURCC('f','r','e','e')
+ #define FOURCC_frma GST_MAKE_FOURCC('f','r','m','a')
+ #define FOURCC_ftyp GST_MAKE_FOURCC('f','t','y','p')
+ #define FOURCC_ftab GST_MAKE_FOURCC('f','t','a','b')
+ #define FOURCC_gama GST_MAKE_FOURCC('g','a','m','a')
+ #define FOURCC_glbl GST_MAKE_FOURCC('g','l','b','l')
+ #define FOURCC_gmhd GST_MAKE_FOURCC('g','m','h','d')
+ #define FOURCC_gmin GST_MAKE_FOURCC('g','m','i','n')
+ #define FOURCC_gnre GST_MAKE_FOURCC('g','n','r','e')
+ #define FOURCC_h263 GST_MAKE_FOURCC('h','2','6','3')
+ #define FOURCC_hdlr GST_MAKE_FOURCC('h','d','l','r')
+ #define FOURCC_hev1 GST_MAKE_FOURCC('h','e','v','1')
+ #define FOURCC_hint GST_MAKE_FOURCC('h','i','n','t')
+ #define FOURCC_hmhd GST_MAKE_FOURCC('h','m','h','d')
+ #define FOURCC_hndl GST_MAKE_FOURCC('h','n','d','l')
+ #define FOURCC_hnti GST_MAKE_FOURCC('h','n','t','i')
+ #define FOURCC_hvc1 GST_MAKE_FOURCC('h','v','c','1')
+ #define FOURCC_hvcC GST_MAKE_FOURCC('h','v','c','C')
+ #define FOURCC_ilst GST_MAKE_FOURCC('i','l','s','t')
+ #define FOURCC_ima4 GST_MAKE_FOURCC('i','m','a','4')
+ #define FOURCC_imap GST_MAKE_FOURCC('i','m','a','p')
+ #define FOURCC_s16l GST_MAKE_FOURCC('s','1','6','l')
+ #define FOURCC_in24 GST_MAKE_FOURCC('i','n','2','4')
+ #define FOURCC_in32 GST_MAKE_FOURCC('i','n','3','2')
+ #define FOURCC_fl64 GST_MAKE_FOURCC('f','l','6','4')
+ #define FOURCC_fl32 GST_MAKE_FOURCC('f','l','3','2')
+ #define FOURCC_jp2c GST_MAKE_FOURCC('j','p','2','c')
+ #define FOURCC_jpeg GST_MAKE_FOURCC('j','p','e','g')
+ #define FOURCC_keyw GST_MAKE_FOURCC('k','e','y','w')
+ #define FOURCC_kmat GST_MAKE_FOURCC('k','m','a','t')
+ #define FOURCC_kywd GST_MAKE_FOURCC('k','y','w','d')
+ #define FOURCC_load GST_MAKE_FOURCC('l','o','a','d')
+ #define FOURCC_matt GST_MAKE_FOURCC('m','a','t','t')
+ #define FOURCC_mdat GST_MAKE_FOURCC('m','d','a','t')
+ #define FOURCC_mdhd GST_MAKE_FOURCC('m','d','h','d')
+ #define FOURCC_mdia GST_MAKE_FOURCC('m','d','i','a')
+ #define FOURCC_mdir GST_MAKE_FOURCC('m','d','i','r')
+ #define FOURCC_mean GST_MAKE_FOURCC('m','e','a','n')
+ #define FOURCC_meta GST_MAKE_FOURCC('m','e','t','a')
+ #define FOURCC_mhlr GST_MAKE_FOURCC('m','h','l','r')
+ #define FOURCC_minf GST_MAKE_FOURCC('m','i','n','f')
+ #define FOURCC_moov GST_MAKE_FOURCC('m','o','o','v')
+ #define FOURCC_mp3_ GST_MAKE_FOURCC('m','p','3',' ')
+ #define FOURCC_mp4a GST_MAKE_FOURCC('m','p','4','a')
+ #define FOURCC_mp4s GST_MAKE_FOURCC('m','p','4','s')
+ #define FOURCC_mp4v GST_MAKE_FOURCC('m','p','4','v')
+ #define FOURCC_name GST_MAKE_FOURCC('n','a','m','e')
+ #define FOURCC_nclc GST_MAKE_FOURCC('n','c','l','c')
+ #define FOURCC_nclx GST_MAKE_FOURCC('n','c','l','x')
+ #define FOURCC_nmhd GST_MAKE_FOURCC('n','m','h','d')
+ #define FOURCC_opus GST_MAKE_FOURCC('O','p','u','s')
+ #define FOURCC_dops GST_MAKE_FOURCC('d','O','p','s')
+ #define FOURCC_pasp GST_MAKE_FOURCC('p','a','s','p')
+ #define FOURCC_colr GST_MAKE_FOURCC('c','o','l','r')
+ #define FOURCC_clap GST_MAKE_FOURCC('c','l','a','p')
+ #define FOURCC_tapt GST_MAKE_FOURCC('t','a','p','t')
+ #define FOURCC_clef GST_MAKE_FOURCC('c','l','e','f')
+ #define FOURCC_prof GST_MAKE_FOURCC('p','r','o','f')
+ #define FOURCC_enof GST_MAKE_FOURCC('e','n','o','f')
+ #define FOURCC_fiel GST_MAKE_FOURCC('f','i','e','l')
+ #define FOURCC_pcst GST_MAKE_FOURCC('p','c','s','t')
+ #define FOURCC_pgap GST_MAKE_FOURCC('p','g','a','p')
+ #define FOURCC_png GST_MAKE_FOURCC('p','n','g',' ')
+ #define FOURCC_pnot GST_MAKE_FOURCC('p','n','o','t')
+ #define FOURCC_qt__ GST_MAKE_FOURCC('q','t',' ',' ')
+ #define FOURCC_qtim GST_MAKE_FOURCC('q','t','i','m')
+ #define FOURCC_raw_ GST_MAKE_FOURCC('r','a','w',' ')
+ #define FOURCC_rdrf GST_MAKE_FOURCC('r','d','r','f')
+ #define FOURCC_rle_ GST_MAKE_FOURCC('r','l','e',' ')
+ #define FOURCC_rmda GST_MAKE_FOURCC('r','m','d','a')
+ #define FOURCC_rmdr GST_MAKE_FOURCC('r','m','d','r')
+ #define FOURCC_rmra GST_MAKE_FOURCC('r','m','r','a')
+ #define FOURCC_rmvc GST_MAKE_FOURCC('r','m','v','c')
+ #define FOURCC_rtp_ GST_MAKE_FOURCC('r','t','p',' ')
+ #define FOURCC_rtsp GST_MAKE_FOURCC('r','t','s','p')
+ #define FOURCC_s263 GST_MAKE_FOURCC('s','2','6','3')
+ #define FOURCC_samr GST_MAKE_FOURCC('s','a','m','r')
+ #define FOURCC_sawb GST_MAKE_FOURCC('s','a','w','b')
+ #define FOURCC_sbtl GST_MAKE_FOURCC('s','b','t','l')
+ #define FOURCC_sdp_ GST_MAKE_FOURCC('s','d','p',' ')
+ #define FOURCC_sidx GST_MAKE_FOURCC('s','i','d','x')
+ #define FOURCC_skip GST_MAKE_FOURCC('s','k','i','p')
+ #define FOURCC_smhd GST_MAKE_FOURCC('s','m','h','d')
+ #define FOURCC_soaa GST_MAKE_FOURCC('s','o','a','a')
+ #define FOURCC_soal GST_MAKE_FOURCC('s','o','a','l')
+ #define FOURCC_soar GST_MAKE_FOURCC('s','o','a','r')
+ #define FOURCC_soco GST_MAKE_FOURCC('s','o','c','o')
+ #define FOURCC_sonm GST_MAKE_FOURCC('s','o','n','m')
+ #define FOURCC_sosn GST_MAKE_FOURCC('s','o','s','n')
+ #define FOURCC_soun GST_MAKE_FOURCC('s','o','u','n')
+ #define FOURCC_sowt GST_MAKE_FOURCC('s','o','w','t')
+ #define FOURCC_stbl GST_MAKE_FOURCC('s','t','b','l')
+ #define FOURCC_stco GST_MAKE_FOURCC('s','t','c','o')
+ #define FOURCC_stpp GST_MAKE_FOURCC('s','t','p','p')
+ #define FOURCC_stps GST_MAKE_FOURCC('s','t','p','s')
+ #define FOURCC_strf GST_MAKE_FOURCC('s','t','r','f')
+ #define FOURCC_strm GST_MAKE_FOURCC('s','t','r','m')
+ #define FOURCC_stsc GST_MAKE_FOURCC('s','t','s','c')
+ #define FOURCC_stsd GST_MAKE_FOURCC('s','t','s','d')
+ #define FOURCC_stss GST_MAKE_FOURCC('s','t','s','s')
+ #define FOURCC_stsz GST_MAKE_FOURCC('s','t','s','z')
+ #define FOURCC_stts GST_MAKE_FOURCC('s','t','t','s')
+ #define FOURCC_styp GST_MAKE_FOURCC('s','t','y','p')
+ #define FOURCC_subp GST_MAKE_FOURCC('s','u','b','p')
+ #define FOURCC_subt GST_MAKE_FOURCC('s','u','b','t')
+ #define FOURCC_text GST_MAKE_FOURCC('t','e','x','t')
+ #define FOURCC_tcmi GST_MAKE_FOURCC('t','c','m','i')
+ #define FOURCC_tkhd GST_MAKE_FOURCC('t','k','h','d')
+ #define FOURCC_tmcd GST_MAKE_FOURCC('t','m','c','d')
+ #define FOURCC_tmpo GST_MAKE_FOURCC('t','m','p','o')
+ #define FOURCC_trak GST_MAKE_FOURCC('t','r','a','k')
+ #define FOURCC_tref GST_MAKE_FOURCC('t','r','e','f')
+ #define FOURCC_trkn GST_MAKE_FOURCC('t','r','k','n')
+ #define FOURCC_tven GST_MAKE_FOURCC('t','v','e','n')
+ #define FOURCC_tves GST_MAKE_FOURCC('t','v','e','s')
+ #define FOURCC_tvsh GST_MAKE_FOURCC('t','v','s','h')
+ #define FOURCC_tvsn GST_MAKE_FOURCC('t','v','s','n')
+ #define FOURCC_twos GST_MAKE_FOURCC('t','w','o','s')
+ #define FOURCC_tx3g GST_MAKE_FOURCC('t','x','3','g')
+ #define FOURCC_udta GST_MAKE_FOURCC('u','d','t','a')
+ #define FOURCC_ulaw GST_MAKE_FOURCC('u','l','a','w')
+ #define FOURCC_url_ GST_MAKE_FOURCC('u','r','l',' ')
+ #define FOURCC_uuid GST_MAKE_FOURCC('u','u','i','d')
+ #define FOURCC_v210 GST_MAKE_FOURCC('v','2','1','0')
+ #define FOURCC_vc_1 GST_MAKE_FOURCC('v','c','-','1')
+ #define FOURCC_vide GST_MAKE_FOURCC('v','i','d','e')
+ #define FOURCC_vmhd GST_MAKE_FOURCC('v','m','h','d')
+ #define FOURCC_vp08 GST_MAKE_FOURCC('v','p','0','8')
+ #define FOURCC_vp09 GST_MAKE_FOURCC('v','p','0','9')
+ #define FOURCC_vpcC GST_MAKE_FOURCC('v','p','c','C')
+ #define FOURCC_xvid GST_MAKE_FOURCC('x','v','i','d')
+ #define FOURCC_wave GST_MAKE_FOURCC('w','a','v','e')
+ #define FOURCC_wide GST_MAKE_FOURCC('w','i','d','e')
+ #define FOURCC_zlib GST_MAKE_FOURCC('z','l','i','b')
+ #define FOURCC_lpcm GST_MAKE_FOURCC('l','p','c','m')
+ #define FOURCC_av01 GST_MAKE_FOURCC('a','v','0','1')
+ #define FOURCC_av1C GST_MAKE_FOURCC('a','v','1','C')
+ #define FOURCC_av1f GST_MAKE_FOURCC('a','v','1','f')
+ #define FOURCC_av1m GST_MAKE_FOURCC('a','v','1','m')
+ #define FOURCC_av1s GST_MAKE_FOURCC('a','v','1','s')
+ #define FOURCC_av1M GST_MAKE_FOURCC('a','v','1','M')
+
+ #define FOURCC_cfhd GST_MAKE_FOURCC('C','F','H','D')
+ #define FOURCC_ap4x GST_MAKE_FOURCC('a','p','4','x')
+ #define FOURCC_ap4h GST_MAKE_FOURCC('a','p','4','h')
+ #define FOURCC_apch GST_MAKE_FOURCC('a','p','c','h')
+ #define FOURCC_apcn GST_MAKE_FOURCC('a','p','c','n')
+ #define FOURCC_apco GST_MAKE_FOURCC('a','p','c','o')
+ #define FOURCC_apcs GST_MAKE_FOURCC('a','p','c','s')
+ #define FOURCC_m1v GST_MAKE_FOURCC('m','1','v',' ')
+ #define FOURCC_vivo GST_MAKE_FOURCC('v','i','v','o')
+ #define FOURCC_saiz GST_MAKE_FOURCC('s','a','i','z')
+ #define FOURCC_saio GST_MAKE_FOURCC('s','a','i','o')
+
+ #define FOURCC_3gg6 GST_MAKE_FOURCC('3','g','g','6')
+ #define FOURCC_3gg7 GST_MAKE_FOURCC('3','g','g','7')
+ #define FOURCC_3gp4 GST_MAKE_FOURCC('3','g','p','4')
+ #define FOURCC_3gp6 GST_MAKE_FOURCC('3','g','p','6')
+ #define FOURCC_3gr6 GST_MAKE_FOURCC('3','g','r','6')
+ #define FOURCC_3g__ GST_MAKE_FOURCC('3','g',0,0)
+ #define FOURCC_isml GST_MAKE_FOURCC('i','s','m','l')
+ #define FOURCC_iso2 GST_MAKE_FOURCC('i','s','o','2')
+ #define FOURCC_isom GST_MAKE_FOURCC('i','s','o','m')
+ #define FOURCC_mp41 GST_MAKE_FOURCC('m','p','4','1')
+ #define FOURCC_mp42 GST_MAKE_FOURCC('m','p','4','2')
+ #define FOURCC_piff GST_MAKE_FOURCC('p','i','f','f')
+ #define FOURCC_titl GST_MAKE_FOURCC('t','i','t','l')
+
+ /* SVQ3 fourcc */
+ #define FOURCC_SEQH GST_MAKE_FOURCC('S','E','Q','H')
+ #define FOURCC_SMI_ GST_MAKE_FOURCC('S','M','I',' ')
+
+ /* 3gpp asset meta data fourcc */
+ #define FOURCC_albm GST_MAKE_FOURCC('a','l','b','m')
+ #define FOURCC_auth GST_MAKE_FOURCC('a','u','t','h')
+ #define FOURCC_clsf GST_MAKE_FOURCC('c','l','s','f')
+ #define FOURCC_dscp GST_MAKE_FOURCC('d','s','c','p')
+ #define FOURCC_loci GST_MAKE_FOURCC('l','o','c','i')
+ #define FOURCC_perf GST_MAKE_FOURCC('p','e','r','f')
+ #define FOURCC_rtng GST_MAKE_FOURCC('r','t','n','g')
+ #define FOURCC_yrrc GST_MAKE_FOURCC('y','r','r','c')
+
+ /* misc tag stuff */
+ #define FOURCC_ID32 GST_MAKE_FOURCC('I', 'D','3','2')
+
+ /* ISO Motion JPEG 2000 fourcc */
+ #define FOURCC_cdef GST_MAKE_FOURCC('c','d','e','f')
+ #define FOURCC_cmap GST_MAKE_FOURCC('c','m','a','p')
+ #define FOURCC_ihdr GST_MAKE_FOURCC('i','h','d','r')
+ #define FOURCC_jp2h GST_MAKE_FOURCC('j','p','2','h')
+ #define FOURCC_jp2x GST_MAKE_FOURCC('j','p','2','x')
+ #define FOURCC_mjp2 GST_MAKE_FOURCC('m','j','p','2')
+
+ /* some buggy hardware's notion of mdhd */
+ #define FOURCC_mhdr GST_MAKE_FOURCC('m','h','d','r')
+
+ /* Fragmented MP4 */
+ #define FOURCC_btrt GST_MAKE_FOURCC('b','t','r','t')
+ #define FOURCC_mehd GST_MAKE_FOURCC('m','e','h','d')
+ #define FOURCC_mfhd GST_MAKE_FOURCC('m','f','h','d')
+ #define FOURCC_mfra GST_MAKE_FOURCC('m','f','r','a')
+ #define FOURCC_mfro GST_MAKE_FOURCC('m','f','r','o')
+ #define FOURCC_moof GST_MAKE_FOURCC('m','o','o','f')
+ #define FOURCC_mvex GST_MAKE_FOURCC('m','v','e','x')
+ #define FOURCC_mvhd GST_MAKE_FOURCC('m','v','h','d')
+ #define FOURCC_ovc1 GST_MAKE_FOURCC('o','v','c','1')
+ #define FOURCC_owma GST_MAKE_FOURCC('o','w','m','a')
+ #define FOURCC_sdtp GST_MAKE_FOURCC('s','d','t','p')
+ #define FOURCC_tfhd GST_MAKE_FOURCC('t','f','h','d')
+ #define FOURCC_tfra GST_MAKE_FOURCC('t','f','r','a')
+ #define FOURCC_traf GST_MAKE_FOURCC('t','r','a','f')
+ #define FOURCC_trex GST_MAKE_FOURCC('t','r','e','x')
+ #define FOURCC_trun GST_MAKE_FOURCC('t','r','u','n')
+ #define FOURCC_wma_ GST_MAKE_FOURCC('w','m','a',' ')
+
+ /* MPEG DASH */
+ #define FOURCC_tfdt GST_MAKE_FOURCC('t','f','d','t')
+
+ /* Xiph fourcc */
+ #define FOURCC_XdxT GST_MAKE_FOURCC('X','d','x','T')
+ #define FOURCC_XiTh GST_MAKE_FOURCC('X','i','T','h')
+ #define FOURCC_tCtC GST_MAKE_FOURCC('t','C','t','C')
+ #define FOURCC_tCtH GST_MAKE_FOURCC('t','C','t','H')
+ #define FOURCC_tCt_ GST_MAKE_FOURCC('t','C','t','#')
+
+ /* ilst metatags */
+ #define FOURCC__cmt GST_MAKE_FOURCC(0xa9, 'c','m','t')
+
+ /* apple tags */
+ #define FOURCC__mak GST_MAKE_FOURCC(0xa9, 'm','a','k')
+ #define FOURCC__mod GST_MAKE_FOURCC(0xa9, 'm','o','d')
+ #define FOURCC__swr GST_MAKE_FOURCC(0xa9, 's','w','r')
+
+ /* Chapters reference */
+ #define FOURCC_chap GST_MAKE_FOURCC('c','h','a','p')
+
+ /* For Microsoft Wave formats embedded in quicktime, the FOURCC is
+ 'm', 's', then the 16 bit wave codec id */
+ #define MS_WAVE_FOURCC(codecid) GST_MAKE_FOURCC( \
+ 'm', 's', ((codecid)>>8)&0xff, ((codecid)&0xff))
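+ /* e.g. the wave format tag 0x0055 (MPEG layer 3) gives
+ * MS_WAVE_FOURCC (0x0055) == GST_MAKE_FOURCC ('m', 's', 0x00, 0x55) */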
+
+ /* MPEG Application Format , Stereo Video */
+ #define FOURCC_ss01 GST_MAKE_FOURCC('s','s','0','1')
+ #define FOURCC_ss02 GST_MAKE_FOURCC('s','s','0','2')
+ #define FOURCC_svmi GST_MAKE_FOURCC('s','v','m','i')
+ #define FOURCC_scdi GST_MAKE_FOURCC('s','c','d','i')
+
+ /* Protected streams */
+ #define FOURCC_encv GST_MAKE_FOURCC('e','n','c','v')
+ #define FOURCC_enca GST_MAKE_FOURCC('e','n','c','a')
+ #define FOURCC_enct GST_MAKE_FOURCC('e','n','c','t')
+ #define FOURCC_encs GST_MAKE_FOURCC('e','n','c','s')
+ #define FOURCC_sinf GST_MAKE_FOURCC('s','i','n','f')
+ #define FOURCC_frma GST_MAKE_FOURCC('f','r','m','a')
+ #define FOURCC_schm GST_MAKE_FOURCC('s','c','h','m')
+ #define FOURCC_schi GST_MAKE_FOURCC('s','c','h','i')
+
+ /* Common Encryption */
+ #define FOURCC_pssh GST_MAKE_FOURCC('p','s','s','h')
+ #define FOURCC_tenc GST_MAKE_FOURCC('t','e','n','c')
+ #define FOURCC_cenc GST_MAKE_FOURCC('c','e','n','c')
+ #define FOURCC_cbcs GST_MAKE_FOURCC('c','b','c','s')
+
+ /* Audible AAX encrypted audio */
+ #define FOURCC_aavd GST_MAKE_FOURCC('a','a','v','d')
+ #define FOURCC_adrm GST_MAKE_FOURCC('a','d','r','m')
+
++#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
++/* Spatial Audio */
++#define FOURCC_SA3D GST_MAKE_FOURCC('S','A','3','D')
++#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
++
+ G_END_DECLS
+
+ #endif /* __FOURCC_H__ */
--- /dev/null
+ /* Quicktime muxer plugin for GStreamer
+ * Copyright (C) 2008-2010 Thiago Santos <thiagoss@embedded.ufcg.edu.br>
+ * Copyright (C) 2008 Mark Nauwelaerts <mnauw@users.sf.net>
+ * Copyright (C) 2010 Nokia Corporation. All rights reserved.
+ * Copyright (C) 2014 Jan Schmidt <jan@centricular.com>
+ * Contact: Stefan Kost <stefan.kost@nokia.com>
+
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+ /*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+
+ /**
+ * SECTION:GstQTMux
+ * @title: GstQTMux
+ * @short_description: Muxer for ISO MP4-based files
+ */
+
+ /*
+ * Based on avimux
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <glib/gstdio.h>
+
+ #include <gst/gst.h>
+ #include <gst/base/gstbytereader.h>
+ #include <gst/base/gstbitreader.h>
+ #include <gst/audio/audio.h>
+ #include <gst/video/video.h>
+ #include <gst/tag/tag.h>
+ #include <gst/pbutils/pbutils.h>
+
+ #include <sys/types.h>
+ #ifdef G_OS_WIN32
+ #include <io.h> /* lseek, open, close, read */
+ #undef lseek
+ #define lseek _lseeki64
+ #undef off_t
+ #define off_t guint64
+ #endif
+
+ #ifdef _MSC_VER
+ #define ftruncate g_win32_ftruncate
+ #endif
+
+ #ifdef HAVE_UNISTD_H
+ # include <unistd.h>
+ #endif
+
+ #include "gstisomp4elements.h"
+ #include "gstqtmux.h"
+
+ GST_DEBUG_CATEGORY_STATIC (gst_qt_mux_debug);
+ #define GST_CAT_DEFAULT gst_qt_mux_debug
+
+ #ifndef ABSDIFF
+ #define ABSDIFF(a, b) ((a) > (b) ? (a) - (b) : (b) - (a))
+ #endif
+
+ /* Hacker notes.
+ *
+ * The basic building blocks of MP4 files are:
+ * - an 'ftyp' box at the very start
+ * - an 'mdat' box which contains the raw audio/video/subtitle data;
+ * this is just a bunch of bytes, completely unframed and possibly
+ * unordered with no additional meta-information
+ * - a 'moov' box that contains information about the different streams
+ * and what they contain, as well as sample tables for each stream
+ * that tell the demuxer where in the mdat box each buffer/sample is
+ * and what its duration/timestamp etc. is, and whether it's a
+ * keyframe etc.
+ * Additionally, fragmented MP4 works by writing chunks of data in
+ * pairs of 'moof' and 'mdat' boxes:
+ * - 'moof' boxes, header preceding each mdat fragment describing the
+ * contents, like a moov but only for that fragment.
+ * - a 'mfra' box for Fragmented MP4, which is written at the end and
+ * contains a summary of all fragments and seek tables.
+ *
+ * Currently mp4mux can work in 4 different modes / generate 4 types
+ * of output files/streams:
+ *
+ * - Normal mp4: mp4mux will write a little ftyp identifier at the
+ * beginning, then start an mdat box into which it will write all the
+ * sample data. At EOS it will then write the moov header with track
+ * headers and sample tables at the end of the file, and rewrite the
+ * start of the file to fix up the mdat box size at the beginning.
+ * It has to wait for EOS to write the moov (which includes the
+ * sample tables) because it doesn't know how much space those
+ * tables will be. The output downstream must be seekable to rewrite
+ * the mdat box at EOS.
+ *
+ * - Fragmented mp4: moov header with track headers at start
+ * but no sample table, followed by N fragments, each containing
+ * track headers with sample tables followed by some data. Downstream
+ * does not need to be seekable if the 'streamable' flag is TRUE,
+ * as the final mfra and total duration will be omitted.
+ *
+ * - Fast-start mp4: the goal here is to create a file where the moov
+ * headers are at the beginning; what mp4mux will do is write all
+ * sample data into a temp file and build moov header plus sample
+ * tables in memory and then when EOS comes, it will push out the
+ * moov header plus sample tables at the beginning, followed by the
+ * mdat sample data at the end, which is read in from the temp file.
+ * Files created in this mode are better for streaming over the
+ * network, since the client doesn't have to seek to the end of the
+ * file to get the headers, but it requires copying all sample data
+ * out of the temp file at EOS, which can be expensive. Downstream does
+ * not need to be seekable, because of the use of the temp file.
+ *
+ * - Robust Muxing mode: In this mode, qtmux uses the reserved-max-duration
+ * and reserved-moov-update-period properties to reserve free space
+ * at the start of the file and periodically write the MOOV atom out
+ * to it. That means that killing the muxing at any point still
+ * results in a playable file, at the cost of wasting some amount of
+ * free space at the start of file. The approximate recording duration
+ * has to be known in advance to estimate how much free space to reserve
+ * for the moov, and the downstream must be seekable.
+ * If the moov header grows larger than the reserved space, an error
+ * is generated - so it's better to over-estimate the amount of space
+ * to reserve. To ensure the file is playable at any point, the moov
+ * is updated using a 'ping-pong' strategy, so the output is never in
+ * an invalid state.
+ */
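+
+ /* Illustrative launch lines for the modes above (mp4mux shown; the
+ * property names are the ones installed in _class_init below, with
+ * fragment-duration in ms and the reserved-* properties in ns):
+ *
+ * normal : ... ! mp4mux ! filesink location=out.mp4
+ * fast-start : ... ! mp4mux faststart=true ! filesink location=out.mp4
+ * fragmented : ... ! mp4mux fragment-duration=2000 ! filesink location=out.mp4
+ * robust : ... ! mp4mux reserved-max-duration=3600000000000
+ * reserved-moov-update-period=1000000000 ! filesink location=out.mp4
+ */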
+
+ #ifndef GST_REMOVE_DEPRECATED
+ enum
+ {
+ DTS_METHOD_DD,
+ DTS_METHOD_REORDER,
+ DTS_METHOD_ASC
+ };
+
+ static GType
+ gst_qt_mux_dts_method_get_type (void)
+ {
+ static GType gst_qt_mux_dts_method = 0;
+
+ if (!gst_qt_mux_dts_method) {
+ static const GEnumValue dts_methods[] = {
+ {DTS_METHOD_DD, "delta/duration", "dd"},
+ {DTS_METHOD_REORDER, "reorder", "reorder"},
+ {DTS_METHOD_ASC, "ascending", "asc"},
+ {0, NULL, NULL},
+ };
+
+ gst_qt_mux_dts_method =
+ g_enum_register_static ("GstQTMuxDtsMethods", dts_methods);
+ }
+
+ return gst_qt_mux_dts_method;
+ }
+
+ #define GST_TYPE_QT_MUX_DTS_METHOD \
+ (gst_qt_mux_dts_method_get_type ())
+ #endif
+
+ static GType
+ gst_qt_mux_fragment_mode_get_type (void)
+ {
+ static GType gst_qt_mux_fragment_mode = 0;
+
+ if (!gst_qt_mux_fragment_mode) {
+ static const GEnumValue gst_qt_mux_fragment_modes[] = {
+ {GST_QT_MUX_FRAGMENT_DASH_OR_MSS, "Dash or Smoothstreaming",
+ "dash-or-mss"},
+ {GST_QT_MUX_FRAGMENT_FIRST_MOOV_THEN_FINALISE,
+ "First MOOV Fragment Then Finalise", "first-moov-then-finalise"},
+ /* internal only */
+ /* {GST_QT_MUX_FRAGMENT_STREAMABLE, "streamable", "Streamable (ISML only. Deprecated elsewhere)"}, */
+ {0, NULL, NULL},
+ };
+
+ gst_qt_mux_fragment_mode =
+ g_enum_register_static ("GstQTMuxFragmentMode",
+ gst_qt_mux_fragment_modes);
+ }
+
+ return gst_qt_mux_fragment_mode;
+ }
+
+ #define GST_TYPE_QT_MUX_FRAGMENT_MODE \
+ (gst_qt_mux_fragment_mode_get_type ())
+
+ enum
+ {
+ PROP_PAD_0,
+ PROP_PAD_TRAK_TIMESCALE,
+ };
+
+ #define DEFAULT_PAD_TRAK_TIMESCALE 0
+
+ G_DEFINE_TYPE (GstQTMuxPad, gst_qt_mux_pad, GST_TYPE_AGGREGATOR_PAD);
+
+ static void
+ gst_qt_mux_pad_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+ {
+ GstQTMuxPad *pad = GST_QT_MUX_PAD_CAST (object);
+
+ GST_OBJECT_LOCK (pad);
+ switch (prop_id) {
+ case PROP_PAD_TRAK_TIMESCALE:
+ pad->trak_timescale = g_value_get_uint (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ GST_OBJECT_UNLOCK (pad);
+ }
+
+ static void
+ gst_qt_mux_pad_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+ {
+ GstQTMuxPad *pad = GST_QT_MUX_PAD_CAST (object);
+
+ GST_OBJECT_LOCK (pad);
+ switch (prop_id) {
+ case PROP_PAD_TRAK_TIMESCALE:
+ g_value_set_uint (value, pad->trak_timescale);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ GST_OBJECT_UNLOCK (pad);
+ }
+
+ static void
+ gst_qt_mux_pad_class_init (GstQTMuxPadClass * klass)
+ {
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+
+ gobject_class->get_property = gst_qt_mux_pad_get_property;
+ gobject_class->set_property = gst_qt_mux_pad_set_property;
+
+ g_object_class_install_property (gobject_class, PROP_PAD_TRAK_TIMESCALE,
+ g_param_spec_uint ("trak-timescale", "Track timescale",
+ "Timescale to use for this pad's trak (units per second, 0 is automatic)",
+ 0, G_MAXUINT32, DEFAULT_PAD_TRAK_TIMESCALE,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+ }
+
+ static void
+ gst_qt_mux_pad_init (GstQTMuxPad * pad)
+ {
+ pad->trak_timescale = DEFAULT_PAD_TRAK_TIMESCALE;
+ }
+
+ static guint32
+ gst_qt_mux_pad_get_timescale (GstQTMuxPad * pad)
+ {
+ guint32 timescale;
+
+ GST_OBJECT_LOCK (pad);
+ timescale = pad->trak_timescale;
+ GST_OBJECT_UNLOCK (pad);
+
+ return timescale;
+ }
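+
+ /* The per-pad "trak-timescale" property installed above is read through
+ * this helper; a hypothetical application (GStreamer >= 1.20 assumed,
+ * for gst_element_request_pad_simple) would set it on the request pad: */
+ #if 0
+ static void
+ example_set_pad_timescale (GstElement * mux)
+ {
+ /* request pad names follow the "video_%u" / "audio_%u" templates */
+ GstPad *pad = gst_element_request_pad_simple (mux, "video_%u");
+
+ g_object_set (pad, "trak-timescale", 90000, NULL);
+ gst_object_unref (pad);
+ }
+ #endif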
+
+ /* QTMux signals and args */
+ enum
+ {
+ /* FILL ME */
+ LAST_SIGNAL
+ };
+
+ enum
+ {
+ PROP_0,
+ PROP_MOVIE_TIMESCALE,
+ PROP_TRAK_TIMESCALE,
+ PROP_FAST_START,
+ PROP_FAST_START_TEMP_FILE,
+ PROP_MOOV_RECOV_FILE,
+ PROP_FRAGMENT_DURATION,
+ PROP_RESERVED_MAX_DURATION,
+ PROP_RESERVED_DURATION_REMAINING,
+ PROP_RESERVED_MOOV_UPDATE_PERIOD,
+ PROP_RESERVED_BYTES_PER_SEC,
+ PROP_RESERVED_PREFILL,
+ #ifndef GST_REMOVE_DEPRECATED
+ PROP_DTS_METHOD,
+ #endif
+ PROP_DO_CTTS,
+ PROP_INTERLEAVE_BYTES,
+ PROP_INTERLEAVE_TIME,
+ PROP_FORCE_CHUNKS,
+ PROP_MAX_RAW_AUDIO_DRIFT,
+ PROP_START_GAP_THRESHOLD,
+ PROP_FORCE_CREATE_TIMECODE_TRAK,
+ PROP_FRAGMENT_MODE,
++#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
++ PROP_EXPECTED_TRAILER_SIZE,
++#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
+ };
+
+ /* some spare for header size as well */
+ #define MDAT_LARGE_FILE_LIMIT ((guint64) 1024 * 1024 * 1024 * 2)
+
+ #define DEFAULT_MOVIE_TIMESCALE 0
+ #define DEFAULT_TRAK_TIMESCALE 0
+ #define DEFAULT_DO_CTTS TRUE
+ #define DEFAULT_FAST_START FALSE
+ #define DEFAULT_FAST_START_TEMP_FILE NULL
+ #define DEFAULT_MOOV_RECOV_FILE NULL
+ #define DEFAULT_FRAGMENT_DURATION 0
+ #define DEFAULT_STREAMABLE TRUE
+ #ifndef GST_REMOVE_DEPRECATED
+ #define DEFAULT_DTS_METHOD DTS_METHOD_REORDER
+ #endif
+ #define DEFAULT_RESERVED_MAX_DURATION GST_CLOCK_TIME_NONE
+ #define DEFAULT_RESERVED_MOOV_UPDATE_PERIOD GST_CLOCK_TIME_NONE
+ #define DEFAULT_RESERVED_BYTES_PER_SEC_PER_TRAK 550
+ #define DEFAULT_RESERVED_PREFILL FALSE
+ #define DEFAULT_INTERLEAVE_BYTES 0
+ #define DEFAULT_INTERLEAVE_TIME 250*GST_MSECOND
+ #define DEFAULT_FORCE_CHUNKS (FALSE)
+ #define DEFAULT_MAX_RAW_AUDIO_DRIFT 40 * GST_MSECOND
+ #define DEFAULT_START_GAP_THRESHOLD 0
+ #define DEFAULT_FORCE_CREATE_TIMECODE_TRAK FALSE
+ #define DEFAULT_FRAGMENT_MODE GST_QT_MUX_FRAGMENT_DASH_OR_MSS
+
+ static void gst_qt_mux_finalize (GObject * object);
+
+ /* property functions */
+ static void gst_qt_mux_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+ static void gst_qt_mux_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+
+ /* pad functions */
+ static GstPad *gst_qt_mux_request_new_pad (GstElement * element,
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
+ static void gst_qt_mux_release_pad (GstElement * element, GstPad * pad);
+
+ /* event */
+ static gboolean gst_qt_mux_sink_event (GstAggregator * agg,
+ GstAggregatorPad * agg_pad, GstEvent * event);
+ static GstFlowReturn gst_qt_mux_sink_event_pre_queue (GstAggregator * self,
+ GstAggregatorPad * aggpad, GstEvent * event);
+
+ /* aggregator */
+ static GstAggregatorPad *gst_qt_mux_create_new_pad (GstAggregator * self,
+ GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps);
+ static GstFlowReturn gst_qt_mux_aggregate (GstAggregator * agg,
+ gboolean timeout);
+ static GstBuffer *gst_qt_mux_clip_running_time (GstAggregator * agg,
+ GstAggregatorPad * agg_pad, GstBuffer * buf);
+ static gboolean gst_qt_mux_start (GstAggregator * agg);
+ static gboolean gst_qt_mux_stop (GstAggregator * agg);
+
+ /* internal */
+
+ static GstFlowReturn gst_qt_mux_add_buffer (GstQTMux * qtmux, GstQTMuxPad * pad,
+ GstBuffer * buf);
+
+ static GstFlowReturn
+ gst_qt_mux_robust_recording_rewrite_moov (GstQTMux * qtmux);
+
+ static void gst_qt_mux_update_global_statistics (GstQTMux * qtmux);
+ static void gst_qt_mux_update_edit_lists (GstQTMux * qtmux);
+
+ static GstFlowReturn gst_qtmux_push_mdat_stored_buffers (GstQTMux * qtmux);
+
+ static GstElementClass *parent_class = NULL;
+
++#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
++/*
++ [[ Metadata Size ]]
++ 1. Common
++ free = 8
++ moov = 8
++ mvhd = 108
++ -------------
++ total : 124
++
++ 2. Video
++ i. Video common
++ trak = 8
++ tkhd = 92
++ mdia = 8
++ mdhd = 32
++ hdlr = 45
++ minf = 8
++ vmhd = 20
++ dinf = 36 (8, dref : 16 , url : 12)
++ stbl = 8
++ ---------------
++ total : 257
++
++ ii. Variation in file format
++ - MP4
++ ftyp = 32
++ udta = 61
++ - 3GP
++ ftyp = 28
++ udta = 8
++
++ iii. Variation in codec
++ - MPEG4
++ stsd = 137(16, mp4v : 86, esds : 35)
++
++ - H.264 = 487(or 489) + (8*stts_count) + (8*frame) + (4*I-frame)
++ stsd = 134 (SPS 9, PPS 4) or 136 (SPS 111, PPS 4)
++
++ - H.263 = 470 + (8*stts_count) + (8*frame) + (4*I-frame)
++ stsd = 102 -> different from H.264
++
++ iv. Variation in frame
++ stts = 16 + (8*stts_count)
++ stss = 16 + (4*I-frame)
++ stsc = 28
++ stsz = 20 + (4*frame)
++ stco = 16 + (4*frame)
++
++ 3. Audio
++ i. Audio common
++ trak = 8
++ tkhd = 92
++ mdia = 8
++ mdhd = 32
++ hdlr = 45
++ minf = 8
++ smhd = 16
++ dinf = 36 (8, dref : 16, url : 12)
++ stbl = 8
++ ---------------
++ total : 253
++
++ stts = 16
++ stsz = 20
++ stco = 16
++ ------------
++ total : 52
++
++ ii. Variation in file format
++ - MP4
++ udta = 61
++ - 3GP
++ udta = 8
++
++ iii. Variation in codec
++ - Common
++ stts = 16 + (8*stts_count)
++ stsc = 28
++ stsz = 20 + (4*frame)
++ stco = 16 + (4*frame)
++
++ - AAC
++ stsd = 94 (16, mp4a : 78(36 ,esds : 42))
++
++ - AMR
++ stsd = 69 (16, samr : 53(36, damr : 17))
++*/
++
++/* trailer entry size */
++#define ENTRY_SIZE_VIDEO_STTS 8
++#define ENTRY_SIZE_VIDEO_STSS 4
++#define ENTRY_SIZE_VIDEO_STSZ 4
++#define ENTRY_SIZE_VIDEO_STCO 4
++#define ENTRY_SIZE_AUDIO_STTS 8
++#define ENTRY_SIZE_AUDIO_STSZ 4
++#define ENTRY_SIZE_AUDIO_STCO 4
++
++#define ENTRY_SIZE_VIDEO_MPEG4_STSD 137
++#define ENTRY_SIZE_VIDEO_H263P_STSD 102
++#define ENTRY_SIZE_AUDIO_AAC_STSD 94
++#define ENTRY_SIZE_AUDIO_AMR_STSD 69
++
++#define ENTRY_SIZE_STSC 28
++#define ENTRY_SIZE_VIDEO_ST 68 /* fixed part of stts + stss + stsz + stco (16 + 16 + 20 + 16); stsc is counted via ENTRY_SIZE_STSC */
++#define ENTRY_SIZE_AUDIO_ST 52 /* fixed part of stts + stsz + stco (16 + 20 + 16); stsc is counted via ENTRY_SIZE_STSC */
++
++/* common */
++#define MUX_COMMON_SIZE_HEADER 124 /* free + moov + moov.mvhd*/
++
++#define MUX_COMMON_SIZE_VIDEO_HEADER 257
++#define MUX_COMMON_SIZE_AUDIO_HEADER 253
++
++#define MUX_COMMON_SIZE_MP4_FTYP 32
++#define MUX_COMMON_SIZE_3GP_FTYP 28
++
++#define MUX_COMMON_SIZE_MP4_UDTA 61
++#define MUX_COMMON_SIZE_3GP_UDTA 8
++
++static void
++gst_qt_mux_update_expected_trailer_size (GstQTMux *qtmux, GstQTMuxPad *pad)
++{
++ guint nb_video_frames = 0;
++ guint nb_video_i_frames = 0;
++ guint nb_video_stts_entry = 0;
++ guint nb_audio_frames = 0;
++ guint nb_audio_stts_entry = 0;
++ gboolean video_stream = FALSE;
++ gboolean audio_stream = FALSE;
++ guint exp_size = 0;
++ GstQTMuxClass *qtmux_klass = NULL;
++
++ if (qtmux == NULL || pad == NULL) {
++ GST_ERROR_OBJECT (qtmux, "Invalid parameter");
++ return;
++ }
++
++ qtmux_klass = (GstQTMuxClass *)(G_OBJECT_GET_CLASS(qtmux));
++
++ if (!strncmp(GST_PAD_NAME(GST_PAD(pad)), "video", 5)) {
++ nb_video_frames += pad->trak->mdia.minf.stbl.stsz.table_size;
++ nb_video_i_frames += pad->trak->mdia.minf.stbl.stss.entries.len;
++ nb_video_stts_entry += pad->trak->mdia.minf.stbl.stts.entries.len;
++
++ video_stream = TRUE;
++ } else if (!strncmp(GST_PAD_NAME(GST_PAD(pad)), "audio", 5)) {
++ nb_audio_frames += pad->trak->mdia.minf.stbl.stsz.table_size;
++ nb_audio_stts_entry += pad->trak->mdia.minf.stbl.stts.entries.len;
++
++ audio_stream = TRUE;
++ }
++
++ /* free + moov + mvhd */
++ qtmux->expected_trailer_size = MUX_COMMON_SIZE_HEADER;
++
++ /* ftyp + udta * 3 (there are 3 udta fields, all the same size) */
++ switch (qtmux_klass->format) {
++ case GST_QT_MUX_FORMAT_MP4:
++ qtmux->expected_trailer_size += MUX_COMMON_SIZE_MP4_FTYP + MUX_COMMON_SIZE_MP4_UDTA * 3;
++ break;
++ case GST_QT_MUX_FORMAT_3GP:
++ qtmux->expected_trailer_size += MUX_COMMON_SIZE_3GP_FTYP + MUX_COMMON_SIZE_3GP_UDTA * 3;
++ break;
++ default:
++ break;
++ }
++
++ /* Calculate trailer size for video stream */
++ if (video_stream) {
++ switch (pad->fourcc) {
++ case FOURCC_h263:
++ case FOURCC_s263:
++ exp_size += MUX_COMMON_SIZE_VIDEO_HEADER + ENTRY_SIZE_VIDEO_H263P_STSD;
++ break;
++ case FOURCC_mp4v:
++ case FOURCC_MP4V:
++ case FOURCC_fmp4:
++ case FOURCC_FMP4:
++ case FOURCC_3gp4:
++ case FOURCC_3gp6:
++ case FOURCC_3gg6:
++ exp_size += MUX_COMMON_SIZE_VIDEO_HEADER + ENTRY_SIZE_VIDEO_MPEG4_STSD;
++ break;
++ default:
++ break;
++ }
++
++ /* frame related */
++ exp_size += ENTRY_SIZE_VIDEO_ST + (ENTRY_SIZE_VIDEO_STTS * nb_video_stts_entry) +
++ (ENTRY_SIZE_VIDEO_STSS * nb_video_i_frames) + (ENTRY_SIZE_STSC) +
++ ((ENTRY_SIZE_VIDEO_STSZ + ENTRY_SIZE_VIDEO_STCO) * nb_video_frames);
++
++ qtmux->video_expected_trailer_size = exp_size;
++ }
++
++ /* Calculate trailer size for audio stream */
++ if (audio_stream) {
++ exp_size += MUX_COMMON_SIZE_AUDIO_HEADER + ENTRY_SIZE_AUDIO_ST + (ENTRY_SIZE_AUDIO_STTS * nb_audio_stts_entry) +
++ (ENTRY_SIZE_STSC) + ((ENTRY_SIZE_AUDIO_STSZ + ENTRY_SIZE_AUDIO_STCO) * nb_audio_frames);
++
++ if (pad->fourcc == FOURCC_samr)
++ exp_size += ENTRY_SIZE_AUDIO_AMR_STSD;
++ else
++ exp_size += ENTRY_SIZE_AUDIO_AAC_STSD;
++
++ qtmux->audio_expected_trailer_size = exp_size;
++ }
++
++ qtmux->expected_trailer_size += qtmux->video_expected_trailer_size + qtmux->audio_expected_trailer_size;
++
++ /*
++ GST_INFO_OBJECT (qtmux, "pad type %s", GST_PAD_NAME(GST_PAD(pad)));
++ GST_INFO_OBJECT (qtmux, "VIDEO : stts-entry=[%d], i-frame=[%d], video-sample=[%d]", nb_video_stts_entry, nb_video_i_frames, nb_video_frames);
++ GST_INFO_OBJECT (qtmux, "AUDIO : stts-entry=[%d], audio-sample=[%d]", nb_audio_stts_entry, nb_audio_frames);
++ GST_INFO_OBJECT (qtmux, "expected trailer size %d", qtmux->expected_trailer_size);
++ */
++
++ return;
++}
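++
++/* Worked example of the estimate above (illustrative numbers, not from a
++ * real recording): a 3GP file with a single AMR audio track, 100 audio
++ * samples and 10 stts entries:
++ *
++ * common : 124 + 28 (ftyp) + 3 * 8 (udta) = 176
++ * audio : 253 + 52 + 8 * 10 + 28 + (4 + 4) * 100 + 69 = 1282
++ * total : 176 + 1282 = 1458 bytes
++ */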
++#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
++
+ static void
+ gst_qt_mux_base_init (gpointer g_class)
+ {
+ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+ GstQTMuxClass *klass = (GstQTMuxClass *) g_class;
+ GstQTMuxClassParams *params;
+ GstPadTemplate *videosinktempl, *audiosinktempl, *subtitlesinktempl,
+ *captionsinktempl;
+ GstPadTemplate *srctempl;
+ gchar *longname, *description;
+
+ params =
+ (GstQTMuxClassParams *) g_type_get_qdata (G_OBJECT_CLASS_TYPE (g_class),
+ GST_QT_MUX_PARAMS_QDATA);
+ if (!params)
+ return;
+
+ /* construct the element details struct */
+ longname = g_strdup_printf ("%s Muxer", params->prop->long_name);
+ description = g_strdup_printf ("Multiplex audio and video into a %s file",
+ params->prop->long_name);
+ gst_element_class_set_metadata (element_class, longname,
+ "Codec/Muxer", description,
+ "Thiago Sousa Santos <thiagoss@embedded.ufcg.edu.br>");
+ g_free (longname);
+ g_free (description);
+
+ /* pad templates */
+ srctempl = gst_pad_template_new_with_gtype ("src", GST_PAD_SRC,
+ GST_PAD_ALWAYS, params->src_caps, GST_TYPE_AGGREGATOR_PAD);
+ gst_element_class_add_pad_template (element_class, srctempl);
+
+ if (params->audio_sink_caps) {
+ audiosinktempl = gst_pad_template_new_with_gtype ("audio_%u",
+ GST_PAD_SINK, GST_PAD_REQUEST, params->audio_sink_caps,
+ GST_TYPE_QT_MUX_PAD);
+ gst_element_class_add_pad_template (element_class, audiosinktempl);
+ }
+
+ if (params->video_sink_caps) {
+ videosinktempl = gst_pad_template_new_with_gtype ("video_%u",
+ GST_PAD_SINK, GST_PAD_REQUEST, params->video_sink_caps,
+ GST_TYPE_QT_MUX_PAD);
+ gst_element_class_add_pad_template (element_class, videosinktempl);
+ }
+
+ if (params->subtitle_sink_caps) {
+ subtitlesinktempl = gst_pad_template_new_with_gtype ("subtitle_%u",
+ GST_PAD_SINK, GST_PAD_REQUEST, params->subtitle_sink_caps,
+ GST_TYPE_QT_MUX_PAD);
+ gst_element_class_add_pad_template (element_class, subtitlesinktempl);
+ }
+
+ if (params->caption_sink_caps) {
+ captionsinktempl = gst_pad_template_new_with_gtype ("caption_%u",
+ GST_PAD_SINK, GST_PAD_REQUEST, params->caption_sink_caps,
+ GST_TYPE_QT_MUX_PAD);
+ gst_element_class_add_pad_template (element_class, captionsinktempl);
+ }
+
+ klass->format = params->prop->format;
+ }
+
+ static void
+ gst_qt_mux_class_init (GstQTMuxClass * klass)
+ {
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstAggregatorClass *gstagg_class = GST_AGGREGATOR_CLASS (klass);
++#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
++ GParamSpec *tspec = NULL;
++#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ gobject_class->finalize = gst_qt_mux_finalize;
+ gobject_class->get_property = gst_qt_mux_get_property;
+ gobject_class->set_property = gst_qt_mux_set_property;
+
+ g_object_class_install_property (gobject_class, PROP_MOVIE_TIMESCALE,
+ g_param_spec_uint ("movie-timescale", "Movie timescale",
+ "Timescale to use in the movie (units per second, 0 == default)",
+ 0, G_MAXUINT32, DEFAULT_MOVIE_TIMESCALE,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_TRAK_TIMESCALE,
+ g_param_spec_uint ("trak-timescale", "Track timescale",
+ "Timescale to use for the tracks (units per second, 0 is automatic)",
+ 0, G_MAXUINT32, DEFAULT_TRAK_TIMESCALE,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_DO_CTTS,
+ g_param_spec_boolean ("presentation-time",
+ "Include presentation-time info",
+ "Calculate and include presentation/composition time "
+ "(in addition to decoding time)", DEFAULT_DO_CTTS,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+ #ifndef GST_REMOVE_DEPRECATED
+ g_object_class_install_property (gobject_class, PROP_DTS_METHOD,
+ g_param_spec_enum ("dts-method", "dts-method",
+ "Method to determine DTS time (DEPRECATED)",
+ GST_TYPE_QT_MUX_DTS_METHOD, DEFAULT_DTS_METHOD,
+ G_PARAM_DEPRECATED | G_PARAM_READWRITE | G_PARAM_CONSTRUCT |
+ G_PARAM_STATIC_STRINGS));
+ #endif
+ g_object_class_install_property (gobject_class, PROP_FAST_START,
+ g_param_spec_boolean ("faststart", "Format file to faststart",
+ "If the file should be formatted for faststart (headers first)",
+ DEFAULT_FAST_START, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_FAST_START_TEMP_FILE,
+ g_param_spec_string ("faststart-file", "File to use for storing buffers",
+ "File that will be used temporarily to store data from the stream "
+ "when creating a faststart file. If null a filepath will be "
+ "created automatically", DEFAULT_FAST_START_TEMP_FILE,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_DOC_SHOW_DEFAULT));
+ g_object_class_install_property (gobject_class, PROP_MOOV_RECOV_FILE,
+ g_param_spec_string ("moov-recovery-file",
+ "File to store data for posterior moov atom recovery",
+ "File to be used to store "
+ "data for moov atom making movie file recovery possible in case "
+ "of a crash during muxing. Null for disabled. (Experimental)",
+ DEFAULT_MOOV_RECOV_FILE,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_FRAGMENT_DURATION,
+ g_param_spec_uint ("fragment-duration", "Fragment duration",
+ "Fragment durations in ms (produce a fragmented file if > 0)",
+ 0, G_MAXUINT32, klass->format == GST_QT_MUX_FORMAT_ISML ?
+ 2000 : DEFAULT_FRAGMENT_DURATION,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_RESERVED_MAX_DURATION,
+ g_param_spec_uint64 ("reserved-max-duration",
+ "Reserved maximum file duration (ns)",
+ "When set to a value > 0, reserves space for index tables at the "
+ "beginning of the file.",
+ 0, G_MAXUINT64, DEFAULT_RESERVED_MAX_DURATION,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class,
+ PROP_RESERVED_DURATION_REMAINING,
+ g_param_spec_uint64 ("reserved-duration-remaining",
+ "Report the approximate amount of remaining recording space (ns)",
+ "Reports the approximate amount of remaining moov header space "
+ "reserved using reserved-max-duration", 0, G_MAXUINT64, 0,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class,
+ PROP_RESERVED_MOOV_UPDATE_PERIOD,
+ g_param_spec_uint64 ("reserved-moov-update-period",
+ "Interval at which to update index tables (ns)",
+ "When used with reserved-max-duration, periodically updates the "
+ "index tables with information muxed so far.", 0, G_MAXUINT64,
+ DEFAULT_RESERVED_MOOV_UPDATE_PERIOD,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_RESERVED_BYTES_PER_SEC,
+ g_param_spec_uint ("reserved-bytes-per-sec",
+ "Reserved MOOV bytes per second, per track",
+ "Multiplier for converting reserved-max-duration into bytes of header to reserve, per second, per track",
+ 0, 10000, DEFAULT_RESERVED_BYTES_PER_SEC_PER_TRAK,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_RESERVED_PREFILL,
+ g_param_spec_boolean ("reserved-prefill",
+ "Reserved Prefill Samples Table",
+ "Prefill samples table of reserved duration",
+ DEFAULT_RESERVED_PREFILL,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_INTERLEAVE_BYTES,
+ g_param_spec_uint64 ("interleave-bytes", "Interleave (bytes)",
+ "Interleave between streams in bytes",
+ 0, G_MAXUINT64, DEFAULT_INTERLEAVE_BYTES,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_INTERLEAVE_TIME,
+ g_param_spec_uint64 ("interleave-time", "Interleave (time)",
+ "Interleave between streams in nanoseconds",
+ 0, G_MAXUINT64, DEFAULT_INTERLEAVE_TIME,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_FORCE_CHUNKS,
+ g_param_spec_boolean ("force-chunks", "Force Chunks",
+ "Force multiple chunks to be created even for single-stream files",
+ DEFAULT_FORCE_CHUNKS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_MAX_RAW_AUDIO_DRIFT,
+ g_param_spec_uint64 ("max-raw-audio-drift", "Max Raw Audio Drift",
+ "Maximum allowed drift of raw audio samples vs. timestamps in nanoseconds",
+ 0, G_MAXUINT64, DEFAULT_MAX_RAW_AUDIO_DRIFT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_START_GAP_THRESHOLD,
+ g_param_spec_uint64 ("start-gap-threshold", "Start Gap Threshold",
+ "Threshold for creating an edit list for gaps at the start in nanoseconds",
+ 0, G_MAXUINT64, DEFAULT_START_GAP_THRESHOLD,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class,
+ PROP_FORCE_CREATE_TIMECODE_TRAK,
+ g_param_spec_boolean ("force-create-timecode-trak",
+ "Force Create Timecode Trak",
+ "Create a timecode trak even in unsupported flavors",
+ DEFAULT_FORCE_CREATE_TIMECODE_TRAK,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstBaseQTMux:fragment-mode:
+ *
+ * Influences how fragmented files are produced. Only has an effect when
+ * the 'fragment-duration' property is set to a value greater than '0'.
+ *
+ * Currently, two options exist:
+ * - "dash-or-mss": the original fragmented mode, supporting DASH or
+ * Microsoft Smooth Streaming with a single input stream
+ * - "first-moov-then-finalise": a fragmented mode that starts with a
+ * self-contained 'moov' atom for the first fragment, then produces fragments.
+ * When the file is finalised, the initial 'moov' is invalidated and a
+ * new 'moov' is written covering the entire file.
+ *
+ * Since: 1.20
+ */
+ g_object_class_install_property (gobject_class, PROP_FRAGMENT_MODE,
+ g_param_spec_enum ("fragment-mode", "Fragment Mode",
+ "How to to write fragments to the file. Only used when "
+ "\'fragment-duration\' is greather than 0",
+ GST_TYPE_QT_MUX_FRAGMENT_MODE, DEFAULT_FRAGMENT_MODE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
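+ /* Illustrative example (assumes the mp4mux subclass and an available
+ * H.264 encoder; not taken from this patch):
+ * gst-launch-1.0 -e videotestsrc ! x264enc ! mp4mux fragment-duration=2000 \
+ * fragment-mode=first-moov-then-finalise ! filesink location=frag.mp4
+ */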
+
++#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
++ tspec = g_param_spec_uint("expected-trailer-size", "Expected Trailer Size",
++ "Expected trailer size (bytes)",
++ 0, G_MAXUINT, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS);
++ if (tspec)
++ g_object_class_install_property(gobject_class, PROP_EXPECTED_TRAILER_SIZE, tspec);
++ else
++ GST_ERROR("g_param_spec failed for \"expected-trailer-size\"");
++#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
++
+ gstelement_class->request_new_pad =
+ GST_DEBUG_FUNCPTR (gst_qt_mux_request_new_pad);
+ gstelement_class->release_pad = GST_DEBUG_FUNCPTR (gst_qt_mux_release_pad);
+
+ gstagg_class->sink_event = gst_qt_mux_sink_event;
+ gstagg_class->sink_event_pre_queue = gst_qt_mux_sink_event_pre_queue;
+ gstagg_class->aggregate = gst_qt_mux_aggregate;
+ gstagg_class->clip = gst_qt_mux_clip_running_time;
+ gstagg_class->start = gst_qt_mux_start;
+ gstagg_class->stop = gst_qt_mux_stop;
+ gstagg_class->create_new_pad = gst_qt_mux_create_new_pad;
+
+ gst_type_mark_as_plugin_api (GST_TYPE_QT_MUX_PAD, 0);
+ gst_type_mark_as_plugin_api (GST_TYPE_QT_MUX_DTS_METHOD, 0);
+ gst_type_mark_as_plugin_api (GST_TYPE_QT_MUX_FRAGMENT_MODE, 0);
+ }
+
+ static void
+ gst_qt_mux_pad_reset (GstQTMuxPad * qtpad)
+ {
+ qtpad->fourcc = 0;
+ qtpad->is_out_of_order = FALSE;
+ qtpad->sample_size = 0;
+ qtpad->sync = FALSE;
+ qtpad->last_dts = 0;
+ qtpad->sample_offset = 0;
+ qtpad->dts_adjustment = GST_CLOCK_TIME_NONE;
+ qtpad->first_ts = GST_CLOCK_TIME_NONE;
+ qtpad->first_dts = GST_CLOCK_TIME_NONE;
+ qtpad->prepare_buf_func = NULL;
+ qtpad->create_empty_buffer = NULL;
+ qtpad->avg_bitrate = 0;
+ qtpad->max_bitrate = 0;
+ qtpad->total_duration = 0;
+ qtpad->total_bytes = 0;
+ qtpad->sparse = FALSE;
+ qtpad->first_cc_sample_size = 0;
+ qtpad->flow_status = GST_FLOW_OK;
+ qtpad->warned_empty_duration = FALSE;
+
+ gst_buffer_replace (&qtpad->last_buf, NULL);
+
+ gst_caps_replace (&qtpad->configured_caps, NULL);
+
+ if (qtpad->tags) {
+ gst_tag_list_unref (qtpad->tags);
+ qtpad->tags = NULL;
+ }
+
+ /* reference owned elsewhere */
+ qtpad->trak = NULL;
+ qtpad->tc_trak = NULL;
+
+ if (qtpad->traf) {
+ atom_traf_free (qtpad->traf);
+ qtpad->traf = NULL;
+ }
+ atom_array_clear (&qtpad->fragment_buffers);
+ if (qtpad->samples)
+ g_array_unref (qtpad->samples);
+ qtpad->samples = NULL;
+
+ /* reference owned elsewhere */
+ qtpad->tfra = NULL;
+
+ qtpad->first_pts = GST_CLOCK_TIME_NONE;
+ qtpad->tc_pos = -1;
+ if (qtpad->first_tc)
+ gst_video_time_code_free (qtpad->first_tc);
+ qtpad->first_tc = NULL;
+
+ if (qtpad->raw_audio_adapter)
+ gst_object_unref (qtpad->raw_audio_adapter);
+ qtpad->raw_audio_adapter = NULL;
+ }
+
+ /*
+ * Takes GstQTMux back to its initial state
+ */
+ static void
+ gst_qt_mux_reset (GstQTMux * qtmux, gboolean alloc)
+ {
+ GSList *walk;
+ GList *l;
+
+ qtmux->state = GST_QT_MUX_STATE_NONE;
+ qtmux->header_size = 0;
+ qtmux->mdat_size = 0;
+ qtmux->moov_pos = 0;
+ qtmux->mdat_pos = 0;
+ qtmux->longest_chunk = GST_CLOCK_TIME_NONE;
+ qtmux->fragment_sequence = 0;
+
+ if (qtmux->ftyp) {
+ atom_ftyp_free (qtmux->ftyp);
+ qtmux->ftyp = NULL;
+ }
+ if (qtmux->moov) {
+ atom_moov_free (qtmux->moov);
+ qtmux->moov = NULL;
+ }
+ if (qtmux->mfra) {
+ atom_mfra_free (qtmux->mfra);
+ qtmux->mfra = NULL;
+ }
+ if (qtmux->fast_start_file) {
+ fclose (qtmux->fast_start_file);
+ g_remove (qtmux->fast_start_file_path);
+ qtmux->fast_start_file = NULL;
+ }
+ if (qtmux->moov_recov_file) {
+ fclose (qtmux->moov_recov_file);
+ qtmux->moov_recov_file = NULL;
+ }
+ for (walk = qtmux->extra_atoms; walk; walk = g_slist_next (walk)) {
+ AtomInfo *ainfo = (AtomInfo *) walk->data;
+ ainfo->free_func (ainfo->atom);
+ g_free (ainfo);
+ }
+ g_slist_free (qtmux->extra_atoms);
+ qtmux->extra_atoms = NULL;
+
+ GST_OBJECT_LOCK (qtmux);
+ gst_tag_setter_reset_tags (GST_TAG_SETTER (qtmux));
+
+ /* reset pad data */
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qtpad = (GstQTMuxPad *) l->data;
+ gst_qt_mux_pad_reset (qtpad);
+
+ /* hm, moov_free above yanked the traks away from us,
+ * so do not free, but do clear */
+ qtpad->trak = NULL;
+ }
+
+ if (alloc) {
+ qtmux->moov = atom_moov_new (qtmux->context);
++#ifndef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
+ /* ensure all is as nice and fresh as request_new_pad would provide it */
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qtpad = (GstQTMuxPad *) l->data;
+
+ qtpad->trak = atom_trak_new (qtmux->context);
+ atom_moov_add_trak (qtmux->moov, qtpad->trak);
+ }
++#endif
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+
+ g_list_free_full (qtmux->output_buffers, (GDestroyNotify) gst_buffer_unref);
+ qtmux->output_buffers = NULL;
+
+ qtmux->current_pad = NULL;
+ qtmux->current_chunk_size = 0;
+ qtmux->current_chunk_duration = 0;
+ qtmux->current_chunk_offset = -1;
+
+ qtmux->reserved_moov_size = 0;
+ qtmux->last_moov_update = GST_CLOCK_TIME_NONE;
+ qtmux->muxed_since_last_update = 0;
+ qtmux->reserved_duration_remaining = GST_CLOCK_TIME_NONE;
++
++#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
++ qtmux->expected_trailer_size = 0;
++ qtmux->video_expected_trailer_size = 0;
++ qtmux->audio_expected_trailer_size = 0;
++#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
+ }
+
+ static GstBuffer *
+ gst_qt_mux_clip_running_time (GstAggregator * agg,
+ GstAggregatorPad * agg_pad, GstBuffer * buf)
+ {
+ GstQTMuxPad *qtpad = GST_QT_MUX_PAD (agg_pad);
+ GstBuffer *outbuf = buf;
+
+ /* invalid left alone and passed */
+ if (G_LIKELY (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DTS_OR_PTS (buf)))) {
+ GstClockTime time;
+ GstClockTime buf_dts, abs_dts;
+ gint dts_sign;
+
+ time = GST_BUFFER_PTS (buf);
+
+ if (GST_CLOCK_TIME_IS_VALID (time)) {
+ time =
+ gst_segment_to_running_time (&agg_pad->segment, GST_FORMAT_TIME,
+ time);
+ if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (time))) {
+ GST_DEBUG_OBJECT (agg_pad, "clipping buffer on pad outside segment %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_PTS (buf)));
+ gst_buffer_unref (buf);
+ return NULL;
+ }
+ }
+
+ GST_LOG_OBJECT (agg_pad, "buffer pts %" GST_TIME_FORMAT " -> %"
+ GST_TIME_FORMAT " running time",
+ GST_TIME_ARGS (GST_BUFFER_PTS (buf)), GST_TIME_ARGS (time));
+ outbuf = gst_buffer_make_writable (buf);
+ GST_BUFFER_PTS (outbuf) = time;
+
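+ /* a DTS may map to a negative running time; GstBuffer timestamps are
+ * unsigned, so keep the signed value in qtpad->dts and leave the buffer
+ * DTS unset in that case */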
+ dts_sign = gst_segment_to_running_time_full (&agg_pad->segment,
+ GST_FORMAT_TIME, GST_BUFFER_DTS (outbuf), &abs_dts);
+ buf_dts = GST_BUFFER_DTS (outbuf);
+ if (dts_sign > 0) {
+ GST_BUFFER_DTS (outbuf) = abs_dts;
+ qtpad->dts = abs_dts;
+ } else if (dts_sign < 0) {
+ GST_BUFFER_DTS (outbuf) = GST_CLOCK_TIME_NONE;
+ qtpad->dts = -((gint64) abs_dts);
+ } else {
+ GST_BUFFER_DTS (outbuf) = GST_CLOCK_TIME_NONE;
+ qtpad->dts = GST_CLOCK_STIME_NONE;
+ }
+
+ GST_LOG_OBJECT (agg_pad, "buffer dts %" GST_TIME_FORMAT " -> %"
+ GST_STIME_FORMAT " running time", GST_TIME_ARGS (buf_dts),
+ GST_STIME_ARGS (qtpad->dts));
+ }
+
+ return outbuf;
+ }
+
+ static void
+ gst_qt_mux_init (GstQTMux * qtmux, GstQTMuxClass * qtmux_klass)
+ {
+ /* properties set to default upon construction */
+
+ qtmux->reserved_max_duration = DEFAULT_RESERVED_MAX_DURATION;
+ qtmux->reserved_moov_update_period = DEFAULT_RESERVED_MOOV_UPDATE_PERIOD;
+ qtmux->reserved_bytes_per_sec_per_trak =
+ DEFAULT_RESERVED_BYTES_PER_SEC_PER_TRAK;
+ qtmux->interleave_bytes = DEFAULT_INTERLEAVE_BYTES;
+ qtmux->interleave_time = DEFAULT_INTERLEAVE_TIME;
+ qtmux->force_chunks = DEFAULT_FORCE_CHUNKS;
+ qtmux->max_raw_audio_drift = DEFAULT_MAX_RAW_AUDIO_DRIFT;
+ qtmux->start_gap_threshold = DEFAULT_START_GAP_THRESHOLD;
+ qtmux->force_create_timecode_trak = DEFAULT_FORCE_CREATE_TIMECODE_TRAK;
+
+ /* always need this */
+ qtmux->context =
+ atoms_context_new (gst_qt_mux_map_format_to_flavor (qtmux_klass->format),
+ qtmux->force_create_timecode_trak);
+
+ /* internals to initial state */
+ gst_qt_mux_reset (qtmux, TRUE);
+ }
+
+
+ static void
+ gst_qt_mux_finalize (GObject * object)
+ {
+ GstQTMux *qtmux = GST_QT_MUX_CAST (object);
+
+ gst_qt_mux_reset (qtmux, FALSE);
+
+ g_free (qtmux->fast_start_file_path);
+ g_free (qtmux->moov_recov_file_path);
+
+ atoms_context_free (qtmux->context);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ static GstBuffer *
+ gst_qt_mux_prepare_jpc_buffer (GstQTMuxPad * qtpad, GstBuffer * buf,
+ GstQTMux * qtmux)
+ {
+ GstBuffer *newbuf;
+ GstMapInfo map;
+ gsize size;
+
+ GST_LOG_OBJECT (qtmux, "Preparing jpc buffer");
+
+ if (buf == NULL)
+ return NULL;
+
+ size = gst_buffer_get_size (buf);
+ newbuf = gst_buffer_new_and_alloc (size + 8);
+ gst_buffer_copy_into (newbuf, buf, GST_BUFFER_COPY_ALL, 8, size);
+
+ gst_buffer_map (newbuf, &map, GST_MAP_WRITE);
+ GST_WRITE_UINT32_BE (map.data, map.size);
+ GST_WRITE_UINT32_LE (map.data + 4, FOURCC_jp2c);
+
+ gst_buffer_unmap (newbuf, &map);
+ gst_buffer_unref (buf);
+
+ return newbuf;
+ }
+
+ static gsize
+ extract_608_field_from_s334_1a (const guint8 * ccdata, gsize ccdata_size,
+ guint field, guint8 ** res)
+ {
+ guint8 *storage;
+ gsize storage_size = 128;
+ gsize i, res_size = 0;
+
+ storage = g_malloc0 (storage_size);
+
+ /* Iterate over the ccdata and put the corresponding tuples for the given field
+ * in the storage */
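+ /* (each 3-byte tuple: one flag byte, whose bit 7 selects field 1,
+ * followed by a two-byte cc pair) */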
+ for (i = 0; i < ccdata_size; i += 3) {
+ if ((field == 1 && (ccdata[i] & 0x80)) ||
+ (field == 2 && !(ccdata[i] & 0x80))) {
+ GST_DEBUG ("Storing matching cc for field %d : 0x%02x 0x%02x", field,
+ ccdata[i + 1], ccdata[i + 2]);
+ if (res_size >= storage_size) {
+ storage_size += 128;
+ storage = g_realloc (storage, storage_size);
+ }
+ storage[res_size] = ccdata[i + 1];
+ storage[res_size + 1] = ccdata[i + 2];
+ res_size += 2;
+ }
+ }
+
+ if (res_size == 0) {
+ g_free (storage);
+ *res = NULL;
+ return 0;
+ }
+
+ *res = storage;
+ return res_size;
+ }
+
+
+ static GstBuffer *
+ gst_qt_mux_prepare_caption_buffer (GstQTMuxPad * qtpad, GstBuffer * buf,
+ GstQTMux * qtmux)
+ {
+ GstBuffer *newbuf = NULL;
+ GstMapInfo map, inmap;
+ gsize size;
+ gboolean in_prefill;
+
+ if (buf == NULL)
+ return NULL;
+
+ in_prefill = (qtmux->mux_mode == GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL);
+
+ size = gst_buffer_get_size (buf);
+ gst_buffer_map (buf, &inmap, GST_MAP_READ);
+
+ GST_LOG_OBJECT (qtmux,
+ "Preparing caption buffer %" GST_FOURCC_FORMAT " size:%" G_GSIZE_FORMAT,
+ GST_FOURCC_ARGS (qtpad->fourcc), size);
+
+ switch (qtpad->fourcc) {
+ case FOURCC_c608:
+ {
+ guint8 *cdat, *cdt2;
+ gsize cdat_size, cdt2_size, total_size = 0;
+ gsize write_offs = 0;
+
+ cdat_size =
+ extract_608_field_from_s334_1a (inmap.data, inmap.size, 1, &cdat);
+ cdt2_size =
+ extract_608_field_from_s334_1a (inmap.data, inmap.size, 2, &cdt2);
+
+ if (cdat_size)
+ total_size += cdat_size + 8;
+ if (cdt2_size)
+ total_size += cdt2_size + 8;
+ if (total_size == 0) {
+ GST_DEBUG_OBJECT (qtmux, "No 608 data ?");
+ /* FIXME : We might want to *always* store something, even if
+ * it's "empty" CC (i.e. 0x80 0x80) */
+ break;
+ }
+
+ newbuf = gst_buffer_new_and_alloc (in_prefill ? 20 : total_size);
+ /* Let's copy over all metadata and not the memory */
+ gst_buffer_copy_into (newbuf, buf, GST_BUFFER_COPY_METADATA, 0, size);
+
+ gst_buffer_map (newbuf, &map, GST_MAP_WRITE);
+ if (cdat_size || in_prefill) {
+ GST_WRITE_UINT32_BE (map.data, in_prefill ? 10 : cdat_size + 8);
+ GST_WRITE_UINT32_LE (map.data + 4, FOURCC_cdat);
+ if (cdat_size)
+ memcpy (map.data + 8, cdat, in_prefill ? 2 : cdat_size);
+ else {
+ /* Write 'empty' CC */
+ map.data[8] = 0x80;
+ map.data[9] = 0x80;
+ }
+ write_offs = in_prefill ? 10 : cdat_size + 8;
+ if (cdat_size)
+ g_free (cdat);
+ }
+
+ if (cdt2_size || in_prefill) {
+ GST_WRITE_UINT32_BE (map.data + write_offs,
+ in_prefill ? 10 : cdt2_size + 8);
+ GST_WRITE_UINT32_LE (map.data + write_offs + 4, FOURCC_cdt2);
+ if (cdt2_size)
+ memcpy (map.data + write_offs + 8, cdt2, in_prefill ? 2 : cdt2_size);
+ else {
+ /* Write 'empty' CC */
+ map.data[write_offs + 8] = 0x80;
+ map.data[write_offs + 9] = 0x80;
+ }
+ if (cdt2_size)
+ g_free (cdt2);
+ }
+ gst_buffer_unmap (newbuf, &map);
+ break;
+ }
+ case FOURCC_c708:
+ {
+ gsize actual_size;
+
+ /* Take the whole CDP */
+ if (in_prefill) {
+ if (size > qtpad->first_cc_sample_size) {
+ GST_ELEMENT_WARNING (qtmux, RESOURCE, WRITE,
+ ("Truncating too big CEA708 sample (%" G_GSIZE_FORMAT " > %u)",
+ size, qtpad->first_cc_sample_size), (NULL));
+ } else if (size < qtpad->first_cc_sample_size) {
+ GST_ELEMENT_WARNING (qtmux, RESOURCE, WRITE,
+ ("Padding too small CEA708 sample (%" G_GSIZE_FORMAT " < %u)",
+ size, qtpad->first_cc_sample_size), (NULL));
+ }
+
+ actual_size = MIN (qtpad->first_cc_sample_size, size);
+ } else {
+ actual_size = size;
+ }
+
+ newbuf = gst_buffer_new_and_alloc (actual_size + 8);
+
+ /* Let's copy over all metadata and not the memory */
+ gst_buffer_copy_into (newbuf, buf, GST_BUFFER_COPY_METADATA, 0, -1);
+
+ gst_buffer_map (newbuf, &map, GST_MAP_WRITE);
+
+ GST_WRITE_UINT32_BE (map.data, actual_size + 8);
+ GST_WRITE_UINT32_LE (map.data + 4, FOURCC_ccdp);
+ memcpy (map.data + 8, inmap.data, actual_size);
+
+ gst_buffer_unmap (newbuf, &map);
+ break;
+ }
+ default:
+ /* theoretically this should never happen, but let's keep it here just in case */
+ GST_WARNING_OBJECT (qtmux, "Unknown caption format");
+ break;
+ }
+
+ gst_buffer_unmap (buf, &inmap);
+ gst_buffer_unref (buf);
+
+ return newbuf;
+ }
+
+ static GstBuffer *
+ gst_qt_mux_prepare_tx3g_buffer (GstQTMuxPad * qtpad, GstBuffer * buf,
+ GstQTMux * qtmux)
+ {
+ GstBuffer *newbuf;
+ GstMapInfo frommap;
+ GstMapInfo tomap;
+ gsize size;
+ const guint8 *dataend;
+
+ GST_LOG_OBJECT (qtmux, "Preparing tx3g buffer %" GST_PTR_FORMAT, buf);
+
+ if (buf == NULL)
+ return NULL;
+
+ gst_buffer_map (buf, &frommap, GST_MAP_READ);
+
+ dataend = memchr (frommap.data, 0, frommap.size);
+ size = dataend ? dataend - frommap.data : frommap.size;
+ newbuf = gst_buffer_new_and_alloc (size + 2);
+
+ gst_buffer_map (newbuf, &tomap, GST_MAP_WRITE);
+
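+ /* tx3g text sample layout: 16-bit big-endian text length, then the text */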
+ GST_WRITE_UINT16_BE (tomap.data, size);
+ memcpy (tomap.data + 2, frommap.data, size);
+
+ gst_buffer_unmap (newbuf, &tomap);
+ gst_buffer_unmap (buf, &frommap);
+
+ gst_buffer_copy_into (newbuf, buf, GST_BUFFER_COPY_METADATA, 0, size);
+
+ /* gst_buffer_copy_into is trying to be too clever and
+ * won't copy duration when size is different */
+ GST_BUFFER_DURATION (newbuf) = GST_BUFFER_DURATION (buf);
+
+ gst_buffer_unref (buf);
+
+ return newbuf;
+ }
+
+ static void
+ gst_qt_mux_pad_add_ac3_extension (GstQTMux * qtmux, GstQTMuxPad * qtpad,
+ guint8 fscod, guint8 frmsizcod, guint8 bsid, guint8 bsmod, guint8 acmod,
+ guint8 lfe_on)
+ {
+ AtomInfo *ext;
+
+ g_return_if_fail (qtpad->trak_ste);
+
+ ext = build_ac3_extension (fscod, bsid, bsmod, acmod, lfe_on, frmsizcod >> 1); /* bitrate_code is inside frmsizcod */
+
+ sample_table_entry_add_ext_atom (qtpad->trak_ste, ext);
+ }
+
+ static GstBuffer *
+ gst_qt_mux_prepare_parse_ac3_frame (GstQTMuxPad * qtpad, GstBuffer * buf,
+ GstQTMux * qtmux)
+ {
+ GstMapInfo map;
+ GstByteReader reader;
+ guint off;
+
+ if (!gst_buffer_map (buf, &map, GST_MAP_READ)) {
+ GST_WARNING_OBJECT (qtpad, "Failed to map buffer");
+ return buf;
+ }
+
+ if (G_UNLIKELY (map.size < 8))
+ goto done;
+
+ gst_byte_reader_init (&reader, map.data, map.size);
+ off = gst_byte_reader_masked_scan_uint32 (&reader, 0xffff0000, 0x0b770000,
+ 0, map.size);
+
+ if (off != -1) {
+ GstBitReader bits;
+ guint8 fscod, frmsizcod, bsid, bsmod, acmod, lfe_on;
+
+ GST_DEBUG_OBJECT (qtpad, "Found ac3 sync point at offset: %u", off);
+
+ gst_bit_reader_init (&bits, map.data, map.size);
+
+ /* off + sync + crc */
+ gst_bit_reader_skip_unchecked (&bits, off * 8 + 16 + 16);
+
+ fscod = gst_bit_reader_get_bits_uint8_unchecked (&bits, 2);
+ frmsizcod = gst_bit_reader_get_bits_uint8_unchecked (&bits, 6);
+ bsid = gst_bit_reader_get_bits_uint8_unchecked (&bits, 5);
+ bsmod = gst_bit_reader_get_bits_uint8_unchecked (&bits, 3);
+ acmod = gst_bit_reader_get_bits_uint8_unchecked (&bits, 3);
+
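+ /* per the AC-3 (ATSC A/52) bitstream info, cmixlev/surmixlev/dsurmod
+ * may follow acmod; skip whichever are present to reach lfeon */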
+ if ((acmod & 0x1) && (acmod != 0x1)) /* 3 front channels */
+ gst_bit_reader_skip_unchecked (&bits, 2);
+ if ((acmod & 0x4)) /* if a surround channel exists */
+ gst_bit_reader_skip_unchecked (&bits, 2);
+ if (acmod == 0x2) /* if in 2/0 mode */
+ gst_bit_reader_skip_unchecked (&bits, 2);
+
+ lfe_on = gst_bit_reader_get_bits_uint8_unchecked (&bits, 1);
+
+ gst_qt_mux_pad_add_ac3_extension (qtmux, qtpad, fscod, frmsizcod, bsid,
+ bsmod, acmod, lfe_on);
+
+ /* AC-3 spec says that those values should be constant for the
+ * whole stream when muxed in mp4. We trust the input follows it */
+ GST_DEBUG_OBJECT (qtpad, "Data parsed, removing "
+ "prepare buffer function");
+ qtpad->prepare_buf_func = NULL;
+ }
+
+ done:
+ gst_buffer_unmap (buf, &map);
+ return buf;
+ }
+
+ static GstBuffer *
+ gst_qt_mux_create_empty_tx3g_buffer (GstQTMuxPad * qtpad, gint64 duration)
+ {
+ guint8 *data;
+
+ data = g_malloc (2);
+ GST_WRITE_UINT16_BE (data, 0);
+
+ return gst_buffer_new_wrapped (data, 2);
+ }
+
+ static void
+ gst_qt_mux_add_mp4_tag (GstQTMux * qtmux, const GstTagList * list,
+ AtomUDTA * udta, const char *tag, const char *tag2, guint32 fourcc)
+ {
+ switch (gst_tag_get_type (tag)) {
+ /* strings */
+ case G_TYPE_STRING:
+ {
+ gchar *str = NULL;
+
+ if (!gst_tag_list_get_string (list, tag, &str) || !str)
+ break;
+ GST_DEBUG_OBJECT (qtmux, "Adding tag %" GST_FOURCC_FORMAT " -> %s",
+ GST_FOURCC_ARGS (fourcc), str);
+ atom_udta_add_str_tag (udta, fourcc, str);
+ g_free (str);
+ break;
+ }
+ /* double */
+ case G_TYPE_DOUBLE:
+ {
+ gdouble value;
+
+ if (!gst_tag_list_get_double (list, tag, &value))
+ break;
+ GST_DEBUG_OBJECT (qtmux, "Adding tag %" GST_FOURCC_FORMAT " -> %u",
+ GST_FOURCC_ARGS (fourcc), (gint) value);
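+ /* flag value 21 (0x15) marks an iTunes-style 'data' atom holding a
+ * big-endian signed integer */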
+ atom_udta_add_uint_tag (udta, fourcc, 21, (gint) value);
+ break;
+ }
+ case G_TYPE_UINT:
+ {
+ guint value = 0;
+ if (tag2) {
+ /* paired unsigned integers */
+ guint count = 0;
+ gboolean got_tag;
+
+ got_tag = gst_tag_list_get_uint (list, tag, &value);
+ got_tag = gst_tag_list_get_uint (list, tag2, &count) || got_tag;
+ if (!got_tag)
+ break;
+ GST_DEBUG_OBJECT (qtmux, "Adding tag %" GST_FOURCC_FORMAT " -> %u/%u",
+ GST_FOURCC_ARGS (fourcc), value, count);
+ atom_udta_add_uint_tag (udta, fourcc, 0,
+ value << 16 | (count & 0xFFFF));
+ } else {
+ /* unpaired unsigned integers */
+ if (!gst_tag_list_get_uint (list, tag, &value))
+ break;
+ GST_DEBUG_OBJECT (qtmux, "Adding tag %" GST_FOURCC_FORMAT " -> %u",
+ GST_FOURCC_ARGS (fourcc), value);
+ atom_udta_add_uint_tag (udta, fourcc, 1, value);
+ }
+ break;
+ }
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ }
+
+ static void
+ gst_qt_mux_add_mp4_date (GstQTMux * qtmux, const GstTagList * list,
+ AtomUDTA * udta, const char *tag, const char *tag2, guint32 fourcc)
+ {
+ GDate *date = NULL;
+ GDateYear year;
+ GDateMonth month;
+ GDateDay day;
+ gchar *str;
+
+ g_return_if_fail (gst_tag_get_type (tag) == G_TYPE_DATE);
+
+ if (!gst_tag_list_get_date (list, tag, &date) || !date)
+ return;
+
+ year = g_date_get_year (date);
+ month = g_date_get_month (date);
+ day = g_date_get_day (date);
+
+ g_date_free (date);
+
+ if (year == G_DATE_BAD_YEAR && month == G_DATE_BAD_MONTH &&
+ day == G_DATE_BAD_DAY) {
+ GST_WARNING_OBJECT (qtmux, "invalid date in tag");
+ return;
+ }
+
+ str = g_strdup_printf ("%u-%u-%u", year, month, day);
+ GST_DEBUG_OBJECT (qtmux, "Adding tag %" GST_FOURCC_FORMAT " -> %s",
+ GST_FOURCC_ARGS (fourcc), str);
+ atom_udta_add_str_tag (udta, fourcc, str);
+ g_free (str);
+ }
+
+ static void
+ gst_qt_mux_add_mp4_cover (GstQTMux * qtmux, const GstTagList * list,
+ AtomUDTA * udta, const char *tag, const char *tag2, guint32 fourcc)
+ {
+ GValue value = { 0, };
+ GstBuffer *buf;
+ GstSample *sample;
+ GstCaps *caps;
+ GstStructure *structure;
+ gint flags = 0;
+ GstMapInfo map;
+
+ g_return_if_fail (gst_tag_get_type (tag) == GST_TYPE_SAMPLE);
+
+ if (!gst_tag_list_copy_value (&value, list, tag))
+ return;
+
+ sample = gst_value_get_sample (&value);
+
+ if (!sample)
+ goto done;
+
+ buf = gst_sample_get_buffer (sample);
+ if (!buf)
+ goto done;
+
+ caps = gst_sample_get_caps (sample);
+ if (!caps) {
+ GST_WARNING_OBJECT (qtmux, "preview image without caps");
+ goto done;
+ }
+
+ GST_DEBUG_OBJECT (qtmux, "preview image caps %" GST_PTR_FORMAT, caps);
+
+ structure = gst_caps_get_structure (caps, 0);
+ if (gst_structure_has_name (structure, "image/jpeg"))
+ flags = 13;
+ else if (gst_structure_has_name (structure, "image/png"))
+ flags = 14;
+
+ if (!flags) {
+ GST_WARNING_OBJECT (qtmux, "preview image format not supported");
+ goto done;
+ }
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ GST_DEBUG_OBJECT (qtmux, "Adding tag %" GST_FOURCC_FORMAT
+ " -> image size %" G_GSIZE_FORMAT "", GST_FOURCC_ARGS (fourcc), map.size);
+ atom_udta_add_tag (udta, fourcc, flags, map.data, map.size);
+ gst_buffer_unmap (buf, &map);
+ done:
+ g_value_unset (&value);
+ }
+
+ static void
+ gst_qt_mux_add_3gp_str (GstQTMux * qtmux, const GstTagList * list,
+ AtomUDTA * udta, const char *tag, const char *tag2, guint32 fourcc)
+ {
+ gchar *str = NULL;
+ guint number;
+
+ g_return_if_fail (gst_tag_get_type (tag) == G_TYPE_STRING);
+ g_return_if_fail (!tag2 || gst_tag_get_type (tag2) == G_TYPE_UINT);
+
+ if (!gst_tag_list_get_string (list, tag, &str) || !str)
+ return;
+
+ if (tag2)
+ if (!gst_tag_list_get_uint (list, tag2, &number))
+ tag2 = NULL;
+
+ if (!tag2) {
+ GST_DEBUG_OBJECT (qtmux, "Adding tag %" GST_FOURCC_FORMAT " -> %s",
+ GST_FOURCC_ARGS (fourcc), str);
+ atom_udta_add_3gp_str_tag (udta, fourcc, str);
+ } else {
+ GST_DEBUG_OBJECT (qtmux, "Adding tag %" GST_FOURCC_FORMAT " -> %s/%d",
+ GST_FOURCC_ARGS (fourcc), str, number);
+ atom_udta_add_3gp_str_int_tag (udta, fourcc, str, number);
+ }
+
+ g_free (str);
+ }
+
+ static void
+ gst_qt_mux_add_3gp_date (GstQTMux * qtmux, const GstTagList * list,
+ AtomUDTA * udta, const char *tag, const char *tag2, guint32 fourcc)
+ {
+ GDate *date = NULL;
+ GDateYear year;
+
+ g_return_if_fail (gst_tag_get_type (tag) == G_TYPE_DATE);
+
+ if (!gst_tag_list_get_date (list, tag, &date) || !date)
+ return;
+
+ year = g_date_get_year (date);
+ g_date_free (date);
+
+ if (year == G_DATE_BAD_YEAR) {
+ GST_WARNING_OBJECT (qtmux, "invalid date in tag");
+ return;
+ }
+
+ GST_DEBUG_OBJECT (qtmux, "Adding tag %" GST_FOURCC_FORMAT " -> %d",
+ GST_FOURCC_ARGS (fourcc), year);
+ atom_udta_add_3gp_uint_tag (udta, fourcc, year);
+ }
+
+ static void
+ gst_qt_mux_add_3gp_location (GstQTMux * qtmux, const GstTagList * list,
+ AtomUDTA * udta, const char *tag, const char *tag2, guint32 fourcc)
+ {
+ gdouble latitude = -360, longitude = -360, altitude = 0;
+ gchar *location = NULL;
+ guint8 *data, *ddata;
+ gint size = 0, len = 0;
+ gboolean ret = FALSE;
+
+ g_return_if_fail (strcmp (tag, GST_TAG_GEO_LOCATION_NAME) == 0);
+
+ ret = gst_tag_list_get_string (list, tag, &location);
+ ret |= gst_tag_list_get_double (list, GST_TAG_GEO_LOCATION_LONGITUDE,
+ &longitude);
+ ret |= gst_tag_list_get_double (list, GST_TAG_GEO_LOCATION_LATITUDE,
+ &latitude);
+ ret |= gst_tag_list_get_double (list, GST_TAG_GEO_LOCATION_ELEVATION,
+ &altitude);
+
+ if (!ret)
+ return;
+
+ if (location)
+ len = strlen (location);
+ size += len + 1 + 2;
+
+ /* role + (long, lat, alt) + body + notes */
+ size += 1 + 3 * 4 + 1 + 1;
+
+ data = ddata = g_malloc (size);
+
+ /* language tag */
+ GST_WRITE_UINT16_BE (data, language_code (GST_QT_MUX_DEFAULT_TAG_LANGUAGE));
+ /* location */
+ if (location)
+ memcpy (data + 2, location, len);
+ GST_WRITE_UINT8 (data + 2 + len, 0);
+ data += len + 1 + 2;
+ /* role */
+ GST_WRITE_UINT8 (data, 0);
+ /* long, lat, alt */
+ #define QT_WRITE_SFP32(data, fp) GST_WRITE_UINT32_BE(data, (guint32) ((gint) (fp * 65536.0)))
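+ /* degrees are written as signed 16.16 fixed point (value scaled by 65536) */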
+ QT_WRITE_SFP32 (data + 1, longitude);
+ QT_WRITE_SFP32 (data + 5, latitude);
+ QT_WRITE_SFP32 (data + 9, altitude);
+ /* neither astronomical body nor notes */
+ GST_WRITE_UINT16_BE (data + 13, 0);
+
+ GST_DEBUG_OBJECT (qtmux, "Adding tag 'loci'");
+ atom_udta_add_3gp_tag (udta, fourcc, ddata, size);
+ g_free (ddata);
+ }
+
+ static void
+ gst_qt_mux_add_3gp_keywords (GstQTMux * qtmux, const GstTagList * list,
+ AtomUDTA * udta, const char *tag, const char *tag2, guint32 fourcc)
+ {
+ gchar *keywords = NULL;
+ guint8 *data, *ddata;
+ gint size = 0, i;
+ gchar **kwds;
+
+ g_return_if_fail (strcmp (tag, GST_TAG_KEYWORDS) == 0);
+
+ if (!gst_tag_list_get_string (list, tag, &keywords) || !keywords)
+ return;
+
+ kwds = g_strsplit (keywords, ",", 0);
+ g_free (keywords);
+
+ size = 0;
+ for (i = 0; kwds[i]; i++) {
+ /* size byte + null-terminator */
+ size += strlen (kwds[i]) + 1 + 1;
+ }
+
+ /* language tag + count + keywords */
+ size += 2 + 1;
+
+ data = ddata = g_malloc (size);
+
+ /* language tag */
+ GST_WRITE_UINT16_BE (data, language_code (GST_QT_MUX_DEFAULT_TAG_LANGUAGE));
+ /* count */
+ GST_WRITE_UINT8 (data + 2, i);
+ data += 3;
+ /* keywords */
+ for (i = 0; kwds[i]; ++i) {
+ gint len = strlen (kwds[i]);
+
+ GST_DEBUG_OBJECT (qtmux, "Adding tag %" GST_FOURCC_FORMAT " -> %s",
+ GST_FOURCC_ARGS (fourcc), kwds[i]);
+ /* size */
+ GST_WRITE_UINT8 (data, len + 1);
+ memcpy (data + 1, kwds[i], len + 1);
+ data += len + 2;
+ }
+
+ g_strfreev (kwds);
+
+ atom_udta_add_3gp_tag (udta, fourcc, ddata, size);
+ g_free (ddata);
+ }
+
+ static gboolean
+ gst_qt_mux_parse_classification_string (GstQTMux * qtmux, const gchar * input,
+ guint32 * p_fourcc, guint16 * p_table, gchar ** p_content)
+ {
+ guint32 fourcc;
+ gint table;
+ gint size;
+ const gchar *data;
+
+ data = input;
+ size = strlen (input);
+
+ if (size < 4 + 3 + 1 + 1 + 1) {
+ /* at least the minimum xxxx://y/z */
+ GST_WARNING_OBJECT (qtmux, "Classification tag input (%s) too short, "
+ "ignoring", input);
+ return FALSE;
+ }
+
+ /* read the fourcc */
+ memcpy (&fourcc, data, 4);
+ size -= 4;
+ data += 4;
+
+ if (strncmp (data, "://", 3) != 0) {
+ goto mismatch;
+ }
+ data += 3;
+ size -= 3;
+
+ /* read the table number */
+ if (sscanf (data, "%d", &table) != 1) {
+ goto mismatch;
+ }
+ if (table < 0) {
+ GST_WARNING_OBJECT (qtmux, "Invalid table number in classification tag (%d)"
+ ", table numbers should be positive, ignoring tag", table);
+ return FALSE;
+ }
+
+ /* find the next / */
+ while (size > 0 && data[0] != '/') {
+ data += 1;
+ size -= 1;
+ }
+ if (size == 0) {
+ goto mismatch;
+ }
+ g_assert (data[0] == '/');
+
+ /* skip the '/' */
+ data += 1;
+ size -= 1;
+ if (size == 0) {
+ goto mismatch;
+ }
+
+ /* read up the rest of the string */
+ *p_content = g_strdup (data);
+ *p_table = (guint16) table;
+ *p_fourcc = fourcc;
+ return TRUE;
+
+ mismatch:
+ {
+ GST_WARNING_OBJECT (qtmux, "Ignoring classification tag as "
+ "input (%s) didn't match the expected entitycode://table/content",
+ input);
+ return FALSE;
+ }
+ }
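+
+ /* e.g. (illustrative values only): "mpaa://3/PG-13" would parse to
+ * fourcc 'mpaa', table 3 and content "PG-13" */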
+
+ static void
+ gst_qt_mux_add_3gp_classification (GstQTMux * qtmux, const GstTagList * list,
+ AtomUDTA * udta, const char *tag, const char *tag2, guint32 fourcc)
+ {
+ gchar *clsf_data = NULL;
+ gint size = 0;
+ guint32 entity = 0;
+ guint16 table = 0;
+ gchar *content = NULL;
+ guint8 *data;
+
+ g_return_if_fail (strcmp (tag, GST_TAG_3GP_CLASSIFICATION) == 0);
+
+ if (!gst_tag_list_get_string (list, tag, &clsf_data) || !clsf_data)
+ return;
+
+ GST_DEBUG_OBJECT (qtmux, "Adding tag %" GST_FOURCC_FORMAT " -> %s",
+ GST_FOURCC_ARGS (fourcc), clsf_data);
+
+ /* parse the string, format is:
+ * entityfourcc://table/content
+ */
+ if (!gst_qt_mux_parse_classification_string (qtmux, clsf_data, &entity,
+ &table, &content)) {
+ g_free (clsf_data);
+ return;
+ }
+ g_free (clsf_data);
+ /* +1 for the \0 */
+ size = strlen (content) + 1;
+
+ /* now we have everything, build the atom
+ * atom description is at 3GPP TS 26.244 V8.2.0 (2009-09) */
+ data = g_malloc (4 + 2 + 2 + size);
+ GST_WRITE_UINT32_LE (data, entity);
+ GST_WRITE_UINT16_BE (data + 4, (guint16) table);
+ GST_WRITE_UINT16_BE (data + 6, 0);
+ memcpy (data + 8, content, size);
+ g_free (content);
+
+ atom_udta_add_3gp_tag (udta, fourcc, data, 4 + 2 + 2 + size);
+ g_free (data);
+ }
+
+ typedef void (*GstQTMuxAddUdtaTagFunc) (GstQTMux * mux,
+ const GstTagList * list, AtomUDTA * udta, const char *tag,
+ const char *tag2, guint32 fourcc);
+
+ /*
+ * Struct to record mappings from gstreamer tags to fourcc codes
+ */
+ typedef struct _GstTagToFourcc
+ {
+ guint32 fourcc;
+ const gchar *gsttag;
+ const gchar *gsttag2;
+ const GstQTMuxAddUdtaTagFunc func;
+ } GstTagToFourcc;
+
+ /* tag list tags to fourcc matching */
+ static const GstTagToFourcc tag_matches_mp4[] = {
+ {FOURCC__alb, GST_TAG_ALBUM, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_soal, GST_TAG_ALBUM_SORTNAME, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC__ART, GST_TAG_ARTIST, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_soar, GST_TAG_ARTIST_SORTNAME, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_aART, GST_TAG_ALBUM_ARTIST, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_soaa, GST_TAG_ALBUM_ARTIST_SORTNAME, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC__swr, GST_TAG_APPLICATION_NAME, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC__cmt, GST_TAG_COMMENT, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC__wrt, GST_TAG_COMPOSER, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_soco, GST_TAG_COMPOSER_SORTNAME, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_tvsh, GST_TAG_SHOW_NAME, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_sosn, GST_TAG_SHOW_SORTNAME, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_tvsn, GST_TAG_SHOW_SEASON_NUMBER, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_tves, GST_TAG_SHOW_EPISODE_NUMBER, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC__gen, GST_TAG_GENRE, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC__nam, GST_TAG_TITLE, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_sonm, GST_TAG_TITLE_SORTNAME, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_perf, GST_TAG_PERFORMER, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC__grp, GST_TAG_GROUPING, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC__des, GST_TAG_DESCRIPTION, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC__lyr, GST_TAG_LYRICS, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC__too, GST_TAG_ENCODER, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_cprt, GST_TAG_COPYRIGHT, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_keyw, GST_TAG_KEYWORDS, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC__day, GST_TAG_DATE, NULL, gst_qt_mux_add_mp4_date},
+ {FOURCC_tmpo, GST_TAG_BEATS_PER_MINUTE, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_trkn, GST_TAG_TRACK_NUMBER, GST_TAG_TRACK_COUNT,
+ gst_qt_mux_add_mp4_tag},
+ {FOURCC_disk, GST_TAG_ALBUM_VOLUME_NUMBER, GST_TAG_ALBUM_VOLUME_COUNT,
+ gst_qt_mux_add_mp4_tag},
+ {FOURCC_covr, GST_TAG_PREVIEW_IMAGE, NULL, gst_qt_mux_add_mp4_cover},
+ {FOURCC_covr, GST_TAG_IMAGE, NULL, gst_qt_mux_add_mp4_cover},
+ {0, NULL,}
+ };
+
+ static const GstTagToFourcc tag_matches_3gp[] = {
+ {FOURCC_titl, GST_TAG_TITLE, NULL, gst_qt_mux_add_3gp_str},
+ {FOURCC_dscp, GST_TAG_DESCRIPTION, NULL, gst_qt_mux_add_3gp_str},
+ {FOURCC_cprt, GST_TAG_COPYRIGHT, NULL, gst_qt_mux_add_3gp_str},
+ {FOURCC_perf, GST_TAG_ARTIST, NULL, gst_qt_mux_add_3gp_str},
+ {FOURCC_auth, GST_TAG_COMPOSER, NULL, gst_qt_mux_add_3gp_str},
+ {FOURCC_gnre, GST_TAG_GENRE, NULL, gst_qt_mux_add_3gp_str},
+ {FOURCC_kywd, GST_TAG_KEYWORDS, NULL, gst_qt_mux_add_3gp_keywords},
+ {FOURCC_yrrc, GST_TAG_DATE, NULL, gst_qt_mux_add_3gp_date},
+ {FOURCC_albm, GST_TAG_ALBUM, GST_TAG_TRACK_NUMBER, gst_qt_mux_add_3gp_str},
+ {FOURCC_loci, GST_TAG_GEO_LOCATION_NAME, NULL, gst_qt_mux_add_3gp_location},
+ {FOURCC_clsf, GST_TAG_3GP_CLASSIFICATION, NULL,
+ gst_qt_mux_add_3gp_classification},
+ {0, NULL,}
+ };
+
+ /* qtdemux produces these for atoms it cannot parse */
+ #define GST_QT_DEMUX_PRIVATE_TAG "private-qt-tag"
+
+ static void
+ gst_qt_mux_add_xmp_tags (GstQTMux * qtmux, const GstTagList * list)
+ {
+ GstQTMuxClass *qtmux_klass = (GstQTMuxClass *) (G_OBJECT_GET_CLASS (qtmux));
+ GstBuffer *xmp = NULL;
+
+ /* adobe specs only have 'quicktime' and 'mp4',
+ * but I guess we can extrapolate to gpp.
+ * Keep mj2 out for now as we don't add any tags for it yet.
+ * If you have further info about xmp on these formats, please share */
+ if (qtmux_klass->format == GST_QT_MUX_FORMAT_MJ2)
+ return;
+
+ GST_DEBUG_OBJECT (qtmux, "Adding xmp tags");
+
+ if (qtmux_klass->format == GST_QT_MUX_FORMAT_QT) {
+ xmp = gst_tag_xmp_writer_tag_list_to_xmp_buffer (GST_TAG_XMP_WRITER (qtmux),
+ list, TRUE);
+ if (xmp)
+ atom_udta_add_xmp_tags (&qtmux->moov->udta, xmp);
+ } else {
+ AtomInfo *ainfo;
+ /* for isom/mp4, it is a top level uuid atom */
+ xmp = gst_tag_xmp_writer_tag_list_to_xmp_buffer (GST_TAG_XMP_WRITER (qtmux),
+ list, TRUE);
+ if (xmp) {
+ ainfo = build_uuid_xmp_atom (xmp);
+ if (ainfo) {
+ qtmux->extra_atoms = g_slist_prepend (qtmux->extra_atoms, ainfo);
+ }
+ }
+ }
+ if (xmp)
+ gst_buffer_unref (xmp);
+ }
+
+ static void
+ gst_qt_mux_add_metadata_tags (GstQTMux * qtmux, const GstTagList * list,
+ AtomUDTA * udta)
+ {
+ GstQTMuxClass *qtmux_klass = (GstQTMuxClass *) (G_OBJECT_GET_CLASS (qtmux));
+ guint32 fourcc;
+ gint i;
+ const gchar *tag, *tag2;
+ const GstTagToFourcc *tag_matches;
+
+ switch (qtmux_klass->format) {
+ case GST_QT_MUX_FORMAT_3GP:
+ tag_matches = tag_matches_3gp;
+ break;
+ case GST_QT_MUX_FORMAT_MJ2:
+ tag_matches = NULL;
+ break;
+ default:
+ /* sort of iTunes style for mp4 and QT (?) */
+ tag_matches = tag_matches_mp4;
+ break;
+ }
+
+ if (!tag_matches)
+ return;
+
+ /* Clear existing tags so we don't add them over and over */
+ atom_udta_clear_tags (udta);
+
+ for (i = 0; tag_matches[i].fourcc; i++) {
+ fourcc = tag_matches[i].fourcc;
+ tag = tag_matches[i].gsttag;
+ tag2 = tag_matches[i].gsttag2;
+
+ g_assert (tag_matches[i].func);
+ tag_matches[i].func (qtmux, list, udta, tag, tag2, fourcc);
+ }
+
+ /* add unparsed blobs if present */
+ if (gst_tag_exists (GST_QT_DEMUX_PRIVATE_TAG)) {
+ guint num_tags;
+
+ num_tags = gst_tag_list_get_tag_size (list, GST_QT_DEMUX_PRIVATE_TAG);
+ for (i = 0; i < num_tags; ++i) {
+ GstSample *sample = NULL;
+ GstBuffer *buf;
+ const GstStructure *s;
+
+ if (!gst_tag_list_get_sample_index (list, GST_QT_DEMUX_PRIVATE_TAG, i,
+ &sample))
+ continue;
+ buf = gst_sample_get_buffer (sample);
+
+ if (buf && (s = gst_sample_get_info (sample))) {
+ const gchar *style = NULL;
+ GstMapInfo map;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ GST_DEBUG_OBJECT (qtmux,
+ "Found private tag %d/%d; size %" G_GSIZE_FORMAT ", info %"
+ GST_PTR_FORMAT, i, num_tags, map.size, s);
+ if (s && (style = gst_structure_get_string (s, "style"))) {
+ /* try to prevent some style tag ending up into another variant
+ * (todo: make into a list if more cases) */
+ if ((strcmp (style, "itunes") == 0 &&
+ qtmux_klass->format == GST_QT_MUX_FORMAT_MP4) ||
+ (strcmp (style, "iso") == 0 &&
+ qtmux_klass->format == GST_QT_MUX_FORMAT_3GP)) {
+ GST_DEBUG_OBJECT (qtmux, "Adding private tag");
+ atom_udta_add_blob_tag (udta, map.data, map.size);
+ }
+ }
+ gst_buffer_unmap (buf, &map);
+ }
+ gst_sample_unref (sample);
+ }
+ }
+
+ return;
+ }
+
+ /*
+ * Gets the tagsetter iface taglist and puts the known tags
+ * into the output stream
+ */
+ static void
+ gst_qt_mux_setup_metadata (GstQTMux * qtmux)
+ {
+ const GstTagList *tags = NULL;
+ GList *l;
+
+ GST_OBJECT_LOCK (qtmux);
+ if (qtmux->tags_changed) {
+ tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (qtmux));
+ qtmux->tags_changed = FALSE;
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+
+ GST_LOG_OBJECT (qtmux, "tags: %" GST_PTR_FORMAT, tags);
+
+ if (tags && !gst_tag_list_is_empty (tags)) {
+ GstTagList *copy = gst_tag_list_copy (tags);
+
+ GST_DEBUG_OBJECT (qtmux, "Removing bogus tags");
+ gst_tag_list_remove_tag (copy, GST_TAG_VIDEO_CODEC);
+ gst_tag_list_remove_tag (copy, GST_TAG_AUDIO_CODEC);
+ gst_tag_list_remove_tag (copy, GST_TAG_CONTAINER_FORMAT);
+
+ GST_DEBUG_OBJECT (qtmux, "Formatting tags");
+ gst_qt_mux_add_metadata_tags (qtmux, copy, &qtmux->moov->udta);
+ gst_qt_mux_add_xmp_tags (qtmux, copy);
+ gst_tag_list_unref (copy);
+ } else {
+ GST_DEBUG_OBJECT (qtmux, "No new tags received");
+ }
+
+ GST_OBJECT_LOCK (qtmux);
+ for (l = GST_ELEMENT (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qpad = GST_QT_MUX_PAD (l->data);
+
+ if (qpad->tags_changed && qpad->tags) {
+ GST_DEBUG_OBJECT (qpad, "Adding tags");
+ gst_tag_list_remove_tag (qpad->tags, GST_TAG_CONTAINER_FORMAT);
+ gst_qt_mux_add_metadata_tags (qtmux, qpad->tags, &qpad->trak->udta);
+ qpad->tags_changed = FALSE;
+ GST_DEBUG_OBJECT (qpad, "Tags added");
+ } else {
+ GST_DEBUG_OBJECT (qpad, "No new tags received");
+ }
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+ }
+
+ static inline GstBuffer *
+ _gst_buffer_new_take_data (guint8 * data, guint size)
+ {
+ GstBuffer *buf;
+
+ buf = gst_buffer_new ();
+ gst_buffer_append_memory (buf,
+ gst_memory_new_wrapped (0, data, size, 0, size, data, g_free));
+
+ return buf;
+ }
+
+ static GstFlowReturn
+ gst_qt_mux_send_buffer (GstQTMux * qtmux, GstBuffer * buf, guint64 * offset,
+ gboolean mind_fast)
+ {
+ GstFlowReturn res = GST_FLOW_OK;
+ gsize size;
+
+ g_return_val_if_fail (buf != NULL, GST_FLOW_ERROR);
+
+ size = gst_buffer_get_size (buf);
+ GST_LOG_OBJECT (qtmux, "sending buffer size %" G_GSIZE_FORMAT, size);
+
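+ /* with mind_fast set, payload is written to the fast-start temporary
+ * file instead of being pushed downstream, so the moov atom can later
+ * be sent ahead of the buffered media data */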
+ if (mind_fast && qtmux->fast_start_file) {
+ GstMapInfo map;
+ gint ret;
+
+ GST_LOG_OBJECT (qtmux, "to temporary file");
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ ret = fwrite (map.data, sizeof (guint8), map.size, qtmux->fast_start_file);
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ if (ret != size)
+ goto write_error;
+ else
+ res = GST_FLOW_OK;
+ } else {
+ if (!mind_fast) {
+ res = gst_qtmux_push_mdat_stored_buffers (qtmux);
+ }
+
+ if (res == GST_FLOW_OK) {
+ GST_LOG_OBJECT (qtmux, "downstream");
+ res = gst_aggregator_finish_buffer (GST_AGGREGATOR (qtmux), buf);
+ }
+ }
+
+ if (res != GST_FLOW_OK)
+ GST_WARNING_OBJECT (qtmux,
+ "Failed to send buffer (%p) size %" G_GSIZE_FORMAT, buf, size);
+
+ if (G_LIKELY (offset))
+ *offset += size;
+
+ return res;
+
+ /* ERRORS */
+ write_error:
+ {
+ GST_ELEMENT_ERROR (qtmux, RESOURCE, WRITE,
+ ("Failed to write to temporary file"), GST_ERROR_SYSTEM);
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ static gboolean
+ gst_qt_mux_seek_to_beginning (FILE * f)
+ {
+ #ifdef HAVE_FSEEKO
+ if (fseeko (f, (off_t) 0, SEEK_SET) != 0)
+ return FALSE;
+ #elif defined (G_OS_UNIX) || defined (G_OS_WIN32)
+ if (lseek (fileno (f), (off_t) 0, SEEK_SET) == (off_t) - 1)
+ return FALSE;
+ #else
+ if (fseek (f, (long) 0, SEEK_SET) != 0)
+ return FALSE;
+ #endif
+ return TRUE;
+ }
+
+ static GstFlowReturn
+ gst_qt_mux_send_buffered_data (GstQTMux * qtmux, guint64 * offset)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstBuffer *buf = NULL;
+
+ if (fflush (qtmux->fast_start_file))
+ goto flush_failed;
+
+ if (!gst_qt_mux_seek_to_beginning (qtmux->fast_start_file))
+ goto seek_failed;
+
+ /* hm, this could all take a really really long time,
+ * but there may not be another way to get moov atom first
+ * (somehow optimize copy?) */
+ GST_DEBUG_OBJECT (qtmux, "Sending buffered data");
+ while (ret == GST_FLOW_OK) {
+ const int bufsize = 4096;
+ GstMapInfo map;
+ gsize size;
+
+ buf = gst_buffer_new_and_alloc (bufsize);
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+ size = fread (map.data, sizeof (guint8), bufsize, qtmux->fast_start_file);
+ if (size == 0) {
+ gst_buffer_unmap (buf, &map);
+ break;
+ }
+ GST_LOG_OBJECT (qtmux, "Pushing buffered buffer of size %d", (gint) size);
+ gst_buffer_unmap (buf, &map);
+ if (size != bufsize)
+ gst_buffer_set_size (buf, size);
+ ret = gst_qt_mux_send_buffer (qtmux, buf, offset, FALSE);
+ buf = NULL;
+ }
+ if (buf)
+ gst_buffer_unref (buf);
+
+ if (ftruncate (fileno (qtmux->fast_start_file), 0))
+ goto seek_failed;
+ if (!gst_qt_mux_seek_to_beginning (qtmux->fast_start_file))
+ goto seek_failed;
+
+ return ret;
+
+ /* ERRORS */
+ flush_failed:
+ {
+ GST_ELEMENT_ERROR (qtmux, RESOURCE, WRITE,
+ ("Failed to flush temporary file"), GST_ERROR_SYSTEM);
+ ret = GST_FLOW_ERROR;
+ goto fail;
+ }
+ seek_failed:
+ {
+ GST_ELEMENT_ERROR (qtmux, RESOURCE, SEEK,
+ ("Failed to seek temporary file"), GST_ERROR_SYSTEM);
+ ret = GST_FLOW_ERROR;
+ goto fail;
+ }
+ fail:
+ {
+ /* clear descriptor so we don't remove temp file later on,
+ * might be possible to recover */
+ fclose (qtmux->fast_start_file);
+ qtmux->fast_start_file = NULL;
+ return ret;
+ }
+ }
+
+ /*
+ * Sends the initial mdat atom fields (size fields and fourcc type);
+ * subsequent buffers are considered part of its data.
+ * As we can't predict the amount of data that we are going to place in mdat
+ * we need to record the position of the size field in the stream so we can
+ * seek back to it later and update when the streams have finished.
+ */
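+ /* Header layouts written below (sketch of the code that follows):
+ * 32-bit form: [ size (4, BE) ][ 'mdat' (4) ]
+ * extended, < 4GB: [ 8 (4, BE) ][ 'free' (4) ][ size (4, BE) ][ 'mdat' (4) ]
+ * extended, >= 4GB: [ 1 (4, BE) ][ 'mdat' (4) ][ large_size (8, BE) ]
+ */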
+ static GstFlowReturn
+ gst_qt_mux_send_mdat_header (GstQTMux * qtmux, guint64 * off, guint64 size,
+ gboolean extended, gboolean fsync_after)
+ {
+ GstBuffer *buf;
+ GstMapInfo map;
+ gboolean mind_fast = FALSE;
+
+ GST_DEBUG_OBJECT (qtmux, "Sending mdat's atom header, "
+ "size %" G_GUINT64_FORMAT, size);
+
+ /* if the qtmux state is EOS, really write the mdat, otherwise
+ * allow size == 0 for a placeholder atom */
+ if (qtmux->state == GST_QT_MUX_STATE_EOS || size > 0)
+ size += 8;
+
+ if (extended) {
+ gboolean large_file = (size > MDAT_LARGE_FILE_LIMIT);
+ /* Always write 16-bytes, but put a free atom first
+ * if the size is < 4GB. */
+ buf = gst_buffer_new_and_alloc (16);
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+
+ if (large_file) {
+ /* Write extended mdat header and large_size field */
+ GST_WRITE_UINT32_BE (map.data, 1);
+ GST_WRITE_UINT32_LE (map.data + 4, FOURCC_mdat);
+ GST_WRITE_UINT64_BE (map.data + 8, size + 8);
+ } else {
+ /* Write an empty free atom, then standard 32-bit mdat */
+ GST_WRITE_UINT32_BE (map.data, 8);
+ GST_WRITE_UINT32_LE (map.data + 4, FOURCC_free);
+ GST_WRITE_UINT32_BE (map.data + 8, size);
+ GST_WRITE_UINT32_LE (map.data + 12, FOURCC_mdat);
+ }
+ gst_buffer_unmap (buf, &map);
+ } else {
+ buf = gst_buffer_new_and_alloc (8);
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+
+ /* Vanilla 32-bit mdat */
+ GST_WRITE_UINT32_BE (map.data, size);
+ GST_WRITE_UINT32_LE (map.data + 4, FOURCC_mdat);
+ gst_buffer_unmap (buf, &map);
+ }
+
+ GST_LOG_OBJECT (qtmux, "Pushing mdat header");
+ if (fsync_after)
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_SYNC_AFTER);
+
+ mind_fast = qtmux->mux_mode == GST_QT_MUX_MODE_MOOV_AT_END
+ && !qtmux->downstream_seekable;
+
+ return gst_qt_mux_send_buffer (qtmux, buf, off, mind_fast);
+ }
+
+ static void
+ gst_qt_mux_seek_to (GstQTMux * qtmux, guint64 position)
+ {
+ GstSegment segment;
+
+ gst_segment_init (&segment, GST_FORMAT_BYTES);
+ segment.start = position;
+ GST_LOG_OBJECT (qtmux, "seeking to byte position %" G_GUINT64_FORMAT,
+ position);
+ gst_aggregator_update_segment (GST_AGGREGATOR (qtmux), &segment);
+ }
+
+ /*
+ * We get the position of the mdat size field, seek back to it
+ * and overwrite with the real value
+ */
+ static GstFlowReturn
+ gst_qt_mux_update_mdat_size (GstQTMux * qtmux, guint64 mdat_pos,
+ guint64 mdat_size, guint64 * offset, gboolean fsync_after)
+ {
+
+ /* We must have recorded the mdat position for this to work */
+ g_assert (mdat_pos != 0);
+
+ /* seek and rewrite the header */
+ gst_qt_mux_seek_to (qtmux, mdat_pos);
+
+ return gst_qt_mux_send_mdat_header (qtmux, offset, mdat_size, TRUE,
+ fsync_after);
+ }
+
+ static GstFlowReturn
+ gst_qt_mux_send_ftyp (GstQTMux * qtmux, guint64 * off)
+ {
+ GstBuffer *buf;
+ guint64 size = 0, offset = 0;
+ guint8 *data = NULL;
+
+ GST_DEBUG_OBJECT (qtmux, "Sending ftyp atom");
+
+ if (!atom_ftyp_copy_data (qtmux->ftyp, &data, &size, &offset))
+ goto serialize_error;
+
+ buf = _gst_buffer_new_take_data (data, offset);
+
+ GST_LOG_OBJECT (qtmux, "Pushing ftyp");
+ return gst_qt_mux_send_buffer (qtmux, buf, off, FALSE);
+
+ /* ERRORS */
+ serialize_error:
+ {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Failed to serialize ftyp"));
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ static void
+ gst_qt_mux_prepare_ftyp (GstQTMux * qtmux, AtomFTYP ** p_ftyp,
+ GstBuffer ** p_prefix)
+ {
+ GstQTMuxClass *qtmux_klass = (GstQTMuxClass *) (G_OBJECT_GET_CLASS (qtmux));
+ guint32 major, version;
+ GList *comp;
+ GstBuffer *prefix = NULL;
+ AtomFTYP *ftyp = NULL;
+
+ GST_DEBUG_OBJECT (qtmux, "Preparing ftyp and possible prefix atom");
+
+ /* init and send context and ftyp based on current property state */
+ gst_qt_mux_map_format_to_header (qtmux_klass->format, &prefix, &major,
+ &version, &comp, qtmux->moov, qtmux->longest_chunk,
+ qtmux->fast_start_file != NULL);
+ ftyp = atom_ftyp_new (qtmux->context, major, version, comp);
+ if (comp)
+ g_list_free (comp);
+ if (prefix) {
+ if (p_prefix)
+ *p_prefix = prefix;
+ else
+ gst_buffer_unref (prefix);
+ }
+ *p_ftyp = ftyp;
+ }
+
+ static GstFlowReturn
+ gst_qt_mux_prepare_and_send_ftyp (GstQTMux * qtmux)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstBuffer *prefix = NULL;
+
+ GST_DEBUG_OBJECT (qtmux, "Preparing to send ftyp atom");
+
+ /* init and send context and ftyp based on current property state */
+ if (qtmux->ftyp) {
+ atom_ftyp_free (qtmux->ftyp);
+ qtmux->ftyp = NULL;
+ }
+ gst_qt_mux_prepare_ftyp (qtmux, &qtmux->ftyp, &prefix);
+ if (prefix) {
+ ret = gst_qt_mux_send_buffer (qtmux, prefix, &qtmux->header_size, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ }
+ return gst_qt_mux_send_ftyp (qtmux, &qtmux->header_size);
+ }
+
+ static void
+ gst_qt_mux_set_header_on_caps (GstQTMux * mux, GstBuffer * buf)
+ {
+ GstStructure *structure;
+ GValue array = { 0 };
+ GValue value = { 0 };
+ GstCaps *caps, *tcaps;
+
+ tcaps = gst_pad_get_current_caps (GST_AGGREGATOR_SRC_PAD (mux));
+ caps = gst_caps_copy (tcaps);
+ gst_caps_unref (tcaps);
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ g_value_init (&array, GST_TYPE_ARRAY);
+
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_HEADER);
+ g_value_init (&value, GST_TYPE_BUFFER);
+ gst_value_take_buffer (&value, gst_buffer_ref (buf));
+ gst_value_array_append_value (&array, &value);
+ g_value_unset (&value);
+
+ gst_structure_set_value (structure, "streamheader", &array);
+ g_value_unset (&array);
+ gst_aggregator_set_src_caps (GST_AGGREGATOR (mux), caps);
+ gst_caps_unref (caps);
+ }
+
+ /*
+ * Write out a free space atom. The offset is adjusted by the full
+ * size, but a smaller buffer is sent
+ */
+ static GstFlowReturn
+ gst_qt_mux_send_free_atom (GstQTMux * qtmux, guint64 * off, guint32 size,
+ gboolean fsync_after)
+ {
+ Atom *node_header;
+ GstBuffer *buf;
+ guint8 *data = NULL;
+ guint64 offset = 0, bsize = 0;
+ GstFlowReturn ret;
+
+ GST_DEBUG_OBJECT (qtmux, "Sending free atom header of size %u", size);
+
+ /* We can't make a free space atom smaller than the header */
+ if (size < 8)
+ goto too_small;
+
+ node_header = g_malloc0 (sizeof (Atom));
+ node_header->type = FOURCC_free;
+ node_header->size = size;
+
+ bsize = offset = 0;
+ if (atom_copy_data (node_header, &data, &bsize, &offset) == 0)
+ goto serialize_error;
+
+ buf = _gst_buffer_new_take_data (data, offset);
+ g_free (node_header);
+
+ if (fsync_after)
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_SYNC_AFTER);
+
+ GST_LOG_OBJECT (qtmux, "Pushing free atom");
+ ret = gst_qt_mux_send_buffer (qtmux, buf, off, FALSE);
+
+ if (off) {
+ *off += size - 8;
+
+ /* Make sure downstream position ends up at the end of this free box */
+ gst_qt_mux_seek_to (qtmux, *off);
+ }
+
+ return ret;
+
+ /* ERRORS */
+ too_small:
+ {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Not enough free reserved space"));
+ return GST_FLOW_ERROR;
+ }
+ serialize_error:
+ {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Failed to serialize mdat"));
+ g_free (node_header);
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ static void
+ gst_qt_mux_configure_moov_full (GstQTMux * qtmux, gboolean fragmented,
+ guint32 timescale)
+ {
+ /* inform lower layers of our property wishes, and determine duration.
+ * Let moov take care of this using its list of traks;
+ * so that released pads are also included */
+ GST_DEBUG_OBJECT (qtmux, "Updating timescale to %" G_GUINT32_FORMAT,
+ timescale);
+ atom_moov_update_timescale (qtmux->moov, timescale);
+ atom_moov_set_fragmented (qtmux->moov, fragmented);
+
+ atom_moov_update_duration (qtmux->moov);
+ }
+
+ static void
+ gst_qt_mux_configure_moov (GstQTMux * qtmux)
+ {
+ gboolean fragmented = FALSE;
+ guint32 timescale;
+
+ GST_OBJECT_LOCK (qtmux);
+ timescale = qtmux->timescale;
+ if (qtmux->mux_mode == GST_QT_MUX_MODE_FRAGMENTED
+ && qtmux->fragment_mode != GST_QT_MUX_FRAGMENT_FIRST_MOOV_THEN_FINALISE)
+ fragmented = TRUE;
+ GST_OBJECT_UNLOCK (qtmux);
+
+ gst_qt_mux_configure_moov_full (qtmux, fragmented, timescale);
+ }
+
+ static GstFlowReturn
+ gst_qt_mux_send_moov (GstQTMux * qtmux, guint64 * _offset,
+ guint64 padded_moov_size, gboolean mind_fast, gboolean fsync_after)
+ {
+ guint64 offset = 0, size = 0;
+ guint8 *data;
+ GstBuffer *buf;
+ GstFlowReturn ret = GST_FLOW_OK;
+ GList *l;
+ guint64 current_time = atoms_get_current_qt_time ();
+
+ /* update modification times */
+ qtmux->moov->mvhd.time_info.modification_time = current_time;
+
+ GST_OBJECT_LOCK (qtmux);
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qtpad = (GstQTMuxPad *) l->data;
+
+ qtpad->trak->mdia.mdhd.time_info.modification_time = current_time;
+ qtpad->trak->tkhd.modification_time = current_time;
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+
+ /* serialize moov */
+ offset = size = 0;
+ data = NULL;
+ GST_LOG_OBJECT (qtmux, "Copying movie header into buffer");
+ if (!atom_moov_copy_data (qtmux->moov, &data, &size, &offset))
+ goto serialize_error;
+ qtmux->last_moov_size = offset;
+
+ /* Check we have enough reserved space for this and a Free atom */
+ if (padded_moov_size > 0 && offset + 8 > padded_moov_size)
+ goto too_small_reserved;
+ buf = _gst_buffer_new_take_data (data, offset);
+ GST_DEBUG_OBJECT (qtmux, "Pushing moov atoms");
+
+ /* If at EOS, this is the final moov, put in the streamheader
+ * (apparently used by a flumotion util) */
+ if (qtmux->state == GST_QT_MUX_STATE_EOS)
+ gst_qt_mux_set_header_on_caps (qtmux, buf);
+
+ if (fsync_after)
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_SYNC_AFTER);
+ ret = gst_qt_mux_send_buffer (qtmux, buf, _offset, mind_fast);
+
+ /* Write out a free atom if needed */
+ if (ret == GST_FLOW_OK && offset < padded_moov_size) {
+ GST_LOG_OBJECT (qtmux, "Writing out free atom of size %u",
+ (guint32) (padded_moov_size - offset));
+ ret =
+ gst_qt_mux_send_free_atom (qtmux, _offset, padded_moov_size - offset,
+ fsync_after);
+ }
+
+ return ret;
+ too_small_reserved:
+ {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX,
+ ("Not enough free reserved header space"),
+ ("Needed %" G_GUINT64_FORMAT " bytes, reserved %" G_GUINT64_FORMAT,
+ offset + 8, padded_moov_size));
+ return GST_FLOW_ERROR;
+ }
+ serialize_error:
+ {
+ g_free (data);
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ /* either calculates size of extra atoms or pushes them */
+ static GstFlowReturn
+ gst_qt_mux_send_extra_atoms (GstQTMux * qtmux, gboolean send, guint64 * offset,
+ gboolean mind_fast)
+ {
+ GSList *walk;
+ guint64 loffset = 0, size = 0;
+ guint8 *data;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ for (walk = qtmux->extra_atoms; walk; walk = g_slist_next (walk)) {
+ AtomInfo *ainfo = (AtomInfo *) walk->data;
+
+ loffset = size = 0;
+ data = NULL;
+ if (!ainfo->copy_data_func (ainfo->atom,
+ send ? &data : NULL, &size, &loffset))
+ goto serialize_error;
+
+ if (send) {
+ GstBuffer *buf;
+
+ GST_DEBUG_OBJECT (qtmux,
+ "Pushing extra top-level atom %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (ainfo->atom->type));
+ buf = _gst_buffer_new_take_data (data, loffset);
+ ret = gst_qt_mux_send_buffer (qtmux, buf, offset, FALSE);
+ if (ret != GST_FLOW_OK)
+ break;
+ } else {
+ if (offset)
+ *offset += loffset;
+ }
+ }
+
+ return ret;
+
+ serialize_error:
+ {
+ g_free (data);
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ static gboolean
+ gst_qt_mux_downstream_is_seekable (GstQTMux * qtmux)
+ {
+ gboolean seekable = FALSE;
+ GstQuery *query = gst_query_new_seeking (GST_FORMAT_BYTES);
+
+ if (gst_pad_peer_query (GST_AGGREGATOR_SRC_PAD (qtmux), query)) {
+ gst_query_parse_seeking (query, NULL, &seekable, NULL, NULL);
+ GST_INFO_OBJECT (qtmux, "downstream is %sseekable", seekable ? "" : "not ");
+ } else {
+ /* have to assume seeking is not supported if query not handled downstream */
+ GST_WARNING_OBJECT (qtmux, "downstream did not handle seeking query");
+ seekable = FALSE;
+ }
+ gst_query_unref (query);
+
+ return seekable;
+ }
+
+ static void
+ gst_qt_mux_prepare_moov_recovery (GstQTMux * qtmux)
+ {
+ GList *l;
+ gboolean fail = FALSE;
+ AtomFTYP *ftyp = NULL;
+ GstBuffer *prefix = NULL;
+
+ GST_DEBUG_OBJECT (qtmux, "Opening moov recovery file: %s",
+ qtmux->moov_recov_file_path);
+
+ qtmux->moov_recov_file = g_fopen (qtmux->moov_recov_file_path, "wb+");
+ if (qtmux->moov_recov_file == NULL) {
+ GST_WARNING_OBJECT (qtmux, "Failed to open moov recovery file in %s",
+ qtmux->moov_recov_file_path);
+ return;
+ }
+
+ gst_qt_mux_prepare_ftyp (qtmux, &ftyp, &prefix);
+
+ GST_OBJECT_LOCK (qtmux);
+ if (!atoms_recov_write_headers (qtmux->moov_recov_file, ftyp, prefix,
+ qtmux->moov, qtmux->timescale,
+ g_list_length (GST_ELEMENT (qtmux)->sinkpads))) {
+ GST_WARNING_OBJECT (qtmux, "Failed to write moov recovery file " "headers");
+ GST_OBJECT_UNLOCK (qtmux);
+ goto fail;
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+
+ atom_ftyp_free (ftyp);
+ if (prefix)
+ gst_buffer_unref (prefix);
+
+ GST_OBJECT_LOCK (qtmux);
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qpad = (GstQTMuxPad *) l->data;
+ /* write info for each stream */
+ fail = atoms_recov_write_trak_info (qtmux->moov_recov_file, qpad->trak);
+ if (fail) {
+ GST_WARNING_OBJECT (qtmux, "Failed to write trak info to recovery "
+ "file");
+ break;
+ }
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+
+ return;
+
+ fail:
+ /* cleanup */
+ fclose (qtmux->moov_recov_file);
+ qtmux->moov_recov_file = NULL;
+ }
+
+ static guint64
+ prefill_get_block_index (GstQTMux * qtmux, GstQTMuxPad * qpad)
+ {
+ switch (qpad->fourcc) {
+ case FOURCC_apch:
+ case FOURCC_apcn:
+ case FOURCC_apcs:
+ case FOURCC_apco:
+ case FOURCC_ap4h:
+ case FOURCC_ap4x:
+ case FOURCC_c608:
+ case FOURCC_c708:
+ return qpad->sample_offset;
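+ /* sowt/twos are raw PCM; blocks have a fixed duration, so derive the
+ * block index from the running sample offset */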
+ case FOURCC_sowt:
+ case FOURCC_twos:
+ return gst_util_uint64_scale_ceil (qpad->sample_offset,
+ qpad->expected_sample_duration_n,
+ qpad->expected_sample_duration_d *
+ atom_trak_get_timescale (qpad->trak));
+ default:
+ return -1;
+ }
+ }
+
+ static guint
+ prefill_get_sample_size (GstQTMux * qtmux, GstQTMuxPad * qpad)
+ {
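+ /* fixed per-sample size budgets keyed on frame height; the fourccs are
+ * ProRes flavors (we read apco/apcs/apcn/apch as Proxy/LT/422/HQ; noted
+ * as an assumption, not taken from this patch) */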
+ switch (qpad->fourcc) {
+ case FOURCC_apch:
+ if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 480) {
+ return 300000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 576) {
+ return 350000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 720) {
+ return 525000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 1080) {
+ return 1050000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 2160) {
+ return 4150000;
+ } else {
+ return 16600000;
+ }
+ break;
+ case FOURCC_apcn:
+ if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 480) {
+ return 200000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 576) {
+ return 250000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 720) {
+ return 350000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 1080) {
+ return 700000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 2160) {
+ return 2800000;
+ } else {
+ return 11200000;
+ }
+ break;
+ case FOURCC_apcs:
+ if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 480) {
+ return 150000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 576) {
+ return 200000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 720) {
+ return 250000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 1080) {
+ return 500000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 2160) {
+ return 2800000;
+ } else {
+ return 11200000;
+ }
+ break;
+ case FOURCC_apco:
+ if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 480) {
+ return 80000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 576) {
+ return 100000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 720) {
+ return 150000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 1080) {
+ return 250000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 2160) {
+ return 900000;
+ } else {
+ return 3600000;
+ }
+ break;
+ case FOURCC_c608:
+ /* We always write both cdat and cdt2 atom in prefill mode */
+ return 20;
+ case FOURCC_c708:{
+ if (qpad->first_cc_sample_size == 0) {
+ GstBuffer *buf =
+ gst_aggregator_pad_peek_buffer (GST_AGGREGATOR_PAD (qpad));
+ g_assert (buf != NULL);
+ qpad->first_cc_sample_size = gst_buffer_get_size (buf);
+ g_assert (qpad->first_cc_sample_size != 0);
+ gst_buffer_unref (buf);
+ }
+ return qpad->first_cc_sample_size + 8;
+ }
+ case FOURCC_sowt:
+ case FOURCC_twos:{
+ guint64 block_idx;
+ guint64 next_sample_offset;
+
+ block_idx = prefill_get_block_index (qtmux, qpad);
+ next_sample_offset =
+ gst_util_uint64_scale (block_idx + 1,
+ qpad->expected_sample_duration_d *
+ atom_trak_get_timescale (qpad->trak),
+ qpad->expected_sample_duration_n);
+
+ return (next_sample_offset - qpad->sample_offset) * qpad->sample_size;
+ }
+ case FOURCC_ap4h:
+ case FOURCC_ap4x:
+ default:
+ GST_ERROR_OBJECT (qtmux, "unsupported codec for pre-filling");
+ return -1;
+ }
+
+ return -1;
+ }
+
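+ /* Returns the timestamp at which the next sample for this pad will start,
+ * i.e. the end timestamp of the sample about to be written, derived from
+ * the expected sample duration. */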
+ static GstClockTime
+ prefill_get_next_timestamp (GstQTMux * qtmux, GstQTMuxPad * qpad)
+ {
+ switch (qpad->fourcc) {
+ case FOURCC_apch:
+ case FOURCC_apcn:
+ case FOURCC_apcs:
+ case FOURCC_apco:
+ case FOURCC_ap4h:
+ case FOURCC_ap4x:
+ case FOURCC_c608:
+ case FOURCC_c708:
+ return gst_util_uint64_scale (qpad->sample_offset + 1,
+ qpad->expected_sample_duration_d * GST_SECOND,
+ qpad->expected_sample_duration_n);
+ case FOURCC_sowt:
+ case FOURCC_twos:{
+ guint64 block_idx;
+ guint64 next_sample_offset;
+
+ block_idx = prefill_get_block_index (qtmux, qpad);
+ next_sample_offset =
+ gst_util_uint64_scale (block_idx + 1,
+ qpad->expected_sample_duration_d *
+ atom_trak_get_timescale (qpad->trak),
+ qpad->expected_sample_duration_n);
+
+ return gst_util_uint64_scale (next_sample_offset, GST_SECOND,
+ atom_trak_get_timescale (qpad->trak));
+ }
+ default:
+ GST_ERROR_OBJECT (qtmux, "unsupported codec for pre-filling");
+ return -1;
+ }
+
+ return -1;
+ }
+
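+ /* prepare_buf_func for raw audio in prefill mode: incoming buffers are
+ * pushed into a GstAdapter and taken back out regrouped into the fixed
+ * block size that was pre-filled into the sample tables, so the data we
+ * actually write matches the prefilled stsc/stsz entries exactly. */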
+ static GstBuffer *
+ prefill_raw_audio_prepare_buf_func (GstQTMuxPad * qtpad, GstBuffer * buf,
+ GstQTMux * qtmux)
+ {
+ guint64 block_idx;
+ guint64 nsamples;
+ GstClockTime input_timestamp;
+ guint64 input_timestamp_distance;
+
+ if (buf)
+ gst_adapter_push (qtpad->raw_audio_adapter, buf);
+
+ block_idx = gst_util_uint64_scale_ceil (qtpad->raw_audio_adapter_offset,
+ qtpad->expected_sample_duration_n,
+ qtpad->expected_sample_duration_d *
+ atom_trak_get_timescale (qtpad->trak));
+ nsamples =
+ gst_util_uint64_scale (block_idx + 1,
+ qtpad->expected_sample_duration_d * atom_trak_get_timescale (qtpad->trak),
+ qtpad->expected_sample_duration_n) - qtpad->raw_audio_adapter_offset;
+
+ if ((!gst_aggregator_pad_is_eos (GST_AGGREGATOR_PAD (qtpad))
+ && gst_adapter_available (qtpad->raw_audio_adapter) <
+ nsamples * qtpad->sample_size)
+ || gst_adapter_available (qtpad->raw_audio_adapter) == 0) {
+ return NULL;
+ }
+
+ input_timestamp =
+ gst_adapter_prev_pts (qtpad->raw_audio_adapter,
+ &input_timestamp_distance);
+ if (input_timestamp != GST_CLOCK_TIME_NONE)
+ input_timestamp +=
+ gst_util_uint64_scale (input_timestamp_distance, GST_SECOND,
+ qtpad->sample_size * atom_trak_get_timescale (qtpad->trak));
+
+ buf =
+ gst_adapter_take_buffer (qtpad->raw_audio_adapter,
+ !gst_aggregator_pad_is_eos (GST_AGGREGATOR_PAD (qtpad)) ? nsamples *
+ qtpad->sample_size : gst_adapter_available (qtpad->raw_audio_adapter));
+ GST_BUFFER_PTS (buf) = input_timestamp;
+ GST_BUFFER_DTS (buf) = GST_CLOCK_TIME_NONE;
+ GST_BUFFER_DURATION (buf) = GST_CLOCK_TIME_NONE;
+
+ qtpad->raw_audio_adapter_offset += nsamples;
+
+ /* Check if we have yet another block of raw audio in the adapter */
+ nsamples =
+ gst_util_uint64_scale (block_idx + 2,
+ qtpad->expected_sample_duration_d * atom_trak_get_timescale (qtpad->trak),
+ qtpad->expected_sample_duration_n) - qtpad->raw_audio_adapter_offset;
+ if (gst_adapter_available (qtpad->raw_audio_adapter) >=
+ nsamples * qtpad->sample_size) {
+ input_timestamp =
+ gst_adapter_prev_pts (qtpad->raw_audio_adapter,
+ &input_timestamp_distance);
+ if (input_timestamp != GST_CLOCK_TIME_NONE)
+ input_timestamp +=
+ gst_util_uint64_scale (input_timestamp_distance, GST_SECOND,
+ qtpad->sample_size * atom_trak_get_timescale (qtpad->trak));
+ qtpad->raw_audio_adapter_pts = input_timestamp;
+ } else {
+ qtpad->raw_audio_adapter_pts = GST_CLOCK_TIME_NONE;
+ }
+
+ return buf;
+ }
+
+ /* Must be called with object lock */
+ static void
+ find_video_sample_duration (GstQTMux * qtmux, guint * dur_n, guint * dur_d)
+ {
+ GList *l;
+
+ /* Find the (first) video track and assume that we have to output
+ * in that size */
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *tmp_qpad = (GstQTMuxPad *) l->data;
+
+ if (tmp_qpad->trak->is_video) {
+ *dur_n = tmp_qpad->expected_sample_duration_n;
+ *dur_d = tmp_qpad->expected_sample_duration_d;
+ break;
+ }
+ }
+
+ if (l == NULL) {
+ GST_INFO_OBJECT (qtmux,
+ "Found no video framerate, using 40ms audio buffers");
+ *dur_n = 25;
+ *dur_d = 1;
+ }
+ }
+
+ /* Called when all pads are prerolled, to adjust the expected sample sizes
+ * and durations as needed (constant-size samples for ProRes and closed
+ * captions, reblocking for raw audio) */
+ static gboolean
+ prefill_update_sample_size (GstQTMux * qtmux, GstQTMuxPad * qpad)
+ {
+ switch (qpad->fourcc) {
+ case FOURCC_apch:
+ case FOURCC_apcn:
+ case FOURCC_apcs:
+ case FOURCC_apco:
+ case FOURCC_ap4h:
+ case FOURCC_ap4x:
+ {
+ guint sample_size = prefill_get_sample_size (qtmux, qpad);
+ atom_trak_set_constant_size_samples (qpad->trak, sample_size);
+ return TRUE;
+ }
+ case FOURCC_c608:
+ case FOURCC_c708:
+ {
+ guint sample_size = prefill_get_sample_size (qtmux, qpad);
+ /* We need a "valid" duration */
+ find_video_sample_duration (qtmux, &qpad->expected_sample_duration_n,
+ &qpad->expected_sample_duration_d);
+ atom_trak_set_constant_size_samples (qpad->trak, sample_size);
+ return TRUE;
+ }
+ case FOURCC_sowt:
+ case FOURCC_twos:{
+ find_video_sample_duration (qtmux, &qpad->expected_sample_duration_n,
+ &qpad->expected_sample_duration_d);
+ /* Set a prepare_buf_func that ensures this */
+ qpad->prepare_buf_func = prefill_raw_audio_prepare_buf_func;
+ qpad->raw_audio_adapter = gst_adapter_new ();
+ qpad->raw_audio_adapter_offset = 0;
+ qpad->raw_audio_adapter_pts = GST_CLOCK_TIME_NONE;
+
+ return TRUE;
+ }
+ default:
+ return TRUE;
+ }
+ }
+
+ /* Only called at startup when doing the "fake" iteration of all tracks in order
+ * to prefill the sample tables in the header. */
+ static GstQTMuxPad *
+ find_best_pad_prefill_start (GstQTMux * qtmux)
+ {
+ GstQTMuxPad *best_pad = NULL;
+
+ /* If interleave limits have been specified and the current pad is within
+ * those interleave limits, pick that one, otherwise let's try to figure out
+ * the next best one. */
+
+ if (qtmux->current_pad &&
+ (qtmux->interleave_bytes != 0 || qtmux->interleave_time != 0) &&
+ (qtmux->interleave_bytes == 0
+ || qtmux->current_chunk_size <= qtmux->interleave_bytes)
+ && (qtmux->interleave_time == 0
+ || qtmux->current_chunk_duration <= qtmux->interleave_time)
+ && qtmux->mux_mode != GST_QT_MUX_MODE_FRAGMENTED) {
+
+ if (qtmux->current_pad->total_duration < qtmux->reserved_max_duration) {
+ best_pad = qtmux->current_pad;
+ }
+ } else {
+ GST_OBJECT_LOCK (qtmux);
+ if (GST_ELEMENT_CAST (qtmux)->sinkpads->next) {
+ /* Attempt to try another pad if we have one. Otherwise use the only pad
+ * present */
+ best_pad = qtmux->current_pad = NULL;
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+ }
+
+ /* The next best pad is the one which has the lowest timestamp and hasn't
+ * exceeded the reserved max duration */
+ if (!best_pad) {
+ GList *l;
+ GstClockTime best_time = GST_CLOCK_TIME_NONE;
+
+ GST_OBJECT_LOCK (qtmux);
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qtpad = (GstQTMuxPad *) l->data;
+ GstClockTime timestamp;
+
+ if (qtpad->total_duration >= qtmux->reserved_max_duration)
+ continue;
+
+ timestamp = qtpad->total_duration;
+
+ if (best_pad == NULL ||
+ !GST_CLOCK_TIME_IS_VALID (best_time) || timestamp < best_time) {
+ best_pad = qtpad;
+ best_time = timestamp;
+ }
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+ }
+
+ return best_pad;
+ }
+
+ /* Called when starting the file in prefill_mode to figure out all the entries
+ * of the header based on the input stream and reserved maximum duration.
+ *
+ * The _actual_ header (i.e. with the proper duration and trimmed sample tables)
+ * will be updated and written on EOS. */
+ static gboolean
+ gst_qt_mux_prefill_samples (GstQTMux * qtmux)
+ {
+ GstQTMuxPad *qpad;
+ GList *l;
+ GstQTMuxClass *qtmux_klass = (GstQTMuxClass *) (G_OBJECT_GET_CLASS (qtmux));
+
+ /* Update expected sample sizes/durations as needed, this is for raw
+ * audio where samples are actual audio samples. */
+ GST_OBJECT_LOCK (qtmux);
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qpad = (GstQTMuxPad *) l->data;
+
+ if (!prefill_update_sample_size (qtmux, qpad)) {
+ GST_OBJECT_UNLOCK (qtmux);
+ return FALSE;
+ }
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+
+ if (qtmux_klass->format == GST_QT_MUX_FORMAT_QT ||
+ qtmux->force_create_timecode_trak) {
+ /* For the first sample, check/update the timecode as needed. We do that
+ * before all actual samples because gst_qt_mux_add_buffer() does it with
+ * the initial buffer directly, not with last_buf */
+ GST_OBJECT_LOCK (qtmux);
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qpad = (GstQTMuxPad *) l->data;
+ GstBuffer *buffer =
+ gst_aggregator_pad_peek_buffer (GST_AGGREGATOR_PAD (qpad));
+ GstVideoTimeCodeMeta *tc_meta;
+
+ if (buffer && (tc_meta = gst_buffer_get_video_time_code_meta (buffer))
+ && qpad->trak->is_video) {
+ GstVideoTimeCode *tc = &tc_meta->tc;
+
+ qpad->tc_trak = atom_trak_new (qtmux->context);
+ atom_moov_add_trak (qtmux->moov, qpad->tc_trak);
+
+ qpad->trak->tref = atom_tref_new (FOURCC_tmcd);
+ atom_tref_add_entry (qpad->trak->tref, qpad->tc_trak->tkhd.track_ID);
+
+ atom_trak_set_timecode_type (qpad->tc_trak, qtmux->context,
+ qpad->trak->mdia.mdhd.time_info.timescale, tc);
+
+ atom_trak_add_samples (qpad->tc_trak, 1, 1, 4,
+ qtmux->mdat_size, FALSE, 0);
+
+ qpad->tc_pos = qtmux->mdat_size;
+ qpad->first_tc = gst_video_time_code_copy (tc);
+ qpad->first_pts = GST_BUFFER_PTS (buffer);
+
+ qtmux->current_chunk_offset = -1;
+ qtmux->current_chunk_size = 0;
+ qtmux->current_chunk_duration = 0;
+ qtmux->mdat_size += 4;
+ }
+ if (buffer)
+ gst_buffer_unref (buffer);
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+ }
+
+ while ((qpad = find_best_pad_prefill_start (qtmux))) {
+ GstClockTime timestamp, next_timestamp, duration;
+ guint nsamples, sample_size;
+ guint64 chunk_offset;
+ gint64 scaled_duration;
+ gint64 pts_offset = 0;
+ gboolean sync = FALSE;
+ TrakBufferEntryInfo sample_entry;
+
+ sample_size = prefill_get_sample_size (qtmux, qpad);
+
+ if (sample_size == -1) {
+ return FALSE;
+ }
+
+ if (!qpad->samples)
+ qpad->samples = g_array_new (FALSE, FALSE, sizeof (TrakBufferEntryInfo));
+
+ timestamp = qpad->total_duration;
+ next_timestamp = prefill_get_next_timestamp (qtmux, qpad);
+ duration = next_timestamp - timestamp;
+
+ if (qpad->first_ts == GST_CLOCK_TIME_NONE)
+ qpad->first_ts = timestamp;
+ if (qpad->first_dts == GST_CLOCK_TIME_NONE)
+ qpad->first_dts = timestamp;
+
+ if (qtmux->current_pad != qpad || qtmux->current_chunk_offset == -1) {
+ qtmux->current_pad = qpad;
+ if (qtmux->current_chunk_offset == -1)
+ qtmux->current_chunk_offset = qtmux->mdat_size;
+ else
+ qtmux->current_chunk_offset += qtmux->current_chunk_size;
+ qtmux->current_chunk_size = 0;
+ qtmux->current_chunk_duration = 0;
+ }
+ if (qpad->sample_size)
+ nsamples = sample_size / qpad->sample_size;
+ else
+ nsamples = 1;
+ qpad->last_dts = timestamp;
+ scaled_duration = gst_util_uint64_scale_round (timestamp + duration,
+ atom_trak_get_timescale (qpad->trak),
+ GST_SECOND) - gst_util_uint64_scale_round (timestamp,
+ atom_trak_get_timescale (qpad->trak), GST_SECOND);
+
+ qtmux->current_chunk_size += sample_size;
+ qtmux->current_chunk_duration += duration;
+ qpad->total_bytes += sample_size;
+
+ chunk_offset = qtmux->current_chunk_offset;
+
+ /* I-frame only, no frame reordering */
+ sync = FALSE;
+ pts_offset = 0;
+
+ if (qtmux->current_chunk_duration > qtmux->longest_chunk
+ || !GST_CLOCK_TIME_IS_VALID (qtmux->longest_chunk)) {
+ qtmux->longest_chunk = qtmux->current_chunk_duration;
+ }
+
+ sample_entry.track_id = qpad->trak->tkhd.track_ID;
+ sample_entry.nsamples = nsamples;
+ sample_entry.delta = scaled_duration / nsamples;
+ sample_entry.size = sample_size / nsamples;
+ sample_entry.chunk_offset = chunk_offset;
+ sample_entry.pts_offset = pts_offset;
+ sample_entry.sync = sync;
+ sample_entry.do_pts = TRUE;
+ g_array_append_val (qpad->samples, sample_entry);
+ atom_trak_add_samples (qpad->trak, nsamples, scaled_duration / nsamples,
+ sample_size / nsamples, chunk_offset, sync, pts_offset);
+
+ qpad->total_duration = next_timestamp;
+ qtmux->mdat_size += sample_size;
+ qpad->sample_offset += nsamples;
+ }
+
+ return TRUE;
+ }
+
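+ /* Decides the muxing mode from the configured properties and writes the
+ * initial header data:
+ * - fragment-duration > 0 selects fragmented mode,
+ * - fast start buffers media to a temp file so the moov can precede the mdat,
+ * - reserved-max-duration selects robust recording, either with ping-pong
+ * moov updates or with fully pre-filled sample tables (reserved-prefill),
+ * - otherwise plain moov-at-end is used.
+ * A sketch of a robust-recording pipeline (values are only an example;
+ * durations are in nanoseconds):
+ * gst-launch-1.0 -e videotestsrc ! x264enc ! qtmux \
+ * reserved-max-duration=3600000000000 \
+ * reserved-moov-update-period=1000000000 ! filesink location=out.mov
+ */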
+ static GstFlowReturn
+ gst_qt_mux_start_file (GstQTMux * qtmux)
+ {
+ GstQTMuxClass *qtmux_klass = (GstQTMuxClass *) (G_OBJECT_GET_CLASS (qtmux));
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstCaps *caps;
+ GstClockTime reserved_max_duration;
+ guint reserved_bytes_per_sec_per_trak;
+ GList *l;
+
+ GST_DEBUG_OBJECT (qtmux, "starting file");
+
+ GST_OBJECT_LOCK (qtmux);
+ reserved_max_duration = qtmux->reserved_max_duration;
+ reserved_bytes_per_sec_per_trak = qtmux->reserved_bytes_per_sec_per_trak;
+ GST_OBJECT_UNLOCK (qtmux);
+
+ caps =
+ gst_caps_copy (gst_pad_get_pad_template_caps (GST_AGGREGATOR_SRC_PAD
+ (qtmux)));
+ /* qtmux has structure with and without variant, remove all but the first */
+ caps = gst_caps_truncate (caps);
+ gst_aggregator_set_src_caps (GST_AGGREGATOR (qtmux), caps);
+ gst_caps_unref (caps);
+
+ /* Default is 'normal' mode */
+ qtmux->mux_mode = GST_QT_MUX_MODE_MOOV_AT_END;
+
+ /* Require a sensible fragment duration when muxing
+ * using the ISML muxer */
+ if (qtmux_klass->format == GST_QT_MUX_FORMAT_ISML &&
+ qtmux->fragment_duration == 0)
+ goto invalid_isml;
+
+ if (qtmux->fragment_duration > 0) {
+ qtmux->mux_mode = GST_QT_MUX_MODE_FRAGMENTED;
+ if (qtmux->streamable
+ && qtmux->fragment_mode == GST_QT_MUX_FRAGMENT_DASH_OR_MSS) {
+ qtmux->fragment_mode = GST_QT_MUX_FRAGMENT_STREAMABLE;
+ }
+ } else if (qtmux->fast_start) {
+ qtmux->mux_mode = GST_QT_MUX_MODE_FAST_START;
+ } else if (reserved_max_duration != GST_CLOCK_TIME_NONE) {
+ if (reserved_max_duration == 0) {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX,
+ ("reserved-max-duration of 0 is not allowed"), (NULL));
+ return GST_FLOW_ERROR;
+ }
+ if (qtmux->reserved_prefill)
+ qtmux->mux_mode = GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL;
+ else
+ qtmux->mux_mode = GST_QT_MUX_MODE_ROBUST_RECORDING;
+ }
+
+ qtmux->downstream_seekable = gst_qt_mux_downstream_is_seekable (qtmux);
+ switch (qtmux->mux_mode) {
+ case GST_QT_MUX_MODE_MOOV_AT_END:
+ break;
+ case GST_QT_MUX_MODE_ROBUST_RECORDING:
+ /* We have to be able to seek to rewrite the mdat header, or any
+ * moov atom we write will not be visible in the file, because an
+ * MDAT with 0 as the size covers the rest of the file. A file
+ * with no moov is not playable, so error out now. */
+ if (!qtmux->downstream_seekable) {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX,
+ ("Downstream is not seekable - will not be able to create a playable file"),
+ (NULL));
+ return GST_FLOW_ERROR;
+ }
+ if (qtmux->reserved_moov_update_period == GST_CLOCK_TIME_NONE) {
+ GST_WARNING_OBJECT (qtmux,
+ "Robust muxing requires reserved-moov-update-period to be set");
+ }
+ break;
+ case GST_QT_MUX_MODE_FAST_START:
+ break; /* Don't need seekability, ignore */
+ case GST_QT_MUX_MODE_FRAGMENTED:
+ if (qtmux->fragment_mode == GST_QT_MUX_FRAGMENT_STREAMABLE)
+ break;
+ if (!qtmux->downstream_seekable) {
+ if (qtmux->fragment_mode == GST_QT_MUX_FRAGMENT_DASH_OR_MSS) {
+ GST_WARNING_OBJECT (qtmux, "downstream is not seekable, but "
+ "streamable=false. Will ignore that and create streamable output "
+ "instead");
+ qtmux->streamable = TRUE;
+ g_object_notify (G_OBJECT (qtmux), "streamable");
+ qtmux->fragment_mode = GST_QT_MUX_FRAGMENT_STREAMABLE;
+ }
+ }
+ break;
+ case GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL:
+ if (!qtmux->downstream_seekable) {
+ GST_WARNING_OBJECT (qtmux,
+ "downstream is not seekable, will not be able "
+ "to trim samples table at the end if less than reserved-duration is "
+ "recorded");
+ }
+ break;
+ }
+
+ GST_OBJECT_LOCK (qtmux);
+
+ if (qtmux->timescale == 0) {
+ guint32 suggested_timescale = 0;
+
+ /* Calculate a reasonable timescale for the moov:
+ * If there is video, it is the biggest video track timescale or an even
+ * multiple of it if it's smaller than 1800.
+ * Otherwise it is 1800 */
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qpad = (GstQTMuxPad *) l->data;
+
+ if (!qpad->trak)
+ continue;
+
+ /* not video */
+ if (!qpad->trak->mdia.minf.vmhd)
+ continue;
+
+ suggested_timescale =
+ MAX (qpad->trak->mdia.mdhd.time_info.timescale, suggested_timescale);
+ }
+
+ if (suggested_timescale == 0)
+ suggested_timescale = 1800;
+
+ while (suggested_timescale < 1800)
+ suggested_timescale *= 2;
+
+ qtmux->timescale = suggested_timescale;
+ }
+
+ /* Set width/height/timescale of any closed caption tracks to that of the
+ * first video track */
+ {
+ guint video_width = 0, video_height = 0;
+ guint32 video_timescale = 0;
+ GList *l;
+
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qpad = (GstQTMuxPad *) l->data;
+
+ if (!qpad->trak)
+ continue;
+
+ /* Not closed caption */
+ if (qpad->trak->mdia.hdlr.handler_type != FOURCC_clcp)
+ continue;
+
+ if (video_width == 0 || video_height == 0 || video_timescale == 0) {
+ GList *l2;
+
+ for (l2 = GST_ELEMENT_CAST (qtmux)->sinkpads; l2; l2 = l2->next) {
+ GstQTMuxPad *qpad2 = (GstQTMuxPad *) l2->data;
+
+ if (!qpad2->trak)
+ continue;
+
+ /* not video */
+ if (!qpad2->trak->mdia.minf.vmhd)
+ continue;
+
+ video_width = qpad2->trak->tkhd.width;
+ video_height = qpad2->trak->tkhd.height;
+ video_timescale = qpad2->trak->mdia.mdhd.time_info.timescale;
+ }
+ }
+
+ qpad->trak->tkhd.width = video_width << 16;
+ qpad->trak->tkhd.height = video_height << 16;
+ qpad->trak->mdia.mdhd.time_info.timescale = video_timescale;
+ }
+ }
+
+ /* initialize our moov recovery file */
+ if (qtmux->moov_recov_file_path) {
+ gst_qt_mux_prepare_moov_recovery (qtmux);
+ }
+
+ /* Make sure the first time we update the moov, we'll
+ * include any tagsetter tags */
+ qtmux->tags_changed = TRUE;
+
+ GST_OBJECT_UNLOCK (qtmux);
+
+ /*
+ * send mdat header if already needed, and mark position for later update.
+ * We don't send the ftyp now in fast-start mode, because we can fine-tune
+ * it better using the information we gather while creating the whole moov
+ * atom.
+ */
+ switch (qtmux->mux_mode) {
+ case GST_QT_MUX_MODE_MOOV_AT_END:
+ ret = gst_qt_mux_prepare_and_send_ftyp (qtmux);
+ if (ret != GST_FLOW_OK)
+ break;
+
+ /* Store this as the mdat offset for later updating
+ * when we write the moov */
+ qtmux->mdat_pos = qtmux->header_size;
+ /* extended atom in case we go over 4GB while writing and need
+ * the full 64-bit atom */
+ if (qtmux->downstream_seekable)
+ ret =
+ gst_qt_mux_send_mdat_header (qtmux, &qtmux->header_size, 0, TRUE,
+ FALSE);
+ break;
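+ /* Resulting robust-recording file layout (sketch):
+ * ftyp | free (pad to 8) | free(8) | moov "ping" | free (reserve)
+ * | free "pong" (reserve + 8) | extra atoms | mdat ...
+ * The moov is later rewritten alternately into the ping and pong areas,
+ * so an interrupted recording always has one complete header on disk. */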
+ case GST_QT_MUX_MODE_ROBUST_RECORDING:
+ ret = gst_qt_mux_prepare_and_send_ftyp (qtmux);
+ if (ret != GST_FLOW_OK)
+ break;
+
+ /* Pad ftyp out to an 8-byte boundary before starting the moov
+ * ping pong region. It should be well less than 1 disk sector,
+ * unless there's a bajillion compatible types listed,
+ * but let's be sure the free atom doesn't cross a sector
+ * boundary anyway */
+ if (qtmux->header_size % 8) {
+ /* Extra 8 bytes for the padding free atom header */
+ guint padding = (guint) (16 - (qtmux->header_size % 8));
+ GST_LOG_OBJECT (qtmux, "Rounding ftyp by %u bytes", padding);
+ ret =
+ gst_qt_mux_send_free_atom (qtmux, &qtmux->header_size, padding,
+ FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ }
+
+ /* Store this as the moov offset for later updating.
+ * We record mdat position below */
+ qtmux->moov_pos = qtmux->header_size;
+
+ /* Set up the initial 'ping' state of the ping-pong buffers */
+ qtmux->reserved_moov_first_active = TRUE;
+
+ gst_qt_mux_configure_moov (qtmux);
+ gst_qt_mux_setup_metadata (qtmux);
+ /* Empty free atom to begin, starting on an 8-byte boundary */
+ ret = gst_qt_mux_send_free_atom (qtmux, &qtmux->header_size, 8, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ /* Moov header, not padded yet */
+ ret = gst_qt_mux_send_moov (qtmux, &qtmux->header_size, 0, FALSE, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ /* The moov we just sent contains the 'base' size of the moov, before
+ * we put in any time-dependent per-trak data. Use that to make
+ * a good estimate of how much extra to reserve */
+ /* Calculate how much space to reserve for our MOOV atom.
+ * We actually reserve twice that, for ping-pong buffers */
+ qtmux->base_moov_size = qtmux->last_moov_size;
+ GST_LOG_OBJECT (qtmux, "Base moov size is %u before any indexes",
+ qtmux->base_moov_size);
+ qtmux->reserved_moov_size = qtmux->base_moov_size +
+ gst_util_uint64_scale (reserved_max_duration,
+ reserved_bytes_per_sec_per_trak *
+ atom_moov_get_trak_count (qtmux->moov), GST_SECOND);
+
+ /* Need space for at least 4 atom headers. More really, but
+ * take this as an absolute minimum */
+ if (qtmux->reserved_moov_size < 4 * 8)
+ goto reserved_moov_too_small;
+
+ GST_DEBUG_OBJECT (qtmux, "reserving header area of size %u",
+ 2 * qtmux->reserved_moov_size + 16);
+
+ GST_OBJECT_LOCK (qtmux);
+ qtmux->reserved_duration_remaining =
+ gst_util_uint64_scale (qtmux->reserved_moov_size -
+ qtmux->base_moov_size, GST_SECOND,
+ reserved_bytes_per_sec_per_trak *
+ atom_moov_get_trak_count (qtmux->moov));
+ GST_OBJECT_UNLOCK (qtmux);
+
+ /* Now that we know how much reserved space is targeted,
+ * output a free atom to fill the extra reserved */
+ ret = gst_qt_mux_send_free_atom (qtmux, &qtmux->header_size,
+ qtmux->reserved_moov_size - qtmux->base_moov_size, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ /* Then a free atom containing 'pong' buffer, with an
+ * extra 8 bytes to account for the free atom header itself */
+ ret = gst_qt_mux_send_free_atom (qtmux, &qtmux->header_size,
+ qtmux->reserved_moov_size + 8, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ /* extra atoms go after the free/moov(s), before the mdat */
+ ret =
+ gst_qt_mux_send_extra_atoms (qtmux, TRUE, &qtmux->header_size, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ qtmux->mdat_pos = qtmux->header_size;
+ /* extended atom in case we go over 4GB while writing and need
+ * the full 64-bit atom */
+ ret =
+ gst_qt_mux_send_mdat_header (qtmux, &qtmux->header_size, 0, TRUE,
+ FALSE);
+ break;
+ case GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL:
+ {
+ guint32 atom_size;
+
+ ret = gst_qt_mux_prepare_and_send_ftyp (qtmux);
+ if (ret != GST_FLOW_OK)
+ break;
+
+ /* Store this as the moov offset for later updating.
+ * We record mdat position below */
+ qtmux->moov_pos = qtmux->header_size;
+
+ if (!gst_qt_mux_prefill_samples (qtmux)) {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX,
+ ("Unsupported codecs or configuration for prefill mode"), (NULL));
+
+ return GST_FLOW_ERROR;
+ }
+
+ gst_qt_mux_update_global_statistics (qtmux);
+ gst_qt_mux_configure_moov (qtmux);
+ gst_qt_mux_update_edit_lists (qtmux);
+ gst_qt_mux_setup_metadata (qtmux);
+
+ /* Moov header with pre-filled samples */
+ ret = gst_qt_mux_send_moov (qtmux, &qtmux->header_size, 0, FALSE, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ GST_OBJECT_LOCK (qtmux);
+ atom_size = 12 * g_list_length (GST_ELEMENT (qtmux)->sinkpads) + 8;
+ GST_OBJECT_UNLOCK (qtmux);
+
+ /* last_moov_size now contains the full size of the moov, moov_pos the
+ * position. This allows us to rewrite it in the very end as needed */
+ qtmux->reserved_moov_size = qtmux->last_moov_size + atom_size;
+
+ /* Send an additional free atom at the end so we definitely have space
+ * to rewrite the moov header at the end and remove the samples that
+ * were not actually written */
+ ret =
+ gst_qt_mux_send_free_atom (qtmux, &qtmux->header_size, atom_size,
+ FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ /* extra atoms go after the free/moov(s), before the mdat */
+ ret =
+ gst_qt_mux_send_extra_atoms (qtmux, TRUE, &qtmux->header_size, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ qtmux->mdat_pos = qtmux->header_size;
+
+ /* And now send the mdat header */
+ ret =
+ gst_qt_mux_send_mdat_header (qtmux, &qtmux->header_size,
+ qtmux->mdat_size, TRUE, FALSE);
+
+ /* chunk offsets are set relative to the first byte of the
+ * MDAT atom payload. Set the overall offset into the file */
+ atom_moov_chunks_set_offset (qtmux->moov, qtmux->header_size);
+
+ {
+ gst_qt_mux_seek_to (qtmux, qtmux->moov_pos);
+
+ ret = gst_qt_mux_send_moov (qtmux, NULL, 0, FALSE, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ gst_qt_mux_seek_to (qtmux, qtmux->header_size);
+ }
+
+ GST_OBJECT_LOCK (qtmux);
+ qtmux->current_chunk_size = 0;
+ qtmux->current_chunk_duration = 0;
+ qtmux->current_chunk_offset = -1;
+ qtmux->mdat_size = 0;
+ qtmux->current_pad = NULL;
+ qtmux->longest_chunk = GST_CLOCK_TIME_NONE;
+
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qtpad = (GstQTMuxPad *) l->data;
+
+ qtpad->total_bytes = 0;
+ qtpad->total_duration = 0;
+ qtpad->first_dts = qtpad->first_ts = GST_CLOCK_TIME_NONE;
+ qtpad->last_dts = GST_CLOCK_TIME_NONE;
+ qtpad->sample_offset = 0;
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+
+ break;
+ }
+ case GST_QT_MUX_MODE_FAST_START:
+ GST_OBJECT_LOCK (qtmux);
+ qtmux->fast_start_file = g_fopen (qtmux->fast_start_file_path, "wb+");
+ if (!qtmux->fast_start_file)
+ goto open_failed;
+ GST_OBJECT_UNLOCK (qtmux);
+ /* send a dummy buffer for preroll */
+ ret = gst_qt_mux_send_buffer (qtmux, gst_buffer_new (), NULL, FALSE);
+ break;
+ case GST_QT_MUX_MODE_FRAGMENTED:
+ ret = gst_qt_mux_prepare_and_send_ftyp (qtmux);
+ if (ret != GST_FLOW_OK)
+ break;
+
+ GST_DEBUG_OBJECT (qtmux, "fragment duration %d ms, writing headers",
+ qtmux->fragment_duration);
+ qtmux->fragment_sequence = 0;
+ if (qtmux->fragment_mode == GST_QT_MUX_FRAGMENT_FIRST_MOOV_THEN_FINALISE) {
+ /* Store this as the mdat offset for later updating
+ * when we write the moov */
+ qtmux->mdat_pos = qtmux->header_size;
+ /* extended atom in case we go over 4GB while writing and need
+ * the full 64-bit atom */
+ ret =
+ gst_qt_mux_send_mdat_header (qtmux, &qtmux->header_size, 0, TRUE,
+ FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ } else {
+ /* store the moov pos so we can update the duration later
+ * in non-streamable mode */
+ qtmux->moov_pos = qtmux->header_size;
+
+ /* prepare moov and/or tags */
+ qtmux->fragment_sequence++;
+ gst_qt_mux_configure_moov (qtmux);
+ gst_qt_mux_setup_metadata (qtmux);
+ ret =
+ gst_qt_mux_send_moov (qtmux, &qtmux->header_size, 0, FALSE, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ /* extra atoms */
+ ret =
+ gst_qt_mux_send_extra_atoms (qtmux, TRUE, &qtmux->header_size,
+ FALSE);
+ if (ret != GST_FLOW_OK)
+ break;
+ }
+ /* prepare index if not streamable, or overwriting with moov */
+ if (qtmux->fragment_mode == GST_QT_MUX_FRAGMENT_DASH_OR_MSS)
+ qtmux->mfra = atom_mfra_new (qtmux->context);
+ break;
+ }
+
+ return ret;
+ /* ERRORS */
+ invalid_isml:
+ {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX,
+ ("Cannot create an ISML file with 0 fragment duration"), (NULL));
+ return GST_FLOW_ERROR;
+ }
+ reserved_moov_too_small:
+ {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX,
+ ("Not enough reserved space for creating headers"), (NULL));
+ return GST_FLOW_ERROR;
+ }
+ open_failed:
+ {
+ GST_ELEMENT_ERROR (qtmux, RESOURCE, OPEN_READ_WRITE,
+ (("Could not open temporary file \"%s\""),
+ qtmux->fast_start_file_path), GST_ERROR_SYSTEM);
+ GST_OBJECT_UNLOCK (qtmux);
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ static GstFlowReturn
+ gst_qt_mux_send_last_buffers (GstQTMux * qtmux)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ GList *sinkpads, *l;
+
+ GST_OBJECT_LOCK (qtmux);
+ sinkpads = g_list_copy_deep (GST_ELEMENT_CAST (qtmux)->sinkpads,
+ (GCopyFunc) gst_object_ref, NULL);
+ GST_OBJECT_UNLOCK (qtmux);
+
+ for (l = sinkpads; l; l = l->next) {
+ GstQTMuxPad *qtpad = (GstQTMuxPad *) l->data;
+
+ /* avoid add_buffer complaining if not negotiated
+ * in which case no buffers either, so skipping */
+ if (!qtpad->fourcc) {
+ GST_DEBUG_OBJECT (qtmux, "Pad %s has never had buffers",
+ GST_PAD_NAME (qtpad));
+ continue;
+ }
+
+ /* send last buffer; also flushes possibly queued buffers/ts */
+ GST_DEBUG_OBJECT (qtmux, "Sending the last buffer for pad %s",
+ GST_PAD_NAME (qtpad));
+ ret = gst_qt_mux_add_buffer (qtmux, qtpad, NULL);
+ if (ret != GST_FLOW_OK) {
+ GST_WARNING_OBJECT (qtmux, "Failed to send last buffer for %s, "
+ "flow return: %s", GST_PAD_NAME (qtpad), gst_flow_get_name (ret));
+ }
+ }
+
+ g_list_free_full (sinkpads, gst_object_unref);
+
+ return ret;
+ }
+
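+ /* Walks all sink pads to derive file-global values: the overall first PTS
+ * and last DTS (used for edit lists and durations), the maximum video
+ * dimensions (propagated to tx3g subtitle traks) and per-trak average/max
+ * bitrates. */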
+ static void
+ gst_qt_mux_update_global_statistics (GstQTMux * qtmux)
+ {
+ GList *l;
+
+ /* for setting some subtitles fields */
+ guint max_width = 0;
+ guint max_height = 0;
+
+ qtmux->first_ts = qtmux->last_dts = GST_CLOCK_TIME_NONE;
+
+ GST_OBJECT_LOCK (qtmux);
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qtpad = (GstQTMuxPad *) l->data;
+
+ if (!qtpad->fourcc) {
+ GST_DEBUG_OBJECT (qtmux, "Pad %s has never had buffers",
+ GST_PAD_NAME (qtpad));
+ continue;
+ }
+
+ /* having flushed above, can check for buffers now */
+ if (GST_CLOCK_TIME_IS_VALID (qtpad->first_ts)) {
+ GstClockTime first_pts_in = qtpad->first_ts;
+ /* it should be, since we got first_ts by adding adjustment
+ * to a positive incoming PTS */
+ if (qtpad->dts_adjustment <= first_pts_in)
+ first_pts_in -= qtpad->dts_adjustment;
+ /* determine max stream duration */
+ if (!GST_CLOCK_TIME_IS_VALID (qtmux->last_dts)
+ || qtpad->last_dts > qtmux->last_dts) {
+ qtmux->last_dts = qtpad->last_dts;
+ }
+ if (!GST_CLOCK_TIME_IS_VALID (qtmux->first_ts)
+ || first_pts_in < qtmux->first_ts) {
+ /* we need the original incoming PTS here, as this first_ts
+ * is used in update_edit_lists to construct the edit list that arrange
+ * for sync'ed streams. The first_ts is most likely obtained from
+ * some (audio) stream with 0 dts_adjustment and initial 0 PTS,
+ * so it makes no difference, though it matters in other cases */
+ qtmux->first_ts = first_pts_in;
+ }
+ }
+
+ /* subtitles need to know the video width/height,
+ * it is stored shifted 16 bits to the left according to the
+ * spec */
+ max_width = MAX (max_width, (qtpad->trak->tkhd.width >> 16));
+ max_height = MAX (max_height, (qtpad->trak->tkhd.height >> 16));
+
+ /* update average bitrate of streams if needed */
+ {
+ guint32 avgbitrate = 0;
+ guint32 maxbitrate = qtpad->max_bitrate;
+
+ if (qtpad->avg_bitrate)
+ avgbitrate = qtpad->avg_bitrate;
+ else if (qtpad->total_duration > 0)
+ avgbitrate = (guint32) gst_util_uint64_scale_round (qtpad->total_bytes,
+ 8 * GST_SECOND, qtpad->total_duration);
+
+ atom_trak_update_bitrates (qtpad->trak, avgbitrate, maxbitrate);
+ }
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+
+ /* need to update values on subtitle traks now that we know the
+ * max width and height */
+ GST_OBJECT_LOCK (qtmux);
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qtpad = (GstQTMuxPad *) l->data;
+
+ if (!qtpad->fourcc) {
+ GST_DEBUG_OBJECT (qtmux, "Pad %s has never had buffers",
+ GST_PAD_NAME (qtpad));
+ continue;
+ }
+
+ if (qtpad->fourcc == FOURCC_tx3g) {
+ atom_trak_tx3g_update_dimension (qtpad->trak, max_width, max_height);
+ }
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+ }
+
+ /* Called after gst_qt_mux_update_global_statistics() updates the
+ * first_ts tracking, to create/set edit lists for delayed streams */
+ static void
+ gst_qt_mux_update_edit_lists (GstQTMux * qtmux)
+ {
+ GList *l;
+
+ GST_DEBUG_OBJECT (qtmux, "Media first ts selected: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (qtmux->first_ts));
+ /* add/update EDTSs for late streams. configure_moov will have
+ * set the trak durations above by summing the sample tables,
+ * here we extend that if needing to insert an empty segment */
+ GST_OBJECT_LOCK (qtmux);
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qtpad = (GstQTMuxPad *) l->data;
+
+ atom_trak_edts_clear (qtpad->trak);
+
+ if (GST_CLOCK_TIME_IS_VALID (qtpad->first_ts)) {
+ guint32 lateness = 0;
+ guint32 duration = qtpad->trak->tkhd.duration;
+ gboolean has_gap;
+
+ has_gap = (qtpad->first_ts > (qtmux->first_ts + qtpad->dts_adjustment));
+
+ if (has_gap) {
+ GstClockTime diff, trak_lateness;
+
+ diff = qtpad->first_ts - (qtmux->first_ts + qtpad->dts_adjustment);
+ lateness = gst_util_uint64_scale_round (diff,
+ qtmux->timescale, GST_SECOND);
+
+ /* Allow up to 1 trak timescale unit of lateness. Such a small
+ * timestamp/duration can't be represented by the trak-specific parts
+ * of the headers anyway, so it's irrelevantly small */
+ trak_lateness = gst_util_uint64_scale (diff,
+ atom_trak_get_timescale (qtpad->trak), GST_SECOND);
+
+ if (trak_lateness > 0 && diff > qtmux->start_gap_threshold) {
+ GST_DEBUG_OBJECT (qtmux,
+ "Pad %s is a late stream by %" GST_TIME_FORMAT,
+ GST_PAD_NAME (qtpad), GST_TIME_ARGS (diff));
+
+ atom_trak_set_elst_entry (qtpad->trak, 0, lateness, (guint32) - 1,
+ (guint32) (1 * 65536.0));
+ }
+ }
+
+ /* Always write an edit list for the whole track. In general this is not
+ * necessary except for the case of having a gap or DTS adjustment but
+ * it allows to give the whole track's duration in the usually more
+ * accurate media timescale
+ */
+ {
+ GstClockTime ctts = 0;
+ guint32 media_start;
+
+ if (qtpad->first_ts > qtpad->first_dts)
+ ctts = qtpad->first_ts - qtpad->first_dts;
+
+ media_start = gst_util_uint64_scale_round (ctts,
+ atom_trak_get_timescale (qtpad->trak), GST_SECOND);
+
+ /* atom_trak_set_elst_entry() has a quirk - if the edit list
+ * is empty because there's no gap added above, this call
+ * will not replace index 1, it will create the entry at index 0.
+ * Luckily, that's exactly what we want here */
+ atom_trak_set_elst_entry (qtpad->trak, 1, duration, media_start,
+ (guint32) (1 * 65536.0));
+ }
+
+ /* need to add the empty time to the trak duration */
+ duration += lateness;
+ qtpad->trak->tkhd.duration = duration;
+ if (qtpad->tc_trak) {
+ qtpad->tc_trak->tkhd.duration = duration;
+ qtpad->tc_trak->mdia.mdhd.time_info.duration = duration;
+ }
+
+ /* And possibly grow the moov duration */
+ if (duration > qtmux->moov->mvhd.time_info.duration) {
+ qtmux->moov->mvhd.time_info.duration = duration;
+ qtmux->moov->mvex.mehd.fragment_duration = duration;
+ }
+ }
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+ }
+
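+ /* Seeks back to the reserved 4-byte timecode sample and overwrites it with
+ * the frame count since the daily jam of the first timecode seen. Only
+ * done for the QT format, or when timecode trak creation is forced. */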
+ static GstFlowReturn
+ gst_qt_mux_update_timecode (GstQTMux * qtmux, GstQTMuxPad * qtpad)
+ {
+ GstBuffer *buf;
+ GstMapInfo map;
+ guint64 offset = qtpad->tc_pos;
+ GstQTMuxClass *qtmux_klass = (GstQTMuxClass *) (G_OBJECT_GET_CLASS (qtmux));
+
+ if (qtmux_klass->format != GST_QT_MUX_FORMAT_QT &&
+ !qtmux->force_create_timecode_trak)
+ return GST_FLOW_OK;
+
+ g_assert (qtpad->tc_pos != -1);
+
+ gst_qt_mux_seek_to (qtmux, offset);
+
+ buf = gst_buffer_new_and_alloc (4);
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+
+ GST_WRITE_UINT32_BE (map.data,
+ gst_video_time_code_frames_since_daily_jam (qtpad->first_tc));
+ gst_buffer_unmap (buf, &map);
+
+ /* Reset this value, so the timecode won't be re-rewritten */
+ qtpad->tc_pos = -1;
+
+ return gst_qt_mux_send_buffer (qtmux, buf, &offset, FALSE);
+ }
+
+ static void
+ unref_buffer_if_set (GstBuffer * buffer)
+ {
+ if (buffer)
+ gst_buffer_unref (buffer);
+ }
+
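+ /* Used in moov-at-end mode when downstream is not seekable: all media
+ * buffers were queued in output_buffers, so emit one mdat header sized to
+ * the queued data, followed by the buffers themselves. */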
+ static GstFlowReturn
+ gst_qtmux_push_mdat_stored_buffers (GstQTMux * qtmux)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ GList *l = qtmux->output_buffers;
+ guint64 mdat_header_size = 0, size = 0;
+
+ for (; l; l = g_list_next (l)) {
+ GstBuffer *buf = (GstBuffer *) l->data;
+
+ size += gst_buffer_get_size (buf);
+ }
+
+ if (size == 0)
+ return GST_FLOW_OK;
+
+ GST_DEBUG_OBJECT (qtmux, "Pushing stored buffers of size %" G_GUINT64_FORMAT
+ " current mdat size %" G_GUINT64_FORMAT, size, qtmux->mdat_size);
+
+ ret = gst_qt_mux_send_mdat_header (qtmux, &mdat_header_size, size,
+ size > MDAT_LARGE_FILE_LIMIT, FALSE);
+
+ /* reset chunking */
+ qtmux->current_chunk_size = 0;
+ qtmux->current_chunk_duration = 0;
+ qtmux->current_chunk_offset = -1;
+
+ /* on the first mdat, we need to offset the header by the mdat header size
+ * as the moov offset is in relation to the first data byte inside the first
+ * mdat */
+ if (qtmux->mdat_size == 0)
+ qtmux->header_size += mdat_header_size;
+ qtmux->mdat_size += mdat_header_size;
+
+ l = qtmux->output_buffers;
+ while (ret == GST_FLOW_OK && l) {
+ GstBuffer *buf = (GstBuffer *) l->data;
+
+ ret = gst_qt_mux_send_buffer (qtmux, buf, &qtmux->mdat_size, TRUE);
+
+ l->data = NULL;
+ l = g_list_next (l);
+ }
+
+ g_list_free_full (qtmux->output_buffers,
+ (GDestroyNotify) unref_buffer_if_set);
+ qtmux->output_buffers = NULL;
+
+ return ret;
+ }
+
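+ /* Finalises the file on EOS: flushes the last queued buffer of every pad,
+ * updates global statistics, timecodes and edit lists, and then rewrites
+ * or emits the headers as appropriate for the current mux mode. */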
+ static GstFlowReturn
+ gst_qt_mux_stop_file (GstQTMux * qtmux)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint64 offset = 0, size = 0;
+ gboolean large_file;
+ GList *sinkpads, *l;
+
+ GST_DEBUG_OBJECT (qtmux, "Updating remaining values and sending last data");
+
+ /* pushing last buffers for each pad */
+ if ((ret = gst_qt_mux_send_last_buffers (qtmux)) != GST_FLOW_OK)
+ return ret;
+
+ if (qtmux->mux_mode == GST_QT_MUX_MODE_FRAGMENTED
+ && qtmux->fragment_mode == GST_QT_MUX_FRAGMENT_STREAMABLE) {
+ /* Streamable mode; no need to write duration or MFRA */
+ GST_DEBUG_OBJECT (qtmux, "streamable file; nothing to stop");
+ return GST_FLOW_OK;
+ }
+
+ gst_qt_mux_update_global_statistics (qtmux);
+
+ GST_OBJECT_LOCK (qtmux);
+ sinkpads = g_list_copy_deep (GST_ELEMENT_CAST (qtmux)->sinkpads,
+ (GCopyFunc) gst_object_ref, NULL);
+ GST_OBJECT_UNLOCK (qtmux);
+
+ for (l = sinkpads; l; l = l->next) {
+ GstQTMuxPad *qtpad = (GstQTMuxPad *) l->data;
+
+ if (qtpad->tc_pos != -1) {
+ /* File is being stopped and timecode hasn't been updated. Update it now
+ * with whatever we have */
+ ret = gst_qt_mux_update_timecode (qtmux, qtpad);
+ if (ret != GST_FLOW_OK) {
+ g_list_free_full (sinkpads, gst_object_unref);
+ return ret;
+ }
+ }
+ }
+
+ g_list_free_full (sinkpads, gst_object_unref);
+
+ switch (qtmux->mux_mode) {
+ case GST_QT_MUX_MODE_MOOV_AT_END:{
+ if (!qtmux->downstream_seekable) {
+ ret = gst_qtmux_push_mdat_stored_buffers (qtmux);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ }
+ break;
+ }
+ case GST_QT_MUX_MODE_FRAGMENTED:{
+ GstBuffer *buf;
+ GstClockTime duration;
+
+ if (qtmux->mfra) {
+ guint8 *data = NULL;
+
+ size = offset = 0;
+
+ GST_DEBUG_OBJECT (qtmux, "adding mfra");
+ if (!atom_mfra_copy_data (qtmux->mfra, &data, &size, &offset))
+ goto serialize_error;
+ buf = _gst_buffer_new_take_data (data, offset);
+ ret = gst_qt_mux_send_buffer (qtmux, buf, NULL, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ }
+
+ /* only mvex duration is updated,
+ * mvhd should be consistent with empty moov
+ * (but TODO: maybe some clients do not handle that well?) */
+ duration = gst_util_uint64_scale_round (qtmux->last_dts, qtmux->timescale,
+ GST_SECOND);
+
+ GST_DEBUG_OBJECT (qtmux,
+ "writing moov with mvhd/mvex duration %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (qtmux->last_dts));
+ if (qtmux->fragment_mode == GST_QT_MUX_FRAGMENT_FIRST_MOOV_THEN_FINALISE) {
+ /* seek and overwrite the original moov with an invalid atom */
+ /* XXX: assumes an extended size atom is not used for the moov */
+
+ qtmux->moov->mvhd.time_info.duration = duration;
+
+ /* (+4) skip the skip bytes */
+ gst_qt_mux_seek_to (qtmux, qtmux->moov_pos + 4);
+
+ /* invalidate the previous moov */
+ buf = gst_buffer_new_wrapped (g_strdup ("h"), 1);
+ ret = gst_qt_mux_send_buffer (qtmux, buf, NULL, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ /* we want to rewrite the first mdat to cover the entire data before
+ * this moov */
+ qtmux->mdat_size = qtmux->header_size - qtmux->mdat_pos - 16;
+
+ gst_qt_mux_seek_to (qtmux, qtmux->mdat_pos);
+
+ ret = gst_qt_mux_update_mdat_size (qtmux, qtmux->mdat_pos,
+ qtmux->mdat_size, NULL, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ /* Then write the moov atom as in moov-at-end *without* updating the
+ * mdat size */
+ gst_qt_mux_seek_to (qtmux, qtmux->header_size);
+
+ /* revert back to moov-at-end assumptions where header_size is the
+ * size up to the first byte of data in the mdat */
+ qtmux->header_size = qtmux->mdat_pos + 16;
+ break;
+ } else {
+ qtmux->moov->mvex.mehd.fragment_duration = duration;
+
+ /* seek and rewrite the header */
+ gst_qt_mux_seek_to (qtmux, qtmux->moov_pos);
+ /* no need to seek back */
+ return gst_qt_mux_send_moov (qtmux, NULL, 0, FALSE, FALSE);
+ }
+ }
+ case GST_QT_MUX_MODE_ROBUST_RECORDING:{
+ ret = gst_qt_mux_robust_recording_rewrite_moov (qtmux);
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
+ return ret;
+ /* Finalise by writing the final size into the mdat. Up until now
+ * it's been 0, which means 'rest of the file'
+ * No need to seek back after this, we won't write any more */
+ return gst_qt_mux_update_mdat_size (qtmux, qtmux->mdat_pos,
+ qtmux->mdat_size, NULL, TRUE);
+ }
+ case GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL:{
+ GList *l;
+ guint32 next_track_id = qtmux->moov->mvhd.next_track_id;
+
+ GST_OBJECT_LOCK (qtmux);
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qpad = (GstQTMuxPad *) l->data;
+ guint64 block_idx;
+ AtomSTBL *stbl = &qpad->trak->mdia.minf.stbl;
+
+ /* Get the block index of the last sample we wrote, not of the next
+ * sample we would write */
+ block_idx = prefill_get_block_index (qtmux, qpad);
+
+ /* stts */
+ if (block_idx > 0) {
+ STTSEntry *entry;
+ guint64 nsamples = 0;
+ gint i, n;
+
+ n = atom_array_get_len (&stbl->stts.entries);
+ for (i = 0; i < n; i++) {
+ entry = &atom_array_index (&stbl->stts.entries, i);
+ if (nsamples + entry->sample_count >= qpad->sample_offset) {
+ entry->sample_count = qpad->sample_offset - nsamples;
+ stbl->stts.entries.len = i + 1;
+ break;
+ }
+ nsamples += entry->sample_count;
+ }
+ g_assert (i < n);
+ } else {
+ stbl->stts.entries.len = 0;
+ }
+
+ /* stsz */
+ {
+ g_assert (stbl->stsz.entries.len == 0);
+ stbl->stsz.table_size = qpad->sample_offset;
+ }
+
+ /* stco/stsc */
+ {
+ gint i, n;
+ guint64 nsamples = 0;
+ gint chunk_index = 0;
+ const TrakBufferEntryInfo *sample_entry;
+
+ if (block_idx > 0) {
+ sample_entry =
+ &g_array_index (qpad->samples, TrakBufferEntryInfo,
+ block_idx - 1);
+
+ n = stbl->stco64.entries.len;
+ for (i = 0; i < n; i++) {
+ guint64 *entry = &atom_array_index (&stbl->stco64.entries, i);
+
+ if (*entry == sample_entry->chunk_offset) {
+ stbl->stco64.entries.len = i + 1;
+ chunk_index = i + 1;
+ break;
+ }
+ }
+ g_assert (i < n);
+ g_assert (chunk_index > 0);
+
+ n = stbl->stsc.entries.len;
+ for (i = 0; i < n; i++) {
+ STSCEntry *entry = &atom_array_index (&stbl->stsc.entries, i);
+
+ if (entry->first_chunk >= chunk_index)
+ break;
+
+ if (i > 0) {
+ nsamples +=
+ (entry->first_chunk - atom_array_index (&stbl->stsc.entries,
+ i -
+ 1).first_chunk) * atom_array_index (&stbl->stsc.entries,
+ i - 1).samples_per_chunk;
+ }
+ }
+ g_assert (i <= n);
+
+ if (i > 0) {
+ STSCEntry *prev_entry =
+ &atom_array_index (&stbl->stsc.entries, i - 1);
+ nsamples +=
+ (chunk_index -
+ prev_entry->first_chunk) * prev_entry->samples_per_chunk;
+ if (qpad->sample_offset - nsamples > 0) {
+ stbl->stsc.entries.len = i;
+ atom_stsc_add_new_entry (&stbl->stsc, chunk_index,
+ qpad->sample_offset - nsamples, stbl->stsd.n_entries);
+ } else {
+ stbl->stsc.entries.len = i;
+ stbl->stco64.entries.len--;
+ }
+ } else {
+ /* Everything in a single chunk */
+ stbl->stsc.entries.len = 0;
+ atom_stsc_add_new_entry (&stbl->stsc, chunk_index,
+ qpad->sample_offset, stbl->stsd.n_entries);
+ }
+ } else {
+ stbl->stco64.entries.len = 0;
+ stbl->stsc.entries.len = 0;
+ }
+ }
+
+ {
+ GList *walk2;
+
+ for (walk2 = qtmux->moov->mvex.trexs; walk2; walk2 = walk2->next) {
+ AtomTREX *trex = walk2->data;
+
+ if (trex->track_ID == qpad->trak->tkhd.track_ID) {
+ trex->track_ID = next_track_id;
+ break;
+ }
+ }
+
+ qpad->trak->tkhd.track_ID = next_track_id++;
+ }
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+
+ qtmux->moov->mvhd.next_track_id = next_track_id;
+
+ gst_qt_mux_update_global_statistics (qtmux);
+ gst_qt_mux_configure_moov (qtmux);
+
+ gst_qt_mux_update_edit_lists (qtmux);
+
+ /* Check if any gap edit lists were added. We don't have any space
+ * reserved for this in the moov and the pre-finalized moov would have
+ * broken A/V synchronization. Error out here now
+ */
+ GST_OBJECT_LOCK (qtmux);
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qpad = (GstQTMuxPad *) l->data;
+
+ if (qpad->trak->edts
+ && g_slist_length (qpad->trak->edts->elst.entries) > 1) {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Can't support gaps in prefill mode"));
+
+ GST_OBJECT_UNLOCK (qtmux);
+
+ return GST_FLOW_ERROR;
+ }
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+
+ gst_qt_mux_setup_metadata (qtmux);
+ atom_moov_chunks_set_offset (qtmux->moov, qtmux->header_size);
+
+ {
+ gst_qt_mux_seek_to (qtmux, qtmux->moov_pos);
+
+ ret =
+ gst_qt_mux_send_moov (qtmux, NULL, qtmux->reserved_moov_size, FALSE,
+ FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ if (qtmux->reserved_moov_size > qtmux->last_moov_size) {
+ ret =
+ gst_qt_mux_send_free_atom (qtmux, NULL,
+ qtmux->reserved_moov_size - qtmux->last_moov_size, TRUE);
+ }
+
+ if (ret != GST_FLOW_OK)
+ return ret;
+ }
+
+ ret = gst_qt_mux_update_mdat_size (qtmux, qtmux->mdat_pos,
+ qtmux->mdat_size, NULL, FALSE);
+ return ret;
+ }
+ default:
+ break;
+ }
+
+ /* Moov-at-end or fast-start mode from here down */
+ gst_qt_mux_configure_moov (qtmux);
+
+ gst_qt_mux_update_edit_lists (qtmux);
+
+ /* tags into file metadata */
+ gst_qt_mux_setup_metadata (qtmux);
+
+ large_file = (qtmux->mdat_size > MDAT_LARGE_FILE_LIMIT);
+
+ switch (qtmux->mux_mode) {
+ case GST_QT_MUX_MODE_FAST_START:{
+ /* if faststart, update the offset of the atoms in the movie with the offset
+ * that the movie headers before mdat will cause.
+ * Also, send the ftyp */
+ offset = size = 0;
+
+ ret = gst_qt_mux_prepare_and_send_ftyp (qtmux);
+ if (ret != GST_FLOW_OK) {
+ goto ftyp_error;
+ }
+ /* copy into NULL to obtain size */
+ if (!atom_moov_copy_data (qtmux->moov, NULL, &size, &offset))
+ goto serialize_error;
+ GST_DEBUG_OBJECT (qtmux, "calculated moov atom size %" G_GUINT64_FORMAT,
+ offset);
+ offset += qtmux->header_size + (large_file ? 16 : 8);
+
+ /* sum up with the extra atoms size */
+ ret = gst_qt_mux_send_extra_atoms (qtmux, FALSE, &offset, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ break;
+ }
+ default:
+ offset = qtmux->header_size;
+ break;
+ }
+
+ /* Now that we know the size of moov + extra atoms, we can adjust
+ * the chunk offsets stored into the moov */
+ atom_moov_chunks_set_offset (qtmux->moov, offset);
+
+ /* write out moov and extra atoms */
+ /* note: as of this point, we no longer care about tracking written data size,
+ * since there is no more use for it anyway */
+ ret = gst_qt_mux_send_moov (qtmux, NULL, 0, FALSE, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ /* extra atoms */
+ ret = gst_qt_mux_send_extra_atoms (qtmux, TRUE, NULL, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ switch (qtmux->mux_mode) {
+ case GST_QT_MUX_MODE_MOOV_AT_END:
+ {
+ if (qtmux->downstream_seekable) {
+ /* mdat needs update iff not using faststart */
+ GST_DEBUG_OBJECT (qtmux, "updating mdat size at position %"
+ G_GUINT64_FORMAT " to size %" G_GUINT64_FORMAT, qtmux->mdat_pos,
+ qtmux->mdat_size);
+ ret = gst_qt_mux_update_mdat_size (qtmux, qtmux->mdat_pos,
+ qtmux->mdat_size, NULL, FALSE);
+ /* note: no seeking back to the end of file is done,
+ * since we no longer write anything anyway */
+ }
+ break;
+ }
+ case GST_QT_MUX_MODE_FAST_START:
+ {
+ /* send mdat atom and move buffered data into it */
+ /* mdat_size = accumulated (buffered data) */
+ ret = gst_qt_mux_send_mdat_header (qtmux, NULL, qtmux->mdat_size,
+ large_file, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ ret = gst_qt_mux_send_buffered_data (qtmux, NULL);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ break;
+ }
+ case GST_QT_MUX_MODE_FRAGMENTED:
+ g_assert (qtmux->fragment_mode ==
+ GST_QT_MUX_FRAGMENT_FIRST_MOOV_THEN_FINALISE);
+ break;
+ default:
+ g_assert_not_reached ();
+ }
+
+ return ret;
+
+ /* ERRORS */
+ serialize_error:
+ {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Failed to serialize moov"));
+ return GST_FLOW_ERROR;
+ }
+ ftyp_error:
+ {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL), ("Failed to send ftyp"));
+ return GST_FLOW_ERROR;
+ }
+ }
+
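+ /* gst_element_foreach_sink_pad() callback: re-arms the pad's remaining
+ * fragment duration, converted to trak timescale units. */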
+ static gboolean
+ gst_qtmux_pad_update_fragment_duration (GstElement * element, GstPad * pad,
+ gpointer user_data)
+ {
+ GstQTMux *qtmux = (GstQTMux *) element;
+ GstQTMuxPad *qt_pad = GST_QT_MUX_PAD (pad);
+
+ qt_pad->fragment_duration = gst_util_uint64_scale (qtmux->fragment_duration,
+ atom_trak_get_timescale (qt_pad->trak), 1000);
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_qtmux_pad_collect_traf (GstElement * element, GstPad * pad,
+ gpointer user_data)
+ {
+ GstQTMuxPad *qt_pad = GST_QT_MUX_PAD (pad);
+ AtomMOOF *moof = user_data;
+
+ GST_TRACE_OBJECT (pad, "adding traf %p to moof %p", qt_pad->traf, moof);
+
+ /* takes ownership */
+ if (qt_pad->traf)
+ atom_moof_add_traf (moof, qt_pad->traf);
+ qt_pad->traf = NULL;
+
+ return TRUE;
+ }
+
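+ /* Adds a buffer to the pad's current fragment. When the fragment is full
+ * (fragment duration exceeded), on a sync sample, or when forced, the
+ * pending traf is flushed: either a moof + mdat pair is written out, or,
+ * in first-moov-then-finalise mode, the initial moov is produced/updated. */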
+ static GstFlowReturn
+ gst_qt_mux_pad_fragment_add_buffer (GstQTMux * qtmux, GstQTMuxPad * pad,
+ GstBuffer * buf, gboolean force, guint32 nsamples, gint64 dts,
+ guint32 delta, guint32 size, guint64 chunk_offset, gboolean sync,
+ gint64 pts_offset)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint index = 0;
+
+ GST_LOG_OBJECT (pad, "%p %u %" G_GUINT64_FORMAT " %" G_GUINT64_FORMAT,
+ pad->traf, force, qtmux->current_chunk_offset, chunk_offset);
+
+ /* setup if needed */
+ if (G_UNLIKELY (!pad->traf || force))
+ goto init;
+
+ flush:
+ /* flush pad fragment if threshold reached,
+ * or at new keyframe if we should be minding those in the first place */
+ if (G_UNLIKELY (force || (sync && pad->sync) ||
+ pad->fragment_duration < (gint64) delta)) {
+
+ if (qtmux->fragment_mode == GST_QT_MUX_FRAGMENT_FIRST_MOOV_THEN_FINALISE) {
+ if (qtmux->fragment_sequence == 0) {
+ /* the first fragment which we write as a moov */
+ guint64 orig_offset;
+ guint64 offset = orig_offset = qtmux->mdat_pos + 16 + qtmux->mdat_size;
+ guint64 chunk_increase, buf_size;
+ AtomMOOF *moof;
+
+ GST_LOG_OBJECT (qtmux, "current file offset calculated to be %"
+ G_GUINT64_FORMAT " based on mdat pos %" G_GUINT64_FORMAT
+ " and size %" G_GUINT64_FORMAT, offset, qtmux->mdat_pos,
+ qtmux->mdat_size);
+
+ moof = atom_moof_new (qtmux->context, qtmux->fragment_sequence);
+ gst_element_foreach_sink_pad (GST_ELEMENT (qtmux),
+ gst_qtmux_pad_collect_traf, moof);
+ atom_moof_free (moof);
+
+ ret = gst_qt_mux_update_mdat_size (qtmux, qtmux->mdat_pos,
+ qtmux->mdat_size, NULL, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ /* seek back to the end of the file */
+ qtmux->moov_pos = offset;
+ gst_qt_mux_seek_to (qtmux, qtmux->moov_pos);
+
+ /* update moov data */
+ gst_qt_mux_update_global_statistics (qtmux);
+ gst_qt_mux_configure_moov_full (qtmux, TRUE, qtmux->timescale);
+ gst_qt_mux_update_edit_lists (qtmux);
+ gst_qt_mux_setup_metadata (qtmux);
+ /* chunk offset is the offset to the first byte inside the mdat */
+ atom_moov_chunks_set_offset (qtmux->moov, qtmux->mdat_pos + 16);
+
+ ret = gst_qt_mux_send_moov (qtmux, &offset, 0, TRUE, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ /* for the continuation in fragments, header_size is the tracking write
+ * position */
+ qtmux->header_size = offset;
+ qtmux->moof_mdat_pos = 0;
+
+ buf_size = (buf ? gst_buffer_get_size (buf) : 0);
+
+ chunk_increase = offset - orig_offset + 16;
+ /* we need to undo the addition to qtmux->current_chunk_size of this
+ * buffer performed in gst_qt_mux_register_buffer_in_chunk() */
+ chunk_increase += qtmux->current_chunk_size - buf_size;
+ GST_LOG_OBJECT (qtmux, "We think we have written %" G_GUINT64_FORMAT
+ " including a moov and mdat header of %" G_GUINT64_FORMAT
+ ". mangling this buffer's chunk offset from %" G_GUINT64_FORMAT
+ " to %" G_GUINT64_FORMAT, qtmux->header_size,
+ offset - orig_offset + 16, chunk_offset,
+ chunk_offset + chunk_increase);
+ /* this is the offset for the current chunk that is applied to all subsequent chunks */
+ chunk_offset += chunk_increase;
+ qtmux->current_chunk_offset += chunk_increase;
+ qtmux->current_chunk_size = buf_size;
+ GST_LOG_OBJECT (qtmux, "change next chunk offset to %" G_GUINT64_FORMAT
+ " and size to %" G_GUINT64_FORMAT, qtmux->current_chunk_offset,
+ qtmux->current_chunk_size);
+
+ gst_element_foreach_sink_pad (GST_ELEMENT (qtmux),
+ gst_qtmux_pad_update_fragment_duration, NULL);
+ } else {
+ AtomMOOF *moof;
+ guint64 size = 0, offset = 0;
+ guint8 *data = NULL;
+ GstBuffer *moof_buffer;
+ guint64 moof_size = 0, buf_size;
+ guint64 chunk_increase;
+
+ /* rewrite the mdat header */
+ ret = gst_qt_mux_update_mdat_size (qtmux, qtmux->moof_mdat_pos,
+ qtmux->header_size - qtmux->moof_mdat_pos - 16, NULL, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ /* seek back to the current position */
+ gst_qt_mux_seek_to (qtmux, qtmux->header_size);
+
+ moof = atom_moof_new (qtmux->context, qtmux->fragment_sequence);
+ gst_element_foreach_sink_pad (GST_ELEMENT (qtmux),
+ gst_qtmux_pad_collect_traf, moof);
+ atom_moof_set_base_offset (moof, qtmux->moof_mdat_pos);
+ atom_moof_copy_data (moof, &data, &size, &offset);
+ moof_buffer = _gst_buffer_new_take_data (data, offset);
+ moof_size = gst_buffer_get_size (moof_buffer);
+
+ atom_moof_free (moof);
+ /* now we know where moof ends up, update offset in tfra */
+ if (pad->tfra)
+ atom_tfra_update_offset (pad->tfra, qtmux->header_size);
+
+ GST_LOG_OBJECT (qtmux, "writing moof of size %" G_GUINT64_FORMAT,
+ moof_size);
+ ret =
+ gst_qt_mux_send_buffer (qtmux, moof_buffer, &qtmux->header_size,
+ FALSE);
+ if (ret != GST_FLOW_OK)
+ goto moof_send_error;
+ qtmux->moof_mdat_pos = 0;
+
+ /* if we are writing a final moov, then we need to increase our chunk
+ * offsets to include the moof/mdat headers that were just written,
+ * so that they are correctly skipped over.
+ */
+ buf_size = (buf ? gst_buffer_get_size (buf) : 0);
+ chunk_increase = moof_size + 16;
+ /* we need to undo the addition to qtmux->current_chunk_size of this
+ * buffer performed in gst_qt_mux_register_buffer_in_chunk() */
+ chunk_increase += qtmux->current_chunk_size - buf_size;
+ GST_LOG_OBJECT (qtmux, "We think we have currently written %"
+ G_GUINT64_FORMAT " including a moof of %" G_GUINT64_FORMAT
+ " mangling this buffer's chunk offset from %" G_GUINT64_FORMAT
+ " to %" G_GUINT64_FORMAT, qtmux->header_size, moof_size,
+ chunk_offset, chunk_offset + chunk_increase);
+ chunk_offset += chunk_increase;
+ /* this is the offset for the next chunk */
+ qtmux->current_chunk_offset += chunk_increase;
+ qtmux->current_chunk_size = buf_size;
+ GST_LOG_OBJECT (qtmux, "change next chunk offset to %" G_GUINT64_FORMAT
+ " and size to %" G_GUINT64_FORMAT, qtmux->current_chunk_offset,
+ qtmux->current_chunk_size);
+
+ /* if we are generating a moof, it is for all streams */
+ gst_element_foreach_sink_pad (GST_ELEMENT (qtmux),
+ gst_qtmux_pad_update_fragment_duration, NULL);
+ }
+ } else {
+ /* not moov-related. writes out moof then mdat for a single stream only */
+ AtomMOOF *moof;
+ guint64 size = 0, offset = 0;
+ guint8 *data = NULL;
+ GstBuffer *moof_buffer;
+ guint i, total_size;
+ AtomTRUN *first_trun;
+
+ total_size = 0;
+ for (i = 0; i < atom_array_get_len (&pad->fragment_buffers); i++) {
+ total_size +=
+ gst_buffer_get_size (atom_array_index (&pad->fragment_buffers, i));
+ }
+
+ moof = atom_moof_new (qtmux->context, qtmux->fragment_sequence);
+ /* takes ownership */
+ atom_moof_add_traf (moof, pad->traf);
+ /* write the offset into the first 'trun'. All other truns are assumed
+ * to follow on from this trun. Skip over the mdat header (+12) */
+ atom_moof_copy_data (moof, &data, &size, &offset);
+ first_trun = (AtomTRUN *) pad->traf->truns->data;
+ atom_trun_set_offset (first_trun, offset + 12);
+ pad->traf = NULL;
+ size = offset = 0;
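+ /* re-serialize now that the first trun's data offset is set */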
+ atom_moof_copy_data (moof, &data, &size, &offset);
+ moof_buffer = _gst_buffer_new_take_data (data, offset);
+
+ atom_moof_free (moof);
+
+ /* now we know where moof ends up, update offset in tfra */
+ if (pad->tfra)
+ atom_tfra_update_offset (pad->tfra, qtmux->header_size);
+
+ GST_LOG_OBJECT (qtmux, "writing moof size %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (moof_buffer));
+ ret =
+ gst_qt_mux_send_buffer (qtmux, moof_buffer, &qtmux->header_size,
+ FALSE);
+ if (ret != GST_FLOW_OK)
+ goto moof_send_error;
+
+ GST_LOG_OBJECT (qtmux, "writing %d buffers, total_size %d",
+ atom_array_get_len (&pad->fragment_buffers), total_size);
+
+ ret = gst_qt_mux_send_mdat_header (qtmux, &qtmux->header_size, total_size,
+ FALSE, FALSE);
+ if (ret != GST_FLOW_OK)
+ goto mdat_header_send_error;
+
+ for (index = 0; index < atom_array_get_len (&pad->fragment_buffers);
+ index++) {
+ GST_DEBUG_OBJECT (qtmux, "sending fragment %p",
+ atom_array_index (&pad->fragment_buffers, index));
+ ret =
+ gst_qt_mux_send_buffer (qtmux,
+ atom_array_index (&pad->fragment_buffers, index),
+ &qtmux->header_size, FALSE);
+ if (ret != GST_FLOW_OK)
+ goto fragment_buf_send_error;
+ }
+
+ }
+ atom_array_clear (&pad->fragment_buffers);
+ qtmux->fragment_sequence++;
+ force = FALSE;
+ }
+
+ init:
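+ /* in first-moov-then-finalise mode the very first fragment is written
+ * as plain moov-referenced data; moof structures only start from the
+ * second fragment onwards */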
+ if (qtmux->fragment_mode == GST_QT_MUX_FRAGMENT_FIRST_MOOV_THEN_FINALISE
+ && qtmux->fragment_sequence == 0) {
+ atom_trak_add_samples (pad->trak, nsamples, (gint32) delta, size,
+ chunk_offset, sync, pts_offset);
+
+ ret = gst_qt_mux_send_buffer (qtmux, buf, &qtmux->mdat_size, TRUE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ buf = NULL;
+
+ if (G_UNLIKELY (force))
+ goto flush;
+
+ if (!pad->traf) {
+ pad->traf = atom_traf_new (qtmux->context, atom_trak_get_id (pad->trak));
+ pad->fragment_duration = gst_util_uint64_scale (qtmux->fragment_duration,
+ atom_trak_get_timescale (pad->trak), 1000);
+ }
+ pad->fragment_duration -= delta;
+
+ return ret;
+ } else if (G_UNLIKELY (!pad->traf)) {
+ GstClockTime first_dts = 0, current_dts;
+ gint64 first_qt_dts;
+ GST_LOG_OBJECT (pad, "setting up new fragment");
+ pad->traf = atom_traf_new (qtmux->context, atom_trak_get_id (pad->trak));
+ atom_array_init (&pad->fragment_buffers, 512);
+ pad->fragment_duration = gst_util_uint64_scale (qtmux->fragment_duration,
+ atom_trak_get_timescale (pad->trak), 1000);
+
+ if (G_UNLIKELY (qtmux->mfra && !pad->tfra)) {
+ pad->tfra = atom_tfra_new (qtmux->context, atom_trak_get_id (pad->trak));
+ atom_mfra_add_tfra (qtmux->mfra, pad->tfra);
+ }
+ if (GST_CLOCK_TIME_IS_VALID (pad->first_dts))
+ first_dts = pad->first_dts;
+
+ current_dts =
+ gst_util_uint64_scale (dts, GST_SECOND,
+ atom_trak_get_timescale (pad->trak));
+ first_qt_dts =
+ gst_util_uint64_scale (first_dts, atom_trak_get_timescale (pad->trak),
+ GST_SECOND);
+ GST_DEBUG_OBJECT (pad, "calculating base decode time with first dts %"
+ G_GINT64_FORMAT " (%" GST_TIME_FORMAT ") and current dts %"
+ G_GINT64_FORMAT " (%" GST_TIME_FORMAT ") of %" G_GINT64_FORMAT " (%"
+ GST_STIME_FORMAT ")", first_qt_dts, GST_TIME_ARGS (first_dts), dts,
+ GST_TIME_ARGS (current_dts), dts - first_qt_dts,
+ GST_STIME_ARGS (current_dts - first_dts));
+ atom_traf_set_base_decode_time (pad->traf, dts - first_qt_dts);
+ }
+
+ if (qtmux->fragment_mode == GST_QT_MUX_FRAGMENT_FIRST_MOOV_THEN_FINALISE) {
+ if (qtmux->fragment_sequence > 0 && !force) {
+ if (qtmux->moof_mdat_pos == 0) {
+ /* send temporary mdat */
+ qtmux->moof_mdat_pos = qtmux->header_size;
+ ret = gst_qt_mux_send_mdat_header (qtmux, &qtmux->header_size, 0,
+ TRUE, FALSE);
+ if (ret != GST_FLOW_OK)
+ goto mdat_header_send_error;
+ }
+
+ if (buf) {
+ atom_trak_add_samples (pad->trak, nsamples, (gint32) delta, size,
+ chunk_offset, sync, pts_offset);
+ atom_traf_add_samples (pad->traf, nsamples, delta, size,
+ qtmux->header_size - qtmux->moof_mdat_pos, sync, pts_offset,
+ pad->sync && sync);
+
+ ret = gst_qt_mux_send_buffer (qtmux, buf, &qtmux->header_size, TRUE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ buf = NULL;
+ }
+ }
+ } else {
+ /* add buffer and metadata */
+ atom_traf_add_samples (pad->traf, nsamples, delta, size, 0, sync,
+ pts_offset, pad->sync && sync);
+ GST_LOG_OBJECT (qtmux, "adding buffer %p to fragments", buf);
+ atom_array_append (&pad->fragment_buffers, g_steal_pointer (&buf), 256);
+ }
+ pad->fragment_duration -= delta;
+
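+ /* record a random access entry in the tfra: every sync sample for
+ * streams with sync points, otherwise only the first sample */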
+ if (pad->tfra) {
+ guint32 sn = atom_traf_get_sample_num (pad->traf);
+
+ if ((sync && pad->sync) || (sn == 1 && !pad->sync))
+ atom_tfra_add_entry (pad->tfra, dts, sn);
+ }
+
+ if (G_UNLIKELY (force))
+ goto flush;
+
+ return ret;
+
+ moof_send_error:
+ {
+ guint i;
+
+ GST_ERROR_OBJECT (qtmux, "Failed to send moof buffer");
+ for (i = 0; i < atom_array_get_len (&pad->fragment_buffers); i++)
+ gst_buffer_unref (atom_array_index (&pad->fragment_buffers, i));
+ atom_array_clear (&pad->fragment_buffers);
+ gst_clear_buffer (&buf);
+
+ return ret;
+ }
+
+ mdat_header_send_error:
+ {
+ guint i;
+
+ GST_ERROR_OBJECT (qtmux, "Failed to send mdat header");
+ for (i = 0; i < atom_array_get_len (&pad->fragment_buffers); i++)
+ gst_buffer_unref (atom_array_index (&pad->fragment_buffers, i));
+ atom_array_clear (&pad->fragment_buffers);
+ gst_clear_buffer (&buf);
+
+ return ret;
+ }
+
+ fragment_buf_send_error:
+ {
+ guint i;
+
+ GST_ERROR_OBJECT (qtmux, "Failed to send fragment");
+ for (i = index + 1; i < atom_array_get_len (&pad->fragment_buffers); i++) {
+ gst_buffer_unref (atom_array_index (&pad->fragment_buffers, i));
+ }
+ atom_array_clear (&pad->fragment_buffers);
+ gst_clear_buffer (&buf);
+
+ return ret;
+ }
+ }
+
+ /* Here's the clever bit of robust recording: Updating the moov
+ * header is done using a ping-pong scheme inside 2 blocks of size
+ * 'reserved_moov_size' at the start of the file, in such a way that the
+ * file on-disk is always valid if interrupted.
+ * Inside the reserved space, we have 2 pairs of free + moov atoms
+ * (in that order), free-A + moov-A @ offset 0 and free-B + moov-B
+ * at offset "reserved_moov_size".
+ *
+ * 1. Free-A has 0 size payload, moov-A immediately after is
+ * active/current, and is padded with an internal Free atom to
+ * end at reserved_space/2. Free-B is at reserved_space/2, sized
+ * to cover the remaining free space (including moov-B).
+ * 2. We write moov-B (which is invisible inside free-B), and pad it to
+ * end at the end of free space. Then, we update free-A to size
+ * reserved_space/2 + sizeof(free-B), which hides moov-A and the
+ * free-B header, and makes moov-B active.
+ * 3. Rewrite moov-A inside free-A, with padding out to free-B.
+ * Change the size of free-A to make moov-A active again.
+ * 4. Rinse and repeat.
+ *
+ */
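+ /* A rough on-disk sketch of the two states described above:
+ * moov-A active: [free-A (empty)][moov-A + padding][free-B covering the rest, hiding moov-B]
+ * moov-B active: [free-A grown to hide moov-A and the free-B header][moov-B + padding]
+ */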
+ static GstFlowReturn
+ gst_qt_mux_robust_recording_rewrite_moov (GstQTMux * qtmux)
+ {
+ GstFlowReturn ret;
+ guint64 freeA_offset;
+ guint32 new_freeA_size;
+ guint64 new_moov_offset;
+
+ /* Update moov info, then seek and rewrite the MOOV atom */
+ gst_qt_mux_update_global_statistics (qtmux);
+ gst_qt_mux_configure_moov (qtmux);
+
+ gst_qt_mux_update_edit_lists (qtmux);
+
+ /* tags into file metadata */
+ gst_qt_mux_setup_metadata (qtmux);
+
+ /* chunks position is set relative to the first byte of the
+ * MDAT atom payload. Set the overall offset into the file */
+ atom_moov_chunks_set_offset (qtmux->moov, qtmux->header_size);
+
+ /* Calculate which moov to rewrite. qtmux->moov_pos points to
+ * the start of the free-A header */
+ freeA_offset = qtmux->moov_pos;
+ if (qtmux->reserved_moov_first_active) {
+ GST_DEBUG_OBJECT (qtmux, "Updating pong moov header");
+ /* After this, freeA will include itself, moovA, plus the freeB
+ * header */
+ new_freeA_size = qtmux->reserved_moov_size + 16;
+ } else {
+ GST_DEBUG_OBJECT (qtmux, "Updating ping moov header");
+ new_freeA_size = 8;
+ }
+ /* the moov we update is after free-A, calculate its offset */
+ new_moov_offset = freeA_offset + new_freeA_size;
+
+ /* Swap ping-pong cadence marker */
+ qtmux->reserved_moov_first_active = !qtmux->reserved_moov_first_active;
+
+ /* seek and rewrite the MOOV atom */
+ gst_qt_mux_seek_to (qtmux, new_moov_offset);
+
+ ret =
+ gst_qt_mux_send_moov (qtmux, NULL, qtmux->reserved_moov_size, FALSE,
+ TRUE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ /* Update the estimated recording space remaining, based on amount used so
+ * far and duration muxed so far */
+ if (qtmux->last_moov_size > qtmux->base_moov_size && qtmux->last_dts > 0) {
+ GstClockTime remain;
+ GstClockTime time_muxed = qtmux->last_dts;
+
+ remain =
+ gst_util_uint64_scale (qtmux->reserved_moov_size -
+ qtmux->last_moov_size, time_muxed,
+ qtmux->last_moov_size - qtmux->base_moov_size);
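+ /* linear extrapolation: the moov grew by (last - base) bytes while
+ * muxing time_muxed, so the remaining (reserved - last) bytes should
+ * last proportionally as long */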
+ /* Always under-estimate slightly, so users
+ * have time to stop muxing before we run out */
+ if (remain < GST_SECOND / 2)
+ remain = 0;
+ else
+ remain -= GST_SECOND / 2;
+
+ GST_INFO_OBJECT (qtmux,
+ "Reserved %u header bytes. Used %u in %" GST_TIME_FORMAT
+ ". Remaining now %u or approx %" G_GUINT64_FORMAT " ns\n",
+ qtmux->reserved_moov_size, qtmux->last_moov_size,
+ GST_TIME_ARGS (qtmux->last_dts),
+ qtmux->reserved_moov_size - qtmux->last_moov_size, remain);
+
+ GST_OBJECT_LOCK (qtmux);
+ qtmux->reserved_duration_remaining = remain;
+ qtmux->muxed_since_last_update = 0;
+ GST_DEBUG_OBJECT (qtmux, "reserved remaining duration now %"
+ G_GUINT64_FORMAT, qtmux->reserved_duration_remaining);
+ GST_OBJECT_UNLOCK (qtmux);
+ }
+
+ /* Now update the moov-A size. Don't pass offset, since we don't need
+ * send_free_atom() to seek for us - all our callers seek back to
+ * where they need after this, or they don't need it */
+ gst_qt_mux_seek_to (qtmux, freeA_offset);
+
+ ret = gst_qt_mux_send_free_atom (qtmux, NULL, new_freeA_size, TRUE);
+
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_qt_mux_robust_recording_update (GstQTMux * qtmux, GstClockTime position)
+ {
+ GstFlowReturn flow_ret;
+
+ guint64 mdat_offset = qtmux->mdat_pos + 16 + qtmux->mdat_size;
+
+ GST_OBJECT_LOCK (qtmux);
+
+ /* Update the running total of how much we've muxed, so the
+ * report of remaining space keeps counting down */
+ if (position > qtmux->last_moov_update &&
+ position - qtmux->last_moov_update > qtmux->muxed_since_last_update) {
+ GST_LOG_OBJECT (qtmux,
+ "Muxed time %" G_GUINT64_FORMAT " since last moov update",
+ qtmux->muxed_since_last_update);
+ qtmux->muxed_since_last_update = position - qtmux->last_moov_update;
+ }
+
+ /* Next, check if we're supposed to send periodic moov updates downstream */
+ if (qtmux->reserved_moov_update_period == GST_CLOCK_TIME_NONE) {
+ GST_OBJECT_UNLOCK (qtmux);
+ return GST_FLOW_OK;
+ }
+
+ /* Update if position is > the threshold or there's been no update yet */
+ if (qtmux->last_moov_update != GST_CLOCK_TIME_NONE &&
+ (position <= qtmux->last_moov_update ||
+ (position - qtmux->last_moov_update) <
+ qtmux->reserved_moov_update_period)) {
+ GST_OBJECT_UNLOCK (qtmux);
+ return GST_FLOW_OK; /* No update needed yet */
+ }
+
+ qtmux->last_moov_update = position;
+ GST_OBJECT_UNLOCK (qtmux);
+
+ GST_DEBUG_OBJECT (qtmux, "Update moov atom, position %" GST_TIME_FORMAT
+ " mdat starts @ %" G_GUINT64_FORMAT " we were a %" G_GUINT64_FORMAT,
+ GST_TIME_ARGS (position), qtmux->mdat_pos, mdat_offset);
+
+ flow_ret = gst_qt_mux_robust_recording_rewrite_moov (qtmux);
+ if (G_UNLIKELY (flow_ret != GST_FLOW_OK))
+ return flow_ret;
+
+ /* Seek back to previous position */
+ gst_qt_mux_seek_to (qtmux, mdat_offset);
+
+ return flow_ret;
+ }
+
+ static GstFlowReturn
+ gst_qt_mux_register_and_push_sample (GstQTMux * qtmux, GstQTMuxPad * pad,
+ GstBuffer * buffer, gboolean is_last_buffer, guint nsamples,
+ gint64 last_dts, gint64 scaled_duration, guint sample_size,
+ guint64 chunk_offset, gboolean sync, gboolean do_pts, gint64 pts_offset)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ /* note that a new chunk is started each time (not fancy but works) */
+ if (qtmux->moov_recov_file) {
+ if (!atoms_recov_write_trak_samples (qtmux->moov_recov_file, pad->trak,
+ nsamples, (gint32) scaled_duration, sample_size, chunk_offset, sync,
+ do_pts, pts_offset)) {
+ GST_WARNING_OBJECT (qtmux, "Failed to write sample information to "
+ "recovery file, disabling recovery");
+ fclose (qtmux->moov_recov_file);
+ qtmux->moov_recov_file = NULL;
+ }
+ }
+
+ switch (qtmux->mux_mode) {
+ case GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL:{
+ const TrakBufferEntryInfo *sample_entry;
+ guint64 block_idx = prefill_get_block_index (qtmux, pad);
+
+ if (block_idx >= pad->samples->len) {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Unexpected sample %" G_GUINT64_FORMAT ", expected up to %u",
+ block_idx, pad->samples->len));
+ gst_buffer_unref (buffer);
+ return GST_FLOW_ERROR;
+ }
+
+ /* Check if all values are as expected */
+ sample_entry =
+ &g_array_index (pad->samples, TrakBufferEntryInfo, block_idx);
+
+ /* Allow +/- 1 difference for the scaled_duration to allow
+ * for some rounding errors
+ */
+ if (sample_entry->nsamples != nsamples
+ || ABSDIFF (sample_entry->delta, scaled_duration) > 1
+ || sample_entry->size != sample_size
+ || sample_entry->chunk_offset != chunk_offset
+ || sample_entry->pts_offset != pts_offset
+ || sample_entry->sync != sync) {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Unexpected values in sample %" G_GUINT64_FORMAT,
+ pad->sample_offset + 1));
+ GST_ERROR_OBJECT (qtmux, "Expected: samples %u, delta %u, size %u, "
+ "chunk offset %" G_GUINT64_FORMAT ", "
+ "pts offset %" G_GUINT64_FORMAT ", sync %d",
+ sample_entry->nsamples,
+ sample_entry->delta,
+ sample_entry->size,
+ sample_entry->chunk_offset,
+ sample_entry->pts_offset, sample_entry->sync);
+ GST_ERROR_OBJECT (qtmux, "Got: samples %u, delta %u, size %u, "
+ "chunk offset %" G_GUINT64_FORMAT ", "
+ "pts offset %" G_GUINT64_FORMAT ", sync %d",
+ nsamples,
+ (guint) scaled_duration,
+ sample_size, chunk_offset, pts_offset, sync);
+
+ gst_buffer_unref (buffer);
+ return GST_FLOW_ERROR;
+ }
+
+ ret = gst_qt_mux_send_buffer (qtmux, buffer, &qtmux->mdat_size, TRUE);
+ break;
+ }
+ case GST_QT_MUX_MODE_MOOV_AT_END:
+ case GST_QT_MUX_MODE_FAST_START:
+ case GST_QT_MUX_MODE_ROBUST_RECORDING:
+ atom_trak_add_samples (pad->trak, nsamples, (gint32) scaled_duration,
+ sample_size, chunk_offset, sync, pts_offset);
+ if (qtmux->mux_mode == GST_QT_MUX_MODE_MOOV_AT_END
+ && !qtmux->downstream_seekable) {
+ qtmux->output_buffers = g_list_append (qtmux->output_buffers, buffer);
+ ret = GST_FLOW_OK;
+ } else {
+ ret = gst_qt_mux_send_buffer (qtmux, buffer, &qtmux->mdat_size, TRUE);
+ }
+ /* Check if it's time to re-write the headers in robust-recording mode */
+ if (ret == GST_FLOW_OK
+ && qtmux->mux_mode == GST_QT_MUX_MODE_ROBUST_RECORDING)
+ ret = gst_qt_mux_robust_recording_update (qtmux, pad->total_duration);
+ break;
+ case GST_QT_MUX_MODE_FRAGMENTED:
+ /* streams without sync points have all samples marked as sync */
+ ret = gst_qt_mux_pad_fragment_add_buffer (qtmux, pad, buffer,
+ is_last_buffer, nsamples, last_dts, (gint32) scaled_duration,
+ sample_size, chunk_offset, !pad->sync || sync, pts_offset);
+ break;
+ }
+
+ return ret;
+ }
+
+ static void
+ gst_qt_mux_register_buffer_in_chunk (GstQTMux * qtmux, GstQTMuxPad * pad,
+ guint buffer_size, GstClockTime duration)
+ {
+ /* not that much happens here,
+ * but updating any of this very likely needs to happen all in sync,
+ * unless there is a very good reason not to */
+
+ /* for computing the avg bitrate */
+ pad->total_bytes += buffer_size;
+ pad->total_duration += duration;
+ /* for keeping track of where we are in chunk;
+ * ensures that data really is located as recorded in atoms */
+
+ qtmux->current_chunk_size += buffer_size;
+ qtmux->current_chunk_duration += duration;
+ }
+
+ static GstFlowReturn
+ gst_qt_mux_check_and_update_timecode (GstQTMux * qtmux, GstQTMuxPad * pad,
+ GstBuffer * buf, GstFlowReturn ret)
+ {
+ GstVideoTimeCodeMeta *tc_meta;
+ GstVideoTimeCode *tc;
+ GstBuffer *tc_buf;
+ gsize szret;
+ guint32 frames_since_daily_jam;
+ GstQTMuxClass *qtmux_klass = (GstQTMuxClass *) (G_OBJECT_GET_CLASS (qtmux));
+
+ if (!pad->trak->is_video)
+ return ret;
+
+ if (qtmux_klass->format != GST_QT_MUX_FORMAT_QT &&
+ !qtmux->force_create_timecode_trak)
+ return ret;
+
+ if (buf == NULL || (pad->tc_trak != NULL && pad->tc_pos == -1))
+ return ret;
+
+ tc_meta = gst_buffer_get_video_time_code_meta (buf);
+ if (!tc_meta)
+ return ret;
+
+ tc = &tc_meta->tc;
+
+ /* This means we never got a timecode before */
+ if (pad->first_tc == NULL) {
+ guint64 *offset;
+
+ #ifndef GST_DISABLE_GST_DEBUG
+ gchar *tc_str = gst_video_time_code_to_string (tc);
+ GST_DEBUG_OBJECT (qtmux, "Found first timecode %s", tc_str);
+ g_free (tc_str);
+ #endif
+ g_assert (pad->tc_trak == NULL);
+ pad->first_tc = gst_video_time_code_copy (tc);
+
+ if (qtmux->mux_mode == GST_QT_MUX_MODE_FRAGMENTED
+ && qtmux->fragment_sequence > 0) {
+ offset = &qtmux->header_size;
+ } else {
+ offset = &qtmux->mdat_size;
+ }
+ /* If frames are out of order, the frame we're currently getting might
+ * not be the first one. Just write a 0 timecode for now and wait
+ * until we receive a timecode that's lower than the current one */
+ if (pad->is_out_of_order) {
+ pad->first_pts = GST_BUFFER_PTS (buf);
+ frames_since_daily_jam = 0;
+ /* Position to rewrite */
+ pad->tc_pos = *offset;
+ } else {
+ frames_since_daily_jam =
+ gst_video_time_code_frames_since_daily_jam (pad->first_tc);
+ frames_since_daily_jam = GUINT32_TO_BE (frames_since_daily_jam);
+ }
+ /* Write the timecode trak now */
+ pad->tc_trak = atom_trak_new (qtmux->context);
+ atom_moov_add_trak (qtmux->moov, pad->tc_trak);
+
+ pad->trak->tref = atom_tref_new (FOURCC_tmcd);
+ atom_tref_add_entry (pad->trak->tref, pad->tc_trak->tkhd.track_ID);
+
+ atom_trak_set_timecode_type (pad->tc_trak, qtmux->context,
+ pad->trak->mdia.mdhd.time_info.timescale, pad->first_tc);
+
+ tc_buf = gst_buffer_new_allocate (NULL, 4, NULL);
+ szret = gst_buffer_fill (tc_buf, 0, &frames_since_daily_jam, 4);
+ g_assert (szret == 4);
+
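+ /* the timecode trak carries a single 4-byte sample: the big-endian
+ * frame count since the daily jam */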
+ atom_trak_add_samples (pad->tc_trak, 1, 1, 4, *offset, FALSE, 0);
+
+ if (qtmux->mux_mode == GST_QT_MUX_MODE_MOOV_AT_END
+ && !qtmux->downstream_seekable) {
+ ret = gst_qtmux_push_mdat_stored_buffers (qtmux);
+ qtmux->output_buffers = g_list_append (qtmux->output_buffers, tc_buf);
+ ret = GST_FLOW_OK;
+ } else {
+ ret = gst_qt_mux_send_buffer (qtmux, tc_buf, offset, TRUE);
+ }
+
+ /* Need to reset the current chunk (of the previous pad) here because
+ * some other data was just written above, and the pad has to start a
+ * new chunk now */
+ qtmux->current_chunk_offset = -1;
+ qtmux->current_chunk_size = 0;
+ qtmux->current_chunk_duration = 0;
+ } else if (qtmux->mux_mode == GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL) {
+ frames_since_daily_jam =
+ gst_video_time_code_frames_since_daily_jam (pad->first_tc);
+ frames_since_daily_jam = GUINT32_TO_BE (frames_since_daily_jam);
+
+ tc_buf = gst_buffer_new_allocate (NULL, 4, NULL);
+ szret = gst_buffer_fill (tc_buf, 0, &frames_since_daily_jam, 4);
+ g_assert (szret == 4);
+
+ if (qtmux->mux_mode == GST_QT_MUX_MODE_MOOV_AT_END
+ && !qtmux->downstream_seekable) {
+ ret = gst_qtmux_push_mdat_stored_buffers (qtmux);
+ qtmux->output_buffers = g_list_append (qtmux->output_buffers, tc_buf);
+ ret = GST_FLOW_OK;
+ } else {
+ ret = gst_qt_mux_send_buffer (qtmux, tc_buf, &qtmux->mdat_size, TRUE);
+ }
+ pad->tc_pos = -1;
+
+ qtmux->current_chunk_offset = -1;
+ qtmux->current_chunk_size = 0;
+ qtmux->current_chunk_duration = 0;
+ } else if (pad->is_out_of_order) {
+ /* Check for a lower timecode than the one stored */
+ g_assert (pad->tc_trak != NULL);
+ if (GST_BUFFER_DTS (buf) <= pad->first_pts) {
+ if (gst_video_time_code_compare (tc, pad->first_tc) == -1) {
+ gst_video_time_code_free (pad->first_tc);
+ pad->first_tc = gst_video_time_code_copy (tc);
+ }
+ } else {
+ guint64 bk_size = qtmux->mdat_size;
+ /* If this frame's DTS is after the first PTS received, it means
+ * we've already received the first frame to be presented. Otherwise
+ * the decoder would need to go back in time */
+ gst_qt_mux_update_timecode (qtmux, pad);
+
+ /* Reset writing position */
+ gst_qt_mux_seek_to (qtmux, bk_size);
+ }
+ }
+
+ return ret;
+ }
+
+ /*
+ * Here we push the buffer and update the tables in the track atoms
+ */
+ static GstFlowReturn
+ gst_qt_mux_add_buffer (GstQTMux * qtmux, GstQTMuxPad * pad, GstBuffer * buf)
+ {
+ GstBuffer *last_buf = NULL;
+ GstClockTime duration;
+ guint nsamples, sample_size;
+ guint64 chunk_offset;
+ gint64 last_dts, scaled_duration;
+ gint64 pts_offset = 0;
+ gboolean sync = FALSE;
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint buffer_size;
+
+ /* GAP event, nothing to do */
+ if (buf && gst_buffer_get_size (buf) == 0 &&
+ GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_GAP)) {
+ gst_buffer_unref (buf);
+ return GST_FLOW_OK;
+ }
+
+ if (!pad->fourcc)
+ goto not_negotiated;
+
+ /* if this pad has a prepare function, call it */
+ if (pad->prepare_buf_func != NULL) {
+ GstBuffer *new_buf;
+
+ new_buf = pad->prepare_buf_func (pad, buf, qtmux);
+ if (buf && !new_buf)
+ return GST_FLOW_OK;
+ buf = new_buf;
+ }
+
+ ret = gst_qt_mux_check_and_update_timecode (qtmux, pad, buf, ret);
+ if (ret != GST_FLOW_OK) {
+ if (buf)
+ gst_buffer_unref (buf);
+ return ret;
+ }
+
+ last_buf = pad->last_buf;
+ pad->last_buf = buf;
+
+ if (last_buf == NULL) {
+ #ifndef GST_DISABLE_GST_DEBUG
+ if (buf == NULL) {
+ GST_DEBUG_OBJECT (qtmux, "Pad %s has no previous buffer stored and "
+ "received NULL buffer, doing nothing", GST_PAD_NAME (pad));
+ } else {
+ GST_LOG_OBJECT (qtmux,
+ "Pad %s has no previous buffer stored, storing now",
+ GST_PAD_NAME (pad));
+ }
+ #endif
+ goto exit;
+ }
+
+ if (!GST_BUFFER_PTS_IS_VALID (last_buf))
+ goto no_pts;
+
+ /* if this is the first buffer, store the timestamp */
+ if (G_UNLIKELY (pad->first_ts == GST_CLOCK_TIME_NONE)) {
+ if (GST_BUFFER_PTS_IS_VALID (last_buf)) {
+ pad->first_ts = GST_BUFFER_PTS (last_buf);
+ } else if (GST_BUFFER_DTS_IS_VALID (last_buf)) {
+ pad->first_ts = GST_BUFFER_DTS (last_buf);
+ }
+
+ if (GST_BUFFER_DTS_IS_VALID (last_buf)) {
+ pad->first_dts = pad->last_dts = GST_BUFFER_DTS (last_buf);
+ } else if (GST_BUFFER_PTS_IS_VALID (last_buf)) {
+ pad->first_dts = pad->last_dts = GST_BUFFER_PTS (last_buf);
+ }
+
+ if (GST_CLOCK_TIME_IS_VALID (pad->first_ts)) {
+ GST_DEBUG ("setting first_ts to %" G_GUINT64_FORMAT, pad->first_ts);
+ } else {
+ GST_WARNING_OBJECT (qtmux, "First buffer for pad %s has no timestamp, "
+ "using 0 as first timestamp", GST_PAD_NAME (pad));
+ pad->first_ts = pad->first_dts = 0;
+ }
+ GST_DEBUG_OBJECT (qtmux, "Stored first timestamp for pad %s %"
+ GST_TIME_FORMAT, GST_PAD_NAME (pad), GST_TIME_ARGS (pad->first_ts));
+ }
+
+ if (buf && GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DTS (buf)) &&
+ GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DTS (last_buf)) &&
+ GST_BUFFER_DTS (buf) < GST_BUFFER_DTS (last_buf)) {
+ GST_ERROR ("decreasing DTS value %" GST_TIME_FORMAT " < %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (GST_BUFFER_DTS (buf)),
+ GST_TIME_ARGS (GST_BUFFER_DTS (last_buf)));
+ pad->last_buf = buf = gst_buffer_make_writable (buf);
+ GST_BUFFER_DTS (buf) = GST_BUFFER_DTS (last_buf);
+ }
+
+ buffer_size = gst_buffer_get_size (last_buf);
+
+ if (qtmux->mux_mode == GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL) {
+ guint required_buffer_size = prefill_get_sample_size (qtmux, pad);
+ guint fill_size = required_buffer_size - buffer_size;
+ GstMemory *mem;
+ GstMapInfo map;
+
+ if (required_buffer_size < buffer_size) {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Sample size %u bigger than expected maximum %u", buffer_size,
+ required_buffer_size));
+ goto bail;
+ }
+
+ if (fill_size > 0) {
+ GST_DEBUG_OBJECT (qtmux,
+ "Padding buffer by %u bytes to reach required %u bytes", fill_size,
+ required_buffer_size);
+ mem = gst_allocator_alloc (NULL, fill_size, NULL);
+ gst_memory_map (mem, &map, GST_MAP_WRITE);
+ memset (map.data, 0, map.size);
+ gst_memory_unmap (mem, &map);
+ last_buf = gst_buffer_make_writable (last_buf);
+ gst_buffer_append_memory (last_buf, mem);
+ buffer_size = required_buffer_size;
+ }
+ }
+
+ /* duration actually means time delta between samples, so we calculate
+ * the duration based on the difference in DTS or PTS, falling back
+ * to DURATION if the other two don't exist, such as with the last
+ * sample before EOS. Or use 0 if nothing else is available,
+ * making sure that duration doesn't go negative and wrap around. */
+ if (GST_BUFFER_DURATION_IS_VALID (last_buf))
+ duration = GST_BUFFER_DURATION (last_buf);
+ else
+ duration = 0;
+ if (!pad->sparse) {
+ if (buf && GST_BUFFER_DTS_IS_VALID (buf)
+ && GST_BUFFER_DTS_IS_VALID (last_buf)) {
+ if (GST_BUFFER_DTS (buf) >= GST_BUFFER_DTS (last_buf))
+ duration = GST_BUFFER_DTS (buf) - GST_BUFFER_DTS (last_buf);
+ } else if (buf && GST_BUFFER_PTS_IS_VALID (buf)
+ && GST_BUFFER_PTS_IS_VALID (last_buf)) {
+ if (GST_BUFFER_PTS (buf) >= GST_BUFFER_PTS (last_buf))
+ duration = GST_BUFFER_PTS (buf) - GST_BUFFER_PTS (last_buf);
+ }
+ if (duration == 0 && !pad->warned_empty_duration) {
+ GST_WARNING_OBJECT (qtmux,
+ "Sample with zero duration on pad %" GST_PTR_FORMAT
+ " due to missing or backward timestamps on the input stream", pad);
+ pad->warned_empty_duration = TRUE;
+ }
+ }
+
+ if (qtmux->current_pad != pad || qtmux->current_chunk_offset == -1) {
+ GST_DEBUG_OBJECT (qtmux,
+ "Switching to next chunk for pad %s:%s: offset %" G_GUINT64_FORMAT
+ ", size %" G_GUINT64_FORMAT ", duration %" GST_TIME_FORMAT,
+ GST_DEBUG_PAD_NAME (pad), qtmux->current_chunk_offset,
+ qtmux->current_chunk_size,
+ GST_TIME_ARGS (qtmux->current_chunk_duration));
+ qtmux->current_pad = pad;
+ if (qtmux->current_chunk_offset == -1)
+ qtmux->current_chunk_offset = qtmux->mdat_size;
+ else
+ qtmux->current_chunk_offset += qtmux->current_chunk_size;
+ qtmux->current_chunk_size = 0;
+ qtmux->current_chunk_duration = 0;
+ }
+
+ last_dts = gst_util_uint64_scale_round (pad->last_dts,
+ atom_trak_get_timescale (pad->trak), GST_SECOND);
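+ /* last_dts converted to trak timescale units; the per-sample deltas
+ * below are derived against this value */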
+
+ /* fragments only deal with 1 buffer == 1 chunk (== 1 sample) */
+ if (pad->sample_size && (qtmux->mux_mode != GST_QT_MUX_MODE_FRAGMENTED
+ || qtmux->fragment_mode ==
+ GST_QT_MUX_FRAGMENT_FIRST_MOOV_THEN_FINALISE)) {
+ GstClockTime expected_timestamp;
+
+ /* Constant size packets: usually raw audio (with many samples per
+ buffer (= chunk)), but can also be fixed-packet-size codecs like ADPCM
+ */
+ sample_size = pad->sample_size;
+ if (buffer_size % sample_size != 0)
+ goto fragmented_sample;
+
+ /* note: qt raw audio storage warps it implicitly into a timewise
+ * perfect stream, discarding buffer times.
+ * If the difference between the current PTS and the expected one
+ * becomes too big, we error out: there was a gap and we have no way to
+ * represent that, causing A/V sync to be off */
+ expected_timestamp =
+ gst_util_uint64_scale (pad->sample_offset, GST_SECOND,
+ atom_trak_get_timescale (pad->trak)) + pad->first_ts;
+ if (ABSDIFF (GST_BUFFER_DTS_OR_PTS (last_buf),
+ expected_timestamp) > qtmux->max_raw_audio_drift)
+ goto raw_audio_timestamp_drift;
+
+ if (GST_BUFFER_DURATION (last_buf) != GST_CLOCK_TIME_NONE) {
+ nsamples = gst_util_uint64_scale_round (GST_BUFFER_DURATION (last_buf),
+ atom_trak_get_timescale (pad->trak), GST_SECOND);
+ duration = GST_BUFFER_DURATION (last_buf);
+ } else {
+ nsamples = buffer_size / sample_size;
+ duration =
+ gst_util_uint64_scale_round (nsamples, GST_SECOND,
+ atom_trak_get_timescale (pad->trak));
+ }
+
+ /* timescale = samplerate */
+ scaled_duration = 1;
+ pad->last_dts =
+ pad->first_dts + gst_util_uint64_scale_round (pad->sample_offset +
+ nsamples, GST_SECOND, atom_trak_get_timescale (pad->trak));
+ } else {
+ nsamples = 1;
+ sample_size = buffer_size;
+ if (!pad->sparse && ((buf && GST_BUFFER_DTS_IS_VALID (buf))
+ || GST_BUFFER_DTS_IS_VALID (last_buf))) {
+ gint64 scaled_dts;
+ if (buf && GST_BUFFER_DTS_IS_VALID (buf)) {
+ pad->last_dts = GST_BUFFER_DTS (buf);
+ } else {
+ pad->last_dts = GST_BUFFER_DTS (last_buf) + duration;
+ }
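+ /* scale the magnitude separately for negative DTS, since the unsigned
+ * scaling helper would otherwise wrap around */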
+ if ((gint64) (pad->last_dts) < 0) {
+ scaled_dts = -gst_util_uint64_scale_round (-pad->last_dts,
+ atom_trak_get_timescale (pad->trak), GST_SECOND);
+ } else {
+ scaled_dts = gst_util_uint64_scale_round (pad->last_dts,
+ atom_trak_get_timescale (pad->trak), GST_SECOND);
+ }
+ scaled_duration = scaled_dts - last_dts;
+ last_dts = scaled_dts;
+ } else {
+ /* first convert intended timestamp (in GstClockTime resolution) to
+ * trak timescale, then derive delta;
+ * this ensures sums of (scale)delta add up to converted timestamp,
+ * which only deviates at most 1/scale from timestamp itself */
+ scaled_duration = gst_util_uint64_scale_round (pad->last_dts + duration,
+ atom_trak_get_timescale (pad->trak), GST_SECOND) - last_dts;
+ pad->last_dts += duration;
+ }
+ }
+
+ gst_qt_mux_register_buffer_in_chunk (qtmux, pad, buffer_size, duration);
+
+ chunk_offset = qtmux->current_chunk_offset;
+
+ GST_LOG_OBJECT (qtmux,
+ "Pad (%s) dts updated to %" GST_TIME_FORMAT,
+ GST_PAD_NAME (pad), GST_TIME_ARGS (pad->last_dts));
+ GST_LOG_OBJECT (qtmux,
+ "Adding %d samples to track, duration: %" G_GUINT64_FORMAT
+ " size: %" G_GUINT32_FORMAT " chunk offset: %" G_GUINT64_FORMAT,
+ nsamples, scaled_duration, sample_size, chunk_offset);
+
+ /* might be a sync sample */
+ if (pad->sync &&
+ !GST_BUFFER_FLAG_IS_SET (last_buf, GST_BUFFER_FLAG_DELTA_UNIT)) {
+ GST_LOG_OBJECT (qtmux, "Adding new sync sample entry for track of pad %s",
+ GST_PAD_NAME (pad));
+ sync = TRUE;
+ }
+
+ if (GST_BUFFER_DTS_IS_VALID (last_buf)) {
+ last_dts = gst_util_uint64_scale_round (GST_BUFFER_DTS (last_buf),
+ atom_trak_get_timescale (pad->trak), GST_SECOND);
+ pts_offset =
+ (gint64) (gst_util_uint64_scale_round (GST_BUFFER_PTS (last_buf),
+ atom_trak_get_timescale (pad->trak), GST_SECOND) - last_dts);
+ } else {
+ pts_offset = 0;
+ last_dts = gst_util_uint64_scale_round (GST_BUFFER_PTS (last_buf),
+ atom_trak_get_timescale (pad->trak), GST_SECOND);
+ }
+ GST_DEBUG ("dts: %" GST_TIME_FORMAT " pts: %" GST_TIME_FORMAT
+ " timebase_dts: %d pts_offset: %d",
+ GST_TIME_ARGS (GST_BUFFER_DTS (last_buf)),
+ GST_TIME_ARGS (GST_BUFFER_PTS (last_buf)),
+ (int) (last_dts), (int) (pts_offset));
+
+ if (GST_CLOCK_TIME_IS_VALID (duration)
+ && (qtmux->current_chunk_duration > qtmux->longest_chunk
+ || !GST_CLOCK_TIME_IS_VALID (qtmux->longest_chunk))) {
+ GST_DEBUG_OBJECT (qtmux,
+ "New longest chunk found: %" GST_TIME_FORMAT ", pad %s",
+ GST_TIME_ARGS (qtmux->current_chunk_duration), GST_PAD_NAME (pad));
+ qtmux->longest_chunk = qtmux->current_chunk_duration;
+ }
+
+ if (qtmux->mux_mode == GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL) {
+ const TrakBufferEntryInfo *sample_entry;
+ guint64 block_idx = prefill_get_block_index (qtmux, pad);
+
+ if (block_idx >= pad->samples->len) {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Unexpected sample %" G_GUINT64_FORMAT ", expected up to %u",
+ block_idx, pad->samples->len));
+ goto bail;
+ }
+
+ /* fetch the precomputed sample entry; only its chunk offset is
+ * validated here */
+ sample_entry =
+ &g_array_index (pad->samples, TrakBufferEntryInfo, block_idx);
+
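+ /* if we are behind the precomputed chunk offset, pad the mdat with
+ * zeros up to where this chunk is supposed to start */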
+ if (chunk_offset < sample_entry->chunk_offset) {
+ guint fill_size = sample_entry->chunk_offset - chunk_offset;
+ GstBuffer *fill_buf;
+
+ fill_buf = gst_buffer_new_allocate (NULL, fill_size, NULL);
+ gst_buffer_memset (fill_buf, 0, 0, fill_size);
+
+ ret = gst_qt_mux_send_buffer (qtmux, fill_buf, &qtmux->mdat_size, TRUE);
+ if (ret != GST_FLOW_OK)
+ goto bail;
+ qtmux->current_chunk_offset = chunk_offset = sample_entry->chunk_offset;
+ qtmux->current_chunk_size = buffer_size;
+ qtmux->current_chunk_duration = duration;
+ } else if (chunk_offset != sample_entry->chunk_offset) {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Unexpected chunk offset %" G_GUINT64_FORMAT ", expected up to %"
+ G_GUINT64_FORMAT, chunk_offset, sample_entry->chunk_offset));
+ goto bail;
+ }
+ }
+
+ /* now we go and register this buffer/sample all over */
+ pad->flow_status = gst_qt_mux_register_and_push_sample (qtmux, pad, last_buf,
+ buf == NULL, nsamples, last_dts, scaled_duration, sample_size,
+ chunk_offset, sync, TRUE, pts_offset);
+ if (pad->flow_status != GST_FLOW_OK)
+ goto sample_error;
+
+ pad->sample_offset += nsamples;
+
+ /* if this is sparse and we have a next buffer, check if there is any gap
+ * between them to insert an empty sample */
+ if (pad->sparse && buf) {
+ if (pad->create_empty_buffer) {
+ GstBuffer *empty_buf;
+ gint64 empty_duration =
+ GST_BUFFER_PTS (buf) - (GST_BUFFER_PTS (last_buf) + duration);
+ gint64 empty_duration_scaled;
+ guint empty_size;
+
+ empty_buf = pad->create_empty_buffer (pad, empty_duration);
+
+ pad->last_dts = GST_BUFFER_PTS (buf);
+ empty_duration_scaled = gst_util_uint64_scale_round (pad->last_dts,
+ atom_trak_get_timescale (pad->trak), GST_SECOND)
+ - (last_dts + scaled_duration);
+ empty_size = gst_buffer_get_size (empty_buf);
+
+ gst_qt_mux_register_buffer_in_chunk (qtmux, pad, empty_size,
+ empty_duration);
+
+ ret =
+ gst_qt_mux_register_and_push_sample (qtmux, pad, empty_buf, FALSE, 1,
+ last_dts + scaled_duration, empty_duration_scaled,
+ empty_size, chunk_offset, sync, TRUE, 0);
+ } else if (pad->fourcc != FOURCC_c608 && pad->fourcc != FOURCC_c708) {
+ /* This assert is kept here to make sure implementors of new
+ * sparse input formats decide whether there needs to be special
+ * gap handling or not */
+ g_assert_not_reached ();
+ GST_WARNING_OBJECT (qtmux,
+ "no empty buffer creation function found for pad %s",
+ GST_PAD_NAME (pad));
+ }
+ }
+
++#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
++ gst_qt_mux_update_expected_trailer_size(qtmux, pad);
++#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
++
+ exit:
+
+ return ret;
+
+ /* ERRORS */
+ bail:
+ {
+ gst_buffer_unref (last_buf);
+ return GST_FLOW_ERROR;
+ }
+ fragmented_sample:
+ {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Audio buffer contains fragmented sample."));
+ goto bail;
+ }
+ raw_audio_timestamp_drift:
+ {
+ /* TODO: Could in theory be implemented with edit lists */
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Audio stream timestamps are drifting (got %" GST_TIME_FORMAT
+ ", expected %" GST_TIME_FORMAT "). This is not supported yet!",
+ GST_TIME_ARGS (GST_BUFFER_DTS_OR_PTS (last_buf)),
+ GST_TIME_ARGS (gst_util_uint64_scale (pad->sample_offset,
+ GST_SECOND,
+ atom_trak_get_timescale (pad->trak)) + pad->first_ts)));
+ goto bail;
+ }
+ no_pts:
+ {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL), ("Buffer has no PTS."));
+ goto bail;
+ }
+ not_negotiated:
+ {
+ GST_ELEMENT_ERROR (qtmux, CORE, NEGOTIATION, (NULL),
+ ("format wasn't negotiated before buffer flow on pad %s",
+ GST_PAD_NAME (pad)));
+ if (buf)
+ gst_buffer_unref (buf);
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+ sample_error:
+ {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL), ("Failed to push sample."));
+ return pad->flow_status;
+ }
+ }
+
+ /*
+ * DTS running time can be negative. There is no way to represent that in
+ * MP4 however, thus we need to offset DTS so that it starts from 0.
+ */
+ static void
+ gst_qt_pad_adjust_buffer_dts (GstQTMux * qtmux, GstQTMuxPad * pad,
+ GstBuffer ** buf)
+ {
+ GstClockTime pts;
+ gint64 dts;
+
+ pts = GST_BUFFER_PTS (*buf);
+ dts = pad->dts;
+
+ GST_LOG_OBJECT (qtmux, "selected pad %s with PTS %" GST_TIME_FORMAT
+ " and DTS %" GST_STIME_FORMAT, GST_PAD_NAME (pad),
+ GST_TIME_ARGS (pts), GST_STIME_ARGS (dts));
+
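+ /* the very first DTS seen determines the adjustment: a negative start
+ * is shifted up so the stream begins at 0 */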
+ if (!GST_CLOCK_TIME_IS_VALID (pad->dts_adjustment)) {
+ if (GST_CLOCK_STIME_IS_VALID (dts) && dts < 0)
+ pad->dts_adjustment = -dts;
+ else
+ pad->dts_adjustment = 0;
+ }
+
+ if (pad->dts_adjustment > 0) {
+ *buf = gst_buffer_make_writable (*buf);
+
+ dts += pad->dts_adjustment;
+
+ if (GST_CLOCK_TIME_IS_VALID (pts))
+ pts += pad->dts_adjustment;
+
+ if (GST_CLOCK_STIME_IS_VALID (dts) && dts < 0) {
+ GST_WARNING_OBJECT (pad, "Decreasing DTS.");
+ dts = 0;
+ }
+
+ if (pts < dts) {
+ GST_WARNING_OBJECT (pad, "DTS is bigger then PTS");
+ pts = dts;
+ }
+
+ GST_BUFFER_PTS (*buf) = pts;
+ GST_BUFFER_DTS (*buf) = dts;
+
+ GST_LOG_OBJECT (qtmux, "time adjusted to PTS %" GST_TIME_FORMAT
+ " and DTS %" GST_TIME_FORMAT, GST_TIME_ARGS (pts), GST_TIME_ARGS (dts));
+ }
+ }
+
+ static GstQTMuxPad *
+ find_best_pad (GstQTMux * qtmux)
+ {
+ GList *l;
+ GstQTMuxPad *best_pad = NULL;
+
+ if (qtmux->mux_mode == GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL) {
+ guint64 smallest_offset = G_MAXUINT64;
+ guint64 chunk_offset = 0;
+
+ GST_OBJECT_LOCK (qtmux);
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qtpad = (GstQTMuxPad *) l->data;
+ const TrakBufferEntryInfo *sample_entry;
+ guint64 block_idx, current_block_idx;
+ guint64 chunk_offset_offset = 0;
+ GstBuffer *tmp_buf =
+ gst_aggregator_pad_peek_buffer (GST_AGGREGATOR_PAD (qtpad));
+
+ /* Check for EOS pads and just skip them */
+ if (!tmp_buf && !qtpad->last_buf && (!qtpad->raw_audio_adapter
+ || gst_adapter_available (qtpad->raw_audio_adapter) == 0))
+ continue;
+ if (tmp_buf)
+ gst_buffer_unref (tmp_buf);
+
+ /* Find the exact offset where the next sample of this track is supposed
+ * to be written at */
+ block_idx = current_block_idx = prefill_get_block_index (qtmux, qtpad);
+ if (!qtpad->samples || block_idx >= qtpad->samples->len) {
+ GST_ELEMENT_ERROR (qtmux, RESOURCE, SETTINGS,
+ ("Failed to create samples in prefill mode"), (NULL));
+ /* drop the object lock taken above before bailing out */
+ GST_OBJECT_UNLOCK (qtmux);
+ return NULL;
+ }
+
+ sample_entry =
+ &g_array_index (qtpad->samples, TrakBufferEntryInfo, block_idx);
+ while (block_idx > 0) {
+ const TrakBufferEntryInfo *tmp =
+ &g_array_index (qtpad->samples, TrakBufferEntryInfo, block_idx - 1);
+
+ if (tmp->chunk_offset != sample_entry->chunk_offset)
+ break;
+ chunk_offset_offset += tmp->size * tmp->nsamples;
+ block_idx--;
+ }
+
+ /* Except for the previously selected pad being EOS we always have
+ * qtmux->current_chunk_offset + qtmux->current_chunk_size
+ * ==
+ * sample_entry->chunk_offset + chunk_offset_offset
+ * for the best pad. Instead of checking that, we just return the
+ * pad that has the smallest offset for the next to-be-written sample.
+ */
+ if (sample_entry->chunk_offset + chunk_offset_offset < smallest_offset) {
+ smallest_offset = sample_entry->chunk_offset + chunk_offset_offset;
+ best_pad = qtpad;
+ chunk_offset = sample_entry->chunk_offset;
+ }
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+
+ if (chunk_offset != qtmux->current_chunk_offset) {
+ qtmux->current_pad = NULL;
+ }
+
+ return best_pad;
+ }
+
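+ /* keep filling the current pad's chunk while the configured interleave
+ * limits (bytes and/or time) are not yet exceeded; fragmented mode
+ * always switches per buffer */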
+ if (qtmux->current_pad && (qtmux->interleave_bytes != 0
+ || qtmux->interleave_time != 0) && (qtmux->interleave_bytes == 0
+ || qtmux->current_chunk_size <= qtmux->interleave_bytes)
+ && (qtmux->interleave_time == 0
+ || qtmux->current_chunk_duration <= qtmux->interleave_time)
+ && qtmux->mux_mode != GST_QT_MUX_MODE_FRAGMENTED) {
+ GstBuffer *tmp_buf =
+ gst_aggregator_pad_peek_buffer (GST_AGGREGATOR_PAD
+ (qtmux->current_pad));
+
+ if (tmp_buf || qtmux->current_pad->last_buf) {
+ best_pad = qtmux->current_pad;
+ if (tmp_buf)
+ gst_buffer_unref (tmp_buf);
+ GST_DEBUG_OBJECT (qtmux, "Reusing pad %s:%s",
+ GST_DEBUG_PAD_NAME (best_pad));
+ }
+ } else {
+ gboolean push_stored = FALSE;
+
+ GST_OBJECT_LOCK (qtmux);
+ if ((GST_ELEMENT (qtmux)->sinkpads && GST_ELEMENT (qtmux)->sinkpads->next)
+ || qtmux->force_chunks) {
+ /* Only switch pads if we have more than one, otherwise
+ * we can just put everything into a single chunk and save
+ * a few bytes of offsets.
+ *
+ * Various applications and the Apple ProRes spec require chunking even
+ * in case of single stream files.
+ */
+ if (qtmux->current_pad)
+ GST_DEBUG_OBJECT (qtmux, "Switching from pad %s:%s",
+ GST_DEBUG_PAD_NAME (qtmux->current_pad));
+ best_pad = qtmux->current_pad = NULL;
+ push_stored = TRUE;
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+
+ if (push_stored)
+ gst_qtmux_push_mdat_stored_buffers (qtmux);
+ }
+
+ if (!best_pad) {
+ GstClockTime best_time = GST_CLOCK_TIME_NONE;
+
+ GST_OBJECT_LOCK (qtmux);
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qtpad = (GstQTMuxPad *) l->data;
+ GstBuffer *tmp_buf;
+ GstClockTime timestamp;
+
+ tmp_buf = gst_aggregator_pad_peek_buffer (GST_AGGREGATOR_PAD (qtpad));
+ if (!tmp_buf) {
+ /* This one is newly EOS now, finish it for real */
+ if (qtpad->last_buf) {
+ timestamp = GST_BUFFER_DTS_OR_PTS (qtpad->last_buf);
+ } else {
+ continue;
+ }
+ } else {
+ if (qtpad->last_buf)
+ timestamp = GST_BUFFER_DTS_OR_PTS (qtpad->last_buf);
+ else
+ timestamp = GST_BUFFER_DTS_OR_PTS (tmp_buf);
+ }
+
+ if (best_pad == NULL ||
+ !GST_CLOCK_TIME_IS_VALID (best_time) || timestamp < best_time) {
+ best_pad = qtpad;
+ best_time = timestamp;
+ }
+
+ if (tmp_buf)
+ gst_buffer_unref (tmp_buf);
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+
+ if (best_pad) {
+ GST_DEBUG_OBJECT (qtmux, "Choosing pad %s:%s",
+ GST_DEBUG_PAD_NAME (best_pad));
+ } else {
+ GST_DEBUG_OBJECT (qtmux, "No best pad: EOS");
+ }
+ }
+
+ return best_pad;
+ }
+
+ static gboolean
+ gst_qt_mux_are_all_pads_eos (GstQTMux * mux)
+ {
+ GList *l;
+ gboolean ret = TRUE;
+
+ GST_OBJECT_LOCK (mux);
+ for (l = GST_ELEMENT_CAST (mux)->sinkpads; l; l = l->next) {
+ if (!gst_aggregator_pad_is_eos (GST_AGGREGATOR_PAD (l->data))) {
+ ret = FALSE;
+ break;
+ }
+ }
+ GST_OBJECT_UNLOCK (mux);
+
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_qt_mux_aggregate (GstAggregator * agg, gboolean timeout)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstQTMux *qtmux = GST_QT_MUX_CAST (agg);
+ GstQTMuxPad *best_pad = NULL;
+
+ if (G_UNLIKELY (qtmux->state == GST_QT_MUX_STATE_STARTED)) {
+ if ((ret = gst_qt_mux_start_file (qtmux)) != GST_FLOW_OK)
+ return ret;
+
+ qtmux->state = GST_QT_MUX_STATE_DATA;
+ }
+
+ if (G_UNLIKELY (qtmux->state == GST_QT_MUX_STATE_EOS))
+ return GST_FLOW_EOS;
+
+ best_pad = find_best_pad (qtmux);
+
+ /* clipping already converted to running time */
+ if (best_pad != NULL) {
+ GstBuffer *buf = NULL;
+
+ /* FIXME: the function should always return flow_status information, that
+ * is supposed to be stored each time buffers (collected from the pads)
+ * are pushed. */
+ if (best_pad->flow_status != GST_FLOW_OK)
+ return best_pad->flow_status;
+
+ if (qtmux->mux_mode != GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL ||
+ best_pad->raw_audio_adapter == NULL ||
+ best_pad->raw_audio_adapter_pts == GST_CLOCK_TIME_NONE)
+ buf = gst_aggregator_pad_pop_buffer (GST_AGGREGATOR_PAD (best_pad));
+
+ g_assert (buf || best_pad->last_buf || (best_pad->raw_audio_adapter
+ && gst_adapter_available (best_pad->raw_audio_adapter) > 0));
+
+ if (buf)
+ gst_qt_pad_adjust_buffer_dts (qtmux, best_pad, &buf);
+
+ ret = gst_qt_mux_add_buffer (qtmux, best_pad, buf);
+ } else if (gst_qt_mux_are_all_pads_eos (qtmux)) {
+
+ qtmux->state = GST_QT_MUX_STATE_EOS;
+ ret = gst_qt_mux_stop_file (qtmux);
+ if (ret == GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (qtmux, "We are eos");
+ ret = GST_FLOW_EOS;
+ } else {
+ GST_WARNING_OBJECT (qtmux, "Failed to stop file: %s",
+ gst_flow_get_name (ret));
+ }
+ }
+
+ return ret;
+ }
+
+ static gboolean
+ check_field (GQuark field_id, const GValue * value, gpointer user_data)
+ {
+ GstStructure *structure = (GstStructure *) user_data;
+ const GValue *other = gst_structure_id_get_value (structure, field_id);
+ const gchar *name = gst_structure_get_name (structure);
+
+ if (g_str_has_prefix (name, "video/")) {
+ /* ignore framerate with video caps */
+ if (g_strcmp0 (g_quark_to_string (field_id), "framerate") == 0)
+ return TRUE;
+ }
+
+ if (g_strcmp0 (name, "video/x-h264") == 0 ||
+ g_strcmp0 (name, "video/x-h265") == 0) {
+ /* We support muxing multiple codec_data structures, and the new SPS
+ * will contain updated tier / level / profiles, which means we do
+ * not need to fail renegotiation when those change.
+ */
+ if (g_strcmp0 (g_quark_to_string (field_id), "codec_data") == 0) {
+ return TRUE;
+ } else if (g_strcmp0 (g_quark_to_string (field_id), "tier") == 0) {
+ return TRUE;
+ } else if (g_strcmp0 (g_quark_to_string (field_id), "level") == 0) {
+ return TRUE;
+ } else if (g_strcmp0 (g_quark_to_string (field_id), "profile") == 0) {
+ return TRUE;
+ } else if (g_strcmp0 (g_quark_to_string (field_id), "chroma-format") == 0) {
+ return TRUE;
+ } else if (g_strcmp0 (g_quark_to_string (field_id), "bit-depth-luma") == 0) {
+ return TRUE;
+ } else if (g_strcmp0 (g_quark_to_string (field_id),
+ "bit-depth-chroma") == 0) {
+ return TRUE;
+ } else if (g_strcmp0 (g_quark_to_string (field_id), "colorimetry") == 0) {
+ return TRUE;
+ } else if (g_strcmp0 (g_quark_to_string (field_id), "width") == 0) {
+ /* TODO: this may require a separate track but gst, vlc, ffmpeg and
+ * browsers work with this so... */
+ return TRUE;
+ } else if (g_strcmp0 (g_quark_to_string (field_id), "height") == 0) {
+ return TRUE;
+ }
+ }
+
+ if (other == NULL)
+ return FALSE;
+ return gst_value_compare (value, other) == GST_VALUE_EQUAL;
+ }
+
+ static gboolean
+ gst_qtmux_caps_is_subset_full (GstQTMux * qtmux, GstCaps * subset,
+ GstCaps * superset)
+ {
+ GstStructure *sub_s = gst_caps_get_structure (subset, 0);
+ GstStructure *sup_s = gst_caps_get_structure (superset, 0);
+
+ if (!gst_structure_has_name (sup_s, gst_structure_get_name (sub_s)))
+ return FALSE;
+
+ return gst_structure_foreach (sub_s, check_field, sup_s);
+ }
+
+ static gboolean
+ gst_qt_mux_can_renegotiate (GstQTMux * qtmux, GstPad * pad, GstCaps * caps)
+ {
+ GstQTMuxPad *qtmuxpad = GST_QT_MUX_PAD_CAST (pad);
+
+ /* does not go well to renegotiate stream mid-way, unless
+ * the old caps are a subset of the new one (this means upstream
+ * added more info to the caps, as both should be 'fixed' caps) */
+
+ if (!qtmuxpad->configured_caps) {
+ GST_DEBUG_OBJECT (qtmux, "pad %s accepted caps %" GST_PTR_FORMAT,
+ GST_PAD_NAME (pad), caps);
+ return TRUE;
+ }
+
+ g_assert (caps != NULL);
+
+ if (!gst_qtmux_caps_is_subset_full (qtmux, qtmuxpad->configured_caps, caps)) {
+ GST_WARNING_OBJECT (qtmux,
+ "pad %s refused renegotiation to %" GST_PTR_FORMAT " from %"
+ GST_PTR_FORMAT, GST_PAD_NAME (pad), caps, qtmuxpad->configured_caps);
+ return FALSE;
+ }
+
+ GST_DEBUG_OBJECT (qtmux,
+ "pad %s accepted renegotiation to %" GST_PTR_FORMAT " from %"
+ GST_PTR_FORMAT, GST_PAD_NAME (pad), caps, qtmuxpad->configured_caps);
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_qt_mux_audio_sink_set_caps (GstQTMuxPad * qtpad, GstCaps * caps)
+ {
+ GstPad *pad = GST_PAD (qtpad);
+ GstQTMux *qtmux = GST_QT_MUX_CAST (gst_pad_get_parent (pad));
+ GstQTMuxClass *qtmux_klass = (GstQTMuxClass *) (G_OBJECT_GET_CLASS (qtmux));
+ GstStructure *structure;
+ const gchar *mimetype;
+ gint rate, channels;
+ const GValue *value = NULL;
+ const GstBuffer *codec_data = NULL;
+ GstQTMuxFormat format;
+ AudioSampleEntry entry = { 0, };
+ AtomInfo *ext_atom = NULL;
+ gint constant_size = 0;
+ const gchar *stream_format;
+ guint32 timescale;
+
+ GST_DEBUG_OBJECT (qtmux, "%s:%s, caps=%" GST_PTR_FORMAT,
+ GST_DEBUG_PAD_NAME (pad), caps);
+
+ qtpad->prepare_buf_func = NULL;
+
+ format = qtmux_klass->format;
+ structure = gst_caps_get_structure (caps, 0);
+ mimetype = gst_structure_get_name (structure);
+
+ /* common info */
+ if (!gst_structure_get_int (structure, "channels", &channels) ||
+ !gst_structure_get_int (structure, "rate", &rate)) {
+ goto refuse_caps;
+ }
+
+ /* optional */
+ value = gst_structure_get_value (structure, "codec_data");
+ if (value != NULL)
+ codec_data = gst_value_get_buffer (value);
+
+ qtpad->is_out_of_order = FALSE;
+
+ /* set common properties */
+ entry.sample_rate = rate;
+ entry.channels = channels;
+ /* default */
+ entry.sample_size = 16;
+ /* this is the typical compressed case */
+ if (format == GST_QT_MUX_FORMAT_QT) {
+ entry.version = 1;
+ entry.compression_id = -2;
+ }
+
+ /* now map onto a fourcc, and some extra properties */
+ if (strcmp (mimetype, "audio/mpeg") == 0) {
+ gint mpegversion = 0, mpegaudioversion = 0;
+ gint layer = -1;
+
+ gst_structure_get_int (structure, "mpegversion", &mpegversion);
+ switch (mpegversion) {
+ case 1:
+ gst_structure_get_int (structure, "layer", &layer);
+ gst_structure_get_int (structure, "mpegaudioversion",
+ &mpegaudioversion);
+
+ /* mp1/2/3 */
+ /* note: QuickTime player does not like mp3 either way in iso/mp4 */
+ if (format == GST_QT_MUX_FORMAT_QT)
+ entry.fourcc = FOURCC__mp3;
+ else {
+ entry.fourcc = FOURCC_mp4a;
+ ext_atom =
+ build_esds_extension (qtpad->trak, ESDS_OBJECT_TYPE_MPEG1_P3,
+ ESDS_STREAM_TYPE_AUDIO, codec_data, qtpad->avg_bitrate,
+ qtpad->max_bitrate);
+ }
+ if (layer == 1) {
+ g_warn_if_fail (format == GST_QT_MUX_FORMAT_MP4
+ || format == GST_QT_MUX_FORMAT_QT);
+ entry.samples_per_packet = 384;
+ } else if (layer == 2) {
+ g_warn_if_fail (format == GST_QT_MUX_FORMAT_MP4
+ || format == GST_QT_MUX_FORMAT_QT);
+ entry.samples_per_packet = 1152;
+ } else {
+ g_warn_if_fail (layer == 3);
+ entry.samples_per_packet = (mpegaudioversion <= 1) ? 1152 : 576;
+ }
+ entry.bytes_per_sample = 2;
+ break;
+ case 4:
+
+ /* check stream-format */
+ stream_format = gst_structure_get_string (structure, "stream-format");
+ if (stream_format) {
+ if (strcmp (stream_format, "raw") != 0) {
+ GST_WARNING_OBJECT (qtmux, "Unsupported AAC stream-format %s, "
+ "please use 'raw'", stream_format);
+ goto refuse_caps;
+ }
+ } else {
+ GST_WARNING_OBJECT (qtmux, "No stream-format present in caps, "
+ "assuming 'raw'");
+ }
+
+ if (!codec_data || gst_buffer_get_size ((GstBuffer *) codec_data) < 2) {
+ GST_WARNING_OBJECT (qtmux, "no (valid) codec_data for AAC audio");
+ goto refuse_caps;
+ } else {
+ guint8 profile;
+
+ gst_buffer_extract ((GstBuffer *) codec_data, 0, &profile, 1);
+ /* warn if not Low Complexity profile */
+ profile >>= 3;
+ if (profile != 2)
+ GST_WARNING_OBJECT (qtmux,
+ "non-LC AAC may not run well on (Apple) QuickTime/iTunes");
+ }
+
+ /* AAC */
+ entry.fourcc = FOURCC_mp4a;
+
+ if (format == GST_QT_MUX_FORMAT_QT)
+ ext_atom = build_mov_aac_extension (qtpad->trak, codec_data,
+ qtpad->avg_bitrate, qtpad->max_bitrate);
+ else
+ ext_atom =
+ build_esds_extension (qtpad->trak, ESDS_OBJECT_TYPE_MPEG4_P3,
+ ESDS_STREAM_TYPE_AUDIO, codec_data, qtpad->avg_bitrate,
+ qtpad->max_bitrate);
+ break;
+ default:
+ break;
+ }
+ } else if (strcmp (mimetype, "audio/AMR") == 0) {
+ entry.fourcc = FOURCC_samr;
+ entry.sample_size = 16;
+ entry.samples_per_packet = 160;
+ entry.bytes_per_sample = 2;
+ ext_atom = build_amr_extension ();
+ } else if (strcmp (mimetype, "audio/AMR-WB") == 0) {
+ entry.fourcc = FOURCC_sawb;
+ entry.sample_size = 16;
+ entry.samples_per_packet = 320;
+ entry.bytes_per_sample = 2;
+ ext_atom = build_amr_extension ();
+ } else if (strcmp (mimetype, "audio/x-raw") == 0) {
+ GstAudioInfo info;
+
+ gst_audio_info_init (&info);
+ if (!gst_audio_info_from_caps (&info, caps))
+ goto refuse_caps;
+
+ /* spec has no place for a distinction in these */
+ if (info.finfo->width != info.finfo->depth) {
+ GST_DEBUG_OBJECT (qtmux, "width must be same as depth!");
+ goto refuse_caps;
+ }
+
+ if ((info.finfo->flags & GST_AUDIO_FORMAT_FLAG_SIGNED)) {
+ if (info.finfo->endianness == G_LITTLE_ENDIAN)
+ entry.fourcc = FOURCC_sowt;
+ else if (info.finfo->endianness == G_BIG_ENDIAN)
+ entry.fourcc = FOURCC_twos;
+ else
+ entry.fourcc = FOURCC_sowt;
+ /* maximum backward compatibility; only new version for > 16 bit */
+ if (info.finfo->depth <= 16)
+ entry.version = 0;
+ /* not compressed in any case */
+ entry.compression_id = 0;
+ /* QT spec says: max at 16 bit even if sample size were actually larger,
+ * however, most players (e.g. QuickTime!) seem to disagree, so ... */
+ entry.sample_size = info.finfo->depth;
+ entry.bytes_per_sample = info.finfo->depth / 8;
+ entry.samples_per_packet = 1;
+ entry.bytes_per_packet = info.finfo->depth / 8;
+ entry.bytes_per_frame = entry.bytes_per_packet * info.channels;
+ } else {
+ if (info.finfo->width == 8 && info.finfo->depth == 8) {
+ /* fall back to old 8-bit version */
+ entry.fourcc = FOURCC_raw_;
+ entry.version = 0;
+ entry.compression_id = 0;
+ entry.sample_size = 8;
+ } else {
+ GST_DEBUG_OBJECT (qtmux, "non 8-bit PCM must be signed");
+ goto refuse_caps;
+ }
+ }
+ constant_size = (info.finfo->depth / 8) * info.channels;
+ } else if (strcmp (mimetype, "audio/x-alaw") == 0) {
+ entry.fourcc = FOURCC_alaw;
+ entry.samples_per_packet = 1023;
+ entry.bytes_per_sample = 2;
+ } else if (strcmp (mimetype, "audio/x-mulaw") == 0) {
+ entry.fourcc = FOURCC_ulaw;
+ entry.samples_per_packet = 1023;
+ entry.bytes_per_sample = 2;
+ } else if (strcmp (mimetype, "audio/x-adpcm") == 0) {
+ gint blocksize;
+ if (!gst_structure_get_int (structure, "block_align", &blocksize)) {
+ GST_DEBUG_OBJECT (qtmux, "broken caps, block_align missing");
+ goto refuse_caps;
+ }
+ /* Currently only supports WAV-style IMA ADPCM, for which the codec id is
+ 0x11 */
+ entry.fourcc = MS_WAVE_FOURCC (0x11);
+ /* 4 byte header per channel (including one sample). 2 samples per byte
+ remaining. Simplifying gives the following (samples per block per
+ channel) */
+ entry.samples_per_packet = 2 * blocksize / channels - 7;
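+ /* e.g. a block_align of 1024 with 2 channels gives
+ * 2 * 1024 / 2 - 7 = 1017 samples per channel and block */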
+ entry.bytes_per_sample = 2;
+
+ entry.bytes_per_frame = blocksize;
+ entry.bytes_per_packet = blocksize / channels;
+ /* ADPCM has constant size packets */
+ constant_size = 1;
+ /* TODO: I don't really understand why this helps, but it does! Constant
+ * size and compression_id of -2 seem to be incompatible, and other files
+ * in the wild use this too. */
+ entry.compression_id = -1;
+
+ ext_atom = build_ima_adpcm_extension (channels, rate, blocksize);
+ } else if (strcmp (mimetype, "audio/x-alac") == 0) {
+ GstBuffer *codec_config;
+ gint len;
+ GstMapInfo map;
+
+ entry.fourcc = FOURCC_alac;
+ /* check codec data before mapping it; mapping a NULL buffer would crash */
+ if (!codec_data) {
+ GST_DEBUG_OBJECT (qtmux, "broken caps, codec data missing");
+ goto refuse_caps;
+ }
+ gst_buffer_map ((GstBuffer *) codec_data, &map, GST_MAP_READ);
+ /* let's check if codec data already comes with 'alac' atom prefix */
+ if ((len = map.size) < 28) {
+ GST_DEBUG_OBJECT (qtmux, "broken caps, codec data too short");
+ gst_buffer_unmap ((GstBuffer *) codec_data, &map);
+ goto refuse_caps;
+ }
+ if (GST_READ_UINT32_LE (map.data + 4) == FOURCC_alac) {
+ len -= 8;
+ codec_config =
+ gst_buffer_copy_region ((GstBuffer *) codec_data,
+ GST_BUFFER_COPY_MEMORY, 8, len);
+ } else {
+ codec_config = gst_buffer_ref ((GstBuffer *) codec_data);
+ }
+ gst_buffer_unmap ((GstBuffer *) codec_data, &map);
+ if (len != 28) {
+ /* does not look good, but perhaps some trailing unneeded stuff */
+ GST_WARNING_OBJECT (qtmux, "unexpected codec-data size, possibly broken");
+ }
+ if (format == GST_QT_MUX_FORMAT_QT)
+ ext_atom = build_mov_alac_extension (codec_config);
+ else
+ ext_atom = build_codec_data_extension (FOURCC_alac, codec_config);
+ /* set some more info */
+ gst_buffer_map (codec_config, &map, GST_MAP_READ);
+ entry.bytes_per_sample = 2;
+ entry.samples_per_packet = GST_READ_UINT32_BE (map.data + 4);
+ gst_buffer_unmap (codec_config, &map);
+ gst_buffer_unref (codec_config);
+ } else if (strcmp (mimetype, "audio/x-ac3") == 0) {
+ entry.fourcc = FOURCC_ac_3;
+
+ /* Fixed values according to TS 102 366 but it also mentions that
+ * they should be ignored */
+ entry.channels = 2;
+ entry.sample_size = 16;
+
+ /* AC-3 needs an extension atom but its data can only be obtained from
+ * the stream itself. Abuse the prepare_buf_func so we parse a frame
+ * and get the needed data */
+ qtpad->prepare_buf_func = gst_qt_mux_prepare_parse_ac3_frame;
+ } else if (strcmp (mimetype, "audio/x-opus") == 0) {
+ /* Based on the specification defined in:
+ * https://www.opus-codec.org/docs/opus_in_isobmff.html */
+ guint8 channels, mapping_family, stream_count, coupled_count;
+ guint16 pre_skip;
+ gint16 output_gain;
+ guint32 rate;
+ guint8 channel_mapping[256];
+ const GValue *streamheader;
+ const GValue *first_element;
+ GstBuffer *header;
+
+ entry.fourcc = FOURCC_opus;
+ entry.sample_size = 16;
+
+ streamheader = gst_structure_get_value (structure, "streamheader");
+ if (streamheader && GST_VALUE_HOLDS_ARRAY (streamheader) &&
+ gst_value_array_get_size (streamheader) != 0) {
+ first_element = gst_value_array_get_value (streamheader, 0);
+ header = gst_value_get_buffer (first_element);
+ if (!gst_codec_utils_opus_parse_header (header, &rate, &channels,
+ &mapping_family, &stream_count, &coupled_count, channel_mapping,
+ &pre_skip, &output_gain)) {
+ GST_ERROR_OBJECT (qtmux, "Incomplete OpusHead");
+ goto refuse_caps;
+ }
+ } else {
+ GST_WARNING_OBJECT (qtmux,
+ "no streamheader field in caps %" GST_PTR_FORMAT, caps);
+
+ if (!gst_codec_utils_opus_parse_caps (caps, &rate, &channels,
+ &mapping_family, &stream_count, &coupled_count,
+ channel_mapping)) {
+ GST_ERROR_OBJECT (qtmux, "Incomplete Opus caps");
+ goto refuse_caps;
+ }
+ pre_skip = 0;
+ output_gain = 0;
+ }
+
+ entry.channels = channels;
+ ext_atom = build_opus_extension (rate, channels, mapping_family,
+ stream_count, coupled_count, channel_mapping, pre_skip, output_gain);
+ }
+
+ if (!entry.fourcc)
+ goto refuse_caps;
+
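+ /* timescale precedence: per-pad trak-timescale property first, then the
+ * element-wide trak-timescale, and finally the sample rate (one timescale
+ * unit per audio sample) */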
+ timescale = gst_qt_mux_pad_get_timescale (GST_QT_MUX_PAD_CAST (pad));
+ if (!timescale && qtmux->trak_timescale)
+ timescale = qtmux->trak_timescale;
+ else if (!timescale)
+ timescale = entry.sample_rate;
+
+ /* ok, set the pad info accordingly */
+ qtpad->fourcc = entry.fourcc;
+ qtpad->sample_size = constant_size;
+ qtpad->trak_ste =
+ (SampleTableEntry *) atom_trak_set_audio_type (qtpad->trak,
+ qtmux->context, &entry, timescale, ext_atom, constant_size);
+
+ gst_object_unref (qtmux);
+ return TRUE;
+
+ /* ERRORS */
+ refuse_caps:
+ {
+ GST_WARNING_OBJECT (qtmux, "pad %s refused caps %" GST_PTR_FORMAT,
+ GST_PAD_NAME (pad), caps);
+ gst_object_unref (qtmux);
+ return FALSE;
+ }
+ }
+
+ static gboolean
+ gst_qt_mux_video_sink_set_caps (GstQTMuxPad * qtpad, GstCaps * caps)
+ {
+ GstPad *pad = GST_PAD (qtpad);
+ GstQTMux *qtmux = GST_QT_MUX_CAST (gst_pad_get_parent (pad));
+ GstQTMuxClass *qtmux_klass = (GstQTMuxClass *) (G_OBJECT_GET_CLASS (qtmux));
+ GstStructure *structure;
+ const gchar *mimetype;
+ gint width, height, depth = -1;
+ gint framerate_num, framerate_den;
+ guint32 rate;
+ const GValue *value = NULL;
+ const GstBuffer *codec_data = NULL;
+ VisualSampleEntry entry = { 0, };
+ GstQTMuxFormat format;
+ AtomInfo *ext_atom = NULL;
+ GList *ext_atom_list = NULL;
+ gboolean sync = FALSE;
+ int par_num, par_den;
+ const gchar *multiview_mode;
+
+ GST_DEBUG_OBJECT (qtmux, "%s:%s, caps=%" GST_PTR_FORMAT,
+ GST_DEBUG_PAD_NAME (pad), caps);
+
+ qtpad->prepare_buf_func = NULL;
+
+ format = qtmux_klass->format;
+ structure = gst_caps_get_structure (caps, 0);
+ mimetype = gst_structure_get_name (structure);
+
+ /* required parts */
+ if (!gst_structure_get_int (structure, "width", &width) ||
+ !gst_structure_get_int (structure, "height", &height))
+ goto refuse_caps;
+
+ /* optional */
+ depth = -1;
+ /* works as a default timebase */
+ framerate_num = 10000;
+ framerate_den = 1;
+ gst_structure_get_fraction (structure, "framerate", &framerate_num,
+ &framerate_den);
+ gst_structure_get_int (structure, "depth", &depth);
+ value = gst_structure_get_value (structure, "codec_data");
+ if (value != NULL)
+ codec_data = gst_value_get_buffer (value);
+
+ par_num = 1;
+ par_den = 1;
+ gst_structure_get_fraction (structure, "pixel-aspect-ratio", &par_num,
+ &par_den);
+
+ qtpad->is_out_of_order = FALSE;
+
+ /* bring the framerate numerator into a range that ensures both
+ * reasonable resolution and a fair duration */
+ qtpad->expected_sample_duration_n = framerate_num;
+ qtpad->expected_sample_duration_d = framerate_den;
+
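+ /* same precedence as for audio: pad property, element property, then a
+ * timescale derived from the framerate so frame durations can be
+ * represented without rounding */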
+ rate = gst_qt_mux_pad_get_timescale (GST_QT_MUX_PAD_CAST (pad));
+ if (!rate && qtmux->trak_timescale)
+ rate = qtmux->trak_timescale;
+ else if (!rate)
+ rate = atom_framerate_to_timescale (framerate_num, framerate_den);
+
+ GST_DEBUG_OBJECT (qtmux, "Rate of video track selected: %" G_GUINT32_FORMAT,
+ rate);
+
+ multiview_mode = gst_structure_get_string (structure, "multiview-mode");
+ if (multiview_mode && !qtpad->trak->mdia.minf.stbl.svmi) {
+ GstVideoMultiviewMode mode;
+ GstVideoMultiviewFlags flags = 0;
+
+ mode = gst_video_multiview_mode_from_caps_string (multiview_mode);
+ gst_structure_get_flagset (structure,
+ "multiview-flags", (guint *) & flags, NULL);
+ switch (mode) {
+ case GST_VIDEO_MULTIVIEW_MODE_MONO:
+ /* Nothing to do for mono, just don't warn about it */
+ break;
+ case GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE:
+ qtpad->trak->mdia.minf.stbl.svmi =
+ atom_svmi_new (0,
+ flags & GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST);
+ break;
+ case GST_VIDEO_MULTIVIEW_MODE_ROW_INTERLEAVED:
+ qtpad->trak->mdia.minf.stbl.svmi =
+ atom_svmi_new (1,
+ flags & GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST);
+ break;
+ case GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME:
+ qtpad->trak->mdia.minf.stbl.svmi =
+ atom_svmi_new (2,
+ flags & GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST);
+ break;
+ default:
+ GST_DEBUG_OBJECT (qtmux, "Unsupported multiview-mode %s",
+ multiview_mode);
+ break;
+ }
+ }
+
+ /* set common properties */
+ entry.width = width;
+ entry.height = height;
+ entry.par_n = par_num;
+ entry.par_d = par_den;
+ /* should be OK according to qt and iso spec, override if really needed */
+ entry.color_table_id = -1;
+ entry.frame_count = 1;
+ entry.depth = 24;
+
+ /* sync entries by default */
+ sync = TRUE;
+
+ /* now map onto a fourcc, and some extra properties */
+ if (strcmp (mimetype, "video/x-raw") == 0) {
+ const gchar *format;
+ GstVideoFormat fmt;
+ const GstVideoFormatInfo *vinfo;
+
+ format = gst_structure_get_string (structure, "format");
+ fmt = gst_video_format_from_string (format);
+ vinfo = gst_video_format_get_info (fmt);
+
+ switch (fmt) {
+ case GST_VIDEO_FORMAT_UYVY:
+ if (depth == -1)
+ depth = 24;
+ entry.fourcc = FOURCC_2vuy;
+ entry.depth = depth;
+ sync = FALSE;
+ break;
+ case GST_VIDEO_FORMAT_v210:
+ if (depth == -1)
+ depth = 24;
+ entry.fourcc = FOURCC_v210;
+ entry.depth = depth;
+ sync = FALSE;
+ break;
+ default:
+ if (GST_VIDEO_FORMAT_INFO_FLAGS (vinfo) & GST_VIDEO_FORMAT_FLAG_RGB) {
+ entry.fourcc = FOURCC_raw_;
+ entry.depth = GST_VIDEO_FORMAT_INFO_PSTRIDE (vinfo, 0) * 8;
+ sync = FALSE;
+ }
+ break;
+ }
+ } else if (strcmp (mimetype, "video/x-h263") == 0) {
+ ext_atom = NULL;
+ if (format == GST_QT_MUX_FORMAT_QT)
+ entry.fourcc = FOURCC_h263;
+ else
+ entry.fourcc = FOURCC_s263;
+ ext_atom = build_h263_extension ();
+ if (ext_atom != NULL)
+ ext_atom_list = g_list_prepend (ext_atom_list, ext_atom);
+ } else if (strcmp (mimetype, "video/x-divx") == 0 ||
+ strcmp (mimetype, "video/mpeg") == 0) {
+ gint version = 0;
+
+ if (strcmp (mimetype, "video/x-divx") == 0) {
+ gst_structure_get_int (structure, "divxversion", &version);
+ version = version == 5 ? 1 : 0;
+ } else {
+ gst_structure_get_int (structure, "mpegversion", &version);
+ version = version == 4 ? 1 : 0;
+ }
+ if (version) {
+ entry.fourcc = FOURCC_mp4v;
+ ext_atom =
+ build_esds_extension (qtpad->trak, ESDS_OBJECT_TYPE_MPEG4_P2,
+ ESDS_STREAM_TYPE_VISUAL, codec_data, qtpad->avg_bitrate,
+ qtpad->max_bitrate);
+ if (ext_atom != NULL)
+ ext_atom_list = g_list_prepend (ext_atom_list, ext_atom);
+ if (!codec_data)
+ GST_WARNING_OBJECT (qtmux, "no codec_data for MPEG4 video; "
+ "output might not play in Apple QuickTime (try global-headers?)");
+ }
+ } else if (strcmp (mimetype, "video/x-h264") == 0) {
+ const gchar *stream_format;
+
+ if (!codec_data) {
+ GST_WARNING_OBJECT (qtmux, "no codec_data in h264 caps");
+ goto refuse_caps;
+ }
+
+ stream_format = gst_structure_get_string (structure, "stream-format");
+
+ if (!g_strcmp0 (stream_format, "avc")) {
+ entry.fourcc = FOURCC_avc1;
+ } else if (!g_strcmp0 (stream_format, "avc3")) {
+ entry.fourcc = FOURCC_avc3;
+ } else {
+ g_assert_not_reached ();
+ }
+
+ ext_atom = build_btrt_extension (0, qtpad->avg_bitrate, qtpad->max_bitrate);
+ if (ext_atom != NULL)
+ ext_atom_list = g_list_prepend (ext_atom_list, ext_atom);
+ ext_atom = build_codec_data_extension (FOURCC_avcC, codec_data);
+ if (ext_atom != NULL)
+ ext_atom_list = g_list_prepend (ext_atom_list, ext_atom);
+ } else if (strcmp (mimetype, "video/x-h265") == 0) {
+ const gchar *format;
+
+ if (!codec_data) {
+ GST_WARNING_OBJECT (qtmux, "no codec_data in h265 caps");
+ goto refuse_caps;
+ }
+
+ format = gst_structure_get_string (structure, "stream-format");
+ /* NULL-safe comparison, since stream-format may be absent from caps */
+ if (!g_strcmp0 (format, "hvc1"))
+ entry.fourcc = FOURCC_hvc1;
+ else if (!g_strcmp0 (format, "hev1"))
+ entry.fourcc = FOURCC_hev1;
+
+ ext_atom = build_btrt_extension (0, qtpad->avg_bitrate, qtpad->max_bitrate);
+ if (ext_atom != NULL)
+ ext_atom_list = g_list_prepend (ext_atom_list, ext_atom);
+
+ ext_atom = build_codec_data_extension (FOURCC_hvcC, codec_data);
+ if (ext_atom != NULL)
+ ext_atom_list = g_list_prepend (ext_atom_list, ext_atom);
+
+ } else if (strcmp (mimetype, "video/x-svq") == 0) {
+ gint version = 0;
+ const GstBuffer *seqh = NULL;
+ const GValue *seqh_value;
+ gdouble gamma = 0;
+
+ gst_structure_get_int (structure, "svqversion", &version);
+ if (version == 3) {
+ entry.fourcc = FOURCC_SVQ3;
+ entry.version = 3;
+ entry.depth = 32;
+
+ seqh_value = gst_structure_get_value (structure, "seqh");
+ if (seqh_value) {
+ seqh = gst_value_get_buffer (seqh_value);
+ ext_atom = build_SMI_atom (seqh);
+ if (ext_atom)
+ ext_atom_list = g_list_prepend (ext_atom_list, ext_atom);
+ }
+
+ /* we need to add the gamma anyway because quicktime might crash
+ * when it doesn't find it */
+ if (!gst_structure_get_double (structure, "applied-gamma", &gamma)) {
+ /* it seems that using 0 here makes it ignored */
+ gamma = 0.0;
+ }
+ ext_atom = build_gama_atom (gamma);
+ if (ext_atom)
+ ext_atom_list = g_list_prepend (ext_atom_list, ext_atom);
+ } else {
+ GST_WARNING_OBJECT (qtmux, "SVQ version %d not supported. Please file "
+ "a bug at http://bugzilla.gnome.org", version);
+ }
+ } else if (strcmp (mimetype, "video/x-dv") == 0) {
+ gint version = 0;
+ gboolean pal = TRUE;
+
+ sync = FALSE;
+ if (framerate_num != 25 || framerate_den != 1)
+ pal = FALSE;
+ gst_structure_get_int (structure, "dvversion", &version);
+ /* fall back to typical one */
+ if (!version)
+ version = 25;
+ switch (version) {
+ case 25:
+ if (pal)
+ entry.fourcc = FOURCC_dvcp;
+ else
+ entry.fourcc = FOURCC_dvc_;
+ break;
+ case 50:
+ if (pal)
+ entry.fourcc = FOURCC_dv5p;
+ else
+ entry.fourcc = FOURCC_dv5n;
+ break;
+ default:
+ GST_WARNING_OBJECT (qtmux, "unrecognized dv version");
+ break;
+ }
+ } else if (strcmp (mimetype, "image/jpeg") == 0) {
+ entry.fourcc = FOURCC_jpeg;
+ sync = FALSE;
+ } else if (strcmp (mimetype, "image/png") == 0) {
+ entry.fourcc = FOURCC_png;
+ sync = FALSE;
+ } else if (strcmp (mimetype, "image/x-j2c") == 0 ||
+ strcmp (mimetype, "image/x-jpc") == 0) {
+ const gchar *colorspace;
+ const GValue *cmap_array;
+ const GValue *cdef_array;
+ gint ncomp = 0;
+
+ if (strcmp (mimetype, "image/x-jpc") == 0) {
+ qtpad->prepare_buf_func = gst_qt_mux_prepare_jpc_buffer;
+ }
+
+ gst_structure_get_int (structure, "num-components", &ncomp);
+ cmap_array = gst_structure_get_value (structure, "component-map");
+ cdef_array = gst_structure_get_value (structure, "channel-definitions");
+
+ ext_atom = NULL;
+ entry.fourcc = FOURCC_mjp2;
+ sync = FALSE;
+
+ colorspace = gst_structure_get_string (structure, "colorspace");
+ if (colorspace &&
+ (ext_atom =
+ build_jp2h_extension (width, height, colorspace, ncomp, cmap_array,
+ cdef_array)) != NULL) {
+ ext_atom_list = g_list_append (ext_atom_list, ext_atom);
+
+ ext_atom = build_jp2x_extension (codec_data);
+ if (ext_atom)
+ ext_atom_list = g_list_append (ext_atom_list, ext_atom);
+ } else {
+ GST_DEBUG_OBJECT (qtmux, "missing or invalid fourcc in jp2 caps");
+ goto refuse_caps;
+ }
+ } else if (strcmp (mimetype, "video/x-vp8") == 0) {
+ entry.fourcc = FOURCC_vp08;
+ } else if (strcmp (mimetype, "video/x-vp9") == 0) {
+ entry.fourcc = FOURCC_vp09;
+ } else if (strcmp (mimetype, "video/x-dirac") == 0) {
+ entry.fourcc = FOURCC_drac;
+ } else if (strcmp (mimetype, "video/x-qt-part") == 0) {
+ guint32 fourcc = 0;
+
+ gst_structure_get_uint (structure, "format", &fourcc);
+ entry.fourcc = fourcc;
+ } else if (strcmp (mimetype, "video/x-mp4-part") == 0) {
+ guint32 fourcc = 0;
+
+ gst_structure_get_uint (structure, "format", &fourcc);
+ entry.fourcc = fourcc;
+ } else if (strcmp (mimetype, "video/x-prores") == 0) {
+ const gchar *variant;
+
+ variant = gst_structure_get_string (structure, "variant");
+ if (!variant || !g_strcmp0 (variant, "standard"))
+ entry.fourcc = FOURCC_apcn;
+ else if (!g_strcmp0 (variant, "lt"))
+ entry.fourcc = FOURCC_apcs;
+ else if (!g_strcmp0 (variant, "hq"))
+ entry.fourcc = FOURCC_apch;
+ else if (!g_strcmp0 (variant, "proxy"))
+ entry.fourcc = FOURCC_apco;
+ else if (!g_strcmp0 (variant, "4444"))
+ entry.fourcc = FOURCC_ap4h;
+ else if (!g_strcmp0 (variant, "4444xq"))
+ entry.fourcc = FOURCC_ap4x;
+
+ sync = FALSE;
+
+ if (!qtmux->interleave_time_set)
+ qtmux->interleave_time = 500 * GST_MSECOND;
+ if (!qtmux->interleave_bytes_set)
+ qtmux->interleave_bytes = width > 720 ? 4 * 1024 * 1024 : 2 * 1024 * 1024;
+ } else if (strcmp (mimetype, "video/x-cineform") == 0) {
+ entry.fourcc = FOURCC_cfhd;
+ sync = FALSE;
+ } else if (strcmp (mimetype, "video/x-av1") == 0) {
+ gint presentation_delay;
+ guint8 presentation_delay_byte = 0;
+ GstBuffer *av1_codec_data;
+
+ if (gst_structure_get_int (structure, "presentation-delay",
+ &presentation_delay)) {
+ presentation_delay_byte = 1 << 5;
+ /* mask to the 4-bit field; the previous MAX (0xF, ...) always forced
+ * the written value to 0xF */
+ presentation_delay_byte |= (presentation_delay & 0xF);
+ }
+
+ av1_codec_data = gst_buffer_new_allocate (NULL, 5, NULL);
+ /* Fill version and 3 bytes of flags to 0 */
+ gst_buffer_memset (av1_codec_data, 0, 0, 4);
+ gst_buffer_fill (av1_codec_data, 4, &presentation_delay_byte, 1);
+ if (codec_data)
+ av1_codec_data = gst_buffer_append (av1_codec_data,
+ gst_buffer_ref ((GstBuffer *) codec_data));
+
+ entry.fourcc = FOURCC_av01;
+
+ ext_atom = build_btrt_extension (0, qtpad->avg_bitrate, qtpad->max_bitrate);
+ if (ext_atom != NULL)
+ ext_atom_list = g_list_prepend (ext_atom_list, ext_atom);
+ ext_atom = build_codec_data_extension (FOURCC_av1C, av1_codec_data);
+ if (ext_atom != NULL)
+ ext_atom_list = g_list_prepend (ext_atom_list, ext_atom);
+ gst_buffer_unref (av1_codec_data);
+ }
+
+ if (!entry.fourcc)
+ goto refuse_caps;
+
+ if (qtmux_klass->format == GST_QT_MUX_FORMAT_QT ||
+ qtmux_klass->format == GST_QT_MUX_FORMAT_MP4) {
+ const gchar *s;
+ GstVideoColorimetry colorimetry;
+
+ s = gst_structure_get_string (structure, "colorimetry");
+ if (s && gst_video_colorimetry_from_string (&colorimetry, s)) {
+ ext_atom =
+ build_colr_extension (&colorimetry,
+ qtmux_klass->format == GST_QT_MUX_FORMAT_MP4);
+ if (ext_atom)
+ ext_atom_list = g_list_append (ext_atom_list, ext_atom);
+ }
+ }
+
+ if (qtmux_klass->format == GST_QT_MUX_FORMAT_QT
+ || strcmp (mimetype, "image/x-j2c") == 0
+ || strcmp (mimetype, "image/x-jpc") == 0) {
+ const gchar *s;
+ GstVideoInterlaceMode interlace_mode;
+ GstVideoFieldOrder field_order;
+ gint fields = -1;
+
+ if (strcmp (mimetype, "image/x-j2c") == 0 ||
+ strcmp (mimetype, "image/x-jpc") == 0) {
+
+ fields = 1;
+ gst_structure_get_int (structure, "fields", &fields);
+ }
+
+ s = gst_structure_get_string (structure, "interlace-mode");
+ if (s)
+ interlace_mode = gst_video_interlace_mode_from_string (s);
+ else
+ interlace_mode =
+ (fields <=
+ 1) ? GST_VIDEO_INTERLACE_MODE_PROGRESSIVE :
+ GST_VIDEO_INTERLACE_MODE_MIXED;
+
+ field_order = GST_VIDEO_FIELD_ORDER_UNKNOWN;
+ if (interlace_mode == GST_VIDEO_INTERLACE_MODE_INTERLEAVED) {
+ s = gst_structure_get_string (structure, "field-order");
+ if (s)
+ field_order = gst_video_field_order_from_string (s);
+ }
+
+ ext_atom = build_fiel_extension (interlace_mode, field_order);
+ if (ext_atom)
+ ext_atom_list = g_list_append (ext_atom_list, ext_atom);
+ }
+
+ if (qtmux_klass->format == GST_QT_MUX_FORMAT_QT &&
+ width > 640 && width <= 1052 && height >= 480 && height <= 576) {
+ /* The 'clap' extension is also defined for MP4 but inventing values in
+ * general seems a bit tricky for this one. We only write it for
+ * SD resolution in MOV, where it is a requirement.
+ * The same goes for the 'tapt' extension, just that it is not defined for
+ * MP4 and only for MOV
+ */
+ gint dar_num, dar_den;
+ gint clef_width, clef_height, prof_width;
+ gint clap_width_n, clap_width_d, clap_height;
+ gint cdiv;
+ double approx_dar;
+
+ /* First, guess display aspect ratio based on pixel aspect ratio,
+ * width and height. We assume that display aspect ratio is either
+ * 4:3 or 16:9
+ */
+ approx_dar = (gdouble) (width * par_num) / (height * par_den);
+ if (approx_dar > 11.0 / 9 && approx_dar < 14.0 / 9) {
+ dar_num = 4;
+ dar_den = 3;
+ } else if (approx_dar > 15.0 / 9 && approx_dar < 18.0 / 9) {
+ dar_num = 16;
+ dar_den = 9;
+ } else {
+ dar_num = width * par_num;
+ dar_den = height * par_den;
+ cdiv = gst_util_greatest_common_divisor (dar_num, dar_den);
+ dar_num /= cdiv;
+ dar_den /= cdiv;
+ }
+
+ /* Then, calculate clean-aperture values (clap and clef)
+ * using the guessed DAR.
+ */
+ clef_height = clap_height = (height == 486 ? 480 : height);
+ clef_width = gst_util_uint64_scale (clef_height,
+ dar_num * G_GUINT64_CONSTANT (65536), dar_den);
+ prof_width = gst_util_uint64_scale (width,
+ par_num * G_GUINT64_CONSTANT (65536), par_den);
+ clap_width_n = clap_height * dar_num * par_den;
+ clap_width_d = dar_den * par_num;
+ cdiv = gst_util_greatest_common_divisor (clap_width_n, clap_width_d);
+ clap_width_n /= cdiv;
+ clap_width_d /= cdiv;
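+ /* Worked example (assuming 720x576 input with 64:45 PAR, i.e. PAL 16:9):
+ * approx_dar = (720 * 64) / (576 * 45) ~= 1.78, so 16:9 is guessed;
+ * clap_width_n/clap_width_d = (576 * 16 * 45) / (9 * 64) reduces to
+ * 720/1, i.e. the clean aperture equals the coded width here. */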
+
+ ext_atom = build_tapt_extension (clef_width, clef_height << 16, prof_width,
+ height << 16, width << 16, height << 16);
+ qtpad->trak->tapt = ext_atom;
+
+ ext_atom = build_clap_extension (clap_width_n, clap_width_d,
+ clap_height, 1, 0, 1, 0, 1);
+ if (ext_atom)
+ ext_atom_list = g_list_append (ext_atom_list, ext_atom);
+ }
+
+ /* ok, set the pad info accordingly */
+ qtpad->fourcc = entry.fourcc;
+ qtpad->sync = sync;
+ qtpad->trak_ste =
+ (SampleTableEntry *) atom_trak_set_video_type (qtpad->trak,
+ qtmux->context, &entry, rate, ext_atom_list);
+ if (strcmp (mimetype, "video/x-prores") == 0) {
+ SampleTableEntryMP4V *mp4v = (SampleTableEntryMP4V *) qtpad->trak_ste;
+ const gchar *compressor = NULL;
+ mp4v->spatial_quality = 0x3FF;
+ mp4v->temporal_quality = 0;
+ mp4v->vendor = FOURCC_appl;
+ mp4v->horizontal_resolution = 72 << 16;
+ mp4v->vertical_resolution = 72 << 16;
+ mp4v->depth = (entry.fourcc == FOURCC_ap4h
+ || entry.fourcc == FOURCC_ap4x) ? (depth > 0 ? depth : 32) : 24;
+
+ /* Set compressor name, required by some software */
+ switch (entry.fourcc) {
+ case FOURCC_apcn:
+ compressor = "Apple ProRes 422";
+ break;
+ case FOURCC_apcs:
+ compressor = "Apple ProRes 422 LT";
+ break;
+ case FOURCC_apch:
+ compressor = "Apple ProRes 422 HQ";
+ break;
+ case FOURCC_apco:
+ compressor = "Apple ProRes 422 Proxy";
+ break;
+ case FOURCC_ap4h:
+ compressor = "Apple ProRes 4444";
+ break;
+ case FOURCC_ap4x:
+ compressor = "Apple ProRes 4444 XQ";
+ break;
+ }
+ if (compressor) {
+ strcpy ((gchar *) mp4v->compressor + 1, compressor);
+ mp4v->compressor[0] = strlen (compressor);
+ }
+ }
+
+ gst_object_unref (qtmux);
+ return TRUE;
+
+ /* ERRORS */
+ refuse_caps:
+ {
+ GST_WARNING_OBJECT (qtmux, "pad %s refused caps %" GST_PTR_FORMAT,
+ GST_PAD_NAME (pad), caps);
+ gst_object_unref (qtmux);
+ return FALSE;
+ }
+ }
+
+ static gboolean
+ gst_qt_mux_subtitle_sink_set_caps (GstQTMuxPad * qtpad, GstCaps * caps)
+ {
+ GstPad *pad = GST_PAD (qtpad);
+ GstQTMux *qtmux = GST_QT_MUX_CAST (gst_pad_get_parent (pad));
+ GstStructure *structure;
+ SubtitleSampleEntry entry = { 0, };
+
+ GST_DEBUG_OBJECT (qtmux, "%s:%s, caps=%" GST_PTR_FORMAT,
+ GST_DEBUG_PAD_NAME (pad), caps);
+
+ /* subtitles default */
+ subtitle_sample_entry_init (&entry);
+ qtpad->is_out_of_order = FALSE;
+ qtpad->sync = FALSE;
+ qtpad->sparse = TRUE;
+ qtpad->prepare_buf_func = NULL;
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ if (gst_structure_has_name (structure, "text/x-raw")) {
+ const gchar *format = gst_structure_get_string (structure, "format");
+ if (format && strcmp (format, "utf8") == 0) {
+ entry.fourcc = FOURCC_tx3g;
+ qtpad->prepare_buf_func = gst_qt_mux_prepare_tx3g_buffer;
+ qtpad->create_empty_buffer = gst_qt_mux_create_empty_tx3g_buffer;
+ }
+ }
+
+ if (!entry.fourcc)
+ goto refuse_caps;
+
+ qtpad->fourcc = entry.fourcc;
+ qtpad->trak_ste =
+ (SampleTableEntry *) atom_trak_set_subtitle_type (qtpad->trak,
+ qtmux->context, &entry);
+
+ gst_object_unref (qtmux);
+ return TRUE;
+
+ /* ERRORS */
+ refuse_caps:
+ {
+ GST_WARNING_OBJECT (qtmux, "pad %s refused caps %" GST_PTR_FORMAT,
+ GST_PAD_NAME (pad), caps);
+ gst_object_unref (qtmux);
+ return FALSE;
+ }
+ }
+
+ static gboolean
+ gst_qt_mux_caption_sink_set_caps (GstQTMuxPad * qtpad, GstCaps * caps)
+ {
+ GstPad *pad = GST_PAD (qtpad);
+ GstQTMux *qtmux = GST_QT_MUX_CAST (gst_pad_get_parent (pad));
+ GstStructure *structure;
+ guint32 fourcc_entry;
+ guint32 timescale;
+
+ GST_DEBUG_OBJECT (qtmux, "%s:%s, caps=%" GST_PTR_FORMAT,
+ GST_DEBUG_PAD_NAME (pad), caps);
+
+ /* captions default */
+ qtpad->is_out_of_order = FALSE;
+ qtpad->sync = FALSE;
+ qtpad->sparse = TRUE;
+ /* Closed caption data needs to be wrapped into atoms, which is done by
+ * the prepare function */
+ qtpad->prepare_buf_func = gst_qt_mux_prepare_caption_buffer;
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ /* We know we only handle 608,format=s334-1a and 708,format=cdp */
+ if (gst_structure_has_name (structure, "closedcaption/x-cea-608")) {
+ fourcc_entry = FOURCC_c608;
+ } else if (gst_structure_has_name (structure, "closedcaption/x-cea-708")) {
+ fourcc_entry = FOURCC_c708;
+ } else
+ goto refuse_caps;
+
+ /* We set the real timescale later to the one from the video track when
+ * writing the headers */
+ timescale = gst_qt_mux_pad_get_timescale (GST_QT_MUX_PAD_CAST (pad));
+ if (!timescale && qtmux->trak_timescale)
+ timescale = qtmux->trak_timescale;
+ else if (!timescale)
+ timescale = 30000;
+
+ qtpad->fourcc = fourcc_entry;
+ qtpad->trak_ste =
+ (SampleTableEntry *) atom_trak_set_caption_type (qtpad->trak,
+ qtmux->context, timescale, fourcc_entry);
+
+ /* Initialize caption track language code to 0 unless something else is
+ * specified. Without this, Final Cut considers it "non-standard"
+ */
+ qtpad->trak->mdia.mdhd.language_code = 0;
+
+ gst_object_unref (qtmux);
+ return TRUE;
+
+ /* ERRORS */
+ refuse_caps:
+ {
+ GST_WARNING_OBJECT (qtmux, "pad %s refused caps %" GST_PTR_FORMAT,
+ GST_PAD_NAME (pad), caps);
+ gst_object_unref (qtmux);
+ return FALSE;
+ }
+ }
+
+ static GstFlowReturn
+ gst_qt_mux_sink_event_pre_queue (GstAggregator * agg,
+ GstAggregatorPad * agg_pad, GstEvent * event)
+ {
+ GstAggregatorClass *agg_class = GST_AGGREGATOR_CLASS (parent_class);
+ GstQTMux *qtmux;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ qtmux = GST_QT_MUX_CAST (agg);
+
+ if (GST_EVENT_TYPE (event) == GST_EVENT_CAPS) {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ if (!gst_qt_mux_can_renegotiate (qtmux, GST_PAD (agg_pad), caps)) {
+ gst_event_unref (event);
+ event = NULL;
+ ret = GST_FLOW_NOT_NEGOTIATED;
+ }
+ }
+
+ if (event != NULL)
+ ret = agg_class->sink_event_pre_queue (agg, agg_pad, event);
+
+ return ret;
+ }
+
+
+ static gboolean
+ gst_qt_mux_sink_event (GstAggregator * agg, GstAggregatorPad * agg_pad,
+ GstEvent * event)
+ {
+ GstAggregatorClass *agg_class = GST_AGGREGATOR_CLASS (parent_class);
+ GstQTMuxPad *qtmux_pad;
+ GstQTMux *qtmux;
+ guint32 avg_bitrate = 0, max_bitrate = 0;
+ GstPad *pad = GST_PAD (agg_pad);
+ gboolean ret = TRUE;
+
+ qtmux = GST_QT_MUX_CAST (agg);
+ qtmux_pad = GST_QT_MUX_PAD_CAST (agg_pad);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+
+ /* find stream data */
+ g_assert (qtmux_pad->set_caps);
+
+ /* depending on the codec (h264/h265 for example), the muxer will append
+ * a new stsd entry per set_caps() call, which is not ideal when the
+ * fields referenced in the caps have not changed since the previous one.
+ * Each set_caps() implementation could be enhanced to avoid duplicated
+ * atoms in general; the identical-caps case is one we can obviously
+ * skip */
+ if (qtmux_pad->configured_caps &&
+ gst_caps_is_equal (qtmux_pad->configured_caps, caps)) {
+ GST_DEBUG_OBJECT (qtmux_pad, "Ignore duplicated caps %" GST_PTR_FORMAT,
+ caps);
+ } else {
+ ret = qtmux_pad->set_caps (qtmux_pad, caps);
+
+ GST_OBJECT_LOCK (qtmux);
+ if (qtmux->current_pad == qtmux_pad) {
+ qtmux->current_chunk_offset = -1;
+ qtmux->current_chunk_size = 0;
+ qtmux->current_chunk_duration = 0;
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+ }
+
+ if (ret)
+ gst_caps_replace (&qtmux_pad->configured_caps, caps);
+
+ gst_event_unref (event);
+ event = NULL;
+ break;
+ }
+ case GST_EVENT_TAG:{
+ GstTagList *list;
+ GstTagSetter *setter = GST_TAG_SETTER (qtmux);
+ GstTagMergeMode mode;
+ gchar *code;
+
+ GST_OBJECT_LOCK (qtmux);
+ mode = gst_tag_setter_get_tag_merge_mode (setter);
+
+ gst_event_parse_tag (event, &list);
+ GST_DEBUG_OBJECT (qtmux, "received tag event on pad %s:%s : %"
+ GST_PTR_FORMAT, GST_DEBUG_PAD_NAME (pad), list);
+
+ if (gst_tag_list_get_scope (list) == GST_TAG_SCOPE_GLOBAL) {
+ gst_tag_setter_merge_tags (setter, list, mode);
+ qtmux->tags_changed = TRUE;
+ } else {
+ if (!qtmux_pad->tags)
+ qtmux_pad->tags = gst_tag_list_new_empty ();
+ gst_tag_list_insert (qtmux_pad->tags, list, mode);
+ qtmux_pad->tags_changed = TRUE;
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+
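+ /* note: the single '|' below is deliberate so that both tag reads are
+ * evaluated */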
+ if (gst_tag_list_get_uint (list, GST_TAG_BITRATE, &avg_bitrate) |
+ gst_tag_list_get_uint (list, GST_TAG_MAXIMUM_BITRATE, &max_bitrate)) {
+ if (avg_bitrate > 0 && avg_bitrate < G_MAXUINT32)
+ qtmux_pad->avg_bitrate = avg_bitrate;
+ if (max_bitrate > 0 && max_bitrate < G_MAXUINT32)
+ qtmux_pad->max_bitrate = max_bitrate;
+ }
+
+ if (gst_tag_list_get_string (list, GST_TAG_LANGUAGE_CODE, &code)) {
+ const char *iso_code = gst_tag_get_language_code_iso_639_2T (code);
+ if (iso_code) {
+ if (qtmux_pad->trak) {
+ /* https://developer.apple.com/library/mac/#documentation/QuickTime/QTFF/QTFFChap4/qtff4.html */
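+ /* per the QTFF spec, the 3-letter ISO 639-2T code is packed into
+ * 15 bits, 5 bits per letter, with 0x60 subtracted from each char */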
+ qtmux_pad->trak->mdia.mdhd.language_code = language_code (iso_code);
+ }
+ }
+ g_free (code);
+ }
+
+ gst_event_unref (event);
+ event = NULL;
+ ret = TRUE;
+ break;
+ }
+ default:
+ break;
+ }
+
+ if (event != NULL)
+ ret = agg_class->sink_event (agg, agg_pad, event);
+
+ return ret;
+ }
+
+ static void
+ gst_qt_mux_release_pad (GstElement * element, GstPad * pad)
+ {
+ GstQTMux *mux = GST_QT_MUX_CAST (element);
+ GstQTMuxPad *muxpad = GST_QT_MUX_PAD_CAST (pad);
+
+ GST_DEBUG_OBJECT (element, "Releasing %s:%s", GST_DEBUG_PAD_NAME (pad));
+
+ /* Take a ref to the pad so we can clean it up after removing it from the element */
+ pad = gst_object_ref (pad);
+
+ /* Do aggregate level cleanup */
+ GST_ELEMENT_CLASS (parent_class)->release_pad (element, pad);
+
+ GST_OBJECT_LOCK (mux);
+ if (mux->current_pad && GST_PAD (mux->current_pad) == pad) {
+ mux->current_pad = NULL;
+ mux->current_chunk_size = 0;
+ mux->current_chunk_duration = 0;
+ }
+
+ gst_qt_mux_pad_reset (muxpad);
+
+ if (GST_ELEMENT (mux)->sinkpads == NULL) {
+ /* No more outstanding request pads, reset our counters */
+ mux->video_pads = 0;
+ mux->audio_pads = 0;
+ mux->subtitle_pads = 0;
+ }
+ GST_OBJECT_UNLOCK (mux);
+
+ gst_object_unref (pad);
+ }
+
+ static GstAggregatorPad *
+ gst_qt_mux_create_new_pad (GstAggregator * self,
+ GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps)
+ {
+ return g_object_new (GST_TYPE_QT_MUX_PAD, "name", req_name, "direction",
+ templ->direction, "template", templ, NULL);
+ }
+
+ static GstPad *
+ gst_qt_mux_request_new_pad (GstElement * element,
+ GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps)
+ {
+ GstElementClass *klass = GST_ELEMENT_GET_CLASS (element);
+ GstQTMux *qtmux = GST_QT_MUX_CAST (element);
+ GstQTMuxPad *qtpad;
+ GstQTPadSetCapsFunc setcaps_func;
+ gchar *name;
+ gint pad_id;
+
+ if (templ->direction != GST_PAD_SINK)
+ goto wrong_direction;
+
+ if (qtmux->state > GST_QT_MUX_STATE_STARTED)
+ goto too_late;
+
+ if (templ == gst_element_class_get_pad_template (klass, "audio_%u")) {
+ setcaps_func = gst_qt_mux_audio_sink_set_caps;
+ if (req_name != NULL && sscanf (req_name, "audio_%u", &pad_id) == 1) {
+ name = g_strdup (req_name);
+ } else {
+ name = g_strdup_printf ("audio_%u", qtmux->audio_pads++);
+ }
+ } else if (templ == gst_element_class_get_pad_template (klass, "video_%u")) {
+ setcaps_func = gst_qt_mux_video_sink_set_caps;
+ if (req_name != NULL && sscanf (req_name, "video_%u", &pad_id) == 1) {
+ name = g_strdup (req_name);
+ } else {
+ name = g_strdup_printf ("video_%u", qtmux->video_pads++);
+ }
+ } else if (templ == gst_element_class_get_pad_template (klass, "subtitle_%u")) {
+ setcaps_func = gst_qt_mux_subtitle_sink_set_caps;
+ if (req_name != NULL && sscanf (req_name, "subtitle_%u", &pad_id) == 1) {
+ name = g_strdup (req_name);
+ } else {
+ name = g_strdup_printf ("subtitle_%u", qtmux->subtitle_pads++);
+ }
+ } else if (templ == gst_element_class_get_pad_template (klass, "caption_%u")) {
+ setcaps_func = gst_qt_mux_caption_sink_set_caps;
+ if (req_name != NULL && sscanf (req_name, "caption_%u", &pad_id) == 1) {
+ name = g_strdup (req_name);
+ } else {
+ name = g_strdup_printf ("caption_%u", qtmux->caption_pads++);
+ }
+ } else
+ goto wrong_template;
+
+ GST_DEBUG_OBJECT (qtmux, "Requested pad: %s", name);
+
+ qtpad = (GstQTMuxPad *)
+ GST_ELEMENT_CLASS (parent_class)->request_new_pad (element,
+ templ, name, caps);
+
+ g_free (name);
+
+ /* set up pad */
+ GST_OBJECT_LOCK (qtmux);
+ gst_qt_mux_pad_reset (qtpad);
+ qtpad->trak = atom_trak_new (qtmux->context);
+
+ atom_moov_add_trak (qtmux->moov, qtpad->trak);
+ GST_OBJECT_UNLOCK (qtmux);
+
+ /* set up pad functions */
+ qtpad->set_caps = setcaps_func;
+ qtpad->dts = G_MININT64;
+
+ return GST_PAD (qtpad);
+
+ /* ERRORS */
+ wrong_direction:
+ {
+ GST_WARNING_OBJECT (qtmux, "Request pad that is not a SINK pad.");
+ return NULL;
+ }
+ too_late:
+ {
+ GST_WARNING_OBJECT (qtmux, "Not providing request pad after stream start.");
+ return NULL;
+ }
+ wrong_template:
+ {
+ GST_WARNING_OBJECT (qtmux, "This is not our template!");
+ return NULL;
+ }
+ }
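+ /* Typical application usage (sketch, not part of this file; names are
+ * standard GStreamer API):
+ * GstPad *pad = gst_element_request_pad_simple (mux, "video_%u");
+ * ... link and use the pad ...
+ * gst_element_release_request_pad (mux, pad);
+ */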
+
+ static void
+ gst_qt_mux_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+ {
+ GstQTMux *qtmux = GST_QT_MUX_CAST (object);
+
+ GST_OBJECT_LOCK (qtmux);
+ switch (prop_id) {
+ case PROP_MOVIE_TIMESCALE:
+ g_value_set_uint (value, qtmux->timescale);
+ break;
+ case PROP_TRAK_TIMESCALE:
+ g_value_set_uint (value, qtmux->trak_timescale);
+ break;
+ case PROP_DO_CTTS:
+ g_value_set_boolean (value, qtmux->guess_pts);
+ break;
+ #ifndef GST_REMOVE_DEPRECATED
+ case PROP_DTS_METHOD:
+ g_value_set_enum (value, qtmux->dts_method);
+ break;
+ #endif
+ case PROP_FAST_START:
+ g_value_set_boolean (value, qtmux->fast_start);
+ break;
+ case PROP_FAST_START_TEMP_FILE:
+ g_value_set_string (value, qtmux->fast_start_file_path);
+ break;
+ case PROP_MOOV_RECOV_FILE:
+ g_value_set_string (value, qtmux->moov_recov_file_path);
+ break;
+ case PROP_FRAGMENT_DURATION:
+ g_value_set_uint (value, qtmux->fragment_duration);
+ break;
+ case PROP_RESERVED_MAX_DURATION:
+ g_value_set_uint64 (value, qtmux->reserved_max_duration);
+ break;
+ case PROP_RESERVED_DURATION_REMAINING:
+ if (qtmux->reserved_duration_remaining == GST_CLOCK_TIME_NONE)
+ g_value_set_uint64 (value, qtmux->reserved_max_duration);
+ else {
+ GstClockTime remaining = qtmux->reserved_duration_remaining;
+
+ /* Report the remaining space as the calculated remaining, minus
+ * however much we've muxed since the last update */
+ if (remaining > qtmux->muxed_since_last_update)
+ remaining -= qtmux->muxed_since_last_update;
+ else
+ remaining = 0;
+ GST_LOG_OBJECT (qtmux, "reserved duration remaining - reporting %"
+ G_GUINT64_FORMAT " (%" G_GUINT64_FORMAT " - %" G_GUINT64_FORMAT ")",
+ remaining, qtmux->reserved_duration_remaining,
+ qtmux->muxed_since_last_update);
+ g_value_set_uint64 (value, remaining);
+ }
+ break;
+ case PROP_RESERVED_MOOV_UPDATE_PERIOD:
+ g_value_set_uint64 (value, qtmux->reserved_moov_update_period);
+ break;
+ case PROP_RESERVED_BYTES_PER_SEC:
+ g_value_set_uint (value, qtmux->reserved_bytes_per_sec_per_trak);
+ break;
+ case PROP_RESERVED_PREFILL:
+ g_value_set_boolean (value, qtmux->reserved_prefill);
+ break;
+ case PROP_INTERLEAVE_BYTES:
+ g_value_set_uint64 (value, qtmux->interleave_bytes);
+ break;
+ case PROP_INTERLEAVE_TIME:
+ g_value_set_uint64 (value, qtmux->interleave_time);
+ break;
+ case PROP_FORCE_CHUNKS:
+ g_value_set_boolean (value, qtmux->force_chunks);
+ break;
+ case PROP_MAX_RAW_AUDIO_DRIFT:
+ g_value_set_uint64 (value, qtmux->max_raw_audio_drift);
+ break;
+ case PROP_START_GAP_THRESHOLD:
+ g_value_set_uint64 (value, qtmux->start_gap_threshold);
+ break;
+ case PROP_FORCE_CREATE_TIMECODE_TRAK:
+ g_value_set_boolean (value, qtmux->force_create_timecode_trak);
+ break;
+ case PROP_FRAGMENT_MODE:{
+ GstQTMuxFragmentMode mode = qtmux->fragment_mode;
+ if (mode == GST_QT_MUX_FRAGMENT_STREAMABLE)
+ mode = GST_QT_MUX_FRAGMENT_DASH_OR_MSS;
+ g_value_set_enum (value, mode);
+ break;
+ }
++#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
++ case PROP_EXPECTED_TRAILER_SIZE:
++ g_value_set_uint(value, qtmux->expected_trailer_size);
++ break;
++#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+ }
+
+ static void
+ gst_qt_mux_generate_fast_start_file_path (GstQTMux * qtmux)
+ {
+ gchar *tmp;
+
+ g_free (qtmux->fast_start_file_path);
+ qtmux->fast_start_file_path = NULL;
+
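+ /* resulting path is the temp dir joined with "qtmux" plus a random
+ * integer, e.g. "/tmp/qtmux1928301211" (illustrative value) */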
+ tmp = g_strdup_printf ("%s%d", "qtmux", g_random_int ());
+ qtmux->fast_start_file_path = g_build_filename (g_get_tmp_dir (), tmp, NULL);
+ g_free (tmp);
+ }
+
+ static void
+ gst_qt_mux_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+ {
+ GstQTMux *qtmux = GST_QT_MUX_CAST (object);
+
+ GST_OBJECT_LOCK (qtmux);
+ switch (prop_id) {
+ case PROP_MOVIE_TIMESCALE:
+ qtmux->timescale = g_value_get_uint (value);
+ break;
+ case PROP_TRAK_TIMESCALE:
+ qtmux->trak_timescale = g_value_get_uint (value);
+ break;
+ case PROP_DO_CTTS:
+ qtmux->guess_pts = g_value_get_boolean (value);
+ break;
+ #ifndef GST_REMOVE_DEPRECATED
+ case PROP_DTS_METHOD:
+ qtmux->dts_method = g_value_get_enum (value);
+ break;
+ #endif
+ case PROP_FAST_START:
+ qtmux->fast_start = g_value_get_boolean (value);
+ break;
+ case PROP_FAST_START_TEMP_FILE:
+ g_free (qtmux->fast_start_file_path);
+ qtmux->fast_start_file_path = g_value_dup_string (value);
+ /* NULL means to generate a random one */
+ if (!qtmux->fast_start_file_path) {
+ gst_qt_mux_generate_fast_start_file_path (qtmux);
+ }
+ break;
+ case PROP_MOOV_RECOV_FILE:
+ g_free (qtmux->moov_recov_file_path);
+ qtmux->moov_recov_file_path = g_value_dup_string (value);
+ break;
+ case PROP_FRAGMENT_DURATION:
+ qtmux->fragment_duration = g_value_get_uint (value);
+ break;
+ case PROP_RESERVED_MAX_DURATION:
+ qtmux->reserved_max_duration = g_value_get_uint64 (value);
+ break;
+ case PROP_RESERVED_MOOV_UPDATE_PERIOD:
+ qtmux->reserved_moov_update_period = g_value_get_uint64 (value);
+ break;
+ case PROP_RESERVED_BYTES_PER_SEC:
+ qtmux->reserved_bytes_per_sec_per_trak = g_value_get_uint (value);
+ break;
+ case PROP_RESERVED_PREFILL:
+ qtmux->reserved_prefill = g_value_get_boolean (value);
+ break;
+ case PROP_INTERLEAVE_BYTES:
+ qtmux->interleave_bytes = g_value_get_uint64 (value);
+ qtmux->interleave_bytes_set = TRUE;
+ break;
+ case PROP_INTERLEAVE_TIME:
+ qtmux->interleave_time = g_value_get_uint64 (value);
+ qtmux->interleave_time_set = TRUE;
+ break;
+ case PROP_FORCE_CHUNKS:
+ qtmux->force_chunks = g_value_get_boolean (value);
+ break;
+ case PROP_MAX_RAW_AUDIO_DRIFT:
+ qtmux->max_raw_audio_drift = g_value_get_uint64 (value);
+ break;
+ case PROP_START_GAP_THRESHOLD:
+ qtmux->start_gap_threshold = g_value_get_uint64 (value);
+ break;
+ case PROP_FORCE_CREATE_TIMECODE_TRAK:
+ qtmux->force_create_timecode_trak = g_value_get_boolean (value);
+ qtmux->context->force_create_timecode_trak =
+ qtmux->force_create_timecode_trak;
+ break;
+ case PROP_FRAGMENT_MODE:{
+ GstQTMuxFragmentMode mode = g_value_get_enum (value);
+ if (mode != GST_QT_MUX_FRAGMENT_STREAMABLE)
+ qtmux->fragment_mode = mode;
+ break;
+ }
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+ }
+
+ static gboolean
+ gst_qt_mux_start (GstAggregator * agg)
+ {
+ GstQTMux *qtmux = GST_QT_MUX_CAST (agg);
+ GstSegment segment;
+
+ qtmux->state = GST_QT_MUX_STATE_STARTED;
+
+ /* let downstream know we think in BYTES and expect to do seeking later on */
+ gst_segment_init (&segment, GST_FORMAT_BYTES);
+ gst_aggregator_update_segment (agg, &segment);
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_qt_mux_stop (GstAggregator * agg)
+ {
+ GstQTMux *qtmux = GST_QT_MUX_CAST (agg);
+
+ gst_qt_mux_reset (qtmux, TRUE);
+
+ return TRUE;
+ }
+
+ enum
+ {
+ PROP_SUBCLASS_STREAMABLE = 1,
+ };
+
+ static void
+ gst_qt_mux_subclass_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+ {
+ GstQTMux *qtmux = GST_QT_MUX_CAST (object);
+
+ GST_OBJECT_LOCK (qtmux);
+ switch (prop_id) {
+ case PROP_SUBCLASS_STREAMABLE:{
+ GstQTMuxClass *qtmux_klass =
+ (GstQTMuxClass *) (G_OBJECT_GET_CLASS (qtmux));
+ if (qtmux_klass->format == GST_QT_MUX_FORMAT_ISML) {
+ qtmux->streamable = g_value_get_boolean (value);
+ }
+ break;
+ }
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+ }
+
+ static void
+ gst_qt_mux_subclass_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+ {
+ GstQTMux *qtmux = GST_QT_MUX_CAST (object);
+
+ GST_OBJECT_LOCK (qtmux);
+ switch (prop_id) {
+ case PROP_SUBCLASS_STREAMABLE:
+ g_value_set_boolean (value, qtmux->streamable);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+ }
+
+ static void
+ gst_qt_mux_subclass_class_init (GstQTMuxClass * klass)
+ {
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GParamFlags streamable_flags;
+ const gchar *streamable_desc;
+ gboolean streamable;
+ #define STREAMABLE_DESC "If set to true, the output should be as if it is to "\
+ "be streamed and hence no indexes written or duration written."
+
+ gobject_class->set_property = gst_qt_mux_subclass_set_property;
+ gobject_class->get_property = gst_qt_mux_subclass_get_property;
+
+ streamable_flags = G_PARAM_READWRITE | G_PARAM_CONSTRUCT;
+ if (klass->format == GST_QT_MUX_FORMAT_ISML) {
+ streamable_desc = STREAMABLE_DESC;
+ streamable = DEFAULT_STREAMABLE;
+ } else {
+ streamable_desc =
+ STREAMABLE_DESC " (DEPRECATED, only valid for fragmented MP4)";
+ streamable_flags |= G_PARAM_DEPRECATED;
+ streamable = FALSE;
+ }
+
+ g_object_class_install_property (gobject_class, PROP_SUBCLASS_STREAMABLE,
+ g_param_spec_boolean ("streamable", "Streamable", streamable_desc,
+ streamable, streamable_flags | G_PARAM_STATIC_STRINGS));
+ }
+
+ static void
+ gst_qt_mux_subclass_init (GstQTMux * qtmux)
+ {
+ }
+
+ gboolean
+ gst_qt_mux_register (GstPlugin * plugin)
+ {
+ GTypeInfo parent_typeinfo = {
+ sizeof (GstQTMuxClass),
+ (GBaseInitFunc) gst_qt_mux_base_init,
+ NULL,
+ (GClassInitFunc) gst_qt_mux_class_init,
+ NULL,
+ NULL,
+ sizeof (GstQTMux),
+ 0,
+ (GInstanceInitFunc) gst_qt_mux_init,
+ };
+ static const GInterfaceInfo tag_setter_info = {
+ NULL, NULL, NULL
+ };
+ static const GInterfaceInfo tag_xmp_writer_info = {
+ NULL, NULL, NULL
+ };
+ static const GInterfaceInfo preset_info = {
+ NULL, NULL, NULL
+ };
+ GType parent_type;
+ GstQTMuxFormat format;
+ GstQTMuxClassParams *params;
+ guint i = 0;
+
+ GST_DEBUG_CATEGORY_INIT (gst_qt_mux_debug, "qtmux", 0, "QT Muxer");
+
+ GST_LOG ("Registering muxers");
+
+ parent_type =
+ g_type_register_static (GST_TYPE_AGGREGATOR, "GstBaseQTMux",
+ &parent_typeinfo, 0);
+ g_type_add_interface_static (parent_type, GST_TYPE_TAG_SETTER,
+ &tag_setter_info);
+ g_type_add_interface_static (parent_type, GST_TYPE_TAG_XMP_WRITER,
+ &tag_xmp_writer_info);
+ g_type_add_interface_static (parent_type, GST_TYPE_PRESET, &preset_info);
+
+ gst_type_mark_as_plugin_api (parent_type, 0);
+
+ while (TRUE) {
+ GType type;
+ GTypeInfo subclass_typeinfo = {
+ sizeof (GstQTMuxClass),
+ NULL,
+ NULL,
+ (GClassInitFunc) gst_qt_mux_subclass_class_init,
+ NULL,
+ NULL,
+ sizeof (GstQTMux),
+ 0,
+ (GInstanceInitFunc) gst_qt_mux_subclass_init,
+ };
+ GstQTMuxFormatProp *prop;
+ GstCaps *subtitle_caps, *caption_caps;
+
+ prop = &gst_qt_mux_format_list[i];
+ format = prop->format;
+ if (format == GST_QT_MUX_FORMAT_NONE)
+ break;
+
+ /* create a cache for these properties */
+ params = g_new0 (GstQTMuxClassParams, 1);
+ params->prop = prop;
+ params->src_caps = gst_static_caps_get (&prop->src_caps);
+ params->video_sink_caps = gst_static_caps_get (&prop->video_sink_caps);
+ params->audio_sink_caps = gst_static_caps_get (&prop->audio_sink_caps);
+ subtitle_caps = gst_static_caps_get (&prop->subtitle_sink_caps);
+ if (!gst_caps_is_equal (subtitle_caps, GST_CAPS_NONE)) {
+ params->subtitle_sink_caps = subtitle_caps;
+ } else {
+ gst_caps_unref (subtitle_caps);
+ }
+ caption_caps = gst_static_caps_get (&prop->caption_sink_caps);
+ if (!gst_caps_is_equal (caption_caps, GST_CAPS_NONE)) {
+ params->caption_sink_caps = caption_caps;
+ } else {
+ gst_caps_unref (caption_caps);
+ }
+
+ /* create the type now */
+ type =
+ g_type_register_static (parent_type, prop->type_name,
+ &subclass_typeinfo, 0);
+ g_type_set_qdata (type, GST_QT_MUX_PARAMS_QDATA, (gpointer) params);
+
+ if (!gst_element_register (plugin, prop->name, prop->rank, type))
+ return FALSE;
+
+ i++;
+ }
+
+ GST_LOG ("Finished registering muxers");
+
+ /* FIXME: ideally the classification tag should be added and
+ * registered in the gstreamer core gsttaglist
+ */
+
+ GST_LOG ("Registering tags");
+
+ gst_tag_register (GST_TAG_3GP_CLASSIFICATION, GST_TAG_FLAG_META,
+ G_TYPE_STRING, GST_TAG_3GP_CLASSIFICATION, "content classification",
+ gst_tag_merge_use_first);
+
+ isomp4_element_init (plugin);
+
+ GST_LOG ("Finished registering tags");
+
+ return TRUE;
+ }
+
+ GST_ELEMENT_REGISTER_DEFINE_CUSTOM (qtmux, gst_qt_mux_register);
--- /dev/null
+ /* Quicktime muxer plugin for GStreamer
+ * Copyright (C) 2008-2010 Thiago Santos <thiagoss@embedded.ufcg.edu.br>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+ /*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+ #ifndef __GST_QT_MUX_H__
+ #define __GST_QT_MUX_H__
+
+ #include <gst/gst.h>
+ #include <gst/base/gstaggregator.h>
+
+ #include "fourcc.h"
+ #include "atoms.h"
+ #include "atomsrecovery.h"
+ #include "gstqtmuxmap.h"
+
+ G_BEGIN_DECLS
+
+ #define GST_TYPE_QT_MUX (gst_qt_mux_get_type())
+ #define GST_QT_MUX(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_QT_MUX, GstQTMux))
+ #define GST_QT_MUX_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_QT_MUX, GstQTMux))
+ #define GST_IS_QT_MUX(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_QT_MUX))
+ #define GST_IS_QT_MUX_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_QT_MUX))
+ #define GST_QT_MUX_CAST(obj) ((GstQTMux*)(obj))
+
+
+ typedef struct _GstQTMux GstQTMux;
+ typedef struct _GstQTMuxClass GstQTMuxClass;
+ typedef struct _GstQTMuxPad GstQTMuxPad;
+ typedef struct _GstQTMuxPadClass GstQTMuxPadClass;
+
+ /*
+ * GstQTPadPrepareBufferFunc
+ *
+ * Receives a buffer (takes ref) and returns a new buffer that should
+ * replace the passed one.
+ *
+ * Useful for when the pad/datatype needs some manipulation before
+ * being muxed. (Originally added for image/x-jpc support, for which buffers
+ * need to be wrapped into an isom box)
+ */
+ typedef GstBuffer * (*GstQTPadPrepareBufferFunc) (GstQTMuxPad * pad,
+ GstBuffer * buf, GstQTMux * qtmux);
+ typedef gboolean (*GstQTPadSetCapsFunc) (GstQTMuxPad * pad, GstCaps * caps);
+ typedef GstBuffer * (*GstQTPadCreateEmptyBufferFunc) (GstQTMuxPad * pad, gint64 duration);
+
+ GType gst_qt_mux_pad_get_type (void);
+
+ #define GST_TYPE_QT_MUX_PAD \
+ (gst_qt_mux_pad_get_type())
+ #define GST_QT_MUX_PAD(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_QT_MUX_PAD, GstQTMuxPad))
+ #define GST_QT_MUX_PAD_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_QT_MUX_PAD, GstQTMuxPadClass))
+ #define GST_IS_QT_MUX_PAD(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_QT_MUX_PAD))
+ #define GST_IS_QT_MUX_PAD_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_QT_MUX_PAD))
+ #define GST_QT_MUX_PAD_CAST(obj) \
+ ((GstQTMuxPad *)(obj))
+
+ struct _GstQTMuxPad
+ {
+ GstAggregatorPad parent;
+
+ guint32 trak_timescale;
+
+ /* fourcc id of stream */
+ guint32 fourcc;
+ /* whether the format in use has out-of-order buffers */
+ gboolean is_out_of_order;
+ /* if not 0, track with constant sized samples, e.g. raw audio */
+ guint sample_size;
+ /* make sync table entry */
+ gboolean sync;
+ /* if it is a sparse stream
+ * (meaning we can't use PTS differences to compute duration) */
+ gboolean sparse;
+ /* bitrates */
+ guint32 avg_bitrate, max_bitrate;
+ /* expected sample duration */
+ guint expected_sample_duration_n;
+ guint expected_sample_duration_d;
+
+ /* for avg bitrate calculation */
+ guint64 total_bytes;
+ guint64 total_duration;
+
+ GstBuffer *last_buf;
+ /* dts of last_buf */
+ GstClockTime last_dts;
+ guint64 sample_offset;
+
+ /* TRUE if we saw backward/missing DTS on this
+ * pad (and warned about it) */
+ gboolean warned_empty_duration;
+
+ /* This is to compensate for CTTS */
+ GstClockTime dts_adjustment;
+
+ /* store the first timestamp, for comparing with other streams and
+ * detecting late streams */
+ /* subject to dts adjustment */
+ GstClockTime first_ts;
+ GstClockTime first_dts;
+
+ gint64 dts; /* the signed version of the DTS converted to running time. */
+
+ /* all the atom and chunk book-keeping is delegated here
+ * unowned/uncounted reference, parent MOOV owns */
+ AtomTRAK *trak;
+ AtomTRAK *tc_trak;
+ SampleTableEntry *trak_ste;
+ /* fragmented support */
+ /* meta data book-keeping delegated here */
+ AtomTRAF *traf;
+ /* fragment buffers */
+ ATOM_ARRAY (GstBuffer *) fragment_buffers;
+ /* running fragment duration */
+ gint64 fragment_duration;
+ /* optional fragment index book-keeping */
+ AtomTFRA *tfra;
+
+ /* Set when tags are received, cleared when written to moov */
+ gboolean tags_changed;
+
+ GstTagList *tags;
+
+ /* if nothing is set, it won't be called */
+ GstQTPadPrepareBufferFunc prepare_buf_func;
+ GstQTPadSetCapsFunc set_caps;
+ GstQTPadCreateEmptyBufferFunc create_empty_buffer;
+
+ /* SMPTE timecode */
+ GstVideoTimeCode *first_tc;
+ GstClockTime first_pts;
+ guint64 tc_pos;
+
+ /* for keeping track in pre-fill mode */
+ GArray *samples;
+ guint first_cc_sample_size;
+ /* current sample */
+ GstAdapter *raw_audio_adapter;
+ guint64 raw_audio_adapter_offset;
+ GstClockTime raw_audio_adapter_pts;
+ GstFlowReturn flow_status;
+
+ GstCaps *configured_caps;
+ };
+
+ struct _GstQTMuxPadClass
+ {
+ GstAggregatorPadClass parent;
+ };
+
+ #define QTMUX_NO_OF_TS 10
+
+ typedef enum _GstQTMuxState
+ {
+ GST_QT_MUX_STATE_NONE,
+ GST_QT_MUX_STATE_STARTED,
+ GST_QT_MUX_STATE_DATA,
+ GST_QT_MUX_STATE_EOS
+ } GstQTMuxState;
+
+ typedef enum _GstQtMuxMode {
+ GST_QT_MUX_MODE_MOOV_AT_END,
+ GST_QT_MUX_MODE_FRAGMENTED,
+ GST_QT_MUX_MODE_FAST_START,
+ GST_QT_MUX_MODE_ROBUST_RECORDING,
+ GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL,
+ } GstQtMuxMode;
+
+ /**
+ * GstQTMuxFragmentMode:
+ * @GST_QT_MUX_FRAGMENT_DASH_OR_MSS: dash-or-mss
+ * @GST_QT_MUX_FRAGMENT_FIRST_MOOV_THEN_FINALISE: first-moov-then-finalise
+ * @GST_QT_MUX_FRAGMENT_STREAMABLE: streamable (private value)
+ *
+ * Since: 1.20
+ */
+ typedef enum _GstQTMuxFragmentMode
+ {
+ GST_QT_MUX_FRAGMENT_DASH_OR_MSS = 0,
+ GST_QT_MUX_FRAGMENT_FIRST_MOOV_THEN_FINALISE,
+ GST_QT_MUX_FRAGMENT_STREAMABLE = G_MAXUINT32, /* internal value */
+ } GstQTMuxFragmentMode;
+
+ struct _GstQTMux
+ {
+ GstAggregator parent;
+
+ /* state */
+ GstQTMuxState state;
+
+ /* Mux mode, inferred from property
+ * set in gst_qt_mux_start_file() */
+ GstQtMuxMode mux_mode;
+ /* fragment_mode, controls how fragments are created. Only if
+ * @mux_mode == GST_QT_MUX_MODE_FRAGMENTED */
+ GstQTMuxFragmentMode fragment_mode;
+
+ /* whether downstream is seekable */
+ gboolean downstream_seekable;
+
+ /* size of header (prefix, atoms (ftyp, possibly moov, mdat header)) */
+ guint64 header_size;
+ /* accumulated size of raw media data (not including mdat header) */
+ guint64 mdat_size;
+ /* position of the moov (for fragmented mode) or reserved moov atom
+ * area (for robust-muxing mode) */
+ guint64 moov_pos;
+ /* position of mdat atom header (for later updating of size) in
+ * moov-at-end, fragmented and robust-muxing modes */
+ guint64 mdat_pos;
+ /* position of the mdat atom header of the latest fragment for writing
+ * the default base offset in fragmented mode first-moov-then-finalise and
+ * any other future non-streaming fragmented mode */
+ guint64 moof_mdat_pos;
+
+ /* keep track of the largest chunk to fine-tune brands */
+ GstClockTime longest_chunk;
+
+ /* Earliest timestamp across all pads/traks
+ * (unadjusted incoming PTS) */
+ GstClockTime first_ts;
+ /* Last DTS across all pads (= duration) */
+ GstClockTime last_dts;
+
+ /* Last pad we used for writing the current chunk */
+ GstQTMuxPad *current_pad;
+ guint64 current_chunk_size;
+ GstClockTime current_chunk_duration;
+ guint64 current_chunk_offset;
+
+ /* list of buffers to hold for batching inside a single mdat when downstream
+ * is not seekable */
+ GList *output_buffers;
+
+ /* atom helper objects */
+ AtomsContext *context;
+ AtomFTYP *ftyp;
+ AtomMOOV *moov;
+ GSList *extra_atoms; /* list of extra top-level atoms (e.g. UUID for xmp)
+ * Stored as AtomInfo structs */
+
+ /* Set when tags are received, cleared when written to moov */
+ gboolean tags_changed;
+
+ /* fragmented file index */
+ AtomMFRA *mfra;
+
+ /* fast start */
+ FILE *fast_start_file;
+
+ /* moov recovery */
+ FILE *moov_recov_file;
+
+ /* fragment sequence */
+ guint32 fragment_sequence;
+
+ /* properties */
+ guint32 timescale;
+ guint32 trak_timescale;
+ AtomsTreeFlavor flavor;
+ gboolean fast_start;
+ gboolean guess_pts;
+ #ifndef GST_REMOVE_DEPRECATED
+ gint dts_method;
+ #endif
+ gchar *fast_start_file_path;
+ gchar *moov_recov_file_path;
+ guint32 fragment_duration;
+ /* Whether or not to work in 'streamable' mode and not
+ * seek to rewrite headers - only valid for fragmented
+ * mode. Deprecated */
+ gboolean streamable;
+
+ /* Requested target maximum duration */
+ GstClockTime reserved_max_duration;
+ /* Estimate of remaining reserved header space (in ns of recording) */
+ GstClockTime reserved_duration_remaining;
+ /* Multiplier for conversion from reserved_max_duration to bytes */
+ guint reserved_bytes_per_sec_per_trak;
+
+ guint64 interleave_bytes;
+ GstClockTime interleave_time;
+ gboolean interleave_bytes_set, interleave_time_set;
+ gboolean force_chunks;
+
+ GstClockTime max_raw_audio_drift;
+
+ /* Reserved minimum MOOV size in bytes
+ * This is converted from reserved_max_duration
+ * using the bytes/trak/sec estimate */
+ guint32 reserved_moov_size;
+ /* Basic size of the moov (static headers + tags) */
+ guint32 base_moov_size;
+ /* Size of the most recently generated moov header */
+ guint32 last_moov_size;
+ /* True if the first moov in the ping-pong buffers
+ * is the active one. See gst_qt_mux_robust_recording_rewrite_moov() */
+ gboolean reserved_moov_first_active;
+
+ /* Tracking of periodic MOOV updates */
+ GstClockTime last_moov_update;
+ GstClockTime reserved_moov_update_period;
+ GstClockTime muxed_since_last_update;
+
+ gboolean reserved_prefill;
+
+ GstClockTime start_gap_threshold;
+
+ gboolean force_create_timecode_trak;
+
+ /* for request pad naming */
+ guint video_pads, audio_pads, subtitle_pads, caption_pads;
++
++#ifdef TIZEN_FEATURE_GST_MUX_ENHANCEMENT
++ guint expected_trailer_size;
++ guint audio_expected_trailer_size;
++ guint video_expected_trailer_size;
++#endif /* TIZEN_FEATURE_GST_MUX_ENHANCEMENT */
+ };
+
+ struct _GstQTMuxClass
+ {
+ GstAggregatorClass parent_class;
+
+ GstQTMuxFormat format;
+ };
+
+ /* type register helper struct */
+ typedef struct _GstQTMuxClassParams
+ {
+ GstQTMuxFormatProp *prop;
+ GstCaps *src_caps;
+ GstCaps *video_sink_caps;
+ GstCaps *audio_sink_caps;
+ GstCaps *subtitle_sink_caps;
+ GstCaps *caption_sink_caps;
+ } GstQTMuxClassParams;
+
+ #define GST_QT_MUX_PARAMS_QDATA g_quark_from_static_string("qt-mux-params")
+
+ GType gst_qt_mux_get_type (void);
+ gboolean gst_qt_mux_register (GstPlugin * plugin);
+
+ /* FIXME: ideally classification tag should be added and
+ * registered in gstreamer core gsttaglist
+ *
+ * this tag is a string in the format: entityfourcc://table_num/content
+ * FIXME Shouldn't we add a field for 'language'?
+ */
+ #define GST_TAG_3GP_CLASSIFICATION "classification"
+
+ G_END_DECLS
+
+ #endif /* __GST_QT_MUX_H__ */
--- /dev/null
-# include <zlib.h>
+ /* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2003> David A. Schleef <ds@schleef.org>
+ * Copyright (C) <2006> Wim Taymans <wim@fluendo.com>
+ * Copyright (C) <2007> Julien Moutte <julien@fluendo.com>
+ * Copyright (C) <2009> Tim-Philipp Müller <tim centricular net>
+ * Copyright (C) <2009> STEricsson <benjamin.gaignard@stericsson.com>
+ * Copyright (C) <2013> Sreerenj Balachandran <sreerenj.balachandran@intel.com>
+ * Copyright (C) <2013> Intel Corporation
+ * Copyright (C) <2014> Centricular Ltd
+ * Copyright (C) <2015> YouView TV Ltd.
+ * Copyright (C) <2016> British Broadcasting Corporation
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /**
+ * SECTION:element-qtdemux
+ * @title: qtdemux
+ *
+ * Demuxes a .mov file into raw or compressed audio and/or video streams.
+ *
+ * This element supports both push and pull-based scheduling, depending on the
+ * capabilities of the upstream elements.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 filesrc location=test.mov ! qtdemux name=demux demux.audio_0 ! queue ! decodebin ! audioconvert ! audioresample ! autoaudiosink demux.video_0 ! queue ! decodebin ! videoconvert ! videoscale ! autovideosink
+ * ]| Play (parse and decode) a .mov file and try to output it to
+ * an automatically detected soundcard and videosink. If the MOV file contains
+ * compressed audio or video data, this will only work if you have the
+ * right decoder elements/plugins installed.
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include "gst/gst-i18n-plugin.h"
+
+ #include <glib/gprintf.h>
+ #include <gst/base/base.h>
+ #include <gst/tag/tag.h>
+ #include <gst/audio/audio.h>
+ #include <gst/riff/riff.h>
+ #include <gst/pbutils/pbutils.h>
+
+ #include "gstisomp4elements.h"
+ #include "qtatomparser.h"
+ #include "qtdemux_types.h"
+ #include "qtdemux_dump.h"
+ #include "fourcc.h"
+ #include "descriptors.h"
+ #include "qtdemux_lang.h"
+ #include "qtdemux.h"
+ #include "qtpalette.h"
+ #include "qtdemux_tags.h"
+ #include "qtdemux_tree.h"
+
+ #include <stdlib.h>
+ #include <string.h>
+
+ #include <math.h>
+ #include <gst/math-compat.h>
+
+ #ifdef HAVE_ZLIB
++#include <zlib.h>
+ #endif
+
+ /* max. size considered 'sane' for non-mdat atoms */
+ #define QTDEMUX_MAX_ATOM_SIZE (32*1024*1024)
+
+ /* if the sample index is larger than this, something is likely wrong */
+ #define QTDEMUX_MAX_SAMPLE_INDEX_SIZE (200*1024*1024)
+
+ /* For converting qt creation times to unix epoch times */
+ #define QTDEMUX_SECONDS_PER_DAY (60 * 60 * 24)
+ #define QTDEMUX_LEAP_YEARS_FROM_1904_TO_1970 17
+ #define QTDEMUX_SECONDS_FROM_1904_TO_1970 (((1970 - 1904) * (guint64) 365 + \
+ QTDEMUX_LEAP_YEARS_FROM_1904_TO_1970) * QTDEMUX_SECONDS_PER_DAY)
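+
+ /* Illustrative sketch (not part of the original sources): converting a
+ * hypothetical 'mvhd' creation_time, counted in seconds since 1904-01-01,
+ * to a unix timestamp. QTDEMUX_SECONDS_FROM_1904_TO_1970 works out to
+ * 2082844800 seconds (66 years, 17 of them containing a leap day):
+ * |[
+ * guint64 creation_time = G_GUINT64_CONSTANT (3700000000);
+ *
+ * if (creation_time >= QTDEMUX_SECONDS_FROM_1904_TO_1970)
+ *   creation_time -= QTDEMUX_SECONDS_FROM_1904_TO_1970;
+ * ]|
+ */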
+
+ #define QTDEMUX_TREE_NODE_FOURCC(n) (QT_FOURCC(((guint8 *) (n)->data) + 4))
+
+ #define STREAM_IS_EOS(s) ((s)->time_position == GST_CLOCK_TIME_NONE)
+
+ #define ABSDIFF(x, y) ( (x) > (y) ? ((x) - (y)) : ((y) - (x)) )
+
+ #define QTDEMUX_STREAM(s) ((QtDemuxStream *)(s))
+ #define QTDEMUX_N_STREAMS(demux) ((demux)->active_streams->len)
+ #define QTDEMUX_NTH_STREAM(demux,idx) \
+ QTDEMUX_STREAM(g_ptr_array_index((demux)->active_streams,idx))
+ #define QTDEMUX_NTH_OLD_STREAM(demux,idx) \
+ QTDEMUX_STREAM(g_ptr_array_index((demux)->old_streams,idx))
+
+ #define CUR_STREAM(s) (&((s)->stsd_entries[(s)->cur_stsd_entry_index]))
+
+ GST_DEBUG_CATEGORY (qtdemux_debug);
+ #define GST_CAT_DEFAULT qtdemux_debug
+
+ typedef struct _QtDemuxCencSampleSetInfo QtDemuxCencSampleSetInfo;
+ typedef struct _QtDemuxAavdEncryptionInfo QtDemuxAavdEncryptionInfo;
+
++#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
++typedef struct _QtDemuxSphericalMetadata QtDemuxSphericalMetadata;
++
++struct _QtDemuxSphericalMetadata
++{
++ gboolean is_spherical;
++ gboolean is_stitched;
++ char *stitching_software;
++ char *projection_type;
++ char *stereo_mode;
++ int source_count;
++ int init_view_heading;
++ int init_view_pitch;
++ int init_view_roll;
++ int timestamp;
++ int full_pano_width_pixels;
++ int full_pano_height_pixels;
++ int cropped_area_image_width;
++ int cropped_area_image_height;
++ int cropped_area_left;
++ int cropped_area_top;
++ QTDEMUX_AMBISONIC_TYPE ambisonic_type;
++ QTDEMUX_AMBISONIC_FORMAT ambisonic_format;
++ QTDEMUX_AMBISONIC_ORDER ambisonic_order;
++};
++
++#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
++
+ /* Macros for converting to/from timescale */
+ #define QTSTREAMTIME_TO_GSTTIME(stream, value) (gst_util_uint64_scale((value), GST_SECOND, (stream)->timescale))
+ #define GSTTIME_TO_QTSTREAMTIME(stream, value) (gst_util_uint64_scale((value), (stream)->timescale, GST_SECOND))
+
+ #define QTTIME_TO_GSTTIME(qtdemux, value) (gst_util_uint64_scale((value), GST_SECOND, (qtdemux)->timescale))
+ #define GSTTIME_TO_QTTIME(qtdemux, value) (gst_util_uint64_scale((value), (qtdemux)->timescale, GST_SECOND))
+
+ /* timestamp is the DTS */
+ #define QTSAMPLE_DTS(stream,sample) (QTSTREAMTIME_TO_GSTTIME((stream), (sample)->timestamp))
+ /* timestamp + offset + cslg_shift is the outgoing PTS */
+ #define QTSAMPLE_PTS(stream,sample) (QTSTREAMTIME_TO_GSTTIME((stream), (sample)->timestamp + (stream)->cslg_shift + (sample)->pts_offset))
+ /* timestamp + offset is the PTS used for internal seek calculations */
+ #define QTSAMPLE_PTS_NO_CSLG(stream,sample) (QTSTREAMTIME_TO_GSTTIME((stream), (sample)->timestamp + (sample)->pts_offset))
+ /* timestamp + duration - dts is the duration */
+ #define QTSAMPLE_DUR_DTS(stream, sample, dts) (QTSTREAMTIME_TO_GSTTIME ((stream), (sample)->timestamp + (sample)->duration) - (dts))
+
+ #define QTSAMPLE_KEYFRAME(stream,sample) ((stream)->all_keyframe || (sample)->keyframe)
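+
+ /* Worked example for the macros above (made-up numbers, illustrative
+ * only): for a stream with timescale 90000 and a sample with timestamp
+ * 180000, pts_offset 3000 and a cslg_shift of 0:
+ * |[
+ * GstClockTime dts = QTSAMPLE_DTS (stream, sample); // 180000/90000 = 2 s
+ * GstClockTime pts = QTSAMPLE_PTS (stream, sample); // 183000/90000 ~ 2.033 s
+ * ]|
+ * i.e. the composition offset shifts the PTS forward relative to the DTS.
+ */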
+
+ #define QTDEMUX_EXPOSE_GET_LOCK(demux) (&((demux)->expose_lock))
+ #define QTDEMUX_EXPOSE_LOCK(demux) G_STMT_START { \
+ GST_TRACE("Locking from thread %p", g_thread_self()); \
+ g_mutex_lock (QTDEMUX_EXPOSE_GET_LOCK (demux)); \
+ GST_TRACE("Locked from thread %p", g_thread_self()); \
+ } G_STMT_END
+
+ #define QTDEMUX_EXPOSE_UNLOCK(demux) G_STMT_START { \
+ GST_TRACE("Unlocking from thread %p", g_thread_self()); \
+ g_mutex_unlock (QTDEMUX_EXPOSE_GET_LOCK (demux)); \
+ } G_STMT_END
+
+ /*
+ * Quicktime has tracks and segments. A track is a continuous piece of
+ * multimedia content. The track is not always played from start to finish but
+ * instead, pieces of the track are 'cut out' and played in sequence. This is
+ * what the segments do.
+ *
+ * Inside the track we have keyframes (K) and delta frames. The track has its
+ * own timing, which starts from 0 and extends to end. The position in the track
+ * is called the media_time.
+ *
+ * The segments now describe the pieces that should be played from this track
+ * and are basically tuples of media_time/duration/rate entries. We can have
+ * multiple segments and they are all played after one another. An example:
+ *
+ * segment 1: media_time: 1 second, duration: 1 second, rate 1
+ * segment 2: media_time: 3 seconds, duration: 2 seconds, rate 2
+ *
+ * To correctly play back this track, one must play: 1 second of media starting
+ * from media_time 1 followed by 2 seconds of media starting from media_time 3
+ * at a rate of 2.
+ *
+ * Each of the segments will be played at a specific time, the first segment at
+ * time 0, the second one after the duration of the first one, etc. Note that
+ * the time in resulting playback is not identical to the media_time of the
+ * track anymore.
+ *
+ * Visually, assuming the track has 4 seconds of media_time:
+ *
+ * (a) (b) (c) (d)
+ * .-----------------------------------------------------------.
+ * track: | K.....K.........K........K.......K.......K...........K... |
+ * '-----------------------------------------------------------'
+ * 0 1 2 3 4
+ * .------------^ ^ .----------^ ^
+ * / .-------------' / .------------------'
+ * / / .-----' /
+ * .--------------. .--------------.
+ * | segment 1 | | segment 2 |
+ * '--------------' '--------------'
+ *
+ * The challenge here is to cut out the right pieces of the track for each of
+ * the playback segments. This fortunately can easily be done with the SEGMENT
+ * events of GStreamer.
+ *
+ * For playback of segment 1, we need to provide the decoder with the keyframe
+ * (a), in the above figure, but we must instruct it only to output the decoded
+ * data between second 1 and 2. We do this with a SEGMENT event for 1 to 2, time
+ * position set to the time of the segment: 0.
+ *
+ * We then proceed to push data from keyframe (a) to frame (b). The decoder
+ * decodes but clips all before media_time 1.
+ *
+ * After finishing a segment, we push out a new SEGMENT event with the clipping
+ * boundaries of the new data.
+ *
+ * This is a good usecase for the GStreamer accumulated SEGMENT events.
+ */
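+
+ /* A minimal sketch (not from the original code) of the mapping described
+ * above: given a position in playback time falling inside a segment, the
+ * corresponding media_time in the track is
+ * |[
+ * GstClockTime seg_time = (position - seg->time) * seg->rate;
+ * GstClockTime media_time = seg->media_start + seg_time;
+ * ]|
+ * e.g. with the example segments above, playback position 1.25s falls in
+ * segment 2 (time 1s, media_start 3s, rate 2), giving media_time
+ * 3s + 0.25s * 2 = 3.5s.
+ */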
+
+ struct _QtDemuxSegment
+ {
+ /* global time and duration, all gst time */
+ GstClockTime time;
+ GstClockTime stop_time;
+ GstClockTime duration;
+ /* media time of trak, all gst time */
+ GstClockTime media_start;
+ GstClockTime media_stop;
+ gdouble rate;
+ /* Media start time in trak timescale units */
+ guint32 trak_media_start;
+ };
+
+ #define QTSEGMENT_IS_EMPTY(s) ((s)->media_start == GST_CLOCK_TIME_NONE)
+
+ /* Used with fragmented MP4 files (mfra atom) */
+ struct _QtDemuxRandomAccessEntry
+ {
+ GstClockTime ts;
+ guint64 moof_offset;
+ };
+
+
+ /* Contains properties and cryptographic info for a set of samples from a
+ * track protected using Common Encryption (cenc) */
+ struct _QtDemuxCencSampleSetInfo
+ {
+ GstStructure *default_properties;
+
+ /* @crypto_info holds one GstStructure per sample */
+ GPtrArray *crypto_info;
+ };
+
+ struct _QtDemuxAavdEncryptionInfo
+ {
+ GstStructure *default_properties;
+ };
+
+ static const gchar *
+ qt_demux_state_string (enum QtDemuxState state)
+ {
+ switch (state) {
+ case QTDEMUX_STATE_INITIAL:
+ return "<INITIAL>";
+ case QTDEMUX_STATE_HEADER:
+ return "<HEADER>";
+ case QTDEMUX_STATE_MOVIE:
+ return "<MOVIE>";
+ case QTDEMUX_STATE_BUFFER_MDAT:
+ return "<BUFFER_MDAT>";
+ default:
+ return "<UNKNOWN>";
+ }
+ }
+
+ static GstFlowReturn qtdemux_add_fragmented_samples (GstQTDemux * qtdemux);
+
+ static void gst_qtdemux_check_send_pending_segment (GstQTDemux * demux);
+
+ static GstStaticPadTemplate gst_qtdemux_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/quicktime; video/mj2; audio/x-m4a; "
+ "application/x-3gp")
+ );
+
+ static GstStaticPadTemplate gst_qtdemux_videosrc_template =
+ GST_STATIC_PAD_TEMPLATE ("video_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+ static GstStaticPadTemplate gst_qtdemux_audiosrc_template =
+ GST_STATIC_PAD_TEMPLATE ("audio_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+ static GstStaticPadTemplate gst_qtdemux_subsrc_template =
+ GST_STATIC_PAD_TEMPLATE ("subtitle_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+ #define gst_qtdemux_parent_class parent_class
+ G_DEFINE_TYPE (GstQTDemux, gst_qtdemux, GST_TYPE_ELEMENT);
+ GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (qtdemux, "qtdemux",
+ GST_RANK_PRIMARY, GST_TYPE_QTDEMUX, isomp4_element_init (plugin));
+
+ static void gst_qtdemux_dispose (GObject * object);
+ static void gst_qtdemux_finalize (GObject * object);
+
+ static guint32
+ gst_qtdemux_find_index_linear (GstQTDemux * qtdemux, QtDemuxStream * str,
+ GstClockTime media_time);
+ static guint32
+ gst_qtdemux_find_index_for_given_media_offset_linear (GstQTDemux * qtdemux,
+ QtDemuxStream * str, gint64 media_offset);
+
+ #if 0
+ static void gst_qtdemux_set_index (GstElement * element, GstIndex * index);
+ static GstIndex *gst_qtdemux_get_index (GstElement * element);
+ #endif
+ static GstStateChangeReturn gst_qtdemux_change_state (GstElement * element,
+ GstStateChange transition);
+ static void gst_qtdemux_set_context (GstElement * element,
+ GstContext * context);
+ static gboolean qtdemux_sink_activate (GstPad * sinkpad, GstObject * parent);
+ static gboolean qtdemux_sink_activate_mode (GstPad * sinkpad,
+ GstObject * parent, GstPadMode mode, gboolean active);
+
+ static void gst_qtdemux_loop (GstPad * pad);
+ static GstFlowReturn gst_qtdemux_chain (GstPad * sinkpad, GstObject * parent,
+ GstBuffer * inbuf);
+ static gboolean gst_qtdemux_handle_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+ static gboolean gst_qtdemux_handle_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+ static gboolean gst_qtdemux_setcaps (GstQTDemux * qtdemux, GstCaps * caps);
+ static gboolean gst_qtdemux_configure_stream (GstQTDemux * qtdemux,
+ QtDemuxStream * stream);
+ static void gst_qtdemux_stream_check_and_change_stsd_index (GstQTDemux * demux,
+ QtDemuxStream * stream);
+ static GstFlowReturn gst_qtdemux_process_adapter (GstQTDemux * demux,
+ gboolean force);
+
+ static void gst_qtdemux_check_seekability (GstQTDemux * demux);
+
+ static gboolean qtdemux_parse_moov (GstQTDemux * qtdemux,
+ const guint8 * buffer, guint length);
+ static gboolean qtdemux_parse_node (GstQTDemux * qtdemux, GNode * node,
+ const guint8 * buffer, guint length);
+ static gboolean qtdemux_parse_tree (GstQTDemux * qtdemux);
+
+ static void gst_qtdemux_handle_esds (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, QtDemuxStreamStsdEntry * entry, GNode * esds,
+ GstTagList * list);
+ static GstCaps *qtdemux_video_caps (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, QtDemuxStreamStsdEntry * entry, guint32 fourcc,
+ const guint8 * stsd_entry_data, gchar ** codec_name);
+ static GstCaps *qtdemux_audio_caps (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, QtDemuxStreamStsdEntry * entry, guint32 fourcc,
+ const guint8 * data, int len, gchar ** codec_name);
+ static GstCaps *qtdemux_sub_caps (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ QtDemuxStreamStsdEntry * entry, guint32 fourcc, const guint8 * data,
+ gchar ** codec_name);
+ static GstCaps *qtdemux_generic_caps (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, QtDemuxStreamStsdEntry * entry, guint32 fourcc,
+ const guint8 * stsd_entry_data, gchar ** codec_name);
+
+ static gboolean qtdemux_parse_samples (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, guint32 n);
+ static GstFlowReturn qtdemux_expose_streams (GstQTDemux * qtdemux);
+ static QtDemuxStream *gst_qtdemux_stream_ref (QtDemuxStream * stream);
+ static void gst_qtdemux_stream_unref (QtDemuxStream * stream);
+ static void gst_qtdemux_stream_clear (QtDemuxStream * stream);
+ static GstFlowReturn qtdemux_prepare_streams (GstQTDemux * qtdemux);
+ static void qtdemux_do_allocation (QtDemuxStream * stream,
+ GstQTDemux * qtdemux);
+ static gboolean gst_qtdemux_activate_segment (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, guint32 seg_idx, GstClockTime offset);
+ static gboolean gst_qtdemux_stream_update_segment (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, gint seg_idx, GstClockTime offset,
+ GstClockTime * _start, GstClockTime * _stop);
+ static void gst_qtdemux_send_gap_for_segment (GstQTDemux * demux,
+ QtDemuxStream * stream, gint segment_index, GstClockTime pos);
+
+ static gboolean qtdemux_pull_mfro_mfra (GstQTDemux * qtdemux);
+ static void check_update_duration (GstQTDemux * qtdemux, GstClockTime duration);
+
+ static gchar *qtdemux_uuid_bytes_to_string (gconstpointer uuid_bytes);
+
+ static GstStructure *qtdemux_get_cenc_sample_properties (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, guint sample_index);
+ static void gst_qtdemux_append_protection_system_id (GstQTDemux * qtdemux,
+ const gchar * id);
+ static void qtdemux_gst_structure_free (GstStructure * gststructure);
+ static void gst_qtdemux_reset (GstQTDemux * qtdemux, gboolean hard);
+
++#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
++static void gst_tag_register_spherical_tags (void);
++#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
++
+ static void
+ gst_qtdemux_class_init (GstQTDemuxClass * klass)
+ {
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ gobject_class->dispose = gst_qtdemux_dispose;
+ gobject_class->finalize = gst_qtdemux_finalize;
+
+ gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_qtdemux_change_state);
+ #if 0
+ gstelement_class->set_index = GST_DEBUG_FUNCPTR (gst_qtdemux_set_index);
+ gstelement_class->get_index = GST_DEBUG_FUNCPTR (gst_qtdemux_get_index);
+ #endif
+ gstelement_class->set_context = GST_DEBUG_FUNCPTR (gst_qtdemux_set_context);
+
+ gst_tag_register_musicbrainz_tags ();
+
++#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
++ gst_tag_register_spherical_tags ();
++#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
++
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_qtdemux_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_qtdemux_videosrc_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_qtdemux_audiosrc_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_qtdemux_subsrc_template);
+ gst_element_class_set_static_metadata (gstelement_class, "QuickTime demuxer",
+ "Codec/Demuxer",
+ "Demultiplex a QuickTime file into audio and video streams",
+ "David Schleef <ds@schleef.org>, Wim Taymans <wim@fluendo.com>");
+
+ GST_DEBUG_CATEGORY_INIT (qtdemux_debug, "qtdemux", 0, "qtdemux plugin");
+ gst_riff_init ();
+ }
+
+ static void
+ gst_qtdemux_init (GstQTDemux * qtdemux)
+ {
+ qtdemux->sinkpad =
+ gst_pad_new_from_static_template (&gst_qtdemux_sink_template, "sink");
+ gst_pad_set_activate_function (qtdemux->sinkpad, qtdemux_sink_activate);
+ gst_pad_set_activatemode_function (qtdemux->sinkpad,
+ qtdemux_sink_activate_mode);
+ gst_pad_set_chain_function (qtdemux->sinkpad, gst_qtdemux_chain);
+ gst_pad_set_event_function (qtdemux->sinkpad, gst_qtdemux_handle_sink_event);
+ gst_pad_set_query_function (qtdemux->sinkpad, gst_qtdemux_handle_sink_query);
+ gst_element_add_pad (GST_ELEMENT_CAST (qtdemux), qtdemux->sinkpad);
+
+ qtdemux->adapter = gst_adapter_new ();
+ g_queue_init (&qtdemux->protection_event_queue);
+ qtdemux->flowcombiner = gst_flow_combiner_new ();
+ g_mutex_init (&qtdemux->expose_lock);
+
+ qtdemux->active_streams = g_ptr_array_new_with_free_func
+ ((GDestroyNotify) gst_qtdemux_stream_unref);
+ qtdemux->old_streams = g_ptr_array_new_with_free_func
+ ((GDestroyNotify) gst_qtdemux_stream_unref);
+
++#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
++ qtdemux->spherical_metadata = (QtDemuxSphericalMetadata *)
++ malloc (sizeof (QtDemuxSphericalMetadata));
++
++ if (qtdemux->spherical_metadata) {
++ qtdemux->spherical_metadata->is_spherical = FALSE;
++ qtdemux->spherical_metadata->is_stitched = FALSE;
++ qtdemux->spherical_metadata->stitching_software = NULL;
++ qtdemux->spherical_metadata->projection_type = NULL;
++ qtdemux->spherical_metadata->stereo_mode = NULL;
++ qtdemux->spherical_metadata->source_count = 0;
++ qtdemux->spherical_metadata->init_view_heading = 0;
++ qtdemux->spherical_metadata->init_view_pitch = 0;
++ qtdemux->spherical_metadata->init_view_roll = 0;
++ qtdemux->spherical_metadata->timestamp = 0;
++ qtdemux->spherical_metadata->full_pano_width_pixels = 0;
++ qtdemux->spherical_metadata->full_pano_height_pixels = 0;
++ qtdemux->spherical_metadata->cropped_area_image_width = 0;
++ qtdemux->spherical_metadata->cropped_area_image_height = 0;
++ qtdemux->spherical_metadata->cropped_area_left = 0;
++ qtdemux->spherical_metadata->cropped_area_top = 0;
++ qtdemux->spherical_metadata->ambisonic_type = QTDEMUX_AMBISONIC_TYPE_UNKNOWN;
++ qtdemux->spherical_metadata->ambisonic_format = QTDEMUX_AMBISONIC_FORMAT_UNKNOWN;
++ qtdemux->spherical_metadata->ambisonic_order = QTDEMUX_AMBISONIC_ORDER_UNKNOWN;
++ }
++#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
++
+ GST_OBJECT_FLAG_SET (qtdemux, GST_ELEMENT_FLAG_INDEXABLE);
+
+ gst_qtdemux_reset (qtdemux, TRUE);
+ }
+
+ static void
+ gst_qtdemux_finalize (GObject * object)
+ {
+ GstQTDemux *qtdemux = GST_QTDEMUX (object);
+
+ g_free (qtdemux->redirect_location);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ static void
+ gst_qtdemux_dispose (GObject * object)
+ {
+ GstQTDemux *qtdemux = GST_QTDEMUX (object);
+
++#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
++ if (qtdemux->spherical_metadata) {
++ if (qtdemux->spherical_metadata->stitching_software)
++ free (qtdemux->spherical_metadata->stitching_software);
++ if (qtdemux->spherical_metadata->projection_type)
++ free (qtdemux->spherical_metadata->projection_type);
++ if (qtdemux->spherical_metadata->stereo_mode)
++ free (qtdemux->spherical_metadata->stereo_mode);
++
++ free (qtdemux->spherical_metadata);
++ qtdemux->spherical_metadata = NULL;
++ }
++#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
++
+ if (qtdemux->adapter) {
+ g_object_unref (G_OBJECT (qtdemux->adapter));
+ qtdemux->adapter = NULL;
+ }
+ gst_tag_list_unref (qtdemux->tag_list);
+ gst_flow_combiner_free (qtdemux->flowcombiner);
+ g_queue_foreach (&qtdemux->protection_event_queue, (GFunc) gst_event_unref,
+ NULL);
+ g_queue_clear (&qtdemux->protection_event_queue);
+
+ g_free (qtdemux->cenc_aux_info_sizes);
+ qtdemux->cenc_aux_info_sizes = NULL;
+ g_mutex_clear (&qtdemux->expose_lock);
+
+ g_ptr_array_free (qtdemux->active_streams, TRUE);
+ g_ptr_array_free (qtdemux->old_streams, TRUE);
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+ }
+
+ static void
+ gst_qtdemux_post_no_playable_stream_error (GstQTDemux * qtdemux)
+ {
+ if (qtdemux->redirect_location) {
+ GST_ELEMENT_ERROR_WITH_DETAILS (qtdemux, STREAM, DEMUX,
+ (_("This file contains no playable streams.")),
+ ("no known streams found, a redirect message has been posted"),
+ ("redirect-location", G_TYPE_STRING, qtdemux->redirect_location, NULL));
+ } else {
+ GST_ELEMENT_ERROR (qtdemux, STREAM, DEMUX,
+ (_("This file contains no playable streams.")),
+ ("no known streams found"));
+ }
+ }
+
+ static GstBuffer *
+ _gst_buffer_new_wrapped (gpointer mem, gsize size, GFreeFunc free_func)
+ {
+ return gst_buffer_new_wrapped_full (free_func ? 0 : GST_MEMORY_FLAG_READONLY,
+ mem, size, 0, size, mem, free_func);
+ }
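+
+ /* Usage sketch for the helper above (illustrative; moov_data stands for
+ * any long-lived memory owned elsewhere): with a free_func the buffer
+ * takes ownership and stays writable, without one it is a read-only view:
+ * |[
+ * GstBuffer *owned = _gst_buffer_new_wrapped (g_malloc (size), size, g_free);
+ * GstBuffer *view = _gst_buffer_new_wrapped (moov_data, size, NULL);
+ * ]|
+ */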
+
+ static GstFlowReturn
+ gst_qtdemux_pull_atom (GstQTDemux * qtdemux, guint64 offset, guint64 size,
+ GstBuffer ** buf)
+ {
+ GstFlowReturn flow;
+ GstMapInfo map;
+ gsize bsize;
+
+ if (G_UNLIKELY (size == 0)) {
+ GstFlowReturn ret;
+ GstBuffer *tmp = NULL;
+
+ ret = gst_qtdemux_pull_atom (qtdemux, offset, sizeof (guint32), &tmp);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ gst_buffer_map (tmp, &map, GST_MAP_READ);
+ size = QT_UINT32 (map.data);
+ GST_DEBUG_OBJECT (qtdemux, "size 0x%08" G_GINT64_MODIFIER "x", size);
+
+ gst_buffer_unmap (tmp, &map);
+ gst_buffer_unref (tmp);
+ }
+
+ /* Sanity check: catch bogus sizes (fuzzed/broken files) */
+ if (G_UNLIKELY (size > QTDEMUX_MAX_ATOM_SIZE)) {
+ if (qtdemux->state != QTDEMUX_STATE_MOVIE && qtdemux->got_moov) {
+ /* we're pulling header but already got most interesting bits,
+ * so never mind the rest (e.g. tags) (that much) */
+ GST_WARNING_OBJECT (qtdemux, "atom has bogus size %" G_GUINT64_FORMAT,
+ size);
+ return GST_FLOW_EOS;
+ } else {
+ GST_ELEMENT_ERROR (qtdemux, STREAM, DEMUX,
+ (_("This file is invalid and cannot be played.")),
+ ("atom has bogus size %" G_GUINT64_FORMAT, size));
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ flow = gst_pad_pull_range (qtdemux->sinkpad, offset, size, buf);
+
+ if (G_UNLIKELY (flow != GST_FLOW_OK))
+ return flow;
+
+ bsize = gst_buffer_get_size (*buf);
+ /* Catch short reads - we don't want any partial atoms */
+ if (G_UNLIKELY (bsize < size)) {
+ GST_WARNING_OBJECT (qtdemux,
+ "short read: %" G_GSIZE_FORMAT " < %" G_GUINT64_FORMAT, bsize, size);
+ gst_buffer_unref (*buf);
+ *buf = NULL;
+ return GST_FLOW_EOS;
+ }
+
+ return flow;
+ }
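+
+ /* For reference (illustrative sketch): the header that the size == 0 path
+ * above peeks at is a 32-bit big-endian size followed by a fourcc; per the
+ * ISO BMFF format, a size field of 1 means a 64-bit 'largesize' follows
+ * the fourcc:
+ * |[
+ * guint64 size = QT_UINT32 (data);
+ * guint32 fourcc = QT_FOURCC (data + 4);
+ *
+ * if (size == 1)
+ *   size = QT_UINT64 (data + 8);
+ * ]|
+ */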
+
+ #if 1
+ static gboolean
+ gst_qtdemux_src_convert (GstQTDemux * qtdemux, GstPad * pad,
+ GstFormat src_format, gint64 src_value, GstFormat dest_format,
+ gint64 * dest_value)
+ {
+ gboolean res = TRUE;
+ QtDemuxStream *stream = gst_pad_get_element_private (pad);
+ gint32 index;
+
+ if (stream->subtype != FOURCC_vide) {
+ res = FALSE;
+ goto done;
+ }
+
+ switch (src_format) {
+ case GST_FORMAT_TIME:
+ switch (dest_format) {
+ case GST_FORMAT_BYTES:{
+ index = gst_qtdemux_find_index_linear (qtdemux, stream, src_value);
+ if (-1 == index) {
+ res = FALSE;
+ goto done;
+ }
+
+ *dest_value = stream->samples[index].offset;
+
+ GST_DEBUG_OBJECT (qtdemux, "Format Conversion Time->Offset :%"
+ GST_TIME_FORMAT "->%" G_GUINT64_FORMAT,
+ GST_TIME_ARGS (src_value), *dest_value);
+ break;
+ }
+ default:
+ res = FALSE;
+ break;
+ }
+ break;
+ case GST_FORMAT_BYTES:
+ switch (dest_format) {
+ case GST_FORMAT_TIME:{
+ index =
+ gst_qtdemux_find_index_for_given_media_offset_linear (qtdemux,
+ stream, src_value);
+
+ if (-1 == index) {
+ res = FALSE;
+ goto done;
+ }
+
+ *dest_value =
+ QTSTREAMTIME_TO_GSTTIME (stream,
+ stream->samples[index].timestamp);
+ GST_DEBUG_OBJECT (qtdemux,
+ "Format Conversion Offset->Time :%" G_GUINT64_FORMAT "->%"
+ GST_TIME_FORMAT, src_value, GST_TIME_ARGS (*dest_value));
+ break;
+ }
+ default:
+ res = FALSE;
+ break;
+ }
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+
+ done:
+ return res;
+ }
+ #endif
+
+ static gboolean
+ gst_qtdemux_get_duration (GstQTDemux * qtdemux, GstClockTime * duration)
+ {
+ gboolean res = FALSE;
+
+ *duration = GST_CLOCK_TIME_NONE;
+
+ if (qtdemux->duration != 0 &&
+ qtdemux->duration != G_MAXINT64 && qtdemux->timescale != 0) {
+ *duration = QTTIME_TO_GSTTIME (qtdemux, qtdemux->duration);
+ res = TRUE;
+ } else {
+ *duration = GST_CLOCK_TIME_NONE;
+ }
+
+ return res;
+ }
+
+ static gboolean
+ gst_qtdemux_handle_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
+ {
+ gboolean res = FALSE;
+ GstQTDemux *qtdemux = GST_QTDEMUX (parent);
+
+ GST_LOG_OBJECT (pad, "%s query", GST_QUERY_TYPE_NAME (query));
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_POSITION:{
+ GstFormat fmt;
+
+ gst_query_parse_position (query, &fmt, NULL);
+ if (fmt == GST_FORMAT_TIME
+ && GST_CLOCK_TIME_IS_VALID (qtdemux->segment.position)) {
+ gst_query_set_position (query, GST_FORMAT_TIME,
+ qtdemux->segment.position);
+ res = TRUE;
+ }
+ }
+ break;
+ case GST_QUERY_DURATION:{
+ GstFormat fmt;
+
+ gst_query_parse_duration (query, &fmt, NULL);
+ if (fmt == GST_FORMAT_TIME) {
+ /* First try to query upstream */
+ res = gst_pad_query_default (pad, parent, query);
+ if (!res) {
+ GstClockTime duration;
+ if (gst_qtdemux_get_duration (qtdemux, &duration) && duration > 0) {
+ gst_query_set_duration (query, GST_FORMAT_TIME, duration);
+ res = TRUE;
+ }
+ }
+ }
+ break;
+ }
+ case GST_QUERY_CONVERT:{
+ GstFormat src_fmt, dest_fmt;
+ gint64 src_value, dest_value = 0;
+
+ gst_query_parse_convert (query, &src_fmt, &src_value, &dest_fmt, NULL);
+
+ res = gst_qtdemux_src_convert (qtdemux, pad,
+ src_fmt, src_value, dest_fmt, &dest_value);
+ if (res)
+ gst_query_set_convert (query, src_fmt, src_value, dest_fmt, dest_value);
+
+ break;
+ }
+ case GST_QUERY_FORMATS:
+ gst_query_set_formats (query, 2, GST_FORMAT_TIME, GST_FORMAT_BYTES);
+ res = TRUE;
+ break;
+ case GST_QUERY_SEEKING:{
+ GstFormat fmt;
+ gboolean seekable;
+
+ gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
+
+ if (fmt == GST_FORMAT_BYTES) {
+ /* We always refuse BYTES seeks from downstream */
+ break;
+ }
+
+ /* try upstream first */
+ res = gst_pad_query_default (pad, parent, query);
+
+ if (!res) {
+ gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
+ if (fmt == GST_FORMAT_TIME) {
+ GstClockTime duration;
+
+ gst_qtdemux_get_duration (qtdemux, &duration);
+ seekable = TRUE;
+ if (!qtdemux->pullbased) {
+ GstQuery *q;
+
+ /* we might be able with help from upstream */
+ seekable = FALSE;
+ q = gst_query_new_seeking (GST_FORMAT_BYTES);
+ if (gst_pad_peer_query (qtdemux->sinkpad, q)) {
+ gst_query_parse_seeking (q, &fmt, &seekable, NULL, NULL);
+ GST_LOG_OBJECT (qtdemux, "upstream BYTE seekable %d", seekable);
+ }
+ gst_query_unref (q);
+ }
+ gst_query_set_seeking (query, GST_FORMAT_TIME, seekable, 0, duration);
+ res = TRUE;
+ }
+ }
+ break;
+ }
+ case GST_QUERY_SEGMENT:
+ {
+ GstFormat format;
+ gint64 start, stop;
+
+ format = qtdemux->segment.format;
+
+ start =
+ gst_segment_to_stream_time (&qtdemux->segment, format,
+ qtdemux->segment.start);
+ if ((stop = qtdemux->segment.stop) == -1)
+ stop = qtdemux->segment.duration;
+ else
+ stop = gst_segment_to_stream_time (&qtdemux->segment, format, stop);
+
+ gst_query_set_segment (query, qtdemux->segment.rate, format, start, stop);
+ res = TRUE;
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+
+ return res;
+ }
+
+ static void
+ gst_qtdemux_push_tags (GstQTDemux * qtdemux, QtDemuxStream * stream)
+ {
+ if (G_LIKELY (stream->pad)) {
+ GST_DEBUG_OBJECT (qtdemux, "Checking pad %s:%s for tags",
+ GST_DEBUG_PAD_NAME (stream->pad));
+
+ if (!gst_tag_list_is_empty (stream->stream_tags)) {
+ GST_DEBUG_OBJECT (qtdemux, "Sending tags %" GST_PTR_FORMAT,
+ stream->stream_tags);
+ gst_pad_push_event (stream->pad,
+ gst_event_new_tag (gst_tag_list_ref (stream->stream_tags)));
++#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
++ /* also post the tags as a bus message so the application can receive them early */
++ gst_element_post_message (GST_ELEMENT_CAST (qtdemux),
++ gst_message_new_tag (GST_OBJECT_CAST (qtdemux),
++ gst_tag_list_copy (stream->stream_tags)));
++#endif
+ }
+
+ if (G_UNLIKELY (stream->send_global_tags)) {
+ GST_DEBUG_OBJECT (qtdemux, "Sending global tags %" GST_PTR_FORMAT,
+ qtdemux->tag_list);
+ gst_pad_push_event (stream->pad,
+ gst_event_new_tag (gst_tag_list_ref (qtdemux->tag_list)));
+ stream->send_global_tags = FALSE;
+ }
+ }
+ }
+
+ /* push event on all source pads; takes ownership of the event */
+ static void
+ gst_qtdemux_push_event (GstQTDemux * qtdemux, GstEvent * event)
+ {
+ gboolean has_valid_stream = FALSE;
+ GstEventType etype = GST_EVENT_TYPE (event);
+ guint i;
+
+ GST_DEBUG_OBJECT (qtdemux, "pushing %s event on all source pads",
+ GST_EVENT_TYPE_NAME (event));
+
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ GstPad *pad;
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+ GST_DEBUG_OBJECT (qtdemux, "pushing on track-id %u", stream->track_id);
+
+ if ((pad = stream->pad)) {
+ has_valid_stream = TRUE;
+
+ if (etype == GST_EVENT_EOS) {
+ /* let's not send twice */
+ if (stream->sent_eos)
+ continue;
+ stream->sent_eos = TRUE;
+ }
+
+ gst_pad_push_event (pad, gst_event_ref (event));
+ }
+ }
+
+ gst_event_unref (event);
+
+ /* if it is EOS and there are no pads, post an error */
+ if (!has_valid_stream && etype == GST_EVENT_EOS) {
+ gst_qtdemux_post_no_playable_stream_error (qtdemux);
+ }
+ }
+
+ typedef struct
+ {
+ guint64 media_time;
+ } FindData;
+
+ static gint
+ find_func (QtDemuxSample * s1, gint64 * media_time, gpointer user_data)
+ {
+ if ((gint64) s1->timestamp > *media_time)
+ return 1;
+ if ((gint64) s1->timestamp == *media_time)
+ return 0;
+
+ return -1;
+ }
+
+ /* find the index of the sample that includes the data for @media_time using a
+ * binary search. Only to be called from the optimized path of the linear search below.
+ *
+ * Returns the index of the sample with the corresponding *DTS*.
+ */
+ static guint32
+ gst_qtdemux_find_index (GstQTDemux * qtdemux, QtDemuxStream * str,
+ guint64 media_time)
+ {
+ QtDemuxSample *result;
+ guint32 index;
+
+ /* convert media_time to mov format */
+ media_time =
+ gst_util_uint64_scale_ceil (media_time, str->timescale, GST_SECOND);
+
+ result = gst_util_array_binary_search (str->samples, str->stbl_index + 1,
+ sizeof (QtDemuxSample), (GCompareDataFunc) find_func,
+ GST_SEARCH_MODE_BEFORE, &media_time, NULL);
+
+ if (G_LIKELY (result))
+ index = result - str->samples;
+ else
+ index = 0;
+
+ return index;
+ }
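+
+ /* e.g. (illustrative): with parsed samples at DTS 0s, 1s, 2s and 3s,
+ * searching for media_time 2.5s lands on index 2, since
+ * GST_SEARCH_MODE_BEFORE picks the last sample whose timestamp is <= the
+ * target:
+ * |[
+ * guint32 idx = gst_qtdemux_find_index (qtdemux, str, 2500 * GST_MSECOND);
+ * ]|
+ */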
+
+
+
+ /* find the index of the sample that includes the data for @media_offset using a
+ * linear search
+ *
+ * Returns the index of the sample.
+ */
+ static guint32
+ gst_qtdemux_find_index_for_given_media_offset_linear (GstQTDemux * qtdemux,
+ QtDemuxStream * str, gint64 media_offset)
+ {
+ QtDemuxSample *result = str->samples;
+ guint32 index = 0;
+
+ if (result == NULL || str->n_samples == 0)
+ return -1;
+
+ if (media_offset == result->offset)
+ return index;
+
+ result++;
+ while (index < str->n_samples - 1) {
+ if (!qtdemux_parse_samples (qtdemux, str, index + 1))
+ goto parse_failed;
+
+ if (media_offset < result->offset)
+ break;
+
+ index++;
+ result++;
+ }
+ return index;
+
+ /* ERRORS */
+ parse_failed:
+ {
+ GST_LOG_OBJECT (qtdemux, "Parsing of index %u failed!", index + 1);
+ return -1;
+ }
+ }
+
+ /* find the index of the sample that includes the data for @media_time using a
+ * linear search, and keeping in mind that not all samples may have been parsed
+ * yet. If possible, it will delegate to binary search.
+ *
+ * Returns the index of the sample.
+ */
+ static guint32
+ gst_qtdemux_find_index_linear (GstQTDemux * qtdemux, QtDemuxStream * str,
+ GstClockTime media_time)
+ {
+ guint32 index = 0;
+ guint64 mov_time;
+ QtDemuxSample *sample;
+
+ /* convert media_time to mov format */
+ mov_time =
+ gst_util_uint64_scale_ceil (media_time, str->timescale, GST_SECOND);
+
+ sample = str->samples;
+ if (mov_time == sample->timestamp + sample->pts_offset)
+ return index;
+
+ /* use the faster binary search if the requested time is in the already parsed range */
+ sample = str->samples + str->stbl_index;
+ if (str->stbl_index >= 0 && mov_time <= sample->timestamp) {
+ index = gst_qtdemux_find_index (qtdemux, str, media_time);
+ sample = str->samples + index;
+ } else {
+ while (index < str->n_samples - 1) {
+ if (!qtdemux_parse_samples (qtdemux, str, index + 1))
+ goto parse_failed;
+
+ sample = str->samples + index + 1;
+ if (mov_time < sample->timestamp) {
+ sample = str->samples + index;
+ break;
+ }
+
+ index++;
+ }
+ }
+
+ /* sample->timestamp is now <= media_time, need to find the corresponding
+ * PTS now by looking backwards */
+ while (index > 0 && sample->timestamp + sample->pts_offset > mov_time) {
+ index--;
+ sample = str->samples + index;
+ }
+
+ return index;
+
+ /* ERRORS */
+ parse_failed:
+ {
+ GST_LOG_OBJECT (qtdemux, "Parsing of index %u failed!", index + 1);
+ return -1;
+ }
+ }
+
+ /* find the index of the keyframe needed to decode the sample at @index
+ * of stream @str, or of a subsequent keyframe (depending on @next)
+ *
+ * Returns the index of the keyframe.
+ */
+ static guint32
+ gst_qtdemux_find_keyframe (GstQTDemux * qtdemux, QtDemuxStream * str,
+ guint32 index, gboolean next)
+ {
+ guint32 new_index = index;
+
+ if (index >= str->n_samples) {
+ new_index = str->n_samples;
+ goto beach;
+ }
+
+ /* all keyframes, return index */
+ if (str->all_keyframe) {
+ new_index = index;
+ goto beach;
+ }
+
+ /* else search until we have a keyframe */
+ while (new_index < str->n_samples) {
+ if (next && !qtdemux_parse_samples (qtdemux, str, new_index))
+ goto parse_failed;
+
+ if (str->samples[new_index].keyframe)
+ break;
+
+ if (new_index == 0)
+ break;
+
+ if (next)
+ new_index++;
+ else
+ new_index--;
+ }
+
+ if (new_index == str->n_samples) {
+ GST_DEBUG_OBJECT (qtdemux, "no next keyframe");
+ new_index = -1;
+ }
+
+ beach:
+ GST_DEBUG_OBJECT (qtdemux, "searching for keyframe index %s index %u "
+ "gave %u", next ? "after" : "before", index, new_index);
+
+ return new_index;
+
+ /* ERRORS */
+ parse_failed:
+ {
+ GST_LOG_OBJECT (qtdemux, "Parsing of index %u failed!", new_index);
+ return -1;
+ }
+ }
+
+ /* find the segment for @time_position for @stream
+ *
+ * Returns the index of the segment containing @time_position.
+ * Returns the last segment if the time is beyond the end.
+ */
+ static guint32
+ gst_qtdemux_find_segment (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ GstClockTime time_position)
+ {
+ gint i;
+ guint32 seg_idx;
+
+ GST_LOG_OBJECT (stream->pad, "finding segment for %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (time_position));
+
+ seg_idx = -1;
+ for (i = 0; i < stream->n_segments; i++) {
+ QtDemuxSegment *segment = &stream->segments[i];
+
+ GST_LOG_OBJECT (stream->pad,
+ "looking at segment %" GST_TIME_FORMAT "-%" GST_TIME_FORMAT,
+ GST_TIME_ARGS (segment->time), GST_TIME_ARGS (segment->stop_time));
+
+ /* For the last segment we include stop_time in the last segment */
+ if (i < stream->n_segments - 1) {
+ if (segment->time <= time_position && time_position < segment->stop_time) {
+ GST_LOG_OBJECT (stream->pad, "segment %d matches", i);
+ seg_idx = i;
+ break;
+ }
+ } else {
+ /* Last segment always matches */
+ seg_idx = i;
+ break;
+ }
+ }
+ return seg_idx;
+ }
+
+ /* move the stream @str to the sample position @index.
+ *
+ * Updates @str->sample_index and marks discontinuity if needed.
+ */
+ static void
+ gst_qtdemux_move_stream (GstQTDemux * qtdemux, QtDemuxStream * str,
+ guint32 index)
+ {
+ /* no change needed */
+ if (index == str->sample_index)
+ return;
+
+ GST_DEBUG_OBJECT (qtdemux, "moving to sample %u of %u", index,
+ str->n_samples);
+
+ /* position changed, we have a discont */
+ str->sample_index = index;
+ str->offset_in_sample = 0;
+ /* Each time we move in the stream we store the position where we are
+ * starting from */
+ str->from_sample = index;
+ str->discont = TRUE;
+ }
+
+ static void
+ gst_qtdemux_adjust_seek (GstQTDemux * qtdemux, gint64 desired_time,
+ gboolean use_sparse, gboolean next, gint64 * key_time, gint64 * key_offset)
+ {
+ guint64 min_offset;
+ gint64 min_byte_offset = -1;
+ guint i;
+
+ min_offset = desired_time;
+
+ /* for each stream, find the index of the sample in the segment
+ * and move back to the previous keyframe. */
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *str;
+ guint32 index, kindex;
+ guint32 seg_idx;
+ GstClockTime media_start;
+ GstClockTime media_time;
+ GstClockTime seg_time;
+ QtDemuxSegment *seg;
+ gboolean empty_segment = FALSE;
+
+ str = QTDEMUX_NTH_STREAM (qtdemux, i);
+
+ if (CUR_STREAM (str)->sparse && !use_sparse)
+ continue;
+
+ seg_idx = gst_qtdemux_find_segment (qtdemux, str, desired_time);
+ GST_DEBUG_OBJECT (qtdemux, "align segment %d", seg_idx);
+
+ /* get segment and time in the segment */
+ seg = &str->segments[seg_idx];
+ seg_time = (desired_time - seg->time) * seg->rate;
+
+ while (QTSEGMENT_IS_EMPTY (seg)) {
+ seg_time = 0;
+ empty_segment = TRUE;
+ GST_DEBUG_OBJECT (str->pad, "Segment %d is empty, moving to next one",
+ seg_idx);
+ seg_idx++;
+ if (seg_idx == str->n_segments)
+ break;
+ seg = &str->segments[seg_idx];
+ }
+
+ if (seg_idx == str->n_segments) {
+ /* FIXME track shouldn't have the last segment as empty, but if it
+ * happens we better handle it */
+ continue;
+ }
+
+ /* get the media time in the segment */
+ media_start = seg->media_start + seg_time;
+
+ /* get the index of the sample with media time */
+ index = gst_qtdemux_find_index_linear (qtdemux, str, media_start);
+ GST_DEBUG_OBJECT (qtdemux, "sample for %" GST_TIME_FORMAT " at %u"
+ " at offset %" G_GUINT64_FORMAT " (empty segment: %d)",
+ GST_TIME_ARGS (media_start), index, str->samples[index].offset,
+ empty_segment);
+
+ /* shift to next frame if we are looking for next keyframe */
+ if (next && QTSAMPLE_PTS_NO_CSLG (str, &str->samples[index]) < media_start
+ && index < str->stbl_index)
+ index++;
+
+ if (!empty_segment) {
+ /* find previous keyframe */
+ kindex = gst_qtdemux_find_keyframe (qtdemux, str, index, next);
+
+ /* we will settle for one before if none found after */
+ if (next && kindex == -1)
+ kindex = gst_qtdemux_find_keyframe (qtdemux, str, index, FALSE);
+
+ /* Update the requested time whenever a keyframe was found, to make it
+ * accurate and avoid having the first buffer fall outside of the segment
+ */
+ if (kindex != -1) {
+ index = kindex;
+
+ /* get timestamp of keyframe */
+ media_time = QTSAMPLE_PTS_NO_CSLG (str, &str->samples[kindex]);
+ GST_DEBUG_OBJECT (qtdemux,
+ "keyframe at %u with time %" GST_TIME_FORMAT " at offset %"
+ G_GUINT64_FORMAT, kindex, GST_TIME_ARGS (media_time),
+ str->samples[kindex].offset);
+
+ /* keyframes in the segment get a chance to change the
+ * desired_offset. keyframes out of the segment are
+ * ignored. */
+ if (media_time >= seg->media_start) {
+ GstClockTime seg_time;
+
+ /* this keyframe is inside the segment, convert back to
+ * segment time */
+ seg_time = (media_time - seg->media_start) + seg->time;
+ if ((!next && (seg_time < min_offset)) ||
+ (next && (seg_time > min_offset)))
+ min_offset = seg_time;
+ }
+ }
+ }
+
+ if (min_byte_offset < 0 || str->samples[index].offset < min_byte_offset)
+ min_byte_offset = str->samples[index].offset;
+ }
+
+ if (key_time)
+ *key_time = min_offset;
+ if (key_offset)
+ *key_offset = min_byte_offset;
+ }
+
+ static gboolean
+ gst_qtdemux_convert_seek (GstPad * pad, GstFormat * format,
+ GstSeekType cur_type, gint64 * cur, GstSeekType stop_type, gint64 * stop)
+ {
+ gboolean res;
+
+ g_return_val_if_fail (format != NULL, FALSE);
+ g_return_val_if_fail (cur != NULL, FALSE);
+ g_return_val_if_fail (stop != NULL, FALSE);
+
+ if (*format == GST_FORMAT_TIME)
+ return TRUE;
+
+ res = TRUE;
+ if (cur_type != GST_SEEK_TYPE_NONE)
+ res = gst_pad_query_convert (pad, *format, *cur, GST_FORMAT_TIME, cur);
+ if (res && stop_type != GST_SEEK_TYPE_NONE)
+ res = gst_pad_query_convert (pad, *format, *stop, GST_FORMAT_TIME, stop);
+
+ if (res)
+ *format = GST_FORMAT_TIME;
+
+ return res;
+ }
+
+ /* perform seek in push based mode:
+ find BYTE position to move to based on time and delegate to upstream
+ */
+ static gboolean
+ gst_qtdemux_do_push_seek (GstQTDemux * qtdemux, GstPad * pad, GstEvent * event)
+ {
+ gdouble rate;
+ GstFormat format;
+ GstSeekFlags flags;
+ GstSeekType cur_type, stop_type;
+ gint64 cur, stop, key_cur;
+ gboolean res;
+ gint64 byte_cur;
+ gint64 original_stop;
+ guint32 seqnum;
+
+ GST_DEBUG_OBJECT (qtdemux, "doing push-based seek");
+
+ gst_event_parse_seek (event, &rate, &format, &flags,
+ &cur_type, &cur, &stop_type, &stop);
+ seqnum = gst_event_get_seqnum (event);
+
+ /* Directly send the instant-rate-change event here before taking the
+ * stream-lock so that it can be applied as soon as possible */
+ if (flags & GST_SEEK_FLAG_INSTANT_RATE_CHANGE) {
+ GstEvent *ev;
+
+ /* instant rate change only supported if direction does not change. All
+ * other requirements are already checked before creating the seek event
+ * but let's double-check here to be sure */
+ if ((qtdemux->segment.rate > 0 && rate < 0) ||
+ (qtdemux->segment.rate < 0 && rate > 0) ||
+ cur_type != GST_SEEK_TYPE_NONE ||
+ stop_type != GST_SEEK_TYPE_NONE || (flags & GST_SEEK_FLAG_FLUSH)) {
+ GST_ERROR_OBJECT (qtdemux,
+ "Instant rate change seeks only supported in the "
+ "same direction, without flushing and position change");
+ return FALSE;
+ }
+
+ ev = gst_event_new_instant_rate_change (rate / qtdemux->segment.rate,
+ (GstSegmentFlags) flags);
+ gst_event_set_seqnum (ev, seqnum);
+ gst_qtdemux_push_event (qtdemux, ev);
+ return TRUE;
+ }
+
+ /* only forward streaming and seeking is possible */
+ if (rate <= 0)
+ goto unsupported_seek;
+
+ /* convert to TIME if needed and possible */
+ if (!gst_qtdemux_convert_seek (pad, &format, cur_type, &cur,
+ stop_type, &stop))
+ goto no_format;
+
+ /* Upstream seek in bytes will have undefined stop, but qtdemux stores
+ * the original stop position to use when upstream pushes the new segment
+ * for this seek */
+ original_stop = stop;
+ stop = -1;
+
+ /* find reasonable corresponding BYTE position,
+ * also try to mind about keyframes, since we can not go back a bit for them
+ * later on */
+ /* determining @next here based on SNAP_BEFORE/SNAP_AFTER should
+ * mostly just work, but let's not yet boldly go there ... */
+ gst_qtdemux_adjust_seek (qtdemux, cur, FALSE, FALSE, &key_cur, &byte_cur);
+
+ if (byte_cur == -1)
+ goto abort_seek;
+
+ GST_DEBUG_OBJECT (qtdemux, "Pushing BYTE seek rate %g, "
+ "start %" G_GINT64_FORMAT ", stop %" G_GINT64_FORMAT, rate, byte_cur,
+ stop);
+
+ GST_OBJECT_LOCK (qtdemux);
+ qtdemux->seek_offset = byte_cur;
+ if (!(flags & GST_SEEK_FLAG_KEY_UNIT)) {
+ qtdemux->push_seek_start = cur;
+ } else {
+ qtdemux->push_seek_start = key_cur;
+ }
+
+ if (stop_type == GST_SEEK_TYPE_NONE) {
+ qtdemux->push_seek_stop = qtdemux->segment.stop;
+ } else {
+ qtdemux->push_seek_stop = original_stop;
+ }
+ GST_OBJECT_UNLOCK (qtdemux);
+
+ qtdemux->segment_seqnum = seqnum;
+ /* BYTE seek event */
+ event = gst_event_new_seek (rate, GST_FORMAT_BYTES, flags, cur_type, byte_cur,
+ stop_type, stop);
+ gst_event_set_seqnum (event, seqnum);
+ res = gst_pad_push_event (qtdemux->sinkpad, event);
+
+ return res;
+
+ /* ERRORS */
+ abort_seek:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "could not determine byte position to seek to, "
+ "seek aborted.");
+ return FALSE;
+ }
+ unsupported_seek:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "unsupported seek, seek aborted.");
+ return FALSE;
+ }
+ no_format:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "unsupported format given, seek aborted.");
+ return FALSE;
+ }
+ }
+
+ /* perform the seek.
+ *
+ * We set all segment_indexes in the streams to unknown and
+ * adjust the time_position to the desired position. This is enough
+ * to trigger a segment switch in the streaming thread to start
+ * streaming from the desired position.
+ *
+ * Keyframe seeking is a little more complicated when dealing with
+ * segments. Ideally we want to move to the previous keyframe in
+ * the segment but there might not be a keyframe in the segment. In
+ * fact, none of the segments could contain a keyframe. We take a
+ * practical approach: seek to the previous keyframe in the segment,
+ * if there is none, seek to the beginning of the segment.
+ *
+ * Called with STREAM_LOCK
+ */
+ static gboolean
+ gst_qtdemux_perform_seek (GstQTDemux * qtdemux, GstSegment * segment,
+ guint32 seqnum, GstSeekFlags flags)
+ {
+ gint64 desired_offset;
+ guint i;
+
+ desired_offset = segment->position;
+
+ GST_DEBUG_OBJECT (qtdemux, "seeking to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (desired_offset));
+
+ /* may not have enough fragmented info to do this adjustment,
+ * and we can't scan (and probably should not) at this time with
+ * possibly flushing upstream */
+ if ((flags & GST_SEEK_FLAG_KEY_UNIT) && !qtdemux->fragmented) {
+ gint64 min_offset;
+ gboolean next, before, after;
+
+ before = ! !(flags & GST_SEEK_FLAG_SNAP_BEFORE);
+ after = ! !(flags & GST_SEEK_FLAG_SNAP_AFTER);
+ next = after && !before;
+ if (segment->rate < 0)
+ next = !next;
+
+ gst_qtdemux_adjust_seek (qtdemux, desired_offset, TRUE, next, &min_offset,
+ NULL);
+ GST_DEBUG_OBJECT (qtdemux, "keyframe seek, align to %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (min_offset));
+ desired_offset = min_offset;
+ }
+
+ /* and set all streams to the final position */
+ GST_OBJECT_LOCK (qtdemux);
+ gst_flow_combiner_reset (qtdemux->flowcombiner);
+ GST_OBJECT_UNLOCK (qtdemux);
+ qtdemux->segment_seqnum = seqnum;
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+
+ stream->time_position = desired_offset;
+ stream->accumulated_base = 0;
+ stream->sample_index = -1;
+ stream->offset_in_sample = 0;
+ stream->segment_index = -1;
+ stream->sent_eos = FALSE;
+ stream->last_keyframe_dts = GST_CLOCK_TIME_NONE;
+
+ if (segment->flags & GST_SEEK_FLAG_FLUSH)
+ gst_segment_init (&stream->segment, GST_FORMAT_TIME);
+ }
+ segment->position = desired_offset;
+ if (segment->rate >= 0) {
+ segment->start = desired_offset;
+ /* We need to update time as we update start in that direction */
+ segment->time = desired_offset;
+
+ /* we stop at the end */
+ if (segment->stop == -1)
+ segment->stop = segment->duration;
+ } else {
+ segment->stop = desired_offset;
+ }
+
+ if (qtdemux->fragmented)
+ qtdemux->fragmented_seek_pending = TRUE;
+
+ return TRUE;
+ }
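+
+ /* From an application, this path is typically reached via a flushing
+ * key-unit seek (standard GStreamer API, illustrative values):
+ * |[
+ * gst_element_seek_simple (pipeline, GST_FORMAT_TIME,
+ *     GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT, 30 * GST_SECOND);
+ * ]|
+ * With GST_SEEK_FLAG_KEY_UNIT set, gst_qtdemux_adjust_seek() above snaps
+ * the requested position to a keyframe so decoding can start cleanly.
+ */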
+
+ /* do a seek in pull based mode */
+ static gboolean
+ gst_qtdemux_do_seek (GstQTDemux * qtdemux, GstPad * pad, GstEvent * event)
+ {
+ gdouble rate = 1.0;
+ GstFormat format;
+ GstSeekFlags flags;
+ GstSeekType cur_type, stop_type;
+ gint64 cur, stop;
+ gboolean flush, instant_rate_change;
+ gboolean update;
+ GstSegment seeksegment;
+ guint32 seqnum = GST_SEQNUM_INVALID;
+ GstEvent *flush_event;
+ gboolean ret;
+
+ GST_DEBUG_OBJECT (qtdemux, "doing seek with event");
+
+ gst_event_parse_seek (event, &rate, &format, &flags,
+ &cur_type, &cur, &stop_type, &stop);
+ seqnum = gst_event_get_seqnum (event);
+
+ /* we have to have a format as the segment format. Try to convert
+ * if not. */
+ if (!gst_qtdemux_convert_seek (pad, &format, cur_type, &cur,
+ stop_type, &stop))
+ goto no_format;
+
+ GST_DEBUG_OBJECT (qtdemux, "seek format %s", gst_format_get_name (format));
+
+ flush = ! !(flags & GST_SEEK_FLAG_FLUSH);
+ instant_rate_change = ! !(flags & GST_SEEK_FLAG_INSTANT_RATE_CHANGE);
+
+ /* Directly send the instant-rate-change event here before taking the
+ * stream-lock so that it can be applied as soon as possible */
+ if (instant_rate_change) {
+ GstEvent *ev;
+
+ /* instant rate change only supported if direction does not change. All
+ * other requirements are already checked before creating the seek event
+ * but let's double-check here to be sure */
+ if ((qtdemux->segment.rate > 0 && rate < 0) ||
+ (qtdemux->segment.rate < 0 && rate > 0) ||
+ cur_type != GST_SEEK_TYPE_NONE ||
+ stop_type != GST_SEEK_TYPE_NONE || flush) {
+ GST_ERROR_OBJECT (qtdemux,
+ "Instant rate change seeks only supported in the "
+ "same direction, without flushing and position change");
+ return FALSE;
+ }
+
+ ev = gst_event_new_instant_rate_change (rate / qtdemux->segment.rate,
+ (GstSegmentFlags) flags);
+ gst_event_set_seqnum (ev, seqnum);
+ gst_qtdemux_push_event (qtdemux, ev);
+ return TRUE;
+ }
+
+ /* stop streaming, either by flushing or by pausing the task */
+ if (flush) {
+ flush_event = gst_event_new_flush_start ();
+ if (seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (flush_event, seqnum);
+ /* unlock upstream pull_range */
+ gst_pad_push_event (qtdemux->sinkpad, gst_event_ref (flush_event));
+ /* make sure out loop function exits */
+ gst_qtdemux_push_event (qtdemux, flush_event);
+ } else {
+ /* non flushing seek, pause the task */
+ gst_pad_pause_task (qtdemux->sinkpad);
+ }
+
+ /* wait for streaming to finish */
+ GST_PAD_STREAM_LOCK (qtdemux->sinkpad);
+
+ /* copy segment, we need this because we still need the old
+ * segment when we close the current segment. */
+ memcpy (&seeksegment, &qtdemux->segment, sizeof (GstSegment));
+
+ /* configure the segment with the seek variables */
+ GST_DEBUG_OBJECT (qtdemux, "configuring seek");
+ if (!gst_segment_do_seek (&seeksegment, rate, format, flags,
+ cur_type, cur, stop_type, stop, &update)) {
+ ret = FALSE;
+ GST_ERROR_OBJECT (qtdemux, "inconsistent seek values, doing nothing");
+ } else {
+ /* now do the seek */
+ ret = gst_qtdemux_perform_seek (qtdemux, &seeksegment, seqnum, flags);
+ }
+
+ /* prepare for streaming again */
+ if (flush) {
+ flush_event = gst_event_new_flush_stop (TRUE);
+ if (seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (flush_event, seqnum);
+
+ gst_pad_push_event (qtdemux->sinkpad, gst_event_ref (flush_event));
+ gst_qtdemux_push_event (qtdemux, flush_event);
+ }
+
+ /* commit the new segment */
+ memcpy (&qtdemux->segment, &seeksegment, sizeof (GstSegment));
+
+ if (qtdemux->segment.flags & GST_SEEK_FLAG_SEGMENT) {
+ GstMessage *msg = gst_message_new_segment_start (GST_OBJECT_CAST (qtdemux),
+ qtdemux->segment.format, qtdemux->segment.position);
+ if (seqnum != GST_SEQNUM_INVALID)
+ gst_message_set_seqnum (msg, seqnum);
+ gst_element_post_message (GST_ELEMENT_CAST (qtdemux), msg);
+ }
+
+ /* restart streaming, NEWSEGMENT will be sent from the streaming thread. */
+ gst_pad_start_task (qtdemux->sinkpad, (GstTaskFunction) gst_qtdemux_loop,
+ qtdemux->sinkpad, NULL);
+
+ GST_PAD_STREAM_UNLOCK (qtdemux->sinkpad);
+
+ return ret;
+
+ /* ERRORS */
+ no_format:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "unsupported format given, seek aborted.");
+ return FALSE;
+ }
+ }
+
+ static gboolean
+ qtdemux_ensure_index (GstQTDemux * qtdemux)
+ {
+ guint i;
+
+ GST_DEBUG_OBJECT (qtdemux, "collecting all metadata for all streams");
+
+ /* Build complete index */
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+
+ if (!qtdemux_parse_samples (qtdemux, stream, stream->n_samples - 1)) {
+ GST_LOG_OBJECT (qtdemux,
+ "Building complete index of track-id %u for seeking failed!",
+ stream->track_id);
+ return FALSE;
+ }
+ }
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_qtdemux_handle_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+ {
+ gboolean res = TRUE;
+ GstQTDemux *qtdemux = GST_QTDEMUX (parent);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_RECONFIGURE:
+ GST_OBJECT_LOCK (qtdemux);
+ gst_flow_combiner_reset (qtdemux->flowcombiner);
+ GST_OBJECT_UNLOCK (qtdemux);
+ res = gst_pad_event_default (pad, parent, event);
+ break;
+ case GST_EVENT_SEEK:
+ {
+ GstSeekFlags flags = 0;
+ GstFormat seek_format;
+ gboolean instant_rate_change;
+
+ #ifndef GST_DISABLE_GST_DEBUG
+ GstClockTime ts = gst_util_get_timestamp ();
+ #endif
+ guint32 seqnum = gst_event_get_seqnum (event);
+
+ qtdemux->received_seek = TRUE;
+
+ gst_event_parse_seek (event, NULL, &seek_format, &flags, NULL, NULL, NULL,
+ NULL);
+ instant_rate_change = ! !(flags & GST_SEEK_FLAG_INSTANT_RATE_CHANGE);
+
+ if (seqnum == qtdemux->segment_seqnum) {
+ GST_LOG_OBJECT (pad,
+ "Drop duplicated SEEK event seqnum %" G_GUINT32_FORMAT, seqnum);
+ gst_event_unref (event);
+ return TRUE;
+ }
+
+ if (qtdemux->upstream_format_is_time && qtdemux->fragmented) {
+ /* seek should be handled by upstream, we might need to re-download fragments */
+ GST_DEBUG_OBJECT (qtdemux,
+ "let upstream handle seek for fragmented playback");
+ goto upstream;
+ }
+
+ if (seek_format == GST_FORMAT_BYTES) {
+ GST_DEBUG_OBJECT (pad, "Rejecting seek request in bytes format");
+ gst_event_unref (event);
+ return FALSE;
+ }
+
+ gst_event_parse_seek_trickmode_interval (event,
+ &qtdemux->trickmode_interval);
+
+ /* Build complete index for seeking;
+ * if not a fragmented file at least and we're really doing a seek,
+ * not just an instant-rate-change */
+ if (!qtdemux->fragmented && !instant_rate_change) {
+ if (!qtdemux_ensure_index (qtdemux))
+ goto index_failed;
+ }
+ #ifndef GST_DISABLE_GST_DEBUG
+ ts = gst_util_get_timestamp () - ts;
+ GST_INFO_OBJECT (qtdemux,
+ "Time taken to parse index %" GST_TIME_FORMAT, GST_TIME_ARGS (ts));
+ #endif
+ if (qtdemux->pullbased) {
+ res = gst_qtdemux_do_seek (qtdemux, pad, event);
+ } else if (gst_pad_push_event (qtdemux->sinkpad, gst_event_ref (event))) {
+ GST_DEBUG_OBJECT (qtdemux, "Upstream successfully seeked");
+ res = TRUE;
+ } else if (qtdemux->state == QTDEMUX_STATE_MOVIE
+ && QTDEMUX_N_STREAMS (qtdemux)
+ && !qtdemux->fragmented) {
+ res = gst_qtdemux_do_push_seek (qtdemux, pad, event);
+ } else {
+ GST_DEBUG_OBJECT (qtdemux,
+ "ignoring seek in push mode in current state");
+ res = FALSE;
+ }
+ gst_event_unref (event);
+ }
+ break;
+ default:
+ upstream:
+ res = gst_pad_event_default (pad, parent, event);
+ break;
+ }
+
+ done:
+ return res;
+
+ /* ERRORS */
+ index_failed:
+ {
+ GST_ERROR_OBJECT (qtdemux, "Index failed");
+ gst_event_unref (event);
+ res = FALSE;
+ goto done;
+ }
+ }
+
+ /* Find, for each track, the first sample in coding order that has a file offset >= @byte_pos.
+ *
+ * If @fw is false, the coding order is explored backwards.
+ *
+ * If @set is true, each stream will be moved to its matched sample, or EOS if no matching
+ * sample is found for that track.
+ *
+ * The stream and sample index of the sample with the minimum offset in the direction explored
+ * (see @fw) is returned in the output parameters @_stream and @_index respectively.
+ *
+ * @_time is set to the QTSAMPLE_PTS of the matched sample with the minimum QTSAMPLE_PTS in the
+ * direction explored, which may not always match the QTSAMPLE_PTS of the sample returned in
+ * @_stream and @_index. */
+ static void
+ gst_qtdemux_find_sample (GstQTDemux * qtdemux, gint64 byte_pos, gboolean fw,
+ gboolean set, QtDemuxStream ** _stream, gint * _index, gint64 * _time)
+ {
+ gint i, index;
+ gint64 time, min_time;
+ QtDemuxStream *stream;
+ gint iter;
+
+ min_time = -1;
+ stream = NULL;
+ index = -1;
+
+ for (iter = 0; iter < QTDEMUX_N_STREAMS (qtdemux); iter++) {
+ QtDemuxStream *str;
+ gint inc;
+ gboolean set_sample;
+
+ str = QTDEMUX_NTH_STREAM (qtdemux, iter);
+ set_sample = !set;
+
+ if (fw) {
+ i = 0;
+ inc = 1;
+ } else {
+ i = str->n_samples - 1;
+ inc = -1;
+ }
+
+ for (; (i >= 0) && (i < str->n_samples); i += inc) {
+ if (str->samples[i].size == 0)
+ continue;
+
+ if (fw && (str->samples[i].offset < byte_pos))
+ continue;
+
+ if (!fw && (str->samples[i].offset + str->samples[i].size > byte_pos))
+ continue;
+
+ /* move stream to first available sample */
+ if (set) {
+ gst_qtdemux_move_stream (qtdemux, str, i);
+ set_sample = TRUE;
+ }
+
+ /* avoid index from sparse streams since they might be far away */
+ if (!CUR_STREAM (str)->sparse) {
+ /* determine min/max time */
+ time = QTSAMPLE_PTS (str, &str->samples[i]);
+ if (min_time == -1 || (!fw && time > min_time) ||
+ (fw && time < min_time)) {
+ min_time = time;
+ }
+
+ /* determine stream with leading sample, to get its position */
+ if (!stream ||
+ (fw && (str->samples[i].offset < stream->samples[index].offset)) ||
+ (!fw && (str->samples[i].offset > stream->samples[index].offset))) {
+ stream = str;
+ index = i;
+ }
+ }
+ break;
+ }
+
+ /* no sample for this stream, mark eos */
+ if (!set_sample)
+ gst_qtdemux_move_stream (qtdemux, str, str->n_samples);
+ }
+
+ if (_time)
+ *_time = min_time;
+ if (_stream)
+ *_stream = stream;
+ if (_index)
+ *_index = index;
+ }
+
+ /* Copied from mpegtsbase code */
+ /* FIXME: replace this function when we add new util function for stream-id creation */
+ static gchar *
+ _get_upstream_id (GstQTDemux * demux)
+ {
+ gchar *upstream_id = gst_pad_get_stream_id (demux->sinkpad);
+
+ if (!upstream_id) {
+ /* Try to create one from the upstream URI, else use a random number */
+ GstQuery *query;
+ gchar *uri = NULL;
+
+ /* Try to generate one from the URI query and
+ * if it fails take a random number instead */
+ query = gst_query_new_uri ();
+ if (gst_element_query (GST_ELEMENT_CAST (demux), query)) {
+ gst_query_parse_uri (query, &uri);
+ }
+
+ if (uri) {
+ GChecksum *cs;
+
+ /* And then generate an SHA256 sum of the URI */
+ cs = g_checksum_new (G_CHECKSUM_SHA256);
+ g_checksum_update (cs, (const guchar *) uri, strlen (uri));
+ g_free (uri);
+ upstream_id = g_strdup (g_checksum_get_string (cs));
+ g_checksum_free (cs);
+ } else {
+ /* Just get some random number if the URI query fails */
+ GST_FIXME_OBJECT (demux, "Creating random stream-id, consider "
+ "implementing a deterministic way of creating a stream-id");
+ upstream_id =
+ g_strdup_printf ("%08x%08x%08x%08x", g_random_int (), g_random_int (),
+ g_random_int (), g_random_int ());
+ }
+
+ gst_query_unref (query);
+ }
+ return upstream_id;
+ }
+
+ static QtDemuxStream *
+ _create_stream (GstQTDemux * demux, guint32 track_id)
+ {
+ QtDemuxStream *stream;
+ gchar *upstream_id;
+
+ stream = g_new0 (QtDemuxStream, 1);
+ stream->demux = demux;
+ stream->track_id = track_id;
+ upstream_id = _get_upstream_id (demux);
+ stream->stream_id = g_strdup_printf ("%s/%03u", upstream_id, track_id);
+ g_free (upstream_id);
+ /* new streams always need a discont */
+ stream->discont = TRUE;
+ /* we enable clipping for raw audio/video streams */
+ stream->need_clip = FALSE;
+ stream->need_process = FALSE;
+ stream->segment_index = -1;
+ stream->time_position = 0;
+ stream->sample_index = -1;
+ stream->offset_in_sample = 0;
+ stream->new_stream = TRUE;
+ stream->multiview_mode = GST_VIDEO_MULTIVIEW_MODE_NONE;
+ stream->multiview_flags = GST_VIDEO_MULTIVIEW_FLAGS_NONE;
+ stream->protected = FALSE;
+ stream->protection_scheme_type = 0;
+ stream->protection_scheme_version = 0;
+ stream->protection_scheme_info = NULL;
+ stream->n_samples_moof = 0;
+ stream->duration_moof = 0;
+ stream->duration_last_moof = 0;
+ stream->alignment = 1;
+ stream->stream_tags = gst_tag_list_new_empty ();
+ gst_tag_list_set_scope (stream->stream_tags, GST_TAG_SCOPE_STREAM);
+ g_queue_init (&stream->protection_scheme_event_queue);
+ stream->ref_count = 1;
+ /* consistent default for push based mode */
+ gst_segment_init (&stream->segment, GST_FORMAT_TIME);
+ return stream;
+ }
+
+ static gboolean
+ gst_qtdemux_setcaps (GstQTDemux * demux, GstCaps * caps)
+ {
+ GstStructure *structure;
+ const gchar *variant;
+ const GstCaps *mediacaps = NULL;
+
+ GST_DEBUG_OBJECT (demux, "Sink set caps: %" GST_PTR_FORMAT, caps);
+
+ structure = gst_caps_get_structure (caps, 0);
+ variant = gst_structure_get_string (structure, "variant");
+
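+ /* a caps "variant" of "mss-fragmented" identifies Microsoft Smooth
+ * Streaming input: there is no moov to parse, so the single stream is
+ * created from the "media-caps" and "timescale" fields in the sink caps */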
+ if (variant && strcmp (variant, "mss-fragmented") == 0) {
+ QtDemuxStream *stream;
+ const GValue *value;
+
+ demux->fragmented = TRUE;
+ demux->mss_mode = TRUE;
+
+ if (QTDEMUX_N_STREAMS (demux) > 1) {
+ /* can't do this, we can only renegotiate for another mss format */
+ return FALSE;
+ }
+
+ value = gst_structure_get_value (structure, "media-caps");
+ /* create stream */
+ if (value) {
+ const GValue *timescale_v;
+
+ /* TODO update when stream changes during playback */
+
+ if (QTDEMUX_N_STREAMS (demux) == 0) {
+ stream = _create_stream (demux, 1);
+ g_ptr_array_add (demux->active_streams, stream);
+ /* mss has no stsd atom or stsd entries, use id 0 as default */
+ stream->stsd_entries_length = 1;
+ stream->stsd_sample_description_id = stream->cur_stsd_entry_index = 0;
+ stream->stsd_entries = g_new0 (QtDemuxStreamStsdEntry, 1);
+ } else {
+ stream = QTDEMUX_NTH_STREAM (demux, 0);
+ }
+
+ timescale_v = gst_structure_get_value (structure, "timescale");
+ if (timescale_v) {
+ stream->timescale = g_value_get_uint64 (timescale_v);
+ } else {
+ /* default mss timescale */
+ stream->timescale = 10000000;
+ }
+ demux->timescale = stream->timescale;
+
+ mediacaps = gst_value_get_caps (value);
+ if (!CUR_STREAM (stream)->caps
+ || !gst_caps_is_equal_fixed (mediacaps, CUR_STREAM (stream)->caps)) {
+ GST_DEBUG_OBJECT (demux, "We have a new caps %" GST_PTR_FORMAT,
+ mediacaps);
+ stream->new_caps = TRUE;
+ }
+ gst_caps_replace (&CUR_STREAM (stream)->caps, (GstCaps *) mediacaps);
+ structure = gst_caps_get_structure (mediacaps, 0);
+ if (g_str_has_prefix (gst_structure_get_name (structure), "video")) {
+ stream->subtype = FOURCC_vide;
+
+ gst_structure_get_int (structure, "width", &CUR_STREAM (stream)->width);
+ gst_structure_get_int (structure, "height",
+ &CUR_STREAM (stream)->height);
+ gst_structure_get_fraction (structure, "framerate",
+ &CUR_STREAM (stream)->fps_n, &CUR_STREAM (stream)->fps_d);
+ } else if (g_str_has_prefix (gst_structure_get_name (structure), "audio")) {
+ gint rate = 0;
+ stream->subtype = FOURCC_soun;
+ gst_structure_get_int (structure, "channels",
+ &CUR_STREAM (stream)->n_channels);
+ gst_structure_get_int (structure, "rate", &rate);
+ CUR_STREAM (stream)->rate = rate;
+ } else if (gst_structure_has_name (structure, "application/x-cenc")) {
+ if (gst_structure_has_field (structure, "original-media-type")) {
+ const gchar *media_type =
+ gst_structure_get_string (structure, "original-media-type");
+ if (g_str_has_prefix (media_type, "video")) {
+ stream->subtype = FOURCC_vide;
+ } else if (g_str_has_prefix (media_type, "audio")) {
+ stream->subtype = FOURCC_soun;
+ }
+ }
+ }
+ }
+ gst_caps_replace (&demux->media_caps, (GstCaps *) mediacaps);
+ } else {
+ demux->mss_mode = FALSE;
+ }
+
+ return TRUE;
+ }
+
+ static void
+ gst_qtdemux_reset (GstQTDemux * qtdemux, gboolean hard)
+ {
+ gint i;
+
+ GST_DEBUG_OBJECT (qtdemux, "Resetting demux");
+ gst_pad_stop_task (qtdemux->sinkpad);
+
+ if (hard || qtdemux->upstream_format_is_time) {
+ qtdemux->state = QTDEMUX_STATE_INITIAL;
+ qtdemux->neededbytes = 16;
+ qtdemux->todrop = 0;
+ qtdemux->pullbased = FALSE;
+ g_clear_pointer (&qtdemux->redirect_location, g_free);
+ qtdemux->first_mdat = -1;
+ qtdemux->header_size = 0;
+ qtdemux->mdatoffset = -1;
+ qtdemux->restoredata_offset = -1;
+ if (qtdemux->mdatbuffer)
+ gst_buffer_unref (qtdemux->mdatbuffer);
+ if (qtdemux->restoredata_buffer)
+ gst_buffer_unref (qtdemux->restoredata_buffer);
+ qtdemux->mdatbuffer = NULL;
+ qtdemux->restoredata_buffer = NULL;
+ qtdemux->mdatleft = 0;
+ qtdemux->mdatsize = 0;
+ if (qtdemux->comp_brands)
+ gst_buffer_unref (qtdemux->comp_brands);
+ qtdemux->comp_brands = NULL;
+ qtdemux->last_moov_offset = -1;
+ if (qtdemux->moov_node_compressed) {
+ g_node_destroy (qtdemux->moov_node_compressed);
+ if (qtdemux->moov_node)
+ g_free (qtdemux->moov_node->data);
+ }
+ qtdemux->moov_node_compressed = NULL;
+ if (qtdemux->moov_node)
+ g_node_destroy (qtdemux->moov_node);
+ qtdemux->moov_node = NULL;
+ if (qtdemux->tag_list)
+ gst_mini_object_unref (GST_MINI_OBJECT_CAST (qtdemux->tag_list));
+ qtdemux->tag_list = gst_tag_list_new_empty ();
+ gst_tag_list_set_scope (qtdemux->tag_list, GST_TAG_SCOPE_GLOBAL);
+ #if 0
+ if (qtdemux->element_index)
+ gst_object_unref (qtdemux->element_index);
+ qtdemux->element_index = NULL;
+ #endif
+ qtdemux->major_brand = 0;
+ qtdemux->upstream_format_is_time = FALSE;
+ qtdemux->upstream_seekable = FALSE;
+ qtdemux->upstream_size = 0;
+
+ qtdemux->fragment_start = -1;
+ qtdemux->fragment_start_offset = -1;
+ qtdemux->duration = 0;
+ qtdemux->moof_offset = 0;
+ qtdemux->chapters_track_id = 0;
+ qtdemux->have_group_id = FALSE;
+ qtdemux->group_id = G_MAXUINT;
+
+ g_queue_foreach (&qtdemux->protection_event_queue, (GFunc) gst_event_unref,
+ NULL);
+ g_queue_clear (&qtdemux->protection_event_queue);
+
+ qtdemux->received_seek = FALSE;
+ qtdemux->first_moof_already_parsed = FALSE;
+ }
+ qtdemux->offset = 0;
+ gst_adapter_clear (qtdemux->adapter);
+ gst_segment_init (&qtdemux->segment, GST_FORMAT_TIME);
+ qtdemux->need_segment = TRUE;
+
+ if (hard) {
+ qtdemux->segment_seqnum = GST_SEQNUM_INVALID;
+ qtdemux->trickmode_interval = 0;
+ g_ptr_array_set_size (qtdemux->active_streams, 0);
+ g_ptr_array_set_size (qtdemux->old_streams, 0);
+ qtdemux->n_video_streams = 0;
+ qtdemux->n_audio_streams = 0;
+ qtdemux->n_sub_streams = 0;
+ qtdemux->exposed = FALSE;
+ qtdemux->fragmented = FALSE;
+ qtdemux->mss_mode = FALSE;
+ gst_caps_replace (&qtdemux->media_caps, NULL);
+ qtdemux->timescale = 0;
+ qtdemux->got_moov = FALSE;
+ qtdemux->cenc_aux_info_offset = 0;
+ qtdemux->cenc_aux_info_sizes = NULL;
+ qtdemux->cenc_aux_sample_count = 0;
+ if (qtdemux->protection_system_ids) {
+ g_ptr_array_free (qtdemux->protection_system_ids, TRUE);
+ qtdemux->protection_system_ids = NULL;
+ }
+ qtdemux->streams_aware = GST_OBJECT_PARENT (qtdemux)
+ && GST_OBJECT_FLAG_IS_SET (GST_OBJECT_PARENT (qtdemux),
+ GST_BIN_FLAG_STREAMS_AWARE);
+
+ if (qtdemux->preferred_protection_system_id) {
+ g_free (qtdemux->preferred_protection_system_id);
+ qtdemux->preferred_protection_system_id = NULL;
+ }
+ } else if (qtdemux->mss_mode) {
+ gst_flow_combiner_reset (qtdemux->flowcombiner);
+ g_ptr_array_foreach (qtdemux->active_streams,
+ (GFunc) gst_qtdemux_stream_clear, NULL);
+ } else {
+ gst_flow_combiner_reset (qtdemux->flowcombiner);
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+ stream->sent_eos = FALSE;
+ stream->time_position = 0;
+ stream->accumulated_base = 0;
+ stream->last_keyframe_dts = GST_CLOCK_TIME_NONE;
+ }
+ }
+ }
+
+
+ /* Maps the @segment to the qt edts internal segments and pushes
+ * the corresponding segment event.
+ *
+ * If it ends up being at an empty segment, a gap will be pushed and the next
+ * edts segment will be activated in sequence.
+ *
+ * To be used in push-mode only */
+ static void
+ gst_qtdemux_map_and_push_segments (GstQTDemux * qtdemux, GstSegment * segment)
+ {
+ gint i, iter;
+
+ for (iter = 0; iter < QTDEMUX_N_STREAMS (qtdemux); iter++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, iter);
+
+ stream->time_position = segment->start;
+
+ /* in push mode we should be guaranteed that we will have empty segments
+ * at the beginning and then one segment after, other scenarios are not
+ * supported and are discarded when parsing the edts */
+ for (i = 0; i < stream->n_segments; i++) {
+ if (stream->segments[i].stop_time > segment->start) {
+ /* push the empty segment and move to the next one */
+ gst_qtdemux_activate_segment (qtdemux, stream, i,
+ stream->time_position);
+ if (QTSEGMENT_IS_EMPTY (&stream->segments[i])) {
+ gst_qtdemux_send_gap_for_segment (qtdemux, stream, i,
+ stream->time_position);
+
+ /* accumulate previous segments */
+ if (GST_CLOCK_TIME_IS_VALID (stream->segment.stop))
+ stream->accumulated_base +=
+ (stream->segment.stop -
+ stream->segment.start) / ABS (stream->segment.rate);
+ continue;
+ }
+
+ g_assert (i == stream->n_segments - 1);
+ }
+ }
+ }
+ }
+
+ static void
+ gst_qtdemux_stream_concat (GstQTDemux * qtdemux, GPtrArray * dest,
+ GPtrArray * src)
+ {
+ guint i;
+ guint len;
+
+ len = src->len;
+
+ if (len == 0)
+ return;
+
+ for (i = 0; i < len; i++) {
+ QtDemuxStream *stream = g_ptr_array_index (src, i);
+
+ #ifndef GST_DISABLE_GST_DEBUG
+ GST_DEBUG_OBJECT (qtdemux, "Move stream %p (stream-id %s) to %p",
+ stream, GST_STR_NULL (stream->stream_id), dest);
+ #endif
+ g_ptr_array_add (dest, gst_qtdemux_stream_ref (stream));
+ }
+
+ g_ptr_array_set_size (src, 0);
+ }
+
+ static gboolean
+ gst_qtdemux_handle_sink_event (GstPad * sinkpad, GstObject * parent,
+ GstEvent * event)
+ {
+ GstQTDemux *demux = GST_QTDEMUX (parent);
+ gboolean res = TRUE;
+
+ GST_LOG_OBJECT (demux, "handling %s event", GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEGMENT:
+ {
+ gint64 offset = 0;
+ QtDemuxStream *stream;
+ gint idx;
+ GstSegment segment;
+
+ /* some debug output */
+ gst_event_copy_segment (event, &segment);
+ GST_DEBUG_OBJECT (demux, "received newsegment %" GST_SEGMENT_FORMAT,
+ &segment);
+
+ if (segment.format == GST_FORMAT_TIME) {
+ demux->upstream_format_is_time = TRUE;
+ demux->segment_seqnum = gst_event_get_seqnum (event);
+ } else {
+ GST_DEBUG_OBJECT (demux, "Not storing upstream newsegment, "
+ "not in time format");
+
+ /* chain will send initial newsegment after pads have been added */
+ if (demux->state != QTDEMUX_STATE_MOVIE || !QTDEMUX_N_STREAMS (demux)) {
+ GST_DEBUG_OBJECT (demux, "still starting, eating event");
+ goto exit;
+ }
+ }
+
+ /* check if this matches a time seek we received previously
+ * FIXME for backwards compatibility reasons we use the
+ * seek_offset here to compare. In the future we might want to
+ * change this to use the seqnum as it uniquely should identify
+ * the segment that corresponds to the seek. */
+ GST_DEBUG_OBJECT (demux, "Stored seek offset: %" G_GINT64_FORMAT
+ ", received segment offset %" G_GINT64_FORMAT,
+ demux->seek_offset, segment.start);
+ if (segment.format == GST_FORMAT_BYTES
+ && demux->seek_offset == segment.start) {
+ GST_OBJECT_LOCK (demux);
+ offset = segment.start;
+
+ segment.format = GST_FORMAT_TIME;
+ segment.start = demux->push_seek_start;
+ segment.stop = demux->push_seek_stop;
+ GST_DEBUG_OBJECT (demux, "Replaced segment with stored seek "
+ "segment %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (segment.start), GST_TIME_ARGS (segment.stop));
+ GST_OBJECT_UNLOCK (demux);
+ }
+
+ /* we only expect a BYTE segment, e.g. following a seek */
+ if (segment.format == GST_FORMAT_BYTES) {
+ if (GST_CLOCK_TIME_IS_VALID (segment.start)) {
+ offset = segment.start;
+
+ gst_qtdemux_find_sample (demux, segment.start, TRUE, FALSE, NULL,
+ NULL, (gint64 *) & segment.start);
+ if ((gint64) segment.start < 0)
+ segment.start = 0;
+ }
+ if (GST_CLOCK_TIME_IS_VALID (segment.stop)) {
+ gst_qtdemux_find_sample (demux, segment.stop, FALSE, FALSE, NULL,
+ NULL, (gint64 *) & segment.stop);
+ /* keyframe seeking should already arrange for stop >= start,
+ * but make sure in other rare cases */
+ segment.stop = MAX (segment.stop, segment.start);
+ }
+ } else if (segment.format == GST_FORMAT_TIME) {
+ /* push all data on the adapter before starting this
+ * new segment */
+ gst_qtdemux_process_adapter (demux, TRUE);
+ } else {
+ GST_DEBUG_OBJECT (demux, "unsupported segment format, ignoring");
+ goto exit;
+ }
+
+ /* We shouldn't modify upstream driven TIME FORMAT segment */
+ if (!demux->upstream_format_is_time) {
+ /* accept upstream's notion of segment and distribute along */
+ segment.format = GST_FORMAT_TIME;
+ segment.position = segment.time = segment.start;
+ segment.duration = demux->segment.duration;
+ segment.base = gst_segment_to_running_time (&demux->segment,
+ GST_FORMAT_TIME, demux->segment.position);
+ }
+
+ gst_segment_copy_into (&segment, &demux->segment);
+ GST_DEBUG_OBJECT (demux, "Pushing newseg %" GST_SEGMENT_FORMAT, &segment);
+
+ /* map segment to internal qt segments and push on each stream */
+ if (QTDEMUX_N_STREAMS (demux)) {
+ demux->need_segment = TRUE;
+ gst_qtdemux_check_send_pending_segment (demux);
+ }
+
+ /* clear leftover in current segment, if any */
+ gst_adapter_clear (demux->adapter);
+
+ /* set up streaming thread */
+ demux->offset = offset;
+ if (demux->upstream_format_is_time) {
+ GST_DEBUG_OBJECT (demux, "Upstream is driving in time format, "
+ "set values to restart reading from a new atom");
+ demux->neededbytes = 16;
+ demux->todrop = 0;
+ } else {
+ gst_qtdemux_find_sample (demux, offset, TRUE, TRUE, &stream, &idx,
+ NULL);
+ if (stream) {
+ demux->todrop = stream->samples[idx].offset - offset;
+ demux->neededbytes = demux->todrop + stream->samples[idx].size;
+ } else {
+ /* set up for EOS */
+ demux->neededbytes = -1;
+ demux->todrop = 0;
+ }
+ }
+ exit:
+ gst_event_unref (event);
+ res = TRUE;
+ goto drop;
+ }
+ case GST_EVENT_FLUSH_START:
+ {
+ if (gst_event_get_seqnum (event) == demux->offset_seek_seqnum) {
+ gst_event_unref (event);
+ goto drop;
+ }
+ QTDEMUX_EXPOSE_LOCK (demux);
+ res = gst_pad_event_default (demux->sinkpad, parent, event);
+ QTDEMUX_EXPOSE_UNLOCK (demux);
+ goto drop;
+ }
+ case GST_EVENT_FLUSH_STOP:
+ {
+ guint64 dur;
+
+ dur = demux->segment.duration;
+ gst_qtdemux_reset (demux, FALSE);
+ demux->segment.duration = dur;
+
+ if (gst_event_get_seqnum (event) == demux->offset_seek_seqnum) {
+ gst_event_unref (event);
+ goto drop;
+ }
+ break;
+ }
+ case GST_EVENT_EOS:
+ /* If we are in push mode, and get an EOS before we've seen any streams,
+ * then error out - we have nowhere to send the EOS */
+ if (!demux->pullbased) {
+ gint i;
+ gboolean has_valid_stream = FALSE;
+ for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
+ if (QTDEMUX_NTH_STREAM (demux, i)->pad != NULL) {
+ has_valid_stream = TRUE;
+ break;
+ }
+ }
+ if (!has_valid_stream)
+ gst_qtdemux_post_no_playable_stream_error (demux);
+ else {
+ GST_DEBUG_OBJECT (demux, "Data still available after EOS: %u",
+ (guint) gst_adapter_available (demux->adapter));
+ if (gst_qtdemux_process_adapter (demux, TRUE) != GST_FLOW_OK) {
+ res = FALSE;
+ }
+ }
+ }
+ break;
+ case GST_EVENT_CAPS:{
+ GstCaps *caps = NULL;
+
+ gst_event_parse_caps (event, &caps);
+ gst_qtdemux_setcaps (demux, caps);
+ res = TRUE;
+ gst_event_unref (event);
+ goto drop;
+ }
+ case GST_EVENT_PROTECTION:
+ {
+ const gchar *system_id = NULL;
+
+ gst_event_parse_protection (event, &system_id, NULL, NULL);
+ GST_DEBUG_OBJECT (demux, "Received protection event for system ID %s",
+ system_id);
+ gst_qtdemux_append_protection_system_id (demux, system_id);
+ /* save the event for later, for source pads that have not been created */
+ g_queue_push_tail (&demux->protection_event_queue, gst_event_ref (event));
+ /* send it to all pads that already exist */
+ gst_qtdemux_push_event (demux, event);
+ res = TRUE;
+ goto drop;
+ }
+ case GST_EVENT_STREAM_START:
+ {
+ res = TRUE;
+ gst_event_unref (event);
+
+ /* Drain all the buffers */
+ gst_qtdemux_process_adapter (demux, TRUE);
+ gst_qtdemux_reset (demux, FALSE);
+ /* We expect a new moov box after a new stream-start event */
+ if (demux->exposed) {
+ gst_qtdemux_stream_concat (demux,
+ demux->old_streams, demux->active_streams);
+ }
+
+ goto drop;
+ }
+ default:
+ break;
+ }
+
+ res = gst_pad_event_default (demux->sinkpad, parent, event) & res;
+
+ drop:
+ return res;
+ }
+
+ static gboolean
+ gst_qtdemux_handle_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
+ {
+ GstQTDemux *demux = GST_QTDEMUX (parent);
+ gboolean res = FALSE;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_BITRATE:
+ {
+ GstClockTime duration;
+
+ /* populate demux->upstream_size if not done yet */
+ gst_qtdemux_check_seekability (demux);
+
+ if (demux->upstream_size != -1
+ && gst_qtdemux_get_duration (demux, &duration)) {
+ guint bitrate =
+ gst_util_uint64_scale (8 * demux->upstream_size, GST_SECOND,
+ duration);
+
+ GST_LOG_OBJECT (demux, "bitrate query byte length: %" G_GUINT64_FORMAT
+ " duration %" GST_TIME_FORMAT " resulting a bitrate of %u",
+ demux->upstream_size, GST_TIME_ARGS (duration), bitrate);
+
+ /* TODO: better results based on ranges/index tables */
+ gst_query_set_bitrate (query, bitrate);
+ res = TRUE;
+ }
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, (GstObject *) demux, query);
+ break;
+ }
+
+ return res;
+ }
+
+
+ #if 0
+ static void
+ gst_qtdemux_set_index (GstElement * element, GstIndex * index)
+ {
+ GstQTDemux *demux = GST_QTDEMUX (element);
+
+ GST_OBJECT_LOCK (demux);
+ if (demux->element_index)
+ gst_object_unref (demux->element_index);
+ if (index) {
+ demux->element_index = gst_object_ref (index);
+ } else {
+ demux->element_index = NULL;
+ }
+ GST_OBJECT_UNLOCK (demux);
+ /* object lock might be taken again */
+ if (index)
+ gst_index_get_writer_id (index, GST_OBJECT (element), &demux->index_id);
+ GST_DEBUG_OBJECT (demux, "Set index %" GST_PTR_FORMAT "for writer id %d",
+ demux->element_index, demux->index_id);
+ }
+
+ static GstIndex *
+ gst_qtdemux_get_index (GstElement * element)
+ {
+ GstIndex *result = NULL;
+ GstQTDemux *demux = GST_QTDEMUX (element);
+
+ GST_OBJECT_LOCK (demux);
+ if (demux->element_index)
+ result = gst_object_ref (demux->element_index);
+ GST_OBJECT_UNLOCK (demux);
+
+ GST_DEBUG_OBJECT (demux, "Returning index %" GST_PTR_FORMAT, result);
+
+ return result;
+ }
+ #endif
+
+ static void
+ gst_qtdemux_stbl_free (QtDemuxStream * stream)
+ {
+ g_free ((gpointer) stream->stco.data);
+ stream->stco.data = NULL;
+ g_free ((gpointer) stream->stsz.data);
+ stream->stsz.data = NULL;
+ g_free ((gpointer) stream->stsc.data);
+ stream->stsc.data = NULL;
+ g_free ((gpointer) stream->stts.data);
+ stream->stts.data = NULL;
+ g_free ((gpointer) stream->stss.data);
+ stream->stss.data = NULL;
+ g_free ((gpointer) stream->stps.data);
+ stream->stps.data = NULL;
+ g_free ((gpointer) stream->ctts.data);
+ stream->ctts.data = NULL;
+ }
+
+ static void
+ gst_qtdemux_stream_flush_segments_data (QtDemuxStream * stream)
+ {
+ g_free (stream->segments);
+ stream->segments = NULL;
+ stream->segment_index = -1;
+ stream->accumulated_base = 0;
+ }
+
+ static void
+ gst_qtdemux_stream_flush_samples_data (QtDemuxStream * stream)
+ {
+ g_free (stream->samples);
+ stream->samples = NULL;
+ gst_qtdemux_stbl_free (stream);
+
+ /* fragments */
+ g_free (stream->ra_entries);
+ stream->ra_entries = NULL;
+ stream->n_ra_entries = 0;
+
+ stream->sample_index = -1;
+ stream->stbl_index = -1;
+ stream->n_samples = 0;
+ stream->time_position = 0;
+
+ stream->n_samples_moof = 0;
+ stream->duration_moof = 0;
+ stream->duration_last_moof = 0;
+ }
+
+ static void
+ gst_qtdemux_stream_clear (QtDemuxStream * stream)
+ {
+ gint i;
+ if (stream->allocator)
+ gst_object_unref (stream->allocator);
+ while (stream->buffers) {
+ gst_buffer_unref (GST_BUFFER_CAST (stream->buffers->data));
+ stream->buffers = g_slist_delete_link (stream->buffers, stream->buffers);
+ }
+ for (i = 0; i < stream->stsd_entries_length; i++) {
+ QtDemuxStreamStsdEntry *entry = &stream->stsd_entries[i];
+ if (entry->rgb8_palette) {
+ gst_memory_unref (entry->rgb8_palette);
+ entry->rgb8_palette = NULL;
+ }
+ entry->sparse = FALSE;
+ }
+
+ if (stream->stream_tags)
+ gst_tag_list_unref (stream->stream_tags);
+
+ stream->stream_tags = gst_tag_list_new_empty ();
+ gst_tag_list_set_scope (stream->stream_tags, GST_TAG_SCOPE_STREAM);
+ g_free (stream->redirect_uri);
+ stream->redirect_uri = NULL;
+ stream->sent_eos = FALSE;
+ stream->protected = FALSE;
+ if (stream->protection_scheme_info) {
+ if (stream->protection_scheme_type == FOURCC_cenc
+ || stream->protection_scheme_type == FOURCC_cbcs) {
+ QtDemuxCencSampleSetInfo *info =
+ (QtDemuxCencSampleSetInfo *) stream->protection_scheme_info;
+ if (info->default_properties)
+ gst_structure_free (info->default_properties);
+ if (info->crypto_info)
+ g_ptr_array_free (info->crypto_info, TRUE);
+ }
+ if (stream->protection_scheme_type == FOURCC_aavd) {
+ QtDemuxAavdEncryptionInfo *info =
+ (QtDemuxAavdEncryptionInfo *) stream->protection_scheme_info;
+ if (info->default_properties)
+ gst_structure_free (info->default_properties);
+ }
+ g_free (stream->protection_scheme_info);
+ stream->protection_scheme_info = NULL;
+ }
+ stream->protection_scheme_type = 0;
+ stream->protection_scheme_version = 0;
+ g_queue_foreach (&stream->protection_scheme_event_queue,
+ (GFunc) gst_event_unref, NULL);
+ g_queue_clear (&stream->protection_scheme_event_queue);
+ gst_qtdemux_stream_flush_segments_data (stream);
+ gst_qtdemux_stream_flush_samples_data (stream);
+ }
+
+ static void
+ gst_qtdemux_stream_reset (QtDemuxStream * stream)
+ {
+ gint i;
+ gst_qtdemux_stream_clear (stream);
+ for (i = 0; i < stream->stsd_entries_length; i++) {
+ QtDemuxStreamStsdEntry *entry = &stream->stsd_entries[i];
+ if (entry->caps) {
+ gst_caps_unref (entry->caps);
+ entry->caps = NULL;
+ }
+ }
+ g_free (stream->stsd_entries);
+ stream->stsd_entries = NULL;
+ stream->stsd_entries_length = 0;
+ }
+
+ static QtDemuxStream *
+ gst_qtdemux_stream_ref (QtDemuxStream * stream)
+ {
+ g_atomic_int_add (&stream->ref_count, 1);
+
+ return stream;
+ }
+
+ static void
+ gst_qtdemux_stream_unref (QtDemuxStream * stream)
+ {
+ if (g_atomic_int_dec_and_test (&stream->ref_count)) {
+ gst_qtdemux_stream_reset (stream);
+ gst_tag_list_unref (stream->stream_tags);
+ if (stream->pad) {
+ GstQTDemux *demux = stream->demux;
+ gst_element_remove_pad (GST_ELEMENT_CAST (demux), stream->pad);
+ GST_OBJECT_LOCK (demux);
+ gst_flow_combiner_remove_pad (demux->flowcombiner, stream->pad);
+ GST_OBJECT_UNLOCK (demux);
+ }
+ g_free (stream->stream_id);
+ g_free (stream);
+ }
+ }
+
+ static GstStateChangeReturn
+ gst_qtdemux_change_state (GstElement * element, GstStateChange transition)
+ {
+ GstQTDemux *qtdemux = GST_QTDEMUX (element);
+ GstStateChangeReturn result = GST_STATE_CHANGE_FAILURE;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ gst_qtdemux_reset (qtdemux, TRUE);
+ break;
+ default:
+ break;
+ }
+
+ result = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:{
+ gst_qtdemux_reset (qtdemux, TRUE);
+ break;
+ }
+ default:
+ break;
+ }
+
+ return result;
+ }
+
+ static void
+ gst_qtdemux_set_context (GstElement * element, GstContext * context)
+ {
+ GstQTDemux *qtdemux = GST_QTDEMUX (element);
+
+ g_return_if_fail (GST_IS_CONTEXT (context));
+
+ if (gst_context_has_context_type (context,
+ "drm-preferred-decryption-system-id")) {
+ const GstStructure *s;
+
+ s = gst_context_get_structure (context);
+ g_free (qtdemux->preferred_protection_system_id);
+ qtdemux->preferred_protection_system_id =
+ g_strdup (gst_structure_get_string (s, "decryption-system-id"));
+ GST_DEBUG_OBJECT (element, "set preferred decryption system to %s",
+ qtdemux->preferred_protection_system_id);
+ }
+
+ GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
+ }
+
+ static void
+ qtdemux_parse_ftyp (GstQTDemux * qtdemux, const guint8 * buffer, gint length)
+ {
+ /* counts as header data */
+ qtdemux->header_size += length;
+
+ /* only parse sufficiently complete ftyp atoms */
+ if (length >= 20) {
+ GstBuffer *buf;
+
+ qtdemux->major_brand = QT_FOURCC (buffer + 8);
+ GST_DEBUG_OBJECT (qtdemux, "major brand: %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (qtdemux->major_brand));
+ if (qtdemux->comp_brands)
+ gst_buffer_unref (qtdemux->comp_brands);
+ buf = qtdemux->comp_brands = gst_buffer_new_and_alloc (length - 16);
+ gst_buffer_fill (buf, 0, buffer + 16, length - 16);
+ }
+ }
+
++#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
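++/* Scans @xml_str for the opening tag @param_name and parses the integer that
++ * follows it, up to the next '<'. Surrounding blanks are trimmed and @value
++ * is left untouched on any parse error. */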
++static void
++_get_int_value_from_xml_string (GstQTDemux * qtdemux,
++ const char *xml_str, const char *param_name, int *value)
++{
++ char *value_start, *value_end, *endptr;
++ const short value_length_max = 12;
++ char init_view_ret[12];
++ int value_length = 0;
++ int i = 0;
++
++ value_start = (xml_str && param_name) ? strstr (xml_str, param_name) : NULL;
++
++ if (!value_start) {
++ GST_WARNING_OBJECT (qtdemux, "error: parameter does not exist: %s",
++ param_name);
++ return;
++ }
++
++ value_start += strlen (param_name);
++ while ((value_start[0] == ' ') || (value_start[0] == '\t'))
++ value_start++;
++
++ value_end = strchr (value_start, '<');
++ if (!value_end) {
++ GST_ERROR_OBJECT (qtdemux, "error: incorrect XML\n");
++ return;
++ }
++
++ value_length = value_end - value_start;
++ while ((value_length >= 1) && ((value_start[value_length - 1] == ' ')
++ || (value_start[value_length - 1] == '\t')))
++ value_length--;
++
++ if (value_start[i] == '+' || value_start[i] == '-')
++ i++;
++ while (i < value_length) {
++ if (value_start[i] < '0' || value_start[i] > '9') {
++ GST_ERROR_OBJECT (qtdemux,
++ "error: incorrect value, integer was expected\n");
++ return;
++ }
++ i++;
++ }
++
++ if (value_length >= value_length_max || value_length < 1) {
++ GST_ERROR_OBJECT (qtdemux, "error: empty XML value or incorrect range\n");
++ return;
++ }
++
++ strncpy (init_view_ret, value_start, value_length_max);
++ init_view_ret[value_length] = '\0';
++
++ *value = strtol (init_view_ret, &endptr, 10);
++ if (endptr == init_view_ret) {
++ GST_ERROR_OBJECT (qtdemux, "error: no digits were found\n");
++ return;
++ }
++
++ return;
++}
++
++static void
++_get_string_value_from_xml_string (GstQTDemux * qtdemux,
++ const char *xml_str, const char *param_name, char **value)
++{
++ char *value_start, *value_end;
++ const short value_length_max = 256;
++ int value_length = 0;
++
++ value_start = (xml_str && param_name) ? strstr (xml_str, param_name) : NULL;
++
++ if (!value_start) {
++ GST_WARNING_OBJECT (qtdemux, "error: parameter does not exist: %s",
++ param_name);
++ return;
++ }
++
++ value_start += strlen (param_name);
++ while ((value_start[0] == ' ') || (value_start[0] == '\t'))
++ value_start++;
++
++ value_end = strchr (value_start, '<');
++ if (!value_end) {
++ GST_ERROR_OBJECT (qtdemux, "error: incorrect XML\n");
++ return;
++ }
++
++ value_length = value_end - value_start;
++ while ((value_length >= 1) && ((value_start[value_length - 1] == ' ')
++ || (value_start[value_length - 1] == '\t')))
++ value_length--;
++
++ if (value_length >= value_length_max || value_length < 1) {
++ GST_ERROR_OBJECT (qtdemux, "error: empty XML value or incorrect range\n");
++ return;
++ }
++
++ *value = g_strndup (value_start, value_length);
++
++ return;
++}
++
++static void
++_get_bool_value_from_xml_string (GstQTDemux * qtdemux,
++ const char *xml_str, const char *param_name, gboolean * value)
++{
++ char *value_start, *value_end;
++ int value_length = 0;
++
++ value_start = (xml_str && param_name) ? strstr (xml_str, param_name) : NULL;
++
++ if (!value_start) {
++ GST_WARNING_OBJECT (qtdemux, "error: parameter does not exist: %s",
++ param_name);
++ return;
++ }
++
++ value_start += strlen (param_name);
++ while ((value_start[0] == ' ') || (value_start[0] == '\t'))
++ value_start++;
++
++ value_end = strchr (value_start, '<');
++ if (!value_end) {
++ GST_ERROR_OBJECT (qtdemux, "error: incorrect XML\n");
++ return;
++ }
++
++ value_length = value_end - value_start;
++ while ((value_length >= 1) && ((value_start[value_length - 1] == ' ')
++ || (value_start[value_length - 1] == '\t')))
++ value_length--;
++
++ if (value_length < 1) {
++ GST_ERROR_OBJECT (qtdemux, "error: empty XML value or incorrect range\n");
++ return;
++ }
++
++ *value = g_strstr_len (value_start, value_length, "true") ? TRUE : FALSE;
++
++ return;
++}
++
++static void
++_parse_spatial_video_metadata_from_xml_string (GstQTDemux * qtdemux, const char *xmlStr)
++{
++ const char is_spherical_str[] = "<GSpherical:Spherical>";
++ const char is_stitched_str[] = "<GSpherical:Stitched>";
++ const char stitching_software_str[] = "<GSpherical:StitchingSoftware>";
++ const char projection_type_str[] = "<GSpherical:ProjectionType>";
++ const char stereo_mode_str[] = "<GSpherical:StereoMode>";
++ const char source_count_str[] = "<GSpherical:SourceCount>";
++ const char init_view_heading_str[] = "<GSpherical:InitialViewHeadingDegrees>";
++ const char init_view_pitch_str[] = "<GSpherical:InitialViewPitchDegrees>";
++ const char init_view_roll_str[] = "<GSpherical:InitialViewRollDegrees>";
++ const char timestamp_str[] = "<GSpherical:Timestamp>";
++ const char full_pano_width_str[] = "<GSpherical:FullPanoWidthPixels>";
++ const char full_pano_height_str[] = "<GSpherical:FullPanoHeightPixels>";
++ const char cropped_area_image_width_str[] =
++ "<GSpherical:CroppedAreaImageWidthPixels>";
++ const char cropped_area_image_height_str[] =
++ "<GSpherical:CroppedAreaImageHeightPixels>";
++ const char cropped_area_left_str[] = "<GSpherical:CroppedAreaLeftPixels>";
++ const char cropped_area_top_str[] = "<GSpherical:CroppedAreaTopPixels>";
++
++ QtDemuxSphericalMetadata * spherical_metadata = qtdemux->spherical_metadata;
++
++ _get_bool_value_from_xml_string (qtdemux, xmlStr, is_spherical_str,
++ (gboolean *) & spherical_metadata->is_spherical);
++ _get_bool_value_from_xml_string (qtdemux, xmlStr, is_stitched_str,
++ (gboolean *) & spherical_metadata->is_stitched);
++
++ if (spherical_metadata->is_spherical && spherical_metadata->is_stitched) {
++ _get_string_value_from_xml_string (qtdemux, xmlStr,
++ stitching_software_str, &spherical_metadata->stitching_software);
++ _get_string_value_from_xml_string (qtdemux, xmlStr,
++ projection_type_str, &spherical_metadata->projection_type);
++ _get_string_value_from_xml_string (qtdemux, xmlStr, stereo_mode_str,
++ &spherical_metadata->stereo_mode);
++ _get_int_value_from_xml_string (qtdemux, xmlStr, source_count_str,
++ &spherical_metadata->source_count);
++ _get_int_value_from_xml_string (qtdemux, xmlStr,
++ init_view_heading_str, &spherical_metadata->init_view_heading);
++ _get_int_value_from_xml_string (qtdemux, xmlStr, init_view_pitch_str,
++ &spherical_metadata->init_view_pitch);
++ _get_int_value_from_xml_string (qtdemux, xmlStr, init_view_roll_str,
++ &spherical_metadata->init_view_roll);
++ _get_int_value_from_xml_string (qtdemux, xmlStr, timestamp_str,
++ &spherical_metadata->timestamp);
++ _get_int_value_from_xml_string (qtdemux, xmlStr, full_pano_width_str,
++ &spherical_metadata->full_pano_width_pixels);
++ _get_int_value_from_xml_string (qtdemux, xmlStr,
++ full_pano_height_str, &spherical_metadata->full_pano_height_pixels);
++ _get_int_value_from_xml_string (qtdemux, xmlStr,
++ cropped_area_image_width_str,
++ &spherical_metadata->cropped_area_image_width);
++ _get_int_value_from_xml_string (qtdemux, xmlStr,
++ cropped_area_image_height_str,
++ &spherical_metadata->cropped_area_image_height);
++ _get_int_value_from_xml_string (qtdemux, xmlStr, cropped_area_left_str,
++ &spherical_metadata->cropped_area_left);
++ _get_int_value_from_xml_string (qtdemux, xmlStr, cropped_area_top_str,
++ &spherical_metadata->cropped_area_top);
++ }
++
++ return;
++}
++
++static void
++gst_tag_register_spherical_tags (void)
++{
++ gst_tag_register ("is_spherical", GST_TAG_FLAG_META,
++ G_TYPE_INT,
++ _("tag-spherical"),
++ _("Flag indicating if the video is a spherical video"),
++ NULL);
++ gst_tag_register ("is_stitched", GST_TAG_FLAG_META,
++ G_TYPE_INT,
++ _("tag-stitched"),
++ _("Flag indicating if the video is stitched"),
++ NULL);
++ gst_tag_register ("stitching_software", GST_TAG_FLAG_META,
++ G_TYPE_STRING,
++ _("tag-stitching-software"),
++ _("Software used to stitch the spherical video"),
++ NULL);
++ gst_tag_register ("projection_type", GST_TAG_FLAG_META,
++ G_TYPE_STRING,
++ _("tag-projection-type"),
++ _("Projection type used in the video frames"),
++ NULL);
++ gst_tag_register ("stereo_mode", GST_TAG_FLAG_META,
++ G_TYPE_STRING,
++ _("tag-stereo-mode"),
++ _("Description of stereoscopic 3D layout"),
++ NULL);
++ gst_tag_register ("source_count", GST_TAG_FLAG_META,
++ G_TYPE_INT,
++ _("tag-source-count"),
++ _("Number of cameras used to create the spherical video"),
++ NULL);
++ gst_tag_register ("init_view_heading", GST_TAG_FLAG_META,
++ G_TYPE_INT,
++ _("tag-init-view-heading"),
++ _("The heading angle of the initial view in degrees"),
++ NULL);
++ gst_tag_register ("init_view_pitch", GST_TAG_FLAG_META,
++ G_TYPE_INT,
++ _("tag-init-view-pitch"),
++ _("The pitch angle of the initial view in degrees"),
++ NULL);
++ gst_tag_register ("init_view_roll", GST_TAG_FLAG_META,
++ G_TYPE_INT,
++ _("tag-init-view-roll"),
++ _("The roll angle of the initial view in degrees"),
++ NULL);
++ gst_tag_register ("timestamp", GST_TAG_FLAG_META,
++ G_TYPE_INT,
++ _("tag-timestamp"),
++ _("Epoch timestamp of when the first frame in the video was recorded"),
++ NULL);
++ gst_tag_register ("full_pano_width_pixels", GST_TAG_FLAG_META,
++ G_TYPE_INT,
++ _("tag-full-pano-width"),
++ _("Width of the encoded video frame in pixels"),
++ NULL);
++ gst_tag_register ("full_pano_height_pixels", GST_TAG_FLAG_META,
++ G_TYPE_INT,
++ _("tag-full-pano-height"),
++ _("Height of the encoded video frame in pixels"),
++ NULL);
++ gst_tag_register ("cropped_area_image_width", GST_TAG_FLAG_META,
++ G_TYPE_INT,
++ _("tag-cropped-area-image-width"),
++ _("Width of the video frame to display (e.g. cropping)"),
++ NULL);
++ gst_tag_register ("cropped_area_image_height", GST_TAG_FLAG_META,
++ G_TYPE_INT,
++ _("tag-cropped-area-image-height"),
++ _("Height of the video frame to display (e.g. cropping)"),
++ NULL);
++ gst_tag_register ("cropped_area_left", GST_TAG_FLAG_META,
++ G_TYPE_INT,
++ _("tag-cropped-area-left"),
++ _("Column where the left edge of the image was cropped from the"
++ " full sized panorama"),
++ NULL);
++ gst_tag_register ("cropped_area_top", GST_TAG_FLAG_META,
++ G_TYPE_INT,
++ _("tag-cropped-area-top"),
++ _("Row where the top edge of the image was cropped from the"
++ " full sized panorama"),
++ NULL);
++ gst_tag_register ("ambisonic_type", GST_TAG_FLAG_META,
++ G_TYPE_INT,
++ _("tag-ambisonic-type"),
++ _("Specifies the type of ambisonic audio represented"),
++ NULL);
++ gst_tag_register ("ambisonic_format", GST_TAG_FLAG_META,
++ G_TYPE_INT,
++ _("tag-ambisonic-format"),
++ _("Specifies the ambisonic audio format"),
++ NULL);
++ gst_tag_register ("ambisonic_order", GST_TAG_FLAG_META,
++ G_TYPE_INT,
++ _("tag-ambisonic-order"),
++ _("Specifies the ambisonic audio channel order"),
++ NULL);
++
++ return;
++}
++
++static void
++_send_spherical_metadata_msg_to_bus (GstQTDemux * qtdemux)
++{
++ GstTagList *taglist;
++ QtDemuxSphericalMetadata *spherical_metadata = qtdemux->spherical_metadata;
++
++ GST_DEBUG_OBJECT (qtdemux, "is_spherical = %d",
++ spherical_metadata->is_spherical);
++ GST_DEBUG_OBJECT (qtdemux, "is_stitched = %d",
++ spherical_metadata->is_stitched);
++ GST_DEBUG_OBJECT (qtdemux, "stitching_software = %s",
++ spherical_metadata->stitching_software);
++ GST_DEBUG_OBJECT (qtdemux, "projection_type = %s",
++ spherical_metadata->projection_type);
++ GST_DEBUG_OBJECT (qtdemux, "stereo_mode = %s",
++ spherical_metadata->stereo_mode);
++ GST_DEBUG_OBJECT (qtdemux, "source_count %d",
++ spherical_metadata->source_count);
++ GST_DEBUG_OBJECT (qtdemux, "init_view_heading = %d",
++ spherical_metadata->init_view_heading);
++ GST_DEBUG_OBJECT (qtdemux, "init_view_pitch = %d",
++ spherical_metadata->init_view_pitch);
++ GST_DEBUG_OBJECT (qtdemux, "init_view_roll = %d",
++ spherical_metadata->init_view_roll);
++ GST_DEBUG_OBJECT (qtdemux, "timestamp = %d", spherical_metadata->timestamp);
++ GST_DEBUG_OBJECT (qtdemux, "full_pano_width_pixels = %d",
++ spherical_metadata->full_pano_width_pixels);
++ GST_DEBUG_OBJECT (qtdemux, "full_pano_height_pixels = %d",
++ spherical_metadata->full_pano_height_pixels);
++ GST_DEBUG_OBJECT (qtdemux, "cropped_area_image_width = %d",
++ spherical_metadata->cropped_area_image_width);
++ GST_DEBUG_OBJECT (qtdemux, "cropped_area_image_height = %d",
++ spherical_metadata->cropped_area_image_height);
++ GST_DEBUG_OBJECT (qtdemux, "cropped_area_left = %d",
++ spherical_metadata->cropped_area_left);
++ GST_DEBUG_OBJECT (qtdemux, "cropped_area_top = %d",
++ spherical_metadata->cropped_area_top);
++ GST_DEBUG_OBJECT (qtdemux, "ambisonic_type = %d",
++ spherical_metadata->ambisonic_type);
++ GST_DEBUG_OBJECT (qtdemux, "ambisonic_order = %d",
++ spherical_metadata->ambisonic_order);
++ GST_DEBUG_OBJECT (qtdemux, "ambisonic_format = %d",
++ spherical_metadata->ambisonic_format);
++
++ taglist = gst_tag_list_new_empty ();
++ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
++ "is_spherical", spherical_metadata->is_spherical,
++ "is_stitched", spherical_metadata->is_stitched,
++ "source_count", spherical_metadata->source_count,
++ "init_view_heading", spherical_metadata->init_view_heading,
++ "init_view_pitch", spherical_metadata->init_view_pitch,
++ "init_view_roll", spherical_metadata->init_view_roll,
++ "timestamp", spherical_metadata->timestamp,
++ "full_pano_width_pixels", spherical_metadata->full_pano_width_pixels,
++ "full_pano_height_pixels", spherical_metadata->full_pano_height_pixels,
++ "cropped_area_image_width", spherical_metadata->cropped_area_image_width,
++ "cropped_area_image_height", spherical_metadata->cropped_area_image_height,
++ "cropped_area_left", spherical_metadata->cropped_area_left,
++ "cropped_area_top", spherical_metadata->cropped_area_top,
++ "ambisonic_type", spherical_metadata->ambisonic_type,
++ "ambisonic_format", spherical_metadata->ambisonic_format,
++ "ambisonic_order", spherical_metadata->ambisonic_order,
++ NULL);
++
++ if (spherical_metadata->stitching_software)
++ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
++ "stitching_software", spherical_metadata->stitching_software,
++ NULL);
++ if (spherical_metadata->projection_type)
++ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
++ "projection_type", spherical_metadata->projection_type,
++ NULL);
++ if (spherical_metadata->stereo_mode)
++ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
++ "stereo_mode", spherical_metadata->stereo_mode,
++ NULL);
++
++ gst_element_post_message (GST_ELEMENT_CAST (qtdemux),
++ gst_message_new_tag (GST_OBJECT_CAST (qtdemux),
++ gst_tag_list_copy (taglist)));
++
++ gst_tag_list_unref (taglist);
++
++ return;
++}
++
++static void
++qtdemux_parse_SA3D (GstQTDemux * qtdemux, const guint8 * buffer, gint length)
++{
++ guint offset = 0;
++
++ guint8 version = 0;
++ guint8 ambisonic_type = 0;
++ guint32 ambisonic_order = 0;
++ guint8 ambisonic_channel_ordering = 0;
++ guint8 ambisonic_normalization = 0;
++ guint32 num_channels = 0;
++ guint32 channel_map[49] = { 0 }; /* Up to 6th order */
++
++ int i;
++
++ GST_DEBUG_OBJECT (qtdemux, "qtdemux_parse_SA3D");
++
++ qtdemux->header_size += length;
++ offset = (QT_UINT32 (buffer) == 0) ? 16 : 8;
++
++ if (length <= offset + 16) {
++ GST_DEBUG_OBJECT (qtdemux, "SA3D atom is too short, skipping");
++ return;
++ }
++
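++ /* SA3D payload after the size/fourcc header: version (byte 0),
++ * ambisonic_type (byte 1), ambisonic_order (bytes 2-5, BE),
++ * ambisonic_channel_ordering (byte 6), ambisonic_normalization (byte 7),
++ * num_channels (bytes 8-11, BE), then one 32-bit BE channel_map entry
++ * per channel */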
++ version = QT_UINT8 (buffer + offset);
++ ambisonic_type = QT_UINT8 (buffer + offset + 1);
++ ambisonic_order = QT_UINT32 (buffer + offset + 2);
++ ambisonic_channel_ordering = QT_UINT8 (buffer + offset + 6);
++ ambisonic_normalization = QT_UINT8 (buffer + offset + 7);
++ num_channels = QT_UINT32 (buffer + offset + 8);
++ /* clamp to the channel_map capacity and to the bytes actually present in
++ * the atom so a bogus num_channels cannot overrun the array or the buffer */
++ if (num_channels > G_N_ELEMENTS (channel_map))
++ num_channels = G_N_ELEMENTS (channel_map);
++ if (num_channels > ((guint) length - offset - 12) / 4)
++ num_channels = ((guint) length - offset - 12) / 4;
++ for (i = 0; i < (gint) num_channels; ++i)
++ channel_map[i] = QT_UINT32 (buffer + offset + 12 + i * 4);
++
++ GST_DEBUG_OBJECT (qtdemux, "version: %d", version);
++ GST_DEBUG_OBJECT (qtdemux, "ambisonic_type: %d", ambisonic_type);
++ GST_DEBUG_OBJECT (qtdemux, "ambisonic_order: %d", ambisonic_order);
++ GST_DEBUG_OBJECT (qtdemux, "ambisonic_channel_ordering: %d",
++ ambisonic_channel_ordering);
++ GST_DEBUG_OBJECT (qtdemux, "ambisonic_normalization: %d",
++ ambisonic_normalization);
++ GST_DEBUG_OBJECT (qtdemux, "num_channels: %d", num_channels);
++ for (i = 0; i < num_channels; ++i)
++ GST_DEBUG_OBJECT (qtdemux, "channel_map: %d", channel_map[i]);
++
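++ /* first-order streams are classified as ambiX (ACN ordering, SN3D
++ * normalization, identity channel map) or traditional B-format AMB
++ * (FuMa ordering and normalization, W-X-Y-Z channel map) */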
++ if (version == RFC_AMBISONIC_SA3DBOX_VERSION_SUPPORTED) {
++ if (ambisonic_type == RFC_AMBISONIC_TYPE_PERIPHONIC)
++ qtdemux->spherical_metadata->ambisonic_type = QTDEMUX_AMBISONIC_TYPE_PERIPHONIC;
++
++ if (ambisonic_order == RFC_AMBISONIC_ORDER_FOA) {
++ if (num_channels == 4) {
++ qtdemux->spherical_metadata->ambisonic_order = QTDEMUX_AMBISONIC_ORDER_FOA;
++
++ if ((ambisonic_channel_ordering == RFC_AMBISONIC_CHANNEL_ORDERING_ACN)
++ && (ambisonic_normalization == RFC_AMBISONIC_NORMALIZATION_SN3D)
++ && (channel_map[0] == 0) && (channel_map[1] == 1)
++ && (channel_map[2] == 2) && (channel_map[3] == 3))
++ qtdemux->spherical_metadata->ambisonic_format = QTDEMUX_AMBISONIC_FORMAT_AMBIX;
++
++ if ((ambisonic_channel_ordering == RFC_AMBISONIC_CHANNEL_ORDERING_FUMA)
++ && (ambisonic_normalization == RFC_AMBISONIC_NORMALIZATION_FUMA)
++ && (channel_map[0] == 0) && (channel_map[1] == 3)
++ && (channel_map[2] == 1) && (channel_map[3] == 2))
++ qtdemux->spherical_metadata->ambisonic_format = QTDEMUX_AMBISONIC_FORMAT_AMB;
++ }
++ }
++ }
++
++ return;
++}
++#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
++
+ static void
+ qtdemux_update_default_sample_cenc_settings (GstQTDemux * qtdemux,
+ QtDemuxCencSampleSetInfo * info, guint32 is_encrypted,
+ guint32 protection_scheme_type, guint8 iv_size, const guint8 * kid,
+ guint crypt_byte_block, guint skip_byte_block, guint8 constant_iv_size,
+ const guint8 * constant_iv)
+ {
+ GstBuffer *kid_buf = gst_buffer_new_allocate (NULL, 16, NULL);
+ gst_buffer_fill (kid_buf, 0, kid, 16);
+ if (info->default_properties)
+ gst_structure_free (info->default_properties);
+ info->default_properties =
+ gst_structure_new ("application/x-cenc",
+ "iv_size", G_TYPE_UINT, iv_size,
+ "encrypted", G_TYPE_BOOLEAN, (is_encrypted == 1),
+ "kid", GST_TYPE_BUFFER, kid_buf, NULL);
+ GST_DEBUG_OBJECT (qtdemux, "default sample properties: "
+ "is_encrypted=%u, iv_size=%u", is_encrypted, iv_size);
+ gst_buffer_unref (kid_buf);
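+ /* cbcs uses pattern encryption: crypt_byte_block/skip_byte_block describe
+ * the repeating encrypted/clear 16-byte block pattern, and a constant IV
+ * may be signalled in place of per-sample IVs */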
+ if (protection_scheme_type == FOURCC_cbcs) {
+ if (crypt_byte_block != 0 || skip_byte_block != 0) {
+ gst_structure_set (info->default_properties, "crypt_byte_block",
+ G_TYPE_UINT, crypt_byte_block, "skip_byte_block", G_TYPE_UINT,
+ skip_byte_block, NULL);
+ }
+ if (constant_iv != NULL) {
+ GstBuffer *constant_iv_buf =
+ gst_buffer_new_allocate (NULL, constant_iv_size, NULL);
+ gst_buffer_fill (constant_iv_buf, 0, constant_iv, constant_iv_size);
+ gst_structure_set (info->default_properties, "constant_iv_size",
+ G_TYPE_UINT, constant_iv_size, "iv", GST_TYPE_BUFFER, constant_iv_buf,
+ NULL);
+ gst_buffer_unref (constant_iv_buf);
+ }
+ gst_structure_set (info->default_properties, "cipher-mode",
+ G_TYPE_STRING, "cbcs", NULL);
+ } else {
+ gst_structure_set (info->default_properties, "cipher-mode",
+ G_TYPE_STRING, "cenc", NULL);
+ }
+ }
+
+ static gboolean
+ qtdemux_update_default_piff_encryption_settings (GstQTDemux * qtdemux,
+ QtDemuxCencSampleSetInfo * info, GstByteReader * br)
+ {
+ guint32 algorithm_id = 0;
+ const guint8 *kid;
+ gboolean is_encrypted = TRUE;
+ guint8 iv_size = 8;
+
+ if (!gst_byte_reader_get_uint24_le (br, &algorithm_id)) {
+ GST_ERROR_OBJECT (qtdemux, "Error getting box's algorithm ID field");
+ return FALSE;
+ }
+
+ algorithm_id >>= 8;
+ if (algorithm_id == 0) {
+ is_encrypted = FALSE;
+ } else if (algorithm_id == 1) {
+ GST_DEBUG_OBJECT (qtdemux, "AES 128-bits CTR encrypted stream");
+ } else if (algorithm_id == 2) {
+ GST_DEBUG_OBJECT (qtdemux, "AES 128-bits CBC encrypted stream");
+ }
+
+ if (!gst_byte_reader_get_uint8 (br, &iv_size))
+ return FALSE;
+
+ if (!gst_byte_reader_get_data (br, 16, &kid))
+ return FALSE;
+
+ qtdemux_update_default_sample_cenc_settings (qtdemux, info,
+ is_encrypted, FOURCC_cenc, iv_size, kid, 0, 0, 0, NULL);
+ gst_structure_set (info->default_properties, "piff_algorithm_id",
+ G_TYPE_UINT, algorithm_id, NULL);
+ return TRUE;
+ }
+
+
+ static void
+ qtdemux_parse_piff (GstQTDemux * qtdemux, const guint8 * buffer, gint length,
+ guint offset)
+ {
+ GstByteReader br;
+ guint8 version;
+ guint32 flags = 0;
+ guint i;
+ guint iv_size = 8;
+ QtDemuxStream *stream;
+ GstStructure *structure;
+ QtDemuxCencSampleSetInfo *ss_info = NULL;
+ const gchar *system_id;
+ gboolean uses_sub_sample_encryption = FALSE;
+ guint32 sample_count;
+
+ if (QTDEMUX_N_STREAMS (qtdemux) == 0)
+ return;
+
+ stream = QTDEMUX_NTH_STREAM (qtdemux, 0);
+
+ structure = gst_caps_get_structure (CUR_STREAM (stream)->caps, 0);
+ if (!gst_structure_has_name (structure, "application/x-cenc")) {
+ GST_WARNING_OBJECT (qtdemux,
+ "Attempting PIFF box parsing on an unencrypted stream.");
+ return;
+ }
+
+ gst_structure_get (structure, GST_PROTECTION_SYSTEM_ID_CAPS_FIELD,
+ G_TYPE_STRING, &system_id, NULL);
+ gst_qtdemux_append_protection_system_id (qtdemux, system_id);
+
+ stream->protected = TRUE;
+ stream->protection_scheme_type = FOURCC_cenc;
+
+ if (!stream->protection_scheme_info)
+ stream->protection_scheme_info = g_new0 (QtDemuxCencSampleSetInfo, 1);
+
+ ss_info = (QtDemuxCencSampleSetInfo *) stream->protection_scheme_info;
+ if (!ss_info->default_properties) {
+ ss_info->default_properties =
+ gst_structure_new ("application/x-cenc",
+ "iv_size", G_TYPE_UINT, iv_size, "encrypted", G_TYPE_BOOLEAN, TRUE,
+ NULL);
+
+ }
+
+ if (ss_info->crypto_info) {
+ GST_LOG_OBJECT (qtdemux, "unreffing existing crypto_info");
+ g_ptr_array_free (ss_info->crypto_info, TRUE);
+ ss_info->crypto_info = NULL;
+ }
+
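+ /* PIFF SampleEncryption box: a 16-byte extended type (UUID) followed by a
+ * full box header (8-bit version, 24-bit flags). Flag 0x1 overrides the
+ * track encryption defaults (AlgorithmID, IV size, KID); flag 0x2 means
+ * each sample entry also carries sub-sample encryption data. */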
+ /* skip UUID */
+ gst_byte_reader_init (&br, buffer + offset + 16, length - offset - 16);
+
+ if (!gst_byte_reader_get_uint8 (&br, &version)) {
+ GST_ERROR_OBJECT (qtdemux, "Error getting box's version field");
+ return;
+ }
+
+ if (!gst_byte_reader_get_uint24_be (&br, &flags)) {
+ GST_ERROR_OBJECT (qtdemux, "Error getting box's flags field");
+ return;
+ }
+
+ if ((flags & 0x000001)) {
+ if (!qtdemux_update_default_piff_encryption_settings (qtdemux, ss_info,
+ &br))
+ return;
+ } else if ((flags & 0x000002)) {
+ uses_sub_sample_encryption = TRUE;
+ }
+
+ if (!gst_structure_get_uint (ss_info->default_properties, "iv_size",
+ &iv_size)) {
+ GST_ERROR_OBJECT (qtdemux, "Error getting encryption IV size field");
+ return;
+ }
+
+ if (!gst_byte_reader_get_uint32_be (&br, &sample_count)) {
+ GST_ERROR_OBJECT (qtdemux, "Error getting box's sample count field");
+ return;
+ }
+
+ ss_info->crypto_info =
+ g_ptr_array_new_full (sample_count,
+ (GDestroyNotify) qtdemux_gst_structure_free);
+
+ for (i = 0; i < sample_count; ++i) {
+ GstStructure *properties;
+ guint8 *data;
+ GstBuffer *buf;
+
+ properties = qtdemux_get_cenc_sample_properties (qtdemux, stream, i);
+ if (properties == NULL) {
+ GST_ERROR_OBJECT (qtdemux, "failed to get properties for sample %u", i);
+ qtdemux->cenc_aux_sample_count = i;
+ return;
+ }
+
+ if (!gst_byte_reader_dup_data (&br, iv_size, &data)) {
+ GST_ERROR_OBJECT (qtdemux, "IV data not present for sample %u", i);
+ gst_structure_free (properties);
+ qtdemux->cenc_aux_sample_count = i;
+ return;
+ }
+ buf = gst_buffer_new_wrapped (data, iv_size);
+ gst_structure_set (properties, "iv", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+
+ if (uses_sub_sample_encryption) {
+ guint16 n_subsamples;
+ const GValue *kid_buf_value;
+
+ if (!gst_byte_reader_get_uint16_be (&br, &n_subsamples)
+ || n_subsamples == 0) {
+ GST_ERROR_OBJECT (qtdemux,
+ "failed to get subsample count for sample %u", i);
+ gst_structure_free (properties);
+ qtdemux->cenc_aux_sample_count = i;
+ return;
+ }
+ GST_LOG_OBJECT (qtdemux, "subsample count: %u", n_subsamples);
+ if (!gst_byte_reader_dup_data (&br, n_subsamples * 6, &data)) {
+ GST_ERROR_OBJECT (qtdemux, "failed to get subsample data for sample %u",
+ i);
+ gst_structure_free (properties);
+ qtdemux->cenc_aux_sample_count = i;
+ return;
+ }
+ buf = gst_buffer_new_wrapped (data, n_subsamples * 6);
+
+ kid_buf_value =
+ gst_structure_get_value (ss_info->default_properties, "kid");
+
+ gst_structure_set (properties,
+ "subsample_count", G_TYPE_UINT, n_subsamples,
+ "subsamples", GST_TYPE_BUFFER, buf, NULL);
+ gst_structure_set_value (properties, "kid", kid_buf_value);
+ gst_buffer_unref (buf);
+ } else {
+ gst_structure_set (properties, "subsample_count", G_TYPE_UINT, 0, NULL);
+ }
+
+ g_ptr_array_add (ss_info->crypto_info, properties);
+ }
+
+ qtdemux->cenc_aux_sample_count = sample_count;
+ }
+
+ static void
+ qtdemux_parse_uuid (GstQTDemux * qtdemux, const guint8 * buffer, gint length)
+ {
+ static const guint8 xmp_uuid[] = { 0xBE, 0x7A, 0xCF, 0xCB,
+ 0x97, 0xA9, 0x42, 0xE8,
+ 0x9C, 0x71, 0x99, 0x94,
+ 0x91, 0xE3, 0xAF, 0xAC
+ };
+ static const guint8 playready_uuid[] = {
+ 0xd0, 0x8a, 0x4f, 0x18, 0x10, 0xf3, 0x4a, 0x82,
+ 0xb6, 0xc8, 0x32, 0xd8, 0xab, 0xa1, 0x83, 0xd3
+ };
+
+ static const guint8 piff_sample_encryption_uuid[] = {
+ 0xa2, 0x39, 0x4f, 0x52, 0x5a, 0x9b, 0x4f, 0x14,
+ 0xa2, 0x44, 0x6c, 0x42, 0x7c, 0x64, 0x8d, 0xf4
+ };
+
++#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
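++ /* uuid of the Spherical Video V1 (GSpherical) box carrying XML metadata */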
++ static const guint8 spherical_uuid[] = {
++ 0xff, 0xcc, 0x82, 0x63, 0xf8, 0x55, 0x4a, 0x93,
++ 0x88, 0x14, 0x58, 0x7a, 0x02, 0x52, 0x1f, 0xdd
++ };
++#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
++
+ guint offset;
+
+ /* counts as header data */
+ qtdemux->header_size += length;
+
+ offset = (QT_UINT32 (buffer) == 0) ? 16 : 8;
+
+ if (length <= offset + 16) {
+ GST_DEBUG_OBJECT (qtdemux, "uuid atom is too short, skipping");
+ return;
+ }
+
++#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
++ if (memcmp (buffer + offset, spherical_uuid, 16) == 0) {
++ const char *contents;
++
++ GST_DEBUG_OBJECT (qtdemux, "spherical uuid was found");
++ contents = (char *) (buffer + offset + 16);
++ GST_DEBUG_OBJECT (qtdemux, "contents: %s\n", contents);
++
++ if (qtdemux->spherical_metadata)
++ _parse_spatial_video_metadata_from_xml_string (qtdemux, contents);
++
++ return;
++ }
++#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
++
+ if (memcmp (buffer + offset, xmp_uuid, 16) == 0) {
+ GstBuffer *buf;
+ GstTagList *taglist;
+
+ buf = _gst_buffer_new_wrapped ((guint8 *) buffer + offset + 16,
+ length - offset - 16, NULL);
+ taglist = gst_tag_list_from_xmp_buffer (buf);
+ gst_buffer_unref (buf);
+
+ /* make sure we have a usable taglist */
+ qtdemux->tag_list = gst_tag_list_make_writable (qtdemux->tag_list);
+
+ qtdemux_handle_xmp_taglist (qtdemux, qtdemux->tag_list, taglist);
+
+ } else if (memcmp (buffer + offset, playready_uuid, 16) == 0) {
+ int len;
+ const gunichar2 *s_utf16;
+ char *contents;
+
+ len = GST_READ_UINT16_LE (buffer + offset + 0x30);
+ s_utf16 = (const gunichar2 *) (buffer + offset + 0x32);
+ contents = g_utf16_to_utf8 (s_utf16, len / 2, NULL, NULL, NULL);
+ GST_ERROR_OBJECT (qtdemux, "contents: %s", contents);
+
+ g_free (contents);
+
+ GST_ELEMENT_ERROR (qtdemux, STREAM, DECRYPT,
+ (_("Cannot play stream because it is encrypted with PlayReady DRM.")),
+ (NULL));
+ } else if (memcmp (buffer + offset, piff_sample_encryption_uuid, 16) == 0) {
+ qtdemux_parse_piff (qtdemux, buffer, length, offset);
+ } else {
+ GST_DEBUG_OBJECT (qtdemux, "Ignoring unknown uuid: %08x-%08x-%08x-%08x",
+ GST_READ_UINT32_LE (buffer + offset),
+ GST_READ_UINT32_LE (buffer + offset + 4),
+ GST_READ_UINT32_LE (buffer + offset + 8),
+ GST_READ_UINT32_LE (buffer + offset + 12));
+ }
+ }
+
+ static void
+ qtdemux_parse_sidx (GstQTDemux * qtdemux, const guint8 * buffer, gint length)
+ {
+ GstSidxParser sidx_parser;
+ GstIsoffParserResult res;
+ guint consumed;
+
+ gst_isoff_qt_sidx_parser_init (&sidx_parser);
+
+ res =
+ gst_isoff_qt_sidx_parser_add_data (&sidx_parser, buffer, length,
+ &consumed);
+ GST_DEBUG_OBJECT (qtdemux, "sidx parse result: %d", res);
+ if (res == GST_ISOFF_QT_PARSER_DONE) {
+ check_update_duration (qtdemux, sidx_parser.cumulative_pts);
+ }
+ gst_isoff_qt_sidx_parser_clear (&sidx_parser);
+ }
+
+ /* caller verifies at least 8 bytes in buf */
+ static void
+ extract_initial_length_and_fourcc (const guint8 * data, guint size,
+ guint64 * plength, guint32 * pfourcc)
+ {
+ guint64 length;
+ guint32 fourcc;
+
+ length = QT_UINT32 (data);
+ GST_DEBUG ("length 0x%08" G_GINT64_MODIFIER "x", length);
+ fourcc = QT_FOURCC (data + 4);
+ GST_DEBUG ("atom type %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS (fourcc));
+
+ if (length == 0) {
+ length = G_MAXUINT64;
+ } else if (length == 1 && size >= 16) {
+ /* this means we have an extended size, which is the 64 bit value of
+ * the next 8 bytes */
+ length = QT_UINT64 (data + 8);
+ GST_DEBUG ("length 0x%08" G_GINT64_MODIFIER "x", length);
+ }
+
+ if (plength)
+ *plength = length;
+ if (pfourcc)
+ *pfourcc = fourcc;
+ }
+
+ static gboolean
+ qtdemux_parse_mehd (GstQTDemux * qtdemux, GstByteReader * br)
+ {
+ guint32 version = 0;
+ GstClockTime duration = 0;
+
+ if (!gst_byte_reader_get_uint32_be (br, &version))
+ goto failed;
+
+ version >>= 24;
+ if (version == 1) {
+ if (!gst_byte_reader_get_uint64_be (br, &duration))
+ goto failed;
+ } else {
+ guint32 dur = 0;
+
+ if (!gst_byte_reader_get_uint32_be (br, &dur))
+ goto failed;
+ duration = dur;
+ }
+
+ GST_INFO_OBJECT (qtdemux, "mehd duration: %" G_GUINT64_FORMAT, duration);
+ qtdemux->duration = duration;
+
+ return TRUE;
+
+ failed:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "parsing mehd failed");
+ return FALSE;
+ }
+ }
+
+ static gboolean
+ qtdemux_parse_trex (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ guint32 * ds_duration, guint32 * ds_size, guint32 * ds_flags)
+ {
+ if (!stream->parsed_trex && qtdemux->moov_node) {
+ GNode *mvex, *trex;
+ GstByteReader trex_data;
+
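+ /* the moov's mvex/trex box carries per-track fragment defaults: track id,
+ * default sample description index, sample duration, size and flags */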
+ mvex = qtdemux_tree_get_child_by_type (qtdemux->moov_node, FOURCC_mvex);
+ if (mvex) {
+ trex = qtdemux_tree_get_child_by_type_full (mvex, FOURCC_trex,
+ &trex_data);
+ while (trex) {
+ guint32 id = 0, sdi = 0, dur = 0, size = 0, flags = 0;
+
+ /* skip version/flags */
+ if (!gst_byte_reader_skip (&trex_data, 4))
+ goto next;
+ if (!gst_byte_reader_get_uint32_be (&trex_data, &id))
+ goto next;
+ if (id != stream->track_id)
+ goto next;
+ if (!gst_byte_reader_get_uint32_be (&trex_data, &sdi))
+ goto next;
+ if (!gst_byte_reader_get_uint32_be (&trex_data, &dur))
+ goto next;
+ if (!gst_byte_reader_get_uint32_be (&trex_data, &size))
+ goto next;
+ if (!gst_byte_reader_get_uint32_be (&trex_data, &flags))
+ goto next;
+
+ GST_DEBUG_OBJECT (qtdemux, "fragment defaults for stream %d; "
+ "duration %d, size %d, flags 0x%x", stream->track_id,
+ dur, size, flags);
+
+ stream->parsed_trex = TRUE;
+ stream->def_sample_description_index = sdi;
+ stream->def_sample_duration = dur;
+ stream->def_sample_size = size;
+ stream->def_sample_flags = flags;
+
+ next:
+ /* iterate all siblings */
+ trex = qtdemux_tree_get_sibling_by_type_full (trex, FOURCC_trex,
+ &trex_data);
+ }
+ }
+ }
+
+ *ds_duration = stream->def_sample_duration;
+ *ds_size = stream->def_sample_size;
+ *ds_flags = stream->def_sample_flags;
+
+ /* even then, above values are better than random ... */
+ if (G_UNLIKELY (!stream->parsed_trex)) {
+ GST_WARNING_OBJECT (qtdemux,
+ "failed to find fragment defaults for stream %d", stream->track_id);
+ return FALSE;
+ }
+
+ return TRUE;
+ }
+
+ /* This method should be called whenever a more accurate duration might
+ * have been found. It will update all relevant variables if/where needed
+ */
+ static void
+ check_update_duration (GstQTDemux * qtdemux, GstClockTime duration)
+ {
+ guint i;
+ guint64 movdur;
+ GstClockTime prevdur;
+
+ movdur = GSTTIME_TO_QTTIME (qtdemux, duration);
+
+ if (movdur > qtdemux->duration) {
+ prevdur = QTTIME_TO_GSTTIME (qtdemux, qtdemux->duration);
+ GST_DEBUG_OBJECT (qtdemux,
+ "Updating total duration to %" GST_TIME_FORMAT " was %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (duration), GST_TIME_ARGS (prevdur));
+ qtdemux->duration = movdur;
+ GST_DEBUG_OBJECT (qtdemux,
+ "qtdemux->segment.duration: %" GST_TIME_FORMAT " .stop: %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (qtdemux->segment.duration),
+ GST_TIME_ARGS (qtdemux->segment.stop));
+ if (qtdemux->segment.duration == prevdur) {
+ /* If the current segment has duration/stop identical to the previous
+ * duration, update them as well (because they were set at that point
+ * in time with the wrong duration) */
+ /* We convert the value *from* the timescale version to avoid rounding errors */
+ GstClockTime fixeddur = QTTIME_TO_GSTTIME (qtdemux, movdur);
+ GST_DEBUG_OBJECT (qtdemux, "Updated segment.duration and segment.stop");
+ qtdemux->segment.duration = fixeddur;
+ qtdemux->segment.stop = fixeddur;
+ }
+ }
+
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+
+ movdur = GSTTIME_TO_QTSTREAMTIME (stream, duration);
+ if (movdur > stream->duration) {
+ GST_DEBUG_OBJECT (qtdemux,
+ "Updating stream #%d duration to %" GST_TIME_FORMAT, i,
+ GST_TIME_ARGS (duration));
+ stream->duration = movdur;
+ /* internal duration tracking state has been updated above, so
+ * preserve an open-ended dummy segment rather than repeatedly updating
+ * it and spamming downstream with segment events accordingly */
+ /* also mangle the edit list end time when fragmented with a single edit
+ * list that may only cover any non-fragmented data */
+ if ((stream->dummy_segment ||
+ (qtdemux->fragmented && stream->n_segments == 1)) &&
+ GST_CLOCK_TIME_IS_VALID (stream->segments[0].duration)) {
+ /* Update all dummy values to new duration */
+ stream->segments[0].stop_time = duration;
+ stream->segments[0].duration = duration;
+ stream->segments[0].media_stop = duration;
+
+ /* let downstream know we possibly have a new stop time */
+ if (stream->segment_index != -1) {
+ GstClockTime pos;
+
+ if (qtdemux->segment.rate >= 0) {
+ pos = stream->segment.start;
+ } else {
+ pos = stream->segment.stop;
+ }
+
+ gst_qtdemux_stream_update_segment (qtdemux, stream,
+ stream->segment_index, pos, NULL, NULL);
+ }
+ }
+ }
+ }
+ }
+
+ static gboolean
+ qtdemux_parse_trun (GstQTDemux * qtdemux, GstByteReader * trun,
+ QtDemuxStream * stream, guint32 d_sample_duration, guint32 d_sample_size,
+ guint32 d_sample_flags, gint64 moof_offset, gint64 moof_length,
+ gint64 * base_offset, gint64 * running_offset, gint64 decode_ts,
+ gboolean has_tfdt)
+ {
+ GstClockTime gst_ts = GST_CLOCK_TIME_NONE;
+ guint64 timestamp;
+ gint32 data_offset = 0;
+ guint32 flags = 0, first_flags = 0, samples_count = 0;
+ gint i;
+ guint8 *data;
+ guint entry_size, dur_offset, size_offset, flags_offset = 0, ct_offset = 0;
+ QtDemuxSample *sample;
+ gboolean ismv = FALSE;
+ gint64 initial_offset;
+
+ GST_LOG_OBJECT (qtdemux, "parsing trun track-id %d; "
+ "default dur %d, size %d, flags 0x%x, base offset %" G_GINT64_FORMAT ", "
+ "decode ts %" G_GINT64_FORMAT, stream->track_id, d_sample_duration,
+ d_sample_size, d_sample_flags, *base_offset, decode_ts);
+
+ if (stream->pending_seek && moof_offset < stream->pending_seek->moof_offset) {
+ GST_INFO_OBJECT (stream->pad, "skipping trun before seek target fragment");
+ return TRUE;
+ }
+
+ /* the presence or absence of stss can't really tell us much here,
+ * and flags and so on tend to be only marginally reliable in these files */
+ if (stream->subtype == FOURCC_soun) {
+ GST_DEBUG_OBJECT (qtdemux,
+ "sound track in fragmented file; marking all keyframes");
+ stream->all_keyframe = TRUE;
+ }
+
+ if (!gst_byte_reader_skip (trun, 1) ||
+ !gst_byte_reader_get_uint24_be (trun, &flags))
+ goto fail;
+
+ if (!gst_byte_reader_get_uint32_be (trun, &samples_count))
+ goto fail;
+
+ if (flags & TR_DATA_OFFSET) {
+ /* note this is really signed */
+ if (!gst_byte_reader_get_int32_be (trun, &data_offset))
+ goto fail;
+ GST_LOG_OBJECT (qtdemux, "trun data offset %d", data_offset);
+ /* default base offset = first byte of moof */
+ if (*base_offset == -1) {
+ GST_LOG_OBJECT (qtdemux, "base_offset at moof");
+ *base_offset = moof_offset;
+ }
+ *running_offset = *base_offset + data_offset;
+ } else {
+ /* if there is no offset at all, data would start at the moof start,
+ * which is wrong and is the broken ismv way, so compensate by
+ * assuming the data is in the mdat following the moof */
+ if (*base_offset == -1) {
+ *base_offset = moof_offset + moof_length + 8;
+ GST_LOG_OBJECT (qtdemux, "base_offset assumed in mdat after moof");
+ ismv = TRUE;
+ }
+ if (*running_offset == -1)
+ *running_offset = *base_offset;
+ }
+
+ GST_LOG_OBJECT (qtdemux, "running offset now %" G_GINT64_FORMAT,
+ *running_offset);
+ GST_LOG_OBJECT (qtdemux, "trun offset %d, flags 0x%x, entries %d",
+ data_offset, flags, samples_count);
+
+ if (flags & TR_FIRST_SAMPLE_FLAGS) {
+ if (G_UNLIKELY (flags & TR_SAMPLE_FLAGS)) {
+ GST_DEBUG_OBJECT (qtdemux,
+ "invalid flags; SAMPLE and FIRST_SAMPLE present, discarding latter");
+ flags ^= TR_FIRST_SAMPLE_FLAGS;
+ } else {
+ if (!gst_byte_reader_get_uint32_be (trun, &first_flags))
+ goto fail;
+ GST_LOG_OBJECT (qtdemux, "first flags: 0x%x", first_flags);
+ }
+ }
+
+ /* FIXME ? spec says other bits should also be checked to determine
+ * entry size (and prefix size for that matter) */
+ entry_size = 0;
+ dur_offset = size_offset = 0;
+ if (flags & TR_SAMPLE_DURATION) {
+ GST_LOG_OBJECT (qtdemux, "entry duration present");
+ dur_offset = entry_size;
+ entry_size += 4;
+ }
+ if (flags & TR_SAMPLE_SIZE) {
+ GST_LOG_OBJECT (qtdemux, "entry size present");
+ size_offset = entry_size;
+ entry_size += 4;
+ }
+ if (flags & TR_SAMPLE_FLAGS) {
+ GST_LOG_OBJECT (qtdemux, "entry flags present");
+ flags_offset = entry_size;
+ entry_size += 4;
+ }
+ if (flags & TR_COMPOSITION_TIME_OFFSETS) {
+ GST_LOG_OBJECT (qtdemux, "entry ct offset present");
+ ct_offset = entry_size;
+ entry_size += 4;
+ }
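+ /* At this point, with all optional fields present, each trun entry is
+ * four consecutive 32-bit big-endian words: duration, size, flags and
+ * composition time offset; dur_offset/size_offset/flags_offset/ct_offset
+ * record where each field sits inside the entry_size-byte entry so the
+ * parse loop below can read them with plain QT_UINT32() lookups */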
+
+ if (!qt_atom_parser_has_chunks (trun, samples_count, entry_size))
+ goto fail;
+ data = (guint8 *) gst_byte_reader_peek_data_unchecked (trun);
+
+ if (stream->n_samples + samples_count >=
+ QTDEMUX_MAX_SAMPLE_INDEX_SIZE / sizeof (QtDemuxSample))
+ goto index_too_big;
+
+ GST_DEBUG_OBJECT (qtdemux, "allocating n_samples %u * %u (%.2f MB)",
+ stream->n_samples + samples_count, (guint) sizeof (QtDemuxSample),
+ (stream->n_samples + samples_count) *
+ sizeof (QtDemuxSample) / (1024.0 * 1024.0));
+
+ /* create a new array of samples if it's the first sample parsed */
+ if (stream->n_samples == 0) {
+ g_assert (stream->samples == NULL);
+ stream->samples = g_try_new0 (QtDemuxSample, samples_count);
+ /* or try to reallocate it with space enough to insert the new samples */
+ } else
+ stream->samples = g_try_renew (QtDemuxSample, stream->samples,
+ stream->n_samples + samples_count);
+ if (stream->samples == NULL)
+ goto out_of_memory;
+
+ if (qtdemux->fragment_start != -1) {
+ timestamp = GSTTIME_TO_QTSTREAMTIME (stream, qtdemux->fragment_start);
+ qtdemux->fragment_start = -1;
+ } else {
+ if (stream->n_samples == 0) {
+ if (decode_ts > 0) {
+ timestamp = decode_ts;
+ } else if (stream->pending_seek != NULL) {
+ /* if we don't have a timestamp from a tfdt box, we'll use the one
+ * from the mfra seek table */
+ GST_INFO_OBJECT (stream->pad, "pending seek ts = %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (stream->pending_seek->ts));
+
+ /* FIXME: this is not fully correct; the timestamp refers to the random
+ * access sample referred to in the tfra entry, which may not necessarily
+ * be the first sample in the tfrag/trun (but hopefully/usually is) */
+ timestamp = GSTTIME_TO_QTSTREAMTIME (stream, stream->pending_seek->ts);
+ } else {
+ timestamp = 0;
+ }
+
+ gst_ts = QTSTREAMTIME_TO_GSTTIME (stream, timestamp);
+ GST_INFO_OBJECT (stream->pad, "first sample ts %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (gst_ts));
+ } else {
+ /* subsequent fragments extend stream */
+ timestamp =
+ stream->samples[stream->n_samples - 1].timestamp +
+ stream->samples[stream->n_samples - 1].duration;
+
+ /* If this is a GST_FORMAT_BYTES stream and there's a significant
+ * difference between decode_ts and timestamp (more than half the
+ * duration of the last moof, and at least 1 second), prefer the
+ * former */
+ if (has_tfdt && !qtdemux->upstream_format_is_time
+ && ABSDIFF (decode_ts, timestamp) >
+ MAX (stream->duration_last_moof / 2,
+ GSTTIME_TO_QTSTREAMTIME (stream, GST_SECOND))) {
+ GST_INFO_OBJECT (qtdemux,
+ "decode_ts (%" GST_TIME_FORMAT ") and timestamp (%" GST_TIME_FORMAT
+ ") are significantly different (more than %" GST_TIME_FORMAT
+ "), using decode_ts",
+ GST_TIME_ARGS (QTSTREAMTIME_TO_GSTTIME (stream, decode_ts)),
+ GST_TIME_ARGS (QTSTREAMTIME_TO_GSTTIME (stream, timestamp)),
+ GST_TIME_ARGS (QTSTREAMTIME_TO_GSTTIME (stream,
+ MAX (stream->duration_last_moof / 2,
+ GSTTIME_TO_QTSTREAMTIME (stream, GST_SECOND)))));
+ timestamp = decode_ts;
+ }
+
+ gst_ts = QTSTREAMTIME_TO_GSTTIME (stream, timestamp);
+ GST_INFO_OBJECT (qtdemux, "first sample ts %" GST_TIME_FORMAT
+ " (extends previous samples)", GST_TIME_ARGS (gst_ts));
+ }
+ }
+
+ initial_offset = *running_offset;
+
+ sample = stream->samples + stream->n_samples;
+ for (i = 0; i < samples_count; i++) {
+ guint32 dur, size, sflags, ct;
+
+ /* first read sample data */
+ if (flags & TR_SAMPLE_DURATION) {
+ dur = QT_UINT32 (data + dur_offset);
+ } else {
+ dur = d_sample_duration;
+ }
+ if (flags & TR_SAMPLE_SIZE) {
+ size = QT_UINT32 (data + size_offset);
+ } else {
+ size = d_sample_size;
+ }
+ if (flags & TR_FIRST_SAMPLE_FLAGS) {
+ if (i == 0) {
+ sflags = first_flags;
+ } else {
+ sflags = d_sample_flags;
+ }
+ } else if (flags & TR_SAMPLE_FLAGS) {
+ sflags = QT_UINT32 (data + flags_offset);
+ } else {
+ sflags = d_sample_flags;
+ }
+ if (flags & TR_COMPOSITION_TIME_OFFSETS) {
+ ct = QT_UINT32 (data + ct_offset);
+ } else {
+ ct = 0;
+ }
+ data += entry_size;
+
+ /* fill the sample information */
+ sample->offset = *running_offset;
+ sample->pts_offset = ct;
+ sample->size = size;
+ sample->timestamp = timestamp;
+ sample->duration = dur;
+ /* sample-is-difference-sample */
+ /* ismv seems to use 0x40 for keyframe and 0xc0 for non-keyframe;
+ * no idea how it relates to the bitfield other than massive LE/BE
+ * confusion */
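+ /* In the standard sample flags layout from ISO/IEC 14496-12, bit 16
+ * (0x10000) is sample_is_non_sync_sample, so a cleared bit marks a sync
+ * sample, i.e. a keyframe */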
+ sample->keyframe = ismv ? ((sflags & 0xff) == 0x40) : !(sflags & 0x10000);
+ *running_offset += size;
+ timestamp += dur;
+ stream->duration_moof += dur;
+ sample++;
+ }
+
+ /* Update total duration if needed */
+ check_update_duration (qtdemux, QTSTREAMTIME_TO_GSTTIME (stream, timestamp));
+
+ /* Pre-emptively figure out the size of the mdat based on trun
+ * information. If the [mdat] atom is effectively read, it will be
+ * replaced by the actual size; otherwise we will still be able to use
+ * this when dealing with gapped input */
+ qtdemux->mdatleft = *running_offset - initial_offset;
+ qtdemux->mdatoffset = initial_offset;
+ qtdemux->mdatsize = qtdemux->mdatleft;
+
+ stream->n_samples += samples_count;
+ stream->n_samples_moof += samples_count;
+
+ if (stream->pending_seek != NULL)
+ stream->pending_seek = NULL;
+
+ return TRUE;
+
+ fail:
+ {
+ GST_WARNING_OBJECT (qtdemux, "failed to parse trun");
+ return FALSE;
+ }
+ out_of_memory:
+ {
+ GST_WARNING_OBJECT (qtdemux, "failed to allocate %d samples",
+ stream->n_samples);
+ return FALSE;
+ }
+ index_too_big:
+ {
+ GST_WARNING_OBJECT (qtdemux, "not allocating index of %d samples, would "
+ "be larger than %uMB (broken file?)", stream->n_samples,
+ QTDEMUX_MAX_SAMPLE_INDEX_SIZE >> 20);
+ return FALSE;
+ }
+ }
+
+ /* find stream with @id */
+ static inline QtDemuxStream *
+ qtdemux_find_stream (GstQTDemux * qtdemux, guint32 id)
+ {
+ QtDemuxStream *stream;
+ gint i;
+
+ /* check */
+ if (G_UNLIKELY (!id)) {
+ GST_DEBUG_OBJECT (qtdemux, "invalid track id 0");
+ return NULL;
+ }
+
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+ if (stream->track_id == id)
+ return stream;
+ }
+ if (qtdemux->mss_mode) {
+ /* mss should have only 1 stream anyway */
+ return QTDEMUX_NTH_STREAM (qtdemux, 0);
+ }
+
+ return NULL;
+ }
+
+ static gboolean
+ qtdemux_parse_mfhd (GstQTDemux * qtdemux, GstByteReader * mfhd,
+ guint32 * fragment_number)
+ {
+ if (!gst_byte_reader_skip (mfhd, 4))
+ goto fail;
+ if (!gst_byte_reader_get_uint32_be (mfhd, fragment_number))
+ goto fail;
+ return TRUE;
+ fail:
+ {
+ GST_WARNING_OBJECT (qtdemux, "Failed to parse mfhd atom");
+ return FALSE;
+ }
+ }
+
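+ /* tfhd (track fragment header) identifies the track a traf applies to
+ * and may override the trex defaults; its optional fields appear in
+ * flag-bit order, hence the fixed read sequence below. */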
+ static gboolean
+ qtdemux_parse_tfhd (GstQTDemux * qtdemux, GstByteReader * tfhd,
+ QtDemuxStream ** stream, guint32 * default_sample_duration,
+ guint32 * default_sample_size, guint32 * default_sample_flags,
+ gint64 * base_offset)
+ {
+ guint32 flags = 0;
+ guint32 track_id = 0;
+
+ if (!gst_byte_reader_skip (tfhd, 1) ||
+ !gst_byte_reader_get_uint24_be (tfhd, &flags))
+ goto invalid_track;
+
+ if (!gst_byte_reader_get_uint32_be (tfhd, &track_id))
+ goto invalid_track;
+
+ *stream = qtdemux_find_stream (qtdemux, track_id);
+ if (G_UNLIKELY (!*stream))
+ goto unknown_stream;
+
+ if (flags & TF_DEFAULT_BASE_IS_MOOF)
+ *base_offset = qtdemux->moof_offset;
+
+ if (flags & TF_BASE_DATA_OFFSET)
+ if (!gst_byte_reader_get_uint64_be (tfhd, (guint64 *) base_offset))
+ goto invalid_track;
+
+ /* obtain stream defaults */
+ qtdemux_parse_trex (qtdemux, *stream,
+ default_sample_duration, default_sample_size, default_sample_flags);
+
+ (*stream)->stsd_sample_description_id =
+ (*stream)->def_sample_description_index - 1;
+
+ if (flags & TF_SAMPLE_DESCRIPTION_INDEX) {
+ guint32 sample_description_index;
+ if (!gst_byte_reader_get_uint32_be (tfhd, &sample_description_index))
+ goto invalid_track;
+ (*stream)->stsd_sample_description_id = sample_description_index - 1;
+ }
+
+ if (qtdemux->mss_mode) {
+ /* mss has no stsd entry */
+ (*stream)->stsd_sample_description_id = 0;
+ }
+
+ if (flags & TF_DEFAULT_SAMPLE_DURATION)
+ if (!gst_byte_reader_get_uint32_be (tfhd, default_sample_duration))
+ goto invalid_track;
+
+ if (flags & TF_DEFAULT_SAMPLE_SIZE)
+ if (!gst_byte_reader_get_uint32_be (tfhd, default_sample_size))
+ goto invalid_track;
+
+ if (flags & TF_DEFAULT_SAMPLE_FLAGS)
+ if (!gst_byte_reader_get_uint32_be (tfhd, default_sample_flags))
+ goto invalid_track;
+
+ return TRUE;
+
+ invalid_track:
+ {
+ GST_WARNING_OBJECT (qtdemux, "invalid track fragment header");
+ return FALSE;
+ }
+ unknown_stream:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "unknown stream (%u) in tfhd", track_id);
+ return TRUE;
+ }
+ }
+
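+ /* tfdt (track fragment decode time) gives the decode timestamp of the
+ * fragment's first sample in track timescale units; the box version in
+ * the top byte of the first word selects a 32- or 64-bit field. */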
+ static gboolean
+ qtdemux_parse_tfdt (GstQTDemux * qtdemux, GstByteReader * br,
+ guint64 * decode_time)
+ {
+ guint32 version = 0;
+
+ if (!gst_byte_reader_get_uint32_be (br, &version))
+ return FALSE;
+
+ version >>= 24;
+ if (version == 1) {
+ if (!gst_byte_reader_get_uint64_be (br, decode_time))
+ goto failed;
+ } else {
+ guint32 dec_time = 0;
+ if (!gst_byte_reader_get_uint32_be (br, &dec_time))
+ goto failed;
+ *decode_time = dec_time;
+ }
+
+ GST_INFO_OBJECT (qtdemux, "Track fragment decode time: %" G_GUINT64_FORMAT,
+ *decode_time);
+
+ return TRUE;
+
+ failed:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "parsing tfdt failed");
+ return FALSE;
+ }
+ }
+
+ /* Returns a pointer to a GstStructure containing the properties of
+ * the stream sample identified by @sample_index. The caller must free
+ * the returned structure with gst_structure_free() after use. Returns
+ * NULL if unsuccessful. */
+ static GstStructure *
+ qtdemux_get_cenc_sample_properties (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, guint sample_index)
+ {
+ QtDemuxCencSampleSetInfo *info = NULL;
+
+ g_return_val_if_fail (stream != NULL, NULL);
+ g_return_val_if_fail (stream->protected, NULL);
+ g_return_val_if_fail (stream->protection_scheme_info != NULL, NULL);
+
+ info = (QtDemuxCencSampleSetInfo *) stream->protection_scheme_info;
+
+ /* Currently, cenc properties for groups of samples are not supported, so
+ * simply return a copy of the default sample properties */
+ return gst_structure_copy (info->default_properties);
+ }
+
+ /* Parses the sizes of sample auxiliary information contained within a
+ * stream, as given in a saiz box. Returns an array of @sample_count
+ * guint8 size values, or NULL on failure */
+ static guint8 *
+ qtdemux_parse_saiz (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ GstByteReader * br, guint32 * sample_count)
+ {
+ guint32 flags = 0;
+ guint8 *info_sizes;
+ guint8 default_info_size;
+
+ g_return_val_if_fail (qtdemux != NULL, NULL);
+ g_return_val_if_fail (stream != NULL, NULL);
+ g_return_val_if_fail (br != NULL, NULL);
+ g_return_val_if_fail (sample_count != NULL, NULL);
+
+ if (!gst_byte_reader_get_uint32_be (br, &flags))
+ return NULL;
+
+ if (flags & 0x1) {
+ /* aux_info_type and aux_info_type_parameter are ignored */
+ if (!gst_byte_reader_skip (br, 8))
+ return NULL;
+ }
+
+ if (!gst_byte_reader_get_uint8 (br, &default_info_size))
+ return NULL;
+ GST_DEBUG_OBJECT (qtdemux, "default_info_size: %u", default_info_size);
+
+ if (!gst_byte_reader_get_uint32_be (br, sample_count))
+ return NULL;
+ GST_DEBUG_OBJECT (qtdemux, "sample_count: %u", *sample_count);
+
+ if (default_info_size == 0) {
+ if (!gst_byte_reader_dup_data (br, *sample_count, &info_sizes)) {
+ return NULL;
+ }
+ } else {
+ info_sizes = g_new (guint8, *sample_count);
+ memset (info_sizes, default_info_size, *sample_count);
+ }
+
+ return info_sizes;
+ }
+
+ /* Parses the offset of sample auxiliary information contained within a stream,
+ * as given in a saio box. Returns TRUE if successful; FALSE otherwise. */
+ static gboolean
+ qtdemux_parse_saio (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ GstByteReader * br, guint32 * info_type, guint32 * info_type_parameter,
+ guint64 * offset)
+ {
+ guint8 version = 0;
+ guint32 flags = 0;
+ guint32 aux_info_type = 0;
+ guint32 aux_info_type_parameter = 0;
+ guint32 entry_count;
+ guint32 off_32;
+ guint64 off_64;
+ const guint8 *aux_info_type_data = NULL;
+
+ g_return_val_if_fail (qtdemux != NULL, FALSE);
+ g_return_val_if_fail (stream != NULL, FALSE);
+ g_return_val_if_fail (br != NULL, FALSE);
+ g_return_val_if_fail (offset != NULL, FALSE);
+
+ if (!gst_byte_reader_get_uint8 (br, &version))
+ return FALSE;
+
+ if (!gst_byte_reader_get_uint24_be (br, &flags))
+ return FALSE;
+
+ if (flags & 0x1) {
+ if (!gst_byte_reader_get_data (br, 4, &aux_info_type_data))
+ return FALSE;
+ aux_info_type = QT_FOURCC (aux_info_type_data);
+
+ if (!gst_byte_reader_get_uint32_be (br, &aux_info_type_parameter))
+ return FALSE;
+ } else if (stream->protected) {
+ aux_info_type = stream->protection_scheme_type;
+ } else {
+ aux_info_type = CUR_STREAM (stream)->fourcc;
+ }
+
+ if (info_type)
+ *info_type = aux_info_type;
+ if (info_type_parameter)
+ *info_type_parameter = aux_info_type_parameter;
+
+ GST_DEBUG_OBJECT (qtdemux, "aux_info_type: '%" GST_FOURCC_FORMAT "', "
+ "aux_info_type_parameter: %#06x",
+ GST_FOURCC_ARGS (aux_info_type), aux_info_type_parameter);
+
+ if (!gst_byte_reader_get_uint32_be (br, &entry_count))
+ return FALSE;
+
+ if (entry_count != 1) {
+ GST_ERROR_OBJECT (qtdemux, "multiple offsets are not supported");
+ return FALSE;
+ }
+
+ if (version == 0) {
+ if (!gst_byte_reader_get_uint32_be (br, &off_32))
+ return FALSE;
+ *offset = (guint64) off_32;
+ } else {
+ if (!gst_byte_reader_get_uint64_be (br, &off_64))
+ return FALSE;
+ *offset = off_64;
+ }
+
+ GST_DEBUG_OBJECT (qtdemux, "offset: %" G_GUINT64_FORMAT, *offset);
+ return TRUE;
+ }
+
+ static void
+ qtdemux_gst_structure_free (GstStructure * gststructure)
+ {
+ if (gststructure) {
+ gst_structure_free (gststructure);
+ }
+ }
+
+ /* Parses auxiliary information relating to samples protected using
+ * Common Encryption (cenc); the format of this information
+ * is defined in ISO/IEC 23001-7. Returns TRUE if successful; FALSE
+ * otherwise. */
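+ /* Per-sample layout of the cenc auxiliary data parsed here: an IV of
+ * iv_size bytes, then, when the saiz size exceeds iv_size, a 16-bit
+ * subsample count followed by that many 6-byte entries (a 16-bit clear
+ * byte count plus a 32-bit protected byte count). */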
+ static gboolean
+ qtdemux_parse_cenc_aux_info (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ GstByteReader * br, guint8 * info_sizes, guint32 sample_count)
+ {
+ QtDemuxCencSampleSetInfo *ss_info = NULL;
+ guint8 size;
+ gint i;
+ GPtrArray *old_crypto_info = NULL;
+ guint old_entries = 0;
+
+ g_return_val_if_fail (qtdemux != NULL, FALSE);
+ g_return_val_if_fail (stream != NULL, FALSE);
+ g_return_val_if_fail (br != NULL, FALSE);
+ g_return_val_if_fail (stream->protected, FALSE);
+ g_return_val_if_fail (stream->protection_scheme_info != NULL, FALSE);
+
+ ss_info = (QtDemuxCencSampleSetInfo *) stream->protection_scheme_info;
+
+ if (ss_info->crypto_info) {
+ old_crypto_info = ss_info->crypto_info;
+ /* Count number of non-null entries remaining at the tail end */
+ for (i = old_crypto_info->len - 1; i >= 0; i--) {
+ if (g_ptr_array_index (old_crypto_info, i) == NULL)
+ break;
+ old_entries++;
+ }
+ }
+
+ ss_info->crypto_info =
+ g_ptr_array_new_full (sample_count + old_entries,
+ (GDestroyNotify) qtdemux_gst_structure_free);
+
+ /* We preserve old entries because we parse the next moof in advance
+ * of consuming all samples from the previous moof, and otherwise
+ * we'd discard the corresponding crypto info for the samples
+ * from the previous fragment. */
+ if (old_entries) {
+ GST_DEBUG_OBJECT (qtdemux, "Preserving %d old crypto info entries",
+ old_entries);
+ for (i = old_crypto_info->len - old_entries; i < old_crypto_info->len; i++) {
+ g_ptr_array_add (ss_info->crypto_info, g_ptr_array_index (old_crypto_info,
+ i));
+ g_ptr_array_index (old_crypto_info, i) = NULL;
+ }
+ }
+
+ if (old_crypto_info) {
+ /* Everything now belongs to the new array */
+ g_ptr_array_free (old_crypto_info, TRUE);
+ }
+
+ for (i = 0; i < sample_count; ++i) {
+ GstStructure *properties;
+ guint16 n_subsamples = 0;
+ guint8 *data;
+ guint iv_size;
+ GstBuffer *buf;
+ gboolean could_read_iv;
+
+ properties = qtdemux_get_cenc_sample_properties (qtdemux, stream, i);
+ if (properties == NULL) {
+ GST_ERROR_OBJECT (qtdemux, "failed to get properties for sample %u", i);
+ return FALSE;
+ }
+ if (!gst_structure_get_uint (properties, "iv_size", &iv_size)) {
+ GST_ERROR_OBJECT (qtdemux, "failed to get iv_size for sample %u", i);
+ gst_structure_free (properties);
+ return FALSE;
+ }
+ could_read_iv =
+ iv_size > 0 ? gst_byte_reader_dup_data (br, iv_size, &data) : FALSE;
+ if (could_read_iv) {
+ buf = gst_buffer_new_wrapped (data, iv_size);
+ gst_structure_set (properties, "iv", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ } else if (stream->protection_scheme_type == FOURCC_cbcs) {
+ const GValue *constant_iv_size_value =
+ gst_structure_get_value (properties, "constant_iv_size");
+ const GValue *constant_iv_value =
+ gst_structure_get_value (properties, "iv");
+ if (constant_iv_size_value == NULL || constant_iv_value == NULL) {
+ GST_ERROR_OBJECT (qtdemux, "failed to get constant_iv");
+ gst_structure_free (properties);
+ return FALSE;
+ }
+ gst_structure_set_value (properties, "iv_size", constant_iv_size_value);
+ gst_structure_remove_field (properties, "constant_iv_size");
+ } else if (stream->protection_scheme_type == FOURCC_cenc) {
+ GST_ERROR_OBJECT (qtdemux, "failed to get IV for sample %u", i);
+ gst_structure_free (properties);
+ return FALSE;
+ }
+ size = info_sizes[i];
+ if (size > iv_size) {
+ if (!gst_byte_reader_get_uint16_be (br, &n_subsamples)
+ || !(n_subsamples > 0)) {
+ gst_structure_free (properties);
+ GST_ERROR_OBJECT (qtdemux,
+ "failed to get subsample count for sample %u", i);
+ return FALSE;
+ }
+ GST_LOG_OBJECT (qtdemux, "subsample count: %u", n_subsamples);
+ if (!gst_byte_reader_dup_data (br, n_subsamples * 6, &data)) {
+ GST_ERROR_OBJECT (qtdemux, "failed to get subsample data for sample %u",
+ i);
+ gst_structure_free (properties);
+ return FALSE;
+ }
+ buf = gst_buffer_new_wrapped (data, n_subsamples * 6);
+ if (!buf) {
+ gst_structure_free (properties);
+ return FALSE;
+ }
+ gst_structure_set (properties,
+ "subsample_count", G_TYPE_UINT, n_subsamples,
+ "subsamples", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ } else {
+ gst_structure_set (properties, "subsample_count", G_TYPE_UINT, 0, NULL);
+ }
+ g_ptr_array_add (ss_info->crypto_info, properties);
+ }
+ return TRUE;
+ }
+
+ /* Converts a UUID in raw byte form to a string representation, as defined in
+ * RFC 4122. The caller takes ownership of the returned string and is
+ * responsible for freeing it after use. */
+ static gchar *
+ qtdemux_uuid_bytes_to_string (gconstpointer uuid_bytes)
+ {
+ const guint8 *uuid = (const guint8 *) uuid_bytes;
+
+ return g_strdup_printf ("%02x%02x%02x%02x-%02x%02x-%02x%02x-"
+ "%02x%02x-%02x%02x%02x%02x%02x%02x",
+ uuid[0], uuid[1], uuid[2], uuid[3],
+ uuid[4], uuid[5], uuid[6], uuid[7],
+ uuid[8], uuid[9], uuid[10], uuid[11],
+ uuid[12], uuid[13], uuid[14], uuid[15]);
+ }
+
+ /* Parses a Protection System Specific Header box (pssh), as defined in the
+ * Common Encryption (cenc) standard (ISO/IEC 23001-7), which contains
+ * information needed by a specific content protection system in order to
+ * decrypt cenc-protected tracks. Returns TRUE if successful; FALSE
+ * otherwise. */
+ static gboolean
+ qtdemux_parse_pssh (GstQTDemux * qtdemux, GNode * node)
+ {
+ gchar *sysid_string;
+ guint32 pssh_size = QT_UINT32 (node->data);
+ GstBuffer *pssh = NULL;
+ GstEvent *event = NULL;
+ guint32 parent_box_type;
+ gint i;
+
+ if (G_UNLIKELY (pssh_size < 32U)) {
+ GST_ERROR_OBJECT (qtdemux, "invalid box size");
+ return FALSE;
+ }
+
+ sysid_string =
+ qtdemux_uuid_bytes_to_string ((const guint8 *) node->data + 12);
+
+ gst_qtdemux_append_protection_system_id (qtdemux, sysid_string);
+
+ pssh = gst_buffer_new_memdup (node->data, pssh_size);
+ GST_LOG_OBJECT (qtdemux, "cenc pssh size: %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (pssh));
+
+ parent_box_type = QT_FOURCC ((const guint8 *) node->parent->data + 4);
+
+ /* Push an event containing the pssh box onto the queues of all streams. */
+ event = gst_event_new_protection (sysid_string, pssh,
+ (parent_box_type == FOURCC_moov) ? "isobmff/moov" : "isobmff/moof");
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+ GST_TRACE_OBJECT (qtdemux,
+ "adding protection event for stream %s and system %s",
+ stream->stream_id, sysid_string);
+ g_queue_push_tail (&stream->protection_scheme_event_queue,
+ gst_event_ref (event));
+ }
+ g_free (sysid_string);
+ gst_event_unref (event);
+ gst_buffer_unref (pssh);
+ return TRUE;
+ }
+
+ static gboolean
+ qtdemux_parse_moof (GstQTDemux * qtdemux, const guint8 * buffer, guint length,
+ guint64 moof_offset, QtDemuxStream * stream)
+ {
+ GNode *moof_node, *traf_node, *tfhd_node, *trun_node, *tfdt_node, *mfhd_node;
+ GNode *uuid_node;
+ GstByteReader mfhd_data, trun_data, tfhd_data, tfdt_data;
+ GNode *saiz_node, *saio_node, *pssh_node;
+ GstByteReader saiz_data, saio_data;
+ guint32 ds_size = 0, ds_duration = 0, ds_flags = 0;
+ gint64 base_offset, running_offset;
+ guint32 frag_num;
+ GstClockTime min_dts = GST_CLOCK_TIME_NONE;
+
+ /* NOTE @stream ignored */
+
+ moof_node = g_node_new ((guint8 *) buffer);
+ qtdemux_parse_node (qtdemux, moof_node, buffer, length);
+ qtdemux_node_dump (qtdemux, moof_node);
+
+ /* Get fragment number from mfhd and check it's valid */
+ mfhd_node =
+ qtdemux_tree_get_child_by_type_full (moof_node, FOURCC_mfhd, &mfhd_data);
+ if (mfhd_node == NULL)
+ goto missing_mfhd;
+ if (!qtdemux_parse_mfhd (qtdemux, &mfhd_data, &frag_num))
+ goto fail;
+ GST_DEBUG_OBJECT (qtdemux, "Fragment #%d", frag_num);
+
+ /* unknown base_offset to start with */
+ base_offset = running_offset = -1;
+ traf_node = qtdemux_tree_get_child_by_type (moof_node, FOURCC_traf);
+ while (traf_node) {
+ guint64 decode_time = 0;
+
+ /* Fragment Header node */
+ tfhd_node =
+ qtdemux_tree_get_child_by_type_full (traf_node, FOURCC_tfhd,
+ &tfhd_data);
+ if (!tfhd_node)
+ goto missing_tfhd;
+ if (!qtdemux_parse_tfhd (qtdemux, &tfhd_data, &stream, &ds_duration,
+ &ds_size, &ds_flags, &base_offset))
+ goto missing_tfhd;
+
+ /* The following code assumes at most a single set of sample auxiliary
+ * data in the fragment (consisting of a saiz box and a corresponding saio
+ * box); in theory, however, there could be multiple sets of sample
+ * auxiliary data in a fragment. */
+ saiz_node =
+ qtdemux_tree_get_child_by_type_full (traf_node, FOURCC_saiz,
+ &saiz_data);
+ if (saiz_node) {
+ guint32 info_type = 0;
+ guint64 offset = 0;
+ guint32 info_type_parameter = 0;
+
+ g_free (qtdemux->cenc_aux_info_sizes);
+
+ qtdemux->cenc_aux_info_sizes =
+ qtdemux_parse_saiz (qtdemux, stream, &saiz_data,
+ &qtdemux->cenc_aux_sample_count);
+ if (qtdemux->cenc_aux_info_sizes == NULL) {
+ GST_ERROR_OBJECT (qtdemux, "failed to parse saiz box");
+ goto fail;
+ }
+ saio_node =
+ qtdemux_tree_get_child_by_type_full (traf_node, FOURCC_saio,
+ &saio_data);
+ if (!saio_node) {
+ GST_ERROR_OBJECT (qtdemux, "saiz box without a corresponding saio box");
+ g_free (qtdemux->cenc_aux_info_sizes);
+ qtdemux->cenc_aux_info_sizes = NULL;
+ goto fail;
+ }
+
+ if (G_UNLIKELY (!qtdemux_parse_saio (qtdemux, stream, &saio_data,
+ &info_type, &info_type_parameter, &offset))) {
+ GST_ERROR_OBJECT (qtdemux, "failed to parse saio box");
+ g_free (qtdemux->cenc_aux_info_sizes);
+ qtdemux->cenc_aux_info_sizes = NULL;
+ goto fail;
+ }
+ if (base_offset > -1 && base_offset > qtdemux->moof_offset)
+ offset += (guint64) (base_offset - qtdemux->moof_offset);
+ if ((info_type == FOURCC_cenc || info_type == FOURCC_cbcs)
+ && info_type_parameter == 0U) {
+ GstByteReader br;
+ if (offset > length) {
+ GST_DEBUG_OBJECT (qtdemux, "cenc auxiliary info stored out of moof");
+ qtdemux->cenc_aux_info_offset = offset;
+ } else {
+ gst_byte_reader_init (&br, buffer + offset, length - offset);
+ if (!qtdemux_parse_cenc_aux_info (qtdemux, stream, &br,
+ qtdemux->cenc_aux_info_sizes,
+ qtdemux->cenc_aux_sample_count)) {
+ GST_ERROR_OBJECT (qtdemux, "failed to parse cenc auxiliary info");
+ g_free (qtdemux->cenc_aux_info_sizes);
+ qtdemux->cenc_aux_info_sizes = NULL;
+ goto fail;
+ }
+ }
+ }
+ }
+
+ tfdt_node =
+ qtdemux_tree_get_child_by_type_full (traf_node, FOURCC_tfdt,
+ &tfdt_data);
+ if (tfdt_node) {
+ /* We'll use decode_time to interpolate timestamps
+ * in case the input timestamps are missing */
+ qtdemux_parse_tfdt (qtdemux, &tfdt_data, &decode_time);
+
+ GST_DEBUG_OBJECT (qtdemux, "decode time %" G_GINT64_FORMAT
+ " (%" GST_TIME_FORMAT ")", decode_time,
+ GST_TIME_ARGS (stream ? QTSTREAMTIME_TO_GSTTIME (stream,
+ decode_time) : GST_CLOCK_TIME_NONE));
+
+ /* Discard the fragment buffer timestamp info to avoid using it.
+ * Rely on tfdt instead, as it is more accurate than the timestamp
+ * fetched from a manifest/playlist, which is usually less accurate. */
+ qtdemux->fragment_start = -1;
+ }
+
+ if (G_UNLIKELY (!stream)) {
+ /* we lost track of offset, we'll need to regain it,
+ * but can delay complaining until later or avoid doing so altogether */
+ base_offset = -2;
+ goto next;
+ }
+ if (G_UNLIKELY (base_offset < -1))
+ goto lost_offset;
+
+ min_dts = MIN (min_dts, QTSTREAMTIME_TO_GSTTIME (stream, decode_time));
+
+ if (!qtdemux->pullbased) {
+ /* Sample tables can grow enough to be problematic if the system memory
+ * is very low (e.g. embedded devices) and the videos very long
+ * (~8 MiB/hour for 25-30 fps video + typical AAC audio frames).
+ * Fortunately, we can easily discard them for each new fragment when
+ * we know qtdemux will not receive seeks outside of the current fragment.
+ * adaptivedemux honors this assumption.
+ * This optimization is also useful for applications that use qtdemux as
+ * a push-based simple demuxer, like Media Source Extensions. */
+ gst_qtdemux_stream_flush_samples_data (stream);
+ }
+
+ /* initialise moof sample data */
+ stream->n_samples_moof = 0;
+ stream->duration_last_moof = stream->duration_moof;
+ stream->duration_moof = 0;
+
+ /* Track Run node */
+ trun_node =
+ qtdemux_tree_get_child_by_type_full (traf_node, FOURCC_trun,
+ &trun_data);
+ while (trun_node) {
+ qtdemux_parse_trun (qtdemux, &trun_data, stream,
+ ds_duration, ds_size, ds_flags, moof_offset, length, &base_offset,
+ &running_offset, decode_time, (tfdt_node != NULL));
+ /* iterate all siblings */
+ trun_node = qtdemux_tree_get_sibling_by_type_full (trun_node, FOURCC_trun,
+ &trun_data);
+ }
+
+ uuid_node = qtdemux_tree_get_child_by_type (traf_node, FOURCC_uuid);
+ if (uuid_node) {
+ guint8 *uuid_buffer = (guint8 *) uuid_node->data;
+ guint32 box_length = QT_UINT32 (uuid_buffer);
+
+ qtdemux_parse_uuid (qtdemux, uuid_buffer, box_length);
+ }
+
+ /* if no new base_offset provided for next traf,
+ * base is end of current traf */
+ base_offset = running_offset;
+ running_offset = -1;
+
+ if (stream->n_samples_moof && stream->duration_moof)
+ stream->new_caps = TRUE;
+
+ next:
+ /* iterate all siblings */
+ traf_node = qtdemux_tree_get_sibling_by_type (traf_node, FOURCC_traf);
+ }
+
+ /* parse any protection system info */
+ pssh_node = qtdemux_tree_get_child_by_type (moof_node, FOURCC_pssh);
+ while (pssh_node) {
+ GST_LOG_OBJECT (qtdemux, "Parsing pssh box.");
+ qtdemux_parse_pssh (qtdemux, pssh_node);
+ pssh_node = qtdemux_tree_get_sibling_by_type (pssh_node, FOURCC_pssh);
+ }
+
+ if (!qtdemux->upstream_format_is_time && !qtdemux->first_moof_already_parsed
+ && !qtdemux->received_seek && GST_CLOCK_TIME_IS_VALID (min_dts)
+ && min_dts != 0) {
+ /* Unless the user has explicitly requested another seek, perform an
+ * internal seek to the time specified in the tfdt.
+ *
+ * This way if the user opens a file where the first tfdt is 1 hour
+ * into the presentation, they will not have to wait 1 hour for run
+ * time to catch up and actual playback to start. */
+ gint i;
+
+ GST_DEBUG_OBJECT (qtdemux, "First fragment has a non-zero tfdt, "
+ "performing an internal seek to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (min_dts));
+
+ qtdemux->segment.start = min_dts;
+ qtdemux->segment.time = qtdemux->segment.position = min_dts;
+
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+ stream->time_position = min_dts;
+ }
+
+ /* Before this code was run a segment was already sent when the moov was
+ * parsed... which is OK -- some apps (mostly tests) expect a segment to
+ * be emitted after a moov, and we can emit a second segment anyway for
+ * special cases like this. */
+ qtdemux->need_segment = TRUE;
+ }
+
+ qtdemux->first_moof_already_parsed = TRUE;
+
+ g_node_destroy (moof_node);
+ return TRUE;
+
+ missing_tfhd:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "missing tfhd box");
+ goto fail;
+ }
+ missing_mfhd:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "Missing mfhd box");
+ goto fail;
+ }
+ lost_offset:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "lost offset");
+ goto fail;
+ }
+ fail:
+ {
+ g_node_destroy (moof_node);
+ GST_ELEMENT_ERROR (qtdemux, STREAM, DEMUX,
+ (_("This file is corrupt and cannot be played.")), (NULL));
+ return FALSE;
+ }
+ }
+
+ #if 0
+ /* might be used if some day we actually use mfra & co
+ * for random access to fragments, but that would require quite some
+ * modifications and much less reliance on a sample array */
+ #endif
+
+ static gboolean
+ qtdemux_parse_tfra (GstQTDemux * qtdemux, GNode * tfra_node)
+ {
+ QtDemuxStream *stream;
+ guint32 ver_flags, track_id, len, num_entries, i;
+ guint value_size, traf_size, trun_size, sample_size;
+ guint64 time = 0, moof_offset = 0;
+ #if 0
+ GstBuffer *buf = NULL;
+ GstFlowReturn ret;
+ #endif
+ GstByteReader tfra;
+
+ gst_byte_reader_init (&tfra, tfra_node->data, QT_UINT32 (tfra_node->data));
+
+ if (!gst_byte_reader_skip (&tfra, 8))
+ return FALSE;
+
+ if (!gst_byte_reader_get_uint32_be (&tfra, &ver_flags))
+ return FALSE;
+
+ if (!gst_byte_reader_get_uint32_be (&tfra, &track_id)
+ || !gst_byte_reader_get_uint32_be (&tfra, &len)
+ || !gst_byte_reader_get_uint32_be (&tfra, &num_entries))
+ return FALSE;
+
+ GST_DEBUG_OBJECT (qtdemux, "parsing tfra box for track id %u", track_id);
+
+ stream = qtdemux_find_stream (qtdemux, track_id);
+ if (stream == NULL)
+ goto unknown_trackid;
+
+ value_size = ((ver_flags >> 24) == 1) ? sizeof (guint64) : sizeof (guint32);
+ sample_size = (len & 3) + 1;
+ trun_size = ((len & 12) >> 2) + 1;
+ traf_size = ((len & 48) >> 4) + 1;
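+ /* per ISO/IEC 14496-12, the low bits of this word pack the field widths
+ * (stored as byte length minus one): bits 0-1 for the sample number,
+ * bits 2-3 for the trun number and bits 4-5 for the traf number */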
+
+ GST_DEBUG_OBJECT (qtdemux, "%u entries, sizes: value %u, traf %u, trun %u, "
+ "sample %u", num_entries, value_size, traf_size, trun_size, sample_size);
+
+ if (num_entries == 0)
+ goto no_samples;
+
+ if (!qt_atom_parser_has_chunks (&tfra, num_entries,
+ value_size + value_size + traf_size + trun_size + sample_size))
+ goto corrupt_file;
+
+ g_free (stream->ra_entries);
+ stream->ra_entries = g_new (QtDemuxRandomAccessEntry, num_entries);
+ stream->n_ra_entries = num_entries;
+
+ for (i = 0; i < num_entries; i++) {
+ qt_atom_parser_get_offset (&tfra, value_size, &time);
+ qt_atom_parser_get_offset (&tfra, value_size, &moof_offset);
+ qt_atom_parser_get_uint_with_size_unchecked (&tfra, traf_size);
+ qt_atom_parser_get_uint_with_size_unchecked (&tfra, trun_size);
+ qt_atom_parser_get_uint_with_size_unchecked (&tfra, sample_size);
+
+ time = QTSTREAMTIME_TO_GSTTIME (stream, time);
+
+ GST_LOG_OBJECT (qtdemux, "fragment time: %" GST_TIME_FORMAT ", "
+ " moof_offset: %" G_GUINT64_FORMAT, GST_TIME_ARGS (time), moof_offset);
+
+ stream->ra_entries[i].ts = time;
+ stream->ra_entries[i].moof_offset = moof_offset;
+
+ /* don't want to go through the entire file and read all moofs at startup */
+ #if 0
+ ret = gst_qtdemux_pull_atom (qtdemux, moof_offset, 0, &buf);
+ if (ret != GST_FLOW_OK)
+ goto corrupt_file;
+ qtdemux_parse_moof (qtdemux, GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf),
+ moof_offset, stream);
+ gst_buffer_unref (buf);
+ #endif
+ }
+
+ check_update_duration (qtdemux, time);
+
+ return TRUE;
+
+ /* ERRORS */
+ unknown_trackid:
+ {
+ GST_WARNING_OBJECT (qtdemux, "Couldn't find stream for track %u", track_id);
+ return FALSE;
+ }
+ corrupt_file:
+ {
+ GST_WARNING_OBJECT (qtdemux, "broken traf box, ignoring");
+ return FALSE;
+ }
+ no_samples:
+ {
+ GST_WARNING_OBJECT (qtdemux, "stream has no samples");
+ return FALSE;
+ }
+ }
+
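+ /* The 16-byte mfro box sits at the very end of the file and stores the
+ * size of the whole mfra box, so reading it lets us seek straight to
+ * (file length - mfra_size) and parse the mfra and its tfra children. */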
+ static gboolean
+ qtdemux_pull_mfro_mfra (GstQTDemux * qtdemux)
+ {
+ GstMapInfo mfro_map = GST_MAP_INFO_INIT;
+ GstMapInfo mfra_map = GST_MAP_INFO_INIT;
+ GstBuffer *mfro = NULL, *mfra = NULL;
+ GstFlowReturn flow;
+ gboolean ret = FALSE;
+ GNode *mfra_node, *tfra_node;
+ guint64 mfra_offset = 0;
+ guint32 fourcc, mfra_size;
+ gint64 len;
+
+ /* query upstream size in bytes */
+ if (!gst_pad_peer_query_duration (qtdemux->sinkpad, GST_FORMAT_BYTES, &len))
+ goto size_query_failed;
+
+ /* mfro box should be at the very end of the file */
+ flow = gst_qtdemux_pull_atom (qtdemux, len - 16, 16, &mfro);
+ if (flow != GST_FLOW_OK)
+ goto exit;
+
+ gst_buffer_map (mfro, &mfro_map, GST_MAP_READ);
+
+ fourcc = QT_FOURCC (mfro_map.data + 4);
+ if (fourcc != FOURCC_mfro)
+ goto exit;
+
+ GST_INFO_OBJECT (qtdemux, "Found mfro box");
+ if (mfro_map.size < 16)
+ goto invalid_mfro_size;
+
+ mfra_size = QT_UINT32 (mfro_map.data + 12);
+ if (mfra_size >= len)
+ goto invalid_mfra_size;
+
+ mfra_offset = len - mfra_size;
+
+ GST_INFO_OBJECT (qtdemux, "mfra offset: %" G_GUINT64_FORMAT ", size %u",
+ mfra_offset, mfra_size);
+
+ /* now get and parse mfra box */
+ flow = gst_qtdemux_pull_atom (qtdemux, mfra_offset, mfra_size, &mfra);
+ if (flow != GST_FLOW_OK)
+ goto broken_file;
+
+ gst_buffer_map (mfra, &mfra_map, GST_MAP_READ);
+
+ mfra_node = g_node_new ((guint8 *) mfra_map.data);
+ qtdemux_parse_node (qtdemux, mfra_node, mfra_map.data, mfra_map.size);
+
+ tfra_node = qtdemux_tree_get_child_by_type (mfra_node, FOURCC_tfra);
+
+ while (tfra_node) {
+ qtdemux_parse_tfra (qtdemux, tfra_node);
+ /* iterate all siblings */
+ tfra_node = qtdemux_tree_get_sibling_by_type (tfra_node, FOURCC_tfra);
+ }
+ g_node_destroy (mfra_node);
+
+ GST_INFO_OBJECT (qtdemux, "parsed movie fragment random access box (mfra)");
+ ret = TRUE;
+
+ exit:
+
+ if (mfro) {
+ if (mfro_map.memory != NULL)
+ gst_buffer_unmap (mfro, &mfro_map);
+ gst_buffer_unref (mfro);
+ }
+ if (mfra) {
+ if (mfra_map.memory != NULL)
+ gst_buffer_unmap (mfra, &mfra_map);
+ gst_buffer_unref (mfra);
+ }
+ return ret;
+
+ /* ERRORS */
+ size_query_failed:
+ {
+ GST_WARNING_OBJECT (qtdemux, "could not query upstream size");
+ goto exit;
+ }
+ invalid_mfro_size:
+ {
+ GST_WARNING_OBJECT (qtdemux, "mfro size is too small");
+ goto exit;
+ }
+ invalid_mfra_size:
+ {
+ GST_WARNING_OBJECT (qtdemux, "mfra_size in mfro box is invalid");
+ goto exit;
+ }
+ broken_file:
+ {
+ GST_WARNING_OBJECT (qtdemux, "bogus mfra offset or size, broken file");
+ goto exit;
+ }
+ }
+
+ static guint64
+ add_offset (guint64 offset, guint64 advance)
+ {
+ /* Avoid 64-bit overflow by clamping */
+ if (offset > G_MAXUINT64 - advance)
+ return G_MAXUINT64;
+ return offset + advance;
+ }
+
+ static GstFlowReturn
+ gst_qtdemux_loop_state_header (GstQTDemux * qtdemux)
+ {
+ guint64 length = 0;
+ guint32 fourcc = 0;
+ GstBuffer *buf = NULL;
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint64 cur_offset = qtdemux->offset;
+ GstMapInfo map;
+
+ ret = gst_pad_pull_range (qtdemux->sinkpad, cur_offset, 16, &buf);
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
+ goto beach;
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ if (G_LIKELY (map.size >= 8))
+ extract_initial_length_and_fourcc (map.data, map.size, &length, &fourcc);
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+
+ /* maybe we already got most of what we needed, so only consider this EOS */
+ if (G_UNLIKELY (length == 0)) {
+ GST_ELEMENT_WARNING (qtdemux, STREAM, DEMUX,
+ (_("Invalid atom size.")),
+ ("Header atom '%" GST_FOURCC_FORMAT "' has empty length",
+ GST_FOURCC_ARGS (fourcc)));
+ ret = GST_FLOW_EOS;
+ goto beach;
+ }
+
+ switch (fourcc) {
+ case FOURCC_moof:
+ /* record for later parsing when needed */
+ if (!qtdemux->moof_offset) {
+ qtdemux->moof_offset = qtdemux->offset;
+ }
+ if (qtdemux_pull_mfro_mfra (qtdemux)) {
+ /* FIXME */
+ } else {
+ qtdemux->offset += length; /* skip moof and keep going */
+ }
+ if (qtdemux->got_moov) {
+ GST_INFO_OBJECT (qtdemux, "moof header, got moov, done with headers");
+ ret = GST_FLOW_EOS;
+ goto beach;
+ }
+ break;
+ case FOURCC_mdat:
+ case FOURCC_free:
+ case FOURCC_skip:
+ case FOURCC_wide:
+ case FOURCC_PICT:
+ case FOURCC_pnot:
+ {
+ GST_LOG_OBJECT (qtdemux,
+ "skipping atom '%" GST_FOURCC_FORMAT "' at %" G_GUINT64_FORMAT,
+ GST_FOURCC_ARGS (fourcc), cur_offset);
+ qtdemux->offset = add_offset (qtdemux->offset, length);
+ break;
+ }
+ case FOURCC_moov:
+ {
+ GstBuffer *moov = NULL;
+
+ if (qtdemux->got_moov) {
+ GST_DEBUG_OBJECT (qtdemux, "Skipping moov atom as we have one already");
+ qtdemux->offset = add_offset (qtdemux->offset, length);
+ goto beach;
+ }
+
+ ret = gst_pad_pull_range (qtdemux->sinkpad, cur_offset, length, &moov);
+ if (ret != GST_FLOW_OK)
+ goto beach;
+ gst_buffer_map (moov, &map, GST_MAP_READ);
+
+ if (length != map.size) {
+ /* Some files have a 'moov' atom at the end of the file which contains
+ * a terminal 'free' atom where the body of the atom is missing.
+ * Check for, and permit, this special case.
+ */
+ if (map.size >= 8) {
+ guint8 *final_data = map.data + (map.size - 8);
+ guint32 final_length = QT_UINT32 (final_data);
+ guint32 final_fourcc = QT_FOURCC (final_data + 4);
+
+ if (final_fourcc == FOURCC_free
+ && map.size + final_length - 8 == length) {
+ /* Ok, we've found that special case. Allocate a new buffer with
+ * that free atom actually present. */
+ GstBuffer *newmoov = gst_buffer_new_and_alloc (length);
+ gst_buffer_fill (newmoov, 0, map.data, map.size);
+ gst_buffer_memset (newmoov, map.size, 0, final_length - 8);
+ gst_buffer_unmap (moov, &map);
+ gst_buffer_unref (moov);
+ moov = newmoov;
+ gst_buffer_map (moov, &map, GST_MAP_READ);
+ }
+ }
+ }
+
+ if (length != map.size) {
+ GST_ELEMENT_ERROR (qtdemux, STREAM, DEMUX,
+ (_("This file is incomplete and cannot be played.")),
+ ("We got less than expected (received %" G_GSIZE_FORMAT
+ ", wanted %u, offset %" G_GUINT64_FORMAT ")", map.size,
+ (guint) length, cur_offset));
+ gst_buffer_unmap (moov, &map);
+ gst_buffer_unref (moov);
+ ret = GST_FLOW_ERROR;
+ goto beach;
+ }
+ qtdemux->offset += length;
+
+ qtdemux_parse_moov (qtdemux, map.data, length);
+ qtdemux_node_dump (qtdemux, qtdemux->moov_node);
+
+ qtdemux_parse_tree (qtdemux);
+ if (qtdemux->moov_node_compressed) {
+ g_node_destroy (qtdemux->moov_node_compressed);
+ g_free (qtdemux->moov_node->data);
+ }
+ qtdemux->moov_node_compressed = NULL;
+ g_node_destroy (qtdemux->moov_node);
+ qtdemux->moov_node = NULL;
+ gst_buffer_unmap (moov, &map);
+ gst_buffer_unref (moov);
+ qtdemux->got_moov = TRUE;
+
+ break;
+ }
+ case FOURCC_ftyp:
+ {
+ GstBuffer *ftyp = NULL;
+
+ /* extract major brand; might come in handy for ISO vs QT issues */
+ ret = gst_qtdemux_pull_atom (qtdemux, cur_offset, length, &ftyp);
+ if (ret != GST_FLOW_OK)
+ goto beach;
+ qtdemux->offset += length;
+ gst_buffer_map (ftyp, &map, GST_MAP_READ);
+ qtdemux_parse_ftyp (qtdemux, map.data, map.size);
+ gst_buffer_unmap (ftyp, &map);
+ gst_buffer_unref (ftyp);
+ break;
+ }
+ case FOURCC_uuid:
+ {
+ GstBuffer *uuid = NULL;
+
+ /* uuid boxes are extension atoms */
+ ret = gst_qtdemux_pull_atom (qtdemux, cur_offset, length, &uuid);
+ if (ret != GST_FLOW_OK)
+ goto beach;
+ qtdemux->offset += length;
+ gst_buffer_map (uuid, &map, GST_MAP_READ);
+ qtdemux_parse_uuid (qtdemux, map.data, map.size);
+ gst_buffer_unmap (uuid, &map);
+ gst_buffer_unref (uuid);
+ break;
+ }
+ case FOURCC_sidx:
+ {
+ GstBuffer *sidx = NULL;
+ ret = gst_qtdemux_pull_atom (qtdemux, cur_offset, length, &sidx);
+ if (ret != GST_FLOW_OK)
+ goto beach;
+ qtdemux->offset += length;
+ gst_buffer_map (sidx, &map, GST_MAP_READ);
+ qtdemux_parse_sidx (qtdemux, map.data, map.size);
+ gst_buffer_unmap (sidx, &map);
+ gst_buffer_unref (sidx);
+ break;
+ }
+ default:
+ {
+ GstBuffer *unknown = NULL;
+
+ GST_LOG_OBJECT (qtdemux,
+ "unknown %08x '%" GST_FOURCC_FORMAT "' of size %" G_GUINT64_FORMAT
+ " at %" G_GUINT64_FORMAT, fourcc, GST_FOURCC_ARGS (fourcc), length,
+ cur_offset);
+ ret = gst_qtdemux_pull_atom (qtdemux, cur_offset, length, &unknown);
+ if (ret != GST_FLOW_OK)
+ goto beach;
+ gst_buffer_map (unknown, &map, GST_MAP_READ);
+ GST_MEMDUMP ("Unknown tag", map.data, map.size);
+ gst_buffer_unmap (unknown, &map);
+ gst_buffer_unref (unknown);
+ qtdemux->offset += length;
+ break;
+ }
+ }
+
+ beach:
+ if (ret == GST_FLOW_EOS && (qtdemux->got_moov || qtdemux->media_caps)) {
+ /* digested all data, show what we have */
++#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
++ if (qtdemux->spherical_metadata)
++ _send_spherical_metadata_msg_to_bus (qtdemux);
++#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+ qtdemux_prepare_streams (qtdemux);
+ QTDEMUX_EXPOSE_LOCK (qtdemux);
+ ret = qtdemux_expose_streams (qtdemux);
+ QTDEMUX_EXPOSE_UNLOCK (qtdemux);
+
+ qtdemux->state = QTDEMUX_STATE_MOVIE;
+ GST_DEBUG_OBJECT (qtdemux, "switching state to STATE_MOVIE (%d)",
+ qtdemux->state);
+ return ret;
+ }
+ return ret;
+ }
+
+ /* Seeks to the previous keyframe of the indexed stream and
+ * aligns the other streams with respect to the keyframe timestamp
+ * of the indexed stream. Only called in the case of reverse playback.
+ */
+ static GstFlowReturn
+ gst_qtdemux_seek_to_previous_keyframe (GstQTDemux * qtdemux)
+ {
+ guint32 seg_idx = 0, k_index = 0;
+ guint32 ref_seg_idx, ref_k_index;
+ GstClockTime k_pos = 0, last_stop = 0;
+ QtDemuxSegment *seg = NULL;
+ QtDemuxStream *ref_str = NULL;
+ guint64 seg_media_start_mov; /* segment media start time in mov format */
+ guint64 target_ts;
+ gint i;
+
+ /* Now we choose an arbitrary stream, get the previous keyframe timestamp
+ * and finally align all the other streams on that timestamp with their
+ * respective keyframes */
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *str = QTDEMUX_NTH_STREAM (qtdemux, i);
+
+ /* No candidate yet, take the first stream */
+ if (!ref_str) {
+ ref_str = str;
+ continue;
+ }
+
+ /* We prefer a video stream as the reference, if there is one */
+ if (str->subtype == FOURCC_vide) {
+ ref_str = str;
+ break;
+ }
+ }
+
+ if (G_UNLIKELY (!ref_str)) {
+ GST_DEBUG_OBJECT (qtdemux, "couldn't find any stream");
+ goto eos;
+ }
+
+ if (G_UNLIKELY (!ref_str->from_sample)) {
+ GST_DEBUG_OBJECT (qtdemux, "reached the beginning of the file");
+ goto eos;
+ }
+
+ /* That stream has been playing from from_sample to to_sample. We will
+ * get the timestamp of the previous sample and search for a keyframe
+ * before that. For audio streams we do an arbitrary jump into the past
+ * (10 samples) */
+ if (ref_str->subtype == FOURCC_vide) {
+ k_index = gst_qtdemux_find_keyframe (qtdemux, ref_str,
+ ref_str->from_sample - 1, FALSE);
+ } else {
+ if (ref_str->from_sample >= 10)
+ k_index = ref_str->from_sample - 10;
+ else
+ k_index = 0;
+ }
+
+ target_ts =
+ ref_str->samples[k_index].timestamp +
+ ref_str->samples[k_index].pts_offset;
+
+ /* get current segment for that stream */
+ seg = &ref_str->segments[ref_str->segment_index];
+ /* Use segment start in original timescale for comparisons */
+ seg_media_start_mov = seg->trak_media_start;
+
+ GST_LOG_OBJECT (qtdemux, "keyframe index %u ts %" G_GUINT64_FORMAT
+ " seg start %" G_GUINT64_FORMAT " %" GST_TIME_FORMAT,
+ k_index, target_ts, seg_media_start_mov,
+ GST_TIME_ARGS (seg->media_start));
+
+ /* Crawl back through segments to find the one containing this I frame */
+ while (target_ts < seg_media_start_mov) {
+ GST_DEBUG_OBJECT (qtdemux,
+ "keyframe position (sample %u) is out of segment %u, target %"
+ G_GUINT64_FORMAT " seg start %" G_GUINT64_FORMAT, k_index,
+ ref_str->segment_index, target_ts, seg_media_start_mov);
+
+ if (G_UNLIKELY (!ref_str->segment_index)) {
+ /* Reached the first segment, let's consider it EOS */
+ goto eos;
+ }
+ ref_str->segment_index--;
+ seg = &ref_str->segments[ref_str->segment_index];
+ /* Use segment start in original timescale for comparisons */
+ seg_media_start_mov = seg->trak_media_start;
+ }
+ /* Calculate time position of the keyframe and where we should stop */
+ k_pos =
+ QTSTREAMTIME_TO_GSTTIME (ref_str,
+ target_ts - seg->trak_media_start) + seg->time;
+ last_stop =
+ QTSTREAMTIME_TO_GSTTIME (ref_str,
+ ref_str->samples[ref_str->from_sample].timestamp -
+ seg->trak_media_start) + seg->time;
+
+ GST_DEBUG_OBJECT (qtdemux, "preferred stream played from sample %u, "
+ "now going to sample %u (pts %" GST_TIME_FORMAT ")", ref_str->from_sample,
+ k_index, GST_TIME_ARGS (k_pos));
+
+ /* Set last_stop to the keyframe timestamp we pushed for that stream */
+ qtdemux->segment.position = last_stop;
+ GST_DEBUG_OBJECT (qtdemux, "last_stop now is %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (last_stop));
+
+ if (G_UNLIKELY (last_stop < qtdemux->segment.start)) {
+ GST_DEBUG_OBJECT (qtdemux, "reached the beginning of segment");
+ goto eos;
+ }
+
+ ref_seg_idx = ref_str->segment_index;
+ ref_k_index = k_index;
+
+ /* Align them all on this */
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ guint32 index = 0;
+ GstClockTime seg_time = 0;
+ QtDemuxStream *str = QTDEMUX_NTH_STREAM (qtdemux, i);
+
+ /* aligning the reference stream again might lead to backing up to yet
+ * another keyframe (due to timestamp rounding issues), potentially
+ * putting more load on downstream; so let's try to avoid that */
+ if (str == ref_str) {
+ seg_idx = ref_seg_idx;
+ seg = &str->segments[seg_idx];
+ k_index = ref_k_index;
+ GST_DEBUG_OBJECT (qtdemux, "reference track-id %u segment %d, "
+ "sample at index %d", str->track_id, ref_str->segment_index, k_index);
+ } else {
+ seg_idx = gst_qtdemux_find_segment (qtdemux, str, k_pos);
+ GST_DEBUG_OBJECT (qtdemux,
+ "track-id %u align segment %d for keyframe pos %" GST_TIME_FORMAT,
+ str->track_id, seg_idx, GST_TIME_ARGS (k_pos));
+
+ /* get segment and time in the segment */
+ seg = &str->segments[seg_idx];
+ seg_time = k_pos - seg->time;
+
+ /* get the media time in the segment.
+ * No adjustment for empty "filler" segments */
+ if (seg->media_start != GST_CLOCK_TIME_NONE)
+ seg_time += seg->media_start;
+
+ /* get the index of the sample with media time */
+ index = gst_qtdemux_find_index_linear (qtdemux, str, seg_time);
+ GST_DEBUG_OBJECT (qtdemux,
+ "track-id %u sample for %" GST_TIME_FORMAT " at %u", str->track_id,
+ GST_TIME_ARGS (seg_time), index);
+
+ /* find previous keyframe */
+ k_index = gst_qtdemux_find_keyframe (qtdemux, str, index, FALSE);
+ }
+
+ /* Remember until where we want to go */
+ str->to_sample = str->from_sample - 1;
+ /* Define our time position */
+ target_ts =
+ str->samples[k_index].timestamp + str->samples[k_index].pts_offset;
+ str->time_position = QTSTREAMTIME_TO_GSTTIME (str, target_ts) + seg->time;
+ if (seg->media_start != GST_CLOCK_TIME_NONE)
+ str->time_position -= seg->media_start;
+
+ /* Now seek back in time */
+ gst_qtdemux_move_stream (qtdemux, str, k_index);
+ GST_DEBUG_OBJECT (qtdemux, "track-id %u keyframe at %u, time position %"
+ GST_TIME_FORMAT " playing from sample %u to %u", str->track_id, k_index,
+ GST_TIME_ARGS (str->time_position), str->from_sample, str->to_sample);
+ }
+
+ return GST_FLOW_OK;
+
+ eos:
+ return GST_FLOW_EOS;
+ }
+
+ /*
+ * Gets the current qt segment start, stop and position for the
+ * given time offset. This is used in update_segment()
+ */
+ static void
+ gst_qtdemux_stream_segment_get_boundaries (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, GstClockTime offset,
+ GstClockTime * _start, GstClockTime * _stop, GstClockTime * _time)
+ {
+ GstClockTime seg_time;
+ GstClockTime start, stop, time;
+ QtDemuxSegment *segment;
+
+ segment = &stream->segments[stream->segment_index];
+
+ /* get time in this segment */
+ seg_time = (offset - segment->time) * segment->rate;
+
+ GST_LOG_OBJECT (stream->pad, "seg_time %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (seg_time));
+
+ if (G_UNLIKELY (seg_time > segment->duration)) {
+ GST_LOG_OBJECT (stream->pad,
+ "seg_time > segment->duration %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (segment->duration));
+ seg_time = segment->duration;
+ }
+
+ /* qtdemux->segment.stop is in outside-time-realm, whereas
+ * segment->media_stop is in track-time-realm.
+ *
+ * In order to compare the two, we need to bring segment.stop
+ * into the track-time-realm
+ *
+ * FIXME - does this comment still hold? Don't see any conversion here */
+
+ stop = qtdemux->segment.stop;
+ if (stop == GST_CLOCK_TIME_NONE)
+ stop = qtdemux->segment.duration;
+ if (stop == GST_CLOCK_TIME_NONE)
+ stop = segment->media_stop;
+ else
+ stop =
+ MIN (segment->media_stop, stop - segment->time + segment->media_start);
+
+ if (G_UNLIKELY (QTSEGMENT_IS_EMPTY (segment))) {
+ start = segment->time + seg_time;
+ time = offset;
+ stop = start - seg_time + segment->duration;
+ } else if (qtdemux->segment.rate >= 0) {
+ start = MIN (segment->media_start + seg_time, stop);
+ time = offset;
+ } else {
+ if (segment->media_start >= qtdemux->segment.start) {
+ time = segment->time;
+ } else {
+ time = segment->time + (qtdemux->segment.start - segment->media_start);
+ }
+
+ start = MAX (segment->media_start, qtdemux->segment.start);
+ stop = MIN (segment->media_start + seg_time, stop);
+ }
+
+ *_start = start;
+ *_stop = stop;
+ *_time = time;
+ }
+
+ /*
+ * Updates the qt segment used for the stream and pushes a new segment event
+ * downstream on this stream's pad.
+ */
+ static gboolean
+ gst_qtdemux_stream_update_segment (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ gint seg_idx, GstClockTime offset, GstClockTime * _start,
+ GstClockTime * _stop)
+ {
+ QtDemuxSegment *segment;
+ GstClockTime start = 0, stop = GST_CLOCK_TIME_NONE, time = 0;
+ gdouble rate;
+ GstEvent *event;
+
+ /* update the current segment */
+ stream->segment_index = seg_idx;
+
+ /* get the segment */
+ segment = &stream->segments[seg_idx];
+
+ if (G_UNLIKELY (offset < segment->time)) {
+ GST_WARNING_OBJECT (stream->pad, "offset < segment->time %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (segment->time));
+ return FALSE;
+ }
+
+ /* segment lies beyond total indicated duration */
+ if (G_UNLIKELY (qtdemux->segment.duration != GST_CLOCK_TIME_NONE &&
+ segment->time > qtdemux->segment.duration)) {
+ GST_WARNING_OBJECT (stream->pad, "file duration %" GST_TIME_FORMAT
+ " < segment->time %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (qtdemux->segment.duration),
+ GST_TIME_ARGS (segment->time));
+ return FALSE;
+ }
+
+ gst_qtdemux_stream_segment_get_boundaries (qtdemux, stream, offset,
+ &start, &stop, &time);
+
+ GST_DEBUG_OBJECT (stream->pad, "new segment %d from %" GST_TIME_FORMAT
+ " to %" GST_TIME_FORMAT ", time %" GST_TIME_FORMAT, seg_idx,
+ GST_TIME_ARGS (start), GST_TIME_ARGS (stop), GST_TIME_ARGS (time));
+
+ /* combine global rate with that of the segment */
+ rate = segment->rate * qtdemux->segment.rate;
+
+ /* Copy flags from main segment */
+ stream->segment.flags = qtdemux->segment.flags;
+
+ /* update the segment values used for clipping */
+ stream->segment.offset = qtdemux->segment.offset;
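+ /* accumulated_base holds the running time consumed by previously played
+ * qt segments (see gst_qtdemux_advance_sample ()), keeping running time
+ * continuous across segment switches */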
+ stream->segment.base = qtdemux->segment.base + stream->accumulated_base;
+ stream->segment.applied_rate = qtdemux->segment.applied_rate;
+ stream->segment.rate = rate;
+ stream->segment.start = start + QTSTREAMTIME_TO_GSTTIME (stream,
+ stream->cslg_shift);
+ stream->segment.stop = stop + QTSTREAMTIME_TO_GSTTIME (stream,
+ stream->cslg_shift);
+ stream->segment.time = time;
+ stream->segment.position = stream->segment.start;
+
+ GST_DEBUG_OBJECT (stream->pad, "New segment: %" GST_SEGMENT_FORMAT,
+ &stream->segment);
+
+ /* now prepare and send the segment */
+ if (stream->pad) {
+ event = gst_event_new_segment (&stream->segment);
+ if (qtdemux->segment_seqnum != GST_SEQNUM_INVALID) {
+ gst_event_set_seqnum (event, qtdemux->segment_seqnum);
+ }
+ gst_pad_push_event (stream->pad, event);
+ /* assume we can send more data now */
+ GST_PAD_LAST_FLOW_RETURN (stream->pad) = GST_FLOW_OK;
+ /* clear to send tags on this pad now */
+ gst_qtdemux_push_tags (qtdemux, stream);
+ }
+
+ if (_start)
+ *_start = start;
+ if (_stop)
+ *_stop = stop;
+
+ return TRUE;
+ }
+
+ /* activate the given segment number @seg_idx of @stream at time @offset.
+ * @offset is an absolute global position over all the segments.
+ *
+ * This will push out a SEGMENT event with the right values and
+ * position the stream index to the first decodable sample before
+ * @offset.
+ */
+ static gboolean
+ gst_qtdemux_activate_segment (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ guint32 seg_idx, GstClockTime offset)
+ {
+ QtDemuxSegment *segment;
+ guint32 index, kf_index;
+ GstClockTime start = 0, stop = GST_CLOCK_TIME_NONE;
+
+ GST_LOG_OBJECT (stream->pad, "activate segment %d, offset %" GST_TIME_FORMAT,
+ seg_idx, GST_TIME_ARGS (offset));
+
+ if (!gst_qtdemux_stream_update_segment (qtdemux, stream, seg_idx, offset,
+ &start, &stop))
+ return FALSE;
+
+ segment = &stream->segments[stream->segment_index];
+
+ /* in the fragmented case, we pick a fragment that starts before our
+ * desired position and rely on downstream to wait for a keyframe
+ * (FIXME: doesn't seem to work so well with ismv and wmv, as no parser; the
+ * tfra entries tell us which trun/sample the key unit is in, but we don't
+ * make use of this additional information at the moment) */
+ if (qtdemux->fragmented && !qtdemux->fragmented_seek_pending) {
+ stream->to_sample = G_MAXUINT32;
+ return TRUE;
+ } else {
+ /* well, it will be taken care of below */
+ qtdemux->fragmented_seek_pending = FALSE;
+ /* FIXME ideally the do_fragmented_seek can be done right here,
+ * rather than at loop level
+ * (which might even allow handling edit lists in a fragmented file) */
+ }
+
+ /* We don't need to look for a sample in push-based */
+ if (!qtdemux->pullbased)
+ return TRUE;
+
+ /* and move to the keyframe before the indicated media time of the
+ * segment */
+ if (G_LIKELY (!QTSEGMENT_IS_EMPTY (segment))) {
+ if (qtdemux->segment.rate >= 0) {
+ index = gst_qtdemux_find_index_linear (qtdemux, stream, start);
+ stream->to_sample = G_MAXUINT32;
+ GST_DEBUG_OBJECT (stream->pad,
+ "moving data pointer to %" GST_TIME_FORMAT ", index: %u, pts %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (start), index,
+ GST_TIME_ARGS (QTSAMPLE_PTS (stream, &stream->samples[index])));
+ } else {
+ index = gst_qtdemux_find_index_linear (qtdemux, stream, stop);
+ stream->to_sample = index;
+ GST_DEBUG_OBJECT (stream->pad,
+ "moving data pointer to %" GST_TIME_FORMAT ", index: %u, pts %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (stop), index,
+ GST_TIME_ARGS (QTSAMPLE_PTS (stream, &stream->samples[index])));
+ }
+ } else {
+ GST_DEBUG_OBJECT (stream->pad, "No need to look for keyframe, "
+ "this is an empty segment");
+ return TRUE;
+ }
+
+ /* qtdemux_parse_samples () called from gst_qtdemux_find_index_linear ()
+ * encountered an error and printed a message so we return appropriately */
+ if (index == -1)
+ return FALSE;
+
+ /* we're at the right spot */
+ if (index == stream->sample_index) {
+ GST_DEBUG_OBJECT (stream->pad, "we are at the right index");
+ return TRUE;
+ }
+
+ /* find keyframe of the target index */
+ kf_index = gst_qtdemux_find_keyframe (qtdemux, stream, index, FALSE);
+
+ /* go back two frames to provide lead-in for non-raw audio decoders */
+ if (stream->subtype == FOURCC_soun && !stream->need_clip) {
+ guint32 lead_in = 2;
+ guint32 old_index = kf_index;
+ GstStructure *s = gst_caps_get_structure (CUR_STREAM (stream)->caps, 0);
+
+ if (gst_structure_has_name (s, "audio/mpeg")) {
+ gint mpegversion;
+ if (gst_structure_get_int (s, "mpegversion", &mpegversion)
+ && mpegversion == 1) {
+ /* mp3 could need up to 30 frames of lead-in per mpegaudioparse */
+ lead_in = 30;
+ }
+ }
+
+ kf_index = MAX (kf_index, lead_in) - lead_in;
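+ /* the MAX () above clamps the subtraction so we never underflow below
+ * sample 0 */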
+ if (qtdemux_parse_samples (qtdemux, stream, kf_index)) {
+ GST_DEBUG_OBJECT (stream->pad,
+ "Moving backwards %u frames to ensure sufficient sound lead-in",
+ old_index - kf_index);
+ } else {
+ kf_index = old_index;
+ }
+ }
+
+ /* if we move forwards, we don't have to go back to the previous
+ * keyframe since we already sent that. We can also just jump to
+ * the keyframe right before the target index if there is one. */
+ if (index > stream->sample_index) {
+ /* moving forwards check if we move past a keyframe */
+ if (kf_index > stream->sample_index) {
+ GST_DEBUG_OBJECT (stream->pad,
+ "moving forwards to keyframe at %u "
+ "(pts %" GST_TIME_FORMAT " dts %" GST_TIME_FORMAT " )",
+ kf_index,
+ GST_TIME_ARGS (QTSAMPLE_PTS (stream, &stream->samples[kf_index])),
+ GST_TIME_ARGS (QTSAMPLE_DTS (stream, &stream->samples[kf_index])));
+ gst_qtdemux_move_stream (qtdemux, stream, kf_index);
+ } else {
+ GST_DEBUG_OBJECT (stream->pad,
+ "moving forwards, keyframe at %u "
+ "(pts %" GST_TIME_FORMAT " dts %" GST_TIME_FORMAT " ) already sent",
+ kf_index,
+ GST_TIME_ARGS (QTSAMPLE_PTS (stream, &stream->samples[kf_index])),
+ GST_TIME_ARGS (QTSAMPLE_DTS (stream, &stream->samples[kf_index])));
+ }
+ } else {
+ GST_DEBUG_OBJECT (stream->pad,
+ "moving backwards to %sframe at %u "
+ "(pts %" GST_TIME_FORMAT " dts %" GST_TIME_FORMAT " )",
+ (stream->subtype == FOURCC_soun) ? "audio " : "key", kf_index,
+ GST_TIME_ARGS (QTSAMPLE_PTS (stream, &stream->samples[kf_index])),
+ GST_TIME_ARGS (QTSAMPLE_DTS (stream, &stream->samples[kf_index])));
+ gst_qtdemux_move_stream (qtdemux, stream, kf_index);
+ }
+
+ return TRUE;
+ }
+
+ /* prepare to get the current sample of @stream, getting essential values.
+ *
+ * This function will also prepare and send the segment when needed.
+ *
+ * Returns FALSE if the stream is EOS.
+ *
+ * PULL-BASED
+ */
+ static gboolean
+ gst_qtdemux_prepare_current_sample (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, gboolean * empty, guint64 * offset, guint * size,
+ GstClockTime * dts, GstClockTime * pts, GstClockTime * duration,
+ gboolean * keyframe)
+ {
+ QtDemuxSample *sample;
+ GstClockTime time_position;
+ guint32 seg_idx;
+
+ g_return_val_if_fail (stream != NULL, FALSE);
+
+ time_position = stream->time_position;
+ if (G_UNLIKELY (time_position == GST_CLOCK_TIME_NONE))
+ goto eos;
+
+ seg_idx = stream->segment_index;
+ if (G_UNLIKELY (seg_idx == -1)) {
+ /* find segment corresponding to time_position if we are looking
+ * for a segment. */
+ seg_idx = gst_qtdemux_find_segment (qtdemux, stream, time_position);
+ }
+
+ /* different segment, activate it, sample_index will be set. */
+ if (G_UNLIKELY (stream->segment_index != seg_idx))
+ gst_qtdemux_activate_segment (qtdemux, stream, seg_idx, time_position);
+
+ if (G_UNLIKELY (QTSEGMENT_IS_EMPTY (&stream->
+ segments[stream->segment_index]))) {
+ QtDemuxSegment *seg = &stream->segments[stream->segment_index];
+
+ GST_LOG_OBJECT (qtdemux, "Empty segment activated,"
+ " prepare empty sample");
+
+ *empty = TRUE;
+ *pts = *dts = time_position;
+ *duration = seg->duration - (time_position - seg->time);
+
+ return TRUE;
+ }
+
+ *empty = FALSE;
+
+ if (stream->sample_index == -1)
+ stream->sample_index = 0;
+
+ GST_LOG_OBJECT (qtdemux, "segment active, index = %u of %u",
+ stream->sample_index, stream->n_samples);
+
+ if (G_UNLIKELY (stream->sample_index >= stream->n_samples)) {
+ if (!qtdemux->fragmented)
+ goto eos;
+
+ GST_INFO_OBJECT (qtdemux, "out of samples, trying to add more");
+ do {
+ GstFlowReturn flow;
+
+ GST_OBJECT_LOCK (qtdemux);
+ flow = qtdemux_add_fragmented_samples (qtdemux);
+ GST_OBJECT_UNLOCK (qtdemux);
+
+ if (flow != GST_FLOW_OK)
+ goto eos;
+ }
+ while (stream->sample_index >= stream->n_samples);
+ }
+
+ if (!qtdemux_parse_samples (qtdemux, stream, stream->sample_index)) {
+ GST_LOG_OBJECT (qtdemux, "Parsing of index %u failed!",
+ stream->sample_index);
+ return FALSE;
+ }
+
+ /* now get the info for the sample we're at */
+ sample = &stream->samples[stream->sample_index];
+
+ *dts = QTSAMPLE_DTS (stream, sample);
+ *pts = QTSAMPLE_PTS (stream, sample);
+ *offset = sample->offset;
+ *size = sample->size;
+ *duration = QTSAMPLE_DUR_DTS (stream, sample, *dts);
+ *keyframe = QTSAMPLE_KEYFRAME (stream, sample);
+
+ return TRUE;
+
+ /* special cases */
+ eos:
+ {
+ stream->time_position = GST_CLOCK_TIME_NONE;
+ return FALSE;
+ }
+ }
+
+ /* move to the next sample in @stream.
+ *
+ * Moves to the next segment when needed.
+ */
+ static void
+ gst_qtdemux_advance_sample (GstQTDemux * qtdemux, QtDemuxStream * stream)
+ {
+ QtDemuxSample *sample;
+ QtDemuxSegment *segment;
+
+ /* get current segment */
+ segment = &stream->segments[stream->segment_index];
+
+ if (G_UNLIKELY (QTSEGMENT_IS_EMPTY (segment))) {
+ GST_DEBUG_OBJECT (qtdemux, "Empty segment, no samples to advance");
+ goto next_segment;
+ }
+
+ if (G_UNLIKELY (stream->sample_index >= stream->to_sample)) {
+ /* Mark the stream as EOS */
+ GST_DEBUG_OBJECT (qtdemux,
+ "reached max allowed sample %u, mark EOS", stream->to_sample);
+ stream->time_position = GST_CLOCK_TIME_NONE;
+ return;
+ }
+
+ /* move to next sample */
+ stream->sample_index++;
+ stream->offset_in_sample = 0;
+
+ GST_TRACE_OBJECT (qtdemux, "advance to sample %u/%u", stream->sample_index,
+ stream->n_samples);
+
+ /* reached the last sample, we need the next segment */
+ if (G_UNLIKELY (stream->sample_index >= stream->n_samples))
+ goto next_segment;
+
+ if (!qtdemux_parse_samples (qtdemux, stream, stream->sample_index)) {
+ GST_LOG_OBJECT (qtdemux, "Parsing of index %u failed!",
+ stream->sample_index);
+ return;
+ }
+
+ /* get next sample */
+ sample = &stream->samples[stream->sample_index];
+
+ GST_TRACE_OBJECT (qtdemux, "sample dts %" GST_TIME_FORMAT " media_stop: %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (QTSAMPLE_DTS (stream, sample)),
+ GST_TIME_ARGS (segment->media_stop));
+
+ /* see if we are past the segment */
+ if (G_UNLIKELY (QTSAMPLE_DTS (stream, sample) >= segment->media_stop))
+ goto next_segment;
+
+ if (QTSAMPLE_DTS (stream, sample) >= segment->media_start) {
+ /* inside the segment, update time_position, looks very familiar to
+ * GStreamer segments, doesn't it? */
+ stream->time_position =
+ QTSAMPLE_DTS (stream, sample) - segment->media_start + segment->time;
+ } else {
+ /* not yet in segment, time does not yet increment. This means
+ * that we are still prerolling keyframes to the decoder so it can
+ * decode the first sample of the segment. */
+ stream->time_position = segment->time;
+ }
+ return;
+
+ /* move to the next segment */
+ next_segment:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "segment %d ended", stream->segment_index);
+
+ if (stream->segment_index == stream->n_segments - 1) {
+ /* we are at the end of the last segment, so we're EOS */
+ stream->time_position = GST_CLOCK_TIME_NONE;
+ } else {
+ /* else we're only at the end of the current segment */
+ stream->time_position = segment->stop_time;
+ }
+ /* make sure we select a new segment */
+
+ /* accumulate previous segments */
+ if (GST_CLOCK_TIME_IS_VALID (stream->segment.stop))
+ stream->accumulated_base +=
+ (stream->segment.stop -
+ stream->segment.start) / ABS (stream->segment.rate);
+
+ stream->segment_index = -1;
+ }
+ }
+
+ static void
+ gst_qtdemux_sync_streams (GstQTDemux * demux)
+ {
+ gint i;
+
+ if (QTDEMUX_N_STREAMS (demux) <= 1)
+ return;
+
+ for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
+ QtDemuxStream *stream;
+ GstClockTime end_time;
+
+ stream = QTDEMUX_NTH_STREAM (demux, i);
+
+ if (!stream->pad)
+ continue;
+
+ /* TODO some day: advance time on subtitle streams here, if any */
+
+ /* some clips/trailers may have unbalanced streams at the end,
+ * so send EOS on shorter stream to prevent stalling others */
+
+ /* do not mess with EOS if SEGMENT seeking */
+ if (demux->segment.flags & GST_SEEK_FLAG_SEGMENT)
+ continue;
+
+ if (demux->pullbased) {
+ /* loop mode is sample time based */
+ if (!STREAM_IS_EOS (stream))
+ continue;
+ } else {
+ /* push mode is byte position based */
+ if (stream->n_samples &&
+ stream->samples[stream->n_samples - 1].offset >= demux->offset)
+ continue;
+ }
+
+ if (stream->sent_eos)
+ continue;
+
+ /* only act if some gap */
+ end_time = stream->segments[stream->n_segments - 1].stop_time;
+ GST_LOG_OBJECT (demux, "current position: %" GST_TIME_FORMAT
+ ", stream end: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (demux->segment.position), GST_TIME_ARGS (end_time));
+ if (GST_CLOCK_TIME_IS_VALID (end_time)
+ && (end_time + 2 * GST_SECOND < demux->segment.position)) {
+ GstEvent *event;
+
+ GST_DEBUG_OBJECT (demux, "sending EOS for stream %s",
+ GST_PAD_NAME (stream->pad));
+ stream->sent_eos = TRUE;
+ event = gst_event_new_eos ();
+ if (demux->segment_seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (event, demux->segment_seqnum);
+ gst_pad_push_event (stream->pad, event);
+ }
+ }
+ }
+
+ /* EOS and NOT_LINKED need to be combined. This means that we return:
+ *
+ * GST_FLOW_NOT_LINKED: when all pads NOT_LINKED.
+ * GST_FLOW_EOS: when all pads EOS or NOT_LINKED.
+ */
+ static GstFlowReturn
+ gst_qtdemux_combine_flows (GstQTDemux * demux, QtDemuxStream * stream,
+ GstFlowReturn ret)
+ {
+ GST_LOG_OBJECT (demux, "flow return: %s", gst_flow_get_name (ret));
+
+ if (stream->pad)
+ ret = gst_flow_combiner_update_pad_flow (demux->flowcombiner, stream->pad,
+ ret);
+ else
+ ret = gst_flow_combiner_update_flow (demux->flowcombiner, ret);
+
+ GST_LOG_OBJECT (demux, "combined flow return: %s", gst_flow_get_name (ret));
+ return ret;
+ }
+
+ /* the input buffer metadata must be writable. Returns NULL when the buffer is
+ * completely clipped
+ *
+ * Should be used only with raw buffers */
+ static GstBuffer *
+ gst_qtdemux_clip_buffer (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ GstBuffer * buf)
+ {
+ guint64 start, stop, cstart, cstop, diff;
+ GstClockTime pts, duration;
+ gsize size, osize;
+ gint num_rate, denom_rate;
+ gint frame_size;
+ gboolean clip_data;
+ guint offset;
+
+ osize = size = gst_buffer_get_size (buf);
+ offset = 0;
+
+ /* depending on the type, setup the clip parameters */
+ if (stream->subtype == FOURCC_soun) {
+ frame_size = CUR_STREAM (stream)->bytes_per_frame;
+ num_rate = GST_SECOND;
+ denom_rate = (gint) CUR_STREAM (stream)->rate;
+ clip_data = TRUE;
+ } else if (stream->subtype == FOURCC_vide) {
+ frame_size = size;
+ num_rate = CUR_STREAM (stream)->fps_n;
+ denom_rate = CUR_STREAM (stream)->fps_d;
+ clip_data = FALSE;
+ } else
+ goto wrong_type;
+
+ if (frame_size <= 0)
+ goto bad_frame_size;
+
+ /* we can only clip if we have a valid pts */
+ pts = GST_BUFFER_PTS (buf);
+ if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (pts)))
+ goto no_pts;
+
+ duration = GST_BUFFER_DURATION (buf);
+
+ if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (duration))) {
+ duration =
+ gst_util_uint64_scale_int (size / frame_size, num_rate, denom_rate);
+ }
+
+ start = pts;
+ stop = start + duration;
+
+ if (G_UNLIKELY (!gst_segment_clip (&stream->segment,
+ GST_FORMAT_TIME, start, stop, &cstart, &cstop)))
+ goto clipped;
+
+ /* see if some clipping happened */
+ diff = cstart - start;
+ if (diff > 0) {
+ pts += diff;
+ duration -= diff;
+
+ if (clip_data) {
+ /* bring clipped time to samples and to bytes */
+ diff = gst_util_uint64_scale_int (diff, denom_rate, num_rate);
+ diff *= frame_size;
+
+ GST_DEBUG_OBJECT (qtdemux,
+ "clipping start to %" GST_TIME_FORMAT " %"
+ G_GUINT64_FORMAT " bytes", GST_TIME_ARGS (cstart), diff);
+
+ offset = diff;
+ size -= diff;
+ }
+ }
+ diff = stop - cstop;
+ if (diff > 0) {
+ duration -= diff;
+
+ if (clip_data) {
+ /* bring clipped time to samples and then to bytes */
+ diff = gst_util_uint64_scale_int (diff, denom_rate, num_rate);
+ diff *= frame_size;
+ GST_DEBUG_OBJECT (qtdemux,
+ "clipping stop to %" GST_TIME_FORMAT " %" G_GUINT64_FORMAT
+ " bytes", GST_TIME_ARGS (cstop), diff);
+ size -= diff;
+ }
+ }
+
+ if (offset != 0 || size != osize)
+ gst_buffer_resize (buf, offset, size);
+
+ GST_BUFFER_DTS (buf) = GST_CLOCK_TIME_NONE;
+ GST_BUFFER_PTS (buf) = pts;
+ GST_BUFFER_DURATION (buf) = duration;
+
+ return buf;
+
+ /* error exits: the buffer is passed through unclipped; only a fully
+ * clipped buffer is dropped */
+ wrong_type:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "unknown stream type");
+ return buf;
+ }
+ bad_frame_size:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "bad frame size");
+ return buf;
+ }
+ no_pts:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "no pts on buffer");
+ return buf;
+ }
+ clipped:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "clipped buffer");
+ gst_buffer_unref (buf);
+ return NULL;
+ }
+ }
+
+ static GstBuffer *
+ gst_qtdemux_align_buffer (GstQTDemux * demux,
+ GstBuffer * buffer, gsize alignment)
+ {
+ GstMapInfo map;
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+ if (map.size < sizeof (guintptr)) {
+ gst_buffer_unmap (buffer, &map);
+ return buffer;
+ }
+
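+ /* the bitmask test below assumes a power-of-two alignment, matching the
+ * align-as-mask convention of GstAllocationParams used just below */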
+ if (((guintptr) map.data) & (alignment - 1)) {
+ GstBuffer *new_buffer;
+ GstAllocationParams params = { 0, alignment - 1, 0, 0, };
+
+ new_buffer = gst_buffer_new_allocate (NULL,
+ gst_buffer_get_size (buffer), &params);
+
+ /* Copy data "by hand", so ensure alignment is kept: */
+ gst_buffer_fill (new_buffer, 0, map.data, map.size);
+
+ gst_buffer_copy_into (new_buffer, buffer, GST_BUFFER_COPY_METADATA, 0, -1);
+ GST_DEBUG_OBJECT (demux,
+ "We want output aligned on %" G_GSIZE_FORMAT ", reallocated",
+ alignment);
+
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+
+ return new_buffer;
+ }
+
+ gst_buffer_unmap (buffer, &map);
+ return buffer;
+ }
+
+ static guint8 *
+ convert_to_s334_1a (const guint8 * ccpair, guint8 ccpair_size, guint field,
+ gsize * res)
+ {
+ guint8 *storage;
+ gsize i;
+
+ /* We are converting from pairs to triplets */
+ *res = ccpair_size / 2 * 3;
+ storage = g_malloc (*res);
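+ /* each output triplet is one flags byte (0x80 marks field 1, 0x00 field 2,
+ * line offset 0 as per the FIXME below) followed by the original byte pair;
+ * a trailing odd byte, if any, is dropped by the integer division above */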
+ for (i = 0; i * 2 < ccpair_size; i += 1) {
+ /* FIXME: Use line offset 0 as we simply can't know here */
+ if (field == 1)
+ storage[i * 3] = 0x80 | 0x00;
+ else
+ storage[i * 3] = 0x00 | 0x00;
+ storage[i * 3 + 1] = ccpair[i * 2];
+ storage[i * 3 + 2] = ccpair[i * 2 + 1];
+ }
+
+ return storage;
+ }
+
+ static guint8 *
+ extract_cc_from_data (QtDemuxStream * stream, const guint8 * data, gsize size,
+ gsize * cclen)
+ {
+ guint8 *res = NULL;
+ guint32 atom_length, fourcc;
+ QtDemuxStreamStsdEntry *stsd_entry;
+
+ GST_MEMDUMP ("caption atom", data, size);
+
+ /* There might be multiple atoms */
+
+ *cclen = 0;
+ if (size < 8)
+ goto invalid_cdat;
+ atom_length = QT_UINT32 (data);
+ fourcc = QT_FOURCC (data + 4);
+ if (G_UNLIKELY (atom_length > size || atom_length == 8))
+ goto invalid_cdat;
+
+ GST_DEBUG_OBJECT (stream->pad, "atom length %u, fourcc %" GST_FOURCC_FORMAT,
+ atom_length, GST_FOURCC_ARGS (fourcc));
+
+ /* Check if we have something compatible */
+ stsd_entry = CUR_STREAM (stream);
+ switch (stsd_entry->fourcc) {
+ case FOURCC_c608:{
+ guint8 *cdat = NULL, *cdt2 = NULL;
+ gsize cdat_size = 0, cdt2_size = 0;
+ /* Should be cdat or cdt2 */
+ if (fourcc != FOURCC_cdat && fourcc != FOURCC_cdt2) {
+ GST_WARNING_OBJECT (stream->pad,
+ "Unknown data atom (%" GST_FOURCC_FORMAT ") for CEA608",
+ GST_FOURCC_ARGS (fourcc));
+ goto invalid_cdat;
+ }
+
+ /* Convert to S334-1 Annex A byte triplet */
+ if (fourcc == FOURCC_cdat)
+ cdat = convert_to_s334_1a (data + 8, atom_length - 8, 1, &cdat_size);
+ else
+ cdt2 = convert_to_s334_1a (data + 8, atom_length - 8, 2, &cdt2_size);
+ GST_DEBUG_OBJECT (stream->pad, "size:%" G_GSIZE_FORMAT " atom_length:%u",
+ size, atom_length);
+
+ /* Check for another atom? */
+ if (size > atom_length + 8) {
+ guint32 new_atom_length = QT_UINT32 (data + atom_length);
+ if (size >= atom_length + new_atom_length) {
+ fourcc = QT_FOURCC (data + atom_length + 4);
+ if (fourcc == FOURCC_cdat) {
+ if (cdat == NULL)
+ cdat =
+ convert_to_s334_1a (data + atom_length + 8,
+ new_atom_length - 8, 1, &cdat_size);
+ else
+ GST_WARNING_OBJECT (stream->pad,
+ "Got multiple [cdat] atoms in a c608 sample. This is unsupported for now. Please file a bug");
+ } else {
+ if (cdt2 == NULL)
+ cdt2 =
+ convert_to_s334_1a (data + atom_length + 8,
+ new_atom_length - 8, 2, &cdt2_size);
+ else
+ GST_WARNING_OBJECT (stream->pad,
+ "Got multiple [cdt2] atoms in a c608 sample. This is unsupported for now. Please file a bug");
+ }
+ }
+ }
+
+ *cclen = cdat_size + cdt2_size;
+ res = g_malloc (*cclen);
+ if (cdat_size)
+ memcpy (res, cdat, cdat_size);
+ if (cdt2_size)
+ memcpy (res + cdat_size, cdt2, cdt2_size);
+ g_free (cdat);
+ g_free (cdt2);
+ }
+ break;
+ case FOURCC_c708:
+ if (fourcc != FOURCC_ccdp) {
+ GST_WARNING_OBJECT (stream->pad,
+ "Unknown data atom (%" GST_FOURCC_FORMAT ") for CEA708",
+ GST_FOURCC_ARGS (fourcc));
+ goto invalid_cdat;
+ }
+ *cclen = atom_length - 8;
+ res = g_memdup2 (data + 8, *cclen);
+ break;
+ default:
+ /* Keep this here in case other closed caption formats are added */
+ g_assert_not_reached ();
+ break;
+ }
+
+ GST_MEMDUMP ("Output", res, *cclen);
+ return res;
+
+ /* Errors */
+ invalid_cdat:
+ GST_WARNING ("[cdat] atom is too small or invalid");
+ return NULL;
+ }
+
+ /* the input buffer metadata must be writable,
+ * but time/duration etc not yet set and need not be preserved */
+ static GstBuffer *
+ gst_qtdemux_process_buffer (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ GstBuffer * buf)
+ {
+ GstMapInfo map;
+ guint nsize = 0;
+ gchar *str;
+
+ /* not many cases for now */
+ if (G_UNLIKELY (CUR_STREAM (stream)->fourcc == FOURCC_mp4s)) {
+ /* send a one time dvd clut event */
+ if (stream->pending_event && stream->pad)
+ gst_pad_push_event (stream->pad, stream->pending_event);
+ stream->pending_event = NULL;
+ }
+
+ if (G_UNLIKELY (stream->subtype != FOURCC_text
+ && stream->subtype != FOURCC_sbtl &&
+ stream->subtype != FOURCC_subp && stream->subtype != FOURCC_clcp)) {
+ return buf;
+ }
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+
+ /* empty buffer is sent to terminate previous subtitle */
+ if (map.size <= 2) {
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ return NULL;
+ }
+ if (stream->subtype == FOURCC_subp) {
+ /* That's all the processing needed for subpictures */
+ gst_buffer_unmap (buf, &map);
+ return buf;
+ }
+
+ if (stream->subtype == FOURCC_clcp) {
+ guint8 *cc;
+ gsize cclen = 0;
+ /* For closed caption, we need to extract the information from the
+ * [cdat],[cdt2] or [ccdp] atom */
+ cc = extract_cc_from_data (stream, map.data, map.size, &cclen);
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ if (cc) {
+ buf = _gst_buffer_new_wrapped (cc, cclen, g_free);
+ } else {
+ /* Conversion failed or there's nothing */
+ buf = NULL;
+ }
+ return buf;
+ }
+
+ nsize = GST_READ_UINT16_BE (map.data);
+ nsize = MIN (nsize, map.size - 2);
+
+ GST_LOG_OBJECT (qtdemux, "3GPP timed text subtitle: %d/%" G_GSIZE_FORMAT "",
+ nsize, map.size);
+
+ /* takes care of UTF-8 validation or UTF-16 recognition,
+ * no other encoding expected */
+ str = gst_tag_freeform_string_to_utf8 ((gchar *) map.data + 2, nsize, NULL);
+ gst_buffer_unmap (buf, &map);
+ if (str) {
+ gst_buffer_unref (buf);
+ buf = _gst_buffer_new_wrapped (str, strlen (str), g_free);
+ } else {
+ /* this should not really happen unless the subtitle is corrupted */
+ gst_buffer_unref (buf);
+ buf = NULL;
+ }
+
+ /* FIXME ? convert optional subsequent style info to markup */
+
+ return buf;
+ }
+
+ static GstFlowReturn
+ gst_qtdemux_push_buffer (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ GstBuffer * buf)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstClockTime pts, duration;
+
+ if (stream->need_clip)
+ buf = gst_qtdemux_clip_buffer (qtdemux, stream, buf);
+
+ if (G_UNLIKELY (buf == NULL))
+ goto exit;
+
+ if (G_UNLIKELY (stream->discont)) {
+ GST_LOG_OBJECT (qtdemux, "marking discont buffer");
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
+ stream->discont = FALSE;
+ } else {
+ GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
+ }
+
+ GST_LOG_OBJECT (qtdemux,
+ "Pushing buffer with dts %" GST_TIME_FORMAT ", pts %" GST_TIME_FORMAT
+ ", duration %" GST_TIME_FORMAT " on pad %s",
+ GST_TIME_ARGS (GST_BUFFER_DTS (buf)),
+ GST_TIME_ARGS (GST_BUFFER_PTS (buf)),
+ GST_TIME_ARGS (GST_BUFFER_DURATION (buf)), GST_PAD_NAME (stream->pad));
+
+ if (stream->protected && stream->protection_scheme_type == FOURCC_aavd) {
+ GstStructure *crypto_info;
+ QtDemuxAavdEncryptionInfo *info =
+ (QtDemuxAavdEncryptionInfo *) stream->protection_scheme_info;
+
+ crypto_info = gst_structure_copy (info->default_properties);
+ if (!crypto_info || !gst_buffer_add_protection_meta (buf, crypto_info))
+ GST_ERROR_OBJECT (qtdemux, "failed to attach aavd metadata to buffer");
+ }
+
+ if (stream->protected && (stream->protection_scheme_type == FOURCC_cenc
+ || stream->protection_scheme_type == FOURCC_cbcs)) {
+ GstStructure *crypto_info;
+ QtDemuxCencSampleSetInfo *info =
+ (QtDemuxCencSampleSetInfo *) stream->protection_scheme_info;
+ gint index;
+ GstEvent *event;
+
+ while ((event = g_queue_pop_head (&stream->protection_scheme_event_queue))) {
+ GST_TRACE_OBJECT (stream->pad, "pushing protection event: %"
+ GST_PTR_FORMAT, event);
+ gst_pad_push_event (stream->pad, event);
+ }
+
+ if (info->crypto_info == NULL) {
+ if (stream->protection_scheme_type == FOURCC_cbcs) {
+ crypto_info = qtdemux_get_cenc_sample_properties (qtdemux, stream, 0);
+ if (!crypto_info || !gst_buffer_add_protection_meta (buf, crypto_info)) {
+ GST_ERROR_OBJECT (qtdemux,
+ "failed to attach cbcs metadata to buffer");
+ qtdemux_gst_structure_free (crypto_info);
+ } else {
+ GST_TRACE_OBJECT (qtdemux, "added cbcs protection metadata");
+ }
+ } else {
+ GST_DEBUG_OBJECT (qtdemux,
+ "cenc metadata hasn't been parsed yet, pushing buffer as if it wasn't encrypted");
+ }
+ } else {
+ /* The end of the crypto_info array matches our n_samples position,
+ * so count backward from there */
+ index = stream->sample_index - stream->n_samples + info->crypto_info->len;
+ if (G_LIKELY (index >= 0 && index < info->crypto_info->len)) {
+ /* steal structure from array */
+ crypto_info = g_ptr_array_index (info->crypto_info, index);
+ g_ptr_array_index (info->crypto_info, index) = NULL;
+ GST_LOG_OBJECT (qtdemux, "attaching cenc metadata [%u/%u]", index,
+ info->crypto_info->len);
+ if (!crypto_info || !gst_buffer_add_protection_meta (buf, crypto_info))
+ GST_ERROR_OBJECT (qtdemux,
+ "failed to attach cenc metadata to buffer");
+ } else {
+ GST_INFO_OBJECT (qtdemux, "No crypto info with index %d and sample %d",
+ index, stream->sample_index);
+ }
+ }
+ }
+
+ if (stream->alignment > 1)
+ buf = gst_qtdemux_align_buffer (qtdemux, buf, stream->alignment);
+
+ pts = GST_BUFFER_PTS (buf);
+ duration = GST_BUFFER_DURATION (buf);
+
+ ret = gst_pad_push (stream->pad, buf);
+
+ if (GST_CLOCK_TIME_IS_VALID (pts) && GST_CLOCK_TIME_IS_VALID (duration)) {
+ /* mark position in stream, we'll need this to know when to send GAP event */
+ stream->segment.position = pts + duration;
+ }
+
+ exit:
+
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_qtdemux_split_and_push_buffer (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ GstBuffer * buf)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ if (stream->subtype == FOURCC_clcp
+ && CUR_STREAM (stream)->fourcc == FOURCC_c608 && stream->need_split) {
+ GstMapInfo map;
+ guint n_output_buffers, n_field1 = 0, n_field2 = 0;
+ guint n_triplets, i;
+ guint field1_off = 0, field2_off = 0;
+
+ /* We have to split CEA608 buffers so that each outgoing buffer contains
+ * one byte pair per field according to the framerate of the video track.
+ *
+ * If there is only a single byte pair per field we don't have to do
+ * anything
+ */
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+
+ n_triplets = map.size / 3;
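+ /* the triplets are in the S334-1A form produced by convert_to_s334_1a ():
+ * the top bit of the flag byte distinguishes field 1 from field 2 */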
+ for (i = 0; i < n_triplets; i++) {
+ if (map.data[3 * i] & 0x80)
+ n_field1++;
+ else
+ n_field2++;
+ }
+
+ g_assert (n_field1 || n_field2);
+
+ /* If there's more than 1 frame we have to split, otherwise we can just
+ * pass through */
+ if (n_field1 > 1 || n_field2 > 1) {
+ n_output_buffers =
+ gst_util_uint64_scale (GST_BUFFER_DURATION (buf),
+ CUR_STREAM (stream)->fps_n, GST_SECOND * CUR_STREAM (stream)->fps_d);
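+ /* one output buffer per video frame covered by this sample's duration */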
+
+ for (i = 0; i < n_output_buffers; i++) {
+ GstBuffer *outbuf =
+ gst_buffer_new_and_alloc ((n_field1 ? 3 : 0) + (n_field2 ? 3 : 0));
+ GstMapInfo outmap;
+ guint8 *outptr;
+
+ gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);
+ outptr = outmap.data;
+
+ if (n_field1) {
+ gboolean found = FALSE;
+
+ while (map.data + field1_off < map.data + map.size) {
+ if (map.data[field1_off] & 0x80) {
+ memcpy (outptr, &map.data[field1_off], 3);
+ field1_off += 3;
+ found = TRUE;
+ break;
+ }
+ field1_off += 3;
+ }
+
+ if (!found) {
+ const guint8 empty[] = { 0x80, 0x80, 0x80 };
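+ /* field-1 flag byte plus the CEA-608 null pair (0x00 with odd parity) */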
+
+ memcpy (outptr, empty, 3);
+ }
+
+ outptr += 3;
+ }
+
+ if (n_field2) {
+ gboolean found = FALSE;
+
+ while (map.data + field2_off < map.data + map.size) {
+ if ((map.data[field2_off] & 0x80) == 0) {
+ memcpy (outptr, &map.data[field2_off], 3);
+ field2_off += 3;
+ found = TRUE;
+ break;
+ }
+ field2_off += 3;
+ }
+
+ if (!found) {
+ const guint8 empty[] = { 0x00, 0x80, 0x80 };
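+ /* as above, but with the field-2 flag byte */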
+
+ memcpy (outptr, empty, 3);
+ }
+
+ outptr += 3;
+ }
+
+ gst_buffer_unmap (outbuf, &outmap);
+
+ GST_BUFFER_PTS (outbuf) =
+ GST_BUFFER_PTS (buf) + gst_util_uint64_scale (i,
+ GST_SECOND * CUR_STREAM (stream)->fps_d,
+ CUR_STREAM (stream)->fps_n);
+ GST_BUFFER_DURATION (outbuf) =
+ gst_util_uint64_scale (GST_SECOND, CUR_STREAM (stream)->fps_d,
+ CUR_STREAM (stream)->fps_n);
+ GST_BUFFER_OFFSET (outbuf) = -1;
+ GST_BUFFER_OFFSET_END (outbuf) = -1;
+
+ ret = gst_qtdemux_push_buffer (qtdemux, stream, outbuf);
+
+ if (ret != GST_FLOW_OK && ret != GST_FLOW_NOT_LINKED)
+ break;
+ }
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ } else {
+ gst_buffer_unmap (buf, &map);
+ ret = gst_qtdemux_push_buffer (qtdemux, stream, buf);
+ }
+ } else {
+ ret = gst_qtdemux_push_buffer (qtdemux, stream, buf);
+ }
+
+ return ret;
+ }
+
+ /* Sets a buffer's attributes properly and pushes it downstream.
+ * Also checks for additional actions and custom processing that may
+ * need to be done first.
+ */
+ static GstFlowReturn
+ gst_qtdemux_decorate_and_push_buffer (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, GstBuffer * buf,
+ GstClockTime dts, GstClockTime pts, GstClockTime duration,
+ gboolean keyframe, GstClockTime position, guint64 byte_position)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ /* offset the timestamps according to the edit list */
+
+ if (G_UNLIKELY (CUR_STREAM (stream)->fourcc == FOURCC_rtsp)) {
+ gchar *url;
+ GstMapInfo map;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ url = g_strndup ((gchar *) map.data, map.size);
+ gst_buffer_unmap (buf, &map);
+ if (url != NULL && strlen (url) != 0) {
+ /* we have RTSP redirect now */
+ g_free (qtdemux->redirect_location);
+ qtdemux->redirect_location = g_strdup (url);
+ gst_element_post_message (GST_ELEMENT_CAST (qtdemux),
+ gst_message_new_element (GST_OBJECT_CAST (qtdemux),
+ gst_structure_new ("redirect",
+ "new-location", G_TYPE_STRING, url, NULL)));
+ } else {
+ GST_WARNING_OBJECT (qtdemux, "Redirect URI of stream is empty, not "
+ "posting");
+ }
+ g_free (url);
+ }
+
+ /* position reporting */
+ if (qtdemux->segment.rate >= 0) {
+ qtdemux->segment.position = position;
+ gst_qtdemux_sync_streams (qtdemux);
+ }
+
+ if (G_UNLIKELY (!stream->pad)) {
+ GST_DEBUG_OBJECT (qtdemux, "No output pad for stream, ignoring");
+ gst_buffer_unref (buf);
+ goto exit;
+ }
+
+ /* send out pending buffers */
+ while (stream->buffers) {
+ GstBuffer *buffer = (GstBuffer *) stream->buffers->data;
+
+ if (G_UNLIKELY (stream->discont)) {
+ GST_LOG_OBJECT (qtdemux, "marking discont buffer");
+ GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
+ stream->discont = FALSE;
+ } else {
+ GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
+ }
+
+ if (stream->alignment > 1)
+ buffer = gst_qtdemux_align_buffer (qtdemux, buffer, stream->alignment);
+ gst_pad_push (stream->pad, buffer);
+
+ stream->buffers = g_slist_delete_link (stream->buffers, stream->buffers);
+ }
+
+ /* we're going to modify the metadata */
+ buf = gst_buffer_make_writable (buf);
+
+ if (G_UNLIKELY (stream->need_process))
+ buf = gst_qtdemux_process_buffer (qtdemux, stream, buf);
+
+ if (!buf) {
+ goto exit;
+ }
+
+ GST_BUFFER_DTS (buf) = dts;
+ GST_BUFFER_PTS (buf) = pts;
+ GST_BUFFER_DURATION (buf) = duration;
+ GST_BUFFER_OFFSET (buf) = -1;
+ GST_BUFFER_OFFSET_END (buf) = -1;
+
+ if (!keyframe) {
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
+ stream->on_keyframe = FALSE;
+ } else {
+ stream->on_keyframe = TRUE;
+ }
+
+ if (G_UNLIKELY (CUR_STREAM (stream)->rgb8_palette))
+ gst_buffer_append_memory (buf,
+ gst_memory_ref (CUR_STREAM (stream)->rgb8_palette));
+
+ if (G_UNLIKELY (CUR_STREAM (stream)->padding)) {
+ gst_buffer_resize (buf, CUR_STREAM (stream)->padding, -1);
+ }
+ #if 0
+ if (G_UNLIKELY (qtdemux->element_index)) {
+ GstClockTime stream_time;
+
+ stream_time =
+ gst_segment_to_stream_time (&stream->segment, GST_FORMAT_TIME,
+ timestamp);
+ if (GST_CLOCK_TIME_IS_VALID (stream_time)) {
+ GST_LOG_OBJECT (qtdemux,
+ "adding association %" GST_TIME_FORMAT "-> %"
+ G_GUINT64_FORMAT, GST_TIME_ARGS (stream_time), byte_position);
+ gst_index_add_association (qtdemux->element_index,
+ qtdemux->index_id,
+ keyframe ? GST_ASSOCIATION_FLAG_KEY_UNIT :
+ GST_ASSOCIATION_FLAG_DELTA_UNIT, GST_FORMAT_TIME, stream_time,
+ GST_FORMAT_BYTES, byte_position, NULL);
+ }
+ }
+ #endif
+
+ ret = gst_qtdemux_split_and_push_buffer (qtdemux, stream, buf);
+
+ exit:
+ return ret;
+ }
+
+ static const QtDemuxRandomAccessEntry *
+ gst_qtdemux_stream_seek_fragment (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ GstClockTime pos, gboolean after)
+ {
+ QtDemuxRandomAccessEntry *entries = stream->ra_entries;
+ guint n_entries = stream->n_ra_entries;
+ guint i;
+
+ /* we assume the table is sorted */
+ for (i = 0; i < n_entries; ++i) {
+ if (entries[i].ts > pos)
+ break;
+ }
+
+ /* FIXME: maybe save first moof_offset somewhere instead, but for now it's
+ * probably okay to assume that the index lists the very first fragment */
+ if (i == 0)
+ return &entries[0];
+
+ if (after)
+ return &entries[i];
+ else
+ return &entries[i - 1];
+ }
+
+ static gboolean
+ gst_qtdemux_do_fragmented_seek (GstQTDemux * qtdemux)
+ {
+ const QtDemuxRandomAccessEntry *best_entry = NULL;
+ gint i;
+
+ GST_OBJECT_LOCK (qtdemux);
+
+ g_assert (QTDEMUX_N_STREAMS (qtdemux) > 0);
+
+ /* first see if we can determine where to go to using mfra,
+ * before we start clearing things */
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ const QtDemuxRandomAccessEntry *entry;
+ QtDemuxStream *stream;
+ gboolean is_audio_or_video;
+
+ stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+
+ if (stream->ra_entries == NULL)
+ continue;
+
+ if (stream->subtype == FOURCC_vide || stream->subtype == FOURCC_soun)
+ is_audio_or_video = TRUE;
+ else
+ is_audio_or_video = FALSE;
+
+ entry =
+ gst_qtdemux_stream_seek_fragment (qtdemux, stream,
+ stream->time_position, !is_audio_or_video);
+
+ GST_INFO_OBJECT (stream->pad, "%" GST_TIME_FORMAT " at offset "
+ "%" G_GUINT64_FORMAT, GST_TIME_ARGS (entry->ts), entry->moof_offset);
+
+ stream->pending_seek = entry;
+
+ /* decide position to jump to just based on audio/video tracks, not subs */
+ if (!is_audio_or_video)
+ continue;
+
+ if (best_entry == NULL || entry->moof_offset < best_entry->moof_offset)
+ best_entry = entry;
+ }
+
+ /* no luck, will handle seek otherwise */
+ if (best_entry == NULL) {
+ GST_OBJECT_UNLOCK (qtdemux);
+ return FALSE;
+ }
+
+ /* ok, now we can prepare for processing as of located moof */
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *stream;
+
+ stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+
+ g_free (stream->samples);
+ stream->samples = NULL;
+ stream->n_samples = 0;
+ stream->stbl_index = -1; /* no samples have yet been parsed */
+ stream->sample_index = -1;
+
+ if (stream->protection_scheme_info) {
+ /* Clear out any old cenc crypto info entries as we'll move to a new moof */
+ if (stream->protection_scheme_type == FOURCC_cenc
+ || stream->protection_scheme_type == FOURCC_cbcs) {
+ QtDemuxCencSampleSetInfo *info =
+ (QtDemuxCencSampleSetInfo *) stream->protection_scheme_info;
+ if (info->crypto_info) {
+ g_ptr_array_free (info->crypto_info, TRUE);
+ info->crypto_info = NULL;
+ }
+ }
+ }
+ }
+
+ GST_INFO_OBJECT (qtdemux, "seek to %" GST_TIME_FORMAT ", best fragment "
+ "moof offset: %" G_GUINT64_FORMAT ", ts %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (QTDEMUX_NTH_STREAM (qtdemux, 0)->time_position),
+ best_entry->moof_offset, GST_TIME_ARGS (best_entry->ts));
+
+ qtdemux->moof_offset = best_entry->moof_offset;
+
+ qtdemux_add_fragmented_samples (qtdemux);
+
+ GST_OBJECT_UNLOCK (qtdemux);
+ return TRUE;
+ }
+
+ static GstFlowReturn
+ gst_qtdemux_loop_state_movie (GstQTDemux * qtdemux)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstBuffer *buf = NULL;
+ QtDemuxStream *stream, *target_stream = NULL;
+ GstClockTime min_time;
+ guint64 offset = 0;
+ GstClockTime dts = GST_CLOCK_TIME_NONE;
+ GstClockTime pts = GST_CLOCK_TIME_NONE;
+ GstClockTime duration = 0;
+ gboolean keyframe = FALSE;
+ guint sample_size = 0;
+ guint num_samples = 1;
+ gboolean empty = FALSE;
+ guint size;
+ gint i;
+
+ if (qtdemux->fragmented_seek_pending) {
+ GST_INFO_OBJECT (qtdemux, "pending fragmented seek");
+ if (gst_qtdemux_do_fragmented_seek (qtdemux)) {
+ GST_INFO_OBJECT (qtdemux, "fragmented seek done!");
+ qtdemux->fragmented_seek_pending = FALSE;
+ } else {
+ GST_INFO_OBJECT (qtdemux, "fragmented seek still pending");
+ }
+ }
+
+ /* Figure out the next stream sample to output, min_time is expressed in
+ * global time and runs over the edit list segments. */
+ min_time = G_MAXUINT64;
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ GstClockTime position;
+
+ stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+ position = stream->time_position;
+
+ if (!GST_CLOCK_TIME_IS_VALID (position))
+ continue;
+
+ if (stream->segment_index != -1) {
+ QtDemuxSegment *segment = &stream->segments[stream->segment_index];
+ position += segment->media_start;
+ }
+
+ /* position of -1 is EOS */
+ if (position < min_time) {
+ min_time = position;
+ target_stream = stream;
+ }
+ }
+ /* all are EOS */
+ if (G_UNLIKELY (target_stream == NULL)) {
+ GST_DEBUG_OBJECT (qtdemux, "all streams are EOS");
+ goto eos;
+ }
+
+ /* check for segment end */
+ if (G_UNLIKELY (qtdemux->segment.stop != -1
+ && qtdemux->segment.rate >= 0
+ && qtdemux->segment.stop <= min_time && target_stream->on_keyframe)) {
+ GST_DEBUG_OBJECT (qtdemux, "we reached the end of our segment.");
+ target_stream->time_position = GST_CLOCK_TIME_NONE;
+ goto eos_stream;
+ }
+
+ /* gap events for subtitle streams */
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+ if (stream->pad) {
+ GstClockTime gap_threshold;
+
+ /* Only send gap events on non-subtitle streams if lagging way behind. */
+ if (stream->subtype == FOURCC_subp
+ || stream->subtype == FOURCC_text || stream->subtype == FOURCC_sbtl)
+ gap_threshold = 1 * GST_SECOND;
+ else
+ gap_threshold = 3 * GST_SECOND;
+
+ /* send gap events until the stream catches up */
+ /* gaps can only be sent after segment is activated (segment.stop is no longer -1) */
+ while (GST_CLOCK_TIME_IS_VALID (stream->segment.stop) &&
+ GST_CLOCK_TIME_IS_VALID (stream->segment.position) &&
+ stream->segment.position + gap_threshold < min_time) {
+ GstEvent *gap =
+ gst_event_new_gap (stream->segment.position, gap_threshold);
+ gst_pad_push_event (stream->pad, gap);
+ stream->segment.position += gap_threshold;
+ }
+ }
+ }
+
+ stream = target_stream;
+ /* fetch info for the current sample of this stream */
+ if (G_UNLIKELY (!gst_qtdemux_prepare_current_sample (qtdemux, stream, &empty,
+ &offset, &sample_size, &dts, &pts, &duration, &keyframe)))
+ goto eos_stream;
+
+ gst_qtdemux_stream_check_and_change_stsd_index (qtdemux, stream);
+ if (stream->new_caps) {
+ gst_qtdemux_configure_stream (qtdemux, stream);
+ qtdemux_do_allocation (stream, qtdemux);
+ }
+
+ /* If we're doing a keyframe-only trickmode, only push keyframes on video streams */
+ if (G_UNLIKELY (qtdemux->segment.
+ flags & GST_SEGMENT_FLAG_TRICKMODE_KEY_UNITS)) {
+ if (stream->subtype == FOURCC_vide) {
+ if (!keyframe) {
+ GST_LOG_OBJECT (qtdemux, "Skipping non-keyframe on track-id %u",
+ stream->track_id);
+ goto next;
+ } else if (qtdemux->trickmode_interval > 0) {
+ GstClockTimeDiff interval;
+
+ if (qtdemux->segment.rate > 0)
+ interval = stream->time_position - stream->last_keyframe_dts;
+ else
+ interval = stream->last_keyframe_dts - stream->time_position;
+
+ if (GST_CLOCK_TIME_IS_VALID (stream->last_keyframe_dts)
+ && interval < qtdemux->trickmode_interval) {
+ GST_LOG_OBJECT (qtdemux,
+ "Skipping keyframe within interval on track-id %u",
+ stream->track_id);
+ goto next;
+ } else {
+ stream->last_keyframe_dts = stream->time_position;
+ }
+ }
+ }
+ }
+
+ GST_DEBUG_OBJECT (qtdemux,
+ "pushing from track-id %u, empty %d offset %" G_GUINT64_FORMAT
+ ", size %d, dts=%" GST_TIME_FORMAT ", pts=%" GST_TIME_FORMAT
+ ", duration %" GST_TIME_FORMAT, stream->track_id, empty, offset,
+ sample_size, GST_TIME_ARGS (dts), GST_TIME_ARGS (pts),
+ GST_TIME_ARGS (duration));
+
+ if (G_UNLIKELY (empty)) {
+ /* empty segment, push a gap if there's a second or more
+ * difference and move to the next one */
+ if ((pts + duration - stream->segment.position) >= GST_SECOND)
+ gst_pad_push_event (stream->pad, gst_event_new_gap (pts, duration));
+ stream->segment.position = pts + duration;
+ goto next;
+ }
+
+ /* hmm, empty sample, skip and move to next sample */
+ if (G_UNLIKELY (sample_size <= 0))
+ goto next;
+
+ /* last pushed sample was out of boundary, goto next sample */
+ if (G_UNLIKELY (GST_PAD_LAST_FLOW_RETURN (stream->pad) == GST_FLOW_EOS))
+ goto next;
+
+ if (stream->max_buffer_size != 0 && sample_size > stream->max_buffer_size) {
+ GST_DEBUG_OBJECT (qtdemux,
+ "size %d larger than stream max_buffer_size %d, trimming",
+ sample_size, stream->max_buffer_size);
+ size =
+ MIN (sample_size - stream->offset_in_sample, stream->max_buffer_size);
+ } else if (stream->min_buffer_size != 0 && stream->offset_in_sample == 0
+ && sample_size < stream->min_buffer_size) {
+ guint start_sample_index = stream->sample_index;
+ guint accumulated_size = sample_size;
+ guint64 expected_next_offset = offset + sample_size;
+
+ GST_DEBUG_OBJECT (qtdemux,
+ "size %d smaller than stream min_buffer_size %d, combining with the next",
+ sample_size, stream->min_buffer_size);
+
+ while (stream->sample_index < stream->to_sample
+ && stream->sample_index + 1 < stream->n_samples) {
+ const QtDemuxSample *next_sample;
+
+ /* Increment temporarily */
+ stream->sample_index++;
+
+ /* Failed to parse sample so let's go back to the previous one that was
+ * still successful */
+ if (!qtdemux_parse_samples (qtdemux, stream, stream->sample_index)) {
+ stream->sample_index--;
+ break;
+ }
+
+ next_sample = &stream->samples[stream->sample_index];
+
+ /* Not contiguous with the previous sample so let's go back to the
+ * previous one that was still successful */
+ if (next_sample->offset != expected_next_offset) {
+ stream->sample_index--;
+ break;
+ }
+
+ accumulated_size += next_sample->size;
+ expected_next_offset += next_sample->size;
+ if (accumulated_size >= stream->min_buffer_size)
+ break;
+ }
+
+ num_samples = stream->sample_index + 1 - start_sample_index;
+ stream->sample_index = start_sample_index;
+ GST_DEBUG_OBJECT (qtdemux, "Pulling %u samples of size %u at once",
+ num_samples, accumulated_size);
+ size = accumulated_size;
+ } else {
+ size = sample_size;
+ }
+
+ if (qtdemux->cenc_aux_info_offset > 0) {
+ GstMapInfo map;
+ GstByteReader br;
+ GstBuffer *aux_info = NULL;
+
+ /* pull the data stored before the sample */
+ ret =
+ gst_qtdemux_pull_atom (qtdemux, qtdemux->offset,
+ offset + stream->offset_in_sample - qtdemux->offset, &aux_info);
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
+ goto beach;
+ gst_buffer_map (aux_info, &map, GST_MAP_READ);
+ GST_DEBUG_OBJECT (qtdemux, "parsing cenc auxiliary info");
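+ /* the first 8 bytes of the pulled atom are its size/fourcc header */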
+ gst_byte_reader_init (&br, map.data + 8, map.size);
+ if (!qtdemux_parse_cenc_aux_info (qtdemux, stream, &br,
+ qtdemux->cenc_aux_info_sizes, qtdemux->cenc_aux_sample_count)) {
+ GST_ERROR_OBJECT (qtdemux, "failed to parse cenc auxiliary info");
+ gst_buffer_unmap (aux_info, &map);
+ gst_buffer_unref (aux_info);
+ ret = GST_FLOW_ERROR;
+ goto beach;
+ }
+ gst_buffer_unmap (aux_info, &map);
+ gst_buffer_unref (aux_info);
+ }
+
+ GST_LOG_OBJECT (qtdemux, "reading %d bytes @ %" G_GUINT64_FORMAT, size,
+ offset);
+
+ if (stream->use_allocator) {
+ /* if we have a per-stream allocator, use it */
+ buf = gst_buffer_new_allocate (stream->allocator, size, &stream->params);
+ }
+
+ ret = gst_qtdemux_pull_atom (qtdemux, offset + stream->offset_in_sample,
+ size, &buf);
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
+ goto beach;
+
+ /* Update for both splitting and combining of samples */
+ if (size != sample_size) {
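+ /* translate byte offsets into time via bytes_per_frame and the stream
+ * timescale; this is only meaningful for the constant-rate raw audio
+ * streams for which splitting/combining is enabled */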
+ pts += gst_util_uint64_scale_int (GST_SECOND,
+ stream->offset_in_sample / CUR_STREAM (stream)->bytes_per_frame,
+ stream->timescale);
+ dts +=
+ gst_util_uint64_scale_int (GST_SECOND,
+ stream->offset_in_sample / CUR_STREAM (stream)->bytes_per_frame,
+ stream->timescale);
+ duration =
+ gst_util_uint64_scale_int (GST_SECOND,
+ size / CUR_STREAM (stream)->bytes_per_frame, stream->timescale);
+ }
+
+ ret = gst_qtdemux_decorate_and_push_buffer (qtdemux, stream, buf,
+ dts, pts, duration, keyframe, min_time, offset);
+
+ if (size < sample_size) {
+ QtDemuxSample *sample = &stream->samples[stream->sample_index];
+ QtDemuxSegment *segment = &stream->segments[stream->segment_index];
+
+ GstClockTime time_position = QTSTREAMTIME_TO_GSTTIME (stream,
+ sample->timestamp +
+ stream->offset_in_sample / CUR_STREAM (stream)->bytes_per_frame);
+ if (time_position >= segment->media_start) {
+ /* inside the segment, update time_position, looks very familiar to
+ * GStreamer segments, doesn't it? */
+ stream->time_position = (time_position - segment->media_start) +
+ segment->time;
+ } else {
+ /* not yet in segment, time does not yet increment. This means
+ * that we are still prerolling keyframes to the decoder so it can
+ * decode the first sample of the segment. */
+ stream->time_position = segment->time;
+ }
+ } else if (size > sample_size) {
+ /* Increase to the last sample we already pulled so that advancing
+ * below brings us to the next sample we need to pull */
+ stream->sample_index += num_samples - 1;
+ }
+
+ /* combine flows */
+ GST_OBJECT_LOCK (qtdemux);
+ ret = gst_qtdemux_combine_flows (qtdemux, stream, ret);
+ GST_OBJECT_UNLOCK (qtdemux);
+ /* ignore unlinked, we will not push on the pad anymore and we will EOS when
+ * we have no more data for the pad to push */
+ if (ret == GST_FLOW_EOS)
+ ret = GST_FLOW_OK;
+
+ stream->offset_in_sample += size;
+ if (stream->offset_in_sample >= sample_size) {
+ gst_qtdemux_advance_sample (qtdemux, stream);
+ }
+ goto beach;
+
+ next:
+ gst_qtdemux_advance_sample (qtdemux, stream);
+
+ beach:
+ return ret;
+
+ /* special cases */
+ eos:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "No samples left for any streams - EOS");
+ ret = GST_FLOW_EOS;
+ goto beach;
+ }
+ eos_stream:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "No samples left for stream");
+ /* EOS will be raised if all are EOS */
+ ret = GST_FLOW_OK;
+ goto beach;
+ }
+ }
+
+ static void
+ gst_qtdemux_loop (GstPad * pad)
+ {
+ GstQTDemux *qtdemux;
+ guint64 cur_offset;
+ GstFlowReturn ret;
+
+ qtdemux = GST_QTDEMUX (gst_pad_get_parent (pad));
+
+ cur_offset = qtdemux->offset;
+ GST_LOG_OBJECT (qtdemux, "loop at position %" G_GUINT64_FORMAT ", state %s",
+ cur_offset, qt_demux_state_string (qtdemux->state));
+
+ switch (qtdemux->state) {
+ case QTDEMUX_STATE_INITIAL:
+ case QTDEMUX_STATE_HEADER:
+ ret = gst_qtdemux_loop_state_header (qtdemux);
+ break;
+ case QTDEMUX_STATE_MOVIE:
+ ret = gst_qtdemux_loop_state_movie (qtdemux);
+ if (qtdemux->segment.rate < 0 && ret == GST_FLOW_EOS) {
+ ret = gst_qtdemux_seek_to_previous_keyframe (qtdemux);
+ }
+ break;
+ default:
+ /* ouch */
+ goto invalid_state;
+ }
+
+ /* if something went wrong, pause */
+ if (ret != GST_FLOW_OK)
+ goto pause;
+
+ done:
+ gst_object_unref (qtdemux);
+ return;
+
+ /* ERRORS */
+ invalid_state:
+ {
+ GST_ELEMENT_ERROR (qtdemux, STREAM, FAILED,
+ (NULL), ("streaming stopped, invalid state"));
+ gst_pad_pause_task (pad);
+ gst_qtdemux_push_event (qtdemux, gst_event_new_eos ());
+ goto done;
+ }
+ pause:
+ {
+ const gchar *reason = gst_flow_get_name (ret);
+
+ GST_LOG_OBJECT (qtdemux, "pausing task, reason %s", reason);
+
+ gst_pad_pause_task (pad);
+
+ /* fatal errors need special actions */
+ /* check EOS */
+ if (ret == GST_FLOW_EOS) {
+ if (QTDEMUX_N_STREAMS (qtdemux) == 0) {
+ /* we have no streams, post an error */
+ gst_qtdemux_post_no_playable_stream_error (qtdemux);
+ }
+ if (qtdemux->segment.flags & GST_SEEK_FLAG_SEGMENT) {
+ gint64 stop;
+
+ if ((stop = qtdemux->segment.stop) == -1)
+ stop = qtdemux->segment.duration;
+
+ if (qtdemux->segment.rate >= 0) {
+ GstMessage *message;
+ GstEvent *event;
+
+ GST_LOG_OBJECT (qtdemux, "Sending segment done, at end of segment");
+ message = gst_message_new_segment_done (GST_OBJECT_CAST (qtdemux),
+ GST_FORMAT_TIME, stop);
+ event = gst_event_new_segment_done (GST_FORMAT_TIME, stop);
+ if (qtdemux->segment_seqnum != GST_SEQNUM_INVALID) {
+ gst_message_set_seqnum (message, qtdemux->segment_seqnum);
+ gst_event_set_seqnum (event, qtdemux->segment_seqnum);
+ }
+ gst_element_post_message (GST_ELEMENT_CAST (qtdemux), message);
+ gst_qtdemux_push_event (qtdemux, event);
+ } else {
+ GstMessage *message;
+ GstEvent *event;
+
+ /* For Reverse Playback */
+ GST_LOG_OBJECT (qtdemux, "Sending segment done, at start of segment");
+ message = gst_message_new_segment_done (GST_OBJECT_CAST (qtdemux),
+ GST_FORMAT_TIME, qtdemux->segment.start);
+ event = gst_event_new_segment_done (GST_FORMAT_TIME,
+ qtdemux->segment.start);
+ if (qtdemux->segment_seqnum != GST_SEQNUM_INVALID) {
+ gst_message_set_seqnum (message, qtdemux->segment_seqnum);
+ gst_event_set_seqnum (event, qtdemux->segment_seqnum);
+ }
+ gst_element_post_message (GST_ELEMENT_CAST (qtdemux), message);
+ gst_qtdemux_push_event (qtdemux, event);
+ }
+ } else {
+ GstEvent *event;
+
+ GST_LOG_OBJECT (qtdemux, "Sending EOS at end of segment");
+ event = gst_event_new_eos ();
+ if (qtdemux->segment_seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (event, qtdemux->segment_seqnum);
+ gst_qtdemux_push_event (qtdemux, event);
+ }
+ } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
+ GST_ELEMENT_FLOW_ERROR (qtdemux, ret);
+ gst_qtdemux_push_event (qtdemux, gst_event_new_eos ());
+ }
+ goto done;
+ }
+ }
+
+ /*
+ * has_next_entry
+ *
+ * Returns TRUE if there are samples left to be played.
+ */
+ static gboolean
+ has_next_entry (GstQTDemux * demux)
+ {
+ QtDemuxStream *stream;
+ gint i;
+
+ GST_DEBUG_OBJECT (demux, "Checking if there are samples not played yet");
+
+ for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
+ stream = QTDEMUX_NTH_STREAM (demux, i);
+
+ if (stream->sample_index == -1) {
+ stream->sample_index = 0;
+ stream->offset_in_sample = 0;
+ }
+
+ if (stream->sample_index >= stream->n_samples) {
+ GST_LOG_OBJECT (demux, "track-id %u samples exhausted", stream->track_id);
+ continue;
+ }
+ GST_DEBUG_OBJECT (demux, "Found a sample");
+ return TRUE;
+ }
+
+ GST_DEBUG_OBJECT (demux, "There wasn't any next sample");
+ return FALSE;
+ }
+
+ /*
+ * next_entry_size
+ *
+ * Returns the size of the first entry at the current offset.
+ * If -1, there are none (which means EOS or empty file).
+ */
+ static guint64
+ next_entry_size (GstQTDemux * demux)
+ {
+ QtDemuxStream *stream, *target_stream = NULL;
+ guint64 smalloffs = (guint64) - 1;
+ QtDemuxSample *sample;
+ gint i;
+
+ GST_LOG_OBJECT (demux, "Finding entry at offset %" G_GUINT64_FORMAT,
+ demux->offset);
+
+ for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
+ stream = QTDEMUX_NTH_STREAM (demux, i);
+
+ if (stream->sample_index == -1) {
+ stream->sample_index = 0;
+ stream->offset_in_sample = 0;
+ }
+
+ if (stream->sample_index >= stream->n_samples) {
+ GST_LOG_OBJECT (demux, "track-id %u samples exhausted", stream->track_id);
+ continue;
+ }
+
+ if (!qtdemux_parse_samples (demux, stream, stream->sample_index)) {
+ GST_LOG_OBJECT (demux, "Parsing of index %u from stbl atom failed!",
+ stream->sample_index);
+ return -1;
+ }
+
+ sample = &stream->samples[stream->sample_index];
+
+ GST_LOG_OBJECT (demux,
+ "Checking track-id %u (sample_index:%d / offset:%" G_GUINT64_FORMAT
+ " / size:%" G_GUINT32_FORMAT ")", stream->track_id,
+ stream->sample_index, sample->offset, sample->size);
+
+ if (((smalloffs == -1)
+ || (sample->offset < smalloffs)) && (sample->size)) {
+ smalloffs = sample->offset;
+ target_stream = stream;
+ }
+ }
+
+ if (!target_stream)
+ return -1;
+
+ GST_LOG_OBJECT (demux,
+ "track-id %u offset %" G_GUINT64_FORMAT " demux->offset :%"
+ G_GUINT64_FORMAT, target_stream->track_id, smalloffs, demux->offset);
+
+ stream = target_stream;
+ sample = &stream->samples[stream->sample_index];
+
+ if (sample->offset >= demux->offset) {
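+ /* count the bytes between the current offset and the sample start as
+ * data to drop, and include them in the size the caller must collect */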
+ demux->todrop = sample->offset - demux->offset;
+ return sample->size + demux->todrop;
+ }
+
+ GST_DEBUG_OBJECT (demux,
+ "There wasn't any entry at offset %" G_GUINT64_FORMAT, demux->offset);
+ return -1;
+ }
+
+ static void
+ gst_qtdemux_post_progress (GstQTDemux * demux, gint num, gint denom)
+ {
+ gint perc = (gint) ((gdouble) num * 100.0 / (gdouble) denom);
+
+ gst_element_post_message (GST_ELEMENT_CAST (demux),
+ gst_message_new_element (GST_OBJECT_CAST (demux),
+ gst_structure_new ("progress", "percent", G_TYPE_INT, perc, NULL)));
+ }
+
+ static gboolean
+ qtdemux_seek_offset (GstQTDemux * demux, guint64 offset)
+ {
+ GstEvent *event;
+ gboolean res = FALSE;
+
+ GST_DEBUG_OBJECT (demux, "Seeking to %" G_GUINT64_FORMAT, offset);
+
+ event =
+ gst_event_new_seek (1.0, GST_FORMAT_BYTES,
+ GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE, GST_SEEK_TYPE_SET, offset,
+ GST_SEEK_TYPE_NONE, -1);
+
+ /* store seqnum to drop flush events, they don't need to reach downstream */
+ demux->offset_seek_seqnum = gst_event_get_seqnum (event);
+ res = gst_pad_push_event (demux->sinkpad, event);
+ demux->offset_seek_seqnum = GST_SEQNUM_INVALID;
+
+ return res;
+ }
+
+ /* check for seekable upstream, above and beyond a mere query */
+ static void
+ gst_qtdemux_check_seekability (GstQTDemux * demux)
+ {
+ GstQuery *query;
+ gboolean seekable = FALSE;
+ gint64 start = -1, stop = -1;
+
+ if (demux->upstream_size)
+ return;
+
+ if (demux->upstream_format_is_time)
+ return;
+
+ query = gst_query_new_seeking (GST_FORMAT_BYTES);
+ if (!gst_pad_peer_query (demux->sinkpad, query)) {
+ GST_DEBUG_OBJECT (demux, "seeking query failed");
+ goto done;
+ }
+
+ gst_query_parse_seeking (query, NULL, &seekable, &start, &stop);
+
+ /* try harder to query upstream size if we didn't get it the first time */
+ if (seekable && stop == -1) {
+ GST_DEBUG_OBJECT (demux, "doing duration query to fix up unset stop");
+ gst_pad_peer_query_duration (demux->sinkpad, GST_FORMAT_BYTES, &stop);
+ }
+
+ /* if upstream doesn't know the size, it's likely that it's not seekable in
+ * practice even if it technically may be seekable */
+ if (seekable && (start != 0 || stop <= start)) {
+ GST_DEBUG_OBJECT (demux, "seekable but unknown start/stop -> disable");
+ seekable = FALSE;
+ }
+
+ done:
+ gst_query_unref (query);
+
+ GST_DEBUG_OBJECT (demux, "seekable: %d (%" G_GUINT64_FORMAT " - %"
+ G_GUINT64_FORMAT ")", seekable, start, stop);
+ demux->upstream_seekable = seekable;
+ demux->upstream_size = seekable ? stop : -1;
+ }
+
+ static void
+ gst_qtdemux_drop_data (GstQTDemux * demux, gint bytes)
+ {
+ g_return_if_fail (bytes <= demux->todrop);
+
+ GST_LOG_OBJECT (demux, "Dropping %d bytes", bytes);
+ gst_adapter_flush (demux->adapter, bytes);
+ demux->neededbytes -= bytes;
+ demux->offset += bytes;
+ demux->todrop -= bytes;
+ }
+
+ /* PUSH-MODE only: Send a segment, if not done already. */
+ static void
+ gst_qtdemux_check_send_pending_segment (GstQTDemux * demux)
+ {
+ if (G_UNLIKELY (demux->need_segment)) {
+ gint i;
+
+ if (!demux->upstream_format_is_time) {
+ gst_qtdemux_map_and_push_segments (demux, &demux->segment);
+ } else {
+ GstEvent *segment_event;
+ segment_event = gst_event_new_segment (&demux->segment);
+ if (demux->segment_seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (segment_event, demux->segment_seqnum);
+ gst_qtdemux_push_event (demux, segment_event);
+ }
+
+ demux->need_segment = FALSE;
+
+ /* clear to send tags on all streams */
+ for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (demux, i);
+ gst_qtdemux_push_tags (demux, stream);
+ if (CUR_STREAM (stream)->sparse) {
+ GST_INFO_OBJECT (demux, "Sending gap event on stream %d", i);
+ gst_pad_push_event (stream->pad,
+ gst_event_new_gap (stream->segment.position, GST_CLOCK_TIME_NONE));
+ }
+ }
+ }
+ }
+
+ /* Used for push mode only. */
+ static void
+ gst_qtdemux_send_gap_for_segment (GstQTDemux * demux,
+ QtDemuxStream * stream, gint segment_index, GstClockTime pos)
+ {
+ GstClockTime ts, dur;
+
+ ts = pos;
+ dur =
+ stream->segments[segment_index].duration - (pos -
+ stream->segments[segment_index].time);
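+   /* remaining duration of the empty segment from the given position */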
+ stream->time_position += dur;
+
+ /* Only gaps with a duration of at least one second are propagated.
+ * Same workaround as in pull mode.
+ * (See 2e45926a96ec5298c6ef29bf912e5e6a06dc3e0e) */
+ if (dur >= GST_SECOND) {
+ GstEvent *gap;
+ gap = gst_event_new_gap (ts, dur);
+
+ GST_DEBUG_OBJECT (stream->pad, "Pushing gap for empty "
+ "segment: %" GST_PTR_FORMAT, gap);
+ gst_pad_push_event (stream->pad, gap);
+ }
+ }
+
+ static GstFlowReturn
+ gst_qtdemux_chain (GstPad * sinkpad, GstObject * parent, GstBuffer * inbuf)
+ {
+ GstQTDemux *demux;
+
+ demux = GST_QTDEMUX (parent);
+
+ GST_DEBUG_OBJECT (demux,
+ "Received buffer pts:%" GST_TIME_FORMAT " dts:%" GST_TIME_FORMAT
+ " offset:%" G_GUINT64_FORMAT " size:%" G_GSIZE_FORMAT " demux offset:%"
+ G_GUINT64_FORMAT, GST_TIME_ARGS (GST_BUFFER_PTS (inbuf)),
+ GST_TIME_ARGS (GST_BUFFER_DTS (inbuf)), GST_BUFFER_OFFSET (inbuf),
+ gst_buffer_get_size (inbuf), demux->offset);
+
+ if (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_DISCONT)) {
+ gboolean is_gap_input = FALSE;
+ gint i;
+
+ GST_DEBUG_OBJECT (demux, "Got DISCONT, marking all streams as DISCONT");
+
+ for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
+ QTDEMUX_NTH_STREAM (demux, i)->discont = TRUE;
+ }
+
+ /* Check if we can land back on our feet in the case where upstream is
+ * handling the seeking/pushing of samples with gaps in between (like
+ * in the case of trick-mode DASH for example) */
+ if (demux->upstream_format_is_time
+ && GST_BUFFER_OFFSET (inbuf) != GST_BUFFER_OFFSET_NONE) {
+ for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
+ guint32 res;
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (demux, i);
+ GST_LOG_OBJECT (demux,
+ "track-id #%u , checking if offset %" G_GUINT64_FORMAT
+ " is a sample start", stream->track_id, GST_BUFFER_OFFSET (inbuf));
+ res =
+ gst_qtdemux_find_index_for_given_media_offset_linear (demux,
+ stream, GST_BUFFER_OFFSET (inbuf));
+ if (res != -1) {
+ QtDemuxSample *sample = &stream->samples[res];
+ GST_LOG_OBJECT (demux,
+ "Checking if sample %d from track-id %u is valid (offset:%"
+ G_GUINT64_FORMAT " size:%" G_GUINT32_FORMAT ")", res,
+ stream->track_id, sample->offset, sample->size);
+ if (sample->offset == GST_BUFFER_OFFSET (inbuf)) {
+ GST_LOG_OBJECT (demux,
+ "new buffer corresponds to a valid sample : %" G_GUINT32_FORMAT,
+ res);
+ is_gap_input = TRUE;
+ /* We can go back to standard playback mode */
+ demux->state = QTDEMUX_STATE_MOVIE;
+ /* Remember which sample this stream is at */
+ stream->sample_index = res;
+ /* Finally update all push-based values to the expected values */
+ demux->neededbytes = stream->samples[res].size;
+ demux->offset = GST_BUFFER_OFFSET (inbuf);
+ demux->mdatleft =
+ demux->mdatsize - demux->offset + demux->mdatoffset;
+ demux->todrop = 0;
+ }
+ }
+ }
+ if (!is_gap_input) {
+ GST_DEBUG_OBJECT (demux, "Resetting, actual DISCONT");
+ /* Reset state if it's a real discont */
+ demux->neededbytes = 16;
+ demux->state = QTDEMUX_STATE_INITIAL;
+ demux->offset = GST_BUFFER_OFFSET (inbuf);
+ gst_adapter_clear (demux->adapter);
+ }
+ }
+ /* Reverse fragmented playback, need to flush all we have before
+ * consuming a new fragment.
+      * The samples array has the timestamps calculated by accumulating the
+      * durations, but this won't work for reverse playback of fragments, as
+      * the timestamps of a subsequent fragment should be smaller than those
+      * of the previously received one. */
+ if (!is_gap_input && demux->fragmented && demux->segment.rate < 0) {
+ gst_qtdemux_process_adapter (demux, TRUE);
+ g_ptr_array_foreach (demux->active_streams,
+ (GFunc) gst_qtdemux_stream_flush_samples_data, NULL);
+ }
+ }
+
+ gst_adapter_push (demux->adapter, inbuf);
+
+ GST_DEBUG_OBJECT (demux,
+ "pushing in inbuf %p, neededbytes:%u, available:%" G_GSIZE_FORMAT, inbuf,
+ demux->neededbytes, gst_adapter_available (demux->adapter));
+
+ return gst_qtdemux_process_adapter (demux, FALSE);
+ }
+
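+ /* Push-mode state machine driving the switch below:
+  *   QTDEMUX_STATE_INITIAL     - peek an atom header, decide what comes next
+  *   QTDEMUX_STATE_HEADER      - parse a complete header atom (moov, moof,
+  *                               ftyp, uuid, sidx, ...)
+  *   QTDEMUX_STATE_BUFFER_MDAT - accumulate an mdat seen before the headers
+  *   QTDEMUX_STATE_MOVIE       - extract and push one sample per iteration */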
+ static GstFlowReturn
+ gst_qtdemux_process_adapter (GstQTDemux * demux, gboolean force)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ /* we never really mean to buffer that much */
+ if (demux->neededbytes == -1) {
+ goto eos;
+ }
+
+ while (((gst_adapter_available (demux->adapter)) >= demux->neededbytes) &&
+ (ret == GST_FLOW_OK || (ret == GST_FLOW_NOT_LINKED && force))) {
+
+ #ifndef GST_DISABLE_GST_DEBUG
+ {
+ guint64 discont_offset, distance_from_discont;
+
+ discont_offset = gst_adapter_offset_at_discont (demux->adapter);
+ distance_from_discont =
+ gst_adapter_distance_from_discont (demux->adapter);
+
+ GST_DEBUG_OBJECT (demux,
+ "state:%s , demux->neededbytes:%d, demux->offset:%" G_GUINT64_FORMAT
+ " adapter offset :%" G_GUINT64_FORMAT " (+ %" G_GUINT64_FORMAT
+ " bytes)", qt_demux_state_string (demux->state), demux->neededbytes,
+ demux->offset, discont_offset, distance_from_discont);
+ }
+ #endif
+
+ switch (demux->state) {
+ case QTDEMUX_STATE_INITIAL:{
+ const guint8 *data;
+ guint32 fourcc;
+ guint64 size;
+
+ gst_qtdemux_check_seekability (demux);
+
+ data = gst_adapter_map (demux->adapter, demux->neededbytes);
+
+ /* get fourcc/length, set neededbytes */
+ extract_initial_length_and_fourcc ((guint8 *) data, demux->neededbytes,
+ &size, &fourcc);
+ gst_adapter_unmap (demux->adapter);
+ data = NULL;
+ GST_DEBUG_OBJECT (demux, "Peeking found [%" GST_FOURCC_FORMAT "] "
+ "size: %" G_GUINT64_FORMAT, GST_FOURCC_ARGS (fourcc), size);
+ if (size == 0) {
+ GST_ELEMENT_ERROR (demux, STREAM, DEMUX,
+ (_("This file is invalid and cannot be played.")),
+ ("initial atom '%" GST_FOURCC_FORMAT "' has empty length",
+ GST_FOURCC_ARGS (fourcc)));
+ ret = GST_FLOW_ERROR;
+ break;
+ }
+ if (fourcc == FOURCC_mdat) {
+ gint next_entry = next_entry_size (demux);
+ if (QTDEMUX_N_STREAMS (demux) > 0 && (next_entry != -1
+ || !demux->fragmented)) {
+ /* we have the headers, start playback */
+ demux->state = QTDEMUX_STATE_MOVIE;
+ demux->neededbytes = next_entry;
+ demux->mdatleft = size;
+ demux->mdatsize = demux->mdatleft;
+ } else {
+ /* no headers yet, try to get them */
+ guint bs;
+ gboolean res;
+ guint64 old, target;
+
+ buffer_data:
+ old = demux->offset;
+ target = old + size;
+
+ /* try to jump over the atom with a seek */
+           /* only bother if it seems worth doing so,
+            * as this avoids possible upstream/server problems */
+ if (demux->upstream_seekable &&
+ demux->upstream_size > 4 * (1 << 20)) {
+ res = qtdemux_seek_offset (demux, target);
+ } else {
+ GST_DEBUG_OBJECT (demux, "skipping seek");
+ res = FALSE;
+ }
+
+ if (res) {
+ GST_DEBUG_OBJECT (demux, "seek success");
+             /* remember the offset of the first mdat so we can seek back to it
+ * after we have the headers */
+ if (fourcc == FOURCC_mdat && demux->first_mdat == -1) {
+ demux->first_mdat = old;
+ GST_DEBUG_OBJECT (demux, "first mdat at %" G_GUINT64_FORMAT,
+ demux->first_mdat);
+ }
+ /* seek worked, continue reading */
+ demux->offset = target;
+ demux->neededbytes = 16;
+ demux->state = QTDEMUX_STATE_INITIAL;
+ } else {
+ /* seek failed, need to buffer */
+ demux->offset = old;
+ GST_DEBUG_OBJECT (demux, "seek failed/skipped");
+             /* there may be multiple mdat (or the like) buffers */
+ /* sanity check */
+ if (demux->mdatbuffer)
+ bs = gst_buffer_get_size (demux->mdatbuffer);
+ else
+ bs = 0;
+ if (size + bs > 10 * (1 << 20))
+ goto no_moov;
+ demux->state = QTDEMUX_STATE_BUFFER_MDAT;
+ demux->neededbytes = size;
+ if (!demux->mdatbuffer)
+ demux->mdatoffset = demux->offset;
+ }
+ }
+ } else if (G_UNLIKELY (size > QTDEMUX_MAX_ATOM_SIZE)) {
+ GST_ELEMENT_ERROR (demux, STREAM, DEMUX,
+ (_("This file is invalid and cannot be played.")),
+ ("atom %" GST_FOURCC_FORMAT " has bogus size %" G_GUINT64_FORMAT,
+ GST_FOURCC_ARGS (fourcc), size));
+ ret = GST_FLOW_ERROR;
+ break;
+ } else {
+ /* this means we already started buffering and still no moov header,
+ * let's continue buffering everything till we get moov */
+ if (demux->mdatbuffer && !(fourcc == FOURCC_moov
+ || fourcc == FOURCC_moof))
+ goto buffer_data;
+ demux->neededbytes = size;
+ demux->state = QTDEMUX_STATE_HEADER;
+ }
+ break;
+ }
+ case QTDEMUX_STATE_HEADER:{
+ const guint8 *data;
+ guint32 fourcc;
+
+ GST_DEBUG_OBJECT (demux, "In header");
+
+ data = gst_adapter_map (demux->adapter, demux->neededbytes);
+
+ /* parse the header */
+ extract_initial_length_and_fourcc (data, demux->neededbytes, NULL,
+ &fourcc);
+ if (fourcc == FOURCC_moov) {
+           /* in a usual fragmented setup we could try to scan for more
+            * and end up at the moov (after mdat) again */
+ if (demux->got_moov && QTDEMUX_N_STREAMS (demux) > 0 &&
+ (!demux->fragmented
+ || demux->last_moov_offset == demux->offset)) {
+ GST_DEBUG_OBJECT (demux,
+ "Skipping moov atom as we have (this) one already");
+ } else {
+ GST_DEBUG_OBJECT (demux, "Parsing [moov]");
+
+ if (demux->got_moov && demux->fragmented) {
+ GST_DEBUG_OBJECT (demux,
+ "Got a second moov, clean up data from old one");
+ if (demux->moov_node_compressed) {
+ g_node_destroy (demux->moov_node_compressed);
+ if (demux->moov_node)
+ g_free (demux->moov_node->data);
+ }
+ demux->moov_node_compressed = NULL;
+ if (demux->moov_node)
+ g_node_destroy (demux->moov_node);
+ demux->moov_node = NULL;
+ }
+
+ demux->last_moov_offset = demux->offset;
+
+ /* Update streams with new moov */
+ gst_qtdemux_stream_concat (demux,
+ demux->old_streams, demux->active_streams);
+
+ qtdemux_parse_moov (demux, data, demux->neededbytes);
+ qtdemux_node_dump (demux, demux->moov_node);
+ qtdemux_parse_tree (demux);
+ qtdemux_prepare_streams (demux);
+ QTDEMUX_EXPOSE_LOCK (demux);
+ qtdemux_expose_streams (demux);
+ QTDEMUX_EXPOSE_UNLOCK (demux);
+
+ demux->got_moov = TRUE;
+
+ gst_qtdemux_check_send_pending_segment (demux);
+
+ if (demux->moov_node_compressed) {
+ g_node_destroy (demux->moov_node_compressed);
+ g_free (demux->moov_node->data);
+ }
+ demux->moov_node_compressed = NULL;
+ g_node_destroy (demux->moov_node);
+ demux->moov_node = NULL;
+ GST_DEBUG_OBJECT (demux, "Finished parsing the header");
+ }
+ } else if (fourcc == FOURCC_moof) {
+ if ((demux->got_moov || demux->media_caps) && demux->fragmented) {
+ guint64 dist = 0;
+ GstClockTime prev_pts;
+ guint64 prev_offset;
+ guint64 adapter_discont_offset, adapter_discont_dist;
+
+ GST_DEBUG_OBJECT (demux, "Parsing [moof]");
+
+ /*
+ * The timestamp of the moof buffer is relevant as some scenarios
+ * won't have the initial timestamp in the atoms. Whenever a new
+ * buffer has started, we get that buffer's PTS and use it as a base
+ * timestamp for the trun entries.
+ *
+ * To keep track of the current buffer timestamp and starting point
+ * we use gst_adapter_prev_pts that gives us the PTS and the distance
+ * from the beginning of the buffer, with the distance and demux->offset
+ * we know if it is still the same buffer or not.
+ */
+ prev_pts = gst_adapter_prev_pts (demux->adapter, &dist);
+ prev_offset = demux->offset - dist;
+ if (demux->fragment_start_offset == -1
+ || prev_offset > demux->fragment_start_offset) {
+ demux->fragment_start_offset = prev_offset;
+ demux->fragment_start = prev_pts;
+ GST_DEBUG_OBJECT (demux,
+ "New fragment start found at: %" G_GUINT64_FORMAT " : %"
+ GST_TIME_FORMAT, demux->fragment_start_offset,
+ GST_TIME_ARGS (demux->fragment_start));
+ }
+
+ /* We can't use prev_offset() here because this would require
+ * upstream to set consistent and correct offsets on all buffers
+ * since the discont. Nothing ever did that in the past and we
+ * would break backwards compatibility here then.
+ * Instead take the offset we had at the last discont and count
+ * the bytes from there. This works with old code as there would
+ * be no discont between moov and moof, and also works with
+ * adaptivedemux which correctly sets offset and will set the
+ * DISCONT flag accordingly when needed.
+ *
+ * We also only do this for upstream TIME segments as otherwise
+ * there are potential backwards compatibility problems with
+ * seeking in PUSH mode and upstream providing inconsistent
+ * timestamps. */
+ adapter_discont_offset =
+ gst_adapter_offset_at_discont (demux->adapter);
+ adapter_discont_dist =
+ gst_adapter_distance_from_discont (demux->adapter);
+
+ GST_DEBUG_OBJECT (demux,
+ "demux offset %" G_GUINT64_FORMAT " adapter offset %"
+ G_GUINT64_FORMAT " (+ %" G_GUINT64_FORMAT " bytes)",
+ demux->offset, adapter_discont_offset, adapter_discont_dist);
+
+ if (demux->upstream_format_is_time) {
+ demux->moof_offset = adapter_discont_offset;
+ if (demux->moof_offset != GST_BUFFER_OFFSET_NONE)
+ demux->moof_offset += adapter_discont_dist;
+ if (demux->moof_offset == GST_BUFFER_OFFSET_NONE)
+ demux->moof_offset = demux->offset;
+ } else {
+ demux->moof_offset = demux->offset;
+ }
+
+ if (!qtdemux_parse_moof (demux, data, demux->neededbytes,
+ demux->moof_offset, NULL)) {
+ gst_adapter_unmap (demux->adapter);
+ ret = GST_FLOW_ERROR;
+ goto done;
+ }
+
+ /* in MSS we need to expose the pads after the first moof as we won't get a moov */
+ if (demux->mss_mode && !demux->exposed) {
+ QTDEMUX_EXPOSE_LOCK (demux);
+ qtdemux_expose_streams (demux);
+ QTDEMUX_EXPOSE_UNLOCK (demux);
+ }
+
+ gst_qtdemux_check_send_pending_segment (demux);
+ } else {
+ GST_DEBUG_OBJECT (demux, "Discarding [moof]");
+ }
+ } else if (fourcc == FOURCC_ftyp) {
+ GST_DEBUG_OBJECT (demux, "Parsing [ftyp]");
+ qtdemux_parse_ftyp (demux, data, demux->neededbytes);
+ } else if (fourcc == FOURCC_uuid) {
+ GST_DEBUG_OBJECT (demux, "Parsing [uuid]");
+ qtdemux_parse_uuid (demux, data, demux->neededbytes);
+ } else if (fourcc == FOURCC_sidx) {
+ GST_DEBUG_OBJECT (demux, "Parsing [sidx]");
+ qtdemux_parse_sidx (demux, data, demux->neededbytes);
+ } else {
+ switch (fourcc) {
+ case FOURCC_styp:
+             /* [styp] is like a [ftyp], but in a fragment header. We ignore it for now.
+ * FALLTHROUGH */
+ case FOURCC_skip:
+ case FOURCC_free:
+ /* [free] and [skip] are padding atoms */
+ GST_DEBUG_OBJECT (demux,
+ "Skipping fourcc while parsing header : %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (fourcc));
+ break;
+ default:
+ GST_WARNING_OBJECT (demux,
+ "Unknown fourcc while parsing header : %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (fourcc));
+ /* Let's jump that one and go back to initial state */
+ break;
+ }
+ }
+ gst_adapter_unmap (demux->adapter);
+ data = NULL;
+
+ if (demux->mdatbuffer && QTDEMUX_N_STREAMS (demux)) {
+ gsize remaining_data_size = 0;
+
+ /* the mdat was before the header */
+ GST_DEBUG_OBJECT (demux, "We have n_streams:%d and mdatbuffer:%p",
+ QTDEMUX_N_STREAMS (demux), demux->mdatbuffer);
+ /* restore our adapter/offset view of things with upstream;
+ * put preceding buffered data ahead of current moov data.
+           * This should also handle evil mdat, moov, mdat cases and the like */
+ gst_adapter_flush (demux->adapter, demux->neededbytes);
+
+ /* Store any remaining data after the mdat for later usage */
+ remaining_data_size = gst_adapter_available (demux->adapter);
+ if (remaining_data_size > 0) {
+ g_assert (demux->restoredata_buffer == NULL);
+ demux->restoredata_buffer =
+ gst_adapter_take_buffer (demux->adapter, remaining_data_size);
+ demux->restoredata_offset = demux->offset + demux->neededbytes;
+ GST_DEBUG_OBJECT (demux,
+ "Stored %" G_GSIZE_FORMAT " post mdat bytes at offset %"
+ G_GUINT64_FORMAT, remaining_data_size,
+ demux->restoredata_offset);
+ }
+
+ gst_adapter_push (demux->adapter, demux->mdatbuffer);
+ demux->mdatbuffer = NULL;
+ demux->offset = demux->mdatoffset;
+ demux->neededbytes = next_entry_size (demux);
+ demux->state = QTDEMUX_STATE_MOVIE;
+ demux->mdatleft = gst_adapter_available (demux->adapter);
+ demux->mdatsize = demux->mdatleft;
+ } else {
+ GST_DEBUG_OBJECT (demux, "Carrying on normally");
+ gst_adapter_flush (demux->adapter, demux->neededbytes);
+
+ /* only go back to the mdat if there are samples to play */
+ if (demux->got_moov && demux->first_mdat != -1
+ && has_next_entry (demux)) {
+ gboolean res;
+
+ /* we need to seek back */
+ res = qtdemux_seek_offset (demux, demux->first_mdat);
+ if (res) {
+ demux->offset = demux->first_mdat;
+ } else {
+ GST_DEBUG_OBJECT (demux, "Seek back failed");
+ }
+ } else {
+ demux->offset += demux->neededbytes;
+ }
+ demux->neededbytes = 16;
+ demux->state = QTDEMUX_STATE_INITIAL;
+ }
+
+ break;
+ }
+ case QTDEMUX_STATE_BUFFER_MDAT:{
+ GstBuffer *buf;
+ guint8 fourcc[4];
+
+ GST_DEBUG_OBJECT (demux, "Got our buffer at offset %" G_GUINT64_FORMAT,
+ demux->offset);
+ buf = gst_adapter_take_buffer (demux->adapter, demux->neededbytes);
+ gst_buffer_extract (buf, 0, fourcc, 4);
+ GST_DEBUG_OBJECT (demux, "mdatbuffer starts with %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (QT_FOURCC (fourcc)));
+ if (demux->mdatbuffer)
+ demux->mdatbuffer = gst_buffer_append (demux->mdatbuffer, buf);
+ else
+ demux->mdatbuffer = buf;
+ demux->offset += demux->neededbytes;
+ demux->neededbytes = 16;
+ demux->state = QTDEMUX_STATE_INITIAL;
+ gst_qtdemux_post_progress (demux, 1, 1);
+
+ break;
+ }
+ case QTDEMUX_STATE_MOVIE:{
+ QtDemuxStream *stream = NULL;
+ QtDemuxSample *sample;
+ GstClockTime dts, pts, duration;
+ gboolean keyframe;
+ gint i;
+
+ GST_DEBUG_OBJECT (demux,
+ "BEGIN // in MOVIE for offset %" G_GUINT64_FORMAT, demux->offset);
+
+ if (demux->fragmented) {
+ GST_DEBUG_OBJECT (demux, "mdat remaining %" G_GUINT64_FORMAT,
+ demux->mdatleft);
+ if (G_LIKELY (demux->todrop < demux->mdatleft)) {
+ /* if needed data starts within this atom,
+ * then it should not exceed this atom */
+ if (G_UNLIKELY (demux->neededbytes > demux->mdatleft)) {
+ GST_ELEMENT_ERROR (demux, STREAM, DEMUX,
+ (_("This file is invalid and cannot be played.")),
+ ("sample data crosses atom boundary"));
+ ret = GST_FLOW_ERROR;
+ break;
+ }
+ demux->mdatleft -= demux->neededbytes;
+ } else {
+ GST_DEBUG_OBJECT (demux, "data atom emptied; resuming atom scan");
+ /* so we are dropping more than left in this atom */
+ gst_qtdemux_drop_data (demux, demux->mdatleft);
+ demux->mdatleft = 0;
+
+ /* need to resume atom parsing so we do not miss any other pieces */
+ demux->state = QTDEMUX_STATE_INITIAL;
+ demux->neededbytes = 16;
+
+ /* check if there was any stored post mdat data from previous buffers */
+ if (demux->restoredata_buffer) {
+ g_assert (gst_adapter_available (demux->adapter) == 0);
+
+ gst_adapter_push (demux->adapter, demux->restoredata_buffer);
+ demux->restoredata_buffer = NULL;
+ demux->offset = demux->restoredata_offset;
+ }
+
+ break;
+ }
+ }
+
+ if (demux->todrop) {
+ if (demux->cenc_aux_info_offset > 0) {
+ GstByteReader br;
+ const guint8 *data;
+
+ GST_DEBUG_OBJECT (demux, "parsing cenc auxiliary info");
+ data = gst_adapter_map (demux->adapter, demux->todrop);
+ gst_byte_reader_init (&br, data + 8, demux->todrop);
+ if (!qtdemux_parse_cenc_aux_info (demux,
+ QTDEMUX_NTH_STREAM (demux, 0), &br,
+ demux->cenc_aux_info_sizes, demux->cenc_aux_sample_count)) {
+ GST_ERROR_OBJECT (demux, "failed to parse cenc auxiliary info");
+ ret = GST_FLOW_ERROR;
+ gst_adapter_unmap (demux->adapter);
+ g_free (demux->cenc_aux_info_sizes);
+ demux->cenc_aux_info_sizes = NULL;
+ goto done;
+ }
+ demux->cenc_aux_info_offset = 0;
+ g_free (demux->cenc_aux_info_sizes);
+ demux->cenc_aux_info_sizes = NULL;
+ gst_adapter_unmap (demux->adapter);
+ }
+ gst_qtdemux_drop_data (demux, demux->todrop);
+ }
+
+ /* first buffer? */
+ /* initial newsegment sent here after having added pads,
+          * possibly others in sink_event */
+ gst_qtdemux_check_send_pending_segment (demux);
+
+ /* Figure out which stream this packet belongs to */
+ for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
+ stream = QTDEMUX_NTH_STREAM (demux, i);
+ if (stream->sample_index >= stream->n_samples) {
+             /* reset; caught below by the G_UNLIKELY (stream == NULL) check */
+ stream = NULL;
+ continue;
+ }
+ GST_LOG_OBJECT (demux,
+ "Checking track-id %u (sample_index:%d / offset:%"
+ G_GUINT64_FORMAT " / size:%d)", stream->track_id,
+ stream->sample_index,
+ stream->samples[stream->sample_index].offset,
+ stream->samples[stream->sample_index].size);
+
+ if (stream->samples[stream->sample_index].offset == demux->offset)
+ break;
+ }
+
+ if (G_UNLIKELY (stream == NULL))
+ goto unknown_stream;
+
+ gst_qtdemux_stream_check_and_change_stsd_index (demux, stream);
+
+ if (stream->new_caps) {
+ gst_qtdemux_configure_stream (demux, stream);
+ }
+
+ /* Put data in a buffer, set timestamps, caps, ... */
+ sample = &stream->samples[stream->sample_index];
+
+ if (G_LIKELY (!(STREAM_IS_EOS (stream)))) {
+ GST_DEBUG_OBJECT (demux, "stream : %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (CUR_STREAM (stream)->fourcc));
+
+ dts = QTSAMPLE_DTS (stream, sample);
+ pts = QTSAMPLE_PTS (stream, sample);
+ duration = QTSAMPLE_DUR_DTS (stream, sample, dts);
+ keyframe = QTSAMPLE_KEYFRAME (stream, sample);
+
+ /* check for segment end */
+ if (G_UNLIKELY (demux->segment.stop != -1
+ && demux->segment.stop <= pts && stream->on_keyframe)
+ && !(demux->upstream_format_is_time && demux->segment.rate < 0)) {
+ GST_DEBUG_OBJECT (demux, "we reached the end of our segment.");
+ stream->time_position = GST_CLOCK_TIME_NONE; /* this means EOS */
+
+ /* skip this data, stream is EOS */
+ gst_adapter_flush (demux->adapter, demux->neededbytes);
+ demux->offset += demux->neededbytes;
+
+ /* check if all streams are eos */
+ ret = GST_FLOW_EOS;
+ for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
+ if (!STREAM_IS_EOS (QTDEMUX_NTH_STREAM (demux, i))) {
+ ret = GST_FLOW_OK;
+ break;
+ }
+ }
+ } else {
+ GstBuffer *outbuf;
+
+ outbuf =
+ gst_adapter_take_buffer (demux->adapter, demux->neededbytes);
+
+ /* FIXME: should either be an assert or a plain check */
+ g_return_val_if_fail (outbuf != NULL, GST_FLOW_ERROR);
+
+ ret = gst_qtdemux_decorate_and_push_buffer (demux, stream, outbuf,
+ dts, pts, duration, keyframe, dts, demux->offset);
+ }
+
+ /* combine flows */
+ GST_OBJECT_LOCK (demux);
+ ret = gst_qtdemux_combine_flows (demux, stream, ret);
+ GST_OBJECT_UNLOCK (demux);
+ } else {
+ /* skip this data, stream is EOS */
+ gst_adapter_flush (demux->adapter, demux->neededbytes);
+ }
+
+ stream->sample_index++;
+ stream->offset_in_sample = 0;
+
+ /* update current offset and figure out size of next buffer */
+ GST_LOG_OBJECT (demux, "increasing offset %" G_GUINT64_FORMAT " by %u",
+ demux->offset, demux->neededbytes);
+ demux->offset += demux->neededbytes;
+ GST_LOG_OBJECT (demux, "offset is now %" G_GUINT64_FORMAT,
+ demux->offset);
+
+ if (ret == GST_FLOW_EOS) {
+ GST_DEBUG_OBJECT (demux, "All streams are EOS, signal upstream");
+ demux->neededbytes = -1;
+ goto eos;
+ }
+
+ if ((demux->neededbytes = next_entry_size (demux)) == -1) {
+ if (demux->fragmented) {
+ GST_DEBUG_OBJECT (demux, "(temporarily) out of fragmented samples");
+ /* there may be more to follow, only finish this atom */
+ demux->todrop = demux->mdatleft;
+ demux->neededbytes = demux->todrop;
+ break;
+ }
+ goto eos;
+ }
+ if (ret != GST_FLOW_OK && ret != GST_FLOW_NOT_LINKED) {
+ goto non_ok_unlinked_flow;
+ }
+ break;
+ }
+ default:
+ goto invalid_state;
+ }
+ }
+
+   /* when buffering movie data, at least show the user something is happening */
+ if (ret == GST_FLOW_OK && demux->state == QTDEMUX_STATE_BUFFER_MDAT &&
+ gst_adapter_available (demux->adapter) <= demux->neededbytes) {
+ gst_qtdemux_post_progress (demux, gst_adapter_available (demux->adapter),
+ demux->neededbytes);
+ }
+ done:
+
+ return ret;
+
+ /* ERRORS */
+ non_ok_unlinked_flow:
+ {
+ GST_DEBUG_OBJECT (demux, "Stopping, combined return flow %s",
+ gst_flow_get_name (ret));
+ return ret;
+ }
+ unknown_stream:
+ {
+ GST_ELEMENT_ERROR (demux, STREAM, FAILED, (NULL), ("unknown stream found"));
+ ret = GST_FLOW_ERROR;
+ goto done;
+ }
+ eos:
+ {
+ GST_DEBUG_OBJECT (demux, "no next entry, EOS");
+ ret = GST_FLOW_EOS;
+ goto done;
+ }
+ invalid_state:
+ {
+ GST_ELEMENT_ERROR (demux, STREAM, FAILED,
+ (NULL), ("qtdemuxer invalid state %d", demux->state));
+ ret = GST_FLOW_ERROR;
+ goto done;
+ }
+ no_moov:
+ {
+ GST_ELEMENT_ERROR (demux, STREAM, FAILED,
+ (NULL), ("no 'moov' atom within the first 10 MB"));
+ ret = GST_FLOW_ERROR;
+ goto done;
+ }
+ }
+
+ static gboolean
+ qtdemux_sink_activate (GstPad * sinkpad, GstObject * parent)
+ {
+ GstQuery *query;
+ gboolean pull_mode;
+
+ query = gst_query_new_scheduling ();
+
+ if (!gst_pad_peer_query (sinkpad, query)) {
+ gst_query_unref (query);
+ goto activate_push;
+ }
+
+ pull_mode = gst_query_has_scheduling_mode_with_flags (query,
+ GST_PAD_MODE_PULL, GST_SCHEDULING_FLAG_SEEKABLE);
+ gst_query_unref (query);
+
+ if (!pull_mode)
+ goto activate_push;
+
+ GST_DEBUG_OBJECT (sinkpad, "activating pull");
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PULL, TRUE);
+
+ activate_push:
+ {
+ GST_DEBUG_OBJECT (sinkpad, "activating push");
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PUSH, TRUE);
+ }
+ }
+
+ static gboolean
+ qtdemux_sink_activate_mode (GstPad * sinkpad, GstObject * parent,
+ GstPadMode mode, gboolean active)
+ {
+ gboolean res;
+ GstQTDemux *demux = GST_QTDEMUX (parent);
+
+ switch (mode) {
+ case GST_PAD_MODE_PUSH:
+ demux->pullbased = FALSE;
+ res = TRUE;
+ break;
+ case GST_PAD_MODE_PULL:
+ if (active) {
+ demux->pullbased = TRUE;
+ res = gst_pad_start_task (sinkpad, (GstTaskFunction) gst_qtdemux_loop,
+ sinkpad, NULL);
+ } else {
+ res = gst_pad_stop_task (sinkpad);
+ }
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+ return res;
+ }
+
+ #ifdef HAVE_ZLIB
+ static void *
+ qtdemux_inflate (void *z_buffer, guint z_length, guint * length)
+ {
+ guint8 *buffer;
+ z_stream z;
+ int ret;
+
+ memset (&z, 0, sizeof (z));
+ z.zalloc = NULL;
+ z.zfree = NULL;
+ z.opaque = NULL;
+
+ if ((ret = inflateInit (&z)) != Z_OK) {
+ GST_ERROR ("inflateInit() returned %d", ret);
+ return NULL;
+ }
+
+ z.next_in = z_buffer;
+ z.avail_in = z_length;
+
+ buffer = (guint8 *) g_malloc (*length);
+ z.avail_out = *length;
+ z.next_out = (Bytef *) buffer;
+ do {
+ ret = inflate (&z, Z_NO_FLUSH);
+ if (ret == Z_STREAM_END) {
+ break;
+ } else if (ret != Z_OK) {
+ GST_WARNING ("inflate() returned %d", ret);
+ break;
+ }
+
+ *length += 4096;
+ buffer = (guint8 *) g_realloc (buffer, *length);
+ z.next_out = (Bytef *) (buffer + z.total_out);
+ z.avail_out += 4096;
+ } while (z.avail_in > 0);
+
+ if (ret != Z_STREAM_END) {
+ g_free (buffer);
+ buffer = NULL;
+ *length = 0;
+ } else {
+ *length = z.total_out;
+ }
+
+ inflateEnd (&z);
+
+ return buffer;
+ }
+ #endif /* HAVE_ZLIB */
+
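+ /* Parse a moov atom into a GNode tree. A compressed header is stored as
+  *   moov -> cmov -> { dcom (compression method fourcc),
+  *                     cmvd (uncompressed size + compressed payload) }
+  * and is inflated below before being parsed as the real moov. */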
+ static gboolean
+ qtdemux_parse_moov (GstQTDemux * qtdemux, const guint8 * buffer, guint length)
+ {
+ GNode *cmov;
+
+ qtdemux->moov_node = g_node_new ((guint8 *) buffer);
+
+ /* counts as header data */
+ qtdemux->header_size += length;
+
+ GST_DEBUG_OBJECT (qtdemux, "parsing 'moov' atom");
+ qtdemux_parse_node (qtdemux, qtdemux->moov_node, buffer, length);
+
+ cmov = qtdemux_tree_get_child_by_type (qtdemux->moov_node, FOURCC_cmov);
+ if (cmov) {
+ guint32 method;
+ GNode *dcom;
+ GNode *cmvd;
+ guint32 dcom_len;
+
+ dcom = qtdemux_tree_get_child_by_type (cmov, FOURCC_dcom);
+ cmvd = qtdemux_tree_get_child_by_type (cmov, FOURCC_cmvd);
+ if (dcom == NULL || cmvd == NULL)
+ goto invalid_compression;
+
+ dcom_len = QT_UINT32 (dcom->data);
+ if (dcom_len < 12)
+ goto invalid_compression;
+
+ method = QT_FOURCC ((guint8 *) dcom->data + 8);
+ switch (method) {
+ #ifdef HAVE_ZLIB
+ case FOURCC_zlib:{
+ guint uncompressed_length;
+ guint compressed_length;
+ guint8 *buf;
+ guint32 cmvd_len;
+
+ cmvd_len = QT_UINT32 ((guint8 *) cmvd->data);
+ if (cmvd_len < 12)
+ goto invalid_compression;
+
+ uncompressed_length = QT_UINT32 ((guint8 *) cmvd->data + 8);
+ compressed_length = cmvd_len - 12;
+ GST_LOG ("length = %u", uncompressed_length);
+
+ buf =
+ (guint8 *) qtdemux_inflate ((guint8 *) cmvd->data + 12,
+ compressed_length, &uncompressed_length);
+
+ if (buf) {
+ qtdemux->moov_node_compressed = qtdemux->moov_node;
+ qtdemux->moov_node = g_node_new (buf);
+
+ qtdemux_parse_node (qtdemux, qtdemux->moov_node, buf,
+ uncompressed_length);
+ }
+ break;
+ }
+ #endif /* HAVE_ZLIB */
+ default:
+ GST_WARNING_OBJECT (qtdemux, "unknown or unhandled header compression "
+ "type %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS (method));
+ break;
+ }
+ }
+ return TRUE;
+
+ /* ERRORS */
+ invalid_compression:
+ {
+ GST_ERROR_OBJECT (qtdemux, "invalid compressed header");
+ return FALSE;
+ }
+ }
+
+ static gboolean
+ qtdemux_parse_container (GstQTDemux * qtdemux, GNode * node, const guint8 * buf,
+ const guint8 * end)
+ {
+ while (G_UNLIKELY (buf < end)) {
+ GNode *child;
+ guint32 len;
+
+ if (G_UNLIKELY (buf + 4 > end)) {
+ GST_LOG_OBJECT (qtdemux, "buffer overrun");
+ break;
+ }
+ len = QT_UINT32 (buf);
+ if (G_UNLIKELY (len == 0)) {
+ GST_LOG_OBJECT (qtdemux, "empty container");
+ break;
+ }
+ if (G_UNLIKELY (len < 8)) {
+ GST_WARNING_OBJECT (qtdemux, "length too short (%d < 8)", len);
+ break;
+ }
+ if (G_UNLIKELY (len > (end - buf))) {
+ GST_WARNING_OBJECT (qtdemux, "length too long (%d > %d)", len,
+ (gint) (end - buf));
+ break;
+ }
+
+ child = g_node_new ((guint8 *) buf);
+ g_node_append (node, child);
+ GST_LOG_OBJECT (qtdemux, "adding new node of len %d", len);
+ qtdemux_parse_node (qtdemux, child, buf, len);
+
+ buf += len;
+ }
+ return TRUE;
+ }
+
+ static gboolean
+ qtdemux_parse_theora_extension (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ GNode * xdxt)
+ {
+ int len = QT_UINT32 (xdxt->data);
+ guint8 *buf = xdxt->data;
+ guint8 *end = buf + len;
+ GstBuffer *buffer;
+
+ /* skip size and type */
+ buf += 8;
+ end -= 8;
+
+ while (buf < end) {
+ gint size;
+ guint32 type;
+
+ size = QT_UINT32 (buf);
+ type = QT_FOURCC (buf + 4);
+
+     GST_LOG_OBJECT (qtdemux, "buf %p end %p", buf, end);
+
+ if (buf + size > end || size <= 0)
+ break;
+
+ buf += 8;
+ size -= 8;
+
+ GST_WARNING_OBJECT (qtdemux, "have cookie %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (type));
+
+ switch (type) {
+ case FOURCC_tCtH:
+ buffer = gst_buffer_new_and_alloc (size);
+ gst_buffer_fill (buffer, 0, buf, size);
+ stream->buffers = g_slist_append (stream->buffers, buffer);
+ GST_LOG_OBJECT (qtdemux, "parsing theora header");
+ break;
+ case FOURCC_tCt_:
+ buffer = gst_buffer_new_and_alloc (size);
+ gst_buffer_fill (buffer, 0, buf, size);
+ stream->buffers = g_slist_append (stream->buffers, buffer);
+ GST_LOG_OBJECT (qtdemux, "parsing theora comment");
+ break;
+ case FOURCC_tCtC:
+ buffer = gst_buffer_new_and_alloc (size);
+ gst_buffer_fill (buffer, 0, buf, size);
+ stream->buffers = g_slist_append (stream->buffers, buffer);
+ GST_LOG_OBJECT (qtdemux, "parsing theora codebook");
+ break;
+ default:
+ GST_WARNING_OBJECT (qtdemux,
+ "unknown theora cookie %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (type));
+ break;
+ }
+ buf += size;
+ }
+ return TRUE;
+ }
+
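+ /* Parse a single atom into 'node'; container atoms recurse via
+  * qtdemux_parse_container(), which calls back into this function for
+  * every child, building the complete atom tree. */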
+ static gboolean
+ qtdemux_parse_node (GstQTDemux * qtdemux, GNode * node, const guint8 * buffer,
+ guint length)
+ {
+ guint32 fourcc = 0;
+ guint32 node_length = 0;
+ const QtNodeType *type;
+ const guint8 *end;
+
+ GST_LOG_OBJECT (qtdemux, "qtdemux_parse buffer %p length %u", buffer, length);
+
+ if (G_UNLIKELY (length < 8))
+ goto not_enough_data;
+
+ node_length = QT_UINT32 (buffer);
+ fourcc = QT_FOURCC (buffer + 4);
+
+ /* ignore empty nodes */
+ if (G_UNLIKELY (fourcc == 0 || node_length == 8))
+ return TRUE;
+
+ type = qtdemux_type_get (fourcc);
+
+ end = buffer + length;
+
+ GST_LOG_OBJECT (qtdemux,
+ "parsing '%" GST_FOURCC_FORMAT "', length=%u, name '%s'",
+ GST_FOURCC_ARGS (fourcc), node_length, type->name);
+
+ if (node_length > length)
+ goto broken_atom_size;
+
+ if (type->flags & QT_FLAG_CONTAINER) {
+ qtdemux_parse_container (qtdemux, node, buffer + 8, end);
+ } else {
+ switch (fourcc) {
+ case FOURCC_stsd:
+ {
+ if (node_length < 20) {
+ GST_LOG_OBJECT (qtdemux, "skipping small stsd box");
+ break;
+ }
+ GST_DEBUG_OBJECT (qtdemux,
+ "parsing stsd (sample table, sample description) atom");
+ /* Skip over 8 byte atom hdr + 1 byte version, 3 bytes flags, 4 byte num_entries */
+ qtdemux_parse_container (qtdemux, node, buffer + 16, end);
+ break;
+ }
+ case FOURCC_mp4a:
+ case FOURCC_alac:
+ case FOURCC_fLaC:
+ case FOURCC_aavd:
+ {
+ guint32 version;
+ guint32 offset;
+ guint min_size;
+
+         /* also read alac (or whatever) instead of mp4a in the following,
+ * since a similar layout is used in other cases as well */
+ if (fourcc == FOURCC_mp4a)
+ min_size = 20;
+ else if (fourcc == FOURCC_fLaC)
+ min_size = 86;
+ else
+ min_size = 40;
+
+ /* There are two things we might encounter here: a true mp4a atom, and
+ an mp4a entry in an stsd atom. The latter is what we're interested
+ in, and it looks like an atom, but isn't really one. The true mp4a
+ atom is short, so we detect it based on length here. */
+ if (length < min_size) {
+ GST_LOG_OBJECT (qtdemux, "skipping small %" GST_FOURCC_FORMAT " box",
+ GST_FOURCC_ARGS (fourcc));
+ break;
+ }
+
+ /* 'version' here is the sound sample description version. Types 0 and
+ 1 are documented in the QTFF reference, but type 2 is not: it's
+ described in Apple header files instead (struct SoundDescriptionV2
+ in Movies.h) */
+ version = QT_UINT16 (buffer + 16);
+
+ GST_DEBUG_OBJECT (qtdemux, "%" GST_FOURCC_FORMAT " version 0x%08x",
+ GST_FOURCC_ARGS (fourcc), version);
+
+ /* parse any esds descriptors */
+ switch (version) {
+ case 0:
+ offset = 0x24;
+ break;
+ case 1:
+ offset = 0x34;
+ break;
+ case 2:
+ offset = 0x48;
+ break;
+ default:
+ GST_WARNING_OBJECT (qtdemux,
+ "unhandled %" GST_FOURCC_FORMAT " version 0x%08x",
+ GST_FOURCC_ARGS (fourcc), version);
+ offset = 0;
+ break;
+ }
+ if (offset)
+ qtdemux_parse_container (qtdemux, node, buffer + offset, end);
+ break;
+ }
+ case FOURCC_mp4v:
+ case FOURCC_MP4V:
+ case FOURCC_fmp4:
+ case FOURCC_FMP4:
+ case FOURCC_apcs:
+ case FOURCC_apch:
+ case FOURCC_apcn:
+ case FOURCC_apco:
+ case FOURCC_ap4h:
+ case FOURCC_xvid:
+ case FOURCC_XVID:
+ case FOURCC_H264:
+ case FOURCC_avc1:
+ case FOURCC_avc3:
+ case FOURCC_H265:
+ case FOURCC_hvc1:
+ case FOURCC_hev1:
+ case FOURCC_dvh1:
+ case FOURCC_dvhe:
+ case FOURCC_mjp2:
+ case FOURCC_encv:
+ {
+ guint32 version;
+ guint32 str_len;
+
+ /* codec_data is contained inside these atoms, which all have
+ * the same format. */
+ /* video sample description size is 86 bytes without extension.
+          * node_length has to be bigger than 86 bytes because the video sample
+ * description can include extensions such as esds, fiel, glbl, etc. */
+ if (node_length < 86) {
+ GST_WARNING_OBJECT (qtdemux, "%" GST_FOURCC_FORMAT
+ " sample description length too short (%u < 86)",
+ GST_FOURCC_ARGS (fourcc), node_length);
+ break;
+ }
+
+ GST_DEBUG_OBJECT (qtdemux, "parsing in %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (fourcc));
+
+ /* version (2 bytes) : this is set to 0, unless a compressor has changed
+ * its data format.
+ * revision level (2 bytes) : must be set to 0. */
+ version = QT_UINT32 (buffer + 16);
+ GST_DEBUG_OBJECT (qtdemux, "version %08x", version);
+
+         /* compressor name : a PASCAL string, for informative purposes only.
+          * The first byte is the number of bytes to be displayed;
+          * it has to be less than 32 because the field is a reserved
+          * space of 32 bytes total, including the length byte itself. */
+ str_len = QT_UINT8 (buffer + 50);
+ if (str_len < 32)
+ GST_DEBUG_OBJECT (qtdemux, "compressorname = %.*s", str_len,
+ (char *) buffer + 51);
+ else
+ GST_WARNING_OBJECT (qtdemux,
+ "compressorname length too big (%u > 31)", str_len);
+
+ GST_MEMDUMP_OBJECT (qtdemux, "video sample description", buffer,
+ end - buffer);
+ qtdemux_parse_container (qtdemux, node, buffer + 86, end);
+ break;
+ }
+ case FOURCC_meta:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "parsing meta atom");
+
+ /* You are reading this correctly. QTFF specifies that the
+ * metadata atom is a short atom, whereas ISO BMFF specifies
+ * it's a full atom. But since so many people are doing things
+ * differently, we actually peek into the atom to see which
+ * variant it is */
+ if (length < 16) {
+ GST_LOG_OBJECT (qtdemux, "skipping small %" GST_FOURCC_FORMAT " box",
+ GST_FOURCC_ARGS (fourcc));
+ break;
+ }
+ if (QT_FOURCC (buffer + 12) == FOURCC_hdlr) {
+ /* Variant 1: What QTFF specifies. 'meta' is a short header which
+ * starts with a 'hdlr' atom */
+ qtdemux_parse_container (qtdemux, node, buffer + 8, end);
+ } else if (QT_UINT32 (buffer + 8) == 0x00000000) {
+ /* Variant 2: What ISO BMFF specifies. 'meta' is a _full_ atom
+ * with version/flags both set to zero */
+ qtdemux_parse_container (qtdemux, node, buffer + 12, end);
+ } else
+ GST_WARNING_OBJECT (qtdemux, "Unknown 'meta' atom format");
+ break;
+ }
+ case FOURCC_mp4s:
+ {
+ GST_MEMDUMP_OBJECT (qtdemux, "mp4s", buffer, end - buffer);
+ /* Skip 8 byte header, plus 8 byte version + flags + entry_count */
+ qtdemux_parse_container (qtdemux, node, buffer + 16, end);
+ break;
+ }
+ case FOURCC_XiTh:
+ {
+ guint32 version;
+ guint32 offset;
+
+ if (length < 16) {
+ GST_LOG_OBJECT (qtdemux, "skipping small %" GST_FOURCC_FORMAT " box",
+ GST_FOURCC_ARGS (fourcc));
+ break;
+ }
+
+ version = QT_UINT32 (buffer + 12);
+ GST_DEBUG_OBJECT (qtdemux, "parsing XiTh atom version 0x%08x", version);
+
+ switch (version) {
+ case 0x00000001:
+ offset = 0x62;
+ break;
+ default:
+ GST_DEBUG_OBJECT (qtdemux, "unknown version 0x%08x", version);
+ offset = 0;
+ break;
+ }
+ if (offset) {
+ if (length < offset) {
+ GST_WARNING_OBJECT (qtdemux,
+ "skipping too small %" GST_FOURCC_FORMAT " box",
+ GST_FOURCC_ARGS (fourcc));
+ break;
+ }
+ qtdemux_parse_container (qtdemux, node, buffer + offset, end);
+ }
+ break;
+ }
+ case FOURCC_in24:
+ {
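+         /* 0x34 (52) bytes matches the version 1 sound sample description
+          * offset used for mp4a above; child atoms follow it */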
+ qtdemux_parse_container (qtdemux, node, buffer + 0x34, end);
+ break;
+ }
+ case FOURCC_uuid:
+ {
+ qtdemux_parse_uuid (qtdemux, buffer, end - buffer);
+ break;
+ }
+ case FOURCC_enca:
+ {
+ qtdemux_parse_container (qtdemux, node, buffer + 36, end);
+ break;
+ }
++#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
++ case FOURCC_SA3D:
++ {
++ qtdemux_parse_SA3D (qtdemux, buffer, end - buffer);
++ break;
++ }
++#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
+ default:
+ if (!strcmp (type->name, "unknown"))
+ GST_MEMDUMP ("Unknown tag", buffer + 4, end - buffer - 4);
+ break;
+ }
+ }
+ GST_LOG_OBJECT (qtdemux, "parsed '%" GST_FOURCC_FORMAT "'",
+ GST_FOURCC_ARGS (fourcc));
+ return TRUE;
+
+ /* ERRORS */
+ not_enough_data:
+ {
+ GST_ELEMENT_ERROR (qtdemux, STREAM, DEMUX,
+ (_("This file is corrupt and cannot be played.")),
+ ("Not enough data for an atom header, got only %u bytes", length));
+ return FALSE;
+ }
+ broken_atom_size:
+ {
+ GST_ELEMENT_ERROR (qtdemux, STREAM, DEMUX,
+ (_("This file is corrupt and cannot be played.")),
+ ("Atom '%" GST_FOURCC_FORMAT "' has size of %u bytes, but we have only "
+ "%u bytes available.", GST_FOURCC_ARGS (fourcc), node_length,
+ length));
+ return FALSE;
+ }
+ }
+
+ static void
+ qtdemux_do_allocation (QtDemuxStream * stream, GstQTDemux * qtdemux)
+ {
+ /* FIXME: This can only reliably work if demuxers have a
+ * separate streaming thread per srcpad. This should be
+ * done in a demuxer base class, which integrates parts
+ * of multiqueue
+ *
+ * https://bugzilla.gnome.org/show_bug.cgi?id=701856
+ */
+ #if 0
+ GstQuery *query;
+
+ query = gst_query_new_allocation (stream->caps, FALSE);
+
+ if (!gst_pad_peer_query (stream->pad, query)) {
+ /* not a problem, just debug a little */
+ GST_DEBUG_OBJECT (qtdemux, "peer ALLOCATION query failed");
+ }
+
+ if (stream->allocator)
+ gst_object_unref (stream->allocator);
+
+ if (gst_query_get_n_allocation_params (query) > 0) {
+ /* try the allocator */
+ gst_query_parse_nth_allocation_param (query, 0, &stream->allocator,
+ &stream->params);
+ stream->use_allocator = TRUE;
+ } else {
+ stream->allocator = NULL;
+ gst_allocation_params_init (&stream->params);
+ stream->use_allocator = FALSE;
+ }
+ gst_query_unref (query);
+ #endif
+ }
+
+ static gboolean
+ pad_query (const GValue * item, GValue * value, gpointer user_data)
+ {
+ GstPad *pad = g_value_get_object (item);
+ GstQuery *query = user_data;
+ gboolean res;
+
+ res = gst_pad_peer_query (pad, query);
+
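+   /* gst_iterator_fold() keeps iterating while the fold function returns
+    * TRUE, so returning FALSE here stops as soon as one peer answered */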
+ if (res) {
+ g_value_set_boolean (value, TRUE);
+ return FALSE;
+ }
+
+ GST_INFO_OBJECT (pad, "pad peer query failed");
+ return TRUE;
+ }
+
+ static gboolean
+ gst_qtdemux_run_query (GstElement * element, GstQuery * query,
+ GstPadDirection direction)
+ {
+ GstIterator *it;
+ GstIteratorFoldFunction func = pad_query;
+ GValue res = { 0, };
+
+ g_value_init (&res, G_TYPE_BOOLEAN);
+ g_value_set_boolean (&res, FALSE);
+
+ /* Ask neighbor */
+ if (direction == GST_PAD_SRC)
+ it = gst_element_iterate_src_pads (element);
+ else
+ it = gst_element_iterate_sink_pads (element);
+
+ while (gst_iterator_fold (it, func, &res, query) == GST_ITERATOR_RESYNC)
+ gst_iterator_resync (it);
+
+ gst_iterator_free (it);
+
+ return g_value_get_boolean (&res);
+ }
+
+ static void
+ gst_qtdemux_request_protection_context (GstQTDemux * qtdemux,
+ QtDemuxStream * stream)
+ {
+ GstQuery *query;
+ GstContext *ctxt;
+ GstElement *element = GST_ELEMENT (qtdemux);
+ GstStructure *st;
+ gchar **filtered_sys_ids;
+ GValue event_list = G_VALUE_INIT;
+ GList *walk;
+
+ /* 1. Check if we already have the context. */
+ if (qtdemux->preferred_protection_system_id != NULL) {
+ GST_LOG_OBJECT (element,
+ "already have the protection context, no need to request it again");
+ return;
+ }
+
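+   /* temporarily NULL-terminate pdata so it can be passed around as a
+    * NULL-terminated string array; the sentinel is removed right after */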
+ g_ptr_array_add (qtdemux->protection_system_ids, NULL);
+ filtered_sys_ids = gst_protection_filter_systems_by_available_decryptors (
+ (const gchar **) qtdemux->protection_system_ids->pdata);
+
+ g_ptr_array_remove_index (qtdemux->protection_system_ids,
+ qtdemux->protection_system_ids->len - 1);
+ GST_TRACE_OBJECT (qtdemux, "detected %u protection systems, we have "
+ "decryptors for %u of them, running context request",
+ qtdemux->protection_system_ids->len,
+ filtered_sys_ids ? g_strv_length (filtered_sys_ids) : 0);
+
+ if (stream->protection_scheme_event_queue.length) {
+ GST_TRACE_OBJECT (qtdemux, "using stream event queue, length %u",
+ stream->protection_scheme_event_queue.length);
+ walk = stream->protection_scheme_event_queue.tail;
+ } else {
+ GST_TRACE_OBJECT (qtdemux, "using demuxer event queue, length %u",
+ qtdemux->protection_event_queue.length);
+ walk = qtdemux->protection_event_queue.tail;
+ }
+
+ g_value_init (&event_list, GST_TYPE_LIST);
+ for (; walk; walk = g_list_previous (walk)) {
+ GValue *event_value = g_new0 (GValue, 1);
+ g_value_init (event_value, GST_TYPE_EVENT);
+ g_value_set_boxed (event_value, walk->data);
+ gst_value_list_append_and_take_value (&event_list, event_value);
+ }
+
+ /* 2a) Query downstream with GST_QUERY_CONTEXT for the context and
+ * check if downstream already has a context of the specific type
+ * 2b) Query upstream as above.
+ */
+ query = gst_query_new_context ("drm-preferred-decryption-system-id");
+ st = gst_query_writable_structure (query);
+ gst_structure_set (st, "track-id", G_TYPE_UINT, stream->track_id,
+ "available-stream-encryption-systems", G_TYPE_STRV, filtered_sys_ids,
+ NULL);
+ gst_structure_set_value (st, "stream-encryption-events", &event_list);
+ if (gst_qtdemux_run_query (element, query, GST_PAD_SRC)) {
+ gst_query_parse_context (query, &ctxt);
+ GST_INFO_OBJECT (element, "found context (%p) in downstream query", ctxt);
+ gst_element_set_context (element, ctxt);
+ } else if (gst_qtdemux_run_query (element, query, GST_PAD_SINK)) {
+ gst_query_parse_context (query, &ctxt);
+ GST_INFO_OBJECT (element, "found context (%p) in upstream query", ctxt);
+ gst_element_set_context (element, ctxt);
+ } else {
+ /* 3) Post a GST_MESSAGE_NEED_CONTEXT message on the bus with
+ * the required context type and afterwards check if a
+ * usable context was set now as in 1). The message could
+ * be handled by the parent bins of the element and the
+ * application.
+ */
+ GstMessage *msg;
+
+ GST_INFO_OBJECT (element, "posting need context message");
+ msg = gst_message_new_need_context (GST_OBJECT_CAST (element),
+ "drm-preferred-decryption-system-id");
+ st = (GstStructure *) gst_message_get_structure (msg);
+ gst_structure_set (st, "track-id", G_TYPE_UINT, stream->track_id,
+ "available-stream-encryption-systems", G_TYPE_STRV, filtered_sys_ids,
+ NULL);
+
+ gst_structure_set_value (st, "stream-encryption-events", &event_list);
+ gst_element_post_message (element, msg);
+ }
+
+ g_strfreev (filtered_sys_ids);
+ g_value_unset (&event_list);
+ gst_query_unref (query);
+ }
+
+ static gboolean
+ gst_qtdemux_configure_protected_caps (GstQTDemux * qtdemux,
+ QtDemuxStream * stream)
+ {
+ GstStructure *s;
+ const gchar *selected_system = NULL;
+
+ g_return_val_if_fail (qtdemux != NULL, FALSE);
+ g_return_val_if_fail (stream != NULL, FALSE);
+ g_return_val_if_fail (gst_caps_get_size (CUR_STREAM (stream)->caps) == 1,
+ FALSE);
+
+ if (stream->protection_scheme_type == FOURCC_aavd) {
+ s = gst_caps_get_structure (CUR_STREAM (stream)->caps, 0);
+ if (!gst_structure_has_name (s, "application/x-aavd")) {
+ gst_structure_set (s,
+ "original-media-type", G_TYPE_STRING, gst_structure_get_name (s),
+ NULL);
+ gst_structure_set_name (s, "application/x-aavd");
+ }
+ return TRUE;
+ }
+
+ if (stream->protection_scheme_type != FOURCC_cenc
+ && stream->protection_scheme_type != FOURCC_cbcs) {
+ GST_ERROR_OBJECT (qtdemux,
+ "unsupported protection scheme: %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (stream->protection_scheme_type));
+ return FALSE;
+ }
+
+ s = gst_caps_get_structure (CUR_STREAM (stream)->caps, 0);
+ if (!gst_structure_has_name (s, "application/x-cenc")) {
+ gst_structure_set (s,
+ "original-media-type", G_TYPE_STRING, gst_structure_get_name (s), NULL);
+ gst_structure_set (s, "cipher-mode", G_TYPE_STRING,
+ (stream->protection_scheme_type == FOURCC_cbcs) ? "cbcs" : "cenc",
+ NULL);
+ gst_structure_set_name (s, "application/x-cenc");
+ }
+
+ if (qtdemux->protection_system_ids == NULL) {
+ GST_DEBUG_OBJECT (qtdemux, "stream is protected using cenc, but no "
+ "cenc protection system information has been found, not setting a "
+ "protection system UUID");
+ return TRUE;
+ }
+
+ gst_qtdemux_request_protection_context (qtdemux, stream);
+ if (qtdemux->preferred_protection_system_id != NULL) {
+ const gchar *preferred_system_array[] =
+ { qtdemux->preferred_protection_system_id, NULL };
+
+ selected_system = gst_protection_select_system (preferred_system_array);
+
+ if (selected_system) {
+ GST_TRACE_OBJECT (qtdemux, "selected preferred system %s",
+ qtdemux->preferred_protection_system_id);
+ } else {
+ GST_WARNING_OBJECT (qtdemux, "could not select preferred system %s "
+ "because there is no available decryptor",
+ qtdemux->preferred_protection_system_id);
+ }
+ }
+
+ if (!selected_system) {
+ g_ptr_array_add (qtdemux->protection_system_ids, NULL);
+ selected_system = gst_protection_select_system ((const gchar **)
+ qtdemux->protection_system_ids->pdata);
+ g_ptr_array_remove_index (qtdemux->protection_system_ids,
+ qtdemux->protection_system_ids->len - 1);
+ }
+
+ if (!selected_system) {
+ GST_ERROR_OBJECT (qtdemux, "stream is protected, but no "
+ "suitable decryptor element has been found");
+ return FALSE;
+ }
+
+ GST_DEBUG_OBJECT (qtdemux, "selected protection system is %s",
+ selected_system);
+
+ gst_structure_set (s,
+ GST_PROTECTION_SYSTEM_ID_CAPS_FIELD, G_TYPE_STRING, selected_system,
+ NULL);
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_qtdemux_guess_framerate (GstQTDemux * qtdemux, QtDemuxStream * stream)
+ {
+   /* fps is calculated based on the average sample duration, since
+    * qt does not have a fixed framerate. */
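+   /* the average sample duration computed below is
+    *   (duration - first_duration) * GST_SECOND / (timescale * (n_samples - 1)),
+    * leaving out the first sample as it is sometimes truncated */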
+ gboolean fps_available = TRUE;
+ guint32 first_duration = 0;
+
+ if (stream->n_samples > 0)
+ first_duration = stream->samples[0].duration;
+
+ if ((stream->n_samples == 1 && first_duration == 0)
+ || (qtdemux->fragmented && stream->n_samples_moof == 1)) {
+ /* still frame */
+ CUR_STREAM (stream)->fps_n = 0;
+ CUR_STREAM (stream)->fps_d = 1;
+ } else {
+ if (stream->duration == 0 || stream->n_samples < 2) {
+ CUR_STREAM (stream)->fps_n = stream->timescale;
+ CUR_STREAM (stream)->fps_d = 1;
+ fps_available = FALSE;
+ } else {
+ GstClockTime avg_duration;
+ guint64 duration;
+ guint32 n_samples;
+
+         /* duration and n_samples can be updated for the fragmented format,
+          * so its framerate is calculated using the data in a moof */
+ if (qtdemux->fragmented && stream->n_samples_moof > 0
+ && stream->duration_moof > 0) {
+ n_samples = stream->n_samples_moof;
+ duration = stream->duration_moof;
+ } else {
+ n_samples = stream->n_samples;
+ duration = stream->duration;
+ }
+
+ /* Calculate a framerate, ignoring the first sample which is sometimes truncated */
+ /* stream->duration is guint64, timescale, n_samples are guint32 */
+ avg_duration =
+ gst_util_uint64_scale_round (duration -
+ first_duration, GST_SECOND,
+ (guint64) (stream->timescale) * (n_samples - 1));
+
+ GST_LOG_OBJECT (qtdemux,
+ "Calculating avg sample duration based on stream (or moof) duration %"
+ G_GUINT64_FORMAT
+ " minus first sample %u, leaving %d samples gives %"
+ GST_TIME_FORMAT, duration, first_duration,
+ n_samples - 1, GST_TIME_ARGS (avg_duration));
+
++#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
++ gst_video_guess_framerate (avg_duration,
++ &CUR_STREAM (stream)->fps_n, &CUR_STREAM (stream)->fps_d);
++ if (CUR_STREAM (stream)->fps_d == 0)
++ fps_available = FALSE;
++#else
+ fps_available =
+ gst_video_guess_framerate (avg_duration,
+ &CUR_STREAM (stream)->fps_n, &CUR_STREAM (stream)->fps_d);
++#endif
+
+ GST_DEBUG_OBJECT (qtdemux,
+ "Calculating framerate, timescale %u gave fps_n %d fps_d %d",
+ stream->timescale, CUR_STREAM (stream)->fps_n,
+ CUR_STREAM (stream)->fps_d);
+ }
+ }
+
+ return fps_available;
+ }
+
+ static gboolean
+ gst_qtdemux_configure_stream (GstQTDemux * qtdemux, QtDemuxStream * stream)
+ {
+ if (stream->subtype == FOURCC_vide) {
+ gboolean fps_available = gst_qtdemux_guess_framerate (qtdemux, stream);
+
+ if (CUR_STREAM (stream)->caps) {
+ CUR_STREAM (stream)->caps =
+ gst_caps_make_writable (CUR_STREAM (stream)->caps);
+
+ if (CUR_STREAM (stream)->width && CUR_STREAM (stream)->height)
+ gst_caps_set_simple (CUR_STREAM (stream)->caps,
+ "width", G_TYPE_INT, CUR_STREAM (stream)->width,
+ "height", G_TYPE_INT, CUR_STREAM (stream)->height, NULL);
+
+ /* set framerate if calculated framerate is reliable */
+ if (fps_available) {
+ gst_caps_set_simple (CUR_STREAM (stream)->caps,
+ "framerate", GST_TYPE_FRACTION, CUR_STREAM (stream)->fps_n,
+ CUR_STREAM (stream)->fps_d, NULL);
+ }
+
+ /* calculate pixel-aspect-ratio using display width and height */
+ GST_DEBUG_OBJECT (qtdemux,
+ "video size %dx%d, target display size %dx%d",
+ CUR_STREAM (stream)->width, CUR_STREAM (stream)->height,
+ stream->display_width, stream->display_height);
+ /* qt file might have pasp atom */
+ if (CUR_STREAM (stream)->par_w > 0 && CUR_STREAM (stream)->par_h > 0) {
+ GST_DEBUG_OBJECT (qtdemux, "par %d:%d", CUR_STREAM (stream)->par_w,
+ CUR_STREAM (stream)->par_h);
+ gst_caps_set_simple (CUR_STREAM (stream)->caps, "pixel-aspect-ratio",
+ GST_TYPE_FRACTION, CUR_STREAM (stream)->par_w,
+ CUR_STREAM (stream)->par_h, NULL);
+ } else if (stream->display_width > 0 && stream->display_height > 0
+ && CUR_STREAM (stream)->width > 0
+ && CUR_STREAM (stream)->height > 0) {
+ gint n, d;
+
+ /* calculate the pixel aspect ratio using the display and pixel w/h */
+ n = stream->display_width * CUR_STREAM (stream)->height;
+ d = stream->display_height * CUR_STREAM (stream)->width;
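+         /* i.e. PAR = DAR / SAR
+          *          = (display_width / display_height) / (width / height) */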
+ if (n == d)
+ n = d = 1;
+ GST_DEBUG_OBJECT (qtdemux, "setting PAR to %d/%d", n, d);
+ CUR_STREAM (stream)->par_w = n;
+ CUR_STREAM (stream)->par_h = d;
+ gst_caps_set_simple (CUR_STREAM (stream)->caps, "pixel-aspect-ratio",
+ GST_TYPE_FRACTION, CUR_STREAM (stream)->par_w,
+ CUR_STREAM (stream)->par_h, NULL);
+ }
+
+ if (CUR_STREAM (stream)->interlace_mode > 0) {
+ if (CUR_STREAM (stream)->interlace_mode == 1) {
+ gst_caps_set_simple (CUR_STREAM (stream)->caps, "interlace-mode",
+ G_TYPE_STRING, "progressive", NULL);
+ } else if (CUR_STREAM (stream)->interlace_mode == 2) {
+ gst_caps_set_simple (CUR_STREAM (stream)->caps, "interlace-mode",
+ G_TYPE_STRING, "interleaved", NULL);
+ if (CUR_STREAM (stream)->field_order == 9) {
+ gst_caps_set_simple (CUR_STREAM (stream)->caps, "field-order",
+ G_TYPE_STRING, "top-field-first", NULL);
+ } else if (CUR_STREAM (stream)->field_order == 14) {
+ gst_caps_set_simple (CUR_STREAM (stream)->caps, "field-order",
+ G_TYPE_STRING, "bottom-field-first", NULL);
+ }
+ }
+ }
+
+ /* Create incomplete colorimetry here if needed */
+ if (CUR_STREAM (stream)->colorimetry.range ||
+ CUR_STREAM (stream)->colorimetry.matrix ||
+ CUR_STREAM (stream)->colorimetry.transfer
+ || CUR_STREAM (stream)->colorimetry.primaries) {
+ gchar *colorimetry =
+ gst_video_colorimetry_to_string (&CUR_STREAM (stream)->colorimetry);
+ gst_caps_set_simple (CUR_STREAM (stream)->caps, "colorimetry",
+ G_TYPE_STRING, colorimetry, NULL);
+ g_free (colorimetry);
+ }
+
+ if (stream->multiview_mode != GST_VIDEO_MULTIVIEW_MODE_NONE) {
+ guint par_w = 1, par_h = 1;
+
+ if (CUR_STREAM (stream)->par_w > 0 && CUR_STREAM (stream)->par_h > 0) {
+ par_w = CUR_STREAM (stream)->par_w;
+ par_h = CUR_STREAM (stream)->par_h;
+ }
+
+ if (gst_video_multiview_guess_half_aspect (stream->multiview_mode,
+ CUR_STREAM (stream)->width, CUR_STREAM (stream)->height, par_w,
+ par_h)) {
+ stream->multiview_flags |= GST_VIDEO_MULTIVIEW_FLAGS_HALF_ASPECT;
+ }
+
+ gst_caps_set_simple (CUR_STREAM (stream)->caps,
+ "multiview-mode", G_TYPE_STRING,
+ gst_video_multiview_mode_to_caps_string (stream->multiview_mode),
+ "multiview-flags", GST_TYPE_VIDEO_MULTIVIEW_FLAGSET,
+ stream->multiview_flags, GST_FLAG_SET_MASK_EXACT, NULL);
+ }
+ }
+ }
+
+ else if (stream->subtype == FOURCC_soun) {
+ if (CUR_STREAM (stream)->caps) {
+ CUR_STREAM (stream)->caps =
+ gst_caps_make_writable (CUR_STREAM (stream)->caps);
+ if (CUR_STREAM (stream)->rate > 0)
+ gst_caps_set_simple (CUR_STREAM (stream)->caps,
+ "rate", G_TYPE_INT, (int) CUR_STREAM (stream)->rate, NULL);
+ if (CUR_STREAM (stream)->n_channels > 0)
+ gst_caps_set_simple (CUR_STREAM (stream)->caps,
+ "channels", G_TYPE_INT, CUR_STREAM (stream)->n_channels, NULL);
+ if (CUR_STREAM (stream)->n_channels > 2) {
+ /* FIXME: Need to parse the 'chan' atom to get channel layouts
+ * correctly; this is just the minimum we can do - assume
+ * we don't actually have any channel positions. */
+ gst_caps_set_simple (CUR_STREAM (stream)->caps,
+ "channel-mask", GST_TYPE_BITMASK, G_GUINT64_CONSTANT (0), NULL);
+ }
+ }
+ }
+
+ else if (stream->subtype == FOURCC_clcp && CUR_STREAM (stream)->caps) {
+ const GstStructure *s;
+ QtDemuxStream *fps_stream = NULL;
+ gboolean fps_available = FALSE;
+
+ /* CEA608 closed caption tracks are a bit special in that each sample
+      * can contain CCs for multiple frames, and CCs can be omitted, in which
+      * case they have to be inferred from the duration of the sample.
+ *
+ * As such we take the framerate from the (first) video track here for
+ * CEA608 as there must be one CC byte pair for every video frame
+ * according to the spec.
+ *
+ * For CEA708 all is fine and there is one sample per frame.
+ */
+
+ s = gst_caps_get_structure (CUR_STREAM (stream)->caps, 0);
+ if (gst_structure_has_name (s, "closedcaption/x-cea-608")) {
+ gint i;
+
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *tmp = QTDEMUX_NTH_STREAM (qtdemux, i);
+
+ if (tmp->subtype == FOURCC_vide) {
+ fps_stream = tmp;
+ break;
+ }
+ }
+
+ if (fps_stream) {
+ fps_available = gst_qtdemux_guess_framerate (qtdemux, fps_stream);
+ CUR_STREAM (stream)->fps_n = CUR_STREAM (fps_stream)->fps_n;
+ CUR_STREAM (stream)->fps_d = CUR_STREAM (fps_stream)->fps_d;
+ }
+ } else {
+ fps_available = gst_qtdemux_guess_framerate (qtdemux, stream);
+ fps_stream = stream;
+ }
+
+ CUR_STREAM (stream)->caps =
+ gst_caps_make_writable (CUR_STREAM (stream)->caps);
+
+ /* set framerate if calculated framerate is reliable */
+ if (fps_available) {
+ gst_caps_set_simple (CUR_STREAM (stream)->caps,
+ "framerate", GST_TYPE_FRACTION, CUR_STREAM (stream)->fps_n,
+ CUR_STREAM (stream)->fps_d, NULL);
+ }
+ }
+
+ if (stream->pad) {
+ GstCaps *prev_caps = NULL;
+
+ GST_PAD_ELEMENT_PRIVATE (stream->pad) = stream;
+ gst_pad_set_event_function (stream->pad, gst_qtdemux_handle_src_event);
+ gst_pad_set_query_function (stream->pad, gst_qtdemux_handle_src_query);
+ gst_pad_set_active (stream->pad, TRUE);
+
+ gst_pad_use_fixed_caps (stream->pad);
+
+ if (stream->protected) {
+ if (!gst_qtdemux_configure_protected_caps (qtdemux, stream)) {
+ GST_ERROR_OBJECT (qtdemux,
+ "Failed to configure protected stream caps.");
+ return FALSE;
+ }
+ }
+
+ GST_DEBUG_OBJECT (qtdemux, "setting caps %" GST_PTR_FORMAT,
+ CUR_STREAM (stream)->caps);
+ if (stream->new_stream) {
+ GstEvent *event;
+ GstStreamFlags stream_flags = GST_STREAM_FLAG_NONE;
+
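+ /* inherit the stream flags and group id from the sticky stream-start
+ * event on the sink pad, if there is one */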
+ event =
+ gst_pad_get_sticky_event (qtdemux->sinkpad, GST_EVENT_STREAM_START,
+ 0);
+ if (event) {
+ gst_event_parse_stream_flags (event, &stream_flags);
+ if (gst_event_parse_group_id (event, &qtdemux->group_id))
+ qtdemux->have_group_id = TRUE;
+ else
+ qtdemux->have_group_id = FALSE;
+ gst_event_unref (event);
+ } else if (!qtdemux->have_group_id) {
+ qtdemux->have_group_id = TRUE;
+ qtdemux->group_id = gst_util_group_id_next ();
+ }
+
+ stream->new_stream = FALSE;
+ event = gst_event_new_stream_start (stream->stream_id);
+ if (qtdemux->have_group_id)
+ gst_event_set_group_id (event, qtdemux->group_id);
+ if (stream->disabled)
+ stream_flags |= GST_STREAM_FLAG_UNSELECT;
+ if (CUR_STREAM (stream)->sparse) {
+ stream_flags |= GST_STREAM_FLAG_SPARSE;
+ } else {
+ stream_flags &= ~GST_STREAM_FLAG_SPARSE;
+ }
+ gst_event_set_stream_flags (event, stream_flags);
+ gst_pad_push_event (stream->pad, event);
+ }
+
+ prev_caps = gst_pad_get_current_caps (stream->pad);
+
+ if (CUR_STREAM (stream)->caps) {
+ if (!prev_caps
+ || !gst_caps_is_equal_fixed (prev_caps, CUR_STREAM (stream)->caps)) {
+ GST_DEBUG_OBJECT (qtdemux, "setting caps %" GST_PTR_FORMAT,
+ CUR_STREAM (stream)->caps);
+ gst_pad_set_caps (stream->pad, CUR_STREAM (stream)->caps);
+ } else {
+ GST_DEBUG_OBJECT (qtdemux, "ignore duplicated caps");
+ }
+ } else {
+ GST_WARNING_OBJECT (qtdemux, "stream without caps");
+ }
+
+ if (prev_caps)
+ gst_caps_unref (prev_caps);
+ stream->new_caps = FALSE;
+ }
+ return TRUE;
+ }
+
+ static void
+ gst_qtdemux_stream_check_and_change_stsd_index (GstQTDemux * demux,
+ QtDemuxStream * stream)
+ {
+ if (stream->cur_stsd_entry_index == stream->stsd_sample_description_id)
+ return;
+
+ GST_DEBUG_OBJECT (stream->pad, "Changing stsd index from '%u' to '%u'",
+ stream->cur_stsd_entry_index, stream->stsd_sample_description_id);
+ if (G_UNLIKELY (stream->stsd_sample_description_id >=
+ stream->stsd_entries_length)) {
+ GST_ELEMENT_ERROR (demux, STREAM, DEMUX,
+ (_("This file is invalid and cannot be played.")),
+ ("New sample description id is out of bounds (%d >= %d)",
+ stream->stsd_sample_description_id, stream->stsd_entries_length));
+ } else {
+ stream->cur_stsd_entry_index = stream->stsd_sample_description_id;
+ stream->new_caps = TRUE;
+ }
+ }
+
+ static gboolean
+ gst_qtdemux_add_stream (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, GstTagList * list)
+ {
+ gboolean ret = TRUE;
+
+ if (stream->subtype == FOURCC_vide) {
+ gchar *name = g_strdup_printf ("video_%u", qtdemux->n_video_streams);
+
+ stream->pad =
+ gst_pad_new_from_static_template (&gst_qtdemux_videosrc_template, name);
+ g_free (name);
+
+ if (!gst_qtdemux_configure_stream (qtdemux, stream)) {
+ gst_object_unref (stream->pad);
+ stream->pad = NULL;
+ ret = FALSE;
+ goto done;
+ }
+
+ qtdemux->n_video_streams++;
+ } else if (stream->subtype == FOURCC_soun) {
+ gchar *name = g_strdup_printf ("audio_%u", qtdemux->n_audio_streams);
+
+ stream->pad =
+ gst_pad_new_from_static_template (&gst_qtdemux_audiosrc_template, name);
+ g_free (name);
+ if (!gst_qtdemux_configure_stream (qtdemux, stream)) {
+ gst_object_unref (stream->pad);
+ stream->pad = NULL;
+ ret = FALSE;
+ goto done;
+ }
+ qtdemux->n_audio_streams++;
+ } else if (stream->subtype == FOURCC_strm) {
+ GST_DEBUG_OBJECT (qtdemux, "stream type, not creating pad");
+ } else if (stream->subtype == FOURCC_subp || stream->subtype == FOURCC_text
+ || stream->subtype == FOURCC_sbtl || stream->subtype == FOURCC_subt
+ || stream->subtype == FOURCC_clcp) {
+ gchar *name = g_strdup_printf ("subtitle_%u", qtdemux->n_sub_streams);
+
+ stream->pad =
+ gst_pad_new_from_static_template (&gst_qtdemux_subsrc_template, name);
+ g_free (name);
+ if (!gst_qtdemux_configure_stream (qtdemux, stream)) {
+ gst_object_unref (stream->pad);
+ stream->pad = NULL;
+ ret = FALSE;
+ goto done;
+ }
+ qtdemux->n_sub_streams++;
+ } else if (CUR_STREAM (stream)->caps) {
+ gchar *name = g_strdup_printf ("video_%u", qtdemux->n_video_streams);
+
+ stream->pad =
+ gst_pad_new_from_static_template (&gst_qtdemux_videosrc_template, name);
+ g_free (name);
+ if (!gst_qtdemux_configure_stream (qtdemux, stream)) {
+ gst_object_unref (stream->pad);
+ stream->pad = NULL;
+ ret = FALSE;
+ goto done;
+ }
+ qtdemux->n_video_streams++;
+ } else {
+ GST_DEBUG_OBJECT (qtdemux, "unknown stream type");
+ goto done;
+ }
+
+ if (stream->pad) {
+ GList *l;
+
+ GST_DEBUG_OBJECT (qtdemux, "adding pad %s %p to qtdemux %p",
+ GST_OBJECT_NAME (stream->pad), stream->pad, qtdemux);
+ gst_element_add_pad (GST_ELEMENT_CAST (qtdemux), stream->pad);
+ GST_OBJECT_LOCK (qtdemux);
+ gst_flow_combiner_add_pad (qtdemux->flowcombiner, stream->pad);
+ GST_OBJECT_UNLOCK (qtdemux);
+
+ if (stream->stream_tags)
+ gst_tag_list_unref (stream->stream_tags);
+ stream->stream_tags = list;
+ list = NULL;
+ /* global tags go on each pad anyway */
+ stream->send_global_tags = TRUE;
+ /* send upstream GST_EVENT_PROTECTION events that were received before
+ this source pad was created */
+ for (l = qtdemux->protection_event_queue.head; l != NULL; l = l->next)
+ gst_pad_push_event (stream->pad, gst_event_ref (l->data));
+ }
+ done:
+ if (list)
+ gst_tag_list_unref (list);
+ return ret;
+ }
+
+ /* find next atom with @fourcc starting at @offset */
+ static GstFlowReturn
+ qtdemux_find_atom (GstQTDemux * qtdemux, guint64 * offset,
+ guint64 * length, guint32 fourcc)
+ {
+ GstFlowReturn ret;
+ guint32 lfourcc;
+ GstBuffer *buf;
+
+ GST_LOG_OBJECT (qtdemux, "finding fourcc %" GST_FOURCC_FORMAT " at offset %"
+ G_GUINT64_FORMAT, GST_FOURCC_ARGS (fourcc), *offset);
+
+ while (TRUE) {
+ GstMapInfo map;
+
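+ /* 16 bytes covers the largest atom header: 32-bit size and fourcc,
+ * plus a 64-bit extended size when size == 1 */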
+ buf = NULL;
+ ret = gst_pad_pull_range (qtdemux->sinkpad, *offset, 16, &buf);
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
+ goto locate_failed;
+ if (G_UNLIKELY (gst_buffer_get_size (buf) != 16)) {
+ /* likely EOF */
+ ret = GST_FLOW_EOS;
+ gst_buffer_unref (buf);
+ goto locate_failed;
+ }
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ extract_initial_length_and_fourcc (map.data, 16, length, &lfourcc);
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+
+ if (G_UNLIKELY (*length == 0)) {
+ GST_DEBUG_OBJECT (qtdemux, "invalid length 0");
+ ret = GST_FLOW_ERROR;
+ goto locate_failed;
+ }
+
+ if (lfourcc == fourcc) {
+ GST_DEBUG_OBJECT (qtdemux, "found '%" GST_FOURCC_FORMAT "' at offset %"
+ G_GUINT64_FORMAT, GST_FOURCC_ARGS (fourcc), *offset);
+ break;
+ } else {
+ GST_LOG_OBJECT (qtdemux,
+ "skipping atom '%" GST_FOURCC_FORMAT "' at %" G_GUINT64_FORMAT,
+ GST_FOURCC_ARGS (lfourcc), *offset);
+ if (*offset == G_MAXUINT64)
+ goto locate_failed;
+ *offset += *length;
+ }
+ }
+
+ return GST_FLOW_OK;
+
+ locate_failed:
+ {
+ /* we might simply have seen the last one already */
+ GST_DEBUG_OBJECT (qtdemux, "fourcc not found");
+ return ret;
+ }
+ }
+
+ /* should only do something in pull mode */
+ /* call with OBJECT lock */
+ static GstFlowReturn
+ qtdemux_add_fragmented_samples (GstQTDemux * qtdemux)
+ {
+ guint64 length, offset;
+ GstBuffer *buf = NULL;
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstFlowReturn res = GST_FLOW_OK;
+ GstMapInfo map;
+
+ offset = qtdemux->moof_offset;
+ GST_DEBUG_OBJECT (qtdemux, "next moof at offset %" G_GUINT64_FORMAT, offset);
+
+ if (!offset) {
+ GST_DEBUG_OBJECT (qtdemux, "no next moof");
+ return GST_FLOW_EOS;
+ }
+
+ /* best not do pull etc with lock held */
+ GST_OBJECT_UNLOCK (qtdemux);
+
+ ret = qtdemux_find_atom (qtdemux, &offset, &length, FOURCC_moof);
+ if (ret != GST_FLOW_OK)
+ goto flow_failed;
+
+ ret = gst_qtdemux_pull_atom (qtdemux, offset, length, &buf);
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
+ goto flow_failed;
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ if (!qtdemux_parse_moof (qtdemux, map.data, map.size, offset, NULL)) {
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ buf = NULL;
+ goto parse_failed;
+ }
+
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ buf = NULL;
+
+ offset += length;
+ /* look for next moof */
+ ret = qtdemux_find_atom (qtdemux, &offset, &length, FOURCC_moof);
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
+ goto flow_failed;
+
+ exit:
+ GST_OBJECT_LOCK (qtdemux);
+
+ qtdemux->moof_offset = offset;
+
+ return res;
+
+ parse_failed:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "failed to parse moof");
+ offset = 0;
+ res = GST_FLOW_ERROR;
+ goto exit;
+ }
+ flow_failed:
+ {
+ /* maybe upstream temporarily flushing */
+ if (ret != GST_FLOW_FLUSHING) {
+ GST_DEBUG_OBJECT (qtdemux, "no next moof");
+ offset = 0;
+ } else {
+ GST_DEBUG_OBJECT (qtdemux, "upstream FLUSHING");
+ /* resume at current position next time */
+ }
+ res = ret;
+ goto exit;
+ }
+ }
+
+ static void
+ qtdemux_merge_sample_table (GstQTDemux * qtdemux, QtDemuxStream * stream)
+ {
+ guint i;
+ guint32 num_chunks;
+ gint32 stts_duration;
+ GstByteWriter stsc, stts, stsz;
+
+ /* Each sample has a different size, which we don't support for merging */
+ if (stream->sample_size == 0) {
+ GST_DEBUG_OBJECT (qtdemux,
+ "Not all samples have the same size, not merging");
+ return;
+ }
+
+ /* The stream has a ctts table, we don't support that */
+ if (stream->ctts_present) {
+ GST_DEBUG_OBJECT (qtdemux, "Have ctts, not merging");
+ return;
+ }
+
+ /* If there's a sync sample table also ignore this stream */
+ if (stream->stps_present || stream->stss_present) {
+ GST_DEBUG_OBJECT (qtdemux, "Have stss/stps, not merging");
+ return;
+ }
+
+ /* If chunks are considered samples already ignore this stream */
+ if (stream->chunks_are_samples) {
+ GST_DEBUG_OBJECT (qtdemux, "Chunks are samples, not merging");
+ return;
+ }
+
+ /* Require that all samples have the same duration */
+ if (stream->n_sample_times > 1) {
+ GST_DEBUG_OBJECT (qtdemux, "Not all samples have the same duration");
+ return;
+ }
+
+ /* Parse the stts to get the sample duration and number of samples */
+ gst_byte_reader_skip_unchecked (&stream->stts, 4);
+ stts_duration = gst_byte_reader_get_uint32_be_unchecked (&stream->stts);
+
+ /* Parse the number of chunks from the stco manually because the
+ * reader is already behind that */
+ num_chunks = GST_READ_UINT32_BE (stream->stco.data + 4);
+
+ GST_DEBUG_OBJECT (qtdemux, "sample_duration %d, num_chunks %u", stts_duration,
+ num_chunks);
+
+ /* Now parse stsc, convert chunks into single samples and generate a
+ * new stsc, stts and stsz from this information */
+ gst_byte_writer_init (&stsc);
+ gst_byte_writer_init (&stts);
+ gst_byte_writer_init (&stsz);
+
+ /* Note: we skip fourccs, size, version, flags and other fields of the new
+ * atoms as the byte readers with them are already behind that position
+ * anyway and only update the values of those inside the stream directly.
+ */
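+ /* Worked example: with 2 byte samples, stts_duration 1 and one stsc
+ * entry covering chunks 1-4 with 1024 samples each, we emit a single
+ * stsc entry of 1 sample per chunk, one stts entry of 4 samples with
+ * duration 1024, and four stsz entries of 2048 bytes each */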
+ stream->n_sample_times = 0;
+ stream->n_samples = 0;
+ for (i = 0; i < stream->n_samples_per_chunk; i++) {
+ guint j;
+ guint32 first_chunk, last_chunk, samples_per_chunk, sample_description_id;
+
+ first_chunk = gst_byte_reader_get_uint32_be_unchecked (&stream->stsc);
+ samples_per_chunk = gst_byte_reader_get_uint32_be_unchecked (&stream->stsc);
+ sample_description_id =
+ gst_byte_reader_get_uint32_be_unchecked (&stream->stsc);
+
+ if (i == stream->n_samples_per_chunk - 1) {
+ /* +1 because first_chunk is 1-based */
+ last_chunk = num_chunks + 1;
+ } else {
+ last_chunk = gst_byte_reader_peek_uint32_be_unchecked (&stream->stsc);
+ }
+
+ GST_DEBUG_OBJECT (qtdemux,
+ "Merging first_chunk: %u, last_chunk: %u, samples_per_chunk: %u, sample_description_id: %u",
+ first_chunk, last_chunk, samples_per_chunk, sample_description_id);
+
+ gst_byte_writer_put_uint32_be (&stsc, first_chunk);
+ /* One sample in this chunk */
+ gst_byte_writer_put_uint32_be (&stsc, 1);
+ gst_byte_writer_put_uint32_be (&stsc, sample_description_id);
+
+ /* For each chunk write a stts and stsz entry now */
+ gst_byte_writer_put_uint32_be (&stts, last_chunk - first_chunk);
+ gst_byte_writer_put_uint32_be (&stts, stts_duration * samples_per_chunk);
+ for (j = first_chunk; j < last_chunk; j++) {
+ gst_byte_writer_put_uint32_be (&stsz,
+ stream->sample_size * samples_per_chunk);
+ }
+
+ stream->n_sample_times += 1;
+ stream->n_samples += last_chunk - first_chunk;
+ }
+
+ g_assert_cmpint (stream->n_samples, ==, num_chunks);
+
+ GST_DEBUG_OBJECT (qtdemux, "Have %u samples and %u sample times",
+ stream->n_samples, stream->n_sample_times);
+
+ /* We don't have a fixed sample size anymore */
+ stream->sample_size = 0;
+
+ /* Free old data for the atoms */
+ g_free ((gpointer) stream->stsz.data);
+ stream->stsz.data = NULL;
+ g_free ((gpointer) stream->stsc.data);
+ stream->stsc.data = NULL;
+ g_free ((gpointer) stream->stts.data);
+ stream->stts.data = NULL;
+
+ /* Store new data and replace byte readers */
+ stream->stsz.size = gst_byte_writer_get_size (&stsz);
+ stream->stsz.data = gst_byte_writer_reset_and_get_data (&stsz);
+ gst_byte_reader_init (&stream->stsz, stream->stsz.data, stream->stsz.size);
+ stream->stts.size = gst_byte_writer_get_size (&stts);
+ stream->stts.data = gst_byte_writer_reset_and_get_data (&stts);
+ gst_byte_reader_init (&stream->stts, stream->stts.data, stream->stts.size);
+ stream->stsc.size = gst_byte_writer_get_size (&stsc);
+ stream->stsc.data = gst_byte_writer_reset_and_get_data (&stsc);
+ gst_byte_reader_init (&stream->stsc, stream->stsc.data, stream->stsc.size);
+ }
+
+ /* initialise bytereaders for stbl sub-atoms */
+ static gboolean
+ qtdemux_stbl_init (GstQTDemux * qtdemux, QtDemuxStream * stream, GNode * stbl)
+ {
+ stream->stbl_index = -1; /* no samples have yet been parsed */
+ stream->sample_index = -1;
+
+ /* time-to-sample atom */
+ if (!qtdemux_tree_get_child_by_type_full (stbl, FOURCC_stts, &stream->stts))
+ goto corrupt_file;
+
+ /* copy atom data into a new buffer for later use */
+ stream->stts.data = g_memdup2 (stream->stts.data, stream->stts.size);
+
+ /* skip version + flags */
+ if (!gst_byte_reader_skip (&stream->stts, 1 + 3) ||
+ !gst_byte_reader_get_uint32_be (&stream->stts, &stream->n_sample_times))
+ goto corrupt_file;
+ GST_LOG_OBJECT (qtdemux, "%u timestamp blocks", stream->n_sample_times);
+
+ /* make sure there's enough data */
+ if (!qt_atom_parser_has_chunks (&stream->stts, stream->n_sample_times, 8)) {
+ stream->n_sample_times = gst_byte_reader_get_remaining (&stream->stts) / 8;
+ GST_LOG_OBJECT (qtdemux, "overriding to %u timestamp blocks",
+ stream->n_sample_times);
+ if (!stream->n_sample_times)
+ goto corrupt_file;
+ }
+
+ /* sync sample atom */
+ stream->stps_present = FALSE;
+ if ((stream->stss_present =
+ ! !qtdemux_tree_get_child_by_type_full (stbl, FOURCC_stss,
+ &stream->stss))) {
+ /* copy atom data into a new buffer for later use */
+ stream->stss.data = g_memdup2 (stream->stss.data, stream->stss.size);
+
+ /* skip version + flags */
+ if (!gst_byte_reader_skip (&stream->stss, 1 + 3) ||
+ !gst_byte_reader_get_uint32_be (&stream->stss, &stream->n_sample_syncs))
+ goto corrupt_file;
+
+ if (stream->n_sample_syncs) {
+ /* make sure there's enough data */
+ if (!qt_atom_parser_has_chunks (&stream->stss, stream->n_sample_syncs, 4))
+ goto corrupt_file;
+ }
+
+ /* partial sync sample atom */
+ if ((stream->stps_present =
+ ! !qtdemux_tree_get_child_by_type_full (stbl, FOURCC_stps,
+ &stream->stps))) {
+ /* copy atom data into a new buffer for later use */
+ stream->stps.data = g_memdup2 (stream->stps.data, stream->stps.size);
+
+ /* skip version + flags */
+ if (!gst_byte_reader_skip (&stream->stps, 1 + 3) ||
+ !gst_byte_reader_get_uint32_be (&stream->stps,
+ &stream->n_sample_partial_syncs))
+ goto corrupt_file;
+
+ /* if there are no entries, the stss table contains the real
+ * sync samples */
+ if (stream->n_sample_partial_syncs) {
+ /* make sure there's enough data */
+ if (!qt_atom_parser_has_chunks (&stream->stps,
+ stream->n_sample_partial_syncs, 4))
+ goto corrupt_file;
+ }
+ }
+ }
+
+ /* sample size */
+ if (!qtdemux_tree_get_child_by_type_full (stbl, FOURCC_stsz, &stream->stsz))
+ goto no_samples;
+
+ /* copy atom data into a new buffer for later use */
+ stream->stsz.data = g_memdup2 (stream->stsz.data, stream->stsz.size);
+
+ /* skip version + flags */
+ if (!gst_byte_reader_skip (&stream->stsz, 1 + 3) ||
+ !gst_byte_reader_get_uint32_be (&stream->stsz, &stream->sample_size))
+ goto corrupt_file;
+
+ if (!gst_byte_reader_get_uint32_be (&stream->stsz, &stream->n_samples))
+ goto corrupt_file;
+
+ if (!stream->n_samples)
+ goto no_samples;
+
+ /* sample-to-chunk atom */
+ if (!qtdemux_tree_get_child_by_type_full (stbl, FOURCC_stsc, &stream->stsc))
+ goto corrupt_file;
+
+ /* copy atom data into a new buffer for later use */
+ stream->stsc.data = g_memdup2 (stream->stsc.data, stream->stsc.size);
+
+ /* skip version + flags */
+ if (!gst_byte_reader_skip (&stream->stsc, 1 + 3) ||
+ !gst_byte_reader_get_uint32_be (&stream->stsc,
+ &stream->n_samples_per_chunk))
+ goto corrupt_file;
+
+ GST_DEBUG_OBJECT (qtdemux, "n_samples_per_chunk %u",
+ stream->n_samples_per_chunk);
+
+ /* make sure there's enough data */
+ if (!qt_atom_parser_has_chunks (&stream->stsc, stream->n_samples_per_chunk,
+ 12))
+ goto corrupt_file;
+
+ /* chunk offset */
+ if (qtdemux_tree_get_child_by_type_full (stbl, FOURCC_stco, &stream->stco))
+ stream->co_size = sizeof (guint32);
+ else if (qtdemux_tree_get_child_by_type_full (stbl, FOURCC_co64,
+ &stream->stco))
+ stream->co_size = sizeof (guint64);
+ else
+ goto corrupt_file;
+
+ /* copy atom data into a new buffer for later use */
+ stream->stco.data = g_memdup2 (stream->stco.data, stream->stco.size);
+
+ /* skip version + flags */
+ if (!gst_byte_reader_skip (&stream->stco, 1 + 3))
+ goto corrupt_file;
+
+ /* chunks_are_samples == TRUE means treat each chunk as a single sample;
+ * this is the case for constant-size media that is not sampled, e.g.
+ * raw audio */
+ stream->chunks_are_samples = stream->sample_size
+ && !CUR_STREAM (stream)->sampled;
+ if (stream->chunks_are_samples) {
+ /* treat chunks as samples */
+ if (!gst_byte_reader_get_uint32_be (&stream->stco, &stream->n_samples))
+ goto corrupt_file;
+ } else {
+ /* skip number of entries */
+ if (!gst_byte_reader_skip (&stream->stco, 4))
+ goto corrupt_file;
+
+ /* make sure there are enough data in the stsz atom */
+ if (!stream->sample_size) {
+ /* different sizes for each sample */
+ if (!qt_atom_parser_has_chunks (&stream->stsz, stream->n_samples, 4))
+ goto corrupt_file;
+ }
+ }
+
+ /* composition time-to-sample */
+ if ((stream->ctts_present =
+ ! !qtdemux_tree_get_child_by_type_full (stbl, FOURCC_ctts,
+ &stream->ctts))) {
+ GstByteReader cslg = GST_BYTE_READER_INIT (NULL, 0);
+
+ /* copy atom data into a new buffer for later use */
+ stream->ctts.data = g_memdup2 (stream->ctts.data, stream->ctts.size);
+
+ /* skip version + flags */
+ if (!gst_byte_reader_skip (&stream->ctts, 1 + 3)
+ || !gst_byte_reader_get_uint32_be (&stream->ctts,
+ &stream->n_composition_times))
+ goto corrupt_file;
+
+ /* make sure there's enough data */
+ if (!qt_atom_parser_has_chunks (&stream->ctts, stream->n_composition_times,
+ 4 + 4))
+ goto corrupt_file;
+
+ /* This is optional, if missing we iterate the ctts */
+ if (qtdemux_tree_get_child_by_type_full (stbl, FOURCC_cslg, &cslg)) {
+ if (!gst_byte_reader_skip (&cslg, 1 + 3)
+ || !gst_byte_reader_get_uint32_be (&cslg, &stream->cslg_shift)) {
+ g_free ((gpointer) cslg.data);
+ goto corrupt_file;
+ }
+ } else {
+ gint32 cslg_least = 0;
+ guint num_entries, pos;
+ gint i;
+
+ pos = gst_byte_reader_get_pos (&stream->ctts);
+ num_entries = stream->n_composition_times;
+
+ stream->cslg_shift = 0;
+
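+ /* no cslg atom: scan all composition offsets for the most negative
+ * one; its magnitude becomes the shift that keeps every PTS
+ * non-negative */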
+ for (i = 0; i < num_entries; i++) {
+ gint32 offset;
+
+ gst_byte_reader_skip_unchecked (&stream->ctts, 4);
+ offset = gst_byte_reader_get_int32_be_unchecked (&stream->ctts);
+ /* HACK: if sample_offset is larger than 2 * duration, ignore the box;
+ * a slightly inaccurate PTS is more usable than a corrupted one */
+ if (G_UNLIKELY ((ABS (offset) / 2) > stream->duration)) {
+ GST_WARNING_OBJECT (qtdemux,
+ "Ignore corrupted ctts, sample_offset %" G_GINT32_FORMAT
+ " larger than duration %" G_GUINT64_FORMAT,
+ offset, stream->duration);
+
+ stream->cslg_shift = 0;
+ stream->ctts_present = FALSE;
+ goto done;
+ }
+
+ if (offset < cslg_least)
+ cslg_least = offset;
+ }
+
+ if (cslg_least < 0)
+ stream->cslg_shift = ABS (cslg_least);
+ else
+ stream->cslg_shift = 0;
+
+ /* reset the reader so we can generate sample table */
+ gst_byte_reader_set_pos (&stream->ctts, pos);
+ }
+ } else {
+ /* Ensure the cslg_shift value is consistent so we can use it
+ * unconditionally to produce TS and Segment */
+ stream->cslg_shift = 0;
+ }
+
+ /* For raw audio streams especially we might want to merge the samples
+ * to not output one audio sample per buffer. We're doing this here
+ * before allocating the sample tables so that from this point onwards
+ * the number of container samples is static */
+ if (stream->min_buffer_size > 0) {
+ qtdemux_merge_sample_table (qtdemux, stream);
+ }
+
+ done:
+ GST_DEBUG_OBJECT (qtdemux, "allocating n_samples %u * %u (%.2f MB)",
+ stream->n_samples, (guint) sizeof (QtDemuxSample),
+ stream->n_samples * sizeof (QtDemuxSample) / (1024.0 * 1024.0));
+
+ if (stream->n_samples >=
+ QTDEMUX_MAX_SAMPLE_INDEX_SIZE / sizeof (QtDemuxSample)) {
+ GST_WARNING_OBJECT (qtdemux, "not allocating index of %u samples, would "
+ "be larger than %uMB (broken file?)", stream->n_samples,
+ QTDEMUX_MAX_SAMPLE_INDEX_SIZE >> 20);
+ return FALSE;
+ }
+
+ g_assert (stream->samples == NULL);
+ stream->samples = g_try_new0 (QtDemuxSample, stream->n_samples);
+ if (!stream->samples) {
+ GST_WARNING_OBJECT (qtdemux, "failed to allocate %u samples",
+ stream->n_samples);
+ return FALSE;
+ }
+
+ return TRUE;
+
+ corrupt_file:
+ {
+ GST_ELEMENT_ERROR (qtdemux, STREAM, DEMUX,
+ (_("This file is corrupt and cannot be played.")), (NULL));
+ return FALSE;
+ }
+ no_samples:
+ {
+ gst_qtdemux_stbl_free (stream);
+ if (!qtdemux->fragmented) {
+ /* not quite good */
+ GST_WARNING_OBJECT (qtdemux, "stream has no samples");
+ return FALSE;
+ } else {
+ /* may pick up samples elsewhere */
+ return TRUE;
+ }
+ }
+ }
+
+ /* collect samples from the next sample to be parsed up to sample @n for @stream
+ * by reading the info from @stbl
+ *
+ * This code can be executed from both the streaming thread and the seeking
+ * thread so it takes the object lock to protect itself
+ */
+ static gboolean
+ qtdemux_parse_samples (GstQTDemux * qtdemux, QtDemuxStream * stream, guint32 n)
+ {
+ gint i, j, k;
+ QtDemuxSample *samples, *first, *cur, *last;
+ guint32 n_samples_per_chunk;
+ guint32 n_samples;
+
+ GST_LOG_OBJECT (qtdemux, "parsing samples for stream fourcc %"
+ GST_FOURCC_FORMAT ", pad %s",
+ GST_FOURCC_ARGS (CUR_STREAM (stream)->fourcc),
+ stream->pad ? GST_PAD_NAME (stream->pad) : "(NULL)");
+
+ n_samples = stream->n_samples;
+
+ if (n >= n_samples)
+ goto out_of_samples;
+
+ GST_OBJECT_LOCK (qtdemux);
+ if (n <= stream->stbl_index)
+ goto already_parsed;
+
+ GST_DEBUG_OBJECT (qtdemux, "parsing up to sample %u", n);
+
+ if (!stream->stsz.data) {
+ /* so we already parsed and passed all the moov samples;
+ * onto fragmented ones */
+ g_assert (qtdemux->fragmented);
+ goto done;
+ }
+
+ /* pointer to the sample table */
+ samples = stream->samples;
+
+ /* starts from -1, moves to the next sample index to parse */
+ stream->stbl_index++;
+
+ /* keep track of the first and last sample to fill */
+ first = &samples[stream->stbl_index];
+ last = &samples[n];
+
+ if (!stream->chunks_are_samples) {
+ /* set the sample sizes */
+ if (stream->sample_size == 0) {
+ /* different sizes for each sample */
+ for (cur = first; cur <= last; cur++) {
+ cur->size = gst_byte_reader_get_uint32_be_unchecked (&stream->stsz);
+ GST_LOG_OBJECT (qtdemux, "sample %d has size %u",
+ (guint) (cur - samples), cur->size);
+ }
+ } else {
+ /* samples have the same size */
+ GST_LOG_OBJECT (qtdemux, "all samples have size %u", stream->sample_size);
+ for (cur = first; cur <= last; cur++)
+ cur->size = stream->sample_size;
+ }
+ }
+
+ n_samples_per_chunk = stream->n_samples_per_chunk;
+ cur = first;
+
+ for (i = stream->stsc_index; i < n_samples_per_chunk; i++) {
+ guint32 last_chunk;
+
+ if (stream->stsc_chunk_index >= stream->last_chunk
+ || stream->stsc_chunk_index < stream->first_chunk) {
+ stream->first_chunk =
+ gst_byte_reader_get_uint32_be_unchecked (&stream->stsc);
+ stream->samples_per_chunk =
+ gst_byte_reader_get_uint32_be_unchecked (&stream->stsc);
+ /* starts from 1 */
+ stream->stsd_sample_description_id =
+ gst_byte_reader_get_uint32_be_unchecked (&stream->stsc) - 1;
+
+ /* chunk numbers in the stsc atom are 1-based */
+ if (G_UNLIKELY (stream->first_chunk == 0))
+ goto corrupt_file;
+
+ --stream->first_chunk;
+
+ /* the last chunk of each entry is calculated by taking the first chunk
+ * of the next entry; except if there is no next, where we fake it with
+ * G_MAXUINT32 */
+ if (G_UNLIKELY (i == (stream->n_samples_per_chunk - 1))) {
+ stream->last_chunk = G_MAXUINT32;
+ } else {
+ stream->last_chunk =
+ gst_byte_reader_peek_uint32_be_unchecked (&stream->stsc);
+ if (G_UNLIKELY (stream->last_chunk == 0))
+ goto corrupt_file;
+
+ --stream->last_chunk;
+ }
+
+ GST_LOG_OBJECT (qtdemux,
+ "entry %d has first_chunk %d, last_chunk %d, samples_per_chunk %d, "
+ "sample desc ID: %d", i, stream->first_chunk, stream->last_chunk,
+ stream->samples_per_chunk, stream->stsd_sample_description_id);
+
+ if (G_UNLIKELY (stream->last_chunk < stream->first_chunk))
+ goto corrupt_file;
+
+ if (stream->last_chunk != G_MAXUINT32) {
+ if (!qt_atom_parser_peek_sub (&stream->stco,
+ stream->first_chunk * stream->co_size,
+ (stream->last_chunk - stream->first_chunk) * stream->co_size,
+ &stream->co_chunk))
+ goto corrupt_file;
+
+ } else {
+ stream->co_chunk = stream->stco;
+ if (!gst_byte_reader_skip (&stream->co_chunk,
+ stream->first_chunk * stream->co_size))
+ goto corrupt_file;
+ }
+
+ stream->stsc_chunk_index = stream->first_chunk;
+ }
+
+ last_chunk = stream->last_chunk;
+
+ if (stream->chunks_are_samples) {
+ cur = &samples[stream->stsc_chunk_index];
+
+ for (j = stream->stsc_chunk_index; j < last_chunk; j++) {
+ if (j > n) {
+ /* save state */
+ stream->stsc_chunk_index = j;
+ goto done;
+ }
+
+ cur->offset =
+ qt_atom_parser_get_offset_unchecked (&stream->co_chunk,
+ stream->co_size);
+
+ GST_LOG_OBJECT (qtdemux, "Created entry %d with offset "
+ "%" G_GUINT64_FORMAT, j, cur->offset);
+
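+ /* raw audio: the chunk carries samples_per_chunk samples, so convert
+ * that to bytes using the frame geometry of the sample entry */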
+ if (CUR_STREAM (stream)->samples_per_frame > 0 &&
+ CUR_STREAM (stream)->bytes_per_frame > 0) {
+ cur->size =
+ (stream->samples_per_chunk * CUR_STREAM (stream)->n_channels) /
+ CUR_STREAM (stream)->samples_per_frame *
+ CUR_STREAM (stream)->bytes_per_frame;
+ } else {
+ cur->size = stream->samples_per_chunk;
+ }
+
+ GST_DEBUG_OBJECT (qtdemux,
+ "keyframe sample %d: timestamp %" GST_TIME_FORMAT ", size %u",
+ j, GST_TIME_ARGS (QTSTREAMTIME_TO_GSTTIME (stream,
+ stream->stco_sample_index)), cur->size);
+
+ cur->timestamp = stream->stco_sample_index;
+ cur->duration = stream->samples_per_chunk;
+ cur->keyframe = TRUE;
+ cur++;
+
+ stream->stco_sample_index += stream->samples_per_chunk;
+ }
+ stream->stsc_chunk_index = j;
+ } else {
+ for (j = stream->stsc_chunk_index; j < last_chunk; j++) {
+ guint32 samples_per_chunk;
+ guint64 chunk_offset;
+
+ if (!stream->stsc_sample_index
+ && !qt_atom_parser_get_offset (&stream->co_chunk, stream->co_size,
+ &stream->chunk_offset))
+ goto corrupt_file;
+
+ samples_per_chunk = stream->samples_per_chunk;
+ chunk_offset = stream->chunk_offset;
+
+ for (k = stream->stsc_sample_index; k < samples_per_chunk; k++) {
+ GST_LOG_OBJECT (qtdemux, "creating entry %d with offset %"
+ G_GUINT64_FORMAT " and size %d",
+ (guint) (cur - samples), chunk_offset, cur->size);
+
+ cur->offset = chunk_offset;
+ chunk_offset += cur->size;
+ cur++;
+
+ if (G_UNLIKELY (cur > last)) {
+ /* save state */
+ stream->stsc_sample_index = k + 1;
+ stream->chunk_offset = chunk_offset;
+ stream->stsc_chunk_index = j;
+ goto done2;
+ }
+ }
+ stream->stsc_sample_index = 0;
+ }
+ stream->stsc_chunk_index = j;
+ }
+ stream->stsc_index++;
+ }
+
+ if (stream->chunks_are_samples)
+ goto ctts;
+ done2:
+ {
+ guint32 n_sample_times;
+
+ n_sample_times = stream->n_sample_times;
+ cur = first;
+
+ for (i = stream->stts_index; i < n_sample_times; i++) {
+ guint32 stts_samples;
+ gint32 stts_duration;
+ gint64 stts_time;
+
+ if (stream->stts_sample_index >= stream->stts_samples
+ || !stream->stts_sample_index) {
+
+ stream->stts_samples =
+ gst_byte_reader_get_uint32_be_unchecked (&stream->stts);
+ stream->stts_duration =
+ gst_byte_reader_get_uint32_be_unchecked (&stream->stts);
+
+ GST_LOG_OBJECT (qtdemux, "block %d, %u timestamps, duration %u",
+ i, stream->stts_samples, stream->stts_duration);
+
+ stream->stts_sample_index = 0;
+ }
+
+ stts_samples = stream->stts_samples;
+ stts_duration = stream->stts_duration;
+ stts_time = stream->stts_time;
+
+ for (j = stream->stts_sample_index; j < stts_samples; j++) {
+ GST_DEBUG_OBJECT (qtdemux,
+ "sample %d: index %d, timestamp %" GST_TIME_FORMAT,
+ (guint) (cur - samples), j,
+ GST_TIME_ARGS (QTSTREAMTIME_TO_GSTTIME (stream, stts_time)));
+
+ cur->timestamp = stts_time;
+ cur->duration = stts_duration;
+
+ /* avoid 32-bit wrap-around,
+ * but still mind possible 'negative' duration */
+ stts_time += (gint64) stts_duration;
+ cur++;
+
+ if (G_UNLIKELY (cur > last)) {
+ /* save values */
+ stream->stts_time = stts_time;
+ stream->stts_sample_index = j + 1;
+ if (stream->stts_sample_index >= stream->stts_samples)
+ stream->stts_index++;
+ goto done3;
+ }
+ }
+ stream->stts_sample_index = 0;
+ stream->stts_time = stts_time;
+ stream->stts_index++;
+ }
+ /* fill up empty timestamps with the last timestamp, this can happen when
+ * the last samples do not decode and so we don't have timestamps for them.
+ * We however look at the last timestamp to estimate the track length so we
+ * need something in here. */
+ for (; cur < last; cur++) {
+ GST_DEBUG_OBJECT (qtdemux,
+ "fill sample %d: timestamp %" GST_TIME_FORMAT,
+ (guint) (cur - samples),
+ GST_TIME_ARGS (QTSTREAMTIME_TO_GSTTIME (stream, stream->stts_time)));
+ cur->timestamp = stream->stts_time;
+ cur->duration = -1;
+ }
+ }
+ done3:
+ {
+ /* sample sync, can be NULL */
+ if (stream->stss_present == TRUE) {
+ guint32 n_sample_syncs;
+
+ n_sample_syncs = stream->n_sample_syncs;
+
+ if (!n_sample_syncs) {
+ GST_DEBUG_OBJECT (qtdemux, "all samples are keyframes");
+ stream->all_keyframe = TRUE;
+ } else {
+ for (i = stream->stss_index; i < n_sample_syncs; i++) {
+ /* note that the first sample is index 1, not 0 */
+ guint32 index;
+
+ index = gst_byte_reader_get_uint32_be_unchecked (&stream->stss);
+
+ if (G_LIKELY (index > 0 && index <= n_samples)) {
+ index -= 1;
+ samples[index].keyframe = TRUE;
+ GST_DEBUG_OBJECT (qtdemux, "sample at %u is a keyframe", index);
+ /* and exit if we have enough samples */
+ if (G_UNLIKELY (index >= n)) {
+ i++;
+ break;
+ }
+ }
+ }
+ /* save state */
+ stream->stss_index = i;
+ }
+
+ /* stps marks partial sync frames like open GOP I-Frames */
+ if (stream->stps_present == TRUE) {
+ guint32 n_sample_partial_syncs;
+
+ n_sample_partial_syncs = stream->n_sample_partial_syncs;
+
+ /* if there are no entries, the stss table contains the real
+ * sync samples */
+ if (n_sample_partial_syncs) {
+ for (i = stream->stps_index; i < n_sample_partial_syncs; i++) {
+ /* note that the first sample is index 1, not 0 */
+ guint32 index;
+
+ index = gst_byte_reader_get_uint32_be_unchecked (&stream->stps);
+
+ if (G_LIKELY (index > 0 && index <= n_samples)) {
+ index -= 1;
+ samples[index].keyframe = TRUE;
+ GST_DEBUG_OBJECT (qtdemux, "sample at %u is a keyframe", index);
+ /* and exit if we have enough samples */
+ if (G_UNLIKELY (index >= n)) {
+ i++;
+ break;
+ }
+ }
+ }
+ /* save state */
+ stream->stps_index = i;
+ }
+ }
+ } else {
+ /* no stss, all samples are keyframes */
+ stream->all_keyframe = TRUE;
+ GST_DEBUG_OBJECT (qtdemux, "setting all keyframes");
+ }
+ }
+
+ ctts:
+ /* composition time to sample */
+ if (stream->ctts_present == TRUE) {
+ guint32 n_composition_times;
+ guint32 ctts_count;
+ gint32 ctts_soffset;
+
+ /* Fill in the pts_offsets */
+ cur = first;
+ n_composition_times = stream->n_composition_times;
+
+ for (i = stream->ctts_index; i < n_composition_times; i++) {
+ if (stream->ctts_sample_index >= stream->ctts_count
+ || !stream->ctts_sample_index) {
+ stream->ctts_count =
+ gst_byte_reader_get_uint32_be_unchecked (&stream->ctts);
+ stream->ctts_soffset =
+ gst_byte_reader_get_int32_be_unchecked (&stream->ctts);
+ stream->ctts_sample_index = 0;
+ }
+
+ ctts_count = stream->ctts_count;
+ ctts_soffset = stream->ctts_soffset;
+
+ for (j = stream->ctts_sample_index; j < ctts_count; j++) {
+ cur->pts_offset = ctts_soffset;
+ cur++;
+
+ if (G_UNLIKELY (cur > last)) {
+ /* save state */
+ stream->ctts_sample_index = j + 1;
+ goto done;
+ }
+ }
+ stream->ctts_sample_index = 0;
+ stream->ctts_index++;
+ }
+ }
+ done:
+ stream->stbl_index = n;
+ /* if index has been completely parsed, free data that is no longer needed */
+ if (n + 1 == stream->n_samples) {
+ gst_qtdemux_stbl_free (stream);
+ GST_DEBUG_OBJECT (qtdemux, "parsed all available samples");
+ if (qtdemux->pullbased) {
+ GST_DEBUG_OBJECT (qtdemux, "checking for more samples");
+ while (n + 1 == stream->n_samples)
+ if (qtdemux_add_fragmented_samples (qtdemux) != GST_FLOW_OK)
+ break;
+ }
+ }
+ GST_OBJECT_UNLOCK (qtdemux);
+
+ return TRUE;
+
+ /* SUCCESS */
+ already_parsed:
+ {
+ GST_LOG_OBJECT (qtdemux,
+ "Tried to parse up to sample %u but this sample has already been parsed",
+ n);
+ /* if fragmented, there may be more */
+ if (qtdemux->fragmented && n == stream->stbl_index)
+ goto done;
+ GST_OBJECT_UNLOCK (qtdemux);
+ return TRUE;
+ }
+ /* ERRORS */
+ out_of_samples:
+ {
+ GST_LOG_OBJECT (qtdemux,
+ "Tried to parse up to sample %u but there are only %u samples", n + 1,
+ stream->n_samples);
+ GST_ELEMENT_ERROR (qtdemux, STREAM, DEMUX,
+ (_("This file is corrupt and cannot be played.")), (NULL));
+ return FALSE;
+ }
+ corrupt_file:
+ {
+ GST_OBJECT_UNLOCK (qtdemux);
+ GST_ELEMENT_ERROR (qtdemux, STREAM, DEMUX,
+ (_("This file is corrupt and cannot be played.")), (NULL));
+ return FALSE;
+ }
+ }
+
+ /* collect all segment info for @stream.
+ */
+ static gboolean
+ qtdemux_parse_segments (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ GNode * trak)
+ {
+ GNode *edts;
+ /* accept edts if they contain gaps at start and there is only
+ * one media segment */
+ gboolean allow_pushbased_edts = TRUE;
+ gint media_segments_count = 0;
+
+ /* parse and prepare segment info from the edit list */
+ GST_DEBUG_OBJECT (qtdemux, "looking for edit list container");
+ stream->n_segments = 0;
+ stream->segments = NULL;
+ if ((edts = qtdemux_tree_get_child_by_type (trak, FOURCC_edts))) {
+ GNode *elst;
+ gint n_segments;
+ gint segment_number, entry_size;
+ guint64 time;
+ GstClockTime stime;
+ const guint8 *buffer;
+ guint8 version;
+ guint32 size;
+
+ GST_DEBUG_OBJECT (qtdemux, "looking for edit list");
+ if (!(elst = qtdemux_tree_get_child_by_type (edts, FOURCC_elst)))
+ goto done;
+
+ buffer = elst->data;
+
+ size = QT_UINT32 (buffer);
+ /* version, flags, n_segments */
+ if (size < 16) {
+ GST_WARNING_OBJECT (qtdemux, "Invalid edit list");
+ goto done;
+ }
+ version = QT_UINT8 (buffer + 8);
+ entry_size = (version == 1) ? 20 : 12;
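+ /* an elst entry holds duration, media_time and a 16.16 rate:
+ * 8 + 8 + 4 bytes with version 1 and 4 + 4 + 4 bytes with version 0 */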
+
+ n_segments = QT_UINT32 (buffer + 12);
+
+ if (n_segments > 100000 || size < 16 + n_segments * entry_size) {
+ GST_WARNING_OBJECT (qtdemux, "Invalid edit list");
+ goto done;
+ }
+
+ /* we might allocate a bit too much, at least allocate 1 segment */
+ stream->segments = g_new (QtDemuxSegment, MAX (n_segments, 1));
+
+ /* segments always start from 0 */
+ time = 0;
+ stime = 0;
+ buffer += 16;
+ for (segment_number = 0; segment_number < n_segments; segment_number++) {
+ guint64 duration;
+ guint64 media_time;
+ gboolean empty_edit = FALSE;
+ QtDemuxSegment *segment;
+ guint32 rate_int;
+ GstClockTime media_start = GST_CLOCK_TIME_NONE;
+
+ if (version == 1) {
+ media_time = QT_UINT64 (buffer + 8);
+ duration = QT_UINT64 (buffer);
+ if (media_time == G_MAXUINT64)
+ empty_edit = TRUE;
+ } else {
+ media_time = QT_UINT32 (buffer + 4);
+ duration = QT_UINT32 (buffer);
+ if (media_time == G_MAXUINT32)
+ empty_edit = TRUE;
+ }
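+ /* a media_time of -1 marks an empty edit, i.e. a gap before the media */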
+
+ if (!empty_edit)
+ media_start = QTSTREAMTIME_TO_GSTTIME (stream, media_time);
+
+ segment = &stream->segments[segment_number];
+
+ /* time and duration expressed in global timescale */
+ segment->time = stime;
+ if (duration != 0 || empty_edit) {
+ /* edge case: empty edits with duration=zero are treated here.
+ * (files should not have these anyway). */
+
+ /* add non scaled values so we don't cause roundoff errors */
+ time += duration;
+ stime = QTTIME_TO_GSTTIME (qtdemux, time);
+ segment->duration = stime - segment->time;
+ } else {
+ /* a zero duration does not imply media_start == media_stop;
+ * it only specifies media_start. The edit ends with the track. */
+ stime = segment->duration = GST_CLOCK_TIME_NONE;
+ /* Don't allow more edits after this one. */
+ n_segments = segment_number + 1;
+ }
+ segment->stop_time = stime;
+
+ segment->trak_media_start = media_time;
+ /* media_time expressed in stream timescale */
+ if (!empty_edit) {
+ segment->media_start = media_start;
+ segment->media_stop = GST_CLOCK_TIME_IS_VALID (segment->duration)
+ ? segment->media_start + segment->duration : GST_CLOCK_TIME_NONE;
+ media_segments_count++;
+ } else {
+ segment->media_start = GST_CLOCK_TIME_NONE;
+ segment->media_stop = GST_CLOCK_TIME_NONE;
+ }
+ rate_int = QT_UINT32 (buffer + ((version == 1) ? 16 : 8));
+
+ if (rate_int <= 1) {
+ /* 0 is not allowed; some programs write 1 instead of the 16.16
+ * fixed-point value */
+ GST_WARNING_OBJECT (qtdemux, "found suspicious rate %" G_GUINT32_FORMAT,
+ rate_int);
+ segment->rate = 1;
+ } else {
+ segment->rate = rate_int / 65536.0;
+ }
+
+ GST_DEBUG_OBJECT (qtdemux, "created segment %d time %" GST_TIME_FORMAT
+ ", duration %" GST_TIME_FORMAT ", media_start %" GST_TIME_FORMAT
+ " (%" G_GUINT64_FORMAT ") , media_stop %" GST_TIME_FORMAT
+ " stop_time %" GST_TIME_FORMAT " rate %g, (%d) timescale %u",
+ segment_number, GST_TIME_ARGS (segment->time),
+ GST_TIME_ARGS (segment->duration),
+ GST_TIME_ARGS (segment->media_start), media_time,
+ GST_TIME_ARGS (segment->media_stop),
+ GST_TIME_ARGS (segment->stop_time), segment->rate, rate_int,
+ stream->timescale);
+ if (segment->stop_time > qtdemux->segment.stop &&
+ !qtdemux->upstream_format_is_time) {
+ GST_WARNING_OBJECT (qtdemux, "Segment %d"
+ " extends to %" GST_TIME_FORMAT
+ " past the end of the declared movie duration %" GST_TIME_FORMAT
+ "; movie segment will be extended", segment_number,
+ GST_TIME_ARGS (segment->stop_time),
+ GST_TIME_ARGS (qtdemux->segment.stop));
+ qtdemux->segment.stop = qtdemux->segment.duration = segment->stop_time;
+ }
+
+ buffer += entry_size;
+ }
+ GST_DEBUG_OBJECT (qtdemux, "found %d segments", n_segments);
+ stream->n_segments = n_segments;
+ if (media_segments_count != 1)
+ allow_pushbased_edts = FALSE;
+ }
+ done:
+
+ /* push based does not handle segments, so act accordingly here,
+ * and warn if applicable */
+ if (!qtdemux->pullbased && !allow_pushbased_edts) {
+ GST_WARNING_OBJECT (qtdemux, "streaming; discarding edit list segments");
+ /* remove and use default one below, we stream like it anyway */
+ g_free (stream->segments);
+ stream->segments = NULL;
+ stream->n_segments = 0;
+ }
+
+ /* no segments, create one to play the complete trak */
+ if (stream->n_segments == 0) {
+ GstClockTime stream_duration =
+ QTSTREAMTIME_TO_GSTTIME (stream, stream->duration);
+
+ if (stream->segments == NULL)
+ stream->segments = g_new (QtDemuxSegment, 1);
+
+ /* represent unknown our way */
+ if (stream_duration == 0)
+ stream_duration = GST_CLOCK_TIME_NONE;
+
+ stream->segments[0].time = 0;
+ stream->segments[0].stop_time = stream_duration;
+ stream->segments[0].duration = stream_duration;
+ stream->segments[0].media_start = 0;
+ stream->segments[0].media_stop = stream_duration;
+ stream->segments[0].rate = 1.0;
+ stream->segments[0].trak_media_start = 0;
+
+ GST_DEBUG_OBJECT (qtdemux, "created dummy segment %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (stream_duration));
+ stream->n_segments = 1;
+ stream->dummy_segment = TRUE;
+ }
+ GST_DEBUG_OBJECT (qtdemux, "using %d segments", stream->n_segments);
+
+ return TRUE;
+ }
+
+ /*
+ * Parses the stsd atom of a svq3 trak looking for
+ * the SMI and gama atoms.
+ */
+ static void
+ qtdemux_parse_svq3_stsd_data (GstQTDemux * qtdemux,
+ const guint8 * stsd_entry_data, const guint8 ** gamma, GstBuffer ** seqh)
+ {
+ const guint8 *_gamma = NULL;
+ GstBuffer *_seqh = NULL;
+ const guint8 *stsd_data = stsd_entry_data;
+ guint32 length = QT_UINT32 (stsd_data);
+ guint16 version;
+
+ if (length < 32) {
+ GST_WARNING_OBJECT (qtdemux, "stsd too short");
+ goto end;
+ }
+
+ stsd_data += 16;
+ length -= 16;
+ version = QT_UINT16 (stsd_data);
+ if (version == 3) {
+ if (length >= 70) {
+ length -= 70;
+ stsd_data += 70;
+ while (length > 8) {
+ guint32 fourcc, size;
+ const guint8 *data;
+ size = QT_UINT32 (stsd_data);
+ fourcc = QT_FOURCC (stsd_data + 4);
+ data = stsd_data + 8;
+
+ if (size == 0) {
+ GST_WARNING_OBJECT (qtdemux, "Atom of size 0 found, aborting "
+ "svq3 atom parsing");
+ goto end;
+ }
+
+ switch (fourcc) {
+ case FOURCC_gama:{
+ if (size == 12) {
+ _gamma = data;
+ } else {
+ GST_WARNING_OBJECT (qtdemux, "Unexpected size %" G_GUINT32_FORMAT
+ " for gama atom, expected 12", size);
+ }
+ break;
+ }
+ case FOURCC_SMI_:{
+ if (size > 16 && QT_FOURCC (data) == FOURCC_SEQH) {
+ guint32 seqh_size;
+ if (_seqh != NULL) {
+ GST_WARNING_OBJECT (qtdemux, "Unexpected second SEQH SMI atom"
+ " found, ignoring");
+ } else {
+ seqh_size = QT_UINT32 (data + 4);
+ if (seqh_size > 0) {
+ _seqh = gst_buffer_new_and_alloc (seqh_size);
+ gst_buffer_fill (_seqh, 0, data + 8, seqh_size);
+ }
+ }
+ }
+ break;
+ }
+ default:{
+ GST_WARNING_OBJECT (qtdemux, "Unhandled atom %" GST_FOURCC_FORMAT
+ " in SVQ3 entry in stsd atom", GST_FOURCC_ARGS (fourcc));
+ }
+ }
+
+ if (size <= length) {
+ length -= size;
+ stsd_data += size;
+ } else {
+ /* the atom claims more data than is available; stop here
+ * instead of re-parsing the same atom forever */
+ break;
+ }
+ }
+ } else {
+ GST_WARNING_OBJECT (qtdemux, "SVQ3 entry too short in stsd atom");
+ }
+ } else {
+ GST_WARNING_OBJECT (qtdemux, "Unexpected version for SVQ3 entry %"
+ G_GUINT16_FORMAT, version);
+ goto end;
+ }
+
+ end:
+ if (gamma) {
+ *gamma = _gamma;
+ }
+ if (seqh) {
+ *seqh = _seqh;
+ } else if (_seqh) {
+ gst_buffer_unref (_seqh);
+ }
+ }
+
+ static gchar *
+ qtdemux_get_rtsp_uri_from_hndl (GstQTDemux * qtdemux, GNode * minf)
+ {
+ GNode *dinf;
+ GstByteReader dref;
+ gchar *uri = NULL;
+
+ /*
+ * Get 'dinf', to get its child 'dref', that might contain a 'hndl'
+ * atom that might contain a 'data' atom with the rtsp uri.
+ * This case was reported in bug #597497, some info about
+ * the hndl atom can be found in TN1195
+ */
+ dinf = qtdemux_tree_get_child_by_type (minf, FOURCC_dinf);
+ GST_DEBUG_OBJECT (qtdemux, "Trying to obtain rtsp URI for stream trak");
+
+ if (dinf) {
+ guint32 dref_num_entries = 0;
+ if (qtdemux_tree_get_child_by_type_full (dinf, FOURCC_dref, &dref) &&
+ gst_byte_reader_skip (&dref, 4) &&
+ gst_byte_reader_get_uint32_be (&dref, &dref_num_entries)) {
+ gint i;
+
+ /* search dref entries for hndl atom */
+ for (i = 0; i < dref_num_entries; i++) {
+ guint32 size = 0, type;
+ guint8 string_len = 0;
+ if (gst_byte_reader_get_uint32_be (&dref, &size) &&
+ qt_atom_parser_get_fourcc (&dref, &type)) {
+ if (type == FOURCC_hndl) {
+ GST_DEBUG_OBJECT (qtdemux, "Found hndl atom");
+
+ /* skip the data reference handle bytes, the
+ * following pascal string and 4 extra bytes
+ * of unknown purpose */
+ if (!gst_byte_reader_skip (&dref, 4) ||
+ !gst_byte_reader_get_uint8 (&dref, &string_len) ||
+ !gst_byte_reader_skip (&dref, string_len + 4)) {
+ GST_WARNING_OBJECT (qtdemux, "Failed to parse hndl atom");
+ break;
+ }
+
+ /* iterate over the atoms to find the data atom */
+ while (gst_byte_reader_get_remaining (&dref) >= 8) {
+ guint32 atom_size;
+ guint32 atom_type;
+
+ if (gst_byte_reader_get_uint32_be (&dref, &atom_size) &&
+ qt_atom_parser_get_fourcc (&dref, &atom_type)) {
+ if (atom_type == FOURCC_data) {
+ const guint8 *uri_aux = NULL;
+
+ /* found the data atom that might contain the rtsp uri */
+ GST_DEBUG_OBJECT (qtdemux, "Found data atom inside "
+ "hndl atom, interpreting it as a URI");
+ if (gst_byte_reader_peek_data (&dref, atom_size - 8,
+ &uri_aux)) {
+ if (g_strstr_len ((gchar *) uri_aux, 7, "rtsp://") != NULL)
+ uri = g_strndup ((gchar *) uri_aux, atom_size - 8);
+ else
+ GST_WARNING_OBJECT (qtdemux, "Data atom in hndl atom "
+ "didn't contain an RTSP address");
+ } else {
+ GST_WARNING_OBJECT (qtdemux, "Failed to get the data "
+ "atom contents");
+ }
+ break;
+ }
+ /* skipping to the next entry */
+ if (!gst_byte_reader_skip (&dref, atom_size - 8))
+ break;
+ } else {
+ GST_WARNING_OBJECT (qtdemux, "Failed to parse hndl child "
+ "atom header");
+ break;
+ }
+ }
+ break;
+ }
+ /* skip to the next entry */
+ if (!gst_byte_reader_skip (&dref, size - 8))
+ break;
+ } else {
+ GST_WARNING_OBJECT (qtdemux, "Error parsing dref atom");
+ }
+ }
+ GST_DEBUG_OBJECT (qtdemux, "Finished parsing dref atom");
+ }
+ }
+ return uri;
+ }
+
+ #define AMR_NB_ALL_MODES 0x81ff
+ #define AMR_WB_ALL_MODES 0x83ff
+ static guint
+ qtdemux_parse_amr_bitrate (GstBuffer * buf, gboolean wb)
+ {
+ /* The 'damr' atom is of the form:
+ *
+ * | vendor | decoder_ver | mode_set | mode_change_period | frames/sample |
+ * 32 b 8 b 16 b 8 b 8 b
+ *
+ * The highest set bit of the first 7 (AMR-NB) or 8 (AMR-WB) bits of mode_set
+ * represents the highest mode used in the stream (and thus the maximum
+ * bitrate), with a couple of special cases as seen below.
+ */
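+ /* e.g. a mode_set of 0x81ff matches AMR_NB_ALL_MODES, so the highest
+ * NB mode 7 applies and the maximum bitrate is 12200 bps */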
+
+ /* Map of frame type ID -> bitrate */
+ static const guint nb_bitrates[] = {
+ 4750, 5150, 5900, 6700, 7400, 7950, 10200, 12200
+ };
+ static const guint wb_bitrates[] = {
+ 6600, 8850, 12650, 14250, 15850, 18250, 19850, 23050, 23850
+ };
+ GstMapInfo map;
+ gint max_mode;
+ guint16 mode_set;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+
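+ /* a damr atom is always 17 (0x11) bytes: the 8 byte atom header plus
+ * the 9 byte payload described above */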
+ if (map.size != 0x11) {
+ GST_DEBUG ("Atom should have size 0x11, not %" G_GSIZE_FORMAT, map.size);
+ goto bad_data;
+ }
+
+ if (QT_FOURCC (map.data + 4) != FOURCC_damr) {
+ GST_DEBUG ("Unknown atom in %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (QT_UINT32 (map.data + 4)));
+ goto bad_data;
+ }
+
+ mode_set = QT_UINT16 (map.data + 13);
+
+ if (mode_set == (wb ? AMR_WB_ALL_MODES : AMR_NB_ALL_MODES))
+ max_mode = 7 + (wb ? 1 : 0);
+ else
+ /* AMR-NB modes go from 0-7, and AMR-WB modes go from 0-8 */
+ max_mode = g_bit_nth_msf ((gulong) mode_set & (wb ? 0x1ff : 0xff), -1);
+
+ if (max_mode == -1) {
+ GST_DEBUG ("No mode indication was found (mode set) = %x",
+ (guint) mode_set);
+ goto bad_data;
+ }
+
+ gst_buffer_unmap (buf, &map);
+ return wb ? wb_bitrates[max_mode] : nb_bitrates[max_mode];
+
+ bad_data:
+ gst_buffer_unmap (buf, &map);
+ return 0;
+ }
+
+ static gboolean
+ qtdemux_parse_transformation_matrix (GstQTDemux * qtdemux,
+ GstByteReader * reader, guint32 * matrix, const gchar * atom)
+ {
+ /*
+ * 9 values of 32 bits (fixed point 16.16, except entries 2, 5 and 8, which are 2.30)
+ * [0 1 2]
+ * [3 4 5]
+ * [6 7 8]
+ */
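+ /* e.g. a 16.16 value of 0x00010000 is 1.0 and 0x00008000 is 0.5 */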
+
+ if (gst_byte_reader_get_remaining (reader) < 36)
+ return FALSE;
+
+ matrix[0] = gst_byte_reader_get_uint32_be_unchecked (reader);
+ matrix[1] = gst_byte_reader_get_uint32_be_unchecked (reader);
+ matrix[2] = gst_byte_reader_get_uint32_be_unchecked (reader);
+ matrix[3] = gst_byte_reader_get_uint32_be_unchecked (reader);
+ matrix[4] = gst_byte_reader_get_uint32_be_unchecked (reader);
+ matrix[5] = gst_byte_reader_get_uint32_be_unchecked (reader);
+ matrix[6] = gst_byte_reader_get_uint32_be_unchecked (reader);
+ matrix[7] = gst_byte_reader_get_uint32_be_unchecked (reader);
+ matrix[8] = gst_byte_reader_get_uint32_be_unchecked (reader);
+
+ GST_DEBUG_OBJECT (qtdemux, "Transformation matrix from atom %s", atom);
+ GST_DEBUG_OBJECT (qtdemux, "%u.%u %u.%u %u.%u", matrix[0] >> 16,
+ matrix[0] & 0xFFFF, matrix[1] >> 16, matrix[1] & 0xFFFF, matrix[2] >> 16,
+ matrix[2] & 0xFFFF);
+ GST_DEBUG_OBJECT (qtdemux, "%u.%u %u.%u %u.%u", matrix[3] >> 16,
+ matrix[3] & 0xFFFF, matrix[4] >> 16, matrix[4] & 0xFFFF, matrix[5] >> 16,
+ matrix[5] & 0xFFFF);
+ GST_DEBUG_OBJECT (qtdemux, "%u.%u %u.%u %u.%u", matrix[6] >> 16,
+ matrix[6] & 0xFFFF, matrix[7] >> 16, matrix[7] & 0xFFFF, matrix[8] >> 16,
+ matrix[8] & 0xFFFF);
+
+ return TRUE;
+ }
+
+ static void
+ qtdemux_inspect_transformation_matrix (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, guint32 * matrix, GstTagList ** taglist)
+ {
+
+ /* [a b c]
+ * [d e f]
+ * [g h i]
+ *
+ * This macro only compares the values a, b, d and e; it expects c, f and
+ * i to have already been checked
+ */
+ #define QTCHECK_MATRIX(m,a,b,d,e) ((m)[0] == (a << 16) && (m)[1] == (b << 16) && \
+ (m)[3] == (d << 16) && (m)[4] == (e << 16))
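+ /* the rotation checks below pass G_MAXUINT16 as -1.0: shifted left by
+ * 16 it becomes 0xFFFF0000, the 16.16 representation of -1 */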
+
+ /* only handle the cases where the last column has standard values */
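+ /* c and f must be 0 and i must be 1.0; i is stored as 2.30 fixed point,
+ * so 1.0 is 1 << 30 */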
+ if (matrix[2] == 0 && matrix[5] == 0 && matrix[8] == 1 << 30) {
+ const gchar *rotation_tag = NULL;
+
+ /* no rotation needed */
+ if (QTCHECK_MATRIX (matrix, 1, 0, 0, 1)) {
+ /* NOP */
+ } else if (QTCHECK_MATRIX (matrix, 0, 1, G_MAXUINT16, 0)) {
+ rotation_tag = "rotate-90";
+ } else if (QTCHECK_MATRIX (matrix, G_MAXUINT16, 0, 0, G_MAXUINT16)) {
+ rotation_tag = "rotate-180";
+ } else if (QTCHECK_MATRIX (matrix, 0, G_MAXUINT16, 1, 0)) {
+ rotation_tag = "rotate-270";
+ } else {
+ GST_FIXME_OBJECT (qtdemux, "Unhandled transformation matrix values");
+ }
+
+ GST_DEBUG_OBJECT (qtdemux, "Transformation matrix rotation %s",
+ GST_STR_NULL (rotation_tag));
+ if (rotation_tag != NULL) {
+ if (*taglist == NULL)
+ *taglist = gst_tag_list_new_empty ();
+ gst_tag_list_add (*taglist, GST_TAG_MERGE_REPLACE,
+ GST_TAG_IMAGE_ORIENTATION, rotation_tag, NULL);
+ }
+ } else {
+ GST_FIXME_OBJECT (qtdemux, "Unhandled transformation matrix values");
+ }
+ }
+
+ static gboolean
+ qtdemux_parse_protection_aavd (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, GNode * container, guint32 * original_fmt)
+ {
+ GNode *adrm;
+ guint32 adrm_size;
+ GstBuffer *adrm_buf = NULL;
+ QtDemuxAavdEncryptionInfo *info;
+
+ adrm = qtdemux_tree_get_child_by_type (container, FOURCC_adrm);
+ if (G_UNLIKELY (!adrm)) {
+ GST_ERROR_OBJECT (qtdemux, "aavd box does not contain mandatory adrm box");
+ return FALSE;
+ }
+ adrm_size = QT_UINT32 (adrm->data);
+ adrm_buf = gst_buffer_new_memdup (adrm->data, adrm_size);
+
+ stream->protection_scheme_type = FOURCC_aavd;
+
+ if (!stream->protection_scheme_info)
+ stream->protection_scheme_info = g_new0 (QtDemuxAavdEncryptionInfo, 1);
+
+ info = (QtDemuxAavdEncryptionInfo *) stream->protection_scheme_info;
+
+ if (info->default_properties)
+ gst_structure_free (info->default_properties);
+ info->default_properties = gst_structure_new ("application/x-aavd",
+ "encrypted", G_TYPE_BOOLEAN, TRUE,
+ "adrm", GST_TYPE_BUFFER, adrm_buf, NULL);
+ gst_buffer_unref (adrm_buf);
+
+ *original_fmt = FOURCC_mp4a;
+ return TRUE;
+ }
+
+ /* Parses the boxes defined in ISO/IEC 14496-12 that enable support for
+ * protected streams (sinf, frma, schm and schi); if the protection scheme is
+ * Common Encryption (cenc), the function will also parse the tenc box (defined
+ * in ISO/IEC 23001-7). @container points to the node that contains these boxes
+ * (typically an enc[v|a|t|s] sample entry); the function will set
+ * @original_fmt to the fourcc of the original unencrypted stream format.
+ * Returns TRUE if successful; FALSE otherwise. */
+ static gboolean
+ qtdemux_parse_protection_scheme_info (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, GNode * container, guint32 * original_fmt)
+ {
+ GNode *sinf;
+ GNode *frma;
+ GNode *schm;
+ GNode *schi;
+ QtDemuxCencSampleSetInfo *info;
+ GNode *tenc;
+ const guint8 *tenc_data;
+
+ g_return_val_if_fail (qtdemux != NULL, FALSE);
+ g_return_val_if_fail (stream != NULL, FALSE);
+ g_return_val_if_fail (container != NULL, FALSE);
+ g_return_val_if_fail (original_fmt != NULL, FALSE);
+
+ sinf = qtdemux_tree_get_child_by_type (container, FOURCC_sinf);
+ if (G_UNLIKELY (!sinf)) {
+ if (stream->protection_scheme_type == FOURCC_cenc
+ || stream->protection_scheme_type == FOURCC_cbcs) {
+ GST_ERROR_OBJECT (qtdemux, "sample entry does not contain a sinf box, "
+ "which is mandatory for Common Encryption");
+ return FALSE;
+ }
+ return TRUE;
+ }
+
+ frma = qtdemux_tree_get_child_by_type (sinf, FOURCC_frma);
+ if (G_UNLIKELY (!frma)) {
+ GST_ERROR_OBJECT (qtdemux, "sinf box does not contain mandatory frma box");
+ return FALSE;
+ }
+
+ *original_fmt = QT_FOURCC ((const guint8 *) frma->data + 8);
+ GST_DEBUG_OBJECT (qtdemux, "original stream format: '%" GST_FOURCC_FORMAT "'",
+ GST_FOURCC_ARGS (*original_fmt));
+
+ schm = qtdemux_tree_get_child_by_type (sinf, FOURCC_schm);
+ if (!schm) {
+ GST_DEBUG_OBJECT (qtdemux, "sinf box does not contain schm box");
+ return FALSE;
+ }
+ stream->protection_scheme_type = QT_FOURCC ((const guint8 *) schm->data + 12);
+ stream->protection_scheme_version =
+ QT_UINT32 ((const guint8 *) schm->data + 16);
+
+ GST_DEBUG_OBJECT (qtdemux,
+ "protection_scheme_type: %" GST_FOURCC_FORMAT ", "
+ "protection_scheme_version: %#010x",
+ GST_FOURCC_ARGS (stream->protection_scheme_type),
+ stream->protection_scheme_version);
+
+ schi = qtdemux_tree_get_child_by_type (sinf, FOURCC_schi);
+ if (!schi) {
+ GST_DEBUG_OBJECT (qtdemux, "sinf box does not contain schi box");
+ return FALSE;
+ }
+ if (stream->protection_scheme_type != FOURCC_cenc &&
+ stream->protection_scheme_type != FOURCC_piff &&
+ stream->protection_scheme_type != FOURCC_cbcs) {
+ GST_ERROR_OBJECT (qtdemux,
+ "Invalid protection_scheme_type: %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (stream->protection_scheme_type));
+ return FALSE;
+ }
+
+ if (G_UNLIKELY (!stream->protection_scheme_info))
+ stream->protection_scheme_info =
+ g_malloc0 (sizeof (QtDemuxCencSampleSetInfo));
+
+ info = (QtDemuxCencSampleSetInfo *) stream->protection_scheme_info;
+
+ if (stream->protection_scheme_type == FOURCC_cenc
+ || stream->protection_scheme_type == FOURCC_cbcs) {
+ guint8 is_encrypted;
+ guint8 iv_size;
+ guint8 constant_iv_size = 0;
+ const guint8 *default_kid;
+ guint8 crypt_byte_block = 0;
+ guint8 skip_byte_block = 0;
+ const guint8 *constant_iv = NULL;
+
+ tenc = qtdemux_tree_get_child_by_type (schi, FOURCC_tenc);
+ if (!tenc) {
+ GST_ERROR_OBJECT (qtdemux, "schi box does not contain tenc box, "
+ "which is mandatory for Common Encryption");
+ return FALSE;
+ }
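+ /* tenc payload past version/flags (per ISO/IEC 23001-7): a reserved
+ * byte, the pattern byte (version 1), default_isProtected,
+ * default_Per_Sample_IV_Size and a 16 byte default KID, optionally
+ * followed by a constant IV when the per-sample IV size is 0 */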
+ tenc_data = (const guint8 *) tenc->data + 12;
+ is_encrypted = QT_UINT8 (tenc_data + 2);
+ iv_size = QT_UINT8 (tenc_data + 3);
+ default_kid = (tenc_data + 4);
+ if (stream->protection_scheme_type == FOURCC_cbcs) {
+ guint8 possible_pattern_info;
+ if (iv_size == 0) {
+ constant_iv_size = QT_UINT8 (tenc_data + 20);
+ if (constant_iv_size != 8 && constant_iv_size != 16) {
+ GST_ERROR_OBJECT (qtdemux,
+ "constant IV size should be 8 or 16, not %hhu", constant_iv_size);
+ return FALSE;
+ }
+ constant_iv = (tenc_data + 21);
+ }
+ possible_pattern_info = QT_UINT8 (tenc_data + 1);
+ crypt_byte_block = (possible_pattern_info >> 4) & 0x0f;
+ skip_byte_block = possible_pattern_info & 0x0f;
+ }
+ qtdemux_update_default_sample_cenc_settings (qtdemux, info,
+ is_encrypted, stream->protection_scheme_type, iv_size, default_kid,
+ crypt_byte_block, skip_byte_block, constant_iv_size, constant_iv);
+ } else if (stream->protection_scheme_type == FOURCC_piff) {
+ GstByteReader br;
+ static const guint8 piff_track_encryption_uuid[] = {
+ 0x89, 0x74, 0xdb, 0xce, 0x7b, 0xe7, 0x4c, 0x51,
+ 0x84, 0xf9, 0x71, 0x48, 0xf9, 0x88, 0x25, 0x54
+ };
+
+ tenc = qtdemux_tree_get_child_by_type (schi, FOURCC_uuid);
+ if (!tenc) {
+ GST_ERROR_OBJECT (qtdemux, "schi box does not contain tenc box, "
+ "which is mandatory for Common Encryption");
+ return FALSE;
+ }
+
+ tenc_data = (const guint8 *) tenc->data + 8;
+ if (memcmp (tenc_data, piff_track_encryption_uuid, 16) != 0) {
+ gchar *box_uuid = qtdemux_uuid_bytes_to_string (tenc_data);
+ GST_ERROR_OBJECT (qtdemux,
+ "Unsupported track encryption box with uuid: %s", box_uuid);
+ g_free (box_uuid);
+ return FALSE;
+ }
+ tenc_data = (const guint8 *) tenc->data + 16 + 12;
+ gst_byte_reader_init (&br, tenc_data, 20);
+ if (!qtdemux_update_default_piff_encryption_settings (qtdemux, info, &br)) {
+ GST_ERROR_OBJECT (qtdemux, "PIFF track box parsing error");
+ return FALSE;
+ }
+ stream->protection_scheme_type = FOURCC_cenc;
+ }
+
+ return TRUE;
+ }
+
+ static gint
+ qtdemux_track_id_compare_func (QtDemuxStream ** stream1,
+ QtDemuxStream ** stream2)
+ {
+ return (gint) (*stream1)->track_id - (gint) (*stream2)->track_id;
+ }
+
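+ /* Stereo video media information (MPEG-A, ISO/IEC 23000-11). The svmi
+  * payload is read at fixed offsets from the start of the box: frame
+  * type at +12, frame layout at +13 and the stereo/mono change count
+  * at +14, which is why a box shorter than 18 bytes is rejected. */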
+ static gboolean
+ qtdemux_parse_stereo_svmi_atom (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ GNode * stbl)
+ {
+ GNode *svmi;
+
+   /* parse the svmi header if present */
+ svmi = qtdemux_tree_get_child_by_type (stbl, FOURCC_svmi);
+ if (svmi) {
+ guint len = QT_UINT32 ((guint8 *) svmi->data);
+ guint32 version = QT_UINT32 ((guint8 *) svmi->data + 8);
+ if (!version) {
+ GstVideoMultiviewMode mode = GST_VIDEO_MULTIVIEW_MODE_NONE;
+ GstVideoMultiviewFlags flags = GST_VIDEO_MULTIVIEW_FLAGS_NONE;
+ guint8 frame_type, frame_layout;
+ guint32 stereo_mono_change_count;
+
+ if (len < 18)
+ return FALSE;
+
+ /* MPEG-A stereo video */
+ if (qtdemux->major_brand == FOURCC_ss02)
+ flags |= GST_VIDEO_MULTIVIEW_FLAGS_MIXED_MONO;
+
+ frame_type = QT_UINT8 ((guint8 *) svmi->data + 12);
+ frame_layout = QT_UINT8 ((guint8 *) svmi->data + 13) & 0x01;
+ stereo_mono_change_count = QT_UINT32 ((guint8 *) svmi->data + 14);
+
+ switch (frame_type) {
+ case 0:
+ mode = GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE;
+ break;
+ case 1:
+ mode = GST_VIDEO_MULTIVIEW_MODE_ROW_INTERLEAVED;
+ break;
+ case 2:
+ mode = GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME;
+ break;
+ case 3:
+ /* mode 3 is primary/secondary view sequence, ie
+ * left/right views in separate tracks. See section 7.2
+ * of ISO/IEC 23000-11:2009 */
+ /* In the future this might be supported using related
+ * streams, like an enhancement track - if files like this
+ * ever exist */
+ GST_FIXME_OBJECT (qtdemux,
+ "Implement stereo video in separate streams");
+ }
+
+ if ((frame_layout & 0x1) == 0)
+ flags |= GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST;
+
+ GST_LOG_OBJECT (qtdemux,
+ "StereoVideo: composition type: %u, is_left_first: %u",
+ frame_type, frame_layout);
+
+ if (stereo_mono_change_count > 1) {
+ GST_FIXME_OBJECT (qtdemux,
+ "Mixed-mono flags are not yet supported in qtdemux.");
+ }
+
+ stream->multiview_mode = mode;
+ stream->multiview_flags = flags;
+ }
+ }
+
+ return TRUE;
+ }
+
+ /* parse the traks.
+ * With each track we associate a new QtDemuxStream that contains all the info
+ * about the trak.
+ * traks that do not decode to something (like strm traks) will not have a pad.
+ */
+ static gboolean
+ qtdemux_parse_trak (GstQTDemux * qtdemux, GNode * trak)
+ {
+ GstByteReader tkhd;
+ int offset;
+ GNode *mdia;
+ GNode *mdhd;
+ GNode *hdlr;
+ GNode *minf;
+ GNode *stbl;
+ GNode *stsd;
+ GNode *mp4a;
+ GNode *mp4v;
+ GNode *esds;
+ GNode *tref;
+ GNode *udta;
+
+ QtDemuxStream *stream = NULL;
+ const guint8 *stsd_data;
+ const guint8 *stsd_entry_data;
+ guint remaining_stsd_len;
+ guint stsd_entry_count;
+ guint stsd_index;
+ guint16 lang_code; /* quicktime lang code or packed iso code */
+ guint32 version;
+ guint32 tkhd_flags = 0;
+ guint8 tkhd_version = 0;
+ guint32 w = 0, h = 0;
+ guint value_size, stsd_len, len;
+ guint32 track_id;
+ guint32 dummy;
+
+ GST_DEBUG_OBJECT (qtdemux, "parse_trak");
+
+ if (!qtdemux_tree_get_child_by_type_full (trak, FOURCC_tkhd, &tkhd)
+ || !gst_byte_reader_get_uint8 (&tkhd, &tkhd_version)
+ || !gst_byte_reader_get_uint24_be (&tkhd, &tkhd_flags))
+ goto corrupt_file;
+
+ /* pick between 64 or 32 bits */
+ value_size = tkhd_version == 1 ? 8 : 4;
+ if (!gst_byte_reader_skip (&tkhd, value_size * 2) ||
+ !gst_byte_reader_get_uint32_be (&tkhd, &track_id))
+ goto corrupt_file;
+
+ /* Check if current moov has duplicated track_id */
+ if (qtdemux_find_stream (qtdemux, track_id))
+ goto existing_stream;
+
+ stream = _create_stream (qtdemux, track_id);
+ stream->stream_tags = gst_tag_list_make_writable (stream->stream_tags);
+
++#ifdef TIZEN_FEATURE_QTDEMUX_DURATION
++ if (!gst_byte_reader_skip (&tkhd, 4))
++ goto corrupt_file;
++
++ if (tkhd_version == 1) {
++ if (!gst_byte_reader_get_uint64_be (&tkhd, &stream->tkhd_duration))
++ goto corrupt_file;
++ } else {
++ guint32 dur = 0;
++ if (!gst_byte_reader_get_uint32_be (&tkhd, &dur))
++ goto corrupt_file;
++ stream->tkhd_duration = dur;
++ }
++ GST_INFO_OBJECT (qtdemux, "tkhd duration: %" G_GUINT64_FORMAT,
++ stream->tkhd_duration);
++#endif
+ /* need defaults for fragments */
+ qtdemux_parse_trex (qtdemux, stream, &dummy, &dummy, &dummy);
+
+ if ((tkhd_flags & 1) == 0)
+ stream->disabled = TRUE;
+
+ GST_LOG_OBJECT (qtdemux, "track[tkhd] version/flags/id: 0x%02x/%06x/%u",
+ tkhd_version, tkhd_flags, stream->track_id);
+
+ if (!(mdia = qtdemux_tree_get_child_by_type (trak, FOURCC_mdia)))
+ goto corrupt_file;
+
+ if (!(mdhd = qtdemux_tree_get_child_by_type (mdia, FOURCC_mdhd))) {
+ /* be nice for some crooked mjp2 files that use mhdr for mdhd */
+ if (qtdemux->major_brand != FOURCC_mjp2 ||
+ !(mdhd = qtdemux_tree_get_child_by_type (mdia, FOURCC_mhdr)))
+ goto corrupt_file;
+ }
+
+ len = QT_UINT32 ((guint8 *) mdhd->data);
+ version = QT_UINT32 ((guint8 *) mdhd->data + 8);
+ GST_LOG_OBJECT (qtdemux, "track version/flags: %08x", version);
+ if (version == 0x01000000) {
+ if (len < 42)
+ goto corrupt_file;
+ stream->timescale = QT_UINT32 ((guint8 *) mdhd->data + 28);
+ stream->duration = QT_UINT64 ((guint8 *) mdhd->data + 32);
+ lang_code = QT_UINT16 ((guint8 *) mdhd->data + 40);
+ } else {
+ if (len < 30)
+ goto corrupt_file;
+ stream->timescale = QT_UINT32 ((guint8 *) mdhd->data + 20);
+ stream->duration = QT_UINT32 ((guint8 *) mdhd->data + 24);
+ lang_code = QT_UINT16 ((guint8 *) mdhd->data + 28);
+ }
+
+ if (lang_code < 0x400) {
+ qtdemux_lang_map_qt_code_to_iso (stream->lang_id, lang_code);
+ } else if (lang_code == 0x7fff) {
+ stream->lang_id[0] = 0; /* unspecified */
+ } else {
+ stream->lang_id[0] = 0x60 + ((lang_code >> 10) & 0x1F);
+ stream->lang_id[1] = 0x60 + ((lang_code >> 5) & 0x1F);
+ stream->lang_id[2] = 0x60 + (lang_code & 0x1F);
+ stream->lang_id[3] = 0;
+ }
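+   /* Worked example for the packed ISO-639-2 branch: lang_code 0x15c7
+    * unpacks to (0x15c7 >> 10) & 0x1f = 5, (0x15c7 >> 5) & 0x1f = 14
+    * and 0x15c7 & 0x1f = 7; adding 0x60 to each gives "eng". */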
+
+ GST_LOG_OBJECT (qtdemux, "track timescale: %" G_GUINT32_FORMAT,
+ stream->timescale);
+ GST_LOG_OBJECT (qtdemux, "track duration: %" G_GUINT64_FORMAT,
+ stream->duration);
+ GST_LOG_OBJECT (qtdemux, "track language code/id: 0x%04x/%s",
+ lang_code, stream->lang_id);
+
+ if (G_UNLIKELY (stream->timescale == 0 || qtdemux->timescale == 0))
+ goto corrupt_file;
+
+ if ((tref = qtdemux_tree_get_child_by_type (trak, FOURCC_tref))) {
+ /* chapters track reference */
+ GNode *chap = qtdemux_tree_get_child_by_type (tref, FOURCC_chap);
+ if (chap) {
+ gsize length = GST_READ_UINT32_BE (chap->data);
+ if (qtdemux->chapters_track_id)
+ GST_FIXME_OBJECT (qtdemux, "Multiple CHAP tracks");
+
+ if (length >= 12) {
+ qtdemux->chapters_track_id =
+ GST_READ_UINT32_BE ((gint8 *) chap->data + 8);
+ }
+ }
+ }
+
+ /* fragmented files may have bogus duration in moov */
+ if (!qtdemux->fragmented &&
+ qtdemux->duration != G_MAXINT64 && stream->duration != G_MAXINT32) {
+ guint64 tdur1, tdur2;
+
+ /* don't overflow */
+ tdur1 = stream->timescale * (guint64) qtdemux->duration;
+ tdur2 = qtdemux->timescale * (guint64) stream->duration;
+
+ /* HACK:
+ * some of those trailers, nowadays, have prologue images that are
+ * themselves video tracks as well. I haven't really found a way to
+ * identify those yet, except for just looking at their duration. */
+ if (tdur1 != 0 && (tdur2 * 10 / tdur1) < 2) {
+ GST_WARNING_OBJECT (qtdemux,
+ "Track shorter than 20%% (%" G_GUINT64_FORMAT "/%" G_GUINT32_FORMAT
+ " vs. %" G_GUINT64_FORMAT "/%" G_GUINT32_FORMAT ") of the stream "
+ "found, assuming preview image or something; skipping track",
+ stream->duration, stream->timescale, qtdemux->duration,
+ qtdemux->timescale);
+ gst_qtdemux_stream_unref (stream);
+ return TRUE;
+ }
+ }
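+   /* The heuristic above: with equal timescales it reduces to
+    * 10 * stream_duration / presentation_duration < 2, e.g. a 2 s track
+    * in a 30 s presentation gives 20 / 30 = 0 in integer math, so the
+    * track is treated as a preview image and skipped. */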
+
+ if (!(hdlr = qtdemux_tree_get_child_by_type (mdia, FOURCC_hdlr)))
+ goto corrupt_file;
+
+ GST_LOG_OBJECT (qtdemux, "track type: %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (QT_FOURCC ((guint8 *) hdlr->data + 12)));
+
+ len = QT_UINT32 ((guint8 *) hdlr->data);
+ if (len >= 20)
+ stream->subtype = QT_FOURCC ((guint8 *) hdlr->data + 16);
+ GST_LOG_OBJECT (qtdemux, "track subtype: %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (stream->subtype));
+
+ if (!(minf = qtdemux_tree_get_child_by_type (mdia, FOURCC_minf)))
+ goto corrupt_file;
+
+ if (!(stbl = qtdemux_tree_get_child_by_type (minf, FOURCC_stbl)))
+ goto corrupt_file;
+
+ /* Parse out svmi (and later st3d/sv3d) atoms */
+ if (!qtdemux_parse_stereo_svmi_atom (qtdemux, stream, stbl))
+ goto corrupt_file;
+
+ /* parse rest of tkhd */
+ if (stream->subtype == FOURCC_vide) {
+ guint32 matrix[9];
+
+ /* version 1 uses some 64-bit ints */
++#ifdef TIZEN_FEATURE_QTDEMUX_DURATION
++ if (!gst_byte_reader_skip (&tkhd, 16))
++#else
+ if (!gst_byte_reader_skip (&tkhd, 20 + value_size))
++#endif
+ goto corrupt_file;
+
+ if (!qtdemux_parse_transformation_matrix (qtdemux, &tkhd, matrix, "tkhd"))
+ goto corrupt_file;
+
+ if (!gst_byte_reader_get_uint32_be (&tkhd, &w)
+ || !gst_byte_reader_get_uint32_be (&tkhd, &h))
+ goto corrupt_file;
+
+ qtdemux_inspect_transformation_matrix (qtdemux, stream, matrix,
+ &stream->stream_tags);
+ }
+
+ /* parse stsd */
+ if (!(stsd = qtdemux_tree_get_child_by_type (stbl, FOURCC_stsd)))
+ goto corrupt_file;
+ stsd_data = (const guint8 *) stsd->data;
+
+ /* stsd should at least have one entry */
+ stsd_len = QT_UINT32 (stsd_data);
+ if (stsd_len < 24) {
+ /* .. but skip stream with empty stsd produced by some Vivotek cameras */
+ if (stream->subtype == FOURCC_vivo) {
+ gst_qtdemux_stream_unref (stream);
+ return TRUE;
+ } else {
+ goto corrupt_file;
+ }
+ }
+
+ stream->stsd_entries_length = stsd_entry_count = QT_UINT32 (stsd_data + 12);
+ /* each stsd entry must contain at least 8 bytes */
+ if (stream->stsd_entries_length == 0
+ || stream->stsd_entries_length > stsd_len / 8) {
+ stream->stsd_entries_length = 0;
+ goto corrupt_file;
+ }
+ stream->stsd_entries = g_new0 (QtDemuxStreamStsdEntry, stsd_entry_count);
+ GST_LOG_OBJECT (qtdemux, "stsd len: %d", stsd_len);
+ GST_LOG_OBJECT (qtdemux, "stsd entry count: %u", stsd_entry_count);
+
+ stsd_entry_data = stsd_data + 16;
+ remaining_stsd_len = stsd_len - 16;
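+   /* stsd is a full box: 8-byte header, 4 bytes of version/flags and a
+    * 4-byte entry count, so the first sample entry starts at offset 16. */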
+ for (stsd_index = 0; stsd_index < stsd_entry_count; stsd_index++) {
+ guint32 fourcc;
+ gchar *codec = NULL;
+ QtDemuxStreamStsdEntry *entry = &stream->stsd_entries[stsd_index];
+
+ /* and that entry should fit within stsd */
+ len = QT_UINT32 (stsd_entry_data);
+ if (len > remaining_stsd_len)
+ goto corrupt_file;
+
+ entry->fourcc = fourcc = QT_FOURCC (stsd_entry_data + 4);
+ GST_LOG_OBJECT (qtdemux, "stsd type: %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (entry->fourcc));
+ GST_LOG_OBJECT (qtdemux, "stsd type len: %d", len);
+
+ if ((fourcc == FOURCC_drms) || (fourcc == FOURCC_drmi))
+ goto error_encrypted;
+
+ if (fourcc == FOURCC_aavd) {
+ if (stream->subtype != FOURCC_soun) {
+ GST_ERROR_OBJECT (qtdemux,
+ "Unexpeced stsd type 'aavd' outside 'soun' track");
+ } else {
+ /* encrypted audio with sound sample description v0 */
+ GNode *enc = qtdemux_tree_get_child_by_type (stsd, fourcc);
+ stream->protected = TRUE;
+ if (!qtdemux_parse_protection_aavd (qtdemux, stream, enc, &fourcc))
+ GST_ERROR_OBJECT (qtdemux, "Failed to parse protection scheme info");
+ }
+ }
+
+ if (fourcc == FOURCC_encv || fourcc == FOURCC_enca) {
+ /* FIXME this looks wrong, there might be multiple children
+ * with the same type */
+ GNode *enc = qtdemux_tree_get_child_by_type (stsd, fourcc);
+ stream->protected = TRUE;
+ if (!qtdemux_parse_protection_scheme_info (qtdemux, stream, enc, &fourcc))
+ GST_ERROR_OBJECT (qtdemux, "Failed to parse protection scheme info");
+ }
+
+ if (stream->subtype == FOURCC_vide) {
+ GNode *colr;
+ GNode *fiel;
+ GNode *pasp;
+ gboolean gray;
+ gint depth, palette_size, palette_count;
+ guint32 *palette_data = NULL;
+
+ entry->sampled = TRUE;
+
+ stream->display_width = w >> 16;
+ stream->display_height = h >> 16;
+
+ offset = 16;
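+      /* A video sample description is the 16-byte sample entry header
+       * followed by 70 fixed bytes, up to and including color_table_id
+       * at offset + 68, hence the 86-byte minimum checked below. */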
+ if (len < 86) /* TODO verify */
+ goto corrupt_file;
+
+ entry->width = QT_UINT16 (stsd_entry_data + offset + 16);
+ entry->height = QT_UINT16 (stsd_entry_data + offset + 18);
+ entry->fps_n = 0; /* this is filled in later */
+ entry->fps_d = 0; /* this is filled in later */
+ entry->bits_per_sample = QT_UINT16 (stsd_entry_data + offset + 66);
+ entry->color_table_id = QT_UINT16 (stsd_entry_data + offset + 68);
+
+ /* if color_table_id is 0, ctab atom must follow; however some files
+ * produced by TMPEGEnc have color_table_id = 0 and no ctab atom, so
+ * if color table is not present we'll correct the value */
+ if (entry->color_table_id == 0 &&
+ (len < 90
+ || QT_FOURCC (stsd_entry_data + offset + 70) != FOURCC_ctab)) {
+ entry->color_table_id = -1;
+ }
+
+ GST_LOG_OBJECT (qtdemux, "width %d, height %d, bps %d, color table id %d",
+ entry->width, entry->height, entry->bits_per_sample,
+ entry->color_table_id);
+
+ depth = entry->bits_per_sample;
+
+ /* more than 32 bits means grayscale */
+ gray = (depth > 32);
+ /* low 32 bits specify the depth */
+ depth &= 0x1F;
+
+ /* different number of palette entries is determined by depth. */
+ palette_count = 0;
+ if ((depth == 1) || (depth == 2) || (depth == 4) || (depth == 8))
+ palette_count = (1 << depth);
+ palette_size = palette_count * 4;
+
+ if (entry->color_table_id) {
+ switch (palette_count) {
+ case 0:
+ break;
+ case 2:
+ palette_data = g_memdup2 (ff_qt_default_palette_2, palette_size);
+ break;
+ case 4:
+ palette_data = g_memdup2 (ff_qt_default_palette_4, palette_size);
+ break;
+ case 16:
+ if (gray)
+ palette_data =
+ g_memdup2 (ff_qt_grayscale_palette_16, palette_size);
+ else
+ palette_data = g_memdup2 (ff_qt_default_palette_16, palette_size);
+ break;
+ case 256:
+ if (gray)
+ palette_data =
+ g_memdup2 (ff_qt_grayscale_palette_256, palette_size);
+ else
+ palette_data =
+ g_memdup2 (ff_qt_default_palette_256, palette_size);
+ break;
+ default:
+ GST_ELEMENT_WARNING (qtdemux, STREAM, DEMUX,
+ (_("The video in this file might not play correctly.")),
+ ("unsupported palette depth %d", depth));
+ break;
+ }
+ } else {
+ gint i, j, start, end;
+
+ if (len < 94)
+ goto corrupt_file;
+
+ /* read table */
+ start = QT_UINT32 (stsd_entry_data + offset + 70);
+ palette_count = QT_UINT16 (stsd_entry_data + offset + 74);
+ end = QT_UINT16 (stsd_entry_data + offset + 76);
+
+ GST_LOG_OBJECT (qtdemux, "start %d, end %d, palette_count %d",
+ start, end, palette_count);
+
+ if (end > 255)
+ end = 255;
+ if (start > end)
+ start = end;
+
+ if (len < 94 + (end - start) * 8)
+ goto corrupt_file;
+
+ /* palette is always the same size */
+ palette_data = g_malloc0 (256 * 4);
+ palette_size = 256 * 4;
+
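+        /* The loop below keeps only the high byte of each 16-bit A/R/G/B
+         * component, packing them as 0xAARRGGBB: e.g. a = 0xffff,
+         * r = 0x1234, g = 0x5678, b = 0x9abc yields 0xff12569a. */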
+ for (j = 0, i = start; i <= end; j++, i++) {
+ guint32 a, r, g, b;
+
+ a = QT_UINT16 (stsd_entry_data + offset + 78 + (j * 8));
+ r = QT_UINT16 (stsd_entry_data + offset + 80 + (j * 8));
+ g = QT_UINT16 (stsd_entry_data + offset + 82 + (j * 8));
+ b = QT_UINT16 (stsd_entry_data + offset + 84 + (j * 8));
+
+ palette_data[i] = ((a & 0xff00) << 16) | ((r & 0xff00) << 8) |
+ (g & 0xff00) | (b >> 8);
+ }
+ }
+
+ if (entry->caps)
+ gst_caps_unref (entry->caps);
+
+ entry->caps =
+ qtdemux_video_caps (qtdemux, stream, entry, fourcc, stsd_entry_data,
+ &codec);
+ if (G_UNLIKELY (!entry->caps)) {
+ g_free (palette_data);
+ goto unknown_stream;
+ }
+
+ if (codec) {
+ gst_tag_list_add (stream->stream_tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_VIDEO_CODEC, codec, NULL);
+ g_free (codec);
+ codec = NULL;
+ }
+
+ if (palette_data) {
+ GstStructure *s;
+
+ if (entry->rgb8_palette)
+ gst_memory_unref (entry->rgb8_palette);
+ entry->rgb8_palette = gst_memory_new_wrapped (GST_MEMORY_FLAG_READONLY,
+ palette_data, palette_size, 0, palette_size, palette_data, g_free);
+
+ s = gst_caps_get_structure (entry->caps, 0);
+
+ /* non-raw video has a palette_data property. raw video has the palette as
+ * an extra plane that we append to the output buffers before we push
+ * them*/
+ if (!gst_structure_has_name (s, "video/x-raw")) {
+ GstBuffer *palette;
+
+ palette = gst_buffer_new ();
+ gst_buffer_append_memory (palette, entry->rgb8_palette);
+ entry->rgb8_palette = NULL;
+
+ gst_caps_set_simple (entry->caps, "palette_data",
+ GST_TYPE_BUFFER, palette, NULL);
+ gst_buffer_unref (palette);
+ }
+ } else if (palette_count != 0) {
+ GST_ELEMENT_WARNING (qtdemux, STREAM, NOT_IMPLEMENTED,
+ (NULL), ("Unsupported palette depth %d", depth));
+ }
+
+ GST_LOG_OBJECT (qtdemux, "frame count: %u",
+ QT_UINT16 (stsd_entry_data + offset + 32));
+
+ esds = NULL;
+ pasp = NULL;
+ colr = NULL;
+ fiel = NULL;
+ /* pick 'the' stsd child */
+ mp4v = qtdemux_tree_get_child_by_index (stsd, stsd_index);
+ // We should skip parsing the stsd for non-protected streams if
+ // the entry doesn't match the fourcc, since they don't change
+ // format. However, for protected streams we can have partial
+ // encryption, where parts of the stream are encrypted and parts
+ // not. For both parts of such streams, we should ensure the
+ // esds overrides are parsed for both from the stsd.
+ if (QTDEMUX_TREE_NODE_FOURCC (mp4v) != fourcc) {
+ if (stream->protected && QTDEMUX_TREE_NODE_FOURCC (mp4v) != FOURCC_encv)
+ mp4v = NULL;
+ else if (!stream->protected)
+ mp4v = NULL;
+ }
+
+ if (mp4v) {
+ esds = qtdemux_tree_get_child_by_type (mp4v, FOURCC_esds);
+ pasp = qtdemux_tree_get_child_by_type (mp4v, FOURCC_pasp);
+ colr = qtdemux_tree_get_child_by_type (mp4v, FOURCC_colr);
+ fiel = qtdemux_tree_get_child_by_type (mp4v, FOURCC_fiel);
+ }
+
+ if (pasp) {
+ const guint8 *pasp_data = (const guint8 *) pasp->data;
+ gint len = QT_UINT32 (pasp_data);
+
+ if (len == 16) {
+ CUR_STREAM (stream)->par_w = QT_UINT32 (pasp_data + 8);
+ CUR_STREAM (stream)->par_h = QT_UINT32 (pasp_data + 12);
+ } else {
+ CUR_STREAM (stream)->par_w = 0;
+ CUR_STREAM (stream)->par_h = 0;
+ }
+ } else {
+ CUR_STREAM (stream)->par_w = 0;
+ CUR_STREAM (stream)->par_h = 0;
+ }
+
+ if (fiel) {
+ const guint8 *fiel_data = (const guint8 *) fiel->data;
+ gint len = QT_UINT32 (fiel_data);
+
+ if (len == 10) {
+ CUR_STREAM (stream)->interlace_mode = GST_READ_UINT8 (fiel_data + 8);
+ CUR_STREAM (stream)->field_order = GST_READ_UINT8 (fiel_data + 9);
+ }
+ }
+
+ if (colr) {
+ const guint8 *colr_data = (const guint8 *) colr->data;
+ gint len = QT_UINT32 (colr_data);
+
+ if (len == 19 || len == 18) {
+ guint32 color_type = GST_READ_UINT32_LE (colr_data + 8);
+
+ if (color_type == FOURCC_nclx || color_type == FOURCC_nclc) {
+ guint16 primaries = GST_READ_UINT16_BE (colr_data + 12);
+ guint16 transfer_function = GST_READ_UINT16_BE (colr_data + 14);
+ guint16 matrix = GST_READ_UINT16_BE (colr_data + 16);
+            gboolean full_range = len == 19 ? colr_data[18] >> 7 : FALSE;
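+            /* These are ISO/IEC 23001-8 code points: e.g. primaries/
+             * transfer/matrix of 1/1/1 maps to BT.709. The full-range
+             * flag is the top bit of the byte after the matrix
+             * coefficients and only exists in the 19-byte nclx variant. */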
+
+ CUR_STREAM (stream)->colorimetry.primaries =
+ gst_video_color_primaries_from_iso (primaries);
+ CUR_STREAM (stream)->colorimetry.transfer =
+ gst_video_transfer_function_from_iso (transfer_function);
+ CUR_STREAM (stream)->colorimetry.matrix =
+ gst_video_color_matrix_from_iso (matrix);
+ CUR_STREAM (stream)->colorimetry.range =
+ full_range ? GST_VIDEO_COLOR_RANGE_0_255 :
+ GST_VIDEO_COLOR_RANGE_16_235;
+ } else {
+ GST_DEBUG_OBJECT (qtdemux, "Unsupported color type");
+ }
+ } else {
+ GST_WARNING_OBJECT (qtdemux, "Invalid colr atom size");
+ }
+ }
+
+ if (esds) {
+ gst_qtdemux_handle_esds (qtdemux, stream, entry, esds,
+ stream->stream_tags);
+ } else {
+ switch (fourcc) {
+ case FOURCC_H264:
+ case FOURCC_avc1:
+ case FOURCC_avc3:
+ {
+ gint len = QT_UINT32 (stsd_entry_data) - 0x56;
+ const guint8 *avc_data = stsd_entry_data + 0x56;
+
+ /* find avcC */
+ while (len >= 0x8) {
+ gint size;
+
+ if (QT_UINT32 (avc_data) <= len)
+ size = QT_UINT32 (avc_data) - 0x8;
+ else
+ size = len - 0x8;
+
+ if (size < 1)
+ /* No real data, so break out */
+ break;
+
+ switch (QT_FOURCC (avc_data + 0x4)) {
+ case FOURCC_avcC:
+ {
+ /* parse, if found */
+ GstBuffer *buf;
+
+ GST_DEBUG_OBJECT (qtdemux, "found avcC codec_data in stsd");
+
+ /* First 4 bytes are the length of the atom, the next 4 bytes
+ * are the fourcc, the next 1 byte is the version, and the
+                 * subsequent bytes are sequence parameter set like data. */
+ gst_codec_utils_h264_caps_set_level_and_profile (entry->caps,
+ avc_data + 8 + 1, size - 1);
+ buf = gst_buffer_new_and_alloc (size);
+ gst_buffer_fill (buf, 0, avc_data + 0x8, size);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+
+ break;
+ }
+ case FOURCC_strf:
+ {
+ GstBuffer *buf;
+
+ GST_DEBUG_OBJECT (qtdemux, "found strf codec_data in stsd");
+
+ /* First 4 bytes are the length of the atom, the next 4 bytes
+ * are the fourcc, next 40 bytes are BITMAPINFOHEADER,
+ * next 1 byte is the version, and the
+ * subsequent bytes are sequence parameter set like data. */
+
+ size -= 40; /* we'll be skipping BITMAPINFOHEADER */
+ if (size > 1) {
+ gst_codec_utils_h264_caps_set_level_and_profile
+ (entry->caps, avc_data + 8 + 40 + 1, size - 1);
+
+ buf = gst_buffer_new_and_alloc (size);
+ gst_buffer_fill (buf, 0, avc_data + 8 + 40, size);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ }
+ break;
+ }
+ case FOURCC_btrt:
+ {
+ guint avg_bitrate, max_bitrate;
+
+ /* bufferSizeDB, maxBitrate and avgBitrate - 4 bytes each */
+ if (size < 12)
+ break;
+
+ max_bitrate = QT_UINT32 (avc_data + 0xc);
+ avg_bitrate = QT_UINT32 (avc_data + 0x10);
+
+ if (!max_bitrate && !avg_bitrate)
+ break;
+
+ /* Some muxers seem to swap the average and maximum bitrates
+ * (I'm looking at you, YouTube), so we swap for sanity. */
+ if (max_bitrate > 0 && max_bitrate < avg_bitrate) {
+ guint temp = avg_bitrate;
+
+ avg_bitrate = max_bitrate;
+ max_bitrate = temp;
+ }
+
+ if (max_bitrate > 0 && max_bitrate < G_MAXUINT32) {
+ gst_tag_list_add (stream->stream_tags,
+ GST_TAG_MERGE_REPLACE, GST_TAG_MAXIMUM_BITRATE,
+ max_bitrate, NULL);
+ }
+ if (avg_bitrate > 0 && avg_bitrate < G_MAXUINT32) {
+ gst_tag_list_add (stream->stream_tags,
+ GST_TAG_MERGE_REPLACE, GST_TAG_BITRATE, avg_bitrate,
+ NULL);
+ }
+
+ break;
+ }
+
+ default:
+ break;
+ }
+
+ len -= size + 8;
+ avc_data += size + 8;
+ }
+
+ break;
+ }
+ case FOURCC_H265:
+ case FOURCC_hvc1:
+ case FOURCC_hev1:
+ case FOURCC_dvh1:
+ case FOURCC_dvhe:
+ {
+ gint len = QT_UINT32 (stsd_entry_data) - 0x56;
+ const guint8 *hevc_data = stsd_entry_data + 0x56;
+
+ /* find hevc */
+ while (len >= 0x8) {
+ gint size;
+
+ if (QT_UINT32 (hevc_data) <= len)
+ size = QT_UINT32 (hevc_data) - 0x8;
+ else
+ size = len - 0x8;
+
+ if (size < 1)
+ /* No real data, so break out */
+ break;
+
+ switch (QT_FOURCC (hevc_data + 0x4)) {
+ case FOURCC_hvcC:
+ {
+ /* parse, if found */
+ GstBuffer *buf;
+
+ GST_DEBUG_OBJECT (qtdemux, "found hvcC codec_data in stsd");
+
+ /* First 4 bytes are the length of the atom, the next 4 bytes
+ * are the fourcc, the next 1 byte is the version, and the
+                 * subsequent bytes are profile_tier_level structure like data. */
+ gst_codec_utils_h265_caps_set_level_tier_and_profile
+ (entry->caps, hevc_data + 8 + 1, size - 1);
+
+ buf = gst_buffer_new_and_alloc (size);
+ gst_buffer_fill (buf, 0, hevc_data + 0x8, size);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ break;
+ }
+ default:
+ break;
+ }
+ len -= size + 8;
+ hevc_data += size + 8;
+ }
+ break;
+ }
+ case FOURCC_mp4v:
+ case FOURCC_MP4V:
+ case FOURCC_fmp4:
+ case FOURCC_FMP4:
+ case FOURCC_xvid:
+ case FOURCC_XVID:
+ {
+ GNode *glbl;
+
+ GST_DEBUG_OBJECT (qtdemux, "found %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (fourcc));
+
+ /* codec data might be in glbl extension atom */
+ glbl = mp4v ?
+ qtdemux_tree_get_child_by_type (mp4v, FOURCC_glbl) : NULL;
+ if (glbl) {
+ guint8 *data;
+ GstBuffer *buf;
+ gint len;
+
+ GST_DEBUG_OBJECT (qtdemux, "found glbl data in stsd");
+ data = glbl->data;
+ len = QT_UINT32 (data);
+ if (len > 0x8) {
+ len -= 0x8;
+ buf = gst_buffer_new_and_alloc (len);
+ gst_buffer_fill (buf, 0, data + 8, len);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ }
+ }
+ break;
+ }
+ case FOURCC_mjp2:
+ {
+ /* see annex I of the jpeg2000 spec */
+ GNode *jp2h, *ihdr, *colr, *mjp2, *field, *prefix, *cmap, *cdef;
+ const guint8 *data;
+ const gchar *colorspace = NULL;
+ gint ncomp = 0;
+ guint32 ncomp_map = 0;
+ gint32 *comp_map = NULL;
+ guint32 nchan_def = 0;
+ gint32 *chan_def = NULL;
+
+ GST_DEBUG_OBJECT (qtdemux, "found mjp2");
+ /* some required atoms */
+ mjp2 = qtdemux_tree_get_child_by_index (stsd, stsd_index);
+ if (!mjp2)
+ break;
+ jp2h = qtdemux_tree_get_child_by_type (mjp2, FOURCC_jp2h);
+ if (!jp2h)
+ break;
+
+ /* number of components; redundant with info in codestream, but useful
+ to a muxer */
+ ihdr = qtdemux_tree_get_child_by_type (jp2h, FOURCC_ihdr);
+ if (!ihdr || QT_UINT32 (ihdr->data) != 22)
+ break;
+ ncomp = QT_UINT16 (((guint8 *) ihdr->data) + 16);
+
+ colr = qtdemux_tree_get_child_by_type (jp2h, FOURCC_colr);
+ if (!colr)
+ break;
+ GST_DEBUG_OBJECT (qtdemux, "found colr");
+ /* extract colour space info */
+ if (QT_UINT8 ((guint8 *) colr->data + 8) == 1) {
+ switch (QT_UINT32 ((guint8 *) colr->data + 11)) {
+ case 16:
+ colorspace = "sRGB";
+ break;
+ case 17:
+ colorspace = "GRAY";
+ break;
+ case 18:
+ colorspace = "sYUV";
+ break;
+ default:
+ colorspace = NULL;
+ break;
+ }
+ }
+ if (!colorspace)
+ /* colr is required, and only values 16, 17, and 18 are specified,
+ so error if we have no colorspace */
+ break;
+
+ /* extract component mapping */
+ cmap = qtdemux_tree_get_child_by_type (jp2h, FOURCC_cmap);
+ if (cmap) {
+ guint32 cmap_len = 0;
+ int i;
+ cmap_len = QT_UINT32 (cmap->data);
+ if (cmap_len >= 8) {
+ /* normal box, subtract off header */
+ cmap_len -= 8;
+ /* cmap: { u16 cmp; u8 mtyp; u8 pcol; }* */
+ if (cmap_len % 4 == 0) {
+ ncomp_map = (cmap_len / 4);
+ comp_map = g_new0 (gint32, ncomp_map);
+ for (i = 0; i < ncomp_map; i++) {
+ guint16 cmp;
+ guint8 mtyp, pcol;
+ cmp = QT_UINT16 (((guint8 *) cmap->data) + 8 + i * 4);
+ mtyp = QT_UINT8 (((guint8 *) cmap->data) + 8 + i * 4 + 2);
+ pcol = QT_UINT8 (((guint8 *) cmap->data) + 8 + i * 4 + 3);
+ comp_map[i] = (mtyp << 24) | (pcol << 16) | cmp;
+ }
+ }
+ }
+ }
+ /* extract channel definitions */
+ cdef = qtdemux_tree_get_child_by_type (jp2h, FOURCC_cdef);
+ if (cdef) {
+ guint32 cdef_len = 0;
+ int i;
+ cdef_len = QT_UINT32 (cdef->data);
+ if (cdef_len >= 10) {
+ /* normal box, subtract off header and len */
+ cdef_len -= 10;
+ /* cdef: u16 n; { u16 cn; u16 typ; u16 asoc; }* */
+ if (cdef_len % 6 == 0) {
+ nchan_def = (cdef_len / 6);
+ chan_def = g_new0 (gint32, nchan_def);
+ for (i = 0; i < nchan_def; i++)
+ chan_def[i] = -1;
+ for (i = 0; i < nchan_def; i++) {
+ guint16 cn, typ, asoc;
+ cn = QT_UINT16 (((guint8 *) cdef->data) + 10 + i * 6);
+ typ = QT_UINT16 (((guint8 *) cdef->data) + 10 + i * 6 + 2);
+ asoc = QT_UINT16 (((guint8 *) cdef->data) + 10 + i * 6 + 4);
+ if (cn < nchan_def) {
+ switch (typ) {
+ case 0:
+ chan_def[cn] = asoc;
+ break;
+ case 1:
+ chan_def[cn] = 0; /* alpha */
+ break;
+ default:
+ chan_def[cn] = -typ;
+ }
+ }
+ }
+ }
+ }
+ }
+
+ gst_caps_set_simple (entry->caps,
+ "num-components", G_TYPE_INT, ncomp, NULL);
+ gst_caps_set_simple (entry->caps,
+ "colorspace", G_TYPE_STRING, colorspace, NULL);
+
+ if (comp_map) {
+ GValue arr = { 0, };
+ GValue elt = { 0, };
+ int i;
+ g_value_init (&arr, GST_TYPE_ARRAY);
+ g_value_init (&elt, G_TYPE_INT);
+ for (i = 0; i < ncomp_map; i++) {
+ g_value_set_int (&elt, comp_map[i]);
+ gst_value_array_append_value (&arr, &elt);
+ }
+ gst_structure_set_value (gst_caps_get_structure (entry->caps, 0),
+ "component-map", &arr);
+ g_value_unset (&elt);
+ g_value_unset (&arr);
+ g_free (comp_map);
+ }
+
+ if (chan_def) {
+ GValue arr = { 0, };
+ GValue elt = { 0, };
+ int i;
+ g_value_init (&arr, GST_TYPE_ARRAY);
+ g_value_init (&elt, G_TYPE_INT);
+ for (i = 0; i < nchan_def; i++) {
+ g_value_set_int (&elt, chan_def[i]);
+ gst_value_array_append_value (&arr, &elt);
+ }
+ gst_structure_set_value (gst_caps_get_structure (entry->caps, 0),
+ "channel-definitions", &arr);
+ g_value_unset (&elt);
+ g_value_unset (&arr);
+ g_free (chan_def);
+ }
+
+ /* some optional atoms */
+ field = qtdemux_tree_get_child_by_type (mjp2, FOURCC_fiel);
+ prefix = qtdemux_tree_get_child_by_type (mjp2, FOURCC_jp2x);
+
+ /* indicate possible fields in caps */
+ if (field) {
+ data = (guint8 *) field->data + 8;
+ if (*data != 1)
+ gst_caps_set_simple (entry->caps, "fields", G_TYPE_INT,
+ (gint) * data, NULL);
+ }
+ /* add codec_data if provided */
+ if (prefix) {
+ GstBuffer *buf;
+ gint len;
+
+ GST_DEBUG_OBJECT (qtdemux, "found prefix data in stsd");
+ data = prefix->data;
+ len = QT_UINT32 (data);
+ if (len > 0x8) {
+ len -= 0x8;
+ buf = gst_buffer_new_and_alloc (len);
+ gst_buffer_fill (buf, 0, data + 8, len);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ }
+ }
+ break;
+ }
+ case FOURCC_SVQ3:
+ case FOURCC_VP31:
+ {
+ GstBuffer *buf;
+ GstBuffer *seqh = NULL;
+ const guint8 *gamma_data = NULL;
+ gint len = QT_UINT32 (stsd_data); /* FIXME review - why put the whole stsd in codec data? */
+
+ qtdemux_parse_svq3_stsd_data (qtdemux, stsd_entry_data, &gamma_data,
+ &seqh);
+ if (gamma_data) {
+ gst_caps_set_simple (entry->caps, "applied-gamma", G_TYPE_DOUBLE,
+ QT_FP32 (gamma_data), NULL);
+ }
+ if (seqh) {
+ /* sorry for the bad name, but we don't know what this is, other
+ * than its own fourcc */
+ gst_caps_set_simple (entry->caps, "seqh", GST_TYPE_BUFFER, seqh,
+ NULL);
+ gst_buffer_unref (seqh);
+ }
+
+ GST_DEBUG_OBJECT (qtdemux, "found codec_data in stsd");
+ buf = gst_buffer_new_and_alloc (len);
+ gst_buffer_fill (buf, 0, stsd_data, len);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ break;
+ }
+ case FOURCC_jpeg:
+ {
+ /* https://developer.apple.com/standards/qtff-2001.pdf,
+ * page 92, "Video Sample Description", under table 3.1 */
+ GstByteReader br;
+
+ const gint compressor_offset =
+ 16 + 4 + 4 * 3 + 2 * 2 + 2 * 4 + 4 + 2;
+ const gint min_size = compressor_offset + 32 + 2 + 2;
+ GNode *jpeg;
+ guint32 len;
+ guint16 color_table_id = 0;
+ gboolean ok;
+
+ GST_DEBUG_OBJECT (qtdemux, "found jpeg");
+
+ /* recover information on interlaced/progressive */
+ jpeg = qtdemux_tree_get_child_by_type (stsd, FOURCC_jpeg);
+ if (!jpeg)
+ break;
+
+ len = QT_UINT32 (jpeg->data);
+ GST_DEBUG_OBJECT (qtdemux, "Found jpeg: len %u, need %d", len,
+ min_size);
+ if (len >= min_size) {
+ gst_byte_reader_init (&br, jpeg->data, len);
+
+ gst_byte_reader_skip (&br, compressor_offset + 32 + 2);
+ gst_byte_reader_get_uint16_le (&br, &color_table_id);
+ if (color_table_id != 0) {
+ /* the spec says there can be concatenated chunks in the data, and we want
+               * to find one called 'fiel'. Walk through them. */
+ gint offset = min_size;
+ while (offset + 8 < len) {
+ guint32 size = 0, tag;
+ ok = gst_byte_reader_get_uint32_le (&br, &size);
+ ok &= gst_byte_reader_get_uint32_le (&br, &tag);
+ if (!ok || size < 8) {
+ GST_WARNING_OBJECT (qtdemux,
+ "Failed to walk optional chunk list");
+ break;
+ }
+ GST_DEBUG_OBJECT (qtdemux,
+ "Found optional %4.4s chunk, size %u",
+ (const char *) &tag, size);
+ if (tag == FOURCC_fiel) {
+ guint8 n_fields = 0, ordering = 0;
+ gst_byte_reader_get_uint8 (&br, &n_fields);
+ gst_byte_reader_get_uint8 (&br, &ordering);
+ if (n_fields == 1 || n_fields == 2) {
+ GST_DEBUG_OBJECT (qtdemux,
+ "Found fiel tag with %u fields, ordering %u",
+ n_fields, ordering);
+ if (n_fields == 2)
+ gst_caps_set_simple (CUR_STREAM (stream)->caps,
+ "interlace-mode", G_TYPE_STRING, "interleaved",
+ NULL);
+ } else {
+ GST_WARNING_OBJECT (qtdemux,
+ "Found fiel tag with invalid fields (%u)", n_fields);
+ }
+ }
+ offset += size;
+ }
+ } else {
+ GST_DEBUG_OBJECT (qtdemux,
+ "Color table ID is 0, not trying to get interlacedness");
+ }
+ } else {
+ GST_WARNING_OBJECT (qtdemux,
+ "Length of jpeg chunk is too small, not trying to get interlacedness");
+ }
+
+ break;
+ }
+ case FOURCC_rle_:
+ case FOURCC_WRLE:
+ {
+ gst_caps_set_simple (entry->caps,
+ "depth", G_TYPE_INT, QT_UINT16 (stsd_entry_data + offset + 66),
+ NULL);
+ break;
+ }
+ case FOURCC_XiTh:
+ {
+ GNode *xith, *xdxt;
+
+ GST_DEBUG_OBJECT (qtdemux, "found XiTh");
+ xith = qtdemux_tree_get_child_by_index (stsd, stsd_index);
+ if (!xith)
+ break;
+
+ xdxt = qtdemux_tree_get_child_by_type (xith, FOURCC_XdxT);
+ if (!xdxt)
+ break;
+
+ GST_DEBUG_OBJECT (qtdemux, "found XdxT node");
+ /* collect the headers and store them in a stream list so that we can
+ * send them out first */
+ qtdemux_parse_theora_extension (qtdemux, stream, xdxt);
+ break;
+ }
+ case FOURCC_ovc1:
+ {
+ GNode *ovc1;
+ guint8 *ovc1_data;
+ guint ovc1_len;
+ GstBuffer *buf;
+
+ GST_DEBUG_OBJECT (qtdemux, "parse ovc1 header");
+ ovc1 = qtdemux_tree_get_child_by_index (stsd, stsd_index);
+ if (!ovc1)
+ break;
+ ovc1_data = ovc1->data;
+ ovc1_len = QT_UINT32 (ovc1_data);
+ if (ovc1_len <= 198) {
+ GST_WARNING_OBJECT (qtdemux, "Too small ovc1 header, skipping");
+ break;
+ }
+ buf = gst_buffer_new_and_alloc (ovc1_len - 198);
+ gst_buffer_fill (buf, 0, ovc1_data + 198, ovc1_len - 198);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ break;
+ }
+ case FOURCC_vc_1:
+ {
+ gint len = QT_UINT32 (stsd_entry_data) - 0x56;
+ const guint8 *vc1_data = stsd_entry_data + 0x56;
+
+ /* find dvc1 */
+ while (len >= 8) {
+ gint size;
+
+ if (QT_UINT32 (vc1_data) <= len)
+ size = QT_UINT32 (vc1_data) - 8;
+ else
+ size = len - 8;
+
+ if (size < 1)
+ /* No real data, so break out */
+ break;
+
+ switch (QT_FOURCC (vc1_data + 0x4)) {
+ case GST_MAKE_FOURCC ('d', 'v', 'c', '1'):
+ {
+ GstBuffer *buf;
+
+ GST_DEBUG_OBJECT (qtdemux, "found dvc1 codec_data in stsd");
+ buf = gst_buffer_new_and_alloc (size);
+ gst_buffer_fill (buf, 0, vc1_data + 8, size);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ break;
+ }
+ default:
+ break;
+ }
+ len -= size + 8;
+ vc1_data += size + 8;
+ }
+ break;
+ }
+ case FOURCC_av01:
+ {
+ gint len = QT_UINT32 (stsd_entry_data) - 0x56;
+ const guint8 *av1_data = stsd_entry_data + 0x56;
+
+ /* find av1C */
+ while (len >= 0x8) {
+ gint size;
+
+ if (QT_UINT32 (av1_data) <= len)
+ size = QT_UINT32 (av1_data) - 0x8;
+ else
+ size = len - 0x8;
+
+ if (size < 1)
+ /* No real data, so break out */
+ break;
+
+ switch (QT_FOURCC (av1_data + 0x4)) {
+ case FOURCC_av1C:
+ {
+ /* parse, if found */
+ GstBuffer *buf;
+ guint8 pres_delay_field;
+
+ GST_DEBUG_OBJECT (qtdemux,
+ "found av1C codec_data in stsd of size %d", size);
+
+ /* not enough data, just ignore and hope for the best */
+ if (size < 5)
+ break;
+
+ /* Content is:
+ * 4 bytes: atom length
+ * 4 bytes: fourcc
+ * 1 byte: version
+ * 3 bytes: flags
+ * 3 bits: reserved
+                  * 1 bit: initial_presentation_delay_present
+                  * 4 bits: initial_presentation_delay (if present, otherwise reserved)
+ * rest: OBUs.
+ */
+
+ if (av1_data[9] != 0) {
+ GST_WARNING ("Unknown version %d of av1C box", av1_data[9]);
+ break;
+ }
+
+ /* We skip initial_presentation_delay* for now */
+ pres_delay_field = *(av1_data + 12);
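+                /* With the layout assumed here, e.g. pres_delay_field ==
+                 * 0x3a has the presence bit (1 << 5) set and encodes a
+                 * delay of (0x3a & 0x0f) + 1 = 11 frames. */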
+ if (pres_delay_field & (1 << 5)) {
+ gst_caps_set_simple (entry->caps,
+ "presentation-delay", G_TYPE_INT,
+ (gint) (pres_delay_field & 0x0F) + 1, NULL);
+ }
+ if (size > 5) {
+ buf = gst_buffer_new_and_alloc (size - 5);
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_HEADER);
+ gst_buffer_fill (buf, 0, av1_data + 13, size - 5);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ }
+ break;
+ }
+ default:
+ break;
+ }
+
+ len -= size + 8;
+ av1_data += size + 8;
+ }
+
+ break;
+ }
+
+ /* TODO: Need to parse vpcC for VP8 codec too.
+ * Note that VPCodecConfigurationBox (vpcC) is defined for
+ * vp08, vp09, and vp10 fourcc. */
+ case FOURCC_vp09:
+ {
+ gint len = QT_UINT32 (stsd_entry_data) - 0x56;
+ const guint8 *vpcc_data = stsd_entry_data + 0x56;
+
+ /* find vpcC */
+ while (len >= 0x8) {
+ gint size;
+
+ if (QT_UINT32 (vpcc_data) <= len)
+ size = QT_UINT32 (vpcc_data) - 0x8;
+ else
+ size = len - 0x8;
+
+ if (size < 1)
+ /* No real data, so break out */
+ break;
+
+ switch (QT_FOURCC (vpcc_data + 0x4)) {
+ case FOURCC_vpcC:
+ {
+ const gchar *profile_str = NULL;
+ const gchar *chroma_format_str = NULL;
+ guint8 profile;
+ guint8 bitdepth;
+ guint8 chroma_format;
+ GstVideoColorimetry cinfo;
+
+ /* parse, if found */
+ GST_DEBUG_OBJECT (qtdemux,
+ "found vp codec_data in stsd of size %d", size);
+
+ /* the meaning of "size" is length of the atom body, excluding
+ * atom length and fourcc fields */
+ if (size < 12)
+ break;
+
+ /* Content is:
+ * 4 bytes: atom length
+ * 4 bytes: fourcc
+ * 1 byte: version
+ * 3 bytes: flags
+ * 1 byte: profile
+ * 1 byte: level
+ * 4 bits: bitDepth
+ * 3 bits: chromaSubsampling
+ * 1 bit: videoFullRangeFlag
+ * 1 byte: colourPrimaries
+ * 1 byte: transferCharacteristics
+ * 1 byte: matrixCoefficients
+                  * 2 bytes: codecInitializationDataSize (should be zero for vp8 and vp9)
+                  * rest: codecInitializationData (not used for vp8 and vp9)
+ */
+
+ if (vpcc_data[8] != 1) {
+ GST_WARNING_OBJECT (qtdemux,
+ "unknown vpcC version %d", vpcc_data[8]);
+ break;
+ }
+
+ profile = vpcc_data[12];
+ switch (profile) {
+ case 0:
+ profile_str = "0";
+ break;
+ case 1:
+ profile_str = "1";
+ break;
+ case 2:
+ profile_str = "2";
+ break;
+ case 3:
+ profile_str = "3";
+ break;
+ default:
+ break;
+ }
+
+ if (profile_str) {
+ gst_caps_set_simple (entry->caps,
+ "profile", G_TYPE_STRING, profile_str, NULL);
+ }
+
+ /* skip level, the VP9 spec v0.6 defines only one level atm,
+ * but webm spec define various ones. Add level to caps
+ * if we really need it then */
+
+ bitdepth = (vpcc_data[14] & 0xf0) >> 4;
+ if (bitdepth == 8 || bitdepth == 10 || bitdepth == 12) {
+ gst_caps_set_simple (entry->caps,
+ "bit-depth-luma", G_TYPE_UINT, bitdepth,
+ "bit-depth-chroma", G_TYPE_UINT, bitdepth, NULL);
+ }
+
+ chroma_format = (vpcc_data[14] & 0xe) >> 1;
+ switch (chroma_format) {
+ case 0:
+ case 1:
+ chroma_format_str = "4:2:0";
+ break;
+ case 2:
+ chroma_format_str = "4:2:2";
+ break;
+ case 3:
+ chroma_format_str = "4:4:4";
+ break;
+ default:
+ break;
+ }
+
+ if (chroma_format_str) {
+ gst_caps_set_simple (entry->caps,
+ "chroma-format", G_TYPE_STRING, chroma_format_str,
+ NULL);
+ }
+
+ if ((vpcc_data[14] & 0x1) != 0)
+ cinfo.range = GST_VIDEO_COLOR_RANGE_0_255;
+ else
+ cinfo.range = GST_VIDEO_COLOR_RANGE_16_235;
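+                  /* Worked example: vpcc_data[14] == 0xa2 unpacks to
+                   * bitDepth (0xa2 & 0xf0) >> 4 = 10, chromaSubsampling
+                   * (0xa2 & 0xe) >> 1 = 1 (4:2:0) and videoFullRangeFlag
+                   * 0, i.e. 16-235 range. */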
+ cinfo.primaries =
+ gst_video_color_primaries_from_iso (vpcc_data[15]);
+ cinfo.transfer =
+ gst_video_transfer_function_from_iso (vpcc_data[16]);
+ cinfo.matrix =
+ gst_video_color_matrix_from_iso (vpcc_data[17]);
+
+ if (cinfo.primaries != GST_VIDEO_COLOR_PRIMARIES_UNKNOWN &&
+ cinfo.transfer != GST_VIDEO_TRANSFER_UNKNOWN &&
+ cinfo.matrix != GST_VIDEO_COLOR_MATRIX_UNKNOWN) {
+ /* set this only if all values are known, otherwise this
+ * might overwrite valid ones parsed from other color box */
+ CUR_STREAM (stream)->colorimetry = cinfo;
+ }
+ break;
+ }
+ default:
+ break;
+ }
+
+ len -= size + 8;
+ vpcc_data += size + 8;
+ }
+
+ break;
+ }
+ default:
+ break;
+ }
+ }
+
+ GST_INFO_OBJECT (qtdemux,
+ "type %" GST_FOURCC_FORMAT " caps %" GST_PTR_FORMAT,
+ GST_FOURCC_ARGS (fourcc), entry->caps);
+
+ } else if (stream->subtype == FOURCC_soun) {
+ GNode *wave;
+ int version, samplesize;
+ guint16 compression_id;
+ gboolean amrwb = FALSE;
+
+ offset = 16;
+ /* sample description entry (16) + sound sample description v0 (20) */
+ if (len < 36)
+ goto corrupt_file;
+
+ version = QT_UINT32 (stsd_entry_data + offset);
+ entry->n_channels = QT_UINT16 (stsd_entry_data + offset + 8);
+ samplesize = QT_UINT16 (stsd_entry_data + offset + 10);
+ compression_id = QT_UINT16 (stsd_entry_data + offset + 12);
+ entry->rate = QT_FP32 (stsd_entry_data + offset + 16);
+
+ GST_LOG_OBJECT (qtdemux, "version/rev: %08x", version);
+ GST_LOG_OBJECT (qtdemux, "vendor: %08x",
+ QT_UINT32 (stsd_entry_data + offset + 4));
+ GST_LOG_OBJECT (qtdemux, "n_channels: %d", entry->n_channels);
+ GST_LOG_OBJECT (qtdemux, "sample_size: %d", samplesize);
+ GST_LOG_OBJECT (qtdemux, "compression_id: %d", compression_id);
+ GST_LOG_OBJECT (qtdemux, "packet size: %d",
+ QT_UINT16 (stsd_entry_data + offset + 14));
+ GST_LOG_OBJECT (qtdemux, "sample rate: %g", entry->rate);
+
+ if (compression_id == 0xfffe)
+ entry->sampled = TRUE;
+
+ /* first assume uncompressed audio */
+ entry->bytes_per_sample = samplesize / 8;
+ entry->samples_per_frame = entry->n_channels;
+ entry->bytes_per_frame = entry->n_channels * entry->bytes_per_sample;
+ entry->samples_per_packet = entry->samples_per_frame;
+ entry->bytes_per_packet = entry->bytes_per_sample;
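+    /* e.g. uncompressed 16-bit stereo: 2 bytes per sample, 2 samples and
+     * 4 bytes per frame (one sample per channel); packet values stay
+     * per-sample until a v1/v2 description overrides them below. */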
+
+ offset = 36;
+
+ if (version == 0x00010000) {
+ /* sample description entry (16) + sound sample description v1 (20+16) */
+ if (len < 52)
+ goto corrupt_file;
+
+ /* take information from here over the normal sample description */
+ entry->samples_per_packet = QT_UINT32 (stsd_entry_data + offset);
+ entry->bytes_per_packet = QT_UINT32 (stsd_entry_data + offset + 4);
+ entry->bytes_per_frame = QT_UINT32 (stsd_entry_data + offset + 8);
+ entry->bytes_per_sample = QT_UINT32 (stsd_entry_data + offset + 12);
+
+ GST_LOG_OBJECT (qtdemux, "Sound sample description Version 1");
+ GST_LOG_OBJECT (qtdemux, "samples/packet: %d",
+ entry->samples_per_packet);
+ GST_LOG_OBJECT (qtdemux, "bytes/packet: %d",
+ entry->bytes_per_packet);
+ GST_LOG_OBJECT (qtdemux, "bytes/frame: %d",
+ entry->bytes_per_frame);
+ GST_LOG_OBJECT (qtdemux, "bytes/sample: %d",
+ entry->bytes_per_sample);
+
+ if (!entry->sampled && entry->bytes_per_packet) {
+ entry->samples_per_frame = (entry->bytes_per_frame /
+ entry->bytes_per_packet) * entry->samples_per_packet;
+ GST_LOG_OBJECT (qtdemux, "samples/frame: %d",
+ entry->samples_per_frame);
+ }
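+      /* e.g. stereo ima4: 68 bytes/frame, 34 bytes/packet and 64
+       * samples/packet give (68 / 34) * 64 = 128 samples per frame. */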
+ } else if (version == 0x00020000) {
+ /* sample description entry (16) + sound sample description v2 (56) */
+ if (len < 72)
+ goto corrupt_file;
+
+ /* take information from here over the normal sample description */
+ entry->rate = GST_READ_DOUBLE_BE (stsd_entry_data + offset + 4);
+ entry->n_channels = QT_UINT32 (stsd_entry_data + offset + 12);
+ entry->samples_per_frame = entry->n_channels;
+ entry->bytes_per_sample = QT_UINT32 (stsd_entry_data + offset + 20) / 8;
+ entry->bytes_per_packet = QT_UINT32 (stsd_entry_data + offset + 28);
+ entry->samples_per_packet = QT_UINT32 (stsd_entry_data + offset + 32);
+ entry->bytes_per_frame = entry->bytes_per_sample * entry->n_channels;
+
+ GST_LOG_OBJECT (qtdemux, "Sound sample description Version 2");
+ GST_LOG_OBJECT (qtdemux, "sample rate: %g", entry->rate);
+ GST_LOG_OBJECT (qtdemux, "n_channels: %d", entry->n_channels);
+ GST_LOG_OBJECT (qtdemux, "bits/channel: %d",
+ entry->bytes_per_sample * 8);
+ GST_LOG_OBJECT (qtdemux, "format flags: %X",
+ QT_UINT32 (stsd_entry_data + offset + 24));
+ GST_LOG_OBJECT (qtdemux, "bytes/packet: %d",
+ entry->bytes_per_packet);
+ GST_LOG_OBJECT (qtdemux, "LPCM frames/packet: %d",
+ entry->samples_per_packet);
+ } else if (version != 0x00000) {
+ GST_WARNING_OBJECT (qtdemux, "unknown audio STSD version %08x",
+ version);
+ }
+
+ switch (fourcc) {
+ /* Yes, these have to be hard-coded */
+ case FOURCC_MAC6:
+ {
+ entry->samples_per_packet = 6;
+ entry->bytes_per_packet = 1;
+ entry->bytes_per_frame = 1 * entry->n_channels;
+ entry->bytes_per_sample = 1;
+ entry->samples_per_frame = 6 * entry->n_channels;
+ break;
+ }
+ case FOURCC_MAC3:
+ {
+ entry->samples_per_packet = 3;
+ entry->bytes_per_packet = 1;
+ entry->bytes_per_frame = 1 * entry->n_channels;
+ entry->bytes_per_sample = 1;
+ entry->samples_per_frame = 3 * entry->n_channels;
+ break;
+ }
+ case FOURCC_ima4:
+ {
+ entry->samples_per_packet = 64;
+ entry->bytes_per_packet = 34;
+ entry->bytes_per_frame = 34 * entry->n_channels;
+ entry->bytes_per_sample = 2;
+ entry->samples_per_frame = 64 * entry->n_channels;
+ break;
+ }
+ case FOURCC_ulaw:
+ case FOURCC_alaw:
+ {
+ entry->samples_per_packet = 1;
+ entry->bytes_per_packet = 1;
+ entry->bytes_per_frame = 1 * entry->n_channels;
+ entry->bytes_per_sample = 1;
+ entry->samples_per_frame = 1 * entry->n_channels;
+ break;
+ }
+ case FOURCC_agsm:
+ {
+ entry->samples_per_packet = 160;
+ entry->bytes_per_packet = 33;
+ entry->bytes_per_frame = 33 * entry->n_channels;
+ entry->bytes_per_sample = 2;
+ entry->samples_per_frame = 160 * entry->n_channels;
+ break;
+ }
+ /* fix up any invalid header information from above */
+ case FOURCC_twos:
+ case FOURCC_sowt:
+ case FOURCC_raw_:
+ case FOURCC_lpcm:
+ /* Sometimes these are set to 0 in the sound sample descriptions so
+ * let's try to infer useful values from the other information we
+ * have available */
+ if (entry->bytes_per_sample == 0)
+ entry->bytes_per_sample =
+ entry->bytes_per_frame / entry->n_channels;
+ if (entry->bytes_per_sample == 0)
+ entry->bytes_per_sample = samplesize / 8;
+
+ if (entry->bytes_per_frame == 0)
+ entry->bytes_per_frame =
+ entry->bytes_per_sample * entry->n_channels;
+
+ if (entry->bytes_per_packet == 0)
+ entry->bytes_per_packet = entry->bytes_per_sample;
+
+ if (entry->samples_per_frame == 0)
+ entry->samples_per_frame = entry->n_channels;
+
+ if (entry->samples_per_packet == 0)
+ entry->samples_per_packet = entry->samples_per_frame;
+
+ break;
+ case FOURCC_in24:
+ case FOURCC_in32:
+ case FOURCC_fl32:
+ case FOURCC_fl64:
+ case FOURCC_s16l:{
+ switch (fourcc) {
+ case FOURCC_in24:
+ entry->bytes_per_sample = 3;
+ break;
+ case FOURCC_in32:
+ case FOURCC_fl32:
+ entry->bytes_per_sample = 4;
+ break;
+ case FOURCC_fl64:
+ entry->bytes_per_sample = 8;
+ break;
+ case FOURCC_s16l:
+ entry->bytes_per_sample = 2;
+ break;
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ entry->samples_per_frame = entry->n_channels;
+ entry->bytes_per_frame = entry->n_channels * entry->bytes_per_sample;
+ entry->samples_per_packet = entry->samples_per_frame;
+ entry->bytes_per_packet = entry->bytes_per_sample;
+ break;
+ }
+ default:
+ break;
+ }
+
+ if (entry->caps)
+ gst_caps_unref (entry->caps);
+
+ entry->caps = qtdemux_audio_caps (qtdemux, stream, entry, fourcc,
+ stsd_entry_data + 32, len - 16, &codec);
+
+ switch (fourcc) {
+ case FOURCC_in24:
+ case FOURCC_in32:
+ case FOURCC_fl32:
+ case FOURCC_fl64:
+ {
+ GNode *enda;
+ GNode *fmt;
+
+ fmt = qtdemux_tree_get_child_by_type (stsd, fourcc);
+
+ enda = qtdemux_tree_get_child_by_type (fmt, FOURCC_enda);
+ if (!enda) {
+ wave = qtdemux_tree_get_child_by_type (fmt, FOURCC_wave);
+ if (wave)
+ enda = qtdemux_tree_get_child_by_type (wave, FOURCC_enda);
+ }
+ if (enda) {
+ int enda_value = QT_UINT16 ((guint8 *) enda->data + 8);
+ const gchar *format_str;
+
+ switch (fourcc) {
+ case FOURCC_in24:
+ format_str = (enda_value) ? "S24LE" : "S24BE";
+ break;
+ case FOURCC_in32:
+ format_str = (enda_value) ? "S32LE" : "S32BE";
+ break;
+ case FOURCC_fl32:
+ format_str = (enda_value) ? "F32LE" : "F32BE";
+ break;
+ case FOURCC_fl64:
+ format_str = (enda_value) ? "F64LE" : "F64BE";
+ break;
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ gst_caps_set_simple (entry->caps,
+ "format", G_TYPE_STRING, format_str, NULL);
+ }
+ break;
+ }
+ case FOURCC_owma:
+ {
+ const guint8 *owma_data;
+ const gchar *codec_name = NULL;
+ guint owma_len;
+ GstBuffer *buf;
+ gint version = 1;
+ /* from http://msdn.microsoft.com/en-us/library/dd757720(VS.85).aspx */
+ /* FIXME this should also be gst_riff_strf_auds,
+ * but the latter one is actually missing bits-per-sample :( */
+ typedef struct
+ {
+ gint16 wFormatTag;
+ gint16 nChannels;
+ gint32 nSamplesPerSec;
+ gint32 nAvgBytesPerSec;
+ gint16 nBlockAlign;
+ gint16 wBitsPerSample;
+ gint16 cbSize;
+ } WAVEFORMATEX;
+ WAVEFORMATEX *wfex;
+
+ GST_DEBUG_OBJECT (qtdemux, "parse owma");
+ owma_data = stsd_entry_data;
+ owma_len = QT_UINT32 (owma_data);
+ if (owma_len <= 54) {
+ GST_WARNING_OBJECT (qtdemux, "Too small owma header, skipping");
+ break;
+ }
+ wfex = (WAVEFORMATEX *) (owma_data + 36);
+ buf = gst_buffer_new_and_alloc (owma_len - 54);
+ gst_buffer_fill (buf, 0, owma_data + 54, owma_len - 54);
+ if (wfex->wFormatTag == 0x0161) {
+ codec_name = "Windows Media Audio";
+ version = 2;
+ } else if (wfex->wFormatTag == 0x0162) {
+ codec_name = "Windows Media Audio 9 Pro";
+ version = 3;
+ } else if (wfex->wFormatTag == 0x0163) {
+ codec_name = "Windows Media Audio 9 Lossless";
+ /* is that correct? gstffmpegcodecmap.c is missing it, but
+ * fluendo codec seems to support it */
+ version = 4;
+ }
+
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf,
+ "wmaversion", G_TYPE_INT, version,
+ "block_align", G_TYPE_INT,
+ GST_READ_UINT16_LE (&wfex->nBlockAlign), "bitrate", G_TYPE_INT,
+ GST_READ_UINT32_LE (&wfex->nAvgBytesPerSec), "width", G_TYPE_INT,
+ GST_READ_UINT16_LE (&wfex->wBitsPerSample), "depth", G_TYPE_INT,
+ GST_READ_UINT16_LE (&wfex->wBitsPerSample), NULL);
+ gst_buffer_unref (buf);
+
+ if (codec_name) {
+ g_free (codec);
+ codec = g_strdup (codec_name);
+ }
+ break;
+ }
+ case FOURCC_wma_:
+ {
+ gint len = QT_UINT32 (stsd_entry_data) - offset;
+ const guint8 *wfex_data = stsd_entry_data + offset;
+ const gchar *codec_name = NULL;
+ gint version = 1;
+ /* from http://msdn.microsoft.com/en-us/library/dd757720(VS.85).aspx */
+ /* FIXME this should also be gst_riff_strf_auds,
+ * but the latter one is actually missing bits-per-sample :( */
+ typedef struct
+ {
+ gint16 wFormatTag;
+ gint16 nChannels;
+ gint32 nSamplesPerSec;
+ gint32 nAvgBytesPerSec;
+ gint16 nBlockAlign;
+ gint16 wBitsPerSample;
+ gint16 cbSize;
+ } WAVEFORMATEX;
+ WAVEFORMATEX wfex;
+
+ /* FIXME: unify with similar wavformatex parsing code above */
+ GST_DEBUG_OBJECT (qtdemux, "parse wma, looking for wfex");
+
+ /* find wfex */
+ while (len >= 8) {
+ gint size;
+
+ if (QT_UINT32 (wfex_data) <= len)
+ size = QT_UINT32 (wfex_data) - 8;
+ else
+ size = len - 8;
+
+ if (size < 1)
+ /* No real data, so break out */
+ break;
+
+ switch (QT_FOURCC (wfex_data + 4)) {
+ case GST_MAKE_FOURCC ('w', 'f', 'e', 'x'):
+ {
+ GST_DEBUG_OBJECT (qtdemux, "found wfex in stsd");
+
+ if (size < 8 + 18)
+ break;
+
+ wfex.wFormatTag = GST_READ_UINT16_LE (wfex_data + 8 + 0);
+ wfex.nChannels = GST_READ_UINT16_LE (wfex_data + 8 + 2);
+ wfex.nSamplesPerSec = GST_READ_UINT32_LE (wfex_data + 8 + 4);
+ wfex.nAvgBytesPerSec = GST_READ_UINT32_LE (wfex_data + 8 + 8);
+ wfex.nBlockAlign = GST_READ_UINT16_LE (wfex_data + 8 + 12);
+ wfex.wBitsPerSample = GST_READ_UINT16_LE (wfex_data + 8 + 14);
+ wfex.cbSize = GST_READ_UINT16_LE (wfex_data + 8 + 16);
+
+ GST_LOG_OBJECT (qtdemux, "Found wfex box in stsd:");
+ GST_LOG_OBJECT (qtdemux, "FormatTag = 0x%04x, Channels = %u, "
+ "SamplesPerSec = %u, AvgBytesPerSec = %u, BlockAlign = %u, "
+ "BitsPerSample = %u, Size = %u", wfex.wFormatTag,
+ wfex.nChannels, wfex.nSamplesPerSec, wfex.nAvgBytesPerSec,
+ wfex.nBlockAlign, wfex.wBitsPerSample, wfex.cbSize);
+
+ if (wfex.wFormatTag == 0x0161) {
+ codec_name = "Windows Media Audio";
+ version = 2;
+ } else if (wfex.wFormatTag == 0x0162) {
+ codec_name = "Windows Media Audio 9 Pro";
+ version = 3;
+ } else if (wfex.wFormatTag == 0x0163) {
+ codec_name = "Windows Media Audio 9 Lossless";
+ /* is that correct? gstffmpegcodecmap.c is missing it, but
+ * fluendo codec seems to support it */
+ version = 4;
+ }
+
+ gst_caps_set_simple (entry->caps,
+ "wmaversion", G_TYPE_INT, version,
+ "block_align", G_TYPE_INT, wfex.nBlockAlign,
+ "bitrate", G_TYPE_INT, wfex.nAvgBytesPerSec,
+ "width", G_TYPE_INT, wfex.wBitsPerSample,
+ "depth", G_TYPE_INT, wfex.wBitsPerSample, NULL);
+
+ if (size > wfex.cbSize) {
+ GstBuffer *buf;
+
+ buf = gst_buffer_new_and_alloc (size - wfex.cbSize);
+ gst_buffer_fill (buf, 0, wfex_data + 8 + wfex.cbSize,
+ size - wfex.cbSize);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ } else {
+ GST_WARNING_OBJECT (qtdemux, "no codec data");
+ }
+
+ if (codec_name) {
+ g_free (codec);
+ codec = g_strdup (codec_name);
+ }
+ break;
+ }
+ default:
+ break;
+ }
+ len -= size + 8;
+ wfex_data += size + 8;
+ }
+ break;
+ }
+ case FOURCC_opus:
+ {
+ const guint8 *opus_data;
+ guint8 *channel_mapping = NULL;
+ guint32 rate;
+ guint8 channels;
+ guint8 channel_mapping_family;
+ guint8 stream_count;
+ guint8 coupled_count;
+ guint8 i;
+
+ opus_data = stsd_entry_data;
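+        /* The offsets below assume a version-0 audio sample entry (36
+         * bytes) followed directly by a 'dOps' box, whose body then
+         * starts at +44: output channel count at +45, input sample rate
+         * at +48, mapping family at +54 and, for non-zero families,
+         * stream and coupled counts at +55/+56 with the channel mapping
+         * table after them. */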
+
+ channels = GST_READ_UINT8 (opus_data + 45);
+ rate = GST_READ_UINT32_LE (opus_data + 48);
+ channel_mapping_family = GST_READ_UINT8 (opus_data + 54);
+ stream_count = GST_READ_UINT8 (opus_data + 55);
+ coupled_count = GST_READ_UINT8 (opus_data + 56);
+
+ if (channels > 0) {
+ channel_mapping = g_malloc (channels * sizeof (guint8));
+ for (i = 0; i < channels; i++)
+ channel_mapping[i] = GST_READ_UINT8 (opus_data + i + 57);
+ }
+
+ entry->caps = gst_codec_utils_opus_create_caps (rate, channels,
+ channel_mapping_family, stream_count, coupled_count,
+ channel_mapping);
+ break;
+ }
+ default:
+ break;
+ }
+
+ if (codec) {
+ GstStructure *s;
+ gint bitrate = 0;
+
+ gst_tag_list_add (stream->stream_tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_AUDIO_CODEC, codec, NULL);
+ g_free (codec);
+ codec = NULL;
+
+ /* some bitrate info may have ended up in caps */
+ s = gst_caps_get_structure (entry->caps, 0);
+ gst_structure_get_int (s, "bitrate", &bitrate);
+ if (bitrate > 0)
+ gst_tag_list_add (stream->stream_tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_BITRATE, bitrate, NULL);
+ }
+
+ esds = NULL;
+ mp4a = qtdemux_tree_get_child_by_index (stsd, stsd_index);
+ if (QTDEMUX_TREE_NODE_FOURCC (mp4a) != fourcc) {
+ if (stream->protected) {
+ if (QTDEMUX_TREE_NODE_FOURCC (mp4a) == FOURCC_aavd) {
+ esds = qtdemux_tree_get_child_by_type (mp4a, FOURCC_esds);
+ }
+ if (QTDEMUX_TREE_NODE_FOURCC (mp4a) != FOURCC_enca) {
+ mp4a = NULL;
+ }
+ } else {
+ mp4a = NULL;
+ }
+ }
+
+ wave = NULL;
+ if (mp4a) {
+ wave = qtdemux_tree_get_child_by_type (mp4a, FOURCC_wave);
+ if (wave)
+ esds = qtdemux_tree_get_child_by_type (wave, FOURCC_esds);
+ if (!esds)
+ esds = qtdemux_tree_get_child_by_type (mp4a, FOURCC_esds);
+ }
+
+    /* If the fourcc's bottom 16 bits give 'sm', then the top
+       16 bits are a byte-swapped wave-style codec identifier,
+       and a WAVE header can be found inside a 'wave' atom here.
+ This can more clearly be thought of as 'ms' as the top 16 bits, and a
+ codec id as the bottom 16 bits - but byte-swapped to store in QT (which
+ is big-endian).
+ */
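+    /* e.g. MS ADPCM (wave format 0x0002) is stored as the fourcc
+     * "ms\0\2": the low 16 bits match 'sm' and the high 16 bits hold
+     * 0x0200, the format id byte-swapped. */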
+ if ((fourcc & 0xffff) == (('s' << 8) | 'm')) {
+ if (len < offset + 20) {
+ GST_WARNING_OBJECT (qtdemux, "No wave atom in MS-style audio");
+ } else {
+ guint32 datalen = QT_UINT32 (stsd_entry_data + offset + 16);
+ const guint8 *data = stsd_entry_data + offset + 16;
+ GNode *wavenode;
+ GNode *waveheadernode;
+
+ wavenode = g_node_new ((guint8 *) data);
+ if (qtdemux_parse_node (qtdemux, wavenode, data, datalen)) {
+ const guint8 *waveheader;
+ guint32 headerlen;
+
+ waveheadernode = qtdemux_tree_get_child_by_type (wavenode, fourcc);
+ if (waveheadernode) {
+ waveheader = (const guint8 *) waveheadernode->data;
+ headerlen = QT_UINT32 (waveheader);
+
+ if (headerlen > 8) {
+ gst_riff_strf_auds *header = NULL;
+ GstBuffer *headerbuf;
+ GstBuffer *extra;
+
+ waveheader += 8;
+ headerlen -= 8;
+
+ headerbuf = gst_buffer_new_and_alloc (headerlen);
+ gst_buffer_fill (headerbuf, 0, waveheader, headerlen);
+
+ if (gst_riff_parse_strf_auds (GST_ELEMENT_CAST (qtdemux),
+ headerbuf, &header, &extra)) {
+ gst_caps_unref (entry->caps);
+ /* FIXME: Need to do something with the channel reorder map */
+ entry->caps =
+ gst_riff_create_audio_caps (header->format, NULL, header,
+ extra, NULL, NULL, NULL);
+
+ if (extra)
+ gst_buffer_unref (extra);
+ g_free (header);
+ }
+ }
+ } else
+ GST_DEBUG ("Didn't find waveheadernode for this codec");
+ }
+ g_node_destroy (wavenode);
+ }
+ } else if (esds) {
+ gst_qtdemux_handle_esds (qtdemux, stream, entry, esds,
+ stream->stream_tags);
+ } else {
+ switch (fourcc) {
+ #if 0
+ /* FIXME: what is in the chunk? */
+ case FOURCC_QDMC:
+ {
+ gint len = QT_UINT32 (stsd_data);
+
+ /* seems to be always = 116 = 0x74 */
+ break;
+ }
+ #endif
+ case FOURCC_QDM2:
+ {
+ gint len = QT_UINT32 (stsd_entry_data);
+
+ if (len > 0x3C) {
+ GstBuffer *buf = gst_buffer_new_and_alloc (len - 0x3C);
+
+ gst_buffer_fill (buf, 0, stsd_entry_data + 0x3C, len - 0x3C);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ }
+ gst_caps_set_simple (entry->caps,
+ "samplesize", G_TYPE_INT, samplesize, NULL);
+ break;
+ }
+ case FOURCC_alac:
+ {
+ GNode *alac, *wave = NULL;
+
+ /* apparently, m4a has this atom appended directly in the stsd entry,
+ * while mov has it in a wave atom */
+ alac = qtdemux_tree_get_child_by_type (stsd, FOURCC_alac);
+ if (alac) {
+ /* alac now refers to stsd entry atom */
+ wave = qtdemux_tree_get_child_by_type (alac, FOURCC_wave);
+ if (wave)
+ alac = qtdemux_tree_get_child_by_type (wave, FOURCC_alac);
+ else
+ alac = qtdemux_tree_get_child_by_type (alac, FOURCC_alac);
+ }
+ if (alac) {
+ const guint8 *alac_data = alac->data;
+ gint len = QT_UINT32 (alac->data);
+ GstBuffer *buf;
+
+ if (len < 36) {
+ GST_DEBUG_OBJECT (qtdemux,
+ "discarding alac atom with unexpected len %d", len);
+ } else {
+ /* codec-data contains alac atom size and prefix,
+ * ffmpeg likes it that way, not quite gst-ish though ...*/
+ buf = gst_buffer_new_and_alloc (len);
+ gst_buffer_fill (buf, 0, alac->data, len);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+
+ entry->bytes_per_frame = QT_UINT32 (alac_data + 12);
+ entry->n_channels = QT_UINT8 (alac_data + 21);
+ entry->rate = QT_UINT32 (alac_data + 32);
+ samplesize = QT_UINT8 (alac_data + 16 + 1);
+ }
+ }
+ gst_caps_set_simple (entry->caps,
+ "samplesize", G_TYPE_INT, samplesize, NULL);
+ break;
+ }
+ case FOURCC_fLaC:
+ {
+ /* The codingname of the sample entry is 'fLaC' */
+ GNode *flac = qtdemux_tree_get_child_by_type (stsd, FOURCC_fLaC);
+
+ if (flac) {
+ /* The 'dfLa' box is added to the sample entry to convey
+ initializing information for the decoder. */
+ const GNode *dfla =
+ qtdemux_tree_get_child_by_type (flac, FOURCC_dfLa);
+
+ if (dfla) {
+ const guint32 len = QT_UINT32 (dfla->data);
+
+ /* Must contain at least dfLa box header (12),
+ * METADATA_BLOCK_HEADER (4), METADATA_BLOCK_STREAMINFO (34) */
+ if (len < 50) {
+ GST_DEBUG_OBJECT (qtdemux,
+ "discarding dfla atom with unexpected len %d", len);
+ } else {
+ /* skip dfLa header to get the METADATA_BLOCKs */
+ const guint8 *metadata_blocks = (guint8 *) dfla->data + 12;
+ const guint32 metadata_blocks_len = len - 12;
+
+ gchar *stream_marker = g_strdup ("fLaC");
+ GstBuffer *block = gst_buffer_new_wrapped (stream_marker,
+ strlen (stream_marker));
+
+ guint32 index = 0;
+ guint32 remainder = 0;
+ guint32 block_size = 0;
+ gboolean is_last = FALSE;
+
+ GValue array = G_VALUE_INIT;
+ GValue value = G_VALUE_INIT;
+
+ g_value_init (&array, GST_TYPE_ARRAY);
+ g_value_init (&value, GST_TYPE_BUFFER);
+
+ gst_value_set_buffer (&value, block);
+ gst_value_array_append_value (&array, &value);
+ g_value_reset (&value);
+
+ gst_buffer_unref (block);
+
+ /* check there's at least one METADATA_BLOCK_HEADER's worth
+ * of data, and we haven't already finished parsing */
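+ /* METADATA_BLOCK_HEADER layout (per the FLAC spec): byte 0 carries a
+ * 1-bit last-block flag plus a 7-bit block type, bytes 1-3 hold the
+ * 24-bit big-endian length of the block body; block_size therefore
+ * adds 4 bytes for the header itself. */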
+ while (!is_last && ((index + 3) < metadata_blocks_len)) {
+ remainder = metadata_blocks_len - index;
+
+ /* add the METADATA_BLOCK_HEADER size to the signalled size */
+ block_size = 4 +
+ (metadata_blocks[index + 1] << 16) +
+ (metadata_blocks[index + 2] << 8) +
+ metadata_blocks[index + 3];
+
+ /* be careful not to read off end of box */
+ if (block_size > remainder) {
+ break;
+ }
+
+ is_last = metadata_blocks[index] >> 7;
+
+ block = gst_buffer_new_and_alloc (block_size);
+
+ gst_buffer_fill (block, 0, &metadata_blocks[index],
+ block_size);
+
+ gst_value_set_buffer (&value, block);
+ gst_value_array_append_value (&array, &value);
+ g_value_reset (&value);
+
+ gst_buffer_unref (block);
+
+ index += block_size;
+ }
+
+ /* only append the metadata if we successfully read all of it */
+ if (is_last) {
+ gst_structure_set_value (gst_caps_get_structure (CUR_STREAM
+ (stream)->caps, 0), "streamheader", &array);
+ } else {
+ GST_WARNING_OBJECT (qtdemux,
+ "discarding all METADATA_BLOCKs due to invalid "
+ "block_size %d at idx %d, rem %d", block_size, index,
+ remainder);
+ }
+
+ g_value_unset (&value);
+ g_value_unset (&array);
+
+ /* The sample rate obtained from the stsd may not be accurate
+ * since it cannot represent rates greater than 65535Hz, so
+ * override that value with the sample rate from the
+ * METADATA_BLOCK_STREAMINFO block */
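+ /* Offset 14 = the 4-byte block header plus the 2+2+3+3 bytes of the
+ * min/max blocksize and framesize fields; the sample rate is the top
+ * 20 bits of the following 32-bit word, hence the shift and mask. */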
+ CUR_STREAM (stream)->rate =
+ (QT_UINT32 (metadata_blocks + 14) >> 12) & 0xFFFFF;
+ }
+ }
+ }
+ break;
+ }
+ case FOURCC_sawb:
+ amrwb = TRUE;
+ /* Fallthrough! */
+ case FOURCC_samr:
+ {
+ gint len = QT_UINT32 (stsd_entry_data);
+
+ if (len > 0x24) {
+ GstBuffer *buf = gst_buffer_new_and_alloc (len - 0x24);
+ guint bitrate;
+
+ gst_buffer_fill (buf, 0, stsd_entry_data + 0x24, len - 0x24);
+
+ /* If we have enough data, let's try to get the 'damr' atom. See
+ * the 3GPP container spec (26.244) for more details. */
+ if ((len - 0x34) > 8 &&
+ (bitrate = qtdemux_parse_amr_bitrate (buf, amrwb))) {
+ gst_tag_list_add (stream->stream_tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_MAXIMUM_BITRATE, bitrate, NULL);
+ }
+
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ }
+ break;
+ }
+ case FOURCC_mp4a:
+ {
+ /* mp4a atom without an ESDS; attempt to build codec data from the atom */
+ gint len = QT_UINT32 (stsd_entry_data);
+ guint16 sound_version = 0;
+ /* FIXME: Can this be determined somehow? There doesn't seem to be
+ * anything in the mp4a atom that specifies the compression */
+ gint profile = 2;
+ guint16 channels = entry->n_channels;
+ guint32 time_scale = (guint32) entry->rate;
+ gint sample_rate_index = -1;
+
+ if (len >= 34) {
+ sound_version = QT_UINT16 (stsd_entry_data + 16);
+
+ if (sound_version == 1) {
+ channels = QT_UINT16 (stsd_entry_data + 24);
+ time_scale = QT_UINT32 (stsd_entry_data + 30);
+ } else {
+ GST_FIXME_OBJECT (qtdemux, "Unhandled mp4a atom version %d",
+ sound_version);
+ }
+ } else {
+ GST_DEBUG_OBJECT (qtdemux, "Too small stsd entry data len %d",
+ len);
+ }
+
+ sample_rate_index =
+ gst_codec_utils_aac_get_index_from_sample_rate (time_scale);
+ if (sample_rate_index >= 0 && channels > 0) {
+ guint8 codec_data[2];
+ GstBuffer *buf;
+
+ /* build AAC codec data */
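+ /* Two-byte AudioSpecificConfig (ISO/IEC 14496-3): a 5-bit object type
+ * (here taken from profile, 2 = AAC LC), a 4-bit sampling frequency
+ * index and a 4-bit channel configuration, packed MSB-first. */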
+ codec_data[0] = profile << 3;
+ codec_data[0] |= ((sample_rate_index >> 1) & 0x7);
+ codec_data[1] = (sample_rate_index & 0x01) << 7;
+ codec_data[1] |= (channels & 0xF) << 3;
+
+ buf = gst_buffer_new_and_alloc (2);
+ gst_buffer_fill (buf, 0, codec_data, 2);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ }
+ break;
+ }
+ case FOURCC_lpcm:
+ case FOURCC_in24:
+ case FOURCC_in32:
+ case FOURCC_fl32:
+ case FOURCC_fl64:
+ case FOURCC_s16l:
+ /* Fully handled elsewhere */
+ break;
+ default:
+ GST_INFO_OBJECT (qtdemux,
+ "unhandled type %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS (fourcc));
+ break;
+ }
+ }
+ GST_INFO_OBJECT (qtdemux,
+ "type %" GST_FOURCC_FORMAT " caps %" GST_PTR_FORMAT,
+ GST_FOURCC_ARGS (fourcc), entry->caps);
+
+ } else if (stream->subtype == FOURCC_strm) {
+ if (fourcc == FOURCC_rtsp) {
+ stream->redirect_uri = qtdemux_get_rtsp_uri_from_hndl (qtdemux, minf);
+ } else {
+ GST_INFO_OBJECT (qtdemux, "unhandled stream type %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (fourcc));
+ goto unknown_stream;
+ }
+ entry->sampled = TRUE;
+ } else if (stream->subtype == FOURCC_subp || stream->subtype == FOURCC_text
+ || stream->subtype == FOURCC_sbtl || stream->subtype == FOURCC_subt
+ || stream->subtype == FOURCC_clcp) {
+
+ entry->sampled = TRUE;
+ entry->sparse = TRUE;
+
+ entry->caps =
+ qtdemux_sub_caps (qtdemux, stream, entry, fourcc, stsd_entry_data,
+ &codec);
+ if (codec) {
+ gst_tag_list_add (stream->stream_tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_SUBTITLE_CODEC, codec, NULL);
+ g_free (codec);
+ codec = NULL;
+ }
+
+ /* hunt for sort-of codec data */
+ switch (fourcc) {
+ case FOURCC_mp4s:
+ {
+ GNode *mp4s = NULL;
+ GNode *esds = NULL;
+
+ /* look for palette in a stsd->mp4s->esds sub-atom */
+ mp4s = qtdemux_tree_get_child_by_type (stsd, FOURCC_mp4s);
+ if (mp4s)
+ esds = qtdemux_tree_get_child_by_type (mp4s, FOURCC_esds);
+ if (esds == NULL) {
+ /* Invalid STSD */
+ GST_LOG_OBJECT (qtdemux, "Skipping invalid stsd: no esds child");
+ break;
+ }
+
+ gst_qtdemux_handle_esds (qtdemux, stream, entry, esds,
+ stream->stream_tags);
+ break;
+ }
+ default:
+ GST_INFO_OBJECT (qtdemux,
+ "unhandled type %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS (fourcc));
+ break;
+ }
+ GST_INFO_OBJECT (qtdemux,
+ "type %" GST_FOURCC_FORMAT " caps %" GST_PTR_FORMAT,
+ GST_FOURCC_ARGS (fourcc), entry->caps);
+ } else {
+ /* everything in 1 sample */
+ entry->sampled = TRUE;
+
+ entry->caps =
+ qtdemux_generic_caps (qtdemux, stream, entry, fourcc, stsd_entry_data,
+ &codec);
+
+ if (entry->caps == NULL)
+ goto unknown_stream;
+
+ if (codec) {
+ gst_tag_list_add (stream->stream_tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_SUBTITLE_CODEC, codec, NULL);
+ g_free (codec);
+ codec = NULL;
+ }
+ }
+
+ /* promote to sampled format */
+ if (entry->fourcc == FOURCC_samr) {
+ /* force mono 8000 Hz for AMR */
+ entry->sampled = TRUE;
+ entry->n_channels = 1;
+ entry->rate = 8000;
+ } else if (entry->fourcc == FOURCC_sawb) {
+ /* force mono 16000 Hz for AMR-WB */
+ entry->sampled = TRUE;
+ entry->n_channels = 1;
+ entry->rate = 16000;
+ } else if (entry->fourcc == FOURCC_mp4a) {
+ entry->sampled = TRUE;
+ }
+
+ stsd_entry_data += len;
+ remaining_stsd_len -= len;
+
+ }
+
+ /* collect sample information */
+ if (!qtdemux_stbl_init (qtdemux, stream, stbl))
+ goto samples_failed;
+
+ if (qtdemux->fragmented) {
+ guint64 offset;
+
+ /* need all moov samples as basis; probably not many if any at all */
+ /* prevent moof parsing from taking off at this time */
+ offset = qtdemux->moof_offset;
+ qtdemux->moof_offset = 0;
+ if (stream->n_samples &&
+ !qtdemux_parse_samples (qtdemux, stream, stream->n_samples - 1)) {
+ qtdemux->moof_offset = offset;
+ goto samples_failed;
+ }
+ qtdemux->moof_offset = offset;
+ /* movie duration more reliable in this case (e.g. mehd) */
+ if (qtdemux->segment.duration &&
+ GST_CLOCK_TIME_IS_VALID (qtdemux->segment.duration))
+ stream->duration =
+ GSTTIME_TO_QTSTREAMTIME (stream, qtdemux->segment.duration);
+ }
+
+ /* configure segments */
+ if (!qtdemux_parse_segments (qtdemux, stream, trak))
+ goto segments_failed;
+
+ /* add some language tag, if useful */
+ if (stream->lang_id[0] != '\0' && strcmp (stream->lang_id, "unk") &&
+ strcmp (stream->lang_id, "und")) {
+ const gchar *lang_code;
+
+ /* convert ISO 639-2 code to ISO 639-1 */
+ lang_code = gst_tag_get_language_code (stream->lang_id);
+ gst_tag_list_add (stream->stream_tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_LANGUAGE_CODE, (lang_code) ? lang_code : stream->lang_id, NULL);
+ }
+
+ /* Check for UDTA tags */
+ if ((udta = qtdemux_tree_get_child_by_type (trak, FOURCC_udta))) {
+ qtdemux_parse_udta (qtdemux, stream->stream_tags, udta);
+ }
+
+ /* Insert and sort new stream in track-id order.
+ * This will help in comparing old/new streams during stream update check */
+ g_ptr_array_add (qtdemux->active_streams, stream);
+ g_ptr_array_sort (qtdemux->active_streams,
+ (GCompareFunc) qtdemux_track_id_compare_func);
+ GST_DEBUG_OBJECT (qtdemux, "n_streams is now %d",
+ QTDEMUX_N_STREAMS (qtdemux));
+
+ return TRUE;
+
+ /* ERRORS */
+ corrupt_file:
+ {
+ GST_ELEMENT_ERROR (qtdemux, STREAM, DEMUX,
+ (_("This file is corrupt and cannot be played.")), (NULL));
+ if (stream)
+ gst_qtdemux_stream_unref (stream);
+ return FALSE;
+ }
+ error_encrypted:
+ {
+ GST_ELEMENT_ERROR (qtdemux, STREAM, DECRYPT, (NULL), (NULL));
+ gst_qtdemux_stream_unref (stream);
+ return FALSE;
+ }
+ samples_failed:
+ segments_failed:
+ {
+ /* we posted an error already */
+ /* free stbl sub-atoms */
+ gst_qtdemux_stbl_free (stream);
+ gst_qtdemux_stream_unref (stream);
+ return FALSE;
+ }
+ existing_stream:
+ {
+ GST_INFO_OBJECT (qtdemux, "stream with track id %i already exists",
+ track_id);
+ return TRUE;
+ }
+ unknown_stream:
+ {
+ GST_INFO_OBJECT (qtdemux, "unknown subtype %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (stream->subtype));
+ gst_qtdemux_stream_unref (stream);
+ return TRUE;
+ }
+ }
+
+ /* If we can estimate the overall bitrate, and don't have information about the
+ * stream bitrate for exactly one stream, this guesses the stream bitrate as
+ * the overall bitrate minus the sum of the bitrates of all other streams. This
+ * should be useful for the common case where we have one audio and one video
+ * stream and can estimate the bitrate of one, but not the other. */
+ static void
+ gst_qtdemux_guess_bitrate (GstQTDemux * qtdemux)
+ {
+ QtDemuxStream *stream = NULL;
+ gint64 size, sys_bitrate, sum_bitrate = 0;
+ GstClockTime duration;
+ guint bitrate;
+ gint i;
+
+ if (qtdemux->fragmented)
+ return;
+
+ GST_DEBUG_OBJECT (qtdemux, "Looking for streams with unknown bitrate");
+
+ if (!gst_pad_peer_query_duration (qtdemux->sinkpad, GST_FORMAT_BYTES, &size)
+ || size <= 0) {
+ GST_DEBUG_OBJECT (qtdemux,
+ "Size in bytes of the stream not known - bailing");
+ return;
+ }
+
+ /* Subtract the header size */
+ GST_DEBUG_OBJECT (qtdemux, "Total size %" G_GINT64_FORMAT ", header size %u",
+ size, qtdemux->header_size);
+
+ if (size < qtdemux->header_size)
+ return;
+
+ size = size - qtdemux->header_size;
+
+ if (!gst_qtdemux_get_duration (qtdemux, &duration)) {
+ GST_DEBUG_OBJECT (qtdemux, "Stream duration not known - bailing");
+ return;
+ }
+
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *str = QTDEMUX_NTH_STREAM (qtdemux, i);
+ switch (str->subtype) {
+ case FOURCC_soun:
+ case FOURCC_vide:
+ GST_DEBUG_OBJECT (qtdemux, "checking bitrate for %" GST_PTR_FORMAT,
+ CUR_STREAM (str)->caps);
+ /* retrieve bitrate, prefer avg then max */
+ bitrate = 0;
+ if (str->stream_tags) {
+ if (gst_tag_list_get_uint (str->stream_tags,
+ GST_TAG_MAXIMUM_BITRATE, &bitrate))
+ GST_DEBUG_OBJECT (qtdemux, "max-bitrate: %u", bitrate);
+ if (gst_tag_list_get_uint (str->stream_tags,
+ GST_TAG_NOMINAL_BITRATE, &bitrate))
+ GST_DEBUG_OBJECT (qtdemux, "nominal-bitrate: %u", bitrate);
+ if (gst_tag_list_get_uint (str->stream_tags,
+ GST_TAG_BITRATE, &bitrate))
+ GST_DEBUG_OBJECT (qtdemux, "bitrate: %u", bitrate);
+ }
+ if (bitrate)
+ sum_bitrate += bitrate;
+ else {
+ if (stream) {
+ GST_DEBUG_OBJECT (qtdemux,
+ ">1 stream with unknown bitrate - bailing");
+ return;
+ } else
+ stream = str;
+ }
+ break;
+
+ default:
+ /* For other subtypes, we assume no significant impact on bitrate */
+ break;
+ }
+ }
+
+ if (!stream) {
+ GST_DEBUG_OBJECT (qtdemux, "All stream bitrates are known");
+ return;
+ }
+
+ sys_bitrate = gst_util_uint64_scale (size, GST_SECOND * 8, duration);
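+ /* size is in bytes and duration in nanoseconds, so scaling by
+ * GST_SECOND * 8 yields an average bitrate in bits per second. */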
+
+ if (sys_bitrate < sum_bitrate) {
+ /* This can happen, since sum_bitrate might be derived from maximum
+ * bitrates and not average bitrates */
+ GST_DEBUG_OBJECT (qtdemux,
+ "System bitrate less than sum bitrate - bailing");
+ return;
+ }
+
+ bitrate = sys_bitrate - sum_bitrate;
+ GST_DEBUG_OBJECT (qtdemux, "System bitrate = %" G_GINT64_FORMAT
+ ", Stream bitrate = %u", sys_bitrate, bitrate);
+
+ if (!stream->stream_tags)
+ stream->stream_tags = gst_tag_list_new_empty ();
+ else
+ stream->stream_tags = gst_tag_list_make_writable (stream->stream_tags);
+
+ gst_tag_list_add (stream->stream_tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_BITRATE, bitrate, NULL);
+ }
+
+ static GstFlowReturn
+ qtdemux_prepare_streams (GstQTDemux * qtdemux)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
++#ifdef TIZEN_FEATURE_QTDEMUX_DURATION
++ guint64 tkhd_max_duration = 0;
++#endif
+ gint i;
+
+ GST_DEBUG_OBJECT (qtdemux, "prepare %u streams", QTDEMUX_N_STREAMS (qtdemux));
+
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+ guint32 sample_num = 0;
+
+ GST_DEBUG_OBJECT (qtdemux, "track-id %u, fourcc %" GST_FOURCC_FORMAT,
+ stream->track_id, GST_FOURCC_ARGS (CUR_STREAM (stream)->fourcc));
+
+ if (qtdemux->fragmented && qtdemux->pullbased) {
+ /* need all moov samples first */
+ GST_OBJECT_LOCK (qtdemux);
+ while (stream->n_samples == 0)
+ if ((ret = qtdemux_add_fragmented_samples (qtdemux)) != GST_FLOW_OK)
+ break;
+ GST_OBJECT_UNLOCK (qtdemux);
+ } else {
+ /* discard any stray moof */
+ qtdemux->moof_offset = 0;
++#ifdef TIZEN_FEATURE_QTDEMUX_DURATION
++ if (tkhd_max_duration < stream->tkhd_duration)
++ tkhd_max_duration = stream->tkhd_duration;
++#endif
+ }
+
+ /* tolerate non-fatal flow returns from the parsing above */
+ if (ret != GST_FLOW_ERROR)
+ ret = GST_FLOW_OK;
+
+ /* in pull mode, we should have parsed some sample info by now;
+ * and quite some code will not handle no samples.
+ * in push mode, we'll just have to deal with it */
+ if (G_UNLIKELY (qtdemux->pullbased && !stream->n_samples)) {
+ GST_DEBUG_OBJECT (qtdemux, "no samples for stream; discarding");
+ g_ptr_array_remove_index (qtdemux->active_streams, i);
+ i--;
+ continue;
+ } else if (stream->track_id == qtdemux->chapters_track_id &&
+ (stream->subtype == FOURCC_text || stream->subtype == FOURCC_sbtl)) {
+ /* TODO - parse chapters track and expose it as GstToc; For now just ignore it
+ so that it doesn't look like a subtitle track */
+ g_ptr_array_remove_index (qtdemux->active_streams, i);
+ i--;
+ continue;
+ }
+
+ /* parse the initial sample for use in setting the frame rate cap */
+ while (sample_num == 0 && sample_num < stream->n_samples) {
+ if (!qtdemux_parse_samples (qtdemux, stream, sample_num))
+ break;
+ ++sample_num;
+ }
+ }
+
++#ifdef TIZEN_FEATURE_QTDEMUX_DURATION
++ if (!qtdemux->fragmented && (qtdemux->duration > tkhd_max_duration)) {
++ GST_INFO_OBJECT (qtdemux,
++ "Update duration: %" G_GUINT64_FORMAT " -> %" G_GUINT64_FORMAT,
++ qtdemux->duration, tkhd_max_duration);
++ qtdemux->duration = tkhd_max_duration;
++ }
++#endif
++
+ return ret;
+ }
+
+ static gboolean
+ _stream_equal_func (const QtDemuxStream * stream, const gchar * stream_id)
+ {
+ return g_strcmp0 (stream->stream_id, stream_id) == 0;
+ }
+
+ static gboolean
+ qtdemux_is_streams_update (GstQTDemux * qtdemux)
+ {
+ gint i;
+
+ /* Different length, updated */
+ if (QTDEMUX_N_STREAMS (qtdemux) != qtdemux->old_streams->len)
+ return TRUE;
+
+ /* streams in list are sorted in track-id order */
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ /* Different stream-id, updated */
+ if (g_strcmp0 (QTDEMUX_NTH_STREAM (qtdemux, i)->stream_id,
+ QTDEMUX_NTH_OLD_STREAM (qtdemux, i)->stream_id))
+ return TRUE;
+ }
+
+ return FALSE;
+ }
+
+ static gboolean
+ qtdemux_reuse_and_configure_stream (GstQTDemux * qtdemux,
+ QtDemuxStream * oldstream, QtDemuxStream * newstream)
+ {
+ /* Connect old stream's srcpad to new stream */
+ newstream->pad = oldstream->pad;
+ oldstream->pad = NULL;
+
+ /* unset new_stream to prevent stream-start event, unless we are EOS in which
+ * case we need to force one through */
+ newstream->new_stream = GST_PAD_IS_EOS (newstream->pad);
+
+ return gst_qtdemux_configure_stream (qtdemux, newstream);
+ }
+
+ static gboolean
+ qtdemux_update_streams (GstQTDemux * qtdemux)
+ {
+ gint i;
+ g_assert (qtdemux->streams_aware);
+
+ /* Below, figure out which streams in active_streams have a stream-id
+ * identical to one in old_streams. A new stream with a matching
+ * stream-id is not exposed again; instead the demuxer reuses the
+ * srcpad of the matching old stream.
+ *
+ * active_streams : newly created streams from the latest moov
+ * old_streams : existing streams (belonging to the previous moov)
+ */
+
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+ QtDemuxStream *oldstream = NULL;
+ guint target;
+
+ GST_DEBUG_OBJECT (qtdemux, "track-id %u, fourcc %" GST_FOURCC_FORMAT,
+ stream->track_id, GST_FOURCC_ARGS (CUR_STREAM (stream)->fourcc));
+
+ if (g_ptr_array_find_with_equal_func (qtdemux->old_streams,
+ stream->stream_id, (GEqualFunc) _stream_equal_func, &target)) {
+ oldstream = QTDEMUX_NTH_OLD_STREAM (qtdemux, target);
+
+ /* null pad stream cannot be reused */
+ if (oldstream->pad == NULL)
+ oldstream = NULL;
+ }
+
+ if (oldstream) {
+ GST_DEBUG_OBJECT (qtdemux, "Reuse track-id %d", oldstream->track_id);
+
+ if (!qtdemux_reuse_and_configure_stream (qtdemux, oldstream, stream))
+ return FALSE;
+
+ /* we don't need to preserve order of old streams */
+ g_ptr_array_remove_fast (qtdemux->old_streams, oldstream);
+ } else {
+ GstTagList *list;
+
+ /* now we have all info and can expose */
+ list = stream->stream_tags;
+ stream->stream_tags = NULL;
+ if (!gst_qtdemux_add_stream (qtdemux, stream, list))
+ return FALSE;
+ }
+ }
+
+ return TRUE;
+ }
+
+ /* Must be called with expose lock */
+ static GstFlowReturn
+ qtdemux_expose_streams (GstQTDemux * qtdemux)
+ {
+ gint i;
+
+ GST_DEBUG_OBJECT (qtdemux, "exposing streams");
+
+ if (!qtdemux_is_streams_update (qtdemux)) {
+ GST_DEBUG_OBJECT (qtdemux, "Reuse all streams");
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *new_stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+ QtDemuxStream *old_stream = QTDEMUX_NTH_OLD_STREAM (qtdemux, i);
+ if (!qtdemux_reuse_and_configure_stream (qtdemux, old_stream, new_stream))
+ return GST_FLOW_ERROR;
+ }
+
+ g_ptr_array_set_size (qtdemux->old_streams, 0);
+ qtdemux->need_segment = TRUE;
+
+ return GST_FLOW_OK;
+ }
+
+ if (qtdemux->streams_aware) {
+ if (!qtdemux_update_streams (qtdemux))
+ return GST_FLOW_ERROR;
+ } else {
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+ GstTagList *list;
+
+ /* now we have all info and can expose */
+ list = stream->stream_tags;
+ stream->stream_tags = NULL;
+ if (!gst_qtdemux_add_stream (qtdemux, stream, list))
+ return GST_FLOW_ERROR;
+
+ }
+ }
+
+ gst_qtdemux_guess_bitrate (qtdemux);
+
+ gst_element_no_more_pads (GST_ELEMENT_CAST (qtdemux));
+
+ /* Any stream still left in old_streams is no longer used; send EOS on its pad */
+ for (i = 0; i < qtdemux->old_streams->len; i++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_OLD_STREAM (qtdemux, i);
+
+ if (stream->pad) {
+ GstEvent *event;
+
+ event = gst_event_new_eos ();
+ if (qtdemux->segment_seqnum)
+ gst_event_set_seqnum (event, qtdemux->segment_seqnum);
+
+ gst_pad_push_event (stream->pad, event);
+ }
+ }
+
+ g_ptr_array_set_size (qtdemux->old_streams, 0);
+
+ /* check if we should post a redirect in case there is a single trak
+ * and it is a redirecting trak */
+ if (QTDEMUX_N_STREAMS (qtdemux) == 1 &&
+ QTDEMUX_NTH_STREAM (qtdemux, 0)->redirect_uri != NULL) {
+ GstMessage *m;
+
+ GST_INFO_OBJECT (qtdemux, "Issuing a redirect due to a single track with "
+ "an external content");
+ m = gst_message_new_element (GST_OBJECT_CAST (qtdemux),
+ gst_structure_new ("redirect",
+ "new-location", G_TYPE_STRING,
+ QTDEMUX_NTH_STREAM (qtdemux, 0)->redirect_uri, NULL));
+ gst_element_post_message (GST_ELEMENT_CAST (qtdemux), m);
+ g_free (qtdemux->redirect_location);
+ qtdemux->redirect_location =
+ g_strdup (QTDEMUX_NTH_STREAM (qtdemux, 0)->redirect_uri);
+ }
+
+ g_ptr_array_foreach (qtdemux->active_streams,
+ (GFunc) qtdemux_do_allocation, qtdemux);
+
+ qtdemux->need_segment = TRUE;
+
+ qtdemux->exposed = TRUE;
+ return GST_FLOW_OK;
+ }
+
+ typedef struct
+ {
+ GstStructure *structure; /* helper for sort function */
+ gchar *location;
+ guint min_req_bitrate;
+ guint min_req_qt_version;
+ } GstQtReference;
+
+ static gint
+ qtdemux_redirects_sort_func (gconstpointer a, gconstpointer b)
+ {
+ GstQtReference *ref_a = (GstQtReference *) a;
+ GstQtReference *ref_b = (GstQtReference *) b;
+
+ if (ref_b->min_req_qt_version != ref_a->min_req_qt_version)
+ return ref_b->min_req_qt_version - ref_a->min_req_qt_version;
+
+ /* known bitrates go before unknown; higher bitrates go first */
+ return ref_b->min_req_bitrate - ref_a->min_req_bitrate;
+ }
+
+ /* sort the redirects and post a message for the application.
+ */
+ static void
+ qtdemux_process_redirects (GstQTDemux * qtdemux, GList * references)
+ {
+ GstQtReference *best;
+ GstStructure *s;
+ GstMessage *msg;
+ GValue list_val = { 0, };
+ GList *l;
+
+ g_assert (references != NULL);
+
+ references = g_list_sort (references, qtdemux_redirects_sort_func);
+
+ best = (GstQtReference *) references->data;
+
+ g_value_init (&list_val, GST_TYPE_LIST);
+
+ for (l = references; l != NULL; l = l->next) {
+ GstQtReference *ref = (GstQtReference *) l->data;
+ GValue struct_val = { 0, };
+
+ ref->structure = gst_structure_new ("redirect",
+ "new-location", G_TYPE_STRING, ref->location, NULL);
+
+ if (ref->min_req_bitrate > 0) {
+ gst_structure_set (ref->structure, "minimum-bitrate", G_TYPE_INT,
+ ref->min_req_bitrate, NULL);
+ }
+
+ g_value_init (&struct_val, GST_TYPE_STRUCTURE);
+ g_value_set_boxed (&struct_val, ref->structure);
+ gst_value_list_append_value (&list_val, &struct_val);
+ g_value_unset (&struct_val);
+ /* don't free anything here yet, since we need best->structure below */
+ }
+
+ g_assert (best != NULL);
+ s = gst_structure_copy (best->structure);
+
+ if (g_list_length (references) > 1) {
+ gst_structure_set_value (s, "locations", &list_val);
+ }
+
+ g_value_unset (&list_val);
+
+ for (l = references; l != NULL; l = l->next) {
+ GstQtReference *ref = (GstQtReference *) l->data;
+
+ gst_structure_free (ref->structure);
+ g_free (ref->location);
+ g_free (ref);
+ }
+ g_list_free (references);
+
+ GST_INFO_OBJECT (qtdemux, "posting redirect message: %" GST_PTR_FORMAT, s);
+ g_free (qtdemux->redirect_location);
+ qtdemux->redirect_location =
+ g_strdup (gst_structure_get_string (s, "new-location"));
+ msg = gst_message_new_element (GST_OBJECT_CAST (qtdemux), s);
+ gst_element_post_message (GST_ELEMENT_CAST (qtdemux), msg);
+ }
+
+ /* look for redirect nodes, collect all redirect information and
+ * process it.
+ */
+ static gboolean
+ qtdemux_parse_redirects (GstQTDemux * qtdemux)
+ {
+ GNode *rmra, *rmda, *rdrf;
+
+ rmra = qtdemux_tree_get_child_by_type (qtdemux->moov_node, FOURCC_rmra);
+ if (rmra) {
+ GList *redirects = NULL;
+
+ rmda = qtdemux_tree_get_child_by_type (rmra, FOURCC_rmda);
+ while (rmda) {
+ GstQtReference ref = { NULL, NULL, 0, 0 };
+ GNode *rmdr, *rmvc;
+
+ if ((rmdr = qtdemux_tree_get_child_by_type (rmda, FOURCC_rmdr))) {
+ ref.min_req_bitrate = QT_UINT32 ((guint8 *) rmdr->data + 12);
+ GST_LOG_OBJECT (qtdemux, "data rate atom, required bitrate = %u",
+ ref.min_req_bitrate);
+ }
+
+ if ((rmvc = qtdemux_tree_get_child_by_type (rmda, FOURCC_rmvc))) {
+ guint32 package = QT_FOURCC ((guint8 *) rmvc->data + 12);
+ guint version = QT_UINT32 ((guint8 *) rmvc->data + 16);
+
+ #ifndef GST_DISABLE_GST_DEBUG
+ guint bitmask = QT_UINT32 ((guint8 *) rmvc->data + 20);
+ #endif
+ guint check_type = QT_UINT16 ((guint8 *) rmvc->data + 24);
+
+ GST_LOG_OBJECT (qtdemux,
+ "version check atom [%" GST_FOURCC_FORMAT "], version=0x%08x"
+ ", mask=%08x, check_type=%u", GST_FOURCC_ARGS (package), version,
+ bitmask, check_type);
+ if (package == FOURCC_qtim && check_type == 0) {
+ ref.min_req_qt_version = version;
+ }
+ }
+
+ rdrf = qtdemux_tree_get_child_by_type (rmda, FOURCC_rdrf);
+ if (rdrf) {
+ guint32 ref_type;
+ guint8 *ref_data;
+ guint ref_len;
+
+ ref_len = QT_UINT32 ((guint8 *) rdrf->data);
+ if (ref_len > 20) {
+ ref_type = QT_FOURCC ((guint8 *) rdrf->data + 12);
+ ref_data = (guint8 *) rdrf->data + 20;
+ if (ref_type == FOURCC_alis) {
+ guint record_len, record_version, fn_len;
+
+ if (ref_len > 70) {
+ /* MacOSX alias record, google for alias-layout.txt */
+ record_len = QT_UINT16 (ref_data + 4);
+ record_version = QT_UINT16 (ref_data + 4 + 2);
+ fn_len = QT_UINT8 (ref_data + 50);
+ if (record_len > 50 && record_version == 2 && fn_len > 0) {
+ ref.location = g_strndup ((gchar *) ref_data + 51, fn_len);
+ }
+ } else {
+ GST_WARNING_OBJECT (qtdemux, "Invalid rdrf/alis size (%u < 70)",
+ ref_len);
+ }
+ } else if (ref_type == FOURCC_url_) {
+ ref.location = g_strndup ((gchar *) ref_data, ref_len - 8);
+ } else {
+ GST_DEBUG_OBJECT (qtdemux,
+ "unknown rdrf reference type %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (ref_type));
+ }
+ if (ref.location != NULL) {
+ GST_INFO_OBJECT (qtdemux, "New location: %s", ref.location);
+ redirects =
+ g_list_prepend (redirects, g_memdup2 (&ref, sizeof (ref)));
+ } else {
+ GST_WARNING_OBJECT (qtdemux,
+ "Failed to extract redirect location from rdrf atom");
+ }
+ } else {
+ GST_WARNING_OBJECT (qtdemux, "Invalid rdrf size (%u < 20)", ref_len);
+ }
+ }
+
+ /* look for others */
+ rmda = qtdemux_tree_get_sibling_by_type (rmda, FOURCC_rmda);
+ }
+
+ if (redirects != NULL) {
+ qtdemux_process_redirects (qtdemux, redirects);
+ }
+ }
+ return TRUE;
+ }
+
+ static GstTagList *
+ qtdemux_add_container_format (GstQTDemux * qtdemux, GstTagList * tags)
+ {
+ const gchar *fmt;
+
+ if (tags == NULL) {
+ tags = gst_tag_list_new_empty ();
+ gst_tag_list_set_scope (tags, GST_TAG_SCOPE_GLOBAL);
+ }
+
+ if (qtdemux->major_brand == FOURCC_mjp2)
+ fmt = "Motion JPEG 2000";
+ else if ((qtdemux->major_brand & 0xffff) == FOURCC_3g__)
+ fmt = "3GP";
+ else if (qtdemux->major_brand == FOURCC_qt__)
+ fmt = "Quicktime";
+ else if (qtdemux->fragmented)
+ fmt = "ISO fMP4";
+ else
+ fmt = "ISO MP4/M4A";
+
+ GST_LOG_OBJECT (qtdemux, "mapped %" GST_FOURCC_FORMAT " to '%s'",
+ GST_FOURCC_ARGS (qtdemux->major_brand), fmt);
+
+ gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, GST_TAG_CONTAINER_FORMAT,
+ fmt, NULL);
+
+ return tags;
+ }
+
+ /* we have read the complete moov node now.
+ * This function parses all of the relevant info, creates the traks and
+ * prepares all data structures for playback
+ */
+ static gboolean
+ qtdemux_parse_tree (GstQTDemux * qtdemux)
+ {
+ GNode *mvhd;
+ GNode *trak;
+ GNode *udta;
+ GNode *mvex;
+ GNode *pssh;
+ guint64 creation_time;
+ GstDateTime *datetime = NULL;
+ gint version;
+
+ /* make sure we have a usable taglist */
+ qtdemux->tag_list = gst_tag_list_make_writable (qtdemux->tag_list);
+
+ mvhd = qtdemux_tree_get_child_by_type (qtdemux->moov_node, FOURCC_mvhd);
+ if (mvhd == NULL) {
+ GST_LOG_OBJECT (qtdemux, "No mvhd node found, looking for redirects.");
+ return qtdemux_parse_redirects (qtdemux);
+ }
+
+ version = QT_UINT8 ((guint8 *) mvhd->data + 8);
+ if (version == 1) {
+ creation_time = QT_UINT64 ((guint8 *) mvhd->data + 12);
+ qtdemux->timescale = QT_UINT32 ((guint8 *) mvhd->data + 28);
+ qtdemux->duration = QT_UINT64 ((guint8 *) mvhd->data + 32);
+ } else if (version == 0) {
+ creation_time = QT_UINT32 ((guint8 *) mvhd->data + 12);
+ qtdemux->timescale = QT_UINT32 ((guint8 *) mvhd->data + 20);
+ qtdemux->duration = QT_UINT32 ((guint8 *) mvhd->data + 24);
+ } else {
+ GST_WARNING_OBJECT (qtdemux, "Unhandled mvhd version %d", version);
+ return FALSE;
+ }
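+ /* A version 1 mvhd stores 64-bit creation/modification times and a
+ * 64-bit duration, which is why the timescale moves from offset 20 to
+ * 28 and the duration from 24 to 32 relative to the version 0 layout. */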
+
+ /* Moving qt creation time (secs since 1904) to unix time */
+ if (creation_time != 0) {
+ /* Try to use epoch first as it should be faster and more commonly found */
+ if (creation_time >= QTDEMUX_SECONDS_FROM_1904_TO_1970) {
+ gint64 now_s;
+
+ creation_time -= QTDEMUX_SECONDS_FROM_1904_TO_1970;
+ /* some data cleansing sanity */
+ now_s = g_get_real_time () / G_USEC_PER_SEC;
+ if (now_s + 24 * 3600 < creation_time) {
+ GST_DEBUG_OBJECT (qtdemux, "discarding bogus future creation time");
+ } else {
+ datetime = gst_date_time_new_from_unix_epoch_utc (creation_time);
+ }
+ } else {
+ GDateTime *base_dt = g_date_time_new_utc (1904, 1, 1, 0, 0, 0);
+ GDateTime *dt, *dt_local;
+
+ dt = g_date_time_add_seconds (base_dt, creation_time);
+ dt_local = g_date_time_to_local (dt);
+ datetime = gst_date_time_new_from_g_date_time (dt_local);
+
+ g_date_time_unref (base_dt);
+ g_date_time_unref (dt);
+ }
+ }
+ if (datetime) {
+ /* Use KEEP as explicit tags should have a higher priority than mvhd tag */
+ gst_tag_list_add (qtdemux->tag_list, GST_TAG_MERGE_KEEP, GST_TAG_DATE_TIME,
+ datetime, NULL);
+ gst_date_time_unref (datetime);
+ }
+
+ GST_INFO_OBJECT (qtdemux, "timescale: %u", qtdemux->timescale);
+ GST_INFO_OBJECT (qtdemux, "duration: %" G_GUINT64_FORMAT, qtdemux->duration);
+
+ /* check for fragmented file and get some (default) data */
+ mvex = qtdemux_tree_get_child_by_type (qtdemux->moov_node, FOURCC_mvex);
+ if (mvex) {
+ GNode *mehd;
+ GstByteReader mehd_data;
+
+ /* let track parsing or anyone know weird stuff might happen ... */
+ qtdemux->fragmented = TRUE;
+
+ /* compensate for total duration */
+ mehd = qtdemux_tree_get_child_by_type_full (mvex, FOURCC_mehd, &mehd_data);
+ if (mehd)
+ qtdemux_parse_mehd (qtdemux, &mehd_data);
+ }
+
+ /* Update the movie segment duration, unless it was directly given to us
+ * by upstream. Otherwise leave it as is, as we don't want to mangle the
+ * duration provided by upstream, which may come e.g. from an MPD file. */
+ if (!qtdemux->upstream_format_is_time) {
+ GstClockTime duration;
+ /* set duration in the segment info */
+ gst_qtdemux_get_duration (qtdemux, &duration);
+ qtdemux->segment.duration = duration;
+ /* also do not exceed duration; stop is set that way post seek anyway,
+ * and segment activation falls back to duration,
+ * whereas loop only checks stop, so let's align this here as well */
+ qtdemux->segment.stop = duration;
+ }
+
+ /* parse all traks */
+ trak = qtdemux_tree_get_child_by_type (qtdemux->moov_node, FOURCC_trak);
+ while (trak) {
+ qtdemux_parse_trak (qtdemux, trak);
+ /* iterate all siblings */
+ trak = qtdemux_tree_get_sibling_by_type (trak, FOURCC_trak);
+ }
+
+ qtdemux->tag_list = gst_tag_list_make_writable (qtdemux->tag_list);
+
+ /* find tags */
+ udta = qtdemux_tree_get_child_by_type (qtdemux->moov_node, FOURCC_udta);
+ if (udta) {
+ qtdemux_parse_udta (qtdemux, qtdemux->tag_list, udta);
+ } else {
+ GST_LOG_OBJECT (qtdemux, "No udta node found.");
+ }
+
+ /* maybe also some tags in meta box */
+ udta = qtdemux_tree_get_child_by_type (qtdemux->moov_node, FOURCC_meta);
+ if (udta) {
+ GST_DEBUG_OBJECT (qtdemux, "Parsing meta box for tags.");
+ qtdemux_parse_udta (qtdemux, qtdemux->tag_list, udta);
+ } else {
+ GST_LOG_OBJECT (qtdemux, "No meta node found.");
+ }
+
+ /* parse any protection system info */
+ pssh = qtdemux_tree_get_child_by_type (qtdemux->moov_node, FOURCC_pssh);
+ while (pssh) {
+ GST_LOG_OBJECT (qtdemux, "Parsing pssh box.");
+ qtdemux_parse_pssh (qtdemux, pssh);
+ pssh = qtdemux_tree_get_sibling_by_type (pssh, FOURCC_pssh);
+ }
+
+ qtdemux->tag_list = qtdemux_add_container_format (qtdemux, qtdemux->tag_list);
+
+ return TRUE;
+ }
+
+ /* taken from ffmpeg */
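+ /* Reads an MPEG-4 "expandable" descriptor size: up to four bytes, each
+ * contributing 7 bits, with the top bit flagging continuation. For
+ * example, 0x05 and 0x80 0x80 0x80 0x05 both decode to a length of 5. */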
+ static int
+ read_descr_size (guint8 * ptr, guint8 * end, guint8 ** end_out)
+ {
+ int count = 4;
+ int len = 0;
+
+ while (count--) {
+ int c;
+
+ if (ptr >= end)
+ return -1;
+
+ c = *ptr++;
+ len = (len << 7) | (c & 0x7f);
+ if (!(c & 0x80))
+ break;
+ }
+ *end_out = ptr;
+ return len;
+ }
+
+ static GList *
+ parse_xiph_stream_headers (GstQTDemux * qtdemux, gpointer codec_data,
+ gsize codec_data_size)
+ {
+ GList *list = NULL;
+ guint8 *p = codec_data;
+ gint i, offset, num_packets;
+ guint *length, last;
+
+ GST_MEMDUMP_OBJECT (qtdemux, "xiph codec data", codec_data, codec_data_size);
+
+ if (codec_data == NULL || codec_data_size == 0)
+ goto error;
+
+ /* at the start of the stream, Vorbis audio and Theora video need the
+ * codec_priv data sent as the first three packets */
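+ /* The first byte stores (packet count - 1); the packet sizes then use
+ * Ogg-style lacing, where each size is the sum of bytes up to and
+ * including the first byte below 0xff (e.g. 0xff 0xff 0x1e encodes 540).
+ * The last packet implicitly takes the remainder of the data. */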
+ num_packets = p[0] + 1;
+ GST_DEBUG_OBJECT (qtdemux,
+ "%u stream headers, total length=%" G_GSIZE_FORMAT " bytes",
+ (guint) num_packets, codec_data_size);
+
+ /* Let's put some limits in place; there doesn't seem to be a Xiph codec
+ * with more than 3-4 headers */
+ if (G_UNLIKELY (num_packets > 16)) {
+ GST_WARNING_OBJECT (qtdemux,
+ "Unlikely number of xiph headers, most likely not valid");
+ goto error;
+ }
+
+ length = g_alloca (num_packets * sizeof (guint));
+ last = 0;
+ offset = 1;
+
+ /* first packets, read length values */
+ for (i = 0; i < num_packets - 1; i++) {
+ length[i] = 0;
+ while (offset < codec_data_size) {
+ length[i] += p[offset];
+ if (p[offset++] != 0xff)
+ break;
+ }
+ last += length[i];
+ }
+ if (offset + last > codec_data_size)
+ goto error;
+
+ /* last packet is the remaining size */
+ length[i] = codec_data_size - offset - last;
+
+ for (i = 0; i < num_packets; i++) {
+ GstBuffer *hdr;
+
+ GST_DEBUG_OBJECT (qtdemux, "buffer %d: %u bytes", i, (guint) length[i]);
+
+ if (offset + length[i] > codec_data_size)
+ goto error;
+
+ hdr = gst_buffer_new_memdup (p + offset, length[i]);
+ list = g_list_append (list, hdr);
+
+ offset += length[i];
+ }
+
+ return list;
+
+ /* ERRORS */
+ error:
+ {
+ if (list != NULL)
+ g_list_free_full (list, (GDestroyNotify) gst_buffer_unref);
+ return NULL;
+ }
+
+ }
+
+ /* this can change the codec originally present in @list */
+ static void
+ gst_qtdemux_handle_esds (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ QtDemuxStreamStsdEntry * entry, GNode * esds, GstTagList * list)
+ {
+ int len = QT_UINT32 (esds->data);
+ guint8 *ptr = esds->data;
+ guint8 *end = ptr + len;
+ int tag;
+ guint8 *data_ptr = NULL;
+ int data_len = 0;
+ guint8 object_type_id = 0;
+ guint8 stream_type = 0;
+ const char *codec_name = NULL;
+ GstCaps *caps = NULL;
+
+ GST_MEMDUMP_OBJECT (qtdemux, "esds", ptr, len);
+ ptr += 8;
+ GST_DEBUG_OBJECT (qtdemux, "version/flags = %08x", QT_UINT32 (ptr));
+ ptr += 4;
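+ /* walk the descriptor tree: each descriptor is a one-byte tag followed
+ * by an expandable length field (see read_descr_size () above) */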
+ while (ptr + 1 < end) {
+ tag = QT_UINT8 (ptr);
+ GST_DEBUG_OBJECT (qtdemux, "tag = %02x", tag);
+ ptr++;
+ len = read_descr_size (ptr, end, &ptr);
+ GST_DEBUG_OBJECT (qtdemux, "len = %d", len);
+
+ /* Check the stated amount of data is available for reading */
+ if (len < 0 || ptr + len > end)
+ break;
+
+ switch (tag) {
+ case ES_DESCRIPTOR_TAG:
+ GST_DEBUG_OBJECT (qtdemux, "ID 0x%04x", QT_UINT16 (ptr));
+ GST_DEBUG_OBJECT (qtdemux, "priority 0x%04x", QT_UINT8 (ptr + 2));
+ ptr += 3;
+ break;
+ case DECODER_CONFIG_DESC_TAG:{
+ guint max_bitrate, avg_bitrate;
+
+ object_type_id = QT_UINT8 (ptr);
+ stream_type = QT_UINT8 (ptr + 1) >> 2;
+ max_bitrate = QT_UINT32 (ptr + 5);
+ avg_bitrate = QT_UINT32 (ptr + 9);
+ GST_DEBUG_OBJECT (qtdemux, "object_type_id %02x", object_type_id);
+ GST_DEBUG_OBJECT (qtdemux, "stream_type %02x", stream_type);
+ GST_DEBUG_OBJECT (qtdemux, "buffer_size_db %02x", QT_UINT24 (ptr + 2));
+ GST_DEBUG_OBJECT (qtdemux, "max bitrate %u", max_bitrate);
+ GST_DEBUG_OBJECT (qtdemux, "avg bitrate %u", avg_bitrate);
+ if (max_bitrate > 0 && max_bitrate < G_MAXUINT32) {
+ gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
+ GST_TAG_MAXIMUM_BITRATE, max_bitrate, NULL);
+ }
+ if (avg_bitrate > 0 && avg_bitrate < G_MAXUINT32) {
+ gst_tag_list_add (list, GST_TAG_MERGE_REPLACE, GST_TAG_BITRATE,
+ avg_bitrate, NULL);
+ }
+ ptr += 13;
+ break;
+ }
+ case DECODER_SPECIFIC_INFO_TAG:
+ GST_MEMDUMP_OBJECT (qtdemux, "data", ptr, len);
+ if (object_type_id == 0xe0 && len == 0x40) {
+ guint8 *data;
+ GstStructure *s;
+ guint32 clut[16];
+ gint i;
+
+ GST_DEBUG_OBJECT (qtdemux,
+ "Have VOBSUB palette. Creating palette event");
+ /* move to decConfigDescr data and read palette */
+ data = ptr;
+ for (i = 0; i < 16; i++) {
+ clut[i] = QT_UINT32 (data);
+ data += 4;
+ }
+
+ s = gst_structure_new ("application/x-gst-dvd", "event",
+ G_TYPE_STRING, "dvd-spu-clut-change",
+ "clut00", G_TYPE_INT, clut[0], "clut01", G_TYPE_INT, clut[1],
+ "clut02", G_TYPE_INT, clut[2], "clut03", G_TYPE_INT, clut[3],
+ "clut04", G_TYPE_INT, clut[4], "clut05", G_TYPE_INT, clut[5],
+ "clut06", G_TYPE_INT, clut[6], "clut07", G_TYPE_INT, clut[7],
+ "clut08", G_TYPE_INT, clut[8], "clut09", G_TYPE_INT, clut[9],
+ "clut10", G_TYPE_INT, clut[10], "clut11", G_TYPE_INT, clut[11],
+ "clut12", G_TYPE_INT, clut[12], "clut13", G_TYPE_INT, clut[13],
+ "clut14", G_TYPE_INT, clut[14], "clut15", G_TYPE_INT, clut[15],
+ NULL);
+
+ /* store event and trigger custom processing */
+ stream->pending_event =
+ gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s);
+ } else {
+ /* Generic codec_data handler puts it on the caps */
+ data_ptr = ptr;
+ data_len = len;
+ }
+
+ ptr += len;
+ break;
+ case SL_CONFIG_DESC_TAG:
+ GST_DEBUG_OBJECT (qtdemux, "data %02x", QT_UINT8 (ptr));
+ ptr += 1;
+ break;
+ default:
+ GST_DEBUG_OBJECT (qtdemux, "Unknown/unhandled descriptor tag %02x",
+ tag);
+ GST_MEMDUMP_OBJECT (qtdemux, "descriptor data", ptr, len);
+ ptr += len;
+ break;
+ }
+ }
+
+ /* object_type_id in the esds atom in mp4a and mp4v tells us which codec is
+ * in use, and should also be used to override some other parameters for some
+ * codecs. */
+ switch (object_type_id) {
+ case 0x20: /* MPEG-4 */
+ /* 4 bytes for the visual_object_sequence_start_code and 1 byte for the
+ * profile_and_level_indication */
+ if (data_ptr != NULL && data_len >= 5 &&
+ GST_READ_UINT32_BE (data_ptr) == 0x000001b0) {
+ gst_codec_utils_mpeg4video_caps_set_level_and_profile (entry->caps,
+ data_ptr + 4, data_len - 4);
+ }
+ break; /* Nothing special needed here */
+ case 0x21: /* H.264 */
+ codec_name = "H.264 / AVC";
+ caps = gst_caps_new_simple ("video/x-h264",
+ "stream-format", G_TYPE_STRING, "avc",
+ "alignment", G_TYPE_STRING, "au", NULL);
+ break;
+ case 0x40: /* AAC (any) */
+ case 0x66: /* AAC Main */
+ case 0x67: /* AAC LC */
+ case 0x68: /* AAC SSR */
+ /* Override channels and rate based on the codec_data, as it's often
+ * wrong. */
+ /* Only do so for basic setup without HE-AAC extension */
+ if (data_ptr && data_len == 2) {
+ guint channels, rate;
+
+ channels = gst_codec_utils_aac_get_channels (data_ptr, data_len);
+ if (channels > 0)
+ entry->n_channels = channels;
+
+ rate = gst_codec_utils_aac_get_sample_rate (data_ptr, data_len);
+ if (rate > 0)
+ entry->rate = rate;
+ }
+
+ /* Set level and profile if possible */
+ if (data_ptr != NULL && data_len >= 2) {
+ gst_codec_utils_aac_caps_set_level_and_profile (entry->caps,
+ data_ptr, data_len);
+ } else {
+ const gchar *profile_str = NULL;
+ GstBuffer *buffer;
+ GstMapInfo map;
+ guint8 *codec_data;
+ gint rate_idx, profile;
+
+ /* No codec_data, let's invent something.
+ * FIXME: This is wrong for SBR! */
+
+ GST_WARNING_OBJECT (qtdemux, "No codec_data for AAC available");
+
+ buffer = gst_buffer_new_and_alloc (2);
+ gst_buffer_map (buffer, &map, GST_MAP_WRITE);
+ codec_data = map.data;
+
+ rate_idx =
+ gst_codec_utils_aac_get_index_from_sample_rate (CUR_STREAM
+ (stream)->rate);
+
+ switch (object_type_id) {
+ case 0x66:
+ profile_str = "main";
+ profile = 0;
+ break;
+ case 0x67:
+ profile_str = "lc";
+ profile = 1;
+ break;
+ case 0x68:
+ profile_str = "ssr";
+ profile = 2;
+ break;
+ default:
+ profile = 3;
+ break;
+ }
+
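+ /* Pack the two-byte AudioSpecificConfig: 5-bit object type
+ * (profile + 1), 4-bit sample rate index and 4-bit channel
+ * configuration, MSB-first. */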
+ codec_data[0] = ((profile + 1) << 3) | ((rate_idx & 0xE) >> 1);
+ codec_data[1] =
+ ((rate_idx & 0x1) << 7) | (CUR_STREAM (stream)->n_channels << 3);
+
+ gst_buffer_unmap (buffer, &map);
+ gst_caps_set_simple (CUR_STREAM (stream)->caps, "codec_data",
+ GST_TYPE_BUFFER, buffer, NULL);
+ gst_buffer_unref (buffer);
+
+ if (profile_str) {
+ gst_caps_set_simple (CUR_STREAM (stream)->caps, "profile",
+ G_TYPE_STRING, profile_str, NULL);
+ }
+ }
+ break;
+ case 0x60: /* MPEG-2, various profiles */
+ case 0x61:
+ case 0x62:
+ case 0x63:
+ case 0x64:
+ case 0x65:
+ codec_name = "MPEG-2 video";
+ caps = gst_caps_new_simple ("video/mpeg",
+ "mpegversion", G_TYPE_INT, 2,
+ "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
+ break;
+ case 0x69: /* MPEG-2 BC audio */
+ case 0x6B: /* MPEG-1 audio */
+ caps = gst_caps_new_simple ("audio/mpeg",
+ "mpegversion", G_TYPE_INT, 1, NULL);
+ codec_name = "MPEG-1 audio";
+ break;
+ case 0x6A: /* MPEG-1 */
+ codec_name = "MPEG-1 video";
+ caps = gst_caps_new_simple ("video/mpeg",
+ "mpegversion", G_TYPE_INT, 1,
+ "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
+ break;
+ case 0x6C: /* MJPEG */
+ caps =
+ gst_caps_new_simple ("image/jpeg", "parsed", G_TYPE_BOOLEAN, TRUE,
+ NULL);
+ codec_name = "Motion-JPEG";
+ break;
+ case 0x6D: /* PNG */
+ caps =
+ gst_caps_new_simple ("image/png", "parsed", G_TYPE_BOOLEAN, TRUE,
+ NULL);
+ codec_name = "PNG still images";
+ break;
+ case 0x6E: /* JPEG2000 */
+ codec_name = "JPEG-2000";
+ caps = gst_caps_new_simple ("image/x-j2c", "fields", G_TYPE_INT, 1, NULL);
+ break;
+ case 0xA4: /* Dirac */
+ codec_name = "Dirac";
+ caps = gst_caps_new_empty_simple ("video/x-dirac");
+ break;
+ case 0xA5: /* AC3 */
+ codec_name = "AC-3 audio";
+ caps = gst_caps_new_simple ("audio/x-ac3",
+ "framed", G_TYPE_BOOLEAN, TRUE, NULL);
+ break;
+ case 0xA9: /* DTS */
+ codec_name = "DTS audio";
+ caps = gst_caps_new_simple ("audio/x-dts",
+ "framed", G_TYPE_BOOLEAN, TRUE, NULL);
+ break;
+ case 0xDD:
+ if (stream_type == 0x05 && data_ptr) {
+ GList *headers =
+ parse_xiph_stream_headers (qtdemux, data_ptr, data_len);
+ if (headers) {
+ GList *tmp;
+ GValue arr_val = G_VALUE_INIT;
+ GValue buf_val = G_VALUE_INIT;
+ GstStructure *s;
+
+ /* Let's assume it's vorbis if it's an audio stream of type 0xdd and we have codec data that extracts properly */
+ codec_name = "Vorbis";
+ caps = gst_caps_new_empty_simple ("audio/x-vorbis");
+ g_value_init (&arr_val, GST_TYPE_ARRAY);
+ g_value_init (&buf_val, GST_TYPE_BUFFER);
+ for (tmp = headers; tmp; tmp = tmp->next) {
+ g_value_set_boxed (&buf_val, (GstBuffer *) tmp->data);
+ gst_value_array_append_value (&arr_val, &buf_val);
+ }
+ s = gst_caps_get_structure (caps, 0);
+ gst_structure_take_value (s, "streamheader", &arr_val);
+ g_value_unset (&buf_val);
+ g_list_free (headers);
+
+ data_ptr = NULL;
+ data_len = 0;
+ }
+ }
+ break;
+ case 0xE1: /* QCELP */
+ /* QCELP, the codec_data is a riff tag (little endian) with
+ * more info (http://ftp.3gpp2.org/TSGC/Working/2003/2003-05-SanDiego/TSG-C-2003-05-San%20Diego/WG1/SWG12/C12-20030512-006%20=%20C12-20030217-015_Draft_Baseline%20Text%20of%20FFMS_R2.doc). */
+ caps = gst_caps_new_empty_simple ("audio/qcelp");
+ codec_name = "QCELP";
+ break;
+ default:
+ break;
+ }
+
+ /* If we have a replacement caps, then change our caps for this stream */
+ if (caps) {
+ gst_caps_unref (entry->caps);
+ entry->caps = caps;
+ }
+
+ if (codec_name && list)
+ gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
+ GST_TAG_AUDIO_CODEC, codec_name, NULL);
+
+ /* Add the codec_data attribute to caps, if we have it */
+ if (data_ptr) {
+ GstBuffer *buffer;
+
+ buffer = gst_buffer_new_and_alloc (data_len);
+ gst_buffer_fill (buffer, 0, data_ptr, data_len);
+
+ GST_DEBUG_OBJECT (qtdemux, "setting codec_data from esds");
+ GST_MEMDUMP_OBJECT (qtdemux, "codec_data from esds", data_ptr, data_len);
+
+ gst_caps_set_simple (entry->caps, "codec_data", GST_TYPE_BUFFER,
+ buffer, NULL);
+ gst_buffer_unref (buffer);
+ }
+
+ }
+
+ static inline GstCaps *
+ _get_unknown_codec_name (const gchar * type, guint32 fourcc)
+ {
+ GstCaps *caps;
+ guint i;
+ char *s, fourstr[5];
+
+ g_snprintf (fourstr, 5, "%" GST_FOURCC_FORMAT, GST_FOURCC_ARGS (fourcc));
+ for (i = 0; i < 4; i++) {
+ if (!g_ascii_isalnum (fourstr[i]))
+ fourstr[i] = '_';
+ }
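+ /* e.g. an unknown fourcc with bytes 'a','b',0x01,'d' yields the caps
+ * name "<type>/x-gst-fourcc-ab_d" after sanitising (illustrative) */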
+ s = g_strdup_printf ("%s/x-gst-fourcc-%s", type, g_strstrip (fourstr));
+ caps = gst_caps_new_empty_simple (s);
+ g_free (s);
+ return caps;
+ }
+
+ #define _codec(name) \
+ do { \
+ if (codec_name) { \
+ *codec_name = g_strdup (name); \
+ } \
+ } while (0)
+
+ static GstCaps *
+ qtdemux_video_caps (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ QtDemuxStreamStsdEntry * entry, guint32 fourcc,
+ const guint8 * stsd_entry_data, gchar ** codec_name)
+ {
+ GstCaps *caps = NULL;
+ GstVideoFormat format = GST_VIDEO_FORMAT_UNKNOWN;
+
+ switch (fourcc) {
+ case FOURCC_png:
+ _codec ("PNG still images");
+ caps = gst_caps_new_empty_simple ("image/png");
+ break;
+ case FOURCC_jpeg:
+ _codec ("JPEG still images");
+ caps =
+ gst_caps_new_simple ("image/jpeg", "parsed", G_TYPE_BOOLEAN, TRUE,
+ NULL);
+ break;
+ case GST_MAKE_FOURCC ('m', 'j', 'p', 'a'):
+ case GST_MAKE_FOURCC ('A', 'V', 'D', 'J'):
+ case GST_MAKE_FOURCC ('M', 'J', 'P', 'G'):
+ case GST_MAKE_FOURCC ('d', 'm', 'b', '1'):
+ _codec ("Motion-JPEG");
+ caps =
+ gst_caps_new_simple ("image/jpeg", "parsed", G_TYPE_BOOLEAN, TRUE,
+ NULL);
+ break;
+ case GST_MAKE_FOURCC ('m', 'j', 'p', 'b'):
+ _codec ("Motion-JPEG format B");
+ caps = gst_caps_new_empty_simple ("video/x-mjpeg-b");
+ break;
+ case FOURCC_mjp2:
+ _codec ("JPEG-2000");
+ /* override to what it should be according to spec, avoid palette_data */
+ entry->bits_per_sample = 24;
+ caps = gst_caps_new_simple ("image/x-j2c", "fields", G_TYPE_INT, 1, NULL);
+ break;
+ case FOURCC_SVQ3:
+ _codec ("Sorensen video v.3");
+ caps = gst_caps_new_simple ("video/x-svq",
+ "svqversion", G_TYPE_INT, 3, NULL);
+ break;
+ case GST_MAKE_FOURCC ('s', 'v', 'q', 'i'):
+ case GST_MAKE_FOURCC ('S', 'V', 'Q', '1'):
+ _codec ("Sorensen video v.1");
+ caps = gst_caps_new_simple ("video/x-svq",
+ "svqversion", G_TYPE_INT, 1, NULL);
+ break;
+ case GST_MAKE_FOURCC ('W', 'R', 'A', 'W'):
+ caps = gst_caps_new_empty_simple ("video/x-raw");
+ gst_caps_set_simple (caps, "format", G_TYPE_STRING, "RGB8P", NULL);
+ _codec ("Windows Raw RGB");
+ stream->alignment = 32;
+ break;
+ case FOURCC_raw_:
+ {
+ guint16 bps;
+
+ bps = QT_UINT16 (stsd_entry_data + 82);
+ switch (bps) {
+ case 15:
+ format = GST_VIDEO_FORMAT_RGB15;
+ break;
+ case 16:
+ format = GST_VIDEO_FORMAT_RGB16;
+ break;
+ case 24:
+ format = GST_VIDEO_FORMAT_RGB;
+ break;
+ case 32:
+ format = GST_VIDEO_FORMAT_ARGB;
+ break;
+ default:
+ /* unknown */
+ break;
+ }
+ break;
+ }
+ case GST_MAKE_FOURCC ('y', 'v', '1', '2'):
+ format = GST_VIDEO_FORMAT_I420;
+ break;
+ case GST_MAKE_FOURCC ('y', 'u', 'v', '2'):
+ case GST_MAKE_FOURCC ('Y', 'u', 'v', '2'):
+ format = GST_VIDEO_FORMAT_I420;
+ break;
+ case FOURCC_2vuy:
+ case GST_MAKE_FOURCC ('2', 'V', 'u', 'y'):
+ format = GST_VIDEO_FORMAT_UYVY;
+ break;
+ case GST_MAKE_FOURCC ('v', '3', '0', '8'):
+ format = GST_VIDEO_FORMAT_v308;
+ break;
+ case GST_MAKE_FOURCC ('v', '2', '1', '6'):
+ format = GST_VIDEO_FORMAT_v216;
+ break;
+ case FOURCC_v210:
+ format = GST_VIDEO_FORMAT_v210;
+ break;
+ case GST_MAKE_FOURCC ('r', '2', '1', '0'):
+ format = GST_VIDEO_FORMAT_r210;
+ break;
+ /* Packed YUV 4:4:4 10 bit in 32 bits, complex
+ case GST_MAKE_FOURCC ('v', '4', '1', '0'):
+ format = GST_VIDEO_FORMAT_v410;
+ break;
+ */
+ /* Packed YUV 4:4:4:4 8 bit in 32 bits
+ * but different order than AYUV
+ case GST_MAKE_FOURCC ('v', '4', '0', '8'):
+ format = GST_VIDEO_FORMAT_v408;
+ break;
+ */
+ case GST_MAKE_FOURCC ('m', 'p', 'e', 'g'):
+ case GST_MAKE_FOURCC ('m', 'p', 'g', '1'):
+ _codec ("MPEG-1 video");
+ caps = gst_caps_new_simple ("video/mpeg", "mpegversion", G_TYPE_INT, 1,
+ "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
+ break;
+ case GST_MAKE_FOURCC ('h', 'd', 'v', '1'): /* HDV 720p30 */
+ case GST_MAKE_FOURCC ('h', 'd', 'v', '2'): /* HDV 1080i60 */
+ case GST_MAKE_FOURCC ('h', 'd', 'v', '3'): /* HDV 1080i50 */
+ case GST_MAKE_FOURCC ('h', 'd', 'v', '4'): /* HDV 720p24 */
+ case GST_MAKE_FOURCC ('h', 'd', 'v', '5'): /* HDV 720p25 */
+ case GST_MAKE_FOURCC ('h', 'd', 'v', '6'): /* HDV 1080p24 */
+ case GST_MAKE_FOURCC ('h', 'd', 'v', '7'): /* HDV 1080p25 */
+ case GST_MAKE_FOURCC ('h', 'd', 'v', '8'): /* HDV 1080p30 */
+ case GST_MAKE_FOURCC ('h', 'd', 'v', '9'): /* HDV 720p60 */
+ case GST_MAKE_FOURCC ('h', 'd', 'v', 'a'): /* HDV 720p50 */
+ case GST_MAKE_FOURCC ('m', 'x', '5', 'n'): /* MPEG2 IMX NTSC 525/60 50mb/s produced by FCP */
+ case GST_MAKE_FOURCC ('m', 'x', '5', 'p'): /* MPEG2 IMX PAL 625/50 50mb/s produced by FCP */
+ case GST_MAKE_FOURCC ('m', 'x', '4', 'n'): /* MPEG2 IMX NTSC 525/60 40mb/s produced by FCP */
+ case GST_MAKE_FOURCC ('m', 'x', '4', 'p'): /* MPEG2 IMX PAL 625/50 40mb/s produced by FCP */
+ case GST_MAKE_FOURCC ('m', 'x', '3', 'n'): /* MPEG2 IMX NTSC 525/60 30mb/s produced by FCP */
+ case GST_MAKE_FOURCC ('m', 'x', '3', 'p'): /* MPEG2 IMX PAL 625/50 30mb/s produced by FCP */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', '1'): /* XDCAM HD 720p30 35Mb/s */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', '2'): /* XDCAM HD 1080i60 35Mb/s */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', '3'): /* XDCAM HD 1080i50 35Mb/s */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', '4'): /* XDCAM HD 720p24 35Mb/s */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', '5'): /* XDCAM HD 720p25 35Mb/s */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', '6'): /* XDCAM HD 1080p24 35Mb/s */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', '7'): /* XDCAM HD 1080p25 35Mb/s */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', '8'): /* XDCAM HD 1080p30 35Mb/s */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', '9'): /* XDCAM HD 720p60 35Mb/s */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', 'a'): /* XDCAM HD 720p50 35Mb/s */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', 'b'): /* XDCAM EX 1080i60 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', 'c'): /* XDCAM EX 1080i50 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', 'd'): /* XDCAM HD 1080p24 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', 'e'): /* XDCAM HD 1080p25 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', 'f'): /* XDCAM HD 1080p30 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', '5', '1'): /* XDCAM HD422 720p30 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', '5', '4'): /* XDCAM HD422 720p24 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', '5', '5'): /* XDCAM HD422 720p25 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', '5', '9'): /* XDCAM HD422 720p60 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', '5', 'a'): /* XDCAM HD422 720p50 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', '5', 'b'): /* XDCAM HD422 1080i60 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', '5', 'c'): /* XDCAM HD422 1080i50 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', '5', 'd'): /* XDCAM HD422 1080p24 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', '5', 'e'): /* XDCAM HD422 1080p25 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', '5', 'f'): /* XDCAM HD422 1080p30 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', 'h', 'd'): /* XDCAM HD 540p */
+ case GST_MAKE_FOURCC ('x', 'd', 'h', '2'): /* XDCAM HD422 540p */
+ case GST_MAKE_FOURCC ('A', 'V', 'm', 'p'): /* AVID IMX PAL */
+ case GST_MAKE_FOURCC ('m', 'p', 'g', '2'): /* AVID IMX PAL */
+ case GST_MAKE_FOURCC ('m', 'p', '2', 'v'): /* AVID IMX PAL */
+ case GST_MAKE_FOURCC ('m', '2', 'v', '1'):
+ _codec ("MPEG-2 video");
+ caps = gst_caps_new_simple ("video/mpeg", "mpegversion", G_TYPE_INT, 2,
+ "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
+ break;
+ case GST_MAKE_FOURCC ('g', 'i', 'f', ' '):
+ _codec ("GIF still images");
+ caps = gst_caps_new_empty_simple ("image/gif");
+ break;
+ case FOURCC_h263:
+ case GST_MAKE_FOURCC ('H', '2', '6', '3'):
+ case FOURCC_s263:
+ case GST_MAKE_FOURCC ('U', '2', '6', '3'):
+ _codec ("H.263");
+ /* ffmpeg uses the height/width props, don't know why */
+ caps = gst_caps_new_simple ("video/x-h263",
+ "variant", G_TYPE_STRING, "itu", NULL);
+ break;
+ case FOURCC_mp4v:
+ case FOURCC_MP4V:
+ _codec ("MPEG-4 video");
+ caps = gst_caps_new_simple ("video/mpeg", "mpegversion", G_TYPE_INT, 4,
+ "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
+ break;
+ case GST_MAKE_FOURCC ('3', 'i', 'v', 'd'):
+ case GST_MAKE_FOURCC ('3', 'I', 'V', 'D'):
+ _codec ("Microsoft MPEG-4 4.3"); /* FIXME? */
+ caps = gst_caps_new_simple ("video/x-msmpeg",
+ "msmpegversion", G_TYPE_INT, 43, NULL);
+ break;
+ case GST_MAKE_FOURCC ('D', 'I', 'V', '3'):
+ _codec ("DivX 3");
+ caps = gst_caps_new_simple ("video/x-divx",
+ "divxversion", G_TYPE_INT, 3, NULL);
+ break;
+ case GST_MAKE_FOURCC ('D', 'I', 'V', 'X'):
+ case GST_MAKE_FOURCC ('d', 'i', 'v', 'x'):
+ _codec ("DivX 4");
+ caps = gst_caps_new_simple ("video/x-divx",
+ "divxversion", G_TYPE_INT, 4, NULL);
+ break;
+ case GST_MAKE_FOURCC ('D', 'X', '5', '0'):
+ _codec ("DivX 5");
+ caps = gst_caps_new_simple ("video/x-divx",
+ "divxversion", G_TYPE_INT, 5, NULL);
+ break;
+
+ case GST_MAKE_FOURCC ('F', 'F', 'V', '1'):
+ _codec ("FFV1");
+ caps = gst_caps_new_simple ("video/x-ffv",
+ "ffvversion", G_TYPE_INT, 1, NULL);
+ break;
+
+ case GST_MAKE_FOURCC ('3', 'I', 'V', '1'):
+ case GST_MAKE_FOURCC ('3', 'I', 'V', '2'):
+ case FOURCC_XVID:
+ case FOURCC_xvid:
+ case FOURCC_FMP4:
+ case FOURCC_fmp4:
+ case GST_MAKE_FOURCC ('U', 'M', 'P', '4'):
+ caps = gst_caps_new_simple ("video/mpeg", "mpegversion", G_TYPE_INT, 4,
+ "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
+ _codec ("MPEG-4");
+ break;
+
+ case GST_MAKE_FOURCC ('c', 'v', 'i', 'd'):
+ _codec ("Cinepak");
+ caps = gst_caps_new_empty_simple ("video/x-cinepak");
+ break;
+ case GST_MAKE_FOURCC ('q', 'd', 'r', 'w'):
+ _codec ("Apple QuickDraw");
+ caps = gst_caps_new_empty_simple ("video/x-qdrw");
+ break;
+ case GST_MAKE_FOURCC ('r', 'p', 'z', 'a'):
+ _codec ("Apple video");
+ caps = gst_caps_new_empty_simple ("video/x-apple-video");
+ break;
+ case FOURCC_H264:
+ case FOURCC_avc1:
+ case FOURCC_dva1:
+ _codec ("H.264 / AVC");
+ caps = gst_caps_new_simple ("video/x-h264",
+ "stream-format", G_TYPE_STRING, "avc",
+ "alignment", G_TYPE_STRING, "au", NULL);
+ break;
+ case FOURCC_avc3:
+ case FOURCC_dvav:
+ _codec ("H.264 / AVC");
+ caps = gst_caps_new_simple ("video/x-h264",
+ "stream-format", G_TYPE_STRING, "avc3",
+ "alignment", G_TYPE_STRING, "au", NULL);
+ break;
+ case FOURCC_H265:
+ case FOURCC_hvc1:
+ case FOURCC_dvh1:
+ _codec ("H.265 / HEVC");
+ caps = gst_caps_new_simple ("video/x-h265",
+ "stream-format", G_TYPE_STRING, "hvc1",
+ "alignment", G_TYPE_STRING, "au", NULL);
+ break;
+ case FOURCC_hev1:
+ case FOURCC_dvhe:
+ _codec ("H.265 / HEVC");
+ caps = gst_caps_new_simple ("video/x-h265",
+ "stream-format", G_TYPE_STRING, "hev1",
+ "alignment", G_TYPE_STRING, "au", NULL);
+ break;
+ case FOURCC_rle_:
+ _codec ("Run-length encoding");
+ caps = gst_caps_new_simple ("video/x-rle",
+ "layout", G_TYPE_STRING, "quicktime", NULL);
+ break;
+ case FOURCC_WRLE:
+ _codec ("Run-length encoding");
+ caps = gst_caps_new_simple ("video/x-rle",
+ "layout", G_TYPE_STRING, "microsoft", NULL);
+ break;
+ case GST_MAKE_FOURCC ('I', 'V', '3', '2'):
+ case GST_MAKE_FOURCC ('i', 'v', '3', '2'):
+ _codec ("Indeo Video 3");
+ caps = gst_caps_new_simple ("video/x-indeo",
+ "indeoversion", G_TYPE_INT, 3, NULL);
+ break;
+ case GST_MAKE_FOURCC ('I', 'V', '4', '1'):
+ case GST_MAKE_FOURCC ('i', 'v', '4', '1'):
+ _codec ("Intel Video 4");
+ caps = gst_caps_new_simple ("video/x-indeo",
+ "indeoversion", G_TYPE_INT, 4, NULL);
+ break;
+ case FOURCC_dvcp:
+ case FOURCC_dvc_:
+ case GST_MAKE_FOURCC ('d', 'v', 's', 'd'):
+ case GST_MAKE_FOURCC ('D', 'V', 'S', 'D'):
+ case GST_MAKE_FOURCC ('d', 'v', 'c', 's'):
+ case GST_MAKE_FOURCC ('D', 'V', 'C', 'S'):
+ case GST_MAKE_FOURCC ('d', 'v', '2', '5'):
+ case GST_MAKE_FOURCC ('d', 'v', 'p', 'p'):
+ _codec ("DV Video");
+ caps = gst_caps_new_simple ("video/x-dv", "dvversion", G_TYPE_INT, 25,
+ "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
+ break;
+ case FOURCC_dv5n: /* DVCPRO50 NTSC */
+ case FOURCC_dv5p: /* DVCPRO50 PAL */
+ _codec ("DVCPro50 Video");
+ caps = gst_caps_new_simple ("video/x-dv", "dvversion", G_TYPE_INT, 50,
+ "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
+ break;
+ case GST_MAKE_FOURCC ('d', 'v', 'h', '5'): /* DVCPRO HD 50i produced by FCP */
+ case GST_MAKE_FOURCC ('d', 'v', 'h', '6'): /* DVCPRO HD 60i produced by FCP */
+ _codec ("DVCProHD Video");
+ caps = gst_caps_new_simple ("video/x-dv", "dvversion", G_TYPE_INT, 100,
+ "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
+ break;
+ case GST_MAKE_FOURCC ('s', 'm', 'c', ' '):
+ _codec ("Apple Graphics (SMC)");
+ caps = gst_caps_new_empty_simple ("video/x-smc");
+ break;
+ case GST_MAKE_FOURCC ('V', 'P', '3', '1'):
+ _codec ("VP3");
+ caps = gst_caps_new_empty_simple ("video/x-vp3");
+ break;
+ case GST_MAKE_FOURCC ('V', 'P', '6', 'F'):
+ _codec ("VP6 Flash");
+ caps = gst_caps_new_empty_simple ("video/x-vp6-flash");
+ break;
+ case FOURCC_XiTh:
+ _codec ("Theora");
+ caps = gst_caps_new_empty_simple ("video/x-theora");
+ /* QuickTime does not allow 0-sized packets while Theora does, so one
+ * byte of padding is used in the data stream */
+ entry->padding = 1;
+ break;
+ case FOURCC_drac:
+ _codec ("Dirac");
+ caps = gst_caps_new_empty_simple ("video/x-dirac");
+ break;
+ case GST_MAKE_FOURCC ('t', 'i', 'f', 'f'):
+ _codec ("TIFF still images");
+ caps = gst_caps_new_empty_simple ("image/tiff");
+ break;
+ case GST_MAKE_FOURCC ('i', 'c', 'o', 'd'):
+ _codec ("Apple Intermediate Codec");
+ caps = gst_caps_from_string ("video/x-apple-intermediate-codec");
+ break;
+ case GST_MAKE_FOURCC ('A', 'V', 'd', 'n'):
+ _codec ("AVID DNxHD");
+ caps = gst_caps_from_string ("video/x-dnxhd");
+ break;
+ case FOURCC_VP80:
+ case FOURCC_vp08:
+ _codec ("On2 VP8");
+ caps = gst_caps_from_string ("video/x-vp8");
+ break;
+ case FOURCC_vp09:
+ _codec ("Google VP9");
+ caps = gst_caps_from_string ("video/x-vp9");
+ break;
+ case FOURCC_apcs:
+ _codec ("Apple ProRes LT");
+ caps =
+ gst_caps_new_simple ("video/x-prores", "variant", G_TYPE_STRING, "lt",
+ NULL);
+ break;
+ case FOURCC_apch:
+ _codec ("Apple ProRes HQ");
+ caps =
+ gst_caps_new_simple ("video/x-prores", "variant", G_TYPE_STRING, "hq",
+ NULL);
+ break;
+ case FOURCC_apcn:
+ _codec ("Apple ProRes");
+ caps =
+ gst_caps_new_simple ("video/x-prores", "variant", G_TYPE_STRING,
+ "standard", NULL);
+ break;
+ case FOURCC_apco:
+ _codec ("Apple ProRes Proxy");
+ caps =
+ gst_caps_new_simple ("video/x-prores", "variant", G_TYPE_STRING,
+ "proxy", NULL);
+ break;
+ case FOURCC_ap4h:
+ _codec ("Apple ProRes 4444");
+ caps =
+ gst_caps_new_simple ("video/x-prores", "variant", G_TYPE_STRING,
+ "4444", NULL);
+
+ /* a depth of 24 bits per sample means an alpha channel is coded in the
+ * bitstream but the image is always opaque */
+ if (entry->bits_per_sample > 0) {
+ gst_caps_set_simple (caps, "depth", G_TYPE_INT, entry->bits_per_sample,
+ NULL);
+ }
+ break;
+ case FOURCC_ap4x:
+ _codec ("Apple ProRes 4444 XQ");
+ caps =
+ gst_caps_new_simple ("video/x-prores", "variant", G_TYPE_STRING,
+ "4444xq", NULL);
+
+ /* a depth of 24 bits per sample means an alpha channel is coded in the
+ * bitstream but the image is always opaque */
+ if (entry->bits_per_sample > 0) {
+ gst_caps_set_simple (caps, "depth", G_TYPE_INT, entry->bits_per_sample,
+ NULL);
+ }
+ break;
+ case FOURCC_cfhd:
+ _codec ("GoPro CineForm");
+ caps = gst_caps_from_string ("video/x-cineform");
+ break;
+ case FOURCC_vc_1:
+ case FOURCC_ovc1:
+ _codec ("VC-1");
+ caps = gst_caps_new_simple ("video/x-wmv",
+ "wmvversion", G_TYPE_INT, 3, "format", G_TYPE_STRING, "WVC1", NULL);
+ break;
+ case FOURCC_av01:
+ _codec ("AV1");
+ caps = gst_caps_new_empty_simple ("video/x-av1");
+ break;
+ case GST_MAKE_FOURCC ('k', 'p', 'c', 'd'):
+ default:
+ {
+ caps = _get_unknown_codec_name ("video", fourcc);
+ break;
+ }
+ }
+
+ if (format != GST_VIDEO_FORMAT_UNKNOWN) {
+ GstVideoInfo info;
+
+ gst_video_info_init (&info);
+ gst_video_info_set_format (&info, format, entry->width, entry->height);
+
+ caps = gst_video_info_to_caps (&info);
+ *codec_name = gst_pb_utils_get_codec_description (caps);
+
+ /* enable clipping for raw video streams */
+ stream->need_clip = TRUE;
+ stream->alignment = 32;
+ }
+
+ return caps;
+ }
+
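+ /* round_up_pow2() uses the classic bit-smearing trick: after subtracting
+ * 1, each shift/OR pass copies the highest set bit into every lower
+ * position, leaving n as 2^k - 1, so the final increment yields the next
+ * power of two. For example, n = 24: 23 is 0b10111, smearing gives
+ * 0b11111 (31), and 31 + 1 = 32. Values that already are powers of two
+ * are returned unchanged. */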
+ static guint
+ round_up_pow2 (guint n)
+ {
+ n = n - 1;
+ n = n | (n >> 1);
+ n = n | (n >> 2);
+ n = n | (n >> 4);
+ n = n | (n >> 8);
+ n = n | (n >> 16);
+ return n + 1;
+ }
+
+ static GstCaps *
+ qtdemux_audio_caps (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ QtDemuxStreamStsdEntry * entry, guint32 fourcc, const guint8 * data,
+ int len, gchar ** codec_name)
+ {
+ GstCaps *caps;
+ const GstStructure *s;
+ const gchar *name;
+ gint endian = 0;
+ GstAudioFormat format = 0;
+ gint depth;
+
+ GST_DEBUG_OBJECT (qtdemux, "resolve fourcc 0x%08x", GUINT32_TO_BE (fourcc));
+
+ depth = entry->bytes_per_packet * 8;
+
+ switch (fourcc) {
+ case GST_MAKE_FOURCC ('N', 'O', 'N', 'E'):
+ case FOURCC_raw_:
+ /* 8-bit audio is unsigned */
+ if (depth == 8)
+ format = GST_AUDIO_FORMAT_U8;
+ /* otherwise it's signed and big-endian just like 'twos' */
+ case FOURCC_twos:
+ endian = G_BIG_ENDIAN;
+ /* fall-through */
+ case FOURCC_sowt:
+ {
+ gchar *str;
+
+ if (!endian)
+ endian = G_LITTLE_ENDIAN;
+
+ if (!format)
+ format = gst_audio_format_build_integer (TRUE, endian, depth, depth);
+
+ str = g_strdup_printf ("Raw %d-bit PCM audio", depth);
+ _codec (str);
+ g_free (str);
+
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, gst_audio_format_to_string (format),
+ "layout", G_TYPE_STRING, "interleaved", NULL);
+ stream->alignment = GST_ROUND_UP_8 (depth);
+ stream->alignment = round_up_pow2 (stream->alignment);
+ break;
+ }
+ case FOURCC_fl64:
+ _codec ("Raw 64-bit floating-point audio");
+ /* we assume BIG ENDIAN, an enda box will tell us to change this to little
+ * endian later */
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, "F64BE",
+ "layout", G_TYPE_STRING, "interleaved", NULL);
+ stream->alignment = 8;
+ break;
+ case FOURCC_fl32:
+ _codec ("Raw 32-bit floating-point audio");
+ /* we assume BIG ENDIAN, an enda box will tell us to change this to little
+ * endian later */
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, "F32BE",
+ "layout", G_TYPE_STRING, "interleaved", NULL);
+ stream->alignment = 4;
+ break;
+ case FOURCC_in24:
+ _codec ("Raw 24-bit PCM audio");
+ /* we assume BIG ENDIAN, an enda box will tell us to change this to little
+ * endian later */
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, "S24BE",
+ "layout", G_TYPE_STRING, "interleaved", NULL);
+ stream->alignment = 4;
+ break;
+ case FOURCC_in32:
+ _codec ("Raw 32-bit PCM audio");
+ /* we assume BIG ENDIAN, an enda box will tell us to change this to little
+ * endian later */
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, "S32BE",
+ "layout", G_TYPE_STRING, "interleaved", NULL);
+ stream->alignment = 4;
+ break;
+ case FOURCC_s16l:
+ _codec ("Raw 16-bit PCM audio");
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, "S16LE",
+ "layout", G_TYPE_STRING, "interleaved", NULL);
+ stream->alignment = 2;
+ break;
+ case FOURCC_ulaw:
+ _codec ("Mu-law audio");
+ caps = gst_caps_new_empty_simple ("audio/x-mulaw");
+ break;
+ case FOURCC_alaw:
+ _codec ("A-law audio");
+ caps = gst_caps_new_empty_simple ("audio/x-alaw");
+ break;
+ case 0x0200736d:
+ case 0x6d730002:
+ _codec ("Microsoft ADPCM");
+ /* Microsoft ADPCM-ACM code 2 */
+ caps = gst_caps_new_simple ("audio/x-adpcm",
+ "layout", G_TYPE_STRING, "microsoft", NULL);
+ break;
+ case 0x1100736d:
+ case 0x6d730011:
+ _codec ("DVI/IMA ADPCM");
+ caps = gst_caps_new_simple ("audio/x-adpcm",
+ "layout", G_TYPE_STRING, "dvi", NULL);
+ break;
+ case 0x1700736d:
+ case 0x6d730017:
+ _codec ("DVI/Intel IMA ADPCM");
+ /* FIXME DVI/Intel IMA ADPCM/ACM code 17 */
+ caps = gst_caps_new_simple ("audio/x-adpcm",
+ "layout", G_TYPE_STRING, "quicktime", NULL);
+ break;
+ case 0x5500736d:
+ case 0x6d730055:
+ /* MPEG layer 3, CBR only (pre QT4.1) */
+ case FOURCC__mp3:
+ case FOURCC_mp3_:
+ _codec ("MPEG-1 layer 3");
+ /* MPEG layer 3, CBR & VBR (QT4.1 and later) */
+ caps = gst_caps_new_simple ("audio/mpeg", "layer", G_TYPE_INT, 3,
+ "mpegversion", G_TYPE_INT, 1, NULL);
+ break;
+ case GST_MAKE_FOURCC ('.', 'm', 'p', '2'):
+ _codec ("MPEG-1 layer 2");
+ /* MPEG layer 2 */
+ caps = gst_caps_new_simple ("audio/mpeg", "layer", G_TYPE_INT, 2,
+ "mpegversion", G_TYPE_INT, 1, NULL);
+ break;
+ case 0x20736d:
+ case GST_MAKE_FOURCC ('e', 'c', '-', '3'):
+ _codec ("EAC-3 audio");
+ caps = gst_caps_new_simple ("audio/x-eac3",
+ "framed", G_TYPE_BOOLEAN, TRUE, NULL);
+ entry->sampled = TRUE;
+ break;
+ case GST_MAKE_FOURCC ('s', 'a', 'c', '3'): // Nero Recode
+ case FOURCC_ac_3:
+ _codec ("AC-3 audio");
+ caps = gst_caps_new_simple ("audio/x-ac3",
+ "framed", G_TYPE_BOOLEAN, TRUE, NULL);
+ entry->sampled = TRUE;
+ break;
+ case GST_MAKE_FOURCC ('d', 't', 's', 'c'):
+ case GST_MAKE_FOURCC ('D', 'T', 'S', ' '):
+ _codec ("DTS audio");
+ caps = gst_caps_new_simple ("audio/x-dts",
+ "framed", G_TYPE_BOOLEAN, TRUE, NULL);
+ entry->sampled = TRUE;
+ break;
+ case GST_MAKE_FOURCC ('d', 't', 's', 'h'): // DTS-HD
+ case GST_MAKE_FOURCC ('d', 't', 's', 'l'): // DTS-HD Lossless
+ _codec ("DTS-HD audio");
+ caps = gst_caps_new_simple ("audio/x-dts",
+ "framed", G_TYPE_BOOLEAN, TRUE, NULL);
+ entry->sampled = TRUE;
+ break;
+ case FOURCC_MAC3:
+ _codec ("MACE-3");
+ caps = gst_caps_new_simple ("audio/x-mace",
+ "maceversion", G_TYPE_INT, 3, NULL);
+ break;
+ case FOURCC_MAC6:
+ _codec ("MACE-6");
+ caps = gst_caps_new_simple ("audio/x-mace",
+ "maceversion", G_TYPE_INT, 6, NULL);
+ break;
+ case GST_MAKE_FOURCC ('O', 'g', 'g', 'V'):
+ /* ogg/vorbis */
+ caps = gst_caps_new_empty_simple ("application/ogg");
+ break;
+ case GST_MAKE_FOURCC ('d', 'v', 'c', 'a'):
+ _codec ("DV audio");
+ caps = gst_caps_new_empty_simple ("audio/x-dv");
+ break;
+ case FOURCC_mp4a:
+ _codec ("MPEG-4 AAC audio");
+ caps = gst_caps_new_simple ("audio/mpeg",
+ "mpegversion", G_TYPE_INT, 4, "framed", G_TYPE_BOOLEAN, TRUE,
+ "stream-format", G_TYPE_STRING, "raw", NULL);
+ break;
+ case GST_MAKE_FOURCC ('Q', 'D', 'M', 'C'):
+ _codec ("QDesign Music");
+ caps = gst_caps_new_empty_simple ("audio/x-qdm");
+ break;
+ case FOURCC_QDM2:
+ _codec ("QDesign Music v.2");
+ /* FIXME: QDesign music version 2 (no constant) */
+ if (FALSE && data) {
+ caps = gst_caps_new_simple ("audio/x-qdm2",
+ "framesize", G_TYPE_INT, QT_UINT32 (data + 52),
+ "bitrate", G_TYPE_INT, QT_UINT32 (data + 40),
+ "blocksize", G_TYPE_INT, QT_UINT32 (data + 44), NULL);
+ } else {
+ caps = gst_caps_new_empty_simple ("audio/x-qdm2");
+ }
+ break;
+ case FOURCC_agsm:
+ _codec ("GSM audio");
+ caps = gst_caps_new_empty_simple ("audio/x-gsm");
+ break;
+ case FOURCC_samr:
+ _codec ("AMR audio");
+ caps = gst_caps_new_empty_simple ("audio/AMR");
+ break;
+ case FOURCC_sawb:
+ _codec ("AMR-WB audio");
+ caps = gst_caps_new_empty_simple ("audio/AMR-WB");
+ break;
+ case FOURCC_ima4:
+ _codec ("Quicktime IMA ADPCM");
+ caps = gst_caps_new_simple ("audio/x-adpcm",
+ "layout", G_TYPE_STRING, "quicktime", NULL);
+ break;
+ case FOURCC_alac:
+ _codec ("Apple lossless audio");
+ caps = gst_caps_new_empty_simple ("audio/x-alac");
+ break;
+ case FOURCC_fLaC:
+ _codec ("Free Lossless Audio Codec");
+ caps = gst_caps_new_simple ("audio/x-flac",
+ "framed", G_TYPE_BOOLEAN, TRUE, NULL);
+ break;
+ case GST_MAKE_FOURCC ('Q', 'c', 'l', 'p'):
+ _codec ("QualComm PureVoice");
+ caps = gst_caps_from_string ("audio/qcelp");
+ break;
+ case FOURCC_wma_:
+ case FOURCC_owma:
+ _codec ("WMA");
+ caps = gst_caps_new_empty_simple ("audio/x-wma");
+ break;
+ case FOURCC_opus:
+ _codec ("Opus");
+ caps = gst_caps_new_empty_simple ("audio/x-opus");
+ break;
+ case FOURCC_lpcm:
+ {
+ guint32 flags = 0;
+ guint32 depth = 0;
+ guint32 width = 0;
+ GstAudioFormat format;
+ enum
+ {
+ FLAG_IS_FLOAT = 0x1,
+ FLAG_IS_BIG_ENDIAN = 0x2,
+ FLAG_IS_SIGNED = 0x4,
+ FLAG_IS_PACKED = 0x8,
+ FLAG_IS_ALIGNED_HIGH = 0x10,
+ FLAG_IS_NON_INTERLEAVED = 0x20
+ };
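+
+ /* The flags word fully determines the PCM layout handled below: e.g.
+ * flags 0x6 (FLAG_IS_BIG_ENDIAN | FLAG_IS_SIGNED) with width = depth = 24
+ * should yield GST_AUDIO_FORMAT_S24BE, while FLAG_IS_FLOAT with width 64
+ * selects F64BE or F64LE depending on FLAG_IS_BIG_ENDIAN. */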
+ _codec ("Raw LPCM audio");
+
+ if (data && len >= 36) {
+ depth = QT_UINT32 (data + 24);
+ flags = QT_UINT32 (data + 28);
+ width = QT_UINT32 (data + 32) * 8 / entry->n_channels;
+ }
+ if ((flags & FLAG_IS_FLOAT) == 0) {
+ if (depth == 0)
+ depth = 16;
+ if (width == 0)
+ width = 16;
+ if ((flags & FLAG_IS_ALIGNED_HIGH))
+ depth = width;
+
+ format = gst_audio_format_build_integer ((flags & FLAG_IS_SIGNED) ?
+ TRUE : FALSE, (flags & FLAG_IS_BIG_ENDIAN) ?
+ G_BIG_ENDIAN : G_LITTLE_ENDIAN, width, depth);
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING,
+ format !=
+ GST_AUDIO_FORMAT_UNKNOWN ? gst_audio_format_to_string (format) :
+ "UNKNOWN", "layout", G_TYPE_STRING,
+ (flags & FLAG_IS_NON_INTERLEAVED) ? "non-interleaved" :
+ "interleaved", NULL);
+ stream->alignment = GST_ROUND_UP_8 (depth);
+ stream->alignment = round_up_pow2 (stream->alignment);
+ } else {
+ if (width == 0)
+ width = 32;
+ if (width == 64) {
+ if (flags & FLAG_IS_BIG_ENDIAN)
+ format = GST_AUDIO_FORMAT_F64BE;
+ else
+ format = GST_AUDIO_FORMAT_F64LE;
+ } else {
+ if (flags & FLAG_IS_BIG_ENDIAN)
+ format = GST_AUDIO_FORMAT_F32BE;
+ else
+ format = GST_AUDIO_FORMAT_F32LE;
+ }
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, gst_audio_format_to_string (format),
+ "layout", G_TYPE_STRING, (flags & FLAG_IS_NON_INTERLEAVED) ?
+ "non-interleaved" : "interleaved", NULL);
+ stream->alignment = width / 8;
+ }
+ break;
+ }
+ case GST_MAKE_FOURCC ('a', 'c', '-', '4'):
+ {
+ _codec ("AC4");
+ caps = gst_caps_new_empty_simple ("audio/x-ac4");
+ break;
+ }
+ case GST_MAKE_FOURCC ('q', 't', 'v', 'r'):
+ /* ? */
+ default:
+ {
+ caps = _get_unknown_codec_name ("audio", fourcc);
+ break;
+ }
+ }
+
+ if (caps) {
+ GstCaps *templ_caps =
+ gst_static_pad_template_get_caps (&gst_qtdemux_audiosrc_template);
+ GstCaps *intersection = gst_caps_intersect (caps, templ_caps);
+ gst_caps_unref (caps);
+ gst_caps_unref (templ_caps);
+ caps = intersection;
+ }
+
+ /* enable clipping for raw audio streams */
+ s = gst_caps_get_structure (caps, 0);
+ name = gst_structure_get_name (s);
+ if (g_str_has_prefix (name, "audio/x-raw")) {
+ stream->need_clip = TRUE;
+ stream->min_buffer_size = 1024 * entry->bytes_per_frame;
+ stream->max_buffer_size = 4096 * entry->bytes_per_frame;
+ GST_DEBUG ("setting min/max buffer sizes to %d/%d", stream->min_buffer_size,
+ stream->max_buffer_size);
+ }
+ return caps;
+ }
+
+ static GstCaps *
+ qtdemux_sub_caps (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ QtDemuxStreamStsdEntry * entry, guint32 fourcc,
+ const guint8 * stsd_entry_data, gchar ** codec_name)
+ {
+ GstCaps *caps;
+
+ GST_DEBUG_OBJECT (qtdemux, "resolve fourcc 0x%08x", GUINT32_TO_BE (fourcc));
+
+ switch (fourcc) {
+ case FOURCC_mp4s:
+ _codec ("DVD subtitle");
+ caps = gst_caps_new_empty_simple ("subpicture/x-dvd");
+ stream->need_process = TRUE;
+ break;
+ case FOURCC_text:
+ _codec ("Quicktime timed text");
+ goto text;
+ case FOURCC_tx3g:
+ _codec ("3GPP timed text");
+ text:
+ caps = gst_caps_new_simple ("text/x-raw", "format", G_TYPE_STRING,
+ "utf8", NULL);
+ /* actual text piece needs to be extracted */
+ stream->need_process = TRUE;
+ break;
+ case FOURCC_stpp:
+ _codec ("XML subtitles");
+ caps = gst_caps_new_empty_simple ("application/ttml+xml");
+ break;
+ case FOURCC_c608:
+ _codec ("CEA 608 Closed Caption");
+ caps =
+ gst_caps_new_simple ("closedcaption/x-cea-608", "format",
+ G_TYPE_STRING, "s334-1a", NULL);
+ stream->need_process = TRUE;
+ stream->need_split = TRUE;
+ break;
+ case FOURCC_c708:
+ _codec ("CEA 708 Closed Caption");
+ caps =
+ gst_caps_new_simple ("closedcaption/x-cea-708", "format",
+ G_TYPE_STRING, "cdp", NULL);
+ stream->need_process = TRUE;
+ break;
+
+ default:
+ {
+ caps = _get_unknown_codec_name ("text", fourcc);
+ break;
+ }
+ }
+ return caps;
+ }
+
+ static GstCaps *
+ qtdemux_generic_caps (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ QtDemuxStreamStsdEntry * entry, guint32 fourcc,
+ const guint8 * stsd_entry_data, gchar ** codec_name)
+ {
+ GstCaps *caps;
+
+ switch (fourcc) {
+ case FOURCC_m1v:
+ _codec ("MPEG 1 video");
+ caps = gst_caps_new_simple ("video/mpeg", "mpegversion", G_TYPE_INT, 1,
+ "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
+ break;
+ default:
+ caps = NULL;
+ break;
+ }
+ return caps;
+ }
+
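+ /* Keeps one entry per distinct protection (DRM) system UUID seen in the
+ * stream (e.g. from 'pssh' boxes) so a suitable decryptor can be selected
+ * later. Comparison is case-insensitive since UUID strings may appear in
+ * either case; entries are stored lower-cased. */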
+ static void
+ gst_qtdemux_append_protection_system_id (GstQTDemux * qtdemux,
+ const gchar * system_id)
+ {
+ gint i;
+
+ if (!qtdemux->protection_system_ids)
+ qtdemux->protection_system_ids =
+ g_ptr_array_new_with_free_func ((GDestroyNotify) g_free);
+ /* Check whether we already have an entry for this system ID. */
+ for (i = 0; i < qtdemux->protection_system_ids->len; ++i) {
+ const gchar *id = g_ptr_array_index (qtdemux->protection_system_ids, i);
+ if (g_ascii_strcasecmp (system_id, id) == 0) {
+ return;
+ }
+ }
+ GST_DEBUG_OBJECT (qtdemux, "Adding cenc protection system ID %s", system_id);
+ g_ptr_array_add (qtdemux->protection_system_ids, g_ascii_strdown (system_id,
+ -1));
+ }
--- /dev/null
+ /* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+ #ifndef __GST_QTDEMUX_H__
+ #define __GST_QTDEMUX_H__
+
+ #include <gst/gst.h>
+ #include <gst/base/gstadapter.h>
+ #include <gst/base/gstflowcombiner.h>
+ #include <gst/base/gstbytereader.h>
+ #include <gst/video/video.h>
+ #include "gstisoff.h"
+
+ G_BEGIN_DECLS
+
+ #define GST_TYPE_QTDEMUX \
+ (gst_qtdemux_get_type())
+ #define GST_QTDEMUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_QTDEMUX,GstQTDemux))
+ #define GST_QTDEMUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_QTDEMUX,GstQTDemuxClass))
+ #define GST_IS_QTDEMUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_QTDEMUX))
+ #define GST_IS_QTDEMUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_QTDEMUX))
+
+ #define GST_QTDEMUX_CAST(obj) ((GstQTDemux *)(obj))
+
+ /* qtdemux produces these for atoms it cannot parse */
+ #define GST_QT_DEMUX_PRIVATE_TAG "private-qt-tag"
+ #define GST_QT_DEMUX_CLASSIFICATION_TAG "classification"
+
+ typedef struct _GstQTDemux GstQTDemux;
+ typedef struct _GstQTDemuxClass GstQTDemuxClass;
+ typedef struct _QtDemuxStream QtDemuxStream;
+ typedef struct _QtDemuxSample QtDemuxSample;
+ typedef struct _QtDemuxSegment QtDemuxSegment;
+ typedef struct _QtDemuxRandomAccessEntry QtDemuxRandomAccessEntry;
+ typedef struct _QtDemuxStreamStsdEntry QtDemuxStreamStsdEntry;
+
+ enum QtDemuxState
+ {
+ QTDEMUX_STATE_INITIAL, /* Initial state (haven't got the header yet) */
+ QTDEMUX_STATE_HEADER, /* Parsing the header */
+ QTDEMUX_STATE_MOVIE, /* Parsing/Playing the media data */
+ QTDEMUX_STATE_BUFFER_MDAT /* Buffering the mdat atom */
+ };
+
++#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
++typedef struct _QtDemuxSphericalMetadata QtDemuxSphericalMetadata;
++#endif
++
+ struct _GstQTDemux {
+ GstElement element;
+
+ /* Global state */
+ enum QtDemuxState state;
+
+ /* static sink pad */
+ GstPad *sinkpad;
+
+ /* TRUE if pull-based */
+ gboolean pullbased;
+
+ gchar *redirect_location;
+
+ /* Protect pad exposing from flush event */
+ GMutex expose_lock;
+
+ /* list of QtDemuxStream */
+ GPtrArray *active_streams;
+ GPtrArray *old_streams;
+
+ gint n_video_streams;
+ gint n_audio_streams;
+ gint n_sub_streams;
+
+ GstFlowCombiner *flowcombiner;
+
+ /* Incoming stream group-id to set on downstream STREAM_START events.
+ * If upstream doesn't contain one, a global one will be generated */
+ gboolean have_group_id;
+ guint group_id;
+
+ guint major_brand;
+ GstBuffer *comp_brands;
+
+ /* [moov] header.
+ * FIXME : This is discarded just after it's created. Just move it
+ * to a temporary variable ? */
+ GNode *moov_node;
+
+ /* FIXME : This is never freed. It is only assigned once. memleak ? */
+ GNode *moov_node_compressed;
+
+ /* Set to TRUE when the [moov] header has been fully parsed */
+ gboolean got_moov;
+
+ /* Global timescale for the incoming stream. Use the QTTIME macros
+ * to convert values to/from GstClockTime */
+ guint32 timescale;
+
+ /* Global duration (in global timescale). Use QTTIME macros to get GstClockTime */
+ guint64 duration;
+
+ /* Total size of header atoms. Used to calculate fallback overall bitrate */
+ guint header_size;
+
+ GstTagList *tag_list;
+
+ /* configured playback region */
+ GstSegment segment;
+
+ /* State for key_units trickmode */
+ GstClockTime trickmode_interval;
+
+ /* PUSH-BASED only: If the initial segment event, or a segment consequence of
+ * a seek or incoming TIME segment from upstream needs to be pushed. This
+ * variable is used instead of pushing the event directly because at that
+ * point we may not have yet emitted the srcpads. */
+ gboolean need_segment;
+
+ guint32 segment_seqnum;
+
+ /* flag to indicate that we're working with a smoothstreaming fragment.
+ * MSS doesn't have a 'moov' or any information about the streams' format,
+ * requiring qtdemux to create and expose the streams itself */
+ gboolean mss_mode;
+
+ /* Set to TRUE if the incoming stream is either a MSS stream or
+ * a Fragmented MP4 (containing the [mvex] atom in the header) */
+ gboolean fragmented;
+
+ /* PULL-BASED only : If TRUE there is a pending seek */
+ gboolean fragmented_seek_pending;
+
+ /* PULL-BASED : offset of first [moof] or of fragment to seek to
+ * PUSH-BASED : offset of latest [moof] */
+ guint64 moof_offset;
+
+ /* MSS streams have a single track whose format is not specified in the
+ * atoms, so upstream provides it via the caps */
+ GstCaps *media_caps;
+
+ /* Set to TRUE when all streams have been exposed */
+ gboolean exposed;
+
+ gint64 chapters_track_id;
+
+ /* protection support */
+ GPtrArray *protection_system_ids; /* Holds identifiers of all content protection systems for all tracks */
+ GQueue protection_event_queue; /* holds copy of upstream protection events */
+ guint64 cenc_aux_info_offset;
+ guint8 *cenc_aux_info_sizes;
+ guint32 cenc_aux_sample_count;
+ gchar *preferred_protection_system_id;
+
+ /* Whether the parent bin is streams-aware, meaning we can
+ * add/remove streams at any point in time */
+ gboolean streams_aware;
+
+ /*
+ * ALL VARIABLES BELOW ARE ONLY USED IN PUSH-BASED MODE
+ */
+ GstAdapter *adapter;
+ guint neededbytes;
+ guint todrop;
+ /* Used to store data if [mdat] is before the headers */
+ GstBuffer *mdatbuffer;
+ /* Amount of bytes left to read in the current [mdat] */
+ guint64 mdatleft, mdatsize;
+
+ /* When restoring the mdat to the adapter, this buffer stores any
+ * trailing data that was after the last atom parsed as it has to be
+ * restored later along with the correct offset. Used in fragmented
+ * scenario where mdat/moof are one after the other in any order.
+ *
+ * Check https://bugzilla.gnome.org/show_bug.cgi?id=710623 */
+ GstBuffer *restoredata_buffer;
+ guint64 restoredata_offset;
+
+ /* The current offset in bytes from upstream.
+ * Note: While it makes complete sense when we are PULL-BASED (pulling
+ * in BYTES from upstream) and PUSH-BASED with a BYTE SEGMENT (receiving
+ * buffers with actual offsets), it is undefined in PUSH-BASED with a
+ * TIME SEGMENT */
+ guint64 offset;
+
+ /* offset of the mdat atom */
+ guint64 mdatoffset;
+ /* Offset of the first mdat */
+ guint64 first_mdat;
+ /* offset of last [moov] seen */
+ guint64 last_moov_offset;
+
+ /* If TRUE, qtdemux received upstream newsegment in TIME format
+ * which likely means that upstream is driving the pipeline (such as
+ * adaptive demuxers or dlna sources) */
+ gboolean upstream_format_is_time;
+
+ /* Seqnum of the seek event sent upstream. Will be used to
+ * detect incoming FLUSH events corresponding to that */
+ guint32 offset_seek_seqnum;
+
+ /* UPSTREAM BYTE: Requested upstream byte seek offset.
+ * Currently it is only used to check if an incoming BYTE SEGMENT
+ * corresponds to a seek event that was sent upstream */
+ gint64 seek_offset;
+
+ /* UPSTREAM BYTE: Requested start/stop TIME values from
+ * downstream.
+ * Used to set on the downstream segment once the corresponding upstream
+ * BYTE SEEK has succeeded */
+ gint64 push_seek_start;
+ gint64 push_seek_stop;
+
+ #if 0
+ /* gst index support */
+ GstIndex *element_index;
+ gint index_id;
+ #endif
+
+ /* Whether upstream is seekable in BYTES */
+ gboolean upstream_seekable;
+ /* UPSTREAM BYTE: Size of upstream content.
+ * Note : This is only computed once ! If upstream grows in the meantime
+ * it will not be updated */
+ gint64 upstream_size;
+
+ /* UPSTREAM TIME : Contains the PTS (if any) of the
+ * buffer that contains a [moof] header. Will be used to establish
+ * the actual PTS of the samples contained within that fragment. */
+ guint64 fragment_start;
+ /* UPSTREAM TIME : The offset in bytes of the [moof]
+ * header start.
+ * Note : This is not computed from the GST_BUFFER_OFFSET field */
+ guint64 fragment_start_offset;
+
+ /* These two fields are used to perform an implicit seek when playing a
+ * fragmented file whose first tfdt is not zero. This way, if the first fragment starts
+ * at 1 hour, the user does not have to wait 1 hour or perform a manual seek
+ * for the image to move and the sound to play.
+ *
+ * This implicit seek is only done if the first parsed fragment has a non-zero
+ * decode base time and a seek has not been received previously, hence these
+ * fields. */
+ gboolean received_seek;
+ gboolean first_moof_already_parsed;
++#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
++ QtDemuxSphericalMetadata *spherical_metadata;
++#endif
+ };
+
+ struct _GstQTDemuxClass {
+ GstElementClass parent_class;
+ };
+
+ GType gst_qtdemux_get_type (void);
+
+ struct _QtDemuxStreamStsdEntry
+ {
+ GstCaps *caps;
+ guint32 fourcc;
+ gboolean sparse;
+
+ /* video info */
+ gint width;
+ gint height;
+ gint par_w;
+ gint par_h;
+ /* Numerator/denominator framerate */
+ gint fps_n;
+ gint fps_d;
+ GstVideoColorimetry colorimetry;
+ guint16 bits_per_sample;
+ guint16 color_table_id;
+ GstMemory *rgb8_palette;
+ guint interlace_mode;
+ guint field_order;
+
+ /* audio info */
+ gdouble rate;
+ gint n_channels;
+ guint samples_per_packet;
+ guint samples_per_frame;
+ guint bytes_per_packet;
+ guint bytes_per_sample;
+ guint bytes_per_frame;
+ guint compression;
+
+ /* if we use chunks or samples */
+ gboolean sampled;
+ guint padding;
+
+ };
+
+ struct _QtDemuxSample
+ {
+ guint32 size;
+ gint32 pts_offset; /* Add this value to timestamp to get the pts */
+ guint64 offset;
+ guint64 timestamp; /* DTS, in mov time */
+ guint32 duration; /* In mov time */
+ gboolean keyframe; /* TRUE when this packet is a keyframe */
+ };
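+
+ /* The presentation time of a sample is thus timestamp + pts_offset, in
+ * units of the track timescale; converting either value to GstClockTime is
+ * a gst_util_uint64_scale (value, GST_SECOND, stream->timescale). */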
+
+ struct _QtDemuxStream
+ {
+ GstPad *pad;
+
+ GstQTDemux *demux;
+ gchar *stream_id;
+
+ QtDemuxStreamStsdEntry *stsd_entries;
+ guint stsd_entries_length;
+ guint cur_stsd_entry_index;
+
+ /* stream type */
+ guint32 subtype;
+
+ gboolean new_caps; /* If TRUE, caps need to be generated (by
+ * calling _configure_stream()). This happens
+ * for MSS and fragmented streams */
+
+ gboolean new_stream; /* signals that a stream_start is required */
+ gboolean on_keyframe; /* if this stream last pushed buffer was a
+ * keyframe. This is important to identify
+ * where to stop pushing buffers after a
+ * segment stop time */
+
+ /* if the stream has a redirect URI in its headers, we store it here */
+ gchar *redirect_uri;
+
+ /* track id */
+ guint track_id;
+
++#ifdef TIZEN_FEATURE_QTDEMUX_DURATION
++ guint64 tkhd_duration;
++#endif
++
+ /* duration/scale */
+ guint64 duration; /* in timescale units */
+ guint32 timescale;
+
+ /* language */
+ gchar lang_id[4]; /* ISO 639-2T language code */
+
+ /* our samples */
+ guint32 n_samples;
+ QtDemuxSample *samples;
+ gboolean all_keyframe; /* TRUE when all samples are keyframes (no stss) */
+ guint32 n_samples_moof; /* sample count in a moof */
+ guint64 duration_moof; /* duration in timescale of a moof, used to figure
+ * out the framerate of a fragmented stream */
+ guint64 duration_last_moof;
+
+ guint32 offset_in_sample; /* Offset in the current sample, used for
+ * streams with exceedingly big sample sizes
+ * (such as 24s of raw audio).
+ * Only used when max_buffer_size is non-zero */
+ guint32 min_buffer_size; /* Minimum allowed size for output buffers.
+ * Currently only set for raw audio streams*/
+ guint32 max_buffer_size; /* Maximum allowed size for output buffers.
+ * Currently only set for raw audio streams*/
+
+ /* video info */
+ /* aspect ratio */
+ gint display_width;
+ gint display_height;
+
+ /* allocation */
+ gboolean use_allocator;
+ GstAllocator *allocator;
+ GstAllocationParams params;
+
+ gsize alignment;
+
+ /* when a discontinuity is pending */
+ gboolean discont;
+
+ /* list of buffers to push first */
+ GSList *buffers;
+
+ /* if we need to clip this buffer. This is only needed for uncompressed
+ * data */
+ gboolean need_clip;
+
+ /* buffer needs some custom processing, e.g. subtitles */
+ gboolean need_process;
+ /* buffer needs potentially be split, e.g. CEA608 subtitles */
+ gboolean need_split;
+
+ /* current position */
+ guint32 segment_index;
+ guint32 sample_index;
+ GstClockTime time_position; /* in gst time */
+ guint64 accumulated_base;
+
+ /* the Gst segment we are processing out, used for clipping */
+ GstSegment segment;
+
+ /* quicktime segments */
+ guint32 n_segments;
+ QtDemuxSegment *segments;
+ gboolean dummy_segment;
+ guint32 from_sample;
+ guint32 to_sample;
+
+ gboolean sent_eos;
+ GstTagList *stream_tags;
+ gboolean send_global_tags;
+
+ GstEvent *pending_event;
+
+ GstByteReader stco;
+ GstByteReader stsz;
+ GstByteReader stsc;
+ GstByteReader stts;
+ GstByteReader stss;
+ GstByteReader stps;
+ GstByteReader ctts;
+
+ gboolean chunks_are_samples; /* TRUE means treat chunks as samples */
+ gint64 stbl_index;
+ /* stco */
+ guint co_size;
+ GstByteReader co_chunk;
+ guint32 first_chunk;
+ guint32 current_chunk;
+ guint32 last_chunk;
+ guint32 samples_per_chunk;
+ guint32 stsd_sample_description_id;
+ guint32 stco_sample_index;
+ /* stsz */
+ guint32 sample_size; /* 0 means variable sizes are stored in stsz */
+ /* stsc */
+ guint32 stsc_index;
+ guint32 n_samples_per_chunk;
+ guint32 stsc_chunk_index;
+ guint32 stsc_sample_index;
+ guint64 chunk_offset;
+ /* stts */
+ guint32 stts_index;
+ guint32 stts_samples;
+ guint32 n_sample_times;
+ guint32 stts_sample_index;
+ guint64 stts_time;
+ guint32 stts_duration;
+ /* stss */
+ gboolean stss_present;
+ guint32 n_sample_syncs;
+ guint32 stss_index;
+ /* stps */
+ gboolean stps_present;
+ guint32 n_sample_partial_syncs;
+ guint32 stps_index;
+ QtDemuxRandomAccessEntry *ra_entries;
+ guint n_ra_entries;
+
+ const QtDemuxRandomAccessEntry *pending_seek;
+
+ /* ctts */
+ gboolean ctts_present;
+ guint32 n_composition_times;
+ guint32 ctts_index;
+ guint32 ctts_sample_index;
+ guint32 ctts_count;
+ gint32 ctts_soffset;
+
+ /* cslg */
+ guint32 cslg_shift;
+
+ /* fragmented */
+ gboolean parsed_trex;
+ guint32 def_sample_description_index; /* index is 1-based */
+ guint32 def_sample_duration;
+ guint32 def_sample_size;
+ guint32 def_sample_flags;
+
+ gboolean disabled;
+
+ /* stereoscopic video streams */
+ GstVideoMultiviewMode multiview_mode;
+ GstVideoMultiviewFlags multiview_flags;
+
+ /* protected streams */
+ gboolean protected;
+ guint32 protection_scheme_type;
+ guint32 protection_scheme_version;
+ gpointer protection_scheme_info; /* specific to the protection scheme */
+ GQueue protection_scheme_event_queue;
+
+ /* KEY_UNITS trickmode with an interval */
+ GstClockTime last_keyframe_dts;
+
+ gint ref_count; /* atomic */
+ };
+
+ G_END_DECLS
+
+ #endif /* __GST_QTDEMUX_H__ */
--- /dev/null
+ /* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2009> STEricsson <benjamin.gaignard@stericsson.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifndef __GST_QTDEMUX_TYPES_H__
+ #define __GST_QTDEMUX_TYPES_H__
+
+ #include <gst/gst.h>
+ #include <gst/base/gstbytereader.h>
+
+ #include "qtdemux.h"
+
+ G_BEGIN_DECLS
+
+ typedef gboolean (*QtDumpFunc) (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+
+ typedef struct _QtNodeType QtNodeType;
+
+ #define QT_UINT32(a) (GST_READ_UINT32_BE(a))
+ #define QT_UINT24(a) (GST_READ_UINT32_BE(a) >> 8)
+ #define QT_UINT16(a) (GST_READ_UINT16_BE(a))
+ #define QT_UINT8(a) (GST_READ_UINT8(a))
+ #define QT_FP32(a) ((GST_READ_UINT32_BE(a))/65536.0)
+ #define QT_SFP32(a) (((gint)(GST_READ_UINT32_BE(a)))/65536.0)
+ #define QT_FP16(a) ((GST_READ_UINT16_BE(a))/256.0)
+ #define QT_FOURCC(a) (GST_READ_UINT32_LE(a))
+ #define QT_UINT64(a) ((((guint64)QT_UINT32(a))<<32)|QT_UINT32(((guint8 *)a)+4))
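+
+ /* These accessors read big-endian fields straight out of atom payloads.
+ * QT_FOURCC is the exception: it reads little-endian so the result compares
+ * equal to GST_MAKE_FOURCC ('m', 'o', 'o', 'v') and friends. QT_FP32 and
+ * QT_FP16 convert QuickTime's 16.16 and 8.8 fixed-point values to doubles;
+ * for instance a stored 0x00018000 yields 1.5 through QT_FP32. */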
+
++#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
++#define RFC_AMBISONIC_SA3DBOX_VERSION_SUPPORTED 0
++#define RFC_AMBISONIC_TYPE_PERIPHONIC 0
++#define RFC_AMBISONIC_ORDER_FOA 1
++#define RFC_AMBISONIC_CHANNEL_ORDERING_ACN 0
++#define RFC_AMBISONIC_CHANNEL_ORDERING_FUMA 1 /* FIXME: Currently value is not defined in Spatial Audio RFC */
++#define RFC_AMBISONIC_NORMALIZATION_SN3D 0
++#define RFC_AMBISONIC_NORMALIZATION_FUMA 1 /* FIXME: Currently value is not defined in Spatial Audio RFC */
++#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
++
+ typedef enum {
+ QT_FLAG_NONE = (0),
+ QT_FLAG_CONTAINER = (1 << 0)
+ } QtFlags;
+
+ struct _QtNodeType {
+ guint32 fourcc;
+ const gchar *name;
+ QtFlags flags;
+ QtDumpFunc dump;
+ };
+
+ enum TfFlags
+ {
+ TF_BASE_DATA_OFFSET = 0x000001, /* base-data-offset-present */
+ TF_SAMPLE_DESCRIPTION_INDEX = 0x000002, /* sample-description-index-present */
+ TF_DEFAULT_SAMPLE_DURATION = 0x000008, /* default-sample-duration-present */
+ TF_DEFAULT_SAMPLE_SIZE = 0x000010, /* default-sample-size-present */
+ TF_DEFAULT_SAMPLE_FLAGS = 0x000020, /* default-sample-flags-present */
+ TF_DURATION_IS_EMPTY = 0x010000, /* duration-is-empty */
+ TF_DEFAULT_BASE_IS_MOOF = 0x020000 /* default-base-is-moof */
+ };
+
+ enum TrFlags
+ {
+ TR_DATA_OFFSET = 0x000001, /* data-offset-present */
+ TR_FIRST_SAMPLE_FLAGS = 0x000004, /* first-sample-flags-present */
+ TR_SAMPLE_DURATION = 0x000100, /* sample-duration-present */
+ TR_SAMPLE_SIZE = 0x000200, /* sample-size-present */
+ TR_SAMPLE_FLAGS = 0x000400, /* sample-flags-present */
+ TR_COMPOSITION_TIME_OFFSETS = 0x000800 /* sample-composition-time-offsets-presents */
+ };
+
++#ifdef TIZEN_FEATURE_QTDEMUX_MODIFICATION
++typedef enum
++{
++ QTDEMUX_AMBISONIC_TYPE_UNKNOWN = 0,
++ QTDEMUX_AMBISONIC_TYPE_PERIPHONIC = 1, /* To comply with Google's Spatial Audio RFC */
++ QTDEMUX_AMBISONIC_TYPE_NON_PERIPHONIC = 2,
++} QTDEMUX_AMBISONIC_TYPE;
++
++typedef enum
++{
++ QTDEMUX_AMBISONIC_FORMAT_UNKNOWN = 0,
++ QTDEMUX_AMBISONIC_FORMAT_AMBIX = 1, /* AMBIX (Channel sequence: ACN, Normalization: SN3D) */
++ QTDEMUX_AMBISONIC_FORMAT_AMB = 2, /* .AMB, Tetraproc (Channel sequence: FuMa, Normalization: FuMa) */
++ QTDEMUX_AMBISONIC_FORMAT_UA = 3, /* Universal Ambisonics (Channel sequence: SID, Normalization: N3D) */
++} QTDEMUX_AMBISONIC_FORMAT;
++
++typedef enum
++{
++ QTDEMUX_AMBISONIC_ORDER_UNKNOWN = 0,
++ QTDEMUX_AMBISONIC_ORDER_FOA = 1, /* First order Ambisonics */
++ QTDEMUX_AMBISONIC_ORDER_TOA = 3, /* Third order Ambisonics */
++ QTDEMUX_AMBISONIC_ORDER_HOA, /* Higher order Ambisonics */
++} QTDEMUX_AMBISONIC_ORDER;
++
++typedef enum
++{
++ QTDEMUX_AMBISONIC_CHANNEL_ORDERING_UNKNOWN = 0,
++ QTDEMUX_AMBISONIC_CHANNEL_ORDERING_ACN = 1, /* Ambisonic Channel Number (ACN) system */
++ QTDEMUX_AMBISONIC_CHANNEL_ORDERING_FUMA = 2, /* Furse-Malham ordering */
++ QTDEMUX_AMBISONIC_CHANNEL_ORDERING_SID = 3, /* Single Index Designation ordering */
++} QTDEMUX_AMBISONIC_CHANNEL_ORDERING;
++
++typedef enum
++{
++ QTDEMUX_AMBISONIC_NORMALIZATION_UNKNOWN = 0,
++ QTDEMUX_AMBISONIC_NORMALIZATION_SN3D = 1, /* Schmidt semi-normalization */
++ QTDEMUX_AMBISONIC_NORMALIZATION_FUMA = 2, /* Furse-Malham MaxN normalization */
++ QTDEMUX_AMBISONIC_NORMALIZATION_N3D = 3, /* Full 3D normalization */
++} QTDEMUX_AMBISONIC_NORMALIZATION;
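++
++/* For reference: a first-order ambiX stream carries four channels in ACN
++ * order (W, Y, Z, X) with SN3D normalization, i.e. FORMAT_AMBIX together
++ * with ORDER_FOA, CHANNEL_ORDERING_ACN and NORMALIZATION_SN3D. */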
++#endif /* TIZEN_FEATURE_QTDEMUX_MODIFICATION */
++
+ const QtNodeType *qtdemux_type_get (guint32 fourcc);
+
+ G_END_DECLS
+
+ #endif /* __GST_QTDEMUX_TYPES_H__ */
--- /dev/null
- gst_caps_can_intersect (gst_static_caps_get (&intra_caps), caps);
+ /* GStreamer Matroska muxer/demuxer
+ * (c) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * (c) 2006 Tim-Philipp Müller <tim centricular net>
+ * (c) 2008 Sebastian Dröge <slomo@circular-chaos.org>
+ * (c) 2011 Debarshi Ray <rishi@gnu.org>
+ *
+ * matroska-demux.c: matroska file/stream demuxer
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /* TODO: check CRC32 if present
+ * TODO: there can be a segment after the first segment. Handle like
+ * chained oggs. Fixes #334082
+ * TODO: Test samples: http://www.matroska.org/samples/matrix/index.html
+ * http://samples.mplayerhq.hu/Matroska/
+ * TODO: check if demuxing is done correctly for all codecs according to spec
+ * TODO: seeking with an incomplete CUE or without any CUE
+ */
+
+ /**
+ * SECTION:element-matroskademux
+ * @title: matroskademux
+ *
+ * matroskademux demuxes a Matroska file into the different contained streams.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v filesrc location=/path/to/mkv ! matroskademux ! vorbisdec ! audioconvert ! audioresample ! autoaudiosink
+ * ]| This pipeline demuxes a Matroska file and outputs the contained Vorbis audio.
+ *
+ */
+
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <math.h>
+ #include <string.h>
+ #include <glib/gprintf.h>
+
+ #include <gst/base/base.h>
+
+ /* For AVI compatibility mode
+ and for fourcc stuff */
+ #include <gst/riff/riff-read.h>
+ #include <gst/riff/riff-ids.h>
+ #include <gst/riff/riff-media.h>
+
+ #include <gst/audio/audio.h>
+ #include <gst/tag/tag.h>
+ #include <gst/pbutils/pbutils.h>
+ #include <gst/video/gstvideocodecalphameta.h>
+ #include <gst/video/video.h>
+
+ #include "gstmatroskaelements.h"
+ #include "matroska-demux.h"
+ #include "matroska-ids.h"
+
+ GST_DEBUG_CATEGORY_STATIC (matroskademux_debug);
+ #define GST_CAT_DEFAULT matroskademux_debug
+
+ #define DEBUG_ELEMENT_START(demux, ebml, element) \
+ GST_DEBUG_OBJECT (demux, "Parsing " element " element at offset %" \
+ G_GUINT64_FORMAT, gst_ebml_read_get_pos (ebml))
+
+ #define DEBUG_ELEMENT_STOP(demux, ebml, element, ret) \
+ GST_DEBUG_OBJECT (demux, "Parsing " element " element " \
+ " finished with '%s'", gst_flow_get_name (ret))
+
+ enum
+ {
+ PROP_0,
+ PROP_METADATA,
+ PROP_STREAMINFO,
+ PROP_MAX_GAP_TIME,
+ PROP_MAX_BACKTRACK_DISTANCE
+ };
+
+ #define DEFAULT_MAX_GAP_TIME (2 * GST_SECOND)
+ #define DEFAULT_MAX_BACKTRACK_DISTANCE 30
+ #define INVALID_DATA_THRESHOLD (2 * 1024 * 1024)
+
+ static GstStaticPadTemplate sink_templ = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-matroska; video/x-matroska; "
+ "video/x-matroska-3d; audio/webm; video/webm")
+ );
+
+ /* TODO: fill in caps! */
+
+ static GstStaticPadTemplate audio_src_templ =
+ GST_STATIC_PAD_TEMPLATE ("audio_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS ("ANY")
+ );
+
+ static GstStaticPadTemplate video_src_templ =
+ GST_STATIC_PAD_TEMPLATE ("video_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS ("ANY")
+ );
+
+ static GstStaticPadTemplate subtitle_src_templ =
+ GST_STATIC_PAD_TEMPLATE ("subtitle_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS ("text/x-raw, format=pango-markup; application/x-ssa; "
+ "application/x-ass;application/x-usf; subpicture/x-dvd; "
+ "subpicture/x-pgs; subtitle/x-kate; " "application/x-subtitle-unknown")
+ );
+
+ static GQuark matroska_block_additional_quark;
+
+ static GstFlowReturn gst_matroska_demux_parse_id (GstMatroskaDemux * demux,
+ guint32 id, guint64 length, guint needed);
+
+ /* element functions */
+ static void gst_matroska_demux_loop (GstPad * pad);
+
+ static gboolean gst_matroska_demux_element_send_event (GstElement * element,
+ GstEvent * event);
+ static gboolean gst_matroska_demux_element_query (GstElement * element,
+ GstQuery * query);
+
+ /* pad functions */
+ static gboolean gst_matroska_demux_sink_activate (GstPad * sinkpad,
+ GstObject * parent);
+ static gboolean gst_matroska_demux_sink_activate_mode (GstPad * sinkpad,
+ GstObject * parent, GstPadMode mode, gboolean active);
+
+ static gboolean gst_matroska_demux_handle_seek_push (GstMatroskaDemux * demux,
+ GstPad * pad, GstEvent * event);
+ static gboolean gst_matroska_demux_handle_seek_event (GstMatroskaDemux * demux,
+ GstPad * pad, GstEvent * event);
+ static gboolean gst_matroska_demux_handle_src_event (GstPad * pad,
+ GstObject * parent, GstEvent * event);
+ static gboolean gst_matroska_demux_handle_src_query (GstPad * pad,
+ GstObject * parent, GstQuery * query);
+
+ static gboolean gst_matroska_demux_handle_sink_event (GstPad * pad,
+ GstObject * parent, GstEvent * event);
+ static gboolean gst_matroska_demux_handle_sink_query (GstPad * pad,
+ GstObject * parent, GstQuery * query);
+ static GstFlowReturn gst_matroska_demux_chain (GstPad * pad,
+ GstObject * object, GstBuffer * buffer);
+
+ static GstStateChangeReturn
+ gst_matroska_demux_change_state (GstElement * element,
+ GstStateChange transition);
+ #if 0
+ static void
+ gst_matroska_demux_set_index (GstElement * element, GstIndex * index);
+ static GstIndex *gst_matroska_demux_get_index (GstElement * element);
+ #endif
+
+ /* caps functions */
+ static GstCaps *gst_matroska_demux_video_caps (GstMatroskaTrackVideoContext
+ * videocontext, const gchar * codec_id, guint8 * data, guint size,
+ gchar ** codec_name, guint32 * riff_fourcc);
+ static GstCaps *gst_matroska_demux_audio_caps (GstMatroskaTrackAudioContext
+ * audiocontext, const gchar * codec_id, guint8 * data, guint size,
+ gchar ** codec_name, guint16 * riff_audio_fmt, GstClockTime * lead_in_ts);
+ static GstCaps
+ * gst_matroska_demux_subtitle_caps (GstMatroskaTrackSubtitleContext *
+ subtitlecontext, const gchar * codec_id, gpointer data, guint size);
+ static const gchar *gst_matroska_track_encryption_algorithm_name (gint val);
+ static const gchar *gst_matroska_track_encryption_cipher_mode_name (gint val);
+ static const gchar *gst_matroska_track_encoding_scope_name (gint val);
+
+ /* stream methods */
+ static void gst_matroska_demux_reset (GstElement * element);
+ static gboolean perform_seek_to_offset (GstMatroskaDemux * demux,
+ gdouble rate, guint64 offset, guint32 seqnum, GstSeekFlags flags);
+
+ /* gobject functions */
+ static void gst_matroska_demux_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+ static void gst_matroska_demux_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+
+ GType gst_matroska_demux_get_type (void);
+ #define parent_class gst_matroska_demux_parent_class
+ G_DEFINE_TYPE (GstMatroskaDemux, gst_matroska_demux, GST_TYPE_ELEMENT);
+ #define _do_init \
+ gst_riff_init (); \
+ matroska_element_init (plugin); \
+ GST_DEBUG_CATEGORY_INIT (ebmlread_debug, "ebmlread", 0, "EBML stream helper class"); \
+ matroska_block_additional_quark = \
+ g_quark_from_static_string ("matroska-block-additional");
+
+ GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (matroskademux, "matroskademux",
+ GST_RANK_PRIMARY, GST_TYPE_MATROSKA_DEMUX, _do_init);
+
+ static void
+ gst_matroska_demux_finalize (GObject * object)
+ {
+ GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (object);
+
+ gst_matroska_read_common_finalize (&demux->common);
+ gst_flow_combiner_free (demux->flowcombiner);
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ static void
+ gst_matroska_demux_class_init (GstMatroskaDemuxClass * klass)
+ {
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+
+ GST_DEBUG_CATEGORY_INIT (matroskademux_debug, "matroskademux", 0,
+ "Matroska demuxer");
+
+ gobject_class->finalize = gst_matroska_demux_finalize;
+
+ gobject_class->get_property = gst_matroska_demux_get_property;
+ gobject_class->set_property = gst_matroska_demux_set_property;
+
+ g_object_class_install_property (gobject_class, PROP_MAX_GAP_TIME,
+ g_param_spec_uint64 ("max-gap-time", "Maximum gap time",
+ "The demuxer sends out segment events for skipping "
+ "gaps longer than this (0 = disabled).", 0, G_MAXUINT64,
+ DEFAULT_MAX_GAP_TIME, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MAX_BACKTRACK_DISTANCE,
+ g_param_spec_uint ("max-backtrack-distance",
+ "Maximum backtrack distance",
+ "Maximum backtrack distance in seconds when seeking without "
+ "and index in pull mode and search for a keyframe "
+ "(0 = disable backtracking).",
+ 0, G_MAXUINT, DEFAULT_MAX_BACKTRACK_DISTANCE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_matroska_demux_change_state);
+ gstelement_class->send_event =
+ GST_DEBUG_FUNCPTR (gst_matroska_demux_element_send_event);
+ gstelement_class->query =
+ GST_DEBUG_FUNCPTR (gst_matroska_demux_element_query);
+ #if 0
+ gstelement_class->set_index =
+ GST_DEBUG_FUNCPTR (gst_matroska_demux_set_index);
+ gstelement_class->get_index =
+ GST_DEBUG_FUNCPTR (gst_matroska_demux_get_index);
+ #endif
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &video_src_templ);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &audio_src_templ);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &subtitle_src_templ);
+ gst_element_class_add_static_pad_template (gstelement_class, &sink_templ);
+
+ gst_element_class_set_static_metadata (gstelement_class, "Matroska demuxer",
+ "Codec/Demuxer",
+ "Demuxes Matroska/WebM streams into video/audio/subtitles",
+ "GStreamer maintainers <gstreamer-devel@lists.freedesktop.org>");
+ }
+
+ static void
+ gst_matroska_demux_init (GstMatroskaDemux * demux)
+ {
+ demux->common.sinkpad = gst_pad_new_from_static_template (&sink_templ,
+ "sink");
+ gst_pad_set_activate_function (demux->common.sinkpad,
+ GST_DEBUG_FUNCPTR (gst_matroska_demux_sink_activate));
+ gst_pad_set_activatemode_function (demux->common.sinkpad,
+ GST_DEBUG_FUNCPTR (gst_matroska_demux_sink_activate_mode));
+ gst_pad_set_chain_function (demux->common.sinkpad,
+ GST_DEBUG_FUNCPTR (gst_matroska_demux_chain));
+ gst_pad_set_event_function (demux->common.sinkpad,
+ GST_DEBUG_FUNCPTR (gst_matroska_demux_handle_sink_event));
+ gst_pad_set_query_function (demux->common.sinkpad,
+ GST_DEBUG_FUNCPTR (gst_matroska_demux_handle_sink_query));
+ gst_element_add_pad (GST_ELEMENT (demux), demux->common.sinkpad);
+
+ /* init defaults for common read context */
+ gst_matroska_read_common_init (&demux->common);
+
+ /* property defaults */
+ demux->max_gap_time = DEFAULT_MAX_GAP_TIME;
+ demux->max_backtrack_distance = DEFAULT_MAX_BACKTRACK_DISTANCE;
+
+ GST_OBJECT_FLAG_SET (demux, GST_ELEMENT_FLAG_INDEXABLE);
+
+ demux->flowcombiner = gst_flow_combiner_new ();
+
+ /* finish off */
+ gst_matroska_demux_reset (GST_ELEMENT (demux));
+ }
+
+ static void
+ gst_matroska_demux_reset (GstElement * element)
+ {
+ GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (element);
+
+ GST_DEBUG_OBJECT (demux, "Resetting state");
+
+ gst_matroska_read_common_reset (GST_ELEMENT (demux), &demux->common);
+
+ demux->num_a_streams = 0;
+ demux->num_t_streams = 0;
+ demux->num_v_streams = 0;
+ demux->have_nonintraonly_v_streams = FALSE;
+
+ demux->have_group_id = FALSE;
+ demux->group_id = G_MAXUINT;
+
+ demux->clock = NULL;
+ demux->tracks_parsed = FALSE;
+
+ if (demux->clusters) {
+ g_array_free (demux->clusters, TRUE);
+ demux->clusters = NULL;
+ }
+
+ g_list_foreach (demux->seek_parsed,
+ (GFunc) gst_matroska_read_common_free_parsed_el, NULL);
+ g_list_free (demux->seek_parsed);
+ demux->seek_parsed = NULL;
+
+ demux->last_stop_end = GST_CLOCK_TIME_NONE;
+ demux->seek_block = 0;
+ demux->stream_start_time = GST_CLOCK_TIME_NONE;
+ demux->to_time = GST_CLOCK_TIME_NONE;
+ demux->cluster_time = GST_CLOCK_TIME_NONE;
+ demux->cluster_offset = 0;
+ demux->cluster_prevsize = 0;
+ demux->seen_cluster_prevsize = FALSE;
+ demux->next_cluster_offset = 0;
+ demux->stream_last_time = GST_CLOCK_TIME_NONE;
+ demux->last_cluster_offset = 0;
+ demux->index_offset = 0;
+ demux->seekable = FALSE;
+ demux->need_segment = FALSE;
+ demux->segment_seqnum = 0;
+ demux->requested_seek_time = GST_CLOCK_TIME_NONE;
+ demux->seek_offset = -1;
+ demux->audio_lead_in_ts = 0;
+ demux->building_index = FALSE;
+ if (demux->seek_event) {
+ gst_event_unref (demux->seek_event);
+ demux->seek_event = NULL;
+ }
+
+ demux->seek_index = NULL;
+ demux->seek_entry = 0;
+
+ if (demux->new_segment) {
+ gst_event_unref (demux->new_segment);
+ demux->new_segment = NULL;
+ }
+
+ demux->invalid_duration = FALSE;
+
+ demux->cached_length = G_MAXUINT64;
+
+ if (demux->deferred_seek_event)
+ gst_event_unref (demux->deferred_seek_event);
+ demux->deferred_seek_event = NULL;
+ demux->deferred_seek_pad = NULL;
+
+ gst_flow_combiner_clear (demux->flowcombiner);
+ }
+
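+ /* Runs a buffer through any per-frame content encodings (e.g. zlib
+ * decompression or header stripping) and, for encrypted tracks, attaches
+ * GstProtectionMeta. On failure the input buffer is unreffed and NULL is
+ * returned, so the caller must not release it again. */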
+ static GstBuffer *
+ gst_matroska_decode_buffer (GstMatroskaTrackContext * context, GstBuffer * buf)
+ {
+ GstMapInfo map;
+ gpointer data;
+ gsize size;
+ GstBuffer *out_buf = buf;
+
+ g_return_val_if_fail (GST_IS_BUFFER (buf), NULL);
+
+ GST_DEBUG ("decoding buffer %p", buf);
+
+ gst_buffer_map (out_buf, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
+
+ g_return_val_if_fail (size > 0, buf);
+
+ if (gst_matroska_decode_data (context->encodings, &data, &size,
+ GST_MATROSKA_TRACK_ENCODING_SCOPE_FRAME, FALSE)) {
+ if (data != map.data) {
+ gst_buffer_unmap (out_buf, &map);
+ gst_buffer_unref (out_buf);
+ out_buf = gst_buffer_new_wrapped (data, size);
+ } else {
+ gst_buffer_unmap (out_buf, &map);
+ }
+ } else {
+ GST_DEBUG ("decode data failed");
+ gst_buffer_unmap (out_buf, &map);
+ gst_buffer_unref (out_buf);
+ return NULL;
+ }
+ /* Encrypted stream */
+ if (context->protection_info) {
+
+ GstStructure *info_protect = gst_structure_copy (context->protection_info);
+ gboolean encrypted = FALSE;
+
+ gst_buffer_map (out_buf, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
+
+ if (gst_matroska_parse_protection_meta (&data, &size, info_protect,
+ &encrypted)) {
+ if (data != map.data) {
+ GstBuffer *tmp_buf;
+
+ gst_buffer_unmap (out_buf, &map);
+ tmp_buf = out_buf;
+ out_buf = gst_buffer_copy_region (tmp_buf, GST_BUFFER_COPY_ALL,
+ gst_buffer_get_size (tmp_buf) - size, size);
+ gst_buffer_unref (tmp_buf);
+ if (encrypted)
+ gst_buffer_add_protection_meta (out_buf, info_protect);
+ else
+ gst_structure_free (info_protect);
+ } else {
+ gst_buffer_unmap (out_buf, &map);
+ gst_structure_free (info_protect);
+ }
+ } else {
+ GST_WARNING ("Adding protection metadata failed");
+ gst_buffer_unmap (out_buf, &map);
+ gst_buffer_unref (out_buf);
+ gst_structure_free (info_protect);
+ return NULL;
+ }
+ }
+
+ return out_buf;
+ }
+
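+ /* Downstream elements pick codec headers up from the "streamheader" caps
+ * field, a GST_TYPE_ARRAY of buffers; for Vorbis, for instance, the array
+ * would carry the identification, comment and setup headers in order. */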
+ static void
+ gst_matroska_demux_add_stream_headers_to_caps (GstMatroskaDemux * demux,
+ GstBufferList * list, GstCaps * caps)
+ {
+ GstStructure *s;
+ GValue arr_val = G_VALUE_INIT;
+ GValue buf_val = G_VALUE_INIT;
+ gint i, num;
+
+ g_assert (gst_caps_is_writable (caps));
+
+ g_value_init (&arr_val, GST_TYPE_ARRAY);
+ g_value_init (&buf_val, GST_TYPE_BUFFER);
+
+ num = gst_buffer_list_length (list);
+ for (i = 0; i < num; ++i) {
+ g_value_set_boxed (&buf_val, gst_buffer_list_get (list, i));
+ gst_value_array_append_value (&arr_val, &buf_val);
+ }
+
+ s = gst_caps_get_structure (caps, 0);
+ gst_structure_take_value (s, "streamheader", &arr_val);
+ g_value_unset (&buf_val);
+ }
+
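+ /* Matroska stores the mastering display values as floats, while
+ * GstVideoMasteringDisplayInfo expects the HEVC fixed-point representation:
+ * chromaticities in increments of 0.00002 (hence chroma_scale = 50000) and
+ * luminance in increments of 0.0001 cd/m^2 (hence luma_scale = 10000). A
+ * white point x of 0.3127, for example, becomes 15635, and a LuminanceMax
+ * of 1000 cd/m^2 becomes 10000000. */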
+ static GstFlowReturn
+ gst_matroska_demux_parse_mastering_metadata (GstMatroskaDemux * demux,
+ GstEbmlRead * ebml, GstMatroskaTrackVideoContext * video_context)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstVideoMasteringDisplayInfo minfo;
+ guint32 id;
+ gdouble num;
+ /* Precision defined by HEVC specification */
+ const guint chroma_scale = 50000;
+ const guint luma_scale = 10000;
+
+ gst_video_mastering_display_info_init (&minfo);
+
+ DEBUG_ELEMENT_START (demux, ebml, "MasteringMetadata");
+
+ if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK)
+ goto beach;
+
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ goto beach;
+
+ /* all sub elements have float type */
+ if ((ret = gst_ebml_read_float (ebml, &id, &num)) != GST_FLOW_OK)
+ goto beach;
+
+ /* chromaticity should be in [0, 1] range */
+ if (id >= GST_MATROSKA_ID_PRIMARYRCHROMATICITYX &&
+ id <= GST_MATROSKA_ID_WHITEPOINTCHROMATICITYY) {
+ if (num < 0 || num > 1.0) {
+ GST_WARNING_OBJECT (demux, "0x%x has invalid value %f", id, num);
+ goto beach;
+ }
+ } else if (id == GST_MATROSKA_ID_LUMINANCEMAX ||
+ id == GST_MATROSKA_ID_LUMINANCEMIN) {
+ /* Note: the WebM spec says the valid range is [0, 999.9999], but
+ * 1000 cd/m^2 is a commonly used value for HDR. Just check the guint range here.
+ * See https://www.webmproject.org/docs/container/#LuminanceMax
+ */
+ if (num < 0 || num > (gdouble) (G_MAXUINT32 / luma_scale)) {
+ GST_WARNING_OBJECT (demux, "0x%x has invalid value %f", id, num);
+ goto beach;
+ }
+ }
+
+ switch (id) {
+ case GST_MATROSKA_ID_PRIMARYRCHROMATICITYX:
+ minfo.display_primaries[0].x = (guint16) (num * chroma_scale);
+ break;
+ case GST_MATROSKA_ID_PRIMARYRCHROMATICITYY:
+ minfo.display_primaries[0].y = (guint16) (num * chroma_scale);
+ break;
+ case GST_MATROSKA_ID_PRIMARYGCHROMATICITYX:
+ minfo.display_primaries[1].x = (guint16) (num * chroma_scale);
+ break;
+ case GST_MATROSKA_ID_PRIMARYGCHROMATICITYY:
+ minfo.display_primaries[1].y = (guint16) (num * chroma_scale);
+ break;
+ case GST_MATROSKA_ID_PRIMARYBCHROMATICITYX:
+ minfo.display_primaries[2].x = (guint16) (num * chroma_scale);
+ break;
+ case GST_MATROSKA_ID_PRIMARYBCHROMATICITYY:
+ minfo.display_primaries[2].y = (guint16) (num * chroma_scale);
+ break;
+ case GST_MATROSKA_ID_WHITEPOINTCHROMATICITYX:
+ minfo.white_point.x = (guint16) (num * chroma_scale);
+ break;
+ case GST_MATROSKA_ID_WHITEPOINTCHROMATICITYY:
+ minfo.white_point.y = (guint16) (num * chroma_scale);
+ break;
+ case GST_MATROSKA_ID_LUMINANCEMAX:
+ minfo.max_display_mastering_luminance = (guint32) (num * luma_scale);
+ break;
+ case GST_MATROSKA_ID_LUMINANCEMIN:
+ minfo.min_display_mastering_luminance = (guint32) (num * luma_scale);
+ break;
+ default:
+ GST_FIXME_OBJECT (demux,
+ "Unsupported subelement 0x%x in MasteringMetadata", id);
+ ret = gst_ebml_read_skip (ebml);
+ break;
+ }
+ }
+
+ video_context->mastering_display_info = minfo;
+ video_context->mastering_display_info_present = TRUE;
+
+ beach:
+ DEBUG_ELEMENT_STOP (demux, ebml, "MasteringMetadata", ret);
+
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_matroska_demux_parse_colour (GstMatroskaDemux * demux, GstEbmlRead * ebml,
+ GstMatroskaTrackVideoContext * video_context)
+ {
+ GstFlowReturn ret;
+ GstVideoColorimetry colorimetry;
+ guint32 id;
+ guint64 num;
+
+ colorimetry.range = GST_VIDEO_COLOR_RANGE_UNKNOWN;
+ colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_UNKNOWN;
+ colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN;
+ colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
+
+ DEBUG_ELEMENT_START (demux, ebml, "TrackVideoColour");
+
+ if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK)
+ goto beach;
+
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ goto beach;
+
+ switch (id) {
+ case GST_MATROSKA_ID_VIDEOMATRIXCOEFFICIENTS:{
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ goto beach;
+
+ colorimetry.matrix = gst_video_color_matrix_from_iso ((guint) num);
+ break;
+ }
+
+ case GST_MATROSKA_ID_VIDEORANGE:{
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ goto beach;
+
+ switch (num) {
+ case 0:
+ colorimetry.range = GST_VIDEO_COLOR_RANGE_UNKNOWN;
+ break;
+ case 1:
+ colorimetry.range = GST_VIDEO_COLOR_RANGE_16_235;
+ break;
+ case 2:
+ colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;
+ break;
+ default:
+ GST_FIXME_OBJECT (demux, "Unsupported color range %"
+ G_GUINT64_FORMAT, num);
+ break;
+ }
+ break;
+ }
+
+ case GST_MATROSKA_ID_VIDEOTRANSFERCHARACTERISTICS:{
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ goto beach;
+
+ colorimetry.transfer =
+ gst_video_transfer_function_from_iso ((guint) num);
+ break;
+ }
+
+ case GST_MATROSKA_ID_VIDEOPRIMARIES:{
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ goto beach;
+
+ colorimetry.primaries =
+ gst_video_color_primaries_from_iso ((guint) num);
+ break;
+ }
+
+ case GST_MATROSKA_ID_MASTERINGMETADATA:{
+ if ((ret =
+ gst_matroska_demux_parse_mastering_metadata (demux, ebml,
+ video_context)) != GST_FLOW_OK)
+ goto beach;
+ break;
+ }
+
+ case GST_MATROSKA_ID_MAXCLL:{
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ goto beach;
+ if (num > G_MAXUINT16) {
+ GST_WARNING_OBJECT (demux,
+ "Too large maxCLL value %" G_GUINT64_FORMAT, num);
+ } else {
+ video_context->content_light_level.max_content_light_level = num;
+ }
+ break;
+ }
+
+ case GST_MATROSKA_ID_MAXFALL:{
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ goto beach;
+ if (num > G_MAXUINT16) {
+ GST_WARNING_OBJECT (demux,
+ "Too large maxFALL value %" G_GUINT64_FORMAT, num);
+ } else {
+ video_context->content_light_level.max_frame_average_light_level =
+ num;
+ }
+ break;
+ }
+
+ default:
+ GST_FIXME_OBJECT (demux, "Unsupported subelement 0x%x in Colour", id);
+ ret = gst_ebml_read_skip (ebml);
+ break;
+ }
+ }
+
+ memcpy (&video_context->colorimetry, &colorimetry,
+ sizeof (GstVideoColorimetry));
+
+ beach:
+ DEBUG_ELEMENT_STOP (demux, ebml, "TrackVideoColour", ret);
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_matroska_demux_parse_stream (GstMatroskaDemux * demux, GstEbmlRead * ebml,
+ GstMatroskaTrackContext ** dest_context)
+ {
+ GstMatroskaTrackContext *context;
+ GstCaps *caps = NULL;
+ GstTagList *cached_taglist;
+ GstFlowReturn ret;
+ guint32 id, riff_fourcc = 0;
+ guint16 riff_audio_fmt = 0;
+ gchar *codec = NULL;
+
+ DEBUG_ELEMENT_START (demux, ebml, "TrackEntry");
+
+ *dest_context = NULL;
+
+ /* start with the master */
+ if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+ DEBUG_ELEMENT_STOP (demux, ebml, "TrackEntry", ret);
+ return ret;
+ }
+
+ /* allocate generic... if we know the type, we'll g_renew()
+ * with the precise type */
+ context = g_new0 (GstMatroskaTrackContext, 1);
+ context->index_writer_id = -1;
+ context->type = 0; /* no type yet */
+ context->default_duration = 0;
+ context->pos = 0;
+ context->set_discont = TRUE;
+ context->timecodescale = 1.0;
+ context->flags =
+ GST_MATROSKA_TRACK_ENABLED | GST_MATROSKA_TRACK_DEFAULT |
+ GST_MATROSKA_TRACK_LACING;
+ context->from_time = GST_CLOCK_TIME_NONE;
+ context->from_offset = -1;
+ context->to_offset = G_MAXINT64;
+ context->alignment = 1;
+ context->dts_only = FALSE;
+ context->intra_only = FALSE;
+ context->tags = gst_tag_list_new_empty ();
+ g_queue_init (&context->protection_event_queue);
+ context->protection_info = NULL;
+
+ GST_DEBUG_OBJECT (demux, "Parsing a TrackEntry (%d tracks parsed so far)",
+ demux->common.num_streams);
+
+ /* try reading the trackentry headers */
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ switch (id) {
+ /* track number (unique stream ID) */
+ case GST_MATROSKA_ID_TRACKNUMBER:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num == 0) {
+ GST_ERROR_OBJECT (demux, "Invalid TrackNumber 0");
+ ret = GST_FLOW_ERROR;
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux, "TrackNumber: %" G_GUINT64_FORMAT, num);
+ context->num = num;
+ break;
+ }
+ /* track UID (unique identifier) */
+ case GST_MATROSKA_ID_TRACKUID:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num == 0) {
+ GST_ERROR_OBJECT (demux, "Invalid TrackUID 0");
+ ret = GST_FLOW_ERROR;
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux, "TrackUID: %" G_GUINT64_FORMAT, num);
+ context->uid = num;
+ break;
+ }
+
+ /* track type (video, audio, combined, subtitle, etc.) */
+ case GST_MATROSKA_ID_TRACKTYPE:{
+ guint64 track_type;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &track_type)) != GST_FLOW_OK) {
+ break;
+ }
+
+ if (context->type != 0 && context->type != track_type) {
+ GST_WARNING_OBJECT (demux,
+ "More than one tracktype defined in a TrackEntry - skipping");
+ break;
+ } else if (track_type < 1 || track_type > 254) {
+ GST_WARNING_OBJECT (demux, "Invalid TrackType %" G_GUINT64_FORMAT,
+ track_type);
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux, "TrackType: %" G_GUINT64_FORMAT, track_type);
+
+ /* ok, so we're actually going to reallocate this thing */
+ switch (track_type) {
+ case GST_MATROSKA_TRACK_TYPE_VIDEO:
+ gst_matroska_track_init_video_context (&context);
+ break;
+ case GST_MATROSKA_TRACK_TYPE_AUDIO:
+ gst_matroska_track_init_audio_context (&context);
+ break;
+ case GST_MATROSKA_TRACK_TYPE_SUBTITLE:
+ gst_matroska_track_init_subtitle_context (&context);
+ break;
+ case GST_MATROSKA_TRACK_TYPE_COMPLEX:
+ case GST_MATROSKA_TRACK_TYPE_LOGO:
+ case GST_MATROSKA_TRACK_TYPE_BUTTONS:
+ case GST_MATROSKA_TRACK_TYPE_CONTROL:
+ default:
+ GST_WARNING_OBJECT (demux,
+ "Unknown or unsupported TrackType %" G_GUINT64_FORMAT,
+ track_type);
+ context->type = 0;
+ break;
+ }
+ break;
+ }
+
+ /* tracktype specific stuff for video */
+ case GST_MATROSKA_ID_TRACKVIDEO:{
+ GstMatroskaTrackVideoContext *videocontext;
+
+ DEBUG_ELEMENT_START (demux, ebml, "TrackVideo");
+
+ if (!gst_matroska_track_init_video_context (&context)) {
+ GST_WARNING_OBJECT (demux,
+ "TrackVideo element in non-video track - ignoring track");
+ ret = GST_FLOW_ERROR;
+ break;
+ } else if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+ break;
+ }
+ videocontext = (GstMatroskaTrackVideoContext *) context;
+
+ while (ret == GST_FLOW_OK &&
+ gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ switch (id) {
+ /* Should be one level up but some broken muxers write it here. */
+ case GST_MATROSKA_ID_TRACKDEFAULTDURATION:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num == 0) {
+ GST_WARNING_OBJECT (demux, "Invalid TrackDefaultDuration 0");
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux,
+ "TrackDefaultDuration: %" G_GUINT64_FORMAT, num);
+ context->default_duration = num;
+ break;
+ }
+
+ /* video framerate */
+ /* NOTE: This one is here only for backward compatibility.
+ * Use _TRACKDEFAULTDURATION one level up. */
+ case GST_MATROSKA_ID_VIDEOFRAMERATE:{
+ gdouble num;
+
+ if ((ret = gst_ebml_read_float (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num <= 0.0) {
+ GST_WARNING_OBJECT (demux, "Invalid TrackVideoFPS %lf", num);
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux, "TrackVideoFrameRate: %lf", num);
+ if (context->default_duration == 0)
+ context->default_duration =
+ gst_gdouble_to_guint64 ((gdouble) GST_SECOND * (1.0 / num));
+ videocontext->default_fps = num;
+ break;
+ }
+
+ /* width of the size to display the video at */
+ case GST_MATROSKA_ID_VIDEODISPLAYWIDTH:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num == 0) {
+ GST_WARNING_OBJECT (demux, "Invalid TrackVideoDisplayWidth 0");
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux,
+ "TrackVideoDisplayWidth: %" G_GUINT64_FORMAT, num);
+ videocontext->display_width = num;
+ break;
+ }
+
+ /* height of the size to display the video at */
+ case GST_MATROSKA_ID_VIDEODISPLAYHEIGHT:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num == 0) {
+ GST_WARNING_OBJECT (demux, "Invalid TrackVideoDisplayHeight 0");
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux,
+ "TrackVideoDisplayHeight: %" G_GUINT64_FORMAT, num);
+ videocontext->display_height = num;
+ break;
+ }
+
+ /* width of the video in the file */
+ case GST_MATROSKA_ID_VIDEOPIXELWIDTH:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num == 0) {
+ GST_WARNING_OBJECT (demux, "Invalid TrackVideoPixelWidth 0");
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux,
+ "TrackVideoPixelWidth: %" G_GUINT64_FORMAT, num);
+ videocontext->pixel_width = num;
+ break;
+ }
+
+ /* height of the video in the file */
+ case GST_MATROSKA_ID_VIDEOPIXELHEIGHT:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num == 0) {
+ GST_WARNING_OBJECT (demux, "Invalid TrackVideoPixelHeight 0");
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux,
+ "TrackVideoPixelHeight: %" G_GUINT64_FORMAT, num);
+ videocontext->pixel_height = num;
+ break;
+ }
+
+ /* whether the video is interlaced */
+ case GST_MATROSKA_ID_VIDEOFLAGINTERLACED:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num == 1)
+ videocontext->interlace_mode =
+ GST_MATROSKA_INTERLACE_MODE_INTERLACED;
+ else if (num == 2)
+ videocontext->interlace_mode =
+ GST_MATROSKA_INTERLACE_MODE_PROGRESSIVE;
+ else
+ videocontext->interlace_mode =
+ GST_MATROSKA_INTERLACE_MODE_UNKNOWN;
+
+ GST_DEBUG_OBJECT (demux, "video track interlacing mode: %d",
+ videocontext->interlace_mode);
+ break;
+ }
+
+ /* interlaced field order */
+ case GST_MATROSKA_ID_VIDEOFIELDORDER:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (videocontext->interlace_mode !=
+ GST_MATROSKA_INTERLACE_MODE_INTERLACED) {
+ GST_WARNING_OBJECT (demux,
+ "FieldOrder element when not interlaced - ignoring");
+ break;
+ }
+
+ if (num == 0)
+ /* turns out we're actually progressive */
+ videocontext->interlace_mode =
+ GST_MATROSKA_INTERLACE_MODE_PROGRESSIVE;
+ else if (num == 2)
+ videocontext->field_order = GST_VIDEO_FIELD_ORDER_UNKNOWN;
+ else if (num == 9)
+ videocontext->field_order =
+ GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST;
+ else if (num == 14)
+ videocontext->field_order =
+ GST_VIDEO_FIELD_ORDER_BOTTOM_FIELD_FIRST;
+ else {
+ GST_FIXME_OBJECT (demux,
+ "Unknown or unsupported FieldOrder %" G_GUINT64_FORMAT,
+ num);
+ videocontext->field_order = GST_VIDEO_FIELD_ORDER_UNKNOWN;
+ }
+
+ GST_DEBUG_OBJECT (demux, "video track field order: %d",
+ videocontext->field_order);
+ break;
+ }
+
+ /* aspect ratio behaviour */
+ case GST_MATROSKA_ID_VIDEOASPECTRATIOTYPE:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num != GST_MATROSKA_ASPECT_RATIO_MODE_FREE &&
+ num != GST_MATROSKA_ASPECT_RATIO_MODE_KEEP &&
+ num != GST_MATROSKA_ASPECT_RATIO_MODE_FIXED) {
+ GST_WARNING_OBJECT (demux,
+ "Unknown TrackVideoAspectRatioType 0x%x", (guint) num);
+ break;
+ }
+ GST_DEBUG_OBJECT (demux,
+ "TrackVideoAspectRatioType: %" G_GUINT64_FORMAT, num);
+ videocontext->asr_mode = num;
+ break;
+ }
+
+ /* colourspace fourcc (only matters for raw video) */
+ case GST_MATROSKA_ID_VIDEOCOLOURSPACE:{
+ guint8 *data;
+ guint64 datalen;
+
+ if ((ret =
+ gst_ebml_read_binary (ebml, &id, &data,
+ &datalen)) != GST_FLOW_OK)
+ break;
+
+ if (datalen != 4) {
+ g_free (data);
+ GST_WARNING_OBJECT (demux,
+ "Invalid TrackVideoColourSpace length %" G_GUINT64_FORMAT,
+ datalen);
+ break;
+ }
+
+ memcpy (&videocontext->fourcc, data, 4);
+ GST_DEBUG_OBJECT (demux,
+ "TrackVideoColourSpace: %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (videocontext->fourcc));
+ g_free (data);
+ break;
+ }
+
+ /* color info */
+ case GST_MATROSKA_ID_VIDEOCOLOUR:{
+ ret = gst_matroska_demux_parse_colour (demux, ebml, videocontext);
+ break;
+ }
+
+ case GST_MATROSKA_ID_VIDEOSTEREOMODE:
+ {
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ GST_DEBUG_OBJECT (demux, "StereoMode: %" G_GUINT64_FORMAT, num);
+
+ switch (num) {
+ case GST_MATROSKA_STEREO_MODE_SBS_RL:
+ videocontext->multiview_flags =
+ GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST;
+ /* fall through */
+ case GST_MATROSKA_STEREO_MODE_SBS_LR:
+ videocontext->multiview_mode =
+ GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE;
+ break;
+ case GST_MATROSKA_STEREO_MODE_TB_RL:
+ videocontext->multiview_flags =
+ GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST;
+ /* fall through */
+ case GST_MATROSKA_STEREO_MODE_TB_LR:
+ videocontext->multiview_mode =
+ GST_VIDEO_MULTIVIEW_MODE_TOP_BOTTOM;
+ break;
+ case GST_MATROSKA_STEREO_MODE_CHECKER_RL:
+ videocontext->multiview_flags =
+ GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST;
+ /* fall through */
+ case GST_MATROSKA_STEREO_MODE_CHECKER_LR:
+ videocontext->multiview_mode =
+ GST_VIDEO_MULTIVIEW_MODE_CHECKERBOARD;
+ break;
+ case GST_MATROSKA_STEREO_MODE_FBF_RL:
+ videocontext->multiview_flags =
+ GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST;
+ /* fall through */
+ case GST_MATROSKA_STEREO_MODE_FBF_LR:
+ videocontext->multiview_mode =
+ GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME;
+ /* FIXME: In frame-by-frame mode, left/right frame buffers are
+ * laced within one block, and we'll need to apply FIRST_IN_BUNDLE
+ * accordingly. See http://www.matroska.org/technical/specs/index.html#StereoMode */
+ GST_FIXME_OBJECT (demux,
+ "Frame-by-frame stereoscopic mode not fully implemented");
+ break;
+ }
+ break;
+ }
+
+ case GST_MATROSKA_ID_VIDEOALPHAMODE:
+ {
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ GST_DEBUG_OBJECT (demux, "AlphaMode: %" G_GUINT64_FORMAT, num);
+
+ if (num == 1)
+ videocontext->alpha_mode = TRUE;
+ else
+ videocontext->alpha_mode = FALSE;
+ break;
+ }
+
+ default:
+ GST_WARNING_OBJECT (demux,
+ "Unknown TrackVideo subelement 0x%x - ignoring", id);
+ /* fall through */
+ case GST_MATROSKA_ID_VIDEODISPLAYUNIT:
+ case GST_MATROSKA_ID_VIDEOPIXELCROPBOTTOM:
+ case GST_MATROSKA_ID_VIDEOPIXELCROPTOP:
+ case GST_MATROSKA_ID_VIDEOPIXELCROPLEFT:
+ case GST_MATROSKA_ID_VIDEOPIXELCROPRIGHT:
+ case GST_MATROSKA_ID_VIDEOGAMMAVALUE:
+ ret = gst_ebml_read_skip (ebml);
+ break;
+ }
+ }
+
+ DEBUG_ELEMENT_STOP (demux, ebml, "TrackVideo", ret);
+ break;
+ }
+
+ /* tracktype specific stuff for audio */
+ case GST_MATROSKA_ID_TRACKAUDIO:{
+ GstMatroskaTrackAudioContext *audiocontext;
+
+ DEBUG_ELEMENT_START (demux, ebml, "TrackAudio");
+
+ if (!gst_matroska_track_init_audio_context (&context)) {
+ GST_WARNING_OBJECT (demux,
+ "TrackAudio element in non-audio track - ignoring track");
+ ret = GST_FLOW_ERROR;
+ break;
+ }
+
+ if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ audiocontext = (GstMatroskaTrackAudioContext *) context;
+
+ while (ret == GST_FLOW_OK &&
+ gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ switch (id) {
+ /* samplerate */
+ case GST_MATROSKA_ID_AUDIOSAMPLINGFREQ:{
+ gdouble num;
+
+ if ((ret = gst_ebml_read_float (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num <= 0.0) {
+ GST_WARNING_OBJECT (demux,
+ "Invalid TrackAudioSamplingFrequency %lf", num);
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux, "TrackAudioSamplingFrequency: %lf", num);
+ audiocontext->samplerate = num;
+ break;
+ }
+
+ /* bitdepth */
+ case GST_MATROSKA_ID_AUDIOBITDEPTH:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num == 0) {
+ GST_WARNING_OBJECT (demux, "Invalid TrackAudioBitDepth 0");
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux, "TrackAudioBitDepth: %" G_GUINT64_FORMAT,
+ num);
+ audiocontext->bitdepth = num;
+ break;
+ }
+
+ /* channels */
+ case GST_MATROSKA_ID_AUDIOCHANNELS:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num == 0) {
+ GST_WARNING_OBJECT (demux, "Invalid TrackAudioChannels 0");
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux, "TrackAudioChannels: %" G_GUINT64_FORMAT,
+ num);
+ audiocontext->channels = num;
+ break;
+ }
+
+ default:
+ GST_WARNING_OBJECT (demux,
+ "Unknown TrackAudio subelement 0x%x - ignoring", id);
+ /* fall through */
+ case GST_MATROSKA_ID_AUDIOCHANNELPOSITIONS:
+ case GST_MATROSKA_ID_AUDIOOUTPUTSAMPLINGFREQ:
+ ret = gst_ebml_read_skip (ebml);
+ break;
+ }
+ }
+
+ DEBUG_ELEMENT_STOP (demux, ebml, "TrackAudio", ret);
+
+ break;
+ }
+
+ /* codec identifier */
+ case GST_MATROSKA_ID_CODECID:{
+ gchar *text;
+
+ if ((ret = gst_ebml_read_ascii (ebml, &id, &text)) != GST_FLOW_OK)
+ break;
+
+ GST_DEBUG_OBJECT (demux, "CodecID: %s", GST_STR_NULL (text));
+ context->codec_id = text;
+ break;
+ }
+
+ /* codec private data */
+ case GST_MATROSKA_ID_CODECPRIVATE:{
+ guint8 *data;
+ guint64 size;
+
+ if ((ret =
+ gst_ebml_read_binary (ebml, &id, &data, &size)) != GST_FLOW_OK)
+ break;
+
+ context->codec_priv = data;
+ context->codec_priv_size = size;
+
+ GST_DEBUG_OBJECT (demux, "CodecPrivate of size %" G_GUINT64_FORMAT,
+ size);
+ break;
+ }
+
+ /* name of the codec */
+ case GST_MATROSKA_ID_CODECNAME:{
+ gchar *text;
+
+ if ((ret = gst_ebml_read_utf8 (ebml, &id, &text)) != GST_FLOW_OK)
+ break;
+
+ GST_DEBUG_OBJECT (demux, "CodecName: %s", GST_STR_NULL (text));
+ context->codec_name = text;
+ break;
+ }
+
+ /* codec delay */
+ case GST_MATROSKA_ID_CODECDELAY:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ context->codec_delay = num;
+
+ GST_DEBUG_OBJECT (demux, "CodecDelay: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (num));
+ break;
+ }
+
+ /* seek preroll */
+ case GST_MATROSKA_ID_SEEKPREROLL:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ context->seek_preroll = num;
+
+ GST_DEBUG_OBJECT (demux, "SeekPreroll: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (num));
+ break;
+ }
+
+ /* name of this track */
+ case GST_MATROSKA_ID_TRACKNAME:{
+ gchar *text;
+
+ if ((ret = gst_ebml_read_utf8 (ebml, &id, &text)) != GST_FLOW_OK)
+ break;
+
+ context->name = text;
+ GST_DEBUG_OBJECT (demux, "TrackName: %s", GST_STR_NULL (text));
+ break;
+ }
+
+ /* language (matters for audio/subtitles, mostly) */
+ case GST_MATROSKA_ID_TRACKLANGUAGE:{
+ gchar *text;
+
+ if ((ret = gst_ebml_read_utf8 (ebml, &id, &text)) != GST_FLOW_OK)
+ break;
+
+ context->language = text;
+
+ /* strip any region subtag: fre-ca => fre */
+ if (strlen (context->language) >= 4 && context->language[3] == '-')
+ context->language[3] = '\0';
+
+ GST_DEBUG_OBJECT (demux, "TrackLanguage: %s",
+ GST_STR_NULL (context->language));
+ break;
+ }
+
+ /* whether this is actually used */
+ case GST_MATROSKA_ID_TRACKFLAGENABLED:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num)
+ context->flags |= GST_MATROSKA_TRACK_ENABLED;
+ else
+ context->flags &= ~GST_MATROSKA_TRACK_ENABLED;
+
+ GST_DEBUG_OBJECT (demux, "TrackEnabled: %d",
+ (context->flags & GST_MATROSKA_TRACK_ENABLED) ? 1 : 0);
+ break;
+ }
+
+ /* whether it's the default for this track type */
+ case GST_MATROSKA_ID_TRACKFLAGDEFAULT:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num)
+ context->flags |= GST_MATROSKA_TRACK_DEFAULT;
+ else
+ context->flags &= ~GST_MATROSKA_TRACK_DEFAULT;
+
+ GST_DEBUG_OBJECT (demux, "TrackDefault: %d",
+ (context->flags & GST_MATROSKA_TRACK_DEFAULT) ? 1 : 0);
+ break;
+ }
+
+ /* whether the track must be used during playback */
+ case GST_MATROSKA_ID_TRACKFLAGFORCED:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num)
+ context->flags |= GST_MATROSKA_TRACK_FORCED;
+ else
+ context->flags &= ~GST_MATROSKA_TRACK_FORCED;
+
+ GST_DEBUG_OBJECT (demux, "TrackForced: %d",
+ (context->flags & GST_MATROSKA_TRACK_FORCED) ? 1 : 0);
+ break;
+ }
+
+ /* lacing (like MPEG, where blocks don't end/start on frame
+ * boundaries) */
+ case GST_MATROSKA_ID_TRACKFLAGLACING:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num)
+ context->flags |= GST_MATROSKA_TRACK_LACING;
+ else
+ context->flags &= ~GST_MATROSKA_TRACK_LACING;
+
+ GST_DEBUG_OBJECT (demux, "TrackLacing: %d",
+ (context->flags & GST_MATROSKA_TRACK_LACING) ? 1 : 0);
+ break;
+ }
+
+ /* default length (in time) of one data block in this track */
+ case GST_MATROSKA_ID_TRACKDEFAULTDURATION:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num == 0) {
+ GST_WARNING_OBJECT (demux, "Invalid TrackDefaultDuration 0");
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux, "TrackDefaultDuration: %" G_GUINT64_FORMAT,
+ num);
+ context->default_duration = num;
+ break;
+ }
+
+ case GST_MATROSKA_ID_CONTENTENCODINGS:{
+ ret = gst_matroska_read_common_read_track_encodings (&demux->common,
+ ebml, context);
+ break;
+ }
+
+ case GST_MATROSKA_ID_TRACKTIMECODESCALE:{
+ gdouble num;
+
+ if ((ret = gst_ebml_read_float (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num <= 0.0) {
+ GST_WARNING_OBJECT (demux, "Invalid TrackTimeCodeScale %lf", num);
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux, "TrackTimeCodeScale: %lf", num);
+ context->timecodescale = num;
+ break;
+ }
+
+ default:
+ GST_WARNING ("Unknown TrackEntry subelement 0x%x - ignoring", id);
+ /* pass-through */
+
+ /* we ignore these because they're nothing useful (i.e. crap)
+ * or simply not implemented yet. */
+ case GST_MATROSKA_ID_TRACKMINCACHE:
+ case GST_MATROSKA_ID_TRACKMAXCACHE:
+ case GST_MATROSKA_ID_MAXBLOCKADDITIONID:
+ case GST_MATROSKA_ID_TRACKATTACHMENTLINK:
+ case GST_MATROSKA_ID_TRACKOVERLAY:
+ case GST_MATROSKA_ID_TRACKTRANSLATE:
+ case GST_MATROSKA_ID_TRACKOFFSET:
+ case GST_MATROSKA_ID_CODECSETTINGS:
+ case GST_MATROSKA_ID_CODECINFOURL:
+ case GST_MATROSKA_ID_CODECDOWNLOADURL:
+ case GST_MATROSKA_ID_CODECDECODEALL:
+ ret = gst_ebml_read_skip (ebml);
+ break;
+ }
+ }
+
+ DEBUG_ELEMENT_STOP (demux, ebml, "TrackEntry", ret);
+
+ /* Decode codec private data if necessary */
+ if (context->encodings && context->encodings->len > 0 && context->codec_priv
+ && context->codec_priv_size > 0) {
+ if (!gst_matroska_decode_data (context->encodings,
+ &context->codec_priv, &context->codec_priv_size,
+ GST_MATROSKA_TRACK_ENCODING_SCOPE_CODEC_DATA, TRUE)) {
+ GST_WARNING_OBJECT (demux, "Decoding codec private data failed");
+ ret = GST_FLOW_ERROR;
+ }
+ }
+
+ if (context->type == 0 || context->codec_id == NULL || (ret != GST_FLOW_OK
+ && ret != GST_FLOW_EOS)) {
+ if (ret == GST_FLOW_OK || ret == GST_FLOW_EOS)
+ GST_WARNING_OBJECT (ebml, "Unknown stream/codec in track entry header");
+
+ gst_matroska_track_free (context);
+ context = NULL;
+ *dest_context = NULL;
+ return ret;
+ }
+
+ /* check for a cached track taglist */
+ cached_taglist =
+ (GstTagList *) g_hash_table_lookup (demux->common.cached_track_taglists,
+ GUINT_TO_POINTER (context->uid));
+ if (cached_taglist)
+ gst_tag_list_insert (context->tags, cached_taglist, GST_TAG_MERGE_APPEND);
+
+ /* compute caps */
+ switch (context->type) {
+ case GST_MATROSKA_TRACK_TYPE_VIDEO:{
+ GstMatroskaTrackVideoContext *videocontext =
+ (GstMatroskaTrackVideoContext *) context;
+
+ caps = gst_matroska_demux_video_caps (videocontext,
+ context->codec_id, context->codec_priv,
+ context->codec_priv_size, &codec, &riff_fourcc);
+
+ if (codec) {
+ gst_tag_list_add (context->tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_VIDEO_CODEC, codec, NULL);
+ context->tags_changed = TRUE;
+ g_free (codec);
+ }
+ break;
+ }
+
+ case GST_MATROSKA_TRACK_TYPE_AUDIO:{
+ GstClockTime lead_in_ts = 0;
+ GstMatroskaTrackAudioContext *audiocontext =
+ (GstMatroskaTrackAudioContext *) context;
+
+ caps = gst_matroska_demux_audio_caps (audiocontext,
+ context->codec_id, context->codec_priv, context->codec_priv_size,
+ &codec, &riff_audio_fmt, &lead_in_ts);
+ if (lead_in_ts > demux->audio_lead_in_ts) {
+ demux->audio_lead_in_ts = lead_in_ts;
+ GST_DEBUG_OBJECT (demux, "Increased audio lead-in to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (lead_in_ts));
+ }
+
+ if (codec) {
+ gst_tag_list_add (context->tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_AUDIO_CODEC, codec, NULL);
+ context->tags_changed = TRUE;
+ g_free (codec);
+ }
+ break;
+ }
+
+ case GST_MATROSKA_TRACK_TYPE_SUBTITLE:{
+ GstMatroskaTrackSubtitleContext *subtitlecontext =
+ (GstMatroskaTrackSubtitleContext *) context;
+
+ caps = gst_matroska_demux_subtitle_caps (subtitlecontext,
+ context->codec_id, context->codec_priv, context->codec_priv_size);
+ break;
+ }
+
+ case GST_MATROSKA_TRACK_TYPE_COMPLEX:
+ case GST_MATROSKA_TRACK_TYPE_LOGO:
+ case GST_MATROSKA_TRACK_TYPE_BUTTONS:
+ case GST_MATROSKA_TRACK_TYPE_CONTROL:
+ default:
+ /* we should already have quit by now */
+ g_assert_not_reached ();
+ }
+
+ if ((context->language == NULL || *context->language == '\0') &&
+ (context->type == GST_MATROSKA_TRACK_TYPE_AUDIO ||
+ context->type == GST_MATROSKA_TRACK_TYPE_SUBTITLE)) {
+ GST_LOG ("stream %d: language=eng (assuming default)", context->index);
+ context->language = g_strdup ("eng");
+ }
+
+ if (context->language) {
+ const gchar *lang;
+
+ /* Matroska contains ISO 639-2B codes, we want ISO 639-1 */
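+ /* e.g. ISO 639-2B "fre" maps to ISO 639-1 "fr" */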
+ lang = gst_tag_get_language_code (context->language);
+ gst_tag_list_add (context->tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_LANGUAGE_CODE, (lang) ? lang : context->language, NULL);
+
+ if (context->name) {
+ gst_tag_list_add (context->tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_TITLE, context->name, NULL);
+ }
+ context->tags_changed = TRUE;
+ }
+
+ if (caps == NULL) {
+ GST_WARNING_OBJECT (demux, "could not determine caps for stream with "
+ "codec_id='%s'", context->codec_id);
+ switch (context->type) {
+ case GST_MATROSKA_TRACK_TYPE_VIDEO:
+ caps = gst_caps_new_empty_simple ("video/x-unknown");
+ break;
+ case GST_MATROSKA_TRACK_TYPE_AUDIO:
+ caps = gst_caps_new_empty_simple ("audio/x-unknown");
+ break;
+ case GST_MATROSKA_TRACK_TYPE_SUBTITLE:
+ caps = gst_caps_new_empty_simple ("application/x-subtitle-unknown");
+ break;
+ case GST_MATROSKA_TRACK_TYPE_COMPLEX:
+ default:
+ caps = gst_caps_new_empty_simple ("application/x-matroska-unknown");
+ break;
+ }
+ gst_caps_set_simple (caps, "codec-id", G_TYPE_STRING, context->codec_id,
+ NULL);
+
+ /* add any unrecognised riff fourcc / audio format, but after codec-id */
+ if (context->type == GST_MATROSKA_TRACK_TYPE_AUDIO && riff_audio_fmt != 0)
+ gst_caps_set_simple (caps, "format", G_TYPE_INT, riff_audio_fmt, NULL);
+ else if (context->type == GST_MATROSKA_TRACK_TYPE_VIDEO && riff_fourcc != 0) {
+ gchar *fstr = g_strdup_printf ("%" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (riff_fourcc));
+ gst_caps_set_simple (caps, "fourcc", G_TYPE_STRING, fstr, NULL);
+ g_free (fstr);
+ }
+ } else if (context->stream_headers != NULL) {
+ gst_matroska_demux_add_stream_headers_to_caps (demux,
+ context->stream_headers, caps);
+ }
+
+ if (context->encodings) {
+ GstMatroskaTrackEncoding *enc;
+ guint i;
+
+ for (i = 0; i < context->encodings->len; i++) {
+ enc = &g_array_index (context->encodings, GstMatroskaTrackEncoding, i);
+ if (enc->type == GST_MATROSKA_ENCODING_ENCRYPTION /* encryption */ ) {
+ GstStructure *s = gst_caps_get_structure (caps, 0);
+ if (!gst_structure_has_name (s, "application/x-webm-enc")) {
+ gst_structure_set (s, "original-media-type", G_TYPE_STRING,
+ gst_structure_get_name (s), NULL);
+ gst_structure_set (s, "encryption-algorithm", G_TYPE_STRING,
+ gst_matroska_track_encryption_algorithm_name (enc->enc_algo),
+ NULL);
+ gst_structure_set (s, "encoding-scope", G_TYPE_STRING,
+ gst_matroska_track_encoding_scope_name (enc->scope), NULL);
+ gst_structure_set (s, "cipher-mode", G_TYPE_STRING,
+ gst_matroska_track_encryption_cipher_mode_name
+ (enc->enc_cipher_mode), NULL);
+ gst_structure_set_name (s, "application/x-webm-enc");
+ }
+ }
+ }
+ }
+
+ context->caps = caps;
+
+ /* tadaah! */
+ *dest_context = context;
+ return ret;
+ }
+
+ static void
+ gst_matroska_demux_add_stream (GstMatroskaDemux * demux,
+ GstMatroskaTrackContext * context)
+ {
+ GstElementClass *klass = GST_ELEMENT_GET_CLASS (demux);
+ gchar *padname = NULL;
+ GstPadTemplate *templ = NULL;
+ GstStreamFlags stream_flags;
+ GstEvent *stream_start;
+ gchar *stream_id;
+
+ g_ptr_array_add (demux->common.src, context);
+ context->index = demux->common.num_streams++;
+ g_assert (demux->common.src->len == demux->common.num_streams);
+ g_ptr_array_index (demux->common.src, demux->common.num_streams - 1) =
+ context;
+
+ /* now create the GStreamer connectivity */
+ switch (context->type) {
+ case GST_MATROSKA_TRACK_TYPE_VIDEO:
+ padname = g_strdup_printf ("video_%u", demux->num_v_streams++);
+ templ = gst_element_class_get_pad_template (klass, "video_%u");
+
+ if (!context->intra_only)
+ demux->have_nonintraonly_v_streams = TRUE;
+ break;
+
+ case GST_MATROSKA_TRACK_TYPE_AUDIO:
+ padname = g_strdup_printf ("audio_%u", demux->num_a_streams++);
+ templ = gst_element_class_get_pad_template (klass, "audio_%u");
+ break;
+
+ case GST_MATROSKA_TRACK_TYPE_SUBTITLE:
+ padname = g_strdup_printf ("subtitle_%u", demux->num_t_streams++);
+ templ = gst_element_class_get_pad_template (klass, "subtitle_%u");
+ break;
+
+ default:
+ /* we should already have quit by now */
+ g_assert_not_reached ();
+ }
+
+ /* create the source pad for this track */
+ context->pad = gst_pad_new_from_template (templ, padname);
+
+ gst_pad_set_event_function (context->pad,
+ GST_DEBUG_FUNCPTR (gst_matroska_demux_handle_src_event));
+ gst_pad_set_query_function (context->pad,
+ GST_DEBUG_FUNCPTR (gst_matroska_demux_handle_src_query));
+
+ GST_INFO_OBJECT (demux, "Adding pad '%s' with caps %" GST_PTR_FORMAT,
+ padname, context->caps);
+
+ gst_pad_set_element_private (context->pad, context);
+
+ gst_pad_use_fixed_caps (context->pad);
+ gst_pad_set_active (context->pad, TRUE);
+
+ stream_id =
+ gst_pad_create_stream_id_printf (context->pad, GST_ELEMENT_CAST (demux),
+ "%03" G_GUINT64_FORMAT ":%03" G_GUINT64_FORMAT,
+ context->num, context->uid);
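+ /* gst_pad_create_stream_id_printf() combines the upstream stream id
+ * with the suffix printed here, so (illustrative example) track number 1
+ * with UID 123 yields something like "<upstream-id>/001:123" */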
+ stream_start =
+ gst_pad_get_sticky_event (demux->common.sinkpad, GST_EVENT_STREAM_START,
+ 0);
+ if (stream_start) {
+ if (gst_event_parse_group_id (stream_start, &demux->group_id))
+ demux->have_group_id = TRUE;
+ else
+ demux->have_group_id = FALSE;
+ gst_event_unref (stream_start);
+ } else if (!demux->have_group_id) {
+ demux->have_group_id = TRUE;
+ demux->group_id = gst_util_group_id_next ();
+ }
+
+ stream_start = gst_event_new_stream_start (stream_id);
+ g_free (stream_id);
+ if (demux->have_group_id)
+ gst_event_set_group_id (stream_start, demux->group_id);
+ stream_flags = GST_STREAM_FLAG_NONE;
+ if (context->type == GST_MATROSKA_TRACK_TYPE_SUBTITLE)
+ stream_flags |= GST_STREAM_FLAG_SPARSE;
+ if (context->flags & GST_MATROSKA_TRACK_DEFAULT)
+ stream_flags |= GST_STREAM_FLAG_SELECT;
+ else if (!(context->flags & GST_MATROSKA_TRACK_ENABLED))
+ stream_flags |= GST_STREAM_FLAG_UNSELECT;
+
+ gst_event_set_stream_flags (stream_start, stream_flags);
+ gst_pad_push_event (context->pad, stream_start);
+ gst_pad_set_caps (context->pad, context->caps);
+
+ if (demux->common.global_tags) {
+ GstEvent *tag_event;
+
+ gst_tag_list_add (demux->common.global_tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_CONTAINER_FORMAT, "Matroska", NULL);
+ GST_DEBUG_OBJECT (context->pad, "Sending global_tags %p: %" GST_PTR_FORMAT,
+ demux->common.global_tags, demux->common.global_tags);
+
+ tag_event =
+ gst_event_new_tag (gst_tag_list_copy (demux->common.global_tags));
+
+ gst_pad_push_event (context->pad, tag_event);
+ }
+
+ if (G_UNLIKELY (context->tags_changed)) {
+ GST_DEBUG_OBJECT (context->pad, "Sending tags %p: %"
+ GST_PTR_FORMAT, context->tags, context->tags);
+ gst_pad_push_event (context->pad,
+ gst_event_new_tag (gst_tag_list_copy (context->tags)));
+ context->tags_changed = FALSE;
+ }
+
+ gst_element_add_pad (GST_ELEMENT (demux), context->pad);
+ gst_flow_combiner_add_pad (demux->flowcombiner, context->pad);
+
+ g_free (padname);
+ }
+
+ static gboolean
+ gst_matroska_demux_query (GstMatroskaDemux * demux, GstPad * pad,
+ GstQuery * query)
+ {
+ gboolean res = FALSE;
+ GstMatroskaTrackContext *context = NULL;
+
+ if (pad) {
+ context = gst_pad_get_element_private (pad);
+ }
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_POSITION:
+ {
+ GstFormat format;
+
+ gst_query_parse_position (query, &format, NULL);
+
+ res = TRUE;
+ if (format == GST_FORMAT_TIME) {
+ GST_OBJECT_LOCK (demux);
+ if (context)
+ gst_query_set_position (query, GST_FORMAT_TIME,
+ MAX (context->pos, demux->stream_start_time) -
+ demux->stream_start_time);
+ else
+ gst_query_set_position (query, GST_FORMAT_TIME,
+ MAX (demux->common.segment.position, demux->stream_start_time) -
+ demux->stream_start_time);
+ GST_OBJECT_UNLOCK (demux);
+ } else if (format == GST_FORMAT_DEFAULT && context
+ && context->default_duration) {
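+ /* e.g. with a default_duration of 40 ms per frame, a position of
+ * 2 s reports 50 in DEFAULT (frame) format */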
+ GST_OBJECT_LOCK (demux);
+ gst_query_set_position (query, GST_FORMAT_DEFAULT,
+ context->pos / context->default_duration);
+ GST_OBJECT_UNLOCK (demux);
+ } else {
+ GST_DEBUG_OBJECT (demux,
+ "only position query in TIME and DEFAULT format is supported");
+ res = FALSE;
+ }
+
+ break;
+ }
+ case GST_QUERY_DURATION:
+ {
+ GstFormat format;
+
+ gst_query_parse_duration (query, &format, NULL);
+
+ res = TRUE;
+ if (format == GST_FORMAT_TIME) {
+ GST_OBJECT_LOCK (demux);
+ gst_query_set_duration (query, GST_FORMAT_TIME,
+ demux->common.segment.duration);
+ GST_OBJECT_UNLOCK (demux);
+ } else if (format == GST_FORMAT_DEFAULT && context
+ && context->default_duration) {
+ GST_OBJECT_LOCK (demux);
+ gst_query_set_duration (query, GST_FORMAT_DEFAULT,
+ demux->common.segment.duration / context->default_duration);
+ GST_OBJECT_UNLOCK (demux);
+ } else {
+ GST_DEBUG_OBJECT (demux,
+ "only duration query in TIME and DEFAULT format is supported");
+ res = FALSE;
+ }
+ break;
+ }
+
+ case GST_QUERY_SEEKING:
+ {
+ GstFormat fmt;
+
+ gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
+ GST_OBJECT_LOCK (demux);
+ if (fmt == GST_FORMAT_TIME) {
+ gboolean seekable;
+
+ if (demux->streaming) {
+ /* assuming we'll be able to get an index ... */
+ seekable = demux->seekable;
+ } else {
+ seekable = TRUE;
+ }
+
+ gst_query_set_seeking (query, GST_FORMAT_TIME, seekable,
+ 0, demux->common.segment.duration);
+ res = TRUE;
+ }
+ GST_OBJECT_UNLOCK (demux);
+ break;
+ }
+ case GST_QUERY_SEGMENT:
+ {
+ GstFormat format;
+ gint64 start, stop;
+
+ format = demux->common.segment.format;
+
+ start =
+ gst_segment_to_stream_time (&demux->common.segment, format,
+ demux->common.segment.start);
+ if ((stop = demux->common.segment.stop) == -1)
+ stop = demux->common.segment.duration;
+ else
+ stop =
+ gst_segment_to_stream_time (&demux->common.segment, format, stop);
+
+ gst_query_set_segment (query, demux->common.segment.rate, format, start,
+ stop);
+ res = TRUE;
+ break;
+ }
+ default:
+ if (pad)
+ res = gst_pad_query_default (pad, (GstObject *) demux, query);
+ else
+ res =
+ GST_ELEMENT_CLASS (parent_class)->query (GST_ELEMENT_CAST (demux),
+ query);
+ break;
+ }
+
+ return res;
+ }
+
+ static gboolean
+ gst_matroska_demux_element_query (GstElement * element, GstQuery * query)
+ {
+ return gst_matroska_demux_query (GST_MATROSKA_DEMUX (element), NULL, query);
+ }
+
+ static gboolean
+ gst_matroska_demux_handle_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
+ {
+ GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (parent);
+
+ return gst_matroska_demux_query (demux, pad, query);
+ }
+
+ /* returns FALSE if there are no pads to deliver event to,
+ * otherwise TRUE (whatever the outcome of event sending),
+ * takes ownership of the passed event! */
+ static gboolean
+ gst_matroska_demux_send_event (GstMatroskaDemux * demux, GstEvent * event)
+ {
+ gboolean ret = FALSE;
+ gint i;
+
+ g_return_val_if_fail (event != NULL, FALSE);
+
+ GST_DEBUG_OBJECT (demux, "Sending event of type %s to all source pads",
+ GST_EVENT_TYPE_NAME (event));
+
+ g_assert (demux->common.src->len == demux->common.num_streams);
+ for (i = 0; i < demux->common.src->len; i++) {
+ GstMatroskaTrackContext *stream;
+
+ stream = g_ptr_array_index (demux->common.src, i);
+ gst_event_ref (event);
+ gst_pad_push_event (stream->pad, event);
+ ret = TRUE;
+ }
+
+ gst_event_unref (event);
+ return ret;
+ }
+
+ static void
+ gst_matroska_demux_send_tags (GstMatroskaDemux * demux)
+ {
+ gint i;
+
+ if (G_UNLIKELY (demux->common.global_tags_changed)) {
+ GstEvent *tag_event;
+ gst_tag_list_add (demux->common.global_tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_CONTAINER_FORMAT, "Matroska", NULL);
+ GST_DEBUG_OBJECT (demux, "Sending global_tags %p : %" GST_PTR_FORMAT,
+ demux->common.global_tags, demux->common.global_tags);
+
+ tag_event =
+ gst_event_new_tag (gst_tag_list_copy (demux->common.global_tags));
+
+ for (i = 0; i < demux->common.src->len; i++) {
+ GstMatroskaTrackContext *stream;
+
+ stream = g_ptr_array_index (demux->common.src, i);
+ gst_pad_push_event (stream->pad, gst_event_ref (tag_event));
+ }
+
+ gst_event_unref (tag_event);
+ demux->common.global_tags_changed = FALSE;
+ }
+
+ g_assert (demux->common.src->len == demux->common.num_streams);
+ for (i = 0; i < demux->common.src->len; i++) {
+ GstMatroskaTrackContext *stream;
+
+ stream = g_ptr_array_index (demux->common.src, i);
+
+ if (G_UNLIKELY (stream->tags_changed)) {
+ GST_DEBUG_OBJECT (demux, "Sending tags %p for pad %s:%s : %"
+ GST_PTR_FORMAT, stream->tags,
+ GST_DEBUG_PAD_NAME (stream->pad), stream->tags);
+ gst_pad_push_event (stream->pad,
+ gst_event_new_tag (gst_tag_list_copy (stream->tags)));
+ stream->tags_changed = FALSE;
+ }
+ }
+ }
+
+ static gboolean
+ gst_matroska_demux_element_send_event (GstElement * element, GstEvent * event)
+ {
+ GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (element);
+ gboolean res;
+
+ g_return_val_if_fail (event != NULL, FALSE);
+
+ if (GST_EVENT_TYPE (event) == GST_EVENT_SEEK) {
+ /* no seeking until we are (safely) ready */
+ if (demux->common.state != GST_MATROSKA_READ_STATE_DATA) {
+ GST_DEBUG_OBJECT (demux,
+ "not ready for seeking yet, deferring seek: %" GST_PTR_FORMAT, event);
+ if (demux->deferred_seek_event)
+ gst_event_unref (demux->deferred_seek_event);
+ demux->deferred_seek_event = event;
+ demux->deferred_seek_pad = NULL;
+ return TRUE;
+ }
+ if (!demux->streaming)
+ res = gst_matroska_demux_handle_seek_event (demux, NULL, event);
+ else
+ res = gst_matroska_demux_handle_seek_push (demux, NULL, event);
+ } else {
+ GST_WARNING_OBJECT (demux, "Unhandled event of type %s",
+ GST_EVENT_TYPE_NAME (event));
+ res = FALSE;
+ }
+ gst_event_unref (event);
+ return res;
+ }
+
+ static gboolean
+ gst_matroska_demux_move_to_entry (GstMatroskaDemux * demux,
+ GstMatroskaIndex * entry, gboolean reset, gboolean update)
+ {
+ gint i;
+
+ GST_OBJECT_LOCK (demux);
+
+ if (update) {
+ /* seek (relative to matroska segment) */
+ /* position might be invalid; will error when streaming resumes ... */
+ demux->common.offset = entry->pos + demux->common.ebml_segment_start;
+ demux->next_cluster_offset = 0;
+
+ GST_DEBUG_OBJECT (demux,
+ "Seeked to offset %" G_GUINT64_FORMAT ", block %d, " "time %"
+ GST_TIME_FORMAT, entry->pos + demux->common.ebml_segment_start,
+ entry->block, GST_TIME_ARGS (entry->time));
+
+ /* update the time */
+ gst_matroska_read_common_reset_streams (&demux->common, entry->time, TRUE);
+ gst_flow_combiner_reset (demux->flowcombiner);
+ demux->common.segment.position = entry->time;
+ demux->seek_block = entry->block;
+ demux->seek_first = TRUE;
+ demux->last_stop_end = GST_CLOCK_TIME_NONE;
+ }
+
+ for (i = 0; i < demux->common.src->len; i++) {
+ GstMatroskaTrackContext *stream = g_ptr_array_index (demux->common.src, i);
+
+ if (reset) {
+ stream->to_offset = G_MAXINT64;
+ } else {
+ if (stream->from_offset != -1)
+ stream->to_offset = stream->from_offset;
+ }
+ stream->from_offset = -1;
+ stream->from_time = GST_CLOCK_TIME_NONE;
+ }
+
+ GST_OBJECT_UNLOCK (demux);
+
+ return TRUE;
+ }
+
+ static gint
+ gst_matroska_cluster_compare (gint64 * i1, gint64 * i2)
+ {
+ if (*i1 < *i2)
+ return -1;
+ else if (*i1 > *i2)
+ return 1;
+ else
+ return 0;
+ }
+
+ /* searches for a cluster start from @pos,
+ * returns GST_FLOW_OK and the cluster position in @pos if found */
+ static GstFlowReturn
+ gst_matroska_demux_search_cluster (GstMatroskaDemux * demux, gint64 * pos,
+ gboolean forward)
+ {
+ gint64 newpos = *pos;
+ gint64 orig_offset;
+ GstFlowReturn ret = GST_FLOW_OK;
+ const guint chunk = 128 * 1024;
+ GstBuffer *buf = NULL;
+ GstMapInfo map;
+ gpointer data = NULL;
+ gsize size;
+ guint64 length;
+ guint32 id;
+ guint needed;
+ gint64 oldpos, oldlength;
+
+ orig_offset = demux->common.offset;
+
+ GST_LOG_OBJECT (demux, "searching cluster %s offset %" G_GINT64_FORMAT,
+ forward ? "following" : "preceding", *pos);
+
+ if (demux->clusters) {
+ gint64 *cpos;
+
+ cpos = gst_util_array_binary_search (demux->clusters->data,
+ demux->clusters->len, sizeof (gint64),
+ (GCompareDataFunc) gst_matroska_cluster_compare,
+ forward ? GST_SEARCH_MODE_AFTER : GST_SEARCH_MODE_BEFORE, pos, NULL);
+ /* sanity check */
+ if (cpos) {
+ GST_DEBUG_OBJECT (demux,
+ "cluster reported at offset %" G_GINT64_FORMAT, *cpos);
+ demux->common.offset = *cpos;
+ ret = gst_matroska_read_common_peek_id_length_pull (&demux->common,
+ GST_ELEMENT_CAST (demux), &id, &length, &needed);
+ if (ret == GST_FLOW_OK && id == GST_MATROSKA_ID_CLUSTER) {
+ newpos = *cpos;
+ goto exit;
+ }
+ }
+ }
+
+ /* read in at newpos and scan for ebml cluster id */
+ oldpos = oldlength = -1;
+ while (1) {
+ GstByteReader reader;
+ gint cluster_pos;
+ guint toread = chunk;
+
+ if (!forward) {
+ /* never read beyond the requested target */
+ if (G_UNLIKELY (newpos < chunk)) {
+ toread = newpos;
+ newpos = 0;
+ } else {
+ newpos -= chunk;
+ }
+ }
+ if (buf != NULL) {
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ buf = NULL;
+ }
+ ret = gst_pad_pull_range (demux->common.sinkpad, newpos, toread, &buf);
+ if (ret != GST_FLOW_OK)
+ break;
+ GST_DEBUG_OBJECT (demux,
+ "read buffer size %" G_GSIZE_FORMAT " at offset %" G_GINT64_FORMAT,
+ gst_buffer_get_size (buf), newpos);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
+ if (oldpos == newpos && oldlength == map.size) {
+ GST_ERROR_OBJECT (demux, "Stuck at same position");
+ ret = GST_FLOW_ERROR;
+ goto exit;
+ } else {
+ oldpos = newpos;
+ oldlength = map.size;
+ }
+
+ gst_byte_reader_init (&reader, data, size);
+ cluster_pos = -1;
+ while (1) {
+ gint found = gst_byte_reader_masked_scan_uint32 (&reader, 0xffffffff,
+ GST_MATROSKA_ID_CLUSTER, 0, gst_byte_reader_get_remaining (&reader));
+ if (forward) {
+ cluster_pos = found;
+ break;
+ }
+ /* need last occurrence when searching backwards */
+ if (found >= 0) {
+ cluster_pos = gst_byte_reader_get_pos (&reader) + found;
+ gst_byte_reader_skip (&reader, found + 4);
+ } else {
+ break;
+ }
+ }
+
+ if (cluster_pos >= 0) {
+ newpos += cluster_pos;
+ GST_DEBUG_OBJECT (demux,
+ "found cluster ebml id at offset %" G_GINT64_FORMAT, newpos);
+ /* extra checks whether we really synced to a cluster:
+ * - either it is the first and only cluster,
+ * - or there is a cluster after this one,
+ * - or the cluster length is undefined
+ */
+ /* ok if first cluster (there may not be a subsequent one) */
+ if (newpos == demux->first_cluster_offset) {
+ GST_DEBUG_OBJECT (demux, "cluster is first cluster -> OK");
+ break;
+ }
+ demux->common.offset = newpos;
+ ret = gst_matroska_read_common_peek_id_length_pull (&demux->common,
+ GST_ELEMENT_CAST (demux), &id, &length, &needed);
+ if (ret != GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (demux, "need more data -> continue");
+ goto next;
+ }
+ g_assert (id == GST_MATROSKA_ID_CLUSTER);
+ GST_DEBUG_OBJECT (demux, "cluster size %" G_GUINT64_FORMAT ", prefix %d",
+ length, needed);
+ /* ok if undefined length or first cluster */
+ if (length == GST_EBML_SIZE_UNKNOWN || length == G_MAXUINT64) {
+ GST_DEBUG_OBJECT (demux, "cluster has undefined length -> OK");
+ break;
+ }
+ /* skip cluster */
+ demux->common.offset += length + needed;
+ ret = gst_matroska_read_common_peek_id_length_pull (&demux->common,
+ GST_ELEMENT_CAST (demux), &id, &length, &needed);
+ if (ret != GST_FLOW_OK)
+ goto next;
+ GST_DEBUG_OBJECT (demux, "next element is %scluster",
+ id == GST_MATROSKA_ID_CLUSTER ? "" : "not ");
+ if (id == GST_MATROSKA_ID_CLUSTER)
+ break;
+ next:
+ if (forward)
+ newpos += 1;
+ } else {
+ /* a partial 4-byte cluster id may have been in the tail of the
+ * buffer, so keep a 3-byte overlap with the next chunk */
+ newpos +=
+ forward ? MAX (gst_byte_reader_get_remaining (&reader), 4) - 3 : 3;
+ }
+ }
+
+ if (buf) {
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ buf = NULL;
+ }
+
+ exit:
+ demux->common.offset = orig_offset;
+ *pos = newpos;
+ return ret;
+ }
+
+ /* Three states: the cluster starts with a keyframe, starts with a delta
+ * frame, or we don't know */
+ typedef enum
+ {
+ CLUSTER_STATUS_NONE = 0,
+ CLUSTER_STATUS_STARTS_WITH_KEYFRAME,
+ CLUSTER_STATUS_STARTS_WITH_DELTAUNIT,
+ } ClusterStatus;
+
+ typedef struct
+ {
+ guint64 offset;
+ guint64 size;
+ guint64 prev_size;
+ GstClockTime time;
+ ClusterStatus status;
+ } ClusterInfo;
+
+ static const gchar *
+ cluster_status_get_nick (ClusterStatus status)
+ {
+ switch (status) {
+ case CLUSTER_STATUS_NONE:
+ return "none";
+ case CLUSTER_STATUS_STARTS_WITH_KEYFRAME:
+ return "key";
+ case CLUSTER_STATUS_STARTS_WITH_DELTAUNIT:
+ return "delta";
+ }
+ return "???";
+ }
+
+ /* Skip ebml-coded number:
+ * 1xxx.. = 1 byte
+ * 01xx.. = 2 bytes
+ * 001x.. = 3 bytes, etc.
+ */
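+ /* e.g. 0x81 is a 1-byte number and 0x40 0x01 a 2-byte number: the
+ * count of leading zero bits in the first byte gives the number of
+ * extra bytes. Only the total length matters here, since the number
+ * is merely skipped. */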
+ static gboolean
+ bit_reader_skip_ebml_num (GstBitReader * br)
+ {
+ guint8 i, v = 0;
+
+ if (!gst_bit_reader_peek_bits_uint8 (br, &v, 8))
+ return FALSE;
+
+ for (i = 0; i < 8; i++) {
+ if ((v & (0x80 >> i)) != 0)
+ break;
+ }
+ return gst_bit_reader_skip (br, (i + 1) * 8);
+ }
+
+ /* Don't probe more than that many bytes into the cluster for keyframe info
+ * (random value, mostly for sanity checking) */
+ #define MAX_CLUSTER_INFO_PROBE_LENGTH 256
+
+ static gboolean
+ gst_matroska_demux_peek_cluster_info (GstMatroskaDemux * demux,
+ ClusterInfo * cluster, guint64 offset)
+ {
+ demux->common.offset = offset;
+ demux->cluster_time = GST_CLOCK_TIME_NONE;
+
+ cluster->offset = offset;
+ cluster->size = 0;
+ cluster->prev_size = 0;
+ cluster->time = GST_CLOCK_TIME_NONE;
+ cluster->status = CLUSTER_STATUS_NONE;
+
+ /* parse first few elements in cluster */
+ do {
+ GstFlowReturn flow;
+ guint64 length;
+ guint32 id;
+ guint needed;
+
+ flow = gst_matroska_read_common_peek_id_length_pull (&demux->common,
+ GST_ELEMENT_CAST (demux), &id, &length, &needed);
+
+ if (flow != GST_FLOW_OK)
+ break;
+
+ GST_LOG_OBJECT (demux, "Offset %" G_GUINT64_FORMAT ", Element id 0x%x, "
+ "size %" G_GUINT64_FORMAT ", needed %d", demux->common.offset, id,
+ length, needed);
+
+ /* Reached start of next cluster without finding data, stop processing */
+ if (id == GST_MATROSKA_ID_CLUSTER && cluster->offset != offset)
+ break;
+
+ /* Not going to parse into these for now, stop processing */
+ if (id == GST_MATROSKA_ID_ENCRYPTEDBLOCK
+ || id == GST_MATROSKA_ID_BLOCKGROUP || id == GST_MATROSKA_ID_BLOCK)
+ break;
+
+ /* SimpleBlock: peek at headers to check if it's a keyframe */
+ if (id == GST_MATROSKA_ID_SIMPLEBLOCK) {
+ GstBitReader br;
+ guint8 *d, hdr_len, v = 0;
+
+ GST_DEBUG_OBJECT (demux, "SimpleBlock found");
+
+ /* SimpleBlock header is max. 21 bytes */
+ hdr_len = MIN (21, length);
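+ /* Layout peeked at below: element prefix (ID + size, i.e. 'needed'
+ * bytes), EBML-coded track number, 16-bit relative timecode, then
+ * the flags byte whose 0x80 bit marks a keyframe. */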
+
+ flow = gst_matroska_read_common_peek_bytes (&demux->common,
+ demux->common.offset, hdr_len, NULL, &d);
+
+ if (flow != GST_FLOW_OK)
+ break;
+
+ gst_bit_reader_init (&br, d, hdr_len);
+
+ /* skip prefix: ebml id (SimpleBlock) + element length */
+ if (!gst_bit_reader_skip (&br, 8 * needed))
+ break;
+
+ /* skip track number (ebml coded) */
+ if (!bit_reader_skip_ebml_num (&br))
+ break;
+
+ /* skip Timecode */
+ if (!gst_bit_reader_skip (&br, 16))
+ break;
+
+ /* read flags */
+ if (!gst_bit_reader_get_bits_uint8 (&br, &v, 8))
+ break;
+
+ if ((v & 0x80) != 0)
+ cluster->status = CLUSTER_STATUS_STARTS_WITH_KEYFRAME;
+ else
+ cluster->status = CLUSTER_STATUS_STARTS_WITH_DELTAUNIT;
+
+ break;
+ }
+
+ flow = gst_matroska_demux_parse_id (demux, id, length, needed);
+
+ if (flow != GST_FLOW_OK)
+ break;
+
+ switch (id) {
+ case GST_MATROSKA_ID_CLUSTER:
+ if (length == G_MAXUINT64)
+ cluster->size = 0;
+ else
+ cluster->size = length + needed;
+ break;
+ case GST_MATROSKA_ID_PREVSIZE:
+ cluster->prev_size = demux->cluster_prevsize;
+ break;
+ case GST_MATROSKA_ID_CLUSTERTIMECODE:
+ cluster->time = demux->cluster_time * demux->common.time_scale;
+ break;
+ case GST_MATROSKA_ID_SILENTTRACKS:
+ case GST_EBML_ID_CRC32:
+ /* ignore and continue */
+ break;
+ default:
+ GST_WARNING_OBJECT (demux, "Unknown ebml id 0x%08x (possibly garbage), "
+ "bailing out", id);
+ goto out;
+ }
+ } while (demux->common.offset - offset < MAX_CLUSTER_INFO_PROBE_LENGTH);
+
+ out:
+
+ GST_INFO_OBJECT (demux, "Cluster @ %" G_GUINT64_FORMAT ": "
+ "time %" GST_TIME_FORMAT ", size %" G_GUINT64_FORMAT ", "
+ "prev_size %" G_GUINT64_FORMAT ", %s", cluster->offset,
+ GST_TIME_ARGS (cluster->time), cluster->size, cluster->prev_size,
+ cluster_status_get_nick (cluster->status));
+
+ /* return success as long as we could extract the minimum useful information */
+ return cluster->time != GST_CLOCK_TIME_NONE;
+ }
+
+ /* returns TRUE if the cluster offset was updated */
+ static gboolean
+ gst_matroska_demux_scan_back_for_keyframe_cluster (GstMatroskaDemux * demux,
+ gint64 * cluster_offset, GstClockTime * cluster_time)
+ {
+ GstClockTime stream_start_time = demux->stream_start_time;
+ guint64 first_cluster_offset = demux->first_cluster_offset;
+ gint64 off = *cluster_offset;
+ ClusterInfo cluster = { 0, };
+
+ GST_INFO_OBJECT (demux, "Checking if cluster starts with keyframe");
+ while (off > first_cluster_offset) {
+ if (!gst_matroska_demux_peek_cluster_info (demux, &cluster, off)) {
+ GST_LOG_OBJECT (demux,
+ "Couldn't get info on cluster @ %" G_GUINT64_FORMAT, off);
+ break;
+ }
+
+ /* Keyframe? Then we're done */
+ if (cluster.status == CLUSTER_STATUS_STARTS_WITH_KEYFRAME) {
+ GST_LOG_OBJECT (demux,
+ "Found keyframe at start of cluster @ %" G_GUINT64_FORMAT, off);
+ break;
+ }
+
+ /* We only scan back if we *know* we landed on a cluster that
+ * starts with a delta frame. */
+ if (cluster.status != CLUSTER_STATUS_STARTS_WITH_DELTAUNIT) {
+ GST_LOG_OBJECT (demux,
+ "No delta frame at start of cluster @ %" G_GUINT64_FORMAT, off);
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux, "Cluster starts with delta frame, backtracking");
+
+ /* Don't scan back more than this much in time from the cluster we
+ * originally landed on. This is mostly a sanity check in case a file
+ * always has keyframes in the middle of clusters and never at the
+ * beginning. Without this we would always scan back to the beginning
+ * of the file in that case. */
+ if (cluster.time != GST_CLOCK_TIME_NONE) {
+ GstClockTimeDiff distance = GST_CLOCK_DIFF (cluster.time, *cluster_time);
+
+ if (distance < 0 || distance > demux->max_backtrack_distance * GST_SECOND) {
+ GST_DEBUG_OBJECT (demux, "Haven't found cluster with keyframe within "
+ "%u secs of original seek target cluster, stopping",
+ demux->max_backtrack_distance);
+ break;
+ }
+ }
+
+ /* If we have cluster prev_size we can skip back efficiently. If not,
+ * we'll just do a brute force search for a cluster identifier */
+ if (cluster.prev_size > 0 && off >= cluster.prev_size) {
+ off -= cluster.prev_size;
+ } else {
+ GstFlowReturn flow;
+
+ GST_LOG_OBJECT (demux, "Cluster has no or invalid prev size, searching "
+ "for previous cluster instead then");
+
+ flow = gst_matroska_demux_search_cluster (demux, &off, FALSE);
+ if (flow != GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (demux, "cluster search yielded flow %s, stopping",
+ gst_flow_get_name (flow));
+ break;
+ }
+ }
+
+ if (off <= first_cluster_offset) {
+ GST_LOG_OBJECT (demux, "Reached first cluster, stopping");
+ *cluster_offset = first_cluster_offset;
+ *cluster_time = stream_start_time;
+ return TRUE;
+ }
+ GST_LOG_OBJECT (demux, "Trying prev cluster @ %" G_GUINT64_FORMAT, off);
+ }
+
+ /* If we found a cluster starting with a keyframe jump to that instead,
+ * otherwise leave everything as it was before */
+ if (cluster.time != GST_CLOCK_TIME_NONE
+ && (cluster.offset == first_cluster_offset
+ || cluster.status == CLUSTER_STATUS_STARTS_WITH_KEYFRAME)) {
+ *cluster_offset = cluster.offset;
+ *cluster_time = cluster.time;
+ return TRUE;
+ }
+
+ return FALSE;
+ }
+
+ /* bisect and scan through the file for a cluster starting before @time;
+ * returns a fake index entry with the corresponding cluster info */
+ static GstMatroskaIndex *
+ gst_matroska_demux_search_pos (GstMatroskaDemux * demux, GstClockTime time)
+ {
+ GstMatroskaIndex *entry = NULL;
+ GstMatroskaReadState current_state;
+ GstClockTime otime, prev_cluster_time, current_cluster_time, cluster_time;
+ GstClockTime atime;
+ gint64 opos, newpos, current_offset;
+ gint64 prev_cluster_offset = -1, current_cluster_offset, cluster_offset;
+ gint64 apos, maxpos;
+ guint64 cluster_size = 0;
+ GstFlowReturn ret;
+ guint64 length;
+ guint32 id;
+ guint needed;
+
+ /* estimate new position, resync using cluster ebml id,
+ * and bisect further or scan forward to appropriate cluster */
+
+ /* save some current global state which will be touched by our scanning */
+ current_state = demux->common.state;
+ g_return_val_if_fail (current_state == GST_MATROSKA_READ_STATE_DATA, NULL);
+
+ current_cluster_offset = demux->cluster_offset;
+ current_cluster_time = demux->cluster_time;
+ current_offset = demux->common.offset;
+
+ demux->common.state = GST_MATROSKA_READ_STATE_SCANNING;
+
+ /* estimate using start and last known cluster */
+ GST_OBJECT_LOCK (demux);
+ apos = demux->first_cluster_offset;
+ atime = demux->stream_start_time;
+ opos = demux->last_cluster_offset;
+ otime = demux->stream_last_time;
+ GST_OBJECT_UNLOCK (demux);
+
+ /* sanitize */
+ time = MAX (time, atime);
+ otime = MAX (otime, atime);
+ opos = MAX (opos, apos);
+
+ maxpos = gst_matroska_read_common_get_length (&demux->common);
+
+ /* invariants:
+ * apos <= opos
+ * atime <= otime
+ * apos always refers to a cluster before the target time;
+ * opos may or may not be after the target time, but once it is,
+ * it stays that way in every following iteration
+ */
+
+ retry:
+ GST_LOG_OBJECT (demux,
+ "apos: %" G_GUINT64_FORMAT ", atime: %" GST_TIME_FORMAT ", %"
+ GST_TIME_FORMAT " in stream time, "
+ "opos: %" G_GUINT64_FORMAT ", otime: %" GST_TIME_FORMAT ", %"
+ GST_TIME_FORMAT " in stream time (start %" GST_TIME_FORMAT "), time %"
+ GST_TIME_FORMAT, apos, GST_TIME_ARGS (atime),
+ GST_TIME_ARGS (atime - demux->stream_start_time), opos,
+ GST_TIME_ARGS (otime), GST_TIME_ARGS (otime - demux->stream_start_time),
+ GST_TIME_ARGS (demux->stream_start_time), GST_TIME_ARGS (time));
+
+ g_assert (atime <= otime);
+ g_assert (apos <= opos);
+ if (time == GST_CLOCK_TIME_NONE) {
+ GST_DEBUG_OBJECT (demux, "searching last cluster");
+ newpos = maxpos;
+ if (newpos == -1) {
+ GST_DEBUG_OBJECT (demux, "unknown file size; bailing out");
+ goto exit;
+ }
+ } else if (otime <= atime) {
+ newpos = apos;
+ } else {
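+ /* linear interpolation between the two known (offset, time) points;
+ * e.g. with apos = 1 MB at 10 s and opos = 5 MB at 50 s, a 20 s target
+ * estimates 1 MB + 4 MB * (20 - 10) / (50 - 10) = 2 MB */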
+ newpos = apos +
+ gst_util_uint64_scale (opos - apos, time - atime, otime - atime);
+ if (maxpos != -1 && newpos > maxpos)
+ newpos = maxpos;
+ }
+
+ GST_DEBUG_OBJECT (demux,
+ "estimated offset for %" GST_TIME_FORMAT ": %" G_GINT64_FORMAT,
+ GST_TIME_ARGS (time), newpos);
+
+ /* search backwards */
+ if (newpos > apos) {
+ ret = gst_matroska_demux_search_cluster (demux, &newpos, FALSE);
+ if (ret != GST_FLOW_OK)
+ goto exit;
+ }
+
+ /* then start scanning and parsing for cluster time,
+ * re-estimate if possible, otherwise next cluster and so on */
+ /* note that each re-estimate is entered with a change in apos or opos,
+ * avoiding infinite loop */
+ demux->common.offset = newpos;
+ demux->cluster_time = cluster_time = GST_CLOCK_TIME_NONE;
+ cluster_size = 0;
+ prev_cluster_time = GST_CLOCK_TIME_NONE;
+ while (1) {
+ /* peek and parse some elements */
+ ret = gst_matroska_read_common_peek_id_length_pull (&demux->common,
+ GST_ELEMENT_CAST (demux), &id, &length, &needed);
+ if (ret != GST_FLOW_OK)
+ goto error;
+ GST_LOG_OBJECT (demux, "Offset %" G_GUINT64_FORMAT ", Element id 0x%x, "
+ "size %" G_GUINT64_FORMAT ", needed %d", demux->common.offset, id,
+ length, needed);
+ ret = gst_matroska_demux_parse_id (demux, id, length, needed);
+ if (ret != GST_FLOW_OK)
+ goto error;
+
+ if (id == GST_MATROSKA_ID_CLUSTER) {
+ cluster_time = GST_CLOCK_TIME_NONE;
+ if (length == G_MAXUINT64)
+ cluster_size = 0;
+ else
+ cluster_size = length + needed;
+ }
+ if (demux->cluster_time != GST_CLOCK_TIME_NONE &&
+ cluster_time == GST_CLOCK_TIME_NONE) {
+ cluster_time = demux->cluster_time * demux->common.time_scale;
+ cluster_offset = demux->cluster_offset;
+ GST_DEBUG_OBJECT (demux, "found cluster at offset %" G_GINT64_FORMAT
+ " with time %" GST_TIME_FORMAT, cluster_offset,
+ GST_TIME_ARGS (cluster_time));
+ if (time == GST_CLOCK_TIME_NONE) {
+ GST_DEBUG_OBJECT (demux, "found last cluster");
+ prev_cluster_time = cluster_time;
+ prev_cluster_offset = cluster_offset;
+ break;
+ }
+ if (cluster_time > time) {
+ GST_DEBUG_OBJECT (demux, "overshot target");
+ /* cluster overshoots */
+ if (cluster_offset == demux->first_cluster_offset) {
+ /* but no prev one */
+ GST_DEBUG_OBJECT (demux, "but using first cluster anyway");
+ prev_cluster_time = cluster_time;
+ prev_cluster_offset = cluster_offset;
+ break;
+ }
+ if (prev_cluster_time != GST_CLOCK_TIME_NONE) {
+ /* prev cluster did not overshoot, so prev cluster is target */
+ break;
+ } else {
+ /* re-estimate using this new position info */
+ opos = cluster_offset;
+ otime = cluster_time;
+ goto retry;
+ }
+ } else {
+ /* cluster undershoots */
+ GST_DEBUG_OBJECT (demux, "undershot target");
+ /* ok if close enough */
+ if (GST_CLOCK_DIFF (cluster_time, time) < 5 * GST_SECOND) {
+ GST_DEBUG_OBJECT (demux, "target close enough");
+ prev_cluster_time = cluster_time;
+ prev_cluster_offset = cluster_offset;
+ break;
+ }
+ if (otime > time) {
+ /* we are in between atime and otime => can bisect if worthwhile */
+ if (prev_cluster_time != GST_CLOCK_TIME_NONE &&
+ cluster_time > prev_cluster_time &&
+ (GST_CLOCK_DIFF (prev_cluster_time, cluster_time) * 10 <
+ GST_CLOCK_DIFF (cluster_time, time))) {
+ /* we moved at least one cluster forward,
+ * and it looks like target is still far away,
+ * let's estimate again */
+ GST_DEBUG_OBJECT (demux, "bisecting with new apos");
+ apos = cluster_offset;
+ atime = cluster_time;
+ goto retry;
+ }
+ }
+ /* cluster undershoots, goto next one */
+ prev_cluster_time = cluster_time;
+ prev_cluster_offset = cluster_offset;
+ /* skip the cluster if its length is known,
+ * otherwise it will be parsed into and skipped element by element */
+ if (cluster_size) {
+ GST_DEBUG_OBJECT (demux, "skipping to next cluster");
+ demux->common.offset = cluster_offset + cluster_size;
+ demux->cluster_time = GST_CLOCK_TIME_NONE;
+ } else {
+ GST_DEBUG_OBJECT (demux, "parsing/skipping cluster elements");
+ }
+ }
+ }
+ continue;
+
+ error:
+ if (ret == GST_FLOW_EOS) {
+ if (prev_cluster_time != GST_CLOCK_TIME_NONE)
+ break;
+ }
+ goto exit;
+ }
+
+ /* In the bisect loop above we always undershoot and then jump forward
+ * cluster-by-cluster until we overshoot, so if we get here we've gone
+ * over and the previous cluster is where we need to go to. */
+ cluster_offset = prev_cluster_offset;
+ cluster_time = prev_cluster_time;
+
+ /* If we have video and can easily backtrack, check if we landed on a cluster
+ * that starts with a keyframe - and if not backtrack until we find one that
+ * does. */
+ if (demux->have_nonintraonly_v_streams && demux->max_backtrack_distance > 0) {
+ if (gst_matroska_demux_scan_back_for_keyframe_cluster (demux,
+ &cluster_offset, &cluster_time)) {
+ GST_INFO_OBJECT (demux, "Adjusted cluster to %" GST_TIME_FORMAT " @ "
+ "%" G_GUINT64_FORMAT, GST_TIME_ARGS (cluster_time), cluster_offset);
+ }
+ }
+
+ entry = g_new0 (GstMatroskaIndex, 1);
+ entry->time = cluster_time;
+ entry->pos = cluster_offset - demux->common.ebml_segment_start;
+ GST_DEBUG_OBJECT (demux, "simulated index entry; time %" GST_TIME_FORMAT
+ ", pos %" G_GUINT64_FORMAT, GST_TIME_ARGS (entry->time), entry->pos);
+
+ exit:
+
+ /* restore some state */
+ demux->cluster_offset = current_cluster_offset;
+ demux->cluster_time = current_cluster_time;
+ demux->common.offset = current_offset;
+ demux->common.state = current_state;
+
+ return entry;
+ }
+
+ static gboolean
+ gst_matroska_demux_handle_seek_event (GstMatroskaDemux * demux,
+ GstPad * pad, GstEvent * event)
+ {
+ GstMatroskaIndex *entry = NULL;
+ GstMatroskaIndex scan_entry;
+ GstSeekFlags flags;
+ GstSeekType cur_type, stop_type;
+ GstFormat format;
+ gboolean flush, keyunit, instant_rate_change, before, after, accurate,
+ snap_next;
+ gdouble rate;
+ gint64 cur, stop;
+ GstMatroskaTrackContext *track = NULL;
+ GstSegment seeksegment = { 0, };
+ guint64 seekpos;
+ gboolean update = TRUE;
+ gboolean pad_locked = FALSE;
+ guint32 seqnum;
+ GstSearchMode snap_dir;
+
+ g_return_val_if_fail (event != NULL, FALSE);
+
+ if (pad)
+ track = gst_pad_get_element_private (pad);
+
+ GST_DEBUG_OBJECT (demux, "Have seek %" GST_PTR_FORMAT, event);
+
+ gst_event_parse_seek (event, &rate, &format, &flags, &cur_type, &cur,
+ &stop_type, &stop);
+ seqnum = gst_event_get_seqnum (event);
+
+ /* we can only seek on time */
+ if (format != GST_FORMAT_TIME) {
+ GST_DEBUG_OBJECT (demux, "Can only seek on TIME");
+ return FALSE;
+ }
+
+ GST_DEBUG_OBJECT (demux, "configuring seek");
+
+ flush = ! !(flags & GST_SEEK_FLAG_FLUSH);
+ keyunit = ! !(flags & GST_SEEK_FLAG_KEY_UNIT);
+ after = ! !(flags & GST_SEEK_FLAG_SNAP_AFTER);
+ before = ! !(flags & GST_SEEK_FLAG_SNAP_BEFORE);
+ accurate = ! !(flags & GST_SEEK_FLAG_ACCURATE);
+ instant_rate_change = ! !(flags & GST_SEEK_FLAG_INSTANT_RATE_CHANGE);
+
+ /* Directly send the instant-rate-change event here before taking the
+ * stream-lock so that it can be applied as soon as possible */
+ if (instant_rate_change) {
+ GstEvent *ev;
+
+ /* instant rate change only supported if direction does not change. All
+ * other requirements are already checked before creating the seek event
+ * but let's double-check here to be sure */
+ if ((rate > 0 && demux->common.segment.rate < 0) ||
+ (rate < 0 && demux->common.segment.rate > 0) ||
+ cur_type != GST_SEEK_TYPE_NONE ||
+ stop_type != GST_SEEK_TYPE_NONE || flush) {
+ GST_ERROR_OBJECT (demux,
+ "Instant rate change seeks only supported in the "
+ "same direction, without flushing and position change");
+ return FALSE;
+ }
+
+ ev = gst_event_new_instant_rate_change (rate /
+ demux->common.segment.rate, (GstSegmentFlags) flags);
+ gst_event_set_seqnum (ev, seqnum);
+ gst_matroska_demux_send_event (demux, ev);
+ return TRUE;
+ }
+
+ /* copy segment, we need this because we still need the old
+ * segment when we close the current segment. */
+ memcpy (&seeksegment, &demux->common.segment, sizeof (GstSegment));
+
+ /* pull mode without index means that the actual duration is not known;
+ * we might be playing a file that's still being recorded,
+ * so invalidate our current duration, which is only a moving target
+ * and should not be used to clamp anything */
+ if (!demux->streaming && !demux->common.index && demux->invalid_duration) {
+ seeksegment.duration = GST_CLOCK_TIME_NONE;
+ }
+
+ /* Subtract stream_start_time so we always seek on a segment
+ * in stream time */
+ if (GST_CLOCK_TIME_IS_VALID (demux->stream_start_time)) {
+ seeksegment.start -= demux->stream_start_time;
+ seeksegment.position -= demux->stream_start_time;
+ if (GST_CLOCK_TIME_IS_VALID (seeksegment.stop))
+ seeksegment.stop -= demux->stream_start_time;
+ else
+ seeksegment.stop = seeksegment.duration;
+ }
+
+ if (!gst_segment_do_seek (&seeksegment, rate, format, flags,
+ cur_type, cur, stop_type, stop, &update)) {
+ GST_WARNING_OBJECT (demux, "gst_segment_do_seek() failed.");
+ return FALSE;
+ }
+
+ /* Restore the clip timestamp offset */
+ if (GST_CLOCK_TIME_IS_VALID (demux->stream_start_time)) {
+ seeksegment.position += demux->stream_start_time;
+ seeksegment.start += demux->stream_start_time;
+ if (!GST_CLOCK_TIME_IS_VALID (seeksegment.stop))
+ seeksegment.stop = seeksegment.duration;
+ if (GST_CLOCK_TIME_IS_VALID (seeksegment.stop))
+ seeksegment.stop += demux->stream_start_time;
+ }
+
+ /* restore segment duration (in case the seek changed it);
+ * it would be determined again when parsing, but anyway ... */
+ seeksegment.duration = demux->common.segment.duration;
+
+ /* always do full update if flushing,
+ * otherwise problems might arise downstream with missing keyframes etc */
+ update = update || flush;
+
+ GST_DEBUG_OBJECT (demux, "New segment %" GST_SEGMENT_FORMAT, &seeksegment);
+
+ /* check sanity before we start flushing and all that */
+ snap_next = after && !before;
+ if (seeksegment.rate < 0)
+ snap_dir = snap_next ? GST_SEARCH_MODE_BEFORE : GST_SEARCH_MODE_AFTER;
+ else
+ snap_dir = snap_next ? GST_SEARCH_MODE_AFTER : GST_SEARCH_MODE_BEFORE;
+
+ GST_OBJECT_LOCK (demux);
+
+ seekpos = seeksegment.position;
+ if (accurate) {
+ seekpos -= MIN (seeksegment.position, demux->audio_lead_in_ts);
+ }
+
+ track = gst_matroska_read_common_get_seek_track (&demux->common, track);
+ if ((entry = gst_matroska_read_common_do_index_seek (&demux->common, track,
+ seekpos, &demux->seek_index, &demux->seek_entry,
+ snap_dir)) == NULL) {
+ /* pull mode without index can scan later on */
+ if (demux->streaming) {
+ GST_DEBUG_OBJECT (demux, "No matching seek entry in index");
+ GST_OBJECT_UNLOCK (demux);
+ return FALSE;
+ } else if (rate < 0.0) {
+ /* FIXME: We should build an index during playback or when scanning
+ * that can be used here. The reverse playback code requires seek_index
+ * and seek_entry to be set!
+ */
+ GST_DEBUG_OBJECT (demux,
+ "No matching seek entry in index, needed for reverse playback");
+ GST_OBJECT_UNLOCK (demux);
+ return FALSE;
+ }
+ }
+ GST_DEBUG_OBJECT (demux, "Seek position looks sane");
+ GST_OBJECT_UNLOCK (demux);
+
+ if (!update) {
+ /* only have to update some segment,
+ * but also still have to honour flush and so on */
+ GST_DEBUG_OBJECT (demux, "... no update");
+ /* bad goto, bad ... */
+ goto next;
+ }
+
+ if (demux->streaming)
+ goto finish;
+
+ next:
+ if (flush) {
+ GstEvent *flush_event = gst_event_new_flush_start ();
+ gst_event_set_seqnum (flush_event, seqnum);
+ GST_DEBUG_OBJECT (demux, "Starting flush");
+ gst_pad_push_event (demux->common.sinkpad, gst_event_ref (flush_event));
+ gst_matroska_demux_send_event (demux, flush_event);
+ } else {
+ GST_DEBUG_OBJECT (demux, "Non-flushing seek, pausing task");
+ gst_pad_pause_task (demux->common.sinkpad);
+ }
+ /* ouch */
+ if (!update) {
+ GST_PAD_STREAM_LOCK (demux->common.sinkpad);
+ pad_locked = TRUE;
+ goto exit;
+ }
+
+ /* now grab the stream lock so that streaming cannot continue; for
+ * non-flushing seeks when the element is in PAUSED this could block
+ * forever */
+ GST_DEBUG_OBJECT (demux, "Waiting for streaming to stop");
+ GST_PAD_STREAM_LOCK (demux->common.sinkpad);
+ pad_locked = TRUE;
+
+ /* pull mode without index can do some scanning */
+ if (!demux->streaming && !entry) {
+ GstEvent *flush_event;
+
+ /* need to stop flushing upstream as we need it next */
+ if (flush) {
+ flush_event = gst_event_new_flush_stop (TRUE);
+ gst_event_set_seqnum (flush_event, seqnum);
+ gst_pad_push_event (demux->common.sinkpad, flush_event);
+ }
+ entry = gst_matroska_demux_search_pos (demux, seekpos);
+ /* keep local copy */
+ if (entry) {
+ scan_entry = *entry;
+ g_free (entry);
+ entry = &scan_entry;
+ } else {
+ GST_DEBUG_OBJECT (demux, "Scan failed to find matching position");
+ if (flush) {
+ flush_event = gst_event_new_flush_stop (TRUE);
+ gst_event_set_seqnum (flush_event, seqnum);
+ gst_matroska_demux_send_event (demux, flush_event);
+ }
+ goto seek_error;
+ }
+ }
+
+ finish:
+ if (keyunit && seeksegment.rate > 0) {
+ GST_DEBUG_OBJECT (demux, "seek to key unit, adjusting segment start from %"
+ GST_TIME_FORMAT " to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (seeksegment.start), GST_TIME_ARGS (entry->time));
+ seeksegment.start = MAX (entry->time, demux->stream_start_time);
+ seeksegment.position = seeksegment.start;
+ seeksegment.time = seeksegment.start - demux->stream_start_time;
+ } else if (keyunit) {
+ GST_DEBUG_OBJECT (demux, "seek to key unit, adjusting segment stop from %"
+ GST_TIME_FORMAT " to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (seeksegment.stop), GST_TIME_ARGS (entry->time));
+ seeksegment.stop = MAX (entry->time, demux->stream_start_time);
+ seeksegment.position = seeksegment.stop;
+ }
+
+ if (demux->streaming) {
+ GST_OBJECT_LOCK (demux);
+ /* track real position we should start at */
+ GST_DEBUG_OBJECT (demux, "storing segment start");
+ demux->requested_seek_time = seeksegment.position;
+ demux->seek_offset = entry->pos + demux->common.ebml_segment_start;
+ GST_OBJECT_UNLOCK (demux);
+ /* need to seek to cluster start to pick up cluster time */
+ /* upstream takes care of flushing and all that
+ * ... and newsegment event handling takes care of the rest */
+ return perform_seek_to_offset (demux, rate,
+ entry->pos + demux->common.ebml_segment_start, seqnum, flags);
+ }
+
+ exit:
+ if (flush) {
+ GstEvent *flush_event = gst_event_new_flush_stop (TRUE);
+ gst_event_set_seqnum (flush_event, seqnum);
+ GST_DEBUG_OBJECT (demux, "Stopping flush");
+ gst_pad_push_event (demux->common.sinkpad, gst_event_ref (flush_event));
+ gst_matroska_demux_send_event (demux, flush_event);
+ }
+
+ GST_OBJECT_LOCK (demux);
+ /* now update the real segment info */
+ GST_DEBUG_OBJECT (demux, "Committing new seek segment");
+ memcpy (&demux->common.segment, &seeksegment, sizeof (GstSegment));
+ GST_OBJECT_UNLOCK (demux);
+
+ /* update some (segment) state */
+ if (!gst_matroska_demux_move_to_entry (demux, entry, TRUE, update))
+ goto seek_error;
+
+ /* notify start of new segment */
+ if (demux->common.segment.flags & GST_SEEK_FLAG_SEGMENT) {
+ GstMessage *msg;
+
+ msg = gst_message_new_segment_start (GST_OBJECT (demux),
+ GST_FORMAT_TIME, demux->common.segment.start);
+ gst_message_set_seqnum (msg, seqnum);
+ gst_element_post_message (GST_ELEMENT (demux), msg);
+ }
+
+ GST_OBJECT_LOCK (demux);
+ if (demux->new_segment)
+ gst_event_unref (demux->new_segment);
+
+ /* On port from 0.10, discarded !update (for segment.update) here, FIXME? */
+ demux->new_segment = gst_event_new_segment (&demux->common.segment);
+ gst_event_set_seqnum (demux->new_segment, seqnum);
+ if (demux->common.segment.rate < 0 && demux->common.segment.stop == -1)
+ demux->to_time = demux->common.segment.position;
+ else
+ demux->to_time = GST_CLOCK_TIME_NONE;
+ demux->segment_seqnum = seqnum;
+ GST_OBJECT_UNLOCK (demux);
+
+ /* restart our task since it might have been stopped when we did the
+ * flush. */
+ gst_pad_start_task (demux->common.sinkpad,
+ (GstTaskFunction) gst_matroska_demux_loop, demux->common.sinkpad, NULL);
+
+ /* streaming can continue now */
+ if (pad_locked) {
+ GST_PAD_STREAM_UNLOCK (demux->common.sinkpad);
+ }
+
+ return TRUE;
+
+ seek_error:
+ {
+ if (pad_locked) {
+ GST_PAD_STREAM_UNLOCK (demux->common.sinkpad);
+ }
+ GST_ELEMENT_ERROR (demux, STREAM, DEMUX, (NULL), ("Got a seek error"));
+ return FALSE;
+ }
+ }
+
+ /*
+ * Handle whether we can perform the seek event or if we have to let the chain
+ * function handle seeks to build the seek indexes first.
+ */
+ static gboolean
+ gst_matroska_demux_handle_seek_push (GstMatroskaDemux * demux, GstPad * pad,
+ GstEvent * event)
+ {
+ GstSeekFlags flags;
+ GstSeekType cur_type, stop_type;
+ GstFormat format;
+ gdouble rate;
+ gint64 cur, stop;
+
+ gst_event_parse_seek (event, &rate, &format, &flags, &cur_type, &cur,
+ &stop_type, &stop);
+
+ /* Directly send the instant-rate-change event here before taking the
+ * stream-lock so that it can be applied as soon as possible */
+ if (flags & GST_SEEK_FLAG_INSTANT_RATE_CHANGE) {
+ guint32 seqnum;
+ GstEvent *ev;
+
+ /* instant rate change only supported if direction does not change. All
+ * other requirements are already checked before creating the seek event
+ * but let's double-check here to be sure */
+ if ((rate > 0 && demux->common.segment.rate < 0) ||
+ (rate < 0 && demux->common.segment.rate > 0) ||
+ cur_type != GST_SEEK_TYPE_NONE ||
+ stop_type != GST_SEEK_TYPE_NONE || (flags & GST_SEEK_FLAG_FLUSH)) {
+ GST_ERROR_OBJECT (demux,
+ "Instant rate change seeks only supported in the "
+ "same direction, without flushing and position change");
+ return FALSE;
+ }
+
+ seqnum = gst_event_get_seqnum (event);
+ ev = gst_event_new_instant_rate_change (rate / demux->common.segment.rate,
+ (GstSegmentFlags) flags);
+ gst_event_set_seqnum (ev, seqnum);
+ gst_matroska_demux_send_event (demux, ev);
+ return TRUE;
+ }
+
+ /* sanity checks */
+
+ /* we can only seek on time */
+ if (format != GST_FORMAT_TIME) {
+ GST_DEBUG_OBJECT (demux, "Can only seek on TIME");
+ return FALSE;
+ }
+
+ if (stop_type != GST_SEEK_TYPE_NONE && stop != GST_CLOCK_TIME_NONE) {
+ GST_DEBUG_OBJECT (demux, "Seek end-time not supported in streaming mode");
+ return FALSE;
+ }
+
+ if (!(flags & GST_SEEK_FLAG_FLUSH)) {
+ GST_DEBUG_OBJECT (demux,
+ "Non-flushing seek not supported in streaming mode");
+ return FALSE;
+ }
+
+ if (flags & GST_SEEK_FLAG_SEGMENT) {
+ GST_DEBUG_OBJECT (demux, "Segment seek not supported in streaming mode");
+ return FALSE;
+ }
+
+ /* check for having parsed index already */
+ if (!demux->common.index_parsed) {
+ gboolean building_index;
+ guint64 offset = 0;
+
+ if (!demux->index_offset) {
+ GST_DEBUG_OBJECT (demux, "no index (location); no seek in push mode");
+ return FALSE;
+ }
+
+ GST_OBJECT_LOCK (demux);
+ /* handle the seek event in the chain function */
+ demux->common.state = GST_MATROSKA_READ_STATE_SEEK;
+ /* no more seek can be issued until state reset to _DATA */
+
+ /* copy the event */
+ if (demux->seek_event)
+ gst_event_unref (demux->seek_event);
+ demux->seek_event = gst_event_ref (event);
+
+ /* set the building_index flag so that only one thread can set up the
+ * structures for index seeking. */
+ building_index = demux->building_index;
+ if (!building_index) {
+ demux->building_index = TRUE;
+ offset = demux->index_offset;
+ }
+ GST_OBJECT_UNLOCK (demux);
+
+ if (!building_index) {
+ /* seek to the first subindex or legacy index */
+ GST_INFO_OBJECT (demux, "Seeking to Cues at %" G_GUINT64_FORMAT, offset);
+ return perform_seek_to_offset (demux, rate, offset,
+ gst_event_get_seqnum (event), GST_SEEK_FLAG_NONE);
+ }
+
+ /* well, we are handling it already */
+ return TRUE;
+ }
+
+ /* delegate to tweaked regular seek */
+ return gst_matroska_demux_handle_seek_event (demux, pad, event);
+ }
+
+ static gboolean
+ gst_matroska_demux_handle_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+ {
+ GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (parent);
+ gboolean res = TRUE;
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEEK:
+ /* no seeking until we are (safely) ready */
+ if (demux->common.state != GST_MATROSKA_READ_STATE_DATA) {
+ GST_DEBUG_OBJECT (demux,
+ "not ready for seeking yet, deferring seek event: %" GST_PTR_FORMAT,
+ event);
+ if (demux->deferred_seek_event)
+ gst_event_unref (demux->deferred_seek_event);
+ demux->deferred_seek_event = event;
+ demux->deferred_seek_pad = pad;
+ return TRUE;
+ }
+
+ {
+ guint32 seqnum = gst_event_get_seqnum (event);
+ if (seqnum == demux->segment_seqnum) {
+ GST_LOG_OBJECT (pad,
+ "Drop duplicated SEEK event seqnum %" G_GUINT32_FORMAT, seqnum);
+ gst_event_unref (event);
+ return TRUE;
+ }
+ }
+
+ if (!demux->streaming)
+ res = gst_matroska_demux_handle_seek_event (demux, pad, event);
+ else
+ res = gst_matroska_demux_handle_seek_push (demux, pad, event);
+ gst_event_unref (event);
+ break;
+
+ case GST_EVENT_QOS:
+ {
+ GstMatroskaTrackContext *context = gst_pad_get_element_private (pad);
+ if (context->type == GST_MATROSKA_TRACK_TYPE_VIDEO) {
+ GstMatroskaTrackVideoContext *videocontext =
+ (GstMatroskaTrackVideoContext *) context;
+ gdouble proportion;
+ GstClockTimeDiff diff;
+ GstClockTime timestamp;
+
+ gst_event_parse_qos (event, NULL, &proportion, &diff, &timestamp);
+
+ GST_OBJECT_LOCK (demux);
+ videocontext->earliest_time = timestamp + diff;
+ GST_OBJECT_UNLOCK (demux);
+ }
+ res = TRUE;
+ gst_event_unref (event);
+ break;
+ }
+
+ case GST_EVENT_TOC_SELECT:
+ {
+ char *uid = NULL;
+ GstTocEntry *entry = NULL;
+ GstEvent *seek_event;
+ gint64 start_pos;
+
+ if (!demux->common.toc) {
+ GST_DEBUG_OBJECT (demux, "no TOC to select");
+ return FALSE;
+ } else {
+ gst_event_parse_toc_select (event, &uid);
+ if (uid != NULL) {
+ GST_OBJECT_LOCK (demux);
+ entry = gst_toc_find_entry (demux->common.toc, uid);
+ if (entry == NULL) {
+ GST_OBJECT_UNLOCK (demux);
+ GST_WARNING_OBJECT (demux, "no TOC entry with given UID: %s", uid);
+ res = FALSE;
+ } else {
+ gst_toc_entry_get_start_stop_times (entry, &start_pos, NULL);
+ GST_OBJECT_UNLOCK (demux);
+ seek_event = gst_event_new_seek (1.0,
+ GST_FORMAT_TIME,
+ GST_SEEK_FLAG_FLUSH,
+ GST_SEEK_TYPE_SET, start_pos, GST_SEEK_TYPE_SET, -1);
+ res = gst_matroska_demux_handle_seek_event (demux, pad, seek_event);
+ gst_event_unref (seek_event);
+ }
+ g_free (uid);
+ } else {
+ GST_WARNING_OBJECT (demux, "received empty TOC select event");
+ res = FALSE;
+ }
+ }
+ gst_event_unref (event);
+ break;
+ }
+
+ /* events we don't need to handle */
+ case GST_EVENT_NAVIGATION:
+ gst_event_unref (event);
+ res = FALSE;
+ break;
+
+ case GST_EVENT_LATENCY:
+ default:
+ res = gst_pad_push_event (demux->common.sinkpad, event);
+ break;
+ }
+
+ return res;
+ }
+
+ static gboolean
+ gst_matroska_demux_handle_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
+ {
+ GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (parent);
+ gboolean res = FALSE;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_BITRATE:
+ {
+ if (G_UNLIKELY (demux->cached_length == G_MAXUINT64 ||
+ demux->common.offset >= demux->cached_length)) {
+ demux->cached_length =
+ gst_matroska_read_common_get_length (&demux->common);
+ }
+
+ if (demux->cached_length < G_MAXUINT64
+ && demux->common.segment.duration > 0) {
+ /* TODO: better results based on ranges/index tables */
+ guint bitrate =
+ gst_util_uint64_scale (8 * demux->cached_length, GST_SECOND,
+ demux->common.segment.duration);
+
+ GST_LOG_OBJECT (demux, "bitrate query byte length: %" G_GUINT64_FORMAT
+ " duration %" GST_TIME_FORMAT " resulting in a bitrate of %u",
+ demux->cached_length,
+ GST_TIME_ARGS (demux->common.segment.duration), bitrate);
+
+ gst_query_set_bitrate (query, bitrate);
+ res = TRUE;
+ }
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, (GstObject *) demux, query);
+ break;
+ }
+
+ return res;
+ }
+
+ static GstFlowReturn
+ gst_matroska_demux_seek_to_previous_keyframe (GstMatroskaDemux * demux)
+ {
+ GstFlowReturn ret = GST_FLOW_EOS;
+ gboolean done = TRUE;
+ gint i;
+
+ g_return_val_if_fail (demux->seek_index, GST_FLOW_EOS);
+ g_return_val_if_fail (demux->seek_entry < demux->seek_index->len,
+ GST_FLOW_EOS);
+
+ GST_DEBUG_OBJECT (demux, "locating previous keyframe");
+
+ if (!demux->seek_entry) {
+ GST_DEBUG_OBJECT (demux, "no earlier index entry");
+ goto exit;
+ }
+
+ for (i = 0; i < demux->common.src->len; i++) {
+ GstMatroskaTrackContext *stream = g_ptr_array_index (demux->common.src, i);
+
+ GST_DEBUG_OBJECT (demux, "segment start %" GST_TIME_FORMAT
+ ", stream %d at %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (demux->common.segment.start), stream->index,
+ GST_TIME_ARGS (stream->from_time));
+ if (GST_CLOCK_TIME_IS_VALID (stream->from_time)) {
+ if (stream->from_time > demux->common.segment.start) {
+ GST_DEBUG_OBJECT (demux, "stream %d not finished yet", stream->index);
+ done = FALSE;
+ }
+ } else {
+ /* nothing pushed for this stream; likely the seek entry did not
+ * start at a keyframe, so everything was skipped.
+ * We need an earlier entry then */
+ done = FALSE;
+ }
+ }
+
+ if (!done) {
+ GstMatroskaIndex *entry;
+
+ entry = &g_array_index (demux->seek_index, GstMatroskaIndex,
+ --demux->seek_entry);
+ if (!gst_matroska_demux_move_to_entry (demux, entry, FALSE, TRUE))
+ goto exit;
+
+ ret = GST_FLOW_OK;
+ }
+
+ exit:
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_matroska_demux_parse_tracks (GstMatroskaDemux * demux, GstEbmlRead * ebml)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint32 id;
+
+ DEBUG_ELEMENT_START (demux, ebml, "Tracks");
+
+ if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+ DEBUG_ELEMENT_STOP (demux, ebml, "Tracks", ret);
+ return ret;
+ }
+
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ switch (id) {
+ /* one track within the "all-tracks" header */
+ case GST_MATROSKA_ID_TRACKENTRY:{
+ GstMatroskaTrackContext *track;
+ ret = gst_matroska_demux_parse_stream (demux, ebml, &track);
+ if (track != NULL) {
+ if (gst_matroska_read_common_tracknumber_unique (&demux->common,
+ track->num)) {
+ gst_matroska_demux_add_stream (demux, track);
+ } else {
+ GST_ERROR_OBJECT (demux,
+ "TrackNumber %" G_GUINT64_FORMAT " is not unique", track->num);
+ ret = GST_FLOW_ERROR;
+ gst_matroska_track_free (track);
+ track = NULL;
+ }
+ }
+ break;
+ }
+
+ default:
+ ret = gst_matroska_read_common_parse_skip (&demux->common, ebml,
+ "Track", id);
+ break;
+ }
+ }
+ DEBUG_ELEMENT_STOP (demux, ebml, "Tracks", ret);
+
+ demux->tracks_parsed = TRUE;
+ GST_DEBUG_OBJECT (demux, "signaling no more pads");
+ gst_element_no_more_pads (GST_ELEMENT (demux));
+
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_matroska_demux_update_tracks (GstMatroskaDemux * demux, GstEbmlRead * ebml)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint num_tracks_found = 0;
+ guint32 id;
+
+ GST_INFO_OBJECT (demux, "Reparsing Tracks element");
+
+ DEBUG_ELEMENT_START (demux, ebml, "Tracks");
+
+ if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+ DEBUG_ELEMENT_STOP (demux, ebml, "Tracks", ret);
+ return ret;
+ }
+
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ switch (id) {
+ /* one track within the "all-tracks" header */
+ case GST_MATROSKA_ID_TRACKENTRY:{
+ GstMatroskaTrackContext *new_track;
+ gint old_track_index;
+ GstMatroskaTrackContext *old_track;
+ ret = gst_matroska_demux_parse_stream (demux, ebml, &new_track);
+ if (new_track == NULL)
+ break;
+ num_tracks_found++;
+
+ if (gst_matroska_read_common_tracknumber_unique (&demux->common,
+ new_track->num)) {
+ GST_ERROR_OBJECT (demux,
+ "Unexpected new TrackNumber: %" G_GUINT64_FORMAT, new_track->num);
+ goto track_mismatch_error;
+ }
+
+ old_track_index =
+ gst_matroska_read_common_stream_from_num (&demux->common,
+ new_track->num);
+ g_assert (old_track_index != -1);
+ old_track = g_ptr_array_index (demux->common.src, old_track_index);
+
+ if (old_track->type != new_track->type) {
+ GST_ERROR_OBJECT (demux,
+ "Mismatch reparsing track %" G_GUINT64_FORMAT
+ " on track type. Expected %d, found %d", new_track->num,
+ old_track->type, new_track->type);
+ goto track_mismatch_error;
+ }
+
+ if (g_strcmp0 (old_track->codec_id, new_track->codec_id) != 0) {
+ GST_ERROR_OBJECT (demux,
+ "Mismatch reparsing track %" G_GUINT64_FORMAT
+ " on codec id. Expected '%s', found '%s'", new_track->num,
+ old_track->codec_id, new_track->codec_id);
+ goto track_mismatch_error;
+ }
+
+ /* The new track matches the old track. No problems on our side.
+ * Let's make it replace the old track. */
+ new_track->pad = old_track->pad;
+ new_track->index = old_track->index;
+ new_track->pos = old_track->pos;
+ g_ptr_array_index (demux->common.src, old_track_index) = new_track;
+ gst_pad_set_element_private (new_track->pad, new_track);
+
+ if (!gst_caps_is_equal (old_track->caps, new_track->caps)) {
+ gst_pad_set_caps (new_track->pad, new_track->caps);
+ }
+ gst_caps_replace (&old_track->caps, NULL);
+
+ if (!gst_tag_list_is_equal (old_track->tags, new_track->tags)) {
+ GST_DEBUG_OBJECT (old_track->pad, "Sending tags %p: %"
+ GST_PTR_FORMAT, new_track->tags, new_track->tags);
+ gst_pad_push_event (new_track->pad,
+ gst_event_new_tag (gst_tag_list_copy (new_track->tags)));
+ }
+
+ gst_matroska_track_free (old_track);
+ break;
+
+ track_mismatch_error:
+ gst_matroska_track_free (new_track);
+ new_track = NULL;
+ ret = GST_FLOW_ERROR;
+ break;
+ }
+
+ default:
+ ret = gst_matroska_read_common_parse_skip (&demux->common, ebml,
+ "Track", id);
+ break;
+ }
+ }
+ DEBUG_ELEMENT_STOP (demux, ebml, "Tracks", ret);
+
+ if (ret != GST_FLOW_ERROR && demux->common.num_streams != num_tracks_found) {
+ GST_ERROR_OBJECT (demux,
+ "Mismatch on the number of tracks. Expected %du tracks, found %du",
+ demux->common.num_streams, num_tracks_found);
+ ret = GST_FLOW_ERROR;
+ }
+
+ return ret;
+ }
+
+ /*
+ * Read signed/unsigned "EBML" numbers.
+ * Return: number of bytes processed.
+ */
+
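+ /* An EBML number begins with a length descriptor: the position of the
+ * first set bit in the first byte gives the total width in bytes, and
+ * that marker bit is masked out of the value. For example:
+ * 0x81 -> 1 byte, value 1
+ * 0x40 0x02 -> 2 bytes, value 2
+ * An all-ones payload is the reserved "unknown" value (G_MAXUINT64). */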
+ static gint
+ gst_matroska_ebmlnum_uint (guint8 * data, guint size, guint64 * num)
+ {
+ gint len_mask = 0x80, read = 1, n = 1, num_ffs = 0;
+ guint64 total;
+
+ if (size == 0) {
+ return -1;
+ }
+
+ total = data[0];
+ while (read <= 8 && !(total & len_mask)) {
+ read++;
+ len_mask >>= 1;
+ }
+ if (read > 8)
+ return -1;
+
+ if ((total &= (len_mask - 1)) == len_mask - 1)
+ num_ffs++;
+ if (size < read)
+ return -1;
+ while (n < read) {
+ if (data[n] == 0xff)
+ num_ffs++;
+ total = (total << 8) | data[n];
+ n++;
+ }
+
+ if (read == num_ffs && total != 0)
+ *num = G_MAXUINT64;
+ else
+ *num = total;
+
+ return read;
+ }
+
+ static gint
+ gst_matroska_ebmlnum_sint (guint8 * data, guint size, gint64 * num)
+ {
+ guint64 unum;
+ gint res;
+
+ /* read as unsigned number first */
+ if ((res = gst_matroska_ebmlnum_uint (data, size, &unum)) < 0)
+ return -1;
+
+ /* make signed */
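+ /* signed EBML numbers are stored with a bias of 2^(7*len-1)-1,
+ * so e.g. a 1-byte number covers -63..63 */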
+ if (unum == G_MAXUINT64)
+ *num = G_MAXINT64;
+ else
+ *num = unum - ((1 << ((7 * res) - 1)) - 1);
+
+ return res;
+ }
+
+ /*
+ * Mostly used for subtitles. We push gap events for each
+ * lagging stream to make sure we don't deadlock.
+ */
+
+ static void
+ gst_matroska_demux_sync_streams (GstMatroskaDemux * demux)
+ {
+ GstClockTime gap_threshold;
+ gint stream_nr;
+
+ GST_OBJECT_LOCK (demux);
+
+ GST_LOG_OBJECT (demux, "Sync to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (demux->common.segment.position));
+
+ g_assert (demux->common.num_streams == demux->common.src->len);
+ for (stream_nr = 0; stream_nr < demux->common.src->len; stream_nr++) {
+ GstMatroskaTrackContext *context;
+
+ context = g_ptr_array_index (demux->common.src, stream_nr);
+
+ GST_LOG_OBJECT (demux,
+ "Checking for resync on stream %d (%" GST_TIME_FORMAT ")", stream_nr,
+ GST_TIME_ARGS (context->pos));
+
+ /* Only send gap events on non-subtitle streams if lagging way behind.
+ * The 0.5 second threshold for subtitle streams is also quite random. */
+ if (context->type == GST_MATROSKA_TRACK_TYPE_SUBTITLE)
+ gap_threshold = GST_SECOND / 2;
+ else
+ gap_threshold = 3 * GST_SECOND;
+
+ /* Lag need only be considered if we have advanced into requested segment */
+ if (GST_CLOCK_TIME_IS_VALID (context->pos) &&
+ GST_CLOCK_TIME_IS_VALID (demux->common.segment.position) &&
+ demux->common.segment.position > demux->common.segment.start &&
+ context->pos + gap_threshold < demux->common.segment.position) {
+
+ GstEvent *event;
+ guint64 start = context->pos;
+ guint64 stop = demux->common.segment.position - gap_threshold;
+
+ GST_DEBUG_OBJECT (demux,
+ "Synchronizing stream %d with other by advancing time from %"
+ GST_TIME_FORMAT " to %" GST_TIME_FORMAT, stream_nr,
+ GST_TIME_ARGS (start), GST_TIME_ARGS (stop));
+
+ context->pos = stop;
+
+ event = gst_event_new_gap (start, stop - start);
+ GST_OBJECT_UNLOCK (demux);
+ gst_pad_push_event (context->pad, event);
+ GST_OBJECT_LOCK (demux);
+ }
+ }
+
+ GST_OBJECT_UNLOCK (demux);
+ }
+
+ static GstFlowReturn
+ gst_matroska_demux_push_stream_headers (GstMatroskaDemux * demux,
+ GstMatroskaTrackContext * stream)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ gint i, num;
+
+ num = gst_buffer_list_length (stream->stream_headers);
+ for (i = 0; i < num; ++i) {
+ GstBuffer *buf;
+
+ buf = gst_buffer_list_get (stream->stream_headers, i);
+ buf = gst_buffer_copy (buf);
+
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_HEADER);
+
+ if (stream->set_discont) {
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
+ stream->set_discont = FALSE;
+ } else {
+ GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
+ }
+
+ /* push out all headers in one go and use last flow return */
+ ret = gst_pad_push (stream->pad, buf);
+ }
+
+ /* don't need these any longer */
+ gst_buffer_list_unref (stream->stream_headers);
+ stream->stream_headers = NULL;
+
+ /* combine flows */
+ ret = gst_flow_combiner_update_flow (demux->flowcombiner, ret);
+
+ return ret;
+ }
+
+ static void
+ gst_matroska_demux_push_dvd_clut_change_event (GstMatroskaDemux * demux,
+ GstMatroskaTrackContext * stream)
+ {
+ gchar *buf, *start;
+
+ g_assert (!strcmp (stream->codec_id, GST_MATROSKA_CODEC_ID_SUBTITLE_VOBSUB));
+
+ if (!stream->codec_priv)
+ return;
+
+ /* ideally, VobSub private data should be parsed and stored more
+ * conveniently elsewhere, but for now we are only interested in a small part */
+
+ /* make sure we have terminating 0 */
+ buf = g_strndup (stream->codec_priv, stream->codec_priv_size);
+
+ /* just locate and parse palette part */
+ start = strstr (buf, "palette:");
+ if (start) {
+ gint i;
+ guint32 clut[16];
+ guint32 col;
+ guint8 r, g, b, y, u, v;
+
+ start += 8;
+ while (g_ascii_isspace (*start))
+ start++;
+ for (i = 0; i < 16; i++) {
+ if (sscanf (start, "%06x", &col) != 1)
+ break;
+ start += 6;
+ while ((*start == ',') || g_ascii_isspace (*start))
+ start++;
+ /* sigh, need to convert this from vobsub pseudo-RGB to YUV */
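+ /* an approximate RGB->YCbCr transform, with luma scaled into the
+ * 16..235 range; each CLUT entry below packs the result as 0x00YYUUVV */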
+ r = (col >> 16) & 0xff;
+ g = (col >> 8) & 0xff;
+ b = col & 0xff;
+ y = CLAMP ((0.1494 * r + 0.6061 * g + 0.2445 * b) * 219 / 255 + 16, 0,
+ 255);
+ u = CLAMP (0.6066 * r - 0.4322 * g - 0.1744 * b + 128, 0, 255);
+ v = CLAMP (-0.08435 * r - 0.3422 * g + 0.4266 * b + 128, 0, 255);
+ clut[i] = (y << 16) | (u << 8) | v;
+ }
+
+ /* got them all without problems; build and send event */
+ if (i == 16) {
+ GstStructure *s;
+
+ s = gst_structure_new ("application/x-gst-dvd", "event", G_TYPE_STRING,
+ "dvd-spu-clut-change", "clut00", G_TYPE_INT, clut[0], "clut01",
+ G_TYPE_INT, clut[1], "clut02", G_TYPE_INT, clut[2], "clut03",
+ G_TYPE_INT, clut[3], "clut04", G_TYPE_INT, clut[4], "clut05",
+ G_TYPE_INT, clut[5], "clut06", G_TYPE_INT, clut[6], "clut07",
+ G_TYPE_INT, clut[7], "clut08", G_TYPE_INT, clut[8], "clut09",
+ G_TYPE_INT, clut[9], "clut10", G_TYPE_INT, clut[10], "clut11",
+ G_TYPE_INT, clut[11], "clut12", G_TYPE_INT, clut[12], "clut13",
+ G_TYPE_INT, clut[13], "clut14", G_TYPE_INT, clut[14], "clut15",
+ G_TYPE_INT, clut[15], NULL);
+
+ gst_pad_push_event (stream->pad,
+ gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM_STICKY, s));
+ }
+ }
+ g_free (buf);
+ }
+
+ static void
+ gst_matroska_demux_push_codec_data_all (GstMatroskaDemux * demux)
+ {
+ gint stream_nr;
+
+ g_assert (demux->common.num_streams == demux->common.src->len);
+ for (stream_nr = 0; stream_nr < demux->common.src->len; stream_nr++) {
+ GstMatroskaTrackContext *stream;
+
+ stream = g_ptr_array_index (demux->common.src, stream_nr);
+
+ if (stream->send_stream_headers) {
+ if (stream->stream_headers != NULL) {
+ gst_matroska_demux_push_stream_headers (demux, stream);
+ } else {
+ /* FIXME: perhaps we can just disable and skip this stream then */
+ GST_ELEMENT_ERROR (demux, STREAM, DECODE, (NULL),
+ ("Failed to extract stream headers from codec private data"));
+ }
+ stream->send_stream_headers = FALSE;
+ }
+
+ if (stream->send_dvd_event) {
+ gst_matroska_demux_push_dvd_clut_change_event (demux, stream);
+ /* FIXME: should we send this event again after (flushing) seek ? */
+ stream->send_dvd_event = FALSE;
+ }
+ }
+
+ }
+
+ static GstFlowReturn
+ gst_matroska_demux_add_mpeg_seq_header (GstElement * element,
+ GstMatroskaTrackContext * stream, GstBuffer ** buf)
+ {
+ guint8 *seq_header;
+ guint seq_header_len;
+ guint32 header, tmp;
+
+ if (stream->codec_state) {
+ seq_header = stream->codec_state;
+ seq_header_len = stream->codec_state_size;
+ } else if (stream->codec_priv) {
+ seq_header = stream->codec_priv;
+ seq_header_len = stream->codec_priv_size;
+ } else {
+ return GST_FLOW_OK;
+ }
+
+ /* Sequence header only needed for keyframes */
+ if (GST_BUFFER_FLAG_IS_SET (*buf, GST_BUFFER_FLAG_DELTA_UNIT))
+ return GST_FLOW_OK;
+
+ if (gst_buffer_get_size (*buf) < 4)
+ return GST_FLOW_OK;
+
+ gst_buffer_extract (*buf, 0, &tmp, sizeof (guint32));
+ header = GUINT32_FROM_BE (tmp);
+
+ /* Sequence start code: prepend the stored sequence header if it's missing */
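+ /* 0x000001b3 is the MPEG-1/2 video sequence_header start code */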
+ if (header != 0x000001b3) {
+ GstBuffer *newbuf;
+
+ GST_DEBUG_OBJECT (element, "Prepending MPEG sequence header");
+
+ newbuf = gst_buffer_new_memdup (seq_header, seq_header_len);
+
+ gst_buffer_copy_into (newbuf, *buf, GST_BUFFER_COPY_TIMESTAMPS |
+ GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_MEMORY, 0,
+ gst_buffer_get_size (*buf));
+
+ gst_buffer_unref (*buf);
+ *buf = newbuf;
+ }
+
+ return GST_FLOW_OK;
+ }
+
+ static GstFlowReturn
+ gst_matroska_demux_add_wvpk_header (GstElement * element,
+ GstMatroskaTrackContext * stream, GstBuffer ** buf)
+ {
+ GstMatroskaTrackAudioContext *audiocontext =
+ (GstMatroskaTrackAudioContext *) stream;
+ GstBuffer *newbuf = NULL;
+ GstMapInfo map, outmap;
+ guint8 *buf_data, *data;
+ Wavpack4Header wvh;
+
+ wvh.ck_id[0] = 'w';
+ wvh.ck_id[1] = 'v';
+ wvh.ck_id[2] = 'p';
+ wvh.ck_id[3] = 'k';
+
+ wvh.version = GST_READ_UINT16_LE (stream->codec_priv);
+ wvh.track_no = 0;
+ wvh.index_no = 0;
+ wvh.total_samples = -1;
+ wvh.block_index = audiocontext->wvpk_block_index;
+
+ if (audiocontext->channels <= 2) {
+ guint32 block_samples, tmp;
+ gsize size = gst_buffer_get_size (*buf);
+
+ if (size < 4) {
+ GST_ERROR_OBJECT (element, "Too small wavpack buffer");
+ gst_buffer_unmap (*buf, &map);
+ return GST_FLOW_ERROR;
+ }
+
+ gst_buffer_extract (*buf, 0, &tmp, sizeof (guint32));
+ block_samples = GUINT32_FROM_LE (tmp);
+ /* we need to reconstruct the header of the wavpack block */
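+ /* 32-byte WavPack4 header layout:
+ * 0 ckID 'wvpk', 4 ckSize (LE, block size - 8), 8 version (LE),
+ * 10 track_no, 11 index_no, 12 total_samples, 16 block_index,
+ * 20 block_samples, 24 flags, 28 crc.
+ * block_samples, flags and crc already lead the Matroska payload,
+ * so only the first 20 bytes need to be rebuilt here. */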
+
+ /* -20 because ck_size is the size of the full wavpack block minus 8,
+ * and the buffer holds the wavpack block minus its first 20 header
+ * bytes (block_samples, flags and crc, the last three guint32 of the
+ * header, are already part of the buffer) */
+ wvh.ck_size = size + WAVPACK4_HEADER_SIZE - 20;
+
+ /* block_samples, flags and crc are already in the buffer */
+ newbuf = gst_buffer_new_allocate (NULL, WAVPACK4_HEADER_SIZE - 12, NULL);
+
+ gst_buffer_map (newbuf, &outmap, GST_MAP_WRITE);
+ data = outmap.data;
+ data[0] = 'w';
+ data[1] = 'v';
+ data[2] = 'p';
+ data[3] = 'k';
+ GST_WRITE_UINT32_LE (data + 4, wvh.ck_size);
+ GST_WRITE_UINT16_LE (data + 8, wvh.version);
+ GST_WRITE_UINT8 (data + 10, wvh.track_no);
+ GST_WRITE_UINT8 (data + 11, wvh.index_no);
+ GST_WRITE_UINT32_LE (data + 12, wvh.total_samples);
+ GST_WRITE_UINT32_LE (data + 16, wvh.block_index);
+ gst_buffer_unmap (newbuf, &outmap);
+
+ /* Append data from buf: */
+ gst_buffer_copy_into (newbuf, *buf, GST_BUFFER_COPY_TIMESTAMPS |
+ GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_MEMORY, 0, size);
+
+ gst_buffer_unref (*buf);
+ *buf = newbuf;
+ audiocontext->wvpk_block_index += block_samples;
+ } else {
+ guint8 *outdata = NULL;
+ gsize buf_size, size;
+ guint32 block_samples, flags, crc, blocksize;
+ GstAdapter *adapter;
+
+ adapter = gst_adapter_new ();
+
+ gst_buffer_map (*buf, &map, GST_MAP_READ);
+ buf_data = map.data;
+ buf_size = map.size;
+
+ if (buf_size < 4) {
+ GST_ERROR_OBJECT (element, "Too small wavpack buffer");
+ gst_buffer_unmap (*buf, &map);
+ g_object_unref (adapter);
+ return GST_FLOW_ERROR;
+ }
+
+ data = buf_data;
+ size = buf_size;
+
+ block_samples = GST_READ_UINT32_LE (data);
+ data += 4;
+ size -= 4;
+
+ while (size > 12) {
+ flags = GST_READ_UINT32_LE (data);
+ data += 4;
+ size -= 4;
+ crc = GST_READ_UINT32_LE (data);
+ data += 4;
+ size -= 4;
+ blocksize = GST_READ_UINT32_LE (data);
+ data += 4;
+ size -= 4;
+
+ if (blocksize == 0 || size < blocksize) {
+ GST_ERROR_OBJECT (element, "Too small wavpack buffer");
+ gst_buffer_unmap (*buf, &map);
+ g_object_unref (adapter);
+ return GST_FLOW_ERROR;
+ }
+
+ g_assert (newbuf == NULL);
+
+ newbuf =
+ gst_buffer_new_allocate (NULL, WAVPACK4_HEADER_SIZE + blocksize,
+ NULL);
+ gst_buffer_map (newbuf, &outmap, GST_MAP_WRITE);
+ outdata = outmap.data;
+
+ outdata[0] = 'w';
+ outdata[1] = 'v';
+ outdata[2] = 'p';
+ outdata[3] = 'k';
+ outdata += 4;
+
+ GST_WRITE_UINT32_LE (outdata, blocksize + WAVPACK4_HEADER_SIZE - 8);
+ GST_WRITE_UINT16_LE (outdata + 4, wvh.version);
+ GST_WRITE_UINT8 (outdata + 6, wvh.track_no);
+ GST_WRITE_UINT8 (outdata + 7, wvh.index_no);
+ GST_WRITE_UINT32_LE (outdata + 8, wvh.total_samples);
+ GST_WRITE_UINT32_LE (outdata + 12, wvh.block_index);
+ GST_WRITE_UINT32_LE (outdata + 16, block_samples);
+ GST_WRITE_UINT32_LE (outdata + 20, flags);
+ GST_WRITE_UINT32_LE (outdata + 24, crc);
+ outdata += 28;
+
+ memcpy (outdata, data, blocksize);
+
+ gst_buffer_unmap (newbuf, &outmap);
+ gst_adapter_push (adapter, newbuf);
+ newbuf = NULL;
+
+ data += blocksize;
+ size -= blocksize;
+ }
+ gst_buffer_unmap (*buf, &map);
+
+ newbuf = gst_adapter_take_buffer (adapter, gst_adapter_available (adapter));
+ g_object_unref (adapter);
+
+ gst_buffer_copy_into (newbuf, *buf,
+ GST_BUFFER_COPY_TIMESTAMPS | GST_BUFFER_COPY_FLAGS, 0, -1);
+ gst_buffer_unref (*buf);
+ *buf = newbuf;
+
+ audiocontext->wvpk_block_index += block_samples;
+ }
+
+ return GST_FLOW_OK;
+ }
+
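+ /* ProRes frames in Matroska lack the 8-byte atom header (32-bit
+ * big-endian frame size followed by the fourcc 'icpf') that downstream
+ * generally expects, so prepend one here */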
+ static GstFlowReturn
+ gst_matroska_demux_add_prores_header (GstElement * element,
+ GstMatroskaTrackContext * stream, GstBuffer ** buf)
+ {
+ GstBuffer *newbuf = gst_buffer_new_allocate (NULL, 8, NULL);
+ GstMapInfo map;
+ guint32 frame_size;
+
+ if (!gst_buffer_map (newbuf, &map, GST_MAP_WRITE)) {
+ GST_ERROR ("Failed to map newly allocated buffer");
+ return GST_FLOW_ERROR;
+ }
+
+ frame_size = gst_buffer_get_size (*buf);
+
+ GST_WRITE_UINT32_BE (map.data, frame_size);
+ map.data[4] = 'i';
+ map.data[5] = 'c';
+ map.data[6] = 'p';
+ map.data[7] = 'f';
+
+ gst_buffer_unmap (newbuf, &map);
+ *buf = gst_buffer_append (newbuf, *buf);
+
+ return GST_FLOW_OK;
+ }
+
+ /* @text must be null-terminated */
+ static gboolean
+ gst_matroska_demux_subtitle_chunk_has_tag (GstElement * element,
+ const gchar * text)
+ {
+ gchar *tag;
+
+ g_return_val_if_fail (text != NULL, FALSE);
+
+ /* yes, this might all lead to false positives ... */
+ tag = (gchar *) text;
+ while ((tag = strchr (tag, '<'))) {
+ tag++;
+ if (*tag != '\0' && *(tag + 1) == '>') {
+ /* some common convenience ones */
+ /* maybe any character will do here ? */
+ switch (*tag) {
+ case 'b':
+ case 'i':
+ case 'u':
+ case 's':
+ return TRUE;
+ default:
+ return FALSE;
+ }
+ }
+ }
+
+ if (strstr (text, "<span"))
+ return TRUE;
+
+ return FALSE;
+ }
+
+ static GstFlowReturn
+ gst_matroska_demux_check_subtitle_buffer (GstElement * element,
+ GstMatroskaTrackContext * stream, GstBuffer ** buf)
+ {
+ GstMatroskaTrackSubtitleContext *sub_stream;
+ const gchar *encoding;
+ GError *err = NULL;
+ GstBuffer *newbuf;
+ gchar *utf8;
+ GstMapInfo map;
+ gboolean needs_unmap = TRUE;
+
+ sub_stream = (GstMatroskaTrackSubtitleContext *) stream;
+
+ if (!gst_buffer_get_size (*buf) || !gst_buffer_map (*buf, &map, GST_MAP_READ))
+ return GST_FLOW_OK;
+
+ /* The subtitle buffer we push out should not include a NUL terminator as
+ * part of the data. */
+ if (map.data[map.size - 1] == '\0') {
+ gst_buffer_set_size (*buf, map.size - 1);
+ gst_buffer_unmap (*buf, &map);
+ gst_buffer_map (*buf, &map, GST_MAP_READ);
+ }
+
+ if (!sub_stream->invalid_utf8) {
+ if (g_utf8_validate ((gchar *) map.data, map.size, NULL)) {
+ goto next;
+ }
+ GST_WARNING_OBJECT (element, "subtitle stream %" G_GUINT64_FORMAT
+ " is not valid UTF-8, this is broken according to the matroska"
+ " specification", stream->num);
+ sub_stream->invalid_utf8 = TRUE;
+ }
+
+ /* file with broken non-UTF8 subtitle, do the best we can do to fix it */
+ encoding = g_getenv ("GST_SUBTITLE_ENCODING");
+ if (encoding == NULL || *encoding == '\0') {
+ /* if local encoding is UTF-8 and no encoding specified
+ * via the environment variable, assume ISO-8859-15 */
+ if (g_get_charset (&encoding)) {
+ encoding = "ISO-8859-15";
+ }
+ }
+
+ utf8 =
+ g_convert_with_fallback ((gchar *) map.data, map.size, "UTF-8", encoding,
+ (char *) "*", NULL, NULL, &err);
+
+ if (err) {
+ GST_LOG_OBJECT (element, "could not convert string from '%s' to UTF-8: %s",
+ encoding, err->message);
+ g_error_free (err);
+ g_free (utf8);
+
+ /* invalid input encoding, fall back to ISO-8859-15 (always succeeds) */
+ encoding = "ISO-8859-15";
+ utf8 =
+ g_convert_with_fallback ((gchar *) map.data, map.size, "UTF-8",
+ encoding, (char *) "*", NULL, NULL, NULL);
+ }
+
+ GST_LOG_OBJECT (element, "converted subtitle text from %s to UTF-8 %s",
+ encoding, (err) ? "(using ISO-8859-15 as fallback)" : "");
+
+ if (utf8 == NULL)
+ utf8 = g_strdup ("invalid subtitle");
+
+ newbuf = gst_buffer_new_wrapped (utf8, strlen (utf8));
+ gst_buffer_unmap (*buf, &map);
+ gst_buffer_copy_into (newbuf, *buf,
+ GST_BUFFER_COPY_TIMESTAMPS | GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_META,
+ 0, -1);
+ gst_buffer_unref (*buf);
+
+ *buf = newbuf;
+ gst_buffer_map (*buf, &map, GST_MAP_READ);
+
+ next:
+
+ if (sub_stream->check_markup) {
+ /* caps claim markup text, so we need to escape text,
+ * except if text is already markup and then needs no further escaping */
+ sub_stream->seen_markup_tag = sub_stream->seen_markup_tag ||
+ gst_matroska_demux_subtitle_chunk_has_tag (element, (gchar *) map.data);
+
+ if (!sub_stream->seen_markup_tag) {
+ utf8 = g_markup_escape_text ((gchar *) map.data, map.size);
+
+ newbuf = gst_buffer_new_wrapped (utf8, strlen (utf8));
+ gst_buffer_unmap (*buf, &map);
+ gst_buffer_copy_into (newbuf, *buf,
+ GST_BUFFER_COPY_TIMESTAMPS | GST_BUFFER_COPY_FLAGS |
+ GST_BUFFER_COPY_META, 0, -1);
+ gst_buffer_unref (*buf);
+
+ *buf = newbuf;
+ needs_unmap = FALSE;
+ }
+ }
+
+ if (needs_unmap)
+ gst_buffer_unmap (*buf, &map);
+
+ return GST_FLOW_OK;
+ }
+
+ static GstFlowReturn
+ gst_matroska_demux_check_aac (GstElement * element,
+ GstMatroskaTrackContext * stream, GstBuffer ** buf)
+ {
+ guint8 data[2];
+ guint size;
+
+ gst_buffer_extract (*buf, 0, data, 2);
+ size = gst_buffer_get_size (*buf);
+
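+ /* 0xfff in the first 12 bits is the ADTS syncword */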
+ if (size > 2 && data[0] == 0xff && (data[1] >> 4 == 0x0f)) {
+ GstStructure *s;
+
+ /* ADTS data: remove codec_data from the caps,
+ * but still assume the stream is at least parsed */
+ stream->caps = gst_caps_make_writable (stream->caps);
+ s = gst_caps_get_structure (stream->caps, 0);
+ g_assert (s);
+ gst_structure_remove_field (s, "codec_data");
+ gst_pad_set_caps (stream->pad, stream->caps);
+ GST_DEBUG_OBJECT (element, "ADTS AAC audio data; removing codec-data, "
+ "new caps: %" GST_PTR_FORMAT, stream->caps);
+ }
+
+ /* disable subsequent checking */
+ stream->postprocess_frame = NULL;
+
+ return GST_FLOW_OK;
+ }
+
+ static GstBuffer *
+ gst_matroska_demux_align_buffer (GstMatroskaDemux * demux,
+ GstBuffer * buffer, gsize alignment)
+ {
+ GstMapInfo map;
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+ if (map.size < sizeof (guintptr)) {
+ gst_buffer_unmap (buffer, &map);
+ return buffer;
+ }
+
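+ /* the mask test below assumes @alignment is a power of two */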
+ if (((guintptr) map.data) & (alignment - 1)) {
+ GstBuffer *new_buffer;
+ GstAllocationParams params = { 0, alignment - 1, 0, 0, };
+
+ new_buffer = gst_buffer_new_allocate (NULL,
+ gst_buffer_get_size (buffer), &params);
+
+ /* Copy data "by hand", so ensure alignment is kept: */
+ gst_buffer_fill (new_buffer, 0, map.data, map.size);
+
+ gst_buffer_copy_into (new_buffer, buffer, GST_BUFFER_COPY_METADATA, 0, -1);
+ GST_DEBUG_OBJECT (demux,
+ "We want output aligned on %" G_GSIZE_FORMAT ", reallocated",
+ alignment);
+
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+
+ return new_buffer;
+ }
+
+ gst_buffer_unmap (buffer, &map);
+ return buffer;
+ }
+
+ typedef struct
+ {
+ guint8 *data;
+ gsize size;
+ guint64 id;
+ } BlockAddition;
+
+ static GstFlowReturn
+ gst_matroska_demux_parse_blockmore (GstMatroskaDemux * demux,
+ GstEbmlRead * ebml, GQueue * additions)
+ {
+ GstFlowReturn ret;
+ guint32 id;
+ guint64 block_id = 1;
+ guint64 datalen = 0;
+ guint8 *data = NULL;
+
+ ret = gst_ebml_read_master (ebml, &id); /* GST_MATROSKA_ID_BLOCKMORE */
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ /* read all BlockMore sub-entries */
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ switch (id) {
+ case GST_MATROSKA_ID_BLOCKADDID:
+ ret = gst_ebml_read_uint (ebml, &id, &block_id);
+ if (block_id == 0)
+ block_id = 1;
+ break;
+ case GST_MATROSKA_ID_BLOCKADDITIONAL:
+ g_free (data);
+ data = NULL;
+ datalen = 0;
+ ret = gst_ebml_read_binary (ebml, &id, &data, &datalen);
+ break;
+ default:
+ ret = gst_matroska_read_common_parse_skip (&demux->common, ebml,
+ "BlockMore", id);
+ break;
+ }
+ }
+
+ if (data != NULL && datalen > 0) {
+ BlockAddition *blockadd = g_new (BlockAddition, 1);
+
+ GST_LOG_OBJECT (demux, "BlockAddition %" G_GUINT64_FORMAT ": "
+ "%" G_GUINT64_FORMAT " bytes", block_id, datalen);
+ GST_MEMDUMP_OBJECT (demux, "BlockAdditional", data, datalen);
+ blockadd->data = data;
+ blockadd->size = datalen;
+ blockadd->id = block_id;
+ g_queue_push_tail (additions, blockadd);
+ GST_LOG_OBJECT (demux, "now %d pending block additions", additions->length);
+ }
+
+ return ret;
+ }
+
+ /* BLOCKADDITIONS
+ * BLOCKMORE
+ * BLOCKADDID
+ * BLOCKADDITIONAL
+ */
+ static GstFlowReturn
+ gst_matroska_demux_parse_blockadditions (GstMatroskaDemux * demux,
+ GstEbmlRead * ebml, GQueue * additions)
+ {
+ GstFlowReturn ret;
+ guint32 id;
+
+ ret = gst_ebml_read_master (ebml, &id); /* GST_MATROSKA_ID_BLOCKADDITIONS */
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ /* read all BlockMore entries */
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ if (id == GST_MATROSKA_ID_BLOCKMORE) {
+ DEBUG_ELEMENT_START (demux, ebml, "BlockMore");
+ ret = gst_matroska_demux_parse_blockmore (demux, ebml, additions);
+ DEBUG_ELEMENT_STOP (demux, ebml, "BlockMore", ret);
+ if (ret != GST_FLOW_OK)
+ break;
+ } else {
+ GST_WARNING_OBJECT (demux, "Expected BlockMore, got %x", id);
+ /* skip the unexpected element, otherwise the loop would peek
+ * the same id again and never advance */
+ ret = gst_matroska_read_common_parse_skip (&demux->common, ebml,
+ "BlockAdditions", id);
+ }
+ }
+
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_matroska_demux_parse_blockgroup_or_simpleblock (GstMatroskaDemux * demux,
+ GstEbmlRead * ebml, guint64 cluster_time, guint64 cluster_offset,
+ gboolean is_simpleblock)
+ {
+ GstMatroskaTrackContext *stream = NULL;
+ GstFlowReturn ret = GST_FLOW_OK;
+ gboolean readblock = FALSE;
+ guint32 id;
+ guint64 block_duration = -1;
+ gint64 block_discardpadding = 0;
+ GstBuffer *buf = NULL;
+ GstMapInfo map;
+ gint stream_num = -1, n, laces = 0;
+ guint size = 0;
+ gint *lace_size = NULL;
+ gint64 time = 0;
+ gint flags = 0;
+ gint64 referenceblock = 0;
+ gint64 offset;
+ GstClockTime buffer_timestamp;
+ GQueue additions = G_QUEUE_INIT;
+
+ offset = gst_ebml_read_get_offset (ebml);
+
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ if (!is_simpleblock) {
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK) {
+ goto data_error;
+ }
+ } else {
+ id = GST_MATROSKA_ID_SIMPLEBLOCK;
+ }
+
+ switch (id) {
+ /* one block inside the group. Note, block parsing is one
+ * of the harder things, so this code is a bit complicated.
+ * See http://www.matroska.org/ for documentation. */
+ case GST_MATROSKA_ID_SIMPLEBLOCK:
+ case GST_MATROSKA_ID_BLOCK:
+ {
+ guint64 num;
+ guint8 *data;
+
+ if (buf) {
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ buf = NULL;
+ }
+ if ((ret = gst_ebml_read_buffer (ebml, &id, &buf)) != GST_FLOW_OK)
+ break;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
+
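+ /* Block layout: track number as an EBML number, a 16-bit signed
+ * timestamp relative to the cluster, a flags byte, optional lacing
+ * information, then the actual frame data */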
+ /* first byte(s): blocknum */
+ if ((n = gst_matroska_ebmlnum_uint (data, size, &num)) < 0)
+ goto data_error;
+ data += n;
+ size -= n;
+
+ /* fetch stream from num */
+ stream_num = gst_matroska_read_common_stream_from_num (&demux->common,
+ num);
+ if (G_UNLIKELY (size < 3)) {
+ GST_WARNING_OBJECT (demux, "Invalid size %u", size);
+ /* non-fatal, try next block(group) */
+ ret = GST_FLOW_OK;
+ goto done;
+ } else if (G_UNLIKELY (stream_num < 0 ||
+ stream_num >= demux->common.num_streams)) {
+ /* let's not give up on a stray invalid track number */
+ GST_WARNING_OBJECT (demux,
+ "Invalid stream %d for track number %" G_GUINT64_FORMAT
+ "; ignoring block", stream_num, num);
+ goto done;
+ }
+
+ stream = g_ptr_array_index (demux->common.src, stream_num);
+
+ /* time (relative to cluster time) */
+ time = ((gint16) GST_READ_UINT16_BE (data));
+ data += 2;
+ size -= 2;
+ flags = GST_READ_UINT8 (data);
+ data += 1;
+ size -= 1;
+
+ GST_LOG_OBJECT (demux, "time %" G_GUINT64_FORMAT ", flags %d", time,
+ flags);
+
+ switch ((flags & 0x06) >> 1) {
+ case 0x0: /* no lacing */
+ laces = 1;
+ lace_size = g_new (gint, 1);
+ lace_size[0] = size;
+ break;
+
+ case 0x1: /* xiph lacing */
+ case 0x2: /* fixed-size lacing */
+ case 0x3: /* EBML lacing */
+ if (size == 0)
+ goto invalid_lacing;
+ laces = GST_READ_UINT8 (data) + 1;
+ data += 1;
+ size -= 1;
+ lace_size = g_new0 (gint, laces);
+
+ switch ((flags & 0x06) >> 1) {
+ case 0x1: /* xiph lacing */ {
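+ /* each lace size is stored as a run of 0xff bytes plus a final
+ * byte < 0xff, all summed; the last lace implicitly gets
+ * whatever remains of the block */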
+ guint temp, total = 0;
+
+ for (n = 0; ret == GST_FLOW_OK && n < laces - 1; n++) {
+ while (1) {
+ if (size == 0)
+ goto invalid_lacing;
+ temp = GST_READ_UINT8 (data);
+ lace_size[n] += temp;
+ data += 1;
+ size -= 1;
+ if (temp != 0xff)
+ break;
+ }
+ total += lace_size[n];
+ }
+ lace_size[n] = size - total;
+ break;
+ }
+
+ case 0x2: /* fixed-size lacing */
+ for (n = 0; n < laces; n++)
+ lace_size[n] = size / laces;
+ break;
+
+ case 0x3: /* EBML lacing */ {
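+ /* the first lace size is a plain EBML number; each subsequent
+ * size is a signed EBML delta against the previous one, and the
+ * last lace again takes the remainder */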
+ guint total;
+
+ if ((n = gst_matroska_ebmlnum_uint (data, size, &num)) < 0)
+ goto data_error;
+ data += n;
+ size -= n;
+ total = lace_size[0] = num;
+ for (n = 1; ret == GST_FLOW_OK && n < laces - 1; n++) {
+ gint64 snum;
+ gint r;
+
+ if ((r = gst_matroska_ebmlnum_sint (data, size, &snum)) < 0)
+ goto data_error;
+ data += r;
+ size -= r;
+ lace_size[n] = lace_size[n - 1] + snum;
+ total += lace_size[n];
+ }
+ if (n < laces)
+ lace_size[n] = size - total;
+ break;
+ }
+ }
+ break;
+ }
+
+ if (ret != GST_FLOW_OK)
+ break;
+
+ readblock = TRUE;
+ break;
+ }
+
+ case GST_MATROSKA_ID_BLOCKADDITIONS:
+ {
+ DEBUG_ELEMENT_START (demux, ebml, "BlockAdditions");
+ ret = gst_matroska_demux_parse_blockadditions (demux, ebml, &additions);
+ DEBUG_ELEMENT_STOP (demux, ebml, "BlockAdditions", ret);
+ break;
+ }
+
+ case GST_MATROSKA_ID_BLOCKDURATION:{
+ ret = gst_ebml_read_uint (ebml, &id, &block_duration);
+ GST_DEBUG_OBJECT (demux, "BlockDuration: %" G_GUINT64_FORMAT,
+ block_duration);
+ break;
+ }
+
+ case GST_MATROSKA_ID_DISCARDPADDING:{
+ ret = gst_ebml_read_sint (ebml, &id, &block_discardpadding);
+ GST_DEBUG_OBJECT (demux, "DiscardPadding: %" GST_STIME_FORMAT,
+ GST_STIME_ARGS (block_discardpadding));
+ break;
+ }
+
+ case GST_MATROSKA_ID_REFERENCEBLOCK:{
+ ret = gst_ebml_read_sint (ebml, &id, &referenceblock);
+ GST_DEBUG_OBJECT (demux, "ReferenceBlock: %" G_GINT64_FORMAT,
+ referenceblock);
+ break;
+ }
+
+ case GST_MATROSKA_ID_CODECSTATE:{
+ guint8 *data;
+ guint64 data_len = 0;
+
+ if ((ret =
+ gst_ebml_read_binary (ebml, &id, &data,
+ &data_len)) != GST_FLOW_OK)
+ break;
+
+ if (G_UNLIKELY (stream == NULL)) {
+ GST_WARNING_OBJECT (demux,
+ "Unexpected CodecState subelement - ignoring");
+ break;
+ }
+
+ g_free (stream->codec_state);
+ stream->codec_state = data;
+ stream->codec_state_size = data_len;
+
+ /* Decode if necessary */
+ if (stream->encodings && stream->encodings->len > 0
+ && stream->codec_state && stream->codec_state_size > 0) {
+ if (!gst_matroska_decode_data (stream->encodings,
+ &stream->codec_state, &stream->codec_state_size,
+ GST_MATROSKA_TRACK_ENCODING_SCOPE_CODEC_DATA, TRUE)) {
+ GST_WARNING_OBJECT (demux, "Decoding codec state failed");
+ }
+ }
+
+ GST_DEBUG_OBJECT (demux, "CodecState of %" G_GSIZE_FORMAT " bytes",
+ stream->codec_state_size);
+ break;
+ }
+
+ case GST_MATROSKA_ID_BLOCKVIRTUAL:
+ case GST_MATROSKA_ID_REFERENCEPRIORITY:
+ case GST_MATROSKA_ID_REFERENCEVIRTUAL:
+ case GST_MATROSKA_ID_SLICES:
+ GST_DEBUG_OBJECT (demux,
+ "Skipping BlockGroup subelement 0x%x - ignoring", id);
+ ret = gst_ebml_read_skip (ebml);
+ break;
+
+ default:
+ ret = gst_matroska_read_common_parse_skip (&demux->common, ebml,
+ "BlockGroup", id);
+ break;
+ }
+
+ if (is_simpleblock)
+ break;
+ }
+
+ /* reading a number or similar could have failed */
+ if (ret != GST_FLOW_OK)
+ goto data_error;
+
+ if (ret == GST_FLOW_OK && readblock) {
+ gboolean invisible_frame = FALSE;
+ gboolean delta_unit = FALSE;
+ guint64 duration = 0;
+ gint64 lace_time = 0;
+ gboolean keep_seek_start = TRUE;
+ GstEvent *protect_event;
+
+ stream = g_ptr_array_index (demux->common.src, stream_num);
+
+ if (cluster_time != GST_CLOCK_TIME_NONE) {
+ /* FIXME: What to do with negative timestamps? Give timestamp 0 or -1?
+ * Drop unless the lace contains timestamp 0? */
+ if (time < 0 && (-time) > cluster_time) {
+ lace_time = 0;
+ } else {
+ if (stream->timecodescale == 1.0)
+ lace_time = (cluster_time + time) * demux->common.time_scale;
+ else
+ lace_time =
+ gst_util_guint64_to_gdouble ((cluster_time + time) *
+ demux->common.time_scale) * stream->timecodescale;
+ }
+ } else {
+ lace_time = GST_CLOCK_TIME_NONE;
+ }
+ /* Send the GST_PROTECTION event */
+ while ((protect_event = g_queue_pop_head (&stream->protection_event_queue))) {
+ GST_TRACE_OBJECT (demux, "pushing protection event for stream %d:%s",
+ stream->index, GST_STR_NULL (stream->name));
+ gst_pad_push_event (stream->pad, protect_event);
+ }
+
+ /* need to refresh segment info ASAP */
+ if (GST_CLOCK_TIME_IS_VALID (lace_time)
+ && GST_CLOCK_TIME_IS_VALID (demux->stream_start_time)
+ && lace_time < demux->stream_start_time) {
+ keep_seek_start =
+ (demux->common.segment.start > demux->stream_start_time);
+ demux->stream_start_time = lace_time;
+ demux->need_segment = TRUE;
+ }
+
+ if (GST_CLOCK_TIME_IS_VALID (lace_time) && demux->need_segment) {
+ GstSegment *segment = &demux->common.segment;
+ guint64 clace_time;
+ GstEvent *segment_event;
+
+ if (!GST_CLOCK_TIME_IS_VALID (demux->stream_start_time)) {
+ demux->stream_start_time = lace_time;
+ GST_DEBUG_OBJECT (demux,
+ "Setting stream start time to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (lace_time));
+ }
+ clace_time = MAX (lace_time, demux->stream_start_time);
+ if (keep_seek_start
+ && GST_CLOCK_TIME_IS_VALID (demux->common.segment.position)
+ && demux->common.segment.position != 0) {
+ GST_DEBUG_OBJECT (demux, "using stored seek position %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (demux->common.segment.position));
+ clace_time = demux->common.segment.position;
+ }
+ segment->start = clace_time;
+ segment->stop = demux->common.segment.stop;
+ segment->time = segment->start - demux->stream_start_time;
+ segment->position = segment->start - demux->stream_start_time;
+ GST_DEBUG_OBJECT (demux,
+ "generated segment starting at %" GST_TIME_FORMAT ": %"
+ GST_SEGMENT_FORMAT, GST_TIME_ARGS (lace_time), segment);
+ /* now convey our segment notion downstream */
+ segment_event = gst_event_new_segment (segment);
+ if (demux->segment_seqnum)
+ gst_event_set_seqnum (segment_event, demux->segment_seqnum);
+ gst_matroska_demux_send_event (demux, segment_event);
+ demux->need_segment = FALSE;
+ demux->segment_seqnum = 0;
+ }
+
+ /* send pending codec data headers for all streams,
+ * before we perform sync across all streams */
+ gst_matroska_demux_push_codec_data_all (demux);
+
+ if (block_duration != -1) {
+ if (stream->timecodescale == 1.0)
+ duration = gst_util_uint64_scale (block_duration,
+ demux->common.time_scale, 1);
+ else
+ duration =
+ gst_util_gdouble_to_guint64 (gst_util_guint64_to_gdouble
+ (gst_util_uint64_scale (block_duration, demux->common.time_scale,
+ 1)) * stream->timecodescale);
+ } else if (stream->default_duration) {
+ duration = stream->default_duration * laces;
+ }
+ /* else duration is diff between timecode of this and next block */
+
+ if (stream->type == GST_MATROSKA_TRACK_TYPE_VIDEO) {
+ /* For SimpleBlock, look at the keyframe bit in flags. Otherwise,
+ a ReferenceBlock implies that this is not a keyframe. In either
+ case, it only makes sense for video streams. */
+ if ((is_simpleblock && !(flags & 0x80)) || referenceblock) {
+ delta_unit = TRUE;
+ invisible_frame = ((flags & 0x08)) &&
+ (!strcmp (stream->codec_id, GST_MATROSKA_CODEC_ID_VIDEO_VP8) ||
+ !strcmp (stream->codec_id, GST_MATROSKA_CODEC_ID_VIDEO_VP9) ||
+ !strcmp (stream->codec_id, GST_MATROSKA_CODEC_ID_VIDEO_AV1));
+ }
+
+ /* If we're doing a keyframe-only trickmode, only push keyframes on video
+ * streams */
+ if (delta_unit
+ && demux->common.segment.
+ flags & GST_SEGMENT_FLAG_TRICKMODE_KEY_UNITS) {
+ GST_LOG_OBJECT (demux, "Skipping non-keyframe on stream %d",
+ stream->index);
+ ret = GST_FLOW_OK;
+ goto done;
+ }
+ }
+
+ for (n = 0; n < laces; n++) {
+ GstBuffer *sub;
+
+ if (G_UNLIKELY (lace_size[n] > size)) {
+ GST_WARNING_OBJECT (demux, "Invalid lace size");
+ break;
+ }
+
+ /* QoS for video track with an index. The assumption is that
+ index entries point to keyframes, but if that is not true we
+ will instead skip until the next keyframe. */
+ if (GST_CLOCK_TIME_IS_VALID (lace_time) &&
+ stream->type == GST_MATROSKA_TRACK_TYPE_VIDEO &&
+ stream->index_table && demux->common.segment.rate > 0.0) {
+ GstMatroskaTrackVideoContext *videocontext =
+ (GstMatroskaTrackVideoContext *) stream;
+ GstClockTime earliest_time;
+ GstClockTime earliest_stream_time;
+
+ GST_OBJECT_LOCK (demux);
+ earliest_time = videocontext->earliest_time;
+ GST_OBJECT_UNLOCK (demux);
+ earliest_stream_time =
+ gst_segment_position_from_running_time (&demux->common.segment,
+ GST_FORMAT_TIME, earliest_time);
+
+ if (GST_CLOCK_TIME_IS_VALID (lace_time) &&
+ GST_CLOCK_TIME_IS_VALID (earliest_stream_time) &&
+ lace_time <= earliest_stream_time) {
+ /* find index entry (keyframe) <= earliest_stream_time */
+ GstMatroskaIndex *entry =
+ gst_util_array_binary_search (stream->index_table->data,
+ stream->index_table->len, sizeof (GstMatroskaIndex),
+ (GCompareDataFunc) gst_matroska_index_seek_find,
+ GST_SEARCH_MODE_BEFORE, &earliest_stream_time, NULL);
+
+ /* if that entry (keyframe) is after the current
+ buffer, we can skip pushing (and thus decoding) all
+ buffers until that keyframe. */
+ if (entry && GST_CLOCK_TIME_IS_VALID (entry->time) &&
+ entry->time > lace_time) {
+ GST_LOG_OBJECT (demux, "Skipping lace before late keyframe");
+ stream->set_discont = TRUE;
+ goto next_lace;
+ }
+ }
+ }
+
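+ /* the lace starts at the running offset into the mapped block:
+ * total buffer size minus the bytes still left to consume */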
+ sub = gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL,
+ gst_buffer_get_size (buf) - size, lace_size[n]);
+ GST_DEBUG_OBJECT (demux, "created subbuffer %p", sub);
+
+ if (delta_unit)
+ GST_BUFFER_FLAG_SET (sub, GST_BUFFER_FLAG_DELTA_UNIT);
+ else
+ GST_BUFFER_FLAG_UNSET (sub, GST_BUFFER_FLAG_DELTA_UNIT);
+
+ if (invisible_frame)
+ GST_BUFFER_FLAG_SET (sub, GST_BUFFER_FLAG_DECODE_ONLY);
+
+ if (stream->encodings != NULL && stream->encodings->len > 0)
+ sub = gst_matroska_decode_buffer (stream, sub);
+
+ if (sub == NULL) {
+ GST_WARNING_OBJECT (demux, "Decoding buffer failed");
+ goto next_lace;
+ }
+
+ if (!stream->dts_only) {
+ GST_BUFFER_PTS (sub) = lace_time;
+ } else {
+ GST_BUFFER_DTS (sub) = lace_time;
+ if (stream->intra_only)
+ GST_BUFFER_PTS (sub) = lace_time;
+ }
+
+ buffer_timestamp = gst_matroska_track_get_buffer_timestamp (stream, sub);
+
+ if (GST_CLOCK_TIME_IS_VALID (lace_time)) {
+ GstClockTime last_stop_end;
+
+ /* Check if this stream is after segment stop,
+ * but only terminate if we hit the next keyframe,
+ * to make sure that all frames potentially inside the segment
+ * are available to the decoder for decoding / reordering.*/
+ if (!delta_unit && GST_CLOCK_TIME_IS_VALID (demux->common.segment.stop)
+ && lace_time >= demux->common.segment.stop) {
+ GST_DEBUG_OBJECT (demux,
+ "Stream %d lace time: %" GST_TIME_FORMAT " after segment stop: %"
+ GST_TIME_FORMAT, stream->index, GST_TIME_ARGS (lace_time),
+ GST_TIME_ARGS (demux->common.segment.stop));
+ gst_buffer_unref (sub);
+ goto eos;
+ }
+ if (offset >= stream->to_offset
+ || (GST_CLOCK_TIME_IS_VALID (demux->to_time)
+ && lace_time > demux->to_time)) {
+ GST_DEBUG_OBJECT (demux, "Stream %d after playback section",
+ stream->index);
+ gst_buffer_unref (sub);
+ goto eos;
+ }
+
+ /* handle gaps, e.g. a non-zero start-time, or a cue index entry
+ * that landed us with timestamps not quite intended */
+ GST_OBJECT_LOCK (demux);
+ if (demux->max_gap_time &&
+ GST_CLOCK_TIME_IS_VALID (demux->last_stop_end) &&
+ demux->common.segment.rate > 0.0) {
+ GstClockTimeDiff diff;
+
+ /* only send gap events with increasing start times; otherwise,
+ * if these go back and forth, downstream (sinks) increase
+ * accumulated time and running_time */
+ diff = GST_CLOCK_DIFF (demux->last_stop_end, lace_time);
+ if (diff > 0 && diff > demux->max_gap_time
+ && lace_time > demux->common.segment.start
+ && (!GST_CLOCK_TIME_IS_VALID (demux->common.segment.stop)
+ || lace_time < demux->common.segment.stop)) {
+ GstEvent *event;
+ GST_DEBUG_OBJECT (demux,
+ "Gap of %" G_GINT64_FORMAT " ns detected in "
+ "stream %d (%" GST_TIME_FORMAT " -> %" GST_TIME_FORMAT "). "
+ "Sending GAP event", diff,
+ stream->index, GST_TIME_ARGS (stream->pos),
+ GST_TIME_ARGS (lace_time));
+
+ event = gst_event_new_gap (demux->last_stop_end, diff);
+ GST_OBJECT_UNLOCK (demux);
+ gst_pad_push_event (stream->pad, event);
+ GST_OBJECT_LOCK (demux);
+ }
+ }
+
+ if (!GST_CLOCK_TIME_IS_VALID (demux->common.segment.position)
+ || demux->common.segment.position < lace_time) {
+ demux->common.segment.position = lace_time;
+ }
+ GST_OBJECT_UNLOCK (demux);
+
+ last_stop_end = lace_time;
+ if (duration) {
+ GST_BUFFER_DURATION (sub) = duration / laces;
+ last_stop_end += GST_BUFFER_DURATION (sub);
+ }
+
+ if (!GST_CLOCK_TIME_IS_VALID (demux->last_stop_end) ||
+ demux->last_stop_end < last_stop_end)
+ demux->last_stop_end = last_stop_end;
+
+ GST_OBJECT_LOCK (demux);
+ if (demux->common.segment.duration == -1 ||
+ demux->stream_start_time + demux->common.segment.duration <
+ last_stop_end) {
+ demux->common.segment.duration =
+ last_stop_end - demux->stream_start_time;
+ GST_OBJECT_UNLOCK (demux);
+ if (!demux->invalid_duration) {
+ gst_element_post_message (GST_ELEMENT_CAST (demux),
+ gst_message_new_duration_changed (GST_OBJECT_CAST (demux)));
+ demux->invalid_duration = TRUE;
+ }
+ } else {
+ GST_OBJECT_UNLOCK (demux);
+ }
+ }
+
+ stream->pos = lace_time;
+
+ gst_matroska_demux_sync_streams (demux);
+
+ if (stream->set_discont) {
+ GST_DEBUG_OBJECT (demux, "marking DISCONT");
+ GST_BUFFER_FLAG_SET (sub, GST_BUFFER_FLAG_DISCONT);
+ stream->set_discont = FALSE;
+ } else {
+ GST_BUFFER_FLAG_UNSET (sub, GST_BUFFER_FLAG_DISCONT);
+ }
+
+ /* reverse playback book-keeping */
+ if (!GST_CLOCK_TIME_IS_VALID (stream->from_time))
+ stream->from_time = lace_time;
+ if (stream->from_offset == -1)
+ stream->from_offset = offset;
+
+ GST_DEBUG_OBJECT (demux,
+ "Pushing lace %d, data of size %" G_GSIZE_FORMAT
+ " for stream %d, time=%" GST_TIME_FORMAT " and duration=%"
+ GST_TIME_FORMAT, n, gst_buffer_get_size (sub), stream_num,
+ GST_TIME_ARGS (buffer_timestamp),
+ GST_TIME_ARGS (GST_BUFFER_DURATION (sub)));
+
+ #if 0
+ if (demux->common.element_index) {
+ if (stream->index_writer_id == -1)
+ gst_index_get_writer_id (demux->common.element_index,
+ GST_OBJECT (stream->pad), &stream->index_writer_id);
+
+ GST_LOG_OBJECT (demux, "adding association %" GST_TIME_FORMAT "-> %"
+ G_GUINT64_FORMAT " for writer id %d",
+ GST_TIME_ARGS (buffer_timestamp), cluster_offset,
+ stream->index_writer_id);
+ gst_index_add_association (demux->common.element_index,
+ stream->index_writer_id, GST_BUFFER_FLAG_IS_SET (sub,
+ GST_BUFFER_FLAG_DELTA_UNIT) ? 0 : GST_ASSOCIATION_FLAG_KEY_UNIT,
+ GST_FORMAT_TIME, buffer_timestamp, GST_FORMAT_BYTES, cluster_offset,
+ NULL);
+ }
+ #endif
+
+ /* Postprocess the buffers depending on the codec used */
+ if (stream->postprocess_frame) {
+ GST_LOG_OBJECT (demux, "running post process");
+ ret = stream->postprocess_frame (GST_ELEMENT (demux), stream, &sub);
+ }
+
+ /* At this point, we have a sub-buffer pointing at data within a larger
+ buffer. This data might not be aligned with anything. If the data is
+ raw samples though, we want it aligned to the raw type (e.g., 4 bytes
+ for 32 bit samples, etc.), or bad things will happen downstream as
+ elements typically assume minimal alignment.
+ Therefore, create an aligned copy if necessary. */
+ sub = gst_matroska_demux_align_buffer (demux, sub, stream->alignment);
+
+ if (!strcmp (stream->codec_id, GST_MATROSKA_CODEC_ID_AUDIO_OPUS)) {
+ guint64 start_clip = 0, end_clip = 0;
+
+ /* Codec delay is part of the timestamps */
+ if (GST_BUFFER_PTS_IS_VALID (sub) && stream->codec_delay) {
+ if (GST_BUFFER_PTS (sub) > stream->codec_delay) {
+ GST_BUFFER_PTS (sub) -= stream->codec_delay;
+ } else {
+ GST_BUFFER_PTS (sub) = 0;
+
+ /* Opus GstAudioClippingMeta units are scaled by 48000/sample_rate.
+ That is, if an Opus track has audio encoded at 24000 Hz and 132
+ samples need to be clipped, GstAudioClippingMeta.start will be
+ set to 264. (This is also the case for buffer offsets.)
+ Opus sample rates are always divisors of 48000 Hz, which is the
+ maximum allowed sample rate. */
+ start_clip =
+ gst_util_uint64_scale_round (stream->codec_delay, 48000,
+ GST_SECOND);
+
+ if (GST_BUFFER_DURATION_IS_VALID (sub)) {
+ if (GST_BUFFER_DURATION (sub) > stream->codec_delay)
+ GST_BUFFER_DURATION (sub) -= stream->codec_delay;
+ else
+ GST_BUFFER_DURATION (sub) = 0;
+ }
+ }
+ }
+
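+ /* DiscardPadding is stored in nanoseconds; convert it to samples at
+ * the 48 kHz Opus reference rate used by the clipping meta */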
+ if (block_discardpadding) {
+ end_clip =
+ gst_util_uint64_scale_round (block_discardpadding, 48000,
+ GST_SECOND);
+ }
+
+ if (start_clip || end_clip) {
+ gst_buffer_add_audio_clipping_meta (sub, GST_FORMAT_DEFAULT,
+ start_clip, end_clip);
+ }
+ }
+
+ if (GST_BUFFER_PTS_IS_VALID (sub)) {
+ stream->pos = GST_BUFFER_PTS (sub);
+ if (GST_BUFFER_DURATION_IS_VALID (sub))
+ stream->pos += GST_BUFFER_DURATION (sub);
+ } else if (GST_BUFFER_DTS_IS_VALID (sub)) {
+ stream->pos = GST_BUFFER_DTS (sub);
+ if (GST_BUFFER_DURATION_IS_VALID (sub))
+ stream->pos += GST_BUFFER_DURATION (sub);
+ }
+
+ /* Attach BlockAdditions to buffer; we assume a single buffer per group
+ * in this case */
+ if (additions.length > 0) {
+ BlockAddition *blockadd;
+
+ if (laces > 2)
+ GST_FIXME_OBJECT (demux, "Fix block additions with laced buffers");
+
+ while ((blockadd = g_queue_pop_head (&additions))) {
+ GstMatroskaTrackVideoContext *videocontext =
+ (GstMatroskaTrackVideoContext *) stream;
+ if (blockadd->id == 1 && videocontext->alpha_mode
+ && (!strcmp (stream->codec_id, GST_MATROSKA_CODEC_ID_VIDEO_VP8)
+ || !strcmp (stream->codec_id,
+ GST_MATROSKA_CODEC_ID_VIDEO_VP9))) {
+ GstBuffer *alpha_buffer;
+
+ GST_TRACE_OBJECT (demux, "adding block addition %u as VP8/VP9 "
+ "alpha meta to buffer %p, %u bytes", (guint) blockadd->id, buf,
+ (guint) blockadd->size);
+
+ alpha_buffer = gst_buffer_new_wrapped (blockadd->data,
+ blockadd->size);
+ gst_buffer_copy_into (alpha_buffer, sub,
+ GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS, 0, -1);
+ gst_buffer_add_video_codec_alpha_meta (sub, alpha_buffer);
+ } else {
+ g_free (blockadd->data);
+ }
+ g_free (blockadd);
+ }
+ }
+
+ ret = gst_pad_push (stream->pad, sub);
+
+ if (demux->common.segment.rate < 0) {
+ if (lace_time > demux->common.segment.stop && ret == GST_FLOW_EOS) {
+ /* In reverse playback we can get a GST_FLOW_EOS when
+ * we are at the end of the segment, so we just need to jump
+ * back to the previous section. */
+ GST_DEBUG_OBJECT (demux, "downstream has reached end of segment");
+ ret = GST_FLOW_OK;
+ }
+ }
+ /* combine flows */
+ ret = gst_flow_combiner_update_pad_flow (demux->flowcombiner,
+ stream->pad, ret);
+
+ next_lace:
+ size -= lace_size[n];
+ if (lace_time != GST_CLOCK_TIME_NONE && duration)
+ lace_time += duration / laces;
+ else
+ lace_time = GST_CLOCK_TIME_NONE;
+ }
+ }
+
+ done:
+ if (buf) {
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ }
+ g_free (lace_size);
+ {
+ BlockAddition *blockadd;
+
+ while ((blockadd = g_queue_pop_head (&additions))) {
+ g_free (blockadd->data);
+ g_free (blockadd);
+ }
+ }
+ return ret;
+
+ /* EXITS */
+ eos:
+ {
+ stream->eos = TRUE;
+ ret = GST_FLOW_OK;
+ /* combine flows */
+ ret = gst_flow_combiner_update_pad_flow (demux->flowcombiner, stream->pad,
+ ret);
+ goto done;
+ }
+ invalid_lacing:
+ {
+ GST_ELEMENT_WARNING (demux, STREAM, DEMUX, (NULL), ("Invalid lacing size"));
+ /* non-fatal, try next block(group) */
+ ret = GST_FLOW_OK;
+ goto done;
+ }
+ data_error:
+ {
+ GST_ELEMENT_WARNING (demux, STREAM, DEMUX, (NULL), ("Data error"));
+ /* non-fatal, try next block(group) */
+ ret = GST_FLOW_OK;
+ goto done;
+ }
+ }
+
+ /* return FALSE if this block(group) should be skipped because a seek
+ * targeted a later block; demux->seek_block counts down the blocks
+ * remaining up to and including the seek target */
+ static inline gboolean
+ gst_matroska_demux_seek_block (GstMatroskaDemux * demux)
+ {
+ if (G_UNLIKELY (demux->seek_block)) {
+ if (!(--demux->seek_block)) {
+ return TRUE;
+ } else {
+ GST_LOG_OBJECT (demux, "should skip block due to seek");
+ return FALSE;
+ }
+ } else {
+ return TRUE;
+ }
+ }
+
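+ /* parse a single Seek entry of the SeekHead: a SeekID (the EBML id of
+ * a top-level element) paired with a SeekPosition (its offset relative
+ * to the start of the Segment) */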
+ static GstFlowReturn
+ gst_matroska_demux_parse_contents_seekentry (GstMatroskaDemux * demux,
+ GstEbmlRead * ebml)
+ {
+ GstFlowReturn ret;
+ guint64 seek_pos = (guint64) - 1;
+ guint32 seek_id = 0;
+ guint32 id;
+
+ DEBUG_ELEMENT_START (demux, ebml, "Seek");
+
+ if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+ DEBUG_ELEMENT_STOP (demux, ebml, "Seek", ret);
+ return ret;
+ }
+
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ switch (id) {
+ case GST_MATROSKA_ID_SEEKID:
+ {
+ guint64 t;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &t)) != GST_FLOW_OK)
+ break;
+
+ GST_DEBUG_OBJECT (demux, "SeekID: %" G_GUINT64_FORMAT, t);
+ seek_id = t;
+ break;
+ }
+
+ case GST_MATROSKA_ID_SEEKPOSITION:
+ {
+ guint64 t;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &t)) != GST_FLOW_OK)
+ break;
+
+ if (t > G_MAXINT64) {
+ GST_WARNING_OBJECT (demux,
+ "Too large SeekPosition %" G_GUINT64_FORMAT, t);
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux, "SeekPosition: %" G_GUINT64_FORMAT, t);
+ seek_pos = t;
+ break;
+ }
+
+ default:
+ ret = gst_matroska_read_common_parse_skip (&demux->common, ebml,
+ "SeekHead", id);
+ break;
+ }
+ }
+
+ if (ret != GST_FLOW_OK && ret != GST_FLOW_EOS)
+ return ret;
+
+ if (!seek_id || seek_pos == (guint64) - 1) {
+ GST_WARNING_OBJECT (demux, "Incomplete seekhead entry (0x%x/%"
+ G_GUINT64_FORMAT ")", seek_id, seek_pos);
+ return GST_FLOW_OK;
+ }
+
+ switch (seek_id) {
+ case GST_MATROSKA_ID_SEEKHEAD:
+ /* fall-through */
+ case GST_MATROSKA_ID_CUES:
+ case GST_MATROSKA_ID_TAGS:
+ case GST_MATROSKA_ID_TRACKS:
+ case GST_MATROSKA_ID_SEGMENTINFO:
+ case GST_MATROSKA_ID_ATTACHMENTS:
+ case GST_MATROSKA_ID_CHAPTERS:
+ {
+ guint64 before_pos, length;
+ guint needed;
+
+ /* remember */
+ length = gst_matroska_read_common_get_length (&demux->common);
+ before_pos = demux->common.offset;
+
+ if (length == (guint64) - 1) {
+ GST_DEBUG_OBJECT (demux, "no upstream length, skipping SeakHead entry");
+ break;
+ }
+
+ /* check for validity */
+ if (seek_pos + demux->common.ebml_segment_start + 12 >= length) {
+ GST_WARNING_OBJECT (demux,
+ "SeekHead reference lies outside file!" " (%"
+ G_GUINT64_FORMAT "+%" G_GUINT64_FORMAT "+12 >= %"
+ G_GUINT64_FORMAT ")", seek_pos, demux->common.ebml_segment_start,
+ length);
+ break;
+ }
+
+ /* only pick up index location when streaming */
+ if (demux->streaming) {
+ if (seek_id == GST_MATROSKA_ID_CUES) {
+ demux->index_offset = seek_pos + demux->common.ebml_segment_start;
+ GST_DEBUG_OBJECT (demux, "Cues located at offset %" G_GUINT64_FORMAT,
+ demux->index_offset);
+ }
+ break;
+ }
+
+ /* seek */
+ demux->common.offset = seek_pos + demux->common.ebml_segment_start;
+
+ /* check ID */
+ if ((ret = gst_matroska_read_common_peek_id_length_pull (&demux->common,
+ GST_ELEMENT_CAST (demux), &id, &length, &needed)) !=
+ GST_FLOW_OK)
+ goto finish;
+
+ if (id != seek_id) {
+ GST_WARNING_OBJECT (demux,
+ "We looked for ID=0x%x but got ID=0x%x (pos=%" G_GUINT64_FORMAT ")",
+ seek_id, id, seek_pos + demux->common.ebml_segment_start);
+ } else {
+ /* now parse */
+ ret = gst_matroska_demux_parse_id (demux, id, length, needed);
+ }
+
+ finish:
+ /* seek back */
+ demux->common.offset = before_pos;
+ break;
+ }
+
+ case GST_MATROSKA_ID_CLUSTER:
+ {
+ guint64 pos = seek_pos + demux->common.ebml_segment_start;
+
+ GST_LOG_OBJECT (demux, "Cluster position");
+ if (G_UNLIKELY (!demux->clusters))
+ demux->clusters = g_array_sized_new (TRUE, TRUE, sizeof (guint64), 100);
+ g_array_append_val (demux->clusters, pos);
+ break;
+ }
+
+ default:
+ GST_DEBUG_OBJECT (demux, "Ignoring Seek entry for ID=0x%x", seek_id);
+ break;
+ }
+ DEBUG_ELEMENT_STOP (demux, ebml, "Seek", ret);
+
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_matroska_demux_parse_contents (GstMatroskaDemux * demux, GstEbmlRead * ebml)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint32 id;
+
+ DEBUG_ELEMENT_START (demux, ebml, "SeekHead");
+
+ if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+ DEBUG_ELEMENT_STOP (demux, ebml, "SeekHead", ret);
+ return ret;
+ }
+
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ switch (id) {
+ case GST_MATROSKA_ID_SEEKENTRY:
+ {
+ ret = gst_matroska_demux_parse_contents_seekentry (demux, ebml);
+ /* Ignore EOS and errors here */
+ if (ret != GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (demux, "Ignoring %s", gst_flow_get_name (ret));
+ ret = GST_FLOW_OK;
+ }
+ break;
+ }
+
+ default:
+ ret = gst_matroska_read_common_parse_skip (&demux->common,
+ ebml, "SeekHead", id);
+ break;
+ }
+ }
+
+ DEBUG_ELEMENT_STOP (demux, ebml, "SeekHead", ret);
+
+ /* Sort clusters by position for easier searching */
+ if (demux->clusters)
+ g_array_sort (demux->clusters, (GCompareFunc) gst_matroska_cluster_compare);
+
+ return ret;
+ }
+
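+ /* internal flow return used to signal an oversized element that could
+ * be (and was) skipped in pull mode rather than read */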
+ #define GST_FLOW_OVERFLOW GST_FLOW_CUSTOM_ERROR
+
+ #define MAX_BLOCK_SIZE (15 * 1024 * 1024)
+
+ static inline GstFlowReturn
+ gst_matroska_demux_check_read_size (GstMatroskaDemux * demux, guint64 bytes)
+ {
+ if (G_UNLIKELY (bytes > MAX_BLOCK_SIZE)) {
+ /* only a few blocks are expected/allowed to be large,
+ * and will be recursed into, whereas others will be read and must fit */
+ if (demux->streaming) {
+ /* fatal in streaming case, as we can't step over easily */
+ GST_ELEMENT_ERROR (demux, STREAM, DEMUX, (NULL),
+ ("reading large block of size %" G_GUINT64_FORMAT " not supported; "
+ "file might be corrupt.", bytes));
+ return GST_FLOW_ERROR;
+ } else {
+ /* indicate higher level to quietly give up */
+ GST_DEBUG_OBJECT (demux,
+ "too large block of size %" G_GUINT64_FORMAT, bytes);
+ return GST_FLOW_ERROR;
+ }
+ } else {
+ return GST_FLOW_OK;
+ }
+ }
+
+ /* returns GST_FLOW_OK if parsing can resume (repositioned at the next
+ * expected or next found cluster), or an error flow if we truly are in
+ * error state and should give up */
+ static inline GstFlowReturn
+ gst_matroska_demux_check_parse_error (GstMatroskaDemux * demux)
+ {
+ if (!demux->streaming && demux->next_cluster_offset > 0) {
+ /* just repositioning to where next cluster should be and try from there */
+ GST_WARNING_OBJECT (demux, "parse error, trying next cluster expected at %"
+ G_GUINT64_FORMAT, demux->next_cluster_offset);
+ demux->common.offset = demux->next_cluster_offset;
+ demux->next_cluster_offset = 0;
+ return GST_FLOW_OK;
+ } else {
+ gint64 pos;
+ GstFlowReturn ret;
+
+ /* sigh, one last attempt above and beyond the call of duty ...;
+ * search for a cluster mark following the current position */
+ pos = demux->common.offset;
+ GST_WARNING_OBJECT (demux, "parse error, looking for next cluster");
+ if ((ret = gst_matroska_demux_search_cluster (demux, &pos, TRUE)) !=
+ GST_FLOW_OK) {
+ /* did not work, give up */
+ return ret;
+ } else {
+ GST_DEBUG_OBJECT (demux, "... found at %" G_GUINT64_FORMAT, pos);
+ /* try that position */
+ demux->common.offset = pos;
+ return GST_FLOW_OK;
+ }
+ }
+ }
+
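+ /* advance the read position by @flush bytes; in push mode the bytes
+ * must already be present in the adapter, otherwise EOS is returned
+ * so that more data can accumulate first */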
+ static inline GstFlowReturn
+ gst_matroska_demux_flush (GstMatroskaDemux * demux, guint flush)
+ {
+ GST_LOG_OBJECT (demux, "skipping %d bytes", flush);
+ demux->common.offset += flush;
+ if (demux->streaming) {
+ GstFlowReturn ret;
+
+ /* hard to skip large blocks when streaming */
+ ret = gst_matroska_demux_check_read_size (demux, flush);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ if (flush <= gst_adapter_available (demux->common.adapter))
+ gst_adapter_flush (demux->common.adapter, flush);
+ else
+ return GST_FLOW_EOS;
+ }
+ return GST_FLOW_OK;
+ }
+
+ /* initializes @ebml with @bytes from the input stream at the current
+ * offset. Returns EOS if not enough data is available, and ERROR if
+ * the element is too large to be read (OVERFLOW if it could instead
+ * be skipped in pull mode). */
+ static inline GstFlowReturn
+ gst_matroska_demux_take (GstMatroskaDemux * demux, guint64 bytes,
+ GstEbmlRead * ebml)
+ {
+ GstBuffer *buffer = NULL;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ GST_LOG_OBJECT (demux, "taking %" G_GUINT64_FORMAT " bytes for parsing",
+ bytes);
+ ret = gst_matroska_demux_check_read_size (demux, bytes);
+ if (G_UNLIKELY (ret != GST_FLOW_OK)) {
+ if (!demux->streaming) {
+ /* in pull mode, we can skip */
+ if ((ret = gst_matroska_demux_flush (demux, bytes)) == GST_FLOW_OK)
+ ret = GST_FLOW_OVERFLOW;
+ } else {
+ /* otherwise fatal */
+ ret = GST_FLOW_ERROR;
+ }
+ goto exit;
+ }
+ if (demux->streaming) {
+ if (gst_adapter_available (demux->common.adapter) >= bytes)
+ buffer = gst_adapter_take_buffer (demux->common.adapter, bytes);
+ else
+ ret = GST_FLOW_EOS;
+ } else
+ ret = gst_matroska_read_common_peek_bytes (&demux->common,
+ demux->common.offset, bytes, &buffer, NULL);
+ if (G_LIKELY (buffer)) {
+ gst_ebml_read_init (ebml, GST_ELEMENT_CAST (demux), buffer,
+ demux->common.offset);
+ demux->common.offset += bytes;
+ }
+ exit:
+ return ret;
+ }
+
+ static void
+ gst_matroska_demux_check_seekability (GstMatroskaDemux * demux)
+ {
+ GstQuery *query;
+ gboolean seekable = FALSE;
+ gint64 start = -1, stop = -1;
+
+ query = gst_query_new_seeking (GST_FORMAT_BYTES);
+ if (!gst_pad_peer_query (demux->common.sinkpad, query)) {
+ GST_DEBUG_OBJECT (demux, "seeking query failed");
+ goto done;
+ }
+
+ gst_query_parse_seeking (query, NULL, &seekable, &start, &stop);
+
+ /* try harder to query upstream size if we didn't get it the first time */
+ if (seekable && stop == -1) {
+ GST_DEBUG_OBJECT (demux, "doing duration query to fix up unset stop");
+ gst_pad_peer_query_duration (demux->common.sinkpad, GST_FORMAT_BYTES,
+ &stop);
+ }
+
+ /* if upstream doesn't know the size, it's likely that it's not seekable in
+ * practice even if it technically may be seekable */
+ if (seekable && (start != 0 || stop <= start)) {
+ GST_DEBUG_OBJECT (demux, "seekable but unknown start/stop -> disable");
+ seekable = FALSE;
+ }
+
+ done:
+ GST_INFO_OBJECT (demux, "seekable: %d (%" G_GUINT64_FORMAT " - %"
+ G_GUINT64_FORMAT ")", seekable, start, stop);
+ demux->seekable = seekable;
+
+ gst_query_unref (query);
+ }
+
+ static GstFlowReturn
+ gst_matroska_demux_find_tracks (GstMatroskaDemux * demux)
+ {
+ guint32 id;
+ guint64 before_pos;
+ guint64 length;
+ guint needed;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ GST_WARNING_OBJECT (demux,
+ "Found Cluster element before Tracks, searching Tracks");
+
+ /* remember */
+ before_pos = demux->common.offset;
+
+ /* Search Tracks element */
+ while (TRUE) {
+ ret = gst_matroska_read_common_peek_id_length_pull (&demux->common,
+ GST_ELEMENT_CAST (demux), &id, &length, &needed);
+ if (ret != GST_FLOW_OK)
+ break;
+
+ if (id != GST_MATROSKA_ID_TRACKS) {
+ /* we may be skipping a large cluster here, so forgo the size check etc */
+ /* ... but we can't skip an undefined size; force an error */
+ if (length == G_MAXUINT64) {
+ ret = gst_matroska_demux_check_read_size (demux, length);
+ break;
+ } else {
+ demux->common.offset += needed;
+ demux->common.offset += length;
+ }
+ continue;
+ }
+
+ /* will lead to track parsing ... */
+ ret = gst_matroska_demux_parse_id (demux, id, length, needed);
+ break;
+ }
+
+ /* seek back */
+ demux->common.offset = before_pos;
+
+ return ret;
+ }
+
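+ /* evaluate a read/flush statement and bail out to read_error on
+ * failure; GST_FLOW_OVERFLOW (an oversized element that was skipped
+ * in pull mode) is demoted to GST_FLOW_OK so parsing quietly resumes
+ * at the next element */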
+ #define GST_READ_CHECK(stmt) \
+ G_STMT_START { \
+ if (G_UNLIKELY ((ret = (stmt)) != GST_FLOW_OK)) { \
+ if (ret == GST_FLOW_OVERFLOW) { \
+ ret = GST_FLOW_OK; \
+ } \
+ goto read_error; \
+ } \
+ } G_STMT_END
+
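+ /* central element dispatcher: given a peeked element @id with payload
+ * @length and an id/length prefix of @needed bytes, read and parse the
+ * element or skip it, depending on the current read state */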
+ static GstFlowReturn
+ gst_matroska_demux_parse_id (GstMatroskaDemux * demux, guint32 id,
+ guint64 length, guint needed)
+ {
+ GstEbmlRead ebml = { 0, };
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint64 read;
+
+ GST_LOG_OBJECT (demux, "Parsing Element id 0x%x, "
+ "size %" G_GUINT64_FORMAT ", prefix %d", id, length, needed);
+
+ /* if we plan to read and parse this element, we need prefix (id + length)
+ * and the contents */
+ /* mind about overflow wrap-around when dealing with undefined size */
+ read = length;
+ if (G_LIKELY (length != G_MAXUINT64))
+ read += needed;
+
+ switch (demux->common.state) {
+ case GST_MATROSKA_READ_STATE_START:
+ switch (id) {
+ case GST_EBML_ID_HEADER:
+ GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
+ ret = gst_matroska_read_common_parse_header (&demux->common, &ebml);
+ if (ret != GST_FLOW_OK)
+ goto parse_failed;
+ demux->common.state = GST_MATROSKA_READ_STATE_SEGMENT;
+ gst_matroska_demux_check_seekability (demux);
+ break;
+ default:
+ goto invalid_header;
+ break;
+ }
+ break;
+ case GST_MATROSKA_READ_STATE_SEGMENT:
+ switch (id) {
+ case GST_MATROSKA_ID_SEGMENT:
+ /* eat segment prefix */
+ GST_READ_CHECK (gst_matroska_demux_flush (demux, needed));
+ GST_DEBUG_OBJECT (demux,
+ "Found Segment start at offset %" G_GUINT64_FORMAT " with size %"
+ G_GUINT64_FORMAT, demux->common.offset, length);
+ /* seeks are from the beginning of the segment,
+ * after the segment ID/length */
+ demux->common.ebml_segment_start = demux->common.offset;
+ if (length == 0)
+ length = G_MAXUINT64;
+ demux->common.ebml_segment_length = length;
+ demux->common.state = GST_MATROSKA_READ_STATE_HEADER;
+ break;
+ default:
+ GST_WARNING_OBJECT (demux,
+ "Expected a Segment ID (0x%x), but received 0x%x!",
+ GST_MATROSKA_ID_SEGMENT, id);
+ GST_READ_CHECK (gst_matroska_demux_flush (demux, read));
+ break;
+ }
+ break;
+ case GST_MATROSKA_READ_STATE_SCANNING:
+ if (id != GST_MATROSKA_ID_CLUSTER &&
+ id != GST_MATROSKA_ID_PREVSIZE &&
+ id != GST_MATROSKA_ID_CLUSTERTIMECODE) {
+ if (demux->common.start_resync_offset != -1) {
+ /* we need to skip byte by byte if we are scanning for a new cluster
+ * after invalid data is found
+ */
+ read = 1;
+ }
+ goto skip;
+ } else {
+ if (demux->common.start_resync_offset != -1) {
+ GST_LOG_OBJECT (demux, "Resync done, new cluster found!");
+ demux->common.start_resync_offset = -1;
+ demux->common.state = demux->common.state_to_restore;
+ }
+ }
+ /* fall-through */
+ case GST_MATROSKA_READ_STATE_HEADER:
+ case GST_MATROSKA_READ_STATE_DATA:
+ case GST_MATROSKA_READ_STATE_SEEK:
+ switch (id) {
+ case GST_EBML_ID_HEADER:
+ GST_READ_CHECK (gst_matroska_demux_flush (demux, read));
+ demux->common.state = GST_MATROSKA_READ_STATE_SEGMENT;
+ gst_matroska_demux_check_seekability (demux);
+ break;
+ case GST_MATROSKA_ID_SEGMENTINFO:
+ if (!demux->common.segmentinfo_parsed) {
+ GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
+ ret = gst_matroska_read_common_parse_info (&demux->common,
+ GST_ELEMENT_CAST (demux), &ebml);
+ if (ret == GST_FLOW_OK)
+ gst_matroska_demux_send_tags (demux);
+ } else {
+ GST_READ_CHECK (gst_matroska_demux_flush (demux, read));
+ }
+ break;
+ case GST_MATROSKA_ID_TRACKS:
+ GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
+ if (!demux->tracks_parsed) {
+ ret = gst_matroska_demux_parse_tracks (demux, &ebml);
+ } else {
+ ret = gst_matroska_demux_update_tracks (demux, &ebml);
+ }
+ break;
+ case GST_MATROSKA_ID_CLUSTER:
+ if (G_UNLIKELY (!demux->tracks_parsed)) {
+ if (demux->streaming) {
+ GST_DEBUG_OBJECT (demux, "Cluster before Track");
+ goto not_streamable;
+ } else {
+ ret = gst_matroska_demux_find_tracks (demux);
+ if (!demux->tracks_parsed)
+ goto no_tracks;
+ }
+ }
+ if (demux->common.state == GST_MATROSKA_READ_STATE_HEADER) {
+ demux->common.state = GST_MATROSKA_READ_STATE_DATA;
+ demux->first_cluster_offset = demux->common.offset;
+
+ if (!demux->streaming &&
+ !GST_CLOCK_TIME_IS_VALID (demux->common.segment.duration)) {
+ GstMatroskaIndex *last = NULL;
+
+ GST_DEBUG_OBJECT (demux,
+ "estimating duration using last cluster");
+ if ((last = gst_matroska_demux_search_pos (demux,
+ GST_CLOCK_TIME_NONE)) != NULL) {
+ demux->last_cluster_offset =
+ last->pos + demux->common.ebml_segment_start;
+ demux->stream_last_time = last->time;
+ demux->common.segment.duration =
+ demux->stream_last_time - demux->stream_start_time;
+ /* the above estimate should not be relied on too strongly */
+ demux->invalid_duration = TRUE;
+ GST_DEBUG_OBJECT (demux,
+ "estimated duration as %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (demux->common.segment.duration));
+
+ g_free (last);
+ }
+ }
+
+ /* Peek at second cluster in order to figure out if we have cluster
+ * prev_size or not (which is never set on the first cluster for
+ * obvious reasons). This is useful in case someone initiates a
+ * seek or direction change before we reach the second cluster. */
+ if (!demux->streaming) {
+ ClusterInfo cluster = { 0, };
+
+ if (gst_matroska_demux_peek_cluster_info (demux, &cluster,
+ demux->first_cluster_offset) && cluster.size > 0) {
+ gst_matroska_demux_peek_cluster_info (demux, &cluster,
+ demux->first_cluster_offset + cluster.size);
+ }
+ demux->common.offset = demux->first_cluster_offset;
+ }
+
+ if (demux->deferred_seek_event) {
+ GstEvent *seek_event;
+ GstPad *seek_pad;
+ seek_event = demux->deferred_seek_event;
+ seek_pad = demux->deferred_seek_pad;
+ demux->deferred_seek_event = NULL;
+ demux->deferred_seek_pad = NULL;
+ GST_DEBUG_OBJECT (demux,
+ "Handling deferred seek event: %" GST_PTR_FORMAT, seek_event);
+ gst_matroska_demux_handle_seek_event (demux, seek_pad,
+ seek_event);
+ gst_event_unref (seek_event);
+ }
+
+ /* send initial segment - we wait till we know the first
+ incoming timestamp, so we can properly set the start of
+ the segment. */
+ demux->need_segment = TRUE;
+ }
+ demux->cluster_time = GST_CLOCK_TIME_NONE;
+ demux->cluster_offset = demux->common.offset;
+ demux->cluster_prevsize = 0;
+ if (G_UNLIKELY (!demux->seek_first && demux->seek_block)) {
+ GST_DEBUG_OBJECT (demux, "seek target block %" G_GUINT64_FORMAT
+ " not found in Cluster, trying next Cluster's first block instead",
+ demux->seek_block);
+ demux->seek_block = 0;
+ }
+ demux->seek_first = FALSE;
+ /* record next cluster for recovery */
+ if (read != G_MAXUINT64)
+ demux->next_cluster_offset = demux->cluster_offset + read;
+ /* eat cluster prefix */
+ gst_matroska_demux_flush (demux, needed);
+ break;
+ case GST_MATROSKA_ID_CLUSTERTIMECODE:
+ {
+ guint64 num;
+
+ GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
+ if ((ret = gst_ebml_read_uint (&ebml, &id, &num)) != GST_FLOW_OK)
+ goto parse_failed;
+ GST_DEBUG_OBJECT (demux, "ClusterTimeCode: %" G_GUINT64_FORMAT, num);
+ demux->cluster_time = num;
+ /* track last cluster */
+ if (demux->cluster_offset > demux->last_cluster_offset) {
+ demux->last_cluster_offset = demux->cluster_offset;
+ demux->stream_last_time =
+ demux->cluster_time * demux->common.time_scale;
+ }
+ #if 0
+ if (demux->common.element_index) {
+ if (demux->common.element_index_writer_id == -1)
+ gst_index_get_writer_id (demux->common.element_index,
+ GST_OBJECT (demux), &demux->common.element_index_writer_id);
+ GST_LOG_OBJECT (demux, "adding association %" GST_TIME_FORMAT "-> %"
+ G_GUINT64_FORMAT " for writer id %d",
+ GST_TIME_ARGS (demux->cluster_time), demux->cluster_offset,
+ demux->common.element_index_writer_id);
+ gst_index_add_association (demux->common.element_index,
+ demux->common.element_index_writer_id,
+ GST_ASSOCIATION_FLAG_KEY_UNIT,
+ GST_FORMAT_TIME, demux->cluster_time,
+ GST_FORMAT_BYTES, demux->cluster_offset, NULL);
+ }
+ #endif
+ break;
+ }
+ case GST_MATROSKA_ID_BLOCKGROUP:
+ if (!gst_matroska_demux_seek_block (demux))
+ goto skip;
+ GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
+ DEBUG_ELEMENT_START (demux, &ebml, "BlockGroup");
+ if ((ret = gst_ebml_read_master (&ebml, &id)) == GST_FLOW_OK) {
+ ret = gst_matroska_demux_parse_blockgroup_or_simpleblock (demux,
+ &ebml, demux->cluster_time, demux->cluster_offset, FALSE);
+ }
+ DEBUG_ELEMENT_STOP (demux, &ebml, "BlockGroup", ret);
+ break;
+ case GST_MATROSKA_ID_SIMPLEBLOCK:
+ if (!gst_matroska_demux_seek_block (demux))
+ goto skip;
+ GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
+ DEBUG_ELEMENT_START (demux, &ebml, "SimpleBlock");
+ ret = gst_matroska_demux_parse_blockgroup_or_simpleblock (demux,
+ &ebml, demux->cluster_time, demux->cluster_offset, TRUE);
+ DEBUG_ELEMENT_STOP (demux, &ebml, "SimpleBlock", ret);
+ break;
+ case GST_MATROSKA_ID_ATTACHMENTS:
+ if (!demux->common.attachments_parsed) {
+ GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
+ ret = gst_matroska_read_common_parse_attachments (&demux->common,
+ GST_ELEMENT_CAST (demux), &ebml);
+ if (ret == GST_FLOW_OK)
+ gst_matroska_demux_send_tags (demux);
+ } else {
+ GST_READ_CHECK (gst_matroska_demux_flush (demux, read));
+ }
+ break;
+ case GST_MATROSKA_ID_TAGS:
+ GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
+ ret = gst_matroska_read_common_parse_metadata (&demux->common,
+ GST_ELEMENT_CAST (demux), &ebml);
+ if (ret == GST_FLOW_OK)
+ gst_matroska_demux_send_tags (demux);
+ break;
+ case GST_MATROSKA_ID_CHAPTERS:
+ if (!demux->common.chapters_parsed) {
+ GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
+ ret =
+ gst_matroska_read_common_parse_chapters (&demux->common, &ebml);
+
+ if (demux->common.toc) {
+ gst_matroska_demux_send_event (demux,
+ gst_event_new_toc (demux->common.toc, FALSE));
+ }
+ } else
+ GST_READ_CHECK (gst_matroska_demux_flush (demux, read));
+ break;
+ case GST_MATROSKA_ID_SEEKHEAD:
+ GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
+ ret = gst_matroska_demux_parse_contents (demux, &ebml);
+ break;
+ case GST_MATROSKA_ID_CUES:
+ if (demux->common.index_parsed) {
+ GST_READ_CHECK (gst_matroska_demux_flush (demux, read));
+ break;
+ }
+ GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
+ ret = gst_matroska_read_common_parse_index (&demux->common, &ebml);
+ /* only push based; delayed index building */
+ if (ret == GST_FLOW_OK
+ && demux->common.state == GST_MATROSKA_READ_STATE_SEEK) {
+ GstEvent *event;
+
+ GST_OBJECT_LOCK (demux);
+ event = demux->seek_event;
+ demux->seek_event = NULL;
+ GST_OBJECT_UNLOCK (demux);
+
+ g_assert (event);
+ /* unlikely to fail, since we managed to seek to this point */
+ if (!gst_matroska_demux_handle_seek_event (demux, NULL, event)) {
+ gst_event_unref (event);
+ goto seek_failed;
+ }
+ gst_event_unref (event);
+ /* resume data handling, main thread clear to seek again */
+ GST_OBJECT_LOCK (demux);
+ demux->common.state = GST_MATROSKA_READ_STATE_DATA;
+ GST_OBJECT_UNLOCK (demux);
+ }
+ break;
+ case GST_MATROSKA_ID_PREVSIZE:{
+ guint64 num;
+
+ GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
+ if ((ret = gst_ebml_read_uint (&ebml, &id, &num)) != GST_FLOW_OK)
+ goto parse_failed;
+ GST_LOG_OBJECT (demux, "ClusterPrevSize: %" G_GUINT64_FORMAT, num);
+ demux->cluster_prevsize = num;
+ demux->seen_cluster_prevsize = TRUE;
+ break;
+ }
+ case GST_MATROSKA_ID_POSITION:
+ case GST_MATROSKA_ID_ENCRYPTEDBLOCK:
+ /* WebM doesn't support the EncryptedBlock element.
+ * The Matroska spec doesn't give more detail on how to parse this
+ * element; for example, the TransformID field isn't specified yet. */
+ case GST_MATROSKA_ID_SILENTTRACKS:
+ GST_DEBUG_OBJECT (demux,
+ "Skipping Cluster subelement 0x%x - ignoring", id);
+ /* fall-through */
+ default:
+ skip:
+ GST_DEBUG_OBJECT (demux, "skipping Element 0x%x", id);
+ GST_READ_CHECK (gst_matroska_demux_flush (demux, read));
+ break;
+ }
+ break;
+ }
+
+ if (ret == GST_FLOW_PARSE)
+ goto parse_failed;
+
+ exit:
+ gst_ebml_read_clear (&ebml);
+ return ret;
+
+ /* ERRORS */
+ read_error:
+ {
+ /* simply exit, maybe not enough data yet */
+ /* no ebml to clear if read error */
+ return ret;
+ }
+ parse_failed:
+ {
+ GST_ELEMENT_ERROR (demux, STREAM, DEMUX, (NULL),
+ ("Failed to parse Element 0x%x", id));
+ ret = GST_FLOW_ERROR;
+ goto exit;
+ }
+ not_streamable:
+ {
+ GST_ELEMENT_ERROR (demux, STREAM, DEMUX, (NULL),
+ ("File layout does not permit streaming"));
+ ret = GST_FLOW_ERROR;
+ goto exit;
+ }
+ no_tracks:
+ {
+ GST_ELEMENT_ERROR (demux, STREAM, DEMUX, (NULL),
+ ("No Tracks element found"));
+ ret = GST_FLOW_ERROR;
+ goto exit;
+ }
+ invalid_header:
+ {
+ GST_ELEMENT_ERROR (demux, STREAM, DEMUX, (NULL), ("Invalid header"));
+ ret = GST_FLOW_ERROR;
+ goto exit;
+ }
+ seek_failed:
+ {
+ GST_ELEMENT_ERROR (demux, STREAM, DEMUX, (NULL), ("Failed to seek"));
+ ret = GST_FLOW_ERROR;
+ goto exit;
+ }
+ }
+
+ static void
+ gst_matroska_demux_loop (GstPad * pad)
+ {
+ GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (GST_PAD_PARENT (pad));
+ GstFlowReturn ret;
+ guint32 id;
+ guint64 length;
+ guint needed;
+
+ /* If we have to close a segment, send a new segment to do this now */
+ if (G_LIKELY (demux->common.state == GST_MATROSKA_READ_STATE_DATA)) {
+ if (G_UNLIKELY (demux->new_segment)) {
+ gst_matroska_demux_send_event (demux, demux->new_segment);
+ demux->new_segment = NULL;
+ }
+ }
+
+ ret = gst_matroska_read_common_peek_id_length_pull (&demux->common,
+ GST_ELEMENT_CAST (demux), &id, &length, &needed);
+ if (ret == GST_FLOW_EOS) {
+ goto eos;
+ } else if (ret == GST_FLOW_FLUSHING) {
+ goto pause;
+ } else if (ret != GST_FLOW_OK) {
+ ret = gst_matroska_demux_check_parse_error (demux);
+
+ /* Only handle EOS as no error if we're outside the segment already */
+ if (ret == GST_FLOW_EOS && (demux->common.ebml_segment_length != G_MAXUINT64
+ && demux->common.offset >=
+ demux->common.ebml_segment_start +
+ demux->common.ebml_segment_length))
+ goto eos;
+ else if (ret != GST_FLOW_OK)
+ goto pause;
+ else
+ return;
+ }
+
+ GST_LOG_OBJECT (demux, "Offset %" G_GUINT64_FORMAT ", Element id 0x%x, "
+ "size %" G_GUINT64_FORMAT ", needed %d", demux->common.offset, id,
+ length, needed);
+
+ ret = gst_matroska_demux_parse_id (demux, id, length, needed);
+ if (ret == GST_FLOW_EOS)
+ goto eos;
+ if (ret != GST_FLOW_OK)
+ goto pause;
+
+ /* check if we're at the end of a configured segment */
+ if (G_LIKELY (demux->common.src->len)) {
+ guint i;
+
+ g_assert (demux->common.num_streams == demux->common.src->len);
+ for (i = 0; i < demux->common.src->len; i++) {
+ GstMatroskaTrackContext *context = g_ptr_array_index (demux->common.src,
+ i);
+ GST_DEBUG_OBJECT (context->pad, "pos %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (context->pos));
+ if (context->eos == FALSE)
+ goto next;
+ }
+
+ GST_INFO_OBJECT (demux, "All streams are EOS");
+ ret = GST_FLOW_EOS;
+ goto eos;
+ }
+
+ next:
+ if (G_UNLIKELY (demux->cached_length == G_MAXUINT64 ||
+ demux->common.offset >= demux->cached_length)) {
+ demux->cached_length = gst_matroska_read_common_get_length (&demux->common);
+ if (demux->common.offset == demux->cached_length) {
+ GST_LOG_OBJECT (demux, "Reached end of stream");
+ ret = GST_FLOW_EOS;
+ goto eos;
+ }
+ }
+
+ return;
+
+ /* ERRORS */
+ eos:
+ {
+ if (demux->common.segment.rate < 0.0) {
+ ret = gst_matroska_demux_seek_to_previous_keyframe (demux);
+ if (ret == GST_FLOW_OK)
+ return;
+ }
+ /* fall-through */
+ }
+ pause:
+ {
+ const gchar *reason = gst_flow_get_name (ret);
+ gboolean push_eos = FALSE;
+
+ GST_LOG_OBJECT (demux, "pausing task, reason %s", reason);
+ gst_pad_pause_task (demux->common.sinkpad);
+
+ if (ret == GST_FLOW_EOS) {
+ /* perform EOS logic */
+
+ /* If we were in the headers, make sure we send no-more-pads.
+ This will ensure decodebin does not get stuck thinking
+ the chain is not complete yet, and waiting indefinitely. */
+ if (G_UNLIKELY (demux->common.state == GST_MATROSKA_READ_STATE_HEADER)) {
+ if (demux->common.src->len == 0) {
+ GST_ELEMENT_ERROR (demux, STREAM, FAILED, (NULL),
+ ("No pads created"));
+ } else {
+ GST_ELEMENT_WARNING (demux, STREAM, DEMUX, (NULL),
+ ("Failed to finish reading headers"));
+ }
+ gst_element_no_more_pads (GST_ELEMENT (demux));
+ }
+
+ if (demux->common.segment.flags & GST_SEEK_FLAG_SEGMENT) {
+ GstEvent *event;
+ GstMessage *msg;
+ gint64 stop;
+
+ /* for segment playback we need to post when (in stream time)
+ * we stopped, this is either stop (when set) or the duration. */
+ if ((stop = demux->common.segment.stop) == -1)
+ stop = demux->last_stop_end;
+
+ GST_LOG_OBJECT (demux, "Sending segment done, at end of segment");
+ msg = gst_message_new_segment_done (GST_OBJECT (demux), GST_FORMAT_TIME,
+ stop);
+ if (demux->segment_seqnum)
+ gst_message_set_seqnum (msg, demux->segment_seqnum);
+ gst_element_post_message (GST_ELEMENT (demux), msg);
+
+ event = gst_event_new_segment_done (GST_FORMAT_TIME, stop);
+ if (demux->segment_seqnum)
+ gst_event_set_seqnum (event, demux->segment_seqnum);
+ gst_matroska_demux_send_event (demux, event);
+ } else {
+ push_eos = TRUE;
+ }
+ } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
+ /* for fatal errors we post an error message */
+ GST_ELEMENT_FLOW_ERROR (demux, ret);
+ push_eos = TRUE;
+ }
+ if (push_eos) {
+ GstEvent *event;
+
+ /* send EOS, and prevent hanging if no streams yet */
+ GST_LOG_OBJECT (demux, "Sending EOS, at end of stream");
+ event = gst_event_new_eos ();
+ if (demux->segment_seqnum)
+ gst_event_set_seqnum (event, demux->segment_seqnum);
+ if (!gst_matroska_demux_send_event (demux, event) &&
+ (ret == GST_FLOW_EOS)) {
+ GST_ELEMENT_ERROR (demux, STREAM, DEMUX,
+ (NULL), ("got eos but no streams (yet)"));
+ }
+ }
+ return;
+ }
+ }
+
+ /*
+ * Create and push a flushing seek event upstream
+ */
+ static gboolean
+ perform_seek_to_offset (GstMatroskaDemux * demux, gdouble rate, guint64 offset,
+ guint32 seqnum, GstSeekFlags flags)
+ {
+ GstEvent *event;
+ gboolean res = FALSE;
+
+ GST_DEBUG_OBJECT (demux, "Seeking to %" G_GUINT64_FORMAT, offset);
+
+ event =
+ gst_event_new_seek (rate, GST_FORMAT_BYTES,
+ flags | GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE,
+ GST_SEEK_TYPE_SET, offset, GST_SEEK_TYPE_NONE, -1);
+ gst_event_set_seqnum (event, seqnum);
+
+ res = gst_pad_push_event (demux->common.sinkpad, event);
+
+ /* segment event will update offset */
+ return res;
+ }
+
+ static GstFlowReturn
+ gst_matroska_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
+ {
+ GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (parent);
+ guint available;
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint needed = 0;
+ guint32 id;
+ guint64 length;
+
+ if (G_UNLIKELY (GST_BUFFER_IS_DISCONT (buffer))) {
+ GST_DEBUG_OBJECT (demux, "got DISCONT");
+ gst_adapter_clear (demux->common.adapter);
+ GST_OBJECT_LOCK (demux);
+ gst_matroska_read_common_reset_streams (&demux->common,
+ GST_CLOCK_TIME_NONE, FALSE);
+ GST_OBJECT_UNLOCK (demux);
+ }
+
+ gst_adapter_push (demux->common.adapter, buffer);
+ buffer = NULL;
+
+ next:
+ available = gst_adapter_available (demux->common.adapter);
+
+ ret = gst_matroska_read_common_peek_id_length_push (&demux->common,
+ GST_ELEMENT_CAST (demux), &id, &length, &needed);
+ if (G_UNLIKELY (ret != GST_FLOW_OK && ret != GST_FLOW_EOS)) {
+ if (demux->common.ebml_segment_length != G_MAXUINT64
+ && demux->common.offset >=
+ demux->common.ebml_segment_start + demux->common.ebml_segment_length) {
+ return GST_FLOW_OK;
+ } else {
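+ /* lost sync: remember where the resync started and keep scanning
+ * (the SCANNING state advances byte by byte) until a new cluster
+ * turns up or INVALID_DATA_THRESHOLD bytes have been examined */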
+ gint64 bytes_scanned;
+ if (demux->common.start_resync_offset == -1) {
+ demux->common.start_resync_offset = demux->common.offset;
+ demux->common.state_to_restore = demux->common.state;
+ }
+ bytes_scanned = demux->common.offset - demux->common.start_resync_offset;
+ if (bytes_scanned <= INVALID_DATA_THRESHOLD) {
+ GST_WARNING_OBJECT (demux,
+ "parse error, looking for next cluster, actual offset %"
+ G_GUINT64_FORMAT ", start resync offset %" G_GUINT64_FORMAT,
+ demux->common.offset, demux->common.start_resync_offset);
+ demux->common.state = GST_MATROSKA_READ_STATE_SCANNING;
+ ret = GST_FLOW_OK;
+ } else {
+ GST_WARNING_OBJECT (demux,
+ "unrecoverable parse error, next cluster not found and threshold "
+ "exceeded, bytes scanned %" G_GINT64_FORMAT, bytes_scanned);
+ return ret;
+ }
+ }
+ }
+
+ GST_LOG_OBJECT (demux, "Offset %" G_GUINT64_FORMAT ", Element id 0x%x, "
+ "size %" G_GUINT64_FORMAT ", needed %d, available %d",
+ demux->common.offset, id, length, needed, available);
+
+ if (needed > available)
+ return GST_FLOW_OK;
+
+ ret = gst_matroska_demux_parse_id (demux, id, length, needed);
+ if (ret == GST_FLOW_EOS) {
+ /* need more data */
+ return GST_FLOW_OK;
+ } else if (ret != GST_FLOW_OK) {
+ return ret;
+ } else
+ goto next;
+ }
+
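+ /* sink-pad event handler; notably consumes the BYTE segment that
+ * follows an upstream seek and defers sending a proper TIME segment
+ * until the first cluster timestamp is known */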
+ static gboolean
+ gst_matroska_demux_handle_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+ {
+ gboolean res = TRUE;
+ GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (parent);
+
+ GST_DEBUG_OBJECT (demux,
+ "have event type %s: %p on sink pad", GST_EVENT_TYPE_NAME (event), event);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEGMENT:
+ {
+ const GstSegment *segment;
+
+ /* some debug output */
+ gst_event_parse_segment (event, &segment);
+ /* FIXME: do we need to update segment base here (like accum in 0.10)? */
+ GST_DEBUG_OBJECT (demux,
+ "received format %d segment %" GST_SEGMENT_FORMAT, segment->format,
+ segment);
+
+ if (demux->common.state < GST_MATROSKA_READ_STATE_DATA) {
+ GST_DEBUG_OBJECT (demux, "still starting");
+ goto exit;
+ }
+
+ /* we only expect a BYTE segment, e.g. following a seek */
+ if (segment->format != GST_FORMAT_BYTES) {
+ GST_DEBUG_OBJECT (demux, "unsupported segment format, ignoring");
+ goto exit;
+ }
+
+ GST_DEBUG_OBJECT (demux, "clearing segment state");
+ GST_OBJECT_LOCK (demux);
+ /* clear current segment leftover */
+ gst_adapter_clear (demux->common.adapter);
+ /* and some streaming setup */
+ demux->common.offset = segment->start;
+ /* accumulate base based on current position */
+ if (GST_CLOCK_TIME_IS_VALID (demux->common.segment.position))
+ demux->common.segment.base +=
+ (MAX (demux->common.segment.position, demux->stream_start_time)
+ - demux->stream_start_time) / fabs (demux->common.segment.rate);
+ /* do not know where we are;
+ * need to come across a cluster and generate segment */
+ demux->common.segment.position = GST_CLOCK_TIME_NONE;
+ demux->cluster_time = GST_CLOCK_TIME_NONE;
+ demux->cluster_offset = 0;
+ demux->cluster_prevsize = 0;
+ demux->need_segment = TRUE;
+ demux->segment_seqnum = gst_event_get_seqnum (event);
+ /* but keep some of the upstream segment */
+ demux->common.segment.rate = segment->rate;
+ demux->common.segment.flags = segment->flags;
+ /* also check if need to keep some of the requested seek position */
+ if (demux->seek_offset == segment->start) {
+ GST_DEBUG_OBJECT (demux, "position matches requested seek");
+ demux->common.segment.position = demux->requested_seek_time;
+ } else {
+ GST_DEBUG_OBJECT (demux, "unexpected segment position");
+ }
+ demux->requested_seek_time = GST_CLOCK_TIME_NONE;
+ demux->seek_offset = -1;
+ GST_OBJECT_UNLOCK (demux);
+ exit:
+ /* chain will send initial segment after pads have been added,
+ * or otherwise come up with one */
+ GST_DEBUG_OBJECT (demux, "eating event");
+ gst_event_unref (event);
+ res = TRUE;
+ break;
+ }
+ case GST_EVENT_EOS:
+ {
+ if (demux->common.state != GST_MATROSKA_READ_STATE_DATA
+ && demux->common.state != GST_MATROSKA_READ_STATE_SCANNING) {
+ gst_event_unref (event);
+ GST_ELEMENT_ERROR (demux, STREAM, DEMUX,
+ (NULL), ("got eos and didn't receive a complete header object"));
+ } else if (demux->common.num_streams == 0) {
+ GST_ELEMENT_ERROR (demux, STREAM, DEMUX,
+ (NULL), ("got eos but no streams (yet)"));
+ } else {
+ gst_matroska_demux_send_event (demux, event);
+ }
+ break;
+ }
+ case GST_EVENT_FLUSH_STOP:
+ {
+ guint64 dur;
+
+ gst_adapter_clear (demux->common.adapter);
+ GST_OBJECT_LOCK (demux);
+ gst_matroska_read_common_reset_streams (&demux->common,
+ GST_CLOCK_TIME_NONE, TRUE);
+ gst_flow_combiner_reset (demux->flowcombiner);
+ dur = demux->common.segment.duration;
+ gst_segment_init (&demux->common.segment, GST_FORMAT_TIME);
+ demux->common.segment.duration = dur;
+ demux->cluster_time = GST_CLOCK_TIME_NONE;
+ demux->cluster_offset = 0;
+ demux->cluster_prevsize = 0;
+ GST_OBJECT_UNLOCK (demux);
+ /* fall-through */
+ }
+ default:
+ res = gst_pad_event_default (pad, parent, event);
+ break;
+ }
+
+ return res;
+ }
+
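+ /* choose the scheduling mode: pull (loop task) when the upstream peer
+ * is seekable, push (streaming) otherwise */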
+ static gboolean
+ gst_matroska_demux_sink_activate (GstPad * sinkpad, GstObject * parent)
+ {
+ GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (parent);
+ GstQuery *query;
+ gboolean pull_mode = FALSE;
+
+ query = gst_query_new_scheduling ();
+
+ if (gst_pad_peer_query (sinkpad, query))
+ pull_mode = gst_query_has_scheduling_mode_with_flags (query,
+ GST_PAD_MODE_PULL, GST_SCHEDULING_FLAG_SEEKABLE);
+
+ gst_query_unref (query);
+
+ if (pull_mode) {
+ GST_DEBUG ("going to pull mode");
+ demux->streaming = FALSE;
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PULL, TRUE);
+ } else {
+ GST_DEBUG ("going to push (streaming) mode");
+ demux->streaming = TRUE;
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PUSH, TRUE);
+ }
+ }
+
+ static gboolean
+ gst_matroska_demux_sink_activate_mode (GstPad * sinkpad, GstObject * parent,
+ GstPadMode mode, gboolean active)
+ {
+ switch (mode) {
+ case GST_PAD_MODE_PULL:
+ if (active) {
+ /* if we have a scheduler we can start the task */
+ gst_pad_start_task (sinkpad, (GstTaskFunction) gst_matroska_demux_loop,
+ sinkpad, NULL);
+ } else {
+ gst_pad_stop_task (sinkpad);
+ }
+ return TRUE;
+ case GST_PAD_MODE_PUSH:
+ return TRUE;
+ default:
+ return FALSE;
+ }
+ }
+
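+ /* map a Matroska video codec id (plus optional codec private data) to
+ * GstCaps; also returns a human-readable codec description and, for
+ * VfW-compatible tracks, the original RIFF fourcc */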
+ static GstCaps *
+ gst_matroska_demux_video_caps (GstMatroskaTrackVideoContext *
+ videocontext, const gchar * codec_id, guint8 * data, guint size,
+ gchar ** codec_name, guint32 * riff_fourcc)
+ {
+ GstMatroskaTrackContext *context = (GstMatroskaTrackContext *) videocontext;
+ GstCaps *caps = NULL;
+
+ g_assert (videocontext != NULL);
+ g_assert (codec_name != NULL);
+
+ if (riff_fourcc)
+ *riff_fourcc = 0;
+
+ /* TODO: check if we have all codec types from matroska-ids.h
+ * check if we have to do more special things with codec_private
+ *
+ * Add support for
+ * GST_MATROSKA_CODEC_ID_VIDEO_QUICKTIME
+ * GST_MATROSKA_CODEC_ID_VIDEO_SNOW
+ */
+
+ if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_VFW_FOURCC)) {
+ gst_riff_strf_vids *vids = NULL;
+
+ if (data) {
+ GstBuffer *buf = NULL;
+
+ vids = (gst_riff_strf_vids *) data;
+
+ /* ensure the size is big enough */
+ if (size < 24) {
+ GST_WARNING ("Too small BITMAPINFOHEADER (%d bytes)", size);
+ return NULL;
+ }
+ if (size < sizeof (gst_riff_strf_vids)) {
+ vids = g_new (gst_riff_strf_vids, 1);
+ memcpy (vids, data, size);
+ }
+
+ context->dts_only = TRUE; /* VFW files only store DTS */
+
+ /* little-endian on disk -> host byte order */
+ vids->size = GUINT32_FROM_LE (vids->size);
+ vids->width = GUINT32_FROM_LE (vids->width);
+ vids->height = GUINT32_FROM_LE (vids->height);
+ vids->planes = GUINT16_FROM_LE (vids->planes);
+ vids->bit_cnt = GUINT16_FROM_LE (vids->bit_cnt);
+ vids->compression = GUINT32_FROM_LE (vids->compression);
+ vids->image_size = GUINT32_FROM_LE (vids->image_size);
+ vids->xpels_meter = GUINT32_FROM_LE (vids->xpels_meter);
+ vids->ypels_meter = GUINT32_FROM_LE (vids->ypels_meter);
+ vids->num_colors = GUINT32_FROM_LE (vids->num_colors);
+ vids->imp_colors = GUINT32_FROM_LE (vids->imp_colors);
+
+ if (size > sizeof (gst_riff_strf_vids)) { /* some extra_data */
+ gsize offset = sizeof (gst_riff_strf_vids);
+
+ buf = gst_buffer_new_memdup ((guint8 *) vids + offset, size - offset);
+ }
+
+ if (riff_fourcc)
+ *riff_fourcc = vids->compression;
+
+ caps = gst_riff_create_video_caps (vids->compression, NULL, vids,
+ buf, NULL, codec_name);
+
+ if (caps == NULL) {
+ GST_WARNING ("Unhandled RIFF fourcc %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (vids->compression));
+ } else {
+ static GstStaticCaps intra_caps = GST_STATIC_CAPS ("image/jpeg; "
+ "video/x-raw; image/png; video/x-dv; video/x-huffyuv; video/x-ffv; "
+ "video/x-compressed-yuv");
++ GstCaps *tmp = gst_static_caps_get (&intra_caps);
++
+ context->intra_only =
++ gst_caps_can_intersect (tmp, caps);
++ gst_caps_unref (tmp);
+ }
+
+ if (buf)
+ gst_buffer_unref (buf);
+
+ if (vids != (gst_riff_strf_vids *) data)
+ g_free (vids);
+ }
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_UNCOMPRESSED)) {
+ GstVideoInfo info;
+ GstVideoFormat format;
+
+ gst_video_info_init (&info);
+ switch (videocontext->fourcc) {
+ case GST_MAKE_FOURCC ('I', '4', '2', '0'):
+ format = GST_VIDEO_FORMAT_I420;
+ break;
+ case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
+ format = GST_VIDEO_FORMAT_YUY2;
+ break;
+ case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
+ format = GST_VIDEO_FORMAT_YV12;
+ break;
+ case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
+ format = GST_VIDEO_FORMAT_UYVY;
+ break;
+ case GST_MAKE_FOURCC ('A', 'Y', 'U', 'V'):
+ format = GST_VIDEO_FORMAT_AYUV;
+ break;
+ case GST_MAKE_FOURCC ('Y', '8', '0', '0'):
+ case GST_MAKE_FOURCC ('Y', '8', ' ', ' '):
+ format = GST_VIDEO_FORMAT_GRAY8;
+ break;
+ case GST_MAKE_FOURCC ('R', 'G', 'B', 24):
+ format = GST_VIDEO_FORMAT_RGB;
+ break;
+ case GST_MAKE_FOURCC ('B', 'G', 'R', 24):
+ format = GST_VIDEO_FORMAT_BGR;
+ break;
+ default:
+ GST_DEBUG ("Unknown fourcc %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (videocontext->fourcc));
+ return NULL;
+ }
+
+ context->intra_only = TRUE;
+
+ gst_video_info_set_format (&info, format, videocontext->pixel_width,
+ videocontext->pixel_height);
+ caps = gst_video_info_to_caps (&info);
+ *codec_name = gst_pb_utils_get_codec_description (caps);
+ context->alignment = 32;
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_MPEG4_SP)) {
+ caps = gst_caps_new_simple ("video/x-divx",
+ "divxversion", G_TYPE_INT, 4, NULL);
+ *codec_name = g_strdup ("MPEG-4 simple profile");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_MPEG4_ASP) ||
+ !strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_MPEG4_AP)) {
+ caps = gst_caps_new_simple ("video/mpeg",
+ "mpegversion", G_TYPE_INT, 4,
+ "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
+ if (data) {
+ GstBuffer *priv;
+
+ priv = gst_buffer_new_memdup (data, size);
+ gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, priv, NULL);
+ gst_buffer_unref (priv);
+
+ gst_codec_utils_mpeg4video_caps_set_level_and_profile (caps, data, size);
+ }
+ if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_MPEG4_ASP))
+ *codec_name = g_strdup ("MPEG-4 advanced simple profile");
+ else
+ *codec_name = g_strdup ("MPEG-4 advanced profile");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_MSMPEG4V3)) {
+ #if 0
+ caps = gst_caps_new_full (gst_structure_new ("video/x-divx",
+ "divxversion", G_TYPE_INT, 3, NULL),
+ gst_structure_new ("video/x-msmpeg",
+ "msmpegversion", G_TYPE_INT, 43, NULL), NULL);
+ #endif
+ caps = gst_caps_new_simple ("video/x-msmpeg",
+ "msmpegversion", G_TYPE_INT, 43, NULL);
+ *codec_name = g_strdup ("Microsoft MPEG-4 v.3");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_MPEG1) ||
+ !strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_MPEG2)) {
+ gint mpegversion;
+
+ if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_MPEG1))
+ mpegversion = 1;
+ else
+ mpegversion = 2;
+
+ caps = gst_caps_new_simple ("video/mpeg",
+ "systemstream", G_TYPE_BOOLEAN, FALSE,
+ "mpegversion", G_TYPE_INT, mpegversion, NULL);
+ *codec_name = g_strdup_printf ("MPEG-%d video", mpegversion);
+ context->postprocess_frame = gst_matroska_demux_add_mpeg_seq_header;
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_MJPEG)) {
+ caps = gst_caps_new_empty_simple ("image/jpeg");
+ *codec_name = g_strdup ("Motion-JPEG");
+ context->intra_only = TRUE;
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_MPEG4_AVC)) {
+ caps = gst_caps_new_empty_simple ("video/x-h264");
+ if (data) {
+ GstBuffer *priv;
+
+ /* First byte is the version, second is the profile indication, and third
+ * is the 5 constraint_set_flags and 3 reserved bits. Fourth byte is the
+ * level indication. */
+ gst_codec_utils_h264_caps_set_level_and_profile (caps, data + 1,
+ size - 1);
+
+ priv = gst_buffer_new_memdup (data, size);
+ gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, priv, NULL);
+ gst_buffer_unref (priv);
+
+ gst_caps_set_simple (caps, "stream-format", G_TYPE_STRING, "avc",
+ "alignment", G_TYPE_STRING, "au", NULL);
+ } else {
+ GST_WARNING ("No codec data found, assuming output is byte-stream");
+ gst_caps_set_simple (caps, "stream-format", G_TYPE_STRING, "byte-stream",
+ NULL);
+ }
+ *codec_name = g_strdup ("H264");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_MPEGH_HEVC)) {
+ caps = gst_caps_new_empty_simple ("video/x-h265");
+ if (data) {
+ GstBuffer *priv;
+
+ gst_codec_utils_h265_caps_set_level_tier_and_profile (caps, data + 1,
+ size - 1);
+
+ priv = gst_buffer_new_memdup (data, size);
+ gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, priv, NULL);
+ gst_buffer_unref (priv);
+
+ gst_caps_set_simple (caps, "stream-format", G_TYPE_STRING, "hvc1",
+ "alignment", G_TYPE_STRING, "au", NULL);
+ } else {
+ GST_WARNING ("No codec data found, assuming output is byte-stream");
+ gst_caps_set_simple (caps, "stream-format", G_TYPE_STRING, "byte-stream",
+ NULL);
+ }
+ *codec_name = g_strdup ("HEVC");
+ } else if ((!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_REALVIDEO1)) ||
+ (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_REALVIDEO2)) ||
+ (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_REALVIDEO3)) ||
+ (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_REALVIDEO4))) {
+ gint rmversion = -1;
+
+ if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_REALVIDEO1))
+ rmversion = 1;
+ else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_REALVIDEO2))
+ rmversion = 2;
+ else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_REALVIDEO3))
+ rmversion = 3;
+ else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_REALVIDEO4))
+ rmversion = 4;
+
+ caps = gst_caps_new_simple ("video/x-pn-realvideo",
+ "rmversion", G_TYPE_INT, rmversion, NULL);
+ GST_DEBUG ("data:%p, size:0x%x", data, size);
+ /* We need to extract the extradata ! */
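+ /* The subformat and format fields sit at fixed offsets 0x1a and 0x1e in
+ * the private data; everything from the subformat onwards is forwarded
+ * downstream as codec_data. */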
+ if (data && (size >= 0x22)) {
+ GstBuffer *priv;
+ guint rformat;
+ guint subformat;
+
+ subformat = GST_READ_UINT32_BE (data + 0x1a);
+ rformat = GST_READ_UINT32_BE (data + 0x1e);
+
+ priv = gst_buffer_new_memdup (data + 0x1a, size - 0x1a);
+ gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, priv, "format",
+ G_TYPE_INT, rformat, "subformat", G_TYPE_INT, subformat, NULL);
+ gst_buffer_unref (priv);
+
+ }
+ *codec_name = g_strdup_printf ("RealVideo %d.0", rmversion);
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_THEORA)) {
+ caps = gst_caps_new_empty_simple ("video/x-theora");
+ context->stream_headers =
+ gst_matroska_parse_xiph_stream_headers (context->codec_priv,
+ context->codec_priv_size);
+ /* FIXME: mark stream as broken and skip if there are no stream headers */
+ context->send_stream_headers = TRUE;
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_DIRAC)) {
+ caps = gst_caps_new_empty_simple ("video/x-dirac");
+ *codec_name = g_strdup_printf ("Dirac");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_VP8)) {
+ caps = gst_caps_new_empty_simple ("video/x-vp8");
+ if (videocontext->alpha_mode)
+ gst_caps_set_simple (caps, "codec-alpha", G_TYPE_BOOLEAN, TRUE, NULL);
+ *codec_name = g_strdup_printf ("On2 VP8");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_VP9)) {
+ caps = gst_caps_new_empty_simple ("video/x-vp9");
+ if (videocontext->alpha_mode)
+ gst_caps_set_simple (caps, "codec-alpha", G_TYPE_BOOLEAN, TRUE, NULL);
+ *codec_name = g_strdup_printf ("On2 VP9");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_AV1)) {
+ caps = gst_caps_new_empty_simple ("video/x-av1");
+ if (data) {
+ GstBuffer *priv;
+
+ priv = gst_buffer_new_memdup (data, size);
+ gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, priv, NULL);
+ gst_buffer_unref (priv);
+ } else {
+ GST_WARNING ("No AV1 codec data found!");
+ }
+ *codec_name = g_strdup_printf ("AOM AV1");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_FFV1)) {
+ caps =
+ gst_caps_new_simple ("video/x-ffv", "ffvversion", G_TYPE_INT, 1, NULL);
+ if (data) {
+ GstBuffer *priv;
+
+ priv = gst_buffer_new_memdup (data, size);
+ gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, priv, NULL);
+ gst_buffer_unref (priv);
+ } else {
+ GST_WARNING ("No FFV1 codec data found!");
+ }
+ *codec_name = g_strdup_printf ("FFMpeg v1");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_PRORES)) {
+ guint32 fourcc;
+ const gchar *variant, *variant_descr = "";
+
+ /* Expect a fourcc in the codec private data */
+ if (!data || size < 4) {
+ GST_WARNING ("No or too small PRORESS fourcc (%d bytes)", size);
+ return NULL;
+ }
+
+ fourcc = GST_STR_FOURCC (data);
+ switch (fourcc) {
+ case GST_MAKE_FOURCC ('a', 'p', 'c', 's'):
+ variant_descr = " 4:2:2 LT";
+ variant = "lt";
+ break;
+ case GST_MAKE_FOURCC ('a', 'p', 'c', 'h'):
+ variant = "hq";
+ variant_descr = " 4:2:2 HQ";
+ break;
+ case GST_MAKE_FOURCC ('a', 'p', '4', 'h'):
+ variant = "4444";
+ variant_descr = " 4:4:4:4";
+ break;
+ case GST_MAKE_FOURCC ('a', 'p', 'c', 'o'):
+ variant = "proxy";
+ variant_descr = " 4:2:2 Proxy";
+ break;
+ case GST_MAKE_FOURCC ('a', 'p', 'c', 'n'):
+ default:
+ variant = "standard";
+ variant_descr = " 4:2:2 SD";
+ break;
+ }
+
+ GST_LOG ("Prores video, codec fourcc %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (fourcc));
+
+ caps = gst_caps_new_simple ("video/x-prores",
+ "format", G_TYPE_STRING, variant, NULL);
+ *codec_name = g_strdup_printf ("Apple ProRes%s", variant_descr);
+ context->postprocess_frame = gst_matroska_demux_add_prores_header;
+ } else {
+ GST_WARNING ("Unknown codec '%s', cannot build Caps", codec_id);
+ return NULL;
+ }
+
+ if (caps != NULL) {
+ int i;
+ GstStructure *structure;
+
+ for (i = 0; i < gst_caps_get_size (caps); i++) {
+ structure = gst_caps_get_structure (caps, i);
+
+ /* FIXME: use the real unit here! */
+ GST_DEBUG ("video size %dx%d, target display size %dx%d (any unit)",
+ videocontext->pixel_width,
+ videocontext->pixel_height,
+ videocontext->display_width, videocontext->display_height);
+
+ /* pixel width and height are the w and h of the video in pixels */
+ if (videocontext->pixel_width > 0 && videocontext->pixel_height > 0) {
+ gint w = videocontext->pixel_width;
+ gint h = videocontext->pixel_height;
+
+ gst_structure_set (structure,
+ "width", G_TYPE_INT, w, "height", G_TYPE_INT, h, NULL);
+ }
+
+ if (videocontext->display_width > 0 || videocontext->display_height > 0) {
+ int n, d;
+
+ if (videocontext->display_width <= 0)
+ videocontext->display_width = videocontext->pixel_width;
+ if (videocontext->display_height <= 0)
+ videocontext->display_height = videocontext->pixel_height;
+
+ /* calculate the pixel aspect ratio using the display and pixel w/h */
+ n = videocontext->display_width * videocontext->pixel_height;
+ d = videocontext->display_height * videocontext->pixel_width;
+ GST_DEBUG ("setting PAR to %d/%d", n, d);
+ gst_structure_set (structure, "pixel-aspect-ratio",
+ GST_TYPE_FRACTION,
+ videocontext->display_width * videocontext->pixel_height,
+ videocontext->display_height * videocontext->pixel_width, NULL);
+ }
+
+ if (videocontext->default_fps > 0.0) {
+ gint fps_n, fps_d;
+
+ gst_util_double_to_fraction (videocontext->default_fps, &fps_n, &fps_d);
+
+ GST_DEBUG ("using default fps %d/%d", fps_n, fps_d);
+
+ gst_structure_set (structure, "framerate", GST_TYPE_FRACTION, fps_n,
+ fps_d, NULL);
+ } else if (context->default_duration > 0) {
+ int fps_n, fps_d;
+
+ gst_video_guess_framerate (context->default_duration, &fps_n, &fps_d);
+
+ GST_INFO ("using default duration %" G_GUINT64_FORMAT
+ " framerate %d/%d", context->default_duration, fps_n, fps_d);
+
+ gst_structure_set (structure, "framerate", GST_TYPE_FRACTION,
+ fps_n, fps_d, NULL);
+ } else {
+ gst_structure_set (structure, "framerate", GST_TYPE_FRACTION,
+ 0, 1, NULL);
+ }
+
+ switch (videocontext->interlace_mode) {
+ case GST_MATROSKA_INTERLACE_MODE_PROGRESSIVE:
+ gst_structure_set (structure,
+ "interlace-mode", G_TYPE_STRING, "progressive", NULL);
+ break;
+ case GST_MATROSKA_INTERLACE_MODE_INTERLACED:
+ gst_structure_set (structure,
+ "interlace-mode", G_TYPE_STRING, "interleaved", NULL);
+
+ if (videocontext->field_order != GST_VIDEO_FIELD_ORDER_UNKNOWN)
+ gst_structure_set (structure, "field-order", G_TYPE_STRING,
+ gst_video_field_order_to_string (videocontext->field_order),
+ NULL);
+ break;
+ default:
+ break;
+ }
+ }
+ if (videocontext->multiview_mode != GST_VIDEO_MULTIVIEW_MODE_NONE) {
+ if (gst_video_multiview_guess_half_aspect (videocontext->multiview_mode,
+ videocontext->pixel_width, videocontext->pixel_height,
+ videocontext->display_width * videocontext->pixel_height,
+ videocontext->display_height * videocontext->pixel_width)) {
+ videocontext->multiview_flags |= GST_VIDEO_MULTIVIEW_FLAGS_HALF_ASPECT;
+ }
+ gst_caps_set_simple (caps,
+ "multiview-mode", G_TYPE_STRING,
+ gst_video_multiview_mode_to_caps_string
+ (videocontext->multiview_mode), "multiview-flags",
+ GST_TYPE_VIDEO_MULTIVIEW_FLAGSET, videocontext->multiview_flags,
+ GST_FLAG_SET_MASK_EXACT, NULL);
+ }
+
+ if (videocontext->colorimetry.range != GST_VIDEO_COLOR_RANGE_UNKNOWN ||
+ videocontext->colorimetry.matrix != GST_VIDEO_COLOR_MATRIX_UNKNOWN ||
+ videocontext->colorimetry.transfer != GST_VIDEO_TRANSFER_UNKNOWN ||
+ videocontext->colorimetry.primaries !=
+ GST_VIDEO_COLOR_PRIMARIES_UNKNOWN) {
+ gchar *colorimetry =
+ gst_video_colorimetry_to_string (&videocontext->colorimetry);
+ gst_caps_set_simple (caps, "colorimetry", G_TYPE_STRING, colorimetry,
+ NULL);
+ GST_DEBUG ("setting colorimetry to %s", colorimetry);
+ g_free (colorimetry);
+ }
+
+ if (videocontext->mastering_display_info_present) {
+ if (!gst_video_mastering_display_info_add_to_caps
+ (&videocontext->mastering_display_info, caps)) {
+ GST_WARNING ("couldn't set mastering display info to caps");
+ }
+ }
+
+ if (videocontext->content_light_level.max_content_light_level &&
+ videocontext->content_light_level.max_frame_average_light_level) {
+ if (!gst_video_content_light_level_add_to_caps
+ (&videocontext->content_light_level, caps)) {
+ GST_WARNING ("couldn't set content light level to caps");
+ }
+ }
+
+ caps = gst_caps_simplify (caps);
+ }
+
+ return caps;
+ }
+
+ /*
+ * Some AAC specific code... *sigh*
+ * FIXME: maybe we should use '15' and code the sample rate explicitly
+ * if the sample rate doesn't match the predefined rates exactly? (tpm)
+ */
+
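+ /* Map a sample rate to the nearest AAC sampling frequency index. The
+ * thresholds are (roughly) the geometric means between adjacent standard
+ * rates: 96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000,
+ * 12000, 11025 and 8000 Hz. */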
+ static gint
+ aac_rate_idx (gint rate)
+ {
+ if (92017 <= rate)
+ return 0;
+ else if (75132 <= rate)
+ return 1;
+ else if (55426 <= rate)
+ return 2;
+ else if (46009 <= rate)
+ return 3;
+ else if (37566 <= rate)
+ return 4;
+ else if (27713 <= rate)
+ return 5;
+ else if (23004 <= rate)
+ return 6;
+ else if (18783 <= rate)
+ return 7;
+ else if (13856 <= rate)
+ return 8;
+ else if (11502 <= rate)
+ return 9;
+ else if (9391 <= rate)
+ return 10;
+ else
+ return 11;
+ }
+
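+ /* Derive the AAC profile index from the suffix of codec IDs such as
+ * "A_AAC/MPEG4/LC"; the profile string starts at offset 12. */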
+ static gint
+ aac_profile_idx (const gchar * codec_id)
+ {
+ gint profile;
+
+ if (strlen (codec_id) <= 12)
+ profile = 3;
+ else if (!strncmp (&codec_id[12], "MAIN", 4))
+ profile = 0;
+ else if (!strncmp (&codec_id[12], "LC", 2))
+ profile = 1;
+ else if (!strncmp (&codec_id[12], "SSR", 3))
+ profile = 2;
+ else
+ profile = 3;
+
+ return profile;
+ }
+
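+ /* Round n up to the next power of two by smearing the highest set bit
+ * into every lower bit position and adding one. */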
+ static guint
+ round_up_pow2 (guint n)
+ {
+ n = n - 1;
+ n = n | (n >> 1);
+ n = n | (n >> 2);
+ n = n | (n >> 4);
+ n = n | (n >> 8);
+ n = n | (n >> 16);
+ return n + 1;
+ }
+
+ #define AAC_SYNC_EXTENSION_TYPE 0x02b7
+
+ static GstCaps *
+ gst_matroska_demux_audio_caps (GstMatroskaTrackAudioContext *
+ audiocontext, const gchar * codec_id, guint8 * data, guint size,
+ gchar ** codec_name, guint16 * riff_audio_fmt, GstClockTime * lead_in_ts)
+ {
+ GstMatroskaTrackContext *context = (GstMatroskaTrackContext *) audiocontext;
+ GstCaps *caps = NULL;
+ guint lead_in = 0;
+ /* Maximum potential blocksize, which yields the longest possible
+ * lead_in_ts, since the exact value is not parsed out here */
+ guint max_blocksize = 0;
+ /* Original samplerate before SBR multiplications, as parsers would use */
+ guint rate = audiocontext->samplerate;
+
+ g_assert (audiocontext != NULL);
+ g_assert (codec_name != NULL);
+
+ if (riff_audio_fmt)
+ *riff_audio_fmt = 0;
+
+ /* TODO: check if we have all codec types from matroska-ids.h
+ * check if we have to do more special things with codec_private
+ * check if we need bitdepth in different places too
+ * implement channel position magic
+ * Add support for:
+ * GST_MATROSKA_CODEC_ID_AUDIO_AC3_BSID9
+ * GST_MATROSKA_CODEC_ID_AUDIO_AC3_BSID10
+ * GST_MATROSKA_CODEC_ID_AUDIO_QUICKTIME_QDMC
+ * GST_MATROSKA_CODEC_ID_AUDIO_QUICKTIME_QDM2
+ */
+
+ if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_MPEG1_L1) ||
+ !strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_MPEG1_L2) ||
+ !strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_MPEG1_L3)) {
+ gint layer;
+
+ if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_MPEG1_L1))
+ layer = 1;
+ else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_MPEG1_L2))
+ layer = 2;
+ else
+ layer = 3;
+
+ lead_in = 30; /* Could mp2 need as much too? */
+ max_blocksize = 1152;
+ caps = gst_caps_new_simple ("audio/mpeg",
+ "mpegversion", G_TYPE_INT, 1, "layer", G_TYPE_INT, layer, NULL);
+ *codec_name = g_strdup_printf ("MPEG-1 layer %d", layer);
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_PCM_INT_BE) ||
+ !strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_PCM_INT_LE)) {
+ gboolean sign;
+ gint endianness;
+ GstAudioFormat format;
+
+ sign = (audiocontext->bitdepth != 8);
+ if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_PCM_INT_BE))
+ endianness = G_BIG_ENDIAN;
+ else
+ endianness = G_LITTLE_ENDIAN;
+
+ format = gst_audio_format_build_integer (sign, endianness,
+ audiocontext->bitdepth, audiocontext->bitdepth);
+
+ /* FIXME: Channel mask and reordering */
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, gst_audio_format_to_string (format),
+ "layout", G_TYPE_STRING, "interleaved",
+ "channel-mask", GST_TYPE_BITMASK,
+ gst_audio_channel_get_fallback_mask (audiocontext->channels), NULL);
+
+ *codec_name = g_strdup_printf ("Raw %d-bit PCM audio",
+ audiocontext->bitdepth);
+ context->alignment = GST_ROUND_UP_8 (audiocontext->bitdepth) / 8;
+ context->alignment = round_up_pow2 (context->alignment);
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_PCM_FLOAT)) {
+ const gchar *format;
+ if (audiocontext->bitdepth == 32)
+ format = "F32LE";
+ else
+ format = "F64LE";
+ /* FIXME: Channel mask and reordering */
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, format,
+ "layout", G_TYPE_STRING, "interleaved",
+ "channel-mask", GST_TYPE_BITMASK,
+ gst_audio_channel_get_fallback_mask (audiocontext->channels), NULL);
+ *codec_name = g_strdup_printf ("Raw %d-bit floating-point audio",
+ audiocontext->bitdepth);
+ context->alignment = audiocontext->bitdepth / 8;
+ context->alignment = round_up_pow2 (context->alignment);
+ } else if (!strncmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_AC3,
+ strlen (GST_MATROSKA_CODEC_ID_AUDIO_AC3))) {
+ lead_in = 2;
+ max_blocksize = 1536;
+ caps = gst_caps_new_simple ("audio/x-ac3",
+ "framed", G_TYPE_BOOLEAN, TRUE, NULL);
+ *codec_name = g_strdup ("AC-3 audio");
+ } else if (!strncmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_EAC3,
+ strlen (GST_MATROSKA_CODEC_ID_AUDIO_EAC3))) {
+ lead_in = 2;
+ max_blocksize = 1536;
+ caps = gst_caps_new_simple ("audio/x-eac3",
+ "framed", G_TYPE_BOOLEAN, TRUE, NULL);
+ *codec_name = g_strdup ("E-AC-3 audio");
+ } else if (!strncmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_TRUEHD,
+ strlen (GST_MATROSKA_CODEC_ID_AUDIO_TRUEHD))) {
+ caps = gst_caps_new_empty_simple ("audio/x-true-hd");
+ *codec_name = g_strdup ("Dolby TrueHD");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_DTS)) {
+ caps = gst_caps_new_empty_simple ("audio/x-dts");
+ *codec_name = g_strdup ("DTS audio");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_VORBIS)) {
+ caps = gst_caps_new_empty_simple ("audio/x-vorbis");
+ context->stream_headers =
+ gst_matroska_parse_xiph_stream_headers (context->codec_priv,
+ context->codec_priv_size);
+ /* FIXME: mark stream as broken and skip if there are no stream headers */
+ context->send_stream_headers = TRUE;
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_FLAC)) {
+ caps = gst_caps_new_empty_simple ("audio/x-flac");
+ context->stream_headers =
+ gst_matroska_parse_flac_stream_headers (context->codec_priv,
+ context->codec_priv_size);
+ /* FIXME: mark stream as broken and skip if there are no stream headers */
+ context->send_stream_headers = TRUE;
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_SPEEX)) {
+ caps = gst_caps_new_empty_simple ("audio/x-speex");
+ context->stream_headers =
+ gst_matroska_parse_speex_stream_headers (context->codec_priv,
+ context->codec_priv_size);
+ /* FIXME: mark stream as broken and skip if there are no stream headers */
+ context->send_stream_headers = TRUE;
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_OPUS)) {
+ GstBuffer *tmp;
+
+ if (context->codec_priv_size >= 19) {
+ if (audiocontext->samplerate)
+ GST_WRITE_UINT32_LE ((guint8 *) context->codec_priv + 12,
+ audiocontext->samplerate);
+ if (context->codec_delay) {
+ guint64 delay =
+ gst_util_uint64_scale_round (context->codec_delay, 48000,
+ GST_SECOND);
+ GST_WRITE_UINT16_LE ((guint8 *) context->codec_priv + 10, delay);
+ }
+
+ tmp =
+ gst_buffer_new_memdup (context->codec_priv, context->codec_priv_size);
+ caps = gst_codec_utils_opus_create_caps_from_header (tmp, NULL);
+ gst_buffer_unref (tmp);
+ *codec_name = g_strdup ("Opus");
+ } else if (context->codec_priv_size == 0) {
+ GST_WARNING ("No Opus codec data found, trying to create one");
+ if (audiocontext->channels <= 2) {
+ guint8 streams, coupled, channels;
+ guint32 samplerate;
+
+ samplerate =
+ audiocontext->samplerate == 0 ? 48000 : audiocontext->samplerate;
+ rate = samplerate;
+ channels = audiocontext->channels == 0 ? 2 : audiocontext->channels;
+ if (channels == 1) {
+ streams = 1;
+ coupled = 0;
+ } else {
+ streams = 1;
+ coupled = 1;
+ }
+
+ caps =
+ gst_codec_utils_opus_create_caps (samplerate, channels, 0, streams,
+ coupled, NULL);
+ if (caps) {
+ *codec_name = g_strdup ("Opus");
+ } else {
+ GST_WARNING ("Failed to create Opus caps from audio context");
+ }
+ } else {
+ GST_WARNING ("No Opus codec data, and not enough info to create one");
+ }
+ } else {
+ GST_WARNING ("Invalid Opus codec data size (got %" G_GSIZE_FORMAT
+ ", expected 19)", context->codec_priv_size);
+ }
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_ACM)) {
+ gst_riff_strf_auds auds;
+
+ if (data && size >= 18) {
+ GstBuffer *codec_data = NULL;
+
+ /* little-endian -> byte-order */
+ auds.format = GST_READ_UINT16_LE (data);
+ auds.channels = GST_READ_UINT16_LE (data + 2);
+ auds.rate = GST_READ_UINT32_LE (data + 4);
+ auds.av_bps = GST_READ_UINT32_LE (data + 8);
+ auds.blockalign = GST_READ_UINT16_LE (data + 12);
+ auds.bits_per_sample = GST_READ_UINT16_LE (data + 16);
+
+ /* 18 is the waveformatex size */
+ if (size > 18) {
+ codec_data = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
+ data + 18, size - 18, 0, size - 18, NULL, NULL);
+ }
+
+ if (riff_audio_fmt)
+ *riff_audio_fmt = auds.format;
+
+ /* FIXME: Handle reorder map */
+ caps = gst_riff_create_audio_caps (auds.format, NULL, &auds, codec_data,
+ NULL, codec_name, NULL);
+ if (codec_data)
+ gst_buffer_unref (codec_data);
+
+ if (caps == NULL) {
+ GST_WARNING ("Unhandled RIFF audio format 0x%02x", auds.format);
+ }
+ } else {
+ GST_WARNING ("Invalid codec data size (%d expected, got %d)", 18, size);
+ }
+ } else if (g_str_has_prefix (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_AAC)) {
+ GstBuffer *priv = NULL;
+ gint mpegversion;
+ gint rate_idx, profile;
+ guint8 *data = NULL;
+
+ /* unspecified AAC profile with opaque private codec data */
+ if (strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_AAC) == 0) {
+ if (context->codec_priv_size >= 2) {
+ guint obj_type, freq_index, explicit_freq_bytes = 0;
+
+ codec_id = GST_MATROSKA_CODEC_ID_AUDIO_AAC_MPEG4;
+ mpegversion = 4;
+ freq_index = (GST_READ_UINT16_BE (context->codec_priv) & 0x780) >> 7;
+ obj_type = (GST_READ_UINT16_BE (context->codec_priv) & 0xF800) >> 11;
+ if (freq_index == 15)
+ explicit_freq_bytes = 3;
+ GST_DEBUG ("obj_type = %u, freq_index = %u", obj_type, freq_index);
+ priv = gst_buffer_new_memdup (context->codec_priv,
+ context->codec_priv_size);
+ /* assume SBR if samplerate <= 24kHz */
+ if (obj_type == 5 || (freq_index >= 6 && freq_index != 15) ||
+ (context->codec_priv_size == (5 + explicit_freq_bytes))) {
+ /* TODO: Commonly aacparse will reset the rate in caps to
+ * non-multiplied - which one is correct? */
+ audiocontext->samplerate *= 2;
+ }
+ } else {
+ GST_WARNING ("Opaque A_AAC codec ID, but no codec private data");
+ /* this is pretty broken;
+ * maybe we need to make up some default private,
+ * or maybe ADTS data got dumped in.
+ * Let's set up some private data now, and check actual data later */
+ /* just try this and see what happens ... */
+ codec_id = GST_MATROSKA_CODEC_ID_AUDIO_AAC_MPEG4;
+ context->postprocess_frame = gst_matroska_demux_check_aac;
+ }
+ }
+
+ /* make up decoder-specific data if it is not supplied */
+ if (priv == NULL) {
+ GstMapInfo map;
+
+ priv = gst_buffer_new_allocate (NULL, 5, NULL);
+ gst_buffer_map (priv, &map, GST_MAP_WRITE);
+ data = map.data;
+ rate_idx = aac_rate_idx (audiocontext->samplerate);
+ profile = aac_profile_idx (codec_id);
+
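+ /* Minimal AudioSpecificConfig: 5 bits audio object type (profile + 1),
+ * 4 bits sampling frequency index, 4 bits channel configuration. */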
+ data[0] = ((profile + 1) << 3) | ((rate_idx & 0xE) >> 1);
+ data[1] = ((rate_idx & 0x1) << 7) | (audiocontext->channels << 3);
+
+ if (!strncmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_AAC_MPEG2,
+ strlen (GST_MATROSKA_CODEC_ID_AUDIO_AAC_MPEG2))) {
+ mpegversion = 2;
+ gst_buffer_unmap (priv, &map);
+ gst_buffer_set_size (priv, 2);
+ } else if (!strncmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_AAC_MPEG4,
+ strlen (GST_MATROSKA_CODEC_ID_AUDIO_AAC_MPEG4))) {
+ mpegversion = 4;
+
+ if (g_strrstr (codec_id, "SBR")) {
+ /* HE-AAC (aka SBR AAC) */
+ audiocontext->samplerate *= 2;
+ rate_idx = aac_rate_idx (audiocontext->samplerate);
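+ /* Append an explicit SBR signalling extension: 11-bit sync extension
+ * type (0x2b7), 5-bit extension object type (5), the SBR present flag
+ * and the doubled sampling frequency index. */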
+ data[2] = AAC_SYNC_EXTENSION_TYPE >> 3;
+ data[3] = ((AAC_SYNC_EXTENSION_TYPE & 0x07) << 5) | 5;
+ data[4] = (1 << 7) | (rate_idx << 3);
+ gst_buffer_unmap (priv, &map);
+ } else {
+ gst_buffer_unmap (priv, &map);
+ gst_buffer_set_size (priv, 2);
+ }
+ } else {
+ gst_buffer_unmap (priv, &map);
+ gst_buffer_unref (priv);
+ priv = NULL;
+ GST_ERROR ("Unknown AAC profile and no codec private data");
+ }
+ }
+
+ if (priv) {
+ lead_in = 2;
+ max_blocksize = 1024;
+ caps = gst_caps_new_simple ("audio/mpeg",
+ "mpegversion", G_TYPE_INT, mpegversion,
+ "framed", G_TYPE_BOOLEAN, TRUE,
+ "stream-format", G_TYPE_STRING, "raw", NULL);
+ gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, priv, NULL);
+ if (context->codec_priv && context->codec_priv_size > 0)
+ gst_codec_utils_aac_caps_set_level_and_profile (caps,
+ context->codec_priv, context->codec_priv_size);
+ *codec_name = g_strdup_printf ("MPEG-%d AAC audio", mpegversion);
+ gst_buffer_unref (priv);
+ }
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_TTA)) {
+ caps = gst_caps_new_simple ("audio/x-tta",
+ "width", G_TYPE_INT, audiocontext->bitdepth, NULL);
+ *codec_name = g_strdup ("TTA audio");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_WAVPACK4)) {
+ caps = gst_caps_new_simple ("audio/x-wavpack",
+ "width", G_TYPE_INT, audiocontext->bitdepth,
+ "framed", G_TYPE_BOOLEAN, TRUE, NULL);
+ *codec_name = g_strdup ("Wavpack audio");
+ context->postprocess_frame = gst_matroska_demux_add_wvpk_header;
+ audiocontext->wvpk_block_index = 0;
+ } else if ((!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_REAL_14_4)) ||
+ (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_REAL_28_8)) ||
+ (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_REAL_COOK))) {
+ gint raversion = -1;
+
+ if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_REAL_14_4))
+ raversion = 1;
+ else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_REAL_COOK))
+ raversion = 8;
+ else
+ raversion = 2;
+
+ caps = gst_caps_new_simple ("audio/x-pn-realaudio",
+ "raversion", G_TYPE_INT, raversion, NULL);
+ /* Extract extra information from the private data into caps; the exact
+ * layout varies per codec */
+ if (data && (size >= 0x50)) {
+ GstBuffer *priv;
+ guint flavor;
+ guint packet_size;
+ guint height;
+ guint leaf_size;
+ guint sample_width;
+ guint extra_data_size;
+
+ GST_DEBUG ("real audio raversion:%d", raversion);
+ if (raversion == 8) {
+ /* COOK */
+ flavor = GST_READ_UINT16_BE (data + 22);
+ packet_size = GST_READ_UINT32_BE (data + 24);
+ height = GST_READ_UINT16_BE (data + 40);
+ leaf_size = GST_READ_UINT16_BE (data + 44);
+ sample_width = GST_READ_UINT16_BE (data + 58);
+ extra_data_size = GST_READ_UINT32_BE (data + 74);
+
+ GST_DEBUG
+ ("flavor:%d, packet_size:%d, height:%d, leaf_size:%d, sample_width:%d, extra_data_size:%d",
+ flavor, packet_size, height, leaf_size, sample_width,
+ extra_data_size);
+ gst_caps_set_simple (caps, "flavor", G_TYPE_INT, flavor, "packet_size",
+ G_TYPE_INT, packet_size, "height", G_TYPE_INT, height, "leaf_size",
+ G_TYPE_INT, leaf_size, "width", G_TYPE_INT, sample_width, NULL);
+
+ if ((size - 78) >= extra_data_size) {
+ priv = gst_buffer_new_memdup (data + 78, extra_data_size);
+ gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, priv, NULL);
+ gst_buffer_unref (priv);
+ }
+ }
+ }
+
+ *codec_name = g_strdup_printf ("RealAudio %d.0", raversion);
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_REAL_SIPR)) {
+ caps = gst_caps_new_empty_simple ("audio/x-sipro");
+ *codec_name = g_strdup ("Sipro/ACELP.NET Voice Codec");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_REAL_RALF)) {
+ caps = gst_caps_new_empty_simple ("audio/x-ralf-mpeg4-generic");
+ *codec_name = g_strdup ("Real Audio Lossless");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_REAL_ATRC)) {
+ caps = gst_caps_new_empty_simple ("audio/x-vnd.sony.atrac3");
+ *codec_name = g_strdup ("Sony ATRAC3");
+ } else {
+ GST_WARNING ("Unknown codec '%s', cannot build Caps", codec_id);
+ return NULL;
+ }
+
+ if (caps != NULL) {
+ if (audiocontext->samplerate > 0 && audiocontext->channels > 0) {
+ gint i;
+
+ for (i = 0; i < gst_caps_get_size (caps); i++) {
+ gst_structure_set (gst_caps_get_structure (caps, i),
+ "channels", G_TYPE_INT, audiocontext->channels,
+ "rate", G_TYPE_INT, audiocontext->samplerate, NULL);
+ }
+ }
+
+ caps = gst_caps_simplify (caps);
+ }
+
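+ /* Worst-case decoder lead-in: lead_in frames of the maximum blocksize,
+ * expressed as time at the original sample rate. */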
+ if (lead_in_ts && lead_in && max_blocksize && rate) {
+ *lead_in_ts =
+ gst_util_uint64_scale (GST_SECOND, max_blocksize * lead_in, rate);
+ }
+
+ return caps;
+ }
+
+ static GstCaps *
+ gst_matroska_demux_subtitle_caps (GstMatroskaTrackSubtitleContext *
+ subtitlecontext, const gchar * codec_id, gpointer data, guint size)
+ {
+ GstCaps *caps = NULL;
+ GstMatroskaTrackContext *context =
+ (GstMatroskaTrackContext *) subtitlecontext;
+
+ /* for backwards compatibility */
+ if (!g_ascii_strcasecmp (codec_id, GST_MATROSKA_CODEC_ID_SUBTITLE_ASCII))
+ codec_id = GST_MATROSKA_CODEC_ID_SUBTITLE_UTF8;
+ else if (!g_ascii_strcasecmp (codec_id, "S_SSA"))
+ codec_id = GST_MATROSKA_CODEC_ID_SUBTITLE_SSA;
+ else if (!g_ascii_strcasecmp (codec_id, "S_ASS"))
+ codec_id = GST_MATROSKA_CODEC_ID_SUBTITLE_ASS;
+ else if (!g_ascii_strcasecmp (codec_id, "S_USF"))
+ codec_id = GST_MATROSKA_CODEC_ID_SUBTITLE_USF;
+
+ /* TODO: Add GST_MATROSKA_CODEC_ID_SUBTITLE_BMP support
+ * Check if we have to do something with codec_private */
+ if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_SUBTITLE_UTF8)) {
+ /* well, plain text simply does not have a lot of markup ... */
+ caps = gst_caps_new_simple ("text/x-raw", "format", G_TYPE_STRING,
+ "pango-markup", NULL);
+ context->postprocess_frame = gst_matroska_demux_check_subtitle_buffer;
+ subtitlecontext->check_markup = TRUE;
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_SUBTITLE_SSA)) {
+ caps = gst_caps_new_empty_simple ("application/x-ssa");
+ context->postprocess_frame = gst_matroska_demux_check_subtitle_buffer;
+ subtitlecontext->check_markup = FALSE;
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_SUBTITLE_ASS)) {
+ caps = gst_caps_new_empty_simple ("application/x-ass");
+ context->postprocess_frame = gst_matroska_demux_check_subtitle_buffer;
+ subtitlecontext->check_markup = FALSE;
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_SUBTITLE_USF)) {
+ caps = gst_caps_new_empty_simple ("application/x-usf");
+ context->postprocess_frame = gst_matroska_demux_check_subtitle_buffer;
+ subtitlecontext->check_markup = FALSE;
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_SUBTITLE_VOBSUB)) {
+ caps = gst_caps_new_empty_simple ("subpicture/x-dvd");
+ ((GstMatroskaTrackContext *) subtitlecontext)->send_dvd_event = TRUE;
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_SUBTITLE_HDMVPGS)) {
+ caps = gst_caps_new_empty_simple ("subpicture/x-pgs");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_SUBTITLE_KATE)) {
+ caps = gst_caps_new_empty_simple ("subtitle/x-kate");
+ context->stream_headers =
+ gst_matroska_parse_xiph_stream_headers (context->codec_priv,
+ context->codec_priv_size);
+ /* FIXME: mark stream as broken and skip if there are no stream headers */
+ context->send_stream_headers = TRUE;
+ } else {
+ GST_DEBUG ("Unknown subtitle stream: codec_id='%s'", codec_id);
+ caps = gst_caps_new_empty_simple ("application/x-subtitle-unknown");
+ }
+
+ if (data != NULL && size > 0) {
+ GstBuffer *buf;
+
+ buf = gst_buffer_new_memdup (data, size);
+ gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ }
+
+ return caps;
+ }
+
+ #if 0
+ static void
+ gst_matroska_demux_set_index (GstElement * element, GstIndex * index)
+ {
+ GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (element);
+
+ GST_OBJECT_LOCK (demux);
+ if (demux->common.element_index)
+ gst_object_unref (demux->common.element_index);
+ demux->common.element_index = index ? gst_object_ref (index) : NULL;
+ GST_OBJECT_UNLOCK (demux);
+ GST_DEBUG_OBJECT (demux, "Set index %" GST_PTR_FORMAT,
+ demux->common.element_index);
+ }
+
+ static GstIndex *
+ gst_matroska_demux_get_index (GstElement * element)
+ {
+ GstIndex *result = NULL;
+ GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (element);
+
+ GST_OBJECT_LOCK (demux);
+ if (demux->common.element_index)
+ result = gst_object_ref (demux->common.element_index);
+ GST_OBJECT_UNLOCK (demux);
+
+ GST_DEBUG_OBJECT (demux, "Returning index %" GST_PTR_FORMAT, result);
+
+ return result;
+ }
+ #endif
+
+ static GstStateChangeReturn
+ gst_matroska_demux_change_state (GstElement * element,
+ GstStateChange transition)
+ {
+ GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (element);
+ GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
+
+ /* handle upwards state changes here */
+ switch (transition) {
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ /* handle downwards state changes */
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_matroska_demux_reset (GST_ELEMENT (demux));
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+ }
+
+ static void
+ gst_matroska_demux_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+ {
+ GstMatroskaDemux *demux;
+
+ g_return_if_fail (GST_IS_MATROSKA_DEMUX (object));
+ demux = GST_MATROSKA_DEMUX (object);
+
+ switch (prop_id) {
+ case PROP_MAX_GAP_TIME:
+ GST_OBJECT_LOCK (demux);
+ demux->max_gap_time = g_value_get_uint64 (value);
+ GST_OBJECT_UNLOCK (demux);
+ break;
+ case PROP_MAX_BACKTRACK_DISTANCE:
+ GST_OBJECT_LOCK (demux);
+ demux->max_backtrack_distance = g_value_get_uint (value);
+ GST_OBJECT_UNLOCK (demux);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static void
+ gst_matroska_demux_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+ {
+ GstMatroskaDemux *demux;
+
+ g_return_if_fail (GST_IS_MATROSKA_DEMUX (object));
+ demux = GST_MATROSKA_DEMUX (object);
+
+ switch (prop_id) {
+ case PROP_MAX_GAP_TIME:
+ GST_OBJECT_LOCK (demux);
+ g_value_set_uint64 (value, demux->max_gap_time);
+ GST_OBJECT_UNLOCK (demux);
+ break;
+ case PROP_MAX_BACKTRACK_DISTANCE:
+ GST_OBJECT_LOCK (demux);
+ g_value_set_uint (value, demux->max_backtrack_distance);
+ GST_OBJECT_UNLOCK (demux);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static const gchar *
+ gst_matroska_track_encryption_algorithm_name (gint val)
+ {
+ GEnumValue *en;
+ GEnumClass *enum_class =
+ g_type_class_ref (MATROSKA_TRACK_ENCRYPTION_ALGORITHM_TYPE);
+ en = g_enum_get_value (G_ENUM_CLASS (enum_class), val);
+ return en ? en->value_nick : NULL;
+ }
+
+ static const gchar *
+ gst_matroska_track_encryption_cipher_mode_name (gint val)
+ {
+ GEnumValue *en;
+ GEnumClass *enum_class =
+ g_type_class_ref (MATROSKA_TRACK_ENCRYPTION_CIPHER_MODE_TYPE);
+ en = g_enum_get_value (G_ENUM_CLASS (enum_class), val);
+ return en ? en->value_nick : NULL;
+ }
+
+ static const gchar *
+ gst_matroska_track_encoding_scope_name (gint val)
+ {
+ GEnumValue *en;
+ GEnumClass *enum_class =
+ g_type_class_ref (MATROSKA_TRACK_ENCODING_SCOPE_TYPE);
+
+ en = g_enum_get_value (G_ENUM_CLASS (enum_class), val);
+ return en ? en->value_nick : NULL;
+ }
--- /dev/null
+ /* GStreamer ReplayGain volume adjustment
+ *
+ * Copyright (C) 2007 Rene Stadler <mail@renestadler.de>
+ *
+ * gstrgvolume.c: Element to apply ReplayGain volume adjustment
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public License
+ * as published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+ * 02110-1301 USA
+ */
+
+ /**
+ * SECTION:element-rgvolume
+ * @title: rgvolume
+ * @see_also: #GstRgLimiter, #GstRgAnalysis
+ *
+ * This element applies volume changes to streams as lined out in the proposed
+ * [ReplayGain standard](https://wiki.hydrogenaud.io/index.php?title=ReplayGain).
+ * It interprets the ReplayGain meta data tags and carries out the adjustment
+ * (by using a volume element internally).
+ *
+ * The relevant tags are:
+ * * #GST_TAG_TRACK_GAIN
+ * * #GST_TAG_TRACK_PEAK
+ * * #GST_TAG_ALBUM_GAIN
+ * * #GST_TAG_ALBUM_PEAK
+ * * #GST_TAG_REFERENCE_LEVEL
+ *
+ * The information carried by these tags must have been calculated beforehand by
+ * performing the ReplayGain analysis. This is implemented by the <link
+ * linkend="GstRgAnalysis">rganalysis</link> element.
+ *
+ * The signal compression/limiting recommendations outlined in the proposed
+ * standard are not implemented by this element. This has to be handled by
+ * separate elements because applications might want to have additional filters
+ * between the volume adjustment and the limiting stage. A basic limiter is
+ * included with this plugin: The <link linkend="GstRgLimiter">rglimiter</link>
+ * element applies -6 dB hard limiting as mentioned in the ReplayGain standard.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 filesrc location=filename.ext ! decodebin ! audioconvert \
+ * ! rgvolume ! audioconvert ! audioresample ! alsasink
+ * ]| Playback of a file
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include <config.h>
+ #endif
+
+ #include <gst/gst.h>
+ #include <gst/pbutils/pbutils.h>
+ #include <gst/audio/audio.h>
+ #include <math.h>
+
+ #include "gstrgvolume.h"
+ #include "replaygain.h"
+
+ GST_DEBUG_CATEGORY_STATIC (gst_rg_volume_debug);
+ #define GST_CAT_DEFAULT gst_rg_volume_debug
+
+ enum
+ {
+ PROP_0,
++#ifdef TIZEN_FEATURE_RGVOLUME_MODIFICATION
++ PROP_ENABLE_RGVOLUME,
++#endif
+ PROP_ALBUM_MODE,
+ PROP_HEADROOM,
+ PROP_PRE_AMP,
+ PROP_FALLBACK_GAIN,
+ PROP_TARGET_GAIN,
+ PROP_RESULT_GAIN
+ };
+
++#ifdef TIZEN_FEATURE_RGVOLUME_MODIFICATION
++#define DEFAULT_ENABLE_RGVOLUME TRUE
++#endif
+ #define DEFAULT_ALBUM_MODE TRUE
+ #define DEFAULT_HEADROOM 0.0
+ #define DEFAULT_PRE_AMP 0.0
+ #define DEFAULT_FALLBACK_GAIN 0.0
+
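+ /* Convert between gain in dB and the linear scale factor used by the
+ * volume element. */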
+ #define DB_TO_LINEAR(x) pow (10., (x) / 20.)
+ #define LINEAR_TO_DB(x) (20. * log10 (x))
+
+ #define GAIN_FORMAT "+.02f dB"
+ #define PEAK_FORMAT ".06f"
+
+ #define VALID_GAIN(x) ((x) > -60.00 && (x) < 60.00)
+ #define VALID_PEAK(x) ((x) > 0.)
+
+ /* Same template caps as GstVolume, as I don't like having just ANY caps. */
+
+ #define FORMAT "{ "GST_AUDIO_NE(F32)","GST_AUDIO_NE(S16)" }"
+
+ static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " FORMAT ", "
+ "layout = (string) { interleaved, non-interleaved }, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]"));
+
+ static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " FORMAT ", "
+ "layout = (string) { interleaved, non-interleaved }, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]"));
+
+ #define gst_rg_volume_parent_class parent_class
+ G_DEFINE_TYPE (GstRgVolume, gst_rg_volume, GST_TYPE_BIN);
+ GST_ELEMENT_REGISTER_DEFINE (rgvolume, "rgvolume", GST_RANK_NONE,
+ GST_TYPE_RG_VOLUME);
+
+ static void gst_rg_volume_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+ static void gst_rg_volume_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+ static void gst_rg_volume_dispose (GObject * object);
+
+ static GstStateChangeReturn gst_rg_volume_change_state (GstElement * element,
+ GstStateChange transition);
+ static gboolean gst_rg_volume_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+
+ static GstEvent *gst_rg_volume_tag_event (GstRgVolume * self, GstEvent * event);
+ static void gst_rg_volume_reset (GstRgVolume * self);
+ static void gst_rg_volume_update_gain (GstRgVolume * self);
+ static inline void gst_rg_volume_determine_gain (GstRgVolume * self,
+ gdouble * target_gain, gdouble * result_gain);
+
+ static void
+ gst_rg_volume_class_init (GstRgVolumeClass * klass)
+ {
+ GObjectClass *gobject_class;
+ GstElementClass *element_class;
+ GstBinClass *bin_class;
+
+ gobject_class = (GObjectClass *) klass;
+
+ gobject_class->set_property = gst_rg_volume_set_property;
+ gobject_class->get_property = gst_rg_volume_get_property;
+ gobject_class->dispose = gst_rg_volume_dispose;
+
++#ifdef TIZEN_FEATURE_RGVOLUME_MODIFICATION
++ /**
++ * GstRgVolume:enable-rgvolume:
++ *
++ * Whether to enable the ReplayGain volume adjustment.
++ *
++ * If disabled, rgvolume is not affected by ReplayGain tags or gain
++ * properties and applies unity gain.
++ */
++ g_object_class_install_property (gobject_class, PROP_ENABLE_RGVOLUME,
++ g_param_spec_boolean ("enable-rgvolume", "Enable rg volume",
++ "Whether to enable replaygain volume", DEFAULT_ENABLE_RGVOLUME,
++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++#endif
++
+ /**
+ * GstRgVolume:album-mode:
+ *
+ * Whether to prefer album gain over track gain.
+ *
+ * If set to %TRUE, use album gain instead of track gain if both are
+ * available. This keeps the relative loudness levels of tracks from the same
+ * album intact.
+ *
+ * If set to %FALSE, track mode is used instead. This effectively leads to
+ * more extensive normalization.
+ *
+ * If album mode is enabled but the album gain tag is absent in the stream,
+ * the track gain is used instead. If both gain tags are missing, the value
+ * of the #GstRgVolume:fallback-gain property is used instead.
+ */
+ g_object_class_install_property (gobject_class, PROP_ALBUM_MODE,
+ g_param_spec_boolean ("album-mode", "Album mode",
+ "Prefer album over track gain", DEFAULT_ALBUM_MODE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstRgVolume:headroom:
+ *
+ * Extra headroom [dB]. This controls the amount by which the output can
+ * exceed digital full scale.
+ *
+ * Only set this to a value greater than 0.0 if signal compression/limiting of
+ * a suitable form is applied to the output (or output is brought into the
+ * correct range by some other transformation).
+ *
+ * This element internally uses a volume element, which also supports
+ * operating on integer audio formats. These formats do not allow exceeding
+ * digital full scale. If extra headroom is used, make sure that the raw
+ * audio data format is floating point (F32). Otherwise,
+ * clipping distortion might be introduced as part of the volume adjustment
+ * itself.
+ */
+ g_object_class_install_property (gobject_class, PROP_HEADROOM,
+ g_param_spec_double ("headroom", "Headroom", "Extra headroom [dB]",
+ 0., 60., DEFAULT_HEADROOM,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstRgVolume:pre-amp:
+ *
+ * Additional gain to apply globally [dB]. This controls the trade-off
+ * between uniformity of normalization and utilization of available dynamic
+ * range.
+ *
+ * Note that the default value is 0 dB because the ReplayGain reference value
+ * was adjusted by +6 dB (from 83 to 89 dB). The original proposal stated
+ * that a proper default pre-amp value is +6 dB; relative to the adjusted
+ * reference level, this translates to the 0 dB used here.
+ */
+ g_object_class_install_property (gobject_class, PROP_PRE_AMP,
+ g_param_spec_double ("pre-amp", "Pre-amp", "Extra gain [dB]",
+ -60., 60., DEFAULT_PRE_AMP,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstRgVolume:fallback-gain:
+ *
+ * Fallback gain [dB] for streams missing ReplayGain tags.
+ */
+ g_object_class_install_property (gobject_class, PROP_FALLBACK_GAIN,
+ g_param_spec_double ("fallback-gain", "Fallback gain",
+ "Gain for streams missing tags [dB]",
+ -60., 60., DEFAULT_FALLBACK_GAIN,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstRgVolume:result-gain:
+ *
+ * Applied gain [dB]. This gain is applied to processed buffer data.
+ *
+ * This is set to the #GstRgVolume:target-gain if amplification by that amount
+ * can be applied safely. "Safely" means that the volume adjustment does not
+ * inflict clipping distortion. Should this not be the case, the result gain
+ * is set to an appropriately reduced value (by applying peak normalization).
+ * The proposed standard calls this "clipping prevention".
+ *
+ * The difference between target and result gain reflects the necessary amount
+ * of reduction. Applications can make use of this information to temporarily
+ * reduce the #GstRgVolume:pre-amp for subsequent streams, as recommended by
+ * the ReplayGain standard.
+ *
+ * Note that target and result gain differing for a great majority of streams
+ * indicates a problem: What happens in this case is that most streams receive
+ * peak normalization instead of amplification by the ideal replay gain. To
+ * prevent this, the #GstRgVolume:pre-amp has to be lowered and/or a limiter
+ * has to be used which facilitates the use of #GstRgVolume:headroom.
+ */
+ g_object_class_install_property (gobject_class, PROP_RESULT_GAIN,
+ g_param_spec_double ("result-gain", "Result-gain", "Applied gain [dB]",
+ -120., 120., 0., G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstRgVolume:target-gain:
+ *
+ * Applicable gain [dB]. This gain is supposed to be applied.
+ *
+ * Depending on the value of the #GstRgVolume:album-mode property and the
+ * presence of ReplayGain tags in the stream, this is set according to one of
+ * these simple formulas:
+ *
+ * * #GstRgVolume:pre-amp + album gain of the stream
+ * * #GstRgVolume:pre-amp + track gain of the stream
+ * * #GstRgVolume:pre-amp + #GstRgVolume:fallback-gain
+ *
+ */
+ g_object_class_install_property (gobject_class, PROP_TARGET_GAIN,
+ g_param_spec_double ("target-gain", "Target-gain",
+ "Applicable gain [dB]", -120., 120., 0.,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ element_class = (GstElementClass *) klass;
+ element_class->change_state = GST_DEBUG_FUNCPTR (gst_rg_volume_change_state);
+
+ bin_class = (GstBinClass *) klass;
+ /* Setting these to NULL makes gst_bin_add and _remove refuse to let anyone
+ * mess with our internals. */
+ bin_class->add_element = NULL;
+ bin_class->remove_element = NULL;
+
+ gst_element_class_add_static_pad_template (element_class, &src_template);
+ gst_element_class_add_static_pad_template (element_class, &sink_template);
+ gst_element_class_set_static_metadata (element_class, "ReplayGain volume",
+ "Filter/Effect/Audio",
+ "Apply ReplayGain volume adjustment",
+ "Ren\xc3\xa9 Stadler <mail@renestadler.de>");
+
+ GST_DEBUG_CATEGORY_INIT (gst_rg_volume_debug, "rgvolume", 0,
+ "ReplayGain volume element");
+ }
+
+ static void
+ gst_rg_volume_init (GstRgVolume * self)
+ {
+ GObjectClass *volume_class;
+ GstPad *volume_pad, *ghost_pad;
+
++#ifdef TIZEN_FEATURE_RGVOLUME_MODIFICATION
++ self->enable_rgvolume = DEFAULT_ENABLE_RGVOLUME;
++#endif
+ self->album_mode = DEFAULT_ALBUM_MODE;
+ self->headroom = DEFAULT_HEADROOM;
+ self->pre_amp = DEFAULT_PRE_AMP;
+ self->fallback_gain = DEFAULT_FALLBACK_GAIN;
+ self->target_gain = 0.0;
+ self->result_gain = 0.0;
+
+ self->volume_element = gst_element_factory_make ("volume", "rgvolume-volume");
+ if (G_UNLIKELY (self->volume_element == NULL)) {
+ GstMessage *msg;
+
+ GST_WARNING_OBJECT (self, "could not create volume element");
+ msg = gst_missing_element_message_new (GST_ELEMENT_CAST (self), "volume");
+ gst_element_post_message (GST_ELEMENT_CAST (self), msg);
+
+ /* Nothing else to do, we will refuse the state change from NULL to READY to
+ * indicate that something went very wrong. It is doubtful that someone
+ * attempts changing our state though, since we end up having no pads! */
+ return;
+ }
+
+ volume_class = G_OBJECT_GET_CLASS (G_OBJECT (self->volume_element));
+ self->max_volume = G_PARAM_SPEC_DOUBLE
+ (g_object_class_find_property (volume_class, "volume"))->maximum;
+
+ GST_BIN_CLASS (parent_class)->add_element (GST_BIN_CAST (self),
+ self->volume_element);
+
+ volume_pad = gst_element_get_static_pad (self->volume_element, "sink");
+ ghost_pad = gst_ghost_pad_new_from_template ("sink", volume_pad,
+ GST_PAD_PAD_TEMPLATE (volume_pad));
+ gst_object_unref (volume_pad);
+ gst_pad_set_event_function (ghost_pad, gst_rg_volume_sink_event);
+ gst_element_add_pad (GST_ELEMENT_CAST (self), ghost_pad);
+
+ volume_pad = gst_element_get_static_pad (self->volume_element, "src");
+ ghost_pad = gst_ghost_pad_new_from_template ("src", volume_pad,
+ GST_PAD_PAD_TEMPLATE (volume_pad));
+ gst_object_unref (volume_pad);
+ gst_element_add_pad (GST_ELEMENT_CAST (self), ghost_pad);
+ }
+
+ static void
+ gst_rg_volume_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+ GstRgVolume *self = GST_RG_VOLUME (object);
+
+ switch (prop_id) {
++#ifdef TIZEN_FEATURE_RGVOLUME_MODIFICATION
++ case PROP_ENABLE_RGVOLUME:
++ self->enable_rgvolume = g_value_get_boolean (value);
++ break;
++#endif
+ case PROP_ALBUM_MODE:
+ self->album_mode = g_value_get_boolean (value);
+ break;
+ case PROP_HEADROOM:
+ self->headroom = g_value_get_double (value);
+ break;
+ case PROP_PRE_AMP:
+ self->pre_amp = g_value_get_double (value);
+ break;
+ case PROP_FALLBACK_GAIN:
+ self->fallback_gain = g_value_get_double (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+
+ gst_rg_volume_update_gain (self);
+ }
+
+ static void
+ gst_rg_volume_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+ {
+ GstRgVolume *self = GST_RG_VOLUME (object);
+
+ switch (prop_id) {
++#ifdef TIZEN_FEATURE_RGVOLUME_MODIFICATION
++ case PROP_ENABLE_RGVOLUME:
++ g_value_set_boolean (value, self->enable_rgvolume);
++ break;
++#endif
+ case PROP_ALBUM_MODE:
+ g_value_set_boolean (value, self->album_mode);
+ break;
+ case PROP_HEADROOM:
+ g_value_set_double (value, self->headroom);
+ break;
+ case PROP_PRE_AMP:
+ g_value_set_double (value, self->pre_amp);
+ break;
+ case PROP_FALLBACK_GAIN:
+ g_value_set_double (value, self->fallback_gain);
+ break;
+ case PROP_TARGET_GAIN:
+ g_value_set_double (value, self->target_gain);
++#ifdef TIZEN_FEATURE_RGVOLUME_MODIFICATION
++ if (!self->enable_rgvolume)
++ g_value_set_double (value, 0.0);
++#endif
+ break;
+ case PROP_RESULT_GAIN:
+ g_value_set_double (value, self->result_gain);
++#ifdef TIZEN_FEATURE_RGVOLUME_MODIFICATION
++ if (!self->enable_rgvolume)
++ g_value_set_double (value, 0.0);
++#endif
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static void
+ gst_rg_volume_dispose (GObject * object)
+ {
+ GstRgVolume *self = GST_RG_VOLUME (object);
+
+ if (self->volume_element != NULL) {
+ /* Manually remove our child using the bin implementation of remove_element.
+ * This is needed because we prevent gst_bin_remove from working, which the
+ * parent dispose handler would use if we had any children left. */
+ GST_BIN_CLASS (parent_class)->remove_element (GST_BIN_CAST (self),
+ self->volume_element);
+ self->volume_element = NULL;
+ }
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+ }
+
+ static GstStateChangeReturn
+ gst_rg_volume_change_state (GstElement * element, GstStateChange transition)
+ {
+ GstRgVolume *self = GST_RG_VOLUME (element);
+ GstStateChangeReturn res;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+
+ if (G_UNLIKELY (self->volume_element == NULL)) {
+ /* Creating our child volume element in _init failed. */
+ return GST_STATE_CHANGE_FAILURE;
+ }
+ break;
+
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+
+ gst_rg_volume_reset (self);
+ break;
+
+ default:
+ break;
+ }
+
+ res = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ return res;
+ }
+
+ /* Event function for the ghost sink pad. */
+ static gboolean
+ gst_rg_volume_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+ {
+ GstRgVolume *self;
+ GstEvent *send_event = event;
+ gboolean res;
+
+ self = GST_RG_VOLUME (parent);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_TAG:
+
+ GST_LOG_OBJECT (self, "received tag event");
+
+ send_event = gst_rg_volume_tag_event (self, event);
+
+ if (send_event == NULL)
+ GST_LOG_OBJECT (self, "all tags handled, dropping event");
+
+ break;
+
+ case GST_EVENT_EOS:
+
+ gst_rg_volume_reset (self);
+ break;
+
+ default:
+ break;
+ }
+
+ if (G_LIKELY (send_event != NULL))
+ res = gst_pad_event_default (pad, parent, send_event);
+ else
+ res = TRUE;
+
+ return res;
+ }
+
+ static GstEvent *
+ gst_rg_volume_tag_event (GstRgVolume * self, GstEvent * event)
+ {
+ GstTagList *tag_list;
+ gboolean has_track_gain, has_track_peak, has_album_gain, has_album_peak;
+ gboolean has_ref_level;
+
+ g_return_val_if_fail (event != NULL, NULL);
+ g_return_val_if_fail (GST_EVENT_TYPE (event) == GST_EVENT_TAG, event);
+
+ gst_event_parse_tag (event, &tag_list);
+
+ if (gst_tag_list_is_empty (tag_list))
+ return event;
+
+ has_track_gain = gst_tag_list_get_double (tag_list, GST_TAG_TRACK_GAIN,
+ &self->track_gain);
+ has_track_peak = gst_tag_list_get_double (tag_list, GST_TAG_TRACK_PEAK,
+ &self->track_peak);
+ has_album_gain = gst_tag_list_get_double (tag_list, GST_TAG_ALBUM_GAIN,
+ &self->album_gain);
+ has_album_peak = gst_tag_list_get_double (tag_list, GST_TAG_ALBUM_PEAK,
+ &self->album_peak);
+ has_ref_level = gst_tag_list_get_double (tag_list, GST_TAG_REFERENCE_LEVEL,
+ &self->reference_level);
+
+ if (!has_track_gain && !has_track_peak && !has_album_gain && !has_album_peak)
+ return event;
+
+ if (has_ref_level && (has_track_gain || has_album_gain)
+ && (ABS (self->reference_level - RG_REFERENCE_LEVEL) > 1.e-6)) {
+ /* Log a message stating the amount of adjustment that is applied below. */
+ GST_DEBUG_OBJECT (self,
+ "compensating for reference level difference by %" GAIN_FORMAT,
+ RG_REFERENCE_LEVEL - self->reference_level);
+ }
+ if (has_track_gain) {
+ self->track_gain += RG_REFERENCE_LEVEL - self->reference_level;
+ }
+ if (has_album_gain) {
+ self->album_gain += RG_REFERENCE_LEVEL - self->reference_level;
+ }
+
+ /* Ignore values that are obviously invalid. */
+ if (G_UNLIKELY (has_track_gain && !VALID_GAIN (self->track_gain))) {
+ GST_DEBUG_OBJECT (self,
+ "ignoring bogus track gain value %" GAIN_FORMAT, self->track_gain);
+ has_track_gain = FALSE;
+ }
+ if (G_UNLIKELY (has_track_peak && !VALID_PEAK (self->track_peak))) {
+ GST_DEBUG_OBJECT (self,
+ "ignoring bogus track peak value %" PEAK_FORMAT, self->track_peak);
+ has_track_peak = FALSE;
+ }
+ if (G_UNLIKELY (has_album_gain && !VALID_GAIN (self->album_gain))) {
+ GST_DEBUG_OBJECT (self,
+ "ignoring bogus album gain value %" GAIN_FORMAT, self->album_gain);
+ has_album_gain = FALSE;
+ }
+ if (G_UNLIKELY (has_album_peak && !VALID_PEAK (self->album_peak))) {
+ GST_DEBUG_OBJECT (self,
+ "ignoring bogus album peak value %" PEAK_FORMAT, self->album_peak);
+ has_album_peak = FALSE;
+ }
+
+ /* Clamp peaks >1.0. Float based decoders can produce spurious samples >1.0;
+ * clamping these back to 1.0 should not cause any audible distortion.
+ * This is most often seen with Vorbis files. */
+ if (has_track_peak && self->track_peak > 1.) {
+ GST_DEBUG_OBJECT (self,
+ "clamping track peak %" PEAK_FORMAT " to 1.0", self->track_peak);
+ self->track_peak = 1.0;
+ }
+ if (has_album_peak && self->album_peak > 1.) {
+ GST_DEBUG_OBJECT (self,
+ "clamping album peak %" PEAK_FORMAT " to 1.0", self->album_peak);
+ self->album_peak = 1.0;
+ }
+
+ self->has_track_gain |= has_track_gain;
+ self->has_track_peak |= has_track_peak;
+ self->has_album_gain |= has_album_gain;
+ self->has_album_peak |= has_album_peak;
+
+ tag_list = gst_tag_list_copy (tag_list);
+ gst_tag_list_remove_tag (tag_list, GST_TAG_TRACK_GAIN);
+ gst_tag_list_remove_tag (tag_list, GST_TAG_TRACK_PEAK);
+ gst_tag_list_remove_tag (tag_list, GST_TAG_ALBUM_GAIN);
+ gst_tag_list_remove_tag (tag_list, GST_TAG_ALBUM_PEAK);
+ gst_tag_list_remove_tag (tag_list, GST_TAG_REFERENCE_LEVEL);
+
+ gst_rg_volume_update_gain (self);
+
+ gst_event_unref (event);
+ if (gst_tag_list_is_empty (tag_list)) {
+ gst_tag_list_unref (tag_list);
+ return NULL;
+ }
+
+ return gst_event_new_tag (tag_list);
+ }
+
+ static void
+ gst_rg_volume_reset (GstRgVolume * self)
+ {
+ self->has_track_gain = FALSE;
+ self->has_track_peak = FALSE;
+ self->has_album_gain = FALSE;
+ self->has_album_peak = FALSE;
+
+ self->reference_level = RG_REFERENCE_LEVEL;
+
+ gst_rg_volume_update_gain (self);
+ }
+
+ static void
+ gst_rg_volume_update_gain (GstRgVolume * self)
+ {
+ gdouble target_gain, result_gain, result_volume;
+ gboolean target_gain_changed, result_gain_changed;
+
++#ifdef TIZEN_FEATURE_RGVOLUME_MODIFICATION
++ if (!self->enable_rgvolume) {
++ g_object_set (self->volume_element, "volume", 1.0, NULL);
++ return;
++ }
++#endif
+ gst_rg_volume_determine_gain (self, &target_gain, &result_gain);
+
+ result_volume = DB_TO_LINEAR (result_gain);
+
+ /* Ensure that the result volume is within the range that the volume element
+ * can handle. Currently, the limit is 10. (+20 dB), which should not be
+ * restrictive. */
+ if (G_UNLIKELY (result_volume > self->max_volume)) {
+ GST_INFO_OBJECT (self,
+ "cannot handle result gain of %" GAIN_FORMAT " (%0.6f), adjusting",
+ result_gain, result_volume);
+
+ result_volume = self->max_volume;
+ result_gain = LINEAR_TO_DB (result_volume);
+ }
+
+ /* Direct comparison is OK in this case. */
+ if (target_gain == result_gain) {
+ GST_INFO_OBJECT (self,
+ "result gain is %" GAIN_FORMAT " (%0.6f), matching target",
+ result_gain, result_volume);
+ } else {
+ GST_INFO_OBJECT (self,
+ "result gain is %" GAIN_FORMAT " (%0.6f), target is %" GAIN_FORMAT,
+ result_gain, result_volume, target_gain);
+ }
+
+ target_gain_changed = (self->target_gain != target_gain);
+ result_gain_changed = (self->result_gain != result_gain);
+
+ self->target_gain = target_gain;
+ self->result_gain = result_gain;
+
+ g_object_set (self->volume_element, "volume", result_volume, NULL);
+
+ if (target_gain_changed)
+ g_object_notify ((GObject *) self, "target-gain");
+ if (result_gain_changed)
+ g_object_notify ((GObject *) self, "result-gain");
+ }
+
+ static inline void
+ gst_rg_volume_determine_gain (GstRgVolume * self, gdouble * target_gain,
+ gdouble * result_gain)
+ {
+ gdouble gain, peak;
+
+ if (!self->has_track_gain && !self->has_album_gain) {
+
+ GST_DEBUG_OBJECT (self, "using fallback gain");
+ gain = self->fallback_gain;
+ peak = 1.0;
+
+ } else if ((self->album_mode && self->has_album_gain)
+ || (!self->album_mode && !self->has_track_gain)) {
+
+ gain = self->album_gain;
+ if (G_LIKELY (self->has_album_peak)) {
+ peak = self->album_peak;
+ } else {
+ GST_DEBUG_OBJECT (self, "album peak missing, assuming 1.0");
+ peak = 1.0;
+ }
+ /* Falling back from track to album gain shouldn't really happen. */
+ if (G_UNLIKELY (!self->album_mode))
+ GST_INFO_OBJECT (self, "falling back to album gain");
+
+ } else {
+ /* !album_mode && !has_album_gain || album_mode && has_track_gain */
+
+ gain = self->track_gain;
+ if (G_LIKELY (self->has_track_peak)) {
+ peak = self->track_peak;
+ } else {
+ GST_DEBUG_OBJECT (self, "track peak missing, assuming 1.0");
+ peak = 1.0;
+ }
+ if (self->album_mode)
+ GST_INFO_OBJECT (self, "falling back to track gain");
+ }
+
+ gain += self->pre_amp;
+
+ *target_gain = gain;
+ *result_gain = gain;
+
+ if (LINEAR_TO_DB (peak) + gain > self->headroom) {
+ *result_gain = LINEAR_TO_DB (1. / peak) + self->headroom;
+ }
+ }
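+
+ /* Worked example (illustrative numbers): with gain = +8 dB, peak = 0.8 and
+ * headroom = 0 dB, LINEAR_TO_DB (0.8) + 8 is roughly +6.1 dB and exceeds the
+ * headroom, so the result gain is limited to LINEAR_TO_DB (1 / 0.8), roughly
+ * +1.9 dB, at which the loudest sample (0.8) lands exactly at 1.0. */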
--- /dev/null
- *
+ /* GStreamer ReplayGain volume adjustment
+ *
+ * Copyright (C) 2007 Rene Stadler <mail@renestadler.de>
+ *
+ * gstrgvolume.h: Element to apply ReplayGain volume adjustment
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public License
+ * as published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
- *
++ *
+ * This library is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
++ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+ * 02110-1301 USA
+ */
+
+ #ifndef __GST_RG_VOLUME_H__
+ #define __GST_RG_VOLUME_H__
+
+ #include <gst/gst.h>
+
+ G_BEGIN_DECLS
+
+ #define GST_TYPE_RG_VOLUME \
+ (gst_rg_volume_get_type())
+ #define GST_RG_VOLUME(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RG_VOLUME,GstRgVolume))
+ #define GST_RG_VOLUME_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RG_VOLUME,GstRgVolumeClass))
+ #define GST_IS_RG_VOLUME(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RG_VOLUME))
+ #define GST_IS_RG_VOLUME_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RG_VOLUME))
+
+ typedef struct _GstRgVolume GstRgVolume;
+ typedef struct _GstRgVolumeClass GstRgVolumeClass;
+
+ /**
+ * GstRgVolume:
+ *
+ * Opaque data structure.
+ */
+ struct _GstRgVolume
+ {
+ GstBin bin;
+
+ /*< private >*/
+
+ GstElement *volume_element;
+ gdouble max_volume;
+
++#ifdef TIZEN_FEATURE_RGVOLUME_MODIFICATION
++ gboolean enable_rgvolume;
++#endif
+ gboolean album_mode;
+ gdouble headroom;
+ gdouble pre_amp;
+ gdouble fallback_gain;
+
+ gdouble target_gain;
+ gdouble result_gain;
+
+ gdouble track_gain;
+ gdouble track_peak;
+ gdouble album_gain;
+ gdouble album_peak;
+
+ gboolean has_track_gain;
+ gboolean has_track_peak;
+ gboolean has_album_gain;
+ gboolean has_album_peak;
+
+ gdouble reference_level;
+ };
+
+ struct _GstRgVolumeClass
+ {
+ GstBinClass parent_class;
+ };
+
+ GType gst_rg_volume_get_type (void);
+
+ GST_ELEMENT_REGISTER_DECLARE (rgvolume);
+
+ G_END_DECLS
+
+ #endif /* __GST_RG_VOLUME_H__ */
--- /dev/null
-
+ /* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /**
+ * SECTION:element-rtpbin
+ * @title: rtpbin
+ * @see_also: rtpjitterbuffer, rtpsession, rtpptdemux, rtpssrcdemux
+ *
+ * RTP bin combines the functions of #GstRtpSession, #GstRtpSsrcDemux,
+ * #GstRtpJitterBuffer and #GstRtpPtDemux in one element. It allows for multiple
+ * RTP sessions that will be synchronized together using RTCP SR packets.
+ *
+ * #GstRtpBin is configured with a number of request pads that define the
+ * functionality that is activated, similar to the #GstRtpSession element.
+ *
+ * To use #GstRtpBin as an RTP receiver, request a recv_rtp_sink_\%u pad. The session
+ * number must be specified in the pad name.
+ * Data received on the recv_rtp_sink_\%u pad will be processed in the #GstRtpSession
+ * manager and, after being validated, forwarded to a #GstRtpSsrcDemux element. Each
+ * RTP stream is demuxed based on the SSRC and sent to a #GstRtpJitterBuffer. After
+ * the packets are released from the jitterbuffer, they will be forwarded to a
+ * #GstRtpPtDemux element. The #GstRtpPtDemux element will demux the packets based
+ * on the payload type and will create a unique pad recv_rtp_src_\%u_\%u_\%u on
+ * rtpbin with the session number, SSRC and payload type respectively as the pad
+ * name.
+ *
+ * To also use #GstRtpBin as an RTCP receiver, request a recv_rtcp_sink_\%u pad. The
+ * session number must be specified in the pad name.
+ *
+ * If you want the session manager to generate and send RTCP packets, request
+ * the send_rtcp_src_\%u pad with the session number in the pad name. Packets pushed
+ * on this pad contain SR/RR RTCP reports that should be sent to all participants
+ * in the session.
+ *
+ * To use #GstRtpBin as a sender, request a send_rtp_sink_\%u pad, which will
+ * automatically create a send_rtp_src_\%u pad. If the session number is not provided,
+ * the pad from the lowest available session will be returned. The session manager will modify the
+ * SSRC in the RTP packets to its own SSRC and will forward the packets on the
+ * send_rtp_src_\%u pad after updating its internal state.
+ *
+ * The session manager needs the clock-rate of the payload types it is handling
+ * and will emit the #GstRtpSession::request-pt-map signal when it needs such a
+ * mapping. One can clear the cached values with the #GstRtpSession::clear-pt-map
+ * signal.
+ *
+ * Access to the internal statistics of rtpbin is provided with the
+ * get-internal-session action signal. It gives access to the
+ * RTPSession object which further provides action signals to retrieve the
+ * internal source and other sources.
+ *
+ * #GstRtpBin also has signals (#GstRtpBin::request-rtp-encoder,
+ * #GstRtpBin::request-rtp-decoder, #GstRtpBin::request-rtcp-encoder and
+ * #GstRtpBin::request-rtcp-decoder) to dynamically request RTP and RTCP encoders
+ * and decoders in order to support SRTP. The encoders must provide the pads
+ * rtp_sink_\%u and rtp_src_\%u for RTP and rtcp_sink_\%u and rtcp_src_\%u for
+ * RTCP. The session number will be used in the pad name. The decoders must provide
+ * rtp_sink and rtp_src for RTP and rtcp_sink and rtcp_src for RTCP. The decoders will
+ * be placed before the #GstRtpSession element, thus they must support SSRC demuxing
+ * internally.
+ *
+ * #GstRtpBin has signals (#GstRtpBin::request-aux-sender and
+ * #GstRtpBin::request-aux-receiver) to dynamically request an element that can be
+ * used to create or merge additional RTP streams. AUX elements are needed to
+ * implement FEC or retransmission (such as RFC 4588). An AUX sender must have one
+ * sink_\%u pad that matches the sessionid in the signal and it should have 1 or
+ * more src_\%u pads. For each src_\%u pad, a session will be made (if needed)
+ * and the pad will be linked to the session send_rtp_sink pad. Each session will
+ * then expose its source pad as send_rtp_src_\%u on #GstRtpBin.
+ * An AUX receiver has 1 src_\%u pad that must match the sessionid in the signal
+ * and 1 or more sink_\%u pads. A session will be made for each sink_\%u pad
+ * when the corresponding recv_rtp_sink_\%u pad is requested on #GstRtpBin.
+ * The #GstRtpBin::request-jitterbuffer signal can be used to provide a custom
+ * element to perform arrival time smoothing, reordering and optionally packet
+ * loss detection and retransmission requests.
+ *
+ * ## Example pipelines
+ *
+ * |[
+ * gst-launch-1.0 udpsrc port=5000 caps="application/x-rtp, ..." ! .recv_rtp_sink_0 \
+ * rtpbin ! rtptheoradepay ! theoradec ! xvimagesink
+ * ]| Receive RTP data from port 5000 and send it to session 0 in rtpbin.
+ * |[
+ * gst-launch-1.0 rtpbin name=rtpbin \
+ * v4l2src ! videoconvert ! ffenc_h263 ! rtph263ppay ! rtpbin.send_rtp_sink_0 \
+ * rtpbin.send_rtp_src_0 ! udpsink port=5000 \
+ * rtpbin.send_rtcp_src_0 ! udpsink port=5001 sync=false async=false \
+ * udpsrc port=5005 ! rtpbin.recv_rtcp_sink_0 \
+ * audiotestsrc ! amrnbenc ! rtpamrpay ! rtpbin.send_rtp_sink_1 \
+ * rtpbin.send_rtp_src_1 ! udpsink port=5002 \
+ * rtpbin.send_rtcp_src_1 ! udpsink port=5003 sync=false async=false \
+ * udpsrc port=5007 ! rtpbin.recv_rtcp_sink_1
+ * ]| Encode and payload H263 video captured from a v4l2src. Encode and payload AMR
+ * audio generated from audiotestsrc. The video is sent to session 0 in rtpbin
+ * and the audio is sent to session 1. Video packets are sent on UDP port 5000
+ * and audio packets on port 5002. The video RTCP packets for session 0 are sent
+ * on port 5001 and the audio RTCP packets for session 1 are sent on port 5003.
+ * RTCP packets for session 0 are received on port 5005 and RTCP for session 1
+ * is received on port 5007. Since RTCP packets from the sender should be sent
+ * as soon as possible and do not participate in preroll, sync=false and
+ * async=false are configured on udpsink.
+ * |[
+ * gst-launch-1.0 -v rtpbin name=rtpbin \
+ * udpsrc caps="application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H263-1998" \
+ * port=5000 ! rtpbin.recv_rtp_sink_0 \
+ * rtpbin. ! rtph263pdepay ! ffdec_h263 ! xvimagesink \
+ * udpsrc port=5001 ! rtpbin.recv_rtcp_sink_0 \
+ * rtpbin.send_rtcp_src_0 ! udpsink port=5005 sync=false async=false \
+ * udpsrc caps="application/x-rtp,media=(string)audio,clock-rate=(int)8000,encoding-name=(string)AMR,encoding-params=(string)1,octet-align=(string)1" \
+ * port=5002 ! rtpbin.recv_rtp_sink_1 \
+ * rtpbin. ! rtpamrdepay ! amrnbdec ! alsasink \
+ * udpsrc port=5003 ! rtpbin.recv_rtcp_sink_1 \
+ * rtpbin.send_rtcp_src_1 ! udpsink port=5007 sync=false async=false
+ * ]| Receive H263 on port 5000, send it through rtpbin in session 0, depayload,
+ * decode and display the video.
+ * Receive AMR on port 5002, send it through rtpbin in session 1, depayload,
+ * decode and play the audio.
+ * Receive server RTCP packets for session 0 on port 5001 and RTCP packets for
+ * session 1 on port 5003. These packets will be used for session management and
+ * synchronisation.
+ * Send RTCP reports for session 0 on port 5005 and RTCP reports for session 1
+ * on port 5007.
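+ *
+ * As a minimal C sketch (illustrative only; the depay element, variable
+ * declarations and error handling are assumptions), the request and
+ * sometimes pads described above are used from application code like this:
+ * |[
+ * static void
+ * on_pad_added (GstElement * rtpbin, GstPad * pad, gpointer depay)
+ * {
+ *   GstPad *sinkpad;
+ *
+ *   if (!g_str_has_prefix (GST_PAD_NAME (pad), "recv_rtp_src_"))
+ *     return;
+ *   sinkpad = gst_element_get_static_pad (GST_ELEMENT (depay), "sink");
+ *   if (!gst_pad_is_linked (sinkpad))
+ *     gst_pad_link (pad, sinkpad);
+ *   gst_object_unref (sinkpad);
+ * }
+ *
+ * GstElement *rtpbin = gst_element_factory_make ("rtpbin", NULL);
+ * GstPad *recv_rtp =
+ *     gst_element_request_pad_simple (rtpbin, "recv_rtp_sink_0");
+ * g_signal_connect (rtpbin, "pad-added", G_CALLBACK (on_pad_added), depay);
+ * ]|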
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+ #include <stdio.h>
+ #include <string.h>
+
+ #include <gst/rtp/gstrtpbuffer.h>
+ #include <gst/rtp/gstrtcpbuffer.h>
+
+ #include "gstrtpbin.h"
+ #include "rtpsession.h"
+ #include "gstrtpsession.h"
+ #include "gstrtpjitterbuffer.h"
+
+ #include <gst/glib-compat-private.h>
+
+ GST_DEBUG_CATEGORY_STATIC (gst_rtp_bin_debug);
+ #define GST_CAT_DEFAULT gst_rtp_bin_debug
+
+ /* sink pads */
+ static GstStaticPadTemplate rtpbin_recv_rtp_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("recv_rtp_sink_%u",
+ GST_PAD_SINK,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS ("application/x-rtp;application/x-srtp")
+ );
+
+ /**
+ * GstRtpBin!recv_fec_sink_%u_%u:
+ *
+ * Sink template for receiving Forward Error Correction packets,
+ * in the form recv_fec_sink_<session_idx>_<fec_stream_idx>
+ *
+ * See #GstRTPST_2022_1_FecDec for example usage
+ *
+ * Since: 1.20
+ */
+ static GstStaticPadTemplate rtpbin_recv_fec_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("recv_fec_sink_%u_%u",
+ GST_PAD_SINK,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS ("application/x-rtp")
+ );
+
+ /**
+ * GstRtpBin!send_fec_src_%u_%u:
+ *
+ * Src template for sending Forward Error Correction packets,
+ * in the form send_fec_src_<session_idx>_<fec_stream_idx>
+ *
+ * See #GstRTPST_2022_1_FecEnc for example usage
+ *
+ * Since: 1.20
+ */
+ static GstStaticPadTemplate rtpbin_send_fec_src_template =
+ GST_STATIC_PAD_TEMPLATE ("send_fec_src_%u_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS ("application/x-rtp")
+ );
+
+ static GstStaticPadTemplate rtpbin_recv_rtcp_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("recv_rtcp_sink_%u",
+ GST_PAD_SINK,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS ("application/x-rtcp;application/x-srtcp")
+ );
+
+ static GstStaticPadTemplate rtpbin_send_rtp_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("send_rtp_sink_%u",
+ GST_PAD_SINK,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS ("application/x-rtp")
+ );
+
+ /* src pads */
+ static GstStaticPadTemplate rtpbin_recv_rtp_src_template =
+ GST_STATIC_PAD_TEMPLATE ("recv_rtp_src_%u_%u_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS ("application/x-rtp")
+ );
+
+ static GstStaticPadTemplate rtpbin_send_rtcp_src_template =
+ GST_STATIC_PAD_TEMPLATE ("send_rtcp_src_%u",
+ GST_PAD_SRC,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS ("application/x-rtcp;application/x-srtcp")
+ );
+
+ static GstStaticPadTemplate rtpbin_send_rtp_src_template =
+ GST_STATIC_PAD_TEMPLATE ("send_rtp_src_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS ("application/x-rtp;application/x-srtp")
+ );
+
+ #define GST_RTP_BIN_LOCK(bin) g_mutex_lock (&(bin)->priv->bin_lock)
+ #define GST_RTP_BIN_UNLOCK(bin) g_mutex_unlock (&(bin)->priv->bin_lock)
+
+ /* lock to protect dynamic callbacks, like pad-added and new ssrc. */
+ #define GST_RTP_BIN_DYN_LOCK(bin) g_mutex_lock (&(bin)->priv->dyn_lock)
+ #define GST_RTP_BIN_DYN_UNLOCK(bin) g_mutex_unlock (&(bin)->priv->dyn_lock)
+
+ /* lock for shutdown */
+ #define GST_RTP_BIN_SHUTDOWN_LOCK(bin,label) \
+ G_STMT_START { \
+ if (g_atomic_int_get (&bin->priv->shutdown)) \
+ goto label; \
+ GST_RTP_BIN_DYN_LOCK (bin); \
+ if (g_atomic_int_get (&bin->priv->shutdown)) { \
+ GST_RTP_BIN_DYN_UNLOCK (bin); \
+ goto label; \
+ } \
+ } G_STMT_END
+
+ /* unlock for shutdown */
+ #define GST_RTP_BIN_SHUTDOWN_UNLOCK(bin) \
+ GST_RTP_BIN_DYN_UNLOCK (bin); \
+
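+ /* Typical usage sketch for the shutdown lock (labels and surrounding code
+ * are illustrative): take the dynamic lock unless the bin is shutting down,
+ * do the dynamic work, then release it.
+ *
+ *   GST_RTP_BIN_SHUTDOWN_LOCK (rtpbin, was_shutdown);
+ *   ... add pads, connect signal handlers ...
+ *   GST_RTP_BIN_SHUTDOWN_UNLOCK (rtpbin);
+ *   return;
+ * was_shutdown:
+ *   ... the bin is shutting down, skip or undo the work ...
+ */
+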
+ /* Minimum time offset to apply. This compensates for rounding errors in NTP to
+ * RTP timestamp conversions */
+ #define MIN_TS_OFFSET (4 * GST_MSECOND)
+
+ struct _GstRtpBinPrivate
+ {
+ GMutex bin_lock;
+
+ /* lock protecting dynamic adding/removing */
+ GMutex dyn_lock;
+
+ /* if we are shutting down or not */
+ gint shutdown;
+
+ gboolean autoremove;
+
+ /* NTP time in ns of last SR sync used */
+ guint64 last_ntpnstime;
+
+ /* list of extra elements */
+ GList *elements;
+ };
+
+ /* signals and args */
+ enum
+ {
+ SIGNAL_REQUEST_PT_MAP,
+ SIGNAL_PAYLOAD_TYPE_CHANGE,
+ SIGNAL_CLEAR_PT_MAP,
+ SIGNAL_RESET_SYNC,
+ SIGNAL_GET_SESSION,
+ SIGNAL_GET_INTERNAL_SESSION,
+ SIGNAL_GET_STORAGE,
+ SIGNAL_GET_INTERNAL_STORAGE,
+ SIGNAL_CLEAR_SSRC,
+
+ SIGNAL_ON_NEW_SSRC,
+ SIGNAL_ON_SSRC_COLLISION,
+ SIGNAL_ON_SSRC_VALIDATED,
+ SIGNAL_ON_SSRC_ACTIVE,
+ SIGNAL_ON_SSRC_SDES,
+ SIGNAL_ON_BYE_SSRC,
+ SIGNAL_ON_BYE_TIMEOUT,
+ SIGNAL_ON_TIMEOUT,
+ SIGNAL_ON_SENDER_TIMEOUT,
+ SIGNAL_ON_NPT_STOP,
+
+ SIGNAL_REQUEST_RTP_ENCODER,
+ SIGNAL_REQUEST_RTP_DECODER,
+ SIGNAL_REQUEST_RTCP_ENCODER,
+ SIGNAL_REQUEST_RTCP_DECODER,
+
+ SIGNAL_REQUEST_FEC_DECODER,
+ SIGNAL_REQUEST_FEC_ENCODER,
+
+ SIGNAL_REQUEST_JITTERBUFFER,
+
+ SIGNAL_NEW_JITTERBUFFER,
+ SIGNAL_NEW_STORAGE,
+
+ SIGNAL_REQUEST_AUX_SENDER,
+ SIGNAL_REQUEST_AUX_RECEIVER,
+
+ SIGNAL_ON_NEW_SENDER_SSRC,
+ SIGNAL_ON_SENDER_SSRC_ACTIVE,
+
+ SIGNAL_ON_BUNDLED_SSRC,
+
+ LAST_SIGNAL
+ };
+
+ #define DEFAULT_LATENCY_MS 200
+ #define DEFAULT_DROP_ON_LATENCY FALSE
+ #define DEFAULT_SDES NULL
+ #define DEFAULT_DO_LOST FALSE
+ #define DEFAULT_IGNORE_PT FALSE
+ #define DEFAULT_NTP_SYNC FALSE
+ #define DEFAULT_AUTOREMOVE FALSE
+ #define DEFAULT_BUFFER_MODE RTP_JITTER_BUFFER_MODE_SLAVE
+ #define DEFAULT_USE_PIPELINE_CLOCK FALSE
+ #define DEFAULT_RTCP_SYNC GST_RTP_BIN_RTCP_SYNC_ALWAYS
+ #define DEFAULT_RTCP_SYNC_INTERVAL 0
+ #define DEFAULT_DO_SYNC_EVENT FALSE
+ #define DEFAULT_DO_RETRANSMISSION FALSE
+ #define DEFAULT_RTP_PROFILE GST_RTP_PROFILE_AVP
+ #define DEFAULT_NTP_TIME_SOURCE GST_RTP_NTP_TIME_SOURCE_NTP
+ #define DEFAULT_RTCP_SYNC_SEND_TIME TRUE
+ #define DEFAULT_MAX_RTCP_RTP_TIME_DIFF 1000
+ #define DEFAULT_MAX_DROPOUT_TIME 60000
+ #define DEFAULT_MAX_MISORDER_TIME 2000
+ #define DEFAULT_RFC7273_SYNC FALSE
+ #define DEFAULT_MAX_STREAMS G_MAXUINT
+ #define DEFAULT_MAX_TS_OFFSET_ADJUSTMENT G_GUINT64_CONSTANT(0)
+ #define DEFAULT_MAX_TS_OFFSET G_GINT64_CONSTANT(3000000000)
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++#define DEFAULT_RTSP_USE_BUFFERING FALSE
++#endif
+
+ enum
+ {
+ PROP_0,
+ PROP_LATENCY,
+ PROP_DROP_ON_LATENCY,
+ PROP_SDES,
+ PROP_DO_LOST,
+ PROP_IGNORE_PT,
+ PROP_NTP_SYNC,
+ PROP_RTCP_SYNC,
+ PROP_RTCP_SYNC_INTERVAL,
+ PROP_AUTOREMOVE,
+ PROP_BUFFER_MODE,
+ PROP_USE_PIPELINE_CLOCK,
+ PROP_DO_SYNC_EVENT,
+ PROP_DO_RETRANSMISSION,
+ PROP_RTP_PROFILE,
+ PROP_NTP_TIME_SOURCE,
+ PROP_RTCP_SYNC_SEND_TIME,
+ PROP_MAX_RTCP_RTP_TIME_DIFF,
+ PROP_MAX_DROPOUT_TIME,
+ PROP_MAX_MISORDER_TIME,
+ PROP_RFC7273_SYNC,
+ PROP_MAX_STREAMS,
+ PROP_MAX_TS_OFFSET_ADJUSTMENT,
+ PROP_MAX_TS_OFFSET,
+ PROP_FEC_DECODERS,
+ PROP_FEC_ENCODERS,
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ PROP_USE_RTSP_BUFFERING /* use for player RTSP buffering */
++#endif
+ };
+
+ #define GST_RTP_BIN_RTCP_SYNC_TYPE (gst_rtp_bin_rtcp_sync_get_type())
+ static GType
+ gst_rtp_bin_rtcp_sync_get_type (void)
+ {
+ static GType rtcp_sync_type = 0;
+ static const GEnumValue rtcp_sync_types[] = {
+ {GST_RTP_BIN_RTCP_SYNC_ALWAYS, "always", "always"},
+ {GST_RTP_BIN_RTCP_SYNC_INITIAL, "initial", "initial"},
+ {GST_RTP_BIN_RTCP_SYNC_RTP, "rtp-info", "rtp-info"},
+ {0, NULL, NULL},
+ };
+
+ if (!rtcp_sync_type) {
+ rtcp_sync_type = g_enum_register_static ("GstRTCPSync", rtcp_sync_types);
+ }
+ return rtcp_sync_type;
+ }
+
+ /* helper objects */
+ typedef struct _GstRtpBinSession GstRtpBinSession;
+ typedef struct _GstRtpBinStream GstRtpBinStream;
+ typedef struct _GstRtpBinClient GstRtpBinClient;
+
+ static guint gst_rtp_bin_signals[LAST_SIGNAL] = { 0 };
+
+ static GstCaps *pt_map_requested (GstElement * element, guint pt,
+ GstRtpBinSession * session);
+ static void payload_type_change (GstElement * element, guint pt,
+ GstRtpBinSession * session);
+ static void remove_recv_rtp (GstRtpBin * rtpbin, GstRtpBinSession * session);
+ static void remove_recv_rtcp (GstRtpBin * rtpbin, GstRtpBinSession * session);
+ static void remove_recv_fec (GstRtpBin * rtpbin, GstRtpBinSession * session);
+ static void remove_send_rtp (GstRtpBin * rtpbin, GstRtpBinSession * session);
+ static void remove_send_fec (GstRtpBin * rtpbin, GstRtpBinSession * session);
+ static void remove_rtcp (GstRtpBin * rtpbin, GstRtpBinSession * session);
+ static void free_client (GstRtpBinClient * client, GstRtpBin * bin);
+ static void free_stream (GstRtpBinStream * stream, GstRtpBin * bin);
+ static GstRtpBinSession *create_session (GstRtpBin * rtpbin, gint id);
+ static GstPad *complete_session_sink (GstRtpBin * rtpbin,
+ GstRtpBinSession * session);
+ static void
+ complete_session_receiver (GstRtpBin * rtpbin, GstRtpBinSession * session,
+ guint sessid);
+ static GstPad *complete_session_rtcp (GstRtpBin * rtpbin,
+ GstRtpBinSession * session, guint sessid);
+ static GstElement *session_request_element (GstRtpBinSession * session,
+ guint signal);
+
+ /* Manages the RTP stream for one SSRC.
+ *
+ * We pipe the stream (coming from the SSRC demuxer) into a jitterbuffer.
+ * If we see an SDES RTCP packet that links multiple SSRCs together based on a
+ * common CNAME, we create a GstRtpBinClient structure to group the SSRCs
+ * together (see below).
+ */
+ struct _GstRtpBinStream
+ {
+ /* the SSRC of this stream */
+ guint32 ssrc;
+
+ /* parent bin */
+ GstRtpBin *bin;
+
+ /* the session this SSRC belongs to */
+ GstRtpBinSession *session;
+
+ /* the jitterbuffer of the SSRC */
+ GstElement *buffer;
+ gulong buffer_handlesync_sig;
+ gulong buffer_ptreq_sig;
+ gulong buffer_ntpstop_sig;
+ gint percent;
-
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gint prev_percent;
++#endif
+ /* the PT demuxer of the SSRC */
+ GstElement *demux;
+ gulong demux_newpad_sig;
+ gulong demux_padremoved_sig;
+ gulong demux_ptreq_sig;
+ gulong demux_ptchange_sig;
+
+ /* if we have calculated a valid rt_delta for this stream */
+ gboolean have_sync;
+ /* mapping to local RTP and NTP time */
+ gint64 rt_delta;
+ gint64 rtp_delta;
+ /* base rtptime in gst time */
+ gint64 clock_base;
+ };
+
+ #define GST_RTP_SESSION_LOCK(sess) g_mutex_lock (&(sess)->lock)
+ #define GST_RTP_SESSION_UNLOCK(sess) g_mutex_unlock (&(sess)->lock)
+
+ /* Manages the receiving end of the packets.
+ *
+ * There is one such structure for each RTP session (audio/video/...).
+ * We get the RTP/RTCP packets and stuff them into the session manager. From
+ * there they are pushed into an SSRC demuxer that splits the stream based on
+ * SSRC. Each of the SSRC streams go into their own jitterbuffer (managed with
+ * the GstRtpBinStream above).
+ *
+ * Before the SSRC demuxer, a storage element may be inserted for the purpose
+ * of Forward Error Correction.
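+ *
+ * Per session, the receive chain is thus roughly:
+ *
+ *   recv_rtp_sink -> rtpsession -> (rtpstorage) -> rtpssrcdemux
+ *     -> per SSRC: rtpjitterbuffer -> rtpptdemux -> recv_rtp_src_%u_%u_%u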
+ */
+ struct _GstRtpBinSession
+ {
+ /* session id */
+ gint id;
+ /* the parent bin */
+ GstRtpBin *bin;
+ /* the session element */
+ GstElement *session;
+ /* the SSRC demuxer */
+ GstElement *demux;
+ gulong demux_newpad_sig;
+ gulong demux_padremoved_sig;
+
+ /* Fec support */
+ GstElement *storage;
+
+ GMutex lock;
+
+ /* list of GstRtpBinStream */
+ GSList *streams;
+
+ /* list of elements */
+ GSList *elements;
+
+ /* mapping of payload type to caps */
+ GHashTable *ptmap;
+
+ /* the pads of the session */
+ GstPad *recv_rtp_sink;
+ GstPad *recv_rtp_sink_ghost;
+ GstPad *recv_rtp_src;
+ GstPad *recv_rtcp_sink;
+ GstPad *recv_rtcp_sink_ghost;
+ GstPad *sync_src;
+ GstPad *send_rtp_sink;
+ GstPad *send_rtp_sink_ghost;
+ GstPad *send_rtp_src_ghost;
+ GstPad *send_rtcp_src;
+ GstPad *send_rtcp_src_ghost;
+
+ GSList *recv_fec_sinks;
+ GSList *recv_fec_sink_ghosts;
+ GstElement *fec_decoder;
+
+ GSList *send_fec_src_ghosts;
+ };
+
+ /* Manages the RTP streams that come from one client and should therefore be
+ * synchronized.
+ */
+ struct _GstRtpBinClient
+ {
+ /* the common CNAME for the streams */
+ gchar *cname;
+ guint cname_len;
+
+ /* the streams */
+ guint nstreams;
+ GSList *streams;
+ };
+
+ /* find a session with the given id. Must be called with RTP_BIN_LOCK */
+ static GstRtpBinSession *
+ find_session_by_id (GstRtpBin * rtpbin, gint id)
+ {
+ GSList *walk;
+
+ for (walk = rtpbin->sessions; walk; walk = g_slist_next (walk)) {
+ GstRtpBinSession *sess = (GstRtpBinSession *) walk->data;
+
+ if (sess->id == id)
+ return sess;
+ }
+ return NULL;
+ }
+
+ static gboolean
+ pad_is_recv_fec (GstRtpBinSession * session, GstPad * pad)
+ {
+ return g_slist_find (session->recv_fec_sink_ghosts, pad) != NULL;
+ }
+
+ /* find a session with the given request pad. Must be called with RTP_BIN_LOCK */
+ static GstRtpBinSession *
+ find_session_by_pad (GstRtpBin * rtpbin, GstPad * pad)
+ {
+ GSList *walk;
+
+ for (walk = rtpbin->sessions; walk; walk = g_slist_next (walk)) {
+ GstRtpBinSession *sess = (GstRtpBinSession *) walk->data;
+
+ if ((sess->recv_rtp_sink_ghost == pad) ||
+ (sess->recv_rtcp_sink_ghost == pad) ||
+ (sess->send_rtp_sink_ghost == pad) ||
+ (sess->send_rtcp_src_ghost == pad) || pad_is_recv_fec (sess, pad))
+ return sess;
+ }
+ return NULL;
+ }
+
+ static void
+ on_new_ssrc (GstElement * session, guint32 ssrc, GstRtpBinSession * sess)
+ {
+ g_signal_emit (sess->bin, gst_rtp_bin_signals[SIGNAL_ON_NEW_SSRC], 0,
+ sess->id, ssrc);
+ }
+
+ static void
+ on_ssrc_collision (GstElement * session, guint32 ssrc, GstRtpBinSession * sess)
+ {
+ g_signal_emit (sess->bin, gst_rtp_bin_signals[SIGNAL_ON_SSRC_COLLISION], 0,
+ sess->id, ssrc);
+ }
+
+ static void
+ on_ssrc_validated (GstElement * session, guint32 ssrc, GstRtpBinSession * sess)
+ {
+ g_signal_emit (sess->bin, gst_rtp_bin_signals[SIGNAL_ON_SSRC_VALIDATED], 0,
+ sess->id, ssrc);
+ }
+
+ static void
+ on_ssrc_active (GstElement * session, guint32 ssrc, GstRtpBinSession * sess)
+ {
+ g_signal_emit (sess->bin, gst_rtp_bin_signals[SIGNAL_ON_SSRC_ACTIVE], 0,
+ sess->id, ssrc);
+ }
+
+ static void
+ on_ssrc_sdes (GstElement * session, guint32 ssrc, GstRtpBinSession * sess)
+ {
+ g_signal_emit (sess->bin, gst_rtp_bin_signals[SIGNAL_ON_SSRC_SDES], 0,
+ sess->id, ssrc);
+ }
+
+ static void
+ on_bye_ssrc (GstElement * session, guint32 ssrc, GstRtpBinSession * sess)
+ {
+ g_signal_emit (sess->bin, gst_rtp_bin_signals[SIGNAL_ON_BYE_SSRC], 0,
+ sess->id, ssrc);
+ }
+
+ static void
+ on_bye_timeout (GstElement * session, guint32 ssrc, GstRtpBinSession * sess)
+ {
+ g_signal_emit (sess->bin, gst_rtp_bin_signals[SIGNAL_ON_BYE_TIMEOUT], 0,
+ sess->id, ssrc);
+
+ if (sess->bin->priv->autoremove)
+ g_signal_emit_by_name (sess->demux, "clear-ssrc", ssrc, NULL);
+ }
+
+ static void
+ on_timeout (GstElement * session, guint32 ssrc, GstRtpBinSession * sess)
+ {
+ g_signal_emit (sess->bin, gst_rtp_bin_signals[SIGNAL_ON_TIMEOUT], 0,
+ sess->id, ssrc);
+
+ if (sess->bin->priv->autoremove)
+ g_signal_emit_by_name (sess->demux, "clear-ssrc", ssrc, NULL);
+ }
+
+ static void
+ on_sender_timeout (GstElement * session, guint32 ssrc, GstRtpBinSession * sess)
+ {
+ g_signal_emit (sess->bin, gst_rtp_bin_signals[SIGNAL_ON_SENDER_TIMEOUT], 0,
+ sess->id, ssrc);
+ }
+
+ static void
+ on_npt_stop (GstElement * jbuf, GstRtpBinStream * stream)
+ {
+ g_signal_emit (stream->bin, gst_rtp_bin_signals[SIGNAL_ON_NPT_STOP], 0,
+ stream->session->id, stream->ssrc);
+ }
+
+ static void
+ on_new_sender_ssrc (GstElement * session, guint32 ssrc, GstRtpBinSession * sess)
+ {
+ g_signal_emit (sess->bin, gst_rtp_bin_signals[SIGNAL_ON_NEW_SENDER_SSRC], 0,
+ sess->id, ssrc);
+ }
+
+ static void
+ on_sender_ssrc_active (GstElement * session, guint32 ssrc,
+ GstRtpBinSession * sess)
+ {
+ g_signal_emit (sess->bin, gst_rtp_bin_signals[SIGNAL_ON_SENDER_SSRC_ACTIVE],
+ 0, sess->id, ssrc);
+ }
+
+ /* must be called with the SESSION lock */
+ static GstRtpBinStream *
+ find_stream_by_ssrc (GstRtpBinSession * session, guint32 ssrc)
+ {
+ GSList *walk;
+
+ for (walk = session->streams; walk; walk = g_slist_next (walk)) {
+ GstRtpBinStream *stream = (GstRtpBinStream *) walk->data;
+
+ if (stream->ssrc == ssrc)
+ return stream;
+ }
+ return NULL;
+ }
+
+ static void
+ ssrc_demux_pad_removed (GstElement * element, guint ssrc, GstPad * pad,
+ GstRtpBinSession * session)
+ {
+ GstRtpBinStream *stream = NULL;
+ GstRtpBin *rtpbin;
+
+ rtpbin = session->bin;
+
+ GST_RTP_BIN_LOCK (rtpbin);
+
+ GST_RTP_SESSION_LOCK (session);
+ if ((stream = find_stream_by_ssrc (session, ssrc)))
+ session->streams = g_slist_remove (session->streams, stream);
+ GST_RTP_SESSION_UNLOCK (session);
+
+ if (stream)
+ free_stream (stream, rtpbin);
+
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ }
+
+ /* create a session with the given id. Must be called with RTP_BIN_LOCK */
+ static GstRtpBinSession *
+ create_session (GstRtpBin * rtpbin, gint id)
+ {
+ GstRtpBinSession *sess;
+ GstElement *session, *demux;
+ GstElement *storage = NULL;
+ GstState target;
+
+ if (!(session = gst_element_factory_make ("rtpsession", NULL)))
+ goto no_session;
+
+ if (!(demux = gst_element_factory_make ("rtpssrcdemux", NULL)))
+ goto no_demux;
+
+ if (!(storage = gst_element_factory_make ("rtpstorage", NULL)))
+ goto no_storage;
+
+ /* need to sink the storage, otherwise signal handlers from bindings will
+ * take ownership of it and we won't own it anymore */
+ gst_object_ref_sink (storage);
+ g_signal_emit (rtpbin, gst_rtp_bin_signals[SIGNAL_NEW_STORAGE], 0, storage,
+ id);
+
+ sess = g_new0 (GstRtpBinSession, 1);
+ g_mutex_init (&sess->lock);
+ sess->id = id;
+ sess->bin = rtpbin;
+ sess->session = session;
+ sess->demux = demux;
+ sess->storage = storage;
+
+ sess->ptmap = g_hash_table_new_full (NULL, NULL, NULL,
+ (GDestroyNotify) gst_caps_unref);
+ rtpbin->sessions = g_slist_prepend (rtpbin->sessions, sess);
+
+ /* configure SDES items */
+ GST_OBJECT_LOCK (rtpbin);
+ g_object_set (demux, "max-streams", rtpbin->max_streams, NULL);
+ g_object_set (session, "sdes", rtpbin->sdes, "rtp-profile",
+ rtpbin->rtp_profile, "rtcp-sync-send-time", rtpbin->rtcp_sync_send_time,
+ NULL);
+ if (rtpbin->use_pipeline_clock)
+ g_object_set (session, "use-pipeline-clock", rtpbin->use_pipeline_clock,
+ NULL);
+ else
+ g_object_set (session, "ntp-time-source", rtpbin->ntp_time_source, NULL);
+
+ g_object_set (session, "max-dropout-time", rtpbin->max_dropout_time,
+ "max-misorder-time", rtpbin->max_misorder_time, NULL);
+ GST_OBJECT_UNLOCK (rtpbin);
+
+ /* provide clock_rate to the session manager when needed */
+ g_signal_connect (session, "request-pt-map",
+ (GCallback) pt_map_requested, sess);
+
+ g_signal_connect (sess->session, "on-new-ssrc",
+ (GCallback) on_new_ssrc, sess);
+ g_signal_connect (sess->session, "on-ssrc-collision",
+ (GCallback) on_ssrc_collision, sess);
+ g_signal_connect (sess->session, "on-ssrc-validated",
+ (GCallback) on_ssrc_validated, sess);
+ g_signal_connect (sess->session, "on-ssrc-active",
+ (GCallback) on_ssrc_active, sess);
+ g_signal_connect (sess->session, "on-ssrc-sdes",
+ (GCallback) on_ssrc_sdes, sess);
+ g_signal_connect (sess->session, "on-bye-ssrc",
+ (GCallback) on_bye_ssrc, sess);
+ g_signal_connect (sess->session, "on-bye-timeout",
+ (GCallback) on_bye_timeout, sess);
+ g_signal_connect (sess->session, "on-timeout", (GCallback) on_timeout, sess);
+ g_signal_connect (sess->session, "on-sender-timeout",
+ (GCallback) on_sender_timeout, sess);
+ g_signal_connect (sess->session, "on-new-sender-ssrc",
+ (GCallback) on_new_sender_ssrc, sess);
+ g_signal_connect (sess->session, "on-sender-ssrc-active",
+ (GCallback) on_sender_ssrc_active, sess);
+
+ gst_bin_add (GST_BIN_CAST (rtpbin), session);
+ gst_bin_add (GST_BIN_CAST (rtpbin), demux);
+ gst_bin_add (GST_BIN_CAST (rtpbin), storage);
+
+ /* unref the storage again, the bin has a reference now and
+ * we don't need it anymore */
+ gst_object_unref (storage);
+
+ GST_OBJECT_LOCK (rtpbin);
+ target = GST_STATE_TARGET (rtpbin);
+ GST_OBJECT_UNLOCK (rtpbin);
+
+ /* change state only to what's needed */
+ gst_element_set_state (demux, target);
+ gst_element_set_state (session, target);
+ gst_element_set_state (storage, target);
+
+ return sess;
+
+ /* ERRORS */
+ no_session:
+ {
+ g_warning ("rtpbin: could not create rtpsession element");
+ return NULL;
+ }
+ no_demux:
+ {
+ gst_object_unref (session);
+ g_warning ("rtpbin: could not create rtpssrcdemux element");
+ return NULL;
+ }
+ no_storage:
+ {
+ gst_object_unref (session);
+ gst_object_unref (demux);
+ g_warning ("rtpbin: could not create rtpstorage element");
+ return NULL;
+ }
+ }
+
+ static gboolean
+ bin_manage_element (GstRtpBin * bin, GstElement * element)
+ {
+ GstRtpBinPrivate *priv = bin->priv;
+
+ if (g_list_find (priv->elements, element)) {
+ GST_DEBUG_OBJECT (bin, "requested element %p already in bin", element);
+ } else {
+ GST_DEBUG_OBJECT (bin, "adding requested element %p", element);
+
+ if (g_object_is_floating (element))
+ element = gst_object_ref_sink (element);
+
+ if (!gst_bin_add (GST_BIN_CAST (bin), element))
+ goto add_failed;
+ if (!gst_element_sync_state_with_parent (element))
+ GST_WARNING_OBJECT (bin, "unable to sync element state with rtpbin");
+ }
+ /* the element may be added multiple times; an equal number of
+ * removes is then needed to really remove it from the bin */
+ priv->elements = g_list_prepend (priv->elements, element);
+
+ return TRUE;
+
+ /* ERRORS */
+ add_failed:
+ {
+ GST_WARNING_OBJECT (bin, "unable to add element");
+ gst_object_unref (element);
+ return FALSE;
+ }
+ }
+
+ static void
+ remove_bin_element (GstElement * element, GstRtpBin * bin)
+ {
+ GstRtpBinPrivate *priv = bin->priv;
+ GList *find;
+
+ find = g_list_find (priv->elements, element);
+ if (find) {
+ priv->elements = g_list_delete_link (priv->elements, find);
+
+ if (!g_list_find (priv->elements, element)) {
+ gst_element_set_locked_state (element, TRUE);
+ gst_bin_remove (GST_BIN_CAST (bin), element);
+ gst_element_set_state (element, GST_STATE_NULL);
+ }
+
+ gst_object_unref (element);
+ }
+ }
+
+ /* called with RTP_BIN_LOCK */
+ static void
+ free_session (GstRtpBinSession * sess, GstRtpBin * bin)
+ {
+ GST_DEBUG_OBJECT (bin, "freeing session %p", sess);
+
+ gst_element_set_locked_state (sess->demux, TRUE);
+ gst_element_set_locked_state (sess->session, TRUE);
+ gst_element_set_locked_state (sess->storage, TRUE);
+
+ gst_element_set_state (sess->demux, GST_STATE_NULL);
+ gst_element_set_state (sess->session, GST_STATE_NULL);
+ gst_element_set_state (sess->storage, GST_STATE_NULL);
+
+ remove_recv_rtp (bin, sess);
+ remove_recv_rtcp (bin, sess);
+ remove_recv_fec (bin, sess);
+ remove_send_rtp (bin, sess);
+ remove_send_fec (bin, sess);
+ remove_rtcp (bin, sess);
+
+ gst_bin_remove (GST_BIN_CAST (bin), sess->session);
+ gst_bin_remove (GST_BIN_CAST (bin), sess->demux);
+ gst_bin_remove (GST_BIN_CAST (bin), sess->storage);
+
+ g_slist_foreach (sess->elements, (GFunc) remove_bin_element, bin);
+ g_slist_free (sess->elements);
+ sess->elements = NULL;
+
+ g_slist_foreach (sess->streams, (GFunc) free_stream, bin);
+ g_slist_free (sess->streams);
+
+ g_mutex_clear (&sess->lock);
+ g_hash_table_destroy (sess->ptmap);
+
+ g_free (sess);
+ }
+
+ /* get the payload type caps for the specific payload @pt in @session */
+ static GstCaps *
+ get_pt_map (GstRtpBinSession * session, guint pt)
+ {
+ GstCaps *caps = NULL;
+ GstRtpBin *bin;
+ GValue ret = { 0 };
+ GValue args[3] = { {0}, {0}, {0} };
+
+ GST_DEBUG ("searching pt %u in cache", pt);
+
+ GST_RTP_SESSION_LOCK (session);
+
+ /* first look in the cache */
+ caps = g_hash_table_lookup (session->ptmap, GINT_TO_POINTER (pt));
+ if (caps) {
+ gst_caps_ref (caps);
+ goto done;
+ }
+
+ bin = session->bin;
+
+ GST_DEBUG ("emitting signal for pt %u in session %u", pt, session->id);
+
+ /* not in cache, send signal to request caps */
+ g_value_init (&args[0], GST_TYPE_ELEMENT);
+ g_value_set_object (&args[0], bin);
+ g_value_init (&args[1], G_TYPE_UINT);
+ g_value_set_uint (&args[1], session->id);
+ g_value_init (&args[2], G_TYPE_UINT);
+ g_value_set_uint (&args[2], pt);
+
+ g_value_init (&ret, GST_TYPE_CAPS);
+ g_value_set_boxed (&ret, NULL);
+
+ GST_RTP_SESSION_UNLOCK (session);
+
+ g_signal_emitv (args, gst_rtp_bin_signals[SIGNAL_REQUEST_PT_MAP], 0, &ret);
+
+ GST_RTP_SESSION_LOCK (session);
+
+ g_value_unset (&args[0]);
+ g_value_unset (&args[1]);
+ g_value_unset (&args[2]);
+
+ /* look in the cache again because we let the lock go */
+ caps = g_hash_table_lookup (session->ptmap, GINT_TO_POINTER (pt));
+ if (caps) {
+ gst_caps_ref (caps);
+ g_value_unset (&ret);
+ goto done;
+ }
+
+ caps = (GstCaps *) g_value_dup_boxed (&ret);
+ g_value_unset (&ret);
+ if (!caps)
+ goto no_caps;
+
+ GST_DEBUG ("caching pt %u as %" GST_PTR_FORMAT, pt, caps);
+
+ /* store in cache, take additional ref */
+ g_hash_table_insert (session->ptmap, GINT_TO_POINTER (pt),
+ gst_caps_ref (caps));
+
+ done:
+ GST_RTP_SESSION_UNLOCK (session);
+
+ return caps;
+
+ /* ERRORS */
+ no_caps:
+ {
+ GST_RTP_SESSION_UNLOCK (session);
+ GST_DEBUG ("no pt map could be obtained");
+ return NULL;
+ }
+ }
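+
+ /* On the application side, a request-pt-map handler is typically connected
+ * as in the sketch below (illustrative only; the payload number and caps
+ * fields are assumptions):
+ *
+ *   static GstCaps *
+ *   on_request_pt_map (GstElement * rtpbin, guint session, guint pt,
+ *       gpointer user_data)
+ *   {
+ *     if (pt == 96)
+ *       return gst_caps_new_simple ("application/x-rtp",
+ *           "media", G_TYPE_STRING, "video",
+ *           "clock-rate", G_TYPE_INT, 90000,
+ *           "encoding-name", G_TYPE_STRING, "H264", NULL);
+ *     return NULL;
+ *   }
+ *
+ *   g_signal_connect (rtpbin, "request-pt-map",
+ *       G_CALLBACK (on_request_pt_map), NULL);
+ */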
+
+ static gboolean
+ return_true (gpointer key, gpointer value, gpointer user_data)
+ {
+ return TRUE;
+ }
+
+ static void
+ gst_rtp_bin_reset_sync (GstRtpBin * rtpbin)
+ {
+ GSList *clients, *streams;
+
+ GST_DEBUG_OBJECT (rtpbin, "Reset sync on all clients");
+
+ GST_RTP_BIN_LOCK (rtpbin);
+ for (clients = rtpbin->clients; clients; clients = g_slist_next (clients)) {
+ GstRtpBinClient *client = (GstRtpBinClient *) clients->data;
+
+ /* reset sync on all streams for this client */
+ for (streams = client->streams; streams; streams = g_slist_next (streams)) {
+ GstRtpBinStream *stream = (GstRtpBinStream *) streams->data;
+
+ /* make us require a new SR packet for this stream before we attempt new
+ * lip-sync */
+ stream->have_sync = FALSE;
+ stream->rt_delta = 0;
+ stream->rtp_delta = 0;
+ stream->clock_base = -100 * GST_SECOND;
+ }
+ }
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ }
+
+ static void
+ gst_rtp_bin_clear_pt_map (GstRtpBin * bin)
+ {
+ GSList *sessions, *streams;
+
+ GST_RTP_BIN_LOCK (bin);
+ GST_DEBUG_OBJECT (bin, "clearing pt map");
+ for (sessions = bin->sessions; sessions; sessions = g_slist_next (sessions)) {
+ GstRtpBinSession *session = (GstRtpBinSession *) sessions->data;
+
+ GST_DEBUG_OBJECT (bin, "clearing session %p", session);
+ g_signal_emit_by_name (session->session, "clear-pt-map", NULL);
+
+ GST_RTP_SESSION_LOCK (session);
+ g_hash_table_foreach_remove (session->ptmap, return_true, NULL);
+
+ for (streams = session->streams; streams; streams = g_slist_next (streams)) {
+ GstRtpBinStream *stream = (GstRtpBinStream *) streams->data;
+
+ GST_DEBUG_OBJECT (bin, "clearing stream %p", stream);
+ if (g_signal_lookup ("clear-pt-map", G_OBJECT_TYPE (stream->buffer)) != 0)
+ g_signal_emit_by_name (stream->buffer, "clear-pt-map", NULL);
+ if (stream->demux)
+ g_signal_emit_by_name (stream->demux, "clear-pt-map", NULL);
+ }
+ GST_RTP_SESSION_UNLOCK (session);
+ }
+ GST_RTP_BIN_UNLOCK (bin);
+
+ /* reset sync too */
+ gst_rtp_bin_reset_sync (bin);
+ }
+
+ static GstElement *
+ gst_rtp_bin_get_session (GstRtpBin * bin, guint session_id)
+ {
+ GstRtpBinSession *session;
+ GstElement *ret = NULL;
+
+ GST_RTP_BIN_LOCK (bin);
+ GST_DEBUG_OBJECT (bin, "retrieving GstRtpSession, index: %u", session_id);
+ session = find_session_by_id (bin, (gint) session_id);
+ if (session) {
+ ret = gst_object_ref (session->session);
+ }
+ GST_RTP_BIN_UNLOCK (bin);
+
+ return ret;
+ }
+
+ static RTPSession *
+ gst_rtp_bin_get_internal_session (GstRtpBin * bin, guint session_id)
+ {
+ RTPSession *internal_session = NULL;
+ GstRtpBinSession *session;
+
+ GST_RTP_BIN_LOCK (bin);
+ GST_DEBUG_OBJECT (bin, "retrieving internal RTPSession object, index: %u",
+ session_id);
+ session = find_session_by_id (bin, (gint) session_id);
+ if (session) {
+ g_object_get (session->session, "internal-session", &internal_session,
+ NULL);
+ }
+ GST_RTP_BIN_UNLOCK (bin);
+
+ return internal_session;
+ }
+
+ static GstElement *
+ gst_rtp_bin_get_storage (GstRtpBin * bin, guint session_id)
+ {
+ GstRtpBinSession *session;
+ GstElement *res = NULL;
+
+ GST_RTP_BIN_LOCK (bin);
+ GST_DEBUG_OBJECT (bin, "retrieving internal storage object, index: %u",
+ session_id);
+ session = find_session_by_id (bin, (gint) session_id);
+ if (session && session->storage) {
+ res = gst_object_ref (session->storage);
+ }
+ GST_RTP_BIN_UNLOCK (bin);
+
+ return res;
+ }
+
+ static GObject *
+ gst_rtp_bin_get_internal_storage (GstRtpBin * bin, guint session_id)
+ {
+ GObject *internal_storage = NULL;
+ GstRtpBinSession *session;
+
+ GST_RTP_BIN_LOCK (bin);
+ GST_DEBUG_OBJECT (bin, "retrieving internal storage object, index: %u",
+ session_id);
+ session = find_session_by_id (bin, (gint) session_id);
+ if (session && session->storage) {
+ g_object_get (session->storage, "internal-storage", &internal_storage,
+ NULL);
+ }
+ GST_RTP_BIN_UNLOCK (bin);
+
+ return internal_storage;
+ }
+
+ static void
+ gst_rtp_bin_clear_ssrc (GstRtpBin * bin, guint session_id, guint32 ssrc)
+ {
+ GstRtpBinSession *session;
+ GstElement *demux = NULL;
+
+ GST_RTP_BIN_LOCK (bin);
+ GST_DEBUG_OBJECT (bin, "clearing ssrc %u for session %u", ssrc, session_id);
+ session = find_session_by_id (bin, (gint) session_id);
+ if (session)
+ demux = gst_object_ref (session->demux);
+ GST_RTP_BIN_UNLOCK (bin);
+
+ if (demux) {
+ g_signal_emit_by_name (demux, "clear-ssrc", ssrc, NULL);
+ gst_object_unref (demux);
+ }
+ }
+
+ static GstElement *
+ gst_rtp_bin_request_encoder (GstRtpBin * bin, guint session_id)
+ {
+ GST_DEBUG_OBJECT (bin, "return NULL encoder");
+ return NULL;
+ }
+
+ static GstElement *
+ gst_rtp_bin_request_decoder (GstRtpBin * bin, guint session_id)
+ {
+ GST_DEBUG_OBJECT (bin, "return NULL decoder");
+ return NULL;
+ }
+
+ static GstElement *
+ gst_rtp_bin_request_jitterbuffer (GstRtpBin * bin, guint session_id)
+ {
+ return gst_element_factory_make ("rtpjitterbuffer", NULL);
+ }
+
+ static void
+ gst_rtp_bin_propagate_property_to_jitterbuffer (GstRtpBin * bin,
+ const gchar * name, const GValue * value)
+ {
+ GSList *sessions, *streams;
+
+ GST_RTP_BIN_LOCK (bin);
+ for (sessions = bin->sessions; sessions; sessions = g_slist_next (sessions)) {
+ GstRtpBinSession *session = (GstRtpBinSession *) sessions->data;
+
+ GST_RTP_SESSION_LOCK (session);
+ for (streams = session->streams; streams; streams = g_slist_next (streams)) {
+ GstRtpBinStream *stream = (GstRtpBinStream *) streams->data;
+ GObjectClass *jb_class;
+
+ jb_class = G_OBJECT_GET_CLASS (G_OBJECT (stream->buffer));
+ if (g_object_class_find_property (jb_class, name))
+ g_object_set_property (G_OBJECT (stream->buffer), name, value);
+ else
+ GST_WARNING_OBJECT (bin,
+ "Stream jitterbuffer does not expose property %s", name);
+ }
+ GST_RTP_SESSION_UNLOCK (session);
+ }
+ GST_RTP_BIN_UNLOCK (bin);
+ }
+
+ static void
+ gst_rtp_bin_propagate_property_to_session (GstRtpBin * bin,
+ const gchar * name, const GValue * value)
+ {
+ GSList *sessions;
+
+ GST_RTP_BIN_LOCK (bin);
+ for (sessions = bin->sessions; sessions; sessions = g_slist_next (sessions)) {
+ GstRtpBinSession *sess = (GstRtpBinSession *) sessions->data;
+
+ g_object_set_property (G_OBJECT (sess->session), name, value);
+ }
+ GST_RTP_BIN_UNLOCK (bin);
+ }
+
+ /* get a client with the given SDES name. Must be called with RTP_BIN_LOCK */
+ static GstRtpBinClient *
+ get_client (GstRtpBin * bin, guint8 len, guint8 * data, gboolean * created)
+ {
+ GstRtpBinClient *result = NULL;
+ GSList *walk;
+
+ for (walk = bin->clients; walk; walk = g_slist_next (walk)) {
+ GstRtpBinClient *client = (GstRtpBinClient *) walk->data;
+
+ if (len != client->cname_len)
+ continue;
+
+ if (!strncmp ((gchar *) data, client->cname, client->cname_len)) {
+ GST_DEBUG_OBJECT (bin, "found existing client %p with CNAME %s", client,
+ client->cname);
+ result = client;
+ break;
+ }
+ }
+
+ /* nothing found, create one */
+ if (result == NULL) {
+ result = g_new0 (GstRtpBinClient, 1);
+ result->cname = g_strndup ((gchar *) data, len);
+ result->cname_len = len;
+ bin->clients = g_slist_prepend (bin->clients, result);
+ GST_DEBUG_OBJECT (bin, "created new client %p with CNAME %s", result,
+ result->cname);
+ }
+ return result;
+ }
+
+ static void
+ free_client (GstRtpBinClient * client, GstRtpBin * bin)
+ {
+ GST_DEBUG_OBJECT (bin, "freeing client %p", client);
+ g_slist_free (client->streams);
+ g_free (client->cname);
+ g_free (client);
+ }
+
+ static void
+ get_current_times (GstRtpBin * bin, GstClockTime * running_time,
+ guint64 * ntpnstime)
+ {
+ guint64 ntpns = -1;
+ GstClock *clock;
+ GstClockTime base_time, rt, clock_time;
+
+ GST_OBJECT_LOCK (bin);
+ if ((clock = GST_ELEMENT_CLOCK (bin))) {
+ base_time = GST_ELEMENT_CAST (bin)->base_time;
+ gst_object_ref (clock);
+ GST_OBJECT_UNLOCK (bin);
+
+ /* get current clock time and convert to running time */
+ clock_time = gst_clock_get_time (clock);
+ rt = clock_time - base_time;
+
+ if (bin->use_pipeline_clock) {
+ ntpns = rt;
+ /* add constant to convert from 1970 based time to 1900 based time */
+ ntpns += (2208988800LL * GST_SECOND);
+ } else {
+ switch (bin->ntp_time_source) {
+ case GST_RTP_NTP_TIME_SOURCE_NTP:
+ case GST_RTP_NTP_TIME_SOURCE_UNIX:{
+ /* get current NTP time */
+ ntpns = g_get_real_time () * GST_USECOND;
+
+ /* add constant to convert from 1970 based time to 1900 based time */
+ if (bin->ntp_time_source == GST_RTP_NTP_TIME_SOURCE_NTP)
+ ntpns += (2208988800LL * GST_SECOND);
+ break;
+ }
+ case GST_RTP_NTP_TIME_SOURCE_RUNNING_TIME:
+ ntpns = rt;
+ break;
+ case GST_RTP_NTP_TIME_SOURCE_CLOCK_TIME:
+ ntpns = clock_time;
+ break;
+ default:
+ ntpns = -1; /* Fix uninited compiler warning */
+ g_assert_not_reached ();
+ break;
+ }
+ }
+
+ gst_object_unref (clock);
+ } else {
+ GST_OBJECT_UNLOCK (bin);
+ rt = -1;
+ ntpns = -1;
+ }
+ if (running_time)
+ *running_time = rt;
+ if (ntpnstime)
+ *ntpnstime = ntpns;
+ }
+
+ static void
+ stream_set_ts_offset (GstRtpBin * bin, GstRtpBinStream * stream,
+ gint64 ts_offset, gint64 max_ts_offset, gint64 min_ts_offset,
+ gboolean allow_positive_ts_offset)
+ {
+ gint64 prev_ts_offset;
+ GObjectClass *jb_class;
+
+ jb_class = G_OBJECT_GET_CLASS (G_OBJECT (stream->buffer));
+
+ if (!g_object_class_find_property (jb_class, "ts-offset")) {
+ GST_LOG_OBJECT (bin,
+ "stream's jitterbuffer does not expose ts-offset property");
+ return;
+ }
+
+ g_object_get (stream->buffer, "ts-offset", &prev_ts_offset, NULL);
+
+ /* delta changed, see how much */
+ if (prev_ts_offset != ts_offset) {
+ gint64 diff;
+
+ diff = prev_ts_offset - ts_offset;
+
+ GST_DEBUG_OBJECT (bin,
+ "ts-offset %" G_GINT64_FORMAT ", prev %" G_GINT64_FORMAT
+ ", diff: %" G_GINT64_FORMAT, ts_offset, prev_ts_offset, diff);
+
+ /* ignore minor offsets */
+ if (ABS (diff) < min_ts_offset) {
+ GST_DEBUG_OBJECT (bin, "offset too small, ignoring");
+ return;
+ }
+
+ /* sanity check offset */
+ if (max_ts_offset > 0) {
+ if (ts_offset > 0 && !allow_positive_ts_offset) {
+ GST_DEBUG_OBJECT (bin,
+ "offset is positive (clocks are out of sync), ignoring");
+ return;
+ }
+ if (ABS (ts_offset) > max_ts_offset) {
+ GST_DEBUG_OBJECT (bin, "offset too large, ignoring");
+ return;
+ }
+ }
+
+ g_object_set (stream->buffer, "ts-offset", ts_offset, NULL);
+ }
+ GST_DEBUG_OBJECT (bin, "stream SSRC %08x, delta %" G_GINT64_FORMAT,
+ stream->ssrc, ts_offset);
+ }
+
+ static void
+ gst_rtp_bin_send_sync_event (GstRtpBinStream * stream)
+ {
+ if (stream->bin->send_sync_event) {
+ GstEvent *event;
+ GstPad *srcpad;
+
+ GST_DEBUG_OBJECT (stream->bin,
+ "sending GstRTCPSRReceived event downstream");
+
+ event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
+ gst_structure_new_empty ("GstRTCPSRReceived"));
+
+ srcpad = gst_element_get_static_pad (stream->buffer, "src");
+ gst_pad_push_event (srcpad, event);
+ gst_object_unref (srcpad);
+ }
+ }
+
+ /* associate a stream to the given CNAME. This will make sure all streams for
+ * that CNAME are synchronized together.
+ * Must be called with GST_RTP_BIN_LOCK */
+ static void
+ gst_rtp_bin_associate (GstRtpBin * bin, GstRtpBinStream * stream, guint8 len,
+ guint8 * data, guint64 ntptime, guint64 last_extrtptime,
+ guint64 base_rtptime, guint64 base_time, guint clock_rate,
+ gint64 rtp_clock_base)
+ {
+ GstRtpBinClient *client;
+ gboolean created;
+ GSList *walk;
+ GstClockTime running_time, running_time_rtp;
+ guint64 ntpnstime;
+
+ /* first find or create the CNAME */
+ client = get_client (bin, len, data, &created);
+
+ /* find stream in the client */
+ for (walk = client->streams; walk; walk = g_slist_next (walk)) {
+ GstRtpBinStream *ostream = (GstRtpBinStream *) walk->data;
+
+ if (ostream == stream)
+ break;
+ }
+ /* not found, add it to the list */
+ if (walk == NULL) {
+ GST_DEBUG_OBJECT (bin,
+ "new association of SSRC %08x with client %p with CNAME %s",
+ stream->ssrc, client, client->cname);
+ client->streams = g_slist_prepend (client->streams, stream);
+ client->nstreams++;
+ } else {
+ GST_DEBUG_OBJECT (bin,
+ "found association of SSRC %08x with client %p with CNAME %s",
+ stream->ssrc, client, client->cname);
+ }
+
+ if (!GST_CLOCK_TIME_IS_VALID (last_extrtptime)) {
+ GST_DEBUG_OBJECT (bin, "invalidated sync data");
+ if (bin->rtcp_sync == GST_RTP_BIN_RTCP_SYNC_RTP) {
+ /* we don't need that data, so carry on,
+ * but make some values look saner */
+ last_extrtptime = base_rtptime;
+ } else {
+ /* nothing we can do with this data in this case */
+ GST_DEBUG_OBJECT (bin, "bailing out");
+ return;
+ }
+ }
+
+ /* Take the extended rtptime we found in the SR packet and map it to the
+ * local rtptime. The local rtp time is used to construct timestamps on the
+ * buffers so we will calculate what running_time corresponds to the RTP
+ * timestamp in the SR packet. */
+ running_time_rtp = last_extrtptime - base_rtptime;
+
+ GST_DEBUG_OBJECT (bin,
+ "base %" G_GUINT64_FORMAT ", extrtptime %" G_GUINT64_FORMAT
+ ", local RTP %" G_GUINT64_FORMAT ", clock-rate %d, "
+ "clock-base %" G_GINT64_FORMAT, base_rtptime,
+ last_extrtptime, running_time_rtp, clock_rate, rtp_clock_base);
+
+ /* calculate the local RTP time as a gstreamer timestamp; we essentially perform the
+ * same conversion that a jitterbuffer would use to convert an rtp timestamp
+ * into a corresponding gstreamer timestamp. Note that the base_time also
+ * contains the drift between sender and receiver. */
+ running_time =
+ gst_util_uint64_scale_int (running_time_rtp, GST_SECOND, clock_rate);
+ running_time += base_time;
+
+ /* convert ntptime to nanoseconds */
+ ntpnstime = gst_util_uint64_scale (ntptime, GST_SECOND,
+ (G_GINT64_CONSTANT (1) << 32));
+
+ stream->have_sync = TRUE;
+
+ GST_DEBUG_OBJECT (bin,
+ "SR RTP running time %" G_GUINT64_FORMAT ", SR NTP %" G_GUINT64_FORMAT,
+ running_time, ntpnstime);
+
+ /* recalc inter stream playout offset, but only if there is more than one
+ * stream or we're doing NTP sync. */
+ if (bin->ntp_sync) {
+ gint64 ntpdiff, rtdiff;
+ guint64 local_ntpnstime;
+ GstClockTime local_running_time;
+
+ /* For NTP sync we need to first get a snapshot of running_time and NTP
+ * time. We know at what running_time we play a certain RTP time, and we also
+ * calculated when we would play the RTP time in the SR packet. Now we need
+ * to know how the running_time and the NTP time relate to each other. */
+ get_current_times (bin, &local_running_time, &local_ntpnstime);
+
+ /* see how far away the NTP time is. This is the difference between the
+ * current NTP time and the NTP time in the last SR packet. */
+ ntpdiff = local_ntpnstime - ntpnstime;
+ /* see how far away the running_time is. This is the difference between the
+ * current running_time and the running_time of the RTP timestamp in the
+ * last SR packet. */
+ rtdiff = local_running_time - running_time;
+
+ GST_DEBUG_OBJECT (bin,
+ "local NTP time %" G_GUINT64_FORMAT ", SR NTP time %" G_GUINT64_FORMAT,
+ local_ntpnstime, ntpnstime);
+ GST_DEBUG_OBJECT (bin,
+ "local running time %" G_GUINT64_FORMAT ", SR RTP running time %"
+ G_GUINT64_FORMAT, local_running_time, running_time);
+ GST_DEBUG_OBJECT (bin,
+ "NTP diff %" G_GINT64_FORMAT ", RT diff %" G_GINT64_FORMAT, ntpdiff,
+ rtdiff);
+
+ /* combine to get the final diff to apply to the running_time */
+ stream->rt_delta = rtdiff - ntpdiff;
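+
+ /* Worked example (illustrative numbers): if 0.2 s of running_time but
+ * 0.5 s of NTP time passed since the reference point in the last SR
+ * packet, rtdiff = 0.2 s and ntpdiff = 0.5 s, so rt_delta = -0.3 s; this
+ * value is handed to stream_set_ts_offset() below, subject to its
+ * min/max sanity checks. */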
+
+ stream_set_ts_offset (bin, stream, stream->rt_delta, bin->max_ts_offset,
+ 0, FALSE);
+ } else {
+ gint64 min, rtp_min, clock_base = stream->clock_base;
+ gboolean all_sync, use_rtp;
+ gboolean rtcp_sync = g_atomic_int_get (&bin->rtcp_sync);
+
+ /* calculate delta between server and receiver. ntpnstime is created by
+ * converting the ntptime in the last SR packet to a gstreamer timestamp. This
+ * delta expresses the difference between our timeline and the server timeline. The
+ * difference in itself doesn't mean much but we can combine the delta of
+ * multiple streams to create a stream specific offset. */
+ stream->rt_delta = ntpnstime - running_time;
+
+ /* calculate the min of all deltas, ignoring streams that did not yet have a
+ * valid rt_delta because we did not yet receive an SR packet for those
+ * streams.
+ * We calculate the minimum because we would like to only apply positive
+ * offsets to streams, delaying their playback instead of trying to speed up
+ * other streams (which might be impossible when we have to create negative
+ * latencies).
+ * The stream that has the smallest diff is selected as the reference stream,
+ * all other streams will have a positive offset to this difference. */
+
+ /* some alternative settings allow ignoring RTCP as much as possible,
+ * for servers generating a bogus ntp timeline */
+ min = rtp_min = G_MAXINT64;
+ use_rtp = FALSE;
+ if (rtcp_sync == GST_RTP_BIN_RTCP_SYNC_RTP) {
+ guint64 ext_base;
+
+ use_rtp = TRUE;
+ /* signed version for convenience */
+ clock_base = base_rtptime;
+ /* deal with possible wrap-around */
+ ext_base = base_rtptime;
+ rtp_clock_base = gst_rtp_buffer_ext_timestamp (&ext_base, rtp_clock_base);
+ /* sanity check; base rtp and provided clock_base should be close */
+ if (rtp_clock_base >= clock_base) {
+ if (rtp_clock_base - clock_base < 10 * clock_rate) {
+ rtp_clock_base = base_time +
+ gst_util_uint64_scale_int (rtp_clock_base - clock_base,
+ GST_SECOND, clock_rate);
+ } else {
+ use_rtp = FALSE;
+ }
+ } else {
+ if (clock_base - rtp_clock_base < 10 * clock_rate) {
+ rtp_clock_base = base_time -
+ gst_util_uint64_scale_int (clock_base - rtp_clock_base,
+ GST_SECOND, clock_rate);
+ } else {
+ use_rtp = FALSE;
+ }
+ }
+ /* warn and bail out for clarity if there are no sane values */
+ if (!use_rtp) {
+ GST_WARNING_OBJECT (bin, "unable to sync to provided rtptime");
+ return;
+ }
+ /* store to track changes */
+ clock_base = rtp_clock_base;
+ /* generate a fake delta as before, now equating the rtptime obtained
+ * from RTP-Info, where the large constant represents the otherwise
+ * irrelevant npt/ntp time */
+ stream->rtp_delta = (GST_SECOND << 28) - rtp_clock_base;
+ } else {
+ clock_base = rtp_clock_base;
+ }
+
+ all_sync = TRUE;
+ for (walk = client->streams; walk; walk = g_slist_next (walk)) {
+ GstRtpBinStream *ostream = (GstRtpBinStream *) walk->data;
+
+ if (!ostream->have_sync) {
+ all_sync = FALSE;
+ continue;
+ }
+
+ /* a change in the current stream's base from the previously initialized
+ * value leads to a reset of all streams' bases */
+ if (stream != ostream && stream->clock_base >= 0 &&
+ (stream->clock_base != clock_base)) {
+ GST_DEBUG_OBJECT (bin, "reset upon clock base change");
+ ostream->clock_base = -100 * GST_SECOND;
+ ostream->rtp_delta = 0;
+ }
+
+ if (ostream->rt_delta < min)
+ min = ostream->rt_delta;
+ if (ostream->rtp_delta < rtp_min)
+ rtp_min = ostream->rtp_delta;
+ }
+
+ /* arrange to re-sync for each stream upon significant change,
+ * e.g. post-seek */
+ all_sync = all_sync && (stream->clock_base == clock_base);
+ stream->clock_base = clock_base;
+
+ /* the init performed above may be needed later on, but there is nothing
+ * more to do now */
+ if (client->nstreams <= 1)
+ return;
+
+ GST_DEBUG_OBJECT (bin, "client %p min delta %" G_GINT64_FORMAT
+ " all sync %d", client, min, all_sync);
+ GST_DEBUG_OBJECT (bin, "rtcp sync mode %d, use_rtp %d", rtcp_sync, use_rtp);
+
+ switch (rtcp_sync) {
+ case GST_RTP_BIN_RTCP_SYNC_RTP:
+ if (!use_rtp)
+ break;
+ GST_DEBUG_OBJECT (bin, "using rtp generated reports; "
+ "client %p min rtp delta %" G_GINT64_FORMAT, client, rtp_min);
+ /* fall-through */
+ case GST_RTP_BIN_RTCP_SYNC_INITIAL:
+ /* if all have been synced already, do not bother further */
+ if (all_sync) {
+ GST_DEBUG_OBJECT (bin, "all streams already synced; done");
+ return;
+ }
+ break;
+ default:
+ break;
+ }
+
+ /* bail out if we adjusted recently enough */
+ if (all_sync && (ntpnstime - bin->priv->last_ntpnstime) <
+ bin->rtcp_sync_interval * GST_MSECOND) {
+ GST_DEBUG_OBJECT (bin, "discarding RTCP sender packet for sync; "
+ "previous sender info too recent "
+ "(previous NTP %" G_GUINT64_FORMAT ")", bin->priv->last_ntpnstime);
+ return;
+ }
+ bin->priv->last_ntpnstime = ntpnstime;
+
+ /* calculate offsets for each stream */
+ for (walk = client->streams; walk; walk = g_slist_next (walk)) {
+ GstRtpBinStream *ostream = (GstRtpBinStream *) walk->data;
+ gint64 ts_offset;
+
+ /* ignore streams for which we didn't receive an SR packet yet; we
+ * can't synchronize them yet. We can, however, sync the other streams
+ * just fine. */
+ if (!ostream->have_sync)
+ continue;
+
+ /* calculate the offset to our reference stream; this should always
+ * give a positive number. */
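+ /* e.g. with per-stream deltas of 40ms and 55ms the minimum is 40ms, so
+ * the reference stream gets offset 0 and the other stream gets 15ms */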
+ if (use_rtp)
+ ts_offset = ostream->rtp_delta - rtp_min;
+ else
+ ts_offset = ostream->rt_delta - min;
+
+ stream_set_ts_offset (bin, ostream, ts_offset, bin->max_ts_offset,
+ MIN_TS_OFFSET, TRUE);
+ }
+ }
+ gst_rtp_bin_send_sync_event (stream);
+
+ return;
+ }
+
+ #define GST_RTCP_BUFFER_FOR_PACKETS(b,buffer,packet) \
+ for ((b) = gst_rtcp_buffer_get_first_packet ((buffer), (packet)); (b); \
+ (b) = gst_rtcp_packet_move_to_next ((packet)))
+
+ #define GST_RTCP_SDES_FOR_ITEMS(b,packet) \
+ for ((b) = gst_rtcp_packet_sdes_first_item ((packet)); (b); \
+ (b) = gst_rtcp_packet_sdes_next_item ((packet)))
+
+ #define GST_RTCP_SDES_FOR_ENTRIES(b,packet) \
+ for ((b) = gst_rtcp_packet_sdes_first_entry ((packet)); (b); \
+ (b) = gst_rtcp_packet_sdes_next_entry ((packet)))
+
+ static void
+ gst_rtp_bin_handle_sync (GstElement * jitterbuffer, GstStructure * s,
+ GstRtpBinStream * stream)
+ {
+ GstRtpBin *bin;
+ GstRTCPPacket packet;
+ guint32 ssrc;
+ guint64 ntptime;
+ gboolean have_sr, have_sdes;
+ gboolean more;
+ guint64 base_rtptime;
+ guint64 base_time;
+ guint clock_rate;
+ guint64 clock_base;
+ guint64 extrtptime;
+ GstBuffer *buffer;
+ GstRTCPBuffer rtcp = { NULL, };
+
+ bin = stream->bin;
+
+ GST_DEBUG_OBJECT (bin, "sync handler called");
+
+ /* get the last relation between the rtp timestamps and the gstreamer
+ * timestamps. We get this info directly from the jitterbuffer which
+ * constructs gstreamer timestamps from rtp timestamps and so it knows exactly
+ * what the current situation is. */
+ base_rtptime =
+ g_value_get_uint64 (gst_structure_get_value (s, "base-rtptime"));
+ base_time = g_value_get_uint64 (gst_structure_get_value (s, "base-time"));
+ clock_rate = g_value_get_uint (gst_structure_get_value (s, "clock-rate"));
+ clock_base = g_value_get_uint64 (gst_structure_get_value (s, "clock-base"));
+ extrtptime =
+ g_value_get_uint64 (gst_structure_get_value (s, "sr-ext-rtptime"));
+ buffer = gst_value_get_buffer (gst_structure_get_value (s, "sr-buffer"));
+
+ have_sr = FALSE;
+ have_sdes = FALSE;
+
+ gst_rtcp_buffer_map (buffer, GST_MAP_READ, &rtcp);
+
+ GST_RTCP_BUFFER_FOR_PACKETS (more, &rtcp, &packet) {
+ /* first packet must be SR or RR or else the validate would have failed */
+ switch (gst_rtcp_packet_get_type (&packet)) {
+ case GST_RTCP_TYPE_SR:
+ /* only parse the first one. There is only supposed to be one SR in the
+ * packet but we will deal with malformed packets gracefully */
+ if (have_sr)
+ break;
+ /* get NTP and RTP times */
+ gst_rtcp_packet_sr_get_sender_info (&packet, &ssrc, &ntptime, NULL,
+ NULL, NULL);
+
+ GST_DEBUG_OBJECT (bin, "received sync packet from SSRC %08x", ssrc);
+ /* ignore SR that is not ours */
+ if (ssrc != stream->ssrc)
+ continue;
+
+ have_sr = TRUE;
+ break;
+ case GST_RTCP_TYPE_SDES:
+ {
+ gboolean more_items, more_entries;
+
+ /* only deal with the first SDES; there is only supposed to be one SDES
+ * in the RTCP packet but we deal with bad packets gracefully. Also bail
+ * out if we have not seen an SR packet yet. */
+ if (have_sdes || !have_sr)
+ break;
+
+ GST_RTCP_SDES_FOR_ITEMS (more_items, &packet) {
+ /* skip items that are not about the SSRC of the sender */
+ if (gst_rtcp_packet_sdes_get_ssrc (&packet) != ssrc)
+ continue;
+
+ /* find the CNAME entry */
+ GST_RTCP_SDES_FOR_ENTRIES (more_entries, &packet) {
+ GstRTCPSDESType type;
+ guint8 len;
+ guint8 *data;
+
+ gst_rtcp_packet_sdes_get_entry (&packet, &type, &len, &data);
+
+ if (type == GST_RTCP_SDES_CNAME) {
+ GST_RTP_BIN_LOCK (bin);
+ /* associate the stream to CNAME */
+ gst_rtp_bin_associate (bin, stream, len, data,
+ ntptime, extrtptime, base_rtptime, base_time, clock_rate,
+ clock_base);
+ GST_RTP_BIN_UNLOCK (bin);
+ }
+ }
+ }
+ have_sdes = TRUE;
+ break;
+ }
+ default:
+ /* we can ignore these packets */
+ break;
+ }
+ }
+ gst_rtcp_buffer_unmap (&rtcp);
+ }
+
+ /* create a new stream with @ssrc in @session. Must be called with
+ * RTP_SESSION_LOCK. */
+ static GstRtpBinStream *
+ create_stream (GstRtpBinSession * session, guint32 ssrc)
+ {
+ GstElement *buffer, *demux = NULL;
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ GstElement *queue2 = NULL;
++#endif
+ GstRtpBinStream *stream;
+ GstRtpBin *rtpbin;
+ GstState target;
+ GObjectClass *jb_class;
+
+ rtpbin = session->bin;
+
+ if (g_slist_length (session->streams) >= rtpbin->max_streams)
+ goto max_streams;
+
+ if (!(buffer =
+ session_request_element (session, SIGNAL_REQUEST_JITTERBUFFER)))
+ goto no_jitterbuffer;
+
+ if (!rtpbin->ignore_pt) {
+ if (!(demux = gst_element_factory_make ("rtpptdemux", NULL)))
+ goto no_demux;
+ }
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ if (rtpbin->use_rtsp_buffering &&
++ rtpbin->buffer_mode == RTP_JITTER_BUFFER_MODE_SLAVE) {
++ if (!(queue2 = gst_element_factory_make ("queue2", NULL)))
++ goto no_queue2;
++ }
++#endif
+
+ stream = g_new0 (GstRtpBinStream, 1);
+ stream->ssrc = ssrc;
+ stream->bin = rtpbin;
+ stream->session = session;
+ stream->buffer = gst_object_ref (buffer);
+ stream->demux = demux;
+
+ stream->have_sync = FALSE;
+ stream->rt_delta = 0;
+ stream->rtp_delta = 0;
+ stream->percent = 100;
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ stream->prev_percent = 0;
++#endif
+ stream->clock_base = -100 * GST_SECOND;
+ session->streams = g_slist_prepend (session->streams, stream);
+
+ jb_class = G_OBJECT_GET_CLASS (G_OBJECT (buffer));
+
+ if (g_signal_lookup ("request-pt-map", G_OBJECT_TYPE (buffer)) != 0) {
+ /* provide clock_rate to the jitterbuffer when needed */
+ stream->buffer_ptreq_sig = g_signal_connect (buffer, "request-pt-map",
+ (GCallback) pt_map_requested, session);
+ }
+ if (g_signal_lookup ("on-npt-stop", G_OBJECT_TYPE (buffer)) != 0) {
+ stream->buffer_ntpstop_sig = g_signal_connect (buffer, "on-npt-stop",
+ (GCallback) on_npt_stop, stream);
+ }
+
+ g_object_set_data (G_OBJECT (buffer), "GstRTPBin.session", session);
+ g_object_set_data (G_OBJECT (buffer), "GstRTPBin.stream", stream);
+
+ /* configure latency and packet loss handling */
+ g_object_set (buffer, "latency", rtpbin->latency_ms, NULL);
+
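+ /* the jitterbuffer may be a custom element provided through the
+ * request-jitterbuffer signal, so probe for each property before
+ * setting it */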
+ if (g_object_class_find_property (jb_class, "drop-on-latency"))
+ g_object_set (buffer, "drop-on-latency", rtpbin->drop_on_latency, NULL);
+ if (g_object_class_find_property (jb_class, "do-lost"))
+ g_object_set (buffer, "do-lost", rtpbin->do_lost, NULL);
+ if (g_object_class_find_property (jb_class, "mode"))
+ g_object_set (buffer, "mode", rtpbin->buffer_mode, NULL);
+ if (g_object_class_find_property (jb_class, "do-retransmission"))
+ g_object_set (buffer, "do-retransmission", rtpbin->do_retransmission, NULL);
+ if (g_object_class_find_property (jb_class, "max-rtcp-rtp-time-diff"))
+ g_object_set (buffer, "max-rtcp-rtp-time-diff",
+ rtpbin->max_rtcp_rtp_time_diff, NULL);
+ if (g_object_class_find_property (jb_class, "max-dropout-time"))
+ g_object_set (buffer, "max-dropout-time", rtpbin->max_dropout_time, NULL);
+ if (g_object_class_find_property (jb_class, "max-misorder-time"))
+ g_object_set (buffer, "max-misorder-time", rtpbin->max_misorder_time, NULL);
+ if (g_object_class_find_property (jb_class, "rfc7273-sync"))
+ g_object_set (buffer, "rfc7273-sync", rtpbin->rfc7273_sync, NULL);
+ if (g_object_class_find_property (jb_class, "max-ts-offset-adjustment"))
+ g_object_set (buffer, "max-ts-offset-adjustment",
+ rtpbin->max_ts_offset_adjustment, NULL);
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ /* configure queue2 to use live buffering */
++ if (queue2) {
++ g_object_set_data (G_OBJECT (queue2), "GstRTPBin.stream", stream);
++ g_object_set (queue2, "use-buffering", TRUE, NULL);
++ g_object_set (queue2, "buffer-mode", GST_BUFFERING_LIVE, NULL);
++ }
++#endif
+ g_signal_emit (rtpbin, gst_rtp_bin_signals[SIGNAL_NEW_JITTERBUFFER], 0,
+ buffer, session->id, ssrc);
+
+ if (!rtpbin->ignore_pt)
+ gst_bin_add (GST_BIN_CAST (rtpbin), demux);
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ if (queue2)
++ gst_bin_add (GST_BIN_CAST (rtpbin), queue2);
++#endif
++
+ /* link stuff */
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ if (queue2) {
++ gst_element_link_pads_full (buffer, "src", queue2, "sink",
++ GST_PAD_LINK_CHECK_NOTHING);
++ if (demux) {
++ gst_element_link_pads_full (queue2, "src", demux, "sink",
++ GST_PAD_LINK_CHECK_NOTHING);
++ }
++ } else if (demux) {
++ gst_element_link_pads_full (buffer, "src", demux, "sink",
++ GST_PAD_LINK_CHECK_NOTHING);
++ }
++#else
+ if (demux)
+ gst_element_link_pads_full (buffer, "src", demux, "sink",
+ GST_PAD_LINK_CHECK_NOTHING);
++#endif
+ if (rtpbin->buffering) {
+ guint64 last_out;
+
+ if (g_signal_lookup ("set-active", G_OBJECT_TYPE (buffer)) != 0) {
+ GST_INFO_OBJECT (rtpbin,
+ "bin is buffering, set jitterbuffer as not active");
+ g_signal_emit_by_name (buffer, "set-active", FALSE, (gint64) 0,
+ &last_out);
+ }
+ }
+
+ GST_OBJECT_LOCK (rtpbin);
+ target = GST_STATE_TARGET (rtpbin);
+ GST_OBJECT_UNLOCK (rtpbin);
+
+ /* from sink to source */
+ if (demux)
+ gst_element_set_state (demux, target);
+
+ gst_element_set_state (buffer, target);
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ if (queue2)
++ gst_element_set_state (queue2, target);
++#endif
++
+ return stream;
+
+ /* ERRORS */
+ max_streams:
+ {
+ GST_WARNING_OBJECT (rtpbin, "stream exceeds maximum (%d)",
+ rtpbin->max_streams);
+ return NULL;
+ }
+ no_jitterbuffer:
+ {
+ g_warning ("rtpbin: could not create rtpjitterbuffer element");
+ return NULL;
+ }
+ no_demux:
+ {
+ gst_object_unref (buffer);
+ g_warning ("rtpbin: could not create rtpptdemux element");
+ return NULL;
+ }
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++no_queue2:
++ {
++ gst_object_unref (buffer);
++ /* demux may be NULL when ignore-pt is set */
++ if (demux)
++ gst_object_unref (demux);
++ g_warning ("rtpbin: could not create queue2 element");
++ return NULL;
++ }
++#endif
+ }
+
+ /* called with RTP_BIN_LOCK */
+ static void
+ free_stream (GstRtpBinStream * stream, GstRtpBin * bin)
+ {
+ GstRtpBinSession *sess = stream->session;
+ GSList *clients, *next_client;
+
+ GST_DEBUG_OBJECT (bin, "freeing stream %p", stream);
+
+ gst_element_set_locked_state (stream->buffer, TRUE);
+ if (stream->demux)
+ gst_element_set_locked_state (stream->demux, TRUE);
+
+ gst_element_set_state (stream->buffer, GST_STATE_NULL);
+ if (stream->demux)
+ gst_element_set_state (stream->demux, GST_STATE_NULL);
+
+ if (stream->demux) {
+ g_signal_handler_disconnect (stream->demux, stream->demux_newpad_sig);
+ g_signal_handler_disconnect (stream->demux, stream->demux_ptreq_sig);
+ g_signal_handler_disconnect (stream->demux, stream->demux_ptchange_sig);
+ g_signal_handler_disconnect (stream->demux, stream->demux_padremoved_sig);
+ }
+
+ if (stream->buffer_handlesync_sig)
+ g_signal_handler_disconnect (stream->buffer, stream->buffer_handlesync_sig);
+ if (stream->buffer_ptreq_sig)
+ g_signal_handler_disconnect (stream->buffer, stream->buffer_ptreq_sig);
+ if (stream->buffer_ntpstop_sig)
+ g_signal_handler_disconnect (stream->buffer, stream->buffer_ntpstop_sig);
+
+ sess->elements = g_slist_remove (sess->elements, stream->buffer);
+ remove_bin_element (stream->buffer, bin);
+ gst_object_unref (stream->buffer);
+
+ if (stream->demux)
+ gst_bin_remove (GST_BIN_CAST (bin), stream->demux);
+
+ for (clients = bin->clients; clients; clients = next_client) {
+ GstRtpBinClient *client = (GstRtpBinClient *) clients->data;
+ GSList *streams, *next_stream;
+
+ next_client = g_slist_next (clients);
+
+ for (streams = client->streams; streams; streams = next_stream) {
+ GstRtpBinStream *ostream = (GstRtpBinStream *) streams->data;
+
+ next_stream = g_slist_next (streams);
+
+ if (ostream == stream) {
+ client->streams = g_slist_delete_link (client->streams, streams);
+ /* If this was the last stream belonging to this client,
+ * clean up the client. */
+ if (--client->nstreams == 0) {
+ bin->clients = g_slist_delete_link (bin->clients, clients);
+ free_client (client, bin);
+ break;
+ }
+ }
+ }
+ }
+ g_free (stream);
+ }
+
+ /* GObject vmethods */
+ static void gst_rtp_bin_dispose (GObject * object);
+ static void gst_rtp_bin_finalize (GObject * object);
+ static void gst_rtp_bin_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+ static void gst_rtp_bin_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+ /* GstElement vmethods */
+ static GstStateChangeReturn gst_rtp_bin_change_state (GstElement * element,
+ GstStateChange transition);
+ static GstPad *gst_rtp_bin_request_new_pad (GstElement * element,
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
+ static void gst_rtp_bin_release_pad (GstElement * element, GstPad * pad);
+ static void gst_rtp_bin_handle_message (GstBin * bin, GstMessage * message);
+
+ #define gst_rtp_bin_parent_class parent_class
+ G_DEFINE_TYPE_WITH_PRIVATE (GstRtpBin, gst_rtp_bin, GST_TYPE_BIN);
+ GST_ELEMENT_REGISTER_DEFINE (rtpbin, "rtpbin", GST_RANK_NONE, GST_TYPE_RTP_BIN);
+
+ static gboolean
+ _gst_element_accumulator (GSignalInvocationHint * ihint,
+ GValue * return_accu, const GValue * handler_return, gpointer dummy)
+ {
+ GstElement *element;
+
+ element = g_value_get_object (handler_return);
+ GST_DEBUG ("got element %" GST_PTR_FORMAT, element);
+
+ g_value_set_object (return_accu, element);
+
+ /* stop emission once a handler returned an element; an accumulator
+ * stops the emission by returning FALSE */
+ return (element == NULL);
+ }
+
+ static gboolean
+ _gst_caps_accumulator (GSignalInvocationHint * ihint,
+ GValue * return_accu, const GValue * handler_return, gpointer dummy)
+ {
+ GstCaps *caps;
+
+ caps = g_value_get_boxed (handler_return);
+ GST_DEBUG ("got caps %" GST_PTR_FORMAT, caps);
+
+ g_value_set_boxed (return_accu, caps);
+
+ /* stop emission once a handler returned caps; an accumulator stops
+ * the emission by returning FALSE */
+ return (caps == NULL);
+ }
+
+ static void
+ gst_rtp_bin_class_init (GstRtpBinClass * klass)
+ {
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstBinClass *gstbin_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstbin_class = (GstBinClass *) klass;
+
+ gobject_class->dispose = gst_rtp_bin_dispose;
+ gobject_class->finalize = gst_rtp_bin_finalize;
+ gobject_class->set_property = gst_rtp_bin_set_property;
+ gobject_class->get_property = gst_rtp_bin_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_LATENCY,
+ g_param_spec_uint ("latency", "Buffer latency in ms",
+ "Default amount of ms to buffer in the jitterbuffers", 0,
+ G_MAXUINT, DEFAULT_LATENCY_MS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_DROP_ON_LATENCY,
+ g_param_spec_boolean ("drop-on-latency",
+ "Drop buffers when maximum latency is reached",
+ "Tells the jitterbuffer to never exceed the given latency in size",
+ DEFAULT_DROP_ON_LATENCY, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpBin::request-pt-map:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @pt: the pt
+ *
+ * Request the payload type as #GstCaps for @pt in @session.
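+ *
+ * A handler typically returns the caps matching the payload type; a
+ * minimal sketch (the payload number and caps fields are examples):
+ * |[
+ * static GstCaps *
+ * request_pt_map_cb (GstElement * rtpbin, guint session, guint pt,
+ *     gpointer user_data)
+ * {
+ *   if (pt == 96)
+ *     return gst_caps_new_simple ("application/x-rtp",
+ *         "media", G_TYPE_STRING, "audio",
+ *         "clock-rate", G_TYPE_INT, 8000,
+ *         "encoding-name", G_TYPE_STRING, "PCMA", NULL);
+ *   return NULL;
+ * }
+ * ]|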
+ */
+ gst_rtp_bin_signals[SIGNAL_REQUEST_PT_MAP] =
+ g_signal_new ("request-pt-map", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, request_pt_map),
+ _gst_caps_accumulator, NULL, NULL, GST_TYPE_CAPS, 2, G_TYPE_UINT,
+ G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::payload-type-change:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @pt: the pt
+ *
+ * Signal that the current payload type changed to @pt in @session.
+ */
+ gst_rtp_bin_signals[SIGNAL_PAYLOAD_TYPE_CHANGE] =
+ g_signal_new ("payload-type-change", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, payload_type_change),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::clear-pt-map:
+ * @rtpbin: the object which received the signal
+ *
+ * Clear all previously cached pt-mappings obtained with
+ * #GstRtpBin::request-pt-map.
+ */
+ gst_rtp_bin_signals[SIGNAL_CLEAR_PT_MAP] =
+ g_signal_new ("clear-pt-map", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRtpBinClass,
+ clear_pt_map), NULL, NULL, NULL, G_TYPE_NONE, 0, G_TYPE_NONE);
+
+ /**
+ * GstRtpBin::reset-sync:
+ * @rtpbin: the object which received the signal
+ *
+ * Reset all currently configured lip-sync parameters and require new SR
+ * packets for all streams before lip-sync is attempted again.
+ */
+ gst_rtp_bin_signals[SIGNAL_RESET_SYNC] =
+ g_signal_new ("reset-sync", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRtpBinClass,
+ reset_sync), NULL, NULL, NULL, G_TYPE_NONE, 0, G_TYPE_NONE);
+
+ /**
+ * GstRtpBin::get-session:
+ * @rtpbin: the object which received the signal
+ * @id: the session id
+ *
+ * Request the GstRtpSession element, as a #GstElement, associated with session @id.
+ *
+ * Since: 1.8
+ */
+ gst_rtp_bin_signals[SIGNAL_GET_SESSION] =
+ g_signal_new ("get-session", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRtpBinClass,
+ get_session), NULL, NULL, NULL, GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::get-internal-session:
+ * @rtpbin: the object which received the signal
+ * @id: the session id
+ *
+ * Request the internal RTPSession object as #GObject in session @id.
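+ *
+ * A minimal sketch of emitting this action signal (the property set on
+ * the returned object is an assumption for illustration):
+ * |[
+ * GObject *internal_session = NULL;
+ *
+ * g_signal_emit_by_name (rtpbin, "get-internal-session", 0,
+ *     &internal_session);
+ * if (internal_session) {
+ *   g_object_set (internal_session, "bandwidth", 512000.0, NULL);
+ *   g_object_unref (internal_session);
+ * }
+ * ]|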
+ */
+ gst_rtp_bin_signals[SIGNAL_GET_INTERNAL_SESSION] =
+ g_signal_new ("get-internal-session", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRtpBinClass,
+ get_internal_session), NULL, NULL, NULL, RTP_TYPE_SESSION, 1,
+ G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::get-internal-storage:
+ * @rtpbin: the object which received the signal
+ * @id: the session id
+ *
+ * Request the internal RTPStorage object as #GObject in session @id. This
+ * is the internal storage used by the RTPStorage element, which is used to
+ * keep a backlog of received RTP packets for the session @id.
+ *
+ * Since: 1.14
+ */
+ gst_rtp_bin_signals[SIGNAL_GET_INTERNAL_STORAGE] =
+ g_signal_new ("get-internal-storage", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRtpBinClass,
+ get_internal_storage), NULL, NULL, NULL, G_TYPE_OBJECT, 1,
+ G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::get-storage:
+ * @rtpbin: the object which received the signal
+ * @id: the session id
+ *
+ * Request the RTPStorage element, as a #GstElement, in session @id. This element
+ * is used to keep a backlog of received RTP packets for the session @id.
+ *
+ * Since: 1.16
+ */
+ gst_rtp_bin_signals[SIGNAL_GET_STORAGE] =
+ g_signal_new ("get-storage", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRtpBinClass,
+ get_storage), NULL, NULL, NULL, GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::clear-ssrc:
+ * @rtpbin: the object which received the signal
+ * @id: the session id
+ * @ssrc: the ssrc
+ *
+ * Remove all pads from rtpssrcdemux element associated with the specified
+ * ssrc. This delegates the action signal to the rtpssrcdemux element
+ * associated with the specified session.
+ *
+ * Since: 1.20
+ */
+ gst_rtp_bin_signals[SIGNAL_CLEAR_SSRC] =
+ g_signal_new ("clear-ssrc", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRtpBinClass,
+ clear_ssrc), NULL, NULL, NULL, G_TYPE_NONE, 2,
+ G_TYPE_UINT, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::on-new-ssrc:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify of a new SSRC that entered @session.
+ */
+ gst_rtp_bin_signals[SIGNAL_ON_NEW_SSRC] =
+ g_signal_new ("on-new-ssrc", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, on_new_ssrc),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+ /**
+ * GstRtpBin::on-ssrc-collision:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify when we have an SSRC collision
+ */
+ gst_rtp_bin_signals[SIGNAL_ON_SSRC_COLLISION] =
+ g_signal_new ("on-ssrc-collision", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, on_ssrc_collision),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+ /**
+ * GstRtpBin::on-ssrc-validated:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify of a new SSRC that became validated.
+ */
+ gst_rtp_bin_signals[SIGNAL_ON_SSRC_VALIDATED] =
+ g_signal_new ("on-ssrc-validated", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, on_ssrc_validated),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+ /**
+ * GstRtpBin::on-ssrc-active:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify of an SSRC that is active, i.e., sending RTCP.
+ */
+ gst_rtp_bin_signals[SIGNAL_ON_SSRC_ACTIVE] =
+ g_signal_new ("on-ssrc-active", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, on_ssrc_active),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+ /**
+ * GstRtpBin::on-ssrc-sdes:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify of an SSRC for which new SDES information was received.
+ */
+ gst_rtp_bin_signals[SIGNAL_ON_SSRC_SDES] =
+ g_signal_new ("on-ssrc-sdes", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, on_ssrc_sdes),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::on-bye-ssrc:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify of an SSRC that became inactive because of a BYE packet.
+ */
+ gst_rtp_bin_signals[SIGNAL_ON_BYE_SSRC] =
+ g_signal_new ("on-bye-ssrc", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, on_bye_ssrc),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+ /**
+ * GstRtpBin::on-bye-timeout:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify of an SSRC that has timed out because of BYE
+ */
+ gst_rtp_bin_signals[SIGNAL_ON_BYE_TIMEOUT] =
+ g_signal_new ("on-bye-timeout", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, on_bye_timeout),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+ /**
+ * GstRtpBin::on-timeout:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify of an SSRC that has timed out
+ */
+ gst_rtp_bin_signals[SIGNAL_ON_TIMEOUT] =
+ g_signal_new ("on-timeout", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, on_timeout),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+ /**
+ * GstRtpBin::on-sender-timeout:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify of a sender SSRC that has timed out and became a receiver
+ */
+ gst_rtp_bin_signals[SIGNAL_ON_SENDER_TIMEOUT] =
+ g_signal_new ("on-sender-timeout", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, on_sender_timeout),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::on-npt-stop:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify that the SSRC sender has sent data up to the configured NPT stop time.
+ */
+ gst_rtp_bin_signals[SIGNAL_ON_NPT_STOP] =
+ g_signal_new ("on-npt-stop", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, on_npt_stop),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::request-rtp-encoder:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ *
+ * Request an RTP encoder element for the given @session. The encoder
+ * element will be added to the bin if not previously added.
+ *
+ * If no handler is connected, no encoder will be used.
+ *
+ * Since: 1.4
+ */
+ gst_rtp_bin_signals[SIGNAL_REQUEST_RTP_ENCODER] =
+ g_signal_new ("request-rtp-encoder", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ request_rtp_encoder), _gst_element_accumulator, NULL, NULL,
+ GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::request-rtp-decoder:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ *
+ * Request an RTP decoder element for the given @session. The decoder
+ * element will be added to the bin if not previously added.
+ *
+ * If no handler is connected, no decoder will be used.
+ *
+ * Since: 1.4
+ */
+ gst_rtp_bin_signals[SIGNAL_REQUEST_RTP_DECODER] =
+ g_signal_new ("request-rtp-decoder", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ request_rtp_decoder), _gst_element_accumulator, NULL,
+ NULL, GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::request-rtcp-encoder:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ *
+ * Request an RTCP encoder element for the given @session. The encoder
+ * element will be added to the bin if not previously added.
+ *
+ * If no handler is connected, no encoder will be used.
+ *
+ * Since: 1.4
+ */
+ gst_rtp_bin_signals[SIGNAL_REQUEST_RTCP_ENCODER] =
+ g_signal_new ("request-rtcp-encoder", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ request_rtcp_encoder), _gst_element_accumulator, NULL, NULL,
+ GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::request-rtcp-decoder:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ *
+ * Request an RTCP decoder element for the given @session. The decoder
+ * element will be added to the bin if not previously added.
+ *
+ * If no handler is connected, no decoder will be used.
+ *
+ * Since: 1.4
+ */
+ gst_rtp_bin_signals[SIGNAL_REQUEST_RTCP_DECODER] =
+ g_signal_new ("request-rtcp-decoder", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ request_rtcp_decoder), _gst_element_accumulator, NULL, NULL,
+ GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::request-jitterbuffer:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ *
+ * Request a jitterbuffer element for the given @session.
+ *
+ * If no handler is connected, the default jitterbuffer will be used.
+ *
+ * Note: The provided element is expected to conform to the API exposed
+ * by the standard #GstRtpJitterBuffer. Runtime checks will be made to
+ * determine whether it exposes properties and signals before attempting
+ * to set, call or connect to them, and some functionalities of #GstRtpBin
+ * may not be available when that is not the case.
+ *
+ * This should be considered experimental API, as the standard jitterbuffer
+ * API is susceptible to change; provided elements will have to update their
+ * custom jitterbuffer's API to match the API of #GstRtpJitterBuffer if and
+ * when it changes.
+ *
+ * Since: 1.18
+ */
+ gst_rtp_bin_signals[SIGNAL_REQUEST_JITTERBUFFER] =
+ g_signal_new ("request-jitterbuffer", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ request_jitterbuffer), _gst_element_accumulator, NULL,
+ g_cclosure_marshal_generic, GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::new-jitterbuffer:
+ * @rtpbin: the object which received the signal
+ * @jitterbuffer: the new jitterbuffer
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify that a new @jitterbuffer was created for @session and @ssrc.
+ * This signal can, for example, be used to configure @jitterbuffer.
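+ *
+ * A minimal sketch of such a handler (the property value is an example):
+ * |[
+ * static void
+ * new_jitterbuffer_cb (GstElement * rtpbin, GstElement * jitterbuffer,
+ *     guint session, guint ssrc, gpointer user_data)
+ * {
+ *   g_object_set (jitterbuffer, "drop-on-latency", TRUE, NULL);
+ * }
+ * ]|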
+ *
+ * Since: 1.4
+ */
+ gst_rtp_bin_signals[SIGNAL_NEW_JITTERBUFFER] =
+ g_signal_new ("new-jitterbuffer", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ new_jitterbuffer), NULL, NULL, NULL,
+ G_TYPE_NONE, 3, GST_TYPE_ELEMENT, G_TYPE_UINT, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::new-storage:
+ * @rtpbin: the object which received the signal
+ * @storage: the new storage
+ * @session: the session
+ *
+ * Notify that a new @storage was created for @session.
+ * This signal can, for example, be used to configure @storage.
+ *
+ * Since: 1.14
+ */
+ gst_rtp_bin_signals[SIGNAL_NEW_STORAGE] =
+ g_signal_new ("new-storage", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ new_storage), NULL, NULL, NULL,
+ G_TYPE_NONE, 2, GST_TYPE_ELEMENT, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::request-aux-sender:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ *
+ * Request an AUX sender element for the given @session. The AUX
+ * element will be added to the bin.
+ *
+ * If no handler is connected, no AUX element will be used.
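+ *
+ * The returned element must expose pads named "src_%u" and "sink_%u" for
+ * the sessions it handles. A minimal sketch returning an rtprtxsend-based
+ * bin (the payload-type mapping 96->99 is an example):
+ * |[
+ * static GstElement *
+ * request_aux_sender_cb (GstElement * rtpbin, guint session,
+ *     gpointer user_data)
+ * {
+ *   GstElement *bin, *rtx;
+ *   GstPad *pad;
+ *   gchar *name;
+ *   GstStructure *pt_map;
+ *
+ *   bin = gst_bin_new (NULL);
+ *   rtx = gst_element_factory_make ("rtprtxsend", NULL);
+ *   pt_map = gst_structure_new ("application/x-rtp-pt-map",
+ *       "96", G_TYPE_UINT, 99, NULL);
+ *   g_object_set (rtx, "payload-type-map", pt_map, NULL);
+ *   gst_structure_free (pt_map);
+ *   gst_bin_add (GST_BIN (bin), rtx);
+ *
+ *   pad = gst_element_get_static_pad (rtx, "src");
+ *   name = g_strdup_printf ("src_%u", session);
+ *   gst_element_add_pad (bin, gst_ghost_pad_new (name, pad));
+ *   g_free (name);
+ *   gst_object_unref (pad);
+ *
+ *   pad = gst_element_get_static_pad (rtx, "sink");
+ *   name = g_strdup_printf ("sink_%u", session);
+ *   gst_element_add_pad (bin, gst_ghost_pad_new (name, pad));
+ *   g_free (name);
+ *   gst_object_unref (pad);
+ *
+ *   return bin;
+ * }
+ * ]|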
+ *
+ * Since: 1.4
+ */
+ gst_rtp_bin_signals[SIGNAL_REQUEST_AUX_SENDER] =
+ g_signal_new ("request-aux-sender", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ request_aux_sender), _gst_element_accumulator, NULL, NULL,
+ GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::request-aux-receiver:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ *
+ * Request an AUX receiver element for the given @session. The AUX
+ * element will be added to the bin.
+ *
+ * If no handler is connected, no AUX element will be used.
+ *
+ * Since: 1.4
+ */
+ gst_rtp_bin_signals[SIGNAL_REQUEST_AUX_RECEIVER] =
+ g_signal_new ("request-aux-receiver", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ request_aux_receiver), _gst_element_accumulator, NULL, NULL,
+ GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::request-fec-decoder:
+ * @rtpbin: the object which received the signal
+ * @session: the session index
+ *
+ * Request a FEC decoder element for the given @session. The element
+ * will be added to the bin after the pt demuxer.
+ *
+ * If no handler is connected, no FEC decoder will be used.
+ *
+ * Since: 1.14
+ */
+ gst_rtp_bin_signals[SIGNAL_REQUEST_FEC_DECODER] =
+ g_signal_new ("request-fec-decoder", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ request_fec_decoder), _gst_element_accumulator, NULL, NULL,
+ GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::request-fec-encoder:
+ * @rtpbin: the object which received the signal
+ * @session: the session index
+ *
+ * Request a FEC encoder element for the given @session. The element
+ * will be added to the bin after the RTPSession.
+ *
+ * If no handler is connected, no FEC encoder will be used.
+ *
+ * Since: 1.14
+ */
+ gst_rtp_bin_signals[SIGNAL_REQUEST_FEC_ENCODER] =
+ g_signal_new ("request-fec-encoder", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ request_fec_encoder), _gst_element_accumulator, NULL, NULL,
+ GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::on-new-sender-ssrc:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the sender SSRC
+ *
+ * Notify of a new sender SSRC that entered @session.
+ *
+ * Since: 1.8
+ */
+ gst_rtp_bin_signals[SIGNAL_ON_NEW_SENDER_SSRC] =
+ g_signal_new ("on-new-sender-ssrc", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, on_new_sender_ssrc),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+ /**
+ * GstRtpBin::on-sender-ssrc-active:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the sender SSRC
+ *
+ * Notify of a sender SSRC that is active, i.e., sending RTCP.
+ *
+ * Since: 1.8
+ */
+ gst_rtp_bin_signals[SIGNAL_ON_SENDER_SSRC_ACTIVE] =
+ g_signal_new ("on-sender-ssrc-active", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ on_sender_ssrc_active), NULL, NULL, NULL,
+ G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+
+ g_object_class_install_property (gobject_class, PROP_SDES,
+ g_param_spec_boxed ("sdes", "SDES",
+ "The SDES items of this session",
+ GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS
+ | GST_PARAM_DOC_SHOW_DEFAULT));
+
+ g_object_class_install_property (gobject_class, PROP_DO_LOST,
+ g_param_spec_boolean ("do-lost", "Do Lost",
+ "Send an event downstream when a packet is lost", DEFAULT_DO_LOST,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_AUTOREMOVE,
+ g_param_spec_boolean ("autoremove", "Auto Remove",
+ "Automatically remove timed out sources", DEFAULT_AUTOREMOVE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_IGNORE_PT,
+ g_param_spec_boolean ("ignore-pt", "Ignore PT",
+ "Do not demultiplex based on PT values", DEFAULT_IGNORE_PT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_USE_PIPELINE_CLOCK,
+ g_param_spec_boolean ("use-pipeline-clock", "Use pipeline clock",
+ "Use the pipeline running-time to set the NTP time in the RTCP SR messages "
+ "(DEPRECATED: Use ntp-time-source property)",
+ DEFAULT_USE_PIPELINE_CLOCK,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED));
+ /**
+ * GstRtpBin:buffer-mode:
+ *
+ * Control the buffering and timestamping mode used by the jitterbuffer.
+ */
+ g_object_class_install_property (gobject_class, PROP_BUFFER_MODE,
+ g_param_spec_enum ("buffer-mode", "Buffer Mode",
+ "Control the buffering algorithm in use", RTP_TYPE_JITTER_BUFFER_MODE,
+ DEFAULT_BUFFER_MODE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstRtpBin:ntp-sync:
+ *
+ * Set the NTP time from the sender reports as the running-time on the
+ * buffers. When both the sender and receiver have synchronized
+ * running-time, i.e. when the clock and base-time are shared
+ * between the receivers and the senders, this option can be
+ * used to synchronize receivers on multiple machines.
+ */
+ g_object_class_install_property (gobject_class, PROP_NTP_SYNC,
+ g_param_spec_boolean ("ntp-sync", "Sync on NTP clock",
+ "Synchronize received streams to the NTP clock", DEFAULT_NTP_SYNC,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpBin:rtcp-sync:
+ *
+ * If not synchronizing (directly) to the NTP clock, determines how to sync
+ * the various streams.
+ */
+ g_object_class_install_property (gobject_class, PROP_RTCP_SYNC,
+ g_param_spec_enum ("rtcp-sync", "RTCP Sync",
+ "Use of RTCP SR in synchronization", GST_RTP_BIN_RTCP_SYNC_TYPE,
+ DEFAULT_RTCP_SYNC, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpBin:rtcp-sync-interval:
+ *
+ * Determines how often to sync streams using RTCP data.
+ */
+ g_object_class_install_property (gobject_class, PROP_RTCP_SYNC_INTERVAL,
+ g_param_spec_uint ("rtcp-sync-interval", "RTCP Sync Interval",
+ "RTCP SR interval synchronization (ms) (0 = always)",
+ 0, G_MAXUINT, DEFAULT_RTCP_SYNC_INTERVAL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_DO_SYNC_EVENT,
+ g_param_spec_boolean ("do-sync-event", "Do Sync Event",
+ "Send event downstream when a stream is synchronized to the sender",
+ DEFAULT_DO_SYNC_EVENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpBin:do-retransmission:
+ *
+ * Enables RTP retransmission on all streams. To control retransmission on
+ * a per-SSRC basis, connect to the #GstRtpBin::new-jitterbuffer signal and
+ * set the #GstRtpJitterBuffer:do-retransmission property on the
+ * #GstRtpJitterBuffer object instead.
+ */
+ g_object_class_install_property (gobject_class, PROP_DO_RETRANSMISSION,
+ g_param_spec_boolean ("do-retransmission", "Do retransmission",
+ "Enable retransmission on all streams",
+ DEFAULT_DO_RETRANSMISSION,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpBin:rtp-profile:
+ *
+ * Sets the default RTP profile of newly created RTP sessions. The
+ * profile can be changed afterwards on a per-session basis.
+ */
+ g_object_class_install_property (gobject_class, PROP_RTP_PROFILE,
+ g_param_spec_enum ("rtp-profile", "RTP Profile",
+ "Default RTP profile of newly created sessions",
+ GST_TYPE_RTP_PROFILE, DEFAULT_RTP_PROFILE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_NTP_TIME_SOURCE,
+ g_param_spec_enum ("ntp-time-source", "NTP Time Source",
+ "NTP time source for RTCP packets",
+ gst_rtp_ntp_time_source_get_type (), DEFAULT_NTP_TIME_SOURCE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_RTCP_SYNC_SEND_TIME,
+ g_param_spec_boolean ("rtcp-sync-send-time", "RTCP Sync Send Time",
+ "Use send time or capture time for RTCP sync "
+ "(TRUE = send time, FALSE = capture time)",
+ DEFAULT_RTCP_SYNC_SEND_TIME,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MAX_RTCP_RTP_TIME_DIFF,
+ g_param_spec_int ("max-rtcp-rtp-time-diff", "Max RTCP RTP Time Diff",
+ "Maximum amount of time in ms that the RTP time in RTCP SRs "
+ "is allowed to be ahead (-1 disabled)", -1, G_MAXINT,
+ DEFAULT_MAX_RTCP_RTP_TIME_DIFF,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MAX_DROPOUT_TIME,
+ g_param_spec_uint ("max-dropout-time", "Max dropout time",
+ "The maximum time (milliseconds) of missing packets tolerated.",
+ 0, G_MAXUINT, DEFAULT_MAX_DROPOUT_TIME,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MAX_MISORDER_TIME,
+ g_param_spec_uint ("max-misorder-time", "Max misorder time",
+ "The maximum time (milliseconds) of misordered packets tolerated.",
+ 0, G_MAXUINT, DEFAULT_MAX_MISORDER_TIME,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_RFC7273_SYNC,
+ g_param_spec_boolean ("rfc7273-sync", "Sync on RFC7273 clock",
+ "Synchronize received streams to the RFC7273 clock "
+ "(requires clock and offset to be provided)", DEFAULT_RFC7273_SYNC,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MAX_STREAMS,
+ g_param_spec_uint ("max-streams", "Max Streams",
+ "The maximum number of streams to create for one session",
+ 0, G_MAXUINT, DEFAULT_MAX_STREAMS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpBin:max-ts-offset-adjustment:
+ *
+ * Syncing time stamps to NTP time adds a time offset. This parameter
+ * specifies the maximum number of nanoseconds per frame that this time
+ * offset may be adjusted by. This is used to avoid sudden large changes
+ * to time stamps.
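+ *
+ * For example, to allow at most 1 ms of offset adjustment per frame, set
+ * max-ts-offset-adjustment=1000000.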
+ *
+ * Since: 1.14
+ */
+ g_object_class_install_property (gobject_class, PROP_MAX_TS_OFFSET_ADJUSTMENT,
+ g_param_spec_uint64 ("max-ts-offset-adjustment",
+ "Max Timestamp Offset Adjustment",
+ "The maximum number of nanoseconds per frame that time stamp offsets "
+ "may be adjusted (0 = no limit).", 0, G_MAXUINT64,
+ DEFAULT_MAX_TS_OFFSET_ADJUSTMENT, G_PARAM_READWRITE |
+ G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpBin:max-ts-offset:
+ *
+ * Used to set an upper limit of how large a time offset may be. This
+ * is used to protect against unrealistic values as a result of either
+ * client, server, or clock issues.
+ *
+ * Since: 1.14
+ */
+ g_object_class_install_property (gobject_class, PROP_MAX_TS_OFFSET,
+ g_param_spec_int64 ("max-ts-offset", "Max TS Offset",
+ "The maximum absolute value of the time offset in (nanoseconds). "
+ "Note, if the ntp-sync parameter is set the default value is "
+ "changed to 0 (no limit)", 0, G_MAXINT64, DEFAULT_MAX_TS_OFFSET,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpBin:fec-decoders:
+ *
+ * Provides the factory used to build the FEC decoder for a given
+ * session, as a command-line alternative to
+ * #GstRtpBin::request-fec-decoder.
+ *
+ * Expects a GstStructure in the form session_id (gint) -> factory (string)
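+ *
+ * A minimal sketch of setting this from code, assuming the session index
+ * is the field name and the value is a decoder description string as in
+ * the serialized example below (the description itself is an example):
+ * |[
+ * GstStructure *decoders;
+ *
+ * decoders = gst_structure_new ("fec-decoders",
+ *     "0", G_TYPE_STRING, "rtpst2022-1-fecdec size-time=1000000000", NULL);
+ * g_object_set (rtpbin, "fec-decoders", decoders, NULL);
+ * gst_structure_free (decoders);
+ * ]|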
+ *
+ * Since: 1.20
+ */
+ g_object_class_install_property (gobject_class, PROP_FEC_DECODERS,
+ g_param_spec_boxed ("fec-decoders", "Fec Decoders",
+ "GstStructure mapping from session index to FEC decoder "
+ "factory, eg "
+ "fec-decoders='fec,0=\"rtpst2022-1-fecdec\\ size-time\\=1000000000\";'",
+ GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpBin:fec-encoders:
+ *
+ * Provides the factory used to build the FEC encoder for a given
+ * session, as a command-line alternative to
+ * #GstRtpBin::request-fec-encoder.
+ *
+ * Expects a GstStructure in the form session_id (gint) -> factory (string)
+ *
+ * Since: 1.20
+ */
+ g_object_class_install_property (gobject_class, PROP_FEC_ENCODERS,
+ g_param_spec_boxed ("fec-encoders", "Fec Encoders",
+ "GstStructure mapping from session index to FEC encoder "
+ "factory, eg "
+ "fec-encoders='fec,0=\"rtpst2022-1-fecenc\\ rows\\=5\\ columns\\=5\";'",
+ GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ g_object_class_install_property (gobject_class, PROP_USE_RTSP_BUFFERING,
++ g_param_spec_boolean ("use-rtsp-buffering", "Use RTSP buffering",
++ "Use RTSP buffering in RTP_JITTER_BUFFER_MODE_SLAVE buffer mode",
++ DEFAULT_RTSP_USE_BUFFERING,
++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++#endif
++
+ gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_rtp_bin_change_state);
+ gstelement_class->request_new_pad =
+ GST_DEBUG_FUNCPTR (gst_rtp_bin_request_new_pad);
+ gstelement_class->release_pad = GST_DEBUG_FUNCPTR (gst_rtp_bin_release_pad);
+
+ /* sink pads */
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &rtpbin_recv_rtp_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &rtpbin_recv_fec_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &rtpbin_recv_rtcp_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &rtpbin_send_rtp_sink_template);
+
+ /* src pads */
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &rtpbin_recv_rtp_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &rtpbin_send_rtcp_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &rtpbin_send_rtp_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &rtpbin_send_fec_src_template);
+
+ gst_element_class_set_static_metadata (gstelement_class, "RTP Bin",
+ "Filter/Network/RTP",
+ "Real-Time Transport Protocol bin",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ gstbin_class->handle_message = GST_DEBUG_FUNCPTR (gst_rtp_bin_handle_message);
+
+ klass->clear_pt_map = GST_DEBUG_FUNCPTR (gst_rtp_bin_clear_pt_map);
+ klass->reset_sync = GST_DEBUG_FUNCPTR (gst_rtp_bin_reset_sync);
+ klass->get_session = GST_DEBUG_FUNCPTR (gst_rtp_bin_get_session);
+ klass->get_internal_session =
+ GST_DEBUG_FUNCPTR (gst_rtp_bin_get_internal_session);
+ klass->get_storage = GST_DEBUG_FUNCPTR (gst_rtp_bin_get_storage);
+ klass->get_internal_storage =
+ GST_DEBUG_FUNCPTR (gst_rtp_bin_get_internal_storage);
+ klass->clear_ssrc = GST_DEBUG_FUNCPTR (gst_rtp_bin_clear_ssrc);
+ klass->request_rtp_encoder = GST_DEBUG_FUNCPTR (gst_rtp_bin_request_encoder);
+ klass->request_rtp_decoder = GST_DEBUG_FUNCPTR (gst_rtp_bin_request_decoder);
+ klass->request_rtcp_encoder = GST_DEBUG_FUNCPTR (gst_rtp_bin_request_encoder);
+ klass->request_rtcp_decoder = GST_DEBUG_FUNCPTR (gst_rtp_bin_request_decoder);
+ klass->request_jitterbuffer =
+ GST_DEBUG_FUNCPTR (gst_rtp_bin_request_jitterbuffer);
+
+ GST_DEBUG_CATEGORY_INIT (gst_rtp_bin_debug, "rtpbin", 0, "RTP bin");
+
+ gst_type_mark_as_plugin_api (GST_RTP_BIN_RTCP_SYNC_TYPE, 0);
+ }
+
+ static void
+ gst_rtp_bin_init (GstRtpBin * rtpbin)
+ {
+ gchar *cname;
+
+ rtpbin->priv = gst_rtp_bin_get_instance_private (rtpbin);
+ g_mutex_init (&rtpbin->priv->bin_lock);
+ g_mutex_init (&rtpbin->priv->dyn_lock);
+
+ rtpbin->latency_ms = DEFAULT_LATENCY_MS;
+ rtpbin->latency_ns = DEFAULT_LATENCY_MS * GST_MSECOND;
+ rtpbin->drop_on_latency = DEFAULT_DROP_ON_LATENCY;
+ rtpbin->do_lost = DEFAULT_DO_LOST;
+ rtpbin->ignore_pt = DEFAULT_IGNORE_PT;
+ rtpbin->ntp_sync = DEFAULT_NTP_SYNC;
+ rtpbin->rtcp_sync = DEFAULT_RTCP_SYNC;
+ rtpbin->rtcp_sync_interval = DEFAULT_RTCP_SYNC_INTERVAL;
+ rtpbin->priv->autoremove = DEFAULT_AUTOREMOVE;
+ rtpbin->buffer_mode = DEFAULT_BUFFER_MODE;
+ rtpbin->use_pipeline_clock = DEFAULT_USE_PIPELINE_CLOCK;
+ rtpbin->send_sync_event = DEFAULT_DO_SYNC_EVENT;
+ rtpbin->do_retransmission = DEFAULT_DO_RETRANSMISSION;
+ rtpbin->rtp_profile = DEFAULT_RTP_PROFILE;
+ rtpbin->ntp_time_source = DEFAULT_NTP_TIME_SOURCE;
+ rtpbin->rtcp_sync_send_time = DEFAULT_RTCP_SYNC_SEND_TIME;
+ rtpbin->max_rtcp_rtp_time_diff = DEFAULT_MAX_RTCP_RTP_TIME_DIFF;
+ rtpbin->max_dropout_time = DEFAULT_MAX_DROPOUT_TIME;
+ rtpbin->max_misorder_time = DEFAULT_MAX_MISORDER_TIME;
+ rtpbin->rfc7273_sync = DEFAULT_RFC7273_SYNC;
+ rtpbin->max_streams = DEFAULT_MAX_STREAMS;
+ rtpbin->max_ts_offset_adjustment = DEFAULT_MAX_TS_OFFSET_ADJUSTMENT;
+ rtpbin->max_ts_offset = DEFAULT_MAX_TS_OFFSET;
+ rtpbin->max_ts_offset_is_set = FALSE;
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ rtpbin->use_rtsp_buffering = FALSE;
++#endif
+
+ /* some default SDES entries */
+ cname = g_strdup_printf ("user%u@host-%x", g_random_int (), g_random_int ());
+ rtpbin->sdes = gst_structure_new ("application/x-rtp-source-sdes",
+ "cname", G_TYPE_STRING, cname, "tool", G_TYPE_STRING, "GStreamer", NULL);
+ rtpbin->fec_decoders =
+ gst_structure_new_empty ("application/x-rtp-fec-decoders");
+ rtpbin->fec_encoders =
+ gst_structure_new_empty ("application/x-rtp-fec-encoders");
+ g_free (cname);
+ }
+
+ static void
+ gst_rtp_bin_dispose (GObject * object)
+ {
+ GstRtpBin *rtpbin;
+
+ rtpbin = GST_RTP_BIN (object);
+
+ GST_RTP_BIN_LOCK (rtpbin);
+ GST_DEBUG_OBJECT (object, "freeing sessions");
+ g_slist_foreach (rtpbin->sessions, (GFunc) free_session, rtpbin);
+ g_slist_free (rtpbin->sessions);
+ rtpbin->sessions = NULL;
+ GST_RTP_BIN_UNLOCK (rtpbin);
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+ }
+
+ static void
+ gst_rtp_bin_finalize (GObject * object)
+ {
+ GstRtpBin *rtpbin;
+
+ rtpbin = GST_RTP_BIN (object);
+
+ if (rtpbin->sdes)
+ gst_structure_free (rtpbin->sdes);
+
+ if (rtpbin->fec_decoders)
+ gst_structure_free (rtpbin->fec_decoders);
+
+ if (rtpbin->fec_encoders)
+ gst_structure_free (rtpbin->fec_encoders);
+
+ g_mutex_clear (&rtpbin->priv->bin_lock);
+ g_mutex_clear (&rtpbin->priv->dyn_lock);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ static void
+ gst_rtp_bin_set_sdes_struct (GstRtpBin * bin, const GstStructure * sdes)
+ {
+ GSList *item;
+
+ if (sdes == NULL)
+ return;
+
+ GST_RTP_BIN_LOCK (bin);
+
+ GST_OBJECT_LOCK (bin);
+ if (bin->sdes)
+ gst_structure_free (bin->sdes);
+ bin->sdes = gst_structure_copy (sdes);
+ GST_OBJECT_UNLOCK (bin);
+
+ /* store in all sessions */
+ for (item = bin->sessions; item; item = g_slist_next (item)) {
+ GstRtpBinSession *session = item->data;
+ g_object_set (session->session, "sdes", sdes, NULL);
+ }
+
+ GST_RTP_BIN_UNLOCK (bin);
+ }
+
+ static void
+ gst_rtp_bin_set_fec_decoders_struct (GstRtpBin * bin,
+ const GstStructure * decoders)
+ {
+ if (decoders == NULL)
+ return;
+
+ GST_RTP_BIN_LOCK (bin);
+
+ GST_OBJECT_LOCK (bin);
+ if (bin->fec_decoders)
+ gst_structure_free (bin->fec_decoders);
+ bin->fec_decoders = gst_structure_copy (decoders);
+
+ GST_OBJECT_UNLOCK (bin);
+
+ GST_RTP_BIN_UNLOCK (bin);
+ }
+
+ static void
+ gst_rtp_bin_set_fec_encoders_struct (GstRtpBin * bin,
+ const GstStructure * encoders)
+ {
+ if (encoders == NULL)
+ return;
+
+ GST_RTP_BIN_LOCK (bin);
+
+ GST_OBJECT_LOCK (bin);
+ if (bin->fec_encoders)
+ gst_structure_free (bin->fec_encoders);
+ bin->fec_encoders = gst_structure_copy (encoders);
+
+ GST_OBJECT_UNLOCK (bin);
+
+ GST_RTP_BIN_UNLOCK (bin);
+ }
+
+ static GstStructure *
+ gst_rtp_bin_get_sdes_struct (GstRtpBin * bin)
+ {
+ GstStructure *result;
+
+ GST_OBJECT_LOCK (bin);
+ result = gst_structure_copy (bin->sdes);
+ GST_OBJECT_UNLOCK (bin);
+
+ return result;
+ }
+
+ static GstStructure *
+ gst_rtp_bin_get_fec_decoders_struct (GstRtpBin * bin)
+ {
+ GstStructure *result;
+
+ GST_OBJECT_LOCK (bin);
+ result = gst_structure_copy (bin->fec_decoders);
+ GST_OBJECT_UNLOCK (bin);
+
+ return result;
+ }
+
+ static GstStructure *
+ gst_rtp_bin_get_fec_encoders_struct (GstRtpBin * bin)
+ {
+ GstStructure *result;
+
+ GST_OBJECT_LOCK (bin);
+ result = gst_structure_copy (bin->fec_encoders);
+ GST_OBJECT_UNLOCK (bin);
+
+ return result;
+ }
+
+ static void
+ gst_rtp_bin_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+ GstRtpBin *rtpbin;
+
+ rtpbin = GST_RTP_BIN (object);
+
+ switch (prop_id) {
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ case PROP_USE_RTSP_BUFFERING:
++ GST_RTP_BIN_LOCK (rtpbin);
++ rtpbin->use_rtsp_buffering = g_value_get_boolean (value);
++ GST_RTP_BIN_UNLOCK (rtpbin);
++ break;
++#endif
+ case PROP_LATENCY:
+ GST_RTP_BIN_LOCK (rtpbin);
+ rtpbin->latency_ms = g_value_get_uint (value);
+ rtpbin->latency_ns = rtpbin->latency_ms * GST_MSECOND;
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ /* propagate the property down to the jitterbuffer */
+ gst_rtp_bin_propagate_property_to_jitterbuffer (rtpbin, "latency", value);
+ break;
+ case PROP_DROP_ON_LATENCY:
+ GST_RTP_BIN_LOCK (rtpbin);
+ rtpbin->drop_on_latency = g_value_get_boolean (value);
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ /* propagate the property down to the jitterbuffer */
+ gst_rtp_bin_propagate_property_to_jitterbuffer (rtpbin,
+ "drop-on-latency", value);
+ break;
+ case PROP_SDES:
+ gst_rtp_bin_set_sdes_struct (rtpbin, g_value_get_boxed (value));
+ break;
+ case PROP_DO_LOST:
+ GST_RTP_BIN_LOCK (rtpbin);
+ rtpbin->do_lost = g_value_get_boolean (value);
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ gst_rtp_bin_propagate_property_to_jitterbuffer (rtpbin, "do-lost", value);
+ break;
+ case PROP_NTP_SYNC:
+ rtpbin->ntp_sync = g_value_get_boolean (value);
+ /* The default value of max_ts_offset depends on ntp_sync. If user
+ * hasn't set it then change default value */
+ if (!rtpbin->max_ts_offset_is_set) {
+ if (rtpbin->ntp_sync) {
+ rtpbin->max_ts_offset = 0;
+ } else {
+ rtpbin->max_ts_offset = DEFAULT_MAX_TS_OFFSET;
+ }
+ }
+ break;
+ case PROP_RTCP_SYNC:
+ g_atomic_int_set (&rtpbin->rtcp_sync, g_value_get_enum (value));
+ break;
+ case PROP_RTCP_SYNC_INTERVAL:
+ rtpbin->rtcp_sync_interval = g_value_get_uint (value);
+ break;
+ case PROP_IGNORE_PT:
+ rtpbin->ignore_pt = g_value_get_boolean (value);
+ break;
+ case PROP_AUTOREMOVE:
+ rtpbin->priv->autoremove = g_value_get_boolean (value);
+ break;
+ case PROP_USE_PIPELINE_CLOCK:
+ {
+ GSList *sessions;
+ GST_RTP_BIN_LOCK (rtpbin);
+ rtpbin->use_pipeline_clock = g_value_get_boolean (value);
+ for (sessions = rtpbin->sessions; sessions;
+ sessions = g_slist_next (sessions)) {
+ GstRtpBinSession *session = (GstRtpBinSession *) sessions->data;
+
+ g_object_set (G_OBJECT (session->session),
+ "use-pipeline-clock", rtpbin->use_pipeline_clock, NULL);
+ }
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ }
+ break;
+ case PROP_DO_SYNC_EVENT:
+ rtpbin->send_sync_event = g_value_get_boolean (value);
+ break;
+ case PROP_BUFFER_MODE:
+ GST_RTP_BIN_LOCK (rtpbin);
+ rtpbin->buffer_mode = g_value_get_enum (value);
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ /* propagate the property down to the jitterbuffer */
+ gst_rtp_bin_propagate_property_to_jitterbuffer (rtpbin, "mode", value);
+ break;
+ case PROP_DO_RETRANSMISSION:
+ GST_RTP_BIN_LOCK (rtpbin);
+ rtpbin->do_retransmission = g_value_get_boolean (value);
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ gst_rtp_bin_propagate_property_to_jitterbuffer (rtpbin,
+ "do-retransmission", value);
+ break;
+ case PROP_RTP_PROFILE:
+ rtpbin->rtp_profile = g_value_get_enum (value);
+ break;
+ case PROP_NTP_TIME_SOURCE:{
+ GSList *sessions;
+ GST_RTP_BIN_LOCK (rtpbin);
+ rtpbin->ntp_time_source = g_value_get_enum (value);
+ for (sessions = rtpbin->sessions; sessions;
+ sessions = g_slist_next (sessions)) {
+ GstRtpBinSession *session = (GstRtpBinSession *) sessions->data;
+
+ g_object_set (G_OBJECT (session->session),
+ "ntp-time-source", rtpbin->ntp_time_source, NULL);
+ }
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ break;
+ }
+ case PROP_RTCP_SYNC_SEND_TIME:{
+ GSList *sessions;
+ GST_RTP_BIN_LOCK (rtpbin);
+ rtpbin->rtcp_sync_send_time = g_value_get_boolean (value);
+ for (sessions = rtpbin->sessions; sessions;
+ sessions = g_slist_next (sessions)) {
+ GstRtpBinSession *session = (GstRtpBinSession *) sessions->data;
+
+ g_object_set (G_OBJECT (session->session),
+ "rtcp-sync-send-time", rtpbin->rtcp_sync_send_time, NULL);
+ }
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ break;
+ }
+ case PROP_MAX_RTCP_RTP_TIME_DIFF:
+ GST_RTP_BIN_LOCK (rtpbin);
+ rtpbin->max_rtcp_rtp_time_diff = g_value_get_int (value);
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ gst_rtp_bin_propagate_property_to_jitterbuffer (rtpbin,
+ "max-rtcp-rtp-time-diff", value);
+ break;
+ case PROP_MAX_DROPOUT_TIME:
+ GST_RTP_BIN_LOCK (rtpbin);
+ rtpbin->max_dropout_time = g_value_get_uint (value);
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ gst_rtp_bin_propagate_property_to_jitterbuffer (rtpbin,
+ "max-dropout-time", value);
+ gst_rtp_bin_propagate_property_to_session (rtpbin, "max-dropout-time",
+ value);
+ break;
+ case PROP_MAX_MISORDER_TIME:
+ GST_RTP_BIN_LOCK (rtpbin);
+ rtpbin->max_misorder_time = g_value_get_uint (value);
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ gst_rtp_bin_propagate_property_to_jitterbuffer (rtpbin,
+ "max-misorder-time", value);
+ gst_rtp_bin_propagate_property_to_session (rtpbin, "max-misorder-time",
+ value);
+ break;
+ case PROP_RFC7273_SYNC:
+ rtpbin->rfc7273_sync = g_value_get_boolean (value);
+ gst_rtp_bin_propagate_property_to_jitterbuffer (rtpbin,
+ "rfc7273-sync", value);
+ break;
+ case PROP_MAX_STREAMS:
+ rtpbin->max_streams = g_value_get_uint (value);
+ break;
+ case PROP_MAX_TS_OFFSET_ADJUSTMENT:
+ rtpbin->max_ts_offset_adjustment = g_value_get_uint64 (value);
+ gst_rtp_bin_propagate_property_to_jitterbuffer (rtpbin,
+ "max-ts-offset-adjustment", value);
+ break;
+ case PROP_MAX_TS_OFFSET:
+ rtpbin->max_ts_offset = g_value_get_int64 (value);
+ rtpbin->max_ts_offset_is_set = TRUE;
+ break;
+ case PROP_FEC_DECODERS:
+ gst_rtp_bin_set_fec_decoders_struct (rtpbin, g_value_get_boxed (value));
+ break;
+ case PROP_FEC_ENCODERS:
+ gst_rtp_bin_set_fec_encoders_struct (rtpbin, g_value_get_boxed (value));
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static void
+ gst_rtp_bin_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+ {
+ GstRtpBin *rtpbin;
+
+ rtpbin = GST_RTP_BIN (object);
+
+ switch (prop_id) {
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ case PROP_USE_RTSP_BUFFERING:
++ GST_RTP_BIN_LOCK (rtpbin);
++ g_value_set_boolean (value, rtpbin->use_rtsp_buffering);
++ GST_RTP_BIN_UNLOCK (rtpbin);
++ break;
++#endif
+ case PROP_LATENCY:
+ GST_RTP_BIN_LOCK (rtpbin);
+ g_value_set_uint (value, rtpbin->latency_ms);
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ break;
+ case PROP_DROP_ON_LATENCY:
+ GST_RTP_BIN_LOCK (rtpbin);
+ g_value_set_boolean (value, rtpbin->drop_on_latency);
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ break;
+ case PROP_SDES:
+ g_value_take_boxed (value, gst_rtp_bin_get_sdes_struct (rtpbin));
+ break;
+ case PROP_DO_LOST:
+ GST_RTP_BIN_LOCK (rtpbin);
+ g_value_set_boolean (value, rtpbin->do_lost);
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ break;
+ case PROP_IGNORE_PT:
+ g_value_set_boolean (value, rtpbin->ignore_pt);
+ break;
+ case PROP_NTP_SYNC:
+ g_value_set_boolean (value, rtpbin->ntp_sync);
+ break;
+ case PROP_RTCP_SYNC:
+ g_value_set_enum (value, g_atomic_int_get (&rtpbin->rtcp_sync));
+ break;
+ case PROP_RTCP_SYNC_INTERVAL:
+ g_value_set_uint (value, rtpbin->rtcp_sync_interval);
+ break;
+ case PROP_AUTOREMOVE:
+ g_value_set_boolean (value, rtpbin->priv->autoremove);
+ break;
+ case PROP_BUFFER_MODE:
+ g_value_set_enum (value, rtpbin->buffer_mode);
+ break;
+ case PROP_USE_PIPELINE_CLOCK:
+ g_value_set_boolean (value, rtpbin->use_pipeline_clock);
+ break;
+ case PROP_DO_SYNC_EVENT:
+ g_value_set_boolean (value, rtpbin->send_sync_event);
+ break;
+ case PROP_DO_RETRANSMISSION:
+ GST_RTP_BIN_LOCK (rtpbin);
+ g_value_set_boolean (value, rtpbin->do_retransmission);
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ break;
+ case PROP_RTP_PROFILE:
+ g_value_set_enum (value, rtpbin->rtp_profile);
+ break;
+ case PROP_NTP_TIME_SOURCE:
+ g_value_set_enum (value, rtpbin->ntp_time_source);
+ break;
+ case PROP_RTCP_SYNC_SEND_TIME:
+ g_value_set_boolean (value, rtpbin->rtcp_sync_send_time);
+ break;
+ case PROP_MAX_RTCP_RTP_TIME_DIFF:
+ GST_RTP_BIN_LOCK (rtpbin);
+ g_value_set_int (value, rtpbin->max_rtcp_rtp_time_diff);
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ break;
+ case PROP_MAX_DROPOUT_TIME:
+ g_value_set_uint (value, rtpbin->max_dropout_time);
+ break;
+ case PROP_MAX_MISORDER_TIME:
+ g_value_set_uint (value, rtpbin->max_misorder_time);
+ break;
+ case PROP_RFC7273_SYNC:
+ g_value_set_boolean (value, rtpbin->rfc7273_sync);
+ break;
+ case PROP_MAX_STREAMS:
+ g_value_set_uint (value, rtpbin->max_streams);
+ break;
+ case PROP_MAX_TS_OFFSET_ADJUSTMENT:
+ g_value_set_uint64 (value, rtpbin->max_ts_offset_adjustment);
+ break;
+ case PROP_MAX_TS_OFFSET:
+ g_value_set_int64 (value, rtpbin->max_ts_offset);
+ break;
+ case PROP_FEC_DECODERS:
+ g_value_take_boxed (value, gst_rtp_bin_get_fec_decoders_struct (rtpbin));
+ break;
+ case PROP_FEC_ENCODERS:
+ g_value_take_boxed (value, gst_rtp_bin_get_fec_encoders_struct (rtpbin));
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
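For context, these are plain GObject properties; a minimal sketch of driving them from application code (the values chosen here are illustrative, and "latency" is in milliseconds):

    GstElement *rtpbin = gst_element_factory_make ("rtpbin", NULL);
    guint latency;

    /* "latency" is forwarded down to the jitterbuffers as shown above */
    g_object_set (rtpbin, "latency", 200, "do-retransmission", TRUE, NULL);
    g_object_get (rtpbin, "latency", &latency, NULL);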
+
+ static void
+ gst_rtp_bin_handle_message (GstBin * bin, GstMessage * message)
+ {
+ GstRtpBin *rtpbin;
+
+ rtpbin = GST_RTP_BIN (bin);
+
+ switch (GST_MESSAGE_TYPE (message)) {
+ case GST_MESSAGE_ELEMENT:
+ {
+ const GstStructure *s = gst_message_get_structure (message);
+
+      /* we add the session ID to the message structure so listeners can
+       * tell which session emitted it */
+ if (gst_structure_has_name (s, "application/x-rtp-source-sdes")) {
+ GstRtpBinSession *sess;
+
+ /* find the session we set it as object data */
+ sess = g_object_get_data (G_OBJECT (GST_MESSAGE_SRC (message)),
+ "GstRTPBin.session");
+
+ if (G_LIKELY (sess)) {
+ message = gst_message_make_writable (message);
+ s = gst_message_get_structure (message);
+ gst_structure_set ((GstStructure *) s, "session", G_TYPE_UINT,
+ sess->id, NULL);
+ }
+ }
+ GST_BIN_CLASS (parent_class)->handle_message (bin, message);
+ break;
+ }
+ case GST_MESSAGE_BUFFERING:
+ {
+ gint percent;
+ gint min_percent = 100;
+ GSList *sessions, *streams;
+ GstRtpBinStream *stream;
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gboolean buffering_flag = FALSE, update_buffering_status = TRUE;
++#endif
+ gboolean change = FALSE, active = FALSE;
+ GstClockTime min_out_time;
+ GstBufferingMode mode;
+ gint avg_in, avg_out;
+ gint64 buffering_left;
+
+ gst_message_parse_buffering (message, &percent);
+ gst_message_parse_buffering_stats (message, &mode, &avg_in, &avg_out,
+ &buffering_left);
+
+ stream =
+ g_object_get_data (G_OBJECT (GST_MESSAGE_SRC (message)),
+ "GstRTPBin.stream");
+
+ GST_DEBUG_OBJECT (bin, "got percent %d from stream %p", percent, stream);
+
+ /* get the stream */
+ if (G_LIKELY (stream)) {
+ GST_RTP_BIN_LOCK (rtpbin);
+ /* fill in the percent */
+ stream->percent = percent;
+
+ /* calculate the min value for all streams */
+ for (sessions = rtpbin->sessions; sessions;
+ sessions = g_slist_next (sessions)) {
+ GstRtpBinSession *session = (GstRtpBinSession *) sessions->data;
+
+ GST_RTP_SESSION_LOCK (session);
+ if (session->streams) {
+ for (streams = session->streams; streams;
+ streams = g_slist_next (streams)) {
+ GstRtpBinStream *stream = (GstRtpBinStream *) streams->data;
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ if (rtpbin->use_rtsp_buffering &&
++ rtpbin->buffer_mode == RTP_JITTER_BUFFER_MODE_SLAVE) {
++ GstPad *temp_pad_src = NULL;
++ GstCaps *temp_caps_src = NULL;
++ GstStructure *caps_structure;
++ const gchar *caps_str_media = NULL;
++ temp_pad_src = gst_element_get_static_pad (stream->buffer, "src");
++ temp_caps_src = gst_pad_get_current_caps (temp_pad_src);
++ GST_DEBUG_OBJECT (bin,
++ "stream %p percent %d : temp_caps_src=%" GST_PTR_FORMAT,
++ stream, stream->percent, temp_caps_src);
++ if (temp_caps_src) {
++ caps_structure = gst_caps_get_structure (temp_caps_src, 0);
++ caps_str_media =
++ gst_structure_get_string (caps_structure, "media");
++ if (caps_str_media != NULL) {
++ if ((strcmp (caps_str_media, "video") != 0)
++ && (strcmp (caps_str_media, "audio") != 0)) {
++ GST_DEBUG_OBJECT (bin,
++                      "non audio/video stream, ignoring");
++ gst_caps_unref (temp_caps_src);
++ gst_object_unref (temp_pad_src);
++ continue;
++ } else if (stream->percent >= 100) {
++                  /* The buffering icon is shown too often during RTSP
++                     playback, so optimize the buffering updates: once a
++                     stream's percentage has reached 100, stop posting
++                     buffering messages for it. */
++ if (stream->prev_percent < 100)
++ buffering_flag = TRUE;
++ else
++ update_buffering_status = FALSE;
++ }
++ }
++ gst_caps_unref (temp_caps_src);
++ }
++ gst_object_unref (temp_pad_src);
++ /* Updating prev stream percentage */
++ stream->prev_percent = stream->percent;
++ } else {
++ GST_DEBUG_OBJECT (bin, "stream %p percent %d", stream,
++ stream->percent);
++ }
++#else
+ GST_DEBUG_OBJECT (bin, "stream %p percent %d", stream,
+ stream->percent);
+
++#endif
+ /* find min percent */
+ if (min_percent > stream->percent)
+ min_percent = stream->percent;
+ }
+ } else {
+ GST_INFO_OBJECT (bin,
+ "session has no streams, setting min_percent to 0");
+ min_percent = 0;
+ }
+ GST_RTP_SESSION_UNLOCK (session);
+ }
+ GST_DEBUG_OBJECT (bin, "min percent %d", min_percent);
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ if (!(rtpbin->use_rtsp_buffering &&
++ rtpbin->buffer_mode == RTP_JITTER_BUFFER_MODE_SLAVE)) {
++#endif
+ if (rtpbin->buffering) {
+ if (min_percent == 100) {
+ rtpbin->buffering = FALSE;
+ active = TRUE;
+ change = TRUE;
+ }
+ } else {
+ if (min_percent < 100) {
+ /* pause the streams */
+ rtpbin->buffering = TRUE;
+ active = FALSE;
+ change = TRUE;
+ }
+ }
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ }
++#endif
+ GST_RTP_BIN_UNLOCK (rtpbin);
+
+ gst_message_unref (message);
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ if (rtpbin->use_rtsp_buffering &&
++ rtpbin->buffer_mode == RTP_JITTER_BUFFER_MODE_SLAVE) {
++ if (update_buffering_status == FALSE)
++ break;
++ if (buffering_flag) {
++ min_percent = 100;
++          GST_DEBUG_OBJECT (bin, "forcing min_percent to 100");
++ }
++ }
++#endif
+ /* make a new buffering message with the min value */
+ message =
+ gst_message_new_buffering (GST_OBJECT_CAST (bin), min_percent);
+ gst_message_set_buffering_stats (message, mode, avg_in, avg_out,
+ buffering_left);
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ if (rtpbin->use_rtsp_buffering &&
++ rtpbin->buffer_mode == RTP_JITTER_BUFFER_MODE_SLAVE)
++ goto slave_buffering;
++#endif
+ if (G_UNLIKELY (change)) {
+ GstClock *clock;
+ guint64 running_time = 0;
+ guint64 offset = 0;
+
+ /* figure out the running time when we have a clock */
+ if (G_LIKELY ((clock =
+ gst_element_get_clock (GST_ELEMENT_CAST (bin))))) {
+ guint64 now, base_time;
+
+ now = gst_clock_get_time (clock);
+ base_time = gst_element_get_base_time (GST_ELEMENT_CAST (bin));
+ running_time = now - base_time;
+ gst_object_unref (clock);
+ }
+ GST_DEBUG_OBJECT (bin,
+ "running time now %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (running_time));
+
+ GST_RTP_BIN_LOCK (rtpbin);
+
+ /* when we reactivate, calculate the offsets so that all streams have
+ * an output time that is at least as big as the running_time */
+ offset = 0;
+ if (active) {
+ if (running_time > rtpbin->buffer_start) {
+ offset = running_time - rtpbin->buffer_start;
+ if (offset >= rtpbin->latency_ns)
+ offset -= rtpbin->latency_ns;
+ else
+ offset = 0;
+ }
+ }
+
+ /* pause all streams */
+ min_out_time = -1;
+ for (sessions = rtpbin->sessions; sessions;
+ sessions = g_slist_next (sessions)) {
+ GstRtpBinSession *session = (GstRtpBinSession *) sessions->data;
+
+ GST_RTP_SESSION_LOCK (session);
+ for (streams = session->streams; streams;
+ streams = g_slist_next (streams)) {
+ GstRtpBinStream *stream = (GstRtpBinStream *) streams->data;
+ GstElement *element = stream->buffer;
+ guint64 last_out = -1;
+
+ if (g_signal_lookup ("set-active", G_OBJECT_TYPE (element)) != 0) {
+ g_signal_emit_by_name (element, "set-active", active, offset,
+ &last_out);
+ }
+
+ if (!active) {
+ g_object_get (element, "percent", &stream->percent, NULL);
+
+ if (last_out == -1)
+ last_out = 0;
+ if (min_out_time == -1 || last_out < min_out_time)
+ min_out_time = last_out;
+ }
+
+ GST_DEBUG_OBJECT (bin,
+ "setting %p to %d, offset %" GST_TIME_FORMAT ", last %"
+ GST_TIME_FORMAT ", percent %d", element, active,
+ GST_TIME_ARGS (offset), GST_TIME_ARGS (last_out),
+ stream->percent);
+ }
+ GST_RTP_SESSION_UNLOCK (session);
+ }
+ GST_DEBUG_OBJECT (bin,
+ "min out time %" GST_TIME_FORMAT, GST_TIME_ARGS (min_out_time));
+
+ /* the buffer_start is the min out time of all paused jitterbuffers */
+ if (!active)
+ rtpbin->buffer_start = min_out_time;
+
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ }
+ }
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++slave_buffering:
++#endif
+ GST_BIN_CLASS (parent_class)->handle_message (bin, message);
+ break;
+ }
+ default:
+ {
+ GST_BIN_CLASS (parent_class)->handle_message (bin, message);
+ break;
+ }
+ }
+ }
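Applications only ever see the aggregated buffering message that this handler re-posts. A sketch of the usual application-side reaction (the bus watch setup is assumed, and pausing like this is not suitable for live pipelines):

    static gboolean
    bus_cb (GstBus * bus, GstMessage * msg, gpointer user_data)
    {
      GstElement *pipeline = user_data;

      if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_BUFFERING) {
        gint percent;

        gst_message_parse_buffering (msg, &percent);
        /* pause while rebuffering, resume once the minimum over all
         * streams (computed above) reaches 100 */
        gst_element_set_state (pipeline,
            percent < 100 ? GST_STATE_PAUSED : GST_STATE_PLAYING);
      }
      return TRUE;
    }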
+
+ static GstStateChangeReturn
+ gst_rtp_bin_change_state (GstElement * element, GstStateChange transition)
+ {
+ GstStateChangeReturn res;
+ GstRtpBin *rtpbin;
+ GstRtpBinPrivate *priv;
+
+ rtpbin = GST_RTP_BIN (element);
+ priv = rtpbin->priv;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ priv->last_ntpnstime = 0;
+ GST_LOG_OBJECT (rtpbin, "clearing shutdown flag");
+ g_atomic_int_set (&priv->shutdown, 0);
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ GST_LOG_OBJECT (rtpbin, "setting shutdown flag");
+ g_atomic_int_set (&priv->shutdown, 1);
+ /* wait for all callbacks to end by taking the lock. No new callbacks will
+ * be able to happen as we set the shutdown flag. */
+ GST_RTP_BIN_DYN_LOCK (rtpbin);
+ GST_LOG_OBJECT (rtpbin, "dynamic lock taken, we can continue shutdown");
+ GST_RTP_BIN_DYN_UNLOCK (rtpbin);
+ break;
+ default:
+ break;
+ }
+
+ res = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ break;
+ default:
+ break;
+ }
+ return res;
+ }
+
+ static GstElement *
+ session_request_element (GstRtpBinSession * session, guint signal)
+ {
+ GstElement *element = NULL;
+ GstRtpBin *bin = session->bin;
+
+ g_signal_emit (bin, gst_rtp_bin_signals[signal], 0, session->id, &element);
+
+ if (element) {
+ if (!bin_manage_element (bin, element))
+ goto manage_failed;
+ session->elements = g_slist_prepend (session->elements, element);
+ }
+ return element;
+
+ /* ERRORS */
+ manage_failed:
+ {
+ GST_WARNING_OBJECT (bin, "unable to manage element");
+ gst_object_unref (element);
+ return NULL;
+ }
+ }
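session_request_element() backs all of the request-* signals. As one example of the application side, a sketch of a request-aux-sender handler that wraps rtprtxsend in a bin; the 96 to 97 payload-type mapping is an assumption for illustration:

    static GstElement *
    request_aux_sender_cb (GstElement * rtpbin, guint sessid, gpointer udata)
    {
      GstElement *bin, *rtx;
      GstStructure *pt_map;
      GstPad *pad;
      gchar *name;

      bin = gst_bin_new (NULL);
      rtx = gst_element_factory_make ("rtprtxsend", NULL);

      pt_map = gst_structure_new ("application/x-rtp-pt-map",
          "96", G_TYPE_UINT, 97, NULL);
      g_object_set (rtx, "payload-type-map", pt_map, NULL);
      gst_structure_free (pt_map);
      gst_bin_add (GST_BIN (bin), rtx);

      /* the ghost pads must be named sink_%u/src_%u with the session id,
       * which is what setup_aux_sender_fold() below parses */
      pad = gst_element_get_static_pad (rtx, "sink");
      name = g_strdup_printf ("sink_%u", sessid);
      gst_element_add_pad (bin, gst_ghost_pad_new (name, pad));
      g_free (name);
      gst_object_unref (pad);

      pad = gst_element_get_static_pad (rtx, "src");
      name = g_strdup_printf ("src_%u", sessid);
      gst_element_add_pad (bin, gst_ghost_pad_new (name, pad));
      g_free (name);
      gst_object_unref (pad);

      return bin;
    }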
+
+ static gboolean
+ copy_sticky_events (GstPad * pad, GstEvent ** event, gpointer user_data)
+ {
+ GstPad *gpad = GST_PAD_CAST (user_data);
+
+ GST_DEBUG_OBJECT (gpad, "store sticky event %" GST_PTR_FORMAT, *event);
+ gst_pad_store_sticky_event (gpad, *event);
+
+ return TRUE;
+ }
+
+ static gboolean
+ ensure_fec_decoder (GstRtpBin * rtpbin, GstRtpBinSession * session)
+ {
+ const gchar *factory;
+ gchar *sess_id_str;
+
+ if (session->fec_decoder)
+ goto done;
+
+ sess_id_str = g_strdup_printf ("%u", session->id);
+ factory = gst_structure_get_string (rtpbin->fec_decoders, sess_id_str);
+ g_free (sess_id_str);
+
+ /* First try the property */
+ if (factory) {
+ GError *err = NULL;
+
+ session->fec_decoder =
+ gst_parse_bin_from_description_full (factory, TRUE, NULL,
+ GST_PARSE_FLAG_NO_SINGLE_ELEMENT_BINS | GST_PARSE_FLAG_FATAL_ERRORS,
+ &err);
+    if (!session->fec_decoder) {
+      GST_ERROR_OBJECT (rtpbin, "Failed to build decoder from factory: %s",
+          err->message);
+      g_clear_error (&err);
+    } else {
+      bin_manage_element (session->bin, session->fec_decoder);
+      session->elements =
+          g_slist_prepend (session->elements, session->fec_decoder);
+      GST_INFO_OBJECT (rtpbin, "Built FEC decoder: %" GST_PTR_FORMAT
+          " for session %u", session->fec_decoder, session->id);
+    }
+  }
+
+ /* Fallback to the signal */
+ if (!session->fec_decoder)
+ session->fec_decoder =
+ session_request_element (session, SIGNAL_REQUEST_FEC_DECODER);
+
+ done:
+ return session->fec_decoder != NULL;
+ }
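The property path above can be exercised without any signal handlers; a sketch, assuming session 0 and the rtpst2022-1-fecdec element from gst-plugins-bad:

    GstStructure *decoders = gst_structure_new ("fec-decoders",
        "0", G_TYPE_STRING, "rtpst2022-1-fecdec size-time=1000000000",
        NULL);

    g_object_set (rtpbin, "fec-decoders", decoders, NULL);
    gst_structure_free (decoders);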
+
+ static void
+ expose_recv_src_pad (GstRtpBin * rtpbin, GstPad * pad, GstRtpBinStream * stream,
+ guint8 pt)
+ {
+ GstElementClass *klass;
+ GstPadTemplate *templ;
+ gchar *padname;
+ GstPad *gpad;
+
+ gst_object_ref (pad);
+
+ if (stream->session->storage && !stream->session->fec_decoder) {
+ if (ensure_fec_decoder (rtpbin, stream->session)) {
+ GstElement *fec_decoder = stream->session->fec_decoder;
+ GstPad *sinkpad, *srcpad;
+ GstPadLinkReturn ret;
+
+ sinkpad = gst_element_get_static_pad (fec_decoder, "sink");
+
+ if (!sinkpad)
+ goto fec_decoder_sink_failed;
+
+ ret = gst_pad_link (pad, sinkpad);
+ gst_object_unref (sinkpad);
+
+ if (ret != GST_PAD_LINK_OK)
+ goto fec_decoder_link_failed;
+
+ srcpad = gst_element_get_static_pad (fec_decoder, "src");
+
+ if (!srcpad)
+ goto fec_decoder_src_failed;
+
+ gst_pad_sticky_events_foreach (pad, copy_sticky_events, srcpad);
+ gst_object_unref (pad);
+ pad = srcpad;
+ }
+ }
+
+ GST_RTP_BIN_SHUTDOWN_LOCK (rtpbin, shutdown);
+
+ /* ghost the pad to the parent */
+ klass = GST_ELEMENT_GET_CLASS (rtpbin);
+ templ = gst_element_class_get_pad_template (klass, "recv_rtp_src_%u_%u_%u");
+ padname = g_strdup_printf ("recv_rtp_src_%u_%u_%u",
+ stream->session->id, stream->ssrc, pt);
+ gpad = gst_ghost_pad_new_from_template (padname, pad, templ);
+ g_free (padname);
+ g_object_set_data (G_OBJECT (pad), "GstRTPBin.ghostpad", gpad);
+
+ gst_pad_set_active (gpad, TRUE);
+ GST_RTP_BIN_SHUTDOWN_UNLOCK (rtpbin);
+
+ gst_pad_sticky_events_foreach (pad, copy_sticky_events, gpad);
+ gst_element_add_pad (GST_ELEMENT_CAST (rtpbin), gpad);
+
+ done:
+ gst_object_unref (pad);
+
+ return;
+
+ shutdown:
+ {
+ GST_DEBUG ("ignoring, we are shutting down");
+ goto done;
+ }
+ fec_decoder_sink_failed:
+ {
+    g_warning ("rtpbin: failed to get fec decoder sink pad for session %u",
+ stream->session->id);
+ goto done;
+ }
+ fec_decoder_src_failed:
+ {
+    g_warning ("rtpbin: failed to get fec decoder src pad for session %u",
+ stream->session->id);
+ goto done;
+ }
+ fec_decoder_link_failed:
+ {
+ g_warning ("rtpbin: failed to link fec decoder for session %u",
+ stream->session->id);
+ goto done;
+ }
+ }
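The ghost pads created here surface as pad-added on the bin. A sketch of a matching application handler (the depayloader passed through user data is hypothetical):

    static void
    rtpbin_pad_added_cb (GstElement * rtpbin, GstPad * pad, gpointer udata)
    {
      GstElement *depay = udata;        /* hypothetical depayloader */
      gchar *name = gst_pad_get_name (pad);

      /* names follow recv_rtp_src_<session>_<ssrc>_<pt>, as built above */
      if (g_str_has_prefix (name, "recv_rtp_src_")) {
        GstPad *sinkpad = gst_element_get_static_pad (depay, "sink");

        if (!gst_pad_is_linked (sinkpad))
          gst_pad_link (pad, sinkpad);
        gst_object_unref (sinkpad);
      }
      g_free (name);
    }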
+
+ /* a new pad (payload type) was created in @session. This signal is emitted
+  * from the payload demuxer. */
+ static void
+ new_payload_found (GstElement * element, guint pt, GstPad * pad,
+ GstRtpBinStream * stream)
+ {
+ GstRtpBin *rtpbin;
+
+ rtpbin = stream->bin;
+
+ GST_DEBUG_OBJECT (rtpbin, "new payload pad %u", pt);
+
+ expose_recv_src_pad (rtpbin, pad, stream, pt);
+ }
+
+ static void
+ payload_pad_removed (GstElement * element, GstPad * pad,
+ GstRtpBinStream * stream)
+ {
+ GstRtpBin *rtpbin;
+ GstPad *gpad;
+
+ rtpbin = stream->bin;
+
+ GST_DEBUG ("payload pad removed");
+
+ GST_RTP_BIN_DYN_LOCK (rtpbin);
+ if ((gpad = g_object_get_data (G_OBJECT (pad), "GstRTPBin.ghostpad"))) {
+ g_object_set_data (G_OBJECT (pad), "GstRTPBin.ghostpad", NULL);
+
+ gst_pad_set_active (gpad, FALSE);
+ gst_element_remove_pad (GST_ELEMENT_CAST (rtpbin), gpad);
+ }
+ GST_RTP_BIN_DYN_UNLOCK (rtpbin);
+ }
+
+ static GstCaps *
+ pt_map_requested (GstElement * element, guint pt, GstRtpBinSession * session)
+ {
+ GstRtpBin *rtpbin;
+ GstCaps *caps;
+
+ rtpbin = session->bin;
+
+ GST_DEBUG_OBJECT (rtpbin, "payload map requested for pt %u in session %u", pt,
+ session->id);
+
+ caps = get_pt_map (session, pt);
+ if (!caps)
+ goto no_caps;
+
+ return caps;
+
+ /* ERRORS */
+ no_caps:
+ {
+ GST_DEBUG_OBJECT (rtpbin, "could not get caps");
+ return NULL;
+ }
+ }
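When get_pt_map() finds nothing cached, the request-pt-map signal reaches the application. A sketch of a handler; payload type 96 mapping to H264 is an assumption:

    static GstCaps *
    request_pt_map_cb (GstElement * rtpbin, guint session, guint pt,
        gpointer udata)
    {
      if (pt == 96)
        return gst_caps_new_simple ("application/x-rtp",
            "media", G_TYPE_STRING, "video",
            "clock-rate", G_TYPE_INT, 90000,
            "encoding-name", G_TYPE_STRING, "H264", NULL);

      return NULL;              /* unknown pt, no caps available */
    }

    /* ... */
    g_signal_connect (rtpbin, "request-pt-map",
        G_CALLBACK (request_pt_map_cb), NULL);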
+
+ static GstCaps *
+ ptdemux_pt_map_requested (GstElement * element, guint pt,
+ GstRtpBinSession * session)
+ {
+ GstCaps *ret = pt_map_requested (element, pt, session);
+
+ if (ret && gst_caps_get_size (ret) == 1) {
+ const GstStructure *s = gst_caps_get_structure (ret, 0);
+ gboolean is_fec;
+
+ if (gst_structure_get_boolean (s, "is-fec", &is_fec) && is_fec) {
+ GValue v = G_VALUE_INIT;
+ GValue v2 = G_VALUE_INIT;
+
+ GST_INFO_OBJECT (session->bin, "Will ignore FEC pt %u in session %u", pt,
+ session->id);
+ g_value_init (&v, GST_TYPE_ARRAY);
+ g_value_init (&v2, G_TYPE_INT);
+ g_object_get_property (G_OBJECT (element), "ignored-payload-types", &v);
+ g_value_set_int (&v2, pt);
+ gst_value_array_append_value (&v, &v2);
+ g_value_unset (&v2);
+ g_object_set_property (G_OBJECT (element), "ignored-payload-types", &v);
+ g_value_unset (&v);
+ }
+ }
+
+ return ret;
+ }
+
+ static void
+ payload_type_change (GstElement * element, guint pt, GstRtpBinSession * session)
+ {
+ GST_DEBUG_OBJECT (session->bin,
+ "emitting signal for pt type changed to %u in session %u", pt,
+ session->id);
+
+ g_signal_emit (session->bin, gst_rtp_bin_signals[SIGNAL_PAYLOAD_TYPE_CHANGE],
+ 0, session->id, pt);
+ }
+
+ /* emitted when caps changed for the session */
+ static void
+ caps_changed (GstPad * pad, GParamSpec * pspec, GstRtpBinSession * session)
+ {
+ GstRtpBin *bin;
+ GstCaps *caps;
+ gint payload;
+ const GstStructure *s;
+
+ bin = session->bin;
+
+ g_object_get (pad, "caps", &caps, NULL);
+
+ if (caps == NULL)
+ return;
+
+ GST_DEBUG_OBJECT (bin, "got caps %" GST_PTR_FORMAT, caps);
+
+ s = gst_caps_get_structure (caps, 0);
+
+ /* get payload, finish when it's not there */
+ if (!gst_structure_get_int (s, "payload", &payload)) {
+ gst_caps_unref (caps);
+ return;
+ }
+
+ GST_RTP_SESSION_LOCK (session);
+ GST_DEBUG_OBJECT (bin, "insert caps for payload %d", payload);
+ g_hash_table_insert (session->ptmap, GINT_TO_POINTER (payload), caps);
+ GST_RTP_SESSION_UNLOCK (session);
+ }
+
+ /* a new pad (SSRC) was created in @session */
+ static void
+ new_ssrc_pad_found (GstElement * element, guint ssrc, GstPad * pad,
+ GstRtpBinSession * session)
+ {
+ GstRtpBin *rtpbin;
+ GstRtpBinStream *stream;
+ GstPad *sinkpad, *srcpad;
+ gchar *padname;
+
+ rtpbin = session->bin;
+
+ GST_DEBUG_OBJECT (rtpbin, "new SSRC pad %08x, %s:%s", ssrc,
+ GST_DEBUG_PAD_NAME (pad));
+
+ GST_RTP_BIN_SHUTDOWN_LOCK (rtpbin, shutdown);
+
+ GST_RTP_SESSION_LOCK (session);
+
+ /* create new stream */
+ stream = create_stream (session, ssrc);
+ if (!stream)
+ goto no_stream;
+
+ /* get pad and link */
+ GST_DEBUG_OBJECT (rtpbin, "linking jitterbuffer RTP");
+ padname = g_strdup_printf ("src_%u", ssrc);
+ srcpad = gst_element_get_static_pad (element, padname);
+ g_free (padname);
+
+ if (session->fec_decoder) {
+ sinkpad = gst_element_get_static_pad (session->fec_decoder, "sink");
+ gst_pad_link_full (srcpad, sinkpad, GST_PAD_LINK_CHECK_NOTHING);
+ gst_object_unref (sinkpad);
+ gst_object_unref (srcpad);
+ srcpad = gst_element_get_static_pad (session->fec_decoder, "src");
+ }
+
+ sinkpad = gst_element_get_static_pad (stream->buffer, "sink");
+ gst_pad_link_full (srcpad, sinkpad, GST_PAD_LINK_CHECK_NOTHING);
+ gst_object_unref (sinkpad);
+ gst_object_unref (srcpad);
+
+ sinkpad = gst_element_request_pad_simple (stream->buffer, "sink_rtcp");
+ if (sinkpad) {
+ GST_DEBUG_OBJECT (rtpbin, "linking jitterbuffer RTCP");
+ padname = g_strdup_printf ("rtcp_src_%u", ssrc);
+ srcpad = gst_element_get_static_pad (element, padname);
+ g_free (padname);
+ gst_pad_link_full (srcpad, sinkpad, GST_PAD_LINK_CHECK_NOTHING);
+ gst_object_unref (sinkpad);
+ gst_object_unref (srcpad);
+ }
+
+ if (g_signal_lookup ("handle-sync", G_OBJECT_TYPE (stream->buffer)) != 0) {
+ /* connect to the RTCP sync signal from the jitterbuffer */
+ GST_DEBUG_OBJECT (rtpbin, "connecting sync signal");
+ stream->buffer_handlesync_sig = g_signal_connect (stream->buffer,
+ "handle-sync", (GCallback) gst_rtp_bin_handle_sync, stream);
+ }
+
+ if (stream->demux) {
+ /* connect to the new-pad signal of the payload demuxer, this will expose the
+ * new pad by ghosting it. */
+ stream->demux_newpad_sig = g_signal_connect (stream->demux,
+ "new-payload-type", (GCallback) new_payload_found, stream);
+ stream->demux_padremoved_sig = g_signal_connect (stream->demux,
+ "pad-removed", (GCallback) payload_pad_removed, stream);
+
+ /* connect to the request-pt-map signal. This signal will be emitted by the
+ * demuxer so that it can apply a proper caps on the buffers for the
+ * depayloaders. */
+ stream->demux_ptreq_sig = g_signal_connect (stream->demux,
+ "request-pt-map", (GCallback) ptdemux_pt_map_requested, session);
+ /* connect to the signal so it can be forwarded. */
+ stream->demux_ptchange_sig = g_signal_connect (stream->demux,
+ "payload-type-change", (GCallback) payload_type_change, session);
+
+ GST_RTP_SESSION_UNLOCK (session);
+ GST_RTP_BIN_SHUTDOWN_UNLOCK (rtpbin);
+ } else {
+    /* no payload demuxer; expose the jitterbuffer src pad directly */
+ GstPad *pad;
+
+ pad = gst_element_get_static_pad (stream->buffer, "src");
+
+ GST_RTP_SESSION_UNLOCK (session);
+ GST_RTP_BIN_SHUTDOWN_UNLOCK (rtpbin);
+
+ expose_recv_src_pad (rtpbin, pad, stream, 255);
+
+ gst_object_unref (pad);
+ }
+
+ return;
+
+ /* ERRORS */
+ shutdown:
+ {
+ GST_DEBUG_OBJECT (rtpbin, "we are shutting down");
+ return;
+ }
+ no_stream:
+ {
+ GST_RTP_SESSION_UNLOCK (session);
+ GST_RTP_BIN_SHUTDOWN_UNLOCK (rtpbin);
+ GST_DEBUG_OBJECT (rtpbin, "could not create stream");
+ return;
+ }
+ }
+
+ static GstPad *
+ complete_session_sink (GstRtpBin * rtpbin, GstRtpBinSession * session)
+ {
+ guint sessid = session->id;
+ GstPad *recv_rtp_sink;
+ GstElement *decoder;
+
+ g_assert (!session->recv_rtp_sink);
+
+ /* get recv_rtp pad and store */
+ session->recv_rtp_sink =
+ gst_element_request_pad_simple (session->session, "recv_rtp_sink");
+ if (session->recv_rtp_sink == NULL)
+ goto pad_failed;
+
+ g_signal_connect (session->recv_rtp_sink, "notify::caps",
+ (GCallback) caps_changed, session);
+
+ GST_DEBUG_OBJECT (rtpbin, "requesting RTP decoder");
+ decoder = session_request_element (session, SIGNAL_REQUEST_RTP_DECODER);
+ if (decoder) {
+ GstPad *decsrc, *decsink;
+ GstPadLinkReturn ret;
+
+ GST_DEBUG_OBJECT (rtpbin, "linking RTP decoder");
+ decsink = gst_element_get_static_pad (decoder, "rtp_sink");
+ if (decsink == NULL)
+ goto dec_sink_failed;
+
+ recv_rtp_sink = decsink;
+
+ decsrc = gst_element_get_static_pad (decoder, "rtp_src");
+ if (decsrc == NULL)
+ goto dec_src_failed;
+
+ ret = gst_pad_link (decsrc, session->recv_rtp_sink);
+
+ gst_object_unref (decsrc);
+
+ if (ret != GST_PAD_LINK_OK)
+ goto dec_link_failed;
+
+ } else {
+ GST_DEBUG_OBJECT (rtpbin, "no RTP decoder given");
+ recv_rtp_sink = gst_object_ref (session->recv_rtp_sink);
+ }
+
+ return recv_rtp_sink;
+
+ /* ERRORS */
+ pad_failed:
+ {
+ g_warning ("rtpbin: failed to get session recv_rtp_sink pad");
+ return NULL;
+ }
+ dec_sink_failed:
+ {
+ g_warning ("rtpbin: failed to get decoder sink pad for session %u", sessid);
+ return NULL;
+ }
+ dec_src_failed:
+ {
+ g_warning ("rtpbin: failed to get decoder src pad for session %u", sessid);
+ gst_object_unref (recv_rtp_sink);
+ return NULL;
+ }
+ dec_link_failed:
+ {
+ g_warning ("rtpbin: failed to link rtp decoder for session %u", sessid);
+ gst_object_unref (recv_rtp_sink);
+ return NULL;
+ }
+ }
+
+ static void
+ complete_session_receiver (GstRtpBin * rtpbin, GstRtpBinSession * session,
+ guint sessid)
+ {
+ GstElement *aux;
+ GstPad *recv_rtp_src;
+
+ g_assert (!session->recv_rtp_src);
+
+ session->recv_rtp_src =
+ gst_element_get_static_pad (session->session, "recv_rtp_src");
+ if (session->recv_rtp_src == NULL)
+ goto pad_failed;
+
+ /* find out if we need AUX elements */
+ aux = session_request_element (session, SIGNAL_REQUEST_AUX_RECEIVER);
+ if (aux) {
+ gchar *pname;
+ GstPad *auxsink;
+ GstPadLinkReturn ret;
+
+ GST_DEBUG_OBJECT (rtpbin, "linking AUX receiver");
+
+ pname = g_strdup_printf ("sink_%u", sessid);
+ auxsink = gst_element_get_static_pad (aux, pname);
+ g_free (pname);
+ if (auxsink == NULL)
+ goto aux_sink_failed;
+
+ ret = gst_pad_link (session->recv_rtp_src, auxsink);
+ gst_object_unref (auxsink);
+ if (ret != GST_PAD_LINK_OK)
+ goto aux_link_failed;
+
+ /* this can be NULL when this AUX element is not to be linked any further */
+ pname = g_strdup_printf ("src_%u", sessid);
+ recv_rtp_src = gst_element_get_static_pad (aux, pname);
+ g_free (pname);
+ } else {
+ recv_rtp_src = gst_object_ref (session->recv_rtp_src);
+ }
+
+ /* Add a storage element if needed */
+ if (recv_rtp_src && session->storage) {
+ GstPadLinkReturn ret;
+ GstPad *sinkpad = gst_element_get_static_pad (session->storage, "sink");
+
+ ret = gst_pad_link (recv_rtp_src, sinkpad);
+
+ gst_object_unref (sinkpad);
+ gst_object_unref (recv_rtp_src);
+
+ if (ret != GST_PAD_LINK_OK)
+ goto storage_link_failed;
+
+ recv_rtp_src = gst_element_get_static_pad (session->storage, "src");
+ }
+
+ if (recv_rtp_src) {
+ GstPad *sinkdpad;
+
+ GST_DEBUG_OBJECT (rtpbin, "getting demuxer RTP sink pad");
+ sinkdpad = gst_element_get_static_pad (session->demux, "sink");
+ GST_DEBUG_OBJECT (rtpbin, "linking demuxer RTP sink pad");
+ gst_pad_link_full (recv_rtp_src, sinkdpad, GST_PAD_LINK_CHECK_NOTHING);
+ gst_object_unref (sinkdpad);
+ gst_object_unref (recv_rtp_src);
+
+ /* connect to the new-ssrc-pad signal of the SSRC demuxer */
+ session->demux_newpad_sig = g_signal_connect (session->demux,
+ "new-ssrc-pad", (GCallback) new_ssrc_pad_found, session);
+ session->demux_padremoved_sig = g_signal_connect (session->demux,
+ "removed-ssrc-pad", (GCallback) ssrc_demux_pad_removed, session);
+ }
+
+ return;
+
+ pad_failed:
+ {
+ g_warning ("rtpbin: failed to get session recv_rtp_src pad");
+ return;
+ }
+ aux_sink_failed:
+ {
+ g_warning ("rtpbin: failed to get AUX sink pad for session %u", sessid);
+ return;
+ }
+ aux_link_failed:
+ {
+ g_warning ("rtpbin: failed to link AUX pad to session %u", sessid);
+ return;
+ }
+ storage_link_failed:
+ {
+ g_warning ("rtpbin: failed to link storage");
+ return;
+ }
+ }
+
+ /* Create a pad for receiving RTP for the session in @name. Must be called with
+ * RTP_BIN_LOCK.
+ */
+ static GstPad *
+ create_recv_rtp (GstRtpBin * rtpbin, GstPadTemplate * templ, const gchar * name)
+ {
+ guint sessid;
+ GstRtpBinSession *session;
+ GstPad *recv_rtp_sink;
+
+ /* first get the session number */
+ if (name == NULL || sscanf (name, "recv_rtp_sink_%u", &sessid) != 1)
+ goto no_name;
+
+ GST_DEBUG_OBJECT (rtpbin, "finding session %u", sessid);
+
+ /* get or create session */
+ session = find_session_by_id (rtpbin, sessid);
+ if (!session) {
+ GST_DEBUG_OBJECT (rtpbin, "creating session %u", sessid);
+ /* create session now */
+ session = create_session (rtpbin, sessid);
+ if (session == NULL)
+ goto create_error;
+ }
+
+ /* check if pad was requested */
+ if (session->recv_rtp_sink_ghost != NULL)
+ return session->recv_rtp_sink_ghost;
+
+ /* setup the session sink pad */
+ recv_rtp_sink = complete_session_sink (rtpbin, session);
+ if (!recv_rtp_sink)
+ goto session_sink_failed;
+
+ GST_DEBUG_OBJECT (rtpbin, "ghosting session sink pad");
+ session->recv_rtp_sink_ghost =
+ gst_ghost_pad_new_from_template (name, recv_rtp_sink, templ);
+ gst_object_unref (recv_rtp_sink);
+ gst_pad_set_active (session->recv_rtp_sink_ghost, TRUE);
+ gst_element_add_pad (GST_ELEMENT_CAST (rtpbin), session->recv_rtp_sink_ghost);
+
+ complete_session_receiver (rtpbin, session, sessid);
+
+ return session->recv_rtp_sink_ghost;
+
+ /* ERRORS */
+ no_name:
+ {
+ g_warning ("rtpbin: cannot find session id for pad: %s",
+ GST_STR_NULL (name));
+ return NULL;
+ }
+ create_error:
+ {
+ /* create_session already warned */
+ return NULL;
+ }
+ session_sink_failed:
+ {
+ /* warning already done */
+ return NULL;
+ }
+ }
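From the application, this path is reached by requesting the pad by name; a sketch, assuming rtpbin and a udpsrc already added to the same pipeline and PCMU audio arriving on port 5000:

    GstCaps *caps;
    GstPad *srcpad, *sinkpad;

    caps = gst_caps_from_string ("application/x-rtp,media=audio,"
        "clock-rate=8000,encoding-name=PCMU");
    g_object_set (udpsrc, "port", 5000, "caps", caps, NULL);
    gst_caps_unref (caps);

    /* triggers create_recv_rtp() for session 0 */
    sinkpad = gst_element_request_pad_simple (rtpbin, "recv_rtp_sink_0");
    srcpad = gst_element_get_static_pad (udpsrc, "src");
    gst_pad_link (srcpad, sinkpad);
    gst_object_unref (srcpad);
    /* keep sinkpad; it is handed back to
     * gst_element_release_request_pad() on teardown */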
+
+ static void
+ remove_recv_rtp (GstRtpBin * rtpbin, GstRtpBinSession * session)
+ {
+ if (session->demux_newpad_sig) {
+ g_signal_handler_disconnect (session->demux, session->demux_newpad_sig);
+ session->demux_newpad_sig = 0;
+ }
+ if (session->demux_padremoved_sig) {
+ g_signal_handler_disconnect (session->demux, session->demux_padremoved_sig);
+ session->demux_padremoved_sig = 0;
+ }
+ if (session->recv_rtp_src) {
+ gst_object_unref (session->recv_rtp_src);
+ session->recv_rtp_src = NULL;
+ }
+ if (session->recv_rtp_sink) {
+ gst_element_release_request_pad (session->session, session->recv_rtp_sink);
+ gst_object_unref (session->recv_rtp_sink);
+ session->recv_rtp_sink = NULL;
+ }
+ if (session->recv_rtp_sink_ghost) {
+ gst_pad_set_active (session->recv_rtp_sink_ghost, FALSE);
+ gst_element_remove_pad (GST_ELEMENT_CAST (rtpbin),
+ session->recv_rtp_sink_ghost);
+ session->recv_rtp_sink_ghost = NULL;
+ }
+ }
+
+ static GstPad *
+ complete_session_fec (GstRtpBin * rtpbin, GstRtpBinSession * session,
+ guint fec_idx)
+ {
+ gchar *padname;
+ GstPad *ret;
+
+ if (!ensure_fec_decoder (rtpbin, session))
+ goto no_decoder;
+
+ GST_DEBUG_OBJECT (rtpbin, "getting FEC sink pad");
+ padname = g_strdup_printf ("fec_%u", fec_idx);
+ ret = gst_element_request_pad_simple (session->fec_decoder, padname);
+ g_free (padname);
+
+ if (ret == NULL)
+ goto pad_failed;
+
+ session->recv_fec_sinks = g_slist_prepend (session->recv_fec_sinks, ret);
+
+ return ret;
+
+ pad_failed:
+ {
+ g_warning ("rtpbin: failed to get decoder fec pad");
+ return NULL;
+ }
+ no_decoder:
+ {
+ g_warning ("rtpbin: failed to build FEC decoder for session %u",
+ session->id);
+ return NULL;
+ }
+ }
+
+ static GstPad *
+ complete_session_rtcp (GstRtpBin * rtpbin, GstRtpBinSession * session,
+ guint sessid)
+ {
+ GstElement *decoder;
+ GstPad *sinkdpad;
+ GstPad *decsink = NULL;
+
+ /* get recv_rtp pad and store */
+ GST_DEBUG_OBJECT (rtpbin, "getting RTCP sink pad");
+ session->recv_rtcp_sink =
+ gst_element_request_pad_simple (session->session, "recv_rtcp_sink");
+ if (session->recv_rtcp_sink == NULL)
+ goto pad_failed;
+
+ GST_DEBUG_OBJECT (rtpbin, "getting RTCP decoder");
+ decoder = session_request_element (session, SIGNAL_REQUEST_RTCP_DECODER);
+ if (decoder) {
+ GstPad *decsrc;
+ GstPadLinkReturn ret;
+
+ GST_DEBUG_OBJECT (rtpbin, "linking RTCP decoder");
+ decsink = gst_element_get_static_pad (decoder, "rtcp_sink");
+ decsrc = gst_element_get_static_pad (decoder, "rtcp_src");
+
+ if (decsink == NULL)
+ goto dec_sink_failed;
+
+ if (decsrc == NULL)
+ goto dec_src_failed;
+
+ ret = gst_pad_link (decsrc, session->recv_rtcp_sink);
+
+ gst_object_unref (decsrc);
+
+ if (ret != GST_PAD_LINK_OK)
+ goto dec_link_failed;
+ } else {
+ GST_DEBUG_OBJECT (rtpbin, "no RTCP decoder given");
+ decsink = gst_object_ref (session->recv_rtcp_sink);
+ }
+
+ /* get srcpad, link to SSRCDemux */
+ GST_DEBUG_OBJECT (rtpbin, "getting sync src pad");
+ session->sync_src = gst_element_get_static_pad (session->session, "sync_src");
+ if (session->sync_src == NULL)
+ goto src_pad_failed;
+
+ GST_DEBUG_OBJECT (rtpbin, "getting demuxer RTCP sink pad");
+ sinkdpad = gst_element_get_static_pad (session->demux, "rtcp_sink");
+ gst_pad_link_full (session->sync_src, sinkdpad, GST_PAD_LINK_CHECK_NOTHING);
+ gst_object_unref (sinkdpad);
+
+ return decsink;
+
+ pad_failed:
+ {
+ g_warning ("rtpbin: failed to get session rtcp_sink pad");
+ return NULL;
+ }
+ dec_sink_failed:
+ {
+ g_warning ("rtpbin: failed to get decoder sink pad for session %u", sessid);
+ return NULL;
+ }
+ dec_src_failed:
+ {
+ g_warning ("rtpbin: failed to get decoder src pad for session %u", sessid);
+ goto cleanup;
+ }
+ dec_link_failed:
+ {
+ g_warning ("rtpbin: failed to link rtcp decoder for session %u", sessid);
+ goto cleanup;
+ }
+ src_pad_failed:
+ {
+ g_warning ("rtpbin: failed to get session sync_src pad");
+ }
+
+ cleanup:
+ gst_object_unref (decsink);
+ return NULL;
+ }
+
+ /* Create a pad for receiving RTCP for the session in @name. Must be called with
+ * RTP_BIN_LOCK.
+ */
+ static GstPad *
+ create_recv_rtcp (GstRtpBin * rtpbin, GstPadTemplate * templ,
+ const gchar * name)
+ {
+ guint sessid;
+ GstRtpBinSession *session;
+ GstPad *decsink = NULL;
+
+ /* first get the session number */
+ if (name == NULL || sscanf (name, "recv_rtcp_sink_%u", &sessid) != 1)
+ goto no_name;
+
+ GST_DEBUG_OBJECT (rtpbin, "finding session %u", sessid);
+
+ /* get or create the session */
+ session = find_session_by_id (rtpbin, sessid);
+ if (!session) {
+ GST_DEBUG_OBJECT (rtpbin, "creating session %u", sessid);
+ /* create session now */
+ session = create_session (rtpbin, sessid);
+ if (session == NULL)
+ goto create_error;
+ }
+
+ /* check if pad was requested */
+ if (session->recv_rtcp_sink_ghost != NULL)
+ return session->recv_rtcp_sink_ghost;
+
+ decsink = complete_session_rtcp (rtpbin, session, sessid);
+ if (!decsink)
+ goto create_error;
+
+ session->recv_rtcp_sink_ghost =
+ gst_ghost_pad_new_from_template (name, decsink, templ);
+ gst_object_unref (decsink);
+ gst_pad_set_active (session->recv_rtcp_sink_ghost, TRUE);
+ gst_element_add_pad (GST_ELEMENT_CAST (rtpbin),
+ session->recv_rtcp_sink_ghost);
+
+ return session->recv_rtcp_sink_ghost;
+
+ /* ERRORS */
+ no_name:
+ {
+ g_warning ("rtpbin: cannot find session id for pad: %s",
+ GST_STR_NULL (name));
+ return NULL;
+ }
+ create_error:
+ {
+ /* create_session already warned */
+ return NULL;
+ }
+ }
+
+ static GstPad *
+ create_recv_fec (GstRtpBin * rtpbin, GstPadTemplate * templ, const gchar * name)
+ {
+ guint sessid, fec_idx;
+ GstRtpBinSession *session;
+ GstPad *decsink = NULL;
+ GstPad *ghost;
+
+ /* first get the session number */
+ if (name == NULL
+ || sscanf (name, "recv_fec_sink_%u_%u", &sessid, &fec_idx) != 2)
+ goto no_name;
+
+ if (fec_idx > 1)
+ goto invalid_idx;
+
+ GST_DEBUG_OBJECT (rtpbin, "finding session %u", sessid);
+
+ /* get or create the session */
+ session = find_session_by_id (rtpbin, sessid);
+ if (!session) {
+ GST_DEBUG_OBJECT (rtpbin, "creating session %u", sessid);
+ /* create session now */
+ session = create_session (rtpbin, sessid);
+ if (session == NULL)
+ goto create_error;
+ }
+
+ decsink = complete_session_fec (rtpbin, session, fec_idx);
+ if (!decsink)
+ goto create_error;
+
+ ghost = gst_ghost_pad_new_from_template (name, decsink, templ);
+ session->recv_fec_sink_ghosts =
+ g_slist_prepend (session->recv_fec_sink_ghosts, ghost);
+ gst_object_unref (decsink);
+ gst_pad_set_active (ghost, TRUE);
+ gst_element_add_pad (GST_ELEMENT_CAST (rtpbin), ghost);
+
+ return ghost;
+
+ /* ERRORS */
+ no_name:
+ {
+ g_warning ("rtpbin: cannot find session id for pad: %s",
+ GST_STR_NULL (name));
+ return NULL;
+ }
+ invalid_idx:
+ {
+ g_warning ("rtpbin: invalid FEC index: %s", GST_STR_NULL (name));
+ return NULL;
+ }
+ create_error:
+ {
+ /* create_session already warned */
+ return NULL;
+ }
+ }
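A FEC stream is fed in the same way, on its own request pad; note that the FEC index must be 0 or 1 per the check above. A sketch with a hypothetical second udpsrc carrying the FEC packets for session 0:

    GstPad *fecpad, *fecsrc;

    fecpad = gst_element_request_pad_simple (rtpbin, "recv_fec_sink_0_0");
    fecsrc = gst_element_get_static_pad (fec_udpsrc, "src");
    gst_pad_link (fecsrc, fecpad);
    gst_object_unref (fecsrc);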
+
+ static void
+ remove_recv_rtcp (GstRtpBin * rtpbin, GstRtpBinSession * session)
+ {
+ if (session->recv_rtcp_sink_ghost) {
+ gst_pad_set_active (session->recv_rtcp_sink_ghost, FALSE);
+ gst_element_remove_pad (GST_ELEMENT_CAST (rtpbin),
+ session->recv_rtcp_sink_ghost);
+ session->recv_rtcp_sink_ghost = NULL;
+ }
+ if (session->sync_src) {
+ /* releasing the request pad should also unref the sync pad */
+ gst_object_unref (session->sync_src);
+ session->sync_src = NULL;
+ }
+ if (session->recv_rtcp_sink) {
+ gst_element_release_request_pad (session->session, session->recv_rtcp_sink);
+ gst_object_unref (session->recv_rtcp_sink);
+ session->recv_rtcp_sink = NULL;
+ }
+ }
+
+ static void
+ remove_recv_fec_for_pad (GstRtpBin * rtpbin, GstRtpBinSession * session,
+ GstPad * ghost)
+ {
+ GSList *item;
+ GstPad *target;
+
+ target = gst_ghost_pad_get_target (GST_GHOST_PAD (ghost));
+
+ if (target) {
+ item = g_slist_find (session->recv_fec_sinks, target);
+ if (item) {
+ gst_element_release_request_pad (session->fec_decoder, item->data);
+ session->recv_fec_sinks =
+ g_slist_delete_link (session->recv_fec_sinks, item);
+ }
+ gst_object_unref (target);
+ }
+
+ item = g_slist_find (session->recv_fec_sink_ghosts, ghost);
+ if (item)
+ session->recv_fec_sink_ghosts =
+ g_slist_delete_link (session->recv_fec_sink_ghosts, item);
+
+ gst_pad_set_active (ghost, FALSE);
+ gst_element_remove_pad (GST_ELEMENT_CAST (rtpbin), ghost);
+ }
+
+ static void
+ remove_recv_fec (GstRtpBin * rtpbin, GstRtpBinSession * session)
+ {
+ GSList *copy;
+ GSList *tmp;
+
+ copy = g_slist_copy (session->recv_fec_sink_ghosts);
+
+ for (tmp = copy; tmp; tmp = tmp->next) {
+ remove_recv_fec_for_pad (rtpbin, session, (GstPad *) tmp->data);
+ }
+
+ g_slist_free (copy);
+ }
+
+ static gboolean
+ complete_session_src (GstRtpBin * rtpbin, GstRtpBinSession * session)
+ {
+ gchar *gname;
+ guint sessid = session->id;
+ GstPad *send_rtp_src;
+ GstElement *encoder;
+ GstElementClass *klass;
+ GstPadTemplate *templ;
+ gboolean ret = FALSE;
+
+ /* get srcpad */
+ send_rtp_src = gst_element_get_static_pad (session->session, "send_rtp_src");
+
+ if (send_rtp_src == NULL)
+ goto no_srcpad;
+
+ GST_DEBUG_OBJECT (rtpbin, "getting RTP encoder");
+ encoder = session_request_element (session, SIGNAL_REQUEST_RTP_ENCODER);
+ if (encoder) {
+ gchar *ename;
+ GstPad *encsrc, *encsink;
+ GstPadLinkReturn ret;
+
+ GST_DEBUG_OBJECT (rtpbin, "linking RTP encoder");
+ ename = g_strdup_printf ("rtp_src_%u", sessid);
+ encsrc = gst_element_get_static_pad (encoder, ename);
+ g_free (ename);
+
+ if (encsrc == NULL)
+ goto enc_src_failed;
+
+ ename = g_strdup_printf ("rtp_sink_%u", sessid);
+ encsink = gst_element_get_static_pad (encoder, ename);
+ g_free (ename);
+ if (encsink == NULL)
+ goto enc_sink_failed;
+
+ ret = gst_pad_link (send_rtp_src, encsink);
+ gst_object_unref (encsink);
+ gst_object_unref (send_rtp_src);
+
+ send_rtp_src = encsrc;
+
+ if (ret != GST_PAD_LINK_OK)
+ goto enc_link_failed;
+ } else {
+ GST_DEBUG_OBJECT (rtpbin, "no RTP encoder given");
+ }
+
+ /* ghost the new source pad */
+ klass = GST_ELEMENT_GET_CLASS (rtpbin);
+ gname = g_strdup_printf ("send_rtp_src_%u", sessid);
+ templ = gst_element_class_get_pad_template (klass, "send_rtp_src_%u");
+ session->send_rtp_src_ghost =
+ gst_ghost_pad_new_from_template (gname, send_rtp_src, templ);
+ gst_pad_set_active (session->send_rtp_src_ghost, TRUE);
+ gst_pad_sticky_events_foreach (send_rtp_src, copy_sticky_events,
+ session->send_rtp_src_ghost);
+ gst_element_add_pad (GST_ELEMENT_CAST (rtpbin), session->send_rtp_src_ghost);
+ g_free (gname);
+
+ ret = TRUE;
+
+ done:
+ if (send_rtp_src)
+ gst_object_unref (send_rtp_src);
+
+ return ret;
+
+ /* ERRORS */
+ no_srcpad:
+ {
+ g_warning ("rtpbin: failed to get rtp source pad for session %u", sessid);
+ goto done;
+ }
+ enc_src_failed:
+ {
+ g_warning ("rtpbin: failed to get %" GST_PTR_FORMAT
+ " src pad for session %u", encoder, sessid);
+ goto done;
+ }
+ enc_sink_failed:
+ {
+ g_warning ("rtpbin: failed to get %" GST_PTR_FORMAT
+ " sink pad for session %u", encoder, sessid);
+ goto done;
+ }
+ enc_link_failed:
+ {
+ g_warning ("rtpbin: failed to link %" GST_PTR_FORMAT " for session %u",
+ encoder, sessid);
+ goto done;
+ }
+ }
+
+ static gboolean
+ setup_aux_sender_fold (const GValue * item, GValue * result, gpointer user_data)
+ {
+ GstPad *pad;
+ gchar *name;
+ guint sessid;
+ GstRtpBinSession *session = user_data, *newsess;
+ GstRtpBin *rtpbin = session->bin;
+ GstPadLinkReturn ret;
+
+ pad = g_value_get_object (item);
+ name = gst_pad_get_name (pad);
+
+ if (name == NULL || sscanf (name, "src_%u", &sessid) != 1)
+ goto no_name;
+
+ g_free (name);
+
+ newsess = find_session_by_id (rtpbin, sessid);
+ if (newsess == NULL) {
+ /* create new session */
+ newsess = create_session (rtpbin, sessid);
+ if (newsess == NULL)
+ goto create_error;
+ } else if (newsess->send_rtp_sink != NULL)
+ goto existing_session;
+
+ /* get send_rtp pad and store */
+ newsess->send_rtp_sink =
+ gst_element_request_pad_simple (newsess->session, "send_rtp_sink");
+ if (newsess->send_rtp_sink == NULL)
+ goto pad_failed;
+
+ ret = gst_pad_link (pad, newsess->send_rtp_sink);
+ if (ret != GST_PAD_LINK_OK)
+ goto aux_link_failed;
+
+ if (!complete_session_src (rtpbin, newsess))
+ goto session_src_failed;
+
+ return TRUE;
+
+ /* ERRORS */
+ no_name:
+ {
+ GST_WARNING ("ignoring invalid pad name %s", GST_STR_NULL (name));
+ g_free (name);
+ return TRUE;
+ }
+ create_error:
+ {
+ /* create_session already warned */
+ return FALSE;
+ }
+ existing_session:
+ {
+    GST_DEBUG_OBJECT (rtpbin,
+        "skipping src_%u setup, since it is already configured.", sessid);
+ return TRUE;
+ }
+ pad_failed:
+ {
+ g_warning ("rtpbin: failed to get session pad for session %u", sessid);
+ return FALSE;
+ }
+ aux_link_failed:
+ {
+ g_warning ("rtpbin: failed to link AUX for session %u", sessid);
+ return FALSE;
+ }
+ session_src_failed:
+ {
+ g_warning ("rtpbin: failed to complete AUX for session %u", sessid);
+ return FALSE;
+ }
+ }
+
+ static gboolean
+ setup_aux_sender (GstRtpBin * rtpbin, GstRtpBinSession * session,
+ GstElement * aux)
+ {
+ GstIterator *it;
+ GValue result = { 0, };
+ GstIteratorResult res;
+
+ it = gst_element_iterate_src_pads (aux);
+ res = gst_iterator_fold (it, setup_aux_sender_fold, &result, session);
+ gst_iterator_free (it);
+
+ return res == GST_ITERATOR_DONE;
+ }
+
+ static void
+ fec_encoder_pad_added_cb (GstElement * encoder, GstPad * pad,
+ GstRtpBinSession * session)
+ {
+ GstElementClass *klass;
+ gchar *gname;
+ GstPadTemplate *templ;
+ guint fec_idx;
+ GstPad *ghost;
+
+ if (sscanf (GST_PAD_NAME (pad), "fec_%u", &fec_idx) != 1) {
+ GST_WARNING_OBJECT (session->bin,
+ "FEC encoder added pad with name not matching fec_%%u (%s)",
+ GST_PAD_NAME (pad));
+ goto done;
+ }
+
+ GST_INFO_OBJECT (session->bin, "FEC encoder for session %u exposed new pad",
+ session->id);
+
+ GST_RTP_BIN_LOCK (session->bin);
+ klass = GST_ELEMENT_GET_CLASS (session->bin);
+ gname = g_strdup_printf ("send_fec_src_%u_%u", session->id, fec_idx);
+ templ = gst_element_class_get_pad_template (klass, "send_fec_src_%u_%u");
+ ghost = gst_ghost_pad_new_from_template (gname, pad, templ);
+ session->send_fec_src_ghosts =
+ g_slist_prepend (session->send_fec_src_ghosts, ghost);
+ gst_pad_set_active (ghost, TRUE);
+ gst_pad_sticky_events_foreach (pad, copy_sticky_events, ghost);
+ gst_element_add_pad (GST_ELEMENT (session->bin), ghost);
+ g_free (gname);
+ GST_RTP_BIN_UNLOCK (session->bin);
+
+ done:
+ return;
+ }
+
+ static GstElement *
+ request_fec_encoder (GstRtpBin * rtpbin, GstRtpBinSession * session,
+ guint sessid)
+ {
+ GstElement *ret = NULL;
+ const gchar *factory;
+ gchar *sess_id_str;
+
+ sess_id_str = g_strdup_printf ("%u", sessid);
+ factory = gst_structure_get_string (rtpbin->fec_encoders, sess_id_str);
+ g_free (sess_id_str);
+
+ /* First try the property */
+ if (factory) {
+ GError *err = NULL;
+
+ ret =
+ gst_parse_bin_from_description_full (factory, TRUE, NULL,
+ GST_PARSE_FLAG_NO_SINGLE_ELEMENT_BINS | GST_PARSE_FLAG_FATAL_ERRORS,
+ &err);
+    if (!ret) {
+      GST_ERROR_OBJECT (rtpbin, "Failed to build encoder from factory: %s",
+          err->message);
+      g_clear_error (&err);
+      goto done;
+    }
+
+ bin_manage_element (session->bin, ret);
+ session->elements = g_slist_prepend (session->elements, ret);
+ GST_INFO_OBJECT (rtpbin, "Built FEC encoder: %" GST_PTR_FORMAT
+ " for session %u", ret, sessid);
+ }
+
+ /* Fallback to the signal */
+ if (!ret)
+ ret = session_request_element (session, SIGNAL_REQUEST_FEC_ENCODER);
+
+ if (ret) {
+ g_signal_connect (ret, "pad-added", G_CALLBACK (fec_encoder_pad_added_cb),
+ session);
+ }
+
+ done:
+ return ret;
+ }
+
+ /* Create a pad for sending RTP for the session in @name. Must be called with
+ * RTP_BIN_LOCK.
+ */
+ static GstPad *
+ create_send_rtp (GstRtpBin * rtpbin, GstPadTemplate * templ, const gchar * name)
+ {
+ gchar *pname;
+ guint sessid;
+ GstPad *send_rtp_sink;
+ GstElement *aux;
+ GstElement *encoder;
+ GstElement *prev = NULL;
+ GstRtpBinSession *session;
+
+ /* first get the session number */
+ if (name == NULL || sscanf (name, "send_rtp_sink_%u", &sessid) != 1)
+ goto no_name;
+
+ /* get or create session */
+ session = find_session_by_id (rtpbin, sessid);
+ if (!session) {
+ /* create session now */
+ session = create_session (rtpbin, sessid);
+ if (session == NULL)
+ goto create_error;
+ }
+
+ /* check if pad was requested */
+ if (session->send_rtp_sink_ghost != NULL)
+ return session->send_rtp_sink_ghost;
+
+ /* check if we are already using this session as a sender */
+ if (session->send_rtp_sink != NULL)
+ goto existing_session;
+
+ encoder = request_fec_encoder (rtpbin, session, sessid);
+
+ if (encoder) {
+ GST_DEBUG_OBJECT (rtpbin, "Linking FEC encoder");
+
+ send_rtp_sink = gst_element_get_static_pad (encoder, "sink");
+
+ if (!send_rtp_sink)
+ goto enc_sink_failed;
+
+ prev = encoder;
+ }
+
+ GST_DEBUG_OBJECT (rtpbin, "getting RTP AUX sender");
+ aux = session_request_element (session, SIGNAL_REQUEST_AUX_SENDER);
+ if (aux) {
+ GstPad *sinkpad;
+ GST_DEBUG_OBJECT (rtpbin, "linking AUX sender");
+ if (!setup_aux_sender (rtpbin, session, aux))
+ goto aux_session_failed;
+
+ pname = g_strdup_printf ("sink_%u", sessid);
+ sinkpad = gst_element_get_static_pad (aux, pname);
+ g_free (pname);
+
+ if (sinkpad == NULL)
+ goto aux_sink_failed;
+
+ if (!prev) {
+ send_rtp_sink = sinkpad;
+ } else {
+ GstPad *srcpad = gst_element_get_static_pad (prev, "src");
+ GstPadLinkReturn ret;
+
+ ret = gst_pad_link (srcpad, sinkpad);
+ gst_object_unref (srcpad);
+ if (ret != GST_PAD_LINK_OK) {
+ goto aux_link_failed;
+ }
+ }
+ prev = aux;
+ } else {
+ /* get send_rtp pad and store */
+ session->send_rtp_sink =
+ gst_element_request_pad_simple (session->session, "send_rtp_sink");
+ if (session->send_rtp_sink == NULL)
+ goto pad_failed;
+
+ if (!complete_session_src (rtpbin, session))
+ goto session_src_failed;
+
+ if (!prev) {
+ send_rtp_sink = gst_object_ref (session->send_rtp_sink);
+ } else {
+ GstPad *srcpad = gst_element_get_static_pad (prev, "src");
+ GstPadLinkReturn ret;
+
+ ret = gst_pad_link (srcpad, session->send_rtp_sink);
+ gst_object_unref (srcpad);
+ if (ret != GST_PAD_LINK_OK)
+ goto session_link_failed;
+ }
+ }
+
+ session->send_rtp_sink_ghost =
+ gst_ghost_pad_new_from_template (name, send_rtp_sink, templ);
+ gst_object_unref (send_rtp_sink);
+ gst_pad_set_active (session->send_rtp_sink_ghost, TRUE);
+ gst_element_add_pad (GST_ELEMENT_CAST (rtpbin), session->send_rtp_sink_ghost);
+
+ return session->send_rtp_sink_ghost;
+
+ /* ERRORS */
+ no_name:
+ {
+ g_warning ("rtpbin: cannot find session id for pad: %s",
+ GST_STR_NULL (name));
+ return NULL;
+ }
+ create_error:
+ {
+ /* create_session already warned */
+ return NULL;
+ }
+ existing_session:
+ {
+ g_warning ("rtpbin: session %u is already in use", sessid);
+ return NULL;
+ }
+ aux_session_failed:
+ {
+ g_warning ("rtpbin: failed to get AUX sink pad for session %u", sessid);
+ return NULL;
+ }
+ aux_sink_failed:
+ {
+ g_warning ("rtpbin: failed to get AUX sink pad for session %u", sessid);
+ return NULL;
+ }
+ aux_link_failed:
+ {
+ g_warning ("rtpbin: failed to link %" GST_PTR_FORMAT " for session %u",
+ aux, sessid);
+ return NULL;
+ }
+ pad_failed:
+ {
+ g_warning ("rtpbin: failed to get session pad for session %u", sessid);
+ return NULL;
+ }
+ session_src_failed:
+ {
+ g_warning ("rtpbin: failed to setup source pads for session %u", sessid);
+ return NULL;
+ }
+ session_link_failed:
+ {
+ g_warning ("rtpbin: failed to link %" GST_PTR_FORMAT " for session %u",
+ session, sessid);
+ return NULL;
+ }
+ enc_sink_failed:
+ {
+ g_warning ("rtpbin: failed to get %" GST_PTR_FORMAT
+ " sink pad for session %u", encoder, sessid);
+ return NULL;
+ }
+ }
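On the sending side, requesting the sink pad also creates the matching src pad via complete_session_src(). A sketch with a hypothetical payloader and udpsink in the same pipeline:

    GstPad *rtpsink, *srcpad, *sinkpad;

    /* triggers create_send_rtp() for session 0 */
    rtpsink = gst_element_request_pad_simple (rtpbin, "send_rtp_sink_0");
    srcpad = gst_element_get_static_pad (payloader, "src");
    gst_pad_link (srcpad, rtpsink);
    gst_object_unref (srcpad);

    /* the ghosted send_rtp_src_0 pad now exists */
    srcpad = gst_element_get_static_pad (rtpbin, "send_rtp_src_0");
    sinkpad = gst_element_get_static_pad (udpsink, "sink");
    gst_pad_link (srcpad, sinkpad);
    gst_object_unref (srcpad);
    gst_object_unref (sinkpad);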
+
+ static void
+ remove_send_rtp (GstRtpBin * rtpbin, GstRtpBinSession * session)
+ {
+ if (session->send_rtp_src_ghost) {
+ gst_pad_set_active (session->send_rtp_src_ghost, FALSE);
+ gst_element_remove_pad (GST_ELEMENT_CAST (rtpbin),
+ session->send_rtp_src_ghost);
+ session->send_rtp_src_ghost = NULL;
+ }
+ if (session->send_rtp_sink) {
+ gst_element_release_request_pad (GST_ELEMENT_CAST (session->session),
+ session->send_rtp_sink);
+ gst_object_unref (session->send_rtp_sink);
+ session->send_rtp_sink = NULL;
+ }
+ if (session->send_rtp_sink_ghost) {
+ gst_pad_set_active (session->send_rtp_sink_ghost, FALSE);
+ gst_element_remove_pad (GST_ELEMENT_CAST (rtpbin),
+ session->send_rtp_sink_ghost);
+ session->send_rtp_sink_ghost = NULL;
+ }
+ }
+
+ static void
+ remove_send_fec (GstRtpBin * rtpbin, GstRtpBinSession * session)
+ {
+ GSList *tmp;
+
+ for (tmp = session->send_fec_src_ghosts; tmp; tmp = tmp->next) {
+ GstPad *ghost = GST_PAD (tmp->data);
+ gst_pad_set_active (ghost, FALSE);
+ gst_element_remove_pad (GST_ELEMENT_CAST (rtpbin), ghost);
+ }
+
+ g_slist_free (session->send_fec_src_ghosts);
+ session->send_fec_src_ghosts = NULL;
+ }
+
+ /* Create a pad for sending RTCP for the session in @name. Must be called with
+ * RTP_BIN_LOCK.
+ */
+ static GstPad *
+ create_send_rtcp (GstRtpBin * rtpbin, GstPadTemplate * templ,
+ const gchar * name)
+ {
+ guint sessid;
+ GstPad *encsrc;
+ GstElement *encoder;
+ GstRtpBinSession *session;
+
+ /* first get the session number */
+ if (name == NULL || sscanf (name, "send_rtcp_src_%u", &sessid) != 1)
+ goto no_name;
+
+ /* get or create session */
+ session = find_session_by_id (rtpbin, sessid);
+ if (!session) {
+ GST_DEBUG_OBJECT (rtpbin, "creating session %u", sessid);
+ /* create session now */
+ session = create_session (rtpbin, sessid);
+ if (session == NULL)
+ goto create_error;
+ }
+
+ /* check if pad was requested */
+ if (session->send_rtcp_src_ghost != NULL)
+ return session->send_rtcp_src_ghost;
+
+ /* get rtcp_src pad and store */
+ session->send_rtcp_src =
+ gst_element_request_pad_simple (session->session, "send_rtcp_src");
+ if (session->send_rtcp_src == NULL)
+ goto pad_failed;
+
+ GST_DEBUG_OBJECT (rtpbin, "getting RTCP encoder");
+ encoder = session_request_element (session, SIGNAL_REQUEST_RTCP_ENCODER);
+ if (encoder) {
+ gchar *ename;
+ GstPad *encsink;
+ GstPadLinkReturn ret;
+
+ GST_DEBUG_OBJECT (rtpbin, "linking RTCP encoder");
+
+ ename = g_strdup_printf ("rtcp_src_%u", sessid);
+ encsrc = gst_element_get_static_pad (encoder, ename);
+ g_free (ename);
+ if (encsrc == NULL)
+ goto enc_src_failed;
+
+ ename = g_strdup_printf ("rtcp_sink_%u", sessid);
+ encsink = gst_element_get_static_pad (encoder, ename);
+ g_free (ename);
+ if (encsink == NULL)
+ goto enc_sink_failed;
+
+ ret = gst_pad_link (session->send_rtcp_src, encsink);
+ gst_object_unref (encsink);
+
+ if (ret != GST_PAD_LINK_OK)
+ goto enc_link_failed;
+ } else {
+ GST_DEBUG_OBJECT (rtpbin, "no RTCP encoder given");
+ encsrc = gst_object_ref (session->send_rtcp_src);
+ }
+
+ session->send_rtcp_src_ghost =
+ gst_ghost_pad_new_from_template (name, encsrc, templ);
+ gst_object_unref (encsrc);
+ gst_pad_set_active (session->send_rtcp_src_ghost, TRUE);
+ gst_element_add_pad (GST_ELEMENT_CAST (rtpbin), session->send_rtcp_src_ghost);
+
+ return session->send_rtcp_src_ghost;
+
+ /* ERRORS */
+ no_name:
+ {
+ g_warning ("rtpbin: cannot find session id for pad: %s",
+ GST_STR_NULL (name));
+ return NULL;
+ }
+ create_error:
+ {
+ /* create_session already warned */
+ return NULL;
+ }
+ pad_failed:
+ {
+ g_warning ("rtpbin: failed to get rtcp pad for session %u", sessid);
+ return NULL;
+ }
+ enc_src_failed:
+ {
+ g_warning ("rtpbin: failed to get encoder src pad for session %u", sessid);
+ return NULL;
+ }
+ enc_sink_failed:
+ {
+ g_warning ("rtpbin: failed to get encoder sink pad for session %u", sessid);
+ gst_object_unref (encsrc);
+ return NULL;
+ }
+ enc_link_failed:
+ {
+ g_warning ("rtpbin: failed to link rtcp encoder for session %u", sessid);
+ gst_object_unref (encsrc);
+ return NULL;
+ }
+ }
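The RTCP src pad is requested the same way; a sketch with a hypothetical rtcp_udpsink (disabling sync/async on the RTCP sink is common practice so RTCP is not throttled by the pipeline clock):

    GstPad *rtcpsrc, *sinkpad;

    rtcpsrc = gst_element_request_pad_simple (rtpbin, "send_rtcp_src_0");
    sinkpad = gst_element_get_static_pad (rtcp_udpsink, "sink");
    gst_pad_link (rtcpsrc, sinkpad);
    gst_object_unref (sinkpad);

    g_object_set (rtcp_udpsink, "sync", FALSE, "async", FALSE, NULL);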
+
+ static void
+ remove_rtcp (GstRtpBin * rtpbin, GstRtpBinSession * session)
+ {
+ if (session->send_rtcp_src_ghost) {
+ gst_pad_set_active (session->send_rtcp_src_ghost, FALSE);
+ gst_element_remove_pad (GST_ELEMENT_CAST (rtpbin),
+ session->send_rtcp_src_ghost);
+ session->send_rtcp_src_ghost = NULL;
+ }
+ if (session->send_rtcp_src) {
+ gst_element_release_request_pad (session->session, session->send_rtcp_src);
+ gst_object_unref (session->send_rtcp_src);
+ session->send_rtcp_src = NULL;
+ }
+ }
+
+ /* If the requested name is NULL, create a name from the template using the
+  * lowest session number that does not yet have a pad */
+ static gchar *
+ gst_rtp_bin_get_free_pad_name (GstElement * element, GstPadTemplate * templ)
+ {
+ gboolean name_found = FALSE;
+ gint session = 0;
+ GstIterator *pad_it = NULL;
+ gchar *pad_name = NULL;
+ GValue data = { 0, };
+
+ GST_DEBUG_OBJECT (element, "find a free pad name for template");
+ while (!name_found) {
+ gboolean done = FALSE;
+
+ g_free (pad_name);
+ pad_name = g_strdup_printf (templ->name_template, session++);
+ pad_it = gst_element_iterate_pads (GST_ELEMENT (element));
+ name_found = TRUE;
+ while (!done) {
+ switch (gst_iterator_next (pad_it, &data)) {
+ case GST_ITERATOR_OK:
+ {
+ GstPad *pad;
+ gchar *name;
+
+ pad = g_value_get_object (&data);
+ name = gst_pad_get_name (pad);
+
+ if (strcmp (name, pad_name) == 0) {
+ done = TRUE;
+ name_found = FALSE;
+ }
+ g_free (name);
+ g_value_reset (&data);
+ break;
+ }
+ case GST_ITERATOR_ERROR:
+ case GST_ITERATOR_RESYNC:
+ /* restart iteration */
+ done = TRUE;
+ name_found = FALSE;
+ session = 0;
+ break;
+ case GST_ITERATOR_DONE:
+ done = TRUE;
+ break;
+ }
+ }
+ g_value_unset (&data);
+ gst_iterator_free (pad_it);
+ }
+
+ GST_DEBUG_OBJECT (element, "free pad name found: '%s'", pad_name);
+ return pad_name;
+ }
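In practice this helper is hit when the application passes the template name itself, letting rtpbin pick the session; a sketch:

    /* rtpbin picks the lowest unused session, e.g. send_rtp_sink_0 */
    GstPad *pad = gst_element_request_pad_simple (rtpbin, "send_rtp_sink_%u");

    /* ... use the pad ... */

    gst_element_release_request_pad (rtpbin, pad);
    gst_object_unref (pad);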
+
+ /* Request a new pad from one of the session pad templates and dispatch to
+  * the matching create_* helper. */
+ static GstPad *
+ gst_rtp_bin_request_new_pad (GstElement * element,
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps)
+ {
+ GstRtpBin *rtpbin;
+ GstElementClass *klass;
+ GstPad *result;
+
+ gchar *pad_name = NULL;
+
+ g_return_val_if_fail (templ != NULL, NULL);
+ g_return_val_if_fail (GST_IS_RTP_BIN (element), NULL);
+
+ rtpbin = GST_RTP_BIN (element);
+ klass = GST_ELEMENT_GET_CLASS (element);
+
+ GST_RTP_BIN_LOCK (rtpbin);
+
+ if (name == NULL) {
+ /* use a free pad name */
+ pad_name = gst_rtp_bin_get_free_pad_name (element, templ);
+ } else {
+ /* use the provided name */
+ pad_name = g_strdup (name);
+ }
+
+ GST_DEBUG_OBJECT (rtpbin, "Trying to request a pad with name %s", pad_name);
+
+ /* figure out the template */
+ if (templ == gst_element_class_get_pad_template (klass, "recv_rtp_sink_%u")) {
+ result = create_recv_rtp (rtpbin, templ, pad_name);
+ } else if (templ == gst_element_class_get_pad_template (klass,
+ "recv_rtcp_sink_%u")) {
+ result = create_recv_rtcp (rtpbin, templ, pad_name);
+ } else if (templ == gst_element_class_get_pad_template (klass,
+ "send_rtp_sink_%u")) {
+ result = create_send_rtp (rtpbin, templ, pad_name);
+ } else if (templ == gst_element_class_get_pad_template (klass,
+ "send_rtcp_src_%u")) {
+ result = create_send_rtcp (rtpbin, templ, pad_name);
+ } else if (templ == gst_element_class_get_pad_template (klass,
+ "recv_fec_sink_%u_%u")) {
+ result = create_recv_fec (rtpbin, templ, pad_name);
+ } else
+ goto wrong_template;
+
+ g_free (pad_name);
+ GST_RTP_BIN_UNLOCK (rtpbin);
+
+ return result;
+
+ /* ERRORS */
+ wrong_template:
+ {
+ g_free (pad_name);
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ g_warning ("rtpbin: this is not our template");
+ return NULL;
+ }
+ }
+
+ static void
+ gst_rtp_bin_release_pad (GstElement * element, GstPad * pad)
+ {
+ GstRtpBinSession *session;
+ GstRtpBin *rtpbin;
+
+ g_return_if_fail (GST_IS_GHOST_PAD (pad));
+ g_return_if_fail (GST_IS_RTP_BIN (element));
+
+ rtpbin = GST_RTP_BIN (element);
+
+ GST_RTP_BIN_LOCK (rtpbin);
+ GST_DEBUG_OBJECT (rtpbin, "Trying to release pad %s:%s",
+ GST_DEBUG_PAD_NAME (pad));
+
+ if (!(session = find_session_by_pad (rtpbin, pad)))
+ goto unknown_pad;
+
+ if (session->recv_rtp_sink_ghost == pad) {
+ remove_recv_rtp (rtpbin, session);
+ } else if (session->recv_rtcp_sink_ghost == pad) {
+ remove_recv_rtcp (rtpbin, session);
+ } else if (session->send_rtp_sink_ghost == pad) {
+ remove_send_rtp (rtpbin, session);
+ } else if (session->send_rtcp_src_ghost == pad) {
+ remove_rtcp (rtpbin, session);
+ } else if (pad_is_recv_fec (session, pad)) {
+ remove_recv_fec_for_pad (rtpbin, session, pad);
+ }
+
+ /* no more request pads, free the complete session */
+ if (session->recv_rtp_sink_ghost == NULL
+ && session->recv_rtcp_sink_ghost == NULL
+ && session->send_rtp_sink_ghost == NULL
+ && session->send_rtcp_src_ghost == NULL
+ && session->recv_fec_sink_ghosts == NULL) {
+ GST_DEBUG_OBJECT (rtpbin, "no more pads for session %p", session);
+ rtpbin->sessions = g_slist_remove (rtpbin->sessions, session);
+ free_session (session, rtpbin);
+ }
+ GST_RTP_BIN_UNLOCK (rtpbin);
+
+ return;
+
+ /* ERROR */
+ unknown_pad:
+ {
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ g_warning ("rtpbin: %s:%s is not one of our request pads",
+ GST_DEBUG_PAD_NAME (pad));
+ return;
+ }
+ }
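+
+ /* Usage sketch (illustrative): a request pad obtained earlier is handed
+ * back through gst_element_release_request_pad(), which lands in
+ * gst_rtp_bin_release_pad() above; once the last ghost pad of a session
+ * is released, the session itself is freed:
+ *
+ * gst_element_release_request_pad (rtpbin, sink0);
+ * gst_object_unref (sink0);
+ */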
--- /dev/null
+ /* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifndef __GST_RTP_BIN_H__
+ #define __GST_RTP_BIN_H__
+
+ #include <gst/gst.h>
+
+ #include "rtpsession.h"
+ #include "gstrtpsession.h"
+ #include "rtpjitterbuffer.h"
+
+ #define GST_TYPE_RTP_BIN \
+ (gst_rtp_bin_get_type())
+ #define GST_RTP_BIN(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_BIN,GstRtpBin))
+ #define GST_RTP_BIN_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_BIN,GstRtpBinClass))
+ #define GST_IS_RTP_BIN(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_BIN))
+ #define GST_IS_RTP_BIN_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_BIN))
+
+ typedef enum
+ {
+ GST_RTP_BIN_RTCP_SYNC_ALWAYS,
+ GST_RTP_BIN_RTCP_SYNC_INITIAL,
+ GST_RTP_BIN_RTCP_SYNC_RTP
+ } GstRTCPSync;
+
+ typedef struct _GstRtpBin GstRtpBin;
+ typedef struct _GstRtpBinClass GstRtpBinClass;
+ typedef struct _GstRtpBinPrivate GstRtpBinPrivate;
+
+ struct _GstRtpBin {
+ GstBin bin;
+
+ /*< private >*/
+ /* default latency for sessions */
+ guint latency_ms;
+ guint64 latency_ns;
+ gboolean drop_on_latency;
+ gboolean do_lost;
+ gboolean ignore_pt;
+ gboolean ntp_sync;
+ gint rtcp_sync;
+ guint rtcp_sync_interval;
+ RTPJitterBufferMode buffer_mode;
+ gboolean buffering;
+ gboolean use_pipeline_clock;
+ GstRtpNtpTimeSource ntp_time_source;
+ gboolean send_sync_event;
+ GstClockTime buffer_start;
+ gboolean do_retransmission;
+ GstRTPProfile rtp_profile;
+ gboolean rtcp_sync_send_time;
+ gint max_rtcp_rtp_time_diff;
+ guint32 max_dropout_time;
+ guint32 max_misorder_time;
+ gboolean rfc7273_sync;
+ guint max_streams;
+ guint64 max_ts_offset_adjustment;
+ gint64 max_ts_offset;
+ gboolean max_ts_offset_is_set;
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gboolean use_rtsp_buffering;
++#endif
+ /* a list of sessions */
+ GSList *sessions;
+
+ /* a list of clients, these are streams with the same CNAME */
+ GSList *clients;
+
+ /* the default SDES items for sessions */
+ GstStructure *sdes;
+
+ /* the default FEC decoder factories for sessions */
+ GstStructure *fec_decoders;
+
+ /* the default FEC encoder factories for sessions */
+ GstStructure *fec_encoders;
+
+ /*< private >*/
+ GstRtpBinPrivate *priv;
+ };
+
+ struct _GstRtpBinClass {
+ GstBinClass parent_class;
+
+ /* get the caps for pt */
+ GstCaps* (*request_pt_map) (GstRtpBin *rtpbin, guint session, guint pt);
+
+ void (*payload_type_change) (GstRtpBin *rtpbin, guint session, guint pt);
+
+ void (*new_jitterbuffer) (GstRtpBin *rtpbin, GstElement *jitterbuffer, guint session, guint32 ssrc);
+
+ void (*new_storage) (GstRtpBin *rtpbin, GstElement *jitterbuffer, guint session);
+
+ /* action signals */
+ void (*clear_pt_map) (GstRtpBin *rtpbin);
+ void (*reset_sync) (GstRtpBin *rtpbin);
+ GstElement* (*get_session) (GstRtpBin *rtpbin, guint session);
+ RTPSession* (*get_internal_session) (GstRtpBin *rtpbin, guint session);
+ GstElement* (*get_storage) (GstRtpBin *rtpbin, guint session);
+ GObject* (*get_internal_storage) (GstRtpBin *rtpbin, guint session);
+ void (*clear_ssrc) (GstRtpBin *rtpbin, guint session, guint32 ssrc);
+
+ /* session manager signals */
+ void (*on_new_ssrc) (GstRtpBin *rtpbin, guint session, guint32 ssrc);
+ void (*on_ssrc_collision) (GstRtpBin *rtpbin, guint session, guint32 ssrc);
+ void (*on_ssrc_validated) (GstRtpBin *rtpbin, guint session, guint32 ssrc);
+ void (*on_ssrc_active) (GstRtpBin *rtpbin, guint session, guint32 ssrc);
+ void (*on_ssrc_sdes) (GstRtpBin *rtpbin, guint session, guint32 ssrc);
+ void (*on_bye_ssrc) (GstRtpBin *rtpbin, guint session, guint32 ssrc);
+ void (*on_bye_timeout) (GstRtpBin *rtpbin, guint session, guint32 ssrc);
+ void (*on_timeout) (GstRtpBin *rtpbin, guint session, guint32 ssrc);
+ void (*on_sender_timeout) (GstRtpBin *rtpbin, guint session, guint32 ssrc);
+ void (*on_npt_stop) (GstRtpBin *rtpbin, guint session, guint32 ssrc);
+
+ GstElement* (*request_rtp_encoder) (GstRtpBin *rtpbin, guint session);
+ GstElement* (*request_rtp_decoder) (GstRtpBin *rtpbin, guint session);
+ GstElement* (*request_rtcp_encoder) (GstRtpBin *rtpbin, guint session);
+ GstElement* (*request_rtcp_decoder) (GstRtpBin *rtpbin, guint session);
+
+ GstElement* (*request_aux_sender) (GstRtpBin *rtpbin, guint session);
+ GstElement* (*request_aux_receiver) (GstRtpBin *rtpbin, guint session);
+
+ GstElement* (*request_fec_encoder) (GstRtpBin *rtpbin, guint session);
+ GstElement* (*request_fec_decoder) (GstRtpBin *rtpbin, guint session);
+
+ GstElement* (*request_jitterbuffer) (GstRtpBin *rtpbin, guint session);
+
+ void (*on_new_sender_ssrc) (GstRtpBin *rtpbin, guint session, guint32 ssrc);
+ void (*on_sender_ssrc_active) (GstRtpBin *rtpbin, guint session, guint32 ssrc);
+ };
+
+ GType gst_rtp_bin_get_type (void);
+
+ GST_ELEMENT_REGISTER_DECLARE (rtpbin);
+
+ #endif /* __GST_RTP_BIN_H__ */
--- /dev/null
+ /* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+ #include <string.h>
+ #include <stdlib.h>
+
+ #include <gst/rtp/gstrtpbuffer.h>
+ #include <gst/rtp/gstrtcpbuffer.h>
+
+ #include "rtpjitterbuffer.h"
+
+ GST_DEBUG_CATEGORY_STATIC (rtp_jitter_buffer_debug);
+ #define GST_CAT_DEFAULT rtp_jitter_buffer_debug
+
+ #define MAX_WINDOW RTP_JITTER_BUFFER_MAX_WINDOW
+ #define MAX_TIME (2 * GST_SECOND)
+
+ /* signals and args */
+ enum
+ {
+ LAST_SIGNAL
+ };
+
+ enum
+ {
+ PROP_0
+ };
+
+ /* GObject vmethods */
+ static void rtp_jitter_buffer_finalize (GObject * object);
+
+ GType
+ rtp_jitter_buffer_mode_get_type (void)
+ {
+ static GType jitter_buffer_mode_type = 0;
+ static const GEnumValue jitter_buffer_modes[] = {
+ {RTP_JITTER_BUFFER_MODE_NONE, "Only use RTP timestamps", "none"},
+ {RTP_JITTER_BUFFER_MODE_SLAVE, "Slave receiver to sender clock", "slave"},
+ {RTP_JITTER_BUFFER_MODE_BUFFER, "Do low/high watermark buffering",
+ "buffer"},
+ {RTP_JITTER_BUFFER_MODE_SYNCED, "Synchronized sender and receiver clocks",
+ "synced"},
+ {0, NULL, NULL},
+ };
+
+ if (!jitter_buffer_mode_type) {
+ jitter_buffer_mode_type =
+ g_enum_register_static ("RTPJitterBufferMode", jitter_buffer_modes);
+ }
+ return jitter_buffer_mode_type;
+ }
+
+ /* static guint rtp_jitter_buffer_signals[LAST_SIGNAL] = { 0 }; */
+
+ G_DEFINE_TYPE (RTPJitterBuffer, rtp_jitter_buffer, G_TYPE_OBJECT);
+
+ static void
+ rtp_jitter_buffer_class_init (RTPJitterBufferClass * klass)
+ {
+ GObjectClass *gobject_class;
+
+ gobject_class = (GObjectClass *) klass;
+
+ gobject_class->finalize = rtp_jitter_buffer_finalize;
+
+ GST_DEBUG_CATEGORY_INIT (rtp_jitter_buffer_debug, "rtpjitterbuffer", 0,
+ "RTP Jitter Buffer");
+ }
+
+ static void
+ rtp_jitter_buffer_init (RTPJitterBuffer * jbuf)
+ {
+ g_mutex_init (&jbuf->clock_lock);
+
+ g_queue_init (&jbuf->packets);
+ jbuf->mode = RTP_JITTER_BUFFER_MODE_SLAVE;
+
+ rtp_jitter_buffer_reset_skew (jbuf);
+ }
+
+ static void
+ rtp_jitter_buffer_finalize (GObject * object)
+ {
+ RTPJitterBuffer *jbuf;
+
+ jbuf = RTP_JITTER_BUFFER_CAST (object);
+
+ if (jbuf->media_clock_synced_id)
+ g_signal_handler_disconnect (jbuf->media_clock,
+ jbuf->media_clock_synced_id);
+ if (jbuf->media_clock) {
+ /* Make sure to clear any clock master before releasing the clock */
+ gst_clock_set_master (jbuf->media_clock, NULL);
+ gst_object_unref (jbuf->media_clock);
+ }
+
+ if (jbuf->pipeline_clock)
+ gst_object_unref (jbuf->pipeline_clock);
+
+ /* We cannot use g_queue_clear() as it would pass the wrong size to
+ * g_slice_free() which may lead to data corruption in the slice allocator.
+ */
+ rtp_jitter_buffer_flush (jbuf, NULL, NULL);
+
+ g_mutex_clear (&jbuf->clock_lock);
+
+ G_OBJECT_CLASS (rtp_jitter_buffer_parent_class)->finalize (object);
+ }
+
+ /**
+ * rtp_jitter_buffer_new:
+ *
+ * Create an #RTPJitterBuffer.
+ *
+ * Returns: a new #RTPJitterBuffer. Use g_object_unref() after usage.
+ */
+ RTPJitterBuffer *
+ rtp_jitter_buffer_new (void)
+ {
+ RTPJitterBuffer *jbuf;
+
+ jbuf = g_object_new (RTP_TYPE_JITTER_BUFFER, NULL);
+
+ return jbuf;
+ }
+
+ /**
+ * rtp_jitter_buffer_get_mode:
+ * @jbuf: an #RTPJitterBuffer
+ *
+ * Get the current jitterbuffer mode.
+ *
+ * Returns: the current jitterbuffer mode.
+ */
+ RTPJitterBufferMode
+ rtp_jitter_buffer_get_mode (RTPJitterBuffer * jbuf)
+ {
+ return jbuf->mode;
+ }
+
+ /**
+ * rtp_jitter_buffer_set_mode:
+ * @jbuf: an #RTPJitterBuffer
+ * @mode: a #RTPJitterBufferMode
+ *
+ * Set the buffering and clock slaving algorithm used in the @jbuf.
+ */
+ void
+ rtp_jitter_buffer_set_mode (RTPJitterBuffer * jbuf, RTPJitterBufferMode mode)
+ {
+ jbuf->mode = mode;
+ }
+
+ GstClockTime
+ rtp_jitter_buffer_get_delay (RTPJitterBuffer * jbuf)
+ {
+ return jbuf->delay;
+ }
+
+ void
+ rtp_jitter_buffer_set_delay (RTPJitterBuffer * jbuf, GstClockTime delay)
+ {
+ jbuf->delay = delay;
+ jbuf->low_level = (delay * 15) / 100;
+ /* the high level is at 90% in order to release packets before we fill up the
+ * buffer up to the latency */
+ jbuf->high_level = (delay * 90) / 100;
+
+ GST_DEBUG ("delay %" GST_TIME_FORMAT ", min %" GST_TIME_FORMAT ", max %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (jbuf->delay),
+ GST_TIME_ARGS (jbuf->low_level), GST_TIME_ARGS (jbuf->high_level));
+ }
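+
+ /* Worked example (illustrative numbers): with delay = 200ms the watermarks
+ * become low_level = 30ms (15%) and high_level = 180ms (90%), so buffering
+ * is only reported as finished once 180ms of timestamped data is queued. */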
+
+ /**
+ * rtp_jitter_buffer_set_clock_rate:
+ * @jbuf: an #RTPJitterBuffer
+ * @clock_rate: the new clock rate
+ *
+ * Set the clock rate in the jitterbuffer.
+ */
+ void
+ rtp_jitter_buffer_set_clock_rate (RTPJitterBuffer * jbuf, guint32 clock_rate)
+ {
+ if (jbuf->clock_rate != clock_rate) {
+ GST_DEBUG ("Clock rate changed from %" G_GUINT32_FORMAT " to %"
+ G_GUINT32_FORMAT, jbuf->clock_rate, clock_rate);
+ jbuf->clock_rate = clock_rate;
+ rtp_jitter_buffer_reset_skew (jbuf);
+ }
+ }
+
+ /**
+ * rtp_jitter_buffer_get_clock_rate:
+ * @jbuf: an #RTPJitterBuffer
+ *
+ * Get the currently configured clock-rate of @jbuf.
+ *
+ * Returns: the current clock-rate
+ */
+ guint32
+ rtp_jitter_buffer_get_clock_rate (RTPJitterBuffer * jbuf)
+ {
+ return jbuf->clock_rate;
+ }
+
+ static void
+ media_clock_synced_cb (GstClock * clock, gboolean synced,
+ RTPJitterBuffer * jbuf)
+ {
+ GstClockTime internal, external;
+
+ g_mutex_lock (&jbuf->clock_lock);
+ if (jbuf->pipeline_clock) {
+ internal = gst_clock_get_internal_time (jbuf->media_clock);
+ external = gst_clock_get_time (jbuf->pipeline_clock);
+
+ gst_clock_set_calibration (jbuf->media_clock, internal, external, 1, 1);
+ }
+ g_mutex_unlock (&jbuf->clock_lock);
+ }
+
+ /**
+ * rtp_jitter_buffer_set_media_clock:
+ * @jbuf: an #RTPJitterBuffer
+ * @clock: (transfer full): media #GstClock
+ * @clock_offset: RTP time at clock epoch or -1
+ *
+ * Sets the media clock and the RTP clock offset for the media.
+ *
+ */
+ void
+ rtp_jitter_buffer_set_media_clock (RTPJitterBuffer * jbuf, GstClock * clock,
+ guint64 clock_offset)
+ {
+ g_mutex_lock (&jbuf->clock_lock);
+ if (jbuf->media_clock) {
+ if (jbuf->media_clock_synced_id)
+ g_signal_handler_disconnect (jbuf->media_clock,
+ jbuf->media_clock_synced_id);
+ jbuf->media_clock_synced_id = 0;
+ gst_object_unref (jbuf->media_clock);
+ }
+ jbuf->media_clock = clock;
+ jbuf->media_clock_offset = clock_offset;
+
+ if (jbuf->pipeline_clock && jbuf->media_clock &&
+ jbuf->pipeline_clock != jbuf->media_clock) {
+ jbuf->media_clock_synced_id =
+ g_signal_connect (jbuf->media_clock, "synced",
+ G_CALLBACK (media_clock_synced_cb), jbuf);
+ if (gst_clock_is_synced (jbuf->media_clock)) {
+ GstClockTime internal, external;
+
+ internal = gst_clock_get_internal_time (jbuf->media_clock);
+ external = gst_clock_get_time (jbuf->pipeline_clock);
+
+ gst_clock_set_calibration (jbuf->media_clock, internal, external, 1, 1);
+ }
+
+ gst_clock_set_master (jbuf->media_clock, jbuf->pipeline_clock);
+ }
+ g_mutex_unlock (&jbuf->clock_lock);
+ }
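+
+ /* Minimal usage sketch (assumption: slaving to an NTP-derived media clock
+ * as in RFC 7273; gst_ntp_clock_new() is from libgstnet, and the server
+ * address, pipeline_clock and clock_offset are hypothetical):
+ *
+ * GstClock *ntp = gst_ntp_clock_new ("media", "203.0.113.1", 123, 0);
+ * rtp_jitter_buffer_set_pipeline_clock (jbuf, pipeline_clock);
+ * rtp_jitter_buffer_set_media_clock (jbuf, ntp, clock_offset);
+ */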
+
+ /**
+ * rtp_jitter_buffer_set_pipeline_clock:
+ * @jbuf: an #RTPJitterBuffer
+ * @clock: pipeline #GstClock
+ *
+ * Sets the pipeline clock
+ *
+ */
+ void
+ rtp_jitter_buffer_set_pipeline_clock (RTPJitterBuffer * jbuf, GstClock * clock)
+ {
+ g_mutex_lock (&jbuf->clock_lock);
+ if (jbuf->pipeline_clock)
+ gst_object_unref (jbuf->pipeline_clock);
+ jbuf->pipeline_clock = clock ? gst_object_ref (clock) : NULL;
+
+ if (jbuf->pipeline_clock && jbuf->media_clock &&
+ jbuf->pipeline_clock != jbuf->media_clock) {
+ if (gst_clock_is_synced (jbuf->media_clock)) {
+ GstClockTime internal, external;
+
+ internal = gst_clock_get_internal_time (jbuf->media_clock);
+ external = gst_clock_get_time (jbuf->pipeline_clock);
+
+ gst_clock_set_calibration (jbuf->media_clock, internal, external, 1, 1);
+ }
+
+ gst_clock_set_master (jbuf->media_clock, jbuf->pipeline_clock);
+ }
+ g_mutex_unlock (&jbuf->clock_lock);
+ }
+
+ gboolean
+ rtp_jitter_buffer_get_rfc7273_sync (RTPJitterBuffer * jbuf)
+ {
+ return jbuf->rfc7273_sync;
+ }
+
+ void
+ rtp_jitter_buffer_set_rfc7273_sync (RTPJitterBuffer * jbuf,
+ gboolean rfc7273_sync)
+ {
+ jbuf->rfc7273_sync = rfc7273_sync;
+ }
+
+ /**
+ * rtp_jitter_buffer_reset_skew:
+ * @jbuf: an #RTPJitterBuffer
+ *
+ * Reset the skew calculations in @jbuf.
+ */
+ void
+ rtp_jitter_buffer_reset_skew (RTPJitterBuffer * jbuf)
+ {
+ jbuf->base_time = -1;
+ jbuf->base_rtptime = -1;
+ jbuf->base_extrtp = -1;
+ jbuf->media_clock_base_time = -1;
+ jbuf->ext_rtptime = -1;
+ jbuf->last_rtptime = -1;
+ jbuf->window_pos = 0;
+ jbuf->window_filling = TRUE;
+ jbuf->window_min = 0;
+ jbuf->skew = 0;
+ jbuf->prev_send_diff = -1;
+ jbuf->prev_out_time = -1;
+ jbuf->need_resync = TRUE;
+
+ GST_DEBUG ("reset skew correction");
+ }
+
+ /**
+ * rtp_jitter_buffer_disable_buffering:
+ * @jbuf: an #RTPJitterBuffer
+ * @disabled: the new state
+ *
+ * Enable or disable buffering on @jbuf.
+ */
+ void
+ rtp_jitter_buffer_disable_buffering (RTPJitterBuffer * jbuf, gboolean disabled)
+ {
+ jbuf->buffering_disabled = disabled;
+ }
+
+ static void
+ rtp_jitter_buffer_resync (RTPJitterBuffer * jbuf, GstClockTime time,
+ GstClockTime gstrtptime, guint64 ext_rtptime, gboolean reset_skew)
+ {
+ jbuf->base_time = time;
+ jbuf->media_clock_base_time = -1;
+ jbuf->base_rtptime = gstrtptime;
+ jbuf->base_extrtp = ext_rtptime;
+ jbuf->prev_out_time = -1;
+ jbuf->prev_send_diff = -1;
+ if (reset_skew) {
+ jbuf->window_filling = TRUE;
+ jbuf->window_pos = 0;
+ jbuf->window_min = 0;
+ jbuf->window_size = 0;
+ jbuf->skew = 0;
+ }
+ jbuf->need_resync = FALSE;
+ }
+
+ static guint64
+ get_buffer_level (RTPJitterBuffer * jbuf)
+ {
+ RTPJitterBufferItem *high_buf = NULL, *low_buf = NULL;
+ guint64 level;
+
+ /* first buffer with timestamp */
+ high_buf = (RTPJitterBufferItem *) g_queue_peek_tail_link (&jbuf->packets);
+ while (high_buf) {
+ if (high_buf->dts != -1 || high_buf->pts != -1)
+ break;
+
+ high_buf = (RTPJitterBufferItem *) g_list_previous (high_buf);
+ }
+
+ low_buf = (RTPJitterBufferItem *) g_queue_peek_head_link (&jbuf->packets);
+ while (low_buf) {
+ if (low_buf->dts != -1 || low_buf->pts != -1)
+ break;
+
+ low_buf = (RTPJitterBufferItem *) g_list_next (low_buf);
+ }
+
+ if (!high_buf || !low_buf || high_buf == low_buf) {
+ level = 0;
+ } else {
+ guint64 high_ts, low_ts;
+
+ high_ts = high_buf->dts != -1 ? high_buf->dts : high_buf->pts;
+ low_ts = low_buf->dts != -1 ? low_buf->dts : low_buf->pts;
+
+ if (high_ts > low_ts)
+ level = high_ts - low_ts;
+ else
+ level = 0;
+
+ GST_LOG_OBJECT (jbuf,
+ "low %" GST_TIME_FORMAT " high %" GST_TIME_FORMAT " level %"
+ G_GUINT64_FORMAT, GST_TIME_ARGS (low_ts), GST_TIME_ARGS (high_ts),
+ level);
+ }
+ return level;
+ }
+
+ static void
+ update_buffer_level (RTPJitterBuffer * jbuf, gint * percent)
+ {
+ gboolean post = FALSE;
+ guint64 level;
+
+ level = get_buffer_level (jbuf);
+ GST_DEBUG ("buffer level %" GST_TIME_FORMAT, GST_TIME_ARGS (level));
+
+ if (jbuf->buffering_disabled) {
+ GST_DEBUG ("buffering is disabled");
+ level = jbuf->high_level;
+ }
+
+ if (jbuf->buffering) {
+ post = TRUE;
+ if (level >= jbuf->high_level) {
+ GST_DEBUG ("buffering finished");
+ jbuf->buffering = FALSE;
+ }
+ } else {
+ if (level < jbuf->low_level) {
+ GST_DEBUG ("buffering started");
+ jbuf->buffering = TRUE;
+ post = TRUE;
+ }
+ }
+ if (post) {
+ gint perc;
+
+ if (jbuf->buffering && (jbuf->high_level != 0)) {
+ perc = (level * 100 / jbuf->high_level);
+ perc = MIN (perc, 100);
+ } else {
+ perc = 100;
+ }
+
+ if (percent)
+ *percent = perc;
+
+ GST_DEBUG ("buffering %d", perc);
+ }
+ }
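+
+ /* Example of the hysteresis above (illustrative numbers): with
+ * low_level = 30ms and high_level = 180ms, buffering starts when the level
+ * drops below 30ms and ends only once it reaches 180ms again; the posted
+ * percent is level * 100 / high_level, clamped to 100. */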
+
+ /* For the clock skew we use a windowed low point averaging algorithm as can be
+ * found in Fober, Orlarey and Letz, 2005, "Real Time Clock Skew Estimation
+ * over Network Delays":
+ * http://www.grame.fr/Ressources/pub/TR-050601.pdf
+ * http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.102.1546
+ *
+ * The idea is that the jitter is composed of:
+ *
+ * J = N + n
+ *
+ * N : a constant network delay.
+ * n : random added noise. The noise is concentrated around 0
+ *
+ * In the receiver we can track the elapsed time at the sender with:
+ *
+ * send_diff(i) = (Tsi - Ts0);
+ *
+ * Tsi : The time at the sender at packet i
+ * Ts0 : The time at the sender at the first packet
+ *
+ * This is the difference between the RTP timestamp in the first received packet
+ * and the current packet.
+ *
+ * At the receiver we have to deal with the jitter introduced by the network.
+ *
+ * recv_diff(i) = (Tri - Tr0)
+ *
+ * Tri : The time at the receiver at packet i
+ * Tr0 : The time at the receiver at the first packet
+ *
+ * Both of these values contain a jitter Ji, a jitter for packet i, so we can
+ * write:
+ *
+ * recv_diff(i) = (Cri + D + ni) - (Cr0 + D + n0)
+ *
+ * Cri : The time of the clock at the receiver for packet i
+ * D + ni : The jitter when receiving packet i
+ *
+ * We see that the network delay is irrelevant here as we can eliminate D:
+ *
+ * recv_diff(i) = (Cri + ni) - (Cr0 + n0)
+ *
+ * The drift is now expressed as:
+ *
+ * Drift(i) = recv_diff(i) - send_diff(i);
+ *
+ * We now keep the W latest values of Drift and find the minimum (this is the
+ * one with the lowest network jitter and thus the one which is least affected
+ * by it). We average this lowest value to smooth out the resulting network skew.
+ *
+ * Both the window and the weighting used for averaging influence the accuracy
+ * of the drift estimation. Finding the correct parameters turns out to be a
+ * compromise between accuracy and inertia.
+ *
+ * We use a 2 second window or up to 512 data points, which is statistically big
+ * enough to catch spikes (FIXME, detect spikes).
+ * We also use a rather large weighting factor (125) to smoothly adapt. During
+ * startup, when filling the window, we use a parabolic weighting factor, the
+ * more the window is filled, the faster we move to the detected possible skew.
+ *
+ * Returns: @time adjusted with the clock skew.
+ */
+ static GstClockTime
+ calculate_skew (RTPJitterBuffer * jbuf, guint64 ext_rtptime,
+ GstClockTime gstrtptime, GstClockTime time, gint gap, gboolean is_rtx)
+ {
+ guint64 send_diff, recv_diff;
+ gint64 delta;
+ gint64 old;
+ gint pos, i;
+ GstClockTime out_time;
+ guint64 slope;
+
+ /* elapsed time at sender */
+ send_diff = gstrtptime - jbuf->base_rtptime;
+
+ /* we don't have an arrival timestamp so we can't do skew detection. we
+ * should still apply a timestamp based on RTP timestamp and base_time */
+ if (time == -1 || jbuf->base_time == -1 || is_rtx)
+ goto no_skew;
+
+ /* elapsed time at receiver, includes the jitter */
+ recv_diff = time - jbuf->base_time;
+
+ /* measure the diff */
+ delta = ((gint64) recv_diff) - ((gint64) send_diff);
+
+ /* measure the slope, this gives a rough estimate of the ratio between the
+ * sender speed and the receiver speed. This should be approximately 8, higher
+ * values indicate a burst (especially when the connection starts) */
+ if (recv_diff > 0)
+ slope = (send_diff * 8) / recv_diff;
+ else
+ slope = 8;
+
+ GST_DEBUG ("time %" GST_TIME_FORMAT ", base %" GST_TIME_FORMAT ", recv_diff %"
+ GST_TIME_FORMAT ", slope %" G_GUINT64_FORMAT, GST_TIME_ARGS (time),
+ GST_TIME_ARGS (jbuf->base_time), GST_TIME_ARGS (recv_diff), slope);
+
+ /* if the difference between the sender timeline and the receiver timeline
+ * changed too quickly we have to resync because the server likely restarted
+ * its timestamps. */
+ if (ABS (delta - jbuf->skew) > GST_SECOND) {
+ GST_WARNING ("delta - skew: %" GST_TIME_FORMAT " too big, reset skew",
+ GST_TIME_ARGS (ABS (delta - jbuf->skew)));
+ rtp_jitter_buffer_resync (jbuf, time, gstrtptime, ext_rtptime, TRUE);
+ send_diff = 0;
+ delta = 0;
+ gap = 0;
+ }
+
+ /* only do skew calculations if we didn't have a gap. if too much time
+ * has elapsed despite there being a gap, we resynced already. */
+ if (G_UNLIKELY (gap != 0))
+ goto no_skew;
+
+ pos = jbuf->window_pos;
+
+ if (G_UNLIKELY (jbuf->window_filling)) {
+ /* we are filling the window */
+ GST_DEBUG ("filling %d, delta %" G_GINT64_FORMAT, pos, delta);
+ jbuf->window[pos++] = delta;
+ /* calc the min delta we observed */
+ if (G_UNLIKELY (pos == 1 || delta < jbuf->window_min))
+ jbuf->window_min = delta;
+
+ if (G_UNLIKELY (send_diff >= MAX_TIME || pos >= MAX_WINDOW)) {
+ jbuf->window_size = pos;
+
+ /* window filled */
+ GST_DEBUG ("min %" G_GINT64_FORMAT, jbuf->window_min);
+
+ /* the skew is now the min */
+ jbuf->skew = jbuf->window_min;
+ jbuf->window_filling = FALSE;
+ } else {
+ gint perc_time, perc_window, perc;
+
+ /* figure out how much we filled the window, this depends on the amount of
+ * time we have or the max number of points we keep. */
+ perc_time = send_diff * 100 / MAX_TIME;
+ perc_window = pos * 100 / MAX_WINDOW;
+ perc = MAX (perc_time, perc_window);
+
+ /* make a parabolic function, the closer we get to the MAX, the more value
+ * we give to the scaling factor of the new value */
+ perc = perc * perc;
+
+ /* quickly go to the min value when we are filling up, slowly when we are
+ * just starting because we're not sure it's a good value yet. */
+ jbuf->skew =
+ (perc * jbuf->window_min + ((10000 - perc) * jbuf->skew)) / 10000;
+ jbuf->window_size = pos + 1;
+ }
+ } else {
+ /* pick old value and store new value. We keep the previous value in order
+ * to quickly check if the min of the window changed */
+ old = jbuf->window[pos];
+ jbuf->window[pos++] = delta;
+
+ if (G_UNLIKELY (delta <= jbuf->window_min)) {
+ /* if the new value we inserted is smaller or equal to the current min,
+ * it becomes the new min */
+ jbuf->window_min = delta;
+ } else if (G_UNLIKELY (old == jbuf->window_min)) {
+ gint64 min = G_MAXINT64;
+
+ /* if we removed the old min, we have to find a new min */
+ for (i = 0; i < jbuf->window_size; i++) {
+ /* we found another value equal to the old min, we can stop searching now */
+ if (jbuf->window[i] == old) {
+ min = old;
+ break;
+ }
+ if (jbuf->window[i] < min)
+ min = jbuf->window[i];
+ }
+ jbuf->window_min = min;
+ }
+ /* average the min values */
+ jbuf->skew = (jbuf->window_min + (124 * jbuf->skew)) / 125;
+ GST_DEBUG ("delta %" G_GINT64_FORMAT ", new min: %" G_GINT64_FORMAT,
+ delta, jbuf->window_min);
+ }
+ /* wrap around in the window */
+ if (G_UNLIKELY (pos >= jbuf->window_size))
+ pos = 0;
+ jbuf->window_pos = pos;
+
+ no_skew:
+ /* the output time is defined as the base timestamp plus the RTP time
+ * adjusted for the clock skew. */
+ if (jbuf->base_time != -1) {
+ out_time = jbuf->base_time + send_diff;
+ /* skew can be negative and we don't want to make invalid timestamps */
+ if (jbuf->skew < 0 && out_time < -jbuf->skew) {
+ out_time = 0;
+ } else {
+ out_time += jbuf->skew;
+ }
+ } else
+ out_time = -1;
+
+ GST_DEBUG ("skew %" G_GINT64_FORMAT ", out %" GST_TIME_FORMAT,
+ jbuf->skew, GST_TIME_ARGS (out_time));
+
+ return out_time;
+ }
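+
+ /* Worked example (illustrative numbers): with base_rtptime and base_time
+ * at 0, a packet whose RTP time maps to 100ms arriving at 103ms gives
+ * delta = recv_diff - send_diff = 3ms. Once the window is filled, the
+ * window minimum (say 2ms) is folded into the running estimate as
+ * skew = (window_min + 124 * skew) / 125 and the packet is stamped
+ * out_time = base_time + send_diff + skew. */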
+
+ static void
+ queue_do_insert (RTPJitterBuffer * jbuf, GList * list, GList * item)
+ {
+ GQueue *queue = &jbuf->packets;
+
+ /* It's more likely that the packet was inserted at the tail of the queue */
+ if (G_LIKELY (list)) {
+ item->prev = list;
+ item->next = list->next;
+ list->next = item;
+ } else {
+ item->prev = NULL;
+ item->next = queue->head;
+ queue->head = item;
+ }
+ if (item->next)
+ item->next->prev = item;
+ else
+ queue->tail = item;
+ queue->length++;
+ }
+
+ GstClockTime
+ rtp_jitter_buffer_calculate_pts (RTPJitterBuffer * jbuf, GstClockTime dts,
+ gboolean estimated_dts, guint32 rtptime, GstClockTime base_time,
+ gint gap, gboolean is_rtx)
+ {
+ guint64 ext_rtptime;
+ GstClockTime gstrtptime, pts;
+ GstClock *media_clock, *pipeline_clock;
+ guint64 media_clock_offset;
+ gboolean rfc7273_mode;
+
+ /* rtp time jumps are checked for during skew calculation, but bypassed
+ * in the other modes, so watch for them here and reset the jitterbuffer if needed.
+ * Only reset if valid input time, which is likely for UDP input
+ * where we expect this might happen due to async thread effects
+ * (in seek and state change cycles), but not so much for TCP input */
+ if (GST_CLOCK_TIME_IS_VALID (dts) && !estimated_dts &&
+ jbuf->mode != RTP_JITTER_BUFFER_MODE_SLAVE &&
+ jbuf->base_time != -1 && jbuf->last_rtptime != -1) {
+ GstClockTime ext_rtptime = jbuf->ext_rtptime;
+
+ ext_rtptime = gst_rtp_buffer_ext_timestamp (&ext_rtptime, rtptime);
+ if (ext_rtptime > jbuf->last_rtptime + 3 * jbuf->clock_rate ||
+ ext_rtptime + 3 * jbuf->clock_rate < jbuf->last_rtptime) {
+ if (!is_rtx) {
+ /* reset even if we don't have valid incoming time;
+ * still better than producing possibly very bogus output timestamp */
+ GST_WARNING ("rtp delta too big, reset skew");
+ rtp_jitter_buffer_reset_skew (jbuf);
+ } else {
+ GST_WARNING ("rtp delta too big: ignore rtx packet");
+ media_clock = NULL;
+ pipeline_clock = NULL;
+ pts = GST_CLOCK_TIME_NONE;
+ goto done;
+ }
+ }
+ }
+
+ /* Return the last time if we got the same RTP timestamp again */
+ ext_rtptime = gst_rtp_buffer_ext_timestamp (&jbuf->ext_rtptime, rtptime);
+ if (jbuf->last_rtptime != -1 && ext_rtptime == jbuf->last_rtptime) {
+ return jbuf->prev_out_time;
+ }
+
+ /* keep track of the last extended rtptime */
+ jbuf->last_rtptime = ext_rtptime;
+
+ g_mutex_lock (&jbuf->clock_lock);
+ media_clock = jbuf->media_clock ? gst_object_ref (jbuf->media_clock) : NULL;
+ pipeline_clock =
+ jbuf->pipeline_clock ? gst_object_ref (jbuf->pipeline_clock) : NULL;
+ media_clock_offset = jbuf->media_clock_offset;
+ g_mutex_unlock (&jbuf->clock_lock);
+
+ gstrtptime =
+ gst_util_uint64_scale_int (ext_rtptime, GST_SECOND, jbuf->clock_rate);
+
+ if (G_LIKELY (jbuf->base_rtptime != -1)) {
+ /* check elapsed time in RTP units */
+ if (gstrtptime < jbuf->base_rtptime) {
+ if (!is_rtx) {
+ /* elapsed time at sender, timestamps can go backwards and thus be
+ * smaller than our base time, schedule to take a new base time in
+ * that case. */
+ GST_WARNING ("backward timestamps at server, schedule resync");
+ jbuf->need_resync = TRUE;
+ } else {
+ GST_WARNING ("backward timestamps: ignore rtx packet");
+ pts = GST_CLOCK_TIME_NONE;
+ goto done;
+ }
+ }
+ }
+
+ switch (jbuf->mode) {
+ case RTP_JITTER_BUFFER_MODE_NONE:
+ case RTP_JITTER_BUFFER_MODE_BUFFER:
+ /* send 0 as the first timestamp and -1 for the other ones. This will
+ * interpolate them from the RTP timestamps with a 0 origin. In buffering
+ * mode we will adjust the outgoing timestamps according to the amount of
+ * time we spent buffering. */
+ if (jbuf->base_time == -1)
+ dts = 0;
+ else
+ dts = -1;
+ break;
+ case RTP_JITTER_BUFFER_MODE_SYNCED:
+ /* synchronized clocks, take first timestamp as base, use RTP timestamps
+ * to interpolate */
+ if (jbuf->base_time != -1 && !jbuf->need_resync)
+ dts = -1;
+ break;
+ case RTP_JITTER_BUFFER_MODE_SLAVE:
+ default:
+ break;
+ }
+
+ /* need resync, lock on to time and gstrtptime if we can, otherwise we
+ * do with the previous values */
+ if (G_UNLIKELY (jbuf->need_resync && dts != -1)) {
+ if (is_rtx) {
+ GST_DEBUG ("not resyncing on rtx packet, discard");
+ pts = GST_CLOCK_TIME_NONE;
+ goto done;
+ }
+ GST_INFO ("resync to time %" GST_TIME_FORMAT ", rtptime %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (dts), GST_TIME_ARGS (gstrtptime));
+ rtp_jitter_buffer_resync (jbuf, dts, gstrtptime, ext_rtptime, FALSE);
+ }
+
+ GST_DEBUG ("extrtp %" G_GUINT64_FORMAT ", gstrtp %" GST_TIME_FORMAT ", base %"
+ GST_TIME_FORMAT ", send_diff %" GST_TIME_FORMAT, ext_rtptime,
+ GST_TIME_ARGS (gstrtptime), GST_TIME_ARGS (jbuf->base_rtptime),
+ GST_TIME_ARGS (gstrtptime - jbuf->base_rtptime));
+
+ rfc7273_mode = media_clock && pipeline_clock
+ && gst_clock_is_synced (media_clock);
+
+ if (rfc7273_mode && jbuf->mode == RTP_JITTER_BUFFER_MODE_SLAVE
+ && (media_clock_offset == -1 || !jbuf->rfc7273_sync)) {
+ GstClockTime internal, external;
+ GstClockTime rate_num, rate_denom;
+ GstClockTime nsrtptimediff, rtpntptime, rtpsystime;
+
+ gst_clock_get_calibration (media_clock, &internal, &external, &rate_num,
+ &rate_denom);
+
+ /* Slave to the RFC7273 media clock instead of trying to estimate it
+ * based on receive times and RTP timestamps */
+
+ if (jbuf->media_clock_base_time == -1) {
+ if (jbuf->base_time != -1) {
+ jbuf->media_clock_base_time =
+ gst_clock_unadjust_with_calibration (media_clock,
+ jbuf->base_time + base_time, internal, external, rate_num,
+ rate_denom);
+ } else {
+ if (dts != -1)
+ jbuf->media_clock_base_time =
+ gst_clock_unadjust_with_calibration (media_clock, dts + base_time,
+ internal, external, rate_num, rate_denom);
+ else
+ jbuf->media_clock_base_time =
+ gst_clock_get_internal_time (media_clock);
+ jbuf->base_rtptime = gstrtptime;
+ }
+ }
+
+ if (gstrtptime > jbuf->base_rtptime)
+ nsrtptimediff = gstrtptime - jbuf->base_rtptime;
+ else
+ nsrtptimediff = 0;
+
+ rtpntptime = nsrtptimediff + jbuf->media_clock_base_time;
+
+ rtpsystime =
+ gst_clock_adjust_with_calibration (media_clock, rtpntptime, internal,
+ external, rate_num, rate_denom);
+
+ if (rtpsystime > base_time)
+ pts = rtpsystime - base_time;
+ else
+ pts = 0;
+
+ GST_DEBUG ("RFC7273 clock time %" GST_TIME_FORMAT ", out %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (rtpsystime), GST_TIME_ARGS (pts));
+ } else if (rfc7273_mode && (jbuf->mode == RTP_JITTER_BUFFER_MODE_SLAVE
+ || jbuf->mode == RTP_JITTER_BUFFER_MODE_SYNCED)
+ && media_clock_offset != -1 && jbuf->rfc7273_sync) {
+ GstClockTime ntptime, rtptime_tmp;
+ GstClockTime ntprtptime, rtpsystime;
+ GstClockTime internal, external;
+ GstClockTime rate_num, rate_denom;
+
+ /* Don't do any of the dts related adjustments further down */
+ dts = -1;
+
+ /* Calculate the actual clock time on the sender side based on the
+ * RFC7273 clock and convert it to our pipeline clock
+ */
+
+ gst_clock_get_calibration (media_clock, &internal, &external, &rate_num,
+ &rate_denom);
+
+ ntptime = gst_clock_get_internal_time (media_clock);
+
+ ntprtptime = gst_util_uint64_scale (ntptime, jbuf->clock_rate, GST_SECOND);
+ ntprtptime += media_clock_offset;
+ ntprtptime &= 0xffffffff;
+
+ rtptime_tmp = rtptime;
+ /* Check for wraparounds, we assume that the diff between current RTP
+ * timestamp and current media clock time can't be bigger than
+ * 2**31 clock units */
+ if (ntprtptime > rtptime_tmp && ntprtptime - rtptime_tmp >= 0x80000000)
+ rtptime_tmp += G_GUINT64_CONSTANT (0x100000000);
+ else if (rtptime_tmp > ntprtptime && rtptime_tmp - ntprtptime >= 0x80000000)
+ ntprtptime += G_GUINT64_CONSTANT (0x100000000);
+
+ if (ntprtptime > rtptime_tmp)
+ ntptime -=
+ gst_util_uint64_scale (ntprtptime - rtptime_tmp, GST_SECOND,
+ jbuf->clock_rate);
+ else
+ ntptime +=
+ gst_util_uint64_scale (rtptime_tmp - ntprtptime, GST_SECOND,
+ jbuf->clock_rate);
+
+ rtpsystime =
+ gst_clock_adjust_with_calibration (media_clock, ntptime, internal,
+ external, rate_num, rate_denom);
+ /* All this assumes that the pipeline has enough additional
+ * latency to cover for the network delay */
+ if (rtpsystime > base_time)
+ pts = rtpsystime - base_time;
+ else
+ pts = 0;
+
+ GST_DEBUG ("RFC7273 clock time %" GST_TIME_FORMAT ", ntptime %"
+ GST_TIME_FORMAT ", ntprtptime %" G_GUINT64_FORMAT ", rtptime %"
+ G_GUINT32_FORMAT ", base_time %" GST_TIME_FORMAT ", internal %"
+ GST_TIME_FORMAT ", external %" GST_TIME_FORMAT ", out %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (rtpsystime), GST_TIME_ARGS (ntptime),
+ ntprtptime, rtptime, GST_TIME_ARGS (base_time),
+ GST_TIME_ARGS (internal), GST_TIME_ARGS (external),
+ GST_TIME_ARGS (pts));
+ } else {
+ /* If we used the RFC7273 clock before and not anymore,
+ * we need to resync it later again */
+ jbuf->media_clock_base_time = -1;
+
+ /* do skew calculation by measuring the difference between rtptime and the
+ * receive dts, this function will return the skew corrected rtptime. */
+ pts = calculate_skew (jbuf, ext_rtptime, gstrtptime, dts, gap, is_rtx);
+ }
+
+ /* check if timestamps are not going backwards, we can only check this if we
+ * have a previous out time and a previous send_diff */
+ if (G_LIKELY (pts != -1 && jbuf->prev_out_time != -1
+ && jbuf->prev_send_diff != -1)) {
+ /* now check for backwards timestamps */
+ if (G_UNLIKELY (
+ /* if the server timestamps went up and the out_time backwards */
+ (gstrtptime - jbuf->base_rtptime > jbuf->prev_send_diff
+ && pts < jbuf->prev_out_time) ||
+ /* if the server timestamps went backwards and the out_time forwards */
+ (gstrtptime - jbuf->base_rtptime < jbuf->prev_send_diff
+ && pts > jbuf->prev_out_time) ||
+ /* if the server timestamps did not change */
+ gstrtptime - jbuf->base_rtptime == jbuf->prev_send_diff)) {
+ GST_DEBUG ("backwards timestamps, using previous time");
+ pts = jbuf->prev_out_time;
+ }
+ }
+
+ if (gap == 0 && dts != -1 && pts + jbuf->delay < dts) {
+ /* if we are going to produce a timestamp that is later than the input
+ * timestamp, we need to reset the jitterbuffer. Likely the server paused
+ * temporarily */
+ GST_DEBUG ("out %" GST_TIME_FORMAT " + %" G_GUINT64_FORMAT " < time %"
+ GST_TIME_FORMAT ", reset jitterbuffer and discard", GST_TIME_ARGS (pts),
+ jbuf->delay, GST_TIME_ARGS (dts));
+ rtp_jitter_buffer_reset_skew (jbuf);
+ rtp_jitter_buffer_resync (jbuf, dts, gstrtptime, ext_rtptime, TRUE);
+ pts = dts;
+ }
+
+ jbuf->prev_out_time = pts;
+ jbuf->prev_send_diff = gstrtptime - jbuf->base_rtptime;
+
+ done:
+ if (media_clock)
+ gst_object_unref (media_clock);
+ if (pipeline_clock)
+ gst_object_unref (pipeline_clock);
+
+ return pts;
+ }
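+
+ /* Summary of the dts handling per mode above (illustrative): in NONE and
+ * BUFFER mode the first packet gets dts 0 and all others -1, so timestamps
+ * are interpolated purely from the RTP timestamps; in SYNCED mode the first
+ * dts is kept as the base and later ones are ignored; in SLAVE mode every
+ * dts feeds calculate_skew(). */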
+
+
+ /**
+ * rtp_jitter_buffer_insert:
+ * @jbuf: an #RTPJitterBuffer
+ * @item: an #RTPJitterBufferItem to insert
+ * @head: TRUE when the head element changed.
+ * @percent: the buffering percent after insertion
+ *
+ * Inserts @item into the packet queue of @jbuf. The sequence number of the
+ * packet will be used to sort the packets. This function takes ownership of
+ * @item when the function returns %TRUE.
+ *
+ * When @head is %TRUE, the new packet was added at the head of the queue and
+ * will be available with the next call to rtp_jitter_buffer_pop() and
+ * rtp_jitter_buffer_peek().
+ *
+ * Returns: %FALSE if a packet with the same number already existed.
+ */
+ static gboolean
+ rtp_jitter_buffer_insert (RTPJitterBuffer * jbuf, RTPJitterBufferItem * item,
+ gboolean * head, gint * percent)
+ {
+ GList *list, *event = NULL;
+ guint16 seqnum;
+
+ g_return_val_if_fail (jbuf != NULL, FALSE);
+ g_return_val_if_fail (item != NULL, FALSE);
+
+ list = jbuf->packets.tail;
+
+ /* no seqnum, simply append then */
+ if (item->seqnum == -1)
+ goto append;
+
+ seqnum = item->seqnum;
+
+ /* loop the list to skip strictly larger seqnum buffers */
+ for (; list; list = g_list_previous (list)) {
+ guint16 qseq;
+ gint gap;
+ RTPJitterBufferItem *qitem = (RTPJitterBufferItem *) list;
+
+ if (qitem->seqnum == -1) {
+ /* keep a pointer to the first consecutive event if not already
+ * set. we will insert the packet after the event if we can't find
+ * a packet with lower sequence number before the event. */
+ if (event == NULL)
+ event = list;
+ continue;
+ }
+
+ qseq = qitem->seqnum;
+
+ /* compare the new seqnum to the one in the buffer */
+ gap = gst_rtp_buffer_compare_seqnum (seqnum, qseq);
+
+ /* we hit a packet with the same seqnum, notify a duplicate */
+ if (G_UNLIKELY (gap == 0))
+ goto duplicate;
+
+ /* seqnum > qseq, we can stop looking */
+ if (G_LIKELY (gap < 0))
+ break;
+
+ /* if we've found a packet with greater sequence number, cleanup the
+ * event pointer as the packet will be inserted before the event */
+ event = NULL;
+ }
+
+ /* if event is set it means that packets before the event had smaller
+ * sequence number, so we will insert our packet after the event */
+ if (event)
+ list = event;
+
+ append:
+ queue_do_insert (jbuf, list, (GList *) item);
+
+ /* buffering mode, update buffer stats */
+ if (jbuf->mode == RTP_JITTER_BUFFER_MODE_BUFFER)
+ update_buffer_level (jbuf, percent);
+ else if (percent)
+ *percent = -1;
+
+ /* the head was changed when we did not find a previous packet; set the
+ * return flag when requested. */
+ if (G_LIKELY (head))
+ *head = (list == NULL);
+
+ return TRUE;
+
+ /* ERRORS */
+ duplicate:
+ {
+ GST_DEBUG ("duplicate packet %d found", (gint) seqnum);
+ if (G_LIKELY (head))
+ *head = FALSE;
+ if (percent)
+ *percent = -1;
+ return FALSE;
+ }
+ }
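+
+ /* Example (illustrative): inserting seqnums 3, 1, 2 into an empty queue
+ * yields the order 1, 2, 3. The inserts of 3 and 1 report *head = TRUE
+ * because no packet with a lower seqnum was found, and re-inserting 2
+ * afterwards returns %FALSE as a duplicate. */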
+
+ /**
+ * rtp_jitter_buffer_alloc_item:
+ * @data: The data stored in this item
+ * @type: User specific item type
+ * @dts: Decoding Timestamp
+ * @pts: Presentation Timestamp
+ * @seqnum: Sequence number
+ * @count: Number of packets this item represents
+ * @rtptime: The RTP specific timestamp
+ * @free_data: A function to free @data (optional)
+ *
+ * Create an item that can then be stored in the jitter buffer.
+ *
+ * Returns: a newly allocated #RTPJitterBufferItem
+ */
+ static RTPJitterBufferItem *
+ rtp_jitter_buffer_alloc_item (gpointer data, guint type, GstClockTime dts,
+ GstClockTime pts, guint seqnum, guint count, guint rtptime,
+ GDestroyNotify free_data)
+ {
+ RTPJitterBufferItem *item;
+
+ item = g_slice_new (RTPJitterBufferItem);
+ item->data = data;
+ item->next = NULL;
+ item->prev = NULL;
+ item->type = type;
+ item->dts = dts;
+ item->pts = pts;
+ item->seqnum = seqnum;
+ item->count = count;
+ item->rtptime = rtptime;
+ item->free_data = free_data;
+
+ return item;
+ }
+
+ static inline RTPJitterBufferItem *
+ alloc_event_item (GstEvent * event)
+ {
+ return rtp_jitter_buffer_alloc_item (event, ITEM_TYPE_EVENT, -1, -1, -1, 0,
+ -1, (GDestroyNotify) gst_mini_object_unref);
+ }
+
+ /**
+ * rtp_jitter_buffer_append_event:
+ * @jbuf: an #RTPJitterBuffer
+ * @event: a #GstEvent to insert
+ *
+ * Inserts @event into the packet queue of @jbuf.
+ *
+ * Returns: %TRUE if the event is at the head of the queue
+ */
+ gboolean
+ rtp_jitter_buffer_append_event (RTPJitterBuffer * jbuf, GstEvent * event)
+ {
+ RTPJitterBufferItem *item = alloc_event_item (event);
+ gboolean head;
+ rtp_jitter_buffer_insert (jbuf, item, &head, NULL);
+ return head;
+ }
+
+ /**
+ * rtp_jitter_buffer_append_query:
+ * @jbuf: an #RTPJitterBuffer
+ * @query: a #GstQuery to insert
+ *
+ * Inserts @query into the packet queue of @jbuf.
+ *
+ * Returns: %TRUE if the query is at the head of the queue
+ */
+ gboolean
+ rtp_jitter_buffer_append_query (RTPJitterBuffer * jbuf, GstQuery * query)
+ {
+ RTPJitterBufferItem *item =
+ rtp_jitter_buffer_alloc_item (query, ITEM_TYPE_QUERY, -1, -1, -1, 0, -1,
+ NULL);
+ gboolean head;
+ rtp_jitter_buffer_insert (jbuf, item, &head, NULL);
+ return head;
+ }
+
+ /**
+ * rtp_jitter_buffer_append_lost_event:
+ * @jbuf: an #RTPJitterBuffer
+ * @event: a #GstEvent to insert
+ * @seqnum: Sequence number
+ * @lost_packets: Number of lost packets this item represents
+ *
+ * Inserts @event into the packet queue of @jbuf.
+ *
+ * Returns: %TRUE if the event is at the head of the queue
+ */
+ gboolean
+ rtp_jitter_buffer_append_lost_event (RTPJitterBuffer * jbuf, GstEvent * event,
+ guint16 seqnum, guint lost_packets)
+ {
+ RTPJitterBufferItem *item = rtp_jitter_buffer_alloc_item (event,
+ ITEM_TYPE_LOST, -1, -1, seqnum, lost_packets, -1,
+ (GDestroyNotify) gst_mini_object_unref);
+ gboolean head;
+
+ if (!rtp_jitter_buffer_insert (jbuf, item, &head, NULL)) {
+ /* Duplicate */
+ rtp_jitter_buffer_free_item (item);
+ head = FALSE;
+ }
+
+ return head;
+ }
+
+ /**
+ * rtp_jitter_buffer_append_buffer:
+ * @jbuf: an #RTPJitterBuffer
+ * @buf: a #GstBuffer to insert
+ * @dts: the decoding timestamp of @buf
+ * @pts: the presentation timestamp of @buf
+ * @seqnum: Sequence number
+ * @rtptime: the RTP timestamp of @buf
+ * @duplicate: set to %TRUE when the packet inserted was a duplicate
+ * @percent: the buffering percent after insertion
+ *
+ * Inserts @buf into the packet queue of @jbuf.
+ *
+ * Returns: %TRUE if the buffer is at the head of the queue
+ */
+ gboolean
+ rtp_jitter_buffer_append_buffer (RTPJitterBuffer * jbuf, GstBuffer * buf,
+ GstClockTime dts, GstClockTime pts, guint16 seqnum, guint rtptime,
+ gboolean * duplicate, gint * percent)
+ {
+ RTPJitterBufferItem *item = rtp_jitter_buffer_alloc_item (buf,
+ ITEM_TYPE_BUFFER, dts, pts, seqnum, 1, rtptime,
+ (GDestroyNotify) gst_mini_object_unref);
+ gboolean head;
+ gboolean inserted;
+
+ inserted = rtp_jitter_buffer_insert (jbuf, item, &head, percent);
+ if (!inserted)
+ rtp_jitter_buffer_free_item (item);
+
+ if (duplicate)
+ *duplicate = !inserted;
+
+ return head;
+ }
+
+ /**
+ * rtp_jitter_buffer_pop:
+ * @jbuf: an #RTPJitterBuffer
+ * @percent: the buffering percent
+ *
+ * Pops the oldest item from the packet queue of @jbuf. The popped item will
+ * have its timestamp adjusted with the incoming running_time and the detected
+ * clock skew.
+ *
+ * Returns: a #RTPJitterBufferItem or %NULL when there was no packet in the queue.
+ */
+ RTPJitterBufferItem *
+ rtp_jitter_buffer_pop (RTPJitterBuffer * jbuf, gint * percent)
+ {
+ GList *item = NULL;
+ GQueue *queue;
+
+ g_return_val_if_fail (jbuf != NULL, NULL);
+
+ queue = &jbuf->packets;
+
+ item = queue->head;
+ if (item) {
+ queue->head = item->next;
+ if (queue->head)
+ queue->head->prev = NULL;
+ else
+ queue->tail = NULL;
+ queue->length--;
+ }
+
+ /* buffering mode, update buffer stats */
+ if (jbuf->mode == RTP_JITTER_BUFFER_MODE_BUFFER)
+ update_buffer_level (jbuf, percent);
+ else if (percent)
+ *percent = -1;
+
+ /* let's clear the pointers so we can ensure we don't free items that are
+ * still in the jitterbuffer */
++#ifdef __TIZEN__
++ if (item)
++#endif
+ item->next = item->prev = NULL;
+
+ return (RTPJitterBufferItem *) item;
+ }
+
+ /**
+ * rtp_jitter_buffer_peek:
+ * @jbuf: an #RTPJitterBuffer
+ *
+ * Peek the oldest item from the packet queue of @jbuf.
+ *
+ * See rtp_jitter_buffer_insert() to check when an older packet was
+ * added.
+ *
+ * Returns: a #RTPJitterBufferItem or %NULL when there was no packet in the queue.
+ */
+ RTPJitterBufferItem *
+ rtp_jitter_buffer_peek (RTPJitterBuffer * jbuf)
+ {
+ g_return_val_if_fail (jbuf != NULL, NULL);
+
+ return (RTPJitterBufferItem *) jbuf->packets.head;
+ }
+
+ /**
+ * rtp_jitter_buffer_flush:
+ * @jbuf: an #RTPJitterBuffer
+ * @free_func: function to free each item (optional)
+ * @user_data: user data passed to @free_func
+ *
+ * Flush all packets from the jitterbuffer.
+ */
+ void
+ rtp_jitter_buffer_flush (RTPJitterBuffer * jbuf, GFunc free_func,
+ gpointer user_data)
+ {
+ GList *item;
+
+ g_return_if_fail (jbuf != NULL);
+
+ if (free_func == NULL)
+ free_func = (GFunc) rtp_jitter_buffer_free_item;
+
+ while ((item = g_queue_pop_head_link (&jbuf->packets)))
+ free_func ((RTPJitterBufferItem *) item, user_data);
+ }
+
+ /**
+ * rtp_jitter_buffer_is_buffering:
+ * @jbuf: an #RTPJitterBuffer
+ *
+ * Check if @jbuf is buffering currently. Users of the jitterbuffer should not
+ * pop packets while in buffering mode.
+ *
+ * Returns: the buffering state of @jbuf
+ */
+ gboolean
+ rtp_jitter_buffer_is_buffering (RTPJitterBuffer * jbuf)
+ {
+ return jbuf->buffering && !jbuf->buffering_disabled;
+ }
+
+ /**
+ * rtp_jitter_buffer_set_buffering:
+ * @jbuf: an #RTPJitterBuffer
+ * @buffering: the new buffering state
+ *
+ * Forces @jbuf to go into the buffering state.
+ */
+ void
+ rtp_jitter_buffer_set_buffering (RTPJitterBuffer * jbuf, gboolean buffering)
+ {
+ jbuf->buffering = buffering;
+ }
+
+ /**
+ * rtp_jitter_buffer_get_percent:
+ * @jbuf: an #RTPJitterBuffer
+ *
+ * Get the buffering percent of the jitterbuffer.
+ *
+ * Returns: the buffering percent
+ */
+ gint
+ rtp_jitter_buffer_get_percent (RTPJitterBuffer * jbuf)
+ {
+ gint percent;
+ guint64 level;
+
+ if (G_UNLIKELY (jbuf->high_level == 0))
+ return 100;
+
+ if (G_UNLIKELY (jbuf->buffering_disabled))
+ return 100;
+
+ level = get_buffer_level (jbuf);
+ percent = (level * 100 / jbuf->high_level);
+ percent = MIN (percent, 100);
+
+ return percent;
+ }
+
+ /**
+ * rtp_jitter_buffer_num_packets:
+ * @jbuf: an #RTPJitterBuffer
+ *
+ * Get the number of packets currently in @jbuf.
+ *
+ * Returns: The number of packets in @jbuf.
+ */
+ guint
+ rtp_jitter_buffer_num_packets (RTPJitterBuffer * jbuf)
+ {
+ g_return_val_if_fail (jbuf != NULL, 0);
+
+ return jbuf->packets.length;
+ }
+
+ /**
+ * rtp_jitter_buffer_get_ts_diff:
+ * @jbuf: an #RTPJitterBuffer
+ *
+ * Get the difference between the timestamps of first and last packet in the
+ * jitterbuffer.
+ *
+ * Returns: The difference expressed in the timestamp units of the packets.
+ */
+ guint32
+ rtp_jitter_buffer_get_ts_diff (RTPJitterBuffer * jbuf)
+ {
+ guint64 high_ts, low_ts;
+ RTPJitterBufferItem *high_buf, *low_buf;
+ guint32 result;
+
+ g_return_val_if_fail (jbuf != NULL, 0);
+
+ high_buf = (RTPJitterBufferItem *) g_queue_peek_tail_link (&jbuf->packets);
+ low_buf = (RTPJitterBufferItem *) g_queue_peek_head_link (&jbuf->packets);
+
+ if (!high_buf || !low_buf || high_buf == low_buf)
+ return 0;
+
+ high_ts = high_buf->rtptime;
+ low_ts = low_buf->rtptime;
+
+ /* it needs to work if ts wraps */
+ if (high_ts >= low_ts) {
+ result = (guint32) (high_ts - low_ts);
+ } else {
+ result = (guint32) (high_ts + G_MAXUINT32 + 1 - low_ts);
+ }
+ return result;
+ }
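+
+ /* Wraparound example (illustrative): low_ts = 0xFFFFFF00 and
+ * high_ts = 0x00000100 yields 0x100 + G_MAXUINT32 + 1 - 0xFFFFFF00 = 512
+ * units, i.e. the small forward difference rather than a huge negative
+ * one. */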
+
+
+ /*
+ * rtp_jitter_buffer_get_seqnum_diff:
+ * @jbuf: an #RTPJitterBuffer
+ *
+ * Get the difference between the seqnum of first and last packet in the
+ * jitterbuffer.
+ *
+ * Returns: The difference expressed in seqnum.
+ */
+ static guint16
+ rtp_jitter_buffer_get_seqnum_diff (RTPJitterBuffer * jbuf)
+ {
+ guint32 high_seqnum, low_seqnum;
+ RTPJitterBufferItem *high_buf, *low_buf;
+ guint16 result;
+
+ g_return_val_if_fail (jbuf != NULL, 0);
+
+ high_buf = (RTPJitterBufferItem *) g_queue_peek_tail_link (&jbuf->packets);
+ low_buf = (RTPJitterBufferItem *) g_queue_peek_head_link (&jbuf->packets);
+
+ while (high_buf && high_buf->seqnum == -1)
+ high_buf = (RTPJitterBufferItem *) high_buf->prev;
+
+ while (low_buf && low_buf->seqnum == -1)
+ low_buf = (RTPJitterBufferItem *) low_buf->next;
+
+ if (!high_buf || !low_buf || high_buf == low_buf)
+ return 0;
+
+ high_seqnum = high_buf->seqnum;
+ low_seqnum = low_buf->seqnum;
+
+ /* it needs to work if the seqnum wraps */
+ if (high_seqnum >= low_seqnum) {
+ result = (guint16) (high_seqnum - low_seqnum);
+ } else {
+ result = (guint16) (high_seqnum + G_MAXUINT16 + 1 - low_seqnum);
+ }
+ return result;
+ }
+
+ /**
+ * rtp_jitter_buffer_get_sync:
+ * @jbuf: an #RTPJitterBuffer
+ * @rtptime: result RTP time
+ * @timestamp: result GStreamer timestamp
+ * @clock_rate: clock-rate of @rtptime
+ * @last_rtptime: last seen rtptime.
+ *
+ * Calculates the relation between the RTP timestamp and the GStreamer timestamp
+ * used for constructing timestamps.
+ *
+ * For extended RTP timestamp @rtptime with a clock-rate of @clock_rate,
+ * the GStreamer timestamp is currently @timestamp.
+ *
+ * The last seen extended RTP timestamp with clock-rate @clock_rate is returned in
+ * @last_rtptime.
+ */
+ void
+ rtp_jitter_buffer_get_sync (RTPJitterBuffer * jbuf, guint64 * rtptime,
+ guint64 * timestamp, guint32 * clock_rate, guint64 * last_rtptime)
+ {
+ if (rtptime)
+ *rtptime = jbuf->base_extrtp;
+ if (timestamp)
+ *timestamp = jbuf->base_time + jbuf->skew;
+ if (clock_rate)
+ *clock_rate = jbuf->clock_rate;
+ if (last_rtptime)
+ *last_rtptime = jbuf->last_rtptime;
+ }
+
+ /**
+ * rtp_jitter_buffer_can_fast_start:
+ * @jbuf: an #RTPJitterBuffer
+ * @num_packet: Number of consecutive packets needed
+ *
+ * Check if the queue holds enough packets with consecutive seqnums to
+ * start delivering them.
+ *
+ * Returns: %TRUE if the required number of consecutive packets was found.
+ */
+ gboolean
+ rtp_jitter_buffer_can_fast_start (RTPJitterBuffer * jbuf, gint num_packet)
+ {
+ gboolean ret = TRUE;
+ RTPJitterBufferItem *last_item = NULL, *item;
+ gint i;
+
+ if (rtp_jitter_buffer_num_packets (jbuf) < num_packet)
+ return FALSE;
+
+ item = rtp_jitter_buffer_peek (jbuf);
+ for (i = 0; i < num_packet; i++) {
+ if (G_LIKELY (last_item)) {
+ guint16 expected_seqnum = last_item->seqnum + 1;
+
+ if (expected_seqnum != item->seqnum) {
+ ret = FALSE;
+ break;
+ }
+ }
+
+ last_item = item;
+ item = (RTPJitterBufferItem *) last_item->next;
+ }
+
+ return ret;
+ }
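+
+ /* Example (illustrative): with num_packet = 4, a queue holding seqnums
+ * 10, 11, 12, 13 allows a fast start, while 10, 11, 13, 14 does not,
+ * because the expected seqnum 12 is missing. */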
+
+ gboolean
+ rtp_jitter_buffer_is_full (RTPJitterBuffer * jbuf)
+ {
+ return rtp_jitter_buffer_get_seqnum_diff (jbuf) >= 32765 &&
+ rtp_jitter_buffer_num_packets (jbuf) > 10000;
+ }
+
+
+ /**
+ * rtp_jitter_buffer_free_item:
+ * @item: the item to be freed
+ *
+ * Free the jitter buffer item.
+ */
+ void
+ rtp_jitter_buffer_free_item (RTPJitterBufferItem * item)
+ {
+ g_return_if_fail (item != NULL);
+ /* needs to be unlinked first */
+ g_return_if_fail (item->next == NULL);
+ g_return_if_fail (item->prev == NULL);
+
+ if (item->data && item->free_data)
+ item->free_data (item->data);
+ g_slice_free (RTPJitterBufferItem, item);
+ }
--- /dev/null
+ /* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ * Copyright (C) 2015 Kurento (http://kurento.org/)
+ * @author: Miguel París <mparisdiaz@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #define GLIB_DISABLE_DEPRECATION_WARNINGS
+
+ #include "rtpstats.h"
+ #include "rtptwcc.h"
+
+ void
+ gst_rtp_packet_rate_ctx_reset (RTPPacketRateCtx * ctx, gint32 clock_rate)
+ {
+ ctx->clock_rate = clock_rate;
+ ctx->probed = FALSE;
+ ctx->avg_packet_rate = -1;
+ ctx->last_ts = -1;
+ }
+
+ guint32
+ gst_rtp_packet_rate_ctx_update (RTPPacketRateCtx * ctx, guint16 seqnum,
+ guint32 ts)
+ {
+ guint64 new_ts, diff_ts;
+ gint diff_seqnum;
+ gint32 new_packet_rate;
+ gint32 base;
+
+ if (ctx->clock_rate <= 0) {
+ return ctx->avg_packet_rate;
+ }
+
+ new_ts = ctx->last_ts;
+ gst_rtp_buffer_ext_timestamp (&new_ts, ts);
+
+ if (!ctx->probed) {
+ ctx->probed = TRUE;
+ goto done_but_save;
+ }
+
+ diff_seqnum = gst_rtp_buffer_compare_seqnum (ctx->last_seqnum, seqnum);
+ /* Ignore seqnums that are over 15,000 away from the latest one, it's close
+ * to 2^14 but far enough to avoid any risk of computing error.
+ */
+ if (diff_seqnum > 15000)
+ goto done_but_save;
+
+ /* Ignore any packet that is in the past, we're only interested in newer
+ * packets to compute the packet rate.
+ */
+ if (diff_seqnum <= 0 || new_ts <= ctx->last_ts)
+ goto done;
+
+ diff_ts = new_ts - ctx->last_ts;
+ diff_ts = gst_util_uint64_scale_int (diff_ts, GST_SECOND, ctx->clock_rate);
+ new_packet_rate = gst_util_uint64_scale (diff_seqnum, GST_SECOND, diff_ts);
+
+ /* The goal is that higher packet rates "win".
+ * If there's a sudden burst, the average will go up fast,
+ * but it will go down again slowly.
+ * This is useful for bursty cases, where a lot of packets are close
+ * to each other and should allow a higher reorder/dropout there.
+ * Round up the new average.
+ * We adapt at different rates depending on the packet rate, so it's not too
+ * jumpy.
+ */
+ if (ctx->avg_packet_rate > new_packet_rate)
+ base = MAX (ctx->avg_packet_rate / 3, 8); /* about 333 ms */
+ else
+ base = MAX (ctx->avg_packet_rate / 15, 2); /* about 66 ms */
+
+ diff_seqnum = MIN (diff_seqnum, base - 1);
+
+ ctx->avg_packet_rate = (((base - diff_seqnum) * ctx->avg_packet_rate) +
+ (new_packet_rate * diff_seqnum)) / base;
+
+
+ done_but_save:
+
+ ctx->last_seqnum = seqnum;
+ ctx->last_ts = new_ts;
+ done:
+
+ return ctx->avg_packet_rate;
+ }
+
+ guint32
+ gst_rtp_packet_rate_ctx_get (RTPPacketRateCtx * ctx)
+ {
+ return ctx->avg_packet_rate;
+ }
+
+ guint32
+ gst_rtp_packet_rate_ctx_get_max_dropout (RTPPacketRateCtx * ctx, gint32 time_ms)
+ {
+ if (time_ms <= 0 || !ctx->probed || ctx->avg_packet_rate == -1) {
+ return RTP_DEF_DROPOUT;
+ }
+
+ return MAX (RTP_MIN_DROPOUT, ctx->avg_packet_rate * time_ms / 1000);
+ }
+
+ guint32
+ gst_rtp_packet_rate_ctx_get_max_misorder (RTPPacketRateCtx * ctx,
+ gint32 time_ms)
+ {
+ if (time_ms <= 0 || !ctx->probed || ctx->avg_packet_rate == -1) {
+ return RTP_DEF_MISORDER;
+ }
+
+ return MAX (RTP_MIN_MISORDER, ctx->avg_packet_rate * time_ms / 1000);
+ }
+
+ /**
+ * rtp_stats_init_defaults:
+ * @stats: an #RTPSessionStats struct
+ *
+ * Initialize @stats with its default values.
+ */
+ void
+ rtp_stats_init_defaults (RTPSessionStats * stats)
+ {
+ rtp_stats_set_bandwidths (stats, -1, -1, -1, -1);
+ stats->min_interval = RTP_STATS_MIN_INTERVAL;
+ stats->bye_timeout = RTP_STATS_BYE_TIMEOUT;
+ stats->nacks_dropped = 0;
+ stats->nacks_sent = 0;
+ stats->nacks_received = 0;
+ }
+
+ /**
+ * rtp_stats_set_bandwidths:
+ * @stats: an #RTPSessionStats struct
+ * @rtp_bw: RTP bandwidth
+ * @rtcp_bw: RTCP bandwidth
+ * @rs: sender RTCP bandwidth
+ * @rr: receiver RTCP bandwidth
+ *
+ * Configure the bandwidth parameters in the stats. When an input variable is
+ * set to -1, it will be calculated from the other input variables and from the
+ * defaults.
+ */
+ void
+ rtp_stats_set_bandwidths (RTPSessionStats * stats, guint rtp_bw,
+ gdouble rtcp_bw, guint rs, guint rr)
+ {
+ GST_DEBUG ("recalc bandwidths: RTP %u, RTCP %f, RS %u, RR %u", rtp_bw,
+ rtcp_bw, rs, rr);
+
+ /* when given, sender and receiver bandwidth add up to the total
+ * rtcp bandwidth */
+ if (rs != -1 && rr != -1)
+ rtcp_bw = rs + rr;
+
+ /* If rtcp_bw is between 0 and 1, it is a fraction of rtp_bw */
+ if (rtcp_bw > 0.0 && rtcp_bw < 1.0) {
+ if (rtp_bw > 0.0)
+ rtcp_bw = rtp_bw * rtcp_bw;
+ else
+ rtcp_bw = -1.0;
+ }
+
+ /* RTCP is 5% of the RTP bandwidth */
+ if (rtp_bw == -1 && rtcp_bw > 1.0)
+ rtp_bw = rtcp_bw * 20;
+ else if (rtp_bw != -1 && rtcp_bw < 0.0)
+ rtcp_bw = rtp_bw / 20;
+ else if (rtp_bw == -1 && rtcp_bw < 0.0) {
+ /* nothing given, take defaults */
+ rtp_bw = RTP_STATS_BANDWIDTH;
+ rtcp_bw = rtp_bw * RTP_STATS_RTCP_FRACTION;
+ }
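+ /* For example (illustrative numbers): rtp_bw = 64000 with rtcp_bw = 0.05
+ * (a fraction) yields rtcp_bw = 64000 * 0.05 = 3200, the usual 5% share. */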
+
+ stats->bandwidth = rtp_bw;
+ stats->rtcp_bandwidth = rtcp_bw;
+
+ /* now figure out the fractions */
+ if (rs == -1) {
+ /* rs unknown */
+ if (rr == -1) {
+ /* both not given, use defaults */
+ rs = stats->rtcp_bandwidth * RTP_STATS_SENDER_FRACTION;
+ rr = stats->rtcp_bandwidth * RTP_STATS_RECEIVER_FRACTION;
+ } else {
+ /* rr known, calculate rs */
+ if (stats->rtcp_bandwidth > rr)
+ rs = stats->rtcp_bandwidth - rr;
+ else
+ rs = 0;
+ }
+ } else if (rr == -1) {
+ /* rs known, calculate rr */
+ if (stats->rtcp_bandwidth > rs)
+ rr = stats->rtcp_bandwidth - rs;
+ else
+ rr = 0;
+ }
+
+ if (stats->rtcp_bandwidth > 0) {
+ stats->sender_fraction = ((gdouble) rs) / ((gdouble) stats->rtcp_bandwidth);
+ stats->receiver_fraction = 1.0 - stats->sender_fraction;
+ } else {
+ /* no RTCP bandwidth, set dummy values */
+ stats->sender_fraction = 0.0;
+ stats->receiver_fraction = 0.0;
+ }
+ GST_DEBUG ("bandwidths: RTP %u, RTCP %u, RS %f, RR %f", stats->bandwidth,
+ stats->rtcp_bandwidth, stats->sender_fraction, stats->receiver_fraction);
+ }
+
+ /**
+ * rtp_stats_calculate_rtcp_interval:
+ * @stats: an #RTPSessionStats struct
+ * @we_send: if we are a sender
+ * @profile: RTP profile of this session
+ * @ptp: if this session is a point-to-point session
+ * @first: if this is the first time
+ *
+ * Calculate the RTCP interval. The result of this function is the amount of
+ * time to wait (in nanoseconds) before sending a new RTCP message.
+ *
+ * Returns: the RTCP interval.
+ */
+ GstClockTime
+ rtp_stats_calculate_rtcp_interval (RTPSessionStats * stats, gboolean we_send,
+ GstRTPProfile profile, gboolean ptp, gboolean first)
+ {
+ gdouble members, senders, n;
+ gdouble avg_rtcp_size, rtcp_bw;
+ gdouble interval;
+ gdouble rtcp_min_time;
+
+ if (profile == GST_RTP_PROFILE_AVPF || profile == GST_RTP_PROFILE_SAVPF) {
+ /* RFC 4585 3.4d), 3.5.1 */
+
+ if (first && !ptp)
+ rtcp_min_time = 1.0;
+ else
+ rtcp_min_time = 0.0;
+ } else {
+ /* Very first call at application start-up uses half the min
+ * delay for quicker notification while still allowing some time
+ * before reporting for randomization and to learn about other
+ * sources so the report interval will converge to the correct
+ * interval more quickly.
+ */
+ rtcp_min_time = stats->min_interval;
+ if (first)
+ rtcp_min_time /= 2.0;
+ }
+
+ /* Dedicate a fraction of the RTCP bandwidth to senders unless
+ * the number of senders is large enough that their share is
+ * more than that fraction.
+ */
+ n = members = stats->active_sources;
+ senders = (gdouble) stats->sender_sources;
+ rtcp_bw = stats->rtcp_bandwidth;
+
+ if (senders <= members * stats->sender_fraction) {
+ if (we_send) {
+ rtcp_bw *= stats->sender_fraction;
+ n = senders;
+ } else {
+ rtcp_bw *= stats->receiver_fraction;
+ n -= senders;
+ }
+ }
+
+ /* no bandwidth for RTCP, return NONE to signal that we don't want to send
+ * RTCP packets */
+ if (rtcp_bw <= 0.0001)
+ return GST_CLOCK_TIME_NONE;
+
+ avg_rtcp_size = 8.0 * stats->avg_rtcp_packet_size;
+ /*
+ * The effective number of sites times the average packet size is
+ * the total number of octets sent when each site sends a report.
+ * Dividing this by the effective bandwidth gives the time
+ * interval over which those packets must be sent in order to
+ * meet the bandwidth target, with a minimum enforced. In that
+ * time interval we send one report so this time is also our
+ * average time between reports.
+ */
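+ /* Illustrative numbers: avg_rtcp_size = 8 * 100 = 800 bits, n = 10
+ * members and rtcp_bw = 3200 bits/s give an interval of
+ * 800 * 10 / 3200 = 2.5 seconds between reports. */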
+ GST_DEBUG ("avg size %f, n %f, rtcp_bw %f", avg_rtcp_size, n, rtcp_bw);
+ interval = avg_rtcp_size * n / rtcp_bw;
+ if (interval < rtcp_min_time)
+ interval = rtcp_min_time;
+
+ return interval * GST_SECOND;
+ }
+
+ /**
+ * rtp_stats_add_rtcp_jitter:
+ * @stats: an #RTPSessionStats struct
+ * @interval: an RTCP interval
+ *
+ * Apply a random jitter to the @interval. @interval is typically obtained with
+ * rtp_stats_calculate_rtcp_interval().
+ *
+ * Returns: the new RTCP interval.
+ */
+ GstClockTime
+ rtp_stats_add_rtcp_jitter (RTPSessionStats * stats, GstClockTime interval)
+ {
+ gdouble temp;
+
+ /* see RFC 3550 p 30
+ * To compensate for "unconditional reconsideration" converging to a
+ * value below the intended average.
+ */
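+ /* g_random_double_range (0.5, 1.5) has mean 1.0; RFC 3550 divides the
+ * result by e - 3/2 (about 1.21828) so that, under timer reconsideration,
+ * the long-run average interval stays at the intended value. */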
+ #define COMPENSATION (2.71828 - 1.5)
+
+ temp = (interval * g_random_double_range (0.5, 1.5)) / COMPENSATION;
+
+ return (GstClockTime) temp;
+ }
+
+
+ /**
+ * rtp_stats_calculate_bye_interval:
+ * @stats: an #RTPSessionStats struct
+ *
+ * Calculate the BYE interval. The result of this function is the amount of
+ * time to wait (in nanoseconds) before sending a BYE message.
+ *
+ * Returns: the BYE interval.
+ */
+ GstClockTime
+ rtp_stats_calculate_bye_interval (RTPSessionStats * stats)
+ {
+ gdouble members;
+ gdouble avg_rtcp_size, rtcp_bw;
+ gdouble interval;
+ gdouble rtcp_min_time;
+
+ /* no interval when we have less than 50 members */
+ if (stats->active_sources < 50)
+ return 0;
+
+ rtcp_min_time = (stats->min_interval) / 2.0;
+
+ /* Dedicate a fraction of the RTCP bandwidth to senders unless
+ * the number of senders is large enough that their share is
+ * more than that fraction.
+ */
+ members = stats->bye_members;
+ rtcp_bw = stats->rtcp_bandwidth * stats->receiver_fraction;
+
+ /* no bandwidth for RTCP, return NONE to signal that we don't want to send
+ * RTCP packets */
+ if (rtcp_bw <= 0.0001)
+ return GST_CLOCK_TIME_NONE;
+
+ avg_rtcp_size = 8.0 * stats->avg_rtcp_packet_size;
+ /*
+ * The effective number of sites times the average packet size is
+ * the total number of octets sent when each site sends a report.
+ * Dividing this by the effective bandwidth gives the time
+ * interval over which those packets must be sent in order to
+ * meet the bandwidth target, with a minimum enforced. In that
+ * time interval we send one report so this time is also our
+ * average time between reports.
+ */
+ interval = avg_rtcp_size * members / rtcp_bw;
+ if (interval < rtcp_min_time)
+ interval = rtcp_min_time;
+
+ return interval * GST_SECOND;
+ }
+
+ /**
+ * rtp_stats_get_packets_lost:
+ * @stats: an #RTPSourceStats struct
+ *
+ * Calculate the total number of RTP packets lost since beginning of
+ * reception. Packets that arrive late are not considered lost, and
+ * duplicates are not taken into account. Hence, the loss may be negative
+ * if there are duplicates.
+ *
+ * Returns: total RTP packets lost.
+ */
+ gint64
+ rtp_stats_get_packets_lost (const RTPSourceStats * stats)
+ {
+ gint64 lost;
+ guint64 extended_max, expected;
+
+ extended_max = stats->cycles + stats->max_seq;
+ expected = extended_max - stats->base_seq + 1;
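+ /* e.g. after one seqnum wrap: cycles = 65536, max_seq = 10 and
+ * base_seq = 65530 give expected = 65536 + 10 - 65530 + 1 = 17 */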
+ lost = expected - stats->packets_received;
+
+ return lost;
+ }
+
+ void
+ rtp_stats_set_min_interval (RTPSessionStats * stats, gdouble min_interval)
+ {
+ stats->min_interval = min_interval;
+ }
+
+ gboolean
+ __g_socket_address_equal (GSocketAddress * a, GSocketAddress * b)
+ {
+ GInetSocketAddress *ia, *ib;
+ GInetAddress *iaa, *iab;
+
+ ia = G_INET_SOCKET_ADDRESS (a);
+ ib = G_INET_SOCKET_ADDRESS (b);
+
+ if (g_inet_socket_address_get_port (ia) !=
+ g_inet_socket_address_get_port (ib))
+ return FALSE;
+
+ iaa = g_inet_socket_address_get_address (ia);
+ iab = g_inet_socket_address_get_address (ib);
+
+ return g_inet_address_equal (iaa, iab);
+ }
+
+ gchar *
+ __g_socket_address_to_string (GSocketAddress * addr)
+ {
+ GInetSocketAddress *ia;
+ gchar *ret, *tmp;
+
+ ia = G_INET_SOCKET_ADDRESS (addr);
+
+ tmp = g_inet_address_to_string (g_inet_socket_address_get_address (ia));
+ ret = g_strdup_printf ("%s:%u", tmp, g_inet_socket_address_get_port (ia));
+ g_free (tmp);
+
+ return ret;
+ }
+
+ static void
+ _append_structure_to_value_array (GValueArray * array, GstStructure * s)
+ {
+ GValue *val;
+ g_value_array_append (array, NULL);
+ val = g_value_array_get_nth (array, array->n_values - 1);
+ g_value_init (val, GST_TYPE_STRUCTURE);
+ g_value_take_boxed (val, s);
+ }
+
+ static void
+ _structure_take_value_array (GstStructure * s,
+ const gchar * field_name, GValueArray * array)
+ {
+ GValue value = G_VALUE_INIT;
+ g_value_init (&value, G_TYPE_VALUE_ARRAY);
+ g_value_take_boxed (&value, array);
+ gst_structure_take_value (s, field_name, &value);
+ g_value_unset (&value);
+ }
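+ /* Sketch of the result: a structure named "RTPTWCCPackets" whose "packets"
+ * field is a GValueArray holding one "RTPTWCCPacket" structure per reported
+ * packet (seqnum, local/remote timestamps, payload type, size, lost). */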
+
+ GstStructure *
+ rtp_twcc_stats_get_packets_structure (GArray * twcc_packets)
+ {
+ GstStructure *ret = gst_structure_new_empty ("RTPTWCCPackets");
+ GValueArray *array = g_value_array_new (0);
+ guint i;
+
+ for (i = 0; i < twcc_packets->len; i++) {
+ RTPTWCCPacket *pkt = &g_array_index (twcc_packets, RTPTWCCPacket, i);
+
+ GstStructure *pkt_s = gst_structure_new ("RTPTWCCPacket",
+ "seqnum", G_TYPE_UINT, pkt->seqnum,
+ "local-ts", G_TYPE_UINT64, pkt->local_ts,
+ "remote-ts", G_TYPE_UINT64, pkt->remote_ts,
+ "payload-type", G_TYPE_UCHAR, pkt->pt,
+ "size", G_TYPE_UINT, pkt->size,
+ "lost", G_TYPE_BOOLEAN, pkt->status == RTP_TWCC_PACKET_STATUS_NOT_RECV,
+ NULL);
+ _append_structure_to_value_array (array, pkt_s);
+ }
+
+ _structure_take_value_array (ret, "packets", array);
+ return ret;
+ }
+
+ static void
+ rtp_twcc_stats_calculate_stats (RTPTWCCStats * stats, GArray * twcc_packets)
+ {
+ guint packets_recv = 0;
+ guint i;
+
+ for (i = 0; i < twcc_packets->len; i++) {
+ RTPTWCCPacket *pkt = &g_array_index (twcc_packets, RTPTWCCPacket, i);
+
+ if (pkt->status != RTP_TWCC_PACKET_STATUS_NOT_RECV)
+ packets_recv++;
+
+ if (GST_CLOCK_TIME_IS_VALID (pkt->local_ts) &&
+ GST_CLOCK_TIME_IS_VALID (stats->last_local_ts)) {
+ pkt->local_delta = GST_CLOCK_DIFF (stats->last_local_ts, pkt->local_ts);
+ }
+
+ if (GST_CLOCK_TIME_IS_VALID (pkt->remote_ts) &&
+ GST_CLOCK_TIME_IS_VALID (stats->last_remote_ts)) {
+ pkt->remote_delta =
+ GST_CLOCK_DIFF (stats->last_remote_ts, pkt->remote_ts);
+ }
+
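+ /* delta_delta is the change in inter-packet spacing between the send
+ * side and the receive side: positive values suggest growing queueing
+ * delay along the path, negative values that packets bunched up. */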
+ if (GST_CLOCK_STIME_IS_VALID (pkt->local_delta) &&
+ GST_CLOCK_STIME_IS_VALID (pkt->remote_delta)) {
+ pkt->delta_delta = pkt->remote_delta - pkt->local_delta;
+ }
+
+ stats->last_local_ts = pkt->local_ts;
+ stats->last_remote_ts = pkt->remote_ts;
+ }
+
+ stats->packets_sent = twcc_packets->len;
+ stats->packets_recv = packets_recv;
+ }
+
+ static gint
+ _get_window_start_index (RTPTWCCStats * stats, GstClockTime duration,
+ GstClockTime * local_duration, GstClockTime * remote_duration)
+ {
+ RTPTWCCPacket *last = NULL;
+ guint i;
+
+ if (stats->packets->len < 2)
+ return -1;
+
+ for (i = 0; i < stats->packets->len; i++) {
+ guint start_index = stats->packets->len - 1 - i;
+ RTPTWCCPacket *pkt =
+ &g_array_index (stats->packets, RTPTWCCPacket, start_index);
+ if (GST_CLOCK_TIME_IS_VALID (pkt->local_ts)
+ && GST_CLOCK_TIME_IS_VALID (pkt->remote_ts)) {
+ /* first find the last valid packet */
+ if (last == NULL) {
+ last = pkt;
+ } else {
+ /* and then get the duration in local ts */
+ GstClockTimeDiff ld = GST_CLOCK_DIFF (pkt->local_ts, last->local_ts);
+ if (ld >= duration) {
+ *local_duration = ld;
+ *remote_duration = GST_CLOCK_DIFF (pkt->remote_ts, last->remote_ts);
+ return start_index;
+ }
+ }
+ }
+ }
+
+ return -1;
+ }
+
+ static void
+ rtp_twcc_stats_calculate_windowed_stats (RTPTWCCStats * stats)
+ {
+ guint i;
+ gint start_idx;
+ guint bits_sent = 0;
+ guint bits_recv = 0;
+ guint packets_sent = 0;
+ guint packets_recv = 0;
+ guint packets_lost;
+ GstClockTimeDiff delta_delta_sum = 0;
+ guint delta_delta_count = 0;
+ GstClockTime local_duration;
+ GstClockTime remote_duration;
+
+ start_idx = _get_window_start_index (stats, stats->window_size,
+ &local_duration, &remote_duration);
+ if (start_idx == -1) {
+ return;
+ }
+
+ /* remove the old packets */
+ if (start_idx > 0)
+ g_array_remove_range (stats->packets, 0, start_idx);
+
+ packets_sent = stats->packets->len - 1;
+
+ for (i = 0; i < packets_sent; i++) {
+ RTPTWCCPacket *pkt = &g_array_index (stats->packets, RTPTWCCPacket, i);
+
+ if (GST_CLOCK_TIME_IS_VALID (pkt->local_ts)) {
+ bits_sent += pkt->size * 8;
+ }
+
+ if (GST_CLOCK_TIME_IS_VALID (pkt->remote_ts)) {
+ bits_recv += pkt->size * 8;
+ packets_recv++;
+ }
+
+ if (GST_CLOCK_STIME_IS_VALID (pkt->delta_delta)) {
+ delta_delta_sum += pkt->delta_delta;
+ delta_delta_count++;
+ }
+ }
+
+ packets_lost = packets_sent - packets_recv;
+ stats->packet_loss_pct = (packets_lost * 100) / (gfloat) packets_sent;
+
+ if (delta_delta_count) {
+ GstClockTimeDiff avg_delta_of_delta = delta_delta_sum / delta_delta_count;
+ if (GST_CLOCK_STIME_IS_VALID (stats->avg_delta_of_delta)) {
+ stats->avg_delta_of_delta_change =
+ (avg_delta_of_delta -
++#ifndef __TIZEN__
+ stats->avg_delta_of_delta) / (250 * GST_USECOND);
++#else
++ stats->avg_delta_of_delta) / (gfloat) (250 * GST_USECOND);
++#endif
+ }
+ stats->avg_delta_of_delta = avg_delta_of_delta;
+ }
+
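+ /* scale bits to bits per second; e.g. 80000 bits over a 200 ms window
+ * gives 80000 * GST_SECOND / (200 * GST_MSECOND) = 400000 bps */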
+ if (local_duration > 0)
+ stats->bitrate_sent =
+ gst_util_uint64_scale (bits_sent, GST_SECOND, local_duration);
+ if (remote_duration > 0)
+ stats->bitrate_recv =
+ gst_util_uint64_scale (bits_recv, GST_SECOND, remote_duration);
+
+ GST_DEBUG ("Got stats: bits_sent: %u, bits_recv: %u, packets_sent = %u, "
+ "packets_recv: %u, packetlost_pct = %f, sent_bitrate = %u, "
+ "recv_bitrate = %u, delta-delta-avg = %" GST_STIME_FORMAT ", "
+ "delta-delta-change: %f", bits_sent, bits_recv, stats->packets_sent,
+ packets_recv, stats->packet_loss_pct, stats->bitrate_sent,
+ stats->bitrate_recv, GST_STIME_ARGS (stats->avg_delta_of_delta),
+ stats->avg_delta_of_delta_change);
+ }
+
+ RTPTWCCStats *
+ rtp_twcc_stats_new (void)
+ {
+ RTPTWCCStats *stats = g_new0 (RTPTWCCStats, 1);
+ stats->packets = g_array_new (FALSE, FALSE, sizeof (RTPTWCCPacket));
+ stats->last_local_ts = GST_CLOCK_TIME_NONE;
+ stats->last_remote_ts = GST_CLOCK_TIME_NONE;
+ stats->avg_delta_of_delta = GST_CLOCK_STIME_NONE;
+ stats->window_size = 300 * GST_MSECOND; /* FIXME: could be configurable? */
+ return stats;
+ }
+
+ void
+ rtp_twcc_stats_free (RTPTWCCStats * stats)
+ {
+ g_array_unref (stats->packets);
+ g_free (stats);
+ }
+
+ static GstStructure *
+ rtp_twcc_stats_get_stats_structure (RTPTWCCStats * stats)
+ {
+ return gst_structure_new ("RTPTWCCStats",
+ "bitrate-sent", G_TYPE_UINT, stats->bitrate_sent,
+ "bitrate-recv", G_TYPE_UINT, stats->bitrate_recv,
+ "packets-sent", G_TYPE_UINT, stats->packets_sent,
+ "packets-recv", G_TYPE_UINT, stats->packets_recv,
+ "packet-loss-pct", G_TYPE_DOUBLE, stats->packet_loss_pct,
+ "avg-delta-of-delta", G_TYPE_INT64, stats->avg_delta_of_delta, NULL);
+ }
+
+ GstStructure *
+ rtp_twcc_stats_process_packets (RTPTWCCStats * stats, GArray * twcc_packets)
+ {
+ rtp_twcc_stats_calculate_stats (stats, twcc_packets);
+ g_array_append_vals (stats->packets, twcc_packets->data, twcc_packets->len);
+ rtp_twcc_stats_calculate_windowed_stats (stats);
+ return rtp_twcc_stats_get_stats_structure (stats);
+ }
--- /dev/null
-
+ /* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * <2006> Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+ /*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include "gstrtspelements.h"
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++#include "gstrtspsrc.h"
++#endif
+ static gboolean
+ plugin_init (GstPlugin * plugin)
+ {
+ gboolean ret = FALSE;
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ if (!gst_element_register (plugin, "rtspsrc", GST_RANK_PRIMARY,
++ GST_TYPE_RTSPSRC))
++ return FALSE;
++#else
+ ret |= GST_ELEMENT_REGISTER (rtspsrc, plugin);
++#endif
++
+ ret |= GST_ELEMENT_REGISTER (rtpdec, plugin);
+
+ return ret;
+ }
+
+ GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ rtsp,
+ "transfer data via RTSP",
+ plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
--- /dev/null
-
+ /* GStreamer
+ * Copyright (C) <2005,2006> Wim Taymans <wim at fluendo dot com>
+ * <2006> Lutz Mueller <lutz at topfrose dot de>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+ /*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+ /**
+ * SECTION:element-rtspsrc
+ * @title: rtspsrc
+ *
+ * Makes a connection to an RTSP server and reads the data.
+ * rtspsrc strictly follows RFC 2326 and therefore does not (yet) support
+ * RealMedia/Quicktime/Microsoft extensions.
+ *
+ * RTSP supports transport over TCP or UDP in unicast or multicast mode. By
+ * default rtspsrc will negotiate a connection in the following order:
+ * UDP unicast/UDP multicast/TCP. The order cannot be changed but the allowed
+ * protocols can be controlled with the #GstRTSPSrc:protocols property.
+ *
+ * rtspsrc currently understands SDP as the format of the session description.
+ * For each stream listed in the SDP a new rtp_stream\%d pad will be created
+ * with caps derived from the SDP media description. These caps have media type
+ * "application/x-rtp" and can be connected to any available RTP depayloader
+ * element.
+ *
+ * rtspsrc will internally instantiate an RTP session manager element
+ * that will handle the RTCP messages to and from the server, jitter removal,
+ * and packet reordering, along with providing a clock for the pipeline.
+ * This feature is implemented using the gstrtpbin element.
+ *
+ * rtspsrc acts like a live source and will therefore only generate data in the
+ * PLAYING state.
+ *
+ * If an RTP session times out, rtspsrc will generate an element message
+ * named "GstRTSPSrcTimeout". Currently this is only supported for timeouts
+ * triggered by RTCP.
+ *
+ * The message's structure contains three fields:
+ *
+ * GstRTSPSrcTimeoutCause `cause`: the cause of the timeout.
+ *
+ * #gint `stream-number`: an internal identifier of the stream that timed out.
+ *
+ * #guint `ssrc`: the SSRC of the stream that timed out.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 rtspsrc location=rtsp://some.server/url ! fakesink
+ * ]| Establish a connection to an RTSP server and send the raw RTP packets to a
+ * fakesink.
+ *
+ * NOTE: rtspsrc will send a PAUSE command to the server if you set the
+ * element to the PAUSED state, and will send a PLAY command if you set it to
+ * the PLAYING state.
+ *
+ * Unfortunately, going to the NULL state involves going through PAUSED, so
+ * rtspsrc does not know the difference and will send a PAUSE when you wanted
+ * a TEARDOWN. The workaround is to hook into the `before-send` signal and
+ * return FALSE in this case.
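+ *
+ * A minimal sketch of that workaround (`going_to_null` is a hypothetical
+ * application-side flag set before shutting the pipeline down):
+ * |[
+ * static gboolean
+ * before_send_cb (GstRTSPSrc * src, GstRTSPMessage * msg, gpointer data)
+ * {
+ *   if (gst_rtsp_message_get_type (msg) == GST_RTSP_MESSAGE_REQUEST) {
+ *     GstRTSPMethod method;
+ *
+ *     gst_rtsp_message_parse_request (msg, &method, NULL, NULL);
+ *     if (method == GST_RTSP_PAUSE && going_to_null)
+ *       return FALSE;  // suppress the PAUSE, a TEARDOWN is coming
+ *   }
+ *   return TRUE;
+ * }
+ *
+ * g_signal_connect (src, "before-send", G_CALLBACK (before_send_cb), NULL);
+ * ]|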
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #ifdef HAVE_UNISTD_H
+ #include <unistd.h>
+ #endif /* HAVE_UNISTD_H */
+ #include <stdlib.h>
+ #include <string.h>
+ #include <stdio.h>
+ #include <stdarg.h>
+
+ #include <gst/net/gstnet.h>
+ #include <gst/sdp/gstsdpmessage.h>
+ #include <gst/sdp/gstmikey.h>
+ #include <gst/rtp/rtp.h>
+
+ #include "gst/gst-i18n-plugin.h"
+
+ #include "gstrtspelements.h"
+ #include "gstrtspsrc.h"
+
+ GST_DEBUG_CATEGORY_STATIC (rtspsrc_debug);
+ #define GST_CAT_DEFAULT (rtspsrc_debug)
+
+ static GstStaticPadTemplate rtptemplate = GST_STATIC_PAD_TEMPLATE ("stream_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS ("application/x-rtp; application/x-rdt"));
+
+ /* templates used internally */
+ static GstStaticPadTemplate anysrctemplate =
+ GST_STATIC_PAD_TEMPLATE ("internalsrc_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+ static GstStaticPadTemplate anysinktemplate =
+ GST_STATIC_PAD_TEMPLATE ("internalsink_%u",
+ GST_PAD_SINK,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+ enum
+ {
+ SIGNAL_HANDLE_REQUEST,
+ SIGNAL_ON_SDP,
+ SIGNAL_SELECT_STREAM,
+ SIGNAL_NEW_MANAGER,
+ SIGNAL_REQUEST_RTCP_KEY,
+ SIGNAL_ACCEPT_CERTIFICATE,
+ SIGNAL_BEFORE_SEND,
+ SIGNAL_PUSH_BACKCHANNEL_BUFFER,
+ SIGNAL_GET_PARAMETER,
+ SIGNAL_GET_PARAMETERS,
+ SIGNAL_SET_PARAMETER,
+ LAST_SIGNAL
+ };
+
+ enum _GstRtspSrcRtcpSyncMode
+ {
+ RTCP_SYNC_ALWAYS,
+ RTCP_SYNC_INITIAL,
+ RTCP_SYNC_RTP
+ };
+
+ enum _GstRtspSrcBufferMode
+ {
+ BUFFER_MODE_NONE,
+ BUFFER_MODE_SLAVE,
+ BUFFER_MODE_BUFFER,
+ BUFFER_MODE_AUTO,
+ BUFFER_MODE_SYNCED
+ };
+
+ #define GST_TYPE_RTSP_SRC_BUFFER_MODE (gst_rtsp_src_buffer_mode_get_type())
+ static GType
+ gst_rtsp_src_buffer_mode_get_type (void)
+ {
+ static GType buffer_mode_type = 0;
+ static const GEnumValue buffer_modes[] = {
+ {BUFFER_MODE_NONE, "Only use RTP timestamps", "none"},
+ {BUFFER_MODE_SLAVE, "Slave receiver to sender clock", "slave"},
+ {BUFFER_MODE_BUFFER, "Do low/high watermark buffering", "buffer"},
+ {BUFFER_MODE_AUTO, "Choose mode depending on stream live", "auto"},
+ {BUFFER_MODE_SYNCED, "Synchronized sender and receiver clocks", "synced"},
+ {0, NULL, NULL},
+ };
+
+ if (!buffer_mode_type) {
+ buffer_mode_type =
+ g_enum_register_static ("GstRTSPSrcBufferMode", buffer_modes);
+ }
+ return buffer_mode_type;
+ }
+
+ enum _GstRtspSrcNtpTimeSource
+ {
+ NTP_TIME_SOURCE_NTP,
+ NTP_TIME_SOURCE_UNIX,
+ NTP_TIME_SOURCE_RUNNING_TIME,
+ NTP_TIME_SOURCE_CLOCK_TIME
+ };
+
+ #define DEBUG_RTSP(__self,msg) gst_rtspsrc_print_rtsp_message (__self, msg)
+ #define DEBUG_SDP(__self,msg) gst_rtspsrc_print_sdp_message (__self, msg)
+
+ #define GST_TYPE_RTSP_SRC_NTP_TIME_SOURCE (gst_rtsp_src_ntp_time_source_get_type())
+ static GType
+ gst_rtsp_src_ntp_time_source_get_type (void)
+ {
+ static GType ntp_time_source_type = 0;
+ static const GEnumValue ntp_time_source_values[] = {
+ {NTP_TIME_SOURCE_NTP, "NTP time based on realtime clock", "ntp"},
+ {NTP_TIME_SOURCE_UNIX, "UNIX time based on realtime clock", "unix"},
+ {NTP_TIME_SOURCE_RUNNING_TIME,
+ "Running time based on pipeline clock",
+ "running-time"},
+ {NTP_TIME_SOURCE_CLOCK_TIME, "Pipeline clock time", "clock-time"},
+ {0, NULL, NULL},
+ };
+
+ if (!ntp_time_source_type) {
+ ntp_time_source_type =
+ g_enum_register_static ("GstRTSPSrcNtpTimeSource",
+ ntp_time_source_values);
+ }
+ return ntp_time_source_type;
+ }
+
+ enum _GstRtspBackchannel
+ {
+ BACKCHANNEL_NONE,
+ BACKCHANNEL_ONVIF
+ };
+
+ #define GST_TYPE_RTSP_BACKCHANNEL (gst_rtsp_backchannel_get_type())
+ static GType
+ gst_rtsp_backchannel_get_type (void)
+ {
+ static GType backchannel_type = 0;
+ static const GEnumValue backchannel_values[] = {
+ {BACKCHANNEL_NONE, "No backchannel", "none"},
+ {BACKCHANNEL_ONVIF, "ONVIF audio backchannel", "onvif"},
+ {0, NULL, NULL},
+ };
+
+ if (G_UNLIKELY (backchannel_type == 0)) {
+ backchannel_type =
+ g_enum_register_static ("GstRTSPBackchannel", backchannel_values);
+ }
+ return backchannel_type;
+ }
+
+ #define BACKCHANNEL_ONVIF_HDR_REQUIRE_VAL "www.onvif.org/ver20/backchannel"
+
+ #define DEFAULT_LOCATION NULL
+ #define DEFAULT_PROTOCOLS GST_RTSP_LOWER_TRANS_UDP | GST_RTSP_LOWER_TRANS_UDP_MCAST | GST_RTSP_LOWER_TRANS_TCP
+ #define DEFAULT_DEBUG FALSE
+ #define DEFAULT_RETRY 20
+ #define DEFAULT_TIMEOUT 5000000
+ #define DEFAULT_UDP_BUFFER_SIZE 0x80000
+ #define DEFAULT_TCP_TIMEOUT 20000000
+ #define DEFAULT_LATENCY_MS 2000
+ #define DEFAULT_DROP_ON_LATENCY FALSE
+ #define DEFAULT_CONNECTION_SPEED 0
+ #define DEFAULT_NAT_METHOD GST_RTSP_NAT_DUMMY
+ #define DEFAULT_DO_RTCP TRUE
+ #define DEFAULT_DO_RTSP_KEEP_ALIVE TRUE
+ #define DEFAULT_PROXY NULL
+ #define DEFAULT_RTP_BLOCKSIZE 0
+ #define DEFAULT_USER_ID NULL
+ #define DEFAULT_USER_PW NULL
+ #define DEFAULT_BUFFER_MODE BUFFER_MODE_AUTO
+ #define DEFAULT_PORT_RANGE NULL
+ #define DEFAULT_SHORT_HEADER FALSE
+ #define DEFAULT_PROBATION 2
+ #define DEFAULT_UDP_RECONNECT TRUE
+ #define DEFAULT_MULTICAST_IFACE NULL
+ #define DEFAULT_NTP_SYNC FALSE
+ #define DEFAULT_USE_PIPELINE_CLOCK FALSE
+ #define DEFAULT_TLS_VALIDATION_FLAGS G_TLS_CERTIFICATE_VALIDATE_ALL
+ #define DEFAULT_TLS_DATABASE NULL
+ #define DEFAULT_TLS_INTERACTION NULL
+ #define DEFAULT_DO_RETRANSMISSION TRUE
+ #define DEFAULT_NTP_TIME_SOURCE NTP_TIME_SOURCE_NTP
+ #define DEFAULT_USER_AGENT "GStreamer/" PACKAGE_VERSION
+ #define DEFAULT_MAX_RTCP_RTP_TIME_DIFF 1000
+ #define DEFAULT_RFC7273_SYNC FALSE
+ #define DEFAULT_MAX_TS_OFFSET_ADJUSTMENT G_GUINT64_CONSTANT(0)
+ #define DEFAULT_MAX_TS_OFFSET G_GINT64_CONSTANT(3000000000)
+ #define DEFAULT_VERSION GST_RTSP_VERSION_1_0
+ #define DEFAULT_BACKCHANNEL GST_RTSP_BACKCHANNEL_NONE
+ #define DEFAULT_TEARDOWN_TIMEOUT (100 * GST_MSECOND)
+ #define DEFAULT_ONVIF_MODE FALSE
+ #define DEFAULT_ONVIF_RATE_CONTROL TRUE
+ #define DEFAULT_IS_LIVE TRUE
+ #define DEFAULT_IGNORE_X_SERVER_REPLY FALSE
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++#define DEFAULT_START_POSITION 0
++#endif
++
+ enum
+ {
+ PROP_0,
+ PROP_LOCATION,
+ PROP_PROTOCOLS,
+ PROP_DEBUG,
+ PROP_RETRY,
+ PROP_TIMEOUT,
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ PROP_START_POSITION,
++ PROP_RESUME_POSITION,
++#endif
+ PROP_TCP_TIMEOUT,
+ PROP_LATENCY,
+ PROP_DROP_ON_LATENCY,
+ PROP_CONNECTION_SPEED,
+ PROP_NAT_METHOD,
+ PROP_DO_RTCP,
+ PROP_DO_RTSP_KEEP_ALIVE,
+ PROP_PROXY,
+ PROP_PROXY_ID,
+ PROP_PROXY_PW,
+ PROP_RTP_BLOCKSIZE,
+ PROP_USER_ID,
+ PROP_USER_PW,
+ PROP_BUFFER_MODE,
+ PROP_PORT_RANGE,
+ PROP_UDP_BUFFER_SIZE,
+ PROP_SHORT_HEADER,
+ PROP_PROBATION,
+ PROP_UDP_RECONNECT,
+ PROP_MULTICAST_IFACE,
+ PROP_NTP_SYNC,
+ PROP_USE_PIPELINE_CLOCK,
+ PROP_SDES,
+ PROP_TLS_VALIDATION_FLAGS,
+ PROP_TLS_DATABASE,
+ PROP_TLS_INTERACTION,
+ PROP_DO_RETRANSMISSION,
+ PROP_NTP_TIME_SOURCE,
+ PROP_USER_AGENT,
+ PROP_MAX_RTCP_RTP_TIME_DIFF,
+ PROP_RFC7273_SYNC,
+ PROP_MAX_TS_OFFSET_ADJUSTMENT,
+ PROP_MAX_TS_OFFSET,
+ PROP_DEFAULT_VERSION,
+ PROP_BACKCHANNEL,
+ PROP_TEARDOWN_TIMEOUT,
+ PROP_ONVIF_MODE,
+ PROP_ONVIF_RATE_CONTROL,
+ PROP_IS_LIVE,
+ PROP_IGNORE_X_SERVER_REPLY
+ };
+
+ #define GST_TYPE_RTSP_NAT_METHOD (gst_rtsp_nat_method_get_type())
+ static GType
+ gst_rtsp_nat_method_get_type (void)
+ {
+ static GType rtsp_nat_method_type = 0;
+ static const GEnumValue rtsp_nat_method[] = {
+ {GST_RTSP_NAT_NONE, "None", "none"},
+ {GST_RTSP_NAT_DUMMY, "Send Dummy packets", "dummy"},
+ {0, NULL, NULL},
+ };
+
+ if (!rtsp_nat_method_type) {
+ rtsp_nat_method_type =
+ g_enum_register_static ("GstRTSPNatMethod", rtsp_nat_method);
+ }
+ return rtsp_nat_method_type;
+ }
+
+ #define RTSP_SRC_RESPONSE_ERROR(src, response_msg, err_cat, err_code, error_message) \
+ do { \
+ GST_ELEMENT_ERROR_WITH_DETAILS((src), err_cat, err_code, ("%s", error_message), \
+ ("%s (%d)", (response_msg)->type_data.response.reason, (response_msg)->type_data.response.code), \
+ ("rtsp-status-code", G_TYPE_UINT, (response_msg)->type_data.response.code, \
+ "rtsp-status-reason", G_TYPE_STRING, GST_STR_NULL((response_msg)->type_data.response.reason), NULL)); \
+ } while (0)
+
+ typedef struct _ParameterRequest
+ {
+ gint cmd;
+ gchar *content_type;
+ GString *body;
+ GstPromise *promise;
+ } ParameterRequest;
+
+ static void gst_rtspsrc_finalize (GObject * object);
+
+ static void gst_rtspsrc_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+ static void gst_rtspsrc_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+ static GstClock *gst_rtspsrc_provide_clock (GstElement * element);
+
+ static void gst_rtspsrc_uri_handler_init (gpointer g_iface,
+ gpointer iface_data);
+
+ static gboolean gst_rtspsrc_set_proxy (GstRTSPSrc * rtsp, const gchar * proxy);
+ static void gst_rtspsrc_set_tcp_timeout (GstRTSPSrc * rtspsrc, guint64 timeout);
+
+ static GstStateChangeReturn gst_rtspsrc_change_state (GstElement * element,
+ GstStateChange transition);
+ static gboolean gst_rtspsrc_send_event (GstElement * element, GstEvent * event);
+ static void gst_rtspsrc_handle_message (GstBin * bin, GstMessage * message);
+
+ static gboolean gst_rtspsrc_setup_auth (GstRTSPSrc * src,
+ GstRTSPMessage * response);
+
+ static gboolean gst_rtspsrc_loop_send_cmd (GstRTSPSrc * src, gint cmd,
+ gint mask);
+ static GstRTSPResult gst_rtspsrc_send_cb (GstRTSPExtension * ext,
+ GstRTSPMessage * request, GstRTSPMessage * response, GstRTSPSrc * src);
+
+ static GstRTSPResult gst_rtspsrc_open (GstRTSPSrc * src, gboolean async);
+ static GstRTSPResult gst_rtspsrc_play (GstRTSPSrc * src, GstSegment * segment,
+ gboolean async, const gchar * seek_style);
+ static GstRTSPResult gst_rtspsrc_pause (GstRTSPSrc * src, gboolean async);
+ static GstRTSPResult gst_rtspsrc_close (GstRTSPSrc * src, gboolean async,
+ gboolean only_close);
+
+ static gboolean gst_rtspsrc_uri_set_uri (GstURIHandler * handler,
+ const gchar * uri, GError ** error);
+ static gchar *gst_rtspsrc_uri_get_uri (GstURIHandler * handler);
+
+ static gboolean gst_rtspsrc_activate_streams (GstRTSPSrc * src);
+ static gboolean gst_rtspsrc_loop (GstRTSPSrc * src);
+ static gboolean gst_rtspsrc_stream_push_event (GstRTSPSrc * src,
+ GstRTSPStream * stream, GstEvent * event);
+ static gboolean gst_rtspsrc_push_event (GstRTSPSrc * src, GstEvent * event);
+ static void gst_rtspsrc_connection_flush (GstRTSPSrc * src, gboolean flush);
+ static GstRTSPResult gst_rtsp_conninfo_close (GstRTSPSrc * src,
+ GstRTSPConnInfo * info, gboolean free);
+ static void
+ gst_rtspsrc_print_rtsp_message (GstRTSPSrc * src, const GstRTSPMessage * msg);
+ static void
+ gst_rtspsrc_print_sdp_message (GstRTSPSrc * src, const GstSDPMessage * msg);
+
+ static GstRTSPResult
+ gst_rtspsrc_get_parameter (GstRTSPSrc * src, ParameterRequest * req);
+
+ static GstRTSPResult
+ gst_rtspsrc_set_parameter (GstRTSPSrc * src, ParameterRequest * req);
+
+ static gboolean get_parameter (GstRTSPSrc * src, const gchar * parameter,
+ const gchar * content_type, GstPromise * promise);
+
+ static gboolean get_parameters (GstRTSPSrc * src, gchar ** parameters,
+ const gchar * content_type, GstPromise * promise);
+
+ static gboolean set_parameter (GstRTSPSrc * src, const gchar * name,
+ const gchar * value, const gchar * content_type, GstPromise * promise);
+
+ static GstFlowReturn gst_rtspsrc_push_backchannel_buffer (GstRTSPSrc * src,
+ guint id, GstSample * sample);
+
+ typedef struct
+ {
+ guint8 pt;
+ GstCaps *caps;
+ } PtMapItem;
+
+ /* commands we send to our loop to notify it of events */
+ #define CMD_OPEN (1 << 0)
+ #define CMD_PLAY (1 << 1)
+ #define CMD_PAUSE (1 << 2)
+ #define CMD_CLOSE (1 << 3)
+ #define CMD_WAIT (1 << 4)
+ #define CMD_RECONNECT (1 << 5)
+ #define CMD_LOOP (1 << 6)
+ #define CMD_GET_PARAMETER (1 << 7)
+ #define CMD_SET_PARAMETER (1 << 8)
+
+ /* mask for all commands */
+ #define CMD_ALL ((CMD_SET_PARAMETER << 1) - 1)
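+ /* CMD_SET_PARAMETER is (1 << 8), so this evaluates to 0x1ff: bits 0-8,
+ * i.e. every command defined above */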
+
+ #define GST_ELEMENT_PROGRESS(el, type, code, text) \
+ G_STMT_START { \
+ gchar *__txt = _gst_element_error_printf text; \
+ gst_element_post_message (GST_ELEMENT_CAST (el), \
+ gst_message_new_progress (GST_OBJECT_CAST (el), \
+ GST_PROGRESS_TYPE_ ##type, code, __txt)); \
+ g_free (__txt); \
+ } G_STMT_END
+
+ static guint gst_rtspsrc_signals[LAST_SIGNAL] = { 0 };
+
+ #define gst_rtspsrc_parent_class parent_class
+ G_DEFINE_TYPE_WITH_CODE (GstRTSPSrc, gst_rtspsrc, GST_TYPE_BIN,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER, gst_rtspsrc_uri_handler_init));
+ GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtspsrc, "rtspsrc", GST_RANK_NONE,
+ GST_TYPE_RTSPSRC, rtsp_element_init (plugin));
+
+ #ifndef GST_DISABLE_GST_DEBUG
+ static inline const char *
+ cmd_to_string (guint cmd)
+ {
+ switch (cmd) {
+ case CMD_OPEN:
+ return "OPEN";
+ case CMD_PLAY:
+ return "PLAY";
+ case CMD_PAUSE:
+ return "PAUSE";
+ case CMD_CLOSE:
+ return "CLOSE";
+ case CMD_WAIT:
+ return "WAIT";
+ case CMD_RECONNECT:
+ return "RECONNECT";
+ case CMD_LOOP:
+ return "LOOP";
+ case CMD_GET_PARAMETER:
+ return "GET_PARAMETER";
+ case CMD_SET_PARAMETER:
+ return "SET_PARAMETER";
+ }
+
+ return "unknown";
+ }
+ #endif
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++static void
++gst_rtspsrc_post_error_message (GstRTSPSrc * src, GstRTSPSrcError error_id,
++ const gchar * error_string)
++{
++ GstMessage *message;
++ GstStructure *structure;
++ gboolean ret = TRUE;
++
++ GST_ERROR_OBJECT (src, "[%d] %s", error_id, error_string);
++
++ structure = gst_structure_new ("streaming_error",
++ "error_id", G_TYPE_UINT, error_id,
++ "error_string", G_TYPE_STRING, error_string, NULL);
++
++ message =
++ gst_message_new_custom (GST_MESSAGE_ERROR, GST_OBJECT (src), structure);
++
++ ret = gst_element_post_message (GST_ELEMENT (src), message);
++ if (!ret)
++ GST_ERROR_OBJECT (src, "failed to post error message.");
++
++ return;
++}
++#endif
++
+ static gboolean
+ default_select_stream (GstRTSPSrc * src, guint id, GstCaps * caps)
+ {
+ GST_DEBUG_OBJECT (src, "default handler");
+ return TRUE;
+ }
+
+ static gboolean
+ select_stream_accum (GSignalInvocationHint * ihint,
+ GValue * return_accu, const GValue * handler_return, gpointer data)
+ {
+ gboolean myboolean;
+
+ myboolean = g_value_get_boolean (handler_return);
+ GST_DEBUG ("accum %d", myboolean);
+ g_value_set_boolean (return_accu, myboolean);
+
+ /* stop emission if FALSE */
+ return myboolean;
+ }
+
+ static gboolean
+ default_before_send (GstRTSPSrc * src, GstRTSPMessage * msg)
+ {
+ GST_DEBUG_OBJECT (src, "default handler");
+ return TRUE;
+ }
+
+ static gboolean
+ before_send_accum (GSignalInvocationHint * ihint,
+ GValue * return_accu, const GValue * handler_return, gpointer data)
+ {
+ gboolean myboolean;
+
+ myboolean = g_value_get_boolean (handler_return);
+ g_value_set_boolean (return_accu, myboolean);
+
+ /* prevent send if FALSE */
+ return myboolean;
+ }
+
+ static void
+ gst_rtspsrc_class_init (GstRTSPSrcClass * klass)
+ {
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstBinClass *gstbin_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstbin_class = (GstBinClass *) klass;
+
+ GST_DEBUG_CATEGORY_INIT (rtspsrc_debug, "rtspsrc", 0, "RTSP src");
+
+ gobject_class->set_property = gst_rtspsrc_set_property;
+ gobject_class->get_property = gst_rtspsrc_get_property;
+
+ gobject_class->finalize = gst_rtspsrc_finalize;
+
+ g_object_class_install_property (gobject_class, PROP_LOCATION,
+ g_param_spec_string ("location", "RTSP Location",
+ "Location of the RTSP url to read",
+ DEFAULT_LOCATION, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_PROTOCOLS,
+ g_param_spec_flags ("protocols", "Protocols",
+ "Allowed lower transport protocols", GST_TYPE_RTSP_LOWER_TRANS,
+ DEFAULT_PROTOCOLS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_DEBUG,
+ g_param_spec_boolean ("debug", "Debug",
+ "Dump request and response messages to stdout"
+ "(DEPRECATED: Printed all RTSP message to gstreamer log as 'log' level)",
+ DEFAULT_DEBUG,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED));
+
+ g_object_class_install_property (gobject_class, PROP_RETRY,
+ g_param_spec_uint ("retry", "Retry",
+ "Max number of retries when allocating RTP ports.",
+ 0, G_MAXUINT16, DEFAULT_RETRY,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_TIMEOUT,
+ g_param_spec_uint64 ("timeout", "Timeout",
+ "Retry TCP transport after UDP timeout microseconds (0 = disabled)",
+ 0, G_MAXUINT64, DEFAULT_TIMEOUT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ g_object_class_install_property (gobject_class, PROP_START_POSITION,
++ g_param_spec_uint64 ("pending-start-position", "set start position",
++ "Set start position before PLAYING request.",
++ 0, G_MAXUINT64, DEFAULT_START_POSITION,
++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++ g_object_class_install_property (gobject_class, PROP_RESUME_POSITION,
++ g_param_spec_uint64 ("resume-position", "set resume position",
++ "Set resume position before PLAYING request after pause.",
++ 0, G_MAXUINT64, DEFAULT_START_POSITION,
++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++#endif
+ g_object_class_install_property (gobject_class, PROP_TCP_TIMEOUT,
+ g_param_spec_uint64 ("tcp-timeout", "TCP Timeout",
+ "Fail after timeout microseconds on TCP connections (0 = disabled)",
+ 0, G_MAXUINT64, DEFAULT_TCP_TIMEOUT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_LATENCY,
+ g_param_spec_uint ("latency", "Buffer latency in ms",
+ "Amount of ms to buffer", 0, G_MAXUINT, DEFAULT_LATENCY_MS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_DROP_ON_LATENCY,
+ g_param_spec_boolean ("drop-on-latency",
+ "Drop buffers when maximum latency is reached",
+ "Tells the jitterbuffer to never exceed the given latency in size",
+ DEFAULT_DROP_ON_LATENCY, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_CONNECTION_SPEED,
+ g_param_spec_uint64 ("connection-speed", "Connection Speed",
+ "Network connection speed in kbps (0 = unknown)",
+ 0, G_MAXUINT64 / 1000, DEFAULT_CONNECTION_SPEED,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_NAT_METHOD,
+ g_param_spec_enum ("nat-method", "NAT Method",
+ "Method to use for traversing firewalls and NAT",
+ GST_TYPE_RTSP_NAT_METHOD, DEFAULT_NAT_METHOD,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:do-rtcp:
+ *
+ * Enable RTCP support. Some old servers don't like RTCP; for those, this property
+ * needs to be set to FALSE.
+ */
+ g_object_class_install_property (gobject_class, PROP_DO_RTCP,
+ g_param_spec_boolean ("do-rtcp", "Do RTCP",
+ "Send RTCP packets, disable for old incompatible server.",
+ DEFAULT_DO_RTCP, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:do-rtsp-keep-alive:
+ *
+ * Enable RTSP keep alive support. Some old servers don't like RTSP
+ * keep alive; for those, this property needs to be set to FALSE.
+ */
+ g_object_class_install_property (gobject_class, PROP_DO_RTSP_KEEP_ALIVE,
+ g_param_spec_boolean ("do-rtsp-keep-alive", "Do RTSP Keep Alive",
+ "Send RTSP keep alive packets, disable for old incompatible server.",
+ DEFAULT_DO_RTSP_KEEP_ALIVE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:proxy:
+ *
+ * Set the proxy parameters. This has to be a string of the format
+ * [http://][user:passwd@]host[:port].
+ */
+ g_object_class_install_property (gobject_class, PROP_PROXY,
+ g_param_spec_string ("proxy", "Proxy",
+ "Proxy settings for HTTP tunneling. Format: [http://][user:passwd@]host[:port]",
+ DEFAULT_PROXY, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstRTSPSrc:proxy-id:
+ *
+ * Sets the proxy URI user id for authentication. If the URI set via the
+ * "proxy" property contains a user-id already, that will take precedence.
+ *
+ * Since: 1.2
+ */
+ g_object_class_install_property (gobject_class, PROP_PROXY_ID,
+ g_param_spec_string ("proxy-id", "proxy-id",
+ "HTTP proxy URI user id for authentication", "",
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstRTSPSrc:proxy-pw:
+ *
+ * Sets the proxy URI password for authentication. If the URI set via the
+ * "proxy" property contains a password already, that will take precedence.
+ *
+ * Since: 1.2
+ */
+ g_object_class_install_property (gobject_class, PROP_PROXY_PW,
+ g_param_spec_string ("proxy-pw", "proxy-pw",
+ "HTTP proxy URI user password for authentication", "",
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:rtp-blocksize:
+ *
+ * RTP packet size to suggest to the server.
+ */
+ g_object_class_install_property (gobject_class, PROP_RTP_BLOCKSIZE,
+ g_param_spec_uint ("rtp-blocksize", "RTP Blocksize",
+ "RTP package size to suggest to server (0 = disabled)",
+ 0, 65536, DEFAULT_RTP_BLOCKSIZE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_USER_ID,
+ g_param_spec_string ("user-id", "user-id",
+ "RTSP location URI user id for authentication", DEFAULT_USER_ID,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_USER_PW,
+ g_param_spec_string ("user-pw", "user-pw",
+ "RTSP location URI user password for authentication", DEFAULT_USER_PW,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:buffer-mode:
+ *
+ * Control the buffering and timestamping mode used by the jitterbuffer.
+ */
+ g_object_class_install_property (gobject_class, PROP_BUFFER_MODE,
+ g_param_spec_enum ("buffer-mode", "Buffer Mode",
+ "Control the buffering algorithm in use",
+ GST_TYPE_RTSP_SRC_BUFFER_MODE, DEFAULT_BUFFER_MODE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:port-range:
+ *
+ * Configure the client port numbers that can be used to receive RTP and
+ * RTCP.
+ */
+ g_object_class_install_property (gobject_class, PROP_PORT_RANGE,
+ g_param_spec_string ("port-range", "Port range",
+ "Client port range that can be used to receive RTP and RTCP data, "
+ "eg. 3000-3005 (NULL = no restrictions)", DEFAULT_PORT_RANGE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:udp-buffer-size:
+ *
+ * Size of the kernel UDP receive buffer in bytes.
+ */
+ g_object_class_install_property (gobject_class, PROP_UDP_BUFFER_SIZE,
+ g_param_spec_int ("udp-buffer-size", "UDP Buffer Size",
+ "Size of the kernel UDP receive buffer in bytes, 0=default",
+ 0, G_MAXINT, DEFAULT_UDP_BUFFER_SIZE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:short-header:
+ *
+ * Only send the basic RTSP headers for broken encoders.
+ */
+ g_object_class_install_property (gobject_class, PROP_SHORT_HEADER,
+ g_param_spec_boolean ("short-header", "Short Header",
+ "Only send the basic RTSP headers for broken encoders",
+ DEFAULT_SHORT_HEADER, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_PROBATION,
+ g_param_spec_uint ("probation", "Number of probations",
+ "Consecutive packet sequence numbers to accept the source",
+ 0, G_MAXUINT, DEFAULT_PROBATION,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_UDP_RECONNECT,
+ g_param_spec_boolean ("udp-reconnect", "Reconnect to the server",
+ "Reconnect to the server if RTSP connection is closed when doing UDP",
+ DEFAULT_UDP_RECONNECT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MULTICAST_IFACE,
+ g_param_spec_string ("multicast-iface", "Multicast Interface",
+ "The network interface on which to join the multicast group",
+ DEFAULT_MULTICAST_IFACE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_NTP_SYNC,
+ g_param_spec_boolean ("ntp-sync", "Sync on NTP clock",
+ "Synchronize received streams to the NTP clock", DEFAULT_NTP_SYNC,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_USE_PIPELINE_CLOCK,
+ g_param_spec_boolean ("use-pipeline-clock", "Use pipeline clock",
+ "Use the pipeline running-time to set the NTP time in the RTCP SR messages"
+ "(DEPRECATED: Use ntp-time-source property)",
+ DEFAULT_USE_PIPELINE_CLOCK,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED));
+
+ g_object_class_install_property (gobject_class, PROP_SDES,
+ g_param_spec_boxed ("sdes", "SDES",
+ "The SDES items of this session",
+ GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:tls-validation-flags:
+ *
+ * TLS certificate validation flags used to validate server
+ * certificate.
+ *
+ * Since: 1.2.1
+ */
+ g_object_class_install_property (gobject_class, PROP_TLS_VALIDATION_FLAGS,
+ g_param_spec_flags ("tls-validation-flags", "TLS validation flags",
+ "TLS certificate validation flags used to validate the server certificate",
+ G_TYPE_TLS_CERTIFICATE_FLAGS, DEFAULT_TLS_VALIDATION_FLAGS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:tls-database:
+ *
+ * TLS database with anchor certificate authorities used to validate
+ * the server certificate.
+ *
+ * Since: 1.4
+ */
+ g_object_class_install_property (gobject_class, PROP_TLS_DATABASE,
+ g_param_spec_object ("tls-database", "TLS database",
+ "TLS database with anchor certificate authorities used to validate the server certificate",
+ G_TYPE_TLS_DATABASE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:tls-interaction:
+ *
+ * A #GTlsInteraction object to be used when the connection or certificate
+ * database need to interact with the user. This will be used to prompt the
+ * user for passwords where necessary.
+ *
+ * Since: 1.6
+ */
+ g_object_class_install_property (gobject_class, PROP_TLS_INTERACTION,
+ g_param_spec_object ("tls-interaction", "TLS interaction",
+ "A GTlsInteraction object to prompt the user for password or certificate",
+ G_TYPE_TLS_INTERACTION, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:do-retransmission:
+ *
+ * Attempt to ask the server to retransmit lost packets according to RFC4588.
+ *
+ * Note: currently only works with SSRC-multiplexed retransmission streams
+ *
+ * Since: 1.6
+ */
+ g_object_class_install_property (gobject_class, PROP_DO_RETRANSMISSION,
+ g_param_spec_boolean ("do-retransmission", "Retransmission",
+ "Ask the server to retransmit lost packets",
+ DEFAULT_DO_RETRANSMISSION,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:ntp-time-source:
+ *
+ * Allows selecting the time source that should be used
+ * for the NTP time in RTCP packets.
+ *
+ * Since: 1.6
+ */
+ g_object_class_install_property (gobject_class, PROP_NTP_TIME_SOURCE,
+ g_param_spec_enum ("ntp-time-source", "NTP Time Source",
+ "NTP time source for RTCP packets",
+ GST_TYPE_RTSP_SRC_NTP_TIME_SOURCE, DEFAULT_NTP_TIME_SOURCE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:user-agent:
+ *
+ * The string to set in the User-Agent header.
+ *
+ * Since: 1.6
+ */
+ g_object_class_install_property (gobject_class, PROP_USER_AGENT,
+ g_param_spec_string ("user-agent", "User Agent",
+ "The User-Agent string to send to the server",
+ DEFAULT_USER_AGENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MAX_RTCP_RTP_TIME_DIFF,
+ g_param_spec_int ("max-rtcp-rtp-time-diff", "Max RTCP RTP Time Diff",
+ "Maximum amount of time in ms that the RTP time in RTCP SRs "
+ "is allowed to be ahead (-1 disabled)", -1, G_MAXINT,
+ DEFAULT_MAX_RTCP_RTP_TIME_DIFF,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_RFC7273_SYNC,
+ g_param_spec_boolean ("rfc7273-sync", "Sync on RFC7273 clock",
+ "Synchronize received streams to the RFC7273 clock "
+ "(requires clock and offset to be provided)", DEFAULT_RFC7273_SYNC,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:default-rtsp-version:
+ *
+ * The preferred RTSP version to use while negotiating the version with the server.
+ *
+ * Since: 1.14
+ */
+ g_object_class_install_property (gobject_class, PROP_DEFAULT_VERSION,
+ g_param_spec_enum ("default-rtsp-version",
+ "The RTSP version to try first",
+ "The RTSP version that should be tried first when negotiating version.",
+ GST_TYPE_RTSP_VERSION, DEFAULT_VERSION,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:max-ts-offset-adjustment:
+ *
+ * Syncing time stamps to NTP time adds a time offset. This parameter
+ * specifies the maximum number of nanoseconds per frame by which this
+ * time offset may be adjusted. This is used to avoid sudden large
+ * changes to time stamps.
+ */
+ g_object_class_install_property (gobject_class, PROP_MAX_TS_OFFSET_ADJUSTMENT,
+ g_param_spec_uint64 ("max-ts-offset-adjustment",
+ "Max Timestamp Offset Adjustment",
+ "The maximum number of nanoseconds per frame that time stamp offsets "
+ "may be adjusted (0 = no limit).", 0, G_MAXUINT64,
+ DEFAULT_MAX_TS_OFFSET_ADJUSTMENT, G_PARAM_READWRITE |
+ G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:max-ts-offset:
+ *
+ * Sets an upper limit on how large a time offset may be. This is used
+ * to protect against unrealistic values resulting from client, server,
+ * or clock issues.
+ */
+ g_object_class_install_property (gobject_class, PROP_MAX_TS_OFFSET,
+ g_param_spec_int64 ("max-ts-offset", "Max TS Offset",
+ "The maximum absolute value of the time offset in (nanoseconds). "
+ "Note, if the ntp-sync parameter is set the default value is "
+ "changed to 0 (no limit)", 0, G_MAXINT64, DEFAULT_MAX_TS_OFFSET,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:backchannel:
+ *
+ * Select a type of backchannel to setup with the RTSP server.
+ * Default value is "none". Allowed values are "none" and "onvif".
+ *
+ * Since: 1.14
+ */
+ g_object_class_install_property (gobject_class, PROP_BACKCHANNEL,
+ g_param_spec_enum ("backchannel", "Backchannel type",
+ "The type of backchannel to setup. Default is 'none'.",
+ GST_TYPE_RTSP_BACKCHANNEL, BACKCHANNEL_NONE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:teardown-timeout:
+ *
+ * When transitioning PAUSED-READY, allow up to timeout (in nanoseconds)
+ * delay in order to send teardown (0 = disabled)
+ *
+ * Since: 1.14
+ */
+ g_object_class_install_property (gobject_class, PROP_TEARDOWN_TIMEOUT,
+ g_param_spec_uint64 ("teardown-timeout", "Teardown Timeout",
+ "When transitioning PAUSED-READY, allow up to timeout (in nanoseconds) "
+ "delay in order to send teardown (0 = disabled)",
+ 0, G_MAXUINT64, DEFAULT_TEARDOWN_TIMEOUT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:onvif-mode:
+ *
+ * Act as an ONVIF client. When set to %TRUE:
+ *
+ * - seeks will be interpreted as nanoseconds since prime epoch (1900-01-01)
+ *
+ * - #GstRTSPSrc:onvif-rate-control can be used to request that the server sends
+ * data as fast as it can
+ *
+ * - TCP is picked as the transport protocol
+ *
+ * - Trickmode flags in seek events are transformed into the appropriate ONVIF
+ * request headers
+ *
+ * Since: 1.18
+ */
+ g_object_class_install_property (gobject_class, PROP_ONVIF_MODE,
+ g_param_spec_boolean ("onvif-mode", "Onvif Mode",
+ "Act as an ONVIF client",
+ DEFAULT_ONVIF_MODE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:onvif-rate-control:
+ *
+ * When in onvif-mode, whether to set Rate-Control to yes or no. When set
+ * to %FALSE, the server will deliver data as fast as the client can consume
+ * it.
+ *
+ * Since: 1.18
+ */
+ g_object_class_install_property (gobject_class, PROP_ONVIF_RATE_CONTROL,
+ g_param_spec_boolean ("onvif-rate-control", "Onvif Rate Control",
+ "When in onvif-mode, whether to set Rate-Control to yes or no",
+ DEFAULT_ONVIF_RATE_CONTROL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:is-live:
+ *
+ * Whether to act as a live source. This is useful in combination with
+ * #GstRTSPSrc:onvif-rate-control set to %FALSE and usage of the TCP
+ * protocol. In that situation, data delivery rate can be entirely
+ * controlled from the client side, enabling features such as frame
+ * stepping and instantaneous rate changes.
+ *
+ * Since: 1.18
+ */
+ g_object_class_install_property (gobject_class, PROP_IS_LIVE,
+ g_param_spec_boolean ("is-live", "Is live",
+ "Whether to act as a live source",
+ DEFAULT_IS_LIVE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:ignore-x-server-reply:
+ *
+ * When connecting to an RTSP server in tunneled mode (HTTP) the server
+ * usually replies with an x-server-ip-address header. This contains the
+ * address of the intended streaming server. However some servers return an
+ * "invalid" address. Here follows two examples when it might happen.
+ *
+ * 1. A server uses Apache combined with a separate RTSP process to handle
+ * HTTPS requests on port 443. In this case Apache handles TLS and
+ * connects to the local RTSP server, which results in a local
+ * address 127.0.0.1 or ::1 in the header reply. This address is then
+ * returned to the actual RTSP client, which tries to connect to it
+ * and fails.
+ *
+ * 2. The client uses an IPv6 link local address with a specified scope id
+ * fe80::aaaa:bbbb:cccc:dddd%eth0 and connects via HTTP on port 80.
+ * The RTSP server receives the connection and returns the address
+ * in the x-server-ip-address header. The client then tries to connect
+ * to this address "as is", without the scope id, and fails.
+ *
+ * When streaming from RTSP servers like those in examples 1 and 2, it
+ * is useful to have the option to simply ignore the x-server-ip-address
+ * header reply and keep using the original address.
+ *
+ * Since: 1.20
+ */
+ g_object_class_install_property (gobject_class, PROP_IGNORE_X_SERVER_REPLY,
+ g_param_spec_boolean ("ignore-x-server-reply",
+ "Ignore x-server-ip-address",
+ "Whether to ignore the x-server-ip-address server header reply",
+ DEFAULT_IGNORE_X_SERVER_REPLY,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc::handle-request:
+ * @rtspsrc: a #GstRTSPSrc
+ * @request: a #GstRTSPMessage
+ * @response: a #GstRTSPMessage
+ *
+ * Handle a server request in @request and prepare @response.
+ *
+ * This signal is emitted from the streaming thread, so you should not
+ * do any state changes on @rtspsrc because this might deadlock. If you want
+ * to modify the state as a result of this signal, post a
+ * #GST_MESSAGE_REQUEST_STATE message on the bus or signal the main thread
+ * in some other way.
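+ *
+ * A minimal sketch of a handler, assuming an application-chosen function
+ * name and connection via g_signal_connect(); here it merely logs the
+ * method of each server-to-client request:
+ * |[
+ * static void
+ * on_handle_request (GstRTSPSrc * src, GstRTSPMessage * request,
+ *     GstRTSPMessage * response, gpointer user_data)
+ * {
+ *   GstRTSPMethod method;
+ *
+ *   // @response is prepared by the element and sent back after the
+ *   // handler returns; it can be modified here if needed
+ *   gst_rtsp_message_parse_request (request, &method, NULL, NULL);
+ *   GST_INFO ("server sent %s", gst_rtsp_method_as_text (method));
+ * }
+ * ]|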
+ *
+ * Since: 1.2
+ */
+ gst_rtspsrc_signals[SIGNAL_HANDLE_REQUEST] =
+ g_signal_new ("handle-request", G_TYPE_FROM_CLASS (klass), 0,
+ 0, NULL, NULL, NULL, G_TYPE_NONE, 2,
+ GST_TYPE_RTSP_MESSAGE | G_SIGNAL_TYPE_STATIC_SCOPE,
+ GST_TYPE_RTSP_MESSAGE | G_SIGNAL_TYPE_STATIC_SCOPE);
+
+ /**
+ * GstRTSPSrc::on-sdp:
+ * @rtspsrc: a #GstRTSPSrc
+ * @sdp: a #GstSDPMessage
+ *
+ * Emitted when the client has retrieved the SDP and before it configures the
+ * streams in the SDP. @sdp can be inspected and modified.
+ *
+ * This signal is emitted from the streaming thread, so you should not
+ * do any state changes on @rtspsrc because this might deadlock. If you want
+ * to modify the state as a result of this signal, post a
+ * #GST_MESSAGE_REQUEST_STATE message on the bus or signal the main thread
+ * in some other way.
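+ *
+ * A minimal sketch of inspecting the SDP from a handler (the function
+ * name is application-chosen):
+ * |[
+ * static void
+ * on_sdp (GstRTSPSrc * src, GstSDPMessage * sdp, gpointer user_data)
+ * {
+ *   // dump the retrieved SDP; it may also be modified in place here
+ *   gchar *text = gst_sdp_message_as_text (sdp);
+ *   GST_INFO ("SDP:\n%s", text);
+ *   g_free (text);
+ * }
+ * ]|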
+ *
+ * Since: 1.2
+ */
+ gst_rtspsrc_signals[SIGNAL_ON_SDP] =
+ g_signal_new ("on-sdp", G_TYPE_FROM_CLASS (klass), 0,
+ 0, NULL, NULL, NULL, G_TYPE_NONE, 1,
+ GST_TYPE_SDP_MESSAGE | G_SIGNAL_TYPE_STATIC_SCOPE);
+
+ /**
+ * GstRTSPSrc::select-stream:
+ * @rtspsrc: a #GstRTSPSrc
+ * @num: the stream number
+ * @caps: the stream caps
+ *
+ * Emitted before the client decides to configure the stream @num with
+ * @caps.
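+ *
+ * A minimal sketch of a handler (connected with g_signal_connect()) that
+ * only accepts video streams; the "media" caps field comes from the SDP:
+ * |[
+ * static gboolean
+ * on_select_stream (GstRTSPSrc * src, guint num, GstCaps * caps,
+ *     gpointer user_data)
+ * {
+ *   GstStructure *s = gst_caps_get_structure (caps, 0);
+ *
+ *   // select video, ignore everything else
+ *   return g_strcmp0 (gst_structure_get_string (s, "media"), "video") == 0;
+ * }
+ * ]|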
+ *
+ * Returns: %TRUE when the stream should be selected, %FALSE when the stream
+ * is to be ignored.
+ *
+ * Since: 1.2
+ */
+ gst_rtspsrc_signals[SIGNAL_SELECT_STREAM] =
+ g_signal_new_class_handler ("select-stream", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST,
+ (GCallback) default_select_stream, select_stream_accum, NULL, NULL,
+ G_TYPE_BOOLEAN, 2, G_TYPE_UINT, GST_TYPE_CAPS);
+ /**
+ * GstRTSPSrc::new-manager:
+ * @rtspsrc: a #GstRTSPSrc
+ * @manager: a #GstElement
+ *
+ * Emitted after a new manager (like rtpbin) was created and the default
+ * properties were configured.
+ *
+ * Since: 1.4
+ */
+ gst_rtspsrc_signals[SIGNAL_NEW_MANAGER] =
+ g_signal_new_class_handler ("new-manager", G_TYPE_FROM_CLASS (klass),
+ 0, 0, NULL, NULL, NULL, G_TYPE_NONE, 1, GST_TYPE_ELEMENT);
+
+ /**
+ * GstRTSPSrc::request-rtcp-key:
+ * @rtspsrc: a #GstRTSPSrc
+ * @num: the stream number
+ *
+ * Signal emitted to get the crypto parameters relevant to the RTCP
+ * stream. The user should provide the key and the RTCP encryption cipher
+ * and authentication, and return them wrapped in a #GstCaps.
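+ *
+ * A sketch of a handler, assuming a 30-byte master key/salt held by the
+ * application in app_key and the caps fields understood by the srtp
+ * elements:
+ * |[
+ * static GstCaps *
+ * on_request_rtcp_key (GstRTSPSrc * src, guint num, gpointer user_data)
+ * {
+ *   GstBuffer *key = gst_buffer_new_allocate (NULL, 30, NULL);
+ *   GstCaps *caps;
+ *
+ *   gst_buffer_fill (key, 0, app_key, 30);
+ *   caps = gst_caps_new_simple ("application/x-srtp",
+ *       "srtp-key", GST_TYPE_BUFFER, key,
+ *       "srtp-cipher", G_TYPE_STRING, "aes-128-icm",
+ *       "srtp-auth", G_TYPE_STRING, "hmac-sha1-80",
+ *       "srtcp-cipher", G_TYPE_STRING, "aes-128-icm",
+ *       "srtcp-auth", G_TYPE_STRING, "hmac-sha1-80", NULL);
+ *   gst_buffer_unref (key);
+ *   return caps;
+ * }
+ * ]|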
+ *
+ * Since: 1.4
+ */
+ gst_rtspsrc_signals[SIGNAL_REQUEST_RTCP_KEY] =
+ g_signal_new ("request-rtcp-key", G_TYPE_FROM_CLASS (klass),
+ 0, 0, NULL, NULL, NULL, GST_TYPE_CAPS, 1, G_TYPE_UINT);
+
+ /**
+ * GstRTSPSrc::accept-certificate:
+ * @rtspsrc: a #GstRTSPSrc
+ * @peer_cert: the peer's #GTlsCertificate
+ * @errors: the problems with @peer_cert
+ * @user_data: user data set when the signal handler was connected.
+ *
+ * This will directly map to #GTlsConnection 's "accept-certificate"
+ * signal and be performed after the default checks of #GstRTSPConnection
+ * (checking against the #GTlsDatabase with the given #GTlsCertificateFlags)
+ * have failed. If no #GTlsDatabase is set on this connection, only this
+ * signal will be emitted.
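+ *
+ * A minimal sketch of a handler that, as an example policy, only
+ * tolerates certificates from an unknown CA (e.g. self-signed ones):
+ * |[
+ * static gboolean
+ * on_accept_certificate (GstRTSPSrc * src, GTlsConnection * conn,
+ *     GTlsCertificate * peer_cert, GTlsCertificateFlags errors,
+ *     gpointer user_data)
+ * {
+ *   // accept only when the sole problem is the unknown CA
+ *   return errors == G_TLS_CERTIFICATE_UNKNOWN_CA;
+ * }
+ * ]|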
+ *
+ * Since: 1.14
+ */
+ gst_rtspsrc_signals[SIGNAL_ACCEPT_CERTIFICATE] =
+ g_signal_new ("accept-certificate", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, 0, g_signal_accumulator_true_handled, NULL, NULL,
+ G_TYPE_BOOLEAN, 3, G_TYPE_TLS_CONNECTION, G_TYPE_TLS_CERTIFICATE,
+ G_TYPE_TLS_CERTIFICATE_FLAGS);
+
+ /**
+ * GstRTSPSrc::before-send:
+ * @rtspsrc: a #GstRTSPSrc
+ * @message: the outgoing #GstRTSPMessage
+ *
+ * Emitted before each RTSP request is sent, in order to allow
+ * the application to modify send parameters or to skip the message entirely.
+ * This can be used, for example, to work with ONVIF Profile G servers,
+ * which need a different/additional range, rate-control, and intra/x
+ * parameters.
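+ *
+ * A minimal sketch of a handler that, hypothetically, drops every PAUSE
+ * request while letting all other messages through:
+ * |[
+ * static gboolean
+ * on_before_send (GstRTSPSrc * src, GstRTSPMessage * message,
+ *     gpointer user_data)
+ * {
+ *   GstRTSPMethod method;
+ *
+ *   gst_rtsp_message_parse_request (message, &method, NULL, NULL);
+ *   return method != GST_RTSP_PAUSE;
+ * }
+ * ]|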
+ *
+ * Returns: %TRUE when the command should be sent, %FALSE when the
+ * command should be dropped.
+ *
+ * Since: 1.14
+ */
+ gst_rtspsrc_signals[SIGNAL_BEFORE_SEND] =
+ g_signal_new_class_handler ("before-send", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST,
+ (GCallback) default_before_send, before_send_accum, NULL, NULL,
+ G_TYPE_BOOLEAN, 1, GST_TYPE_RTSP_MESSAGE | G_SIGNAL_TYPE_STATIC_SCOPE);
+
+ /**
+ * GstRTSPSrc::push-backchannel-buffer:
+ * @rtspsrc: a #GstRTSPSrc
+ * @id: the stream id where @sample should be sent
+ * @sample: an RTP #GstSample to send back to the server
+ *
+ * Send an RTP sample to the server over the ONVIF backchannel, if one
+ * was negotiated (see #GstRTSPSrc:backchannel).
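+ *
+ * A minimal sketch of emitting this action signal, assuming stream_id
+ * and sample are provided by the application:
+ * |[
+ * GstFlowReturn ret;
+ *
+ * g_signal_emit_by_name (rtspsrc, "push-backchannel-buffer", stream_id,
+ *     sample, &ret);
+ * ]|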
+ *
+ * Returns: a #GstFlowReturn
+ */
+ gst_rtspsrc_signals[SIGNAL_PUSH_BACKCHANNEL_BUFFER] =
+ g_signal_new ("push-backchannel-buffer", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRTSPSrcClass,
+ push_backchannel_buffer), NULL, NULL, NULL,
+ GST_TYPE_FLOW_RETURN, 2, G_TYPE_UINT, GST_TYPE_SAMPLE);
+
+ /**
+ * GstRTSPSrc::get-parameter:
+ * @rtspsrc: a #GstRTSPSrc
+ * @parameter: the parameter name
+ * @content_type: the content type, or %NULL
+ * @promise: a #GstPromise to receive the reply
+ *
+ * Action signal to send a GET_PARAMETER request for @parameter to the
+ * server.
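+ *
+ * A minimal sketch, assuming "position" is a parameter name understood
+ * by the server; the reply arrives through the promise:
+ * |[
+ * GstPromise *promise = gst_promise_new ();
+ * gboolean ok;
+ *
+ * // the content type may be NULL
+ * g_signal_emit_by_name (rtspsrc, "get-parameter", "position", NULL,
+ *     promise, &ok);
+ * ]|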
+ *
+ * Returns: %TRUE when the command could be issued, %FALSE otherwise
+ *
+ */
+ gst_rtspsrc_signals[SIGNAL_GET_PARAMETER] =
+ g_signal_new ("get-parameter", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRTSPSrcClass,
+ get_parameter), NULL, NULL, NULL,
+ G_TYPE_BOOLEAN, 3, G_TYPE_STRING, G_TYPE_STRING, GST_TYPE_PROMISE);
+
+ /**
+ * GstRTSPSrc::get-parameters:
+ * @rtspsrc: a #GstRTSPSrc
+ * @parameters: a %NULL-terminated array of parameter names
+ * @content_type: the content type, or %NULL
+ * @promise: a #GstPromise to receive the reply
+ *
+ * Action signal to request several parameters from the server in a
+ * single GET_PARAMETER request; see GstRTSPSrc::get-parameter for a
+ * usage sketch.
+ *
+ * Returns: %TRUE when the command could be issued, %FALSE otherwise
+ *
+ */
+ gst_rtspsrc_signals[SIGNAL_GET_PARAMETERS] =
+ g_signal_new ("get-parameters", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRTSPSrcClass,
+ get_parameters), NULL, NULL, NULL,
+ G_TYPE_BOOLEAN, 3, G_TYPE_STRV, G_TYPE_STRING, GST_TYPE_PROMISE);
+
+ /**
+ * GstRTSPSrc::set-parameter:
+ * @rtspsrc: a #GstRTSPSrc
+ * @name: the parameter name
+ * @value: the parameter value
+ * @content_type: the content type, or %NULL
+ * @promise: a #GstPromise to receive the reply
+ *
+ * Action signal to send a SET_PARAMETER request for @name to the server.
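+ *
+ * A minimal sketch; "barparam" and "barvalue" are hypothetical,
+ * server-defined strings:
+ * |[
+ * GstPromise *promise = gst_promise_new ();
+ * gboolean ok;
+ *
+ * g_signal_emit_by_name (rtspsrc, "set-parameter", "barparam",
+ *     "barvalue", NULL, promise, &ok);
+ * ]|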
+ *
+ * Returns: %TRUE when the command could be issued, %FALSE otherwise
+ *
+ */
+ gst_rtspsrc_signals[SIGNAL_SET_PARAMETER] =
+ g_signal_new ("set-parameter", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRTSPSrcClass,
+ set_parameter), NULL, NULL, NULL, G_TYPE_BOOLEAN, 4, G_TYPE_STRING,
+ G_TYPE_STRING, G_TYPE_STRING, GST_TYPE_PROMISE);
+
+ gstelement_class->send_event = gst_rtspsrc_send_event;
+ gstelement_class->provide_clock = gst_rtspsrc_provide_clock;
+ gstelement_class->change_state = gst_rtspsrc_change_state;
+
+ gst_element_class_add_static_pad_template (gstelement_class, &rtptemplate);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTSP packet receiver", "Source/Network",
+ "Receive data over the network via RTSP (RFC 2326)",
+ "Wim Taymans <wim@fluendo.com>, "
+ "Thijs Vermeir <thijs.vermeir@barco.com>, "
+ "Lutz Mueller <lutz@topfrose.de>");
+
+ gstbin_class->handle_message = gst_rtspsrc_handle_message;
+
+ klass->push_backchannel_buffer = gst_rtspsrc_push_backchannel_buffer;
+ klass->get_parameter = GST_DEBUG_FUNCPTR (get_parameter);
+ klass->get_parameters = GST_DEBUG_FUNCPTR (get_parameters);
+ klass->set_parameter = GST_DEBUG_FUNCPTR (set_parameter);
+
+ gst_rtsp_ext_list_init ();
+
+ gst_type_mark_as_plugin_api (GST_TYPE_RTSP_SRC_BUFFER_MODE, 0);
+ gst_type_mark_as_plugin_api (GST_TYPE_RTSP_SRC_NTP_TIME_SOURCE, 0);
+ gst_type_mark_as_plugin_api (GST_TYPE_RTSP_BACKCHANNEL, 0);
+ gst_type_mark_as_plugin_api (GST_TYPE_RTSP_NAT_METHOD, 0);
+ }
+
+ static gboolean
+ validate_set_get_parameter_name (const gchar * parameter_name)
+ {
+ gchar *ptr = (gchar *) parameter_name;
+
+ while (*ptr) {
+ /* Don't allow '\r', '\n', '\t', ' ', etc. in the parameter name */
+ if (g_ascii_isspace (*ptr) || g_ascii_iscntrl (*ptr)) {
+ GST_DEBUG ("invalid parameter name '%s'", parameter_name);
+ return FALSE;
+ }
+ ptr++;
+ }
+ return TRUE;
+ }
+
+ static gboolean
+ validate_set_get_parameters (gchar ** parameter_names)
+ {
+ while (*parameter_names) {
+ if (!validate_set_get_parameter_name (*parameter_names)) {
+ return FALSE;
+ }
+ parameter_names++;
+ }
+ return TRUE;
+ }
+
+ static gboolean
+ get_parameter (GstRTSPSrc * src, const gchar * parameter,
+ const gchar * content_type, GstPromise * promise)
+ {
+ gchar *parameters[] = { (gchar *) parameter, NULL };
+
+ GST_LOG_OBJECT (src, "get_parameter: %s", GST_STR_NULL (parameter));
+
+ if (parameter == NULL || parameter[0] == '\0' || promise == NULL) {
+ GST_DEBUG ("invalid input");
+ return FALSE;
+ }
+
+ return get_parameters (src, parameters, content_type, promise);
+ }
+
+ static gboolean
+ get_parameters (GstRTSPSrc * src, gchar ** parameters,
+ const gchar * content_type, GstPromise * promise)
+ {
+ ParameterRequest *req;
+
+ GST_LOG_OBJECT (src, "get_parameters: %d", g_strv_length (parameters));
+
+ if (parameters == NULL || promise == NULL) {
+ GST_DEBUG ("invalid input");
+ return FALSE;
+ }
+
+ if (src->state == GST_RTSP_STATE_INVALID) {
+ GST_DEBUG ("invalid state");
+ return FALSE;
+ }
+
+ if (!validate_set_get_parameters (parameters)) {
+ return FALSE;
+ }
+
+ req = g_new0 (ParameterRequest, 1);
+ req->promise = gst_promise_ref (promise);
+ req->cmd = CMD_GET_PARAMETER;
+ /* Set the request body according to RFC 2326 or RFC 7826 */
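+ /* e.g. for parameters { "position", "scale" } the body becomes
+ * "position:\r\nscale:\r\n" */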
+ req->body = g_string_new (NULL);
+ while (*parameters) {
+ g_string_append_printf (req->body, "%s:\r\n", *parameters);
+ parameters++;
+ }
+ if (content_type)
+ req->content_type = g_strdup (content_type);
+
+ GST_OBJECT_LOCK (src);
+ g_queue_push_tail (&src->set_get_param_q, req);
+ GST_OBJECT_UNLOCK (src);
+
+ gst_rtspsrc_loop_send_cmd (src, CMD_GET_PARAMETER, CMD_LOOP);
+
+ return TRUE;
+ }
+
+ static gboolean
+ set_parameter (GstRTSPSrc * src, const gchar * name, const gchar * value,
+ const gchar * content_type, GstPromise * promise)
+ {
+ ParameterRequest *req;
+
+ GST_LOG_OBJECT (src, "set_parameter: %s: %s", GST_STR_NULL (name),
+ GST_STR_NULL (value));
+
+ if (name == NULL || name[0] == '\0' || value == NULL || promise == NULL) {
+ GST_DEBUG ("invalid input");
+ return FALSE;
+ }
+
+ if (src->state == GST_RTSP_STATE_INVALID) {
+ GST_DEBUG ("invalid state");
+ return FALSE;
+ }
+
+ if (!validate_set_get_parameter_name (name)) {
+ return FALSE;
+ }
+
+ req = g_new0 (ParameterRequest, 1);
+ req->cmd = CMD_SET_PARAMETER;
+ req->promise = gst_promise_ref (promise);
+ req->body = g_string_new (NULL);
+ /* Set the request body according to RFC 2326 or RFC 7826 */
+ g_string_append_printf (req->body, "%s: %s\r\n", name, value);
+ if (content_type)
+ req->content_type = g_strdup (content_type);
+
+ GST_OBJECT_LOCK (src);
+ g_queue_push_tail (&src->set_get_param_q, req);
+ GST_OBJECT_UNLOCK (src);
+
+ gst_rtspsrc_loop_send_cmd (src, CMD_SET_PARAMETER, CMD_LOOP);
+
+ return TRUE;
+ }
+
+ static void
+ gst_rtspsrc_init (GstRTSPSrc * src)
+ {
+ src->conninfo.location = g_strdup (DEFAULT_LOCATION);
+ src->protocols = DEFAULT_PROTOCOLS;
+ src->debug = DEFAULT_DEBUG;
+ src->retry = DEFAULT_RETRY;
+ src->udp_timeout = DEFAULT_TIMEOUT;
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ src->start_position = DEFAULT_START_POSITION;
++ src->is_audio_codec_supported = FALSE;
++ src->is_video_codec_supported = FALSE;
++ src->audio_codec = NULL;
++ src->video_codec = NULL;
++ src->video_frame_size = NULL;
++#endif
+ gst_rtspsrc_set_tcp_timeout (src, DEFAULT_TCP_TIMEOUT);
+ src->latency = DEFAULT_LATENCY_MS;
+ src->drop_on_latency = DEFAULT_DROP_ON_LATENCY;
+ src->connection_speed = DEFAULT_CONNECTION_SPEED;
+ src->nat_method = DEFAULT_NAT_METHOD;
+ src->do_rtcp = DEFAULT_DO_RTCP;
+ src->do_rtsp_keep_alive = DEFAULT_DO_RTSP_KEEP_ALIVE;
+ gst_rtspsrc_set_proxy (src, DEFAULT_PROXY);
+ src->rtp_blocksize = DEFAULT_RTP_BLOCKSIZE;
+ src->user_id = g_strdup (DEFAULT_USER_ID);
+ src->user_pw = g_strdup (DEFAULT_USER_PW);
+ src->buffer_mode = DEFAULT_BUFFER_MODE;
+ src->client_port_range.min = 0;
+ src->client_port_range.max = 0;
+ src->udp_buffer_size = DEFAULT_UDP_BUFFER_SIZE;
+ src->short_header = DEFAULT_SHORT_HEADER;
+ src->probation = DEFAULT_PROBATION;
+ src->udp_reconnect = DEFAULT_UDP_RECONNECT;
+ src->multi_iface = g_strdup (DEFAULT_MULTICAST_IFACE);
+ src->ntp_sync = DEFAULT_NTP_SYNC;
+ src->use_pipeline_clock = DEFAULT_USE_PIPELINE_CLOCK;
+ src->sdes = NULL;
+ src->tls_validation_flags = DEFAULT_TLS_VALIDATION_FLAGS;
+ src->tls_database = DEFAULT_TLS_DATABASE;
+ src->tls_interaction = DEFAULT_TLS_INTERACTION;
+ src->do_retransmission = DEFAULT_DO_RETRANSMISSION;
+ src->ntp_time_source = DEFAULT_NTP_TIME_SOURCE;
+ src->user_agent = g_strdup (DEFAULT_USER_AGENT);
+ src->max_rtcp_rtp_time_diff = DEFAULT_MAX_RTCP_RTP_TIME_DIFF;
+ src->rfc7273_sync = DEFAULT_RFC7273_SYNC;
+ src->max_ts_offset_adjustment = DEFAULT_MAX_TS_OFFSET_ADJUSTMENT;
+ src->max_ts_offset = DEFAULT_MAX_TS_OFFSET;
+ src->max_ts_offset_is_set = FALSE;
+ src->default_version = DEFAULT_VERSION;
+ src->version = GST_RTSP_VERSION_INVALID;
+ src->teardown_timeout = DEFAULT_TEARDOWN_TIMEOUT;
+ src->onvif_mode = DEFAULT_ONVIF_MODE;
+ src->onvif_rate_control = DEFAULT_ONVIF_RATE_CONTROL;
+ src->is_live = DEFAULT_IS_LIVE;
+ src->seek_seqnum = GST_SEQNUM_INVALID;
+ src->group_id = GST_GROUP_ID_INVALID;
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ g_mutex_init (&(src)->pause_lock);
++ g_cond_init (&(src)->open_end);
++#endif
+ /* get a list of all extensions */
+ src->extensions = gst_rtsp_ext_list_get ();
+
+ /* connect to send signal */
+ gst_rtsp_ext_list_connect (src->extensions, "send",
+ (GCallback) gst_rtspsrc_send_cb, src);
+
+ /* protects the streaming thread in interleaved mode or the polling
+ * thread in UDP mode. */
+ g_rec_mutex_init (&src->stream_rec_lock);
+
+ /* protects our state changes from multiple invocations */
+ g_rec_mutex_init (&src->state_rec_lock);
+
+ g_queue_init (&src->set_get_param_q);
+
+ src->state = GST_RTSP_STATE_INVALID;
+
+ g_mutex_init (&src->conninfo.send_lock);
+ g_mutex_init (&src->conninfo.recv_lock);
+ g_cond_init (&src->cmd_cond);
+
+ g_mutex_init (&src->group_lock);
+
+ GST_OBJECT_FLAG_SET (src, GST_ELEMENT_FLAG_SOURCE);
+ gst_bin_set_suppressed_flags (GST_BIN (src),
+ GST_ELEMENT_FLAG_SOURCE | GST_ELEMENT_FLAG_SINK);
+ }
+
+ static void
+ free_param_data (ParameterRequest * req)
+ {
+ gst_promise_unref (req->promise);
+ if (req->body)
+ g_string_free (req->body, TRUE);
+ g_free (req->content_type);
+ g_free (req);
+ }
+
+ static void
+ gst_rtspsrc_finalize (GObject * object)
+ {
+ GstRTSPSrc *rtspsrc;
+
+ rtspsrc = GST_RTSPSRC (object);
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ rtspsrc->is_audio_codec_supported = FALSE;
++ rtspsrc->is_video_codec_supported = FALSE;
++ if (rtspsrc->audio_codec) {
++ g_free (rtspsrc->audio_codec);
++ rtspsrc->audio_codec = NULL;
++ }
++ if (rtspsrc->video_codec) {
++ g_free (rtspsrc->video_codec);
++ rtspsrc->video_codec = NULL;
++ }
++ if (rtspsrc->video_frame_size) {
++ g_free (rtspsrc->video_frame_size);
++ rtspsrc->video_frame_size = NULL;
++ }
++#endif
+ gst_rtsp_ext_list_free (rtspsrc->extensions);
+ g_free (rtspsrc->conninfo.location);
+ gst_rtsp_url_free (rtspsrc->conninfo.url);
+ g_free (rtspsrc->conninfo.url_str);
+ g_free (rtspsrc->user_id);
+ g_free (rtspsrc->user_pw);
+ g_free (rtspsrc->multi_iface);
+ g_free (rtspsrc->user_agent);
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ g_mutex_clear (&(rtspsrc)->pause_lock);
++ g_cond_clear (&(rtspsrc)->open_end);
++#endif
++
+ if (rtspsrc->sdp) {
+ gst_sdp_message_free (rtspsrc->sdp);
+ rtspsrc->sdp = NULL;
+ }
+ if (rtspsrc->provided_clock)
+ gst_object_unref (rtspsrc->provided_clock);
+
+ if (rtspsrc->sdes)
+ gst_structure_free (rtspsrc->sdes);
+
+ if (rtspsrc->tls_database)
+ g_object_unref (rtspsrc->tls_database);
+
+ if (rtspsrc->tls_interaction)
+ g_object_unref (rtspsrc->tls_interaction);
+
+ /* free locks */
+ g_rec_mutex_clear (&rtspsrc->stream_rec_lock);
+ g_rec_mutex_clear (&rtspsrc->state_rec_lock);
+
+ g_mutex_clear (&rtspsrc->conninfo.send_lock);
+ g_mutex_clear (&rtspsrc->conninfo.recv_lock);
+ g_cond_clear (&rtspsrc->cmd_cond);
+
+ g_mutex_clear (&rtspsrc->group_lock);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ static GstClock *
+ gst_rtspsrc_provide_clock (GstElement * element)
+ {
+ GstRTSPSrc *src = GST_RTSPSRC (element);
+ GstClock *clock;
+
+ if ((clock = src->provided_clock) != NULL)
+ return gst_object_ref (clock);
+
+ return GST_ELEMENT_CLASS (parent_class)->provide_clock (element);
+ }
+
+ /* a proxy string of the format [user:passwd@]host[:port] */
+ static gboolean
+ gst_rtspsrc_set_proxy (GstRTSPSrc * rtsp, const gchar * proxy)
+ {
+ gchar *p, *at, *col;
+
+ g_free (rtsp->proxy_user);
+ rtsp->proxy_user = NULL;
+ g_free (rtsp->proxy_passwd);
+ rtsp->proxy_passwd = NULL;
+ g_free (rtsp->proxy_host);
+ rtsp->proxy_host = NULL;
+ rtsp->proxy_port = 0;
+
+ p = (gchar *) proxy;
+
+ if (p == NULL)
+ return TRUE;
+
+ /* we allow http:// in front but ignore it */
+ if (g_str_has_prefix (p, "http://"))
+ p += 7;
+
+ at = strchr (p, '@');
+ if (at) {
+ /* look for user:passwd */
+ col = strchr (proxy, ':');
+ if (col == NULL || col > at)
+ return FALSE;
+
+ rtsp->proxy_user = g_strndup (p, col - p);
+ col++;
+ rtsp->proxy_passwd = g_strndup (col, at - col);
+
+ /* move to host */
+ p = at + 1;
+ } else {
+ if (rtsp->prop_proxy_id != NULL && *rtsp->prop_proxy_id != '\0')
+ rtsp->proxy_user = g_strdup (rtsp->prop_proxy_id);
+ if (rtsp->prop_proxy_pw != NULL && *rtsp->prop_proxy_pw != '\0')
+ rtsp->proxy_passwd = g_strdup (rtsp->prop_proxy_pw);
+ if (rtsp->proxy_user != NULL || rtsp->proxy_passwd != NULL) {
+ GST_LOG_OBJECT (rtsp, "set proxy user/pw from properties: %s:%s",
+ GST_STR_NULL (rtsp->proxy_user), GST_STR_NULL (rtsp->proxy_passwd));
+ }
+ }
+ col = strchr (p, ':');
+
+ if (col) {
+ /* everything before the colon is the hostname */
+ rtsp->proxy_host = g_strndup (p, col - p);
+ p = col + 1;
+ rtsp->proxy_port = strtoul (p, (char **) &p, 10);
+ } else {
+ rtsp->proxy_host = g_strdup (p);
+ rtsp->proxy_port = 8080;
+ }
+ return TRUE;
+ }
+
+ static void
+ gst_rtspsrc_set_tcp_timeout (GstRTSPSrc * rtspsrc, guint64 timeout)
+ {
+ rtspsrc->tcp_timeout = timeout;
+ }
+
+ static void
+ gst_rtspsrc_set_property (GObject * object, guint prop_id, const GValue * value,
+ GParamSpec * pspec)
+ {
+ GstRTSPSrc *rtspsrc;
+
+ rtspsrc = GST_RTSPSRC (object);
+
+ switch (prop_id) {
+ case PROP_LOCATION:
+ gst_rtspsrc_uri_set_uri (GST_URI_HANDLER (rtspsrc),
+ g_value_get_string (value), NULL);
+ break;
+ case PROP_PROTOCOLS:
+ rtspsrc->protocols = g_value_get_flags (value);
+ break;
+ case PROP_DEBUG:
+ rtspsrc->debug = g_value_get_boolean (value);
+ break;
+ case PROP_RETRY:
+ rtspsrc->retry = g_value_get_uint (value);
+ break;
+ case PROP_TIMEOUT:
+ rtspsrc->udp_timeout = g_value_get_uint64 (value);
+ break;
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ case PROP_START_POSITION:
++ rtspsrc->start_position = g_value_get_uint64 (value);
++ break;
++ case PROP_RESUME_POSITION:
++ rtspsrc->last_pos = g_value_get_uint64 (value);
++ GST_DEBUG_OBJECT (rtspsrc, "src->last_pos value set to %" GST_TIME_FORMAT,
++ GST_TIME_ARGS (rtspsrc->last_pos));
++ break;
++#endif
+ case PROP_TCP_TIMEOUT:
+ gst_rtspsrc_set_tcp_timeout (rtspsrc, g_value_get_uint64 (value));
+ break;
+ case PROP_LATENCY:
+ rtspsrc->latency = g_value_get_uint (value);
+ break;
+ case PROP_DROP_ON_LATENCY:
+ rtspsrc->drop_on_latency = g_value_get_boolean (value);
+ break;
+ case PROP_CONNECTION_SPEED:
+ rtspsrc->connection_speed = g_value_get_uint64 (value);
+ break;
+ case PROP_NAT_METHOD:
+ rtspsrc->nat_method = g_value_get_enum (value);
+ break;
+ case PROP_DO_RTCP:
+ rtspsrc->do_rtcp = g_value_get_boolean (value);
+ break;
+ case PROP_DO_RTSP_KEEP_ALIVE:
+ rtspsrc->do_rtsp_keep_alive = g_value_get_boolean (value);
+ break;
+ case PROP_PROXY:
+ gst_rtspsrc_set_proxy (rtspsrc, g_value_get_string (value));
+ break;
+ case PROP_PROXY_ID:
+ g_free (rtspsrc->prop_proxy_id);
+ rtspsrc->prop_proxy_id = g_value_dup_string (value);
+ break;
+ case PROP_PROXY_PW:
+ g_free (rtspsrc->prop_proxy_pw);
+ rtspsrc->prop_proxy_pw = g_value_dup_string (value);
+ break;
+ case PROP_RTP_BLOCKSIZE:
+ rtspsrc->rtp_blocksize = g_value_get_uint (value);
+ break;
+ case PROP_USER_ID:
+ g_free (rtspsrc->user_id);
+ rtspsrc->user_id = g_value_dup_string (value);
+ break;
+ case PROP_USER_PW:
+ g_free (rtspsrc->user_pw);
+ rtspsrc->user_pw = g_value_dup_string (value);
+ break;
+ case PROP_BUFFER_MODE:
+ rtspsrc->buffer_mode = g_value_get_enum (value);
+ break;
+ case PROP_PORT_RANGE:
+ {
+ const gchar *str;
+
+ str = g_value_get_string (value);
+ if (str == NULL || sscanf (str, "%u-%u", &rtspsrc->client_port_range.min,
+ &rtspsrc->client_port_range.max) != 2) {
+ rtspsrc->client_port_range.min = 0;
+ rtspsrc->client_port_range.max = 0;
+ }
+ break;
+ }
+ case PROP_UDP_BUFFER_SIZE:
+ rtspsrc->udp_buffer_size = g_value_get_int (value);
+ break;
+ case PROP_SHORT_HEADER:
+ rtspsrc->short_header = g_value_get_boolean (value);
+ break;
+ case PROP_PROBATION:
+ rtspsrc->probation = g_value_get_uint (value);
+ break;
+ case PROP_UDP_RECONNECT:
+ rtspsrc->udp_reconnect = g_value_get_boolean (value);
+ break;
+ case PROP_MULTICAST_IFACE:
+ g_free (rtspsrc->multi_iface);
+
+ if (g_value_get_string (value) == NULL)
+ rtspsrc->multi_iface = g_strdup (DEFAULT_MULTICAST_IFACE);
+ else
+ rtspsrc->multi_iface = g_value_dup_string (value);
+ break;
+ case PROP_NTP_SYNC:
+ rtspsrc->ntp_sync = g_value_get_boolean (value);
+ /* The default value of max_ts_offset depends on ntp_sync. If user
+ * hasn't set it then change default value */
+ if (!rtspsrc->max_ts_offset_is_set) {
+ if (rtspsrc->ntp_sync) {
+ rtspsrc->max_ts_offset = 0;
+ } else {
+ rtspsrc->max_ts_offset = DEFAULT_MAX_TS_OFFSET;
+ }
+ }
+ break;
+ case PROP_USE_PIPELINE_CLOCK:
+ rtspsrc->use_pipeline_clock = g_value_get_boolean (value);
+ break;
+ case PROP_SDES:
+ rtspsrc->sdes = g_value_dup_boxed (value);
+ break;
+ case PROP_TLS_VALIDATION_FLAGS:
+ rtspsrc->tls_validation_flags = g_value_get_flags (value);
+ break;
+ case PROP_TLS_DATABASE:
+ g_clear_object (&rtspsrc->tls_database);
+ rtspsrc->tls_database = g_value_dup_object (value);
+ break;
+ case PROP_TLS_INTERACTION:
+ g_clear_object (&rtspsrc->tls_interaction);
+ rtspsrc->tls_interaction = g_value_dup_object (value);
+ break;
+ case PROP_DO_RETRANSMISSION:
+ rtspsrc->do_retransmission = g_value_get_boolean (value);
+ break;
+ case PROP_NTP_TIME_SOURCE:
+ rtspsrc->ntp_time_source = g_value_get_enum (value);
+ break;
+ case PROP_USER_AGENT:
+ g_free (rtspsrc->user_agent);
+ rtspsrc->user_agent = g_value_dup_string (value);
+ break;
+ case PROP_MAX_RTCP_RTP_TIME_DIFF:
+ rtspsrc->max_rtcp_rtp_time_diff = g_value_get_int (value);
+ break;
+ case PROP_RFC7273_SYNC:
+ rtspsrc->rfc7273_sync = g_value_get_boolean (value);
+ break;
+ case PROP_MAX_TS_OFFSET_ADJUSTMENT:
+ rtspsrc->max_ts_offset_adjustment = g_value_get_uint64 (value);
+ break;
+ case PROP_MAX_TS_OFFSET:
+ rtspsrc->max_ts_offset = g_value_get_int64 (value);
+ rtspsrc->max_ts_offset_is_set = TRUE;
+ break;
+ case PROP_DEFAULT_VERSION:
+ rtspsrc->default_version = g_value_get_enum (value);
+ break;
+ case PROP_BACKCHANNEL:
+ rtspsrc->backchannel = g_value_get_enum (value);
+ break;
+ case PROP_TEARDOWN_TIMEOUT:
+ rtspsrc->teardown_timeout = g_value_get_uint64 (value);
+ break;
+ case PROP_ONVIF_MODE:
+ rtspsrc->onvif_mode = g_value_get_boolean (value);
+ break;
+ case PROP_ONVIF_RATE_CONTROL:
+ rtspsrc->onvif_rate_control = g_value_get_boolean (value);
+ break;
+ case PROP_IS_LIVE:
+ rtspsrc->is_live = g_value_get_boolean (value);
+ break;
+ case PROP_IGNORE_X_SERVER_REPLY:
+ rtspsrc->ignore_x_server_reply = g_value_get_boolean (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static void
+ gst_rtspsrc_get_property (GObject * object, guint prop_id, GValue * value,
+ GParamSpec * pspec)
+ {
+ GstRTSPSrc *rtspsrc;
+
+ rtspsrc = GST_RTSPSRC (object);
+
+ switch (prop_id) {
+ case PROP_LOCATION:
+ g_value_set_string (value, rtspsrc->conninfo.location);
+ break;
+ case PROP_PROTOCOLS:
+ g_value_set_flags (value, rtspsrc->protocols);
+ break;
+ case PROP_DEBUG:
+ g_value_set_boolean (value, rtspsrc->debug);
+ break;
+ case PROP_RETRY:
+ g_value_set_uint (value, rtspsrc->retry);
+ break;
+ case PROP_TIMEOUT:
+ g_value_set_uint64 (value, rtspsrc->udp_timeout);
+ break;
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ case PROP_START_POSITION:
++ g_value_set_uint64 (value, rtspsrc->start_position);
++ break;
++ case PROP_RESUME_POSITION:
++ g_value_set_uint64 (value, rtspsrc->last_pos);
++ break;
++#endif
+ case PROP_TCP_TIMEOUT:
+ g_value_set_uint64 (value, rtspsrc->tcp_timeout);
+ break;
+ case PROP_LATENCY:
+ g_value_set_uint (value, rtspsrc->latency);
+ break;
+ case PROP_DROP_ON_LATENCY:
+ g_value_set_boolean (value, rtspsrc->drop_on_latency);
+ break;
+ case PROP_CONNECTION_SPEED:
+ g_value_set_uint64 (value, rtspsrc->connection_speed);
+ break;
+ case PROP_NAT_METHOD:
+ g_value_set_enum (value, rtspsrc->nat_method);
+ break;
+ case PROP_DO_RTCP:
+ g_value_set_boolean (value, rtspsrc->do_rtcp);
+ break;
+ case PROP_DO_RTSP_KEEP_ALIVE:
+ g_value_set_boolean (value, rtspsrc->do_rtsp_keep_alive);
+ break;
+ case PROP_PROXY:
+ {
+ gchar *str;
+
+ if (rtspsrc->proxy_host) {
+ str =
+ g_strdup_printf ("%s:%d", rtspsrc->proxy_host, rtspsrc->proxy_port);
+ } else {
+ str = NULL;
+ }
+ g_value_take_string (value, str);
+ break;
+ }
+ case PROP_PROXY_ID:
+ g_value_set_string (value, rtspsrc->prop_proxy_id);
+ break;
+ case PROP_PROXY_PW:
+ g_value_set_string (value, rtspsrc->prop_proxy_pw);
+ break;
+ case PROP_RTP_BLOCKSIZE:
+ g_value_set_uint (value, rtspsrc->rtp_blocksize);
+ break;
+ case PROP_USER_ID:
+ g_value_set_string (value, rtspsrc->user_id);
+ break;
+ case PROP_USER_PW:
+ g_value_set_string (value, rtspsrc->user_pw);
+ break;
+ case PROP_BUFFER_MODE:
+ g_value_set_enum (value, rtspsrc->buffer_mode);
+ break;
+ case PROP_PORT_RANGE:
+ {
+ gchar *str;
+
+ if (rtspsrc->client_port_range.min != 0) {
+ str = g_strdup_printf ("%u-%u", rtspsrc->client_port_range.min,
+ rtspsrc->client_port_range.max);
+ } else {
+ str = NULL;
+ }
+ g_value_take_string (value, str);
+ break;
+ }
+ case PROP_UDP_BUFFER_SIZE:
+ g_value_set_int (value, rtspsrc->udp_buffer_size);
+ break;
+ case PROP_SHORT_HEADER:
+ g_value_set_boolean (value, rtspsrc->short_header);
+ break;
+ case PROP_PROBATION:
+ g_value_set_uint (value, rtspsrc->probation);
+ break;
+ case PROP_UDP_RECONNECT:
+ g_value_set_boolean (value, rtspsrc->udp_reconnect);
+ break;
+ case PROP_MULTICAST_IFACE:
+ g_value_set_string (value, rtspsrc->multi_iface);
+ break;
+ case PROP_NTP_SYNC:
+ g_value_set_boolean (value, rtspsrc->ntp_sync);
+ break;
+ case PROP_USE_PIPELINE_CLOCK:
+ g_value_set_boolean (value, rtspsrc->use_pipeline_clock);
+ break;
+ case PROP_SDES:
+ g_value_set_boxed (value, rtspsrc->sdes);
+ break;
+ case PROP_TLS_VALIDATION_FLAGS:
+ g_value_set_flags (value, rtspsrc->tls_validation_flags);
+ break;
+ case PROP_TLS_DATABASE:
+ g_value_set_object (value, rtspsrc->tls_database);
+ break;
+ case PROP_TLS_INTERACTION:
+ g_value_set_object (value, rtspsrc->tls_interaction);
+ break;
+ case PROP_DO_RETRANSMISSION:
+ g_value_set_boolean (value, rtspsrc->do_retransmission);
+ break;
+ case PROP_NTP_TIME_SOURCE:
+ g_value_set_enum (value, rtspsrc->ntp_time_source);
+ break;
+ case PROP_USER_AGENT:
+ g_value_set_string (value, rtspsrc->user_agent);
+ break;
+ case PROP_MAX_RTCP_RTP_TIME_DIFF:
+ g_value_set_int (value, rtspsrc->max_rtcp_rtp_time_diff);
+ break;
+ case PROP_RFC7273_SYNC:
+ g_value_set_boolean (value, rtspsrc->rfc7273_sync);
+ break;
+ case PROP_MAX_TS_OFFSET_ADJUSTMENT:
+ g_value_set_uint64 (value, rtspsrc->max_ts_offset_adjustment);
+ break;
+ case PROP_MAX_TS_OFFSET:
+ g_value_set_int64 (value, rtspsrc->max_ts_offset);
+ break;
+ case PROP_DEFAULT_VERSION:
+ g_value_set_enum (value, rtspsrc->default_version);
+ break;
+ case PROP_BACKCHANNEL:
+ g_value_set_enum (value, rtspsrc->backchannel);
+ break;
+ case PROP_TEARDOWN_TIMEOUT:
+ g_value_set_uint64 (value, rtspsrc->teardown_timeout);
+ break;
+ case PROP_ONVIF_MODE:
+ g_value_set_boolean (value, rtspsrc->onvif_mode);
+ break;
+ case PROP_ONVIF_RATE_CONTROL:
+ g_value_set_boolean (value, rtspsrc->onvif_rate_control);
+ break;
+ case PROP_IS_LIVE:
+ g_value_set_boolean (value, rtspsrc->is_live);
+ break;
+ case PROP_IGNORE_X_SERVER_REPLY:
+ g_value_set_boolean (value, rtspsrc->ignore_x_server_reply);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static gint
+ find_stream_by_id (GstRTSPStream * stream, gint * id)
+ {
+ if (stream->id == *id)
+ return 0;
+
+ return -1;
+ }
+
+ static gint
+ find_stream_by_channel (GstRTSPStream * stream, gint * channel)
+ {
+ /* ignore unconfigured channels here (e.g., those that
+ * were explicitly skipped during SETUP) */
+ if ((stream->channelpad[0] != NULL) &&
+ (stream->channel[0] == *channel || stream->channel[1] == *channel))
+ return 0;
+
+ return -1;
+ }
+
+ static gint
+ find_stream_by_udpsrc (GstRTSPStream * stream, gconstpointer a)
+ {
+ GstElement *src = (GstElement *) a;
+
+ if (stream->udpsrc[0] == src)
+ return 0;
+ if (stream->udpsrc[1] == src)
+ return 0;
+
+ return -1;
+ }
+
+ static gint
+ find_stream_by_setup (GstRTSPStream * stream, gconstpointer a)
+ {
+ if (stream->conninfo.location) {
+ /* check qualified setup_url */
+ if (!strcmp (stream->conninfo.location, (gchar *) a))
+ return 0;
+ }
+ if (stream->control_url) {
+ /* check original control_url */
+ if (!strcmp (stream->control_url, (gchar *) a))
+ return 0;
+
+ /* check if qualified setup_url ends with string */
+ if (g_str_has_suffix (stream->control_url, (gchar *) a))
+ return 0;
+ }
+
+ return -1;
+ }
+
+ static GstRTSPStream *
+ find_stream (GstRTSPSrc * src, gconstpointer data, gconstpointer func)
+ {
+ GList *lstream;
+
+ /* find and get stream */
+ if ((lstream = g_list_find_custom (src->streams, data, (GCompareFunc) func)))
+ return (GstRTSPStream *) lstream->data;
+
+ return NULL;
+ }
+
+ static const GstSDPBandwidth *
+ gst_rtspsrc_get_bandwidth (GstRTSPSrc * src, const GstSDPMessage * sdp,
+ const GstSDPMedia * media, const gchar * type)
+ {
+ guint i, len;
+
+ /* first look in the media specific section */
+ len = gst_sdp_media_bandwidths_len (media);
+ for (i = 0; i < len; i++) {
+ const GstSDPBandwidth *bw = gst_sdp_media_get_bandwidth (media, i);
+
+ if (strcmp (bw->bwtype, type) == 0)
+ return bw;
+ }
+ /* then look in the message specific section */
+ len = gst_sdp_message_bandwidths_len (sdp);
+ for (i = 0; i < len; i++) {
+ const GstSDPBandwidth *bw = gst_sdp_message_get_bandwidth (sdp, i);
+
+ if (strcmp (bw->bwtype, type) == 0)
+ return bw;
+ }
+ return NULL;
+ }
+
+ static void
+ gst_rtspsrc_collect_bandwidth (GstRTSPSrc * src, const GstSDPMessage * sdp,
+ const GstSDPMedia * media, GstRTSPStream * stream)
+ {
+ const GstSDPBandwidth *bw;
+
+ if ((bw = gst_rtspsrc_get_bandwidth (src, sdp, media, GST_SDP_BWTYPE_AS)))
+ stream->as_bandwidth = bw->bandwidth;
+ else
+ stream->as_bandwidth = -1;
+
+ if ((bw = gst_rtspsrc_get_bandwidth (src, sdp, media, GST_SDP_BWTYPE_RR)))
+ stream->rr_bandwidth = bw->bandwidth;
+ else
+ stream->rr_bandwidth = -1;
+
+ if ((bw = gst_rtspsrc_get_bandwidth (src, sdp, media, GST_SDP_BWTYPE_RS)))
+ stream->rs_bandwidth = bw->bandwidth;
+ else
+ stream->rs_bandwidth = -1;
+ }
+
+ static void
+ gst_rtspsrc_do_stream_connection (GstRTSPSrc * src, GstRTSPStream * stream,
+ const GstSDPConnection * conn)
+ {
+ if (conn->nettype == NULL || strcmp (conn->nettype, "IN") != 0)
+ return;
+
+ if (conn->addrtype == NULL)
+ return;
+
+ /* check for IPV6 */
+ if (strcmp (conn->addrtype, "IP4") == 0)
+ stream->is_ipv6 = FALSE;
+ else if (strcmp (conn->addrtype, "IP6") == 0)
+ stream->is_ipv6 = TRUE;
+ else
+ return;
+
+ /* save address */
+ g_free (stream->destination);
+ stream->destination = g_strdup (conn->address);
+
+ /* check for multicast */
+ stream->is_multicast =
+ gst_sdp_address_is_multicast (conn->nettype, conn->addrtype,
+ conn->address);
+ stream->ttl = conn->ttl;
+ }
+
+ /* Go over the connections for a stream.
+ * - If we are dealing with IPV6, we will setup IPV6 sockets for sending and
+ * receiving.
+ * - If we are dealing with a localhost address, we disable multicast
+ */
+ static void
+ gst_rtspsrc_collect_connections (GstRTSPSrc * src, const GstSDPMessage * sdp,
+ const GstSDPMedia * media, GstRTSPStream * stream)
+ {
+ const GstSDPConnection *conn;
+ guint i, len;
+
+ /* first look in the media specific section */
+ len = gst_sdp_media_connections_len (media);
+ for (i = 0; i < len; i++) {
+ conn = gst_sdp_media_get_connection (media, i);
+
+ gst_rtspsrc_do_stream_connection (src, stream, conn);
+ }
+ /* then look in the message specific section */
+ if ((conn = gst_sdp_message_get_connection (sdp))) {
+ gst_rtspsrc_do_stream_connection (src, stream, conn);
+ }
+ }
+
+ static gchar *
+ make_stream_id (GstRTSPStream * stream, const GstSDPMedia * media)
+ {
+ gchar *stream_id =
+ g_strdup_printf ("%s:%d:%d:%s:%d", media->media, media->port,
+ media->num_ports, media->proto, stream->default_pt);
+
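+ /* replace every character outside [A-Za-z0-9] with ':', so e.g.
+ * "audio:0:1:RTP/AVP:96" becomes "audio:0:1:RTP:AVP:96" */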
+ g_strcanon (stream_id, G_CSET_a_2_z G_CSET_A_2_Z G_CSET_DIGITS, ':');
+
+ return stream_id;
+ }
+
+ /* m=<media> <UDP port> RTP/AVP <payload>
+ */
+ static void
+ gst_rtspsrc_collect_payloads (GstRTSPSrc * src, const GstSDPMessage * sdp,
+ const GstSDPMedia * media, GstRTSPStream * stream)
+ {
+ guint i, len;
+ const gchar *proto;
+ GstCaps *global_caps;
+
+ /* get proto */
+ proto = gst_sdp_media_get_proto (media);
+ if (proto == NULL)
+ goto no_proto;
+
+ if (g_str_equal (proto, "RTP/AVP"))
+ stream->profile = GST_RTSP_PROFILE_AVP;
+ else if (g_str_equal (proto, "RTP/SAVP"))
+ stream->profile = GST_RTSP_PROFILE_SAVP;
+ else if (g_str_equal (proto, "RTP/AVPF"))
+ stream->profile = GST_RTSP_PROFILE_AVPF;
+ else if (g_str_equal (proto, "RTP/SAVPF"))
+ stream->profile = GST_RTSP_PROFILE_SAVPF;
+ else
+ goto unknown_proto;
+
+ if (gst_sdp_media_get_attribute_val (media, "sendonly") != NULL &&
+ /* We want to setup caps for streams configured as backchannel */
+ !stream->is_backchannel && src->backchannel != BACKCHANNEL_NONE)
+ goto sendonly_media;
+
+ /* Parse global SDP attributes once */
+ global_caps = gst_caps_new_empty_simple ("application/x-unknown");
+ GST_DEBUG ("mapping sdp session level attributes to caps");
+ gst_sdp_message_attributes_to_caps (sdp, global_caps);
+ GST_DEBUG ("mapping sdp media level attributes to caps");
+ gst_sdp_media_attributes_to_caps (media, global_caps);
+
+ /* Keep a copy of the SDP key management */
+ gst_sdp_media_parse_keymgmt (media, &stream->mikey);
+ if (stream->mikey == NULL)
+ gst_sdp_message_parse_keymgmt (sdp, &stream->mikey);
+
+ len = gst_sdp_media_formats_len (media);
+ for (i = 0; i < len; i++) {
+ gint pt;
+ GstCaps *caps, *outcaps;
+ GstStructure *s;
+ const gchar *enc;
+ PtMapItem item;
-
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ const gchar *encoder, *mediatype;
++#endif
+ pt = atoi (gst_sdp_media_get_format (media, i));
+
+ GST_DEBUG_OBJECT (src, " looking at %d pt: %d", i, pt);
+
+ /* convert caps */
+ caps = gst_sdp_media_get_caps_from_media (media, pt);
+ if (caps == NULL) {
+ GST_WARNING_OBJECT (src, " skipping pt %d without caps", pt);
+ continue;
+ }
+
+ /* do some tweaks */
+ s = gst_caps_get_structure (caps, 0);
+ if ((enc = gst_structure_get_string (s, "encoding-name"))) {
+ stream->is_real = (strstr (enc, "-REAL") != NULL);
+ if (strcmp (enc, "X-ASF-PF") == 0)
+ stream->container = TRUE;
+ }
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ if ((mediatype = gst_structure_get_string (s, "media"))) {
++ GST_DEBUG_OBJECT (src, " mediatype : %s", mediatype);
++ if (!strcmp (mediatype, "video")) {
++ if ((encoder = gst_structure_get_string (s, "encoding-name"))) {
++ GST_DEBUG_OBJECT (src, " encoder : %s", encoder);
++ if ((!strcmp (encoder, "H261")) ||
++ (!strcmp (encoder, "H263")) ||
++ (!strcmp (encoder, "H263-1998"))
++ || (!strcmp (encoder, "H263-2000")) || (!strcmp (encoder, "H264"))
++ || (!strcmp (encoder, "MP4V-ES"))) {
++ src->is_video_codec_supported = TRUE;
++ GST_DEBUG_OBJECT (src, "Supported Video Codec %s", encoder);
++ } else {
++ GST_DEBUG_OBJECT (src, "Unsupported Video Codec %s", encoder);
++ }
++ }
++
++ /* free any values from a previous format entry to avoid leaking them */
++ g_free (src->video_codec);
++ src->video_codec = g_strdup (encoder);
++ g_free (src->video_frame_size);
++ src->video_frame_size =
++ g_strdup (gst_structure_get_string (s, "a-framesize"));
++ GST_DEBUG_OBJECT (src, "video_codec %s , video_frame_size %s ",
++ src->video_codec, src->video_frame_size);
++ } else if (!strcmp (mediatype, "audio")) {
++ if ((encoder = gst_structure_get_string (s, "encoding-name"))) {
++ GST_DEBUG_OBJECT (src, " encoder : %s", encoder);
++ if ((!strcmp (encoder, "MP4A-LATM")) ||
++ (!strcmp (encoder, "AMR")) || (!strcmp (encoder, "AMR-WB"))
++ || (!strcmp (encoder, "AMR-NB"))
++ || (!strcmp (encoder, "mpeg4-generic"))
++ || (!strcmp (encoder, "MPEG4-GENERIC"))
++ || (!strcmp (encoder, "QCELP")) || ((strstr (encoder, "G726"))
++ || (strstr (encoder, "PCMU")))) {
++ src->is_audio_codec_supported = TRUE;
++ GST_DEBUG_OBJECT (src, "Supported Audio Codec %s", encoder);
++ } else {
++ GST_DEBUG_OBJECT (src, "Unsupported Audio Codec %s", encoder);
++ }
++ }
++
++ g_free (src->audio_codec);
++ src->audio_codec = g_strdup (encoder);
++ GST_DEBUG_OBJECT (src, "audio_codec %s ", src->audio_codec);
++ }
++ }
++#endif
+
+ /* Merge in global caps */
+ /* Intersect will merge in missing fields to the current caps */
+ outcaps = gst_caps_intersect (caps, global_caps);
+ gst_caps_unref (caps);
+
+ /* the first pt will be the default */
+ if (stream->ptmap->len == 0)
+ stream->default_pt = pt;
+
+ item.pt = pt;
+ item.caps = outcaps;
+
+ g_array_append_val (stream->ptmap, item);
+ }
+
+ stream->stream_id = make_stream_id (stream, media);
+
+ gst_caps_unref (global_caps);
+ return;
+
+ no_proto:
+ {
+ GST_ERROR_OBJECT (src, "can't find proto in media");
+ return;
+ }
+ unknown_proto:
+ {
+ GST_ERROR_OBJECT (src, "unknown proto in media: '%s'", proto);
+ return;
+ }
+ sendonly_media:
+ {
+ GST_DEBUG_OBJECT (src, "sendonly media ignored, no backchannel");
+ return;
+ }
+ }
+
+ static const gchar *
+ get_aggregate_control (GstRTSPSrc * src)
+ {
+ const gchar *base;
+
+ if (src->control)
+ base = src->control;
+ else if (src->content_base)
+ base = src->content_base;
+ else if (src->conninfo.url_str)
+ base = src->conninfo.url_str;
+ else
+ base = "/";
+
+ return base;
+ }
+
+ static void
+ clear_ptmap_item (PtMapItem * item)
+ {
+ if (item->caps)
+ gst_caps_unref (item->caps);
+ }
+
+ static GstRTSPStream *
+ gst_rtspsrc_create_stream (GstRTSPSrc * src, GstSDPMessage * sdp, gint idx,
+ gint n_streams)
+ {
+ GstRTSPStream *stream;
+ const gchar *control_path;
+ const GstSDPMedia *media;
+
+ /* get media, should not return NULL */
+ media = gst_sdp_message_get_media (sdp, idx);
+ if (media == NULL)
+ return NULL;
+
+ stream = g_new0 (GstRTSPStream, 1);
+ stream->parent = src;
+ /* we mark the pad as not linked, we will mark it as OK when we add the pad to
+ * the element. */
+ stream->last_ret = GST_FLOW_NOT_LINKED;
+ stream->added = FALSE;
+ stream->setup = FALSE;
+ stream->skipped = FALSE;
+ stream->id = idx;
+ stream->eos = FALSE;
+ stream->discont = TRUE;
+ stream->seqbase = -1;
+ stream->timebase = -1;
+ stream->send_ssrc = g_random_int ();
+ stream->profile = GST_RTSP_PROFILE_AVP;
+ stream->ptmap = g_array_new (FALSE, FALSE, sizeof (PtMapItem));
+ stream->mikey = NULL;
+ stream->stream_id = NULL;
+ stream->is_backchannel = FALSE;
+ g_mutex_init (&stream->conninfo.send_lock);
+ g_mutex_init (&stream->conninfo.recv_lock);
+ g_array_set_clear_func (stream->ptmap, (GDestroyNotify) clear_ptmap_item);
+
+ /* stream is sendonly and onvif backchannel is requested */
+ if (gst_sdp_media_get_attribute_val (media, "sendonly") != NULL &&
+ src->backchannel != BACKCHANNEL_NONE)
+ stream->is_backchannel = TRUE;
+
+ /* collect bandwidth information for this stream. FIXME, configure in the RTP
+ * session manager to scale RTCP. */
+ gst_rtspsrc_collect_bandwidth (src, sdp, media, stream);
+
+ /* collect connection info */
+ gst_rtspsrc_collect_connections (src, sdp, media, stream);
+
+ /* make the payload type map */
+ gst_rtspsrc_collect_payloads (src, sdp, media, stream);
+
+ /* collect port number */
+ stream->port = gst_sdp_media_get_port (media);
+
+ /* get control url to construct the setup url. The setup url is used to
+ * configure the transport of the stream and is used to identify the stream in
+ * the RTP-Info header field returned from PLAY. */
+ control_path = gst_sdp_media_get_attribute_val (media, "control");
+ if (control_path == NULL)
+ control_path = gst_sdp_message_get_attribute_val_n (sdp, "control", 0);
+
+ GST_DEBUG_OBJECT (src, "stream %d, (%p)", stream->id, stream);
+ GST_DEBUG_OBJECT (src, " port: %d", stream->port);
+ GST_DEBUG_OBJECT (src, " container: %d", stream->container);
+ GST_DEBUG_OBJECT (src, " control: %s", GST_STR_NULL (control_path));
+
+ /* RFC 2326, C.3: missing control_path permitted in case of a single stream */
+ if (control_path == NULL && n_streams == 1) {
+ control_path = "";
+ }
+
+ if (control_path != NULL) {
+ stream->control_url = g_strdup (control_path);
+ /* Build a fully qualified url using the content_base if any or by prefixing
+ * the original request.
+ * If the control_path starts with a non rtsp: protocol we will most
+ * likely build a URL that the server will fail to understand, this is ok,
+ * we will fail then. */
+ if (g_str_has_prefix (control_path, "rtsp://"))
+ stream->conninfo.location = g_strdup (control_path);
+ else {
+ if (g_strcmp0 (control_path, "*") == 0)
+ control_path = "";
+ /* handle url with query */
+ if (src->conninfo.url && src->conninfo.url->query) {
+ stream->conninfo.location =
+ gst_rtsp_url_get_request_uri_with_control (src->conninfo.url,
+ control_path);
+ } else {
+ const gchar *base;
+ gboolean has_slash;
+ const gchar *slash;
+ const gchar *actual_control_path = NULL;
+
+ base = get_aggregate_control (src);
+ has_slash = g_str_has_suffix (base, "/");
+ /* manage existence or non-existence of / in control path */
+ if (control_path && strlen (control_path) > 0) {
+ gboolean control_has_slash = g_str_has_prefix (control_path, "/");
+
+ actual_control_path = control_path;
+ if (has_slash && control_has_slash) {
+ if (strlen (control_path) == 1) {
+ actual_control_path = NULL;
+ } else {
+ actual_control_path = control_path + 1;
+ }
+ } else {
+ has_slash = has_slash || control_has_slash;
+ }
+ }
+ slash = (!has_slash && (actual_control_path != NULL)) ? "/" : "";
+ /* concatenate the two strings, insert / when not present */
+ stream->conninfo.location =
+ g_strdup_printf ("%s%s%s", base, slash, control_path);
+ }
+ }
+ }
+ GST_DEBUG_OBJECT (src, " setup: %s",
+ GST_STR_NULL (stream->conninfo.location));
+
+ /* we keep track of all streams */
+ src->streams = g_list_append (src->streams, stream);
+
+ return stream;
+
+ /* ERRORS */
+ }
+
+ static void
+ gst_rtspsrc_stream_free (GstRTSPSrc * src, GstRTSPStream * stream)
+ {
+ gint i;
+
+ GST_DEBUG_OBJECT (src, "free stream %p", stream);
+
+ g_array_free (stream->ptmap, TRUE);
+
+ g_free (stream->destination);
+ g_free (stream->control_url);
+ g_free (stream->conninfo.location);
+ g_free (stream->stream_id);
+
+ for (i = 0; i < 2; i++) {
+ if (stream->udpsrc[i]) {
+ gst_element_set_state (stream->udpsrc[i], GST_STATE_NULL);
+ if (gst_object_has_as_parent (GST_OBJECT (stream->udpsrc[i]),
+ GST_OBJECT (src)))
+ gst_bin_remove (GST_BIN_CAST (src), stream->udpsrc[i]);
+ gst_object_unref (stream->udpsrc[i]);
+ }
+ if (stream->channelpad[i])
+ gst_object_unref (stream->channelpad[i]);
+
+ if (stream->udpsink[i]) {
+ gst_element_set_state (stream->udpsink[i], GST_STATE_NULL);
+ if (gst_object_has_as_parent (GST_OBJECT (stream->udpsink[i]),
+ GST_OBJECT (src)))
+ gst_bin_remove (GST_BIN_CAST (src), stream->udpsink[i]);
+ gst_object_unref (stream->udpsink[i]);
+ }
+ }
+ if (stream->rtpsrc) {
+ gst_element_set_state (stream->rtpsrc, GST_STATE_NULL);
+ gst_bin_remove (GST_BIN_CAST (src), stream->rtpsrc);
+ gst_object_unref (stream->rtpsrc);
+ }
+ if (stream->srcpad) {
+ gst_pad_set_active (stream->srcpad, FALSE);
+ if (stream->added)
+ gst_element_remove_pad (GST_ELEMENT_CAST (src), stream->srcpad);
+ }
+ if (stream->srtpenc)
+ gst_object_unref (stream->srtpenc);
+ if (stream->srtpdec)
+ gst_object_unref (stream->srtpdec);
+ if (stream->srtcpparams)
+ gst_caps_unref (stream->srtcpparams);
+ if (stream->mikey)
+ gst_mikey_message_unref (stream->mikey);
+ if (stream->rtcppad)
+ gst_object_unref (stream->rtcppad);
+ if (stream->session)
+ g_object_unref (stream->session);
+ if (stream->rtx_pt_map)
+ gst_structure_free (stream->rtx_pt_map);
+
+ g_mutex_clear (&stream->conninfo.send_lock);
+ g_mutex_clear (&stream->conninfo.recv_lock);
+
+ g_free (stream);
+ }
+
+ static void
+ gst_rtspsrc_cleanup (GstRTSPSrc * src)
+ {
+ GList *walk;
+ ParameterRequest *req;
+
+ GST_DEBUG_OBJECT (src, "cleanup");
+
+ for (walk = src->streams; walk; walk = g_list_next (walk)) {
+ GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+
+ gst_rtspsrc_stream_free (src, stream);
+ }
+ g_list_free (src->streams);
+ src->streams = NULL;
+ if (src->manager) {
+ if (src->manager_sig_id) {
+ g_signal_handler_disconnect (src->manager, src->manager_sig_id);
+ src->manager_sig_id = 0;
+ }
+ gst_element_set_state (src->manager, GST_STATE_NULL);
+ gst_bin_remove (GST_BIN_CAST (src), src->manager);
+ src->manager = NULL;
+ }
+ if (src->props)
+ gst_structure_free (src->props);
+ src->props = NULL;
+
+ g_free (src->content_base);
+ src->content_base = NULL;
+
+ g_free (src->control);
+ src->control = NULL;
+
+ if (src->range)
+ gst_rtsp_range_free (src->range);
+ src->range = NULL;
+
+ /* don't clear the SDP when it was used in the url */
+ if (src->sdp && !src->from_sdp) {
+ gst_sdp_message_free (src->sdp);
+ src->sdp = NULL;
+ }
+
+ src->need_segment = FALSE;
+ src->clip_out_segment = FALSE;
+
+ if (src->provided_clock) {
+ gst_object_unref (src->provided_clock);
+ src->provided_clock = NULL;
+ }
+
+ GST_OBJECT_LOCK (src);
+ /* free parameter requests queue */
+ while ((req = g_queue_pop_head (&src->set_get_param_q))) {
+ gst_promise_expire (req->promise);
+ free_param_data (req);
+ }
+ GST_OBJECT_UNLOCK (src);
+
+ }
+
+ static gboolean
+ gst_rtspsrc_alloc_udp_ports (GstRTSPStream * stream,
+ gint * rtpport, gint * rtcpport)
+ {
+ GstRTSPSrc *src;
+ GstStateChangeReturn ret;
+ GstElement *udpsrc0, *udpsrc1;
+ gint tmp_rtp, tmp_rtcp;
+ guint count;
+ const gchar *host;
+
+ src = stream->parent;
+
+ udpsrc0 = NULL;
+ udpsrc1 = NULL;
+ count = 0;
+
+ /* Start at next port */
+ tmp_rtp = src->next_port_num;
+
+ if (stream->is_ipv6)
+ host = "udp://[::0]";
+ else
+ host = "udp://0.0.0.0";
+
+ /* try to allocate 2 UDP ports, the RTP port should be an even
+ * number and the RTCP port should be the next (uneven) port */
+ again:
+
+ if (tmp_rtp != 0 && src->client_port_range.max > 0 &&
+ tmp_rtp >= src->client_port_range.max)
+ goto no_ports;
+
+ udpsrc0 = gst_element_make_from_uri (GST_URI_SRC, host, NULL, NULL);
+ if (udpsrc0 == NULL)
+ goto no_udp_protocol;
+ g_object_set (G_OBJECT (udpsrc0), "port", tmp_rtp, "reuse", FALSE, NULL);
+
+ if (src->udp_buffer_size != 0)
+ g_object_set (G_OBJECT (udpsrc0), "buffer-size", src->udp_buffer_size,
+ NULL);
+
+ ret = gst_element_set_state (udpsrc0, GST_STATE_READY);
+ if (ret == GST_STATE_CHANGE_FAILURE) {
+ if (tmp_rtp != 0) {
+ GST_DEBUG_OBJECT (src, "Unable to make udpsrc from RTP port %d", tmp_rtp);
+
+ tmp_rtp += 2;
+ if (++count > src->retry)
+ goto no_ports;
+
+ GST_DEBUG_OBJECT (src, "free RTP udpsrc");
+ gst_element_set_state (udpsrc0, GST_STATE_NULL);
+ gst_object_unref (udpsrc0);
+ udpsrc0 = NULL;
+
+ GST_DEBUG_OBJECT (src, "retry %d", count);
+ goto again;
+ }
+ goto no_udp_protocol;
+ }
+
+ g_object_get (G_OBJECT (udpsrc0), "port", &tmp_rtp, NULL);
+ GST_DEBUG_OBJECT (src, "got RTP port %d", tmp_rtp);
+
+ /* check if port is even */
+ if ((tmp_rtp & 0x01) != 0) {
+ /* port not even, close and allocate another */
+ if (++count > src->retry)
+ goto no_ports;
+
+ GST_DEBUG_OBJECT (src, "RTP port not even");
+
+ GST_DEBUG_OBJECT (src, "free RTP udpsrc");
+ gst_element_set_state (udpsrc0, GST_STATE_NULL);
+ gst_object_unref (udpsrc0);
+ udpsrc0 = NULL;
+
+ GST_DEBUG_OBJECT (src, "retry %d", count);
+ tmp_rtp++;
+ goto again;
+ }
+
+ /* allocate port+1 for RTCP now */
+ udpsrc1 = gst_element_make_from_uri (GST_URI_SRC, host, NULL, NULL);
+ if (udpsrc1 == NULL)
+ goto no_udp_rtcp_protocol;
+
+ /* set port */
+ tmp_rtcp = tmp_rtp + 1;
+ if (src->client_port_range.max > 0 && tmp_rtcp > src->client_port_range.max)
+ goto no_ports;
+
+ g_object_set (G_OBJECT (udpsrc1), "port", tmp_rtcp, "reuse", FALSE, NULL);
+
+ GST_DEBUG_OBJECT (src, "starting RTCP on port %d", tmp_rtcp);
+ ret = gst_element_set_state (udpsrc1, GST_STATE_READY);
+ /* tmp_rtcp port is busy already : retry to make rtp/rtcp pair */
+ if (ret == GST_STATE_CHANGE_FAILURE) {
+ GST_DEBUG_OBJECT (src, "Unable to make udpsrc from RTCP port %d", tmp_rtcp);
+
+ if (++count > src->retry)
+ goto no_ports;
+
+ GST_DEBUG_OBJECT (src, "free RTP udpsrc");
+ gst_element_set_state (udpsrc0, GST_STATE_NULL);
+ gst_object_unref (udpsrc0);
+ udpsrc0 = NULL;
+
+ GST_DEBUG_OBJECT (src, "free RTCP udpsrc");
+ gst_element_set_state (udpsrc1, GST_STATE_NULL);
+ gst_object_unref (udpsrc1);
+ udpsrc1 = NULL;
+
+ tmp_rtp += 2;
+ GST_DEBUG_OBJECT (src, "retry %d", count);
+ goto again;
+ }
+
+ /* all fine, do port check */
+ g_object_get (G_OBJECT (udpsrc0), "port", rtpport, NULL);
+ g_object_get (G_OBJECT (udpsrc1), "port", rtcpport, NULL);
+
+ /* this should not happen... */
+ if (*rtpport != tmp_rtp || *rtcpport != tmp_rtcp)
+ goto port_error;
+
+ /* we keep these elements, we configure all in configure_transport when the
+ * server told us to really use the UDP ports. */
+ stream->udpsrc[0] = gst_object_ref_sink (udpsrc0);
+ stream->udpsrc[1] = gst_object_ref_sink (udpsrc1);
+ gst_element_set_locked_state (stream->udpsrc[0], TRUE);
+ gst_element_set_locked_state (stream->udpsrc[1], TRUE);
+
+ /* keep track of next available port number when we have a range
+ * configured */
+ if (src->next_port_num != 0)
+ src->next_port_num = tmp_rtcp + 1;
+
+ return TRUE;
+
+ /* ERRORS */
+ no_udp_protocol:
+ {
+ GST_DEBUG_OBJECT (src, "could not get UDP source");
+ goto cleanup;
+ }
+ no_ports:
+ {
+ GST_DEBUG_OBJECT (src, "could not allocate UDP port pair after %d retries",
+ count);
+ goto cleanup;
+ }
+ no_udp_rtcp_protocol:
+ {
+ GST_DEBUG_OBJECT (src, "could not get UDP source for RTCP");
+ goto cleanup;
+ }
+ port_error:
+ {
+ GST_DEBUG_OBJECT (src, "ports don't match rtp: %d<->%d, rtcp: %d<->%d",
+ tmp_rtp, *rtpport, tmp_rtcp, *rtcpport);
+ goto cleanup;
+ }
+ cleanup:
+ {
+ if (udpsrc0) {
+ gst_element_set_state (udpsrc0, GST_STATE_NULL);
+ gst_object_unref (udpsrc0);
+ }
+ if (udpsrc1) {
+ gst_element_set_state (udpsrc1, GST_STATE_NULL);
+ gst_object_unref (udpsrc1);
+ }
+ return FALSE;
+ }
+ }
+
+ static void
+ gst_rtspsrc_set_state (GstRTSPSrc * src, GstState state)
+ {
+ GList *walk;
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ GST_WARNING_OBJECT (src, "Setting [%s] element state to: %s \n",
++ GST_ELEMENT_NAME (GST_ELEMENT_CAST (src)),
++ gst_element_state_get_name (state));
++#endif
+ if (src->manager)
+ gst_element_set_state (GST_ELEMENT_CAST (src->manager), state);
+
+ for (walk = src->streams; walk; walk = g_list_next (walk)) {
+ GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+ gint i;
+
+ for (i = 0; i < 2; i++) {
+ if (stream->udpsrc[i])
+ gst_element_set_state (stream->udpsrc[i], state);
+ }
+ }
+ }
+
+ static void
+ gst_rtspsrc_flush (GstRTSPSrc * src, gboolean flush, gboolean playing,
+ guint32 seqnum)
+ {
+ GstEvent *event;
+ gint cmd;
+ GstState state;
+
+ if (flush) {
+ event = gst_event_new_flush_start ();
+ gst_event_set_seqnum (event, seqnum);
+ GST_DEBUG_OBJECT (src, "start flush");
+ cmd = CMD_WAIT;
+ state = GST_STATE_PAUSED;
+ } else {
+ event = gst_event_new_flush_stop (TRUE);
+ gst_event_set_seqnum (event, seqnum);
+ GST_DEBUG_OBJECT (src, "stop flush; playing %d", playing);
+ cmd = CMD_LOOP;
+ if (playing)
+ state = GST_STATE_PLAYING;
+ else
+ state = GST_STATE_PAUSED;
+ }
+ gst_rtspsrc_push_event (src, event);
+ gst_rtspsrc_loop_send_cmd (src, cmd, CMD_LOOP);
+ gst_rtspsrc_set_state (src, state);
+ }
+
+ static GstRTSPResult
+ gst_rtspsrc_connection_send (GstRTSPSrc * src, GstRTSPConnInfo * conninfo,
+ GstRTSPMessage * message, gint64 timeout)
+ {
+ GstRTSPResult ret;
+
+ if (conninfo->connection) {
+ g_mutex_lock (&conninfo->send_lock);
+ ret =
+ gst_rtsp_connection_send_usec (conninfo->connection, message, timeout);
+ g_mutex_unlock (&conninfo->send_lock);
+ } else {
+ ret = GST_RTSP_ERROR;
+ }
+
+ return ret;
+ }
+
+ static GstRTSPResult
+ gst_rtspsrc_connection_receive (GstRTSPSrc * src, GstRTSPConnInfo * conninfo,
+ GstRTSPMessage * message, gint64 timeout)
+ {
+ GstRTSPResult ret;
+
+ if (conninfo->connection) {
+ g_mutex_lock (&conninfo->recv_lock);
+ ret = gst_rtsp_connection_receive_usec (conninfo->connection, message,
+ timeout);
+ g_mutex_unlock (&conninfo->recv_lock);
+ } else {
+ ret = GST_RTSP_ERROR;
+ }
+
+ return ret;
+ }
+
+ static void
+ gst_rtspsrc_get_position (GstRTSPSrc * src)
+ {
+ GstQuery *query;
+ GList *walk;
+
+ query = gst_query_new_position (GST_FORMAT_TIME);
+ /* should be known somewhere down the stream (e.g. jitterbuffer) */
+ for (walk = src->streams; walk; walk = g_list_next (walk)) {
+ GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+ GstFormat fmt;
+ gint64 pos;
+
+ if (stream->srcpad) {
+ if (gst_pad_query (stream->srcpad, query)) {
+ gst_query_parse_position (query, &fmt, &pos);
+ GST_DEBUG_OBJECT (src, "retaining position %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (pos));
+ src->last_pos = pos;
+ goto out;
+ }
+ }
+ }
+
+ src->last_pos = 0;
+
+ out:
+
+ gst_query_unref (query);
+ }
+
+ static gboolean
+ gst_rtspsrc_perform_seek (GstRTSPSrc * src, GstEvent * event)
+ {
+ gdouble rate;
+ GstFormat format;
+ GstSeekFlags flags;
+ GstSeekType cur_type = GST_SEEK_TYPE_NONE, stop_type = GST_SEEK_TYPE_NONE;
+ gint64 cur, stop;
+ gboolean flush, server_side_trickmode;
+ gboolean update;
+ gboolean playing;
+ GstSegment seeksegment = { 0, };
+ GList *walk;
+ const gchar *seek_style = NULL;
+ gboolean rate_change_only = FALSE;
+ gboolean rate_change_same_direction = FALSE;
+
+ GST_DEBUG_OBJECT (src, "doing seek with event %" GST_PTR_FORMAT, event);
+
+ gst_event_parse_seek (event, &rate, &format, &flags,
+ &cur_type, &cur, &stop_type, &stop);
+ rate_change_only = cur_type == GST_SEEK_TYPE_NONE
+ && stop_type == GST_SEEK_TYPE_NONE;
+
+ /* we need TIME format */
+ if (format != src->segment.format)
+ goto no_format;
+
+ /* Check if we are not at all seekable */
+ if (src->seekable == -1.0)
+ goto not_seekable;
+
+ /* Additional seeking-to-beginning-only check */
+ if (src->seekable == 0.0 && cur != 0)
+ goto not_seekable;
+
+ if (flags & GST_SEEK_FLAG_SEGMENT)
+ goto invalid_segment_flag;
+
+ /* get flush flag */
+ flush = flags & GST_SEEK_FLAG_FLUSH;
+ server_side_trickmode = flags & GST_SEEK_FLAG_TRICKMODE;
+
+ gst_event_parse_seek_trickmode_interval (event, &src->trickmode_interval);
+
+ /* now we need to make sure the streaming thread is stopped. For a flushing
+ * seek we send a FLUSH_START event downstream, which will cause the
+ * streaming thread to stop with a FLUSHING flow return.
+ * For a non-flushing seek we simply pause the task, which will happen as soon
+ * as it completes one iteration (and thus might block when the sink is
+ * blocking in preroll). */
+ if (flush) {
+ GST_DEBUG_OBJECT (src, "starting flush");
+ gst_rtspsrc_flush (src, TRUE, FALSE, gst_event_get_seqnum (event));
+ } else {
+ if (src->task) {
+ gst_task_pause (src->task);
+ }
+ }
+
+ /* we should now be able to grab the streaming thread because we stopped it
+ * with the above flush/pause code */
+ GST_RTSP_STREAM_LOCK (src);
+
+ GST_DEBUG_OBJECT (src, "stopped streaming");
+
+ /* stop flushing the rtsp connection so we can send PAUSE/PLAY below */
+ gst_rtspsrc_connection_flush (src, FALSE);
+
+ /* copy segment, we need this because we still need the old
+ * segment when we close the current segment. */
+ seeksegment = src->segment;
+
+ /* configure the seek parameters in the seeksegment. We will then have the
+ * right values in the segment to perform the seek */
+ GST_DEBUG_OBJECT (src, "configuring seek");
+ rate_change_same_direction = (rate * seeksegment.rate) > 0;
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
+ cur_type, cur, stop_type, stop, &update);
+
+ /* if we were playing, pause first */
+ playing = (src->state == GST_RTSP_STATE_PLAYING);
+ if (playing) {
+ /* obtain current position in case seek fails */
+ gst_rtspsrc_get_position (src);
+ gst_rtspsrc_pause (src, FALSE);
+ }
+ src->server_side_trickmode = server_side_trickmode;
+
+ src->state = GST_RTSP_STATE_SEEKING;
+
+ /* PLAY will add the range header now. */
+ src->need_range = TRUE;
+
+ /* If an accurate seek was requested, we want to clip the segment we
+ * output in ONVIF mode to the requested bounds */
+ src->clip_out_segment = ! !(flags & GST_SEEK_FLAG_ACCURATE);
+ src->seek_seqnum = gst_event_get_seqnum (event);
+
+ /* prepare for streaming again */
+ if (flush) {
+ /* if we started flush, we stop now */
+ GST_DEBUG_OBJECT (src, "stopping flush");
+ gst_rtspsrc_flush (src, FALSE, playing, gst_event_get_seqnum (event));
+ }
+
+ /* now we did the seek and can activate the new segment values */
+ src->segment = seeksegment;
+
+ /* if we're doing a segment seek, post a SEGMENT_START message */
+ if (src->segment.flags & GST_SEEK_FLAG_SEGMENT) {
+ gst_element_post_message (GST_ELEMENT_CAST (src),
+ gst_message_new_segment_start (GST_OBJECT_CAST (src),
+ src->segment.format, src->segment.position));
+ }
+
+ /* mark discont when needed */
+ if (!(rate_change_only && rate_change_same_direction)) {
+ GST_DEBUG_OBJECT (src, "mark DISCONT, we did a seek to another position");
+ for (walk = src->streams; walk; walk = g_list_next (walk)) {
+ GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+ stream->discont = TRUE;
+ }
+ }
+
+ /* and continue playing if needed. If we are not acting as a live source,
+ * then only the RTSP PLAYING state, set earlier, matters. */
+ GST_OBJECT_LOCK (src);
+ if (src->is_live) {
+ playing = (GST_STATE_PENDING (src) == GST_STATE_VOID_PENDING
+ && GST_STATE (src) == GST_STATE_PLAYING)
+ || (GST_STATE_PENDING (src) == GST_STATE_PLAYING);
+ }
+ GST_OBJECT_UNLOCK (src);
+
+ if (src->version >= GST_RTSP_VERSION_2_0) {
+ if (flags & GST_SEEK_FLAG_ACCURATE)
+ seek_style = "RAP";
+ else if (flags & GST_SEEK_FLAG_KEY_UNIT
+ && flags & GST_SEEK_FLAG_SNAP_BEFORE)
+ seek_style = "First-Prior";
+ else if (flags & GST_SEEK_FLAG_KEY_UNIT && flags & GST_SEEK_FLAG_SNAP_AFTER)
+ seek_style = "Next";
+ else if (flags & GST_SEEK_FLAG_KEY_UNIT)
+ seek_style = "CoRAP";
+ /* note: the snap variants must be tested before the plain KEY_UNIT case,
+ * otherwise those branches are unreachable */
+ }
+
+ if (playing)
+ gst_rtspsrc_play (src, &seeksegment, FALSE, seek_style);
+
+ GST_RTSP_STREAM_UNLOCK (src);
+
+ return TRUE;
+
+ /* ERRORS */
+ no_format:
+ {
+ GST_DEBUG_OBJECT (src, "unsupported format given, seek aborted.");
+ return FALSE;
+ }
+ not_seekable:
+ {
+ GST_DEBUG_OBJECT (src, "stream is not seekable");
+ return FALSE;
+ }
+ invalid_segment_flag:
+ {
+ GST_WARNING_OBJECT (src, "Segment seeks not supported");
+ return FALSE;
+ }
+ }
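+
+ /* Illustrative example (not part of the element; "pipeline" and the
+ * 30-second target are placeholders): an application typically triggers
+ * the seek handled above by sending a flushing seek on the pipeline:
+ *
+ * gst_element_seek_simple (pipeline, GST_FORMAT_TIME,
+ * GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT, 30 * GST_SECOND);
+ *
+ * The event travels upstream to the exposed src pads and ends up in
+ * gst_rtspsrc_perform_seek(), which pauses the session, sends PLAY with a
+ * new Range header and resumes. */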
+
+ static gboolean
+ gst_rtspsrc_handle_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+ {
+ GstRTSPSrc *src;
+ gboolean res = TRUE;
+ gboolean forward;
+
+ src = GST_RTSPSRC_CAST (parent);
+
+ GST_DEBUG_OBJECT (src, "pad %s:%s received event %s",
+ GST_DEBUG_PAD_NAME (pad), GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEEK:
+ {
+ guint32 seqnum = gst_event_get_seqnum (event);
+ if (seqnum == src->seek_seqnum) {
+ GST_LOG_OBJECT (pad, "Drop duplicated SEEK event seqnum %"
+ G_GUINT32_FORMAT, seqnum);
+ } else {
+ res = gst_rtspsrc_perform_seek (src, event);
+ }
+ }
+ forward = FALSE;
+ break;
+ case GST_EVENT_QOS:
+ case GST_EVENT_NAVIGATION:
+ case GST_EVENT_LATENCY:
+ default:
+ forward = TRUE;
+ break;
+ }
+ if (forward) {
+ GstPad *target;
+
+ if ((target = gst_ghost_pad_get_target (GST_GHOST_PAD_CAST (pad)))) {
+ res = gst_pad_send_event (target, event);
+ gst_object_unref (target);
+ } else {
+ gst_event_unref (event);
+ }
+ } else {
+ gst_event_unref (event);
+ }
+
+ return res;
+ }
+
+ static void
+ gst_rtspsrc_stream_start_event_add_group_id (GstRTSPSrc * src, GstEvent * event)
+ {
+ g_mutex_lock (&src->group_lock);
+
+ if (src->group_id == GST_GROUP_ID_INVALID)
+ src->group_id = gst_util_group_id_next ();
+
+ g_mutex_unlock (&src->group_lock);
+
+ gst_event_set_group_id (event, src->group_id);
+ }
+
+ static gboolean
+ gst_rtspsrc_handle_src_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+ {
+ GstRTSPStream *stream;
+ GstRTSPSrc *self = GST_RTSPSRC (GST_OBJECT_PARENT (parent));
+
+ stream = gst_pad_get_element_private (pad);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_STREAM_START:{
+ GChecksum *cs;
+ gchar *uri;
+ gchar *stream_id;
+
+ cs = g_checksum_new (G_CHECKSUM_SHA256);
+ uri = self->conninfo.location;
+ g_checksum_update (cs, (const guchar *) uri, strlen (uri));
+
+ stream_id =
+ g_strdup_printf ("%s/%s", g_checksum_get_string (cs),
+ stream->stream_id);
+
+ g_checksum_free (cs);
+ gst_event_unref (event);
+ event = gst_event_new_stream_start (stream_id);
+ gst_rtspsrc_stream_start_event_add_group_id (self, event);
+ g_free (stream_id);
+ break;
+ }
+ case GST_EVENT_SEGMENT:
+ if (self->seek_seqnum != GST_SEQNUM_INVALID)
+ GST_EVENT_SEQNUM (event) = self->seek_seqnum;
+ break;
+ default:
+ break;
+ }
+
+ return gst_pad_push_event (stream->srcpad, event);
+ }
+
+ /* this is the final event function we receive on the internal source pad when
+ * we deal with TCP connections */
+ static gboolean
+ gst_rtspsrc_handle_internal_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+ {
+ gboolean res;
+
+ GST_DEBUG_OBJECT (pad, "received event %s", GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEEK:
+ case GST_EVENT_QOS:
+ case GST_EVENT_NAVIGATION:
+ case GST_EVENT_LATENCY:
+ default:
+ gst_event_unref (event);
+ res = TRUE;
+ break;
+ }
+ return res;
+ }
+
+ /* this is the final query function we receive on the internal source pad when
+ * we deal with TCP connections */
+ static gboolean
+ gst_rtspsrc_handle_internal_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
+ {
+ GstRTSPSrc *src;
+ gboolean res = FALSE;
+
+ src = GST_RTSPSRC_CAST (gst_pad_get_element_private (pad));
+
+ GST_DEBUG_OBJECT (src, "pad %s:%s received query %s",
+ GST_DEBUG_PAD_NAME (pad), GST_QUERY_TYPE_NAME (query));
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_POSITION:
+ {
+ /* no idea */
+ break;
+ }
+ case GST_QUERY_DURATION:
+ {
+ GstFormat format;
+
+ gst_query_parse_duration (query, &format, NULL);
+
+ switch (format) {
+ case GST_FORMAT_TIME:
+ gst_query_set_duration (query, format, src->segment.duration);
+ res = TRUE;
+ break;
+ default:
+ break;
+ }
+ break;
+ }
+ case GST_QUERY_LATENCY:
+ {
+ /* we are live with a min latency of 0 and unlimited max latency; this
+ * result will be updated by the session manager, if there is one. */
+ gst_query_set_latency (query, src->is_live, 0, -1);
+ res = TRUE;
+ break;
+ }
+ default:
+ break;
+ }
+
+ return res;
+ }
+
+ /* this query is executed on the ghost source pad exposed on rtspsrc. */
+ static gboolean
+ gst_rtspsrc_handle_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
+ {
+ GstRTSPSrc *src;
+ gboolean res = FALSE;
+
+ src = GST_RTSPSRC_CAST (parent);
+
+ GST_DEBUG_OBJECT (src, "pad %s:%s received query %s",
+ GST_DEBUG_PAD_NAME (pad), GST_QUERY_TYPE_NAME (query));
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_DURATION:
+ {
+ GstFormat format;
+
+ gst_query_parse_duration (query, &format, NULL);
+
+ switch (format) {
+ case GST_FORMAT_TIME:
+ gst_query_set_duration (query, format, src->segment.duration);
+ res = TRUE;
+ break;
+ default:
+ break;
+ }
+ break;
+ }
+ case GST_QUERY_SEEKING:
+ {
+ GstFormat format;
+
+ gst_query_parse_seeking (query, &format, NULL, NULL, NULL);
+ if (format == GST_FORMAT_TIME) {
+ gboolean seekable = TRUE;
+ GstClockTime start = 0, duration = src->segment.duration;
+
+ /* seeking without duration is unlikely */
+ seekable = seekable && src->seekable >= 0.0 && src->segment.duration &&
+ GST_CLOCK_TIME_IS_VALID (src->segment.duration);
+
+ if (seekable) {
+ if (src->seekable > 0.0) {
+ start = src->last_pos - src->seekable * GST_SECOND;
+ } else {
+ /* src->seekable == 0 means that we can only seek to 0 */
+ start = 0;
+ duration = 0;
+ }
+ }
+
+ GST_LOG_OBJECT (src, "seekable: %d, duration: %" GST_TIME_FORMAT
+ ", src->seekable: %f", seekable,
+ GST_TIME_ARGS (src->segment.duration), src->seekable);
+
+ gst_query_set_seeking (query, GST_FORMAT_TIME, seekable, start,
+ duration);
+ res = TRUE;
+ }
+ break;
+ }
+ case GST_QUERY_URI:
+ {
+ gchar *uri;
+
+ uri = gst_rtspsrc_uri_get_uri (GST_URI_HANDLER (src));
+ if (uri != NULL) {
+ gst_query_set_uri (query, uri);
+ g_free (uri);
+ res = TRUE;
+ }
+ break;
+ }
+ default:
+ {
+ GstPad *target = gst_ghost_pad_get_target (GST_GHOST_PAD_CAST (pad));
+
+ /* forward the query to the proxy target pad */
+ if (target) {
+ res = gst_pad_query (target, query);
+ gst_object_unref (target);
+ }
+ break;
+ }
+ }
+
+ return res;
+ }
+
+ /* callback for RTCP messages to be sent to the server when operating in TCP
+ * mode. */
+ static GstFlowReturn
+ gst_rtspsrc_sink_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
+ {
+ GstRTSPSrc *src;
+ GstRTSPStream *stream;
+ GstFlowReturn res = GST_FLOW_OK;
+ GstRTSPResult ret;
+ GstRTSPMessage message = { 0 };
+ GstRTSPConnInfo *conninfo;
+
+ stream = (GstRTSPStream *) gst_pad_get_element_private (pad);
+ src = stream->parent;
+
+ gst_rtsp_message_init_data (&message, stream->channel[1]);
+
+ /* lend the body data to the message */
+ gst_rtsp_message_set_body_buffer (&message, buffer);
+
+ if (stream->conninfo.connection)
+ conninfo = &stream->conninfo;
+ else
+ conninfo = &src->conninfo;
+
+ GST_DEBUG_OBJECT (src, "sending %u bytes RTCP",
+ (guint) gst_buffer_get_size (buffer));
+ ret = gst_rtspsrc_connection_send (src, conninfo, &message, 0);
+ GST_DEBUG_OBJECT (src, "sent RTCP, %d", ret);
+
+ gst_rtsp_message_unset (&message);
+
+ gst_buffer_unref (buffer);
+
+ return res;
+ }
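+
+ /* For reference: in interleaved (TCP) mode, each data message created with
+ * gst_rtsp_message_init_data() is framed on the RTSP connection as
+ * described in RFC 2326 section 10.12:
+ *
+ * '$' (0x24) | 1-byte channel id | 2-byte big-endian length | payload
+ *
+ * stream->channel[0] carries RTP and stream->channel[1] carries RTCP, as
+ * negotiated with the interleaved= parameter of the Transport header. */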
+
+ static GstFlowReturn
+ gst_rtspsrc_push_backchannel_buffer (GstRTSPSrc * src, guint id,
+ GstSample * sample)
+ {
+ GstFlowReturn res = GST_FLOW_OK;
+ GstRTSPStream *stream;
+
+ if (!src->conninfo.connected || src->state != GST_RTSP_STATE_PLAYING)
+ goto out;
+
+ stream = find_stream (src, &id, (gpointer) find_stream_by_id);
+ if (stream == NULL) {
+ GST_ERROR_OBJECT (src, "no stream with id %u", id);
+ goto out;
+ }
+
+ if (src->interleaved) {
+ GstBuffer *buffer;
+ GstRTSPResult ret;
+ GstRTSPMessage message = { 0 };
+ GstRTSPConnInfo *conninfo;
+
+ buffer = gst_sample_get_buffer (sample);
+
+ gst_rtsp_message_init_data (&message, stream->channel[0]);
+
+ /* lend the body data to the message */
+ gst_rtsp_message_set_body_buffer (&message, buffer);
+
+ if (stream->conninfo.connection)
+ conninfo = &stream->conninfo;
+ else
+ conninfo = &src->conninfo;
+
+ GST_DEBUG_OBJECT (src, "sending %u bytes backchannel RTP",
+ (guint) gst_buffer_get_size (buffer));
+ ret = gst_rtspsrc_connection_send (src, conninfo, &message, 0);
+ GST_DEBUG_OBJECT (src, "sent backchannel RTP, %d", ret);
+
+ gst_rtsp_message_unset (&message);
+
+ res = GST_FLOW_OK;
+ } else {
+ g_signal_emit_by_name (stream->rtpsrc, "push-sample", sample, &res);
+ GST_DEBUG_OBJECT (src, "sent backchannel RTP sample %p: %s", sample,
+ gst_flow_get_name (res));
+ }
+
+ out:
+ gst_sample_unref (sample);
+
+ return res;
+ }
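+
+ /* Illustrative example (application side, names are placeholders): ONVIF
+ * backchannel audio reaches the function above through the
+ * "push-backchannel-buffer" action signal; the sample is consumed by the
+ * call.
+ *
+ * GstFlowReturn flow;
+ * GstSample *sample = gst_sample_new (buffer, caps, NULL, NULL);
+ *
+ * g_signal_emit_by_name (rtspsrc, "push-backchannel-buffer",
+ * stream_id, sample, &flow);
+ */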
+
+ static GstPadProbeReturn
+ pad_blocked (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
+ {
+ GstRTSPSrc *src = user_data;
+
+ GST_DEBUG_OBJECT (src, "pad %s:%s blocked, activating streams",
+ GST_DEBUG_PAD_NAME (pad));
+
+ /* activate the streams */
+ GST_OBJECT_LOCK (src);
+ if (!src->need_activate)
+ goto was_ok;
+
+ src->need_activate = FALSE;
+ GST_OBJECT_UNLOCK (src);
+
+ gst_rtspsrc_activate_streams (src);
+
+ return GST_PAD_PROBE_OK;
+
+ was_ok:
+ {
+ GST_OBJECT_UNLOCK (src);
+ return GST_PAD_PROBE_OK;
+ }
+ }
+
+ static GstPadProbeReturn
+ udpsrc_probe_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
+ {
+ guint32 *segment_seqnum = user_data;
+
+ switch (GST_EVENT_TYPE (info->data)) {
+ case GST_EVENT_SEGMENT:
+ if (!gst_event_is_writable (info->data))
+ info->data = gst_event_make_writable (info->data);
+
+ *segment_seqnum = gst_event_get_seqnum (info->data);
+ break;
+ default:
+ break;
+ }
+
+ return GST_PAD_PROBE_OK;
+ }
+
+ static gboolean
+ copy_sticky_events (GstPad * pad, GstEvent ** event, gpointer user_data)
+ {
+ GstPad *gpad = GST_PAD_CAST (user_data);
+
+ GST_DEBUG_OBJECT (gpad, "store sticky event %" GST_PTR_FORMAT, *event);
+ gst_pad_store_sticky_event (gpad, *event);
+
+ return TRUE;
+ }
+
+ static gboolean
+ add_backchannel_fakesink (GstRTSPSrc * src, GstRTSPStream * stream,
+ GstPad * srcpad)
+ {
+ GstPad *sinkpad;
+ GstElement *fakesink;
+
+ fakesink = gst_element_factory_make ("fakesink", NULL);
+ if (fakesink == NULL) {
+ GST_ERROR_OBJECT (src, "no fakesink");
+ return FALSE;
+ }
+
+ sinkpad = gst_element_get_static_pad (fakesink, "sink");
+
+ GST_DEBUG_OBJECT (src, "backchannel stream %p, hooking fakesink", stream);
+
+ gst_bin_add (GST_BIN_CAST (src), fakesink);
+ if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK) {
+ GST_WARNING_OBJECT (src, "could not link to fakesink");
+ return FALSE;
+ }
+
+ gst_object_unref (sinkpad);
+
+ gst_element_sync_state_with_parent (fakesink);
+ return TRUE;
+ }
+
+ /* this callback is called when the session manager has generated a new src
+ * pad with payloaded RTP packets. We simply ghost the pad here. */
+ static void
+ new_manager_pad (GstElement * manager, GstPad * pad, GstRTSPSrc * src)
+ {
+ gchar *name;
+ GstPadTemplate *template;
+ gint id, ssrc, pt;
+ GList *ostreams;
+ GstRTSPStream *stream;
+ gboolean all_added;
+ GstPad *internal_src;
+
+ GST_DEBUG_OBJECT (src, "got new manager pad %" GST_PTR_FORMAT, pad);
+
+ GST_RTSP_STATE_LOCK (src);
+ /* find stream */
+ name = gst_object_get_name (GST_OBJECT_CAST (pad));
+ if (sscanf (name, "recv_rtp_src_%u_%u_%u", &id, &ssrc, &pt) != 3)
+ goto unknown_stream;
+
+ GST_DEBUG_OBJECT (src, "stream: %u, SSRC %08x, PT %d", id, ssrc, pt);
+
+ stream = find_stream (src, &id, (gpointer) find_stream_by_id);
+ if (stream == NULL)
+ goto unknown_stream;
+
+ /* save SSRC */
+ stream->ssrc = ssrc;
+
+ /* we'll add it later see below */
+ stream->added = TRUE;
+
+ /* check if we added all streams */
+ all_added = TRUE;
+ for (ostreams = src->streams; ostreams; ostreams = g_list_next (ostreams)) {
+ GstRTSPStream *ostream = (GstRTSPStream *) ostreams->data;
+
+ GST_DEBUG_OBJECT (src, "stream %p, container %d, added %d, setup %d",
+ ostream, ostream->container, ostream->added, ostream->setup);
+
+ /* if we find a stream for which we did a setup that is not added, we
+ * need to wait some more */
+ if (ostream->setup && !ostream->added) {
+ all_added = FALSE;
+ break;
+ }
+ }
+ GST_RTSP_STATE_UNLOCK (src);
+
+ /* create a new pad we will use to stream to */
+ template = gst_static_pad_template_get (&rtptemplate);
+ stream->srcpad = gst_ghost_pad_new_from_template (name, pad, template);
+ gst_object_unref (template);
+ g_free (name);
+
+ /* We intercept and modify the stream start event */
+ internal_src =
+ GST_PAD (gst_proxy_pad_get_internal (GST_PROXY_PAD (stream->srcpad)));
+ gst_pad_set_element_private (internal_src, stream);
+ gst_pad_set_event_function (internal_src, gst_rtspsrc_handle_src_sink_event);
+ gst_object_unref (internal_src);
+
+ gst_pad_set_event_function (stream->srcpad, gst_rtspsrc_handle_src_event);
+ gst_pad_set_query_function (stream->srcpad, gst_rtspsrc_handle_src_query);
+ gst_pad_set_active (stream->srcpad, TRUE);
+ gst_pad_sticky_events_foreach (pad, copy_sticky_events, stream->srcpad);
+
+ /* don't add the srcpad if this is a sendonly stream */
+ if (stream->is_backchannel)
+ add_backchannel_fakesink (src, stream, stream->srcpad);
+ else
+ gst_element_add_pad (GST_ELEMENT_CAST (src), stream->srcpad);
+
+ if (all_added) {
+ GST_DEBUG_OBJECT (src, "We added all streams");
+ /* when we get here, all streams are added and we can fire the no-more-pads
+ * signal. */
+ gst_element_no_more_pads (GST_ELEMENT_CAST (src));
+ }
+
+ return;
+
+ /* ERRORS */
+ unknown_stream:
+ {
+ GST_DEBUG_OBJECT (src, "ignoring unknown stream");
+ GST_RTSP_STATE_UNLOCK (src);
+ g_free (name);
+ return;
+ }
+ }
+
+ static GstCaps *
+ stream_get_caps_for_pt (GstRTSPStream * stream, guint pt)
+ {
+ guint i, len;
+
+ len = stream->ptmap->len;
+ for (i = 0; i < len; i++) {
+ PtMapItem *item = &g_array_index (stream->ptmap, PtMapItem, i);
+ if (item->pt == pt)
+ return item->caps;
+ }
+ return NULL;
+ }
+
+ static GstCaps *
+ request_pt_map (GstElement * manager, guint session, guint pt, GstRTSPSrc * src)
+ {
+ GstRTSPStream *stream;
+ GstCaps *caps;
+
+ GST_DEBUG_OBJECT (src, "getting pt map for pt %d in session %d", pt, session);
+
+ GST_RTSP_STATE_LOCK (src);
+ stream = find_stream (src, &session, (gpointer) find_stream_by_id);
+ if (!stream)
+ goto unknown_stream;
+
+ if ((caps = stream_get_caps_for_pt (stream, pt)))
+ gst_caps_ref (caps);
+ GST_RTSP_STATE_UNLOCK (src);
+
+ return caps;
+
+ unknown_stream:
+ {
+ GST_DEBUG_OBJECT (src, "unknown stream %d", session);
+ GST_RTSP_STATE_UNLOCK (src);
+ return NULL;
+ }
+ }
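+
+ /* For reference: the caps returned from the pt map were parsed earlier
+ * from the SDP "a=rtpmap:"/"a=fmtp:" attributes and typically look like
+ * (example values):
+ *
+ * application/x-rtp, media=(string)video, payload=(int)96,
+ * clock-rate=(int)90000, encoding-name=(string)H264
+ */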
+
+ static void
+ gst_rtspsrc_do_stream_eos (GstRTSPSrc * src, GstRTSPStream * stream)
+ {
+ GST_DEBUG_OBJECT (src, "setting stream for session %u to EOS", stream->id);
+
+ if (stream->eos)
+ goto was_eos;
+
+ stream->eos = TRUE;
+ gst_rtspsrc_stream_push_event (src, stream, gst_event_new_eos ());
+ return;
+
+ /* ERRORS */
+ was_eos:
+ {
+ GST_DEBUG_OBJECT (src, "stream for session %u was already EOS", stream->id);
+ return;
+ }
+ }
+
+ static void
+ on_bye_ssrc (GObject * session, GObject * source, GstRTSPStream * stream)
+ {
+ GstRTSPSrc *src = stream->parent;
+ guint ssrc;
+
+ g_object_get (source, "ssrc", &ssrc, NULL);
+
+ GST_DEBUG_OBJECT (src, "source %08x, stream %08x, session %u received BYE",
+ ssrc, stream->ssrc, stream->id);
+
+ if (ssrc == stream->ssrc)
+ gst_rtspsrc_do_stream_eos (src, stream);
+ }
+
+ static void
+ on_timeout_common (GObject * session, GObject * source, GstRTSPStream * stream)
+ {
+ GstRTSPSrc *src = stream->parent;
+ guint ssrc;
+
+ g_object_get (source, "ssrc", &ssrc, NULL);
+
+ GST_WARNING_OBJECT (src, "source %08x, stream %08x in session %u timed out",
+ ssrc, stream->ssrc, stream->id);
+
+ if (ssrc == stream->ssrc)
+ gst_rtspsrc_do_stream_eos (src, stream);
+ }
+
+ static void
+ on_timeout (GObject * session, GObject * source, GstRTSPStream * stream)
+ {
+ GstRTSPSrc *src = stream->parent;
+
+ /* timeout, post element message */
+ gst_element_post_message (GST_ELEMENT_CAST (src),
+ gst_message_new_element (GST_OBJECT_CAST (src),
+ gst_structure_new ("GstRTSPSrcTimeout",
+ "cause", G_TYPE_ENUM, GST_RTSP_SRC_TIMEOUT_CAUSE_RTCP,
+ "stream-number", G_TYPE_INT, stream->id, "ssrc", G_TYPE_UINT,
+ stream->ssrc, NULL)));
+
+ /* In non-live mode, timeouts can occur while we are PAUSED; this doesn't
+ * mean the stream is EOS, it may simply be blocked */
+ if (src->is_live || !src->interleaved)
+ on_timeout_common (session, source, stream);
+ }
+
+ static void
+ on_npt_stop (GstElement * rtpbin, guint session, guint ssrc, GstRTSPSrc * src)
+ {
+ GstRTSPStream *stream;
+
+ GST_DEBUG_OBJECT (src, "source in session %u reached NPT stop", session);
+
+ /* get stream for session */
+ stream = find_stream (src, &session, (gpointer) find_stream_by_id);
+ if (stream) {
+ gst_rtspsrc_do_stream_eos (src, stream);
+ }
+ }
+
+ static void
+ on_ssrc_active (GObject * session, GObject * source, GstRTSPStream * stream)
+ {
+ GST_DEBUG_OBJECT (stream->parent, "source in session %u is active",
+ stream->id);
+ }
+
+ static void
+ set_manager_buffer_mode (GstRTSPSrc * src)
+ {
+ GObjectClass *klass;
+
+ if (src->manager == NULL)
+ return;
+
+ klass = G_OBJECT_GET_CLASS (G_OBJECT (src->manager));
+
+ if (!g_object_class_find_property (klass, "buffer-mode"))
+ return;
+
+ if (src->buffer_mode != BUFFER_MODE_AUTO) {
+ g_object_set (src->manager, "buffer-mode", src->buffer_mode, NULL);
+
+ return;
+ }
+
+ GST_DEBUG_OBJECT (src,
+ "auto buffering mode, have clock %" GST_PTR_FORMAT, src->provided_clock);
+
+ if (src->provided_clock) {
+ GstClock *clock = gst_element_get_clock (GST_ELEMENT_CAST (src));
+
+ if (clock == src->provided_clock) {
+ GST_DEBUG_OBJECT (src, "selected synced");
+ g_object_set (src->manager, "buffer-mode", BUFFER_MODE_SYNCED, NULL);
+
+ if (clock)
+ gst_object_unref (clock);
+
+ return;
+ }
+
+ /* Otherwise fall-through and use another buffer mode */
+ if (clock)
+ gst_object_unref (clock);
+ }
+
+ GST_DEBUG_OBJECT (src, "auto buffering mode");
+ if (src->use_buffering) {
+ GST_DEBUG_OBJECT (src, "selected buffer");
+ g_object_set (src->manager, "buffer-mode", BUFFER_MODE_BUFFER, NULL);
+ } else {
+ GST_DEBUG_OBJECT (src, "selected slave");
+ g_object_set (src->manager, "buffer-mode", BUFFER_MODE_SLAVE, NULL);
+ }
+ }
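+
+ /* Illustrative example (application side): the automatic choice above can
+ * be overridden by setting rtspsrc's "buffer-mode" property by enum nick
+ * (assuming the "synced" nick):
+ *
+ * gst_util_set_object_arg (G_OBJECT (rtspsrc), "buffer-mode", "synced");
+ */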
+
+ static GstCaps *
+ request_key (GstElement * srtpdec, guint ssrc, GstRTSPStream * stream)
+ {
+ guint i;
+ GstCaps *caps;
+ GstMIKEYMessage *msg = stream->mikey;
+
+ GST_DEBUG ("request key SSRC %u", ssrc);
+
+ caps = gst_caps_ref (stream_get_caps_for_pt (stream, stream->default_pt));
+ caps = gst_caps_make_writable (caps);
+
+ /* parse crypto sessions and look for the SSRC rollover counter */
+ msg = stream->mikey;
+ for (i = 0; msg && i < gst_mikey_message_get_n_cs (msg); i++) {
+ const GstMIKEYMapSRTP *map = gst_mikey_message_get_cs_srtp (msg, i);
+
+ if (ssrc == map->ssrc) {
+ gst_caps_set_simple (caps, "roc", G_TYPE_UINT, map->roc, NULL);
+ break;
+ }
+ }
+
+ return caps;
+ }
+
+ static GstElement *
+ request_rtp_decoder (GstElement * rtpbin, guint session, GstRTSPStream * stream)
+ {
+ GST_DEBUG ("decoder session %u, stream %p, %d", session, stream, stream->id);
+ if (stream->id != session)
+ return NULL;
+
+ if (stream->profile != GST_RTSP_PROFILE_SAVP &&
+ stream->profile != GST_RTSP_PROFILE_SAVPF)
+ return NULL;
+
+ if (stream->srtpdec == NULL) {
+ gchar *name;
+
+ name = g_strdup_printf ("srtpdec_%u", session);
+ stream->srtpdec = gst_element_factory_make ("srtpdec", name);
+ g_free (name);
+
+ if (stream->srtpdec == NULL) {
+ GST_ELEMENT_ERROR (stream->parent, CORE, MISSING_PLUGIN, (NULL),
+ ("no srtpdec element present!"));
+ return NULL;
+ }
+ g_signal_connect (stream->srtpdec, "request-key",
+ (GCallback) request_key, stream);
+ }
+ return gst_object_ref (stream->srtpdec);
+ }
+
+ static GstElement *
+ request_rtcp_encoder (GstElement * rtpbin, guint session,
+ GstRTSPStream * stream)
+ {
+ gchar *name;
+ GstPad *pad;
+
+ GST_DEBUG ("decoder session %u, stream %p, %d", session, stream, stream->id);
+ if (stream->id != session)
+ return NULL;
+
+ if (stream->profile != GST_RTSP_PROFILE_SAVP &&
+ stream->profile != GST_RTSP_PROFILE_SAVPF)
+ return NULL;
+
+ if (stream->srtpenc == NULL) {
+ GstStructure *s;
+
+ name = g_strdup_printf ("srtpenc_%u", session);
+ stream->srtpenc = gst_element_factory_make ("srtpenc", name);
+ g_free (name);
+
+ if (stream->srtpenc == NULL) {
+ GST_ELEMENT_ERROR (stream->parent, CORE, MISSING_PLUGIN, (NULL),
+ ("no srtpenc element present!"));
+ return NULL;
+ }
+
+ /* get RTCP crypto parameters from caps */
+ s = gst_caps_get_structure (stream->srtcpparams, 0);
+ if (s) {
+ GstBuffer *buf;
+ const gchar *str;
+ GType ciphertype, authtype;
+ GValue rtcp_cipher = G_VALUE_INIT, rtcp_auth = G_VALUE_INIT;
+
+ ciphertype = g_type_from_name ("GstSrtpCipherType");
+ authtype = g_type_from_name ("GstSrtpAuthType");
+ g_value_init (&rtcp_cipher, ciphertype);
+ g_value_init (&rtcp_auth, authtype);
+
+ str = gst_structure_get_string (s, "srtcp-cipher");
+ gst_value_deserialize (&rtcp_cipher, str);
+ str = gst_structure_get_string (s, "srtcp-auth");
+ gst_value_deserialize (&rtcp_auth, str);
+ gst_structure_get (s, "srtp-key", GST_TYPE_BUFFER, &buf, NULL);
+
+ g_object_set_property (G_OBJECT (stream->srtpenc), "rtp-cipher",
+ &rtcp_cipher);
+ g_object_set_property (G_OBJECT (stream->srtpenc), "rtp-auth",
+ &rtcp_auth);
+ g_object_set_property (G_OBJECT (stream->srtpenc), "rtcp-cipher",
+ &rtcp_cipher);
+ g_object_set_property (G_OBJECT (stream->srtpenc), "rtcp-auth",
+ &rtcp_auth);
+ g_object_set (stream->srtpenc, "key", buf, NULL);
+
+ g_value_unset (&rtcp_cipher);
+ g_value_unset (&rtcp_auth);
+ gst_buffer_unref (buf);
+ }
+ }
+ name = g_strdup_printf ("rtcp_sink_%d", session);
+ pad = gst_element_request_pad_simple (stream->srtpenc, name);
+ g_free (name);
+ gst_object_unref (pad);
+
+ return gst_object_ref (stream->srtpenc);
+ }
+
+ static GstElement *
+ request_aux_receiver (GstElement * rtpbin, guint sessid, GstRTSPSrc * src)
+ {
+ GstElement *rtx, *bin;
+ GstPad *pad;
+ gchar *name;
+ GstRTSPStream *stream;
+
+ stream = find_stream (src, &sessid, (gpointer) find_stream_by_id);
+ if (!stream) {
+ GST_WARNING_OBJECT (src, "Stream %u not found", sessid);
+ return NULL;
+ }
+
+ GST_INFO_OBJECT (src, "creating retransmision receiver for session %u "
+ "with map %" GST_PTR_FORMAT, sessid, stream->rtx_pt_map);
+ bin = gst_bin_new (NULL);
+ rtx = gst_element_factory_make ("rtprtxreceive", NULL);
+ g_object_set (rtx, "payload-type-map", stream->rtx_pt_map, NULL);
+ gst_bin_add (GST_BIN (bin), rtx);
+
+ pad = gst_element_get_static_pad (rtx, "src");
+ name = g_strdup_printf ("src_%u", sessid);
+ gst_element_add_pad (bin, gst_ghost_pad_new (name, pad));
+ g_free (name);
+ gst_object_unref (pad);
+
+ pad = gst_element_get_static_pad (rtx, "sink");
+ name = g_strdup_printf ("sink_%u", sessid);
+ gst_element_add_pad (bin, gst_ghost_pad_new (name, pad));
+ g_free (name);
+ gst_object_unref (pad);
+
+ return bin;
+ }
+
+ static void
+ add_retransmission (GstRTSPSrc * src, GstRTSPTransport * transport)
+ {
+ GList *walk;
+ guint signal_id;
+ gboolean do_retransmission = FALSE;
+
+ if (transport->trans != GST_RTSP_TRANS_RTP)
+ return;
+ if (transport->profile != GST_RTSP_PROFILE_AVPF &&
+ transport->profile != GST_RTSP_PROFILE_SAVPF)
+ return;
+
+ signal_id = g_signal_lookup ("request-aux-receiver",
+ G_OBJECT_TYPE (src->manager));
+ /* there's already something connected */
+ if (g_signal_handler_find (src->manager, G_SIGNAL_MATCH_ID, signal_id, 0,
+ NULL, NULL, NULL) != 0) {
+ GST_DEBUG_OBJECT (src, "Not adding RTX AUX element as "
+ "\"request-aux-receiver\" signal is "
+ "already used by the application");
+ return;
+ }
+
+ /* build the retransmission payload type map */
+ for (walk = src->streams; walk; walk = g_list_next (walk)) {
+ GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+ gboolean do_retransmission_stream = FALSE;
+ int i;
+
+ if (stream->rtx_pt_map)
+ gst_structure_free (stream->rtx_pt_map);
+ stream->rtx_pt_map = gst_structure_new_empty ("application/x-rtp-pt-map");
+
+ for (i = 0; i < stream->ptmap->len; i++) {
+ PtMapItem *item = &g_array_index (stream->ptmap, PtMapItem, i);
+ GstStructure *s = gst_caps_get_structure (item->caps, 0);
+ const gchar *encoding;
+
+ /* we only care about RTX streams */
+ if ((encoding = gst_structure_get_string (s, "encoding-name"))
+ && g_strcmp0 (encoding, "RTX") == 0) {
+ const gchar *stream_pt_s;
+ gint rtx_pt;
+
+ if (gst_structure_get_int (s, "payload", &rtx_pt)
+ && (stream_pt_s = gst_structure_get_string (s, "apt"))) {
+
+ if (rtx_pt != 0) {
+ gst_structure_set (stream->rtx_pt_map, stream_pt_s, G_TYPE_UINT,
+ rtx_pt, NULL);
+ do_retransmission_stream = TRUE;
+ }
+ }
+ }
+ }
+
+ if (do_retransmission_stream) {
+ GST_DEBUG_OBJECT (src, "built retransmission payload map for stream "
+ "id %i: %" GST_PTR_FORMAT, stream->id, stream->rtx_pt_map);
+ do_retransmission = TRUE;
+ } else {
+ GST_DEBUG_OBJECT (src, "no retransmission payload map for stream "
+ "id %i", stream->id);
+ gst_structure_free (stream->rtx_pt_map);
+ stream->rtx_pt_map = NULL;
+ }
+ }
+
+ if (do_retransmission) {
+ GST_DEBUG_OBJECT (src, "Enabling retransmissions");
+
+ g_object_set (src->manager, "do-retransmission", TRUE, NULL);
+
+ /* enable RFC4588 retransmission handling by setting rtprtxreceive
+ * as the "aux" element of rtpbin */
+ g_signal_connect (src->manager, "request-aux-receiver",
+ (GCallback) request_aux_receiver, src);
+ } else {
+ GST_DEBUG_OBJECT (src,
+ "Not enabling retransmissions as no stream had a retransmission payload map");
+ }
+ }
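+
+ /* Illustrative example: with an SDP that signals RFC 4588 retransmission
+ * of payload 96 on payload 97,
+ *
+ * a=rtpmap:96 H264/90000
+ * a=rtpmap:97 rtx/90000
+ * a=fmtp:97 apt=96
+ *
+ * the loop above builds a map equivalent to:
+ *
+ * stream->rtx_pt_map = gst_structure_new ("application/x-rtp-pt-map",
+ * "96", G_TYPE_UINT, 97, NULL);
+ */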
+
+ /* try to get and configure a manager */
+ static gboolean
+ gst_rtspsrc_stream_configure_manager (GstRTSPSrc * src, GstRTSPStream * stream,
+ GstRTSPTransport * transport)
+ {
+ const gchar *manager;
+ gchar *name;
+ GstStateChangeReturn ret;
+
+ if (!src->is_live)
+ goto use_no_manager;
+
+ /* find a manager */
+ if (gst_rtsp_transport_get_manager (transport->trans, &manager, 0) < 0)
+ goto no_manager;
+
+ if (manager) {
+ GST_DEBUG_OBJECT (src, "using manager %s", manager);
+
+ /* configure the manager */
+ if (src->manager == NULL) {
+ GObjectClass *klass;
+
+ if (!(src->manager = gst_element_factory_make (manager, "manager"))) {
+ /* fallback */
+ if (gst_rtsp_transport_get_manager (transport->trans, &manager, 1) < 0)
+ goto no_manager;
+
+ if (!manager)
+ goto use_no_manager;
+
+ if (!(src->manager = gst_element_factory_make (manager, "manager")))
+ goto manager_failed;
+ }
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ if (g_strcmp0 (manager, "rtpbin") == 0) {
++ /* set for player rtsp buffering */
++ g_object_set (src->manager, "use-rtsp-buffering", TRUE, NULL);
++ }
++#endif
+
+ /* we manage this element */
+ gst_element_set_locked_state (src->manager, TRUE);
+ gst_bin_add (GST_BIN_CAST (src), src->manager);
+
+ ret = gst_element_set_state (src->manager, GST_STATE_PAUSED);
+ if (ret == GST_STATE_CHANGE_FAILURE)
+ goto start_manager_failure;
+
+ g_object_set (src->manager, "latency", src->latency, NULL);
+
+ klass = G_OBJECT_GET_CLASS (G_OBJECT (src->manager));
+
+ if (g_object_class_find_property (klass, "ntp-sync")) {
+ g_object_set (src->manager, "ntp-sync", src->ntp_sync, NULL);
+ }
+
+ if (g_object_class_find_property (klass, "rfc7273-sync")) {
+ g_object_set (src->manager, "rfc7273-sync", src->rfc7273_sync, NULL);
+ }
+
+ if (src->use_pipeline_clock) {
+ if (g_object_class_find_property (klass, "use-pipeline-clock")) {
+ g_object_set (src->manager, "use-pipeline-clock", TRUE, NULL);
+ }
+ } else {
+ if (g_object_class_find_property (klass, "ntp-time-source")) {
+ g_object_set (src->manager, "ntp-time-source", src->ntp_time_source,
+ NULL);
+ }
+ }
+
+ if (src->sdes && g_object_class_find_property (klass, "sdes")) {
+ g_object_set (src->manager, "sdes", src->sdes, NULL);
+ }
+
+ if (g_object_class_find_property (klass, "drop-on-latency")) {
+ g_object_set (src->manager, "drop-on-latency", src->drop_on_latency,
+ NULL);
+ }
+
+ if (g_object_class_find_property (klass, "max-rtcp-rtp-time-diff")) {
+ g_object_set (src->manager, "max-rtcp-rtp-time-diff",
+ src->max_rtcp_rtp_time_diff, NULL);
+ }
+
+ if (g_object_class_find_property (klass, "max-ts-offset-adjustment")) {
+ g_object_set (src->manager, "max-ts-offset-adjustment",
+ src->max_ts_offset_adjustment, NULL);
+ }
+
+ if (g_object_class_find_property (klass, "max-ts-offset")) {
+ gint64 max_ts_offset;
+
+ /* setting max-ts-offset in the manager has side effects so only do it
+ * if the value differs */
+ g_object_get (src->manager, "max-ts-offset", &max_ts_offset, NULL);
+ if (max_ts_offset != src->max_ts_offset) {
+ g_object_set (src->manager, "max-ts-offset", src->max_ts_offset,
+ NULL);
+ }
+ }
+
+ /* buffer mode pauses are handled by adding offsets to buffer times,
+ * but some depayloaders may have a hard time syncing output times
+ * with such input times, e.g. container ones, most notably ASF */
+ /* TODO alternatives are having an event that indicates these shifts,
+ * or having rtsp extensions provide suggestion on buffer mode */
+ /* valid duration implies not likely live pipeline,
+ * so slaving in jitterbuffer does not make much sense
+ * (and might mess things up due to bursts) */
+ if (GST_CLOCK_TIME_IS_VALID (src->segment.duration) &&
+ src->segment.duration && stream->container) {
+ src->use_buffering = TRUE;
+ } else {
+ src->use_buffering = FALSE;
+ }
+
+ set_manager_buffer_mode (src);
+
+ /* connect to signals */
+ GST_DEBUG_OBJECT (src, "connect to signals on session manager, stream %p",
+ stream);
+ src->manager_sig_id =
+ g_signal_connect (src->manager, "pad-added",
+ (GCallback) new_manager_pad, src);
+ src->manager_ptmap_id =
+ g_signal_connect (src->manager, "request-pt-map",
+ (GCallback) request_pt_map, src);
+
+ g_signal_connect (src->manager, "on-npt-stop", (GCallback) on_npt_stop,
+ src);
+
+ g_signal_emit (src, gst_rtspsrc_signals[SIGNAL_NEW_MANAGER], 0,
+ src->manager);
+
+ if (src->do_retransmission)
+ add_retransmission (src, transport);
+ }
+ g_signal_connect (src->manager, "request-rtp-decoder",
+ (GCallback) request_rtp_decoder, stream);
+ g_signal_connect (src->manager, "request-rtcp-decoder",
+ (GCallback) request_rtp_decoder, stream);
+ g_signal_connect (src->manager, "request-rtcp-encoder",
+ (GCallback) request_rtcp_encoder, stream);
+
+ /* we stream directly to the manager, get some pads. Each RTSP stream goes
+ * into a separate RTP session. */
+ name = g_strdup_printf ("recv_rtp_sink_%u", stream->id);
+ stream->channelpad[0] = gst_element_request_pad_simple (src->manager, name);
+ g_free (name);
+ name = g_strdup_printf ("recv_rtcp_sink_%u", stream->id);
+ stream->channelpad[1] = gst_element_request_pad_simple (src->manager, name);
+ g_free (name);
+
+ /* now configure the bandwidth in the manager */
+ if (g_signal_lookup ("get-internal-session",
+ G_OBJECT_TYPE (src->manager)) != 0) {
+ GObject *rtpsession;
+
+ g_signal_emit_by_name (src->manager, "get-internal-session", stream->id,
+ &rtpsession);
+ if (rtpsession) {
+ GstRTPProfile rtp_profile;
+
+ GST_INFO_OBJECT (src, "configure bandwidth in session %p", rtpsession);
+
+ stream->session = rtpsession;
+
+ if (stream->as_bandwidth != -1) {
+ GST_INFO_OBJECT (src, "setting AS: %f",
+ (gdouble) (stream->as_bandwidth * 1000));
+ g_object_set (rtpsession, "bandwidth",
+ (gdouble) (stream->as_bandwidth * 1000), NULL);
+ }
+ if (stream->rr_bandwidth != -1) {
+ GST_INFO_OBJECT (src, "setting RR: %u", stream->rr_bandwidth);
+ g_object_set (rtpsession, "rtcp-rr-bandwidth", stream->rr_bandwidth,
+ NULL);
+ }
+ if (stream->rs_bandwidth != -1) {
+ GST_INFO_OBJECT (src, "setting RS: %u", stream->rs_bandwidth);
+ g_object_set (rtpsession, "rtcp-rs-bandwidth", stream->rs_bandwidth,
+ NULL);
+ }
+
+ switch (stream->profile) {
+ case GST_RTSP_PROFILE_AVPF:
+ rtp_profile = GST_RTP_PROFILE_AVPF;
+ break;
+ case GST_RTSP_PROFILE_SAVP:
+ rtp_profile = GST_RTP_PROFILE_SAVP;
+ break;
+ case GST_RTSP_PROFILE_SAVPF:
+ rtp_profile = GST_RTP_PROFILE_SAVPF;
+ break;
+ case GST_RTSP_PROFILE_AVP:
+ default:
+ rtp_profile = GST_RTP_PROFILE_AVP;
+ break;
+ }
+
+ g_object_set (rtpsession, "rtp-profile", rtp_profile, NULL);
+
+ g_object_set (rtpsession, "probation", src->probation, NULL);
+
+ g_object_set (rtpsession, "internal-ssrc", stream->send_ssrc, NULL);
+
+ g_signal_connect (rtpsession, "on-bye-ssrc", (GCallback) on_bye_ssrc,
+ stream);
+ g_signal_connect (rtpsession, "on-bye-timeout",
+ (GCallback) on_timeout_common, stream);
+ g_signal_connect (rtpsession, "on-timeout", (GCallback) on_timeout,
+ stream);
+ g_signal_connect (rtpsession, "on-ssrc-active",
+ (GCallback) on_ssrc_active, stream);
+ }
+ }
+ }
+
+ use_no_manager:
+ return TRUE;
+
+ /* ERRORS */
+ no_manager:
+ {
+ GST_DEBUG_OBJECT (src, "cannot get a session manager");
+ return FALSE;
+ }
+ manager_failed:
+ {
+ GST_DEBUG_OBJECT (src, "no session manager element %s found", manager);
+ return FALSE;
+ }
+ start_manager_failure:
+ {
+ GST_DEBUG_OBJECT (src, "could not start session manager");
+ return FALSE;
+ }
+ }
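+
+ /* Illustrative example (application side, names are placeholders): the
+ * new-manager emission above lets an application tweak the freshly created
+ * rtpbin before it is used, e.g.:
+ *
+ * static void
+ * on_new_manager (GstElement * rtspsrc, GstElement * manager, gpointer data)
+ * {
+ * g_object_set (manager, "latency", 500, NULL);
+ * }
+ *
+ * g_signal_connect (rtspsrc, "new-manager",
+ * G_CALLBACK (on_new_manager), NULL);
+ */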
+
+ /* free the UDP sources allocated when negotiating a transport.
+ * This function is called when the server negotiated to a transport where the
+ * UDP sources are not needed anymore, such as TCP or multicast. */
+ static void
+ gst_rtspsrc_stream_free_udp (GstRTSPStream * stream)
+ {
+ gint i;
+
+ for (i = 0; i < 2; i++) {
+ if (stream->udpsrc[i]) {
+ GST_DEBUG ("free UDP source %d for stream %p", i, stream);
+ gst_element_set_state (stream->udpsrc[i], GST_STATE_NULL);
+ gst_object_unref (stream->udpsrc[i]);
+ stream->udpsrc[i] = NULL;
+ }
+ }
+ }
+
+ /* for TCP, create pads to send and receive data to and from the manager and to
+ * intercept various events and queries
+ */
+ static gboolean
+ gst_rtspsrc_stream_configure_tcp (GstRTSPSrc * src, GstRTSPStream * stream,
+ GstRTSPTransport * transport, GstPad ** outpad)
+ {
+ gchar *name;
+ GstPadTemplate *template;
+ GstPad *pad0, *pad1;
+
+ /* configure for interleaved delivery; nothing needs to be done
+ * here, the loop function will call the chain functions of the
+ * session manager. */
+ stream->channel[0] = transport->interleaved.min;
+ stream->channel[1] = transport->interleaved.max;
+ GST_DEBUG_OBJECT (src, "stream %p on channels %d-%d", stream,
+ stream->channel[0], stream->channel[1]);
+
+ /* we can remove the allocated UDP ports now */
+ gst_rtspsrc_stream_free_udp (stream);
+
+ /* no session manager, send data to srcpad directly */
+ if (!stream->channelpad[0]) {
+ GST_DEBUG_OBJECT (src, "no manager, creating pad");
+
+ /* create a new pad we will use to stream to */
+ name = g_strdup_printf ("stream_%u", stream->id);
+ template = gst_static_pad_template_get (&rtptemplate);
+ stream->channelpad[0] = gst_pad_new_from_template (template, name);
+ gst_object_unref (template);
+ g_free (name);
+
+ /* set caps and activate */
+ gst_pad_use_fixed_caps (stream->channelpad[0]);
+ gst_pad_set_active (stream->channelpad[0], TRUE);
+
+ *outpad = gst_object_ref (stream->channelpad[0]);
+ } else {
+ GST_DEBUG_OBJECT (src, "using manager source pad");
+
+ template = gst_static_pad_template_get (&anysrctemplate);
+
+ /* allocate pads for sending the channel data into the manager */
+ pad0 = gst_pad_new_from_template (template, "internalsrc_0");
+ gst_pad_link_full (pad0, stream->channelpad[0], GST_PAD_LINK_CHECK_NOTHING);
+ gst_object_unref (stream->channelpad[0]);
+ stream->channelpad[0] = pad0;
+ gst_pad_set_event_function (pad0, gst_rtspsrc_handle_internal_src_event);
+ gst_pad_set_query_function (pad0, gst_rtspsrc_handle_internal_src_query);
+ gst_pad_set_element_private (pad0, src);
+ gst_pad_set_active (pad0, TRUE);
+
+ if (stream->channelpad[1]) {
+ /* if we have a sinkpad for the other channel, create a pad and link to the
+ * manager. */
+ pad1 = gst_pad_new_from_template (template, "internalsrc_1");
+ gst_pad_set_event_function (pad1, gst_rtspsrc_handle_internal_src_event);
+ gst_pad_link_full (pad1, stream->channelpad[1],
+ GST_PAD_LINK_CHECK_NOTHING);
+ gst_object_unref (stream->channelpad[1]);
+ stream->channelpad[1] = pad1;
+ gst_pad_set_active (pad1, TRUE);
+ }
+ gst_object_unref (template);
+ }
+ /* setup RTCP transport back to the server if we have to. */
+ if (src->manager && src->do_rtcp) {
+ GstPad *pad;
+
+ template = gst_static_pad_template_get (&anysinktemplate);
+
+ stream->rtcppad = gst_pad_new_from_template (template, "internalsink_0");
+ gst_pad_set_chain_function (stream->rtcppad, gst_rtspsrc_sink_chain);
+ gst_pad_set_element_private (stream->rtcppad, stream);
+ gst_pad_set_active (stream->rtcppad, TRUE);
+
+ /* get session RTCP pad */
+ name = g_strdup_printf ("send_rtcp_src_%u", stream->id);
+ pad = gst_element_request_pad_simple (src->manager, name);
+ g_free (name);
+
+ /* and link */
+ if (pad) {
+ gst_pad_link_full (pad, stream->rtcppad, GST_PAD_LINK_CHECK_NOTHING);
+ gst_object_unref (pad);
+ }
+
+ gst_object_unref (template);
+ }
+ return TRUE;
+ }
+
+ static void
+ gst_rtspsrc_get_transport_info (GstRTSPSrc * src, GstRTSPStream * stream,
+ GstRTSPTransport * transport, const gchar ** destination, gint * min,
+ gint * max, guint * ttl)
+ {
+ if (transport->lower_transport == GST_RTSP_LOWER_TRANS_UDP_MCAST) {
+ if (destination) {
+ if (!(*destination = transport->destination))
+ *destination = stream->destination;
+ }
+ if (min && max) {
+ /* transport first */
+ *min = transport->port.min;
+ *max = transport->port.max;
+ if (*min == -1 && *max == -1) {
+ /* then try from SDP */
+ if (stream->port != 0) {
+ *min = stream->port;
+ *max = stream->port + 1;
+ }
+ }
+ }
+
+ if (ttl) {
+ if (!(*ttl = transport->ttl))
+ *ttl = stream->ttl;
+ }
+ } else {
+ if (destination) {
+ /* first take the source, then the endpoint to figure out where to send
+ * the RTCP. */
+ if (!(*destination = transport->source)) {
+ if (src->conninfo.connection)
+ *destination = gst_rtsp_connection_get_ip (src->conninfo.connection);
+ else if (stream->conninfo.connection)
+ *destination =
+ gst_rtsp_connection_get_ip (stream->conninfo.connection);
+ }
+ }
+ if (min && max) {
+ /* for unicast we only expect the ports here */
+ *min = transport->server_port.min;
+ *max = transport->server_port.max;
+ }
+ }
+ }
+
+ /* For multicast create UDP sources and join the multicast group. */
+ static gboolean
+ gst_rtspsrc_stream_configure_mcast (GstRTSPSrc * src, GstRTSPStream * stream,
+ GstRTSPTransport * transport, GstPad ** outpad)
+ {
+ gchar *uri;
+ const gchar *destination;
+ gint min, max;
+
+ GST_DEBUG_OBJECT (src, "creating UDP sources for multicast");
+
+ /* we can remove the allocated UDP ports now */
+ gst_rtspsrc_stream_free_udp (stream);
+
+ gst_rtspsrc_get_transport_info (src, stream, transport, &destination, &min,
+ &max, NULL);
+
+ /* we need a destination now */
+ if (destination == NULL)
+ goto no_destination;
+
+ /* we really need ports now or we won't be able to receive anything at all */
+ if (min == -1 && max == -1)
+ goto no_ports;
+
+ GST_DEBUG_OBJECT (src, "have destination '%s' and ports (%d)-(%d)",
+ destination, min, max);
+
+ /* creating UDP source for RTP */
+ if (min != -1) {
+ uri = g_strdup_printf ("udp://%s:%d", destination, min);
+ stream->udpsrc[0] =
+ gst_element_make_from_uri (GST_URI_SRC, uri, NULL, NULL);
+ g_free (uri);
+ if (stream->udpsrc[0] == NULL)
+ goto no_element;
+
+ /* take ownership */
+ gst_object_ref_sink (stream->udpsrc[0]);
+
+ if (src->udp_buffer_size != 0)
+ g_object_set (G_OBJECT (stream->udpsrc[0]), "buffer-size",
+ src->udp_buffer_size, NULL);
+
+ if (src->multi_iface != NULL)
+ g_object_set (G_OBJECT (stream->udpsrc[0]), "multicast-iface",
+ src->multi_iface, NULL);
+
+ /* change state */
+ gst_element_set_locked_state (stream->udpsrc[0], TRUE);
+ gst_element_set_state (stream->udpsrc[0], GST_STATE_READY);
+ }
+
+ /* creating another UDP source for RTCP */
+ if (max != -1) {
+ GstCaps *caps;
+
+ uri = g_strdup_printf ("udp://%s:%d", destination, max);
+ stream->udpsrc[1] =
+ gst_element_make_from_uri (GST_URI_SRC, uri, NULL, NULL);
+ g_free (uri);
+ if (stream->udpsrc[1] == NULL)
+ goto no_element;
+
+ if (stream->profile == GST_RTSP_PROFILE_SAVP ||
+ stream->profile == GST_RTSP_PROFILE_SAVPF)
+ caps = gst_caps_new_empty_simple ("application/x-srtcp");
+ else
+ caps = gst_caps_new_empty_simple ("application/x-rtcp");
+ g_object_set (stream->udpsrc[1], "caps", caps, NULL);
+ gst_caps_unref (caps);
+
+ /* take ownership */
+ gst_object_ref_sink (stream->udpsrc[1]);
+
+ if (src->multi_iface != NULL)
+ g_object_set (G_OBJECT (stream->udpsrc[1]), "multicast-iface",
+ src->multi_iface, NULL);
+
+ gst_element_set_state (stream->udpsrc[1], GST_STATE_READY);
+ }
+ return TRUE;
+
+ /* ERRORS */
+ no_element:
+ {
+ GST_DEBUG_OBJECT (src, "no UDP source element found");
+ return FALSE;
+ }
+ no_destination:
+ {
+ GST_DEBUG_OBJECT (src, "no destination found");
+ return FALSE;
+ }
+ no_ports:
+ {
+ GST_DEBUG_OBJECT (src, "no ports found");
+ return FALSE;
+ }
+ }
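+
+ /* Illustrative example: for a server-announced multicast session with
+ * destination 224.1.1.1 and ports 5000-5001 (example values), the code
+ * above effectively does:
+ *
+ * stream->udpsrc[0] = gst_element_make_from_uri (GST_URI_SRC,
+ * "udp://224.1.1.1:5000", NULL, NULL);
+ *
+ * and udpsrc joins the multicast group itself when it starts. */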
+
+ /* configure the remainder of the UDP ports */
+ static gboolean
+ gst_rtspsrc_stream_configure_udp (GstRTSPSrc * src, GstRTSPStream * stream,
+ GstRTSPTransport * transport, GstPad ** outpad)
+ {
+ /* we manage the UDP elements now. For unicast, the UDP sources were
+ * allocated in the stream when we suggested a transport. */
+ if (stream->udpsrc[0]) {
+ GstCaps *caps;
+
+ gst_element_set_locked_state (stream->udpsrc[0], TRUE);
+ gst_bin_add (GST_BIN_CAST (src), stream->udpsrc[0]);
+
+ GST_DEBUG_OBJECT (src, "setting up UDP source");
+
+ /* configure a timeout on the UDP port. When the timeout message is
+ * posted, we assume UDP transport is not possible. We reconnect using TCP
+ * if we can. */
+ g_object_set (G_OBJECT (stream->udpsrc[0]), "timeout",
+ src->udp_timeout * 1000, NULL);
+
+ if ((caps = stream_get_caps_for_pt (stream, stream->default_pt)))
+ g_object_set (stream->udpsrc[0], "caps", caps, NULL);
+
+ /* get output pad of the UDP source. */
+ *outpad = gst_element_get_static_pad (stream->udpsrc[0], "src");
+
+ /* save it so we can unblock */
+ stream->blockedpad = *outpad;
+
+ /* configure pad block on the pad. As soon as there is dataflow on the
+ * UDP source, we know that UDP is not blocked by a firewall and we can
+ * configure all the streams to let the application autoplug decoders. */
+ stream->blockid =
+ gst_pad_add_probe (stream->blockedpad,
+ GST_PAD_PROBE_TYPE_BLOCK | GST_PAD_PROBE_TYPE_BUFFER |
+ GST_PAD_PROBE_TYPE_BUFFER_LIST, pad_blocked, src, NULL);
+
+ gst_pad_add_probe (stream->blockedpad,
+ GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, udpsrc_probe_cb,
+ &(stream->segment_seqnum[0]), NULL);
+
+ if (stream->channelpad[0]) {
+ GST_DEBUG_OBJECT (src, "connecting UDP source 0 to manager");
+ /* configure for UDP delivery, we need to connect the UDP pads to
+ * the session plugin. */
+ gst_pad_link_full (*outpad, stream->channelpad[0],
+ GST_PAD_LINK_CHECK_NOTHING);
+ gst_object_unref (*outpad);
+ *outpad = NULL;
+ /* we connected to pad-added signal to get pads from the manager */
+ } else {
+ GST_DEBUG_OBJECT (src, "using UDP src pad as output");
+ }
+ }
+
+ /* RTCP port */
+ if (stream->udpsrc[1]) {
+ GstCaps *caps;
+
+ gst_element_set_locked_state (stream->udpsrc[1], TRUE);
+ gst_bin_add (GST_BIN_CAST (src), stream->udpsrc[1]);
+
+ if (stream->profile == GST_RTSP_PROFILE_SAVP ||
+ stream->profile == GST_RTSP_PROFILE_SAVPF)
+ caps = gst_caps_new_empty_simple ("application/x-srtcp");
+ else
+ caps = gst_caps_new_empty_simple ("application/x-rtcp");
+ g_object_set (stream->udpsrc[1], "caps", caps, NULL);
+ gst_caps_unref (caps);
+
+ if (stream->channelpad[1]) {
+ GstPad *pad;
+
+ GST_DEBUG_OBJECT (src, "connecting UDP source 1 to manager");
+
+ pad = gst_element_get_static_pad (stream->udpsrc[1], "src");
+ gst_pad_add_probe (pad,
+ GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, udpsrc_probe_cb,
+ &(stream->segment_seqnum[1]), NULL);
+ gst_pad_link_full (pad, stream->channelpad[1],
+ GST_PAD_LINK_CHECK_NOTHING);
+ gst_object_unref (pad);
+ } else {
+ /* leave unlinked */
+ }
+ }
+ return TRUE;
+ }
+
+ /* configure the UDP sink back to the server for status reports */
+ static gboolean
+ gst_rtspsrc_stream_configure_udp_sinks (GstRTSPSrc * src,
+ GstRTSPStream * stream, GstRTSPTransport * transport)
+ {
+ GstPad *pad;
+ gint rtp_port, rtcp_port;
+ gboolean do_rtp, do_rtcp;
+ const gchar *destination;
+ gchar *uri, *name;
+ guint ttl = 0;
+ GSocket *socket;
+
+ /* get transport info */
+ gst_rtspsrc_get_transport_info (src, stream, transport, &destination,
+ &rtp_port, &rtcp_port, &ttl);
+
+ /* see what we need to do */
+ do_rtp = (rtp_port != -1);
+ /* it's possible that the server does not want us to send RTCP in which case
+ * the port is -1 */
+ do_rtcp = (rtcp_port != -1 && src->manager != NULL && src->do_rtcp);
+
+ /* we need a destination when we have RTP or RTCP ports */
+ if (destination == NULL && (do_rtp || do_rtcp))
+ goto no_destination;
+
+ /* try to construct the fakesrc to the RTP port of the server to open up any
+ * NAT firewalls or, if backchannel, construct an appsrc */
+ if (do_rtp) {
+ GST_DEBUG_OBJECT (src, "configure RTP UDP sink for %s:%d", destination,
+ rtp_port);
+
+ uri = g_strdup_printf ("udp://%s:%d", destination, rtp_port);
+ stream->udpsink[0] =
+ gst_element_make_from_uri (GST_URI_SINK, uri, NULL, NULL);
+ g_free (uri);
+ if (stream->udpsink[0] == NULL)
+ goto no_sink_element;
+
+ /* don't join multicast group, we will have the source socket do that */
+ /* no sync or async state changes needed */
+ g_object_set (G_OBJECT (stream->udpsink[0]), "auto-multicast", FALSE,
+ "loop", FALSE, "sync", FALSE, "async", FALSE, NULL);
+ if (ttl > 0)
+ g_object_set (G_OBJECT (stream->udpsink[0]), "ttl", ttl, NULL);
+
+ if (stream->udpsrc[0]) {
+ /* configure socket, we give it the same UDP socket as the udpsrc for RTP
+ * so that NAT firewalls will open a hole for us */
+ g_object_get (G_OBJECT (stream->udpsrc[0]), "used-socket", &socket, NULL);
+ if (!socket)
+ goto no_socket;
+
+ GST_DEBUG_OBJECT (src, "RTP UDP src has sock %p", socket);
+ /* configure socket and make sure udpsink does not close it when shutting
+ * down, it belongs to udpsrc after all. */
+ g_object_set (G_OBJECT (stream->udpsink[0]), "socket", socket,
+ "close-socket", FALSE, NULL);
+ g_object_unref (socket);
+ }
+
+ if (stream->is_backchannel) {
+ /* appsrc is for the app to shovel data using push-backchannel-buffer */
+ stream->rtpsrc = gst_element_factory_make ("appsrc", NULL);
+ if (stream->rtpsrc == NULL)
+ goto no_appsrc_element;
+
+ /* internal use only */
+ g_object_set (G_OBJECT (stream->rtpsrc), "emit-signals", TRUE,
+ "is-live", TRUE, NULL);
+ } else {
+ /* the source for the dummy packets to open up NAT */
+ stream->rtpsrc = gst_element_factory_make ("fakesrc", NULL);
+ if (stream->rtpsrc == NULL)
+ goto no_fakesrc_element;
+
+ /* random data in 5 buffers, a size of 200 bytes should be fine */
+ g_object_set (G_OBJECT (stream->rtpsrc), "filltype", 3, "num-buffers", 5,
+ "sizetype", 2, "sizemax", 200, "silent", TRUE, NULL);
+ }
+
+ /* keep everything locked */
+ gst_element_set_locked_state (stream->udpsink[0], TRUE);
+ gst_element_set_locked_state (stream->rtpsrc, TRUE);
+
+ gst_object_ref (stream->udpsink[0]);
+ gst_bin_add (GST_BIN_CAST (src), stream->udpsink[0]);
+ gst_object_ref (stream->rtpsrc);
+ gst_bin_add (GST_BIN_CAST (src), stream->rtpsrc);
+
+ gst_element_link_pads_full (stream->rtpsrc, "src", stream->udpsink[0],
+ "sink", GST_PAD_LINK_CHECK_NOTHING);
+ }
+ if (do_rtcp) {
+ GST_DEBUG_OBJECT (src, "configure RTCP UDP sink for %s:%d", destination,
+ rtcp_port);
+
+ uri = g_strdup_printf ("udp://%s:%d", destination, rtcp_port);
+ stream->udpsink[1] =
+ gst_element_make_from_uri (GST_URI_SINK, uri, NULL, NULL);
+ g_free (uri);
+ if (stream->udpsink[1] == NULL)
+ goto no_sink_element;
+
+ /* don't join multicast group, we will have the source socket do that */
+ /* no sync or async state changes needed */
+ g_object_set (G_OBJECT (stream->udpsink[1]), "auto-multicast", FALSE,
+ "loop", FALSE, "sync", FALSE, "async", FALSE, NULL);
+ if (ttl > 0)
+ g_object_set (G_OBJECT (stream->udpsink[0]), "ttl", ttl, NULL);
+
+ if (stream->udpsrc[1]) {
+ /* configure socket, we give it the same UDP socket as the udpsrc for RTCP
+ * because some servers identify the RTCP packets they receive by the
+ * port number we send them from */
+ g_object_get (G_OBJECT (stream->udpsrc[1]), "used-socket", &socket, NULL);
+ if (!socket)
+ goto no_socket;
+
+ GST_DEBUG_OBJECT (src, "RTCP UDP src has sock %p", socket);
+ /* configure socket and make sure udpsink does not close it when shutting
+ * down, it belongs to udpsrc after all. */
+ g_object_set (G_OBJECT (stream->udpsink[1]), "socket", socket,
+ "close-socket", FALSE, NULL);
+ g_object_unref (socket);
+ }
+
+ /* we keep this playing always */
+ gst_element_set_locked_state (stream->udpsink[1], TRUE);
+ gst_element_set_state (stream->udpsink[1], GST_STATE_PLAYING);
+
+ gst_object_ref (stream->udpsink[1]);
+ gst_bin_add (GST_BIN_CAST (src), stream->udpsink[1]);
+
+ stream->rtcppad = gst_element_get_static_pad (stream->udpsink[1], "sink");
+
+ /* get session RTCP pad */
+ name = g_strdup_printf ("send_rtcp_src_%u", stream->id);
+ pad = gst_element_request_pad_simple (src->manager, name);
+ g_free (name);
+
+ /* and link */
+ if (pad) {
+ gst_pad_link_full (pad, stream->rtcppad, GST_PAD_LINK_CHECK_NOTHING);
+ gst_object_unref (pad);
+ }
+ }
+
+ return TRUE;
+
+ /* ERRORS */
+ no_destination:
+ {
+ GST_ERROR_OBJECT (src, "no destination address specified");
+ return FALSE;
+ }
+ no_sink_element:
+ {
+ GST_ERROR_OBJECT (src, "no UDP sink element found");
+ return FALSE;
+ }
+ no_appsrc_element:
+ {
+ GST_ERROR_OBJECT (src, "no appsrc element found");
+ return FALSE;
+ }
+ no_fakesrc_element:
+ {
+ GST_ERROR_OBJECT (src, "no fakesrc element found");
+ return FALSE;
+ }
+ no_socket:
+ {
+ GST_ERROR_OBJECT (src, "failed to create socket");
+ return FALSE;
+ }
+ }
+
+ /* sets up all elements needed for streaming over the specified transport.
+ * Does not yet expose the element pads; this will be done when actual
+ * dataflow is detected, which might never happen when UDP is blocked by a
+ * firewall, for example.
+ */
+ static gboolean
+ gst_rtspsrc_stream_configure_transport (GstRTSPStream * stream,
+ GstRTSPTransport * transport)
+ {
+ GstRTSPSrc *src;
+ GstPad *outpad = NULL;
+ GstPadTemplate *template;
+ gchar *name;
+ const gchar *media_type;
+ guint i, len;
+
+ src = stream->parent;
+
+ GST_DEBUG_OBJECT (src, "configuring transport for stream %p", stream);
+
+ /* get the proper media type for this stream now */
+ if (gst_rtsp_transport_get_media_type (transport, &media_type) < 0)
+ goto unknown_transport;
+ if (!media_type)
+ goto unknown_transport;
+
+ /* configure the final media type */
+ GST_DEBUG_OBJECT (src, "setting media type to %s", media_type);
+
+ len = stream->ptmap->len;
+ for (i = 0; i < len; i++) {
+ GstStructure *s;
+ PtMapItem *item = &g_array_index (stream->ptmap, PtMapItem, i);
+
+ if (item->caps == NULL)
+ continue;
+
+ s = gst_caps_get_structure (item->caps, 0);
+ gst_structure_set_name (s, media_type);
+ /* set ssrc if known */
+ if (transport->ssrc)
+ gst_structure_set (s, "ssrc", G_TYPE_UINT, transport->ssrc, NULL);
+ }
+
+ /* try to get and configure a manager, channelpad[0-1] will be configured with
+ * the pads for the manager, or NULL when no manager is needed. */
+ if (!gst_rtspsrc_stream_configure_manager (src, stream, transport))
+ goto no_manager;
+
+ switch (transport->lower_transport) {
+ case GST_RTSP_LOWER_TRANS_TCP:
+ if (!gst_rtspsrc_stream_configure_tcp (src, stream, transport, &outpad))
+ goto transport_failed;
+ break;
+ case GST_RTSP_LOWER_TRANS_UDP_MCAST:
+ if (!gst_rtspsrc_stream_configure_mcast (src, stream, transport, &outpad))
+ goto transport_failed;
+ /* fallthrough, the rest is the same for UDP and MCAST */
+ case GST_RTSP_LOWER_TRANS_UDP:
+ if (!gst_rtspsrc_stream_configure_udp (src, stream, transport, &outpad))
+ goto transport_failed;
+ /* configure udpsinks back to the server for RTCP messages, for the
+ * dummy RTP messages to open NAT, and for the backchannel */
+ if (!gst_rtspsrc_stream_configure_udp_sinks (src, stream, transport))
+ goto transport_failed;
+ break;
+ default:
+ goto unknown_transport;
+ }
+
+ /* using backchannel and no manager, hence no srcpad for this stream */
+ if (outpad && stream->is_backchannel) {
+ add_backchannel_fakesink (src, stream, outpad);
+ gst_object_unref (outpad);
+ } else if (outpad) {
+ GST_DEBUG_OBJECT (src, "creating ghostpad for stream %p", stream);
+
+ gst_pad_use_fixed_caps (outpad);
+
+ /* create ghostpad, don't add just yet, this will be done when we activate
+ * the stream. */
+ name = g_strdup_printf ("stream_%u", stream->id);
+ template = gst_static_pad_template_get (&rtptemplate);
+ stream->srcpad = gst_ghost_pad_new_from_template (name, outpad, template);
+ gst_pad_set_event_function (stream->srcpad, gst_rtspsrc_handle_src_event);
+ gst_pad_set_query_function (stream->srcpad, gst_rtspsrc_handle_src_query);
+ gst_object_unref (template);
+ g_free (name);
+
+ gst_object_unref (outpad);
+ }
+ /* mark pad as ok */
+ stream->last_ret = GST_FLOW_OK;
+
+ return TRUE;
+
+ /* ERRORS */
+ transport_failed:
+ {
+ GST_WARNING_OBJECT (src, "failed to configure transport");
+ return FALSE;
+ }
+ unknown_transport:
+ {
+ GST_WARNING_OBJECT (src, "unknown transport");
+ return FALSE;
+ }
+ no_manager:
+ {
+ GST_WARNING_OBJECT (src, "cannot get a session manager");
+ return FALSE;
+ }
+ }
+
+ /* send a couple of dummy random packets on the receiver RTP port to the server,
+ * this should make a firewall think we initiated the data transfer and
+ * hopefully allow packets to go from the sender port to our RTP receiver port */
+ static gboolean
+ gst_rtspsrc_send_dummy_packets (GstRTSPSrc * src)
+ {
+ GList *walk;
+
+ if (src->nat_method != GST_RTSP_NAT_DUMMY)
+ return TRUE;
+
+ for (walk = src->streams; walk; walk = g_list_next (walk)) {
+ GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+
+ if (!stream->rtpsrc || !stream->udpsink[0])
+ continue;
+
+ if (stream->is_backchannel)
+ GST_DEBUG_OBJECT (src, "starting backchannel stream %p", stream);
+ else
+ GST_DEBUG_OBJECT (src, "sending dummy packet to stream %p", stream);
+
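+     /* cycling both elements through NULL and back to PLAYING restarts the
+      * dummy sender, so each call pushes a fresh burst of packets towards
+      * the server */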
+ gst_element_set_state (stream->udpsink[0], GST_STATE_NULL);
+ gst_element_set_state (stream->rtpsrc, GST_STATE_NULL);
+ gst_element_set_state (stream->udpsink[0], GST_STATE_PLAYING);
+ gst_element_set_state (stream->rtpsrc, GST_STATE_PLAYING);
+ }
+ return TRUE;
+ }
+
+ /* Adds the source pads of all configured streams to the element.
+  * This code runs once we have detected dataflow.
+  *
+  * We detect dataflow either from the _loop function or with pad probes on
+  * the udp sources.
+  */
+ static gboolean
+ gst_rtspsrc_activate_streams (GstRTSPSrc * src)
+ {
+ GList *walk;
+
+ GST_DEBUG_OBJECT (src, "activating streams");
+
+ for (walk = src->streams; walk; walk = g_list_next (walk)) {
+ GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+
+ if (stream->udpsrc[0]) {
+ /* remove timeout, we are streaming now and timeouts will be handled by
+ * the session manager and jitter buffer */
+ g_object_set (G_OBJECT (stream->udpsrc[0]), "timeout", (guint64) 0, NULL);
+ }
+ if (stream->srcpad) {
+ GST_DEBUG_OBJECT (src, "activating stream pad %p", stream);
+ gst_pad_set_active (stream->srcpad, TRUE);
+
+ /* if we don't have a session manager, set the caps now. If we have a
+ * session, we will get a notification of the pad and the caps. */
+ if (!src->manager) {
+ GstCaps *caps;
+
+ caps = stream_get_caps_for_pt (stream, stream->default_pt);
+ GST_DEBUG_OBJECT (src, "setting pad caps for stream %p", stream);
+ gst_pad_set_caps (stream->srcpad, caps);
+ }
+ /* add the pad */
+ if (!stream->added) {
+ GST_DEBUG_OBJECT (src, "adding stream pad %p", stream);
+ if (stream->is_backchannel)
+ add_backchannel_fakesink (src, stream, stream->srcpad);
+ else
+ gst_element_add_pad (GST_ELEMENT_CAST (src), stream->srcpad);
+ stream->added = TRUE;
+ }
+ }
+ }
+
+ /* unblock all pads */
+ for (walk = src->streams; walk; walk = g_list_next (walk)) {
+ GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+
+ if (stream->blockid) {
+ GST_DEBUG_OBJECT (src, "unblocking stream pad %p", stream);
+ gst_pad_remove_probe (stream->blockedpad, stream->blockid);
+ stream->blockid = 0;
+ }
+ }
+
+ return TRUE;
+ }
+
+ static void
+ gst_rtspsrc_configure_caps (GstRTSPSrc * src, GstSegment * segment,
+ gboolean reset_manager)
+ {
+ GList *walk;
+ guint64 start, stop;
+ gdouble play_speed, play_scale;
+
+ GST_DEBUG_OBJECT (src, "configuring stream caps");
+
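+   /* for reverse playback (negative rate) the segment boundaries swap
+    * roles: playback starts at segment->stop and runs back to segment->start */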
+ start = segment->rate > 0.0 ? segment->start : segment->stop;
+ stop = segment->rate > 0.0 ? segment->stop : segment->start;
+ play_speed = segment->rate;
+ play_scale = segment->applied_rate;
+
+ for (walk = src->streams; walk; walk = g_list_next (walk)) {
+ GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+ guint j, len;
+
+ if (!stream->setup)
+ continue;
+
+ len = stream->ptmap->len;
+ for (j = 0; j < len; j++) {
+ GstCaps *caps;
+ PtMapItem *item = &g_array_index (stream->ptmap, PtMapItem, j);
+
+ if (item->caps == NULL)
+ continue;
+
+ caps = gst_caps_make_writable (item->caps);
+ /* update caps */
+ if (stream->timebase != -1)
+ gst_caps_set_simple (caps, "clock-base", G_TYPE_UINT,
+ (guint) stream->timebase, NULL);
+ if (stream->seqbase != -1)
+ gst_caps_set_simple (caps, "seqnum-base", G_TYPE_UINT,
+ (guint) stream->seqbase, NULL);
+ gst_caps_set_simple (caps, "npt-start", G_TYPE_UINT64, start, NULL);
+ if (stop != -1)
+ gst_caps_set_simple (caps, "npt-stop", G_TYPE_UINT64, stop, NULL);
+ gst_caps_set_simple (caps, "play-speed", G_TYPE_DOUBLE, play_speed, NULL);
+ gst_caps_set_simple (caps, "play-scale", G_TYPE_DOUBLE, play_scale, NULL);
+ gst_caps_set_simple (caps, "onvif-mode", G_TYPE_BOOLEAN, src->onvif_mode,
+ NULL);
+
+ item->caps = caps;
+ GST_DEBUG_OBJECT (src, "stream %p, pt %d, caps %" GST_PTR_FORMAT, stream,
+ item->pt, caps);
+
+ if (item->pt == stream->default_pt) {
+ if (stream->udpsrc[0])
+ g_object_set (stream->udpsrc[0], "caps", caps, NULL);
+ stream->need_caps = TRUE;
+ }
+ }
+ }
+ if (reset_manager && src->manager) {
+ GST_DEBUG_OBJECT (src, "clear session");
+ g_signal_emit_by_name (src->manager, "clear-pt-map", NULL);
+ }
+ }
+
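+ /* Combine the flow return of one stream with the last returns of the other
+  * streams: NOT_LINKED is only propagated when every stream reported
+  * NOT_LINKED, any other result from this stream is returned as-is. */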
+ static GstFlowReturn
+ gst_rtspsrc_combine_flows (GstRTSPSrc * src, GstRTSPStream * stream,
+ GstFlowReturn ret)
+ {
+ GList *streams;
+
+ /* store the value */
+ stream->last_ret = ret;
+
+ /* if it's success we can return the value right away */
+ if (ret == GST_FLOW_OK)
+ goto done;
+
+   /* any error other than not-linked can be returned right
+    * away */
+ if (ret != GST_FLOW_NOT_LINKED)
+ goto done;
+
+ /* only return NOT_LINKED if all other pads returned NOT_LINKED */
+ for (streams = src->streams; streams; streams = g_list_next (streams)) {
+ GstRTSPStream *ostream = (GstRTSPStream *) streams->data;
+
+ ret = ostream->last_ret;
+     /* some other stream returned a non-NOT_LINKED value (most likely
+      * SUCCESS); return that instead */
+ if (ret != GST_FLOW_NOT_LINKED)
+ goto done;
+ }
+   /* if we get here, all other pads returned NOT_LINKED as well, so we
+    * return NOT_LINKED */
+ done:
+ return ret;
+ }
+
+ static gboolean
+ gst_rtspsrc_stream_push_event (GstRTSPSrc * src, GstRTSPStream * stream,
+ GstEvent * event)
+ {
+ gboolean res = TRUE;
+
+ /* only streams that have a connection to the outside world */
+ if (!stream->setup)
+ goto done;
+
+ if (stream->udpsrc[0]) {
+ GstEvent *sent_event;
+
+ if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
+ sent_event = gst_event_new_eos ();
+ gst_event_set_seqnum (sent_event, stream->segment_seqnum[0]);
+ } else {
+ sent_event = gst_event_ref (event);
+ }
+
+ res = gst_element_send_event (stream->udpsrc[0], sent_event);
+ } else if (stream->channelpad[0]) {
+ gst_event_ref (event);
+ if (GST_PAD_IS_SRC (stream->channelpad[0]))
+ res = gst_pad_push_event (stream->channelpad[0], event);
+ else
+ res = gst_pad_send_event (stream->channelpad[0], event);
+ }
+
+ if (stream->udpsrc[1]) {
+ GstEvent *sent_event;
+
+ if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
+ sent_event = gst_event_new_eos ();
+ if (stream->segment_seqnum[1] != GST_SEQNUM_INVALID) {
+ gst_event_set_seqnum (sent_event, stream->segment_seqnum[1]);
+ }
+ } else {
+ sent_event = gst_event_ref (event);
+ }
+
+ res &= gst_element_send_event (stream->udpsrc[1], sent_event);
+ } else if (stream->channelpad[1]) {
+ gst_event_ref (event);
+ if (GST_PAD_IS_SRC (stream->channelpad[1]))
+ res &= gst_pad_push_event (stream->channelpad[1], event);
+ else
+ res &= gst_pad_send_event (stream->channelpad[1], event);
+ }
+
+ done:
+ gst_event_unref (event);
+
+ return res;
+ }
+
+ static gboolean
+ gst_rtspsrc_push_event (GstRTSPSrc * src, GstEvent * event)
+ {
+ GList *streams;
+ gboolean res = TRUE;
+
+ for (streams = src->streams; streams; streams = g_list_next (streams)) {
+ GstRTSPStream *ostream = (GstRTSPStream *) streams->data;
+
+ gst_event_ref (event);
+ res &= gst_rtspsrc_stream_push_event (src, ostream, event);
+ }
+ gst_event_unref (event);
+
+ return res;
+ }
+
+ static gboolean
+ accept_certificate_cb (GTlsConnection * conn, GTlsCertificate * peer_cert,
+ GTlsCertificateFlags errors, gpointer user_data)
+ {
+ GstRTSPSrc *src = user_data;
+ gboolean accept = FALSE;
+
+ g_signal_emit (src, gst_rtspsrc_signals[SIGNAL_ACCEPT_CERTIFICATE], 0, conn,
+ peer_cert, errors, &accept);
+
+ return accept;
+ }
+
+ static GstRTSPResult
+ gst_rtsp_conninfo_connect (GstRTSPSrc * src, GstRTSPConnInfo * info,
+ gboolean async)
+ {
+ GstRTSPResult res;
+ GstRTSPMessage response;
+ gboolean retry = FALSE;
+ memset (&response, 0, sizeof (response));
+ gst_rtsp_message_init (&response);
+ do {
+ if (info->connection == NULL) {
+ if (info->url == NULL) {
+ GST_DEBUG_OBJECT (src, "parsing uri (%s)...", info->location);
+ if ((res = gst_rtsp_url_parse (info->location, &info->url)) < 0)
+ goto parse_error;
+ }
+ /* create connection */
+ GST_DEBUG_OBJECT (src, "creating connection (%s)...", info->location);
+ if ((res = gst_rtsp_connection_create (info->url, &info->connection)) < 0)
+ goto could_not_create;
+
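+       /* when retrying after a 401 response, re-run authentication setup
+        * with the stored response so the new connection carries credentials */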
+ if (retry) {
+ gst_rtspsrc_setup_auth (src, &response);
+ }
+
+ g_free (info->url_str);
+ info->url_str = gst_rtsp_url_get_request_uri (info->url);
+
+ GST_DEBUG_OBJECT (src, "sanitized uri %s", info->url_str);
+
+ if (info->url->transports & GST_RTSP_LOWER_TRANS_TLS) {
+ if (!gst_rtsp_connection_set_tls_validation_flags (info->connection,
+ src->tls_validation_flags))
+ GST_WARNING_OBJECT (src, "Unable to set TLS validation flags");
+
+ if (src->tls_database)
+ gst_rtsp_connection_set_tls_database (info->connection,
+ src->tls_database);
+
+ if (src->tls_interaction)
+ gst_rtsp_connection_set_tls_interaction (info->connection,
+ src->tls_interaction);
+ gst_rtsp_connection_set_accept_certificate_func (info->connection,
+ accept_certificate_cb, src, NULL);
+ }
+
+ if (info->url->transports & GST_RTSP_LOWER_TRANS_HTTP) {
+ gst_rtsp_connection_set_tunneled (info->connection, TRUE);
+ gst_rtsp_connection_set_ignore_x_server_reply (info->connection,
+ src->ignore_x_server_reply);
+ }
+
+ if (src->proxy_host) {
+ GST_DEBUG_OBJECT (src, "setting proxy %s:%d", src->proxy_host,
+ src->proxy_port);
+ gst_rtsp_connection_set_proxy (info->connection, src->proxy_host,
+ src->proxy_port);
+ }
+ }
+
+ if (!info->connected) {
+ /* connect */
+ if (async)
+ GST_ELEMENT_PROGRESS (src, CONTINUE, "connect",
+ ("Connecting to %s", info->location));
+ GST_DEBUG_OBJECT (src, "connecting (%s)...", info->location);
+ res = gst_rtsp_connection_connect_with_response_usec (info->connection,
+ src->tcp_timeout, &response);
+
+ if (response.type == GST_RTSP_MESSAGE_HTTP_RESPONSE &&
+ response.type_data.response.code == GST_RTSP_STS_UNAUTHORIZED) {
+ gst_rtsp_conninfo_close (src, info, TRUE);
+ if (!retry)
+ retry = TRUE;
+ else
+         retry = FALSE;          /* we should not retry more than once */
+ } else {
+ retry = FALSE;
+ }
+
+ if (res == GST_RTSP_OK)
+ info->connected = TRUE;
+ else if (!retry)
+ goto could_not_connect;
+ }
+ } while (!info->connected && retry);
+
+ gst_rtsp_message_unset (&response);
+ return GST_RTSP_OK;
+
+ /* ERRORS */
+ parse_error:
+ {
+ GST_ERROR_OBJECT (src, "No valid RTSP URL was provided");
+ gst_rtsp_message_unset (&response);
+ return res;
+ }
+ could_not_create:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+ GST_ERROR_OBJECT (src, "Could not create connection. (%s)", str);
+ g_free (str);
+ gst_rtsp_message_unset (&response);
+ return res;
+ }
+ could_not_connect:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+ GST_ERROR_OBJECT (src, "Could not connect to server. (%s)", str);
+ g_free (str);
+ gst_rtsp_message_unset (&response);
+ return res;
+ }
+ }
+
+ static GstRTSPResult
+ gst_rtsp_conninfo_close (GstRTSPSrc * src, GstRTSPConnInfo * info,
+ gboolean free)
+ {
+ GST_RTSP_STATE_LOCK (src);
+ if (info->connected) {
+ GST_DEBUG_OBJECT (src, "closing connection...");
+ gst_rtsp_connection_close (info->connection);
+ info->connected = FALSE;
+ }
+ if (free && info->connection) {
+ /* free connection */
+ GST_DEBUG_OBJECT (src, "freeing connection...");
+ gst_rtsp_connection_free (info->connection);
+ info->connection = NULL;
+ info->flushing = FALSE;
+ }
+ GST_RTSP_STATE_UNLOCK (src);
+ return GST_RTSP_OK;
+ }
+
+ static GstRTSPResult
+ gst_rtsp_conninfo_reconnect (GstRTSPSrc * src, GstRTSPConnInfo * info,
+ gboolean async)
+ {
+ GstRTSPResult res;
+
+ GST_DEBUG_OBJECT (src, "reconnecting connection...");
+ gst_rtsp_conninfo_close (src, info, FALSE);
+ res = gst_rtsp_conninfo_connect (src, info, async);
+
+ return res;
+ }
+
+ static void
+ gst_rtspsrc_connection_flush (GstRTSPSrc * src, gboolean flush)
+ {
+ GList *walk;
+
+ GST_DEBUG_OBJECT (src, "set flushing %d", flush);
+ GST_RTSP_STATE_LOCK (src);
+ if (src->conninfo.connection && src->conninfo.flushing != flush) {
+ GST_DEBUG_OBJECT (src, "connection flush");
+ gst_rtsp_connection_flush (src->conninfo.connection, flush);
+ src->conninfo.flushing = flush;
+ }
+ for (walk = src->streams; walk; walk = g_list_next (walk)) {
+ GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+ if (stream->conninfo.connection && stream->conninfo.flushing != flush) {
+ GST_DEBUG_OBJECT (src, "stream %p flush", stream);
+ gst_rtsp_connection_flush (stream->conninfo.connection, flush);
+ stream->conninfo.flushing = flush;
+ }
+ }
+ GST_RTSP_STATE_UNLOCK (src);
+ }
+
+ static GstRTSPResult
+ gst_rtspsrc_init_request (GstRTSPSrc * src, GstRTSPMessage * msg,
+ GstRTSPMethod method, const gchar * uri)
+ {
+ GstRTSPResult res;
+
+ res = gst_rtsp_message_init_request (msg, method, uri);
+ if (res < 0)
+ return res;
+
+ /* set user-agent */
+ if (src->user_agent)
+ gst_rtsp_message_add_header (msg, GST_RTSP_HDR_USER_AGENT, src->user_agent);
+
+ return res;
+ }
+
+ /* FIXME: handle server requests properly; for now, reply with OK */
+ static GstRTSPResult
+ gst_rtspsrc_handle_request (GstRTSPSrc * src, GstRTSPConnInfo * conninfo,
+ GstRTSPMessage * request)
+ {
+ GstRTSPMessage response = { 0 };
+ GstRTSPResult res;
+
+ GST_DEBUG_OBJECT (src, "got server request message");
+
+ DEBUG_RTSP (src, request);
+
+ res = gst_rtsp_ext_list_receive_request (src->extensions, request);
+
+ if (res == GST_RTSP_ENOTIMPL) {
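+   /* GstRTSPAuthMethod values are single-bit flags; counting how many
+    * right-shifts it takes to clear the value maps it onto a name below */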
+ /* default implementation, send OK */
+ GST_DEBUG_OBJECT (src, "prepare OK reply");
+ res =
+ gst_rtsp_message_init_response (&response, GST_RTSP_STS_OK, "OK",
+ request);
+ if (res < 0)
+ goto send_error;
+
+ /* let app parse and reply */
+ g_signal_emit (src, gst_rtspsrc_signals[SIGNAL_HANDLE_REQUEST],
+ 0, request, &response);
+
+ DEBUG_RTSP (src, &response);
+
+ res = gst_rtspsrc_connection_send (src, conninfo, &response, 0);
+ if (res < 0)
+ goto send_error;
+
+ gst_rtsp_message_unset (&response);
+ } else if (res == GST_RTSP_EEOF)
+ return res;
+
+ return GST_RTSP_OK;
+
+ /* ERRORS */
+ send_error:
+ {
+ gst_rtsp_message_unset (&response);
+ return res;
+ }
+ }
+
+ /* send server keep-alive */
+ static GstRTSPResult
+ gst_rtspsrc_send_keep_alive (GstRTSPSrc * src)
+ {
+ GstRTSPMessage request = { 0 };
+ GstRTSPResult res;
+ GstRTSPMethod method;
+ const gchar *control;
+
+ if (src->do_rtsp_keep_alive == FALSE) {
+ GST_DEBUG_OBJECT (src, "do-rtsp-keep-alive is FALSE, not sending.");
+ gst_rtsp_connection_reset_timeout (src->conninfo.connection);
+ return GST_RTSP_OK;
+ }
+
+ GST_DEBUG_OBJECT (src, "creating server keep-alive");
+
+   /* find a method to use for keep-alive: prefer GET_PARAMETER when the
+    * server supports it, fall back to OPTIONS which is always available */
+ if (src->methods & GST_RTSP_GET_PARAMETER)
+ method = GST_RTSP_GET_PARAMETER;
+ else
+ method = GST_RTSP_OPTIONS;
+
+ control = get_aggregate_control (src);
+ if (control == NULL)
+ goto no_control;
+
+ res = gst_rtspsrc_init_request (src, &request, method, control);
+ if (res < 0)
+ goto send_error;
+
+ request.type_data.request.version = src->version;
+
+ res = gst_rtspsrc_connection_send (src, &src->conninfo, &request, 0);
+ if (res < 0)
+ goto send_error;
+
+ gst_rtsp_connection_reset_timeout (src->conninfo.connection);
+ gst_rtsp_message_unset (&request);
+
+ return GST_RTSP_OK;
+
+ /* ERRORS */
+ no_control:
+ {
+ GST_WARNING_OBJECT (src, "no control url to send keepalive");
+ return GST_RTSP_OK;
+ }
+ send_error:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
+ gst_rtsp_message_unset (&request);
+ GST_ELEMENT_WARNING (src, RESOURCE, WRITE, (NULL),
+ ("Could not send keep-alive. (%s)", str));
+ g_free (str);
+ return res;
+ }
+ }
+
+ static GstFlowReturn
+ gst_rtspsrc_handle_data (GstRTSPSrc * src, GstRTSPMessage * message)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ gint channel;
+ GstRTSPStream *stream;
+ GstPad *outpad = NULL;
+ guint8 *data;
+ guint size;
+ GstBuffer *buf;
+ gboolean is_rtcp;
+
+ channel = message->type_data.data.channel;
+
+ stream = find_stream (src, &channel, (gpointer) find_stream_by_channel);
+ if (!stream)
+ goto unknown_stream;
+
+ if (channel == stream->channel[0]) {
+ outpad = stream->channelpad[0];
+ is_rtcp = FALSE;
+ } else if (channel == stream->channel[1]) {
+ outpad = stream->channelpad[1];
+ is_rtcp = TRUE;
+ } else {
+ is_rtcp = FALSE;
+ }
+
+ /* take a look at the body to figure out what we have */
+ gst_rtsp_message_get_body (message, &data, &size);
+ if (size < 2)
+ goto invalid_length;
+
+ /* channels are not correct on some servers, do extra check */
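+   /* the second byte of an RTCP packet carries the packet type; 200 (SR)
+    * up to 204 (APP) is the range defined by RFC 3550 */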
+ if (data[1] >= 200 && data[1] <= 204) {
+ /* hmm RTCP message switch to the RTCP pad of the same stream. */
+ outpad = stream->channelpad[1];
+ is_rtcp = TRUE;
+ }
+
+   /* we have no clue what this is, so just ignore it. */
+ if (outpad == NULL)
+ goto unknown_stream;
+
+ /* take the message body for further processing */
+ gst_rtsp_message_steal_body (message, &data, &size);
+
+ /* strip the trailing \0 */
+ size -= 1;
+
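+   /* wrap the stolen body zero-copy; g_free is the destroy notify so the
+    * memory is released together with the buffer */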
+ buf = gst_buffer_new ();
+ gst_buffer_append_memory (buf,
+ gst_memory_new_wrapped (0, data, size, 0, size, data, g_free));
+
+ /* don't need message anymore */
+ gst_rtsp_message_unset (message);
+
+ GST_DEBUG_OBJECT (src, "pushing data of size %d on channel %d", size,
+ channel);
+
+ if (src->need_activate) {
+ gchar *stream_id;
+ GstEvent *event;
+ GChecksum *cs;
+ gchar *uri;
+ GList *streams;
+
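+     /* stream-start ids should be unique per stream yet reproducible for
+      * the same URI, hence they are derived from a hash of the location */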
+ /* generate an SHA256 sum of the URI */
+ cs = g_checksum_new (G_CHECKSUM_SHA256);
+ uri = src->conninfo.location;
+ g_checksum_update (cs, (const guchar *) uri, strlen (uri));
+
+ for (streams = src->streams; streams; streams = g_list_next (streams)) {
+ GstRTSPStream *ostream = (GstRTSPStream *) streams->data;
+ GstCaps *caps;
+
+ /* Activate in advance so that the stream-start event is registered */
+ if (stream->srcpad) {
+ gst_pad_set_active (stream->srcpad, TRUE);
+ }
+
+ stream_id =
+ g_strdup_printf ("%s/%d", g_checksum_get_string (cs), ostream->id);
+
+ event = gst_event_new_stream_start (stream_id);
+
+ gst_rtspsrc_stream_start_event_add_group_id (src, event);
+
+ g_free (stream_id);
+ gst_rtspsrc_stream_push_event (src, ostream, event);
+
+ if ((caps = stream_get_caps_for_pt (ostream, ostream->default_pt))) {
+ /* only streams that have a connection to the outside world */
+ if (ostream->setup) {
+ if (ostream->udpsrc[0]) {
+ gst_element_send_event (ostream->udpsrc[0],
+ gst_event_new_caps (caps));
+ } else if (ostream->channelpad[0]) {
+ if (GST_PAD_IS_SRC (ostream->channelpad[0]))
+ gst_pad_push_event (ostream->channelpad[0],
+ gst_event_new_caps (caps));
+ else
+ gst_pad_send_event (ostream->channelpad[0],
+ gst_event_new_caps (caps));
+ }
+ ostream->need_caps = FALSE;
+
+ if (ostream->profile == GST_RTSP_PROFILE_SAVP ||
+ ostream->profile == GST_RTSP_PROFILE_SAVPF)
+ caps = gst_caps_new_empty_simple ("application/x-srtcp");
+ else
+ caps = gst_caps_new_empty_simple ("application/x-rtcp");
+
+ if (ostream->udpsrc[1]) {
+ gst_element_send_event (ostream->udpsrc[1],
+ gst_event_new_caps (caps));
+ } else if (ostream->channelpad[1]) {
+ if (GST_PAD_IS_SRC (ostream->channelpad[1]))
+ gst_pad_push_event (ostream->channelpad[1],
+ gst_event_new_caps (caps));
+ else
+ gst_pad_send_event (ostream->channelpad[1],
+ gst_event_new_caps (caps));
+ }
+
+ gst_caps_unref (caps);
+ }
+ }
+ }
+ g_checksum_free (cs);
+
+ gst_rtspsrc_activate_streams (src);
+ src->need_activate = FALSE;
+ src->need_segment = TRUE;
+ }
+
+ if (src->base_time == -1) {
+ /* Take current running_time. This timestamp will be put on
+ * the first buffer of each stream because we are a live source and so we
+ * timestamp with the running_time. When we are dealing with TCP, we also
+ * only timestamp the first buffer (using the DISCONT flag) because a server
+ * typically bursts data, for which we don't want to compensate by speeding
+      * up the media. The other timestamps will be interpolated from this one
+      * using the RTP timestamps. */
+ GST_OBJECT_LOCK (src);
+ if (GST_ELEMENT_CLOCK (src)) {
+ GstClockTime now;
+ GstClockTime base_time;
+
+ now = gst_clock_get_time (GST_ELEMENT_CLOCK (src));
+ base_time = GST_ELEMENT_CAST (src)->base_time;
+
+ src->base_time = now - base_time;
+
+ GST_DEBUG_OBJECT (src, "first buffer at time %" GST_TIME_FORMAT ", base %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (now), GST_TIME_ARGS (base_time));
+ }
+ GST_OBJECT_UNLOCK (src);
+ }
+
+   /* If needed send a new segment; don't forget we are live and buffers are
+    * timestamped with running time */
+ if (src->need_segment) {
+ src->need_segment = FALSE;
+ if (src->onvif_mode) {
+ gst_rtspsrc_push_event (src, gst_event_new_segment (&src->out_segment));
+ } else {
+ GstSegment segment;
+
+ gst_segment_init (&segment, GST_FORMAT_TIME);
+ gst_rtspsrc_push_event (src, gst_event_new_segment (&segment));
+ }
+ }
+
+ if (stream->need_caps) {
+ GstCaps *caps;
+
+ if ((caps = stream_get_caps_for_pt (stream, stream->default_pt))) {
+ /* only streams that have a connection to the outside world */
+ if (stream->setup) {
+ /* Only need to update the TCP caps here, UDP is already handled */
+ if (stream->channelpad[0]) {
+ if (GST_PAD_IS_SRC (stream->channelpad[0]))
+ gst_pad_push_event (stream->channelpad[0],
+ gst_event_new_caps (caps));
+ else
+ gst_pad_send_event (stream->channelpad[0],
+ gst_event_new_caps (caps));
+ }
+ stream->need_caps = FALSE;
+ }
+ }
+
+ stream->need_caps = FALSE;
+ }
+
+ if (stream->discont && !is_rtcp) {
+ /* mark first RTP buffer as discont */
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
+ stream->discont = FALSE;
+     /* first buffer gets the timestamp, other buffers are not timestamped and
+      * their presentation time will be interpolated from the RTP timestamps. */
+ GST_DEBUG_OBJECT (src, "setting timestamp %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (src->base_time));
+
+ GST_BUFFER_TIMESTAMP (buf) = src->base_time;
+ }
+
+ /* chain to the peer pad */
+ if (GST_PAD_IS_SINK (outpad))
+ ret = gst_pad_chain (outpad, buf);
+ else
+ ret = gst_pad_push (outpad, buf);
+
+ if (!is_rtcp) {
+ /* combine all stream flows for the data transport */
+ ret = gst_rtspsrc_combine_flows (src, stream, ret);
+ }
+ return ret;
+
+ /* ERRORS */
+ unknown_stream:
+ {
+ GST_DEBUG_OBJECT (src, "unknown stream on channel %d, ignored", channel);
+ gst_rtsp_message_unset (message);
+ return GST_FLOW_OK;
+ }
+ invalid_length:
+ {
+ GST_ELEMENT_WARNING (src, RESOURCE, READ, (NULL),
+ ("Short message received, ignoring."));
+ gst_rtsp_message_unset (message);
+ return GST_FLOW_OK;
+ }
+ }
+
+ static GstFlowReturn
+ gst_rtspsrc_loop_interleaved (GstRTSPSrc * src)
+ {
+ GstRTSPMessage message = { 0 };
+ GstRTSPResult res;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ while (TRUE) {
+ gst_rtsp_message_unset (&message);
+
+ if (src->conninfo.flushing) {
+ /* do not attempt to receive if flushing */
+ res = GST_RTSP_EINTR;
+ } else {
+ /* protect the connection with the connection lock so that we can see when
+ * we are finished doing server communication */
+ res = gst_rtspsrc_connection_receive (src, &src->conninfo, &message,
+ src->tcp_timeout);
+ }
+
+ switch (res) {
+ case GST_RTSP_OK:
+ GST_DEBUG_OBJECT (src, "we received a server message");
+ break;
+ case GST_RTSP_EINTR:
+ /* we got interrupted this means we need to stop */
+ goto interrupt;
+ case GST_RTSP_ETIMEOUT:
+ /* no reply, send keep alive */
+ GST_DEBUG_OBJECT (src, "timeout, sending keep-alive");
+ if ((res = gst_rtspsrc_send_keep_alive (src)) == GST_RTSP_EINTR)
+ goto interrupt;
+ continue;
+ case GST_RTSP_EEOF:
+ /* go EOS when the server closed the connection */
+ goto server_eof;
+ default:
+ goto receive_error;
+ }
+
+ switch (message.type) {
+ case GST_RTSP_MESSAGE_REQUEST:
+ /* server sends us a request message, handle it */
+ res = gst_rtspsrc_handle_request (src, &src->conninfo, &message);
+ if (res == GST_RTSP_EEOF)
+ goto server_eof;
+ else if (res < 0)
+ goto handle_request_failed;
+ break;
+ case GST_RTSP_MESSAGE_RESPONSE:
+ /* we ignore response messages */
+ GST_DEBUG_OBJECT (src, "ignoring response message");
+ DEBUG_RTSP (src, &message);
+ break;
+ case GST_RTSP_MESSAGE_DATA:
+ GST_DEBUG_OBJECT (src, "got data message");
+ ret = gst_rtspsrc_handle_data (src, &message);
+ if (ret != GST_FLOW_OK)
+ goto handle_data_failed;
+ break;
+ default:
+ GST_WARNING_OBJECT (src, "ignoring unknown message type %d",
+ message.type);
+ break;
+ }
+ }
+ g_assert_not_reached ();
+
+ /* ERRORS */
+ server_eof:
+ {
+ GST_DEBUG_OBJECT (src, "we got an eof from the server");
+ GST_ELEMENT_WARNING (src, RESOURCE, READ, (NULL),
+ ("The server closed the connection."));
+ src->conninfo.connected = FALSE;
+ gst_rtsp_message_unset (&message);
+ return GST_FLOW_EOS;
+ }
+ interrupt:
+ {
+ gst_rtsp_message_unset (&message);
+ GST_DEBUG_OBJECT (src, "got interrupted");
+ return GST_FLOW_FLUSHING;
+ }
+ receive_error:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_SERVER,
++ "Could not receive message.");
++#else
+ GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
+ ("Could not receive message. (%s)", str));
++#endif
+ g_free (str);
+
+ gst_rtsp_message_unset (&message);
+ return GST_FLOW_ERROR;
+ }
+ handle_request_failed:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_SERVICE_UNAVAILABLE,
++ "Could not handle server message.");
++#else
+ GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
+ ("Could not handle server message. (%s)", str));
++#endif
+ g_free (str);
+ gst_rtsp_message_unset (&message);
+ return GST_FLOW_ERROR;
+ }
+ handle_data_failed:
+ {
+ GST_DEBUG_OBJECT (src, "could no handle data message");
+ return ret;
+ }
+ }
+
+ static GstFlowReturn
+ gst_rtspsrc_loop_udp (GstRTSPSrc * src)
+ {
+ GstRTSPResult res;
+ GstRTSPMessage message = { 0 };
+ gint retry = 0;
+
+ while (TRUE) {
+ gint64 timeout;
+
+ /* get the next timeout interval */
+ timeout = gst_rtsp_connection_next_timeout_usec (src->conninfo.connection);
+
+ GST_DEBUG_OBJECT (src, "doing receive with timeout %d seconds",
+ (gint) timeout / G_USEC_PER_SEC);
+
+ gst_rtsp_message_unset (&message);
+
+ /* we should continue reading the TCP socket because the server might
+ * send us requests. When the session timeout expires, we need to send a
+ * keep-alive request to keep the session open. */
+ if (src->conninfo.flushing) {
+ /* do not attempt to receive if flushing */
+ res = GST_RTSP_EINTR;
+ } else {
+ res = gst_rtspsrc_connection_receive (src, &src->conninfo, &message,
+ timeout);
+ }
+
+ switch (res) {
+ case GST_RTSP_OK:
+ GST_DEBUG_OBJECT (src, "we received a server message");
+ break;
+ case GST_RTSP_EINTR:
+ /* we got interrupted, see what we have to do */
+ goto interrupt;
+ case GST_RTSP_ETIMEOUT:
+ /* send keep-alive, ignore the result, a warning will be posted. */
+ GST_DEBUG_OBJECT (src, "timeout, sending keep-alive");
+ if ((res = gst_rtspsrc_send_keep_alive (src)) == GST_RTSP_EINTR)
+ goto interrupt;
+ continue;
+ case GST_RTSP_EEOF:
+         /* server closed the connection. Not very fatal for UDP; reconnect
+          * and see what happens. */
+ GST_ELEMENT_WARNING (src, RESOURCE, READ, (NULL),
+ ("The server closed the connection."));
+ if (src->udp_reconnect) {
+ if ((res =
+ gst_rtsp_conninfo_reconnect (src, &src->conninfo, FALSE)) < 0)
+ goto connect_error;
+ } else {
+ goto server_eof;
+ }
+ continue;
+ case GST_RTSP_ENET:
+ GST_DEBUG_OBJECT (src, "An ethernet problem occurred.");
+ default:
+ GST_ELEMENT_WARNING (src, RESOURCE, READ, (NULL),
+ ("Unhandled return value %d.", res));
+ goto receive_error;
+ }
+
+ switch (message.type) {
+ case GST_RTSP_MESSAGE_REQUEST:
+ /* server sends us a request message, handle it */
+ res = gst_rtspsrc_handle_request (src, &src->conninfo, &message);
+ if (res == GST_RTSP_EEOF)
+ goto server_eof;
+ else if (res < 0)
+ goto handle_request_failed;
+ break;
+ case GST_RTSP_MESSAGE_RESPONSE:
+ /* we ignore response and data messages */
+ GST_DEBUG_OBJECT (src, "ignoring response message");
+ DEBUG_RTSP (src, &message);
+ if (message.type_data.response.code == GST_RTSP_STS_UNAUTHORIZED) {
+ GST_DEBUG_OBJECT (src, "but is Unauthorized response ...");
+ if (gst_rtspsrc_setup_auth (src, &message) && !(retry++)) {
+ GST_DEBUG_OBJECT (src, "so retrying keep-alive");
+ if ((res = gst_rtspsrc_send_keep_alive (src)) == GST_RTSP_EINTR)
+ goto interrupt;
+ }
+ } else {
+ retry = 0;
+ }
+ break;
+ case GST_RTSP_MESSAGE_DATA:
+ /* we ignore response and data messages */
+ GST_DEBUG_OBJECT (src, "ignoring data message");
+ break;
+ default:
+ GST_WARNING_OBJECT (src, "ignoring unknown message type %d",
+ message.type);
+ break;
+ }
+ }
+ g_assert_not_reached ();
+
+ /* we get here when the connection got interrupted */
+ interrupt:
+ {
+ gst_rtsp_message_unset (&message);
+ GST_DEBUG_OBJECT (src, "got interrupted");
+ return GST_FLOW_FLUSHING;
+ }
+ connect_error:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+ GstFlowReturn ret;
+
+ src->conninfo.connected = FALSE;
+ if (res != GST_RTSP_EINTR) {
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONNECTION_FAIL,
++ "Could not connect to server.");
++#else
+ GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ_WRITE, (NULL),
+ ("Could not connect to server. (%s)", str));
++#endif
+ g_free (str);
+ ret = GST_FLOW_ERROR;
+ } else {
+ ret = GST_FLOW_FLUSHING;
+ }
+ return ret;
+ }
+ receive_error:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_SERVER_DISCONNECTED,
++ "Could not receive message.");
++#else
+ GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
+ ("Could not receive message. (%s)", str));
++#endif
+ g_free (str);
+ return GST_FLOW_ERROR;
+ }
+ handle_request_failed:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+ GstFlowReturn ret;
+
+ gst_rtsp_message_unset (&message);
+ if (res != GST_RTSP_EINTR) {
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src,
++ GST_RTSPSRC_ERROR_SERVICE_UNAVAILABLE,
++ "Could not handle server message.");
++#else
+ GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
+ ("Could not handle server message. (%s)", str));
++#endif
+ g_free (str);
+ ret = GST_FLOW_ERROR;
+ } else {
+ ret = GST_FLOW_FLUSHING;
+ }
+ return ret;
+ }
+ server_eof:
+ {
+ GST_DEBUG_OBJECT (src, "we got an eof from the server");
+ GST_ELEMENT_WARNING (src, RESOURCE, READ, (NULL),
+ ("The server closed the connection."));
+ src->conninfo.connected = FALSE;
+ gst_rtsp_message_unset (&message);
+ return GST_FLOW_EOS;
+ }
+ }
+
+ static GstRTSPResult
+ gst_rtspsrc_reconnect (GstRTSPSrc * src, gboolean async)
+ {
+ GstRTSPResult res = GST_RTSP_OK;
+ gboolean restart;
+
+ GST_DEBUG_OBJECT (src, "doing reconnect");
+
+ GST_OBJECT_LOCK (src);
+   /* only restart when the pads were not yet activated; if they were, we
+    * were already successfully streaming over UDP */
+ restart = src->need_activate;
+ GST_OBJECT_UNLOCK (src);
+
+ /* no need to restart, we're done */
+ if (!restart)
+ goto done;
+
+ /* we can try only TCP now */
+ src->cur_protocols = GST_RTSP_LOWER_TRANS_TCP;
+
+ /* close and cleanup our state */
+ if ((res = gst_rtspsrc_close (src, async, FALSE)) < 0)
+ goto done;
+
+ /* see if we have TCP left to try. Also don't try TCP when we were configured
+ * with an SDP. */
+ if (!(src->protocols & GST_RTSP_LOWER_TRANS_TCP) || src->from_sdp)
+ goto no_protocols;
+
+   /* We post a warning message now to inform the user
+    * that nothing happened. It's most likely a firewall blocking UDP. */
+ GST_ELEMENT_WARNING (src, RESOURCE, READ, (NULL),
+ ("Could not receive any UDP packets for %.4f seconds, maybe your "
+ "firewall is blocking it. Retrying using a tcp connection.",
+ gst_guint64_to_gdouble (src->udp_timeout) / 1000000.0));
+
+ /* open new connection using tcp */
+ if (gst_rtspsrc_open (src, async) < 0)
+ goto open_failed;
+
+ /* start playback */
+ if (gst_rtspsrc_play (src, &src->segment, async, NULL) < 0)
+ goto play_failed;
+
+ done:
+ return res;
+
+ /* ERRORS */
+ no_protocols:
+ {
+ src->cur_protocols = 0;
+ /* no transport possible, post an error and stop */
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_TRANSPORT,
++ "Could not receive any UDP packets for seconds, maybe your firewall is blocking it. No other protocols to try.");
++#else
+ GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
+ ("Could not receive any UDP packets for %.4f seconds, maybe your "
+ "firewall is blocking it. No other protocols to try.",
+ gst_guint64_to_gdouble (src->udp_timeout) / 1000000.0));
++#endif
+ return GST_RTSP_ERROR;
+ }
+ open_failed:
+ {
+ GST_DEBUG_OBJECT (src, "open failed");
+ return GST_RTSP_OK;
+ }
+ play_failed:
+ {
+ GST_DEBUG_OBJECT (src, "play failed");
+ return GST_RTSP_OK;
+ }
+ }
+
+ static void
+ gst_rtspsrc_loop_start_cmd (GstRTSPSrc * src, gint cmd)
+ {
+ switch (cmd) {
+ case CMD_OPEN:
+ GST_ELEMENT_PROGRESS (src, START, "open", ("Opening Stream"));
+ break;
+ case CMD_PLAY:
+ GST_ELEMENT_PROGRESS (src, START, "request", ("Sending PLAY request"));
+ break;
+ case CMD_PAUSE:
+ GST_ELEMENT_PROGRESS (src, START, "request", ("Sending PAUSE request"));
+ break;
+ case CMD_GET_PARAMETER:
+ GST_ELEMENT_PROGRESS (src, START, "request",
+ ("Sending GET_PARAMETER request"));
+ break;
+ case CMD_SET_PARAMETER:
+ GST_ELEMENT_PROGRESS (src, START, "request",
+ ("Sending SET_PARAMETER request"));
+ break;
+ case CMD_CLOSE:
+ GST_ELEMENT_PROGRESS (src, START, "close", ("Closing Stream"));
+ break;
+ default:
+ break;
+ }
+ }
+
+ static void
+ gst_rtspsrc_loop_complete_cmd (GstRTSPSrc * src, gint cmd)
+ {
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ GstMessage *s;
++ GST_WARNING_OBJECT (src, "Got cmd %s", cmd_to_string (cmd));
++#endif
++
+ switch (cmd) {
+ case CMD_OPEN:
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ GST_DEBUG_OBJECT (src,
++ "rtsp_duration %" GST_TIME_FORMAT
++ ", rtsp_audio_codec %s , rtsp_video_codec %s , rtsp_video_frame_size %s",
++ GST_TIME_ARGS (src->segment.duration), src->audio_codec,
++ src->video_codec, src->video_frame_size);
++
++ /* post message */
++ s = gst_message_new_element (GST_OBJECT_CAST (src),
++ gst_structure_new ("rtspsrc_properties",
++ "rtsp_duration", G_TYPE_UINT64, src->segment.duration,
++ "rtsp_audio_codec", G_TYPE_STRING, src->audio_codec,
++ "rtsp_video_codec", G_TYPE_STRING, src->video_codec,
++ "rtsp_video_frame_size", G_TYPE_STRING, src->video_frame_size,
++ NULL));
++
++ gst_element_post_message (GST_ELEMENT_CAST (src), s);
++#endif
+ GST_ELEMENT_PROGRESS (src, COMPLETE, "open", ("Opened Stream"));
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ /* rtspsrc PAUSE state should be here for parsing sdp before PAUSE state changed. */
++ g_mutex_lock (&(src)->pause_lock);
++ g_cond_signal (&(src)->open_end);
++ g_mutex_unlock (&(src)->pause_lock);
++#endif
+ break;
+ case CMD_PLAY:
+ GST_ELEMENT_PROGRESS (src, COMPLETE, "request", ("Sent PLAY request"));
+ break;
+ case CMD_PAUSE:
+ GST_ELEMENT_PROGRESS (src, COMPLETE, "request", ("Sent PAUSE request"));
+ break;
+ case CMD_GET_PARAMETER:
+ GST_ELEMENT_PROGRESS (src, COMPLETE, "request",
+ ("Sent GET_PARAMETER request"));
+ break;
+ case CMD_SET_PARAMETER:
+ GST_ELEMENT_PROGRESS (src, COMPLETE, "request",
+ ("Sent SET_PARAMETER request"));
+ break;
+ case CMD_CLOSE:
+ GST_ELEMENT_PROGRESS (src, COMPLETE, "close", ("Closed Stream"));
+ break;
+ default:
+ break;
+ }
+ }
+
+ static void
+ gst_rtspsrc_loop_cancel_cmd (GstRTSPSrc * src, gint cmd)
+ {
+ switch (cmd) {
+ case CMD_OPEN:
+ GST_ELEMENT_PROGRESS (src, CANCELED, "open", ("Open canceled"));
+ break;
+ case CMD_PLAY:
+ GST_ELEMENT_PROGRESS (src, CANCELED, "request", ("PLAY canceled"));
+ break;
+ case CMD_PAUSE:
+ GST_ELEMENT_PROGRESS (src, CANCELED, "request", ("PAUSE canceled"));
+ break;
+ case CMD_GET_PARAMETER:
+ GST_ELEMENT_PROGRESS (src, CANCELED, "request",
+ ("GET_PARAMETER canceled"));
+ break;
+ case CMD_SET_PARAMETER:
+ GST_ELEMENT_PROGRESS (src, CANCELED, "request",
+ ("SET_PARAMETER canceled"));
+ break;
+ case CMD_CLOSE:
+ GST_ELEMENT_PROGRESS (src, CANCELED, "close", ("Close canceled"));
+ break;
+ default:
+ break;
+ }
+ }
+
+ static void
+ gst_rtspsrc_loop_error_cmd (GstRTSPSrc * src, gint cmd)
+ {
+ switch (cmd) {
+ case CMD_OPEN:
+ GST_ELEMENT_PROGRESS (src, ERROR, "open", ("Open failed"));
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ /* Ending conditional wait for pause when open fails.*/
++ g_mutex_lock (&(src)->pause_lock);
++ g_cond_signal (&(src)->open_end);
++ g_mutex_unlock (&(src)->pause_lock);
++ GST_WARNING_OBJECT (src,
++ "ending conditional wait for pause as open is failed.");
++#endif
+ break;
+ case CMD_PLAY:
+ GST_ELEMENT_PROGRESS (src, ERROR, "request", ("PLAY failed"));
+ break;
+ case CMD_PAUSE:
+ GST_ELEMENT_PROGRESS (src, ERROR, "request", ("PAUSE failed"));
+ break;
+ case CMD_GET_PARAMETER:
+ GST_ELEMENT_PROGRESS (src, ERROR, "request", ("GET_PARAMETER failed"));
+ break;
+ case CMD_SET_PARAMETER:
+ GST_ELEMENT_PROGRESS (src, ERROR, "request", ("SET_PARAMETER failed"));
+ break;
+ case CMD_CLOSE:
+ GST_ELEMENT_PROGRESS (src, ERROR, "close", ("Close failed"));
+ break;
+ default:
+ break;
+ }
+ }
+
+ static void
+ gst_rtspsrc_loop_end_cmd (GstRTSPSrc * src, gint cmd, GstRTSPResult ret)
+ {
+ if (ret == GST_RTSP_OK)
+ gst_rtspsrc_loop_complete_cmd (src, cmd);
+ else if (ret == GST_RTSP_EINTR)
+ gst_rtspsrc_loop_cancel_cmd (src, cmd);
+ else
+ gst_rtspsrc_loop_error_cmd (src, cmd);
+ }
+
+ static gboolean
+ gst_rtspsrc_loop_send_cmd (GstRTSPSrc * src, gint cmd, gint mask)
+ {
+ gint old;
+ gboolean flushed = FALSE;
+
+ /* start new request */
+ gst_rtspsrc_loop_start_cmd (src, cmd);
+
+ GST_DEBUG_OBJECT (src, "sending cmd %s", cmd_to_string (cmd));
+
+ GST_OBJECT_LOCK (src);
+ old = src->pending_cmd;
+
+ if (old == CMD_RECONNECT) {
+ GST_DEBUG_OBJECT (src, "ignore, we were reconnecting");
+ cmd = CMD_RECONNECT;
+ } else if (old == CMD_CLOSE) {
+     /* our CMD_CLOSE might have interrupted CMD_LOOP. gst_rtspsrc_loop
+ * will send a CMD_WAIT which would cancel our pending CMD_CLOSE (if
+ * still pending). We just avoid it here by making sure CMD_CLOSE is
+ * still the pending command. */
+ GST_DEBUG_OBJECT (src, "ignore, we were closing");
+ cmd = CMD_CLOSE;
+ } else if (old == CMD_SET_PARAMETER) {
+ GST_DEBUG_OBJECT (src, "ignore, we have a pending %s", cmd_to_string (old));
+ cmd = CMD_SET_PARAMETER;
+ } else if (old == CMD_GET_PARAMETER) {
+ GST_DEBUG_OBJECT (src, "ignore, we have a pending %s", cmd_to_string (old));
+ cmd = CMD_GET_PARAMETER;
+ } else if (old != CMD_WAIT) {
+ src->pending_cmd = CMD_WAIT;
+ GST_OBJECT_UNLOCK (src);
+ /* cancel previous request */
+ GST_DEBUG_OBJECT (src, "cancel previous request %s", cmd_to_string (old));
+ gst_rtspsrc_loop_cancel_cmd (src, old);
+ GST_OBJECT_LOCK (src);
+ }
+ src->pending_cmd = cmd;
+ /* interrupt if allowed */
+ if (src->busy_cmd & mask) {
+ GST_DEBUG_OBJECT (src, "connection flush busy %s",
+ cmd_to_string (src->busy_cmd));
+ gst_rtspsrc_connection_flush (src, TRUE);
+ flushed = TRUE;
+ } else {
+ GST_DEBUG_OBJECT (src, "not interrupting busy cmd %s",
+ cmd_to_string (src->busy_cmd));
+ }
+ if (src->task)
+ gst_task_start (src->task);
+ GST_OBJECT_UNLOCK (src);
+
+ return flushed;
+ }
+
+ static gboolean
+ gst_rtspsrc_loop_send_cmd_and_wait (GstRTSPSrc * src, gint cmd, gint mask,
+ GstClockTime timeout)
+ {
+ gboolean flushed = gst_rtspsrc_loop_send_cmd (src, cmd, mask);
+
+ if (timeout > 0) {
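+     /* timeout is a GstClockTime in nanoseconds, g_get_monotonic_time()
+      * works in microseconds, hence the division by 1000 */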
+ gint64 end_time = g_get_monotonic_time () + (timeout / 1000);
+ GST_OBJECT_LOCK (src);
+ while (src->pending_cmd == cmd || src->busy_cmd == cmd) {
+ if (!g_cond_wait_until (&src->cmd_cond, GST_OBJECT_GET_LOCK (src),
+ end_time)) {
+ GST_WARNING_OBJECT (src,
+ "Timed out waiting for TEARDOWN to be processed.");
+ break; /* timeout passed */
+ }
+ }
+ GST_OBJECT_UNLOCK (src);
+ }
+ return flushed;
+ }
+
+ static gboolean
+ gst_rtspsrc_loop (GstRTSPSrc * src)
+ {
+ GstFlowReturn ret;
+
+ if (!src->conninfo.connection || !src->conninfo.connected)
+ goto no_connection;
+
+ if (src->interleaved)
+ ret = gst_rtspsrc_loop_interleaved (src);
+ else
+ ret = gst_rtspsrc_loop_udp (src);
+
+ if (ret != GST_FLOW_OK)
+ goto pause;
+
+ return TRUE;
+
+ /* ERRORS */
+ no_connection:
+ {
+ GST_WARNING_OBJECT (src, "we are not connected");
+ ret = GST_FLOW_FLUSHING;
+ goto pause;
+ }
+ pause:
+ {
+ const gchar *reason = gst_flow_get_name (ret);
+
+ GST_DEBUG_OBJECT (src, "pausing task, reason %s", reason);
+ src->running = FALSE;
+ if (ret == GST_FLOW_EOS) {
+ /* perform EOS logic */
+ if (src->segment.flags & GST_SEEK_FLAG_SEGMENT) {
+ gst_element_post_message (GST_ELEMENT_CAST (src),
+ gst_message_new_segment_done (GST_OBJECT_CAST (src),
+ src->segment.format, src->segment.position));
+ gst_rtspsrc_push_event (src,
+ gst_event_new_segment_done (src->segment.format,
+ src->segment.position));
+ } else {
+ gst_rtspsrc_push_event (src, gst_event_new_eos ());
+ }
+ } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
+ /* for fatal errors we post an error message, post the error before the
+ * EOS so the app knows about the error first. */
+ GST_ELEMENT_FLOW_ERROR (src, ret);
+ gst_rtspsrc_push_event (src, gst_event_new_eos ());
+ }
+ gst_rtspsrc_loop_send_cmd (src, CMD_WAIT, CMD_LOOP);
+ return FALSE;
+ }
+ }
+
+ #ifndef GST_DISABLE_GST_DEBUG
+ static const gchar *
+ gst_rtsp_auth_method_to_string (GstRTSPAuthMethod method)
+ {
+ gint index = 0;
+
+ while (method != 0) {
+ index++;
+ method >>= 1;
+ }
+ switch (index) {
+ case 0:
+ return "None";
+ case 1:
+ return "Basic";
+ case 2:
+ return "Digest";
+ }
+
+ return "Unknown";
+ }
+ #endif
+
+ /* Parse a WWW-Authenticate Response header and determine the
+ * available authentication methods
+ *
+ * This code should also cope with the fact that each WWW-Authenticate
+ * header can contain multiple challenge methods + tokens
+ *
+ * At the moment, for Basic auth, we just do a minimal check and don't
+ * even parse out the realm */
+ static void
+ gst_rtspsrc_parse_auth_hdr (GstRTSPMessage * response,
+ GstRTSPAuthMethod * methods, GstRTSPConnection * conn, gboolean * stale)
+ {
+ GstRTSPAuthCredential **credentials, **credential;
+
+ g_return_if_fail (response != NULL);
+ g_return_if_fail (methods != NULL);
+ g_return_if_fail (stale != NULL);
+
+ credentials =
+ gst_rtsp_message_parse_auth_credentials (response,
+ GST_RTSP_HDR_WWW_AUTHENTICATE);
+ if (!credentials)
+ return;
+
+ credential = credentials;
+ while (*credential) {
+ if ((*credential)->scheme == GST_RTSP_AUTH_BASIC) {
+ *methods |= GST_RTSP_AUTH_BASIC;
+ } else if ((*credential)->scheme == GST_RTSP_AUTH_DIGEST) {
+ GstRTSPAuthParam **param = (*credential)->params;
+
+ *methods |= GST_RTSP_AUTH_DIGEST;
+
+ gst_rtsp_connection_clear_auth_params (conn);
+ *stale = FALSE;
+
+ while (*param) {
+ if (strcmp ((*param)->name, "stale") == 0
+ && g_ascii_strcasecmp ((*param)->value, "TRUE") == 0)
+ *stale = TRUE;
+ gst_rtsp_connection_set_auth_param (conn, (*param)->name,
+ (*param)->value);
+ param++;
+ }
+ }
+
+ credential++;
+ }
+
+ gst_rtsp_auth_credentials_free (credentials);
+ }
+
+ /**
+ * gst_rtspsrc_setup_auth:
+ * @src: the rtsp source
+ *
+ * Configure a username and password and auth method on the
+ * connection object based on a response we received from the
+ * peer.
+ *
+ * Currently, this requires that a username and password were supplied
+ * in the uri. In the future, they may be requested on demand by sending
+ * a message up the bus.
+ *
+ * Returns: TRUE if authentication information could be set up correctly.
+ */
+ static gboolean
+ gst_rtspsrc_setup_auth (GstRTSPSrc * src, GstRTSPMessage * response)
+ {
+ gchar *user = NULL;
+ gchar *pass = NULL;
+ GstRTSPAuthMethod avail_methods = GST_RTSP_AUTH_NONE;
+ GstRTSPAuthMethod method;
+ GstRTSPResult auth_result;
+ GstRTSPUrl *url;
+ GstRTSPConnection *conn;
+ gboolean stale = FALSE;
+
+ conn = src->conninfo.connection;
+
+ /* Identify the available auth methods and see if any are supported */
+ gst_rtspsrc_parse_auth_hdr (response, &avail_methods, conn, &stale);
+
+ if (avail_methods == GST_RTSP_AUTH_NONE)
+ goto no_auth_available;
+
+ /* For digest auth, if the response indicates that the session
+ * data are stale, we just update them in the connection object and
+ * return TRUE to retry the request */
+ if (stale)
+ src->tried_url_auth = FALSE;
+
+ url = gst_rtsp_connection_get_url (conn);
+
+ /* Do we have username and password available? */
+ if (url != NULL && !src->tried_url_auth && url->user != NULL
+ && url->passwd != NULL) {
+ user = url->user;
+ pass = url->passwd;
+ src->tried_url_auth = TRUE;
+ GST_DEBUG_OBJECT (src,
+ "Attempting authentication using credentials from the URL");
+ } else {
+ user = src->user_id;
+ pass = src->user_pw;
+ GST_DEBUG_OBJECT (src,
+ "Attempting authentication using credentials from the properties");
+ }
+
+ /* FIXME: If the url didn't contain username and password or we tried them
+ * already, request a username and passwd from the application via some kind
+ * of credentials request message */
+
+ /* If we don't have a username and passwd at this point, bail out. */
+ if (user == NULL || pass == NULL)
+ goto no_user_pass;
+
+ /* Try to configure for each available authentication method, strongest to
+ * weakest */
+ for (method = GST_RTSP_AUTH_MAX; method != GST_RTSP_AUTH_NONE; method >>= 1) {
+ /* Check if this method is available on the server */
+ if ((method & avail_methods) == 0)
+ continue;
+
+ /* Pass the credentials to the connection to try on the next request */
+ auth_result = gst_rtsp_connection_set_auth (conn, method, user, pass);
+ /* INVAL indicates an invalid username/passwd were supplied, so we'll just
+ * ignore it and end up retrying later */
+ if (auth_result == GST_RTSP_OK || auth_result == GST_RTSP_EINVAL) {
+ GST_DEBUG_OBJECT (src, "Attempting %s authentication",
+ gst_rtsp_auth_method_to_string (method));
+ break;
+ }
+ }
+
+ if (method == GST_RTSP_AUTH_NONE)
+ goto no_auth_available;
+
+ return TRUE;
+
+ no_auth_available:
+ {
+ /* Output an error indicating that we couldn't connect because there were
+ * no supported authentication protocols */
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_NOT_AUTHORIZED,
++ "No supported authentication protocol was found");
++#else
+ GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, (NULL),
+ ("No supported authentication protocol was found"));
++#endif
+ return FALSE;
+ }
+ no_user_pass:
+ {
+ /* We don't fire an error message, we just return FALSE and let the
+ * normal NOT_AUTHORIZED error be propagated */
+ return FALSE;
+ }
+ }
+
+ static GstRTSPResult
+ gst_rtsp_src_receive_response (GstRTSPSrc * src, GstRTSPConnInfo * conninfo,
+ GstRTSPMessage * response, GstRTSPStatusCode * code)
+ {
+ GstRTSPStatusCode thecode;
+ gchar *content_base = NULL;
+ GstRTSPResult res;
+
+ next:
+ if (conninfo->flushing) {
+ /* do not attempt to receive if flushing */
+ res = GST_RTSP_EINTR;
+ } else {
+ res = gst_rtspsrc_connection_receive (src, conninfo, response,
+ src->tcp_timeout);
+ }
+
+ if (res < 0)
+ goto receive_error;
+
+ DEBUG_RTSP (src, response);
+
+ switch (response->type) {
+ case GST_RTSP_MESSAGE_REQUEST:
+ res = gst_rtspsrc_handle_request (src, conninfo, response);
+ if (res == GST_RTSP_EEOF)
+ goto server_eof;
+ else if (res < 0)
+ goto handle_request_failed;
+
+ /* Not a response, receive next message */
+ goto next;
+ case GST_RTSP_MESSAGE_RESPONSE:
+ /* ok, a response is good */
+ GST_DEBUG_OBJECT (src, "received response message");
+ break;
+ case GST_RTSP_MESSAGE_DATA:
+ /* get next response */
+ GST_DEBUG_OBJECT (src, "handle data response message");
+ gst_rtspsrc_handle_data (src, response);
+
+ /* Not a response, receive next message */
+ goto next;
+ default:
+ GST_WARNING_OBJECT (src, "ignoring unknown message type %d",
+ response->type);
+
+ /* Not a response, receive next message */
+ goto next;
+ }
+
+ thecode = response->type_data.response.code;
+
+ GST_DEBUG_OBJECT (src, "got response message %d", thecode);
+
+ /* if the caller wanted the result code, we store it. */
+ if (code)
+ *code = thecode;
+
+ /* If the request didn't succeed, bail out before doing any more */
+ if (thecode != GST_RTSP_STS_OK)
+ return GST_RTSP_OK;
+
+ /* store new content base if any */
+ gst_rtsp_message_get_header (response, GST_RTSP_HDR_CONTENT_BASE,
+ &content_base, 0);
+ if (content_base) {
+ g_free (src->content_base);
+ src->content_base = g_strdup (content_base);
+ }
+
+ return GST_RTSP_OK;
+
+ /* ERRORS */
+ receive_error:
+ {
+ switch (res) {
+ case GST_RTSP_EEOF:
+ return GST_RTSP_EEOF;
+ default:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
+ if (res != GST_RTSP_EINTR) {
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src,
++ GST_RTSPSRC_ERROR_SERVER_DISCONNECTED,
++ "Could not receive message.");
++#else
+ GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
+ ("Could not receive message. (%s)", str));
++#endif
+ } else {
+ GST_WARNING_OBJECT (src, "receive interrupted");
+ }
+ g_free (str);
+ break;
+ }
+ }
+ return res;
+ }
+ handle_request_failed:
+ {
+ /* ERROR was posted */
+ gst_rtsp_message_unset (response);
+ return res;
+ }
+ server_eof:
+ {
+ GST_DEBUG_OBJECT (src, "we got an eof from the server");
+ GST_ELEMENT_WARNING (src, RESOURCE, READ, (NULL),
+ ("The server closed the connection."));
+ gst_rtsp_message_unset (response);
+ return res;
+ }
+ }
+
+
+ static GstRTSPResult
+ gst_rtspsrc_try_send (GstRTSPSrc * src, GstRTSPConnInfo * conninfo,
+ GstRTSPMessage * request, GstRTSPMessage * response,
+ GstRTSPStatusCode * code)
+ {
+ GstRTSPResult res;
+ gint try = 0;
+ gboolean allow_send = TRUE;
+
+ again:
+ if (!src->short_header)
+ gst_rtsp_ext_list_before_send (src->extensions, request);
+
+ g_signal_emit (src, gst_rtspsrc_signals[SIGNAL_BEFORE_SEND], 0,
+ request, &allow_send);
+ if (!allow_send) {
+ GST_DEBUG_OBJECT (src, "skipping message, disabled by signal");
+ return GST_RTSP_OK;
+ }
+
+ GST_DEBUG_OBJECT (src, "sending message");
+
+ DEBUG_RTSP (src, request);
+
+ res = gst_rtspsrc_connection_send (src, conninfo, request, src->tcp_timeout);
+ if (res < 0)
+ goto send_error;
+
+ gst_rtsp_connection_reset_timeout (conninfo->connection);
+ if (!response)
+ return res;
+
+ res = gst_rtsp_src_receive_response (src, conninfo, response, code);
+ if (res == GST_RTSP_EEOF) {
+ GST_WARNING_OBJECT (src, "server closed connection");
+ /* only try once after reconnect, then fallthrough and error out */
+ if ((try == 0) && !src->interleaved && src->udp_reconnect) {
+ try++;
+ /* if reconnect succeeds, try again */
+ if ((res = gst_rtsp_conninfo_reconnect (src, &src->conninfo, FALSE)) == 0)
+ goto again;
+ }
+ }
+
+ if (res < 0)
+ goto receive_error;
+
+ gst_rtsp_ext_list_after_send (src->extensions, request, response);
+
+ return res;
+
+ send_error:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
+ if (res != GST_RTSP_EINTR) {
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONNECTION_FAIL,
++ "Could not send message.");
++#else
+ GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
+ ("Could not send message. (%s)", str));
++#endif
+ } else {
+ GST_WARNING_OBJECT (src, "send interrupted");
+ }
+ g_free (str);
+ return res;
+ }
+
+ receive_error:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
+ if (res != GST_RTSP_EINTR) {
+ GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
+ ("Could not receive message. (%s)", str));
+ } else {
+ GST_WARNING_OBJECT (src, "receive interrupted");
+ }
+ g_free (str);
+ return res;
+ }
+ }
+
+ /**
+ * gst_rtspsrc_send:
+ * @src: the rtsp source
+ * @conninfo: the connection information to send on
+ * @request: must point to a valid request
+ * @response: must point to an empty #GstRTSPMessage
+ * @code: an optional code result
+  * @versions: List of versions to try, set back onto the @request message;
+  *    if not set, `src->version` will be used as the RTSP version.
+  *
+  * Send @request and retrieve the response in @response. Optionally, @code can
+  * be non-NULL, in which case it will contain the status code of the response.
+  *
+  * If this function returns #GST_RTSP_OK, @response will contain a valid response
+  * message that should be cleaned with gst_rtsp_message_unset() after usage.
+ *
+ * If @code is NULL, this function will return #GST_RTSP_ERROR (with an invalid
+ * @response message) if the response code was not 200 (OK).
+ *
+ * If the attempt results in an authentication failure, then this will attempt
+ * to retrieve authentication credentials via gst_rtspsrc_setup_auth and retry
+ * the request.
+ *
+ * Returns: #GST_RTSP_OK if the processing was successful.
+ */
+ static GstRTSPResult
+ gst_rtspsrc_send (GstRTSPSrc * src, GstRTSPConnInfo * conninfo,
+ GstRTSPMessage * request, GstRTSPMessage * response,
+ GstRTSPStatusCode * code, GstRTSPVersion * versions)
+ {
+ GstRTSPStatusCode int_code = GST_RTSP_STS_OK;
+ GstRTSPResult res = GST_RTSP_ERROR;
+ gint count;
+ gboolean retry;
+ GstRTSPMethod method = GST_RTSP_INVALID;
+ gint version_retry = 0;
+
+ count = 0;
+ do {
+ retry = FALSE;
+
+ /* make sure we don't loop forever */
+ if (count++ > 8)
+ break;
+
+ /* save method so we can disable it when the server complains */
+ method = request->type_data.request.method;
+
+ if (!versions)
+ request->type_data.request.version = src->version;
+
+ if ((res =
+ gst_rtspsrc_try_send (src, conninfo, request, response,
+ &int_code)) < 0)
+ goto error;
+
+ switch (int_code) {
+ case GST_RTSP_STS_UNAUTHORIZED:
+ case GST_RTSP_STS_NOT_FOUND:
+ if (gst_rtspsrc_setup_auth (src, response)) {
+ /* Try the request/response again after configuring the auth info
+ * and loop again */
+ retry = TRUE;
+ }
+ break;
+ case GST_RTSP_STS_RTSP_VERSION_NOT_SUPPORTED:
+ GST_INFO_OBJECT (src, "Version %s not supported by the server",
+ versions ? gst_rtsp_version_as_text (versions[version_retry]) :
+ "unknown");
+ if (versions && versions[version_retry] != GST_RTSP_VERSION_INVALID) {
+ GST_INFO_OBJECT (src, "Unsupported version %s => trying %s",
+ gst_rtsp_version_as_text (request->type_data.request.version),
+ gst_rtsp_version_as_text (versions[version_retry]));
+ request->type_data.request.version = versions[version_retry];
+ retry = TRUE;
+ version_retry++;
+ break;
+ }
+ /* fallthrough */
+ default:
+ break;
+ }
+ } while (retry == TRUE);
+
+ /* If the user requested the code, let them handle errors, otherwise
+ * post an error below */
+ if (code != NULL)
+ *code = int_code;
+ else if (int_code != GST_RTSP_STS_OK)
+ goto error_response;
+
+ return res;
+
+ /* ERRORS */
+ error:
+ {
+ GST_DEBUG_OBJECT (src, "got error %d", res);
+ return res;
+ }
+ error_response:
+ {
+ res = GST_RTSP_ERROR;
+
+ switch (response->type_data.response.code) {
+ case GST_RTSP_STS_NOT_FOUND:
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_REQUEST,
++ "STS NOT FOUND");
++#else
+ RTSP_SRC_RESPONSE_ERROR (src, response, RESOURCE, NOT_FOUND,
+ "Not found");
++#endif
+ break;
+ case GST_RTSP_STS_UNAUTHORIZED:
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_NOT_AUTHORIZED,
++ "STS NOT AUTHORIZED");
++#else
+ RTSP_SRC_RESPONSE_ERROR (src, response, RESOURCE, NOT_AUTHORIZED,
+ "Unauthorized");
++#endif
+ break;
+ case GST_RTSP_STS_MOVED_PERMANENTLY:
+ case GST_RTSP_STS_MOVE_TEMPORARILY:
+ {
+ gchar *new_location;
+ GstRTSPLowerTrans transports;
+
+ GST_DEBUG_OBJECT (src, "got redirection");
+ /* if we don't have a Location Header, we must error */
+ if (gst_rtsp_message_get_header (response, GST_RTSP_HDR_LOCATION,
+ &new_location, 0) < 0)
+ break;
+
+ /* When we receive a redirect result, we go back to the INIT state after
+ * parsing the new URI. The caller should do the needed steps to issue
+ * a new setup when it detects this state change. */
+ GST_DEBUG_OBJECT (src, "redirection to %s", new_location);
+
+ /* save current transports */
+ if (src->conninfo.url)
+ transports = src->conninfo.url->transports;
+ else
+ transports = GST_RTSP_LOWER_TRANS_UNKNOWN;
+
+ gst_rtspsrc_uri_set_uri (GST_URI_HANDLER (src), new_location, NULL);
+
+ /* set old transports */
+ if (src->conninfo.url && transports != GST_RTSP_LOWER_TRANS_UNKNOWN)
+ src->conninfo.url->transports = transports;
+
+ src->need_redirect = TRUE;
+ res = GST_RTSP_OK;
+ break;
+ }
+ case GST_RTSP_STS_NOT_ACCEPTABLE:
+ case GST_RTSP_STS_NOT_IMPLEMENTED:
+ case GST_RTSP_STS_METHOD_NOT_ALLOWED:
+ /* Some cameras (e.g. HikVision DS-2CD2732F-IS) return "551
+ * Option not supported" when a command is sent that is not implemented
+ * (e.g. PAUSE). Instead, they should return "501 Not Implemented".
+ *
+ * This is wrong because the camera previously announced support
+ * for PAUSE in its response to OPTIONS.
+ *
+ * In this case, handle the 551 as if it were 501 to avoid propagating
+ * errors to the application level. */
+ case GST_RTSP_STS_OPTION_NOT_SUPPORTED:
+ GST_WARNING_OBJECT (src, "got NOT IMPLEMENTED, disable method %s",
+ gst_rtsp_method_as_text (method));
+ src->methods &= ~method;
+ res = GST_RTSP_OK;
+ break;
+ default:
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_UNEXPECTED_MSG,
++ "Got error response from Server");
++#else
+ RTSP_SRC_RESPONSE_ERROR (src, response, RESOURCE, READ,
+ "Unhandled error");
++#endif
+ break;
+ }
+ /* if we return ERROR we should unset the response ourselves */
+ if (res == GST_RTSP_ERROR)
+ gst_rtsp_message_unset (response);
+
+ return res;
+ }
+ }
+
+ static GstRTSPResult
+ gst_rtspsrc_send_cb (GstRTSPExtension * ext, GstRTSPMessage * request,
+ GstRTSPMessage * response, GstRTSPSrc * src)
+ {
+ return gst_rtspsrc_send (src, &src->conninfo, request, response, NULL, NULL);
+ }
+
+
+ /* parse the response and collect all the supported methods. We need this
+ * information so that we don't try to send an unsupported request to the
+ * server.
+ */
+ static gboolean
+ gst_rtspsrc_parse_methods (GstRTSPSrc * src, GstRTSPMessage * response)
+ {
+ GstRTSPHeaderField field;
+ gchar *respoptions;
+ gint indx = 0;
+
+ /* reset supported methods */
+ src->methods = 0;
+
+ /* try the Allow header first */
+ field = GST_RTSP_HDR_ALLOW;
+ while (TRUE) {
+ respoptions = NULL;
+ gst_rtsp_message_get_header (response, field, &respoptions, indx);
+ if (!respoptions)
+ break;
+
+ src->methods |= gst_rtsp_options_from_text (respoptions);
+
+ indx++;
+ }
+
+ indx = 0;
+ field = GST_RTSP_HDR_PUBLIC;
+ while (TRUE) {
+ respoptions = NULL;
+ gst_rtsp_message_get_header (response, field, &respoptions, indx);
+ if (!respoptions)
+ break;
+
+ src->methods |= gst_rtsp_options_from_text (respoptions);
+
+ indx++;
+ }
+
+ if (src->methods == 0) {
+ /* neither Allow nor Public is required; assume the server supports
+ * at least DESCRIBE and SETUP, and we always assume it supports PLAY as
+ * well. */
+ GST_DEBUG_OBJECT (src, "could not get OPTIONS");
+ src->methods = GST_RTSP_DESCRIBE | GST_RTSP_SETUP;
+ }
+ /* always assume PLAY; FIXME: extensions should be able to override
+ * this */
+ src->methods |= GST_RTSP_PLAY;
+ /* also assume it will support Range */
+ src->seekable = G_MAXFLOAT;
+
+ /* we need describe and setup */
+ if (!(src->methods & GST_RTSP_DESCRIBE))
+ goto no_describe;
+ if (!(src->methods & GST_RTSP_SETUP))
+ goto no_setup;
+
+ return TRUE;
+
+ /* ERRORS */
+ no_describe:
+ {
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_METHOD_NOT_ALLOWED,
++ "Server does not support DESCRIBE.");
++#else
+ GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, (NULL),
+ ("Server does not support DESCRIBE."));
++#endif
+ return FALSE;
+ }
+ no_setup:
+ {
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_METHOD_NOT_ALLOWED,
++ "Server does not support SETUP.");
++#else
+ GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, (NULL),
+ ("Server does not support SETUP."));
++#endif
+ return FALSE;
+ }
+ }
+
+ /* masks to be kept in sync with the hardcoded protocol order of preference
+ * in code below */
+ static const guint protocol_masks[] = {
+ GST_RTSP_LOWER_TRANS_UDP,
+ GST_RTSP_LOWER_TRANS_UDP_MCAST,
+ GST_RTSP_LOWER_TRANS_TCP,
+ 0
+ };
+
+ static GstRTSPResult
+ gst_rtspsrc_create_transports_string (GstRTSPSrc * src,
+ GstRTSPLowerTrans protocols, GstRTSPProfile profile, gchar ** transports)
+ {
+ GstRTSPResult res;
+ GString *result;
+ gboolean add_udp_str;
+
+ *transports = NULL;
+
+ res =
+ gst_rtsp_ext_list_get_transports (src->extensions, protocols, transports);
+
+ if (res < 0)
+ goto failed;
+
+ GST_DEBUG_OBJECT (src, "got transports %s", GST_STR_NULL (*transports));
+
+ /* extension listed transports, use those */
+ if (*transports != NULL)
+ return GST_RTSP_OK;
+
+ /* it's the default */
+ add_udp_str = FALSE;
+
+ /* the default RTSP transports */
+ result = g_string_new ("RTP");
+
+ switch (profile) {
+ case GST_RTSP_PROFILE_AVP:
+ g_string_append (result, "/AVP");
+ break;
+ case GST_RTSP_PROFILE_SAVP:
+ g_string_append (result, "/SAVP");
+ break;
+ case GST_RTSP_PROFILE_AVPF:
+ g_string_append (result, "/AVPF");
+ break;
+ case GST_RTSP_PROFILE_SAVPF:
+ g_string_append (result, "/SAVPF");
+ break;
+ default:
+ break;
+ }
+
+ if (protocols & GST_RTSP_LOWER_TRANS_UDP) {
+ GST_DEBUG_OBJECT (src, "adding UDP unicast");
+ if (add_udp_str)
+ g_string_append (result, "/UDP");
+ g_string_append (result, ";unicast;client_port=%%u1-%%u2");
+ } else if (protocols & GST_RTSP_LOWER_TRANS_UDP_MCAST) {
+ GST_DEBUG_OBJECT (src, "adding UDP multicast");
+ /* we don't have to allocate any UDP ports yet, if the selected transport
+ * turns out to be multicast we can create them and join the multicast
+ * group indicated in the transport reply */
+ if (add_udp_str)
+ g_string_append (result, "/UDP");
+ g_string_append (result, ";multicast");
+ if (src->next_port_num != 0) {
+ if (src->client_port_range.max > 0 &&
+ src->next_port_num >= src->client_port_range.max)
+ goto no_ports;
+
+ g_string_append_printf (result, ";client_port=%d-%d",
+ src->next_port_num, src->next_port_num + 1);
+ }
+ } else if (protocols & GST_RTSP_LOWER_TRANS_TCP) {
+ GST_DEBUG_OBJECT (src, "adding TCP");
+
+ g_string_append (result, "/TCP;unicast;interleaved=%%i1-%%i2");
+ }
+ *transports = g_string_free (result, FALSE);
+
+ GST_DEBUG_OBJECT (src, "prepared transports %s", GST_STR_NULL (*transports));
+
+ return GST_RTSP_OK;
+
+ /* ERRORS */
+ failed:
+ {
+ GST_ERROR ("extension gave error %d", res);
+ return res;
+ }
+ no_ports:
+ {
+ GST_ERROR ("no more ports available");
+ return GST_RTSP_ERROR;
+ }
+ }
+
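+ /* Replace the %%u1/%%u2 (UDP port) and %%i1/%%i2 (interleaved channel)
+ * placeholders in the transport template with concrete values. As an
+ * illustrative example (the port numbers are hypothetical), the template
+ *
+ * "RTP/AVP;unicast;client_port=%%u1-%%u2"
+ *
+ * could become
+ *
+ * "RTP/AVP;unicast;client_port=5000-5001"
+ */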
+ static GstRTSPResult
+ gst_rtspsrc_prepare_transports (GstRTSPStream * stream, gchar ** transports,
+ gint orig_rtpport, gint orig_rtcpport)
+ {
+ GstRTSPSrc *src;
+ gint nr_udp, nr_int;
+ gchar *next, *p;
+ gint rtpport = 0, rtcpport = 0;
+ GString *str;
+
+ src = stream->parent;
+
+ /* find number of placeholders first */
+ if (strstr (*transports, "%%i2"))
+ nr_int = 2;
+ else if (strstr (*transports, "%%i1"))
+ nr_int = 1;
+ else
+ nr_int = 0;
+
+ if (strstr (*transports, "%%u2"))
+ nr_udp = 2;
+ else if (strstr (*transports, "%%u1"))
+ nr_udp = 1;
+ else
+ nr_udp = 0;
+
+ if (nr_udp == 0 && nr_int == 0)
+ goto done;
+
+ if (nr_udp > 0) {
+ if (!orig_rtpport || !orig_rtcpport) {
+ if (!gst_rtspsrc_alloc_udp_ports (stream, &rtpport, &rtcpport))
+ goto failed;
+ } else {
+ rtpport = orig_rtpport;
+ rtcpport = orig_rtcpport;
+ }
+ }
+
+ str = g_string_new ("");
+ p = *transports;
+ while ((next = strstr (p, "%%"))) {
+ g_string_append_len (str, p, next - p);
+ if (next[2] == 'u') {
+ if (next[3] == '1')
+ g_string_append_printf (str, "%d", rtpport);
+ else if (next[3] == '2')
+ g_string_append_printf (str, "%d", rtcpport);
+ }
+ if (next[2] == 'i') {
+ if (next[3] == '1')
+ g_string_append_printf (str, "%d", src->free_channel);
+ else if (next[3] == '2')
+ g_string_append_printf (str, "%d", src->free_channel + 1);
+
+ }
+
+ p = next + 4;
+ }
+ if (src->version >= GST_RTSP_VERSION_2_0)
+ src->free_channel += 2;
+
+ /* append final part */
+ g_string_append (str, p);
+
+ g_free (*transports);
+ *transports = g_string_free (str, FALSE);
+
+ done:
+ return GST_RTSP_OK;
+
+ /* ERRORS */
+ failed:
+ {
+ GST_ERROR ("failed to allocate udp ports");
+ return GST_RTSP_ERROR;
+ }
+ }
+
+ static GstCaps *
+ signal_get_srtcp_params (GstRTSPSrc * src, GstRTSPStream * stream)
+ {
+ GstCaps *caps = NULL;
+
+ g_signal_emit (src, gst_rtspsrc_signals[SIGNAL_REQUEST_RTCP_KEY], 0,
+ stream->id, &caps);
+
+ if (caps != NULL)
+ GST_DEBUG_OBJECT (src, "SRTP parameters received");
+
+ return caps;
+ }
+
+ static GstCaps *
+ default_srtcp_params (void)
+ {
+ guint i;
+ GstCaps *caps;
+ GstBuffer *buf;
+ guint8 *key_data;
+ #define KEY_SIZE 30
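+ /* 30 bytes = 16-byte AES-128 master key + 14-byte master salt, the key
+ * length expected by the aes-128-icm / hmac-sha1-80 suite used below */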
+ guint data_size = GST_ROUND_UP_4 (KEY_SIZE);
+
+ /* create a random key */
+ key_data = g_malloc (data_size);
+ for (i = 0; i < data_size; i += 4)
+ GST_WRITE_UINT32_BE (key_data + i, g_random_int ());
+
+ buf = gst_buffer_new_wrapped (key_data, KEY_SIZE);
+
+ caps = gst_caps_new_simple ("application/x-srtcp",
+ "srtp-key", GST_TYPE_BUFFER, buf,
+ "srtp-cipher", G_TYPE_STRING, "aes-128-icm",
+ "srtp-auth", G_TYPE_STRING, "hmac-sha1-80",
+ "srtcp-cipher", G_TYPE_STRING, "aes-128-icm",
+ "srtcp-auth", G_TYPE_STRING, "hmac-sha1-80", NULL);
+
+ gst_buffer_unref (buf);
+
+ return caps;
+ }
+
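+ /* Build the MIKEY key management attribute carrying the stream's SRTP/SRTCP
+ * parameters. The resulting value (per RFC 4567) has the illustrative shape:
+ *
+ * prot=mikey;uri="rtsp://example.com/media";data="<base64 MIKEY message>"
+ */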
+ static gchar *
+ gst_rtspsrc_stream_make_keymgmt (GstRTSPSrc * src, GstRTSPStream * stream)
+ {
+ gchar *base64, *result = NULL;
+ GstMIKEYMessage *mikey_msg;
+
+ stream->srtcpparams = signal_get_srtcp_params (src, stream);
+ if (stream->srtcpparams == NULL)
+ stream->srtcpparams = default_srtcp_params ();
+
+ mikey_msg = gst_mikey_message_new_from_caps (stream->srtcpparams);
+ if (mikey_msg) {
+ /* add policy '0' for our SSRC */
+ gst_mikey_message_add_cs_srtp (mikey_msg, 0, stream->send_ssrc, 0);
+
+ base64 = gst_mikey_message_base64_encode (mikey_msg);
+ gst_mikey_message_unref (mikey_msg);
+
+ if (base64) {
+ result = gst_sdp_make_keymgmt (stream->conninfo.location, base64);
+ g_free (base64);
+ }
+ }
+
+ return result;
+ }
+
+ static GstRTSPResult
+ gst_rtsp_src_setup_stream_from_response (GstRTSPSrc * src,
+ GstRTSPStream * stream, GstRTSPMessage * response,
+ GstRTSPLowerTrans * protocols, gint retry, gint * rtpport, gint * rtcpport)
+ {
+ gchar *resptrans = NULL;
+ GstRTSPTransport transport = { 0 };
+
+ gst_rtsp_message_get_header (response, GST_RTSP_HDR_TRANSPORT, &resptrans, 0);
+ if (!resptrans) {
+ gst_rtspsrc_stream_free_udp (stream);
+ goto no_transport;
+ }
+
+ /* parse transport, go to next stream on parse error */
+ if (gst_rtsp_transport_parse (resptrans, &transport) != GST_RTSP_OK) {
+ GST_WARNING_OBJECT (src, "failed to parse transport %s", resptrans);
+ return GST_RTSP_ELAST;
+ }
+
+ /* update allowed transports for other streams. once the transport of
+ * one stream has been determined, we make sure that all other streams
+ * are configured in the same way */
+ switch (transport.lower_transport) {
+ case GST_RTSP_LOWER_TRANS_TCP:
+ GST_DEBUG_OBJECT (src, "stream %p as TCP interleaved", stream);
+ if (protocols)
+ *protocols = GST_RTSP_LOWER_TRANS_TCP;
+ src->interleaved = TRUE;
+ if (src->version < GST_RTSP_VERSION_2_0) {
+ /* update free channels */
+ src->free_channel = MAX (transport.interleaved.min, src->free_channel);
+ src->free_channel = MAX (transport.interleaved.max, src->free_channel);
+ src->free_channel++;
+ }
+ break;
+ case GST_RTSP_LOWER_TRANS_UDP_MCAST:
+ /* only allow multicast for other streams */
+ GST_DEBUG_OBJECT (src, "stream %p as UDP multicast", stream);
+ if (protocols)
+ *protocols = GST_RTSP_LOWER_TRANS_UDP_MCAST;
+ /* if the server selected our ports, increment our counters so that
+ * we select a new port later */
+ if (src->next_port_num == transport.port.min &&
+ src->next_port_num + 1 == transport.port.max) {
+ src->next_port_num += 2;
+ }
+ break;
+ case GST_RTSP_LOWER_TRANS_UDP:
+ /* only allow unicast for other streams */
+ GST_DEBUG_OBJECT (src, "stream %p as UDP unicast", stream);
+ if (protocols)
+ *protocols = GST_RTSP_LOWER_TRANS_UDP;
+ break;
+ default:
+ GST_DEBUG_OBJECT (src, "stream %p unknown transport %d", stream,
+ transport.lower_transport);
+ break;
+ }
+
+ if (!src->interleaved || !retry) {
+ /* now configure the stream with the selected transport */
+ if (!gst_rtspsrc_stream_configure_transport (stream, &transport)) {
+ GST_DEBUG_OBJECT (src,
+ "could not configure stream %p transport, skipping stream", stream);
+ goto done;
+ } else if (stream->udpsrc[0] && stream->udpsrc[1] && rtpport && rtcpport) {
+ /* retain the first allocated UDP port pair */
+ g_object_get (G_OBJECT (stream->udpsrc[0]), "port", rtpport, NULL);
+ g_object_get (G_OBJECT (stream->udpsrc[1]), "port", rtcpport, NULL);
+ }
+ }
+ /* we need to activate at least one stream when we detect activity */
+ src->need_activate = TRUE;
+
+ /* stream is setup now */
+ stream->setup = TRUE;
+ stream->waiting_setup_response = FALSE;
+
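+ /* RTSP 2.0 servers report seekability in the Media-Properties header of the
+ * SETUP response; an illustrative (hypothetical) value:
+ *
+ * Media-Properties: Random-Access=3.0, Time-Progressing, Time-Duration=0
+ *
+ * Only Random-Access, No-Seeking and Beginning-Only are interpreted below. */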
+ if (src->version >= GST_RTSP_VERSION_2_0) {
+ gchar *prop, *media_properties;
+ gchar **props;
+ gint i;
+
+ if (gst_rtsp_message_get_header (response, GST_RTSP_HDR_MEDIA_PROPERTIES,
+ &media_properties, 0) != GST_RTSP_OK) {
+ GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
+ ("Error: No MEDIA_PROPERTY header in a SETUP request in RTSP 2.0"
+ " - this header is mandatory."));
+
+ gst_rtsp_message_unset (response);
+ return GST_RTSP_ERROR;
+ }
+
+ props = g_strsplit (media_properties, ",", -2);
+ for (i = 0; props[i]; i++) {
+ prop = props[i];
+
+ while (*prop == ' ')
+ prop++;
+
+ if (strstr (prop, "Random-Access")) {
+ gchar **random_seekable_val = g_strsplit (prop, "=", 2);
+
+ if (!random_seekable_val[1])
+ src->seekable = G_MAXFLOAT;
+ else
+ src->seekable = g_ascii_strtod (random_seekable_val[1], NULL);
+
+ g_strfreev (random_seekable_val);
+ } else if (!g_strcmp0 (prop, "No-Seeking")) {
+ src->seekable = -1.0;
+ } else if (!g_strcmp0 (prop, "Beginning-Only")) {
+ src->seekable = 0.0;
+ }
+ }
+
+ g_strfreev (props);
+ }
+
+ done:
+ /* clean up our transport struct */
+ gst_rtsp_transport_init (&transport);
+ /* clean up used RTSP messages */
+ gst_rtsp_message_unset (response);
+
+ return GST_RTSP_OK;
+
+ no_transport:
+ {
+ GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
+ ("Server did not select transport."));
+
+ gst_rtsp_message_unset (response);
+ return GST_RTSP_ERROR;
+ }
+ }
+
+ static GstRTSPResult
+ gst_rtspsrc_setup_streams_end (GstRTSPSrc * src, gboolean async)
+ {
+ GList *tmp;
+ GstRTSPConnInfo *conninfo;
+
+ g_assert (src->version >= GST_RTSP_VERSION_2_0);
+
+ conninfo = &src->conninfo;
+ for (tmp = src->streams; tmp; tmp = tmp->next) {
+ GstRTSPStream *stream = (GstRTSPStream *) tmp->data;
+ GstRTSPMessage response = { 0, };
+
+ if (!stream->waiting_setup_response)
+ continue;
+
+ if (!src->conninfo.connection)
+ conninfo = &((GstRTSPStream *) tmp->data)->conninfo;
+
+ gst_rtsp_src_receive_response (src, conninfo, &response, NULL);
+
+ gst_rtsp_src_setup_stream_from_response (src, stream,
+ &response, NULL, 0, NULL, NULL);
+ }
+
+ return GST_RTSP_OK;
+ }
+
+ /* Perform the SETUP request for all the streams.
+ *
+ * We ask the server for a specific transport, which initially includes all the
+ * ones we can support (UDP/TCP/MULTICAST). For the UDP transport we allocate
+ * two local UDP ports that we send to the server.
+ *
+ * Once the server replied with a transport, we configure the other streams
+ * with the same transport.
+ *
+ * In case SETUP requests are not pipelined, this function will also configure
+ * the stream for the selected transport, which basically means creating the
+ * pipeline. Otherwise, the first stream is set up right away from the reply
+ * and a CMD_FINALIZE_SETUP command is queued so that the remaining stream
+ * pipelines are created from the RTSP thread.
+ */
+ static GstRTSPResult
+ gst_rtspsrc_setup_streams_start (GstRTSPSrc * src, gboolean async)
+ {
+ GList *walk;
+ GstRTSPResult res = GST_RTSP_ERROR;
+ GstRTSPMessage request = { 0 };
+ GstRTSPMessage response = { 0 };
+ GstRTSPStream *stream = NULL;
+ GstRTSPLowerTrans protocols;
+ GstRTSPStatusCode code;
+ gboolean unsupported_real = FALSE;
+ gint rtpport, rtcpport;
+ GstRTSPUrl *url;
+ gchar *hval;
+ gchar *pipelined_request_id = NULL;
+
+ if (src->conninfo.connection) {
+ url = gst_rtsp_connection_get_url (src->conninfo.connection);
+ /* we initially allow all configured lower transports. based on the URL
+ * transports and the replies from the server we narrow them down. */
+ protocols = url->transports & src->cur_protocols;
+ } else {
+ url = NULL;
+ protocols = src->cur_protocols;
+ }
+
+ /* In ONVIF mode, we only want to try TCP transport */
+ if (src->onvif_mode && (protocols & GST_RTSP_LOWER_TRANS_TCP))
+ protocols = GST_RTSP_LOWER_TRANS_TCP;
+
+ if (protocols == 0)
+ goto no_protocols;
+
+ /* reset some state */
+ src->free_channel = 0;
+ src->interleaved = FALSE;
+ src->need_activate = FALSE;
+ /* keep track of next port number, 0 is random */
+ src->next_port_num = src->client_port_range.min;
+ rtpport = rtcpport = 0;
+
+ if (G_UNLIKELY (src->streams == NULL))
+ goto no_streams;
+
+ for (walk = src->streams; walk; walk = g_list_next (walk)) {
+ GstRTSPConnInfo *conninfo;
+ gchar *transports;
+ gint retry = 0;
+ guint mask = 0;
+ gboolean selected;
+ GstCaps *caps;
+
+ stream = (GstRTSPStream *) walk->data;
+
+ caps = stream_get_caps_for_pt (stream, stream->default_pt);
+ if (caps == NULL) {
+ GST_WARNING_OBJECT (src, "skipping stream %p, no caps", stream);
+ continue;
+ }
+
+ if (stream->skipped) {
+ GST_DEBUG_OBJECT (src, "skipping stream %p", stream);
+ continue;
+ }
+
+ /* see if we need to configure this stream */
+ if (!gst_rtsp_ext_list_configure_stream (src->extensions, caps)) {
+ GST_DEBUG_OBJECT (src, "skipping stream %p, disabled by extension",
+ stream);
+ continue;
+ }
+
+ g_signal_emit (src, gst_rtspsrc_signals[SIGNAL_SELECT_STREAM], 0,
+ stream->id, caps, &selected);
+ if (!selected) {
+ GST_DEBUG_OBJECT (src, "skipping stream %p, disabled by signal", stream);
+ continue;
+ }
+
+ /* merge/overwrite global caps */
+ if (caps) {
+ guint j, num;
+ GstStructure *s;
+
+ s = gst_caps_get_structure (caps, 0);
+
+ num = gst_structure_n_fields (src->props);
+ for (j = 0; j < num; j++) {
+ const gchar *name;
+ const GValue *val;
+
+ name = gst_structure_nth_field_name (src->props, j);
+ val = gst_structure_get_value (src->props, name);
+ gst_structure_set_value (s, name, val);
+
+ GST_DEBUG_OBJECT (src, "copied %s", name);
+ }
+ }
+
+ /* skip setup if we have no URL for it */
+ if (stream->conninfo.location == NULL) {
+ GST_WARNING_OBJECT (src, "skipping stream %p, no setup", stream);
+ continue;
+ }
+
+ if (src->conninfo.connection == NULL) {
+ if (!gst_rtsp_conninfo_connect (src, &stream->conninfo, async)) {
+ GST_WARNING_OBJECT (src, "skipping stream %p, failed to connect",
+ stream);
+ continue;
+ }
+ conninfo = &stream->conninfo;
+ } else {
+ conninfo = &src->conninfo;
+ }
+ GST_DEBUG_OBJECT (src, "doing setup of stream %p with %s", stream,
+ stream->conninfo.location);
+
+ /* if we have a multicast connection, only suggest multicast from now on */
+ if (stream->is_multicast)
+ protocols &= GST_RTSP_LOWER_TRANS_UDP_MCAST;
+
+ next_protocol:
+ /* first selectable protocol */
+ while (protocol_masks[mask] && !(protocols & protocol_masks[mask]))
+ mask++;
+ if (!protocol_masks[mask])
+ goto no_protocols;
+
+ retry:
+ GST_DEBUG_OBJECT (src, "protocols = 0x%x, protocol mask = 0x%x", protocols,
+ protocol_masks[mask]);
+ /* create a string with first transport in line */
+ transports = NULL;
+ res = gst_rtspsrc_create_transports_string (src,
+ protocols & protocol_masks[mask], stream->profile, &transports);
+ if (res < 0 || transports == NULL)
+ goto setup_transport_failed;
+
+ if (strlen (transports) == 0) {
+ g_free (transports);
+ GST_DEBUG_OBJECT (src, "no transports found");
+ mask++;
+ goto next_protocol;
+ }
+
+ GST_DEBUG_OBJECT (src, "replace ports in %s", GST_STR_NULL (transports));
+
+ /* replace placeholders with real values; this function will optionally
+ * allocate UDP ports and other info needed to execute the setup request */
+ res = gst_rtspsrc_prepare_transports (stream, &transports,
+ retry > 0 ? rtpport : 0, retry > 0 ? rtcpport : 0);
+ if (res < 0) {
+ g_free (transports);
+ goto setup_transport_failed;
+ }
+
+ GST_DEBUG_OBJECT (src, "transport is now %s", GST_STR_NULL (transports));
+ /* create SETUP request */
+ res =
+ gst_rtspsrc_init_request (src, &request, GST_RTSP_SETUP,
+ stream->conninfo.location);
+ if (res < 0) {
+ g_free (transports);
+ goto create_request_failed;
+ }
+
+ if (src->version >= GST_RTSP_VERSION_2_0) {
+ if (!pipelined_request_id)
+ pipelined_request_id = g_strdup_printf ("%d",
+ g_random_int_range (0, G_MAXINT32));
+
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_PIPELINED_REQUESTS,
+ pipelined_request_id);
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_ACCEPT_RANGES,
+ "npt, clock, smpte, clock");
+ }
+
+ /* select transport */
+ gst_rtsp_message_take_header (&request, GST_RTSP_HDR_TRANSPORT, transports);
+
+ if (stream->is_backchannel && src->backchannel == BACKCHANNEL_ONVIF)
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_REQUIRE,
+ BACKCHANNEL_ONVIF_HDR_REQUIRE_VAL);
+
+ /* set up keys */
+ if (stream->profile == GST_RTSP_PROFILE_SAVP ||
+ stream->profile == GST_RTSP_PROFILE_SAVPF) {
+ hval = gst_rtspsrc_stream_make_keymgmt (src, stream);
+ gst_rtsp_message_take_header (&request, GST_RTSP_HDR_KEYMGMT, hval);
+ }
+
+ /* if the user wants a non-default RTP packet size we add the blocksize
+ * parameter */
+ if (src->rtp_blocksize > 0) {
+ hval = g_strdup_printf ("%d", src->rtp_blocksize);
+ gst_rtsp_message_take_header (&request, GST_RTSP_HDR_BLOCKSIZE, hval);
+ }
+
+ if (async)
+ GST_ELEMENT_PROGRESS (src, CONTINUE, "request", ("SETUP stream %d",
+ stream->id));
+
+ /* handle the code ourselves */
+ res =
+ gst_rtspsrc_send (src, conninfo, &request,
+ pipelined_request_id ? NULL : &response, &code, NULL);
+ if (res < 0)
+ goto send_error;
+
+ switch (code) {
+ case GST_RTSP_STS_OK:
+ break;
+ case GST_RTSP_STS_UNSUPPORTED_TRANSPORT:
+ gst_rtsp_message_unset (&request);
+ gst_rtsp_message_unset (&response);
+ /* cleanup of leftover transport */
+ gst_rtspsrc_stream_free_udp (stream);
+ /* MS WMServer RTSP MUST use same UDP pair in all SETUP requests;
+ * we might be in this case */
+ if (stream->container && rtpport && rtcpport && !retry) {
+ GST_DEBUG_OBJECT (src, "retrying with original port pair %u-%u",
+ rtpport, rtcpport);
+ retry++;
+ goto retry;
+ }
+ /* this transport did not go down well, but we may have others to try
+ * that we did not send yet; try those and only give up then,
+ * but not without checking for lost cause/extension first, so we can
+ * post a nicer/more useful error message later */
+ if (!unsupported_real)
+ unsupported_real = stream->is_real;
+ /* select next available protocol, give up on this stream if none */
+ mask++;
+ while (protocol_masks[mask] && !(protocols & protocol_masks[mask]))
+ mask++;
+ if (!protocol_masks[mask] || unsupported_real)
+ continue;
+ else
+ goto retry;
+ default:
+ /* cleanup of leftover transport and move to the next stream */
+ gst_rtspsrc_stream_free_udp (stream);
+ goto response_error;
+ }
+
+
+ if (!pipelined_request_id) {
+ /* parse response transport */
+ res = gst_rtsp_src_setup_stream_from_response (src, stream,
+ &response, &protocols, retry, &rtpport, &rtcpport);
+ switch (res) {
+ case GST_RTSP_ERROR:
+ goto cleanup_error;
+ case GST_RTSP_ELAST:
+ goto retry;
+ default:
+ break;
+ }
+ } else {
+ stream->waiting_setup_response = TRUE;
+ /* we need to activate at least one stream when we detect activity */
+ src->need_activate = TRUE;
+ }
+
+ {
+ GList *skip = walk;
+
+ while (TRUE) {
+ GstRTSPStream *sskip;
+
+ skip = g_list_next (skip);
+ if (skip == NULL)
+ break;
+
+ sskip = (GstRTSPStream *) skip->data;
+
+ /* skip all streams with the same control url */
+ if (g_str_equal (stream->conninfo.location, sskip->conninfo.location)) {
+ GST_DEBUG_OBJECT (src, "found stream %p with same control %s",
+ sskip, sskip->conninfo.location);
+ sskip->skipped = TRUE;
+ }
+ }
+ }
+ gst_rtsp_message_unset (&request);
+ }
+
+ if (pipelined_request_id) {
+ gst_rtspsrc_setup_streams_end (src, TRUE);
+ }
+
+ /* store the transport protocol that was configured */
+ src->cur_protocols = protocols;
+
+ gst_rtsp_ext_list_stream_select (src->extensions, url);
+
+ if (pipelined_request_id)
+ g_free (pipelined_request_id);
+
+ /* if there is nothing to activate, error out */
+ if (!src->need_activate)
+ goto nothing_to_activate;
+
+ return res;
+
+ /* ERRORS */
+ no_protocols:
+ {
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_INVALID_PROTOCOL,
++ "Could not connect to server, no protocols left");
++#else
+ /* no transport possible, post an error and stop */
+ GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
+ ("Could not connect to server, no protocols left"));
++#endif
+ return GST_RTSP_ERROR;
+ }
+ no_streams:
+ {
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONTENT_NOT_FOUND,
++ "SDP contains no streams");
++#else
+ GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
+ ("SDP contains no streams"));
++#endif
+ return GST_RTSP_ERROR;
+ }
+ create_request_failed:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_REQUEST,
++ "Could not create request.");
++#else
+ GST_ELEMENT_ERROR (src, LIBRARY, INIT, (NULL),
+ ("Could not create request. (%s)", str));
++#endif
+ g_free (str);
+ goto cleanup_error;
+ }
+ setup_transport_failed:
+ {
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_REQUEST,
++ "Could not setup transport.");
++#else
+ GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
+ ("Could not setup transport."));
++#endif
+ res = GST_RTSP_ERROR;
+ goto cleanup_error;
+ }
+ response_error:
+ {
++#ifndef TIZEN_FEATURE_RTSP_MODIFICATION
+ const gchar *str = gst_rtsp_status_as_text (code);
++#endif
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_UNEXPECTED_MSG,
++ "Error from Server .");
++#else
+ GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
+ ("Error (%d): %s", code, GST_STR_NULL (str)));
++#endif
+ res = GST_RTSP_ERROR;
+ goto cleanup_error;
+ }
+ send_error:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
+ if (res != GST_RTSP_EINTR) {
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONNECTION_FAIL,
++ "Could not send message.");
++#else
+ GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
+ ("Could not send message. (%s)", str));
++#endif
+ } else {
+ GST_WARNING_OBJECT (src, "send interrupted");
+ }
+ g_free (str);
+ goto cleanup_error;
+ }
+ nothing_to_activate:
+ {
+ /* none of the available error codes is really right .. */
+ if (unsupported_real) {
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src,
++ GST_RTSPSRC_ERROR_UNSUPPORTED_MEDIA_TYPE,
++ "No supported stream was found. You might need to install a GStreamer RTSP extension plugin for Real media streams.");
++#else
+ GST_ELEMENT_ERROR (src, STREAM, CODEC_NOT_FOUND,
+ (_("No supported stream was found. You might need to install a "
+ "GStreamer RTSP extension plugin for Real media streams.")),
+ (NULL));
++#endif
+ } else {
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src,
++ GST_RTSPSRC_ERROR_UNSUPPORTED_MEDIA_TYPE,
++ "No supported stream was found. You might need to allow more transport protocols or may otherwise be missing the right GStreamer RTSP extension plugin.");
++#else
+ GST_ELEMENT_ERROR (src, STREAM, CODEC_NOT_FOUND,
+ (_("No supported stream was found. You might need to allow "
+ "more transport protocols or may otherwise be missing "
+ "the right GStreamer RTSP extension plugin.")), (NULL));
++#endif
+ }
+ return GST_RTSP_ERROR;
+ }
+ cleanup_error:
+ {
+ if (pipelined_request_id)
+ g_free (pipelined_request_id);
+ gst_rtsp_message_unset (&request);
+ gst_rtsp_message_unset (&response);
+ return res;
+ }
+ }
+
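+ /* Parse an RTSP Range header value into @segment. Illustrative NPT values
+ * (per RFC 2326) are "npt=0-34.10" for a bounded range or "npt=now-" for a
+ * live stream; a parse failure resets the segment and returns FALSE. */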
+ static gboolean
+ gst_rtspsrc_parse_range (GstRTSPSrc * src, const gchar * range,
+ GstSegment * segment, gboolean update_duration)
+ {
+ GstClockTime begin_seconds, end_seconds;
+ gint64 seconds;
+ GstRTSPTimeRange *therange;
+
+ if (src->range)
+ gst_rtsp_range_free (src->range);
+
+ if (gst_rtsp_range_parse (range, &therange) == GST_RTSP_OK) {
+ GST_DEBUG_OBJECT (src, "parsed range %s", range);
+ src->range = therange;
+ } else {
+ GST_DEBUG_OBJECT (src, "failed to parse range %s", range);
+ src->range = NULL;
+ gst_segment_init (segment, GST_FORMAT_TIME);
+ return FALSE;
+ }
+
+ gst_rtsp_range_get_times (therange, &begin_seconds, &end_seconds);
+
+ GST_DEBUG_OBJECT (src, "range: type %d, min %f - type %d, max %f ",
+ therange->min.type, therange->min.seconds, therange->max.type,
+ therange->max.seconds);
+
+ if (therange->min.type == GST_RTSP_TIME_NOW)
+ seconds = 0;
+ else if (therange->min.type == GST_RTSP_TIME_END)
+ seconds = 0;
+ else
+ seconds = begin_seconds;
+
+ GST_DEBUG_OBJECT (src, "range: min %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (seconds));
+
+ /* we need to start playback without clipping from the position reported by
+ * the server */
+ if (segment->rate > 0.0)
+ segment->start = seconds;
+ else
+ segment->stop = seconds;
+
++#ifndef TIZEN_FEATURE_RTSP_MODIFICATION
++/*
++ * The range min points to the start of the segment, not the current position.
++ * After getting the current position from MSL during a normal pause/resume or
++ * during a seek, we should not update segment->position again with the RTP
++ * header NPT timestamp.
++ */
+ segment->position = seconds;
++#endif
+
+ if (therange->max.type == GST_RTSP_TIME_NOW)
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ seconds = 0;
++#else
+ seconds = -1;
++#endif
+ else if (therange->max.type == GST_RTSP_TIME_END)
+ seconds = -1;
+ else
+ seconds = end_seconds;
+
+ GST_DEBUG_OBJECT (src, "range: max %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (seconds));
+
+ /* live (WMS) server might send overflowed large max as its idea of infinity,
+ * compensate to prevent problems later on */
+ if (seconds != -1 && seconds < 0) {
+ seconds = -1;
+ GST_DEBUG_OBJECT (src, "insane range, set to NONE");
+ }
+
+ /* live (WMS) might send min == max, which is not worth recording */
+ if (segment->duration == -1 && seconds == begin_seconds)
+ seconds = -1;
+
+ /* don't change duration with unknown value, we might have a valid value
+ * there that we want to keep. Also, the total duration of the stream
+ * can only be determined from the response to a DESCRIBE request, not
+ * from a PLAY request where we might have requested a custom range, so
+ * don't update duration in that case */
+ if (update_duration && seconds != -1) {
+ segment->duration = seconds;
+ GST_DEBUG_OBJECT (src, "set duration from range as %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (seconds));
+ } else {
+ GST_DEBUG_OBJECT (src, "not updating existing duration %" GST_TIME_FORMAT
+ " from range %" GST_TIME_FORMAT, GST_TIME_ARGS (segment->duration),
+ GST_TIME_ARGS (seconds));
+ }
+
+ if (segment->rate > 0.0)
+ segment->stop = seconds;
+ else
+ segment->start = seconds;
+
+ return TRUE;
+ }
+
+ /* Parse the clock provided by the server with the following syntax:
+ *
+ * "GstNetTimeProvider <wrapped-clock> <server-IP:port> <clock-time>"
+ */
+ static gboolean
+ gst_rtspsrc_parse_gst_clock (GstRTSPSrc * src, const gchar * gstclock)
+ {
+ gboolean res = FALSE;
+
+ if (g_str_has_prefix (gstclock, "GstNetTimeProvider ")) {
+ gchar **fields = NULL, **parts = NULL;
+ gchar *remote_ip, *str;
+ gint port;
+ GstClockTime base_time;
+ GstClock *netclock;
+
+ fields = g_strsplit (gstclock, " ", 0);
+
+ /* wrapped clock, not very interesting for now */
+ if (fields[1] == NULL)
+ goto cleanup;
+
+ /* remote IP address and port */
+ if ((str = fields[2]) == NULL)
+ goto cleanup;
+
+ parts = g_strsplit (str, ":", 0);
+
+ if ((remote_ip = parts[0]) == NULL)
+ goto cleanup;
+
+ if ((str = parts[1]) == NULL)
+ goto cleanup;
+
+ port = atoi (str);
+ if (port == 0)
+ goto cleanup;
+
+ /* base-time */
+ if ((str = fields[3]) == NULL)
+ goto cleanup;
+
+ base_time = g_ascii_strtoull (str, NULL, 10);
+
+ netclock =
+ gst_net_client_clock_new ((gchar *) "GstRTSPClock", remote_ip, port,
+ base_time);
+
+ if (src->provided_clock)
+ gst_object_unref (src->provided_clock);
+ src->provided_clock = netclock;
+
+ gst_element_post_message (GST_ELEMENT_CAST (src),
+ gst_message_new_clock_provide (GST_OBJECT_CAST (src),
+ src->provided_clock, TRUE));
+
+ res = TRUE;
+ cleanup:
+ g_strfreev (fields);
+ g_strfreev (parts);
+ }
+ return res;
+ }
+
+ /* must be called with the RTSP state lock */
+ static GstRTSPResult
+ gst_rtspsrc_open_from_sdp (GstRTSPSrc * src, GstSDPMessage * sdp,
+ gboolean async)
+ {
+ GstRTSPResult res;
+ gint i, n_streams;
+
+ /* prepare global stream caps properties */
+ if (src->props)
+ gst_structure_remove_all_fields (src->props);
+ else
+ src->props = gst_structure_new_empty ("RTSPProperties");
+
+ DEBUG_SDP (src, sdp);
+
+ gst_rtsp_ext_list_parse_sdp (src->extensions, sdp, src->props);
+
+ /* let the app inspect and change the SDP */
+ g_signal_emit (src, gst_rtspsrc_signals[SIGNAL_ON_SDP], 0, sdp);
+
+ gst_segment_init (&src->segment, GST_FORMAT_TIME);
+
+ /* parse range for duration reporting. */
+ {
+ const gchar *range;
+
+ for (i = 0;; i++) {
+ range = gst_sdp_message_get_attribute_val_n (sdp, "range", i);
+ if (range == NULL)
+ break;
+
+ /* keep track of the range and configure it in the segment */
+ if (gst_rtspsrc_parse_range (src, range, &src->segment, TRUE))
+ break;
+ }
+ }
+ /* parse clock information. This is GStreamer-specific: a server can tell the
+ * client what clock it is using and wrap that in a network clock. The
+ * advantage is that we can slave to it. */
+ {
+ const gchar *gstclock;
+
+ for (i = 0;; i++) {
+ gstclock = gst_sdp_message_get_attribute_val_n (sdp, "x-gst-clock", i);
+ if (gstclock == NULL)
+ break;
+
+ /* parse the clock and expose it in the provide_clock method */
+ if (gst_rtspsrc_parse_gst_clock (src, gstclock))
+ break;
+ }
+ }
+ /* try to find a global control attribute. Note that a '*' means that we should
+ * do aggregate control with the current url (so we don't do anything and
+ * leave the current connection as is) */
+ {
+ const gchar *control;
+
+ for (i = 0;; i++) {
+ control = gst_sdp_message_get_attribute_val_n (sdp, "control", i);
+ if (control == NULL)
+ break;
+
+ /* only take fully qualified urls */
+ if (g_str_has_prefix (control, "rtsp://"))
+ break;
+ }
+ if (control) {
+ g_free (src->conninfo.location);
+ src->conninfo.location = g_strdup (control);
+ /* make a connection for this, if there was a connection already, nothing
+ * happens. */
+ if (gst_rtsp_conninfo_connect (src, &src->conninfo, async) < 0) {
+ GST_ERROR_OBJECT (src, "could not connect");
+ }
+ }
+ /* we need to keep the control url separate from the connection url because
+ * the rules for constructing the media control url need it */
+ g_free (src->control);
+ src->control = g_strdup (control);
+ }
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ src->is_audio_codec_supported = FALSE;
++ src->is_video_codec_supported = FALSE;
++#endif
++
+ /* create streams */
+ n_streams = gst_sdp_message_medias_len (sdp);
+ for (i = 0; i < n_streams; i++) {
+ gst_rtspsrc_create_stream (src, sdp, i, n_streams);
+ }
+
+ src->state = GST_RTSP_STATE_INIT;
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ /* check whether at least one media codec is supported */
++ if ((!src->is_audio_codec_supported) && (!src->is_video_codec_supported)) {
++ GST_ERROR_OBJECT (src, "unsupported media type");
++ goto unsupported_file_type;
++ } else {
++ GST_DEBUG_OBJECT (src, "supported media type");
++ }
++#endif
+ /* setup streams */
+ if ((res = gst_rtspsrc_setup_streams_start (src, async)) < 0)
+ goto setup_failed;
+
+ /* reset our state */
+ src->need_range = TRUE;
+ src->server_side_trickmode = FALSE;
+ src->trickmode_interval = 0;
+
+ src->state = GST_RTSP_STATE_READY;
+
+ return res;
+
+ /* ERRORS */
+ setup_failed:
+ {
+ GST_ERROR_OBJECT (src, "setup failed");
+ gst_rtspsrc_cleanup (src);
+ return res;
+ }
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++unsupported_file_type:
++ {
++ gst_rtspsrc_post_error_message (src,
++ GST_RTSPSRC_ERROR_UNSUPPORTED_MEDIA_TYPE,
++ "No supported stream was found");
++ res = GST_RTSP_ERROR;
++ gst_rtspsrc_cleanup (src);
++ return res;
++ }
++#endif
+ }
+
+ static GstRTSPResult
+ gst_rtspsrc_retrieve_sdp (GstRTSPSrc * src, GstSDPMessage ** sdp,
+ gboolean async)
+ {
+ GstRTSPResult res;
+ GstRTSPMessage request = { 0 };
+ GstRTSPMessage response = { 0 };
+ guint8 *data;
+ guint size;
+ gchar *respcont = NULL;
+ GstRTSPVersion versions[] =
+ { GST_RTSP_VERSION_2_0, GST_RTSP_VERSION_INVALID };
+
+ src->version = src->default_version;
+ if (src->default_version == GST_RTSP_VERSION_2_0) {
+ versions[0] = GST_RTSP_VERSION_1_0;
+ }
+
+ restart:
+ src->need_redirect = FALSE;
+
+ /* can't continue without a valid url */
+ if (G_UNLIKELY (src->conninfo.url == NULL)) {
+ res = GST_RTSP_EINVAL;
+ goto no_url;
+ }
+ src->tried_url_auth = FALSE;
+
+ if ((res = gst_rtsp_conninfo_connect (src, &src->conninfo, async)) < 0)
+ goto connect_failed;
+
+ /* create OPTIONS */
+ GST_DEBUG_OBJECT (src, "create options... (%s)", async ? "async" : "sync");
+ res =
+ gst_rtspsrc_init_request (src, &request, GST_RTSP_OPTIONS,
+ src->conninfo.url_str);
+ if (res < 0)
+ goto create_request_failed;
+
+ /* send OPTIONS */
+ request.type_data.request.version = src->version;
+ GST_DEBUG_OBJECT (src, "send options...");
+
+ if (async)
+ GST_ELEMENT_PROGRESS (src, CONTINUE, "open", ("Retrieving server options"));
+
+ if ((res =
+ gst_rtspsrc_send (src, &src->conninfo, &request, &response,
+ NULL, versions)) < 0) {
+ goto send_error;
+ }
+
+ src->version = request.type_data.request.version;
+ GST_INFO_OBJECT (src, "Now using version: %s",
+ gst_rtsp_version_as_text (src->version));
+
+ /* parse OPTIONS */
+ if (!gst_rtspsrc_parse_methods (src, &response))
+ goto methods_error;
+
+ /* create DESCRIBE */
+ GST_DEBUG_OBJECT (src, "create describe...");
+ res =
+ gst_rtspsrc_init_request (src, &request, GST_RTSP_DESCRIBE,
+ src->conninfo.url_str);
+ if (res < 0)
+ goto create_request_failed;
+
+ /* we only accept SDP for now */
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_ACCEPT,
+ "application/sdp");
+
+ if (src->backchannel == BACKCHANNEL_ONVIF)
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_REQUIRE,
+ BACKCHANNEL_ONVIF_HDR_REQUIRE_VAL);
+ /* TODO: Handle the case when backchannel is unsupported and goto restart */
+
+ /* send DESCRIBE */
+ GST_DEBUG_OBJECT (src, "send describe...");
+
+ if (async)
+ GST_ELEMENT_PROGRESS (src, CONTINUE, "open", ("Retrieving media info"));
+
+ if ((res =
+ gst_rtspsrc_send (src, &src->conninfo, &request, &response,
+ NULL, NULL)) < 0)
+ goto send_error;
+
+ /* we only perform redirect for describe and play, currently */
+ if (src->need_redirect) {
+ /* close connection, we don't have to send a TEARDOWN yet, ignore the
+ * result. */
+ gst_rtsp_conninfo_close (src, &src->conninfo, TRUE);
+
+ gst_rtsp_message_unset (&request);
+ gst_rtsp_message_unset (&response);
+
+ /* and now retry */
+ goto restart;
+ }
+
+ /* it could be that the DESCRIBE method was not implemented */
+ if (!(src->methods & GST_RTSP_DESCRIBE))
+ goto no_describe;
+
+ /* check if reply is SDP */
+ gst_rtsp_message_get_header (&response, GST_RTSP_HDR_CONTENT_TYPE, &respcont,
+ 0);
+ /* the header might not be set, but since the request returned OK we assume
+ * the body was SDP; if it is set, check it. */
+ if (respcont) {
+ const gchar *props = strchr (respcont, ';');
+
+ if (props) {
+ gchar *mimetype = g_strndup (respcont, props - respcont);
+
+ mimetype = g_strstrip (mimetype);
+ if (g_ascii_strcasecmp (mimetype, "application/sdp") != 0) {
+ g_free (mimetype);
+ goto wrong_content_type;
+ }
+
+ /* TODO: Check for charset property and do conversions of all messages if
+ * needed. Some servers actually send that property */
+
+ g_free (mimetype);
+ } else if (g_ascii_strcasecmp (respcont, "application/sdp") != 0) {
+ goto wrong_content_type;
+ }
+ }
+
+ /* get message body and parse as SDP */
+ gst_rtsp_message_get_body (&response, &data, &size);
+ if (data == NULL || size == 0)
+ goto no_describe;
+
+ GST_DEBUG_OBJECT (src, "parse SDP...");
+ gst_sdp_message_new (sdp);
+ gst_sdp_message_parse_buffer (data, size, *sdp);
+
+ /* clean up any messages */
+ gst_rtsp_message_unset (&request);
+ gst_rtsp_message_unset (&response);
+
+ return res;
+
+ /* ERRORS */
+ no_url:
+ {
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_INVALID_URL,
++ "No valid RTSP URL was provided");
++#else
+ GST_ELEMENT_ERROR (src, RESOURCE, NOT_FOUND, (NULL),
+ ("No valid RTSP URL was provided"));
++#endif
+ goto cleanup_error;
+ }
+ connect_failed:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
+ if (res != GST_RTSP_EINTR) {
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONNECTION_FAIL,
++ "Failed to connect.");
++#else
+ GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ_WRITE, (NULL),
+ ("Failed to connect. (%s)", str));
++#endif
+ } else {
+ GST_WARNING_OBJECT (src, "connect interrupted");
+ }
+ g_free (str);
+ goto cleanup_error;
+ }
+ create_request_failed:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_REQUEST,
++ "Could not create request.");
++#else
+ GST_ELEMENT_ERROR (src, LIBRARY, INIT, (NULL),
+ ("Could not create request. (%s)", str));
++#endif
+ g_free (str);
+ goto cleanup_error;
+ }
+ send_error:
+ {
+ /* Don't post a message - the rtsp_send method will have
+ * taken care of it because we passed NULL for the response code */
+ goto cleanup_error;
+ }
+ methods_error:
+ {
+ /* error was posted */
+ res = GST_RTSP_ERROR;
+ goto cleanup_error;
+ }
+ wrong_content_type:
+ {
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_OPTION_NOT_SUPPORTED,
++ "Server does not support SDP. ");
++#else
+ GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
+ ("Server does not support SDP, got %s.", respcont));
++#endif
+ res = GST_RTSP_ERROR;
+ goto cleanup_error;
+ }
+ no_describe:
+ {
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_METHOD_NOT_ALLOWED,
++ "Server can not provide an SDP.");
++#else
+ GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
+ ("Server can not provide an SDP."));
++#endif
+ res = GST_RTSP_ERROR;
+ goto cleanup_error;
+ }
+ cleanup_error:
+ {
+ if (src->conninfo.connection) {
+ GST_DEBUG_OBJECT (src, "free connection");
+ gst_rtsp_conninfo_close (src, &src->conninfo, TRUE);
+ }
+ gst_rtsp_message_unset (&request);
+ gst_rtsp_message_unset (&response);
+ return res;
+ }
+ }
+
+ static GstRTSPResult
+ gst_rtspsrc_open (GstRTSPSrc * src, gboolean async)
+ {
+ GstRTSPResult ret;
+
+ src->methods =
+ GST_RTSP_SETUP | GST_RTSP_PLAY | GST_RTSP_PAUSE | GST_RTSP_TEARDOWN;
+
+ if (src->sdp == NULL) {
+ if ((ret = gst_rtspsrc_retrieve_sdp (src, &src->sdp, async)) < 0)
+ goto no_sdp;
+ }
+
+ if ((ret = gst_rtspsrc_open_from_sdp (src, src->sdp, async)) < 0)
+ goto open_failed;
+
+ if (src->initial_seek) {
+ if (!gst_rtspsrc_perform_seek (src, src->initial_seek))
+ goto initial_seek_failed;
+ gst_event_replace (&src->initial_seek, NULL);
+ }
+
+ done:
+ if (async)
+ gst_rtspsrc_loop_end_cmd (src, CMD_OPEN, ret);
+
+ return ret;
+
+ /* ERRORS */
+ no_sdp:
+ {
+ GST_WARNING_OBJECT (src, "can't get sdp");
+ src->open_error = TRUE;
+ goto done;
+ }
+ open_failed:
+ {
+ GST_WARNING_OBJECT (src, "can't setup streaming from sdp");
+ src->open_error = TRUE;
+ goto done;
+ }
+ initial_seek_failed:
+ {
+ GST_WARNING_OBJECT (src, "Failed to perform initial seek");
+ ret = GST_RTSP_ERROR;
+ src->open_error = TRUE;
+ goto done;
+ }
+ }
+
+ static GstRTSPResult
+ gst_rtspsrc_close (GstRTSPSrc * src, gboolean async, gboolean only_close)
+ {
+ GstRTSPMessage request = { 0 };
+ GstRTSPMessage response = { 0 };
+ GstRTSPResult res = GST_RTSP_OK;
+ GList *walk;
+ const gchar *control;
+
+ GST_DEBUG_OBJECT (src, "TEARDOWN...");
+
+ gst_rtspsrc_set_state (src, GST_STATE_READY);
+
+ if (src->state < GST_RTSP_STATE_READY) {
+ GST_DEBUG_OBJECT (src, "not ready, doing cleanup");
+ goto close;
+ }
+
+ if (only_close)
+ goto close;
+
+ /* construct a control url */
+ control = get_aggregate_control (src);
+
+ if (!(src->methods & (GST_RTSP_PLAY | GST_RTSP_TEARDOWN)))
+ goto not_supported;
+
+ for (walk = src->streams; walk; walk = g_list_next (walk)) {
+ GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+ const gchar *setup_url;
+ GstRTSPConnInfo *info;
+
+ /* try aggregate control first but do non-aggregate control otherwise */
+ if (control)
+ setup_url = control;
+ else if ((setup_url = stream->conninfo.location) == NULL)
+ continue;
+
+ if (src->conninfo.connection) {
+ info = &src->conninfo;
+ } else if (stream->conninfo.connection) {
+ info = &stream->conninfo;
+ } else {
+ continue;
+ }
+ if (!info->connected)
+ goto next;
+
+ /* do TEARDOWN */
+ res =
+ gst_rtspsrc_init_request (src, &request, GST_RTSP_TEARDOWN, setup_url);
+ GST_LOG_OBJECT (src, "Teardown on %s", setup_url);
+ if (res < 0)
+ goto create_request_failed;
+
+ if (stream->is_backchannel && src->backchannel == BACKCHANNEL_ONVIF)
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_REQUIRE,
+ BACKCHANNEL_ONVIF_HDR_REQUIRE_VAL);
+
+ if (async)
+ GST_ELEMENT_PROGRESS (src, CONTINUE, "close", ("Closing stream"));
+
+ if ((res =
+ gst_rtspsrc_send (src, info, &request, &response, NULL, NULL)) < 0)
+ goto send_error;
+
+ /* FIXME, parse result? */
+ gst_rtsp_message_unset (&request);
+ gst_rtsp_message_unset (&response);
+
+ next:
+ /* early exit when we did aggregate control */
+ if (control)
+ break;
+ }
+
+ close:
+ /* close connections */
+ GST_DEBUG_OBJECT (src, "closing connection...");
+ gst_rtsp_conninfo_close (src, &src->conninfo, TRUE);
+ for (walk = src->streams; walk; walk = g_list_next (walk)) {
+ GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+ gst_rtsp_conninfo_close (src, &stream->conninfo, TRUE);
+ }
+
+ /* cleanup */
+ gst_rtspsrc_cleanup (src);
+
+ src->state = GST_RTSP_STATE_INVALID;
+
+ if (async)
+ gst_rtspsrc_loop_end_cmd (src, CMD_CLOSE, res);
+
+ return res;
+
+ /* ERRORS */
+ create_request_failed:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_REQUEST,
++ "Could not create request.");
++#else
+ GST_ELEMENT_ERROR (src, LIBRARY, INIT, (NULL),
+ ("Could not create request. (%s)", str));
++#endif
+ g_free (str);
+ goto close;
+ }
+ send_error:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
+ gst_rtsp_message_unset (&request);
+ if (res != GST_RTSP_EINTR) {
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONNECTION_FAIL,
++ "Could not send message.");
++#else
+ GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
+ ("Could not send message. (%s)", str));
++#endif
+ } else {
+ GST_WARNING_OBJECT (src, "TEARDOWN interrupted");
+ }
+ g_free (str);
+ goto close;
+ }
+ not_supported:
+ {
+ GST_DEBUG_OBJECT (src,
+ "TEARDOWN and PLAY not supported, can't do TEARDOWN");
+ goto close;
+ }
+ }
+
+ /* RTP-Info is of the format:
+ *
+ * url=<URL>;[seq=<seqbase>;rtptime=<timebase>] [, url=...]
+ *
+ * rtptime corresponds to the timestamp for the NPT time given in the header
+ * seqbase corresponds to the next sequence number we received. This number
+ * indicates the first seqnum after the seek and should be used to discard
+ * packets that are from before the seek.
+ */
+ static gboolean
+ gst_rtspsrc_parse_rtpinfo (GstRTSPSrc * src, gchar * rtpinfo)
+ {
+ gchar **infos;
+ gint i, j;
+
+ GST_DEBUG_OBJECT (src, "parsing RTP-Info %s", rtpinfo);
+
+ infos = g_strsplit (rtpinfo, ",", 0);
+ for (i = 0; infos[i]; i++) {
+ gchar **fields;
+ GstRTSPStream *stream;
+ gint32 seqbase;
+ gint64 timebase;
+
+ GST_DEBUG_OBJECT (src, "parsing info %s", infos[i]);
+
+ /* init values, types of seqbase and timebase are bigger than needed so we
+ * can store -1 as uninitialized values */
+ stream = NULL;
+ seqbase = -1;
+ timebase = -1;
+
+ /* parse url, find stream for url.
+ * parse seq and rtptime. The seq number should be configured in the rtp
+ * depayloader or session manager to detect gaps. Same for the rtptime, it
+ * should be used to create an initial time newsegment. */
+ fields = g_strsplit (infos[i], ";", 0);
+ for (j = 0; fields[j]; j++) {
+ GST_DEBUG_OBJECT (src, "parsing field %s", fields[j]);
+ /* remove leading whitespace */
+ fields[j] = g_strchug (fields[j]);
+ if (g_str_has_prefix (fields[j], "url=")) {
+ /* get the url and the stream */
+ stream =
+ find_stream (src, (fields[j] + 4), (gpointer) find_stream_by_setup);
+ } else if (g_str_has_prefix (fields[j], "seq=")) {
+ seqbase = atoi (fields[j] + 4);
+ } else if (g_str_has_prefix (fields[j], "rtptime=")) {
+ timebase = g_ascii_strtoll (fields[j] + 8, NULL, 10);
+ }
+ }
+ g_strfreev (fields);
+ /* now we need to store the values for the caps of the stream */
+ if (stream != NULL) {
+ GST_DEBUG_OBJECT (src,
+ "found stream %p, setting: seqbase %d, timebase %" G_GINT64_FORMAT,
+ stream, seqbase, timebase);
+
+ /* we have a stream, configure detected params */
+ stream->seqbase = seqbase;
+ stream->timebase = timebase;
+ }
+ }
+ g_strfreev (infos);
+
+ return TRUE;
+ }
+
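+ /* Apply a server-provided minimum RTCP interval, given as a decimal string
+ * in milliseconds (e.g. "250" for 250 ms), to every RTP session. */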
+ static void
+ gst_rtspsrc_handle_rtcp_interval (GstRTSPSrc * src, gchar * rtcp)
+ {
+ guint64 interval;
+ GList *walk;
+
+ interval = strtoul (rtcp, NULL, 10);
+ GST_DEBUG_OBJECT (src, "rtcp interval: %" G_GUINT64_FORMAT " ms", interval);
+
+ if (!interval)
+ return;
+
+ interval *= GST_MSECOND;
+
+ for (walk = src->streams; walk; walk = g_list_next (walk)) {
+ GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+
+ /* already (optionally) retrieved this when configuring manager */
+ if (stream->session) {
+ GObject *rtpsession = stream->session;
+
+ GST_DEBUG_OBJECT (src, "configure rtcp interval in session %p",
+ rtpsession);
+ g_object_set (rtpsession, "rtcp-min-interval", interval, NULL);
+ }
+ }
+
+ /* it happens that the (Xenon) server sending this may also provide bogus
+ * RTCP SR sync data (i.e. with quite some jitter), so ignore those
+ * and just use RTP-Info to sync */
+ if (src->manager) {
+ GObjectClass *klass;
+
+ klass = G_OBJECT_GET_CLASS (G_OBJECT (src->manager));
+ if (g_object_class_find_property (klass, "rtcp-sync")) {
+ GST_DEBUG_OBJECT (src, "configuring rtp sync method");
+ g_object_set (src->manager, "rtcp-sync", RTCP_SYNC_RTP, NULL);
+ }
+ }
+ }
+
+ static gdouble
+ gst_rtspsrc_get_float (const gchar * dstr)
+ {
+ gchar s[G_ASCII_DTOSTR_BUF_SIZE] = { 0, };
+
+ /* canonicalise floating point string so we can handle float strings
+ * in the form "24.930" or "24,930" irrespective of the current locale */
+ g_strlcpy (s, dstr, sizeof (s));
+ g_strdelimit (s, ",", '.');
+ return g_ascii_strtod (s, NULL);
+ }
+
+ static gchar *
+ gen_range_header (GstRTSPSrc * src, GstSegment * segment)
+ {
+ GstRTSPTimeRange range = { 0, };
+ gdouble begin_seconds, end_seconds;
+
+ if (segment->rate > 0) {
+ begin_seconds = (gdouble) segment->start / GST_SECOND;
+ end_seconds = (gdouble) segment->stop / GST_SECOND;
+ } else {
+ begin_seconds = (gdouble) segment->stop / GST_SECOND;
+ end_seconds = (gdouble) segment->start / GST_SECOND;
+ }
+
+ if (src->onvif_mode) {
+ GDateTime *prime_epoch, *datetime;
+
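+ /* ONVIF replay uses UTC clock ranges: segment times are interpreted as
+ * seconds since the NTP prime epoch (1900-01-01T00:00:00Z) and converted
+ * below into a calendar date plus seconds since midnight. */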
+ range.unit = GST_RTSP_RANGE_CLOCK;
+
+ prime_epoch = g_date_time_new_utc (1900, 1, 1, 0, 0, 0);
+
+ datetime = g_date_time_add_seconds (prime_epoch, begin_seconds);
+
+ range.min.type = GST_RTSP_TIME_UTC;
+ range.min2.year = g_date_time_get_year (datetime);
+ range.min2.month = g_date_time_get_month (datetime);
+ range.min2.day = g_date_time_get_day_of_month (datetime);
+ range.min.seconds =
+ g_date_time_get_seconds (datetime) +
+ g_date_time_get_minute (datetime) * 60 +
+ g_date_time_get_hour (datetime) * 60 * 60;
+
+ g_date_time_unref (datetime);
+
+ datetime = g_date_time_add_seconds (prime_epoch, end_seconds);
+
+ range.max.type = GST_RTSP_TIME_UTC;
+ range.max2.year = g_date_time_get_year (datetime);
+ range.max2.month = g_date_time_get_month (datetime);
+ range.max2.day = g_date_time_get_day_of_month (datetime);
+ range.max.seconds =
+ g_date_time_get_seconds (datetime) +
+ g_date_time_get_minute (datetime) * 60 +
+ g_date_time_get_hour (datetime) * 60 * 60;
+
+ g_date_time_unref (datetime);
+ g_date_time_unref (prime_epoch);
+ } else {
++
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ if (src->start_position != 0 && segment->position == 0) {
++ segment->position = src->start_position;
++ src->start_position = 0;
++ }
++#endif
+ range.unit = GST_RTSP_RANGE_NPT;
+
+ if (src->range && src->range->min.type == GST_RTSP_TIME_NOW) {
+ range.min.type = GST_RTSP_TIME_NOW;
+ } else {
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ if (segment->position != 0)
++ begin_seconds = (gdouble) segment->position / GST_SECOND;
++#endif
+ range.min.type = GST_RTSP_TIME_SECONDS;
+ range.min.seconds = begin_seconds;
+ }
+
+ if (src->range && src->range->max.type == GST_RTSP_TIME_END) {
+ range.max.type = GST_RTSP_TIME_END;
+ } else {
+ range.max.type = GST_RTSP_TIME_SECONDS;
+ range.max.seconds = end_seconds;
+ }
+ }
+
+ /* Don't set end bounds when not required to */
+ if (!GST_CLOCK_TIME_IS_VALID (segment->stop)) {
+ if (segment->rate > 0)
+ range.max.type = GST_RTSP_TIME_END;
+ else
+ range.min.type = GST_RTSP_TIME_END;
+ }
+
+ return gst_rtsp_range_to_string (&range);
+ }
+
+ static void
+ clear_rtp_base (GstRTSPSrc * src, GstRTSPStream * stream)
+ {
+ guint i, len;
+
+ stream->timebase = -1;
+ stream->seqbase = -1;
+
+ len = stream->ptmap->len;
+ for (i = 0; i < len; i++) {
+ PtMapItem *item = &g_array_index (stream->ptmap, PtMapItem, i);
+ GstStructure *s;
+
+ if (item->caps == NULL)
+ continue;
+
+ item->caps = gst_caps_make_writable (item->caps);
+ s = gst_caps_get_structure (item->caps, 0);
+ gst_structure_remove_fields (s, "clock-base", "seqnum-base", NULL);
+ if (item->pt == stream->default_pt && stream->udpsrc[0])
+ g_object_set (stream->udpsrc[0], "caps", item->caps, NULL);
+ }
+ stream->need_caps = TRUE;
+ }
+
+ static GstRTSPResult
+ gst_rtspsrc_ensure_open (GstRTSPSrc * src, gboolean async)
+ {
+ GstRTSPResult res = GST_RTSP_OK;
+
+ if (src->state < GST_RTSP_STATE_READY) {
+ res = GST_RTSP_ERROR;
+ if (src->open_error) {
+ GST_DEBUG_OBJECT (src, "the stream was in error");
+ goto done;
+ }
+ if (async)
+ gst_rtspsrc_loop_start_cmd (src, CMD_OPEN);
+
+ if ((res = gst_rtspsrc_open (src, async)) < 0) {
+ GST_DEBUG_OBJECT (src, "failed to open stream");
+ goto done;
+ }
+ }
+
+ done:
+ return res;
+ }
+
+ static GstRTSPResult
+ gst_rtspsrc_play (GstRTSPSrc * src, GstSegment * segment, gboolean async,
+ const gchar * seek_style)
+ {
+ GstRTSPMessage request = { 0 };
+ GstRTSPMessage response = { 0 };
+ GstRTSPResult res = GST_RTSP_OK;
+ GList *walk;
+ gchar *hval;
+ gint hval_idx;
+ const gchar *control;
+ GstSegment requested;
+
+ GST_DEBUG_OBJECT (src, "PLAY...");
+
+ restart:
+ if ((res = gst_rtspsrc_ensure_open (src, async)) < 0)
+ goto open_failed;
+
+ if (!(src->methods & GST_RTSP_PLAY))
+ goto not_supported;
+
+ if (src->state == GST_RTSP_STATE_PLAYING)
+ goto was_playing;
+
+ if (!src->conninfo.connection || !src->conninfo.connected)
+ goto done;
+
+ requested = *segment;
+
+ /* send some dummy packets before we activate the receive in the
+ * udp sources */
+ gst_rtspsrc_send_dummy_packets (src);
+
+ /* require new SR packets */
+ if (src->manager)
+ g_signal_emit_by_name (src->manager, "reset-sync", NULL);
+
+ /* construct a control url */
+ control = get_aggregate_control (src);
+
+ for (walk = src->streams; walk; walk = g_list_next (walk)) {
+ GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+ const gchar *setup_url;
+ GstRTSPConnInfo *conninfo;
+
+ /* try aggregate control first but do non-aggregate control otherwise */
+ if (control)
+ setup_url = control;
+ else if ((setup_url = stream->conninfo.location) == NULL)
+ continue;
+
+ if (src->conninfo.connection) {
+ conninfo = &src->conninfo;
+ } else if (stream->conninfo.connection) {
+ conninfo = &stream->conninfo;
+ } else {
+ continue;
+ }
+
+ /* do play */
+ res = gst_rtspsrc_init_request (src, &request, GST_RTSP_PLAY, setup_url);
+ if (res < 0)
+ goto create_request_failed;
+
+ if (src->need_range && src->seekable >= 0.0) {
++#ifndef TIZEN_FEATURE_RTSP_MODIFICATION
+ hval = gen_range_header (src, segment);
+
+ gst_rtsp_message_take_header (&request, GST_RTSP_HDR_RANGE, hval);
+
++#endif
+ /* store the newsegment event so it can be sent from the streaming thread. */
+ src->need_segment = TRUE;
+ }
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ else {
++ /* update the position with the current MSL position, as
++ * gst_rtspsrc_get_position() does not return the correct position */
++ GST_DEBUG_OBJECT (src,
++ "during normal pause-resume, segment->position=%" GST_TIME_FORMAT
++ ", src->start_position=%" GST_TIME_FORMAT,
++ GST_TIME_ARGS (segment->position),
++ GST_TIME_ARGS (src->start_position));
++ segment->position = src->last_pos;
++ }
++
++ /* send an npt Range header with every PLAY request so that the segment
++ * position is updated properly */
++ hval = gen_range_header (src, segment);
++ gst_rtsp_message_take_header (&request, GST_RTSP_HDR_RANGE, hval);
++#endif
+
+ if (segment->rate != 1.0) {
+ gchar scale_val[G_ASCII_DTOSTR_BUF_SIZE];
+ gchar speed_val[G_ASCII_DTOSTR_BUF_SIZE];
+
+ if (src->server_side_trickmode) {
+ g_ascii_dtostr (scale_val, sizeof (scale_val), segment->rate);
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_SCALE, scale_val);
+ } else if (segment->rate < 0.0) {
+ g_ascii_dtostr (scale_val, sizeof (scale_val), -1.0);
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_SCALE, scale_val);
+
+ if (ABS (segment->rate) != 1.0) {
+ g_ascii_dtostr (speed_val, sizeof (speed_val), ABS (segment->rate));
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_SPEED, speed_val);
+ }
+ } else {
+ g_ascii_dtostr (speed_val, sizeof (speed_val), segment->rate);
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_SPEED, speed_val);
+ }
+ }
+
+ if (src->onvif_mode) {
+ if (segment->flags & GST_SEEK_FLAG_TRICKMODE_KEY_UNITS) {
+ gchar *hval;
+
+ if (src->trickmode_interval)
+ hval =
+ g_strdup_printf ("intra/%" G_GUINT64_FORMAT,
+ src->trickmode_interval / GST_MSECOND);
+ else
+ hval = g_strdup ("intra");
+
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_FRAMES, hval);
+
+ g_free (hval);
+ } else if (segment->flags & GST_SEEK_FLAG_TRICKMODE_FORWARD_PREDICTED) {
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_FRAMES,
+ "predicted");
+ }
+ }
+
+ if (seek_style)
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_SEEK_STYLE,
+ seek_style);
+
+ /* when we have an ONVIF audio backchannel, the PLAY request must have the
+ * Require: header when doing either aggregate or non-aggregate control */
+ if (src->backchannel == BACKCHANNEL_ONVIF &&
+ (control || stream->is_backchannel))
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_REQUIRE,
+ BACKCHANNEL_ONVIF_HDR_REQUIRE_VAL);
+
+ if (src->onvif_mode) {
+ if (src->onvif_rate_control)
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_RATE_CONTROL,
+ "yes");
+ else
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_RATE_CONTROL, "no");
+ }
+
+ if (async)
+ GST_ELEMENT_PROGRESS (src, CONTINUE, "request", ("Sending PLAY request"));
+
+ if ((res =
+ gst_rtspsrc_send (src, conninfo, &request, &response, NULL, NULL))
+ < 0)
+ goto send_error;
+
+ if (src->need_redirect) {
+ GST_DEBUG_OBJECT (src,
+ "redirect: tearing down and restarting with new url");
+ /* teardown and restart with new url */
+ gst_rtspsrc_close (src, TRUE, FALSE);
+ /* reset protocols to force re-negotiation with redirected url */
+ src->cur_protocols = src->protocols;
+ gst_rtsp_message_unset (&request);
+ gst_rtsp_message_unset (&response);
+ goto restart;
+ }
+
+ /* seek may have silently failed as it is not supported */
+ if (!(src->methods & GST_RTSP_PLAY)) {
+ GST_DEBUG_OBJECT (src, "PLAY Range not supported; re-enable PLAY");
+
+ if (src->version >= GST_RTSP_VERSION_2_0 && src->seekable >= 0.0) {
+ GST_WARNING_OBJECT (src, "Server declared stream as seekable but"
+ " playing with range failed... Ignoring information.");
+ }
+ /* obviously it is supported as we made it here */
+ src->methods |= GST_RTSP_PLAY;
+ src->seekable = -1.0;
+ /* but there is nothing to parse in the response,
+ * so convey we have no idea and not to expect anything particular */
+ clear_rtp_base (src, stream);
+ if (control) {
+ GList *run;
+
+ /* need to do for all streams */
+ for (run = src->streams; run; run = g_list_next (run))
+ clear_rtp_base (src, (GstRTSPStream *) run->data);
+ }
+ /* NOTE the above also disables npt based eos detection */
+ /* and below forces position to 0,
+ * which is visible feedback we lost the plot */
+ segment->start = segment->position = src->last_pos;
+ }
+
+ gst_rtsp_message_unset (&request);
+
+ /* parse RTP npt field. This is the current position in the stream (Normal
+ * Play Time) and should be put in the NEWSEGMENT position field. */
+ if (gst_rtsp_message_get_header (&response, GST_RTSP_HDR_RANGE, &hval,
+ 0) == GST_RTSP_OK)
+ gst_rtspsrc_parse_range (src, hval, segment, FALSE);
+
+ /* assume 1.0 rate now, overwrite when the SCALE or SPEED headers are present. */
+ segment->rate = 1.0;
+
+ /* parse Speed header. This is the intended playback rate of the stream
+ * and should be put in the NEWSEGMENT rate field. */
+ if (gst_rtsp_message_get_header (&response, GST_RTSP_HDR_SPEED, &hval,
+ 0) == GST_RTSP_OK) {
+ segment->rate = gst_rtspsrc_get_float (hval);
+ } else if (gst_rtsp_message_get_header (&response, GST_RTSP_HDR_SCALE,
+ &hval, 0) == GST_RTSP_OK) {
+ segment->rate = gst_rtspsrc_get_float (hval);
+ }
+
+ /* parse the RTP-Info header field (if ANY) to get the base seqnum and timestamp
+ * for the RTP packets. If this is not present, we assume all starts from 0...
+ * This is info for the RTP session manager that we pass to it in caps. */
+ hval_idx = 0;
+ while (gst_rtsp_message_get_header (&response, GST_RTSP_HDR_RTP_INFO,
+ &hval, hval_idx++) == GST_RTSP_OK)
+ gst_rtspsrc_parse_rtpinfo (src, hval);
+
+ /* some servers indicate RTCP parameters in PLAY response,
+ * rather than properly in SDP */
+ if (gst_rtsp_message_get_header (&response, GST_RTSP_HDR_RTCP_INTERVAL,
+ &hval, 0) == GST_RTSP_OK)
+ gst_rtspsrc_handle_rtcp_interval (src, hval);
+
+ gst_rtsp_message_unset (&response);
+
+ /* early exit when we did aggregate control */
+ if (control)
+ break;
+ }
+
+ src->out_segment = *segment;
+
+ if (src->clip_out_segment) {
+ /* Only clip the output segment when the server has answered with valid
+ * values, we cannot know otherwise whether the requested bounds were
+ * available */
+ if (GST_CLOCK_TIME_IS_VALID (src->segment.start) &&
+ GST_CLOCK_TIME_IS_VALID (requested.start))
+ src->out_segment.start = MAX (src->out_segment.start, requested.start);
+ if (GST_CLOCK_TIME_IS_VALID (src->segment.stop) &&
+ GST_CLOCK_TIME_IS_VALID (requested.stop))
+ src->out_segment.stop = MIN (src->out_segment.stop, requested.stop);
+ }
+
+ /* configure the caps of the streams after we parsed all headers. Only reset
+ * the manager object when we set a new Range header (we did a seek) */
+ gst_rtspsrc_configure_caps (src, segment, src->need_range);
+
+ /* set to PLAYING after we have configured the caps, otherwise we
+ * might end up calling request_key (with SRTP) while caps are still
+ * being configured. */
+ gst_rtspsrc_set_state (src, GST_STATE_PLAYING);
+
+ /* set again when needed */
+ src->need_range = FALSE;
+
+ src->running = TRUE;
+ src->base_time = -1;
+ src->state = GST_RTSP_STATE_PLAYING;
+
+ /* mark discont */
+ GST_DEBUG_OBJECT (src, "mark DISCONT, we did a seek to another position");
+ for (walk = src->streams; walk; walk = g_list_next (walk)) {
+ GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+ stream->discont = TRUE;
+ }
+
+ done:
+ if (async)
+ gst_rtspsrc_loop_end_cmd (src, CMD_PLAY, res);
+
+ return res;
+
+ /* ERRORS */
+ open_failed:
+ {
+ GST_WARNING_OBJECT (src, "failed to open stream");
+ goto done;
+ }
+ not_supported:
+ {
+ GST_WARNING_OBJECT (src, "PLAY is not supported");
+ goto done;
+ }
+ was_playing:
+ {
+ GST_WARNING_OBJECT (src, "we were already PLAYING");
+ goto done;
+ }
+ create_request_failed:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_REQUEST,
++ "Could not create request. ");
++#else
+ GST_ELEMENT_ERROR (src, LIBRARY, INIT, (NULL),
+ ("Could not create request. (%s)", str));
++#endif
+ g_free (str);
+ goto done;
+ }
+ send_error:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
+ gst_rtsp_message_unset (&request);
+ if (res != GST_RTSP_EINTR) {
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONNECTION_FAIL,
++ "Could not send message.");
++#else
+ GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
+ ("Could not send message. (%s)", str));
++#endif
+ } else {
+ GST_WARNING_OBJECT (src, "PLAY interrupted");
+ }
+ g_free (str);
+ goto done;
+ }
+ }
+
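+ /* Send a PAUSE request, again preferring aggregate control, then set the
+ * internal elements to PAUSED and move the RTSP state back to READY. */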
+ static GstRTSPResult
+ gst_rtspsrc_pause (GstRTSPSrc * src, gboolean async)
+ {
+ GstRTSPResult res = GST_RTSP_OK;
+ GstRTSPMessage request = { 0 };
+ GstRTSPMessage response = { 0 };
+ GList *walk;
+ const gchar *control;
+
+ GST_DEBUG_OBJECT (src, "PAUSE...");
+
+ if ((res = gst_rtspsrc_ensure_open (src, async)) < 0)
+ goto open_failed;
+
+ if (!(src->methods & GST_RTSP_PAUSE))
+ goto not_supported;
+
+ if (src->state == GST_RTSP_STATE_READY)
+ goto was_paused;
+
+ if (!src->conninfo.connection || !src->conninfo.connected)
+ goto no_connection;
+
+ /* construct a control url */
+ control = get_aggregate_control (src);
+
+ /* loop over the streams. We might exit the loop early when we could do an
+ * aggregate control */
+ for (walk = src->streams; walk; walk = g_list_next (walk)) {
+ GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+ GstRTSPConnInfo *conninfo;
+ const gchar *setup_url;
+
+ /* try aggregate control first but do non-aggregate control otherwise */
+ if (control)
+ setup_url = control;
+ else if ((setup_url = stream->conninfo.location) == NULL)
+ continue;
+
+ if (src->conninfo.connection) {
+ conninfo = &src->conninfo;
+ } else if (stream->conninfo.connection) {
+ conninfo = &stream->conninfo;
+ } else {
+ continue;
+ }
+
+ if (async)
+ GST_ELEMENT_PROGRESS (src, CONTINUE, "request",
+ ("Sending PAUSE request"));
+
+ if ((res =
+ gst_rtspsrc_init_request (src, &request, GST_RTSP_PAUSE,
+ setup_url)) < 0)
+ goto create_request_failed;
+
+ /* when we have an ONVIF audio backchannel, the PAUSE request must have the
+ * Require: header when doing either aggregate or non-aggregate control */
+ if (src->backchannel == BACKCHANNEL_ONVIF &&
+ (control || stream->is_backchannel))
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_REQUIRE,
+ BACKCHANNEL_ONVIF_HDR_REQUIRE_VAL);
+
+ if ((res =
+ gst_rtspsrc_send (src, conninfo, &request, &response, NULL,
+ NULL)) < 0)
+ goto send_error;
+
+ gst_rtsp_message_unset (&request);
+ gst_rtsp_message_unset (&response);
+
+ /* exit early when we did aggregate control */
+ if (control)
+ break;
+ }
+
+ /* change element states now */
+ gst_rtspsrc_set_state (src, GST_STATE_PAUSED);
+
+ no_connection:
+ src->state = GST_RTSP_STATE_READY;
+
+ done:
+ if (async)
+ gst_rtspsrc_loop_end_cmd (src, CMD_PAUSE, res);
+
+ return res;
+
+ /* ERRORS */
+ open_failed:
+ {
+ GST_DEBUG_OBJECT (src, "failed to open stream");
+ goto done;
+ }
+ not_supported:
+ {
+ GST_DEBUG_OBJECT (src, "PAUSE is not supported");
+ goto done;
+ }
+ was_paused:
+ {
+ GST_DEBUG_OBJECT (src, "we were already PAUSED");
+ goto done;
+ }
+ create_request_failed:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_BAD_REQUEST,
++ "Could not create request.");
++#else
+ GST_ELEMENT_ERROR (src, LIBRARY, INIT, (NULL),
+ ("Could not create request. (%s)", str));
++#endif
+ g_free (str);
+ goto done;
+ }
+ send_error:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
+ gst_rtsp_message_unset (&request);
+ if (res != GST_RTSP_EINTR) {
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ gst_rtspsrc_post_error_message (src, GST_RTSPSRC_ERROR_CONNECTION_FAIL,
++ "Could not send message.");
++#else
+ GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
+ ("Could not send message. (%s)", str));
++#endif
+ } else {
+ GST_WARNING_OBJECT (src, "PAUSE interrupted");
+ }
+ g_free (str);
+ goto done;
+ }
+ }
+
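+ /* Intercept bus messages from the internal elements: STREAM_START and EOS
+ * are swallowed, the first UDP timeout triggers a RECONNECT command, and an
+ * error from a udpsrc is only forwarded when all streams have failed. */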
+ static void
+ gst_rtspsrc_handle_message (GstBin * bin, GstMessage * message)
+ {
+ GstRTSPSrc *rtspsrc;
+
+ rtspsrc = GST_RTSPSRC (bin);
+
+ switch (GST_MESSAGE_TYPE (message)) {
+ case GST_MESSAGE_STREAM_START:
+ case GST_MESSAGE_EOS:
+ gst_message_unref (message);
+ break;
+ case GST_MESSAGE_ELEMENT:
+ {
+ const GstStructure *s = gst_message_get_structure (message);
+
+ if (gst_structure_has_name (s, "GstUDPSrcTimeout")) {
+ gboolean ignore_timeout;
+
+ GST_DEBUG_OBJECT (bin, "timeout on UDP port");
+
+ GST_OBJECT_LOCK (rtspsrc);
+ ignore_timeout = rtspsrc->ignore_timeout;
+ rtspsrc->ignore_timeout = TRUE;
+ GST_OBJECT_UNLOCK (rtspsrc);
+
+ /* we only act on the first udp timeout message, others are irrelevant
+ * and can be ignored. */
+ if (!ignore_timeout)
+ gst_rtspsrc_loop_send_cmd (rtspsrc, CMD_RECONNECT, CMD_LOOP);
+ /* eat and free */
+ gst_message_unref (message);
+ return;
+ }
+ GST_BIN_CLASS (parent_class)->handle_message (bin, message);
+ break;
+ }
+ case GST_MESSAGE_ERROR:
+ {
+ GstObject *udpsrc;
+ GstRTSPStream *stream;
+ GstFlowReturn ret;
+
+ udpsrc = GST_MESSAGE_SRC (message);
+
+ GST_DEBUG_OBJECT (rtspsrc, "got error from %s",
+ GST_ELEMENT_NAME (udpsrc));
+
+ stream = find_stream (rtspsrc, udpsrc, (gpointer) find_stream_by_udpsrc);
+ if (!stream)
+ goto forward;
+
+ /* we ignore the RTCP udpsrc */
+ if (stream->udpsrc[1] == GST_ELEMENT_CAST (udpsrc))
+ goto done;
+
+ /* if we get error messages from the udp sources, that's not a problem as
+ * long as not all of them error out. We also don't really know what the
+ * problem is, the message does not give enough detail... */
+ ret = gst_rtspsrc_combine_flows (rtspsrc, stream, GST_FLOW_NOT_LINKED);
+ GST_DEBUG_OBJECT (rtspsrc, "combined flows: %s", gst_flow_get_name (ret));
+ if (ret != GST_FLOW_OK)
+ goto forward;
+
+ done:
+ gst_message_unref (message);
+ break;
+
+ forward:
+ /* fatal but not our message, forward */
+ GST_BIN_CLASS (parent_class)->handle_message (bin, message);
+ break;
+ }
+ default:
+ {
+ GST_BIN_CLASS (parent_class)->handle_message (bin, message);
+ break;
+ }
+ }
+ }
+
+ /* the thread where everything happens */
+ static void
+ gst_rtspsrc_thread (GstRTSPSrc * src)
+ {
+ gint cmd;
+ ParameterRequest *req = NULL;
+
+ GST_OBJECT_LOCK (src);
+ cmd = src->pending_cmd;
+ if (cmd == CMD_RECONNECT || cmd == CMD_PLAY || cmd == CMD_PAUSE
+ || cmd == CMD_LOOP || cmd == CMD_OPEN || cmd == CMD_GET_PARAMETER
+ || cmd == CMD_SET_PARAMETER) {
+ if (g_queue_is_empty (&src->set_get_param_q)) {
+ src->pending_cmd = CMD_LOOP;
+ } else {
+ ParameterRequest *next_req;
+ if (cmd == CMD_GET_PARAMETER || cmd == CMD_SET_PARAMETER) {
+ req = g_queue_pop_head (&src->set_get_param_q);
+ }
+ next_req = g_queue_peek_head (&src->set_get_param_q);
+ src->pending_cmd = next_req ? next_req->cmd : CMD_LOOP;
+ }
+ } else
+ src->pending_cmd = CMD_WAIT;
+ GST_DEBUG_OBJECT (src, "got command %s", cmd_to_string (cmd));
+
+ /* we got the message command, so ensure communication is possible again */
+ gst_rtspsrc_connection_flush (src, FALSE);
+
+ src->busy_cmd = cmd;
+ GST_OBJECT_UNLOCK (src);
+
+ switch (cmd) {
+ case CMD_OPEN:
+ gst_rtspsrc_open (src, TRUE);
+ break;
+ case CMD_PLAY:
+ gst_rtspsrc_play (src, &src->segment, TRUE, NULL);
+ break;
+ case CMD_PAUSE:
+ gst_rtspsrc_pause (src, TRUE);
+ break;
+ case CMD_CLOSE:
+ gst_rtspsrc_close (src, TRUE, FALSE);
+ break;
+ case CMD_GET_PARAMETER:
+ gst_rtspsrc_get_parameter (src, req);
+ break;
+ case CMD_SET_PARAMETER:
+ gst_rtspsrc_set_parameter (src, req);
+ break;
+ case CMD_LOOP:
+ gst_rtspsrc_loop (src);
+ break;
+ case CMD_RECONNECT:
+ gst_rtspsrc_reconnect (src, FALSE);
+ break;
+ default:
+ break;
+ }
+
+ GST_OBJECT_LOCK (src);
+ /* No more cmds, wake any waiters */
+ g_cond_broadcast (&src->cmd_cond);
+ /* and go back to sleep */
+ if (src->pending_cmd == CMD_WAIT) {
+ if (src->task)
+ gst_task_pause (src->task);
+ }
+ /* reset waiting */
+ src->busy_cmd = CMD_WAIT;
+ GST_OBJECT_UNLOCK (src);
+ }
+
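+ /* Create the command task that runs gst_rtspsrc_thread under the stream
+ * lock; it only starts doing work once a command is queued. */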
+ static gboolean
+ gst_rtspsrc_start (GstRTSPSrc * src)
+ {
+ GST_DEBUG_OBJECT (src, "starting");
+
+ GST_OBJECT_LOCK (src);
+
+ src->pending_cmd = CMD_WAIT;
+
+ if (src->task == NULL) {
+ src->task = gst_task_new ((GstTaskFunction) gst_rtspsrc_thread, src, NULL);
+ if (src->task == NULL)
+ goto task_error;
+
+ gst_task_set_lock (src->task, GST_RTSP_STREAM_GET_LOCK (src));
+ }
+ GST_OBJECT_UNLOCK (src);
+
+ return TRUE;
+
+ /* ERRORS */
+ task_error:
+ {
+ GST_OBJECT_UNLOCK (src);
+ GST_ERROR_OBJECT (src, "failed to create task");
+ return FALSE;
+ }
+ }
+
+ static gboolean
+ gst_rtspsrc_stop (GstRTSPSrc * src)
+ {
+ GstTask *task;
+
+ GST_DEBUG_OBJECT (src, "stopping");
+
+ /* also cancels pending task */
+ gst_rtspsrc_loop_send_cmd (src, CMD_WAIT, CMD_ALL);
+
+ GST_OBJECT_LOCK (src);
+ if ((task = src->task)) {
+ src->task = NULL;
+ GST_OBJECT_UNLOCK (src);
+
+ gst_task_stop (task);
+
+ /* make sure it is not running */
+ GST_RTSP_STREAM_LOCK (src);
+ GST_RTSP_STREAM_UNLOCK (src);
+
+ /* now wait for the task to finish */
+ gst_task_join (task);
+
+ /* and free the task */
+ gst_object_unref (GST_OBJECT (task));
+
+ GST_OBJECT_LOCK (src);
+ }
+ GST_OBJECT_UNLOCK (src);
+
+ /* ensure synchronously all is closed and clean */
+ gst_rtspsrc_close (src, FALSE, TRUE);
+
+ return TRUE;
+ }
+
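+ /* Map GStreamer state changes onto RTSP commands: OPEN (live) or PLAY when
+ * going READY->PAUSED, PLAY/PAUSE on the PAUSED<->PLAYING transitions of a
+ * live stream, and CLOSE when going back to READY. For live streams the
+ * transition to PAUSED returns NO_PREROLL. */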
+ static GstStateChangeReturn
+ gst_rtspsrc_change_state (GstElement * element, GstStateChange transition)
+ {
+ GstRTSPSrc *rtspsrc;
+ GstStateChangeReturn ret;
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ guint64 end_time;
++#endif
+
+ rtspsrc = GST_RTSPSRC (element);
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ GST_WARNING_OBJECT (rtspsrc, "state change transition: %d", transition);
++#endif
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ if (!gst_rtspsrc_start (rtspsrc))
+ goto start_failed;
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ /* init some state */
+ rtspsrc->cur_protocols = rtspsrc->protocols;
+ /* first attempt, don't ignore timeouts */
+ rtspsrc->ignore_timeout = FALSE;
+ rtspsrc->open_error = FALSE;
+ if (rtspsrc->is_live)
+ gst_rtspsrc_loop_send_cmd (rtspsrc, CMD_OPEN, 0);
+ else
+ gst_rtspsrc_loop_send_cmd (rtspsrc, CMD_PLAY, 0);
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+ set_manager_buffer_mode (rtspsrc);
+ /* fall-through */
+ case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+ if (rtspsrc->is_live) {
+ /* unblock the tcp tasks and make the loop waiting */
+ if (gst_rtspsrc_loop_send_cmd (rtspsrc, CMD_WAIT, CMD_LOOP)) {
+ /* make sure it is waiting before we send PAUSE or PLAY below */
+ GST_RTSP_STREAM_LOCK (rtspsrc);
+ GST_RTSP_STREAM_UNLOCK (rtspsrc);
+ }
+ }
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ rtspsrc->group_id = GST_GROUP_ID_INVALID;
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ if (ret == GST_STATE_CHANGE_FAILURE)
+ goto done;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ ret = GST_STATE_CHANGE_SUCCESS;
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ /* don't change to the PAUSED state before the stream open has
++ completed, see gst_rtspsrc_loop_complete_cmd() */
++ g_mutex_lock (&(rtspsrc)->pause_lock);
++ end_time = g_get_monotonic_time () + 10 * G_TIME_SPAN_SECOND;
++ if (!g_cond_wait_until (&(rtspsrc)->open_end, &(rtspsrc)->pause_lock,
++ end_time)) {
++ GST_WARNING_OBJECT (rtspsrc,
++ "timed out: stream open has not completed yet");
++ }
++ g_mutex_unlock (&(rtspsrc)->pause_lock);
++#endif
+ if (rtspsrc->is_live)
+ ret = GST_STATE_CHANGE_NO_PREROLL;
+ else
+ ret = GST_STATE_CHANGE_SUCCESS;
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+ if (rtspsrc->is_live)
+ gst_rtspsrc_loop_send_cmd (rtspsrc, CMD_PLAY, 0);
+ ret = GST_STATE_CHANGE_SUCCESS;
+ break;
+ case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+ if (rtspsrc->is_live) {
+ /* send pause request and keep the idle task around */
+ gst_rtspsrc_loop_send_cmd (rtspsrc, CMD_PAUSE, CMD_LOOP);
+ }
+ ret = GST_STATE_CHANGE_SUCCESS;
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ rtspsrc->seek_seqnum = GST_SEQNUM_INVALID;
+ gst_rtspsrc_loop_send_cmd_and_wait (rtspsrc, CMD_CLOSE, CMD_ALL,
+ rtspsrc->teardown_timeout);
+ ret = GST_STATE_CHANGE_SUCCESS;
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ gst_rtspsrc_stop (rtspsrc);
+ ret = GST_STATE_CHANGE_SUCCESS;
+ break;
+ default:
+ /* Otherwise it's success, we don't want to return spurious
+ * NO_PREROLL or ASYNC from internal elements as we care for
+ * state changes ourselves here
+ *
+ * This is to catch PAUSED->PAUSED and PLAYING->PLAYING transitions.
+ */
+ if (GST_STATE_TRANSITION_NEXT (transition) == GST_STATE_PAUSED)
+ ret = GST_STATE_CHANGE_NO_PREROLL;
+ else
+ ret = GST_STATE_CHANGE_SUCCESS;
+ break;
+ }
+
+ done:
+ return ret;
+
+ start_failed:
+ {
+ GST_DEBUG_OBJECT (rtspsrc, "start failed");
+ return GST_STATE_CHANGE_FAILURE;
+ }
+ }
+
+ static gboolean
+ gst_rtspsrc_send_event (GstElement * element, GstEvent * event)
+ {
+ gboolean res;
+ GstRTSPSrc *rtspsrc;
+
+ rtspsrc = GST_RTSPSRC (element);
+
+ if (GST_EVENT_TYPE (event) == GST_EVENT_SEEK) {
+ if (rtspsrc->state >= GST_RTSP_STATE_READY) {
+ res = gst_rtspsrc_perform_seek (rtspsrc, event);
+ gst_event_unref (event);
+ } else {
+ /* Store for later use */
+ res = TRUE;
+ rtspsrc->initial_seek = event;
+ }
+ } else if (GST_EVENT_IS_DOWNSTREAM (event)) {
+ res = gst_rtspsrc_push_event (rtspsrc, event);
+ } else {
+ res = GST_ELEMENT_CLASS (parent_class)->send_event (element, event);
+ }
+
+ return res;
+ }
+
+
+ /*** GSTURIHANDLER INTERFACE *************************************************/
+
+ static GstURIType
+ gst_rtspsrc_uri_get_type (GType type)
+ {
+ return GST_URI_SRC;
+ }
+
+ static const gchar *const *
+ gst_rtspsrc_uri_get_protocols (GType type)
+ {
+ static const gchar *protocols[] =
+ { "rtsp", "rtspu", "rtspt", "rtsph", "rtsp-sdp",
+ "rtsps", "rtspsu", "rtspst", "rtspsh", NULL
+ };
+
+ return protocols;
+ }
+
+ static gchar *
+ gst_rtspsrc_uri_get_uri (GstURIHandler * handler)
+ {
+ GstRTSPSrc *src = GST_RTSPSRC (handler);
+
+ /* FIXME: make thread-safe */
+ return g_strdup (src->conninfo.location);
+ }
+
+ static gboolean
+ gst_rtspsrc_uri_set_uri (GstURIHandler * handler, const gchar * uri,
+ GError ** error)
+ {
+ GstRTSPSrc *src;
+ GstRTSPResult res;
+ GstSDPResult sres;
+ GstRTSPUrl *newurl = NULL;
+ GstSDPMessage *sdp = NULL;
+
+ src = GST_RTSPSRC (handler);
+
+ /* same URI, we're fine */
+ if (src->conninfo.location && uri && !strcmp (uri, src->conninfo.location))
+ goto was_ok;
+
+ if (g_str_has_prefix (uri, "rtsp-sdp://")) {
+ sres = gst_sdp_message_new (&sdp);
+ if (sres < 0)
+ goto sdp_failed;
+
+ GST_DEBUG_OBJECT (src, "parsing SDP message");
+ sres = gst_sdp_message_parse_uri (uri, sdp);
+ if (sres < 0)
+ goto invalid_sdp;
+ } else {
+ /* try to parse */
+ GST_DEBUG_OBJECT (src, "parsing URI");
+ if ((res = gst_rtsp_url_parse (uri, &newurl)) < 0)
+ goto parse_error;
+ }
+
+ /* if worked, free previous and store new url object along with the original
+ * location. */
+ GST_DEBUG_OBJECT (src, "configuring URI");
+ g_free (src->conninfo.location);
+ src->conninfo.location = g_strdup (uri);
+ gst_rtsp_url_free (src->conninfo.url);
+ src->conninfo.url = newurl;
+ g_free (src->conninfo.url_str);
+ if (newurl)
+ src->conninfo.url_str = gst_rtsp_url_get_request_uri (src->conninfo.url);
+ else
+ src->conninfo.url_str = NULL;
+
+ if (src->sdp)
+ gst_sdp_message_free (src->sdp);
+ src->sdp = sdp;
+ src->from_sdp = sdp != NULL;
+
+ GST_DEBUG_OBJECT (src, "set uri: %s", GST_STR_NULL (uri));
+ GST_DEBUG_OBJECT (src, "request uri is: %s",
+ GST_STR_NULL (src->conninfo.url_str));
+
+ return TRUE;
+
+ /* Special cases */
+ was_ok:
+ {
+ GST_DEBUG_OBJECT (src, "URI was ok: '%s'", GST_STR_NULL (uri));
+ return TRUE;
+ }
+ sdp_failed:
+ {
+ GST_ERROR_OBJECT (src, "Could not create new SDP (%d)", sres);
+ g_set_error_literal (error, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
+ "Could not create SDP");
+ return FALSE;
+ }
+ invalid_sdp:
+ {
+ GST_ERROR_OBJECT (src, "Not a valid SDP (%d) '%s'", sres,
+ GST_STR_NULL (uri));
+ gst_sdp_message_free (sdp);
+ g_set_error_literal (error, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
+ "Invalid SDP");
+ return FALSE;
+ }
+ parse_error:
+ {
+ GST_ERROR_OBJECT (src, "Not a valid RTSP url '%s' (%d)",
+ GST_STR_NULL (uri), res);
+ g_set_error_literal (error, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
+ "Invalid RTSP URI");
+ return FALSE;
+ }
+ }
+
+ static void
+ gst_rtspsrc_uri_handler_init (gpointer g_iface, gpointer iface_data)
+ {
+ GstURIHandlerInterface *iface = (GstURIHandlerInterface *) g_iface;
+
+ iface->get_type = gst_rtspsrc_uri_get_type;
+ iface->get_protocols = gst_rtspsrc_uri_get_protocols;
+ iface->get_uri = gst_rtspsrc_uri_get_uri;
+ iface->set_uri = gst_rtspsrc_uri_set_uri;
+ }
+
+
+ /* send GET_PARAMETER */
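+ /* This runs from the command thread when the "get-parameter" action signal
+ * queued a request. A minimal usage sketch from application code (assuming
+ * an rtspsrc instance `src`; the parameter name is illustrative):
+ *
+ *   GstPromise *p = gst_promise_new ();
+ *   gboolean ok;
+ *   g_signal_emit_by_name (src, "get-parameter", "position", NULL, p, &ok);
+ *   gst_promise_wait (p);
+ *   // the reply is the "get-parameter-reply" structure built in done: below
+ *   gst_promise_unref (p);
+ */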
+ static GstRTSPResult
+ gst_rtspsrc_get_parameter (GstRTSPSrc * src, ParameterRequest * req)
+ {
+ GstRTSPMessage request = { 0 };
+ GstRTSPMessage response = { 0 };
+ GstRTSPResult res;
+ GstRTSPStatusCode code = GST_RTSP_STS_OK;
+ const gchar *control;
+ gchar *recv_body = NULL;
+ guint recv_body_len;
+
+ GST_DEBUG_OBJECT (src, "creating server get_parameter");
+
+ g_assert (req);
+
+ if ((res = gst_rtspsrc_ensure_open (src, FALSE)) < 0)
+ goto open_failed;
+
+ control = get_aggregate_control (src);
+ if (control == NULL)
+ goto no_control;
+
+ if (!(src->methods & GST_RTSP_GET_PARAMETER))
+ goto not_supported;
+
+ gst_rtspsrc_connection_flush (src, FALSE);
+
+ res = gst_rtsp_message_init_request (&request, GST_RTSP_GET_PARAMETER,
+ control);
+ if (res < 0)
+ goto create_request_failed;
+
+ res = gst_rtsp_message_add_header (&request, GST_RTSP_HDR_CONTENT_TYPE,
+ req->content_type == NULL ? "text/parameters" : req->content_type);
+ if (res < 0)
+ goto add_content_hdr_failed;
+
+ if (req->body && req->body->len) {
+ res =
+ gst_rtsp_message_set_body (&request, (guint8 *) req->body->str,
+ req->body->len);
+ if (res < 0)
+ goto set_body_failed;
+ }
+
+ if ((res = gst_rtspsrc_send (src, &src->conninfo,
+ &request, &response, &code, NULL)) < 0)
+ goto send_error;
+
+ res = gst_rtsp_message_get_body (&response, (guint8 **) & recv_body,
+ &recv_body_len);
+ if (res < 0)
+ goto get_body_failed;
+
+ done:
+ {
+ gst_promise_reply (req->promise,
+ gst_structure_new ("get-parameter-reply",
+ "rtsp-result", G_TYPE_INT, res,
+ "rtsp-code", G_TYPE_INT, code,
+ "rtsp-reason", G_TYPE_STRING, gst_rtsp_status_as_text (code),
+ "body", G_TYPE_STRING, GST_STR_NULL (recv_body), NULL));
+ free_param_data (req);
+
+ gst_rtsp_message_unset (&request);
+ gst_rtsp_message_unset (&response);
+
+ return res;
+ }
+
+ /* ERRORS */
+ open_failed:
+ {
+ GST_DEBUG_OBJECT (src, "failed to open stream");
+ goto done;
+ }
+ no_control:
+ {
+ GST_DEBUG_OBJECT (src, "no control url to send GET_PARAMETER");
+ res = GST_RTSP_ERROR;
+ goto done;
+ }
+ not_supported:
+ {
+ GST_DEBUG_OBJECT (src, "GET_PARAMETER is not supported");
+ res = GST_RTSP_ERROR;
+ goto done;
+ }
+ create_request_failed:
+ {
+ GST_DEBUG_OBJECT (src, "could not create GET_PARAMETER request");
+ goto done;
+ }
+ add_content_hdr_failed:
+ {
+ GST_DEBUG_OBJECT (src, "could not add content header");
+ goto done;
+ }
+ set_body_failed:
+ {
+ GST_DEBUG_OBJECT (src, "could not set body");
+ goto done;
+ }
+ send_error:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
+ GST_ELEMENT_WARNING (src, RESOURCE, WRITE, (NULL),
+ ("Could not send get-parameter. (%s)", str));
+ g_free (str);
+ goto done;
+ }
+ get_body_failed:
+ {
+ GST_DEBUG_OBJECT (src, "could not get body");
+ goto done;
+ }
+ }
+
+ /* send SET_PARAMETER */
+ static GstRTSPResult
+ gst_rtspsrc_set_parameter (GstRTSPSrc * src, ParameterRequest * req)
+ {
+ GstRTSPMessage request = { 0 };
+ GstRTSPMessage response = { 0 };
+ GstRTSPResult res = GST_RTSP_OK;
+ GstRTSPStatusCode code = GST_RTSP_STS_OK;
+ const gchar *control;
+
+ GST_DEBUG_OBJECT (src, "creating server set_parameter");
+
+ g_assert (req);
+
+ if ((res = gst_rtspsrc_ensure_open (src, FALSE)) < 0)
+ goto open_failed;
+
+ control = get_aggregate_control (src);
+ if (control == NULL)
+ goto no_control;
+
+ if (!(src->methods & GST_RTSP_SET_PARAMETER))
+ goto not_supported;
+
+ gst_rtspsrc_connection_flush (src, FALSE);
+
+ res =
+ gst_rtsp_message_init_request (&request, GST_RTSP_SET_PARAMETER, control);
+ if (res < 0)
+ goto send_error;
+
+ res = gst_rtsp_message_add_header (&request, GST_RTSP_HDR_CONTENT_TYPE,
+ req->content_type == NULL ? "text/parameters" : req->content_type);
+ if (res < 0)
+ goto add_content_hdr_failed;
+
+ if (req->body && req->body->len) {
+ res =
+ gst_rtsp_message_set_body (&request, (guint8 *) req->body->str,
+ req->body->len);
+
+ if (res < 0)
+ goto set_body_failed;
+ }
+
+ if ((res = gst_rtspsrc_send (src, &src->conninfo,
+ &request, &response, &code, NULL)) < 0)
+ goto send_error;
+
+ done:
+ {
+ gst_promise_reply (req->promise, gst_structure_new ("set-parameter-reply",
+ "rtsp-result", G_TYPE_INT, res,
+ "rtsp-code", G_TYPE_INT, code,
+ "rtsp-reason", G_TYPE_STRING, gst_rtsp_status_as_text (code),
+ NULL));
+ free_param_data (req);
+
+ gst_rtsp_message_unset (&request);
+ gst_rtsp_message_unset (&response);
+
+ return res;
+ }
+
+ /* ERRORS */
+ open_failed:
+ {
+ GST_DEBUG_OBJECT (src, "failed to open stream");
+ goto done;
+ }
+ no_control:
+ {
+ GST_DEBUG_OBJECT (src, "no control url to send SET_PARAMETER");
+ res = GST_RTSP_ERROR;
+ goto done;
+ }
+ not_supported:
+ {
+ GST_DEBUG_OBJECT (src, "SET_PARAMETER is not supported");
+ res = GST_RTSP_ERROR;
+ goto done;
+ }
+ add_content_hdr_failed:
+ {
+ GST_DEBUG_OBJECT (src, "could not add content header");
+ goto done;
+ }
+ set_body_failed:
+ {
+ GST_DEBUG_OBJECT (src, "could not set body");
+ goto done;
+ }
+ send_error:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
+ GST_ELEMENT_WARNING (src, RESOURCE, WRITE, (NULL),
+ ("Could not send set-parameter. (%s)", str));
+ g_free (str);
+ goto done;
+ }
+ }
+
+ typedef struct _RTSPKeyValue
+ {
+ GstRTSPHeaderField field;
+ gchar *value;
+ gchar *custom_key; /* custom header string (field is INVALID then) */
+ } RTSPKeyValue;
+
+ static void
+ key_value_foreach (GArray * array, GFunc func, gpointer user_data)
+ {
+ guint i;
+
+ g_return_if_fail (array != NULL);
+
+ for (i = 0; i < array->len; i++) {
+ (*func) (&g_array_index (array, RTSPKeyValue, i), user_data);
+ }
+ }
+
+ static void
+ dump_key_value (gpointer data, gpointer user_data G_GNUC_UNUSED)
+ {
+ RTSPKeyValue *key_value = (RTSPKeyValue *) data;
+ GstRTSPSrc *src = GST_RTSPSRC (user_data);
+ const gchar *key_string;
+
+ if (key_value->custom_key != NULL)
+ key_string = key_value->custom_key;
+ else
+ key_string = gst_rtsp_header_as_text (key_value->field);
+
+ GST_LOG_OBJECT (src, " key: '%s', value: '%s'", key_string,
+ key_value->value);
+ }
+
+ static void
+ gst_rtspsrc_print_rtsp_message (GstRTSPSrc * src, const GstRTSPMessage * msg)
+ {
+ guint8 *data;
+ guint size;
+ GString *body_string = NULL;
+
+ g_return_if_fail (src != NULL);
+ g_return_if_fail (msg != NULL);
+
+ if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) < GST_LEVEL_LOG)
+ return;
+
+ GST_LOG_OBJECT (src, "--------------------------------------------");
+ switch (msg->type) {
+ case GST_RTSP_MESSAGE_REQUEST:
+ GST_LOG_OBJECT (src, "RTSP request message %p", msg);
+ GST_LOG_OBJECT (src, " request line:");
+ GST_LOG_OBJECT (src, " method: '%s'",
+ gst_rtsp_method_as_text (msg->type_data.request.method));
+ GST_LOG_OBJECT (src, " uri: '%s'", msg->type_data.request.uri);
+ GST_LOG_OBJECT (src, " version: '%s'",
+ gst_rtsp_version_as_text (msg->type_data.request.version));
+ GST_LOG_OBJECT (src, " headers:");
+ key_value_foreach (msg->hdr_fields, dump_key_value, src);
+ GST_LOG_OBJECT (src, " body:");
+ gst_rtsp_message_get_body (msg, &data, &size);
+ if (size > 0) {
+ body_string = g_string_new_len ((const gchar *) data, size);
+ GST_LOG_OBJECT (src, " %s(%d)", body_string->str, size);
+ g_string_free (body_string, TRUE);
+ body_string = NULL;
+ }
+ break;
+ case GST_RTSP_MESSAGE_RESPONSE:
+ GST_LOG_OBJECT (src, "RTSP response message %p", msg);
+ GST_LOG_OBJECT (src, " status line:");
+ GST_LOG_OBJECT (src, " code: '%d'", msg->type_data.response.code);
+ GST_LOG_OBJECT (src, " reason: '%s'", msg->type_data.response.reason);
+ GST_LOG_OBJECT (src, " version: '%s",
+ gst_rtsp_version_as_text (msg->type_data.response.version));
+ GST_LOG_OBJECT (src, " headers:");
+ key_value_foreach (msg->hdr_fields, dump_key_value, src);
+ gst_rtsp_message_get_body (msg, &data, &size);
+ GST_LOG_OBJECT (src, " body: length %d", size);
+ if (size > 0) {
+ body_string = g_string_new_len ((const gchar *) data, size);
+ GST_LOG_OBJECT (src, " %s(%d)", body_string->str, size);
+ g_string_free (body_string, TRUE);
+ body_string = NULL;
+ }
+ break;
+ case GST_RTSP_MESSAGE_HTTP_REQUEST:
+ GST_LOG_OBJECT (src, "HTTP request message %p", msg);
+ GST_LOG_OBJECT (src, " request line:");
+ GST_LOG_OBJECT (src, " method: '%s'",
+ gst_rtsp_method_as_text (msg->type_data.request.method));
+ GST_LOG_OBJECT (src, " uri: '%s'", msg->type_data.request.uri);
+ GST_LOG_OBJECT (src, " version: '%s'",
+ gst_rtsp_version_as_text (msg->type_data.request.version));
+ GST_LOG_OBJECT (src, " headers:");
+ key_value_foreach (msg->hdr_fields, dump_key_value, src);
+ GST_LOG_OBJECT (src, " body:");
+ gst_rtsp_message_get_body (msg, &data, &size);
+ if (size > 0) {
+ body_string = g_string_new_len ((const gchar *) data, size);
+ GST_LOG_OBJECT (src, " %s(%d)", body_string->str, size);
+ g_string_free (body_string, TRUE);
+ body_string = NULL;
+ }
+ break;
+ case GST_RTSP_MESSAGE_HTTP_RESPONSE:
+ GST_LOG_OBJECT (src, "HTTP response message %p", msg);
+ GST_LOG_OBJECT (src, " status line:");
+ GST_LOG_OBJECT (src, " code: '%d'", msg->type_data.response.code);
+ GST_LOG_OBJECT (src, " reason: '%s'", msg->type_data.response.reason);
+ GST_LOG_OBJECT (src, " version: '%s'",
+ gst_rtsp_version_as_text (msg->type_data.response.version));
+ GST_LOG_OBJECT (src, " headers:");
+ key_value_foreach (msg->hdr_fields, dump_key_value, src);
+ gst_rtsp_message_get_body (msg, &data, &size);
+ GST_LOG_OBJECT (src, " body: length %d", size);
+ if (size > 0) {
+ body_string = g_string_new_len ((const gchar *) data, size);
+ GST_LOG_OBJECT (src, " %s(%d)", body_string->str, size);
+ g_string_free (body_string, TRUE);
+ body_string = NULL;
+ }
+ break;
+ case GST_RTSP_MESSAGE_DATA:
+ GST_LOG_OBJECT (src, "RTSP data message %p", msg);
+ GST_LOG_OBJECT (src, " channel: '%d'", msg->type_data.data.channel);
+ GST_LOG_OBJECT (src, " size: '%d'", msg->body_size);
+ gst_rtsp_message_get_body (msg, &data, &size);
+ if (size > 0) {
+ body_string = g_string_new_len ((const gchar *) data, size);
+ GST_LOG_OBJECT (src, " %s(%d)", body_string->str, size);
+ g_string_free (body_string, TRUE);
+ body_string = NULL;
+ }
+ break;
+ default:
+ GST_LOG_OBJECT (src, "unsupported message type %d", msg->type);
+ break;
+ }
+ GST_LOG_OBJECT (src, "--------------------------------------------");
+ }
+
+ static void
+ gst_rtspsrc_print_sdp_media (GstRTSPSrc * src, GstSDPMedia * media)
+ {
+ GST_LOG_OBJECT (src, " media: '%s'", GST_STR_NULL (media->media));
+ GST_LOG_OBJECT (src, " port: '%u'", media->port);
+ GST_LOG_OBJECT (src, " num_ports: '%u'", media->num_ports);
+ GST_LOG_OBJECT (src, " proto: '%s'", GST_STR_NULL (media->proto));
+ if (media->fmts && media->fmts->len > 0) {
+ guint i;
+
+ GST_LOG_OBJECT (src, " formats:");
+ for (i = 0; i < media->fmts->len; i++) {
+ GST_LOG_OBJECT (src, " format '%s'", g_array_index (media->fmts,
+ gchar *, i));
+ }
+ }
+ GST_LOG_OBJECT (src, " information: '%s'",
+ GST_STR_NULL (media->information));
+ if (media->connections && media->connections->len > 0) {
+ guint i;
+
+ GST_LOG_OBJECT (src, " connections:");
+ for (i = 0; i < media->connections->len; i++) {
+ GstSDPConnection *conn =
+ &g_array_index (media->connections, GstSDPConnection, i);
+
+ GST_LOG_OBJECT (src, " nettype: '%s'",
+ GST_STR_NULL (conn->nettype));
+ GST_LOG_OBJECT (src, " addrtype: '%s'",
+ GST_STR_NULL (conn->addrtype));
+ GST_LOG_OBJECT (src, " address: '%s'",
+ GST_STR_NULL (conn->address));
+ GST_LOG_OBJECT (src, " ttl: '%u'", conn->ttl);
+ GST_LOG_OBJECT (src, " addr_number: '%u'", conn->addr_number);
+ }
+ }
+ if (media->bandwidths && media->bandwidths->len > 0) {
+ guint i;
+
+ GST_LOG_OBJECT (src, " bandwidths:");
+ for (i = 0; i < media->bandwidths->len; i++) {
+ GstSDPBandwidth *bw =
+ &g_array_index (media->bandwidths, GstSDPBandwidth, i);
+
+ GST_LOG_OBJECT (src, " type: '%s'", GST_STR_NULL (bw->bwtype));
+ GST_LOG_OBJECT (src, " bandwidth: '%u'", bw->bandwidth);
+ }
+ }
+ GST_LOG_OBJECT (src, " key:");
+ GST_LOG_OBJECT (src, " type: '%s'", GST_STR_NULL (media->key.type));
+ GST_LOG_OBJECT (src, " data: '%s'", GST_STR_NULL (media->key.data));
+ if (media->attributes && media->attributes->len > 0) {
+ guint i;
+
+ GST_LOG_OBJECT (src, " attributes:");
+ for (i = 0; i < media->attributes->len; i++) {
+ GstSDPAttribute *attr =
+ &g_array_index (media->attributes, GstSDPAttribute, i);
+
+ GST_LOG_OBJECT (src, " attribute '%s' : '%s'", attr->key, attr->value);
+ }
+ }
+ }
+
+ void
+ gst_rtspsrc_print_sdp_message (GstRTSPSrc * src, const GstSDPMessage * msg)
+ {
+ g_return_if_fail (src != NULL);
+ g_return_if_fail (msg != NULL);
+
+ if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) < GST_LEVEL_LOG)
+ return;
+
+ GST_LOG_OBJECT (src, "--------------------------------------------");
+ GST_LOG_OBJECT (src, "sdp packet %p:", msg);
+ GST_LOG_OBJECT (src, " version: '%s'", GST_STR_NULL (msg->version));
+ GST_LOG_OBJECT (src, " origin:");
+ GST_LOG_OBJECT (src, " username: '%s'",
+ GST_STR_NULL (msg->origin.username));
+ GST_LOG_OBJECT (src, " sess_id: '%s'",
+ GST_STR_NULL (msg->origin.sess_id));
+ GST_LOG_OBJECT (src, " sess_version: '%s'",
+ GST_STR_NULL (msg->origin.sess_version));
+ GST_LOG_OBJECT (src, " nettype: '%s'",
+ GST_STR_NULL (msg->origin.nettype));
+ GST_LOG_OBJECT (src, " addrtype: '%s'",
+ GST_STR_NULL (msg->origin.addrtype));
+ GST_LOG_OBJECT (src, " addr: '%s'", GST_STR_NULL (msg->origin.addr));
+ GST_LOG_OBJECT (src, " session_name: '%s'",
+ GST_STR_NULL (msg->session_name));
+ GST_LOG_OBJECT (src, " information: '%s'", GST_STR_NULL (msg->information));
+ GST_LOG_OBJECT (src, " uri: '%s'", GST_STR_NULL (msg->uri));
+
+ if (msg->emails && msg->emails->len > 0) {
+ guint i;
+
+ GST_LOG_OBJECT (src, " emails:");
+ for (i = 0; i < msg->emails->len; i++) {
+ GST_LOG_OBJECT (src, " email '%s'", g_array_index (msg->emails, gchar *,
+ i));
+ }
+ }
+ if (msg->phones && msg->phones->len > 0) {
+ guint i;
+
+ GST_LOG_OBJECT (src, " phones:");
+ for (i = 0; i < msg->phones->len; i++) {
+ GST_LOG_OBJECT (src, " phone '%s'", g_array_index (msg->phones, gchar *,
+ i));
+ }
+ }
+ GST_LOG_OBJECT (src, " connection:");
+ GST_LOG_OBJECT (src, " nettype: '%s'",
+ GST_STR_NULL (msg->connection.nettype));
+ GST_LOG_OBJECT (src, " addrtype: '%s'",
+ GST_STR_NULL (msg->connection.addrtype));
+ GST_LOG_OBJECT (src, " address: '%s'",
+ GST_STR_NULL (msg->connection.address));
+ GST_LOG_OBJECT (src, " ttl: '%u'", msg->connection.ttl);
+ GST_LOG_OBJECT (src, " addr_number: '%u'", msg->connection.addr_number);
+ if (msg->bandwidths && msg->bandwidths->len > 0) {
+ guint i;
+
+ GST_LOG_OBJECT (src, " bandwidths:");
+ for (i = 0; i < msg->bandwidths->len; i++) {
+ GstSDPBandwidth *bw =
+ &g_array_index (msg->bandwidths, GstSDPBandwidth, i);
+
+ GST_LOG_OBJECT (src, " type: '%s'", GST_STR_NULL (bw->bwtype));
+ GST_LOG_OBJECT (src, " bandwidth: '%u'", bw->bandwidth);
+ }
+ }
+ GST_LOG_OBJECT (src, " key:");
+ GST_LOG_OBJECT (src, " type: '%s'", GST_STR_NULL (msg->key.type));
+ GST_LOG_OBJECT (src, " data: '%s'", GST_STR_NULL (msg->key.data));
+ if (msg->attributes && msg->attributes->len > 0) {
+ guint i;
+
+ GST_LOG_OBJECT (src, " attributes:");
+ for (i = 0; i < msg->attributes->len; i++) {
+ GstSDPAttribute *attr =
+ &g_array_index (msg->attributes, GstSDPAttribute, i);
+
+ GST_LOG_OBJECT (src, " attribute '%s' : '%s'", attr->key, attr->value);
+ }
+ }
+ if (msg->medias && msg->medias->len > 0) {
+ guint i;
+
+ GST_LOG_OBJECT (src, " medias:");
+ for (i = 0; i < msg->medias->len; i++) {
+ GST_LOG_OBJECT (src, " media %u:", i);
+ gst_rtspsrc_print_sdp_media (src, &g_array_index (msg->medias,
+ GstSDPMedia, i));
+ }
+ }
+ GST_LOG_OBJECT (src, "--------------------------------------------");
+ }
--- /dev/null
+ /* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * <2006> Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+ /*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+ #ifndef __GST_RTSPSRC_H__
+ #define __GST_RTSPSRC_H__
+
+ #include <gst/gst.h>
+
+ G_BEGIN_DECLS
+
+ #include <gst/rtsp/rtsp.h>
+ #include <gio/gio.h>
+
+ #include "gstrtspext.h"
+
+ #define GST_TYPE_RTSPSRC \
+ (gst_rtspsrc_get_type())
+ #define GST_RTSPSRC(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTSPSRC,GstRTSPSrc))
+ #define GST_RTSPSRC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTSPSRC,GstRTSPSrcClass))
+ #define GST_IS_RTSPSRC(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTSPSRC))
+ #define GST_IS_RTSPSRC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTSPSRC))
+ #define GST_RTSPSRC_CAST(obj) \
+ ((GstRTSPSrc *)(obj))
+
+ typedef struct _GstRTSPSrc GstRTSPSrc;
+ typedef struct _GstRTSPSrcClass GstRTSPSrcClass;
+
+ #define GST_RTSP_STATE_GET_LOCK(rtsp) (&GST_RTSPSRC_CAST(rtsp)->state_rec_lock)
+ #define GST_RTSP_STATE_LOCK(rtsp) (g_rec_mutex_lock (GST_RTSP_STATE_GET_LOCK(rtsp)))
+ #define GST_RTSP_STATE_UNLOCK(rtsp) (g_rec_mutex_unlock (GST_RTSP_STATE_GET_LOCK(rtsp)))
+
+ #define GST_RTSP_STREAM_GET_LOCK(rtsp) (&GST_RTSPSRC_CAST(rtsp)->stream_rec_lock)
+ #define GST_RTSP_STREAM_LOCK(rtsp) (g_rec_mutex_lock (GST_RTSP_STREAM_GET_LOCK(rtsp)))
+ #define GST_RTSP_STREAM_UNLOCK(rtsp) (g_rec_mutex_unlock (GST_RTSP_STREAM_GET_LOCK(rtsp)))
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
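++/* Tizen-specific error codes posted to the application via
++ * gst_rtspsrc_post_error_message(); most entries mirror RTSP status codes
++ * (RFC 2326), e.g. 405 Method Not Allowed or 551 Option Not Supported. */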
++typedef enum {
++ GST_RTSPSRC_ERROR_NONE = 0,
++ GST_RTSPSRC_ERROR_UNSUPPORTED_AUDIO,
++ GST_RTSPSRC_ERROR_UNSUPPORTED_VIDEO,
++ GST_RTSPSRC_ERROR_CONNECTION_FAIL,
++ GST_RTSPSRC_ERROR_DNS_FAIL,
++ GST_RTSPSRC_ERROR_SERVER_DISCONNECTED,
++ GST_RTSPSRC_ERROR_BAD_SERVER,
++ GST_RTSPSRC_ERROR_INVALID_PROTOCOL,
++ GST_RTSPSRC_ERROR_INVALID_URL,
++ GST_RTSPSRC_ERROR_UNEXPECTED_MSG,
++ GST_RTSPSRC_ERROR_OUT_OF_MEMORIES,
++ GST_RTSPSRC_ERROR_RTSP_TIMEOUT,
++ GST_RTSPSRC_ERROR_BAD_REQUEST,
++ GST_RTSPSRC_ERROR_NOT_AUTHORIZED,
++ GST_RTSPSRC_ERROR_PAYMENT_REQUIRED,
++ GST_RTSPSRC_ERROR_FORBIDDEN,
++ GST_RTSPSRC_ERROR_CONTENT_NOT_FOUND,
++ GST_RTSPSRC_ERROR_METHOD_NOT_ALLOWED,
++ GST_RTSPSRC_ERROR_NOT_ACCEPTABLE,
++ GST_RTSPSRC_ERROR_PROXY_AUTHENTICATION_REQUIRED,
++ GST_RTSPSRC_ERROR_SERVER_TIMEOUT,
++ GST_RTSPSRC_ERROR_GONE,
++ GST_RTSPSRC_ERROR_LENGTH_REQUIRED,
++ GST_RTSPSRC_ERROR_PRECONDITION_FAILED,
++ GST_RTSPSRC_ERROR_REQUEST_ENTITY_TOO_LARGE,
++ GST_RTSPSRC_ERROR_REQUEST_URI_TOO_LARGE,
++ GST_RTSPSRC_ERROR_UNSUPPORTED_MEDIA_TYPE,
++ GST_RTSPSRC_ERROR_PARAMETER_NOT_UNDERSTOOD,
++ GST_RTSPSRC_ERROR_CONFERENCE_NOT_FOUND,
++ GST_RTSPSRC_ERROR_NOT_ENOUGH_BANDWIDTH,
++ GST_RTSPSRC_ERROR_NO_SESSION_ID,
++ GST_RTSPSRC_ERROR_METHOD_NOT_VALID_IN_THIS_STATE,
++ GST_RTSPSRC_ERROR_HEADER_FIELD_NOT_VALID_FOR_SOURCE,
++ GST_RTSPSRC_ERROR_INVALID_RANGE,
++ GST_RTSPSRC_ERROR_PARAMETER_IS_READONLY,
++ GST_RTSPSRC_ERROR_AGGREGATE_OP_NOT_ALLOWED,
++ GST_RTSPSRC_ERROR_ONLY_AGGREGATE_OP_ALLOWED,
++ GST_RTSPSRC_ERROR_BAD_TRANSPORT,
++ GST_RTSPSRC_ERROR_DESTINATION_UNREACHABLE,
++ GST_RTSPSRC_ERROR_INTERNAL_SERVER_ERROR,
++ GST_RTSPSRC_ERROR_NOT_IMPLEMENTED,
++ GST_RTSPSRC_ERROR_BAD_GATEWAY,
++ GST_RTSPSRC_ERROR_SERVICE_UNAVAILABLE,
++ GST_RTSPSRC_ERROR_GATEWAY_TIME_OUT,
++ GST_RTSPSRC_ERROR_RTSP_VERSION_NOT_SUPPORTED,
++ GST_RTSPSRC_ERROR_OPTION_NOT_SUPPORTED,
++} _GstRTSPSrcError;
++typedef _GstRTSPSrcError GstRTSPSrcError;
++#endif
++
+ typedef struct _GstRTSPConnInfo GstRTSPConnInfo;
+
+ struct _GstRTSPConnInfo {
+ gchar *location;
+ GstRTSPUrl *url;
+ gchar *url_str;
+ GstRTSPConnection *connection;
+ gboolean connected;
+ gboolean flushing;
+
+ GMutex send_lock;
+ GMutex recv_lock;
+ };
+
+ typedef struct _GstRTSPStream GstRTSPStream;
+
+ struct _GstRTSPStream {
+ gint id;
+
+ GstRTSPSrc *parent; /* parent, no extra ref to parent is taken */
+
+ /* pad we expose or NULL when it does not have an actual pad */
+ GstPad *srcpad;
+ GstFlowReturn last_ret;
+ gboolean added;
+ gboolean setup;
+ gboolean skipped;
+ gboolean eos;
+ gboolean discont;
+ gboolean need_caps;
+ gboolean waiting_setup_response;
+
+ /* for interleaved mode */
+ guint8 channel[2];
+ GstPad *channelpad[2];
+
+ /* our udp sources */
+ GstElement *udpsrc[2];
+ GstPad *blockedpad;
+ gulong blockid;
+ gboolean is_ipv6;
+
+ /* our udp sinks back to the server */
+ GstElement *udpsink[2];
+ GstPad *rtcppad;
+
+ /* fakesrc for sending dummy data or appsrc for sending backchannel data */
+ GstElement *rtpsrc;
+
+ /* state */
+ guint port;
+ gboolean container;
+ gboolean is_real;
+ guint8 default_pt;
+ GstRTSPProfile profile;
+ GArray *ptmap;
+ /* original control url */
+ gchar *control_url;
+ guint32 ssrc;
+ guint32 seqbase;
+ guint64 timebase;
+ GstElement *srtpdec;
+ GstCaps *srtcpparams;
+ GstElement *srtpenc;
+ guint32 send_ssrc;
+
+ /* per stream connection */
+ GstRTSPConnInfo conninfo;
+
+ /* session */
+ GObject *session;
+
+ /* srtp key management */
+ GstMIKEYMessage *mikey;
+
+ /* bandwidth */
+ guint as_bandwidth;
+ guint rs_bandwidth;
+ guint rr_bandwidth;
+
+ /* destination */
+ gchar *destination;
+ gboolean is_multicast;
+ guint ttl;
+ gboolean is_backchannel;
+
+ /* A unique and stable id we will use for the stream start event */
+ gchar *stream_id;
+
+ GstStructure *rtx_pt_map;
+
+ guint32 segment_seqnum[2];
+ };
+
+ /**
+ * GstRTSPSrcTimeoutCause:
+ * @GST_RTSP_SRC_TIMEOUT_CAUSE_RTCP: timeout triggered by RTCP
+ *
+ * Different causes to why the rtspsrc generated the GstRTSPSrcTimeout
+ * message.
+ */
+ typedef enum
+ {
+ GST_RTSP_SRC_TIMEOUT_CAUSE_RTCP
+ } GstRTSPSrcTimeoutCause;
+
+ /**
+ * GstRTSPNatMethod:
+ * @GST_RTSP_NAT_NONE: none
+ * @GST_RTSP_NAT_DUMMY: send dummy packets
+ *
+ * Different methods for trying to traverse firewalls.
+ */
+ typedef enum
+ {
+ GST_RTSP_NAT_NONE,
+ GST_RTSP_NAT_DUMMY
+ } GstRTSPNatMethod;
+
+
+ struct _GstRTSPSrc {
+ GstBin parent;
+
+ /* task and mutex for interleaved mode */
+ gboolean interleaved;
+ GstTask *task;
+ GRecMutex stream_rec_lock;
+ GstSegment segment;
+ gboolean running;
+ gboolean need_range;
+ gboolean server_side_trickmode;
+ GstClockTime trickmode_interval;
+ gint free_channel;
+ gboolean need_segment;
+ gboolean clip_out_segment;
+ GstSegment out_segment;
+ GstClockTime base_time;
+
+ /* UDP mode loop */
+ gint pending_cmd;
+ gint busy_cmd;
+ GCond cmd_cond;
+ gboolean ignore_timeout;
+ gboolean open_error;
+
+ /* mutex for protecting state changes */
+ GRecMutex state_rec_lock;
+
+ GstSDPMessage *sdp;
+ gboolean from_sdp;
+ GList *streams;
+ GstStructure *props;
+ gboolean need_activate;
+
+ /* properties */
+ GstRTSPLowerTrans protocols;
+ gboolean debug;
+ guint retry;
+ guint64 udp_timeout;
+ gint64 tcp_timeout;
+ guint latency;
+ gboolean drop_on_latency;
+ guint64 connection_speed;
+ GstRTSPNatMethod nat_method;
+ gboolean do_rtcp;
+ gboolean do_rtsp_keep_alive;
+ gchar *proxy_host;
+ guint proxy_port;
+ gchar *proxy_user; /* from url or property */
+ gchar *proxy_passwd; /* from url or property */
+ gchar *prop_proxy_id; /* set via property */
+ gchar *prop_proxy_pw; /* set via property */
+ guint rtp_blocksize;
+ gchar *user_id;
+ gchar *user_pw;
+ gint buffer_mode;
+ GstRTSPRange client_port_range;
+ gint udp_buffer_size;
+ gboolean short_header;
+ guint probation;
+ gboolean udp_reconnect;
+ gchar *multi_iface;
+ gboolean ntp_sync;
+ gboolean use_pipeline_clock;
+ GstStructure *sdes;
+ GTlsCertificateFlags tls_validation_flags;
+ GTlsDatabase *tls_database;
+ GTlsInteraction *tls_interaction;
+ gboolean do_retransmission;
+ gint ntp_time_source;
+ gchar *user_agent;
+ gint max_rtcp_rtp_time_diff;
+ gboolean rfc7273_sync;
+ guint64 max_ts_offset_adjustment;
+ gint64 max_ts_offset;
+ gboolean max_ts_offset_is_set;
+ gint backchannel;
+ GstClockTime teardown_timeout;
+ gboolean onvif_mode;
+ gboolean onvif_rate_control;
+ gboolean is_live;
+ gboolean ignore_x_server_reply;
+
+ /* state */
+ GstRTSPState state;
+ gchar *content_base;
+ GstRTSPLowerTrans cur_protocols;
+ gboolean tried_url_auth;
+ gchar *addr;
+ gboolean need_redirect;
+ GstRTSPTimeRange *range;
+ gchar *control;
+ guint next_port_num;
+ GstClock *provided_clock;
+
+ /* supported methods */
+ gint methods;
+
+ /* seekability
+ * -1.0 : Stream is not seekable
+ * 0.0 : seekable only to the beginning
+ * G_MAXFLOAT : Any value is possible
+ *
+ * Any other positive value indicates the longest duration
+ * between any two random access points
+ */
+ gfloat seekable;
+ guint32 seek_seqnum;
+ GstClockTime last_pos;
+
+ /* session management */
+ GstElement *manager;
+ gulong manager_sig_id;
+ gulong manager_ptmap_id;
+ gboolean use_buffering;
+
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
++ /* media type */
++ gboolean is_audio_codec_supported;
++ gboolean is_video_codec_supported;
++ gchar *audio_codec;
++ gchar *video_codec;
++ gchar *video_frame_size;
++#endif
++
+ GstRTSPConnInfo conninfo;
+
+ /* SET/GET PARAMETER requests queue */
+ GQueue set_get_param_q;
+
+ /* a list of RTSP extensions as GstElement */
+ GstRTSPExtensionList *extensions;
+
+ GstRTSPVersion default_version;
+ GstRTSPVersion version;
+
+ GstEvent *initial_seek;
+
+ guint group_id;
+ GMutex group_lock;
++
++#ifdef TIZEN_FEATURE_RTSP_MODIFICATION
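++ /* block the READY->PAUSED transition until the async open completes, and
++ * remember the requested start position for the next PLAY request */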
++ GCond open_end;
++ GMutex pause_lock;
++ guint64 start_position;
++#endif
+ };
+
+ struct _GstRTSPSrcClass {
+ GstBinClass parent_class;
+
+ /* action signals */
+ gboolean (*get_parameter) (GstRTSPSrc *rtsp, const gchar *parameter, const gchar *content_type, GstPromise *promise);
+ gboolean (*get_parameters) (GstRTSPSrc *rtsp, gchar **parameters, const gchar *content_type, GstPromise *promise);
+ gboolean (*set_parameter) (GstRTSPSrc *rtsp, const gchar *name, const gchar *value, const gchar *content_type, GstPromise *promise);
+ GstFlowReturn (*push_backchannel_buffer) (GstRTSPSrc *src, guint id, GstSample *sample);
+ };
+
+ GType gst_rtspsrc_get_type(void);
+
+ G_END_DECLS
+
+ #endif /* __GST_RTSPSRC_H__ */
--- /dev/null
- caps = gst_caps_intersect (caps, gst_static_caps_get (&raw_caps));
+ /* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2003> David Schleef <ds@schleef.org>
+ * Copyright (C) <2010> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /*
+ * This file was (probably) generated from gstvideobalance.c,
+ * gstvideobalance.c,v 1.7 2003/11/08 02:48:59 dschleef Exp
+ */
+
+ /**
+ * SECTION:element-videobalance
+ * @title: videobalance
+ *
+ * Adjusts brightness, contrast, hue, saturation on a video stream.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 videotestsrc ! videobalance saturation=0.0 ! videoconvert ! ximagesink
+ * ]| This pipeline converts the image to black and white by setting the
+ * saturation to 0.0.
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <gst/math-compat.h>
+
+ #include "gstvideobalance.h"
+ #include <string.h>
+
+ #include <gst/video/colorbalance.h>
+
+ GST_DEBUG_CATEGORY_STATIC (videobalance_debug);
+ #define GST_CAT_DEFAULT videobalance_debug
+
+ /* GstVideoBalance properties */
+ #define DEFAULT_PROP_CONTRAST 1.0
+ #define DEFAULT_PROP_BRIGHTNESS 0.0
+ #define DEFAULT_PROP_HUE 0.0
+ #define DEFAULT_PROP_SATURATION 1.0
+
+ enum
+ {
+ PROP_0,
+ PROP_CONTRAST,
+ PROP_BRIGHTNESS,
+ PROP_HUE,
+ PROP_SATURATION
+ };
+
+ #define PROCESSING_CAPS \
+ "{ AYUV, ARGB, BGRA, ABGR, RGBA, Y444, xRGB, RGBx, " \
+ "xBGR, BGRx, RGB, BGR, Y42B, YUY2, UYVY, YVYU, " \
+ "I420, YV12, IYUV, Y41B, NV12, NV21 }"
+
+ static GstStaticPadTemplate gst_video_balance_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (PROCESSING_CAPS) ";"
+ "video/x-raw(ANY)")
+ );
+
+ static GstStaticPadTemplate gst_video_balance_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (PROCESSING_CAPS) ";"
+ "video/x-raw(ANY)")
+ );
+
+ static void gst_video_balance_colorbalance_init (GstColorBalanceInterface *
+ iface);
+
+ static void gst_video_balance_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+ static void gst_video_balance_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+ #define gst_video_balance_parent_class parent_class
+ G_DEFINE_TYPE_WITH_CODE (GstVideoBalance, gst_video_balance,
+ GST_TYPE_VIDEO_FILTER,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_COLOR_BALANCE,
+ gst_video_balance_colorbalance_init));
+ GST_ELEMENT_REGISTER_DEFINE (videobalance, "videobalance",
+ GST_RANK_NONE, GST_TYPE_VIDEO_BALANCE);
+
+ /*
+ * look-up tables (LUTs) for the luma and chroma adjustments.
+ */
+ static void
+ gst_video_balance_update_tables (GstVideoBalance * vb)
+ {
+ gint i, j;
+ gdouble y, u, v, hue_cos, hue_sin;
+
+ /* Y */
+ for (i = 0; i < 256; i++) {
+ y = 16 + ((i - 16) * vb->contrast + vb->brightness * 255);
+ if (y < 0)
+ y = 0;
+ else if (y > 255)
+ y = 255;
+ vb->tabley[i] = rint (y);
+ }
+
+ hue_cos = cos (G_PI * vb->hue);
+ hue_sin = sin (G_PI * vb->hue);
+
+ /* U/V lookup tables are 2D, since we need both U/V for each table
+ * separately. */
+ for (i = -128; i < 128; i++) {
+ for (j = -128; j < 128; j++) {
+ u = 128 + ((i * hue_cos + j * hue_sin) * vb->saturation);
+ v = 128 + ((-i * hue_sin + j * hue_cos) * vb->saturation);
+ if (u < 0)
+ u = 0;
+ else if (u > 255)
+ u = 255;
+ if (v < 0)
+ v = 0;
+ else if (v > 255)
+ v = 255;
+ vb->tableu[i + 128][j + 128] = rint (u);
+ vb->tablev[i + 128][j + 128] = rint (v);
+ }
+ }
+ }
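+
+ /* For reference, the chroma tables built above implement, with u and v in
+ * centred coordinates (-128..127):
+ *
+ * u' = 128 + (u * cos (G_PI * hue) + v * sin (G_PI * hue)) * saturation
+ * v' = 128 + (-u * sin (G_PI * hue) + v * cos (G_PI * hue)) * saturation
+ *
+ * clamped to 0..255: a rotation by hue * 180 degrees in the U/V plane
+ * followed by scaling with the saturation factor. Luma is handled
+ * independently through tabley. */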
+
+ static gboolean
+ gst_video_balance_is_passthrough (GstVideoBalance * videobalance)
+ {
+ return videobalance->contrast == 1.0 &&
+ videobalance->brightness == 0.0 &&
+ videobalance->hue == 0.0 && videobalance->saturation == 1.0;
+ }
+
+ static void
+ gst_video_balance_update_properties (GstVideoBalance * videobalance)
+ {
+ gboolean passthrough;
+ GstBaseTransform *base = GST_BASE_TRANSFORM (videobalance);
+
+ GST_OBJECT_LOCK (videobalance);
+ passthrough = gst_video_balance_is_passthrough (videobalance);
+ if (!passthrough)
+ gst_video_balance_update_tables (videobalance);
+ GST_OBJECT_UNLOCK (videobalance);
+
+ gst_base_transform_set_passthrough (base, passthrough);
+ }
+
+ static void
+ gst_video_balance_planar_yuv (GstVideoBalance * videobalance,
+ GstVideoFrame * frame)
+ {
+ gint x, y;
+ guint8 *ydata;
+ guint8 *udata, *vdata;
+ gint ystride, ustride, vstride;
+ gint width, height;
+ gint width2, height2;
+ guint8 *tabley = videobalance->tabley;
+ guint8 **tableu = videobalance->tableu;
+ guint8 **tablev = videobalance->tablev;
+
+ width = GST_VIDEO_FRAME_WIDTH (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame);
+
+ ydata = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ ystride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
+
+ for (y = 0; y < height; y++) {
+ guint8 *yptr;
+
+ yptr = ydata + y * ystride;
+ for (x = 0; x < width; x++) {
+ *yptr = tabley[*yptr];
+ yptr++;
+ }
+ }
+
+ width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
+ height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);
+
+ udata = GST_VIDEO_FRAME_PLANE_DATA (frame, 1);
+ vdata = GST_VIDEO_FRAME_PLANE_DATA (frame, 2);
+ ustride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 1);
+ vstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 2);
+
+ for (y = 0; y < height2; y++) {
+ guint8 *uptr, *vptr;
+ guint8 u1, v1;
+
+ uptr = udata + y * ustride;
+ vptr = vdata + y * vstride;
+
+ for (x = 0; x < width2; x++) {
+ u1 = *uptr;
+ v1 = *vptr;
+
+ *uptr++ = tableu[u1][v1];
+ *vptr++ = tablev[u1][v1];
+ }
+ }
+ }
+
+ static void
+ gst_video_balance_semiplanar_yuv (GstVideoBalance * videobalance,
+ GstVideoFrame * frame)
+ {
+ gint x, y;
+ guint8 *ydata;
+ guint8 *uvdata;
+ gint ystride, uvstride;
+ gint width, height;
+ gint width2, height2;
+ guint8 *tabley = videobalance->tabley;
+ guint8 **tableu = videobalance->tableu;
+ guint8 **tablev = videobalance->tablev;
+ gint upos, vpos;
+
+ width = GST_VIDEO_FRAME_WIDTH (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame);
+
+ ydata = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ ystride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
+
+ for (y = 0; y < height; y++) {
+ guint8 *yptr;
+
+ yptr = ydata + y * ystride;
+ for (x = 0; x < width; x++) {
+ *yptr = tabley[*yptr];
+ yptr++;
+ }
+ }
+
+ width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
+ height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);
+
+ uvdata = GST_VIDEO_FRAME_PLANE_DATA (frame, 1);
+ uvstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 1);
+
+ upos = GST_VIDEO_INFO_FORMAT (&frame->info) == GST_VIDEO_FORMAT_NV12 ? 0 : 1;
+ vpos = GST_VIDEO_INFO_FORMAT (&frame->info) == GST_VIDEO_FORMAT_NV12 ? 1 : 0;
+
+ for (y = 0; y < height2; y++) {
+ guint8 *uvptr;
+ guint8 u1, v1;
+
+ uvptr = uvdata + y * uvstride;
+
+ for (x = 0; x < width2; x++) {
+ u1 = uvptr[upos];
+ v1 = uvptr[vpos];
+
+ uvptr[upos] = tableu[u1][v1];
+ uvptr[vpos] = tablev[u1][v1];
+ uvptr += 2;
+ }
+ }
+ }
+
+ static void
+ gst_video_balance_packed_yuv (GstVideoBalance * videobalance,
+ GstVideoFrame * frame)
+ {
+ gint x, y, stride;
+ guint8 *ydata, *udata, *vdata;
+ gint yoff, uoff, voff;
+ gint width, height;
+ gint width2, height2;
+ guint8 *tabley = videobalance->tabley;
+ guint8 **tableu = videobalance->tableu;
+ guint8 **tablev = videobalance->tablev;
+
+ width = GST_VIDEO_FRAME_WIDTH (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame);
+
+ stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
+ ydata = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
+ yoff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
+
+ for (y = 0; y < height; y++) {
+ guint8 *yptr;
+
+ yptr = ydata + y * stride;
+ for (x = 0; x < width; x++) {
+ *yptr = tabley[*yptr];
+ yptr += yoff;
+ }
+ }
+
+ width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
+ height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);
+
+ udata = GST_VIDEO_FRAME_COMP_DATA (frame, 1);
+ vdata = GST_VIDEO_FRAME_COMP_DATA (frame, 2);
+ uoff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 1);
+ voff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 2);
+
+ for (y = 0; y < height2; y++) {
+ guint8 *uptr, *vptr;
+ guint8 u1, v1;
+
+ uptr = udata + y * stride;
+ vptr = vdata + y * stride;
+
+ for (x = 0; x < width2; x++) {
+ u1 = *uptr;
+ v1 = *vptr;
+
+ *uptr = tableu[u1][v1];
+ *vptr = tablev[u1][v1];
+
+ uptr += uoff;
+ vptr += voff;
+ }
+ }
+ }
+
+ static const int cog_ycbcr_to_rgb_matrix_8bit_sdtv[] = {
+ 298, 0, 409, -57068,
+ 298, -100, -208, 34707,
+ 298, 516, 0, -70870,
+ };
+
+ static const gint cog_rgb_to_ycbcr_matrix_8bit_sdtv[] = {
+ 66, 129, 25, 4096,
+ -38, -74, 112, 32768,
+ 112, -94, -18, 32768,
+ };
+
+ #define APPLY_MATRIX(m,o,v1,v2,v3) ((m[o*4] * v1 + m[o*4+1] * v2 + m[o*4+2] * v3 + m[o*4+3]) >> 8)
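+
+ /* The matrices above are 8.8 fixed point; the >> 8 in APPLY_MATRIX undoes
+ * the scaling. As a quick sanity check, RGB white (255, 255, 255) maps to
+ * Y = (66 * 255 + 129 * 255 + 25 * 255 + 4096) >> 8 = 235 and RGB black to
+ * Y = 4096 >> 8 = 16, the expected SDTV video range. */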
+
+ static void
+ gst_video_balance_packed_rgb (GstVideoBalance * videobalance,
+ GstVideoFrame * frame)
+ {
+ gint i, j, height;
+ gint width, stride, row_wrap;
+ gint pixel_stride;
+ guint8 *data;
+ gint offsets[3];
+ gint r, g, b;
+ gint y, u, v;
+ gint u_tmp, v_tmp;
+ guint8 *tabley = videobalance->tabley;
+ guint8 **tableu = videobalance->tableu;
+ guint8 **tablev = videobalance->tablev;
+
+ width = GST_VIDEO_FRAME_WIDTH (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame);
+
+ offsets[0] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 0);
+ offsets[1] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 1);
+ offsets[2] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 2);
+
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
+
+ pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
+ row_wrap = stride - pixel_stride * width;
+
+ for (i = 0; i < height; i++) {
+ for (j = 0; j < width; j++) {
+ r = data[offsets[0]];
+ g = data[offsets[1]];
+ b = data[offsets[2]];
+
+ y = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 0, r, g, b);
+ u_tmp = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 1, r, g, b);
+ v_tmp = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 2, r, g, b);
+
+ y = CLAMP (y, 0, 255);
+ u_tmp = CLAMP (u_tmp, 0, 255);
+ v_tmp = CLAMP (v_tmp, 0, 255);
+
+ y = tabley[y];
+ u = tableu[u_tmp][v_tmp];
+ v = tablev[u_tmp][v_tmp];
+
+ r = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 0, y, u, v);
+ g = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 1, y, u, v);
+ b = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 2, y, u, v);
+
+ data[offsets[0]] = CLAMP (r, 0, 255);
+ data[offsets[1]] = CLAMP (g, 0, 255);
+ data[offsets[2]] = CLAMP (b, 0, 255);
+ data += pixel_stride;
+ }
+ data += row_wrap;
+ }
+ }
+
+ /* get notified of caps and plug in the correct process function */
+ static gboolean
+ gst_video_balance_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+ GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
+ {
+ GstVideoBalance *videobalance = GST_VIDEO_BALANCE (vfilter);
+
+ GST_DEBUG_OBJECT (videobalance,
+ "in %" GST_PTR_FORMAT " out %" GST_PTR_FORMAT, incaps, outcaps);
+
+ videobalance->process = NULL;
+
+ switch (GST_VIDEO_INFO_FORMAT (in_info)) {
+ case GST_VIDEO_FORMAT_I420:
+ case GST_VIDEO_FORMAT_YV12:
+ case GST_VIDEO_FORMAT_Y41B:
+ case GST_VIDEO_FORMAT_Y42B:
+ case GST_VIDEO_FORMAT_Y444:
+ videobalance->process = gst_video_balance_planar_yuv;
+ break;
+ case GST_VIDEO_FORMAT_YUY2:
+ case GST_VIDEO_FORMAT_UYVY:
+ case GST_VIDEO_FORMAT_AYUV:
+ case GST_VIDEO_FORMAT_YVYU:
+ videobalance->process = gst_video_balance_packed_yuv;
+ break;
+ case GST_VIDEO_FORMAT_NV12:
+ case GST_VIDEO_FORMAT_NV21:
+ videobalance->process = gst_video_balance_semiplanar_yuv;
+ break;
+ case GST_VIDEO_FORMAT_ARGB:
+ case GST_VIDEO_FORMAT_ABGR:
+ case GST_VIDEO_FORMAT_RGBA:
+ case GST_VIDEO_FORMAT_BGRA:
+ case GST_VIDEO_FORMAT_xRGB:
+ case GST_VIDEO_FORMAT_xBGR:
+ case GST_VIDEO_FORMAT_RGBx:
+ case GST_VIDEO_FORMAT_BGRx:
+ case GST_VIDEO_FORMAT_RGB:
+ case GST_VIDEO_FORMAT_BGR:
+ videobalance->process = gst_video_balance_packed_rgb;
+ break;
+ default:
+ if (!gst_video_balance_is_passthrough (videobalance))
+ goto unknown_format;
+ break;
+ }
+
+ return TRUE;
+
+ /* ERRORS */
+ unknown_format:
+ {
+ GST_ERROR_OBJECT (videobalance, "unknown format %" GST_PTR_FORMAT, incaps);
+ return FALSE;
+ }
+ }
+
+ static void
+ gst_video_balance_before_transform (GstBaseTransform * base, GstBuffer * buf)
+ {
+ GstVideoBalance *balance = GST_VIDEO_BALANCE (base);
+ GstClockTime timestamp, stream_time;
+
+ timestamp = GST_BUFFER_TIMESTAMP (buf);
+ stream_time =
+ gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);
+
+ GST_DEBUG_OBJECT (balance, "sync to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (timestamp));
+
+ if (GST_CLOCK_TIME_IS_VALID (stream_time))
+ gst_object_sync_values (GST_OBJECT (balance), stream_time);
+ }
+
+ static GstCaps *
+ gst_video_balance_transform_caps (GstBaseTransform * trans,
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter)
+ {
+ GstVideoBalance *balance = GST_VIDEO_BALANCE (trans);
+ GstCaps *ret;
+
+ if (!gst_video_balance_is_passthrough (balance)) {
+ static GstStaticCaps raw_caps =
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (PROCESSING_CAPS));
++ GstCaps *tmp = gst_static_caps_get (&raw_caps);
+
++ caps = gst_caps_intersect (caps, tmp);
++ gst_caps_unref (tmp);
+
+ if (filter) {
+ ret = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (caps);
+ } else {
+ ret = caps;
+ }
+ } else {
+ if (filter) {
+ ret = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+ } else {
+ ret = gst_caps_ref (caps);
+ }
+ }
+
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_video_balance_transform_frame_ip (GstVideoFilter * vfilter,
+ GstVideoFrame * frame)
+ {
+ GstVideoBalance *videobalance = GST_VIDEO_BALANCE (vfilter);
+
+ if (!videobalance->process)
+ goto not_negotiated;
+
+ GST_OBJECT_LOCK (videobalance);
+ videobalance->process (videobalance, frame);
+ GST_OBJECT_UNLOCK (videobalance);
+
+ return GST_FLOW_OK;
+
+ /* ERRORS */
+ not_negotiated:
+ {
+ GST_ERROR_OBJECT (videobalance, "Not negotiated yet");
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+ }
+
+ static void
+ gst_video_balance_finalize (GObject * object)
+ {
+ GList *channels = NULL;
+ GstVideoBalance *balance = GST_VIDEO_BALANCE (object);
+
+ g_free (balance->tableu[0]);
+
+ channels = balance->channels;
+ while (channels) {
+ GstColorBalanceChannel *channel = channels->data;
+
+ g_object_unref (channel);
+ channels->data = NULL;
+ channels = g_list_next (channels);
+ }
+
+ if (balance->channels)
+ g_list_free (balance->channels);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ static void
+ gst_video_balance_class_init (GstVideoBalanceClass * klass)
+ {
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+ GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+ GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
+
+ GST_DEBUG_CATEGORY_INIT (videobalance_debug, "videobalance", 0,
+ "videobalance");
+
+ gobject_class->finalize = gst_video_balance_finalize;
+ gobject_class->set_property = gst_video_balance_set_property;
+ gobject_class->get_property = gst_video_balance_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_CONTRAST,
+ g_param_spec_double ("contrast", "Contrast", "contrast",
+ 0.0, 2.0, DEFAULT_PROP_CONTRAST,
+ GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_BRIGHTNESS,
+ g_param_spec_double ("brightness", "Brightness", "brightness", -1.0, 1.0,
+ DEFAULT_PROP_BRIGHTNESS,
+ GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_HUE,
+ g_param_spec_double ("hue", "Hue", "hue", -1.0, 1.0, DEFAULT_PROP_HUE,
+ GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_SATURATION,
+ g_param_spec_double ("saturation", "Saturation", "saturation", 0.0, 2.0,
+ DEFAULT_PROP_SATURATION,
+ GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_set_static_metadata (gstelement_class, "Video balance",
+ "Filter/Effect/Video",
+ "Adjusts brightness, contrast, hue, saturation on a video stream",
+ "David Schleef <ds@schleef.org>");
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_video_balance_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_video_balance_src_template);
+
+ trans_class->before_transform =
+ GST_DEBUG_FUNCPTR (gst_video_balance_before_transform);
+ trans_class->transform_ip_on_passthrough = FALSE;
+ trans_class->transform_caps =
+ GST_DEBUG_FUNCPTR (gst_video_balance_transform_caps);
+
+ vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_video_balance_set_info);
+ vfilter_class->transform_frame_ip =
+ GST_DEBUG_FUNCPTR (gst_video_balance_transform_frame_ip);
+ }
+
+ static void
+ gst_video_balance_init (GstVideoBalance * videobalance)
+ {
+ const gchar *channels[4] = { "HUE", "SATURATION",
+ "BRIGHTNESS", "CONTRAST"
+ };
+ gint i;
+
+ /* Initialize properties */
+ videobalance->contrast = DEFAULT_PROP_CONTRAST;
+ videobalance->brightness = DEFAULT_PROP_BRIGHTNESS;
+ videobalance->hue = DEFAULT_PROP_HUE;
+ videobalance->saturation = DEFAULT_PROP_SATURATION;
+
+ videobalance->tableu[0] = g_new (guint8, 256 * 256 * 2);
+ for (i = 0; i < 256; i++) {
+ videobalance->tableu[i] =
+ videobalance->tableu[0] + i * 256 * sizeof (guint8);
+ videobalance->tablev[i] =
+ videobalance->tableu[0] + 256 * 256 * sizeof (guint8) +
+ i * 256 * sizeof (guint8);
+ }
+
+ gst_video_balance_update_properties (videobalance);
+
+ /* Generate the channels list */
+ for (i = 0; i < G_N_ELEMENTS (channels); i++) {
+ GstColorBalanceChannel *channel;
+
+ channel = g_object_new (GST_TYPE_COLOR_BALANCE_CHANNEL, NULL);
+ channel->label = g_strdup (channels[i]);
+ channel->min_value = -1000;
+ channel->max_value = 1000;
+
+ videobalance->channels = g_list_append (videobalance->channels, channel);
+ }
+ }
+
+ static const GList *
+ gst_video_balance_colorbalance_list_channels (GstColorBalance * balance)
+ {
+ GstVideoBalance *videobalance = GST_VIDEO_BALANCE (balance);
+
+ g_return_val_if_fail (videobalance != NULL, NULL);
+ g_return_val_if_fail (GST_IS_VIDEO_BALANCE (videobalance), NULL);
+
+ return videobalance->channels;
+ }
+
+ static void
+ gst_video_balance_colorbalance_set_value (GstColorBalance * balance,
+ GstColorBalanceChannel * channel, gint value)
+ {
+ GstVideoBalance *vb = GST_VIDEO_BALANCE (balance);
+ gdouble new_val;
+ gboolean changed = FALSE;
+
+ g_return_if_fail (vb != NULL);
+ g_return_if_fail (GST_IS_VIDEO_BALANCE (vb));
+ g_return_if_fail (GST_IS_VIDEO_FILTER (vb));
+ g_return_if_fail (channel->label != NULL);
+
+ GST_OBJECT_LOCK (vb);
+ if (!g_ascii_strcasecmp (channel->label, "HUE")) {
+ new_val = (value + 1000.0) * 2.0 / 2000.0 - 1.0;
+ changed = new_val != vb->hue;
+ vb->hue = new_val;
+ } else if (!g_ascii_strcasecmp (channel->label, "SATURATION")) {
+ new_val = (value + 1000.0) * 2.0 / 2000.0;
+ changed = new_val != vb->saturation;
+ vb->saturation = new_val;
+ } else if (!g_ascii_strcasecmp (channel->label, "BRIGHTNESS")) {
+ new_val = (value + 1000.0) * 2.0 / 2000.0 - 1.0;
+ changed = new_val != vb->brightness;
+ vb->brightness = new_val;
+ } else if (!g_ascii_strcasecmp (channel->label, "CONTRAST")) {
+ new_val = (value + 1000.0) * 2.0 / 2000.0;
+ changed = new_val != vb->contrast;
+ vb->contrast = new_val;
+ }
+ GST_OBJECT_UNLOCK (vb);
+
+ if (changed) {
+ gst_video_balance_update_properties (vb);
+ gst_color_balance_value_changed (balance, channel,
+ gst_color_balance_get_value (balance, channel));
+ }
+ }
+
+ static gint
+ gst_video_balance_colorbalance_get_value (GstColorBalance * balance,
+ GstColorBalanceChannel * channel)
+ {
+ GstVideoBalance *vb = GST_VIDEO_BALANCE (balance);
+ gint value = 0;
+
+ g_return_val_if_fail (vb != NULL, 0);
+ g_return_val_if_fail (GST_IS_VIDEO_BALANCE (vb), 0);
+ g_return_val_if_fail (channel->label != NULL, 0);
+
+ if (!g_ascii_strcasecmp (channel->label, "HUE")) {
+ value = (vb->hue + 1) * 2000.0 / 2.0 - 1000.0;
+ } else if (!g_ascii_strcasecmp (channel->label, "SATURATION")) {
+ value = vb->saturation * 2000.0 / 2.0 - 1000.0;
+ } else if (!g_ascii_strcasecmp (channel->label, "BRIGHTNESS")) {
+ value = (vb->brightness + 1) * 2000.0 / 2.0 - 1000.0;
+ } else if (!g_ascii_strcasecmp (channel->label, "CONTRAST")) {
+ value = vb->contrast * 2000.0 / 2.0 - 1000.0;
+ }
+
+ return value;
+ }
+
+ static GstColorBalanceType
+ gst_video_balance_colorbalance_get_balance_type (GstColorBalance * balance)
+ {
+ return GST_COLOR_BALANCE_SOFTWARE;
+ }
+
+ static void
+ gst_video_balance_colorbalance_init (GstColorBalanceInterface * iface)
+ {
+ iface->list_channels = gst_video_balance_colorbalance_list_channels;
+ iface->set_value = gst_video_balance_colorbalance_set_value;
+ iface->get_value = gst_video_balance_colorbalance_get_value;
+ iface->get_balance_type = gst_video_balance_colorbalance_get_balance_type;
+ }
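+
+ /* A minimal sketch of driving this element through the GstColorBalance
+ * interface wired up above; this is application-side code, not part of this
+ * file, with error handling elided. Channel values range over -1000..1000,
+ * 0 being the neutral midpoint:
+ *
+ * GstElement *vb = gst_element_factory_make ("videobalance", NULL);
+ * GstColorBalance *cb = GST_COLOR_BALANCE (vb);
+ * const GList *l;
+ *
+ * for (l = gst_color_balance_list_channels (cb); l; l = l->next) {
+ * GstColorBalanceChannel *ch = l->data;
+ * if (!g_ascii_strcasecmp (ch->label, "SATURATION"))
+ * gst_color_balance_set_value (cb, ch, 0);
+ * }
+ */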
+
+ static GstColorBalanceChannel *
+ gst_video_balance_find_channel (GstVideoBalance * balance, const gchar * label)
+ {
+ GList *l;
+
+ for (l = balance->channels; l; l = l->next) {
+ GstColorBalanceChannel *channel = l->data;
+
+ if (g_ascii_strcasecmp (channel->label, label) == 0)
+ return channel;
+ }
+ return NULL;
+ }
+
+ static void
+ gst_video_balance_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+ GstVideoBalance *balance = GST_VIDEO_BALANCE (object);
+ gdouble d;
+ const gchar *label = NULL;
+
+ GST_OBJECT_LOCK (balance);
+ switch (prop_id) {
+ case PROP_CONTRAST:
+ d = g_value_get_double (value);
+ GST_DEBUG_OBJECT (balance, "Changing contrast from %lf to %lf",
+ balance->contrast, d);
+ if (d != balance->contrast)
+ label = "CONTRAST";
+ balance->contrast = d;
+ break;
+ case PROP_BRIGHTNESS:
+ d = g_value_get_double (value);
+ GST_DEBUG_OBJECT (balance, "Changing brightness from %lf to %lf",
+ balance->brightness, d);
+ if (d != balance->brightness)
+ label = "BRIGHTNESS";
+ balance->brightness = d;
+ break;
+ case PROP_HUE:
+ d = g_value_get_double (value);
+ GST_DEBUG_OBJECT (balance, "Changing hue from %lf to %lf", balance->hue,
+ d);
+ if (d != balance->hue)
+ label = "HUE";
+ balance->hue = d;
+ break;
+ case PROP_SATURATION:
+ d = g_value_get_double (value);
+ GST_DEBUG_OBJECT (balance, "Changing saturation from %lf to %lf",
+ balance->saturation, d);
+ if (d != balance->saturation)
+ label = "SATURATION";
+ balance->saturation = d;
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+
+ GST_OBJECT_UNLOCK (balance);
+ gst_video_balance_update_properties (balance);
+
+ if (label) {
+ GstColorBalanceChannel *channel =
+ gst_video_balance_find_channel (balance, label);
+ gst_color_balance_value_changed (GST_COLOR_BALANCE (balance), channel,
+ gst_color_balance_get_value (GST_COLOR_BALANCE (balance), channel));
+ }
+ }
+
+ static void
+ gst_video_balance_get_property (GObject * object, guint prop_id, GValue * value,
+ GParamSpec * pspec)
+ {
+ GstVideoBalance *balance = GST_VIDEO_BALANCE (object);
+
+ switch (prop_id) {
+ case PROP_CONTRAST:
+ g_value_set_double (value, balance->contrast);
+ break;
+ case PROP_BRIGHTNESS:
+ g_value_set_double (value, balance->brightness);
+ break;
+ case PROP_HUE:
+ g_value_set_double (value, balance->hue);
+ break;
+ case PROP_SATURATION:
+ g_value_set_double (value, balance->saturation);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
--- /dev/null
+ /* -*- Mode: C; tab-width: 2; indent-tabs-mode: t; c-basic-offset: 2 -*- */
+ /* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2006> Nokia Corporation, Stefan Kost <stefan.kost@nokia.com>.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /**
+ * SECTION:element-wavparse
+ * @title: wavparse
+ *
+ * Parse a .wav file into raw or compressed audio.
+ *
+ * Wavparse supports both push and pull mode operations, making it possible to
+ * stream from a network source.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 filesrc location=sine.wav ! wavparse ! audioconvert ! alsasink
+ * ]| Read a wav file and output to the soundcard using the ALSA element. The
+ * wav file is assumed to contain raw uncompressed samples.
+ * |[
+ * gst-launch-1.0 souphttpsrc location=http://www.example.org/sine.wav ! queue ! wavparse ! audioconvert ! alsasink
+ * ]| Stream data from a network URL.
+ *
+ */
+
+ /*
+ * TODO:
+ * http://replaygain.hydrogenaudio.org/file_format_wav.html
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <string.h>
+ #include <math.h>
+
+ #include "gstwavparse.h"
+ #include "gst/riff/riff-media.h"
+ #include <gst/base/gsttypefindhelper.h>
+ #include <gst/pbutils/descriptions.h>
+ #include <gst/gst-i18n-plugin.h>
+
+ GST_DEBUG_CATEGORY_STATIC (wavparse_debug);
+ #define GST_CAT_DEFAULT (wavparse_debug)
+
+ /* Data size chunk of RF64,
+ * see http://tech.ebu.ch/docs/tech/tech3306-2009.pdf */
+ #define GST_RS64_TAG_DS64 GST_MAKE_FOURCC ('d','s','6','4')
+
+ static void gst_wavparse_dispose (GObject * object);
+
+ static gboolean gst_wavparse_sink_activate (GstPad * sinkpad,
+ GstObject * parent);
+ static gboolean gst_wavparse_sink_activate_mode (GstPad * sinkpad,
+ GstObject * parent, GstPadMode mode, gboolean active);
+ static gboolean gst_wavparse_send_event (GstElement * element,
+ GstEvent * event);
+ static GstStateChangeReturn gst_wavparse_change_state (GstElement * element,
+ GstStateChange transition);
+
+ static gboolean gst_wavparse_pad_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+ static gboolean gst_wavparse_pad_convert (GstPad * pad, GstFormat src_format,
+ gint64 src_value, GstFormat * dest_format, gint64 * dest_value);
+
+ static GstFlowReturn gst_wavparse_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf);
+ static gboolean gst_wavparse_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+ static void gst_wavparse_loop (GstPad * pad);
+ static gboolean gst_wavparse_srcpad_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+
+ static void gst_wavparse_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+ static void gst_wavparse_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+ #define DEFAULT_IGNORE_LENGTH FALSE
+
+ enum
+ {
+ PROP_0,
+ PROP_IGNORE_LENGTH,
+ };
+
+ static GstStaticPadTemplate sink_template_factory =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-wav;audio/x-rf64")
+ );
+
+ #define DEBUG_INIT \
+ GST_DEBUG_CATEGORY_INIT (wavparse_debug, "wavparse", 0, "WAV parser");
+
+ #define gst_wavparse_parent_class parent_class
+ G_DEFINE_TYPE_WITH_CODE (GstWavParse, gst_wavparse, GST_TYPE_ELEMENT,
+ DEBUG_INIT);
+
+ GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (wavparse, "wavparse", GST_RANK_PRIMARY,
+ GST_TYPE_WAVPARSE, gst_riff_init ();
+ );
+
+ typedef struct
+ {
+ /* Offset Size Description Value
+ * 0x00 4 ID unique identification value
+ * 0x04 4 Position play order position
+ * 0x08 4 Data Chunk ID RIFF ID of corresponding data chunk
+ * 0x0c 4 Chunk Start Byte Offset of Data Chunk *
+ * 0x10 4 Block Start Byte Offset to sample of First Channel
+ * 0x14 4 Sample Offset Byte Offset to sample byte of First Channel
+ */
+ guint32 id;
+ guint32 position;
+ guint32 data_chunk_id;
+ guint32 chunk_start;
+ guint32 block_start;
+ guint32 sample_offset;
+ } GstWavParseCue;
+
+ typedef struct
+ {
+ /* Offset Size Description Value
+ * 0x08 4 Cue Point ID 0 - 0xFFFFFFFF
+ * 0x0c Text
+ */
+ guint32 cue_point_id;
+ gchar *text;
+ } GstWavParseLabl, GstWavParseNote;
+
+ static void
+ gst_wavparse_class_init (GstWavParseClass * klass)
+ {
+ GstElementClass *gstelement_class;
+ GObjectClass *object_class;
+ GstPadTemplate *src_template;
+
+ gstelement_class = (GstElementClass *) klass;
+ object_class = (GObjectClass *) klass;
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ object_class->dispose = gst_wavparse_dispose;
+
+ object_class->set_property = gst_wavparse_set_property;
+ object_class->get_property = gst_wavparse_get_property;
+
+ /**
+ * GstWavParse:ignore-length:
+ *
+ * This selects whether the length found in a data chunk
+ * should be ignored. This may be useful for streamed audio
+ * where the length is unknown until the end of streaming,
+ * and various software/hardware just puts some random value
+ * in there and hopes it doesn't break too much.
+ */
+ g_object_class_install_property (object_class, PROP_IGNORE_LENGTH,
+ g_param_spec_boolean ("ignore-length",
+ "Ignore length",
+ "Ignore length from the Wave header",
+ DEFAULT_IGNORE_LENGTH, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+
+ gstelement_class->change_state = gst_wavparse_change_state;
+ gstelement_class->send_event = gst_wavparse_send_event;
+
+ /* register pads */
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &sink_template_factory);
+
+ src_template = gst_pad_template_new ("src", GST_PAD_SRC,
+ GST_PAD_ALWAYS, gst_riff_create_audio_template_caps ());
+ gst_element_class_add_pad_template (gstelement_class, src_template);
+
+ gst_element_class_set_static_metadata (gstelement_class, "WAV audio demuxer",
+ "Codec/Demuxer/Audio",
+ "Parse a .wav file into raw audio",
+ "Erik Walthinsen <omega@cse.ogi.edu>");
+ }
+
+ static void
+ gst_wavparse_notes_free (GstWavParseNote * note)
+ {
+ if (note)
+ g_free (note->text);
+ g_free (note);
+ }
+
+ static void
+ gst_wavparse_labls_free (GstWavParseLabl * labl)
+ {
+ if (labl)
+ g_free (labl->text);
+ g_free (labl);
+ }
+
+ static void
+ gst_wavparse_reset (GstWavParse * wav)
+ {
+ wav->state = GST_WAVPARSE_START;
+
+ /* These will all be set correctly in the fmt chunk */
+ wav->depth = 0;
+ wav->rate = 0;
+ wav->width = 0;
+ wav->channels = 0;
+ wav->blockalign = 0;
+ wav->bps = 0;
+ wav->fact = 0;
+ wav->offset = 0;
+ wav->end_offset = 0;
+ wav->dataleft = 0;
+ wav->datasize = 0;
+ wav->datastart = 0;
+ wav->chunk_size = 0;
+ wav->duration = 0;
+ wav->got_fmt = FALSE;
+ wav->first = TRUE;
+
+ if (wav->seek_event)
+ gst_event_unref (wav->seek_event);
+ wav->seek_event = NULL;
+ if (wav->adapter) {
+ gst_adapter_clear (wav->adapter);
+ g_object_unref (wav->adapter);
+ wav->adapter = NULL;
+ }
+ if (wav->tags)
+ gst_tag_list_unref (wav->tags);
+ wav->tags = NULL;
+ if (wav->toc)
+ gst_toc_unref (wav->toc);
+ wav->toc = NULL;
+ if (wav->cues)
+ g_list_free_full (wav->cues, g_free);
+ wav->cues = NULL;
+ if (wav->labls)
+ g_list_free_full (wav->labls, (GDestroyNotify) gst_wavparse_labls_free);
+ wav->labls = NULL;
+ if (wav->notes)
+ g_list_free_full (wav->notes, (GDestroyNotify) gst_wavparse_notes_free);
+ wav->notes = NULL;
+ if (wav->caps)
+ gst_caps_unref (wav->caps);
+ wav->caps = NULL;
+ if (wav->start_segment)
+ gst_event_unref (wav->start_segment);
+ wav->start_segment = NULL;
+ }
+
+ static void
+ gst_wavparse_dispose (GObject * object)
+ {
+ GstWavParse *wav = GST_WAVPARSE (object);
+
+ GST_DEBUG_OBJECT (wav, "WAV: Dispose");
+ gst_wavparse_reset (wav);
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+ }
+
+ static void
+ gst_wavparse_init (GstWavParse * wavparse)
+ {
+ gst_wavparse_reset (wavparse);
+
+ /* sink */
+ wavparse->sinkpad =
+ gst_pad_new_from_static_template (&sink_template_factory, "sink");
+ gst_pad_set_activate_function (wavparse->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_wavparse_sink_activate));
+ gst_pad_set_activatemode_function (wavparse->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_wavparse_sink_activate_mode));
+ gst_pad_set_chain_function (wavparse->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_wavparse_chain));
+ gst_pad_set_event_function (wavparse->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_wavparse_sink_event));
+ gst_element_add_pad (GST_ELEMENT_CAST (wavparse), wavparse->sinkpad);
+
+ /* src */
+ wavparse->srcpad =
+ gst_pad_new_from_template (gst_element_class_get_pad_template
+ (GST_ELEMENT_GET_CLASS (wavparse), "src"), "src");
+ gst_pad_use_fixed_caps (wavparse->srcpad);
+ gst_pad_set_query_function (wavparse->srcpad,
+ GST_DEBUG_FUNCPTR (gst_wavparse_pad_query));
+ gst_pad_set_event_function (wavparse->srcpad,
+ GST_DEBUG_FUNCPTR (gst_wavparse_srcpad_event));
+ gst_element_add_pad (GST_ELEMENT_CAST (wavparse), wavparse->srcpad);
+ }
+
+ static gboolean
+ gst_wavparse_parse_file_header (GstElement * element, GstBuffer * buf)
+ {
+ guint32 doctype;
+
+ if (!gst_riff_parse_file_header (element, buf, &doctype))
+ return FALSE;
+
+ if (doctype != GST_RIFF_RIFF_WAVE)
+ goto not_wav;
+
+ return TRUE;
+
+ /* ERRORS */
+ not_wav:
+ {
+ GST_ELEMENT_ERROR (element, STREAM, WRONG_TYPE, (NULL),
+ ("File is not a WAVE file: 0x%" G_GINT32_MODIFIER "x", doctype));
+ return FALSE;
+ }
+ }
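+
+ /* For reference, the 12 bytes checked above form the RIFF file header:
+ * bytes 0-3 the 'RIFF' (or 'RF64') tag, bytes 4-7 the little-endian file
+ * size and bytes 8-11 the form type, which must be 'WAVE' here. */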
+
+ static GstFlowReturn
+ gst_wavparse_stream_init (GstWavParse * wav)
+ {
+ GstFlowReturn res;
+ GstBuffer *buf = NULL;
+
+ if ((res = gst_pad_pull_range (wav->sinkpad,
+ wav->offset, 12, &buf)) != GST_FLOW_OK)
+ return res;
+ else if (!gst_wavparse_parse_file_header (GST_ELEMENT_CAST (wav), buf))
+ return GST_FLOW_ERROR;
+
+ wav->offset += 12;
+
+ return GST_FLOW_OK;
+ }
+
+ static gboolean
+ gst_wavparse_time_to_bytepos (GstWavParse * wav, gint64 ts, gint64 * bytepos)
+ {
+ /* -1 always maps to -1 */
+ if (ts == -1) {
+ *bytepos = -1;
+ return TRUE;
+ }
+
+ /* 0 always maps to 0 */
+ if (ts == 0) {
+ *bytepos = 0;
+ return TRUE;
+ }
+
+ if (wav->bps > 0) {
+ *bytepos = gst_util_uint64_scale_ceil (ts, (guint64) wav->bps, GST_SECOND);
+ return TRUE;
+ } else if (wav->fact) {
+ guint64 bps = gst_util_uint64_scale (wav->datasize, wav->rate, wav->fact);
+ *bytepos = gst_util_uint64_scale_ceil (ts, bps, GST_SECOND);
+ return TRUE;
+ }
+
+ return FALSE;
+ }
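+
+ /* Worked example of the conversion above: 16-bit stereo at 44100 Hz gives
+ * bps = 44100 * 2 * 2 = 176400, so ts = 1 second maps to byte position
+ * ceil (GST_SECOND * 176400 / GST_SECOND) = 176400. Callers still round
+ * down to a sample boundary and add the data offset, see
+ * gst_wavparse_perform_seek below. */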
+
+ /* This function is used to perform seeks on the element.
+ *
+ * It also works when event is NULL, in which case it will just
+ * start from the last configured segment. This technique is
+ * used when activating the element and to perform the seek in
+ * READY.
+ */
+ static gboolean
+ gst_wavparse_perform_seek (GstWavParse * wav, GstEvent * event)
+ {
+ gboolean res;
+ gdouble rate;
+ GstFormat format, bformat;
+ GstSeekFlags flags;
+ GstSeekType cur_type = GST_SEEK_TYPE_NONE, stop_type;
+ gint64 cur, stop, upstream_size;
+ gboolean flush;
+ gboolean update;
+ GstSegment seeksegment = { 0, };
+ gint64 last_stop;
+ guint32 seqnum = GST_SEQNUM_INVALID;
+
+ if (event) {
+ GST_DEBUG_OBJECT (wav, "doing seek with event");
+
+ gst_event_parse_seek (event, &rate, &format, &flags,
+ &cur_type, &cur, &stop_type, &stop);
+ seqnum = gst_event_get_seqnum (event);
+
+ /* no negative rates yet */
+ if (rate < 0.0)
+ goto negative_rate;
+
+ if (format != wav->segment.format) {
+ GST_INFO_OBJECT (wav, "converting seek-event from %s to %s",
+ gst_format_get_name (format),
+ gst_format_get_name (wav->segment.format));
+ res = TRUE;
+ if (cur_type != GST_SEEK_TYPE_NONE)
+ res =
+ gst_pad_query_convert (wav->srcpad, format, cur,
+ wav->segment.format, &cur);
+ if (res && stop_type != GST_SEEK_TYPE_NONE)
+ res =
+ gst_pad_query_convert (wav->srcpad, format, stop,
+ wav->segment.format, &stop);
+ if (!res)
+ goto no_format;
+
+ format = wav->segment.format;
+ }
+ } else {
+ GST_DEBUG_OBJECT (wav, "doing seek without event");
+ flags = 0;
+ rate = 1.0;
+ cur_type = GST_SEEK_TYPE_SET;
+ stop_type = GST_SEEK_TYPE_SET;
+ }
+
+ /* in push mode, we must delegate to upstream */
+ if (wav->streaming) {
+ gboolean res = FALSE;
+
+ /* if streaming not yet started; only prepare initial newsegment */
+ if (!event || wav->state != GST_WAVPARSE_DATA) {
+ if (wav->start_segment)
+ gst_event_unref (wav->start_segment);
+ wav->start_segment = gst_event_new_segment (&wav->segment);
+ res = TRUE;
+ } else {
+ /* convert seek positions to byte positions in data sections */
+ if (format == GST_FORMAT_TIME) {
+ /* should not fail */
+ if (!gst_wavparse_time_to_bytepos (wav, cur, &cur))
+ goto no_position;
+ if (!gst_wavparse_time_to_bytepos (wav, stop, &stop))
+ goto no_position;
+ }
+ /* mind sample boundary and header */
+ if (cur >= 0) {
+ cur -= (cur % wav->bytes_per_sample);
+ cur += wav->datastart;
+ }
+ if (stop >= 0) {
+ stop -= (stop % wav->bytes_per_sample);
+ stop += wav->datastart;
+ }
+ GST_DEBUG_OBJECT (wav, "Pushing BYTE seek rate %g, "
+ "start %" G_GINT64_FORMAT ", stop %" G_GINT64_FORMAT, rate, cur,
+ stop);
+ /* BYTE seek event */
+ event = gst_event_new_seek (rate, GST_FORMAT_BYTES, flags, cur_type, cur,
+ stop_type, stop);
+ if (seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (event, seqnum);
+ res = gst_pad_push_event (wav->sinkpad, event);
+ }
+ return res;
+ }
+
+ /* get flush flag */
+ flush = flags & GST_SEEK_FLAG_FLUSH;
+
+ /* now we need to make sure the streaming thread is stopped. For a flushing
+ * seek we send a FLUSH_START event downstream, which will cause the
+ * streaming thread to stop with FLUSHING.
+ * For a non-flushing seek we simply pause the task, which will happen as soon
+ * as it completes one iteration (and thus might block when the sink is
+ * blocking in preroll). */
+ if (flush) {
+ GstEvent *fevent;
+ GST_DEBUG_OBJECT (wav, "sending flush start");
+
+ fevent = gst_event_new_flush_start ();
+ if (seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (fevent, seqnum);
+ gst_pad_push_event (wav->sinkpad, gst_event_ref (fevent));
+ gst_pad_push_event (wav->srcpad, fevent);
+ } else {
+ gst_pad_pause_task (wav->sinkpad);
+ }
+
+ /* we should now be able to grab the streaming thread because we stopped it
+ * with the above flush/pause code */
+ GST_PAD_STREAM_LOCK (wav->sinkpad);
+
+ /* save current position */
+ last_stop = wav->segment.position;
+
+ GST_DEBUG_OBJECT (wav, "stopped streaming at %" G_GINT64_FORMAT, last_stop);
+
+ /* copy segment, we need this because we still need the old
+ * segment when we close the current segment. */
+ memcpy (&seeksegment, &wav->segment, sizeof (GstSegment));
+
+ /* configure the seek parameters in the seeksegment. We will then have the
+ * right values in the segment to perform the seek */
+ if (event) {
+ GST_DEBUG_OBJECT (wav, "configuring seek");
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
+ cur_type, cur, stop_type, stop, &update);
+ }
+
+ /* figure out the last position we need to play. If it's configured (stop !=
+ * -1), use that, else we play until the total duration of the file */
+ if ((stop = seeksegment.stop) == -1)
+ stop = seeksegment.duration;
+
+ GST_DEBUG_OBJECT (wav, "cur_type =%d", cur_type);
+ if ((cur_type != GST_SEEK_TYPE_NONE)) {
+ /* bring offset to bytes, if the bps is 0, we have the segment in BYTES and
+ * we can just copy the last_stop. If not, we use the bps to convert TIME to
+ * bytes. */
+ if (!gst_wavparse_time_to_bytepos (wav, seeksegment.position,
+ (gint64 *) & wav->offset))
+ wav->offset = seeksegment.position;
+ GST_LOG_OBJECT (wav, "offset=%" G_GUINT64_FORMAT, wav->offset);
+ wav->offset -= (wav->offset % wav->bytes_per_sample);
+ GST_LOG_OBJECT (wav, "offset=%" G_GUINT64_FORMAT, wav->offset);
+ wav->offset += wav->datastart;
+ GST_LOG_OBJECT (wav, "offset=%" G_GUINT64_FORMAT, wav->offset);
+ } else {
+ GST_LOG_OBJECT (wav, "continue from offset=%" G_GUINT64_FORMAT,
+ wav->offset);
+ }
+
+ if (stop_type != GST_SEEK_TYPE_NONE) {
+ if (!gst_wavparse_time_to_bytepos (wav, stop, (gint64 *) & wav->end_offset))
+ wav->end_offset = stop;
+ GST_LOG_OBJECT (wav, "end_offset=%" G_GUINT64_FORMAT, wav->end_offset);
+ wav->end_offset -= (wav->end_offset % wav->bytes_per_sample);
+ GST_LOG_OBJECT (wav, "end_offset=%" G_GUINT64_FORMAT, wav->end_offset);
+ wav->end_offset += wav->datastart;
+ GST_LOG_OBJECT (wav, "end_offset=%" G_GUINT64_FORMAT, wav->end_offset);
+ } else {
+ GST_LOG_OBJECT (wav, "continue to end_offset=%" G_GUINT64_FORMAT,
+ wav->end_offset);
+ }
+
+ /* make sure filesize is not exceeded due to rounding errors or so,
+ * same precaution as in _stream_headers */
+ bformat = GST_FORMAT_BYTES;
+ if (gst_pad_peer_query_duration (wav->sinkpad, bformat, &upstream_size))
+ wav->end_offset = MIN (wav->end_offset, upstream_size);
+
+ if (wav->datasize > 0 && wav->end_offset > wav->datastart + wav->datasize)
+ wav->end_offset = wav->datastart + wav->datasize;
+
+ /* this is the range of bytes we will use for playback */
+ wav->offset = MIN (wav->offset, wav->end_offset);
+ wav->dataleft = wav->end_offset - wav->offset;
+
+ GST_DEBUG_OBJECT (wav,
+ "seek: rate %lf, offset %" G_GUINT64_FORMAT ", end %" G_GUINT64_FORMAT
+ ", segment %" GST_TIME_FORMAT " -- %" GST_TIME_FORMAT, rate, wav->offset,
+ wav->end_offset, GST_TIME_ARGS (seeksegment.start), GST_TIME_ARGS (stop));
+
+ /* prepare for streaming again */
+ if (flush) {
+ GstEvent *fevent;
+
+ /* if we sent a FLUSH_START, we now send a FLUSH_STOP */
+ GST_DEBUG_OBJECT (wav, "sending flush stop");
+
+ fevent = gst_event_new_flush_stop (TRUE);
+ if (seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (fevent, seqnum);
+ gst_pad_push_event (wav->sinkpad, gst_event_ref (fevent));
+ gst_pad_push_event (wav->srcpad, fevent);
+ }
+
+ /* now we did the seek and can activate the new segment values */
+ memcpy (&wav->segment, &seeksegment, sizeof (GstSegment));
+
+ /* if we're doing a segment seek, post a SEGMENT_START message */
+ if (wav->segment.flags & GST_SEEK_FLAG_SEGMENT) {
+ gst_element_post_message (GST_ELEMENT_CAST (wav),
+ gst_message_new_segment_start (GST_OBJECT_CAST (wav),
+ wav->segment.format, wav->segment.position));
+ }
+
+ /* now create the newsegment */
+ GST_DEBUG_OBJECT (wav, "Creating newsegment from %" G_GINT64_FORMAT
+ " to %" G_GINT64_FORMAT, wav->segment.position, stop);
+
+ /* store the newsegment event so it can be sent from the streaming thread. */
+ if (wav->start_segment)
+ gst_event_unref (wav->start_segment);
+ wav->start_segment = gst_event_new_segment (&wav->segment);
+ if (seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (wav->start_segment, seqnum);
+
+ /* mark discont if we are going to stream from another position. */
+ if (last_stop != wav->segment.position) {
+ GST_DEBUG_OBJECT (wav, "mark DISCONT, we did a seek to another position");
+ wav->discont = TRUE;
+ }
+
+ /* and start the streaming task again */
+ if (!wav->streaming) {
+ gst_pad_start_task (wav->sinkpad, (GstTaskFunction) gst_wavparse_loop,
+ wav->sinkpad, NULL);
+ }
+
+ GST_PAD_STREAM_UNLOCK (wav->sinkpad);
+
+ return TRUE;
+
+ /* ERRORS */
+ negative_rate:
+ {
+ GST_DEBUG_OBJECT (wav, "negative playback rates are not supported yet.");
+ return FALSE;
+ }
+ no_format:
+ {
+ GST_DEBUG_OBJECT (wav, "unsupported format given, seek aborted.");
+ return FALSE;
+ }
+ no_position:
+ {
+ GST_DEBUG_OBJECT (wav,
+ "Could not determine byte position for desired time");
+ return FALSE;
+ }
+ }
+
+ /*
+ * gst_wavparse_peek_chunk_info:
+ * @wav Wavparse object
+ * @tag holder for tag
+ * @size holder for tag size
+ *
+ * Peek next chunk info (tag and size)
+ *
+ * Returns: %TRUE when the chunk info (header) is available
+ */
+ static gboolean
+ gst_wavparse_peek_chunk_info (GstWavParse * wav, guint32 * tag, guint32 * size)
+ {
+ const guint8 *data = NULL;
+
+ if (gst_adapter_available (wav->adapter) < 8)
+ return FALSE;
+
+ data = gst_adapter_map (wav->adapter, 8);
+ *tag = GST_READ_UINT32_LE (data);
+ *size = GST_READ_UINT32_LE (data + 4);
+ gst_adapter_unmap (wav->adapter);
+
+ GST_DEBUG ("Next chunk size is %u bytes, type %" GST_FOURCC_FORMAT, *size,
+ GST_FOURCC_ARGS (*tag));
+
+ return TRUE;
+ }
+
+ /*
+ * gst_wavparse_peek_chunk:
+ * @wav Wavparse object
+ * @tag holder for tag
+ * @size holder for tag size
+ *
+ * Peek enough data for one full chunk
+ *
+ * Returns: %TRUE when the full chunk is available
+ */
+ static gboolean
+ gst_wavparse_peek_chunk (GstWavParse * wav, guint32 * tag, guint32 * size)
+ {
+ guint32 peek_size = 0;
+ guint available;
+
+ if (!gst_wavparse_peek_chunk_info (wav, tag, size))
+ return FALSE;
+
+ /* size 0 -> empty data buffer would surprise most callers,
+ * large size -> do not bother trying to squeeze that into adapter,
+ * so we throw poor man's exception, which can be caught if caller really
+ * wants to handle 0 size chunk */
+ if (!(*size) || (*size) >= (1 << 30)) {
+ GST_INFO ("Invalid/unexpected chunk size %u for tag %" GST_FOURCC_FORMAT,
+ *size, GST_FOURCC_ARGS (*tag));
+ /* chain should give up */
+ wav->abort_buffering = TRUE;
+ return FALSE;
+ }
+ peek_size = (*size + 1) & ~1;
+ available = gst_adapter_available (wav->adapter);
+
+ if (available >= (8 + peek_size)) {
+ return TRUE;
+ } else {
+ GST_LOG ("but only %u bytes available now", available);
+ return FALSE;
+ }
+ }
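+
+ /* RIFF chunks are word (2 byte) aligned: a chunk with an odd payload size
+ * is followed by one pad byte that is not counted in its size field. An
+ * 11 byte 'labl' payload therefore occupies 8 (header) + 11 + 1 (pad) = 20
+ * bytes in the stream, which is what the (*size + 1) & ~1 rounding above
+ * accounts for. */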
+
+ /*
+ * gst_wavparse_calculate_duration:
+ * @wav: wavparse object
+ *
+ * Calculate duration on demand and store in @wav. Prefer bps, but use fact as a
+ * fallback.
+ *
+ * Returns: %TRUE if duration is available.
+ */
+ static gboolean
+ gst_wavparse_calculate_duration (GstWavParse * wav)
+ {
+ if (wav->duration > 0)
+ return TRUE;
+
+ if (wav->bps > 0) {
+ GST_INFO_OBJECT (wav, "Got datasize %" G_GUINT64_FORMAT, wav->datasize);
+ wav->duration =
+ gst_util_uint64_scale_ceil (wav->datasize, GST_SECOND,
+ (guint64) wav->bps);
+ GST_INFO_OBJECT (wav, "Got duration (bps) %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (wav->duration));
+ return TRUE;
+ } else if (wav->fact) {
+ wav->duration =
+ gst_util_uint64_scale_ceil (GST_SECOND, wav->fact, wav->rate);
+ GST_INFO_OBJECT (wav, "Got duration (fact) %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (wav->duration));
+ return TRUE;
+ }
+ return FALSE;
+ }
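+
+ /* Numerical example for the preferred bps path above: a 10 second file of
+ * 16-bit stereo at 44100 Hz has bps = 176400 and datasize = 1764000, so
+ * duration = ceil (1764000 * GST_SECOND / 176400) = 10 * GST_SECOND. */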
+
+ static gboolean
+ gst_waveparse_ignore_chunk (GstWavParse * wav, GstBuffer * buf, guint32 tag,
+ guint32 size)
+ {
+ guint flush;
+
+ if (wav->streaming) {
+ if (!gst_wavparse_peek_chunk (wav, &tag, &size))
+ return FALSE;
+ }
+ GST_DEBUG_OBJECT (wav, "Ignoring tag %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (tag));
+ flush = 8 + ((size + 1) & ~1);
+ wav->offset += flush;
+ if (wav->streaming) {
+ gst_adapter_flush (wav->adapter, flush);
+ } else {
+ gst_buffer_unref (buf);
+ }
+
+ return TRUE;
+ }
+
+ /*
+ * gst_wavparse_cue_chunk:
+ * @wav GstWavParse object
+ * @data holder for data
+ * @size holder for data size
+ *
+ * Parse cue chunk from @data to wav->cues.
+ *
+ * Returns: %TRUE when cue chunk is available
+ */
+ static gboolean
+ gst_wavparse_cue_chunk (GstWavParse * wav, const guint8 * data, guint32 size)
+ {
+ guint32 i, ncues;
+ GList *cues = NULL;
+ GstWavParseCue *cue;
+
+ if (wav->cues) {
+ GST_WARNING_OBJECT (wav, "found another cue's");
+ return TRUE;
+ }
+
+ if (size < 4) {
+ GST_WARNING_OBJECT (wav, "broken file %d", size);
+ return FALSE;
+ }
+
+ ncues = GST_READ_UINT32_LE (data);
+
+ /* guard the bounds check against guint32 overflow in ncues * 24 */
+ if (ncues > (size - 4) / 24) {
+ GST_WARNING_OBJECT (wav, "broken file %d %d", size, ncues);
+ return FALSE;
+ }
+
+ /* parse data */
+ data += 4;
+ for (i = 0; i < ncues; i++) {
+ cue = g_new0 (GstWavParseCue, 1);
+ cue->id = GST_READ_UINT32_LE (data);
+ cue->position = GST_READ_UINT32_LE (data + 4);
+ cue->data_chunk_id = GST_READ_UINT32_LE (data + 8);
+ cue->chunk_start = GST_READ_UINT32_LE (data + 12);
+ cue->block_start = GST_READ_UINT32_LE (data + 16);
+ cue->sample_offset = GST_READ_UINT32_LE (data + 20);
+ cues = g_list_append (cues, cue);
+ data += 24;
+ }
+
+ wav->cues = cues;
+
+ return TRUE;
+ }
+
+ /*
+ * gst_wavparse_labl_chunk:
+ * @wav GstWavParse object
+ * @data holder for data
+ * @size holder for data size
+ *
+ * Parse labl from @data to wav->labls.
+ *
+ * Returns: %TRUE when labl chunk is available
+ */
+ static gboolean
+ gst_wavparse_labl_chunk (GstWavParse * wav, const guint8 * data, guint32 size)
+ {
+ GstWavParseLabl *labl;
+
+ if (size < 5)
+ return FALSE;
+
+ labl = g_new0 (GstWavParseLabl, 1);
+
+ /* parse data */
+ labl->cue_point_id = GST_READ_UINT32_LE (data);
+ labl->text = g_strndup ((const gchar *) data + 4, size - 4);
+
+ wav->labls = g_list_append (wav->labls, labl);
+
+ return TRUE;
+ }
+
+ /*
+ * gst_wavparse_note_chunk:
+ * @wav GstWavParse object
+ * @data holder for data
+ * @size holder for data size
+ *
+ * Parse note from @data to wav->notes.
+ *
+ * Returns: %TRUE when note chunk is available
+ */
+ static gboolean
+ gst_wavparse_note_chunk (GstWavParse * wav, const guint8 * data, guint32 size)
+ {
+ GstWavParseNote *note;
+
+ if (size < 5)
+ return FALSE;
+
+ note = g_new0 (GstWavParseNote, 1);
+
+ /* parse data */
+ note->cue_point_id = GST_READ_UINT32_LE (data);
+ note->text = g_strndup ((const gchar *) data + 4, size - 4);
+
+ wav->notes = g_list_append (wav->notes, note);
+
+ return TRUE;
+ }
+
+ /*
+ * gst_wavparse_smpl_chunk:
+ * @wav GstWavParse object
+ * @data holder for data
+ * @size holder for data size
+ *
+ * Parse smpl chunk from @data.
+ *
+ * Returns: %TRUE when smpl chunk is available
+ */
+ static gboolean
+ gst_wavparse_smpl_chunk (GstWavParse * wav, const guint8 * data, guint32 size)
+ {
+ guint32 note_number;
+
+ /*
+ manufacturer_id = GST_READ_UINT32_LE (data);
+ product_id = GST_READ_UINT32_LE (data + 4);
+ sample_period = GST_READ_UINT32_LE (data + 8);
+ */
+ note_number = GST_READ_UINT32_LE (data + 12);
+ /*
+ pitch_fraction = GST_READ_UINT32_LE (data + 16);
+ SMPTE_format = GST_READ_UINT32_LE (data + 20);
+ SMPTE_offset = GST_READ_UINT32_LE (data + 24);
+ num_sample_loops = GST_READ_UINT32_LE (data + 28);
+ List of Sample Loops, 24 bytes each
+ */
+
+ if (!wav->tags)
+ wav->tags = gst_tag_list_new_empty ();
+ gst_tag_list_add (wav->tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_MIDI_BASE_NOTE, (guint) note_number, NULL);
+ return TRUE;
+ }
+
+ /*
+ * gst_wavparse_adtl_chunk:
+ * @wav GstWavParse object
+ * @data holder for data
+ * @size holder for data size
+ *
+ * Parse adtl from @data.
+ *
+ * Returns: %TRUE when adtl chunk is available
+ */
+ static gboolean
+ gst_wavparse_adtl_chunk (GstWavParse * wav, const guint8 * data, guint32 size)
+ {
+ guint32 ltag, lsize, offset = 0;
+
+ while (size >= 8) {
+ ltag = GST_READ_UINT32_LE (data + offset);
+ lsize = GST_READ_UINT32_LE (data + offset + 4);
+
+ if (lsize > (G_MAXUINT - 8) || lsize + 8 > size) {
+ GST_WARNING_OBJECT (wav, "Invalid adtl size: %u + 8 > %u", lsize, size);
+ return FALSE;
+ }
+
+ switch (ltag) {
+ case GST_RIFF_TAG_labl:
+ gst_wavparse_labl_chunk (wav, data + offset + 8, lsize);
+ break;
+ case GST_RIFF_TAG_note:
+ gst_wavparse_note_chunk (wav, data + offset + 8, lsize);
+ break;
+ default:
+ GST_WARNING_OBJECT (wav, "Unknowm adtl %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (ltag));
+ GST_MEMDUMP_OBJECT (wav, "Unknowm adtl", &data[offset], lsize);
+ break;
+ }
+ offset += 8 + GST_ROUND_UP_2 (lsize);
+ size -= 8 + GST_ROUND_UP_2 (lsize);
+ }
+
+ return TRUE;
+ }
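+
+ /* An adtl ("associated data list") payload is a sequence of sub-chunks,
+ * each an 8 byte header followed by data padded to an even length, e.g.
+ *
+ * 'labl' | size | cue-point-id | text...
+ * 'note' | size | cue-point-id | text...
+ *
+ * which is the layout the loop above walks with GST_ROUND_UP_2. */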
+
+ static GstTagList *
+ gst_wavparse_get_tags_toc_entry (GstToc * toc, gchar * id)
+ {
+ GstTagList *tags = NULL;
+ GstTocEntry *entry = NULL;
+
+ entry = gst_toc_find_entry (toc, id);
+ if (entry != NULL) {
+ tags = gst_toc_entry_get_tags (entry);
+ if (tags == NULL) {
+ tags = gst_tag_list_new_empty ();
+ gst_toc_entry_set_tags (entry, tags);
+ }
+ }
+
+ return tags;
+ }
+
+ /*
+ * gst_wavparse_create_toc:
+ * @wav GstWavParse object
+ *
+ * Create TOC from wav->cues and wav->labls.
+ */
+ static gboolean
+ gst_wavparse_create_toc (GstWavParse * wav)
+ {
+ gint64 start, stop;
+ gchar *id;
+ GList *list;
+ GstWavParseCue *cue;
+ GstWavParseLabl *labl;
+ GstWavParseNote *note;
+ GstTagList *tags;
+ GstToc *toc;
+ GstTocEntry *entry = NULL, *cur_subentry = NULL, *prev_subentry = NULL;
+
+ GST_OBJECT_LOCK (wav);
+ if (wav->toc) {
+ GST_OBJECT_UNLOCK (wav);
+ GST_WARNING_OBJECT (wav, "found another TOC");
+ return FALSE;
+ }
+
+ if (!wav->cues) {
+ GST_OBJECT_UNLOCK (wav);
+ return TRUE;
+ }
+
+ /* FIXME: send CURRENT scope toc too */
+ toc = gst_toc_new (GST_TOC_SCOPE_GLOBAL);
+
+ /* add cue edition */
+ entry = gst_toc_entry_new (GST_TOC_ENTRY_TYPE_EDITION, "cue");
+ gst_toc_entry_set_start_stop_times (entry, 0, wav->duration);
+ gst_toc_append_entry (toc, entry);
+
+ /* add tracks in cue edition */
+ list = wav->cues;
+ while (list) {
+ cue = list->data;
+ prev_subentry = cur_subentry;
+ /* previous track stop time = current track start time */
+ if (prev_subentry != NULL) {
+ gst_toc_entry_get_start_stop_times (prev_subentry, &start, NULL);
+ stop = gst_util_uint64_scale_round (cue->position, GST_SECOND, wav->rate);
+ gst_toc_entry_set_start_stop_times (prev_subentry, start, stop);
+ }
+ id = g_strdup_printf ("%08x", cue->id);
+ cur_subentry = gst_toc_entry_new (GST_TOC_ENTRY_TYPE_TRACK, id);
+ g_free (id);
+ start = gst_util_uint64_scale_round (cue->position, GST_SECOND, wav->rate);
+ stop = wav->duration;
+ gst_toc_entry_set_start_stop_times (cur_subentry, start, stop);
+ gst_toc_entry_append_sub_entry (entry, cur_subentry);
+ list = g_list_next (list);
+ }
+
+ /* add tags in tracks */
+ list = wav->labls;
+ while (list) {
+ labl = list->data;
+ id = g_strdup_printf ("%08x", labl->cue_point_id);
+ tags = gst_wavparse_get_tags_toc_entry (toc, id);
+ g_free (id);
+ if (tags != NULL) {
+ gst_tag_list_add (tags, GST_TAG_MERGE_APPEND, GST_TAG_TITLE, labl->text,
+ NULL);
+ }
+ list = g_list_next (list);
+ }
+ list = wav->notes;
+ while (list) {
+ note = list->data;
+ id = g_strdup_printf ("%08x", note->cue_point_id);
+ tags = gst_wavparse_get_tags_toc_entry (toc, id);
+ g_free (id);
+ if (tags != NULL) {
+ gst_tag_list_add (tags, GST_TAG_MERGE_PREPEND, GST_TAG_COMMENT,
+ note->text, NULL);
+ }
+ list = g_list_next (list);
+ }
+
+ /* send data as TOC */
+ wav->toc = toc;
+
+ /* send TOC event */
+ if (wav->toc) {
+ GST_OBJECT_UNLOCK (wav);
+ gst_pad_push_event (wav->srcpad, gst_event_new_toc (wav->toc, FALSE));
+ }
+
+ return TRUE;
+ }
+
+ #define MAX_BUFFER_SIZE 4096
+
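+ /* The ds64 payload (see tech3306) stores three 64-bit sizes as low/high
+ * 32-bit pairs: the RIFF size at offset 0, the data size at offset 8 and
+ * the sample count at offset 16, hence the reads below starting at
+ * map.data + 2 * 4. */
+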
+ static gboolean
+ parse_ds64 (GstWavParse * wav, GstBuffer * buf)
+ {
+ GstMapInfo map;
+ guint32 dataSizeLow, dataSizeHigh;
+ guint32 sampleCountLow, sampleCountHigh;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ dataSizeLow = GST_READ_UINT32_LE (map.data + 2 * 4);
+ dataSizeHigh = GST_READ_UINT32_LE (map.data + 3 * 4);
+ sampleCountLow = GST_READ_UINT32_LE (map.data + 4 * 4);
+ sampleCountHigh = GST_READ_UINT32_LE (map.data + 5 * 4);
+ gst_buffer_unmap (buf, &map);
+ if (dataSizeHigh != 0xFFFFFFFF && dataSizeLow != 0xFFFFFFFF) {
+ wav->datasize = ((guint64) dataSizeHigh << 32) | dataSizeLow;
+ }
+ if (sampleCountHigh != 0xFFFFFFFF && sampleCountLow != 0xFFFFFFFF) {
+ wav->fact = ((guint64) sampleCountHigh << 32) | sampleCountLow;
+ }
+
+ GST_DEBUG_OBJECT (wav, "Got 'ds64' TAG, datasize : %" G_GINT64_FORMAT
+ " fact: %" G_GINT64_FORMAT, wav->datasize, wav->fact);
+ return TRUE;
+ }
+
+ static GstFlowReturn
+ gst_wavparse_stream_headers (GstWavParse * wav)
+ {
+ GstFlowReturn res = GST_FLOW_OK;
+ GstBuffer *buf = NULL;
+ gst_riff_strf_auds *header = NULL;
+ guint32 tag, size;
+ gboolean gotdata = FALSE;
+ GstCaps *caps = NULL;
+ gchar *codec_name = NULL;
+ gint64 upstream_size = 0;
+ GstStructure *s;
+
+ /* search for the "fmt " chunk, which must come before "data" */
+ while (!wav->got_fmt) {
+ GstBuffer *extra;
+
+ if (wav->streaming) {
+ if (!gst_wavparse_peek_chunk (wav, &tag, &size))
+ return res;
+
+ gst_adapter_flush (wav->adapter, 8);
+ wav->offset += 8;
+
+ if (size) {
+ buf = gst_adapter_take_buffer (wav->adapter, size);
+ if (size & 1)
+ gst_adapter_flush (wav->adapter, 1);
+ wav->offset += GST_ROUND_UP_2 (size);
+ } else {
+ buf = gst_buffer_new ();
+ }
+ } else {
+ if ((res = gst_riff_read_chunk (GST_ELEMENT_CAST (wav), wav->sinkpad,
+ &wav->offset, &tag, &buf)) != GST_FLOW_OK)
+ return res;
+ }
+
+ if (tag == GST_RS64_TAG_DS64) {
+ if (!parse_ds64 (wav, buf))
+ goto fail;
+ else
+ continue;
+ }
+
+ if (tag != GST_RIFF_TAG_fmt) {
+ GST_DEBUG_OBJECT (wav, "skipping %" GST_FOURCC_FORMAT " chunk",
+ GST_FOURCC_ARGS (tag));
+ gst_buffer_unref (buf);
+ buf = NULL;
+ continue;
+ }
+
+ if (!(gst_riff_parse_strf_auds (GST_ELEMENT_CAST (wav), buf, &header,
+ &extra)))
+ goto parse_header_error;
+
+ buf = NULL; /* parse_strf_auds() took ownership of buffer */
+
+ /* do sanity checks of header fields */
+ if (header->channels == 0)
+ goto no_channels;
+ if (header->rate == 0)
+ goto no_rate;
+
+ GST_DEBUG_OBJECT (wav, "creating the caps");
+
+ /* Note: gst_riff_create_audio_caps might need to fix values in
+ * the header depending on the format, so call it first */
+ /* FIXME: Need to handle the channel reorder map */
+ caps = gst_riff_create_audio_caps (header->format, NULL, header, extra,
+ NULL, &codec_name, NULL);
+
+ if (extra)
+ gst_buffer_unref (extra);
+
+ if (!caps)
+ goto unknown_format;
+
+ /* If we got raw audio from upstream, we remove the codec_data field,
+ * which may have been added if the wav header included an extended
+ * chunk. We want to keep it for non raw audio.
+ */
+ s = gst_caps_get_structure (caps, 0);
+ if (s && gst_structure_has_name (s, "audio/x-raw")) {
+ gst_structure_remove_field (s, "codec_data");
+ }
+
+ /* do more sanity checks of header fields
+ * (these can be sanitized by gst_riff_create_audio_caps()
+ */
+ wav->format = header->format;
+ wav->rate = header->rate;
+ wav->channels = header->channels;
+ wav->blockalign = header->blockalign;
+ wav->depth = header->bits_per_sample;
+ wav->av_bps = header->av_bps;
+ wav->vbr = FALSE;
+
+ g_free (header);
+ header = NULL;
+
+ /* do format specific handling */
+ switch (wav->format) {
+ case GST_RIFF_WAVE_FORMAT_MPEGL12:
+ case GST_RIFF_WAVE_FORMAT_MPEGL3:
+ {
+ /* Note: workaround for mp2/mp3 embedded in wav, that relies on the
+ * bitrate inside the mpeg stream */
+ GST_INFO ("resetting bps from %u to 0 for mp2/3", wav->av_bps);
+ wav->bps = 0;
+ break;
+ }
+ case GST_RIFF_WAVE_FORMAT_PCM:
+ if (wav->blockalign > wav->channels * ((wav->depth + 7) / 8))
+ goto invalid_blockalign;
+ /* fall through */
+ default:
+ if (wav->av_bps > wav->blockalign * wav->rate)
+ goto invalid_bps;
+ /* use the configured bps */
+ wav->bps = wav->av_bps;
+ break;
+ }
+
+ wav->width = (wav->blockalign * 8) / wav->channels;
+ wav->bytes_per_sample = wav->channels * wav->width / 8;
+
+ if (wav->bytes_per_sample <= 0)
+ goto no_bytes_per_sample;
+
+ GST_DEBUG_OBJECT (wav, "blockalign = %u", (guint) wav->blockalign);
+ GST_DEBUG_OBJECT (wav, "width = %u", (guint) wav->width);
+ GST_DEBUG_OBJECT (wav, "depth = %u", (guint) wav->depth);
+ GST_DEBUG_OBJECT (wav, "av_bps = %u", (guint) wav->av_bps);
+ GST_DEBUG_OBJECT (wav, "frequency = %u", (guint) wav->rate);
+ GST_DEBUG_OBJECT (wav, "channels = %u", (guint) wav->channels);
+ GST_DEBUG_OBJECT (wav, "bytes_per_sample = %u", wav->bytes_per_sample);
+
+ /* bps can be 0 when we don't have a valid bitrate (mostly for compressed
+ * formats). This will make the element output a BYTE format segment and
+ * will not timestamp the outgoing buffers.
+ */
+ GST_DEBUG_OBJECT (wav, "bps = %u", (guint) wav->bps);
+
+ GST_DEBUG_OBJECT (wav, "caps = %" GST_PTR_FORMAT, caps);
+
+ /* create pad later so we can sniff the first few bytes
+ * of the real data and correct our caps if necessary */
+ gst_caps_replace (&wav->caps, caps);
+ gst_caps_replace (&caps, NULL);
+
+ wav->got_fmt = TRUE;
+
+ if (wav->tags == NULL)
+ wav->tags = gst_tag_list_new_empty ();
+
+ {
+ GstCaps *templ_caps = gst_pad_get_pad_template_caps (wav->sinkpad);
+ gst_pb_utils_add_codec_description_to_tag_list (wav->tags,
+ GST_TAG_CONTAINER_FORMAT, templ_caps);
+ gst_caps_unref (templ_caps);
+ }
+
+ /* If bps is nonzero, then we do have a valid bitrate that can be
+ * announced in a tag list. */
+ if (wav->bps) {
+ guint bitrate = wav->bps * 8;
+ gst_tag_list_add (wav->tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_BITRATE, bitrate, NULL);
+ }
+
+ if (codec_name) {
+ gst_tag_list_add (wav->tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_AUDIO_CODEC, codec_name, NULL);
+
+ g_free (codec_name);
+ codec_name = NULL;
+ }
+
+ }
+
+ gst_pad_peer_query_duration (wav->sinkpad, GST_FORMAT_BYTES, &upstream_size);
+ GST_DEBUG_OBJECT (wav, "upstream size %" G_GUINT64_FORMAT, upstream_size);
+
+ /* loop headers until we get data */
+ while (!gotdata) {
+ if (wav->streaming) {
+ if (!gst_wavparse_peek_chunk_info (wav, &tag, &size))
+ goto exit;
+ } else {
+ GstMapInfo map;
+
+ buf = NULL;
+ if ((res =
+ gst_pad_pull_range (wav->sinkpad, wav->offset, 8,
+ &buf)) != GST_FLOW_OK)
+ goto header_read_error;
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ tag = GST_READ_UINT32_LE (map.data);
+ size = GST_READ_UINT32_LE (map.data + 4);
+ gst_buffer_unmap (buf, &map);
+ }
+
+ GST_INFO_OBJECT (wav,
+ "Got TAG: %" GST_FOURCC_FORMAT ", offset %" G_GUINT64_FORMAT ", size %"
+ G_GUINT32_FORMAT, GST_FOURCC_ARGS (tag), wav->offset, size);
+
+ /* Maximum valid size is INT_MAX */
+ if (size & 0x80000000) {
+ GST_WARNING_OBJECT (wav, "Invalid size, clipping to 0x7fffffff");
+ size = 0x7fffffff;
+ }
+
+ /* Clip to upstream size if known */
+ if (upstream_size > 0 && size + wav->offset > upstream_size) {
+ GST_WARNING_OBJECT (wav, "Clipping chunk size to file size");
+ g_assert (upstream_size >= wav->offset);
+ size = upstream_size - wav->offset;
+ }
+
+ /* wav is a st00pid format, we don't know for sure where data starts.
+ * So we have to go bit by bit until we find the 'data' header
+ */
+ switch (tag) {
+ case GST_RIFF_TAG_data:{
+ guint64 size64;
+
+ GST_DEBUG_OBJECT (wav, "Got 'data' TAG, size : %u", size);
+ size64 = size;
+ if (wav->ignore_length) {
+ GST_DEBUG_OBJECT (wav, "Ignoring length");
+ size64 = 0;
+ }
+ if (wav->streaming) {
+ gst_adapter_flush (wav->adapter, 8);
+ gotdata = TRUE;
+ } else {
+ gst_buffer_unref (buf);
+ }
+ wav->offset += 8;
+ wav->datastart = wav->offset;
+ /* use size from ds64 chunk if available */
+ if (size64 == -1 && wav->datasize > 0) {
+ GST_DEBUG_OBJECT (wav, "Using ds64 datasize");
+ size64 = wav->datasize;
+ }
+ wav->chunk_size = size64;
+
+ /* If size is zero, then the data chunk probably actually extends to
+ the end of the file */
+ if (size64 == 0 && upstream_size) {
+ size64 = upstream_size - wav->datastart;
+ }
+ /* Or the file might be truncated */
+ else if (upstream_size) {
+ size64 = MIN (size64, (upstream_size - wav->datastart));
+ }
+ wav->datasize = size64;
+ wav->dataleft = size64;
+ wav->end_offset = size64 + wav->datastart;
+ if (!wav->streaming) {
+ /* We will continue parsing tags 'till end */
+ wav->offset += size64;
+ }
+ GST_DEBUG_OBJECT (wav, "datasize = %" G_GUINT64_FORMAT, size64);
+ break;
+ }
+ case GST_RIFF_TAG_fact:{
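+ /* the 'fact' chunk holds the number of samples; for compressed formats
+ * it is used later to derive a duration when no usable bitrate is known */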
+ if (wav->fact == 0 &&
+ wav->format != GST_RIFF_WAVE_FORMAT_MPEGL12 &&
+ wav->format != GST_RIFF_WAVE_FORMAT_MPEGL3) {
+ const guint data_size = 4;
+
+ GST_INFO_OBJECT (wav, "Have fact chunk");
+ if (size < data_size) {
+ if (!gst_waveparse_ignore_chunk (wav, buf, tag, size)) {
+ /* need more data */
+ goto exit;
+ }
+ GST_DEBUG_OBJECT (wav, "need %u, available %u; ignoring chunk",
+ data_size, size);
+ break;
+ }
+ /* number of samples (for compressed formats) */
+ if (wav->streaming) {
+ const guint8 *data = NULL;
+
+ if (!gst_wavparse_peek_chunk (wav, &tag, &size)) {
+ goto exit;
+ }
+ gst_adapter_flush (wav->adapter, 8);
+ data = gst_adapter_map (wav->adapter, data_size);
+ wav->fact = GST_READ_UINT32_LE (data);
+ gst_adapter_unmap (wav->adapter);
+ gst_adapter_flush (wav->adapter, GST_ROUND_UP_2 (size));
+ } else {
+ gst_buffer_unref (buf);
+ buf = NULL;
+ if ((res =
+ gst_pad_pull_range (wav->sinkpad, wav->offset + 8,
+ data_size, &buf)) != GST_FLOW_OK)
+ goto header_read_error;
+ gst_buffer_extract (buf, 0, &wav->fact, 4);
+ wav->fact = GUINT32_FROM_LE (wav->fact);
+ gst_buffer_unref (buf);
+ }
+ GST_DEBUG_OBJECT (wav, "have fact %" G_GUINT64_FORMAT, wav->fact);
+ wav->offset += 8 + GST_ROUND_UP_2 (size);
+ break;
+ } else {
+ if (!gst_waveparse_ignore_chunk (wav, buf, tag, size)) {
+ /* need more data */
+ goto exit;
+ }
+ }
+ break;
+ }
+ case GST_RIFF_TAG_acid:{
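+ /* the 'acid' chunk carries ACID loop/tempo metadata; only the tempo is
+ * extracted here and exposed as the beats-per-minute tag */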
+ const gst_riff_acid *acid = NULL;
+ const guint data_size = sizeof (gst_riff_acid);
+ gfloat tempo;
++#ifdef TIZEN_FEATURE_WAVPARSE_MODIFICATION
++ const guint8 *data = NULL;
++#endif
+
+ GST_INFO_OBJECT (wav, "Have acid chunk");
+ if (size < data_size) {
+ if (!gst_waveparse_ignore_chunk (wav, buf, tag, size)) {
+ /* need more data */
+ goto exit;
+ }
+ GST_DEBUG_OBJECT (wav, "need %u, available %u; ignoring chunk",
+ data_size, size);
+ break;
+ }
+ if (wav->streaming) {
+ if (!gst_wavparse_peek_chunk (wav, &tag, &size)) {
+ goto exit;
+ }
+ gst_adapter_flush (wav->adapter, 8);
++#ifdef TIZEN_FEATURE_WAVPARSE_MODIFICATION
++ if (gst_adapter_available (wav->adapter) < 24) {
++ goto exit;
++ }
++ data = gst_adapter_map (wav->adapter, 24);
++ tempo = GST_READ_FLOAT_LE (data + 20);
++#else
+ acid = (const gst_riff_acid *) gst_adapter_map (wav->adapter,
+ data_size);
+ tempo = acid->tempo;
++#endif
+ gst_adapter_unmap (wav->adapter);
+ } else {
+ GstMapInfo map;
+ gst_buffer_unref (buf);
+ buf = NULL;
+ if ((res =
+ gst_pad_pull_range (wav->sinkpad, wav->offset + 8,
+ size, &buf)) != GST_FLOW_OK)
+ goto header_read_error;
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ acid = (const gst_riff_acid *) map.data;
+ tempo = acid->tempo;
+ gst_buffer_unmap (buf, &map);
+ }
+ /* send data as tags */
+ if (!wav->tags)
+ wav->tags = gst_tag_list_new_empty ();
+ gst_tag_list_add (wav->tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_BEATS_PER_MINUTE, tempo, NULL);
+
+ size = GST_ROUND_UP_2 (size);
+ if (wav->streaming) {
+ gst_adapter_flush (wav->adapter, size);
+ } else {
+ gst_buffer_unref (buf);
+ }
+ wav->offset += 8 + size;
+ break;
+ }
+ /* FIXME: all list tags after data are ignored in streaming mode */
+ case GST_RIFF_TAG_LIST:{
+ guint32 ltag;
+
+ if (wav->streaming) {
+ const guint8 *data = NULL;
+
+ if (gst_adapter_available (wav->adapter) < 12) {
+ goto exit;
+ }
+ data = gst_adapter_map (wav->adapter, 12);
+ ltag = GST_READ_UINT32_LE (data + 8);
+ gst_adapter_unmap (wav->adapter);
+ } else {
+ gst_buffer_unref (buf);
+ buf = NULL;
+ if ((res =
+ gst_pad_pull_range (wav->sinkpad, wav->offset, 12,
+ &buf)) != GST_FLOW_OK)
+ goto header_read_error;
+ gst_buffer_extract (buf, 8, &ltag, 4);
+ ltag = GUINT32_FROM_LE (ltag);
+ }
+ switch (ltag) {
+ case GST_RIFF_LIST_INFO:{
+ const gint data_size = size - 4;
+ GstTagList *new;
+
+ GST_INFO_OBJECT (wav, "Have LIST chunk INFO size %u", data_size);
+ if (wav->streaming) {
+ if (!gst_wavparse_peek_chunk (wav, &tag, &size)) {
+ goto exit;
+ }
+ gst_adapter_flush (wav->adapter, 12);
+ wav->offset += 12;
+ if (data_size > 0) {
+ buf = gst_adapter_take_buffer (wav->adapter, data_size);
+ if (data_size & 1)
+ gst_adapter_flush (wav->adapter, 1);
+ }
+ } else {
+ wav->offset += 12;
+ gst_buffer_unref (buf);
+ buf = NULL;
+ if (data_size > 0) {
+ if ((res =
+ gst_pad_pull_range (wav->sinkpad, wav->offset,
+ data_size, &buf)) != GST_FLOW_OK)
+ goto header_read_error;
+ }
+ }
+ if (data_size > 0) {
+ /* parse tags */
+ gst_riff_parse_info (GST_ELEMENT (wav), buf, &new);
+ if (new) {
+ GstTagList *old = wav->tags;
+ wav->tags =
+ gst_tag_list_merge (old, new, GST_TAG_MERGE_REPLACE);
+ if (old)
+ gst_tag_list_unref (old);
+ gst_tag_list_unref (new);
+ }
+ gst_buffer_unref (buf);
+ wav->offset += GST_ROUND_UP_2 (data_size);
+ }
+ break;
+ }
+ case GST_RIFF_LIST_adtl:{
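+ /* 'adtl' (associated data list): labels and notes for cue points */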
+ const gint data_size = size - 4;
+
+ GST_INFO_OBJECT (wav, "Have 'adtl' LIST, size %u", data_size);
+ if (wav->streaming) {
+ const guint8 *data = NULL;
+
+ gst_adapter_flush (wav->adapter, 12);
+ wav->offset += 12;
+ data = gst_adapter_map (wav->adapter, data_size);
+ gst_wavparse_adtl_chunk (wav, data, data_size);
+ gst_adapter_unmap (wav->adapter);
+ } else {
+ GstMapInfo map;
+
+ gst_buffer_unref (buf);
+ buf = NULL;
+ wav->offset += 12;
+ if ((res =
+ gst_pad_pull_range (wav->sinkpad, wav->offset,
+ data_size, &buf)) != GST_FLOW_OK)
+ goto header_read_error;
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ gst_wavparse_adtl_chunk (wav, (const guint8 *) map.data,
+ data_size);
+ gst_buffer_unmap (buf, &map);
+ }
+ wav->offset += GST_ROUND_UP_2 (data_size);
+ break;
+ }
+ default:
+ GST_WARNING_OBJECT (wav, "Ignoring LIST chunk %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (ltag));
+ if (!gst_waveparse_ignore_chunk (wav, buf, tag, size))
+ /* need more data */
+ goto exit;
+ break;
+ }
+ break;
+ }
+ case GST_RIFF_TAG_cue:{
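+ /* the 'cue ' chunk holds marker positions, later exposed via the TOC */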
+ const guint data_size = size;
+
+ GST_DEBUG_OBJECT (wav, "Have 'cue' TAG, size : %u", data_size);
+ if (wav->streaming) {
+ const guint8 *data = NULL;
+
+ if (!gst_wavparse_peek_chunk (wav, &tag, &size)) {
+ goto exit;
+ }
+ gst_adapter_flush (wav->adapter, 8);
+ wav->offset += 8;
+ data = gst_adapter_map (wav->adapter, data_size);
+ if (!gst_wavparse_cue_chunk (wav, data, data_size)) {
+ goto header_read_error;
+ }
+ gst_adapter_unmap (wav->adapter);
+ } else {
+ GstMapInfo map;
+
+ wav->offset += 8;
+ gst_buffer_unref (buf);
+ buf = NULL;
+ if ((res =
+ gst_pad_pull_range (wav->sinkpad, wav->offset,
+ data_size, &buf)) != GST_FLOW_OK)
+ goto header_read_error;
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ if (!gst_wavparse_cue_chunk (wav, (const guint8 *) map.data,
+ data_size)) {
+ goto header_read_error;
+ }
+ gst_buffer_unmap (buf, &map);
+ }
+ size = GST_ROUND_UP_2 (size);
+ if (wav->streaming) {
+ gst_adapter_flush (wav->adapter, size);
+ } else {
+ gst_buffer_unref (buf);
+ }
+ wav->offset += size;
+ break;
+ }
+ case GST_RIFF_TAG_smpl:{
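+ /* the 'smpl' chunk carries sampler metadata such as loop points */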
+ const gint data_size = size;
+
+ GST_DEBUG_OBJECT (wav, "Have 'smpl' TAG, size : %u", data_size);
+ if (wav->streaming) {
+ const guint8 *data = NULL;
+
+ if (!gst_wavparse_peek_chunk (wav, &tag, &size)) {
+ goto exit;
+ }
+ gst_adapter_flush (wav->adapter, 8);
+ wav->offset += 8;
+ data = gst_adapter_map (wav->adapter, data_size);
+ if (!gst_wavparse_smpl_chunk (wav, data, data_size)) {
+ goto header_read_error;
+ }
+ gst_adapter_unmap (wav->adapter);
+ } else {
+ GstMapInfo map;
+
+ wav->offset += 8;
+ gst_buffer_unref (buf);
+ buf = NULL;
+ if ((res =
+ gst_pad_pull_range (wav->sinkpad, wav->offset,
+ data_size, &buf)) != GST_FLOW_OK)
+ goto header_read_error;
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ if (!gst_wavparse_smpl_chunk (wav, (const guint8 *) map.data,
+ data_size)) {
+ goto header_read_error;
+ }
+ gst_buffer_unmap (buf, &map);
+ }
+ size = GST_ROUND_UP_2 (size);
+ if (wav->streaming) {
+ gst_adapter_flush (wav->adapter, size);
+ } else {
+ gst_buffer_unref (buf);
+ }
+ wav->offset += size;
+ break;
+ }
+ default:
+ GST_WARNING_OBJECT (wav, "Ignoring chunk %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (tag));
+ if (!gst_waveparse_ignore_chunk (wav, buf, tag, size))
+ /* need more data */
+ goto exit;
+ break;
+ }
+
+ if (upstream_size && (wav->offset >= upstream_size)) {
+ /* Now we have gone through the whole file */
+ gotdata = TRUE;
+ }
+ }
+
+ GST_DEBUG_OBJECT (wav, "Finished parsing headers");
+
+ if (wav->bps <= 0 && wav->fact) {
+ #if 0
+ /* not a good idea, as for embedded mp2/mp3 we set bps to 0 earlier */
+ wav->bps =
+ (guint32) gst_util_uint64_scale ((guint64) wav->rate, wav->datasize,
+ (guint64) wav->fact);
+ GST_INFO_OBJECT (wav, "calculated bps : %u, enabling VBR", wav->bps);
+ #endif
+ wav->vbr = TRUE;
+ }
+
+ if (gst_wavparse_calculate_duration (wav)) {
+ gst_segment_init (&wav->segment, GST_FORMAT_TIME);
+ if (!wav->ignore_length)
+ wav->segment.duration = wav->duration;
+ if (!wav->toc)
+ gst_wavparse_create_toc (wav);
+ } else {
+ /* no bitrate, let downstream peer do the math, we'll feed it bytes. */
+ gst_segment_init (&wav->segment, GST_FORMAT_BYTES);
+ if (!wav->ignore_length)
+ wav->segment.duration = wav->datasize;
+ }
+
+ /* now we have all the info to perform a pending seek if any, if no
+ * event, this will still do the right thing and it will also send
+ * the right newsegment event downstream. */
+ gst_wavparse_perform_seek (wav, wav->seek_event);
+ /* remove pending event */
+ gst_event_replace (&wav->seek_event, NULL);
+
+ /* we just started, we are discont */
+ wav->discont = TRUE;
+
+ wav->state = GST_WAVPARSE_DATA;
+
+ /* determine a reasonable max buffer size, that is, buffers that are
+ * not too small either size- or time-wise, so we do not end up with
+ * too many of them */
+ /* var abuse: upstream_size is reused as a scratch variable here */
+ if (gst_wavparse_time_to_bytepos (wav, 40 * GST_MSECOND, &upstream_size))
+ wav->max_buf_size = upstream_size;
+ else
+ wav->max_buf_size = 0;
+ wav->max_buf_size = MAX (wav->max_buf_size, MAX_BUFFER_SIZE);
+ if (wav->blockalign > 0)
+ wav->max_buf_size -= (wav->max_buf_size % wav->blockalign);
+
+ GST_DEBUG_OBJECT (wav, "max buffer size %u", wav->max_buf_size);
+
+ return GST_FLOW_OK;
+
+ /* ERROR */
+ exit:
+ {
+ g_free (codec_name);
+ g_free (header);
+ if (caps)
+ gst_caps_unref (caps);
+ return res;
+ }
+ fail:
+ {
+ res = GST_FLOW_ERROR;
+ goto exit;
+ }
+ parse_header_error:
+ {
+ GST_ELEMENT_ERROR (wav, STREAM, DEMUX, (NULL),
+ ("Couldn't parse audio header"));
+ goto fail;
+ }
+ no_channels:
+ {
+ GST_ELEMENT_ERROR (wav, STREAM, FAILED, (NULL),
+ ("Stream claims to contain no channels - invalid data"));
+ goto fail;
+ }
+ no_rate:
+ {
+ GST_ELEMENT_ERROR (wav, STREAM, FAILED, (NULL),
+ ("Stream with sample_rate == 0 - invalid data"));
+ goto fail;
+ }
+ invalid_blockalign:
+ {
+ GST_ELEMENT_ERROR (wav, STREAM, FAILED, (NULL),
+ ("Stream claims blockalign = %u, which is more than %u - invalid data",
+ wav->blockalign, wav->channels * ((wav->depth + 7) / 8)));
+ goto fail;
+ }
+ invalid_bps:
+ {
+ GST_ELEMENT_ERROR (wav, STREAM, FAILED, (NULL),
+ ("Stream claims av_bsp = %u, which is more than %u - invalid data",
+ wav->av_bps, wav->blockalign * wav->rate));
+ goto fail;
+ }
+ no_bytes_per_sample:
+ {
+ GST_ELEMENT_ERROR (wav, STREAM, FAILED, (NULL),
+ ("Could not calculate bytes per sample - invalid data"));
+ goto fail;
+ }
+ unknown_format:
+ {
+ GST_ELEMENT_ERROR (wav, STREAM, TYPE_NOT_FOUND, (NULL),
+ ("No caps found for format 0x%x, %u channels, %u Hz",
+ wav->format, wav->channels, wav->rate));
+ goto fail;
+ }
+ header_read_error:
+ {
+ GST_ELEMENT_ERROR (wav, STREAM, DEMUX, (NULL),
+ ("Couldn't read in header %d (%s)", res, gst_flow_get_name (res)));
+ goto fail;
+ }
+ }
+
+ /*
+ * Parse the initial RIFF/WAVE file header when streaming
+ */
+ static GstFlowReturn
+ gst_wavparse_parse_stream_init (GstWavParse * wav)
+ {
+ if (gst_adapter_available (wav->adapter) >= 12) {
+ GstBuffer *tmp;
+
+ /* _take flushes the data */
+ tmp = gst_adapter_take_buffer (wav->adapter, 12);
+
+ GST_DEBUG ("Parsing wav header");
+ if (!gst_wavparse_parse_file_header (GST_ELEMENT_CAST (wav), tmp))
+ return GST_FLOW_ERROR;
+
+ wav->offset += 12;
+ /* Go to next state */
+ wav->state = GST_WAVPARSE_HEADER;
+ }
+ return GST_FLOW_OK;
+ }
+
+ /* handle an event sent directly to the element.
+ *
+ * This event can be sent either in the READY state or the
+ * >READY state. The only event of interest really is the seek
+ * event.
+ *
+ * In the READY state we can only store the event and try to
+ * respect it when going to PAUSED. We assume we are in the
+ * READY state when our parsing state != GST_WAVPARSE_DATA.
+ *
+ * When we are streaming, we can simply perform the seek right
+ * away.
+ */
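+ /* Usage sketch (illustrative, not part of the element): an application
+ * could exercise this path by sending a seek straight to the element:
+ *
+ *   gst_element_send_event (GST_ELEMENT (wavparse),
+ *       gst_event_new_seek (1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
+ *           GST_SEEK_TYPE_SET, 5 * GST_SECOND, GST_SEEK_TYPE_NONE, -1));
+ */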
+ static gboolean
+ gst_wavparse_send_event (GstElement * element, GstEvent * event)
+ {
+ GstWavParse *wav = GST_WAVPARSE (element);
+ gboolean res = FALSE;
+
+ GST_DEBUG_OBJECT (wav, "received event %s", GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEEK:
+ if (wav->state == GST_WAVPARSE_DATA) {
+ /* we can handle the seek directly when streaming data */
+ res = gst_wavparse_perform_seek (wav, event);
+ } else {
+ GST_DEBUG_OBJECT (wav, "queuing seek for later");
+
+ gst_event_replace (&wav->seek_event, event);
+
+ /* we always return true */
+ res = TRUE;
+ }
+ break;
+ default:
+ break;
+ }
+ gst_event_unref (event);
+ return res;
+ }
+
+ static gboolean
+ gst_wavparse_have_dts_caps (const GstCaps * caps, GstTypeFindProbability prob)
+ {
+ GstStructure *s;
+
+ s = gst_caps_get_structure (caps, 0);
+ if (!gst_structure_has_name (s, "audio/x-dts"))
+ return FALSE;
+ /* typefind behavior for DTS:
+ * MAXIMUM: multiple frame syncs detected, certainly DTS
+ * LIKELY: single frame sync at offset 0. Maybe DTS?
+ * POSSIBLE: single frame sync, not at offset 0. Highly unlikely
+ * to be DTS. */
+ if (prob > GST_TYPE_FIND_LIKELY)
+ return TRUE;
+ if (prob <= GST_TYPE_FIND_POSSIBLE)
+ return FALSE;
+ /* for maybe, check for at least a valid-looking rate and channels */
+ if (!gst_structure_has_field (s, "channels"))
+ return FALSE;
+ /* and for extra assurance we could also check the rate from the DTS frame
+ * against the one in the wav header, but for now let's not do that */
+ return gst_structure_has_field (s, "rate");
+ }
+
+ static GstTagList *
+ gst_wavparse_get_upstream_tags (GstWavParse * wav, GstTagScope scope)
+ {
+ GstTagList *tags = NULL;
+ GstEvent *ev;
+ gint i;
+
+ i = 0;
+ while ((ev = gst_pad_get_sticky_event (wav->sinkpad, GST_EVENT_TAG, i++))) {
+ gst_event_parse_tag (ev, &tags);
+ if (tags != NULL && gst_tag_list_get_scope (tags) == scope) {
+ tags = gst_tag_list_copy (tags);
+ gst_tag_list_remove_tag (tags, GST_TAG_CONTAINER_FORMAT);
+ gst_event_unref (ev);
+ break;
+ }
+ tags = NULL;
+ gst_event_unref (ev);
+ }
+ return tags;
+ }
+
+ static void
+ gst_wavparse_add_src_pad (GstWavParse * wav, GstBuffer * buf)
+ {
+ GstStructure *s;
+ GstTagList *tags, *utags;
+
+ GST_DEBUG_OBJECT (wav, "adding src pad");
+
+ g_assert (wav->caps != NULL);
+
+ s = gst_caps_get_structure (wav->caps, 0);
+ if (s && gst_structure_has_name (s, "audio/x-raw") && buf != NULL
+ && (GST_BUFFER_OFFSET (buf) == 0 || !GST_BUFFER_OFFSET_IS_VALID (buf))) {
+ GstTypeFindProbability prob;
+ GstCaps *tf_caps;
+
+ tf_caps = gst_type_find_helper_for_buffer (GST_OBJECT (wav), buf, &prob);
+ if (tf_caps != NULL) {
+ GST_LOG ("typefind caps = %" GST_PTR_FORMAT ", P=%d", tf_caps, prob);
+ if (gst_wavparse_have_dts_caps (tf_caps, prob)) {
+ GST_INFO_OBJECT (wav, "Found DTS marker in file marked as raw PCM");
+ gst_caps_unref (wav->caps);
+ wav->caps = tf_caps;
+
+ gst_tag_list_add (wav->tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_AUDIO_CODEC, "dts", NULL);
+ } else {
+ GST_DEBUG_OBJECT (wav, "found caps %" GST_PTR_FORMAT " for stream "
+ "marked as raw PCM audio, but ignoring for now", tf_caps);
+ gst_caps_unref (tf_caps);
+ }
+ }
+ }
+
+ gst_pad_set_caps (wav->srcpad, wav->caps);
+
+ if (wav->start_segment) {
+ GST_DEBUG_OBJECT (wav, "Send start segment event on newpad");
+ gst_pad_push_event (wav->srcpad, wav->start_segment);
+ wav->start_segment = NULL;
+ }
+
+ /* upstream tags, e.g. from id3/ape tag before the wav file; assume for now
+ * that there'll be only one scope/type of tag list from upstream, if any */
+ utags = gst_wavparse_get_upstream_tags (wav, GST_TAG_SCOPE_GLOBAL);
+ if (utags == NULL)
+ utags = gst_wavparse_get_upstream_tags (wav, GST_TAG_SCOPE_STREAM);
+
+ /* if there's a tag upstream it's probably been added to override the
+ * tags from inside the wav header, so keep upstream tags if in doubt */
+ tags = gst_tag_list_merge (utags, wav->tags, GST_TAG_MERGE_KEEP);
+
+ if (wav->tags != NULL) {
+ gst_tag_list_unref (wav->tags);
+ wav->tags = NULL;
+ }
+
+ if (utags != NULL)
+ gst_tag_list_unref (utags);
+
+ /* send tags downstream, if any */
+ if (tags != NULL)
+ gst_pad_push_event (wav->srcpad, gst_event_new_tag (tags));
+ }
+
+ static GstFlowReturn
+ gst_wavparse_stream_data (GstWavParse * wav, gboolean flushing)
+ {
+ GstBuffer *buf = NULL;
+ GstFlowReturn res = GST_FLOW_OK;
+ guint64 desired, obtained;
+ GstClockTime timestamp, next_timestamp, duration;
+ guint64 pos, nextpos;
+
+ iterate_adapter:
+ GST_LOG_OBJECT (wav,
+ "offset: %" G_GINT64_FORMAT " , end: %" G_GINT64_FORMAT " , dataleft: %"
+ G_GINT64_FORMAT, wav->offset, wav->end_offset, wav->dataleft);
+
+ if ((wav->dataleft == 0 || wav->dataleft < wav->blockalign)) {
+ /* In case the chunk size was not declared at the start, get the size
+ * from the file size directly */
+ if (wav->chunk_size == 0) {
+ gint64 upstream_size = 0;
+
+ /* Get the size of the file */
+ if (!gst_pad_peer_query_duration (wav->sinkpad, GST_FORMAT_BYTES,
+ &upstream_size))
+ goto found_eos;
+
+ if (upstream_size < wav->offset + wav->datastart)
+ goto found_eos;
+
+ /* If the file has grown since we started, continue reading it */
+ wav->dataleft = upstream_size - wav->offset - wav->datastart;
+ wav->end_offset = upstream_size;
+
+ /* Get the next n bytes and output them, if we can */
+ if (wav->dataleft == 0 || wav->dataleft < wav->blockalign)
+ goto found_eos;
+ } else {
+ goto found_eos;
+ }
+ }
+
+ /* scale the amount of data by the segment rate so we get equal
+ * amounts of data regardless of the playback rate */
+ desired =
+ MIN (gst_guint64_to_gdouble (wav->dataleft),
+ wav->max_buf_size * ABS (wav->segment.rate));
+
+ if (desired >= wav->blockalign && wav->blockalign > 0)
+ desired -= (desired % wav->blockalign);
+
+ GST_LOG_OBJECT (wav, "Fetching %" G_GINT64_FORMAT " bytes of data "
+ "from the sinkpad", desired);
+
+ if (wav->streaming) {
+ guint avail = gst_adapter_available (wav->adapter);
+ guint extra;
+
+ /* flush some bytes if evil upstream sends a segment that starts
+ * before the data or is not sample-aligned */
+ if (G_LIKELY (wav->offset >= wav->datastart)) {
+ extra = (wav->offset - wav->datastart) % wav->bytes_per_sample;
+ } else {
+ extra = wav->datastart - wav->offset;
+ }
+
+ if (G_UNLIKELY (extra)) {
+ extra = wav->bytes_per_sample - extra;
+ if (extra <= avail) {
+ GST_DEBUG_OBJECT (wav, "flushing %u bytes to sample boundary", extra);
+ gst_adapter_flush (wav->adapter, extra);
+ wav->offset += extra;
+ wav->dataleft -= extra;
+ goto iterate_adapter;
+ } else {
+ GST_DEBUG_OBJECT (wav, "flushing %u bytes", avail);
+ gst_adapter_clear (wav->adapter);
+ wav->offset += avail;
+ wav->dataleft -= avail;
+ return GST_FLOW_OK;
+ }
+ }
+
+ if (avail < desired) {
+ GST_LOG_OBJECT (wav, "Got only %u bytes of data from the sinkpad", avail);
+
+ /* If we are at the end of the stream, we need to flush whatever we have left */
+ if (avail > 0 && flushing) {
+ if (avail >= wav->blockalign && wav->blockalign > 0) {
+ avail -= (avail % wav->blockalign);
+ buf = gst_adapter_take_buffer (wav->adapter, avail);
+ } else {
+ return GST_FLOW_OK;
+ }
+ } else {
+ return GST_FLOW_OK;
+ }
+ } else {
+ buf = gst_adapter_take_buffer (wav->adapter, desired);
+ }
+ } else {
+ if ((res = gst_pad_pull_range (wav->sinkpad, wav->offset,
+ desired, &buf)) != GST_FLOW_OK)
+ goto pull_error;
+
+ /* we may get a short buffer at the end of the file */
+ if (gst_buffer_get_size (buf) < desired) {
+ gsize size = gst_buffer_get_size (buf);
+
+ GST_LOG_OBJECT (wav, "Got only %" G_GSIZE_FORMAT " bytes of data", size);
+ if (size >= wav->blockalign) {
+ if (wav->blockalign > 0) {
+ buf = gst_buffer_make_writable (buf);
+ gst_buffer_resize (buf, 0, size - (size % wav->blockalign));
+ }
+ } else {
+ gst_buffer_unref (buf);
+ goto found_eos;
+ }
+ }
+ }
+
+ obtained = gst_buffer_get_size (buf);
+
+ /* our positions in bytes */
+ pos = wav->offset - wav->datastart;
+ nextpos = pos + obtained;
+
+ /* update offsets (in samples); does not overflow */
+ buf = gst_buffer_make_writable (buf);
+ GST_BUFFER_OFFSET (buf) = pos / wav->bytes_per_sample;
+ GST_BUFFER_OFFSET_END (buf) = nextpos / wav->bytes_per_sample;
+
+ /* first chunk of data? create the source pad. We do this only here so
+ * we can detect broken .wav files with dts disguised as raw PCM (sigh) */
+ if (G_UNLIKELY (wav->first)) {
+ wav->first = FALSE;
+ /* this will also push the segment events */
+ gst_wavparse_add_src_pad (wav, buf);
+ } else {
+ /* If we have a pending start segment, send it now. */
+ if (G_UNLIKELY (wav->start_segment != NULL)) {
+ gst_pad_push_event (wav->srcpad, wav->start_segment);
+ wav->start_segment = NULL;
+ }
+ }
+
+ if (wav->bps > 0) {
+ /* and timestamps if we have a bitrate, be careful for overflows */
+ timestamp =
+ gst_util_uint64_scale_ceil (pos, GST_SECOND, (guint64) wav->bps);
+ next_timestamp =
+ gst_util_uint64_scale_ceil (nextpos, GST_SECOND, (guint64) wav->bps);
+ duration = next_timestamp - timestamp;
+
+ /* update current running segment position */
+ if (G_LIKELY (next_timestamp >= wav->segment.start))
+ wav->segment.position = next_timestamp;
+ } else if (wav->fact) {
+ guint64 bps =
+ gst_util_uint64_scale_int (wav->datasize, wav->rate, wav->fact);
+ /* derive an average byte rate from the fact chunk so we can still timestamp */
+ timestamp = gst_util_uint64_scale_ceil (pos, GST_SECOND, bps);
+ next_timestamp = gst_util_uint64_scale_ceil (nextpos, GST_SECOND, bps);
+ duration = next_timestamp - timestamp;
+ } else {
+ /* no bitrate, all we know is that the first sample has timestamp 0, all
+ * other positions and durations have unknown timestamp. */
+ if (pos == 0)
+ timestamp = 0;
+ else
+ timestamp = GST_CLOCK_TIME_NONE;
+ duration = GST_CLOCK_TIME_NONE;
+ /* update current running segment position with byte offset */
+ if (G_LIKELY (nextpos >= wav->segment.start))
+ wav->segment.position = nextpos;
+ }
+ if ((pos > 0) && wav->vbr) {
+ /* don't set timestamps for VBR files if it's not the first buffer */
+ timestamp = GST_CLOCK_TIME_NONE;
+ duration = GST_CLOCK_TIME_NONE;
+ }
+ if (wav->discont) {
+ GST_DEBUG_OBJECT (wav, "marking DISCONT");
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
+ wav->discont = FALSE;
+ }
+
+ GST_BUFFER_TIMESTAMP (buf) = timestamp;
+ GST_BUFFER_DURATION (buf) = duration;
+
+ GST_LOG_OBJECT (wav,
+ "Got buffer. timestamp:%" GST_TIME_FORMAT " , duration:%" GST_TIME_FORMAT
+ ", size:%" G_GSIZE_FORMAT, GST_TIME_ARGS (timestamp),
+ GST_TIME_ARGS (duration), gst_buffer_get_size (buf));
+
+ if ((res = gst_pad_push (wav->srcpad, buf)) != GST_FLOW_OK)
+ goto push_error;
+
+ if (obtained < wav->dataleft) {
+ wav->offset += obtained;
+ wav->dataleft -= obtained;
+ } else {
+ wav->offset += wav->dataleft;
+ wav->dataleft = 0;
+ }
+
+ /* Iterate until need more data, so adapter size won't grow */
+ if (wav->streaming) {
+ GST_LOG_OBJECT (wav,
+ "offset: %" G_GINT64_FORMAT " , end: %" G_GINT64_FORMAT, wav->offset,
+ wav->end_offset);
+ goto iterate_adapter;
+ }
+ return res;
+
+ /* ERROR */
+ found_eos:
+ {
+ GST_DEBUG_OBJECT (wav, "found EOS");
+ return GST_FLOW_EOS;
+ }
+ pull_error:
+ {
+ /* check if we got EOS */
+ if (res == GST_FLOW_EOS)
+ goto found_eos;
+
+ GST_WARNING_OBJECT (wav,
+ "Error getting %" G_GINT64_FORMAT " bytes from the "
+ "sinkpad (dataleft = %" G_GINT64_FORMAT ")", desired, wav->dataleft);
+ return res;
+ }
+ push_error:
+ {
+ GST_INFO_OBJECT (wav,
+ "Error pushing on srcpad %s:%s, reason %s, is linked? = %d",
+ GST_DEBUG_PAD_NAME (wav->srcpad), gst_flow_get_name (res),
+ gst_pad_is_linked (wav->srcpad));
+ return res;
+ }
+ }
+
+ static void
+ gst_wavparse_loop (GstPad * pad)
+ {
+ GstFlowReturn ret;
+ GstWavParse *wav = GST_WAVPARSE (GST_PAD_PARENT (pad));
+ GstEvent *event;
+ gchar *stream_id;
+
+ GST_LOG_OBJECT (wav, "process data");
+
+ switch (wav->state) {
+ case GST_WAVPARSE_START:
+ GST_INFO_OBJECT (wav, "GST_WAVPARSE_START");
+ if ((ret = gst_wavparse_stream_init (wav)) != GST_FLOW_OK)
+ goto pause;
+
+ stream_id =
+ gst_pad_create_stream_id (wav->srcpad, GST_ELEMENT_CAST (wav), NULL);
+ event = gst_event_new_stream_start (stream_id);
+ gst_event_set_group_id (event, gst_util_group_id_next ());
+ gst_pad_push_event (wav->srcpad, event);
+ g_free (stream_id);
+
+ wav->state = GST_WAVPARSE_HEADER;
+ /* fall-through */
+
+ case GST_WAVPARSE_HEADER:
+ GST_INFO_OBJECT (wav, "GST_WAVPARSE_HEADER");
+ if ((ret = gst_wavparse_stream_headers (wav)) != GST_FLOW_OK)
+ goto pause;
+
+ wav->state = GST_WAVPARSE_DATA;
+ GST_INFO_OBJECT (wav, "GST_WAVPARSE_DATA");
+ /* fall-through */
+
+ case GST_WAVPARSE_DATA:
+ if ((ret = gst_wavparse_stream_data (wav, FALSE)) != GST_FLOW_OK)
+ goto pause;
+ break;
+ default:
+ g_assert_not_reached ();
+ }
+ return;
+
+ /* ERRORS */
+ pause:
+ {
+ const gchar *reason = gst_flow_get_name (ret);
+
+ GST_DEBUG_OBJECT (wav, "pausing task, reason %s", reason);
+ gst_pad_pause_task (pad);
+
+ if (ret == GST_FLOW_EOS) {
+ /* handle end-of-stream/segment */
+ /* so align our position with the end of it, if there is one;
+ * this ensures a subsequent seek will arrive at the correct base/acc time */
+ if (wav->segment.format == GST_FORMAT_TIME) {
+ if (wav->segment.rate > 0.0 &&
+ GST_CLOCK_TIME_IS_VALID (wav->segment.stop))
+ wav->segment.position = wav->segment.stop;
+ else if (wav->segment.rate < 0.0)
+ wav->segment.position = wav->segment.start;
+ }
+ if (wav->state == GST_WAVPARSE_START || !wav->caps) {
+ GST_ELEMENT_ERROR (wav, STREAM, WRONG_TYPE, (NULL),
+ ("No valid input found before end of stream"));
+ gst_pad_push_event (wav->srcpad, gst_event_new_eos ());
+ } else {
+ /* add pad before we perform EOS */
+ if (G_UNLIKELY (wav->first)) {
+ wav->first = FALSE;
+ gst_wavparse_add_src_pad (wav, NULL);
+ }
+
+ /* perform EOS logic */
+ if (wav->segment.flags & GST_SEEK_FLAG_SEGMENT) {
+ GstClockTime stop;
+
+ if ((stop = wav->segment.stop) == -1)
+ stop = wav->segment.duration;
+
+ gst_element_post_message (GST_ELEMENT_CAST (wav),
+ gst_message_new_segment_done (GST_OBJECT_CAST (wav),
+ wav->segment.format, stop));
+ gst_pad_push_event (wav->srcpad,
+ gst_event_new_segment_done (wav->segment.format, stop));
+ } else {
+ gst_pad_push_event (wav->srcpad, gst_event_new_eos ());
+ }
+ }
+ } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
+ /* for fatal errors we post an error message; post the error first
+ * so the app knows about it before the EOS */
+ GST_ELEMENT_FLOW_ERROR (wav, ret);
+ gst_pad_push_event (wav->srcpad, gst_event_new_eos ());
+ }
+ return;
+ }
+ }
+
+ static GstFlowReturn
+ gst_wavparse_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
+ {
+ GstFlowReturn ret;
+ GstWavParse *wav = GST_WAVPARSE (parent);
+
+ GST_LOG_OBJECT (wav, "adapter_push %" G_GSIZE_FORMAT " bytes",
+ gst_buffer_get_size (buf));
+
+ gst_adapter_push (wav->adapter, buf);
+
+ switch (wav->state) {
+ case GST_WAVPARSE_START:
+ GST_INFO_OBJECT (wav, "GST_WAVPARSE_START");
+ if ((ret = gst_wavparse_parse_stream_init (wav)) != GST_FLOW_OK)
+ goto done;
+
+ if (wav->state != GST_WAVPARSE_HEADER)
+ break;
+
+ /* otherwise fall-through */
+ case GST_WAVPARSE_HEADER:
+ GST_INFO_OBJECT (wav, "GST_WAVPARSE_HEADER");
+ if ((ret = gst_wavparse_stream_headers (wav)) != GST_FLOW_OK)
+ goto done;
+
+ if (!wav->got_fmt || wav->datastart == 0)
+ break;
+
+ wav->state = GST_WAVPARSE_DATA;
+ GST_INFO_OBJECT (wav, "GST_WAVPARSE_DATA");
+
+ /* fall-through */
+ case GST_WAVPARSE_DATA:
+ if (buf && GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))
+ wav->discont = TRUE;
+ if ((ret = gst_wavparse_stream_data (wav, FALSE)) != GST_FLOW_OK)
+ goto done;
+ break;
+ default:
+ g_return_val_if_reached (GST_FLOW_ERROR);
+ }
+ done:
+ if (G_UNLIKELY (wav->abort_buffering)) {
+ wav->abort_buffering = FALSE;
+ ret = GST_FLOW_ERROR;
+ /* sort of demux/parse error */
+ GST_ELEMENT_ERROR (wav, STREAM, DEMUX, (NULL), ("unhandled buffer size"));
+ }
+
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_wavparse_flush_data (GstWavParse * wav)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint av;
+
+ if ((av = gst_adapter_available (wav->adapter)) > 0) {
+ ret = gst_wavparse_stream_data (wav, TRUE);
+ }
+
+ return ret;
+ }
+
+ static gboolean
+ gst_wavparse_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+ {
+ GstWavParse *wav = GST_WAVPARSE (parent);
+ gboolean ret = TRUE;
+
+ GST_LOG_OBJECT (wav, "handling %s event", GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ /* discard, we'll come up with proper src caps */
+ gst_event_unref (event);
+ break;
+ }
+ case GST_EVENT_SEGMENT:
+ {
+ gint64 start, stop, offset = 0, end_offset = -1;
+ GstSegment segment;
+
+ /* some debug output */
+ gst_event_copy_segment (event, &segment);
+ GST_DEBUG_OBJECT (wav, "received newsegment %" GST_SEGMENT_FORMAT,
+ &segment);
+
+ if (wav->state != GST_WAVPARSE_DATA) {
+ GST_DEBUG_OBJECT (wav, "still starting, eating event");
+ goto exit;
+ }
+
+ /* now we are either committed to TIME or BYTE format,
+ * and we only expect a BYTE segment, e.g. following a seek */
+ if (segment.format == GST_FORMAT_BYTES) {
+ /* handle (un)signed issues */
+ start = segment.start;
+ stop = segment.stop;
+ if (start > 0) {
+ offset = start;
+ start -= wav->datastart;
+ start = MAX (start, 0);
+ }
+ if (stop > 0) {
+ end_offset = stop;
+ stop -= wav->datastart;
+ stop = MAX (stop, 0);
+ }
+ if (wav->segment.format == GST_FORMAT_TIME) {
+ guint64 bps = wav->bps;
+
+ /* operating in format TIME, so we can convert */
+ if (!bps && wav->fact)
+ bps =
+ gst_util_uint64_scale_int (wav->datasize, wav->rate, wav->fact);
+ if (bps) {
+ if (start >= 0)
+ start = gst_util_uint64_scale_ceil (start, GST_SECOND, bps);
+ if (stop >= 0)
+ stop = gst_util_uint64_scale_ceil (stop, GST_SECOND, bps);
+ }
+ }
+ } else {
+ GST_DEBUG_OBJECT (wav, "unsupported segment format, ignoring");
+ goto exit;
+ }
+
+ segment.start = start;
+ segment.stop = stop;
+
+ /* accept upstream's notion of segment and distribute along */
+ segment.format = wav->segment.format;
+ segment.time = segment.position = segment.start;
+ segment.duration = wav->segment.duration;
+ segment.base = gst_segment_to_running_time (&wav->segment,
+ GST_FORMAT_TIME, wav->segment.position);
+
+ gst_segment_copy_into (&segment, &wav->segment);
+
+ /* also store the newsegment event for the streaming thread */
+ if (wav->start_segment)
+ gst_event_unref (wav->start_segment);
+ GST_DEBUG_OBJECT (wav, "Storing newseg %" GST_SEGMENT_FORMAT, &segment);
+ wav->start_segment = gst_event_new_segment (&segment);
+
+ /* stream leftover data in current segment */
+ gst_wavparse_flush_data (wav);
+ /* and set up streaming thread for next one */
+ wav->offset = offset;
+ wav->end_offset = end_offset;
+
+ if (wav->datasize > 0 && (wav->end_offset == -1
+ || wav->end_offset > wav->datastart + wav->datasize))
+ wav->end_offset = wav->datastart + wav->datasize;
+
+ if (wav->end_offset != -1) {
+ wav->dataleft = wav->end_offset - wav->offset;
+ } else {
+ /* infinity; upstream will EOS when done */
+ wav->dataleft = G_MAXUINT64;
+ }
+ exit:
+ gst_event_unref (event);
+ break;
+ }
+ case GST_EVENT_EOS:
+ if (wav->state == GST_WAVPARSE_START || !wav->caps) {
+ GST_ELEMENT_ERROR (wav, STREAM, WRONG_TYPE, (NULL),
+ ("No valid input found before end of stream"));
+ } else {
+ /* add pad if needed so EOS is seen downstream */
+ if (G_UNLIKELY (wav->first)) {
+ wav->first = FALSE;
+ gst_wavparse_add_src_pad (wav, NULL);
+ }
+
+ /* stream leftover data in current segment */
+ gst_wavparse_flush_data (wav);
+ }
+
+ /* fall-through */
+ case GST_EVENT_FLUSH_STOP:
+ {
+ GstClockTime dur;
+
+ if (wav->adapter)
+ gst_adapter_clear (wav->adapter);
+ wav->discont = TRUE;
+ dur = wav->segment.duration;
+ gst_segment_init (&wav->segment, wav->segment.format);
+ wav->segment.duration = dur;
+ /* fall-through */
+ }
+ default:
+ ret = gst_pad_event_default (wav->sinkpad, parent, event);
+ break;
+ }
+
+ return ret;
+ }
+
+ #if 0
+ /* convert and query stuff */
+ static const GstFormat *
+ gst_wavparse_get_formats (GstPad * pad)
+ {
+ static const GstFormat formats[] = {
+ GST_FORMAT_TIME,
+ GST_FORMAT_BYTES,
+ GST_FORMAT_DEFAULT, /* a "frame", ie a set of samples per Hz */
+ 0
+ };
+
+ return formats;
+ }
+ #endif
+
+ static gboolean
+ gst_wavparse_pad_convert (GstPad * pad,
+ GstFormat src_format, gint64 src_value,
+ GstFormat * dest_format, gint64 * dest_value)
+ {
+ GstWavParse *wavparse;
+ gboolean res = TRUE;
+
+ wavparse = GST_WAVPARSE (GST_PAD_PARENT (pad));
+
+ if (*dest_format == src_format) {
+ *dest_value = src_value;
+ return TRUE;
+ }
+
+ if ((wavparse->bps == 0) && !wavparse->fact)
+ goto no_bps_fact;
+
+ GST_INFO_OBJECT (wavparse, "converting value from %s to %s",
+ gst_format_get_name (src_format), gst_format_get_name (*dest_format));
+
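+ /* conversion sketch: BYTES -> DEFAULT divides by bytes_per_sample,
+ * BYTES <-> TIME scales by the (possibly fact-derived) byte rate, and
+ * DEFAULT <-> TIME scales by the sample rate */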
+ switch (src_format) {
+ case GST_FORMAT_BYTES:
+ switch (*dest_format) {
+ case GST_FORMAT_DEFAULT:
+ *dest_value = src_value / wavparse->bytes_per_sample;
+ /* make sure we end up on a sample boundary */
+ *dest_value -= *dest_value % wavparse->bytes_per_sample;
+ break;
+ case GST_FORMAT_TIME:
+ /* src_value + datastart = offset */
+ GST_INFO_OBJECT (wavparse,
+ "src=%" G_GINT64_FORMAT ", offset=%" G_GINT64_FORMAT, src_value,
+ wavparse->offset);
+ if (wavparse->bps > 0)
+ *dest_value = gst_util_uint64_scale_ceil (src_value, GST_SECOND,
+ (guint64) wavparse->bps);
+ else if (wavparse->fact) {
+ guint64 bps = gst_util_uint64_scale_int_ceil (wavparse->datasize,
+ wavparse->rate, wavparse->fact);
+
+ *dest_value =
+ gst_util_uint64_scale_int_ceil (src_value, GST_SECOND, bps);
+ } else {
+ res = FALSE;
+ }
+ break;
+ default:
+ res = FALSE;
+ goto done;
+ }
+ break;
+
+ case GST_FORMAT_DEFAULT:
+ switch (*dest_format) {
+ case GST_FORMAT_BYTES:
+ *dest_value = src_value * wavparse->bytes_per_sample;
+ break;
+ case GST_FORMAT_TIME:
+ *dest_value = gst_util_uint64_scale (src_value, GST_SECOND,
+ (guint64) wavparse->rate);
+ break;
+ default:
+ res = FALSE;
+ goto done;
+ }
+ break;
+
+ case GST_FORMAT_TIME:
+ switch (*dest_format) {
+ case GST_FORMAT_BYTES:
+ if (wavparse->bps > 0)
+ *dest_value = gst_util_uint64_scale (src_value,
+ (guint64) wavparse->bps, GST_SECOND);
+ else {
+ guint64 bps = gst_util_uint64_scale_int (wavparse->datasize,
+ wavparse->rate, wavparse->fact);
+
+ *dest_value = gst_util_uint64_scale (src_value, bps, GST_SECOND);
+ }
+ /* make sure we end up on a sample boundary */
+ *dest_value -= *dest_value % wavparse->blockalign;
+ break;
+ case GST_FORMAT_DEFAULT:
+ *dest_value = gst_util_uint64_scale (src_value,
+ (guint64) wavparse->rate, GST_SECOND);
+ break;
+ default:
+ res = FALSE;
+ goto done;
+ }
+ break;
+
+ default:
+ res = FALSE;
+ goto done;
+ }
+
+ done:
+ return res;
+
+ /* ERRORS */
+ no_bps_fact:
+ {
+ GST_DEBUG_OBJECT (wavparse, "bps 0 or no fact chunk, cannot convert");
+ res = FALSE;
+ goto done;
+ }
+ }
+
+ /* handle queries for location and length in requested format */
+ static gboolean
+ gst_wavparse_pad_query (GstPad * pad, GstObject * parent, GstQuery * query)
+ {
+ gboolean res = TRUE;
+ GstWavParse *wav = GST_WAVPARSE (parent);
+
+ /* only if we know */
+ if (wav->state != GST_WAVPARSE_DATA) {
+ return FALSE;
+ }
+
+ GST_LOG_OBJECT (pad, "%s query", GST_QUERY_TYPE_NAME (query));
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_POSITION:
+ {
+ gint64 curb;
+ gint64 cur;
+ GstFormat format;
+
+ /* this is not very precise, as we have pushed several buffers downstream for prerolling */
+ curb = wav->offset - wav->datastart;
+ gst_query_parse_position (query, &format, NULL);
+ GST_INFO_OBJECT (wav, "pos query at %" G_GINT64_FORMAT, curb);
+
+ switch (format) {
+ case GST_FORMAT_BYTES:
+ format = GST_FORMAT_BYTES;
+ cur = curb;
+ break;
+ default:
+ res = gst_wavparse_pad_convert (pad, GST_FORMAT_BYTES, curb,
+ &format, &cur);
+ break;
+ }
+ if (res)
+ gst_query_set_position (query, format, cur);
+ break;
+ }
+ case GST_QUERY_DURATION:
+ {
+ gint64 duration = 0;
+ GstFormat format;
+
+ if (wav->ignore_length) {
+ res = FALSE;
+ break;
+ }
+
+ gst_query_parse_duration (query, &format, NULL);
+
+ switch (format) {
+ case GST_FORMAT_BYTES:{
+ format = GST_FORMAT_BYTES;
+ duration = wav->datasize;
+ break;
+ }
+ case GST_FORMAT_TIME:
+ if ((res = gst_wavparse_calculate_duration (wav))) {
+ duration = wav->duration;
+ }
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+ if (res)
+ gst_query_set_duration (query, format, duration);
+ break;
+ }
+ case GST_QUERY_CONVERT:
+ {
+ gint64 srcvalue, dstvalue;
+ GstFormat srcformat, dstformat;
+
+ gst_query_parse_convert (query, &srcformat, &srcvalue,
+ &dstformat, &dstvalue);
+ res = gst_wavparse_pad_convert (pad, srcformat, srcvalue,
+ &dstformat, &dstvalue);
+ if (res)
+ gst_query_set_convert (query, srcformat, srcvalue, dstformat, dstvalue);
+ break;
+ }
+ case GST_QUERY_SEEKING:{
+ GstFormat fmt;
+ gboolean seekable = FALSE;
+
+ gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
+ if (fmt == wav->segment.format) {
+ if (wav->streaming) {
+ GstQuery *q;
+
+ q = gst_query_new_seeking (GST_FORMAT_BYTES);
+ if ((res = gst_pad_peer_query (wav->sinkpad, q))) {
+ gst_query_parse_seeking (q, &fmt, &seekable, NULL, NULL);
+ GST_LOG_OBJECT (wav, "upstream BYTE seekable %d", seekable);
+ }
+ gst_query_unref (q);
+ } else {
+ GST_LOG_OBJECT (wav, "looping => seekable");
+ seekable = TRUE;
+ res = TRUE;
+ }
+ } else if (fmt == GST_FORMAT_TIME) {
+ res = TRUE;
+ }
+ if (res) {
+ gst_query_set_seeking (query, fmt, seekable, 0, wav->segment.duration);
+ }
+ break;
+ }
+ case GST_QUERY_SEGMENT:
+ {
+ GstFormat format;
+ gint64 start, stop;
+
+ format = wav->segment.format;
+
+ start =
+ gst_segment_to_stream_time (&wav->segment, format,
+ wav->segment.start);
+ if ((stop = wav->segment.stop) == -1)
+ stop = wav->segment.duration;
+ else
+ stop = gst_segment_to_stream_time (&wav->segment, format, stop);
+
+ gst_query_set_segment (query, wav->segment.rate, format, start, stop);
+ res = TRUE;
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+ return res;
+ }
+
+ static gboolean
+ gst_wavparse_srcpad_event (GstPad * pad, GstObject * parent, GstEvent * event)
+ {
+ GstWavParse *wavparse = GST_WAVPARSE (parent);
+ gboolean res = FALSE;
+
+ GST_DEBUG_OBJECT (wavparse, "%s event", GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEEK:
+ /* can only handle events when we are in the data state */
+ if (wavparse->state == GST_WAVPARSE_DATA) {
+ res = gst_wavparse_perform_seek (wavparse, event);
+ }
+ gst_event_unref (event);
+ break;
+
+ case GST_EVENT_TOC_SELECT:
+ {
+ char *uid = NULL;
+ GstTocEntry *entry = NULL;
+ GstEvent *seek_event;
+ gint64 start_pos;
+
+ if (!wavparse->toc) {
+ GST_DEBUG_OBJECT (wavparse, "no TOC to select");
+ return FALSE;
+ } else {
+ gst_event_parse_toc_select (event, &uid);
+ if (uid != NULL) {
+ GST_OBJECT_LOCK (wavparse);
+ entry = gst_toc_find_entry (wavparse->toc, uid);
+ if (entry == NULL) {
+ GST_OBJECT_UNLOCK (wavparse);
+ GST_WARNING_OBJECT (wavparse, "no TOC entry with given UID: %s",
+ uid);
+ res = FALSE;
+ } else {
+ gst_toc_entry_get_start_stop_times (entry, &start_pos, NULL);
+ GST_OBJECT_UNLOCK (wavparse);
+ seek_event = gst_event_new_seek (1.0,
+ GST_FORMAT_TIME,
+ GST_SEEK_FLAG_FLUSH,
+ GST_SEEK_TYPE_SET, start_pos, GST_SEEK_TYPE_SET, -1);
+ res = gst_wavparse_perform_seek (wavparse, seek_event);
+ gst_event_unref (seek_event);
+ }
+ g_free (uid);
+ } else {
+ GST_WARNING_OBJECT (wavparse, "received empty TOC select event");
+ res = FALSE;
+ }
+ }
+ gst_event_unref (event);
+ break;
+ }
+
+ default:
+ res = gst_pad_push_event (wavparse->sinkpad, event);
+ break;
+ }
+ return res;
+ }
+
+ static gboolean
+ gst_wavparse_sink_activate (GstPad * sinkpad, GstObject * parent)
+ {
+ GstWavParse *wav = GST_WAVPARSE (parent);
+ GstQuery *query;
+ gboolean pull_mode;
+
+ if (wav->adapter) {
+ gst_adapter_clear (wav->adapter);
+ g_object_unref (wav->adapter);
+ wav->adapter = NULL;
+ }
+
+ query = gst_query_new_scheduling ();
+
+ if (!gst_pad_peer_query (sinkpad, query)) {
+ gst_query_unref (query);
+ goto activate_push;
+ }
+
+ pull_mode = gst_query_has_scheduling_mode_with_flags (query,
+ GST_PAD_MODE_PULL, GST_SCHEDULING_FLAG_SEEKABLE);
+ gst_query_unref (query);
+
+ if (!pull_mode)
+ goto activate_push;
+
+ GST_DEBUG_OBJECT (sinkpad, "activating pull");
+ wav->streaming = FALSE;
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PULL, TRUE);
+
+ activate_push:
+ {
+ GST_DEBUG_OBJECT (sinkpad, "activating push");
+ wav->streaming = TRUE;
+ wav->adapter = gst_adapter_new ();
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PUSH, TRUE);
+ }
+ }
+
+
+ static gboolean
+ gst_wavparse_sink_activate_mode (GstPad * sinkpad, GstObject * parent,
+ GstPadMode mode, gboolean active)
+ {
+ gboolean res;
+
+ switch (mode) {
+ case GST_PAD_MODE_PUSH:
+ res = TRUE;
+ break;
+ case GST_PAD_MODE_PULL:
+ if (active) {
+ /* if we have a scheduler we can start the task */
+ res = gst_pad_start_task (sinkpad, (GstTaskFunction) gst_wavparse_loop,
+ sinkpad, NULL);
+ } else {
+ res = gst_pad_stop_task (sinkpad);
+ }
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+ return res;
+ }
+
+ static GstStateChangeReturn
+ gst_wavparse_change_state (GstElement * element, GstStateChange transition)
+ {
+ GstStateChangeReturn ret;
+ GstWavParse *wav = GST_WAVPARSE (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_wavparse_reset (wav);
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ break;
+ default:
+ break;
+ }
+ return ret;
+ }
+
+ static void
+ gst_wavparse_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+ GstWavParse *self;
+
+ g_return_if_fail (GST_IS_WAVPARSE (object));
+ self = GST_WAVPARSE (object);
+
+ switch (prop_id) {
+ case PROP_IGNORE_LENGTH:
+ self->ignore_length = g_value_get_boolean (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec);
+ }
+
+ }
+
+ static void
+ gst_wavparse_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+ {
+ GstWavParse *self;
+
+ g_return_if_fail (GST_IS_WAVPARSE (object));
+ self = GST_WAVPARSE (object);
+
+ switch (prop_id) {
+ case PROP_IGNORE_LENGTH:
+ g_value_set_boolean (value, self->ignore_length);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec);
+ }
+ }
+
+ static gboolean
+ plugin_init (GstPlugin * plugin)
+ {
+ return GST_ELEMENT_REGISTER (wavparse, plugin);
+ }
+
+ GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ wavparse,
+ "Parse a .wav file into raw audio",
+ plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
--- /dev/null
+ project('gst-plugins-good', 'c',
+ version : '1.19.2',
+ meson_version : '>= 0.54',
+ default_options : [ 'warning_level=1',
+ 'buildtype=debugoptimized' ])
+
+ gst_version = meson.project_version()
+ version_arr = gst_version.split('.')
+ gst_version_major = version_arr[0].to_int()
+ gst_version_minor = version_arr[1].to_int()
+ gst_version_micro = version_arr[2].to_int()
+ if version_arr.length() == 4
+ gst_version_nano = version_arr[3].to_int()
+ else
+ gst_version_nano = 0
+ endif
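+ # GStreamer convention: an odd minor version is a development series, and
+ # a micro version of 90 or above marks a pre-release of the next stable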
+ gst_version_is_dev = gst_version_minor % 2 == 1 and gst_version_micro < 90
+
+ have_cxx = add_languages('cpp', native: false, required: false)
+
+ glib_req = '>= 2.56.0'
+ orc_req = '>= 0.4.17'
+ gst_req = '>= @0@.@1@.0'.format(gst_version_major, gst_version_minor)
+
+ api_version = '1.0'
+
+ plugins_install_dir = join_paths(get_option('libdir'), 'gstreamer-1.0')
+ plugins = []
+
+ cc = meson.get_compiler('c')
+ host_system = host_machine.system()
+
+ if cc.get_id() == 'msvc'
+ msvc_args = [
+ # Ignore several spurious warnings for things gstreamer does very commonly
+ # If a warning is completely useless and spammy, use '/wdXXXX' to suppress it
+ # If a warning is harmless but hard to fix, use '/woXXXX' so it's shown once
+ # NOTE: Only add warnings here if you are sure they're spurious
+ '/wd4018', # implicit signed/unsigned conversion
+ '/wd4146', # unary minus on unsigned (beware INT_MIN)
+ '/wd4244', # lossy type conversion (e.g. double -> int)
+ '/wd4305', # truncating type conversion (e.g. double -> float)
+ cc.get_supported_arguments(['/utf-8']), # set the input encoding to utf-8
+
+ # Enable some warnings on MSVC to match GCC/Clang behaviour
+ '/w14062', # enumerator 'identifier' in switch of enum 'enumeration' is not handled
+ '/w14101', # 'identifier' : unreferenced local variable
+ '/w14189', # 'identifier' : local variable is initialized but not referenced
+ ]
+ if have_cxx
+ add_project_arguments(msvc_args, language: ['c', 'cpp'])
+ else
+ add_project_arguments(msvc_args, language: 'c')
+ endif
+ # Disable SAFESEH with MSVC for plugins and libs that use external deps that
+ # are built with MinGW
+ noseh_link_args = ['/SAFESEH:NO']
+ else
+ noseh_link_args = []
+ endif
+
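+ # Bind references to our own global functions within the shared object at
+ # link time, avoiding PLT indirection for internal calls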
+ if cc.has_link_argument('-Wl,-Bsymbolic-functions')
+ add_project_link_arguments('-Wl,-Bsymbolic-functions', language : 'c')
+ endif
+
+ # Symbol visibility
+ if cc.has_argument('-fvisibility=hidden')
+ add_project_arguments('-fvisibility=hidden', language: 'c')
+ endif
+
+ # Disable strict aliasing
+ if cc.has_argument('-fno-strict-aliasing')
+ add_project_arguments('-fno-strict-aliasing', language: 'c')
+ endif
+
+ # Define G_DISABLE_DEPRECATED for development versions
+ if gst_version_is_dev
+ message('Disabling deprecated GLib API')
+ add_project_arguments('-DG_DISABLE_DEPRECATED', language: 'c')
+ endif
+
+ cast_checks = get_option('gobject-cast-checks')
+ if cast_checks.disabled() or (cast_checks.auto() and not gst_version_is_dev)
+ message('Disabling GLib cast checks')
+ add_project_arguments('-DG_DISABLE_CAST_CHECKS', language: 'c')
+ endif
+
+ glib_asserts = get_option('glib-asserts')
+ if glib_asserts.disabled() or (glib_asserts.auto() and not gst_version_is_dev)
+ message('Disabling GLib asserts')
+ add_project_arguments('-DG_DISABLE_ASSERT', language: 'c')
+ endif
+
+ glib_checks = get_option('glib-checks')
+ if glib_checks.disabled() or (glib_checks.auto() and not gst_version_is_dev)
+ message('Disabling GLib checks')
+ add_project_arguments('-DG_DISABLE_CHECKS', language: 'c')
+ endif
+
+ cdata = configuration_data()
+
+ check_headers = [
+ ['HAVE_DLFCN_H', 'dlfcn.h'],
+ ['HAVE_FCNTL_H', 'fcntl.h'],
+ ['HAVE_INTTYPES_H', 'inttypes.h'],
+ ['HAVE_MEMORY_H', 'memory.h'],
+ ['HAVE_PROCESS_H', 'process.h'],
+ ['HAVE_STDINT_H', 'stdint.h'],
+ ['HAVE_STDLIB_H', 'stdlib.h'],
+ ['HAVE_STRINGS_H', 'strings.h'],
+ ['HAVE_STRING_H', 'string.h'],
+ ['HAVE_SYS_IOCTL_H', 'sys/ioctl.h'],
+ ['HAVE_SYS_PARAM_H', 'sys/param.h'],
+ ['HAVE_SYS_SOCKET_H', 'sys/socket.h'],
+ ['HAVE_SYS_STAT_H', 'sys/stat.h'],
+ ['HAVE_SYS_TIME_H', 'sys/time.h'],
+ ['HAVE_SYS_TYPES_H', 'sys/types.h'],
+ ['HAVE_UNISTD_H', 'unistd.h'],
+ ]
+
+ foreach h : check_headers
+ if cc.has_header(h.get(1))
+ cdata.set(h.get(0), 1)
+ endif
+ endforeach
+
+ check_functions = [
+ ['HAVE_ASINH', 'asinh', '#include<math.h>'],
+ ['HAVE_CLOCK_GETTIME', 'clock_gettime', '#include<time.h>'],
+ ['HAVE_COSH', 'cosh', '#include<math.h>'],
+ # check token HAVE_CPU_ALPHA
+ # check token HAVE_CPU_ARM
+ # check token HAVE_CPU_CRIS
+ # check token HAVE_CPU_CRISV32
+ # check token HAVE_CPU_HPPA
+ # check token HAVE_CPU_I386
+ # check token HAVE_CPU_IA64
+ # check token HAVE_CPU_M68K
+ # check token HAVE_CPU_MIPS
+ # check token HAVE_CPU_PPC
+ # check token HAVE_CPU_PPC64
+ # check token HAVE_CPU_S390
+ # check token HAVE_CPU_SPARC
+ # check token HAVE_CPU_X86_64
+ ['HAVE_DCGETTEXT', 'dcgettext', '#include<libintl.h>'],
+ # check token HAVE_DIRECTSOUND
+ # check token HAVE_EXPERIMENTAL
+ # check token HAVE_EXTERNAL
+ # check token HAVE_FPCLASS
+ # check token HAVE_GCC_ASM
+ ['HAVE_GETPAGESIZE', 'getpagesize', '#include<unistd.h>'],
+ # check token HAVE_GETTEXT
+ # check token HAVE_GST_V4L2
+ ['HAVE_ISINF', 'isinf', '#include<math.h>'],
+ # check token HAVE_LIBV4L2
+ ['HAVE_MMAP', 'mmap', '#include<sys/mman.h>'],
+ ['HAVE_MMAP64', 'mmap64', '#include<sys/mman.h>'],
+ # check token HAVE_OSX_AUDIO
+ # check token HAVE_OSX_VIDEO
+ # check token HAVE_RDTSC
+ ['HAVE_SINH', 'sinh', '#include<math.h>'],
+ # check token HAVE_WAVEFORM
+ ['HAVE_GMTIME_R', 'gmtime_r', '#include<time.h>'],
+ ]
+
+ libm = cc.find_library('m', required : false)
+
+ foreach f : check_functions
+ if cc.has_function(f.get(1), prefix : f.get(2), dependencies : libm)
+ cdata.set(f.get(0), 1)
+ endif
+ endforeach
+
+ cdata.set('HAVE_IOS', host_system == 'ios')
+
+ cdata.set('SIZEOF_CHAR', cc.sizeof('char'))
+ cdata.set('SIZEOF_INT', cc.sizeof('int'))
+ cdata.set('SIZEOF_LONG', cc.sizeof('long'))
+ cdata.set('SIZEOF_SHORT', cc.sizeof('short'))
+ cdata.set('SIZEOF_VOIDP', cc.sizeof('void*'))
+ cdata.set('SIZEOF_OFF_T', cc.sizeof('off_t'))
+
+ # Here be fixmes.
+ # FIXME: check if this is correct
+ cdata.set('HAVE_CPU_X86_64', host_machine.cpu() == 'amd64')
+ cdata.set('HAVE_GCC_ASM', cc.get_id() != 'msvc')
+ cdata.set_quoted('VERSION', gst_version)
+ cdata.set_quoted('PACKAGE_VERSION', gst_version)
+ cdata.set_quoted('GST_LICENSE', 'LGPL')
+ cdata.set_quoted('PACKAGE', 'gst-plugins-good')
+ cdata.set_quoted('GETTEXT_PACKAGE', 'gst-plugins-good-1.0')
+ cdata.set_quoted('LOCALEDIR', join_paths(get_option('prefix'), get_option('localedir')))
+
+ warning_flags = [
+ '-Wmissing-declarations',
+ '-Wredundant-decls',
+ '-Wwrite-strings',
+ '-Winit-self',
+ '-Wmissing-include-dirs',
+ '-Wno-multichar',
+ '-Wvla',
+ '-Wpointer-arith',
+ ]
+
+ warning_c_flags = [
+ '-Wmissing-prototypes',
+ '-Wdeclaration-after-statement',
+ '-Wold-style-definition',
+ '-Waggregate-return',
+ ]
+
+ if have_cxx
+ cxx = meson.get_compiler('cpp')
+ endif
+
+ foreach extra_arg : warning_flags
+ if cc.has_argument (extra_arg)
+ add_project_arguments([extra_arg], language: 'c')
+ endif
+ if have_cxx and cxx.has_argument (extra_arg)
+ add_project_arguments([extra_arg], language: 'cpp')
+ endif
+ endforeach
+
+ foreach extra_arg : warning_c_flags
+ if cc.has_argument (extra_arg)
+ add_project_arguments([extra_arg], language: 'c')
+ endif
+ endforeach
+
+ # GStreamer package name and origin url
+ gst_package_name = get_option('package-name')
+ if gst_package_name == ''
+ if gst_version_nano == 0
+ gst_package_name = 'GStreamer Good Plug-ins source release'
+ elif gst_version_nano == 1
+ gst_package_name = 'GStreamer Good Plug-ins git'
+ else
+ gst_package_name = 'GStreamer Good Plug-ins prerelease'
+ endif
+ endif
+ cdata.set_quoted('GST_PACKAGE_NAME', gst_package_name)
+ cdata.set_quoted('GST_PACKAGE_ORIGIN', get_option('package-origin'))
+
+ # Mandatory GST deps
+ gst_dep = dependency('gstreamer-1.0', version : gst_req,
+ fallback : ['gstreamer', 'gst_dep'])
+ gstbase_dep = dependency('gstreamer-base-1.0', version : gst_req,
+ fallback : ['gstreamer', 'gst_base_dep'])
+ gstnet_dep = dependency('gstreamer-net-1.0', version : gst_req,
+ fallback : ['gstreamer', 'gst_net_dep'])
+ gstcontroller_dep = dependency('gstreamer-controller-1.0', version : gst_req,
+ fallback : ['gstreamer', 'gst_controller_dep'])
+ gstcheck_dep = dependency('gstreamer-check-1.0', version : gst_req,
+ required : get_option('tests'),
+ fallback : ['gstreamer', 'gst_check_dep'])
+ gstpbutils_dep = dependency('gstreamer-pbutils-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'pbutils_dep'])
+ gstallocators_dep = dependency('gstreamer-allocators-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'allocators_dep'])
+ gstapp_dep = dependency('gstreamer-app-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'app_dep'])
+ gstaudio_dep = dependency('gstreamer-audio-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'audio_dep'])
+ gstfft_dep = dependency('gstreamer-fft-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'fft_dep'])
+ gstriff_dep = dependency('gstreamer-riff-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'riff_dep'])
+ gstrtp_dep = dependency('gstreamer-rtp-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'rtp_dep'])
+ gstrtsp_dep = dependency('gstreamer-rtsp-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'rtsp_dep'])
+ gstsdp_dep = dependency('gstreamer-sdp-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'sdp_dep'])
+ gsttag_dep = dependency('gstreamer-tag-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'tag_dep'])
+ gstvideo_dep = dependency('gstreamer-video-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'video_dep'])
+
+ # GStreamer OpenGL
+ # FIXME: automagic
+ gstgl_dep = dependency('gstreamer-gl-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'gstgl_dep'], required: false)
+ gstglproto_dep = dependency('', required : false)
+ gstglx11_dep = dependency('', required : false)
+ gstglwayland_dep = dependency('', required : false)
+ gstglegl_dep = dependency('', required : false)
+
+ have_gstgl = gstgl_dep.found()
+
+ if have_gstgl
+ if gstgl_dep.type_name() == 'pkgconfig'
+ gst_gl_apis = gstgl_dep.get_pkgconfig_variable('gl_apis').split()
+ gst_gl_winsys = gstgl_dep.get_pkgconfig_variable('gl_winsys').split()
+ gst_gl_platforms = gstgl_dep.get_pkgconfig_variable('gl_platforms').split()
+ else
+ gstbase = subproject('gst-plugins-base')
+ gst_gl_apis = gstbase.get_variable('enabled_gl_apis')
+ gst_gl_winsys = gstbase.get_variable('enabled_gl_winsys')
+ gst_gl_platforms = gstbase.get_variable('enabled_gl_platforms')
+ endif
+
+ message('GStreamer OpenGL window systems: @0@'.format(' '.join(gst_gl_winsys)))
+ message('GStreamer OpenGL platforms: @0@'.format(' '.join(gst_gl_platforms)))
+ message('GStreamer OpenGL apis: @0@'.format(' '.join(gst_gl_apis)))
+
+ foreach ws : ['x11', 'wayland', 'android', 'cocoa', 'eagl', 'win32', 'dispmanx', 'viv_fb']
+ set_variable('gst_gl_have_window_@0@'.format(ws), gst_gl_winsys.contains(ws))
+ endforeach
+
+ foreach p : ['glx', 'egl', 'cgl', 'eagl', 'wgl']
+ set_variable('gst_gl_have_platform_@0@'.format(p), gst_gl_platforms.contains(p))
+ endforeach
+
+ foreach api : ['gl', 'gles2']
+ set_variable('gst_gl_have_api_@0@'.format(api), gst_gl_apis.contains(api))
+ endforeach
+
+ gstglproto_dep = dependency('gstreamer-gl-prototypes-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'gstglproto_dep'], required: true)
+ # Behind specific checks because meson fails at optional dependencies with a
+ # fallback to the same subproject. On the first failure, meson will never
+ # check the system again even if the fallback never existed.
+ # Last checked with meson 0.54.3
+ if gst_gl_have_window_x11
+ gstglx11_dep = dependency('gstreamer-gl-x11-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'gstglx11_dep'], required: true)
+ endif
+ if gst_gl_have_window_wayland
+ gstglwayland_dep = dependency('gstreamer-gl-wayland-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'gstglwayland_dep'], required: true)
+ endif
+ if gst_gl_have_platform_egl
+ gstglegl_dep = dependency('gstreamer-gl-egl-1.0', version : gst_req,
+ fallback : ['gst-plugins-base', 'gstglegl_dep'], required: true)
+ endif
+ endif
+
+ zlib_dep = dependency('zlib', required : false)
+ if not zlib_dep.found()
+ zlib_dep = cc.find_library('z', required : false)
+ if not zlib_dep.found() or not cc.has_header('zlib.h')
+ zlib_dep = subproject('zlib').get_variable('zlib_dep')
+ endif
+ endif
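+ # At this point zlib is guaranteed to exist (either the system copy or the
+ # subproject fallback), so HAVE_ZLIB can be set unconditionally.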
+ cdata.set('HAVE_ZLIB', true)
+
+ glib_deps = [dependency('glib-2.0', version : glib_req, fallback: ['glib', 'libglib_dep']),
+ dependency('gobject-2.0', fallback: ['glib', 'libgobject_dep'])]
+ gio_dep = dependency('gio-2.0', fallback: ['glib', 'libgio_dep'])
+
+ gst_plugins_good_args = ['-DHAVE_CONFIG_H']
+ configinc = include_directories('.')
+ libsinc = include_directories('gst-libs')
+
+ have_orcc = false
+ orcc_args = []
+ orc_targets = []
+ # Used by various libraries/elements that use Orc code
+ orc_dep = dependency('orc-0.4', version : orc_req, required : get_option('orc'),
+ fallback : ['orc', 'orc_dep'])
+ orcc = find_program('orcc', required : get_option('orc'))
+ if orc_dep.found() and orcc.found()
+ have_orcc = true
+ orcc_args = [orcc, '--include', 'glib.h']
+ cdata.set('HAVE_ORC', 1)
+ else
+ message('Orc Compiler not found, will use backup C code')
+ cdata.set('DISABLE_ORC', 1)
+ endif
+
+ have_nasm = false
+ # FIXME: nasm path needs testing on non-Linux, esp. Windows
+ host_cpu = host_machine.cpu_family()
+ if host_cpu == 'x86_64'
+ if cc.get_define('__ILP32__') == '1'
+ message('Nasm disabled on x32')
+ else
+ asm_option = get_option('asm')
+ nasm = find_program('nasm', native: true, required: asm_option)
+ if nasm.found()
+ # We can't use the version: kwarg for find_program because old versions
+ # of nasm don't support --version
+ ret = run_command(nasm, '-v')
+ if ret.returncode() == 0
+ nasm_version = ret.stdout().strip().split()[2]
+ nasm_req = '>=2.13'
+ if nasm_version.version_compare(nasm_req)
+ message('nasm found on x86-64')
+ cdata.set('HAVE_NASM', 1)
+ have_nasm = true
+ else
+ if asm_option.enabled()
+ error('asm option is enabled, and nasm @0@ was found, but @1@ is required'.format(nasm_version, nasm_req))
+ endif
+ message('nasm @0@ was found, but @1@ is required'.format(nasm_version, nasm_req))
+ endif
+ else
+ if asm_option.enabled()
+ error('asm option is enabled, but nasm is not usable: @0@\n@1@'.format(ret.stdout(), ret.stderr()))
+ endif
+ message('nasm was found, but it\'s not usable')
+ endif
+ # Unset nasm to not be 'found'
+ if not have_nasm
+ nasm = disabler()
+ endif
+ endif
+ endif
+ endif
+
++# TIZEN_BUILD_OPTION
++
++tbm_dep = dependency('libtbm', required : get_option('tbm'))
++if tbm_dep.found()
++ cdata.set('TIZEN_FEATURE_V4L2_TBM_SUPPORT', 1)
++endif
++
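++# Re-resolve gio-2.0 as a hard dependency (no subproject fallback); the
++# Tizen modifications link against it unconditionally.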
++gio_dep = dependency('gio-2.0')
++
++cdata.set('TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE', true)
++cdata.set('TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID', true)
++cdata.set('TIZEN_FEATURE_V4L2VIDEO_ADJ_RANK', true)
++cdata.set('TIZEN_FEATURE_WAVPARSE_MODIFICATION', true)
++cdata.set('TIZEN_FEATURE_MP3PARSE_MODIFICATION', true)
++cdata.set('TIZEN_FEATURE_AACPARSE_MODIFICATION', true)
++cdata.set('TIZEN_FEATURE_QTDEMUX_MODIFICATION', true)
++cdata.set('TIZEN_FEATURE_QTDEMUX_DURATION', true)
++cdata.set('TIZEN_FEATURE_FLVDEMUX_MODIFICATION', true)
++cdata.set('TIZEN_FEATURE_RTSP_MODIFICATION', true)
++cdata.set('TIZEN_FEATURE_SOUP_MODIFICATION', true)
++cdata.set('TIZEN_FEATURE_RGVOLUME_MODIFICATION', true)
++cdata.set('TIZEN_FEATURE_DISABLE_V4L2_DEPENDENCY', true)
++cdata.set('TIZEN_FEATURE_AVIDEMUX_MODIFICATION', true)
++cdata.set('TIZEN_FEATURE_USE_LIBV4L2', true)
++cdata.set('TIZEN_FEATURE_V4L2_ADDITIONAL_CID_SUPPORT', true)
++
++# TIZEN_BUILD_OPTION end
++
+ # Disable compiler warnings for unused variables and args if gst debug system is disabled
+ if gst_dep.type_name() == 'internal'
+ gst_debug_disabled = not subproject('gstreamer').get_variable('gst_debug')
+ else
+ # We can't check that in the case of subprojects as we won't
+ # be able to build against an internal dependency (which is not built yet)
+ gst_debug_disabled = cc.has_header_symbol('gst/gstconfig.h', 'GST_DISABLE_GST_DEBUG', dependencies: gst_dep)
+ endif
+
+ if gst_debug_disabled
+ message('GStreamer debug system is disabled')
+ if cc.has_argument('-Wno-unused')
+ add_project_arguments('-Wno-unused', language: 'c')
+ endif
+ if have_cxx and cxx.has_argument ('-Wno-unused')
+ add_project_arguments('-Wno-unused', language: 'cpp')
+ endif
+ else
+ message('GStreamer debug system is enabled')
+ endif
+
+ presetdir = join_paths(get_option('datadir'), 'gstreamer-' + api_version, 'presets')
+
+ python3 = import('python').find_installation()
+ pkgconfig = import('pkgconfig')
+ plugins_pkgconfig_install_dir = join_paths(plugins_install_dir, 'pkgconfig')
+ if get_option('default_library') == 'shared'
+ # If we don't build static plugins there is no need to generate pc files
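+ # (disabler() propagates: any pkgconfig.generate() call that receives it
+ # is skipped, so per-plugin .pc files are only emitted for static builds)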
+ plugins_pkgconfig_install_dir = disabler()
+ endif
+
+ subdir('gst')
+ subdir('sys')
+ subdir('ext')
+ subdir('tests')
+ subdir('docs')
+
+ if have_orcc and orc_targets.length() > 0
+ update_orc_dist_files = find_program('scripts/update-orc-dist-files.py')
+
+ orc_update_targets = []
+ foreach t : orc_targets
+ orc_name = t.get('name')
+ orc_file = t.get('orc-source')
+ header = t.get('header')
+ source = t.get('source')
+ # alias_target() only works with build targets, so can't use run_target() here
+ orc_update_targets += [
+ custom_target('update-orc-@0@'.format(orc_name),
+ input: [header, source],
+ command: [update_orc_dist_files, orc_file, header, source],
+ output: ['@0@-dist.c'.format(orc_name)]) # not entirely true
+ ]
+ endforeach
+
+ if meson.version().version_compare('>= 0.52')
+ update_orc_dist_target = alias_target('update-orc-dist', orc_update_targets)
+ endif
+ endif
+
+ # xgettext is optional (on Windows for instance)
+ if find_program('xgettext', required : get_option('nls')).found()
+ cdata.set('ENABLE_NLS', 1)
+ subdir('po')
+ endif
+
+ subdir('scripts')
+
+ # Set release date
+ if gst_version_nano == 0
+ extract_release_date = find_program('scripts/extract-release-date-from-doap-file.py')
+ run_result = run_command(extract_release_date, gst_version, files('gst-plugins-good.doap'))
+ if run_result.returncode() == 0
+ release_date = run_result.stdout().strip()
+ cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
+ message('Package release date: ' + release_date)
+ else
+ # Error out if our release can't be found in the .doap file
+ error(run_result.stderr())
+ endif
+ endif
+
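+ # g_memdup() takes a guint length and silently truncates larger sizes; on
+ # GLib versions that predate g_memdup2() (which takes a gsize), emulate it
+ # with a macro that aborts instead of truncating copies of G_MAXUINT or more.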
+ if gio_dep.version().version_compare('< 2.67.4')
+ cdata.set('g_memdup2(ptr,sz)', '(G_LIKELY(((guint64)(sz)) < G_MAXUINT)) ? g_memdup(ptr,sz) : (g_abort(),NULL)')
+ endif
+
+ configure_file(output : 'config.h', configuration : cdata)
+
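+ # Best-effort copy of the git pre-commit hook; the result is ignored, so
+ # configuring from a tarball without a .git directory still succeeds.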
+ run_command(python3, '-c', 'import shutil; shutil.copy("hooks/pre-commit.hook", ".git/hooks/pre-commit")')
+
+ if meson.version().version_compare('>= 0.54')
+ plugin_names = []
+ foreach plugin: plugins
+ # FIXME: Use str.subtring() when we can depend on Meson 0.56
+ split = plugin.name().split('gst')
+ if split.length() == 2
+ plugin_names += [split[1]]
+ else
+ warning('Need substring API in meson >= 0.56 to properly parse plugin name: ' + plugin.name())
+ plugin_names += [plugin.name()]
+ endif
+ endforeach
+ summary({'Plugins':plugin_names}, list_sep: ', ')
+ endif
--- /dev/null
+ # Feature options for plugins without external deps
+ option('alpha', type : 'feature', value : 'auto')
+ option('apetag', type : 'feature', value : 'auto')
+ option('audiofx', type : 'feature', value : 'auto')
+ option('audioparsers', type : 'feature', value : 'auto')
+ option('auparse', type : 'feature', value : 'auto')
+ option('autodetect', type : 'feature', value : 'auto')
+ option('avi', type : 'feature', value : 'auto')
+ option('cutter', type : 'feature', value : 'auto')
+ option('debugutils', type : 'feature', value : 'auto')
+ option('deinterlace', type : 'feature', value : 'auto')
+ option('dtmf', type : 'feature', value : 'auto')
+ option('effectv', type : 'feature', value : 'auto')
+ option('equalizer', type : 'feature', value : 'auto')
+ option('flv', type : 'feature', value : 'auto')
+ option('flx', type : 'feature', value : 'auto')
+ option('goom', type : 'feature', value : 'auto')
+ option('goom2k1', type : 'feature', value : 'auto')
+ option('icydemux', type : 'feature', value : 'auto')
+ option('id3demux', type : 'feature', value : 'auto')
+ option('imagefreeze', type : 'feature', value : 'auto')
+ option('interleave', type : 'feature', value : 'auto')
+ option('isomp4', type : 'feature', value : 'auto')
+ option('law', type : 'feature', value : 'auto')
+ option('level', type : 'feature', value : 'auto')
+ option('matroska', type : 'feature', value : 'auto')
+ option('monoscope', type : 'feature', value : 'auto')
+ option('multifile', type : 'feature', value : 'auto')
+ option('multipart', type : 'feature', value : 'auto')
+ option('replaygain', type : 'feature', value : 'auto')
+ option('rtp', type : 'feature', value : 'auto')
+ option('rtpmanager', type : 'feature', value : 'auto')
+ option('rtsp', type : 'feature', value : 'auto')
+ option('shapewipe', type : 'feature', value : 'auto')
+ option('smpte', type : 'feature', value : 'auto')
+ option('spectrum', type : 'feature', value : 'auto')
+ option('udp', type : 'feature', value : 'auto')
+ option('videobox', type : 'feature', value : 'auto')
+ option('videocrop', type : 'feature', value : 'auto')
+ option('videofilter', type : 'feature', value : 'auto')
+ option('videomixer', type : 'feature', value : 'auto')
+ option('wavenc', type : 'feature', value : 'auto')
+ option('wavparse', type : 'feature', value : 'auto')
+ option('y4m', type : 'feature', value : 'auto')
+
+ # Feature options for plugins with external deps
+ option('aalib', type : 'feature', value : 'auto', description : 'aalib text console video sink plugin')
+ option('bz2', type : 'feature', value : 'auto', description : 'libbz2 support in the matroska plugin')
+ option('cairo', type : 'feature', value : 'auto', description : 'Cairo overlay plugin')
+ option('directsound', type : 'feature', value : 'auto', description : 'Directsound audio source/sink plugin')
+ option('dv', type : 'feature', value : 'auto', description : 'Digital video decoder and demuxer plugin')
+ option('dv1394', type : 'feature', value : 'auto', description : 'Digital IEEE1394 interface video source plugin')
+ option('flac', type : 'feature', value : 'auto', description : 'FLAC audio codec plugin')
+ option('gdk-pixbuf', type : 'feature', value : 'auto', description : 'gdk-pixbuf image decoder, overlay, and sink plugin')
+ option('gtk3', type : 'feature', value : 'auto', description : 'GTK+ video sink plugin')
+ option('jack', type : 'feature', value : 'auto', description : 'JACK audio source/sink plugin')
+ option('jpeg', type : 'feature', value : 'auto', description : 'JPEG image codec plugin')
+ option('lame', type : 'feature', value : 'auto', description : 'LAME mp3 audio encoder plugin')
+ option('libcaca', type : 'feature', value : 'auto', description : 'libcaca text console video sink plugin')
+ option('mpg123', type : 'feature', value : 'auto', description : 'mpg123 mp3 audio decoder plugin')
+ option('oss', type : 'feature', value : 'auto', description : 'OSS audio source/sink plugin')
+ option('oss4', type : 'feature', value : 'auto', description : 'OSSv4 audio source/sink plugin')
+ option('osxaudio', type : 'feature', value : 'auto', description : 'macOS/iOS CoreAudio source/sink plugin')
+ option('osxvideo', type : 'feature', value : 'auto', description : 'macOS Cocoa video sink plugin')
+ option('png', type : 'feature', value : 'auto', description : 'PNG image codec plugin')
+ option('pulse', type : 'feature', value : 'auto', description : 'Pulseaudio audio source/sink plugin')
+ option('qt5', type : 'feature', value : 'auto', yield : true, description : 'Qt5 QML video sink plugin')
+ option('shout2', type : 'feature', value : 'auto', description : 'Shout-casting network sink plugin based on libshout2')
+ option('soup', type : 'feature', value : 'auto', description : 'libsoup HTTP client source/sink plugin')
+ option('speex', type : 'feature', value : 'auto', description : 'Speex audio codec plugin')
+ option('taglib', type : 'feature', value : 'auto', description : 'Tag-writing plugin based on taglib')
+ option('twolame', type : 'feature', value : 'auto', description : 'twolame mp2 audio encoder plugin')
+ option('vpx', type : 'feature', value : 'auto', description : 'VP8 and VP9 video codec plugin')
+ option('waveform', type : 'feature', value : 'auto', description : 'Windows waveform audio sink plugin')
+ option('wavpack', type : 'feature', value : 'auto', description : 'Wavpack audio codec plugin')
+
+ # rpicamsrc plugin options
+ option('rpicamsrc', type : 'feature', value : 'auto', description : 'Raspberry Pi camera module plugin')
+ option('rpi-header-dir', type : 'string', value : '/opt/vc/include', description : 'Directory where VideoCore/MMAL headers and bcm_host.h can be found')
+ option('rpi-lib-dir', type : 'string', value : '/opt/vc/lib', description : 'Directory where VideoCore/MMAL libraries can be found')
+
+ # ximagesrc plugin options
+ option('ximagesrc', type : 'feature', value : 'auto', description : 'X11 ximagesrc plugin')
+ option('ximagesrc-xshm', type : 'feature', value : 'auto', description : 'X11 ximagesrc plugin (XSHM support)')
+ option('ximagesrc-xfixes', type : 'feature', value : 'auto', description : 'X11 ximagesrc plugin (XFixes support)')
+ option('ximagesrc-xdamage', type : 'feature', value : 'auto', description : 'X11 ximagesrc plugin (XDamage support)')
+
+ # v4l2 plugin options
+ option('v4l2', type : 'feature', value : 'auto', description : 'Build video4linux2 source/sink plugin')
+ option('v4l2-probe', type : 'boolean', value : true, description : 'Probe v4l2 devices when the v4l2 plugin is loaded')
+ option('v4l2-libv4l2', type : 'feature', value : 'auto', description : 'Use libv4l2 for some obscure format conversions')
+ option('v4l2-gudev', type : 'feature', value : 'auto', description : 'Use libgudev for probing v4l2 devices')
+
+ # Common feature options
+ option('examples', type : 'feature', value : 'auto', yield : true)
+ option('tests', type : 'feature', value : 'auto', yield : true)
+ option('nls', type : 'feature', value : 'auto', yield: true, description : 'Enable native language support (translations)')
+ option('orc', type : 'feature', value : 'auto', yield : true)
+ option('gobject-cast-checks', type : 'feature', value : 'auto', yield : true,
+ description: 'Enable run-time GObject cast checks (auto = enabled for development, disabled for stable releases)')
+ option('glib-asserts', type : 'feature', value : 'enabled', yield : true,
+ description: 'Enable GLib assertion (auto = enabled for development, disabled for stable releases)')
+ option('glib-checks', type : 'feature', value : 'enabled', yield : true,
+ description: 'Enable GLib checks such as API guards (auto = enabled for development, disabled for stable releases)')
+ option('asm', type : 'feature', value : 'auto', yield : true)
+
+ # Common options
+ option('package-name', type : 'string', yield : true,
+ description : 'package name to use in plugins')
+ option('package-origin', type : 'string', value : 'Unknown package origin', yield : true,
+ description : 'package origin URL to use in plugins')
+ option('doc', type : 'feature', value : 'auto', yield: true,
+ description: 'Enable documentation.')
++
++# Tizen Options
++option('tbm', type : 'boolean', value : true,
++ description : 'Tizen buffer manager')
--- /dev/null
-# Stéphane Aulery <lkppo@free.fr>, 2015, 2017.
+ # Translation of gst-plugins-good to French
+ # Copyright (C) 2003-2011 GStreamer core team
+ # This file is distributed under the same license as the gst-plugins-good package.
+ #
+ # Nicolas Velin <nicolas@velin.fr>, 2008.
+ # Claude Paroz <claude@2xlibre.net>, 2008-2011.
-"Project-Id-Version: gst-plugins-good 1.12.0\n"
++# Stéphane Aulery <lkppo@free.fr>, 2015, 2017, 2019.
+ #
+ msgid ""
+ msgstr ""
-"POT-Creation-Date: 2019-02-26 11:47+0000\n"
-"PO-Revision-Date: 2017-05-05 22:27+0200\n"
++"Project-Id-Version: gst-plugins-good 1.15.1\n"
+ "Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-msgid ""
-"Failed to configure LAME mp3 audio encoder. Check your encoding parameters."
-msgstr ""
++"POT-Creation-Date: 2019-01-17 01:59+0000\n"
++"PO-Revision-Date: 2019-05-12 03:57+0200\n"
+ "Last-Translator: Stéphane Aulery <lkppo@free.fr>\n"
+ "Language-Team: French <traduc@traduc.org>\n"
+ "Language: fr\n"
+ "MIME-Version: 1.0\n"
+ "Content-Type: text/plain; charset=UTF-8\n"
+ "Content-Transfer-Encoding: 8bit\n"
+ "X-Bugs: Report translation errors to the Language-Team address.\n"
+
++#: ext/jack/gstjackaudiosink.c:356 ext/jack/gstjackaudiosrc.c:364
+ msgid "Jack server not found"
+ msgstr "serveur Jack introuvable"
+
++#: ext/jpeg/gstjpegdec.c:936 ext/jpeg/gstjpegdec.c:1103
++#: ext/jpeg/gstjpegdec.c:1112 ext/jpeg/gstjpegdec.c:1122
++#: ext/jpeg/gstjpegdec.c:1131 ext/jpeg/gstjpegdec.c:1393
++#: ext/jpeg/gstjpegdec.c:1421
+ msgid "Failed to decode JPEG image"
+ msgstr "Échec de décodage de l’image JPEG"
+
-msgid ""
-"The requested bitrate %d kbit/s for property '%s' is not allowed. The "
-"bitrate was changed to %d kbit/s."
-msgstr ""
++#: ext/lame/gstlamemp3enc.c:393
++msgid "Failed to configure LAME mp3 audio encoder. Check your encoding parameters."
++msgstr "Configuration de l'encodeur audio LAME mp3 échouée. Vérifiez ses paramètres."
+
++#: ext/lame/gstlamemp3enc.c:425 ext/twolame/gsttwolamemp2enc.c:488
+ #, c-format
-msgid ""
-"A network error occurred, or the server closed the connection unexpectedly."
-msgstr ""
-"Une erreur réseau s’est produite ou le serveur a fermé abruptement la "
-"connexion."
++msgid "The requested bitrate %d kbit/s for property '%s' is not allowed. The bitrate was changed to %d kbit/s."
++msgstr "Le débit demandé de %d kbit/s pour la propriété « %s » n'est pas autorisé. Le débit a été changé en %d kbits/s."
+
+ #. TRANSLATORS: 'song title' by 'artist name'
++#: ext/pulse/pulsesink.c:3127
+ #, c-format
+ msgid "'%s' by '%s'"
+ msgstr "« %s » par « %s »"
+
++#: ext/shout2/gstshout2.c:619 ext/shout2/gstshout2.c:629
+ msgid "Could not connect to server"
+ msgstr "Impossible de se connecter au serveur"
+
++#: ext/soup/gstsouphttpsrc.c:914
+ msgid "No URL set."
+ msgstr "Aucun URL défini."
+
++#: ext/soup/gstsouphttpsrc.c:1372
+ msgid "Could not resolve server name."
+ msgstr "Impossible de résoudre le nom du serveur."
+
++#: ext/soup/gstsouphttpsrc.c:1377
+ msgid "Could not establish connection to server."
+ msgstr "Impossible d’établir une connexion vers le serveur."
+
++#: ext/soup/gstsouphttpsrc.c:1381
+ msgid "Secure connection setup failed."
+ msgstr "La configuration d’une connexion sécurisée a échoué."
+
-msgstr ""
++#: ext/soup/gstsouphttpsrc.c:1387
++msgid "A network error occurred, or the server closed the connection unexpectedly."
++msgstr "Une erreur réseau s’est produite ou le serveur a fermé abruptement la connexion."
+
++#: ext/soup/gstsouphttpsrc.c:1392
+ msgid "Server sent bad data."
+ msgstr "Le serveur a envoyé de mauvaises données."
+
++#: ext/soup/gstsouphttpsrc.c:1616
+ msgid "Server does not support seeking."
+ msgstr "Le serveur ne gère pas la recherche."
+
++#: ext/twolame/gsttwolamemp2enc.c:411
+ msgid "Failed to configure TwoLAME encoder. Check your encoding parameters."
-msgstr ""
-"Il est possible que la vidéo dans ce fichier ne puisse pas être lue "
-"correctement."
++msgstr "Configuration de l'encodeur TwoLAME échoué. Vérifiez ses paramètres."
+
++#: gst/avi/gstavimux.c:1832
+ msgid "No or invalid input audio, AVI stream will be corrupt."
+ msgstr "Entrée audio absente ou non valide, le flux AVI sera corrompu."
+
++#: gst/isomp4/qtdemux.c:713 gst/isomp4/qtdemux.c:717
+ msgid "This file contains no playable streams."
+ msgstr "Ce fichier ne contient aucun flux exploitable."
+
++#: gst/isomp4/qtdemux.c:763 gst/isomp4/qtdemux.c:7003
++#: gst/isomp4/qtdemux.c:7072 gst/isomp4/qtdemux.c:7362
++#: gst/isomp4/qtdemux.c:8800
+ msgid "This file is invalid and cannot be played."
+ msgstr "Ce fichier n'est pas valide et ne peut donc pas être lu."
+
++#: gst/isomp4/qtdemux.c:3091
+ msgid "Cannot play stream because it is encrypted with PlayReady DRM."
+ msgstr "Impossible de lire le flux car il est chiffré par un DRM PlayReady."
+
++#: gst/isomp4/qtdemux.c:4312 gst/isomp4/qtdemux.c:8131
++#: gst/isomp4/qtdemux.c:8138 gst/isomp4/qtdemux.c:9283
++#: gst/isomp4/qtdemux.c:9720 gst/isomp4/qtdemux.c:9727
++#: gst/isomp4/qtdemux.c:12528
+ msgid "This file is corrupt and cannot be played."
+ msgstr "Ce fichier est corrompu et ne peut pas être lu."
+
++#: gst/isomp4/qtdemux.c:4554
+ msgid "Invalid atom size."
+ msgstr "Taille d’atome non valide."
+
++#: gst/isomp4/qtdemux.c:4633
+ msgid "This file is incomplete and cannot be played."
+ msgstr "Ce fichier n’est pas complet et ne peut donc pas être lu."
+
++#: gst/isomp4/qtdemux.c:10756
+ msgid "The video in this file might not play correctly."
-msgid ""
-"No supported stream was found. You might need to install a GStreamer RTSP "
-"extension plugin for Real media streams."
-msgstr ""
-"Aucun flux pris en charge n’a été trouvé. Il faut peut-être installer un "
-"greffon d’extension GStreamer RTSP pour les flux Real media."
++msgstr "Il est possible que la vidéo dans ce fichier ne puisse pas être lue correctement."
+
-msgid ""
-"No supported stream was found. You might need to allow more transport "
-"protocols or may otherwise be missing the right GStreamer RTSP extension "
-"plugin."
-msgstr ""
-"Aucun flux pris en charge n’a été trouvé. Il faut peut-être autoriser "
-"davantage de protocoles de transport ou il manque peut-être le bon greffon "
-"d’extension GStreamer RTSP."
++#: gst/rtsp/gstrtspsrc.c:7398
++msgid "No supported stream was found. You might need to install a GStreamer RTSP extension plugin for Real media streams."
++msgstr "Aucun flux pris en charge n’a été trouvé. Il faut peut-être installer un greffon d’extension GStreamer RTSP pour les flux Real media."
+
-msgid ""
-"Could not open audio device for playback. Device is being used by another "
-"application."
-msgstr ""
-"Impossible d’ouvrir le périphérique audio en lecture. Le périphérique est "
-"utilisé par une autre application."
++#: gst/rtsp/gstrtspsrc.c:7403
++msgid "No supported stream was found. You might need to allow more transport protocols or may otherwise be missing the right GStreamer RTSP extension plugin."
++msgstr "Aucun flux pris en charge n’a été trouvé. Il faut peut-être autoriser davantage de protocoles de transport ou il manque peut-être le bon greffon d’extension GStreamer RTSP."
+
-msgid ""
-"Could not open audio device for playback. You don't have permission to open "
-"the device."
-msgstr ""
-"Impossible d’ouvrir le périphérique audio en lecture. Vous n’avez pas les "
-"droits nécessaires pour ouvrir le périphérique."
++#: sys/oss4/oss4-sink.c:493 sys/oss4/oss4-source.c:358
++#: sys/oss/gstosssink.c:384
++msgid "Could not open audio device for playback. Device is being used by another application."
++msgstr "Impossible d’ouvrir le périphérique audio en lecture. Le périphérique est utilisé par une autre application."
+
-msgid ""
-"Could not open audio device for playback. This version of the Open Sound "
-"System is not supported by this element."
-msgstr ""
-"Impossible d’ouvrir le périphérique audio en lecture. Cette version d’OSS "
-"(Open Sound System) n’est pas prise en charge par cet élément."
++#: sys/oss4/oss4-sink.c:503 sys/oss4/oss4-source.c:368
++#: sys/oss/gstosssink.c:391
++msgid "Could not open audio device for playback. You don't have permission to open the device."
++msgstr "Impossible d’ouvrir le périphérique audio en lecture. Vous n’avez pas les droits nécessaires pour ouvrir le périphérique."
+
++#: sys/oss4/oss4-sink.c:514 sys/oss4/oss4-source.c:379
++#: sys/oss/gstosssink.c:399
+ msgid "Could not open audio device for playback."
+ msgstr "Impossible d’ouvrir le périphérique audio en lecture."
+
-msgid ""
-"Could not open audio device for recording. You don't have permission to open "
-"the device."
-msgstr ""
-"Impossible d’ouvrir le périphérique audio pour l’enregistrement. Vous n’avez "
-"pas les droits nécessaires pour ouvrir le périphérique."
++#: sys/oss4/oss4-sink.c:523 sys/oss4/oss4-source.c:389
++msgid "Could not open audio device for playback. This version of the Open Sound System is not supported by this element."
++msgstr "Impossible d’ouvrir le périphérique audio en lecture. Cette version d’OSS (Open Sound System) n’est pas prise en charge par cet élément."
+
++#: sys/oss4/oss4-sink.c:646
+ msgid "Playback is not supported by this audio device."
+ msgstr "La lecture n’est pas prise en charge par ce périphérique audio."
+
++#: sys/oss4/oss4-sink.c:653
+ msgid "Audio playback error."
+ msgstr "Erreur de lecture audio."
+
++#: sys/oss4/oss4-source.c:503
+ msgid "Recording is not supported by this audio device."
+ msgstr "L’enregistrement n’est pas pris en charge par ce périphérique audio."
+
++#: sys/oss4/oss4-source.c:510
+ msgid "Error recording from audio device."
+ msgstr "Erreur lors de l’enregistrement à partir du périphérique audio."
+
-msgstr ""
-"Impossible d’énumérer les formats vidéo compatibles avec le périphérique "
-"« %s »"
++#: sys/oss/gstosssrc.c:376
++msgid "Could not open audio device for recording. You don't have permission to open the device."
++msgstr "Impossible d’ouvrir le périphérique audio pour l’enregistrement. Vous n’avez pas les droits nécessaires pour ouvrir le périphérique."
+
++#: sys/oss/gstosssrc.c:384
+ msgid "Could not open audio device for recording."
+ msgstr "Impossible d’ouvrir le périphérique audio pour l’enregistrement."
+
++#: sys/osxaudio/gstosxaudioringbuffer.c:149
+ msgid "CoreAudio device not found"
+ msgstr "Périphèrique CoreAudio introuvable"
+
++#: sys/osxaudio/gstosxaudioringbuffer.c:155
+ msgid "CoreAudio device could not be opened"
+ msgstr "Le périphèrique CoreAudio n’a pas pu être ouvert"
+
++#: sys/v4l2/gstv4l2bufferpool.c:1712
+ #, c-format
+ msgid "Error reading %d bytes from device '%s'."
+ msgstr "Erreur de lecture de %d octets sur le périphérique « %s »."
+
++#: sys/v4l2/gstv4l2object.c:1223
+ #, c-format
+ msgid "Failed to enumerate possible video formats device '%s' can work with"
-msgstr ""
-"Le pilote du périphérique « %s » ne prend en charge la méthode d’entrée-"
-"sortie %d"
++msgstr "Impossible d’énumérer les formats vidéo compatibles avec le périphérique « %s »"
+
++#: sys/v4l2/gstv4l2object.c:2956
+ #, c-format
+ msgid "Could not map buffers from device '%s'"
+ msgstr "Impossible de mapper les tampons du périphérique « %s »."
+
++#: sys/v4l2/gstv4l2object.c:2964
+ #, c-format
+ msgid "The driver of device '%s' does not support the IO method %d"
-msgstr ""
-"Le pilote du périphérique « %s » ne prend en charge aucune méthode d’entrée-"
-"sortie connue."
++msgstr "Le pilote du périphérique « %s » ne prend en charge la méthode d’entrée-sortie %d"
+
++#: sys/v4l2/gstv4l2object.c:2971
+ #, c-format
+ msgid "The driver of device '%s' does not support any known IO method."
-#, fuzzy, c-format
++msgstr "Le pilote du périphérique « %s » ne prend en charge aucune méthode d’entrée-sortie connue."
+
-msgstr ""
-"L’encodeur du périphérique « %s » ne prend en charge aucun format d’entrée"
++#: sys/v4l2/gstv4l2object.c:3741 sys/v4l2/gstv4l2object.c:3765
++#, c-format
+ msgid "Device '%s' has no supported format"
-msgstr ""
++msgstr "Le périphérique « %s » n'a pas format pris en charge"
+
++#: sys/v4l2/gstv4l2object.c:3747 sys/v4l2/gstv4l2object.c:3771
+ #, c-format
+ msgid "Device '%s' failed during initialization"
-msgstr ""
-"Le périphérique « %s » ne peut pas capturer avec une résolution de %d x %d"
++msgstr "L'initialisation du périphérique « %s » a échouée"
+
++#: sys/v4l2/gstv4l2object.c:3759
+ #, c-format
+ msgid "Device '%s' is busy"
+ msgstr "Périphérique « %s » occupé"
+
++#: sys/v4l2/gstv4l2object.c:3782
+ #, c-format
+ msgid "Device '%s' cannot capture at %dx%d"
-#, fuzzy, c-format
++msgstr "Le périphérique « %s » ne peut pas capturer avec une résolution de %d x %d"
+
++#: sys/v4l2/gstv4l2object.c:3791
+ #, c-format
+ msgid "Device '%s' cannot capture in the specified format"
+ msgstr "Le périphérique « %s » ne peut pas capturer dans le format spécifié"
+
++#: sys/v4l2/gstv4l2object.c:3802
+ #, c-format
+ msgid "Device '%s' does support non-contiguous planes"
+ msgstr "Le périphérique « %s » ne prend pas en charge les plans non contiguës"
+
-msgstr "Le serveur ne gère pas la recherche."
++#: sys/v4l2/gstv4l2object.c:3817
++#, c-format
+ msgid "Device '%s' does not support %s interlacing"
-#, fuzzy, c-format
++msgstr "Le périphérique « %s » ne prend pas en charge l'entrelacement %s"
+
-msgstr "Le périphérique « %s » ne prend pas en charge les plans non contiguës"
++#: sys/v4l2/gstv4l2object.c:3831
++#, c-format
+ msgid "Device '%s' does not support %s colorimetry"
-msgstr ""
-"Le périphérique vidéo n’a pas accepté le nouveau paramètre de fréquence "
-"d’image."
++msgstr "Le périphérique « %s » ne prend pas en charge la colorimétrie %s"
+
++#: sys/v4l2/gstv4l2object.c:3843
+ #, c-format
+ msgid "Could not get parameters on device '%s'"
+ msgstr "Impossible d’obtenir les paramètres du périphérique « %s »"
+
++#: sys/v4l2/gstv4l2object.c:3851
+ msgid "Video device did not accept new frame rate setting."
-msgstr ""
-"Le périphérique vidéo utilise une méthode d’entrelacement non prise en "
-"charge."
++msgstr "Le périphérique vidéo n’a pas accepté le nouveau paramètre de fréquence d’image."
+
++#: sys/v4l2/gstv4l2object.c:3977
+ msgid "Video device did not provide output format."
+ msgstr "Le périphérique vidéo n’a pas fourni de format de sortie."
+
++#: sys/v4l2/gstv4l2object.c:3983
+ msgid "Video device returned invalid dimensions."
+ msgstr "Le périphérique vidéo a retourné une dimensions incorrecte."
+
++#: sys/v4l2/gstv4l2object.c:3991
+ msgid "Video device uses an unsupported interlacing method."
-msgstr ""
-"Impossible d’obtenir les paramètres du syntoniseur %d du périphérique « %s »."
++msgstr "Le périphérique vidéo utilise une méthode d’entrelacement non prise en charge."
+
++#: sys/v4l2/gstv4l2object.c:3998
+ msgid "Video device uses an unsupported pixel format."
+ msgstr "Le périphérique vidéo utilise un format de pixel non pris en charge."
+
++#: sys/v4l2/gstv4l2object.c:4518
+ msgid "Failed to configure internal buffer pool."
+ msgstr "Impossible de configurer le spooler interne de tampons."
+
++#: sys/v4l2/gstv4l2object.c:4524
+ msgid "Video device did not suggest any buffer size."
+ msgstr "Le périphérique vidéo n’a suggéré aucune taille de cache."
+
++#: sys/v4l2/gstv4l2object.c:4539
+ msgid "No downstream pool to import from."
+ msgstr "No downstream pool to import from."
+
++#: sys/v4l2/gstv4l2radio.c:143
+ #, c-format
+ msgid "Failed to get settings of tuner %d on device '%s'."
-msgstr ""
-"Le convertisseur du périphérique « %s » ne prend en charge aucun format "
-"d’entrée"
++msgstr "Impossible d’obtenir les paramètres du syntoniseur %d du périphérique « %s »."
+
++#: sys/v4l2/gstv4l2radio.c:150
+ #, c-format
+ msgid "Error getting capabilities for device '%s'."
+ msgstr "Erreur d’interrogation des capacités du périphérique « %s »."
+
++#: sys/v4l2/gstv4l2radio.c:157
+ #, c-format
+ msgid "Device '%s' is not a tuner."
+ msgstr "Le périphérique « %s » n'est pas un syntoniseur."
+
++#: sys/v4l2/gstv4l2radio.c:184
+ #, c-format
+ msgid "Failed to get radio input on device '%s'. "
+ msgstr "Impossible d’obtenir un flux radio depuis le périphérique %s."
+
++#: sys/v4l2/gstv4l2radio.c:207 sys/v4l2/v4l2_calls.c:1072
+ #, c-format
+ msgid "Failed to set input %d on device %s."
+ msgstr "Impossible de définir l’entrée %d sur le périphérique %s."
+
++#: sys/v4l2/gstv4l2radio.c:241
+ #, c-format
+ msgid "Failed to change mute state for device '%s'."
+ msgstr "Impossible de modifier la sourdine du périphérique « %s »."
+
++#: sys/v4l2/gstv4l2sink.c:628
+ msgid "Failed to allocated required memory."
+ msgstr "Impossible d’allouer la mémoire nécessaire."
+
++#: sys/v4l2/gstv4l2src.c:652 sys/v4l2/gstv4l2videodec.c:756
++#: sys/v4l2/gstv4l2videoenc.c:782
+ msgid "Failed to allocate required memory."
+ msgstr "Impossible d’allouer la mémoire nécessaire."
+
++#: sys/v4l2/gstv4l2transform.c:142
+ #, c-format
+ msgid "Converter on device %s has no supported input format"
-msgstr ""
-"Le convertisseur du périphérique « %s » ne prend en charge aucun format de "
-"sortie"
++msgstr "Le convertisseur du périphérique « %s » ne prend en charge aucun format d’entrée"
+
++#: sys/v4l2/gstv4l2transform.c:149
+ #, c-format
+ msgid "Converter on device %s has no supported output format"
-#, fuzzy, c-format
++msgstr "Le convertisseur du périphérique « %s » ne prend en charge aucun format de sortie"
+
-msgstr ""
-"L’encodeur du périphérique « %s » ne prend en charge aucun format d’entrée"
++#: sys/v4l2/gstv4l2videodec.c:136
++#, c-format
+ msgid "Decoder on device %s has no supported input format"
-#, fuzzy, c-format
++msgstr "Le décodeur du périphérique « %s » ne prend en charge aucun format d’entrée"
+
-msgstr ""
-"L’encodeur du périphérique « %s » ne prend en charge aucun format de sortie"
++#: sys/v4l2/gstv4l2videodec.c:281
++#, c-format
+ msgid "Decoder on device %s has no supported output format"
-msgstr ""
-"L’encodeur du périphérique « %s » ne prend en charge aucun format de sortie"
++msgstr "Le décodeur du périphérique « %s » ne prend en charge aucun format de sortie"
+
++#: sys/v4l2/gstv4l2videodec.c:770
+ msgid "Failed to start decoding thread."
+ msgstr "Échec de démarrage du processus de décodage."
+
++#: sys/v4l2/gstv4l2videodec.c:777 sys/v4l2/gstv4l2videoenc.c:803
+ msgid "Failed to process frame."
+ msgstr "Échec du traitement de frame."
+
++#: sys/v4l2/gstv4l2videoenc.c:140
+ #, c-format
+ msgid "Encoder on device %s has no supported output format"
-msgstr ""
-"L’encodeur du périphérique « %s » ne prend en charge aucun format d’entrée"
++msgstr "L’encodeur du périphérique « %s » ne prend en charge aucun format de sortie"
+
++#: sys/v4l2/gstv4l2videoenc.c:147
+ #, c-format
+ msgid "Encoder on device %s has no supported input format"
-#, fuzzy
++msgstr "L’encodeur du périphérique « %s » ne prend en charge aucun format d’entrée"
+
-msgstr "Échec de démarrage du processus de décodage."
++#: sys/v4l2/gstv4l2videoenc.c:795
+ msgid "Failed to start encoding thread."
-msgid ""
-"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
-"it is a v4l1 driver."
-msgstr ""
-"Erreur de récupération des capacités pour le périphérique « %s » : ce n’est "
-"pas un pilote v4l2. Vérifiez si c’est un pilote v4l1."
++msgstr "Échec de démarrage du processus d'encodage."
+
++#: sys/v4l2/v4l2_calls.c:92
+ #, c-format
-msgstr ""
-"Impossible de récupérer les attributs de l’entrée %d du périphérique %s"
++msgid "Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if it is a v4l1 driver."
++msgstr "Erreur de récupération des capacités pour le périphérique « %s » : ce n’est pas un pilote v4l2. Vérifiez si c’est un pilote v4l1."
+
++#: sys/v4l2/v4l2_calls.c:156
+ #, c-format
+ msgid "Failed to query attributes of input %d in device %s"
-msgstr ""
-"Impossible de récupérer les paramètres du syntoniseur %d du périphérique "
-"« %s »."
++msgstr "Impossible de récupérer les attributs de l’entrée %d du périphérique %s"
+
++#: sys/v4l2/v4l2_calls.c:187
+ #, c-format
+ msgid "Failed to get setting of tuner %d on device '%s'."
-msgstr ""
-"Impossible de récupérer les attributs de contrôle du périphérique « %s »."
++msgstr "Impossible de récupérer les paramètres du syntoniseur %d du périphérique « %s »."
+
++#: sys/v4l2/v4l2_calls.c:235
+ #, c-format
+ msgid "Failed to query norm on device '%s'."
+ msgstr "Impossible de récupérer la norme du périphérique « %s »."
+
++#: sys/v4l2/v4l2_calls.c:416
+ #, c-format
+ msgid "Failed getting controls attributes on device '%s'."
-msgstr ""
-"Impossible de récupérer la fréquence actuelle de syntonisation du "
-"périphérique « %s »."
++msgstr "Impossible de récupérer les attributs de contrôle du périphérique « %s »."
+
++#: sys/v4l2/v4l2_calls.c:608
+ #, c-format
+ msgid "Cannot identify device '%s'."
+ msgstr "Impossible d’identifier le périphérique « %s »."
+
++#: sys/v4l2/v4l2_calls.c:615
+ #, c-format
+ msgid "This isn't a device '%s'."
+ msgstr "Ceci n’est pas un périphérique « %s »."
+
++#: sys/v4l2/v4l2_calls.c:622
+ #, c-format
+ msgid "Could not open device '%s' for reading and writing."
+ msgstr "Impossible d’ouvrir le périphérique « %s » en lecture et écriture."
+
++#: sys/v4l2/v4l2_calls.c:629
+ #, c-format
+ msgid "Device '%s' is not a capture device."
+ msgstr "Le périphérique « %s » n’est pas un périphérique d’enregistrement."
+
++#: sys/v4l2/v4l2_calls.c:637
+ #, c-format
+ msgid "Device '%s' is not a output device."
+ msgstr "Le périphérique « %s » n’est pas un périphérique de sortie."
+
++#: sys/v4l2/v4l2_calls.c:645
+ #, c-format
+ msgid "Device '%s' is not a M2M device."
+ msgstr "Le périphérique « %s » n’est pas un périphérique M2M."
+
++#: sys/v4l2/v4l2_calls.c:698
+ #, c-format
+ msgid "Could not dup device '%s' for reading and writing."
+ msgstr "Impossible de dupliquer le périphérique « %s » en lecture et écriture."
+
++#: sys/v4l2/v4l2_calls.c:782
+ #, c-format
+ msgid "Failed to set norm for device '%s'."
+ msgstr "La définition de la norme du périphérique « %s » a échoué."
+
++#: sys/v4l2/v4l2_calls.c:820
+ #, c-format
+ msgid "Failed to get current tuner frequency for device '%s'."
-msgstr ""
-"Impossible de définir la fréquence de syntonisation du périphérique « %s » à "
-"%lu Hz."
++msgstr "Impossible de récupérer la fréquence actuelle de syntonisation du périphérique « %s »."
+
++#: sys/v4l2/v4l2_calls.c:862
+ #, c-format
+ msgid "Failed to set current tuner frequency for device '%s' to %lu Hz."
-msgstr ""
-"Impossible de récupérer la valeur du contrôle %d du périphérique « %s »."
++msgstr "Impossible de définir la fréquence de syntonisation du périphérique « %s » à %lu Hz."
+
++#: sys/v4l2/v4l2_calls.c:896
+ #, c-format
+ msgid "Failed to get signal strength for device '%s'."
+ msgstr "Impossible d’obtenir la force du signal du périphérique « %s »."
+
++#: sys/v4l2/v4l2_calls.c:932
+ #, c-format
+ msgid "Failed to get value for control %d on device '%s'."
-msgstr ""
-"Impossible de récupérer la valeur %d du contrôle %d du périphérique « %s »."
++msgstr "Impossible de récupérer la valeur du contrôle %d du périphérique « %s »."
+
++#: sys/v4l2/v4l2_calls.c:967
+ #, c-format
+ msgid "Failed to set value %d for control %d on device '%s'."
-msgstr ""
-"Impossible de récupérer l’entrée actuelle du périphérique « %s ». C’est peut-"
-"être un périphérique radio"
++msgstr "Impossible de récupérer la valeur %d du contrôle %d du périphérique « %s »."
+
++#: sys/v4l2/v4l2_calls.c:1047
+ #, c-format
+ msgid "Failed to get current input on device '%s'. May be it is a radio device"
-msgid ""
-"Failed to get current output on device '%s'. May be it is a radio device"
-msgstr ""
-"Impossible de récupérer la sortie actuelle du périphérique « %s ». C’est "
-"peut-être un périphérique radio"
++msgstr "Impossible de récupérer l’entrée actuelle du périphérique « %s ». C’est peut-être un périphérique radio"
+
++#: sys/v4l2/v4l2_calls.c:1104
+ #, c-format
-msgstr ""
-"La modification de la résolution au cours de l’exécution n’est pas encore "
-"prise en charge."
++msgid "Failed to get current output on device '%s'. May be it is a radio device"
++msgstr "Impossible de récupérer la sortie actuelle du périphérique « %s ». C’est peut-être un périphérique radio"
+
++#: sys/v4l2/v4l2_calls.c:1129
+ #, c-format
+ msgid "Failed to set output %d on device %s."
+ msgstr "Impossible de définir la sortie %d du périphérique %s."
+
++#: sys/ximage/gstximagesrc.c:838
+ msgid "Changing resolution at runtime is not yet supported."
++msgstr "La modification de la résolution au cours de l’exécution n’est pas encore prise en charge."
+
++#: sys/ximage/gstximagesrc.c:852
+ msgid "Cannot operate without a clock"
+ msgstr "Impossible de fonctionner sans horloge"
+
+ #~ msgid "This file contains too many streams. Only playing first %d"
+ #~ msgstr "Ce fichier contient trop de flux. Seuls les %d premiers seront lus"
+
+ #~ msgid "Record Source"
+ #~ msgstr "Source d’enregistrement"
+
+ #~ msgid "Microphone"
+ #~ msgstr "Micro"
+
+ #~ msgid "Line In"
+ #~ msgstr "Entrée ligne"
+
+ #~ msgid "Internal CD"
+ #~ msgstr "CD interne"
+
+ #~ msgid "SPDIF In"
+ #~ msgstr "Entrée SPDIF"
+
+ #~ msgid "AUX 1 In"
+ #~ msgstr "Entrée AUX 1"
+
+ #~ msgid "AUX 2 In"
+ #~ msgstr "Entrée AUX 2"
+
+ #~ msgid "Codec Loopback"
+ #~ msgstr "Codec boucle interne"
+
+ #~ msgid "SunVTS Loopback"
+ #~ msgstr "SunVTS boucle interne"
+
+ #~ msgid "Volume"
+ #~ msgstr "Volume"
+
+ #~ msgid "Gain"
+ #~ msgstr "Gain"
+
+ #~ msgid "Monitor"
+ #~ msgstr "Moniteur"
+
+ #~ msgid "Built-in Speaker"
+ #~ msgstr "Haut-parleur interne"
+
+ #~ msgid "Headphone"
+ #~ msgstr "Écouteurs"
+
+ #~ msgid "Line Out"
+ #~ msgstr "Sortie ligne"
+
+ #~ msgid "SPDIF Out"
+ #~ msgstr "Sortie SPDIF"
+
+ #~ msgid "AUX 1 Out"
+ #~ msgstr "Sortie AUX 1"
+
+ #~ msgid "AUX 2 Out"
+ #~ msgstr "Sortie AUX 2"
+
+ #~ msgid "Internal data stream error."
+ #~ msgstr "Erreur du flux de données interne."
+
+ #~ msgid "Internal data flow error."
+ #~ msgstr "Erreur du flux de données interne."
--- /dev/null
-msgstr "Videouređaj koristi nepodržani format piksela."
+ # Translation of gst-plugins-good messages to Croatian.
+ # This file is put in the public domain.
+ # Copyright (C) 2004-2010, 2019 GStreamer core team.
+ # This file is distributed under the same license as the gst-plugins-good package.
+ #
+ # Tomislav Krznar <tomislav.krznar@gmail.com>, 2012.
+ # Božidar Putanec <bozidarp@yahoo.com>, 2016, 2017, 2018, 2019.
+ msgid ""
+ msgstr ""
+ "Project-Id-Version: gst-plugins-good-1.15.1\n"
+ "Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
+ "POT-Creation-Date: 2019-02-26 11:47+0000\n"
+ "PO-Revision-Date: 2019-02-03 13:58-0800\n"
+ "Last-Translator: Božidar Putanec <bozidarp@yahoo.com>\n"
+ "Language-Team: Croatian <lokalizacija@linux.hr>\n"
+ "Language: hr\n"
+ "MIME-Version: 1.0\n"
+ "Content-Type: text/plain; charset=UTF-8\n"
+ "Content-Transfer-Encoding: 8bit\n"
+ "X-Bugs: Report translation errors to the Language-Team address.\n"
+ "Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n"
+ "%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);\n"
+ "X-Generator: Poedit 2.2.1\n"
+
+ msgid "Jack server not found"
+ msgstr "Server Jack nije pronađen"
+
+ msgid "Failed to decode JPEG image"
+ msgstr "Nije uspjelo dekodirati JPEG sliku"
+
+ msgid ""
+ "Failed to configure LAME mp3 audio encoder. Check your encoding parameters."
+ msgstr ""
+ "Nije uspjelo konfigurirati audio koder MP3 LAME. Provjerite parametre "
+ "kodiranja."
+
+ #, c-format
+ msgid ""
+ "The requested bitrate %d kbit/s for property '%s' is not allowed. The "
+ "bitrate was changed to %d kbit/s."
+ msgstr ""
+ "Zatražena brzina %d kbit/s za svojstvo „%s“ nije dopuštena -- postavljena je "
+ "na brzinu od %d kbit/s."
+
+ #. TRANSLATORS: 'song title' by 'artist name'
+ #, c-format
+ msgid "'%s' by '%s'"
+ msgstr "„%s“ od „%s“"
+
+ msgid "Could not connect to server"
+ msgstr "Spajanje na server nije moguće"
+
+ msgid "No URL set."
+ msgstr "Nema URL-adrese (nije postavljena)."
+
+ msgid "Could not resolve server name."
+ msgstr "Nije moguće razriješiti ime servera."
+
+ msgid "Could not establish connection to server."
+ msgstr "Nije moguće uspostaviti vezu sa serverom."
+
+ msgid "Secure connection setup failed."
+ msgstr "Nije uspjelo uspostaviti sigurnu vezu."
+
+ msgid ""
+ "A network error occurred, or the server closed the connection unexpectedly."
+ msgstr "Dogodila se greška na mreži ili je server neočekivano prekinuo vezu."
+
+ msgid "Server sent bad data."
+ msgstr "Server je poslao loše podatke."
+
+ msgid "Server does not support seeking."
+ msgstr "Server ne podržava skok traženje na poziciju."
+
+ msgid "Failed to configure TwoLAME encoder. Check your encoding parameters."
+ msgstr ""
+ "Nije uspjelo konfigurirati koder TwoLAME. Provjerite parametre kodiranja."
+
+ msgid "No or invalid input audio, AVI stream will be corrupt."
+ msgstr "Nema audio ulaza ili nije valjan -- AVI protok bit će oštećen."
+
+ msgid "This file contains no playable streams."
+ msgstr "Ova datoteka ne sadrži upotrebljive protoke."
+
+ msgid "This file is invalid and cannot be played."
+ msgstr "Datoteka nije valjana i ne može se reproducirati."
+
+ msgid "Cannot play stream because it is encrypted with PlayReady DRM."
+ msgstr "Ovaj protok nije moguće reproducirati jer je šifriran s PlayReady DRM."
+
+ msgid "This file is corrupt and cannot be played."
+ msgstr "Datoteka je oštećena i ne može se reproducirati."
+
+ msgid "Invalid atom size."
+ msgstr "Veličina atoma nije valjana."
+
+ msgid "This file is incomplete and cannot be played."
+ msgstr "Datoteka je nepotpuna i ne može se reproducirati."
+
+ msgid "The video in this file might not play correctly."
+ msgstr "Video iz ove datoteke se možda neće korektno reproducirati."
+
+ # https://gstreamer.freedesktop.org/documentation/rtp.html
+ msgid ""
+ "No supported stream was found. You might need to install a GStreamer RTSP "
+ "extension plugin for Real media streams."
+ msgstr ""
+ "Nije nađen nijedan podržani protok. Vjerojatno trebate instalirati plugin "
+ "GStreamera za proširenje RTSP na Real multimedijske protoke."
+
+ msgid ""
+ "No supported stream was found. You might need to allow more transport "
+ "protocols or may otherwise be missing the right GStreamer RTSP extension "
+ "plugin."
+ msgstr ""
+ "Nije nađen nijedan podržani protok. Vjerojatno trebate omogućiti još neke "
+ "prijenosne protokole ili vam možda nedostaje odgovarajući plugin GStreamera "
+ "za proširenje RTSP-a."
+
+ msgid ""
+ "Could not open audio device for playback. Device is being used by another "
+ "application."
+ msgstr ""
+ "Audiouređaj nije moguće otvoriti za reprodukciju jer ga koristi neka druga "
+ "aplikacija."
+
+ msgid ""
+ "Could not open audio device for playback. You don't have permission to open "
+ "the device."
+ msgstr ""
+ "Audiouređaj nije moguće otvoriti za reprodukciju jer nemate dopuštenje za "
+ "otvaranje uređaja."
+
+ msgid "Could not open audio device for playback."
+ msgstr "Audiouređaj nije moguće otvoriti za reprodukciju."
+
+ msgid ""
+ "Could not open audio device for playback. This version of the Open Sound "
+ "System is not supported by this element."
+ msgstr ""
+ "Audiouređaj nije moguće otvoriti za reprodukciju jer ovaj element ne "
+ "podržava ovu inačicu Open Sound System."
+
+ msgid "Playback is not supported by this audio device."
+ msgstr "Ovaj audiouređaj ne podržava reprodukciju."
+
+ msgid "Audio playback error."
+ msgstr "Greška u audio reprodukciji."
+
+ msgid "Recording is not supported by this audio device."
+ msgstr "Ovaj audiouređaj ne podržava snimanje."
+
+ msgid "Error recording from audio device."
+ msgstr "Greška pri snimanju s audiouređaja."
+
+ msgid ""
+ "Could not open audio device for recording. You don't have permission to open "
+ "the device."
+ msgstr ""
+ "Audiouređaj nije moguće otvoriti za snimanje jer nemate dopuštenje za "
+ "otvaranje uređaja."
+
+ msgid "Could not open audio device for recording."
+ msgstr "Audiouređaj nije moguće otvoriti za snimanje."
+
+ msgid "CoreAudio device not found"
+ msgstr "CoreAudio uređaj nije pronađen"
+
+ msgid "CoreAudio device could not be opened"
+ msgstr "CoreAudio uređaj nije moguće otvoriti"
+
+ #, c-format
+ msgid "Error reading %d bytes from device '%s'."
+ msgstr "Greška čitanja %d bajtova iz uređaja „%s“."
+
+ #, c-format
+ msgid "Failed to enumerate possible video formats device '%s' can work with"
+ msgstr ""
+ "Nije uspjelo prikazati sve video formate s kojima može raditi uređaj „%s“"
+
+ #, c-format
+ msgid "Could not map buffers from device '%s'"
+ msgstr "Nije moguće mapirati međuspremnike uređaja „%s“"
+
+ #, c-format
+ msgid "The driver of device '%s' does not support the IO method %d"
+ msgstr "Upravljački program uređaja „%s“ ne podržava metodu U/I %d"
+
+ #, c-format
+ msgid "The driver of device '%s' does not support any known IO method."
+ msgstr ""
+ "Upravljački program uređaja „%s“ ne podržava nijednu poznatu metodu U/I."
+
+ #, c-format
+ msgid "Device '%s' has no supported format"
+ msgstr "Uređaj „%s“ nema podržani format"
+
+ #, c-format
+ msgid "Device '%s' failed during initialization"
+ msgstr "Nije uspjela inicijalizacija uređaja „%s“"
+
+ #, c-format
+ msgid "Device '%s' is busy"
+ msgstr "Uređaj „%s“ je zauzet"
+
+ #, c-format
+ msgid "Device '%s' cannot capture at %dx%d"
+ msgstr "Uređaj „%s“ ne može snimati u rezoluciji %dx%d"
+
+ #, c-format
+ msgid "Device '%s' cannot capture in the specified format"
+ msgstr "Uređaj „%s“ ne može snimati u specificiranom formatu"
+
+ #, c-format
+ msgid "Device '%s' does support non-contiguous planes"
+ msgstr "Device „%s“ podržava nepovezane plohe/ravnine (non-contiguous planes)"
+
+ #, c-format
+ msgid "Device '%s' does not support %s interlacing"
+ msgstr "Device „%s“ ne podržava preplitanje (interlacing) %s"
+
+ #, c-format
+ msgid "Device '%s' does not support %s colorimetry"
+ msgstr "Uređaj „%s“ ne podržava kolorimetriju %s"
+
+ #, c-format
+ msgid "Could not get parameters on device '%s'"
+ msgstr "Nije moguće dobiti parametre uređaja „%s“"
+
+ msgid "Video device did not accept new frame rate setting."
+ msgstr "Videouređaj nije prihvatio novu postavku frekvencije okvira (slika)."
+
+ msgid "Video device did not provide output format."
+ msgstr "Videouređaj nije dao/odredio izlazni format."
+
+ msgid "Video device returned invalid dimensions."
+ msgstr "Videouređaj nije vratio valjane dimenzije."
+
+ msgid "Video device uses an unsupported interlacing method."
+ msgstr "Videouređaj koristi nepodržanu metodu preplitanja (interlacing)."
+
+ msgid "Video device uses an unsupported pixel format."
++msgstr "Videouređaj koristi format piksela koji nije podržan."
+
+ msgid "Failed to configure internal buffer pool."
+ msgstr "Nije uspjelo konfigurirati interne međuspremnike (buffer pool)."
+
+ msgid "Video device did not suggest any buffer size."
+ msgstr "Videouređaj nije naveo/zatražio bilo kakvu veličinu međuspremnika."
+
+ msgid "No downstream pool to import from."
+ msgstr "Ne postoji mjesto (downstream pool) iz kojeg se može uvoziti."
+
+ # tuner > štelanje frekvencije, mijenjanje (biranje) kanala
+ #, c-format
+ msgid "Failed to get settings of tuner %d on device '%s'."
+ msgstr "Nije uspjelo dobiti postavke tunera %d na uređaju „%s“."
+
+ #, c-format
+ msgid "Error getting capabilities for device '%s'."
+ msgstr "Greška pri dobivanju sposobnosti uređaja „%s“."
+
+ #, c-format
+ msgid "Device '%s' is not a tuner."
+ msgstr "Uređaj „%s“ nije tuner."
+
+ #, c-format
+ msgid "Failed to get radio input on device '%s'. "
+ msgstr "Nije uspjelo dobiti radiosignal na uređaju „%s“."
+
+ #, c-format
+ msgid "Failed to set input %d on device %s."
+ msgstr "Nije uspjelo postaviti ulaz %d na uređaju %s."
+
+ #, c-format
+ msgid "Failed to change mute state for device '%s'."
+ msgstr "Nije uspjelo promijeniti stanje „mute“ na uređaju „%s“."
+
+ msgid "Failed to allocated required memory."
+ msgstr "Nije uspjelo dodijeliti potrebnu memoriju."
+
+ msgid "Failed to allocate required memory."
+ msgstr "Nije uspjelo dodijeliti potrebnu memoriju."
+
+ #, c-format
+ msgid "Converter on device %s has no supported input format"
+ msgstr "Pretvarač na uređaju %s nema podržani ulazni format"
+
+ #, c-format
+ msgid "Converter on device %s has no supported output format"
+ msgstr "Pretvarač na uređaju %s nema podržani izlazni format"
+
+ #, c-format
+ msgid "Decoder on device %s has no supported input format"
+ msgstr "Dekoder na uređaju %s nema podržani ulazni format"
+
+ #, c-format
+ msgid "Decoder on device %s has no supported output format"
+ msgstr "Dekoder na uređaju %s nema podržani izlazni format"
+
+ msgid "Failed to start decoding thread."
+ msgstr "Nije uspjelo započeti dekodiranje dretve."
+
+ msgid "Failed to process frame."
+ msgstr "Nije uspjelo obraditi okvir (sliku)."
+
+ #, c-format
+ msgid "Encoder on device %s has no supported output format"
+ msgstr "Koder na uređaju %s nema podržani izlazni format"
+
+ #, c-format
+ msgid "Encoder on device %s has no supported input format"
+ msgstr "Koder na uređaju %s nema podržani ulazni format"
+
+ msgid "Failed to start encoding thread."
+ msgstr "Nije uspjelo započeti kodiranje dretve."
+
+ #, c-format
+ msgid ""
+ "Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
+ "it is a v4l1 driver."
+ msgstr ""
+ "Greška pri dobivanju Caps (sposobnosti) za uređaj „%s“: To nije v4l2 "
+ "upravljački program. Provjerite je li to v4l1 upravljački program."
+
+ #, c-format
+ msgid "Failed to query attributes of input %d in device %s"
+ msgstr "Nije uspjelo ispitati atribute ulaza %d uređaja %s"
+
+ #, c-format
+ msgid "Failed to get setting of tuner %d on device '%s'."
+ msgstr "Nije uspjelo dobiti postavke tunera %d uređaja „%s“."
+
+ #, c-format
+ msgid "Failed to query norm on device '%s'."
+ msgstr "Nije uspjelo ispitati „norm“ na uređaju „%s“."
+
+ #, c-format
+ msgid "Failed getting controls attributes on device '%s'."
+ msgstr "Nije uspjelo dobiti atribute kontrola uređaja „%s“."
+
+ #, c-format
+ msgid "Cannot identify device '%s'."
+ msgstr "Nije moguće identificirati uređaj „%s“."
+
+ #, c-format
+ msgid "This isn't a device '%s'."
+ msgstr "To nije uređaj „%s“."
+
+ #, c-format
+ msgid "Could not open device '%s' for reading and writing."
+ msgstr "Nije bilo moguće otvoriti uređaj „%s“ za čitanje i pisanje."
+
+ #, c-format
+ msgid "Device '%s' is not a capture device."
+ msgstr "Uređaj „%s“ nije uređaj za snimanje."
+
+ #, c-format
+ msgid "Device '%s' is not a output device."
+ msgstr "Uređaj „%s“ nije izlazni uređaj."
+
+ #, c-format
+ msgid "Device '%s' is not a M2M device."
+ msgstr "Uređaj „%s“ nije uređaj M2M."
+
+ #, c-format
+ msgid "Could not dup device '%s' for reading and writing."
+ msgstr "Nije uspjelo dup() uređaj „%s“ za čitanje i pisanje."
+
+ #, c-format
+ msgid "Failed to set norm for device '%s'."
+ msgstr "Nije uspjelo postaviti „norm“ za uređaj „%s“."
+
+ #, c-format
+ msgid "Failed to get current tuner frequency for device '%s'."
+ msgstr "Nije uspjelo dobiti aktualnu frekvenciju tunera za uređaj „%s“."
+
+ #, c-format
+ msgid "Failed to set current tuner frequency for device '%s' to %lu Hz."
+ msgstr ""
+ "Nije uspjelo postaviti aktualnu frekvenciju tunera za uređaj „%s“ na %lu Hz."
+
+ #, c-format
+ msgid "Failed to get signal strength for device '%s'."
+ msgstr "Nije uspjelo dobiti snagu signala za uređaj „%s“."
+
+ #, c-format
+ msgid "Failed to get value for control %d on device '%s'."
+ msgstr "Nije uspjelo dobiti vrijednost za kontrolu %d na uređaju „%s“."
+
+ #, c-format
+ msgid "Failed to set value %d for control %d on device '%s'."
+ msgstr "Nije uspjelo postaviti na vrijednost %d kontrolu %d na uređaju „%s“."
+
+ #, c-format
+ msgid "Failed to get current input on device '%s'. May be it is a radio device"
+ msgstr ""
+ "Nije uspjelo dobiti aktualni ulaz na uređaju „%s“ -- možda je to radiouređaj."
+
+ #, c-format
+ msgid ""
+ "Failed to get current output on device '%s'. May be it is a radio device"
+ msgstr ""
+ "Nije uspjelo dobiti aktualni izlaz na uređaju „%s“ -- možda je to "
+ "radiouređaj."
+
+ #, c-format
+ msgid "Failed to set output %d on device %s."
+ msgstr "Nije uspjelo postaviti izlaz %d na uređaju %s."
+
+ msgid "Changing resolution at runtime is not yet supported."
+ msgstr "Promjena rezolucije u tijeku rada (runtime) još nije podržana."
+
+ msgid "Cannot operate without a clock"
+ msgstr "Nije moguće raditi bez sata (clock)"
+
+ #~ msgid "This file contains too many streams. Only playing first %d"
+ #~ msgstr ""
+ #~ "U ovoj datoteci ima previše struja. Samo prvih %d će se reproducirati"
+
+ #~ msgid "Record Source"
+ #~ msgstr "Izvor snimanja"
+
+ #~ msgid "Microphone"
+ #~ msgstr "Mikrofon"
+
+ #~ msgid "Line In"
+ #~ msgstr "Linijski ulaz"
+
+ #~ msgid "Internal CD"
+ #~ msgstr "Interni CD"
+
+ #~ msgid "SPDIF In"
+ #~ msgstr "SPDIF ulaz"
+
+ #~ msgid "AUX 1 In"
+ #~ msgstr "AUX 1 ulaz"
+
+ #~ msgid "AUX 2 In"
+ #~ msgstr "AUX 2 ulaz"
+
+ #~ msgid "Codec Loopback"
+ #~ msgstr "Kodekova povratna petlja"
+
+ #~ msgid "SunVTS Loopback"
+ #~ msgstr "SunVTS povratna petlja"
+
+ #~ msgid "Volume"
+ #~ msgstr "Glasnoća"
+
+ #~ msgid "Gain"
+ #~ msgstr "Pojačanje"
+
+ #~ msgid "Monitor"
+ #~ msgstr "Monitor"
+
+ #~ msgid "Built-in Speaker"
+ #~ msgstr "Ugrađeni zvučnik"
+
+ #~ msgid "Headphone"
+ #~ msgstr "Slušalica"
+
+ #~ msgid "Line Out"
+ #~ msgstr "Linijski izlaz"
+
+ #~ msgid "SPDIF Out"
+ #~ msgstr "SPDIF izlaz"
+
+ #~ msgid "AUX 1 Out"
+ #~ msgstr "AUX 1 izlaz"
+
+ #~ msgid "AUX 2 Out"
+ #~ msgstr "AUX 2 izlaz"
+
+ #~ msgid "Internal data stream error."
+ #~ msgstr "Interna greška toka (stream) podataka."
+
+ #~ msgid "Internal data flow error."
+ #~ msgstr "Interna greška protoka podataka."
+
+ #~ msgid "Could not establish connection to sound server"
+ #~ msgstr "Ne mogu ostvariti vezu prema poslužitelju zvuka"
+
+ #~ msgid "Failed to query sound server capabilities"
+ #~ msgstr "Nisam uspio ispitati mogućnosti poslužitelja zvuka"
+
+ #~ msgid "Bass"
+ #~ msgstr "Niski"
+
+ #~ msgid "Treble"
+ #~ msgstr "Visoki"
+
+ #~ msgid "Synth"
+ #~ msgstr "Sintetizator"
+
+ #~ msgid "PCM"
+ #~ msgstr "PCM"
+
+ #~ msgid "Speaker"
+ #~ msgstr "Zvučnik"
+
+ #~ msgid "Line-in"
+ #~ msgstr "Ulazna linija"
+
+ #~ msgid "CD"
+ #~ msgstr "CD"
+
+ #~ msgid "Mixer"
+ #~ msgstr "Mikser"
+
+ #~ msgid "PCM-2"
+ #~ msgstr "PCM-2"
+
+ #~ msgid "Record"
+ #~ msgstr "Snimanje"
+
+ #~ msgid "In-gain"
+ #~ msgstr "Ulazno pojačanje"
+
+ #~ msgid "Out-gain"
+ #~ msgstr "Izlazno pojačanje"
+
+ #~ msgid "Line-1"
+ #~ msgstr "Linija 1"
+
+ #~ msgid "Line-2"
+ #~ msgstr "Linija 2"
+
+ #~ msgid "Line-3"
+ #~ msgstr "Linija 3"
+
+ #~ msgid "Digital-1"
+ #~ msgstr "Digitalni 1"
+
+ #~ msgid "Digital-2"
+ #~ msgstr "Digitalni 2"
+
+ #~ msgid "Digital-3"
+ #~ msgstr "Digitalni 3"
+
+ #~ msgid "Phone-in"
+ #~ msgstr "Telefonski ulaz"
+
+ #~ msgid "Phone-out"
+ #~ msgstr "Telefonski izlaz"
+
+ #~ msgid "Video"
+ #~ msgstr "Video"
+
+ #~ msgid "Radio"
+ #~ msgstr "Radio"
+
+ #~ msgid "Could not open audio device for mixer control handling."
+ #~ msgstr "Ne mogu otvoriti zvučni uređaj za upravljanje mikserom."
+
+ #~ msgid ""
+ #~ "Could not open audio device for mixer control handling. This version of "
+ #~ "the Open Sound System is not supported by this element."
+ #~ msgstr ""
+ #~ "Ne mogu otvoriti zvučni uređaj za upravljanje mikserom. Ovaj element ne "
+ #~ "podržava ovu inačicu Open Sound System sustava."
+
+ #~ msgid "Master"
+ #~ msgstr "Glavni"
+
+ #~ msgid "Front"
+ #~ msgstr "Prednji"
+
+ #~ msgid "Rear"
+ #~ msgstr "Stražnji"
+
+ #~ msgid "Headphones"
+ #~ msgstr "Slušalice"
+
+ #~ msgid "Center"
+ #~ msgstr "Srednji"
+
+ #~ msgid "LFE"
+ #~ msgstr "LFE"
+
+ #~ msgid "Surround"
+ #~ msgstr "Surround"
+
+ #~ msgid "Side"
+ #~ msgstr "Bočni"
+
+ #~ msgid "AUX Out"
+ #~ msgstr "AUX izlaz"
+
+ #~ msgid "3D Depth"
+ #~ msgstr "3D dubina"
+
+ #~ msgid "3D Center"
+ #~ msgstr "3D središte"
+
+ #~ msgid "3D Enhance"
+ #~ msgstr "3D dopuna"
+
+ #~ msgid "Telephone"
+ #~ msgstr "Telefon"
+
+ #~ msgid "Video In"
+ #~ msgstr "Video ulaz"
+
+ #~ msgid "AUX In"
+ #~ msgstr "AUX ulaz"
+
+ #~ msgid "Record Gain"
+ #~ msgstr "Pojačanje snimanja"
+
+ #~ msgid "Output Gain"
+ #~ msgstr "Pojačanje izlaza"
+
+ #~ msgid "Microphone Boost"
+ #~ msgstr "Pojačanje mikrofona"
+
+ #~ msgid "Diagnostic"
+ #~ msgstr "Dijagnostika"
+
+ #~ msgid "Bass Boost"
+ #~ msgstr "Pojačanje niskih"
+
+ #~ msgid "Playback Ports"
+ #~ msgstr "Portovi reprodukcije"
+
+ #~ msgid "Input"
+ #~ msgstr "Ulaz"
+
+ #~ msgid "Monitor Source"
+ #~ msgstr "Izvor nadziranja"
+
+ #~ msgid "Keyboard Beep"
+ #~ msgstr "Tipkovnički zvuk"
+
+ #~ msgid "Simulate Stereo"
+ #~ msgstr "Simuliraj stereo"
+
+ #~ msgid "Stereo"
+ #~ msgstr "Stereo"
+
+ #~ msgid "Surround Sound"
+ #~ msgstr "Surround zvuk"
+
+ #~ msgid "Microphone Gain"
+ #~ msgstr "Pojačanje mikrofona"
+
+ #~ msgid "Speaker Source"
+ #~ msgstr "Izvor zvučnika"
+
+ #~ msgid "Microphone Source"
+ #~ msgstr "Izvor mikrofona"
+
+ #~ msgid "Jack"
+ #~ msgstr "Priključak"
+
+ #~ msgid "Center / LFE"
+ #~ msgstr "Srednji / LFE"
+
+ #~ msgid "Stereo Mix"
+ #~ msgstr "Stereo mikser"
+
+ #~ msgid "Mono Mix"
+ #~ msgstr "Mono mikser"
+
+ #~ msgid "Input Mix"
+ #~ msgstr "Ulazni mikser"
+
+ #~ msgid "Microphone 1"
+ #~ msgstr "Mikrofon 1"
+
+ #~ msgid "Microphone 2"
+ #~ msgstr "Mikrofon 2"
+
+ #~ msgid "Digital Out"
+ #~ msgstr "Digitalni izlaz"
+
+ #~ msgid "Digital In"
+ #~ msgstr "Digitalni ulaz"
+
+ #~ msgid "HDMI"
+ #~ msgstr "HDMI"
+
+ #~ msgid "Modem"
+ #~ msgstr "Modem"
+
+ #~ msgid "Handset"
+ #~ msgstr "Slušalice s mikrofonom"
+
+ #~ msgid "Other"
+ #~ msgstr "Ostalo"
+
+ #~ msgid "None"
+ #~ msgstr "Nijedan"
+
+ #~ msgid "On"
+ #~ msgstr "Uključen"
+
+ #~ msgid "Off"
+ #~ msgstr "Isključen"
+
+ #~ msgid "Mute"
+ #~ msgstr "Utišan"
+
+ #~ msgid "Fast"
+ #~ msgstr "Brzo"
+
+ #~ msgid "Very Low"
+ #~ msgstr "Vrlo niska"
+
+ #~ msgid "Low"
+ #~ msgstr "Niska"
+
+ #~ msgid "Medium"
+ #~ msgstr "Srednja"
+
+ #~ msgid "High"
+ #~ msgstr "Visoka"
+
+ #~ msgid "Very High"
+ #~ msgstr "Vrlo visoka"
+
+ #~ msgid "Production"
+ #~ msgstr "Produkcijska"
+
+ #~ msgid "Front Panel Microphone"
+ #~ msgstr "Mikrofon na prednjoj ploči"
+
+ #~ msgid "Front Panel Line In"
+ #~ msgstr "Linijski ulaz na prednjoj ploči"
+
+ #~ msgid "Front Panel Headphones"
+ #~ msgstr "Slušalice na prednjoj ploči"
+
+ #~ msgid "Front Panel Line Out"
+ #~ msgstr "Linijski izlaz na prednjoj ploči"
+
+ #~ msgid "Green Connector"
+ #~ msgstr "Zelena priključnica"
+
+ #~ msgid "Pink Connector"
+ #~ msgstr "Ružičasta priključnica"
+
+ #~ msgid "Blue Connector"
+ #~ msgstr "Plava priključnica"
+
+ #~ msgid "White Connector"
+ #~ msgstr "Bijela priključnica"
+
+ #~ msgid "Black Connector"
+ #~ msgstr "Crna priključnica"
+
+ #~ msgid "Gray Connector"
+ #~ msgstr "Siva priključnica"
+
+ #~ msgid "Orange Connector"
+ #~ msgstr "Narančasta priključnica"
+
+ #~ msgid "Red Connector"
+ #~ msgstr "Crvena priključnica"
+
+ #~ msgid "Yellow Connector"
+ #~ msgstr "Žuta priključnica"
+
+ #~ msgid "Green Front Panel Connector"
+ #~ msgstr "Zelena priključnica na prednjoj ploči"
+
+ #~ msgid "Pink Front Panel Connector"
+ #~ msgstr "Ružičasta priključnica na prednjoj ploči"
+
+ #~ msgid "Blue Front Panel Connector"
+ #~ msgstr "Plava priključnica na prednjoj ploči"
+
+ #~ msgid "White Front Panel Connector"
+ #~ msgstr "Bijela priključnica na prednjoj ploči"
+
+ #~ msgid "Black Front Panel Connector"
+ #~ msgstr "Crna priključnica na prednjoj ploči"
+
+ #~ msgid "Gray Front Panel Connector"
+ #~ msgstr "Siva priključnica na prednjoj ploči"
+
+ #~ msgid "Orange Front Panel Connector"
+ #~ msgstr "Narančasta priključnica na prednjoj ploči"
+
+ #~ msgid "Red Front Panel Connector"
+ #~ msgstr "Crvena priključnica na prednjoj ploči"
+
+ #~ msgid "Yellow Front Panel Connector"
+ #~ msgstr "Žuta priključnica na prednjoj ploči"
+
+ #~ msgid "Spread Output"
+ #~ msgstr "Rašireni izlaz"
+
+ #~ msgid "Downmix"
+ #~ msgstr "Smanjenje broja kanala"
+
+ #~ msgid "Virtual Mixer Input"
+ #~ msgstr "Ulaz virtualnog miksera"
+
+ #~ msgid "Virtual Mixer Output"
+ #~ msgstr "Izlaz virtualnog miksera"
+
+ #~ msgid "Virtual Mixer Channels"
+ #~ msgstr "Kanali virtualnog miksera"
+
+ #~ msgid "%s %d Function"
+ #~ msgstr "%s %d funkcija"
+
+ #~ msgid "%s Function"
+ #~ msgstr "%s funkcija"
+
+ #~ msgid "Got unexpected frame size of %u instead of %u."
+ #~ msgstr "Dobivena neočekivana veličina okvira %u umjesto %u."
+
+ #~ msgid "Error reading %d bytes on device '%s'."
+ #~ msgstr "Greška čitanja %d bajtova na uređaju „%s”."
+
+ #~ msgid "Could not enqueue buffers in device '%s'."
+ #~ msgstr "Nisam uspio dodati u red međuspremnike u uređaj „%s”."
+
+ #~ msgid "Failed trying to get video frames from device '%s'."
+ #~ msgstr "Nisam uspio pokušavajući dobiti video okvire iz uređaja „%s”."
+
+ #~ msgid "Failed after %d tries. device %s. system error: %s"
+ #~ msgstr "Nisam uspio nakon %d pokušaja. uređaj %s. greška sustava: %s"
--- /dev/null
-# Copyright (C) 2004, 2007, 2008, 2009, 2010, 2012, 2014, 2016, 2017 Free Software Foundation, Inc.
+ # Hungarian translation for gst-plugins-good.
-# Balázs Úr <urbalazs@gmail.com>, 2014, 2015, 2017.
++# Copyright (C) 2004, 2007, 2008, 2009, 2010, 2012, 2014, 2016, 2017, 2019 Free Software Foundation, Inc.
+ # This file is distributed under the same license as the gst-plugins-good package.
+ #
+ # Laszlo Dvornik <dvornik@invitel.hu>, 2004.
+ # Gabor Kelemen <kelemeng@gnome.hu>, 2007, 2008, 2009, 2010, 2012, 2014, 2016.
-"Project-Id-Version: gst-plugins-good 1.12.0\n"
++# Balázs Úr <ur.balazs@fsf.hu>, 2014, 2015, 2017, 2019.
+ msgid ""
+ msgstr ""
-"POT-Creation-Date: 2019-02-26 11:47+0000\n"
-"PO-Revision-Date: 2017-05-05 20:11+0200\n"
-"Last-Translator: Balázs Úr <urbalazs@gmail.com>\n"
++"Project-Id-Version: gst-plugins-good 1.15.1\n"
+ "Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"X-Generator: Lokalize 1.2\n"
++"POT-Creation-Date: 2019-01-17 01:59+0000\n"
++"PO-Revision-Date: 2019-11-23 22:07+0100\n"
++"Last-Translator: Balázs Úr <ur.balazs@fsf.hu>\n"
+ "Language-Team: Hungarian <translation-team-hu@lists.sourceforge.net>\n"
+ "Language: hu\n"
+ "MIME-Version: 1.0\n"
+ "Content-Type: text/plain; charset=UTF-8\n"
+ "Content-Transfer-Encoding: 8bit\n"
+ "X-Bugs: Report translation errors to the Language-Team address.\n"
+ "X-Rosetta-Export-Date: 2007-07-27 19:18:15+0000\n"
-msgid ""
-"Failed to configure LAME mp3 audio encoder. Check your encoding parameters."
-msgstr ""
++"X-Generator: Lokalize 19.04.3\n"
+ "X-Rosetta-Export-Date: 2007-07-27 19:18:15+0000\n"
+ "Plural-Forms: nplurals=2; plural=(n != 1);\n"
+
++#: ext/jack/gstjackaudiosink.c:356 ext/jack/gstjackaudiosrc.c:364
+ msgid "Jack server not found"
+ msgstr "Jack kiszolgáló nem található"
+
++#: ext/jpeg/gstjpegdec.c:936 ext/jpeg/gstjpegdec.c:1103
++#: ext/jpeg/gstjpegdec.c:1112 ext/jpeg/gstjpegdec.c:1122
++#: ext/jpeg/gstjpegdec.c:1131 ext/jpeg/gstjpegdec.c:1393
++#: ext/jpeg/gstjpegdec.c:1421
+ msgid "Failed to decode JPEG image"
+ msgstr "A JPEG kép visszafejtése meghiúsult"
+
-msgid ""
-"The requested bitrate %d kbit/s for property '%s' is not allowed. The "
-"bitrate was changed to %d kbit/s."
-msgstr ""
++#: ext/lame/gstlamemp3enc.c:393
++msgid "Failed to configure LAME mp3 audio encoder. Check your encoding parameters."
++msgstr "Nem sikerült beállítani a LAME mp3 hangkódolót. Ellenőrizze a kódolási paramétereket."
+
++#: ext/lame/gstlamemp3enc.c:425 ext/twolame/gsttwolamemp2enc.c:488
+ #, c-format
-msgid ""
-"A network error occurred, or the server closed the connection unexpectedly."
-msgstr ""
-"Hálózati hiba történt, vagy a kiszolgáló váratlanul lezárta a kapcsolatot."
++msgid "The requested bitrate %d kbit/s for property '%s' is not allowed. The bitrate was changed to %d kbit/s."
++msgstr "A kért %d kbit/s bitsebesség a(z) „%s” tulajdonságnál nem engedélyezett. A bitsebesség meg lett változtatva %d kbit/s értékre."
+
+ #. TRANSLATORS: 'song title' by 'artist name'
++#: ext/pulse/pulsesink.c:3127
+ #, c-format
+ msgid "'%s' by '%s'"
+ msgstr "„%s” ettől: „%s”"
+
++#: ext/shout2/gstshout2.c:619 ext/shout2/gstshout2.c:629
+ msgid "Could not connect to server"
+ msgstr "Nem sikerült csatlakozni a kiszolgálóhoz"
+
++#: ext/soup/gstsouphttpsrc.c:914
+ msgid "No URL set."
+ msgstr "Nincs beállítva URL."
+
++#: ext/soup/gstsouphttpsrc.c:1372
+ msgid "Could not resolve server name."
+ msgstr "Nem sikerült feloldani a kiszolgáló nevét."
+
++#: ext/soup/gstsouphttpsrc.c:1377
+ msgid "Could not establish connection to server."
+ msgstr "Nem hozható létre kapcsolat a kiszolgálóhoz."
+
++#: ext/soup/gstsouphttpsrc.c:1381
+ msgid "Secure connection setup failed."
+ msgstr "A biztonságos kapcsolat kialakítása meghiúsult."
+
-msgstr ""
++#: ext/soup/gstsouphttpsrc.c:1387
++msgid "A network error occurred, or the server closed the connection unexpectedly."
++msgstr "Hálózati hiba történt, vagy a kiszolgáló váratlanul lezárta a kapcsolatot."
+
++#: ext/soup/gstsouphttpsrc.c:1392
+ msgid "Server sent bad data."
+ msgstr "A kiszolgáló hibás adatokat küldött."
+
++#: ext/soup/gstsouphttpsrc.c:1616
+ msgid "Server does not support seeking."
+ msgstr "A kiszolgáló nem támogatja a tekerést."
+
++#: ext/twolame/gsttwolamemp2enc.c:411
+ msgid "Failed to configure TwoLAME encoder. Check your encoding parameters."
-msgstr ""
-"Nem lehet lejátszani a folyamot, mert PlayReady DRM titkosítás van rajta."
++msgstr "Nem sikerült beállítani a TwoLAME kódolót. Ellenőrizze a kódolási paramétereket."
+
++#: gst/avi/gstavimux.c:1832
+ msgid "No or invalid input audio, AVI stream will be corrupt."
+ msgstr "Nincs vagy érvénytelen bemeneti hang, az AVI-folyam sérült lesz."
+
++#: gst/isomp4/qtdemux.c:713 gst/isomp4/qtdemux.c:717
+ msgid "This file contains no playable streams."
+ msgstr "A fájl nem tartalmaz lejátszható adatfolyamokat."
+
++#: gst/isomp4/qtdemux.c:763 gst/isomp4/qtdemux.c:7003
++#: gst/isomp4/qtdemux.c:7072 gst/isomp4/qtdemux.c:7362
++#: gst/isomp4/qtdemux.c:8800
+ msgid "This file is invalid and cannot be played."
+ msgstr "A fájl nem érvényes és nem játszható le."
+
++#: gst/isomp4/qtdemux.c:3091
+ msgid "Cannot play stream because it is encrypted with PlayReady DRM."
-msgid ""
-"No supported stream was found. You might need to install a GStreamer RTSP "
-"extension plugin for Real media streams."
-msgstr ""
-"Nem található támogatott adatfolyam. Telepítse a GStreamer RTSP bővítményt a "
-"Real media adatfolyamokhoz."
++msgstr "Nem lehet lejátszani a folyamot, mert PlayReady DRM titkosítás van rajta."
+
++#: gst/isomp4/qtdemux.c:4312 gst/isomp4/qtdemux.c:8131
++#: gst/isomp4/qtdemux.c:8138 gst/isomp4/qtdemux.c:9283
++#: gst/isomp4/qtdemux.c:9720 gst/isomp4/qtdemux.c:9727
++#: gst/isomp4/qtdemux.c:12528
+ msgid "This file is corrupt and cannot be played."
+ msgstr "A fájl sérült és nem játszható le."
+
++#: gst/isomp4/qtdemux.c:4554
+ msgid "Invalid atom size."
+ msgstr "Érvénytelen atom méret."
+
++#: gst/isomp4/qtdemux.c:4633
+ msgid "This file is incomplete and cannot be played."
+ msgstr "A fájl nem teljes és nem játszható le."
+
++#: gst/isomp4/qtdemux.c:10756
+ msgid "The video in this file might not play correctly."
+ msgstr "A fájlban található videó lehet, hogy nem játszható le megfelelően."
+
-msgid ""
-"No supported stream was found. You might need to allow more transport "
-"protocols or may otherwise be missing the right GStreamer RTSP extension "
-"plugin."
-msgstr ""
-"Nem található támogatott adatfolyam. Lehet, hogy több átviteli protokollt "
-"kell engedélyezni, vagy hiányzik a megfelelő GStreamer RTSP bővítmény."
++#: gst/rtsp/gstrtspsrc.c:7398
++msgid "No supported stream was found. You might need to install a GStreamer RTSP extension plugin for Real media streams."
++msgstr "Nem található támogatott adatfolyam. Telepítse a GStreamer RTSP bővítményt a Real media adatfolyamokhoz."
+
-msgid ""
-"Could not open audio device for playback. Device is being used by another "
-"application."
-msgstr ""
-"Nem nyitható meg hangeszköz a lejátszáshoz. Az eszközt másik alkalmazás "
-"használja."
++#: gst/rtsp/gstrtspsrc.c:7403
++msgid "No supported stream was found. You might need to allow more transport protocols or may otherwise be missing the right GStreamer RTSP extension plugin."
++msgstr "Nem található támogatott adatfolyam. Lehet, hogy több átviteli protokollt kell engedélyezni, vagy hiányzik a megfelelő GStreamer RTSP bővítmény."
+
-msgid ""
-"Could not open audio device for playback. You don't have permission to open "
-"the device."
-msgstr ""
-"Nem nyitható meg hangeszköz a lejátszáshoz. Nincs jogosultsága az eszköz "
-"megnyitására."
++#: sys/oss4/oss4-sink.c:493 sys/oss4/oss4-source.c:358
++#: sys/oss/gstosssink.c:384
++msgid "Could not open audio device for playback. Device is being used by another application."
++msgstr "Nem nyitható meg hangeszköz a lejátszáshoz. Az eszközt másik alkalmazás használja."
+
-msgid ""
-"Could not open audio device for playback. This version of the Open Sound "
-"System is not supported by this element."
-msgstr ""
-"Nem nyitható meg hangeszköz a lejátszáshoz. Az Open Sound System ezen "
-"verzióját az elem nem támogatja."
++#: sys/oss4/oss4-sink.c:503 sys/oss4/oss4-source.c:368
++#: sys/oss/gstosssink.c:391
++msgid "Could not open audio device for playback. You don't have permission to open the device."
++msgstr "Nem nyitható meg hangeszköz a lejátszáshoz. Nincs jogosultsága az eszköz megnyitására."
+
++#: sys/oss4/oss4-sink.c:514 sys/oss4/oss4-source.c:379
++#: sys/oss/gstosssink.c:399
+ msgid "Could not open audio device for playback."
+ msgstr "Nem nyitható meg hangeszköz a lejátszáshoz."
+
-msgid ""
-"Could not open audio device for recording. You don't have permission to open "
-"the device."
-msgstr ""
-"Nem nyitható meg hangeszköz a felvételhez. Nincs jogosultsága az eszköz "
-"megnyitására."
++#: sys/oss4/oss4-sink.c:523 sys/oss4/oss4-source.c:389
++msgid "Could not open audio device for playback. This version of the Open Sound System is not supported by this element."
++msgstr "Nem nyitható meg hangeszköz a lejátszáshoz. Az Open Sound System ezen verzióját az elem nem támogatja."
+
++#: sys/oss4/oss4-sink.c:646
+ msgid "Playback is not supported by this audio device."
+ msgstr "Ez a hangeszköz nem támogatja a lejátszást."
+
++#: sys/oss4/oss4-sink.c:653
+ msgid "Audio playback error."
+ msgstr "Hanglejátszási hiba."
+
++#: sys/oss4/oss4-source.c:503
+ msgid "Recording is not supported by this audio device."
+ msgstr "Ez a hangeszköz nem támogatja a felvételt."
+
++#: sys/oss4/oss4-source.c:510
+ msgid "Error recording from audio device."
+ msgstr "Hiba a hangeszközről való felvételkor."
+
-msgstr ""
-"A(z) „%s” eszköz által kezelhető lehetséges videoformátumok felsorolása "
-"sikertelen"
++#: sys/oss/gstosssrc.c:376
++msgid "Could not open audio device for recording. You don't have permission to open the device."
++msgstr "Nem nyitható meg hangeszköz a felvételhez. Nincs jogosultsága az eszköz megnyitására."
+
++#: sys/oss/gstosssrc.c:384
+ msgid "Could not open audio device for recording."
+ msgstr "Nem nyitható meg hangeszköz a felvételhez."
+
++#: sys/osxaudio/gstosxaudioringbuffer.c:149
+ msgid "CoreAudio device not found"
+ msgstr "A CoreAudio eszköz nem található"
+
++#: sys/osxaudio/gstosxaudioringbuffer.c:155
+ msgid "CoreAudio device could not be opened"
+ msgstr "A CoreAudio eszköz nem nyitható meg"
+
++#: sys/v4l2/gstv4l2bufferpool.c:1712
+ #, c-format
+ msgid "Error reading %d bytes from device '%s'."
+ msgstr "Hiba %d bájt olvasásakor a következő eszközről: „%s”."
+
++#: sys/v4l2/gstv4l2object.c:1223
+ #, c-format
+ msgid "Failed to enumerate possible video formats device '%s' can work with"
-msgstr ""
-"A(z) „%s” eszköz illesztőprogramja nem támogat egyetlen ismert IO módot sem."
++msgstr "A(z) „%s” eszköz által kezelhető lehetséges videoformátumok felsorolása sikertelen"
+
++#: sys/v4l2/gstv4l2object.c:2956
+ #, c-format
+ msgid "Could not map buffers from device '%s'"
+ msgstr "Nem képezhetők le a(z) „%s” eszköz pufferei"
+
++#: sys/v4l2/gstv4l2object.c:2964
+ #, c-format
+ msgid "The driver of device '%s' does not support the IO method %d"
+ msgstr "A(z) „%s” eszköz illesztőprogramja nem támogatja a(z) %d. IO módot"
+
++#: sys/v4l2/gstv4l2object.c:2971
+ #, c-format
+ msgid "The driver of device '%s' does not support any known IO method."
-#, fuzzy, c-format
++msgstr "A(z) „%s” eszköz illesztőprogramja nem támogat egyetlen ismert IO módot sem."
+
-msgstr "A(z) %s eszközön lévő kódolónak nincs támogatott bemeneti formátuma"
++#: sys/v4l2/gstv4l2object.c:3741 sys/v4l2/gstv4l2object.c:3765
++#, c-format
+ msgid "Device '%s' has no supported format"
-msgstr ""
++msgstr "A(z) „%s” eszköznek nincs támogatott formátuma"
+
++#: sys/v4l2/gstv4l2object.c:3747 sys/v4l2/gstv4l2object.c:3771
+ #, c-format
+ msgid "Device '%s' failed during initialization"
-#, fuzzy, c-format
++msgstr "A(z) „%s” eszköz meghiúsult az előkészítés során"
+
++#: sys/v4l2/gstv4l2object.c:3759
+ #, c-format
+ msgid "Device '%s' is busy"
+ msgstr "A(z) „%s” eszköz foglalt"
+
++#: sys/v4l2/gstv4l2object.c:3782
+ #, c-format
+ msgid "Device '%s' cannot capture at %dx%d"
+ msgstr "A(z) „%s” eszköz nem képes felvenni %dx%d felbontásban"
+
++#: sys/v4l2/gstv4l2object.c:3791
+ #, c-format
+ msgid "Device '%s' cannot capture in the specified format"
+ msgstr "A(z) „%s” eszköz nem képes felvenni a megadott formátumban"
+
++#: sys/v4l2/gstv4l2object.c:3802
+ #, c-format
+ msgid "Device '%s' does support non-contiguous planes"
+ msgstr "A(z) „%s” eszköz nem támogatja a nem szomszédos síkokat"
+
-msgstr "A kiszolgáló nem támogatja a tekerést."
++#: sys/v4l2/gstv4l2object.c:3817
++#, c-format
+ msgid "Device '%s' does not support %s interlacing"
-#, fuzzy, c-format
++msgstr "A(z) „%s” eszköz nem támogat %s váltottsort"
+
-msgstr "A(z) „%s” eszköz nem támogatja a nem szomszédos síkokat"
++#: sys/v4l2/gstv4l2object.c:3831
++#, c-format
+ msgid "Device '%s' does not support %s colorimetry"
-#, fuzzy, c-format
++msgstr "A(z) „%s” eszköz nem támogat %s színmetrikát"
+
++#: sys/v4l2/gstv4l2object.c:3843
+ #, c-format
+ msgid "Could not get parameters on device '%s'"
+ msgstr "Nem kérhetők le a(z) „%s” eszköz paraméterei"
+
++#: sys/v4l2/gstv4l2object.c:3851
+ msgid "Video device did not accept new frame rate setting."
+ msgstr "A videoeszköz nem fogadta el az új képkockasebesség-beállítást."
+
++#: sys/v4l2/gstv4l2object.c:3977
+ msgid "Video device did not provide output format."
+ msgstr "A videoeszköz nem szolgáltatott kimeneti formátumot."
+
++#: sys/v4l2/gstv4l2object.c:3983
+ msgid "Video device returned invalid dimensions."
+ msgstr "A videoeszköz érvénytelen dimenziókkal tért vissza."
+
++#: sys/v4l2/gstv4l2object.c:3991
+ msgid "Video device uses an unsupported interlacing method."
+ msgstr "A videoeszköz nem támogatott váltottsoros módot használ."
+
++#: sys/v4l2/gstv4l2object.c:3998
+ msgid "Video device uses an unsupported pixel format."
+ msgstr "A videoeszköz nem támogatott képpontformátumot használ."
+
++#: sys/v4l2/gstv4l2object.c:4518
+ msgid "Failed to configure internal buffer pool."
+ msgstr "Nem sikerült beállítani a belső puffertárolót."
+
++#: sys/v4l2/gstv4l2object.c:4524
+ msgid "Video device did not suggest any buffer size."
+ msgstr "A videoeszköz nem javasolt semmilyen pufferméretet."
+
++#: sys/v4l2/gstv4l2object.c:4539
+ msgid "No downstream pool to import from."
+ msgstr "Nincs importálási forrásként használható alárendelt tároló."
+
++#: sys/v4l2/gstv4l2radio.c:143
+ #, c-format
+ msgid "Failed to get settings of tuner %d on device '%s'."
+ msgstr "A(z) %d. tuner beállításának lekérése a(z) „%s” eszközön meghiúsult."
+
++#: sys/v4l2/gstv4l2radio.c:150
+ #, c-format
+ msgid "Error getting capabilities for device '%s'."
+ msgstr "Hiba a(z) „%s” eszköz képességeinek lekérésekor."
+
++#: sys/v4l2/gstv4l2radio.c:157
+ #, c-format
+ msgid "Device '%s' is not a tuner."
+ msgstr "A(z) „%s” eszköz nem tuner."
+
++#: sys/v4l2/gstv4l2radio.c:184
+ #, c-format
+ msgid "Failed to get radio input on device '%s'. "
+ msgstr "A rádióbemenet lekérése meghiúsult a(z) „%s” eszközön."
+
++#: sys/v4l2/gstv4l2radio.c:207 sys/v4l2/v4l2_calls.c:1072
+ #, c-format
+ msgid "Failed to set input %d on device %s."
+ msgstr "A(z) %d. bemenet beállítása meghiúsult a(z) „%s” eszközön."
+
++#: sys/v4l2/gstv4l2radio.c:241
+ #, c-format
+ msgid "Failed to change mute state for device '%s'."
+ msgstr "A némítási állapot módosítása meghiúsult a(z) „%s” eszközön."
+
++#: sys/v4l2/gstv4l2sink.c:628
+ msgid "Failed to allocated required memory."
+ msgstr "Nem sikerült lefoglalni a szükséges memóriát."
+
++#: sys/v4l2/gstv4l2src.c:652 sys/v4l2/gstv4l2videodec.c:756
++#: sys/v4l2/gstv4l2videoenc.c:782
+ msgid "Failed to allocate required memory."
+ msgstr "Nem sikerült lefoglalni a szükséges memóriát."
+
++#: sys/v4l2/gstv4l2transform.c:142
+ #, c-format
+ msgid "Converter on device %s has no supported input format"
+ msgstr "A(z) %s eszközön lévő átalakítónak nincs támogatott bemeneti formátuma"
+
++#: sys/v4l2/gstv4l2transform.c:149
+ #, c-format
+ msgid "Converter on device %s has no supported output format"
+ msgstr "A(z) %s eszközön lévő átalakítónak nincs támogatott kimeneti formátuma"
+
-msgstr "A(z) %s eszközön lévő kódolónak nincs támogatott bemeneti formátuma"
++#: sys/v4l2/gstv4l2videodec.c:136
++#, c-format
+ msgid "Decoder on device %s has no supported input format"
-#, fuzzy, c-format
++msgstr "A(z) %s eszközön lévő dekódolónak nincs támogatott bemeneti formátuma"
+
-msgstr "A(z) %s eszközön lévő kódolónak nincs támogatott kimeneti formátuma"
++#: sys/v4l2/gstv4l2videodec.c:281
++#, c-format
+ msgid "Decoder on device %s has no supported output format"
-#, fuzzy
++msgstr "A(z) %s eszközön lévő dekódolónak nincs támogatott kimeneti formátuma"
+
++#: sys/v4l2/gstv4l2videodec.c:770
+ msgid "Failed to start decoding thread."
+ msgstr "Nem sikerült elindítani a dekódolási szálat."
+
++#: sys/v4l2/gstv4l2videodec.c:777 sys/v4l2/gstv4l2videoenc.c:803
+ msgid "Failed to process frame."
+ msgstr "Nem sikerült feldolgozni a keretet."
+
++#: sys/v4l2/gstv4l2videoenc.c:140
+ #, c-format
+ msgid "Encoder on device %s has no supported output format"
+ msgstr "A(z) %s eszközön lévő kódolónak nincs támogatott kimeneti formátuma"
+
++#: sys/v4l2/gstv4l2videoenc.c:147
+ #, c-format
+ msgid "Encoder on device %s has no supported input format"
+ msgstr "A(z) %s eszközön lévő kódolónak nincs támogatott bemeneti formátuma"
+
-msgstr "Nem sikerült elindítani a dekódolási szálat."
++#: sys/v4l2/gstv4l2videoenc.c:795
+ msgid "Failed to start encoding thread."
-msgid ""
-"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
-"it is a v4l1 driver."
-msgstr ""
-"Hiba a(z) „%s” eszköz képességeinek lekérésekor. Ez nem egy v4l2 meghajtó. "
-"Ellenőrizze, hogy nem v4l1 meghajtó-e."
++msgstr "Nem sikerült elindítani a kódolási szálat."
+
++#: sys/v4l2/v4l2_calls.c:92
+ #, c-format
-msgstr ""
-"A tuner aktuális frekvenciájának lekérdezése meghiúsult a(z) „%s” eszköztől."
++msgid "Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if it is a v4l1 driver."
++msgstr "Hiba a(z) „%s” eszköz képességeinek lekérésekor. Ez nem egy v4l2 meghajtó. Ellenőrizze, hogy nem v4l1 meghajtó-e."
+
++#: sys/v4l2/v4l2_calls.c:156
+ #, c-format
+ msgid "Failed to query attributes of input %d in device %s"
+ msgstr "A(z) %d bemenet attribútumainak lekérése meghiúsult a(z) %s eszközön"
+
++#: sys/v4l2/v4l2_calls.c:187
+ #, c-format
+ msgid "Failed to get setting of tuner %d on device '%s'."
+ msgstr "A(z) %d. tuner beállításának lekérése a(z) „%s” eszközön meghiúsult."
+
++#: sys/v4l2/v4l2_calls.c:235
+ #, c-format
+ msgid "Failed to query norm on device '%s'."
+ msgstr "A norma lekérdezése meghiúsult a(z) „%s” eszköztől."
+
++#: sys/v4l2/v4l2_calls.c:416
+ #, c-format
+ msgid "Failed getting controls attributes on device '%s'."
+ msgstr "A vezérlőattribútumok lekérése meghiúsult a(z) „%s” eszközön."
+
++#: sys/v4l2/v4l2_calls.c:608
+ #, c-format
+ msgid "Cannot identify device '%s'."
+ msgstr "Nem azonosítható a(z) „%s” eszköz."
+
++#: sys/v4l2/v4l2_calls.c:615
+ #, c-format
+ msgid "This isn't a device '%s'."
+ msgstr "Ez nem egy eszköz: „%s”."
+
++#: sys/v4l2/v4l2_calls.c:622
+ #, c-format
+ msgid "Could not open device '%s' for reading and writing."
+ msgstr "Nem sikerült olvasásra és írásra megnyitni a(z) „%s” eszközt."
+
++#: sys/v4l2/v4l2_calls.c:629
+ #, c-format
+ msgid "Device '%s' is not a capture device."
+ msgstr "A(z) „%s” eszköz nem rögzítőeszköz."
+
++#: sys/v4l2/v4l2_calls.c:637
+ #, c-format
+ msgid "Device '%s' is not a output device."
+ msgstr "A(z) „%s” eszköz nem kimeneti eszköz."
+
++#: sys/v4l2/v4l2_calls.c:645
+ #, c-format
+ msgid "Device '%s' is not a M2M device."
+ msgstr "A(z) „%s” eszköz nem M2M eszköz."
+
++#: sys/v4l2/v4l2_calls.c:698
+ #, c-format
+ msgid "Could not dup device '%s' for reading and writing."
+ msgstr "Nem sikerült duplikálni a(z) „%s” eszközt olvasásra és írásra."
+
++#: sys/v4l2/v4l2_calls.c:782
+ #, c-format
+ msgid "Failed to set norm for device '%s'."
+ msgstr "A norma beállítása meghiúsult a(z) „%s” eszközön."
+
++#: sys/v4l2/v4l2_calls.c:820
+ #, c-format
+ msgid "Failed to get current tuner frequency for device '%s'."
-msgstr ""
-"A tuner aktuális frekvenciájának beállítása meghiúsult a(z) „%s” eszközön "
-"%lu Hz-re."
++msgstr "A tuner aktuális frekvenciájának lekérdezése meghiúsult a(z) „%s” eszköztől."
+
++#: sys/v4l2/v4l2_calls.c:862
+ #, c-format
+ msgid "Failed to set current tuner frequency for device '%s' to %lu Hz."
-msgstr ""
-"A(z) $%2d érték beállítása $%1d vezérlőelemhez meghiúsult a(z) „%s” eszközön."
++msgstr "A tuner aktuális frekvenciájának beállítása meghiúsult a(z) „%s” eszközön %lu Hz-re."
+
++#: sys/v4l2/v4l2_calls.c:896
+ #, c-format
+ msgid "Failed to get signal strength for device '%s'."
+ msgstr "A jelerősség lekérdezése meghiúsult a(z) „%s” eszközön."
+
++#: sys/v4l2/v4l2_calls.c:932
+ #, c-format
+ msgid "Failed to get value for control %d on device '%s'."
+ msgstr "A(z) %d. vezérlőelem értékének lekérése meghiúsult a(z) „%s” eszközön."
+
++#: sys/v4l2/v4l2_calls.c:967
+ #, c-format
+ msgid "Failed to set value %d for control %d on device '%s'."
-msgstr ""
-"Az aktuális bemenet lekérése meghiúsult a(z) „%s” eszközről. Lehet, hogy ez "
-"egy rádióeszköz."
++msgstr "A(z) $%2d érték beállítása $%1d vezérlőelemhez meghiúsult a(z) „%s” eszközön."
+
++#: sys/v4l2/v4l2_calls.c:1047
+ #, c-format
+ msgid "Failed to get current input on device '%s'. May be it is a radio device"
-msgid ""
-"Failed to get current output on device '%s'. May be it is a radio device"
-msgstr ""
-"Az aktuális kimenet lekérése meghiúsult a(z) „%s” eszközről. Lehet, hogy ez "
-"egy rádióeszköz."
++msgstr "Az aktuális bemenet lekérése meghiúsult a(z) „%s” eszközről. Lehet, hogy ez egy rádióeszköz."
+
++#: sys/v4l2/v4l2_calls.c:1104
+ #, c-format
-#~ msgstr ""
-#~ "A fájl túl sok adatfolyamot tartalmaz. Csak az első %d kerül lejátszásra."
++msgid "Failed to get current output on device '%s'. May be it is a radio device"
++msgstr "Az aktuális kimenet lekérése meghiúsult a(z) „%s” eszközről. Lehet, hogy ez egy rádióeszköz."
+
++#: sys/v4l2/v4l2_calls.c:1129
+ #, c-format
+ msgid "Failed to set output %d on device %s."
+ msgstr "A(z) %d. kimenet beállítása meghiúsult a(z) „%s” eszközön."
+
++#: sys/ximage/gstximagesrc.c:838
+ msgid "Changing resolution at runtime is not yet supported."
+ msgstr "A felbontás módosítása futás közben még nem támogatott."
+
++#: sys/ximage/gstximagesrc.c:852
+ msgid "Cannot operate without a clock"
+ msgstr "Óra nélkül lehetetlen a működés"
+
+ #~ msgid "This file contains too many streams. Only playing first %d"
++#~ msgstr "A fájl túl sok adatfolyamot tartalmaz. Csak az első %d kerül lejátszásra."
+
+ #~ msgid "Record Source"
+ #~ msgstr "Felvétel forrása"
+
+ #~ msgid "Microphone"
+ #~ msgstr "Mikrofon"
+
+ #~ msgid "Line In"
+ #~ msgstr "Vonalbemenet"
+
+ #~ msgid "Internal CD"
+ #~ msgstr "Belső CD"
+
+ #~ msgid "SPDIF In"
+ #~ msgstr "SPDIF be"
+
+ #~ msgid "AUX 1 In"
+ #~ msgstr "1. AUX be"
+
+ #~ msgid "AUX 2 In"
+ #~ msgstr "2. AUX be"
+
+ #~ msgid "Codec Loopback"
+ #~ msgstr "Kodek visszacsatolás"
+
+ #~ msgid "SunVTS Loopback"
+ #~ msgstr "SunVTS visszacsatolás"
+
+ #~ msgid "Volume"
+ #~ msgstr "Hangerő"
+
+ #~ msgid "Gain"
+ #~ msgstr "Erősítés"
+
+ #~ msgid "Monitor"
+ #~ msgstr "Monitor"
+
+ #~ msgid "Built-in Speaker"
+ #~ msgstr "Beépített hangszóró"
+
+ #~ msgid "Headphone"
+ #~ msgstr "Fejhallgató"
+
+ #~ msgid "Line Out"
+ #~ msgstr "Vonalkimenet"
+
+ #~ msgid "SPDIF Out"
+ #~ msgstr "SPDIF ki"
+
+ #~ msgid "AUX 1 Out"
+ #~ msgstr "1. AUX ki"
+
+ #~ msgid "AUX 2 Out"
+ #~ msgstr "2. AUX ki"
--- /dev/null
-# Johnny A. Solbu <johnny@solbu.net>, 2012-2017
+ # Norwegian bokmaal translation of gst-utils.
+ # This file is put in the public domain.
+ #
+ # Kjartan Maraas <kmaraas@gnome.org>, 2004-2010.
-"Project-Id-Version: gst-plugins-good 1.12.0\n"
++# Johnny A. Solbu <johnny@solbu.net>, 2012-2019
+ #
+ msgid ""
+ msgstr ""
-"POT-Creation-Date: 2019-02-26 11:47+0000\n"
-"PO-Revision-Date: 2017-05-23 23:04+0200\n"
++"Project-Id-Version: gst-plugins-good 1.15.1\n"
+ "Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-msgid ""
-"Failed to configure LAME mp3 audio encoder. Check your encoding parameters."
-msgstr ""
++"POT-Creation-Date: 2019-01-17 01:59+0000\n"
++"PO-Revision-Date: 2019-09-03 09:55+0200\n"
+ "Last-Translator: Johnny A. Solbu <johnny@solbu.net>\n"
+ "Language-Team: Norwegian Bokmaal <i18n-nb@lister.ping.uio.no>\n"
+ "Language: nb_NO\n"
+ "MIME-Version: 1.0\n"
+ "Content-Type: text/plain; charset=UTF-8\n"
+ "Content-Transfer-Encoding: 8bit\n"
+ "X-Bugs: Report translation errors to the Language-Team address.\n"
+ "X-Generator: Poedit 1.8.7.1\n"
+
++#: ext/jack/gstjackaudiosink.c:356 ext/jack/gstjackaudiosrc.c:364
+ msgid "Jack server not found"
+ msgstr "Jack-server ikke funnet"
+
++#: ext/jpeg/gstjpegdec.c:936 ext/jpeg/gstjpegdec.c:1103
++#: ext/jpeg/gstjpegdec.c:1112 ext/jpeg/gstjpegdec.c:1122
++#: ext/jpeg/gstjpegdec.c:1131 ext/jpeg/gstjpegdec.c:1393
++#: ext/jpeg/gstjpegdec.c:1421
+ msgid "Failed to decode JPEG image"
+ msgstr "Klarte ikke å dekode JPEG-bilde"
+
-msgid ""
-"The requested bitrate %d kbit/s for property '%s' is not allowed. The "
-"bitrate was changed to %d kbit/s."
-msgstr ""
++#: ext/lame/gstlamemp3enc.c:393
++msgid "Failed to configure LAME mp3 audio encoder. Check your encoding parameters."
++msgstr "Kunne ikke konfigurere LAME mp3-lydkoder. Sjekk kodingsparametrene dine."
+
++#: ext/lame/gstlamemp3enc.c:425 ext/twolame/gsttwolamemp2enc.c:488
+ #, c-format
-msgid ""
-"A network error occurred, or the server closed the connection unexpectedly."
++msgid "The requested bitrate %d kbit/s for property '%s' is not allowed. The bitrate was changed to %d kbit/s."
++msgstr "Den forespurte bitraten %d-kbit/s for egenskapen «%s» er ikke tillatt. Bitraten ble endret til %d kbit/s."
+
+ #. TRANSLATORS: 'song title' by 'artist name'
++#: ext/pulse/pulsesink.c:3127
+ #, c-format
+ msgid "'%s' by '%s'"
+ msgstr "«%s» av «%s»"
+
++#: ext/shout2/gstshout2.c:619 ext/shout2/gstshout2.c:629
+ msgid "Could not connect to server"
+ msgstr "Kunne ikke koble til tjener."
+
++#: ext/soup/gstsouphttpsrc.c:914
+ msgid "No URL set."
+ msgstr "Ingen URL satt."
+
++#: ext/soup/gstsouphttpsrc.c:1372
+ msgid "Could not resolve server name."
+ msgstr "Kunne ikke slå opp navn på tjener."
+
++#: ext/soup/gstsouphttpsrc.c:1377
+ msgid "Could not establish connection to server."
+ msgstr "Kunne ikke etablere tilkobling til tjener."
+
++#: ext/soup/gstsouphttpsrc.c:1381
+ msgid "Secure connection setup failed."
+ msgstr "Oppsett av sikker tilkobling feilet."
+
-msgstr ""
++#: ext/soup/gstsouphttpsrc.c:1387
++msgid "A network error occurred, or the server closed the connection unexpectedly."
+ msgstr "En nettverksfeil oppstod, eller tjeneren lukket uventet tilkoblingen."
+
++#: ext/soup/gstsouphttpsrc.c:1392
+ msgid "Server sent bad data."
+ msgstr "Tjener sendte ugyldige data."
+
++#: ext/soup/gstsouphttpsrc.c:1616
+ msgid "Server does not support seeking."
+ msgstr "Tjener støtter ikke søking."
+
++#: ext/twolame/gsttwolamemp2enc.c:411
+ msgid "Failed to configure TwoLAME encoder. Check your encoding parameters."
-msgid ""
-"No supported stream was found. You might need to install a GStreamer RTSP "
-"extension plugin for Real media streams."
-msgstr ""
-"Ingen støttet strøm ble funnet. Du må kanskje installere en GStreamer RTSP "
-"utvidelsesprogramtillegg for Real mediestrømmer."
++msgstr "Kunne ikke konfigurere TwoLame-enkoder. Kontroller kodingsparametrene dine."
+
++#: gst/avi/gstavimux.c:1832
+ msgid "No or invalid input audio, AVI stream will be corrupt."
+ msgstr "Ingen eller ugyldig inndatalyd, AVI-strømmen vil bli skadet."
+
++#: gst/isomp4/qtdemux.c:713 gst/isomp4/qtdemux.c:717
+ msgid "This file contains no playable streams."
+ msgstr "Filen inneholder ingen spillbare strømmer."
+
++#: gst/isomp4/qtdemux.c:763 gst/isomp4/qtdemux.c:7003
++#: gst/isomp4/qtdemux.c:7072 gst/isomp4/qtdemux.c:7362
++#: gst/isomp4/qtdemux.c:8800
+ msgid "This file is invalid and cannot be played."
+ msgstr "Filen er ugyldig og kan ikke spilles."
+
++#: gst/isomp4/qtdemux.c:3091
+ msgid "Cannot play stream because it is encrypted with PlayReady DRM."
+ msgstr "Kan ikke spille av strømmen fordi den er kryptert med PlayReady DRM."
+
++#: gst/isomp4/qtdemux.c:4312 gst/isomp4/qtdemux.c:8131
++#: gst/isomp4/qtdemux.c:8138 gst/isomp4/qtdemux.c:9283
++#: gst/isomp4/qtdemux.c:9720 gst/isomp4/qtdemux.c:9727
++#: gst/isomp4/qtdemux.c:12528
+ msgid "This file is corrupt and cannot be played."
+ msgstr "Filen er skadet og kan ikke spilles."
+
++#: gst/isomp4/qtdemux.c:4554
+ msgid "Invalid atom size."
+ msgstr "Ugyldig atomstørrelse."
+
++#: gst/isomp4/qtdemux.c:4633
+ msgid "This file is incomplete and cannot be played."
+ msgstr "Filen er ufullstendig og kan ikke spilles."
+
++#: gst/isomp4/qtdemux.c:10756
+ msgid "The video in this file might not play correctly."
+ msgstr "Videoen i denne filen spilles kanskje ikke av korrekt."
+
-msgid ""
-"No supported stream was found. You might need to allow more transport "
-"protocols or may otherwise be missing the right GStreamer RTSP extension "
-"plugin."
-msgstr ""
-"Ingen støttet strøm ble funnet. Du må kanskje tillate flere "
-"transportprotokoller eller den kan mangle den rette GStreamer RTSP-"
-"utvidelsestillegget."
++#: gst/rtsp/gstrtspsrc.c:7398
++msgid "No supported stream was found. You might need to install a GStreamer RTSP extension plugin for Real media streams."
++msgstr "Ingen støttet strøm ble funnet. Du må kanskje installere en GStreamer RTSP utvidelsesprogramtillegg for Real mediestrømmer."
+
-msgid ""
-"Could not open audio device for playback. Device is being used by another "
-"application."
-msgstr ""
-"Kunne ikke åpne lydenheten for avspilling. Enheten brukes av et annet "
-"program."
++#: gst/rtsp/gstrtspsrc.c:7403
++msgid "No supported stream was found. You might need to allow more transport protocols or may otherwise be missing the right GStreamer RTSP extension plugin."
++msgstr "Ingen støttet strøm ble funnet. Du må kanskje tillate flere transportprotokoller eller den kan mangle den rette GStreamer RTSP-utvidelsestillegget."
+
-msgid ""
-"Could not open audio device for playback. You don't have permission to open "
-"the device."
-msgstr ""
-"Kunne ikke åpne lydenheten for avspilling. Du har ikke tillatelse til å åpne "
-"enheten."
++#: sys/oss4/oss4-sink.c:493 sys/oss4/oss4-source.c:358
++#: sys/oss/gstosssink.c:384
++msgid "Could not open audio device for playback. Device is being used by another application."
++msgstr "Kunne ikke åpne lydenheten for avspilling. Enheten brukes av et annet program."
+
-msgid ""
-"Could not open audio device for playback. This version of the Open Sound "
-"System is not supported by this element."
-msgstr ""
-"Kunne ikke åpne lydenheten for avspilling. Denne versjonen av «Open Sound "
-"System» støttes ikke av dette elementet."
++#: sys/oss4/oss4-sink.c:503 sys/oss4/oss4-source.c:368
++#: sys/oss/gstosssink.c:391
++msgid "Could not open audio device for playback. You don't have permission to open the device."
++msgstr "Kunne ikke åpne lydenheten for avspilling. Du har ikke tillatelse til å åpne enheten."
+
++#: sys/oss4/oss4-sink.c:514 sys/oss4/oss4-source.c:379
++#: sys/oss/gstosssink.c:399
+ msgid "Could not open audio device for playback."
+ msgstr "Kunne ikke åpne lydenhet for avspilling."
+
-msgid ""
-"Could not open audio device for recording. You don't have permission to open "
-"the device."
-msgstr ""
-"Kunne ikke åpne lydenheten for opptak. Du har ikke tillatelse til å åpne "
-"enheten."
++#: sys/oss4/oss4-sink.c:523 sys/oss4/oss4-source.c:389
++msgid "Could not open audio device for playback. This version of the Open Sound System is not supported by this element."
++msgstr "Kunne ikke åpne lydenheten for avspilling. Denne versjonen av «Open Sound System» støttes ikke av dette elementet."
+
++#: sys/oss4/oss4-sink.c:646
+ msgid "Playback is not supported by this audio device."
+ msgstr "Avspilling støttes ikke av denne lydenheten."
+
++#: sys/oss4/oss4-sink.c:653
+ msgid "Audio playback error."
+ msgstr "Lydavspillingsfeil"
+
++#: sys/oss4/oss4-source.c:503
+ msgid "Recording is not supported by this audio device."
+ msgstr "Opptak støttes ikke av denne lydenheten."
+
++#: sys/oss4/oss4-source.c:510
+ msgid "Error recording from audio device."
+ msgstr "Feil ved opptak fra lydenheten."
+
-#, fuzzy, c-format
++#: sys/oss/gstosssrc.c:376
++msgid "Could not open audio device for recording. You don't have permission to open the device."
++msgstr "Kunne ikke åpne lydenheten for opptak. Du har ikke tillatelse til å åpne enheten."
+
++#: sys/oss/gstosssrc.c:384
+ msgid "Could not open audio device for recording."
+ msgstr "Kunne ikke åpne lydenhet for opptak."
+
++#: sys/osxaudio/gstosxaudioringbuffer.c:149
+ msgid "CoreAudio device not found"
+ msgstr "CoreAudio-enhet ikke funnet"
+
++#: sys/osxaudio/gstosxaudioringbuffer.c:155
+ msgid "CoreAudio device could not be opened"
+ msgstr "CoreAudio-enhet kune ikke åpnes"
+
++#: sys/v4l2/gstv4l2bufferpool.c:1712
+ #, c-format
+ msgid "Error reading %d bytes from device '%s'."
+ msgstr "Feil ved lesing av% d bytes fra enheten '% s»."
+
++#: sys/v4l2/gstv4l2object.c:1223
+ #, c-format
+ msgid "Failed to enumerate possible video formats device '%s' can work with"
+ msgstr "Kunne ikke spesifisere mulige videoformater enhet «%s» kan arbeide med"
+
++#: sys/v4l2/gstv4l2object.c:2956
+ #, c-format
+ msgid "Could not map buffers from device '%s'"
+ msgstr "Kunne ikke tilordne buffere fra enheten «%s»."
+
++#: sys/v4l2/gstv4l2object.c:2964
+ #, c-format
+ msgid "The driver of device '%s' does not support the IO method %d"
+ msgstr "Driveren av enheten «%s» støtter ikke IO metoden %d"
+
++#: sys/v4l2/gstv4l2object.c:2971
+ #, c-format
+ msgid "The driver of device '%s' does not support any known IO method."
+ msgstr "Driveren av enheten «%s» støtter ikke noen kjente IO-metoder."
+
-msgstr "Enkoder på enhet «%s» har ingen støttede inndataformat"
++#: sys/v4l2/gstv4l2object.c:3741 sys/v4l2/gstv4l2object.c:3765
++#, c-format
+ msgid "Device '%s' has no supported format"
-msgstr ""
++msgstr "Enhet «%s» har ingen støttede format"
+
++#: sys/v4l2/gstv4l2object.c:3747 sys/v4l2/gstv4l2object.c:3771
+ #, c-format
+ msgid "Device '%s' failed during initialization"
-#, fuzzy, c-format
++msgstr "Enheten «%s» mislyktes under initialiseringen"
+
++#: sys/v4l2/gstv4l2object.c:3759
+ #, c-format
+ msgid "Device '%s' is busy"
+ msgstr "Enheten «%s» er opptatt"
+
++#: sys/v4l2/gstv4l2object.c:3782
+ #, c-format
+ msgid "Device '%s' cannot capture at %dx%d"
+ msgstr "Enhet «%s» kan ikke fange ved %dx%d"
+
++#: sys/v4l2/gstv4l2object.c:3791
+ #, c-format
+ msgid "Device '%s' cannot capture in the specified format"
+ msgstr "Enhet «%s» kan ikke fange i det angitte formatet"
+
++#: sys/v4l2/gstv4l2object.c:3802
+ #, c-format
+ msgid "Device '%s' does support non-contiguous planes"
+ msgstr "Enhet «%s» støtter ikke ikke-sammenhengende plan"
+
-msgstr "Enhet «%s» støtter ikke videoopptak"
++#: sys/v4l2/gstv4l2object.c:3817
++#, c-format
+ msgid "Device '%s' does not support %s interlacing"
-#, fuzzy, c-format
++msgstr "Enhet «%s» støtter ikke %s-linjefletting"
+
-msgstr "Enhet «%s» støtter ikke videoopptak"
++#: sys/v4l2/gstv4l2object.c:3831
++#, c-format
+ msgid "Device '%s' does not support %s colorimetry"
-#, fuzzy, c-format
++msgstr "Enhet «%s» støtter ikke %s-kolorimetri"
+
++#: sys/v4l2/gstv4l2object.c:3843
+ #, c-format
+ msgid "Could not get parameters on device '%s'"
+ msgstr "Kunne ikke få parametere på enheten «%s»."
+
++#: sys/v4l2/gstv4l2object.c:3851
+ msgid "Video device did not accept new frame rate setting."
+ msgstr "Videoenheten aksepterte ikke ny bildefrekvensinnstilling."
+
++#: sys/v4l2/gstv4l2object.c:3977
+ msgid "Video device did not provide output format."
+ msgstr "Videoenhet tilbød ikke utdataformat."
+
++#: sys/v4l2/gstv4l2object.c:3983
+ msgid "Video device returned invalid dimensions."
+ msgstr "Videoenhet returnerte ugyldige dimensjoner."
+
++#: sys/v4l2/gstv4l2object.c:3991
+ msgid "Video device uses an unsupported interlacing method."
+ msgstr "Videoenhet bruker en ustøttet linjeflettingsmetode."
+
++#: sys/v4l2/gstv4l2object.c:3998
+ msgid "Video device uses an unsupported pixel format."
+ msgstr "Videoenhet bruker et ustøttet pikselformat."
+
++#: sys/v4l2/gstv4l2object.c:4518
+ msgid "Failed to configure internal buffer pool."
+ msgstr "Klarte ikke å konfigurere internt bufferområde"
+
++#: sys/v4l2/gstv4l2object.c:4524
+ msgid "Video device did not suggest any buffer size."
+ msgstr "Videoenhet foreslo ikke noen bufferstørrelse."
+
++#: sys/v4l2/gstv4l2object.c:4539
+ msgid "No downstream pool to import from."
+ msgstr "Ingen nedstrømsområde å importere fra."
+
++#: sys/v4l2/gstv4l2radio.c:143
+ #, c-format
+ msgid "Failed to get settings of tuner %d on device '%s'."
+ msgstr "Kunne ikke hente innstillingene fra mottakeren %d på enheten «%s»."
+
++#: sys/v4l2/gstv4l2radio.c:150
+ #, c-format
+ msgid "Error getting capabilities for device '%s'."
+ msgstr "Feil ved henting av funksjoner for enheten «%s»."
+
++#: sys/v4l2/gstv4l2radio.c:157
+ #, c-format
+ msgid "Device '%s' is not a tuner."
+ msgstr "Enheten «%s» er ikke en tuner."
+
++#: sys/v4l2/gstv4l2radio.c:184
+ #, c-format
+ msgid "Failed to get radio input on device '%s'. "
+ msgstr "Mislyktes i å få radio-inndata på enheten «%s»."
+
++#: sys/v4l2/gstv4l2radio.c:207 sys/v4l2/v4l2_calls.c:1072
+ #, c-format
+ msgid "Failed to set input %d on device %s."
+ msgstr "Klarte ikke å sette inngang %d på enhet %s."
+
++#: sys/v4l2/gstv4l2radio.c:241
+ #, c-format
+ msgid "Failed to change mute state for device '%s'."
+ msgstr "Klarte ikke å endre dempe-modus for enheten «%s»."
+
++#: sys/v4l2/gstv4l2sink.c:628
+ msgid "Failed to allocated required memory."
+ msgstr "Kunne ikke tildele nødvendige minne."
+
++#: sys/v4l2/gstv4l2src.c:652 sys/v4l2/gstv4l2videodec.c:756
++#: sys/v4l2/gstv4l2videoenc.c:782
+ msgid "Failed to allocate required memory."
+ msgstr "Kunne ikke tildele nødvendige minne."
+
++#: sys/v4l2/gstv4l2transform.c:142
+ #, c-format
+ msgid "Converter on device %s has no supported input format"
+ msgstr "Konverter på enhet «%s» har ingen støttede inndataformat"
+
++#: sys/v4l2/gstv4l2transform.c:149
+ #, c-format
+ msgid "Converter on device %s has no supported output format"
+ msgstr "Konverter på enhet «%s» har ingen støttede utdataformat"
+
-msgstr "Enkoder på enhet «%s» har ingen støttede inndataformat"
++#: sys/v4l2/gstv4l2videodec.c:136
++#, c-format
+ msgid "Decoder on device %s has no supported input format"
-#, fuzzy, c-format
++msgstr "Dekoder på enhet «%s» har ingen støttede inndataformat"
+
-msgstr "Enkoder på enhet «%s» har ingen støttede utdataformat"
++#: sys/v4l2/gstv4l2videodec.c:281
++#, c-format
+ msgid "Decoder on device %s has no supported output format"
-#, fuzzy
++msgstr "Dekoder på enhet «%s» har ingen støttede utdataformat"
+
++#: sys/v4l2/gstv4l2videodec.c:770
+ msgid "Failed to start decoding thread."
+ msgstr "Klarte ikke å starte dekoding av tråden."
+
++#: sys/v4l2/gstv4l2videodec.c:777 sys/v4l2/gstv4l2videoenc.c:803
+ msgid "Failed to process frame."
+ msgstr "Kunne ikke behandle ramme."
+
++#: sys/v4l2/gstv4l2videoenc.c:140
+ #, c-format
+ msgid "Encoder on device %s has no supported output format"
+ msgstr "Enkoder på enhet «%s» har ingen støttede utdataformat"
+
++#: sys/v4l2/gstv4l2videoenc.c:147
+ #, c-format
+ msgid "Encoder on device %s has no supported input format"
+ msgstr "Enkoder på enhet «%s» har ingen støttede inndataformat"
+
-msgstr "Klarte ikke å starte dekoding av tråden."
++#: sys/v4l2/gstv4l2videoenc.c:795
+ msgid "Failed to start encoding thread."
-msgid ""
-"Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if "
-"it is a v4l1 driver."
-msgstr ""
-"Feil ved henting av evner for enheten «%s»: Det er ikke en v4l2 driver. "
-"Sjekk om det er en v4l1 driver."
++msgstr "Klarte ikke å starte kodingstråd."
+
++#: sys/v4l2/v4l2_calls.c:92
+ #, c-format
-msgstr ""
-"Klarte ikke å sette gjeldende mottakerfrekvens for enheten «%s» til %lu Hz."
++msgid "Error getting capabilities for device '%s': It isn't a v4l2 driver. Check if it is a v4l1 driver."
++msgstr "Feil ved henting av evner for enheten «%s»: Det er ikke en v4l2 driver. Sjekk om det er en v4l1 driver."
+
++#: sys/v4l2/v4l2_calls.c:156
+ #, c-format
+ msgid "Failed to query attributes of input %d in device %s"
+ msgstr "Feil ved attributtspørring av inndata %d på enhet %s"
+
++#: sys/v4l2/v4l2_calls.c:187
+ #, c-format
+ msgid "Failed to get setting of tuner %d on device '%s'."
+ msgstr "Kunne ikke hente innstillingen av mottakeren %d på enheten «%s»."
+
++#: sys/v4l2/v4l2_calls.c:235
+ #, c-format
+ msgid "Failed to query norm on device '%s'."
+ msgstr "Feil ved norm-spørring på enhet %s."
+
++#: sys/v4l2/v4l2_calls.c:416
+ #, c-format
+ msgid "Failed getting controls attributes on device '%s'."
+ msgstr "Kunne ikke å få inn kontrollattributter på enhet «%s»."
+
++#: sys/v4l2/v4l2_calls.c:608
+ #, c-format
+ msgid "Cannot identify device '%s'."
+ msgstr "Kan ikke identifisere enhet «%s»."
+
++#: sys/v4l2/v4l2_calls.c:615
+ #, c-format
+ msgid "This isn't a device '%s'."
+ msgstr "Dette er ikke en enhet «%s»."
+
++#: sys/v4l2/v4l2_calls.c:622
+ #, c-format
+ msgid "Could not open device '%s' for reading and writing."
+ msgstr "Kunne ikke åpne enhet «%s» for lesing og skriving."
+
++#: sys/v4l2/v4l2_calls.c:629
+ #, c-format
+ msgid "Device '%s' is not a capture device."
+ msgstr "Enhet «%s» kan ikke fange data."
+
++#: sys/v4l2/v4l2_calls.c:637
+ #, c-format
+ msgid "Device '%s' is not a output device."
+ msgstr "Enhet «%s» er ikke en utgangsenhet."
+
++#: sys/v4l2/v4l2_calls.c:645
+ #, c-format
+ msgid "Device '%s' is not a M2M device."
+ msgstr "Enheten «%s» er ikke en M2M-enhet."
+
++#: sys/v4l2/v4l2_calls.c:698
+ #, c-format
+ msgid "Could not dup device '%s' for reading and writing."
+ msgstr "Kunne ikke åpne enhet «%s» for lesing og skriving."
+
++#: sys/v4l2/v4l2_calls.c:782
+ #, c-format
+ msgid "Failed to set norm for device '%s'."
+ msgstr "Kunne ikke sette normen for enhet «%s»."
+
++#: sys/v4l2/v4l2_calls.c:820
+ #, c-format
+ msgid "Failed to get current tuner frequency for device '%s'."
+ msgstr "Klarte ikke å få gjeldende mottakerfrekvens for enheten «%s»."
+
++#: sys/v4l2/v4l2_calls.c:862
+ #, c-format
+ msgid "Failed to set current tuner frequency for device '%s' to %lu Hz."
-msgstr ""
-"Kunne ikke hente gjeldende inndata på enheten «%s». Kanskje det er en "
-"radioenhet"
++msgstr "Klarte ikke å sette gjeldende mottakerfrekvens for enheten «%s» til %lu Hz."
+
++#: sys/v4l2/v4l2_calls.c:896
+ #, c-format
+ msgid "Failed to get signal strength for device '%s'."
+ msgstr "Klarte ikke å innhente signalstyrken for enheten «%s»."
+
++#: sys/v4l2/v4l2_calls.c:932
+ #, c-format
+ msgid "Failed to get value for control %d on device '%s'."
+ msgstr "Kunne ikke hente verdi for kontroll %d på enheten «%s»."
+
++#: sys/v4l2/v4l2_calls.c:967
+ #, c-format
+ msgid "Failed to set value %d for control %d on device '%s'."
+ msgstr "Klarte ikke å sette verdi %d for kontroll %d på enheten «%s»."
+
++#: sys/v4l2/v4l2_calls.c:1047
+ #, c-format
+ msgid "Failed to get current input on device '%s'. May be it is a radio device"
-msgid ""
-"Failed to get current output on device '%s'. May be it is a radio device"
++msgstr "Kunne ikke hente gjeldende inndata på enheten «%s». Kanskje det er en radioenhet"
+
++#: sys/v4l2/v4l2_calls.c:1104
+ #, c-format
-#~ msgid ""
-#~ "Could not open audio device for mixer control handling. This version of "
-#~ "the Open Sound System is not supported by this element."
-#~ msgstr ""
-#~ "Kunne ikke åpne lydenheten for mikserkontrollhåndtering. Denne versjonen "
-#~ "av «Open Sound System» støttes ikke av dette elementet."
++msgid "Failed to get current output on device '%s'. May be it is a radio device"
+ msgstr "Kunne ikke hente utdata fra enheten «%s». Kanskje det er en radioenhet"
+
++#: sys/v4l2/v4l2_calls.c:1129
+ #, c-format
+ msgid "Failed to set output %d on device %s."
+ msgstr "Klarte ikke å sette utgang %d på enhet %s."
+
++#: sys/ximage/gstximagesrc.c:838
+ msgid "Changing resolution at runtime is not yet supported."
+ msgstr "Endring av oppløsning under kjøring støttes ikke enda."
+
++#: sys/ximage/gstximagesrc.c:852
+ msgid "Cannot operate without a clock"
+ msgstr "Kan ikke operere uten en klokke"
+
+ #~ msgid "This file contains too many streams. Only playing first %d"
+ #~ msgstr "Filen inneholder for mange strømmer. Spiller bare første %d"
+
+ #~ msgid "Record Source"
+ #~ msgstr "Opptakskilde"
+
+ #~ msgid "Microphone"
+ #~ msgstr "Mikrofon"
+
+ #~ msgid "Line In"
+ #~ msgstr "Linje inn"
+
+ #~ msgid "Internal CD"
+ #~ msgstr "Intern CD"
+
+ #~ msgid "SPDIF In"
+ #~ msgstr "SPDIF Inn"
+
+ #~ msgid "AUX 1 In"
+ #~ msgstr "AUX 1 inn"
+
+ #~ msgid "AUX 2 In"
+ #~ msgstr "AUX 2 inn"
+
+ #~ msgid "Codec Loopback"
+ #~ msgstr "Kodek-Loopback"
+
+ #~ msgid "SunVTS Loopback"
+ #~ msgstr "SunVTS-Loopback"
+
+ #~ msgid "Volume"
+ #~ msgstr "Volum"
+
+ #~ msgid "Gain"
+ #~ msgstr "Styrke"
+
+ #~ msgid "Monitor"
+ #~ msgstr "Monitor"
+
+ #~ msgid "Built-in Speaker"
+ #~ msgstr "Innebygget høyttaler"
+
+ #~ msgid "Headphone"
+ #~ msgstr "Hodetelefon"
+
+ #~ msgid "Line Out"
+ #~ msgstr "Linje ut"
+
+ #~ msgid "SPDIF Out"
+ #~ msgstr "SPDIF ut"
+
+ #~ msgid "AUX 1 Out"
+ #~ msgstr "AUX 1 ut"
+
+ #~ msgid "AUX 2 Out"
+ #~ msgstr "AUX 2 ut"
+
+ #~ msgid "Internal data stream error."
+ #~ msgstr "Intern feil i datastrøm."
+
+ #~ msgid "Internal data flow error."
+ #~ msgstr "Intern dataflytfeil."
+
+ #~ msgid "Could not establish connection to sound server"
+ #~ msgstr "Kunne ikke etablere tilkobling til lydtjener"
+
+ #~ msgid "Failed to query sound server capabilities"
+ #~ msgstr "Klarte ikke å etterspørre lydtjenerens evner"
+
+ #~ msgid "Bass"
+ #~ msgstr "Bass"
+
+ #~ msgid "Treble"
+ #~ msgstr "Diskant"
+
+ #~ msgid "Synth"
+ #~ msgstr "Synth"
+
+ #~ msgid "PCM"
+ #~ msgstr "PCM"
+
+ #~ msgid "Speaker"
+ #~ msgstr "Høyttaler"
+
+ #~ msgid "Line-in"
+ #~ msgstr "Linje inn"
+
+ #~ msgid "CD"
+ #~ msgstr "CD"
+
+ #~ msgid "Mixer"
+ #~ msgstr "Mikser"
+
+ #~ msgid "PCM-2"
+ #~ msgstr "PCM-2"
+
+ #~ msgid "Record"
+ #~ msgstr "Opptak"
+
+ #~ msgid "In-gain"
+ #~ msgstr "Innstyrke"
+
+ #~ msgid "Out-gain"
+ #~ msgstr "Utstyrke"
+
+ #~ msgid "Line-1"
+ #~ msgstr "Linje 1"
+
+ #~ msgid "Line-2"
+ #~ msgstr "Linje 2"
+
+ #~ msgid "Line-3"
+ #~ msgstr "Linje 3"
+
+ #~ msgid "Digital-1"
+ #~ msgstr "Digital 1"
+
+ #~ msgid "Digital-2"
+ #~ msgstr "Digital 2"
+
+ #~ msgid "Digital-3"
+ #~ msgstr "Digital 3"
+
+ #~ msgid "Phone-in"
+ #~ msgstr "Telefon inn"
+
+ #~ msgid "Phone-out"
+ #~ msgstr "Telefon ut"
+
+ #~ msgid "Video"
+ #~ msgstr "Video"
+
+ #~ msgid "Radio"
+ #~ msgstr "Radio"
+
+ #~ msgid "Could not open audio device for mixer control handling."
+ #~ msgstr "Kunne ikke åpne lydenheten for mikserkontrollhåndtering."
+
++#~ msgid "Could not open audio device for mixer control handling. This version of the Open Sound System is not supported by this element."
++#~ msgstr "Kunne ikke åpne lydenheten for mikserkontrollhåndtering. Denne versjonen av «Open Sound System» støttes ikke av dette elementet."
+
+ #~ msgid "Master"
+ #~ msgstr "Hovedvolum"
+
+ #~ msgid "Front"
+ #~ msgstr "Front"
+
+ #~ msgid "Rear"
+ #~ msgstr "Bak"
+
+ #~ msgid "Headphones"
+ #~ msgstr "Hodetelefoner"
+
+ #~ msgid "Center"
+ #~ msgstr "Senter"
+
+ #~ msgid "LFE"
+ #~ msgstr "LFE"
+
+ #~ msgid "Surround"
+ #~ msgstr "Surround"
+
+ #~ msgid "Side"
+ #~ msgstr "Side"
+
+ #~ msgid "AUX Out"
+ #~ msgstr "AUX ut"
+
+ #~ msgid "3D Depth"
+ #~ msgstr "3D dybde"
+
+ #~ msgid "3D Center"
+ #~ msgstr "3D senter"
+
+ #~ msgid "3D Enhance"
+ #~ msgstr "3D-forbedring"
+
+ #~ msgid "Telephone"
+ #~ msgstr "Telefon"
+
+ #~ msgid "Video In"
+ #~ msgstr "Video inn"
+
+ #~ msgid "AUX In"
+ #~ msgstr "AUX inn"
+
+ #~ msgid "Record Gain"
+ #~ msgstr "Innspillingsstyrke"
+
+ #~ msgid "Output Gain"
+ #~ msgstr "Utdatastyrke"
+
+ #~ msgid "Microphone Boost"
+ #~ msgstr "Mikrofonforsterkning"
+
+ #~ msgid "Diagnostic"
+ #~ msgstr "Diagnose"
+
+ #~ msgid "Bass Boost"
+ #~ msgstr "BassBoost"
+
+ #~ msgid "Playback Ports"
+ #~ msgstr "Avspillingsporter:"
+
+ #~ msgid "Input"
+ #~ msgstr "Inngang"
+
+ #~ msgid "Monitor Source"
+ #~ msgstr "Monitorkilde"
+
+ #~ msgid "Keyboard Beep"
+ #~ msgstr "Tastaturpip"
+
+ #~ msgid "Simulate Stereo"
+ #~ msgstr "Simulert stereo"
+
+ #~ msgid "Stereo"
+ #~ msgstr "Stereo"
+
+ #~ msgid "Surround Sound"
+ #~ msgstr "Surroundlyd"
+
+ #~ msgid "Microphone Gain"
+ #~ msgstr "Mikrofonstyrke"
+
+ #~ msgid "Speaker Source"
+ #~ msgstr "Høyttalerkilde"
+
+ #~ msgid "Microphone Source"
+ #~ msgstr "Mikrofonkilde"
+
+ #~ msgid "Jack"
+ #~ msgstr "Jack"
+
+ #~ msgid "Center / LFE"
+ #~ msgstr "Senter / LFE"
+
+ #~ msgid "Stereo Mix"
+ #~ msgstr "Stereomiks"
+
+ #~ msgid "Mono Mix"
+ #~ msgstr "Monomiks"
+
+ #~ msgid "Input Mix"
+ #~ msgstr "Inndatamiks"
+
+ #~ msgid "Microphone 1"
+ #~ msgstr "Mikrofon 1"
+
+ #~ msgid "Microphone 2"
+ #~ msgstr "Mikrofon 2"
+
+ #~ msgid "Digital Out"
+ #~ msgstr "Digital ut"
+
+ #~ msgid "Digital In"
+ #~ msgstr "Digital inn"
+
+ #~ msgid "HDMI"
+ #~ msgstr "HDMI"
+
+ #~ msgid "Modem"
+ #~ msgstr "Modem"
+
+ #~ msgid "Handset"
+ #~ msgstr "Håndsett"
+
+ #~ msgid "Other"
+ #~ msgstr "Annet"
+
+ #~ msgid "None"
+ #~ msgstr "Ingen"
+
+ #~ msgid "On"
+ #~ msgstr "På"
+
+ #~ msgid "Off"
+ #~ msgstr "Av"
+
+ #~ msgid "Mute"
+ #~ msgstr "Demp"
+
+ #~ msgid "Fast"
+ #~ msgstr "Rask"
+
+ #~ msgid "Very Low"
+ #~ msgstr "Meget lav"
+
+ #~ msgid "Low"
+ #~ msgstr "Lav"
+
+ #~ msgid "Medium"
+ #~ msgstr "Middels"
+
+ #~ msgid "High"
+ #~ msgstr "Høy"
+
+ #~ msgid "Very High"
+ #~ msgstr "Meget høy"
+
+ #~ msgid "Production"
+ #~ msgstr "Produksjon"
+
+ #~ msgid "Front Panel Microphone"
+ #~ msgstr "Fronpanelmikrofon"
+
+ #~ msgid "Front Panel Line In"
+ #~ msgstr "Frontpanel innlinje"
+
+ #~ msgid "Front Panel Headphones"
+ #~ msgstr "Frontpanelhodetelefoner"
+
+ #~ msgid "Front Panel Line Out"
+ #~ msgstr "Frontpanel utlinje"
+
+ #~ msgid "Green Connector"
+ #~ msgstr "Grønn kontakt"
+
+ #~ msgid "Pink Connector"
+ #~ msgstr "Rosa kontakt"
+
+ #~ msgid "Blue Connector"
+ #~ msgstr "Blå kontakt"
+
+ #~ msgid "White Connector"
+ #~ msgstr "Hvit kontakt"
+
+ #~ msgid "Black Connector"
+ #~ msgstr "Sort kontakt"
+
+ #~ msgid "Gray Connector"
+ #~ msgstr "Grå kontakt"
+
+ #~ msgid "Orange Connector"
+ #~ msgstr "Oransje kontakt"
+
+ #~ msgid "Red Connector"
+ #~ msgstr "Rød kontakt"
+
+ #~ msgid "Yellow Connector"
+ #~ msgstr "Gul kontakt"
+
+ #~ msgid "Green Front Panel Connector"
+ #~ msgstr "Grønn frontpanelkontakt"
+
+ #~ msgid "Pink Front Panel Connector"
+ #~ msgstr "Rosa frontpanelkontakt"
+
+ #~ msgid "Blue Front Panel Connector"
+ #~ msgstr "Blå frontpanelkontakt"
+
+ #~ msgid "White Front Panel Connector"
+ #~ msgstr "Hvit frontpanelkontakt"
+
+ #~ msgid "Black Front Panel Connector"
+ #~ msgstr "Sort frontpanelkontakt"
+
+ #~ msgid "Gray Front Panel Connector"
+ #~ msgstr "Grå frontpanelkontakt"
+
+ #~ msgid "Orange Front Panel Connector"
+ #~ msgstr "Oransje frontpanelkontakt"
+
+ #~ msgid "Red Front Panel Connector"
+ #~ msgstr "Rød frontpanelkontakt"
+
+ #~ msgid "Yellow Front Panel Connector"
+ #~ msgstr "Gul frontpanelkontakt"
+
+ #~ msgid "Spread Output"
+ #~ msgstr "Spredningsutgang"
+
+ #~ msgid "Downmix"
+ #~ msgstr "Nedmiks"
+
+ #~ msgid "Virtual Mixer Input"
+ #~ msgstr "Virtuell mikserinngang"
+
+ #~ msgid "Virtual Mixer Output"
+ #~ msgstr "Virtuell mikserutgang"
+
+ #~ msgid "Virtual Mixer Channels"
+ #~ msgstr "Virtuelle mikserkanaler"
+
+ #~ msgid "%s %d Function"
+ #~ msgstr "%s %d Funksjon"
+
+ #~ msgid "%s Function"
+ #~ msgstr "%s Funksjon"
+
+ #~ msgid "Got unexpected frame size of %u instead of %u."
+ #~ msgstr "Fikk uventet rammestørrelsen %u istedenfor %u."
+
+ #~ msgid "Error reading %d bytes on device '%s'."
+ #~ msgstr "Feil ved lesing av %d byte på enheten «%s»."
+
+ #~ msgid "Could not enqueue buffers in device '%s'."
+ #~ msgstr "Kunne ikke legge i kø buffere i enheten «%s»."
+
+ #~ msgid "Failed trying to get video frames from device '%s'."
+ #~ msgstr "Mislyktes i å få videobilder fra enhet «%s»."
+
+ #~ msgid "Failed after %d tries. device %s. system error: %s"
+ #~ msgstr "Mislyktes etter %d forsøk. Enhet %s. Systemfeil: %s"
+
+ #~ msgid "Describes the selected input element."
+ #~ msgstr "Beskriver valgt inndataelement."
--- /dev/null
+ /* SPDX-License-Identifier: ((GPL-2.0+ WITH Linux-syscall-note) OR BSD-3-Clause) */
+ /*
+ * Video for Linux Two header file
+ *
+ * Copyright (C) 1999-2012 the contributors
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * Alternatively you can redistribute this file under the terms of the
+ * BSD license as stated below:
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in
+ * the documentation and/or other materials provided with the
+ * distribution.
+ * 3. The names of its contributors may not be used to endorse or promote
+ * products derived from this software without specific prior written
+ * permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+ * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * Header file for v4l or V4L2 drivers and applications
+ * with public API.
+ * All kernel-specific stuff was moved to media/v4l2-dev.h, so
+ * no #if __KERNEL__ tests are allowed here
+ *
+ * See https://linuxtv.org for more info
+ *
+ * Author: Bill Dirks <bill@thedirks.org>
+ * Justin Schoeman
+ * Hans Verkuil <hverkuil@xs4all.nl>
+ * et al.
+ */
+ #ifndef _UAPI__LINUX_VIDEODEV2_H
+ #define _UAPI__LINUX_VIDEODEV2_H
+
+ #ifndef __KERNEL__
+ #include <sys/time.h>
+ #include <sys/ioctl.h>
+ #endif
+
+ #include "ext/types-compat.h"
+ #include "ext/v4l2-common.h"
+ #include "ext/v4l2-controls.h"
+
+ /*
+ * Common stuff for both V4L1 and V4L2
+ * Moved from videodev.h
+ */
+ #define VIDEO_MAX_FRAME 32
+ #define VIDEO_MAX_PLANES 8
+
+ /*
+ * M I S C E L L A N E O U S
+ */
+
+ /* Four-character-code (FOURCC) */
+ #define v4l2_fourcc(a, b, c, d)\
+ ((__u32)(a) | ((__u32)(b) << 8) | ((__u32)(c) << 16) | ((__u32)(d) << 24))
+ #define v4l2_fourcc_be(a, b, c, d) (v4l2_fourcc(a, b, c, d) | (1U << 31))
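
A FOURCC is just the four ASCII bytes stored little-endian in a __u32, with v4l2_fourcc_be() additionally setting bit 31 to flag a big-endian sample layout. A minimal standalone sketch of the packing; the two macros are restated only so this snippet compiles without the rest of this header:

#include <stdio.h>
#include <stdint.h>

#define v4l2_fourcc(a, b, c, d) \
	((uint32_t)(a) | ((uint32_t)(b) << 8) | ((uint32_t)(c) << 16) | ((uint32_t)(d) << 24))
#define v4l2_fourcc_be(a, b, c, d) (v4l2_fourcc(a, b, c, d) | (1U << 31))

int main(void)
{
	uint32_t fmt = v4l2_fourcc('Y', 'U', 'Y', 'V');
	uint32_t y16 = v4l2_fourcc('Y', '1', '6', ' ');
	uint32_t y16_be = v4l2_fourcc_be('Y', '1', '6', ' ');

	/* Prints 0x56595559: 'Y'=0x59 in the low byte, 'V'=0x56 in the high byte. */
	printf("YUYV packs to 0x%08x\n", (unsigned)fmt);
	/* The _be variant differs only in bit 31. */
	printf("Y16 vs Y16_BE differ only in bit 31: %s\n",
	       (y16 ^ y16_be) == (1U << 31) ? "yes" : "no");
	return 0;
}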
+
+ /*
+ * E N U M S
+ */
+ enum v4l2_field {
+ V4L2_FIELD_ANY = 0, /* driver can choose from none,
+ top, bottom, interlaced
+ depending on whatever it thinks
+ is appropriate ... */
+ V4L2_FIELD_NONE = 1, /* this device has no fields ... */
+ V4L2_FIELD_TOP = 2, /* top field only */
+ V4L2_FIELD_BOTTOM = 3, /* bottom field only */
+ V4L2_FIELD_INTERLACED = 4, /* both fields interlaced */
+ V4L2_FIELD_SEQ_TB = 5, /* both fields sequential into one
+ buffer, top-bottom order */
+ V4L2_FIELD_SEQ_BT = 6, /* same as above + bottom-top order */
+ V4L2_FIELD_ALTERNATE = 7, /* both fields alternating into
+ separate buffers */
+ V4L2_FIELD_INTERLACED_TB = 8, /* both fields interlaced, top field
+ first and the top field is
+ transmitted first */
+ V4L2_FIELD_INTERLACED_BT = 9, /* both fields interlaced, top field
+ first and the bottom field is
+ transmitted first */
+ };
+ #define V4L2_FIELD_HAS_TOP(field) \
+ ((field) == V4L2_FIELD_TOP ||\
+ (field) == V4L2_FIELD_INTERLACED ||\
+ (field) == V4L2_FIELD_INTERLACED_TB ||\
+ (field) == V4L2_FIELD_INTERLACED_BT ||\
+ (field) == V4L2_FIELD_SEQ_TB ||\
+ (field) == V4L2_FIELD_SEQ_BT)
+ #define V4L2_FIELD_HAS_BOTTOM(field) \
+ ((field) == V4L2_FIELD_BOTTOM ||\
+ (field) == V4L2_FIELD_INTERLACED ||\
+ (field) == V4L2_FIELD_INTERLACED_TB ||\
+ (field) == V4L2_FIELD_INTERLACED_BT ||\
+ (field) == V4L2_FIELD_SEQ_TB ||\
+ (field) == V4L2_FIELD_SEQ_BT)
+ #define V4L2_FIELD_HAS_BOTH(field) \
+ ((field) == V4L2_FIELD_INTERLACED ||\
+ (field) == V4L2_FIELD_INTERLACED_TB ||\
+ (field) == V4L2_FIELD_INTERLACED_BT ||\
+ (field) == V4L2_FIELD_SEQ_TB ||\
+ (field) == V4L2_FIELD_SEQ_BT)
+ #define V4L2_FIELD_HAS_T_OR_B(field) \
+ ((field) == V4L2_FIELD_BOTTOM ||\
+ (field) == V4L2_FIELD_TOP ||\
+ (field) == V4L2_FIELD_ALTERNATE)
+ #define V4L2_FIELD_IS_INTERLACED(field) \
+ ((field) == V4L2_FIELD_INTERLACED ||\
+ (field) == V4L2_FIELD_INTERLACED_TB ||\
+ (field) == V4L2_FIELD_INTERLACED_BT)
+ #define V4L2_FIELD_IS_SEQUENTIAL(field) \
+ ((field) == V4L2_FIELD_SEQ_TB ||\
+ (field) == V4L2_FIELD_SEQ_BT)
+
+ enum v4l2_buf_type {
+ V4L2_BUF_TYPE_VIDEO_CAPTURE = 1,
+ V4L2_BUF_TYPE_VIDEO_OUTPUT = 2,
+ V4L2_BUF_TYPE_VIDEO_OVERLAY = 3,
+ V4L2_BUF_TYPE_VBI_CAPTURE = 4,
+ V4L2_BUF_TYPE_VBI_OUTPUT = 5,
+ V4L2_BUF_TYPE_SLICED_VBI_CAPTURE = 6,
+ V4L2_BUF_TYPE_SLICED_VBI_OUTPUT = 7,
+ V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY = 8,
+ V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE = 9,
+ V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE = 10,
+ V4L2_BUF_TYPE_SDR_CAPTURE = 11,
+ V4L2_BUF_TYPE_SDR_OUTPUT = 12,
+ V4L2_BUF_TYPE_META_CAPTURE = 13,
+ V4L2_BUF_TYPE_META_OUTPUT = 14,
+ /* Deprecated, do not use */
+ V4L2_BUF_TYPE_PRIVATE = 0x80,
+ };
+
+ #define V4L2_TYPE_IS_MULTIPLANAR(type) \
+ ((type) == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE \
+ || (type) == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
+
+ #define V4L2_TYPE_IS_OUTPUT(type) \
+ ((type) == V4L2_BUF_TYPE_VIDEO_OUTPUT \
+ || (type) == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE \
+ || (type) == V4L2_BUF_TYPE_VIDEO_OVERLAY \
+ || (type) == V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY \
+ || (type) == V4L2_BUF_TYPE_VBI_OUTPUT \
+ || (type) == V4L2_BUF_TYPE_SLICED_VBI_OUTPUT \
+ || (type) == V4L2_BUF_TYPE_SDR_OUTPUT \
+ || (type) == V4L2_BUF_TYPE_META_OUTPUT)
+
+ #define V4L2_TYPE_IS_CAPTURE(type) (!V4L2_TYPE_IS_OUTPUT(type))
+
+ enum v4l2_tuner_type {
+ V4L2_TUNER_RADIO = 1,
+ V4L2_TUNER_ANALOG_TV = 2,
+ V4L2_TUNER_DIGITAL_TV = 3,
+ V4L2_TUNER_SDR = 4,
+ V4L2_TUNER_RF = 5,
+ };
+
+ /* Deprecated, do not use */
+ #define V4L2_TUNER_ADC V4L2_TUNER_SDR
+
+ enum v4l2_memory {
+ V4L2_MEMORY_MMAP = 1,
+ V4L2_MEMORY_USERPTR = 2,
+ V4L2_MEMORY_OVERLAY = 3,
+ V4L2_MEMORY_DMABUF = 4,
+ };
+
+ #define V4L2_FLAG_MEMORY_NON_CONSISTENT (1 << 0)
+
+ /* see also http://vektor.theorem.ca/graphics/ycbcr/ */
+ enum v4l2_colorspace {
+ /*
+ * Default colorspace, i.e. let the driver figure it out.
+ * Can only be used with video capture.
+ */
+ V4L2_COLORSPACE_DEFAULT = 0,
+
+ /* SMPTE 170M: used for broadcast NTSC/PAL SDTV */
+ V4L2_COLORSPACE_SMPTE170M = 1,
+
+ /* Obsolete pre-1998 SMPTE 240M HDTV standard, superseded by Rec 709 */
+ V4L2_COLORSPACE_SMPTE240M = 2,
+
+ /* Rec.709: used for HDTV */
+ V4L2_COLORSPACE_REC709 = 3,
+
+ /*
+ * Deprecated, do not use. No driver will ever return this. This was
+ * based on a misunderstanding of the bt878 datasheet.
+ */
+ V4L2_COLORSPACE_BT878 = 4,
+
+ /*
+ * NTSC 1953 colorspace. This only makes sense when dealing with
+ * really, really old NTSC recordings. Superseded by SMPTE 170M.
+ */
+ V4L2_COLORSPACE_470_SYSTEM_M = 5,
+
+ /*
+ * EBU Tech 3213 PAL/SECAM colorspace. This only makes sense when
+ * dealing with really old PAL/SECAM recordings. Superseded by
+ * SMPTE 170M.
+ */
+ V4L2_COLORSPACE_470_SYSTEM_BG = 6,
+
+ /*
+ * Effectively shorthand for V4L2_COLORSPACE_SRGB, V4L2_YCBCR_ENC_601
+ * and V4L2_QUANTIZATION_FULL_RANGE. To be used for (Motion-)JPEG.
+ */
+ V4L2_COLORSPACE_JPEG = 7,
+
+ /* For RGB colorspaces such as produced by most webcams. */
+ V4L2_COLORSPACE_SRGB = 8,
+
+ /* opRGB colorspace */
+ V4L2_COLORSPACE_OPRGB = 9,
+
+ /* BT.2020 colorspace, used for UHDTV. */
+ V4L2_COLORSPACE_BT2020 = 10,
+
+ /* Raw colorspace: for RAW unprocessed images */
+ V4L2_COLORSPACE_RAW = 11,
+
+ /* DCI-P3 colorspace, used by cinema projectors */
+ V4L2_COLORSPACE_DCI_P3 = 12,
+ };
+
+ /*
+ * Determine how COLORSPACE_DEFAULT should map to a proper colorspace.
+ * This depends on whether this is a SDTV image (use SMPTE 170M), an
+ * HDTV image (use Rec. 709), or something else (use sRGB).
+ */
+ #define V4L2_MAP_COLORSPACE_DEFAULT(is_sdtv, is_hdtv) \
+ ((is_sdtv) ? V4L2_COLORSPACE_SMPTE170M : \
+ ((is_hdtv) ? V4L2_COLORSPACE_REC709 : V4L2_COLORSPACE_SRGB))
+
+ enum v4l2_xfer_func {
+ /*
+ * Mapping of V4L2_XFER_FUNC_DEFAULT to actual transfer functions
+ * for the various colorspaces:
+ *
+ * V4L2_COLORSPACE_SMPTE170M, V4L2_COLORSPACE_470_SYSTEM_M,
+ * V4L2_COLORSPACE_470_SYSTEM_BG, V4L2_COLORSPACE_REC709 and
+ * V4L2_COLORSPACE_BT2020: V4L2_XFER_FUNC_709
+ *
+ * V4L2_COLORSPACE_SRGB, V4L2_COLORSPACE_JPEG: V4L2_XFER_FUNC_SRGB
+ *
+ * V4L2_COLORSPACE_OPRGB: V4L2_XFER_FUNC_OPRGB
+ *
+ * V4L2_COLORSPACE_SMPTE240M: V4L2_XFER_FUNC_SMPTE240M
+ *
+ * V4L2_COLORSPACE_RAW: V4L2_XFER_FUNC_NONE
+ *
+ * V4L2_COLORSPACE_DCI_P3: V4L2_XFER_FUNC_DCI_P3
+ */
+ V4L2_XFER_FUNC_DEFAULT = 0,
+ V4L2_XFER_FUNC_709 = 1,
+ V4L2_XFER_FUNC_SRGB = 2,
+ V4L2_XFER_FUNC_OPRGB = 3,
+ V4L2_XFER_FUNC_SMPTE240M = 4,
+ V4L2_XFER_FUNC_NONE = 5,
+ V4L2_XFER_FUNC_DCI_P3 = 6,
+ V4L2_XFER_FUNC_SMPTE2084 = 7,
+ };
+
+ /*
+ * Determine how XFER_FUNC_DEFAULT should map to a proper transfer function.
+ * This depends on the colorspace.
+ */
+ #define V4L2_MAP_XFER_FUNC_DEFAULT(colsp) \
+ ((colsp) == V4L2_COLORSPACE_OPRGB ? V4L2_XFER_FUNC_OPRGB : \
+ ((colsp) == V4L2_COLORSPACE_SMPTE240M ? V4L2_XFER_FUNC_SMPTE240M : \
+ ((colsp) == V4L2_COLORSPACE_DCI_P3 ? V4L2_XFER_FUNC_DCI_P3 : \
+ ((colsp) == V4L2_COLORSPACE_RAW ? V4L2_XFER_FUNC_NONE : \
+ ((colsp) == V4L2_COLORSPACE_SRGB || (colsp) == V4L2_COLORSPACE_JPEG ? \
+ V4L2_XFER_FUNC_SRGB : V4L2_XFER_FUNC_709)))))
+
+ enum v4l2_ycbcr_encoding {
+ /*
+ * Mapping of V4L2_YCBCR_ENC_DEFAULT to actual encodings for the
+ * various colorspaces:
+ *
+ * V4L2_COLORSPACE_SMPTE170M, V4L2_COLORSPACE_470_SYSTEM_M,
+ * V4L2_COLORSPACE_470_SYSTEM_BG, V4L2_COLORSPACE_SRGB,
+ * V4L2_COLORSPACE_OPRGB and V4L2_COLORSPACE_JPEG: V4L2_YCBCR_ENC_601
+ *
+ * V4L2_COLORSPACE_REC709 and V4L2_COLORSPACE_DCI_P3: V4L2_YCBCR_ENC_709
+ *
+ * V4L2_COLORSPACE_BT2020: V4L2_YCBCR_ENC_BT2020
+ *
+ * V4L2_COLORSPACE_SMPTE240M: V4L2_YCBCR_ENC_SMPTE240M
+ */
+ V4L2_YCBCR_ENC_DEFAULT = 0,
+
+ /* ITU-R 601 -- SDTV */
+ V4L2_YCBCR_ENC_601 = 1,
+
+ /* Rec. 709 -- HDTV */
+ V4L2_YCBCR_ENC_709 = 2,
+
+ /* ITU-R 601/EN 61966-2-4 Extended Gamut -- SDTV */
+ V4L2_YCBCR_ENC_XV601 = 3,
+
+ /* Rec. 709/EN 61966-2-4 Extended Gamut -- HDTV */
+ V4L2_YCBCR_ENC_XV709 = 4,
+
+ #ifndef __KERNEL__
+ /*
+ * sYCC (Y'CbCr encoding of sRGB), identical to ENC_601. It was added
+ * originally due to a misunderstanding of the sYCC standard. It should
+ * not be used; use V4L2_YCBCR_ENC_601 instead.
+ */
+ V4L2_YCBCR_ENC_SYCC = 5,
+ #endif
+
+ /* BT.2020 Non-constant Luminance Y'CbCr */
+ V4L2_YCBCR_ENC_BT2020 = 6,
+
+ /* BT.2020 Constant Luminance Y'CbcCrc */
+ V4L2_YCBCR_ENC_BT2020_CONST_LUM = 7,
+
+ /* SMPTE 240M -- Obsolete HDTV */
+ V4L2_YCBCR_ENC_SMPTE240M = 8,
+ };
+
+ /*
+ * enum v4l2_hsv_encoding values should not collide with the ones from
+ * enum v4l2_ycbcr_encoding.
+ */
+ enum v4l2_hsv_encoding {
+
+ /* Hue mapped to 0 - 179 */
+ V4L2_HSV_ENC_180 = 128,
+
+ /* Hue mapped to 0-255 */
+ V4L2_HSV_ENC_256 = 129,
+ };
+
+ /*
+ * Determine how YCBCR_ENC_DEFAULT should map to a proper Y'CbCr encoding.
+ * This depends on the colorspace.
+ */
+ #define V4L2_MAP_YCBCR_ENC_DEFAULT(colsp) \
+ (((colsp) == V4L2_COLORSPACE_REC709 || \
+ (colsp) == V4L2_COLORSPACE_DCI_P3) ? V4L2_YCBCR_ENC_709 : \
+ ((colsp) == V4L2_COLORSPACE_BT2020 ? V4L2_YCBCR_ENC_BT2020 : \
+ ((colsp) == V4L2_COLORSPACE_SMPTE240M ? V4L2_YCBCR_ENC_SMPTE240M : \
+ V4L2_YCBCR_ENC_601)))
+
+ enum v4l2_quantization {
+ /*
+ * The default for R'G'B' quantization is always full range, except
+ * for the BT2020 colorspace. For Y'CbCr the quantization is always
+ * limited range, except for COLORSPACE_JPEG: this is full range.
+ */
+ V4L2_QUANTIZATION_DEFAULT = 0,
+ V4L2_QUANTIZATION_FULL_RANGE = 1,
+ V4L2_QUANTIZATION_LIM_RANGE = 2,
+ };
+
+ /*
+ * Determine how QUANTIZATION_DEFAULT should map to a proper quantization.
+ * This depends on whether the image is RGB or not, the colorspace and the
+ * Y'CbCr encoding.
+ */
+ #define V4L2_MAP_QUANTIZATION_DEFAULT(is_rgb_or_hsv, colsp, ycbcr_enc) \
+ (((is_rgb_or_hsv) && (colsp) == V4L2_COLORSPACE_BT2020) ? \
+ V4L2_QUANTIZATION_LIM_RANGE : \
+ (((is_rgb_or_hsv) || (colsp) == V4L2_COLORSPACE_JPEG) ? \
+ V4L2_QUANTIZATION_FULL_RANGE : V4L2_QUANTIZATION_LIM_RANGE))
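
Taken together, the three mapping macros above let an application resolve every DEFAULT colorimetry value from the colorspace alone. A small sketch, assuming the installed <linux/videodev2.h> provides these macros, using a hypothetical HDTV capture as input:

#include <stdio.h>
#include <linux/videodev2.h>

int main(void)
{
	int is_sdtv = 0, is_hdtv = 1, is_rgb_or_hsv = 0;

	enum v4l2_colorspace colsp = V4L2_MAP_COLORSPACE_DEFAULT(is_sdtv, is_hdtv);
	enum v4l2_xfer_func xfer = V4L2_MAP_XFER_FUNC_DEFAULT(colsp);
	enum v4l2_ycbcr_encoding enc = V4L2_MAP_YCBCR_ENC_DEFAULT(colsp);
	enum v4l2_quantization quant =
		V4L2_MAP_QUANTIZATION_DEFAULT(is_rgb_or_hsv, colsp, enc);

	/* Expected for HDTV Y'CbCr: REC709 (3), XFER_FUNC_709 (1),
	 * YCBCR_ENC_709 (2), LIM_RANGE (2). */
	printf("colorspace=%d xfer=%d enc=%d quant=%d\n",
	       (int)colsp, (int)xfer, (int)enc, (int)quant);
	return 0;
}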
+
+ /*
+ * Deprecated names for opRGB colorspace (IEC 61966-2-5)
+ *
+ * WARNING: Please don't use these deprecated defines in your code, as
+ * there is a chance we have to remove them in the future.
+ */
+ #ifndef __KERNEL__
+ #define V4L2_COLORSPACE_ADOBERGB V4L2_COLORSPACE_OPRGB
+ #define V4L2_XFER_FUNC_ADOBERGB V4L2_XFER_FUNC_OPRGB
+ #endif
+
+ enum v4l2_priority {
+ V4L2_PRIORITY_UNSET = 0, /* not initialized */
+ V4L2_PRIORITY_BACKGROUND = 1,
+ V4L2_PRIORITY_INTERACTIVE = 2,
+ V4L2_PRIORITY_RECORD = 3,
+ V4L2_PRIORITY_DEFAULT = V4L2_PRIORITY_INTERACTIVE,
+ };
+
+ struct v4l2_rect {
+ __s32 left;
+ __s32 top;
+ __u32 width;
+ __u32 height;
+ };
+
+ struct v4l2_fract {
+ __u32 numerator;
+ __u32 denominator;
+ };
+
+ struct v4l2_area {
+ __u32 width;
+ __u32 height;
+ };
+
+ /**
+ * struct v4l2_capability - Describes V4L2 device caps returned by VIDIOC_QUERYCAP
+ *
+ * @driver: name of the driver module (e.g. "bttv")
+ * @card: name of the card (e.g. "Hauppauge WinTV")
+ * @bus_info: name of the bus (e.g. "PCI:" + pci_name(pci_dev) )
+ * @version: KERNEL_VERSION
+ * @capabilities: capabilities of the physical device as a whole
+ * @device_caps: capabilities accessed via this particular device (node)
+ * @reserved: reserved fields for future extensions
+ */
+ struct v4l2_capability {
+ __u8 driver[16];
+ __u8 card[32];
+ __u8 bus_info[32];
+ __u32 version;
+ __u32 capabilities;
+ __u32 device_caps;
+ __u32 reserved[3];
+ };
+
+ /* Values for 'capabilities' field */
+ #define V4L2_CAP_VIDEO_CAPTURE 0x00000001 /* Is a video capture device */
+ #define V4L2_CAP_VIDEO_OUTPUT 0x00000002 /* Is a video output device */
+ #define V4L2_CAP_VIDEO_OVERLAY 0x00000004 /* Can do video overlay */
+ #define V4L2_CAP_VBI_CAPTURE 0x00000010 /* Is a raw VBI capture device */
+ #define V4L2_CAP_VBI_OUTPUT 0x00000020 /* Is a raw VBI output device */
+ #define V4L2_CAP_SLICED_VBI_CAPTURE 0x00000040 /* Is a sliced VBI capture device */
+ #define V4L2_CAP_SLICED_VBI_OUTPUT 0x00000080 /* Is a sliced VBI output device */
+ #define V4L2_CAP_RDS_CAPTURE 0x00000100 /* RDS data capture */
+ #define V4L2_CAP_VIDEO_OUTPUT_OVERLAY 0x00000200 /* Can do video output overlay */
+ #define V4L2_CAP_HW_FREQ_SEEK 0x00000400 /* Can do hardware frequency seek */
+ #define V4L2_CAP_RDS_OUTPUT 0x00000800 /* Is an RDS encoder */
+
+ /* Is a video capture device that supports multiplanar formats */
+ #define V4L2_CAP_VIDEO_CAPTURE_MPLANE 0x00001000
+ /* Is a video output device that supports multiplanar formats */
+ #define V4L2_CAP_VIDEO_OUTPUT_MPLANE 0x00002000
+ /* Is a video mem-to-mem device that supports multiplanar formats */
+ #define V4L2_CAP_VIDEO_M2M_MPLANE 0x00004000
+ /* Is a video mem-to-mem device */
+ #define V4L2_CAP_VIDEO_M2M 0x00008000
+
+ #define V4L2_CAP_TUNER 0x00010000 /* has a tuner */
+ #define V4L2_CAP_AUDIO 0x00020000 /* has audio support */
+ #define V4L2_CAP_RADIO 0x00040000 /* is a radio device */
+ #define V4L2_CAP_MODULATOR 0x00080000 /* has a modulator */
+
+ #define V4L2_CAP_SDR_CAPTURE 0x00100000 /* Is a SDR capture device */
+ #define V4L2_CAP_EXT_PIX_FORMAT 0x00200000 /* Supports the extended pixel format */
+ #define V4L2_CAP_SDR_OUTPUT 0x00400000 /* Is a SDR output device */
+ #define V4L2_CAP_META_CAPTURE 0x00800000 /* Is a metadata capture device */
+
+ #define V4L2_CAP_READWRITE 0x01000000 /* read/write systemcalls */
+ #define V4L2_CAP_ASYNCIO 0x02000000 /* async I/O */
+ #define V4L2_CAP_STREAMING 0x04000000 /* streaming I/O ioctls */
+ #define V4L2_CAP_META_OUTPUT 0x08000000 /* Is a metadata output device */
+
+ #define V4L2_CAP_TOUCH 0x10000000 /* Is a touch device */
+
+ #define V4L2_CAP_IO_MC 0x20000000 /* Is input/output controlled by the media controller */
+
+ #define V4L2_CAP_DEVICE_CAPS 0x80000000 /* sets device capabilities field */
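
A hedged sketch of how these bits are typically consumed: VIDIOC_QUERYCAP fills a struct v4l2_capability, and device_caps should be preferred over capabilities whenever V4L2_CAP_DEVICE_CAPS is set. The device node /dev/video0 is only an assumption here.

#include <stdio.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

int main(void)
{
	struct v4l2_capability cap;
	__u32 caps;
	int fd = open("/dev/video0", O_RDWR); /* assumed device node */

	if (fd < 0) { perror("open"); return 1; }

	memset(&cap, 0, sizeof(cap));
	if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) { perror("VIDIOC_QUERYCAP"); return 1; }

	printf("driver=%s card=%s\n", (char *)cap.driver, (char *)cap.card);
	/* device_caps is only valid when V4L2_CAP_DEVICE_CAPS is set. */
	caps = (cap.capabilities & V4L2_CAP_DEVICE_CAPS) ?
			cap.device_caps : cap.capabilities;
	printf("capture: %s, streaming: %s\n",
	       (caps & V4L2_CAP_VIDEO_CAPTURE) ? "yes" : "no",
	       (caps & V4L2_CAP_STREAMING) ? "yes" : "no");
	close(fd);
	return 0;
}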
+
+ /*
+ * V I D E O I M A G E F O R M A T
+ */
+ struct v4l2_pix_format {
+ __u32 width;
+ __u32 height;
+ __u32 pixelformat;
+ __u32 field; /* enum v4l2_field */
+ __u32 bytesperline; /* for padding, zero if unused */
+ __u32 sizeimage;
+ __u32 colorspace; /* enum v4l2_colorspace */
+ __u32 priv; /* private data, depends on pixelformat */
+ __u32 flags; /* format flags (V4L2_PIX_FMT_FLAG_*) */
+ union {
+ /* enum v4l2_ycbcr_encoding */
+ __u32 ycbcr_enc;
+ /* enum v4l2_hsv_encoding */
+ __u32 hsv_enc;
+ };
+ __u32 quantization; /* enum v4l2_quantization */
+ __u32 xfer_func; /* enum v4l2_xfer_func */
+ };
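
struct v4l2_pix_format is normally embedded in struct v4l2_format (defined further down in this header) and negotiated with VIDIOC_S_FMT; drivers adjust any field they cannot satisfy, so the struct must be read back after the call. A sketch, again assuming a capture device at /dev/video0:

#include <stdio.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

int main(void)
{
	struct v4l2_format fmt;
	int fd = open("/dev/video0", O_RDWR); /* assumed device node */

	if (fd < 0) { perror("open"); return 1; }

	memset(&fmt, 0, sizeof(fmt));
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	fmt.fmt.pix.width = 640;
	fmt.fmt.pix.height = 480;
	fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
	fmt.fmt.pix.field = V4L2_FIELD_NONE;

	if (ioctl(fd, VIDIOC_S_FMT, &fmt) < 0) { perror("VIDIOC_S_FMT"); return 1; }

	/* The driver may have changed width/height/pixelformat to what it supports. */
	printf("got %ux%u, %u bytes/line, %u bytes/image\n",
	       fmt.fmt.pix.width, fmt.fmt.pix.height,
	       fmt.fmt.pix.bytesperline, fmt.fmt.pix.sizeimage);
	close(fd);
	return 0;
}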
+
+ /* Pixel format FOURCC depth Description */
+
+ /* RGB formats */
+ #define V4L2_PIX_FMT_RGB332 v4l2_fourcc('R', 'G', 'B', '1') /* 8 RGB-3-3-2 */
+ #define V4L2_PIX_FMT_RGB444 v4l2_fourcc('R', '4', '4', '4') /* 16 xxxxrrrr ggggbbbb */
+ #define V4L2_PIX_FMT_ARGB444 v4l2_fourcc('A', 'R', '1', '2') /* 16 aaaarrrr ggggbbbb */
+ #define V4L2_PIX_FMT_XRGB444 v4l2_fourcc('X', 'R', '1', '2') /* 16 xxxxrrrr ggggbbbb */
+ #define V4L2_PIX_FMT_RGBA444 v4l2_fourcc('R', 'A', '1', '2') /* 16 rrrrgggg bbbbaaaa */
+ #define V4L2_PIX_FMT_RGBX444 v4l2_fourcc('R', 'X', '1', '2') /* 16 rrrrgggg bbbbxxxx */
+ #define V4L2_PIX_FMT_ABGR444 v4l2_fourcc('A', 'B', '1', '2') /* 16 aaaabbbb ggggrrrr */
+ #define V4L2_PIX_FMT_XBGR444 v4l2_fourcc('X', 'B', '1', '2') /* 16 xxxxbbbb ggggrrrr */
+
+ /*
+ * Originally this had 'BA12' as fourcc, but this clashed with the older
+ * V4L2_PIX_FMT_SGRBG12 which inexplicably used that same fourcc.
+ * So use 'GA12' instead for V4L2_PIX_FMT_BGRA444.
+ */
+ #define V4L2_PIX_FMT_BGRA444 v4l2_fourcc('G', 'A', '1', '2') /* 16 bbbbgggg rrrraaaa */
+ #define V4L2_PIX_FMT_BGRX444 v4l2_fourcc('B', 'X', '1', '2') /* 16 bbbbgggg rrrrxxxx */
+ #define V4L2_PIX_FMT_RGB555 v4l2_fourcc('R', 'G', 'B', 'O') /* 16 RGB-5-5-5 */
+ #define V4L2_PIX_FMT_ARGB555 v4l2_fourcc('A', 'R', '1', '5') /* 16 ARGB-1-5-5-5 */
+ #define V4L2_PIX_FMT_XRGB555 v4l2_fourcc('X', 'R', '1', '5') /* 16 XRGB-1-5-5-5 */
+ #define V4L2_PIX_FMT_RGBA555 v4l2_fourcc('R', 'A', '1', '5') /* 16 RGBA-5-5-5-1 */
+ #define V4L2_PIX_FMT_RGBX555 v4l2_fourcc('R', 'X', '1', '5') /* 16 RGBX-5-5-5-1 */
+ #define V4L2_PIX_FMT_ABGR555 v4l2_fourcc('A', 'B', '1', '5') /* 16 ABGR-1-5-5-5 */
+ #define V4L2_PIX_FMT_XBGR555 v4l2_fourcc('X', 'B', '1', '5') /* 16 XBGR-1-5-5-5 */
+ #define V4L2_PIX_FMT_BGRA555 v4l2_fourcc('B', 'A', '1', '5') /* 16 BGRA-5-5-5-1 */
+ #define V4L2_PIX_FMT_BGRX555 v4l2_fourcc('B', 'X', '1', '5') /* 16 BGRX-5-5-5-1 */
+ #define V4L2_PIX_FMT_RGB565 v4l2_fourcc('R', 'G', 'B', 'P') /* 16 RGB-5-6-5 */
+ #define V4L2_PIX_FMT_RGB555X v4l2_fourcc('R', 'G', 'B', 'Q') /* 16 RGB-5-5-5 BE */
+ #define V4L2_PIX_FMT_ARGB555X v4l2_fourcc_be('A', 'R', '1', '5') /* 16 ARGB-5-5-5 BE */
+ #define V4L2_PIX_FMT_XRGB555X v4l2_fourcc_be('X', 'R', '1', '5') /* 16 XRGB-5-5-5 BE */
+ #define V4L2_PIX_FMT_RGB565X v4l2_fourcc('R', 'G', 'B', 'R') /* 16 RGB-5-6-5 BE */
+ #define V4L2_PIX_FMT_BGR666 v4l2_fourcc('B', 'G', 'R', 'H') /* 18 BGR-6-6-6 */
+ #define V4L2_PIX_FMT_BGR24 v4l2_fourcc('B', 'G', 'R', '3') /* 24 BGR-8-8-8 */
+ #define V4L2_PIX_FMT_RGB24 v4l2_fourcc('R', 'G', 'B', '3') /* 24 RGB-8-8-8 */
+ #define V4L2_PIX_FMT_BGR32 v4l2_fourcc('B', 'G', 'R', '4') /* 32 BGR-8-8-8-8 */
+ #define V4L2_PIX_FMT_ABGR32 v4l2_fourcc('A', 'R', '2', '4') /* 32 BGRA-8-8-8-8 */
+ #define V4L2_PIX_FMT_XBGR32 v4l2_fourcc('X', 'R', '2', '4') /* 32 BGRX-8-8-8-8 */
+ #define V4L2_PIX_FMT_BGRA32 v4l2_fourcc('R', 'A', '2', '4') /* 32 ABGR-8-8-8-8 */
+ #define V4L2_PIX_FMT_BGRX32 v4l2_fourcc('R', 'X', '2', '4') /* 32 XBGR-8-8-8-8 */
+ #define V4L2_PIX_FMT_RGB32 v4l2_fourcc('R', 'G', 'B', '4') /* 32 RGB-8-8-8-8 */
+ #define V4L2_PIX_FMT_RGBA32 v4l2_fourcc('A', 'B', '2', '4') /* 32 RGBA-8-8-8-8 */
+ #define V4L2_PIX_FMT_RGBX32 v4l2_fourcc('X', 'B', '2', '4') /* 32 RGBX-8-8-8-8 */
+ #define V4L2_PIX_FMT_ARGB32 v4l2_fourcc('B', 'A', '2', '4') /* 32 ARGB-8-8-8-8 */
+ #define V4L2_PIX_FMT_XRGB32 v4l2_fourcc('B', 'X', '2', '4') /* 32 XRGB-8-8-8-8 */
+
+ /* Grey formats */
+ #define V4L2_PIX_FMT_GREY v4l2_fourcc('G', 'R', 'E', 'Y') /* 8 Greyscale */
+ #define V4L2_PIX_FMT_Y4 v4l2_fourcc('Y', '0', '4', ' ') /* 4 Greyscale */
+ #define V4L2_PIX_FMT_Y6 v4l2_fourcc('Y', '0', '6', ' ') /* 6 Greyscale */
+ #define V4L2_PIX_FMT_Y10 v4l2_fourcc('Y', '1', '0', ' ') /* 10 Greyscale */
+ #define V4L2_PIX_FMT_Y12 v4l2_fourcc('Y', '1', '2', ' ') /* 12 Greyscale */
+ #define V4L2_PIX_FMT_Y14 v4l2_fourcc('Y', '1', '4', ' ') /* 14 Greyscale */
+ #define V4L2_PIX_FMT_Y16 v4l2_fourcc('Y', '1', '6', ' ') /* 16 Greyscale */
+ #define V4L2_PIX_FMT_Y16_BE v4l2_fourcc_be('Y', '1', '6', ' ') /* 16 Greyscale BE */
+
+ /* Grey bit-packed formats */
+ #define V4L2_PIX_FMT_Y10BPACK v4l2_fourcc('Y', '1', '0', 'B') /* 10 Greyscale bit-packed */
+ #define V4L2_PIX_FMT_Y10P v4l2_fourcc('Y', '1', '0', 'P') /* 10 Greyscale, MIPI RAW10 packed */
+
+ /* Palette formats */
+ #define V4L2_PIX_FMT_PAL8 v4l2_fourcc('P', 'A', 'L', '8') /* 8 8-bit palette */
+
+ /* Chrominance formats */
+ #define V4L2_PIX_FMT_UV8 v4l2_fourcc('U', 'V', '8', ' ') /* 8 UV 4:4 */
+
+ /* Luminance+Chrominance formats */
+ #define V4L2_PIX_FMT_YUYV v4l2_fourcc('Y', 'U', 'Y', 'V') /* 16 YUV 4:2:2 */
+ #define V4L2_PIX_FMT_YYUV v4l2_fourcc('Y', 'Y', 'U', 'V') /* 16 YUV 4:2:2 */
+ #define V4L2_PIX_FMT_YVYU v4l2_fourcc('Y', 'V', 'Y', 'U') /* 16 YVU 4:2:2 */
+ #define V4L2_PIX_FMT_UYVY v4l2_fourcc('U', 'Y', 'V', 'Y') /* 16 YUV 4:2:2 */
+ #define V4L2_PIX_FMT_VYUY v4l2_fourcc('V', 'Y', 'U', 'Y') /* 16 YUV 4:2:2 */
+ #define V4L2_PIX_FMT_Y41P v4l2_fourcc('Y', '4', '1', 'P') /* 12 YUV 4:1:1 */
+ #define V4L2_PIX_FMT_YUV444 v4l2_fourcc('Y', '4', '4', '4') /* 16 xxxxyyyy uuuuvvvv */
+ #define V4L2_PIX_FMT_YUV555 v4l2_fourcc('Y', 'U', 'V', 'O') /* 16 YUV-5-5-5 */
+ #define V4L2_PIX_FMT_YUV565 v4l2_fourcc('Y', 'U', 'V', 'P') /* 16 YUV-5-6-5 */
+ #define V4L2_PIX_FMT_YUV32 v4l2_fourcc('Y', 'U', 'V', '4') /* 32 YUV-8-8-8-8 */
+ #define V4L2_PIX_FMT_AYUV32 v4l2_fourcc('A', 'Y', 'U', 'V') /* 32 AYUV-8-8-8-8 */
+ #define V4L2_PIX_FMT_XYUV32 v4l2_fourcc('X', 'Y', 'U', 'V') /* 32 XYUV-8-8-8-8 */
+ #define V4L2_PIX_FMT_VUYA32 v4l2_fourcc('V', 'U', 'Y', 'A') /* 32 VUYA-8-8-8-8 */
+ #define V4L2_PIX_FMT_VUYX32 v4l2_fourcc('V', 'U', 'Y', 'X') /* 32 VUYX-8-8-8-8 */
+ #define V4L2_PIX_FMT_HI240 v4l2_fourcc('H', 'I', '2', '4') /* 8 8-bit color */
+ #define V4L2_PIX_FMT_HM12 v4l2_fourcc('H', 'M', '1', '2') /* 8 YUV 4:2:0 16x16 macroblocks */
+ #define V4L2_PIX_FMT_M420 v4l2_fourcc('M', '4', '2', '0') /* 12 YUV 4:2:0 2 lines y, 1 line uv interleaved */
+
+ /* two planes -- one Y, one Cr + Cb interleaved */
+ #define V4L2_PIX_FMT_NV12 v4l2_fourcc('N', 'V', '1', '2') /* 12 Y/CbCr 4:2:0 */
+ #define V4L2_PIX_FMT_NV21 v4l2_fourcc('N', 'V', '2', '1') /* 12 Y/CrCb 4:2:0 */
+ #define V4L2_PIX_FMT_NV16 v4l2_fourcc('N', 'V', '1', '6') /* 16 Y/CbCr 4:2:2 */
+ #define V4L2_PIX_FMT_NV61 v4l2_fourcc('N', 'V', '6', '1') /* 16 Y/CrCb 4:2:2 */
+ #define V4L2_PIX_FMT_NV24 v4l2_fourcc('N', 'V', '2', '4') /* 24 Y/CbCr 4:4:4 */
+ #define V4L2_PIX_FMT_NV42 v4l2_fourcc('N', 'V', '4', '2') /* 24 Y/CrCb 4:4:4 */
+
+ /* two non contiguous planes - one Y, one Cr + Cb interleaved */
+ #define V4L2_PIX_FMT_NV12M v4l2_fourcc('N', 'M', '1', '2') /* 12 Y/CbCr 4:2:0 */
+ #define V4L2_PIX_FMT_NV21M v4l2_fourcc('N', 'M', '2', '1') /* 12 Y/CrCb 4:2:0 */
+ #define V4L2_PIX_FMT_NV16M v4l2_fourcc('N', 'M', '1', '6') /* 16 Y/CbCr 4:2:2 */
+ #define V4L2_PIX_FMT_NV61M v4l2_fourcc('N', 'M', '6', '1') /* 16 Y/CrCb 4:2:2 */
+ #define V4L2_PIX_FMT_NV12MT v4l2_fourcc('T', 'M', '1', '2') /* 12 Y/CbCr 4:2:0 64x32 macroblocks */
+ #define V4L2_PIX_FMT_NV12MT_16X16 v4l2_fourcc('V', 'M', '1', '2') /* 12 Y/CbCr 4:2:0 16x16 macroblocks */
+
+ /* three planes - Y Cb, Cr */
+ #define V4L2_PIX_FMT_YUV410 v4l2_fourcc('Y', 'U', 'V', '9') /* 9 YUV 4:1:0 */
+ #define V4L2_PIX_FMT_YVU410 v4l2_fourcc('Y', 'V', 'U', '9') /* 9 YVU 4:1:0 */
+ #define V4L2_PIX_FMT_YUV411P v4l2_fourcc('4', '1', '1', 'P') /* 12 YVU411 planar */
+ #define V4L2_PIX_FMT_YUV420 v4l2_fourcc('Y', 'U', '1', '2') /* 12 YUV 4:2:0 */
+ #define V4L2_PIX_FMT_YVU420 v4l2_fourcc('Y', 'V', '1', '2') /* 12 YVU 4:2:0 */
+ #define V4L2_PIX_FMT_YUV422P v4l2_fourcc('4', '2', '2', 'P') /* 16 YVU422 planar */
+
+ /* three non contiguous planes - Y, Cb, Cr */
+ #define V4L2_PIX_FMT_YUV420M v4l2_fourcc('Y', 'M', '1', '2') /* 12 YUV420 planar */
+ #define V4L2_PIX_FMT_YVU420M v4l2_fourcc('Y', 'M', '2', '1') /* 12 YVU420 planar */
+ #define V4L2_PIX_FMT_YUV422M v4l2_fourcc('Y', 'M', '1', '6') /* 16 YUV422 planar */
+ #define V4L2_PIX_FMT_YVU422M v4l2_fourcc('Y', 'M', '6', '1') /* 16 YVU422 planar */
+ #define V4L2_PIX_FMT_YUV444M v4l2_fourcc('Y', 'M', '2', '4') /* 24 YUV444 planar */
+ #define V4L2_PIX_FMT_YVU444M v4l2_fourcc('Y', 'M', '4', '2') /* 24 YVU444 planar */
+
+ /* Bayer formats - see http://www.siliconimaging.com/RGB%20Bayer.htm */
+ #define V4L2_PIX_FMT_SBGGR8 v4l2_fourcc('B', 'A', '8', '1') /* 8 BGBG.. GRGR.. */
+ #define V4L2_PIX_FMT_SGBRG8 v4l2_fourcc('G', 'B', 'R', 'G') /* 8 GBGB.. RGRG.. */
+ #define V4L2_PIX_FMT_SGRBG8 v4l2_fourcc('G', 'R', 'B', 'G') /* 8 GRGR.. BGBG.. */
+ #define V4L2_PIX_FMT_SRGGB8 v4l2_fourcc('R', 'G', 'G', 'B') /* 8 RGRG.. GBGB.. */
+ #define V4L2_PIX_FMT_SBGGR10 v4l2_fourcc('B', 'G', '1', '0') /* 10 BGBG.. GRGR.. */
+ #define V4L2_PIX_FMT_SGBRG10 v4l2_fourcc('G', 'B', '1', '0') /* 10 GBGB.. RGRG.. */
+ #define V4L2_PIX_FMT_SGRBG10 v4l2_fourcc('B', 'A', '1', '0') /* 10 GRGR.. BGBG.. */
+ #define V4L2_PIX_FMT_SRGGB10 v4l2_fourcc('R', 'G', '1', '0') /* 10 RGRG.. GBGB.. */
+ /* 10bit raw bayer packed, 5 bytes for every 4 pixels */
+ #define V4L2_PIX_FMT_SBGGR10P v4l2_fourcc('p', 'B', 'A', 'A')
+ #define V4L2_PIX_FMT_SGBRG10P v4l2_fourcc('p', 'G', 'A', 'A')
+ #define V4L2_PIX_FMT_SGRBG10P v4l2_fourcc('p', 'g', 'A', 'A')
+ #define V4L2_PIX_FMT_SRGGB10P v4l2_fourcc('p', 'R', 'A', 'A')
+ /* 10bit raw bayer a-law compressed to 8 bits */
+ #define V4L2_PIX_FMT_SBGGR10ALAW8 v4l2_fourcc('a', 'B', 'A', '8')
+ #define V4L2_PIX_FMT_SGBRG10ALAW8 v4l2_fourcc('a', 'G', 'A', '8')
+ #define V4L2_PIX_FMT_SGRBG10ALAW8 v4l2_fourcc('a', 'g', 'A', '8')
+ #define V4L2_PIX_FMT_SRGGB10ALAW8 v4l2_fourcc('a', 'R', 'A', '8')
+ /* 10bit raw bayer DPCM compressed to 8 bits */
+ #define V4L2_PIX_FMT_SBGGR10DPCM8 v4l2_fourcc('b', 'B', 'A', '8')
+ #define V4L2_PIX_FMT_SGBRG10DPCM8 v4l2_fourcc('b', 'G', 'A', '8')
+ #define V4L2_PIX_FMT_SGRBG10DPCM8 v4l2_fourcc('B', 'D', '1', '0')
+ #define V4L2_PIX_FMT_SRGGB10DPCM8 v4l2_fourcc('b', 'R', 'A', '8')
+ #define V4L2_PIX_FMT_SBGGR12 v4l2_fourcc('B', 'G', '1', '2') /* 12 BGBG.. GRGR.. */
+ #define V4L2_PIX_FMT_SGBRG12 v4l2_fourcc('G', 'B', '1', '2') /* 12 GBGB.. RGRG.. */
+ #define V4L2_PIX_FMT_SGRBG12 v4l2_fourcc('B', 'A', '1', '2') /* 12 GRGR.. BGBG.. */
+ #define V4L2_PIX_FMT_SRGGB12 v4l2_fourcc('R', 'G', '1', '2') /* 12 RGRG.. GBGB.. */
+ /* 12bit raw bayer packed, 6 bytes for every 4 pixels */
+ #define V4L2_PIX_FMT_SBGGR12P v4l2_fourcc('p', 'B', 'C', 'C')
+ #define V4L2_PIX_FMT_SGBRG12P v4l2_fourcc('p', 'G', 'C', 'C')
+ #define V4L2_PIX_FMT_SGRBG12P v4l2_fourcc('p', 'g', 'C', 'C')
+ #define V4L2_PIX_FMT_SRGGB12P v4l2_fourcc('p', 'R', 'C', 'C')
+ #define V4L2_PIX_FMT_SBGGR14 v4l2_fourcc('B', 'G', '1', '4') /* 14 BGBG.. GRGR.. */
+ #define V4L2_PIX_FMT_SGBRG14 v4l2_fourcc('G', 'B', '1', '4') /* 14 GBGB.. RGRG.. */
+ #define V4L2_PIX_FMT_SGRBG14 v4l2_fourcc('G', 'R', '1', '4') /* 14 GRGR.. BGBG.. */
+ #define V4L2_PIX_FMT_SRGGB14 v4l2_fourcc('R', 'G', '1', '4') /* 14 RGRG.. GBGB.. */
+ /* 14bit raw bayer packed, 7 bytes for every 4 pixels */
+ #define V4L2_PIX_FMT_SBGGR14P v4l2_fourcc('p', 'B', 'E', 'E')
+ #define V4L2_PIX_FMT_SGBRG14P v4l2_fourcc('p', 'G', 'E', 'E')
+ #define V4L2_PIX_FMT_SGRBG14P v4l2_fourcc('p', 'g', 'E', 'E')
+ #define V4L2_PIX_FMT_SRGGB14P v4l2_fourcc('p', 'R', 'E', 'E')
+ #define V4L2_PIX_FMT_SBGGR16 v4l2_fourcc('B', 'Y', 'R', '2') /* 16 BGBG.. GRGR.. */
+ #define V4L2_PIX_FMT_SGBRG16 v4l2_fourcc('G', 'B', '1', '6') /* 16 GBGB.. RGRG.. */
+ #define V4L2_PIX_FMT_SGRBG16 v4l2_fourcc('G', 'R', '1', '6') /* 16 GRGR.. BGBG.. */
+ #define V4L2_PIX_FMT_SRGGB16 v4l2_fourcc('R', 'G', '1', '6') /* 16 RGRG.. GBGB.. */
+
+ /* HSV formats */
+ #define V4L2_PIX_FMT_HSV24 v4l2_fourcc('H', 'S', 'V', '3')
+ #define V4L2_PIX_FMT_HSV32 v4l2_fourcc('H', 'S', 'V', '4')
+
+ /* compressed formats */
+ #define V4L2_PIX_FMT_MJPEG v4l2_fourcc('M', 'J', 'P', 'G') /* Motion-JPEG */
+ #define V4L2_PIX_FMT_JPEG v4l2_fourcc('J', 'P', 'E', 'G') /* JFIF JPEG */
+ #define V4L2_PIX_FMT_DV v4l2_fourcc('d', 'v', 's', 'd') /* 1394 */
+ #define V4L2_PIX_FMT_MPEG v4l2_fourcc('M', 'P', 'E', 'G') /* MPEG-1/2/4 Multiplexed */
+ #define V4L2_PIX_FMT_H264 v4l2_fourcc('H', '2', '6', '4') /* H264 with start codes */
+ #define V4L2_PIX_FMT_H264_NO_SC v4l2_fourcc('A', 'V', 'C', '1') /* H264 without start codes */
+ #define V4L2_PIX_FMT_H264_MVC v4l2_fourcc('M', '2', '6', '4') /* H264 MVC */
+ #define V4L2_PIX_FMT_H263 v4l2_fourcc('H', '2', '6', '3') /* H263 */
+ #define V4L2_PIX_FMT_MPEG1 v4l2_fourcc('M', 'P', 'G', '1') /* MPEG-1 ES */
+ #define V4L2_PIX_FMT_MPEG2 v4l2_fourcc('M', 'P', 'G', '2') /* MPEG-2 ES */
+ #define V4L2_PIX_FMT_MPEG2_SLICE v4l2_fourcc('M', 'G', '2', 'S') /* MPEG-2 parsed slice data */
+ #define V4L2_PIX_FMT_MPEG4 v4l2_fourcc('M', 'P', 'G', '4') /* MPEG-4 part 2 ES */
+ #define V4L2_PIX_FMT_XVID v4l2_fourcc('X', 'V', 'I', 'D') /* Xvid */
+ #define V4L2_PIX_FMT_VC1_ANNEX_G v4l2_fourcc('V', 'C', '1', 'G') /* SMPTE 421M Annex G compliant stream */
+ #define V4L2_PIX_FMT_VC1_ANNEX_L v4l2_fourcc('V', 'C', '1', 'L') /* SMPTE 421M Annex L compliant stream */
+ #define V4L2_PIX_FMT_VP8 v4l2_fourcc('V', 'P', '8', '0') /* VP8 */
+ #define V4L2_PIX_FMT_VP9 v4l2_fourcc('V', 'P', '9', '0') /* VP9 */
+ #define V4L2_PIX_FMT_HEVC v4l2_fourcc('H', 'E', 'V', 'C') /* HEVC aka H.265 */
+ #define V4L2_PIX_FMT_FWHT v4l2_fourcc('F', 'W', 'H', 'T') /* Fast Walsh Hadamard Transform (vicodec) */
+ #define V4L2_PIX_FMT_FWHT_STATELESS v4l2_fourcc('S', 'F', 'W', 'H') /* Stateless FWHT (vicodec) */
+
+ /* Vendor-specific formats */
+ #define V4L2_PIX_FMT_CPIA1 v4l2_fourcc('C', 'P', 'I', 'A') /* cpia1 YUV */
+ #define V4L2_PIX_FMT_WNVA v4l2_fourcc('W', 'N', 'V', 'A') /* Winnov hw compress */
+ #define V4L2_PIX_FMT_SN9C10X v4l2_fourcc('S', '9', '1', '0') /* SN9C10x compression */
+ #define V4L2_PIX_FMT_SN9C20X_I420 v4l2_fourcc('S', '9', '2', '0') /* SN9C20x YUV 4:2:0 */
+ #define V4L2_PIX_FMT_PWC1 v4l2_fourcc('P', 'W', 'C', '1') /* pwc older webcam */
+ #define V4L2_PIX_FMT_PWC2 v4l2_fourcc('P', 'W', 'C', '2') /* pwc newer webcam */
+ #define V4L2_PIX_FMT_ET61X251 v4l2_fourcc('E', '6', '2', '5') /* ET61X251 compression */
+ #define V4L2_PIX_FMT_SPCA501 v4l2_fourcc('S', '5', '0', '1') /* YUYV per line */
+ #define V4L2_PIX_FMT_SPCA505 v4l2_fourcc('S', '5', '0', '5') /* YYUV per line */
+ #define V4L2_PIX_FMT_SPCA508 v4l2_fourcc('S', '5', '0', '8') /* YUVY per line */
+ #define V4L2_PIX_FMT_SPCA561 v4l2_fourcc('S', '5', '6', '1') /* compressed GBRG bayer */
+ #define V4L2_PIX_FMT_PAC207 v4l2_fourcc('P', '2', '0', '7') /* compressed BGGR bayer */
+ #define V4L2_PIX_FMT_MR97310A v4l2_fourcc('M', '3', '1', '0') /* compressed BGGR bayer */
+ #define V4L2_PIX_FMT_JL2005BCD v4l2_fourcc('J', 'L', '2', '0') /* compressed RGGB bayer */
+ #define V4L2_PIX_FMT_SN9C2028 v4l2_fourcc('S', 'O', 'N', 'X') /* compressed GBRG bayer */
+ #define V4L2_PIX_FMT_SQ905C v4l2_fourcc('9', '0', '5', 'C') /* compressed RGGB bayer */
+ #define V4L2_PIX_FMT_PJPG v4l2_fourcc('P', 'J', 'P', 'G') /* Pixart 73xx JPEG */
+ #define V4L2_PIX_FMT_OV511 v4l2_fourcc('O', '5', '1', '1') /* ov511 JPEG */
+ #define V4L2_PIX_FMT_OV518 v4l2_fourcc('O', '5', '1', '8') /* ov518 JPEG */
+ #define V4L2_PIX_FMT_STV0680 v4l2_fourcc('S', '6', '8', '0') /* stv0680 bayer */
+ #define V4L2_PIX_FMT_TM6000 v4l2_fourcc('T', 'M', '6', '0') /* tm5600/tm60x0 */
+ #define V4L2_PIX_FMT_CIT_YYVYUY v4l2_fourcc('C', 'I', 'T', 'V') /* one line of Y then 1 line of VYUY */
+ #define V4L2_PIX_FMT_KONICA420 v4l2_fourcc('K', 'O', 'N', 'I') /* YUV420 planar in blocks of 256 pixels */
+ #define V4L2_PIX_FMT_JPGL v4l2_fourcc('J', 'P', 'G', 'L') /* JPEG-Lite */
+ #define V4L2_PIX_FMT_SE401 v4l2_fourcc('S', '4', '0', '1') /* se401 janggu compressed rgb */
+ #define V4L2_PIX_FMT_S5C_UYVY_JPG v4l2_fourcc('S', '5', 'C', 'I') /* S5C73M3 interleaved UYVY/JPEG */
+ #define V4L2_PIX_FMT_Y8I v4l2_fourcc('Y', '8', 'I', ' ') /* Greyscale 8-bit L/R interleaved */
+ #define V4L2_PIX_FMT_Y12I v4l2_fourcc('Y', '1', '2', 'I') /* Greyscale 12-bit L/R interleaved */
+ #define V4L2_PIX_FMT_Z16 v4l2_fourcc('Z', '1', '6', ' ') /* Depth data 16-bit */
+ #define V4L2_PIX_FMT_MT21C v4l2_fourcc('M', 'T', '2', '1') /* Mediatek compressed block mode */
+ #define V4L2_PIX_FMT_INZI v4l2_fourcc('I', 'N', 'Z', 'I') /* Intel Planar Greyscale 10-bit and Depth 16-bit */
+ #define V4L2_PIX_FMT_SUNXI_TILED_NV12 v4l2_fourcc('S', 'T', '1', '2') /* Sunxi Tiled NV12 Format */
+ #define V4L2_PIX_FMT_CNF4 v4l2_fourcc('C', 'N', 'F', '4') /* Intel 4-bit packed depth confidence information */
+
+ /* 10bit raw bayer packed, 32 bytes for every 25 pixels, last LSB 6 bits unused */
+ #define V4L2_PIX_FMT_IPU3_SBGGR10 v4l2_fourcc('i', 'p', '3', 'b') /* IPU3 packed 10-bit BGGR bayer */
+ #define V4L2_PIX_FMT_IPU3_SGBRG10 v4l2_fourcc('i', 'p', '3', 'g') /* IPU3 packed 10-bit GBRG bayer */
+ #define V4L2_PIX_FMT_IPU3_SGRBG10 v4l2_fourcc('i', 'p', '3', 'G') /* IPU3 packed 10-bit GRBG bayer */
+ #define V4L2_PIX_FMT_IPU3_SRGGB10 v4l2_fourcc('i', 'p', '3', 'r') /* IPU3 packed 10-bit RGGB bayer */
+
+ /* SDR formats - used only for Software Defined Radio devices */
+ #define V4L2_SDR_FMT_CU8 v4l2_fourcc('C', 'U', '0', '8') /* IQ u8 */
+ #define V4L2_SDR_FMT_CU16LE v4l2_fourcc('C', 'U', '1', '6') /* IQ u16le */
+ #define V4L2_SDR_FMT_CS8 v4l2_fourcc('C', 'S', '0', '8') /* complex s8 */
+ #define V4L2_SDR_FMT_CS14LE v4l2_fourcc('C', 'S', '1', '4') /* complex s14le */
+ #define V4L2_SDR_FMT_RU12LE v4l2_fourcc('R', 'U', '1', '2') /* real u12le */
+ #define V4L2_SDR_FMT_PCU16BE v4l2_fourcc('P', 'C', '1', '6') /* planar complex u16be */
+ #define V4L2_SDR_FMT_PCU18BE v4l2_fourcc('P', 'C', '1', '8') /* planar complex u18be */
+ #define V4L2_SDR_FMT_PCU20BE v4l2_fourcc('P', 'C', '2', '0') /* planar complex u20be */
+
+ /* Touch formats - used for Touch devices */
+ #define V4L2_TCH_FMT_DELTA_TD16 v4l2_fourcc('T', 'D', '1', '6') /* 16-bit signed deltas */
+ #define V4L2_TCH_FMT_DELTA_TD08 v4l2_fourcc('T', 'D', '0', '8') /* 8-bit signed deltas */
+ #define V4L2_TCH_FMT_TU16 v4l2_fourcc('T', 'U', '1', '6') /* 16-bit unsigned touch data */
+ #define V4L2_TCH_FMT_TU08 v4l2_fourcc('T', 'U', '0', '8') /* 8-bit unsigned touch data */
+
+ /* Meta-data formats */
+ #define V4L2_META_FMT_VSP1_HGO v4l2_fourcc('V', 'S', 'P', 'H') /* R-Car VSP1 1-D Histogram */
+ #define V4L2_META_FMT_VSP1_HGT v4l2_fourcc('V', 'S', 'P', 'T') /* R-Car VSP1 2-D Histogram */
+ #define V4L2_META_FMT_UVC v4l2_fourcc('U', 'V', 'C', 'H') /* UVC Payload Header metadata */
+ #define V4L2_META_FMT_D4XX v4l2_fourcc('D', '4', 'X', 'X') /* D4XX Payload Header metadata */
+ #define V4L2_META_FMT_VIVID v4l2_fourcc('V', 'I', 'V', 'D') /* Vivid Metadata */
+
++#define V4L2_PIX_FMT_INVZ v4l2_fourcc('I', 'N', 'V', 'Z') /* Intel Planar Depth 16-bit */
++
+ /* priv field value to indicate that subsequent fields are valid. */
+ #define V4L2_PIX_FMT_PRIV_MAGIC 0xfeedcafe
+
+ /* Flags */
+ #define V4L2_PIX_FMT_FLAG_PREMUL_ALPHA 0x00000001
+
+ /*
+ * F O R M A T E N U M E R A T I O N
+ */
+ struct v4l2_fmtdesc {
+ __u32 index; /* Format number */
+ __u32 type; /* enum v4l2_buf_type */
+ __u32 flags;
+ __u8 description[32]; /* Description string */
+ __u32 pixelformat; /* Format fourcc */
+ __u32 mbus_code; /* Media bus code */
+ __u32 reserved[3];
+ };
+
+ #define V4L2_FMT_FLAG_COMPRESSED 0x0001
+ #define V4L2_FMT_FLAG_EMULATED 0x0002
+ #define V4L2_FMT_FLAG_CONTINUOUS_BYTESTREAM 0x0004
+ #define V4L2_FMT_FLAG_DYN_RESOLUTION 0x0008
+ #define V4L2_FMT_FLAG_ENC_CAP_FRAME_INTERVAL 0x0010
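
Format enumeration walks index 0, 1, 2, ... until the driver returns EINVAL. A sketch that unpacks each FOURCC with the inverse of the v4l2_fourcc() packing (device node assumed):

#include <stdio.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

int main(void)
{
	struct v4l2_fmtdesc desc;
	int fd = open("/dev/video0", O_RDWR); /* assumed device node */

	if (fd < 0) { perror("open"); return 1; }

	memset(&desc, 0, sizeof(desc));
	desc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	while (ioctl(fd, VIDIOC_ENUM_FMT, &desc) == 0) {
		__u32 f = desc.pixelformat;
		/* Unpack the four little-endian FOURCC bytes for display. */
		printf("%u: %c%c%c%c (%s)%s\n", desc.index,
		       f & 0xff, (f >> 8) & 0xff, (f >> 16) & 0xff, (f >> 24) & 0xff,
		       (char *)desc.description,
		       (desc.flags & V4L2_FMT_FLAG_COMPRESSED) ? " [compressed]" : "");
		desc.index++;
	}
	close(fd);
	return 0;
}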
+
+ /* Frame Size and frame rate enumeration */
+ /*
+ * F R A M E S I Z E E N U M E R A T I O N
+ */
+ enum v4l2_frmsizetypes {
+ V4L2_FRMSIZE_TYPE_DISCRETE = 1,
+ V4L2_FRMSIZE_TYPE_CONTINUOUS = 2,
+ V4L2_FRMSIZE_TYPE_STEPWISE = 3,
+ };
+
+ struct v4l2_frmsize_discrete {
+ __u32 width; /* Frame width [pixel] */
+ __u32 height; /* Frame height [pixel] */
+ };
+
+ struct v4l2_frmsize_stepwise {
+ __u32 min_width; /* Minimum frame width [pixel] */
+ __u32 max_width; /* Maximum frame width [pixel] */
+ __u32 step_width; /* Frame width step size [pixel] */
+ __u32 min_height; /* Minimum frame height [pixel] */
+ __u32 max_height; /* Maximum frame height [pixel] */
+ __u32 step_height; /* Frame height step size [pixel] */
+ };
+
+ struct v4l2_frmsizeenum {
+ __u32 index; /* Frame size number */
+ __u32 pixel_format; /* Pixel format */
+ __u32 type; /* Frame size type the device supports. */
+
+ union { /* Frame size */
+ struct v4l2_frmsize_discrete discrete;
+ struct v4l2_frmsize_stepwise stepwise;
+ };
+
+ __u32 reserved[2]; /* Reserved space for future use */
+ };
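
Which union member is valid depends on type: discrete sizes are enumerated one index at a time, while CONTINUOUS and STEPWISE ranges are fully described by index 0. A sketch enumerating sizes for YUYV (device node assumed):

#include <stdio.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

int main(void)
{
	struct v4l2_frmsizeenum fsz;
	int fd = open("/dev/video0", O_RDWR); /* assumed device node */

	if (fd < 0) { perror("open"); return 1; }

	memset(&fsz, 0, sizeof(fsz));
	fsz.pixel_format = V4L2_PIX_FMT_YUYV;
	while (ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &fsz) == 0) {
		if (fsz.type == V4L2_FRMSIZE_TYPE_DISCRETE)
			printf("%ux%u\n", fsz.discrete.width, fsz.discrete.height);
		else /* CONTINUOUS or STEPWISE: only index 0 is returned */
			printf("%ux%u .. %ux%u (step %ux%u)\n",
			       fsz.stepwise.min_width, fsz.stepwise.min_height,
			       fsz.stepwise.max_width, fsz.stepwise.max_height,
			       fsz.stepwise.step_width, fsz.stepwise.step_height);
		fsz.index++;
	}
	close(fd);
	return 0;
}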
+
+ /*
+ * F R A M E R A T E E N U M E R A T I O N
+ */
+ enum v4l2_frmivaltypes {
+ V4L2_FRMIVAL_TYPE_DISCRETE = 1,
+ V4L2_FRMIVAL_TYPE_CONTINUOUS = 2,
+ V4L2_FRMIVAL_TYPE_STEPWISE = 3,
+ };
+
+ struct v4l2_frmival_stepwise {
+ struct v4l2_fract min; /* Minimum frame interval [s] */
+ struct v4l2_fract max; /* Maximum frame interval [s] */
+ struct v4l2_fract step; /* Frame interval step size [s] */
+ };
+
+ struct v4l2_frmivalenum {
+ __u32 index; /* Frame format index */
+ __u32 pixel_format; /* Pixel format */
+ __u32 width; /* Frame width */
+ __u32 height; /* Frame height */
+ __u32 type; /* Frame interval type the device supports. */
+
+ union { /* Frame interval */
+ struct v4l2_fract discrete;
+ struct v4l2_frmival_stepwise stepwise;
+ };
+
+ __u32 reserved[2]; /* Reserved space for future use */
+ };
+
+ /*
+ * T I M E C O D E
+ */
+ struct v4l2_timecode {
+ __u32 type;
+ __u32 flags;
+ __u8 frames;
+ __u8 seconds;
+ __u8 minutes;
+ __u8 hours;
+ __u8 userbits[4];
+ };
+
+ /* Type */
+ #define V4L2_TC_TYPE_24FPS 1
+ #define V4L2_TC_TYPE_25FPS 2
+ #define V4L2_TC_TYPE_30FPS 3
+ #define V4L2_TC_TYPE_50FPS 4
+ #define V4L2_TC_TYPE_60FPS 5
+
+ /* Flags */
+ #define V4L2_TC_FLAG_DROPFRAME 0x0001 /* "drop-frame" mode */
+ #define V4L2_TC_FLAG_COLORFRAME 0x0002
+ #define V4L2_TC_USERBITS_field 0x000C
+ #define V4L2_TC_USERBITS_USERDEFINED 0x0000
+ #define V4L2_TC_USERBITS_8BITCHARS 0x0008
+ /* The above is based on SMPTE timecodes */
+
+ struct v4l2_jpegcompression {
+ int quality;
+
+ int APPn; /* Number of APP segment to be written,
+ * must be 0..15 */
+ int APP_len; /* Length of data in JPEG APPn segment */
+ char APP_data[60]; /* Data in the JPEG APPn segment. */
+
+ int COM_len; /* Length of data in JPEG COM segment */
+ char COM_data[60]; /* Data in JPEG COM segment */
+
+ __u32 jpeg_markers; /* Which markers should go into the JPEG
+ * output. Unless you know exactly what
+ * you are doing, leave them untouched.
+ * Including fewer markers will make the
+ * resulting code smaller, but there will
+ * be fewer applications which can read it.
+ * The presence of the APP and COM marker
+ * is influenced by APP_len and COM_len
+ * ONLY, not by this property! */
+
+ #define V4L2_JPEG_MARKER_DHT (1<<3) /* Define Huffman Tables */
+ #define V4L2_JPEG_MARKER_DQT (1<<4) /* Define Quantization Tables */
+ #define V4L2_JPEG_MARKER_DRI (1<<5) /* Define Restart Interval */
+ #define V4L2_JPEG_MARKER_COM (1<<6) /* Comment segment */
+ #define V4L2_JPEG_MARKER_APP (1<<7) /* App segment, driver will
+ * always use APP0 */
+ };
+
+ /*
+ * M E M O R Y - M A P P I N G B U F F E R S
+ */
+
+ #ifdef __KERNEL__
+ /*
+ * This corresponds to the user space version of timeval
+ * for 64-bit time_t. sparc64 is different from everyone
+ * else, using the microseconds in the wrong half of the
+ * second 64-bit word.
+ */
+ struct __kernel_v4l2_timeval {
+ long long tv_sec;
+ #if defined(__sparc__) && defined(__arch64__)
+ int tv_usec;
+ int __pad;
+ #else
+ long long tv_usec;
+ #endif
+ };
+ #endif
+
+ struct v4l2_requestbuffers {
+ __u32 count;
+ __u32 type; /* enum v4l2_buf_type */
+ __u32 memory; /* enum v4l2_memory */
+ __u32 capabilities;
+ union {
+ __u32 flags;
+ __u32 reserved[1];
+ };
+ };
+
+ /* capabilities for struct v4l2_requestbuffers and v4l2_create_buffers */
+ #define V4L2_BUF_CAP_SUPPORTS_MMAP (1 << 0)
+ #define V4L2_BUF_CAP_SUPPORTS_USERPTR (1 << 1)
+ #define V4L2_BUF_CAP_SUPPORTS_DMABUF (1 << 2)
+ #define V4L2_BUF_CAP_SUPPORTS_REQUESTS (1 << 3)
+ #define V4L2_BUF_CAP_SUPPORTS_ORPHANED_BUFS (1 << 4)
+ #define V4L2_BUF_CAP_SUPPORTS_M2M_HOLD_CAPTURE_BUF (1 << 5)
+ #define V4L2_BUF_CAP_SUPPORTS_MMAP_CACHE_HINTS (1 << 6)
+
+ /**
+ * struct v4l2_plane - plane info for multi-planar buffers
+ * @bytesused: number of bytes occupied by data in the plane (payload)
+ * @length: size of this plane (NOT the payload) in bytes
+ * @mem_offset: when memory in the associated struct v4l2_buffer is
+ * V4L2_MEMORY_MMAP, equals the offset from the start of
+ * the device memory for this plane (or is a "cookie" that
+ * should be passed to mmap() called on the video node)
+ * @userptr: when memory is V4L2_MEMORY_USERPTR, a userspace pointer
+ * pointing to this plane
+ * @fd: when memory is V4L2_MEMORY_DMABUF, a userspace file
+ * descriptor associated with this plane
+ * @data_offset: offset in the plane to the start of data; usually 0,
+ * unless there is a header in front of the data
+ *
+ * Multi-planar buffers consist of one or more planes, e.g. a YCbCr buffer
+ * with two planes can have one plane for Y, and another for interleaved CbCr
+ * components. Each plane can reside in a separate memory buffer, or even in
+ * a completely separate memory node (e.g. in embedded devices).
+ */
+ struct v4l2_plane {
+ __u32 bytesused;
+ __u32 length;
+ union {
+ __u32 mem_offset;
+ unsigned long userptr;
+ __s32 fd;
+ } m;
+ __u32 data_offset;
+ __u32 reserved[11];
+ };
+
+ /**
+ * struct v4l2_buffer - video buffer info
+ * @index: id number of the buffer
+ * @type: enum v4l2_buf_type; buffer type (type == *_MPLANE for
+ * multiplanar buffers);
+ * @bytesused: number of bytes occupied by data in the buffer (payload);
+ * unused (set to 0) for multiplanar buffers
+ * @flags: buffer informational flags
+ * @field: enum v4l2_field; field order of the image in the buffer
+ * @timestamp: frame timestamp
+ * @timecode: frame timecode
+ * @sequence: sequence count of this frame
+ * @memory: enum v4l2_memory; the method, in which the actual video data is
+ * passed
+ * @offset: for non-multiplanar buffers with memory == V4L2_MEMORY_MMAP;
+ * offset from the start of the device memory for this plane,
+ * (or a "cookie" that should be passed to mmap() as offset)
+ * @userptr: for non-multiplanar buffers with memory == V4L2_MEMORY_USERPTR;
+ * a userspace pointer pointing to this buffer
+ * @fd: for non-multiplanar buffers with memory == V4L2_MEMORY_DMABUF;
+ * a userspace file descriptor associated with this buffer
+ * @planes: for multiplanar buffers; userspace pointer to the array of plane
+ * info structs for this buffer
+ * @length: size in bytes of the buffer (NOT its payload) for single-plane
+ * buffers (when type != *_MPLANE); number of elements in the
+ * planes array for multi-plane buffers
+ * @request_fd: fd of the request that this buffer should use
+ *
+ * Contains data exchanged by application and driver using one of the Streaming
+ * I/O methods.
+ */
+ struct v4l2_buffer {
+ __u32 index;
+ __u32 type;
+ __u32 bytesused;
+ __u32 flags;
+ __u32 field;
+ #ifdef __KERNEL__
+ struct __kernel_v4l2_timeval timestamp;
+ #else
+ struct timeval timestamp;
+ #endif
+ struct v4l2_timecode timecode;
+ __u32 sequence;
+
+ /* memory location */
+ __u32 memory;
+ union {
+ __u32 offset;
+ unsigned long userptr;
+ struct v4l2_plane *planes;
+ __s32 fd;
+ } m;
+ __u32 length;
+ __u32 reserved2;
+ union {
+ __s32 request_fd;
+ __u32 reserved;
+ };
+ };
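
The classic streaming cycle built on these two structs is REQBUFS, then QUERYBUF/mmap, QBUF, STREAMON, and finally DQBUF, where m.offset is the mmap() "cookie" described above. A condensed single-planar sketch with minimal error handling, assuming a format has already been set on /dev/video0:

#include <stdio.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/videodev2.h>

int main(void)
{
	struct v4l2_requestbuffers req;
	struct v4l2_buffer buf;
	enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	void *maps[4];
	unsigned int i;
	int fd = open("/dev/video0", O_RDWR); /* assumed device node */

	if (fd < 0) { perror("open"); return 1; }

	memset(&req, 0, sizeof(req));
	req.count = 4;
	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	req.memory = V4L2_MEMORY_MMAP;
	if (ioctl(fd, VIDIOC_REQBUFS, &req) < 0) { perror("VIDIOC_REQBUFS"); return 1; }
	if (req.count > 4)
		req.count = 4; /* drivers may adjust the count; clamp to our array */

	for (i = 0; i < req.count; i++) {
		memset(&buf, 0, sizeof(buf));
		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		buf.memory = V4L2_MEMORY_MMAP;
		buf.index = i;
		if (ioctl(fd, VIDIOC_QUERYBUF, &buf) < 0) { perror("VIDIOC_QUERYBUF"); return 1; }
		/* buf.m.offset is the mmap() cookie, not a real file offset. */
		maps[i] = mmap(NULL, buf.length, PROT_READ | PROT_WRITE,
			       MAP_SHARED, fd, buf.m.offset);
		if (maps[i] == MAP_FAILED) { perror("mmap"); return 1; }
		if (ioctl(fd, VIDIOC_QBUF, &buf) < 0) { perror("VIDIOC_QBUF"); return 1; }
	}

	if (ioctl(fd, VIDIOC_STREAMON, &type) < 0) { perror("VIDIOC_STREAMON"); return 1; }

	memset(&buf, 0, sizeof(buf));
	buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	buf.memory = V4L2_MEMORY_MMAP;
	if (ioctl(fd, VIDIOC_DQBUF, &buf) == 0) /* blocks until a frame is ready */
		printf("frame %u: %u payload bytes in buffer %u\n",
		       buf.sequence, buf.bytesused, buf.index);

	ioctl(fd, VIDIOC_STREAMOFF, &type);
	close(fd);
	return 0;
}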
+
+ #ifndef __KERNEL__
+ /**
+ * v4l2_timeval_to_ns - Convert timeval to nanoseconds
+ * @tv: pointer to the timeval variable to be converted
+ *
+ * Returns the scalar nanosecond representation of the timeval
+ * parameter.
+ */
+ static inline __u64 v4l2_timeval_to_ns(const struct timeval *tv)
+ {
+ return (__u64)tv->tv_sec * 1000000000ULL + tv->tv_usec * 1000;
+ }
+ #endif
+
+ /* Flags for 'flags' field */
+ /* Buffer is mapped (flag) */
+ #define V4L2_BUF_FLAG_MAPPED 0x00000001
+ /* Buffer is queued for processing */
+ #define V4L2_BUF_FLAG_QUEUED 0x00000002
+ /* Buffer is ready */
+ #define V4L2_BUF_FLAG_DONE 0x00000004
+ /* Image is a keyframe (I-frame) */
+ #define V4L2_BUF_FLAG_KEYFRAME 0x00000008
+ /* Image is a P-frame */
+ #define V4L2_BUF_FLAG_PFRAME 0x00000010
+ /* Image is a B-frame */
+ #define V4L2_BUF_FLAG_BFRAME 0x00000020
+ /* Buffer is ready, but the data contained within is corrupted. */
+ #define V4L2_BUF_FLAG_ERROR 0x00000040
+ /* Buffer is added to an unqueued request */
+ #define V4L2_BUF_FLAG_IN_REQUEST 0x00000080
+ /* timecode field is valid */
+ #define V4L2_BUF_FLAG_TIMECODE 0x00000100
+ /* Don't return the capture buffer until OUTPUT timestamp changes */
+ #define V4L2_BUF_FLAG_M2M_HOLD_CAPTURE_BUF 0x00000200
+ /* Buffer is prepared for queuing */
+ #define V4L2_BUF_FLAG_PREPARED 0x00000400
+ /* Cache handling flags */
+ #define V4L2_BUF_FLAG_NO_CACHE_INVALIDATE 0x00000800
+ #define V4L2_BUF_FLAG_NO_CACHE_CLEAN 0x00001000
+ /* Timestamp type */
+ #define V4L2_BUF_FLAG_TIMESTAMP_MASK 0x0000e000
+ #define V4L2_BUF_FLAG_TIMESTAMP_UNKNOWN 0x00000000
+ #define V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC 0x00002000
+ #define V4L2_BUF_FLAG_TIMESTAMP_COPY 0x00004000
+ /* Timestamp sources. */
+ #define V4L2_BUF_FLAG_TSTAMP_SRC_MASK 0x00070000
+ #define V4L2_BUF_FLAG_TSTAMP_SRC_EOF 0x00000000
+ #define V4L2_BUF_FLAG_TSTAMP_SRC_SOE 0x00010000
+ /* mem2mem encoder/decoder */
+ #define V4L2_BUF_FLAG_LAST 0x00100000
+ /* request_fd is valid */
+ #define V4L2_BUF_FLAG_REQUEST_FD 0x00800000
+
+ /**
+ * struct v4l2_exportbuffer - export of video buffer as DMABUF file descriptor
+ *
+ * @index: id number of the buffer
+ * @type: enum v4l2_buf_type; buffer type (type == *_MPLANE for
+ * multiplanar buffers);
+ * @plane: index of the plane to be exported, 0 for single plane queues
+ * @flags: flags for newly created file, currently only O_CLOEXEC is
+ * supported, refer to manual of open syscall for more details
+ * @fd: file descriptor associated with DMABUF (set by driver)
+ *
+ * Contains data used for exporting a video buffer as DMABUF file descriptor.
+ * The buffer is identified by a 'cookie' returned by VIDIOC_QUERYBUF
+ * (identical to the cookie used to mmap() the buffer to userspace). All
+ * reserved fields must be set to zero. The field reserved0 is expected to
+ * become a structure 'type' allowing an alternative layout of the structure
+ * content. Therefore this field should not be used for any other extensions.
+ */
+ struct v4l2_exportbuffer {
+ __u32 type; /* enum v4l2_buf_type */
+ __u32 index;
+ __u32 plane;
+ __u32 flags;
+ __s32 fd;
+ __u32 reserved[11];
+ };
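
A sketch of the export path: MMAP buffers must exist before VIDIOC_EXPBUF can hand one out as a DMABUF file descriptor, e.g. for zero-copy sharing with a GPU or another video device (device node assumed):

#include <stdio.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

int main(void)
{
	struct v4l2_requestbuffers req;
	struct v4l2_exportbuffer expbuf;
	int fd = open("/dev/video0", O_RDWR); /* assumed device node */

	if (fd < 0) { perror("open"); return 1; }

	/* DMABUF export requires MMAP-allocated buffers to exist first. */
	memset(&req, 0, sizeof(req));
	req.count = 1;
	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	req.memory = V4L2_MEMORY_MMAP;
	if (ioctl(fd, VIDIOC_REQBUFS, &req) < 0) { perror("VIDIOC_REQBUFS"); return 1; }

	memset(&expbuf, 0, sizeof(expbuf));
	expbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	expbuf.index = 0;
	expbuf.plane = 0;         /* single-planar queue */
	expbuf.flags = O_CLOEXEC; /* the only flag currently supported */
	if (ioctl(fd, VIDIOC_EXPBUF, &expbuf) < 0) { perror("VIDIOC_EXPBUF"); return 1; }

	/* expbuf.fd can now be passed to another device or a GPU driver. */
	printf("DMABUF fd: %d\n", expbuf.fd);
	close(expbuf.fd);
	close(fd);
	return 0;
}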
+
+ /*
+ * O V E R L A Y P R E V I E W
+ */
+ struct v4l2_framebuffer {
+ __u32 capability;
+ __u32 flags;
+ /* FIXME: in theory we should pass something like PCI device + memory
+ * region + offset instead of some physical address */
+ void *base;
+ struct {
+ __u32 width;
+ __u32 height;
+ __u32 pixelformat;
+ __u32 field; /* enum v4l2_field */
+ __u32 bytesperline; /* for padding, zero if unused */
+ __u32 sizeimage;
+ __u32 colorspace; /* enum v4l2_colorspace */
+ __u32 priv; /* reserved field, set to 0 */
+ } fmt;
+ };
+ /* Flags for the 'capability' field. Read only */
+ #define V4L2_FBUF_CAP_EXTERNOVERLAY 0x0001
+ #define V4L2_FBUF_CAP_CHROMAKEY 0x0002
+ #define V4L2_FBUF_CAP_LIST_CLIPPING 0x0004
+ #define V4L2_FBUF_CAP_BITMAP_CLIPPING 0x0008
+ #define V4L2_FBUF_CAP_LOCAL_ALPHA 0x0010
+ #define V4L2_FBUF_CAP_GLOBAL_ALPHA 0x0020
+ #define V4L2_FBUF_CAP_LOCAL_INV_ALPHA 0x0040
+ #define V4L2_FBUF_CAP_SRC_CHROMAKEY 0x0080
+ /* Flags for the 'flags' field. */
+ #define V4L2_FBUF_FLAG_PRIMARY 0x0001
+ #define V4L2_FBUF_FLAG_OVERLAY 0x0002
+ #define V4L2_FBUF_FLAG_CHROMAKEY 0x0004
+ #define V4L2_FBUF_FLAG_LOCAL_ALPHA 0x0008
+ #define V4L2_FBUF_FLAG_GLOBAL_ALPHA 0x0010
+ #define V4L2_FBUF_FLAG_LOCAL_INV_ALPHA 0x0020
+ #define V4L2_FBUF_FLAG_SRC_CHROMAKEY 0x0040
+
+ struct v4l2_clip {
+ struct v4l2_rect c;
+ struct v4l2_clip __user *next;
+ };
+
+ struct v4l2_window {
+ struct v4l2_rect w;
+ __u32 field; /* enum v4l2_field */
+ __u32 chromakey;
+ struct v4l2_clip __user *clips;
+ __u32 clipcount;
+ void __user *bitmap;
+ __u8 global_alpha;
+ };
+
+ /*
+ * C A P T U R E P A R A M E T E R S
+ */
+ struct v4l2_captureparm {
+ __u32 capability; /* Supported modes */
+ __u32 capturemode; /* Current mode */
+ struct v4l2_fract timeperframe; /* Time per frame in seconds */
+ __u32 extendedmode; /* Driver-specific extensions */
+ __u32 readbuffers; /* # of buffers for read */
+ __u32 reserved[4];
+ };
+
+ /* Flags for 'capability' and 'capturemode' fields */
+ #define V4L2_MODE_HIGHQUALITY 0x0001 /* High quality imaging mode */
+ #define V4L2_CAP_TIMEPERFRAME 0x1000 /* timeperframe field is supported */
+
+ struct v4l2_outputparm {
+ __u32 capability; /* Supported modes */
+ __u32 outputmode; /* Current mode */
+ struct v4l2_fract timeperframe; /* Time per frame in seconds */
+ __u32 extendedmode; /* Driver-specific extensions */
+ __u32 writebuffers; /* # of buffers for write */
+ __u32 reserved[4];
+ };
+
+ /*
+ * I N P U T I M A G E C R O P P I N G
+ */
+ struct v4l2_cropcap {
+ __u32 type; /* enum v4l2_buf_type */
+ struct v4l2_rect bounds;
+ struct v4l2_rect defrect;
+ struct v4l2_fract pixelaspect;
+ };
+
+ struct v4l2_crop {
+ __u32 type; /* enum v4l2_buf_type */
+ struct v4l2_rect c;
+ };
+
+ /**
+ * struct v4l2_selection - selection info
+ * @type: buffer type (do not use *_MPLANE types)
+ * @target: Selection target, used to choose one of possible rectangles;
+ * defined in v4l2-common.h; V4L2_SEL_TGT_* .
+ * @flags: constraints flags, defined in v4l2-common.h; V4L2_SEL_FLAG_*.
+ * @r: coordinates of selection window
+ * @reserved: for future use, rounds structure size to 64 bytes, set to zero
+ *
+ * Hardware may use multiple helper windows to process a video stream.
+ * The structure is used to exchange these selection areas between
+ * an application and a driver.
+ */
+ struct v4l2_selection {
+ __u32 type;
+ __u32 target;
+ __u32 flags;
+ struct v4l2_rect r;
+ __u32 reserved[9];
+ };
+
+
+ /*
+ * A N A L O G V I D E O S T A N D A R D
+ */
+
+ typedef __u64 v4l2_std_id;
+
+ /*
+ * Attention: Keep the V4L2_STD_* bit definitions in sync with
+ * include/dt-bindings/display/sdtv-standards.h SDTV_STD_* bit definitions.
+ */
+ /* one bit for each */
+ #define V4L2_STD_PAL_B ((v4l2_std_id)0x00000001)
+ #define V4L2_STD_PAL_B1 ((v4l2_std_id)0x00000002)
+ #define V4L2_STD_PAL_G ((v4l2_std_id)0x00000004)
+ #define V4L2_STD_PAL_H ((v4l2_std_id)0x00000008)
+ #define V4L2_STD_PAL_I ((v4l2_std_id)0x00000010)
+ #define V4L2_STD_PAL_D ((v4l2_std_id)0x00000020)
+ #define V4L2_STD_PAL_D1 ((v4l2_std_id)0x00000040)
+ #define V4L2_STD_PAL_K ((v4l2_std_id)0x00000080)
+
+ #define V4L2_STD_PAL_M ((v4l2_std_id)0x00000100)
+ #define V4L2_STD_PAL_N ((v4l2_std_id)0x00000200)
+ #define V4L2_STD_PAL_Nc ((v4l2_std_id)0x00000400)
+ #define V4L2_STD_PAL_60 ((v4l2_std_id)0x00000800)
+
+ #define V4L2_STD_NTSC_M ((v4l2_std_id)0x00001000) /* BTSC */
+ #define V4L2_STD_NTSC_M_JP ((v4l2_std_id)0x00002000) /* EIA-J */
+ #define V4L2_STD_NTSC_443 ((v4l2_std_id)0x00004000)
+ #define V4L2_STD_NTSC_M_KR ((v4l2_std_id)0x00008000) /* FM A2 */
+
+ #define V4L2_STD_SECAM_B ((v4l2_std_id)0x00010000)
+ #define V4L2_STD_SECAM_D ((v4l2_std_id)0x00020000)
+ #define V4L2_STD_SECAM_G ((v4l2_std_id)0x00040000)
+ #define V4L2_STD_SECAM_H ((v4l2_std_id)0x00080000)
+ #define V4L2_STD_SECAM_K ((v4l2_std_id)0x00100000)
+ #define V4L2_STD_SECAM_K1 ((v4l2_std_id)0x00200000)
+ #define V4L2_STD_SECAM_L ((v4l2_std_id)0x00400000)
+ #define V4L2_STD_SECAM_LC ((v4l2_std_id)0x00800000)
+
+ /* ATSC/HDTV */
+ #define V4L2_STD_ATSC_8_VSB ((v4l2_std_id)0x01000000)
+ #define V4L2_STD_ATSC_16_VSB ((v4l2_std_id)0x02000000)
+
+ /* FIXME:
+    Although std_id is 64 bits, there is an issue on the PPC32 architecture
+    that makes switch(__u64) break. So there's a hack in v4l2-common.c that
+    rounds this value to 32 bits.
+    As the current maximum value is V4L2_STD_ATSC_16_VSB (30 bits wide),
+    it should work fine. However, if more than two standards need to be
+    added, v4l2-common.c should be fixed.
+ */
+
+ /*
+ * Some macros to merge video standards in order to make life easier for the
+ * drivers and V4L2 applications
+ */
+
+ /*
+ * "Common" NTSC/M - Note that V4L2_STD_NTSC_443 is missing here.
+ */
+ #define V4L2_STD_NTSC (V4L2_STD_NTSC_M |\
+ V4L2_STD_NTSC_M_JP |\
+ V4L2_STD_NTSC_M_KR)
+ /* Secam macros */
+ #define V4L2_STD_SECAM_DK (V4L2_STD_SECAM_D |\
+ V4L2_STD_SECAM_K |\
+ V4L2_STD_SECAM_K1)
+ /* All Secam Standards */
+ #define V4L2_STD_SECAM (V4L2_STD_SECAM_B |\
+ V4L2_STD_SECAM_G |\
+ V4L2_STD_SECAM_H |\
+ V4L2_STD_SECAM_DK |\
+ V4L2_STD_SECAM_L |\
+ V4L2_STD_SECAM_LC)
+ /* PAL macros */
+ #define V4L2_STD_PAL_BG (V4L2_STD_PAL_B |\
+ V4L2_STD_PAL_B1 |\
+ V4L2_STD_PAL_G)
+ #define V4L2_STD_PAL_DK (V4L2_STD_PAL_D |\
+ V4L2_STD_PAL_D1 |\
+ V4L2_STD_PAL_K)
+ /*
+ * "Common" PAL - This macro is there to be compatible with the old
+ * V4L1 concept of "PAL": /BGDKHI.
+ * Several PAL standards are missing here: /M, /N and /Nc
+ */
+ #define V4L2_STD_PAL (V4L2_STD_PAL_BG |\
+ V4L2_STD_PAL_DK |\
+ V4L2_STD_PAL_H |\
+ V4L2_STD_PAL_I)
+ /* Chroma "agnostic" standards */
+ #define V4L2_STD_B (V4L2_STD_PAL_B |\
+ V4L2_STD_PAL_B1 |\
+ V4L2_STD_SECAM_B)
+ #define V4L2_STD_G (V4L2_STD_PAL_G |\
+ V4L2_STD_SECAM_G)
+ #define V4L2_STD_H (V4L2_STD_PAL_H |\
+ V4L2_STD_SECAM_H)
+ #define V4L2_STD_L (V4L2_STD_SECAM_L |\
+ V4L2_STD_SECAM_LC)
+ #define V4L2_STD_GH (V4L2_STD_G |\
+ V4L2_STD_H)
+ #define V4L2_STD_DK (V4L2_STD_PAL_DK |\
+ V4L2_STD_SECAM_DK)
+ #define V4L2_STD_BG (V4L2_STD_B |\
+ V4L2_STD_G)
+ #define V4L2_STD_MN (V4L2_STD_PAL_M |\
+ V4L2_STD_PAL_N |\
+ V4L2_STD_PAL_Nc |\
+ V4L2_STD_NTSC)
+
+ /* Standards where MTS/BTSC stereo could be found */
+ #define V4L2_STD_MTS (V4L2_STD_NTSC_M |\
+ V4L2_STD_PAL_M |\
+ V4L2_STD_PAL_N |\
+ V4L2_STD_PAL_Nc)
+
+ /* Standards for Countries with 60Hz Line frequency */
+ #define V4L2_STD_525_60 (V4L2_STD_PAL_M |\
+ V4L2_STD_PAL_60 |\
+ V4L2_STD_NTSC |\
+ V4L2_STD_NTSC_443)
+ /* Standards for Countries with 50Hz Line frequency */
+ #define V4L2_STD_625_50 (V4L2_STD_PAL |\
+ V4L2_STD_PAL_N |\
+ V4L2_STD_PAL_Nc |\
+ V4L2_STD_SECAM)
+
+ #define V4L2_STD_ATSC (V4L2_STD_ATSC_8_VSB |\
+ V4L2_STD_ATSC_16_VSB)
+ /* Macros with none and all analog standards */
+ #define V4L2_STD_UNKNOWN 0
+ #define V4L2_STD_ALL (V4L2_STD_525_60 |\
+ V4L2_STD_625_50)
+
+ struct v4l2_standard {
+ __u32 index;
+ v4l2_std_id id;
+ __u8 name[24];
+ struct v4l2_fract frameperiod; /* Frames, not fields */
+ __u32 framelines;
+ __u32 reserved[4];
+ };
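A short sketch of the enumeration idiom for this table, assuming `fd` is an open V4L2 device; VIDIOC_ENUMSTD is defined near the end of this header and returns EINVAL once the index runs past the last standard:

    #include <stdio.h>
    #include <string.h>
    #include <sys/ioctl.h>

    /* List the analog TV standards the current input supports. */
    static void
    list_standards (int fd)
    {
      struct v4l2_standard std;
      __u32 i;

      for (i = 0;; i++) {
        memset (&std, 0, sizeof (std));
        std.index = i;
        if (ioctl (fd, VIDIOC_ENUMSTD, &std) < 0)
          break;                    /* end of enumeration */
        printf ("standard %u: %s (id 0x%016llx)\n", i,
            (const char *) std.name, (unsigned long long) std.id);
      }
    }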
+
+ /*
+ * D V B T T I M I N G S
+ */
+
+ /** struct v4l2_bt_timings - BT.656/BT.1120 timing data
+ * @width: total width of the active video in pixels
+ * @height: total height of the active video in lines
+ * @interlaced: Interlaced or progressive
+ * @polarities: Positive or negative polarities
+ * @pixelclock: Pixel clock in HZ. Ex. 74.25MHz->74250000
+ * @hfrontporch:Horizontal front porch in pixels
+ * @hsync: Horizontal Sync length in pixels
+ * @hbackporch: Horizontal back porch in pixels
+ * @vfrontporch:Vertical front porch in lines
+ * @vsync: Vertical Sync length in lines
+ * @vbackporch: Vertical back porch in lines
+ * @il_vfrontporch:Vertical front porch for the even field
+ * (aka field 2) of interlaced field formats
+ * @il_vsync: Vertical Sync length for the even field
+ * (aka field 2) of interlaced field formats
+ * @il_vbackporch:Vertical back porch for the even field
+ * (aka field 2) of interlaced field formats
+ * @standards: Standards the timing belongs to
+ * @flags: Flags
+ * @picture_aspect: The picture aspect ratio (hor/vert).
+ * @cea861_vic: VIC code as per the CEA-861 standard.
+ * @hdmi_vic: VIC code as per the HDMI standard.
+ * @reserved: Reserved fields, must be zeroed.
+ *
+ * A note regarding vertical interlaced timings: height refers to the total
+ * height of the active video frame (= two fields). The blanking timings refer
+ * to the blanking of each field. So the height of the total frame is
+ * calculated as follows:
+ *
+ * tot_height = height + vfrontporch + vsync + vbackporch +
+ * il_vfrontporch + il_vsync + il_vbackporch
+ *
+ * The active height of each field is height / 2.
+ */
+ struct v4l2_bt_timings {
+ __u32 width;
+ __u32 height;
+ __u32 interlaced;
+ __u32 polarities;
+ __u64 pixelclock;
+ __u32 hfrontporch;
+ __u32 hsync;
+ __u32 hbackporch;
+ __u32 vfrontporch;
+ __u32 vsync;
+ __u32 vbackporch;
+ __u32 il_vfrontporch;
+ __u32 il_vsync;
+ __u32 il_vbackporch;
+ __u32 standards;
+ __u32 flags;
+ struct v4l2_fract picture_aspect;
+ __u8 cea861_vic;
+ __u8 hdmi_vic;
+ __u8 reserved[46];
+ } __attribute__ ((packed));
+
+ /* Interlaced or progressive format */
+ #define V4L2_DV_PROGRESSIVE 0
+ #define V4L2_DV_INTERLACED 1
+
+ /* Polarities. If bit is not set, it is assumed to be negative polarity */
+ #define V4L2_DV_VSYNC_POS_POL 0x00000001
+ #define V4L2_DV_HSYNC_POS_POL 0x00000002
+
+ /* Timings standards */
+ #define V4L2_DV_BT_STD_CEA861 (1 << 0) /* CEA-861 Digital TV Profile */
+ #define V4L2_DV_BT_STD_DMT (1 << 1) /* VESA Discrete Monitor Timings */
+ #define V4L2_DV_BT_STD_CVT (1 << 2) /* VESA Coordinated Video Timings */
+ #define V4L2_DV_BT_STD_GTF (1 << 3) /* VESA Generalized Timings Formula */
+ #define V4L2_DV_BT_STD_SDI (1 << 4) /* SDI Timings */
+
+ /* Flags */
+
+ /*
+ * CVT/GTF specific: timing uses reduced blanking (CVT) or the 'Secondary
+ * GTF' curve (GTF). In both cases the horizontal and/or vertical blanking
+ * intervals are reduced, allowing a higher resolution over the same
+ * bandwidth. This is a read-only flag.
+ */
+ #define V4L2_DV_FL_REDUCED_BLANKING (1 << 0)
+ /*
+ * CEA-861 specific: set for CEA-861 formats with a framerate of a multiple
+ * of six. These formats can be optionally played at 1 / 1.001 speed.
+ * This is a read-only flag.
+ */
+ #define V4L2_DV_FL_CAN_REDUCE_FPS (1 << 1)
+ /*
+ * CEA-861 specific: only valid for video transmitters, the flag is cleared
+ * by receivers.
+ * If the framerate of the format is a multiple of six, then the pixelclock
+ * used to set up the transmitter is divided by 1.001 to make it compatible
+ * with 60 Hz based standards such as NTSC and PAL-M that use a framerate of
+ * 29.97 Hz. Otherwise this flag is cleared. If the transmitter can't generate
+ * such frequencies, then the flag will also be cleared.
+ */
+ #define V4L2_DV_FL_REDUCED_FPS (1 << 2)
+ /*
+ * Specific to interlaced formats: if set, then field 1 is really one half-line
+ * longer and field 2 is really one half-line shorter, so each field has
+ * exactly the same number of half-lines. Whether half-lines can be detected
+ * or used depends on the hardware.
+ */
+ #define V4L2_DV_FL_HALF_LINE (1 << 3)
+ /*
+ * If set, then this is a Consumer Electronics (CE) video format. Such formats
+ * differ from other formats (commonly called IT formats) in that if RGB
+ * encoding is used then by default the RGB values use limited range (i.e.
+ * use the range 16-235) as opposed to 0-255. All formats defined in CEA-861
+ * except for the 640x480 format are CE formats.
+ */
+ #define V4L2_DV_FL_IS_CE_VIDEO (1 << 4)
+ /* Some formats like SMPTE-125M have an interlaced signal with an odd
+ * total height. For these formats, if this flag is set, the first
+ * field has the extra line. If not, it is the second field.
+ */
+ #define V4L2_DV_FL_FIRST_FIELD_EXTRA_LINE (1 << 5)
+ /*
+ * If set, then the picture_aspect field is valid. Otherwise assume that the
+ * pixels are square, so the picture aspect ratio is the same as the width to
+ * height ratio.
+ */
+ #define V4L2_DV_FL_HAS_PICTURE_ASPECT (1 << 6)
+ /*
+ * If set, then the cea861_vic field is valid and contains the Video
+ * Identification Code as per the CEA-861 standard.
+ */
+ #define V4L2_DV_FL_HAS_CEA861_VIC (1 << 7)
+ /*
+ * If set, then the hdmi_vic field is valid and contains the Video
+ * Identification Code as per the HDMI standard (HDMI Vendor Specific
+ * InfoFrame).
+ */
+ #define V4L2_DV_FL_HAS_HDMI_VIC (1 << 8)
+ /*
+ * CEA-861 specific: only valid for video receivers.
+ * If set, then HW can detect the difference between regular FPS and
+ * 1000/1001 FPS. Note: This flag is only valid for HDMI VIC codes with
+ * the V4L2_DV_FL_CAN_REDUCE_FPS flag set.
+ */
+ #define V4L2_DV_FL_CAN_DETECT_REDUCED_FPS (1 << 9)
+
+ /* A few useful defines to calculate the total blanking and frame sizes */
+ #define V4L2_DV_BT_BLANKING_WIDTH(bt) \
+ ((bt)->hfrontporch + (bt)->hsync + (bt)->hbackporch)
+ #define V4L2_DV_BT_FRAME_WIDTH(bt) \
+ ((bt)->width + V4L2_DV_BT_BLANKING_WIDTH(bt))
+ #define V4L2_DV_BT_BLANKING_HEIGHT(bt) \
+ ((bt)->vfrontporch + (bt)->vsync + (bt)->vbackporch + \
+ (bt)->il_vfrontporch + (bt)->il_vsync + (bt)->il_vbackporch)
+ #define V4L2_DV_BT_FRAME_HEIGHT(bt) \
+ ((bt)->height + V4L2_DV_BT_BLANKING_HEIGHT(bt))
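Taken together, these macros let userspace derive the refresh rate a timing implies. A small sketch, assuming `bt` was filled in by the driver (for instance through VIDIOC_QUERY_DV_TIMINGS, defined near the end of this header):

    /* Approximate frame rate in Hz: pixel clock divided by total pixels. */
    static unsigned int
    bt_refresh_rate (const struct v4l2_bt_timings *bt)
    {
      __u64 total = (__u64) V4L2_DV_BT_FRAME_WIDTH (bt) *
          V4L2_DV_BT_FRAME_HEIGHT (bt);

      return total ? (unsigned int) (bt->pixelclock / total) : 0;
    }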
+
+ /** struct v4l2_dv_timings - DV timings
+ * @type: the type of the timings
+ * @bt: BT656/1120 timings
+ */
+ struct v4l2_dv_timings {
+ __u32 type;
+ union {
+ struct v4l2_bt_timings bt;
+ __u32 reserved[32];
+ };
+ } __attribute__ ((packed));
+
+ /* Values for the type field */
+ #define V4L2_DV_BT_656_1120 0 /* BT.656/1120 timing type */
+
+
+ /** struct v4l2_enum_dv_timings - DV timings enumeration
+ * @index: enumeration index
+ * @pad: the pad number for which to enumerate timings (used with
+ * v4l-subdev nodes only)
+ * @reserved: must be zeroed
+ * @timings: the timings for the given index
+ */
+ struct v4l2_enum_dv_timings {
+ __u32 index;
+ __u32 pad;
+ __u32 reserved[2];
+ struct v4l2_dv_timings timings;
+ };
+
+ /** struct v4l2_bt_timings_cap - BT.656/BT.1120 timing capabilities
+ * @min_width: width in pixels
+ * @max_width: width in pixels
+ * @min_height: height in lines
+ * @max_height: height in lines
+ * @min_pixelclock: Pixel clock in HZ. Ex. 74.25MHz->74250000
+ * @max_pixelclock: Pixel clock in HZ. Ex. 74.25MHz->74250000
+ * @standards: Supported standards
+ * @capabilities: Supported capabilities
+ * @reserved: Must be zeroed
+ */
+ struct v4l2_bt_timings_cap {
+ __u32 min_width;
+ __u32 max_width;
+ __u32 min_height;
+ __u32 max_height;
+ __u64 min_pixelclock;
+ __u64 max_pixelclock;
+ __u32 standards;
+ __u32 capabilities;
+ __u32 reserved[16];
+ } __attribute__ ((packed));
+
+ /* Supports interlaced formats */
+ #define V4L2_DV_BT_CAP_INTERLACED (1 << 0)
+ /* Supports progressive formats */
+ #define V4L2_DV_BT_CAP_PROGRESSIVE (1 << 1)
+ /* Supports CVT/GTF reduced blanking */
+ #define V4L2_DV_BT_CAP_REDUCED_BLANKING (1 << 2)
+ /* Supports custom formats */
+ #define V4L2_DV_BT_CAP_CUSTOM (1 << 3)
+
+ /** struct v4l2_dv_timings_cap - DV timings capabilities
+ * @type: the type of the timings (same as in struct v4l2_dv_timings)
+ * @pad: the pad number for which to query capabilities (used with
+ * v4l-subdev nodes only)
+ * @bt: the BT656/1120 timings capabilities
+ */
+ struct v4l2_dv_timings_cap {
+ __u32 type;
+ __u32 pad;
+ __u32 reserved[2];
+ union {
+ struct v4l2_bt_timings_cap bt;
+ __u32 raw_data[32];
+ };
+ };
+
+
+ /*
+ * V I D E O I N P U T S
+ */
+ struct v4l2_input {
+ __u32 index; /* Which input */
+ __u8 name[32]; /* Label */
+ __u32 type; /* Type of input */
+ __u32 audioset; /* Associated audios (bitfield) */
+ __u32 tuner; /* enum v4l2_tuner_type */
+ v4l2_std_id std;
+ __u32 status;
+ __u32 capabilities;
+ __u32 reserved[3];
+ };
+
+ /* Values for the 'type' field */
+ #define V4L2_INPUT_TYPE_TUNER 1
+ #define V4L2_INPUT_TYPE_CAMERA 2
+ #define V4L2_INPUT_TYPE_TOUCH 3
+
+ /* field 'status' - general */
+ #define V4L2_IN_ST_NO_POWER 0x00000001 /* Attached device is off */
+ #define V4L2_IN_ST_NO_SIGNAL 0x00000002
+ #define V4L2_IN_ST_NO_COLOR 0x00000004
+
+ /* field 'status' - sensor orientation */
+ /* If sensor is mounted upside down set both bits */
+ #define V4L2_IN_ST_HFLIP 0x00000010 /* Frames are flipped horizontally */
+ #define V4L2_IN_ST_VFLIP 0x00000020 /* Frames are flipped vertically */
+
+ /* field 'status' - analog */
+ #define V4L2_IN_ST_NO_H_LOCK 0x00000100 /* No horizontal sync lock */
+ #define V4L2_IN_ST_COLOR_KILL 0x00000200 /* Color killer is active */
+ #define V4L2_IN_ST_NO_V_LOCK 0x00000400 /* No vertical sync lock */
+ #define V4L2_IN_ST_NO_STD_LOCK 0x00000800 /* No standard format lock */
+
+ /* field 'status' - digital */
+ #define V4L2_IN_ST_NO_SYNC 0x00010000 /* No synchronization lock */
+ #define V4L2_IN_ST_NO_EQU 0x00020000 /* No equalizer lock */
+ #define V4L2_IN_ST_NO_CARRIER 0x00040000 /* Carrier recovery failed */
+
+ /* field 'status' - VCR and set-top box */
+ #define V4L2_IN_ST_MACROVISION 0x01000000 /* Macrovision detected */
+ #define V4L2_IN_ST_NO_ACCESS 0x02000000 /* Conditional access denied */
+ #define V4L2_IN_ST_VTR 0x04000000 /* VTR time constant */
+
+ /* capabilities flags */
+ #define V4L2_IN_CAP_DV_TIMINGS 0x00000002 /* Supports S_DV_TIMINGS */
+ #define V4L2_IN_CAP_CUSTOM_TIMINGS V4L2_IN_CAP_DV_TIMINGS /* For compatibility */
+ #define V4L2_IN_CAP_STD 0x00000004 /* Supports S_STD */
+ #define V4L2_IN_CAP_NATIVE_SIZE 0x00000008 /* Supports setting native size */
+
+ /*
+ * V I D E O O U T P U T S
+ */
+ struct v4l2_output {
+ __u32 index; /* Which output */
+ __u8 name[32]; /* Label */
+ __u32 type; /* Type of output */
+ __u32 audioset; /* Associated audios (bitfield) */
+ __u32 modulator; /* Associated modulator */
+ v4l2_std_id std;
+ __u32 capabilities;
+ __u32 reserved[3];
+ };
+ /* Values for the 'type' field */
+ #define V4L2_OUTPUT_TYPE_MODULATOR 1
+ #define V4L2_OUTPUT_TYPE_ANALOG 2
+ #define V4L2_OUTPUT_TYPE_ANALOGVGAOVERLAY 3
+
+ /* capabilities flags */
+ #define V4L2_OUT_CAP_DV_TIMINGS 0x00000002 /* Supports S_DV_TIMINGS */
+ #define V4L2_OUT_CAP_CUSTOM_TIMINGS V4L2_OUT_CAP_DV_TIMINGS /* For compatibility */
+ #define V4L2_OUT_CAP_STD 0x00000004 /* Supports S_STD */
+ #define V4L2_OUT_CAP_NATIVE_SIZE 0x00000008 /* Supports setting native size */
+
+ /*
+ * C O N T R O L S
+ */
+ struct v4l2_control {
+ __u32 id;
+ __s32 value;
+ };
+
+ struct v4l2_ext_control {
+ __u32 id;
+ __u32 size;
+ __u32 reserved2[1];
+ union {
+ __s32 value;
+ __s64 value64;
+ char __user *string;
+ __u8 __user *p_u8;
+ __u16 __user *p_u16;
+ __u32 __user *p_u32;
+ struct v4l2_area __user *p_area;
+ void __user *ptr;
+ };
+ } __attribute__ ((packed));
+
+ struct v4l2_ext_controls {
+ union {
+ #ifndef __KERNEL__
+ __u32 ctrl_class;
+ #endif
+ __u32 which;
+ };
+ __u32 count;
+ __u32 error_idx;
+ __s32 request_fd;
+ __u32 reserved[1];
+ struct v4l2_ext_control *controls;
+ };
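A short sketch of the calling convention, assuming a control ID such as V4L2_CID_BRIGHTNESS from <linux/v4l2-controls.h>; the helper name is illustrative, and V4L2_CTRL_WHICH_CUR_VAL and VIDIOC_S_EXT_CTRLS are defined elsewhere in this header:

    #include <string.h>
    #include <sys/ioctl.h>

    /* Set one scalar control through the extended-control interface. */
    static int
    set_scalar_control (int fd, __u32 id, __s32 value)
    {
      struct v4l2_ext_control ctrl;
      struct v4l2_ext_controls ctrls;

      memset (&ctrl, 0, sizeof (ctrl));
      memset (&ctrls, 0, sizeof (ctrls));
      ctrl.id = id;
      ctrl.value = value;
      ctrls.which = V4L2_CTRL_WHICH_CUR_VAL;  /* operate on current values */
      ctrls.count = 1;
      ctrls.controls = &ctrl;

      return ioctl (fd, VIDIOC_S_EXT_CTRLS, &ctrls);
    }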
+
+ #define V4L2_CTRL_ID_MASK (0x0fffffff)
+ #ifndef __KERNEL__
+ #define V4L2_CTRL_ID2CLASS(id) ((id) & 0x0fff0000UL)
+ #endif
+ #define V4L2_CTRL_ID2WHICH(id) ((id) & 0x0fff0000UL)
+ #define V4L2_CTRL_DRIVER_PRIV(id) (((id) & 0xffff) >= 0x1000)
+ #define V4L2_CTRL_MAX_DIMS (4)
+ #define V4L2_CTRL_WHICH_CUR_VAL 0
+ #define V4L2_CTRL_WHICH_DEF_VAL 0x0f000000
+ #define V4L2_CTRL_WHICH_REQUEST_VAL 0x0f010000
+
+ enum v4l2_ctrl_type {
+ V4L2_CTRL_TYPE_INTEGER = 1,
+ V4L2_CTRL_TYPE_BOOLEAN = 2,
+ V4L2_CTRL_TYPE_MENU = 3,
+ V4L2_CTRL_TYPE_BUTTON = 4,
+ V4L2_CTRL_TYPE_INTEGER64 = 5,
+ V4L2_CTRL_TYPE_CTRL_CLASS = 6,
+ V4L2_CTRL_TYPE_STRING = 7,
+ V4L2_CTRL_TYPE_BITMASK = 8,
+ V4L2_CTRL_TYPE_INTEGER_MENU = 9,
+
+ /* Compound types are >= 0x0100 */
+ V4L2_CTRL_COMPOUND_TYPES = 0x0100,
+ V4L2_CTRL_TYPE_U8 = 0x0100,
+ V4L2_CTRL_TYPE_U16 = 0x0101,
+ V4L2_CTRL_TYPE_U32 = 0x0102,
+ V4L2_CTRL_TYPE_AREA = 0x0106,
+ };
+
+ /* Used in the VIDIOC_QUERYCTRL ioctl for querying controls */
+ struct v4l2_queryctrl {
+ __u32 id;
+ __u32 type; /* enum v4l2_ctrl_type */
+ __u8 name[32]; /* Whatever */
+ __s32 minimum; /* Note signedness */
+ __s32 maximum;
+ __s32 step;
+ __s32 default_value;
+ __u32 flags;
+ __u32 reserved[2];
+ };
+
+ /* Used in the VIDIOC_QUERY_EXT_CTRL ioctl for querying extended controls */
+ struct v4l2_query_ext_ctrl {
+ __u32 id;
+ __u32 type;
+ char name[32];
+ __s64 minimum;
+ __s64 maximum;
+ __u64 step;
+ __s64 default_value;
+ __u32 flags;
+ __u32 elem_size;
+ __u32 elems;
+ __u32 nr_of_dims;
+ __u32 dims[V4L2_CTRL_MAX_DIMS];
+ __u32 reserved[32];
+ };
+
+ /* Used in the VIDIOC_QUERYMENU ioctl for querying menu items */
+ struct v4l2_querymenu {
+ __u32 id;
+ __u32 index;
+ union {
+ __u8 name[32]; /* Whatever */
+ __s64 value;
+ };
+ __u32 reserved;
+ } __attribute__ ((packed));
+
+ /* Control flags */
+ #define V4L2_CTRL_FLAG_DISABLED 0x0001
+ #define V4L2_CTRL_FLAG_GRABBED 0x0002
+ #define V4L2_CTRL_FLAG_READ_ONLY 0x0004
+ #define V4L2_CTRL_FLAG_UPDATE 0x0008
+ #define V4L2_CTRL_FLAG_INACTIVE 0x0010
+ #define V4L2_CTRL_FLAG_SLIDER 0x0020
+ #define V4L2_CTRL_FLAG_WRITE_ONLY 0x0040
+ #define V4L2_CTRL_FLAG_VOLATILE 0x0080
+ #define V4L2_CTRL_FLAG_HAS_PAYLOAD 0x0100
+ #define V4L2_CTRL_FLAG_EXECUTE_ON_WRITE 0x0200
+ #define V4L2_CTRL_FLAG_MODIFY_LAYOUT 0x0400
+
+ /* Query flags, to be ORed with the control ID */
+ #define V4L2_CTRL_FLAG_NEXT_CTRL 0x80000000
+ #define V4L2_CTRL_FLAG_NEXT_COMPOUND 0x40000000
+
+ /* User-class control IDs defined by V4L2 */
+ #define V4L2_CID_MAX_CTRLS 1024
+ /* IDs reserved for driver specific controls */
+ #define V4L2_CID_PRIVATE_BASE 0x08000000
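The NEXT_CTRL query flag above enables the standard control-enumeration idiom; a minimal sketch (helper name hypothetical, VIDIOC_QUERYCTRL is defined near the end of this header):

    #include <stdio.h>
    #include <string.h>
    #include <sys/ioctl.h>

    /* Walk every control the device exposes, driver-private ones included,
     * by ORing V4L2_CTRL_FLAG_NEXT_CTRL into the id until EINVAL is hit. */
    static void
    list_controls (int fd)
    {
      struct v4l2_queryctrl qc;

      memset (&qc, 0, sizeof (qc));
      qc.id = V4L2_CTRL_FLAG_NEXT_CTRL;
      while (ioctl (fd, VIDIOC_QUERYCTRL, &qc) == 0) {
        if (!(qc.flags & V4L2_CTRL_FLAG_DISABLED))
          printf ("0x%08x: %s\n", qc.id, (const char *) qc.name);
        qc.id |= V4L2_CTRL_FLAG_NEXT_CTRL;
      }
    }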
+
+
+ /*
+ * T U N I N G
+ */
+ struct v4l2_tuner {
+ __u32 index;
+ __u8 name[32];
+ __u32 type; /* enum v4l2_tuner_type */
+ __u32 capability;
+ __u32 rangelow;
+ __u32 rangehigh;
+ __u32 rxsubchans;
+ __u32 audmode;
+ __s32 signal;
+ __s32 afc;
+ __u32 reserved[4];
+ };
+
+ struct v4l2_modulator {
+ __u32 index;
+ __u8 name[32];
+ __u32 capability;
+ __u32 rangelow;
+ __u32 rangehigh;
+ __u32 txsubchans;
+ __u32 type; /* enum v4l2_tuner_type */
+ __u32 reserved[3];
+ };
+
+ /* Flags for the 'capability' field */
+ #define V4L2_TUNER_CAP_LOW 0x0001
+ #define V4L2_TUNER_CAP_NORM 0x0002
+ #define V4L2_TUNER_CAP_HWSEEK_BOUNDED 0x0004
+ #define V4L2_TUNER_CAP_HWSEEK_WRAP 0x0008
+ #define V4L2_TUNER_CAP_STEREO 0x0010
+ #define V4L2_TUNER_CAP_LANG2 0x0020
+ #define V4L2_TUNER_CAP_SAP 0x0020
+ #define V4L2_TUNER_CAP_LANG1 0x0040
+ #define V4L2_TUNER_CAP_RDS 0x0080
+ #define V4L2_TUNER_CAP_RDS_BLOCK_IO 0x0100
+ #define V4L2_TUNER_CAP_RDS_CONTROLS 0x0200
+ #define V4L2_TUNER_CAP_FREQ_BANDS 0x0400
+ #define V4L2_TUNER_CAP_HWSEEK_PROG_LIM 0x0800
+ #define V4L2_TUNER_CAP_1HZ 0x1000
+
+ /* Flags for the 'rxsubchans' field */
+ #define V4L2_TUNER_SUB_MONO 0x0001
+ #define V4L2_TUNER_SUB_STEREO 0x0002
+ #define V4L2_TUNER_SUB_LANG2 0x0004
+ #define V4L2_TUNER_SUB_SAP 0x0004
+ #define V4L2_TUNER_SUB_LANG1 0x0008
+ #define V4L2_TUNER_SUB_RDS 0x0010
+
+ /* Values for the 'audmode' field */
+ #define V4L2_TUNER_MODE_MONO 0x0000
+ #define V4L2_TUNER_MODE_STEREO 0x0001
+ #define V4L2_TUNER_MODE_LANG2 0x0002
+ #define V4L2_TUNER_MODE_SAP 0x0002
+ #define V4L2_TUNER_MODE_LANG1 0x0003
+ #define V4L2_TUNER_MODE_LANG1_LANG2 0x0004
+
+ struct v4l2_frequency {
+ __u32 tuner;
+ __u32 type; /* enum v4l2_tuner_type */
+ __u32 frequency;
+ __u32 reserved[8];
+ };
+
+ #define V4L2_BAND_MODULATION_VSB (1 << 1)
+ #define V4L2_BAND_MODULATION_FM (1 << 2)
+ #define V4L2_BAND_MODULATION_AM (1 << 3)
+
+ struct v4l2_frequency_band {
+ __u32 tuner;
+ __u32 type; /* enum v4l2_tuner_type */
+ __u32 index;
+ __u32 capability;
+ __u32 rangelow;
+ __u32 rangehigh;
+ __u32 modulation;
+ __u32 reserved[9];
+ };
+
+ struct v4l2_hw_freq_seek {
+ __u32 tuner;
+ __u32 type; /* enum v4l2_tuner_type */
+ __u32 seek_upward;
+ __u32 wrap_around;
+ __u32 spacing;
+ __u32 rangelow;
+ __u32 rangehigh;
+ __u32 reserved[5];
+ };
+
+ /*
+ * R D S
+ */
+
+ struct v4l2_rds_data {
+ __u8 lsb;
+ __u8 msb;
+ __u8 block;
+ } __attribute__ ((packed));
+
+ #define V4L2_RDS_BLOCK_MSK 0x7
+ #define V4L2_RDS_BLOCK_A 0
+ #define V4L2_RDS_BLOCK_B 1
+ #define V4L2_RDS_BLOCK_C 2
+ #define V4L2_RDS_BLOCK_D 3
+ #define V4L2_RDS_BLOCK_C_ALT 4
+ #define V4L2_RDS_BLOCK_INVALID 7
+
+ #define V4L2_RDS_BLOCK_CORRECTED 0x40
+ #define V4L2_RDS_BLOCK_ERROR 0x80
+
+ /*
+ * A U D I O
+ */
+ struct v4l2_audio {
+ __u32 index;
+ __u8 name[32];
+ __u32 capability;
+ __u32 mode;
+ __u32 reserved[2];
+ };
+
+ /* Flags for the 'capability' field */
+ #define V4L2_AUDCAP_STEREO 0x00001
+ #define V4L2_AUDCAP_AVL 0x00002
+
+ /* Flags for the 'mode' field */
+ #define V4L2_AUDMODE_AVL 0x00001
+
+ struct v4l2_audioout {
+ __u32 index;
+ __u8 name[32];
+ __u32 capability;
+ __u32 mode;
+ __u32 reserved[2];
+ };
+
+ /*
+ * M P E G S E R V I C E S
+ */
+ #if 1
+ #define V4L2_ENC_IDX_FRAME_I (0)
+ #define V4L2_ENC_IDX_FRAME_P (1)
+ #define V4L2_ENC_IDX_FRAME_B (2)
+ #define V4L2_ENC_IDX_FRAME_MASK (0xf)
+
+ struct v4l2_enc_idx_entry {
+ __u64 offset;
+ __u64 pts;
+ __u32 length;
+ __u32 flags;
+ __u32 reserved[2];
+ };
+
+ #define V4L2_ENC_IDX_ENTRIES (64)
+ struct v4l2_enc_idx {
+ __u32 entries;
+ __u32 entries_cap;
+ __u32 reserved[4];
+ struct v4l2_enc_idx_entry entry[V4L2_ENC_IDX_ENTRIES];
+ };
+
+
+ #define V4L2_ENC_CMD_START (0)
+ #define V4L2_ENC_CMD_STOP (1)
+ #define V4L2_ENC_CMD_PAUSE (2)
+ #define V4L2_ENC_CMD_RESUME (3)
+
+ /* Flags for V4L2_ENC_CMD_STOP */
+ #define V4L2_ENC_CMD_STOP_AT_GOP_END (1 << 0)
+
+ struct v4l2_encoder_cmd {
+ __u32 cmd;
+ __u32 flags;
+ union {
+ struct {
+ __u32 data[8];
+ } raw;
+ };
+ };
+
+ /* Decoder commands */
+ #define V4L2_DEC_CMD_START (0)
+ #define V4L2_DEC_CMD_STOP (1)
+ #define V4L2_DEC_CMD_PAUSE (2)
+ #define V4L2_DEC_CMD_RESUME (3)
+ #define V4L2_DEC_CMD_FLUSH (4)
+
+ /* Flags for V4L2_DEC_CMD_START */
+ #define V4L2_DEC_CMD_START_MUTE_AUDIO (1 << 0)
+
+ /* Flags for V4L2_DEC_CMD_PAUSE */
+ #define V4L2_DEC_CMD_PAUSE_TO_BLACK (1 << 0)
+
+ /* Flags for V4L2_DEC_CMD_STOP */
+ #define V4L2_DEC_CMD_STOP_TO_BLACK (1 << 0)
+ #define V4L2_DEC_CMD_STOP_IMMEDIATELY (1 << 1)
+
+ /* Play format requirements (returned by the driver): */
+
+ /* The decoder has no special format requirements */
+ #define V4L2_DEC_START_FMT_NONE (0)
+ /* The decoder requires full GOPs */
+ #define V4L2_DEC_START_FMT_GOP (1)
+
+ /* The structure must be zeroed before use by the application.
+    This ensures it can be extended safely in the future. */
+ struct v4l2_decoder_cmd {
+ __u32 cmd;
+ __u32 flags;
+ union {
+ struct {
+ __u64 pts;
+ } stop;
+
+ struct {
+ /* 0 or 1000 specifies normal speed,
+ 1 specifies forward single stepping,
+ -1 specifies backward single stepping,
+ >1: playback at speed/1000 of the normal speed,
+ <-1: reverse playback at (-speed/1000) of the normal speed. */
+ __s32 speed;
+ __u32 format;
+ } start;
+
+ struct {
+ __u32 data[16];
+ } raw;
+ };
+ };
+ #endif
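As a concrete example of the speed semantics documented in struct v4l2_decoder_cmd above, a hedged sketch that asks a decoder for 2x forward playback (VIDIOC_DECODER_CMD is defined near the end of this header; the helper name is illustrative):

    #include <string.h>
    #include <sys/ioctl.h>

    static int
    decoder_play_2x (int fd)
    {
      struct v4l2_decoder_cmd cmd;

      memset (&cmd, 0, sizeof (cmd));   /* required: zero before use */
      cmd.cmd = V4L2_DEC_CMD_START;
      cmd.start.speed = 2000;           /* speed/1000 of normal, i.e. 2x */
      return ioctl (fd, VIDIOC_DECODER_CMD, &cmd);
    }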
+
+
+ /*
+ * D A T A S E R V I C E S ( V B I )
+ *
+ * Data services API by Michael Schimek
+ */
+
+ /* Raw VBI */
+ struct v4l2_vbi_format {
+ __u32 sampling_rate; /* in 1 Hz */
+ __u32 offset;
+ __u32 samples_per_line;
+ __u32 sample_format; /* V4L2_PIX_FMT_* */
+ __s32 start[2];
+ __u32 count[2];
+ __u32 flags; /* V4L2_VBI_* */
+ __u32 reserved[2]; /* must be zero */
+ };
+
+ /* VBI flags */
+ #define V4L2_VBI_UNSYNC (1 << 0)
+ #define V4L2_VBI_INTERLACED (1 << 1)
+
+ /* ITU-R start lines for each field */
+ #define V4L2_VBI_ITU_525_F1_START (1)
+ #define V4L2_VBI_ITU_525_F2_START (264)
+ #define V4L2_VBI_ITU_625_F1_START (1)
+ #define V4L2_VBI_ITU_625_F2_START (314)
+
+ /* Sliced VBI
+ *
+ * This is a proposed V4L2 API to allow sliced VBI, required for some
+ * hardware encoders. It may change without notice in the definitive
+ * implementation.
+ */
+
+ struct v4l2_sliced_vbi_format {
+ __u16 service_set;
+ /* service_lines[0][...] specifies lines 0-23 (1-23 used) of the first field
+ service_lines[1][...] specifies lines 0-23 (1-23 used) of the second field
+ (equals frame lines 313-336 for 625 line video
+ standards, 263-286 for 525 line standards) */
+ __u16 service_lines[2][24];
+ __u32 io_size;
+ __u32 reserved[2]; /* must be zero */
+ };
+
+ /* Teletext World System Teletext
+ (WST), defined on ITU-R BT.653-2 */
+ #define V4L2_SLICED_TELETEXT_B (0x0001)
+ /* Video Program System, defined on ETS 300 231*/
+ #define V4L2_SLICED_VPS (0x0400)
+ /* Closed Caption, defined on EIA-608 */
+ #define V4L2_SLICED_CAPTION_525 (0x1000)
+ /* Wide Screen System, defined on ITU-R BT1119.1 */
+ #define V4L2_SLICED_WSS_625 (0x4000)
+
+ #define V4L2_SLICED_VBI_525 (V4L2_SLICED_CAPTION_525)
+ #define V4L2_SLICED_VBI_625 (V4L2_SLICED_TELETEXT_B | V4L2_SLICED_VPS | V4L2_SLICED_WSS_625)
+
+ struct v4l2_sliced_vbi_cap {
+ __u16 service_set;
+ /* service_lines[0][...] specifies lines 0-23 (1-23 used) of the first field
+ service_lines[1][...] specifies lines 0-23 (1-23 used) of the second field
+ (equals frame lines 313-336 for 625 line video
+ standards, 263-286 for 525 line standards) */
+ __u16 service_lines[2][24];
+ __u32 type; /* enum v4l2_buf_type */
+ __u32 reserved[3]; /* must be 0 */
+ };
+
+ struct v4l2_sliced_vbi_data {
+ __u32 id;
+ __u32 field; /* 0: first field, 1: second field */
+ __u32 line; /* 1-23 */
+ __u32 reserved; /* must be 0 */
+ __u8 data[48];
+ };
+
+ /*
+ * Sliced VBI data inserted into MPEG Streams
+ */
+
+ /*
+ * V4L2_MPEG_STREAM_VBI_FMT_IVTV:
+ *
+ * Structure of payload contained in an MPEG 2 Private Stream 1 PES Packet in an
+ * MPEG-2 Program Pack that contains V4L2_MPEG_STREAM_VBI_FMT_IVTV Sliced VBI
+ * data
+ *
+ * Note, the MPEG-2 Program Pack and Private Stream 1 PES packet header
+ * definitions are not included here. See the MPEG-2 specifications for details
+ * on these headers.
+ */
+
+ /* Line type IDs */
+ #define V4L2_MPEG_VBI_IVTV_TELETEXT_B (1)
+ #define V4L2_MPEG_VBI_IVTV_CAPTION_525 (4)
+ #define V4L2_MPEG_VBI_IVTV_WSS_625 (5)
+ #define V4L2_MPEG_VBI_IVTV_VPS (7)
+
+ struct v4l2_mpeg_vbi_itv0_line {
+ __u8 id; /* One of V4L2_MPEG_VBI_IVTV_* above */
+ __u8 data[42]; /* Sliced VBI data for the line */
+ } __attribute__ ((packed));
+
+ struct v4l2_mpeg_vbi_itv0 {
+ __le32 linemask[2]; /* Bitmasks of VBI service lines present */
+ struct v4l2_mpeg_vbi_itv0_line line[35];
+ } __attribute__ ((packed));
+
+ struct v4l2_mpeg_vbi_ITV0 {
+ struct v4l2_mpeg_vbi_itv0_line line[36];
+ } __attribute__ ((packed));
+
+ #define V4L2_MPEG_VBI_IVTV_MAGIC0 "itv0"
+ #define V4L2_MPEG_VBI_IVTV_MAGIC1 "ITV0"
+
+ struct v4l2_mpeg_vbi_fmt_ivtv {
+ __u8 magic[4];
+ union {
+ struct v4l2_mpeg_vbi_itv0 itv0;
+ struct v4l2_mpeg_vbi_ITV0 ITV0;
+ };
+ } __attribute__ ((packed));
+
+ /*
+ * A G G R E G A T E S T R U C T U R E S
+ */
+
+ /**
+ * struct v4l2_plane_pix_format - additional, per-plane format definition
+ * @sizeimage: maximum size in bytes required for data, for which
+ * this plane will be used
+ * @bytesperline: distance in bytes between the leftmost pixels in two
+ * adjacent lines
+ */
+ struct v4l2_plane_pix_format {
+ __u32 sizeimage;
+ __u32 bytesperline;
+ __u16 reserved[6];
+ } __attribute__ ((packed));
+
+ /**
+ * struct v4l2_pix_format_mplane - multiplanar format definition
+ * @width: image width in pixels
+ * @height: image height in pixels
+ * @pixelformat: little endian four character code (fourcc)
+ * @field: enum v4l2_field; field order (for interlaced video)
+ * @colorspace: enum v4l2_colorspace; supplemental to pixelformat
+ * @plane_fmt: per-plane information
+ * @num_planes: number of planes for this format
+ * @flags: format flags (V4L2_PIX_FMT_FLAG_*)
+ * @ycbcr_enc: enum v4l2_ycbcr_encoding, Y'CbCr encoding
+ * @quantization: enum v4l2_quantization, colorspace quantization
+ * @xfer_func: enum v4l2_xfer_func, colorspace transfer function
+ */
+ struct v4l2_pix_format_mplane {
+ __u32 width;
+ __u32 height;
+ __u32 pixelformat;
+ __u32 field;
+ __u32 colorspace;
+
+ struct v4l2_plane_pix_format plane_fmt[VIDEO_MAX_PLANES];
+ __u8 num_planes;
+ __u8 flags;
+ union {
+ __u8 ycbcr_enc;
+ __u8 hsv_enc;
+ };
+ __u8 quantization;
+ __u8 xfer_func;
+ __u8 reserved[7];
+ } __attribute__ ((packed));
+
+ /**
+ * struct v4l2_sdr_format - SDR format definition
+ * @pixelformat: little endian four character code (fourcc)
+ * @buffersize: maximum size in bytes required for data
+ */
+ struct v4l2_sdr_format {
+ __u32 pixelformat;
+ __u32 buffersize;
+ __u8 reserved[24];
+ } __attribute__ ((packed));
+
+ /**
+ * struct v4l2_meta_format - metadata format definition
+ * @dataformat: little endian four character code (fourcc)
+ * @buffersize: maximum size in bytes required for data
+ */
+ struct v4l2_meta_format {
+ __u32 dataformat;
+ __u32 buffersize;
+ } __attribute__ ((packed));
+
+ /**
+ * struct v4l2_format - stream data format
+ * @type: enum v4l2_buf_type; type of the data stream
+ * @pix: definition of an image format
+ * @pix_mp: definition of a multiplanar image format
+ * @win: definition of an overlaid image
+ * @vbi: raw VBI capture or output parameters
+ * @sliced: sliced VBI capture or output parameters
+ * @raw_data: placeholder for future extensions and custom formats
+ */
+ struct v4l2_format {
+ __u32 type;
+ union {
+ struct v4l2_pix_format pix; /* V4L2_BUF_TYPE_VIDEO_CAPTURE */
+ struct v4l2_pix_format_mplane pix_mp; /* V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE */
+ struct v4l2_window win; /* V4L2_BUF_TYPE_VIDEO_OVERLAY */
+ struct v4l2_vbi_format vbi; /* V4L2_BUF_TYPE_VBI_CAPTURE */
+ struct v4l2_sliced_vbi_format sliced; /* V4L2_BUF_TYPE_SLICED_VBI_CAPTURE */
+ struct v4l2_sdr_format sdr; /* V4L2_BUF_TYPE_SDR_CAPTURE */
+ struct v4l2_meta_format meta; /* V4L2_BUF_TYPE_META_CAPTURE */
+ __u8 raw_data[200]; /* user-defined */
+ } fmt;
+ };
+
+ /* Stream type-dependent parameters
+ */
+ struct v4l2_streamparm {
+ __u32 type; /* enum v4l2_buf_type */
+ union {
+ struct v4l2_captureparm capture;
+ struct v4l2_outputparm output;
+ __u8 raw_data[200]; /* user-defined */
+ } parm;
+ };
+
+ /*
+ * E V E N T S
+ */
+
+ #define V4L2_EVENT_ALL 0
+ #define V4L2_EVENT_VSYNC 1
+ #define V4L2_EVENT_EOS 2
+ #define V4L2_EVENT_CTRL 3
+ #define V4L2_EVENT_FRAME_SYNC 4
+ #define V4L2_EVENT_SOURCE_CHANGE 5
+ #define V4L2_EVENT_MOTION_DET 6
+ #define V4L2_EVENT_PRIVATE_START 0x08000000
+
+ /* Payload for V4L2_EVENT_VSYNC */
+ struct v4l2_event_vsync {
+ /* Can be V4L2_FIELD_ANY, _NONE, _TOP or _BOTTOM */
+ __u8 field;
+ } __attribute__ ((packed));
+
+ /* Payload for V4L2_EVENT_CTRL */
+ #define V4L2_EVENT_CTRL_CH_VALUE (1 << 0)
+ #define V4L2_EVENT_CTRL_CH_FLAGS (1 << 1)
+ #define V4L2_EVENT_CTRL_CH_RANGE (1 << 2)
+
+ struct v4l2_event_ctrl {
+ __u32 changes;
+ __u32 type;
+ union {
+ __s32 value;
+ __s64 value64;
+ };
+ __u32 flags;
+ __s32 minimum;
+ __s32 maximum;
+ __s32 step;
+ __s32 default_value;
+ };
+
+ struct v4l2_event_frame_sync {
+ __u32 frame_sequence;
+ };
+
+ #define V4L2_EVENT_SRC_CH_RESOLUTION (1 << 0)
+
+ struct v4l2_event_src_change {
+ __u32 changes;
+ };
+
+ #define V4L2_EVENT_MD_FL_HAVE_FRAME_SEQ (1 << 0)
+
+ /**
+ * struct v4l2_event_motion_det - motion detection event
+ * @flags: if V4L2_EVENT_MD_FL_HAVE_FRAME_SEQ is set, then the
+ * frame_sequence field is valid.
+ * @frame_sequence: the frame sequence number associated with this event.
+ * @region_mask: which regions detected motion.
+ */
+ struct v4l2_event_motion_det {
+ __u32 flags;
+ __u32 frame_sequence;
+ __u32 region_mask;
+ };
+
+ struct v4l2_event {
+ __u32 type;
+ union {
+ struct v4l2_event_vsync vsync;
+ struct v4l2_event_ctrl ctrl;
+ struct v4l2_event_frame_sync frame_sync;
+ struct v4l2_event_src_change src_change;
+ struct v4l2_event_motion_det motion_det;
+ __u8 data[64];
+ } u;
+ __u32 pending;
+ __u32 sequence;
+ #ifdef __KERNEL__
+ struct __kernel_timespec timestamp;
+ #else
+ struct timespec timestamp;
+ #endif
+ __u32 id;
+ __u32 reserved[8];
+ };
+
+ #define V4L2_EVENT_SUB_FL_SEND_INITIAL (1 << 0)
+ #define V4L2_EVENT_SUB_FL_ALLOW_FEEDBACK (1 << 1)
+
+ struct v4l2_event_subscription {
+ __u32 type;
+ __u32 id;
+ __u32 flags;
+ __u32 reserved[5];
+ };
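A minimal sketch of the subscribe/dequeue flow, with the usual caveat that a real program would poll() for POLLPRI before dequeuing; the helper name is illustrative:

    #include <string.h>
    #include <sys/ioctl.h>

    /* Subscribe to end-of-stream events, then dequeue one. */
    static int
    wait_for_eos (int fd)
    {
      struct v4l2_event_subscription sub;
      struct v4l2_event ev;

      memset (&sub, 0, sizeof (sub));
      sub.type = V4L2_EVENT_EOS;
      if (ioctl (fd, VIDIOC_SUBSCRIBE_EVENT, &sub) < 0)
        return -1;

      memset (&ev, 0, sizeof (ev));
      if (ioctl (fd, VIDIOC_DQEVENT, &ev) < 0)  /* poll() first in practice */
        return -1;
      return (ev.type == V4L2_EVENT_EOS) ? 0 : -1;
    }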
+
+ /*
+ * A D V A N C E D D E B U G G I N G
+ *
+ * NOTE: EXPERIMENTAL API, NEVER RELY ON THIS IN APPLICATIONS!
+ * FOR DEBUGGING, TESTING AND INTERNAL USE ONLY!
+ */
+
+ /* VIDIOC_DBG_G_REGISTER and VIDIOC_DBG_S_REGISTER */
+
+ #define V4L2_CHIP_MATCH_BRIDGE 0 /* Match against chip ID on the bridge (0 for the bridge) */
+ #define V4L2_CHIP_MATCH_SUBDEV 4 /* Match against subdev index */
+
+ /* The following four defines are no longer in use */
+ #define V4L2_CHIP_MATCH_HOST V4L2_CHIP_MATCH_BRIDGE
+ #define V4L2_CHIP_MATCH_I2C_DRIVER 1 /* Match against I2C driver name */
+ #define V4L2_CHIP_MATCH_I2C_ADDR 2 /* Match against I2C 7-bit address */
+ #define V4L2_CHIP_MATCH_AC97 3 /* Match against ancillary AC97 chip */
+
+ struct v4l2_dbg_match {
+ __u32 type; /* Match type */
+ union { /* Match this chip, meaning determined by type */
+ __u32 addr;
+ char name[32];
+ };
+ } __attribute__ ((packed));
+
+ struct v4l2_dbg_register {
+ struct v4l2_dbg_match match;
+ __u32 size; /* register size in bytes */
+ __u64 reg;
+ __u64 val;
+ } __attribute__ ((packed));
+
+ #define V4L2_CHIP_FL_READABLE (1 << 0)
+ #define V4L2_CHIP_FL_WRITABLE (1 << 1)
+
+ /* VIDIOC_DBG_G_CHIP_INFO */
+ struct v4l2_dbg_chip_info {
+ struct v4l2_dbg_match match;
+ char name[32];
+ __u32 flags;
+ __u32 reserved[32];
+ } __attribute__ ((packed));
+
+ /**
+ * struct v4l2_create_buffers - VIDIOC_CREATE_BUFS argument
+ * @index: on return, index of the first created buffer
+ * @count: entry: number of requested buffers,
+ * return: number of created buffers
+ * @memory: enum v4l2_memory; buffer memory type
+ * @format: frame format, for which buffers are requested
+ * @capabilities: capabilities of this buffer type.
+ * @flags: additional buffer management attributes (ignored unless the
+ * queue has V4L2_BUF_CAP_SUPPORTS_MMAP_CACHE_HINTS capability
+ * and configured for MMAP streaming I/O).
+ * @reserved: future extensions
+ */
+ struct v4l2_create_buffers {
+ __u32 index;
+ __u32 count;
+ __u32 memory;
+ struct v4l2_format format;
+ __u32 capabilities;
+ __u32 flags;
+ __u32 reserved[6];
+ };
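A small usage sketch, assuming the queue format was already negotiated with VIDIOC_S_FMT and that MMAP streaming I/O is in use; the helper name is illustrative:

    #include <string.h>
    #include <sys/ioctl.h>

    /* Append `count` extra MMAP buffers to the queue; on success, return
     * the index of the first newly created buffer. */
    static int
    create_extra_buffers (int fd, const struct v4l2_format *fmt, __u32 count)
    {
      struct v4l2_create_buffers create;

      memset (&create, 0, sizeof (create));
      create.count = count;
      create.memory = V4L2_MEMORY_MMAP;
      create.format = *fmt;
      if (ioctl (fd, VIDIOC_CREATE_BUFS, &create) < 0)
        return -1;
      return (int) create.index;
    }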
+
+ /*
+ * I O C T L C O D E S F O R V I D E O D E V I C E S
+ *
+ */
+ #define VIDIOC_QUERYCAP _IOR('V', 0, struct v4l2_capability)
+ #define VIDIOC_ENUM_FMT _IOWR('V', 2, struct v4l2_fmtdesc)
+ #define VIDIOC_G_FMT _IOWR('V', 4, struct v4l2_format)
+ #define VIDIOC_S_FMT _IOWR('V', 5, struct v4l2_format)
+ #define VIDIOC_REQBUFS _IOWR('V', 8, struct v4l2_requestbuffers)
+ #define VIDIOC_QUERYBUF _IOWR('V', 9, struct v4l2_buffer)
+ #define VIDIOC_G_FBUF _IOR('V', 10, struct v4l2_framebuffer)
+ #define VIDIOC_S_FBUF _IOW('V', 11, struct v4l2_framebuffer)
+ #define VIDIOC_OVERLAY _IOW('V', 14, int)
+ #define VIDIOC_QBUF _IOWR('V', 15, struct v4l2_buffer)
+ #define VIDIOC_EXPBUF _IOWR('V', 16, struct v4l2_exportbuffer)
+ #define VIDIOC_DQBUF _IOWR('V', 17, struct v4l2_buffer)
+ #define VIDIOC_STREAMON _IOW('V', 18, int)
+ #define VIDIOC_STREAMOFF _IOW('V', 19, int)
+ #define VIDIOC_G_PARM _IOWR('V', 21, struct v4l2_streamparm)
+ #define VIDIOC_S_PARM _IOWR('V', 22, struct v4l2_streamparm)
+ #define VIDIOC_G_STD _IOR('V', 23, v4l2_std_id)
+ #define VIDIOC_S_STD _IOW('V', 24, v4l2_std_id)
+ #define VIDIOC_ENUMSTD _IOWR('V', 25, struct v4l2_standard)
+ #define VIDIOC_ENUMINPUT _IOWR('V', 26, struct v4l2_input)
+ #define VIDIOC_G_CTRL _IOWR('V', 27, struct v4l2_control)
+ #define VIDIOC_S_CTRL _IOWR('V', 28, struct v4l2_control)
+ #define VIDIOC_G_TUNER _IOWR('V', 29, struct v4l2_tuner)
+ #define VIDIOC_S_TUNER _IOW('V', 30, struct v4l2_tuner)
+ #define VIDIOC_G_AUDIO _IOR('V', 33, struct v4l2_audio)
+ #define VIDIOC_S_AUDIO _IOW('V', 34, struct v4l2_audio)
+ #define VIDIOC_QUERYCTRL _IOWR('V', 36, struct v4l2_queryctrl)
+ #define VIDIOC_QUERYMENU _IOWR('V', 37, struct v4l2_querymenu)
+ #define VIDIOC_G_INPUT _IOR('V', 38, int)
+ #define VIDIOC_S_INPUT _IOWR('V', 39, int)
+ #define VIDIOC_G_EDID _IOWR('V', 40, struct v4l2_edid)
+ #define VIDIOC_S_EDID _IOWR('V', 41, struct v4l2_edid)
+ #define VIDIOC_G_OUTPUT _IOR('V', 46, int)
+ #define VIDIOC_S_OUTPUT _IOWR('V', 47, int)
+ #define VIDIOC_ENUMOUTPUT _IOWR('V', 48, struct v4l2_output)
+ #define VIDIOC_G_AUDOUT _IOR('V', 49, struct v4l2_audioout)
+ #define VIDIOC_S_AUDOUT _IOW('V', 50, struct v4l2_audioout)
+ #define VIDIOC_G_MODULATOR _IOWR('V', 54, struct v4l2_modulator)
+ #define VIDIOC_S_MODULATOR _IOW('V', 55, struct v4l2_modulator)
+ #define VIDIOC_G_FREQUENCY _IOWR('V', 56, struct v4l2_frequency)
+ #define VIDIOC_S_FREQUENCY _IOW('V', 57, struct v4l2_frequency)
+ #define VIDIOC_CROPCAP _IOWR('V', 58, struct v4l2_cropcap)
+ #define VIDIOC_G_CROP _IOWR('V', 59, struct v4l2_crop)
+ #define VIDIOC_S_CROP _IOW('V', 60, struct v4l2_crop)
+ #define VIDIOC_G_JPEGCOMP _IOR('V', 61, struct v4l2_jpegcompression)
+ #define VIDIOC_S_JPEGCOMP _IOW('V', 62, struct v4l2_jpegcompression)
+ #define VIDIOC_QUERYSTD _IOR('V', 63, v4l2_std_id)
+ #define VIDIOC_TRY_FMT _IOWR('V', 64, struct v4l2_format)
+ #define VIDIOC_ENUMAUDIO _IOWR('V', 65, struct v4l2_audio)
+ #define VIDIOC_ENUMAUDOUT _IOWR('V', 66, struct v4l2_audioout)
+ #define VIDIOC_G_PRIORITY _IOR('V', 67, __u32) /* enum v4l2_priority */
+ #define VIDIOC_S_PRIORITY _IOW('V', 68, __u32) /* enum v4l2_priority */
+ #define VIDIOC_G_SLICED_VBI_CAP _IOWR('V', 69, struct v4l2_sliced_vbi_cap)
+ #define VIDIOC_LOG_STATUS _IO('V', 70)
+ #define VIDIOC_G_EXT_CTRLS _IOWR('V', 71, struct v4l2_ext_controls)
+ #define VIDIOC_S_EXT_CTRLS _IOWR('V', 72, struct v4l2_ext_controls)
+ #define VIDIOC_TRY_EXT_CTRLS _IOWR('V', 73, struct v4l2_ext_controls)
+ #define VIDIOC_ENUM_FRAMESIZES _IOWR('V', 74, struct v4l2_frmsizeenum)
+ #define VIDIOC_ENUM_FRAMEINTERVALS _IOWR('V', 75, struct v4l2_frmivalenum)
+ #define VIDIOC_G_ENC_INDEX _IOR('V', 76, struct v4l2_enc_idx)
+ #define VIDIOC_ENCODER_CMD _IOWR('V', 77, struct v4l2_encoder_cmd)
+ #define VIDIOC_TRY_ENCODER_CMD _IOWR('V', 78, struct v4l2_encoder_cmd)
+
+ /*
+ * Experimental, meant for debugging, testing and internal use.
+ * Only implemented if CONFIG_VIDEO_ADV_DEBUG is defined.
+ * You must be root to use these ioctls. Never use these in applications!
+ */
+ #define VIDIOC_DBG_S_REGISTER _IOW('V', 79, struct v4l2_dbg_register)
+ #define VIDIOC_DBG_G_REGISTER _IOWR('V', 80, struct v4l2_dbg_register)
+
+ #define VIDIOC_S_HW_FREQ_SEEK _IOW('V', 82, struct v4l2_hw_freq_seek)
+ #define VIDIOC_S_DV_TIMINGS _IOWR('V', 87, struct v4l2_dv_timings)
+ #define VIDIOC_G_DV_TIMINGS _IOWR('V', 88, struct v4l2_dv_timings)
+ #define VIDIOC_DQEVENT _IOR('V', 89, struct v4l2_event)
+ #define VIDIOC_SUBSCRIBE_EVENT _IOW('V', 90, struct v4l2_event_subscription)
+ #define VIDIOC_UNSUBSCRIBE_EVENT _IOW('V', 91, struct v4l2_event_subscription)
+ #define VIDIOC_CREATE_BUFS _IOWR('V', 92, struct v4l2_create_buffers)
+ #define VIDIOC_PREPARE_BUF _IOWR('V', 93, struct v4l2_buffer)
+ #define VIDIOC_G_SELECTION _IOWR('V', 94, struct v4l2_selection)
+ #define VIDIOC_S_SELECTION _IOWR('V', 95, struct v4l2_selection)
+ #define VIDIOC_DECODER_CMD _IOWR('V', 96, struct v4l2_decoder_cmd)
+ #define VIDIOC_TRY_DECODER_CMD _IOWR('V', 97, struct v4l2_decoder_cmd)
+ #define VIDIOC_ENUM_DV_TIMINGS _IOWR('V', 98, struct v4l2_enum_dv_timings)
+ #define VIDIOC_QUERY_DV_TIMINGS _IOR('V', 99, struct v4l2_dv_timings)
+ #define VIDIOC_DV_TIMINGS_CAP _IOWR('V', 100, struct v4l2_dv_timings_cap)
+ #define VIDIOC_ENUM_FREQ_BANDS _IOWR('V', 101, struct v4l2_frequency_band)
+
+ /*
+ * Experimental, meant for debugging, testing and internal use.
+ * Never use this in applications!
+ */
+ #define VIDIOC_DBG_G_CHIP_INFO _IOWR('V', 102, struct v4l2_dbg_chip_info)
+
+ #define VIDIOC_QUERY_EXT_CTRL _IOWR('V', 103, struct v4l2_query_ext_ctrl)
+
+ /* Reminder: when adding new ioctls please add support for them to
+ drivers/media/v4l2-core/v4l2-compat-ioctl32.c as well! */
+
+ #define BASE_VIDIOC_PRIVATE 192 /* 192-255 are private */
+
+ #endif /* _UAPI__LINUX_VIDEODEV2_H */
--- /dev/null
+ /* GStreamer
+ *
+ * Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * 2006 Edgard Lima <edgard.lima@gmail.com>
+ *
+ * gstv4l2.c: plugin for v4l2 elements
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #ifndef _GNU_SOURCE
+ # define _GNU_SOURCE /* O_CLOEXEC */
+ #endif
+
+ #include "gst/gst-i18n-plugin.h"
+
+ #include <gst/gst.h>
+
+ #include <fcntl.h>
+ #include <string.h>
+ #include <sys/stat.h>
+ #include <sys/types.h>
+ #include <unistd.h>
+
+ #include "ext/videodev2.h"
+ #include "gstv4l2elements.h"
+ #include "v4l2-utils.h"
+
+ #include "gstv4l2object.h"
+ #include "gstv4l2src.h"
+ #include "gstv4l2sink.h"
+ #include "gstv4l2radio.h"
+ #include "gstv4l2videodec.h"
+ #include "gstv4l2fwhtenc.h"
+ #include "gstv4l2h263enc.h"
+ #include "gstv4l2h264enc.h"
+ #include "gstv4l2h265enc.h"
+ #include "gstv4l2jpegenc.h"
+ #include "gstv4l2mpeg4enc.h"
+ #include "gstv4l2vp8enc.h"
+ #include "gstv4l2vp9enc.h"
+ #include "gstv4l2transform.h"
+
+ GST_DEBUG_CATEGORY_EXTERN (v4l2_debug);
+ #define GST_CAT_DEFAULT v4l2_debug
+
+ #ifdef GST_V4L2_ENABLE_PROBE
+ /* This is a minimalist probe; for speed, we only enumerate formats */
+ static GstCaps *
+ gst_v4l2_probe_template_caps (const gchar * device, gint video_fd,
+ enum v4l2_buf_type type)
+ {
+ gint n;
+ struct v4l2_fmtdesc format;
+ GstCaps *caps;
+
+ GST_DEBUG ("Getting %s format enumerations", device);
+ caps = gst_caps_new_empty ();
+
+ for (n = 0;; n++) {
+ GstStructure *template;
+
+ memset (&format, 0, sizeof (format));
+
+ format.index = n;
+ format.type = type;
+
+ if (ioctl (video_fd, VIDIOC_ENUM_FMT, &format) < 0)
+ break; /* end of enumeration */
+
+ GST_LOG ("index: %u", format.index);
+ GST_LOG ("type: %d", format.type);
+ GST_LOG ("flags: %08x", format.flags);
+ GST_LOG ("description: '%s'", format.description);
+ GST_LOG ("pixelformat: %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (format.pixelformat));
+
+ template = gst_v4l2_object_v4l2fourcc_to_structure (format.pixelformat);
+
+ if (template) {
+ GstStructure *alt_t = NULL;
+
+ switch (format.pixelformat) {
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ case V4L2_PIX_FMT_YUV420:
++ alt_t = gst_structure_copy (template);
++ gst_structure_set (alt_t, "format", G_TYPE_STRING, "S420", NULL);
++ break;
++ case V4L2_PIX_FMT_NV12:
++ alt_t = gst_structure_copy (template);
++ gst_structure_set (alt_t, "format", G_TYPE_STRING, "SN12", NULL);
++ break;
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ case V4L2_PIX_FMT_RGB32:
+ alt_t = gst_structure_copy (template);
+ gst_structure_set (alt_t, "format", G_TYPE_STRING, "ARGB", NULL);
+ break;
+ case V4L2_PIX_FMT_BGR32:
+ alt_t = gst_structure_copy (template);
+ gst_structure_set (alt_t, "format", G_TYPE_STRING, "BGRA", NULL);
+ default:
+ break;
+ }
+
+ gst_caps_append_structure (caps, template);
+
+ if (alt_t)
+ gst_caps_append_structure (caps, alt_t);
+ }
+ }
+
+ return gst_caps_simplify (caps);
+ }
+
+ static gboolean
+ gst_v4l2_probe_and_register (GstPlugin * plugin)
+ {
+ GstV4l2Iterator *it;
+ gint video_fd = -1;
+ struct v4l2_capability vcap;
+ guint32 device_caps;
+
+ v4l2_element_init (plugin);
+
+ GST_DEBUG ("Probing devices");
+
+ it = gst_v4l2_iterator_new ();
+
+ while (gst_v4l2_iterator_next (it)) {
+ GstCaps *src_caps, *sink_caps;
+ gchar *basename;
+
+ if (video_fd >= 0)
+ close (video_fd);
+
+ video_fd = open (it->device_path, O_RDWR | O_CLOEXEC);
+
+ if (video_fd == -1) {
+ GST_DEBUG ("Failed to open %s: %s", it->device_path, g_strerror (errno));
+ continue;
+ }
+
+ memset (&vcap, 0, sizeof (vcap));
+
+ if (ioctl (video_fd, VIDIOC_QUERYCAP, &vcap) < 0) {
+ GST_DEBUG ("Failed to get device capabilities: %s", g_strerror (errno));
+ continue;
+ }
+
+ if (vcap.capabilities & V4L2_CAP_DEVICE_CAPS)
+ device_caps = vcap.device_caps;
+ else
+ device_caps = vcap.capabilities;
+
+ if (!GST_V4L2_IS_M2M (device_caps))
+ continue;
+
+ GST_DEBUG ("Probing '%s' located at '%s'",
+ it->device_name ? it->device_name : (const gchar *) vcap.driver,
+ it->device_path);
+
+ /* get sink supported format (no MPLANE for codec) */
+ sink_caps = gst_caps_merge (gst_v4l2_probe_template_caps (it->device_path,
+ video_fd, V4L2_BUF_TYPE_VIDEO_OUTPUT),
+ gst_v4l2_probe_template_caps (it->device_path, video_fd,
+ V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE));
+
+ /* get src supported format */
+ src_caps = gst_caps_merge (gst_v4l2_probe_template_caps (it->device_path,
+ video_fd, V4L2_BUF_TYPE_VIDEO_CAPTURE),
+ gst_v4l2_probe_template_caps (it->device_path, video_fd,
+ V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE));
+
+ /* Skip devices without any supported formats */
+ if (gst_caps_is_empty (sink_caps) || gst_caps_is_empty (src_caps)) {
+ gst_caps_unref (sink_caps);
+ gst_caps_unref (src_caps);
+ continue;
+ }
+
+ basename = g_path_get_basename (it->device_path);
+
+ /* Caps won't be freed if the subclass is not instantiated */
+ GST_MINI_OBJECT_FLAG_SET (sink_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
+ GST_MINI_OBJECT_FLAG_SET (src_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
+
+ if (gst_v4l2_is_video_dec (sink_caps, src_caps)) {
+ gst_v4l2_video_dec_register (plugin, basename, it->device_path,
+ video_fd, sink_caps, src_caps);
+ } else if (gst_v4l2_is_video_enc (sink_caps, src_caps, NULL)) {
+ if (gst_v4l2_is_fwht_enc (sink_caps, src_caps))
+ gst_v4l2_fwht_enc_register (plugin, basename, it->device_path,
+ sink_caps, src_caps);
+
+ if (gst_v4l2_is_h264_enc (sink_caps, src_caps))
+ gst_v4l2_h264_enc_register (plugin, basename, it->device_path,
+ video_fd, sink_caps, src_caps);
+
+ if (gst_v4l2_is_h265_enc (sink_caps, src_caps))
+ gst_v4l2_h265_enc_register (plugin, basename, it->device_path,
+ video_fd, sink_caps, src_caps);
+
+ if (gst_v4l2_is_mpeg4_enc (sink_caps, src_caps))
+ gst_v4l2_mpeg4_enc_register (plugin, basename, it->device_path,
+ video_fd, sink_caps, src_caps);
+
+ if (gst_v4l2_is_h263_enc (sink_caps, src_caps))
+ gst_v4l2_h263_enc_register (plugin, basename, it->device_path,
+ sink_caps, src_caps);
+
+ if (gst_v4l2_is_jpeg_enc (sink_caps, src_caps))
+ gst_v4l2_jpeg_enc_register (plugin, basename, it->device_path,
+ sink_caps, src_caps);
+
+ if (gst_v4l2_is_vp8_enc (sink_caps, src_caps))
+ gst_v4l2_vp8_enc_register (plugin, basename, it->device_path,
+ video_fd, sink_caps, src_caps);
+
+ if (gst_v4l2_is_vp9_enc (sink_caps, src_caps))
+ gst_v4l2_vp9_enc_register (plugin, basename, it->device_path,
+ video_fd, sink_caps, src_caps);
+ } else if (gst_v4l2_is_transform (sink_caps, src_caps)) {
+ gst_v4l2_transform_register (plugin, basename, it->device_path,
+ sink_caps, src_caps);
+ }
+ /* else if ( ... etc. */
+
+ gst_caps_unref (sink_caps);
+ gst_caps_unref (src_caps);
+ g_free (basename);
+ }
+
+ if (video_fd >= 0)
+ close (video_fd);
+
+ gst_v4l2_iterator_free (it);
+
+ return TRUE;
+ }
+ #endif
+
+ static gboolean
+ plugin_init (GstPlugin * plugin)
+ {
+ gboolean ret = FALSE;
++#ifndef TIZEN_FEATURE_DISABLE_V4L2_DEPENDENCY
+ const gchar *paths[] = { "/dev", "/dev/v4l2", NULL };
+ const gchar *names[] = { "video", NULL };
+
+ /* Add some dependency, so the dynamic features get updated upon changes in
+ * /dev/video* */
+ gst_plugin_add_dependency (plugin,
+ NULL, paths, names, GST_PLUGIN_DEPENDENCY_FLAG_FILE_NAME_IS_PREFIX);
++#endif /* TIZEN_FEATURE_DISABLE_V4L2_DEPENDENCY */
+
+ #ifdef GST_V4L2_ENABLE_PROBE
+ ret |= gst_v4l2_probe_and_register (plugin);
+ #endif
+
+ ret |= GST_ELEMENT_REGISTER (v4l2src, plugin);
+ ret |= GST_ELEMENT_REGISTER (v4l2sink, plugin);
+ ret |= GST_ELEMENT_REGISTER (v4l2radio, plugin);
+ ret |= GST_DEVICE_PROVIDER_REGISTER (v4l2deviceprovider, plugin);
+
+ return ret;
+ }
+
+ GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ video4linux2,
+ "elements for Video 4 Linux",
+ plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
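For context (not part of the patch): once registered, these elements are picked up by name through the regular GStreamer factory machinery. A minimal sketch, assuming a default installation where "v4l2src" resolves to this plugin and /dev/video0 is a capture device:

    #include <gst/gst.h>

    int
    main (int argc, char **argv)
    {
      GstElement *pipeline;
      GstBus *bus;
      GstMessage *msg;

      gst_init (&argc, &argv);
      pipeline = gst_parse_launch (
          "v4l2src device=/dev/video0 ! videoconvert ! autovideosink", NULL);
      gst_element_set_state (pipeline, GST_STATE_PLAYING);

      /* Block until an error or end-of-stream reaches the bus */
      bus = gst_element_get_bus (pipeline);
      msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
          GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

      if (msg)
        gst_message_unref (msg);
      gst_object_unref (bus);
      gst_element_set_state (pipeline, GST_STATE_NULL);
      gst_object_unref (pipeline);
      return 0;
    }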
--- /dev/null
+ /*
+ * Copyright (C) 2014 Collabora Ltd.
+ * Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
+ #include "config.h"
+
+ #ifndef _GNU_SOURCE
+ # define _GNU_SOURCE /* O_CLOEXEC */
+ #endif
+
+ #include "ext/videodev2.h"
+
+ #include "gstv4l2object.h"
+ #include "gstv4l2allocator.h"
+
+ #include <gst/allocators/gstdmabuf.h>
+
+ #include <fcntl.h>
+ #include <string.h>
+ #include <sys/stat.h>
+ #include <sys/types.h>
+ #include <sys/mman.h>
+ #include <unistd.h>
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++#include <tbm_surface.h>
++#include <tbm_surface_internal.h>
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+
+ #define GST_V4L2_MEMORY_TYPE "V4l2Memory"
+
+ #define gst_v4l2_allocator_parent_class parent_class
+ G_DEFINE_TYPE (GstV4l2Allocator, gst_v4l2_allocator, GST_TYPE_ALLOCATOR);
+
+ GST_DEBUG_CATEGORY_STATIC (v4l2allocator_debug);
+ #define GST_CAT_DEFAULT v4l2allocator_debug
+
+ #define UNSET_QUEUED(buffer) \
+ ((buffer).flags &= ~(V4L2_BUF_FLAG_QUEUED | V4L2_BUF_FLAG_DONE))
+
+ #define SET_QUEUED(buffer) ((buffer).flags |= V4L2_BUF_FLAG_QUEUED)
+
+ #define IS_QUEUED(buffer) \
+ ((buffer).flags & (V4L2_BUF_FLAG_QUEUED | V4L2_BUF_FLAG_DONE))
+
+ enum
+ {
+ GROUP_RELEASED,
+ LAST_SIGNAL
+ };
+
+ static guint gst_v4l2_allocator_signals[LAST_SIGNAL] = { 0 };
+
+ static void gst_v4l2_allocator_release (GstV4l2Allocator * allocator,
+ GstV4l2Memory * mem);
+
+ static const gchar *
+ memory_type_to_str (guint32 memory)
+ {
+ switch (memory) {
+ case V4L2_MEMORY_MMAP:
+ return "mmap";
+ case V4L2_MEMORY_USERPTR:
+ return "userptr";
+ case V4L2_MEMORY_DMABUF:
+ return "dmabuf";
+ default:
+ return "unknown";
+ }
+ }
+
+ /*************************************/
+ /* GstV4lMemory implementation */
+ /*************************************/
+
+ static gpointer
+ _v4l2mem_map (GstV4l2Memory * mem, gsize maxsize, GstMapFlags flags)
+ {
+ gpointer data = NULL;
+
+ switch (mem->group->buffer.memory) {
+ case V4L2_MEMORY_MMAP:
+ case V4L2_MEMORY_USERPTR:
+ data = mem->data;
+ break;
+ case V4L2_MEMORY_DMABUF:
+ /* v4l2 dmabuf memory is not shared with downstream */
+ g_assert_not_reached ();
+ break;
+ default:
+ GST_WARNING ("Unknown memory type %i", mem->group->buffer.memory);
+ break;
+ }
+ return data;
+ }
+
+ static gboolean
+ _v4l2mem_unmap (GstV4l2Memory * mem)
+ {
+ gboolean ret = FALSE;
+
+ switch (mem->group->buffer.memory) {
+ case V4L2_MEMORY_MMAP:
+ case V4L2_MEMORY_USERPTR:
+ ret = TRUE;
+ break;
+ case V4L2_MEMORY_DMABUF:
+ /* v4l2 dmabuf memory is not shared with downstream */
+ g_assert_not_reached ();
+ break;
+ default:
+ GST_WARNING ("Unknown memory type %i", mem->group->buffer.memory);
+ break;
+ }
+ return ret;
+ }
+
+ static gboolean
+ _v4l2mem_dispose (GstV4l2Memory * mem)
+ {
+ GstV4l2Allocator *allocator = (GstV4l2Allocator *) mem->mem.allocator;
+ GstV4l2MemoryGroup *group = mem->group;
+ gboolean ret;
+
+ if (group->mem[mem->plane]) {
+ /* We may have a dmabuf, replace it with returned original memory */
+ group->mem[mem->plane] = gst_memory_ref ((GstMemory *) mem);
+ gst_v4l2_allocator_release (allocator, mem);
+ ret = FALSE;
+ } else {
+ gst_object_ref (allocator);
+ ret = TRUE;
+ }
+
+ return ret;
+ }
+
+ static inline GstV4l2Memory *
+ _v4l2mem_new (GstMemoryFlags flags, GstAllocator * allocator,
+ GstMemory * parent, gsize maxsize, gsize align, gsize offset, gsize size,
+ gint plane, gpointer data, int dmafd, GstV4l2MemoryGroup * group)
+ {
+ GstV4l2Memory *mem;
+
+ mem = g_slice_new0 (GstV4l2Memory);
+ gst_memory_init (GST_MEMORY_CAST (mem),
+ flags, allocator, parent, maxsize, align, offset, size);
+
+ if (parent == NULL)
+ mem->mem.mini_object.dispose =
+ (GstMiniObjectDisposeFunction) _v4l2mem_dispose;
+
+ mem->plane = plane;
+ mem->data = data;
+ mem->dmafd = dmafd;
+ mem->group = group;
+
+ return mem;
+ }
+
+ static GstV4l2Memory *
+ _v4l2mem_share (GstV4l2Memory * mem, gssize offset, gsize size)
+ {
+ GstV4l2Memory *sub;
+ GstMemory *parent;
+
+ /* find the real parent */
+ if ((parent = mem->mem.parent) == NULL)
+ parent = (GstMemory *) mem;
+
+ if (size == -1)
+ size = mem->mem.size - offset;
+
+ /* the shared memory is always readonly */
+ sub = _v4l2mem_new (GST_MINI_OBJECT_FLAGS (parent) |
+ GST_MINI_OBJECT_FLAG_LOCK_READONLY, mem->mem.allocator, parent,
+ mem->mem.maxsize, mem->mem.align, offset, size, mem->plane, mem->data,
+ -1, mem->group);
+
+ return sub;
+ }
+
+ static gboolean
+ _v4l2mem_is_span (GstV4l2Memory * mem1, GstV4l2Memory * mem2, gsize * offset)
+ {
+ if (offset)
+ *offset = mem1->mem.offset - mem1->mem.parent->offset;
+
+ /* and memory is contiguous */
+ return mem1->mem.offset + mem1->mem.size == mem2->mem.offset;
+ }
+
+ gboolean
+ gst_is_v4l2_memory (GstMemory * mem)
+ {
+ return gst_memory_is_type (mem, GST_V4L2_MEMORY_TYPE);
+ }
+
+ GQuark
+ gst_v4l2_memory_quark (void)
+ {
+ static GQuark quark = 0;
+
+ if (quark == 0)
+ quark = g_quark_from_string ("GstV4l2Memory");
+
+ return quark;
+ }
+
+
+ /*************************************/
+ /* GstV4l2MemoryGroup implementation */
+ /*************************************/
+
+ static void
+ gst_v4l2_memory_group_free (GstV4l2MemoryGroup * group)
+ {
+ gint i;
+
+ for (i = 0; i < group->n_mem; i++) {
+ GstMemory *mem = group->mem[i];
+ group->mem[i] = NULL;
+ if (mem)
+ gst_memory_unref (mem);
+ }
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ if (group->surface) {
++ GST_INFO ("unref surface[%p]", group->surface);
++ tbm_surface_destroy (group->surface);
++ group->surface = NULL;
++ }
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+
+ g_slice_free (GstV4l2MemoryGroup, group);
+ }
+
+ static GstV4l2MemoryGroup *
+ gst_v4l2_memory_group_new (GstV4l2Allocator * allocator, guint32 index)
+ {
+ GstV4l2Object *obj = allocator->obj;
+ guint32 memory = allocator->memory;
+ struct v4l2_format *format = &obj->format;
+ GstV4l2MemoryGroup *group;
+ gsize img_size, buf_size;
+
+ group = g_slice_new0 (GstV4l2MemoryGroup);
+
+ group->buffer.type = format->type;
+ group->buffer.index = index;
+ group->buffer.memory = memory;
+
+ if (V4L2_TYPE_IS_MULTIPLANAR (format->type)) {
+ group->n_mem = group->buffer.length = format->fmt.pix_mp.num_planes;
+ group->buffer.m.planes = group->planes;
+ } else {
+ group->n_mem = 1;
+ }
+
+ if (obj->ioctl (obj->video_fd, VIDIOC_QUERYBUF, &group->buffer) < 0)
+ goto querybuf_failed;
+
+ if (group->buffer.index != index) {
+ GST_ERROR_OBJECT (allocator, "Buffer index returned by VIDIOC_QUERYBUF "
+ "didn't match, this indicate the presence of a bug in your driver or "
+ "libv4l2");
+ g_slice_free (GstV4l2MemoryGroup, group);
+ return NULL;
+ }
+
+ /* Check that the provided size matches the format we negotiated. Failing
+ * here usually means a driver or libv4l bug. */
+ if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
+ gint i;
+
+ for (i = 0; i < group->n_mem; i++) {
+ img_size = obj->format.fmt.pix_mp.plane_fmt[i].sizeimage;
+ buf_size = group->planes[i].length;
+ if (buf_size < img_size)
+ goto buffer_too_short;
+ }
+ } else {
+ img_size = obj->format.fmt.pix.sizeimage;
+ buf_size = group->buffer.length;
+ if (buf_size < img_size)
+ goto buffer_too_short;
+ }
+
+ /* We save non planar buffer information into the multi-planar plane array
+ * to avoid duplicating the code later */
+ if (!V4L2_TYPE_IS_MULTIPLANAR (format->type)) {
+ group->planes[0].bytesused = group->buffer.bytesused;
+ group->planes[0].length = group->buffer.length;
+ group->planes[0].data_offset = 0;
+ g_assert (sizeof (group->planes[0].m) == sizeof (group->buffer.m));
+ memcpy (&group->planes[0].m, &group->buffer.m, sizeof (group->buffer.m));
+ }
+
+ GST_LOG_OBJECT (allocator, "Got %s buffer", memory_type_to_str (memory));
+ GST_LOG_OBJECT (allocator, " index: %u", group->buffer.index);
+ GST_LOG_OBJECT (allocator, " type: %d", group->buffer.type);
+ GST_LOG_OBJECT (allocator, " flags: %08x", group->buffer.flags);
+ GST_LOG_OBJECT (allocator, " field: %d", group->buffer.field);
+ GST_LOG_OBJECT (allocator, " memory: %d", group->buffer.memory);
+ GST_LOG_OBJECT (allocator, " planes: %d", group->n_mem);
+
+ #ifndef GST_DISABLE_GST_DEBUG
+ if (memory == V4L2_MEMORY_MMAP) {
+ gint i;
+ for (i = 0; i < group->n_mem; i++) {
+ GST_LOG_OBJECT (allocator,
+ " [%u] bytesused: %u, length: %u, offset: %u", i,
+ group->planes[i].bytesused, group->planes[i].length,
+ group->planes[i].data_offset);
+ GST_LOG_OBJECT (allocator, " [%u] MMAP offset: %u", i,
+ group->planes[i].m.mem_offset);
+ }
+ }
+ #endif
+
+ return group;
+
+ querybuf_failed:
+ {
+ GST_ERROR ("error querying buffer %d: %s", index, g_strerror (errno));
+ goto failed;
+ }
+ buffer_too_short:
+ {
+ GST_ERROR ("buffer size %" G_GSIZE_FORMAT
+ " is smaller then negotiated size %" G_GSIZE_FORMAT
+ ", this is usually the result of a bug in the v4l2 driver or libv4l.",
+ buf_size, img_size);
+ goto failed;
+ }
+ failed:
+ gst_v4l2_memory_group_free (group);
+ return NULL;
+ }
+
+
+ /*************************************/
+ /* GstV4lAllocator implementation */
+ /*************************************/
+
+ static void
+ gst_v4l2_allocator_release (GstV4l2Allocator * allocator, GstV4l2Memory * mem)
+ {
+ GstV4l2MemoryGroup *group = mem->group;
+
+ GST_LOG_OBJECT (allocator, "plane %i of buffer %u released",
+ mem->plane, group->buffer.index);
+
+ switch (allocator->memory) {
+ case V4L2_MEMORY_DMABUF:
+ close (mem->dmafd);
+ mem->dmafd = -1;
+ break;
+ case V4L2_MEMORY_USERPTR:
+ mem->data = NULL;
+ break;
+ default:
+ break;
+ }
+
+ /* When all the memories are back, put the group back in the free queue */
+ if (g_atomic_int_dec_and_test (&group->mems_allocated)) {
+ GST_LOG_OBJECT (allocator, "buffer %u released", group->buffer.index);
+ gst_atomic_queue_push (allocator->free_queue, group);
+ g_signal_emit (allocator, gst_v4l2_allocator_signals[GROUP_RELEASED], 0);
+ }
+
+ /* Keep last, allocator may be freed after this call */
+ g_object_unref (allocator);
+ }
+
+ static void
+ gst_v4l2_allocator_free (GstAllocator * gallocator, GstMemory * gmem)
+ {
+ GstV4l2Allocator *allocator = (GstV4l2Allocator *) gallocator;
+ GstV4l2Object *obj = allocator->obj;
+ GstV4l2Memory *mem = (GstV4l2Memory *) gmem;
+ GstV4l2MemoryGroup *group = mem->group;
+
+ /* Only free unparented memory */
+ if (mem->mem.parent == NULL) {
+ GST_LOG_OBJECT (allocator, "freeing plane %i of buffer %u",
+ mem->plane, group->buffer.index);
+
+ if (allocator->memory == V4L2_MEMORY_MMAP) {
+ if (mem->data)
+ obj->munmap (mem->data, group->planes[mem->plane].length);
+ }
+
+ /* This applies to both mmap with expbuf and dmabuf imported memory */
+ if (mem->dmafd >= 0)
+ close (mem->dmafd);
+ }
+
+ g_slice_free (GstV4l2Memory, mem);
+ }
+
+ static void
+ gst_v4l2_allocator_dispose (GObject * obj)
+ {
+ GstV4l2Allocator *allocator = (GstV4l2Allocator *) obj;
+ gint i;
+
+ GST_LOG_OBJECT (obj, "called");
+
+ for (i = 0; i < allocator->count; i++) {
+ GstV4l2MemoryGroup *group = allocator->groups[i];
+ allocator->groups[i] = NULL;
+ if (group)
+ gst_v4l2_memory_group_free (group);
+ }
+
+ G_OBJECT_CLASS (parent_class)->dispose (obj);
+ }
+
+ static void
+ gst_v4l2_allocator_finalize (GObject * obj)
+ {
+ GstV4l2Allocator *allocator = (GstV4l2Allocator *) obj;
+
+ GST_LOG_OBJECT (obj, "called");
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ if (allocator->bufmgr) {
++ GST_INFO_OBJECT (obj, "deinit tbm bufmgr %p", allocator->bufmgr);
++ tbm_bufmgr_deinit (allocator->bufmgr);
++ allocator->bufmgr = NULL;
++ }
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+
+ gst_atomic_queue_unref (allocator->free_queue);
+ gst_object_unref (allocator->obj->element);
+
+ G_OBJECT_CLASS (parent_class)->finalize (obj);
+ }
+
+ static void
+ gst_v4l2_allocator_class_init (GstV4l2AllocatorClass * klass)
+ {
+ GObjectClass *object_class;
+ GstAllocatorClass *allocator_class;
+
+ allocator_class = (GstAllocatorClass *) klass;
+ object_class = (GObjectClass *) klass;
+
+ allocator_class->alloc = NULL;
+ allocator_class->free = gst_v4l2_allocator_free;
+
+ object_class->dispose = gst_v4l2_allocator_dispose;
+ object_class->finalize = gst_v4l2_allocator_finalize;
+
+ gst_v4l2_allocator_signals[GROUP_RELEASED] = g_signal_new ("group-released",
+ G_TYPE_FROM_CLASS (object_class), G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL,
+ G_TYPE_NONE, 0);
+
+ GST_DEBUG_CATEGORY_INIT (v4l2allocator_debug, "v4l2allocator", 0,
+ "V4L2 Allocator");
+ }
+
+ static void
+ gst_v4l2_allocator_init (GstV4l2Allocator * allocator)
+ {
+ GstAllocator *alloc = GST_ALLOCATOR_CAST (allocator);
+
+ alloc->mem_type = GST_V4L2_MEMORY_TYPE;
+ alloc->mem_map = (GstMemoryMapFunction) _v4l2mem_map;
+ alloc->mem_unmap = (GstMemoryUnmapFunction) _v4l2mem_unmap;
+ alloc->mem_share = (GstMemoryShareFunction) _v4l2mem_share;
+ alloc->mem_is_span = (GstMemoryIsSpanFunction) _v4l2mem_is_span;
+ /* Use the default, fallback copy function */
+
+ allocator->free_queue = gst_atomic_queue_new (VIDEO_MAX_FRAME);
+
+ GST_OBJECT_FLAG_SET (allocator, GST_ALLOCATOR_FLAG_CUSTOM_ALLOC);
+ }
+
+ #define GST_V4L2_ALLOCATOR_PROBE(obj,type) \
+ gst_v4l2_allocator_probe ((obj), V4L2_MEMORY_ ## type, \
+ GST_V4L2_ALLOCATOR_FLAG_ ## type ## _REQBUFS, \
+ GST_V4L2_ALLOCATOR_FLAG_ ## type ## _CREATE_BUFS)
+ static guint32
+ gst_v4l2_allocator_probe (GstV4l2Allocator * allocator, guint32 memory,
+ guint32 breq_flag, guint32 bcreate_flag)
+ {
+ GstV4l2Object *obj = allocator->obj;
+ struct v4l2_requestbuffers breq = { 0 };
+ guint32 flags = 0;
+
+ breq.type = obj->type;
+ breq.count = 0;
+ breq.memory = memory;
+
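+ /* A REQBUFS with count 0 frees nothing; success only tells us that the
+ * driver accepts this memory type. */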
+ if (obj->ioctl (obj->video_fd, VIDIOC_REQBUFS, &breq) == 0) {
+ struct v4l2_create_buffers bcreate = { 0 };
+
+ flags |= breq_flag;
+
+ bcreate.memory = memory;
+ bcreate.format = obj->format;
+
+ if ((obj->ioctl (obj->video_fd, VIDIOC_CREATE_BUFS, &bcreate) == 0))
+ flags |= bcreate_flag;
+ }
+
+ if (breq.capabilities & V4L2_BUF_CAP_SUPPORTS_ORPHANED_BUFS)
+ flags |= GST_V4L2_ALLOCATOR_FLAG_SUPPORTS_ORPHANED_BUFS;
+
+ return flags;
+ }
+
+ static GstV4l2MemoryGroup *
+ gst_v4l2_allocator_create_buf (GstV4l2Allocator * allocator)
+ {
+ GstV4l2Object *obj = allocator->obj;
+ struct v4l2_create_buffers bcreate = { 0 };
+ GstV4l2MemoryGroup *group = NULL;
+
+ GST_OBJECT_LOCK (allocator);
+
+ if (!g_atomic_int_get (&allocator->active))
+ goto done;
+
+ if (GST_V4L2_ALLOCATOR_IS_ORPHANED (allocator))
+ goto orphaned_bug;
+
+ bcreate.memory = allocator->memory;
+ bcreate.format = obj->format;
+ bcreate.count = 1;
+
+ if (!allocator->can_allocate)
+ goto done;
+
+ if (obj->ioctl (obj->video_fd, VIDIOC_CREATE_BUFS, &bcreate) < 0)
+ goto create_bufs_failed;
+
+ if (allocator->groups[bcreate.index] != NULL)
+ goto create_bufs_bug;
+
+ group = gst_v4l2_memory_group_new (allocator, bcreate.index);
+
+ if (group) {
+ allocator->groups[bcreate.index] = group;
+ allocator->count++;
+ }
+
+ done:
+ GST_OBJECT_UNLOCK (allocator);
+ return group;
+
+ orphaned_bug:
+ {
+ GST_ERROR_OBJECT (allocator, "allocator was orphaned, "
+ "not creating new buffers");
+ goto done;
+ }
+ create_bufs_failed:
+ {
+ GST_WARNING_OBJECT (allocator, "error creating a new buffer: %s",
+ g_strerror (errno));
+ goto done;
+ }
+ create_bufs_bug:
+ {
+ GST_ERROR_OBJECT (allocator, "created buffer has already used buffer "
+ "index %i, this means there is an bug in your driver or libv4l2",
+ bcreate.index);
+ goto done;
+ }
+ }
+
+ static GstV4l2MemoryGroup *
+ gst_v4l2_allocator_alloc (GstV4l2Allocator * allocator)
+ {
+ GstV4l2MemoryGroup *group;
+
+ if (!g_atomic_int_get (&allocator->active))
+ return NULL;
+
+ group = gst_atomic_queue_pop (allocator->free_queue);
+
+ if (group == NULL) {
+ if (allocator->can_allocate) {
+ group = gst_v4l2_allocator_create_buf (allocator);
+
+ /* Don't hammer on CREATE_BUFS */
+ if (group == NULL)
+ allocator->can_allocate = FALSE;
+ }
+ }
+
+ return group;
+ }
+
+ static void
+ gst_v4l2_allocator_reset_size (GstV4l2Allocator * allocator,
+ GstV4l2MemoryGroup * group)
+ {
+ gint i;
+ for (i = 0; i < group->n_mem; i++) {
+ group->mem[i]->maxsize = group->planes[i].length;
+ group->mem[i]->offset = 0;
+ group->mem[i]->size = group->planes[i].length;
+ }
+ }
+
+ static void
+ _cleanup_failed_alloc (GstV4l2Allocator * allocator, GstV4l2MemoryGroup * group)
+ {
+ if (group->mems_allocated > 0) {
+ gint i;
+ /* If one or more mmaps worked, we need to unref those memories, otherwise
+ * they will keep a ref on the allocator and leak it. This also puts the
+ * group back into the free_queue */
+ for (i = 0; i < group->n_mem; i++)
+ gst_memory_unref (group->mem[i]);
+ } else {
+ /* Otherwise, group has to be on free queue for _stop() to work */
+ gst_atomic_queue_push (allocator->free_queue, group);
+ }
+ }
+
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
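++/* Map GStreamer video formats to TBM surface formats; SN12 and S420 are
++ * Tizen-specific variants of NV12 and I420. */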
++static tbm_format __get_tbm_format (GstVideoFormat gst_format)
++{
++ switch (gst_format) {
++ case GST_VIDEO_FORMAT_NV12:
++ case GST_VIDEO_FORMAT_SN12:
++ return TBM_FORMAT_NV12;
++ case GST_VIDEO_FORMAT_I420:
++ case GST_VIDEO_FORMAT_S420:
++ default:
++ return TBM_FORMAT_YUV420;
++ }
++}
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+
+ GstV4l2Allocator *
+ gst_v4l2_allocator_new (GstObject * parent, GstV4l2Object * v4l2object)
+ {
+ GstV4l2Allocator *allocator;
+ guint32 flags = 0;
+ gchar *name, *parent_name;
+
+ parent_name = gst_object_get_name (parent);
+ name = g_strconcat (parent_name, ":allocator", NULL);
+ g_free (parent_name);
+
+ allocator = g_object_new (GST_TYPE_V4L2_ALLOCATOR, "name", name, NULL);
+ gst_object_ref_sink (allocator);
+ g_free (name);
+
+ /* Save everything */
+ allocator->obj = v4l2object;
+
+ /* Keep a ref on the element so obj does not disappear */
+ gst_object_ref (allocator->obj->element);
+
+ flags |= GST_V4L2_ALLOCATOR_PROBE (allocator, MMAP);
+ flags |= GST_V4L2_ALLOCATOR_PROBE (allocator, USERPTR);
+ flags |= GST_V4L2_ALLOCATOR_PROBE (allocator, DMABUF);
+
+
+ if (flags == 0) {
+ /* Drivers not ported from videobuf to videobuf2 don't allow freeing buffers
+ * using REQBUFS(0). This is a workaround to still support these drivers,
+ * which are known to have MMAP support. */
+ GST_WARNING_OBJECT (allocator, "Could not probe supported memory type, "
+ "assuming MMAP is supported, this is expected for older drivers not "
+ " yet ported to videobuf2 framework");
+ flags = GST_V4L2_ALLOCATOR_FLAG_MMAP_REQBUFS;
+ }
+
+ GST_OBJECT_FLAG_SET (allocator, flags);
+
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ if (v4l2object->tbm_output &&
++ !V4L2_TYPE_IS_OUTPUT (v4l2object->type) &&
++ v4l2object->mode == GST_V4L2_IO_DMABUF) {
++ tbm_surface_h tmp_surface = NULL;
++ int width = GST_VIDEO_INFO_WIDTH (&v4l2object->info);
++ int height = GST_VIDEO_INFO_HEIGHT (&v4l2object->info);
++
++ tmp_surface = tbm_surface_create (width, height,
++ __get_tbm_format (GST_VIDEO_INFO_FORMAT (&v4l2object->info)));
++ if (tmp_surface) {
++ tbm_surface_get_info (tmp_surface, &allocator->s_info);
++ GST_INFO_OBJECT (allocator, "[%dx%d] -> tbm surface info[%dx%d]",
++ width, height, allocator->s_info.width, allocator->s_info.height);
++ tbm_surface_destroy (tmp_surface);
++ } else {
++ GST_ERROR_OBJECT (allocator, "[%dx%d] surface failed", width, height);
++ }
++
++ allocator->bufmgr = tbm_bufmgr_init (-1);
++ if (!allocator->bufmgr) {
++ GST_ERROR_OBJECT (allocator, "tbm bufmgr failed");
++ gst_object_unref (allocator);
++ return NULL;
++ }
++ }
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ return allocator;
+ }
+
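+ /* Typical allocator lifecycle (a minimal sketch, assuming a configured
+ * GstV4l2Object and its parent element; error handling omitted):
+ *
+ *   allocator = gst_v4l2_allocator_new (parent, v4l2object);
+ *   gst_v4l2_allocator_start (allocator, count, V4L2_MEMORY_MMAP);
+ *   group = gst_v4l2_allocator_alloc_mmap (allocator);
+ *   gst_v4l2_allocator_qbuf (allocator, group);
+ *   gst_v4l2_allocator_dqbuf (allocator, &group);
+ *   gst_v4l2_allocator_stop (allocator);
+ */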
+ guint
+ gst_v4l2_allocator_start (GstV4l2Allocator * allocator, guint32 count,
+ guint32 memory)
+ {
+ GstV4l2Object *obj = allocator->obj;
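+ /* the positional initializer relies on the v4l2_requestbuffers field
+ * order: count, type, memory */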
+ struct v4l2_requestbuffers breq = { count, obj->type, memory };
+ gboolean can_allocate;
+ gint i;
+
+ g_return_val_if_fail (count != 0, 0);
+
+ GST_OBJECT_LOCK (allocator);
+
+ if (g_atomic_int_get (&allocator->active))
+ goto already_active;
+
+ if (GST_V4L2_ALLOCATOR_IS_ORPHANED (allocator))
+ goto orphaned;
+
+ if (obj->ioctl (obj->video_fd, VIDIOC_REQBUFS, &breq) < 0)
+ goto reqbufs_failed;
+
+ if (breq.count < 1)
+ goto out_of_memory;
+
+ switch (memory) {
+ case V4L2_MEMORY_MMAP:
+ can_allocate = GST_V4L2_ALLOCATOR_CAN_ALLOCATE (allocator, MMAP);
+ break;
+ case V4L2_MEMORY_USERPTR:
+ can_allocate = GST_V4L2_ALLOCATOR_CAN_ALLOCATE (allocator, USERPTR);
+ break;
+ case V4L2_MEMORY_DMABUF:
+ can_allocate = GST_V4L2_ALLOCATOR_CAN_ALLOCATE (allocator, DMABUF);
+ break;
+ default:
+ can_allocate = FALSE;
+ break;
+ }
+
+ GST_DEBUG_OBJECT (allocator, "allocated %u %s buffers out of %u requested",
+ breq.count, memory_type_to_str (memory), count);
+
+ allocator->can_allocate = can_allocate;
+ allocator->count = breq.count;
+ allocator->memory = memory;
+
+ /* Create memory groups */
+ for (i = 0; i < allocator->count; i++) {
+ allocator->groups[i] = gst_v4l2_memory_group_new (allocator, i);
+ if (allocator->groups[i] == NULL)
+ goto error;
+
+ gst_atomic_queue_push (allocator->free_queue, allocator->groups[i]);
+ }
+
+ g_atomic_int_set (&allocator->active, TRUE);
+
+ done:
+ GST_OBJECT_UNLOCK (allocator);
+ return breq.count;
+
+ already_active:
+ {
+ GST_ERROR_OBJECT (allocator, "allocator already active");
+ goto error;
+ }
+ orphaned:
+ {
+ GST_ERROR_OBJECT (allocator, "allocator was orphaned");
+ goto error;
+ }
+ reqbufs_failed:
+ {
+ GST_ERROR_OBJECT (allocator,
+ "error requesting %d buffers: %s", count, g_strerror (errno));
+ goto error;
+ }
+ out_of_memory:
+ {
+ GST_ERROR_OBJECT (allocator, "Not enough memory to allocate buffers");
+ goto error;
+ }
+ error:
+ {
+ breq.count = 0;
+ goto done;
+ }
+ }
+
+ GstV4l2Return
+ gst_v4l2_allocator_stop (GstV4l2Allocator * allocator)
+ {
+ GstV4l2Object *obj = allocator->obj;
+ struct v4l2_requestbuffers breq = { 0, obj->type, allocator->memory };
+ gint i = 0;
+ GstV4l2Return ret = GST_V4L2_OK;
+
+ GST_DEBUG_OBJECT (allocator, "stop allocator");
+
+ GST_OBJECT_LOCK (allocator);
+
+ if (!g_atomic_int_get (&allocator->active))
+ goto done;
+
+ if (gst_atomic_queue_length (allocator->free_queue) != allocator->count) {
+ GST_DEBUG_OBJECT (allocator, "allocator is still in use");
+ ret = GST_V4L2_BUSY;
+ goto done;
+ }
+
+ while (gst_atomic_queue_pop (allocator->free_queue)) {
+ /* nothing */
+ }
+
+ for (i = 0; i < allocator->count; i++) {
+ GstV4l2MemoryGroup *group = allocator->groups[i];
+ allocator->groups[i] = NULL;
+ if (group)
+ gst_v4l2_memory_group_free (group);
+ }
+
+ if (!GST_V4L2_ALLOCATOR_IS_ORPHANED (allocator)) {
+ /* Not all drivers support reqbufs(0), so only warn on failure */
+ if (obj->ioctl (obj->video_fd, VIDIOC_REQBUFS, &breq) < 0)
+ GST_WARNING_OBJECT (allocator,
+ "error releasing buffers buffers: %s", g_strerror (errno));
+ }
+
+ allocator->count = 0;
+
+ g_atomic_int_set (&allocator->active, FALSE);
+
+ done:
+ GST_OBJECT_UNLOCK (allocator);
+ return ret;
+ }
+
+ gboolean
+ gst_v4l2_allocator_orphan (GstV4l2Allocator * allocator)
+ {
+ GstV4l2Object *obj = allocator->obj;
+ struct v4l2_requestbuffers breq = { 0, obj->type, allocator->memory };
+
+ if (!GST_V4L2_ALLOCATOR_CAN_ORPHAN_BUFS (allocator))
+ return FALSE;
+
+ GST_OBJECT_FLAG_SET (allocator, GST_V4L2_ALLOCATOR_FLAG_ORPHANED);
+
+ if (!g_atomic_int_get (&allocator->active))
+ return TRUE;
+
+ if (obj->ioctl (obj->video_fd, VIDIOC_REQBUFS, &breq) < 0) {
+ GST_ERROR_OBJECT (allocator,
+ "error orphaning buffers buffers: %s", g_strerror (errno));
+ return FALSE;
+ }
+
+ return TRUE;
+ }
+
+ GstV4l2MemoryGroup *
+ gst_v4l2_allocator_alloc_mmap (GstV4l2Allocator * allocator)
+ {
+ GstV4l2Object *obj = allocator->obj;
+ GstV4l2MemoryGroup *group;
+ gint i;
+
+ g_return_val_if_fail (allocator->memory == V4L2_MEMORY_MMAP, NULL);
+
+ group = gst_v4l2_allocator_alloc (allocator);
+
+ if (group == NULL)
+ return NULL;
+
+ for (i = 0; i < group->n_mem; i++) {
+ if (group->mem[i] == NULL) {
+ gpointer data;
+ data = obj->mmap (NULL, group->planes[i].length, PROT_READ | PROT_WRITE,
+ MAP_SHARED, obj->video_fd, group->planes[i].m.mem_offset);
+
+ if (data == MAP_FAILED)
+ goto mmap_failed;
+
+ GST_LOG_OBJECT (allocator,
+ "mmap buffer length %d, data offset %d, plane %d",
+ group->planes[i].length, group->planes[i].data_offset, i);
+
+ group->mem[i] = (GstMemory *) _v4l2mem_new (0, GST_ALLOCATOR (allocator),
+ NULL, group->planes[i].length, 0, 0, group->planes[i].length, i, data,
+ -1, group);
+ } else {
+ /* Take back the allocator reference */
+ gst_object_ref (allocator);
+ }
+
+ group->mems_allocated++;
+ }
+
+ /* Ensure group size. Unlike GStreamer, v4l2 has the size (bytesused)
+ * initially set to 0. As the plane length might be bigger than the expected
+ * size exposed in the format, we simply reset the size here for
+ * simplicity */
+ gst_v4l2_allocator_reset_size (allocator, group);
+
+ return group;
+
+ mmap_failed:
+ {
+ GST_ERROR_OBJECT (allocator, "Failed to mmap buffer: %s",
+ g_strerror (errno));
+ _cleanup_failed_alloc (allocator, group);
+ return NULL;
+ }
+ }
+
+ GstV4l2MemoryGroup *
+ gst_v4l2_allocator_alloc_dmabuf (GstV4l2Allocator * allocator,
+ GstAllocator * dmabuf_allocator)
+ {
+ GstV4l2Object *obj = allocator->obj;
+ GstV4l2MemoryGroup *group;
+ gint i;
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ tbm_bo bos[VIDEO_MAX_PLANES] = {NULL, };
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+
+ g_return_val_if_fail (allocator->memory == V4L2_MEMORY_MMAP, NULL);
+
+ group = gst_v4l2_allocator_alloc (allocator);
+
+ if (group == NULL)
+ return NULL;
+
+ for (i = 0; i < group->n_mem; i++) {
+ GstV4l2Memory *mem;
+ GstMemory *dma_mem;
+
+ if (group->mem[i] == NULL) {
+ struct v4l2_exportbuffer expbuf = { 0 };
+
+ expbuf.type = obj->type;
+ expbuf.index = group->buffer.index;
+ expbuf.plane = i;
+ expbuf.flags = O_CLOEXEC | O_RDWR;
+
+ if (obj->ioctl (obj->video_fd, VIDIOC_EXPBUF, &expbuf) < 0)
+ goto expbuf_failed;
+
+ GST_LOG_OBJECT (allocator, "exported DMABUF as fd %i plane %d",
+ expbuf.fd, i);
+
+ group->mem[i] = (GstMemory *) _v4l2mem_new (0, GST_ALLOCATOR (allocator),
+ NULL, group->planes[i].length, 0, group->planes[i].data_offset,
+ group->planes[i].length - group->planes[i].data_offset, i, NULL,
+ expbuf.fd, group);
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ if (obj->tbm_output) {
++ bos[i] = tbm_bo_import_fd (allocator->bufmgr, expbuf.fd);
++ GST_INFO_OBJECT (allocator, "obj[%p,i:%d]: fd[%d] -> bo[%p]",
++ obj, expbuf.index, expbuf.fd, bos[i]);
++ }
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ } else {
+ /* Take back the allocator reference */
+ gst_object_ref (allocator);
+ }
+
+ group->mems_allocated++;
+
+ g_assert (gst_is_v4l2_memory (group->mem[i]));
+ mem = (GstV4l2Memory *) group->mem[i];
+
+ dma_mem = gst_fd_allocator_alloc (dmabuf_allocator, mem->dmafd,
+ group->planes[i].length, GST_FD_MEMORY_FLAG_DONT_CLOSE);
+ gst_memory_resize (dma_mem, group->planes[i].data_offset,
+ group->planes[i].length - group->planes[i].data_offset);
+
+ gst_mini_object_set_qdata (GST_MINI_OBJECT (dma_mem),
+ GST_V4L2_MEMORY_QUARK, mem, (GDestroyNotify) gst_memory_unref);
+
+ group->mem[i] = dma_mem;
+ }
+
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ if (obj->tbm_output && !group->surface) {
++ group->surface = tbm_surface_internal_create_with_bos (&allocator->s_info, bos, group->n_mem);
++ GST_INFO_OBJECT (allocator, "new surface[%p] in memory group[%p]", group->surface, group);
++ }
++ /* release bos - they will be kept in surface. */
++ for (i = 0 ; i < VIDEO_MAX_PLANES && bos[i] ; i++)
++ tbm_bo_unref (bos[i]);
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
++
+ gst_v4l2_allocator_reset_size (allocator, group);
+
+ return group;
+
+ expbuf_failed:
+ {
+ GST_ERROR_OBJECT (allocator, "Failed to export DMABUF: %s",
+ g_strerror (errno));
+ goto cleanup;
+ }
+ cleanup:
+ {
+ _cleanup_failed_alloc (allocator, group);
+ return NULL;
+ }
+ }
+
+ static void
+ gst_v4l2_allocator_clear_dmabufin (GstV4l2Allocator * allocator,
+ GstV4l2MemoryGroup * group)
+ {
+ GstV4l2Object *obj = allocator->obj;
+ GstV4l2Memory *mem;
+ gint i;
+
+ g_return_if_fail (allocator->memory == V4L2_MEMORY_DMABUF);
+
+ for (i = 0; i < group->n_mem; i++) {
+
+ mem = (GstV4l2Memory *) group->mem[i];
+
+ GST_LOG_OBJECT (allocator, "[%i] clearing DMABUF import, fd %i plane %d",
+ group->buffer.index, mem->dmafd, i);
+
+ /* Update memory */
+ mem->mem.maxsize = 0;
+ mem->mem.offset = 0;
+ mem->mem.size = 0;
+ mem->dmafd = -1;
+
+ /* Update v4l2 structure */
+ group->planes[i].length = 0;
+ group->planes[i].bytesused = 0;
+ group->planes[i].m.fd = -1;
+ group->planes[i].data_offset = 0;
+ }
+
+ if (!V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
+ group->buffer.bytesused = 0;
+ group->buffer.length = 0;
+ group->buffer.m.fd = -1;
+ }
+ }
+
+ GstV4l2MemoryGroup *
+ gst_v4l2_allocator_alloc_dmabufin (GstV4l2Allocator * allocator)
+ {
+ GstV4l2MemoryGroup *group;
+ gint i;
+
+ g_return_val_if_fail (allocator->memory == V4L2_MEMORY_DMABUF, NULL);
+
+ group = gst_v4l2_allocator_alloc (allocator);
+
+ if (group == NULL)
+ return NULL;
+
+ GST_LOG_OBJECT (allocator, "allocating empty DMABUF import group");
+
+ for (i = 0; i < group->n_mem; i++) {
+ if (group->mem[i] == NULL) {
+ group->mem[i] = (GstMemory *) _v4l2mem_new (0, GST_ALLOCATOR (allocator),
+ NULL, 0, 0, 0, 0, i, NULL, -1, group);
+ } else {
+ /* Take back the allocator reference */
+ gst_object_ref (allocator);
+ }
+
+ group->mems_allocated++;
+ }
+
+ gst_v4l2_allocator_clear_dmabufin (allocator, group);
+
+ return group;
+ }
+
+ static void
+ gst_v4l2_allocator_clear_userptr (GstV4l2Allocator * allocator,
+ GstV4l2MemoryGroup * group)
+ {
+ GstV4l2Object *obj = allocator->obj;
+ GstV4l2Memory *mem;
+ gint i;
+
+ g_return_if_fail (allocator->memory == V4L2_MEMORY_USERPTR);
+
+ for (i = 0; i < group->n_mem; i++) {
+ mem = (GstV4l2Memory *) group->mem[i];
+
+ GST_LOG_OBJECT (allocator, "[%i] clearing USERPTR %p plane %d size %"
+ G_GSIZE_FORMAT, group->buffer.index, mem->data, i, mem->mem.size);
+
+ mem->mem.maxsize = 0;
+ mem->mem.size = 0;
+ mem->data = NULL;
+
+ group->planes[i].length = 0;
+ group->planes[i].bytesused = 0;
+ group->planes[i].m.userptr = 0;
+ }
+
+ if (!V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
+ group->buffer.bytesused = 0;
+ group->buffer.length = 0;
+ group->buffer.m.userptr = 0;
+ }
+ }
+
+ GstV4l2MemoryGroup *
+ gst_v4l2_allocator_alloc_userptr (GstV4l2Allocator * allocator)
+ {
+ GstV4l2MemoryGroup *group;
+ gint i;
+
+ g_return_val_if_fail (allocator->memory == V4L2_MEMORY_USERPTR, NULL);
+
+ group = gst_v4l2_allocator_alloc (allocator);
+
+ if (group == NULL)
+ return NULL;
+
+ GST_LOG_OBJECT (allocator, "allocating empty USERPTR group");
+
+ for (i = 0; i < group->n_mem; i++) {
+
+ if (group->mem[i] == NULL) {
+ group->mem[i] = (GstMemory *) _v4l2mem_new (0, GST_ALLOCATOR (allocator),
+ NULL, 0, 0, 0, 0, i, NULL, -1, group);
+ } else {
+ /* Take back the allocator reference */
+ gst_object_ref (allocator);
+ }
+
+ group->mems_allocated++;
+ }
+
+ gst_v4l2_allocator_clear_userptr (allocator, group);
+
+ return group;
+ }
+
+ gboolean
+ gst_v4l2_allocator_import_dmabuf (GstV4l2Allocator * allocator,
+ GstV4l2MemoryGroup * group, gint n_mem, GstMemory ** dma_mem)
+ {
+ GstV4l2Object *obj = allocator->obj;
+ GstV4l2Memory *mem;
+ gint i;
+
+ g_return_val_if_fail (allocator->memory == V4L2_MEMORY_DMABUF, FALSE);
+
+ if (group->n_mem != n_mem)
+ goto n_mem_mismatch;
+
+ for (i = 0; i < group->n_mem; i++) {
+ gint dmafd;
+ gsize size, offset, maxsize;
+
+ if (!gst_is_dmabuf_memory (dma_mem[i]))
+ goto not_dmabuf;
+
+ size = gst_memory_get_sizes (dma_mem[i], &offset, &maxsize);
+
+ dmafd = gst_dmabuf_memory_get_fd (dma_mem[i]);
+
+ GST_LOG_OBJECT (allocator, "[%i] imported DMABUF as fd %i plane %d",
+ group->buffer.index, dmafd, i);
+
+ mem = (GstV4l2Memory *) group->mem[i];
+
+ /* Update memory */
+ mem->mem.maxsize = maxsize;
+ mem->mem.offset = offset;
+ mem->mem.size = size;
+ mem->dmafd = dmafd;
+
+ /* Update v4l2 structure */
+ group->planes[i].length = maxsize;
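+ /* bytesused is counted from the start of the plane, so it includes
+ * data_offset as well as the payload size */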
+ group->planes[i].bytesused = size + offset;
+ group->planes[i].m.fd = dmafd;
+ group->planes[i].data_offset = offset;
+ }
+
+ /* Copy into buffer structure if not using planes */
+ if (!V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
+ group->buffer.bytesused = group->planes[0].bytesused;
+ group->buffer.length = group->planes[0].length;
+ group->buffer.m.fd = group->planes[0].m.fd;
+
+ /* FIXME Check if data_offset > 0 and fail for non-multi-planar */
+ g_assert (group->planes[0].data_offset == 0);
+ } else {
+ group->buffer.length = group->n_mem;
+ }
+
+ return TRUE;
+
+ n_mem_mismatch:
+ {
+ GST_ERROR_OBJECT (allocator, "Got %i dmabuf but needed %i", n_mem,
+ group->n_mem);
+ return FALSE;
+ }
+ not_dmabuf:
+ {
+ GST_ERROR_OBJECT (allocator, "Memory %i is not of DMABUF", i);
+ return FALSE;
+ }
+ }
+
+ gboolean
+ gst_v4l2_allocator_import_userptr (GstV4l2Allocator * allocator,
+ GstV4l2MemoryGroup * group, gsize img_size, int n_planes,
+ gpointer * data, gsize * size)
+ {
+ GstV4l2Object *obj = allocator->obj;
+ GstV4l2Memory *mem;
+ gint i;
+
+ g_return_val_if_fail (allocator->memory == V4L2_MEMORY_USERPTR, FALSE);
+
+ /* TODO Support passing N plane from 1 memory to MPLANE v4l2 format */
+ if (V4L2_TYPE_IS_MULTIPLANAR (obj->type) && n_planes != group->n_mem)
+ goto n_mem_mismatch;
+
+ for (i = 0; i < group->n_mem; i++) {
+ gsize maxsize, psize;
+
+ /* TODO request used size and maxsize separately */
+ if (V4L2_TYPE_IS_MULTIPLANAR (obj->type))
+ maxsize = psize = size[i];
+ else
+ maxsize = psize = img_size;
+
+ g_assert (psize <= img_size);
+
+ GST_LOG_OBJECT (allocator, "[%i] imported USERPTR %p plane %d size %"
+ G_GSIZE_FORMAT, group->buffer.index, data[i], i, psize);
+
+ mem = (GstV4l2Memory *) group->mem[i];
+
+ mem->mem.maxsize = maxsize;
+ mem->mem.size = psize;
+ mem->data = data[i];
+
+ group->planes[i].length = maxsize;
+ group->planes[i].bytesused = psize;
+ group->planes[i].m.userptr = (unsigned long) data[i];
+ group->planes[i].data_offset = 0;
+ }
+
+ /* Copy into buffer structure if not using planes */
+ if (!V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
+ group->buffer.bytesused = group->planes[0].bytesused;
+ group->buffer.length = group->planes[0].length;
+ group->buffer.m.userptr = group->planes[0].m.userptr;
+ } else {
+ group->buffer.length = group->n_mem;
+ }
+
+ return TRUE;
+
+ n_mem_mismatch:
+ {
+ GST_ERROR_OBJECT (allocator, "Got %i userptr plane while driver need %i",
+ n_planes, group->n_mem);
+ return FALSE;
+ }
+ }
+
+ void
+ gst_v4l2_allocator_flush (GstV4l2Allocator * allocator)
+ {
+ gint i;
+
+ GST_OBJECT_LOCK (allocator);
+
+ if (!g_atomic_int_get (&allocator->active))
+ goto done;
+
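+ /* Unref the extra refs taken at QBUF time so queued groups drop back
+ * into the free queue. */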
+ for (i = 0; i < allocator->count; i++) {
+ GstV4l2MemoryGroup *group = allocator->groups[i];
+ gint n;
+
+ if (IS_QUEUED (group->buffer)) {
+ UNSET_QUEUED (group->buffer);
+
+ gst_v4l2_allocator_reset_group (allocator, group);
+
+ for (n = 0; n < group->n_mem; n++)
+ gst_memory_unref (group->mem[n]);
+ }
+ }
+
+ done:
+ GST_OBJECT_UNLOCK (allocator);
+ }
+
+ gboolean
+ gst_v4l2_allocator_qbuf (GstV4l2Allocator * allocator,
+ GstV4l2MemoryGroup * group)
+ {
+ GstV4l2Object *obj = allocator->obj;
+ gboolean ret = TRUE;
+ gint i;
+
+ g_return_val_if_fail (g_atomic_int_get (&allocator->active), FALSE);
+
+ /* update sizes */
+ if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
+ for (i = 0; i < group->n_mem; i++)
+ group->planes[i].bytesused =
+ gst_memory_get_sizes (group->mem[i], NULL, NULL);
+ } else {
+ group->buffer.bytesused = gst_memory_get_sizes (group->mem[0], NULL, NULL);
+ }
+
+ /* Ensure the memory will stay around and is RO */
+ for (i = 0; i < group->n_mem; i++)
+ gst_memory_ref (group->mem[i]);
+
+ if (obj->ioctl (obj->video_fd, VIDIOC_QBUF, &group->buffer) < 0) {
+ GST_ERROR_OBJECT (allocator, "failed queueing buffer %i: %s",
+ group->buffer.index, g_strerror (errno));
+
+ /* Release the memory, possibly making it RW again */
+ for (i = 0; i < group->n_mem; i++)
+ gst_memory_unref (group->mem[i]);
+
+ ret = FALSE;
+ if (IS_QUEUED (group->buffer)) {
+ GST_DEBUG_OBJECT (allocator,
+ "driver pretends buffer is queued even if queue failed");
+ UNSET_QUEUED (group->buffer);
+ }
+ goto done;
+ }
+
+ GST_LOG_OBJECT (allocator, "queued buffer %i (flags 0x%X)",
+ group->buffer.index, group->buffer.flags);
+
+ if (!IS_QUEUED (group->buffer)) {
+ GST_DEBUG_OBJECT (allocator,
+ "driver pretends buffer is not queued even if queue succeeded");
+ SET_QUEUED (group->buffer);
+ }
+
+ done:
+ return ret;
+ }
+
+ GstFlowReturn
+ gst_v4l2_allocator_dqbuf (GstV4l2Allocator * allocator,
+ GstV4l2MemoryGroup ** group_out)
+ {
+ GstV4l2Object *obj = allocator->obj;
+ struct v4l2_buffer buffer = { 0 };
+ struct v4l2_plane planes[VIDEO_MAX_PLANES] = { {0} };
+ gint i;
+
+ GstV4l2MemoryGroup *group = NULL;
+
+ g_return_val_if_fail (g_atomic_int_get (&allocator->active), GST_FLOW_ERROR);
+
+ buffer.type = obj->type;
+ buffer.memory = allocator->memory;
+
+ if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
+ buffer.length = obj->format.fmt.pix_mp.num_planes;
+ buffer.m.planes = planes;
+ }
+
+ if (obj->ioctl (obj->video_fd, VIDIOC_DQBUF, &buffer) < 0)
+ goto error;
+
+ group = allocator->groups[buffer.index];
+
+ if (!IS_QUEUED (group->buffer)) {
+ GST_ERROR_OBJECT (allocator,
+ "buffer %i was not queued, this indicate a driver bug.", buffer.index);
+ return GST_FLOW_ERROR;
+ }
+
+ group->buffer = buffer;
+
+ GST_LOG_OBJECT (allocator, "dequeued buffer %i (flags 0x%X)", buffer.index,
+ buffer.flags);
+
+ if (IS_QUEUED (group->buffer)) {
+ GST_DEBUG_OBJECT (allocator,
+ "driver pretends buffer is queued even if dequeue succeeded");
+ UNSET_QUEUED (group->buffer);
+ }
+
+ if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
+ group->buffer.m.planes = group->planes;
+ memcpy (group->planes, buffer.m.planes, sizeof (planes));
+ } else {
+ group->planes[0].bytesused = group->buffer.bytesused;
+ group->planes[0].length = group->buffer.length;
+ g_assert (sizeof (group->planes[0].m) == sizeof (group->buffer.m));
+ memcpy (&group->planes[0].m, &group->buffer.m, sizeof (group->buffer.m));
+ }
+
+ /* And update memory size */
+ if (V4L2_TYPE_IS_OUTPUT (obj->type)) {
+ gst_v4l2_allocator_reset_size (allocator, group);
+ } else {
+ /* for capture, simply read the size */
+ for (i = 0; i < group->n_mem; i++) {
+ gsize size, offset;
+
+ GST_LOG_OBJECT (allocator,
+ "Dequeued capture buffer, length: %u bytesused: %u data_offset: %u",
+ group->planes[i].length, group->planes[i].bytesused,
+ group->planes[i].data_offset);
+
+ offset = group->planes[i].data_offset;
+
+ if (group->planes[i].bytesused >= group->planes[i].data_offset) {
+ size = group->planes[i].bytesused - group->planes[i].data_offset;
+ } else {
+ GST_WARNING_OBJECT (allocator, "V4L2 provided buffer has bytesused %"
+ G_GUINT32_FORMAT " which is too small to include data_offset %"
+ G_GUINT32_FORMAT, group->planes[i].bytesused,
+ group->planes[i].data_offset);
+ size = group->planes[i].bytesused;
+ }
+
+ if (G_LIKELY (size + offset <= group->mem[i]->maxsize))
+ gst_memory_resize (group->mem[i], offset, size);
+ else {
+ GST_WARNING_OBJECT (allocator,
+ "v4l2 provided buffer that is too big for the memory it was "
+ "writing into. v4l2 claims %" G_GSIZE_FORMAT " bytes used but "
+ "memory is only %" G_GSIZE_FORMAT "B. This is probably a driver "
+ "bug.", size, group->mem[i]->maxsize);
+ gst_memory_resize (group->mem[i], 0, group->mem[i]->maxsize);
+ }
+ }
+ }
+
+ /* Release the memory, possibly making it RW again */
+ for (i = 0; i < group->n_mem; i++)
+ gst_memory_unref (group->mem[i]);
+
+ *group_out = group;
+ return GST_FLOW_OK;
+
+ error:
+ if (errno == EPIPE) {
+ GST_DEBUG_OBJECT (allocator, "broken pipe signals last buffer");
+ return GST_FLOW_EOS;
+ }
+
+ GST_ERROR_OBJECT (allocator, "failed dequeuing a %s buffer: %s",
+ memory_type_to_str (allocator->memory), g_strerror (errno));
+
+ switch (errno) {
+ case EAGAIN:
+ GST_WARNING_OBJECT (allocator,
+ "Non-blocking I/O has been selected using O_NONBLOCK and"
+ " no buffer was in the outgoing queue.");
+ break;
+ case EINVAL:
+ GST_ERROR_OBJECT (allocator,
+ "The buffer type is not supported, or the index is out of bounds, "
+ "or no buffers have been allocated yet, or the userptr "
+ "or length are invalid.");
+ break;
+ case ENOMEM:
+ GST_ERROR_OBJECT (allocator,
+ "insufficient memory to enqueue a user pointer buffer");
+ break;
+ case EIO:
+ GST_INFO_OBJECT (allocator,
+ "VIDIOC_DQBUF failed due to an internal error."
+ " Can also indicate temporary problems like signal loss."
+ " Note the driver might dequeue an (empty) buffer despite"
+ " returning an error, or even stop capturing.");
+ /* have we de-queued a buffer ? */
+ if (!IS_QUEUED (buffer)) {
+ GST_DEBUG_OBJECT (allocator, "reenqueueing buffer");
+ /* FIXME ... should we do something here? */
+ }
+ break;
+ case EINTR:
+ GST_WARNING_OBJECT (allocator, "could not sync on a buffer on device");
+ break;
+ default:
+ GST_WARNING_OBJECT (allocator,
+ "Grabbing frame got interrupted unexpectedly. %d: %s.", errno,
+ g_strerror (errno));
+ break;
+ }
+
+ return GST_FLOW_ERROR;
+ }
+
+ void
+ gst_v4l2_allocator_reset_group (GstV4l2Allocator * allocator,
+ GstV4l2MemoryGroup * group)
+ {
+ switch (allocator->memory) {
+ case V4L2_MEMORY_USERPTR:
+ gst_v4l2_allocator_clear_userptr (allocator, group);
+ break;
+ case V4L2_MEMORY_DMABUF:
+ gst_v4l2_allocator_clear_dmabufin (allocator, group);
+ break;
+ case V4L2_MEMORY_MMAP:
+ break;
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+
+ gst_v4l2_allocator_reset_size (allocator, group);
+ }
--- /dev/null
-
+ /*
+ * Copyright (C) 2014 Collabora Ltd.
+ * Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
+
+ #ifndef __GST_V4L2_ALLOCATOR_H__
+ #define __GST_V4L2_ALLOCATOR_H__
+
+ #include "ext/videodev2.h"
+ #include <gst/gst.h>
+ #include <gst/gstatomicqueue.h>
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++#include <tbm_surface.h>
++#include <tbm_surface_internal.h>
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+
+ G_BEGIN_DECLS
+
+ #define GST_TYPE_V4L2_ALLOCATOR (gst_v4l2_allocator_get_type())
+ #define GST_IS_V4L2_ALLOCATOR(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_V4L2_ALLOCATOR))
+ #define GST_IS_V4L2_ALLOCATOR_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_V4L2_ALLOCATOR))
+ #define GST_V4L2_ALLOCATOR_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_V4L2_ALLOCATOR, GstV4l2AllocatorClass))
+ #define GST_V4L2_ALLOCATOR(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_V4L2_ALLOCATOR, GstV4l2Allocator))
+ #define GST_V4L2_ALLOCATOR_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_V4L2_ALLOCATOR, GstV4l2AllocatorClass))
+ #define GST_V4L2_ALLOCATOR_CAST(obj) ((GstV4l2Allocator *)(obj))
+
+ #define GST_V4L2_ALLOCATOR_CAN_REQUEST(obj,type) \
+ (GST_OBJECT_FLAG_IS_SET (obj, GST_V4L2_ALLOCATOR_FLAG_ ## type ## _REQBUFS))
+ #define GST_V4L2_ALLOCATOR_CAN_ALLOCATE(obj,type) \
+ (GST_OBJECT_FLAG_IS_SET (obj, GST_V4L2_ALLOCATOR_FLAG_ ## type ## _CREATE_BUFS))
+ #define GST_V4L2_ALLOCATOR_CAN_ORPHAN_BUFS(obj) \
+ (GST_OBJECT_FLAG_IS_SET (obj, GST_V4L2_ALLOCATOR_FLAG_SUPPORTS_ORPHANED_BUFS))
+ #define GST_V4L2_ALLOCATOR_IS_ORPHANED(obj) \
+ (GST_OBJECT_FLAG_IS_SET (obj, GST_V4L2_ALLOCATOR_FLAG_ORPHANED))
+
+ #define GST_V4L2_MEMORY_QUARK gst_v4l2_memory_quark ()
+
+ typedef struct _GstV4l2Allocator GstV4l2Allocator;
+ typedef struct _GstV4l2AllocatorClass GstV4l2AllocatorClass;
+ typedef struct _GstV4l2MemoryGroup GstV4l2MemoryGroup;
+ typedef struct _GstV4l2Memory GstV4l2Memory;
+ typedef enum _GstV4l2Capabilities GstV4l2Capabilities;
+ typedef enum _GstV4l2Return GstV4l2Return;
+
+ enum _GstV4l2AllocatorFlags
+ {
+ GST_V4L2_ALLOCATOR_FLAG_MMAP_REQBUFS = (GST_ALLOCATOR_FLAG_LAST << 0),
+ GST_V4L2_ALLOCATOR_FLAG_MMAP_CREATE_BUFS = (GST_ALLOCATOR_FLAG_LAST << 1),
+ GST_V4L2_ALLOCATOR_FLAG_USERPTR_REQBUFS = (GST_ALLOCATOR_FLAG_LAST << 2),
+ GST_V4L2_ALLOCATOR_FLAG_USERPTR_CREATE_BUFS = (GST_ALLOCATOR_FLAG_LAST << 3),
+ GST_V4L2_ALLOCATOR_FLAG_DMABUF_REQBUFS = (GST_ALLOCATOR_FLAG_LAST << 4),
+ GST_V4L2_ALLOCATOR_FLAG_DMABUF_CREATE_BUFS = (GST_ALLOCATOR_FLAG_LAST << 5),
+ GST_V4L2_ALLOCATOR_FLAG_SUPPORTS_ORPHANED_BUFS = (GST_ALLOCATOR_FLAG_LAST << 6),
+ GST_V4L2_ALLOCATOR_FLAG_ORPHANED = (GST_ALLOCATOR_FLAG_LAST << 7),
+ };
+
+ enum _GstV4l2Return
+ {
+ GST_V4L2_OK = 0,
+ GST_V4L2_ERROR = -1,
+ GST_V4L2_BUSY = -2
+ };
+
+ struct _GstV4l2Memory
+ {
+ GstMemory mem;
+ gint plane;
+ GstV4l2MemoryGroup *group;
+ gpointer data;
+ gint dmafd;
+ };
+
+ struct _GstV4l2MemoryGroup
+ {
+ gint n_mem;
+ GstMemory * mem[VIDEO_MAX_PLANES];
+ gint mems_allocated;
+ struct v4l2_buffer buffer;
+ struct v4l2_plane planes[VIDEO_MAX_PLANES];
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ tbm_surface_h surface;
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ };
+
+ struct _GstV4l2Allocator
+ {
+ GstAllocator parent;
+ GstV4l2Object *obj;
+ guint32 count;
+ guint32 memory;
+ gboolean can_allocate;
+ gboolean active;
+
+ GstV4l2MemoryGroup * groups[VIDEO_MAX_FRAME];
+ GstAtomicQueue *free_queue;
+ GstAtomicQueue *pending_queue;
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ tbm_bufmgr bufmgr;
++ tbm_surface_info_s s_info;
++ gint live_buffer_count;
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ };
+
+ struct _GstV4l2AllocatorClass {
+ GstAllocatorClass parent_class;
+ };
+
+ GType gst_v4l2_allocator_get_type(void);
+
+ gboolean gst_is_v4l2_memory (GstMemory * mem);
+
+ GQuark gst_v4l2_memory_quark (void);
+
+ gboolean gst_v4l2_allocator_is_active (GstV4l2Allocator * allocator);
+
+ guint gst_v4l2_allocator_get_size (GstV4l2Allocator * allocator);
+
+ GstV4l2Allocator* gst_v4l2_allocator_new (GstObject *parent, GstV4l2Object * obj);
+
+ guint gst_v4l2_allocator_start (GstV4l2Allocator * allocator,
+ guint32 count, guint32 memory);
+
+ GstV4l2Return gst_v4l2_allocator_stop (GstV4l2Allocator * allocator);
+
+ gboolean gst_v4l2_allocator_orphan (GstV4l2Allocator * allocator);
+
+ GstV4l2MemoryGroup* gst_v4l2_allocator_alloc_mmap (GstV4l2Allocator * allocator);
+
+ GstV4l2MemoryGroup* gst_v4l2_allocator_alloc_dmabuf (GstV4l2Allocator * allocator,
+ GstAllocator * dmabuf_allocator);
+
+ GstV4l2MemoryGroup * gst_v4l2_allocator_alloc_dmabufin (GstV4l2Allocator * allocator);
+
+ GstV4l2MemoryGroup * gst_v4l2_allocator_alloc_userptr (GstV4l2Allocator * allocator);
+
+ gboolean gst_v4l2_allocator_import_dmabuf (GstV4l2Allocator * allocator,
+ GstV4l2MemoryGroup *group,
+ gint n_mem, GstMemory ** dma_mem);
+
+ gboolean gst_v4l2_allocator_import_userptr (GstV4l2Allocator * allocator,
+ GstV4l2MemoryGroup *group,
+ gsize img_size, int n_planes,
+ gpointer * data, gsize * size);
+
+ void gst_v4l2_allocator_flush (GstV4l2Allocator * allocator);
+
+ gboolean gst_v4l2_allocator_qbuf (GstV4l2Allocator * allocator,
+ GstV4l2MemoryGroup * group);
+
+ GstFlowReturn gst_v4l2_allocator_dqbuf (GstV4l2Allocator * allocator,
+ GstV4l2MemoryGroup ** group);
+
+ void gst_v4l2_allocator_reset_group (GstV4l2Allocator * allocator,
+ GstV4l2MemoryGroup * group);
+
+ G_END_DECLS
+
+ #endif /* __GST_V4L2_ALLOCATOR_H__ */
--- /dev/null
-
+ /* GStreamer
+ *
+ * Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * 2006 Edgard Lima <edgard.lima@gmail.com>
+ * 2009 Texas Instruments, Inc - http://www.ti.com/
+ *
+ * gstv4l2bufferpool.c V4L2 buffer pool class
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ # include <config.h>
+ #endif
+
+ #ifndef _GNU_SOURCE
+ # define _GNU_SOURCE /* O_CLOEXEC */
+ #endif
+ #include <fcntl.h>
+
+ #include <sys/mman.h>
+ #include <string.h>
+ #include <unistd.h>
+
+ #include "gst/video/video.h"
+ #include "gst/video/gstvideometa.h"
+ #include "gst/video/gstvideopool.h"
+ #include "gst/allocators/gstdmabuf.h"
+
+ #include <gstv4l2bufferpool.h>
+
+ #include "gstv4l2object.h"
+ #include "gst/gst-i18n-plugin.h"
+ #include <gst/glib-compat-private.h>
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++#include <gst/allocators/gsttizenmemory.h>
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+
+ GST_DEBUG_CATEGORY_STATIC (v4l2bufferpool_debug);
+ GST_DEBUG_CATEGORY_STATIC (CAT_PERFORMANCE);
+ #define GST_CAT_DEFAULT v4l2bufferpool_debug
+
+ #define GST_V4L2_IMPORT_QUARK gst_v4l2_buffer_pool_import_quark ()
+
-
+ /*
+ * GstV4l2BufferPool:
+ */
+ #define gst_v4l2_buffer_pool_parent_class parent_class
+ G_DEFINE_TYPE (GstV4l2BufferPool, gst_v4l2_buffer_pool, GST_TYPE_BUFFER_POOL);
+
+ enum _GstV4l2BufferPoolAcquireFlags
+ {
+ GST_V4L2_BUFFER_POOL_ACQUIRE_FLAG_RESURRECT =
+ GST_BUFFER_POOL_ACQUIRE_FLAG_LAST,
+ GST_V4L2_BUFFER_POOL_ACQUIRE_FLAG_LAST
+ };
+
+ static void gst_v4l2_buffer_pool_release_buffer (GstBufferPool * bpool,
+ GstBuffer * buffer);
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++typedef struct _GstV4l2TizenBuffer GstV4l2TizenBuffer;
++struct _GstV4l2TizenBuffer {
++ int index;
++ GstBuffer *gst_buffer;
++ GstBuffer *v4l2_buffer;
++ GstV4l2BufferPool *v4l2_pool;
++};
++
++static void gst_v4l2_tizen_buffer_finalize (GstV4l2TizenBuffer *tizen_buffer)
++{
++ GstV4l2BufferPool *pool = NULL;
++
++ if (!tizen_buffer) {
++ GST_ERROR ("NULL buffer");
++ return;
++ }
++
++ pool = tizen_buffer->v4l2_pool;
++
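++ /* Give the wrapped v4l2 buffer back to the pool before dropping our
++ * bookkeeping and waking up anyone waiting for a free buffer. */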
++ gst_v4l2_buffer_pool_release_buffer (GST_BUFFER_POOL_CAST (pool), tizen_buffer->v4l2_buffer);
++
++ g_mutex_lock (&pool->buffer_lock);
++
++ pool->live_buffer_count--;
++
++ GST_DEBUG_OBJECT (pool, "release buffer[%d][tizen:%p,v4l2:%p,gst:%p], live[%d]",
++ tizen_buffer->index, tizen_buffer, tizen_buffer->v4l2_buffer,
++ tizen_buffer->gst_buffer, pool->live_buffer_count);
++
++ g_cond_signal (&pool->buffer_cond);
++
++ g_mutex_unlock (&pool->buffer_lock);
++
++ gst_object_unref (pool);
++
++ g_free(tizen_buffer);
++}
++
++static GstV4l2TizenBuffer *gst_v4l2_tizen_buffer_new (GstBuffer *v4l2_buffer, int index, GstV4l2BufferPool *v4l2_pool)
++{
++ GstV4l2TizenBuffer *tizen_buffer = NULL;
++ GstMemory *memory = NULL;
++
++ tizen_buffer = g_new0 (GstV4l2TizenBuffer, 1);
++ tizen_buffer->index = index;
++ tizen_buffer->v4l2_buffer = v4l2_buffer;
++ tizen_buffer->gst_buffer = gst_buffer_new ();
++ tizen_buffer->v4l2_pool = gst_object_ref (v4l2_pool);
++
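++ /* Wrap the group's tbm surface in a Tizen memory; its destroy notify
++ * ties the gst buffer's lifetime back to this bookkeeping structure. */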
++ memory = gst_tizen_allocator_alloc_surface (v4l2_pool->tallocator,
++ &v4l2_pool->obj->info, v4l2_pool->vallocator->groups[index]->surface, (gpointer)tizen_buffer,
++ (GDestroyNotify)gst_v4l2_tizen_buffer_finalize);
++
++ gst_buffer_append_memory (tizen_buffer->gst_buffer, memory);
++ gst_buffer_set_size (tizen_buffer->gst_buffer, v4l2_pool->vallocator->s_info.size);
++
++ g_mutex_lock (&v4l2_pool->buffer_lock);
++
++ v4l2_pool->live_buffer_count++;
++
++ GST_DEBUG_OBJECT (v4l2_pool, "new buffer[tizen:%p,v4l2:%p,gst:%p], size[%d], live[%d]",
++ tizen_buffer, v4l2_buffer, tizen_buffer->gst_buffer,
++ v4l2_pool->vallocator->s_info.size, v4l2_pool->live_buffer_count);
++
++ g_mutex_unlock (&v4l2_pool->buffer_lock);
++
++ return tizen_buffer;
++}
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+
+ static gboolean
+ gst_v4l2_is_buffer_valid (GstBuffer * buffer, GstV4l2MemoryGroup ** out_group)
+ {
+ GstMemory *mem = gst_buffer_peek_memory (buffer, 0);
+ gboolean valid = FALSE;
+
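+ /* TAG_MEMORY means the memories were replaced or resized downstream, so
+ * the buffer can no longer be queued back to the device as-is. */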
+ if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_TAG_MEMORY))
+ goto done;
+
+ if (gst_is_dmabuf_memory (mem))
+ mem = gst_mini_object_get_qdata (GST_MINI_OBJECT (mem),
+ GST_V4L2_MEMORY_QUARK);
+
+ if (mem && gst_is_v4l2_memory (mem)) {
+ GstV4l2Memory *vmem = (GstV4l2Memory *) mem;
+ GstV4l2MemoryGroup *group = vmem->group;
+ gint i;
+
+ if (group->n_mem != gst_buffer_n_memory (buffer))
+ goto done;
+
+ for (i = 0; i < group->n_mem; i++) {
+ if (group->mem[i] != gst_buffer_peek_memory (buffer, i))
+ goto done;
+
+ if (!gst_memory_is_writable (group->mem[i]))
+ goto done;
+ }
+
+ valid = TRUE;
+ if (out_group)
+ *out_group = group;
+ }
+
+ done:
+ return valid;
+ }
+
+ static GstFlowReturn
+ gst_v4l2_buffer_pool_copy_buffer (GstV4l2BufferPool * pool, GstBuffer * dest,
+ GstBuffer * src)
+ {
+ const GstVideoFormatInfo *finfo = pool->caps_info.finfo;
+
+ GST_LOG_OBJECT (pool, "copying buffer");
+
+ if (finfo && (finfo->format != GST_VIDEO_FORMAT_UNKNOWN &&
+ finfo->format != GST_VIDEO_FORMAT_ENCODED)) {
+ GstVideoFrame src_frame, dest_frame;
+
+ GST_DEBUG_OBJECT (pool, "copy video frame");
+
+ /* we have raw video, use videoframe copy to get strides right */
+ if (!gst_video_frame_map (&src_frame, &pool->caps_info, src, GST_MAP_READ))
+ goto invalid_buffer;
+
+ if (!gst_video_frame_map (&dest_frame, &pool->caps_info, dest,
+ GST_MAP_WRITE)) {
+ gst_video_frame_unmap (&src_frame);
+ goto invalid_buffer;
+ }
+
+ gst_video_frame_copy (&dest_frame, &src_frame);
+
+ gst_video_frame_unmap (&src_frame);
+ gst_video_frame_unmap (&dest_frame);
+ } else {
+ GstMapInfo map;
+
+ GST_DEBUG_OBJECT (pool, "copy raw bytes");
+
+ if (!gst_buffer_map (src, &map, GST_MAP_READ))
+ goto invalid_buffer;
+
+ gst_buffer_fill (dest, 0, map.data, gst_buffer_get_size (src));
+
+ gst_buffer_unmap (src, &map);
+ gst_buffer_resize (dest, 0, gst_buffer_get_size (src));
+ }
+
+ gst_buffer_copy_into (dest, src,
+ GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS, 0, -1);
+
+ GST_CAT_LOG_OBJECT (CAT_PERFORMANCE, pool, "slow copy into buffer %p", dest);
+
+ return GST_FLOW_OK;
+
+ invalid_buffer:
+ {
+ GST_ERROR_OBJECT (pool, "could not map buffer");
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ struct UserPtrData
+ {
+ GstBuffer *buffer;
+ gboolean is_frame;
+ GstVideoFrame frame;
+ GstMapInfo map;
+ };
+
+ static GQuark
+ gst_v4l2_buffer_pool_import_quark (void)
+ {
+ static GQuark quark = 0;
+
+ if (quark == 0)
+ quark = g_quark_from_string ("GstV4l2BufferPoolUsePtrData");
+
+ return quark;
+ }
+
+ static void
+ _unmap_userptr_frame (struct UserPtrData *data)
+ {
+ if (data->is_frame)
+ gst_video_frame_unmap (&data->frame);
+ else
+ gst_buffer_unmap (data->buffer, &data->map);
+
+ if (data->buffer)
+ gst_buffer_unref (data->buffer);
+
+ g_slice_free (struct UserPtrData, data);
+ }
+
+ static GstFlowReturn
+ gst_v4l2_buffer_pool_import_userptr (GstV4l2BufferPool * pool,
+ GstBuffer * dest, GstBuffer * src)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstV4l2MemoryGroup *group = NULL;
+ GstMapFlags flags;
+ const GstVideoFormatInfo *finfo = pool->caps_info.finfo;
+ struct UserPtrData *data = NULL;
+
+ GST_LOG_OBJECT (pool, "importing userptr");
+
+ /* get the group */
+ if (!gst_v4l2_is_buffer_valid (dest, &group))
+ goto not_our_buffer;
+
+ if (V4L2_TYPE_IS_OUTPUT (pool->obj->type))
+ flags = GST_MAP_READ;
+ else
+ flags = GST_MAP_WRITE;
+
+ data = g_slice_new0 (struct UserPtrData);
+
+ if (finfo && (finfo->format != GST_VIDEO_FORMAT_UNKNOWN &&
+ finfo->format != GST_VIDEO_FORMAT_ENCODED)) {
+ gsize size[GST_VIDEO_MAX_PLANES] = { 0, };
+ gint i;
+
+ data->is_frame = TRUE;
+
+ if (!gst_video_frame_map (&data->frame, &pool->caps_info, src, flags))
+ goto invalid_buffer;
+
+ for (i = 0; i < GST_VIDEO_FORMAT_INFO_N_PLANES (finfo); i++) {
+ if (GST_VIDEO_FORMAT_INFO_IS_TILED (finfo)) {
+ gint tinfo = GST_VIDEO_FRAME_PLANE_STRIDE (&data->frame, i);
+ gint pstride;
+ guint pheight;
+
+ pstride = GST_VIDEO_TILE_X_TILES (tinfo) <<
+ GST_VIDEO_FORMAT_INFO_TILE_WS (finfo);
+
+ pheight = GST_VIDEO_TILE_Y_TILES (tinfo) <<
+ GST_VIDEO_FORMAT_INFO_TILE_HS (finfo);
+
+ size[i] = pstride * pheight;
+ } else {
+ size[i] = GST_VIDEO_FRAME_PLANE_STRIDE (&data->frame, i) *
+ GST_VIDEO_FRAME_COMP_HEIGHT (&data->frame, i);
+ }
+ }
+
+ /* In the single-planar API, planes must be contiguous in memory and
+ * therefore they must have the expected size, i.e. no padding.
+ * To check these conditions, we verify that plane 'i' start address
+ * plus plane 'i' size equals plane 'i+1' start address */
+ if (!V4L2_TYPE_IS_MULTIPLANAR (pool->obj->type)) {
+ for (i = 0; i < (GST_VIDEO_FORMAT_INFO_N_PLANES (finfo) - 1); i++) {
+ const struct v4l2_pix_format *pix_fmt = &pool->obj->format.fmt.pix;
+ gpointer tmp;
+ gint estride = gst_v4l2_object_extrapolate_stride (finfo, i,
+ pix_fmt->bytesperline);
+ guint eheight = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (finfo, i,
+ pix_fmt->height);
+
+ tmp = ((guint8 *) data->frame.data[i]) + estride * eheight;
+ if (tmp != data->frame.data[i + 1])
+ goto non_contiguous_mem;
+ }
+ }
+
+ if (!gst_v4l2_allocator_import_userptr (pool->vallocator, group,
+ data->frame.info.size, finfo->n_planes, data->frame.data, size))
+ goto import_failed;
+ } else {
+ gpointer ptr[1];
+ gsize size[1];
+
+ data->is_frame = FALSE;
+
+ if (!gst_buffer_map (src, &data->map, flags))
+ goto invalid_buffer;
+
+ ptr[0] = data->map.data;
+ size[0] = data->map.size;
+
+ if (!gst_v4l2_allocator_import_userptr (pool->vallocator, group,
+ data->map.size, 1, ptr, size))
+ goto import_failed;
+ }
+
+ data->buffer = gst_buffer_ref (src);
+
+ gst_mini_object_set_qdata (GST_MINI_OBJECT (dest), GST_V4L2_IMPORT_QUARK,
+ data, (GDestroyNotify) _unmap_userptr_frame);
+
+ gst_buffer_copy_into (dest, src,
+ GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS, 0, -1);
+
+ return ret;
+
+ not_our_buffer:
+ {
+ GST_ERROR_OBJECT (pool, "destination buffer invalid or not from our pool");
+ return GST_FLOW_ERROR;
+ }
+ invalid_buffer:
+ {
+ GST_ERROR_OBJECT (pool, "could not map buffer");
+ g_slice_free (struct UserPtrData, data);
+ return GST_FLOW_ERROR;
+ }
+ non_contiguous_mem:
+ {
+ GST_ERROR_OBJECT (pool, "memory is not contiguous or plane size mismatch");
+ _unmap_userptr_frame (data);
+ return GST_FLOW_ERROR;
+ }
+ import_failed:
+ {
+ GST_ERROR_OBJECT (pool, "failed to import data");
+ _unmap_userptr_frame (data);
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ static GstFlowReturn
+ gst_v4l2_buffer_pool_import_dmabuf (GstV4l2BufferPool * pool,
+ GstBuffer * dest, GstBuffer * src)
+ {
+ GstV4l2MemoryGroup *group = NULL;
+ GstMemory *dma_mem[GST_VIDEO_MAX_PLANES] = { 0 };
+ guint n_mem = gst_buffer_n_memory (src);
+ gint i;
+
+ GST_LOG_OBJECT (pool, "importing dmabuf");
+
+ if (!gst_v4l2_is_buffer_valid (dest, &group))
+ goto not_our_buffer;
+
+ if (n_mem > GST_VIDEO_MAX_PLANES)
+ goto too_many_mems;
+
+ for (i = 0; i < n_mem; i++)
+ dma_mem[i] = gst_buffer_peek_memory (src, i);
+
+ if (!gst_v4l2_allocator_import_dmabuf (pool->vallocator, group, n_mem,
+ dma_mem))
+ goto import_failed;
+
+ gst_mini_object_set_qdata (GST_MINI_OBJECT (dest), GST_V4L2_IMPORT_QUARK,
+ gst_buffer_ref (src), (GDestroyNotify) gst_buffer_unref);
+
+ gst_buffer_copy_into (dest, src,
+ GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS, 0, -1);
+
+ return GST_FLOW_OK;
+
+ not_our_buffer:
+ {
+ GST_ERROR_OBJECT (pool, "destination buffer invalid or not from our pool");
+ return GST_FLOW_ERROR;
+ }
+ too_many_mems:
+ {
+ GST_ERROR_OBJECT (pool, "could not map buffer");
+ return GST_FLOW_ERROR;
+ }
+ import_failed:
+ {
+ GST_ERROR_OBJECT (pool, "failed to import dmabuf");
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ static GstFlowReturn
+ gst_v4l2_buffer_pool_prepare_buffer (GstV4l2BufferPool * pool,
+ GstBuffer * dest, GstBuffer * src)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ gboolean own_src = FALSE;
+
+ if (src == NULL) {
+ if (pool->other_pool == NULL) {
+ GST_ERROR_OBJECT (pool, "can't prepare buffer, source buffer missing");
+ return GST_FLOW_ERROR;
+ }
+
+ ret = gst_buffer_pool_acquire_buffer (pool->other_pool, &src, NULL);
+ if (ret != GST_FLOW_OK) {
+ GST_ERROR_OBJECT (pool, "failed to acquire buffer from downstream pool");
+ goto done;
+ }
+
+ own_src = TRUE;
+ }
+
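+ /* Copy for MMAP/DMABUF exports; zero-copy import for USERPTR and
+ * DMABUF_IMPORT. */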
+ switch (pool->obj->mode) {
+ case GST_V4L2_IO_MMAP:
+ case GST_V4L2_IO_DMABUF:
+ ret = gst_v4l2_buffer_pool_copy_buffer (pool, dest, src);
+ break;
+ case GST_V4L2_IO_USERPTR:
+ ret = gst_v4l2_buffer_pool_import_userptr (pool, dest, src);
+ break;
+ case GST_V4L2_IO_DMABUF_IMPORT:
+ ret = gst_v4l2_buffer_pool_import_dmabuf (pool, dest, src);
+ break;
+ default:
+ break;
+ }
+
+ if (own_src)
+ gst_buffer_unref (src);
+
+ done:
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_v4l2_buffer_pool_alloc_buffer (GstBufferPool * bpool, GstBuffer ** buffer,
+ GstBufferPoolAcquireParams * params)
+ {
+ GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (bpool);
+ GstV4l2MemoryGroup *group = NULL;
+ GstBuffer *newbuf = NULL;
+ GstV4l2Object *obj;
+ GstVideoInfo *info;
+
+ obj = pool->obj;
+ info = &obj->info;
+
+ switch (obj->mode) {
+ case GST_V4L2_IO_RW:
+ newbuf =
+ gst_buffer_new_allocate (pool->allocator, pool->size, &pool->params);
+ break;
+ case GST_V4L2_IO_MMAP:
+ group = gst_v4l2_allocator_alloc_mmap (pool->vallocator);
+ break;
+ case GST_V4L2_IO_DMABUF:
+ group = gst_v4l2_allocator_alloc_dmabuf (pool->vallocator,
+ pool->allocator);
+ break;
+ case GST_V4L2_IO_USERPTR:
+ group = gst_v4l2_allocator_alloc_userptr (pool->vallocator);
+ break;
+ case GST_V4L2_IO_DMABUF_IMPORT:
+ group = gst_v4l2_allocator_alloc_dmabufin (pool->vallocator);
+ break;
+ default:
+ newbuf = NULL;
+ g_assert_not_reached ();
+ break;
+ }
+
+ if (group != NULL) {
+ gint i;
+ newbuf = gst_buffer_new ();
+
+ for (i = 0; i < group->n_mem; i++)
+ gst_buffer_append_memory (newbuf, group->mem[i]);
+ } else if (newbuf == NULL) {
+ goto allocation_failed;
+ }
+
+ /* add metadata to raw video buffers */
+ if (pool->add_videometa)
+ gst_buffer_add_video_meta_full (newbuf, GST_VIDEO_FRAME_FLAG_NONE,
+ GST_VIDEO_INFO_FORMAT (info), GST_VIDEO_INFO_WIDTH (info),
+ GST_VIDEO_INFO_HEIGHT (info), GST_VIDEO_INFO_N_PLANES (info),
+ info->offset, info->stride);
+
+ *buffer = newbuf;
+
+ return GST_FLOW_OK;
+
+ /* ERRORS */
+ allocation_failed:
+ {
+ GST_ERROR_OBJECT (pool, "failed to allocate buffer");
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ static gboolean
+ gst_v4l2_buffer_pool_set_config (GstBufferPool * bpool, GstStructure * config)
+ {
+ GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (bpool);
+ GstV4l2Object *obj = pool->obj;
+ GstCaps *caps;
+ guint size, min_buffers, max_buffers;
+ GstAllocator *allocator;
+ GstAllocationParams params;
+ gboolean can_allocate = FALSE;
+ gboolean updated = FALSE;
+ gboolean ret;
+
+ pool->add_videometa =
+ gst_buffer_pool_config_has_option (config,
+ GST_BUFFER_POOL_OPTION_VIDEO_META);
+
+ /* parse the config and keep around */
+ if (!gst_buffer_pool_config_get_params (config, &caps, &size, &min_buffers,
+ &max_buffers))
+ goto wrong_config;
+
+ if (!gst_buffer_pool_config_get_allocator (config, &allocator, ¶ms))
+ goto wrong_config;
+
+ GST_DEBUG_OBJECT (pool, "config %" GST_PTR_FORMAT, config);
+
+ if (pool->allocator)
+ gst_object_unref (pool->allocator);
+ pool->allocator = NULL;
+
+ switch (obj->mode) {
+ case GST_V4L2_IO_DMABUF:
+ pool->allocator = gst_dmabuf_allocator_new ();
+ can_allocate = GST_V4L2_ALLOCATOR_CAN_ALLOCATE (pool->vallocator, MMAP);
+ break;
+ case GST_V4L2_IO_MMAP:
+ can_allocate = GST_V4L2_ALLOCATOR_CAN_ALLOCATE (pool->vallocator, MMAP);
+ break;
+ case GST_V4L2_IO_USERPTR:
+ can_allocate =
+ GST_V4L2_ALLOCATOR_CAN_ALLOCATE (pool->vallocator, USERPTR);
+ break;
+ case GST_V4L2_IO_DMABUF_IMPORT:
+ can_allocate = GST_V4L2_ALLOCATOR_CAN_ALLOCATE (pool->vallocator, DMABUF);
+ break;
+ case GST_V4L2_IO_RW:
+ if (allocator)
+ pool->allocator = g_object_ref (allocator);
+ pool->params = params;
+ /* No need to change the configuration */
+ goto done;
+ break;
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+
+ /* libv4l2 conversion code does not handle CREATE_BUFS, and may lead to
+ * instability and crashes; disable it for now */
+ if (can_allocate && obj->fmtdesc->flags & V4L2_FMT_FLAG_EMULATED) {
+ GST_WARNING_OBJECT (pool,
+ "libv4l2 converter detected, disabling CREATE_BUFS");
+ can_allocate = FALSE;
+ GST_OBJECT_FLAG_UNSET (pool->vallocator,
+ GST_V4L2_ALLOCATOR_FLAG_MMAP_CREATE_BUFS
+ | GST_V4L2_ALLOCATOR_FLAG_USERPTR_CREATE_BUFS
+ | GST_V4L2_ALLOCATOR_FLAG_DMABUF_CREATE_BUFS);
+ }
+
+ if (min_buffers < GST_V4L2_MIN_BUFFERS (obj)) {
+ updated = TRUE;
+ min_buffers = GST_V4L2_MIN_BUFFERS (obj);
+ GST_INFO_OBJECT (pool, "increasing minimum buffers to %u", min_buffers);
+ }
+
+ /* respect driver requirements */
+ if (min_buffers < obj->min_buffers) {
+ updated = TRUE;
+ min_buffers = obj->min_buffers;
+ GST_INFO_OBJECT (pool, "increasing minimum buffers to %u", min_buffers);
+ }
+
+ if (max_buffers > VIDEO_MAX_FRAME || max_buffers == 0) {
+ updated = TRUE;
+ max_buffers = VIDEO_MAX_FRAME;
+ GST_INFO_OBJECT (pool, "reducing maximum buffers to %u", max_buffers);
+ }
+
+ if (min_buffers > max_buffers) {
+ updated = TRUE;
+ min_buffers = max_buffers;
+ GST_INFO_OBJECT (pool, "reducing minimum buffers to %u", min_buffers);
+ } else if (min_buffers != max_buffers) {
+ if (!can_allocate) {
+ updated = TRUE;
+ max_buffers = min_buffers;
+ GST_INFO_OBJECT (pool, "can't allocate, setting maximum to minimum");
+ }
+ }
+
+ if (!pool->add_videometa && obj->need_video_meta) {
+ GST_INFO_OBJECT (pool, "adding needed video meta");
+ updated = TRUE;
+ gst_buffer_pool_config_add_option (config,
+ GST_BUFFER_POOL_OPTION_VIDEO_META);
+ }
+
+ /* Always update the config to ensure the configured size matches */
+ gst_buffer_pool_config_set_params (config, caps, obj->info.size, min_buffers,
+ max_buffers);
+
+ /* keep a GstVideoInfo with defaults for when we need to copy */
+ gst_video_info_from_caps (&pool->caps_info, caps);
+
+ done:
+ ret = GST_BUFFER_POOL_CLASS (parent_class)->set_config (bpool, config);
+
+ /* If anything was changed, the documentation recommends returning FALSE */
+ return !updated && ret;
+
+ /* ERRORS */
+ wrong_config:
+ {
+ GST_ERROR_OBJECT (pool, "invalid config %" GST_PTR_FORMAT, config);
+ return FALSE;
+ }
+ }
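
A caller-side sketch of the set_config() contract implemented above: per the GstBufferPool documentation, a FALSE return means the pool adjusted the parameters, and the caller should re-fetch the config and try again. The helper name is hypothetical; GST_BUFFER_POOL_OPTION_VIDEO_META comes from <gst/video/gstvideopool.h>:

static gboolean
configure_pool (GstBufferPool * pool, GstCaps * caps, guint size)
{
  GstStructure *config = gst_buffer_pool_get_config (pool);

  gst_buffer_pool_config_set_params (config, caps, size, 2, 0);
  gst_buffer_pool_config_add_option (config,
      GST_BUFFER_POOL_OPTION_VIDEO_META);

  if (gst_buffer_pool_set_config (pool, config))
    return TRUE;

  /* the pool corrected the values; accept the updated configuration */
  config = gst_buffer_pool_get_config (pool);
  return gst_buffer_pool_set_config (pool, config);
}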
+
+ static GstFlowReturn
+ gst_v4l2_buffer_pool_resurrect_buffer (GstV4l2BufferPool * pool)
+ {
+ GstBufferPoolAcquireParams params = { 0 };
+ GstBuffer *buffer = NULL;
+ GstFlowReturn ret;
+
+ GST_DEBUG_OBJECT (pool, "A buffer was lost, reallocating it");
+
+ /* block recursive calls to this function */
+ g_signal_handler_block (pool->vallocator, pool->group_released_handler);
+
+ params.flags =
+ (GstBufferPoolAcquireFlags) GST_V4L2_BUFFER_POOL_ACQUIRE_FLAG_RESURRECT |
+ GST_BUFFER_POOL_ACQUIRE_FLAG_DONTWAIT;
+ ret =
+ gst_buffer_pool_acquire_buffer (GST_BUFFER_POOL (pool), &buffer, ¶ms);
+
+ if (ret == GST_FLOW_OK)
+ gst_buffer_unref (buffer);
+
+ g_signal_handler_unblock (pool->vallocator, pool->group_released_handler);
+
+ return ret;
+ }
+
+ static gboolean
+ gst_v4l2_buffer_pool_streamon (GstV4l2BufferPool * pool)
+ {
+ GstV4l2Object *obj = pool->obj;
+
+ if (pool->streaming)
+ return TRUE;
+
+ switch (obj->mode) {
+ case GST_V4L2_IO_MMAP:
+ case GST_V4L2_IO_USERPTR:
+ case GST_V4L2_IO_DMABUF:
+ case GST_V4L2_IO_DMABUF_IMPORT:
+ if (!V4L2_TYPE_IS_OUTPUT (pool->obj->type)) {
+ guint num_queued;
+ guint i, n = 0;
+
+ num_queued = g_atomic_int_get (&pool->num_queued);
+ if (num_queued < pool->num_allocated)
+ n = pool->num_allocated - num_queued;
+
+ /* For captures, we need to enqueue buffers before we start streaming,
+ * so the driver doesn't underflow immediately. As we have put them back
+ * into the base class queue, resurrect them; releasing them will queue
+ * them back. */
+ for (i = 0; i < n; i++)
+ gst_v4l2_buffer_pool_resurrect_buffer (pool);
+ }
+
+ if (obj->ioctl (pool->video_fd, VIDIOC_STREAMON, &obj->type) < 0)
+ goto streamon_failed;
+
+ pool->streaming = TRUE;
+
+ GST_DEBUG_OBJECT (pool, "Started streaming");
+ break;
+ default:
+ break;
+ }
+
+ return TRUE;
+
+ streamon_failed:
+ {
+ GST_ERROR_OBJECT (pool, "error with STREAMON %d (%s)", errno,
+ g_strerror (errno));
+ return FALSE;
+ }
+ }
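
For reference, the STREAMON step above reduced to plain V4L2, outside GStreamer (single-planar capture assumed; helper name hypothetical):

#include <errno.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

static int
start_streaming (int fd)
{
  enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

  /* ask the driver to start processing the capture queue */
  if (ioctl (fd, VIDIOC_STREAMON, &type) < 0) {
    fprintf (stderr, "STREAMON failed: %s\n", strerror (errno));
    return -1;
  }

  return 0;
}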
+
+ /* Call with the stream lock held, or when streaming threads are down */
+ static void
+ gst_v4l2_buffer_pool_streamoff (GstV4l2BufferPool * pool)
+ {
+ GstBufferPoolClass *pclass = GST_BUFFER_POOL_CLASS (parent_class);
+ GstV4l2Object *obj = pool->obj;
+ gint i;
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ gint64 end_time = 0;
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+
+ if (!pool->streaming)
+ return;
+
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ if (obj->tbm_output && !V4L2_TYPE_IS_OUTPUT(pool->obj->type)) {
++ g_mutex_lock (&pool->buffer_lock);
++
++ GST_INFO_OBJECT (pool, "live buffer[%d]", pool->live_buffer_count);
++
++ if (pool->live_buffer_count > 0) {
++ end_time = g_get_monotonic_time () + G_TIME_SPAN_SECOND;
++
++ do {
++ GST_WARNING_OBJECT (pool, "wait for live buffer[%d]", pool->live_buffer_count);
++
++ if (!g_cond_wait_until (&pool->buffer_cond, &pool->buffer_lock, end_time)) {
++ GST_ERROR_OBJECT (pool, "failed to wait live buffer[%d]", pool->live_buffer_count);
++ break;
++ }
++
++ GST_WARNING_OBJECT (pool, "signal received, check again : live count[%d]",
++ pool->live_buffer_count);
++ } while (pool->live_buffer_count > 0);
++ }
++
++ g_mutex_unlock (&pool->buffer_lock);
++ }
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ switch (obj->mode) {
+ case GST_V4L2_IO_MMAP:
+ case GST_V4L2_IO_USERPTR:
+ case GST_V4L2_IO_DMABUF:
+ case GST_V4L2_IO_DMABUF_IMPORT:
+
+ if (obj->ioctl (pool->video_fd, VIDIOC_STREAMOFF, &obj->type) < 0)
+ GST_WARNING_OBJECT (pool, "STREAMOFF failed with errno %d (%s)",
+ errno, g_strerror (errno));
+
+ pool->streaming = FALSE;
+
+ GST_DEBUG_OBJECT (pool, "Stopped streaming");
+
+ if (pool->vallocator)
+ gst_v4l2_allocator_flush (pool->vallocator);
+ break;
+ default:
+ break;
+ }
+
+ for (i = 0; i < VIDEO_MAX_FRAME; i++) {
+ if (pool->buffers[i]) {
+ GstBuffer *buffer = pool->buffers[i];
+ GstBufferPool *bpool = GST_BUFFER_POOL (pool);
+
+ pool->buffers[i] = NULL;
+
+ if (V4L2_TYPE_IS_OUTPUT (pool->obj->type))
+ gst_v4l2_buffer_pool_release_buffer (bpool, buffer);
+ else /* Don't re-enqueue capture buffer on stop */
+ pclass->release_buffer (bpool, buffer);
+
+ g_atomic_int_add (&pool->num_queued, -1);
+ }
+ }
+ }
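
The Tizen block above waits, with a one-second bound, for the live buffer count to drop before STREAMOFF. The same GLib pattern in isolation (hypothetical helper, assuming the counter is only modified under the lock):

static gboolean
wait_for_zero (GMutex * lock, GCond * cond, gint * counter)
{
  gint64 end_time = g_get_monotonic_time () + G_TIME_SPAN_SECOND;
  gboolean ok = TRUE;

  g_mutex_lock (lock);
  while (*counter > 0) {
    /* returns FALSE once end_time passes without a signal */
    if (!g_cond_wait_until (cond, lock, end_time)) {
      ok = FALSE;
      break;
    }
  }
  g_mutex_unlock (lock);

  return ok;
}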
+
+ static gboolean
+ gst_v4l2_buffer_pool_start (GstBufferPool * bpool)
+ {
+ GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (bpool);
+ GstBufferPoolClass *pclass = GST_BUFFER_POOL_CLASS (parent_class);
+ GstV4l2Object *obj = pool->obj;
+ GstStructure *config;
+ GstCaps *caps;
+ guint size, min_buffers, max_buffers;
+ guint max_latency, min_latency, copy_threshold = 0;
+ gboolean can_allocate = FALSE, ret = TRUE;
+
+ GST_DEBUG_OBJECT (pool, "activating pool");
+
+ if (pool->other_pool) {
+ GstBuffer *buffer;
+
+ if (!gst_buffer_pool_set_active (pool->other_pool, TRUE))
+ goto other_pool_failed;
+
+ if (gst_buffer_pool_acquire_buffer (pool->other_pool, &buffer, NULL) !=
+ GST_FLOW_OK)
+ goto other_pool_failed;
+
+ if (!gst_v4l2_object_try_import (obj, buffer)) {
+ gst_buffer_unref (buffer);
+ goto cannot_import;
+ }
+ gst_buffer_unref (buffer);
+ }
+
+ config = gst_buffer_pool_get_config (bpool);
+ if (!gst_buffer_pool_config_get_params (config, &caps, &size, &min_buffers,
+ &max_buffers))
+ goto wrong_config;
+
+ min_latency = MAX (GST_V4L2_MIN_BUFFERS (obj), obj->min_buffers);
+
+ switch (obj->mode) {
+ case GST_V4L2_IO_RW:
+ can_allocate = TRUE;
+ #ifdef HAVE_LIBV4L2
+ /* This works around an unfixable bug in libv4l2 when RW is emulated on
+ * top of MMAP. In this case, the first read initializes the queues, but
+ * the poll before that will always fail. Doing an empty read forces the
+ * queue to be initialized now. We only do this if we have a streaming
+ * driver. */
+ if (obj->device_caps & V4L2_CAP_STREAMING)
+ obj->read (obj->video_fd, NULL, 0);
+ #endif
+ break;
+ case GST_V4L2_IO_DMABUF:
+ case GST_V4L2_IO_MMAP:
+ {
+ guint count;
+
+ can_allocate = GST_V4L2_ALLOCATOR_CAN_ALLOCATE (pool->vallocator, MMAP);
+
+ /* first, let's request buffers and see how many we can get: */
+ GST_DEBUG_OBJECT (pool, "requesting %d MMAP buffers", min_buffers);
+
+ count = gst_v4l2_allocator_start (pool->vallocator, min_buffers,
+ V4L2_MEMORY_MMAP);
+ pool->num_allocated = count;
+
+ if (count < GST_V4L2_MIN_BUFFERS (obj)) {
+ min_buffers = count;
+ goto no_buffers;
+ }
+
+ /* V4L2 buffer pools are often very limited in the number of buffers they
+ * can offer. The copy_threshold works around this limitation by falling
+ * back to copying if the pipeline needs more buffers. This also prevents
+ * having to do REQBUFS(N)/REQBUFS(0) every time configure is called. */
+ if (count != min_buffers || pool->enable_copy_threshold) {
+ GST_WARNING_OBJECT (pool,
+ "Uncertain or not enough buffers, enabling copy threshold");
+ min_buffers = count;
+ copy_threshold = min_latency;
+ }
+
+ break;
+ }
+ case GST_V4L2_IO_USERPTR:
+ {
+ guint count;
+
+ can_allocate =
+ GST_V4L2_ALLOCATOR_CAN_ALLOCATE (pool->vallocator, USERPTR);
+
+ GST_DEBUG_OBJECT (pool, "requesting %d USERPTR buffers", min_buffers);
+
+ count = gst_v4l2_allocator_start (pool->vallocator, min_buffers,
+ V4L2_MEMORY_USERPTR);
+ pool->num_allocated = count;
+
+ /* There is no rationale for not getting what we asked for */
+ if (count < min_buffers) {
+ min_buffers = count;
+ goto no_buffers;
+ }
+
+ min_buffers = count;
+ break;
+ }
+ case GST_V4L2_IO_DMABUF_IMPORT:
+ {
+ guint count;
+
+ can_allocate = GST_V4L2_ALLOCATOR_CAN_ALLOCATE (pool->vallocator, DMABUF);
+
+ GST_DEBUG_OBJECT (pool, "requesting %d DMABUF buffers", min_buffers);
+
+ count = gst_v4l2_allocator_start (pool->vallocator, min_buffers,
+ V4L2_MEMORY_DMABUF);
+ pool->num_allocated = count;
+
+ /* There is no rationale for not getting what we asked for */
+ if (count < min_buffers) {
+ min_buffers = count;
+ goto no_buffers;
+ }
+
+ min_buffers = count;
+ break;
+ }
+ default:
+ min_buffers = 0;
+ copy_threshold = 0;
+ g_assert_not_reached ();
+ break;
+ }
+
+ if (can_allocate)
+ max_latency = max_buffers;
+ else
+ max_latency = min_buffers;
+
+ pool->size = size;
+ pool->copy_threshold = copy_threshold;
+ pool->max_latency = max_latency;
+ pool->min_latency = min_latency;
+ pool->num_queued = 0;
+
+ if (max_buffers != 0 && max_buffers < min_buffers)
+ max_buffers = min_buffers;
+
+ gst_buffer_pool_config_set_params (config, caps, size, min_buffers,
+ max_buffers);
+ pclass->set_config (bpool, config);
+ gst_structure_free (config);
+
+ /* now, allocate the buffers: */
+ if (!pclass->start (bpool))
+ goto start_failed;
+
+ if (!V4L2_TYPE_IS_OUTPUT (obj->type)) {
+ if (g_atomic_int_get (&pool->num_queued) < pool->num_allocated)
+ goto queue_failed;
+
+ pool->group_released_handler =
+ g_signal_connect_swapped (pool->vallocator, "group-released",
+ G_CALLBACK (gst_v4l2_buffer_pool_resurrect_buffer), pool);
+ ret = gst_v4l2_buffer_pool_streamon (pool);
+ }
+
+ return ret;
+
+ /* ERRORS */
+ wrong_config:
+ {
+ GST_ERROR_OBJECT (pool, "invalid config %" GST_PTR_FORMAT, config);
+ gst_structure_free (config);
+ return FALSE;
+ }
+ no_buffers:
+ {
+ GST_ERROR_OBJECT (pool,
+ "we received %d buffer from device '%s', we want at least %d",
+ min_buffers, obj->videodev, GST_V4L2_MIN_BUFFERS (obj));
+ gst_structure_free (config);
+ return FALSE;
+ }
+ start_failed:
+ {
+ GST_ERROR_OBJECT (pool, "allocate failed");
+ return FALSE;
+ }
+ other_pool_failed:
+ {
+ GST_ERROR_OBJECT (pool, "failed to activate the other pool %"
+ GST_PTR_FORMAT, pool->other_pool);
+ return FALSE;
+ }
+ queue_failed:
+ {
+ GST_ERROR_OBJECT (pool, "failed to queue buffers into the capture queue");
+ return FALSE;
+ }
+ cannot_import:
+ {
+ GST_ERROR_OBJECT (pool, "cannot import buffers from downstream pool");
+ return FALSE;
+ }
+ }
+
+ static gboolean
+ gst_v4l2_buffer_pool_vallocator_stop (GstV4l2BufferPool * pool)
+ {
+ GstV4l2Return vret;
+
+ if (!pool->vallocator)
+ return TRUE;
+
+ vret = gst_v4l2_allocator_stop (pool->vallocator);
+
+ if (vret == GST_V4L2_BUSY)
+ GST_WARNING_OBJECT (pool, "some buffers are still outstanding");
+
+ return (vret == GST_V4L2_OK);
+ }
+
+ static gboolean
+ gst_v4l2_buffer_pool_stop (GstBufferPool * bpool)
+ {
+ GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (bpool);
+ gboolean ret;
+
+ GST_DEBUG_OBJECT (pool, "stopping pool");
+
+ if (pool->group_released_handler > 0) {
+ g_signal_handler_disconnect (pool->vallocator,
+ pool->group_released_handler);
+ pool->group_released_handler = 0;
+ }
+
+ if (pool->other_pool) {
+ gst_buffer_pool_set_active (pool->other_pool, FALSE);
+ gst_object_unref (pool->other_pool);
+ pool->other_pool = NULL;
+ }
+
+ if (!pool->orphaned)
+ gst_v4l2_buffer_pool_streamoff (pool);
+
+ ret = GST_BUFFER_POOL_CLASS (parent_class)->stop (bpool);
+
+ if (ret)
+ ret = gst_v4l2_buffer_pool_vallocator_stop (pool);
+
+ return ret;
+ }
+
+ gboolean
+ gst_v4l2_buffer_pool_orphan (GstBufferPool ** bpool)
+ {
+ GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (*bpool);
+ gboolean ret;
+
+ g_return_val_if_fail (pool->orphaned == FALSE, FALSE);
+
+ if (!GST_V4L2_ALLOCATOR_CAN_ORPHAN_BUFS (pool->vallocator))
+ return FALSE;
+
+ if (g_getenv ("GST_V4L2_FORCE_DRAIN"))
+ return FALSE;
+
+ GST_DEBUG_OBJECT (pool, "orphaning pool");
+ gst_buffer_pool_set_active (*bpool, FALSE);
+
+ /* We lock to prevent racing with a buffer being returned in qbuf, as a
+ * workaround for not being able to use the pool's hidden activation lock. */
+ GST_OBJECT_LOCK (pool);
+
+ gst_v4l2_buffer_pool_streamoff (pool);
+ ret = gst_v4l2_allocator_orphan (pool->vallocator);
+ if (ret)
+ pool->orphaned = TRUE;
+
+ GST_OBJECT_UNLOCK (pool);
+
+ if (ret) {
+ gst_object_unref (*bpool);
+ *bpool = NULL;
+ }
+
+ return ret;
+ }
+
+ static void
+ gst_v4l2_buffer_pool_flush_start (GstBufferPool * bpool)
+ {
+ GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (bpool);
+
+ GST_DEBUG_OBJECT (pool, "start flushing");
+
+ gst_poll_set_flushing (pool->poll, TRUE);
+
+ GST_OBJECT_LOCK (pool);
+ pool->empty = FALSE;
+ g_cond_broadcast (&pool->empty_cond);
+ GST_OBJECT_UNLOCK (pool);
+
+ if (pool->other_pool && gst_buffer_pool_is_active (pool->other_pool))
+ gst_buffer_pool_set_flushing (pool->other_pool, TRUE);
+ }
+
+ static void
+ gst_v4l2_buffer_pool_flush_stop (GstBufferPool * bpool)
+ {
+ GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (bpool);
+
+ GST_DEBUG_OBJECT (pool, "stop flushing");
+
+ if (pool->other_pool && gst_buffer_pool_is_active (pool->other_pool))
+ gst_buffer_pool_set_flushing (pool->other_pool, FALSE);
+
+ gst_poll_set_flushing (pool->poll, FALSE);
+ }
+
+ static GstFlowReturn
+ gst_v4l2_buffer_pool_poll (GstV4l2BufferPool * pool, gboolean wait)
+ {
+ gint ret;
+ GstClockTime timeout;
+
+ if (wait)
+ timeout = GST_CLOCK_TIME_NONE;
+ else
+ timeout = 0;
+
+ /* In RW mode there is no queue, hence no need to wait while the queue is
+ * empty */
+ if (pool->obj->mode != GST_V4L2_IO_RW) {
+ GST_OBJECT_LOCK (pool);
+
+ if (!wait && pool->empty) {
+ GST_OBJECT_UNLOCK (pool);
+ goto no_buffers;
+ }
+
+ while (pool->empty)
+ g_cond_wait (&pool->empty_cond, GST_OBJECT_GET_LOCK (pool));
+
+ GST_OBJECT_UNLOCK (pool);
+ }
+
+ if (!pool->can_poll_device) {
+ if (wait)
+ goto done;
+ else
+ goto no_buffers;
+ }
+
+ GST_LOG_OBJECT (pool, "polling device");
+
+ again:
+ ret = gst_poll_wait (pool->poll, timeout);
+ if (G_UNLIKELY (ret < 0)) {
+ switch (errno) {
+ case EBUSY:
+ goto stopped;
+ case EAGAIN:
+ case EINTR:
+ goto again;
+ case ENXIO:
+ GST_WARNING_OBJECT (pool,
+ "v4l2 device doesn't support polling. Disabling"
+ " using libv4l2 in this case may cause deadlocks");
+ pool->can_poll_device = FALSE;
+ goto done;
+ default:
+ goto select_error;
+ }
+ }
+
+ if (gst_poll_fd_has_error (pool->poll, &pool->pollfd))
+ goto select_error;
+
+ /* PRI is used to signal that events are available */
+ if (gst_poll_fd_has_pri (pool->poll, &pool->pollfd)) {
+ struct v4l2_event event = { 0, };
+
+ if (!gst_v4l2_dequeue_event (pool->obj, &event))
+ goto dqevent_failed;
+
+ if (event.type != V4L2_EVENT_SOURCE_CHANGE) {
+ GST_INFO_OBJECT (pool, "Received unhandled event, ignoring.");
+ goto again;
+ }
+
+ if ((event.u.src_change.changes & V4L2_EVENT_SRC_CH_RESOLUTION) == 0) {
+ GST_DEBUG_OBJECT (pool,
+ "Received non-resolution source-change, ignoring.");
+ goto again;
+ }
+
+ return GST_V4L2_FLOW_RESOLUTION_CHANGE;
+ }
+
+ if (ret == 0)
+ goto no_buffers;
+
+ done:
+ return GST_FLOW_OK;
+
+ /* ERRORS */
+ stopped:
+ {
+ GST_DEBUG_OBJECT (pool, "stop called");
+ return GST_FLOW_FLUSHING;
+ }
+ select_error:
+ {
+ GST_ELEMENT_ERROR (pool->obj->element, RESOURCE, READ, (NULL),
+ ("poll error %d: %s (%d)", ret, g_strerror (errno), errno));
+ return GST_FLOW_ERROR;
+ }
+ no_buffers:
+ {
+ return GST_V4L2_FLOW_LAST_BUFFER;
+ }
+ dqevent_failed:
+ {
+ GST_ELEMENT_ERROR (pool->obj->element, RESOURCE, READ, (NULL),
+ ("dqevent error: %s (%d)", g_strerror (errno), errno));
+ return GST_FLOW_ERROR;
+ }
+ }
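
The retry logic above in isolation: a GstPoll wait that retries on EINTR/EAGAIN and maps EBUSY, raised by gst_poll_set_flushing(), to flushing (hypothetical helper):

#include <errno.h>
#include <gst/gst.h>

static GstFlowReturn
wait_readable (GstPoll * set, GstPollFD * pfd)
{
  gint ret;

again:
  ret = gst_poll_wait (set, GST_CLOCK_TIME_NONE);
  if (ret < 0) {
    if (errno == EINTR || errno == EAGAIN)
      goto again;               /* interrupted, simply retry */
    if (errno == EBUSY)
      return GST_FLOW_FLUSHING; /* gst_poll_set_flushing () was called */
    return GST_FLOW_ERROR;
  }

  return gst_poll_fd_can_read (set, pfd) ? GST_FLOW_OK : GST_FLOW_ERROR;
}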
+
+ static GstFlowReturn
+ gst_v4l2_buffer_pool_qbuf (GstV4l2BufferPool * pool, GstBuffer * buf,
+ GstV4l2MemoryGroup * group, guint32 * frame_number)
+ {
+ const GstV4l2Object *obj = pool->obj;
+ gint index;
+
+ index = group->buffer.index;
+
+ if (pool->buffers[index] != NULL)
+ goto already_queued;
+
+ GST_LOG_OBJECT (pool, "queuing buffer %i", index);
+
+ if (V4L2_TYPE_IS_OUTPUT (obj->type)) {
+ enum v4l2_field field;
+
+ /* Buffer field is the same as the one defined in format */
+ if (V4L2_TYPE_IS_MULTIPLANAR (obj->type))
+ field = obj->format.fmt.pix_mp.field;
+ else
+ field = obj->format.fmt.pix.field;
+
+ group->buffer.field = field;
+ }
+
+ if (frame_number) {
+ group->buffer.timestamp.tv_sec = *frame_number;
+ group->buffer.timestamp.tv_usec = 0;
+ } else {
+ if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
+ GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buf);
+ GST_TIME_TO_TIMEVAL (timestamp, group->buffer.timestamp);
+ } else {
+ group->buffer.timestamp.tv_sec = -1;
+ group->buffer.timestamp.tv_usec = -1;
+ }
+ }
+
+ GST_OBJECT_LOCK (pool);
+
+ /* If the pool was orphaned, don't try to queue any returned buffers.
+ * This is done with the object lock in order to synchronize with
+ * orphaning. */
+ if (pool->orphaned)
+ goto was_orphaned;
+
+ g_atomic_int_inc (&pool->num_queued);
+ pool->buffers[index] = buf;
+
+ if (!gst_v4l2_allocator_qbuf (pool->vallocator, group))
+ goto queue_failed;
+
+ pool->empty = FALSE;
+ g_cond_signal (&pool->empty_cond);
+ GST_OBJECT_UNLOCK (pool);
+
+ return GST_FLOW_OK;
+
+ already_queued:
+ {
+ GST_ERROR_OBJECT (pool, "the buffer %i was already queued", index);
+ return GST_FLOW_ERROR;
+ }
+ was_orphaned:
+ {
+ GST_DEBUG_OBJECT (pool, "pool was orphaned, not queuing back buffer.");
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_TAG_MEMORY);
+ GST_OBJECT_UNLOCK (pool);
+ return GST_FLOW_FLUSHING;
+ }
+ queue_failed:
+ {
+ GST_ERROR_OBJECT (pool, "could not queue a buffer %i", index);
+ /* Mark broken buffer to the allocator */
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_TAG_MEMORY);
+ g_atomic_int_add (&pool->num_queued, -1);
+ pool->buffers[index] = NULL;
+ GST_OBJECT_UNLOCK (pool);
+ return GST_FLOW_ERROR;
+ }
+ }
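
The timestamp mapping used by qbuf above, as a standalone sketch: GStreamer nanoseconds go into the struct timeval of the v4l2_buffer, with -1/-1 as the "no timestamp" convention (hypothetical helper):

#include <sys/time.h>
#include <gst/gst.h>

static void
clock_time_to_timeval (GstClockTime ts, struct timeval *tv)
{
  if (GST_CLOCK_TIME_IS_VALID (ts)) {
    GST_TIME_TO_TIMEVAL (ts, *tv);  /* nanoseconds -> sec/usec */
  } else {
    tv->tv_sec = -1;                /* no timestamp, as in qbuf above */
    tv->tv_usec = -1;
  }
}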
+
+ static GstFlowReturn
+ gst_v4l2_buffer_pool_dqbuf (GstV4l2BufferPool * pool, GstBuffer ** buffer,
+ gboolean wait)
+ {
+ GstFlowReturn res;
+ GstBuffer *outbuf = NULL;
+ GstV4l2Object *obj = pool->obj;
+ GstClockTime timestamp;
+ GstV4l2MemoryGroup *group;
+ GstVideoMeta *vmeta;
+ gsize size;
+ gint i;
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ GstV4l2TizenBuffer *tizen_buffer = NULL;
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+
+ if ((res = gst_v4l2_buffer_pool_poll (pool, wait)) < GST_FLOW_OK)
+ goto poll_failed;
+
+ if (res == GST_V4L2_FLOW_LAST_BUFFER) {
+ GST_LOG_OBJECT (pool, "nothing to dequeue");
+ goto done;
+ }
+
+ if (res == GST_V4L2_FLOW_RESOLUTION_CHANGE) {
+ GST_INFO_OBJECT (pool, "Resolution change detected.");
+ goto done;
+ }
+
+ GST_LOG_OBJECT (pool, "dequeueing a buffer");
+
+ res = gst_v4l2_allocator_dqbuf (pool->vallocator, &group);
+ if (res == GST_FLOW_EOS)
+ goto eos;
+ if (res != GST_FLOW_OK)
+ goto dqbuf_failed;
+
+ /* get our GstBuffer with that index from the pool; if the buffer was
+ * outstanding, we have a serious problem.
+ */
+ outbuf = pool->buffers[group->buffer.index];
+ if (outbuf == NULL)
+ goto no_buffer;
+
+ /* mark the buffer outstanding */
+ pool->buffers[group->buffer.index] = NULL;
+ if (g_atomic_int_dec_and_test (&pool->num_queued)) {
+ GST_OBJECT_LOCK (pool);
+ pool->empty = TRUE;
+ GST_OBJECT_UNLOCK (pool);
+ }
+
+ timestamp = GST_TIMEVAL_TO_TIME (group->buffer.timestamp);
+
+ size = 0;
+ vmeta = gst_buffer_get_video_meta (outbuf);
+ for (i = 0; i < group->n_mem; i++) {
+ GST_LOG_OBJECT (pool,
+ "dequeued buffer %p seq:%d (ix=%d), mem %p used %d, plane=%d, flags %08x, ts %"
+ GST_TIME_FORMAT ", pool-queued=%d, buffer=%p", outbuf,
+ group->buffer.sequence, group->buffer.index, group->mem[i],
+ group->planes[i].bytesused, i, group->buffer.flags,
+ GST_TIME_ARGS (timestamp), pool->num_queued, outbuf);
+
+ if (vmeta) {
+ vmeta->offset[i] = size;
+ size += gst_memory_get_sizes (group->mem[i], NULL, NULL);
+ }
+ }
+
+ /* Ignore timestamp and field for OUTPUT device */
+ if (V4L2_TYPE_IS_OUTPUT (obj->type))
+ goto done;
+
+ /* Check for a driver bug in reporting the field */
+ if (group->buffer.field == V4L2_FIELD_ANY) {
+ /* Only warn once to avoid the spamming */
+ #ifndef GST_DISABLE_GST_DEBUG
+ if (!pool->has_warned_on_buggy_field) {
+ pool->has_warned_on_buggy_field = TRUE;
+ GST_WARNING_OBJECT (pool,
+ "Driver should never set v4l2_buffer.field to ANY");
+ }
+ #endif
+
+ /* Use the value from the format (works for UVC bug) */
+ group->buffer.field = obj->format.fmt.pix.field;
+
+ /* If driver also has buggy S_FMT, assume progressive */
+ if (group->buffer.field == V4L2_FIELD_ANY) {
+ #ifndef GST_DISABLE_GST_DEBUG
+ if (!pool->has_warned_on_buggy_field) {
+ pool->has_warned_on_buggy_field = TRUE;
+ GST_WARNING_OBJECT (pool,
+ "Driver should never set v4l2_format.pix.field to ANY");
+ }
+ #endif
+
+ group->buffer.field = V4L2_FIELD_NONE;
+ }
+ }
+
+ /* set top/bottom field first if v4l2_buffer has the information */
+ switch (group->buffer.field) {
+ case V4L2_FIELD_NONE:
+ GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_FLAG_INTERLACED);
+ GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_FLAG_TFF);
+ break;
+ case V4L2_FIELD_TOP:
+ GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_FLAG_INTERLACED);
+ GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_FLAG_TOP_FIELD);
+ break;
+ case V4L2_FIELD_BOTTOM:
+ GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_FLAG_INTERLACED);
+ GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_FLAG_BOTTOM_FIELD);
+ break;
+ case V4L2_FIELD_INTERLACED_TB:
+ GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_FLAG_INTERLACED);
+ GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_FLAG_TFF);
+ break;
+ case V4L2_FIELD_INTERLACED_BT:
+ GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_FLAG_INTERLACED);
+ GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_FLAG_TFF);
+ break;
+ case V4L2_FIELD_INTERLACED:
+ GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_FLAG_INTERLACED);
+ if (obj->tv_norm == V4L2_STD_NTSC_M ||
+ obj->tv_norm == V4L2_STD_NTSC_M_JP ||
+ obj->tv_norm == V4L2_STD_NTSC_M_KR) {
+ GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_FLAG_TFF);
+ } else {
+ GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_FLAG_TFF);
+ }
+ break;
+ default:
+ GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_FLAG_INTERLACED);
+ GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_FLAG_TFF);
+ GST_FIXME_OBJECT (pool,
+ "Unhandled enum v4l2_field %d - treating as progressive",
+ group->buffer.field);
+ break;
+ }
+
+ if (GST_VIDEO_INFO_FORMAT (&obj->info) == GST_VIDEO_FORMAT_ENCODED) {
+ if ((group->buffer.flags & V4L2_BUF_FLAG_KEYFRAME) ||
+ GST_V4L2_PIXELFORMAT (obj) == V4L2_PIX_FMT_MJPEG ||
+ GST_V4L2_PIXELFORMAT (obj) == V4L2_PIX_FMT_JPEG ||
+ GST_V4L2_PIXELFORMAT (obj) == V4L2_PIX_FMT_PJPG)
+ GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
+ else
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
+ }
+
+ if (group->buffer.flags & V4L2_BUF_FLAG_ERROR)
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_CORRUPTED);
+
+ GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
+ GST_BUFFER_OFFSET (outbuf) = group->buffer.sequence;
+ GST_BUFFER_OFFSET_END (outbuf) = group->buffer.sequence + 1;
+
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ if (group->surface) {
++ tizen_buffer = gst_v4l2_tizen_buffer_new (outbuf, group->buffer.index, pool);
++ if (!tizen_buffer) {
++ GST_ERROR_OBJECT (pool, "tizen buffer failed for index[%d]", group->buffer.index);
++ goto no_buffer;
++ }
++ outbuf = tizen_buffer->gst_buffer;
++ }
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ done:
+ *buffer = outbuf;
+
+ return res;
+
+ /* ERRORS */
+ poll_failed:
+ {
+ GST_DEBUG_OBJECT (pool, "poll error %s", gst_flow_get_name (res));
+ return res;
+ }
+ eos:
+ {
+ return GST_FLOW_EOS;
+ }
+ dqbuf_failed:
+ {
+ return GST_FLOW_ERROR;
+ }
+ no_buffer:
+ {
+ GST_ERROR_OBJECT (pool, "No free buffer found in the pool at index %d.",
+ group->buffer.index);
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ static GstFlowReturn
+ gst_v4l2_buffer_pool_acquire_buffer (GstBufferPool * bpool, GstBuffer ** buffer,
+ GstBufferPoolAcquireParams * params)
+ {
+ GstFlowReturn ret;
+ GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (bpool);
+ GstBufferPoolClass *pclass = GST_BUFFER_POOL_CLASS (parent_class);
+ GstV4l2Object *obj = pool->obj;
+
+ GST_DEBUG_OBJECT (pool, "acquire");
+
+ /* If this is being called to resurrect a lost buffer */
+ if (params && params->flags & GST_V4L2_BUFFER_POOL_ACQUIRE_FLAG_RESURRECT) {
+ ret = pclass->acquire_buffer (bpool, buffer, params);
+ goto done;
+ }
+
+ switch (obj->type) {
+ case V4L2_BUF_TYPE_VIDEO_CAPTURE:
+ case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
+ /* capture: this function should return a buffer with newly captured data */
+ switch (obj->mode) {
+ case GST_V4L2_IO_RW:
+ {
+ /* take empty buffer from the pool */
+ ret = pclass->acquire_buffer (bpool, buffer, params);
+ break;
+ }
+ case GST_V4L2_IO_DMABUF:
+ case GST_V4L2_IO_MMAP:
+ case GST_V4L2_IO_USERPTR:
+ case GST_V4L2_IO_DMABUF_IMPORT:
+ {
+ /* just dequeue a buffer; we basically use the v4l2 queue as the
+ * storage for our buffers. This function polls first so we can
+ * interrupt it cleanly. */
+ ret = gst_v4l2_buffer_pool_dqbuf (pool, buffer, TRUE);
+ break;
+ }
+ default:
+ ret = GST_FLOW_ERROR;
+ g_assert_not_reached ();
+ break;
+ }
+ break;
+
+
+ case V4L2_BUF_TYPE_VIDEO_OUTPUT:
+ case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
+ /* playback: this function should return an empty buffer */
+ switch (obj->mode) {
+ case GST_V4L2_IO_RW:
+ /* get an empty buffer */
+ ret = pclass->acquire_buffer (bpool, buffer, params);
+ break;
+
+ case GST_V4L2_IO_MMAP:
+ case GST_V4L2_IO_DMABUF:
+ case GST_V4L2_IO_USERPTR:
+ case GST_V4L2_IO_DMABUF_IMPORT:
+ /* get a free unqueued buffer */
+ ret = pclass->acquire_buffer (bpool, buffer, params);
+ break;
+
+ default:
+ ret = GST_FLOW_ERROR;
+ g_assert_not_reached ();
+ break;
+ }
+ break;
+
+ default:
+ ret = GST_FLOW_ERROR;
+ g_assert_not_reached ();
+ break;
+ }
+ done:
+ return ret;
+ }
+
+ static void
+ gst_v4l2_buffer_pool_release_buffer (GstBufferPool * bpool, GstBuffer * buffer)
+ {
+ GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (bpool);
+ GstBufferPoolClass *pclass = GST_BUFFER_POOL_CLASS (parent_class);
+ GstV4l2Object *obj = pool->obj;
+
+ GST_DEBUG_OBJECT (pool, "release buffer %p", buffer);
+
++ /* If the buffer's pool has been orphaned, dispose of it so that
++ * the pool resources can be freed */
++ if (pool->orphaned) {
++ GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_TAG_MEMORY);
++ pclass->release_buffer (bpool, buffer);
++ return;
++ }
++
+ switch (obj->type) {
+ case V4L2_BUF_TYPE_VIDEO_CAPTURE:
+ case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
+ /* capture, put the buffer back in the queue so that we can refill it
+ * later. */
+ switch (obj->mode) {
+ case GST_V4L2_IO_RW:
+ /* release back in the pool */
+ pclass->release_buffer (bpool, buffer);
+ break;
+
+ case GST_V4L2_IO_DMABUF:
+ case GST_V4L2_IO_MMAP:
+ case GST_V4L2_IO_USERPTR:
+ case GST_V4L2_IO_DMABUF_IMPORT:
+ {
+ GstV4l2MemoryGroup *group;
+ if (gst_v4l2_is_buffer_valid (buffer, &group)) {
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ gst_v4l2_allocator_reset_group (pool->vallocator, group);
+ /* queue back in the device */
+ if (pool->other_pool)
+ ret = gst_v4l2_buffer_pool_prepare_buffer (pool, buffer, NULL);
+ if (ret != GST_FLOW_OK ||
+ gst_v4l2_buffer_pool_qbuf (pool, buffer, group,
+ NULL) != GST_FLOW_OK)
+ pclass->release_buffer (bpool, buffer);
+ } else {
+ /* Simply release invalid/modified buffer, the allocator will
+ * give it back later */
+ GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_TAG_MEMORY);
+ pclass->release_buffer (bpool, buffer);
+ }
+ break;
+ }
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ break;
+
+ case V4L2_BUF_TYPE_VIDEO_OUTPUT:
+ case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
+ switch (obj->mode) {
+ case GST_V4L2_IO_RW:
+ /* release back in the pool */
+ pclass->release_buffer (bpool, buffer);
+ break;
+
+ case GST_V4L2_IO_MMAP:
+ case GST_V4L2_IO_DMABUF:
+ case GST_V4L2_IO_USERPTR:
+ case GST_V4L2_IO_DMABUF_IMPORT:
+ {
+ GstV4l2MemoryGroup *group;
+ guint index;
+
+ if (!gst_v4l2_is_buffer_valid (buffer, &group)) {
+ /* Simply release invalid/modified buffer, the allocator will
+ * give it back later */
+ GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_TAG_MEMORY);
+ pclass->release_buffer (bpool, buffer);
+ break;
+ }
+
+ index = group->buffer.index;
+
+ if (pool->buffers[index] == NULL) {
+ GST_LOG_OBJECT (pool, "buffer %u not queued, putting on free list",
+ index);
+
+ /* Remove qdata, this will unmap any map data in userptr */
+ gst_mini_object_set_qdata (GST_MINI_OBJECT (buffer),
+ GST_V4L2_IMPORT_QUARK, NULL, NULL);
+
+ /* reset to default size */
+ gst_v4l2_allocator_reset_group (pool->vallocator, group);
+
+ /* playback, put the buffer back in the queue to refill later. */
+ pclass->release_buffer (bpool, buffer);
+ } else {
+ /* the buffer is queued in the device but may not have been played yet.
+ * We just leave it there and don't make it available for future calls
+ * to acquire for now. The buffer will be dequeued and reused later. */
+ GST_LOG_OBJECT (pool, "buffer %u is queued", index);
+ }
+ break;
+ }
+
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ break;
+
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ }
+
+ static void
+ gst_v4l2_buffer_pool_dispose (GObject * object)
+ {
+ GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (object);
+
+ if (pool->vallocator)
+ gst_object_unref (pool->vallocator);
+ pool->vallocator = NULL;
+
+ if (pool->allocator)
+ gst_object_unref (pool->allocator);
+ pool->allocator = NULL;
+
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ g_cond_clear (&pool->buffer_cond);
++ g_mutex_clear (&pool->buffer_lock);
++
++ if (pool->tallocator)
++ gst_object_unref (pool->tallocator);
++ pool->tallocator = NULL;
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ if (pool->other_pool)
+ gst_object_unref (pool->other_pool);
+ pool->other_pool = NULL;
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+ }
+
+ static void
+ gst_v4l2_buffer_pool_finalize (GObject * object)
+ {
+ GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (object);
+
+ if (pool->video_fd >= 0)
+ pool->obj->close (pool->video_fd);
+
+ gst_poll_free (pool->poll);
+
+ /* This can't be done in the dispose method because we must not set the
+ * pointer to NULL, as it is part of the v4l2object, and dispose could be
+ * called multiple times */
+ gst_object_unref (pool->obj->element);
+
+ g_cond_clear (&pool->empty_cond);
+
+ /* FIXME have we done enough here ? */
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ static void
+ gst_v4l2_buffer_pool_init (GstV4l2BufferPool * pool)
+ {
+ pool->poll = gst_poll_new (TRUE);
+ pool->can_poll_device = TRUE;
+ g_cond_init (&pool->empty_cond);
+ pool->empty = TRUE;
+ pool->orphaned = FALSE;
+ }
+
+ static void
+ gst_v4l2_buffer_pool_class_init (GstV4l2BufferPoolClass * klass)
+ {
+ GObjectClass *object_class = G_OBJECT_CLASS (klass);
+ GstBufferPoolClass *bufferpool_class = GST_BUFFER_POOL_CLASS (klass);
+
+ object_class->dispose = gst_v4l2_buffer_pool_dispose;
+ object_class->finalize = gst_v4l2_buffer_pool_finalize;
+
+ bufferpool_class->start = gst_v4l2_buffer_pool_start;
+ bufferpool_class->stop = gst_v4l2_buffer_pool_stop;
+ bufferpool_class->set_config = gst_v4l2_buffer_pool_set_config;
+ bufferpool_class->alloc_buffer = gst_v4l2_buffer_pool_alloc_buffer;
+ bufferpool_class->acquire_buffer = gst_v4l2_buffer_pool_acquire_buffer;
+ bufferpool_class->release_buffer = gst_v4l2_buffer_pool_release_buffer;
+ bufferpool_class->flush_start = gst_v4l2_buffer_pool_flush_start;
+ bufferpool_class->flush_stop = gst_v4l2_buffer_pool_flush_stop;
+
+ GST_DEBUG_CATEGORY_INIT (v4l2bufferpool_debug, "v4l2bufferpool", 0,
+ "V4L2 Buffer Pool");
+ GST_DEBUG_CATEGORY_GET (CAT_PERFORMANCE, "GST_PERFORMANCE");
+ }
+
+ /**
+ * gst_v4l2_buffer_pool_new:
+ * @obj: the v4l2 object owning the pool
+ * @caps: the caps used for the pool's default configuration
+ *
+ * Construct a new buffer pool.
+ *
+ * Returns: the new pool, use gst_object_unref() to free resources
+ */
+ GstBufferPool *
+ gst_v4l2_buffer_pool_new (GstV4l2Object * obj, GstCaps * caps)
+ {
+ GstV4l2BufferPool *pool;
+ GstStructure *config;
+ gchar *name, *parent_name;
+ gint fd;
+
+ fd = obj->dup (obj->video_fd);
+ if (fd < 0)
+ goto dup_failed;
+
+ /* set a meaningful, unique name */
+ parent_name = gst_object_get_name (GST_OBJECT (obj->element));
+ name = g_strdup_printf ("%s:pool%u:%s",
+ parent_name, obj->pool_seq++,
+ V4L2_TYPE_IS_OUTPUT (obj->type) ? "sink" : "src");
+ g_free (parent_name);
+
+ pool = (GstV4l2BufferPool *) g_object_new (GST_TYPE_V4L2_BUFFER_POOL,
+ "name", name, NULL);
+ g_object_ref_sink (pool);
+ g_free (name);
+
+ gst_poll_fd_init (&pool->pollfd);
+ pool->pollfd.fd = fd;
+ gst_poll_add_fd (pool->poll, &pool->pollfd);
+ if (V4L2_TYPE_IS_OUTPUT (obj->type))
+ gst_poll_fd_ctl_write (pool->poll, &pool->pollfd, TRUE);
+ else
+ gst_poll_fd_ctl_read (pool->poll, &pool->pollfd, TRUE);
+
+ pool->video_fd = fd;
+ pool->obj = obj;
+ pool->can_poll_device = TRUE;
+
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ pool->tallocator = gst_tizen_allocator_new ();
++ if (pool->tallocator == NULL)
++ goto allocator_failed;
++
++ g_mutex_init (&pool->buffer_lock);
++ g_cond_init (&pool->buffer_cond);
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ pool->vallocator = gst_v4l2_allocator_new (GST_OBJECT (pool), obj);
+ if (pool->vallocator == NULL)
+ goto allocator_failed;
+
+ gst_object_ref (obj->element);
+
+ config = gst_buffer_pool_get_config (GST_BUFFER_POOL_CAST (pool));
+ gst_buffer_pool_config_set_params (config, caps, obj->info.size, 0, 0);
+ /* This will simply set a default config, but will not configure the pool
+ * because min and max are not valid */
+ gst_buffer_pool_set_config (GST_BUFFER_POOL_CAST (pool), config);
+
+ return GST_BUFFER_POOL (pool);
+
+ /* ERRORS */
+ dup_failed:
+ {
+ GST_ERROR ("failed to dup fd %d (%s)", errno, g_strerror (errno));
+ return NULL;
+ }
+ allocator_failed:
+ {
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ if (pool->tallocator) {
++ gst_object_unref (pool->tallocator);
++ pool->tallocator = NULL;
++ }
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ GST_ERROR_OBJECT (pool, "Failed to create V4L2 allocator");
+ gst_object_unref (pool);
+ return NULL;
+ }
+ }
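
A hypothetical usage sketch tying the constructor to the configuration note above: the default config is not valid for activation until real min/max values are set.

static GstBufferPool *
make_active_pool (GstV4l2Object * obj, GstCaps * caps, guint min, guint max)
{
  GstBufferPool *pool = gst_v4l2_buffer_pool_new (obj, caps);
  GstStructure *config;

  if (pool == NULL)
    return NULL;

  /* replace the default config with valid min/max values */
  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, caps, obj->info.size, min, max);
  gst_buffer_pool_set_config (pool, config);

  if (!gst_buffer_pool_set_active (pool, TRUE)) {
    gst_object_unref (pool);
    return NULL;
  }

  return pool;
}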
+
+ static GstFlowReturn
+ gst_v4l2_do_read (GstV4l2BufferPool * pool, GstBuffer * buf)
+ {
+ GstFlowReturn res;
+ GstV4l2Object *obj = pool->obj;
+ gint amount;
+ GstMapInfo map;
+ gint toread;
+
+ toread = obj->info.size;
+
+ GST_LOG_OBJECT (pool, "reading %d bytes into buffer %p", toread, buf);
+
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+
+ do {
+ if ((res = gst_v4l2_buffer_pool_poll (pool, TRUE)) != GST_FLOW_OK)
+ goto poll_error;
+
+ amount = obj->read (obj->video_fd, map.data, toread);
+
+ if (amount == toread) {
+ break;
+ } else if (amount == -1) {
+ if (errno == EAGAIN || errno == EINTR) {
+ continue;
+ } else
+ goto read_error;
+ } else {
+ /* short reads can happen if a signal interrupts the read */
+ continue;
+ }
+ } while (TRUE);
+
+ GST_LOG_OBJECT (pool, "read %d bytes", amount);
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_resize (buf, 0, amount);
+
+ return GST_FLOW_OK;
+
+ /* ERRORS */
+ poll_error:
+ {
+ GST_DEBUG ("poll error %s", gst_flow_get_name (res));
+ goto cleanup;
+ }
+ read_error:
+ {
+ GST_ELEMENT_ERROR (obj->element, RESOURCE, READ,
+ (_("Error reading %d bytes from device '%s'."),
+ toread, obj->videodev), GST_ERROR_SYSTEM);
+ res = GST_FLOW_ERROR;
+ goto cleanup;
+ }
+ cleanup:
+ {
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_resize (buf, 0, 0);
+ return res;
+ }
+ }
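
The EINTR/EAGAIN handling above as a plain POSIX helper (hypothetical; unlike the loop above, short reads are returned as-is rather than retried):

#include <errno.h>
#include <unistd.h>

static ssize_t
read_retry (int fd, void *data, size_t size)
{
  ssize_t amount;

  do {
    amount = read (fd, data, size);
  } while (amount < 0 && (errno == EAGAIN || errno == EINTR));

  return amount;
}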
+
+ /**
+ * gst_v4l2_buffer_pool_process:
+ * @bpool: a #GstBufferPool
+ * @buf: a #GstBuffer, may be replaced
+ * @frame_number: 32 bit frame number or %NULL
+ *
+ * Process @buf in @bpool. For capture devices, this function fills @buf with
+ * data from the device. For output devices, this function sends the contents
+ * of @buf to the device for playback.
+ *
+ * If non-%NULL, @frame_number is stored inside the timestamp for output
+ * devices and read back from the timestamp for capture devices.
+ *
+ * Returns: %GST_FLOW_OK on success.
+ */
+ GstFlowReturn
+ gst_v4l2_buffer_pool_process (GstV4l2BufferPool * pool, GstBuffer ** buf,
+ guint32 * frame_number)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstBufferPool *bpool = GST_BUFFER_POOL_CAST (pool);
+ GstV4l2Object *obj = pool->obj;
+
+ GST_DEBUG_OBJECT (pool, "process buffer %p", buf);
+
+ if (GST_BUFFER_POOL_IS_FLUSHING (pool))
+ return GST_FLOW_FLUSHING;
+
+ switch (obj->type) {
+ case V4L2_BUF_TYPE_VIDEO_CAPTURE:
+ case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
+ /* capture */
+ switch (obj->mode) {
+ case GST_V4L2_IO_RW:
+ /* capture into the buffer */
+ ret = gst_v4l2_do_read (pool, *buf);
+ break;
+
+ case GST_V4L2_IO_MMAP:
+ case GST_V4L2_IO_DMABUF:
+ {
+ GstBuffer *tmp;
+
+ if ((*buf)->pool == bpool) {
+ guint num_queued;
+ gsize size = gst_buffer_get_size (*buf);
+
+ /* Legacy M2M devices return an empty buffer when drained */
+ if (size == 0 && GST_V4L2_IS_M2M (obj->device_caps))
+ goto eos;
+
+ if (GST_VIDEO_INFO_FORMAT (&pool->caps_info) !=
+ GST_VIDEO_FORMAT_ENCODED && size < pool->size)
+ goto buffer_truncated;
+
+ num_queued = g_atomic_int_get (&pool->num_queued);
+ GST_TRACE_OBJECT (pool, "Only %i buffer left in the capture queue.",
+ num_queued);
+
+ /* If we have no more buffers and can allocate, it's time to do so */
+ if (num_queued == 0) {
+ if (GST_V4L2_ALLOCATOR_CAN_ALLOCATE (pool->vallocator, MMAP)) {
+ ret = gst_v4l2_buffer_pool_resurrect_buffer (pool);
+ if (ret == GST_FLOW_OK)
+ goto done;
+ }
+ }
+
+ /* start copying buffers when we are running low on buffers */
+ if (num_queued < pool->copy_threshold) {
+ GstBuffer *copy;
+
+ if (GST_V4L2_ALLOCATOR_CAN_ALLOCATE (pool->vallocator, MMAP)) {
+ ret = gst_v4l2_buffer_pool_resurrect_buffer (pool);
+ if (ret == GST_FLOW_OK)
+ goto done;
+ }
+
+ /* copy the buffer */
+ copy = gst_buffer_copy_region (*buf,
+ GST_BUFFER_COPY_ALL | GST_BUFFER_COPY_DEEP, 0, -1);
+ GST_LOG_OBJECT (pool, "copy buffer %p->%p", *buf, copy);
+
+ /* and requeue so that we can continue capturing */
+ gst_buffer_unref (*buf);
+ *buf = copy;
+ }
+
+ ret = GST_FLOW_OK;
+ /* nothing, data was inside the buffer when we did _acquire() */
+ goto done;
+ }
+
+ /* buffer not from our pool, grab a frame and copy it into the target */
+ if ((ret = gst_v4l2_buffer_pool_dqbuf (pool, &tmp, TRUE))
+ != GST_FLOW_OK)
+ goto done;
+
+ /* An empty buffer on capture indicates the end of stream */
+ if (gst_buffer_get_size (tmp) == 0) {
+ gst_v4l2_buffer_pool_release_buffer (bpool, tmp);
+
+ /* Legacy M2M devices return an empty buffer when drained */
+ if (GST_V4L2_IS_M2M (obj->device_caps))
+ goto eos;
+ }
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ if (pool->obj->tbm_output && pool->obj->mode == GST_V4L2_IO_DMABUF) {
++ gst_buffer_unref (*buf);
++ *buf = tmp;
++ } else {
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ ret = gst_v4l2_buffer_pool_copy_buffer (pool, *buf, tmp);
+
+ /* and queue the buffer again after the copy */
+ gst_v4l2_buffer_pool_release_buffer (bpool, tmp);
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ }
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+
+ if (ret != GST_FLOW_OK)
+ goto copy_failed;
+ break;
+ }
+
+ case GST_V4L2_IO_USERPTR:
+ {
+ struct UserPtrData *data;
+ GstBuffer *tmp;
+
+ /* Replace our buffer with downstream allocated buffer */
+ data = gst_mini_object_steal_qdata (GST_MINI_OBJECT (*buf),
+ GST_V4L2_IMPORT_QUARK);
+ tmp = gst_buffer_ref (data->buffer);
+ _unmap_userptr_frame (data);
+
+ /* Now tmp is writable, copy the flags and timestamp */
+ gst_buffer_copy_into (tmp, *buf,
+ GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS, 0, -1);
+
+ gst_buffer_replace (buf, tmp);
+ gst_buffer_unref (tmp);
+ break;
+ }
+
+ case GST_V4L2_IO_DMABUF_IMPORT:
+ {
+ GstBuffer *tmp;
+
+ /* Replace our buffer with downstream allocated buffer */
+ tmp = gst_mini_object_steal_qdata (GST_MINI_OBJECT (*buf),
+ GST_V4L2_IMPORT_QUARK);
+
+ gst_buffer_copy_into (tmp, *buf,
+ GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS, 0, -1);
+
+ gst_buffer_replace (buf, tmp);
+ gst_buffer_unref (tmp);
+ break;
+ }
+
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ break;
+
+ case V4L2_BUF_TYPE_VIDEO_OUTPUT:
+ case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
+ /* playback */
+ switch (obj->mode) {
+ case GST_V4L2_IO_RW:
+ /* FIXME, do write() */
+ GST_WARNING_OBJECT (pool, "implement write()");
+ break;
+
+ case GST_V4L2_IO_USERPTR:
+ case GST_V4L2_IO_DMABUF_IMPORT:
+ case GST_V4L2_IO_DMABUF:
+ case GST_V4L2_IO_MMAP:
+ {
+ GstBuffer *to_queue = NULL;
+ GstBuffer *buffer;
+ GstV4l2MemoryGroup *group;
+ gint index;
+
+ if ((*buf)->pool != bpool)
+ goto copying;
+
+ if (!gst_v4l2_is_buffer_valid (*buf, &group))
+ goto copying;
+
+ index = group->buffer.index;
+
+ GST_LOG_OBJECT (pool, "processing buffer %i from our pool", index);
+
+ if (pool->buffers[index] != NULL) {
+ GST_LOG_OBJECT (pool, "buffer %i already queued, copying", index);
+ goto copying;
+ }
+
+ /* we can queue directly */
+ to_queue = gst_buffer_ref (*buf);
+
+ copying:
+ if (to_queue == NULL) {
+ GstBufferPoolAcquireParams params = { 0 };
+
+ GST_LOG_OBJECT (pool, "alloc buffer from our pool");
+
+ /* this can return EOS if all buffers are outstanding, which would
+ * be strange because we would expect the upstream element to have
+ * allocated them and returned them to us. */
+ params.flags = GST_BUFFER_POOL_ACQUIRE_FLAG_DONTWAIT;
+ ret = gst_buffer_pool_acquire_buffer (bpool, &to_queue, ¶ms);
+ if (ret != GST_FLOW_OK)
+ goto acquire_failed;
+
+ ret = gst_v4l2_buffer_pool_prepare_buffer (pool, to_queue, *buf);
+ if (ret != GST_FLOW_OK) {
+ gst_buffer_unref (to_queue);
+ goto prepare_failed;
+ }
+
+ /* retrieve the group */
+ gst_v4l2_is_buffer_valid (to_queue, &group);
+ }
+
+ if ((ret =
+ gst_v4l2_buffer_pool_qbuf (pool, to_queue, group,
+ frame_number))
+ != GST_FLOW_OK)
+ goto queue_failed;
+
+ /* if we are not streaming yet (this is the first buffer), start
+ * streaming now */
+ if (!gst_v4l2_buffer_pool_streamon (pool)) {
+ /* don't check return value because qbuf would have failed */
+ gst_v4l2_is_buffer_valid (to_queue, &group);
+
+ /* qbuf has stored to_queue buffer but we are not in
+ * streaming state, so the flush logic won't be performed.
+ * To avoid leaks, flush the allocator and restore the queued
+ * buffer as non-queued */
+ gst_v4l2_allocator_flush (pool->vallocator);
+
+ pool->buffers[group->buffer.index] = NULL;
+
+ gst_mini_object_set_qdata (GST_MINI_OBJECT (to_queue),
+ GST_V4L2_IMPORT_QUARK, NULL, NULL);
+ gst_buffer_unref (to_queue);
+ g_atomic_int_add (&pool->num_queued, -1);
+ goto start_failed;
+ }
+
+ /* Remove our ref, we will still hold this buffer in acquire as needed,
+ * otherwise the pool will think it is outstanding and will refuse to stop. */
+ gst_buffer_unref (to_queue);
+
+ /* release as many buffers as possible */
+ while (gst_v4l2_buffer_pool_dqbuf (pool, &buffer, FALSE) ==
+ GST_FLOW_OK) {
+ if (buffer->pool == NULL)
+ gst_v4l2_buffer_pool_release_buffer (bpool, buffer);
+ }
+
+ if (g_atomic_int_get (&pool->num_queued) >= pool->min_latency) {
+ /* all buffers are queued, try to dequeue one and release it back
+ * into the pool so that _acquire can get to it again. */
+ ret = gst_v4l2_buffer_pool_dqbuf (pool, &buffer, TRUE);
+ if (ret == GST_FLOW_OK && buffer->pool == NULL)
+ /* release the rendered buffer back into the pool. This wakes up any
+ * thread waiting for a buffer in _acquire(). */
+ gst_v4l2_buffer_pool_release_buffer (bpool, buffer);
+ }
+ break;
+ }
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ break;
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ done:
+ return ret;
+
+ /* ERRORS */
+ copy_failed:
+ {
+ GST_ERROR_OBJECT (pool, "failed to copy buffer");
+ return ret;
+ }
+ buffer_truncated:
+ {
+ GST_WARNING_OBJECT (pool,
+ "Dropping truncated buffer, this is likely a driver bug.");
+ gst_buffer_unref (*buf);
+ *buf = NULL;
+ return GST_V4L2_FLOW_CORRUPTED_BUFFER;
+ }
+ eos:
+ {
+ GST_DEBUG_OBJECT (pool, "end of stream reached");
+ gst_buffer_unref (*buf);
+ *buf = NULL;
+ return GST_V4L2_FLOW_LAST_BUFFER;
+ }
+ acquire_failed:
+ {
+ if (ret == GST_FLOW_FLUSHING)
+ GST_DEBUG_OBJECT (pool, "flushing");
+ else
+ GST_WARNING_OBJECT (pool, "failed to acquire a buffer: %s",
+ gst_flow_get_name (ret));
+ return ret;
+ }
+ prepare_failed:
+ {
+ GST_ERROR_OBJECT (pool, "failed to prepare data");
+ return ret;
+ }
+ queue_failed:
+ {
+ GST_ERROR_OBJECT (pool, "failed to queue buffer");
+ return ret;
+ }
+ start_failed:
+ {
+ GST_ERROR_OBJECT (pool, "failed to start streaming");
+ return GST_FLOW_ERROR;
+ }
+ }
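
A hypothetical caller sketch showing how the custom flow returns documented in gstv4l2bufferpool.h are meant to be dispatched:

static GstFlowReturn
capture_one (GstV4l2BufferPool * pool, GstBuffer ** buf)
{
  GstFlowReturn ret = gst_v4l2_buffer_pool_process (pool, buf, NULL);

  switch (ret) {
    case GST_V4L2_FLOW_CORRUPTED_BUFFER:
      /* buffer was dropped; a real caller would loop and try again */
      return GST_FLOW_OK;
    case GST_V4L2_FLOW_LAST_BUFFER:
      /* drained; typically only mem-2-mem devices */
      return GST_FLOW_EOS;
    default:
      return ret;
  }
}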
+
+ void
+ gst_v4l2_buffer_pool_set_other_pool (GstV4l2BufferPool * pool,
+ GstBufferPool * other_pool)
+ {
+ g_return_if_fail (!gst_buffer_pool_is_active (GST_BUFFER_POOL (pool)));
+
+ if (pool->other_pool)
+ gst_object_unref (pool->other_pool);
+ pool->other_pool = gst_object_ref (other_pool);
+ }
+
+ void
+ gst_v4l2_buffer_pool_copy_at_threshold (GstV4l2BufferPool * pool, gboolean copy)
+ {
+ GST_OBJECT_LOCK (pool);
+ pool->enable_copy_threshold = copy;
+ GST_OBJECT_UNLOCK (pool);
+ }
+
+ gboolean
+ gst_v4l2_buffer_pool_flush (GstBufferPool * bpool)
+ {
+ GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (bpool);
+ gboolean ret = TRUE;
+
+ gst_v4l2_buffer_pool_streamoff (pool);
+
+ if (!V4L2_TYPE_IS_OUTPUT (pool->obj->type))
+ ret = gst_v4l2_buffer_pool_streamon (pool);
+
+ return ret;
+ }
+
+ /**
+ * gst_v4l2_buffer_pool_enable_resolution_change:
+ * @pool: a #GstBufferPool
+ *
+ * When this is called, the pool will subscribe to the
+ * %V4L2_EVENT_SOURCE_CHANGE. Upon receiving this event, it will notify
+ * the element acquiring buffer with the special flow return
+ * %GST_V4L2_FLOW_RESOLUTION_CHANGE.
+ */
+ void
+ gst_v4l2_buffer_pool_enable_resolution_change (GstV4l2BufferPool * pool)
+ {
+ guint32 input_id = 0;
+
+ g_return_if_fail (!gst_buffer_pool_is_active (GST_BUFFER_POOL (pool)));
+
+ /* Make sure we subscribe for the current input */
+ gst_v4l2_get_input (pool->obj, &input_id);
+
+ if (gst_v4l2_subscribe_event (pool->obj, V4L2_EVENT_SOURCE_CHANGE, input_id))
+ gst_poll_fd_ctl_pri (pool->poll, &pool->pollfd, TRUE);
+ }
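
For reference, the subscription performed above in plain V4L2 (hypothetical helper):

#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

static int
subscribe_source_change (int fd, unsigned int input_id)
{
  struct v4l2_event_subscription sub;

  memset (&sub, 0, sizeof (sub));
  sub.type = V4L2_EVENT_SOURCE_CHANGE;
  sub.id = input_id;            /* subscribe for the current input */

  return ioctl (fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
}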
--- /dev/null
+ /* GStreamer
+ *
+ * Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * 2006 Edgard Lima <edgard.lima@gmail.com>
+ * 2009 Texas Instruments, Inc - http://www.ti.com/
+ *
+ * gstv4l2bufferpool.h V4L2 buffer pool class
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifndef __GST_V4L2_BUFFER_POOL_H__
+ #define __GST_V4L2_BUFFER_POOL_H__
+
+ #include <gst/gst.h>
+
+ typedef struct _GstV4l2BufferPool GstV4l2BufferPool;
+ typedef struct _GstV4l2BufferPoolClass GstV4l2BufferPoolClass;
+ typedef struct _GstV4l2Meta GstV4l2Meta;
+
+ #include "gstv4l2object.h"
+ #include "gstv4l2allocator.h"
+
+ G_BEGIN_DECLS
+
+ #define GST_TYPE_V4L2_BUFFER_POOL (gst_v4l2_buffer_pool_get_type())
+ #define GST_IS_V4L2_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_V4L2_BUFFER_POOL))
+ #define GST_V4L2_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_V4L2_BUFFER_POOL, GstV4l2BufferPool))
+ #define GST_V4L2_BUFFER_POOL_CAST(obj) ((GstV4l2BufferPool*)(obj))
+
+ /* Returns true if the pool is streaming. Must be called with stream lock
+ * held. */
+ #define GST_V4L2_BUFFER_POOL_IS_STREAMING(obj) (GST_V4L2_BUFFER_POOL (obj)->streaming)
+
+ /* This flow return is used to indicate that the last buffer has been dequeued
+ * during draining. This should normally only occur for mem-2-mem devices. */
+ #define GST_V4L2_FLOW_LAST_BUFFER GST_FLOW_CUSTOM_SUCCESS
+
+ /* This flow return is used to indicate that the returned buffer was marked
+ * with the error flag and had no payload. This error should be recovered
+ * from by simply waiting for the next buffer. */
+ #define GST_V4L2_FLOW_CORRUPTED_BUFFER GST_FLOW_CUSTOM_SUCCESS_1
+
+ /* This flow return is used to indicate that a SOURCE_CHANGE event with the
+ * resolution change flag set was received. */
+ #define GST_V4L2_FLOW_RESOLUTION_CHANGE GST_FLOW_CUSTOM_SUCCESS_2
+
+ struct _GstV4l2BufferPool
+ {
+ GstBufferPool parent;
+
+ GstV4l2Object *obj; /* the v4l2 object */
+ gint video_fd; /* a dup(2) of the v4l2object's video_fd */
+ GstPoll *poll; /* a poll for video_fd */
+ GstPollFD pollfd;
+ gboolean can_poll_device;
+
+ gboolean empty;
+ GCond empty_cond;
+
+ gboolean orphaned;
+
+ GstV4l2Allocator *vallocator;
+ GstAllocator *allocator;
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ GstAllocator *tallocator;
++ gint live_buffer_count;
++ GMutex buffer_lock;
++ GCond buffer_cond;
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ GstAllocationParams params;
+ GstBufferPool *other_pool;
+ guint size;
+ GstVideoInfo caps_info; /* Default video information */
+
+ gboolean add_videometa; /* set if video meta should be added */
+ gboolean enable_copy_threshold; /* If copy_threshold should be set */
+
+ guint min_latency; /* number of buffers we will hold */
+ guint max_latency; /* number of buffers we can hold */
+ guint num_queued; /* number of buffers queued in the driver */
+ guint num_allocated; /* number of buffers allocated */
+ guint copy_threshold; /* when our pool runs lower, start handing out copies */
+
+ gboolean streaming;
+ gboolean flushing;
+
+ GstBuffer *buffers[VIDEO_MAX_FRAME];
+
+ /* signal handlers */
+ gulong group_released_handler;
+
+ /* Flag to warn only once about the buggy field driver bug */
+ gboolean has_warned_on_buggy_field;
+ };
+
+ struct _GstV4l2BufferPoolClass
+ {
+ GstBufferPoolClass parent_class;
+ };
+
+ GType gst_v4l2_buffer_pool_get_type (void);
+
+ GstBufferPool * gst_v4l2_buffer_pool_new (GstV4l2Object *obj, GstCaps *caps);
+
+ GstFlowReturn gst_v4l2_buffer_pool_process (GstV4l2BufferPool * bpool, GstBuffer ** buf, guint32 * frame_number);
+
+ void gst_v4l2_buffer_pool_set_other_pool (GstV4l2BufferPool * pool,
+ GstBufferPool * other_pool);
+ void gst_v4l2_buffer_pool_copy_at_threshold (GstV4l2BufferPool * pool,
+ gboolean copy);
+
+ gboolean gst_v4l2_buffer_pool_flush (GstBufferPool *pool);
+
+ gboolean gst_v4l2_buffer_pool_orphan (GstBufferPool ** pool);
+
+ void gst_v4l2_buffer_pool_enable_resolution_change (GstV4l2BufferPool *self);
+
+ G_END_DECLS
+
+ #endif /*__GST_V4L2_BUFFER_POOL_H__ */
--- /dev/null
- case V4L2_PIX_FMT_YUV411P:{
+ /* GStreamer
+ *
+ * Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * 2006 Edgard Lima <edgard.lima@gmail.com>
+ *
+ * gstv4l2object.c: base class for V4L2 elements
+ *
+ * This library is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Library General Public License as published
+ * by the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version. This library is distributed in the hope
+ * that it will be useful, but WITHOUT ANY WARRANTY; without even the
+ * implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+ * PURPOSE. See the GNU Library General Public License for more details.
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301,
+ * USA.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include <config.h>
+ #endif
+
+ #include <sys/stat.h>
+ #include <fcntl.h>
+ #include <errno.h>
+ #include <string.h>
+ #include <sys/mman.h>
+ #include <sys/ioctl.h>
+
+
+ #ifdef HAVE_GUDEV
+ #include <gudev/gudev.h>
+ #endif
+
+ #include "ext/videodev2.h"
+ #include "gstv4l2object.h"
+ #include "gstv4l2tuner.h"
+ #include "gstv4l2colorbalance.h"
+
+ #include "gst/gst-i18n-plugin.h"
+
+ #include <gst/video/video.h>
+ #include <gst/allocators/gstdmabuf.h>
+
+ GST_DEBUG_CATEGORY_EXTERN (v4l2_debug);
+ #define GST_CAT_DEFAULT v4l2_debug
+
+ #define DEFAULT_PROP_DEVICE_NAME NULL
+ #define DEFAULT_PROP_DEVICE_FD -1
+ #define DEFAULT_PROP_FLAGS 0
+ #define DEFAULT_PROP_TV_NORM 0
+ #define DEFAULT_PROP_IO_MODE GST_V4L2_IO_AUTO
+
+ #define ENCODED_BUFFER_SIZE (2 * 1024 * 1024)
+
+ enum
+ {
+ PROP_0,
+ V4L2_STD_OBJECT_PROPS,
+ };
+
+ /*
+ * common format / caps utilities:
+ */
+ typedef enum
+ {
+ GST_V4L2_RAW = 1 << 0,
+ GST_V4L2_CODEC = 1 << 1,
+ GST_V4L2_TRANSPORT = 1 << 2,
+ GST_V4L2_NO_PARSE = 1 << 3,
+ GST_V4L2_ALL = 0xffff
+ } GstV4L2FormatFlags;
+
+ typedef struct
+ {
+ guint32 format;
+ gboolean dimensions;
+ GstV4L2FormatFlags flags;
+ } GstV4L2FormatDesc;
+
+ static const GstV4L2FormatDesc gst_v4l2_formats[] = {
+ /* RGB formats */
+ {V4L2_PIX_FMT_RGB332, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_ARGB555, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_XRGB555, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_ARGB555X, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_XRGB555X, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_RGB565, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_RGB565X, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_BGR666, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_BGR24, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_RGB24, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_ABGR32, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_XBGR32, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_BGRA32, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_BGRX32, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_RGBA32, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_RGBX32, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_ARGB32, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_XRGB32, TRUE, GST_V4L2_RAW},
+
+ /* Deprecated Packed RGB Image Formats (alpha ambiguity) */
+ {V4L2_PIX_FMT_RGB444, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_RGB555, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_RGB555X, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_BGR32, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_RGB32, TRUE, GST_V4L2_RAW},
+
+ /* Grey formats */
+ {V4L2_PIX_FMT_GREY, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_Y4, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_Y6, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_Y10, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_Y12, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_Y16, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_Y16_BE, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_Y10BPACK, TRUE, GST_V4L2_RAW},
++ {V4L2_PIX_FMT_INVZ, TRUE, GST_V4L2_RAW},
+
+ /* Palette formats */
+ {V4L2_PIX_FMT_PAL8, TRUE, GST_V4L2_RAW},
+
+ /* Chrominance formats */
+ {V4L2_PIX_FMT_UV8, TRUE, GST_V4L2_RAW},
+
+ /* Luminance+Chrominance formats */
+ {V4L2_PIX_FMT_YVU410, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_YVU420, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_YVU420M, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_YUYV, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_YYUV, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_YVYU, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_UYVY, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_VYUY, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_YUV422P, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_YUV411P, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_Y41P, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_YUV444, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_YUV555, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_YUV565, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_YUV32, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_YUV410, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_YUV420, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_YUV420M, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_HI240, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_HM12, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_M420, TRUE, GST_V4L2_RAW},
+
+ /* two planes -- one Y, one Cr + Cb interleaved */
+ {V4L2_PIX_FMT_NV12, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_NV12M, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_NV12MT, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_NV12MT_16X16, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_NV21, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_NV21M, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_NV16, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_NV16M, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_NV61, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_NV61M, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_NV24, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_NV42, TRUE, GST_V4L2_RAW},
+
+ /* Bayer formats - see http://www.siliconimaging.com/RGB%20Bayer.htm */
+ {V4L2_PIX_FMT_SBGGR8, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_SGBRG8, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_SGRBG8, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_SRGGB8, TRUE, GST_V4L2_RAW},
+
+ /* compressed formats */
+ {V4L2_PIX_FMT_MJPEG, FALSE, GST_V4L2_CODEC},
+ {V4L2_PIX_FMT_JPEG, FALSE, GST_V4L2_CODEC},
+ {V4L2_PIX_FMT_PJPG, FALSE, GST_V4L2_CODEC},
+ {V4L2_PIX_FMT_DV, FALSE, GST_V4L2_TRANSPORT},
+ {V4L2_PIX_FMT_MPEG, FALSE, GST_V4L2_TRANSPORT},
+ {V4L2_PIX_FMT_FWHT, FALSE, GST_V4L2_CODEC},
+ {V4L2_PIX_FMT_H264, FALSE, GST_V4L2_CODEC},
+ {V4L2_PIX_FMT_H264_NO_SC, FALSE, GST_V4L2_CODEC},
+ {V4L2_PIX_FMT_H264_MVC, FALSE, GST_V4L2_CODEC},
+ {V4L2_PIX_FMT_HEVC, FALSE, GST_V4L2_CODEC},
+ {V4L2_PIX_FMT_H263, FALSE, GST_V4L2_CODEC},
+ {V4L2_PIX_FMT_MPEG1, FALSE, GST_V4L2_CODEC},
+ {V4L2_PIX_FMT_MPEG2, FALSE, GST_V4L2_CODEC},
+ {V4L2_PIX_FMT_MPEG4, FALSE, GST_V4L2_CODEC},
+ {V4L2_PIX_FMT_XVID, FALSE, GST_V4L2_CODEC},
+ {V4L2_PIX_FMT_VC1_ANNEX_G, FALSE, GST_V4L2_CODEC},
+ {V4L2_PIX_FMT_VC1_ANNEX_L, FALSE, GST_V4L2_CODEC},
+ {V4L2_PIX_FMT_VP8, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
+ {V4L2_PIX_FMT_VP9, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
+
+ /* Vendor-specific formats */
+ {V4L2_PIX_FMT_WNVA, TRUE, GST_V4L2_CODEC},
+ {V4L2_PIX_FMT_SN9C10X, TRUE, GST_V4L2_CODEC},
+ {V4L2_PIX_FMT_PWC1, TRUE, GST_V4L2_CODEC},
+ {V4L2_PIX_FMT_PWC2, TRUE, GST_V4L2_CODEC},
+ };
+
+ #define GST_V4L2_FORMAT_COUNT (G_N_ELEMENTS (gst_v4l2_formats))
+
+ static GSList *gst_v4l2_object_get_format_list (GstV4l2Object * v4l2object);
+
+
+ #define GST_TYPE_V4L2_DEVICE_FLAGS (gst_v4l2_device_get_type ())
+ static GType
+ gst_v4l2_device_get_type (void)
+ {
+ static GType v4l2_device_type = 0;
+
+ if (v4l2_device_type == 0) {
+ static const GFlagsValue values[] = {
+ {V4L2_CAP_VIDEO_CAPTURE, "Device supports video capture", "capture"},
+ {V4L2_CAP_VIDEO_OUTPUT, "Device supports video playback", "output"},
+ {V4L2_CAP_VIDEO_OVERLAY, "Device supports video overlay", "overlay"},
+
+ {V4L2_CAP_VBI_CAPTURE, "Device supports the VBI capture", "vbi-capture"},
+ {V4L2_CAP_VBI_OUTPUT, "Device supports the VBI output", "vbi-output"},
+
+ {V4L2_CAP_TUNER, "Device has a tuner or modulator", "tuner"},
+ {V4L2_CAP_AUDIO, "Device has audio inputs or outputs", "audio"},
+
+ {0, NULL, NULL}
+ };
+
+ v4l2_device_type =
+ g_flags_register_static ("GstV4l2DeviceTypeFlags", values);
+ }
+
+ return v4l2_device_type;
+ }
+
+ #define GST_TYPE_V4L2_TV_NORM (gst_v4l2_tv_norm_get_type ())
+ static GType
+ gst_v4l2_tv_norm_get_type (void)
+ {
+ static GType v4l2_tv_norm = 0;
+
+ if (!v4l2_tv_norm) {
+ static const GEnumValue tv_norms[] = {
+ {0, "none", "none"},
+
+ {V4L2_STD_NTSC, "NTSC", "NTSC"},
+ {V4L2_STD_NTSC_M, "NTSC-M", "NTSC-M"},
+ {V4L2_STD_NTSC_M_JP, "NTSC-M-JP", "NTSC-M-JP"},
+ {V4L2_STD_NTSC_M_KR, "NTSC-M-KR", "NTSC-M-KR"},
+ {V4L2_STD_NTSC_443, "NTSC-443", "NTSC-443"},
+
+ {V4L2_STD_PAL, "PAL", "PAL"},
+ {V4L2_STD_PAL_BG, "PAL-BG", "PAL-BG"},
+ {V4L2_STD_PAL_B, "PAL-B", "PAL-B"},
+ {V4L2_STD_PAL_B1, "PAL-B1", "PAL-B1"},
+ {V4L2_STD_PAL_G, "PAL-G", "PAL-G"},
+ {V4L2_STD_PAL_H, "PAL-H", "PAL-H"},
+ {V4L2_STD_PAL_I, "PAL-I", "PAL-I"},
+ {V4L2_STD_PAL_DK, "PAL-DK", "PAL-DK"},
+ {V4L2_STD_PAL_D, "PAL-D", "PAL-D"},
+ {V4L2_STD_PAL_D1, "PAL-D1", "PAL-D1"},
+ {V4L2_STD_PAL_K, "PAL-K", "PAL-K"},
+ {V4L2_STD_PAL_M, "PAL-M", "PAL-M"},
+ {V4L2_STD_PAL_N, "PAL-N", "PAL-N"},
+ {V4L2_STD_PAL_Nc, "PAL-Nc", "PAL-Nc"},
+ {V4L2_STD_PAL_60, "PAL-60", "PAL-60"},
+
+ {V4L2_STD_SECAM, "SECAM", "SECAM"},
+ {V4L2_STD_SECAM_B, "SECAM-B", "SECAM-B"},
+ {V4L2_STD_SECAM_G, "SECAM-G", "SECAM-G"},
+ {V4L2_STD_SECAM_H, "SECAM-H", "SECAM-H"},
+ {V4L2_STD_SECAM_DK, "SECAM-DK", "SECAM-DK"},
+ {V4L2_STD_SECAM_D, "SECAM-D", "SECAM-D"},
+ {V4L2_STD_SECAM_K, "SECAM-K", "SECAM-K"},
+ {V4L2_STD_SECAM_K1, "SECAM-K1", "SECAM-K1"},
+ {V4L2_STD_SECAM_L, "SECAM-L", "SECAM-L"},
+ {V4L2_STD_SECAM_LC, "SECAM-Lc", "SECAM-Lc"},
+
+ {0, NULL, NULL}
+ };
+
+ v4l2_tv_norm = g_enum_register_static ("V4L2_TV_norms", tv_norms);
+ }
+
+ return v4l2_tv_norm;
+ }
+
+ GType
+ gst_v4l2_io_mode_get_type (void)
+ {
+ static GType v4l2_io_mode = 0;
+
+ if (!v4l2_io_mode) {
+ static const GEnumValue io_modes[] = {
+ {GST_V4L2_IO_AUTO, "GST_V4L2_IO_AUTO", "auto"},
+ {GST_V4L2_IO_RW, "GST_V4L2_IO_RW", "rw"},
+ {GST_V4L2_IO_MMAP, "GST_V4L2_IO_MMAP", "mmap"},
+ {GST_V4L2_IO_USERPTR, "GST_V4L2_IO_USERPTR", "userptr"},
+ {GST_V4L2_IO_DMABUF, "GST_V4L2_IO_DMABUF", "dmabuf"},
+ {GST_V4L2_IO_DMABUF_IMPORT, "GST_V4L2_IO_DMABUF_IMPORT",
+ "dmabuf-import"},
+
+ {0, NULL, NULL}
+ };
+ v4l2_io_mode = g_enum_register_static ("GstV4l2IOMode", io_modes);
+ }
+ return v4l2_io_mode;
+ }
+
+ void
+ gst_v4l2_object_install_properties_helper (GObjectClass * gobject_class,
+ const char *default_device)
+ {
+ g_object_class_install_property (gobject_class, PROP_DEVICE,
+ g_param_spec_string ("device", "Device", "Device location",
+ default_device, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_DEVICE_NAME,
+ g_param_spec_string ("device-name", "Device name",
+ "Name of the device", DEFAULT_PROP_DEVICE_NAME,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_DEVICE_FD,
+ g_param_spec_int ("device-fd", "File descriptor",
+ "File descriptor of the device", -1, G_MAXINT, DEFAULT_PROP_DEVICE_FD,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_FLAGS,
+ g_param_spec_flags ("flags", "Flags", "Device type flags",
+ GST_TYPE_V4L2_DEVICE_FLAGS, DEFAULT_PROP_FLAGS,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstV4l2Src:brightness:
+ *
+ * Picture brightness, or more precisely, the black level
+ */
+ g_object_class_install_property (gobject_class, PROP_BRIGHTNESS,
+ g_param_spec_int ("brightness", "Brightness",
+ "Picture brightness, or more precisely, the black level", G_MININT,
+ G_MAXINT, 0,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+ /**
+ * GstV4l2Src:contrast:
+ *
+ * Picture contrast or luma gain
+ */
+ g_object_class_install_property (gobject_class, PROP_CONTRAST,
+ g_param_spec_int ("contrast", "Contrast",
+ "Picture contrast or luma gain", G_MININT,
+ G_MAXINT, 0,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+ /**
+ * GstV4l2Src:saturation:
+ *
+ * Picture color saturation or chroma gain
+ */
+ g_object_class_install_property (gobject_class, PROP_SATURATION,
+ g_param_spec_int ("saturation", "Saturation",
+ "Picture color saturation or chroma gain", G_MININT,
+ G_MAXINT, 0,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+ /**
+ * GstV4l2Src:hue:
+ *
+ * Hue or color balance
+ */
+ g_object_class_install_property (gobject_class, PROP_HUE,
+ g_param_spec_int ("hue", "Hue",
+ "Hue or color balance", G_MININT,
+ G_MAXINT, 0,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+
+ /**
+ * GstV4l2Src:norm:
+ *
+ * TV norm
+ */
+ g_object_class_install_property (gobject_class, PROP_TV_NORM,
+ g_param_spec_enum ("norm", "TV norm",
+ "video standard",
+ GST_TYPE_V4L2_TV_NORM, DEFAULT_PROP_TV_NORM,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstV4l2Src:io-mode:
+ *
+ * IO Mode
+ */
+ g_object_class_install_property (gobject_class, PROP_IO_MODE,
+ g_param_spec_enum ("io-mode", "IO mode",
+ "I/O mode",
+ GST_TYPE_V4L2_IO_MODE, DEFAULT_PROP_IO_MODE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstV4l2Src:extra-controls:
+ *
+ * Additional v4l2 controls for the device. The controls are identified
+ * by the control name (lowercase with '_' for any non-alphanumeric
+ * characters).
+ *
+ * Since: 1.2
+ */
+ g_object_class_install_property (gobject_class, PROP_EXTRA_CONTROLS,
+ g_param_spec_boxed ("extra-controls", "Extra Controls",
+ "Extra v4l2 controls (CIDs) for the device",
+ GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstV4l2Src:pixel-aspect-ratio:
+ *
+ * The pixel aspect ratio of the device. This overwrites the pixel aspect
+ * ratio queried from the device.
+ *
+ * Since: 1.2
+ */
+ g_object_class_install_property (gobject_class, PROP_PIXEL_ASPECT_RATIO,
+ g_param_spec_string ("pixel-aspect-ratio", "Pixel Aspect Ratio",
+ "Overwrite the pixel aspect ratio of the device", "1/1",
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstV4l2Src:force-aspect-ratio:
+ *
+ * When enabled, the pixel aspect ratio queried from the device or set
+ * with the pixel-aspect-ratio property will be enforced.
+ *
+ * Since: 1.2
+ */
+ g_object_class_install_property (gobject_class, PROP_FORCE_ASPECT_RATIO,
+ g_param_spec_boolean ("force-aspect-ratio", "Force aspect ratio",
+ "When enabled, the pixel aspect ratio will be enforced", TRUE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_type_mark_as_plugin_api (GST_TYPE_V4L2_DEVICE_FLAGS, 0);
+ gst_type_mark_as_plugin_api (GST_TYPE_V4L2_TV_NORM, 0);
+ gst_type_mark_as_plugin_api (GST_TYPE_V4L2_IO_MODE, 0);
+ }
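+
+ /* Example usage (illustrative only; assumes the v4l2src element and a
+ * driver exposing an "exposure_auto" control for extra-controls):
+ *
+ * gst-launch-1.0 v4l2src device=/dev/video0 io-mode=mmap \
+ * extra-controls="c,exposure_auto=1" ! videoconvert ! autovideosink
+ */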
+
+ void
+ gst_v4l2_object_install_m2m_properties_helper (GObjectClass * gobject_class)
+ {
+ g_object_class_install_property (gobject_class, PROP_DEVICE,
+ g_param_spec_string ("device", "Device", "Device location",
+ NULL, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_DEVICE_NAME,
+ g_param_spec_string ("device-name", "Device name",
+ "Name of the device", DEFAULT_PROP_DEVICE_NAME,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_DEVICE_FD,
+ g_param_spec_int ("device-fd", "File descriptor",
+ "File descriptor of the device", -1, G_MAXINT, DEFAULT_PROP_DEVICE_FD,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_OUTPUT_IO_MODE,
+ g_param_spec_enum ("output-io-mode", "Output IO mode",
+ "Output side I/O mode (matches sink pad)",
+ GST_TYPE_V4L2_IO_MODE, DEFAULT_PROP_IO_MODE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_CAPTURE_IO_MODE,
+ g_param_spec_enum ("capture-io-mode", "Capture IO mode",
+ "Capture I/O mode (matches src pad)",
+ GST_TYPE_V4L2_IO_MODE, DEFAULT_PROP_IO_MODE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_EXTRA_CONTROLS,
+ g_param_spec_boxed ("extra-controls", "Extra Controls",
+ "Extra v4l2 controls (CIDs) for the device",
+ GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ }
+
+ /* Support for 32bit off_t, this wrapper is casting off_t to gint64 */
+ #ifdef HAVE_LIBV4L2
+ #if SIZEOF_OFF_T < 8
+
+ static gpointer
+ v4l2_mmap_wrapper (gpointer start, gsize length, gint prot, gint flags, gint fd,
+ off_t offset)
+ {
+ return v4l2_mmap (start, length, prot, flags, fd, (gint64) offset);
+ }
+
+ #define v4l2_mmap v4l2_mmap_wrapper
+
+ #endif /* SIZEOF_OFF_T < 8 */
+ #endif /* HAVE_LIBV4L2 */
+
+ GstV4l2Object *
+ gst_v4l2_object_new (GstElement * element,
+ GstObject * debug_object,
+ enum v4l2_buf_type type,
+ const char *default_device,
+ GstV4l2GetInOutFunction get_in_out_func,
+ GstV4l2SetInOutFunction set_in_out_func,
+ GstV4l2UpdateFpsFunction update_fps_func)
+ {
+ GstV4l2Object *v4l2object;
+
+ /*
+ * some default values
+ */
+ v4l2object = g_new0 (GstV4l2Object, 1);
+
+ v4l2object->type = type;
+ v4l2object->formats = NULL;
+
+ v4l2object->element = element;
+ v4l2object->dbg_obj = debug_object;
+ v4l2object->get_in_out_func = get_in_out_func;
+ v4l2object->set_in_out_func = set_in_out_func;
+ v4l2object->update_fps_func = update_fps_func;
+
+ v4l2object->video_fd = -1;
+ v4l2object->active = FALSE;
+ v4l2object->videodev = g_strdup (default_device);
+
+ v4l2object->norms = NULL;
+ v4l2object->channels = NULL;
+ v4l2object->colors = NULL;
+
+ v4l2object->keep_aspect = TRUE;
+
+ v4l2object->n_v4l2_planes = 0;
+
+ v4l2object->no_initial_format = FALSE;
+
+ /* libv4l2 is now disabled by default, but the GST_V4L2_USE_LIBV4L2 environment variable enables it. */
+ #ifdef HAVE_LIBV4L2
++#ifdef TIZEN_FEATURE_USE_LIBV4L2
++ if (1) {
++#else /* TIZEN_FEATURE_USE_LIBV4L2 */
+ if (g_getenv ("GST_V4L2_USE_LIBV4L2")) {
++#endif /* TIZEN_FEATURE_USE_LIBV4L2 */
+ v4l2object->fd_open = v4l2_fd_open;
+ v4l2object->close = v4l2_close;
+ v4l2object->dup = v4l2_dup;
+ v4l2object->ioctl = v4l2_ioctl;
+ v4l2object->read = v4l2_read;
+ v4l2object->mmap = v4l2_mmap;
+ v4l2object->munmap = v4l2_munmap;
+ } else
+ #endif
+ {
+ v4l2object->fd_open = NULL;
+ v4l2object->close = close;
+ v4l2object->dup = dup;
+ v4l2object->ioctl = ioctl;
+ v4l2object->read = read;
+ v4l2object->mmap = mmap;
+ v4l2object->munmap = munmap;
+ }
+
+ return v4l2object;
+ }
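+
+ /* Typical construction from an element's instance-init (a sketch; the
+ * callback names are placeholders for the element's own functions):
+ *
+ * self->v4l2object = gst_v4l2_object_new (GST_ELEMENT (self),
+ * GST_OBJECT (self), V4L2_BUF_TYPE_VIDEO_CAPTURE, "/dev/video0",
+ * get_input, set_input, update_fps);
+ */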
+
+ static gboolean gst_v4l2_object_clear_format_list (GstV4l2Object * v4l2object);
+
+
+ void
+ gst_v4l2_object_destroy (GstV4l2Object * v4l2object)
+ {
+ g_return_if_fail (v4l2object != NULL);
+
+ g_free (v4l2object->videodev);
+ g_free (v4l2object->par);
+ g_free (v4l2object->channel);
+
+ if (v4l2object->formats) {
+ gst_v4l2_object_clear_format_list (v4l2object);
+ }
+
+ if (v4l2object->probed_caps) {
+ gst_caps_unref (v4l2object->probed_caps);
+ }
+
+ if (v4l2object->extra_controls) {
+ gst_structure_free (v4l2object->extra_controls);
+ }
+
+ g_free (v4l2object);
+ }
+
+
+ static gboolean
+ gst_v4l2_object_clear_format_list (GstV4l2Object * v4l2object)
+ {
+ g_slist_foreach (v4l2object->formats, (GFunc) g_free, NULL);
+ g_slist_free (v4l2object->formats);
+ v4l2object->formats = NULL;
+
+ return TRUE;
+ }
+
+ static gint
+ gst_v4l2_object_prop_to_cid (guint prop_id)
+ {
+ gint cid = -1;
+
+ switch (prop_id) {
+ case PROP_BRIGHTNESS:
+ cid = V4L2_CID_BRIGHTNESS;
+ break;
+ case PROP_CONTRAST:
+ cid = V4L2_CID_CONTRAST;
+ break;
+ case PROP_SATURATION:
+ cid = V4L2_CID_SATURATION;
+ break;
+ case PROP_HUE:
+ cid = V4L2_CID_HUE;
+ break;
+ default:
+ GST_WARNING ("unmapped property id: %d", prop_id);
+ }
+ return cid;
+ }
+
+
+ gboolean
+ gst_v4l2_object_set_property_helper (GstV4l2Object * v4l2object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+ {
+ switch (prop_id) {
+ case PROP_DEVICE:
+ g_free (v4l2object->videodev);
+ v4l2object->videodev = g_value_dup_string (value);
+ break;
+ case PROP_BRIGHTNESS:
+ case PROP_CONTRAST:
+ case PROP_SATURATION:
+ case PROP_HUE:
+ {
+ gint cid = gst_v4l2_object_prop_to_cid (prop_id);
+
+ if (cid != -1) {
+ if (GST_V4L2_IS_OPEN (v4l2object)) {
+ gst_v4l2_set_attribute (v4l2object, cid, g_value_get_int (value));
+ }
+ }
+ return TRUE;
+ }
+ break;
+ case PROP_TV_NORM:
+ v4l2object->tv_norm = g_value_get_enum (value);
+ break;
+ #if 0
+ case PROP_CHANNEL:
+ if (GST_V4L2_IS_OPEN (v4l2object)) {
+ GstTuner *tuner = GST_TUNER (v4l2object->element);
+ GstTunerChannel *channel = gst_tuner_find_channel_by_name (tuner,
+ (gchar *) g_value_get_string (value));
+
+ if (channel) {
+ /* like gst_tuner_set_channel (tuner, channel)
+ without g_object_notify */
+ gst_v4l2_tuner_set_channel (v4l2object, channel);
+ }
+ } else {
+ g_free (v4l2object->channel);
+ v4l2object->channel = g_value_dup_string (value);
+ }
+ break;
+ case PROP_FREQUENCY:
+ if (GST_V4L2_IS_OPEN (v4l2object)) {
+ GstTuner *tuner = GST_TUNER (v4l2object->element);
+ GstTunerChannel *channel = gst_tuner_get_channel (tuner);
+
+ if (channel &&
+ GST_TUNER_CHANNEL_HAS_FLAG (channel, GST_TUNER_CHANNEL_FREQUENCY)) {
+ /* like
+ gst_tuner_set_frequency (tuner, channel, g_value_get_ulong (value))
+ without g_object_notify */
+ gst_v4l2_tuner_set_frequency (v4l2object, channel,
+ g_value_get_ulong (value));
+ }
+ } else {
+ v4l2object->frequency = g_value_get_ulong (value);
+ }
+ break;
+ #endif
+
+ case PROP_IO_MODE:
+ v4l2object->req_mode = g_value_get_enum (value);
+ break;
+ case PROP_CAPTURE_IO_MODE:
+ g_return_val_if_fail (!V4L2_TYPE_IS_OUTPUT (v4l2object->type), FALSE);
+ v4l2object->req_mode = g_value_get_enum (value);
+ break;
+ case PROP_OUTPUT_IO_MODE:
+ g_return_val_if_fail (V4L2_TYPE_IS_OUTPUT (v4l2object->type), FALSE);
+ v4l2object->req_mode = g_value_get_enum (value);
+ break;
+ case PROP_EXTRA_CONTROLS:{
+ const GstStructure *s = gst_value_get_structure (value);
+
+ if (v4l2object->extra_controls)
+ gst_structure_free (v4l2object->extra_controls);
+
+ v4l2object->extra_controls = s ? gst_structure_copy (s) : NULL;
+ if (GST_V4L2_IS_OPEN (v4l2object))
+ gst_v4l2_set_controls (v4l2object, v4l2object->extra_controls);
+ break;
+ }
+ case PROP_PIXEL_ASPECT_RATIO:
+ if (v4l2object->par) {
+ g_value_unset (v4l2object->par);
+ g_free (v4l2object->par);
+ }
+ v4l2object->par = g_new0 (GValue, 1);
+ g_value_init (v4l2object->par, GST_TYPE_FRACTION);
+ if (!g_value_transform (value, v4l2object->par)) {
+ g_warning ("Could not transform string to aspect ratio");
+ gst_value_set_fraction (v4l2object->par, 1, 1);
+ }
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "set PAR to %d/%d",
+ gst_value_get_fraction_numerator (v4l2object->par),
+ gst_value_get_fraction_denominator (v4l2object->par));
+ break;
+ case PROP_FORCE_ASPECT_RATIO:
+ v4l2object->keep_aspect = g_value_get_boolean (value);
+ break;
+ default:
+ return FALSE;
+ break;
+ }
+ return TRUE;
+ }
+
+
+ gboolean
+ gst_v4l2_object_get_property_helper (GstV4l2Object * v4l2object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+ {
+ switch (prop_id) {
+ case PROP_DEVICE:
+ g_value_set_string (value, v4l2object->videodev);
+ break;
+ case PROP_DEVICE_NAME:
+ {
+ const guchar *name = NULL;
+
+ if (GST_V4L2_IS_OPEN (v4l2object))
+ name = v4l2object->vcap.card;
+
+ g_value_set_string (value, (gchar *) name);
+ break;
+ }
+ case PROP_DEVICE_FD:
+ {
+ if (GST_V4L2_IS_OPEN (v4l2object))
+ g_value_set_int (value, v4l2object->video_fd);
+ else
+ g_value_set_int (value, DEFAULT_PROP_DEVICE_FD);
+ break;
+ }
+ case PROP_FLAGS:
+ {
+ guint flags = 0;
+
+ if (GST_V4L2_IS_OPEN (v4l2object)) {
+ flags |= v4l2object->device_caps &
+ (V4L2_CAP_VIDEO_CAPTURE |
+ V4L2_CAP_VIDEO_OUTPUT |
+ V4L2_CAP_VIDEO_OVERLAY |
+ V4L2_CAP_VBI_CAPTURE |
+ V4L2_CAP_VBI_OUTPUT | V4L2_CAP_TUNER | V4L2_CAP_AUDIO);
+
+ if (v4l2object->device_caps & V4L2_CAP_VIDEO_CAPTURE_MPLANE)
+ flags |= V4L2_CAP_VIDEO_CAPTURE;
+
+ if (v4l2object->device_caps & V4L2_CAP_VIDEO_OUTPUT_MPLANE)
+ flags |= V4L2_CAP_VIDEO_OUTPUT;
+ }
+ g_value_set_flags (value, flags);
+ break;
+ }
+ case PROP_BRIGHTNESS:
+ case PROP_CONTRAST:
+ case PROP_SATURATION:
+ case PROP_HUE:
+ {
+ gint cid = gst_v4l2_object_prop_to_cid (prop_id);
+
+ if (cid != -1) {
+ if (GST_V4L2_IS_OPEN (v4l2object)) {
+ gint v;
+ if (gst_v4l2_get_attribute (v4l2object, cid, &v)) {
+ g_value_set_int (value, v);
+ }
+ }
+ }
+ return TRUE;
+ }
+ break;
+ case PROP_TV_NORM:
+ g_value_set_enum (value, v4l2object->tv_norm);
+ break;
+ case PROP_IO_MODE:
+ g_value_set_enum (value, v4l2object->req_mode);
+ break;
+ case PROP_CAPTURE_IO_MODE:
+ g_return_val_if_fail (!V4L2_TYPE_IS_OUTPUT (v4l2object->type), FALSE);
+ g_value_set_enum (value, v4l2object->req_mode);
+ break;
+ case PROP_OUTPUT_IO_MODE:
+ g_return_val_if_fail (V4L2_TYPE_IS_OUTPUT (v4l2object->type), FALSE);
+ g_value_set_enum (value, v4l2object->req_mode);
+ break;
+ case PROP_EXTRA_CONTROLS:
+ gst_value_set_structure (value, v4l2object->extra_controls);
+ break;
+ case PROP_PIXEL_ASPECT_RATIO:
+ if (v4l2object->par)
+ g_value_transform (v4l2object->par, value);
+ break;
+ case PROP_FORCE_ASPECT_RATIO:
+ g_value_set_boolean (value, v4l2object->keep_aspect);
+ break;
+ default:
+ return FALSE;
+ break;
+ }
+ return TRUE;
+ }
+
+ static void
+ gst_v4l2_get_driver_min_buffers (GstV4l2Object * v4l2object)
+ {
+ struct v4l2_control control = { 0, };
+
+ g_return_if_fail (GST_V4L2_IS_OPEN (v4l2object));
+
+ if (V4L2_TYPE_IS_OUTPUT (v4l2object->type))
+ control.id = V4L2_CID_MIN_BUFFERS_FOR_OUTPUT;
+ else
+ control.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
+
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_CTRL, &control) == 0) {
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
+ "driver requires a minimum of %d buffers", control.value);
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++#define DEFAULT_DECODER_OUTPUT_BUFFER_COUNT 5
++ if (v4l2object->tbm_output &&
++ !V4L2_TYPE_IS_OUTPUT (v4l2object->type) && control.value == 1) {
++ v4l2object->min_buffers = DEFAULT_DECODER_OUTPUT_BUFFER_COUNT;
++ GST_WARNING_OBJECT (v4l2object->dbg_obj, "overriding min buffer count to [%d]; it will become [%d] later",
++ v4l2object->min_buffers, v4l2object->min_buffers + 1);
++ } else {
++ v4l2object->min_buffers = control.value;
++ }
++#else /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ v4l2object->min_buffers = control.value;
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ } else {
+ v4l2object->min_buffers = 0;
+ }
+ }
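+
+ /* Note: for decoders, V4L2_CID_MIN_BUFFERS_FOR_CAPTURE typically reflects
+ * the number of reference frames the codec keeps (e.g. the H.264 DPB);
+ * allocating fewer buffers than this would stall the driver. */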
+
+ static void
+ gst_v4l2_set_defaults (GstV4l2Object * v4l2object)
+ {
+ GstTunerNorm *norm = NULL;
+ GstTunerChannel *channel = NULL;
+ GstTuner *tuner;
+
+ if (!GST_IS_TUNER (v4l2object->element))
+ return;
+
+ tuner = GST_TUNER (v4l2object->element);
+
+ if (v4l2object->tv_norm)
+ norm = gst_v4l2_tuner_get_norm_by_std_id (v4l2object, v4l2object->tv_norm);
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "tv_norm=0x%" G_GINT64_MODIFIER "x, "
+ "norm=%p", (guint64) v4l2object->tv_norm, norm);
+ if (norm) {
+ gst_tuner_set_norm (tuner, norm);
+ } else {
+ norm =
+ GST_TUNER_NORM (gst_tuner_get_norm (GST_TUNER (v4l2object->element)));
+ if (norm) {
+ v4l2object->tv_norm =
+ gst_v4l2_tuner_get_std_id_by_norm (v4l2object, norm);
+ gst_tuner_norm_changed (tuner, norm);
+ }
+ }
+
+ if (v4l2object->channel)
+ channel = gst_tuner_find_channel_by_name (tuner, v4l2object->channel);
+ if (channel) {
+ gst_tuner_set_channel (tuner, channel);
+ } else {
+ channel =
+ GST_TUNER_CHANNEL (gst_tuner_get_channel (GST_TUNER
+ (v4l2object->element)));
+ if (channel) {
+ g_free (v4l2object->channel);
+ v4l2object->channel = g_strdup (channel->label);
+ gst_tuner_channel_changed (tuner, channel);
+ }
+ }
+
+ if (channel
+ && GST_TUNER_CHANNEL_HAS_FLAG (channel, GST_TUNER_CHANNEL_FREQUENCY)) {
+ if (v4l2object->frequency != 0) {
+ gst_tuner_set_frequency (tuner, channel, v4l2object->frequency);
+ } else {
+ v4l2object->frequency = gst_tuner_get_frequency (tuner, channel);
+ if (v4l2object->frequency == 0) {
+ /* guess */
+ gst_tuner_set_frequency (tuner, channel, 1000);
+ }
+ }
+ }
+ }
+
+ gboolean
+ gst_v4l2_object_open (GstV4l2Object * v4l2object, GstV4l2Error * error)
+ {
+ if (gst_v4l2_open (v4l2object, error))
+ gst_v4l2_set_defaults (v4l2object);
+ else
+ return FALSE;
+
+ return TRUE;
+ }
+
+ gboolean
+ gst_v4l2_object_open_shared (GstV4l2Object * v4l2object, GstV4l2Object * other)
+ {
+ gboolean ret;
+
+ ret = gst_v4l2_dup (v4l2object, other);
+
+ return ret;
+ }
+
+ gboolean
+ gst_v4l2_object_close (GstV4l2Object * v4l2object)
+ {
+ if (!gst_v4l2_close (v4l2object))
+ return FALSE;
+
+ gst_caps_replace (&v4l2object->probed_caps, NULL);
+
+ /* reset our copy of the device caps */
+ v4l2object->device_caps = 0;
+
+ if (v4l2object->formats) {
+ gst_v4l2_object_clear_format_list (v4l2object);
+ }
+
+ if (v4l2object->par) {
+ g_value_unset (v4l2object->par);
+ g_free (v4l2object->par);
+ v4l2object->par = NULL;
+ }
+
+ if (v4l2object->channel) {
+ g_free (v4l2object->channel);
+ v4l2object->channel = NULL;
+ }
+
+ return TRUE;
+ }
+
+ static struct v4l2_fmtdesc *
+ gst_v4l2_object_get_format_from_fourcc (GstV4l2Object * v4l2object,
+ guint32 fourcc)
+ {
+ struct v4l2_fmtdesc *fmt;
+ GSList *walk;
+
+ if (fourcc == 0)
+ return NULL;
+
+ walk = gst_v4l2_object_get_format_list (v4l2object);
+ while (walk) {
+ fmt = (struct v4l2_fmtdesc *) walk->data;
+ if (fmt->pixelformat == fourcc)
+ return fmt;
+ /* special case for jpeg */
+ if (fmt->pixelformat == V4L2_PIX_FMT_MJPEG ||
+ fmt->pixelformat == V4L2_PIX_FMT_JPEG ||
+ fmt->pixelformat == V4L2_PIX_FMT_PJPG) {
+ if (fourcc == V4L2_PIX_FMT_JPEG || fourcc == V4L2_PIX_FMT_MJPEG ||
+ fourcc == V4L2_PIX_FMT_PJPG) {
+ return fmt;
+ }
+ }
+ walk = g_slist_next (walk);
+ }
+
+ return NULL;
+ }
+
+
+
+ /* completely made-up ranking, the values themselves are meaningless */
+ /* These ranks MUST be X such that X<<15 fits in a signed int - see
+ the comment at the end of gst_v4l2_object_format_get_rank. */
+ #define YUV_BASE_RANK 1000
+ #define JPEG_BASE_RANK 500
+ #define DV_BASE_RANK 200
+ #define RGB_BASE_RANK 100
+ #define YUV_ODD_BASE_RANK 50
+ #define RGB_ODD_BASE_RANK 25
+ #define BAYER_BASE_RANK 15
+ #define S910_BASE_RANK 10
+ #define GREY_BASE_RANK 5
+ #define PWC_BASE_RANK 1
+
+ static gint
+ gst_v4l2_object_format_get_rank (const struct v4l2_fmtdesc *fmt)
+ {
+ guint32 fourcc = fmt->pixelformat;
+ gboolean emulated = ((fmt->flags & V4L2_FMT_FLAG_EMULATED) != 0);
+ gint rank = 0;
+
+ switch (fourcc) {
+ case V4L2_PIX_FMT_MJPEG:
+ case V4L2_PIX_FMT_PJPG:
+ rank = JPEG_BASE_RANK;
+ break;
+ case V4L2_PIX_FMT_JPEG:
+ rank = JPEG_BASE_RANK + 1;
+ break;
+ case V4L2_PIX_FMT_MPEG: /* MPEG */
+ rank = JPEG_BASE_RANK + 2;
+ break;
+
+ case V4L2_PIX_FMT_RGB332:
+ case V4L2_PIX_FMT_ARGB555:
+ case V4L2_PIX_FMT_XRGB555:
+ case V4L2_PIX_FMT_RGB555:
+ case V4L2_PIX_FMT_ARGB555X:
+ case V4L2_PIX_FMT_XRGB555X:
+ case V4L2_PIX_FMT_RGB555X:
+ case V4L2_PIX_FMT_BGR666:
+ case V4L2_PIX_FMT_RGB565:
+ case V4L2_PIX_FMT_RGB565X:
+ case V4L2_PIX_FMT_RGB444:
+ case V4L2_PIX_FMT_Y4:
+ case V4L2_PIX_FMT_Y6:
+ case V4L2_PIX_FMT_Y10:
+ case V4L2_PIX_FMT_Y12:
+ case V4L2_PIX_FMT_Y10BPACK:
+ case V4L2_PIX_FMT_YUV555:
+ case V4L2_PIX_FMT_YUV565:
+ case V4L2_PIX_FMT_YUV32:
+ case V4L2_PIX_FMT_NV12MT_16X16:
+ case V4L2_PIX_FMT_NV42:
+ case V4L2_PIX_FMT_H264_MVC:
+ rank = RGB_ODD_BASE_RANK;
+ break;
+
+ case V4L2_PIX_FMT_RGB24:
+ case V4L2_PIX_FMT_BGR24:
+ rank = RGB_BASE_RANK - 1;
+ break;
+
+ case V4L2_PIX_FMT_RGB32:
+ case V4L2_PIX_FMT_BGR32:
+ case V4L2_PIX_FMT_ABGR32:
+ case V4L2_PIX_FMT_XBGR32:
+ case V4L2_PIX_FMT_BGRA32:
+ case V4L2_PIX_FMT_BGRX32:
+ case V4L2_PIX_FMT_RGBA32:
+ case V4L2_PIX_FMT_RGBX32:
+ case V4L2_PIX_FMT_ARGB32:
+ case V4L2_PIX_FMT_XRGB32:
+ rank = RGB_BASE_RANK;
+ break;
+
+ case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
++ case V4L2_PIX_FMT_INVZ:
+ rank = GREY_BASE_RANK;
+ break;
+
+ case V4L2_PIX_FMT_NV12MT: /* NV12 64x32 tile */
+ case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
+ case V4L2_PIX_FMT_NV21M: /* Same as NV21 */
+ case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
+ case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
+ case V4L2_PIX_FMT_NV16: /* 16 Y/CbCr 4:2:2 */
+ case V4L2_PIX_FMT_NV16M: /* Same as NV16 */
+ case V4L2_PIX_FMT_NV61: /* 16 Y/CrCb 4:2:2 */
+ case V4L2_PIX_FMT_NV61M: /* Same as NV61 */
+ case V4L2_PIX_FMT_NV24: /* 24 Y/CrCb 4:4:4 */
+ rank = YUV_ODD_BASE_RANK;
+ break;
+
+ case V4L2_PIX_FMT_YVU410: /* YVU9, 9 bits per pixel */
+ rank = YUV_BASE_RANK + 3;
+ break;
+ case V4L2_PIX_FMT_YUV410: /* YUV9, 9 bits per pixel */
+ rank = YUV_BASE_RANK + 2;
+ break;
+ case V4L2_PIX_FMT_YUV420: /* I420, 12 bits per pixel */
+ case V4L2_PIX_FMT_YUV420M:
+ rank = YUV_BASE_RANK + 7;
+ break;
+ case V4L2_PIX_FMT_NV12: /* Y/CbCr 4:2:0, 12 bits per pixel */
+ case V4L2_PIX_FMT_NV12M: /* Same as NV12 */
+ rank = YUV_BASE_RANK + 8;
+ break;
+ case V4L2_PIX_FMT_YUYV: /* YUY2, 16 bits per pixel */
+ rank = YUV_BASE_RANK + 10;
+ break;
+ case V4L2_PIX_FMT_YVU420: /* YV12, 12 bits per pixel */
+ rank = YUV_BASE_RANK + 6;
+ break;
+ case V4L2_PIX_FMT_UYVY: /* UYVY, 16 bits per pixel */
+ rank = YUV_BASE_RANK + 9;
+ break;
+ case V4L2_PIX_FMT_YUV444:
+ rank = YUV_BASE_RANK + 6;
+ break;
+ case V4L2_PIX_FMT_Y41P: /* Y41P, 12 bits per pixel */
+ rank = YUV_BASE_RANK + 5;
+ break;
+ case V4L2_PIX_FMT_YUV411P: /* Y41B, 12 bits per pixel */
+ rank = YUV_BASE_RANK + 4;
+ break;
+ case V4L2_PIX_FMT_YUV422P: /* Y42B, 16 bits per pixel */
+ rank = YUV_BASE_RANK + 8;
+ break;
+
+ case V4L2_PIX_FMT_DV:
+ rank = DV_BASE_RANK;
+ break;
+
+ case V4L2_PIX_FMT_WNVA: /* Winnov hw compress */
+ rank = 0;
+ break;
+
+ case V4L2_PIX_FMT_SBGGR8:
+ case V4L2_PIX_FMT_SGBRG8:
+ case V4L2_PIX_FMT_SGRBG8:
+ case V4L2_PIX_FMT_SRGGB8:
+ rank = BAYER_BASE_RANK;
+ break;
+
+ case V4L2_PIX_FMT_SN9C10X:
+ rank = S910_BASE_RANK;
+ break;
+
+ case V4L2_PIX_FMT_PWC1:
+ rank = PWC_BASE_RANK;
+ break;
+ case V4L2_PIX_FMT_PWC2:
+ rank = PWC_BASE_RANK;
+ break;
+
+ default:
+ rank = 0;
+ break;
+ }
+
+ /* All ranks are below 1<<15, so a shift by 15
+ * will a) make all non-emulated formats rank higher than
+ * emulated ones and b) not overflow
+ */
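+ /* e.g. native YUY2: (1000 + 10) << 15 = 33095680, which outranks every
+ * emulated format, since emulated ranks stay below 1 << 15. */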
+ if (!emulated)
+ rank <<= 15;
+
+ return rank;
+ }
+
+
+
+ static gint
+ format_cmp_func (gconstpointer a, gconstpointer b)
+ {
+ const struct v4l2_fmtdesc *fa = a;
+ const struct v4l2_fmtdesc *fb = b;
+
+ if (fa->pixelformat == fb->pixelformat)
+ return 0;
+
+ return gst_v4l2_object_format_get_rank (fb) -
+ gst_v4l2_object_format_get_rank (fa);
+ }
+
+ /******************************************************
+ * gst_v4l2_object_fill_format_list():
+ * create list of supported capture formats
+ * return value: TRUE on success, FALSE on error
+ ******************************************************/
+ static gboolean
+ gst_v4l2_object_fill_format_list (GstV4l2Object * v4l2object,
+ enum v4l2_buf_type type)
+ {
+ gint n;
+ struct v4l2_fmtdesc *format;
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "getting src format enumerations");
+
+ /* format enumeration */
+ for (n = 0;; n++) {
+ format = g_new0 (struct v4l2_fmtdesc, 1);
+
+ format->index = n;
+ format->type = type;
+
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_ENUM_FMT, format) < 0) {
+ if (errno == EINVAL) {
+ g_free (format);
+ break; /* end of enumeration */
+ } else {
+ goto failed;
+ }
+ }
+
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "index: %u", format->index);
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "type: %d", format->type);
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "flags: %08x", format->flags);
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "description: '%s'",
+ format->description);
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "pixelformat: %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (format->pixelformat));
+
+ /* sort formats according to our preference; we do this because caps
+ * are probed in the order the formats appear in the list, and the order
+ * of formats in the final probed caps matters for things like fixation */
+ v4l2object->formats = g_slist_insert_sorted (v4l2object->formats, format,
+ (GCompareFunc) format_cmp_func);
+ }
+
+ #ifndef GST_DISABLE_GST_DEBUG
+ {
+ GSList *l;
+
+ GST_INFO_OBJECT (v4l2object->dbg_obj, "got %d format(s):", n);
+ for (l = v4l2object->formats; l != NULL; l = l->next) {
+ format = l->data;
+
+ GST_INFO_OBJECT (v4l2object->dbg_obj,
+ " %" GST_FOURCC_FORMAT "%s", GST_FOURCC_ARGS (format->pixelformat),
+ ((format->flags & V4L2_FMT_FLAG_EMULATED)) ? " (emulated)" : "");
+ }
+ }
+ #endif
+
+ return TRUE;
+
+ /* ERRORS */
+ failed:
+ {
+ g_free (format);
+
+ /* without an element we cannot post an error message */
+ if (v4l2object->element == NULL)
+ return FALSE;
+
+ GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
+ (_("Failed to enumerate possible video formats device '%s' can work "
+ "with"), v4l2object->videodev),
+ ("Failed to get number %d in pixelformat enumeration for %s. (%d - %s)",
+ n, v4l2object->videodev, errno, g_strerror (errno)));
+
+ return FALSE;
+ }
+ }
+
+ /*
+ * Get the list of supported capture formats, a list of
+ * `struct v4l2_fmtdesc`.
+ */
+ static GSList *
+ gst_v4l2_object_get_format_list (GstV4l2Object * v4l2object)
+ {
+ if (!v4l2object->formats) {
+
+ /* check usual way */
+ gst_v4l2_object_fill_format_list (v4l2object, v4l2object->type);
+
+ /* if the driver reports a multi-planar buffer type
+ * and the format list is still empty, work around a driver bug
+ * by also enumerating formats as if the device did not support
+ * multi-planar */
+ if (!v4l2object->formats) {
+ switch (v4l2object->type) {
+ case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
+ gst_v4l2_object_fill_format_list (v4l2object,
+ V4L2_BUF_TYPE_VIDEO_CAPTURE);
+ break;
+
+ case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
+ gst_v4l2_object_fill_format_list (v4l2object,
+ V4L2_BUF_TYPE_VIDEO_OUTPUT);
+ break;
+
+ default:
+ break;
+ }
+ }
+ }
+ return v4l2object->formats;
+ }
+
+ static GstVideoFormat
+ gst_v4l2_object_v4l2fourcc_to_video_format (guint32 fourcc)
+ {
+ GstVideoFormat format;
+
+ switch (fourcc) {
+ case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
+ format = GST_VIDEO_FORMAT_GRAY8;
+ break;
+ case V4L2_PIX_FMT_Y16:
+ format = GST_VIDEO_FORMAT_GRAY16_LE;
+ break;
+ case V4L2_PIX_FMT_Y16_BE:
+ format = GST_VIDEO_FORMAT_GRAY16_BE;
+ break;
+ case V4L2_PIX_FMT_XRGB555:
+ case V4L2_PIX_FMT_RGB555:
+ format = GST_VIDEO_FORMAT_RGB15;
+ break;
+ case V4L2_PIX_FMT_XRGB555X:
+ case V4L2_PIX_FMT_RGB555X:
+ format = GST_VIDEO_FORMAT_BGR15;
+ break;
+ case V4L2_PIX_FMT_RGB565:
+ format = GST_VIDEO_FORMAT_RGB16;
+ break;
+ case V4L2_PIX_FMT_RGB24:
+ format = GST_VIDEO_FORMAT_RGB;
+ break;
+ case V4L2_PIX_FMT_BGR24:
+ format = GST_VIDEO_FORMAT_BGR;
+ break;
+ case V4L2_PIX_FMT_XRGB32:
+ case V4L2_PIX_FMT_RGB32:
+ format = GST_VIDEO_FORMAT_xRGB;
+ break;
+ case V4L2_PIX_FMT_RGBX32:
+ format = GST_VIDEO_FORMAT_RGBx;
+ break;
+ case V4L2_PIX_FMT_XBGR32:
+ case V4L2_PIX_FMT_BGR32:
+ format = GST_VIDEO_FORMAT_BGRx;
+ break;
+ case V4L2_PIX_FMT_BGRX32:
+ format = GST_VIDEO_FORMAT_xBGR;
+ break;
+ case V4L2_PIX_FMT_ABGR32:
+ format = GST_VIDEO_FORMAT_BGRA;
+ break;
+ case V4L2_PIX_FMT_BGRA32:
+ format = GST_VIDEO_FORMAT_ABGR;
+ break;
+ case V4L2_PIX_FMT_RGBA32:
+ format = GST_VIDEO_FORMAT_RGBA;
+ break;
+ case V4L2_PIX_FMT_ARGB32:
+ format = GST_VIDEO_FORMAT_ARGB;
+ break;
+ case V4L2_PIX_FMT_NV12:
+ case V4L2_PIX_FMT_NV12M:
+ format = GST_VIDEO_FORMAT_NV12;
+ break;
+ case V4L2_PIX_FMT_NV12MT:
+ format = GST_VIDEO_FORMAT_NV12_64Z32;
+ break;
+ case V4L2_PIX_FMT_NV21:
+ case V4L2_PIX_FMT_NV21M:
+ format = GST_VIDEO_FORMAT_NV21;
+ break;
+ case V4L2_PIX_FMT_YVU410:
+ format = GST_VIDEO_FORMAT_YVU9;
+ break;
+ case V4L2_PIX_FMT_YUV410:
+ format = GST_VIDEO_FORMAT_YUV9;
+ break;
+ case V4L2_PIX_FMT_YUV420:
+ case V4L2_PIX_FMT_YUV420M:
+ format = GST_VIDEO_FORMAT_I420;
+ break;
+ case V4L2_PIX_FMT_YUYV:
+ format = GST_VIDEO_FORMAT_YUY2;
+ break;
+ case V4L2_PIX_FMT_YVU420:
+ format = GST_VIDEO_FORMAT_YV12;
+ break;
+ case V4L2_PIX_FMT_UYVY:
+ format = GST_VIDEO_FORMAT_UYVY;
+ break;
+ case V4L2_PIX_FMT_YUV411P:
+ format = GST_VIDEO_FORMAT_Y41B;
+ break;
+ case V4L2_PIX_FMT_YUV422P:
+ format = GST_VIDEO_FORMAT_Y42B;
+ break;
+ case V4L2_PIX_FMT_YVYU:
+ format = GST_VIDEO_FORMAT_YVYU;
+ break;
+ case V4L2_PIX_FMT_NV16:
+ case V4L2_PIX_FMT_NV16M:
+ format = GST_VIDEO_FORMAT_NV16;
+ break;
+ case V4L2_PIX_FMT_NV61:
+ case V4L2_PIX_FMT_NV61M:
+ format = GST_VIDEO_FORMAT_NV61;
+ break;
+ case V4L2_PIX_FMT_NV24:
+ format = GST_VIDEO_FORMAT_NV24;
+ break;
++ case V4L2_PIX_FMT_INVZ:
++ format = GST_VIDEO_FORMAT_INVZ;
++ break;
+ default:
+ format = GST_VIDEO_FORMAT_UNKNOWN;
+ break;
+ }
+
+ return format;
+ }
+
+ static gboolean
+ gst_v4l2_object_v4l2fourcc_is_rgb (guint32 fourcc)
+ {
+ gboolean ret = FALSE;
+
+ switch (fourcc) {
+ case V4L2_PIX_FMT_XRGB555:
+ case V4L2_PIX_FMT_RGB555:
+ case V4L2_PIX_FMT_XRGB555X:
+ case V4L2_PIX_FMT_RGB555X:
+ case V4L2_PIX_FMT_RGB565:
+ case V4L2_PIX_FMT_RGB24:
+ case V4L2_PIX_FMT_BGR24:
+ case V4L2_PIX_FMT_XRGB32:
+ case V4L2_PIX_FMT_RGB32:
+ case V4L2_PIX_FMT_RGBA32:
+ case V4L2_PIX_FMT_RGBX32:
+ case V4L2_PIX_FMT_XBGR32:
+ case V4L2_PIX_FMT_BGR32:
+ case V4L2_PIX_FMT_BGRA32:
+ case V4L2_PIX_FMT_BGRX32:
+ case V4L2_PIX_FMT_ABGR32:
+ case V4L2_PIX_FMT_ARGB32:
+ case V4L2_PIX_FMT_SBGGR8:
+ case V4L2_PIX_FMT_SGBRG8:
+ case V4L2_PIX_FMT_SGRBG8:
+ case V4L2_PIX_FMT_SRGGB8:
+ ret = TRUE;
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+ }
+
+ static GstStructure *
+ gst_v4l2_object_v4l2fourcc_to_bare_struct (guint32 fourcc)
+ {
+ GstStructure *structure = NULL;
+
+ switch (fourcc) {
+ case V4L2_PIX_FMT_MJPEG: /* Motion-JPEG */
+ case V4L2_PIX_FMT_PJPG: /* Progressive-JPEG */
+ case V4L2_PIX_FMT_JPEG: /* JFIF JPEG */
+ structure = gst_structure_new_empty ("image/jpeg");
+ break;
+ case V4L2_PIX_FMT_MPEG1:
+ structure = gst_structure_new ("video/mpeg",
+ "mpegversion", G_TYPE_INT, 1, NULL);
+ break;
+ case V4L2_PIX_FMT_MPEG2:
+ structure = gst_structure_new ("video/mpeg",
+ "mpegversion", G_TYPE_INT, 2, NULL);
+ break;
+ case V4L2_PIX_FMT_MPEG4:
+ case V4L2_PIX_FMT_XVID:
+ structure = gst_structure_new ("video/mpeg",
+ "mpegversion", G_TYPE_INT, 4, "systemstream",
+ G_TYPE_BOOLEAN, FALSE, NULL);
+ break;
+ case V4L2_PIX_FMT_FWHT:
+ structure = gst_structure_new_empty ("video/x-fwht");
+ break;
+ case V4L2_PIX_FMT_H263:
+ structure = gst_structure_new ("video/x-h263",
+ "variant", G_TYPE_STRING, "itu", NULL);
+ break;
+ case V4L2_PIX_FMT_H264: /* H.264 */
+ structure = gst_structure_new ("video/x-h264",
+ "stream-format", G_TYPE_STRING, "byte-stream", "alignment",
+ G_TYPE_STRING, "au", NULL);
+ break;
+ case V4L2_PIX_FMT_H264_NO_SC:
+ structure = gst_structure_new ("video/x-h264",
+ "stream-format", G_TYPE_STRING, "avc", "alignment",
+ G_TYPE_STRING, "au", NULL);
+ break;
+ case V4L2_PIX_FMT_HEVC: /* H.265 */
+ structure = gst_structure_new ("video/x-h265",
+ "stream-format", G_TYPE_STRING, "byte-stream", "alignment",
+ G_TYPE_STRING, "au", NULL);
+ break;
+ case V4L2_PIX_FMT_VC1_ANNEX_G:
+ case V4L2_PIX_FMT_VC1_ANNEX_L:
+ structure = gst_structure_new ("video/x-wmv",
+ "wmvversion", G_TYPE_INT, 3, "format", G_TYPE_STRING, "WVC1", NULL);
+ break;
+ case V4L2_PIX_FMT_VP8:
+ structure = gst_structure_new_empty ("video/x-vp8");
+ break;
+ case V4L2_PIX_FMT_VP9:
+ structure = gst_structure_new_empty ("video/x-vp9");
+ break;
+ case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
+ case V4L2_PIX_FMT_Y16:
+ case V4L2_PIX_FMT_Y16_BE:
+ case V4L2_PIX_FMT_XRGB555:
+ case V4L2_PIX_FMT_RGB555:
+ case V4L2_PIX_FMT_XRGB555X:
+ case V4L2_PIX_FMT_RGB555X:
+ case V4L2_PIX_FMT_RGB565:
+ case V4L2_PIX_FMT_RGB24:
+ case V4L2_PIX_FMT_BGR24:
+ case V4L2_PIX_FMT_RGB32:
+ case V4L2_PIX_FMT_XRGB32:
+ case V4L2_PIX_FMT_ARGB32:
+ case V4L2_PIX_FMT_RGBX32:
+ case V4L2_PIX_FMT_RGBA32:
+ case V4L2_PIX_FMT_BGR32:
+ case V4L2_PIX_FMT_BGRX32:
+ case V4L2_PIX_FMT_BGRA32:
+ case V4L2_PIX_FMT_XBGR32:
+ case V4L2_PIX_FMT_ABGR32:
+ case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
+ case V4L2_PIX_FMT_NV12M:
+ case V4L2_PIX_FMT_NV12MT:
+ case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
+ case V4L2_PIX_FMT_NV21M:
+ case V4L2_PIX_FMT_NV16: /* 16 Y/CbCr 4:2:2 */
+ case V4L2_PIX_FMT_NV16M:
+ case V4L2_PIX_FMT_NV61: /* 16 Y/CrCb 4:2:2 */
+ case V4L2_PIX_FMT_NV61M:
+ case V4L2_PIX_FMT_NV24: /* 24 Y/CrCb 4:4:4 */
+ case V4L2_PIX_FMT_YVU410:
+ case V4L2_PIX_FMT_YUV410:
+ case V4L2_PIX_FMT_YUV420: /* I420/IYUV */
+ case V4L2_PIX_FMT_YUV420M:
+ case V4L2_PIX_FMT_YUYV:
+ case V4L2_PIX_FMT_YVU420:
+ case V4L2_PIX_FMT_UYVY:
+ case V4L2_PIX_FMT_YUV422P:
+ case V4L2_PIX_FMT_YVYU:
++ case V4L2_PIX_FMT_YUV411P:
++ case V4L2_PIX_FMT_INVZ:{
+ GstVideoFormat format;
+ format = gst_v4l2_object_v4l2fourcc_to_video_format (fourcc);
+ if (format != GST_VIDEO_FORMAT_UNKNOWN)
+ structure = gst_structure_new ("video/x-raw",
+ "format", G_TYPE_STRING, gst_video_format_to_string (format), NULL);
+ break;
+ }
+ case V4L2_PIX_FMT_DV:
+ structure =
+ gst_structure_new ("video/x-dv", "systemstream", G_TYPE_BOOLEAN, TRUE,
+ NULL);
+ break;
+ case V4L2_PIX_FMT_MPEG: /* MPEG */
+ structure = gst_structure_new ("video/mpegts",
+ "systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
+ break;
+ case V4L2_PIX_FMT_WNVA: /* Winnov hw compress */
+ break;
+ case V4L2_PIX_FMT_SBGGR8:
+ case V4L2_PIX_FMT_SGBRG8:
+ case V4L2_PIX_FMT_SGRBG8:
+ case V4L2_PIX_FMT_SRGGB8:
+ structure = gst_structure_new ("video/x-bayer", "format", G_TYPE_STRING,
+ fourcc == V4L2_PIX_FMT_SBGGR8 ? "bggr" :
+ fourcc == V4L2_PIX_FMT_SGBRG8 ? "gbrg" :
+ fourcc == V4L2_PIX_FMT_SGRBG8 ? "grbg" :
+ /* fourcc == V4L2_PIX_FMT_SRGGB8 ? */ "rggb", NULL);
+ break;
+ case V4L2_PIX_FMT_SN9C10X:
+ structure = gst_structure_new_empty ("video/x-sonix");
+ break;
+ case V4L2_PIX_FMT_PWC1:
+ structure = gst_structure_new_empty ("video/x-pwc1");
+ break;
+ case V4L2_PIX_FMT_PWC2:
+ structure = gst_structure_new_empty ("video/x-pwc2");
+ break;
+ case V4L2_PIX_FMT_RGB332:
+ case V4L2_PIX_FMT_BGR666:
+ case V4L2_PIX_FMT_ARGB555X:
+ case V4L2_PIX_FMT_RGB565X:
+ case V4L2_PIX_FMT_RGB444:
+ case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
+ case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
+ case V4L2_PIX_FMT_Y4:
+ case V4L2_PIX_FMT_Y6:
+ case V4L2_PIX_FMT_Y10:
+ case V4L2_PIX_FMT_Y12:
+ case V4L2_PIX_FMT_Y10BPACK:
+ case V4L2_PIX_FMT_YUV444:
+ case V4L2_PIX_FMT_YUV555:
+ case V4L2_PIX_FMT_YUV565:
+ case V4L2_PIX_FMT_Y41P:
+ case V4L2_PIX_FMT_YUV32:
+ case V4L2_PIX_FMT_NV12MT_16X16:
+ case V4L2_PIX_FMT_NV42:
+ case V4L2_PIX_FMT_H264_MVC:
+ default:
+ GST_DEBUG ("Unsupported fourcc 0x%08x %" GST_FOURCC_FORMAT,
+ fourcc, GST_FOURCC_ARGS (fourcc));
+ break;
+ }
+
+ return structure;
+ }
+
+ GstStructure *
+ gst_v4l2_object_v4l2fourcc_to_structure (guint32 fourcc)
+ {
+ GstStructure *template;
+ gint i;
+
+ template = gst_v4l2_object_v4l2fourcc_to_bare_struct (fourcc);
+
+ if (template == NULL)
+ goto done;
+
+ for (i = 0; i < GST_V4L2_FORMAT_COUNT; i++) {
+ if (gst_v4l2_formats[i].format != fourcc)
+ continue;
+
+ if (gst_v4l2_formats[i].dimensions) {
+ gst_structure_set (template,
+ "width", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
+ "height", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
+ "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
+ }
+ break;
+ }
+
+ done:
+ return template;
+ }
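+
+ /* For instance, gst_v4l2_object_v4l2fourcc_to_structure (V4L2_PIX_FMT_YUYV)
+ * yields "video/x-raw, format=YUY2" with full width/height/framerate
+ * ranges, suitable as a pad-template structure. */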
+
+ /* Add an 'alternate' interlace-mode variant of the caps with the Interlaced feature */
+ static void
+ add_alternate_variant (GstV4l2Object * v4l2object, GstCaps * caps,
+ GstStructure * structure)
+ {
+ GstStructure *alt_s;
+
+ if (v4l2object && v4l2object->never_interlaced)
+ return;
+
+ if (!gst_structure_has_name (structure, "video/x-raw"))
+ return;
+
+ alt_s = gst_structure_copy (structure);
+ gst_structure_set (alt_s, "interlace-mode", G_TYPE_STRING, "alternate", NULL);
+
+ gst_caps_append_structure_full (caps, alt_s,
+ gst_caps_features_new (GST_CAPS_FEATURE_FORMAT_INTERLACED, NULL));
+ }
+
+ static GstCaps *
+ gst_v4l2_object_get_caps_helper (GstV4L2FormatFlags flags)
+ {
+ GstStructure *structure;
+ GstCaps *caps, *caps_interlaced;
+ guint i;
+
+ caps = gst_caps_new_empty ();
+ caps_interlaced = gst_caps_new_empty ();
+ for (i = 0; i < GST_V4L2_FORMAT_COUNT; i++) {
+
+ if ((gst_v4l2_formats[i].flags & flags) == 0)
+ continue;
+
+ structure =
+ gst_v4l2_object_v4l2fourcc_to_bare_struct (gst_v4l2_formats[i].format);
+
+ if (structure) {
+ GstStructure *alt_s = NULL;
+
+ if (gst_v4l2_formats[i].dimensions) {
+ gst_structure_set (structure,
+ "width", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
+ "height", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
+ "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
+ }
+
+ switch (gst_v4l2_formats[i].format) {
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ case V4L2_PIX_FMT_YUV420:
++ alt_s = gst_structure_copy (structure);
++ gst_structure_set (alt_s, "format", G_TYPE_STRING, "S420", NULL);
++ break;
++ case V4L2_PIX_FMT_NV12:
++ alt_s = gst_structure_copy (structure);
++ gst_structure_set (alt_s, "format", G_TYPE_STRING, "SN12", NULL);
++ break;
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ case V4L2_PIX_FMT_RGB32:
+ alt_s = gst_structure_copy (structure);
+ gst_structure_set (alt_s, "format", G_TYPE_STRING, "ARGB", NULL);
+ break;
+ case V4L2_PIX_FMT_BGR32:
+ alt_s = gst_structure_copy (structure);
+ gst_structure_set (alt_s, "format", G_TYPE_STRING, "BGRA", NULL);
+ default:
+ break;
+ }
+
+ gst_caps_append_structure (caps, structure);
+
+ if (alt_s) {
+ gst_caps_append_structure (caps, alt_s);
+ add_alternate_variant (NULL, caps_interlaced, alt_s);
+ }
+
+ add_alternate_variant (NULL, caps_interlaced, structure);
+ }
+ }
+
+ caps = gst_caps_simplify (caps);
+ caps_interlaced = gst_caps_simplify (caps_interlaced);
+
+ return gst_caps_merge (caps, caps_interlaced);
+ }
+
+ GstCaps *
+ gst_v4l2_object_get_all_caps (void)
+ {
+ static GstCaps *caps = NULL;
+
+ if (g_once_init_enter (&caps)) {
+ GstCaps *all_caps = gst_v4l2_object_get_caps_helper (GST_V4L2_ALL);
+ GST_MINI_OBJECT_FLAG_SET (all_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
+ g_once_init_leave (&caps, all_caps);
+ }
+
+ return caps;
+ }
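+
+ /* The same once-initialized pattern backs the raw and codec variants below;
+ * GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED tells the leak tracer that these
+ * static caps are intentionally never freed. */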
+
+ GstCaps *
+ gst_v4l2_object_get_raw_caps (void)
+ {
+ static GstCaps *caps = NULL;
+
+ if (g_once_init_enter (&caps)) {
+ GstCaps *raw_caps = gst_v4l2_object_get_caps_helper (GST_V4L2_RAW);
+ GST_MINI_OBJECT_FLAG_SET (raw_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
+ g_once_init_leave (&caps, raw_caps);
+ }
+
+ return caps;
+ }
+
+ GstCaps *
+ gst_v4l2_object_get_codec_caps (void)
+ {
+ static GstCaps *caps = NULL;
+
+ if (g_once_init_enter (&caps)) {
+ GstCaps *codec_caps = gst_v4l2_object_get_caps_helper (GST_V4L2_CODEC);
+ GST_MINI_OBJECT_FLAG_SET (codec_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
+ g_once_init_leave (&caps, codec_caps);
+ }
+
+ return caps;
+ }
+
+ /* collect format information for the given caps
+ * @caps: the input caps
+ * @format: location for the matching v4l2 format descriptor
+ * @info: location for the parsed video info (dimensions, framerate, size)
+ */
+ static gboolean
+ gst_v4l2_object_get_caps_info (GstV4l2Object * v4l2object, GstCaps * caps,
+ struct v4l2_fmtdesc **format, GstVideoInfo * info)
+ {
+ GstStructure *structure;
+ guint32 fourcc = 0, fourcc_nc = 0;
+ const gchar *mimetype;
+ struct v4l2_fmtdesc *fmt = NULL;
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ mimetype = gst_structure_get_name (structure);
+
+ if (!gst_video_info_from_caps (info, caps))
+ goto invalid_format;
+
+ if (g_str_equal (mimetype, "video/x-raw")) {
+ switch (GST_VIDEO_INFO_FORMAT (info)) {
+ case GST_VIDEO_FORMAT_I420:
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ case GST_VIDEO_FORMAT_S420:
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ fourcc = V4L2_PIX_FMT_YUV420;
+ fourcc_nc = V4L2_PIX_FMT_YUV420M;
+ break;
+ case GST_VIDEO_FORMAT_YUY2:
+ fourcc = V4L2_PIX_FMT_YUYV;
+ break;
+ case GST_VIDEO_FORMAT_UYVY:
+ fourcc = V4L2_PIX_FMT_UYVY;
+ break;
+ case GST_VIDEO_FORMAT_YV12:
+ fourcc = V4L2_PIX_FMT_YVU420;
+ break;
+ case GST_VIDEO_FORMAT_Y41B:
+ fourcc = V4L2_PIX_FMT_YUV411P;
+ break;
+ case GST_VIDEO_FORMAT_Y42B:
+ fourcc = V4L2_PIX_FMT_YUV422P;
+ break;
+ case GST_VIDEO_FORMAT_NV12:
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ case GST_VIDEO_FORMAT_SN12:
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ fourcc = V4L2_PIX_FMT_NV12;
+ fourcc_nc = V4L2_PIX_FMT_NV12M;
+ break;
+ case GST_VIDEO_FORMAT_NV12_64Z32:
+ fourcc_nc = V4L2_PIX_FMT_NV12MT;
+ break;
+ case GST_VIDEO_FORMAT_NV21:
+ fourcc = V4L2_PIX_FMT_NV21;
+ fourcc_nc = V4L2_PIX_FMT_NV21M;
+ break;
+ case GST_VIDEO_FORMAT_NV16:
+ fourcc = V4L2_PIX_FMT_NV16;
+ fourcc_nc = V4L2_PIX_FMT_NV16M;
+ break;
+ case GST_VIDEO_FORMAT_NV61:
+ fourcc = V4L2_PIX_FMT_NV61;
+ fourcc_nc = V4L2_PIX_FMT_NV61M;
+ break;
+ case GST_VIDEO_FORMAT_NV24:
+ fourcc = V4L2_PIX_FMT_NV24;
+ break;
+ case GST_VIDEO_FORMAT_YVYU:
+ fourcc = V4L2_PIX_FMT_YVYU;
+ break;
+ case GST_VIDEO_FORMAT_RGB15:
+ fourcc = V4L2_PIX_FMT_RGB555;
+ fourcc_nc = V4L2_PIX_FMT_XRGB555;
+ break;
+ case GST_VIDEO_FORMAT_RGB16:
+ fourcc = V4L2_PIX_FMT_RGB565;
+ break;
+ case GST_VIDEO_FORMAT_RGB:
+ fourcc = V4L2_PIX_FMT_RGB24;
+ break;
+ case GST_VIDEO_FORMAT_BGR:
+ fourcc = V4L2_PIX_FMT_BGR24;
+ break;
+ case GST_VIDEO_FORMAT_xRGB:
+ fourcc = V4L2_PIX_FMT_RGB32;
+ fourcc_nc = V4L2_PIX_FMT_XRGB32;
+ break;
+ case GST_VIDEO_FORMAT_RGBx:
+ fourcc = V4L2_PIX_FMT_RGBX32;
+ break;
+ case GST_VIDEO_FORMAT_ARGB:
+ fourcc = V4L2_PIX_FMT_RGB32;
+ fourcc_nc = V4L2_PIX_FMT_ARGB32;
+ break;
+ case GST_VIDEO_FORMAT_RGBA:
+ fourcc = V4L2_PIX_FMT_RGBA32;
+ break;
+ case GST_VIDEO_FORMAT_BGRx:
+ fourcc = V4L2_PIX_FMT_BGR32;
+ fourcc_nc = V4L2_PIX_FMT_XBGR32;
+ break;
+ case GST_VIDEO_FORMAT_xBGR:
+ fourcc = V4L2_PIX_FMT_BGRX32;
+ break;
+ case GST_VIDEO_FORMAT_BGRA:
+ fourcc = V4L2_PIX_FMT_BGR32;
+ fourcc_nc = V4L2_PIX_FMT_ABGR32;
+ break;
+ case GST_VIDEO_FORMAT_ABGR:
+ fourcc = V4L2_PIX_FMT_BGRA32;
+ break;
+ case GST_VIDEO_FORMAT_GRAY8:
+ fourcc = V4L2_PIX_FMT_GREY;
+ break;
+ case GST_VIDEO_FORMAT_GRAY16_LE:
+ fourcc = V4L2_PIX_FMT_Y16;
+ break;
+ case GST_VIDEO_FORMAT_GRAY16_BE:
+ fourcc = V4L2_PIX_FMT_Y16_BE;
+ break;
+ case GST_VIDEO_FORMAT_BGR15:
+ fourcc = V4L2_PIX_FMT_RGB555X;
+ fourcc_nc = V4L2_PIX_FMT_XRGB555X;
+ break;
++ case GST_VIDEO_FORMAT_INVZ:
++ fourcc = V4L2_PIX_FMT_INVZ;
++ break;
+ default:
+ break;
+ }
+ } else {
+ if (g_str_equal (mimetype, "video/mpegts")) {
+ fourcc = V4L2_PIX_FMT_MPEG;
+ } else if (g_str_equal (mimetype, "video/x-dv")) {
+ fourcc = V4L2_PIX_FMT_DV;
+ } else if (g_str_equal (mimetype, "image/jpeg")) {
+ fourcc = V4L2_PIX_FMT_JPEG;
+ } else if (g_str_equal (mimetype, "video/mpeg")) {
+ gint version;
+ if (gst_structure_get_int (structure, "mpegversion", &version)) {
+ switch (version) {
+ case 1:
+ fourcc = V4L2_PIX_FMT_MPEG1;
+ break;
+ case 2:
+ fourcc = V4L2_PIX_FMT_MPEG2;
+ break;
+ case 4:
+ fourcc = V4L2_PIX_FMT_MPEG4;
+ fourcc_nc = V4L2_PIX_FMT_XVID;
+ break;
+ default:
+ break;
+ }
+ }
+ } else if (g_str_equal (mimetype, "video/x-fwht")) {
+ fourcc = V4L2_PIX_FMT_FWHT;
+ } else if (g_str_equal (mimetype, "video/x-h263")) {
+ fourcc = V4L2_PIX_FMT_H263;
+ } else if (g_str_equal (mimetype, "video/x-h264")) {
+ const gchar *stream_format =
+ gst_structure_get_string (structure, "stream-format");
+ if (g_str_equal (stream_format, "avc"))
+ fourcc = V4L2_PIX_FMT_H264_NO_SC;
+ else
+ fourcc = V4L2_PIX_FMT_H264;
+ } else if (g_str_equal (mimetype, "video/x-h265")) {
+ fourcc = V4L2_PIX_FMT_HEVC;
+ } else if (g_str_equal (mimetype, "video/x-vp8")) {
+ fourcc = V4L2_PIX_FMT_VP8;
+ } else if (g_str_equal (mimetype, "video/x-vp9")) {
+ fourcc = V4L2_PIX_FMT_VP9;
+ } else if (g_str_equal (mimetype, "video/x-bayer")) {
+ const gchar *format = gst_structure_get_string (structure, "format");
+ if (format) {
+ if (!g_ascii_strcasecmp (format, "bggr"))
+ fourcc = V4L2_PIX_FMT_SBGGR8;
+ else if (!g_ascii_strcasecmp (format, "gbrg"))
+ fourcc = V4L2_PIX_FMT_SGBRG8;
+ else if (!g_ascii_strcasecmp (format, "grbg"))
+ fourcc = V4L2_PIX_FMT_SGRBG8;
+ else if (!g_ascii_strcasecmp (format, "rggb"))
+ fourcc = V4L2_PIX_FMT_SRGGB8;
+ }
+ } else if (g_str_equal (mimetype, "video/x-sonix")) {
+ fourcc = V4L2_PIX_FMT_SN9C10X;
+ } else if (g_str_equal (mimetype, "video/x-pwc1")) {
+ fourcc = V4L2_PIX_FMT_PWC1;
+ } else if (g_str_equal (mimetype, "video/x-pwc2")) {
+ fourcc = V4L2_PIX_FMT_PWC2;
+ }
+ }
+
+
+ /* Prefer the non-contiguous (multi-planar) variant if supported */
+ v4l2object->prefered_non_contiguous = TRUE;
+
+ if (fourcc_nc)
+ fmt = gst_v4l2_object_get_format_from_fourcc (v4l2object, fourcc_nc);
+ else if (fourcc == 0)
+ goto unhandled_format;
+
+ if (fmt == NULL) {
+ fmt = gst_v4l2_object_get_format_from_fourcc (v4l2object, fourcc);
+ v4l2object->prefered_non_contiguous = FALSE;
+ }
+
+ if (fmt == NULL)
+ goto unsupported_format;
+
+ *format = fmt;
+
+ return TRUE;
+
+ /* ERRORS */
+ invalid_format:
+ {
+ GST_DEBUG_OBJECT (v4l2object, "invalid format");
+ return FALSE;
+ }
+ unhandled_format:
+ {
+ GST_DEBUG_OBJECT (v4l2object, "unhandled format");
+ return FALSE;
+ }
+ unsupported_format:
+ {
+ GST_DEBUG_OBJECT (v4l2object, "unsupported format");
+ return FALSE;
+ }
+ }
+
+ static gboolean
+ gst_v4l2_object_get_nearest_size (GstV4l2Object * v4l2object,
+ guint32 pixelformat, gint * width, gint * height);
+
+ static void
+ gst_v4l2_object_add_aspect_ratio (GstV4l2Object * v4l2object, GstStructure * s)
+ {
+ if (v4l2object->keep_aspect && v4l2object->par)
+ gst_structure_set_value (s, "pixel-aspect-ratio", v4l2object->par);
+ }
+
+ /* returns TRUE if the value was changed in place, otherwise FALSE */
+ static gboolean
+ gst_v4l2src_value_simplify (GValue * val)
+ {
+ /* simplify a one-element list to its single value */
+ if (GST_VALUE_HOLDS_LIST (val) && gst_value_list_get_size (val) == 1) {
+ const GValue *list_val;
+ GValue new_val = G_VALUE_INIT;
+
+ list_val = gst_value_list_get_value (val, 0);
+ g_value_init (&new_val, G_VALUE_TYPE (list_val));
+ g_value_copy (list_val, &new_val);
+ g_value_unset (val);
+ *val = new_val;
+ return TRUE;
+ }
+
+ return FALSE;
+ }
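+
+ /* e.g. a framerate value holding the one-element list { 30/1 } is
+ * simplified to the plain fraction 30/1. */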
+
+ static gboolean
+ gst_v4l2_object_get_interlace_mode (enum v4l2_field field,
+ GstVideoInterlaceMode * interlace_mode)
+ {
+ switch (field) {
+ case V4L2_FIELD_ANY:
+ GST_ERROR
+ ("Driver bug detected - check driver with v4l2-compliance from http://git.linuxtv.org/v4l-utils.git\n");
+ return FALSE;
+ case V4L2_FIELD_NONE:
+ *interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
+ return TRUE;
+ case V4L2_FIELD_INTERLACED:
+ case V4L2_FIELD_INTERLACED_TB:
+ case V4L2_FIELD_INTERLACED_BT:
+ *interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
+ return TRUE;
+ case V4L2_FIELD_ALTERNATE:
+ *interlace_mode = GST_VIDEO_INTERLACE_MODE_ALTERNATE;
+ return TRUE;
+ default:
+ GST_ERROR ("Unknown enum v4l2_field %d", field);
+ return FALSE;
+ }
+ }
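+
+ /* V4L2_FIELD_ALTERNATE delivers one field per buffer; it maps to the
+ * "alternate" interlace-mode variant added via add_alternate_variant(). */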
+
+ static gboolean
+ gst_v4l2_object_get_colorspace (GstV4l2Object * v4l2object,
+ struct v4l2_format *fmt, GstVideoColorimetry * cinfo)
+ {
+ gboolean is_rgb =
+ gst_v4l2_object_v4l2fourcc_is_rgb (fmt->fmt.pix.pixelformat);
+ enum v4l2_colorspace colorspace;
+ enum v4l2_quantization range;
+ enum v4l2_ycbcr_encoding matrix;
+ enum v4l2_xfer_func transfer;
+ gboolean ret = TRUE;
+
+ if (V4L2_TYPE_IS_MULTIPLANAR (fmt->type)) {
+ colorspace = fmt->fmt.pix_mp.colorspace;
+ range = fmt->fmt.pix_mp.quantization;
+ matrix = fmt->fmt.pix_mp.ycbcr_enc;
+ transfer = fmt->fmt.pix_mp.xfer_func;
+ } else {
+ colorspace = fmt->fmt.pix.colorspace;
+ range = fmt->fmt.pix.quantization;
+ matrix = fmt->fmt.pix.ycbcr_enc;
+ transfer = fmt->fmt.pix.xfer_func;
+ }
+
+ /* First step, set the defaults for each colorspace */
+ switch (colorspace) {
+ case V4L2_COLORSPACE_SMPTE170M:
+ cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
+ cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
+ cinfo->transfer = GST_VIDEO_TRANSFER_BT601;
+ cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_SMPTE170M;
+ break;
+ case V4L2_COLORSPACE_REC709:
+ cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
+ cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT709;
+ cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
+ cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT709;
+ break;
+ case V4L2_COLORSPACE_SRGB:
+ case V4L2_COLORSPACE_JPEG:
+ cinfo->range = GST_VIDEO_COLOR_RANGE_0_255;
+ cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
+ cinfo->transfer = GST_VIDEO_TRANSFER_SRGB;
+ cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT709;
+ break;
+ case V4L2_COLORSPACE_OPRGB:
+ cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
+ cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
+ cinfo->transfer = GST_VIDEO_TRANSFER_ADOBERGB;
+ cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_ADOBERGB;
+ break;
+ case V4L2_COLORSPACE_BT2020:
+ cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
+ cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT2020;
+ cinfo->transfer = GST_VIDEO_TRANSFER_BT2020_12;
+ cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT2020;
+ break;
+ case V4L2_COLORSPACE_SMPTE240M:
+ cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
+ cinfo->matrix = GST_VIDEO_COLOR_MATRIX_SMPTE240M;
+ cinfo->transfer = GST_VIDEO_TRANSFER_SMPTE240M;
+ cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_SMPTE240M;
+ break;
+ case V4L2_COLORSPACE_470_SYSTEM_M:
+ cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
+ cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
+ cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
+ cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT470M;
+ break;
+ case V4L2_COLORSPACE_470_SYSTEM_BG:
+ cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
+ cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
+ cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
+ cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT470BG;
+ break;
+ case V4L2_COLORSPACE_RAW:
+ /* Explicitly unknown */
+ cinfo->range = GST_VIDEO_COLOR_RANGE_UNKNOWN;
+ cinfo->matrix = GST_VIDEO_COLOR_MATRIX_UNKNOWN;
+ cinfo->transfer = GST_VIDEO_TRANSFER_UNKNOWN;
+ cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
+ break;
+ default:
+ GST_DEBUG ("Unknown enum v4l2_colorspace %d", colorspace);
+ ret = FALSE;
+ break;
+ }
+
+ if (!ret)
+ goto done;
+
+ /* Second step, apply any custom variation */
+ switch (range) {
+ case V4L2_QUANTIZATION_FULL_RANGE:
+ cinfo->range = GST_VIDEO_COLOR_RANGE_0_255;
+ break;
+ case V4L2_QUANTIZATION_LIM_RANGE:
+ cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
+ break;
+ case V4L2_QUANTIZATION_DEFAULT:
+ /* replicated V4L2_MAP_QUANTIZATION_DEFAULT macro behavior */
+ if (is_rgb && colorspace == V4L2_COLORSPACE_BT2020)
+ cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
+ else if (is_rgb || matrix == V4L2_YCBCR_ENC_XV601
+ || matrix == V4L2_YCBCR_ENC_XV709
+ || colorspace == V4L2_COLORSPACE_JPEG)
+ cinfo->range = GST_VIDEO_COLOR_RANGE_0_255;
+ else
+ cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
+ break;
+ default:
+ GST_WARNING ("Unknown enum v4l2_quantization value %d", range);
+ cinfo->range = GST_VIDEO_COLOR_RANGE_UNKNOWN;
+ break;
+ }
+
+ switch (matrix) {
+ case V4L2_YCBCR_ENC_XV601:
+ case V4L2_YCBCR_ENC_SYCC:
+ GST_FIXME ("XV601 and SYCC not defined, assuming 601");
+ /* fallthrough */
+ case V4L2_YCBCR_ENC_601:
+ cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
+ break;
+ case V4L2_YCBCR_ENC_XV709:
+ GST_FIXME ("XV709 not defined, assuming 709");
+ /* fallthrough */
+ case V4L2_YCBCR_ENC_709:
+ cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT709;
+ break;
+ case V4L2_YCBCR_ENC_BT2020_CONST_LUM:
+ GST_FIXME ("BT2020 with constant luma is not defined, assuming BT2020");
+ /* fallthrough */
+ case V4L2_YCBCR_ENC_BT2020:
+ cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT2020;
+ break;
+ case V4L2_YCBCR_ENC_SMPTE240M:
+ cinfo->matrix = GST_VIDEO_COLOR_MATRIX_SMPTE240M;
+ break;
+ case V4L2_YCBCR_ENC_DEFAULT:
+ /* nothing, just use defaults for colorspace */
+ break;
+ default:
+ GST_WARNING ("Unknown enum v4l2_ycbcr_encoding value %d", matrix);
+ cinfo->matrix = GST_VIDEO_COLOR_MATRIX_UNKNOWN;
+ break;
+ }
+
+ /* Set the identity matrix for R'G'B' formats to avoid confusion. This is
+ * only cosmetic though, as it is now properly ignored by the video info
+ * API and videoconvert. */
+ if (is_rgb)
+ cinfo->matrix = GST_VIDEO_COLOR_MATRIX_RGB;
+
+ switch (transfer) {
+ case V4L2_XFER_FUNC_709:
+ if (colorspace == V4L2_COLORSPACE_BT2020 && fmt->fmt.pix.height >= 2160)
+ cinfo->transfer = GST_VIDEO_TRANSFER_BT2020_12;
+ else if (colorspace == V4L2_COLORSPACE_SMPTE170M)
+ cinfo->transfer = GST_VIDEO_TRANSFER_BT601;
+ else
+ cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
+
+ if (v4l2object->transfer)
+ cinfo->transfer = v4l2object->transfer;
+ break;
+ case V4L2_XFER_FUNC_SRGB:
+ cinfo->transfer = GST_VIDEO_TRANSFER_SRGB;
+ break;
+ case V4L2_XFER_FUNC_OPRGB:
+ cinfo->transfer = GST_VIDEO_TRANSFER_ADOBERGB;
+ break;
+ case V4L2_XFER_FUNC_SMPTE240M:
+ cinfo->transfer = GST_VIDEO_TRANSFER_SMPTE240M;
+ break;
+ case V4L2_XFER_FUNC_NONE:
+ cinfo->transfer = GST_VIDEO_TRANSFER_GAMMA10;
+ break;
+ case V4L2_XFER_FUNC_SMPTE2084:
+ cinfo->transfer = GST_VIDEO_TRANSFER_SMPTE2084;
+ break;
+ case V4L2_XFER_FUNC_DEFAULT:
+ /* nothing, just use defaults for colorspace */
+ break;
+ default:
+ GST_WARNING ("Unknown enum v4l2_xfer_func value %d", transfer);
+ cinfo->transfer = GST_VIDEO_TRANSFER_UNKNOWN;
+ break;
+ }
+
+ done:
+ return ret;
+ }
+
+ static gboolean
+ gst_v4l2_object_get_streamparm (GstV4l2Object * v4l2object, GstVideoInfo * info)
+ {
+ struct v4l2_streamparm streamparm;
+ memset (&streamparm, 0x00, sizeof (struct v4l2_streamparm));
+ streamparm.type = v4l2object->type;
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_PARM, &streamparm) < 0) {
+ GST_WARNING_OBJECT (v4l2object->dbg_obj, "VIDIOC_G_PARM failed");
+ return FALSE;
+ }
+ if ((streamparm.parm.capture.timeperframe.numerator != 0)
+ && (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE
+ || v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)) {
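+ /* timeperframe is a frame interval, so the fraction is inverted to get
+ * the framerate, e.g. 1/30 s per frame -> 30/1 fps */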
+ GST_VIDEO_INFO_FPS_N (info) =
+ streamparm.parm.capture.timeperframe.denominator;
+ GST_VIDEO_INFO_FPS_D (info) =
+ streamparm.parm.capture.timeperframe.numerator;
+ }
+ return TRUE;
+ }
+
+ static int
+ gst_v4l2_object_try_fmt (GstV4l2Object * v4l2object,
+ struct v4l2_format *try_fmt)
+ {
+ int fd = v4l2object->video_fd;
+ struct v4l2_format fmt;
+ int r;
+
+ memcpy (&fmt, try_fmt, sizeof (fmt));
+ r = v4l2object->ioctl (fd, VIDIOC_TRY_FMT, &fmt);
+
+ if (r < 0 && errno == ENOTTY) {
+ /* The driver might not implement TRY_FMT, in which case we will try
+ S_FMT to probe */
+ if (GST_V4L2_IS_ACTIVE (v4l2object))
+ goto error;
+
+ memcpy (&fmt, try_fmt, sizeof (fmt));
+ r = v4l2object->ioctl (fd, VIDIOC_S_FMT, &fmt);
+ }
+ memcpy (try_fmt, &fmt, sizeof (fmt));
+
+ return r;
+
+ error:
+ memcpy (try_fmt, &fmt, sizeof (fmt));
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Unable to try format: %s", g_strerror (errno));
+ return r;
+ }
+
+ static void
+ gst_v4l2_object_add_interlace_mode (GstV4l2Object * v4l2object,
+ GstStructure * s, guint32 width, guint32 height, guint32 pixelformat)
+ {
+ struct v4l2_format fmt;
+ GValue interlace_formats = { 0, };
+ enum v4l2_field formats[] = { V4L2_FIELD_NONE,
+ V4L2_FIELD_INTERLACED, V4L2_FIELD_ALTERNATE
+ };
+ gsize i;
+ GstVideoInterlaceMode interlace_mode, prev = -1;
+
+ if (!g_str_equal (gst_structure_get_name (s), "video/x-raw"))
+ return;
+
+ if (v4l2object->never_interlaced) {
+ gst_structure_set (s, "interlace-mode", G_TYPE_STRING, "progressive", NULL);
+ return;
+ }
+
+ g_value_init (&interlace_formats, GST_TYPE_LIST);
+
+ /* Try thrice - once for NONE, once for INTERLACED and once for ALTERNATE. */
+ for (i = 0; i < G_N_ELEMENTS (formats); i++) {
+ memset (&fmt, 0, sizeof (fmt));
+ fmt.type = v4l2object->type;
+ fmt.fmt.pix.width = width;
+ fmt.fmt.pix.height = height;
+ fmt.fmt.pix.pixelformat = pixelformat;
+ fmt.fmt.pix.field = formats[i];
+
+ if (fmt.fmt.pix.field == V4L2_FIELD_ALTERNATE)
+ fmt.fmt.pix.height /= 2;
+
+ /* if skip_try_fmt_probes is set it's up to the caller to filter out the
+ * formats from the formats requested by peer.
+ * For this negotiation to work with 'alternate' we need the caps to contain
+ * the feature so we have an intersection with downstream caps.
+ */
+ if (!v4l2object->skip_try_fmt_probes
+ && gst_v4l2_object_try_fmt (v4l2object, &fmt) != 0)
+ continue;
+
+ if (gst_v4l2_object_get_interlace_mode (fmt.fmt.pix.field, &interlace_mode)
+ && prev != interlace_mode) {
+ GValue interlace_enum = { 0, };
+ const gchar *mode_string;
+ g_value_init (&interlace_enum, G_TYPE_STRING);
+ mode_string = gst_video_interlace_mode_to_string (interlace_mode);
+ g_value_set_string (&interlace_enum, mode_string);
+ gst_value_list_append_and_take_value (&interlace_formats,
+ &interlace_enum);
+ prev = interlace_mode;
+ }
+ }
+
+ if (gst_v4l2src_value_simplify (&interlace_formats)
+ || gst_value_list_get_size (&interlace_formats) > 0)
+ gst_structure_take_value (s, "interlace-mode", &interlace_formats);
+ else
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Failed to determine interlace mode");
+
+ return;
+ }
+
+ static void
+ gst_v4l2_object_fill_colorimetry_list (GValue * list,
+ GstVideoColorimetry * cinfo)
+ {
+ GValue colorimetry = G_VALUE_INIT;
+ guint size;
+ guint i;
+ gboolean found = FALSE;
+
+ g_value_init (&colorimetry, G_TYPE_STRING);
+ g_value_take_string (&colorimetry, gst_video_colorimetry_to_string (cinfo));
+
+ /* only insert if no duplicate */
+ size = gst_value_list_get_size (list);
+ for (i = 0; i < size; i++) {
+ const GValue *tmp;
+
+ tmp = gst_value_list_get_value (list, i);
+ if (gst_value_compare (&colorimetry, tmp) == GST_VALUE_EQUAL) {
+ found = TRUE;
+ break;
+ }
+ }
+
+ if (!found)
+ gst_value_list_append_and_take_value (list, &colorimetry);
+ else
+ g_value_unset (&colorimetry);
+ }
+
+ static void
+ gst_v4l2_object_add_colorspace (GstV4l2Object * v4l2object, GstStructure * s,
+ guint32 width, guint32 height, guint32 pixelformat)
+ {
+ struct v4l2_format fmt;
+ GValue list = G_VALUE_INIT;
+ GstVideoColorimetry cinfo;
+ enum v4l2_colorspace req_cspace;
+
+ memset (&fmt, 0, sizeof (fmt));
+ fmt.type = v4l2object->type;
+ fmt.fmt.pix.width = width;
+ fmt.fmt.pix.height = height;
+ fmt.fmt.pix.pixelformat = pixelformat;
+
+ g_value_init (&list, GST_TYPE_LIST);
+
+ /* step 1: get device default colorspace and insert it first as
+ * it should be the preferred one */
+ if (gst_v4l2_object_try_fmt (v4l2object, &fmt) == 0) {
+ if (gst_v4l2_object_get_colorspace (v4l2object, &fmt, &cinfo))
+ gst_v4l2_object_fill_colorimetry_list (&list, &cinfo);
+ }
+
+ /* step 2: probe all colorspaces other than the default.
+ * We don't probe every colorspace, range, matrix and transfer combination to
+ * avoid ioctl flooding, which could greatly increase initialization time
+ * with low-speed devices (UVC...) */
+ for (req_cspace = V4L2_COLORSPACE_SMPTE170M;
+ req_cspace <= V4L2_COLORSPACE_RAW; req_cspace++) {
+ /* V4L2_COLORSPACE_BT878 is deprecated and shall not be used, so skip */
+ if (req_cspace == V4L2_COLORSPACE_BT878)
+ continue;
+
+ if (V4L2_TYPE_IS_MULTIPLANAR (v4l2object->type))
+ fmt.fmt.pix_mp.colorspace = req_cspace;
+ else
+ fmt.fmt.pix.colorspace = req_cspace;
+
+ if (gst_v4l2_object_try_fmt (v4l2object, &fmt) == 0) {
+ enum v4l2_colorspace colorspace;
+
+ if (V4L2_TYPE_IS_MULTIPLANAR (v4l2object->type))
+ colorspace = fmt.fmt.pix_mp.colorspace;
+ else
+ colorspace = fmt.fmt.pix.colorspace;
+
+ if (colorspace == req_cspace) {
+ if (gst_v4l2_object_get_colorspace (v4l2object, &fmt, &cinfo))
+ gst_v4l2_object_fill_colorimetry_list (&list, &cinfo);
+ }
+ }
+ }
+
+ if (gst_value_list_get_size (&list) > 0)
+ gst_structure_take_value (s, "colorimetry", &list);
+ else
+ g_value_unset (&list);
+
+ return;
+ }
+
+ /* The frame interval enumeration code first appeared in Linux 2.6.19. */
+ static GstStructure *
+ gst_v4l2_object_probe_caps_for_format_and_size (GstV4l2Object * v4l2object,
+ guint32 pixelformat,
+ guint32 width, guint32 height, const GstStructure * template)
+ {
+ gint fd = v4l2object->video_fd;
+ struct v4l2_frmivalenum ival;
+ guint32 num, denom;
+ GstStructure *s;
+ GValue rates = { 0, };
+
+ memset (&ival, 0, sizeof (struct v4l2_frmivalenum));
+ ival.index = 0;
+ ival.pixel_format = pixelformat;
+ ival.width = width;
+ ival.height = height;
+
+ GST_LOG_OBJECT (v4l2object->dbg_obj,
+ "get frame interval for %ux%u, %" GST_FOURCC_FORMAT, width, height,
+ GST_FOURCC_ARGS (pixelformat));
+
+ /* keep in mind that v4l2 gives us frame intervals (durations); we invert the
+ * fraction to get framerate */
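+ /* e.g. an interval of 1/30 s maps to a 30/1 fps framerate */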
+ if (v4l2object->ioctl (fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) < 0)
+ goto enum_frameintervals_failed;
+
+ if (ival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
+ GValue rate = { 0, };
+
+ g_value_init (&rates, GST_TYPE_LIST);
+ g_value_init (&rate, GST_TYPE_FRACTION);
+
+ do {
+ num = ival.discrete.numerator;
+ denom = ival.discrete.denominator;
+
+ if (num > G_MAXINT || denom > G_MAXINT) {
+ /* let us hope we don't get here... */
+ num >>= 1;
+ denom >>= 1;
+ }
+
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "adding discrete framerate: %d/%d",
+ denom, num);
+
+ /* swap to get the framerate */
+ gst_value_set_fraction (&rate, denom, num);
+ gst_value_list_append_value (&rates, &rate);
+
+ ival.index++;
+ } while (v4l2object->ioctl (fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) >= 0);
+ } else if (ival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
+ GValue min = { 0, };
+ GValue step = { 0, };
+ GValue max = { 0, };
+ gboolean added = FALSE;
+ guint32 minnum, mindenom;
+ guint32 maxnum, maxdenom;
+
+ g_value_init (&rates, GST_TYPE_LIST);
+
+ g_value_init (&min, GST_TYPE_FRACTION);
+ g_value_init (&step, GST_TYPE_FRACTION);
+ g_value_init (&max, GST_TYPE_FRACTION);
+
+ /* get the min */
+ minnum = ival.stepwise.min.numerator;
+ mindenom = ival.stepwise.min.denominator;
+ if (minnum > G_MAXINT || mindenom > G_MAXINT) {
+ minnum >>= 1;
+ mindenom >>= 1;
+ }
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "stepwise min frame interval: %d/%d",
+ minnum, mindenom);
+ gst_value_set_fraction (&min, minnum, mindenom);
+
+ /* get the max */
+ maxnum = ival.stepwise.max.numerator;
+ maxdenom = ival.stepwise.max.denominator;
+ if (maxnum > G_MAXINT || maxdenom > G_MAXINT) {
+ maxnum >>= 1;
+ maxdenom >>= 1;
+ }
+
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "stepwise max frame interval: %d/%d",
+ maxnum, maxdenom);
+ gst_value_set_fraction (&max, maxnum, maxdenom);
+
+ /* get the step */
+ num = ival.stepwise.step.numerator;
+ denom = ival.stepwise.step.denominator;
+ if (num > G_MAXINT || denom > G_MAXINT) {
+ num >>= 1;
+ denom >>= 1;
+ }
+
+ if (num == 0 || denom == 0) {
+ /* in this case we have a wrong fraction or no step, set the step to max
+ * so that we only add the min value in the loop below */
+ num = maxnum;
+ denom = maxdenom;
+ }
+
+ /* since we only have gst_value_fraction_subtract and not add, negate the
+ * numerator */
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "stepwise step frame interval: %d/%d",
+ num, denom);
+ gst_value_set_fraction (&step, -num, denom);
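+ /* e.g. with min = 1/30 and step = -1/30, min - step = 1/30 + 1/30 = 2/30,
+ * so each subtraction below advances one interval towards max */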
+
+ while (gst_value_compare (&min, &max) != GST_VALUE_GREATER_THAN) {
+ GValue rate = { 0, };
+
+ num = gst_value_get_fraction_numerator (&min);
+ denom = gst_value_get_fraction_denominator (&min);
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "adding stepwise framerate: %d/%d",
+ denom, num);
+
+ /* invert to get the framerate */
+ g_value_init (&rate, GST_TYPE_FRACTION);
+ gst_value_set_fraction (&rate, denom, num);
+ gst_value_list_append_value (&rates, &rate);
+ added = TRUE;
+
+ /* we're actually adding because step was negated above. This is because
+ * there is no _add function... */
+ if (!gst_value_fraction_subtract (&min, &min, &step)) {
+ GST_WARNING_OBJECT (v4l2object->dbg_obj, "could not step fraction!");
+ break;
+ }
+ }
+ if (!added) {
+ /* no range was added, leave the default range from the template */
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "no range added, leaving default");
+ g_value_unset (&rates);
+ }
+ } else if (ival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
+ guint32 maxnum, maxdenom;
+
+ g_value_init (&rates, GST_TYPE_FRACTION_RANGE);
+
+ num = ival.stepwise.min.numerator;
+ denom = ival.stepwise.min.denominator;
+ if (num > G_MAXINT || denom > G_MAXINT) {
+ num >>= 1;
+ denom >>= 1;
+ }
+
+ maxnum = ival.stepwise.max.numerator;
+ maxdenom = ival.stepwise.max.denominator;
+ if (maxnum > G_MAXINT || maxdenom > G_MAXINT) {
+ maxnum >>= 1;
+ maxdenom >>= 1;
+ }
+
+ GST_LOG_OBJECT (v4l2object->dbg_obj,
+ "continuous frame interval %d/%d to %d/%d", maxdenom, maxnum, denom,
+ num);
+
+ gst_value_set_fraction_range_full (&rates, maxdenom, maxnum, denom, num);
+ } else {
+ goto unknown_type;
+ }
+
+ return_data:
+ s = gst_structure_copy (template);
+ gst_structure_set (s, "width", G_TYPE_INT, (gint) width,
+ "height", G_TYPE_INT, (gint) height, NULL);
+
+ gst_v4l2_object_add_aspect_ratio (v4l2object, s);
+
+ if (!v4l2object->skip_try_fmt_probes) {
+ gst_v4l2_object_add_interlace_mode (v4l2object, s, width, height,
+ pixelformat);
+ gst_v4l2_object_add_colorspace (v4l2object, s, width, height, pixelformat);
+ }
+
+ if (G_IS_VALUE (&rates)) {
+ gst_v4l2src_value_simplify (&rates);
+ /* only change the framerate on the template when we have a valid probed new
+ * value */
+ gst_structure_take_value (s, "framerate", &rates);
+ } else if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
+ v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
+ gst_structure_set (s, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT,
+ 1, NULL);
+ }
+ return s;
+
+ /* ERRORS */
+ enum_frameintervals_failed:
+ {
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
+ "Unable to enumerate intervals for %" GST_FOURCC_FORMAT "@%ux%u",
+ GST_FOURCC_ARGS (pixelformat), width, height);
+ goto return_data;
+ }
+ unknown_type:
+ {
+ /* I don't see how this is actually an error, we ignore the format then */
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Unknown frame interval type at %" GST_FOURCC_FORMAT "@%ux%u: %u",
+ GST_FOURCC_ARGS (pixelformat), width, height, ival.type);
+ return NULL;
+ }
+ }
+
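+ /* GCompareFunc ordering structures by decreasing pixel count, so the
+ * highest resolution ends up first in the caps */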
+ static gint
+ sort_by_frame_size (GstStructure * s1, GstStructure * s2)
+ {
+ int w1, h1, w2, h2;
+
+ gst_structure_get_int (s1, "width", &w1);
+ gst_structure_get_int (s1, "height", &h1);
+ gst_structure_get_int (s2, "width", &w2);
+ gst_structure_get_int (s2, "height", &h2);
+
+ /* I think it's safe to assume that this won't overflow for a while */
+ return ((w2 * h2) - (w1 * h1));
+ }
+
+ static void
+ check_alternate_and_append_struct (GstCaps * caps, GstStructure * s)
+ {
+ const GValue *mode;
+
+ mode = gst_structure_get_value (s, "interlace-mode");
+ if (!mode)
+ goto done;
+
+ if (G_VALUE_HOLDS_STRING (mode)) {
+ /* Add the INTERLACED feature if the mode is alternate */
+ if (!g_strcmp0 (gst_structure_get_string (s, "interlace-mode"),
+ "alternate")) {
+ GstCapsFeatures *feat;
+
+ feat = gst_caps_features_new (GST_CAPS_FEATURE_FORMAT_INTERLACED, NULL);
+ gst_caps_set_features (caps, gst_caps_get_size (caps) - 1, feat);
+ }
+ } else if (GST_VALUE_HOLDS_LIST (mode)) {
+ /* If the mode is a list containing alternate, remove it from the list and add a
+ * variant with interlace-mode=alternate and the INTERLACED feature. */
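+ /* e.g. { progressive, alternate } leaves s with { progressive } and
+ * appends a copy carrying interlace-mode=alternate plus the INTERLACED
+ * caps feature */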
+ GValue alter = G_VALUE_INIT;
+ GValue inter = G_VALUE_INIT;
+
+ g_value_init (&alter, G_TYPE_STRING);
+ g_value_set_string (&alter, "alternate");
+
+ /* Cannot use gst_value_can_intersect() as it requires args to have the
+ * same type. */
+ if (gst_value_intersect (&inter, mode, &alter)) {
+ GValue minus_alter = G_VALUE_INIT;
+ GstStructure *copy;
+
+ gst_value_subtract (&minus_alter, mode, &alter);
+ gst_structure_take_value (s, "interlace-mode", &minus_alter);
+
+ copy = gst_structure_copy (s);
+ gst_structure_take_value (copy, "interlace-mode", &inter);
+ gst_caps_append_structure_full (caps, copy,
+ gst_caps_features_new (GST_CAPS_FEATURE_FORMAT_INTERLACED, NULL));
+ }
+ g_value_unset (&alter);
+ }
+
+ done:
+ gst_caps_append_structure (caps, s);
+ }
+
+ static void
+ gst_v4l2_object_update_and_append (GstV4l2Object * v4l2object,
+ guint32 format, GstCaps * caps, GstStructure * s)
+ {
+ GstStructure *alt_s = NULL;
+
+ /* Encoded streams on output buffers need to be parsed */
+ if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT ||
+ v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
+ gint i = 0;
+
+ for (; i < GST_V4L2_FORMAT_COUNT; i++) {
+ if (format == gst_v4l2_formats[i].format &&
+ gst_v4l2_formats[i].flags & GST_V4L2_CODEC &&
+ !(gst_v4l2_formats[i].flags & GST_V4L2_NO_PARSE)) {
+ gst_structure_set (s, "parsed", G_TYPE_BOOLEAN, TRUE, NULL);
+ break;
+ }
+ }
+ }
+
+ if (v4l2object->has_alpha_component &&
+ (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
+ v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)) {
+ switch (format) {
+ case V4L2_PIX_FMT_RGB32:
+ alt_s = gst_structure_copy (s);
+ gst_structure_set (alt_s, "format", G_TYPE_STRING, "ARGB", NULL);
+ break;
+ case V4L2_PIX_FMT_BGR32:
+ alt_s = gst_structure_copy (s);
+ gst_structure_set (alt_s, "format", G_TYPE_STRING, "BGRA", NULL);
+ break;
+ default:
+ break;
+ }
+ }
+
+ check_alternate_and_append_struct (caps, s);
+
+ if (alt_s) {
+ check_alternate_and_append_struct (caps, alt_s);
+ }
+ }
+
+ static GstCaps *
+ gst_v4l2_object_probe_caps_for_format (GstV4l2Object * v4l2object,
+ guint32 pixelformat, const GstStructure * template)
+ {
+ GstCaps *ret = gst_caps_new_empty ();
+ GstStructure *tmp;
+ gint fd = v4l2object->video_fd;
+ struct v4l2_frmsizeenum size;
+ GList *results = NULL;
+ guint32 w, h;
+
+ if (pixelformat == GST_MAKE_FOURCC ('M', 'P', 'E', 'G')) {
+ gst_caps_append_structure (ret, gst_structure_copy (template));
+ return ret;
+ }
+
+ memset (&size, 0, sizeof (struct v4l2_frmsizeenum));
+ size.index = 0;
+ size.pixel_format = pixelformat;
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
+ "Enumerating frame sizes for %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (pixelformat));
+
+ if (v4l2object->ioctl (fd, VIDIOC_ENUM_FRAMESIZES, &size) < 0)
+ goto enum_framesizes_failed;
+
+ if (size.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
+ do {
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "got discrete frame size %dx%d",
+ size.discrete.width, size.discrete.height);
+
+ w = MIN (size.discrete.width, G_MAXINT);
+ h = MIN (size.discrete.height, G_MAXINT);
+
+ if (w && h) {
+ tmp =
+ gst_v4l2_object_probe_caps_for_format_and_size (v4l2object,
+ pixelformat, w, h, template);
+
+ if (tmp)
+ results = g_list_prepend (results, tmp);
+ }
+
+ size.index++;
+ } while (v4l2object->ioctl (fd, VIDIOC_ENUM_FRAMESIZES, &size) >= 0);
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
+ "done iterating discrete frame sizes");
+ } else if (size.type == V4L2_FRMSIZE_TYPE_STEPWISE) {
+ guint32 maxw, maxh, step_w, step_h;
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "we have stepwise frame sizes:");
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min width: %d",
+ size.stepwise.min_width);
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min height: %d",
+ size.stepwise.min_height);
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "max width: %d",
+ size.stepwise.max_width);
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min height: %d",
+ size.stepwise.max_height);
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "step width: %d",
+ size.stepwise.step_width);
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "step height: %d",
+ size.stepwise.step_height);
+
+ w = MAX (size.stepwise.min_width, 1);
+ h = MAX (size.stepwise.min_height, 1);
+ maxw = MIN (size.stepwise.max_width, G_MAXINT);
+ maxh = MIN (size.stepwise.max_height, G_MAXINT);
+
+ step_w = MAX (size.stepwise.step_width, 1);
+ step_h = MAX (size.stepwise.step_height, 1);
+
+ /* FIXME: check for sanity and that min/max are multiples of the steps */
+
+ /* we only query details for the max width/height since it's likely the
+ * most restricted if there are any resolution-dependent restrictions */
+ tmp = gst_v4l2_object_probe_caps_for_format_and_size (v4l2object,
+ pixelformat, maxw, maxh, template);
+
+ if (tmp) {
+ GValue step_range = G_VALUE_INIT;
+
+ g_value_init (&step_range, GST_TYPE_INT_RANGE);
+ gst_value_set_int_range_step (&step_range, w, maxw, step_w);
+ gst_structure_set_value (tmp, "width", &step_range);
+
+ gst_value_set_int_range_step (&step_range, h, maxh, step_h);
+ gst_structure_take_value (tmp, "height", &step_range);
+
+ /* no point using the results list here, since there's only one struct */
+ gst_v4l2_object_update_and_append (v4l2object, pixelformat, ret, tmp);
+ }
+ } else if (size.type == V4L2_FRMSIZE_TYPE_CONTINUOUS) {
+ guint32 maxw, maxh;
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "we have continuous frame sizes:");
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min width: %d",
+ size.stepwise.min_width);
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min height: %d",
+ size.stepwise.min_height);
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "max width: %d",
+ size.stepwise.max_width);
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min height: %d",
+ size.stepwise.max_height);
+
+ w = MAX (size.stepwise.min_width, 1);
+ h = MAX (size.stepwise.min_height, 1);
+ maxw = MIN (size.stepwise.max_width, G_MAXINT);
+ maxh = MIN (size.stepwise.max_height, G_MAXINT);
+
+ tmp =
+ gst_v4l2_object_probe_caps_for_format_and_size (v4l2object, pixelformat,
+ w, h, template);
+ if (tmp) {
+ gst_structure_set (tmp, "width", GST_TYPE_INT_RANGE, (gint) w,
+ (gint) maxw, "height", GST_TYPE_INT_RANGE, (gint) h, (gint) maxh,
+ NULL);
+
+ /* no point using the results list here, since there's only one struct */
+ gst_v4l2_object_update_and_append (v4l2object, pixelformat, ret, tmp);
+ }
+ } else {
+ goto unknown_type;
+ }
+
+ /* we use an intermediary list to store and then sort the results of the
+ * probing because we can't make any assumptions about the order in which
+ * the driver will give us the sizes, but we want the final caps to contain
+ * the results starting with the highest resolution and having the lowest
+ * resolution last, since order in caps matters for things like fixation. */
+ results = g_list_sort (results, (GCompareFunc) sort_by_frame_size);
+ while (results != NULL) {
+ gst_v4l2_object_update_and_append (v4l2object, pixelformat, ret,
+ results->data);
+ results = g_list_delete_link (results, results);
+ }
+
+ if (gst_caps_is_empty (ret))
+ goto enum_framesizes_no_results;
+
+ return ret;
+
+ /* ERRORS */
+ enum_framesizes_failed:
+ {
+ /* I don't see how this is actually an error */
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
+ "Failed to enumerate frame sizes for pixelformat %" GST_FOURCC_FORMAT
+ " (%s)", GST_FOURCC_ARGS (pixelformat), g_strerror (errno));
+ goto default_frame_sizes;
+ }
+ enum_framesizes_no_results:
+ {
+ /* it's possible that VIDIOC_ENUM_FRAMESIZES is defined but the driver in
+ * question doesn't actually support it yet */
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
+ "No results for pixelformat %" GST_FOURCC_FORMAT
+ " enumerating frame sizes, trying fallback",
+ GST_FOURCC_ARGS (pixelformat));
+ goto default_frame_sizes;
+ }
+ unknown_type:
+ {
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Unknown frame sizeenum type for pixelformat %" GST_FOURCC_FORMAT
+ ": %u", GST_FOURCC_ARGS (pixelformat), size.type);
+ goto default_frame_sizes;
+ }
+
+ default_frame_sizes:
+ {
+ gint min_w, max_w, min_h, max_h, fix_num = 0, fix_denom = 0;
+
+ /* This code is for Linux < 2.6.19 */
+ min_w = min_h = 1;
+ max_w = max_h = GST_V4L2_MAX_SIZE;
+ if (!gst_v4l2_object_get_nearest_size (v4l2object, pixelformat, &min_w,
+ &min_h)) {
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Could not probe minimum capture size for pixelformat %"
+ GST_FOURCC_FORMAT, GST_FOURCC_ARGS (pixelformat));
+ }
+ if (!gst_v4l2_object_get_nearest_size (v4l2object, pixelformat, &max_w,
+ &max_h)) {
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Could not probe maximum capture size for pixelformat %"
+ GST_FOURCC_FORMAT, GST_FOURCC_ARGS (pixelformat));
+ }
+
+ /* Since we can't get framerate directly, try to use the current norm */
+ if (v4l2object->tv_norm && v4l2object->norms) {
+ GList *norms;
+ GstTunerNorm *norm = NULL;
+ GstTunerNorm *current =
+ gst_v4l2_tuner_get_norm_by_std_id (v4l2object, v4l2object->tv_norm);
+
+ for (norms = v4l2object->norms; norms != NULL; norms = norms->next) {
+ norm = (GstTunerNorm *) norms->data;
+ if (!strcmp (norm->label, current->label))
+ break;
+ }
+ /* If it's possible, set framerate to that (discrete) value */
+ if (norm) {
+ fix_num = gst_value_get_fraction_numerator (&norm->framerate);
+ fix_denom = gst_value_get_fraction_denominator (&norm->framerate);
+ }
+ }
+
+ tmp = gst_structure_copy (template);
+ if (fix_num) {
+ gst_structure_set (tmp, "framerate", GST_TYPE_FRACTION, fix_num,
+ fix_denom, NULL);
+ } else if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
+ v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
+ /* if norm can't be used, copy the template framerate */
+ gst_structure_set (tmp, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
+ G_MAXINT, 1, NULL);
+ }
+
+ if (min_w == max_w)
+ gst_structure_set (tmp, "width", G_TYPE_INT, max_w, NULL);
+ else
+ gst_structure_set (tmp, "width", GST_TYPE_INT_RANGE, min_w, max_w, NULL);
+
+ if (min_h == max_h)
+ gst_structure_set (tmp, "height", G_TYPE_INT, max_h, NULL);
+ else
+ gst_structure_set (tmp, "height", GST_TYPE_INT_RANGE, min_h, max_h, NULL);
+
+ gst_v4l2_object_add_aspect_ratio (v4l2object, tmp);
+
+ /* We could consider setting interlace mode from min and max. */
+ gst_v4l2_object_add_interlace_mode (v4l2object, tmp, max_w, max_h,
+ pixelformat);
+
+ if (!v4l2object->skip_try_fmt_probes) {
+ gint probed_w, probed_h;
+ if (v4l2object->info.width >= min_w && v4l2object->info.width <= max_w &&
+ v4l2object->info.height >= min_h
+ && v4l2object->info.height <= max_h) {
+ probed_w = v4l2object->info.width;
+ probed_h = v4l2object->info.height;
+ } else {
+ probed_w = max_w;
+ probed_h = max_h;
+ }
+ /* We could also consider checking the colorspace for the min size, in
+ * case it depends on the size. But then even min and max might not be
+ * enough */
+ gst_v4l2_object_add_colorspace (v4l2object, tmp, probed_w, probed_h,
+ pixelformat);
+ }
+
+ gst_v4l2_object_update_and_append (v4l2object, pixelformat, ret, tmp);
+ return ret;
+ }
+ }
+
+ static gboolean
+ gst_v4l2_object_get_nearest_size (GstV4l2Object * v4l2object,
+ guint32 pixelformat, gint * width, gint * height)
+ {
+ struct v4l2_format fmt;
+ gboolean ret = FALSE;
+ GstVideoInterlaceMode interlace_mode;
+
+ g_return_val_if_fail (width != NULL, FALSE);
+ g_return_val_if_fail (height != NULL, FALSE);
+
+ GST_LOG_OBJECT (v4l2object->dbg_obj,
+ "getting nearest size to %dx%d with format %" GST_FOURCC_FORMAT,
+ *width, *height, GST_FOURCC_ARGS (pixelformat));
+
+ /* get size delimiters */
+ memset (&fmt, 0, sizeof (fmt));
+ fmt.type = v4l2object->type;
+ fmt.fmt.pix.width = *width;
+ fmt.fmt.pix.height = *height;
+ fmt.fmt.pix.pixelformat = pixelformat;
+ fmt.fmt.pix.field = V4L2_FIELD_ANY;
+
+ if (gst_v4l2_object_try_fmt (v4l2object, &fmt) < 0)
+ goto error;
+
+ GST_LOG_OBJECT (v4l2object->dbg_obj,
+ "got nearest size %dx%d", fmt.fmt.pix.width, fmt.fmt.pix.height);
+
+ *width = fmt.fmt.pix.width;
+ *height = fmt.fmt.pix.height;
+
+ if (!gst_v4l2_object_get_interlace_mode (fmt.fmt.pix.field, &interlace_mode)) {
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Unsupported field type for %" GST_FOURCC_FORMAT "@%ux%u: %u",
+ GST_FOURCC_ARGS (pixelformat), *width, *height, fmt.fmt.pix.field);
+ goto error;
+ }
+
+ ret = TRUE;
+
+ error:
+ if (!ret) {
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Unable to try format: %s", g_strerror (errno));
+ }
+
+ return ret;
+ }
+
+ static gboolean
+ gst_v4l2_object_is_dmabuf_supported (GstV4l2Object * v4l2object)
+ {
+ gboolean ret = TRUE;
+ struct v4l2_exportbuffer expbuf = {
+ .type = v4l2object->type,
+ .index = -1,
+ .plane = -1,
+ .flags = O_CLOEXEC | O_RDWR,
+ };
+
+ if (v4l2object->fmtdesc->flags & V4L2_FMT_FLAG_EMULATED) {
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "libv4l2 converter detected, disabling DMABuf");
+ ret = FALSE;
+ }
+
+ /* Expected to fail, but ENOTTY tells us that it is not implemented. */
+ v4l2object->ioctl (v4l2object->video_fd, VIDIOC_EXPBUF, &expbuf);
+ if (errno == ENOTTY)
+ ret = FALSE;
+
+ return ret;
+ }
+
+ static gboolean
+ gst_v4l2_object_setup_pool (GstV4l2Object * v4l2object, GstCaps * caps)
+ {
+ GstV4l2IOMode mode;
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "initializing the %s system",
+ V4L2_TYPE_IS_OUTPUT (v4l2object->type) ? "output" : "capture");
+
+ GST_V4L2_CHECK_OPEN (v4l2object);
+ GST_V4L2_CHECK_NOT_ACTIVE (v4l2object);
+
+ /* find transport */
+ mode = v4l2object->req_mode;
+
+ if (v4l2object->device_caps & V4L2_CAP_READWRITE) {
+ if (v4l2object->req_mode == GST_V4L2_IO_AUTO)
+ mode = GST_V4L2_IO_RW;
+ } else if (v4l2object->req_mode == GST_V4L2_IO_RW)
+ goto method_not_supported;
+
+ if (v4l2object->device_caps & V4L2_CAP_STREAMING) {
+ if (v4l2object->req_mode == GST_V4L2_IO_AUTO) {
+ if (!V4L2_TYPE_IS_OUTPUT (v4l2object->type) &&
+ gst_v4l2_object_is_dmabuf_supported (v4l2object)) {
+ mode = GST_V4L2_IO_DMABUF;
+ } else {
+ mode = GST_V4L2_IO_MMAP;
+ }
+ }
+ } else if (v4l2object->req_mode == GST_V4L2_IO_MMAP ||
+ v4l2object->req_mode == GST_V4L2_IO_DMABUF)
+ goto method_not_supported;
+
+ /* if still no transport selected, error out */
+ if (mode == GST_V4L2_IO_AUTO)
+ goto no_supported_capture_method;
+
+ GST_INFO_OBJECT (v4l2object->dbg_obj, "accessing buffers via mode %d", mode);
+ v4l2object->mode = mode;
+
+ /* If min_buffers is not set, the driver either does not support the control or
+ it has not been asked yet via propose_allocation/decide_allocation. */
+ if (!v4l2object->min_buffers)
+ gst_v4l2_get_driver_min_buffers (v4l2object);
+
+ /* Map the buffers */
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "initiating buffer pool");
+
+ if (!(v4l2object->pool = gst_v4l2_buffer_pool_new (v4l2object, caps)))
+ goto buffer_pool_new_failed;
+
+ GST_V4L2_SET_ACTIVE (v4l2object);
+
+ return TRUE;
+
+ /* ERRORS */
+ buffer_pool_new_failed:
+ {
+ GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, READ,
+ (_("Could not map buffers from device '%s'"),
+ v4l2object->videodev),
+ ("Failed to create buffer pool: %s", g_strerror (errno)));
+ return FALSE;
+ }
+ method_not_supported:
+ {
+ GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, READ,
+ (_("The driver of device '%s' does not support the IO method %d"),
+ v4l2object->videodev, mode), (NULL));
+ return FALSE;
+ }
+ no_supported_capture_method:
+ {
+ GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, READ,
+ (_("The driver of device '%s' does not support any known IO "
+ "method."), v4l2object->videodev), (NULL));
+ return FALSE;
+ }
+ }
+
+ static void
+ gst_v4l2_object_set_stride (GstVideoInfo * info, GstVideoAlignment * align,
+ gint plane, gint stride)
+ {
+ const GstVideoFormatInfo *finfo = info->finfo;
+
+ if (GST_VIDEO_FORMAT_INFO_IS_TILED (finfo)) {
+ gint x_tiles, y_tiles, ws, hs, tile_height, padded_height;
+
+ ws = GST_VIDEO_FORMAT_INFO_TILE_WS (finfo);
+ hs = GST_VIDEO_FORMAT_INFO_TILE_HS (finfo);
+ tile_height = 1 << hs;
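+ /* e.g. NV12_64Z32 uses 64-byte x 32-row tiles (ws = 6, hs = 5),
+ * so tile_height is 32 */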
+
+ padded_height = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (finfo, plane,
+ info->height + align->padding_top + align->padding_bottom);
+ padded_height = GST_ROUND_UP_N (padded_height, tile_height);
+
+ x_tiles = stride >> ws;
+ y_tiles = padded_height >> hs;
+ info->stride[plane] = GST_VIDEO_TILE_MAKE_STRIDE (x_tiles, y_tiles);
+ } else {
+ info->stride[plane] = stride;
+ }
+ }
+
+ static void
+ gst_v4l2_object_extrapolate_info (GstV4l2Object * v4l2object,
+ GstVideoInfo * info, GstVideoAlignment * align, gint stride)
+ {
+ const GstVideoFormatInfo *finfo = info->finfo;
+ gint i, estride, padded_height;
+ gsize offs = 0;
+
+ g_return_if_fail (v4l2object->n_v4l2_planes == 1);
+
+ padded_height =
+ GST_VIDEO_INFO_FIELD_HEIGHT (info) + align->padding_top +
+ align->padding_bottom;
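+ /* e.g. NV12 at 320x240 with no padding and stride 320 yields plane 0 at
+ * offset 0 and plane 1 at offset 320 * 240 = 76800 */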
+
+ for (i = 0; i < finfo->n_planes; i++) {
+ estride = gst_v4l2_object_extrapolate_stride (finfo, i, stride);
+
+ gst_v4l2_object_set_stride (info, align, i, estride);
+
+ info->offset[i] = offs;
+ offs += estride *
+ GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (finfo, i, padded_height);
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
+ "Extrapolated for plane %d with base stride %d: "
+ "stride %d, offset %" G_GSIZE_FORMAT, i, stride, info->stride[i],
+ info->offset[i]);
+ }
+
+ /* Update the image size according to the amount of data we are going to
+ * read/write. This works around bugs in drivers where the sizeimage provided
+ * by TRY/S_FMT represents the buffer length (maximum size) rather than the
+ * expected bytesused (buffer size). */
+ if (offs < info->size)
+ info->size = offs;
+ }
+
+ static void
+ gst_v4l2_object_save_format (GstV4l2Object * v4l2object,
+ struct v4l2_fmtdesc *fmtdesc, struct v4l2_format *format,
+ GstVideoInfo * info, GstVideoAlignment * align)
+ {
+ const GstVideoFormatInfo *finfo = info->finfo;
+ gboolean standard_stride = TRUE;
+ gint stride, pstride, padded_width, padded_height, i;
+
+ if (GST_VIDEO_INFO_FORMAT (info) == GST_VIDEO_FORMAT_ENCODED) {
+ v4l2object->n_v4l2_planes = 1;
+ info->size = format->fmt.pix.sizeimage;
+ goto store_info;
+ }
+
+ /* adjust right padding */
+ if (V4L2_TYPE_IS_MULTIPLANAR (v4l2object->type))
+ stride = format->fmt.pix_mp.plane_fmt[0].bytesperline;
+ else
+ stride = format->fmt.pix.bytesperline;
+
+ pstride = GST_VIDEO_FORMAT_INFO_PSTRIDE (finfo, 0);
+ if (pstride) {
+ padded_width = stride / pstride;
+ } else {
+ /* pstride can be 0 for complex formats */
+ GST_WARNING_OBJECT (v4l2object->element,
+ "format %s has a pstride of 0, cannot compute padded with",
+ gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (info)));
+ padded_width = stride;
+ }
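+ /* e.g. with a pstride of 4 (RGBx), a bytesperline of 8192 yields a
+ * padded width of 2048 pixels */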
+
+ if (padded_width < format->fmt.pix.width)
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Driver bug detected, stride (%d) is too small for the width (%d)",
+ padded_width, format->fmt.pix.width);
+
+ align->padding_right = padded_width - info->width - align->padding_left;
+
+ /* adjust bottom padding */
+ padded_height = format->fmt.pix.height;
+
+ if (GST_VIDEO_FORMAT_INFO_IS_TILED (finfo)) {
+ guint hs, tile_height;
+
+ hs = GST_VIDEO_FORMAT_INFO_TILE_HS (finfo);
+ tile_height = 1 << hs;
+
+ padded_height = GST_ROUND_UP_N (padded_height, tile_height);
+ }
+
+ align->padding_bottom =
+ padded_height - GST_VIDEO_INFO_FIELD_HEIGHT (info) - align->padding_top;
+
+ /* setup the strides and offset */
+ if (V4L2_TYPE_IS_MULTIPLANAR (v4l2object->type)) {
+ struct v4l2_pix_format_mplane *pix_mp = &format->fmt.pix_mp;
+
+ /* figure out the frame layout */
+ v4l2object->n_v4l2_planes = MAX (1, pix_mp->num_planes);
+ info->size = 0;
+ for (i = 0; i < v4l2object->n_v4l2_planes; i++) {
+ stride = pix_mp->plane_fmt[i].bytesperline;
+
+ if (info->stride[i] != stride)
+ standard_stride = FALSE;
+
+ gst_v4l2_object_set_stride (info, align, i, stride);
+ info->offset[i] = info->size;
+ info->size += pix_mp->plane_fmt[i].sizeimage;
+ }
+
+ /* Extrapolate strides if a planar format is being set in a single v4l2 plane */
+ if (v4l2object->n_v4l2_planes < finfo->n_planes) {
+ stride = format->fmt.pix_mp.plane_fmt[0].bytesperline;
+ gst_v4l2_object_extrapolate_info (v4l2object, info, align, stride);
+ }
+ } else {
+ /* only one plane in non-MPLANE mode */
+ v4l2object->n_v4l2_planes = 1;
+ info->size = format->fmt.pix.sizeimage;
+ stride = format->fmt.pix.bytesperline;
+
+ if (info->stride[0] != stride)
+ standard_stride = FALSE;
+
+ gst_v4l2_object_extrapolate_info (v4l2object, info, align, stride);
+ }
+
+ /* adjust the offset to take into account left and top */
+ if (GST_VIDEO_FORMAT_INFO_IS_TILED (finfo)) {
+ if ((align->padding_left + align->padding_top) > 0)
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Left and top padding is not permitted for tiled formats");
+ } else {
+ for (i = 0; i < finfo->n_planes; i++) {
+ gint vedge, hedge;
+
+ /* FIXME we assume plane == component, as this is true for all formats we
+ * currently support. */
+
+ hedge = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (finfo, i, align->padding_left);
+ vedge = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (finfo, i, align->padding_top);
+
+ info->offset[i] += (vedge * info->stride[i]) +
+ (hedge * GST_VIDEO_INFO_COMP_PSTRIDE (info, i));
+ }
+ }
+
+ store_info:
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Got sizeimage %" G_GSIZE_FORMAT,
+ info->size);
+
+ /* to avoid copies we need video meta if there is padding */
+ v4l2object->need_video_meta =
+ ((align->padding_top + align->padding_left + align->padding_right +
+ align->padding_bottom) != 0);
+
+ /* ... or if stride is non "standard" */
+ if (!standard_stride)
+ v4l2object->need_video_meta = TRUE;
+
+ /* ... or also video meta if we use multiple, non-contiguous, planes */
+ if (v4l2object->n_v4l2_planes > 1)
+ v4l2object->need_video_meta = TRUE;
+
+ v4l2object->info = *info;
+ v4l2object->align = *align;
+ v4l2object->format = *format;
+ v4l2object->fmtdesc = fmtdesc;
+
+ /* if we have a framerate pre-calculate duration */
+ if (info->fps_n > 0 && info->fps_d > 0) {
+ v4l2object->duration = gst_util_uint64_scale_int (GST_SECOND, info->fps_d,
+ info->fps_n);
+ if (GST_VIDEO_INFO_INTERLACE_MODE (info) ==
+ GST_VIDEO_INTERLACE_MODE_ALTERNATE)
+ v4l2object->duration /= 2;
+ } else {
+ v4l2object->duration = GST_CLOCK_TIME_NONE;
+ }
+ }
+
+ gint
+ gst_v4l2_object_extrapolate_stride (const GstVideoFormatInfo * finfo,
+ gint plane, gint stride)
+ {
+ gint estride;
+
+ switch (finfo->format) {
+ case GST_VIDEO_FORMAT_NV12:
+ case GST_VIDEO_FORMAT_NV12_64Z32:
+ case GST_VIDEO_FORMAT_NV21:
+ case GST_VIDEO_FORMAT_NV16:
+ case GST_VIDEO_FORMAT_NV61:
+ case GST_VIDEO_FORMAT_NV24:
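+ /* semi-planar: the chroma plane interleaves two components, so scale the
+ * width and then double it; e.g. for NV12 the 2x horizontal subsampling
+ * and the 2x interleaving cancel out, giving both planes the same stride */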
+ estride = (plane == 0 ? 1 : 2) *
+ GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (finfo, plane, stride);
+ break;
+ default:
+ estride = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (finfo, plane, stride);
+ break;
+ }
+
+ return estride;
+ }
+
+ static enum v4l2_field
+ get_v4l2_field_for_info (GstVideoInfo * info)
+ {
+ if (!GST_VIDEO_INFO_IS_INTERLACED (info))
+ return V4L2_FIELD_NONE;
+
+ if (GST_VIDEO_INFO_INTERLACE_MODE (info) ==
+ GST_VIDEO_INTERLACE_MODE_ALTERNATE)
+ return V4L2_FIELD_ALTERNATE;
+
+ switch (GST_VIDEO_INFO_FIELD_ORDER (info)) {
+ case GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST:
+ return V4L2_FIELD_INTERLACED_TB;
+ case GST_VIDEO_FIELD_ORDER_BOTTOM_FIELD_FIRST:
+ return V4L2_FIELD_INTERLACED_BT;
+ case GST_VIDEO_FIELD_ORDER_UNKNOWN:
+ default:
+ return V4L2_FIELD_INTERLACED;
+ }
+ }
+
+ static gboolean
+ gst_v4l2_video_colorimetry_matches (const GstVideoColorimetry * cinfo,
+ GstCaps * caps)
+ {
+ GstVideoInfo info;
+ static const GstVideoColorimetry ci_likely_jpeg = {
+ GST_VIDEO_COLOR_RANGE_0_255, GST_VIDEO_COLOR_MATRIX_BT601,
+ GST_VIDEO_TRANSFER_UNKNOWN, GST_VIDEO_COLOR_PRIMARIES_UNKNOWN
+ };
+ static const GstVideoColorimetry ci_jpeg = {
+ GST_VIDEO_COLOR_RANGE_0_255, GST_VIDEO_COLOR_MATRIX_BT601,
+ GST_VIDEO_TRANSFER_SRGB, GST_VIDEO_COLOR_PRIMARIES_BT709
+ };
+
+ if (!gst_video_info_from_caps (&info, caps))
+ return FALSE;
+
+ /* if colorimetry in caps is unknown, use the default one */
+ if (info.colorimetry.primaries == GST_VIDEO_COLOR_PRIMARIES_UNKNOWN)
+ info.colorimetry.primaries = cinfo->primaries;
+ if (info.colorimetry.range == GST_VIDEO_COLOR_RANGE_UNKNOWN)
+ info.colorimetry.range = cinfo->range;
+ if (info.colorimetry.matrix == GST_VIDEO_COLOR_MATRIX_UNKNOWN)
+ info.colorimetry.matrix = cinfo->matrix;
+ if (info.colorimetry.transfer == GST_VIDEO_TRANSFER_UNKNOWN)
+ info.colorimetry.transfer = cinfo->transfer;
+
+ if (gst_video_colorimetry_is_equal (&info.colorimetry, cinfo))
+ return TRUE;
+
+ /* Allow 1:4:0:0 (produced by jpegdec) if the device expects 1:4:7:1 */
+ if (gst_video_colorimetry_is_equal (&info.colorimetry, &ci_likely_jpeg)
+ && gst_video_colorimetry_is_equal (cinfo, &ci_jpeg))
+ return TRUE;
+
+ return FALSE;
+ }
+
+ static const gchar *
+ field_to_str (enum v4l2_field f)
+ {
+ switch (f) {
+ case V4L2_FIELD_ANY:
+ return "any";
+ case V4L2_FIELD_NONE:
+ return "none";
+ case V4L2_FIELD_TOP:
+ return "top";
+ case V4L2_FIELD_BOTTOM:
+ return "bottom";
+ case V4L2_FIELD_INTERLACED:
+ return "interlaced";
+ case V4L2_FIELD_SEQ_TB:
+ return "seq-tb";
+ case V4L2_FIELD_SEQ_BT:
+ return "seq-bt";
+ case V4L2_FIELD_ALTERNATE:
+ return "alternate";
+ case V4L2_FIELD_INTERLACED_TB:
+ return "interlaced-tb";
+ case V4L2_FIELD_INTERLACED_BT:
+ return "interlaced-bt";
+ }
+
+ return "unknown";
+ }
+
+ static gboolean
+ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps,
+ gboolean try_only, GstV4l2Error * error)
+ {
+ gint fd = v4l2object->video_fd;
+ struct v4l2_format format;
+ struct v4l2_streamparm streamparm;
+ enum v4l2_field field;
+ guint32 pixelformat;
+ struct v4l2_fmtdesc *fmtdesc;
+ GstVideoInfo info;
+ GstVideoAlignment align;
+ gint width, height, fps_n, fps_d;
+ gint n_v4l_planes;
+ gint i = 0;
+ gboolean is_mplane;
+ enum v4l2_colorspace colorspace = 0;
+ enum v4l2_quantization range = 0;
+ enum v4l2_ycbcr_encoding matrix = 0;
+ enum v4l2_xfer_func transfer = 0;
+ GstStructure *s;
+ gboolean disable_interlacing = FALSE;
+ gboolean disable_colorimetry = FALSE;
+
+ g_return_val_if_fail (!v4l2object->skip_try_fmt_probes ||
+ gst_caps_is_writable (caps), FALSE);
+
+ GST_V4L2_CHECK_OPEN (v4l2object);
+ if (!try_only)
+ GST_V4L2_CHECK_NOT_ACTIVE (v4l2object);
+
+ is_mplane = V4L2_TYPE_IS_MULTIPLANAR (v4l2object->type);
+
+ gst_video_info_init (&info);
+ gst_video_alignment_reset (&align);
+ v4l2object->transfer = GST_VIDEO_TRANSFER_UNKNOWN;
+
+ if (!gst_v4l2_object_get_caps_info (v4l2object, caps, &fmtdesc, &info))
+ goto invalid_caps;
+
+ pixelformat = fmtdesc->pixelformat;
+ width = GST_VIDEO_INFO_WIDTH (&info);
+ height = GST_VIDEO_INFO_FIELD_HEIGHT (&info);
+ fps_n = GST_VIDEO_INFO_FPS_N (&info);
+ fps_d = GST_VIDEO_INFO_FPS_D (&info);
+
+ /* if encoded format (GST_VIDEO_INFO_N_PLANES returns 0)
+ * or if contiguous is preferred */
+ n_v4l_planes = GST_VIDEO_INFO_N_PLANES (&info);
+ if (!n_v4l_planes || !v4l2object->prefered_non_contiguous)
+ n_v4l_planes = 1;
+
+ field = get_v4l2_field_for_info (&info);
+ if (field != V4L2_FIELD_NONE)
+ GST_DEBUG_OBJECT (v4l2object->element, "interlaced video");
+ else
+ GST_DEBUG_OBJECT (v4l2object->element, "progressive video");
+
+ /* We first pick the main colorspace from the primaries */
+ switch (info.colorimetry.primaries) {
+ case GST_VIDEO_COLOR_PRIMARIES_BT709:
+ /* There are two colorspaces using these primaries; use the range to
+ * differentiate */
+ if (info.colorimetry.range == GST_VIDEO_COLOR_RANGE_16_235)
+ colorspace = V4L2_COLORSPACE_REC709;
+ else
+ colorspace = V4L2_COLORSPACE_SRGB;
+ break;
+ case GST_VIDEO_COLOR_PRIMARIES_BT2020:
+ colorspace = V4L2_COLORSPACE_BT2020;
+ break;
+ case GST_VIDEO_COLOR_PRIMARIES_BT470M:
+ colorspace = V4L2_COLORSPACE_470_SYSTEM_M;
+ break;
+ case GST_VIDEO_COLOR_PRIMARIES_BT470BG:
+ colorspace = V4L2_COLORSPACE_470_SYSTEM_BG;
+ break;
+ case GST_VIDEO_COLOR_PRIMARIES_SMPTE170M:
+ colorspace = V4L2_COLORSPACE_SMPTE170M;
+ break;
+ case GST_VIDEO_COLOR_PRIMARIES_SMPTE240M:
+ colorspace = V4L2_COLORSPACE_SMPTE240M;
+ break;
+
+ case GST_VIDEO_COLOR_PRIMARIES_FILM:
+ case GST_VIDEO_COLOR_PRIMARIES_UNKNOWN:
+ /* We don't know, we will guess */
+ break;
+
+ default:
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Unknown colorimetry primaries %d", info.colorimetry.primaries);
+ break;
+ }
+
+ switch (info.colorimetry.range) {
+ case GST_VIDEO_COLOR_RANGE_0_255:
+ range = V4L2_QUANTIZATION_FULL_RANGE;
+ break;
+ case GST_VIDEO_COLOR_RANGE_16_235:
+ range = V4L2_QUANTIZATION_LIM_RANGE;
+ break;
+ case GST_VIDEO_COLOR_RANGE_UNKNOWN:
+ /* We let the driver pick a default one */
+ break;
+ default:
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Unknown colorimetry range %d", info.colorimetry.range);
+ break;
+ }
+
+ switch (info.colorimetry.matrix) {
+ case GST_VIDEO_COLOR_MATRIX_RGB:
+ /* Unspecified, leave to default */
+ break;
+ /* FCC is about the same as BT601, just with less precision */
+ case GST_VIDEO_COLOR_MATRIX_FCC:
+ case GST_VIDEO_COLOR_MATRIX_BT601:
+ matrix = V4L2_YCBCR_ENC_601;
+ break;
+ case GST_VIDEO_COLOR_MATRIX_BT709:
+ matrix = V4L2_YCBCR_ENC_709;
+ break;
+ case GST_VIDEO_COLOR_MATRIX_SMPTE240M:
+ matrix = V4L2_YCBCR_ENC_SMPTE240M;
+ break;
+ case GST_VIDEO_COLOR_MATRIX_BT2020:
+ matrix = V4L2_YCBCR_ENC_BT2020;
+ break;
+ case GST_VIDEO_COLOR_MATRIX_UNKNOWN:
+ /* We let the driver pick a default one */
+ break;
+ default:
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Unknown colorimetry matrix %d", info.colorimetry.matrix);
+ break;
+ }
+
+ switch (info.colorimetry.transfer) {
+ case GST_VIDEO_TRANSFER_GAMMA18:
+ case GST_VIDEO_TRANSFER_GAMMA20:
+ case GST_VIDEO_TRANSFER_GAMMA22:
+ case GST_VIDEO_TRANSFER_GAMMA28:
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "GAMMA 18, 20, 22, 28 transfer functions not supported");
+ /* fallthrough */
+ case GST_VIDEO_TRANSFER_GAMMA10:
+ transfer = V4L2_XFER_FUNC_NONE;
+ break;
+ case GST_VIDEO_TRANSFER_SMPTE2084:
+ transfer = V4L2_XFER_FUNC_SMPTE2084;
+ break;
+ case GST_VIDEO_TRANSFER_BT601:
+ case GST_VIDEO_TRANSFER_BT2020_12:
+ case GST_VIDEO_TRANSFER_BT2020_10:
+ case GST_VIDEO_TRANSFER_BT709:
+ v4l2object->transfer = info.colorimetry.transfer;
+ transfer = V4L2_XFER_FUNC_709;
+ break;
+ case GST_VIDEO_TRANSFER_SMPTE240M:
+ transfer = V4L2_XFER_FUNC_SMPTE240M;
+ break;
+ case GST_VIDEO_TRANSFER_SRGB:
+ transfer = V4L2_XFER_FUNC_SRGB;
+ break;
+ case GST_VIDEO_TRANSFER_LOG100:
+ case GST_VIDEO_TRANSFER_LOG316:
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "LOG 100, 316 transfer functions not supported");
+ /* FIXME No known sensible default, maybe AdobeRGB ? */
+ break;
+ case GST_VIDEO_TRANSFER_UNKNOWN:
+ /* We let the driver pick a default one */
+ break;
+ default:
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Unknown colorimetry transfer %d", info.colorimetry.transfer);
+ break;
+ }
+
+ if (colorspace == 0) {
+ /* Try to guess colorspace according to pixelformat and size */
+ if (GST_VIDEO_INFO_IS_YUV (&info)) {
+ if (range == V4L2_QUANTIZATION_FULL_RANGE
+ && matrix == V4L2_YCBCR_ENC_601 && transfer == 0) {
+ /* Full range BT.601 YCbCr encoding with unknown primaries and transfer
+ * function most likely is JPEG */
+ colorspace = V4L2_COLORSPACE_JPEG;
+ transfer = V4L2_XFER_FUNC_SRGB;
+ } else {
+ /* SD streams likely use SMPTE170M and HD streams REC709 */
+ if (width <= 720 && GST_VIDEO_INFO_HEIGHT (&info) <= 576)
+ colorspace = V4L2_COLORSPACE_SMPTE170M;
+ else
+ colorspace = V4L2_COLORSPACE_REC709;
+ }
+ } else if (GST_VIDEO_INFO_IS_RGB (&info)) {
+ colorspace = V4L2_COLORSPACE_SRGB;
+ transfer = V4L2_XFER_FUNC_NONE;
+ }
+ }
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Desired format %dx%d, format "
+ "%" GST_FOURCC_FORMAT " stride: %d", width, height,
+ GST_FOURCC_ARGS (pixelformat), GST_VIDEO_INFO_PLANE_STRIDE (&info, 0));
+
+ memset (&format, 0x00, sizeof (struct v4l2_format));
+ format.type = v4l2object->type;
+
+ if (is_mplane) {
+ format.type = v4l2object->type;
+ format.fmt.pix_mp.pixelformat = pixelformat;
+ format.fmt.pix_mp.width = width;
+ format.fmt.pix_mp.height = height;
+ format.fmt.pix_mp.field = field;
+ format.fmt.pix_mp.num_planes = n_v4l_planes;
+
+ /* try to ask our preferred stride but it's not a failure if not
+ * accepted */
+ for (i = 0; i < n_v4l_planes; i++) {
+ gint stride = GST_VIDEO_INFO_PLANE_STRIDE (&info, i);
+
+ if (GST_VIDEO_FORMAT_INFO_IS_TILED (info.finfo))
+ stride = GST_VIDEO_TILE_X_TILES (stride) <<
+ GST_VIDEO_FORMAT_INFO_TILE_WS (info.finfo);
+
+ format.fmt.pix_mp.plane_fmt[i].bytesperline = stride;
+ }
+
+ if (GST_VIDEO_INFO_FORMAT (&info) == GST_VIDEO_FORMAT_ENCODED)
+ format.fmt.pix_mp.plane_fmt[0].sizeimage = ENCODED_BUFFER_SIZE;
+ } else {
+ gint stride = GST_VIDEO_INFO_PLANE_STRIDE (&info, 0);
+
+ format.type = v4l2object->type;
+
+ format.fmt.pix.width = width;
+ format.fmt.pix.height = height;
+ format.fmt.pix.pixelformat = pixelformat;
+ format.fmt.pix.field = field;
+
+ if (GST_VIDEO_FORMAT_INFO_IS_TILED (info.finfo))
+ stride = GST_VIDEO_TILE_X_TILES (stride) <<
+ GST_VIDEO_FORMAT_INFO_TILE_WS (info.finfo);
+
+ /* try to ask our preferred stride */
+ format.fmt.pix.bytesperline = stride;
+
+ if (GST_VIDEO_INFO_FORMAT (&info) == GST_VIDEO_FORMAT_ENCODED)
+ format.fmt.pix.sizeimage = ENCODED_BUFFER_SIZE;
+ }
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Desired format is %dx%d, format "
+ "%" GST_FOURCC_FORMAT ", nb planes %d", format.fmt.pix.width,
+ format.fmt.pix_mp.height,
+ GST_FOURCC_ARGS (format.fmt.pix.pixelformat),
+ is_mplane ? format.fmt.pix_mp.num_planes : 1);
+
+ #ifndef GST_DISABLE_GST_DEBUG
+ if (is_mplane) {
+ for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, " stride %d",
+ format.fmt.pix_mp.plane_fmt[i].bytesperline);
+ } else {
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, " stride %d",
+ format.fmt.pix.bytesperline);
+ }
+ #endif
+
+ if (is_mplane) {
+ format.fmt.pix_mp.colorspace = colorspace;
+ format.fmt.pix_mp.quantization = range;
+ format.fmt.pix_mp.ycbcr_enc = matrix;
+ format.fmt.pix_mp.xfer_func = transfer;
+ } else {
+ format.fmt.pix.priv = V4L2_PIX_FMT_PRIV_MAGIC;
+ format.fmt.pix.colorspace = colorspace;
+ format.fmt.pix.quantization = range;
+ format.fmt.pix.ycbcr_enc = matrix;
+ format.fmt.pix.xfer_func = transfer;
+ }
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Desired colorspace is %d:%d:%d:%d",
+ colorspace, range, matrix, transfer);
+
+ if (try_only) {
+ if (v4l2object->ioctl (fd, VIDIOC_TRY_FMT, &format) < 0)
+ goto try_fmt_failed;
+ } else {
+ if (v4l2object->ioctl (fd, VIDIOC_S_FMT, &format) < 0)
+ goto set_fmt_failed;
+ }
+
+ if (is_mplane) {
+ colorspace = format.fmt.pix_mp.colorspace;
+ range = format.fmt.pix_mp.quantization;
+ matrix = format.fmt.pix_mp.ycbcr_enc;
+ transfer = format.fmt.pix_mp.xfer_func;
+ } else {
+ colorspace = format.fmt.pix.colorspace;
+ range = format.fmt.pix.quantization;
+ matrix = format.fmt.pix.ycbcr_enc;
+ transfer = format.fmt.pix.xfer_func;
+ }
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Got format of %dx%d, format "
+ "%" GST_FOURCC_FORMAT ", nb planes %d, colorspace %d:%d:%d:%d field: %s",
+ format.fmt.pix.width, format.fmt.pix_mp.height,
+ GST_FOURCC_ARGS (format.fmt.pix.pixelformat),
+ is_mplane ? format.fmt.pix_mp.num_planes : 1,
+ colorspace, range, matrix, transfer, field_to_str (format.fmt.pix.field));
+
+ #ifndef GST_DISABLE_GST_DEBUG
+ if (is_mplane) {
+ for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, " stride %d, sizeimage %d",
+ format.fmt.pix_mp.plane_fmt[i].bytesperline,
+ format.fmt.pix_mp.plane_fmt[i].sizeimage);
+ } else {
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, " stride %d, sizeimage %d",
+ format.fmt.pix.bytesperline, format.fmt.pix.sizeimage);
+ }
+ #endif
+
+ if (format.fmt.pix.pixelformat != pixelformat)
+ goto invalid_pixelformat;
+
+ /* Only negotiate size with raw data.
+ * For some codecs the dimensions are *not* in the bitstream (IIRC VC1 in
+ * ASF mode, for example), and there is also no reason for a driver to
+ * change the size. */
+ if (info.finfo->format != GST_VIDEO_FORMAT_ENCODED) {
+ /* We can crop larger images */
+ if (format.fmt.pix.width < width || format.fmt.pix.height < height)
+ goto invalid_dimensions;
+
+ /* Note, this will be adjusted if upstream has non-centered cropping. */
+ align.padding_top = 0;
+ align.padding_bottom = format.fmt.pix.height - height;
+ align.padding_left = 0;
+ align.padding_right = format.fmt.pix.width - width;
+ }
+
+ if (is_mplane && format.fmt.pix_mp.num_planes != n_v4l_planes)
+ goto invalid_planes;
+
+ /* used to check colorimetry and interlace mode fields presence */
+ s = gst_caps_get_structure (caps, 0);
+
+ if (gst_v4l2_object_get_interlace_mode (format.fmt.pix.field,
+ &info.interlace_mode)) {
+ if (gst_structure_has_field (s, "interlace-mode")) {
+ if (format.fmt.pix.field != field)
+ goto invalid_field;
+ }
+ } else {
+ /* The driver (or libv4l2) is misbehaving, just ignore interlace-mode from
+ * the TRY_FMT */
+ disable_interlacing = TRUE;
+ if (gst_structure_has_field (s, "interlace-mode"))
+ gst_structure_remove_field (s, "interlace-mode");
+ }
+
+ if (gst_v4l2_object_get_colorspace (v4l2object, &format, &info.colorimetry)) {
+ if (gst_structure_has_field (s, "colorimetry")) {
+ if (!gst_v4l2_video_colorimetry_matches (&info.colorimetry, caps))
+ goto invalid_colorimetry;
+ }
+ } else {
+ /* The driver (or libv4l2) is misbehaving, just ignore colorimetry from
+ * the TRY_FMT */
+ disable_colorimetry = TRUE;
+ if (gst_structure_has_field (s, "colorimetry"))
+ gst_structure_remove_field (s, "colorimetry");
+ }
+
+ /* In case we have skipped the try_fmt probes, we'll need to set the
+ * interlace-mode and colorimetry back into the caps. */
+ if (v4l2object->skip_try_fmt_probes) {
+ if (!disable_interlacing && !gst_structure_has_field (s, "interlace-mode")) {
+ gst_structure_set (s, "interlace-mode", G_TYPE_STRING,
+ gst_video_interlace_mode_to_string (info.interlace_mode), NULL);
+ }
+ if (!disable_colorimetry && !gst_structure_has_field (s, "colorimetry")) {
+ gchar *str = gst_video_colorimetry_to_string (&info.colorimetry);
+ gst_structure_set (s, "colorimetry", G_TYPE_STRING, str, NULL);
+ g_free (str);
+ }
+ }
+
+ if (try_only) /* good enough for trying only */
+ return TRUE;
+
+ if (GST_VIDEO_INFO_HAS_ALPHA (&info)) {
+ struct v4l2_control ctl = { 0, };
+ ctl.id = V4L2_CID_ALPHA_COMPONENT;
+ ctl.value = 0xff;
+
+ if (v4l2object->ioctl (fd, VIDIOC_S_CTRL, &ctl) < 0)
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Failed to set alpha component value");
+ }
+
+ /* Is there a reason we require the caller to always specify a framerate? */
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Desired framerate: %u/%u", fps_n,
+ fps_d);
+
+ memset (&streamparm, 0x00, sizeof (struct v4l2_streamparm));
+ streamparm.type = v4l2object->type;
+
+ if (v4l2object->ioctl (fd, VIDIOC_G_PARM, &streamparm) < 0)
+ goto get_parm_failed;
+
+ if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE
+ || v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
+ GST_VIDEO_INFO_FPS_N (&info) =
+ streamparm.parm.capture.timeperframe.denominator;
+ GST_VIDEO_INFO_FPS_D (&info) =
+ streamparm.parm.capture.timeperframe.numerator;
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Got capture framerate: %u/%u",
+ streamparm.parm.capture.timeperframe.denominator,
+ streamparm.parm.capture.timeperframe.numerator);
+
+ /* We used to skip frame rate setup if the camera was already setup
+ * with the requested frame rate. This breaks some cameras though,
+ * causing them to not output data (several models of Thinkpad cameras
+ * have this problem at least).
+ * So, don't skip. */
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "Setting capture framerate to %u/%u",
+ fps_n, fps_d);
+ /* We want to change the frame rate, so check whether we can. Some cheap USB
+ * cameras don't have the capability */
+ if ((streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) == 0) {
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
+ "Not setting capture framerate (not supported)");
+ goto done;
+ }
+
+ /* Note: V4L2 wants the frame interval, we have the frame rate */
+ streamparm.parm.capture.timeperframe.numerator = fps_d;
+ streamparm.parm.capture.timeperframe.denominator = fps_n;
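+ /* e.g. (illustrative) a requested rate of 30/1 fps becomes a
+ * timeperframe interval of 1/30 s */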
+
+ /* some cheap USB cams won't accept any change */
+ if (v4l2object->ioctl (fd, VIDIOC_S_PARM, &streamparm) < 0)
+ goto set_parm_failed;
+
+ if (streamparm.parm.capture.timeperframe.numerator > 0 &&
+ streamparm.parm.capture.timeperframe.denominator > 0) {
+ /* get new values */
+ fps_d = streamparm.parm.capture.timeperframe.numerator;
+ fps_n = streamparm.parm.capture.timeperframe.denominator;
+
+ GST_INFO_OBJECT (v4l2object->dbg_obj, "Set capture framerate to %u/%u",
+ fps_n, fps_d);
+ } else {
+ /* fix v4l2 capture driver to provide framerate values */
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Reuse caps framerate %u/%u - fix v4l2 capture driver", fps_n, fps_d);
+ }
+
+ GST_VIDEO_INFO_FPS_N (&info) = fps_n;
+ GST_VIDEO_INFO_FPS_D (&info) = fps_d;
+ } else if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT
+ || v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
+ GST_VIDEO_INFO_FPS_N (&info) =
+ streamparm.parm.output.timeperframe.denominator;
+ GST_VIDEO_INFO_FPS_D (&info) =
+ streamparm.parm.output.timeperframe.numerator;
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Got output framerate: %u/%u",
+ streamparm.parm.output.timeperframe.denominator,
+ streamparm.parm.output.timeperframe.numerator);
+
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "Setting output framerate to %u/%u",
+ fps_n, fps_d);
+ if ((streamparm.parm.output.capability & V4L2_CAP_TIMEPERFRAME) == 0) {
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
+ "Not setting output framerate (not supported)");
+ goto done;
+ }
+
+ /* Note: V4L2 wants the frame interval, we have the frame rate */
+ streamparm.parm.output.timeperframe.numerator = fps_d;
+ streamparm.parm.output.timeperframe.denominator = fps_n;
+
+ if (v4l2object->ioctl (fd, VIDIOC_S_PARM, &streamparm) < 0)
+ goto set_parm_failed;
+
+ if (streamparm.parm.output.timeperframe.numerator > 0 &&
+ streamparm.parm.output.timeperframe.denominator > 0) {
+ /* get new values */
+ fps_d = streamparm.parm.output.timeperframe.numerator;
+ fps_n = streamparm.parm.output.timeperframe.denominator;
+
+ GST_INFO_OBJECT (v4l2object->dbg_obj, "Set output framerate to %u/%u",
+ fps_n, fps_d);
+ } else {
+ /* fix v4l2 output driver to provide framerate values */
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Reuse caps framerate %u/%u - fix v4l2 output driver", fps_n, fps_d);
+ }
+
+ GST_VIDEO_INFO_FPS_N (&info) = fps_n;
+ GST_VIDEO_INFO_FPS_D (&info) = fps_d;
+ }
+
+ done:
+ /* add a boolean return, so we can fail on driver bugs */
+ gst_v4l2_object_save_format (v4l2object, fmtdesc, &format, &info, &align);
+
+ /* now configure the pool */
+ if (!gst_v4l2_object_setup_pool (v4l2object, caps))
+ goto pool_failed;
+
+ return TRUE;
+
+ /* ERRORS */
+ invalid_caps:
+ {
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "can't parse caps %" GST_PTR_FORMAT,
+ caps);
+
+ GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
+ (_("Invalid caps")), ("Can't parse caps %" GST_PTR_FORMAT, caps));
+ return FALSE;
+ }
+ try_fmt_failed:
+ {
+ if (errno == EINVAL) {
+ GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
+ (_("Device '%s' has no supported format"), v4l2object->videodev),
+ ("Call to TRY_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
+ GST_FOURCC_ARGS (pixelformat), width, height,
+ g_strerror (errno)));
+ } else {
+ GST_V4L2_ERROR (error, RESOURCE, FAILED,
+ (_("Device '%s' failed during initialization"),
+ v4l2object->videodev),
+ ("Call to TRY_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
+ GST_FOURCC_ARGS (pixelformat), width, height,
+ g_strerror (errno)));
+ }
+ return FALSE;
+ }
+ set_fmt_failed:
+ {
+ if (errno == EBUSY) {
+ GST_V4L2_ERROR (error, RESOURCE, BUSY,
+ (_("Device '%s' is busy"), v4l2object->videodev),
+ ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
+ GST_FOURCC_ARGS (pixelformat), width, height,
+ g_strerror (errno)));
+ } else if (errno == EINVAL) {
+ GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
+ (_("Device '%s' has no supported format"), v4l2object->videodev),
+ ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
+ GST_FOURCC_ARGS (pixelformat), width, height,
+ g_strerror (errno)));
+ } else {
+ GST_V4L2_ERROR (error, RESOURCE, FAILED,
+ (_("Device '%s' failed during initialization"),
+ v4l2object->videodev),
+ ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
+ GST_FOURCC_ARGS (pixelformat), width, height,
+ g_strerror (errno)));
+ }
+ return FALSE;
+ }
+ invalid_dimensions:
+ {
+ GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
+ (_("Device '%s' cannot capture at %dx%d"),
+ v4l2object->videodev, width, height),
+ ("Tried to capture at %dx%d, but device returned size %dx%d",
+ width, height, format.fmt.pix.width, format.fmt.pix.height));
+ return FALSE;
+ }
+ invalid_pixelformat:
+ {
+ GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
+ (_("Device '%s' cannot capture in the specified format"),
+ v4l2object->videodev),
+ ("Tried to capture in %" GST_FOURCC_FORMAT
+ ", but device returned format" " %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (pixelformat),
+ GST_FOURCC_ARGS (format.fmt.pix.pixelformat)));
+ return FALSE;
+ }
+ invalid_planes:
+ {
+ GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
+ (_("Device '%s' does support non-contiguous planes"),
+ v4l2object->videodev),
+ ("Device wants %d planes", format.fmt.pix_mp.num_planes));
+ return FALSE;
+ }
+ invalid_field:
+ {
+ enum v4l2_field wanted_field;
+
+ if (is_mplane)
+ wanted_field = format.fmt.pix_mp.field;
+ else
+ wanted_field = format.fmt.pix.field;
+
+ GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
+ (_("Device '%s' does not support %s interlacing"),
+ v4l2object->videodev,
+ field == V4L2_FIELD_NONE ? "progressive" : "interleaved"),
+ ("Device wants %s interlacing",
+ wanted_field == V4L2_FIELD_NONE ? "progressive" : "interleaved"));
+ return FALSE;
+ }
+ invalid_colorimetry:
+ {
+ gchar *wanted_colorimetry;
+
+ wanted_colorimetry = gst_video_colorimetry_to_string (&info.colorimetry);
+
+ GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
+ (_("Device '%s' does not support %s colorimetry"),
+ v4l2object->videodev, gst_structure_get_string (s, "colorimetry")),
+ ("Device wants %s colorimetry", wanted_colorimetry));
+
+ g_free (wanted_colorimetry);
+ return FALSE;
+ }
+ get_parm_failed:
+ {
+ /* it's possible that this call is not supported */
+ if (errno != EINVAL && errno != ENOTTY) {
+ GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
+ (_("Could not get parameters on device '%s'"),
+ v4l2object->videodev), GST_ERROR_SYSTEM);
+ }
+ goto done;
+ }
+ set_parm_failed:
+ {
+ GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
+ (_("Video device did not accept new frame rate setting.")),
+ GST_ERROR_SYSTEM);
+ goto done;
+ }
+ pool_failed:
+ {
+ /* setup_pool already sent the error */
+ return FALSE;
+ }
+ }
+
+ gboolean
+ gst_v4l2_object_set_format (GstV4l2Object * v4l2object, GstCaps * caps,
+ GstV4l2Error * error)
+ {
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Setting format to %" GST_PTR_FORMAT,
+ caps);
+ return gst_v4l2_object_set_format_full (v4l2object, caps, FALSE, error);
+ }
+
+ gboolean
+ gst_v4l2_object_try_format (GstV4l2Object * v4l2object, GstCaps * caps,
+ GstV4l2Error * error)
+ {
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Trying format %" GST_PTR_FORMAT,
+ caps);
+ return gst_v4l2_object_set_format_full (v4l2object, caps, TRUE, error);
+ }
+
+ /**
+ * gst_v4l2_object_acquire_format:
+ * @v4l2object: the object
+ * @info: a GstVideoInfo to be filled
+ *
+ * Acquire the driver chosen format. This is useful in decoder or encoder elements where
+ * the output format is chosen by the HW.
+ *
+ * Returns: %TRUE on success, %FALSE on failure.
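+ *
+ * A minimal, hypothetical call site in a decoder element (the
+ * self->v4l2capture field name is illustrative, not from this patch):
+ * |[
+ *   GstVideoInfo info;
+ *
+ *   if (!gst_v4l2_object_acquire_format (self->v4l2capture, &info))
+ *     return GST_FLOW_NOT_NEGOTIATED;
+ * ]|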
+ */
+ gboolean
+ gst_v4l2_object_acquire_format (GstV4l2Object * v4l2object, GstVideoInfo * info)
+ {
+ struct v4l2_fmtdesc *fmtdesc;
+ struct v4l2_format fmt;
+ struct v4l2_crop crop;
+ struct v4l2_selection sel;
+ struct v4l2_rect *r = NULL;
+ GstVideoFormat format;
+ guint width, height;
+ GstVideoAlignment align;
+ GstVideoInterlaceMode interlace_mode;
+
+ gst_video_info_init (info);
+ gst_video_alignment_reset (&align);
+
+ memset (&fmt, 0x00, sizeof (struct v4l2_format));
+ fmt.type = v4l2object->type;
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_FMT, &fmt) < 0)
+ goto get_fmt_failed;
+
+ fmtdesc = gst_v4l2_object_get_format_from_fourcc (v4l2object,
+ fmt.fmt.pix.pixelformat);
+ if (fmtdesc == NULL)
+ goto unsupported_format;
+
+ /* No need to care about mplane, the first four fields are the same */
+ format = gst_v4l2_object_v4l2fourcc_to_video_format (fmt.fmt.pix.pixelformat);
+
+ /* fails if we cannot translate fmt.pix.pixelformat to a GstVideoFormat */
+ if (format == GST_VIDEO_FORMAT_UNKNOWN)
+ goto unsupported_format;
+
+ if (fmt.fmt.pix.width == 0 || fmt.fmt.pix.height == 0)
+ goto invalid_dimensions;
+
+ width = fmt.fmt.pix.width;
+ height = fmt.fmt.pix.height;
+
+ /* Use the default compose rectangle */
+ memset (&sel, 0, sizeof (struct v4l2_selection));
+ sel.type = v4l2object->type;
+ sel.target = V4L2_SEL_TGT_COMPOSE_DEFAULT;
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_SELECTION, &sel) >= 0) {
+ r = &sel.r;
+ } else {
+ /* For ancient kernels, fall back to G_CROP */
+ memset (&crop, 0, sizeof (struct v4l2_crop));
+ crop.type = v4l2object->type;
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_CROP, &crop) >= 0)
+ r = &crop.c;
+ }
+ if (r) {
+ align.padding_left = r->left;
+ align.padding_top = r->top;
+ align.padding_right = width - r->width - r->left;
+ align.padding_bottom = height - r->height - r->top;
+ width = r->width;
+ height = r->height;
+ }
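+ /* For example (illustrative): a 1920x1088 coded buffer with a compose
+ * rectangle of (0, 0, 1920, 1080) yields padding_bottom = 8 and a
+ * reported size of 1920x1080. */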
+
+ switch (fmt.fmt.pix.field) {
+ case V4L2_FIELD_ANY:
+ case V4L2_FIELD_NONE:
+ interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
+ break;
+ case V4L2_FIELD_INTERLACED:
+ case V4L2_FIELD_INTERLACED_TB:
+ case V4L2_FIELD_INTERLACED_BT:
+ interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
+ break;
+ case V4L2_FIELD_ALTERNATE:
+ interlace_mode = GST_VIDEO_INTERLACE_MODE_ALTERNATE;
+ break;
+ default:
+ goto unsupported_field;
+ }
+
+ gst_video_info_set_interlaced_format (info, format, interlace_mode, width,
+ height);
+
+ gst_v4l2_object_get_colorspace (v4l2object, &fmt, &info->colorimetry);
+ gst_v4l2_object_get_streamparm (v4l2object, info);
+ if ((info->fps_n == 0 && v4l2object->info.fps_d != 0)
+ && (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE
+ || v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)) {
+ info->fps_d = v4l2object->info.fps_d;
+ info->fps_n = v4l2object->info.fps_n;
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Set capture fps to %d/%d",
+ info->fps_n, info->fps_d);
+ }
+
+ gst_v4l2_object_save_format (v4l2object, fmtdesc, &fmt, info, &align);
+
+ /* Shall we set up the pool? */
+
+ return TRUE;
+
+ get_fmt_failed:
+ {
+ GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
+ (_("Video device did not provide output format.")), GST_ERROR_SYSTEM);
+ return FALSE;
+ }
+ invalid_dimensions:
+ {
+ GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
+ (_("Video device returned invalid dimensions.")),
+ ("Expected non 0 dimensions, got %dx%d", fmt.fmt.pix.width,
+ fmt.fmt.pix.height));
+ return FALSE;
+ }
+ unsupported_field:
+ {
+ GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
+ (_("Video device uses an unsupported interlacing method.")),
+ ("V4L2 field type %d not supported", fmt.fmt.pix.field));
+ return FALSE;
+ }
+ unsupported_format:
+ {
+ GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
+ (_("Video device uses an unsupported pixel format.")),
+ ("V4L2 format %" GST_FOURCC_FORMAT " not supported",
+ GST_FOURCC_ARGS (fmt.fmt.pix.pixelformat)));
+ return FALSE;
+ }
+ }
+
+ gboolean
+ gst_v4l2_object_set_crop (GstV4l2Object * obj)
+ {
+ struct v4l2_selection sel = { 0 };
+ struct v4l2_crop crop = { 0 };
+
+ sel.type = obj->type;
+ sel.target = V4L2_SEL_TGT_CROP;
+ sel.flags = 0;
+ sel.r.left = obj->align.padding_left;
+ sel.r.top = obj->align.padding_top;
+ sel.r.width = obj->info.width;
+ sel.r.height = GST_VIDEO_INFO_FIELD_HEIGHT (&obj->info);
+
+ crop.type = obj->type;
+ crop.c = sel.r;
+
+ if (obj->align.padding_left + obj->align.padding_top +
+ obj->align.padding_right + obj->align.padding_bottom == 0) {
+ GST_DEBUG_OBJECT (obj->dbg_obj, "no cropping needed");
+ return TRUE;
+ }
+
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "Desired cropping left %u, top %u, size %ux%u", crop.c.left, crop.c.top,
+ crop.c.width, crop.c.height);
+
+ if (obj->ioctl (obj->video_fd, VIDIOC_S_SELECTION, &sel) < 0) {
+ if (errno != ENOTTY) {
+ GST_WARNING_OBJECT (obj->dbg_obj,
+ "Failed to set crop rectangle with VIDIOC_S_SELECTION: %s",
+ g_strerror (errno));
+ return FALSE;
+ } else {
+ if (obj->ioctl (obj->video_fd, VIDIOC_S_CROP, &crop) < 0) {
+ GST_WARNING_OBJECT (obj->dbg_obj, "VIDIOC_S_CROP failed");
+ return FALSE;
+ }
+
+ if (obj->ioctl (obj->video_fd, VIDIOC_G_CROP, &crop) < 0) {
+ GST_WARNING_OBJECT (obj->dbg_obj, "VIDIOC_G_CROP failed");
+ return FALSE;
+ }
+
+ sel.r = crop.c;
+ }
+ }
+
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "Got cropping left %u, top %u, size %ux%u", crop.c.left, crop.c.top,
+ crop.c.width, crop.c.height);
+
+ return TRUE;
+ }
+
+ gboolean
+ gst_v4l2_object_caps_equal (GstV4l2Object * v4l2object, GstCaps * caps)
+ {
+ GstStructure *config;
+ GstCaps *oldcaps;
+ gboolean ret;
+
+ if (!v4l2object->pool)
+ return FALSE;
+
+ config = gst_buffer_pool_get_config (v4l2object->pool);
+ gst_buffer_pool_config_get_params (config, &oldcaps, NULL, NULL, NULL);
+
+ ret = oldcaps && gst_caps_is_equal (caps, oldcaps);
+
+ gst_structure_free (config);
+
+ return ret;
+ }
+
+ gboolean
+ gst_v4l2_object_caps_is_subset (GstV4l2Object * v4l2object, GstCaps * caps)
+ {
+ GstStructure *config;
+ GstCaps *oldcaps;
+ gboolean ret;
+
+ if (!v4l2object->pool)
+ return FALSE;
+
+ config = gst_buffer_pool_get_config (v4l2object->pool);
+ gst_buffer_pool_config_get_params (config, &oldcaps, NULL, NULL, NULL);
+
+ ret = oldcaps && gst_caps_is_subset (oldcaps, caps);
+
+ gst_structure_free (config);
+
+ return ret;
+ }
+
+ GstCaps *
+ gst_v4l2_object_get_current_caps (GstV4l2Object * v4l2object)
+ {
+ GstStructure *config;
+ GstCaps *oldcaps;
+
+ if (!v4l2object->pool)
+ return NULL;
+
+ config = gst_buffer_pool_get_config (v4l2object->pool);
+ gst_buffer_pool_config_get_params (config, &oldcaps, NULL, NULL, NULL);
+
+ if (oldcaps)
+ gst_caps_ref (oldcaps);
+
+ gst_structure_free (config);
+
+ return oldcaps;
+ }
+
+ gboolean
+ gst_v4l2_object_unlock (GstV4l2Object * v4l2object)
+ {
+ gboolean ret = TRUE;
+
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "start flushing");
+
+ if (v4l2object->pool && gst_buffer_pool_is_active (v4l2object->pool))
+ gst_buffer_pool_set_flushing (v4l2object->pool, TRUE);
+
+ return ret;
+ }
+
+ gboolean
+ gst_v4l2_object_unlock_stop (GstV4l2Object * v4l2object)
+ {
+ gboolean ret = TRUE;
+
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "stop flushing");
+
+ if (v4l2object->pool && gst_buffer_pool_is_active (v4l2object->pool))
+ gst_buffer_pool_set_flushing (v4l2object->pool, FALSE);
+
+ return ret;
+ }
+
+ gboolean
+ gst_v4l2_object_stop (GstV4l2Object * v4l2object)
+ {
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "stopping");
+
+ if (!GST_V4L2_IS_OPEN (v4l2object))
+ goto done;
+ if (!GST_V4L2_IS_ACTIVE (v4l2object))
+ goto done;
+
+ if (v4l2object->pool) {
+ if (!gst_v4l2_buffer_pool_orphan (&v4l2object->pool)) {
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "deactivating pool");
+ gst_buffer_pool_set_active (v4l2object->pool, FALSE);
+ gst_object_unref (v4l2object->pool);
+ }
+ v4l2object->pool = NULL;
+ }
+
+ GST_V4L2_SET_INACTIVE (v4l2object);
+
+ done:
+ return TRUE;
+ }
+
+ GstCaps *
+ gst_v4l2_object_probe_caps (GstV4l2Object * v4l2object, GstCaps * filter)
+ {
+ GstCaps *ret;
+ GSList *walk;
+ GSList *formats;
+
+ formats = gst_v4l2_object_get_format_list (v4l2object);
+
+ ret = gst_caps_new_empty ();
+
+ if (v4l2object->keep_aspect && !v4l2object->par) {
+ struct v4l2_cropcap cropcap;
+
+ memset (&cropcap, 0, sizeof (cropcap));
+
+ cropcap.type = v4l2object->type;
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_CROPCAP, &cropcap) < 0) {
+ if (errno != ENOTTY)
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Failed to probe pixel aspect ratio with VIDIOC_CROPCAP: %s",
+ g_strerror (errno));
+ } else if (cropcap.pixelaspect.numerator && cropcap.pixelaspect.denominator) {
+ v4l2object->par = g_new0 (GValue, 1);
+ g_value_init (v4l2object->par, GST_TYPE_FRACTION);
+ gst_value_set_fraction (v4l2object->par, cropcap.pixelaspect.numerator,
+ cropcap.pixelaspect.denominator);
+ }
+ }
+
+ for (walk = formats; walk; walk = walk->next) {
+ struct v4l2_fmtdesc *format;
+ GstStructure *template;
+ GstCaps *tmp;
+
+ format = (struct v4l2_fmtdesc *) walk->data;
+
+ template = gst_v4l2_object_v4l2fourcc_to_bare_struct (format->pixelformat);
+
+ if (!template) {
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
+ "unknown format %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (format->pixelformat));
+ continue;
+ }
+
+ /* If we have a filter, check if we need to probe this format or not */
+ if (filter) {
+ GstCaps *format_caps = gst_caps_new_empty ();
+
+ gst_caps_append_structure (format_caps, gst_structure_copy (template));
+
+ if (!gst_caps_can_intersect (format_caps, filter)) {
+ gst_caps_unref (format_caps);
+ gst_structure_free (template);
+ continue;
+ }
+
+ gst_caps_unref (format_caps);
+ }
+
+ tmp = gst_v4l2_object_probe_caps_for_format (v4l2object,
+ format->pixelformat, template);
+ if (tmp) {
+ gst_caps_append (ret, tmp);
+
+ /* Add a variant of the caps with the Interlaced feature so we can negotiate it if needed */
+ add_alternate_variant (v4l2object, ret, gst_caps_get_structure (ret,
+ gst_caps_get_size (ret) - 1));
+ }
+
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ if (format->pixelformat == V4L2_PIX_FMT_NV12 ||
++ format->pixelformat == V4L2_PIX_FMT_YUV420) {
++ GstStructure *alt_s = gst_structure_copy (template);
++
++ if (format->pixelformat == V4L2_PIX_FMT_NV12)
++ gst_structure_set (alt_s, "format", G_TYPE_STRING, "SN12", NULL);
++ else
++ gst_structure_set (alt_s, "format", G_TYPE_STRING, "S420", NULL);
++
++ tmp = gst_v4l2_object_probe_caps_for_format (v4l2object,
++ format->pixelformat, alt_s);
++
++ if (tmp)
++ gst_caps_append (ret, tmp);
++
++ gst_structure_free (alt_s);
++ }
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ gst_structure_free (template);
+ }
+
+ if (filter) {
+ GstCaps *tmp;
+
+ tmp = ret;
+ ret = gst_caps_intersect_full (filter, ret, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (tmp);
+ }
+
+ GST_INFO_OBJECT (v4l2object->dbg_obj, "probed caps: %" GST_PTR_FORMAT, ret);
+
+ return ret;
+ }
+
+ GstCaps *
+ gst_v4l2_object_get_caps (GstV4l2Object * v4l2object, GstCaps * filter)
+ {
+ GstCaps *ret;
+
+ if (v4l2object->probed_caps == NULL)
+ v4l2object->probed_caps = gst_v4l2_object_probe_caps (v4l2object, NULL);
+
+ if (filter) {
+ ret = gst_caps_intersect_full (filter, v4l2object->probed_caps,
+ GST_CAPS_INTERSECT_FIRST);
+ } else {
+ ret = gst_caps_ref (v4l2object->probed_caps);
+ }
+
+ return ret;
+ }
+
+ static gboolean
+ gst_v4l2_object_match_buffer_layout (GstV4l2Object * obj, guint n_planes,
+ gsize offset[GST_VIDEO_MAX_PLANES], gint stride[GST_VIDEO_MAX_PLANES],
+ gsize buffer_size, guint padded_height)
+ {
+ guint p;
+ gboolean need_fmt_update = FALSE;
+
+ if (n_planes != GST_VIDEO_INFO_N_PLANES (&obj->info)) {
+ GST_WARNING_OBJECT (obj->dbg_obj,
+ "Cannot match buffers with different number planes");
+ return FALSE;
+ }
+
+ for (p = 0; p < n_planes; p++) {
+ if (stride[p] < obj->info.stride[p]) {
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "Not matching as remote stride %i is smaller than %i on plane %u",
+ stride[p], obj->info.stride[p], p);
+ return FALSE;
+ } else if (stride[p] > obj->info.stride[p]) {
+ GST_LOG_OBJECT (obj->dbg_obj,
+ "remote stride %i is higher than %i on plane %u",
+ stride[p], obj->info.stride[p], p);
+ need_fmt_update = TRUE;
+ }
+
+ if (offset[p] < obj->info.offset[p]) {
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "Not matching as offset %" G_GSIZE_FORMAT
+ " is smaller than %" G_GSIZE_FORMAT " on plane %u",
+ offset[p], obj->info.offset[p], p);
+ return FALSE;
+ } else if (offset[p] > obj->info.offset[p]) {
+ GST_LOG_OBJECT (obj->dbg_obj,
+ "Remote offset %" G_GSIZE_FORMAT
+ " is higher than %" G_GSIZE_FORMAT " on plane %u",
+ offset[p], obj->info.offset[p], p);
+ need_fmt_update = TRUE;
+ }
+ }
+
+ if (need_fmt_update) {
+ struct v4l2_format format;
+ gint wanted_stride[GST_VIDEO_MAX_PLANES] = { 0, };
+
+ format = obj->format;
+
+ if (padded_height) {
+ GST_DEBUG_OBJECT (obj->dbg_obj, "Padded height %u", padded_height);
+
+ obj->align.padding_bottom =
+ padded_height - GST_VIDEO_INFO_FIELD_HEIGHT (&obj->info);
+ } else {
+ GST_WARNING_OBJECT (obj->dbg_obj,
+ "Failed to compute padded height; keep the default one");
+ padded_height = format.fmt.pix_mp.height;
+ }
+
+ /* update the current format with the stride we want to import from */
+ if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
+ guint i;
+
+ GST_DEBUG_OBJECT (obj->dbg_obj, "Wanted strides:");
+
+ for (i = 0; i < obj->n_v4l2_planes; i++) {
+ gint plane_stride = stride[i];
+
+ if (GST_VIDEO_FORMAT_INFO_IS_TILED (obj->info.finfo))
+ plane_stride = GST_VIDEO_TILE_X_TILES (plane_stride) <<
+ GST_VIDEO_FORMAT_INFO_TILE_WS (obj->info.finfo);
+
+ format.fmt.pix_mp.plane_fmt[i].bytesperline = plane_stride;
+ format.fmt.pix_mp.height = padded_height;
+ wanted_stride[i] = plane_stride;
+ GST_DEBUG_OBJECT (obj->dbg_obj, " [%u] %i", i, wanted_stride[i]);
+ }
+ } else {
+ gint plane_stride = stride[0];
+
+ GST_DEBUG_OBJECT (obj->dbg_obj, "Wanted stride: %i", plane_stride);
+
+ if (GST_VIDEO_FORMAT_INFO_IS_TILED (obj->info.finfo))
+ plane_stride = GST_VIDEO_TILE_X_TILES (plane_stride) <<
+ GST_VIDEO_FORMAT_INFO_TILE_WS (obj->info.finfo);
+
+ format.fmt.pix.bytesperline = plane_stride;
+ format.fmt.pix.height = padded_height;
+ wanted_stride[0] = plane_stride;
+ }
+
+ if (obj->ioctl (obj->video_fd, VIDIOC_S_FMT, &format) < 0) {
+ GST_WARNING_OBJECT (obj->dbg_obj,
+ "Something went wrong trying to update current format: %s",
+ g_strerror (errno));
+ return FALSE;
+ }
+
+ gst_v4l2_object_save_format (obj, obj->fmtdesc, &format, &obj->info,
+ &obj->align);
+
+ if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
+ guint i;
+
+ for (i = 0; i < obj->n_v4l2_planes; i++) {
+ if (format.fmt.pix_mp.plane_fmt[i].bytesperline != wanted_stride[i]) {
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "[%i] Driver did not accept the new stride (wants %i, got %i)",
+ i, wanted_stride[i], format.fmt.pix_mp.plane_fmt[i].bytesperline);
+ return FALSE;
+ }
+ }
+
+ if (format.fmt.pix_mp.height != padded_height) {
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "Driver did not accept the padded height (wants %i, got %i)",
+ padded_height, format.fmt.pix_mp.height);
+ }
+ } else {
+ if (format.fmt.pix.bytesperline != wanted_stride[0]) {
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "Driver did not accept the new stride (wants %i, got %i)",
+ wanted_stride[0], format.fmt.pix.bytesperline);
+ return FALSE;
+ }
+
+ if (format.fmt.pix.height != padded_height) {
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "Driver did not accept the padded height (wants %i, got %i)",
+ padded_height, format.fmt.pix.height);
+ }
+ }
+ }
+
+ if (obj->align.padding_bottom) {
+ /* Crop because of vertical padding */
+ GST_DEBUG_OBJECT (obj->dbg_obj, "crop because of bottom padding of %d",
+ obj->align.padding_bottom);
+ gst_v4l2_object_set_crop (obj);
+ }
+
+ return TRUE;
+ }
+
+ static gboolean
+ validate_video_meta_struct (GstV4l2Object * obj, const GstStructure * s)
+ {
+ guint i;
+
+ for (i = 0; i < gst_structure_n_fields (s); i++) {
+ const gchar *name = gst_structure_nth_field_name (s, i);
+
+ if (!g_str_equal (name, "padding-top")
+ && !g_str_equal (name, "padding-bottom")
+ && !g_str_equal (name, "padding-left")
+ && !g_str_equal (name, "padding-right")) {
+ GST_WARNING_OBJECT (obj->dbg_obj, "Unknown video meta field: '%s'", name);
+ return FALSE;
+ }
+ }
+
+ return TRUE;
+ }
+
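+ /* Downstream advertises the layout it wants through the allocation query,
+ * as a parameter structure attached to the video meta API, e.g.
+ * (illustrative values):
+ *
+ *   video-meta, padding-top=(uint)0, padding-bottom=(uint)8;
+ */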
+ static gboolean
+ gst_v4l2_object_match_buffer_layout_from_struct (GstV4l2Object * obj,
+ const GstStructure * s, GstCaps * caps, guint buffer_size)
+ {
+ GstVideoInfo info;
+ GstVideoAlignment align;
+ gsize plane_size[GST_VIDEO_MAX_PLANES];
+
+ if (!validate_video_meta_struct (obj, s))
+ return FALSE;
+
+ if (!gst_video_info_from_caps (&info, caps)) {
+ GST_WARNING_OBJECT (obj->dbg_obj, "Failed to create video info");
+ return FALSE;
+ }
+
+ gst_video_alignment_reset (&align);
+
+ gst_structure_get_uint (s, "padding-top", &align.padding_top);
+ gst_structure_get_uint (s, "padding-bottom", &align.padding_bottom);
+ gst_structure_get_uint (s, "padding-left", &align.padding_left);
+ gst_structure_get_uint (s, "padding-right", &align.padding_right);
+
+ if (align.padding_top || align.padding_bottom || align.padding_left ||
+ align.padding_right) {
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "Upstream requested padding (top: %d bottom: %d left: %d right: %d)",
+ align.padding_top, align.padding_bottom, align.padding_left,
+ align.padding_right);
+ }
+
+ if (!gst_video_info_align_full (&info, &align, plane_size)) {
+ GST_WARNING_OBJECT (obj->dbg_obj, "Failed to align video info");
+ return FALSE;
+ }
+
+ if (GST_VIDEO_INFO_SIZE (&info) != buffer_size) {
+ GST_WARNING_OBJECT (obj->dbg_obj,
+ "Requested buffer size (%d) doesn't match video info size (%"
+ G_GSIZE_FORMAT ")", buffer_size, GST_VIDEO_INFO_SIZE (&info));
+ return FALSE;
+ }
+
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "try matching buffer layout requested by downstream");
+
+ gst_v4l2_object_match_buffer_layout (obj, GST_VIDEO_INFO_N_PLANES (&info),
+ info.offset, info.stride, buffer_size,
+ GST_VIDEO_INFO_PLANE_HEIGHT (&info, 0, plane_size));
+
+ return TRUE;
+ }
+
+ gboolean
+ gst_v4l2_object_decide_allocation (GstV4l2Object * obj, GstQuery * query)
+ {
+ GstCaps *caps;
+ GstBufferPool *pool = NULL, *other_pool = NULL;
+ GstStructure *config;
+ guint size, min, max, own_min = 0;
+ gboolean update;
+ gboolean has_video_meta;
+ gboolean can_share_own_pool, pushing_from_our_pool = FALSE;
+ GstAllocator *allocator = NULL;
+ GstAllocationParams params = { 0 };
+ guint video_idx;
+
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ GST_INFO_OBJECT (obj->dbg_obj, "decide allocation - %s",
++ V4L2_TYPE_IS_OUTPUT (obj->type) ? "output" : "capture");
++#else /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ GST_DEBUG_OBJECT (obj->dbg_obj, "decide allocation");
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+
+ g_return_val_if_fail (obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
+ obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, FALSE);
+
+ gst_query_parse_allocation (query, &caps, NULL);
+
+ if (obj->pool == NULL) {
+ if (!gst_v4l2_object_setup_pool (obj, caps))
+ goto pool_failed;
+ }
+
+ if (gst_query_get_n_allocation_params (query) > 0)
+ gst_query_parse_nth_allocation_param (query, 0, &allocator, ¶ms);
+
+ if (gst_query_get_n_allocation_pools (query) > 0) {
+ gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
+ update = TRUE;
+ } else {
+ pool = NULL;
+ min = max = 0;
+ size = 0;
+ update = FALSE;
+ }
+
+ GST_DEBUG_OBJECT (obj->dbg_obj, "allocation: size:%u min:%u max:%u pool:%"
+ GST_PTR_FORMAT, size, min, max, pool);
+
+ has_video_meta =
+ gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE,
+ &video_idx);
+
+ if (has_video_meta) {
+ const GstStructure *params;
+ gst_query_parse_nth_allocation_meta (query, video_idx, ¶ms);
+
+ if (params)
+ gst_v4l2_object_match_buffer_layout_from_struct (obj, params, caps, size);
+ }
+
+ can_share_own_pool = (has_video_meta || !obj->need_video_meta);
+
+ gst_v4l2_get_driver_min_buffers (obj);
+ /* We can't share our own pool if it would exceed the V4L2 capacity */
+ if (min + obj->min_buffers + 1 > VIDEO_MAX_FRAME)
+ can_share_own_pool = FALSE;
+
+ /* select a pool */
+ switch (obj->mode) {
+ case GST_V4L2_IO_RW:
+ if (pool) {
+ /* in READ/WRITE mode, prefer a downstream pool because our own pool
+ * doesn't help much, we have to write to it as well */
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "read/write mode: using downstream pool");
+ /* use the biggest size; when we use our own pool we can't really use
+ * any size other than what the hardware gives us, but for downstream
+ * pools we can try */
+ size = MAX (size, obj->info.size);
+ } else if (can_share_own_pool) {
+ /* no downstream pool, use our own then */
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "read/write mode: no downstream pool, using our own");
+ pool = gst_object_ref (obj->pool);
+ size = obj->info.size;
+ pushing_from_our_pool = TRUE;
+ }
+ break;
+
+ case GST_V4L2_IO_USERPTR:
+ case GST_V4L2_IO_DMABUF_IMPORT:
+ /* in importing mode, prefer our own pool, and pass the other pool to
+ * our own, so it can serve itself */
+ if (pool == NULL)
+ goto no_downstream_pool;
+ gst_v4l2_buffer_pool_set_other_pool (GST_V4L2_BUFFER_POOL (obj->pool),
+ pool);
+ other_pool = pool;
+ gst_object_unref (pool);
+ pool = gst_object_ref (obj->pool);
+ size = obj->info.size;
+ break;
+
+ case GST_V4L2_IO_MMAP:
+ case GST_V4L2_IO_DMABUF:
+ /* in streaming mode, prefer our own pool */
+ /* Check if we can use it ... */
+ if (can_share_own_pool) {
+ if (pool)
+ gst_object_unref (pool);
+ pool = gst_object_ref (obj->pool);
+ size = obj->info.size;
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "streaming mode: using our own pool %" GST_PTR_FORMAT, pool);
+ pushing_from_our_pool = TRUE;
+ } else if (pool) {
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "streaming mode: copying to downstream pool %" GST_PTR_FORMAT,
+ pool);
+ } else {
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "streaming mode: no usable pool, copying to generic pool");
+ size = MAX (size, obj->info.size);
+ }
+ break;
+ case GST_V4L2_IO_AUTO:
+ default:
+ GST_WARNING_OBJECT (obj->dbg_obj, "unhandled mode");
+ break;
+ }
+
+ if (size == 0)
+ goto no_size;
+
+ /* If pushing from our own pool, configure it with queried minimum,
+ * otherwise use the minimum required */
+ if (pushing_from_our_pool) {
+ /* When pushing from our own pool, we need as many buffers as downstream
+ * requested to be able to fill the pipeline, plus the minimum the driver
+ * says the decoder requires, plus 2 more, so we don't end up with
+ * everything downstream or held by the decoder. We account 2 buffers for
+ * v4l2 so that while one is being pushed downstream the other can
+ * already be queued for the next frame. */
+ own_min = min + obj->min_buffers + 2;
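+ /* e.g. (illustrative) a downstream minimum of 2 and a driver minimum
+ * of 4 gives own_min = 8 */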
+
+ /* If no allocation parameters were provided, allow for a few more
+ * buffers and enable the copy threshold */
+ if (!update) {
+ own_min += 2;
+ gst_v4l2_buffer_pool_copy_at_threshold (GST_V4L2_BUFFER_POOL (pool),
+ TRUE);
+ } else {
+ gst_v4l2_buffer_pool_copy_at_threshold (GST_V4L2_BUFFER_POOL (pool),
+ FALSE);
+ }
+
+ } else {
+ /* In this case we'll have to configure two buffer pools. For our own
+ * pool, we need what the driver requires plus one more, so we can
+ * dequeue */
+ own_min = obj->min_buffers + 1;
+ own_min = MAX (own_min, GST_V4L2_MIN_BUFFERS (obj));
+
+ /* for the downstream pool, keep what downstream wants, but ensure at
+ * least a minimum if downstream didn't suggest anything (we are
+ * expecting the base class to create a default one for the context) */
+ min = MAX (min, GST_V4L2_MIN_BUFFERS (obj));
+
+ /* To import we need the other pool to hold at least own_min */
+ if (obj->pool == pool)
+ min += own_min;
+ }
+
+ /* Request a bigger max, if one was suggested but it's too small */
+ if (max != 0)
+ max = MAX (min, max);
+
+ /* First step, configure our own pool */
+ config = gst_buffer_pool_get_config (obj->pool);
+
+ if (obj->need_video_meta || has_video_meta) {
+ GST_DEBUG_OBJECT (obj->dbg_obj, "activate Video Meta");
+ gst_buffer_pool_config_add_option (config,
+ GST_BUFFER_POOL_OPTION_VIDEO_META);
+ }
+
+ gst_buffer_pool_config_set_allocator (config, allocator, ¶ms);
+ gst_buffer_pool_config_set_params (config, caps, size, own_min, 0);
+
+ GST_DEBUG_OBJECT (obj->dbg_obj, "setting own pool config to %"
+ GST_PTR_FORMAT, config);
+
+ /* Our pool often needs to adjust the values */
+ if (!gst_buffer_pool_set_config (obj->pool, config)) {
+ config = gst_buffer_pool_get_config (obj->pool);
+
+ GST_DEBUG_OBJECT (obj->dbg_obj, "own pool config changed to %"
+ GST_PTR_FORMAT, config);
+
+ /* our pool will adjust the maximum buffer, which we are fine with */
+ if (!gst_buffer_pool_set_config (obj->pool, config))
+ goto config_failed;
+ }
+
+ /* Now configure the other pool if different */
+ if (obj->pool != pool)
+ other_pool = pool;
+
+ if (other_pool) {
+ config = gst_buffer_pool_get_config (other_pool);
+ gst_buffer_pool_config_set_allocator (config, allocator, ¶ms);
+ gst_buffer_pool_config_set_params (config, caps, size, min, max);
+
+ GST_DEBUG_OBJECT (obj->dbg_obj, "setting other pool config to %"
+ GST_PTR_FORMAT, config);
+
+ /* if downstream supports video metadata, add this to the pool config */
+ if (has_video_meta) {
+ GST_DEBUG_OBJECT (obj->dbg_obj, "activate Video Meta");
+ gst_buffer_pool_config_add_option (config,
+ GST_BUFFER_POOL_OPTION_VIDEO_META);
+ }
+
+ if (!gst_buffer_pool_set_config (other_pool, config)) {
+ config = gst_buffer_pool_get_config (other_pool);
+
+ if (!gst_buffer_pool_config_validate_params (config, caps, size, min,
+ max)) {
+ gst_structure_free (config);
+ goto config_failed;
+ }
+
+ if (!gst_buffer_pool_set_config (other_pool, config))
+ goto config_failed;
+ }
+ }
+
+ if (pool) {
+ /* Simply read back the active configuration, so our base class gets
+ * the right information */
+ config = gst_buffer_pool_get_config (pool);
+ gst_buffer_pool_config_get_params (config, NULL, &size, &min, &max);
+ gst_structure_free (config);
+ }
+
+ if (update)
+ gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
+ else
+ gst_query_add_allocation_pool (query, pool, size, min, max);
+
+ if (allocator)
+ gst_object_unref (allocator);
+
+ if (pool)
+ gst_object_unref (pool);
+
+ return TRUE;
+
+ pool_failed:
+ {
+ /* setup_pool already sent the error */
+ goto cleanup;
+ }
+ config_failed:
+ {
+ GST_ELEMENT_ERROR (obj->element, RESOURCE, SETTINGS,
+ (_("Failed to configure internal buffer pool.")), (NULL));
+ goto cleanup;
+ }
+ no_size:
+ {
+ GST_ELEMENT_ERROR (obj->element, RESOURCE, SETTINGS,
+ (_("Video device did not suggest any buffer size.")), (NULL));
+ goto cleanup;
+ }
+ cleanup:
+ {
+ if (allocator)
+ gst_object_unref (allocator);
+
+ if (pool)
+ gst_object_unref (pool);
+ return FALSE;
+ }
+ no_downstream_pool:
+ {
+ GST_ELEMENT_ERROR (obj->element, RESOURCE, SETTINGS,
+ (_("No downstream pool to import from.")),
+ ("When importing DMABUF or USERPTR, we need a pool to import from"));
+ return FALSE;
+ }
+ }
+
+ gboolean
+ gst_v4l2_object_propose_allocation (GstV4l2Object * obj, GstQuery * query)
+ {
+ GstBufferPool *pool = NULL;
+ /* we need at least 2 buffers to operate */
+ guint size, min, max;
+ GstCaps *caps;
+ gboolean need_pool;
+
+ /* Set default allocation parameters */
+ size = obj->info.size;
+ min = GST_V4L2_MIN_BUFFERS (obj);
+ max = VIDEO_MAX_FRAME;
+
+ gst_query_parse_allocation (query, &caps, &need_pool);
+
+ if (caps == NULL)
+ goto no_caps;
+
+ switch (obj->mode) {
+ case GST_V4L2_IO_MMAP:
+ case GST_V4L2_IO_DMABUF:
+ if (need_pool && obj->pool) {
+ if (!gst_buffer_pool_is_active (obj->pool))
+ pool = gst_object_ref (obj->pool);
+ }
+ break;
+ default:
+ break;
+ }
+
+ if (pool != NULL) {
+ GstCaps *pcaps;
+ GstStructure *config;
+
+ /* we had a pool, check caps */
+ config = gst_buffer_pool_get_config (pool);
+ gst_buffer_pool_config_get_params (config, &pcaps, NULL, NULL, NULL);
+
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "we had a pool with caps %" GST_PTR_FORMAT, pcaps);
+ if (!gst_caps_is_equal (caps, pcaps)) {
+ gst_structure_free (config);
+ gst_object_unref (pool);
+ goto different_caps;
+ }
+ gst_structure_free (config);
+ }
+ gst_v4l2_get_driver_min_buffers (obj);
+
+ min = MAX (obj->min_buffers, GST_V4L2_MIN_BUFFERS (obj));
+
+ gst_query_add_allocation_pool (query, pool, size, min, max);
+
+ /* we also support various metadata */
+ gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
+
+ if (pool)
+ gst_object_unref (pool);
+
+ return TRUE;
+
+ /* ERRORS */
+ no_caps:
+ {
+ GST_DEBUG_OBJECT (obj->dbg_obj, "no caps specified");
+ return FALSE;
+ }
+ different_caps:
+ {
+ /* different caps, we can't use this pool */
+ GST_DEBUG_OBJECT (obj->dbg_obj, "pool has different caps");
+ return FALSE;
+ }
+ }
+
+ gboolean
+ gst_v4l2_object_try_import (GstV4l2Object * obj, GstBuffer * buffer)
+ {
+ GstVideoMeta *vmeta;
+ guint n_mem = gst_buffer_n_memory (buffer);
+
+ /* only import if requested */
+ switch (obj->mode) {
+ case GST_V4L2_IO_USERPTR:
+ case GST_V4L2_IO_DMABUF_IMPORT:
+ break;
+ default:
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "The io-mode does not enable importation");
+ return FALSE;
+ }
+
+ vmeta = gst_buffer_get_video_meta (buffer);
+ if (!vmeta && obj->need_video_meta) {
+ GST_DEBUG_OBJECT (obj->dbg_obj, "Downstream buffer uses standard "
+ "stride/offset while the driver does not.");
+ return FALSE;
+ }
+
+ /* we need matching strides/offsets and size */
+ if (vmeta) {
+ guint plane_height[GST_VIDEO_MAX_PLANES] = { 0, };
+
+ gst_video_meta_get_plane_height (vmeta, plane_height);
+
+ if (!gst_v4l2_object_match_buffer_layout (obj, vmeta->n_planes,
+ vmeta->offset, vmeta->stride, gst_buffer_get_size (buffer),
+ plane_height[0]))
+ return FALSE;
+ }
+
+ /* we can always import a single-memory buffer, but otherwise we need
+ * the same number of memory objects. */
+ if (n_mem != 1 && n_mem != obj->n_v4l2_planes) {
+ GST_DEBUG_OBJECT (obj->dbg_obj, "Can only import %i memory, "
+ "buffers contains %u memory", obj->n_v4l2_planes, n_mem);
+ return FALSE;
+ }
+
+ /* For DMABuf importation we need DMABuf of course */
+ if (obj->mode == GST_V4L2_IO_DMABUF_IMPORT) {
+ guint i;
+
+ for (i = 0; i < n_mem; i++) {
+ GstMemory *mem = gst_buffer_peek_memory (buffer, i);
+
+ if (!gst_is_dmabuf_memory (mem)) {
+ GST_DEBUG_OBJECT (obj->dbg_obj, "Cannot import non-DMABuf memory.");
+ return FALSE;
+ }
+ }
+ }
+
+ /* for the remaining, only the kernel driver can tell */
+ return TRUE;
+ }
--- /dev/null
+ /* GStreamer
+ *
+ * Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * 2006 Edgard Lima <edgard.lima@gmail.com>
+ *
+ * gstv4l2object.h: base class for V4L2 elements
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifndef __GST_V4L2_OBJECT_H__
+ #define __GST_V4L2_OBJECT_H__
+
+ #include "ext/videodev2.h"
+ #ifdef HAVE_LIBV4L2
+ # include <libv4l2.h>
+ #endif
+
+ #include "v4l2-utils.h"
+
+ #include <gst/gst.h>
+ #include <gst/base/gstpushsrc.h>
+
+ #include <gst/video/video.h>
+ #include <unistd.h>
+
+ typedef struct _GstV4l2Object GstV4l2Object;
+ typedef struct _GstV4l2ObjectClassHelper GstV4l2ObjectClassHelper;
+
+ #include <gstv4l2bufferpool.h>
+
+ /* size of v4l2 buffer pool in streaming case, obj->info needs to be valid */
+ #define GST_V4L2_MIN_BUFFERS(obj) \
+ ((GST_VIDEO_INFO_INTERLACE_MODE (&obj->info) == \
+ GST_VIDEO_INTERLACE_MODE_ALTERNATE) ? \
+ /* 2x buffers needed with each field in its own buffer */ \
+ 4 : 2)
+
+ /* max frame width/height */
+ #define GST_V4L2_MAX_SIZE (1<<15) /* 2^15 == 32768 */
+
+ G_BEGIN_DECLS
+
+ #define GST_TYPE_V4L2_IO_MODE (gst_v4l2_io_mode_get_type ())
+ GType gst_v4l2_io_mode_get_type (void);
+
+ #define GST_V4L2_OBJECT(obj) (GstV4l2Object *)(obj)
+
+ typedef enum {
+ GST_V4L2_IO_AUTO = 0,
+ GST_V4L2_IO_RW = 1,
+ GST_V4L2_IO_MMAP = 2,
+ GST_V4L2_IO_USERPTR = 3,
+ GST_V4L2_IO_DMABUF = 4,
+ GST_V4L2_IO_DMABUF_IMPORT = 5
+ } GstV4l2IOMode;
+
+ typedef gboolean (*GstV4l2GetInOutFunction) (GstV4l2Object * v4l2object, guint32 * input);
+ typedef gboolean (*GstV4l2SetInOutFunction) (GstV4l2Object * v4l2object, guint32 input);
+ typedef gboolean (*GstV4l2UpdateFpsFunction) (GstV4l2Object * v4l2object);
+
+ /* On Android NDK r18b the ioctl() signature uses 'unsigned' instead of
+ * 'unsigned long' for the 2nd parameter */
+ #ifdef __ANDROID__
+ typedef unsigned ioctl_req_t;
+ #else
+ typedef gulong ioctl_req_t;
+ #endif
+
+ #define GST_V4L2_WIDTH(o) (GST_VIDEO_INFO_WIDTH (&(o)->info))
+ #define GST_V4L2_HEIGHT(o) (GST_VIDEO_INFO_HEIGHT (&(o)->info))
+ #define GST_V4L2_PIXELFORMAT(o) ((o)->fmtdesc->pixelformat)
+ #define GST_V4L2_FPS_N(o) (GST_VIDEO_INFO_FPS_N (&(o)->info))
+ #define GST_V4L2_FPS_D(o) (GST_VIDEO_INFO_FPS_D (&(o)->info))
+
+ /* simple check whether the device is open */
+ #define GST_V4L2_IS_OPEN(o) ((o)->video_fd > 0)
+
+ /* check whether the device is 'active' */
+ #define GST_V4L2_IS_ACTIVE(o) ((o)->active)
+ #define GST_V4L2_SET_ACTIVE(o) ((o)->active = TRUE)
+ #define GST_V4L2_SET_INACTIVE(o) ((o)->active = FALSE)
+
+ /* checks whether the current v4l2object has already been open()'ed or not */
+ #define GST_V4L2_CHECK_OPEN(v4l2object) \
+ if (!GST_V4L2_IS_OPEN(v4l2object)) \
+ { \
+ GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS, \
+ (_("Device is not open.")), (NULL)); \
+ return FALSE; \
+ }
+
+ /* checks whether the current v4l2object is close()'ed or whether it is still open */
+ #define GST_V4L2_CHECK_NOT_OPEN(v4l2object) \
+ if (GST_V4L2_IS_OPEN(v4l2object)) \
+ { \
+ GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS, \
+ (_("Device is open.")), (NULL)); \
+ return FALSE; \
+ }
+
+ /* checks that the device is not in streaming (active) mode */
+ #define GST_V4L2_CHECK_NOT_ACTIVE(v4l2object) \
+ if (GST_V4L2_IS_ACTIVE(v4l2object)) \
+ { \
+ GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS, \
+ (NULL), ("Device is in streaming mode")); \
+ return FALSE; \
+ }
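+
+ /* These guards are typically used (illustrative sketch) at the top of a
+ * gboolean-returning method that reconfigures the device:
+ *
+ *   GST_V4L2_CHECK_OPEN (v4l2object);
+ *   GST_V4L2_CHECK_NOT_ACTIVE (v4l2object);
+ */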
+
+
+ struct _GstV4l2Object {
+ GstElement * element;
+ GstObject * dbg_obj;
+
+ enum v4l2_buf_type type; /* V4L2_BUF_TYPE_VIDEO_CAPTURE, V4L2_BUF_TYPE_VIDEO_OUTPUT */
+
+ /* the video device */
+ char *videodev;
+
++#ifdef TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE
++ /* auto scan device */
++ gboolean auto_scan_device;
++#endif /* TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE */
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ gboolean tbm_output;
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
++
+ /* the video-device's file descriptor */
+ gint video_fd;
+ GstV4l2IOMode mode;
+
+ gboolean active;
+
+ /* the current format */
+ struct v4l2_fmtdesc *fmtdesc;
+ struct v4l2_format format;
+ GstVideoInfo info;
+ GstVideoAlignment align;
+ GstVideoTransferFunction transfer;
+
+ /* Features */
+ gboolean need_video_meta;
+ gboolean has_alpha_component;
+
+ /* only used if the device supports MPLANE;
+ * n_v4l2_planes is the number of v4l2 planes,
+ * the GStreamer equivalent is gst_buffer_n_memory()
+ */
+ gint n_v4l2_planes;
+
+ /* We cache the frame duration if known */
+ GstClockTime duration;
+
+ /* if the MPLANE device supports both contiguous and non-contiguous
+ * planes, this allows selecting which one we want; prefered_non_contiguous
+ * requests non-contiguous mode.
+ */
+ gboolean prefered_non_contiguous;
+
+ /* This will be set if supported in decide_allocation. It can be used to
+ * calculate the minimum latency. */
+ guint32 min_buffers;
+
+ /* wanted mode */
+ GstV4l2IOMode req_mode;
+
+ /* optional pool */
+ GstBufferPool *pool;
+ /* sequence number of the pool, used to identify it (for debugging) */
+ guint pool_seq;
+
+ /* the video device's capabilities */
+ struct v4l2_capability vcap;
+ /* opened device specific capabilities */
+ guint32 device_caps;
+
+ /* lists... */
+ GSList *formats; /* list of available capture formats */
+ GstCaps *probed_caps;
+
+ GList *colors;
+ GList *norms;
+ GList *channels;
+ GData *controls;
+
+ /* properties */
+ v4l2_std_id tv_norm;
+ gchar *channel;
+ gulong frequency;
+ GstStructure *extra_controls;
+ gboolean keep_aspect;
+ GValue *par;
+
+ /* funcs */
+ GstV4l2GetInOutFunction get_in_out_func;
+ GstV4l2SetInOutFunction set_in_out_func;
+ GstV4l2UpdateFpsFunction update_fps_func;
+
+ /* syscalls */
+ gint (*fd_open) (gint fd, gint v4l2_flags);
+ gint (*close) (gint fd);
+ gint (*dup) (gint fd);
+ gint (*ioctl) (gint fd, ioctl_req_t request, ...);
+ gssize (*read) (gint fd, gpointer buffer, gsize n);
+ gpointer (*mmap) (gpointer start, gsize length, gint prot, gint flags,
+ gint fd, off_t offset);
+ gint (*munmap) (gpointer _start, gsize length);
+
+ /* Quirks */
+ /* Skips interlacing probes */
+ gboolean never_interlaced;
+ /* Allows skipping the initial format read through G_FMT. Some devices
+ * just fail if you don't call S_FMT first. (ex: M2M decoders) */
+ gboolean no_initial_format;
+ /* Avoid any try_fmt probe. This is used by v4l2src to speed up startup time
+ * on devices with slow USB firmware. When this is set, gst_v4l2_set_format() will modify
+ * the caps to reflect what was negotiated during fixation */
+ gboolean skip_try_fmt_probes;
+ };
+
+ struct _GstV4l2ObjectClassHelper {
+ /* probed devices */
+ GList *devices;
+ };
+
+ GType gst_v4l2_object_get_type (void);
+
+ #define V4L2_STD_OBJECT_PROPS \
+ PROP_DEVICE, \
+ PROP_DEVICE_NAME, \
+ PROP_DEVICE_FD, \
+ PROP_FLAGS, \
+ PROP_BRIGHTNESS, \
+ PROP_CONTRAST, \
+ PROP_SATURATION, \
+ PROP_HUE, \
+ PROP_TV_NORM, \
+ PROP_IO_MODE, \
+ PROP_OUTPUT_IO_MODE, \
+ PROP_CAPTURE_IO_MODE, \
+ PROP_EXTRA_CONTROLS, \
+ PROP_PIXEL_ASPECT_RATIO, \
+ PROP_FORCE_ASPECT_RATIO
+
+ /* create/destroy */
+ GstV4l2Object* gst_v4l2_object_new (GstElement * element,
+ GstObject * dbg_obj,
+ enum v4l2_buf_type type,
+ const char * default_device,
+ GstV4l2GetInOutFunction get_in_out_func,
+ GstV4l2SetInOutFunction set_in_out_func,
+ GstV4l2UpdateFpsFunction update_fps_func);
+
+ void gst_v4l2_object_destroy (GstV4l2Object * v4l2object);
+
+ /* properties */
+
+ void gst_v4l2_object_install_properties_helper (GObjectClass * gobject_class,
+ const char * default_device);
+
+ void gst_v4l2_object_install_m2m_properties_helper (GObjectClass * gobject_class);
+
+ gboolean gst_v4l2_object_set_property_helper (GstV4l2Object * v4l2object,
+ guint prop_id,
+ const GValue * value,
+ GParamSpec * pspec);
+ gboolean gst_v4l2_object_get_property_helper (GstV4l2Object *v4l2object,
+ guint prop_id, GValue * value,
+ GParamSpec * pspec);
+ /* open/close */
+ gboolean gst_v4l2_object_open (GstV4l2Object * v4l2object, GstV4l2Error * error);
+ gboolean gst_v4l2_object_open_shared (GstV4l2Object * v4l2object, GstV4l2Object * other);
+ gboolean gst_v4l2_object_close (GstV4l2Object * v4l2object);
+
+ /* probing */
+
+ GstCaps* gst_v4l2_object_get_all_caps (void);
+
+ GstCaps* gst_v4l2_object_get_raw_caps (void);
+
+ GstCaps* gst_v4l2_object_get_codec_caps (void);
+
+ gint gst_v4l2_object_extrapolate_stride (const GstVideoFormatInfo * finfo,
+ gint plane, gint stride);
+
+ gboolean gst_v4l2_object_set_format (GstV4l2Object * v4l2object, GstCaps * caps, GstV4l2Error * error);
+ gboolean gst_v4l2_object_try_format (GstV4l2Object * v4l2object, GstCaps * caps, GstV4l2Error * error);
+ gboolean gst_v4l2_object_try_import (GstV4l2Object * v4l2object, GstBuffer * buffer);
+
+ gboolean gst_v4l2_object_caps_equal (GstV4l2Object * v4l2object, GstCaps * caps);
+ gboolean gst_v4l2_object_caps_is_subset (GstV4l2Object * v4l2object, GstCaps * caps);
+ GstCaps * gst_v4l2_object_get_current_caps (GstV4l2Object * v4l2object);
+
+ gboolean gst_v4l2_object_unlock (GstV4l2Object * v4l2object);
+ gboolean gst_v4l2_object_unlock_stop (GstV4l2Object * v4l2object);
+
+ gboolean gst_v4l2_object_stop (GstV4l2Object * v4l2object);
+
+ GstCaps * gst_v4l2_object_probe_caps (GstV4l2Object * v4l2object, GstCaps * filter);
+ GstCaps * gst_v4l2_object_get_caps (GstV4l2Object * v4l2object, GstCaps * filter);
+
+ gboolean gst_v4l2_object_acquire_format (GstV4l2Object * v4l2object, GstVideoInfo * info);
+
+ gboolean gst_v4l2_object_set_crop (GstV4l2Object * obj);
+
+ gboolean gst_v4l2_object_decide_allocation (GstV4l2Object * v4l2object, GstQuery * query);
+
+ gboolean gst_v4l2_object_propose_allocation (GstV4l2Object * obj, GstQuery * query);
+
+ GstStructure * gst_v4l2_object_v4l2fourcc_to_structure (guint32 fourcc);
+
+ /* TODO Move to proper namespace */
+ /* open/close the device */
+ gboolean gst_v4l2_open (GstV4l2Object * v4l2object, GstV4l2Error * error);
+ gboolean gst_v4l2_dup (GstV4l2Object * v4l2object, GstV4l2Object * other);
+ gboolean gst_v4l2_close (GstV4l2Object * v4l2object);
+
+ /* norm/input/output */
+ gboolean gst_v4l2_get_norm (GstV4l2Object * v4l2object, v4l2_std_id * norm);
+ gboolean gst_v4l2_set_norm (GstV4l2Object * v4l2object, v4l2_std_id norm);
+ gboolean gst_v4l2_get_input (GstV4l2Object * v4l2object, guint32 * input);
+ gboolean gst_v4l2_set_input (GstV4l2Object * v4l2object, guint32 input);
+ gboolean gst_v4l2_query_input (GstV4l2Object * v4l2object, struct v4l2_input * input);
+ gboolean gst_v4l2_get_output (GstV4l2Object * v4l2object, guint32 * output);
+ gboolean gst_v4l2_set_output (GstV4l2Object * v4l2object, guint32 output);
+
+ /* dv timings */
+ gboolean gst_v4l2_set_dv_timings (GstV4l2Object * v4l2object, struct v4l2_dv_timings *timings);
+ gboolean gst_v4l2_query_dv_timings (GstV4l2Object * v4l2object, struct v4l2_dv_timings *timings);
+
+ /* frequency control */
+ gboolean gst_v4l2_get_frequency (GstV4l2Object * v4l2object, gint tunernum, gulong * frequency);
+ gboolean gst_v4l2_set_frequency (GstV4l2Object * v4l2object, gint tunernum, gulong frequency);
+ gboolean gst_v4l2_signal_strength (GstV4l2Object * v4l2object, gint tunernum, gulong * signal);
+
+ /* attribute control */
+ gboolean gst_v4l2_get_attribute (GstV4l2Object * v4l2object, int attribute, int * value);
+ gboolean gst_v4l2_set_attribute (GstV4l2Object * v4l2object, int attribute, const int value);
+ gboolean gst_v4l2_set_string_attribute (GstV4l2Object * v4l2object, int attribute_num, const char *value);
+ gboolean gst_v4l2_set_controls (GstV4l2Object * v4l2object, GstStructure * controls);
+
+ /* events */
+ gboolean gst_v4l2_subscribe_event (GstV4l2Object * v4l2object, guint32 event, guint32 id);
+ gboolean gst_v4l2_dequeue_event (GstV4l2Object * v4l2object, struct v4l2_event *event);
+
+ G_END_DECLS
+
+ #endif /* __GST_V4L2_OBJECT_H__ */
--- /dev/null
+ /* GStreamer
+ *
+ * Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * 2006 Edgard Lima <edgard.lima@gmail.com>
+ *
+ * gstv4l2src.c: Video4Linux2 source element
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /**
+ * SECTION:element-v4l2src
+ * @title: v4l2src
+ *
+ * v4l2src can be used to capture video from v4l2 devices, like webcams and tv
+ * cards.
+ *
+ * ## Example launch lines
+ * |[
+ * gst-launch-1.0 v4l2src ! xvimagesink
+ * ]| This pipeline shows the video captured from /dev/video0, which may be
+ * a TV card or a webcam.
+ * |[
+ * gst-launch-1.0 v4l2src ! jpegdec ! xvimagesink
+ * ]| This pipeline shows the video captured from a webcam that delivers jpeg
+ * images.
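+ * |[
+ * gst-launch-1.0 v4l2src device=/dev/video1 ! video/x-raw,width=640,height=480,framerate=30/1 ! videoconvert ! autovideosink
+ * ]| This pipeline captures from an explicitly selected device node with
+ * fixed caps (illustrative values; adjust to what the hardware supports).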
+ *
+ * Since 1.14, the use of libv4l2 has been disabled due to major bugs in the
+ * emulation layer. To enable usage of this library, set the environment
+ * variable GST_V4L2_USE_LIBV4L2=1.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include <config.h>
+ #endif
+
+ #include <string.h>
+ #include <sys/time.h>
+ #include <unistd.h>
+
+ #include <gst/video/gstvideometa.h>
+ #include <gst/video/gstvideopool.h>
+
+ #include "gstv4l2elements.h"
+ #include "gstv4l2src.h"
+
+ #include "gstv4l2colorbalance.h"
+ #include "gstv4l2tuner.h"
+ #include "gstv4l2vidorient.h"
+
+ #include "gst/gst-i18n-plugin.h"
+
+ GST_DEBUG_CATEGORY (v4l2src_debug);
+ #define GST_CAT_DEFAULT v4l2src_debug
+
+ #define DEFAULT_PROP_DEVICE "/dev/video0"
+
+ enum
+ {
+ PROP_0,
+ V4L2_STD_OBJECT_PROPS,
++#ifdef TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID
++ PROP_CAMERA_ID,
++#endif /* TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID */
++#ifdef TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE
++ PROP_AUTO_SCAN_DEVICE,
++#endif /* TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE */
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ PROP_TBM_OUTPUT,
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ PROP_LAST
+ };
+
+ /* signals and args */
+ enum
+ {
+ SIGNAL_PRE_SET_FORMAT,
+ LAST_SIGNAL
+ };
+
+ static guint gst_v4l2_signals[LAST_SIGNAL] = { 0 };
+
+ GST_IMPLEMENT_V4L2_COLOR_BALANCE_METHODS (GstV4l2Src, gst_v4l2src);
+ GST_IMPLEMENT_V4L2_TUNER_METHODS (GstV4l2Src, gst_v4l2src);
+ GST_IMPLEMENT_V4L2_VIDORIENT_METHODS (GstV4l2Src, gst_v4l2src);
+
+ static void gst_v4l2src_uri_handler_init (gpointer g_iface,
+ gpointer iface_data);
+
+ #define gst_v4l2src_parent_class parent_class
+ G_DEFINE_TYPE_WITH_CODE (GstV4l2Src, gst_v4l2src, GST_TYPE_PUSH_SRC,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER, gst_v4l2src_uri_handler_init);
+ G_IMPLEMENT_INTERFACE (GST_TYPE_TUNER, gst_v4l2src_tuner_interface_init);
+ G_IMPLEMENT_INTERFACE (GST_TYPE_COLOR_BALANCE,
+ gst_v4l2src_color_balance_interface_init);
+ G_IMPLEMENT_INTERFACE (GST_TYPE_VIDEO_ORIENTATION,
+ gst_v4l2src_video_orientation_interface_init));
+ GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (v4l2src,
+ "v4l2src", GST_RANK_PRIMARY, GST_TYPE_V4L2SRC, v4l2_element_init (plugin));
+
+ struct PreferredCapsInfo
+ {
+ gint width;
+ gint height;
+ gint fps_n;
+ gint fps_d;
+ };
+
+ static void gst_v4l2src_finalize (GstV4l2Src * v4l2src);
+
+ /* element methods */
+ static GstStateChangeReturn gst_v4l2src_change_state (GstElement * element,
+ GstStateChange transition);
+
+ /* basesrc methods */
+ static gboolean gst_v4l2src_start (GstBaseSrc * src);
+ static gboolean gst_v4l2src_unlock (GstBaseSrc * src);
+ static gboolean gst_v4l2src_unlock_stop (GstBaseSrc * src);
+ static gboolean gst_v4l2src_stop (GstBaseSrc * src);
+ static GstCaps *gst_v4l2src_get_caps (GstBaseSrc * src, GstCaps * filter);
+ static gboolean gst_v4l2src_query (GstBaseSrc * bsrc, GstQuery * query);
+ static gboolean gst_v4l2src_decide_allocation (GstBaseSrc * src,
+ GstQuery * query);
+ static GstFlowReturn gst_v4l2src_create (GstPushSrc * src, GstBuffer ** out);
+ static GstCaps *gst_v4l2src_fixate (GstBaseSrc * basesrc, GstCaps * caps,
+ struct PreferredCapsInfo *pref);
+ static gboolean gst_v4l2src_negotiate (GstBaseSrc * basesrc);
+
+ static void gst_v4l2src_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+ static void gst_v4l2src_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+ static void
+ gst_v4l2src_class_init (GstV4l2SrcClass * klass)
+ {
+ GObjectClass *gobject_class;
+ GstElementClass *element_class;
+ GstBaseSrcClass *basesrc_class;
+ GstPushSrcClass *pushsrc_class;
+
+ gobject_class = G_OBJECT_CLASS (klass);
+ element_class = GST_ELEMENT_CLASS (klass);
+ basesrc_class = GST_BASE_SRC_CLASS (klass);
+ pushsrc_class = GST_PUSH_SRC_CLASS (klass);
+
+ gobject_class->finalize = (GObjectFinalizeFunc) gst_v4l2src_finalize;
+ gobject_class->set_property = gst_v4l2src_set_property;
+ gobject_class->get_property = gst_v4l2src_get_property;
+
+ element_class->change_state = gst_v4l2src_change_state;
+
+ gst_v4l2_object_install_properties_helper (gobject_class,
+ DEFAULT_PROP_DEVICE);
+
++#ifdef TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID
++ /**
++ * GstV4l2Src:camera-id:
++ *
++ * The value set by the application is used as the device node number,
++ * e.g. 1 -> /dev/video1.
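++ *
++ * A minimal usage sketch:
++ * |[
++ * g_object_set (v4l2src, "camera-id", 1, NULL); // opens /dev/video1
++ * ]|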
++ */
++ g_object_class_install_property (gobject_class, PROP_CAMERA_ID,
++ g_param_spec_uint ("camera-id", "Camera ID",
++ "Camera ID for device node", 0, G_MAXUINT, 0,
++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++#endif /* TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID */
++#ifdef TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE
++ /**
++ * GstV4l2Src:auto-scan-device:
++ *
++ * Scan all device nodes automatically until a device opens successfully.
++ */
++ g_object_class_install_property (gobject_class, PROP_AUTO_SCAN_DEVICE,
++ g_param_spec_boolean ("auto-scan-device", "Scan device automatically",
++ "Scan all device nodes automatically until device open success.",
++ TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++#endif /* TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE */
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ /**
++ * GstV4l2Src:tbm-output:
++ *
++ * Use a TBM (Tizen Buffer Manager) buffer for the output; effective only
++ * in DMABUF mode.
++ */
++ g_object_class_install_property (gobject_class, PROP_TBM_OUTPUT,
++ g_param_spec_boolean ("tbm-output", "Enable TBM for output buffer",
++ "It works for only DMABUF mode.",
++ FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
++
+ /**
+ * GstV4l2Src::prepare-format:
+ * @v4l2src: the v4l2src instance
+ * @fd: the file descriptor of the current device
+ * @caps: the caps of the format being set
+ *
+ * This signal gets emitted before calling the v4l2 VIDIOC_S_FMT ioctl
+ * (set format). This allows for any custom configuration of the device to
+ * happen prior to the format being set.
+ * This is mostly useful for UVC H264 encoding cameras which need the H264
+ * Probe & Commit to happen prior to the normal Probe & Commit.
+ */
+ gst_v4l2_signals[SIGNAL_PRE_SET_FORMAT] = g_signal_new ("prepare-format",
+ G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST,
+ 0, NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_INT, GST_TYPE_CAPS);
+
+ gst_element_class_set_static_metadata (element_class,
+ "Video (video4linux2) Source", "Source/Video",
+ "Reads frames from a Video4Linux2 device",
+ "Edgard Lima <edgard.lima@gmail.com>, "
+ "Stefan Kost <ensonic@users.sf.net>");
+
+ gst_element_class_add_pad_template
+ (element_class,
+ gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
+ gst_v4l2_object_get_all_caps ()));
+
+ basesrc_class->get_caps = GST_DEBUG_FUNCPTR (gst_v4l2src_get_caps);
+ basesrc_class->start = GST_DEBUG_FUNCPTR (gst_v4l2src_start);
+ basesrc_class->unlock = GST_DEBUG_FUNCPTR (gst_v4l2src_unlock);
+ basesrc_class->unlock_stop = GST_DEBUG_FUNCPTR (gst_v4l2src_unlock_stop);
+ basesrc_class->stop = GST_DEBUG_FUNCPTR (gst_v4l2src_stop);
+ basesrc_class->query = GST_DEBUG_FUNCPTR (gst_v4l2src_query);
+ basesrc_class->negotiate = GST_DEBUG_FUNCPTR (gst_v4l2src_negotiate);
+ basesrc_class->decide_allocation =
+ GST_DEBUG_FUNCPTR (gst_v4l2src_decide_allocation);
+
+ pushsrc_class->create = GST_DEBUG_FUNCPTR (gst_v4l2src_create);
+
+ klass->v4l2_class_devices = NULL;
+
+ GST_DEBUG_CATEGORY_INIT (v4l2src_debug, "v4l2src", 0, "V4L2 source element");
+ }
+
+ static void
+ gst_v4l2src_init (GstV4l2Src * v4l2src)
+ {
+ /* fixme: give an update_fps_function */
+ v4l2src->v4l2object = gst_v4l2_object_new (GST_ELEMENT (v4l2src),
+ GST_OBJECT (GST_BASE_SRC_PAD (v4l2src)), V4L2_BUF_TYPE_VIDEO_CAPTURE,
+ DEFAULT_PROP_DEVICE, gst_v4l2_get_input, gst_v4l2_set_input, NULL);
+
+ /* Avoid the slow probes */
+ v4l2src->v4l2object->skip_try_fmt_probes = TRUE;
++#ifdef TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE
++ v4l2src->v4l2object->auto_scan_device = TRUE;
++#endif /* TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE */
+
+ gst_base_src_set_format (GST_BASE_SRC (v4l2src), GST_FORMAT_TIME);
+ gst_base_src_set_live (GST_BASE_SRC (v4l2src), TRUE);
+ }
+
+
+ static void
+ gst_v4l2src_finalize (GstV4l2Src * v4l2src)
+ {
+ gst_v4l2_object_destroy (v4l2src->v4l2object);
+
+ G_OBJECT_CLASS (parent_class)->finalize ((GObject *) (v4l2src));
+ }
+
+
+ static void
+ gst_v4l2src_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+ {
+ GstV4l2Src *v4l2src = GST_V4L2SRC (object);
+
+ if (!gst_v4l2_object_set_property_helper (v4l2src->v4l2object,
+ prop_id, value, pspec)) {
+ switch (prop_id) {
++#ifdef TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID
++ case PROP_CAMERA_ID:
++ g_free (v4l2src->v4l2object->videodev);
++
++ v4l2src->camera_id = g_value_get_uint (value);
++ v4l2src->v4l2object->videodev = g_strdup_printf ("/dev/video%u", v4l2src->camera_id);
++
++ GST_INFO_OBJECT (v4l2src, "videodev [%s]", v4l2src->v4l2object->videodev);
++ break;
++#endif /* TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID */
++#ifdef TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE
++ case PROP_AUTO_SCAN_DEVICE:
++ v4l2src->v4l2object->auto_scan_device = g_value_get_boolean (value);
++ GST_INFO_OBJECT (v4l2src, "auto scan device [%d]", v4l2src->v4l2object->auto_scan_device);
++ break;
++#endif /* TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE */
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ case PROP_TBM_OUTPUT:
++ v4l2src->v4l2object->tbm_output = g_value_get_boolean (value);
++ GST_INFO_OBJECT (v4l2src, "tbm output [%d]", v4l2src->v4l2object->tbm_output);
++ break;
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+ }
+
+ static void
+ gst_v4l2src_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+ {
+ GstV4l2Src *v4l2src = GST_V4L2SRC (object);
+
+ if (!gst_v4l2_object_get_property_helper (v4l2src->v4l2object,
+ prop_id, value, pspec)) {
+ switch (prop_id) {
++#ifdef TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID
++ case PROP_CAMERA_ID:
++ g_value_set_uint (value, v4l2src->camera_id);
++ break;
++#endif /* TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID */
++#ifdef TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE
++ case PROP_AUTO_SCAN_DEVICE:
++ GST_INFO_OBJECT (v4l2src, "auto scan device [%d]", v4l2src->v4l2object->auto_scan_device);
++ g_value_set_boolean (value, v4l2src->v4l2object->auto_scan_device);
++ break;
++#endif /* TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE */
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ case PROP_TBM_OUTPUT:
++ GST_INFO_OBJECT (v4l2src, "tbm output [%d]", v4l2src->v4l2object->tbm_output);
++ g_value_set_boolean (value, v4l2src->v4l2object->tbm_output);
++ break;
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+ }
+
+ static gboolean
+ gst_v4l2_src_fixate_fields (GQuark field_id, GValue * value, gpointer user_data)
+ {
+ GstStructure *s = user_data;
+
+ if (field_id == g_quark_from_string ("interlace-mode"))
+ return TRUE;
+
+ if (field_id == g_quark_from_string ("colorimetry"))
+ return TRUE;
+
+ gst_structure_fixate_field (s, g_quark_to_string (field_id));
+
+ return TRUE;
+ }
+
+ static void
+ gst_v4l2_src_fixate_struct_with_preference (GstStructure * s,
+ struct PreferredCapsInfo *pref)
+ {
+ if (gst_structure_has_field (s, "width"))
+ gst_structure_fixate_field_nearest_int (s, "width", pref->width);
+
+ if (gst_structure_has_field (s, "height"))
+ gst_structure_fixate_field_nearest_int (s, "height", pref->height);
+
+ if (gst_structure_has_field (s, "framerate"))
+ gst_structure_fixate_field_nearest_fraction (s, "framerate", pref->fps_n,
+ pref->fps_d);
+
+ /* Finally, fixate everything else except the interlace-mode and colorimetry
+ * which still need further negotiation as it wasn't probed */
+ gst_structure_map_in_place (s, gst_v4l2_src_fixate_fields, s);
+ }
+
+ static void
+ gst_v4l2_src_parse_fixed_struct (GstStructure * s,
+ gint * width, gint * height, gint * fps_n, gint * fps_d)
+ {
+ if (gst_structure_has_field (s, "width") && width)
+ gst_structure_get_int (s, "width", width);
+
+ if (gst_structure_has_field (s, "height") && height)
+ gst_structure_get_int (s, "height", height);
+
+ if (gst_structure_has_field (s, "framerate") && fps_n && fps_d)
+ gst_structure_get_fraction (s, "framerate", fps_n, fps_d);
+ }
+
+ /* TODO Consider framerate */
+ static gint
+ gst_v4l2src_fixed_caps_compare (GstCaps * caps_a, GstCaps * caps_b,
+ struct PreferredCapsInfo *pref)
+ {
+ GstStructure *a, *b;
+ gint aw = G_MAXINT, ah = G_MAXINT, ad = G_MAXINT;
+ gint bw = G_MAXINT, bh = G_MAXINT, bd = G_MAXINT;
+ gint ret;
+
+ a = gst_caps_get_structure (caps_a, 0);
+ b = gst_caps_get_structure (caps_b, 0);
+
+ gst_v4l2_src_parse_fixed_struct (a, &aw, &ah, NULL, NULL);
+ gst_v4l2_src_parse_fixed_struct (b, &bw, &bh, NULL, NULL);
+
+ /* When both are smaller than pref, just append to the end */
+ if ((bw < pref->width || bh < pref->height)
+ && (aw < pref->width || ah < pref->height)) {
+ ret = 1;
+ goto done;
+ }
+
+ /* If a is smaller than pref and b is not, then a goes after b */
+ if (aw < pref->width || ah < pref->height) {
+ ret = 1;
+ goto done;
+ }
+
+ /* If b is smaller than pref and a is not, then a goes before b */
+ if (bw < pref->width || bh < pref->height) {
+ ret = -1;
+ goto done;
+ }
+
+ /* Both are larger or equal to the preference, prefer the smallest */
+ ad = MAX (1, aw - pref->width) * MAX (1, ah - pref->height);
+ bd = MAX (1, bw - pref->width) * MAX (1, bh - pref->height);
+
+ /* Adjust slightly in case width/height matched the preference */
+ if (aw == pref->width)
+ ad -= 1;
+
+ if (ah == pref->height)
+ ad -= 1;
+
+ if (bw == pref->width)
+ bd -= 1;
+
+ if (bh == pref->height)
+ bd -= 1;
+
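+ /* Worked example (illustrative numbers): with a 1920x1080 preference, a
+ * fixated 1920x1080 candidate scores 1 * 1 - 2 = -1 while a 2560x1440
+ * candidate scores 640 * 360 = 230400, so the exact match sorts first. */
+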
+ /* If the choices are equivalent, maintain the order */
+ if (ad == bd)
+ ret = 1;
+ else
+ ret = ad - bd;
+
+ done:
+ GST_TRACE ("Placing %ix%i (%s) %s %ix%i (%s)", aw, ah,
+ gst_structure_get_string (a, "format"), ret > 0 ? "after" : "before", bw,
+ bh, gst_structure_get_string (b, "format"));
+ return ret;
+ }
+
+ static gboolean
+ gst_v4l2src_set_format (GstV4l2Src * v4l2src, GstCaps * caps,
+ GstV4l2Error * error)
+ {
+ GstV4l2Object *obj;
+
+ obj = v4l2src->v4l2object;
+
+ /* make sure we stop capturing and dealloc buffers */
+ if (!gst_v4l2_object_stop (obj))
+ return FALSE;
+
+ g_signal_emit (v4l2src, gst_v4l2_signals[SIGNAL_PRE_SET_FORMAT], 0,
+ v4l2src->v4l2object->video_fd, caps);
+
+ return gst_v4l2_object_set_format (obj, caps, error);
+ }
+
+ static GstCaps *
+ gst_v4l2src_fixate (GstBaseSrc * basesrc, GstCaps * caps,
+ struct PreferredCapsInfo *pref)
+ {
+ GstV4l2Src *v4l2src = GST_V4L2SRC (basesrc);
+ GstV4l2Object *obj = v4l2src->v4l2object;
+ GList *caps_list = NULL;
+ GstStructure *s;
+ gint i = G_MAXINT;
+ GstV4l2Error error = GST_V4L2_ERROR_INIT;
+ GstCaps *fcaps = NULL;
+
+ GST_DEBUG_OBJECT (basesrc, "Fixating caps %" GST_PTR_FORMAT, caps);
+ GST_DEBUG_OBJECT (basesrc, "Preferred size %ix%i", pref->width, pref->height);
+
+ /* Sort the structures so that the caps nearest to our preference comes
+ * first. Use single-struct caps for sorting so we preserve the features. */
+ for (i = 0; i < gst_caps_get_size (caps); i++) {
+ GstCaps *tmp = gst_caps_copy_nth (caps, i);
+
+ s = gst_caps_get_structure (tmp, 0);
+ gst_v4l2_src_fixate_struct_with_preference (s, pref);
+
+ caps_list = g_list_insert_sorted_with_data (caps_list, tmp,
+ (GCompareDataFunc) gst_v4l2src_fixed_caps_compare, pref);
+ }
+
+ gst_caps_unref (caps);
+ caps = gst_caps_new_empty ();
+
+ while (caps_list) {
+ GstCaps *tmp = caps_list->data;
+ caps_list = g_list_delete_link (caps_list, caps_list);
+ gst_caps_append (caps, tmp);
+ }
+
+ GST_DEBUG_OBJECT (basesrc, "sorted and normalized caps %" GST_PTR_FORMAT,
+ caps);
+
+ /* Each structure in the caps has been fixated, except for the
+ * interlace-mode and colorimetry. Now normalize the caps so we can
+ * enumerate the possibilities */
+ caps = gst_caps_normalize (caps);
+
+ for (i = 0; i < gst_caps_get_size (caps); ++i) {
+ gst_v4l2_clear_error (&error);
+ if (fcaps)
+ gst_caps_unref (fcaps);
+
+ fcaps = gst_caps_copy_nth (caps, i);
+
+ /* try hard to avoid TRY_FMT since some UVC cameras just crash when it
+ * is called at run-time. */
+ if (gst_v4l2_object_caps_is_subset (obj, fcaps)) {
+ gst_caps_unref (fcaps);
+ fcaps = gst_v4l2_object_get_current_caps (obj);
+ break;
+ }
+
+ /* Just check if the format is acceptable, once we know
+ * no buffers should be outstanding we try S_FMT.
+ *
+ * Basesrc will do an allocation query that
+ * should indirectly reclaim buffers, after that we can
+ * set the format and then configure our pool */
+ if (gst_v4l2_object_try_format (obj, fcaps, &error)) {
+ /* make sure the caps changed before doing anything */
+ if (gst_v4l2_object_caps_equal (obj, fcaps))
+ break;
+
+ v4l2src->renegotiation_adjust = v4l2src->offset + 1;
+ v4l2src->pending_set_fmt = TRUE;
+ break;
+ }
+
+ /* Only EINVAL makes sense here; report any other error, so that we don't
+ * keep probing if the device got disconnected or its firmware stopped
+ * responding */
+ if (error.error->code != GST_RESOURCE_ERROR_SETTINGS) {
+ i = G_MAXINT;
+ break;
+ }
+ }
+
+ if (i >= gst_caps_get_size (caps)) {
+ gst_v4l2_error (v4l2src, &error);
+ if (fcaps)
+ gst_caps_unref (fcaps);
+ gst_caps_unref (caps);
+ return NULL;
+ }
+
+ gst_caps_unref (caps);
+
+ GST_DEBUG_OBJECT (basesrc, "fixated caps %" GST_PTR_FORMAT, fcaps);
+
+ return fcaps;
+ }
+
+ static gboolean
+ gst_v4l2src_query_preferred_dv_timings (GstV4l2Src * v4l2src,
+ struct PreferredCapsInfo *pref)
+ {
+ GstV4l2Object *obj = v4l2src->v4l2object;
+ struct v4l2_dv_timings dv_timings = { 0, };
+ const struct v4l2_bt_timings *bt = &dv_timings.bt;
+ gint tot_width, tot_height;
+ gint gcd;
+
+ if (!gst_v4l2_query_dv_timings (obj, &dv_timings))
+ return FALSE;
+
+ pref->width = bt->width;
+ pref->height = bt->height;
+
+ tot_height = bt->height +
+ bt->vfrontporch + bt->vsync + bt->vbackporch +
+ bt->il_vfrontporch + bt->il_vsync + bt->il_vbackporch;
+ tot_width = bt->width + bt->hfrontporch + bt->hsync + bt->hbackporch;
+
+ pref->fps_n = bt->pixelclock;
+ pref->fps_d = tot_width * tot_height;
+
+ if (bt->interlaced)
+ pref->fps_d /= 2;
+
+ gcd = gst_util_greatest_common_divisor (pref->fps_n, pref->fps_d);
+ pref->fps_n /= gcd;
+ pref->fps_d /= gcd;
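+
+ /* e.g. the CEA-861 1080p60 timing: a 148500000 Hz pixel clock over a
+ * 2200x1125 total raster reduces to a 60/1 framerate */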
+
+ /* If we are not streaming (e.g. we received a source-change event), lock
+ * the new timing immediately so that TRY_FMT can work properly */
+ if (!obj->pool || !GST_V4L2_BUFFER_POOL_IS_STREAMING (obj->pool)) {
+ gst_v4l2_set_dv_timings (obj, &dv_timings);
+ /* Setting new DV timings invalidates the probed caps. */
+ gst_caps_replace (&obj->probed_caps, NULL);
+ }
+
+ GST_INFO_OBJECT (v4l2src, "Using DV Timings: %i x %i (%i/%i fps)",
+ pref->width, pref->height, pref->fps_n, pref->fps_d);
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_v4l2src_query_preferred_size (GstV4l2Src * v4l2src,
+ struct PreferredCapsInfo *pref)
+ {
+ struct v4l2_input in = { 0, };
+
+ if (!gst_v4l2_get_input (v4l2src->v4l2object, &in.index))
+ return FALSE;
+
+ if (!gst_v4l2_query_input (v4l2src->v4l2object, &in))
+ return FALSE;
+
+ GST_INFO_OBJECT (v4l2src, "Detect input %u as `%s`", in.index, in.name);
+
+ /* Notify signal status using WARNING/INFO messages */
+ if (in.status & (V4L2_IN_ST_NO_POWER | V4L2_IN_ST_NO_SIGNAL)) {
+ if (!v4l2src->no_signal)
+ /* note: taken from decklinksrc element */
+ GST_ELEMENT_WARNING (v4l2src, RESOURCE, READ, ("Signal lost"),
+ ("No input source was detected - video frames invalid"));
+ v4l2src->no_signal = TRUE;
+ } else if (v4l2src->no_signal) {
+ GST_ELEMENT_INFO (v4l2src, RESOURCE, READ,
+ ("Signal recovered"), ("Input source detected"));
+ v4l2src->no_signal = FALSE;
+ }
+
+ if (in.capabilities & V4L2_IN_CAP_NATIVE_SIZE) {
+ GST_FIXME_OBJECT (v4l2src, "missing support for native video size");
+ return FALSE;
+ } else if (in.capabilities & V4L2_IN_CAP_DV_TIMINGS) {
+ return gst_v4l2src_query_preferred_dv_timings (v4l2src, pref);
+ } else if (in.capabilities & V4L2_IN_CAP_STD) {
+ GST_FIXME_OBJECT (v4l2src, "missing support for video standards");
+ return FALSE;
+ }
+
+ return FALSE;
+ }
+
+ static gboolean
+ gst_v4l2src_negotiate (GstBaseSrc * basesrc)
+ {
+ GstV4l2Src *v4l2src = GST_V4L2SRC (basesrc);
+ GstCaps *thiscaps;
+ GstCaps *caps = NULL;
+ GstCaps *peercaps = NULL;
+ gboolean result = FALSE;
+ /* Let's prefer a good resolution as of today's standard. */
+ struct PreferredCapsInfo pref = {
+ 3840, 2160, 120, 1
+ };
+ gboolean have_pref;
+
+ /* For drivers that have DV timings or other default-size query
+ * capabilities, we will prefer that resolution. This must happen before we
+ * probe the caps, as locking DV Timings or standards will change the
+ * result of the caps enumeration. */
+ have_pref = gst_v4l2src_query_preferred_size (v4l2src, &pref);
+
+ /* first see what is possible on our source pad */
+ thiscaps = gst_pad_query_caps (GST_BASE_SRC_PAD (basesrc), NULL);
+ GST_DEBUG_OBJECT (basesrc, "caps of src: %" GST_PTR_FORMAT, thiscaps);
+
+ /* nothing or anything is allowed, we're done */
+ if (thiscaps == NULL || gst_caps_is_any (thiscaps))
+ goto no_nego_needed;
+
+ /* get the peer caps without a filter as we'll filter ourselves later on */
+ peercaps = gst_pad_peer_query_caps (GST_BASE_SRC_PAD (basesrc), NULL);
+ GST_DEBUG_OBJECT (basesrc, "caps of peer: %" GST_PTR_FORMAT, peercaps);
+ if (peercaps && !gst_caps_is_any (peercaps)) {
+ /* Prefer the first caps we are compatible with that the peer proposed */
+ caps = gst_caps_intersect_full (peercaps, thiscaps,
+ GST_CAPS_INTERSECT_FIRST);
+
+ GST_DEBUG_OBJECT (basesrc, "intersect: %" GST_PTR_FORMAT, caps);
+
+ gst_caps_unref (thiscaps);
+ } else {
+ /* no peer, or the peer has ANY caps; work with our own caps then */
+ caps = thiscaps;
+ }
+
+ if (caps) {
+ /* now fixate */
+ if (!gst_caps_is_empty (caps)) {
+
+ /* otherwise consider the first structure from peercaps to be a
+ * preference. This is useful for matching a reported native display,
+ * or simply to avoid transformations happening downstream. */
+ if (!have_pref && peercaps && !gst_caps_is_any (peercaps)) {
+ GstStructure *pref_s = gst_caps_get_structure (peercaps, 0);
+ pref_s = gst_structure_copy (pref_s);
+ gst_v4l2_src_fixate_struct_with_preference (pref_s, &pref);
+ gst_v4l2_src_parse_fixed_struct (pref_s, &pref.width, &pref.height,
+ &pref.fps_n, &pref.fps_d);
+ gst_structure_free (pref_s);
+ }
+
+ caps = gst_v4l2src_fixate (basesrc, caps, &pref);
+
+ /* Fixating may fail as we now set the selected format */
+ if (!caps) {
+ result = FALSE;
+ goto done;
+ }
+
+ GST_INFO_OBJECT (basesrc, "fixated to: %" GST_PTR_FORMAT, caps);
+
+ if (gst_caps_is_any (caps)) {
+ /* hmm, still anything, so element can do anything and
+ * nego is not needed */
+ result = TRUE;
+ } else if (gst_caps_is_fixed (caps)) {
+ /* yay, fixed caps, use those then */
+ result = gst_base_src_set_caps (basesrc, caps);
+ }
+ }
+ gst_caps_unref (caps);
+ }
+
+ done:
+ if (peercaps)
+ gst_caps_unref (peercaps);
+
+ return result;
+
+ no_nego_needed:
+ {
+ GST_INFO_OBJECT (basesrc, "no negotiation needed");
+ if (thiscaps)
+ gst_caps_unref (thiscaps);
+ return TRUE;
+ }
+ }
+
+ static GstCaps *
+ gst_v4l2src_get_caps (GstBaseSrc * src, GstCaps * filter)
+ {
+ GstV4l2Src *v4l2src;
+ GstV4l2Object *obj;
+
+ v4l2src = GST_V4L2SRC (src);
+ obj = v4l2src->v4l2object;
+
+ if (!GST_V4L2_IS_OPEN (obj)) {
+ return gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD (v4l2src));
+ }
+
+ return gst_v4l2_object_get_caps (obj, filter);
+ }
+
+ static gboolean
+ gst_v4l2src_decide_allocation (GstBaseSrc * bsrc, GstQuery * query)
+ {
+ GstV4l2Src *src = GST_V4L2SRC (bsrc);
+ gboolean ret = TRUE;
+
+ if (src->pending_set_fmt) {
+ GstCaps *caps = gst_pad_get_current_caps (GST_BASE_SRC_PAD (bsrc));
+ GstV4l2Error error = GST_V4L2_ERROR_INIT;
+
+ caps = gst_caps_make_writable (caps);
+
+ ret = gst_v4l2src_set_format (src, caps, &error);
+ if (ret) {
+ GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL (src->v4l2object->pool);
+ gst_v4l2_buffer_pool_enable_resolution_change (pool);
+ } else {
+ gst_v4l2_error (src, &error);
+ }
+
+ gst_caps_unref (caps);
+ src->pending_set_fmt = FALSE;
+ } else if (gst_buffer_pool_is_active (src->v4l2object->pool)) {
+ /* Trick basesrc into not deactivating the active pool. Renegotiating here
+ * would otherwise turn the camera off and on. */
+ GstAllocator *allocator;
+ GstAllocationParams params;
+ GstBufferPool *pool;
+
+ gst_base_src_get_allocator (bsrc, &allocator, &params);
+ pool = gst_base_src_get_buffer_pool (bsrc);
+
+ if (gst_query_get_n_allocation_params (query))
+ gst_query_set_nth_allocation_param (query, 0, allocator, &params);
+ else
+ gst_query_add_allocation_param (query, allocator, &params);
+
+ if (gst_query_get_n_allocation_pools (query))
+ gst_query_set_nth_allocation_pool (query, 0, pool,
+ src->v4l2object->info.size, 1, 0);
+ else
+ gst_query_add_allocation_pool (query, pool, src->v4l2object->info.size, 1,
+ 0);
+
+ if (pool)
+ gst_object_unref (pool);
+ if (allocator)
+ gst_object_unref (allocator);
+
+ return GST_BASE_SRC_CLASS (parent_class)->decide_allocation (bsrc, query);
+ }
+
+ if (ret) {
+ ret = gst_v4l2_object_decide_allocation (src->v4l2object, query);
+ if (ret)
+ ret = GST_BASE_SRC_CLASS (parent_class)->decide_allocation (bsrc, query);
+ }
+
+ if (ret) {
+ if (!gst_buffer_pool_set_active (src->v4l2object->pool, TRUE))
+ goto activate_failed;
+ }
+
+ return ret;
+
+ activate_failed:
+ {
+ GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS,
+ (_("Failed to allocate required memory.")),
+ ("Buffer pool activation failed"));
+ return FALSE;
+ }
+ }
+
+ static gboolean
+ gst_v4l2src_query (GstBaseSrc * bsrc, GstQuery * query)
+ {
+ GstV4l2Src *src;
+ GstV4l2Object *obj;
+ gboolean res = FALSE;
+
+ src = GST_V4L2SRC (bsrc);
+ obj = src->v4l2object;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_LATENCY:{
+ GstClockTime min_latency, max_latency;
+ guint32 fps_n, fps_d;
+ guint num_buffers = 0;
+
+ /* device must be open */
+ if (!GST_V4L2_IS_OPEN (obj)) {
+ GST_WARNING_OBJECT (src,
+ "Can't give latency since device isn't open !");
+ goto done;
+ }
+
+ fps_n = GST_V4L2_FPS_N (obj);
+ fps_d = GST_V4L2_FPS_D (obj);
+
+ /* we must have a framerate */
+ if (fps_n <= 0 || fps_d <= 0) {
+ GST_WARNING_OBJECT (src,
+ "Can't give latency since framerate isn't fixated !");
+ goto done;
+ }
+
+ /* min latency is the time to capture one frame/field */
+ min_latency = gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);
+ if (GST_VIDEO_INFO_INTERLACE_MODE (&obj->info) ==
+ GST_VIDEO_INTERLACE_MODE_ALTERNATE)
+ min_latency /= 2;
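+ /* e.g. a 30/1 framerate yields a minimum latency of ~33.3 ms */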
+
+ /* max latency is total duration of the frame buffer */
+ if (obj->pool != NULL)
+ num_buffers = GST_V4L2_BUFFER_POOL_CAST (obj->pool)->max_latency;
+
+ if (num_buffers == 0)
+ max_latency = -1;
+ else
+ max_latency = num_buffers * min_latency;
+
+ GST_DEBUG_OBJECT (bsrc,
+ "report latency min %" GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));
+
+ /* we are always live, the min latency is 1 frame and the max latency is
+ * the complete buffer of frames. */
+ gst_query_set_latency (query, TRUE, min_latency, max_latency);
+
+ res = TRUE;
+ break;
+ }
+ default:
+ res = GST_BASE_SRC_CLASS (parent_class)->query (bsrc, query);
+ break;
+ }
+
+ done:
+
+ return res;
+ }
+
+ /* start and stop are not symmetric -- start will open the device, but not start
+ * capture. it's setcaps that will start capture, which is called via basesrc's
+ * negotiate method. stop will both stop capture and close the device.
+ */
+ static gboolean
+ gst_v4l2src_start (GstBaseSrc * src)
+ {
+ GstV4l2Src *v4l2src = GST_V4L2SRC (src);
+
+ v4l2src->offset = 0;
+ v4l2src->next_offset_same = FALSE;
+ v4l2src->renegotiation_adjust = 0;
+
+ /* activate settings for first frame */
+ v4l2src->ctrl_time = 0;
+ gst_object_sync_values (GST_OBJECT (src), v4l2src->ctrl_time);
+
+ v4l2src->has_bad_timestamp = FALSE;
+ v4l2src->last_timestamp = 0;
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_v4l2src_unlock (GstBaseSrc * src)
+ {
+ GstV4l2Src *v4l2src = GST_V4L2SRC (src);
+ return gst_v4l2_object_unlock (v4l2src->v4l2object);
+ }
+
+ static gboolean
+ gst_v4l2src_unlock_stop (GstBaseSrc * src)
+ {
+ GstV4l2Src *v4l2src = GST_V4L2SRC (src);
+
+ v4l2src->last_timestamp = 0;
+
+ return gst_v4l2_object_unlock_stop (v4l2src->v4l2object);
+ }
+
+ static gboolean
+ gst_v4l2src_stop (GstBaseSrc * src)
+ {
+ GstV4l2Src *v4l2src = GST_V4L2SRC (src);
+ GstV4l2Object *obj = v4l2src->v4l2object;
+
+ if (GST_V4L2_IS_ACTIVE (obj)) {
+ if (!gst_v4l2_object_stop (obj))
+ return FALSE;
+ }
+
+ v4l2src->pending_set_fmt = FALSE;
+
+ return TRUE;
+ }
+
+ static GstStateChangeReturn
+ gst_v4l2src_change_state (GstElement * element, GstStateChange transition)
+ {
+ GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
+ GstV4l2Src *v4l2src = GST_V4L2SRC (element);
+ GstV4l2Object *obj = v4l2src->v4l2object;
+ GstV4l2Error error = GST_V4L2_ERROR_INIT;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ /* open the device */
+ if (!gst_v4l2_object_open (obj, &error)) {
+ gst_v4l2_error (v4l2src, &error);
+ return GST_STATE_CHANGE_FAILURE;
+ }
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ /* close the device */
+ if (!gst_v4l2_object_close (obj))
+ return GST_STATE_CHANGE_FAILURE;
+
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_v4l2src_create (GstPushSrc * src, GstBuffer ** buf)
+ {
+ GstV4l2Src *v4l2src = GST_V4L2SRC (src);
+ GstV4l2Object *obj = v4l2src->v4l2object;
+ GstFlowReturn ret;
+ GstClock *clock;
+ GstClockTime abs_time, base_time, timestamp, duration;
+ GstClockTime delay;
+ GstMessage *qos_msg;
+ gboolean half_frame;
+
+ do {
+ GstV4l2BufferPool *pool = GST_V4L2_BUFFER_POOL_CAST (obj->pool);
+
+ ret = GST_BASE_SRC_CLASS (parent_class)->alloc (GST_BASE_SRC (src), 0,
+ obj->info.size, buf);
+
+ if (G_UNLIKELY (ret != GST_FLOW_OK)) {
+ if (ret == GST_V4L2_FLOW_RESOLUTION_CHANGE) {
+ GST_INFO_OBJECT (v4l2src, "Resolution change detected.");
+
+ /* We are required to always cycle through streamoff; we also need to
+ * streamoff in order to allow locking new DV_TIMINGS, which will
+ * influence the output of TRY_FMT */
+ gst_v4l2src_stop (GST_BASE_SRC (src));
+
+ /* Force renegotiation */
+ v4l2src->renegotiation_adjust = v4l2src->offset + 1;
+ v4l2src->pending_set_fmt = TRUE;
+
+ if (!gst_base_src_negotiate (GST_BASE_SRC (src))) {
+ ret = GST_FLOW_NOT_NEGOTIATED;
+ goto error;
+ }
+
+ continue;
+ }
+ goto alloc_failed;
+ }
+
+ ret = gst_v4l2_buffer_pool_process (pool, buf, NULL);
+
+ } while (ret == GST_V4L2_FLOW_CORRUPTED_BUFFER ||
+ ret == GST_V4L2_FLOW_RESOLUTION_CHANGE);
+
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
+ goto error;
+
+ timestamp = GST_BUFFER_TIMESTAMP (*buf);
+ duration = obj->duration;
+
+ /* timestamps, LOCK to get clock and base time. */
+ /* FIXME: element clock and base_time is rarely changing */
+ GST_OBJECT_LOCK (v4l2src);
+ if ((clock = GST_ELEMENT_CLOCK (v4l2src))) {
+ /* we have a clock, get base time and ref clock */
+ base_time = GST_ELEMENT (v4l2src)->base_time;
+ gst_object_ref (clock);
+ } else {
+ /* no clock, can't set timestamps */
+ base_time = GST_CLOCK_TIME_NONE;
+ }
+ GST_OBJECT_UNLOCK (v4l2src);
+
+ /* sample pipeline clock */
+ if (clock) {
+ abs_time = gst_clock_get_time (clock);
+ gst_object_unref (clock);
+ } else {
+ abs_time = GST_CLOCK_TIME_NONE;
+ }
+
+ retry:
+ if (!v4l2src->has_bad_timestamp && timestamp != GST_CLOCK_TIME_NONE) {
+ struct timespec now;
+ GstClockTime gstnow;
+
+ /* v4l2 specs say to use the system time although many drivers switched to
+ * the more desirable monotonic time. We first try to use the monotonic time
+ * and see how that goes */
+ clock_gettime (CLOCK_MONOTONIC, &now);
+ gstnow = GST_TIMESPEC_TO_TIME (now);
+
+ if (timestamp > gstnow || (gstnow - timestamp) > (10 * GST_SECOND)) {
+ /* very large diff, fall back to system time */
+ gstnow = g_get_real_time () * GST_USECOND;
+ }
+
+ /* Detect buggy drivers here, and stop using their timestamp. Failing any
+ * of these conditions would imply a very buggy driver:
+ * - Timestamp is in the future
+ * - Timestamp is going backward compared to the last seen timestamp
+ * - Timestamp is jumping forward by less than a frame duration
+ * - Delay is bigger than the actual timestamp
+ */
+ if (timestamp > gstnow) {
+ GST_WARNING_OBJECT (v4l2src,
+ "Timestamp in the future detected, ignoring driver timestamps");
+ v4l2src->has_bad_timestamp = TRUE;
+ goto retry;
+ }
+
+ if (v4l2src->last_timestamp > timestamp) {
+ GST_WARNING_OBJECT (v4l2src,
+ "Timestamp going backward, ignoring driver timestamps");
+ v4l2src->has_bad_timestamp = TRUE;
+ goto retry;
+ }
+
+ delay = gstnow - timestamp;
+
+ if (delay > timestamp) {
+ GST_WARNING_OBJECT (v4l2src,
+ "Timestamp does not correlate with any clock, ignoring driver timestamps");
+ v4l2src->has_bad_timestamp = TRUE;
+ goto retry;
+ }
+
+ /* Save last timestamp for sanity checks */
+ v4l2src->last_timestamp = timestamp;
+
+ GST_DEBUG_OBJECT (v4l2src, "ts: %" GST_TIME_FORMAT " now %" GST_TIME_FORMAT
+ " delay %" GST_TIME_FORMAT, GST_TIME_ARGS (timestamp),
+ GST_TIME_ARGS (gstnow), GST_TIME_ARGS (delay));
+ } else {
+ /* we assume 1 frame/field latency otherwise */
+ if (GST_CLOCK_TIME_IS_VALID (duration))
+ delay = duration;
+ else
+ delay = 0;
+ }
+
+ /* set buffer metadata */
+
+ if (G_LIKELY (abs_time != GST_CLOCK_TIME_NONE)) {
+ /* the time now is the time of the clock minus the base time */
+ timestamp = abs_time - base_time;
+
+ /* adjust for delay in the device */
+ if (timestamp > delay)
+ timestamp -= delay;
+ else
+ timestamp = 0;
+ } else {
+ timestamp = GST_CLOCK_TIME_NONE;
+ }
+
+ /* activate settings for next frame */
+ if (GST_CLOCK_TIME_IS_VALID (duration)) {
+ v4l2src->ctrl_time += duration;
+ } else {
+ /* this is not very good (as it should be the next timestamp),
+ * still good enough for linear fades (as long as it is not -1)
+ */
+ v4l2src->ctrl_time = timestamp;
+ }
+ gst_object_sync_values (GST_OBJECT (src), v4l2src->ctrl_time);
+
+ GST_LOG_OBJECT (src, "sync to %" GST_TIME_FORMAT " out ts %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (v4l2src->ctrl_time), GST_TIME_ARGS (timestamp));
+
+ if (v4l2src->next_offset_same &&
+ GST_BUFFER_OFFSET_IS_VALID (*buf) &&
+ GST_BUFFER_OFFSET (*buf) != v4l2src->offset) {
+ /* Probably had a lost field then, best to forget about last field. */
+ GST_WARNING_OBJECT (v4l2src,
+ "lost field detected - ts: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (timestamp));
+ v4l2src->next_offset_same = FALSE;
+ }
+
+ half_frame = (GST_BUFFER_FLAG_IS_SET (*buf, GST_VIDEO_BUFFER_FLAG_ONEFIELD));
+ if (half_frame)
+ v4l2src->next_offset_same = !v4l2src->next_offset_same;
+
+ /* use generated offset values only if there are not already valid ones
+ * set by the v4l2 device */
+ if (!GST_BUFFER_OFFSET_IS_VALID (*buf)
+ || !GST_BUFFER_OFFSET_END_IS_VALID (*buf)
+ || GST_BUFFER_OFFSET (*buf) <=
+ (v4l2src->offset - v4l2src->renegotiation_adjust)) {
+ GST_BUFFER_OFFSET (*buf) = v4l2src->offset;
+ GST_BUFFER_OFFSET_END (*buf) = v4l2src->offset + 1;
+ if (!half_frame || !v4l2src->next_offset_same)
+ v4l2src->offset++;
+ } else {
+ /* adjust the raw v4l2 device sequence, which restarts at zero in case of
+ * renegotiation (streamoff/streamon) */
+ GST_BUFFER_OFFSET (*buf) += v4l2src->renegotiation_adjust;
+ GST_BUFFER_OFFSET_END (*buf) += v4l2src->renegotiation_adjust;
+ /* check for frame loss with given (from v4l2 device) buffer offset */
+ if ((v4l2src->offset != 0)
+ && (!half_frame || v4l2src->next_offset_same)
+ && (GST_BUFFER_OFFSET (*buf) != (v4l2src->offset + 1))) {
+ guint64 lost_frame_count = GST_BUFFER_OFFSET (*buf) - v4l2src->offset - 1;
+ GST_WARNING_OBJECT (v4l2src,
+ "lost frames detected: count = %" G_GUINT64_FORMAT " - ts: %"
+ GST_TIME_FORMAT, lost_frame_count, GST_TIME_ARGS (timestamp));
+
+ qos_msg = gst_message_new_qos (GST_OBJECT_CAST (v4l2src), TRUE,
+ GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, timestamp,
+ GST_CLOCK_TIME_IS_VALID (duration) ? lost_frame_count *
+ duration : GST_CLOCK_TIME_NONE);
+ gst_element_post_message (GST_ELEMENT_CAST (v4l2src), qos_msg);
+
+ }
+ v4l2src->offset = GST_BUFFER_OFFSET (*buf);
+ }
+
+ GST_BUFFER_TIMESTAMP (*buf) = timestamp;
+ GST_BUFFER_DURATION (*buf) = duration;
+
+ return ret;
+
+ /* ERROR */
+ alloc_failed:
+ {
+ if (ret != GST_FLOW_FLUSHING)
+ GST_ELEMENT_ERROR (src, RESOURCE, NO_SPACE_LEFT,
+ ("Failed to allocate a buffer"), (NULL));
+ return ret;
+ }
+ error:
+ {
+ gst_buffer_replace (buf, NULL);
+ if (ret == GST_V4L2_FLOW_LAST_BUFFER) {
+ GST_ELEMENT_ERROR (src, RESOURCE, FAILED,
+ ("Driver returned a buffer with no payload, this most likely "
+ "indicate a bug in the driver."), (NULL));
+ ret = GST_FLOW_ERROR;
+ } else {
+ GST_DEBUG_OBJECT (src, "error processing buffer %d (%s)", ret,
+ gst_flow_get_name (ret));
+ }
+ return ret;
+ }
+ }
+
+
+ /* GstURIHandler interface */
+ static GstURIType
+ gst_v4l2src_uri_get_type (GType type)
+ {
+ return GST_URI_SRC;
+ }
+
+ static const gchar *const *
+ gst_v4l2src_uri_get_protocols (GType type)
+ {
+ static const gchar *protocols[] = { "v4l2", NULL };
+
+ return protocols;
+ }
+
+ static gchar *
+ gst_v4l2src_uri_get_uri (GstURIHandler * handler)
+ {
+ GstV4l2Src *v4l2src = GST_V4L2SRC (handler);
+
+ if (v4l2src->v4l2object->videodev != NULL) {
+ return g_strdup_printf ("v4l2://%s", v4l2src->v4l2object->videodev);
+ }
+
+ return g_strdup ("v4l2://");
+ }
+
+ static gboolean
+ gst_v4l2src_uri_set_uri (GstURIHandler * handler, const gchar * uri,
+ GError ** error)
+ {
+ GstV4l2Src *v4l2src = GST_V4L2SRC (handler);
+ const gchar *device = DEFAULT_PROP_DEVICE;
+
+ if (strcmp (uri, "v4l2://") != 0) {
+ device = uri + 7;
+ }
+ g_object_set (v4l2src, "device", device, NULL);
+
+ return TRUE;
+ }
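+
+ /* Usage sketch (illustrative; not part of this element's code): the
+ * element can be constructed from its URI, e.g.
+ * GstElement *src = gst_element_make_from_uri (GST_URI_SRC,
+ * "v4l2:///dev/video1", NULL, NULL);
+ */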
+
+
+ static void
+ gst_v4l2src_uri_handler_init (gpointer g_iface, gpointer iface_data)
+ {
+ GstURIHandlerInterface *iface = (GstURIHandlerInterface *) g_iface;
+
+ iface->get_type = gst_v4l2src_uri_get_type;
+ iface->get_protocols = gst_v4l2src_uri_get_protocols;
+ iface->get_uri = gst_v4l2src_uri_get_uri;
+ iface->set_uri = gst_v4l2src_uri_set_uri;
+ }
--- /dev/null
+ /* GStreamer
+ *
+ * Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * 2006 Edgard Lima <edgard.lima@gmail.com>
+ *
+ * gstv4l2src.h: BT8x8/V4L2 source element
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifndef __GST_V4L2SRC_H__
+ #define __GST_V4L2SRC_H__
+
+ #include <gstv4l2object.h>
+ #include <gstv4l2bufferpool.h>
+
+ G_BEGIN_DECLS
+
+ #define GST_TYPE_V4L2SRC \
+ (gst_v4l2src_get_type())
+ #define GST_V4L2SRC(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_V4L2SRC,GstV4l2Src))
+ #define GST_V4L2SRC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_V4L2SRC,GstV4l2SrcClass))
+ #define GST_IS_V4L2SRC(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_V4L2SRC))
+ #define GST_IS_V4L2SRC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4L2SRC))
+
+ typedef struct _GstV4l2Src GstV4l2Src;
+ typedef struct _GstV4l2SrcClass GstV4l2SrcClass;
+
+ /**
+ * GstV4l2Src:
+ *
+ * Opaque object.
+ */
+ struct _GstV4l2Src
+ {
+ GstPushSrc pushsrc;
+
+ /*< private >*/
+ GstV4l2Object * v4l2object;
+
+ guint64 offset;
+ gboolean next_offset_same;
+
+ /* offset adjust after renegotiation */
+ guint64 renegotiation_adjust;
+
+ GstClockTime ctrl_time;
+
+ gboolean pending_set_fmt;
+
+ /* Timestamp sanity check */
+ GstClockTime last_timestamp;
+ gboolean has_bad_timestamp;
+
+ /* maintain signal status, updated during negotiation */
+ gboolean no_signal;
++
++#ifdef TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID
++ /* Properties */
++ guint camera_id;
++#endif /* TIZEN_FEATURE_V4L2SRC_SUPPORT_CAMERA_ID */
+ };
+
+ struct _GstV4l2SrcClass
+ {
+ GstPushSrcClass parent_class;
+
+ GList *v4l2_class_devices;
+ };
+
+ GType gst_v4l2src_get_type (void);
+
+ G_END_DECLS
+
+ #endif /* __GST_V4L2SRC_H__ */
--- /dev/null
+ /*
+ * Copyright (C) 2014 Collabora Ltd.
+ * Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <sys/stat.h>
+ #include <fcntl.h>
+ #include <errno.h>
+ #include <unistd.h>
+ #include <string.h>
+
+ #include "gstv4l2object.h"
+ #include "gstv4l2videodec.h"
+
+ #include "gstv4l2h264codec.h"
+ #include "gstv4l2h265codec.h"
+ #include "gstv4l2mpeg2codec.h"
+ #include "gstv4l2mpeg4codec.h"
+ #include "gstv4l2vp8codec.h"
+ #include "gstv4l2vp9codec.h"
+
+ #include <gst/gst-i18n-plugin.h>
+
+ GST_DEBUG_CATEGORY_STATIC (gst_v4l2_video_dec_debug);
+ #define GST_CAT_DEFAULT gst_v4l2_video_dec_debug
+
+ typedef struct
+ {
+ gchar *device;
+ GstCaps *sink_caps;
+ GstCaps *src_caps;
+ const gchar *longname;
+ const gchar *description;
+ const GstV4l2Codec *codec;
+ } GstV4l2VideoDecCData;
+
+ enum
+ {
+ PROP_0,
+ V4L2_STD_OBJECT_PROPS
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ , PROP_TBM_OUTPUT
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ };
+
+ #define gst_v4l2_video_dec_parent_class parent_class
+ G_DEFINE_ABSTRACT_TYPE (GstV4l2VideoDec, gst_v4l2_video_dec,
+ GST_TYPE_VIDEO_DECODER);
+
+ static GstFlowReturn gst_v4l2_video_dec_finish (GstVideoDecoder * decoder);
+
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++static void gst_v4l2_video_dec_flush_buffer_event (GstVideoDecoder * decoder)
++{
++ gboolean ret = FALSE;
++
++ if (!decoder) {
++ GST_ERROR("no decoder");
++ return;
++ }
++
++ ret = gst_pad_push_event (decoder->srcpad,
++ gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM | GST_EVENT_TYPE_SERIALIZED,
++ gst_structure_new_empty("tizen/flush-buffer")));
++
++ GST_WARNING_OBJECT(decoder, "event push ret[%d] for flush-buffer", ret);
++}
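++
++/* A downstream Tizen element can match this custom event by its structure
++ * name, e.g. (sketch): gst_event_has_name (event, "tizen/flush-buffer") */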
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ static void
+ gst_v4l2_video_dec_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+ {
+ GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (object);
+
+ switch (prop_id) {
+ case PROP_CAPTURE_IO_MODE:
+ if (!gst_v4l2_object_set_property_helper (self->v4l2capture,
+ prop_id, value, pspec)) {
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+ break;
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ case PROP_TBM_OUTPUT:
++ self->v4l2capture->tbm_output = g_value_get_boolean (value);
++ GST_INFO_OBJECT (self, "tbm output [%d]", self->v4l2capture->tbm_output);
++ break;
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+
+ /* By default, only set on output */
+ default:
+ if (!gst_v4l2_object_set_property_helper (self->v4l2output,
+ prop_id, value, pspec)) {
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+ break;
+ }
+ }
+
+ static void
+ gst_v4l2_video_dec_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+ {
+ GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (object);
+
+ switch (prop_id) {
+ case PROP_CAPTURE_IO_MODE:
+ if (!gst_v4l2_object_get_property_helper (self->v4l2capture,
+ prop_id, value, pspec)) {
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+ break;
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ case PROP_TBM_OUTPUT:
++ GST_INFO_OBJECT (self, "tbm output [%d]", self->v4l2capture->tbm_output);
++ g_value_set_boolean (value, self->v4l2capture->tbm_output);
++ break;
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+
+ /* By default read from output */
+ default:
+ if (!gst_v4l2_object_get_property_helper (self->v4l2output,
+ prop_id, value, pspec)) {
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+ break;
+ }
+ }
+
+ static gboolean
+ gst_v4l2_video_dec_open (GstVideoDecoder * decoder)
+ {
+ GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
+ GstV4l2Error error = GST_V4L2_ERROR_INIT;
+ GstCaps *codec_caps;
+
+ GST_DEBUG_OBJECT (self, "Opening");
+
+ if (!gst_v4l2_object_open (self->v4l2output, &error))
+ goto failure;
+
+ if (!gst_v4l2_object_open_shared (self->v4l2capture, self->v4l2output))
+ goto failure;
+
+ codec_caps = gst_pad_get_pad_template_caps (decoder->sinkpad);
+ self->probed_sinkcaps = gst_v4l2_object_probe_caps (self->v4l2output,
+ codec_caps);
+ gst_caps_unref (codec_caps);
+
+ if (gst_caps_is_empty (self->probed_sinkcaps))
+ goto no_encoded_format;
+
+ return TRUE;
+
+ no_encoded_format:
+ GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
+ (_("Decoder on device %s has no supported input format"),
+ self->v4l2output->videodev), (NULL));
+ goto failure;
+
+ failure:
+ if (GST_V4L2_IS_OPEN (self->v4l2output))
+ gst_v4l2_object_close (self->v4l2output);
+
+ if (GST_V4L2_IS_OPEN (self->v4l2capture))
+ gst_v4l2_object_close (self->v4l2capture);
+
+ gst_caps_replace (&self->probed_srccaps, NULL);
+ gst_caps_replace (&self->probed_sinkcaps, NULL);
+
+ gst_v4l2_error (self, &error);
+
+ return FALSE;
+ }
+
+ static gboolean
+ gst_v4l2_video_dec_close (GstVideoDecoder * decoder)
+ {
+ GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
+
+ GST_DEBUG_OBJECT (self, "Closing");
+
+ gst_v4l2_object_close (self->v4l2output);
+ gst_v4l2_object_close (self->v4l2capture);
+ gst_caps_replace (&self->probed_srccaps, NULL);
+ gst_caps_replace (&self->probed_sinkcaps, NULL);
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_v4l2_video_dec_start (GstVideoDecoder * decoder)
+ {
+ GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
+
+ GST_DEBUG_OBJECT (self, "Starting");
+
+ gst_v4l2_object_unlock (self->v4l2output);
+ g_atomic_int_set (&self->active, TRUE);
+ self->output_flow = GST_FLOW_OK;
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_v4l2_video_dec_stop (GstVideoDecoder * decoder)
+ {
+ GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
+
+ GST_DEBUG_OBJECT (self, "Stopping");
+
+ gst_v4l2_object_unlock (self->v4l2output);
+ gst_v4l2_object_unlock (self->v4l2capture);
+
+ /* Wait for capture thread to stop */
+ gst_pad_stop_task (decoder->srcpad);
+
+ GST_VIDEO_DECODER_STREAM_LOCK (decoder);
+ self->output_flow = GST_FLOW_OK;
+ GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
+
+ /* Should have been flushed already */
+ g_assert (g_atomic_int_get (&self->active) == FALSE);
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ gst_v4l2_video_dec_flush_buffer_event (decoder);
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+
+ gst_v4l2_object_stop (self->v4l2output);
+ gst_v4l2_object_stop (self->v4l2capture);
+
+ if (self->input_state) {
+ gst_video_codec_state_unref (self->input_state);
+ self->input_state = NULL;
+ }
+
+ GST_DEBUG_OBJECT (self, "Stopped");
+
+ return TRUE;
+ }
+
+ static gboolean
+ compatible_caps (GstV4l2VideoDec * self, GstCaps * new_caps)
+ {
+ GstCaps *current_caps, *caps1, *caps2;
+ GstStructure *s;
+ gboolean ret;
+
+ current_caps = gst_v4l2_object_get_current_caps (self->v4l2output);
+ if (!current_caps)
+ return FALSE;
+
+ caps1 = gst_caps_copy (current_caps);
+ s = gst_caps_get_structure (caps1, 0);
+ gst_structure_remove_field (s, "framerate");
+
+ caps2 = gst_caps_copy (new_caps);
+ s = gst_caps_get_structure (caps2, 0);
+ gst_structure_remove_field (s, "framerate");
+
+ ret = gst_caps_is_equal (caps1, caps2);
+
+ gst_caps_unref (caps1);
+ gst_caps_unref (caps2);
+ gst_caps_unref (current_caps);
+
+ return ret;
+ }
+
+ static gboolean
+ gst_v4l2_video_dec_set_format (GstVideoDecoder * decoder,
+ GstVideoCodecState * state)
+ {
+ GstV4l2Error error = GST_V4L2_ERROR_INIT;
+ gboolean ret = TRUE;
+ GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
+
+ GST_DEBUG_OBJECT (self, "Setting format: %" GST_PTR_FORMAT, state->caps);
+
+ if (self->input_state) {
+ if (compatible_caps (self, state->caps)) {
+ GST_DEBUG_OBJECT (self, "Compatible caps");
+ goto done;
+ }
+ gst_video_codec_state_unref (self->input_state);
+ self->input_state = NULL;
+
+ gst_v4l2_video_dec_finish (decoder);
+ gst_v4l2_object_stop (self->v4l2output);
+
+ /* The renegotiation flow doesn't blend with the base class flow. To
+ * properly stop the capture pool, if the buffers can't be orphaned, we
+ * need to reclaim our buffers, which will happen through the allocation
+ * query. The allocation query is triggered by gst_video_decoder_negotiate()
+ * which requires the output caps to be set, but we can't know this
+ * information as we rely on the decoder, which requires the capture queue
+ * to be stopped.
+ *
+ * To work around this issue, we simply run an allocation query with the
+ * old negotiated caps in order to drain/reclaim our buffers. That removes
+ * the complexity and should not have much impact on performance since the
+ * following allocation query will happen on a drained pipeline and won't
+ * block. */
+ if (self->v4l2capture->pool &&
+ !gst_v4l2_buffer_pool_orphan (&self->v4l2capture->pool)) {
+ GstCaps *caps = gst_pad_get_current_caps (decoder->srcpad);
+ if (caps) {
+ GstQuery *query = gst_query_new_allocation (caps, FALSE);
+ gst_pad_peer_query (decoder->srcpad, query);
+ gst_query_unref (query);
+ gst_caps_unref (caps);
+ }
+ }
+
+ gst_v4l2_object_stop (self->v4l2capture);
+ self->output_flow = GST_FLOW_OK;
+ }
+
+ ret = gst_v4l2_object_set_format (self->v4l2output, state->caps, &error);
+
+ gst_caps_replace (&self->probed_srccaps, NULL);
+ self->probed_srccaps = gst_v4l2_object_probe_caps (self->v4l2capture,
+ gst_v4l2_object_get_raw_caps ());
+
+ if (gst_caps_is_empty (self->probed_srccaps))
+ goto no_raw_format;
+
+ if (ret)
+ self->input_state = gst_video_codec_state_ref (state);
+ else
+ gst_v4l2_error (self, &error);
+
+ done:
+ return ret;
+
+ no_raw_format:
+ GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
+ (_("Decoder on device %s has no supported output format"),
+ self->v4l2output->videodev), (NULL));
+ return FALSE;
+ }
+
+ static gboolean
+ gst_v4l2_video_dec_flush (GstVideoDecoder * decoder)
+ {
+ GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
+
+ GST_DEBUG_OBJECT (self, "Flushed");
+
+ /* Ensure the processing thread has stopped for the reverse playback
+ * discont case */
+ if (gst_pad_get_task_state (decoder->srcpad) == GST_TASK_STARTED) {
+ GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
+
+ gst_v4l2_object_unlock (self->v4l2output);
+ gst_v4l2_object_unlock (self->v4l2capture);
+ gst_pad_stop_task (decoder->srcpad);
+ GST_VIDEO_DECODER_STREAM_LOCK (decoder);
+ }
+
+ if (G_UNLIKELY (!g_atomic_int_get (&self->active)))
+ return TRUE;
+
+ self->output_flow = GST_FLOW_OK;
+
+ gst_v4l2_object_unlock_stop (self->v4l2output);
+ gst_v4l2_object_unlock_stop (self->v4l2capture);
+
+ if (self->v4l2output->pool)
+ gst_v4l2_buffer_pool_flush (self->v4l2output->pool);
+
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ gst_v4l2_video_dec_flush_buffer_event (decoder);
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ /* gst_v4l2_buffer_pool_flush() calls streamon the capture pool and must be
+ * called after gst_v4l2_object_unlock_stop() stopped flushing the buffer
+ * pool. */
+ if (self->v4l2capture->pool)
+ gst_v4l2_buffer_pool_flush (self->v4l2capture->pool);
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_v4l2_video_dec_negotiate (GstVideoDecoder * decoder)
+ {
+ GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
+
+ /* We don't allow renegotiation without carefully disabling the pool */
+ if (self->v4l2capture->pool &&
+ gst_buffer_pool_is_active (GST_BUFFER_POOL (self->v4l2capture->pool)))
+ return TRUE;
+
+ return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder);
+ }
+
+ static gboolean
+ gst_v4l2_decoder_cmd (GstV4l2Object * v4l2object, guint cmd, guint flags)
+ {
+ struct v4l2_decoder_cmd dcmd = { 0, };
+
+ GST_DEBUG_OBJECT (v4l2object->element,
+ "sending v4l2 decoder command %u with flags %u", cmd, flags);
+
+ if (!GST_V4L2_IS_OPEN (v4l2object))
+ return FALSE;
+
+ dcmd.cmd = cmd;
+ dcmd.flags = flags;
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_DECODER_CMD, &dcmd) < 0)
+ goto dcmd_failed;
+
+ return TRUE;
+
+ dcmd_failed:
+ if (errno == ENOTTY) {
+ GST_INFO_OBJECT (v4l2object->element,
+ "Failed to send decoder command %u with flags %u for '%s'. (%s)",
+ cmd, flags, v4l2object->videodev, g_strerror (errno));
+ } else {
+ GST_ERROR_OBJECT (v4l2object->element,
+ "Failed to send decoder command %u with flags %u for '%s'. (%s)",
+ cmd, flags, v4l2object->videodev, g_strerror (errno));
+ }
+ return FALSE;
+ }
+
+ static GstFlowReturn
+ gst_v4l2_video_dec_finish (GstVideoDecoder * decoder)
+ {
+ GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstBuffer *buffer;
+
+ if (gst_pad_get_task_state (decoder->srcpad) != GST_TASK_STARTED)
+ goto done;
+
+ GST_DEBUG_OBJECT (self, "Finishing decoding");
+
+ GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
+
+ if (gst_v4l2_decoder_cmd (self->v4l2output, V4L2_DEC_CMD_STOP, 0)) {
+ GstTask *task = decoder->srcpad->task;
+
+ /* If the decoder stop command succeeded, just wait until processing is
+ * finished */
+ GST_DEBUG_OBJECT (self, "Waiting for decoder stop");
+ GST_OBJECT_LOCK (task);
+ while (GST_TASK_STATE (task) == GST_TASK_STARTED)
+ GST_TASK_WAIT (task);
+ GST_OBJECT_UNLOCK (task);
+ ret = GST_FLOW_FLUSHING;
+ } else {
+ /* otherwise keep queuing empty buffers until the processing thread has
+ * stopped; _pool_process() will return FLUSHING when that happens */
+ while (ret == GST_FLOW_OK) {
+ buffer = gst_buffer_new ();
+ ret =
+ gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL (self->
+ v4l2output->pool), &buffer, NULL);
+ gst_buffer_unref (buffer);
+ }
+ }
+
+ /* and ensure the processing thread has stopped in case another error
+ * occurred. */
+ gst_v4l2_object_unlock (self->v4l2capture);
+ gst_pad_stop_task (decoder->srcpad);
+ GST_VIDEO_DECODER_STREAM_LOCK (decoder);
+
+ if (ret == GST_FLOW_FLUSHING)
+ ret = self->output_flow;
+
+ GST_DEBUG_OBJECT (decoder, "Done draining buffers");
+
+ /* TODO Shall we clean up any reffed frames to work around broken decoders? */
+
+ done:
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_v4l2_video_dec_drain (GstVideoDecoder * decoder)
+ {
+ GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
+
+ GST_DEBUG_OBJECT (self, "Draining...");
+ gst_v4l2_video_dec_finish (decoder);
+ gst_v4l2_video_dec_flush (decoder);
+
+ return GST_FLOW_OK;
+ }
+
+ static gboolean
+ check_system_frame_number_too_old (guint32 current, guint32 old)
+ {
+ guint32 absdiff = current > old ? current - old : old - current;
+
+ /* More than 100 frames in the past, or current wrapped around */
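+ /* (worked example: current = 5 and old = G_MAXUINT32 - 10 give
+ * absdiff = G_MAXUINT32 - 15; that exceeds G_MAXUINT32 - 100, so this is
+ * a wraparound with a real distance of only 16 and is not "too old") */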
+ if (absdiff > 100) {
+ /* Wraparound and difference is actually smaller than 100 */
+ if (absdiff > G_MAXUINT32 - 100)
+ return FALSE;
+ return TRUE;
+ }
+
+ return FALSE;
+ }
+
+ static void
+ gst_v4l2_video_dec_loop (GstVideoDecoder * decoder)
+ {
+ GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
+ GstV4l2BufferPool *v4l2_pool = GST_V4L2_BUFFER_POOL (self->v4l2capture->pool);
+ GstBufferPool *pool;
+ GstVideoCodecFrame *frame;
+ GstBuffer *buffer = NULL;
+ GstFlowReturn ret;
+
+ GST_LOG_OBJECT (decoder, "Allocate output buffer");
+
+ self->output_flow = GST_FLOW_OK;
+ do {
+ /* We cannot use the base class allocate helper since it takes the
+ * internal stream lock. We know that the acquire may need to poll until
+ * more frames come in, and holding this lock would prevent that.
+ */
+ pool = gst_video_decoder_get_buffer_pool (decoder);
+
+ /* Pool may be NULL if we started going to READY state */
+ if (pool == NULL) {
+ ret = GST_FLOW_FLUSHING;
+ goto beach;
+ }
+
+ ret = gst_buffer_pool_acquire_buffer (pool, &buffer, NULL);
+ g_object_unref (pool);
+
+ if (ret != GST_FLOW_OK)
+ goto beach;
+
+ GST_LOG_OBJECT (decoder, "Process output buffer");
+ ret = gst_v4l2_buffer_pool_process (v4l2_pool, &buffer, NULL);
+ } while (ret == GST_V4L2_FLOW_CORRUPTED_BUFFER);
+
+ if (ret != GST_FLOW_OK)
+ goto beach;
+
+ if (GST_BUFFER_TIMESTAMP (buffer) % GST_SECOND != 0)
+ GST_ERROR_OBJECT (decoder,
+ "Driver bug detected - check driver with v4l2-compliance from http://git.linuxtv.org/v4l-utils.git");
+ GST_LOG_OBJECT (decoder, "Got buffer for frame number %u",
+ (guint32) (GST_BUFFER_TIMESTAMP (buffer) / GST_SECOND));
+
+ frame =
+ gst_video_decoder_get_frame (decoder,
+ GST_BUFFER_TIMESTAMP (buffer) / GST_SECOND);
+ if (frame) {
+ GstVideoCodecFrame *oldest_frame;
+ gboolean warned = FALSE;
+
+ /* Garbage collect old frames in case of codec bugs */
+ while ((oldest_frame = gst_video_decoder_get_oldest_frame (decoder)) &&
+ check_system_frame_number_too_old (frame->system_frame_number,
+ oldest_frame->system_frame_number)) {
+ gst_video_decoder_drop_frame (decoder, oldest_frame);
+ oldest_frame = NULL;
+
+ if (!warned) {
+ g_warning ("%s: Too old frames, bug in decoder -- please file a bug",
+ GST_ELEMENT_NAME (decoder));
+ warned = TRUE;
+ }
+ }
+ if (oldest_frame)
+ gst_video_codec_frame_unref (oldest_frame);
+
+ frame->output_buffer = buffer;
+ buffer = NULL;
+ ret = gst_video_decoder_finish_frame (decoder, frame);
+
+ if (ret != GST_FLOW_OK)
+ goto beach;
+ } else {
+ GST_WARNING_OBJECT (decoder, "Decoder is producing too many buffers");
+ gst_buffer_unref (buffer);
+ }
+
+ return;
+
+ beach:
+ GST_DEBUG_OBJECT (decoder, "Leaving output thread: %s",
+ gst_flow_get_name (ret));
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ if (ret == GST_FLOW_EOS)
++ gst_v4l2_video_dec_flush_buffer_event (decoder);
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+
+ gst_buffer_replace (&buffer, NULL);
+ self->output_flow = ret;
+ gst_v4l2_object_unlock (self->v4l2output);
+ gst_pad_pause_task (decoder->srcpad);
+ }
+
+ static gboolean
+ gst_v4l2_video_remove_padding (GstCapsFeatures * features,
+ GstStructure * structure, gpointer user_data)
+ {
+ GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (user_data);
+ GstVideoAlignment *align = &self->v4l2capture->align;
+ GstVideoInfo *info = &self->v4l2capture->info;
+ int width, height;
+
+ if (!gst_structure_get_int (structure, "width", &width))
+ return TRUE;
+
+ if (!gst_structure_get_int (structure, "height", &height))
+ return TRUE;
+
+ if (align->padding_left != 0 || align->padding_top != 0 ||
+ height != info->height + align->padding_bottom)
+ return TRUE;
+
+ if (height == info->height + align->padding_bottom) {
+ /* Some drivers may round up width to the padded width */
+ if (width == info->width + align->padding_right)
+ gst_structure_set (structure,
+ "width", G_TYPE_INT, width - align->padding_right,
+ "height", G_TYPE_INT, height - align->padding_bottom, NULL);
+ /* Some drivers may keep visible width and only round up bytesperline */
+ else if (width == info->width)
+ gst_structure_set (structure,
+ "height", G_TYPE_INT, height - align->padding_bottom, NULL);
+ }
+
+ return TRUE;
+ }
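A worked example of the rewrite above, assuming the decoder reports a visible 1920x1080 frame with 8 pixels of right padding and 8 lines of bottom padding (illustrative values):

    /* info->width = 1920, info->height = 1080,
     * align->padding_right = 8, align->padding_bottom = 8
     *
     * { width=1928, height=1088 } -> { width=1920, height=1080 }
     *     driver rounded up both dimensions
     * { width=1920, height=1088 } -> { width=1920, height=1080 }
     *     driver kept the visible width, rounded up the height only
     * { width=1280, height=720 }  -> untouched, the geometry does not
     *     match the padded size */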
+
+ static GstFlowReturn
+ gst_v4l2_video_dec_handle_frame (GstVideoDecoder * decoder,
+ GstVideoCodecFrame * frame)
+ {
+ GstV4l2Error error = GST_V4L2_ERROR_INIT;
+ GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
+ GstFlowReturn ret = GST_FLOW_OK;
+ gboolean processed = FALSE;
+ GstBuffer *tmp;
+ GstTaskState task_state;
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ GstStructure *structure = NULL;
++ const gchar *caps_format = NULL;
++ GstMessage *msg = NULL;
++ GstV4l2BufferPool *capture_pool = NULL;
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+
+ GST_DEBUG_OBJECT (self, "Handling frame %d", frame->system_frame_number);
+
+ if (G_UNLIKELY (!g_atomic_int_get (&self->active)))
+ goto flushing;
+
+ if (G_UNLIKELY (!GST_V4L2_IS_ACTIVE (self->v4l2output))) {
+ if (!self->input_state)
+ goto not_negotiated;
+ if (!gst_v4l2_object_set_format (self->v4l2output, self->input_state->caps,
+ &error))
+ goto not_negotiated;
+ }
+
+ if (G_UNLIKELY (!GST_V4L2_IS_ACTIVE (self->v4l2capture))) {
+ GstBufferPool *pool = GST_BUFFER_POOL (self->v4l2output->pool);
+ GstVideoInfo info;
+ GstVideoCodecState *output_state;
+ GstBuffer *codec_data;
+ GstCaps *acquired_caps, *available_caps, *caps, *filter;
+ GstStructure *st;
+ guint32 dummy_frame_number = 0;
+
+ GST_DEBUG_OBJECT (self, "Sending header");
+
+ codec_data = self->input_state->codec_data;
+
+ /* We are running in byte-stream mode, so we don't know the headers, but
+ * we need to send something, otherwise the decoder will refuse to
+ * initialize.
+ */
+ if (codec_data) {
+ gst_buffer_ref (codec_data);
+ } else {
+ codec_data = gst_buffer_ref (frame->input_buffer);
+ processed = TRUE;
+ }
+
+ /* Ensure input internal pool is active */
+ if (!gst_buffer_pool_is_active (pool)) {
+ GstStructure *config = gst_buffer_pool_get_config (pool);
+ guint min = MAX (self->v4l2output->min_buffers,
+ GST_V4L2_MIN_BUFFERS (self->v4l2output));
+ guint max = VIDEO_MAX_FRAME;
+
+ gst_buffer_pool_config_set_params (config, self->input_state->caps,
+ self->v4l2output->info.size, min, max);
+
+ /* There is no reason to refuse this config */
+ if (!gst_buffer_pool_set_config (pool, config))
+ goto activate_failed;
+
+ if (!gst_buffer_pool_set_active (pool, TRUE))
+ goto activate_failed;
+ }
+
+ GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
+ GST_LOG_OBJECT (decoder, "Passing buffer with system frame number %u",
+ processed ? frame->system_frame_number : 0);
+ ret =
+ gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL (self->
+ v4l2output->pool), &codec_data,
+ processed ? &frame->system_frame_number : &dummy_frame_number);
+ GST_VIDEO_DECODER_STREAM_LOCK (decoder);
+
+ gst_buffer_unref (codec_data);
+
+ /* init capture fps according to output */
+ self->v4l2capture->info.fps_d = self->v4l2output->info.fps_d;
+ self->v4l2capture->info.fps_n = self->v4l2output->info.fps_n;
+
+ /* For decoders G_FMT returns coded size, G_SELECTION returns visible size
+ * in the compose rectangle. gst_v4l2_object_acquire_format() checks both
+ * and returns the visible size as width/height and the coded size as
+ * padding. */
+ if (!gst_v4l2_object_acquire_format (self->v4l2capture, &info))
+ goto not_negotiated;
+
+ /* gst_v4l2_object_acquire_format() does not set fps, copy from sink */
+ info.fps_n = self->v4l2output->info.fps_n;
+ info.fps_d = self->v4l2output->info.fps_d;
+
+ /* Create caps from the acquired format, remove the format field */
+ acquired_caps = gst_video_info_to_caps (&info);
+ GST_DEBUG_OBJECT (self, "Acquired caps: %" GST_PTR_FORMAT, acquired_caps);
+ st = gst_caps_get_structure (acquired_caps, 0);
+ gst_structure_remove_fields (st, "format", "colorimetry", "chroma-site",
+ NULL);
+
+ /* Probe currently available pixel formats */
+ available_caps = gst_caps_copy (self->probed_srccaps);
+ GST_DEBUG_OBJECT (self, "Available caps: %" GST_PTR_FORMAT, available_caps);
+
+ /* Replace coded size with visible size, we want to negotiate visible size
+ * with downstream, not coded size. */
+ gst_caps_map_in_place (available_caps, gst_v4l2_video_remove_padding, self);
+
+ filter = gst_caps_intersect_full (available_caps, acquired_caps,
+ GST_CAPS_INTERSECT_FIRST);
+ GST_DEBUG_OBJECT (self, "Filtered caps: %" GST_PTR_FORMAT, filter);
+ gst_caps_unref (acquired_caps);
+ gst_caps_unref (available_caps);
+ caps = gst_pad_peer_query_caps (decoder->srcpad, filter);
+ gst_caps_unref (filter);
+
+ GST_DEBUG_OBJECT (self, "Possible decoded caps: %" GST_PTR_FORMAT, caps);
+ if (gst_caps_is_empty (caps)) {
+ gst_caps_unref (caps);
+ goto not_negotiated;
+ }
+
+ /* Fixate pixel format */
+ caps = gst_caps_fixate (caps);
+
+ GST_DEBUG_OBJECT (self, "Chosen decoded caps: %" GST_PTR_FORMAT, caps);
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ structure = gst_caps_get_structure (caps, 0);
++ caps_format = gst_structure_get_string (structure, "format");
++
++ if (!strcmp (caps_format, "I420")) {
++ GST_INFO_OBJECT (self, "I420 -> S420");
++ gst_caps_set_simple (caps, "format", G_TYPE_STRING, "S420", NULL);
++ } else if (!strcmp (caps_format, "NV12")) {
++ GST_INFO_OBJECT (self, "NV12 -> SN12");
++ gst_caps_set_simple (caps, "format", G_TYPE_STRING, "SN12", NULL);
++ }
++ GST_INFO_OBJECT (self, "Updated decoded caps: %" GST_PTR_FORMAT, caps);
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+
+ /* Try to set negotiated format, on success replace acquired format */
+ if (gst_v4l2_object_set_format (self->v4l2capture, caps, &error))
+ gst_video_info_from_caps (&info, caps);
+ else
+ gst_v4l2_clear_error (&error);
+ gst_caps_unref (caps);
+
+ output_state = gst_video_decoder_set_output_state (decoder,
+ info.finfo->format, info.width, info.height, self->input_state);
+
+ /* Copy the rest of the information, there might be more in the future */
+ output_state->info.interlace_mode = info.interlace_mode;
+ gst_video_codec_state_unref (output_state);
+
+ if (!gst_video_decoder_negotiate (decoder)) {
+ if (GST_PAD_IS_FLUSHING (decoder->srcpad))
+ goto flushing;
+ else
+ goto not_negotiated;
+ }
+
+ /* Ensure our internal pool is activated */
+ if (!gst_buffer_pool_set_active (GST_BUFFER_POOL (self->v4l2capture->pool),
+ TRUE))
+ goto activate_failed;
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ capture_pool = GST_V4L2_BUFFER_POOL (self->v4l2capture->pool);
++
++ msg = gst_message_new_element (GST_OBJECT_CAST (decoder),
++ gst_structure_new ("prepare-decode-buffers",
++ "num_buffers", G_TYPE_INT, capture_pool->num_allocated,
++ "extra_num_buffers", G_TYPE_INT, capture_pool->num_allocated - 2, NULL));
++
++ gst_element_post_message (GST_ELEMENT_CAST (decoder), msg);
++
++ GST_WARNING_OBJECT (self, "output buffer[%d]", capture_pool->num_allocated);
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ }
+
+ task_state = gst_pad_get_task_state (GST_VIDEO_DECODER_SRC_PAD (self));
+ if (task_state == GST_TASK_STOPPED || task_state == GST_TASK_PAUSED) {
+ /* It's possible that the processing thread stopped due to an error */
+ if (self->output_flow != GST_FLOW_OK &&
+ self->output_flow != GST_FLOW_FLUSHING) {
+ GST_DEBUG_OBJECT (self, "Processing loop stopped with error, leaving");
+ ret = self->output_flow;
+ goto drop;
+ }
+
+ GST_DEBUG_OBJECT (self, "Starting decoding thread");
+
+ /* Start the processing task. When it quits, the task will disable input
+ * processing to unlock the input if draining, or to prevent a potential block */
+ self->output_flow = GST_FLOW_FLUSHING;
+ if (!gst_pad_start_task (decoder->srcpad,
+ (GstTaskFunction) gst_v4l2_video_dec_loop, self, NULL))
+ goto start_task_failed;
+ }
+
+ if (!processed) {
+ GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
+ GST_LOG_OBJECT (decoder, "Passing buffer with system frame number %u",
+ frame->system_frame_number);
+ ret =
+ gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL (self->v4l2output->
+ pool), &frame->input_buffer, &frame->system_frame_number);
+ GST_VIDEO_DECODER_STREAM_LOCK (decoder);
+
+ if (ret == GST_FLOW_FLUSHING) {
+ if (gst_pad_get_task_state (GST_VIDEO_DECODER_SRC_PAD (self)) !=
+ GST_TASK_STARTED)
+ ret = self->output_flow;
+ goto drop;
+ } else if (ret != GST_FLOW_OK) {
+ goto process_failed;
+ }
+ }
+
+ /* No need to keep input around */
+ tmp = frame->input_buffer;
+ frame->input_buffer = gst_buffer_new ();
+ gst_buffer_copy_into (frame->input_buffer, tmp,
+ GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS |
+ GST_BUFFER_COPY_META, 0, 0);
+ gst_buffer_unref (tmp);
+
+ gst_video_codec_frame_unref (frame);
+ return ret;
+
+ /* ERRORS */
+ not_negotiated:
+ {
+ GST_ERROR_OBJECT (self, "not negotiated");
+ ret = GST_FLOW_NOT_NEGOTIATED;
+ gst_v4l2_error (self, &error);
+ goto drop;
+ }
+ activate_failed:
+ {
+ GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
+ (_("Failed to allocate required memory.")),
+ ("Buffer pool activation failed"));
+ ret = GST_FLOW_ERROR;
+ goto drop;
+ }
+ flushing:
+ {
+ ret = GST_FLOW_FLUSHING;
+ goto drop;
+ }
+
+ start_task_failed:
+ {
+ GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
+ (_("Failed to start decoding thread.")), (NULL));
+ ret = GST_FLOW_ERROR;
+ goto drop;
+ }
+ process_failed:
+ {
+ GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
+ (_("Failed to process frame.")),
+ ("Maybe be due to not enough memory or failing driver"));
+ ret = GST_FLOW_ERROR;
+ goto drop;
+ }
+ drop:
+ {
+ gst_video_decoder_drop_frame (decoder, frame);
+ return ret;
+ }
+ }
+
+ static gboolean
+ gst_v4l2_video_dec_decide_allocation (GstVideoDecoder * decoder,
+ GstQuery * query)
+ {
+ GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
+ GstClockTime latency;
+ gboolean ret = FALSE;
+
+ if (gst_v4l2_object_decide_allocation (self->v4l2capture, query))
+ ret = GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
+ query);
+
+ if (GST_CLOCK_TIME_IS_VALID (self->v4l2capture->duration)) {
+ latency = self->v4l2capture->min_buffers * self->v4l2capture->duration;
+ GST_DEBUG_OBJECT (self, "Setting latency: %" GST_TIME_FORMAT " (%"
+ G_GUINT32_FORMAT " * %" G_GUINT64_FORMAT ")", GST_TIME_ARGS (latency),
+ self->v4l2capture->min_buffers, self->v4l2capture->duration);
+ gst_video_decoder_set_latency (decoder, latency, latency);
+ } else {
+ GST_WARNING_OBJECT (self, "Duration invalid, not setting latency");
+ }
+
+ return ret;
+ }
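As a concrete instance of the computation above, assume the driver requires min_buffers = 4 and the negotiated frame duration is 1/30 s (illustrative numbers):

    /* latency = min_buffers * duration
     *         = 4 * gst_util_uint64_scale_int (GST_SECOND, 1, 30)
     *         = 4 * 33333333 ns
     *        ~= 133 ms, used for both min and max latency */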
+
+ static gboolean
+ gst_v4l2_video_dec_src_query (GstVideoDecoder * decoder, GstQuery * query)
+ {
+ gboolean ret = TRUE;
+ GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:{
+ GstCaps *filter, *result = NULL;
+ GstPad *pad = GST_VIDEO_DECODER_SRC_PAD (decoder);
+
+ gst_query_parse_caps (query, &filter);
+
+ if (self->probed_srccaps)
+ result = gst_caps_ref (self->probed_srccaps);
+ else
+ result = gst_pad_get_pad_template_caps (pad);
+
+ if (filter) {
+ GstCaps *tmp = result;
+ result =
+ gst_caps_intersect_full (filter, tmp, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (tmp);
+ }
+
+ GST_DEBUG_OBJECT (self, "Returning src caps %" GST_PTR_FORMAT, result);
+
+ gst_query_set_caps_result (query, result);
+ gst_caps_unref (result);
+ break;
+ }
+
+ default:
+ ret = GST_VIDEO_DECODER_CLASS (parent_class)->src_query (decoder, query);
+ break;
+ }
+
+ return ret;
+ }
+
+ static GstCaps *
+ gst_v4l2_video_dec_sink_getcaps (GstVideoDecoder * decoder, GstCaps * filter)
+ {
+ GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
+ GstCaps *result;
+
+ result = gst_video_decoder_proxy_getcaps (decoder, self->probed_sinkcaps,
+ filter);
+
+ GST_DEBUG_OBJECT (self, "Returning sink caps %" GST_PTR_FORMAT, result);
+
+ return result;
+ }
+
+ static gboolean
+ gst_v4l2_video_dec_sink_event (GstVideoDecoder * decoder, GstEvent * event)
+ {
+ GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (decoder);
+ gboolean ret;
+ GstEventType type = GST_EVENT_TYPE (event);
+
+ switch (type) {
+ case GST_EVENT_FLUSH_START:
+ GST_DEBUG_OBJECT (self, "flush start");
+ gst_v4l2_object_unlock (self->v4l2output);
+ gst_v4l2_object_unlock (self->v4l2capture);
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (decoder, event);
+
+ switch (type) {
+ case GST_EVENT_FLUSH_START:
+ /* The processing thread should stop now, wait for it */
+ gst_pad_stop_task (decoder->srcpad);
+ GST_DEBUG_OBJECT (self, "flush start done");
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+ }
+
+ static GstStateChangeReturn
+ gst_v4l2_video_dec_change_state (GstElement * element,
+ GstStateChange transition)
+ {
+ GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (element);
+ GstVideoDecoder *decoder = GST_VIDEO_DECODER (element);
+
+ if (transition == GST_STATE_CHANGE_PAUSED_TO_READY) {
+ g_atomic_int_set (&self->active, FALSE);
+ gst_v4l2_object_unlock (self->v4l2output);
+ gst_v4l2_object_unlock (self->v4l2capture);
+ gst_pad_stop_task (decoder->srcpad);
+ }
+
+ return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ }
+
+ static void
+ gst_v4l2_video_dec_dispose (GObject * object)
+ {
+ GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (object);
+
+ gst_caps_replace (&self->probed_sinkcaps, NULL);
+ gst_caps_replace (&self->probed_srccaps, NULL);
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+ }
+
+ static void
+ gst_v4l2_video_dec_finalize (GObject * object)
+ {
+ GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (object);
+
+ gst_v4l2_object_destroy (self->v4l2capture);
+ gst_v4l2_object_destroy (self->v4l2output);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ static void
+ gst_v4l2_video_dec_init (GstV4l2VideoDec * self)
+ {
+ /* V4L2 objects are created in subinstance_init */
+ }
+
+ static void
+ gst_v4l2_video_dec_subinstance_init (GTypeInstance * instance, gpointer g_class)
+ {
+ GstV4l2VideoDecClass *klass = GST_V4L2_VIDEO_DEC_CLASS (g_class);
+ GstV4l2VideoDec *self = GST_V4L2_VIDEO_DEC (instance);
+ GstVideoDecoder *decoder = GST_VIDEO_DECODER (instance);
+
+ gst_video_decoder_set_packetized (decoder, TRUE);
+
+ self->v4l2output = gst_v4l2_object_new (GST_ELEMENT (self),
+ GST_OBJECT (GST_VIDEO_DECODER_SINK_PAD (self)),
+ V4L2_BUF_TYPE_VIDEO_OUTPUT, klass->default_device,
+ gst_v4l2_get_output, gst_v4l2_set_output, NULL);
+ self->v4l2output->no_initial_format = TRUE;
+ self->v4l2output->keep_aspect = FALSE;
+
+ self->v4l2capture = gst_v4l2_object_new (GST_ELEMENT (self),
+ GST_OBJECT (GST_VIDEO_DECODER_SRC_PAD (self)),
+ V4L2_BUF_TYPE_VIDEO_CAPTURE, klass->default_device,
+ gst_v4l2_get_input, gst_v4l2_set_input, NULL);
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ self->v4l2capture->tbm_output = TRUE;
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ }
+
+ static void
+ gst_v4l2_video_dec_class_init (GstV4l2VideoDecClass * klass)
+ {
+ GstElementClass *element_class;
+ GObjectClass *gobject_class;
+ GstVideoDecoderClass *video_decoder_class;
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ element_class = (GstElementClass *) klass;
+ gobject_class = (GObjectClass *) klass;
+ video_decoder_class = (GstVideoDecoderClass *) klass;
+
+ GST_DEBUG_CATEGORY_INIT (gst_v4l2_video_dec_debug, "v4l2videodec", 0,
+ "V4L2 Video Decoder");
+
+ gobject_class->dispose = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_dispose);
+ gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_finalize);
+ gobject_class->set_property =
+ GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_set_property);
+ gobject_class->get_property =
+ GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_get_property);
+
+ video_decoder_class->open = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_open);
+ video_decoder_class->close = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_close);
+ video_decoder_class->start = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_start);
+ video_decoder_class->stop = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_stop);
+ video_decoder_class->finish = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_finish);
+ video_decoder_class->flush = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_flush);
+ video_decoder_class->drain = GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_drain);
+ video_decoder_class->set_format =
+ GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_set_format);
+ video_decoder_class->negotiate =
+ GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_negotiate);
+ video_decoder_class->decide_allocation =
+ GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_decide_allocation);
+ /* FIXME propose_allocation or not ? */
+ video_decoder_class->handle_frame =
+ GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_handle_frame);
+ video_decoder_class->getcaps =
+ GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_sink_getcaps);
+ video_decoder_class->src_query =
+ GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_src_query);
+ video_decoder_class->sink_event =
+ GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_sink_event);
+
+ element_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_v4l2_video_dec_change_state);
+
+ gst_v4l2_object_install_m2m_properties_helper (gobject_class);
++#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
++ g_object_class_install_property (gobject_class, PROP_TBM_OUTPUT,
++ g_param_spec_boolean ("tbm-output", "Enable TBM for output buffer",
++ "It works for only DMABUF mode.",
++ TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+ }
+
+ static void
+ gst_v4l2_video_dec_subclass_init (gpointer g_class, gpointer data)
+ {
+ GstV4l2VideoDecClass *klass = GST_V4L2_VIDEO_DEC_CLASS (g_class);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+ GstV4l2VideoDecCData *cdata = data;
+
+ klass->default_device = cdata->device;
+
+ gst_element_class_add_pad_template (element_class,
+ gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
+ cdata->sink_caps));
+ gst_element_class_add_pad_template (element_class,
+ gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
+ cdata->src_caps));
+
+ gst_element_class_set_metadata (element_class, cdata->longname,
+ "Codec/Decoder/Video/Hardware", cdata->description,
+ "Nicolas Dufresne <nicolas.dufresne@collabora.com>");
+
+ gst_caps_unref (cdata->sink_caps);
+ gst_caps_unref (cdata->src_caps);
+ g_free (cdata);
+ }
+
+ /* Probing functions */
+ gboolean
+ gst_v4l2_is_video_dec (GstCaps * sink_caps, GstCaps * src_caps)
+ {
+ gboolean ret = FALSE;
+
+ if (gst_caps_is_subset (sink_caps, gst_v4l2_object_get_codec_caps ())
+ && gst_caps_is_subset (src_caps, gst_v4l2_object_get_raw_caps ()))
+ ret = TRUE;
+
+ return ret;
+ }
+
+ static gchar *
+ gst_v4l2_video_dec_set_metadata (GstStructure * s, GstV4l2VideoDecCData * cdata,
+ const gchar * basename)
+ {
+ gchar *codec_name = NULL;
+ gchar *type_name = NULL;
+
+ #define SET_META(codec) \
+ G_STMT_START { \
+ cdata->longname = "V4L2 " codec " Decoder"; \
+ cdata->description = "Decodes " codec " streams via V4L2 API"; \
+ codec_name = g_ascii_strdown (codec, -1); \
+ } G_STMT_END
+
+ if (gst_structure_has_name (s, "image/jpeg")) {
+ SET_META ("JPEG");
+ } else if (gst_structure_has_name (s, "video/mpeg")) {
+ gint mpegversion = 0;
+ gst_structure_get_int (s, "mpegversion", &mpegversion);
+
+ if (mpegversion == 2) {
+ SET_META ("MPEG2");
+ cdata->codec = gst_v4l2_mpeg2_get_codec ();
+ } else {
+ SET_META ("MPEG4");
+ cdata->codec = gst_v4l2_mpeg4_get_codec ();
+ }
+ } else if (gst_structure_has_name (s, "video/x-h263")) {
+ SET_META ("H263");
+ } else if (gst_structure_has_name (s, "video/x-fwht")) {
+ SET_META ("FWHT");
+ } else if (gst_structure_has_name (s, "video/x-h264")) {
+ SET_META ("H264");
+ cdata->codec = gst_v4l2_h264_get_codec ();
+ } else if (gst_structure_has_name (s, "video/x-h265")) {
+ SET_META ("H265");
+ cdata->codec = gst_v4l2_h265_get_codec ();
+ } else if (gst_structure_has_name (s, "video/x-wmv")) {
+ SET_META ("VC1");
+ } else if (gst_structure_has_name (s, "video/x-vp8")) {
+ SET_META ("VP8");
+ cdata->codec = gst_v4l2_vp8_get_codec ();
+ } else if (gst_structure_has_name (s, "video/x-vp9")) {
+ SET_META ("VP9");
+ cdata->codec = gst_v4l2_vp9_get_codec ();
+ } else if (gst_structure_has_name (s, "video/x-bayer")) {
+ SET_META ("BAYER");
+ } else if (gst_structure_has_name (s, "video/x-sonix")) {
+ SET_META ("SONIX");
+ } else if (gst_structure_has_name (s, "video/x-pwc1")) {
+ SET_META ("PWC1");
+ } else if (gst_structure_has_name (s, "video/x-pwc2")) {
+ SET_META ("PWC2");
+ } else {
+ /* This code should be kept in sync with the CODEC formats exposed by
+ * gstv4l2object.c. This warning will only occur if we forget to also
+ * add a format here. */
+ gchar *s_str = gst_structure_to_string (s);
+ g_warning ("Missing fixed name mapping for caps '%s', this is a GStreamer "
+ "bug, please report at https://bugs.gnome.org", s_str);
+ g_free (s_str);
+ }
+
+ if (codec_name) {
+ type_name = g_strdup_printf ("v4l2%sdec", codec_name);
+ if (g_type_from_name (type_name) != 0) {
+ g_free (type_name);
+ type_name = g_strdup_printf ("v4l2%s%sdec", basename, codec_name);
+ }
+
+ g_free (codec_name);
+ }
+
+ return type_name;
+ #undef SET_META
+ }
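To illustrate the naming scheme above, assume an H.264 decoder probed on a device whose basename is "video10" (hypothetical values):

    /* gst_structure_has_name (s, "video/x-h264") -> SET_META ("H264")
     *   cdata->longname    = "V4L2 H264 Decoder"
     *   cdata->description = "Decodes H264 streams via V4L2 API"
     *   codec_name         = "h264"
     * first registration   -> type_name = "v4l2h264dec"
     * name already taken   -> type_name = "v4l2video10h264dec" */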
+
+ void
+ gst_v4l2_video_dec_register (GstPlugin * plugin, const gchar * basename,
+ const gchar * device_path, gint video_fd, GstCaps * sink_caps,
+ GstCaps * src_caps)
+ {
+ gint i;
+
+ for (i = 0; i < gst_caps_get_size (sink_caps); i++) {
+ GstV4l2VideoDecCData *cdata;
+ GstStructure *s;
+ GTypeQuery type_query;
+ GTypeInfo type_info = { 0, };
+ GType type, subtype;
+ gchar *type_name;
+
+ s = gst_caps_get_structure (sink_caps, i);
+
+ cdata = g_new0 (GstV4l2VideoDecCData, 1);
+ cdata->device = g_strdup (device_path);
+ cdata->sink_caps = gst_caps_new_empty ();
+ gst_caps_append_structure (cdata->sink_caps, gst_structure_copy (s));
+ cdata->src_caps = gst_caps_ref (src_caps);
+ type_name = gst_v4l2_video_dec_set_metadata (s, cdata, basename);
+
+ /* Skip over if we hit an unmapped type */
+ if (!type_name) {
+ g_free (cdata);
+ continue;
+ }
+
+ if (cdata->codec != NULL && cdata->codec != gst_v4l2_vp8_get_codec ()
+ && cdata->codec != gst_v4l2_vp9_get_codec ()) {
+ GValue value = G_VALUE_INIT;
+
+ if (gst_v4l2_codec_probe_levels (cdata->codec, video_fd, &value)) {
+ gst_caps_set_value (cdata->sink_caps, "level", &value);
+ g_value_unset (&value);
+ }
+
+ if (gst_v4l2_codec_probe_profiles (cdata->codec, video_fd, &value)) {
+ gst_caps_set_value (cdata->sink_caps, "profile", &value);
+ g_value_unset (&value);
+ }
+ }
+
+ type = gst_v4l2_video_dec_get_type ();
+ g_type_query (type, &type_query);
+ memset (&type_info, 0, sizeof (type_info));
+ type_info.class_size = type_query.class_size;
+ type_info.instance_size = type_query.instance_size;
+ type_info.class_init = gst_v4l2_video_dec_subclass_init;
+ type_info.class_data = cdata;
+ type_info.instance_init = gst_v4l2_video_dec_subinstance_init;
+
+ subtype = g_type_register_static (type, type_name, &type_info, 0);
++
+ if (!gst_element_register (plugin, type_name, GST_RANK_PRIMARY + 1,
+ subtype))
+ GST_WARNING ("Failed to register plugin '%s'", type_name);
+
+ g_free (type_name);
+ }
+ }
--- /dev/null
+ /*
+ * Copyright (C) 2014-2017 SUMOMO Computer Association
+ * Author: Ayaka <ayaka@soulik.info>
+ * Copyright (C) 2017 Collabora Ltd.
+ * Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <sys/stat.h>
+ #include <fcntl.h>
+ #include <errno.h>
+ #include <unistd.h>
+ #include <string.h>
+
+ #include "gstv4l2object.h"
+ #include "gstv4l2videoenc.h"
+
+ #include <string.h>
+ #include <gst/gst-i18n-plugin.h>
+
+ GST_DEBUG_CATEGORY_STATIC (gst_v4l2_video_enc_debug);
+ #define GST_CAT_DEFAULT gst_v4l2_video_enc_debug
+
+ typedef struct
+ {
+ gchar *device;
+ GstCaps *sink_caps;
+ GstCaps *src_caps;
+ const GstV4l2Codec *codec;
+ } GstV4l2VideoEncCData;
+
+ enum
+ {
+ PROP_0,
+ V4L2_STD_OBJECT_PROPS,
+ };
+
+ #define gst_v4l2_video_enc_parent_class parent_class
+ G_DEFINE_ABSTRACT_TYPE (GstV4l2VideoEnc, gst_v4l2_video_enc,
+ GST_TYPE_VIDEO_ENCODER);
+
+ static void
+ gst_v4l2_video_enc_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+ {
+ GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (object);
+
+ switch (prop_id) {
+ case PROP_CAPTURE_IO_MODE:
+ if (!gst_v4l2_object_set_property_helper (self->v4l2capture,
+ prop_id, value, pspec)) {
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+ break;
+
+ /* By default, only set on output */
+ default:
+ if (!gst_v4l2_object_set_property_helper (self->v4l2output,
+ prop_id, value, pspec)) {
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+ break;
+ }
+ }
+
+ static void
+ gst_v4l2_video_enc_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+ {
+ GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (object);
+
+ switch (prop_id) {
+ case PROP_CAPTURE_IO_MODE:
+ if (!gst_v4l2_object_get_property_helper (self->v4l2capture,
+ prop_id, value, pspec)) {
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+ break;
+
+ /* By default read from output */
+ default:
+ if (!gst_v4l2_object_get_property_helper (self->v4l2output,
+ prop_id, value, pspec)) {
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+ break;
+ }
+ }
+
+ static gboolean
+ gst_v4l2_video_enc_open (GstVideoEncoder * encoder)
+ {
+ GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
+ GstV4l2Error error = GST_V4L2_ERROR_INIT;
+ GstCaps *codec_caps;
+
+ GST_DEBUG_OBJECT (self, "Opening");
+
+ if (!gst_v4l2_object_open (self->v4l2output, &error))
+ goto failure;
+
+ if (!gst_v4l2_object_open_shared (self->v4l2capture, self->v4l2output))
+ goto failure;
+
+ self->probed_sinkcaps = gst_v4l2_object_probe_caps (self->v4l2output,
+ gst_v4l2_object_get_raw_caps ());
+
+ if (gst_caps_is_empty (self->probed_sinkcaps))
+ goto no_raw_format;
+
+ codec_caps = gst_pad_get_pad_template_caps (encoder->srcpad);
+ self->probed_srccaps = gst_v4l2_object_probe_caps (self->v4l2capture,
+ codec_caps);
+ gst_caps_unref (codec_caps);
+
+ if (gst_caps_is_empty (self->probed_srccaps))
+ goto no_encoded_format;
+
+ return TRUE;
+
+ no_encoded_format:
+ GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
+ (_("Encoder on device %s has no supported output format"),
+ self->v4l2output->videodev), (NULL));
+ goto failure;
+
+
+ no_raw_format:
+ GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
+ (_("Encoder on device %s has no supported input format"),
+ self->v4l2output->videodev), (NULL));
+ goto failure;
+
+ failure:
+ if (GST_V4L2_IS_OPEN (self->v4l2output))
+ gst_v4l2_object_close (self->v4l2output);
+
+ if (GST_V4L2_IS_OPEN (self->v4l2capture))
+ gst_v4l2_object_close (self->v4l2capture);
+
+ gst_caps_replace (&self->probed_srccaps, NULL);
+ gst_caps_replace (&self->probed_sinkcaps, NULL);
+
+ gst_v4l2_error (self, &error);
+
+ return FALSE;
+ }
+
+ static gboolean
+ gst_v4l2_video_enc_close (GstVideoEncoder * encoder)
+ {
+ GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
+
+ GST_DEBUG_OBJECT (self, "Closing");
+
+ gst_v4l2_object_close (self->v4l2output);
+ gst_v4l2_object_close (self->v4l2capture);
+ gst_caps_replace (&self->probed_srccaps, NULL);
+ gst_caps_replace (&self->probed_sinkcaps, NULL);
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_v4l2_video_enc_start (GstVideoEncoder * encoder)
+ {
+ GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
+
+ GST_DEBUG_OBJECT (self, "Starting");
+
+ gst_v4l2_object_unlock (self->v4l2output);
+ g_atomic_int_set (&self->active, TRUE);
+ self->output_flow = GST_FLOW_OK;
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_v4l2_video_enc_stop (GstVideoEncoder * encoder)
+ {
+ GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
+
+ GST_DEBUG_OBJECT (self, "Stopping");
+
+ gst_v4l2_object_unlock (self->v4l2output);
+ gst_v4l2_object_unlock (self->v4l2capture);
+
+ /* Wait for capture thread to stop */
+ gst_pad_stop_task (encoder->srcpad);
+
+ GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
+ self->output_flow = GST_FLOW_OK;
+ GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
+
+ /* Should have been flushed already */
+ g_assert (g_atomic_int_get (&self->active) == FALSE);
+ g_assert (g_atomic_int_get (&self->processing) == FALSE);
+
+ gst_v4l2_object_stop (self->v4l2output);
+ gst_v4l2_object_stop (self->v4l2capture);
+
+ if (self->input_state) {
+ gst_video_codec_state_unref (self->input_state);
+ self->input_state = NULL;
+ }
+
+ GST_DEBUG_OBJECT (self, "Stopped");
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_v4l2_encoder_cmd (GstV4l2Object * v4l2object, guint cmd, guint flags)
+ {
+ struct v4l2_encoder_cmd ecmd = { 0, };
+
+ GST_DEBUG_OBJECT (v4l2object->element,
+ "sending v4l2 encoder command %u with flags %u", cmd, flags);
+
+ if (!GST_V4L2_IS_OPEN (v4l2object))
+ return FALSE;
+
+ ecmd.cmd = cmd;
+ ecmd.flags = flags;
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_ENCODER_CMD, &ecmd) < 0)
+ goto ecmd_failed;
+
+ return TRUE;
+
+ ecmd_failed:
+ if (errno == ENOTTY) {
+ GST_INFO_OBJECT (v4l2object->element,
+ "Failed to send encoder command %u with flags %u for '%s'. (%s)",
+ cmd, flags, v4l2object->videodev, g_strerror (errno));
+ } else {
+ GST_ERROR_OBJECT (v4l2object->element,
+ "Failed to send encoder command %u with flags %u for '%s'. (%s)",
+ cmd, flags, v4l2object->videodev, g_strerror (errno));
+ }
+ return FALSE;
+ }
+
+ static GstFlowReturn
+ gst_v4l2_video_enc_finish (GstVideoEncoder * encoder)
+ {
+ GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ if (gst_pad_get_task_state (encoder->srcpad) != GST_TASK_STARTED)
+ goto done;
+
+ GST_DEBUG_OBJECT (self, "Finishing encoding");
+
+ /* drop the stream lock while draining, so remaining buffers can be
+ * pushed from the src pad task thread */
+ GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
+
+ if (gst_v4l2_encoder_cmd (self->v4l2capture, V4L2_ENC_CMD_STOP, 0)) {
+ GstTask *task = encoder->srcpad->task;
+
+ /* Wait for the task to be drained */
+ GST_DEBUG_OBJECT (self, "Waiting for encoder stop");
+ GST_OBJECT_LOCK (task);
+ while (GST_TASK_STATE (task) == GST_TASK_STARTED)
+ GST_TASK_WAIT (task);
+ GST_OBJECT_UNLOCK (task);
+ ret = GST_FLOW_FLUSHING;
+ }
+
+ /* and ensure the processing thread has stopped in case another error
+ * occurred. */
+ gst_v4l2_object_unlock (self->v4l2capture);
+ gst_pad_stop_task (encoder->srcpad);
+ GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
+
+ if (ret == GST_FLOW_FLUSHING)
+ ret = self->output_flow;
+
+ GST_DEBUG_OBJECT (encoder, "Done draining buffers");
+
+ done:
+ return ret;
+ }
+
+ static gboolean
+ gst_v4l2_video_enc_set_format (GstVideoEncoder * encoder,
+ GstVideoCodecState * state)
+ {
+ gboolean ret = TRUE;
+ GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
+ GstV4l2Error error = GST_V4L2_ERROR_INIT;
+ GstCaps *outcaps;
+ GstVideoCodecState *output;
+
+ GST_DEBUG_OBJECT (self, "Setting format: %" GST_PTR_FORMAT, state->caps);
+
+ if (self->input_state) {
+ if (gst_v4l2_object_caps_equal (self->v4l2output, state->caps)) {
+ GST_DEBUG_OBJECT (self, "Compatible caps");
+ return TRUE;
+ }
+
+ if (gst_v4l2_video_enc_finish (encoder) != GST_FLOW_OK)
+ return FALSE;
+
+ gst_v4l2_object_stop (self->v4l2output);
+ gst_v4l2_object_stop (self->v4l2capture);
+
+ gst_video_codec_state_unref (self->input_state);
+ self->input_state = NULL;
+ }
+
+ outcaps = gst_pad_get_pad_template_caps (encoder->srcpad);
+ outcaps = gst_caps_make_writable (outcaps);
+ output = gst_video_encoder_set_output_state (encoder, outcaps, state);
+ gst_video_codec_state_unref (output);
+
+ if (!gst_video_encoder_negotiate (encoder))
+ return FALSE;
+
+ if (!gst_v4l2_object_set_format (self->v4l2output, state->caps, &error)) {
+ gst_v4l2_error (self, &error);
+ return FALSE;
+ }
+
+ self->input_state = gst_video_codec_state_ref (state);
+
+ GST_DEBUG_OBJECT (self, "output caps: %" GST_PTR_FORMAT, state->caps);
+
+ return ret;
+ }
+
+ static gboolean
+ gst_v4l2_video_enc_flush (GstVideoEncoder * encoder)
+ {
+ GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
+
+ GST_DEBUG_OBJECT (self, "Flushing");
+
+ /* Ensure the processing thread has stopped for the reverse playback
+ * discont case */
+ if (g_atomic_int_get (&self->processing)) {
+ GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
+
+ gst_v4l2_object_unlock_stop (self->v4l2output);
+ gst_v4l2_object_unlock_stop (self->v4l2capture);
+ gst_pad_stop_task (encoder->srcpad);
+
+ GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
+
+ }
+
+ self->output_flow = GST_FLOW_OK;
+
+ gst_v4l2_object_unlock_stop (self->v4l2output);
+ gst_v4l2_object_unlock_stop (self->v4l2capture);
+
+ return TRUE;
+ }
+
+ struct ProfileLevelCtx
+ {
+ GstV4l2VideoEnc *self;
+ const gchar *profile;
+ const gchar *level;
+ };
+
+ static gboolean
+ get_string_list (GstStructure * s, const gchar * field, GQueue * queue)
+ {
+ const GValue *value;
+
+ value = gst_structure_get_value (s, field);
+
+ if (!value)
+ return FALSE;
+
+ if (GST_VALUE_HOLDS_LIST (value)) {
+ guint i;
+
+ if (gst_value_list_get_size (value) == 0)
+ return FALSE;
+
+ for (i = 0; i < gst_value_list_get_size (value); i++) {
+ const GValue *item = gst_value_list_get_value (value, i);
+
+ if (G_VALUE_HOLDS_STRING (item))
+ g_queue_push_tail (queue, g_value_dup_string (item));
+ }
+ } else if (G_VALUE_HOLDS_STRING (value)) {
+ g_queue_push_tail (queue, g_value_dup_string (value));
+ }
+
+ return TRUE;
+ }
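The helper above flattens a caps field into a queue of strings; a short sketch of what it accepts (illustrative values):

    /* profile = (string) "high"
     *     -> queue: ["high"], returns TRUE
     * profile = (list) { "high", "main" }
     *     -> queue: ["high", "main"], returns TRUE
     * field missing, or an empty list
     *     -> returns FALSE, queue left untouched */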
+
+ static gboolean
+ negotiate_profile_and_level (GstCapsFeatures * features, GstStructure * s,
+ gpointer user_data)
+ {
+ struct ProfileLevelCtx *ctx = user_data;
+ GstV4l2VideoEncClass *klass = GST_V4L2_VIDEO_ENC_GET_CLASS (ctx->self);
+ GstV4l2Object *v4l2object = GST_V4L2_VIDEO_ENC (ctx->self)->v4l2output;
+ GQueue profiles = G_QUEUE_INIT;
+ GQueue levels = G_QUEUE_INIT;
+ gboolean failed = FALSE;
+ const GstV4l2Codec *codec = klass->codec;
+
+ if (codec->profile_cid && get_string_list (s, "profile", &profiles)) {
+ GList *l;
+
+ for (l = profiles.head; l; l = l->next) {
+ struct v4l2_control control = { 0, };
+ gint v4l2_profile;
+ const gchar *profile = l->data;
+
+ GST_TRACE_OBJECT (ctx->self, "Trying profile %s", profile);
+
+ control.id = codec->profile_cid;
+
+ control.value = v4l2_profile = codec->profile_from_string (profile);
+
+ if (control.value < 0)
+ continue;
+
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_S_CTRL, &control) < 0) {
+ GST_WARNING_OBJECT (ctx->self, "Failed to set %s profile: '%s'",
+ klass->codec_name, g_strerror (errno));
+ break;
+ }
+
+ profile = codec->profile_to_string (control.value);
+
+ if (control.value == v4l2_profile) {
+ ctx->profile = profile;
+ break;
+ }
+
+ if (g_list_find_custom (l, profile, (GCompareFunc) g_strcmp0)) {
+ ctx->profile = profile;
+ break;
+ }
+ }
+
+ if (profiles.length && !ctx->profile)
+ failed = TRUE;
+
+ g_queue_foreach (&profiles, (GFunc) g_free, NULL);
+ g_queue_clear (&profiles);
+ }
+
+ if (!failed && codec->level_cid && get_string_list (s, "level", &levels)) {
+ GList *l;
+
+ for (l = levels.head; l; l = l->next) {
+ struct v4l2_control control = { 0, };
+ gint v4l2_level;
+ const gchar *level = l->data;
+
+ GST_TRACE_OBJECT (ctx->self, "Trying level %s", level);
+
+ control.id = codec->level_cid;
+ control.value = v4l2_level = codec->level_from_string (level);
+
+ if (control.value < 0)
+ continue;
+
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_S_CTRL, &control) < 0) {
+ GST_WARNING_OBJECT (ctx->self, "Failed to set %s level: '%s'",
+ klass->codec_name, g_strerror (errno));
+ break;
+ }
+
+ level = codec->level_to_string (control.value);
+
+ if (control.value == v4l2_level) {
+ ctx->level = level;
+ break;
+ }
+
+ if (g_list_find_custom (l, level, (GCompareFunc) g_strcmp0)) {
+ ctx->level = level;
+ break;
+ }
+ }
+
+ if (levels.length && !ctx->level)
+ failed = TRUE;
+
+ g_queue_foreach (&levels, (GFunc) g_free, NULL);
+ g_queue_clear (&levels);
+ }
+
+ /* If it failed, we continue */
+ return failed;
+ }
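Note the inverted return value: gst_caps_foreach() stops iterating as soon as the callback returns FALSE, so returning "failed" bails out early on the first structure that yields a usable profile/level, and the caller only sees TRUE when every structure was tried without success. Schematically (illustrative):

    /* structure #0: no supported profile    -> callback returns TRUE, keep going
     * structure #1: profile "main" accepted -> callback returns FALSE, stop
     * gst_caps_foreach() returns FALSE      -> negotiation succeeded */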
+
+ static gboolean
+ gst_v4l2_video_enc_negotiate (GstVideoEncoder * encoder)
+ {
+ GstV4l2VideoEncClass *klass = GST_V4L2_VIDEO_ENC_GET_CLASS (encoder);
+ GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
+ GstV4l2Object *v4l2object = self->v4l2output;
+ GstCaps *allowed_caps;
+ struct ProfileLevelCtx ctx = { self, NULL, NULL };
+ GstVideoCodecState *state;
+ GstStructure *s;
+ const GstV4l2Codec *codec = klass->codec;
+
+ GST_DEBUG_OBJECT (self, "Negotiating %s profile and level.",
+ klass->codec_name);
+
+ /* Only renegotiate on upstream changes */
+ if (self->input_state)
+ return TRUE;
+
+ if (!codec)
+ goto done;
+
+ allowed_caps = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));
+
+ if (allowed_caps) {
+
+ if (gst_caps_is_empty (allowed_caps))
+ goto not_negotiated;
+
+ allowed_caps = gst_caps_make_writable (allowed_caps);
+
+ /* negotiate_profile_and_level() will return TRUE on failure to keep
+ * iterating; if gst_caps_foreach() returns TRUE it means there was no
+ * compatible profile and level in any of the structures */
+ if (gst_caps_foreach (allowed_caps, negotiate_profile_and_level, &ctx)) {
+ goto no_profile_level;
+ }
+
+ gst_caps_unref (allowed_caps);
+ allowed_caps = NULL;
+ }
+
+ if (codec->profile_cid && !ctx.profile) {
+ struct v4l2_control control = { 0, };
+
+ control.id = codec->profile_cid;
+
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_CTRL, &control) < 0)
+ goto g_ctrl_failed;
+
+ ctx.profile = codec->profile_to_string (control.value);
+ }
+
+ if (codec->level_cid && !ctx.level) {
+ struct v4l2_control control = { 0, };
+
+ control.id = codec->level_cid;
+
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_CTRL, &control) < 0)
+ goto g_ctrl_failed;
+
+ ctx.level = codec->level_to_string (control.value);
+ }
+
+ GST_DEBUG_OBJECT (self, "Selected %s profile %s at level %s",
+ klass->codec_name, ctx.profile, ctx.level);
+
+ state = gst_video_encoder_get_output_state (encoder);
+ s = gst_caps_get_structure (state->caps, 0);
+
+ if (codec->profile_cid)
+ gst_structure_set (s, "profile", G_TYPE_STRING, ctx.profile, NULL);
+
+ if (codec->level_cid)
+ gst_structure_set (s, "level", G_TYPE_STRING, ctx.level, NULL);
+
+ done:
+ if (!GST_VIDEO_ENCODER_CLASS (parent_class)->negotiate (encoder))
+ return FALSE;
+
+ return TRUE;
+
+ g_ctrl_failed:
+ GST_WARNING_OBJECT (self, "Failed to get %s profile and level: '%s'",
+ klass->codec_name, g_strerror (errno));
+ goto not_negotiated;
+
+ no_profile_level:
+ GST_WARNING_OBJECT (self, "No compatible level and profile in caps: %"
+ GST_PTR_FORMAT, allowed_caps);
+ goto not_negotiated;
+
+ not_negotiated:
+ if (allowed_caps)
+ gst_caps_unref (allowed_caps);
+ return FALSE;
+ }
+
+ static gboolean
+ check_system_frame_number_too_old (guint32 current, guint32 old)
+ {
+ guint32 absdiff = current > old ? current - old : old - current;
+
+ /* More than 100 frames in the past, or current wrapped around */
+ if (absdiff > 100) {
+ /* Wraparound and difference is actually smaller than 100 */
+ if (absdiff > G_MAXUINT32 - 100)
+ return FALSE;
+ return TRUE;
+ }
+
+ return FALSE;
+ }
+
+ static void
+ gst_v4l2_video_enc_loop (GstVideoEncoder * encoder)
+ {
+ GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
+ GstVideoCodecFrame *frame;
+ GstBuffer *buffer = NULL;
+ GstFlowReturn ret;
+
+ GST_LOG_OBJECT (encoder, "Allocate output buffer");
+
+ buffer = gst_video_encoder_allocate_output_buffer (encoder,
+ self->v4l2capture->info.size);
+
+ if (NULL == buffer) {
+ ret = GST_FLOW_FLUSHING;
+ goto beach;
+ }
+
+ /* FIXME Check if buffer isn't the last one here */
+
+ GST_LOG_OBJECT (encoder, "Process output buffer");
+ ret =
+ gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL
+ (self->v4l2capture->pool), &buffer, NULL);
+
+ if (ret != GST_FLOW_OK)
+ goto beach;
+
+ if (GST_BUFFER_TIMESTAMP (buffer) % GST_SECOND != 0)
+ GST_ERROR_OBJECT (encoder,
+ "Driver bug detected - check driver with v4l2-compliance from http://git.linuxtv.org/v4l-utils.git");
+ GST_LOG_OBJECT (encoder, "Got buffer for frame number %u",
+ (guint32) (GST_BUFFER_PTS (buffer) / GST_SECOND));
+ frame =
+ gst_video_encoder_get_frame (encoder,
+ GST_BUFFER_TIMESTAMP (buffer) / GST_SECOND);
+
+ if (frame) {
+ GstVideoCodecFrame *oldest_frame;
+ gboolean warned = FALSE;
+
+ /* Garbage collect old frames in case of codec bugs */
+ while ((oldest_frame = gst_video_encoder_get_oldest_frame (encoder)) &&
+ check_system_frame_number_too_old (frame->system_frame_number,
+ oldest_frame->system_frame_number)) {
+ gst_video_encoder_finish_frame (encoder, oldest_frame);
+ oldest_frame = NULL;
+
+ if (!warned) {
+ g_warning ("%s: Too old frames, bug in encoder -- please file a bug",
+ GST_ELEMENT_NAME (encoder));
+ warned = TRUE;
+ }
+ }
+ if (oldest_frame)
+ gst_video_codec_frame_unref (oldest_frame);
+
+ /* At this point, the delta unit buffer flag is already correctly set by
+ * gst_v4l2_buffer_pool_process. Since gst_video_encoder_finish_frame
+ * will overwrite it from GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame),
+ * set that here.
+ */
+ if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT))
+ GST_VIDEO_CODEC_FRAME_UNSET_SYNC_POINT (frame);
+ else
+ GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
+ frame->output_buffer = buffer;
+ buffer = NULL;
+ ret = gst_video_encoder_finish_frame (encoder, frame);
+
+ if (ret != GST_FLOW_OK)
+ goto beach;
+ } else {
+ GST_WARNING_OBJECT (encoder, "Encoder is producing too many buffers");
+ gst_buffer_unref (buffer);
+ }
+
+ return;
+
+ beach:
+ GST_DEBUG_OBJECT (encoder, "Leaving output thread");
+
+ gst_buffer_replace (&buffer, NULL);
+ self->output_flow = ret;
+ g_atomic_int_set (&self->processing, FALSE);
+ gst_v4l2_object_unlock (self->v4l2output);
+ gst_pad_pause_task (encoder->srcpad);
+ }
+
+ static void
+ gst_v4l2_video_enc_loop_stopped (GstV4l2VideoEnc * self)
+ {
+ if (g_atomic_int_get (&self->processing)) {
+ GST_DEBUG_OBJECT (self, "Early stop of encoding thread");
+ self->output_flow = GST_FLOW_FLUSHING;
+ g_atomic_int_set (&self->processing, FALSE);
+ }
+
+ GST_DEBUG_OBJECT (self, "Encoding task destroyed: %s",
+ gst_flow_get_name (self->output_flow));
+
+ }
+
+ static GstFlowReturn
+ gst_v4l2_video_enc_handle_frame (GstVideoEncoder * encoder,
+ GstVideoCodecFrame * frame)
+ {
+ GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstTaskState task_state;
+
+ GST_DEBUG_OBJECT (self, "Handling frame %d", frame->system_frame_number);
+
+ if (G_UNLIKELY (!g_atomic_int_get (&self->active)))
+ goto flushing;
+
+ task_state = gst_pad_get_task_state (GST_VIDEO_ENCODER_SRC_PAD (self));
+ if (task_state == GST_TASK_STOPPED || task_state == GST_TASK_PAUSED) {
+ GstBufferPool *pool = GST_BUFFER_POOL (self->v4l2output->pool);
+
+ /* It is possible that the processing thread stopped due to an error or
+ * when the last buffer has been met during the draining process. */
+ if (self->output_flow != GST_FLOW_OK &&
+ self->output_flow != GST_FLOW_FLUSHING &&
+ self->output_flow != GST_V4L2_FLOW_LAST_BUFFER) {
+ GST_DEBUG_OBJECT (self, "Processing loop stopped with error: %s, leaving",
+ gst_flow_get_name (self->output_flow));
+ ret = self->output_flow;
+ goto drop;
+ }
+
+ /* Ensure input internal pool is active */
+ if (!gst_buffer_pool_is_active (pool)) {
+ GstStructure *config = gst_buffer_pool_get_config (pool);
+ guint min = MAX (self->v4l2output->min_buffers,
+ GST_V4L2_MIN_BUFFERS (self->v4l2output));
+
+ gst_buffer_pool_config_set_params (config, self->input_state->caps,
+ self->v4l2output->info.size, min, min);
+
+ /* There is no reason to refuse this config */
+ if (!gst_buffer_pool_set_config (pool, config))
+ goto activate_failed;
+
+ if (!gst_buffer_pool_set_active (pool, TRUE))
+ goto activate_failed;
+ }
+
+ if (!gst_buffer_pool_set_active
+ (GST_BUFFER_POOL (self->v4l2capture->pool), TRUE)) {
+ GST_WARNING_OBJECT (self, "Could not activate capture buffer pool.");
+ goto activate_failed;
+ }
+
+ GST_DEBUG_OBJECT (self, "Starting encoding thread");
+
+ /* Start the processing task. When it quits, the task will disable input
+ * processing to unlock the input if draining, or to prevent a potential block */
+ if (!gst_pad_start_task (encoder->srcpad,
+ (GstTaskFunction) gst_v4l2_video_enc_loop, self,
+ (GDestroyNotify) gst_v4l2_video_enc_loop_stopped))
+ goto start_task_failed;
+ }
+
+ if (frame->input_buffer) {
+ GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
+ GST_LOG_OBJECT (encoder, "Passing buffer with frame number %u",
+ frame->system_frame_number);
+ ret =
+ gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL (self->
+ v4l2output->pool), &frame->input_buffer,
+ &frame->system_frame_number);
+ GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
+
+ if (ret == GST_FLOW_FLUSHING) {
+ if (gst_pad_get_task_state (encoder->srcpad) != GST_TASK_STARTED)
+ ret = self->output_flow;
+ goto drop;
+ } else if (ret != GST_FLOW_OK) {
+ goto process_failed;
+ }
+ }
+
+ gst_video_codec_frame_unref (frame);
+ return ret;
+
+ /* ERRORS */
+ activate_failed:
+ {
+ GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
+ (_("Failed to allocate required memory.")),
+ ("Buffer pool activation failed"));
+ return GST_FLOW_ERROR;
+
+ }
+ flushing:
+ {
+ ret = GST_FLOW_FLUSHING;
+ goto drop;
+ }
+ start_task_failed:
+ {
+ GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
+ (_("Failed to start encoding thread.")), (NULL));
+ g_atomic_int_set (&self->processing, FALSE);
+ ret = GST_FLOW_ERROR;
+ goto drop;
+ }
+ process_failed:
+ {
+ GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
+ (_("Failed to process frame.")),
+ ("Maybe be due to not enough memory or failing driver"));
+ ret = GST_FLOW_ERROR;
+ goto drop;
+ }
+ drop:
+ {
+ gst_video_encoder_finish_frame (encoder, frame);
+ return ret;
+ }
+ }
+
+ static gboolean
+ gst_v4l2_video_enc_decide_allocation (GstVideoEncoder *
+ encoder, GstQuery * query)
+ {
+ GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
+ GstVideoCodecState *state = gst_video_encoder_get_output_state (encoder);
+ GstCaps *caps;
+ GstV4l2Error error = GST_V4L2_ERROR_INIT;
+ GstClockTime latency;
+ gboolean ret = FALSE;
+
+ /* We need to set the format here, since this is called right after
+ * GstVideoEncoder has set the width, height and framerate into the state
+ * caps. These are needed by the driver to calculate the buffer size and to
+ * implement bitrate adaptation. */
+ caps = gst_caps_copy (state->caps);
+ gst_structure_remove_field (gst_caps_get_structure (caps, 0), "colorimetry");
+ if (!gst_v4l2_object_set_format (self->v4l2capture, caps, &error)) {
+ gst_v4l2_error (self, &error);
+ gst_caps_unref (caps);
+ ret = FALSE;
+ goto done;
+ }
+ gst_caps_unref (caps);
+
+ if (gst_v4l2_object_decide_allocation (self->v4l2capture, query)) {
+ GstVideoEncoderClass *enc_class = GST_VIDEO_ENCODER_CLASS (parent_class);
+ ret = enc_class->decide_allocation (encoder, query);
+ }
+
+ /* FIXME This may not be entirely correct, as the encoder may keep some
+ * observations without delaying the encoding. The Linux Media API needs some
+ * more work to explicitly express the decoder / encoder latency. This
+ * value will then become max latency, and the reported driver latency would
+ * become the min latency. */
+ if (!GST_CLOCK_TIME_IS_VALID (self->v4l2capture->duration))
+ self->v4l2capture->duration = gst_util_uint64_scale_int (GST_SECOND, 1, 25);
+ latency = self->v4l2capture->min_buffers * self->v4l2capture->duration;
+ gst_video_encoder_set_latency (encoder, latency, latency);
+ GST_DEBUG_OBJECT (self, "Setting latency: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (latency));
+
+ done:
+ gst_video_codec_state_unref (state);
+ return ret;
+ }
+
+ static gboolean
+ gst_v4l2_video_enc_propose_allocation (GstVideoEncoder *
+ encoder, GstQuery * query)
+ {
+ GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
+ gboolean ret = FALSE;
+
+ GST_DEBUG_OBJECT (self, "called");
+
+ if (query == NULL)
+ ret = TRUE;
+ else
+ ret = gst_v4l2_object_propose_allocation (self->v4l2output, query);
+
+ if (ret)
+ ret = GST_VIDEO_ENCODER_CLASS (parent_class)->propose_allocation (encoder,
+ query);
+
+ return ret;
+ }
+
+ static gboolean
+ gst_v4l2_video_enc_src_query (GstVideoEncoder * encoder, GstQuery * query)
+ {
+ gboolean ret = TRUE;
+ GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:{
+ GstCaps *filter, *result = NULL;
+ GstPad *pad = GST_VIDEO_ENCODER_SRC_PAD (encoder);
+
+ gst_query_parse_caps (query, &filter);
+
+ /* FIXME Try not to probe the entire encoder, but only the format
+ * implemented by the subclass */
+ if (self->probed_srccaps) {
+ GstCaps *tmpl = gst_pad_get_pad_template_caps (pad);
+ result = gst_caps_intersect (tmpl, self->probed_srccaps);
+ gst_caps_unref (tmpl);
+ } else
+ result = gst_pad_get_pad_template_caps (pad);
+
+ if (filter) {
+ GstCaps *tmp = result;
+ result =
+ gst_caps_intersect_full (filter, tmp, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (tmp);
+ }
+
+ GST_DEBUG_OBJECT (self, "Returning src caps %" GST_PTR_FORMAT, result);
+
+ gst_query_set_caps_result (query, result);
+ gst_caps_unref (result);
+ break;
+ }
+
+ default:
+ ret = GST_VIDEO_ENCODER_CLASS (parent_class)->src_query (encoder, query);
+ break;
+ }
+
+ return ret;
+ }
+
+ static gboolean
+ gst_v4l2_video_enc_sink_query (GstVideoEncoder * encoder, GstQuery * query)
+ {
+ gboolean ret = TRUE;
+ GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:{
+ GstCaps *filter, *result = NULL;
+ GstPad *pad = GST_VIDEO_ENCODER_SINK_PAD (encoder);
+
+ gst_query_parse_caps (query, &filter);
+
+ if (self->probed_sinkcaps)
+ result = gst_caps_ref (self->probed_sinkcaps);
+ else
+ result = gst_pad_get_pad_template_caps (pad);
+
+ if (filter) {
+ GstCaps *tmp = result;
+ result =
+ gst_caps_intersect_full (filter, tmp, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (tmp);
+ }
+
+ GST_DEBUG_OBJECT (self, "Returning sink caps %" GST_PTR_FORMAT, result);
+
+ gst_query_set_caps_result (query, result);
+ gst_caps_unref (result);
+ break;
+ }
+
+ default:
+ ret = GST_VIDEO_ENCODER_CLASS (parent_class)->sink_query (encoder, query);
+ break;
+ }
+
+ return ret;
+ }
+
+ static gboolean
+ gst_v4l2_video_enc_sink_event (GstVideoEncoder * encoder, GstEvent * event)
+ {
+ GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
+ gboolean ret;
+ GstEventType type = GST_EVENT_TYPE (event);
+
+ switch (type) {
+ case GST_EVENT_FLUSH_START:
+ GST_DEBUG_OBJECT (self, "flush start");
+ gst_v4l2_object_unlock (self->v4l2output);
+ gst_v4l2_object_unlock (self->v4l2capture);
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_VIDEO_ENCODER_CLASS (parent_class)->sink_event (encoder, event);
+
+ switch (type) {
+ case GST_EVENT_FLUSH_START:
+ gst_pad_stop_task (encoder->srcpad);
+ GST_DEBUG_OBJECT (self, "flush start done");
+ default:
+ break;
+ }
+
+ return ret;
+ }
+
+ static GstStateChangeReturn
+ gst_v4l2_video_enc_change_state (GstElement * element,
+ GstStateChange transition)
+ {
+ GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (element);
+
+ if (transition == GST_STATE_CHANGE_PAUSED_TO_READY) {
+ g_atomic_int_set (&self->active, FALSE);
+ gst_v4l2_object_unlock (self->v4l2output);
+ gst_v4l2_object_unlock (self->v4l2capture);
+ }
+
+ return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ }
+
+
+ static void
+ gst_v4l2_video_enc_dispose (GObject * object)
+ {
+ GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (object);
+
+ gst_caps_replace (&self->probed_sinkcaps, NULL);
+ gst_caps_replace (&self->probed_srccaps, NULL);
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+ }
+
+ static void
+ gst_v4l2_video_enc_finalize (GObject * object)
+ {
+ GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (object);
+
+ gst_v4l2_object_destroy (self->v4l2capture);
+ gst_v4l2_object_destroy (self->v4l2output);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+
+ static void
+ gst_v4l2_video_enc_init (GstV4l2VideoEnc * self)
+ {
+ /* V4L2 objects are created in subinstance_init */
+ }
+
+ static void
+ gst_v4l2_video_enc_subinstance_init (GTypeInstance * instance, gpointer g_class)
+ {
+ GstV4l2VideoEncClass *klass = GST_V4L2_VIDEO_ENC_CLASS (g_class);
+ GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (instance);
+
+ self->v4l2output = gst_v4l2_object_new (GST_ELEMENT (self),
+ GST_OBJECT (GST_VIDEO_ENCODER_SINK_PAD (self)),
+ V4L2_BUF_TYPE_VIDEO_OUTPUT, klass->default_device,
+ gst_v4l2_get_output, gst_v4l2_set_output, NULL);
+ self->v4l2output->no_initial_format = TRUE;
+ self->v4l2output->keep_aspect = FALSE;
+
+ self->v4l2capture = gst_v4l2_object_new (GST_ELEMENT (self),
+ GST_OBJECT (GST_VIDEO_ENCODER_SRC_PAD (self)),
+ V4L2_BUF_TYPE_VIDEO_CAPTURE, klass->default_device,
+ gst_v4l2_get_input, gst_v4l2_set_input, NULL);
+ }
+
+ static void
+ gst_v4l2_video_enc_class_init (GstV4l2VideoEncClass * klass)
+ {
+ GstElementClass *element_class;
+ GObjectClass *gobject_class;
+ GstVideoEncoderClass *video_encoder_class;
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ element_class = (GstElementClass *) klass;
+ gobject_class = (GObjectClass *) klass;
+ video_encoder_class = (GstVideoEncoderClass *) klass;
+
+ GST_DEBUG_CATEGORY_INIT (gst_v4l2_video_enc_debug, "v4l2videoenc", 0,
+ "V4L2 Video Encoder");
+
+ gobject_class->dispose = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_dispose);
+ gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_finalize);
+ gobject_class->set_property =
+ GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_set_property);
+ gobject_class->get_property =
+ GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_get_property);
+
+ video_encoder_class->open = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_open);
+ video_encoder_class->close = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_close);
+ video_encoder_class->start = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_start);
+ video_encoder_class->stop = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_stop);
+ video_encoder_class->finish = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_finish);
+ video_encoder_class->flush = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_flush);
+ video_encoder_class->set_format =
+ GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_set_format);
+ video_encoder_class->negotiate =
+ GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_negotiate);
+ video_encoder_class->decide_allocation =
+ GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_decide_allocation);
+ video_encoder_class->propose_allocation =
+ GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_propose_allocation);
+ video_encoder_class->sink_query =
+ GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_sink_query);
+ video_encoder_class->src_query =
+ GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_src_query);
+ video_encoder_class->sink_event =
+ GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_sink_event);
+ video_encoder_class->handle_frame =
+ GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_handle_frame);
+
+ element_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_change_state);
+
+ gst_v4l2_object_install_m2m_properties_helper (gobject_class);
+ }
+
+ static void
+ gst_v4l2_video_enc_subclass_init (gpointer g_class, gpointer data)
+ {
+ GstV4l2VideoEncClass *klass = GST_V4L2_VIDEO_ENC_CLASS (g_class);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+ GstV4l2VideoEncCData *cdata = data;
+
+ klass->default_device = cdata->device;
+ klass->codec = cdata->codec;
+
+ gst_element_class_add_pad_template (element_class,
+ gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
+ cdata->sink_caps));
+ gst_element_class_add_pad_template (element_class,
+ gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
+ cdata->src_caps));
+
+ gst_caps_unref (cdata->sink_caps);
+ gst_caps_unref (cdata->src_caps);
+ g_free (cdata);
+ }
+
+ /* Probing functions */
+ gboolean
+ gst_v4l2_is_video_enc (GstCaps * sink_caps, GstCaps * src_caps,
+ GstCaps * codec_caps)
+ {
+ gboolean ret = FALSE;
+ gboolean (*check_caps) (const GstCaps *, const GstCaps *);
+
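+ /* If the caller supplied codec caps, a simple intersection is enough to
+ * match this device; otherwise compare against the full set of known codec
+ * caps and require the src caps to be a strict subset of them. */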
+ if (codec_caps) {
+ check_caps = gst_caps_can_intersect;
+ } else {
+ codec_caps = gst_v4l2_object_get_codec_caps ();
+ check_caps = gst_caps_is_subset;
+ }
+
+ if (gst_caps_is_subset (sink_caps, gst_v4l2_object_get_raw_caps ())
+ && check_caps (src_caps, codec_caps))
+ ret = TRUE;
+
+ return ret;
+ }
+
+ void
+ gst_v4l2_video_enc_register (GstPlugin * plugin, GType type,
+ const char *codec_name, const gchar * basename, const gchar * device_path,
+ const GstV4l2Codec * codec, gint video_fd, GstCaps * sink_caps,
+ GstCaps * codec_caps, GstCaps * src_caps)
+ {
+ GstCaps *filtered_caps;
+ GTypeQuery type_query;
+ GTypeInfo type_info = { 0, };
+ GType subtype;
+ gchar *type_name;
+ GstV4l2VideoEncCData *cdata;
+ GValue value = G_VALUE_INIT;
+
+ filtered_caps = gst_caps_intersect (src_caps, codec_caps);
+
+ if (codec != NULL && video_fd != -1) {
+ if (gst_v4l2_codec_probe_levels (codec, video_fd, &value)) {
+ gst_caps_set_value (filtered_caps, "level", &value);
+ g_value_unset (&value);
+ }
+
+ if (gst_v4l2_codec_probe_profiles (codec, video_fd, &value)) {
+ gst_caps_set_value (filtered_caps, "profile", &value);
+ g_value_unset (&value);
+ }
+ }
+
+ cdata = g_new0 (GstV4l2VideoEncCData, 1);
+ cdata->device = g_strdup (device_path);
+ cdata->sink_caps = gst_caps_ref (sink_caps);
+ cdata->src_caps = gst_caps_ref (filtered_caps);
+ cdata->codec = codec;
+
+ g_type_query (type, &type_query);
+ memset (&type_info, 0, sizeof (type_info));
+ type_info.class_size = type_query.class_size;
+ type_info.instance_size = type_query.instance_size;
+ type_info.class_init = gst_v4l2_video_enc_subclass_init;
+ type_info.class_data = cdata;
+ type_info.instance_init = gst_v4l2_video_enc_subinstance_init;
+
+ /* The first encoder to be registered should use a constant name, like
+ * v4l2h264enc; for any additional encoders we create unique names. Device
+ * numbering may change between boots, so this should help keep the names
+ * stable for the most common use cases. */
+ type_name = g_strdup_printf ("v4l2%senc", codec_name);
+
+ if (g_type_from_name (type_name) != 0) {
+ g_free (type_name);
+ type_name = g_strdup_printf ("v4l2%s%senc", basename, codec_name);
+ }
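+ /* e.g. (illustrative): the first H.264 m2m device registers as
+ * "v4l2h264enc"; a second one found on, say, /dev/video11 would get a
+ * device-qualified name such as "v4l2video11h264enc". */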
+
+ subtype = g_type_register_static (type, type_name, &type_info, 0);
+
++#ifdef TIZEN_FEATURE_V4L2VIDEO_ADJ_RANK
++ if (!gst_element_register (plugin, type_name, GST_RANK_SECONDARY, subtype))
++ GST_WARNING ("Failed to register plugin '%s'", type_name);
++#else
+ if (!gst_element_register (plugin, type_name, GST_RANK_PRIMARY + 1, subtype))
+ GST_WARNING ("Failed to register plugin '%s'", type_name);
++#endif
+
+ g_free (type_name);
+ }
--- /dev/null
- dependencies : [gstbase_dep, gstvideo_dep, gstallocators_dep, gudev_dep, libv4l2_dep],
+ v4l2_sources = [
+ 'gstv4l2.c',
+ 'gstv4l2element.c',
+ 'gstv4l2allocator.c',
+ 'gstv4l2codec.c',
+ 'gstv4l2colorbalance.c',
+ 'gstv4l2deviceprovider.c',
+ 'gstv4l2object.c',
+ 'gstv4l2bufferpool.c',
+ 'gstv4l2sink.c',
+ 'gstv4l2src.c',
+ 'gstv4l2radio.c',
+ 'gstv4l2tuner.c',
+ 'gstv4l2transform.c',
+ 'gstv4l2videodec.c',
+ 'gstv4l2videoenc.c',
+ 'gstv4l2fwhtenc.c',
+ 'gstv4l2h263enc.c',
+ 'gstv4l2h264codec.c',
+ 'gstv4l2h264enc.c',
+ 'gstv4l2h265codec.c',
+ 'gstv4l2h265enc.c',
+ 'gstv4l2jpegenc.c',
+ 'gstv4l2mpeg2codec.c',
+ 'gstv4l2mpeg4codec.c',
+ 'gstv4l2mpeg4enc.c',
+ 'gstv4l2vidorient.c',
+ 'gstv4l2vp8codec.c',
+ 'gstv4l2vp8enc.c',
+ 'gstv4l2vp9codec.c',
+ 'gstv4l2vp9enc.c',
+ 'v4l2_calls.c',
+ 'v4l2-utils.c',
+ 'tuner.c',
+ 'tunerchannel.c',
+ 'tunernorm.c'
+ ]
+
+ v4l2 = get_option('v4l2')
+ if v4l2.disabled()
+ have_v4l2 = false
+ message('V4L2 plugin is disabled')
+ else
+ have_v4l2 = cc.has_header('linux/videodev2.h') or cc.has_header('sys/videodev2.h') or cc.has_header('sys/videoio.h')
+ if v4l2.enabled() and not have_v4l2
+ error('V4L2 is requested but headers were not found')
+ endif
+ endif
+
+ cdata.set('GST_V4L2_ENABLE_PROBE', get_option('v4l2-probe'))
+
+ if have_v4l2
+ message('building v4l2 plugin')
+ cdata.set('HAVE_GST_V4L2', true)
+ gudev_dep = dependency('gudev-1.0', version : '>=147', required : get_option('v4l2-gudev'))
+ cdata.set('HAVE_GUDEV', gudev_dep.found())
+
+ # libv4l2 is only needed for converting some obscure formats
+ # FIXME: Add a full list of the formats here
+ libv4l2_dep = dependency('libv4l2', required : get_option('v4l2-libv4l2'))
+ cdata.set('HAVE_LIBV4L2', libv4l2_dep.found())
+
+ gstv4l2 = library('gstvideo4linux2',
+ v4l2_sources,
+ c_args : gst_plugins_good_args,
+ include_directories : [configinc, libsinc],
++ dependencies : [gstbase_dep, gstvideo_dep, gstallocators_dep, gudev_dep, libv4l2_dep, tbm_dep],
+ install : true,
+ install_dir : plugins_install_dir,
+ )
+ pkgconfig.generate(gstv4l2, install_dir : plugins_pkgconfig_install_dir)
+ plugins += [gstv4l2]
+ endif
--- /dev/null
+ /* GStreamer
+ *
+ * Copyright (C) 2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * 2006 Edgard Lima <edgard.lima@gmail.com>
+ *
+ * v4l2_calls.c - generic V4L2 calls handling
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include <config.h>
+ #endif
+
+ #include <sys/types.h>
+ #include <sys/stat.h>
+ #include <fcntl.h>
+ #include <sys/ioctl.h>
+ #include <sys/mman.h>
+ #include <string.h>
+ #include <errno.h>
+ #include <unistd.h>
++#ifdef TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE
++#include <glob.h>
++#endif /* TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE */
+ #ifdef __sun
+ /* Needed on older Solaris Nevada builds (72 at least) */
+ #include <stropts.h>
+ #include <sys/ioccom.h>
+ #endif
+ #include "gstv4l2object.h"
+ #include "gstv4l2tuner.h"
+ #include "gstv4l2colorbalance.h"
+
+ #include "gstv4l2src.h"
+ #include "gstv4l2sink.h"
+ #include "gstv4l2videodec.h"
+
+ #include "gst/gst-i18n-plugin.h"
+
++#ifdef TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE
++enum {
++ V4L2_OPEN_ERROR = 0,
++ V4L2_OPEN_ERROR_STAT_FAILED,
++ V4L2_OPEN_ERROR_NO_DEVICE,
++ V4L2_OPEN_ERROR_NOT_OPEN,
++ V4L2_OPEN_ERROR_NOT_CAPTURE,
++ V4L2_OPEN_ERROR_NOT_OUTPUT
++};
++#endif /* TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE */
++
+ GST_DEBUG_CATEGORY_EXTERN (v4l2_debug);
+ #define GST_CAT_DEFAULT v4l2_debug
+
+ /******************************************************
+ * gst_v4l2_get_capabilities():
+ * get the device's capturing capabilities
+ * return value: TRUE on success, FALSE on error
+ ******************************************************/
+ static gboolean
+ gst_v4l2_get_capabilities (GstV4l2Object * v4l2object)
+ {
+ GstElement *e;
+
+ e = v4l2object->element;
+
+ GST_DEBUG_OBJECT (e, "getting capabilities");
+
+ if (!GST_V4L2_IS_OPEN (v4l2object))
+ return FALSE;
+
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_QUERYCAP,
+ &v4l2object->vcap) < 0)
+ goto cap_failed;
+
+ if (v4l2object->vcap.capabilities & V4L2_CAP_DEVICE_CAPS)
+ v4l2object->device_caps = v4l2object->vcap.device_caps;
+ else
+ v4l2object->device_caps = v4l2object->vcap.capabilities;
+
+ GST_LOG_OBJECT (e, "driver: '%s'", v4l2object->vcap.driver);
+ GST_LOG_OBJECT (e, "card: '%s'", v4l2object->vcap.card);
+ GST_LOG_OBJECT (e, "bus_info: '%s'", v4l2object->vcap.bus_info);
+ GST_LOG_OBJECT (e, "version: %08x", v4l2object->vcap.version);
+ GST_LOG_OBJECT (e, "capabilities: %08x", v4l2object->device_caps);
+
+ return TRUE;
+
+ /* ERRORS */
+ cap_failed:
+ {
+ GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
+ (_("Error getting capabilities for device '%s': "
+ "It isn't a v4l2 driver. Check if it is a v4l1 driver."),
+ v4l2object->videodev), GST_ERROR_SYSTEM);
+ return FALSE;
+ }
+ }
+
+ /******************************************************
+ * The video4linux command line tool v4l2-ctl
+ * normalises the names of the controls received from
+ * the kernel like:
+ *
+ * "Exposure (absolute)" -> "exposure_absolute"
+ *
+ * We follow their lead here. @name is modified
+ * in-place.
+ ******************************************************/
+ static void
+ gst_v4l2_normalise_control_name (gchar * name)
+ {
+ int i, j;
+ for (i = 0, j = 0; name[j]; ++j) {
+ if (g_ascii_isalnum (name[j])) {
+ if (i > 0 && !g_ascii_isalnum (name[j - 1]))
+ name[i++] = '_';
+ name[i++] = g_ascii_tolower (name[j]);
+ }
+ }
+ name[i++] = '\0';
+ }
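+ /* Illustrative examples of the normalisation above:
+ * "Exposure (absolute)" -> "exposure_absolute"
+ * "White Balance, Automatic" -> "white_balance_automatic"
+ * Runs of non-alphanumerics collapse into a single '_', and leading or
+ * trailing separators are dropped. */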
+
+ /******************************************************
+ * gst_v4l2_empty_lists() and gst_v4l2_fill_lists():
+ * fill/empty the lists of enumerations
+ * return value: TRUE on success, FALSE on error
+ ******************************************************/
+ static gboolean
+ gst_v4l2_fill_lists (GstV4l2Object * v4l2object)
+ {
+ gint n, next;
+ struct v4l2_queryctrl control = { 0, };
+
+ GstElement *e;
+
+ e = v4l2object->element;
+
+ GST_DEBUG_OBJECT (e, "getting enumerations");
+ GST_V4L2_CHECK_OPEN (v4l2object);
+
+ GST_DEBUG_OBJECT (e, " channels");
+ /* and now, the channels */
+ for (n = 0;; n++) {
+ struct v4l2_input input;
+ GstV4l2TunerChannel *v4l2channel;
+ GstTunerChannel *channel;
+
+ memset (&input, 0, sizeof (input));
+
+ input.index = n;
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_ENUMINPUT, &input) < 0) {
+ if (errno == EINVAL || errno == ENOTTY)
+ break; /* end of enumeration */
+ else {
+ GST_ELEMENT_ERROR (e, RESOURCE, SETTINGS,
+ (_("Failed to query attributes of input %d in device %s"),
+ n, v4l2object->videodev),
+ ("Failed to get %d in input enumeration for %s. (%d - %s)",
+ n, v4l2object->videodev, errno, strerror (errno)));
+ return FALSE;
+ }
+ }
+
+ GST_LOG_OBJECT (e, " index: %d", input.index);
+ GST_LOG_OBJECT (e, " name: '%s'", input.name);
+ GST_LOG_OBJECT (e, " type: %08x", input.type);
+ GST_LOG_OBJECT (e, " audioset: %08x", input.audioset);
+ GST_LOG_OBJECT (e, " std: %016" G_GINT64_MODIFIER "x",
+ (guint64) input.std);
+ GST_LOG_OBJECT (e, " status: %08x", input.status);
+
+ v4l2channel = g_object_new (GST_TYPE_V4L2_TUNER_CHANNEL, NULL);
+ channel = GST_TUNER_CHANNEL (v4l2channel);
+ channel->label = g_strdup ((const gchar *) input.name);
+ channel->flags = GST_TUNER_CHANNEL_INPUT;
+ v4l2channel->index = n;
+
+ if (input.type == V4L2_INPUT_TYPE_TUNER) {
+ struct v4l2_tuner vtun;
+
+ v4l2channel->tuner = input.tuner;
+ channel->flags |= GST_TUNER_CHANNEL_FREQUENCY;
+
+ vtun.index = input.tuner;
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_TUNER, &vtun) < 0) {
+ GST_ELEMENT_ERROR (e, RESOURCE, SETTINGS,
+ (_("Failed to get setting of tuner %d on device '%s'."),
+ input.tuner, v4l2object->videodev), GST_ERROR_SYSTEM);
+ g_object_unref (G_OBJECT (channel));
+ return FALSE;
+ }
+
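+ /* V4L2 expresses tuner frequencies in units of 62.5 Hz when
+ * V4L2_TUNER_CAP_LOW is set and of 62.5 kHz otherwise; the
+ * multiplicator converts those units to Hz. */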
+ channel->freq_multiplicator =
+ 62.5 * ((vtun.capability & V4L2_TUNER_CAP_LOW) ? 1 : 1000);
+ channel->min_frequency = vtun.rangelow * channel->freq_multiplicator;
+ channel->max_frequency = vtun.rangehigh * channel->freq_multiplicator;
+ channel->min_signal = 0;
+ channel->max_signal = 0xffff;
+ }
+ if (input.audioset) {
+ /* we take the first audio input; we don't
+ * care about the others for now */
+ while (!(input.audioset & (1 << v4l2channel->audio)))
+ v4l2channel->audio++;
+ channel->flags |= GST_TUNER_CHANNEL_AUDIO;
+ }
+
+ v4l2object->channels =
+ g_list_prepend (v4l2object->channels, (gpointer) channel);
+ }
+ v4l2object->channels = g_list_reverse (v4l2object->channels);
+
+ GST_DEBUG_OBJECT (e, " norms");
+ /* norms... */
+ for (n = 0;; n++) {
+ struct v4l2_standard standard = { 0, };
+ GstV4l2TunerNorm *v4l2norm;
+
+ GstTunerNorm *norm;
+
+ /* fill in defaults */
+ standard.frameperiod.numerator = 1;
+ standard.frameperiod.denominator = 0;
+ standard.index = n;
+
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_ENUMSTD, &standard) < 0) {
+ if (errno == EINVAL || errno == ENOTTY)
+ break; /* end of enumeration */
+ #ifdef ENODATA
+ else if (errno == ENODATA)
+ break; /* end of enumeration, as of Linux 3.7-rc1 */
+ #endif
+ else {
+ GST_ELEMENT_ERROR (e, RESOURCE, SETTINGS,
+ (_("Failed to query norm on device '%s'."),
+ v4l2object->videodev),
+ ("Failed to get attributes for norm %d on divide '%s'. (%d - %s)",
+ n, v4l2object->videodev, errno, strerror (errno)));
+ return FALSE;
+ }
+ }
+
+ GST_DEBUG_OBJECT (e, " '%s', fps: %d / %d",
+ standard.name, standard.frameperiod.denominator,
+ standard.frameperiod.numerator);
+
+ v4l2norm = g_object_new (GST_TYPE_V4L2_TUNER_NORM, NULL);
+ norm = GST_TUNER_NORM (v4l2norm);
+ norm->label = g_strdup ((const gchar *) standard.name);
+ gst_value_set_fraction (&norm->framerate,
+ standard.frameperiod.denominator, standard.frameperiod.numerator);
+ v4l2norm->index = standard.id;
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "index=%08x, label=%s",
+ (unsigned int) v4l2norm->index, norm->label);
+
+ v4l2object->norms = g_list_prepend (v4l2object->norms, (gpointer) norm);
+ }
+ v4l2object->norms = g_list_reverse (v4l2object->norms);
+
+ GST_DEBUG_OBJECT (e, " controls+menus");
+
+ /* and lastly, controls+menus (if appropriate) */
+ next = V4L2_CTRL_FLAG_NEXT_CTRL;
+ n = 0;
+ control.id = next;
+
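+ /* Newer kernels support V4L2_CTRL_FLAG_NEXT_CTRL: OR-ing it into the
+ * queried id makes VIDIOC_QUERYCTRL return the next control after that
+ * id. If the very first such query fails, fall back to walking the ids
+ * linearly from V4L2_CID_BASE. */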
+ while (TRUE) {
+ GstV4l2ColorBalanceChannel *v4l2channel;
+ GstColorBalanceChannel *channel;
+
+ if (!next)
+ n++;
+
+ retry:
+ /* when we reached the last official CID, continue with private CIDs */
+ if (n == V4L2_CID_LASTP1) {
+ GST_DEBUG_OBJECT (e, "checking private CIDs");
+ n = V4L2_CID_PRIVATE_BASE;
+ }
+ GST_DEBUG_OBJECT (e, "checking control %08x", n);
+
+ control.id = n | next;
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_QUERYCTRL,
+ &control) < 0) {
+ if (next) {
+ if (n > 0) {
+ GST_DEBUG_OBJECT (e, "controls finished");
+ break;
+ } else {
+ GST_DEBUG_OBJECT (e, "V4L2_CTRL_FLAG_NEXT_CTRL not supported.");
+ next = 0;
+ n = V4L2_CID_BASE;
+ goto retry;
+ }
+ }
+ if (errno == EINVAL || errno == ENOTTY || errno == EIO || errno == ENOENT) {
+ if (n < V4L2_CID_PRIVATE_BASE) {
+ GST_DEBUG_OBJECT (e, "skipping control %08x", n);
+ /* continue so that we also check private controls */
+ n = V4L2_CID_PRIVATE_BASE - 1;
+ continue;
+ } else {
+ GST_DEBUG_OBJECT (e, "controls finished");
+ break;
+ }
+ } else {
+ GST_WARNING_OBJECT (e, "Failed querying control %d on device '%s'. "
+ "(%d - %s)", n, v4l2object->videodev, errno, strerror (errno));
+ continue;
+ }
+ }
+ /* bogus driver might mess with id in unexpected ways (e.g. set to 0), so
+ * make sure to simply try all if V4L2_CTRL_FLAG_NEXT_CTRL not supported */
+ if (next)
+ n = control.id;
+ if (control.flags & V4L2_CTRL_FLAG_DISABLED) {
+ GST_DEBUG_OBJECT (e, "skipping disabled control");
+ continue;
+ }
+
+ if (control.type == V4L2_CTRL_TYPE_CTRL_CLASS) {
+ GST_DEBUG_OBJECT (e, "starting control class '%s'", control.name);
+ continue;
+ }
+
+ switch (control.type) {
+ case V4L2_CTRL_TYPE_INTEGER:
+ case V4L2_CTRL_TYPE_BOOLEAN:
+ case V4L2_CTRL_TYPE_MENU:
+ case V4L2_CTRL_TYPE_INTEGER_MENU:
+ case V4L2_CTRL_TYPE_BITMASK:
+ case V4L2_CTRL_TYPE_BUTTON:
+ case V4L2_CTRL_TYPE_STRING:{
+ control.name[31] = '\0';
+ gst_v4l2_normalise_control_name ((gchar *) control.name);
+ g_datalist_id_set_data (&v4l2object->controls,
+ g_quark_from_string ((const gchar *) control.name),
+ GINT_TO_POINTER (n));
+ break;
+ }
+ default:
+ GST_DEBUG_OBJECT (e,
+ "Control type for '%s' not supported for extra controls.",
+ control.name);
+ break;
+ }
+
+ switch (n) {
+ case V4L2_CID_BRIGHTNESS:
+ case V4L2_CID_CONTRAST:
+ case V4L2_CID_SATURATION:
+ case V4L2_CID_HUE:
+ case V4L2_CID_BLACK_LEVEL:
+ case V4L2_CID_AUTO_WHITE_BALANCE:
+ case V4L2_CID_DO_WHITE_BALANCE:
+ case V4L2_CID_RED_BALANCE:
+ case V4L2_CID_BLUE_BALANCE:
+ case V4L2_CID_GAMMA:
+ case V4L2_CID_EXPOSURE:
+ case V4L2_CID_AUTOGAIN:
+ case V4L2_CID_GAIN:
+ case V4L2_CID_SHARPNESS:
++#ifdef TIZEN_FEATURE_V4L2_ADDITIONAL_CID_SUPPORT
++ case V4L2_CID_WHITE_BALANCE_TEMPERATURE:
++ case V4L2_CID_EXPOSURE_AUTO:
++ case V4L2_CID_EXPOSURE_ABSOLUTE:
++ case V4L2_CID_EXPOSURE_AUTO_PRIORITY:
++#endif /* TIZEN_FEATURE_V4L2_ADDITIONAL_CID_SUPPORT */
+ /* we only handle these for now (why?) */
+ break;
+ case V4L2_CID_HFLIP:
+ case V4L2_CID_VFLIP:
+ case V4L2_CID_PAN_RESET:
+ case V4L2_CID_TILT_RESET:
+ /* not handled here, handled by VideoOrientation interface */
+ control.id++;
+ break;
+ case V4L2_CID_AUDIO_VOLUME:
+ case V4L2_CID_AUDIO_BALANCE:
+ case V4L2_CID_AUDIO_BASS:
+ case V4L2_CID_AUDIO_TREBLE:
+ case V4L2_CID_AUDIO_MUTE:
+ case V4L2_CID_AUDIO_LOUDNESS:
+ /* FIXME: We should implement GstMixer interface instead */
+ /* but let's not be pedantic and make element more useful for now */
+ break;
+ case V4L2_CID_ALPHA_COMPONENT:
+ v4l2object->has_alpha_component = TRUE;
+ break;
+ default:
+ GST_DEBUG_OBJECT (e,
+ "ControlID %s (%x) unhandled, FIXME", control.name, n);
+ control.id++;
+ break;
+ }
+ if (n != control.id)
+ continue;
+
+ GST_DEBUG_OBJECT (e, "Adding ControlID %s (%x)", control.name, n);
+ v4l2channel = g_object_new (GST_TYPE_V4L2_COLOR_BALANCE_CHANNEL, NULL);
+ channel = GST_COLOR_BALANCE_CHANNEL (v4l2channel);
+ channel->label = g_strdup ((const gchar *) control.name);
+ v4l2channel->id = n;
+
+ #if 0
+ /* FIXME: this will only be needed when handling private controls
+ * (currently none of the base controls are of this type) */
+ if (control.type == V4L2_CTRL_TYPE_MENU) {
+ struct v4l2_querymenu menu, *mptr;
+
+ int i;
+
+ menu.id = n;
+ for (i = 0;; i++) {
+ menu.index = i;
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_QUERYMENU,
+ &menu) < 0) {
+ if (errno == EINVAL)
+ break; /* end of enumeration */
+ else {
+ GST_ELEMENT_ERROR (e, RESOURCE, SETTINGS,
+ (_("Failed getting controls attributes on device '%s'."),
+ v4l2object->videodev),
+ ("Failed to get %d in menu enumeration for %s. (%d - %s)",
+ n, v4l2object->videodev, errno, strerror (errno)));
+ return FALSE;
+ }
+ }
+ mptr = g_malloc (sizeof (menu));
+ memcpy (mptr, &menu, sizeof (menu));
+ menus = g_list_append (menus, mptr);
+ }
+ }
+ v4l2object->menus = g_list_append (v4l2object->menus, menus);
+ #endif
+
+ switch (control.type) {
+ case V4L2_CTRL_TYPE_INTEGER:
+ channel->min_value = control.minimum;
+ channel->max_value = control.maximum;
+ break;
+ case V4L2_CTRL_TYPE_BOOLEAN:
+ channel->min_value = FALSE;
+ channel->max_value = TRUE;
+ break;
+ default:
+ /* FIXME we should find out how to handle V4L2_CTRL_TYPE_BUTTON.
+ BUTTON controls like V4L2_CID_DO_WHITE_BALANCE can just be set (1) or
+ unset (0), but can't be queried */
+ GST_DEBUG_OBJECT (e,
+ "Control with non supported type %s (%x), type=%d",
+ control.name, n, control.type);
+ channel->min_value = channel->max_value = 0;
+ break;
+ }
+
+ v4l2object->colors =
+ g_list_prepend (v4l2object->colors, (gpointer) channel);
+ }
+ v4l2object->colors = g_list_reverse (v4l2object->colors);
+
+ GST_DEBUG_OBJECT (e, "done");
+ return TRUE;
+ }
+
+
+ static void
+ gst_v4l2_empty_lists (GstV4l2Object * v4l2object)
+ {
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "deleting enumerations");
+
+ g_list_foreach (v4l2object->channels, (GFunc) g_object_unref, NULL);
+ g_list_free (v4l2object->channels);
+ v4l2object->channels = NULL;
+
+ g_list_foreach (v4l2object->norms, (GFunc) g_object_unref, NULL);
+ g_list_free (v4l2object->norms);
+ v4l2object->norms = NULL;
+
+ g_list_foreach (v4l2object->colors, (GFunc) g_object_unref, NULL);
+ g_list_free (v4l2object->colors);
+ v4l2object->colors = NULL;
+
+ g_datalist_clear (&v4l2object->controls);
+ }
+
+ static void
+ gst_v4l2_adjust_buf_type (GstV4l2Object * v4l2object)
+ {
+ /* When calling gst_v4l2_object_new the user decides the initial type,
+ * so adjust it here if multi-planar is supported. Drivers are expected
+ * to expose either MPLANE or non-MPLANE buffer types, not both, since
+ * even with MPLANE it is still possible to operate in a contiguous
+ * manner: in that case the first v4l2 plane contains all the GStreamer
+ * planes.
+ */
+ switch (v4l2object->type) {
+ case V4L2_BUF_TYPE_VIDEO_OUTPUT:
+ if (v4l2object->device_caps &
+ (V4L2_CAP_VIDEO_OUTPUT_MPLANE | V4L2_CAP_VIDEO_M2M_MPLANE)) {
+ GST_DEBUG ("adjust type to multi-planar output");
+ v4l2object->type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
+ }
+ break;
+ case V4L2_BUF_TYPE_VIDEO_CAPTURE:
+ if (v4l2object->device_caps &
+ (V4L2_CAP_VIDEO_CAPTURE_MPLANE | V4L2_CAP_VIDEO_M2M_MPLANE)) {
+ GST_DEBUG ("adjust type to multi-planar capture");
+ v4l2object->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ }
+ break;
+ default:
+ break;
+ }
+ }
+
+ /******************************************************
+ * gst_v4l2_open():
+ * open the video device (v4l2object->videodev)
+ * return value: TRUE on success, FALSE on error
+ ******************************************************/
+ gboolean
+ gst_v4l2_open (GstV4l2Object * v4l2object, GstV4l2Error * error)
+ {
+ struct stat st;
+ int libv4l2_fd = -1;
++#ifdef TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE
++ int error_type = V4L2_OPEN_ERROR_STAT_FAILED;
++ int device_index = 0;
++ glob_t glob_buf;
++
++ memset (&glob_buf, 0x0, sizeof(glob_t));
+
++ if (!v4l2object) {
++ GST_ERROR ("v4l2object is NULL");
++ return FALSE;
++ }
++#endif /* TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE */
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Trying to open device %s",
+ v4l2object->videodev);
+
+ GST_V4L2_CHECK_NOT_OPEN (v4l2object);
+ GST_V4L2_CHECK_NOT_ACTIVE (v4l2object);
+
+ /* be sure we have a device */
+ if (!v4l2object->videodev)
+ v4l2object->videodev = g_strdup ("/dev/video");
+
++#ifdef TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE
++ if (!v4l2object->videodev) {
++ GST_ERROR_OBJECT (v4l2object->element, "videodev is NULL");
++ return FALSE;
++ }
++
++CHECK_AGAIN:
++ /* check if it is a device */
++ if (stat (v4l2object->videodev, &st) == -1) {
++ error_type = V4L2_OPEN_ERROR_STAT_FAILED;
++ goto pre_error_check;
++ }
++
++ if (!S_ISCHR (st.st_mode)) {
++ error_type = V4L2_OPEN_ERROR_NO_DEVICE;
++ goto pre_error_check;
++ }
++#else /* TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE */
+ /* check if it is a device */
+ if (stat (v4l2object->videodev, &st) == -1)
+ goto stat_failed;
+
+ if (!S_ISCHR (st.st_mode))
+ goto no_device;
++#endif /* TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE */
+
+ /* open the device */
+ v4l2object->video_fd =
+ open (v4l2object->videodev, O_RDWR /* | O_NONBLOCK */ );
+
++#ifdef TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE
++ if (!GST_V4L2_IS_OPEN (v4l2object)) {
++ error_type = V4L2_OPEN_ERROR_NOT_OPEN;
++ goto pre_error_check;
++ }
++#else /* TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE */
+ if (!GST_V4L2_IS_OPEN (v4l2object))
+ goto not_open;
++#endif /* TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE */
+
+ #ifdef HAVE_LIBV4L2
+ if (v4l2object->fd_open)
+ libv4l2_fd = v4l2object->fd_open (v4l2object->video_fd,
+ V4L2_ENABLE_ENUM_FMT_EMULATION);
+ #endif
+
+ /* Note the v4l2_xxx functions are designed so that if they get passed an
+ unknown fd, they will behave exactly as their regular xxx counterparts, so
+ if v4l2_fd_open fails, we continue as normal (missing the libv4l2 custom
+ cam format to normal formats conversion). Chances are high we will still
+ fail then though, as normally v4l2_fd_open only fails if the device is not
+ a v4l2 device. */
+ if (libv4l2_fd != -1)
+ v4l2object->video_fd = libv4l2_fd;
+
+ /* get capabilities, error will be posted */
++#ifdef TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE
++ if (!gst_v4l2_get_capabilities (v4l2object)) {
++ error_type = V4L2_OPEN_ERROR;
++ goto pre_error_check;
++ }
++
++ GST_INFO_OBJECT (v4l2object->element, "device_caps 0x%x", v4l2object->device_caps);
++
++ if (GST_IS_V4L2SRC (v4l2object->element) &&
++ (!(v4l2object->device_caps & (V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_CAPTURE_MPLANE)) ||
++ (v4l2object->device_caps & (V4L2_CAP_VIDEO_OUTPUT | V4L2_CAP_VIDEO_OUTPUT_MPLANE)))) {
++ error_type = V4L2_OPEN_ERROR_NOT_CAPTURE;
++ goto pre_error_check;
++ }
++
++ if (GST_IS_V4L2SINK (v4l2object->element) &&
++ !(v4l2object->device_caps & (V4L2_CAP_VIDEO_OUTPUT |
++ V4L2_CAP_VIDEO_OUTPUT_MPLANE))) {
++ error_type = V4L2_OPEN_ERROR_NOT_OUTPUT;
++ goto pre_error_check;
++ }
++#else /* TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE */
+ if (!gst_v4l2_get_capabilities (v4l2object))
+ goto error;
+
+ /* do we need to be a capture device? */
+ if (GST_IS_V4L2SRC (v4l2object->element) &&
+ !(v4l2object->device_caps & (V4L2_CAP_VIDEO_CAPTURE |
+ V4L2_CAP_VIDEO_CAPTURE_MPLANE)))
+ goto not_capture;
+
+ if (GST_IS_V4L2SINK (v4l2object->element) &&
+ !(v4l2object->device_caps & (V4L2_CAP_VIDEO_OUTPUT |
+ V4L2_CAP_VIDEO_OUTPUT_MPLANE)))
+ goto not_output;
++#endif /* TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE */
+
+ if (GST_IS_V4L2_VIDEO_DEC (v4l2object->element) &&
+ !GST_V4L2_IS_M2M (v4l2object->device_caps))
+ goto not_m2m;
+
+ gst_v4l2_adjust_buf_type (v4l2object);
+
+ /* create enumerations, posts errors. */
++#ifdef TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE
++ if (!gst_v4l2_fill_lists (v4l2object)) {
++ error_type = V4L2_OPEN_ERROR;
++ goto pre_error_check;
++ }
++#else /* TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE */
+ if (!gst_v4l2_fill_lists (v4l2object))
+ goto error;
++#endif /* TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE */
+
+ GST_INFO_OBJECT (v4l2object->dbg_obj,
+ "Opened device '%s' (%s) successfully",
+ v4l2object->vcap.card, v4l2object->videodev);
+
+ if (v4l2object->extra_controls)
+ gst_v4l2_set_controls (v4l2object, v4l2object->extra_controls);
+
++#ifdef TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE
++ globfree (&glob_buf);
++#endif /* TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE */
++
+ /* UVC devices are never interlaced, and doing VIDIOC_TRY_FMT on them
+ * causes expensive and slow USB IO, so don't probe them for interlaced
+ */
+ if (!strcmp ((char *) v4l2object->vcap.driver, "uvcusb") ||
+ !strcmp ((char *) v4l2object->vcap.driver, "uvcvideo")) {
+ v4l2object->never_interlaced = TRUE;
+ }
+
+ return TRUE;
+
++#ifdef TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE
++pre_error_check:
++ {
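++ /* Tizen auto-scan: when opening the configured node fails, walk the
++ * /dev/video* nodes returned by glob() and retry the open sequence on
++ * each of them before reporting the original error. */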
++ if (v4l2object->auto_scan_device == FALSE) {
++ GST_WARNING_OBJECT (v4l2object->element, "auto scan device disabled");
++ goto error;
++ }
++
++ GST_ERROR_OBJECT (v4l2object->element, "device[%s] failed, error[%d]",
++ v4l2object->videodev, error_type);
++
++ if (GST_IS_V4L2SRC (v4l2object->element) && glob_buf.gl_pathc == 0) {
++ if (glob("/dev/video*", 0, 0, &glob_buf) != 0) {
++ GST_WARNING_OBJECT (v4l2object->element, "glob failed");
++ }
++ }
++
++ if (glob_buf.gl_pathc > 0 && device_index < glob_buf.gl_pathc) {
++ if (v4l2object->videodev)
++ g_free (v4l2object->videodev);
++
++ v4l2object->videodev = g_strdup (glob_buf.gl_pathv[device_index]);
++ if (v4l2object->videodev) {
++ device_index++;
++
++ GST_INFO_OBJECT (v4l2object->element, "check device [%s]",
++ v4l2object->videodev);
++
++ if (GST_V4L2_IS_OPEN (v4l2object)) {
++ /* close device */
++ v4l2object->close (v4l2object->video_fd);
++ v4l2object->video_fd = -1;
++ }
++ /* empty lists */
++ gst_v4l2_empty_lists (v4l2object);
++
++ goto CHECK_AGAIN;
++ } else {
++ GST_WARNING_OBJECT (v4l2object->element, "g_strdup failed [%s]",
++ glob_buf.gl_pathv[device_index]);
++ }
++ }
++
++ switch (error_type) {
++ case V4L2_OPEN_ERROR_STAT_FAILED:
++ goto stat_failed;
++ case V4L2_OPEN_ERROR_NO_DEVICE:
++ goto no_device;
++ case V4L2_OPEN_ERROR_NOT_OPEN:
++ goto not_open;
++ case V4L2_OPEN_ERROR_NOT_CAPTURE:
++ goto not_capture;
++ case V4L2_OPEN_ERROR_NOT_OUTPUT:
++ goto not_output;
++ default:
++ goto error;
++ }
++ }
++#endif /* TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE */
++
+ /* ERRORS */
+ stat_failed:
+ {
+ GST_V4L2_ERROR (error, RESOURCE, NOT_FOUND,
+ (_("Cannot identify device '%s'."), v4l2object->videodev),
+ GST_ERROR_SYSTEM);
+ goto error;
+ }
+ no_device:
+ {
+ GST_V4L2_ERROR (error, RESOURCE, NOT_FOUND,
+ (_("This isn't a device '%s'."), v4l2object->videodev),
+ GST_ERROR_SYSTEM);
+ goto error;
+ }
+ not_open:
+ {
+ GST_V4L2_ERROR (error, RESOURCE, OPEN_READ_WRITE,
+ (_("Could not open device '%s' for reading and writing."),
+ v4l2object->videodev), GST_ERROR_SYSTEM);
+ goto error;
+ }
+ not_capture:
+ {
+ GST_V4L2_ERROR (error, RESOURCE, NOT_FOUND,
+ (_("Device '%s' is not a capture device."), v4l2object->videodev),
+ ("Capabilities: 0x%x", v4l2object->device_caps));
+ goto error;
+ }
+ not_output:
+ {
+ GST_V4L2_ERROR (error, RESOURCE, NOT_FOUND,
+ (_("Device '%s' is not a output device."), v4l2object->videodev),
+ ("Capabilities: 0x%x", v4l2object->device_caps));
+ goto error;
+ }
+ not_m2m:
+ {
+ GST_V4L2_ERROR (error, RESOURCE, NOT_FOUND,
+ (_("Device '%s' is not a M2M device."), v4l2object->videodev),
+ ("Capabilities: 0x%x", v4l2object->device_caps));
+ goto error;
+ }
+ error:
+ {
+ if (GST_V4L2_IS_OPEN (v4l2object)) {
+ /* close device */
+ v4l2object->close (v4l2object->video_fd);
+ v4l2object->video_fd = -1;
+ }
+ /* empty lists */
+ gst_v4l2_empty_lists (v4l2object);
+
++#ifdef TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE
++ globfree (&glob_buf);
++#endif /* TIZEN_FEATURE_V4L2SRC_AUTO_SCAN_DEVICE_NODE */
++
+ return FALSE;
+ }
+ }
+
+ gboolean
+ gst_v4l2_dup (GstV4l2Object * v4l2object, GstV4l2Object * other)
+ {
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Trying to dup device %s",
+ other->videodev);
+
+ GST_V4L2_CHECK_OPEN (other);
+ GST_V4L2_CHECK_NOT_OPEN (v4l2object);
+ GST_V4L2_CHECK_NOT_ACTIVE (other);
+ GST_V4L2_CHECK_NOT_ACTIVE (v4l2object);
+
+ v4l2object->vcap = other->vcap;
+ v4l2object->device_caps = other->device_caps;
+ gst_v4l2_adjust_buf_type (v4l2object);
+
+ v4l2object->video_fd = v4l2object->dup (other->video_fd);
+ if (!GST_V4L2_IS_OPEN (v4l2object))
+ goto not_open;
+
+ g_free (v4l2object->videodev);
+ v4l2object->videodev = g_strdup (other->videodev);
+
+ GST_INFO_OBJECT (v4l2object->dbg_obj,
+ "Cloned device '%s' (%s) successfully",
+ v4l2object->vcap.card, v4l2object->videodev);
+
+ v4l2object->never_interlaced = other->never_interlaced;
+ v4l2object->no_initial_format = other->no_initial_format;
+
+ return TRUE;
+
+ not_open:
+ {
+ GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, OPEN_READ_WRITE,
+ (_("Could not dup device '%s' for reading and writing."),
+ v4l2object->videodev), GST_ERROR_SYSTEM);
+
+ return FALSE;
+ }
+ }
+
+
+ /******************************************************
+ * gst_v4l2_close():
+ * close the video device (v4l2object->video_fd)
+ * return value: TRUE on success, FALSE on error
+ ******************************************************/
+ gboolean
+ gst_v4l2_close (GstV4l2Object * v4l2object)
+ {
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Trying to close %s",
+ v4l2object->videodev);
+
+ GST_V4L2_CHECK_OPEN (v4l2object);
+ GST_V4L2_CHECK_NOT_ACTIVE (v4l2object);
+
+ /* close device */
+ v4l2object->close (v4l2object->video_fd);
+ v4l2object->video_fd = -1;
+
+ /* empty lists */
+ gst_v4l2_empty_lists (v4l2object);
+
+ return TRUE;
+ }
+
+
+ /******************************************************
+ * gst_v4l2_get_norm()
+ * Get the norm of the current device
+ * return value: TRUE on success, FALSE on error
+ ******************************************************/
+ gboolean
+ gst_v4l2_get_norm (GstV4l2Object * v4l2object, v4l2_std_id * norm)
+ {
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "getting norm");
+
+ if (!GST_V4L2_IS_OPEN (v4l2object))
+ return FALSE;
+
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_STD, norm) < 0)
+ goto std_failed;
+
+ return TRUE;
+
+ /* ERRORS */
+ std_failed:
+ {
+ GST_DEBUG ("Failed to get the current norm for device %s",
+ v4l2object->videodev);
+ return FALSE;
+ }
+ }
+
+
+ /******************************************************
+ * gst_v4l2_set_norm()
+ * Set the norm of the current device
+ * return value: TRUE on success, FALSE on error
+ ******************************************************/
+ gboolean
+ gst_v4l2_set_norm (GstV4l2Object * v4l2object, v4l2_std_id norm)
+ {
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "trying to set norm to "
+ "%" G_GINT64_MODIFIER "x", (guint64) norm);
+
+ if (!GST_V4L2_IS_OPEN (v4l2object))
+ return FALSE;
+
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_S_STD, &norm) < 0)
+ goto std_failed;
+
+ return TRUE;
+
+ /* ERRORS */
+ std_failed:
+ {
+ GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
+ (_("Failed to set norm for device '%s'."),
+ v4l2object->videodev), GST_ERROR_SYSTEM);
+ return FALSE;
+ }
+ }
+
+ /******************************************************
+ * gst_v4l2_get_frequency():
+ * get the current frequency
+ * return value: TRUE on success, FALSE on error
+ ******************************************************/
+ gboolean
+ gst_v4l2_get_frequency (GstV4l2Object * v4l2object,
+ gint tunernum, gulong * frequency)
+ {
+ struct v4l2_frequency freq = { 0, };
+
+ GstTunerChannel *channel;
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "getting current tuner frequency");
+
+ if (!GST_V4L2_IS_OPEN (v4l2object))
+ return FALSE;
+
+ channel = gst_tuner_get_channel (GST_TUNER (v4l2object->element));
+
+ freq.tuner = tunernum;
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_FREQUENCY, &freq) < 0)
+ goto freq_failed;
+
+ *frequency = freq.frequency * channel->freq_multiplicator;
+
+ return TRUE;
+
+ /* ERRORS */
+ freq_failed:
+ {
+ GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
+ (_("Failed to get current tuner frequency for device '%s'."),
+ v4l2object->videodev), GST_ERROR_SYSTEM);
+ return FALSE;
+ }
+ }
+
+
+ /******************************************************
+ * gst_v4l2_set_frequency():
+ * set frequency
+ * return value: TRUE on success, FALSE on error
+ ******************************************************/
+ gboolean
+ gst_v4l2_set_frequency (GstV4l2Object * v4l2object,
+ gint tunernum, gulong frequency)
+ {
+ struct v4l2_frequency freq = { 0, };
+
+ GstTunerChannel *channel;
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
+ "setting current tuner frequency to %lu", frequency);
+
+ if (!GST_V4L2_IS_OPEN (v4l2object))
+ return FALSE;
+
+ channel = gst_tuner_get_channel (GST_TUNER (v4l2object->element));
+
+ freq.tuner = tunernum;
+ /* fill in type - ignore error */
+ (void) v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_FREQUENCY, &freq);
+ freq.frequency = frequency / channel->freq_multiplicator;
+
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_S_FREQUENCY, &freq) < 0)
+ goto freq_failed;
+
+ return TRUE;
+
+ /* ERRORS */
+ freq_failed:
+ {
+ GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
+ (_("Failed to set current tuner frequency for device '%s' to %lu Hz."),
+ v4l2object->videodev, frequency), GST_ERROR_SYSTEM);
+ return FALSE;
+ }
+ }
+
+ /******************************************************
+ * gst_v4l2_signal_strength():
+ * get the strength of the signal on the current input
+ * return value: TRUE on success, FALSE on error
+ ******************************************************/
+ gboolean
+ gst_v4l2_signal_strength (GstV4l2Object * v4l2object,
+ gint tunernum, gulong * signal_strength)
+ {
+ struct v4l2_tuner tuner = { 0, };
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "trying to get signal strength");
+
+ if (!GST_V4L2_IS_OPEN (v4l2object))
+ return FALSE;
+
+ tuner.index = tunernum;
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_TUNER, &tuner) < 0)
+ goto tuner_failed;
+
+ *signal_strength = tuner.signal;
+
+ return TRUE;
+
+ /* ERRORS */
+ tuner_failed:
+ {
+ GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
+ (_("Failed to get signal strength for device '%s'."),
+ v4l2object->videodev), GST_ERROR_SYSTEM);
+ return FALSE;
+ }
+ }
+
+ /******************************************************
+ * gst_v4l2_get_attribute():
+ * try to get the value of one specific attribute
+ * return value: TRUE on success, FALSE on error
+ ******************************************************/
+ gboolean
+ gst_v4l2_get_attribute (GstV4l2Object * v4l2object,
+ int attribute_num, int *value)
+ {
+ struct v4l2_control control = { 0, };
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "getting value of attribute %d",
+ attribute_num);
+
+ if (!GST_V4L2_IS_OPEN (v4l2object))
+ return FALSE;
+
+ control.id = attribute_num;
+
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_CTRL, &control) < 0)
+ goto ctrl_failed;
+
+ *value = control.value;
+
+ return TRUE;
+
+ /* ERRORS */
+ ctrl_failed:
+ {
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ _("Failed to get value for control %d on device '%s'."),
+ attribute_num, v4l2object->videodev);
+ return FALSE;
+ }
+ }
+
+
+ /******************************************************
+ * gst_v4l2_set_attribute():
+ * try to set the value of one specific attribute
+ * return value: TRUE on success, FALSE on error
+ ******************************************************/
+ gboolean
+ gst_v4l2_set_attribute (GstV4l2Object * v4l2object,
+ int attribute_num, const int value)
+ {
+ struct v4l2_control control = { 0, };
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "setting value of attribute %d to %d",
+ attribute_num, value);
+
+ if (!GST_V4L2_IS_OPEN (v4l2object))
+ return FALSE;
+
+ control.id = attribute_num;
+ control.value = value;
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_S_CTRL, &control) < 0)
+ goto ctrl_failed;
+
+ return TRUE;
+
+ /* ERRORS */
+ ctrl_failed:
+ {
+ GST_WARNING_OBJECT (v4l2object,
+ _("Failed to set value %d for control %d on device '%s'."),
+ value, attribute_num, v4l2object->videodev);
+ return FALSE;
+ }
+ }
+
+ /******************************************************
+ * gst_v4l2_set_string_attribute():
+ * try to set the string value of one specific attribute
+ * return value: TRUE on success, FALSE on error
+ ******************************************************/
+ gboolean
+ gst_v4l2_set_string_attribute (GstV4l2Object * v4l2object,
+ int attribute_num, const char *value)
+ {
+ struct v4l2_ext_controls ctrls = { {0}, 1 };
+ struct v4l2_ext_control ctrl;
+ struct v4l2_queryctrl control = { 0, };
+
+ if (!GST_V4L2_IS_OPEN (v4l2object))
+ return FALSE;
+
+ control.id = attribute_num;
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_QUERYCTRL, &control) < 0) {
+ GST_WARNING_OBJECT (v4l2object,
+ "Failed to find control %d on device '%s'.",
+ attribute_num, v4l2object->videodev);
+ return TRUE;
+ }
+
+ if (control.type != V4L2_CTRL_TYPE_STRING) {
+ GST_WARNING_OBJECT (v4l2object,
+ "control %d is not string type on device '%s'.",
+ attribute_num, v4l2object->videodev);
+ return TRUE;
+ }
+
+ ctrl.id = attribute_num;
+ ctrl.size = strlen (value) + 1;
+ ctrl.string = g_malloc (ctrl.size);
+ strcpy (ctrl.string, value);
+
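+ /* V4L2_CTRL_ID2WHICH() extracts the control class from the control id,
+ * which VIDIOC_S_EXT_CTRLS needs in order to address the control. */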
+ ctrls.which = V4L2_CTRL_ID2WHICH (attribute_num);
+ ctrls.count = 1;
+ ctrls.controls = &ctrl;
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "setting value of attribute %d to %s",
+ attribute_num, value);
+
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_S_EXT_CTRLS, &ctrls) < 0)
+ goto ctrl_failed;
+
+ g_free (ctrl.string);
+
+ return TRUE;
+
+ /* ERRORS */
+ ctrl_failed:
+ {
+ GST_WARNING_OBJECT (v4l2object,
+ _("Failed to set value %s for control %d on device '%s'."),
+ value, attribute_num, v4l2object->videodev);
+ g_free (ctrl.string);
+ return FALSE;
+ }
+ }
+
+ static gboolean
+ set_control (GQuark field_id, const GValue * value, gpointer user_data)
+ {
+ GstV4l2Object *v4l2object = user_data;
+ GQuark normalised_field_id;
+ gpointer *d;
+
+ /* 32 bytes is the maximum size for a control name according to v4l2 */
+ gchar name[32];
+
+ /* Backwards compatibility: in the past GStreamer would normalise strings in
+ a subtly different way to v4l2-ctl. e.g. the kernel's "Focus (absolute)"
+ would become "focus__absolute_" whereas now it becomes "focus_absolute".
+ Please remove the following in GStreamer 1.5 for 1.6 */
+ strncpy (name, g_quark_to_string (field_id), sizeof (name));
+ name[31] = '\0';
+ gst_v4l2_normalise_control_name (name);
+ normalised_field_id = g_quark_from_string (name);
+ if (normalised_field_id != field_id)
+ g_warning ("In GStreamer 1.4 the way V4L2 control names were normalised "
+ "changed. Instead of setting \"%s\" please use \"%s\". The former is "
+ "deprecated and will be removed in a future version of GStreamer",
+ g_quark_to_string (field_id), name);
+ field_id = normalised_field_id;
+
+ d = g_datalist_id_get_data (&v4l2object->controls, field_id);
+ if (!d) {
+ GST_WARNING_OBJECT (v4l2object,
+ "Control '%s' does not exist or has an unsupported type.",
+ g_quark_to_string (field_id));
+ return TRUE;
+ }
+ if (G_VALUE_HOLDS (value, G_TYPE_INT)) {
+ gst_v4l2_set_attribute (v4l2object, GPOINTER_TO_INT (d),
+ g_value_get_int (value));
+ } else if (G_VALUE_HOLDS (value, G_TYPE_STRING)) {
+ gst_v4l2_set_string_attribute (v4l2object, GPOINTER_TO_INT (d),
+ g_value_get_string (value));
+ } else {
+ GST_WARNING_OBJECT (v4l2object,
+ "no compatible value expected for control '%s'.",
+ g_quark_to_string (field_id));
+ return TRUE;
+ }
+ return TRUE;
+ }
+
+ gboolean
+ gst_v4l2_set_controls (GstV4l2Object * v4l2object, GstStructure * controls)
+ {
+ return gst_structure_foreach (controls, set_control, v4l2object);
+ }
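+ /* This is typically driven by the "extra-controls" property; e.g.
+ * (illustrative):
+ * gst-launch-1.0 v4l2src extra-controls="c,brightness=128" ! ...
+ * where each field name is matched against the normalised control names
+ * collected by gst_v4l2_fill_lists(). */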
+
+ gboolean
+ gst_v4l2_get_input (GstV4l2Object * v4l2object, guint32 * input)
+ {
+ guint32 n;
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "trying to get input");
+
+ if (!GST_V4L2_IS_OPEN (v4l2object))
+ return FALSE;
+
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_INPUT, &n) < 0)
+ goto input_failed;
+
+ *input = n;
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "input: %u", n);
+
+ return TRUE;
+
+ /* ERRORS */
+ input_failed:
+ if (v4l2object->device_caps & V4L2_CAP_TUNER) {
+ /* only give a warning message if driver actually claims to have tuner
+ * support
+ */
+ GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
+ (_("Failed to get current input on device '%s'. May be it is a radio device"), v4l2object->videodev), GST_ERROR_SYSTEM);
+ }
+ return FALSE;
+ }
+
+ gboolean
+ gst_v4l2_set_input (GstV4l2Object * v4l2object, guint32 input)
+ {
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "trying to set input to %u", input);
+
+ if (!GST_V4L2_IS_OPEN (v4l2object))
+ return FALSE;
+
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_S_INPUT, &input) < 0)
+ goto input_failed;
+
+ return TRUE;
+
+ /* ERRORS */
+ input_failed:
+ if (v4l2object->device_caps & V4L2_CAP_TUNER) {
+ /* only give a warning message if driver actually claims to have tuner
+ * support
+ */
+ GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
+ (_("Failed to set input %u on device %s."),
+ input, v4l2object->videodev), GST_ERROR_SYSTEM);
+ }
+ return FALSE;
+ }
+
+ gboolean
+ gst_v4l2_query_input (GstV4l2Object * obj, struct v4l2_input * input)
+ {
+ gint ret;
+
+ ret = obj->ioctl (obj->video_fd, VIDIOC_ENUMINPUT, input);
+ if (ret < 0) {
+ GST_WARNING_OBJECT (obj->dbg_obj, "Failed to read input state: %s (%i)",
+ g_strerror (errno), errno);
+ return FALSE;
+ }
+
+ return TRUE;
+ }
+
+ gboolean
+ gst_v4l2_get_output (GstV4l2Object * v4l2object, guint32 * output)
+ {
+ guint32 n;
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "trying to get output");
+
+ if (!GST_V4L2_IS_OPEN (v4l2object))
+ return FALSE;
+
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_OUTPUT, &n) < 0)
+ goto output_failed;
+
+ *output = n;
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "output: %u", n);
+
+ return TRUE;
+
+ /* ERRORS */
+ output_failed:
+ if (v4l2object->device_caps & V4L2_CAP_TUNER) {
+ /* only give a warning message if driver actually claims to have tuner
+ * support
+ */
+ GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
+ (_("Failed to get current output on device '%s'. May be it is a radio device"), v4l2object->videodev), GST_ERROR_SYSTEM);
+ }
+ return FALSE;
+ }
+
+ gboolean
+ gst_v4l2_set_output (GstV4l2Object * v4l2object, guint32 output)
+ {
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "trying to set output to %u", output);
+
+ if (!GST_V4L2_IS_OPEN (v4l2object))
+ return FALSE;
+
+ if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_S_OUTPUT, &output) < 0)
+ goto output_failed;
+
+ return TRUE;
+
+ /* ERRORS */
+ output_failed:
+ if (v4l2object->device_caps & V4L2_CAP_TUNER) {
+ /* only give a warning message if driver actually claims to have tuner
+ * support
+ */
+ GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
+ (_("Failed to set output %u on device %s."),
+ output, v4l2object->videodev), GST_ERROR_SYSTEM);
+ }
+ return FALSE;
+ }
+
+ static const gchar *
+ gst_v4l2_event_to_string (guint32 event)
+ {
+ switch (event) {
+ case V4L2_EVENT_ALL:
+ return "ALL";
+ case V4L2_EVENT_VSYNC:
+ return "VSYNC";
+ case V4L2_EVENT_EOS:
+ return "EOS";
+ case V4L2_EVENT_CTRL:
+ return "CTRL";
+ case V4L2_EVENT_FRAME_SYNC:
+ return "FRAME_SYNC";
+ case V4L2_EVENT_SOURCE_CHANGE:
+ return "SOURCE_CHANGE";
+ case V4L2_EVENT_MOTION_DET:
+ return "MOTION_DET";
+ default:
+ break;
+ }
+
+ return "UNKNOWN";
+ }
+
+ gboolean
+ gst_v4l2_subscribe_event (GstV4l2Object * v4l2object, guint32 event, guint32 id)
+ {
+ struct v4l2_event_subscription sub = {.type = event,.id = id, };
+ gint ret;
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Subscribing to '%s' event",
+ gst_v4l2_event_to_string (event));
+
+ if (!GST_V4L2_IS_OPEN (v4l2object))
+ return FALSE;
+
+ ret = v4l2object->ioctl (v4l2object->video_fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
+ if (ret < 0)
+ goto failed;
+
+ return TRUE;
+
+ /* ERRORS */
+ failed:
+ {
+ if (errno == ENOTTY || errno == EINVAL) {
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
+ "Cannot subscribe to '%s' event: %s",
+ gst_v4l2_event_to_string (event), "not supported");
+ } else {
+ GST_ERROR_OBJECT (v4l2object->dbg_obj,
+ "Cannot subscribe to '%s' event: %s",
+ gst_v4l2_event_to_string (event), g_strerror (errno));
+ }
+ return FALSE;
+ }
+ }
+
+ gboolean
+ gst_v4l2_dequeue_event (GstV4l2Object * v4l2object, struct v4l2_event * event)
+ {
+ gint ret;
+
+ if (!GST_V4L2_IS_OPEN (v4l2object))
+ return FALSE;
+
+ ret = v4l2object->ioctl (v4l2object->video_fd, VIDIOC_DQEVENT, event);
+
+ if (ret < 0) {
+ GST_ERROR_OBJECT (v4l2object->dbg_obj, "DQEVENT failed: %s",
+ g_strerror (errno));
+ return FALSE;
+ }
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Dequeued a '%s' event.",
+ gst_v4l2_event_to_string (event->type));
+
+ return TRUE;
+ }
+
+ gboolean
+ gst_v4l2_set_dv_timings (GstV4l2Object * v4l2object,
+ struct v4l2_dv_timings * timings)
+ {
+ gint ret;
+
+ if (!GST_V4L2_IS_OPEN (v4l2object))
+ return FALSE;
+
+ ret = v4l2object->ioctl (v4l2object->video_fd, VIDIOC_S_DV_TIMINGS, timings);
+
+ if (ret < 0) {
+ GST_ERROR_OBJECT (v4l2object->dbg_obj, "S_DV_TIMINGS failed: %s (%i)",
+ g_strerror (errno), errno);
+ return FALSE;
+ }
+
+ return TRUE;
+ }
+
+ gboolean
+ gst_v4l2_query_dv_timings (GstV4l2Object * v4l2object,
+ struct v4l2_dv_timings * timings)
+ {
+ gint ret;
+
+ if (!GST_V4L2_IS_OPEN (v4l2object))
+ return FALSE;
+
+ ret = v4l2object->ioctl (v4l2object->video_fd, VIDIOC_QUERY_DV_TIMINGS,
+ timings);
+
+ if (ret < 0) {
+ switch (errno) {
+ case ENODATA:
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
+ "QUERY_DV_TIMINGS not supported for this input/output");
+ break;
+ case ENOLINK:
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
+ "No timings could be detected because no signal was found.");
+ break;
+ case ENOLCK:
+ GST_INFO_OBJECT (v4l2object->dbg_obj,
+ "The signal was unstable and the hardware could not lock on to it.");
+ break;
+ case ERANGE:
+ GST_INFO_OBJECT (v4l2object->dbg_obj,
+ "Timings were found, but they are out of range of the hardware capabilities.");
+ break;
+ default:
+ GST_ERROR_OBJECT (v4l2object->dbg_obj,
+ "QUERY_DV_TIMINGS failed: %s (%i)", g_strerror (errno), errno);
+ break;
+ }
+
+ return FALSE;
+ }
+
+ if (timings->type != V4L2_DV_BT_656_1120) {
+ GST_FIXME_OBJECT (v4l2object->dbg_obj, "Unsupported DV Timings type (%i)",
+ timings->type);
+ return FALSE;
+ }
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Detected DV Timings (%i x %i)",
+ timings->bt.width, timings->bt.height);
+
+ return TRUE;
+ }
--- /dev/null
- /* create a caps for all wave formats supported by the device
+ /* GStreamer
+ * Copyright (C) 2005 Sebastien Moutte <sebastien@moutte.net>
+ *
+ * gstwaveformsink.c:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /**
+ * SECTION:element-waveformsink
+ * @title: waveformsink
+ *
+ * This element lets you output sound using the Windows WaveForm API.
+ *
+ * Note that you should almost always use generic audio conversion elements
+ * like audioconvert and audioresample in front of an audiosink to make sure
+ * your pipeline works under all circumstances (those conversion elements will
+ * act in passthrough-mode if no conversion is necessary).
+ *
+ * ## Example pipelines
+ * |[
+ * gst-launch-1.0 -v audiotestsrc ! audioconvert ! volume volume=0.1 ! waveformsink
+ * ]| will output a sine wave (continuous beep sound) to your sound card (with
+ * a very low volume as a precaution).
+ * |[
+ * gst-launch-1.0 -v filesrc location=music.ogg ! decodebin ! audioconvert ! audioresample ! waveformsink
+ * ]| will play an Ogg/Vorbis audio file and output it.
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include "gstwaveformsink.h"
+
+ GST_DEBUG_CATEGORY_STATIC (waveformsink_debug);
+
+ static void gst_waveform_sink_finalise (GObject * object);
+ static void gst_waveform_sink_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+ static void gst_waveform_sink_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+ static GstCaps *gst_waveform_sink_getcaps (GstBaseSink * bsink,
+ GstCaps * filter);
+
+ /************************************************************************/
+ /* GstAudioSink functions */
+ /************************************************************************/
+ static gboolean gst_waveform_sink_prepare (GstAudioSink * asink,
+ GstAudioRingBufferSpec * spec);
+ static gboolean gst_waveform_sink_unprepare (GstAudioSink * asink);
+ static gboolean gst_waveform_sink_open (GstAudioSink * asink);
+ static gboolean gst_waveform_sink_close (GstAudioSink * asink);
+ static gint gst_waveform_sink_write (GstAudioSink * asink, gpointer data,
+ guint length);
+ static guint gst_waveform_sink_delay (GstAudioSink * asink);
+ static void gst_waveform_sink_reset (GstAudioSink * asink);
+
+ /************************************************************************/
+ /* Utils */
+ /************************************************************************/
+ GstCaps *gst_waveform_sink_create_caps (gint rate, gint channels,
+ const gchar * format);
+ WAVEHDR *bufferpool_get_buffer (GstWaveFormSink * wfsink, gpointer data,
+ guint length);
+ void CALLBACK waveOutProc (HWAVEOUT hwo, UINT uMsg, DWORD_PTR dwInstance,
+ DWORD_PTR dwParam1, DWORD_PTR dwParam2);
+
+ static GstStaticPadTemplate waveformsink_sink_factory =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) { " GST_AUDIO_NE (S16) ", S8 }, "
+ "layout = (string) interleaved, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 2 ]"));
+
+ #define gst_waveform_sink_parent_class parent_class
+ G_DEFINE_TYPE (GstWaveFormSink, gst_waveform_sink, GST_TYPE_AUDIO_SINK);
+
+ static void
+ gst_waveform_sink_class_init (GstWaveFormSinkClass * klass)
+ {
+ GObjectClass *gobject_class;
+ GstBaseSinkClass *gstbasesink_class;
+ GstAudioSinkClass *gstaudiosink_class;
+ GstElementClass *element_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstbasesink_class = (GstBaseSinkClass *) klass;
+ gstaudiosink_class = (GstAudioSinkClass *) klass;
+ element_class = GST_ELEMENT_CLASS (klass);
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ gobject_class->finalize = gst_waveform_sink_finalise;
+ gobject_class->get_property = gst_waveform_sink_get_property;
+ gobject_class->set_property = gst_waveform_sink_set_property;
+
+ gstbasesink_class->get_caps = GST_DEBUG_FUNCPTR (gst_waveform_sink_getcaps);
+
+ gstaudiosink_class->prepare = GST_DEBUG_FUNCPTR (gst_waveform_sink_prepare);
+ gstaudiosink_class->unprepare =
+ GST_DEBUG_FUNCPTR (gst_waveform_sink_unprepare);
+ gstaudiosink_class->open = GST_DEBUG_FUNCPTR (gst_waveform_sink_open);
+ gstaudiosink_class->close = GST_DEBUG_FUNCPTR (gst_waveform_sink_close);
+ gstaudiosink_class->write = GST_DEBUG_FUNCPTR (gst_waveform_sink_write);
+ gstaudiosink_class->delay = GST_DEBUG_FUNCPTR (gst_waveform_sink_delay);
+ gstaudiosink_class->reset = GST_DEBUG_FUNCPTR (gst_waveform_sink_reset);
+
+ GST_DEBUG_CATEGORY_INIT (waveformsink_debug, "waveformsink", 0,
+ "Waveform sink");
+
+ gst_element_class_set_static_metadata (element_class, "WaveForm Audio Sink",
+ "Sink/Audio",
+ "Output to a sound card via WaveForm API",
+ "Sebastien Moutte <sebastien@moutte.net>");
+
+ gst_element_class_add_static_pad_template (element_class,
+ &waveformsink_sink_factory);
+ }
+
+ static void
+ gst_waveform_sink_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+ /* GstWaveFormSink *wfsink = GST_WAVEFORM_SINK (object); */
+
+ switch (prop_id) {
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static void
+ gst_waveform_sink_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+ {
+ /* GstWaveFormSink *wfsink = GST_WAVEFORM_SINK (object); */
+
+ switch (prop_id) {
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static void
+ gst_waveform_sink_init (GstWaveFormSink * wfsink)
+ {
+ /* initialize members */
+ wfsink->hwaveout = NULL;
+ wfsink->cached_caps = NULL;
+ wfsink->wave_buffers = NULL;
+ wfsink->write_buffer = 0;
+ wfsink->buffer_count = BUFFER_COUNT;
+ wfsink->buffer_size = BUFFER_SIZE;
+ wfsink->free_buffers_count = wfsink->buffer_count;
+ wfsink->bytes_in_queue = 0;
+
+ InitializeCriticalSection (&wfsink->critic_wave);
+ }
+
+ static void
+ gst_waveform_sink_finalise (GObject * object)
+ {
+ GstWaveFormSink *wfsink = GST_WAVEFORM_SINK (object);
+
+ if (wfsink->cached_caps) {
+ gst_caps_unref (wfsink->cached_caps);
+ wfsink->cached_caps = NULL;
+ }
+
+ DeleteCriticalSection (&wfsink->critic_wave);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ static GstCaps *
+ gst_waveform_sink_getcaps (GstBaseSink * bsink, GstCaps * filter)
+ {
+ GstWaveFormSink *wfsink = GST_WAVEFORM_SINK (bsink);
+ MMRESULT mmresult;
+ WAVEOUTCAPS wocaps;
+ GstCaps *caps, *caps_temp;
+
+ /* return the cached caps if already defined */
+ if (wfsink->cached_caps) {
+ return gst_caps_ref (wfsink->cached_caps);
+ }
+
+ /* get the default device caps */
+ mmresult = waveOutGetDevCaps (WAVE_MAPPER, &wocaps, sizeof (wocaps));
+ if (mmresult != MMSYSERR_NOERROR) {
+ waveOutGetErrorText (mmresult, wfsink->error_string, ERROR_LENGTH - 1);
+ GST_ELEMENT_ERROR (wfsink, RESOURCE, SETTINGS,
+ ("gst_waveform_sink_getcaps: waveOutGetDevCaps failed error=>%s",
+ wfsink->error_string), (NULL));
+ return NULL;
+ }
+
+ caps = gst_caps_new_empty ();
+
++ /* create a caps for all wave formats supported by the device
+ starting with the best quality format */
+ if (wocaps.dwFormats & WAVE_FORMAT_96S16) {
+ caps_temp = gst_waveform_sink_create_caps (96000, 2, GST_AUDIO_NE (S16));
+ if (caps_temp) {
+ gst_caps_append (caps, caps_temp);
+ }
+ }
+ if (wocaps.dwFormats & WAVE_FORMAT_96S08) {
+ caps_temp = gst_waveform_sink_create_caps (96000, 2, "S8");
+ if (caps_temp) {
+ gst_caps_append (caps, caps_temp);
+ }
+ }
+ if (wocaps.dwFormats & WAVE_FORMAT_96M16) {
+ caps_temp = gst_waveform_sink_create_caps (96000, 1, GST_AUDIO_NE (S16));
+ if (caps_temp) {
+ gst_caps_append (caps, caps_temp);
+ }
+ }
+ if (wocaps.dwFormats & WAVE_FORMAT_96M08) {
+ caps_temp = gst_waveform_sink_create_caps (96000, 1, "S8");
+ if (caps_temp) {
+ gst_caps_append (caps, caps_temp);
+ }
+ }
+ if (wocaps.dwFormats & WAVE_FORMAT_4S16) {
+ caps_temp = gst_waveform_sink_create_caps (44100, 2, GST_AUDIO_NE (S16));
+ if (caps_temp) {
+ gst_caps_append (caps, caps_temp);
+ }
+ }
+ if (wocaps.dwFormats & WAVE_FORMAT_4S08) {
+ caps_temp = gst_waveform_sink_create_caps (44100, 2, "S8");
+ if (caps_temp) {
+ gst_caps_append (caps, caps_temp);
+ }
+ }
+ if (wocaps.dwFormats & WAVE_FORMAT_4M16) {
+ caps_temp = gst_waveform_sink_create_caps (44100, 1, GST_AUDIO_NE (S16));
+ if (caps_temp) {
+ gst_caps_append (caps, caps_temp);
+ }
+ }
+ if (wocaps.dwFormats & WAVE_FORMAT_4M08) {
+ caps_temp = gst_waveform_sink_create_caps (44100, 1, "S8");
+ if (caps_temp) {
+ gst_caps_append (caps, caps_temp);
+ }
+ }
+ if (wocaps.dwFormats & WAVE_FORMAT_2S16) {
+ caps_temp = gst_waveform_sink_create_caps (22050, 2, GST_AUDIO_NE (S16));
+ if (caps_temp) {
+ gst_caps_append (caps, caps_temp);
+ }
+ }
+ if (wocaps.dwFormats & WAVE_FORMAT_2S08) {
+ caps_temp = gst_waveform_sink_create_caps (22050, 2, "S8");
+ if (caps_temp) {
+ gst_caps_append (caps, caps_temp);
+ }
+ }
+ if (wocaps.dwFormats & WAVE_FORMAT_2M16) {
+ caps_temp = gst_waveform_sink_create_caps (22050, 1, GST_AUDIO_NE (S16));
+ if (caps_temp) {
+ gst_caps_append (caps, caps_temp);
+ }
+ }
+ if (wocaps.dwFormats & WAVE_FORMAT_2M08) {
+ caps_temp = gst_waveform_sink_create_caps (22050, 1, "S8");
+ if (caps_temp) {
+ gst_caps_append (caps, caps_temp);
+ }
+ }
+ if (wocaps.dwFormats & WAVE_FORMAT_1S16) {
+ caps_temp = gst_waveform_sink_create_caps (11025, 2, GST_AUDIO_NE (S16));
+ if (caps_temp) {
+ gst_caps_append (caps, caps_temp);
+ }
+ }
+ if (wocaps.dwFormats & WAVE_FORMAT_1S08) {
+ caps_temp = gst_waveform_sink_create_caps (11025, 2, "S8");
+ if (caps_temp) {
+ gst_caps_append (caps, caps_temp);
+ }
+ }
+ if (wocaps.dwFormats & WAVE_FORMAT_1M16) {
+ caps_temp = gst_waveform_sink_create_caps (11025, 1, GST_AUDIO_NE (S16));
+ if (caps_temp) {
+ gst_caps_append (caps, caps_temp);
+ }
+ }
+ if (wocaps.dwFormats & WAVE_FORMAT_1M08) {
+ caps_temp = gst_waveform_sink_create_caps (11025, 1, "S8");
+ if (caps_temp) {
+ gst_caps_append (caps, caps_temp);
+ }
+ }
+
+ if (gst_caps_is_empty (caps)) {
+ gst_caps_unref (caps);
+ caps = NULL;
+ } else {
+ wfsink->cached_caps = gst_caps_ref (caps);
+ }
+
+ GST_CAT_LOG_OBJECT (waveformsink_debug, wfsink,
+ "Returning caps %" GST_PTR_FORMAT, caps);
+
+ return caps;
+ }
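+
+ /* The if-chain in _getcaps () is a flag-to-caps mapping; a behaviourally
+  * equivalent, table-driven sketch is shown below. The wf_format_map table
+  * and the loop are hypothetical (not part of this file); the WAVE_FORMAT_*
+  * flags and gst_waveform_sink_create_caps () are the ones used above. */
+ #if 0
+ static const struct
+ {
+   DWORD flag;
+   gint rate, channels;
+   const gchar *format;
+ } wf_format_map[] = {
+   {WAVE_FORMAT_96S16, 96000, 2, GST_AUDIO_NE (S16)},
+   {WAVE_FORMAT_96S08, 96000, 2, "S8"},
+   /* ... remaining WAVE_FORMAT_* entries, best quality first ... */
+   {WAVE_FORMAT_1M08, 11025, 1, "S8"},
+ };
+
+ guint i;
+ for (i = 0; i < G_N_ELEMENTS (wf_format_map); i++) {
+   if (wocaps.dwFormats & wf_format_map[i].flag) {
+     caps_temp = gst_waveform_sink_create_caps (wf_format_map[i].rate,
+         wf_format_map[i].channels, wf_format_map[i].format);
+     if (caps_temp)
+       gst_caps_append (caps, caps_temp);
+   }
+ }
+ #endif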
+
+ static gboolean
+ gst_waveform_sink_open (GstAudioSink * asink)
+ {
+ /* GstWaveFormSink *wfsink = GST_WAVEFORM_SINK (asink); */
+
+   /* nothing to do here; the device can only be opened in prepare(), once the format is known */
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_waveform_sink_prepare (GstAudioSink * asink, GstAudioRingBufferSpec * spec)
+ {
+ GstWaveFormSink *wfsink = GST_WAVEFORM_SINK (asink);
+ WAVEFORMATEX wfx;
+ MMRESULT mmresult;
+ guint index;
+
+ /* setup waveformex structure with the input ringbuffer specs */
+ memset (&wfx, 0, sizeof (wfx));
+ wfx.cbSize = 0;
+ wfx.wFormatTag = WAVE_FORMAT_PCM;
+ wfx.nChannels = spec->info.channels;
+ wfx.nSamplesPerSec = spec->info.rate;
+ wfx.wBitsPerSample = (spec->info.bpf * 8) / wfx.nChannels;
+ wfx.nBlockAlign = spec->info.bpf;
+ wfx.nAvgBytesPerSec = wfx.nSamplesPerSec * wfx.nBlockAlign;
+
+   /* save bytes per frame (all channels) so delay() can convert queued bytes to frames */
+ wfsink->bytes_per_sample = spec->info.bpf;
+
+ /* open the default audio device with the given caps */
+ mmresult = waveOutOpen (&wfsink->hwaveout, WAVE_MAPPER,
+ &wfx, (DWORD_PTR) waveOutProc, (DWORD_PTR) wfsink, CALLBACK_FUNCTION);
+ if (mmresult != MMSYSERR_NOERROR) {
+ waveOutGetErrorText (mmresult, wfsink->error_string, ERROR_LENGTH - 1);
+ GST_ELEMENT_ERROR (wfsink, RESOURCE, OPEN_WRITE,
+ ("gst_waveform_sink_prepare: waveOutOpen failed error=>%s",
+ wfsink->error_string), (NULL));
+ return FALSE;
+ }
+
+   /* all buffers start out free; their size and count were fixed in _init() */
+ wfsink->free_buffers_count = wfsink->buffer_count;
+
+ /* allocate wave buffers */
+ wfsink->wave_buffers = (WAVEHDR *) g_new0 (WAVEHDR, wfsink->buffer_count);
+ if (!wfsink->wave_buffers) {
+ GST_ELEMENT_ERROR (wfsink, RESOURCE, OPEN_WRITE,
+ ("gst_waveform_sink_prepare: Failed to allocate wave buffer headers (buffer count=%d)",
+ wfsink->buffer_count), (NULL));
+ return FALSE;
+ }
+
+ /* setup headers */
+ for (index = 0; index < wfsink->buffer_count; index++) {
+ wfsink->wave_buffers[index].dwBufferLength = wfsink->buffer_size;
+ wfsink->wave_buffers[index].lpData = g_new0 (gchar, wfsink->buffer_size);
+ }
+
+ return TRUE;
+ }
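+
+ /* For reference: the PCM WAVEFORMATEX fields filled in by _prepare () are
+  * not independent. waveOutOpen () rejects inconsistent values with
+  * WAVERR_BADFORMAT, which is why everything is derived from spec->info:
+  *
+  *   nBlockAlign     = nChannels * wBitsPerSample / 8   (== spec->info.bpf)
+  *   nAvgBytesPerSec = nSamplesPerSec * nBlockAlign
+  */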
+
+ static gboolean
+ gst_waveform_sink_unprepare (GstAudioSink * asink)
+ {
+ GstWaveFormSink *wfsink = GST_WAVEFORM_SINK (asink);
+
+ /* free wave buffers */
+ if (wfsink->wave_buffers) {
+ guint index;
+
+ for (index = 0; index < wfsink->buffer_count; index++) {
+ if (wfsink->wave_buffers[index].dwFlags & WHDR_PREPARED) {
+ MMRESULT mmresult = waveOutUnprepareHeader (wfsink->hwaveout,
+ &wfsink->wave_buffers[index], sizeof (WAVEHDR));
+ if (mmresult != MMSYSERR_NOERROR) {
+ waveOutGetErrorText (mmresult, wfsink->error_string,
+ ERROR_LENGTH - 1);
+ GST_CAT_WARNING_OBJECT (waveformsink_debug, wfsink,
+ "gst_waveform_sink_unprepare: Error unpreparing buffer => %s",
+ wfsink->error_string);
+ }
+ }
+ g_free (wfsink->wave_buffers[index].lpData);
+ }
+ g_free (wfsink->wave_buffers);
+ wfsink->wave_buffers = NULL;
+ }
+
+ /* close waveform-audio output device */
+ if (wfsink->hwaveout) {
+ waveOutClose (wfsink->hwaveout);
+ wfsink->hwaveout = NULL;
+ }
+
+ return TRUE;
+ }
+
+ static gboolean
+ gst_waveform_sink_close (GstAudioSink * asink)
+ {
+ /* GstWaveFormSink *wfsink = GST_WAVEFORM_SINK (asink); */
+
+ return TRUE;
+ }
+
+ static gint
+ gst_waveform_sink_write (GstAudioSink * asink, gpointer data, guint length)
+ {
+ GstWaveFormSink *wfsink = GST_WAVEFORM_SINK (asink);
+ WAVEHDR *waveheader;
+ MMRESULT mmresult;
+ guint bytes_to_write = length;
+ guint remaining_length = length;
+
+ wfsink->bytes_in_queue += length;
+
+ while (remaining_length > 0) {
+ if (wfsink->free_buffers_count == 0) {
+ /* no free buffer available, wait for one */
+ Sleep (10);
+ continue;
+ }
+
+ /* get the current write buffer header */
+ waveheader = &wfsink->wave_buffers[wfsink->write_buffer];
+
+ /* unprepare the header if needed */
+ if (waveheader->dwFlags & WHDR_PREPARED) {
+ mmresult =
+ waveOutUnprepareHeader (wfsink->hwaveout, waveheader,
+ sizeof (WAVEHDR));
+ if (mmresult != MMSYSERR_NOERROR) {
+ waveOutGetErrorText (mmresult, wfsink->error_string, ERROR_LENGTH - 1);
+ GST_CAT_WARNING_OBJECT (waveformsink_debug, wfsink,
+ "Error unpreparing buffer => %s", wfsink->error_string);
+ }
+ }
+
+ if (wfsink->buffer_size - waveheader->dwUser >= remaining_length)
+ bytes_to_write = remaining_length;
+ else
+ bytes_to_write = wfsink->buffer_size - waveheader->dwUser;
+
+ memcpy (waveheader->lpData + waveheader->dwUser, data, bytes_to_write);
+ waveheader->dwUser += bytes_to_write;
+ remaining_length -= bytes_to_write;
+ data = (guint8 *) data + bytes_to_write;
+
+ if (waveheader->dwUser == wfsink->buffer_size) {
+       /* the buffer is full: prepare it and hand it to the device */
+ mmresult =
+ waveOutPrepareHeader (wfsink->hwaveout, waveheader, sizeof (WAVEHDR));
+ if (mmresult != MMSYSERR_NOERROR) {
+ waveOutGetErrorText (mmresult, wfsink->error_string, ERROR_LENGTH - 1);
+ GST_CAT_WARNING_OBJECT (waveformsink_debug, wfsink,
+ "gst_waveform_sink_write: Error preparing header => %s",
+ wfsink->error_string);
+ }
+ mmresult = waveOutWrite (wfsink->hwaveout, waveheader, sizeof (WAVEHDR));
+ if (mmresult != MMSYSERR_NOERROR) {
+ waveOutGetErrorText (mmresult, wfsink->error_string, ERROR_LENGTH - 1);
+ GST_CAT_WARNING_OBJECT (waveformsink_debug, wfsink,
+ "gst_waveform_sink_write: Error writing buffer to the device => %s",
+ wfsink->error_string);
+ }
+
+ EnterCriticalSection (&wfsink->critic_wave);
+ wfsink->free_buffers_count--;
+ LeaveCriticalSection (&wfsink->critic_wave);
+
+ wfsink->write_buffer++;
+ wfsink->write_buffer %= wfsink->buffer_count;
+ waveheader->dwUser = 0;
+ wfsink->bytes_in_queue = 0;
+ GST_CAT_LOG_OBJECT (waveformsink_debug, wfsink,
+ "gst_waveform_sink_write: Writing a buffer to the device (free buffers remaining=%d, write buffer=%d)",
+ wfsink->free_buffers_count, wfsink->write_buffer);
+ }
+ }
+
+ return length;
+ }
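+
+ /* The Sleep (10) poll in _write () is the simplest way to wait for WOM_DONE,
+  * at the cost of up to ~10 ms extra wakeup latency when the queue is full.
+  * A minimal event-based sketch, assuming a hypothetical buffer_done_event
+  * HANDLE were added to GstWaveFormSink (it is not): the callback would
+  * SetEvent () it on WOM_DONE, and the wait would become: */
+ #if 0
+   while (wfsink->free_buffers_count == 0)
+     WaitForSingleObject (wfsink->buffer_done_event, INFINITE);
+ #endif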
+
+ static guint
+ gst_waveform_sink_delay (GstAudioSink * asink)
+ {
+   /* return the number of frames still queued (device buffers + partially filled buffer) */
+ GstWaveFormSink *wfsink = GST_WAVEFORM_SINK (asink);
+ guint bytes_in_device =
+ (wfsink->buffer_count - wfsink->free_buffers_count) * wfsink->buffer_size;
+ guint delay =
+ (bytes_in_device + wfsink->bytes_in_queue) / wfsink->bytes_per_sample;
+ return delay;
+ }
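+
+ /* The value _delay () returns is in frames; the base class converts it to
+  * time using the negotiated rate. For reference, with `rate` standing in
+  * for spec->info.rate, that conversion is:
+  *
+  *   GstClockTime t = gst_util_uint64_scale_int (delay, GST_SECOND, rate);
+  */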
+
+ static void
+ gst_waveform_sink_reset (GstAudioSink * asink)
+ {
+ GstWaveFormSink *wfsink = GST_WAVEFORM_SINK (asink);
+ MMRESULT mmresult = waveOutReset (wfsink->hwaveout);
+
+ if (mmresult != MMSYSERR_NOERROR) {
+ waveOutGetErrorText (mmresult, wfsink->error_string, ERROR_LENGTH - 1);
+ GST_CAT_WARNING_OBJECT (waveformsink_debug, wfsink,
+ "gst_waveform_sink_reset: Error resetting waveform-audio device => %s",
+ wfsink->error_string);
+ }
+ }
+
+ GstCaps *
+ gst_waveform_sink_create_caps (gint rate, gint channels, const gchar * format)
+ {
+ GstCaps *caps = NULL;
+
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, format,
+ "layout", G_TYPE_STRING, "interleaved",
+ "channels", G_TYPE_INT, channels, "rate", G_TYPE_INT, rate, NULL);
+ return caps;
+ }
+
+ void CALLBACK
+ waveOutProc (HWAVEOUT hwo,
+ UINT uMsg, DWORD_PTR dwInstance, DWORD_PTR dwParam1, DWORD_PTR dwParam2)
+ {
+ GstWaveFormSink *wfsink = (GstWaveFormSink *) dwInstance;
+
+ if (uMsg == WOM_DONE) {
+ EnterCriticalSection (&wfsink->critic_wave);
+ wfsink->free_buffers_count++;
+ LeaveCriticalSection (&wfsink->critic_wave);
+ }
+ }
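+
+ /* Note on waveOutProc: WinMM forbids calling other waveOut* functions from
+  * inside a CALLBACK_FUNCTION handler (doing so can deadlock in the driver),
+  * though EnterCriticalSection/LeaveCriticalSection and SetEvent are
+  * explicitly allowed. That is why WOM_DONE only bumps the free-buffer
+  * counter here and all unprepare/reuse work stays in the write () loop. */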