--- /dev/null
+ /* GStreamer
+ * Copyright (C) 2012 GStreamer developers
+ *
+ * allocators.h: single include header for gst-allocators library
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifndef __GST_ALLOCATORS_H__
+ #define __GST_ALLOCATORS_H__
+
+ #include <gst/allocators/allocators-prelude.h>
+
+ #include <gst/allocators/gstdmabuf.h>
+ #include <gst/allocators/gstfdmemory.h>
+ #include <gst/allocators/gstphysmemory.h>
+
++#ifdef USE_TBM
++#include <gst/allocators/gsttizenmemory.h>
++#include <gst/allocators/gsttizenbufferpool.h>
++#endif
++
+ #endif /* __GST_ALLOCATORS_H__ */
+
--- /dev/null
--- /dev/null
++/*
++ * GStreamer tizen buffer pool
++ * Copyright (c) 2018 Samsung Electronics Co., Ltd.
++ * Author: Sejun Park <sejun79.park@samsung.com>
++ *
++ * This library is free software; you can redistribute it and/or
++ * modify it under the terms of the GNU Library General Public
++ * License as published by the Free Software Foundation; either
++ * version 2 of the License, or (at your option) any later version.
++ *
++ * This library is distributed in the hope that it will be useful,
++ * but WITHOUT ANY WARRANTY; without even the implied warranty of
++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
++ * Library General Public License for more details.
++ *
++ * You should have received a copy of the GNU Library General Public
++ * License along with this library; if not, write to the
++ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
++ * Boston, MA 02111-1307, USA.
++ */
++#ifdef HAVE_CONFIG_H
++#include "config.h"
++#endif
++
++#include "gsttizenbufferpool.h"
++#include <tbm_surface_internal.h>
++
++GST_DEBUG_CATEGORY_STATIC (gst_tizen_vidbufpool_debug);
++#define GST_CAT_DEFAULT gst_tizen_vidbufpool_debug
++
++/* Quark used to attach the tbm surface pointer to pooled buffers. */
++static GQuark gst_tizen_buffer_data_quark = 0;
++
++static void gst_tizen_buffer_pool_finalize (GObject * object);
++
++#define DEBUG_INIT \
++  GST_DEBUG_CATEGORY_INIT (gst_tizen_vidbufpool_debug, "tizenvideopool", 0, \
++      "Tizen bufferpool");
++
++/* The private struct must be complete before G_ADD_PRIVATE can take its
++ * size, so it is defined ahead of the type macro. */
++struct _GstTizenBufferPoolPrivate
++{
++  GstAllocator *allocator;
++};
++
++#define gst_tizen_buffer_pool_parent_class parent_class
++/* G_ADD_PRIVATE is required: gst_tizen_buffer_pool_init () calls
++ * gst_tizen_buffer_pool_get_instance_private (), which is only generated
++ * when the private data is registered here. */
++G_DEFINE_TYPE_WITH_CODE (GstTizenBufferPool, gst_tizen_buffer_pool,
++    GST_TYPE_BUFFER_POOL, G_ADD_PRIVATE (GstTizenBufferPool) DEBUG_INIT);
++
++/* Reports the pool options this pool understands. */
++static const gchar **
++gst_tizen_buffer_pool_get_options (GstBufferPool * pool)
++{
++  static const gchar *options[] = {
++    GST_BUFFER_POOL_OPTION_VIDEO_META,
++    GST_BUFFER_POOL_OPTION_TIZEN_META,
++    NULL
++  };
++
++  return options;
++}
++
++/* Parses caps from @config, computes the per-buffer size from a probe
++ * tbm surface and records which metas to attach.  Returns FALSE on any
++ * invalid configuration. */
++static gboolean
++gst_tizen_buffer_pool_set_config (GstBufferPool * pool,
++    GstStructure * config)
++{
++  GstTizenBufferPool *_tpool = GST_TIZEN_BUFFER_POOL_CAST (pool);
++  GstVideoInfo info;
++  GstCaps *caps;
++  tbm_format format;
++  tbm_surface_h surface;
++  guint min_buffers, max_buffers;
++  int size;
++
++  if (!gst_buffer_pool_config_get_params (config, &caps, NULL, &min_buffers, &max_buffers))
++    goto wrong_config;
++
++  if (caps == NULL)
++    goto no_caps;
++
++  /* now parse the caps from the config */
++  if (!gst_video_info_from_caps (&info, caps))
++    goto wrong_caps;
++
++  if (GST_VIDEO_INFO_FORMAT (&info) == GST_VIDEO_FORMAT_UNKNOWN)
++    goto unknown_format;
++
++  _tpool->info = info;
++
++  format = gst_video_format_to_tbm_format (GST_VIDEO_INFO_FORMAT (&info));
++
++  /* Create a probe surface solely to learn the allocation size for this
++   * resolution/format; it is destroyed right away. */
++  surface = tbm_surface_create (GST_VIDEO_INFO_WIDTH (&info), GST_VIDEO_INFO_HEIGHT (&info), format);
++  if (surface == NULL)
++    goto surface_failed;
++
++  size = tbm_surface_internal_get_size (surface);
++  tbm_surface_destroy (surface);
++
++  /* enable metadata based on config of the pool */
++  _tpool->add_videometa = gst_buffer_pool_config_has_option (config,
++      GST_BUFFER_POOL_OPTION_VIDEO_META);
++
++  /* parse extra alignment info */
++  _tpool->add_tizenmeta = gst_buffer_pool_config_has_option (config,
++      GST_BUFFER_POOL_OPTION_TIZEN_META);
++
++  gst_buffer_pool_config_set_params (config, caps, size, min_buffers,
++      max_buffers);
++
++  GST_DEBUG_OBJECT (_tpool, "min : %u, max : %u, size : %d", min_buffers, max_buffers, size);
++  return GST_BUFFER_POOL_CLASS (parent_class)->set_config (pool, config);
++
++  /* ERRORS */
++wrong_config:
++  {
++    GST_WARNING_OBJECT (pool, "invalid config");
++    return FALSE;
++  }
++no_caps:
++  {
++    GST_WARNING_OBJECT (pool, "no caps in config");
++    return FALSE;
++  }
++wrong_caps:
++  {
++    GST_WARNING_OBJECT (pool,
++        "failed getting geometry from caps %" GST_PTR_FORMAT, caps);
++    return FALSE;
++  }
++unknown_format:
++  {
++    /* The previous code used GST_ELEMENT_ERROR here, but a buffer pool
++     * is not a GstElement; a warning is the correct mechanism. */
++    GST_WARNING_OBJECT (_tpool, "failed to get format from caps %"
++        GST_PTR_FORMAT, caps);
++    return FALSE;
++  }
++surface_failed:
++  {
++    /* tbm_surface_create () can fail; previously the NULL surface was
++     * passed straight into tbm_surface_internal_get_size (). */
++    GST_WARNING_OBJECT (pool, "failed to create tbm surface to query size");
++    return FALSE;
++  }
++}
++
++/* GDestroyNotify for the surface pointer cached as buffer qdata.
++ * NOTE(review): the same surface is also unreffed via gst_tizen_mem_free ()
++ * through the memory that owns it - confirm tbm_surface_internal_destroy
++ * here does not over-release the surface. */
++static void
++_destroy_tbm_surface (tbm_surface_h surface)
++{
++  GST_DEBUG ("destroy surface %p", surface);
++  tbm_surface_internal_destroy (surface);
++}
++
++/* This function handles GstBuffer creation: a new buffer backed by a
++ * single tizen (tbm-surface) memory, optionally carrying a GstVideoMeta
++ * with custom map/unmap, registered in the pool's buffer array. */
++static GstFlowReturn
++gst_tizen_buffer_pool_alloc (GstBufferPool * pool, GstBuffer ** buffer,
++    GstBufferPoolAcquireParams * params)
++{
++  GstTizenBufferPool *_tpool = GST_TIZEN_BUFFER_POOL_CAST (pool);
++  GstVideoInfo *info;
++  GstBuffer *buf;
++  GstMemory *tizen_mem;
++  gsize offsets[4] = {0, };
++
++  GST_DEBUG_OBJECT (pool, "gst_tizen_buffer_pool_alloc");
++  info = &_tpool->info;
++
++  if (!(buf = gst_buffer_new ()))
++    goto no_buffer;
++
++  GST_DEBUG_OBJECT (pool, "buffer new :%dx%d : %p", GST_VIDEO_INFO_WIDTH (info), GST_VIDEO_INFO_HEIGHT (info), buf);
++  if (!(tizen_mem = gst_tizen_allocator_alloc ((GstAllocator *)_tpool->allocator, info)))
++    goto mem_create_failed;
++
++  gst_buffer_append_memory (buf, tizen_mem);
++
++  GST_DEBUG_OBJECT (pool, "mem allocated : %p, %dx%d n_plane : %d",
++      buf, GST_VIDEO_INFO_WIDTH (info), GST_VIDEO_INFO_HEIGHT (info), GST_VIDEO_INFO_N_PLANES (info));
++
++  if (_tpool->add_videometa) {
++    GstVideoMeta *vmeta;
++
++    GST_DEBUG_OBJECT (pool, "adding GstVideoMeta");
++    /* NOTE(review): zero offsets are passed while the real plane offsets
++     * live in @info - presumably fine because the custom map/unmap below
++     * reads straight from the tbm surface; confirm. */
++    vmeta = gst_buffer_add_video_meta_full (buf, GST_VIDEO_FRAME_FLAG_NONE,
++        GST_VIDEO_INFO_FORMAT (info),
++        GST_VIDEO_INFO_WIDTH (info),
++        GST_VIDEO_INFO_HEIGHT (info),
++        GST_VIDEO_INFO_N_PLANES (info), offsets, info->stride);
++
++    vmeta->map = gst_tizen_video_meta_map;
++    vmeta->unmap = gst_tizen_video_meta_unmap;
++  }
++
++  /* Cache the surface pointer on the buffer for fast lookup by users. */
++  gst_mini_object_set_qdata (GST_MINI_OBJECT_CAST (buf),
++      gst_tizen_buffer_data_quark, gst_tizen_memory_get_surface (tizen_mem), (GDestroyNotify) _destroy_tbm_surface);
++
++  *buffer = buf;
++  g_ptr_array_add (_tpool->buffers, buf);
++  /* Counter bookkeeping is done in acquire_buffer ()/release_buffer ()
++   * only.  Incrementing outstandings/current_buffer_index here as well
++   * made the counters drift: preallocated buffers were counted as
++   * outstanding and never released. */
++
++  return GST_FLOW_OK;
++
++  /* ERROR */
++no_buffer:
++  {
++    GST_WARNING_OBJECT (pool, "Couldn't create buffer");
++    return GST_FLOW_ERROR;
++  }
++
++mem_create_failed:
++  {
++    /* Don't leak the freshly created (still empty) buffer. */
++    gst_buffer_unref (buf);
++    GST_WARNING_OBJECT (pool, "Couldn't create GstTizen Memory");
++    return GST_FLOW_ERROR;
++  }
++}
++
++
++/* Chains up to the default acquire, then records which registered buffer
++ * was handed out (current_buffer_index) and updates the outstanding
++ * buffer bookkeeping. */
++static GstFlowReturn
++gst_tizen_buffer_pool_acquire_buffer (GstBufferPool * bpool, GstBuffer ** buffer,
++    GstBufferPoolAcquireParams * params)
++{
++  GstFlowReturn ret;
++  GstTizenBufferPool *_tpool = GST_TIZEN_BUFFER_POOL_CAST (bpool);
++  GstBufferPoolClass *pclass = GST_BUFFER_POOL_CLASS (parent_class);
++  gint i, n;
++
++  ret = pclass->acquire_buffer (bpool, buffer, params);
++  if (ret != GST_FLOW_OK) {
++    GST_WARNING_OBJECT (_tpool, "Couldn't acquire buffer");
++    return ret;
++  }
++
++  /* Locate the acquired buffer in the registry filled by alloc (). */
++  n = _tpool->buffers->len;
++  for (i = 0; i < n; i++) {
++    GstBuffer *tmp = g_ptr_array_index (_tpool->buffers, i);
++
++    if (tmp == *buffer)
++      break;
++  }
++  /* NOTE(review): aborts the process when the buffer is unknown to the
++   * registry - consider a soft failure instead. */
++  g_assert (i != n);
++  g_atomic_int_set(&_tpool->current_buffer_index, i);
++
++  /* Reset per-acquire buffer metadata. */
++  GST_BUFFER_TIMESTAMP (*buffer) = GST_CLOCK_TIME_NONE;
++  GST_BUFFER_OFFSET (*buffer) = GST_BUFFER_OFFSET_NONE;
++  GST_BUFFER_OFFSET_END (*buffer) = GST_BUFFER_OFFSET_NONE;
++
++  g_atomic_int_inc (&_tpool->outstandings);
++
++  g_mutex_lock (&_tpool->lock);
++  _tpool->empty = FALSE;
++  g_mutex_unlock (&_tpool->lock);
++
++  GST_DEBUG_OBJECT (_tpool, "acquire buffer %p, current index %d, num_live_buffer %d", *buffer, i, _tpool->outstandings);
++
++  return GST_FLOW_OK;
++}
++
++/* Returns @buffer to the pool; flags the pool as empty once the last
++ * outstanding buffer has been released. */
++static void
++gst_tizen_buffer_pool_release_buffer (GstBufferPool * bpool, GstBuffer * buffer)
++{
++  GstTizenBufferPool *_tpool = GST_TIZEN_BUFFER_POOL_CAST (bpool);
++
++  GST_BUFFER_POOL_CLASS (gst_tizen_buffer_pool_parent_class)->release_buffer (bpool, buffer);
++
++  if (g_atomic_int_dec_and_test(&_tpool->outstandings)) {
++    g_mutex_lock (&_tpool->lock);
++    _tpool->empty = TRUE;
++    g_mutex_unlock (&_tpool->lock);
++  }
++  /* NOTE(review): outstandings is re-read non-atomically for the log and
++   * may already be stale here. */
++  GST_DEBUG_OBJECT (_tpool, "release buffer %p, outstandings %d", buffer, _tpool->outstandings);
++}
++
++/* Drops the pool's bookkeeping for @buffer and chains up to actually
++ * free it. */
++static void
++gst_tizen_buffer_pool_free_buffer (GstBufferPool * bpool, GstBuffer * buffer)
++{
++  GstTizenBufferPool *_tpool = GST_TIZEN_BUFFER_POOL_CAST (bpool);
++  GST_DEBUG_OBJECT (_tpool, "free buffer %p, %d", buffer, _tpool->outstandings);
++
++  /* Drop the cached surface pointer attached in alloc (). */
++  gst_mini_object_set_qdata (GST_MINI_OBJECT (buffer),
++      gst_tizen_buffer_data_quark, NULL, NULL);
++
++  /* The buffer was registered in alloc (); unregister it here so the
++   * lookup loop in acquire_buffer () never walks over a dangling
++   * pointer (previously freed buffers stayed in the array forever). */
++  if (_tpool->buffers)
++    g_ptr_array_remove (_tpool->buffers, buffer);
++
++  GST_BUFFER_POOL_CLASS (gst_tizen_buffer_pool_parent_class)->free_buffer (bpool,
++      buffer);
++}
++
++/* Releases the pool's allocator and buffer registry. */
++static void
++gst_tizen_buffer_pool_dispose (GObject * object)
++{
++  GstTizenBufferPool *_tpool = GST_TIZEN_BUFFER_POOL_CAST (object);
++
++  if (_tpool->allocator != NULL) {
++    gst_object_unref (_tpool->allocator);
++    _tpool->allocator = NULL;
++  }
++
++  if (_tpool->buffers != NULL) {
++    g_ptr_array_unref (_tpool->buffers);
++    _tpool->buffers = NULL;
++  }
++
++  GST_DEBUG_OBJECT (_tpool, "dispose pool");
++  G_OBJECT_CLASS (parent_class)->dispose (object);
++}
++
++/* Final teardown: releases the lock created in _init (). */
++static void
++gst_tizen_buffer_pool_finalize (GObject * object)
++{
++  GstTizenBufferPool *pool = GST_TIZEN_BUFFER_POOL_CAST (object);
++
++  g_mutex_clear (&pool->lock);
++
++  G_OBJECT_CLASS (gst_tizen_buffer_pool_parent_class)->finalize (object);
++}
++
++static void
++gst_tizen_buffer_pool_init (GstTizenBufferPool * pool)
++{
++  g_mutex_init (&pool->lock);
++  /* NOTE(review): this accessor is only generated when
++   * G_ADD_PRIVATE (GstTizenBufferPool) is part of the
++   * G_DEFINE_TYPE_WITH_CODE block - confirm it is registered. */
++  pool->priv = gst_tizen_buffer_pool_get_instance_private (pool);
++  /* Registry of every buffer allocated by this pool (no free func: the
++   * array does not own the entries). */
++  pool->buffers = g_ptr_array_new ();
++  pool->allocator = gst_tizen_allocator_new ();
++  g_atomic_int_set(&pool->outstandings, 0);
++  GST_LOG_OBJECT (pool, "Tizen buffer pool init %p", pool);
++}
++
++/* Installs the GObject and GstBufferPool virtual methods. */
++static void
++gst_tizen_buffer_pool_class_init (GstTizenBufferPoolClass * klass)
++{
++  GObjectClass *gobject_class = (GObjectClass *) klass;
++  GstBufferPoolClass *gstbufferpool_class = (GstBufferPoolClass *) klass;
++
++  /* One-time quark used to attach the tbm surface to pooled buffers. */
++  gst_tizen_buffer_data_quark = g_quark_from_static_string ("GstTizenBufferData");
++
++  gobject_class->dispose = gst_tizen_buffer_pool_dispose;
++  gobject_class->finalize = gst_tizen_buffer_pool_finalize;
++
++  /* Override the pool vmethods with the tbm-backed variants. */
++  gstbufferpool_class->get_options = gst_tizen_buffer_pool_get_options;
++  gstbufferpool_class->set_config = gst_tizen_buffer_pool_set_config;
++  gstbufferpool_class->alloc_buffer = gst_tizen_buffer_pool_alloc;
++  gstbufferpool_class->free_buffer = gst_tizen_buffer_pool_free_buffer;
++  gstbufferpool_class->acquire_buffer = gst_tizen_buffer_pool_acquire_buffer;
++  gstbufferpool_class->release_buffer = gst_tizen_buffer_pool_release_buffer;
++}
++
++/* Creates a new tizen buffer pool instance. */
++GstBufferPool *
++gst_tizen_buffer_pool_new (void)
++{
++  GstBufferPool *pool;
++
++  pool = g_object_new (GST_TYPE_TIZEN_BUFFER_POOL, NULL);
++
++  return GST_BUFFER_POOL_CAST (pool);
++}
--- /dev/null
--- /dev/null
++/*
++ * GStreamer tizen buffer pool
++ * Copyright (c) 2018 Samsung Electronics Co., Ltd.
++ * Author: Sejun Park <sejun79.park@samsung.com>
++ *
++ * This library is free software; you can redistribute it and/or
++ * modify it under the terms of the GNU Library General Public
++ * License as published by the Free Software Foundation; either
++ * version 2 of the License, or (at your option) any later version.
++ *
++ * This library is distributed in the hope that it will be useful,
++ * but WITHOUT ANY WARRANTY; without even the implied warranty of
++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
++ * Library General Public License for more details.
++ *
++ * You should have received a copy of the GNU Library General Public
++ * License along with this library; if not, write to the
++ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
++ * Boston, MA 02111-1307, USA.
++ */
++
++#ifndef _GST_TIZEN_BUFFERPOOL_H_
++#define _GST_TIZEN_BUFFERPOOL_H_
++
++#include <gst/gst.h>
++#include <gst/video/video.h>
++#include <gst/video/gstvideometa.h>
++#include <gst/video/gstvideopool.h>
++#include <gst/allocators/gsttizenmemory.h>
++
++G_BEGIN_DECLS
++
++/**
++ * GST_BUFFER_POOL_OPTION_TIZEN_META:
++ *
++ * An option that can be activated on bufferpool to request TizenVideo metadata
++ * on buffers from the pool.
++ */
++#define GST_BUFFER_POOL_OPTION_TIZEN_META "GstBufferPoolOptionTizenVideoMeta"
++
++typedef struct _GstTizenBufferPool GstTizenBufferPool;
++typedef struct _GstTizenBufferPoolClass GstTizenBufferPoolClass;
++typedef struct _GstTizenBufferPoolPrivate GstTizenBufferPoolPrivate;
++
++/* buffer pool functions */
++#define GST_TYPE_TIZEN_BUFFER_POOL (gst_tizen_buffer_pool_get_type())
++#define GST_IS_TIZEN_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_TIZEN_BUFFER_POOL))
++#define GST_TIZEN_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_TIZEN_BUFFER_POOL, GstTizenBufferPool))
++#define GST_TIZEN_BUFFER_POOL_CAST(obj) ((GstTizenBufferPool*)(obj))
++
++struct _GstTizenBufferPool
++{
++  GstBufferPool bufferpool;
++
++  /* allocator used to create the tbm-surface backed memories */
++  GstAllocator *allocator;
++  /* video info parsed from the configured caps */
++  GstVideoInfo info;
++
++  /* whether GstVideoMeta is attached to pooled buffers */
++  gboolean add_videometa;
++  /* whether Tizen meta is attached to pooled buffers */
++  gboolean add_tizenmeta;
++
++  /* protects the empty flag */
++  GMutex lock;
++
++  /* registry of buffers created by the pool */
++  GPtrArray *buffers;
++  /* index (into buffers) of the most recently acquired buffer */
++  gint current_buffer_index;
++  /* number of acquired, not yet released, buffers */
++  gint outstandings;
++  /* TRUE once all outstanding buffers have been released */
++  gboolean empty;
++
++  GstTizenBufferPoolPrivate *priv;
++};
++
++struct _GstTizenBufferPoolClass
++{
++  GstBufferPoolClass parent_class;
++};
++
++GST_ALLOCATORS_API
++GType gst_tizen_buffer_pool_get_type (void);
++
++GST_ALLOCATORS_API
++GstBufferPool *gst_tizen_buffer_pool_new (void);
++
++
++G_END_DECLS
++
++#endif /* _GST_TIZEN_BUFFER_POOL_H_ */
--- /dev/null
--- /dev/null
++/*
++ * GStreamer tizen memory
++ * Copyright (c) 2018 Samsung Electronics Co., Ltd.
++ * Author: Sejun Park <sejun79.park@samsung.com>
++ *
++ * This library is free software; you can redistribute it and/or
++ * modify it under the terms of the GNU Library General Public
++ * License as published by the Free Software Foundation; either
++ * version 2 of the License, or (at your option) any later version.
++ *
++ * This library is distributed in the hope that it will be useful,
++ * but WITHOUT ANY WARRANTY; without even the implied warranty of
++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
++ * Library General Public License for more details.
++ *
++ * You should have received a copy of the GNU Library General Public
++ * License along with this library; if not, write to the
++ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
++ * Boston, MA 02111-1307, USA.
++ */
++#ifdef HAVE_CONFIG_H
++#include "config.h"
++#endif
++
++#include "string.h"
++#include "gstdmabuf.h"
++#include "gsttizenmemory.h"
++#include <tbm_surface_internal.h>
++
++#define GST_TIZEN_ALLOCATOR_NAME "TizenVideoMemory"
++
++GST_DEBUG_CATEGORY_STATIC (gst_tizenmemory_debug);
++#define GST_CAT_DEFAULT gst_tizenmemory_debug
++
++#define parent_class gst_tizen_allocator_parent_class
++G_DEFINE_TYPE_WITH_CODE (GstTizenAllocator, gst_tizen_allocator, GST_TYPE_ALLOCATOR,
++ G_ADD_PRIVATE (GstTizenAllocator);
++ GST_DEBUG_CATEGORY_INIT (GST_CAT_DEFAULT, "tizenmemory", 0,
++ "GstTizenMemory and GstTizenAllocator"));
++
++typedef struct
++{
++  tbm_format format;
++  GstVideoFormat vformat;       /* Gst video format */
++  GstVideoFormat nformat;       /* Gst native video format */
++} GstTizenBufferFormats;
++
++static void cached_tizen_disposed_cb (GstTizenAllocator * allocator, GstMiniObject *obj);
++
++/* Lookup table between TBM formats and their GStreamer (standard and
++ * Tizen-native) equivalents.  Made static const: it is internal,
++ * read-only data and previously leaked a mutable symbol into the global
++ * namespace. */
++static const GstTizenBufferFormats yuv_formats[] = {
++  {TBM_FORMAT_YUV420, GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_S420},
++  {TBM_FORMAT_NV21, GST_VIDEO_FORMAT_NV21, GST_VIDEO_FORMAT_SN21},
++  {TBM_FORMAT_NV12, GST_VIDEO_FORMAT_NV12, GST_VIDEO_FORMAT_SN12},
++  {TBM_FORMAT_ARGB8888, GST_VIDEO_FORMAT_ARGB, GST_VIDEO_FORMAT_SR32}
++};
++
++/**
++ * gst_video_format_to_tbm_format: (skip)
++ * @format: a #GstVideoFormat to be converted
++ *
++ * Looks up the TBM pixel format matching @format.  Only the Tizen
++ * "native" video formats (the nformat column of the table) are mapped.
++ *
++ * Returns: a #tbm_format which is converted from @format, or
++ * (tbm_format) -1 when there is no TBM equivalent.
++ */
++tbm_format
++gst_video_format_to_tbm_format (GstVideoFormat format)
++{
++  guint idx;
++
++  for (idx = 0; idx < G_N_ELEMENTS (yuv_formats); idx++) {
++    if (yuv_formats[idx].nformat == format)
++      return yuv_formats[idx].format;
++  }
++
++  return -1;
++}
++
++/* Creates a GstTizenMemory wrapping @surface (a ref is taken on it) or,
++ * when @surface is NULL, a newly created non-cachable tbm surface sized
++ * from @vinfo.  On success @vinfo is updated in place with the surface's
++ * plane strides/offsets and total size. */
++static GstTizenMemory *
++_tizen_video_mem_new (GstAllocator * allocator, GstMemory * parent, GstVideoInfo * vinfo,
++    tbm_surface_h surface, gpointer user_data, GDestroyNotify notify)
++{
++  gint width, height;
++  GstTizenMemory *tmem;
++  tbm_surface_info_s sinfo;
++  tbm_format format;
++  gint i;
++
++  tmem = g_slice_new0 (GstTizenMemory);
++
++  /* Creates the tbm_surface with buffer objects. */
++  if (surface) {
++    tbm_surface_internal_ref (surface);
++    tmem->surface = surface;
++  } else {
++    width = GST_VIDEO_INFO_WIDTH (vinfo);
++    height = GST_VIDEO_INFO_HEIGHT (vinfo);
++    format = gst_video_format_to_tbm_format (GST_VIDEO_INFO_FORMAT (vinfo));
++    tmem->surface = tbm_surface_internal_create_with_flags (width, height, format, TBM_BO_NONCACHABLE);
++  }
++
++  if (!tbm_surface_internal_is_valid (tmem->surface)) {
++    GST_ERROR ("Invalid tbm surface");
++    /* NOTE(review): when a caller-provided @surface was reffed above,
++     * that ref is not dropped on this error path - confirm whether an
++     * unref is needed here. */
++    goto failed;
++  }
++
++  tbm_surface_get_info (tmem->surface, &sinfo);
++
++  /* Propagate the actual surface layout back into @vinfo. */
++  for (i = 0; i < sinfo.num_planes; i++) {
++    GST_VIDEO_INFO_PLANE_STRIDE (vinfo, i) = sinfo.planes[i].stride;
++    GST_VIDEO_INFO_PLANE_OFFSET (vinfo, i) = sinfo.planes[i].offset;
++    GST_DEBUG ("tbm surface plane[%d] %p", i, sinfo.planes[i].ptr);
++  }
++  GST_VIDEO_INFO_SIZE (vinfo) = sinfo.size;
++
++  gst_memory_init (GST_MEMORY_CAST (tmem), GST_MEMORY_FLAG_NO_SHARE,
++      allocator, parent, GST_VIDEO_INFO_SIZE (vinfo), 0, 0,
++      GST_VIDEO_INFO_SIZE (vinfo));
++
++  tmem->info = gst_video_info_copy (vinfo);
++  tmem->notify = notify;
++  tmem->user_data = user_data;
++  g_mutex_init (&tmem->lock);
++
++  GST_DEBUG ("%p: surface: %p size %" G_GSIZE_FORMAT, tmem, tmem->surface,
++      tmem->mem.maxsize);
++
++  return tmem;
++
++  /* ERRORS */
++failed:
++  {
++    GST_ERROR ("Failed to create tbm surface");
++    g_slice_free (GstTizenMemory, tmem);
++    return NULL;
++  }
++}
++
++/* GstAllocator::free vfunc: releases the surface ref and all per-memory
++ * resources. */
++static void
++gst_tizen_mem_free (GstAllocator * allocator, GstMemory * mem)
++{
++  GstTizenMemory *tmem = (GstTizenMemory *) mem;
++
++  /* Drop our reference on the backing tbm surface. */
++  tbm_surface_internal_unref (tmem->surface);
++  GST_DEBUG ("free surface from mem : %p", tmem->surface);
++
++  /* Fire the user-supplied destroy notification, if any. */
++  if (tmem->notify != NULL)
++    tmem->notify (tmem->user_data);
++
++  gst_video_info_free (tmem->info);
++  g_mutex_clear (&tmem->lock);
++  g_slice_free (GstTizenMemory, tmem);
++}
++
++/* GstMemory map vfunc: maps the backing tbm surface and returns the
++ * first plane's base pointer, or NULL on failure. */
++static gpointer
++gst_tizen_mem_map (GstMemory * gmem, gsize maxsize, GstMapFlags flags)
++{
++  GstTizenMemory *tmem = (GstTizenMemory *) gmem;
++  tbm_surface_info_s sinfo;
++  gpointer data = NULL;
++  int ret;
++
++  g_mutex_lock (&tmem->lock);
++
++  /* NOTE(review): @flags is ignored - the surface is always mapped for
++   * both read and write; confirm this is intended. */
++  ret = tbm_surface_map (tmem->surface,
++      TBM_SURF_OPTION_WRITE | TBM_SURF_OPTION_READ, &sinfo);
++  if (ret == TBM_SURFACE_ERROR_NONE)
++    data = sinfo.planes[0].ptr;
++  else
++    GST_ERROR ("failed to get surface info");
++
++  g_mutex_unlock (&tmem->lock);
++
++  return data;
++}
++
++/* GstMemory unmap vfunc: unmaps the backing tbm surface. */
++static void
++gst_tizen_mem_unmap (GstMemory * gmem)
++{
++  GstTizenMemory *tmem = (GstTizenMemory *) gmem;
++
++  /* Serialize against concurrent map ()/unmap () on the same memory. */
++  g_mutex_lock (&tmem->lock);
++  tbm_surface_unmap (tmem->surface);
++  g_mutex_unlock (&tmem->lock);
++}
++
++/* Detaches every cached exported dmabuf memory before the allocator
++ * goes away: drops the weak ref and clears the qdata that keeps the
++ * underlying GstTizenMemory alive. */
++static void
++gst_tizen_allocator_finalize (GObject *obj)
++{
++  GstTizenAllocator *allocator;
++  GList *iter;
++
++  allocator = GST_TIZEN_ALLOCATOR (obj);
++
++  for (iter = allocator->priv->mem_cache; iter; iter = iter->next) {
++    /* renamed from 'obj': the old name shadowed the parameter */
++    GstMiniObject *cached = iter->data;
++
++    gst_mini_object_weak_unref (cached,
++        (GstMiniObjectNotify) cached_tizen_disposed_cb, allocator);
++
++    /* Must use the same quark as gst_tizen_allocator_dmabuf_export (),
++     * which stores under "_tmem"; the previous "tizenmem" key never
++     * matched anything, so the cached ref was never released. */
++    gst_mini_object_set_qdata (cached,
++        g_quark_from_static_string ("_tmem"), NULL, NULL);
++  }
++
++  g_list_free (allocator->priv->mem_cache);
++  allocator->priv->mem_cache = NULL;
++
++  G_OBJECT_CLASS (parent_class)->finalize (obj);
++}
++
++/* Sub-memory sharing is not supported for tizen memory; returning NULL
++ * makes GStreamer fall back to copying. */
++static GstMemory *
++gst_tizen_mem_share (GstMemory * gmem, gssize offset, gssize size)
++{
++  return NULL;
++}
++
++/* GstMemory copy vfunc: allocates a new tizen memory of the same layout
++ * and copies every plane of the backing surface. */
++static GstMemory *
++gst_tizen_mem_copy (GstMemory * gmem, gssize offset, gsize size)
++{
++  gint i;
++  GstMemory *copy;
++  GstTizenMemory *tmem;
++  tbm_surface_h new_surface, old_surface;
++  tbm_format format;
++  tbm_surface_info_s old_surface_info;
++  tbm_surface_info_s new_surface_info;
++
++  tmem = (GstTizenMemory *) gmem;
++
++  /* gssize/gsize must not be printed with %d/%u (wrong-size varargs). */
++  GST_DEBUG ("copy memory %p, offset %" G_GSSIZE_FORMAT ", size : %" G_GSIZE_FORMAT,
++      tmem, offset, size);
++
++  /* NOTE(review): @offset and @size are ignored; the whole surface is
++   * always copied plane by plane - confirm callers never sub-copy. */
++  old_surface = tmem->surface;
++  format = tbm_surface_get_format (old_surface);
++
++  copy = gst_tizen_allocator_alloc (gmem->allocator, tmem->info);
++  if (copy == NULL) {
++    /* previously the NULL result was dereferenced below */
++    GST_ERROR ("failed to allocate copy memory");
++    return NULL;
++  }
++  new_surface = gst_tizen_memory_get_surface (copy);
++
++  tbm_surface_get_info (old_surface, &old_surface_info);
++  tbm_surface_get_info (new_surface, &new_surface_info);
++
++  for (i = 0; i < tbm_surface_internal_get_num_planes (format); i++) {
++    memcpy (new_surface_info.planes[i].ptr,
++        old_surface_info.planes[i].ptr, new_surface_info.planes[i].size);
++  }
++
++  return copy;
++}
++
++static void
++gst_tizen_allocator_class_init (GstTizenAllocatorClass * klass)
++{
++  GObjectClass *object_class;
++  GstAllocatorClass *allocator_class;
++
++  allocator_class = GST_ALLOCATOR_CLASS (klass);
++  object_class = G_OBJECT_CLASS (klass);
++
++  /* No generic alloc vfunc: memory must be created through
++   * gst_tizen_allocator_alloc*() directly. */
++  allocator_class->alloc = NULL;
++  allocator_class->free = gst_tizen_mem_free;
++
++  object_class->finalize = gst_tizen_allocator_finalize;
++}
++
++static void
++gst_tizen_allocator_init (GstTizenAllocator * allocator)
++{
++  GstAllocator *alloc = GST_ALLOCATOR_CAST (allocator);
++
++  allocator->priv = gst_tizen_allocator_get_instance_private (allocator);
++  /* Install the tbm-surface based memory operations. */
++  alloc->mem_type = GST_TIZEN_MEMORY_TYPE;
++  alloc->mem_map = (GstMemoryMapFunction) gst_tizen_mem_map;
++  alloc->mem_unmap = (GstMemoryUnmapFunction) gst_tizen_mem_unmap;
++  alloc->mem_share = (GstMemoryShareFunction) gst_tizen_mem_share;
++  alloc->mem_copy = (GstMemoryCopyFunction) gst_tizen_mem_copy;
++
++  /* Custom alloc: gst_allocator_alloc () must not be used on this
++   * allocator. */
++  GST_OBJECT_FLAG_SET (allocator, GST_ALLOCATOR_FLAG_CUSTOM_ALLOC);
++}
++
++/**
++ * gst_tizen_allocator_new:
++ *
++ * Creates a new #GstTizenAllocator for tbm-surface backed memory.
++ *
++ * Returns: (transfer full): a new tizen allocator, or NULL if the allocator
++ * isn't available. Use gst_object_unref() to release the allocator after
++ * usage
++ *
++ */
++GstAllocator *
++gst_tizen_allocator_new (void)
++{
++  return g_object_new (GST_TYPE_TIZEN_ALLOCATOR, NULL);
++}
++
++/**
++ * gst_tizen_allocator_alloc:
++ * @allocator: a #GstAllocator to use
++ * @vinfo: a #GstVideoInfo to be refered for size and type of allocated memory
++ *
++ * Allocates a #GstTizenMemory backed by a newly created tbm surface.
++ * Note that @vinfo is updated in place with the surface's plane
++ * strides, offsets and total size.
++ *
++ * Returns: (transfer full) (nullable): a new #GstMemory.
++ */
++GstMemory *
++gst_tizen_allocator_alloc (GstAllocator * allocator, GstVideoInfo * vinfo)
++{
++  g_return_val_if_fail (GST_IS_TIZEN_ALLOCATOR (allocator), NULL);
++
++  return (GstMemory *)_tizen_video_mem_new (allocator, NULL, vinfo, NULL, NULL, NULL);
++}
++
++/**
++ * gst_tizen_allocator_alloc_surface: (skip)
++ * @allocator: a #GstAllocator to use
++ * @vinfo: a #GstVideoInfo to be refered for memory size and type
++ * @surface: a #tbm_surface_h to be used for allocated memory
++ * @user_data: (allow-none): user data pointer
++ * @notify: (allow-none) (closure user_data): called with @user_data when the memory is freed
++ *
++ * Wraps @surface in a #GstTizenMemory; a reference to @surface is taken
++ * and @vinfo is updated in place with the surface's plane layout.
++ *
++ * Returns: (transfer full) (nullable): a new #GstMemory.
++ */
++GstMemory *
++gst_tizen_allocator_alloc_surface (GstAllocator * allocator, GstVideoInfo * vinfo,
++    tbm_surface_h surface, gpointer user_data, GDestroyNotify notify)
++{
++  g_return_val_if_fail (GST_IS_TIZEN_ALLOCATOR (allocator), NULL);
++
++  return (GstMemory *)_tizen_video_mem_new (allocator, NULL, vinfo, surface, user_data, notify);
++}
++
++/* Returns TRUE when @mem was allocated by a #GstTizenAllocator. */
++gboolean
++gst_is_tizen_memory (GstMemory * mem)
++{
++  g_return_val_if_fail (mem != NULL, FALSE);
++
++  return GST_IS_TIZEN_ALLOCATOR (mem->allocator);
++}
++
++/* Number of tbm buffer objects backing @mem, or -1 on invalid input. */
++gint
++gst_tizen_memory_get_num_bos (GstMemory *mem)
++{
++  GstTizenMemory *tmem = (GstTizenMemory *) mem;
++
++  g_return_val_if_fail (mem != NULL, -1);
++  g_return_val_if_fail (GST_IS_TIZEN_ALLOCATOR (mem->allocator), -1);
++
++  return (gint) tbm_surface_internal_get_num_bos (tmem->surface);
++}
++
++/* Returns the @bo_idx'th tbm buffer object of @mem, or NULL when @mem
++ * is not tizen memory or @bo_idx is out of range. */
++void *
++gst_tizen_memory_get_bos (GstMemory * mem, gint bo_idx)
++{
++  gint bo_num;
++  GstTizenMemory *tmem;
++
++  g_return_val_if_fail (mem != NULL, NULL);
++  g_return_val_if_fail (GST_IS_TIZEN_ALLOCATOR (mem->allocator), NULL);
++
++  tmem = (GstTizenMemory *)mem;
++
++  bo_num = tbm_surface_internal_get_num_bos(tmem->surface);
++
++  /* Valid indices are 0 .. bo_num - 1: the previous check
++   * (bo_idx > bo_num) let both bo_num and negative values through. */
++  if (bo_idx < 0 || bo_idx >= bo_num) {
++    GST_ERROR ("invalid bo_idx %d (num bos %d)", bo_idx, bo_num);
++    return NULL;
++  }
++
++  return tbm_surface_internal_get_bo(tmem->surface, bo_idx);
++}
++
++/* Returns the tbm surface backing @mem (no new reference is taken). */
++void *
++gst_tizen_memory_get_surface (GstMemory * mem)
++{
++  GstTizenMemory *tmem = (GstTizenMemory *) mem;
++
++  g_return_val_if_fail (mem != NULL, NULL);
++  g_return_val_if_fail (GST_IS_TIZEN_ALLOCATOR (mem->allocator), NULL);
++
++  return tmem->surface;
++}
++
++/* Weak-ref callback fired when a cached exported dmabuf memory is
++ * destroyed: drop it from the allocator's cache list.
++ * NOTE(review): the list is not protected by any lock - confirm the
++ * cache is only touched from one thread. */
++static void
++cached_tizen_disposed_cb (GstTizenAllocator * allocator, GstMiniObject *obj)
++{
++  allocator->priv->mem_cache = g_list_remove (allocator->priv->mem_cache, obj);
++}
++
++/* Exports the @bo_idx'th buffer object of @_tmem as a dmabuf-backed
++ * GstMemory.  The exported memory keeps a reference on @_tmem (via
++ * qdata) and is tracked in the allocator's cache until disposed. */
++GstMemory *
++gst_tizen_allocator_dmabuf_export (GstAllocator * allocator, GstMemory * _tmem, int bo_idx)
++{
++  GstTizenMemory *tmem = (GstTizenMemory *) _tmem;
++  GstTizenAllocator *tallocator = GST_TIZEN_ALLOCATOR (allocator);
++  GstMemory * mem;
++  gint fd;
++  tbm_bo bo;
++
++  g_return_val_if_fail (tmem->surface != NULL, NULL);
++
++  bo = tbm_surface_internal_get_bo (tmem->surface, bo_idx);
++  fd = tbm_bo_export_fd (bo);
++
++  /* Create the dmabuf allocator only once and reuse it; unconditionally
++   * overwriting it leaked the previous instance on every export. */
++  if (tallocator->priv->dmabuf_alloc == NULL)
++    tallocator->priv->dmabuf_alloc = gst_dmabuf_allocator_new ();
++
++  mem = gst_dmabuf_allocator_alloc (tallocator->priv->dmabuf_alloc, fd,
++      gst_memory_get_sizes (_tmem, NULL, NULL));
++
++  /* cache */
++  gst_mini_object_weak_ref (GST_MINI_OBJECT (mem),
++      (GstMiniObjectNotify) cached_tizen_disposed_cb, tallocator);
++
++  tallocator->priv->mem_cache = g_list_prepend (tallocator->priv->mem_cache, mem);
++
++  /* Keep the tizen memory alive for as long as the exported memory:
++   * take the reference that the GDestroyNotify (gst_memory_unref)
++   * releases - previously no matching ref was taken. */
++  gst_mini_object_set_qdata (GST_MINI_OBJECT (mem),
++      g_quark_from_static_string ("_tmem"), gst_memory_ref (_tmem),
++      (GDestroyNotify) gst_memory_unref);
++  return mem;
++}
++
++/**
++ * gst_tizen_allocator_dmabuf_import: (skip)
++ * @allocator: a #GstAllocator to use
++ * @fds: fd array to be imported to #GstTizenMemory
++ * @planes: a number of planes
++ * @offsets: offset array
++ * @vinfo: a #GstVideoInfo to be refered
++ *
++ * FIXME: not implemented yet; always returns %NULL.
++ *
++ * Returns: (transfer full) (nullable): a new #GstTizenMemory.
++ */
++GstTizenMemory *
++gst_tizen_allocator_dmabuf_import (GstAllocator * allocator, gint * fds, gint planes, gsize offsets[4], GstVideoInfo * vinfo)
++{
++  /* The previous code slice-allocated and gst_memory_init'ed a
++   * GstTizenMemory (taking a ref on @allocator) and then returned NULL,
++   * leaking both; do nothing until the import is actually written. */
++  GST_WARNING ("dmabuf import is not implemented");
++  return NULL;
++}
++
++/**
++ * gst_tizen_video_meta_map:
++ * @meta: a #GstVideoMeta
++ * @plane: a plane
++ * @info: a #GstMapInfo
++ * @data: (out): the data of @plane
++ * @stride: (out): the stride of @plane
++ * @flags: @GstMapFlags
++ *
++ * Maps @plane of the tbm surface backing @meta's buffer.  The surface
++ * itself is only mapped when the first plane is mapped; later planes
++ * reuse the cached surface info.
++ *
++ * Returns: TRUE if the map operation was successful.
++ */
++gboolean
++gst_tizen_video_meta_map (GstVideoMeta * meta, guint plane, GstMapInfo * info,
++    gpointer * data, gint * stride, GstMapFlags flags)
++{
++  int tbm_ret = TBM_SURFACE_ERROR_NONE;
++  gboolean mapped = FALSE;
++  GstBuffer *buffer = meta->buffer;
++  GstTizenMemory *vmem =
++      (GstTizenMemory *) gst_buffer_get_memory (buffer, 0);
++
++  g_return_val_if_fail (vmem != NULL, FALSE);
++
++  if (!GST_IS_TIZEN_ALLOCATOR (((GstMemory *) vmem)->allocator)) {
++    /* drop the ref returned by gst_buffer_get_memory (); the old
++     * g_return_val_if_fail path leaked it */
++    gst_memory_unref ((GstMemory *) vmem);
++    g_return_val_if_reached (FALSE);
++  }
++
++  g_mutex_lock (&vmem->lock);
++
++  if (vmem->video_memory_map[plane]) {
++    GST_ERROR ("[%p] plane [%d] is already mapped", buffer, plane);
++    goto _VIDEO_MEMORY_MAP_DONE;
++  }
++
++  /* First mapped plane maps the whole surface. */
++  if (vmem->video_memory_map_count == 0) {
++    tbm_ret = tbm_surface_map (vmem->surface,
++        TBM_SURF_OPTION_WRITE | TBM_SURF_OPTION_READ,
++        &vmem->surface_info);
++    if (tbm_ret != TBM_SURFACE_ERROR_NONE) {
++      GST_ERROR ("[%p] tbm_surface_map for %p failed, 0x%x", buffer, vmem->surface, tbm_ret);
++      goto _VIDEO_MEMORY_MAP_DONE;
++    }
++  }
++
++  if (plane >= vmem->surface_info.num_planes) {
++    GST_ERROR ("[%p] invalid plane index %d (num plane %d)",
++        buffer, plane, vmem->surface_info.num_planes);
++
++    if (vmem->video_memory_map_count == 0) {
++      GST_ERROR ("[%p] unmap surface %p", buffer, vmem->surface);
++      tbm_surface_unmap (vmem->surface);
++    }
++
++    goto _VIDEO_MEMORY_MAP_DONE;
++  }
++
++  *data = vmem->surface_info.planes[plane].ptr;
++  *stride = vmem->surface_info.planes[plane].stride;
++
++  vmem->video_memory_map[plane] = TRUE;
++  vmem->video_memory_map_count++;
++
++  /* set map flags */
++  info->flags = flags;
++
++  GST_DEBUG ("[%p] mapped plane %d, data %p, stride %d, flags 0x%x",
++      buffer, plane, *data, *stride, info->flags);
++
++  mapped = TRUE;
++
++_VIDEO_MEMORY_MAP_DONE:
++  g_mutex_unlock (&vmem->lock);
++
++  /* Drop the reference taken by gst_buffer_get_memory (); @buffer keeps
++   * its own ref so the mapped pointers stay valid.  This reference was
++   * previously leaked on every map. */
++  gst_memory_unref ((GstMemory *) vmem);
++
++  return mapped;
++}
++
++/**
++ * gst_tizen_video_meta_unmap:
++ * @meta: a #GstVideoMeta
++ * @plane: a plane
++ * @info: a #GstMapInfo
++ *
++ * Unmaps @plane; the surface is physically unmapped only when the last
++ * mapped plane is unmapped.
++ *
++ * Returns: TRUE if the memory was successfully unmapped.
++ */
++gboolean
++gst_tizen_video_meta_unmap (GstVideoMeta * meta, guint plane, GstMapInfo * info)
++{
++  int tbm_ret = TBM_SURFACE_ERROR_NONE;
++  gboolean unmapped = FALSE;
++  GstBuffer *buffer = meta->buffer;
++  GstTizenMemory *vmem =
++      (GstTizenMemory *) gst_buffer_get_memory (buffer, 0);
++
++  g_return_val_if_fail (vmem != NULL, FALSE);
++
++  if (!GST_IS_TIZEN_ALLOCATOR (((GstMemory *) vmem)->allocator)) {
++    /* drop the ref returned by gst_buffer_get_memory (); the old
++     * g_return_val_if_fail path leaked it */
++    gst_memory_unref ((GstMemory *) vmem);
++    g_return_val_if_reached (FALSE);
++  }
++
++  g_mutex_lock (&vmem->lock);
++
++  if (vmem->video_memory_map[plane] == FALSE) {
++    GST_ERROR ("[%p] plane %d is already unmapped", buffer, plane);
++    goto _VIDEO_MEMORY_UNMAP_DONE;
++  }
++
++  /* Other planes are still mapped: only clear this plane's flag. */
++  if (vmem->video_memory_map_count - 1 > 0) {
++    GST_DEBUG ("[%p] plane %d skip unmap surface %p", buffer, plane, vmem->surface);
++    unmapped = TRUE;
++    goto _VIDEO_MEMORY_UNMAP_DONE;
++  }
++
++  tbm_ret = tbm_surface_unmap (vmem->surface);
++  if (tbm_ret != TBM_SURFACE_ERROR_NONE) {
++    GST_ERROR ("[%p] tbm_surface_unmap %p failed, 0x%x", buffer, vmem->surface, tbm_ret);
++    goto _VIDEO_MEMORY_UNMAP_DONE;
++  }
++
++  unmapped = TRUE;
++
++  GST_DEBUG ("[%p] plane %d unmap surface %p done", buffer, plane, vmem->surface);
++
++_VIDEO_MEMORY_UNMAP_DONE:
++  if (unmapped == TRUE) {
++    vmem->video_memory_map[plane] = FALSE;
++    vmem->video_memory_map_count--;
++  }
++
++  g_mutex_unlock (&vmem->lock);
++
++  /* Drop the reference taken by gst_buffer_get_memory () above; it was
++   * previously leaked on every unmap. */
++  gst_memory_unref ((GstMemory *) vmem);
++
++  return unmapped;
++}
--- /dev/null
--- /dev/null
++/*
++ * GStreamer tizen memory
++ * Copyright (c) 2018 Samsung Electronics Co., Ltd.
++ * Author: Sejun Park <sejun79.park@samsung.com>
++ *
++ * This library is free software; you can redistribute it and/or
++ * modify it under the terms of the GNU Library General Public
++ * License as published by the Free Software Foundation; either
++ * version 2 of the License, or (at your option) any later version.
++ *
++ * This library is distributed in the hope that it will be useful,
++ * but WITHOUT ANY WARRANTY; without even the implied warranty of
++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
++ * Library General Public License for more details.
++ *
++ * You should have received a copy of the GNU Library General Public
++ * License along with this library; if not, write to the
++ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
++ * Boston, MA 02111-1307, USA.
++ */
++#ifndef __GST_TIZEN_MEMORY_H__
++#define __GST_TIZEN_MEMORY_H__
++
++#include <gst/gst.h>
++#include <gst/allocators/allocators-prelude.h>
++#include <gst/video/video.h>
++#include <gst/video/gstvideometa.h>
++#include <gst/gstmemory.h>
++#include <tbm_bufmgr.h>
++#include <tbm_surface.h>
++
++G_BEGIN_DECLS
++
++typedef struct _GstTizenAllocator GstTizenAllocator;
++typedef struct _GstTizenAllocatorClass GstTizenAllocatorClass;
++typedef struct _GstTizenAllocatorPrivate GstTizenAllocatorPrivate;
++
++typedef struct _GstTizenMemory GstTizenMemory;
++#define GST_TYPE_TIZEN_ALLOCATOR (gst_tizen_allocator_get_type())
++
++GST_ALLOCATORS_API
++GType gst_tizen_allocator_get_type(void);
++
++#define GST_IS_TIZEN_ALLOCATOR(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_TIZEN_ALLOCATOR))
++#define GST_IS_TIZEN_ALLOCATOR_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_TIZEN_ALLOCATOR))
++#define GST_TIZEN_ALLOCATOR_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_TIZEN_ALLOCATOR, GstTizenAllocatorClass))
++#define GST_TIZEN_ALLOCATOR(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_TIZEN_ALLOCATOR, GstTizenAllocator))
++#define GST_TIZEN_ALLOCATOR_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_TIZEN_ALLOCATOR, GstTizenAllocatorClass))
++#define GST_TIZEN_ALLOCATOR_CAST(obj) ((GstTizenAllocator *)(obj))
++
++#define GST_TIZEN_MEMORY_TYPE "TizenVideoMemory"
++
++/**
++ * GstTizenMemory:
++ *
++ * #GstMemory subclass backed by a tbm_surface_h.
++ */
++struct _GstTizenMemory
++{
++  GstMemory mem;
++
++  tbm_surface_h surface;
++  tbm_surface_info_s surface_info;
++  GstVideoInfo *info;
++
++  /* <protected> */
++  /* @lock guards the per-plane map bookkeeping below */
++  GMutex lock;
++  gboolean video_memory_map[GST_VIDEO_MAX_PLANES];
++  gint video_memory_map_count;
++
++  /* <private> */
++  /* @notify is invoked with @user_data when the memory is released
++   * (set via gst_tizen_allocator_alloc_surface()) */
++  GDestroyNotify notify;
++  gpointer user_data;
++
++};
++
++struct _GstTizenAllocatorPrivate
++{
++  GList *mem_cache;
++  GstAllocator *dmabuf_alloc;
++};
++
++/**
++ * GstTizenAllocator:
++ *
++ * #GstAllocator providing tbm (Tizen Buffer Manager) backed memory.
++ */
++struct _GstTizenAllocator
++{
++  GstAllocator parent;
++  GstTizenAllocatorPrivate *priv;
++};
++
++
++struct _GstTizenAllocatorClass
++{
++  GstAllocatorClass parent_class;
++};
++
++GST_ALLOCATORS_API
++GstAllocator * gst_tizen_allocator_new (void);
++
++GST_ALLOCATORS_API
++GstMemory * gst_tizen_allocator_alloc (GstAllocator * allocator, GstVideoInfo * vinfo);
++
++GST_ALLOCATORS_API
++GstMemory * gst_tizen_allocator_alloc_surface (GstAllocator * allocator, GstVideoInfo * vinfo,
++ tbm_surface_h surface, gpointer user_data, GDestroyNotify notify);
++
++GST_ALLOCATORS_API
++gboolean gst_is_tizen_memory (GstMemory *mem);
++
++GST_ALLOCATORS_API
++gint gst_tizen_memory_get_num_bos (GstMemory *mem);
++
++GST_ALLOCATORS_API
++void * gst_tizen_memory_get_bos (GstMemory *mem, gint index);
++
++GST_ALLOCATORS_API
++void * gst_tizen_memory_get_surface (GstMemory *mem);
++
++GST_ALLOCATORS_API
++GstMemory * gst_tizen_allocator_dmabuf_export (GstAllocator * allocator, GstMemory *tmem, int bo_idx);
++
++GST_ALLOCATORS_API
++GstTizenMemory *gst_tizen_allocator_dmabuf_import (GstAllocator * allocator, gint * fds, gint planes, gsize offsets[4], GstVideoInfo * vinfo);
++
++/* These two are implemented in the allocators library (gsttizenmemory.c /
++ * gsttizenbufferpool.c), so they must be exported with GST_ALLOCATORS_API;
++ * GST_VIDEO_API would mark them as imported from gst-video when building
++ * this library. */
++GST_ALLOCATORS_API
++gboolean gst_tizen_video_meta_map (GstVideoMeta * meta, guint plane, GstMapInfo * info,
++ gpointer * data, gint * stride, GstMapFlags flags);
++
++GST_ALLOCATORS_API
++gboolean gst_tizen_video_meta_unmap (GstVideoMeta * meta, guint plane, GstMapInfo * info);
++
++GST_ALLOCATORS_API
++tbm_format gst_video_format_to_tbm_format (GstVideoFormat format);
++
++G_END_DECLS
++
++#endif /* __GST_TIZEN_MEMORY_H__ */
--- /dev/null
- dependencies : [gst_dep],
+ gst_allocators_headers = [
+ 'allocators.h',
+ 'allocators-prelude.h',
+ 'gstfdmemory.h',
+ 'gstphysmemory.h',
+ 'gstdmabuf.h',
+ ]
++
++# NOTE(review): tbm_dep (Tizen Buffer Manager) is assumed to be declared in a
++# parent meson.build, presumably via dependency('libtbm', required: false) —
++# TODO confirm it is always defined so .found() is safe here.
++if tbm_dep.found()
++  gst_allocators_headers += [
++    'gsttizenmemory.h',
++    'gsttizenbufferpool.h',
++  ]
++endif
+ install_headers(gst_allocators_headers, subdir : 'gstreamer-1.0/gst/allocators/')
+
+ gst_allocators_sources = [ 'gstdmabuf.c', 'gstfdmemory.c', 'gstphysmemory.c']
++if tbm_dep.found()
++  gst_allocators_sources += ['gsttizenmemory.c', 'gsttizenbufferpool.c']
++endif
++
+ gstallocators = library('gstallocators-@0@'.format(api_version),
+ gst_allocators_sources,
+ c_args : gst_plugins_base_args + ['-DBUILDING_GST_ALLOCATORS'],
+ include_directories: [configinc, libsinc],
+ version : libversion,
+ soversion : soversion,
+ darwin_versions : osxversion,
+ install : true,
- dependencies : [gst_dep],
++ dependencies : [gst_dep, video_dep, tbm_dep],
+ )
+
++# NOTE(review): the generated .pc file only advertises gstreamer-1.0; when TBM
++# support is enabled, external consumers of the tizen-memory API may also need
++# gst-video and libtbm in Requires/Libs — verify downstream usage.
+ pkgconfig.generate(gstallocators,
+ libraries : [gst_dep],
+ variables : pkgconfig_variables,
+ subdirs : pkgconfig_subdirs,
+ name : 'gstreamer-allocators-1.0',
+ description : 'Allocators implementation',
+ )
+
+ allocators_gen_sources = []
+ if build_gir
+ gst_gir_extra_args = gir_init_section + [ '--c-include=gst/allocators/allocators.h' ]
+ allocators_gir = gnome.generate_gir(gstallocators,
+ sources : gst_allocators_sources + gst_allocators_headers,
+ namespace : 'GstAllocators',
+ nsversion : api_version,
+ identifier_prefix : 'Gst',
+ symbol_prefix : 'gst',
+ export_packages : 'gstreamer-allocators-1.0',
+ includes : ['Gst-1.0'],
+ install : true,
+ extra_args : gst_gir_extra_args,
+ dependencies : [gst_dep]
+ )
+ allocators_gen_sources += allocators_gir
+ endif
+
+ allocators_dep = declare_dependency(link_with: gstallocators,
+ include_directories : [libsinc],
++ dependencies : [gst_dep, video_dep, tbm_dep],
+ sources : allocators_gen_sources)
+
+ meson.override_dependency('gstreamer-allocators-1.0', allocators_dep)
--- /dev/null
+ /* GStreamer
+ * Copyright (C) 2009 Igalia S.L.
+ * Author: Iago Toral Quiroga <itoral@igalia.com>
+ * Copyright (C) 2011 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>.
+ * Copyright (C) 2011 Nokia Corporation. All rights reserved.
+ * Contact: Stefan Kost <stefan.kost@nokia.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /**
+ * SECTION:gstaudiodecoder
+ * @title: GstAudioDecoder
+ * @short_description: Base class for audio decoders
+ * @see_also: #GstBaseTransform
+ *
+ * This base class is for audio decoders turning encoded data into
+ * raw audio samples.
+ *
+ * GstAudioDecoder and subclass should cooperate as follows.
+ *
+ * ## Configuration
+ *
+ * * Initially, GstAudioDecoder calls @start when the decoder element
+ * is activated, which allows subclass to perform any global setup.
+ * Base class (context) parameters can already be set according to subclass
+ * capabilities (or possibly upon receive more information in subsequent
+ * @set_format).
+ * * GstAudioDecoder calls @set_format to inform subclass of the format
+ * of input audio data that it is about to receive.
+ * While unlikely, it might be called more than once, if changing input
+ * parameters require reconfiguration.
+ * * GstAudioDecoder calls @stop at end of all processing.
+ *
+ * As of configuration stage, and throughout processing, GstAudioDecoder
+ * provides various (context) parameters, e.g. describing the format of
+ * output audio data (valid when output caps have been set) or current parsing state.
+ * Conversely, subclass can and should configure context to inform
+ * base class of its expectation w.r.t. buffer handling.
+ *
+ * ## Data processing
+ * * Base class gathers input data, and optionally allows subclass
+ * to parse this into subsequently manageable (as defined by subclass)
+ * chunks. Such chunks are subsequently referred to as 'frames',
+ * though they may or may not correspond to 1 (or more) audio format frame.
+ * * Input frame is provided to subclass' @handle_frame.
+ * * If codec processing results in decoded data, subclass should call
+ * @gst_audio_decoder_finish_frame to have decoded data pushed
+ * downstream.
+ * * Just prior to actually pushing a buffer downstream,
+ * it is passed to @pre_push. Subclass should either use this callback
+ * to arrange for additional downstream pushing or otherwise ensure such
+ * custom pushing occurs after at least a method call has finished since
+ * setting src pad caps.
+ * * During the parsing process GstAudioDecoderClass will handle both
+ * srcpad and sinkpad events. Sink events will be passed to subclass
+ * if @event callback has been provided.
+ *
+ * ## Shutdown phase
+ *
+ * * GstAudioDecoder class calls @stop to inform the subclass that data
+ * parsing will be stopped.
+ *
+ * Subclass is responsible for providing pad template caps for
+ * source and sink pads. The pads need to be named "sink" and "src". It also
+ * needs to set the fixed caps on srcpad, when the format is ensured. This
+ * is typically when base class calls subclass' @set_format function, though
+ * it might be delayed until calling @gst_audio_decoder_finish_frame.
+ *
+ * In summary, above process should have subclass concentrating on
+ * codec data processing while leaving other matters to base class,
+ * such as most notably timestamp handling. While it may exert more control
+ * in this area (see e.g. @pre_push), it is very much not recommended.
+ *
+ * In particular, base class will try to arrange for perfect output timestamps
+ * as much as possible while tracking upstream timestamps.
+ * To this end, if deviation between the next ideal expected perfect timestamp
+ * and upstream exceeds #GstAudioDecoder:tolerance, then resync to upstream
+ * occurs (which would happen always if the tolerance mechanism is disabled).
+ *
+ * In non-live pipelines, baseclass can also (configurably) arrange for
+ * output buffer aggregation which may help to reduce large(r) numbers of
+ * small(er) buffers being pushed and processed downstream. Note that this
+ * feature is only available if the buffer layout is interleaved. For planar
+ * buffers, the decoder implementation is fully responsible for the output
+ * buffer size.
+ *
+ * On the other hand, it should be noted that baseclass only provides limited
+ * seeking support (upon explicit subclass request), as full-fledged support
+ * should rather be left to upstream demuxer, parser or alike. This simple
+ * approach caters for seeking and duration reporting using estimated input
+ * bitrates.
+ *
+ * Things that subclass need to take care of:
+ *
+ * * Provide pad templates
+ * * Set source pad caps when appropriate
+ * * Set user-configurable properties to sane defaults for format and
+ * implementing codec at hand, and convey some subclass capabilities and
+ * expectations in context.
+ *
+ * * Accept data in @handle_frame and provide decoded results to
+ * @gst_audio_decoder_finish_frame. If it is prepared to perform
+ * PLC, it should also accept NULL data in @handle_frame and provide for
+ * data for indicated duration.
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include "gstaudiodecoder.h"
+ #include "gstaudioutilsprivate.h"
+ #include <gst/pbutils/descriptions.h>
+
+ #include <string.h>
+
+ GST_DEBUG_CATEGORY (audiodecoder_debug);
+ #define GST_CAT_DEFAULT audiodecoder_debug
+
+ /* no signals are emitted by this base class; enum kept as GObject boilerplate */
+ enum
+ {
+ LAST_SIGNAL
+ };
+
+ /* property IDs */
+ enum
+ {
+ PROP_0,
+ PROP_LATENCY,
+ PROP_TOLERANCE,
+ PROP_PLC,
+ PROP_MAX_ERRORS
+ };
+
+ /* property defaults */
+ #define DEFAULT_LATENCY 0
+ #define DEFAULT_TOLERANCE 0
+ #define DEFAULT_PLC FALSE
+ #define DEFAULT_DRAINABLE TRUE
+ #define DEFAULT_NEEDS_FORMAT FALSE
+ #define DEFAULT_MAX_ERRORS GST_AUDIO_DECODER_MAX_ERRORS
+
+ /* Per-stream decoding context; zeroed wholesale by gst_audio_decoder_reset()
+ * on a full reset (see the memset there), so members must tolerate being 0. */
+ typedef struct _GstAudioDecoderContext
+ {
+ /* last negotiated input caps */
+ GstCaps *input_caps;
+
+ /* (output) audio format */
+ GstAudioInfo info;
+ GstCaps *caps;
+ gboolean output_format_changed;
+
+ /* parsing state */
+ gboolean eos;
+ gboolean sync;
+
+ gboolean had_output_data;
+ gboolean had_input_data;
+
+ /* misc */
+ gint delay;
+
+ /* output */
+ gboolean do_plc;
+ gboolean do_estimate_rate;
+ GstCaps *allocation_caps;
+ /* MT-protected (with LOCK) */
+ GstClockTime min_latency;
+ GstClockTime max_latency;
+
+ /* allocator/params negotiated via the downstream ALLOCATION query */
+ GstAllocator *allocator;
+ GstAllocationParams params;
+ } GstAudioDecoderContext;
+
+ /* Instance-private state; most members are protected by the stream lock. */
+ struct _GstAudioDecoderPrivate
+ {
+ /* activation status */
+ gboolean active;
+
+ /* input base/first ts as basis for output ts */
+ GstClockTime base_ts;
+ /* input samples processed and sent downstream so far (w.r.t. base_ts) */
+ guint64 samples;
+
+ /* collected input data */
+ GstAdapter *adapter;
+ /* tracking input ts for changes */
+ GstClockTime prev_ts;
+ guint64 prev_distance;
+ /* frames obtained from input */
+ GQueue frames;
+ /* collected output data */
+ GstAdapter *adapter_out;
+ /* ts and duration for output data collected above */
+ GstClockTime out_ts, out_dur;
+ /* mark outgoing discont */
+ gboolean discont;
+
+ /* subclass gave all it could already */
+ gboolean drained;
+ /* subclass currently being forcibly drained */
+ gboolean force;
+ /* TRUE when input_segment and output_segment are identical */
+ gboolean in_out_segment_sync;
+ /* TRUE if we have an active set of instant rate flags */
+ gboolean decode_flags_override;
+ GstSegmentFlags decode_flags;
+
+ /* expecting the buffer with DISCONT flag */
+ gboolean expecting_discont_buf;
+
+ /* number of samples pushed out via _finish_subframe(), resets on _finish_frame() */
+ guint subframe_samples;
+
+ /* input bps estimation */
+ /* global in bytes seen */
+ guint64 bytes_in;
+ /* global samples sent out */
+ guint64 samples_out;
+ /* bytes flushed during parsing */
+ guint sync_flush;
+ /* error count */
+ gint error_count;
+ /* max errors */
+ gint max_errors;
+
+ /* upstream stream tags (global tags are passed through as-is) */
+ GstTagList *upstream_tags;
+
+ /* subclass tags */
+ GstTagList *taglist; /* FIXME: rename to decoder_tags */
+ GstTagMergeMode decoder_tags_merge_mode;
+
+ gboolean taglist_changed; /* FIXME: rename to tags_changed */
+
+ /* whether circumstances allow output aggregation */
+ gint agg;
+
+ /* reverse playback queues */
+ /* collect input */
+ GList *gather;
+ /* to-be-decoded */
+ GList *decode;
+ /* reversed output */
+ GList *queued;
+
+ /* context storage */
+ GstAudioDecoderContext ctx;
+
+ /* properties */
+ GstClockTime latency;
+ GstClockTime tolerance;
+ gboolean plc;
+ gboolean drainable;
+ gboolean needs_format;
+
+ /* pending serialized sink events, will be sent from finish_frame() */
+ GList *pending_events;
+
+ /* flags */
+ gboolean use_default_pad_acceptcaps;
+ };
+
+ /* cached quark to avoid contention on the global quark table lock */
+ #define META_TAG_AUDIO meta_tag_audio_quark
+ static GQuark meta_tag_audio_quark;
+
+ /* forward declarations for GObject vmethods, pad functions and default
+ * class-vfunc implementations (definitions follow below) */
+ static void gst_audio_decoder_finalize (GObject * object);
+ static void gst_audio_decoder_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+ static void gst_audio_decoder_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+
+ static void gst_audio_decoder_clear_queues (GstAudioDecoder * dec);
+ static GstFlowReturn gst_audio_decoder_chain_reverse (GstAudioDecoder *
+ dec, GstBuffer * buf);
+
+ static GstStateChangeReturn gst_audio_decoder_change_state (GstElement *
+ element, GstStateChange transition);
+ static gboolean gst_audio_decoder_sink_eventfunc (GstAudioDecoder * dec,
+ GstEvent * event);
+ static gboolean gst_audio_decoder_src_eventfunc (GstAudioDecoder * dec,
+ GstEvent * event);
+ static gboolean gst_audio_decoder_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+ static gboolean gst_audio_decoder_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+ static gboolean gst_audio_decoder_sink_setcaps (GstAudioDecoder * dec,
+ GstCaps * caps);
+ static GstFlowReturn gst_audio_decoder_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf);
+ static gboolean gst_audio_decoder_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+ static gboolean gst_audio_decoder_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+ static void gst_audio_decoder_reset (GstAudioDecoder * dec, gboolean full);
+
+ static gboolean gst_audio_decoder_decide_allocation_default (GstAudioDecoder *
+ dec, GstQuery * query);
+ static gboolean gst_audio_decoder_propose_allocation_default (GstAudioDecoder *
+ dec, GstQuery * query);
+ static gboolean gst_audio_decoder_negotiate_default (GstAudioDecoder * dec);
+ static gboolean gst_audio_decoder_negotiate_unlocked (GstAudioDecoder * dec);
+ static gboolean gst_audio_decoder_handle_gap (GstAudioDecoder * dec,
+ GstEvent * event);
+ static gboolean gst_audio_decoder_sink_query_default (GstAudioDecoder * dec,
+ GstQuery * query);
+ static gboolean gst_audio_decoder_src_query_default (GstAudioDecoder * dec,
+ GstQuery * query);
+
+ static gboolean gst_audio_decoder_transform_meta_default (GstAudioDecoder *
+ decoder, GstBuffer * outbuf, GstMeta * meta, GstBuffer * inbuf);
+
+ static GstFlowReturn
+ gst_audio_decoder_finish_frame_or_subframe (GstAudioDecoder * dec,
+ GstBuffer * buf, gint frames);
+
+ static GstElementClass *parent_class = NULL;
+ static gint private_offset = 0;
+
+ static void gst_audio_decoder_class_init (GstAudioDecoderClass * klass);
+ static void gst_audio_decoder_init (GstAudioDecoder * dec,
+ GstAudioDecoderClass * klass);
+
+ /* Thread-safe, one-time GType registration for the abstract base class.
+ * Uses the manual (pre-G_DEFINE_TYPE) pattern so the instance-private data
+ * offset can be recorded for gst_audio_decoder_get_instance_private(). */
+ GType
+ gst_audio_decoder_get_type (void)
+ {
+ static gsize audio_decoder_type = 0;
+
+ if (g_once_init_enter (&audio_decoder_type)) {
+ GType _type;
+ static const GTypeInfo audio_decoder_info = {
+ sizeof (GstAudioDecoderClass),
+ NULL,
+ NULL,
+ (GClassInitFunc) gst_audio_decoder_class_init,
+ NULL,
+ NULL,
+ sizeof (GstAudioDecoder),
+ 0,
+ (GInstanceInitFunc) gst_audio_decoder_init,
+ };
+
+ _type = g_type_register_static (GST_TYPE_ELEMENT,
+ "GstAudioDecoder", &audio_decoder_info, G_TYPE_FLAG_ABSTRACT);
+
+ private_offset =
+ g_type_add_instance_private (_type, sizeof (GstAudioDecoderPrivate));
+
+ g_once_init_leave (&audio_decoder_type, _type);
+ }
+ return audio_decoder_type;
+ }
+
+ /* Returns the per-instance private struct located at the offset recorded
+ * during type registration (see gst_audio_decoder_get_type()). */
+ static inline GstAudioDecoderPrivate *
+ gst_audio_decoder_get_instance_private (GstAudioDecoder * self)
+ {
+ return (G_STRUCT_MEMBER_P (self, private_offset));
+ }
+
+ /* Class initializer: installs properties, wires GObject/GstElement vmethods
+ * and the default implementations of the subclass-overridable vfuncs. */
+ static void
+ gst_audio_decoder_class_init (GstAudioDecoderClass * klass)
+ {
+ GObjectClass *gobject_class;
+ GstElementClass *element_class;
+ GstAudioDecoderClass *audiodecoder_class;
+
+ gobject_class = G_OBJECT_CLASS (klass);
+ element_class = GST_ELEMENT_CLASS (klass);
+ audiodecoder_class = GST_AUDIO_DECODER_CLASS (klass);
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ if (private_offset != 0)
+ g_type_class_adjust_private_offset (klass, &private_offset);
+
+ GST_DEBUG_CATEGORY_INIT (audiodecoder_debug, "audiodecoder", 0,
+ "audio decoder base class");
+
+ gobject_class->set_property = gst_audio_decoder_set_property;
+ gobject_class->get_property = gst_audio_decoder_get_property;
+ gobject_class->finalize = gst_audio_decoder_finalize;
+
+ element_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_audio_decoder_change_state);
+
+ /* Properties */
+ g_object_class_install_property (gobject_class, PROP_LATENCY,
+ g_param_spec_int64 ("min-latency", "Minimum Latency",
+ "Aggregate output data to a minimum of latency time (ns)",
+ 0, G_MAXINT64, DEFAULT_LATENCY,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_TOLERANCE,
+ g_param_spec_int64 ("tolerance", "Tolerance",
+ "Perfect ts while timestamp jitter/imperfection within tolerance (ns)",
+ 0, G_MAXINT64, DEFAULT_TOLERANCE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_PLC,
+ g_param_spec_boolean ("plc", "Packet Loss Concealment",
+ "Perform packet loss concealment (if supported)",
+ DEFAULT_PLC, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstAudioDecoder:max-errors:
+ *
+ * Maximum number of tolerated consecutive decode errors. See
+ * gst_audio_decoder_set_max_errors() for more details.
+ *
+ * Since: 1.18
+ */
+ g_object_class_install_property (gobject_class, PROP_MAX_ERRORS,
+ g_param_spec_int ("max-errors", "Max errors",
+ "Max consecutive decoder errors before returning flow error",
+ -1, G_MAXINT, DEFAULT_MAX_ERRORS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /* default vfunc implementations; subclasses may override and chain up */
+ audiodecoder_class->sink_event =
+ GST_DEBUG_FUNCPTR (gst_audio_decoder_sink_eventfunc);
+ audiodecoder_class->src_event =
+ GST_DEBUG_FUNCPTR (gst_audio_decoder_src_eventfunc);
+ audiodecoder_class->propose_allocation =
+ GST_DEBUG_FUNCPTR (gst_audio_decoder_propose_allocation_default);
+ audiodecoder_class->decide_allocation =
+ GST_DEBUG_FUNCPTR (gst_audio_decoder_decide_allocation_default);
+ audiodecoder_class->negotiate =
+ GST_DEBUG_FUNCPTR (gst_audio_decoder_negotiate_default);
+ audiodecoder_class->sink_query =
+ GST_DEBUG_FUNCPTR (gst_audio_decoder_sink_query_default);
+ audiodecoder_class->src_query =
+ GST_DEBUG_FUNCPTR (gst_audio_decoder_src_query_default);
+ audiodecoder_class->transform_meta =
+ GST_DEBUG_FUNCPTR (gst_audio_decoder_transform_meta_default);
+
+ meta_tag_audio_quark = g_quark_from_static_string (GST_META_TAG_AUDIO_STR);
+ }
+
+ /* Instance initializer: creates sink/src pads from the subclass-provided
+ * pad templates (which MUST be named "sink" and "src"), sets up adapters,
+ * the stream lock and property defaults, then does a full state reset. */
+ static void
+ gst_audio_decoder_init (GstAudioDecoder * dec, GstAudioDecoderClass * klass)
+ {
+ GstPadTemplate *pad_template;
+
+ GST_DEBUG_OBJECT (dec, "gst_audio_decoder_init");
+
+ dec->priv = gst_audio_decoder_get_instance_private (dec);
+
+ /* Setup sink pad */
+ pad_template =
+ gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "sink");
+ g_return_if_fail (pad_template != NULL);
+
+ dec->sinkpad = gst_pad_new_from_template (pad_template, "sink");
+ gst_pad_set_event_function (dec->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_audio_decoder_sink_event));
+ gst_pad_set_chain_function (dec->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_audio_decoder_chain));
+ gst_pad_set_query_function (dec->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_audio_decoder_sink_query));
+ gst_element_add_pad (GST_ELEMENT (dec), dec->sinkpad);
+ GST_DEBUG_OBJECT (dec, "sinkpad created");
+
+ /* Setup source pad */
+ pad_template =
+ gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "src");
+ g_return_if_fail (pad_template != NULL);
+
+ dec->srcpad = gst_pad_new_from_template (pad_template, "src");
+ gst_pad_set_event_function (dec->srcpad,
+ GST_DEBUG_FUNCPTR (gst_audio_decoder_src_event));
+ gst_pad_set_query_function (dec->srcpad,
+ GST_DEBUG_FUNCPTR (gst_audio_decoder_src_query));
+ gst_element_add_pad (GST_ELEMENT (dec), dec->srcpad);
+ GST_DEBUG_OBJECT (dec, "srcpad created");
+
+ dec->priv->adapter = gst_adapter_new ();
+ dec->priv->adapter_out = gst_adapter_new ();
+ g_queue_init (&dec->priv->frames);
+
+ g_rec_mutex_init (&dec->stream_lock);
+
+ /* property default */
+ dec->priv->latency = DEFAULT_LATENCY;
+ dec->priv->tolerance = DEFAULT_TOLERANCE;
+ dec->priv->plc = DEFAULT_PLC;
+ dec->priv->drainable = DEFAULT_DRAINABLE;
+ dec->priv->needs_format = DEFAULT_NEEDS_FORMAT;
+ dec->priv->max_errors = GST_AUDIO_DECODER_MAX_ERRORS;
+
+ /* init state */
+ dec->priv->ctx.min_latency = 0;
+ dec->priv->ctx.max_latency = 0;
+ gst_audio_decoder_reset (dec, TRUE);
+ GST_DEBUG_OBJECT (dec, "init ok");
+ }
+
+ /* Resets decoder state under the stream lock.
+ * @full: TRUE resets everything (tags, segments, caps, allocator, whole
+ * context struct); FALSE only clears the per-segment frame/adapter/ts state
+ * so it can be used for flush-between-segments. */
+ static void
+ gst_audio_decoder_reset (GstAudioDecoder * dec, gboolean full)
+ {
+ GST_DEBUG_OBJECT (dec, "gst_audio_decoder_reset");
+
+ GST_AUDIO_DECODER_STREAM_LOCK (dec);
+
+ if (full) {
+ dec->priv->active = FALSE;
+ GST_OBJECT_LOCK (dec);
+ dec->priv->bytes_in = 0;
+ dec->priv->samples_out = 0;
+ GST_OBJECT_UNLOCK (dec);
+ dec->priv->agg = -1;
+ dec->priv->error_count = 0;
+ gst_audio_decoder_clear_queues (dec);
+
+ if (dec->priv->taglist) {
+ gst_tag_list_unref (dec->priv->taglist);
+ dec->priv->taglist = NULL;
+ }
+ dec->priv->decoder_tags_merge_mode = GST_TAG_MERGE_KEEP_ALL;
+ if (dec->priv->upstream_tags) {
+ gst_tag_list_unref (dec->priv->upstream_tags);
+ dec->priv->upstream_tags = NULL;
+ }
+ dec->priv->taglist_changed = FALSE;
+
+ gst_segment_init (&dec->input_segment, GST_FORMAT_TIME);
+ gst_segment_init (&dec->output_segment, GST_FORMAT_TIME);
+ dec->priv->in_out_segment_sync = TRUE;
+
+ g_list_foreach (dec->priv->pending_events, (GFunc) gst_event_unref, NULL);
+ g_list_free (dec->priv->pending_events);
+ dec->priv->pending_events = NULL;
+
+ if (dec->priv->ctx.allocator)
+ gst_object_unref (dec->priv->ctx.allocator);
+
+ GST_OBJECT_LOCK (dec);
+ dec->priv->decode_flags_override = FALSE;
+ /* release caps refs before the context is wiped below */
+ gst_caps_replace (&dec->priv->ctx.input_caps, NULL);
+ gst_caps_replace (&dec->priv->ctx.caps, NULL);
+ gst_caps_replace (&dec->priv->ctx.allocation_caps, NULL);
+
+ /* wipe the whole context (also clears the unreffed allocator pointer) */
+ memset (&dec->priv->ctx, 0, sizeof (dec->priv->ctx));
+
+ gst_audio_info_init (&dec->priv->ctx.info);
+ GST_OBJECT_UNLOCK (dec);
+ dec->priv->ctx.had_output_data = FALSE;
+ dec->priv->ctx.had_input_data = FALSE;
+ }
+
+ g_queue_foreach (&dec->priv->frames, (GFunc) gst_buffer_unref, NULL);
+ g_queue_clear (&dec->priv->frames);
+ gst_adapter_clear (dec->priv->adapter);
+ gst_adapter_clear (dec->priv->adapter_out);
+ dec->priv->out_ts = GST_CLOCK_TIME_NONE;
+ dec->priv->out_dur = 0;
+ dec->priv->prev_ts = GST_CLOCK_TIME_NONE;
+ dec->priv->prev_distance = 0;
+ dec->priv->drained = TRUE;
+ dec->priv->base_ts = GST_CLOCK_TIME_NONE;
+ dec->priv->samples = 0;
+ dec->priv->discont = TRUE;
+ dec->priv->sync_flush = FALSE;
+
+ GST_AUDIO_DECODER_STREAM_UNLOCK (dec);
+ }
+
+ /* GObject finalize: releases the adapters and the stream lock, then chains
+ * up to the parent class. */
+ static void
+ gst_audio_decoder_finalize (GObject * object)
+ {
+ GstAudioDecoder *dec;
+
+ g_return_if_fail (GST_IS_AUDIO_DECODER (object));
+ dec = GST_AUDIO_DECODER (object);
+
+ if (dec->priv->adapter) {
+ g_object_unref (dec->priv->adapter);
+ }
+ if (dec->priv->adapter_out) {
+ g_object_unref (dec->priv->adapter_out);
+ }
+
+ g_rec_mutex_clear (&dec->stream_lock);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ /* Merges upstream stream tags with the subclass-set decoder tags using the
+ * configured merge mode. Returns a new TAG event (caller owns it), or NULL
+ * when the merge result is NULL or empty. */
+ static GstEvent *
+ gst_audio_decoder_create_merged_tags_event (GstAudioDecoder * dec)
+ {
+ GstTagList *merged_tags;
+
+ GST_LOG_OBJECT (dec, "upstream : %" GST_PTR_FORMAT, dec->priv->upstream_tags);
+ GST_LOG_OBJECT (dec, "decoder : %" GST_PTR_FORMAT, dec->priv->taglist);
+ GST_LOG_OBJECT (dec, "mode : %d", dec->priv->decoder_tags_merge_mode);
+
+ merged_tags =
+ gst_tag_list_merge (dec->priv->upstream_tags,
+ dec->priv->taglist, dec->priv->decoder_tags_merge_mode);
+
+ GST_DEBUG_OBJECT (dec, "merged : %" GST_PTR_FORMAT, merged_tags);
+
+ if (merged_tags == NULL)
+ return NULL;
+
+ if (gst_tag_list_is_empty (merged_tags)) {
+ gst_tag_list_unref (merged_tags);
+ return NULL;
+ }
+
+ return gst_event_new_tag (merged_tags);
+ }
+
+ /* Pushes @event on the source pad; for SEGMENT events it first records the
+ * segment as the output segment and updates in/out segment sync state under
+ * the stream lock. Consumes @event. */
+ static gboolean
+ gst_audio_decoder_push_event (GstAudioDecoder * dec, GstEvent * event)
+ {
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEGMENT:{
+ GstSegment seg;
+
+ GST_AUDIO_DECODER_STREAM_LOCK (dec);
+ gst_event_copy_segment (event, &seg);
+
+ GST_DEBUG_OBJECT (dec, "starting segment %" GST_SEGMENT_FORMAT, &seg);
+
+ dec->output_segment = seg;
+ dec->priv->in_out_segment_sync =
+ gst_segment_is_equal (&dec->input_segment, &seg);
+ GST_AUDIO_DECODER_STREAM_UNLOCK (dec);
+ break;
+ }
+ default:
+ break;
+ }
+
+ return gst_pad_push_event (dec->srcpad, event);
+ }
+
+ /* Default negotiate implementation: flushes pre-CAPS pending events, sets
+ * the configured caps on the source pad, runs the downstream ALLOCATION
+ * query, lets the subclass decide allocation, and stores the negotiated
+ * allocator/params in the context. Returns FALSE on failure.
+ * (Fix: "&params" had been corrupted to the mojibake "¶ms" in two
+ * calls below, which cannot compile.) */
+ static gboolean
+ gst_audio_decoder_negotiate_default (GstAudioDecoder * dec)
+ {
+ GstAudioDecoderClass *klass;
+ gboolean res = TRUE;
+ GstCaps *caps;
+ GstCaps *prevcaps;
+ GstQuery *query = NULL;
+ GstAllocator *allocator;
+ GstAllocationParams params;
+
+ g_return_val_if_fail (GST_IS_AUDIO_DECODER (dec), FALSE);
+ g_return_val_if_fail (GST_AUDIO_INFO_IS_VALID (&dec->priv->ctx.info), FALSE);
+ g_return_val_if_fail (GST_IS_CAPS (dec->priv->ctx.caps), FALSE);
+
+ klass = GST_AUDIO_DECODER_GET_CLASS (dec);
+
+ caps = dec->priv->ctx.caps;
+ if (dec->priv->ctx.allocation_caps == NULL)
+ dec->priv->ctx.allocation_caps = gst_caps_ref (caps);
+
+ GST_DEBUG_OBJECT (dec, "setting src caps %" GST_PTR_FORMAT, caps);
+
+ /* serialized events queued before CAPS must go out first */
+ if (dec->priv->pending_events) {
+ GList **pending_events, *l;
+
+ pending_events = &dec->priv->pending_events;
+
+ GST_DEBUG_OBJECT (dec, "Pushing pending events");
+ for (l = *pending_events; l;) {
+ GstEvent *event = GST_EVENT (l->data);
+ GList *tmp;
+
+ if (GST_EVENT_TYPE (event) < GST_EVENT_CAPS) {
+ gst_audio_decoder_push_event (dec, l->data);
+ tmp = l;
+ l = l->next;
+ *pending_events = g_list_delete_link (*pending_events, tmp);
+ } else {
+ l = l->next;
+ }
+ }
+ }
+
+ prevcaps = gst_pad_get_current_caps (dec->srcpad);
+ if (!prevcaps || !gst_caps_is_equal (prevcaps, caps))
+ res = gst_pad_set_caps (dec->srcpad, caps);
+ if (prevcaps)
+ gst_caps_unref (prevcaps);
+
+ if (!res)
+ goto done;
+ dec->priv->ctx.output_format_changed = FALSE;
+
+ query = gst_query_new_allocation (dec->priv->ctx.allocation_caps, TRUE);
+ if (!gst_pad_peer_query (dec->srcpad, query)) {
+ GST_DEBUG_OBJECT (dec, "didn't get downstream ALLOCATION hints");
+ }
+
+ g_assert (klass->decide_allocation != NULL);
+ res = klass->decide_allocation (dec, query);
+
+ GST_DEBUG_OBJECT (dec, "ALLOCATION (%d) params: %" GST_PTR_FORMAT, res,
+ query);
+
+ if (!res)
+ goto no_decide_allocation;
+
+ /* we got configuration from our peer or the decide_allocation method,
+ * parse them */
+ if (gst_query_get_n_allocation_params (query) > 0) {
+ gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
+ } else {
+ allocator = NULL;
+ gst_allocation_params_init (&params);
+ }
+
+ if (dec->priv->ctx.allocator)
+ gst_object_unref (dec->priv->ctx.allocator);
+ dec->priv->ctx.allocator = allocator;
+ dec->priv->ctx.params = params;
+
+ done:
+
+ if (query)
+ gst_query_unref (query);
+
+ return res;
+
+ /* ERRORS */
+ no_decide_allocation:
+ {
+ GST_WARNING_OBJECT (dec, "Subclass failed to decide allocation");
+ goto done;
+ }
+ }
+
+ /* Invokes the negotiate vfunc without taking the stream lock; callers must
+ * already hold it. Returns TRUE when no vfunc is set. */
+ static gboolean
+ gst_audio_decoder_negotiate_unlocked (GstAudioDecoder * dec)
+ {
+ GstAudioDecoderClass *klass = GST_AUDIO_DECODER_GET_CLASS (dec);
+ gboolean ret = TRUE;
+
+ if (G_LIKELY (klass->negotiate))
+ ret = klass->negotiate (dec);
+
+ return ret;
+ }
+
+ /**
+ * gst_audio_decoder_negotiate:
+ * @dec: a #GstAudioDecoder
+ *
+ * Negotiate with downstream elements to currently configured #GstAudioInfo.
+ * Unmark GST_PAD_FLAG_NEED_RECONFIGURE in any case. But mark it again if
+ * negotiate fails.
+ *
+ * Returns: %TRUE if the negotiation succeeded, else %FALSE.
+ */
+ gboolean
+ gst_audio_decoder_negotiate (GstAudioDecoder * dec)
+ {
+ GstAudioDecoderClass *klass;
+ gboolean res = TRUE;
+
+ g_return_val_if_fail (GST_IS_AUDIO_DECODER (dec), FALSE);
+
+ klass = GST_AUDIO_DECODER_GET_CLASS (dec);
+
+ GST_AUDIO_DECODER_STREAM_LOCK (dec);
+ /* clears NEED_RECONFIGURE; re-marked below if the vfunc fails */
+ gst_pad_check_reconfigure (dec->srcpad);
+ if (klass->negotiate) {
+ res = klass->negotiate (dec);
+ if (!res)
+ gst_pad_mark_reconfigure (dec->srcpad);
+ }
+ GST_AUDIO_DECODER_STREAM_UNLOCK (dec);
+
+ return res;
+ }
+
+ /**
+ * gst_audio_decoder_set_output_format:
+ * @dec: a #GstAudioDecoder
+ * @info: #GstAudioInfo
+ *
+ * Configure output info on the srcpad of @dec.
+ *
+ * Convenience wrapper converting @info to caps and delegating to
+ * gst_audio_decoder_set_output_caps().
+ *
+ * Returns: %TRUE on success.
+ **/
+ gboolean
+ gst_audio_decoder_set_output_format (GstAudioDecoder * dec,
+ const GstAudioInfo * info)
+ {
+ gboolean res = TRUE;
+ GstCaps *caps = NULL;
+
+ g_return_val_if_fail (GST_IS_AUDIO_DECODER (dec), FALSE);
+ g_return_val_if_fail (GST_AUDIO_INFO_IS_VALID (info), FALSE);
+
+ /* If the audio info can't be converted to caps,
+ * it was invalid */
+ caps = gst_audio_info_to_caps (info);
+ if (!caps) {
+ GST_WARNING_OBJECT (dec, "invalid output format");
+ return FALSE;
+ }
+
+ res = gst_audio_decoder_set_output_caps (dec, caps);
+ gst_caps_unref (caps);
+
+ return res;
+ }
+
/**
 * gst_audio_decoder_set_output_caps:
 * @dec: a #GstAudioDecoder
 * @caps: (transfer none): (fixed) #GstCaps
 *
 * Configure output caps on the srcpad of @dec. Similar to
 * gst_audio_decoder_set_output_format(), but allows subclasses to specify
 * output caps that can't be expressed via #GstAudioInfo e.g. caps that have
 * caps features.
 *
 * Returns: %TRUE on success.
 *
 * Since: 1.16
 **/
gboolean
gst_audio_decoder_set_output_caps (GstAudioDecoder * dec, GstCaps * caps)
{
  gboolean res = TRUE;
  guint old_rate;
  GstCaps *templ_caps;
  GstAudioInfo info;

  g_return_val_if_fail (GST_IS_AUDIO_DECODER (dec), FALSE);

  GST_DEBUG_OBJECT (dec, "Setting srcpad caps %" GST_PTR_FORMAT, caps);

  GST_AUDIO_DECODER_STREAM_LOCK (dec);

  /* only fixed caps describe one concrete output format */
  if (!gst_caps_is_fixed (caps))
    goto refuse_caps;

  /* check if caps can be parsed */
  if (!gst_audio_info_from_caps (&info, caps))
    goto refuse_caps;

  /* Only allow caps that are a subset of the template caps */
  templ_caps = gst_pad_get_pad_template_caps (dec->srcpad);
  if (!gst_caps_is_subset (caps, templ_caps)) {
    GST_WARNING_OBJECT (dec, "Requested output format %" GST_PTR_FORMAT
        " do not match template %" GST_PTR_FORMAT, caps, templ_caps);
    gst_caps_unref (templ_caps);
    goto refuse_caps;
  }
  gst_caps_unref (templ_caps);

  /* adjust ts tracking to new sample rate: fold samples produced at the
   * old rate into base_ts so the timeline stays continuous */
  old_rate = GST_AUDIO_INFO_RATE (&dec->priv->ctx.info);
  if (GST_CLOCK_TIME_IS_VALID (dec->priv->base_ts) && old_rate) {
    dec->priv->base_ts +=
        GST_FRAMES_TO_CLOCK_TIME (dec->priv->samples, old_rate);
    dec->priv->samples = 0;
  }

  /* copy the GstAudioInfo; object lock protects concurrent readers */
  GST_OBJECT_LOCK (dec);
  dec->priv->ctx.info = info;
  GST_OBJECT_UNLOCK (dec);

  gst_caps_replace (&dec->priv->ctx.caps, caps);
  /* actual renegotiation downstream happens lazily on next output */
  dec->priv->ctx.output_format_changed = TRUE;

done:
  GST_AUDIO_DECODER_STREAM_UNLOCK (dec);

  return res;

  /* ERRORS */
refuse_caps:
  {
    GST_WARNING_OBJECT (dec, "invalid output format");
    res = FALSE;
    goto done;
  }
}
+
+ static gboolean
+ gst_audio_decoder_sink_setcaps (GstAudioDecoder * dec, GstCaps * caps)
+ {
+ GstAudioDecoderClass *klass;
+ gboolean res = TRUE;
+
+ klass = GST_AUDIO_DECODER_GET_CLASS (dec);
+
+ GST_DEBUG_OBJECT (dec, "caps: %" GST_PTR_FORMAT, caps);
+
+ GST_AUDIO_DECODER_STREAM_LOCK (dec);
+
+ if (dec->priv->ctx.input_caps
+ && gst_caps_is_equal (dec->priv->ctx.input_caps, caps)) {
+ GST_DEBUG_OBJECT (dec, "Caps did not change, not setting again");
+ goto done;
+ }
+
+ /* NOTE pbutils only needed here */
+ /* TODO maybe (only) upstream demuxer/parser etc should handle this ? */
+ #if 0
+ if (!dec->priv->taglist)
+ dec->priv->taglist = gst_tag_list_new ();
+ dec->priv->taglist = gst_tag_list_make_writable (dec->priv->taglist);
+ gst_pb_utils_add_codec_description_to_tag_list (dec->priv->taglist,
+ GST_TAG_AUDIO_CODEC, caps);
+ dec->priv->taglist_changed = TRUE;
+ #endif
+
+ if (klass->set_format)
+ res = klass->set_format (dec, caps);
+
+ if (res)
+ gst_caps_replace (&dec->priv->ctx.input_caps, caps);
+
+ done:
+ GST_AUDIO_DECODER_STREAM_UNLOCK (dec);
+
+ return res;
+ }
+
+ static void
+ gst_audio_decoder_setup (GstAudioDecoder * dec)
+ {
+ GstQuery *query;
+ gboolean res;
+
+ /* check if in live pipeline, then latency messing is no-no */
+ query = gst_query_new_latency ();
+ res = gst_pad_peer_query (dec->sinkpad, query);
+ if (res) {
+ gst_query_parse_latency (query, &res, NULL, NULL);
+ res = !res;
+ }
+ gst_query_unref (query);
+
+ /* normalize to bool */
+ dec->priv->agg = ! !res;
+ }
+
/* Clip a decoded buffer to the output segment, decorate it (discont,
 * position tracking), give the subclass a last look via pre_push, and
 * push it out the source pad.  Forward playback only. */
static GstFlowReturn
gst_audio_decoder_push_forward (GstAudioDecoder * dec, GstBuffer * buf)
{
  GstAudioDecoderClass *klass;
  GstAudioDecoderPrivate *priv;
  GstAudioDecoderContext *ctx;
  GstFlowReturn ret = GST_FLOW_OK;
  GstClockTime ts;

  klass = GST_AUDIO_DECODER_GET_CLASS (dec);
  priv = dec->priv;
  ctx = &dec->priv->ctx;

  /* pushing raw audio without knowing bytes-per-frame is a caller bug */
  g_return_val_if_fail (ctx->info.bpf != 0, GST_FLOW_ERROR);

  if (G_UNLIKELY (!buf)) {
    g_assert_not_reached ();
    return GST_FLOW_OK;
  }

  ctx->had_output_data = TRUE;
  /* remember original ts; clipping may drop the buffer entirely */
  ts = GST_BUFFER_PTS (buf);

  GST_LOG_OBJECT (dec,
      "clipping buffer of size %" G_GSIZE_FORMAT " with ts %" GST_TIME_FORMAT
      ", duration %" GST_TIME_FORMAT, gst_buffer_get_size (buf),
      GST_TIME_ARGS (GST_BUFFER_PTS (buf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));

  /* clip buffer */
  buf = gst_audio_buffer_clip (buf, &dec->output_segment, ctx->info.rate,
      ctx->info.bpf);
  if (G_UNLIKELY (!buf)) {
    GST_DEBUG_OBJECT (dec, "no data after clipping to segment");
    /* only check and return EOS if upstream still
     * in the same segment and interested as such */
    if (dec->priv->in_out_segment_sync) {
      if (dec->output_segment.rate >= 0) {
        if (ts >= dec->output_segment.stop)
          ret = GST_FLOW_EOS;
      } else if (ts < dec->output_segment.start) {
        ret = GST_FLOW_EOS;
      }
    }
    goto exit;
  }

  /* decorate: propagate a pending discont onto the first pushed buffer */
  if (G_UNLIKELY (priv->discont)) {
    GST_LOG_OBJECT (dec, "marking discont");
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
    priv->discont = FALSE;
  }

  /* track where we are */
  if (G_LIKELY (GST_BUFFER_PTS_IS_VALID (buf))) {
    /* duration should always be valid for raw audio */
    g_assert (GST_BUFFER_DURATION_IS_VALID (buf));
    dec->output_segment.position =
        GST_BUFFER_PTS (buf) + GST_BUFFER_DURATION (buf);
  }

  if (klass->pre_push) {
    /* last chance for subclass to do some dirty stuff */
    ret = klass->pre_push (dec, &buf);
    if (ret != GST_FLOW_OK || !buf) {
      GST_DEBUG_OBJECT (dec, "subclass returned %s, buf %p",
          gst_flow_get_name (ret), buf);
      if (buf)
        gst_buffer_unref (buf);
      goto exit;
    }
  }

  GST_LOG_OBJECT (dec,
      "pushing buffer of size %" G_GSIZE_FORMAT " with ts %" GST_TIME_FORMAT
      ", duration %" GST_TIME_FORMAT, gst_buffer_get_size (buf),
      GST_TIME_ARGS (GST_BUFFER_PTS (buf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));

  ret = gst_pad_push (dec->srcpad, buf);

exit:
  return ret;
}
+
/* mini aggregator combining output buffers into fewer larger ones,
 * if so allowed/configured */
static GstFlowReturn
gst_audio_decoder_output (GstAudioDecoder * dec, GstBuffer * buf)
{
  GstAudioDecoderPrivate *priv;
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *inbuf = NULL;

  priv = dec->priv;

  /* agg < 0 means the live/latency probe has not run yet */
  if (G_UNLIKELY (priv->agg < 0))
    gst_audio_decoder_setup (dec);

  if (G_LIKELY (buf)) {
    GST_LOG_OBJECT (dec,
        "output buffer of size %" G_GSIZE_FORMAT " with ts %" GST_TIME_FORMAT
        ", duration %" GST_TIME_FORMAT, gst_buffer_get_size (buf),
        GST_TIME_ARGS (GST_BUFFER_PTS (buf)),
        GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
  }

again:
  inbuf = NULL;
  /* aggregation only for interleaved layout and a configured latency */
  if (priv->agg && dec->priv->latency > 0 &&
      priv->ctx.info.layout == GST_AUDIO_LAYOUT_INTERLEAVED) {
    gint av;
    gboolean assemble = FALSE;
    const GstClockTimeDiff tol = 10 * GST_MSECOND;
    GstClockTimeDiff diff = -100 * GST_MSECOND;

    av = gst_adapter_available (priv->adapter_out);
    if (G_UNLIKELY (!buf)) {
      /* forcibly send current */
      assemble = TRUE;
      GST_LOG_OBJECT (dec, "forcing fragment flush");
    } else if (av && (!GST_BUFFER_PTS_IS_VALID (buf) ||
            !GST_CLOCK_TIME_IS_VALID (priv->out_ts) ||
            ((diff = GST_CLOCK_DIFF (GST_BUFFER_PTS (buf),
                        priv->out_ts + priv->out_dur)) > tol) || diff < -tol)) {
      /* new buffer does not line up with the pending fragment
       * (missing/unknown ts or gap beyond tolerance): flush first */
      assemble = TRUE;
      GST_LOG_OBJECT (dec, "buffer %d ms apart from current fragment",
          (gint) (diff / GST_MSECOND));
    } else {
      /* add or start collecting */
      if (!av) {
        GST_LOG_OBJECT (dec, "starting new fragment");
        priv->out_ts = GST_BUFFER_PTS (buf);
      } else {
        GST_LOG_OBJECT (dec, "adding to fragment");
      }
      /* adapter takes ownership; clear buf so it is not pushed below */
      gst_adapter_push (priv->adapter_out, buf);
      priv->out_dur += GST_BUFFER_DURATION (buf);
      av += gst_buffer_get_size (buf);
      buf = NULL;
    }
    /* cap fragment duration at the configured latency */
    if (priv->out_dur > dec->priv->latency)
      assemble = TRUE;
    if (av && assemble) {
      GST_LOG_OBJECT (dec, "assembling fragment");
      /* stash the unmerged buffer; it is re-processed after the
       * assembled fragment has been pushed (see goto again) */
      inbuf = buf;
      buf = gst_adapter_take_buffer (priv->adapter_out, av);
      GST_BUFFER_PTS (buf) = priv->out_ts;
      GST_BUFFER_DURATION (buf) = priv->out_dur;
      priv->out_ts = GST_CLOCK_TIME_NONE;
      priv->out_dur = 0;
    }
  }

  if (G_LIKELY (buf)) {
    if (dec->output_segment.rate > 0.0) {
      /* forward playback: push immediately */
      ret = gst_audio_decoder_push_forward (dec, buf);
      GST_LOG_OBJECT (dec, "buffer pushed: %s", gst_flow_get_name (ret));
    } else {
      /* reverse playback: queue for later (reversed) sending */
      ret = GST_FLOW_OK;
      priv->queued = g_list_prepend (priv->queued, buf);
      GST_LOG_OBJECT (dec, "buffer queued");
    }

    if (inbuf) {
      /* now handle the buffer that triggered the fragment flush */
      buf = inbuf;
      goto again;
    }
  }

  return ret;
}
+
+ static void
+ send_pending_events (GstAudioDecoder * dec)
+ {
+ GstAudioDecoderPrivate *priv = dec->priv;
+ GList *pending_events, *l;
+
+ pending_events = priv->pending_events;
+ priv->pending_events = NULL;
+
+ GST_DEBUG_OBJECT (dec, "Pushing pending events");
+ for (l = pending_events; l; l = l->next)
+ gst_audio_decoder_push_event (dec, l->data);
+ g_list_free (pending_events);
+ }
+
+ /* Iterate the list of pending events, and ensure
+ * the current output segment is up to date for
+ * decoding */
+ static void
+ apply_pending_events (GstAudioDecoder * dec)
+ {
+ GstAudioDecoderPrivate *priv = dec->priv;
+ GList *l;
+
+ GST_DEBUG_OBJECT (dec, "Applying pending segments");
+ for (l = priv->pending_events; l; l = l->next) {
+ GstEvent *event = GST_EVENT (l->data);
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEGMENT:{
+ GstSegment seg;
+
+ GST_AUDIO_DECODER_STREAM_LOCK (dec);
+ gst_event_copy_segment (event, &seg);
+
+ GST_DEBUG_OBJECT (dec, "starting segment %" GST_SEGMENT_FORMAT, &seg);
+
+ dec->output_segment = seg;
+ dec->priv->in_out_segment_sync =
+ gst_segment_is_equal (&dec->input_segment, &seg);
+ GST_AUDIO_DECODER_STREAM_UNLOCK (dec);
+ break;
+ }
+ default:
+ break;
+ }
+ }
+ }
+
+ static GstFlowReturn
+ check_pending_reconfigure (GstAudioDecoder * dec)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstAudioDecoderContext *ctx;
+ gboolean needs_reconfigure;
+
+ ctx = &dec->priv->ctx;
+
+ needs_reconfigure = gst_pad_check_reconfigure (dec->srcpad);
+ if (G_UNLIKELY (ctx->output_format_changed ||
+ (GST_AUDIO_INFO_IS_VALID (&ctx->info)
+ && needs_reconfigure))) {
+ if (!gst_audio_decoder_negotiate_unlocked (dec)) {
+ gst_pad_mark_reconfigure (dec->srcpad);
+ if (GST_PAD_IS_FLUSHING (dec->srcpad))
+ ret = GST_FLOW_FLUSHING;
+ else
+ ret = GST_FLOW_NOT_NEGOTIATED;
+ }
+ }
+ return ret;
+ }
+
+ static gboolean
+ gst_audio_decoder_transform_meta_default (GstAudioDecoder *
+ decoder, GstBuffer * outbuf, GstMeta * meta, GstBuffer * inbuf)
+ {
+ const GstMetaInfo *info = meta->info;
+ const gchar *const *tags;
+ const gchar *const supported_tags[] = {
+ GST_META_TAG_AUDIO_STR,
+ GST_META_TAG_AUDIO_CHANNELS_STR,
+ NULL,
+ };
+
+ tags = gst_meta_api_type_get_tags (info->api);
+
+ if (!tags)
+ return TRUE;
+
+ while (*tags) {
+ if (!g_strv_contains (supported_tags, *tags))
+ return FALSE;
+ tags++;
+ }
+
+ return TRUE;
+ }
+
/* Closure handed to foreach_metadata() while copying metadata from an
 * input buffer onto the corresponding output buffer. */
typedef struct
{
  GstAudioDecoder *decoder;     /* decoder whose transform_meta is consulted */
  GstBuffer *outbuf;            /* destination for transformed metas */
} CopyMetaData;
+
+ static gboolean
+ foreach_metadata (GstBuffer * inbuf, GstMeta ** meta, gpointer user_data)
+ {
+ CopyMetaData *data = user_data;
+ GstAudioDecoder *decoder = data->decoder;
+ GstAudioDecoderClass *klass = GST_AUDIO_DECODER_GET_CLASS (decoder);
+ GstBuffer *outbuf = data->outbuf;
+ const GstMetaInfo *info = (*meta)->info;
+ gboolean do_copy = FALSE;
+
+ if (gst_meta_api_type_has_tag (info->api, _gst_meta_tag_memory)) {
+ /* never call the transform_meta with memory specific metadata */
+ GST_DEBUG_OBJECT (decoder, "not copying memory specific metadata %s",
+ g_type_name (info->api));
+ do_copy = FALSE;
+ } else if (klass->transform_meta) {
+ do_copy = klass->transform_meta (decoder, outbuf, *meta, inbuf);
+ GST_DEBUG_OBJECT (decoder, "transformed metadata %s: copy: %d",
+ g_type_name (info->api), do_copy);
+ }
+
+ /* we only copy metadata when the subclass implemented a transform_meta
+ * function and when it returns %TRUE */
+ if (do_copy && info->transform_func) {
+ GstMetaTransformCopy copy_data = { FALSE, 0, -1 };
+ GST_DEBUG_OBJECT (decoder, "copy metadata %s", g_type_name (info->api));
+ /* simply copy then */
+ info->transform_func (outbuf, *meta, inbuf,
+ _gst_meta_transform_copy, ©_data);
+ }
+ return TRUE;
+ }
+
+ /**
+ * gst_audio_decoder_finish_subframe:
+ * @dec: a #GstAudioDecoder
+ * @buf: (transfer full) (allow-none): decoded data
+ *
+ * Collects decoded data and pushes it downstream. This function may be called
+ * multiple times for a given input frame.
+ *
+ * @buf may be NULL in which case it is assumed that the current input frame is
+ * finished. This is equivalent to calling gst_audio_decoder_finish_subframe()
+ * with a NULL buffer and frames=1 after having pushed out all decoded audio
+ * subframes using this function.
+ *
+ * When called with valid data in @buf the source pad caps must have been set
+ * already.
+ *
+ * Note that a frame received in #GstAudioDecoderClass.handle_frame() may be
+ * invalidated by a call to this function.
+ *
+ * Returns: a #GstFlowReturn that should be escalated to caller (of caller)
+ *
+ * Since: 1.16
+ */
+ GstFlowReturn
+ gst_audio_decoder_finish_subframe (GstAudioDecoder * dec, GstBuffer * buf)
+ {
+ g_return_val_if_fail (GST_IS_AUDIO_DECODER (dec), GST_FLOW_ERROR);
+
+ if (buf == NULL)
+ return gst_audio_decoder_finish_frame_or_subframe (dec, NULL, 1);
+ else
+ return gst_audio_decoder_finish_frame_or_subframe (dec, buf, 0);
+ }
+
+ /**
+ * gst_audio_decoder_finish_frame:
+ * @dec: a #GstAudioDecoder
+ * @buf: (transfer full) (allow-none): decoded data
+ * @frames: number of decoded frames represented by decoded data
+ *
+ * Collects decoded data and pushes it downstream.
+ *
+ * @buf may be NULL in which case the indicated number of frames
+ * are discarded and considered to have produced no output
+ * (e.g. lead-in or setup frames).
+ * Otherwise, source pad caps must be set when it is called with valid
+ * data in @buf.
+ *
+ * Note that a frame received in #GstAudioDecoderClass.handle_frame() may be
+ * invalidated by a call to this function.
+ *
+ * Returns: a #GstFlowReturn that should be escalated to caller (of caller)
+ */
+ GstFlowReturn
+ gst_audio_decoder_finish_frame (GstAudioDecoder * dec, GstBuffer * buf,
+ gint frames)
+ {
+ g_return_val_if_fail (GST_IS_AUDIO_DECODER (dec), GST_FLOW_ERROR);
+
+ /* no dummy calls please */
+ g_return_val_if_fail (frames != 0, GST_FLOW_ERROR);
+
+ return gst_audio_decoder_finish_frame_or_subframe (dec, buf, frames);
+ }
+
/* frames == 0 indicates that this is a sub-frame and further sub-frames may
 * follow for the current input frame. */
static GstFlowReturn
gst_audio_decoder_finish_frame_or_subframe (GstAudioDecoder * dec,
    GstBuffer * buf, gint frames)
{
  GstAudioDecoderPrivate *priv;
  GstAudioDecoderContext *ctx;
  GstAudioDecoderClass *klass = GST_AUDIO_DECODER_GET_CLASS (dec);
  GstAudioMeta *meta;
  GstClockTime ts, next_ts;
  gsize size, samples = 0;
  GstFlowReturn ret = GST_FLOW_OK;
  GQueue inbufs = G_QUEUE_INIT;
  gboolean is_subframe = (frames == 0);
  gboolean do_check_resync;

  /* subclass should not hand us no data */
  g_return_val_if_fail (buf == NULL || gst_buffer_get_size (buf) > 0,
      GST_FLOW_ERROR);

  /* if it's a subframe (frames == 0) we must have a valid buffer */
  g_assert (!is_subframe || buf != NULL);

  priv = dec->priv;
  ctx = &dec->priv->ctx;
  meta = buf ? gst_buffer_get_audio_meta (buf) : NULL;
  size = buf ? gst_buffer_get_size (buf) : 0;
  /* with a GstAudioMeta the sample count is authoritative; otherwise
   * derive it from the byte size and bytes-per-frame */
  samples = buf ? (meta ? meta->samples : size / ctx->info.bpf) : 0;

  /* must know the output format by now */
  g_return_val_if_fail (buf == NULL || GST_AUDIO_INFO_IS_VALID (&ctx->info),
      GST_FLOW_ERROR);

  GST_LOG_OBJECT (dec,
      "accepting %" G_GSIZE_FORMAT " bytes == %" G_GSIZE_FORMAT
      " samples for %d frames", buf ? size : 0, samples, frames);

  GST_AUDIO_DECODER_STREAM_LOCK (dec);

  /* renegotiation / pending events only at the start of an output frame
   * (i.e. not between subframes) */
  if (buf != NULL && priv->subframe_samples == 0) {
    ret = check_pending_reconfigure (dec);
    if (ret == GST_FLOW_FLUSHING || ret == GST_FLOW_NOT_NEGOTIATED) {
      gst_buffer_unref (buf);
      goto exit;
    }

    if (priv->pending_events)
      send_pending_events (dec);
  }

  /* sanity checking */
  if (G_LIKELY (buf && ctx->info.bpf)) {
    if (!meta || meta->info.layout == GST_AUDIO_LAYOUT_INTERLEAVED) {
      /* output should be whole number of sample frames */
      if (size % ctx->info.bpf)
        goto wrong_buffer;
      /* output should have no additional padding */
      if (samples != size / ctx->info.bpf)
        goto wrong_samples;
    } else {
      /* can't have more samples than what the buffer fits */
      if (samples > size / ctx->info.bpf)
        goto wrong_samples;
    }
  }

  /* frame and ts book-keeping: negative frames counts from the tail of
   * the pending input-frame queue */
  if (G_UNLIKELY (frames < 0)) {
    if (G_UNLIKELY (-frames - 1 > priv->frames.length)) {
      GST_ELEMENT_WARNING (dec, STREAM, DECODE,
          ("received more decoded frames %d than provided %d", frames,
              priv->frames.length), (NULL));
      frames = 0;
    } else {
      frames = priv->frames.length + frames + 1;
    }
  } else if (G_UNLIKELY (frames > priv->frames.length)) {
    if (G_LIKELY (!priv->force)) {
      GST_ELEMENT_WARNING (dec, STREAM, DECODE,
          ("received more decoded frames %d than provided %d", frames,
              priv->frames.length), (NULL));
    }
    frames = priv->frames.length;
  }

  /* the ts of the oldest pending input frame is the candidate output ts */
  if (G_LIKELY (priv->frames.length))
    ts = GST_BUFFER_PTS (priv->frames.head->data);
  else
    ts = GST_CLOCK_TIME_NONE;

  GST_DEBUG_OBJECT (dec, "leading frame ts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (ts));

  if (is_subframe && priv->frames.length == 0)
    goto subframe_without_pending_input_frame;

  /* move consumed input frames to inbufs (for meta copying / unref);
   * this will be skipped in the is_subframe case because frames will be 0 */
  while (priv->frames.length && frames) {
    g_queue_push_tail (&inbufs, g_queue_pop_head (&priv->frames));
    dec->priv->ctx.delay = dec->priv->frames.length;
    frames--;
  }

  if (G_UNLIKELY (!buf))
    goto exit;

  /* lock on */
  if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (priv->base_ts))) {
    priv->base_ts = ts;
    GST_DEBUG_OBJECT (dec, "base_ts now %" GST_TIME_FORMAT, GST_TIME_ARGS (ts));
  }

  /* still no valid ts, track the segment one */
  if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (priv->base_ts)) &&
      dec->output_segment.rate > 0.0) {
    priv->base_ts = dec->output_segment.start;
  }

  /* only check for resync at the beginning of an input/output frame */
  do_check_resync = !is_subframe || priv->subframe_samples == 0;

  /* slightly convoluted approach caters for perfect ts if subclass desires. */
  if (do_check_resync && GST_CLOCK_TIME_IS_VALID (ts)) {
    if (dec->priv->tolerance > 0) {
      GstClockTimeDiff diff;

      g_assert (GST_CLOCK_TIME_IS_VALID (priv->base_ts));
      next_ts = priv->base_ts +
          gst_util_uint64_scale (priv->samples, GST_SECOND, ctx->info.rate);
      GST_LOG_OBJECT (dec,
          "buffer is %" G_GUINT64_FORMAT " samples past base_ts %"
          GST_TIME_FORMAT ", expected ts %" GST_TIME_FORMAT, priv->samples,
          GST_TIME_ARGS (priv->base_ts), GST_TIME_ARGS (next_ts));
      diff = GST_CLOCK_DIFF (next_ts, ts);
      GST_LOG_OBJECT (dec, "ts diff %d ms", (gint) (diff / GST_MSECOND));
      /* if within tolerance,
       * discard buffer ts and carry on producing perfect stream,
       * otherwise resync to ts */
      if (G_UNLIKELY (diff < (gint64) - dec->priv->tolerance ||
              diff > (gint64) dec->priv->tolerance)) {
        GST_DEBUG_OBJECT (dec, "base_ts resync");
        priv->base_ts = ts;
        priv->samples = 0;
      }
    } else {
      /* tolerance disabled: always resync to the incoming ts */
      GST_DEBUG_OBJECT (dec, "base_ts resync");
      priv->base_ts = ts;
      priv->samples = 0;
    }
  }

  /* delayed one-shot stuff until confirmed data */
  if (priv->taglist && priv->taglist_changed) {
    GstEvent *tags_event;

    tags_event = gst_audio_decoder_create_merged_tags_event (dec);

    if (tags_event != NULL)
      gst_audio_decoder_push_event (dec, tags_event);

    priv->taglist_changed = FALSE;
  }

  /* stamp ts/duration from the perfect-ts accumulator when available */
  buf = gst_buffer_make_writable (buf);
  if (G_LIKELY (GST_CLOCK_TIME_IS_VALID (priv->base_ts))) {
    GST_BUFFER_PTS (buf) =
        priv->base_ts +
        GST_FRAMES_TO_CLOCK_TIME (priv->samples, ctx->info.rate);
    /* duration = end-of-buffer time minus its PTS, avoiding rounding drift */
    GST_BUFFER_DURATION (buf) = priv->base_ts +
        GST_FRAMES_TO_CLOCK_TIME (priv->samples + samples, ctx->info.rate) -
        GST_BUFFER_PTS (buf);
  } else {
    GST_BUFFER_PTS (buf) = GST_CLOCK_TIME_NONE;
    GST_BUFFER_DURATION (buf) =
        GST_FRAMES_TO_CLOCK_TIME (samples, ctx->info.rate);
  }

  if (klass->transform_meta) {
    if (inbufs.length) {
      GList *l;
      for (l = inbufs.head; l; l = l->next) {
        CopyMetaData data;

        data.decoder = dec;
        data.outbuf = buf;
        gst_buffer_foreach_meta (l->data, foreach_metadata, &data);
      }
    } else if (is_subframe) {
      CopyMetaData data;
      GstBuffer *in_buf;

      /* For subframes we assume a 1:N relationship for now, so we just take
       * metas from the first pending input buf */
      in_buf = g_queue_peek_head (&priv->frames);
      data.decoder = dec;
      data.outbuf = buf;
      gst_buffer_foreach_meta (in_buf, foreach_metadata, &data);
    } else {
      GST_WARNING_OBJECT (dec,
          "Can't copy metadata because input buffers disappeared");
    }
  }

  /* object lock guards the stats against concurrent queries */
  GST_OBJECT_LOCK (dec);
  priv->samples += samples;
  priv->samples_out += samples;
  GST_OBJECT_UNLOCK (dec);

  /* we got data, so note things are looking up */
  if (G_UNLIKELY (dec->priv->error_count))
    dec->priv->error_count = 0;

  ret = gst_audio_decoder_output (dec, buf);

exit:
  g_queue_foreach (&inbufs, (GFunc) gst_buffer_unref, NULL);
  g_queue_clear (&inbufs);

  /* track how much of the current input frame was emitted as subframes */
  if (is_subframe)
    dec->priv->subframe_samples += samples;
  else
    dec->priv->subframe_samples = 0;

  GST_AUDIO_DECODER_STREAM_UNLOCK (dec);

  return ret;

  /* ERRORS */
wrong_buffer:
  {
    /* arguably more of a programming error? */
    GST_ELEMENT_ERROR (dec, STREAM, DECODE, (NULL),
        ("buffer size %" G_GSIZE_FORMAT " not a multiple of %d", size,
            ctx->info.bpf));
    gst_buffer_unref (buf);
    ret = GST_FLOW_ERROR;
    goto exit;
  }
wrong_samples:
  {
    /* arguably more of a programming error? */
    GST_ELEMENT_ERROR (dec, STREAM, DECODE, (NULL),
        ("GstAudioMeta samples (%" G_GSIZE_FORMAT ") are inconsistent with "
            "the buffer size and layout (size/bpf = %" G_GSIZE_FORMAT ")",
            meta->samples, size / ctx->info.bpf));
    gst_buffer_unref (buf);
    ret = GST_FLOW_ERROR;
    goto exit;
  }
subframe_without_pending_input_frame:
  {
    /* arguably more of a programming error? */
    GST_ELEMENT_ERROR (dec, STREAM, DECODE, (NULL),
        ("Received decoded subframe, but no pending frame"));
    gst_buffer_unref (buf);
    ret = GST_FLOW_ERROR;
    goto exit;
  }
}
+
/* Hand one (parsed) input frame to the subclass handle_frame vfunc,
 * after recording it in the pending-frames queue for ts/meta bookkeeping.
 * A NULL buffer asks the subclass to drain. */
static GstFlowReturn
gst_audio_decoder_handle_frame (GstAudioDecoder * dec,
    GstAudioDecoderClass * klass, GstBuffer * buffer)
{
  /* Skip decoding and send a GAP instead if
   * GST_SEGMENT_FLAG_TRICKMODE_NO_AUDIO is set and we have timestamps
   * FIXME: We only do this for forward playback atm, because reverse
   * playback would require accumulating GAP events and pushing them
   * out in reverse order as for normal audio samples
   */
  if (G_UNLIKELY (dec->input_segment.rate > 0.0
          && dec->input_segment.flags & GST_SEGMENT_FLAG_TRICKMODE_NO_AUDIO)) {
    if (buffer) {
      GstClockTime ts = GST_BUFFER_PTS (buffer);
      if (GST_CLOCK_TIME_IS_VALID (ts)) {
        GstEvent *event = gst_event_new_gap (ts, GST_BUFFER_DURATION (buffer));

        /* buffer is consumed here; a gap event replaces it downstream */
        gst_buffer_unref (buffer);
        GST_LOG_OBJECT (dec, "Skipping decode in trickmode and sending gap");
        gst_audio_decoder_handle_gap (dec, event);
        return GST_FLOW_OK;
      }
    }
  }

  if (G_LIKELY (buffer)) {
    gsize size = gst_buffer_get_size (buffer);
    /* keep around for admin */
    GST_LOG_OBJECT (dec,
        "tracking frame size %" G_GSIZE_FORMAT ", ts %" GST_TIME_FORMAT, size,
        GST_TIME_ARGS (GST_BUFFER_PTS (buffer)));
    /* queue keeps a ref implicitly: ownership moves to priv->frames */
    g_queue_push_tail (&dec->priv->frames, buffer);
    dec->priv->ctx.delay = dec->priv->frames.length;
    /* object lock guards the byte counter against concurrent queries */
    GST_OBJECT_LOCK (dec);
    dec->priv->bytes_in += size;
    GST_OBJECT_UNLOCK (dec);
  } else {
    GST_LOG_OBJECT (dec, "providing subclass with NULL frame");
  }

  return klass->handle_frame (dec, buffer);
}
+
/* maybe subclass configurable instead, but this allows for a whole lot of
 * raw samples, so at least quite some encoded ...
 * Parenthesized so the expansion is safe in any expression context. */
#define GST_AUDIO_DECODER_MAX_SYNC (10 * 8 * 2 * 1024)
+
/* Feed adapter contents to the subclass, optionally running its legacy
 * parse vfunc to delimit frames.  @force requests draining: keep calling
 * the subclass (with NULL once the adapter is empty) until it is drained. */
static GstFlowReturn
gst_audio_decoder_push_buffers (GstAudioDecoder * dec, gboolean force)
{
  GstAudioDecoderClass *klass;
  GstAudioDecoderPrivate *priv;
  GstAudioDecoderContext *ctx;
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *buffer;
  gint av, flush;

  klass = GST_AUDIO_DECODER_GET_CLASS (dec);
  priv = dec->priv;
  ctx = &dec->priv->ctx;

  g_return_val_if_fail (klass->handle_frame != NULL, GST_FLOW_ERROR);

  av = gst_adapter_available (priv->adapter);
  GST_DEBUG_OBJECT (dec, "available: %d", av);

  while (ret == GST_FLOW_OK) {

    flush = 0;
    ctx->eos = force;

    if (G_LIKELY (av)) {
      gint len;
      GstClockTime ts;
      guint64 distance;

      /* parse if needed */
      if (klass->parse) {
        gint offset = 0;

        /* limited (legacy) parsing; avoid whole of baseparse */
        GST_DEBUG_OBJECT (dec, "parsing available: %d", av);
        /* piggyback sync state on discont */
        ctx->sync = !priv->discont;
        ret = klass->parse (dec, priv->adapter, &offset, &len);

        g_assert (offset <= av);
        if (offset) {
          /* jumped a bit */
          GST_DEBUG_OBJECT (dec, "skipped %d; setting DISCONT", offset);
          gst_adapter_flush (priv->adapter, offset);
          flush = offset;
          /* avoid parsing indefinitely */
          priv->sync_flush += offset;
          if (priv->sync_flush > GST_AUDIO_DECODER_MAX_SYNC)
            goto parse_failed;
        }

        if (ret == GST_FLOW_EOS) {
          /* EOS from parse means "need more data", not stream end */
          GST_LOG_OBJECT (dec, "no frame yet");
          ret = GST_FLOW_OK;
          break;
        } else if (ret == GST_FLOW_OK) {
          GST_LOG_OBJECT (dec, "frame at offset %d of length %d", offset, len);
          g_assert (len);
          g_assert (offset + len <= av);
          priv->sync_flush = 0;
        } else {
          break;
        }
      } else {
        /* no parse vfunc: hand over everything available */
        len = av;
      }
      /* track upstream ts, but do not get stuck if nothing new upstream */
      ts = gst_adapter_prev_pts (priv->adapter, &distance);
      if (ts != priv->prev_ts || distance <= priv->prev_distance) {
        priv->prev_ts = ts;
        priv->prev_distance = distance;
      } else {
        GST_LOG_OBJECT (dec, "ts == prev_ts; discarding");
        ts = GST_CLOCK_TIME_NONE;
      }
      buffer = gst_adapter_take_buffer (priv->adapter, len);
      buffer = gst_buffer_make_writable (buffer);
      GST_BUFFER_PTS (buffer) = ts;
      flush += len;
      priv->force = FALSE;
    } else {
      /* adapter empty: only proceed when draining was requested */
      if (!force)
        break;
      if (!priv->drainable) {
        priv->drained = TRUE;
        break;
      }
      /* NULL buffer tells the subclass to flush out what it has */
      buffer = NULL;
      priv->force = TRUE;
    }

    ret = gst_audio_decoder_handle_frame (dec, klass, buffer);

    /* do not keep pushing it ... */
    if (G_UNLIKELY (!av)) {
      priv->drained = TRUE;
      break;
    }

    av -= flush;
    g_assert (av >= 0);
  }

  GST_LOG_OBJECT (dec, "done pushing to subclass");
  return ret;

  /* ERRORS */
parse_failed:
  {
    GST_ELEMENT_ERROR (dec, STREAM, DECODE, (NULL), ("failed to parse stream"));
    return GST_FLOW_ERROR;
  }
}
+
/* Push all pending data (input adapter, reverse gather list, subclass
 * internals, output aggregation) downstream.  No-op when already drained. */
static GstFlowReturn
gst_audio_decoder_drain (GstAudioDecoder * dec)
{
  GstFlowReturn ret;

  if (dec->priv->drained && !dec->priv->gather)
    return GST_FLOW_OK;

  /* Apply any pending events before draining, as that
   * may update the pending segment info */
  apply_pending_events (dec);

  /* dispatch reverse pending buffers */
  /* chain eventually calls upon drain as well, but by that time
   * gather list should be clear, so ok ... */
  if (dec->output_segment.rate < 0.0 && dec->priv->gather)
    gst_audio_decoder_chain_reverse (dec, NULL);
  /* have subclass give all it can */
  ret = gst_audio_decoder_push_buffers (dec, TRUE);
  if (ret != GST_FLOW_OK) {
    GST_WARNING_OBJECT (dec, "audio decoder push buffers failed");
    goto drain_failed;
  }
  /* ensure all output sent */
  ret = gst_audio_decoder_output (dec, NULL);
  if (ret != GST_FLOW_OK)
    GST_WARNING_OBJECT (dec, "audio decoder output failed");

drain_failed:
  /* NOTE: deliberately reached on success too (fallthrough label);
   * the cleanup below runs in both cases.
   * everything should be away now */
  if (dec->priv->frames.length) {
    /* not fatal/impossible though if subclass/codec eats stuff */
    GST_WARNING_OBJECT (dec, "still %d frames left after draining",
        dec->priv->frames.length);
    g_queue_foreach (&dec->priv->frames, (GFunc) gst_buffer_unref, NULL);
    g_queue_clear (&dec->priv->frames);
  }

  /* discard (unparsed) leftover */
  gst_adapter_clear (dec->priv->adapter);
  return ret;
}
+
+ /* hard == FLUSH, otherwise discont */
+ static GstFlowReturn
+ gst_audio_decoder_flush (GstAudioDecoder * dec, gboolean hard)
+ {
+ GstAudioDecoderClass *klass;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ klass = GST_AUDIO_DECODER_GET_CLASS (dec);
+
+ GST_LOG_OBJECT (dec, "flush hard %d", hard);
+
+ if (!hard) {
+ ret = gst_audio_decoder_drain (dec);
+ } else {
+ gst_audio_decoder_clear_queues (dec);
+ gst_segment_init (&dec->input_segment, GST_FORMAT_TIME);
+ gst_segment_init (&dec->output_segment, GST_FORMAT_TIME);
+ dec->priv->error_count = 0;
+ }
+ /* only bother subclass with flushing if known it is already alive
+ * and kicking out stuff */
+ if (klass->flush && dec->priv->samples_out > 0)
+ klass->flush (dec, hard);
+ /* and get (re)set for the sequel */
+ gst_audio_decoder_reset (dec, FALSE);
+
+ return ret;
+ }
+
+ static GstFlowReturn
+ gst_audio_decoder_chain_forward (GstAudioDecoder * dec, GstBuffer * buffer)
+ {
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ /* discard silly case, though maybe ts may be of value ?? */
+ if (G_UNLIKELY (gst_buffer_get_size (buffer) == 0)) {
+ GST_DEBUG_OBJECT (dec, "discarding empty buffer");
+ gst_buffer_unref (buffer);
+ goto exit;
+ }
+
+ /* grab buffer */
+ gst_adapter_push (dec->priv->adapter, buffer);
+ buffer = NULL;
+ /* new stuff, so we can push subclass again */
+ dec->priv->drained = FALSE;
+
+ /* hand to subclass */
+ ret = gst_audio_decoder_push_buffers (dec, FALSE);
+
+ exit:
+ GST_LOG_OBJECT (dec, "chain-done");
+ return ret;
+ }
+
+ static void
+ gst_audio_decoder_clear_queues (GstAudioDecoder * dec)
+ {
+ GstAudioDecoderPrivate *priv = dec->priv;
+
+ g_list_foreach (priv->queued, (GFunc) gst_mini_object_unref, NULL);
+ g_list_free (priv->queued);
+ priv->queued = NULL;
+ g_list_foreach (priv->gather, (GFunc) gst_mini_object_unref, NULL);
+ g_list_free (priv->gather);
+ priv->gather = NULL;
+ g_list_foreach (priv->decode, (GFunc) gst_mini_object_unref, NULL);
+ g_list_free (priv->decode);
+ priv->decode = NULL;
+ }
+
+ /*
+ * Input:
+ * Buffer decoding order: 7 8 9 4 5 6 3 1 2 EOS
+ * Discont flag: D D D D
+ *
+ * - Each Discont marks a discont in the decoding order.
+ *
+ * for vorbis, each buffer is a keyframe when we have the previous
+ * buffer. This means that to decode buffer 7, we need buffer 6, which
+ * arrives out of order.
+ *
+ * we first gather buffers in the gather queue until we get a DISCONT. We
+ * prepend each incoming buffer so that they are in reversed order.
+ *
+ * gather queue: 9 8 7
+ * decode queue:
+ * output queue:
+ *
+ * When a DISCONT is received (buffer 4), we move the gather queue to the
+ * decode queue. This is simply done by taking the head of the gather queue
+ * and prepending it to the decode queue. This yields:
+ *
+ * gather queue:
+ * decode queue: 7 8 9
+ * output queue:
+ *
+ * Then we decode each buffer in the decode queue in order and put the output
+ * buffer in the output queue. The first buffer (7) will not produce any output
+ * because it needs the previous buffer (6) which did not arrive yet. This
+ * yields:
+ *
+ * gather queue:
+ * decode queue: 7 8 9
+ * output queue: 9 8
+ *
+ * Then we remove the consumed buffers from the decode queue. Buffer 7 is not
+ * completely consumed, we need to keep it around for when we receive buffer
+ * 6. This yields:
+ *
+ * gather queue:
+ * decode queue: 7
+ * output queue: 9 8
+ *
+ * Then we accumulate more buffers:
+ *
+ * gather queue: 6 5 4
+ * decode queue: 7
+ * output queue:
+ *
+ * prepending to the decode queue on DISCONT yields:
+ *
+ * gather queue:
+ * decode queue: 4 5 6 7
+ * output queue:
+ *
+ * after decoding and keeping buffer 4:
+ *
+ * gather queue:
+ * decode queue: 4
+ * output queue: 7 6 5
+ *
+ * Etc..
+ */
/* Decode everything on the decode queue (reverse playback path) and push
 * the resulting output downstream with reverse-interpolated timestamps.
 * Called with the stream lock held (NOTE(review): not asserted here, but
 * all callers in this file hold it — confirm before reuse). */
static GstFlowReturn
gst_audio_decoder_flush_decode (GstAudioDecoder * dec)
{
  GstAudioDecoderPrivate *priv = dec->priv;
  GstFlowReturn res = GST_FLOW_OK;
  GstClockTime timestamp;
  GList *walk;

  walk = priv->decode;

  GST_DEBUG_OBJECT (dec, "flushing buffers to decoder");

  /* clear buffer and decoder state */
  gst_audio_decoder_flush (dec, FALSE);

  while (walk) {
    GList *next;
    GstBuffer *buf = GST_BUFFER_CAST (walk->data);

    GST_DEBUG_OBJECT (dec, "decoding buffer %p, ts %" GST_TIME_FORMAT,
        buf, GST_TIME_ARGS (GST_BUFFER_PTS (buf)));

    next = g_list_next (walk);
    /* decode buffer, resulting data prepended to output queue */
    gst_buffer_ref (buf);
    res = gst_audio_decoder_chain_forward (dec, buf);

    /* if we generated output, we can discard the buffer, else we
     * keep it in the queue */
    if (priv->queued) {
      GST_DEBUG_OBJECT (dec, "decoded buffer to %p", priv->queued->data);
      /* drop the list link; the extra ref taken above is released here,
       * chain_forward consumed the original queue reference */
      priv->decode = g_list_delete_link (priv->decode, walk);
      gst_buffer_unref (buf);
    } else {
      /* no output yet (e.g. decoder needs the preceding buffer, see the
       * vorbis example in the comment above); keep it for the next round */
      GST_DEBUG_OBJECT (dec, "buffer did not decode, keeping");
    }
    walk = next;
  }

  /* drain any aggregation (or otherwise) leftover */
  gst_audio_decoder_drain (dec);

  /* now send queued data downstream; the queue is in reverse order, so
   * timestamps are interpolated backwards from the last known PTS */
  timestamp = GST_CLOCK_TIME_NONE;
  while (priv->queued) {
    GstBuffer *buf = GST_BUFFER_CAST (priv->queued->data);
    GstClockTime duration;

    duration = GST_BUFFER_DURATION (buf);

    /* duration should always be valid for raw audio */
    g_assert (GST_CLOCK_TIME_IS_VALID (duration));

    /* interpolate (backward) if needed */
    if (G_LIKELY (timestamp != -1)) {
      if (timestamp > duration)
        timestamp -= duration;
      else
        timestamp = 0;
    }

    if (!GST_BUFFER_PTS_IS_VALID (buf)) {
      GST_LOG_OBJECT (dec, "applying reverse interpolated ts %"
          GST_TIME_FORMAT, GST_TIME_ARGS (timestamp));
      GST_BUFFER_PTS (buf) = timestamp;
    } else {
      /* track otherwise */
      timestamp = GST_BUFFER_PTS (buf);
      GST_LOG_OBJECT (dec, "tracking ts %" GST_TIME_FORMAT,
          GST_TIME_ARGS (timestamp));
    }

    if (G_LIKELY (res == GST_FLOW_OK)) {
      GST_DEBUG_OBJECT (dec, "pushing buffer %p of size %" G_GSIZE_FORMAT ", "
          "time %" GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT, buf,
          gst_buffer_get_size (buf), GST_TIME_ARGS (GST_BUFFER_PTS (buf)),
          GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
      /* should be already, but let's be sure */
      buf = gst_buffer_make_writable (buf);
      /* avoid stray DISCONT from forward processing,
       * which have no meaning in reverse pushing */
      GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
      res = gst_audio_decoder_push_forward (dec, buf);
    } else {
      /* earlier failure: drop instead of pushing, but keep draining the
       * queue so no buffers leak */
      gst_buffer_unref (buf);
    }

    priv->queued = g_list_delete_link (priv->queued, priv->queued);
  }

  return res;
}
+
/* Reverse-playback chain entry: gather buffers (prepended, i.e. reversed)
 * until a DISCONT arrives, then move the gather queue to the decode queue
 * and decode it. A NULL @buf acts as a forced flush of the gathered data.
 * Takes ownership of @buf. */
static GstFlowReturn
gst_audio_decoder_chain_reverse (GstAudioDecoder * dec, GstBuffer * buf)
{
  GstAudioDecoderPrivate *priv = dec->priv;
  GstFlowReturn result = GST_FLOW_OK;

  /* if we have a discont, move buffers to the decode list */
  if (!buf || GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT)) {
    GST_DEBUG_OBJECT (dec, "received discont");
    while (priv->gather) {
      GstBuffer *gbuf;

      gbuf = GST_BUFFER_CAST (priv->gather->data);
      /* remove from the gather list */
      priv->gather = g_list_delete_link (priv->gather, priv->gather);
      /* copy to decode queue; prepending restores decoding order */
      priv->decode = g_list_prepend (priv->decode, gbuf);
    }
    /* decode stuff in the decode queue */
    gst_audio_decoder_flush_decode (dec);
  }

  if (G_LIKELY (buf)) {
    GST_DEBUG_OBJECT (dec, "gathering buffer %p of size %" G_GSIZE_FORMAT ", "
        "time %" GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT, buf,
        gst_buffer_get_size (buf), GST_TIME_ARGS (GST_BUFFER_PTS (buf)),
        GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));

    /* add buffer to gather queue; list takes the caller's reference */
    priv->gather = g_list_prepend (priv->gather, buf);
  }

  /* NOTE(review): flush_decode's return value is ignored; errors surface
   * on a later flush — confirm this is intentional */
  return result;
}
+
/* Sink pad chain function: validates negotiation state, handles DISCONT
 * bookkeeping, then dispatches to forward or reverse processing depending
 * on the input segment rate. Takes ownership of @buffer. */
static GstFlowReturn
gst_audio_decoder_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstAudioDecoder *dec;
  GstFlowReturn ret;

  dec = GST_AUDIO_DECODER (parent);

  GST_LOG_OBJECT (dec,
      "received buffer of size %" G_GSIZE_FORMAT " with ts %" GST_TIME_FORMAT
      ", duration %" GST_TIME_FORMAT, gst_buffer_get_size (buffer),
      GST_TIME_ARGS (GST_BUFFER_PTS (buffer)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (buffer)));

  GST_AUDIO_DECODER_STREAM_LOCK (dec);

  /* subclasses that require caps cannot be fed data before negotiation */
  if (G_UNLIKELY (dec->priv->ctx.input_caps == NULL && dec->priv->needs_format))
    goto not_negotiated;

  dec->priv->ctx.had_input_data = TRUE;

  if (!dec->priv->expecting_discont_buf &&
      GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT)) {
    gint64 samples, ts;

    /* track present position */
    ts = dec->priv->base_ts;
    samples = dec->priv->samples;

    GST_DEBUG_OBJECT (dec, "handling discont");
    gst_audio_decoder_flush (dec, FALSE);
    dec->priv->discont = TRUE;

    /* buffer may claim DISCONT loudly, if it can't tell us where we are now,
     * we'll stick to where we were ...
     * Particularly useful/needed for upstream BYTE based */
    if (dec->input_segment.rate > 0.0 && !GST_BUFFER_PTS_IS_VALID (buffer)) {
      GST_DEBUG_OBJECT (dec, "... but restoring previous ts tracking");
      dec->priv->base_ts = ts;
      dec->priv->samples = samples;
    }
  }
  dec->priv->expecting_discont_buf = FALSE;

  /* positive rate: forward playback; otherwise the reverse-playback
   * gather/decode machinery documented above */
  if (dec->input_segment.rate > 0.0)
    ret = gst_audio_decoder_chain_forward (dec, buffer);
  else
    ret = gst_audio_decoder_chain_reverse (dec, buffer);

  GST_AUDIO_DECODER_STREAM_UNLOCK (dec);

  return ret;

  /* ERRORS */
not_negotiated:
  {
    GST_AUDIO_DECODER_STREAM_UNLOCK (dec);
    GST_ELEMENT_ERROR (dec, CORE, NEGOTIATION, (NULL),
        ("decoder not initialized"));
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_NEGOTIATED;
  }
}
+
+ /* perform upstream byte <-> time conversion (duration, seeking)
+ * if subclass allows and if enough data for moderately decent conversion */
+ static inline gboolean
+ gst_audio_decoder_do_byte (GstAudioDecoder * dec)
+ {
+ gboolean ret;
+
+ GST_OBJECT_LOCK (dec);
+ ret = dec->priv->ctx.do_estimate_rate && dec->priv->ctx.info.bpf &&
+ dec->priv->ctx.info.rate <= dec->priv->samples_out;
+ GST_OBJECT_UNLOCK (dec);
+
+ return ret;
+ }
+
+ /* Must be called holding the GST_AUDIO_DECODER_STREAM_LOCK */
+ static gboolean
+ gst_audio_decoder_negotiate_default_caps (GstAudioDecoder * dec)
+ {
+ GstCaps *caps, *templcaps;
+ gint i;
+ gint channels = 0;
+ gint rate;
+ guint64 channel_mask = 0;
+ gint caps_size;
+ GstStructure *structure;
+ GstAudioInfo info;
+
+ templcaps = gst_pad_get_pad_template_caps (dec->srcpad);
+ caps = gst_pad_peer_query_caps (dec->srcpad, templcaps);
+ if (caps)
+ gst_caps_unref (templcaps);
+ else
+ caps = templcaps;
+ templcaps = NULL;
+
+ if (!caps || gst_caps_is_empty (caps) || gst_caps_is_any (caps))
+ goto caps_error;
+
+ GST_LOG_OBJECT (dec, "peer caps %" GST_PTR_FORMAT, caps);
+
+ /* before fixating, try to use whatever upstream provided */
+ caps = gst_caps_make_writable (caps);
+ caps_size = gst_caps_get_size (caps);
+ if (dec->priv->ctx.input_caps) {
+ GstCaps *sinkcaps = dec->priv->ctx.input_caps;
+ GstStructure *structure = gst_caps_get_structure (sinkcaps, 0);
+
+ if (gst_structure_get_int (structure, "rate", &rate)) {
+ for (i = 0; i < caps_size; i++) {
+ gst_structure_set (gst_caps_get_structure (caps, i), "rate",
+ G_TYPE_INT, rate, NULL);
+ }
+ }
+
+ if (gst_structure_get_int (structure, "channels", &channels)) {
+ for (i = 0; i < caps_size; i++) {
+ gst_structure_set (gst_caps_get_structure (caps, i), "channels",
+ G_TYPE_INT, channels, NULL);
+ }
+ }
+
+ if (gst_structure_get (structure, "channel-mask", GST_TYPE_BITMASK,
+ &channel_mask, NULL)) {
+ for (i = 0; i < caps_size; i++) {
+ gst_structure_set (gst_caps_get_structure (caps, i), "channel-mask",
+ GST_TYPE_BITMASK, channel_mask, NULL);
+ }
+ }
+ }
+
+ for (i = 0; i < caps_size; i++) {
+ structure = gst_caps_get_structure (caps, i);
+ if (gst_structure_has_field (structure, "channels"))
+ gst_structure_fixate_field_nearest_int (structure,
+ "channels", GST_AUDIO_DEF_CHANNELS);
+ else
+ gst_structure_set (structure, "channels", G_TYPE_INT,
+ GST_AUDIO_DEF_CHANNELS, NULL);
+ if (gst_structure_has_field (structure, "rate"))
+ gst_structure_fixate_field_nearest_int (structure,
+ "rate", GST_AUDIO_DEF_RATE);
+ else
+ gst_structure_set (structure, "rate", G_TYPE_INT, GST_AUDIO_DEF_RATE,
+ NULL);
+ }
+ caps = gst_caps_fixate (caps);
+ structure = gst_caps_get_structure (caps, 0);
+
+ /* Need to add a channel-mask if channels > 2 */
+ gst_structure_get_int (structure, "channels", &channels);
+ if (channels > 2 && !gst_structure_has_field (structure, "channel-mask")) {
+ channel_mask = gst_audio_channel_get_fallback_mask (channels);
+ if (channel_mask != 0) {
+ gst_structure_set (structure, "channel-mask",
+ GST_TYPE_BITMASK, channel_mask, NULL);
+ } else {
+ GST_WARNING_OBJECT (dec, "No default channel-mask for %d channels",
+ channels);
+ }
+ }
+
+ if (!caps || !gst_audio_info_from_caps (&info, caps))
+ goto caps_error;
+
+ GST_OBJECT_LOCK (dec);
+ dec->priv->ctx.info = info;
+ dec->priv->ctx.caps = caps;
+ GST_OBJECT_UNLOCK (dec);
+
+ GST_INFO_OBJECT (dec,
+ "Chose default caps %" GST_PTR_FORMAT " for initial gap", caps);
+
+ return TRUE;
+
+ caps_error:
+ {
+ if (caps)
+ gst_caps_unref (caps);
+ return FALSE;
+ }
+ }
+
/* Handle a GAP event: make sure output caps are negotiated (choosing
 * defaults if nothing was decoded yet), then either hand the subclass an
 * empty buffer for packet-loss concealment or forward the GAP downstream.
 * Consumes @event in every code path. */
static gboolean
gst_audio_decoder_handle_gap (GstAudioDecoder * dec, GstEvent * event)
{
  gboolean ret;
  GstClockTime timestamp, duration;
  gboolean needs_reconfigure = FALSE;

  /* Ensure we have caps first */
  GST_AUDIO_DECODER_STREAM_LOCK (dec);
  if (!GST_AUDIO_INFO_IS_VALID (&dec->priv->ctx.info)) {
    if (!gst_audio_decoder_negotiate_default_caps (dec)) {
      GST_AUDIO_DECODER_STREAM_UNLOCK (dec);
      GST_ELEMENT_ERROR (dec, STREAM, FORMAT, (NULL),
          ("Decoder output not negotiated before GAP event."));
      gst_event_unref (event);
      return FALSE;
    }
    needs_reconfigure = TRUE;
  }
  needs_reconfigure = gst_pad_check_reconfigure (dec->srcpad)
      || needs_reconfigure;
  if (G_UNLIKELY (dec->priv->ctx.output_format_changed || needs_reconfigure)) {
    if (!gst_audio_decoder_negotiate_unlocked (dec)) {
      GST_WARNING_OBJECT (dec, "Failed to negotiate with downstream");
      /* keep the reconfigure flag pending so a later attempt retries */
      gst_pad_mark_reconfigure (dec->srcpad);
    }
  }
  GST_AUDIO_DECODER_STREAM_UNLOCK (dec);

  gst_event_parse_gap (event, &timestamp, &duration);

  /* time progressed without data, see if we can fill the gap with
   * some concealment data */
  GST_DEBUG_OBJECT (dec,
      "gap event: plc %d, do_plc %d, position %" GST_TIME_FORMAT
      " duration %" GST_TIME_FORMAT,
      dec->priv->plc, dec->priv->ctx.do_plc,
      GST_TIME_ARGS (timestamp), GST_TIME_ARGS (duration));

  if (dec->priv->plc && dec->priv->ctx.do_plc && dec->input_segment.rate > 0.0) {
    GstAudioDecoderClass *klass = GST_AUDIO_DECODER_GET_CLASS (dec);
    GstBuffer *buf;

    /* hand subclass empty frame with duration that needs covering */
    buf = gst_buffer_new ();
    GST_BUFFER_PTS (buf) = timestamp;
    GST_BUFFER_DURATION (buf) = duration;
    /* best effort, not much error handling */
    gst_audio_decoder_handle_frame (dec, klass, buf);
    ret = TRUE;
    /* the next DISCONT buffer is expected, don't treat it as a new discont */
    dec->priv->expecting_discont_buf = TRUE;
    gst_event_unref (event);
  } else {
    GstFlowReturn flowret;

    /* sub-class doesn't know how to handle empty buffers,
     * so just try sending GAP downstream */
    flowret = check_pending_reconfigure (dec);
    if (flowret == GST_FLOW_OK) {
      send_pending_events (dec);
      ret = gst_audio_decoder_push_event (dec, event);
    } else {
      ret = FALSE;
      gst_event_unref (event);
    }
  }
  return ret;
}
+
+ static GList *
+ _flush_events (GstPad * pad, GList * events)
+ {
+ GList *tmp;
+
+ for (tmp = events; tmp; tmp = tmp->next) {
+ if (GST_EVENT_TYPE (tmp->data) != GST_EVENT_EOS &&
+ GST_EVENT_TYPE (tmp->data) != GST_EVENT_SEGMENT &&
+ GST_EVENT_IS_STICKY (tmp->data)) {
+ gst_pad_store_sticky_event (pad, GST_EVENT_CAST (tmp->data));
+ }
+ gst_event_unref (tmp->data);
+ }
+ g_list_free (events);
+
+ return NULL;
+ }
+
/* Default sink-pad event handler. Serialized events that must stay in sync
 * with the data flow (SEGMENT, TAG, and the default case) are appended to
 * priv->pending_events and sent just before the next output buffer; others
 * are acted upon and/or pushed immediately. Consumes @event in all paths. */
static gboolean
gst_audio_decoder_sink_eventfunc (GstAudioDecoder * dec, GstEvent * event)
{
  gboolean ret;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_STREAM_START:
      GST_AUDIO_DECODER_STREAM_LOCK (dec);
      /* finish any data in current segment and clear the decoder
       * to be ready for new stream data */
      gst_audio_decoder_drain (dec);
      gst_audio_decoder_flush (dec, FALSE);

      GST_DEBUG_OBJECT (dec, "received STREAM_START. Clearing taglist");
      /* Flush upstream tags after a STREAM_START */
      if (dec->priv->upstream_tags) {
        gst_tag_list_unref (dec->priv->upstream_tags);
        dec->priv->upstream_tags = NULL;
        dec->priv->taglist_changed = TRUE;
      }
      GST_AUDIO_DECODER_STREAM_UNLOCK (dec);

      ret = gst_audio_decoder_push_event (dec, event);
      break;
    case GST_EVENT_SEGMENT:
    {
      GstSegment seg;
      GstFormat format;

      GST_AUDIO_DECODER_STREAM_LOCK (dec);
      gst_event_copy_segment (event, &seg);

      format = seg.format;
      if (format == GST_FORMAT_TIME) {
        GST_DEBUG_OBJECT (dec, "received TIME SEGMENT %" GST_SEGMENT_FORMAT,
            &seg);
      } else {
        gint64 nstart;
        GST_DEBUG_OBJECT (dec, "received SEGMENT %" GST_SEGMENT_FORMAT, &seg);
        /* handle newsegment resulting from legacy simple seeking */
        /* note that we need to convert this whether or not enough data
         * to handle initial newsegment */
        if (dec->priv->ctx.do_estimate_rate &&
            gst_pad_query_convert (dec->sinkpad, GST_FORMAT_BYTES, seg.start,
                GST_FORMAT_TIME, &nstart)) {
          /* best attempt convert */
          /* as these are only estimates, stop is kept open-ended to avoid
           * premature cutting */
          GST_DEBUG_OBJECT (dec, "converted to TIME start %" GST_TIME_FORMAT,
              GST_TIME_ARGS (nstart));
          seg.format = GST_FORMAT_TIME;
          seg.start = nstart;
          seg.time = nstart;
          seg.stop = GST_CLOCK_TIME_NONE;
          /* replace event */
          gst_event_unref (event);
          event = gst_event_new_segment (&seg);
        } else {
          GST_DEBUG_OBJECT (dec, "unsupported format; ignoring");
          GST_AUDIO_DECODER_STREAM_UNLOCK (dec);
#ifdef TIZEN_FEATURE_AUDIODECODER_MODIFICATION
          /* Tizen: silently swallow non-TIME segments instead of failing */
          goto newseg_wrong_format;
#else
          gst_event_unref (event);
          ret = FALSE;
#endif
          break;
        }
      }

      /* prepare for next segment */
      /* Use the segment start as a base timestamp
       * in case upstream does not come up with anything better
       * (e.g. upstream BYTE) */
      if (format != GST_FORMAT_TIME) {
        dec->priv->base_ts = seg.start;
        dec->priv->samples = 0;
      }

      /* Update the decode flags in the segment if we have an instant-rate
       * override active */
      GST_OBJECT_LOCK (dec);
      if (dec->priv->decode_flags_override) {
        seg.flags &= ~GST_SEGMENT_INSTANT_FLAGS;
        seg.flags |= dec->priv->decode_flags & GST_SEGMENT_INSTANT_FLAGS;
      }

      /* and follow along with segment */
      dec->priv->in_out_segment_sync = FALSE;
      dec->input_segment = seg;
      GST_OBJECT_UNLOCK (dec);

      /* queue the (possibly rewritten) segment event until output flows */
      dec->priv->pending_events =
          g_list_append (dec->priv->pending_events, event);
      GST_AUDIO_DECODER_STREAM_UNLOCK (dec);

      ret = TRUE;
      break;
    }
    case GST_EVENT_INSTANT_RATE_CHANGE:
    {
      GstSegmentFlags flags;
      GstSegment *seg;

      gst_event_parse_instant_rate_change (event, NULL, &flags);

      GST_OBJECT_LOCK (dec);
      dec->priv->decode_flags_override = TRUE;
      dec->priv->decode_flags = flags;

      /* Update the input segment flags */
      seg = &dec->input_segment;
      seg->flags &= ~GST_SEGMENT_INSTANT_FLAGS;
      seg->flags |= dec->priv->decode_flags & GST_SEGMENT_INSTANT_FLAGS;
      GST_OBJECT_UNLOCK (dec);

      /* Forward downstream */
      ret = gst_pad_event_default (dec->sinkpad, GST_OBJECT_CAST (dec), event);
      break;
    }
    case GST_EVENT_GAP:
      ret = gst_audio_decoder_handle_gap (dec, event);
      break;
    case GST_EVENT_FLUSH_STOP:
      GST_AUDIO_DECODER_STREAM_LOCK (dec);
      /* prepare for fresh start */
      gst_audio_decoder_flush (dec, TRUE);

      dec->priv->pending_events = _flush_events (dec->srcpad,
          dec->priv->pending_events);
      GST_AUDIO_DECODER_STREAM_UNLOCK (dec);

      /* Forward FLUSH_STOP, it is expected to be forwarded immediately
       * and no buffers are queued anyway. */
      ret = gst_audio_decoder_push_event (dec, event);
      break;

    case GST_EVENT_SEGMENT_DONE:
      GST_AUDIO_DECODER_STREAM_LOCK (dec);
      gst_audio_decoder_drain (dec);
      GST_AUDIO_DECODER_STREAM_UNLOCK (dec);

      /* Forward SEGMENT_DONE because no buffer or serialized event might come after
       * SEGMENT_DONE and nothing could trigger another _finish_frame() call. */
      if (dec->priv->pending_events)
        send_pending_events (dec);
      ret = gst_audio_decoder_push_event (dec, event);
      break;

    case GST_EVENT_EOS:
      GST_AUDIO_DECODER_STREAM_LOCK (dec);
      gst_audio_decoder_drain (dec);
      GST_AUDIO_DECODER_STREAM_UNLOCK (dec);

      /* error out if data came in but nothing was ever decoded */
      if (dec->priv->ctx.had_input_data && !dec->priv->ctx.had_output_data) {
        GST_ELEMENT_ERROR (dec, STREAM, DECODE,
            ("No valid frames decoded before end of stream"),
            ("no valid frames found"));
      }

      /* Forward EOS because no buffer or serialized event will come after
       * EOS and nothing could trigger another _finish_frame() call. */
      if (dec->priv->pending_events)
        send_pending_events (dec);
      ret = gst_audio_decoder_push_event (dec, event);
      break;

    case GST_EVENT_CAPS:
    {
      GstCaps *caps;

      gst_event_parse_caps (event, &caps);
      ret = gst_audio_decoder_sink_setcaps (dec, caps);
      gst_event_unref (event);
      break;
    }
    case GST_EVENT_TAG:
    {
      GstTagList *tags;

      gst_event_parse_tag (event, &tags);

      if (gst_tag_list_get_scope (tags) == GST_TAG_SCOPE_STREAM) {
        GST_AUDIO_DECODER_STREAM_LOCK (dec);
        if (dec->priv->upstream_tags != tags) {
          if (dec->priv->upstream_tags)
            gst_tag_list_unref (dec->priv->upstream_tags);
          dec->priv->upstream_tags = gst_tag_list_ref (tags);
          GST_INFO_OBJECT (dec, "upstream stream tags: %" GST_PTR_FORMAT, tags);
        }
        gst_event_unref (event);
        /* replace the incoming event with one merging upstream + decoder tags */
        event = gst_audio_decoder_create_merged_tags_event (dec);
        dec->priv->taglist_changed = FALSE;
        GST_AUDIO_DECODER_STREAM_UNLOCK (dec);

        /* No tags, go out of here instead of fall through */
        if (!event) {
          ret = TRUE;
          break;
        }
      }

      /* fall through */
    }
    default:
      if (!GST_EVENT_IS_SERIALIZED (event)) {
        ret =
            gst_pad_event_default (dec->sinkpad, GST_OBJECT_CAST (dec), event);
      } else {
        GST_DEBUG_OBJECT (dec, "Enqueuing event %d, %s", GST_EVENT_TYPE (event),
            GST_EVENT_TYPE_NAME (event));
        GST_AUDIO_DECODER_STREAM_LOCK (dec);
        dec->priv->pending_events =
            g_list_append (dec->priv->pending_events, event);
        GST_AUDIO_DECODER_STREAM_UNLOCK (dec);
        ret = TRUE;
      }
      break;
  }
  return ret;

#ifdef TIZEN_FEATURE_AUDIODECODER_MODIFICATION
newseg_wrong_format:
  {
    GST_DEBUG_OBJECT (dec, "received non TIME newsegment");
    gst_event_unref (event);
    /* SWALLOW EVENT */
    return TRUE;
  }
#endif
}
+
+ static gboolean
+ gst_audio_decoder_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+ {
+ GstAudioDecoder *dec;
+ GstAudioDecoderClass *klass;
+ gboolean ret;
+
+ dec = GST_AUDIO_DECODER (parent);
+ klass = GST_AUDIO_DECODER_GET_CLASS (dec);
+
+ GST_DEBUG_OBJECT (dec, "received event %d, %s", GST_EVENT_TYPE (event),
+ GST_EVENT_TYPE_NAME (event));
+
+ if (klass->sink_event)
+ ret = klass->sink_event (dec, event);
+ else {
+ gst_event_unref (event);
+ ret = FALSE;
+ }
+ return ret;
+ }
+
+ static gboolean
+ gst_audio_decoder_do_seek (GstAudioDecoder * dec, GstEvent * event)
+ {
+ GstSeekFlags flags;
+ GstSeekType start_type, end_type;
+ GstFormat format;
+ gdouble rate;
+ gint64 start, start_time, end_time;
+ GstSegment seek_segment;
+ guint32 seqnum;
+
+ gst_event_parse_seek (event, &rate, &format, &flags, &start_type,
+ &start_time, &end_type, &end_time);
+
+ /* we'll handle plain open-ended flushing seeks with the simple approach */
+ if (rate != 1.0) {
+ GST_DEBUG_OBJECT (dec, "unsupported seek: rate");
+ return FALSE;
+ }
+
+ if (start_type != GST_SEEK_TYPE_SET) {
+ GST_DEBUG_OBJECT (dec, "unsupported seek: start time");
+ return FALSE;
+ }
+
+ if ((end_type != GST_SEEK_TYPE_SET && end_type != GST_SEEK_TYPE_NONE) ||
+ (end_type == GST_SEEK_TYPE_SET && end_time != GST_CLOCK_TIME_NONE)) {
+ GST_DEBUG_OBJECT (dec, "unsupported seek: end time");
+ return FALSE;
+ }
+
+ if (!(flags & GST_SEEK_FLAG_FLUSH)) {
+ GST_DEBUG_OBJECT (dec, "unsupported seek: not flushing");
+ return FALSE;
+ }
+
+ memcpy (&seek_segment, &dec->output_segment, sizeof (seek_segment));
+ gst_segment_do_seek (&seek_segment, rate, format, flags, start_type,
+ start_time, end_type, end_time, NULL);
+ start_time = seek_segment.position;
+
+ if (!gst_pad_query_convert (dec->sinkpad, GST_FORMAT_TIME, start_time,
+ GST_FORMAT_BYTES, &start)) {
+ GST_DEBUG_OBJECT (dec, "conversion failed");
+ return FALSE;
+ }
+
+ seqnum = gst_event_get_seqnum (event);
+ event = gst_event_new_seek (1.0, GST_FORMAT_BYTES, flags,
+ GST_SEEK_TYPE_SET, start, GST_SEEK_TYPE_NONE, -1);
+ gst_event_set_seqnum (event, seqnum);
+
+ GST_DEBUG_OBJECT (dec, "seeking to %" GST_TIME_FORMAT " at byte offset %"
+ G_GINT64_FORMAT, GST_TIME_ARGS (start_time), start);
+
+ return gst_pad_push_event (dec->sinkpad, event);
+ }
+
/* Default src-pad event handler. For SEEK events upstream gets the first
 * chance; failing that, TIME seeks may be helped along via byte conversion
 * (gst_audio_decoder_do_seek), and non-TIME seeks are converted to TIME and
 * retried upstream. Other events get the default handling. */
static gboolean
gst_audio_decoder_src_eventfunc (GstAudioDecoder * dec, GstEvent * event)
{
  gboolean res;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEEK:
    {
      GstFormat format;
      gdouble rate;
      GstSeekFlags flags;
      GstSeekType start_type, stop_type;
      gint64 start, stop;
      gint64 tstart, tstop;
      guint32 seqnum;

      gst_event_parse_seek (event, &rate, &format, &flags, &start_type, &start,
          &stop_type, &stop);
      seqnum = gst_event_get_seqnum (event);

      /* upstream gets a chance first; on success it consumed the event */
      if ((res = gst_pad_push_event (dec->sinkpad, event)))
        break;

      /* if upstream fails for a time seek, maybe we can help if allowed */
      if (format == GST_FORMAT_TIME) {
        if (gst_audio_decoder_do_byte (dec))
          res = gst_audio_decoder_do_seek (dec, event);
        break;
      }

      /* ... though a non-time seek can be aided as well */
      /* First bring the requested format to time */
      if (!(res =
              gst_pad_query_convert (dec->srcpad, format, start,
                  GST_FORMAT_TIME, &tstart)))
        goto convert_error;
      if (!(res =
              gst_pad_query_convert (dec->srcpad, format, stop, GST_FORMAT_TIME,
                  &tstop)))
        goto convert_error;

      /* then seek with time on the peer; keep the seqnum so the events
       * stay correlated */
      event = gst_event_new_seek (rate, GST_FORMAT_TIME,
          flags, start_type, tstart, stop_type, tstop);
      gst_event_set_seqnum (event, seqnum);

      res = gst_pad_push_event (dec->sinkpad, event);
      break;
    }
    default:
      res = gst_pad_event_default (dec->srcpad, GST_OBJECT_CAST (dec), event);
      break;
  }
done:
  return res;

  /* ERRORS */
convert_error:
  {
    /* NOTE(review): on a failed conversion the original seek event appears
     * to be leaked here — confirm against upstream GStreamer */
    GST_DEBUG_OBJECT (dec, "cannot convert start/stop for seek");
    goto done;
  }
}
+
+ static gboolean
+ gst_audio_decoder_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
+ {
+ GstAudioDecoder *dec;
+ GstAudioDecoderClass *klass;
+ gboolean ret;
+
+ dec = GST_AUDIO_DECODER (parent);
+ klass = GST_AUDIO_DECODER_GET_CLASS (dec);
+
+ GST_DEBUG_OBJECT (dec, "received event %d, %s", GST_EVENT_TYPE (event),
+ GST_EVENT_TYPE_NAME (event));
+
+ if (klass->src_event)
+ ret = klass->src_event (dec, event);
+ else {
+ gst_event_unref (event);
+ ret = FALSE;
+ }
+
+ return ret;
+ }
+
+ static gboolean
+ gst_audio_decoder_decide_allocation_default (GstAudioDecoder * dec,
+ GstQuery * query)
+ {
+ GstAllocator *allocator = NULL;
+ GstAllocationParams params;
+ gboolean update_allocator;
+
+ /* we got configuration from our peer or the decide_allocation method,
+ * parse them */
+ if (gst_query_get_n_allocation_params (query) > 0) {
+ /* try the allocator */
+ gst_query_parse_nth_allocation_param (query, 0, &allocator, ¶ms);
+ update_allocator = TRUE;
+ } else {
+ allocator = NULL;
+ gst_allocation_params_init (¶ms);
+ update_allocator = FALSE;
+ }
+
+ if (update_allocator)
+ gst_query_set_nth_allocation_param (query, 0, allocator, ¶ms);
+ else
+ gst_query_add_allocation_param (query, allocator, ¶ms);
+ if (allocator)
+ gst_object_unref (allocator);
+
+ return TRUE;
+ }
+
/* Default propose_allocation implementation: proposes nothing and accepts
 * the query as-is; subclasses override to add pools/allocators/metas. */
static gboolean
gst_audio_decoder_propose_allocation_default (GstAudioDecoder * dec,
    GstQuery * query)
{
  return TRUE;
}
+
+ /**
+ * gst_audio_decoder_proxy_getcaps:
+ * @decoder: a #GstAudioDecoder
+ * @caps: (allow-none): initial caps
+ * @filter: (allow-none): filter caps
+ *
+ * Returns caps that express @caps (or sink template caps if @caps == NULL)
+ * restricted to rate/channels/... combinations supported by downstream
+ * elements.
+ *
+ * Returns: (transfer full): a #GstCaps owned by caller
+ *
+ * Since: 1.6
+ */
+ GstCaps *
+ gst_audio_decoder_proxy_getcaps (GstAudioDecoder * decoder, GstCaps * caps,
+ GstCaps * filter)
+ {
+ return __gst_audio_element_proxy_getcaps (GST_ELEMENT_CAST (decoder),
+ GST_AUDIO_DECODER_SINK_PAD (decoder),
+ GST_AUDIO_DECODER_SRC_PAD (decoder), caps, filter);
+ }
+
+ static GstCaps *
+ gst_audio_decoder_sink_getcaps (GstAudioDecoder * decoder, GstCaps * filter)
+ {
+ GstAudioDecoderClass *klass;
+ GstCaps *caps;
+
+ klass = GST_AUDIO_DECODER_GET_CLASS (decoder);
+
+ if (klass->getcaps)
+ caps = klass->getcaps (decoder, filter);
+ else
+ caps = gst_audio_decoder_proxy_getcaps (decoder, NULL, filter);
+
+ GST_LOG_OBJECT (decoder, "Returning caps %" GST_PTR_FORMAT, caps);
+
+ return caps;
+ }
+
/* Default sink-pad query handler: answers FORMATS, CONVERT (via the
 * encoded-audio byte/sample bookkeeping), ALLOCATION (subclass vmethod),
 * CAPS, ACCEPT_CAPS and SEEKING; everything else goes to the pad default. */
static gboolean
gst_audio_decoder_sink_query_default (GstAudioDecoder * dec, GstQuery * query)
{
  GstPad *pad = GST_AUDIO_DECODER_SINK_PAD (dec);
  gboolean res = FALSE;

  GST_LOG_OBJECT (dec, "handling query: %" GST_PTR_FORMAT, query);

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_FORMATS:
    {
      gst_query_set_formats (query, 2, GST_FORMAT_TIME, GST_FORMAT_BYTES);
      res = TRUE;
      break;
    }
    case GST_QUERY_CONVERT:
    {
      GstFormat src_fmt, dest_fmt;
      gint64 src_val, dest_val;

      gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
      /* conversion uses running byte/sample counters, guard with object lock */
      GST_OBJECT_LOCK (dec);
      res = __gst_audio_encoded_audio_convert (&dec->priv->ctx.info,
          dec->priv->bytes_in, dec->priv->samples_out,
          src_fmt, src_val, &dest_fmt, &dest_val);
      GST_OBJECT_UNLOCK (dec);
      if (!res)
        goto error;
      gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
      break;
    }
    case GST_QUERY_ALLOCATION:
    {
      GstAudioDecoderClass *klass = GST_AUDIO_DECODER_GET_CLASS (dec);

      if (klass->propose_allocation)
        res = klass->propose_allocation (dec, query);
      break;
    }
    case GST_QUERY_CAPS:{
      GstCaps *filter, *caps;

      gst_query_parse_caps (query, &filter);
      caps = gst_audio_decoder_sink_getcaps (dec, filter);
      gst_query_set_caps_result (query, caps);
      gst_caps_unref (caps);
      res = TRUE;
      break;
    }
    case GST_QUERY_ACCEPT_CAPS:{
      if (dec->priv->use_default_pad_acceptcaps) {
        res =
            gst_pad_query_default (GST_AUDIO_DECODER_SINK_PAD (dec),
            GST_OBJECT_CAST (dec), query);
      } else {
        GstCaps *caps;
        GstCaps *allowed_caps;
        GstCaps *template_caps;
        gboolean accept;

        gst_query_parse_accept_caps (query, &caps);

        /* first a cheap template subset check, then the full intersection
         * against what the pad can currently accept */
        template_caps = gst_pad_get_pad_template_caps (pad);
        accept = gst_caps_is_subset (caps, template_caps);
        gst_caps_unref (template_caps);

        if (accept) {
          allowed_caps = gst_pad_query_caps (GST_AUDIO_DECODER_SINK_PAD (dec),
              caps);

          accept = gst_caps_can_intersect (caps, allowed_caps);

          gst_caps_unref (allowed_caps);
        }

        gst_query_set_accept_caps_result (query, accept);
        res = TRUE;
      }
      break;
    }
    case GST_QUERY_SEEKING:
    {
      GstFormat format;

      /* non-TIME segments are discarded, so we won't seek that way either */
      gst_query_parse_seeking (query, &format, NULL, NULL, NULL);
      if (format != GST_FORMAT_TIME) {
        GST_DEBUG_OBJECT (dec, "discarding non-TIME SEEKING query");
        res = FALSE;
        break;
      }
      /* fall-through */
    }
    default:
      res = gst_pad_query_default (pad, GST_OBJECT_CAST (dec), query);
      break;
  }

error:
  return res;
}
+
+ static gboolean
+ gst_audio_decoder_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
+ {
+ GstAudioDecoderClass *dec_class;
+ GstAudioDecoder *dec;
+ gboolean ret = FALSE;
+
+ dec = GST_AUDIO_DECODER (parent);
+ dec_class = GST_AUDIO_DECODER_GET_CLASS (dec);
+
+ GST_DEBUG_OBJECT (pad, "received query %" GST_PTR_FORMAT, query);
+
+ if (dec_class->sink_query)
+ ret = dec_class->sink_query (dec, query);
+
+ return ret;
+ }
+
/* FIXME ? are any of these queries (other than latency) a decoder's business ??
 * also, the conversion stuff might seem to make sense, but seems to not mind
 * segment stuff etc at all
 * Supposedly that's backward compatibility ... */
/* Default src-pad query handler: answers DURATION/POSITION (preferring the
 * upstream peer, falling back to byte<->time estimation), FORMATS, CONVERT
 * (raw audio conversion) and LATENCY (peer latency plus our own). */
static gboolean
gst_audio_decoder_src_query_default (GstAudioDecoder * dec, GstQuery * query)
{
  GstPad *pad = GST_AUDIO_DECODER_SRC_PAD (dec);
  gboolean res = FALSE;

  GST_LOG_OBJECT (dec, "handling query: %" GST_PTR_FORMAT, query);

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_DURATION:
    {
      GstFormat format;

      /* upstream in any case */
      if ((res = gst_pad_query_default (pad, GST_OBJECT_CAST (dec), query)))
        break;

      gst_query_parse_duration (query, &format, NULL);
      /* try answering TIME by converting from BYTE if subclass allows */
      if (format == GST_FORMAT_TIME && gst_audio_decoder_do_byte (dec)) {
        gint64 value;

        if (gst_pad_peer_query_duration (dec->sinkpad, GST_FORMAT_BYTES,
                &value)) {
          GST_LOG_OBJECT (dec, "upstream size %" G_GINT64_FORMAT, value);
          if (gst_pad_query_convert (dec->sinkpad, GST_FORMAT_BYTES, value,
                  GST_FORMAT_TIME, &value)) {
            gst_query_set_duration (query, GST_FORMAT_TIME, value);
            res = TRUE;
          }
        }
      }
      break;
    }
    case GST_QUERY_POSITION:
    {
      GstFormat format;
      gint64 time, value;

      if ((res = gst_pad_peer_query (dec->sinkpad, query))) {
        GST_LOG_OBJECT (dec, "returning peer response");
        break;
      }

      /* Refuse BYTES format queries. If it made sense to
       * answer them, upstream would have already */
      gst_query_parse_position (query, &format, NULL);

      if (format == GST_FORMAT_BYTES) {
        GST_LOG_OBJECT (dec, "Ignoring BYTES position query");
        break;
      }

      /* we start from the last seen time */
      time = dec->output_segment.position;
      /* correct for the segment values */
      time =
          gst_segment_to_stream_time (&dec->output_segment, GST_FORMAT_TIME,
          time);

      GST_LOG_OBJECT (dec,
          "query %p: our time: %" GST_TIME_FORMAT, query, GST_TIME_ARGS (time));

      /* and convert to the final format */
      if (!(res = gst_pad_query_convert (pad, GST_FORMAT_TIME, time,
                  format, &value)))
        break;

      gst_query_set_position (query, format, value);

      GST_LOG_OBJECT (dec,
          "query %p: we return %" G_GINT64_FORMAT " (format %u)", query, value,
          format);
      break;
    }
    case GST_QUERY_FORMATS:
    {
      gst_query_set_formats (query, 3,
          GST_FORMAT_TIME, GST_FORMAT_BYTES, GST_FORMAT_DEFAULT);
      res = TRUE;
      break;
    }
    case GST_QUERY_CONVERT:
    {
      GstFormat src_fmt, dest_fmt;
      gint64 src_val, dest_val;

      gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
      /* raw-audio conversion uses the negotiated audio info; object lock
       * protects it against concurrent renegotiation */
      GST_OBJECT_LOCK (dec);
      res = gst_audio_info_convert (&dec->priv->ctx.info,
          src_fmt, src_val, dest_fmt, &dest_val);
      GST_OBJECT_UNLOCK (dec);
      if (!res)
        break;
      gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
      break;
    }
    case GST_QUERY_LATENCY:
    {
      if ((res = gst_pad_peer_query (dec->sinkpad, query))) {
        gboolean live;
        GstClockTime min_latency, max_latency;

        gst_query_parse_latency (query, &live, &min_latency, &max_latency);
        GST_DEBUG_OBJECT (dec, "Peer latency: live %d, min %"
            GST_TIME_FORMAT " max %" GST_TIME_FORMAT, live,
            GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));

        GST_OBJECT_LOCK (dec);
        /* add our latency; -1 means "unbounded" and is sticky */
        min_latency += dec->priv->ctx.min_latency;
        if (max_latency == -1 || dec->priv->ctx.max_latency == -1)
          max_latency = -1;
        else
          max_latency += dec->priv->ctx.max_latency;
        GST_OBJECT_UNLOCK (dec);

        gst_query_set_latency (query, live, min_latency, max_latency);
      }
      break;
    }
    default:
      res = gst_pad_query_default (pad, GST_OBJECT_CAST (dec), query);
      break;
  }

  return res;
}
+
+ static gboolean
+ gst_audio_decoder_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
+ {
+ GstAudioDecoder *dec;
+ GstAudioDecoderClass *dec_class;
+ gboolean ret = FALSE;
+
+ dec = GST_AUDIO_DECODER (parent);
+ dec_class = GST_AUDIO_DECODER_GET_CLASS (dec);
+
+ GST_DEBUG_OBJECT (pad, "received query %" GST_PTR_FORMAT, query);
+
+ if (dec_class->src_query)
+ ret = dec_class->src_query (dec, query);
+
+ return ret;
+ }
+
+ static gboolean
+ gst_audio_decoder_stop (GstAudioDecoder * dec)
+ {
+ GstAudioDecoderClass *klass;
+ gboolean ret = TRUE;
+
+ GST_DEBUG_OBJECT (dec, "gst_audio_decoder_stop");
+
+ klass = GST_AUDIO_DECODER_GET_CLASS (dec);
+
+ if (klass->stop) {
+ ret = klass->stop (dec);
+ }
+
+ /* clean up */
+ gst_audio_decoder_reset (dec, TRUE);
+
+ if (ret)
+ dec->priv->active = FALSE;
+
+ return ret;
+ }
+
+ static gboolean
+ gst_audio_decoder_start (GstAudioDecoder * dec)
+ {
+ GstAudioDecoderClass *klass;
+ gboolean ret = TRUE;
+
+ GST_DEBUG_OBJECT (dec, "gst_audio_decoder_start");
+
+ klass = GST_AUDIO_DECODER_GET_CLASS (dec);
+
+ /* arrange clean state */
+ gst_audio_decoder_reset (dec, TRUE);
+
+ if (klass->start) {
+ ret = klass->start (dec);
+ }
+
+ if (ret)
+ dec->priv->active = TRUE;
+
+ return ret;
+ }
+
+ static void
+ gst_audio_decoder_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+ {
+ GstAudioDecoder *dec;
+
+ dec = GST_AUDIO_DECODER (object);
+
+ switch (prop_id) {
+ case PROP_LATENCY:
+ g_value_set_int64 (value, dec->priv->latency);
+ break;
+ case PROP_TOLERANCE:
+ g_value_set_int64 (value, dec->priv->tolerance);
+ break;
+ case PROP_PLC:
+ g_value_set_boolean (value, dec->priv->plc);
+ break;
+ case PROP_MAX_ERRORS:
+ g_value_set_int (value, gst_audio_decoder_get_max_errors (dec));
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static void
+ gst_audio_decoder_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+ GstAudioDecoder *dec;
+
+ dec = GST_AUDIO_DECODER (object);
+
+ switch (prop_id) {
+ case PROP_LATENCY:
+ dec->priv->latency = g_value_get_int64 (value);
+ break;
+ case PROP_TOLERANCE:
+ dec->priv->tolerance = g_value_get_int64 (value);
+ break;
+ case PROP_PLC:
+ dec->priv->plc = g_value_get_boolean (value);
+ break;
+ case PROP_MAX_ERRORS:
+ gst_audio_decoder_set_max_errors (dec, g_value_get_int (value));
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
/* GstElement::change_state implementation.
 * Upward transitions (before chaining up): NULL->READY calls the subclass'
 * open vfunc, READY->PAUSED runs gst_audio_decoder_start().
 * Downward transitions (after chaining up): PAUSED->READY runs
 * gst_audio_decoder_stop(), READY->NULL calls the subclass' close vfunc.
 * Any failure jumps to the matching error label below and returns
 * GST_STATE_CHANGE_FAILURE after posting an element error. */
static GstStateChangeReturn
gst_audio_decoder_change_state (GstElement * element, GstStateChange transition)
{
  GstAudioDecoder *codec;
  GstAudioDecoderClass *klass;
  GstStateChangeReturn ret;

  codec = GST_AUDIO_DECODER (element);
  klass = GST_AUDIO_DECODER_GET_CLASS (codec);

  /* prepare resources on the way up, before the parent class acts */
  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      if (klass->open) {
        if (!klass->open (codec))
          goto open_failed;
      }
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      if (!gst_audio_decoder_start (codec)) {
        goto start_failed;
      }
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  /* release resources on the way down, after the parent class acted */
  switch (transition) {
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      if (!gst_audio_decoder_stop (codec)) {
        goto stop_failed;
      }
      break;
    case GST_STATE_CHANGE_READY_TO_NULL:
      if (klass->close) {
        if (!klass->close (codec))
          goto close_failed;
      }
      break;
    default:
      break;
  }

  return ret;

/* ERRORS: post an element error and abort the state change */
start_failed:
  {
    GST_ELEMENT_ERROR (codec, LIBRARY, INIT, (NULL), ("Failed to start codec"));
    return GST_STATE_CHANGE_FAILURE;
  }
stop_failed:
  {
    GST_ELEMENT_ERROR (codec, LIBRARY, INIT, (NULL), ("Failed to stop codec"));
    return GST_STATE_CHANGE_FAILURE;
  }
open_failed:
  {
    GST_ELEMENT_ERROR (codec, LIBRARY, INIT, (NULL), ("Failed to open codec"));
    return GST_STATE_CHANGE_FAILURE;
  }
close_failed:
  {
    GST_ELEMENT_ERROR (codec, LIBRARY, INIT, (NULL), ("Failed to close codec"));
    return GST_STATE_CHANGE_FAILURE;
  }
}
+
/* Implementation behind the GST_AUDIO_DECODER_ERROR() convenience macro.
 * Adds @weight to the accumulated error count and marks a discont.  While
 * the count stays within the configured max-errors budget (a negative
 * budget means "never fatal"), the error is only logged and GST_FLOW_OK is
 * returned; once exceeded, a fatal error message is posted and
 * GST_FLOW_ERROR is returned.
 * Ownership of @txt and @dbg is consumed on both paths: they are either
 * handed to gst_element_message_full() (which takes ownership) or freed
 * here. */
GstFlowReturn
_gst_audio_decoder_error (GstAudioDecoder * dec, gint weight,
    GQuark domain, gint code, gchar * txt, gchar * dbg, const gchar * file,
    const gchar * function, gint line)
{
  if (txt)
    GST_WARNING_OBJECT (dec, "error: %s", txt);
  if (dbg)
    GST_WARNING_OBJECT (dec, "error: %s", dbg);
  dec->priv->error_count += weight;
  dec->priv->discont = TRUE;
  if (dec->priv->max_errors >= 0
      && dec->priv->max_errors < dec->priv->error_count) {
    /* budget exhausted: escalate to a fatal element error */
    gst_element_message_full (GST_ELEMENT (dec), GST_MESSAGE_ERROR, domain,
        code, txt, dbg, file, function, line);
    return GST_FLOW_ERROR;
  } else {
    /* still tolerated: release the strings ourselves */
    g_free (txt);
    g_free (dbg);
    return GST_FLOW_OK;
  }
}
+
+ /**
+ * gst_audio_decoder_get_audio_info:
+ * @dec: a #GstAudioDecoder
+ *
+ * Returns: (transfer none): a #GstAudioInfo describing the input audio format
+ */
+ GstAudioInfo *
+ gst_audio_decoder_get_audio_info (GstAudioDecoder * dec)
+ {
+ g_return_val_if_fail (GST_IS_AUDIO_DECODER (dec), NULL);
+
+ return &dec->priv->ctx.info;
+ }
+
+ /**
+ * gst_audio_decoder_set_plc_aware:
+ * @dec: a #GstAudioDecoder
+ * @plc: new plc state
+ *
+ * Indicates whether or not subclass handles packet loss concealment (plc).
+ */
+ void
+ gst_audio_decoder_set_plc_aware (GstAudioDecoder * dec, gboolean plc)
+ {
+ g_return_if_fail (GST_IS_AUDIO_DECODER (dec));
+
+ dec->priv->ctx.do_plc = plc;
+ }
+
+ /**
+ * gst_audio_decoder_get_plc_aware:
+ * @dec: a #GstAudioDecoder
+ *
+ * Returns: currently configured plc handling
+ */
+ gint
+ gst_audio_decoder_get_plc_aware (GstAudioDecoder * dec)
+ {
+ g_return_val_if_fail (GST_IS_AUDIO_DECODER (dec), 0);
+
+ return dec->priv->ctx.do_plc;
+ }
+
+ /**
+ * gst_audio_decoder_set_estimate_rate:
+ * @dec: a #GstAudioDecoder
+ * @enabled: whether to enable byte to time conversion
+ *
+ * Allows baseclass to perform byte to time estimated conversion.
+ */
+ void
+ gst_audio_decoder_set_estimate_rate (GstAudioDecoder * dec, gboolean enabled)
+ {
+ g_return_if_fail (GST_IS_AUDIO_DECODER (dec));
+
+ dec->priv->ctx.do_estimate_rate = enabled;
+ }
+
+ /**
+ * gst_audio_decoder_get_estimate_rate:
+ * @dec: a #GstAudioDecoder
+ *
+ * Returns: currently configured byte to time conversion setting
+ */
+ gint
+ gst_audio_decoder_get_estimate_rate (GstAudioDecoder * dec)
+ {
+ g_return_val_if_fail (GST_IS_AUDIO_DECODER (dec), 0);
+
+ return dec->priv->ctx.do_estimate_rate;
+ }
+
+ /**
+ * gst_audio_decoder_get_delay:
+ * @dec: a #GstAudioDecoder
+ *
+ * Returns: currently configured decoder delay
+ */
+ gint
+ gst_audio_decoder_get_delay (GstAudioDecoder * dec)
+ {
+ g_return_val_if_fail (GST_IS_AUDIO_DECODER (dec), 0);
+
+ return dec->priv->ctx.delay;
+ }
+
+ /**
+ * gst_audio_decoder_set_max_errors:
+ * @dec: a #GstAudioDecoder
+ * @num: max tolerated errors
+ *
+ * Sets numbers of tolerated decoder errors, where a tolerated one is then only
+ * warned about, but more than tolerated will lead to fatal error. You can set
+ * -1 for never returning fatal errors. Default is set to
+ * GST_AUDIO_DECODER_MAX_ERRORS.
+ */
+ void
+ gst_audio_decoder_set_max_errors (GstAudioDecoder * dec, gint num)
+ {
+ g_return_if_fail (GST_IS_AUDIO_DECODER (dec));
+
+ dec->priv->max_errors = num;
+ }
+
+ /**
+ * gst_audio_decoder_get_max_errors:
+ * @dec: a #GstAudioDecoder
+ *
+ * Returns: currently configured decoder tolerated error count.
+ */
+ gint
+ gst_audio_decoder_get_max_errors (GstAudioDecoder * dec)
+ {
+ g_return_val_if_fail (GST_IS_AUDIO_DECODER (dec), 0);
+
+ return dec->priv->max_errors;
+ }
+
+ /**
+ * gst_audio_decoder_set_latency:
+ * @dec: a #GstAudioDecoder
+ * @min: minimum latency
+ * @max: maximum latency
+ *
+ * Sets decoder latency.
+ */
+ void
+ gst_audio_decoder_set_latency (GstAudioDecoder * dec,
+ GstClockTime min, GstClockTime max)
+ {
+ g_return_if_fail (GST_IS_AUDIO_DECODER (dec));
+ g_return_if_fail (GST_CLOCK_TIME_IS_VALID (min));
+ g_return_if_fail (min <= max);
+
+ GST_OBJECT_LOCK (dec);
+ dec->priv->ctx.min_latency = min;
+ dec->priv->ctx.max_latency = max;
+ GST_OBJECT_UNLOCK (dec);
+
+ /* post latency message on the bus */
+ gst_element_post_message (GST_ELEMENT (dec),
+ gst_message_new_latency (GST_OBJECT (dec)));
+ }
+
+ /**
+ * gst_audio_decoder_get_latency:
+ * @dec: a #GstAudioDecoder
+ * @min: (out) (allow-none): a pointer to storage to hold minimum latency
+ * @max: (out) (allow-none): a pointer to storage to hold maximum latency
+ *
+ * Sets the variables pointed to by @min and @max to the currently configured
+ * latency.
+ */
+ void
+ gst_audio_decoder_get_latency (GstAudioDecoder * dec,
+ GstClockTime * min, GstClockTime * max)
+ {
+ g_return_if_fail (GST_IS_AUDIO_DECODER (dec));
+
+ GST_OBJECT_LOCK (dec);
+ if (min)
+ *min = dec->priv->ctx.min_latency;
+ if (max)
+ *max = dec->priv->ctx.max_latency;
+ GST_OBJECT_UNLOCK (dec);
+ }
+
+ /**
+ * gst_audio_decoder_get_parse_state:
+ * @dec: a #GstAudioDecoder
+ * @sync: (out) (optional): a pointer to a variable to hold the current sync state
+ * @eos: (out) (optional): a pointer to a variable to hold the current eos state
+ *
+ * Return current parsing (sync and eos) state.
+ */
+ void
+ gst_audio_decoder_get_parse_state (GstAudioDecoder * dec,
+ gboolean * sync, gboolean * eos)
+ {
+ g_return_if_fail (GST_IS_AUDIO_DECODER (dec));
+
+ if (sync)
+ *sync = dec->priv->ctx.sync;
+ if (eos)
+ *eos = dec->priv->ctx.eos;
+ }
+
+ /**
+ * gst_audio_decoder_set_allocation_caps:
+ * @dec: a #GstAudioDecoder
+ * @allocation_caps: (allow-none): a #GstCaps or %NULL
+ *
+ * Sets a caps in allocation query which are different from the set
+ * pad's caps. Use this function before calling
+ * gst_audio_decoder_negotiate(). Setting to %NULL the allocation
+ * query will use the caps from the pad.
+ *
+ * Since: 1.10
+ */
+ void
+ gst_audio_decoder_set_allocation_caps (GstAudioDecoder * dec,
+ GstCaps * allocation_caps)
+ {
+ g_return_if_fail (GST_IS_AUDIO_DECODER (dec));
+
+ gst_caps_replace (&dec->priv->ctx.allocation_caps, allocation_caps);
+ }
+
+ /**
+ * gst_audio_decoder_set_plc:
+ * @dec: a #GstAudioDecoder
+ * @enabled: new state
+ *
+ * Enable or disable decoder packet loss concealment, provided subclass
+ * and codec are capable and allow handling plc.
+ *
+ * MT safe.
+ */
+ void
+ gst_audio_decoder_set_plc (GstAudioDecoder * dec, gboolean enabled)
+ {
+ g_return_if_fail (GST_IS_AUDIO_DECODER (dec));
+
+ GST_LOG_OBJECT (dec, "enabled: %d", enabled);
+
+ GST_OBJECT_LOCK (dec);
+ dec->priv->plc = enabled;
+ GST_OBJECT_UNLOCK (dec);
+ }
+
+ /**
+ * gst_audio_decoder_get_plc:
+ * @dec: a #GstAudioDecoder
+ *
+ * Queries decoder packet loss concealment handling.
+ *
+ * Returns: TRUE if packet loss concealment is enabled.
+ *
+ * MT safe.
+ */
+ gboolean
+ gst_audio_decoder_get_plc (GstAudioDecoder * dec)
+ {
+ gboolean result;
+
+ g_return_val_if_fail (GST_IS_AUDIO_DECODER (dec), FALSE);
+
+ GST_OBJECT_LOCK (dec);
+ result = dec->priv->plc;
+ GST_OBJECT_UNLOCK (dec);
+
+ return result;
+ }
+
+ /**
+ * gst_audio_decoder_set_min_latency:
+ * @dec: a #GstAudioDecoder
+ * @num: new minimum latency
+ *
+ * Sets decoder minimum aggregation latency.
+ *
+ * MT safe.
+ */
+ void
+ gst_audio_decoder_set_min_latency (GstAudioDecoder * dec, GstClockTime num)
+ {
+ g_return_if_fail (GST_IS_AUDIO_DECODER (dec));
+ g_return_if_fail (GST_CLOCK_TIME_IS_VALID (num));
+
+ GST_OBJECT_LOCK (dec);
+ dec->priv->latency = num;
+ GST_OBJECT_UNLOCK (dec);
+ }
+
+ /**
+ * gst_audio_decoder_get_min_latency:
+ * @dec: a #GstAudioDecoder
+ *
+ * Queries decoder's latency aggregation.
+ *
+ * Returns: aggregation latency.
+ *
+ * MT safe.
+ */
+ GstClockTime
+ gst_audio_decoder_get_min_latency (GstAudioDecoder * dec)
+ {
+ GstClockTime result;
+
+ g_return_val_if_fail (GST_IS_AUDIO_DECODER (dec), FALSE);
+
+ GST_OBJECT_LOCK (dec);
+ result = dec->priv->latency;
+ GST_OBJECT_UNLOCK (dec);
+
+ return result;
+ }
+
+ /**
+ * gst_audio_decoder_set_tolerance:
+ * @dec: a #GstAudioDecoder
+ * @tolerance: new tolerance
+ *
+ * Configures decoder audio jitter tolerance threshold.
+ *
+ * MT safe.
+ */
+ void
+ gst_audio_decoder_set_tolerance (GstAudioDecoder * dec, GstClockTime tolerance)
+ {
+ g_return_if_fail (GST_IS_AUDIO_DECODER (dec));
+ g_return_if_fail (GST_CLOCK_TIME_IS_VALID (tolerance));
+
+ GST_OBJECT_LOCK (dec);
+ dec->priv->tolerance = tolerance;
+ GST_OBJECT_UNLOCK (dec);
+ }
+
+ /**
+ * gst_audio_decoder_get_tolerance:
+ * @dec: a #GstAudioDecoder
+ *
+ * Queries current audio jitter tolerance threshold.
+ *
+ * Returns: decoder audio jitter tolerance threshold.
+ *
+ * MT safe.
+ */
+ GstClockTime
+ gst_audio_decoder_get_tolerance (GstAudioDecoder * dec)
+ {
+ GstClockTime result;
+
+ g_return_val_if_fail (GST_IS_AUDIO_DECODER (dec), 0);
+
+ GST_OBJECT_LOCK (dec);
+ result = dec->priv->tolerance;
+ GST_OBJECT_UNLOCK (dec);
+
+ return result;
+ }
+
+ /**
+ * gst_audio_decoder_set_drainable:
+ * @dec: a #GstAudioDecoder
+ * @enabled: new state
+ *
+ * Configures decoder drain handling. If drainable, subclass might
+ * be handed a NULL buffer to have it return any leftover decoded data.
+ * Otherwise, it is not considered so capable and will only ever be passed
+ * real data.
+ *
+ * MT safe.
+ */
+ void
+ gst_audio_decoder_set_drainable (GstAudioDecoder * dec, gboolean enabled)
+ {
+ g_return_if_fail (GST_IS_AUDIO_DECODER (dec));
+
+ GST_OBJECT_LOCK (dec);
+ dec->priv->drainable = enabled;
+ GST_OBJECT_UNLOCK (dec);
+ }
+
+ /**
+ * gst_audio_decoder_get_drainable:
+ * @dec: a #GstAudioDecoder
+ *
+ * Queries decoder drain handling.
+ *
+ * Returns: TRUE if drainable handling is enabled.
+ *
+ * MT safe.
+ */
+ gboolean
+ gst_audio_decoder_get_drainable (GstAudioDecoder * dec)
+ {
+ gboolean result;
+
+ g_return_val_if_fail (GST_IS_AUDIO_DECODER (dec), 0);
+
+ GST_OBJECT_LOCK (dec);
+ result = dec->priv->drainable;
+ GST_OBJECT_UNLOCK (dec);
+
+ return result;
+ }
+
+ /**
+ * gst_audio_decoder_set_needs_format:
+ * @dec: a #GstAudioDecoder
+ * @enabled: new state
+ *
+ * Configures decoder format needs. If enabled, subclass needs to be
+ * negotiated with format caps before it can process any data. It will then
+ * never be handed any data before it has been configured.
+ * Otherwise, it might be handed data without having been configured and
+ * is then expected being able to do so either by default
+ * or based on the input data.
+ *
+ * MT safe.
+ */
+ void
+ gst_audio_decoder_set_needs_format (GstAudioDecoder * dec, gboolean enabled)
+ {
+ g_return_if_fail (GST_IS_AUDIO_DECODER (dec));
+
+ GST_OBJECT_LOCK (dec);
+ dec->priv->needs_format = enabled;
+ GST_OBJECT_UNLOCK (dec);
+ }
+
+ /**
+ * gst_audio_decoder_get_needs_format:
+ * @dec: a #GstAudioDecoder
+ *
+ * Queries decoder required format handling.
+ *
+ * Returns: TRUE if required format handling is enabled.
+ *
+ * MT safe.
+ */
+ gboolean
+ gst_audio_decoder_get_needs_format (GstAudioDecoder * dec)
+ {
+ gboolean result;
+
+ g_return_val_if_fail (GST_IS_AUDIO_DECODER (dec), FALSE);
+
+ GST_OBJECT_LOCK (dec);
+ result = dec->priv->needs_format;
+ GST_OBJECT_UNLOCK (dec);
+
+ return result;
+ }
+
/**
 * gst_audio_decoder_merge_tags:
 * @dec: a #GstAudioDecoder
 * @tags: (allow-none): a #GstTagList to merge, or NULL
 * @mode: the #GstTagMergeMode to use, usually #GST_TAG_MERGE_REPLACE
 *
 * Sets the audio decoder tags and how they should be merged with any
 * upstream stream tags. This will override any tags previously-set
 * with gst_audio_decoder_merge_tags().
 *
 * Note that this is provided for convenience, and the subclass is
 * not required to use this and can still do tag handling on its own.
 */
void
gst_audio_decoder_merge_tags (GstAudioDecoder * dec,
    const GstTagList * tags, GstTagMergeMode mode)
{
  g_return_if_fail (GST_IS_AUDIO_DECODER (dec));
  g_return_if_fail (tags == NULL || GST_IS_TAG_LIST (tags));
  g_return_if_fail (mode != GST_TAG_MERGE_UNDEFINED);

  GST_AUDIO_DECODER_STREAM_LOCK (dec);
  /* pointer comparison: only swap when a different list is supplied */
  if (dec->priv->taglist != tags) {
    if (dec->priv->taglist) {
      /* drop the previous list and fall back to the default merge mode */
      gst_tag_list_unref (dec->priv->taglist);
      dec->priv->taglist = NULL;
      dec->priv->decoder_tags_merge_mode = GST_TAG_MERGE_KEEP_ALL;
    }
    if (tags) {
      /* take an extra ref; caller keeps its own reference */
      dec->priv->taglist = gst_tag_list_ref ((GstTagList *) tags);
      dec->priv->decoder_tags_merge_mode = mode;
    }

    GST_DEBUG_OBJECT (dec, "setting decoder tags to %" GST_PTR_FORMAT, tags);
    /* flag picked up elsewhere in the base class to (re)send a tag event;
     * consumer not visible in this chunk */
    dec->priv->taglist_changed = TRUE;
  }
  GST_AUDIO_DECODER_STREAM_UNLOCK (dec);
}
+
/**
 * gst_audio_decoder_allocate_output_buffer:
 * @dec: a #GstAudioDecoder
 * @size: size of the buffer
 *
 * Helper function that allocates a buffer to hold an audio frame
 * for @dec's current output format.
 *
 * Returns: (transfer full): allocated buffer
 */
GstBuffer *
gst_audio_decoder_allocate_output_buffer (GstAudioDecoder * dec, gsize size)
{
  GstBuffer *buffer = NULL;
  gboolean needs_reconfigure = FALSE;

  /* NOTE(review): unlike most public entry points here there is no
   * GST_IS_AUDIO_DECODER check on @dec */
  g_return_val_if_fail (size > 0, NULL);

  GST_DEBUG ("alloc src buffer");

  GST_AUDIO_DECODER_STREAM_LOCK (dec);

  /* renegotiate first if the output format changed or the src pad was
   * marked for reconfiguration (only when we already have a valid info) */
  needs_reconfigure = gst_pad_check_reconfigure (dec->srcpad);
  if (G_UNLIKELY (dec->priv->ctx.output_format_changed ||
          (GST_AUDIO_INFO_IS_VALID (&dec->priv->ctx.info)
              && needs_reconfigure))) {
    if (!gst_audio_decoder_negotiate_unlocked (dec)) {
      GST_INFO_OBJECT (dec, "Failed to negotiate, fallback allocation");
      /* keep the reconfigure flag set so a later attempt retries */
      gst_pad_mark_reconfigure (dec->srcpad);
      goto fallback;
    }
  }

  /* allocate with the negotiated allocator/params */
  buffer =
      gst_buffer_new_allocate (dec->priv->ctx.allocator, size,
      &dec->priv->ctx.params);
  if (!buffer) {
    GST_INFO_OBJECT (dec, "couldn't allocate output buffer");
    goto fallback;
  }

  GST_AUDIO_DECODER_STREAM_UNLOCK (dec);

  return buffer;
fallback:
  /* last resort: default system-memory allocation */
  buffer = gst_buffer_new_allocate (NULL, size, NULL);
  GST_AUDIO_DECODER_STREAM_UNLOCK (dec);

  return buffer;
}
+
+ /**
+ * gst_audio_decoder_get_allocator:
+ * @dec: a #GstAudioDecoder
+ * @allocator: (out) (allow-none) (transfer full): the #GstAllocator
+ * used
+ * @params: (out) (allow-none) (transfer full): the
+ * #GstAllocationParams of @allocator
+ *
+ * Lets #GstAudioDecoder sub-classes to know the memory @allocator
+ * used by the base class and its @params.
+ *
+ * Unref the @allocator after use it.
+ */
+ void
+ gst_audio_decoder_get_allocator (GstAudioDecoder * dec,
+ GstAllocator ** allocator, GstAllocationParams * params)
+ {
+ g_return_if_fail (GST_IS_AUDIO_DECODER (dec));
+
+ if (allocator)
+ *allocator = dec->priv->ctx.allocator ?
+ gst_object_ref (dec->priv->ctx.allocator) : NULL;
+
+ if (params)
+ *params = dec->priv->ctx.params;
+ }
+
+ /**
+ * gst_audio_decoder_set_use_default_pad_acceptcaps:
+ * @decoder: a #GstAudioDecoder
+ * @use: if the default pad accept-caps query handling should be used
+ *
+ * Lets #GstAudioDecoder sub-classes decide if they want the sink pad
+ * to use the default pad query handler to reply to accept-caps queries.
+ *
+ * By setting this to true it is possible to further customize the default
+ * handler with %GST_PAD_SET_ACCEPT_INTERSECT and
+ * %GST_PAD_SET_ACCEPT_TEMPLATE
+ *
+ * Since: 1.6
+ */
+ void
+ gst_audio_decoder_set_use_default_pad_acceptcaps (GstAudioDecoder * decoder,
+ gboolean use)
+ {
+ decoder->priv->use_default_pad_acceptcaps = use;
+ }
--- /dev/null
+ /* GStreamer Base Class for Tag Demuxing
+ * Copyright (C) 2005 Jan Schmidt <thaytan@mad.scientist.com>
+ * Copyright (C) 2006-2007 Tim-Philipp Müller <tim centricular net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /**
+ * SECTION:gsttagdemux
+ * @title: GstTagDemux
+ * @see_also: GstApeDemux, GstID3Demux
+ * @short_description: Base class for demuxing tags that are in chunks
+ * directly at the beginning or at the end of a file
+ *
+ * Provides a base class for demuxing tags at the beginning or end of a
+ * stream and handles things like typefinding, querying, seeking, and
+ * different modes of operation (chain-based, pull_range-based, and providing
+ * downstream elements with random access if upstream supports that). The tag
+ * is stripped from the output, and all offsets are adjusted for the tag
+ * sizes, so that to the downstream element the stream will appear as if
+ * there was no tag at all. Also, once the tag has been parsed, GstTagDemux
+ * will try to determine the media type of the resulting stream and add a
+ * source pad with the appropriate caps in order to facilitate auto-plugging.
+ *
+ * ## Deriving from GstTagDemux
+ *
+ * Subclasses have to do four things:
+ *
+ * * In their base init function, they must add a pad template for the sink
+ * pad to the element class, describing the media type they can parse in
+ * the caps of the pad template.
+ * * In their class init function, they must override
+ * GST_TAG_DEMUX_CLASS(demux_klass)->identify_tag with their own identify
+ * function.
+ * * In their class init function, they must override
+ * GST_TAG_DEMUX_CLASS(demux_klass)->parse_tag with their own parse
+ * function.
+ * * In their class init function, they must also set
+ * GST_TAG_DEMUX_CLASS(demux_klass)->min_start_size and/or
+ * GST_TAG_DEMUX_CLASS(demux_klass)->min_end_size to the minimum size required
+ * for the identify function to decide whether the stream has a supported tag
+ * or not. A class parsing ID3v1 tags, for example, would set min_end_size to
+ * 128 bytes.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include "gsttagdemux.h"
+
+ #include <gst/base/gsttypefindhelper.h>
+ #include <gst/base/gstadapter.h>
+ #include <gst/gst-i18n-plugin.h>
+ #include <string.h>
+
/* Processing phases of the tag demuxer: first read/strip a tag at the
 * start of the stream, then typefind the remaining data, then stream
 * it downstream. */
typedef enum
{
  GST_TAG_DEMUX_READ_START_TAG,  /* collecting data to parse a leading tag */
  GST_TAG_DEMUX_TYPEFINDING,     /* determining the media type of the rest */
  GST_TAG_DEMUX_STREAMING        /* pushing stripped data downstream */
} GstTagDemuxState;
+
/* Instance-private state of GstTagDemux. */
struct _GstTagDemuxPrivate
{
  GstPad *srcpad;               /* always-present source pad */
  GstPad *sinkpad;              /* sink pad created from subclass template */

  /* Number of bytes to remove from the
   * start of file (tag at beginning) */
  guint strip_start;

  /* Number of bytes to remove from the
   * end of file (tag at end) */
  guint strip_end;

  gint64 upstream_size;         /* total upstream size, -1 if unknown */

  GstTagDemuxState state;       /* current processing phase (see enum) */
  GstAdapter *adapter;          /* accumulates chain-mode input */
  GstBuffer *collect;           /* data collected while reading/typefinding */
  gsize collect_size;
  guint tagsize;                /* size of the tag being read/stripped */
  GstCaps *src_caps;            /* caps currently set on the source pad */

  GstTagList *event_tags;       /* tags received via upstream events */
  GstTagList *parsed_tags;      /* tags extracted by the subclass parser */
  gboolean send_tag_event;      /* whether a tag event still must be pushed */

  GstSegment segment;
  gboolean need_newseg;         /* a new segment event must be sent first */

  guint64 offset;               /* current read/push offset */

  GList *pending_events;        /* events queued until the src pad is ready */
};
+
/* Require at least 8kB of data before we attempt typefind
 * (lowered to 2kB on the Tizen TV profile by the ifdef below).
 * Seems a decent value based on test files.
 * 40kB is massive overkill for the maximum, I think, but it
 * doesn't do any harm (tpm: increased to 64kB after watching
 * typefinding fail on a wavpack file that needed 42kB to succeed) */
++#ifdef TIZEN_PROFILE_TV
++#define TYPE_FIND_MIN_SIZE 2048
++#else
+ #define TYPE_FIND_MIN_SIZE 8192
++#endif
++
+ #define TYPE_FIND_MAX_SIZE 65536
+
+ #define DEFAULT_PULL_BLOCKSIZE 4096
+
+ GST_DEBUG_CATEGORY_STATIC (tagdemux_debug);
+ #define GST_CAT_DEFAULT (tagdemux_debug)
+
+ static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("ANY")
+ );
+
+ static void gst_tag_demux_element_loop (GstTagDemux * demux);
+
+ static void gst_tag_demux_dispose (GObject * object);
+
+ static GstFlowReturn gst_tag_demux_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf);
+ static gboolean gst_tag_demux_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+
+ static gboolean gst_tag_demux_sink_activate_mode (GstPad * pad,
+ GstObject * parent, GstPadMode mode, gboolean active);
+ static gboolean gst_tag_demux_src_activate_mode (GstPad * pad,
+ GstObject * parent, GstPadMode mode, gboolean active);
+ static GstFlowReturn gst_tag_demux_read_range (GstTagDemux * tagdemux,
+ GstObject * parent, guint64 offset, guint length, GstBuffer ** buffer);
+
+ static GstFlowReturn gst_tag_demux_src_getrange (GstPad * srcpad,
+ GstObject * parent, guint64 offset, guint length, GstBuffer ** buffer);
+
+ static void gst_tag_demux_set_src_caps (GstTagDemux * tagdemux,
+ GstCaps * new_caps);
+
+ static gboolean gst_tag_demux_srcpad_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+ static gboolean gst_tag_demux_sink_activate (GstPad * sinkpad,
+ GstObject * parent);
+ static GstStateChangeReturn gst_tag_demux_change_state (GstElement * element,
+ GstStateChange transition);
+ static gboolean gst_tag_demux_pad_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+ static gboolean gst_tag_demux_get_upstream_size (GstTagDemux * tagdemux);
+ static void gst_tag_demux_send_pending_events (GstTagDemux * tagdemux);
+ static void gst_tag_demux_send_tag_event (GstTagDemux * tagdemux);
+ static gboolean gst_tag_demux_send_new_segment (GstTagDemux * tagdemux);
+
+ static void gst_tag_demux_base_init (gpointer g_class);
+ static void gst_tag_demux_class_init (gpointer g_class, gpointer d);
+ static void gst_tag_demux_init (GstTagDemux * obj, GstTagDemuxClass * klass);
+
+ static gpointer parent_class; /* NULL */
+ static gint private_offset = 0;
+
+ /* Cannot use boilerplate macros here because we want the abstract flag */
+ GType
+ gst_tag_demux_get_type (void)
+ {
+ static GType object_type; /* 0 */
+
+ if (object_type == 0) {
+ static const GTypeInfo object_info = {
+ sizeof (GstTagDemuxClass),
+ gst_tag_demux_base_init,
+ NULL, /* base_finalize */
+ gst_tag_demux_class_init,
+ NULL, /* class_finalize */
+ NULL, /* class_data */
+ sizeof (GstTagDemux),
+ 0, /* n_preallocs */
+ (GInstanceInitFunc) gst_tag_demux_init
+ };
+
+ object_type = g_type_register_static (GST_TYPE_ELEMENT,
+ "GstTagDemux", &object_info, G_TYPE_FLAG_ABSTRACT);
+
+ private_offset =
+ g_type_add_instance_private (object_type, sizeof (GstTagDemuxPrivate));
+ }
+
+ return object_type;
+ }
+
/* Returns the per-instance private struct, located private_offset bytes
 * into the instance (offset registered in gst_tag_demux_get_type()). */
static inline GstTagDemuxPrivate *
gst_tag_demux_get_instance_private (GstTagDemux * self)
{
  return (G_STRUCT_MEMBER_P (self, private_offset));
}
+
+ static void
+ gst_tag_demux_base_init (gpointer klass)
+ {
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
+ gst_element_class_add_static_pad_template (element_class, &src_factory);
+
+ GST_DEBUG_CATEGORY_INIT (tagdemux_debug, "tagdemux", 0,
+ "tag demux base class");
+ }
+
+ static void
+ gst_tag_demux_class_init (gpointer klass, gpointer d)
+ {
+ GstTagDemuxClass *tagdemux_class = GST_TAG_DEMUX_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ gobject_class->dispose = gst_tag_demux_dispose;
+
+ element_class->change_state = GST_DEBUG_FUNCPTR (gst_tag_demux_change_state);
+
+ if (private_offset != 0)
+ g_type_class_adjust_private_offset (klass, &private_offset);
+
+ /* subclasses must set at least one of these */
+ tagdemux_class->min_start_size = 0;
+ tagdemux_class->min_end_size = 0;
+ }
+
+ static void
+ gst_tag_demux_reset (GstTagDemux * tagdemux)
+ {
+ GstBuffer **buffer_p = &tagdemux->priv->collect;
+ GstCaps **caps_p = &tagdemux->priv->src_caps;
+
+ tagdemux->priv->strip_start = 0;
+ tagdemux->priv->strip_end = 0;
+ tagdemux->priv->upstream_size = -1;
+ tagdemux->priv->state = GST_TAG_DEMUX_READ_START_TAG;
+ tagdemux->priv->send_tag_event = FALSE;
+
+ gst_buffer_replace (buffer_p, NULL);
+ tagdemux->priv->collect_size = 0;
+ tagdemux->priv->tagsize = 0;
+ gst_adapter_clear (tagdemux->priv->adapter);
+ gst_caps_replace (caps_p, NULL);
+
+ if (tagdemux->priv->event_tags) {
+ gst_tag_list_unref (tagdemux->priv->event_tags);
+ tagdemux->priv->event_tags = NULL;
+ }
+ if (tagdemux->priv->parsed_tags) {
+ gst_tag_list_unref (tagdemux->priv->parsed_tags);
+ tagdemux->priv->parsed_tags = NULL;
+ }
+
+ gst_segment_init (&tagdemux->priv->segment, GST_FORMAT_UNDEFINED);
+ tagdemux->priv->need_newseg = TRUE;
+
+ g_list_foreach (tagdemux->priv->pending_events,
+ (GFunc) gst_mini_object_unref, NULL);
+ g_list_free (tagdemux->priv->pending_events);
+ tagdemux->priv->pending_events = NULL;
+ }
+
/* Instance init: creates sink pad from the subclass-provided template
 * (warning if missing) and the always-present src pad, installs all pad
 * functions, and starts from a clean state. */
static void
gst_tag_demux_init (GstTagDemux * demux, GstTagDemuxClass * gclass)
{
  GstElementClass *element_klass = GST_ELEMENT_CLASS (gclass);
  GstPadTemplate *tmpl;

  demux->priv = gst_tag_demux_get_instance_private (demux);

  /* sink pad: the template comes from the subclass (see SECTION docs) */
  tmpl = gst_element_class_get_pad_template (element_klass, "sink");
  if (tmpl) {
    demux->priv->sinkpad = gst_pad_new_from_template (tmpl, "sink");

    gst_pad_set_activatemode_function (demux->priv->sinkpad,
        GST_DEBUG_FUNCPTR (gst_tag_demux_sink_activate_mode));
    gst_pad_set_activate_function (demux->priv->sinkpad,
        GST_DEBUG_FUNCPTR (gst_tag_demux_sink_activate));
    gst_pad_set_event_function (demux->priv->sinkpad,
        GST_DEBUG_FUNCPTR (gst_tag_demux_sink_event));
    gst_pad_set_chain_function (demux->priv->sinkpad,
        GST_DEBUG_FUNCPTR (gst_tag_demux_chain));
    gst_element_add_pad (GST_ELEMENT (demux), demux->priv->sinkpad);
  } else {
    g_warning ("GstTagDemux subclass %s must provide a sink pad template",
        G_OBJECT_TYPE_NAME (demux));
  }

  /* source pad: from this base class' own "src" template */
  tmpl = gst_element_class_get_pad_template (element_klass, "src");
  demux->priv->srcpad = gst_pad_new_from_template (tmpl, "src");
  gst_pad_set_query_function (demux->priv->srcpad,
      GST_DEBUG_FUNCPTR (gst_tag_demux_pad_query));
  gst_pad_set_event_function (demux->priv->srcpad,
      GST_DEBUG_FUNCPTR (gst_tag_demux_srcpad_event));
  gst_pad_set_activatemode_function (demux->priv->srcpad,
      GST_DEBUG_FUNCPTR (gst_tag_demux_src_activate_mode));
  gst_pad_set_getrange_function (demux->priv->srcpad,
      GST_DEBUG_FUNCPTR (gst_tag_demux_src_getrange));
  gst_pad_use_fixed_caps (demux->priv->srcpad);
  gst_element_add_pad (GST_ELEMENT (demux), demux->priv->srcpad);

  demux->priv->adapter = gst_adapter_new ();
  gst_tag_demux_reset (demux);
}
+
+ static void
+ gst_tag_demux_dispose (GObject * object)
+ {
+ GstTagDemux *tagdemux = GST_TAG_DEMUX (object);
+
+ gst_tag_demux_reset (tagdemux);
+ if (tagdemux->priv->adapter) {
+ g_object_unref (tagdemux->priv->adapter);
+ tagdemux->priv->adapter = NULL;
+ }
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+ }
+
// FIXME: convert to set_caps / sending a caps event
/* Updates the src pad caps if they changed. Ensures a STREAM_START event
 * precedes the caps event: reuses the sticky one from the sink pad when
 * available, otherwise fabricates one with a fresh stream/group id. */
static void
gst_tag_demux_set_src_caps (GstTagDemux * tagdemux, GstCaps * new_caps)
{
  GstCaps *old_caps = tagdemux->priv->src_caps;

  if (old_caps == NULL || !gst_caps_is_equal (new_caps, old_caps)) {
    GstEvent *event;

    gst_caps_replace (&tagdemux->priv->src_caps, new_caps);

    GST_DEBUG_OBJECT (tagdemux, "Changing src pad caps to %" GST_PTR_FORMAT,
        tagdemux->priv->src_caps);

    /* prefer upstream's sticky stream-start so ids stay consistent */
    event =
        gst_pad_get_sticky_event (tagdemux->priv->sinkpad,
        GST_EVENT_STREAM_START, 0);
    if (!event) {
      gchar *stream_id = gst_pad_create_stream_id (tagdemux->priv->srcpad,
          GST_ELEMENT_CAST (tagdemux), NULL);
      GST_DEBUG_OBJECT (tagdemux, "Creating new STREAM_START event");
      event = gst_event_new_stream_start (stream_id);
      g_free (stream_id);
      gst_event_set_group_id (event, gst_util_group_id_next ());
    }
    gst_pad_push_event (tagdemux->priv->srcpad, event);

    gst_pad_set_caps (tagdemux->priv->srcpad, tagdemux->priv->src_caps);
  } else {
    /* Caps never changed */
  }
}
+
+ /* will return FALSE if buffer is beyond end of data; will return TRUE
+ * if buffer was trimmed successfully or didn't need trimming, but may
+ * also return TRUE and set *buf_ref to NULL if the buffer was before
+ * the start of the data */
+ static gboolean
+ gst_tag_demux_trim_buffer (GstTagDemux * tagdemux, GstBuffer ** buf_ref,
+ gsize * buf_size)
+ {
+ GstBuffer *buf = *buf_ref;
+
+ guint trim_start = 0;
+ guint out_size, bsize;
+ guint64 out_offset, boffset;
+ gboolean need_fixup = FALSE;
+ gboolean is_writable;
+
+ bsize = out_size = gst_buffer_get_size (buf);
+ boffset = out_offset = GST_BUFFER_OFFSET (buf);
+
+ /* Adjust offset and length */
+ if (!GST_BUFFER_OFFSET_IS_VALID (buf)) {
+ /* Can't change anything without an offset */
+ *buf_size = bsize;
+ return TRUE;
+ }
+
+ /* If the buffer crosses the tag at the end of file, trim it */
+ if (tagdemux->priv->strip_end > 0) {
+ if (gst_tag_demux_get_upstream_size (tagdemux)) {
+ guint64 v1tag_offset =
+ tagdemux->priv->upstream_size - tagdemux->priv->strip_end;
+
+ if (out_offset >= v1tag_offset) {
+ GST_DEBUG_OBJECT (tagdemux, "Buffer is past the end of the data");
+ goto no_out_buffer_end;
+ }
+
+ if (out_offset + out_size > v1tag_offset) {
+ out_size = v1tag_offset - out_offset;
+ need_fixup = TRUE;
+ }
+ }
+ }
+
+ if (tagdemux->priv->strip_start > 0) {
+ /* If the buffer crosses the tag at the start of file, trim it */
+ if (out_offset <= tagdemux->priv->strip_start) {
+ if (out_offset + out_size <= tagdemux->priv->strip_start) {
+ GST_DEBUG_OBJECT (tagdemux, "Buffer is before the start of the data");
+ goto no_out_buffer_start;
+ }
+
+ trim_start = tagdemux->priv->strip_start - out_offset;
+ out_size -= trim_start;
+ out_offset = 0;
+ } else {
+ out_offset -= tagdemux->priv->strip_start;
+ }
+ need_fixup = TRUE;
+ }
+
+ if (!need_fixup)
+ goto done;
+
+ is_writable = gst_buffer_is_writable (buf);
+
+ if (out_size != bsize || !is_writable) {
+ if (!is_writable) {
+ GstBuffer *sub;
+
+ GST_DEBUG_OBJECT (tagdemux, "Sub-buffering to trim size %d offset %"
+ G_GINT64_FORMAT " to %d offset %" G_GINT64_FORMAT,
+ bsize, boffset, out_size, out_offset);
+
+ sub =
+ gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL, trim_start,
+ out_size);
+ g_return_val_if_fail (sub != NULL, FALSE);
+ if (GST_BUFFER_TIMESTAMP_IS_VALID (buf))
+ GST_BUFFER_TIMESTAMP (sub) = GST_BUFFER_TIMESTAMP (buf);
+ if (GST_BUFFER_DURATION_IS_VALID (buf))
+ GST_BUFFER_DURATION (sub) = GST_BUFFER_DURATION (buf);
+ gst_buffer_unref (buf);
+ *buf_ref = buf = sub;
+ *buf_size = out_size;
+ } else {
+ GST_DEBUG_OBJECT (tagdemux, "Resizing buffer to trim size %d offset %"
+ G_GINT64_FORMAT " to %d offset %" G_GINT64_FORMAT,
+ bsize, boffset, out_size, out_offset);
+
+ gst_buffer_resize (buf, trim_start, out_size);
+ }
+ } else {
+ GST_DEBUG_OBJECT (tagdemux, "Adjusting buffer from size %d offset %"
+ G_GINT64_FORMAT " to %d offset %" G_GINT64_FORMAT,
+ bsize, boffset, out_size, out_offset);
+ }
+
+ GST_BUFFER_OFFSET (buf) = out_offset;
+ GST_BUFFER_OFFSET_END (buf) = out_offset + out_size;
+
+ done:
+
+ return TRUE;
+
+ no_out_buffer_end:
+ {
+ gst_buffer_unref (buf);
+ *buf_ref = NULL;
+ return FALSE;
+ }
+ no_out_buffer_start:
+ {
+ gst_buffer_unref (buf);
+ *buf_ref = NULL;
+ return TRUE;
+ }
+ }
+
+ static void
+ update_collected (GstTagDemux * demux)
+ {
+ guint avail;
+ GstBuffer *buf;
+
+ avail = gst_adapter_available (demux->priv->adapter);
+ if (avail == 0)
+ return;
+
+ buf = gst_adapter_take_buffer (demux->priv->adapter, avail);
+
+ if (demux->priv->collect == NULL) {
+ demux->priv->collect = buf;
+ } else {
+ demux->priv->collect = gst_buffer_append (demux->priv->collect, buf);
+ }
+ demux->priv->collect_size += avail;
+ }
+
/* Push-mode start-tag parsing: once enough bytes have accumulated in
 * the adapter/collect buffer, ask the subclass to identify and parse a
 * tag at the start of the stream.
 *
 * Returns without changing state when more data is needed. Otherwise
 * records the number of bytes to strip in priv->strip_start and moves
 * the state machine on to GST_TAG_DEMUX_TYPEFINDING (also when no tag
 * or a broken tag was found). */
static void
gst_tag_demux_chain_parse_tag (GstTagDemux * demux)
{
  GstBuffer *collect;
  GstTagDemuxResult parse_ret;
  GstTagDemuxClass *klass;
  guint tagsize = 0;
  guint available;

  available =
      demux->priv->collect_size + gst_adapter_available (demux->priv->adapter);

  klass = GST_TAG_DEMUX_CLASS (G_OBJECT_GET_CLASS (demux));

  if (available < klass->min_start_size) {
    GST_DEBUG_OBJECT (demux, "Only %u bytes available, but %u needed "
        "to identify tag", available, klass->min_start_size);
    return;                     /* wait for more data */
  }

  /* priv->tagsize is the size a previous identify/parse round reported */
  if (available < demux->priv->tagsize) {
    GST_DEBUG_OBJECT (demux, "Only %u bytes available, but %u needed "
        "to parse tag", available, demux->priv->tagsize);
    return;                     /* wait for more data */
  }

  update_collected (demux);
  /* we may patch the buffer's offset below, so ensure it is writable */
  demux->priv->collect = gst_buffer_make_writable (demux->priv->collect);
  collect = demux->priv->collect;

  g_assert (gst_buffer_is_writable (collect));


  /* If we receive a buffer that's from the middle of the file,
   * we can't read tags so move to typefinding */
  if (GST_BUFFER_OFFSET_IS_VALID (collect) && GST_BUFFER_OFFSET (collect) != 0) {
    GST_DEBUG_OBJECT (demux, "Received buffer from non-zero offset %"
        G_GINT64_FORMAT ". Can't read tags", GST_BUFFER_OFFSET (collect));
    demux->priv->state = GST_TAG_DEMUX_TYPEFINDING;
    return;
  }

  g_assert (klass->identify_tag != NULL);
  g_assert (klass->parse_tag != NULL);

  if (!klass->identify_tag (demux, collect, TRUE, &tagsize)) {
    GST_DEBUG_OBJECT (demux, "Could not identify start tag");
    demux->priv->state = GST_TAG_DEMUX_TYPEFINDING;
    return;
  }

  demux->priv->tagsize = tagsize;

  /* need to set offset of first buffer to 0 or trimming won't work */
  if (!GST_BUFFER_OFFSET_IS_VALID (collect)) {
    GST_WARNING_OBJECT (demux, "Fixing up first buffer without offset");
    GST_BUFFER_OFFSET (collect) = 0;
  }

  GST_DEBUG_OBJECT (demux, "Identified tag, size = %u bytes", tagsize);

  do {
    GstTagList *tags = NULL;
    guint newsize, saved_size;

    demux->priv->strip_start = tagsize;

    if (available < tagsize) {
      GST_DEBUG_OBJECT (demux, "Only %u bytes available, but %u needed "
          "to parse tag", available, tagsize);
      return;                   /* wait for more data */
    }

    /* temporarily clamp the buffer to exactly the tag for parsing,
     * restoring the full size afterwards */
    saved_size = gst_buffer_get_size (collect);
    gst_buffer_set_size (collect, tagsize);
    newsize = tagsize;

    parse_ret = klass->parse_tag (demux, collect, TRUE, &newsize, &tags);

    gst_buffer_set_size (collect, saved_size);

    switch (parse_ret) {
      case GST_TAG_DEMUX_RESULT_OK:
        demux->priv->strip_start = newsize;
        demux->priv->parsed_tags = tags;
        GST_DEBUG_OBJECT (demux, "Read start tag of size %u", newsize);
        break;
      case GST_TAG_DEMUX_RESULT_BROKEN_TAG:
        /* still strip the broken tag, just without trusting its contents */
        demux->priv->strip_start = newsize;
        demux->priv->parsed_tags = tags;
        GST_WARNING_OBJECT (demux, "Ignoring broken start tag of size %d",
            demux->priv->strip_start);
        break;
      case GST_TAG_DEMUX_RESULT_AGAIN:
        /* the subclass determined a different real tag size: re-parse */
        GST_DEBUG_OBJECT (demux, "Re-parse, this time with %u bytes", newsize);
        g_assert (newsize != tagsize);
        tagsize = newsize;
        break;
    }
  } while (parse_ret == GST_TAG_DEMUX_RESULT_AGAIN);

  GST_LOG_OBJECT (demux, "Parsed tag. Proceeding to typefinding");
  demux->priv->state = GST_TAG_DEMUX_TYPEFINDING;
  demux->priv->send_tag_event = TRUE;
}
+
/* Core push-mode processing. Takes ownership of @buf, updates the
 * segment position, pushes the data into the adapter, then advances
 * the state machine:
 *   READ_START_TAG -> parse any start tag,
 *   TYPEFINDING    -> typefind the tag-stripped data,
 *   STREAMING      -> trim collected data and push it downstream,
 * with each case falling through to the next once it completes.
 * @at_eos is TRUE when called from the EOS handler to force typefinding
 * with whatever data has been collected so far. */
static GstFlowReturn
gst_tag_demux_chain_buffer (GstTagDemux * demux, GstBuffer * buf,
    gboolean at_eos)
{
  gsize size;

  size = gst_buffer_get_size (buf);

  /* Update our segment position info */
  if (demux->priv->segment.format == GST_FORMAT_BYTES) {
    if (GST_BUFFER_OFFSET_IS_VALID (buf))
      demux->priv->segment.position = GST_BUFFER_OFFSET (buf);
    demux->priv->segment.position += size;
  } else if (demux->priv->segment.format == GST_FORMAT_TIME) {
    if (GST_BUFFER_TIMESTAMP_IS_VALID (buf))
      demux->priv->segment.position = GST_BUFFER_TIMESTAMP (buf);
    if (GST_BUFFER_DURATION_IS_VALID (buf))
      demux->priv->segment.position += GST_BUFFER_DURATION (buf);
  }

  /* the adapter takes ownership of the buffer */
  gst_adapter_push (demux->priv->adapter, buf);
  buf = NULL;

  switch (demux->priv->state) {
    case GST_TAG_DEMUX_READ_START_TAG:
      gst_tag_demux_chain_parse_tag (demux);
      if (demux->priv->state != GST_TAG_DEMUX_TYPEFINDING)
        break;
      /* Fall-through */
    case GST_TAG_DEMUX_TYPEFINDING:{
      GstTypeFindProbability probability = 0;
      GstBuffer *typefind_buf = NULL;
      gsize typefind_size;
      GstCaps *caps;

      update_collected (demux);

      if (!at_eos && demux->priv->collect_size <
          TYPE_FIND_MIN_SIZE + demux->priv->strip_start)
        break;                  /* Go get more data first */

      GST_DEBUG_OBJECT (demux, "Typefinding with size %" G_GSIZE_FORMAT,
          demux->priv->collect_size);

      /* Trim the buffer and adjust offset for typefinding;
       * extra ref so priv->collect survives any trimming */
      typefind_buf = demux->priv->collect;
      gst_buffer_ref (typefind_buf);
      if (!gst_tag_demux_trim_buffer (demux, &typefind_buf, &typefind_size))
        return GST_FLOW_EOS;

      if (typefind_buf == NULL)
        break;                  /* Still need more data */

      caps = gst_type_find_helper_for_buffer (GST_OBJECT (demux),
          typefind_buf, &probability);

      if (caps == NULL) {
        if (typefind_size < TYPE_FIND_MAX_SIZE) {
          /* Just break for more data */
          gst_buffer_unref (typefind_buf);
          return GST_FLOW_OK;
        }

        /* We failed typefind */
        GST_ELEMENT_ERROR (demux, STREAM, TYPE_NOT_FOUND, (NULL),
            ("Could not detect type for contents within tag"));
        gst_buffer_unref (typefind_buf);
        gst_buffer_unref (demux->priv->collect);
        demux->priv->collect = NULL;
        demux->priv->collect_size = 0;
        return GST_FLOW_ERROR;
      }
      gst_buffer_unref (typefind_buf);

      GST_DEBUG_OBJECT (demux, "Found type %" GST_PTR_FORMAT " with a "
          "probability of %u", caps, probability);

      gst_tag_demux_set_src_caps (demux, caps);
      gst_caps_unref (caps);

      /* Move onto streaming and fall-through to push out existing
       * data */
      demux->priv->state = GST_TAG_DEMUX_STREAMING;
      /* fall-through */
    }
    case GST_TAG_DEMUX_STREAMING:{
      GstBuffer *outbuf = NULL;
      gsize outbuf_size;

      update_collected (demux);

      /* Trim the buffer and adjust offset */
      if (demux->priv->collect) {
        outbuf = demux->priv->collect;
        demux->priv->collect = NULL;
        demux->priv->collect_size = 0;
        if (!gst_tag_demux_trim_buffer (demux, &outbuf, &outbuf_size))
          return GST_FLOW_EOS;
      }
      if (outbuf) {
        /* Might need a new segment before the buffer */
        if (demux->priv->need_newseg) {
          if (!gst_tag_demux_send_new_segment (demux)) {
            GST_WARNING_OBJECT (demux, "Downstream did not handle newsegment "
                "event as it should");
          }
          demux->priv->need_newseg = FALSE;
        }

        /* send any pending events we cached */
        gst_tag_demux_send_pending_events (demux);

        /* Send our own pending tag event */
        if (demux->priv->send_tag_event) {
          gst_tag_demux_send_tag_event (demux);
          demux->priv->send_tag_event = FALSE;
        }

        GST_LOG_OBJECT (demux, "Pushing buffer %" GST_PTR_FORMAT, outbuf);

        return gst_pad_push (demux->priv->srcpad, outbuf);
      }
    }
  }
  return GST_FLOW_OK;
}
+
+ static GstFlowReturn
+ gst_tag_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
+ {
+ return gst_tag_demux_chain_buffer (GST_TAG_DEMUX (parent), buf, FALSE);
+ }
+
+ static gboolean
+ gst_tag_demux_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+ {
+ GstTagDemux *demux;
+ gboolean ret;
+
+ demux = GST_TAG_DEMUX (parent);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_EOS:
+ if (!gst_pad_has_current_caps (demux->priv->srcpad)) {
+ GST_INFO_OBJECT (demux, "EOS before we found a type");
+
+ /* push final buffer with eos indication to force typefinding */
+ gst_tag_demux_chain_buffer (demux, gst_buffer_new (), TRUE);
+
+ if (!gst_pad_has_current_caps (demux->priv->srcpad)) {
+ GST_ELEMENT_ERROR (demux, STREAM, TYPE_NOT_FOUND, (NULL), (NULL));
+ }
+ }
+ ret = gst_pad_event_default (pad, parent, event);
+ break;
+ case GST_EVENT_SEGMENT:
+ {
+ gst_event_copy_segment (event, &demux->priv->segment);
+
+ demux->priv->need_newseg = TRUE;
+ gst_event_unref (event);
+ ret = TRUE;
+ break;
+ }
+ case GST_EVENT_FLUSH_STOP:
+ case GST_EVENT_FLUSH_START:
+ ret = gst_pad_event_default (pad, parent, event);
+ break;
+ case GST_EVENT_CAPS:
+ /* we drop the caps event. We do typefind and push a new caps event. */
+ ret = gst_pad_event_default (pad, parent, event);
+ break;
+ default:
+ if (demux->priv->need_newseg && GST_EVENT_IS_SERIALIZED (event)) {
+ /* Cache all events if we have a pending segment, so they don't get
+ * lost (esp. tag events) */
+ GST_INFO_OBJECT (demux, "caching event: %" GST_PTR_FORMAT, event);
+ GST_OBJECT_LOCK (demux);
+ demux->priv->pending_events =
+ g_list_append (demux->priv->pending_events, event);
+ GST_OBJECT_UNLOCK (demux);
+ ret = TRUE;
+ } else {
+ ret = gst_pad_event_default (pad, parent, event);
+ }
+ break;
+ }
+
+ return ret;
+ }
+
+ static gboolean
+ gst_tag_demux_get_upstream_size (GstTagDemux * tagdemux)
+ {
+ gint64 len;
+
+ /* Short-cut if we already queried upstream */
+ if (tagdemux->priv->upstream_size > 0)
+ return TRUE;
+
+ if (!gst_pad_peer_query_duration (tagdemux->priv->sinkpad, GST_FORMAT_BYTES,
+ &len) || len <= 0) {
+ return FALSE;
+ }
+
+ tagdemux->priv->upstream_size = len;
+ return TRUE;
+ }
+
/* Handle a seek event in pull mode. Only BYTES seeks are supported and
 * only once streaming has started. Start/stop positions are shifted by
 * strip_start/strip_end so the byte positions downstream sees map back
 * to real file offsets. Follows the standard sequence: (optional)
 * flush-start, take the sink pad's stream lock, flush-stop, commit the
 * new segment, restart the task. Does not take ownership of @event. */
static gboolean
gst_tag_demux_seek_pull (GstTagDemux * tagdemux, GstEvent * event)
{
  GstSeekFlags flags;
  GstSeekType start_type, stop_type;
  GstFormat format;
  gboolean flush;
  gdouble rate;
  gint64 start, stop;
  GstSegment seeksegment = { 0, };

  gst_event_parse_seek (event, &rate, &format, &flags, &start_type, &start,
      &stop_type, &stop);

  /* we can only seek on bytes */
  if (format != GST_FORMAT_BYTES) {
    GST_DEBUG_OBJECT (tagdemux, "Can only seek on BYTES");
    return FALSE;
  }

  if (tagdemux->priv->state != GST_TAG_DEMUX_STREAMING) {
    GST_DEBUG_OBJECT (tagdemux, "Can only seek if streaming already");
    return FALSE;
  }

  /* shift downstream byte positions to real file offsets */
  switch (start_type) {
    case GST_SEEK_TYPE_SET:
      if (start == -1)
        start = 0;
      start += tagdemux->priv->strip_start;
      break;
    case GST_SEEK_TYPE_END:
      /* Adjust the seek to be relative to the start of any end tag
       * (note: 10 bytes before end is represented by stop=-10) */
      if (start > 0)
        start = 0;
      start -= tagdemux->priv->strip_end;
      break;
    case GST_SEEK_TYPE_NONE:
    default:
      break;
  }
  switch (stop_type) {
    case GST_SEEK_TYPE_SET:
      if (stop != -1) {
        /* -1 means the end of the file, pass it upstream intact */
        stop += tagdemux->priv->strip_start;
      }
      break;
    case GST_SEEK_TYPE_END:
      /* Adjust the seek to be relative to the start of any end tag
       * (note: 10 bytes before end is represented by stop=-10) */
      if (stop > 0)
        stop = 0;
      stop -= tagdemux->priv->strip_end;
      break;
    case GST_SEEK_TYPE_NONE:
    default:
      break;
  }

  /* copy segment, we need this because we still need the old
   * segment when we close the current segment. */
  memcpy (&seeksegment, &tagdemux->priv->segment, sizeof (GstSegment));

  GST_DEBUG_OBJECT (tagdemux, "configuring seek");
  gst_segment_do_seek (&seeksegment, rate, format, flags,
      start_type, start, stop_type, stop, NULL);

  flush = ! !(flags & GST_SEEK_FLAG_FLUSH);

  GST_DEBUG_OBJECT (tagdemux, "New segment %" GST_SEGMENT_FORMAT, &seeksegment);

  if (flush) {
    GST_DEBUG_OBJECT (tagdemux, "Starting flush");
    gst_pad_push_event (tagdemux->priv->sinkpad, gst_event_new_flush_start ());
    gst_pad_push_event (tagdemux->priv->srcpad, gst_event_new_flush_start ());
  } else {
    GST_DEBUG_OBJECT (tagdemux, "Non-flushing seek, pausing task");
    gst_pad_pause_task (tagdemux->priv->sinkpad);
  }

  /* now grab the stream lock so that streaming cannot continue, for
   * non flushing seeks when the element is in PAUSED this could block
   * forever. */
  GST_DEBUG_OBJECT (tagdemux, "Waiting for streaming to stop");
  GST_PAD_STREAM_LOCK (tagdemux->priv->sinkpad);

  if (flush) {
    GST_DEBUG_OBJECT (tagdemux, "Stopping flush");
    gst_pad_push_event (tagdemux->priv->sinkpad,
        gst_event_new_flush_stop (TRUE));
    gst_pad_push_event (tagdemux->priv->srcpad,
        gst_event_new_flush_stop (TRUE));
  }

  /* now update the real segment info */
  GST_DEBUG_OBJECT (tagdemux, "Committing new seek segment");
  memcpy (&tagdemux->priv->segment, &seeksegment, sizeof (GstSegment));
  tagdemux->priv->offset = tagdemux->priv->segment.start;

  /* notify start of new segment */
  if (tagdemux->priv->segment.flags & GST_SEGMENT_FLAG_SEGMENT) {
    GstMessage *msg;

    msg = gst_message_new_segment_start (GST_OBJECT (tagdemux),
        GST_FORMAT_BYTES, tagdemux->priv->segment.start);
    gst_element_post_message (GST_ELEMENT (tagdemux), msg);
  }

  /* make the loop function emit a new segment before the next data */
  tagdemux->priv->need_newseg = TRUE;

  /* restart our task since it might have been stopped when we did the
   * flush. */
  gst_pad_start_task (tagdemux->priv->sinkpad,
      (GstTaskFunction) gst_tag_demux_element_loop, tagdemux, NULL);

  /* streaming can continue now */
  GST_PAD_STREAM_UNLOCK (tagdemux->priv->sinkpad);

  return TRUE;
}
+
/* Handle a seek event in push mode by forwarding an adjusted seek
 * upstream. BYTES seeks have their start/stop shifted by the stripped
 * tag sizes before being re-sent; TIME seeks are forwarded untouched.
 * Does not take ownership of @event. Returns TRUE if upstream handled
 * the (possibly rewritten) seek. */
static gboolean
gst_tag_demux_seek_push (GstTagDemux * tagdemux, GstEvent * event)
{
  gboolean res = FALSE;
  gdouble rate;
  GstFormat format;
  GstSeekType start_type, stop_type;
  GstSeekFlags flags;
  gint64 start, stop;

  gst_event_parse_seek (event, &rate, &format, &flags,
      &start_type, &start, &stop_type, &stop);

  if (format == GST_FORMAT_BYTES &&
      tagdemux->priv->state == GST_TAG_DEMUX_STREAMING &&
      gst_pad_is_linked (tagdemux->priv->sinkpad)) {
    GstEvent *upstream;

    /* translate downstream byte positions back to real file offsets */
    switch (start_type) {
      case GST_SEEK_TYPE_SET:
        if (start == -1)
          start = 0;
        start += tagdemux->priv->strip_start;
        break;
      case GST_SEEK_TYPE_END:
        /* Adjust the seek to be relative to the start of any end tag
         * (note: 10 bytes before end is represented by stop=-10) */
        if (start > 0)
          start = 0;
        start -= tagdemux->priv->strip_end;
        break;
      case GST_SEEK_TYPE_NONE:
      default:
        break;
    }
    switch (stop_type) {
      case GST_SEEK_TYPE_SET:
        if (stop != -1) {
          /* -1 means the end of the file, pass it upstream intact */
          stop += tagdemux->priv->strip_start;
        }
        break;
      case GST_SEEK_TYPE_END:
        /* Adjust the seek to be relative to the start of any end tag
         * (note: 10 bytes before end is represented by stop=-10) */
        if (stop > 0)
          stop = 0;
        stop -= tagdemux->priv->strip_end;
        break;
      case GST_SEEK_TYPE_NONE:
      default:
        break;
    }
    upstream = gst_event_new_seek (rate, format, flags,
        start_type, start, stop_type, stop);
    res = gst_pad_push_event (tagdemux->priv->sinkpad, upstream);
  } else if (format == GST_FORMAT_TIME &&
      tagdemux->priv->state == GST_TAG_DEMUX_STREAMING &&
      gst_pad_is_linked (tagdemux->priv->sinkpad)) {
    /* extra ref because pushing takes ownership but the caller unrefs */
    res = gst_pad_push_event (tagdemux->priv->sinkpad, gst_event_ref (event));
  }

  return res;
}
+
+ static gboolean
+ gst_tag_demux_srcpad_event (GstPad * pad, GstObject * parent, GstEvent * event)
+ {
+ GstTagDemux *tagdemux;
+ gboolean res = FALSE;
+
+ tagdemux = GST_TAG_DEMUX (parent);
+
+ /* Handle SEEK events, with adjusted byte offsets and sizes. */
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEEK:
+ {
+ if (GST_PAD_MODE (tagdemux->priv->sinkpad) == GST_PAD_MODE_PUSH)
+ res = gst_tag_demux_seek_push (tagdemux, event);
+ else
+ res = gst_tag_demux_seek_pull (tagdemux, event);
+ break;
+ }
+ default:
+ res = gst_pad_push_event (tagdemux->priv->sinkpad, event);
+ event = NULL;
+ break;
+ }
+
+ if (event)
+ gst_event_unref (event);
+
+ return res;
+ }
+
/* Read and interpret any tag at the end of the file when activating in
 * pull mode. On success *tags holds the parsed tag list (may stay NULL
 * if no tag was found) and priv->strip_end the number of trailing bytes
 * to strip. Returns GST_FLOW_OK (also when there simply is no end tag),
 * GST_FLOW_EOS when not enough data could be read, or another flow
 * error if pulling failed. */
static GstFlowReturn
gst_tag_demux_pull_end_tag (GstTagDemux * demux, GstTagList ** tags)
{
  GstTagDemuxResult parse_ret;
  GstTagDemuxClass *klass;
  GstFlowReturn flow_ret;
  GstTagList *new_tags = NULL;
  GstBuffer *buffer = NULL;
  gboolean have_tag;
  guint64 offset;
  guint tagsize;
  gsize bsize;

  klass = GST_TAG_DEMUX_CLASS (G_OBJECT_GET_CLASS (demux));

  g_assert (klass->identify_tag != NULL);
  g_assert (klass->parse_tag != NULL);

  if (klass->min_end_size == 0) {
    GST_DEBUG_OBJECT (demux, "Not looking for tag at the end");
    return GST_FLOW_OK;
  }

  if (demux->priv->upstream_size < klass->min_end_size) {
    GST_DEBUG_OBJECT (demux, "File too small");
    return GST_FLOW_OK;
  }

  /* Pull enough to identify the tag and retrieve its total size */
  offset = demux->priv->upstream_size - klass->min_end_size;

  flow_ret = gst_pad_pull_range (demux->priv->sinkpad, offset,
      klass->min_end_size, &buffer);

  if (flow_ret != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (demux, "Could not read tag header from end of file, "
        "ret = %s", gst_flow_get_name (flow_ret));
    goto done;
  }

  bsize = gst_buffer_get_size (buffer);

  if (bsize < klass->min_end_size) {
    GST_DEBUG_OBJECT (demux, "Only managed to read %" G_GSIZE_FORMAT " bytes"
        "from file (required: %u bytes)", bsize, klass->min_end_size);
    flow_ret = GST_FLOW_EOS;
    goto done;
  }

  have_tag = klass->identify_tag (demux, buffer, FALSE, &tagsize);

  if (!have_tag) {
    GST_DEBUG_OBJECT (demux, "Could not find tag at end");
    flow_ret = GST_FLOW_OK;
    goto done;
  }

  /* Now pull the entire tag; loop because the subclass may report a
   * different real tag size and request a re-parse (RESULT_AGAIN) */
  do {
    guint newsize, saved_size;

    GST_DEBUG_OBJECT (demux, "Identified tag at end, size=%u bytes", tagsize);

    demux->priv->strip_end = tagsize;

    g_assert (tagsize >= klass->min_end_size);

    /* Get buffer that's exactly the requested size */
    if (bsize != tagsize) {
      gst_buffer_unref (buffer);
      buffer = NULL;

      offset = demux->priv->upstream_size - tagsize;

      flow_ret = gst_pad_pull_range (demux->priv->sinkpad, offset,
          tagsize, &buffer);

      if (flow_ret != GST_FLOW_OK) {
        GST_DEBUG_OBJECT (demux, "Could not read data from end of file at "
            "offset %" G_GUINT64_FORMAT ". ret = %s", offset,
            gst_flow_get_name (flow_ret));
        goto done;
      }

      bsize = gst_buffer_get_size (buffer);

      if (bsize < tagsize) {
        GST_DEBUG_OBJECT (demux, "Only managed to read %" G_GSIZE_FORMAT
            " bytes from file", bsize);
        flow_ret = GST_FLOW_EOS;
        goto done;
      }
    }

    GST_BUFFER_OFFSET (buffer) = offset;

    /* clamp the buffer to the tag for parsing, restore afterwards */
    saved_size = bsize;
    gst_buffer_set_size (buffer, tagsize);
    newsize = tagsize;

    parse_ret = klass->parse_tag (demux, buffer, FALSE, &newsize, &new_tags);

    gst_buffer_set_size (buffer, saved_size);

    switch (parse_ret) {
      case GST_TAG_DEMUX_RESULT_OK:
        flow_ret = GST_FLOW_OK;
        demux->priv->strip_end = newsize;
        GST_DEBUG_OBJECT (demux, "Read tag at end, size %d",
            demux->priv->strip_end);
        break;
      case GST_TAG_DEMUX_RESULT_BROKEN_TAG:
        /* still strip the broken tag, just don't trust its contents */
        flow_ret = GST_FLOW_OK;
        demux->priv->strip_end = newsize;
        GST_WARNING_OBJECT (demux, "Ignoring broken tag at end, size %d",
            demux->priv->strip_end);
        break;
      case GST_TAG_DEMUX_RESULT_AGAIN:
        GST_DEBUG_OBJECT (demux, "Re-parse, this time with %d bytes", newsize);
        g_assert (newsize != tagsize);
        tagsize = newsize;
        break;
    }
  } while (parse_ret == GST_TAG_DEMUX_RESULT_AGAIN);

  /* transfer ownership of the parsed tags to the caller */
  *tags = new_tags;
  new_tags = NULL;

done:
  if (new_tags)
    gst_tag_list_unref (new_tags);
  if (buffer)
    gst_buffer_unref (buffer);
  return flow_ret;
}
+
/* Read and interpret any tag at the start of the file when activating
 * in pull mode. On success *tags holds the parsed tag list (may stay
 * NULL if no tag was found) and priv->strip_start the number of leading
 * bytes to strip. Returns GST_FLOW_OK (also when there is no start
 * tag), GST_FLOW_EOS when not enough data could be read, or another
 * flow error if pulling failed. */
static GstFlowReturn
gst_tag_demux_pull_start_tag (GstTagDemux * demux, GstTagList ** tags)
{
  GstTagDemuxResult parse_ret;
  GstTagDemuxClass *klass;
  GstFlowReturn flow_ret;
  GstTagList *new_tags = NULL;
  GstBuffer *buffer = NULL;
  gboolean have_tag;
  guint req, tagsize;
  gsize bsize;

  klass = GST_TAG_DEMUX_CLASS (G_OBJECT_GET_CLASS (demux));

  g_assert (klass->identify_tag != NULL);
  g_assert (klass->parse_tag != NULL);

  if (klass->min_start_size == 0) {
    GST_DEBUG_OBJECT (demux, "Not looking for tag at the beginning");
    return GST_FLOW_OK;
  }

  /* Handle tag at start. Try with 4kB to start with */
  req = MAX (klass->min_start_size, 4096);

  /* Pull enough to identify the tag and retrieve its total size */
  flow_ret = gst_pad_pull_range (demux->priv->sinkpad, 0, req, &buffer);
  if (flow_ret != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (demux, "Could not read data from start of file ret=%s",
        gst_flow_get_name (flow_ret));
    goto done;
  }

  bsize = gst_buffer_get_size (buffer);

  if (bsize < klass->min_start_size) {
    GST_DEBUG_OBJECT (demux, "Only managed to read %" G_GSIZE_FORMAT
        " bytes from file - no tag in this file", bsize);
    flow_ret = GST_FLOW_EOS;
    goto done;
  }

  have_tag = klass->identify_tag (demux, buffer, TRUE, &tagsize);

  if (!have_tag) {
    GST_DEBUG_OBJECT (demux, "Could not find start tag");
    flow_ret = GST_FLOW_OK;
    goto done;
  }

  GST_DEBUG_OBJECT (demux, "Identified start tag, size = %u bytes", tagsize);

  /* loop because the subclass may report a different real tag size and
   * request a re-parse (RESULT_AGAIN) */
  do {
    guint newsize, saved_size;

    demux->priv->strip_start = tagsize;

    /* Now pull the entire tag */
    g_assert (tagsize >= klass->min_start_size);

    if (bsize < tagsize) {
      gst_buffer_unref (buffer);
      buffer = NULL;

      flow_ret = gst_pad_pull_range (demux->priv->sinkpad, 0, tagsize, &buffer);
      if (flow_ret != GST_FLOW_OK) {
        GST_DEBUG_OBJECT (demux, "Could not read data from start of file, "
            "ret = %s", gst_flow_get_name (flow_ret));
        goto done;
      }

      bsize = gst_buffer_get_size (buffer);

      if (bsize < tagsize) {
        GST_DEBUG_OBJECT (demux, "Only managed to read %" G_GSIZE_FORMAT
            " bytes from file", bsize);
        GST_ELEMENT_ERROR (demux, STREAM, DECODE,
            (_("Failed to read tag: not enough data")), (NULL));
        flow_ret = GST_FLOW_EOS;
        goto done;
      }
    }

    /* clamp the buffer to the tag for parsing, restore afterwards */
    saved_size = bsize;
    gst_buffer_set_size (buffer, tagsize);
    newsize = tagsize;
    parse_ret = klass->parse_tag (demux, buffer, TRUE, &newsize, &new_tags);

    gst_buffer_set_size (buffer, saved_size);

    switch (parse_ret) {
      case GST_TAG_DEMUX_RESULT_OK:
        flow_ret = GST_FLOW_OK;
        demux->priv->strip_start = newsize;
        GST_DEBUG_OBJECT (demux, "Read start tag of size %d", newsize);
        break;
      case GST_TAG_DEMUX_RESULT_BROKEN_TAG:
        /* still strip the broken tag, just don't trust its contents */
        flow_ret = GST_FLOW_OK;
        demux->priv->strip_start = newsize;
        GST_WARNING_OBJECT (demux, "Ignoring broken start tag of size %d",
            demux->priv->strip_start);
        break;
      case GST_TAG_DEMUX_RESULT_AGAIN:
        GST_DEBUG_OBJECT (demux, "Re-parse, this time with %d bytes", newsize);
        g_assert (newsize != tagsize);
        tagsize = newsize;
        break;
    }
  } while (parse_ret == GST_TAG_DEMUX_RESULT_AGAIN);

  /* transfer ownership of the parsed tags to the caller */
  *tags = new_tags;
  new_tags = NULL;

done:
  if (new_tags)
    gst_tag_list_unref (new_tags);
  if (buffer)
    gst_buffer_unref (buffer);
  return flow_ret;
}
+
/* Pull-mode discovery, run once from the task function.
 * 1. try to read start and end tags in pull mode
 * 2. typefind the contents between the tags
 * 3. if we didn't find any caps, fail.
 * 4. set caps on srcpad
 * Merged tags are stored in priv->parsed_tags and the state machine is
 * advanced to GST_TAG_DEMUX_STREAMING on success. */
static GstFlowReturn
gst_tag_demux_element_find (GstTagDemux * demux)
{
  GstTagDemuxClass *klass;
  GstTypeFindProbability probability = 0;
  GstFlowReturn ret = GST_FLOW_OK;
  GstTagList *start_tags = NULL;
  GstTagList *end_tags = NULL;
  gboolean e_tag_ok, s_tag_ok;
  GstCaps *caps = NULL;

  /* Look for tags at start and end of file */
  GST_DEBUG_OBJECT (demux, "Activated pull mode. Looking for tags");
  if (!gst_tag_demux_get_upstream_size (demux))
    goto no_size;

  demux->priv->strip_start = 0;
  demux->priv->strip_end = 0;

  /* 1 - Read tags (EOS from either reader just means "no/short tag") */
  ret = gst_tag_demux_pull_start_tag (demux, &start_tags);
  if (ret != GST_FLOW_OK && ret != GST_FLOW_EOS)
    goto read_tag_error;
  s_tag_ok = ret == GST_FLOW_OK;
  ret = gst_tag_demux_pull_end_tag (demux, &end_tags);
  if (ret != GST_FLOW_OK && ret != GST_FLOW_EOS)
    goto read_tag_error;
  e_tag_ok = ret == GST_FLOW_OK;
  ret = GST_FLOW_OK;

  klass = GST_TAG_DEMUX_CLASS (G_OBJECT_GET_CLASS (demux));

  if (klass->merge_tags != NULL) {
    demux->priv->parsed_tags = klass->merge_tags (demux, start_tags, end_tags);
  } else {
    /* we merge in REPLACE mode, so put the less important tags first, which
     * we'll just assume is the end tag (subclasses may change this behaviour
     * or make it configurable by overriding the merge_tags vfunc) */
    demux->priv->parsed_tags =
        gst_tag_list_merge (end_tags, start_tags, GST_TAG_MERGE_REPLACE);
  }

  if (start_tags)
    gst_tag_list_unref (start_tags);
  if (end_tags)
    gst_tag_list_unref (end_tags);

  /* Only happens if both are EOS, i.e. not enough data could be read */
  if (!e_tag_ok && !s_tag_ok)
    goto no_tags;

  if (demux->priv->parsed_tags != NULL) {
    demux->priv->send_tag_event = TRUE;
  }

  if (demux->priv->upstream_size <=
      demux->priv->strip_start + demux->priv->strip_end)
    goto no_data;

  /* 2 - Do typefinding on data, but not if downstream is in charge */
  if (GST_PAD_MODE (demux->priv->srcpad) == GST_PAD_MODE_PULL)
    goto skip_typefinding;

  /* typefind via gst_tag_demux_read_range so the tag bytes are hidden */
  ret = gst_type_find_helper_get_range_full (GST_OBJECT (demux), NULL,
      (GstTypeFindHelperGetRangeFunction) gst_tag_demux_read_range,
      demux->priv->upstream_size
      - (demux->priv->strip_start + demux->priv->strip_end), NULL,
      &caps, &probability);
  if (ret != GST_FLOW_OK)
    goto read_tag_error;

  GST_INFO_OBJECT (demux, "Found type %" GST_PTR_FORMAT " with a "
      "probability of %u", caps, probability);

  /* 3 - If we didn't find the caps, fail */
  if (caps == NULL)
    goto no_caps;

  /* tag reading and typefinding were already done, don't do them again in
   * the chain function if we end up in push mode */
  demux->priv->state = GST_TAG_DEMUX_STREAMING;

  /* 4 - Set the srcpad caps now that we know them */
  gst_tag_demux_set_src_caps (demux, caps);
  gst_caps_unref (caps);

skip_typefinding:

  /* set it again, in case we skipped typefinding */
  demux->priv->state = GST_TAG_DEMUX_STREAMING;
  demux->priv->offset += demux->priv->strip_start;

  return ret;

  /* ERRORS */
no_size:
  {
    GST_ELEMENT_ERROR (demux, STREAM, TYPE_NOT_FOUND,
        ("Could not get stream size"), (NULL));
    return GST_FLOW_ERROR;
  }
read_tag_error:
  {
    if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS)
      GST_ELEMENT_FLOW_ERROR (demux, ret);
    return ret;
  }
no_tags:
  {
    GST_ELEMENT_ERROR (demux, STREAM, TYPE_NOT_FOUND,
        ("Could not get start and/or end tag"), (NULL));
    return GST_FLOW_ERROR;
  }
no_data:
  {
    /* There was no data (probably due to a truncated file) */
    /* so we don't know about type either */
    GST_ELEMENT_ERROR (demux, STREAM, TYPE_NOT_FOUND, ("No data in file"),
        (NULL));
    return GST_FLOW_ERROR;
  }
no_caps:
  {
    GST_ELEMENT_ERROR (demux, STREAM, TYPE_NOT_FOUND,
        ("Could not detect type of contents"), (NULL));
    return GST_FLOW_ERROR;
  }
}
+
/* Task function driving pull mode. First runs the one-shot tag-reading
 * and typefinding step (gst_tag_demux_element_find), then repeatedly
 * pulls fixed-size blocks and pushes them downstream with offsets
 * shifted by the stripped start tag. On a non-OK flow return the task
 * is paused and the usual EOS / segment-done / error handling applies. */
static void
gst_tag_demux_element_loop (GstTagDemux * demux)
{
  GstFlowReturn ret;

  switch (demux->priv->state) {
    case GST_TAG_DEMUX_READ_START_TAG:
    case GST_TAG_DEMUX_TYPEFINDING:
      ret = gst_tag_demux_element_find (demux);
      break;
    case GST_TAG_DEMUX_STREAMING:
    {
      GstBuffer *outbuf = NULL;

      if (demux->priv->need_newseg) {
        demux->priv->need_newseg = FALSE;
        /* FIXME: check segment, should be 0-N for downstream */
        gst_tag_demux_send_new_segment (demux);
      }

      /* Send our own pending tag event */
      if (demux->priv->send_tag_event) {
        gst_tag_demux_send_tag_event (demux);
        demux->priv->send_tag_event = FALSE;
      }

      /* Pull data and push it downstream */
      ret = gst_pad_pull_range (demux->priv->sinkpad, demux->priv->offset,
          DEFAULT_PULL_BLOCKSIZE, &outbuf);

      if (ret != GST_FLOW_OK)
        break;

      /* downstream offsets hide the stripped start tag bytes */
      GST_BUFFER_OFFSET (outbuf) =
          demux->priv->offset - demux->priv->strip_start;
      demux->priv->offset += gst_buffer_get_size (outbuf);
      GST_BUFFER_OFFSET_END (outbuf) =
          demux->priv->offset - demux->priv->strip_start;

      ret = gst_pad_push (demux->priv->srcpad, outbuf);
      break;
    }
    default:
      ret = GST_FLOW_ERROR;
      break;
  }
  if (ret != GST_FLOW_OK)
    goto pause;

  return;

  /* ERRORS */
pause:
  {
    const gchar *reason = gst_flow_get_name (ret);
    gboolean push_eos = FALSE;

    GST_LOG_OBJECT (demux, "pausing task, reason %s", reason);
    gst_pad_pause_task (demux->priv->sinkpad);

    if (ret == GST_FLOW_EOS) {
      /* perform EOS logic */

      if (demux->priv->segment.flags & GST_SEEK_FLAG_SEGMENT) {
        gint64 stop;

        /* for segment playback we need to post when (in stream time)
         * we stopped, this is either stop (when set) or the duration. */
        if ((stop = demux->priv->segment.stop) == -1)
          stop = demux->priv->offset;

        GST_LOG_OBJECT (demux, "Sending segment done, at end of segment");
        gst_element_post_message (GST_ELEMENT_CAST (demux),
            gst_message_new_segment_done (GST_OBJECT_CAST (demux),
                GST_FORMAT_BYTES, stop));
        gst_pad_push_event (demux->priv->srcpad,
            gst_event_new_segment_done (GST_FORMAT_BYTES, stop));
      } else {
        push_eos = TRUE;
      }
    } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
      /* for fatal errors we post an error message */
      GST_ELEMENT_FLOW_ERROR (demux, ret);
      push_eos = TRUE;
    }
    if (push_eos) {
      /* send EOS, and prevent hanging if no streams yet */
      GST_LOG_OBJECT (demux, "Sending EOS, at end of stream");
      gst_pad_push_event (demux->priv->srcpad, gst_event_new_eos ());
    }
    return;
  }
}
+
+ static gboolean
+ gst_tag_demux_sink_activate_mode (GstPad * pad, GstObject * parent,
+ GstPadMode mode, gboolean active)
+ {
+ GstTagDemux *demux = GST_TAG_DEMUX (parent);
+ gboolean res;
+
+ switch (mode) {
+ case GST_PAD_MODE_PULL:
+ if (active) {
+ demux->priv->need_newseg = TRUE;
+ demux->priv->offset = 0;
+ res = TRUE;
+ } else {
+ res = gst_pad_stop_task (pad);
+ }
+ break;
+ default:
+ res = TRUE;
+ break;
+ }
+
+ if (active)
+ GST_TAG_DEMUX (parent)->priv->state = GST_TAG_DEMUX_READ_START_TAG;
+
+ return res;
+ }
+
+ static gboolean
+ gst_tag_demux_sink_activate (GstPad * sinkpad, GstObject * parent)
+ {
+ GstTagDemux *demux;
+ GstQuery *query;
+ gboolean pull_mode;
+
+ demux = GST_TAG_DEMUX (parent);
+
+ /* 1: */
+ /* If we can activate pull_range upstream, then read any end and start
+ * tags, otherwise activate in push mode and the chain function will
+ * collect buffers, read the start tag and output a buffer to end
+ * preroll.
+ */
+ query = gst_query_new_scheduling ();
+
+ if (!gst_pad_peer_query (sinkpad, query)) {
+ gst_query_unref (query);
+ goto activate_push;
+ }
+
+ pull_mode = gst_query_has_scheduling_mode_with_flags (query,
+ GST_PAD_MODE_PULL, GST_SCHEDULING_FLAG_SEEKABLE);
+ gst_query_unref (query);
+
+ if (!pull_mode)
+ goto activate_push;
+
+ if (!gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PULL, TRUE))
+ goto activate_push;
+
+ /* only start our task if we ourselves decide to start in pull mode */
+ return gst_pad_start_task (sinkpad,
+ (GstTaskFunction) gst_tag_demux_element_loop, demux, NULL);
+
+ activate_push:
+ {
+ GST_DEBUG_OBJECT (demux, "No pull mode. Changing to push, but won't be "
+ "able to read end tags");
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PUSH, TRUE);
+ }
+ }
+
+ static gboolean
+ gst_tag_demux_src_activate_mode (GstPad * pad, GstObject * parent,
+ GstPadMode mode, gboolean active)
+ {
+ gboolean res;
+ GstTagDemux *demux = GST_TAG_DEMUX (parent);
+
+ switch (mode) {
+ case GST_PAD_MODE_PULL:
+ /* make sure our task stops pushing, we can't call _stop here
+ * because this activation might happen from the streaming thread. */
+ gst_pad_pause_task (demux->priv->sinkpad);
+ res = gst_pad_activate_mode (demux->priv->sinkpad, mode, active);
+ break;
+ default:
+ res = TRUE;
+ break;
+ }
+ return res;
+ }
+
+ static inline GstFlowReturn
+ gst_tag_demux_ensure_tags (GstTagDemux * demux)
+ {
+ GstFlowReturn flow = GST_FLOW_OK;
+
+ if (G_UNLIKELY (demux->priv->state == GST_TAG_DEMUX_READ_START_TAG &&
+ GST_PAD_MODE (demux->priv->srcpad) == GST_PAD_MODE_PULL)) {
+
+ flow = gst_tag_demux_element_find (demux);
+ GST_INFO_OBJECT (demux, "pulled tags: %s", gst_flow_get_name (flow));
+ }
+ return flow;
+ }
+
/* Serves a pull request in "stripped stream" coordinates: the requested
 * offset is translated back into upstream coordinates (re-adding
 * strip_start) and the length is clamped so the end tag region is never
 * handed out. Requests at or beyond the end tag return GST_FLOW_EOS.
 * NOTE(review): the 'parent' argument is unused here; the in-file caller
 * passes NULL. */
static GstFlowReturn
gst_tag_demux_read_range (GstTagDemux * demux, GstObject * parent,
    guint64 offset, guint length, GstBuffer ** buffer)
{
  GstFlowReturn ret;
  guint64 in_offset;
  guint in_length;
  gsize size;

  g_return_val_if_fail (buffer != NULL, GST_FLOW_ERROR);

  /* Ensure we already have computed our tags to properly use the offsets
   * below */
  ret = gst_tag_demux_ensure_tags (demux);
  if (ret != GST_FLOW_OK)
    return ret;

  /* Adjust offset and length of the request to trim off tag information.
   * For the returned buffer, adjust the output offset to match what downstream
   * should see */
  in_offset = offset + demux->priv->strip_start;

  /* need the total upstream size to know where the end tag starts */
  if (!gst_tag_demux_get_upstream_size (demux))
    return GST_FLOW_ERROR;

  /* clamp so the read never reaches into the end tag */
  if (in_offset + length >= demux->priv->upstream_size - demux->priv->strip_end) {
    if (in_offset + demux->priv->strip_end >= demux->priv->upstream_size)
      return GST_FLOW_EOS;
    in_length = demux->priv->upstream_size - demux->priv->strip_end - in_offset;
  } else {
    in_length = length;
  }

  ret = gst_pad_pull_range (demux->priv->sinkpad, in_offset, in_length, buffer);

  if (ret == GST_FLOW_OK && *buffer) {
    /* drop any tag bytes that still ended up inside the pulled buffer */
    if (!gst_tag_demux_trim_buffer (demux, buffer, &size))
      goto read_beyond_end;

    /* this should only happen in streaming mode */
    g_assert (*buffer != NULL);
  }

  return ret;

read_beyond_end:
  {
    GST_DEBUG_OBJECT (demux, "attempted read beyond end of file");
    if (*buffer != NULL) {
      gst_buffer_unref (*buffer);
      *buffer = NULL;
    }
    return GST_FLOW_EOS;
  }
}
+
+ static GstFlowReturn
+ gst_tag_demux_src_getrange (GstPad * srcpad, GstObject * parent,
+ guint64 offset, guint length, GstBuffer ** buffer)
+ {
+ GstTagDemux *demux = GST_TAG_DEMUX (parent);
+
+ /* downstream in pull mode won't miss a newsegment event,
+ * but it likely appreciates other (tag) events */
+ if (demux->priv->need_newseg) {
+ gst_tag_demux_send_pending_events (demux);
+ demux->priv->need_newseg = FALSE;
+ }
+
+ if (demux->priv->send_tag_event) {
+ gst_tag_demux_send_tag_event (demux);
+ demux->priv->send_tag_event = FALSE;
+ }
+
+ return gst_tag_demux_read_range (demux, NULL, offset, length, buffer);
+ }
+
+ static GstStateChangeReturn
+ gst_tag_demux_change_state (GstElement * element, GstStateChange transition)
+ {
+ GstStateChangeReturn ret;
+ GstTagDemux *demux = GST_TAG_DEMUX (element);
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ /* Ensure that nothing is in any of the streaming thread functions
+ * anymore. While the above has deactivated all pads, there is nothing
+ * preventing downstream from activating our srcpad again and calling the
+ * getrange() function. Although we're in READY!
+ */
+ GST_PAD_STREAM_LOCK (demux->priv->srcpad);
+ gst_tag_demux_reset (demux);
+ GST_PAD_STREAM_UNLOCK (demux->priv->srcpad);
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+ }
+
/* Query handler. POSITION/DURATION answers coming from upstream are in
 * un-stripped byte coordinates, so BYTES results are adjusted here to
 * hide the tag regions from downstream. */
static gboolean
gst_tag_demux_pad_query (GstPad * pad, GstObject * parent, GstQuery * query)
{
  /* For a position or duration query, adjust the returned
   * bytes to strip off the end and start areas */
  GstTagDemux *demux = GST_TAG_DEMUX (parent);
  GstFormat format;
  gint64 result;
  gboolean res = TRUE;

  /* FIXME: locking ? */
  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_SCHEDULING:
      res = gst_pad_peer_query (demux->priv->sinkpad, query);
      break;
    case GST_QUERY_POSITION:
    {
      if (!(res = gst_pad_peer_query (demux->priv->sinkpad, query)))
        goto done;

      gst_query_parse_position (query, &format, &result);
      if (format == GST_FORMAT_BYTES) {
        /* translate to "start tag stripped" coordinates */
        result -= demux->priv->strip_start;
        gst_query_set_position (query, format, result);
      }
      break;
    }
    case GST_QUERY_DURATION:
    {
      if (!(res = gst_pad_peer_query (demux->priv->sinkpad, query)))
        goto done;

      gst_query_parse_duration (query, &format, &result);
      if (format == GST_FORMAT_BYTES) {
        /* if downstream activated us in pull mode right away, e.g. in case of
         * filesrc ! id3demux ! xyzparse ! .., read tags here, since we don't
         * have a streaming thread of our own to do that. We do it here and
         * not in get_range(), so we can return the right size in bytes.. */
        gst_tag_demux_ensure_tags (demux);
        /* subtract both tag regions, clamping at zero */
        result -= demux->priv->strip_start + demux->priv->strip_end;
        if (result < 0)
          result = 0;
        gst_query_set_duration (query, format, result);
      }
      break;
    }
    default:
      res = gst_pad_query_default (pad, parent, query);
      break;
  }
done:
  return res;
}
+
+ static void
+ gst_tag_demux_send_pending_events (GstTagDemux * demux)
+ {
+ GList *events;
+
+ /* send any pending events we cached */
+ GST_OBJECT_LOCK (demux);
+ events = demux->priv->pending_events;
+ demux->priv->pending_events = NULL;
+ GST_OBJECT_UNLOCK (demux);
+
+ while (events != NULL) {
+ GST_DEBUG_OBJECT (demux->priv->srcpad, "sending cached %s event: %"
+ GST_PTR_FORMAT, GST_EVENT_TYPE_NAME (events->data), events->data);
+ gst_pad_push_event (demux->priv->srcpad, GST_EVENT (events->data));
+ events = g_list_delete_link (events, events);
+ }
+ }
+
+ static void
+ gst_tag_demux_send_tag_event (GstTagDemux * demux)
+ {
+ /* FIXME: what's the correct merge mode? Docs need to tell... */
+ GstTagList *merged = gst_tag_list_merge (demux->priv->event_tags,
+ demux->priv->parsed_tags, GST_TAG_MERGE_KEEP);
+
+ if (merged) {
+ GstEvent *event = gst_event_new_tag (merged);
+
+ GST_DEBUG_OBJECT (demux, "Sending tag event on src pad");
+ gst_pad_push_event (demux->priv->srcpad, event);
+ }
+ }
+
+ static gboolean
+ gst_tag_demux_send_new_segment (GstTagDemux * tagdemux)
+ {
+ GstEvent *event;
+ gint64 start, stop, time;
+ GstSegment *seg = &tagdemux->priv->segment;
+ GstSegment newseg;
+
+ if (seg->format == GST_FORMAT_UNDEFINED) {
+ GST_LOG_OBJECT (tagdemux,
+ "No new segment received before first buffer. Using default");
+ gst_segment_init (seg, GST_FORMAT_BYTES);
+ seg->start = tagdemux->priv->strip_start;
+ seg->time = tagdemux->priv->strip_start;
+ }
+
+ /* Can't adjust segments in non-BYTES formats */
+ if (tagdemux->priv->segment.format != GST_FORMAT_BYTES) {
+ event = gst_event_new_segment (seg);
+ return gst_pad_push_event (tagdemux->priv->srcpad, event);
+ }
+
+ start = seg->start;
+ stop = seg->stop;
+ time = seg->time;
+
+ g_return_val_if_fail (start != -1, FALSE);
+ g_return_val_if_fail (time != -1, FALSE);
+
+ if (tagdemux->priv->strip_end > 0) {
+ if (gst_tag_demux_get_upstream_size (tagdemux)) {
+ guint64 v1tag_offset =
+ tagdemux->priv->upstream_size - tagdemux->priv->strip_end;
+
+ if (start >= v1tag_offset) {
+ /* Segment is completely within the end tag, output an open-ended
+ * segment, even though all the buffers will get trimmed away */
+ start = v1tag_offset;
+ stop = -1;
+ }
+
+ if (stop != -1 && stop >= v1tag_offset) {
+ GST_DEBUG_OBJECT (tagdemux,
+ "Segment crosses the end tag. Trimming end");
+ stop = v1tag_offset;
+ }
+ }
+ }
+
+ if (tagdemux->priv->strip_start > 0) {
+ if (start > tagdemux->priv->strip_start)
+ start -= tagdemux->priv->strip_start;
+ else
+ start = 0;
+
+ if (time > tagdemux->priv->strip_start)
+ time -= tagdemux->priv->strip_start;
+ else
+ time = 0;
+
+ if (stop != -1) {
+ if (stop > tagdemux->priv->strip_start)
+ stop -= tagdemux->priv->strip_start;
+ else
+ stop = 0;
+ }
+ }
+
+ GST_DEBUG_OBJECT (tagdemux, "Sending segment %" GST_SEGMENT_FORMAT, seg);
+
+ gst_segment_copy_into (seg, &newseg);
+ newseg.start = start;
+ newseg.stop = stop;
+ newseg.time = time;
+ event = gst_event_new_segment (&newseg);
+
+ return gst_pad_push_event (tagdemux->priv->srcpad, event);
+ }
--- /dev/null
--- /dev/null
++/*
++ * GStreamer Camera Control Interface
++ *
++ * Copyright (c) 2000 - 2012 Samsung Electronics Co., Ltd.
++ *
++ * Contact: Jeongmo Yang <jm80.yang@samsung.com>
++ *
++ * This library is free software; you can redistribute it and/or modify it under
++ * the terms of the GNU Lesser General Public License as published by the
++ * Free Software Foundation; either version 2.1 of the License, or (at your option)
++ * any later version.
++ *
++ * This library is distributed in the hope that it will be useful, but WITHOUT ANY
++ * WARRANTY; without even the implied warranty of MERCHANTABILITY or
++ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
++ * License for more details.
++ *
++ * You should have received a copy of the GNU Lesser General Public License
++ * along with this library; if not, write to the Free Software Foundation, Inc., 51
++ * Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
++ *
++ */
++
++/*============================================================================================
++EDIT HISTORY FOR MODULE
++
++This section contains comments describing changes made to the module.
++Notice that changes are listed in reverse chronological order.
++
++when who what, where, why
++--------- ------------------------ ----------------------------------------------
++12/09/08 jm80.yang@samsung.com Created
++
++============================================================================================*/
++
++
++#ifdef HAVE_CONFIG_H
++#include "config.h"
++#endif
++
++#include "cameracontrol.h"
++
++/**
++ * SECTION:gstcameracontrol
++ * @short_description: Interface for camera control
++ */
++
/* Signals emitted by the GstCameraControl interface */
enum {
	CONTROL_VALUE_CHANGED,	/* "control-value-changed" (channel, value) */
	CONTROL_LAST_SIGNAL
};

static void gst_camera_control_base_init(GstCameraControlInterface *iface);

/* Signal ids; filled in once from gst_camera_control_base_init() */
static guint gst_camera_control_signals[CONTROL_LAST_SIGNAL] = { 0 };
++
++GType gst_camera_control_get_type(void)
++{
++ static GType gst_camera_control_type = 0;
++
++ if (!gst_camera_control_type) {
++ static const GTypeInfo gst_camera_control_info =
++ {
++ sizeof(GstCameraControlInterface),
++ (GBaseInitFunc)gst_camera_control_base_init,
++ NULL,
++ NULL,
++ NULL,
++ NULL,
++ 0,
++ 0,
++ NULL,
++ };
++
++ gst_camera_control_type = g_type_register_static(G_TYPE_INTERFACE,
++ "GstCameraControl", &gst_camera_control_info, 0);
++ }
++
++ return gst_camera_control_type;
++}
++
/* GBaseInitFunc for the interface: installs the "control-value-changed"
 * signal exactly once, then resets every vfunc slot to NULL so that an
 * implementing class only needs to fill in what it supports.
 * NOTE(review): the slot reset runs on every base_init call (once per
 * implementing class), not just the first — presumably intentional per
 * the GObject base_init contract; confirm before changing. */
static void gst_camera_control_base_init(GstCameraControlInterface *iface)
{
	static gboolean initialized = FALSE;

	if (!initialized) {
		/* emitted via gst_camera_control_value_changed() */
		gst_camera_control_signals[CONTROL_VALUE_CHANGED] =
			g_signal_new("control-value-changed",
				GST_TYPE_CAMERA_CONTROL, G_SIGNAL_RUN_LAST,
				G_STRUCT_OFFSET(GstCameraControlInterface, value_changed),
				NULL, NULL, NULL,
				G_TYPE_NONE, 2, GST_TYPE_CAMERA_CONTROL_CHANNEL, G_TYPE_INT);

		initialized = TRUE;
	}

	/* TODO: pick a meaningful default control type */
	iface->camera_control_type = 0;

	/* default virtual functions */
	iface->list_channels = NULL;
	iface->set_exposure = NULL;
	iface->get_exposure = NULL;
	iface->set_capture_mode = NULL;
	iface->get_capture_mode = NULL;
	iface->set_strobe = NULL;
	iface->get_strobe = NULL;
	iface->set_detect = NULL;
	iface->get_detect = NULL;
	iface->set_value = NULL;
	iface->get_value = NULL;
	iface->set_zoom = NULL;
	iface->get_zoom = NULL;
	iface->set_focus = NULL;
	iface->get_focus = NULL;
	iface->start_auto_focus = NULL;
	iface->stop_auto_focus = NULL;
	iface->set_focus_level = NULL;
	iface->get_focus_level = NULL;
	iface->set_auto_focus_area = NULL;
	iface->get_auto_focus_area = NULL;
	iface->set_wdr = NULL;
	iface->get_wdr = NULL;
	iface->set_ahs = NULL;
	iface->get_ahs = NULL;
	iface->set_part_color = NULL;
	iface->get_part_color = NULL;
	iface->get_exif_info = NULL;
	iface->set_capture_command = NULL;
	iface->set_record_command = NULL;
	iface->start_face_zoom = NULL;
	iface->stop_face_zoom = NULL;
	iface->set_ae_lock = NULL;
	iface->get_ae_lock = NULL;
	iface->set_awb_lock = NULL;
	iface->get_awb_lock = NULL;
}
++
++/**
++ * gst_camera_control_list_channels:
++ * @control: A #GstCameraControl instance
++ *
++ * Retrieve a list of the available channels.
++ *
++ * Returns: (element-type GstCameraControlChannel) (transfer none): A
++ * GList containing pointers to #GstCameraControlChannel
++ * objects. The list is owned by the #GstCameraControl
++ * instance and must not be freed.
++ */
++const GList *gst_camera_control_list_channels(GstCameraControl *control)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), NULL);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->list_channels)
++ return iface->list_channels(control);
++
++ return NULL;
++}
++
++
++gboolean gst_camera_control_set_value(GstCameraControl *control, GstCameraControlChannel *control_channel, gint value)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->set_value)
++ return iface->set_value(control, control_channel, value);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_get_value(GstCameraControl *control, GstCameraControlChannel *control_channel, gint *value)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->get_value)
++ return iface->get_value(control, control_channel, value);
++
++ return FALSE;
++}
++
++
++
++gboolean gst_camera_control_set_exposure(GstCameraControl *control, gint type, gint value1, gint value2)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->set_exposure)
++ return iface->set_exposure(control, type, value1, value2);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_get_exposure(GstCameraControl *control, gint type, gint *value1, gint *value2)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->get_exposure)
++ return iface->get_exposure(control, type, value1, value2);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_set_capture_mode(GstCameraControl *control, gint type, gint value)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->set_capture_mode)
++ return iface->set_capture_mode(control, type, value);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_get_capture_mode(GstCameraControl *control, gint type, gint *value)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->get_capture_mode)
++ return iface->get_capture_mode(control, type, value);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_set_strobe(GstCameraControl *control, gint type, gint value)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->set_strobe)
++ return iface->set_strobe(control, type, value);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_get_strobe(GstCameraControl *control, gint type, gint *value)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->get_strobe)
++ return iface->get_strobe(control, type, value);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_set_detect(GstCameraControl *control, gint type, gint value)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->set_detect)
++ return iface->set_detect(control, type, value);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_get_detect(GstCameraControl *control, gint type, gint *value)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->get_detect)
++ return iface->get_detect(control, type, value);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_set_zoom(GstCameraControl *control, gint type, gint value)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->set_zoom)
++ return iface->set_zoom(control, type, value);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_get_zoom(GstCameraControl *control, gint type, gint *value)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->get_zoom)
++ return iface->get_zoom(control, type, value);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_set_focus(GstCameraControl *control, gint mode, gint range)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->set_focus)
++ return iface->set_focus(control, mode, range);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_get_focus(GstCameraControl *control, gint *mode, gint *range)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->get_focus)
++ return iface->get_focus(control, mode, range);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_start_auto_focus(GstCameraControl *control)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->start_auto_focus)
++ return iface->start_auto_focus(control);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_stop_auto_focus(GstCameraControl *control)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->stop_auto_focus)
++ return iface->stop_auto_focus(control);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_set_focus_level(GstCameraControl *control, gint manual_level)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->set_focus_level)
++ return iface->set_focus_level(control, manual_level);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_get_focus_level(GstCameraControl *control, gint *manual_level)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->get_focus_level)
++ return iface->get_focus_level(control, manual_level);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_set_auto_focus_area(GstCameraControl *control, GstCameraControlRectType rect)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->set_auto_focus_area)
++ return iface->set_auto_focus_area(control, rect);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_get_auto_focus_area(GstCameraControl *control, GstCameraControlRectType *rect)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->get_auto_focus_area)
++ return iface->get_auto_focus_area(control, rect);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_set_wdr(GstCameraControl *control, gint value)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->set_wdr)
++ return iface->set_wdr(control, value);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_get_wdr(GstCameraControl *control, gint *value)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->get_wdr)
++ return iface->get_wdr(control, value);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_set_ahs(GstCameraControl *control, gint value)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->set_ahs)
++ return iface->set_ahs(control, value);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_get_ahs(GstCameraControl *control, gint *value)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->get_ahs)
++ return iface->get_ahs(control, value);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_set_part_color(GstCameraControl *control, gint type, gint value)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->set_part_color)
++ return iface->set_part_color(control, type, value);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_get_part_color(GstCameraControl *control, gint type, gint *value)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->get_part_color)
++ return iface->get_part_color(control, type, value);
++
++ return FALSE;
++}
++
++gboolean
++gst_camera_control_get_exif_info(GstCameraControl *control, GstCameraControlExifInfo *info)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->get_exif_info)
++ return iface->get_exif_info(control, info);
++
++ return FALSE;
++}
++
++
++gboolean gst_camera_control_get_basic_dev_info(GstCameraControl *control, gint dev_id, GstCameraControlCapsInfoType *info)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->get_basic_dev_info)
++ return iface->get_basic_dev_info(control, dev_id, info);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_get_misc_dev_info(GstCameraControl *control, gint dev_id, GstCameraControlCtrlListInfoType *info)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->get_misc_dev_info)
++ return iface->get_misc_dev_info(control, dev_id, info);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_get_extra_dev_info(GstCameraControl *control, gint dev_id, GstCameraControlExtraInfoType *info)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->get_extra_dev_info)
++ return iface->get_extra_dev_info(control, dev_id, info);
++
++ return FALSE;
++}
++
++void gst_camera_control_set_capture_command(GstCameraControl *control, GstCameraControlCaptureCommand cmd)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_if_fail(GST_IS_CAMERA_CONTROL(control));
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->set_capture_command)
++ iface->set_capture_command(control, cmd);
++
++ return;
++}
++
++void gst_camera_control_set_record_command(GstCameraControl *control, GstCameraControlRecordCommand cmd)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_if_fail(GST_IS_CAMERA_CONTROL(control));
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->set_record_command)
++ iface->set_record_command(control, cmd);
++
++ return;
++}
++
++gboolean gst_camera_control_start_face_zoom(GstCameraControl *control, gint x, gint y, gint zoom_level)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->start_face_zoom)
++ return iface->start_face_zoom(control, x, y, zoom_level);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_stop_face_zoom(GstCameraControl *control)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->stop_face_zoom)
++ return iface->stop_face_zoom(control);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_set_ae_lock(GstCameraControl *control, gboolean lock)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->set_ae_lock)
++ return iface->set_ae_lock(control, lock);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_get_ae_lock(GstCameraControl *control, gboolean *lock)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->get_ae_lock)
++ return iface->get_ae_lock(control, lock);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_set_awb_lock(GstCameraControl *control, gboolean lock)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->set_awb_lock)
++ return iface->set_awb_lock(control, lock);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_get_awb_lock(GstCameraControl *control, gboolean *lock)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->get_awb_lock)
++ return iface->get_awb_lock(control, lock);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_set_user_buffer_fd(GstCameraControl *control, int *fds, int number)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->set_user_buffer_fd)
++ return iface->set_user_buffer_fd(control, fds, number);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_set_extra_preview_stream_format(GstCameraControl *control, int stream_id, GstCameraControlImageFormat img_fmt, int width, int height, int fps)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->set_extra_preview_stream_format)
++ return iface->set_extra_preview_stream_format(control, stream_id, img_fmt, width, height, fps);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_get_extra_preview_stream_format(GstCameraControl *control, int stream_id, GstCameraControlImageFormat *img_fmt, int *width, int *height, int *fps)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->get_extra_preview_stream_format)
++ return iface->get_extra_preview_stream_format(control, stream_id, img_fmt, width, height, fps);
++
++ return FALSE;
++}
++
++gboolean gst_camera_control_set_extra_preview_bitrate(GstCameraControl *control, int stream_id, int bitrate)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++
++ if (iface->set_extra_preview_bitrate)
++ return iface->set_extra_preview_bitrate(control, stream_id, bitrate);
++
++ return FALSE;
++}
++
++/* Read back the encoder bitrate for extra preview stream @stream_id.
++ * Delegates to the implementing element; FALSE when unsupported. */
++gboolean gst_camera_control_get_extra_preview_bitrate(GstCameraControl *control, int stream_id, int *bitrate)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++ if (!iface->get_extra_preview_bitrate)
++  return FALSE;
++
++ return iface->get_extra_preview_bitrate(control, stream_id, bitrate);
++}
++
++/* Set the GOP interval for extra preview stream @stream_id.
++ * Delegates to the implementing element; FALSE when unsupported. */
++gboolean gst_camera_control_set_extra_preview_gop_interval(GstCameraControl *control, int stream_id, int interval)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++ if (!iface->set_extra_preview_gop_interval)
++  return FALSE;
++
++ return iface->set_extra_preview_gop_interval(control, stream_id, interval);
++}
++
++/* Read back the GOP interval for extra preview stream @stream_id.
++ * Delegates to the implementing element; FALSE when unsupported. */
++gboolean gst_camera_control_get_extra_preview_gop_interval(GstCameraControl *control, int stream_id, int *interval)
++{
++ GstCameraControlInterface *iface;
++
++ g_return_val_if_fail(GST_IS_CAMERA_CONTROL(control), FALSE);
++
++ iface = GST_CAMERA_CONTROL_GET_INTERFACE(control);
++ if (!iface->get_extra_preview_gop_interval)
++  return FALSE;
++
++ return iface->get_extra_preview_gop_interval(control, stream_id, interval);
++}
++
++/* Emit the value-changed notification on both the control object and
++ * the channel so listeners attached to either one see the update. */
++void gst_camera_control_value_changed(GstCameraControl *control, GstCameraControlChannel *control_channel, gint value)
++{
++ /* Validate arguments like every other wrapper in this file does;
++  * previously an invalid @control crashed inside g_signal_emit(). */
++ g_return_if_fail(GST_IS_CAMERA_CONTROL(control));
++ g_return_if_fail(control_channel != NULL);
++
++ g_signal_emit(G_OBJECT(control), gst_camera_control_signals[CONTROL_VALUE_CHANGED], 0, control_channel, value);
++ g_signal_emit_by_name(G_OBJECT(control_channel), "control-value-changed", value);
++}
--- /dev/null
--- /dev/null
++/*
++ * GStreamer Camera Control Interface
++ *
++ * Copyright (c) 2000 - 2012 Samsung Electronics Co., Ltd.
++ *
++ * Contact: Jeongmo Yang <jm80.yang@samsung.com>
++ *
++ * This library is free software; you can redistribute it and/or modify it under
++ * the terms of the GNU Lesser General Public License as published by the
++ * Free Software Foundation; either version 2.1 of the License, or (at your option)
++ * any later version.
++ *
++ * This library is distributed in the hope that it will be useful, but WITHOUT ANY
++ * WARRANTY; without even the implied warranty of MERCHANTABILITY or
++ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
++ * License for more details.
++ *
++ * You should have received a copy of the GNU Lesser General Public License
++ * along with this library; if not, write to the Free Software Foundation, Inc., 51
++ * Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
++ *
++ */
++
++/* ===========================================================================================
++EDIT HISTORY FOR MODULE
++
++ This section contains comments describing changes made to the module.
++ Notice that changes are listed in reverse chronological order.
++
++when who what, where, why
++--------- ------------------------ ------------------------------------------------------
++12/09/08 jm80.yang@samsung.com Created
++
++=========================================================================================== */
++
++#ifndef __GST_CAMERA_CONTROL_H__
++#define __GST_CAMERA_CONTROL_H__
++
++#include <gst/gst.h>
++#include <gst/video/cameracontrolchannel.h>
++
++G_BEGIN_DECLS
++
++#define GST_TYPE_CAMERA_CONTROL \
++ (gst_camera_control_get_type())
++#define GST_CAMERA_CONTROL(obj) \
++ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_CAMERA_CONTROL, GstCameraControl))
++#define GST_IS_CAMERA_CONTROL(obj) \
++ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_CAMERA_CONTROL))
++#define GST_CAMERA_CONTROL_GET_INTERFACE(inst) \
++ (G_TYPE_INSTANCE_GET_INTERFACE ((inst), GST_TYPE_CAMERA_CONTROL, GstCameraControlInterface))
++#define GST_CAMERA_CONTROL_TYPE(iface) (iface->camera_control_type)
++
++
++typedef struct _GstCameraControl GstCameraControl;
++typedef struct _GstCameraControlInterface GstCameraControlInterface;
++
++typedef enum
++{
++ GST_CAMERA_CONTROL_HARDWARE,
++ GST_CAMERA_CONTROL_SOFTWARE
++} GstCameraControlType;
++
++/* enumerations for Camera control Exposure types */
++typedef enum
++{
++ GST_CAMERA_CONTROL_F_NUMBER,
++ GST_CAMERA_CONTROL_SHUTTER_SPEED,
++ GST_CAMERA_CONTROL_ISO,
++ GST_CAMERA_CONTROL_PROGRAM_MODE,
++ GST_CAMERA_CONTROL_EXPOSURE_MODE,
++ GST_CAMERA_CONTROL_EXPOSURE_VALUE
++} GstCameraControlExposureType;
++
++/* enumerations for Camera control Capture mode types */
++typedef enum
++{
++ GST_CAMERA_CONTROL_CAPTURE_MODE,
++ GST_CAMERA_CONTROL_OUTPUT_MODE,
++ GST_CAMERA_CONTROL_FRAME_COUNT,
++ GST_CAMERA_CONTROL_JPEG_QUALITY
++} GstCameraControlCaptureModeType;
++
++/* enumerations for Camera control Strobe types */
++typedef enum
++{
++ GST_CAMERA_CONTROL_STROBE_CONTROL,
++ GST_CAMERA_CONTROL_STROBE_CAPABILITIES,
++ GST_CAMERA_CONTROL_STROBE_MODE,
++ GST_CAMERA_CONTROL_STROBE_STATUS,
++ GST_CAMERA_CONTROL_STROBE_EV,
++ GST_CAMERA_CONTROL_STROBE_BRIGHTNESS
++} GstCameraControlStrobeType;
++
++/* enumerations for Camera control Face detection types */
++typedef enum
++{
++ GST_CAMERA_CONTROL_FACE_DETECT_MODE,
++ GST_CAMERA_CONTROL_FACE_DETECT_NUMBER,
++ GST_CAMERA_CONTROL_FACE_FOCUS_SELECT,
++ GST_CAMERA_CONTROL_FACE_SELECT_NUMBER,
++ GST_CAMERA_CONTROL_FACE_DETECT_STATUS
++} GstCameraControlFaceDetectType;
++
++/* enumerations for Camera control Zoom types */
++typedef enum
++{
++ GST_CAMERA_CONTROL_DIGITAL_ZOOM,
++ GST_CAMERA_CONTROL_OPTICAL_ZOOM
++} GstCameraControlZoomType;
++
++/* enumerations for Camera control Part color */
++typedef enum
++{
++ GST_CAMERA_CONTROL_PART_COLOR_SRC,
++ GST_CAMERA_CONTROL_PART_COLOR_DST,
++ GST_CAMERA_CONTROL_PART_COLOR_MODE
++} GstCameraControlPartColorType;
++
++/* enumerations for Camera capture command */
++typedef enum
++{
++ GST_CAMERA_CONTROL_CAPTURE_COMMAND_NONE,
++ GST_CAMERA_CONTROL_CAPTURE_COMMAND_START,
++ GST_CAMERA_CONTROL_CAPTURE_COMMAND_STOP,
++ GST_CAMERA_CONTROL_CAPTURE_COMMAND_STOP_MULTISHOT
++} GstCameraControlCaptureCommand;
++
++/* enumerations for Camera record command */
++typedef enum
++{
++ GST_CAMERA_CONTROL_RECORD_COMMAND_NONE,
++ GST_CAMERA_CONTROL_RECORD_COMMAND_START,
++ GST_CAMERA_CONTROL_RECORD_COMMAND_STOP
++} GstCameraControlRecordCommand;
++
++typedef enum
++{
++ /* YUV */
++ GST_CAMERA_CONTROL_IMAGE_FORMAT_NV12,
++ GST_CAMERA_CONTROL_IMAGE_FORMAT_NV21,
++ GST_CAMERA_CONTROL_IMAGE_FORMAT_I420,
++ GST_CAMERA_CONTROL_IMAGE_FORMAT_YV12,
++ GST_CAMERA_CONTROL_IMAGE_FORMAT_YUYV,
++ GST_CAMERA_CONTROL_IMAGE_FORMAT_UYVY,
++
++ /* RGB */
++ GST_CAMERA_CONTROL_IMAGE_FORMAT_BGRA,
++ GST_CAMERA_CONTROL_IMAGE_FORMAT_ARGB,
++
++ /* ENCODED */
++ GST_CAMERA_CONTROL_IMAGE_FORMAT_JPEG,
++ GST_CAMERA_CONTROL_IMAGE_FORMAT_H264,
++ GST_CAMERA_CONTROL_IMAGE_FORMAT_MJPEG,
++ GST_CAMERA_CONTROL_IMAGE_FORMAT_VP8,
++ GST_CAMERA_CONTROL_IMAGE_FORMAT_VP9
++} GstCameraControlImageFormat;
++
++
++/////////////////////////////////
++// For Query functionalities //
++// For Querying capabilities //
++/////////////////////////////////
++#define GST_CAMERA_CONTROL_MAX_NUM_FMT_DESC 32
++#define GST_CAMERA_CONTROL_MAX_NUM_RESOLUTION 32
++#define GST_CAMERA_CONTROL_MAX_NUM_AVAILABLE_TPF 16
++#define GST_CAMERA_CONTROL_MAX_NUM_AVAILABLE_FPS 16
++#define GST_CAMERA_CONTROL_MAX_NUM_CTRL_LIST_INFO 64
++#define GST_CAMERA_CONTROL_MAX_NUM_CTRL_MENU 64
++#define GST_CAMERA_CONTROL_MAX_NUM_DETECTED_FACES 16
++#define GST_CAMERA_CONTROL_MAX_SZ_CTRL_NAME_STRING 32
++#define GST_CAMERA_CONTROL_MAX_SZ_DEV_NAME_STRING 32
++
++/*! @struct GstCameraControlFracType
++ * @brief For timeperframe as fraction type
++ * Elapse time consumed by one frame, reverse of FPS
++ */
++typedef struct _GstCameraControlFracType {
++ gint num;
++ gint den;
++} GstCameraControlFracType;
++
++/*! @struct GstCameraControlRectType
++ * @brief For touch auto focusing area and face detection area
++ */
++typedef struct _GstCameraControlRectType {
++ gint x;
++ gint y;
++ gint width;
++ gint height;
++} GstCameraControlRectType;
++
++/*! @struct GstCameraControlResolutionType
++ * @brief For querying supported resolutions
++ */
++typedef struct _GstCameraControlResolutionType {
++ gint w;
++ gint h;
++
++ /* Available time per frame(tpf) as each pixelformat */
++ gint num_avail_tpf;
++ GstCameraControlFracType tpf[GST_CAMERA_CONTROL_MAX_NUM_AVAILABLE_TPF];
++} GstCameraControlResolutionType;
++
++/*! @struct GstCameraControlFmtDescType
++ * @brief For querying supported format type
++ */
++typedef struct _GstCameraControlFmtDescType {
++ /* fourcc name of each pixelformat */
++ guint fcc;
++ gint fcc_use;
++
++ /* Available resolutions as each pixelformat */
++ gint num_resolution;
++ GstCameraControlResolutionType resolutions[GST_CAMERA_CONTROL_MAX_NUM_RESOLUTION];
++} GstCameraControlFmtDescType;
++
++/*! @struct GstCameraControlCapsInfoType
++ * @brief For querying image input capabilities
++ */
++typedef struct _GstCameraControlCapsInfoType {
++ char dev_name[GST_CAMERA_CONTROL_MAX_SZ_DEV_NAME_STRING];
++ int input_idx;
++ gint num_fmt_desc;
++ GstCameraControlFmtDescType fmt_desc[GST_CAMERA_CONTROL_MAX_NUM_FMT_DESC];
++
++ int num_preview_resolution;
++ int preview_resolution_width[GST_CAMERA_CONTROL_MAX_NUM_RESOLUTION];
++ int preview_resolution_height[GST_CAMERA_CONTROL_MAX_NUM_RESOLUTION];
++
++ int num_capture_resolution;
++ int capture_resolution_width[GST_CAMERA_CONTROL_MAX_NUM_RESOLUTION];
++ int capture_resolution_height[GST_CAMERA_CONTROL_MAX_NUM_RESOLUTION];
++
++ int num_preview_fmt;
++ unsigned int preview_fmt[GST_CAMERA_CONTROL_MAX_NUM_FMT_DESC];
++
++ int num_capture_fmt;
++ unsigned int capture_fmt[GST_CAMERA_CONTROL_MAX_NUM_FMT_DESC];
++
++ int num_fps;
++ GstCameraControlFracType fps[GST_CAMERA_CONTROL_MAX_NUM_AVAILABLE_FPS];
++} GstCameraControlCapsInfoType;
++
++/*! @struct GstCameraControlFaceInfo
++ * @brief For face information
++ */
++typedef struct _GstCameraControlFaceInfo {
++ int id;
++ int score;
++ GstCameraControlRectType rect;
++} GstCameraControlFaceInfo;
++
++/*! @struct GstCameraControlFaceDetectInfo
++ * @brief For face detect information
++ */
++typedef struct _GstCameraControlFaceDetectInfo {
++ int num_of_faces;
++ GstCameraControlFaceInfo face_info[GST_CAMERA_CONTROL_MAX_NUM_DETECTED_FACES];
++} GstCameraControlFaceDetectInfo;
++
++/////////////////////////////
++// For Querying controls //
++/////////////////////////////
++enum {
++ GST_CAMERA_CTRL_TYPE_RANGE = 0,
++ GST_CAMERA_CTRL_TYPE_BOOL,
++ GST_CAMERA_CTRL_TYPE_ARRAY,
++ GST_CAMERA_CTRL_TYPE_UNKNOWN,
++ GST_CAMERA_CTRL_TYPE_NUM,
++};
++
++/*! @struct GstCameraControlCtrlMenuType
++ * @brief For querying menu of specified controls
++ */
++typedef struct _GstCameraControlCtrlMenuType {
++ gint menu_index;
++ gchar menu_name[GST_CAMERA_CONTROL_MAX_SZ_CTRL_NAME_STRING];
++} GstCameraControlCtrlMenuType;
++
++/*! @struct GstCameraControlCtrlInfoType
++ * @brief For querying controls detail
++ */
++typedef struct _GstCameraControlCtrlInfoType {
++ gint avsys_ctrl_id;
++ gint v4l2_ctrl_id;
++ gint ctrl_type;
++ gchar ctrl_name[GST_CAMERA_CONTROL_MAX_SZ_CTRL_NAME_STRING];
++ gint min;
++ gint max;
++ gint step;
++ gint default_val;
++ gint num_ctrl_menu;
++ GstCameraControlCtrlMenuType ctrl_menu[GST_CAMERA_CONTROL_MAX_NUM_CTRL_MENU];
++} GstCameraControlCtrlInfoType;
++
++/*! @struct GstCameraControlCtrlListInfoType
++ * @brief For querying controls
++ */
++typedef struct _GstCameraControlCtrlListInfoType {
++ gint num_ctrl_list_info;
++ GstCameraControlCtrlInfoType ctrl_info[GST_CAMERA_CONTROL_MAX_NUM_CTRL_LIST_INFO];
++} GstCameraControlCtrlListInfoType;
++
++/* capabilities field */
++#define GST_CAMERA_STROBE_CAP_NONE 0x0000 /* No strobe supported */
++#define GST_CAMERA_STROBE_CAP_OFF 0x0001 /* Always flash off mode */
++#define GST_CAMERA_STROBE_CAP_ON 0x0002 /* Always use flash light mode */
++#define GST_CAMERA_STROBE_CAP_AUTO 0x0004 /* Flashlight works automatic */
++#define GST_CAMERA_STROBE_CAP_REDEYE 0x0008 /* Red-eye reduction */
++#define GST_CAMERA_STROBE_CAP_SLOWSYNC 0x0010 /* Slow sync */
++#define GST_CAMERA_STROBE_CAP_FRONT_CURTAIN 0x0020 /* Front curtain */
++#define GST_CAMERA_STROBE_CAP_REAR_CURTAIN 0x0040 /* Rear curtain */
++#define GST_CAMERA_STROBE_CAP_PERMANENT 0x0080 /* keep turned on until turning off */
++#define GST_CAMERA_STROBE_CAP_EXTERNAL 0x0100 /* use external strobe */
++
++typedef struct _GstCameraControlExtraInfoType {
++ guint strobe_caps; /* Use above caps field */
++ guint detection_caps; /* Just boolean */
++ guint reserved[4];
++} GstCameraControlExtraInfoType;
++/////////////////////////////////////
++// END For Query functionalities //
++/////////////////////////////////////
++
++
++/* structure for Camera control EXIF information */
++typedef struct _GstCameraControlExifInfo {
++ /* Dynamic value */
++ guint32 exposure_time_numerator; /* Exposure time, given in seconds */
++ guint32 exposure_time_denominator;
++ gint shutter_speed_numerator; /* Shutter speed, given in APEX(Additive System Photographic Exposure) */
++ gint shutter_speed_denominator;
++ /* NOTE(review): "brigtness" is a misspelling of "brightness" — kept
++  * as-is because renaming the field would break API/ABI compatibility
++  * for existing users of this public header. */
++ gint brigtness_numerator; /* Value of brightness, before firing flash, given in APEX value */
++ gint brightness_denominator;
++ guint16 iso; /* Sensitivity value of sensor */
++ guint16 flash; /* Whether flash is fired(1) or not(0) */
++ gint metering_mode; /* metering mode in EXIF 2.2 */
++ gint exif_image_width; /* Size of image */
++ gint exif_image_height;
++ gint exposure_bias_in_APEX; /* Exposure bias in APEX standard */
++ gint software_used; /* Firmware S/W version */
++
++ /* Fixed value */
++ gint component_configuration; /* color components arrangement */
++ gint colorspace; /* colorspace information */
++ gint focal_len_numerator; /* Lens focal length */
++ gint focal_len_denominator;
++ gint aperture_f_num_numerator; /* Aperture value */
++ gint aperture_f_num_denominator;
++ gint aperture_in_APEX; /* Aperture value in APEX standard */
++ gint max_lens_aperture_in_APEX; /* Max aperture value in APEX standard */
++} GstCameraControlExifInfo;
++
++/* structure for camera control interface */
++struct _GstCameraControlInterface {
++ GTypeInterface iface;
++ GstCameraControlType camera_control_type;
++
++ /* virtual functions */
++ /* NOTE(review): vfunc layout is ABI for implementors of this public
++  * interface — append new entries at the end only, never reorder. */
++ const GList *(*list_channels) (GstCameraControl *control);
++ gboolean (*set_value) (GstCameraControl *control, GstCameraControlChannel *control_channel, gint value);
++ gboolean (*get_value) (GstCameraControl *control, GstCameraControlChannel *control_channel, gint *value);
++ gboolean (*set_exposure) (GstCameraControl *control, gint type, gint value1, gint value2);
++ gboolean (*get_exposure) (GstCameraControl *control, gint type, gint *value1, gint *value2);
++ gboolean (*set_capture_mode) (GstCameraControl *control, gint type, gint value);
++ gboolean (*get_capture_mode) (GstCameraControl *control, gint type, gint *value);
++ gboolean (*set_strobe) (GstCameraControl *control, gint type, gint value);
++ gboolean (*get_strobe) (GstCameraControl *control, gint type, gint *value);
++ gboolean (*set_detect) (GstCameraControl *control, gint type, gint value);
++ gboolean (*get_detect) (GstCameraControl *control, gint type, gint *value);
++ gboolean (*set_zoom) (GstCameraControl *control, gint type, gint value);
++ gboolean (*get_zoom) (GstCameraControl *control, gint type, gint *value);
++ gboolean (*set_focus) (GstCameraControl *control, gint mode, gint range);
++ gboolean (*get_focus) (GstCameraControl *control, gint *mode, gint *range);
++ gboolean (*start_auto_focus) (GstCameraControl *control);
++ gboolean (*stop_auto_focus) (GstCameraControl *control);
++ gboolean (*set_focus_level) (GstCameraControl *control, gint manual_level);
++ gboolean (*get_focus_level) (GstCameraControl *control, gint *manual_level);
++ gboolean (*set_auto_focus_area) (GstCameraControl *control, GstCameraControlRectType rect);
++ gboolean (*get_auto_focus_area) (GstCameraControl *control, GstCameraControlRectType *rect);
++ gboolean (*set_wdr) (GstCameraControl *control, gint value);
++ gboolean (*get_wdr) (GstCameraControl *control, gint *value);
++ gboolean (*set_ahs) (GstCameraControl *control, gint value);
++ gboolean (*get_ahs) (GstCameraControl *control, gint *value);
++ gboolean (*set_part_color) (GstCameraControl *control, gint type, gint value);
++ gboolean (*get_part_color) (GstCameraControl *control, gint type, gint *value);
++ gboolean (*get_exif_info) (GstCameraControl *control, GstCameraControlExifInfo *info);
++ gboolean (*get_basic_dev_info) (GstCameraControl *control, gint dev_id, GstCameraControlCapsInfoType *info);
++ gboolean (*get_misc_dev_info) (GstCameraControl *control, gint dev_id, GstCameraControlCtrlListInfoType *info);
++ gboolean (*get_extra_dev_info) (GstCameraControl *control, gint dev_id, GstCameraControlExtraInfoType *info);
++ void (*set_capture_command) (GstCameraControl *control, GstCameraControlCaptureCommand cmd);
++ void (*set_record_command) (GstCameraControl *control, GstCameraControlRecordCommand cmd);
++ gboolean (*start_face_zoom) (GstCameraControl *control, gint x, gint y, gint zoom_level);
++ gboolean (*stop_face_zoom) (GstCameraControl *control);
++ gboolean (*set_ae_lock) (GstCameraControl *control, gboolean lock);
++ gboolean (*get_ae_lock) (GstCameraControl *control, gboolean *lock);
++ gboolean (*set_awb_lock) (GstCameraControl *control, gboolean lock);
++ gboolean (*get_awb_lock) (GstCameraControl *control, gboolean *lock);
++ gboolean (*set_user_buffer_fd) (GstCameraControl *control, int *fds, int number);
++ /* extra preview stream control; stream_id selects the stream */
++ gboolean (*set_extra_preview_stream_format) (GstCameraControl *control, int stream_id, GstCameraControlImageFormat img_fmt, int width, int height, int fps);
++ gboolean (*get_extra_preview_stream_format) (GstCameraControl *control, int stream_id, GstCameraControlImageFormat *img_fmt, int *width, int *height, int *fps);
++ gboolean (*set_extra_preview_bitrate) (GstCameraControl *control, int stream_id, int bitrate);
++ gboolean (*get_extra_preview_bitrate) (GstCameraControl *control, int stream_id, int *bitrate);
++ gboolean (*set_extra_preview_gop_interval) (GstCameraControl *control, int stream_id, int interval);
++ gboolean (*get_extra_preview_gop_interval) (GstCameraControl *control, int stream_id, int *interval);
++
++ /* signals */
++ void (* value_changed) (GstCameraControl *control, GstCameraControlChannel *channel, gint value);
++};
++
++GST_VIDEO_API
++GType gst_camera_control_get_type(void);
++
++GST_VIDEO_API
++const GList *gst_camera_control_list_channels (GstCameraControl *control);
++
++GST_VIDEO_API
++gboolean gst_camera_control_set_value (GstCameraControl *control, GstCameraControlChannel *control_channel, gint value);
++
++GST_VIDEO_API
++gboolean gst_camera_control_get_value (GstCameraControl *control, GstCameraControlChannel *control_channel, gint *value);
++
++GST_VIDEO_API
++gboolean gst_camera_control_set_exposure (GstCameraControl *control, gint type, gint value1, gint value2);
++
++GST_VIDEO_API
++gboolean gst_camera_control_get_exposure (GstCameraControl *control, gint type, gint *value1, gint *value2);
++
++GST_VIDEO_API
++gboolean gst_camera_control_set_capture_mode (GstCameraControl *control, gint type, gint value);
++
++GST_VIDEO_API
++gboolean gst_camera_control_get_capture_mode (GstCameraControl *control, gint type, gint *value);
++
++GST_VIDEO_API
++gboolean gst_camera_control_set_strobe (GstCameraControl *control, gint type, gint value);
++
++GST_VIDEO_API
++gboolean gst_camera_control_get_strobe (GstCameraControl *control, gint type, gint *value);
++
++GST_VIDEO_API
++gboolean gst_camera_control_set_detect (GstCameraControl *control, gint type, gint value);
++
++GST_VIDEO_API
++gboolean gst_camera_control_get_detect (GstCameraControl *control, gint type, gint *value);
++
++GST_VIDEO_API
++gboolean gst_camera_control_set_zoom (GstCameraControl *control, gint type, gint value);
++
++GST_VIDEO_API
++gboolean gst_camera_control_get_zoom (GstCameraControl *control, gint type, gint *value);
++
++GST_VIDEO_API
++gboolean gst_camera_control_set_focus (GstCameraControl *control, gint mode, gint range);
++
++GST_VIDEO_API
++gboolean gst_camera_control_get_focus (GstCameraControl *control, gint *mode, gint *range);
++
++GST_VIDEO_API
++gboolean gst_camera_control_start_auto_focus (GstCameraControl *control);
++
++GST_VIDEO_API
++gboolean gst_camera_control_stop_auto_focus (GstCameraControl *control);
++
++GST_VIDEO_API
++gboolean gst_camera_control_set_focus_level (GstCameraControl *control, gint manual_level);
++
++GST_VIDEO_API
++gboolean gst_camera_control_get_focus_level (GstCameraControl *control, gint *manual_level);
++
++GST_VIDEO_API
++gboolean gst_camera_control_set_auto_focus_area (GstCameraControl *control, GstCameraControlRectType rect);
++
++GST_VIDEO_API
++gboolean gst_camera_control_get_auto_focus_area (GstCameraControl *control, GstCameraControlRectType *rect);
++
++GST_VIDEO_API
++gboolean gst_camera_control_set_wdr (GstCameraControl *control, gint value);
++
++GST_VIDEO_API
++gboolean gst_camera_control_get_wdr (GstCameraControl *control, gint *value);
++
++GST_VIDEO_API
++gboolean gst_camera_control_set_ahs (GstCameraControl *control, gint value);
++
++GST_VIDEO_API
++gboolean gst_camera_control_get_ahs (GstCameraControl *control, gint *value);
++
++GST_VIDEO_API
++gboolean gst_camera_control_set_part_color (GstCameraControl *control, gint type, gint value);
++
++GST_VIDEO_API
++gboolean gst_camera_control_get_part_color (GstCameraControl *control, gint type, gint *value);
++
++GST_VIDEO_API
++gboolean gst_camera_control_get_exif_info (GstCameraControl *control, GstCameraControlExifInfo *info);
++
++GST_VIDEO_API
++gboolean gst_camera_control_get_basic_dev_info (GstCameraControl *control, gint dev_id, GstCameraControlCapsInfoType *info);
++
++GST_VIDEO_API
++gboolean gst_camera_control_get_misc_dev_info (GstCameraControl *control, gint dev_id, GstCameraControlCtrlListInfoType *info);
++
++GST_VIDEO_API
++gboolean gst_camera_control_get_extra_dev_info (GstCameraControl *control, gint dev_id, GstCameraControlExtraInfoType *info);
++
++GST_VIDEO_API
++void gst_camera_control_set_capture_command (GstCameraControl *control, GstCameraControlCaptureCommand cmd);
++
++GST_VIDEO_API
++void gst_camera_control_set_record_command (GstCameraControl *control, GstCameraControlRecordCommand cmd);
++
++GST_VIDEO_API
++gboolean gst_camera_control_start_face_zoom (GstCameraControl *control, gint x, gint y, gint zoom_level);
++
++GST_VIDEO_API
++gboolean gst_camera_control_stop_face_zoom (GstCameraControl *control);
++
++GST_VIDEO_API
++gboolean gst_camera_control_set_ae_lock (GstCameraControl *control, gboolean lock);
++
++GST_VIDEO_API
++gboolean gst_camera_control_get_ae_lock (GstCameraControl *control, gboolean *lock);
++
++GST_VIDEO_API
++gboolean gst_camera_control_set_awb_lock (GstCameraControl *control, gboolean lock);
++
++GST_VIDEO_API
++gboolean gst_camera_control_get_awb_lock (GstCameraControl *control, gboolean *lock);
++
++GST_VIDEO_API
++gboolean gst_camera_control_set_user_buffer_fd (GstCameraControl *control, int *fds, int number);
++
++GST_VIDEO_API
++gboolean gst_camera_control_set_extra_preview_stream_format (GstCameraControl *control, int stream_id, GstCameraControlImageFormat img_fmt, int width, int height, int fps);
++
++GST_VIDEO_API
++gboolean gst_camera_control_get_extra_preview_stream_format (GstCameraControl *control, int stream_id, GstCameraControlImageFormat *img_fmt, int *width, int *height, int *fps);
++
++GST_VIDEO_API
++gboolean gst_camera_control_set_extra_preview_bitrate (GstCameraControl *control, int stream_id, int bitrate);
++
++GST_VIDEO_API
++gboolean gst_camera_control_get_extra_preview_bitrate (GstCameraControl *control, int stream_id, int *bitrate);
++
++GST_VIDEO_API
++gboolean gst_camera_control_set_extra_preview_gop_interval (GstCameraControl *control, int stream_id, int interval);
++
++GST_VIDEO_API
++gboolean gst_camera_control_get_extra_preview_gop_interval (GstCameraControl *control, int stream_id, int *interval);
++
++/* trigger signal */
++GST_VIDEO_API
++void gst_camera_control_value_changed (GstCameraControl *control, GstCameraControlChannel *control_channel, gint value);
++
++G_END_DECLS
++
++#endif /* __GST_CAMERA_CONTROL_H__ */
--- /dev/null
--- /dev/null
++/* GStreamer Camera Control Channel Interface
++ *
++ * Copyright (c) 2000 - 2012 Samsung Electronics Co., Ltd.
++ *
++ * Contact: Jeongmo Yang <jm80.yang@samsung.com>
++ *
++ * cameracontrolchannel.c: cameracontrol channel object design
++ *
++ * This library is free software; you can redistribute it and/or
++ * modify it under the terms of the GNU Library General Public
++ * License as published by the Free Software Foundation; either
++ * version 2 of the License, or (at your option) any later version.
++ *
++ * This library is distributed in the hope that it will be useful,
++ * but WITHOUT ANY WARRANTY; without even the implied warranty of
++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
++ * Library General Public License for more details.
++ *
++ * You should have received a copy of the GNU Library General Public
++ * License along with this library; if not, write to the
++ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
++ * Boston, MA 02111-1307, USA.
++ */
++
++#ifdef HAVE_CONFIG_H
++#include "config.h"
++#endif
++
++#include "cameracontrolchannel.h"
++
++enum {
++ /* FILL ME */
++ SIGNAL_VALUE_CHANGED,
++ LAST_SIGNAL
++};
++
++static void gst_camera_control_channel_class_init(GstCameraControlChannelClass* klass);
++static void gst_camera_control_channel_init(GstCameraControlChannel* control_channel);
++static void gst_camera_control_channel_dispose(GObject* object);
++
++static GObjectClass *parent_class = NULL;
++static guint signals[LAST_SIGNAL] = { 0 };
++
++/* Lazily register and return the GstCameraControlChannel GType.
++ * Uses g_once_init_enter/leave so concurrent first calls from multiple
++ * threads cannot race and register the type twice — the previous plain
++ * "static GType + if (!type)" check was not thread-safe. */
++GType gst_camera_control_channel_get_type(void)
++{
++ static gsize gst_camera_control_channel_type = 0;
++
++ if (g_once_init_enter(&gst_camera_control_channel_type)) {
++  GType _type;
++  static const GTypeInfo camera_control_channel_info = {
++   sizeof (GstCameraControlChannelClass),
++   NULL,
++   NULL,
++   (GClassInitFunc) gst_camera_control_channel_class_init,
++   NULL,
++   NULL,
++   sizeof (GstCameraControlChannel),
++   0,
++   (GInstanceInitFunc) gst_camera_control_channel_init,
++   NULL
++  };
++
++  _type = g_type_register_static(G_TYPE_OBJECT,
++    "GstCameraControlChannel",
++    &camera_control_channel_info,
++    0);
++  g_once_init_leave(&gst_camera_control_channel_type, _type);
++ }
++
++ return (GType) gst_camera_control_channel_type;
++}
++
++/* Class initializer: install the "control-value-changed" signal and
++ * hook up dispose. */
++static void gst_camera_control_channel_class_init(GstCameraControlChannelClass* klass)
++{
++ GObjectClass *gobject_class = (GObjectClass *) klass;
++
++ parent_class = g_type_class_peek_parent(klass);
++
++ /* emitted with the new integer value whenever the channel changes */
++ signals[SIGNAL_VALUE_CHANGED] =
++  g_signal_new("control-value-changed",
++   G_TYPE_FROM_CLASS(klass),
++   G_SIGNAL_RUN_LAST,
++   G_STRUCT_OFFSET(GstCameraControlChannelClass, value_changed),
++   NULL, NULL,
++   g_cclosure_marshal_VOID__INT,
++   G_TYPE_NONE, 1, G_TYPE_INT);
++
++ gobject_class->dispose = gst_camera_control_channel_dispose;
++}
++
++/* Instance initializer: start with no label and an empty value range. */
++static void gst_camera_control_channel_init(GstCameraControlChannel* control_channel)
++{
++ control_channel->label = NULL;
++ control_channel->min_value = 0;
++ control_channel->max_value = 0;
++}
++
++/* Dispose handler: release the label and chain up to the parent. */
++static void gst_camera_control_channel_dispose(GObject* object)
++{
++ GstCameraControlChannel *control_channel = GST_CAMERA_CONTROL_CHANNEL(object);
++
++ /* g_free(NULL) is a no-op, so the previous NULL guard was redundant;
++  * clearing the pointer keeps a repeated dispose safe. */
++ g_free(control_channel->label);
++ control_channel->label = NULL;
++
++ /* chain up to the parent class */
++ if (parent_class->dispose) {
++  parent_class->dispose(object);
++ }
++}
--- /dev/null
--- /dev/null
++/* GStreamer Camera Control Channel Interface
++ *
++ * Copyright (c) 2000 - 2012 Samsung Electronics Co., Ltd.
++ *
++ * Contact: Jeongmo Yang <jm80.yang@samsung.com>
++ *
++ * cameracontrolchannel.h: individual channel object
++ *
++ * This library is free software; you can redistribute it and/or
++ * modify it under the terms of the GNU Library General Public
++ * License as published by the Free Software Foundation; either
++ * version 2 of the License, or (at your option) any later version.
++ *
++ * This library is distributed in the hope that it will be useful,
++ * but WITHOUT ANY WARRANTY; without even the implied warranty of
++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
++ * Library General Public License for more details.
++ *
++ * You should have received a copy of the GNU Library General Public
++ * License along with this library; if not, write to the
++ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
++ * Boston, MA 02111-1307, USA.
++ */
++
++#ifndef __GST_CAMERA_CONTROL_CHANNEL_H__
++#define __GST_CAMERA_CONTROL_CHANNEL_H__
++
++#include <gst/gst.h>
++#include <gst/video/video-prelude.h>
++
++G_BEGIN_DECLS
++
++#define GST_TYPE_CAMERA_CONTROL_CHANNEL \
++ (gst_camera_control_channel_get_type ())
++#define GST_CAMERA_CONTROL_CHANNEL(obj) \
++ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_CAMERA_CONTROL_CHANNEL, \
++ GstCameraControlChannel))
++#define GST_CAMERA_CONTROL_CHANNEL_CLASS(klass) \
++ (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_CAMERA_CONTROL_CHANNEL, \
++ GstCameraControlChannelClass))
++#define GST_IS_CAMERA_CONTROL_CHANNEL(obj) \
++ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_CAMERA_CONTROL_CHANNEL))
++#define GST_IS_CAMERA_CONTROL_CHANNEL_CLASS(klass) \
++ (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_CAMERA_CONTROL_CHANNEL))
++
++/* Instance structure: one controllable camera channel identified by a
++ * human-readable label, with an inclusive [min_value, max_value]
++ * integer range. The label is owned by the object (freed in dispose). */
++typedef struct _GstCameraControlChannel {
++ GObject parent;
++ gchar *label;
++ gint min_value;
++ gint max_value;
++} GstCameraControlChannel;
++
++/* Class structure: carries the "control-value-changed" signal slot. */
++typedef struct _GstCameraControlChannelClass {
++ GObjectClass parent;
++
++ /* signals */
++ void (*value_changed)(GstCameraControlChannel *control_channel, gint value);
++
++ gpointer _gst_reserved[GST_PADDING];
++} GstCameraControlChannelClass;
++
++GST_VIDEO_API
++GType gst_camera_control_channel_get_type(void);
++
++G_END_DECLS
++
++#endif /* __GST_CAMERA_CONTROL_CHANNEL_H__ */
--- /dev/null
+ /* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2003> David Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /**
+ * SECTION:gstvideofilter
+ * @title: GstVideoFilter
+ * @short_description: Base class for video filters
+ *
+ * Provides useful functions and a base class for video filters.
+ *
+ * The videofilter will by default enable QoS on the parent GstBaseTransform
+ * to implement frame dropping.
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include "gstvideofilter.h"
+
+ #include <gst/video/video.h>
+ #include <gst/video/gstvideometa.h>
+ #include <gst/video/gstvideopool.h>
+
+ GST_DEBUG_CATEGORY_STATIC (gst_video_filter_debug);
+ #define GST_CAT_DEFAULT gst_video_filter_debug
+
+ #define gst_video_filter_parent_class parent_class
+ G_DEFINE_ABSTRACT_TYPE (GstVideoFilter, gst_video_filter,
+ GST_TYPE_BASE_TRANSFORM);
+
+ /* cached quark to avoid contention on the global quark table lock */
+ #define META_TAG_VIDEO meta_tag_video_quark
+ static GQuark meta_tag_video_quark;
+
+ /* Answer the allocation query downstream. */
+ static gboolean
+ gst_video_filter_propose_allocation (GstBaseTransform * trans,
+     GstQuery * decide_query, GstQuery * query)
+ {
+   GstVideoFilter *filter = GST_VIDEO_FILTER_CAST (trans);
+   GstVideoInfo info;
+   GstBufferPool *pool;
+   GstCaps *caps;
+   guint size;
+
+   /* let the base class add its own proposals first */
+   if (!GST_BASE_TRANSFORM_CLASS (parent_class)->propose_allocation (trans,
+           decide_query, query))
+     return FALSE;
+
+   /* passthrough, we're done */
+   if (decide_query == NULL)
+     return TRUE;
+
+   gst_query_parse_allocation (query, &caps, NULL);
+
+   if (caps == NULL)
+     return FALSE;
+
+   if (!gst_video_info_from_caps (&info, caps))
+     return FALSE;
+
+   size = GST_VIDEO_INFO_SIZE (&info);
+
+   if (gst_query_get_n_allocation_pools (query) == 0) {
+     GstStructure *structure;
+     GstAllocator *allocator = NULL;
+     /* default params: 16-byte alignment (align mask 15) */
+     GstAllocationParams params = { 0, 15, 0, 0, };
+
+     /* NOTE(review): "&params" was mojibake ("&para;ms") in the previous
+      * revision of this hunk; restored to the address-of expression,
+      * which is the only form that compiles. */
+     if (gst_query_get_n_allocation_params (query) > 0)
+       gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
+     else
+       gst_query_add_allocation_param (query, allocator, &params);
+
+     pool = gst_video_buffer_pool_new ();
+
+     structure = gst_buffer_pool_get_config (pool);
+     gst_buffer_pool_config_set_params (structure, caps, size, 0, 0);
+     gst_buffer_pool_config_set_allocator (structure, allocator, &params);
+
+     if (allocator)
+       gst_object_unref (allocator);
+
+     if (!gst_buffer_pool_set_config (pool, structure))
+       goto config_failed;
+
+     gst_query_add_allocation_pool (query, pool, size, 0, 0);
+     gst_object_unref (pool);
+     gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
+   }
+
+   return TRUE;
+
+   /* ERRORS */
+ config_failed:
+   {
+     GST_ERROR_OBJECT (filter, "failed to set config");
+     gst_object_unref (pool);
+     return FALSE;
+   }
+ }
+
+ /* configure the allocation query that was answered downstream, we can configure
+ * some properties on it. Only called when not in passthrough mode. */
+ /* decide_allocation vmethod: take the first pool downstream answered with
+ * (or create a GstVideoBufferPool if none), enable GstVideoMeta on its
+ * config, write the pool back into the query, then chain up. */
+ static gboolean
+ gst_video_filter_decide_allocation (GstBaseTransform * trans, GstQuery * query)
+ {
+ GstBufferPool *pool = NULL;
+ GstStructure *config;
+ guint min, max, size;
+ gboolean update_pool;
+ GstCaps *outcaps = NULL;
+
+ if (gst_query_get_n_allocation_pools (query) > 0) {
+ gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
+
+ /* no pool in the answer: we will create one, so fetch the caps too */
+ if (!pool)
+ gst_query_parse_allocation (query, &outcaps, NULL);
+
+ update_pool = TRUE;
+ } else {
+ GstVideoInfo vinfo;
+
+ /* no pool answered at all; derive the buffer size from the caps */
+ gst_query_parse_allocation (query, &outcaps, NULL);
+ gst_video_info_init (&vinfo);
+ gst_video_info_from_caps (&vinfo, outcaps);
+ size = vinfo.size;
+ min = max = 0;
+ update_pool = FALSE;
+ }
+
+ if (!pool)
+ pool = gst_video_buffer_pool_new ();
+
+ config = gst_buffer_pool_get_config (pool);
+ gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
+ if (outcaps)
+ gst_buffer_pool_config_set_params (config, outcaps, size, 0, 0);
+ gst_buffer_pool_set_config (pool, config);
+
+ /* replace the downstream pool entry, or add ours as a new entry */
+ if (update_pool)
+ gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
+ else
+ gst_query_add_allocation_pool (query, pool, size, min, max);
+
+ gst_object_unref (pool);
+
+ return GST_BASE_TRANSFORM_CLASS (parent_class)->decide_allocation (trans,
+ query);
+ }
+
+
+ /* our output size only depends on the caps, not on the input caps */
+ /* transform_size vmethod: the output buffer size depends only on the
+ * output caps (othercaps), never on the input buffer size. */
+ static gboolean
+ gst_video_filter_transform_size (GstBaseTransform * btrans,
+ GstPadDirection direction, GstCaps * caps, gsize size,
+ GstCaps * othercaps, gsize * othersize)
+ {
+ gboolean ret = TRUE;
+ GstVideoInfo info;
+
+ g_assert (size);
+
+ ret = gst_video_info_from_caps (&info, othercaps);
+ if (ret)
+ *othersize = info.size;
+
+ return ret;
+ }
+
+ /* get_unit_size vmethod: one unit is one video frame; its size is taken
+ * from the GstVideoInfo parsed out of @caps. Returns FALSE when the caps
+ * cannot be parsed as video caps.
+ *
+ * Fix: the adjacent string literals "...bytes" "for caps..." concatenated
+ * into "bytesfor caps" in the debug output; added the missing space. */
+ static gboolean
+ gst_video_filter_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
+ gsize * size)
+ {
+ GstVideoInfo info;
+
+ if (!gst_video_info_from_caps (&info, caps)) {
+ GST_WARNING_OBJECT (btrans, "Failed to parse caps %" GST_PTR_FORMAT, caps);
+ return FALSE;
+ }
+
+ *size = info.size;
+
+ GST_DEBUG_OBJECT (btrans, "Returning size %" G_GSIZE_FORMAT " bytes "
+ "for caps %" GST_PTR_FORMAT, *size, caps);
+
+ return TRUE;
+ }
+
+ /* set_caps vmethod: parse both caps into GstVideoInfo, let the subclass
+ * validate them via ->set_info, and on success store the infos and adjust
+ * base-transform behaviour depending on which transform vmethods the
+ * subclass implements. Sets filter->negotiated accordingly. */
+ static gboolean
+ gst_video_filter_set_caps (GstBaseTransform * trans, GstCaps * incaps,
+ GstCaps * outcaps)
+ {
+ GstVideoFilter *filter = GST_VIDEO_FILTER_CAST (trans);
+ GstVideoFilterClass *fclass;
+ GstVideoInfo in_info, out_info;
+ gboolean res;
+
+ /* input caps */
+ if (!gst_video_info_from_caps (&in_info, incaps))
+ goto invalid_caps;
+
+ /* output caps */
+ if (!gst_video_info_from_caps (&out_info, outcaps))
+ goto invalid_caps;
+
+ fclass = GST_VIDEO_FILTER_GET_CLASS (filter);
+ if (fclass->set_info)
+ res = fclass->set_info (filter, incaps, &in_info, outcaps, &out_info);
+ else
+ res = TRUE;
+
+ if (res) {
+ filter->in_info = in_info;
+ filter->out_info = out_info;
+ /* no copy-transform: force in-place operation on the base class */
+ if (fclass->transform_frame == NULL)
+ gst_base_transform_set_in_place (trans, TRUE);
+ /* no in-place transform: don't call transform_ip in passthrough */
+ if (fclass->transform_frame_ip == NULL)
+ GST_BASE_TRANSFORM_CLASS (fclass)->transform_ip_on_passthrough = FALSE;
+ }
+ filter->negotiated = res;
+
+ return res;
+
+ /* ERRORS */
+ invalid_caps:
+ {
+ GST_ERROR_OBJECT (filter, "invalid caps");
+ filter->negotiated = FALSE;
+ return FALSE;
+ }
+ }
+
+ /* transform vmethod: map input (READ) and output (WRITE) buffers as video
+ * frames and hand them to the subclass transform_frame. Invalid buffers
+ * are only warned about and produce GST_FLOW_OK so playback continues. */
+ static GstFlowReturn
+ gst_video_filter_transform (GstBaseTransform * trans, GstBuffer * inbuf,
+ GstBuffer * outbuf)
+ {
+ GstFlowReturn res;
+ GstVideoFilter *filter = GST_VIDEO_FILTER_CAST (trans);
+ GstVideoFilterClass *fclass;
++#ifdef USE_TBM
++ GstMapFlags map_flags;
++#endif
+
+ if (G_UNLIKELY (!filter->negotiated))
+ goto unknown_format;
+
+ fclass = GST_VIDEO_FILTER_GET_CLASS (filter);
+ if (fclass->transform_frame) {
+ GstVideoFrame in_frame, out_frame;
+
++#ifdef USE_TBM
++ /* Tizen: only use NO_REF mapping for formats other than the TBM
++ * zero-copy formats SN12/SR32 — presumably those need the frame to
++ * keep a buffer reference; TODO confirm against gsttizenmemory. */
++ map_flags = GST_MAP_READ;
++ if (filter->in_info.finfo->format != GST_VIDEO_FORMAT_SN12 &&
++ filter->in_info.finfo->format != GST_VIDEO_FORMAT_SR32)
++ map_flags |= GST_VIDEO_FRAME_MAP_FLAG_NO_REF;
++
++ GST_LOG_OBJECT (trans, "IN format %d, flags 0x%x",
++ filter->in_info.finfo->format, map_flags);
++
++ if (!gst_video_frame_map (&in_frame, &filter->in_info, inbuf, map_flags))
++ goto invalid_buffer;
++
++ map_flags = GST_MAP_WRITE;
++ if (filter->out_info.finfo->format != GST_VIDEO_FORMAT_SN12 &&
++ filter->out_info.finfo->format != GST_VIDEO_FORMAT_SR32)
++ map_flags |= GST_VIDEO_FRAME_MAP_FLAG_NO_REF;
++
++ GST_LOG_OBJECT (trans, "OUT format %d, flags 0x%x",
++ filter->out_info.finfo->format, map_flags);
++
++ if (!gst_video_frame_map (&out_frame, &filter->out_info, outbuf, map_flags)) {
++ gst_video_frame_unmap (&in_frame);
++ goto invalid_buffer;
++ }
++#else
+ if (!gst_video_frame_map (&in_frame, &filter->in_info, inbuf,
+ GST_MAP_READ | GST_VIDEO_FRAME_MAP_FLAG_NO_REF))
+ goto invalid_buffer;
+
+ if (!gst_video_frame_map (&out_frame, &filter->out_info, outbuf,
+ GST_MAP_WRITE | GST_VIDEO_FRAME_MAP_FLAG_NO_REF)) {
+ gst_video_frame_unmap (&in_frame);
+ goto invalid_buffer;
+ }
++#endif
+ res = fclass->transform_frame (filter, &in_frame, &out_frame);
+
+ gst_video_frame_unmap (&out_frame);
+ gst_video_frame_unmap (&in_frame);
+ } else {
+ GST_DEBUG_OBJECT (trans, "no transform_frame vmethod");
+ res = GST_FLOW_OK;
+ }
+
+ return res;
+
+ /* ERRORS */
+ unknown_format:
+ {
+ GST_ELEMENT_ERROR (filter, CORE, NOT_IMPLEMENTED, (NULL),
+ ("unknown format"));
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+ invalid_buffer:
+ {
+ GST_ELEMENT_WARNING (filter, CORE, NOT_IMPLEMENTED, (NULL),
+ ("invalid video buffer received"));
+ return GST_FLOW_OK;
+ }
+ }
+
+ /* transform_ip vmethod: map the buffer once and pass it to the subclass
+ * transform_frame_ip. In passthrough mode the frame is mapped read-only
+ * so the subclass may inspect but not modify it. */
+ static GstFlowReturn
+ gst_video_filter_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
+ {
+ GstFlowReturn res;
+ GstVideoFilter *filter = GST_VIDEO_FILTER_CAST (trans);
+ GstVideoFilterClass *fclass;
+
+ if (G_UNLIKELY (!filter->negotiated))
+ goto unknown_format;
+
+ fclass = GST_VIDEO_FILTER_GET_CLASS (filter);
+ if (fclass->transform_frame_ip) {
+ GstVideoFrame frame;
+ GstMapFlags flags;
+
+ flags = GST_MAP_READ | GST_VIDEO_FRAME_MAP_FLAG_NO_REF;
+
+ /* only allow writes when we are really modifying the data */
+ if (!gst_base_transform_is_passthrough (trans))
+ flags |= GST_MAP_WRITE;
+
+ if (!gst_video_frame_map (&frame, &filter->in_info, buf, flags))
+ goto invalid_buffer;
+
+ res = fclass->transform_frame_ip (filter, &frame);
+
+ gst_video_frame_unmap (&frame);
+ } else {
+ GST_DEBUG_OBJECT (trans, "no transform_frame_ip vmethod");
+ res = GST_FLOW_OK;
+ }
+
+ return res;
+
+ /* ERRORS */
+ unknown_format:
+ {
+ GST_ELEMENT_ERROR (filter, CORE, NOT_IMPLEMENTED, (NULL),
+ ("unknown format"));
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+ invalid_buffer:
+ {
+ GST_ELEMENT_WARNING (filter, CORE, NOT_IMPLEMENTED, (NULL),
+ ("invalid video buffer received"));
+ return GST_FLOW_OK;
+ }
+ }
+
+ /* transform_meta vmethod: automatically copy metas that are untagged or
+ * tagged only with the "video" tag; defer anything else to the base
+ * class implementation. */
+ static gboolean
+ gst_video_filter_transform_meta (GstBaseTransform * trans, GstBuffer * outbuf,
+ GstMeta * meta, GstBuffer * inbuf)
+ {
+ const GstMetaInfo *info = meta->info;
+ const gchar *const *tags;
+
+ tags = gst_meta_api_type_get_tags (info->api);
+
+ /* "only the video tag" == exactly one tag and it is META_TAG_VIDEO */
+ if (!tags || (g_strv_length ((gchar **) tags) == 1
+ && gst_meta_api_type_has_tag (info->api, META_TAG_VIDEO)))
+ return TRUE;
+
+ return GST_BASE_TRANSFORM_CLASS (parent_class)->transform_meta (trans, outbuf,
+ meta, inbuf);
+ }
+
+ /* Class init: hook all GstBaseTransform vmethods to the video-frame-aware
+ * wrappers above, set up the debug category and cache the video meta-tag
+ * quark used by transform_meta. */
+ static void
+ gst_video_filter_class_init (GstVideoFilterClass * g_class)
+ {
+ GstBaseTransformClass *trans_class;
+ GstVideoFilterClass *klass;
+
+ klass = (GstVideoFilterClass *) g_class;
+ trans_class = (GstBaseTransformClass *) klass;
+
+ trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_video_filter_set_caps);
+ trans_class->propose_allocation =
+ GST_DEBUG_FUNCPTR (gst_video_filter_propose_allocation);
+ trans_class->decide_allocation =
+ GST_DEBUG_FUNCPTR (gst_video_filter_decide_allocation);
+ trans_class->transform_size =
+ GST_DEBUG_FUNCPTR (gst_video_filter_transform_size);
+ trans_class->get_unit_size =
+ GST_DEBUG_FUNCPTR (gst_video_filter_get_unit_size);
+ trans_class->transform = GST_DEBUG_FUNCPTR (gst_video_filter_transform);
+ trans_class->transform_ip = GST_DEBUG_FUNCPTR (gst_video_filter_transform_ip);
+ trans_class->transform_meta =
+ GST_DEBUG_FUNCPTR (gst_video_filter_transform_meta);
+
+ GST_DEBUG_CATEGORY_INIT (gst_video_filter_debug, "videofilter", 0,
+ "videofilter");
+
+ meta_tag_video_quark = g_quark_from_static_string (GST_META_TAG_VIDEO_STR);
+ }
+
+ /* Instance init: not negotiated yet, and QoS frame-dropping enabled by
+ * default (see the SECTION documentation above). */
+ static void
+ gst_video_filter_init (GstVideoFilter * instance)
+ {
+ GstVideoFilter *videofilter = GST_VIDEO_FILTER (instance);
+
+ GST_DEBUG_OBJECT (videofilter, "gst_video_filter_init");
+
+ videofilter->negotiated = FALSE;
+ /* enable QoS */
+ gst_base_transform_set_qos_enabled (GST_BASE_TRANSFORM (videofilter), TRUE);
+ }
--- /dev/null
+ /* GStreamer
+ * Copyright (C) 2010 David Schleef <ds@schleef.org>
+ * Copyright (C) 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #if 0
+ #ifdef HAVE_PTHREAD
+ #define _GNU_SOURCE
+ #include <pthread.h>
+ #endif
+ #endif
+
+ #include "video-converter.h"
+
+ #include <glib.h>
+ #include <string.h>
+ #include <math.h>
++#ifdef USE_TBM
++#include <gst/allocators/gsttizenmemory.h>
++#endif
+ #include <gst/base/base.h>
+
+ #include "video-orc.h"
+
+ /**
+ * SECTION:videoconverter
+ * @title: GstVideoConverter
+ * @short_description: Generic video conversion
+ *
+ * This object is used to convert video frames from one format to another.
+ * The object can perform conversion of:
+ *
+ * * video format
+ * * video colorspace
+ * * chroma-siting
+ * * video size
+ *
+ */
+
+ /*
+ * (a) unpack
+ * (b) chroma upsample
+ * (c) (convert Y'CbCr to R'G'B')
+ * (d) gamma decode
+ * (e) downscale
+ * (f) colorspace convert through XYZ
+ * (g) upscale
+ * (h) gamma encode
+ * (i) (convert R'G'B' to Y'CbCr)
+ * (j) chroma downsample
+ * (k) pack
+ *
+ * quality options
+ *
+ * (a) range truncate, range expand
+ * (b) full upsample, 1-1 non-cosited upsample, no upsample
+ * (c) 8 bits, 16 bits
+ * (d)
+ * (e) 8 bits, 16 bits
+ * (f) 8 bits, 16 bits
+ * (g) 8 bits, 16 bits
+ * (h)
+ * (i) 8 bits, 16 bits
+ * (j) 1-1 cosited downsample, no downsample
+ * (k)
+ *
+ *
+ * 1 : a -> -> -> -> e -> f -> g -> -> -> -> k
+ * 2 : a -> -> -> -> e -> f* -> g -> -> -> -> k
+ * 3 : a -> -> -> -> e* -> f* -> g* -> -> -> -> k
+ * 4 : a -> b -> -> -> e -> f -> g -> -> -> j -> k
+ * 5 : a -> b -> -> -> e* -> f* -> g* -> -> -> j -> k
+ * 6 : a -> b -> c -> d -> e -> f -> g -> h -> i -> j -> k
+ * 7 : a -> b -> c -> d -> e* -> f* -> g* -> h -> i -> j -> k
+ *
+ * 8 : a -> b -> c -> d -> e* -> f* -> g* -> h -> i -> j -> k
+ * 9 : a -> b -> c -> d -> e* -> f* -> g* -> h -> i -> j -> k
+ * 10 : a -> b -> c -> d -> e* -> f* -> g* -> h -> i -> j -> k
+ */
+
+ #ifndef GST_DISABLE_GST_DEBUG
+ #define GST_CAT_DEFAULT ensure_debug_category()
+ /* Lazily create the "video-converter" debug category exactly once,
+ * thread-safely, via the g_once_init_enter/leave pattern. */
+ static GstDebugCategory *
+ ensure_debug_category (void)
+ {
+ static gsize cat_gonce = 0;
+
+ if (g_once_init_enter (&cat_gonce)) {
+ gsize cat_done;
+
+ cat_done = (gsize) _gst_debug_category_new ("video-converter", 0,
+ "video-converter object");
+
+ g_once_init_leave (&cat_gonce, cat_done);
+ }
+
+ return (GstDebugCategory *) cat_gonce;
+ }
+ #else
+ #define ensure_debug_category() /* NOOP */
+ #endif /* GST_DISABLE_GST_DEBUG */
+
+ typedef void (*GstParallelizedTaskFunc) (gpointer user_data);
+
+ typedef struct _GstParallelizedTaskRunner GstParallelizedTaskRunner;
+ typedef struct _GstParallelizedWorkItem GstParallelizedWorkItem;
+
+ /* One unit of work handed to a pool thread: the function to run and its
+ * user_data, plus a back-pointer to the owning runner. */
+ struct _GstParallelizedWorkItem
+ {
+ GstParallelizedTaskRunner *self;
+ GstParallelizedTaskFunc func;
+ gpointer user_data;
+ };
+
+ /* Fans work out over a GstTaskPool. Both queues are protected by @lock;
+ * @tasks holds pool handles still to be joined, @work_items holds pending
+ * work for the threads to pop. */
+ struct _GstParallelizedTaskRunner
+ {
+ GstTaskPool *pool;
+ gboolean own_pool; /* TRUE if we created @pool and must clean it up */
+ guint n_threads;
+
+ GstQueueArray *tasks;
+ GstQueueArray *work_items;
+
+ GMutex lock;
+
+ /* TRUE: work items are heap-allocated and freed by the worker thread */
+ gboolean async_tasks;
+ };
+
+ /* Pool-thread entry point: pop one work item under the lock, run it, and
+ * free it when it was heap-allocated (async mode only; in sync mode the
+ * item lives on the scheduling thread's stack via g_newa). */
+ static void
+ gst_parallelized_task_thread_func (gpointer data)
+ {
+ GstParallelizedTaskRunner *runner = data;
+ GstParallelizedWorkItem *work_item;
+
+ g_mutex_lock (&runner->lock);
+ work_item = gst_queue_array_pop_head (runner->work_items);
+ g_mutex_unlock (&runner->lock);
+
+ g_assert (work_item != NULL);
+ g_assert (work_item->func != NULL);
+
+
+ work_item->func (work_item->user_data);
+ if (runner->async_tasks)
+ g_free (work_item);
+ }
+
+ /* Join every outstanding pool task. The lock is dropped around the join
+ * itself so worker threads can make progress; loop until the task queue
+ * is observed empty. */
+ static void
+ gst_parallelized_task_runner_join (GstParallelizedTaskRunner * self)
+ {
+ gboolean joined = FALSE;
+
+ while (!joined) {
+ g_mutex_lock (&self->lock);
+ if (!(joined = gst_queue_array_is_empty (self->tasks))) {
+ gpointer task = gst_queue_array_pop_head (self->tasks);
+ g_mutex_unlock (&self->lock);
+ gst_task_pool_join (self->pool, task);
+ } else {
+ g_mutex_unlock (&self->lock);
+ }
+ }
+ }
+
+ /* Destroy the runner: wait for all tasks, free the queues, and clean up
+ * the pool only if we created it ourselves (own_pool). */
+ static void
+ gst_parallelized_task_runner_free (GstParallelizedTaskRunner * self)
+ {
+ gst_parallelized_task_runner_join (self);
+
+ gst_queue_array_free (self->work_items);
+ gst_queue_array_free (self->tasks);
+ if (self->own_pool)
+ gst_task_pool_cleanup (self->pool);
+ gst_object_unref (self->pool);
+ g_mutex_clear (&self->lock);
+ g_free (self);
+ }
+
+ /* Create a runner for @n_threads (0 = number of processors). A caller
+ * supplied @pool is reffed and reused; otherwise a private shared task
+ * pool is created and prepared. */
+ static GstParallelizedTaskRunner *
+ gst_parallelized_task_runner_new (guint n_threads, GstTaskPool * pool,
+ gboolean async_tasks)
+ {
+ GstParallelizedTaskRunner *self;
+
+ if (n_threads == 0)
+ n_threads = g_get_num_processors ();
+
+ self = g_new0 (GstParallelizedTaskRunner, 1);
+
+ if (pool) {
+ self->pool = g_object_ref (pool);
+ self->own_pool = FALSE;
+
+ /* No reason to split up the work between more threads than the
+ * pool can spawn */
+ if (GST_IS_SHARED_TASK_POOL (pool))
+ n_threads =
+ MIN (n_threads,
+ gst_shared_task_pool_get_max_threads (GST_SHARED_TASK_POOL (pool)));
+ } else {
+ self->pool = gst_shared_task_pool_new ();
+ self->own_pool = TRUE;
+ gst_shared_task_pool_set_max_threads (GST_SHARED_TASK_POOL (self->pool),
+ n_threads);
+ gst_task_pool_prepare (self->pool, NULL);
+ }
+
+ self->tasks = gst_queue_array_new (n_threads);
+ self->work_items = gst_queue_array_new (n_threads);
+
+ self->n_threads = n_threads;
+
+ g_mutex_init (&self->lock);
+
+ /* Set when scheduling a job */
+ self->async_tasks = async_tasks;
+
+ return self;
+ }
+
+ /* Wait until all currently scheduled work has completed. */
+ static void
+ gst_parallelized_task_runner_finish (GstParallelizedTaskRunner * self)
+ {
+ gst_parallelized_task_runner_join (self);
+ }
+
+ /* Run @func once per thread with task_data[i] as argument. In sync mode
+ * task_data[0] runs on the calling thread and the call blocks until all
+ * workers finish; in async mode everything is pushed to the pool and the
+ * call returns immediately. */
+ static void
+ gst_parallelized_task_runner_run (GstParallelizedTaskRunner * self,
+ GstParallelizedTaskFunc func, gpointer * task_data)
+ {
+ guint n_threads = self->n_threads;
+
+ if (n_threads > 1 || self->async_tasks) {
+ guint i = 0;
+ g_mutex_lock (&self->lock);
+ if (!self->async_tasks) {
+ /* if not async, perform one of the functions in the current thread */
+ i = 1;
+ }
+ for (; i < n_threads; i++) {
+ gpointer task;
+ GstParallelizedWorkItem *work_item;
+
+ /* sync items can live on this stack frame because we join below;
+ * async items must be heap-allocated and are freed by the worker */
+ if (!self->async_tasks)
+ work_item = g_newa (GstParallelizedWorkItem, 1);
+ else
+ work_item = g_new0 (GstParallelizedWorkItem, 1);
+
+ work_item->self = self;
+ work_item->func = func;
+ work_item->user_data = task_data[i];
+ gst_queue_array_push_tail (self->work_items, work_item);
+
+ task =
+ gst_task_pool_push (self->pool, gst_parallelized_task_thread_func,
+ self, NULL);
+
+ /* The return value of push() is unfortunately nullable, and we can't deal with that */
+ g_assert (task != NULL);
+ gst_queue_array_push_tail (self->tasks, task);
+ }
+ g_mutex_unlock (&self->lock);
+ }
+
+ if (!self->async_tasks) {
+ func (task_data[0]);
+
+ gst_parallelized_task_runner_finish (self);
+ }
+ }
+
+ typedef struct _GstLineCache GstLineCache;
+
+ #define SCALE (8)
+ #define SCALE_F ((float) (1 << SCALE))
+
+ typedef struct _MatrixData MatrixData;
+
+ /* Color matrix: double-precision coefficients in @dm, fixed-point
+ * integer copies in @im, plus precomputed operands/tables consumed by
+ * the ORC matrix kernels via @matrix_func. */
+ struct _MatrixData
+ {
+ gdouble dm[4][4];
+ gint im[4][4];
+ gint width;
+ guint64 orc_p1;
+ guint64 orc_p2;
+ guint64 orc_p3;
+ guint64 orc_p4;
+ gint64 *t_r;
+ gint64 *t_g;
+ gint64 *t_b;
+ gint64 t_c;
+ void (*matrix_func) (MatrixData * data, gpointer pixels);
+ };
+
+ typedef struct _GammaData GammaData;
+
+ /* Gamma decode/encode stage: lookup table plus the function applying it
+ * to a line of @width pixels. */
+ struct _GammaData
+ {
+ gpointer gamma_table;
+ gint width;
+ void (*gamma_func) (GammaData * data, gpointer dest, gpointer src);
+ };
+
+ /* How the alpha channel is produced in the output (bit flags). */
+ typedef enum
+ {
+ ALPHA_MODE_NONE = 0,
+ ALPHA_MODE_COPY = (1 << 0),
+ ALPHA_MODE_SET = (1 << 1),
+ ALPHA_MODE_MULT = (1 << 2)
+ } AlphaMode;
+
+ /* Ring of @n_lines temporary line buffers of @stride bytes each; @idx is
+ * the next slot handed out (see get_temp_line). */
+ typedef struct
+ {
+ guint8 *data;
+ guint stride;
+ guint n_lines;
+ guint idx;
+ gpointer user_data;
+ GDestroyNotify notify;
+ } ConverterAlloc;
+
+ typedef void (*FastConvertFunc) (GstVideoConverter * convert,
+ const GstVideoFrame * src, GstVideoFrame * dest, gint plane);
+
+ /* The converter instance: geometry of the source/destination rectangles,
+ * the per-stage state for the generic line-based pipeline (unpack →
+ * upsample → to-RGB → gamma → scale → convert → gamma → to-YUV →
+ * downsample → dither → pack), and the fastpath state (f* fields) used
+ * when video_converter_lookup_fastpath found a direct conversion. */
+ struct _GstVideoConverter
+ {
+ gint flags;
+
+ GstVideoInfo in_info;
+ GstVideoInfo out_info;
+
+ gint in_x;
+ gint in_y;
+ gint in_width;
+ gint in_height;
+ gint in_maxwidth;
+ gint in_maxheight;
+ gint out_x;
+ gint out_y;
+ gint out_width;
+ gint out_height;
+ gint out_maxwidth;
+ gint out_maxheight;
+
+ gint current_pstride;
+ gint current_width;
+ gint current_height;
+ GstVideoFormat current_format;
+ gint current_bits;
+
+ GstStructure *config;
+
+ /* one entry of each per-thread array below per runner thread */
+ GstParallelizedTaskRunner *conversion_runner;
+
+ guint16 **tmpline;
+
+ gboolean fill_border;
+ gpointer borderline;
+ guint64 borders[4];
+ guint32 border_argb;
+ guint32 alpha_value;
+ AlphaMode alpha_mode;
+
+ void (*convert) (GstVideoConverter * convert, const GstVideoFrame * src,
+ GstVideoFrame * dest);
+
+ /* data for unpack */
+ GstLineCache **unpack_lines;
+ GstVideoFormat unpack_format;
+ guint unpack_bits;
+ gboolean unpack_rgb;
+ gboolean identity_unpack;
+ gint unpack_pstride;
+
+ /* chroma upsample */
+ GstLineCache **upsample_lines;
+ GstVideoChromaResample **upsample;
+ GstVideoChromaResample **upsample_p;
+ GstVideoChromaResample **upsample_i;
+ guint up_n_lines;
+ gint up_offset;
+
+ /* to R'G'B */
+ GstLineCache **to_RGB_lines;
+ MatrixData to_RGB_matrix;
+ /* gamma decode */
+ GammaData gamma_dec;
+
+ /* scaling */
+ GstLineCache **hscale_lines;
+ GstVideoScaler **h_scaler;
+ gint h_scale_format;
+ GstLineCache **vscale_lines;
+ GstVideoScaler **v_scaler;
+ GstVideoScaler **v_scaler_p;
+ GstVideoScaler **v_scaler_i;
+ gint v_scale_width;
+ gint v_scale_format;
+
+ /* color space conversion */
+ GstLineCache **convert_lines;
+ MatrixData convert_matrix;
+ gint in_bits;
+ gint out_bits;
+
+ /* alpha correction */
+ GstLineCache **alpha_lines;
+ void (*alpha_func) (GstVideoConverter * convert, gpointer pixels, gint width);
+
+ /* gamma encode */
+ GammaData gamma_enc;
+ /* to Y'CbCr */
+ GstLineCache **to_YUV_lines;
+ MatrixData to_YUV_matrix;
+
+ /* chroma downsample */
+ GstLineCache **downsample_lines;
+ GstVideoChromaResample **downsample;
+ GstVideoChromaResample **downsample_p;
+ GstVideoChromaResample **downsample_i;
+ guint down_n_lines;
+ gint down_offset;
+
+ /* dither */
+ GstLineCache **dither_lines;
+ GstVideoDither **dither;
+
+ /* pack */
+ GstLineCache **pack_lines;
+ guint pack_nlines;
+ GstVideoFormat pack_format;
+ guint pack_bits;
+ gboolean pack_rgb;
+ gboolean identity_pack;
+ gint pack_pstride;
+ gconstpointer pack_pal;
+ gsize pack_palsize;
+
+ /* current frames, valid only during a convert call */
+ const GstVideoFrame *src;
+ GstVideoFrame *dest;
+
+ /* fastpath */
+ GstVideoFormat fformat[4];
+ gint fin_x[4];
+ gint fin_y[4];
+ gint fout_x[4];
+ gint fout_y[4];
+ gint fout_width[4];
+ gint fout_height[4];
+ gint fsplane[4];
+ gint ffill[4];
+
+ struct
+ {
+ GstVideoScaler **scaler;
+ } fh_scaler[4];
+ struct
+ {
+ GstVideoScaler **scaler;
+ } fv_scaler[4];
+ FastConvertFunc fconvert[4];
+
+ /* for parallel async running */
+ gpointer tasks[4];
+ gpointer tasks_p[4];
+ };
+
+ typedef gpointer (*GstLineCacheAllocLineFunc) (GstLineCache * cache, gint idx,
+ gpointer user_data);
+ typedef gboolean (*GstLineCacheNeedLineFunc) (GstLineCache * cache, gint idx,
+ gint out_line, gint in_line, gpointer user_data);
+
+ /* A cache of consecutive image lines produced by one pipeline stage.
+ * @lines holds pointers for absolute lines [first, first+len); @prev is
+ * the upstream cache feeding this one. When a line is missing,
+ * @need_line is called to produce it; @alloc_line provides the buffer to
+ * write into. @backlog keeps earlier lines alive (interlaced needs 2). */
+ struct _GstLineCache
+ {
+ gint first;
+ gint backlog;
+ GPtrArray *lines;
+
+ GstLineCache *prev;
+ gboolean write_input;
+ gboolean pass_alloc;
+ gboolean alloc_writable;
+
+ GstLineCacheNeedLineFunc need_line;
+ gint need_line_idx;
+ gpointer need_line_data;
+ GDestroyNotify need_line_notify;
+
+ guint n_lines;
+ guint stride;
+ GstLineCacheAllocLineFunc alloc_line;
+ gpointer alloc_line_data;
+ GDestroyNotify alloc_line_notify;
+ };
+
+ /* Allocate an empty line cache chained after @prev (may be NULL for the
+ * first stage). */
+ static GstLineCache *
+ gst_line_cache_new (GstLineCache * prev)
+ {
+ GstLineCache *result;
+
+ result = g_slice_new0 (GstLineCache);
+ result->lines = g_ptr_array_new ();
+ result->prev = prev;
+
+ return result;
+ }
+
+ /* Drop all cached line pointers and reset the window start to 0. */
+ static void
+ gst_line_cache_clear (GstLineCache * cache)
+ {
+ g_return_if_fail (cache != NULL);
+
+ g_ptr_array_set_size (cache->lines, 0);
+ cache->first = 0;
+ }
+
+ /* Free the cache, notifying the need_line/alloc_line owners first so
+ * their user_data can be released. */
+ static void
+ gst_line_cache_free (GstLineCache * cache)
+ {
+ if (cache->need_line_notify)
+ cache->need_line_notify (cache->need_line_data);
+ if (cache->alloc_line_notify)
+ cache->alloc_line_notify (cache->alloc_line_data);
+ gst_line_cache_clear (cache);
+ g_ptr_array_unref (cache->lines);
+ g_slice_free (GstLineCache, cache);
+ }
+
+ /* Install the callback used to produce a line that is not yet cached. */
+ static void
+ gst_line_cache_set_need_line_func (GstLineCache * cache,
+ GstLineCacheNeedLineFunc need_line, gint idx, gpointer user_data,
+ GDestroyNotify notify)
+ {
+ cache->need_line = need_line;
+ cache->need_line_idx = idx;
+ cache->need_line_data = user_data;
+ cache->need_line_notify = notify;
+ }
+
+ /* Install the callback used to allocate the buffer a new line is
+ * written into. */
+ static void
+ gst_line_cache_set_alloc_line_func (GstLineCache * cache,
+ GstLineCacheAllocLineFunc alloc_line, gpointer user_data,
+ GDestroyNotify notify)
+ {
+ cache->alloc_line = alloc_line;
+ cache->alloc_line_data = user_data;
+ cache->alloc_line_notify = notify;
+ }
+
+ /* keep this much backlog for interlaced video */
+ #define BACKLOG 2
+
+ /* Return pointers to @n_lines consecutive lines starting at absolute
+ * line @in_line, pulling missing lines from the need_line callback.
+ * Lines older than BACKLOG are evicted first; seeking backwards flushes
+ * the cache. Returns NULL when the producer cannot supply more lines. */
+ static gpointer *
+ gst_line_cache_get_lines (GstLineCache * cache, gint idx, gint out_line,
+ gint in_line, gint n_lines)
+ {
+ if (cache->first + cache->backlog < in_line) {
+ gint to_remove =
+ MIN (in_line - (cache->first + cache->backlog), cache->lines->len);
+ if (to_remove > 0) {
+ g_ptr_array_remove_range (cache->lines, 0, to_remove);
+ }
+ cache->first += to_remove;
+ } else if (in_line < cache->first) {
+ /* request before the cached window: start over at in_line */
+ gst_line_cache_clear (cache);
+ cache->first = in_line;
+ }
+
+ while (TRUE) {
+ gint oline;
+
+ /* fully cached: hand out pointers into the pdata array */
+ if (cache->first <= in_line
+ && in_line + n_lines <= cache->first + (gint) cache->lines->len) {
+ return cache->lines->pdata + (in_line - cache->first);
+ }
+
+ if (cache->need_line == NULL)
+ break;
+
+ /* We may be able to skip ahead to the earliest line needed */
+ if (cache->lines->len == 0 && cache->first + cache->backlog < in_line)
+ cache->first = in_line - cache->backlog;
+
+ oline = out_line + cache->first + cache->lines->len - in_line;
+
+ if (!cache->need_line (cache, idx, oline, cache->first + cache->lines->len,
+ cache->need_line_data))
+ break;
+ }
+ GST_DEBUG ("no lines");
+ return NULL;
+ }
+
+ /* Append @line as absolute line @idx. A non-contiguous index flushes the
+ * cache so the stored window stays consecutive. */
+ static void
+ gst_line_cache_add_line (GstLineCache * cache, gint idx, gpointer line)
+ {
+ if (cache->first + cache->lines->len != idx) {
+ gst_line_cache_clear (cache);
+ cache->first = idx;
+ }
+ g_ptr_array_add (cache->lines, line);
+ }
+
+ /* Get a buffer for line @idx from the installed alloc_line callback, or
+ * NULL when none is set. */
+ static gpointer
+ gst_line_cache_alloc_line (GstLineCache * cache, gint idx)
+ {
+ gpointer res;
+
+ if (cache->alloc_line)
+ res = cache->alloc_line (cache, idx, cache->alloc_line_data);
+ else
+ res = NULL;
+
+ return res;
+ }
+
+ static void video_converter_generic (GstVideoConverter * convert,
+ const GstVideoFrame * src, GstVideoFrame * dest);
+ static gboolean video_converter_lookup_fastpath (GstVideoConverter * convert);
+ static void video_converter_compute_matrix (GstVideoConverter * convert);
+ static void video_converter_compute_resample (GstVideoConverter * convert,
+ gint idx);
+
+ static gpointer get_dest_line (GstLineCache * cache, gint idx,
+ gpointer user_data);
+
+ static gboolean do_unpack_lines (GstLineCache * cache, gint idx, gint out_line,
+ gint in_line, gpointer user_data);
+ static gboolean do_downsample_lines (GstLineCache * cache, gint idx,
+ gint out_line, gint in_line, gpointer user_data);
+ static gboolean do_convert_to_RGB_lines (GstLineCache * cache, gint idx,
+ gint out_line, gint in_line, gpointer user_data);
+ static gboolean do_convert_lines (GstLineCache * cache, gint idx, gint out_line,
+ gint in_line, gpointer user_data);
+ static gboolean do_alpha_lines (GstLineCache * cache, gint idx, gint out_line,
+ gint in_line, gpointer user_data);
+ static gboolean do_convert_to_YUV_lines (GstLineCache * cache, gint idx,
+ gint out_line, gint in_line, gpointer user_data);
+ static gboolean do_upsample_lines (GstLineCache * cache, gint idx,
+ gint out_line, gint in_line, gpointer user_data);
+ static gboolean do_vscale_lines (GstLineCache * cache, gint idx, gint out_line,
+ gint in_line, gpointer user_data);
+ static gboolean do_hscale_lines (GstLineCache * cache, gint idx, gint out_line,
+ gint in_line, gpointer user_data);
+ static gboolean do_dither_lines (GstLineCache * cache, gint idx, gint out_line,
+ gint in_line, gpointer user_data);
+
+ /* Allocate a ring of @n_lines temporary line buffers of @stride bytes in
+ * one contiguous slab; @notify is invoked on @user_data when freed. */
+ static ConverterAlloc *
+ converter_alloc_new (guint stride, guint n_lines, gpointer user_data,
+ GDestroyNotify notify)
+ {
+ ConverterAlloc *alloc;
+
+ GST_DEBUG ("stride %d, n_lines %d", stride, n_lines);
+ alloc = g_slice_new0 (ConverterAlloc);
+ alloc->data = g_malloc (stride * n_lines);
+ alloc->stride = stride;
+ alloc->n_lines = n_lines;
+ alloc->idx = 0;
+ alloc->user_data = user_data;
+ alloc->notify = notify;
+
+ return alloc;
+ }
+
+ /* Free a ConverterAlloc, first running its destroy notify if any. */
+ static void
+ converter_alloc_free (ConverterAlloc * alloc)
+ {
+ if (alloc->notify)
+ alloc->notify (alloc->user_data);
+ g_free (alloc->data);
+ g_slice_free (ConverterAlloc, alloc);
+ }
+
+ /* Prefill every temp line with the border pixel pattern so that borders
+ * come out right without per-line work (no-op when no border is set). */
+ static void
+ setup_border_alloc (GstVideoConverter * convert, ConverterAlloc * alloc)
+ {
+ gint i;
+
+ if (convert->borderline) {
+ for (i = 0; i < alloc->n_lines; i++)
+ memcpy (&alloc->data[i * alloc->stride], convert->borderline,
+ alloc->stride);
+ }
+ }
+
+ /* GstLineCacheAllocLineFunc: hand out the next buffer from the temp-line
+ * ring, cycling through n_lines slots. */
+ static gpointer
+ get_temp_line (GstLineCache * cache, gint idx, gpointer user_data)
+ {
+ ConverterAlloc *alloc = user_data;
+ gpointer tmpline;
+
+ GST_DEBUG ("get temp line %d (%p %d)", idx, alloc, alloc->idx);
+ tmpline = &alloc->data[alloc->stride * alloc->idx];
+ alloc->idx = (alloc->idx + 1) % alloc->n_lines;
+
+ return tmpline;
+ }
+
+ /* Like get_temp_line but offsets the returned pointer past the left
+ * border (out_x packed pixels) so the stage writes inside the border. */
+ static gpointer
+ get_border_temp_line (GstLineCache * cache, gint idx, gpointer user_data)
+ {
+ ConverterAlloc *alloc = user_data;
+ GstVideoConverter *convert = alloc->user_data;
+ gpointer tmpline;
+
+ GST_DEBUG ("get temp line %d (%p %d)", idx, alloc, alloc->idx);
+ tmpline = &alloc->data[alloc->stride * alloc->idx] +
+ (convert->out_x * convert->pack_pstride);
+ alloc->idx = (alloc->idx + 1) % alloc->n_lines;
+
+ return tmpline;
+ }
+
+ /* Typed accessors for the converter's GstStructure config: each returns
+ * the option value from convert->config, or @def when absent. */
+ static gint
+ get_opt_int (GstVideoConverter * convert, const gchar * opt, gint def)
+ {
+ gint res;
+ if (!gst_structure_get_int (convert->config, opt, &res))
+ res = def;
+ return res;
+ }
+
+ static guint
+ get_opt_uint (GstVideoConverter * convert, const gchar * opt, guint def)
+ {
+ guint res;
+ if (!gst_structure_get_uint (convert->config, opt, &res))
+ res = def;
+ return res;
+ }
+
+ static gdouble
+ get_opt_double (GstVideoConverter * convert, const gchar * opt, gdouble def)
+ {
+ gdouble res;
+ if (!gst_structure_get_double (convert->config, opt, &res))
+ res = def;
+ return res;
+ }
+
+ static gboolean
+ get_opt_bool (GstVideoConverter * convert, const gchar * opt, gboolean def)
+ {
+ gboolean res;
+ if (!gst_structure_get_boolean (convert->config, opt, &res))
+ res = def;
+ return res;
+ }
+
+ static gint
+ get_opt_enum (GstVideoConverter * convert, const gchar * opt, GType type,
+ gint def)
+ {
+ gint res;
+ if (!gst_structure_get_enum (convert->config, opt, type, &res))
+ res = def;
+ return res;
+ }
+
+ #define DEFAULT_OPT_FILL_BORDER TRUE
+ #define DEFAULT_OPT_ALPHA_VALUE 1.0
+ /* options copy, set, mult */
+ #define DEFAULT_OPT_ALPHA_MODE GST_VIDEO_ALPHA_MODE_COPY
+ #define DEFAULT_OPT_BORDER_ARGB 0xff000000
+ /* options full, input-only, output-only, none */
+ #define DEFAULT_OPT_MATRIX_MODE GST_VIDEO_MATRIX_MODE_FULL
/* Defaults for the remaining GstVideoConverter config options; the
 * comment before each enum default lists the accepted nicks. */
/* none, remap */
#define DEFAULT_OPT_GAMMA_MODE GST_VIDEO_GAMMA_MODE_NONE
/* none, merge-only, fast */
#define DEFAULT_OPT_PRIMARIES_MODE GST_VIDEO_PRIMARIES_MODE_NONE
/* options full, upsample-only, downsample-only, none */
#define DEFAULT_OPT_CHROMA_MODE GST_VIDEO_CHROMA_MODE_FULL
#define DEFAULT_OPT_RESAMPLER_METHOD GST_VIDEO_RESAMPLER_METHOD_CUBIC
#define DEFAULT_OPT_CHROMA_RESAMPLER_METHOD GST_VIDEO_RESAMPLER_METHOD_LINEAR
/* 0 lets the scaler choose the tap count for the method */
#define DEFAULT_OPT_RESAMPLER_TAPS 0
#define DEFAULT_OPT_DITHER_METHOD GST_VIDEO_DITHER_BAYER
#define DEFAULT_OPT_DITHER_QUANTIZATION 1
#define DEFAULT_OPT_ASYNC_TASKS FALSE

/* Accessors that read one option from the converter's config
 * structure, falling back to the defaults above when unset. */
#define GET_OPT_FILL_BORDER(c) get_opt_bool(c, \
    GST_VIDEO_CONVERTER_OPT_FILL_BORDER, DEFAULT_OPT_FILL_BORDER)
#define GET_OPT_ALPHA_VALUE(c) get_opt_double(c, \
    GST_VIDEO_CONVERTER_OPT_ALPHA_VALUE, DEFAULT_OPT_ALPHA_VALUE)
#define GET_OPT_ALPHA_MODE(c) get_opt_enum(c, \
    GST_VIDEO_CONVERTER_OPT_ALPHA_MODE, GST_TYPE_VIDEO_ALPHA_MODE, DEFAULT_OPT_ALPHA_MODE)
#define GET_OPT_BORDER_ARGB(c) get_opt_uint(c, \
    GST_VIDEO_CONVERTER_OPT_BORDER_ARGB, DEFAULT_OPT_BORDER_ARGB)
#define GET_OPT_MATRIX_MODE(c) get_opt_enum(c, \
    GST_VIDEO_CONVERTER_OPT_MATRIX_MODE, GST_TYPE_VIDEO_MATRIX_MODE, DEFAULT_OPT_MATRIX_MODE)
#define GET_OPT_GAMMA_MODE(c) get_opt_enum(c, \
    GST_VIDEO_CONVERTER_OPT_GAMMA_MODE, GST_TYPE_VIDEO_GAMMA_MODE, DEFAULT_OPT_GAMMA_MODE)
#define GET_OPT_PRIMARIES_MODE(c) get_opt_enum(c, \
    GST_VIDEO_CONVERTER_OPT_PRIMARIES_MODE, GST_TYPE_VIDEO_PRIMARIES_MODE, DEFAULT_OPT_PRIMARIES_MODE)
#define GET_OPT_CHROMA_MODE(c) get_opt_enum(c, \
    GST_VIDEO_CONVERTER_OPT_CHROMA_MODE, GST_TYPE_VIDEO_CHROMA_MODE, DEFAULT_OPT_CHROMA_MODE)
#define GET_OPT_RESAMPLER_METHOD(c) get_opt_enum(c, \
    GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD, GST_TYPE_VIDEO_RESAMPLER_METHOD, \
    DEFAULT_OPT_RESAMPLER_METHOD)
#define GET_OPT_CHROMA_RESAMPLER_METHOD(c) get_opt_enum(c, \
    GST_VIDEO_CONVERTER_OPT_CHROMA_RESAMPLER_METHOD, GST_TYPE_VIDEO_RESAMPLER_METHOD, \
    DEFAULT_OPT_CHROMA_RESAMPLER_METHOD)
#define GET_OPT_RESAMPLER_TAPS(c) get_opt_uint(c, \
    GST_VIDEO_CONVERTER_OPT_RESAMPLER_TAPS, DEFAULT_OPT_RESAMPLER_TAPS)
#define GET_OPT_DITHER_METHOD(c) get_opt_enum(c, \
    GST_VIDEO_CONVERTER_OPT_DITHER_METHOD, GST_TYPE_VIDEO_DITHER_METHOD, \
    DEFAULT_OPT_DITHER_METHOD)
#define GET_OPT_DITHER_QUANTIZATION(c) get_opt_uint(c, \
    GST_VIDEO_CONVERTER_OPT_DITHER_QUANTIZATION, DEFAULT_OPT_DITHER_QUANTIZATION)
#define GET_OPT_ASYNC_TASKS(c) get_opt_bool(c, \
    GST_VIDEO_CONVERTER_OPT_ASYNC_TASKS, DEFAULT_OPT_ASYNC_TASKS)

/* Convenience predicates over the configured modes. */
#define CHECK_ALPHA_COPY(c) (GET_OPT_ALPHA_MODE(c) == GST_VIDEO_ALPHA_MODE_COPY)
#define CHECK_ALPHA_SET(c) (GET_OPT_ALPHA_MODE(c) == GST_VIDEO_ALPHA_MODE_SET)
#define CHECK_ALPHA_MULT(c) (GET_OPT_ALPHA_MODE(c) == GST_VIDEO_ALPHA_MODE_MULT)

#define CHECK_MATRIX_FULL(c) (GET_OPT_MATRIX_MODE(c) == GST_VIDEO_MATRIX_MODE_FULL)
#define CHECK_MATRIX_INPUT(c) (GET_OPT_MATRIX_MODE(c) == GST_VIDEO_MATRIX_MODE_INPUT_ONLY)
#define CHECK_MATRIX_OUTPUT(c) (GET_OPT_MATRIX_MODE(c) == GST_VIDEO_MATRIX_MODE_OUTPUT_ONLY)
#define CHECK_MATRIX_NONE(c) (GET_OPT_MATRIX_MODE(c) == GST_VIDEO_MATRIX_MODE_NONE)

#define CHECK_GAMMA_NONE(c) (GET_OPT_GAMMA_MODE(c) == GST_VIDEO_GAMMA_MODE_NONE)
#define CHECK_GAMMA_REMAP(c) (GET_OPT_GAMMA_MODE(c) == GST_VIDEO_GAMMA_MODE_REMAP)

#define CHECK_PRIMARIES_NONE(c) (GET_OPT_PRIMARIES_MODE(c) == GST_VIDEO_PRIMARIES_MODE_NONE)
#define CHECK_PRIMARIES_MERGE(c) (GET_OPT_PRIMARIES_MODE(c) == GST_VIDEO_PRIMARIES_MODE_MERGE_ONLY)
#define CHECK_PRIMARIES_FAST(c) (GET_OPT_PRIMARIES_MODE(c) == GST_VIDEO_PRIMARIES_MODE_FAST)

#define CHECK_CHROMA_FULL(c) (GET_OPT_CHROMA_MODE(c) == GST_VIDEO_CHROMA_MODE_FULL)
#define CHECK_CHROMA_UPSAMPLE(c) (GET_OPT_CHROMA_MODE(c) == GST_VIDEO_CHROMA_MODE_UPSAMPLE_ONLY)
#define CHECK_CHROMA_DOWNSAMPLE(c) (GET_OPT_CHROMA_MODE(c) == GST_VIDEO_CHROMA_MODE_DOWNSAMPLE_ONLY)
#define CHECK_CHROMA_NONE(c) (GET_OPT_CHROMA_MODE(c) == GST_VIDEO_CHROMA_MODE_NONE)
+
+ static GstLineCache *
+ chain_unpack_line (GstVideoConverter * convert, gint idx)
+ {
+ GstLineCache *prev;
+ GstVideoInfo *info;
+
+ info = &convert->in_info;
+
+ convert->current_format = convert->unpack_format;
+ convert->current_bits = convert->unpack_bits;
+ convert->current_pstride = convert->current_bits >> 1;
+
+ convert->unpack_pstride = convert->current_pstride;
+ convert->identity_unpack = (convert->current_format == info->finfo->format);
+
+ GST_DEBUG ("chain unpack line format %s, pstride %d, identity_unpack %d",
+ gst_video_format_to_string (convert->current_format),
+ convert->current_pstride, convert->identity_unpack);
+
+ prev = convert->unpack_lines[idx] = gst_line_cache_new (NULL);
+ prev->write_input = FALSE;
+ prev->pass_alloc = FALSE;
+ prev->n_lines = 1;
+ prev->stride = convert->current_pstride * convert->current_width;
+ gst_line_cache_set_need_line_func (prev, do_unpack_lines, idx, convert, NULL);
+
+ return prev;
+ }
+
+ static GstLineCache *
+ chain_upsample (GstVideoConverter * convert, GstLineCache * prev, gint idx)
+ {
+ video_converter_compute_resample (convert, idx);
+
+ if (convert->upsample_p[idx] || convert->upsample_i[idx]) {
+ GST_DEBUG ("chain upsample");
+ prev = convert->upsample_lines[idx] = gst_line_cache_new (prev);
+ prev->write_input = TRUE;
+ prev->pass_alloc = TRUE;
+ /* XXX: why this hardcoded value? */
+ prev->n_lines = 5;
+ prev->stride = convert->current_pstride * convert->current_width;
+ gst_line_cache_set_need_line_func (prev,
+ do_upsample_lines, idx, convert, NULL);
+ }
+ return prev;
+ }
+
+ static void
+ color_matrix_set_identity (MatrixData * m)
+ {
+ int i, j;
+
+ for (i = 0; i < 4; i++) {
+ for (j = 0; j < 4; j++) {
+ m->dm[i][j] = (i == j);
+ }
+ }
+ }
+
+ static void
+ color_matrix_copy (MatrixData * d, const MatrixData * s)
+ {
+ gint i, j;
+
+ for (i = 0; i < 4; i++)
+ for (j = 0; j < 4; j++)
+ d->dm[i][j] = s->dm[i][j];
+ }
+
+ /* Perform 4x4 matrix multiplication:
+ * - @dst@ = @a@ * @b@
+ * - @dst@ may be a pointer to @a@ andor @b@
+ */
+ static void
+ color_matrix_multiply (MatrixData * dst, MatrixData * a, MatrixData * b)
+ {
+ MatrixData tmp;
+ int i, j, k;
+
+ for (i = 0; i < 4; i++) {
+ for (j = 0; j < 4; j++) {
+ double x = 0;
+ for (k = 0; k < 4; k++) {
+ x += a->dm[i][k] * b->dm[k][j];
+ }
+ tmp.dm[i][j] = x;
+ }
+ }
+ color_matrix_copy (dst, &tmp);
+ }
+
/* Invert the upper-left 3x3 of @s into @d via the classical
 * adjugate / determinant formula; the 4th row and column of the
 * result are left as identity (offsets are not inverted).
 * NOTE(review): no guard against a singular matrix — if det == 0 the
 * division below is undefined; presumably callers only pass
 * well-formed primaries matrices — verify at call sites. */
static void
color_matrix_invert (MatrixData * d, MatrixData * s)
{
  MatrixData tmp;
  int i, j;
  double det;

  color_matrix_set_identity (&tmp);
  /* tmp[j][i] = 2x2 cofactor of s[i][j]; the index swap builds the
   * transposed cofactor matrix (the adjugate) directly */
  for (j = 0; j < 3; j++) {
    for (i = 0; i < 3; i++) {
      tmp.dm[j][i] =
          s->dm[(i + 1) % 3][(j + 1) % 3] * s->dm[(i + 2) % 3][(j + 2) % 3] -
          s->dm[(i + 1) % 3][(j + 2) % 3] * s->dm[(i + 2) % 3][(j + 1) % 3];
    }
  }
  /* determinant by expansion along the first column of @s */
  det =
      tmp.dm[0][0] * s->dm[0][0] + tmp.dm[0][1] * s->dm[1][0] +
      tmp.dm[0][2] * s->dm[2][0];
  /* inverse = adjugate / determinant */
  for (j = 0; j < 3; j++) {
    for (i = 0; i < 3; i++) {
      tmp.dm[i][j] /= det;
    }
  }
  color_matrix_copy (d, &tmp);
}
+
+ static void
+ color_matrix_offset_components (MatrixData * m, double a1, double a2, double a3)
+ {
+ MatrixData a;
+
+ color_matrix_set_identity (&a);
+ a.dm[0][3] = a1;
+ a.dm[1][3] = a2;
+ a.dm[2][3] = a3;
+ color_matrix_multiply (m, &a, m);
+ }
+
+ static void
+ color_matrix_scale_components (MatrixData * m, double a1, double a2, double a3)
+ {
+ MatrixData a;
+
+ color_matrix_set_identity (&a);
+ a.dm[0][0] = a1;
+ a.dm[1][1] = a2;
+ a.dm[2][2] = a3;
+ color_matrix_multiply (m, &a, m);
+ }
+
+ static void
+ color_matrix_debug (const MatrixData * s)
+ {
+ GST_DEBUG ("[%f %f %f %f]", s->dm[0][0], s->dm[0][1], s->dm[0][2],
+ s->dm[0][3]);
+ GST_DEBUG ("[%f %f %f %f]", s->dm[1][0], s->dm[1][1], s->dm[1][2],
+ s->dm[1][3]);
+ GST_DEBUG ("[%f %f %f %f]", s->dm[2][0], s->dm[2][1], s->dm[2][2],
+ s->dm[2][3]);
+ GST_DEBUG ("[%f %f %f %f]", s->dm[3][0], s->dm[3][1], s->dm[3][2],
+ s->dm[3][3]);
+ }
+
+ static void
+ color_matrix_convert (MatrixData * s)
+ {
+ gint i, j;
+
+ for (i = 0; i < 4; i++)
+ for (j = 0; j < 4; j++)
+ s->im[i][j] = rint (s->dm[i][j]);
+
+ GST_DEBUG ("[%6d %6d %6d %6d]", s->im[0][0], s->im[0][1], s->im[0][2],
+ s->im[0][3]);
+ GST_DEBUG ("[%6d %6d %6d %6d]", s->im[1][0], s->im[1][1], s->im[1][2],
+ s->im[1][3]);
+ GST_DEBUG ("[%6d %6d %6d %6d]", s->im[2][0], s->im[2][1], s->im[2][2],
+ s->im[2][3]);
+ GST_DEBUG ("[%6d %6d %6d %6d]", s->im[3][0], s->im[3][1], s->im[3][2],
+ s->im[3][3]);
+ }
+
+ static void
+ color_matrix_YCbCr_to_RGB (MatrixData * m, double Kr, double Kb)
+ {
+ double Kg = 1.0 - Kr - Kb;
+ MatrixData k = {
+ {
+ {1., 0., 2 * (1 - Kr), 0.},
+ {1., -2 * Kb * (1 - Kb) / Kg, -2 * Kr * (1 - Kr) / Kg, 0.},
+ {1., 2 * (1 - Kb), 0., 0.},
+ {0., 0., 0., 1.},
+ }
+ };
+
+ color_matrix_multiply (m, &k, m);
+ }
+
+ static void
+ color_matrix_RGB_to_YCbCr (MatrixData * m, double Kr, double Kb)
+ {
+ double Kg = 1.0 - Kr - Kb;
+ MatrixData k;
+ double x;
+
+ k.dm[0][0] = Kr;
+ k.dm[0][1] = Kg;
+ k.dm[0][2] = Kb;
+ k.dm[0][3] = 0;
+
+ x = 1 / (2 * (1 - Kb));
+ k.dm[1][0] = -x * Kr;
+ k.dm[1][1] = -x * Kg;
+ k.dm[1][2] = x * (1 - Kb);
+ k.dm[1][3] = 0;
+
+ x = 1 / (2 * (1 - Kr));
+ k.dm[2][0] = x * (1 - Kr);
+ k.dm[2][1] = -x * Kg;
+ k.dm[2][2] = -x * Kb;
+ k.dm[2][3] = 0;
+
+ k.dm[3][0] = 0;
+ k.dm[3][1] = 0;
+ k.dm[3][2] = 0;
+ k.dm[3][3] = 1;
+
+ color_matrix_multiply (m, &k, m);
+ }
+
+ static void
+ color_matrix_RGB_to_XYZ (MatrixData * dst, double Rx, double Ry, double Gx,
+ double Gy, double Bx, double By, double Wx, double Wy)
+ {
+ MatrixData m, im;
+ double sx, sy, sz;
+ double wx, wy, wz;
+
+ color_matrix_set_identity (&m);
+
+ m.dm[0][0] = Rx;
+ m.dm[1][0] = Ry;
+ m.dm[2][0] = (1.0 - Rx - Ry);
+ m.dm[0][1] = Gx;
+ m.dm[1][1] = Gy;
+ m.dm[2][1] = (1.0 - Gx - Gy);
+ m.dm[0][2] = Bx;
+ m.dm[1][2] = By;
+ m.dm[2][2] = (1.0 - Bx - By);
+
+ color_matrix_invert (&im, &m);
+
+ wx = Wx / Wy;
+ wy = 1.0;
+ wz = (1.0 - Wx - Wy) / Wy;
+
+ sx = im.dm[0][0] * wx + im.dm[0][1] * wy + im.dm[0][2] * wz;
+ sy = im.dm[1][0] * wx + im.dm[1][1] * wy + im.dm[1][2] * wz;
+ sz = im.dm[2][0] * wx + im.dm[2][1] * wy + im.dm[2][2] * wz;
+
+ m.dm[0][0] *= sx;
+ m.dm[1][0] *= sx;
+ m.dm[2][0] *= sx;
+ m.dm[0][1] *= sy;
+ m.dm[1][1] *= sy;
+ m.dm[2][1] *= sy;
+ m.dm[0][2] *= sz;
+ m.dm[1][2] *= sz;
+ m.dm[2][2] *= sz;
+
+ color_matrix_copy (dst, &m);
+ }
+
+ static void
+ videoconvert_convert_init_tables (MatrixData * data)
+ {
+ gint i, j;
+
+ data->t_r = g_new (gint64, 256);
+ data->t_g = g_new (gint64, 256);
+ data->t_b = g_new (gint64, 256);
+
+ for (i = 0; i < 256; i++) {
+ gint64 r = 0, g = 0, b = 0;
+
+ for (j = 0; j < 3; j++) {
+ r = (r << 16) + data->im[j][0] * i;
+ g = (g << 16) + data->im[j][1] * i;
+ b = (b << 16) + data->im[j][2] * i;
+ }
+ data->t_r[i] = r;
+ data->t_g[i] = g;
+ data->t_b[i] = b;
+ }
+ data->t_c = ((gint64) data->im[0][3] << 32)
+ + ((gint64) data->im[1][3] << 16)
+ + ((gint64) data->im[2][3] << 0);
+ }
+
+ void
+ _custom_video_orc_matrix8 (guint8 * ORC_RESTRICT d1,
+ const guint8 * ORC_RESTRICT s1, orc_int64 p1, orc_int64 p2, orc_int64 p3,
+ orc_int64 p4, int n)
+ {
+ gint i;
+ gint r, g, b;
+ gint y, u, v;
+ gint a00, a01, a02, a03;
+ gint a10, a11, a12, a13;
+ gint a20, a21, a22, a23;
+
+ a00 = (gint16) (p1 >> 16);
+ a01 = (gint16) (p2 >> 16);
+ a02 = (gint16) (p3 >> 16);
+ a03 = (gint16) (p4 >> 16);
+ a10 = (gint16) (p1 >> 32);
+ a11 = (gint16) (p2 >> 32);
+ a12 = (gint16) (p3 >> 32);
+ a13 = (gint16) (p4 >> 32);
+ a20 = (gint16) (p1 >> 48);
+ a21 = (gint16) (p2 >> 48);
+ a22 = (gint16) (p3 >> 48);
+ a23 = (gint16) (p4 >> 48);
+
+ for (i = 0; i < n; i++) {
+ r = s1[i * 4 + 1];
+ g = s1[i * 4 + 2];
+ b = s1[i * 4 + 3];
+
+ y = ((a00 * r + a01 * g + a02 * b) >> SCALE) + a03;
+ u = ((a10 * r + a11 * g + a12 * b) >> SCALE) + a13;
+ v = ((a20 * r + a21 * g + a22 * b) >> SCALE) + a23;
+
+ d1[i * 4 + 1] = CLAMP (y, 0, 255);
+ d1[i * 4 + 2] = CLAMP (u, 0, 255);
+ d1[i * 4 + 3] = CLAMP (v, 0, 255);
+ }
+ }
+
+ static void
+ video_converter_matrix8 (MatrixData * data, gpointer pixels)
+ {
+ gpointer d = pixels;
+ video_orc_matrix8 (d, pixels, data->orc_p1, data->orc_p2,
+ data->orc_p3, data->orc_p4, data->width);
+ }
+
+ static void
+ video_converter_matrix8_table (MatrixData * data, gpointer pixels)
+ {
+ gint i, width = data->width * 4;
+ guint8 r, g, b;
+ gint64 c = data->t_c;
+ guint8 *p = pixels;
+ gint64 x;
+
+ for (i = 0; i < width; i += 4) {
+ r = p[i + 1];
+ g = p[i + 2];
+ b = p[i + 3];
+
+ x = data->t_r[r] + data->t_g[g] + data->t_b[b] + c;
+
+ p[i + 1] = x >> (32 + SCALE);
+ p[i + 2] = x >> (16 + SCALE);
+ p[i + 3] = x >> (0 + SCALE);
+ }
+ }
+
+ static void
+ video_converter_matrix8_AYUV_ARGB (MatrixData * data, gpointer pixels)
+ {
+ gpointer d = pixels;
+
+ video_orc_convert_AYUV_ARGB (d, 0, pixels, 0,
+ data->im[0][0], data->im[0][2],
+ data->im[2][1], data->im[1][1], data->im[1][2], data->width, 1);
+ }
+
+ static gboolean
+ is_ayuv_to_rgb_matrix (MatrixData * data)
+ {
+ if (data->im[0][0] != data->im[1][0] || data->im[1][0] != data->im[2][0])
+ return FALSE;
+
+ if (data->im[0][1] != 0 || data->im[2][2] != 0)
+ return FALSE;
+
+ return TRUE;
+ }
+
+ static gboolean
+ is_identity_matrix (MatrixData * data)
+ {
+ gint i, j;
+ gint c = data->im[0][0];
+
+ /* not really checking identity because of rounding errors but given
+ * the conversions we do we just check for anything that looks like:
+ *
+ * c 0 0 0
+ * 0 c 0 0
+ * 0 0 c 0
+ * 0 0 0 1
+ */
+ for (i = 0; i < 4; i++) {
+ for (j = 0; j < 4; j++) {
+ if (i == j) {
+ if (i == 3 && data->im[i][j] != 1)
+ return FALSE;
+ else if (data->im[i][j] != c)
+ return FALSE;
+ } else if (data->im[i][j] != 0)
+ return FALSE;
+ }
+ }
+ return TRUE;
+ }
+
+ static gboolean
+ is_no_clip_matrix (MatrixData * data)
+ {
+ gint i;
+ static const guint8 test[8][3] = {
+ {0, 0, 0},
+ {0, 0, 255},
+ {0, 255, 0},
+ {0, 255, 255},
+ {255, 0, 0},
+ {255, 0, 255},
+ {255, 255, 0},
+ {255, 255, 255}
+ };
+
+ for (i = 0; i < 8; i++) {
+ gint r, g, b;
+ gint y, u, v;
+
+ r = test[i][0];
+ g = test[i][1];
+ b = test[i][2];
+
+ y = (data->im[0][0] * r + data->im[0][1] * g +
+ data->im[0][2] * b + data->im[0][3]) >> SCALE;
+ u = (data->im[1][0] * r + data->im[1][1] * g +
+ data->im[1][2] * b + data->im[1][3]) >> SCALE;
+ v = (data->im[2][0] * r + data->im[2][1] * g +
+ data->im[2][2] * b + data->im[2][3]) >> SCALE;
+
+ if (y != CLAMP (y, 0, 255) || u != CLAMP (u, 0, 255)
+ || v != CLAMP (v, 0, 255))
+ return FALSE;
+ }
+ return TRUE;
+ }
+
+ static void
+ video_converter_matrix16 (MatrixData * data, gpointer pixels)
+ {
+ int i;
+ int r, g, b;
+ int y, u, v;
+ guint16 *p = pixels;
+ gint width = data->width;
+
+ for (i = 0; i < width; i++) {
+ r = p[i * 4 + 1];
+ g = p[i * 4 + 2];
+ b = p[i * 4 + 3];
+
+ y = (data->im[0][0] * r + data->im[0][1] * g +
+ data->im[0][2] * b + data->im[0][3]) >> SCALE;
+ u = (data->im[1][0] * r + data->im[1][1] * g +
+ data->im[1][2] * b + data->im[1][3]) >> SCALE;
+ v = (data->im[2][0] * r + data->im[2][1] * g +
+ data->im[2][2] * b + data->im[2][3]) >> SCALE;
+
+ p[i * 4 + 1] = CLAMP (y, 0, 65535);
+ p[i * 4 + 2] = CLAMP (u, 0, 65535);
+ p[i * 4 + 3] = CLAMP (v, 0, 65535);
+ }
+ }
+
+
/* Finalize @data for runtime use: scale to SCALE fixed-point bits,
 * round to the integer matrix and select the fastest matrix_func for
 * the current bit depth and matrix shape.  Identity-like matrices get
 * no matrix_func (no conversion needed). */
static void
prepare_matrix (GstVideoConverter * convert, MatrixData * data)
{
  if (is_identity_matrix (data))
    return;

  /* move to fixed point with SCALE fractional bits */
  color_matrix_scale_components (data, SCALE_F, SCALE_F, SCALE_F);
  color_matrix_convert (data);

  data->width = convert->current_width;

  if (convert->current_bits == 8) {
    if (!convert->unpack_rgb && convert->pack_rgb
        && is_ayuv_to_rgb_matrix (data)) {
      GST_DEBUG ("use fast AYUV -> RGB matrix");
      data->matrix_func = video_converter_matrix8_AYUV_ARGB;
    } else if (is_no_clip_matrix (data)) {
      GST_DEBUG ("use 8bit table");
      data->matrix_func = video_converter_matrix8_table;
      videoconvert_convert_init_tables (data);
    } else {
      gint a03, a13, a23;

      GST_DEBUG ("use 8bit matrix");
      data->matrix_func = video_converter_matrix8;

      /* pack each matrix column into a 64-bit orc parameter:
       * row 0 in bits 16..31, row 1 in 32..47, row 2 in 48..63 */
      data->orc_p1 = (((guint64) (guint16) data->im[2][0]) << 48) |
          (((guint64) (guint16) data->im[1][0]) << 32) |
          (((guint64) (guint16) data->im[0][0]) << 16);
      data->orc_p2 = (((guint64) (guint16) data->im[2][1]) << 48) |
          (((guint64) (guint16) data->im[1][1]) << 32) |
          (((guint64) (guint16) data->im[0][1]) << 16);
      data->orc_p3 = (((guint64) (guint16) data->im[2][2]) << 48) |
          (((guint64) (guint16) data->im[1][2]) << 32) |
          (((guint64) (guint16) data->im[0][2]) << 16);

      /* offsets are applied after the shift in the kernel, so store
       * them unscaled */
      a03 = data->im[0][3] >> SCALE;
      a13 = data->im[1][3] >> SCALE;
      a23 = data->im[2][3] >> SCALE;

      data->orc_p4 = (((guint64) (guint16) a23) << 48) |
          (((guint64) (guint16) a13) << 32) | (((guint64) (guint16) a03) << 16);
    }
  } else {
    GST_DEBUG ("use 16bit matrix");
    data->matrix_func = video_converter_matrix16;
  }
}
+
/* Compose into @data the transform from unpacked input pixels to
 * non-linear R'G'B' in the [0..1] range: first undo the input's
 * range offset/scale, then (for YUV input, unless matrix mode is
 * NONE) apply YCbCr -> RGB for the input colorimetry — or the
 * output's, in OUTPUT_ONLY matrix mode. */
static void
compute_matrix_to_RGB (GstVideoConverter * convert, MatrixData * data)
{
  GstVideoInfo *info;
  gdouble Kr = 0, Kb = 0;

  info = &convert->in_info;

  {
    const GstVideoFormatInfo *uinfo;
    gint offset[4], scale[4];

    uinfo = gst_video_format_get_info (convert->unpack_format);

    /* bring color components to [0..1.0] range */
    gst_video_color_range_offsets (info->colorimetry.range, uinfo, offset,
        scale);

    color_matrix_offset_components (data, -offset[0], -offset[1], -offset[2]);
    color_matrix_scale_components (data, 1 / ((float) scale[0]),
        1 / ((float) scale[1]), 1 / ((float) scale[2]));
  }

  if (!convert->unpack_rgb && !CHECK_MATRIX_NONE (convert)) {
    if (CHECK_MATRIX_OUTPUT (convert))
      info = &convert->out_info;

    /* bring components to R'G'B' space */
    if (gst_video_color_matrix_get_Kr_Kb (info->colorimetry.matrix, &Kr, &Kb))
      color_matrix_YCbCr_to_RGB (data, Kr, Kb);
  }
  color_matrix_debug (data);
}
+
/* Compose into @data the transform from non-linear R'G'B' back to the
 * packed output: apply RGB -> YCbCr for the output colorimetry (or
 * the input's, in INPUT_ONLY matrix mode; @force overrides the
 * pack_rgb/matrix-mode checks), then apply the output's range
 * scale/offset. */
static void
compute_matrix_to_YUV (GstVideoConverter * convert, MatrixData * data,
    gboolean force)
{
  GstVideoInfo *info;
  gdouble Kr = 0, Kb = 0;

  if (force || (!convert->pack_rgb && !CHECK_MATRIX_NONE (convert))) {
    if (CHECK_MATRIX_INPUT (convert))
      info = &convert->in_info;
    else
      info = &convert->out_info;

    /* bring components to YCbCr space */
    if (gst_video_color_matrix_get_Kr_Kb (info->colorimetry.matrix, &Kr, &Kb))
      color_matrix_RGB_to_YCbCr (data, Kr, Kb);
  }

  info = &convert->out_info;

  {
    const GstVideoFormatInfo *uinfo;
    gint offset[4], scale[4];

    uinfo = gst_video_format_get_info (convert->pack_format);

    /* bring color components to nominal range */
    gst_video_color_range_offsets (info->colorimetry.range, uinfo, offset,
        scale);

    color_matrix_scale_components (data, (float) scale[0], (float) scale[1],
        (float) scale[2]);
    color_matrix_offset_components (data, offset[0], offset[1], offset[2]);
  }

  color_matrix_debug (data);
}
+
+
+ static void
+ gamma_convert_u8_u16 (GammaData * data, gpointer dest, gpointer src)
+ {
+ gint i;
+ guint8 *s = src;
+ guint16 *d = dest;
+ guint16 *table = data->gamma_table;
+ gint width = data->width * 4;
+
+ for (i = 0; i < width; i += 4) {
+ d[i + 0] = (s[i] << 8) | s[i];
+ d[i + 1] = table[s[i + 1]];
+ d[i + 2] = table[s[i + 2]];
+ d[i + 3] = table[s[i + 3]];
+ }
+ }
+
+ static void
+ gamma_convert_u16_u8 (GammaData * data, gpointer dest, gpointer src)
+ {
+ gint i;
+ guint16 *s = src;
+ guint8 *d = dest;
+ guint8 *table = data->gamma_table;
+ gint width = data->width * 4;
+
+ for (i = 0; i < width; i += 4) {
+ d[i + 0] = s[i] >> 8;
+ d[i + 1] = table[s[i + 1]];
+ d[i + 2] = table[s[i + 2]];
+ d[i + 3] = table[s[i + 3]];
+ }
+ }
+
+ static void
+ gamma_convert_u16_u16 (GammaData * data, gpointer dest, gpointer src)
+ {
+ gint i;
+ guint16 *s = src;
+ guint16 *d = dest;
+ guint16 *table = data->gamma_table;
+ gint width = data->width * 4;
+
+ for (i = 0; i < width; i += 4) {
+ d[i + 0] = s[i];
+ d[i + 1] = table[s[i + 1]];
+ d[i + 2] = table[s[i + 2]];
+ d[i + 3] = table[s[i + 3]];
+ }
+ }
+
/* Install the gamma-decode stage: build (once) a LUT that linearizes
 * the input transfer function into 16-bit values.  After this the
 * pipeline's working format is 16-bit ARGB64. */
static void
setup_gamma_decode (GstVideoConverter * convert)
{
  GstVideoTransferFunction func;
  guint16 *t;
  gint i;

  func = convert->in_info.colorimetry.transfer;

  convert->gamma_dec.width = convert->current_width;
  if (convert->gamma_dec.gamma_table) {
    /* already built by an earlier chain setup; tables are shared */
    GST_DEBUG ("gamma decode already set up");
  } else if (convert->current_bits == 8) {
    GST_DEBUG ("gamma decode 8->16: %d", func);
    convert->gamma_dec.gamma_func = gamma_convert_u8_u16;
    /* 256-entry LUT: 8-bit non-linear -> 16-bit linear */
    t = convert->gamma_dec.gamma_table = g_malloc (sizeof (guint16) * 256);

    for (i = 0; i < 256; i++)
      t[i] =
          rint (gst_video_transfer_function_decode (func, i / 255.0) * 65535.0);
  } else {
    GST_DEBUG ("gamma decode 16->16: %d", func);
    convert->gamma_dec.gamma_func = gamma_convert_u16_u16;
    /* 65536-entry LUT: 16-bit non-linear -> 16-bit linear */
    t = convert->gamma_dec.gamma_table = g_malloc (sizeof (guint16) * 65536);

    for (i = 0; i < 65536; i++)
      t[i] =
          rint (gst_video_transfer_function_decode (func,
              i / 65535.0) * 65535.0);
  }
  /* downstream stages now see 16-bit ARGB64 */
  convert->current_bits = 16;
  convert->current_pstride = 8;
  convert->current_format = GST_VIDEO_FORMAT_ARGB64;
}
+
/* Install the gamma-encode stage: build (once) a LUT that applies the
 * output transfer function, mapping 16-bit linear values to
 * @target_bits non-linear values. */
static void
setup_gamma_encode (GstVideoConverter * convert, gint target_bits)
{
  GstVideoTransferFunction func;
  gint i;

  func = convert->out_info.colorimetry.transfer;

  convert->gamma_enc.width = convert->current_width;
  if (convert->gamma_enc.gamma_table) {
    /* already built by an earlier chain setup; tables are shared */
    GST_DEBUG ("gamma encode already set up");
  } else if (target_bits == 8) {
    guint8 *t;

    GST_DEBUG ("gamma encode 16->8: %d", func);
    convert->gamma_enc.gamma_func = gamma_convert_u16_u8;
    /* 65536-entry LUT: 16-bit linear -> 8-bit non-linear */
    t = convert->gamma_enc.gamma_table = g_malloc (sizeof (guint8) * 65536);

    for (i = 0; i < 65536; i++)
      t[i] =
          rint (gst_video_transfer_function_encode (func, i / 65535.0) * 255.0);
  } else {
    guint16 *t;

    GST_DEBUG ("gamma encode 16->16: %d", func);
    convert->gamma_enc.gamma_func = gamma_convert_u16_u16;
    /* 65536-entry LUT: 16-bit linear -> 16-bit non-linear */
    t = convert->gamma_enc.gamma_table = g_malloc (sizeof (guint16) * 65536);

    for (i = 0; i < 65536; i++)
      t[i] =
          rint (gst_video_transfer_function_encode (func,
              i / 65535.0) * 65535.0);
  }
}
+
/* When gamma remapping is enabled, add the stage that converts input
 * lines to R'G'B' (setting up the to_RGB matrix once, on thread 0)
 * followed by the gamma decode to linear 16-bit.  Without gamma the
 * chain is returned unchanged. */
static GstLineCache *
chain_convert_to_RGB (GstVideoConverter * convert, GstLineCache * prev,
    gint idx)
{
  gboolean do_gamma;

  do_gamma = CHECK_GAMMA_REMAP (convert);

  if (do_gamma) {
    gint scale;

    /* Set up conversion matrices if needed, but only for the first thread */
    if (idx == 0 && !convert->unpack_rgb) {
      color_matrix_set_identity (&convert->to_RGB_matrix);
      compute_matrix_to_RGB (convert, &convert->to_RGB_matrix);

      /* matrix is in 0..1 range, scale to current bits */
      GST_DEBUG ("chain RGB convert");
      scale = 1 << convert->current_bits;
      color_matrix_scale_components (&convert->to_RGB_matrix,
          (float) scale, (float) scale, (float) scale);

      prepare_matrix (convert, &convert->to_RGB_matrix);

      if (convert->current_bits == 8)
        convert->current_format = GST_VIDEO_FORMAT_ARGB;
      else
        convert->current_format = GST_VIDEO_FORMAT_ARGB64;
    }

    prev = convert->to_RGB_lines[idx] = gst_line_cache_new (prev);
    prev->write_input = TRUE;
    prev->pass_alloc = FALSE;
    prev->n_lines = 1;
    prev->stride = convert->current_pstride * convert->current_width;
    gst_line_cache_set_need_line_func (prev,
        do_convert_to_RGB_lines, idx, convert, NULL);

    /* linearize; switches the working format to 16-bit ARGB64 */
    GST_DEBUG ("chain gamma decode");
    setup_gamma_decode (convert);
  }
  return prev;
}
+
/* Insert a horizontal scaler stage for thread @idx; afterwards the
 * working width is the output width. */
static GstLineCache *
chain_hscale (GstVideoConverter * convert, GstLineCache * prev, gint idx)
{
  gint method;
  guint taps;

  method = GET_OPT_RESAMPLER_METHOD (convert);
  taps = GET_OPT_RESAMPLER_TAPS (convert);

  convert->h_scaler[idx] =
      gst_video_scaler_new (method, GST_VIDEO_SCALER_FLAG_NONE, taps,
      convert->in_width, convert->out_width, convert->config);

  /* taps == 0 above means "auto"; read back the tap count chosen */
  gst_video_scaler_get_coeff (convert->h_scaler[idx], 0, NULL, &taps);

  GST_DEBUG ("chain hscale %d->%d, taps %d, method %d",
      convert->in_width, convert->out_width, taps, method);

  convert->current_width = convert->out_width;
  convert->h_scale_format = convert->current_format;

  prev = convert->hscale_lines[idx] = gst_line_cache_new (prev);
  prev->write_input = FALSE;
  prev->pass_alloc = FALSE;
  prev->n_lines = 1;
  prev->stride = convert->current_pstride * convert->current_width;
  gst_line_cache_set_need_line_func (prev, do_hscale_lines, idx, convert, NULL);

  return prev;
}
+
/* Insert a vertical scaler stage for thread @idx; afterwards the
 * working height is the output height.  For interlaced (non-alternate)
 * input an extra interlaced scaler is created and its tap count
 * becomes the previous stage's backlog. */
static GstLineCache *
chain_vscale (GstVideoConverter * convert, GstLineCache * prev, gint idx)
{
  gint method;
  guint taps, taps_i = 0;
  gint backlog = 0;

  method = GET_OPT_RESAMPLER_METHOD (convert);
  taps = GET_OPT_RESAMPLER_TAPS (convert);

  if (GST_VIDEO_INFO_IS_INTERLACED (&convert->in_info)
      && (GST_VIDEO_INFO_INTERLACE_MODE (&convert->in_info) !=
          GST_VIDEO_INTERLACE_MODE_ALTERNATE)) {
    convert->v_scaler_i[idx] =
        gst_video_scaler_new (method, GST_VIDEO_SCALER_FLAG_INTERLACED, taps,
        convert->in_height, convert->out_height, convert->config);

    gst_video_scaler_get_coeff (convert->v_scaler_i[idx], 0, NULL, &taps_i);
    /* interlaced scaling needs this many previous lines kept around */
    backlog = taps_i;
  }
  convert->v_scaler_p[idx] =
      gst_video_scaler_new (method, 0, taps, convert->in_height,
      convert->out_height, convert->config);
  convert->v_scale_width = convert->current_width;
  convert->v_scale_format = convert->current_format;
  convert->current_height = convert->out_height;

  /* taps == 0 above means "auto"; read back the tap count chosen */
  gst_video_scaler_get_coeff (convert->v_scaler_p[idx], 0, NULL, &taps);

  GST_DEBUG ("chain vscale %d->%d, taps %d, method %d, backlog %d",
      convert->in_height, convert->out_height, taps, method, backlog);

  prev->backlog = backlog;
  prev = convert->vscale_lines[idx] = gst_line_cache_new (prev);
  /* with a single tap the output line can reuse the input allocation */
  prev->pass_alloc = (taps == 1);
  prev->write_input = FALSE;
  prev->n_lines = MAX (taps_i, taps);
  prev->stride = convert->current_pstride * convert->current_width;
  gst_line_cache_set_need_line_func (prev, do_vscale_lines, idx, convert, NULL);

  return prev;
}
+
+ static GstLineCache *
+ chain_scale (GstVideoConverter * convert, GstLineCache * prev, gboolean force,
+ gint idx)
+ {
+ gint s0, s1, s2, s3;
+
+ s0 = convert->current_width * convert->current_height;
+ s3 = convert->out_width * convert->out_height;
+
+ GST_DEBUG ("in pixels %d <> out pixels %d", s0, s3);
+
+ if (s3 <= s0 || force) {
+ /* we are making the image smaller or are forced to resample */
+ s1 = convert->out_width * convert->current_height;
+ s2 = convert->current_width * convert->out_height;
+
+ GST_DEBUG ("%d <> %d", s1, s2);
+
+ if (s1 <= s2) {
+ /* h scaling first produces less pixels */
+ if (convert->current_width != convert->out_width)
+ prev = chain_hscale (convert, prev, idx);
+ if (convert->current_height != convert->out_height)
+ prev = chain_vscale (convert, prev, idx);
+ } else {
+ /* v scaling first produces less pixels */
+ if (convert->current_height != convert->out_height)
+ prev = chain_vscale (convert, prev, idx);
+ if (convert->current_width != convert->out_width)
+ prev = chain_hscale (convert, prev, idx);
+ }
+ }
+ return prev;
+ }
+
/* Add the colorspace-conversion stage (no-gamma path: one combined
 * matrix for bit depth, YCbCr matrix and primaries; gamma path: only
 * the primaries conversion, which then runs on linear 16-bit data).
 * The combined matrix is prepared once, on thread 0. */
static GstLineCache *
chain_convert (GstVideoConverter * convert, GstLineCache * prev, gint idx)
{
  gboolean do_gamma, do_conversion, pass_alloc = FALSE;
  gboolean same_matrix, same_primaries, same_bits;
  MatrixData p1, p2;

  same_bits = convert->unpack_bits == convert->pack_bits;
  if (CHECK_MATRIX_NONE (convert)) {
    same_matrix = TRUE;
  } else {
    same_matrix =
        convert->in_info.colorimetry.matrix ==
        convert->out_info.colorimetry.matrix;
  }

  if (CHECK_PRIMARIES_NONE (convert)) {
    same_primaries = TRUE;
  } else {
    same_primaries =
        convert->in_info.colorimetry.primaries ==
        convert->out_info.colorimetry.primaries;
  }

  GST_DEBUG ("matrix %d -> %d (%d)", convert->in_info.colorimetry.matrix,
      convert->out_info.colorimetry.matrix, same_matrix);
  GST_DEBUG ("bits %d -> %d (%d)", convert->unpack_bits, convert->pack_bits,
      same_bits);
  GST_DEBUG ("primaries %d -> %d (%d)", convert->in_info.colorimetry.primaries,
      convert->out_info.colorimetry.primaries, same_primaries);

  color_matrix_set_identity (&convert->convert_matrix);

  if (!same_primaries) {
    const GstVideoColorPrimariesInfo *pi;

    /* Convert from RGB_input to RGB_output via XYZ
     * res = XYZ_to_RGB_output ( RGB_to_XYZ_input ( input ) )
     * or in matricial form:
     * RGB_output = XYZ_to_RGB_output_matrix * RGB_TO_XYZ_input_matrix * RGB_input
     *
     * The RGB_input is the pre-existing convert_matrix
     * The convert_matrix will become the RGB_output
     */

    /* Convert input RGB to XYZ */
    pi = gst_video_color_primaries_get_info (convert->in_info.colorimetry.
        primaries);
    /* Get the RGB_TO_XYZ_input_matrix */
    color_matrix_RGB_to_XYZ (&p1, pi->Rx, pi->Ry, pi->Gx, pi->Gy, pi->Bx,
        pi->By, pi->Wx, pi->Wy);
    GST_DEBUG ("to XYZ matrix");
    color_matrix_debug (&p1);
    GST_DEBUG ("current matrix");
    /* convert_matrix = RGB_TO_XYZ_input_matrix * input_RGB */
    color_matrix_multiply (&convert->convert_matrix, &convert->convert_matrix,
        &p1);
    color_matrix_debug (&convert->convert_matrix);

    /* Convert XYZ to output RGB */
    pi = gst_video_color_primaries_get_info (convert->out_info.colorimetry.
        primaries);
    /* Calculate the XYZ_to_RGB_output_matrix
     * * Get the RGB_TO_XYZ_output_matrix
     * * invert it
     * * store in p2
     */
    color_matrix_RGB_to_XYZ (&p2, pi->Rx, pi->Ry, pi->Gx, pi->Gy, pi->Bx,
        pi->By, pi->Wx, pi->Wy);
    color_matrix_invert (&p2, &p2);
    GST_DEBUG ("to RGB matrix");
    color_matrix_debug (&p2);
    /* Finally:
     * convert_matrix = XYZ_to_RGB_output_matrix * RGB_TO_XYZ_input_matrix * RGB_input
     *                = XYZ_to_RGB_output_matrix * convert_matrix
     *                = p2 * convert_matrix
     */
    color_matrix_multiply (&convert->convert_matrix, &p2,
        &convert->convert_matrix);
    GST_DEBUG ("current matrix");
    color_matrix_debug (&convert->convert_matrix);
  }

  do_gamma = CHECK_GAMMA_REMAP (convert);
  if (!do_gamma) {

    convert->in_bits = convert->unpack_bits;
    convert->out_bits = convert->pack_bits;

    if (!same_bits || !same_matrix || !same_primaries) {
      /* no gamma, combine all conversions into 1 */
      if (convert->in_bits < convert->out_bits) {
        /* pre-scale down so the to-RGB/to-YUV composition works at the
         * larger depth */
        gint scale = 1 << (convert->out_bits - convert->in_bits);
        color_matrix_scale_components (&convert->convert_matrix,
            1 / (float) scale, 1 / (float) scale, 1 / (float) scale);
      }
      GST_DEBUG ("to RGB matrix");
      compute_matrix_to_RGB (convert, &convert->convert_matrix);
      GST_DEBUG ("current matrix");
      color_matrix_debug (&convert->convert_matrix);

      GST_DEBUG ("to YUV matrix");
      compute_matrix_to_YUV (convert, &convert->convert_matrix, FALSE);
      GST_DEBUG ("current matrix");
      color_matrix_debug (&convert->convert_matrix);
      if (convert->in_bits > convert->out_bits) {
        /* post-scale up to land on the output depth */
        gint scale = 1 << (convert->in_bits - convert->out_bits);
        color_matrix_scale_components (&convert->convert_matrix,
            (float) scale, (float) scale, (float) scale);
      }
      convert->current_bits = MAX (convert->in_bits, convert->out_bits);

      do_conversion = TRUE;
      if (!same_matrix || !same_primaries) {
        /* the prepared matrix is shared between threads */
        if (idx == 0)
          prepare_matrix (convert, &convert->convert_matrix);
      }
      if (convert->in_bits == convert->out_bits)
        pass_alloc = TRUE;
    } else
      do_conversion = FALSE;

    convert->current_bits = convert->pack_bits;
    convert->current_format = convert->pack_format;
    convert->current_pstride = convert->current_bits >> 1;
  } else {
    /* we did gamma, just do colorspace conversion if needed */
    if (same_primaries) {
      do_conversion = FALSE;
    } else {
      if (idx == 0)
        prepare_matrix (convert, &convert->convert_matrix);
      convert->in_bits = convert->out_bits = 16;
      pass_alloc = TRUE;
      do_conversion = TRUE;
    }
  }

  if (do_conversion) {
    GST_DEBUG ("chain conversion");
    prev = convert->convert_lines[idx] = gst_line_cache_new (prev);
    prev->write_input = TRUE;
    prev->pass_alloc = pass_alloc;
    prev->n_lines = 1;
    prev->stride = convert->current_pstride * convert->current_width;
    gst_line_cache_set_need_line_func (prev,
        do_convert_lines, idx, convert, NULL);
  }
  return prev;
}
+
+ static void
+ convert_set_alpha_u8 (GstVideoConverter * convert, gpointer pixels, gint width)
+ {
+ guint8 *p = pixels;
+ guint8 alpha = MIN (convert->alpha_value, 255);
+ int i;
+
+ for (i = 0; i < width; i++)
+ p[i * 4] = alpha;
+ }
+
+ static void
+ convert_set_alpha_u16 (GstVideoConverter * convert, gpointer pixels, gint width)
+ {
+ guint16 *p = pixels;
+ guint16 alpha;
+ int i;
+
+ alpha = MIN (convert->alpha_value, 255);
+ alpha |= alpha << 8;
+
+ for (i = 0; i < width; i++)
+ p[i * 4] = alpha;
+ }
+
+ static void
+ convert_mult_alpha_u8 (GstVideoConverter * convert, gpointer pixels, gint width)
+ {
+ guint8 *p = pixels;
+ guint alpha = convert->alpha_value;
+ int i;
+
+ for (i = 0; i < width; i++) {
+ gint a = (p[i * 4] * alpha) / 255;
+ p[i * 4] = CLAMP (a, 0, 255);
+ }
+ }
+
+ static void
+ convert_mult_alpha_u16 (GstVideoConverter * convert, gpointer pixels,
+ gint width)
+ {
+ guint16 *p = pixels;
+ guint alpha = convert->alpha_value;
+ int i;
+
+ for (i = 0; i < width; i++) {
+ gint a = (p[i * 4] * alpha) / 255;
+ p[i * 4] = CLAMP (a, 0, 65535);
+ }
+ }
+
+ static GstLineCache *
+ chain_alpha (GstVideoConverter * convert, GstLineCache * prev, gint idx)
+ {
+ switch (convert->alpha_mode) {
+ case ALPHA_MODE_NONE:
+ case ALPHA_MODE_COPY:
+ return prev;
+
+ case ALPHA_MODE_SET:
+ if (convert->current_bits == 8)
+ convert->alpha_func = convert_set_alpha_u8;
+ else
+ convert->alpha_func = convert_set_alpha_u16;
+ break;
+ case ALPHA_MODE_MULT:
+ if (convert->current_bits == 8)
+ convert->alpha_func = convert_mult_alpha_u8;
+ else
+ convert->alpha_func = convert_mult_alpha_u16;
+ break;
+ }
+
+ GST_DEBUG ("chain alpha mode %d", convert->alpha_mode);
+ prev = convert->alpha_lines[idx] = gst_line_cache_new (prev);
+ prev->write_input = TRUE;
+ prev->pass_alloc = TRUE;
+ prev->n_lines = 1;
+ prev->stride = convert->current_pstride * convert->current_width;
+ gst_line_cache_set_need_line_func (prev, do_alpha_lines, idx, convert, NULL);
+
+ return prev;
+ }
+
/* When gamma remapping is enabled, add the gamma encode back to the
 * output transfer function followed by the R'G'B' -> output conversion
 * (setting up the to_YUV matrix once, on thread 0).  Without gamma the
 * chain is returned unchanged. */
static GstLineCache *
chain_convert_to_YUV (GstVideoConverter * convert, GstLineCache * prev,
    gint idx)
{
  gboolean do_gamma;

  do_gamma = CHECK_GAMMA_REMAP (convert);

  if (do_gamma) {
    gint scale;

    GST_DEBUG ("chain gamma encode");
    setup_gamma_encode (convert, convert->pack_bits);

    /* gamma encode leaves us at the pack depth */
    convert->current_bits = convert->pack_bits;
    convert->current_pstride = convert->current_bits >> 1;

    if (idx == 0 && !convert->pack_rgb) {
      color_matrix_set_identity (&convert->to_YUV_matrix);
      compute_matrix_to_YUV (convert, &convert->to_YUV_matrix, FALSE);

      /* matrix is in 0..255 range, scale to pack bits */
      GST_DEBUG ("chain YUV convert");
      scale = 1 << convert->pack_bits;
      color_matrix_scale_components (&convert->to_YUV_matrix,
          1 / (float) scale, 1 / (float) scale, 1 / (float) scale);
      prepare_matrix (convert, &convert->to_YUV_matrix);
    }
    convert->current_format = convert->pack_format;

    prev = convert->to_YUV_lines[idx] = gst_line_cache_new (prev);
    prev->write_input = FALSE;
    prev->pass_alloc = FALSE;
    prev->n_lines = 1;
    prev->stride = convert->current_pstride * convert->current_width;
    gst_line_cache_set_need_line_func (prev,
        do_convert_to_YUV_lines, idx, convert, NULL);
  }

  return prev;
}
+
+ static GstLineCache *
+ chain_downsample (GstVideoConverter * convert, GstLineCache * prev, gint idx)
+ {
+ if (convert->downsample_p[idx] || convert->downsample_i[idx]) {
+ GST_DEBUG ("chain downsample");
+ prev = convert->downsample_lines[idx] = gst_line_cache_new (prev);
+ prev->write_input = TRUE;
+ prev->pass_alloc = TRUE;
+ /* XXX: why this hardcoded value? */
+ prev->n_lines = 5;
+ prev->stride = convert->current_pstride * convert->current_width;
+ gst_line_cache_set_need_line_func (prev,
+ do_downsample_lines, idx, convert, NULL);
+ }
+ return prev;
+ }
+
/* Optionally chain a dithering stage before packing. Computes, per
 * component, the quantization step implied by reducing the current working
 * depth to the output depth (possibly raised to the requested target
 * quantization) and only chains a dither line cache when at least one
 * component actually loses precision. */
static GstLineCache *
chain_dither (GstVideoConverter * convert, GstLineCache * prev, gint idx)
{
  gint i;
  gboolean do_dither = FALSE;
  GstVideoDitherFlags flags = 0;
  GstVideoDitherMethod method;
  guint quant[4], target_quant;

  method = GET_OPT_DITHER_METHOD (convert);
  if (method == GST_VIDEO_DITHER_NONE)
    return prev;

  target_quant = GET_OPT_DITHER_QUANTIZATION (convert);
  GST_DEBUG ("method %d, target-quantization %d", method, target_quant);

  if (convert->pack_pal) {
    /* NOTE(review): fixed quantization for paletted output — presumably
     * tuned for the standard 8-bit palette; confirm against pack_pal setup */
    quant[0] = 47;
    quant[1] = 47;
    quant[2] = 47;
    quant[3] = 1;
    do_dither = TRUE;
  } else {
    for (i = 0; i < GST_VIDEO_MAX_COMPONENTS; i++) {
      gint depth;

      depth = convert->out_info.finfo->depth[i];

      /* component not present in the output format */
      if (depth == 0) {
        quant[i] = 0;
        continue;
      }

      if (convert->current_bits >= depth) {
        /* quantization step when truncating current_bits to depth */
        quant[i] = 1 << (convert->current_bits - depth);
        if (target_quant > quant[i]) {
          flags |= GST_VIDEO_DITHER_FLAG_QUANTIZE;
          quant[i] = target_quant;
        }
      } else {
        quant[i] = 0;
      }
      /* a step of 1 means no precision is lost, no dithering needed */
      if (quant[i] > 1)
        do_dither = TRUE;
    }
  }

  if (do_dither) {
    GST_DEBUG ("chain dither");

    convert->dither[idx] = gst_video_dither_new (method,
        flags, convert->pack_format, quant, convert->current_width);

    prev = convert->dither_lines[idx] = gst_line_cache_new (prev);
    prev->write_input = TRUE;
    prev->pass_alloc = TRUE;
    prev->n_lines = 1;
    prev->stride = convert->current_pstride * convert->current_width;
    gst_line_cache_set_need_line_func (prev, do_dither_lines, idx, convert,
        NULL);
  }
  return prev;
}
+
/* Final stage: record the parameters needed to pack lines into the output
 * format. No line cache is chained here; packing happens when the produced
 * lines are consumed. When the output format equals its own unpack format,
 * packing is the identity and lines can be written to the destination
 * frame directly (see setup_allocators). */
static GstLineCache *
chain_pack (GstVideoConverter * convert, GstLineCache * prev, gint idx)
{
  convert->pack_nlines = convert->out_info.finfo->pack_lines;
  convert->pack_pstride = convert->current_pstride;
  convert->identity_pack =
      (convert->out_info.finfo->format ==
      convert->out_info.finfo->unpack_format);
  GST_DEBUG ("chain pack line format %s, pstride %d, identity_pack %d (%d %d)",
      gst_video_format_to_string (convert->current_format),
      convert->current_pstride, convert->identity_pack,
      convert->out_info.finfo->format, convert->out_info.finfo->unpack_format);

  return prev;
}
+
/* Assign line allocators to every cache in each per-thread conversion
 * chain. Walks each chain twice: a first pass propagates the maximum
 * number of lines needed between non-pass-alloc boundaries, a second pass
 * installs the allocator functions, creating a fresh temp-line allocator
 * whenever a cache cannot pass its allocator upstream. */
static void
setup_allocators (GstVideoConverter * convert)
{
  GstLineCache *cache, *prev;
  GstLineCacheAllocLineFunc alloc_line;
  gboolean alloc_writable;
  gpointer user_data;
  GDestroyNotify notify;
  gint width;
  gint i;

  /* temp lines must hold the widest of input/output plus the left border */
  width = MAX (convert->in_maxwidth, convert->out_maxwidth);
  width += convert->out_x;

  for (i = 0; i < convert->conversion_runner->n_threads; i++) {
    /* start with using dest lines if we can directly write into it */
    if (convert->identity_pack) {
      alloc_line = get_dest_line;
      alloc_writable = TRUE;
      user_data = convert;
      notify = NULL;
    } else {
      user_data =
          converter_alloc_new (sizeof (guint16) * width * 4, 4 + BACKLOG,
          convert, NULL);
      setup_border_alloc (convert, user_data);
      notify = (GDestroyNotify) converter_alloc_free;
      alloc_line = get_border_temp_line;
      /* when we add a border, we need to write */
      alloc_writable = convert->borderline != NULL;
    }

    /* First step, try to calculate how many temp lines we need. Go backwards,
     * keep track of the maximum number of lines we need for each intermediate
     * step. */
    for (prev = cache = convert->pack_lines[i]; cache; cache = cache->prev) {
      GST_DEBUG ("looking at cache %p, %d lines, %d backlog", cache,
          cache->n_lines, cache->backlog);
      prev->n_lines = MAX (prev->n_lines, cache->n_lines);
      if (!cache->pass_alloc) {
        GST_DEBUG ("cache %p, needs %d lines", prev, prev->n_lines);
        prev = cache;
      }
    }

    /* now walk backwards, we try to write into the dest lines directly
     * and keep track if the source needs to be writable */
    for (cache = convert->pack_lines[i]; cache; cache = cache->prev) {
      gst_line_cache_set_alloc_line_func (cache, alloc_line, user_data, notify);
      cache->alloc_writable = alloc_writable;

      /* make sure only one cache frees the allocator */
      notify = NULL;

      if (!cache->pass_alloc) {
        /* can't pass allocator, make new temp line allocator */
        user_data =
            converter_alloc_new (sizeof (guint16) * width * 4,
            cache->n_lines + cache->backlog, convert, NULL);
        notify = (GDestroyNotify) converter_alloc_free;
        alloc_line = get_temp_line;
        alloc_writable = FALSE;
      }
      /* if someone writes to the input, we need a writable line from the
       * previous cache */
      if (cache->write_input)
        alloc_writable = TRUE;
    }
    /* free leftover allocator */
    if (notify)
      notify (user_data);
  }
}
+
/* Prepare the line used to fill the destination borders. Only allocated
 * when border filling is enabled and the converted rectangle is smaller
 * than the destination frame. The configured ARGB border color is
 * converted to the output colorspace (via the to-YUV matrix for YUV
 * outputs), splatted across a full-width line in the unpacked format, and
 * additionally packed into convert->borders for direct plane filling. */
static void
setup_borderline (GstVideoConverter * convert)
{
  gint width;

  width = MAX (convert->in_maxwidth, convert->out_maxwidth);
  width += convert->out_x;

  if (convert->fill_border && (convert->out_height < convert->out_maxheight ||
          convert->out_width < convert->out_maxwidth)) {
    guint32 border_val;
    gint i, w_sub;
    const GstVideoFormatInfo *out_finfo;
    gpointer planes[GST_VIDEO_MAX_PLANES];
    gint strides[GST_VIDEO_MAX_PLANES];

    /* room for one full-width line of 4 x 16-bit components */
    convert->borderline = g_malloc0 (sizeof (guint16) * width * 4);

    out_finfo = convert->out_info.finfo;

    if (GST_VIDEO_INFO_IS_YUV (&convert->out_info)) {
      MatrixData cm;
      gint a, r, g, b;
      gint y, u, v;

      /* Get Color matrix. */
      color_matrix_set_identity (&cm);
      compute_matrix_to_YUV (convert, &cm, TRUE);
      color_matrix_convert (&cm);

      border_val = GINT32_FROM_BE (convert->border_argb);

      /* after the byte swap the ARGB components sit in reversed order */
      b = (0xFF000000 & border_val) >> 24;
      g = (0x00FF0000 & border_val) >> 16;
      r = (0x0000FF00 & border_val) >> 8;
      a = (0x000000FF & border_val);

      /* apply the 8-bit fixed-point RGB->YUV matrix */
      y = 16 + ((r * cm.im[0][0] + g * cm.im[0][1] + b * cm.im[0][2]) >> 8);
      u = 128 + ((r * cm.im[1][0] + g * cm.im[1][1] + b * cm.im[1][2]) >> 8);
      v = 128 + ((r * cm.im[2][0] + g * cm.im[2][1] + b * cm.im[2][2]) >> 8);

      a = CLAMP (a, 0, 255);
      y = CLAMP (y, 0, 255);
      u = CLAMP (u, 0, 255);
      v = CLAMP (v, 0, 255);

      border_val = a | (y << 8) | (u << 16) | ((guint32) v << 24);
    } else {
      border_val = GINT32_FROM_BE (convert->border_argb);
    }
    /* splat the border pixel across the whole line, 8 or 16 bit wide */
    if (convert->pack_bits == 8)
      video_orc_splat_u32 (convert->borderline, border_val, width);
    else
      video_orc_splat2_u64 (convert->borderline, border_val, width);

    /* convert pixels */
    for (i = 0; i < out_finfo->n_planes; i++) {
      planes[i] = &convert->borders[i];
      strides[i] = sizeof (guint64);
    }
    w_sub = 0;
    if (out_finfo->n_planes == 1) {
      /* for packed formats, convert based on subsampling so that we
       * get a complete group of pixels */
      for (i = 0; i < out_finfo->n_components; i++) {
        w_sub = MAX (w_sub, out_finfo->w_sub[i]);
      }
    }
    out_finfo->pack_func (out_finfo, GST_VIDEO_PACK_FLAG_NONE,
        convert->borderline, 0, planes, strides,
        GST_VIDEO_CHROMA_SITE_UNKNOWN, 0, 1 << w_sub);
  } else {
    convert->borderline = NULL;
  }
}
+
+ static AlphaMode
+ convert_get_alpha_mode (GstVideoConverter * convert)
+ {
+ gboolean in_alpha, out_alpha;
+
+ in_alpha = GST_VIDEO_INFO_HAS_ALPHA (&convert->in_info);
+ out_alpha = GST_VIDEO_INFO_HAS_ALPHA (&convert->out_info);
+
+ /* no output alpha, do nothing */
+ if (!out_alpha)
+ return ALPHA_MODE_NONE;
+
+ if (in_alpha) {
+ /* in and out */
+ if (CHECK_ALPHA_COPY (convert))
+ return ALPHA_MODE_COPY;
+
+ if (CHECK_ALPHA_MULT (convert)) {
+ if (GET_OPT_ALPHA_VALUE (convert) == 1.0)
+ return ALPHA_MODE_COPY;
+ else
+ return ALPHA_MODE_MULT;
+ }
+ }
+ /* nothing special, this is what unpack etc does automatically */
+ if (GET_OPT_ALPHA_VALUE (convert) == 1.0)
+ return ALPHA_MODE_NONE;
+
+ /* everything else becomes SET */
+ return ALPHA_MODE_SET;
+ }
+
+ /**
+ * gst_video_converter_new_with_pool: (skip)
+ * @in_info: a #GstVideoInfo
+ * @out_info: a #GstVideoInfo
+ * @config: (transfer full): a #GstStructure with configuration options
+ * @pool: (nullable): a #GstTaskPool to spawn threads from
+ *
+ * Create a new converter object to convert between @in_info and @out_info
+ * with @config.
+ *
+ * The optional @pool can be used to spawn threads, this is useful when
+ * creating new converters rapidly, for example when updating cropping.
+ *
+ * Returns: a #GstVideoConverter or %NULL if conversion is not possible.
+ *
+ * Since: 1.20
+ */
+ GstVideoConverter *
+ gst_video_converter_new_with_pool (const GstVideoInfo * in_info,
+ const GstVideoInfo * out_info, GstStructure * config, GstTaskPool * pool)
+ {
+ GstVideoConverter *convert;
+ GstLineCache *prev;
+ const GstVideoFormatInfo *fin, *fout, *finfo;
+ gdouble alpha_value;
+ gint n_threads, i;
+ gboolean async_tasks;
+
+ g_return_val_if_fail (in_info != NULL, NULL);
+ g_return_val_if_fail (out_info != NULL, NULL);
+ /* we won't ever do framerate conversion */
+ g_return_val_if_fail (in_info->fps_n == out_info->fps_n, NULL);
+ g_return_val_if_fail (in_info->fps_d == out_info->fps_d, NULL);
+ /* we won't ever do deinterlace */
+ g_return_val_if_fail (in_info->interlace_mode == out_info->interlace_mode,
+ NULL);
+
+ convert = g_slice_new0 (GstVideoConverter);
+
+ fin = in_info->finfo;
+ fout = out_info->finfo;
+
+ convert->in_info = *in_info;
+ convert->out_info = *out_info;
+
+ /* default config */
+ convert->config = gst_structure_new_empty ("GstVideoConverter");
+ if (config)
+ gst_video_converter_set_config (convert, config);
+
+ convert->in_maxwidth = GST_VIDEO_INFO_WIDTH (in_info);
+ convert->in_maxheight = GST_VIDEO_INFO_FIELD_HEIGHT (in_info);
+ convert->out_maxwidth = GST_VIDEO_INFO_WIDTH (out_info);
+ convert->out_maxheight = GST_VIDEO_INFO_FIELD_HEIGHT (out_info);
+
+ convert->in_x = get_opt_int (convert, GST_VIDEO_CONVERTER_OPT_SRC_X, 0);
+ convert->in_y = get_opt_int (convert, GST_VIDEO_CONVERTER_OPT_SRC_Y, 0);
+ convert->in_x &= ~((1 << fin->w_sub[1]) - 1);
+ convert->in_y &= ~((1 << fin->h_sub[1]) - 1);
+
+ convert->in_width = get_opt_int (convert,
+ GST_VIDEO_CONVERTER_OPT_SRC_WIDTH, convert->in_maxwidth - convert->in_x);
+ convert->in_height = get_opt_int (convert,
+ GST_VIDEO_CONVERTER_OPT_SRC_HEIGHT,
+ convert->in_maxheight - convert->in_y);
+
+ convert->in_width =
+ MIN (convert->in_width, convert->in_maxwidth - convert->in_x);
+ if (convert->in_width + convert->in_x < 0 ||
+ convert->in_width + convert->in_x > convert->in_maxwidth) {
+ convert->in_width = 0;
+ }
+
+ convert->in_height =
+ MIN (convert->in_height, convert->in_maxheight - convert->in_y);
+ if (convert->in_height + convert->in_y < 0 ||
+ convert->in_height + convert->in_y > convert->in_maxheight) {
+ convert->in_height = 0;
+ }
+
+ convert->out_x = get_opt_int (convert, GST_VIDEO_CONVERTER_OPT_DEST_X, 0);
+ convert->out_y = get_opt_int (convert, GST_VIDEO_CONVERTER_OPT_DEST_Y, 0);
+ convert->out_x &= ~((1 << fout->w_sub[1]) - 1);
+ convert->out_y &= ~((1 << fout->h_sub[1]) - 1);
+
+ convert->out_width = get_opt_int (convert,
+ GST_VIDEO_CONVERTER_OPT_DEST_WIDTH,
+ convert->out_maxwidth - convert->out_x);
+ convert->out_height =
+ get_opt_int (convert, GST_VIDEO_CONVERTER_OPT_DEST_HEIGHT,
+ convert->out_maxheight - convert->out_y);
+
+ if (convert->out_width > convert->out_maxwidth - convert->out_x)
+ convert->out_width = convert->out_maxwidth - convert->out_x;
+ convert->out_width = CLAMP (convert->out_width, 0, convert->out_maxwidth);
+
+ /* Check if completely outside the framebuffer */
+ if (convert->out_width + convert->out_x < 0 ||
+ convert->out_width + convert->out_x > convert->out_maxwidth) {
+ convert->out_width = 0;
+ }
+
+ /* Same for height */
+ if (convert->out_height > convert->out_maxheight - convert->out_y)
+ convert->out_height = convert->out_maxheight - convert->out_y;
+ convert->out_height = CLAMP (convert->out_height, 0, convert->out_maxheight);
+
+ if (convert->out_height + convert->out_y < 0 ||
+ convert->out_height + convert->out_y > convert->out_maxheight) {
+ convert->out_height = 0;
+ }
+
+ convert->fill_border = GET_OPT_FILL_BORDER (convert);
+ convert->border_argb = get_opt_uint (convert,
+ GST_VIDEO_CONVERTER_OPT_BORDER_ARGB, DEFAULT_OPT_BORDER_ARGB);
+
+ alpha_value = GET_OPT_ALPHA_VALUE (convert);
+ convert->alpha_value = 255 * alpha_value;
+ convert->alpha_mode = convert_get_alpha_mode (convert);
+
+ convert->unpack_format = in_info->finfo->unpack_format;
+ finfo = gst_video_format_get_info (convert->unpack_format);
+ convert->unpack_bits = GST_VIDEO_FORMAT_INFO_DEPTH (finfo, 0);
+ convert->unpack_rgb = GST_VIDEO_FORMAT_INFO_IS_RGB (finfo);
+ if (convert->unpack_rgb
+ && in_info->colorimetry.matrix != GST_VIDEO_COLOR_MATRIX_RGB) {
+ /* force identity matrix for RGB input */
+ GST_WARNING ("invalid matrix %d for input RGB format, using RGB",
+ in_info->colorimetry.matrix);
+ convert->in_info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_RGB;
+ }
+
+ convert->pack_format = out_info->finfo->unpack_format;
+ finfo = gst_video_format_get_info (convert->pack_format);
+ convert->pack_bits = GST_VIDEO_FORMAT_INFO_DEPTH (finfo, 0);
+ convert->pack_rgb = GST_VIDEO_FORMAT_INFO_IS_RGB (finfo);
+ convert->pack_pal =
+ gst_video_format_get_palette (GST_VIDEO_INFO_FORMAT (out_info),
+ &convert->pack_palsize);
+ if (convert->pack_rgb
+ && out_info->colorimetry.matrix != GST_VIDEO_COLOR_MATRIX_RGB) {
+ /* force identity matrix for RGB output */
+ GST_WARNING ("invalid matrix %d for output RGB format, using RGB",
+ out_info->colorimetry.matrix);
+ convert->out_info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_RGB;
+ }
+
+ n_threads = get_opt_uint (convert, GST_VIDEO_CONVERTER_OPT_THREADS, 1);
+ if (n_threads == 0 || n_threads > g_get_num_processors ())
+ n_threads = g_get_num_processors ();
+ /* Magic number of 200 lines */
+ if (MAX (convert->out_height, convert->in_height) / n_threads < 200)
+ n_threads = (MAX (convert->out_height, convert->in_height) + 199) / 200;
+ if (n_threads < 1)
+ n_threads = 1;
+
+ async_tasks = GET_OPT_ASYNC_TASKS (convert);
+ convert->conversion_runner =
+ gst_parallelized_task_runner_new (n_threads, pool, async_tasks);
+
+ if (video_converter_lookup_fastpath (convert))
+ goto done;
+
+ if (in_info->finfo->unpack_func == NULL)
+ goto no_unpack_func;
+
+ if (out_info->finfo->pack_func == NULL)
+ goto no_pack_func;
+
+ convert->convert = video_converter_generic;
+
+ convert->upsample_p = g_new0 (GstVideoChromaResample *, n_threads);
+ convert->upsample_i = g_new0 (GstVideoChromaResample *, n_threads);
+ convert->downsample_p = g_new0 (GstVideoChromaResample *, n_threads);
+ convert->downsample_i = g_new0 (GstVideoChromaResample *, n_threads);
+ convert->v_scaler_p = g_new0 (GstVideoScaler *, n_threads);
+ convert->v_scaler_i = g_new0 (GstVideoScaler *, n_threads);
+ convert->h_scaler = g_new0 (GstVideoScaler *, n_threads);
+ convert->unpack_lines = g_new0 (GstLineCache *, n_threads);
+ convert->pack_lines = g_new0 (GstLineCache *, n_threads);
+ convert->upsample_lines = g_new0 (GstLineCache *, n_threads);
+ convert->to_RGB_lines = g_new0 (GstLineCache *, n_threads);
+ convert->hscale_lines = g_new0 (GstLineCache *, n_threads);
+ convert->vscale_lines = g_new0 (GstLineCache *, n_threads);
+ convert->convert_lines = g_new0 (GstLineCache *, n_threads);
+ convert->alpha_lines = g_new0 (GstLineCache *, n_threads);
+ convert->to_YUV_lines = g_new0 (GstLineCache *, n_threads);
+ convert->downsample_lines = g_new0 (GstLineCache *, n_threads);
+ convert->dither_lines = g_new0 (GstLineCache *, n_threads);
+ convert->dither = g_new0 (GstVideoDither *, n_threads);
+
+ if (convert->in_width > 0 && convert->out_width > 0 && convert->in_height > 0
+ && convert->out_height > 0) {
+ for (i = 0; i < n_threads; i++) {
+ convert->current_format = GST_VIDEO_INFO_FORMAT (in_info);
+ convert->current_width = convert->in_width;
+ convert->current_height = convert->in_height;
+
+ /* unpack */
+ prev = chain_unpack_line (convert, i);
+ /* upsample chroma */
+ prev = chain_upsample (convert, prev, i);
+ /* convert to gamma decoded RGB */
+ prev = chain_convert_to_RGB (convert, prev, i);
+ /* do all downscaling */
+ prev = chain_scale (convert, prev, FALSE, i);
+ /* do conversion between color spaces */
+ prev = chain_convert (convert, prev, i);
+ /* do alpha channels */
+ prev = chain_alpha (convert, prev, i);
+ /* do all remaining (up)scaling */
+ prev = chain_scale (convert, prev, TRUE, i);
+ /* convert to gamma encoded Y'Cb'Cr' */
+ prev = chain_convert_to_YUV (convert, prev, i);
+ /* downsample chroma */
+ prev = chain_downsample (convert, prev, i);
+ /* dither */
+ prev = chain_dither (convert, prev, i);
+ /* pack into final format */
+ convert->pack_lines[i] = chain_pack (convert, prev, i);
+ }
+ }
+
+ setup_borderline (convert);
+ /* now figure out allocators */
+ setup_allocators (convert);
+
+ done:
+ return convert;
+
+ /* ERRORS */
+ no_unpack_func:
+ {
+ GST_ERROR ("no unpack_func for format %s",
+ gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (in_info)));
+ gst_video_converter_free (convert);
+ return NULL;
+ }
+ no_pack_func:
+ {
+ GST_ERROR ("no pack_func for format %s",
+ gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (out_info)));
+ gst_video_converter_free (convert);
+ return NULL;
+ }
+ }
+
+ /**
+ * gst_video_converter_new: (skip)
+ * @in_info: a #GstVideoInfo
+ * @out_info: a #GstVideoInfo
+ * @config: (transfer full): a #GstStructure with configuration options
+ *
+ * Create a new converter object to convert between @in_info and @out_info
+ * with @config.
+ *
+ * Returns: a #GstVideoConverter or %NULL if conversion is not possible.
+ *
+ * Since: 1.6
+ */
+ GstVideoConverter *
+ gst_video_converter_new (const GstVideoInfo * in_info,
+ const GstVideoInfo * out_info, GstStructure * config)
+ {
+ return gst_video_converter_new_with_pool (in_info, out_info, config, NULL);
+ }
+
+ static void
+ clear_matrix_data (MatrixData * data)
+ {
+ g_free (data->t_r);
+ g_free (data->t_g);
+ g_free (data->t_b);
+ }
+
+ /**
+ * gst_video_converter_free:
+ * @convert: a #GstVideoConverter
+ *
+ * Free @convert
+ *
+ * Since: 1.6
+ */
+ void
+ gst_video_converter_free (GstVideoConverter * convert)
+ {
+ guint i, j;
+
+ g_return_if_fail (convert != NULL);
+
+ for (i = 0; i < convert->conversion_runner->n_threads; i++) {
+ if (convert->upsample_p && convert->upsample_p[i])
+ gst_video_chroma_resample_free (convert->upsample_p[i]);
+ if (convert->upsample_i && convert->upsample_i[i])
+ gst_video_chroma_resample_free (convert->upsample_i[i]);
+ if (convert->downsample_p && convert->downsample_p[i])
+ gst_video_chroma_resample_free (convert->downsample_p[i]);
+ if (convert->downsample_i && convert->downsample_i[i])
+ gst_video_chroma_resample_free (convert->downsample_i[i]);
+ if (convert->v_scaler_p && convert->v_scaler_p[i])
+ gst_video_scaler_free (convert->v_scaler_p[i]);
+ if (convert->v_scaler_i && convert->v_scaler_i[i])
+ gst_video_scaler_free (convert->v_scaler_i[i]);
+ if (convert->h_scaler && convert->h_scaler[i])
+ gst_video_scaler_free (convert->h_scaler[i]);
+ if (convert->unpack_lines && convert->unpack_lines[i])
+ gst_line_cache_free (convert->unpack_lines[i]);
+ if (convert->upsample_lines && convert->upsample_lines[i])
+ gst_line_cache_free (convert->upsample_lines[i]);
+ if (convert->to_RGB_lines && convert->to_RGB_lines[i])
+ gst_line_cache_free (convert->to_RGB_lines[i]);
+ if (convert->hscale_lines && convert->hscale_lines[i])
+ gst_line_cache_free (convert->hscale_lines[i]);
+ if (convert->vscale_lines && convert->vscale_lines[i])
+ gst_line_cache_free (convert->vscale_lines[i]);
+ if (convert->convert_lines && convert->convert_lines[i])
+ gst_line_cache_free (convert->convert_lines[i]);
+ if (convert->alpha_lines && convert->alpha_lines[i])
+ gst_line_cache_free (convert->alpha_lines[i]);
+ if (convert->to_YUV_lines && convert->to_YUV_lines[i])
+ gst_line_cache_free (convert->to_YUV_lines[i]);
+ if (convert->downsample_lines && convert->downsample_lines[i])
+ gst_line_cache_free (convert->downsample_lines[i]);
+ if (convert->dither_lines && convert->dither_lines[i])
+ gst_line_cache_free (convert->dither_lines[i]);
+ if (convert->dither && convert->dither[i])
+ gst_video_dither_free (convert->dither[i]);
+ }
+ g_free (convert->upsample_p);
+ g_free (convert->upsample_i);
+ g_free (convert->downsample_p);
+ g_free (convert->downsample_i);
+ g_free (convert->v_scaler_p);
+ g_free (convert->v_scaler_i);
+ g_free (convert->h_scaler);
+ g_free (convert->unpack_lines);
+ g_free (convert->pack_lines);
+ g_free (convert->upsample_lines);
+ g_free (convert->to_RGB_lines);
+ g_free (convert->hscale_lines);
+ g_free (convert->vscale_lines);
+ g_free (convert->convert_lines);
+ g_free (convert->alpha_lines);
+ g_free (convert->to_YUV_lines);
+ g_free (convert->downsample_lines);
+ g_free (convert->dither_lines);
+ g_free (convert->dither);
+
+ g_free (convert->gamma_dec.gamma_table);
+ g_free (convert->gamma_enc.gamma_table);
+
+ if (convert->tmpline) {
+ for (i = 0; i < convert->conversion_runner->n_threads; i++)
+ g_free (convert->tmpline[i]);
+ g_free (convert->tmpline);
+ }
+
+ g_free (convert->borderline);
+
+ if (convert->config)
+ gst_structure_free (convert->config);
+
+ for (i = 0; i < 4; i++) {
+ for (j = 0; j < convert->conversion_runner->n_threads; j++) {
+ if (convert->fv_scaler[i].scaler)
+ gst_video_scaler_free (convert->fv_scaler[i].scaler[j]);
+ if (convert->fh_scaler[i].scaler)
+ gst_video_scaler_free (convert->fh_scaler[i].scaler[j]);
+ }
+ g_free (convert->fv_scaler[i].scaler);
+ g_free (convert->fh_scaler[i].scaler);
+ }
+
+ if (convert->conversion_runner)
+ gst_parallelized_task_runner_free (convert->conversion_runner);
+
+ clear_matrix_data (&convert->to_RGB_matrix);
+ clear_matrix_data (&convert->convert_matrix);
+ clear_matrix_data (&convert->to_YUV_matrix);
+
+ for (i = 0; i < 4; i++) {
+ g_free (convert->tasks[i]);
+ g_free (convert->tasks_p[i]);
+ }
+
+ g_slice_free (GstVideoConverter, convert);
+ }
+
+ static gboolean
+ copy_config (GQuark field_id, const GValue * value, gpointer user_data)
+ {
+ GstVideoConverter *convert = user_data;
+
+ gst_structure_id_set_value (convert->config, field_id, value);
+
+ return TRUE;
+ }
+
+ /**
+ * gst_video_converter_set_config:
+ * @convert: a #GstVideoConverter
+ * @config: (transfer full): a #GstStructure
+ *
+ * Set @config as extra configuration for @convert.
+ *
+ * If the parameters in @config can not be set exactly, this function returns
+ * %FALSE and will try to update as much state as possible. The new state can
+ * then be retrieved and refined with gst_video_converter_get_config().
+ *
+ * Look at the `GST_VIDEO_CONVERTER_OPT_*` fields to check valid configuration
+ * option and values.
+ *
+ * Returns: %TRUE when @config could be set.
+ *
+ * Since: 1.6
+ */
+ gboolean
+ gst_video_converter_set_config (GstVideoConverter * convert,
+ GstStructure * config)
+ {
+ g_return_val_if_fail (convert != NULL, FALSE);
+ g_return_val_if_fail (config != NULL, FALSE);
+
+ gst_structure_foreach (config, copy_config, convert);
+ gst_structure_free (config);
+
+ return TRUE;
+ }
+
+ /**
+ * gst_video_converter_get_config:
+ * @convert: a #GstVideoConverter
+ *
+ * Get the current configuration of @convert.
+ *
+ * Returns: a #GstStructure that remains valid for as long as @convert is valid
+ * or until gst_video_converter_set_config() is called.
+ */
+ const GstStructure *
+ gst_video_converter_get_config (GstVideoConverter * convert)
+ {
+ g_return_val_if_fail (convert != NULL, NULL);
+
+ return convert->config;
+ }
+
+ /**
+ * gst_video_converter_frame:
+ * @convert: a #GstVideoConverter
+ * @dest: a #GstVideoFrame
+ * @src: a #GstVideoFrame
+ *
+ * Convert the pixels of @src into @dest using @convert.
+ *
+ * If #GST_VIDEO_CONVERTER_OPT_ASYNC_TASKS is %TRUE then this function will
+ * return immediately and needs to be followed by a call to
+ * gst_video_converter_frame_finish().
+ *
+ * Since: 1.6
+ */
+ void
+ gst_video_converter_frame (GstVideoConverter * convert,
+ const GstVideoFrame * src, GstVideoFrame * dest)
+ {
+ g_return_if_fail (convert != NULL);
+ g_return_if_fail (src != NULL);
+ g_return_if_fail (dest != NULL);
+
+ /* Check the frames we've been passed match the layout
+ * we were configured for or we might go out of bounds */
+ if (G_UNLIKELY (GST_VIDEO_INFO_FORMAT (&convert->in_info) !=
+ GST_VIDEO_FRAME_FORMAT (src)
+ || GST_VIDEO_INFO_WIDTH (&convert->in_info) >
+ GST_VIDEO_FRAME_WIDTH (src)
+ || GST_VIDEO_INFO_FIELD_HEIGHT (&convert->in_info) >
+ GST_VIDEO_FRAME_HEIGHT (src))) {
+ g_critical ("Input video frame does not match configuration");
+ return;
+ }
+ if (G_UNLIKELY (GST_VIDEO_INFO_FORMAT (&convert->out_info) !=
+ GST_VIDEO_FRAME_FORMAT (dest)
+ || GST_VIDEO_INFO_WIDTH (&convert->out_info) >
+ GST_VIDEO_FRAME_WIDTH (dest)
+ || GST_VIDEO_INFO_FIELD_HEIGHT (&convert->out_info) >
+ GST_VIDEO_FRAME_HEIGHT (dest))) {
+ g_critical ("Output video frame does not match configuration");
+ return;
+ }
+
+ if (G_UNLIKELY (convert->in_width == 0 || convert->in_height == 0 ||
+ convert->out_width == 0 || convert->out_height == 0))
+ return;
+
+ convert->convert (convert, src, dest);
+ }
+
+ /**
+ * gst_video_converter_frame_finish:
+ * @convert: a #GstVideoConverter
+ *
+ * Wait for a previous async conversion performed using
+ * gst_video_converter_frame() to complete.
+ *
+ * Since: 1.20
+ */
+ void
+ gst_video_converter_frame_finish (GstVideoConverter * convert)
+ {
+ g_return_if_fail (convert);
+ g_return_if_fail (convert->conversion_runner);
+ g_return_if_fail (convert->conversion_runner->async_tasks);
+
+ gst_parallelized_task_runner_finish (convert->conversion_runner);
+ }
+
/* Build the combined colorspace conversion matrix (input -> RGB -> output)
 * and prepare it for 8-bit fixed-point application. */
static void
video_converter_compute_matrix (GstVideoConverter * convert)
{
  MatrixData *dst = &convert->convert_matrix;

  color_matrix_set_identity (dst);
  compute_matrix_to_RGB (convert, dst);
  compute_matrix_to_YUV (convert, dst, FALSE);

  convert->current_bits = 8;
  prepare_matrix (convert, dst);
}
+
/* Create the chroma up/downsamplers for thread @idx when input and output
 * differ in chroma subsampling, chroma siting or size. Interlaced variants
 * are created in addition to the progressive ones for interlaced (but not
 * alternate-mode) input. Does nothing when chroma resampling is disabled
 * via the options. */
static void
video_converter_compute_resample (GstVideoConverter * convert, gint idx)
{
  GstVideoInfo *in_info, *out_info;
  const GstVideoFormatInfo *sfinfo, *dfinfo;

  if (CHECK_CHROMA_NONE (convert))
    return;

  in_info = &convert->in_info;
  out_info = &convert->out_info;

  sfinfo = in_info->finfo;
  dfinfo = out_info->finfo;

  GST_DEBUG ("site: %d->%d, w_sub: %d->%d, h_sub: %d->%d", in_info->chroma_site,
      out_info->chroma_site, sfinfo->w_sub[2], dfinfo->w_sub[2],
      sfinfo->h_sub[2], dfinfo->h_sub[2]);

  if (sfinfo->w_sub[2] != dfinfo->w_sub[2] ||
      sfinfo->h_sub[2] != dfinfo->h_sub[2] ||
      in_info->chroma_site != out_info->chroma_site ||
      in_info->width != out_info->width ||
      in_info->height != out_info->height) {
    if (GST_VIDEO_INFO_IS_INTERLACED (in_info)
        && GST_VIDEO_INFO_INTERLACE_MODE (in_info) !=
        GST_VIDEO_INTERLACE_MODE_ALTERNATE) {
      /* interlaced resamplers operate on interleaved field lines */
      if (!CHECK_CHROMA_DOWNSAMPLE (convert))
        convert->upsample_i[idx] = gst_video_chroma_resample_new (0,
            in_info->chroma_site, GST_VIDEO_CHROMA_FLAG_INTERLACED,
            sfinfo->unpack_format, sfinfo->w_sub[2], sfinfo->h_sub[2]);
      if (!CHECK_CHROMA_UPSAMPLE (convert))
        convert->downsample_i[idx] =
            gst_video_chroma_resample_new (0, out_info->chroma_site,
            GST_VIDEO_CHROMA_FLAG_INTERLACED, dfinfo->unpack_format,
            -dfinfo->w_sub[2], -dfinfo->h_sub[2]);
    }
    /* negative subsampling factors request downsampling */
    if (!CHECK_CHROMA_DOWNSAMPLE (convert))
      convert->upsample_p[idx] = gst_video_chroma_resample_new (0,
          in_info->chroma_site, 0, sfinfo->unpack_format, sfinfo->w_sub[2],
          sfinfo->h_sub[2]);
    if (!CHECK_CHROMA_UPSAMPLE (convert))
      convert->downsample_p[idx] = gst_video_chroma_resample_new (0,
          out_info->chroma_site, 0, dfinfo->unpack_format, -dfinfo->w_sub[2],
          -dfinfo->h_sub[2]);
  }
}
+
/* Accessors for plane and component data inside a GstVideoFrame; the *_LINE
 * variants return a pointer to the start of the requested line. */
#define FRAME_GET_PLANE_STRIDE(frame, plane) \
  GST_VIDEO_FRAME_PLANE_STRIDE (frame, plane)
#define FRAME_GET_PLANE_LINE(frame, plane, line) \
  (gpointer)(((guint8*)(GST_VIDEO_FRAME_PLANE_DATA (frame, plane))) + \
      FRAME_GET_PLANE_STRIDE (frame, plane) * (line))

#define FRAME_GET_COMP_STRIDE(frame, comp) \
  GST_VIDEO_FRAME_COMP_STRIDE (frame, comp)
#define FRAME_GET_COMP_LINE(frame, comp, line) \
  (gpointer)(((guint8*)(GST_VIDEO_FRAME_COMP_DATA (frame, comp))) + \
      FRAME_GET_COMP_STRIDE (frame, comp) * (line))

/* shorthands for plane 0 */
#define FRAME_GET_STRIDE(frame) FRAME_GET_PLANE_STRIDE (frame, 0)
#define FRAME_GET_LINE(frame,line) FRAME_GET_PLANE_LINE (frame, 0, line)

/* per-component shorthands */
#define FRAME_GET_Y_LINE(frame,line) FRAME_GET_COMP_LINE(frame, GST_VIDEO_COMP_Y, line)
#define FRAME_GET_U_LINE(frame,line) FRAME_GET_COMP_LINE(frame, GST_VIDEO_COMP_U, line)
#define FRAME_GET_V_LINE(frame,line) FRAME_GET_COMP_LINE(frame, GST_VIDEO_COMP_V, line)
#define FRAME_GET_A_LINE(frame,line) FRAME_GET_COMP_LINE(frame, GST_VIDEO_COMP_A, line)

#define FRAME_GET_Y_STRIDE(frame) FRAME_GET_COMP_STRIDE(frame, GST_VIDEO_COMP_Y)
#define FRAME_GET_U_STRIDE(frame) FRAME_GET_COMP_STRIDE(frame, GST_VIDEO_COMP_U)
#define FRAME_GET_V_STRIDE(frame) FRAME_GET_COMP_STRIDE(frame, GST_VIDEO_COMP_V)
#define FRAME_GET_A_STRIDE(frame) FRAME_GET_COMP_STRIDE(frame, GST_VIDEO_COMP_A)


/* Unpack one line of @frame starting at pixel @x into @dest using the
 * format's unpack function; propagates the interlaced pack flag. */
#define UNPACK_FRAME(frame,dest,line,x,width) \
  frame->info.finfo->unpack_func (frame->info.finfo, \
      (GST_VIDEO_FRAME_IS_INTERLACED (frame) ? \
        GST_VIDEO_PACK_FLAG_INTERLACED : \
        GST_VIDEO_PACK_FLAG_NONE), \
      dest, frame->data, frame->info.stride, x, \
      line, width)
/* Pack one line from @src into @frame using the format's pack function. */
#define PACK_FRAME(frame,src,line,width) \
  frame->info.finfo->pack_func (frame->info.finfo, \
      (GST_VIDEO_FRAME_IS_INTERLACED (frame) ? \
        GST_VIDEO_PACK_FLAG_INTERLACED : \
        GST_VIDEO_PACK_FLAG_NONE), \
      src, 0, frame->data, frame->info.stride, \
      frame->info.chroma_site, line, width);
+
+ static gpointer
+ get_dest_line (GstLineCache * cache, gint idx, gpointer user_data)
+ {
+ GstVideoConverter *convert = user_data;
+ guint8 *line;
+ gint pstride = convert->pack_pstride;
+ gint out_x = convert->out_x;
+ guint cline;
+
+ cline = CLAMP (idx, 0, convert->out_maxheight - 1);
+
+ line = FRAME_GET_LINE (convert->dest, cline);
+ GST_DEBUG ("get dest line %d %p", cline, line);
+
+ if (convert->borderline) {
+ gint r_border = (out_x + convert->out_width) * pstride;
+ gint rb_width = convert->out_maxwidth * pstride - r_border;
+ gint lb_width = out_x * pstride;
+
+ memcpy (line, convert->borderline, lb_width);
+ memcpy (line + r_border, convert->borderline, rb_width);
+ }
+ line += out_x * pstride;
+
+ return line;
+ }
+
/* GstLineCacheNeedLineFunc for the unpack stage: produce one unpacked line
 * of the source frame. When the source format needs no unpacking and the
 * consumer does not write to the line, the frame memory is handed out
 * directly instead of copying. */
static gboolean
do_unpack_lines (GstLineCache * cache, gint idx, gint out_line, gint in_line,
    gpointer user_data)
{
  GstVideoConverter *convert = user_data;
  gpointer tmpline;
  guint cline;

  /* clamp the cropped line number into the source frame */
  cline = CLAMP (in_line + convert->in_y, 0, convert->in_maxheight - 1);

  if (cache->alloc_writable || !convert->identity_unpack) {
    tmpline = gst_line_cache_alloc_line (cache, out_line);
    GST_DEBUG ("unpack line %d (%u) %p", in_line, cline, tmpline);
    UNPACK_FRAME (convert->src, tmpline, cline, convert->in_x,
        convert->in_width);
  } else {
    /* identity unpack: point straight into the source frame, offset by
     * the horizontal crop */
    tmpline = ((guint8 *) FRAME_GET_LINE (convert->src, cline)) +
        convert->in_x * convert->unpack_pstride;
    GST_DEBUG ("get src line %d (%u) %p", in_line, cline, tmpline);
  }
  gst_line_cache_add_line (cache, in_line, tmpline);

  return TRUE;
}
+
/* GstLineCacheNeedLineFunc: chroma-upsample a group of lines.
 * The chroma resampler works on groups of up_n_lines lines aligned to
 * up_offset; the whole group is fetched from the previous cache, resampled
 * in place, and all resulting lines are added to this cache at once. */
static gboolean
do_upsample_lines (GstLineCache * cache, gint idx, gint out_line, gint in_line,
    gpointer user_data)
{
  GstVideoConverter *convert = user_data;
  gpointer *lines;
  gint i, start_line, n_lines;

  n_lines = convert->up_n_lines;
  start_line = in_line;
  if (start_line < n_lines + convert->up_offset) {
    /* align the first group to the resampler's offset */
    start_line += convert->up_offset;
    out_line += convert->up_offset;
  }

  /* get the lines needed for chroma upsample */
  lines =
      gst_line_cache_get_lines (cache->prev, idx, out_line, start_line,
      n_lines);

  if (convert->upsample[idx]) {
    GST_DEBUG ("doing upsample %d-%d %p", start_line, start_line + n_lines - 1,
        lines[0]);
    /* in-place resample of the whole group */
    gst_video_chroma_resample (convert->upsample[idx], lines,
        convert->in_width);
  }

  for (i = 0; i < n_lines; i++)
    gst_line_cache_add_line (cache, start_line + i, lines[i]);

  return TRUE;
}
+
/* GstLineCacheNeedLineFunc: convert one line from YUV to RGB.
 * The color matrix is applied in place on the upstream line; when gamma
 * decoding is enabled the result is additionally expanded into a new
 * cache line (gamma_func writes to a separate destination). */
static gboolean
do_convert_to_RGB_lines (GstLineCache * cache, gint idx, gint out_line,
    gint in_line, gpointer user_data)
{
  GstVideoConverter *convert = user_data;
  MatrixData *data = &convert->to_RGB_matrix;
  gpointer *lines, destline;

  lines = gst_line_cache_get_lines (cache->prev, idx, out_line, in_line, 1);
  destline = lines[0];

  if (data->matrix_func) {
    GST_DEBUG ("to RGB line %d %p", in_line, destline);
    data->matrix_func (data, destline);
  }
  if (convert->gamma_dec.gamma_func) {
    /* gamma decode needs its own output line */
    destline = gst_line_cache_alloc_line (cache, out_line);

    GST_DEBUG ("gamma decode line %d %p->%p", in_line, lines[0], destline);
    convert->gamma_dec.gamma_func (&convert->gamma_dec, destline, lines[0]);
  }
  gst_line_cache_add_line (cache, in_line, destline);

  return TRUE;
}
+
+ static gboolean
+ do_hscale_lines (GstLineCache * cache, gint idx, gint out_line, gint in_line,
+ gpointer user_data)
+ {
+ GstVideoConverter *convert = user_data;
+ gpointer *lines, destline;
+
+ lines = gst_line_cache_get_lines (cache->prev, idx, out_line, in_line, 1);
+
+ destline = gst_line_cache_alloc_line (cache, out_line);
+
+ GST_DEBUG ("hresample line %d %p->%p", in_line, lines[0], destline);
+ gst_video_scaler_horizontal (convert->h_scaler[idx], convert->h_scale_format,
+ lines[0], destline, 0, convert->out_width);
+
+ gst_line_cache_add_line (cache, in_line, destline);
+
+ return TRUE;
+ }
+
/* GstLineCacheNeedLineFunc: vertically scale one output line.
 * The scaler reports which source lines (sline .. sline+n_lines-1) the
 * requested output line needs; those taps are fetched from the previous
 * cache and combined into a new cache line. */
static gboolean
do_vscale_lines (GstLineCache * cache, gint idx, gint out_line, gint in_line,
    gpointer user_data)
{
  GstVideoConverter *convert = user_data;
  gpointer *lines, destline;
  guint sline, n_lines;
  guint cline;

  /* clamp so requests past the output replicate the last line */
  cline = CLAMP (in_line, 0, convert->out_height - 1);

  gst_video_scaler_get_coeff (convert->v_scaler[idx], cline, &sline, &n_lines);
  lines = gst_line_cache_get_lines (cache->prev, idx, out_line, sline, n_lines);

  destline = gst_line_cache_alloc_line (cache, out_line);

  GST_DEBUG ("vresample line %d %d-%d %p->%p", in_line, sline,
      sline + n_lines - 1, lines[0], destline);
  gst_video_scaler_vertical (convert->v_scaler[idx], convert->v_scale_format,
      lines, destline, cline, convert->v_scale_width);

  gst_line_cache_add_line (cache, in_line, destline);

  return TRUE;
}
+
/* GstLineCacheNeedLineFunc: apply the main conversion matrix to one line.
 * When the pipeline mixes 8-bit and 16-bit stages the line is first
 * widened (8->16), the matrix applied at the wider depth, then narrowed
 * again (16->8) as required; a separate cache line is only allocated when
 * the bit depth actually changes, otherwise work happens in place. */
static gboolean
do_convert_lines (GstLineCache * cache, gint idx, gint out_line, gint in_line,
    gpointer user_data)
{
  GstVideoConverter *convert = user_data;
  MatrixData *data = &convert->convert_matrix;
  gpointer *lines, destline;
  guint in_bits, out_bits;
  gint width;

  lines = gst_line_cache_get_lines (cache->prev, idx, out_line, in_line, 1);

  destline = lines[0];

  in_bits = convert->in_bits;
  out_bits = convert->out_bits;

  /* only the overlapping region needs converting */
  width = MIN (convert->in_width, convert->out_width);

  if (out_bits == 16 || in_bits == 16) {
    gpointer srcline = lines[0];

    if (out_bits != in_bits)
      destline = gst_line_cache_alloc_line (cache, out_line);

    /* FIXME, we can scale in the conversion matrix */
    if (in_bits == 8) {
      GST_DEBUG ("8->16 line %d %p->%p", in_line, srcline, destline);
      /* 4 components per pixel (AYUV/ARGB layout) */
      video_orc_convert_u8_to_u16 (destline, srcline, width * 4);
      srcline = destline;
    }

    if (data->matrix_func) {
      GST_DEBUG ("matrix line %d %p", in_line, srcline);
      data->matrix_func (data, srcline);
    }

    /* FIXME, dither here */
    if (out_bits == 8) {
      GST_DEBUG ("16->8 line %d %p->%p", in_line, srcline, destline);
      video_orc_convert_u16_to_u8 (destline, srcline, width * 4);
    }
  } else {
    /* same depth on both sides: matrix in place */
    if (data->matrix_func) {
      GST_DEBUG ("matrix line %d %p", in_line, destline);
      data->matrix_func (data, destline);
    }
  }
  gst_line_cache_add_line (cache, in_line, destline);

  return TRUE;
}
+
+ static gboolean
+ do_alpha_lines (GstLineCache * cache, gint idx, gint out_line, gint in_line,
+ gpointer user_data)
+ {
+ gpointer *lines, destline;
+ GstVideoConverter *convert = user_data;
+ gint width = MIN (convert->in_width, convert->out_width);
+
+ lines = gst_line_cache_get_lines (cache->prev, idx, out_line, in_line, 1);
+ destline = lines[0];
+
+ GST_DEBUG ("alpha line %d %p", in_line, destline);
+ convert->alpha_func (convert, destline, width);
+
+ gst_line_cache_add_line (cache, in_line, destline);
+
+ return TRUE;
+ }
+
/* GstLineCacheNeedLineFunc: convert one line from RGB to YUV.
 * Mirror of do_convert_to_RGB_lines: optional gamma encode first (into a
 * new cache line), then the to-YUV color matrix applied in place. */
static gboolean
do_convert_to_YUV_lines (GstLineCache * cache, gint idx, gint out_line,
    gint in_line, gpointer user_data)
{
  GstVideoConverter *convert = user_data;
  MatrixData *data = &convert->to_YUV_matrix;
  gpointer *lines, destline;

  lines = gst_line_cache_get_lines (cache->prev, idx, out_line, in_line, 1);
  destline = lines[0];

  if (convert->gamma_enc.gamma_func) {
    /* gamma encode needs its own output line */
    destline = gst_line_cache_alloc_line (cache, out_line);

    GST_DEBUG ("gamma encode line %d %p->%p", in_line, lines[0], destline);
    convert->gamma_enc.gamma_func (&convert->gamma_enc, destline, lines[0]);
  }
  if (data->matrix_func) {
    GST_DEBUG ("to YUV line %d %p", in_line, destline);
    data->matrix_func (data, destline);
  }
  gst_line_cache_add_line (cache, in_line, destline);

  return TRUE;
}
+
/* GstLineCacheNeedLineFunc: chroma-downsample a group of lines.
 * Like do_upsample_lines but with the downsampler's group size/offset;
 * the group is resampled in place and all lines added to the cache. */
static gboolean
do_downsample_lines (GstLineCache * cache, gint idx, gint out_line,
    gint in_line, gpointer user_data)
{
  GstVideoConverter *convert = user_data;
  gpointer *lines;
  gint i, start_line, n_lines;

  n_lines = convert->down_n_lines;
  start_line = in_line;
  if (start_line < n_lines + convert->down_offset)
    /* align the first group to the resampler's offset */
    start_line += convert->down_offset;

  /* get the lines needed for chroma downsample */
  lines =
      gst_line_cache_get_lines (cache->prev, idx, out_line, start_line,
      n_lines);

  if (convert->downsample[idx]) {
    GST_DEBUG ("downsample line %d %d-%d %p", in_line, start_line,
        start_line + n_lines - 1, lines[0]);
    gst_video_chroma_resample (convert->downsample[idx], lines,
        convert->out_width);
  }

  for (i = 0; i < n_lines; i++)
    gst_line_cache_add_line (cache, start_line + i, lines[i]);

  return TRUE;
}
+
+ static gboolean
+ do_dither_lines (GstLineCache * cache, gint idx, gint out_line, gint in_line,
+ gpointer user_data)
+ {
+ GstVideoConverter *convert = user_data;
+ gpointer *lines, destline;
+
+ lines = gst_line_cache_get_lines (cache->prev, idx, out_line, in_line, 1);
+ destline = lines[0];
+
+ if (convert->dither[idx]) {
+ GST_DEBUG ("Dither line %d %p", in_line, destline);
+ gst_video_dither_line (convert->dither[idx], destline, 0, out_line,
+ convert->out_width);
+ }
+ gst_line_cache_add_line (cache, in_line, destline);
+
+ return TRUE;
+ }
+
/* Per-thread state for the generic conversion path.  Each worker packs
 * the horizontal band of output lines [h_0, h_1) into the destination. */
typedef struct
{
  GstLineCache *pack_lines;     /* this thread's line-cache chain */
  gint idx;                     /* thread index */
  gint h_0, h_1;                /* first / one-past-last output line */
  gint pack_lines_count;        /* lines consumed per pack call */
  gint out_y;                   /* vertical offset into the dest frame */
  gboolean identity_pack;       /* TRUE: lines already written, skip pack */
  gint lb_width, out_maxwidth;  /* left-border bytes / full output width */
  GstVideoFrame *dest;
} ConvertTask;
+
/* Worker for video_converter_generic: pull converted lines from this
 * thread's cache chain and pack them into the destination frame. */
static void
convert_generic_task (ConvertTask * task)
{
  gint i;

  for (i = task->h_0; i < task->h_1; i += task->pack_lines_count) {
    gpointer *lines;

    /* load the lines needed to pack */
    lines =
        gst_line_cache_get_lines (task->pack_lines, task->idx, i + task->out_y,
        i, task->pack_lines_count);

    if (!task->identity_pack) {
      /* take away the border */
      guint8 *l = ((guint8 *) lines[0]) - task->lb_width;
      /* and pack into destination */
      GST_DEBUG ("pack line %d %p (%p)", i + task->out_y, lines[0], l);
      PACK_FRAME (task->dest, l, i + task->out_y, task->out_maxwidth);
    }
  }
}
+
/* Generic (line-cache based) conversion of @src into @dest.
 * Selects the interlaced or progressive processing chains, queries the
 * chroma resampler group sizes, fills top/bottom border lines, then splits
 * the output height over the thread pool and runs convert_generic_task. */
static void
video_converter_generic (GstVideoConverter * convert, const GstVideoFrame * src,
    GstVideoFrame * dest)
{
  gint i;
  gint out_maxwidth, out_maxheight;
  gint out_x, out_y, out_height;
  gint pack_lines, pstride;
  gint lb_width;
  ConvertTask *tasks;
  ConvertTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;

  out_height = convert->out_height;
  out_maxwidth = convert->out_maxwidth;
  out_maxheight = convert->out_maxheight;

  out_x = convert->out_x;
  out_y = convert->out_y;

  /* the line-cache callbacks read src/dest from the converter */
  convert->src = src;
  convert->dest = dest;

  if (GST_VIDEO_FRAME_IS_INTERLACED (src)) {
    GST_DEBUG ("setup interlaced frame");
    convert->upsample = convert->upsample_i;
    convert->downsample = convert->downsample_i;
    convert->v_scaler = convert->v_scaler_i;
  } else {
    GST_DEBUG ("setup progressive frame");
    convert->upsample = convert->upsample_p;
    convert->downsample = convert->downsample_p;
    convert->v_scaler = convert->v_scaler_p;
  }
  /* group size/offset used by do_upsample_lines */
  if (convert->upsample[0]) {
    gst_video_chroma_resample_get_info (convert->upsample[0],
        &convert->up_n_lines, &convert->up_offset);
  } else {
    convert->up_n_lines = 1;
    convert->up_offset = 0;
  }
  /* group size/offset used by do_downsample_lines */
  if (convert->downsample[0]) {
    gst_video_chroma_resample_get_info (convert->downsample[0],
        &convert->down_n_lines, &convert->down_offset);
  } else {
    convert->down_n_lines = 1;
    convert->down_offset = 0;
  }

  pack_lines = convert->pack_nlines;    /* only 1 for now */
  pstride = convert->pack_pstride;

  lb_width = out_x * pstride;

  if (convert->borderline) {
    /* FIXME we should try to avoid PACK_FRAME */
    /* fill the lines above the converted region */
    for (i = 0; i < out_y; i++)
      PACK_FRAME (dest, convert->borderline, i, out_maxwidth);
  }

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[0] =
      g_renew (ConvertTask, convert->tasks[0], n_threads);
  tasks_p = convert->tasks_p[0] =
      g_renew (ConvertTask *, convert->tasks_p[0], n_threads);

  /* bands must be a multiple of the pack group size */
  lines_per_thread =
      GST_ROUND_UP_N ((out_height + n_threads - 1) / n_threads, pack_lines);

  for (i = 0; i < n_threads; i++) {
    tasks[i].dest = dest;
    tasks[i].pack_lines = convert->pack_lines[i];
    tasks[i].idx = i;
    tasks[i].pack_lines_count = pack_lines;
    tasks[i].out_y = out_y;
    tasks[i].identity_pack = convert->identity_pack;
    tasks[i].lb_width = lb_width;
    tasks[i].out_maxwidth = out_maxwidth;

    tasks[i].h_0 = i * lines_per_thread;
    tasks[i].h_1 = MIN ((i + 1) * lines_per_thread, out_height);

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_generic_task, (gpointer) tasks_p);

  if (convert->borderline) {
    /* fill the lines below the converted region */
    for (i = out_y + out_height; i < out_maxheight; i++)
      PACK_FRAME (dest, convert->borderline, i, out_maxwidth);
  }
  /* paletted output: copy the palette into the second plane */
  if (convert->pack_pal) {
    memcpy (GST_VIDEO_FRAME_PLANE_DATA (dest, 1), convert->pack_pal,
        convert->pack_palsize);
  }
}
+
+ static void convert_fill_border (GstVideoConverter * convert,
+ GstVideoFrame * dest);
+
+ /* Fast paths */
+
/* Compute the two line indices @l1/@l2 merged in one pass for @line.
 * For interlaced content the two lines of the same field are used
 * (distance 2, aligned to the start of the field pair), otherwise two
 * adjacent lines.  Wrapped in do/while(0) so the macro expands to a
 * single statement and is safe in unbraced if/else bodies (the previous
 * bare if/else form broke there and left a stray ';'). */
#define GET_LINE_OFFSETS(interlaced,line,l1,l2) \
  do { \
    if (interlaced) { \
      l1 = (line & 2 ? line - 1 : line); \
      l2 = l1 + 2; \
    } else { \
      l1 = line; \
      l2 = l1 + 1; \
    } \
  } while (0)
+
/* Per-thread state for the full-frame fast paths.  Each worker converts
 * the band of lines [height_0, height_1). */
typedef struct
{
  const GstVideoFrame *src;
  GstVideoFrame *dest;
  gint height_0, height_1;      /* first / one-past-last line of the band */

  /* parameters */
  gboolean interlaced;          /* process per-field line pairs */
  gint width;
  gint alpha;                   /* alpha value for formats gaining alpha */
  MatrixData *data;
  gint in_x, in_y;
  gint out_x, out_y;
  gpointer tmpline;
} FConvertTask;
+
/* Worker: convert a band of I420 lines to YUY2.  Two luma lines share one
 * chroma line; GET_LINE_OFFSETS picks the pair (same-field when interlaced). */
static void
convert_I420_YUY2_task (FConvertTask * task)
{
  gint i;
  gint l1, l2;

  for (i = task->height_0; i < task->height_1; i += 2) {
    GET_LINE_OFFSETS (task->interlaced, i, l1, l2);

    video_orc_convert_I420_YUY2 (FRAME_GET_LINE (task->dest, l1),
        FRAME_GET_LINE (task->dest, l2),
        FRAME_GET_Y_LINE (task->src, l1),
        FRAME_GET_Y_LINE (task->src, l2),
        FRAME_GET_U_LINE (task->src, i >> 1),
        FRAME_GET_V_LINE (task->src, i >> 1), (task->width + 1) / 2);
  }
}
+
/* Fast path: I420 -> YUY2.  Splits the frame into bands processed by
 * convert_I420_YUY2_task on the thread pool; lines that do not form a
 * complete 2-line (or 4-line interlaced) group are converted through the
 * generic unpack/pack path at the end. */
static void
convert_I420_YUY2 (GstVideoConverter * convert, const GstVideoFrame * src,
    GstVideoFrame * dest)
{
  int i;
  gint width = convert->in_width;
  gint height = convert->in_height;
  gboolean interlaced = GST_VIDEO_FRAME_IS_INTERLACED (src)
      && (GST_VIDEO_INFO_INTERLACE_MODE (&src->info) !=
      GST_VIDEO_INTERLACE_MODE_ALTERNATE);
  gint h2;
  FConvertTask *tasks;
  FConvertTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;

  /* I420 has half as many chroma lines, as such we have to
   * always merge two into one. For non-interlaced these are
   * the two next to each other, for interlaced one is skipped
   * in between. */
  if (interlaced)
    h2 = GST_ROUND_DOWN_4 (height);
  else
    h2 = GST_ROUND_DOWN_2 (height);

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[0] =
      g_renew (FConvertTask, convert->tasks[0], n_threads);
  tasks_p = convert->tasks_p[0] =
      g_renew (FConvertTask *, convert->tasks_p[0], n_threads);

  /* even band sizes so the 2-line groups stay aligned */
  lines_per_thread = GST_ROUND_UP_2 ((h2 + n_threads - 1) / n_threads);

  for (i = 0; i < n_threads; i++) {
    tasks[i].src = src;
    tasks[i].dest = dest;

    tasks[i].interlaced = interlaced;
    tasks[i].width = width;

    tasks[i].height_0 = i * lines_per_thread;
    tasks[i].height_1 = tasks[i].height_0 + lines_per_thread;
    tasks[i].height_1 = MIN (h2, tasks[i].height_1);

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_I420_YUY2_task, (gpointer) tasks_p);

  /* now handle last lines. For interlaced these are up to 3 */
  if (h2 != height) {
    for (i = h2; i < height; i++) {
      UNPACK_FRAME (src, convert->tmpline[0], i, convert->in_x, width);
      PACK_FRAME (dest, convert->tmpline[0], i, width);
    }
  }
}
+
/* Worker: convert a band of I420 lines to UYVY.  Two luma lines share one
 * chroma line; GET_LINE_OFFSETS picks the pair (same-field when interlaced). */
static void
convert_I420_UYVY_task (FConvertTask * task)
{
  gint i;
  gint l1, l2;

  for (i = task->height_0; i < task->height_1; i += 2) {
    GET_LINE_OFFSETS (task->interlaced, i, l1, l2);

    video_orc_convert_I420_UYVY (FRAME_GET_LINE (task->dest, l1),
        FRAME_GET_LINE (task->dest, l2),
        FRAME_GET_Y_LINE (task->src, l1),
        FRAME_GET_Y_LINE (task->src, l2),
        FRAME_GET_U_LINE (task->src, i >> 1),
        FRAME_GET_V_LINE (task->src, i >> 1), (task->width + 1) / 2);
  }
}
+
+ static void
+ convert_I420_UYVY (GstVideoConverter * convert, const GstVideoFrame * src,
+ GstVideoFrame * dest)
+ {
+ int i;
+ gint width = convert->in_width;
+ gint height = convert->in_height;
+ gboolean interlaced = GST_VIDEO_FRAME_IS_INTERLACED (src)
+ && (GST_VIDEO_INFO_INTERLACE_MODE (&src->info) !=
+ GST_VIDEO_INTERLACE_MODE_ALTERNATE);
+ gint h2;
+ FConvertTask *tasks;
+ FConvertTask **tasks_p;
+ gint n_threads;
+ gint lines_per_thread;
+
+ /* I420 has half as many chroma lines, as such we have to
+ * always merge two into one. For non-interlaced these are
+ * the two next to each other, for interlaced one is skipped
+ * in between. */
+ if (interlaced)
+ h2 = GST_ROUND_DOWN_4 (height);
+ else
+ h2 = GST_ROUND_DOWN_2 (height);
+
+ n_threads = convert->conversion_runner->n_threads;
+ tasks = convert->tasks[0] =
+ g_renew (FConvertTask, convert->tasks[0], n_threads);
+ tasks_p = convert->tasks_p[0] =
+ g_renew (FConvertTask *, convert->tasks_p[0], n_threads);
+
+ lines_per_thread = GST_ROUND_UP_2 ((h2 + n_threads - 1) / n_threads);
+
+ for (i = 0; i < n_threads; i++) {
+ tasks[i].src = src;
+ tasks[i].dest = dest;
+
+ tasks[i].interlaced = interlaced;
+ tasks[i].width = width;
+
+ tasks[i].height_0 = i * lines_per_thread;
+ tasks[i].height_1 = tasks[i].height_0 + lines_per_thread;
+ tasks[i].height_1 = MIN (h2, tasks[i].height_1);
+
+ tasks_p[i] = &tasks[i];
+ }
+
+ gst_parallelized_task_runner_run (convert->conversion_runner,
+ (GstParallelizedTaskFunc) convert_I420_UYVY_task, (gpointer) tasks_p);
+
+ /* now handle last lines. For interlaced these are up to 3 */
+ if (h2 != height) {
+ for (i = h2; i < height; i++) {
+ UNPACK_FRAME (src, convert->tmpline[0], i, convert->in_x, width);
+ PACK_FRAME (dest, convert->tmpline[0], i, width);
+ }
+ }
+ }
+
/* Worker: convert a band of I420 lines to AYUV, filling the new alpha
 * channel with task->alpha.  Two luma lines share one chroma line. */
static void
convert_I420_AYUV_task (FConvertTask * task)
{
  gint i;
  gint l1, l2;

  for (i = task->height_0; i < task->height_1; i += 2) {
    GET_LINE_OFFSETS (task->interlaced, i, l1, l2);

    video_orc_convert_I420_AYUV (FRAME_GET_LINE (task->dest, l1),
        FRAME_GET_LINE (task->dest, l2),
        FRAME_GET_Y_LINE (task->src, l1),
        FRAME_GET_Y_LINE (task->src, l2),
        FRAME_GET_U_LINE (task->src, i >> 1), FRAME_GET_V_LINE (task->src,
            i >> 1), task->alpha, task->width);
  }
}
+
/* Fast path: I420 -> AYUV.  Like convert_I420_YUY2 but the destination
 * gains an alpha channel, filled with the converter's alpha value; the
 * trailing-line fallback also applies the alpha when it is not opaque. */
static void
convert_I420_AYUV (GstVideoConverter * convert, const GstVideoFrame * src,
    GstVideoFrame * dest)
{
  int i;
  gint width = convert->in_width;
  gint height = convert->in_height;
  gboolean interlaced = GST_VIDEO_FRAME_IS_INTERLACED (src)
      && (GST_VIDEO_INFO_INTERLACE_MODE (&src->info) !=
      GST_VIDEO_INTERLACE_MODE_ALTERNATE);
  guint8 alpha = MIN (convert->alpha_value, 255);
  gint h2;
  FConvertTask *tasks;
  FConvertTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;

  /* I420 has half as many chroma lines, as such we have to
   * always merge two into one. For non-interlaced these are
   * the two next to each other, for interlaced one is skipped
   * in between. */
  if (interlaced)
    h2 = GST_ROUND_DOWN_4 (height);
  else
    h2 = GST_ROUND_DOWN_2 (height);


  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[0] =
      g_renew (FConvertTask, convert->tasks[0], n_threads);
  tasks_p = convert->tasks_p[0] =
      g_renew (FConvertTask *, convert->tasks_p[0], n_threads);

  /* even band sizes so the 2-line groups stay aligned */
  lines_per_thread = GST_ROUND_UP_2 ((h2 + n_threads - 1) / n_threads);

  for (i = 0; i < n_threads; i++) {
    tasks[i].src = src;
    tasks[i].dest = dest;

    tasks[i].interlaced = interlaced;
    tasks[i].width = width;
    tasks[i].alpha = alpha;

    tasks[i].height_0 = i * lines_per_thread;
    tasks[i].height_1 = tasks[i].height_0 + lines_per_thread;
    tasks[i].height_1 = MIN (h2, tasks[i].height_1);

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_I420_AYUV_task, (gpointer) tasks_p);

  /* now handle last lines. For interlaced these are up to 3 */
  if (h2 != height) {
    for (i = h2; i < height; i++) {
      UNPACK_FRAME (src, convert->tmpline[0], i, convert->in_x, width);
      if (alpha != 0xff)
        convert_set_alpha_u8 (convert, convert->tmpline[0], width);
      PACK_FRAME (dest, convert->tmpline[0], i, width);
    }
  }
}
+
/* Worker: convert a band of YUY2 lines to I420.  Two packed source lines
 * produce two luma lines and one (shared) chroma line. */
static void
convert_YUY2_I420_task (FConvertTask * task)
{
  gint i;
  gint l1, l2;

  for (i = task->height_0; i < task->height_1; i += 2) {
    GET_LINE_OFFSETS (task->interlaced, i, l1, l2);

    video_orc_convert_YUY2_I420 (FRAME_GET_Y_LINE (task->dest, l1),
        FRAME_GET_Y_LINE (task->dest, l2),
        FRAME_GET_U_LINE (task->dest, i >> 1),
        FRAME_GET_V_LINE (task->dest, i >> 1),
        FRAME_GET_LINE (task->src, l1), FRAME_GET_LINE (task->src, l2),
        (task->width + 1) / 2);
  }
}
+
/* Fast path: YUY2 -> I420.  Splits the frame into bands processed by
 * convert_YUY2_I420_task on the thread pool; trailing lines that do not
 * form a complete group fall back to the generic unpack/pack path. */
static void
convert_YUY2_I420 (GstVideoConverter * convert, const GstVideoFrame * src,
    GstVideoFrame * dest)
{
  int i;
  gint width = convert->in_width;
  gint height = convert->in_height;
  gboolean interlaced = GST_VIDEO_FRAME_IS_INTERLACED (src)
      && (GST_VIDEO_INFO_INTERLACE_MODE (&src->info) !=
      GST_VIDEO_INTERLACE_MODE_ALTERNATE);
  gint h2;
  FConvertTask *tasks;
  FConvertTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;

  /* I420 has half as many chroma lines, as such we have to
   * always merge two into one. For non-interlaced these are
   * the two next to each other, for interlaced one is skipped
   * in between. */
  if (interlaced)
    h2 = GST_ROUND_DOWN_4 (height);
  else
    h2 = GST_ROUND_DOWN_2 (height);

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[0] =
      g_renew (FConvertTask, convert->tasks[0], n_threads);
  tasks_p = convert->tasks_p[0] =
      g_renew (FConvertTask *, convert->tasks_p[0], n_threads);

  /* even band sizes so the 2-line groups stay aligned */
  lines_per_thread = GST_ROUND_UP_2 ((h2 + n_threads - 1) / n_threads);

  for (i = 0; i < n_threads; i++) {
    tasks[i].src = src;
    tasks[i].dest = dest;

    tasks[i].interlaced = interlaced;
    tasks[i].width = width;

    tasks[i].height_0 = i * lines_per_thread;
    tasks[i].height_1 = tasks[i].height_0 + lines_per_thread;
    tasks[i].height_1 = MIN (h2, tasks[i].height_1);

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_YUY2_I420_task, (gpointer) tasks_p);

  /* now handle last lines. For interlaced these are up to 3 */
  if (h2 != height) {
    for (i = h2; i < height; i++) {
      UNPACK_FRAME (src, convert->tmpline[0], i, convert->in_x, width);
      PACK_FRAME (dest, convert->tmpline[0], i, width);
    }
  }
}
+
/* Worker: convert a band of v210 lines to I420.
 * v210 packs 6 pixels (6 Y, 3 U, 3 V, 10 bits each) into four 32-bit
 * little-endian words (16 bytes).  Each 10-bit component is reduced to
 * 8 bits (>> 2); chroma of the two merged lines is averaged into the
 * single I420 chroma line.  The tail conditionals guard widths that are
 * not a multiple of 6. */
static void
convert_v210_I420_task (FConvertTask * task)
{
  gint i, j;
  gint l1, l2;
  guint8 *d_y1, *d_y2, *d_u, *d_v;
  const guint8 *s1, *s2;
  guint32 a0, a1, a2, a3;
  guint16 y0_1, y1_1, y2_1, y3_1, y4_1, y5_1;
  guint16 u0_1, u2_1, u4_1;
  guint16 v0_1, v2_1, v4_1;
  guint16 y0_2, y1_2, y2_2, y3_2, y4_2, y5_2;
  guint16 u0_2, u2_2, u4_2;
  guint16 v0_2, v2_2, v4_2;

  for (i = task->height_0; i < task->height_1; i += 2) {
    GET_LINE_OFFSETS (task->interlaced, i, l1, l2);

    d_y1 = FRAME_GET_Y_LINE (task->dest, l1);
    d_y2 = FRAME_GET_Y_LINE (task->dest, l2);
    d_u = FRAME_GET_U_LINE (task->dest, i >> 1);
    d_v = FRAME_GET_V_LINE (task->dest, i >> 1);

    s1 = FRAME_GET_LINE (task->src, l1);
    s2 = FRAME_GET_LINE (task->src, l2);

    for (j = 0; j < task->width; j += 6) {
      /* first line: unpack one 6-pixel group (4 LE words) */
      a0 = GST_READ_UINT32_LE (s1 + (j / 6) * 16 + 0);
      a1 = GST_READ_UINT32_LE (s1 + (j / 6) * 16 + 4);
      a2 = GST_READ_UINT32_LE (s1 + (j / 6) * 16 + 8);
      a3 = GST_READ_UINT32_LE (s1 + (j / 6) * 16 + 12);

      u0_1 = ((a0 >> 0) & 0x3ff) >> 2;
      y0_1 = ((a0 >> 10) & 0x3ff) >> 2;
      v0_1 = ((a0 >> 20) & 0x3ff) >> 2;
      y1_1 = ((a1 >> 0) & 0x3ff) >> 2;

      u2_1 = ((a1 >> 10) & 0x3ff) >> 2;
      y2_1 = ((a1 >> 20) & 0x3ff) >> 2;
      v2_1 = ((a2 >> 0) & 0x3ff) >> 2;
      y3_1 = ((a2 >> 10) & 0x3ff) >> 2;

      u4_1 = ((a2 >> 20) & 0x3ff) >> 2;
      y4_1 = ((a3 >> 0) & 0x3ff) >> 2;
      v4_1 = ((a3 >> 10) & 0x3ff) >> 2;
      y5_1 = ((a3 >> 20) & 0x3ff) >> 2;

      /* second line: same group */
      a0 = GST_READ_UINT32_LE (s2 + (j / 6) * 16 + 0);
      a1 = GST_READ_UINT32_LE (s2 + (j / 6) * 16 + 4);
      a2 = GST_READ_UINT32_LE (s2 + (j / 6) * 16 + 8);
      a3 = GST_READ_UINT32_LE (s2 + (j / 6) * 16 + 12);

      u0_2 = ((a0 >> 0) & 0x3ff) >> 2;
      y0_2 = ((a0 >> 10) & 0x3ff) >> 2;
      v0_2 = ((a0 >> 20) & 0x3ff) >> 2;
      y1_2 = ((a1 >> 0) & 0x3ff) >> 2;

      u2_2 = ((a1 >> 10) & 0x3ff) >> 2;
      y2_2 = ((a1 >> 20) & 0x3ff) >> 2;
      v2_2 = ((a2 >> 0) & 0x3ff) >> 2;
      y3_2 = ((a2 >> 10) & 0x3ff) >> 2;

      u4_2 = ((a2 >> 20) & 0x3ff) >> 2;
      y4_2 = ((a3 >> 0) & 0x3ff) >> 2;
      v4_2 = ((a3 >> 10) & 0x3ff) >> 2;
      y5_2 = ((a3 >> 20) & 0x3ff) >> 2;

      /* write luma, average vertical chroma pairs */
      d_y1[j] = y0_1;
      d_y2[j] = y0_2;
      d_u[j / 2] = (u0_1 + u0_2) / 2;
      d_v[j / 2] = (v0_1 + v0_2) / 2;

      if (j < task->width - 1) {
        d_y1[j + 1] = y1_1;
        d_y2[j + 1] = y1_2;
      }

      if (j < task->width - 2) {
        d_y1[j + 2] = y2_1;
        d_y2[j + 2] = y2_2;
        d_u[j / 2 + 1] = (u2_1 + u2_2) / 2;
        d_v[j / 2 + 1] = (v2_1 + v2_2) / 2;
      }

      if (j < task->width - 3) {
        d_y1[j + 3] = y3_1;
        d_y2[j + 3] = y3_2;
      }

      if (j < task->width - 4) {
        d_y1[j + 4] = y4_1;
        d_y2[j + 4] = y4_2;
        d_u[j / 2 + 2] = (u4_1 + u4_2) / 2;
        d_v[j / 2 + 2] = (v4_1 + v4_2) / 2;
      }

      if (j < task->width - 5) {
        d_y1[j + 5] = y5_1;
        d_y2[j + 5] = y5_2;
      }
    }
  }
}
+
/* Fast path: v210 -> I420.  Splits the frame into bands processed by
 * convert_v210_I420_task on the thread pool; trailing lines that do not
 * form a complete group fall back to the generic unpack/pack path. */
static void
convert_v210_I420 (GstVideoConverter * convert, const GstVideoFrame * src,
    GstVideoFrame * dest)
{
  int i;
  gint width = convert->in_width;
  gint height = convert->in_height;
  gboolean interlaced = GST_VIDEO_FRAME_IS_INTERLACED (src)
      && (GST_VIDEO_INFO_INTERLACE_MODE (&src->info) !=
      GST_VIDEO_INTERLACE_MODE_ALTERNATE);
  gint h2;
  FConvertTask *tasks;
  FConvertTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;

  /* I420 has half as many chroma lines, as such we have to
   * always merge two into one. For non-interlaced these are
   * the two next to each other, for interlaced one is skipped
   * in between. */
  if (interlaced)
    h2 = GST_ROUND_DOWN_4 (height);
  else
    h2 = GST_ROUND_DOWN_2 (height);

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[0] =
      g_renew (FConvertTask, convert->tasks[0], n_threads);
  tasks_p = convert->tasks_p[0] =
      g_renew (FConvertTask *, convert->tasks_p[0], n_threads);

  /* even band sizes so the 2-line groups stay aligned */
  lines_per_thread = GST_ROUND_UP_2 ((h2 + n_threads - 1) / n_threads);

  for (i = 0; i < n_threads; i++) {
    tasks[i].src = src;
    tasks[i].dest = dest;

    tasks[i].interlaced = interlaced;
    tasks[i].width = width;

    tasks[i].height_0 = i * lines_per_thread;
    tasks[i].height_1 = tasks[i].height_0 + lines_per_thread;
    tasks[i].height_1 = MIN (h2, tasks[i].height_1);

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_v210_I420_task, (gpointer) tasks_p);

  /* now handle last lines. For interlaced these are up to 3 */
  if (h2 != height) {
    for (i = h2; i < height; i++) {
      UNPACK_FRAME (src, convert->tmpline[0], i, convert->in_x, width);
      PACK_FRAME (dest, convert->tmpline[0], i, width);
    }
  }
}
+
/* Per-thread state for plane-based fast paths: source/destination plane
 * pointers (already offset to this thread's slice), their strides and the
 * slice dimensions. */
typedef struct
{
  const guint8 *s, *s2, *su, *sv;       /* source plane pointers */
  guint8 *d, *d2, *du, *dv;             /* destination plane pointers */
  gint sstride, sustride, svstride;     /* source strides */
  gint dstride, dustride, dvstride;     /* destination strides */
  gint width, height;                   /* slice size for this thread */
  gint alpha;                           /* alpha for formats gaining alpha */
  MatrixData *data;
} FConvertPlaneTask;
+
+ static void
+ convert_YUY2_AYUV_task (FConvertPlaneTask * task)
+ {
+ video_orc_convert_YUY2_AYUV (task->d, task->dstride, task->s,
+ task->sstride, task->alpha, (task->width + 1) / 2, task->height);
+ }
+
+ static void
+ convert_YUY2_AYUV (GstVideoConverter * convert, const GstVideoFrame * src,
+ GstVideoFrame * dest)
+ {
+ gint width = convert->in_width;
+ gint height = convert->in_height;
+ guint8 *s, *d;
+ guint8 alpha = MIN (convert->alpha_value, 255);
+ FConvertPlaneTask *tasks;
+ FConvertPlaneTask **tasks_p;
+ gint n_threads;
+ gint lines_per_thread;
+ gint i;
+
+ s = FRAME_GET_LINE (src, convert->in_y);
+ s += (GST_ROUND_UP_2 (convert->in_x) * 2);
+ d = FRAME_GET_LINE (dest, convert->out_y);
+ d += (convert->out_x * 4);
+
+ n_threads = convert->conversion_runner->n_threads;
+ tasks = convert->tasks[0] =
+ g_renew (FConvertPlaneTask, convert->tasks[0], n_threads);
+ tasks_p = convert->tasks_p[0] =
+ g_renew (FConvertPlaneTask *, convert->tasks_p[0], n_threads);
+
+ lines_per_thread = (height + n_threads - 1) / n_threads;
+
+ for (i = 0; i < n_threads; i++) {
+ tasks[i].dstride = FRAME_GET_STRIDE (dest);
+ tasks[i].sstride = FRAME_GET_STRIDE (src);
+ tasks[i].d = d + i * lines_per_thread * tasks[i].dstride;
+ tasks[i].s = s + i * lines_per_thread * tasks[i].sstride;
+
+ tasks[i].width = width;
+ tasks[i].height = (i + 1) * lines_per_thread;
+ tasks[i].height = MIN (tasks[i].height, height);
+ tasks[i].height -= i * lines_per_thread;
+ tasks[i].alpha = alpha;
+
+ tasks_p[i] = &tasks[i];
+ }
+
+ gst_parallelized_task_runner_run (convert->conversion_runner,
+ (GstParallelizedTaskFunc) convert_YUY2_AYUV_task, (gpointer) tasks_p);
+
+ convert_fill_border (convert, dest);
+ }
+
/* Worker: orc-accelerated YUY2 -> Y42B (planar 4:2:2) for one slice. */
static void
convert_YUY2_Y42B_task (FConvertPlaneTask * task)
{
  video_orc_convert_YUY2_Y42B (task->d, task->dstride, task->du,
      task->dustride, task->dv, task->dvstride,
      task->s, task->sstride, (task->width + 1) / 2, task->height);
}
+
/* Fast path: YUY2 -> Y42B.  Offsets the packed source and the three
 * destination planes to the conversion region (chroma x offset halved for
 * 4:2:2), splits the height into slices for the thread pool, then fills
 * the configured border. */
static void
convert_YUY2_Y42B (GstVideoConverter * convert, const GstVideoFrame * src,
    GstVideoFrame * dest)
{
  gint width = convert->in_width;
  gint height = convert->in_height;
  guint8 *s, *dy, *du, *dv;
  FConvertPlaneTask *tasks;
  FConvertPlaneTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;
  gint i;

  /* 2 bytes per packed YUY2 pixel */
  s = FRAME_GET_LINE (src, convert->in_y);
  s += (GST_ROUND_UP_2 (convert->in_x) * 2);

  dy = FRAME_GET_Y_LINE (dest, convert->out_y);
  dy += convert->out_x;
  du = FRAME_GET_U_LINE (dest, convert->out_y);
  du += convert->out_x >> 1;
  dv = FRAME_GET_V_LINE (dest, convert->out_y);
  dv += convert->out_x >> 1;

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[0] =
      g_renew (FConvertPlaneTask, convert->tasks[0], n_threads);
  tasks_p = convert->tasks_p[0] =
      g_renew (FConvertPlaneTask *, convert->tasks_p[0], n_threads);

  lines_per_thread = (height + n_threads - 1) / n_threads;

  for (i = 0; i < n_threads; i++) {
    tasks[i].dstride = FRAME_GET_Y_STRIDE (dest);
    tasks[i].dustride = FRAME_GET_U_STRIDE (dest);
    tasks[i].dvstride = FRAME_GET_V_STRIDE (dest);
    tasks[i].sstride = FRAME_GET_STRIDE (src);
    tasks[i].d = dy + i * lines_per_thread * tasks[i].dstride;
    tasks[i].du = du + i * lines_per_thread * tasks[i].dustride;
    tasks[i].dv = dv + i * lines_per_thread * tasks[i].dvstride;
    tasks[i].s = s + i * lines_per_thread * tasks[i].sstride;

    tasks[i].width = width;
    /* clamp the last slice to the frame */
    tasks[i].height = (i + 1) * lines_per_thread;
    tasks[i].height = MIN (tasks[i].height, height);
    tasks[i].height -= i * lines_per_thread;

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_YUY2_Y42B_task, (gpointer) tasks_p);

  convert_fill_border (convert, dest);
}
+
/* Worker: orc-accelerated YUY2 -> Y444 (planar 4:4:4) for one slice. */
static void
convert_YUY2_Y444_task (FConvertPlaneTask * task)
{
  video_orc_convert_YUY2_Y444 (task->d,
      task->dstride, task->du,
      task->dustride, task->dv,
      task->dvstride, task->s,
      task->sstride, (task->width + 1) / 2, task->height);
}
+
/* Fast path: YUY2 -> Y444.  Like convert_YUY2_Y42B, but the destination
 * is 4:4:4 so the chroma planes use the full x offset. */
static void
convert_YUY2_Y444 (GstVideoConverter * convert, const GstVideoFrame * src,
    GstVideoFrame * dest)
{
  gint width = convert->in_width;
  gint height = convert->in_height;
  guint8 *s, *dy, *du, *dv;
  FConvertPlaneTask *tasks;
  FConvertPlaneTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;
  gint i;

  /* 2 bytes per packed YUY2 pixel */
  s = FRAME_GET_LINE (src, convert->in_y);
  s += (GST_ROUND_UP_2 (convert->in_x) * 2);

  dy = FRAME_GET_Y_LINE (dest, convert->out_y);
  dy += convert->out_x;
  du = FRAME_GET_U_LINE (dest, convert->out_y);
  du += convert->out_x;
  dv = FRAME_GET_V_LINE (dest, convert->out_y);
  dv += convert->out_x;

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[0] =
      g_renew (FConvertPlaneTask, convert->tasks[0], n_threads);
  tasks_p = convert->tasks_p[0] =
      g_renew (FConvertPlaneTask *, convert->tasks_p[0], n_threads);

  lines_per_thread = (height + n_threads - 1) / n_threads;

  for (i = 0; i < n_threads; i++) {
    tasks[i].dstride = FRAME_GET_Y_STRIDE (dest);
    tasks[i].dustride = FRAME_GET_U_STRIDE (dest);
    tasks[i].dvstride = FRAME_GET_V_STRIDE (dest);
    tasks[i].sstride = FRAME_GET_STRIDE (src);
    tasks[i].d = dy + i * lines_per_thread * tasks[i].dstride;
    tasks[i].du = du + i * lines_per_thread * tasks[i].dustride;
    tasks[i].dv = dv + i * lines_per_thread * tasks[i].dvstride;
    tasks[i].s = s + i * lines_per_thread * tasks[i].sstride;

    tasks[i].width = width;
    /* clamp the last slice to the frame */
    tasks[i].height = (i + 1) * lines_per_thread;
    tasks[i].height = MIN (tasks[i].height, height);
    tasks[i].height -= i * lines_per_thread;

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_YUY2_Y444_task, (gpointer) tasks_p);

  convert_fill_border (convert, dest);
}
+
/* Worker: convert a slice of v210 lines to Y42B.
 * v210 packs 6 pixels (6 Y, 3 U, 3 V, 10 bits each) into four 32-bit
 * little-endian words (16 bytes); each 10-bit component is reduced to
 * 8 bits (>> 2).  Chroma stays at 4:2:2, so no vertical merging is
 * needed.  The tail conditionals guard widths not a multiple of 6. */
static void
convert_v210_Y42B_task (FConvertPlaneTask * task)
{
  gint i, j;
  guint8 *d_y, *d_u, *d_v;
  const guint8 *s;
  guint32 a0, a1, a2, a3;
  guint16 y0, y1, y2, y3, y4, y5;
  guint16 u0, u2, u4;
  guint16 v0, v2, v4;

  for (i = 0; i < task->height; i++) {
    d_y = task->d + i * task->dstride;
    d_u = task->du + i * task->dustride;
    d_v = task->dv + i * task->dvstride;
    s = task->s + i * task->sstride;

    for (j = 0; j < task->width; j += 6) {
      /* unpack one 6-pixel group (4 LE words) */
      a0 = GST_READ_UINT32_LE (s + (j / 6) * 16 + 0);
      a1 = GST_READ_UINT32_LE (s + (j / 6) * 16 + 4);
      a2 = GST_READ_UINT32_LE (s + (j / 6) * 16 + 8);
      a3 = GST_READ_UINT32_LE (s + (j / 6) * 16 + 12);

      u0 = ((a0 >> 0) & 0x3ff) >> 2;
      y0 = ((a0 >> 10) & 0x3ff) >> 2;
      v0 = ((a0 >> 20) & 0x3ff) >> 2;
      y1 = ((a1 >> 0) & 0x3ff) >> 2;

      u2 = ((a1 >> 10) & 0x3ff) >> 2;
      y2 = ((a1 >> 20) & 0x3ff) >> 2;
      v2 = ((a2 >> 0) & 0x3ff) >> 2;
      y3 = ((a2 >> 10) & 0x3ff) >> 2;

      u4 = ((a2 >> 20) & 0x3ff) >> 2;
      y4 = ((a3 >> 0) & 0x3ff) >> 2;
      v4 = ((a3 >> 10) & 0x3ff) >> 2;
      y5 = ((a3 >> 20) & 0x3ff) >> 2;

      d_y[j] = y0;
      d_u[j / 2] = u0;
      d_v[j / 2] = v0;

      if (j < task->width - 1) {
        d_y[j + 1] = y1;
      }

      if (j < task->width - 2) {
        d_y[j + 2] = y2;
        d_u[j / 2 + 1] = u2;
        d_v[j / 2 + 1] = v2;
      }

      if (j < task->width - 3) {
        d_y[j + 3] = y3;
      }

      if (j < task->width - 4) {
        d_y[j + 4] = y4;
        d_u[j / 2 + 2] = u4;
        d_v[j / 2 + 2] = v4;
      }

      if (j < task->width - 5) {
        d_y[j + 5] = y5;
      }
    }
  }
}
+
+ static void
+ convert_v210_Y42B (GstVideoConverter * convert, const GstVideoFrame * src,
+ GstVideoFrame * dest)
+ {
+ gint width = convert->in_width;
+ gint height = convert->in_height;
+ guint8 *s, *dy, *du, *dv;
+ FConvertPlaneTask *tasks;
+ FConvertPlaneTask **tasks_p;
+ gint n_threads;
+ gint lines_per_thread;
+ gint i;
+
+ s = FRAME_GET_LINE (src, convert->in_y);
+ s += (GST_ROUND_UP_2 (convert->in_x) * 2);
+
+ dy = FRAME_GET_Y_LINE (dest, convert->out_y);
+ dy += convert->out_x;
+ du = FRAME_GET_U_LINE (dest, convert->out_y);
+ du += convert->out_x >> 1;
+ dv = FRAME_GET_V_LINE (dest, convert->out_y);
+ dv += convert->out_x >> 1;
+
+ n_threads = convert->conversion_runner->n_threads;
+ tasks = convert->tasks[0] =
+ g_renew (FConvertPlaneTask, convert->tasks[0], n_threads);
+ tasks_p = convert->tasks_p[0] =
+ g_renew (FConvertPlaneTask *, convert->tasks_p[0], n_threads);
+
+ lines_per_thread = (height + n_threads - 1) / n_threads;
+
+ for (i = 0; i < n_threads; i++) {
+ tasks[i].dstride = FRAME_GET_Y_STRIDE (dest);
+ tasks[i].dustride = FRAME_GET_U_STRIDE (dest);
+ tasks[i].dvstride = FRAME_GET_V_STRIDE (dest);
+ tasks[i].sstride = FRAME_GET_STRIDE (src);
+ tasks[i].d = dy + i * lines_per_thread * tasks[i].dstride;
+ tasks[i].du = du + i * lines_per_thread * tasks[i].dustride;
+ tasks[i].dv = dv + i * lines_per_thread * tasks[i].dvstride;
+ tasks[i].s = s + i * lines_per_thread * tasks[i].sstride;
+
+ tasks[i].width = width;
+ tasks[i].height = (i + 1) * lines_per_thread;
+ tasks[i].height = MIN (tasks[i].height, height);
+ tasks[i].height -= i * lines_per_thread;
+
+ tasks_p[i] = &tasks[i];
+ }
+
+ gst_parallelized_task_runner_run (convert->conversion_runner,
+ (GstParallelizedTaskFunc) convert_v210_Y42B_task, (gpointer) tasks_p);
+
+ convert_fill_border (convert, dest);
+ }
+
static void
convert_UYVY_I420_task (FConvertTask * task)
{
  gint i;
  gint l1, l2;

  /* I420 has one chroma line per two luma lines, so consume the source two
   * lines at a time.  GET_LINE_OFFSETS selects the pair of lines to merge:
   * adjacent for progressive content, one line apart for interlaced so the
   * fields are not mixed. */
  for (i = task->height_0; i < task->height_1; i += 2) {
    GET_LINE_OFFSETS (task->interlaced, i, l1, l2);

    /* Two destination luma lines, one shared chroma line (i >> 1). */
    video_orc_convert_UYVY_I420 (FRAME_GET_COMP_LINE (task->dest, 0, l1),
        FRAME_GET_COMP_LINE (task->dest, 0, l2),
        FRAME_GET_COMP_LINE (task->dest, 1, i >> 1),
        FRAME_GET_COMP_LINE (task->dest, 2, i >> 1),
        FRAME_GET_LINE (task->src, l1), FRAME_GET_LINE (task->src, l2),
        (task->width + 1) / 2);
  }
}
+
static void
convert_UYVY_I420 (GstVideoConverter * convert, const GstVideoFrame * src,
    GstVideoFrame * dest)
{
  int i;
  gint width = convert->in_width;
  gint height = convert->in_height;
  /* Alternate-field streams carry a single field per frame, so they are
   * treated like progressive content here. */
  gboolean interlaced = GST_VIDEO_FRAME_IS_INTERLACED (src)
      && (GST_VIDEO_INFO_INTERLACE_MODE (&src->info) !=
      GST_VIDEO_INTERLACE_MODE_ALTERNATE);
  gint h2;
  FConvertTask *tasks;
  FConvertTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;

  /* I420 has half as many chroma lines, as such we have to
   * always merge two into one. For non-interlaced these are
   * the two next to each other, for interlaced one is skipped
   * in between. */
  if (interlaced)
    h2 = GST_ROUND_DOWN_4 (height);
  else
    h2 = GST_ROUND_DOWN_2 (height);

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[0] =
      g_renew (FConvertTask, convert->tasks[0], n_threads);
  tasks_p = convert->tasks_p[0] =
      g_renew (FConvertTask *, convert->tasks_p[0], n_threads);

  /* Round up to an even count so every thread starts on a line pair. */
  lines_per_thread = GST_ROUND_UP_2 ((h2 + n_threads - 1) / n_threads);

  for (i = 0; i < n_threads; i++) {
    tasks[i].src = src;
    tasks[i].dest = dest;

    tasks[i].interlaced = interlaced;
    tasks[i].width = width;

    /* Each task works on the half-open line range [height_0, height_1). */
    tasks[i].height_0 = i * lines_per_thread;
    tasks[i].height_1 = tasks[i].height_0 + lines_per_thread;
    tasks[i].height_1 = MIN (h2, tasks[i].height_1);

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_UYVY_I420_task, (gpointer) tasks_p);

  /* now handle last lines. For interlaced these are up to 3 */
  if (h2 != height) {
    for (i = h2; i < height; i++) {
      /* Fall back to the generic per-line unpack/pack path for the tail. */
      UNPACK_FRAME (src, convert->tmpline[0], i, convert->in_x, width);
      PACK_FRAME (dest, convert->tmpline[0], i, width);
    }
  }
}
+
+ static void
+ convert_UYVY_AYUV_task (FConvertPlaneTask * task)
+ {
+ video_orc_convert_UYVY_AYUV (task->d, task->dstride, task->s,
+ task->sstride, task->alpha, (task->width + 1) / 2, task->height);
+ }
+
+ static void
+ convert_UYVY_AYUV (GstVideoConverter * convert, const GstVideoFrame * src,
+ GstVideoFrame * dest)
+ {
+ gint width = convert->in_width;
+ gint height = convert->in_height;
+ guint8 *s, *d;
+ guint8 alpha = MIN (convert->alpha_value, 255);
+ FConvertPlaneTask *tasks;
+ FConvertPlaneTask **tasks_p;
+ gint n_threads;
+ gint lines_per_thread;
+ gint i;
+
+ s = FRAME_GET_LINE (src, convert->in_y);
+ s += (GST_ROUND_UP_2 (convert->in_x) * 2);
+ d = FRAME_GET_LINE (dest, convert->out_y);
+ d += (convert->out_x * 4);
+
+ n_threads = convert->conversion_runner->n_threads;
+ tasks = convert->tasks[0] =
+ g_renew (FConvertPlaneTask, convert->tasks[0], n_threads);
+ tasks_p = convert->tasks_p[0] =
+ g_renew (FConvertPlaneTask *, convert->tasks_p[0], n_threads);
+
+ lines_per_thread = (height + n_threads - 1) / n_threads;
+
+ for (i = 0; i < n_threads; i++) {
+ tasks[i].dstride = FRAME_GET_STRIDE (dest);
+ tasks[i].sstride = FRAME_GET_STRIDE (src);
+ tasks[i].d = d + i * lines_per_thread * tasks[i].dstride;
+ tasks[i].s = s + i * lines_per_thread * tasks[i].sstride;
+
+ tasks[i].width = width;
+ tasks[i].height = (i + 1) * lines_per_thread;
+ tasks[i].height = MIN (tasks[i].height, height);
+ tasks[i].height -= i * lines_per_thread;
+ tasks[i].alpha = alpha;
+
+ tasks_p[i] = &tasks[i];
+ }
+
+ gst_parallelized_task_runner_run (convert->conversion_runner,
+ (GstParallelizedTaskFunc) convert_UYVY_AYUV_task, (gpointer) tasks_p);
+
+ convert_fill_border (convert, dest);
+ }
+
+ static void
+ convert_UYVY_YUY2_task (FConvertPlaneTask * task)
+ {
+ video_orc_convert_UYVY_YUY2 (task->d, task->dstride, task->s,
+ task->sstride, (task->width + 1) / 2, task->height);
+ }
+
+ static void
+ convert_UYVY_YUY2 (GstVideoConverter * convert, const GstVideoFrame * src,
+ GstVideoFrame * dest)
+ {
+ gint width = convert->in_width;
+ gint height = convert->in_height;
+ guint8 *s, *d;
+ FConvertPlaneTask *tasks;
+ FConvertPlaneTask **tasks_p;
+ gint n_threads;
+ gint lines_per_thread;
+ gint i;
+
+ s = FRAME_GET_LINE (src, convert->in_y);
+ s += (GST_ROUND_UP_2 (convert->in_x) * 2);
+ d = FRAME_GET_LINE (dest, convert->out_y);
+ d += (GST_ROUND_UP_2 (convert->out_x) * 2);
+
+ n_threads = convert->conversion_runner->n_threads;
+ tasks = convert->tasks[0] =
+ g_renew (FConvertPlaneTask, convert->tasks[0], n_threads);
+ tasks_p = convert->tasks_p[0] =
+ g_renew (FConvertPlaneTask *, convert->tasks_p[0], n_threads);
+
+ lines_per_thread = (height + n_threads - 1) / n_threads;
+
+ for (i = 0; i < n_threads; i++) {
+ tasks[i].dstride = FRAME_GET_STRIDE (dest);
+ tasks[i].sstride = FRAME_GET_STRIDE (src);
+ tasks[i].d = d + i * lines_per_thread * tasks[i].dstride;
+ tasks[i].s = s + i * lines_per_thread * tasks[i].sstride;
+
+ tasks[i].width = width;
+ tasks[i].height = (i + 1) * lines_per_thread;
+ tasks[i].height = MIN (tasks[i].height, height);
+ tasks[i].height -= i * lines_per_thread;
+
+ tasks_p[i] = &tasks[i];
+ }
+
+ gst_parallelized_task_runner_run (convert->conversion_runner,
+ (GstParallelizedTaskFunc) convert_UYVY_YUY2_task, (gpointer) tasks_p);
+
+ convert_fill_border (convert, dest);
+ }
+
static void
convert_v210_UYVY_task (FConvertPlaneTask * task)
{
  /* Unpack one band of v210 (10-bit packed 4:2:2) into 8-bit packed UYVY.
   * Each 16-byte source group holds 6 pixels as four little-endian 32-bit
   * words, every word carrying three 10-bit components in its low 30 bits. */
  gint i, j;
  guint8 *d;
  const guint8 *s;
  guint32 a0, a1, a2, a3;
  guint16 y0, y1, y2, y3, y4, y5;
  guint16 u0, u2, u4;
  guint16 v0, v2, v4;

  for (i = 0; i < task->height; i++) {
    d = task->d + i * task->dstride;
    s = task->s + i * task->sstride;

    /* 6 pixels per iteration, matching the 16-byte v210 group. */
    for (j = 0; j < task->width; j += 6) {
      a0 = GST_READ_UINT32_LE (s + (j / 6) * 16 + 0);
      a1 = GST_READ_UINT32_LE (s + (j / 6) * 16 + 4);
      a2 = GST_READ_UINT32_LE (s + (j / 6) * 16 + 8);
      a3 = GST_READ_UINT32_LE (s + (j / 6) * 16 + 12);

      /* Extract each 10-bit component; the final >> 2 reduces it to 8-bit. */
      u0 = ((a0 >> 0) & 0x3ff) >> 2;
      y0 = ((a0 >> 10) & 0x3ff) >> 2;
      v0 = ((a0 >> 20) & 0x3ff) >> 2;
      y1 = ((a1 >> 0) & 0x3ff) >> 2;

      u2 = ((a1 >> 10) & 0x3ff) >> 2;
      y2 = ((a1 >> 20) & 0x3ff) >> 2;
      v2 = ((a2 >> 0) & 0x3ff) >> 2;
      y3 = ((a2 >> 10) & 0x3ff) >> 2;

      u4 = ((a2 >> 20) & 0x3ff) >> 2;
      y4 = ((a3 >> 0) & 0x3ff) >> 2;
      v4 = ((a3 >> 10) & 0x3ff) >> 2;
      y5 = ((a3 >> 20) & 0x3ff) >> 2;

      /* Interleave as U Y V Y (UYVY); the guards avoid writing past the
       * row when width is not a multiple of 6. */
      d[2 * j + 1] = y0;
      d[2 * j] = u0;
      d[2 * j + 2] = v0;

      if (j < task->width - 1) {
        d[2 * j + 3] = y1;
      }

      if (j < task->width - 2) {
        d[2 * j + 5] = y2;
        d[2 * j + 4] = u2;
        d[2 * j + 6] = v2;
      }

      if (j < task->width - 3) {
        d[2 * j + 7] = y3;
      }

      if (j < task->width - 4) {
        d[2 * j + 9] = y4;
        d[2 * j + 8] = u4;
        d[2 * j + 10] = v4;
      }

      if (j < task->width - 5) {
        d[2 * j + 11] = y5;
      }
    }
  }
}
+
+ static void
+ convert_v210_UYVY (GstVideoConverter * convert, const GstVideoFrame * src,
+ GstVideoFrame * dest)
+ {
+ gint width = convert->in_width;
+ gint height = convert->in_height;
+ guint8 *s, *d;
+ FConvertPlaneTask *tasks;
+ FConvertPlaneTask **tasks_p;
+ gint n_threads;
+ gint lines_per_thread;
+ gint i;
+
+ s = FRAME_GET_LINE (src, convert->in_y);
+ s += (GST_ROUND_UP_2 (convert->in_x) * 2);
+ d = FRAME_GET_LINE (dest, convert->out_y);
+ d += (GST_ROUND_UP_2 (convert->out_x) * 2);
+
+ n_threads = convert->conversion_runner->n_threads;
+ tasks = convert->tasks[0] =
+ g_renew (FConvertPlaneTask, convert->tasks[0], n_threads);
+ tasks_p = convert->tasks_p[0] =
+ g_renew (FConvertPlaneTask *, convert->tasks_p[0], n_threads);
+
+ lines_per_thread = (height + n_threads - 1) / n_threads;
+
+ for (i = 0; i < n_threads; i++) {
+ tasks[i].dstride = FRAME_GET_STRIDE (dest);
+ tasks[i].sstride = FRAME_GET_STRIDE (src);
+ tasks[i].d = d + i * lines_per_thread * tasks[i].dstride;
+ tasks[i].s = s + i * lines_per_thread * tasks[i].sstride;
+
+ tasks[i].width = width;
+ tasks[i].height = (i + 1) * lines_per_thread;
+ tasks[i].height = MIN (tasks[i].height, height);
+ tasks[i].height -= i * lines_per_thread;
+
+ tasks_p[i] = &tasks[i];
+ }
+
+ gst_parallelized_task_runner_run (convert->conversion_runner,
+ (GstParallelizedTaskFunc) convert_v210_UYVY_task, (gpointer) tasks_p);
+
+ convert_fill_border (convert, dest);
+ }
+
static void
convert_v210_YUY2_task (FConvertPlaneTask * task)
{
  /* Unpack one band of v210 (10-bit packed 4:2:2) into 8-bit packed YUY2.
   * Each 16-byte source group holds 6 pixels as four little-endian 32-bit
   * words, every word carrying three 10-bit components in its low 30 bits. */
  gint i, j;
  guint8 *d;
  const guint8 *s;
  guint32 a0, a1, a2, a3;
  guint16 y0, y1, y2, y3, y4, y5;
  guint16 u0, u2, u4;
  guint16 v0, v2, v4;

  for (i = 0; i < task->height; i++) {
    d = task->d + i * task->dstride;
    s = task->s + i * task->sstride;

    /* 6 pixels per iteration, matching the 16-byte v210 group. */
    for (j = 0; j < task->width; j += 6) {
      a0 = GST_READ_UINT32_LE (s + (j / 6) * 16 + 0);
      a1 = GST_READ_UINT32_LE (s + (j / 6) * 16 + 4);
      a2 = GST_READ_UINT32_LE (s + (j / 6) * 16 + 8);
      a3 = GST_READ_UINT32_LE (s + (j / 6) * 16 + 12);

      /* Extract each 10-bit component; the final >> 2 reduces it to 8-bit. */
      u0 = ((a0 >> 0) & 0x3ff) >> 2;
      y0 = ((a0 >> 10) & 0x3ff) >> 2;
      v0 = ((a0 >> 20) & 0x3ff) >> 2;
      y1 = ((a1 >> 0) & 0x3ff) >> 2;

      u2 = ((a1 >> 10) & 0x3ff) >> 2;
      y2 = ((a1 >> 20) & 0x3ff) >> 2;
      v2 = ((a2 >> 0) & 0x3ff) >> 2;
      y3 = ((a2 >> 10) & 0x3ff) >> 2;

      u4 = ((a2 >> 20) & 0x3ff) >> 2;
      y4 = ((a3 >> 0) & 0x3ff) >> 2;
      v4 = ((a3 >> 10) & 0x3ff) >> 2;
      y5 = ((a3 >> 20) & 0x3ff) >> 2;

      /* Interleave as Y U Y V (YUY2); the guards avoid writing past the
       * row when width is not a multiple of 6. */
      d[2 * j] = y0;
      d[2 * j + 1] = u0;
      d[2 * j + 3] = v0;

      if (j < task->width - 1) {
        d[2 * j + 2] = y1;
      }

      if (j < task->width - 2) {
        d[2 * j + 4] = y2;
        d[2 * j + 5] = u2;
        d[2 * j + 7] = v2;
      }

      if (j < task->width - 3) {
        d[2 * j + 6] = y3;
      }

      if (j < task->width - 4) {
        d[2 * j + 8] = y4;
        d[2 * j + 9] = u4;
        d[2 * j + 11] = v4;
      }

      if (j < task->width - 5) {
        d[2 * j + 10] = y5;
      }
    }
  }
}
+
+ static void
+ convert_v210_YUY2 (GstVideoConverter * convert, const GstVideoFrame * src,
+ GstVideoFrame * dest)
+ {
+ gint width = convert->in_width;
+ gint height = convert->in_height;
+ guint8 *s, *d;
+ FConvertPlaneTask *tasks;
+ FConvertPlaneTask **tasks_p;
+ gint n_threads;
+ gint lines_per_thread;
+ gint i;
+
+ s = FRAME_GET_LINE (src, convert->in_y);
+ s += (GST_ROUND_UP_2 (convert->in_x) * 2);
+ d = FRAME_GET_LINE (dest, convert->out_y);
+ d += (GST_ROUND_UP_2 (convert->out_x) * 2);
+
+ n_threads = convert->conversion_runner->n_threads;
+ tasks = convert->tasks[0] =
+ g_renew (FConvertPlaneTask, convert->tasks[0], n_threads);
+ tasks_p = convert->tasks_p[0] =
+ g_renew (FConvertPlaneTask *, convert->tasks_p[0], n_threads);
+
+ lines_per_thread = (height + n_threads - 1) / n_threads;
+
+ for (i = 0; i < n_threads; i++) {
+ tasks[i].dstride = FRAME_GET_STRIDE (dest);
+ tasks[i].sstride = FRAME_GET_STRIDE (src);
+ tasks[i].d = d + i * lines_per_thread * tasks[i].dstride;
+ tasks[i].s = s + i * lines_per_thread * tasks[i].sstride;
+
+ tasks[i].width = width;
+ tasks[i].height = (i + 1) * lines_per_thread;
+ tasks[i].height = MIN (tasks[i].height, height);
+ tasks[i].height -= i * lines_per_thread;
+
+ tasks_p[i] = &tasks[i];
+ }
+
+ gst_parallelized_task_runner_run (convert->conversion_runner,
+ (GstParallelizedTaskFunc) convert_v210_YUY2_task, (gpointer) tasks_p);
+
+ convert_fill_border (convert, dest);
+ }
+
+ static void
+ convert_UYVY_Y42B_task (FConvertPlaneTask * task)
+ {
+ video_orc_convert_UYVY_Y42B (task->d, task->dstride, task->du,
+ task->dustride, task->dv, task->dvstride,
+ task->s, task->sstride, (task->width + 1) / 2, task->height);
+ }
+
+ static void
+ convert_UYVY_Y42B (GstVideoConverter * convert, const GstVideoFrame * src,
+ GstVideoFrame * dest)
+ {
+ gint width = convert->in_width;
+ gint height = convert->in_height;
+ guint8 *s, *dy, *du, *dv;
+ FConvertPlaneTask *tasks;
+ FConvertPlaneTask **tasks_p;
+ gint n_threads;
+ gint lines_per_thread;
+ gint i;
+
+ s = FRAME_GET_LINE (src, convert->in_y);
+ s += (GST_ROUND_UP_2 (convert->in_x) * 2);
+
+ dy = FRAME_GET_Y_LINE (dest, convert->out_y);
+ dy += convert->out_x;
+ du = FRAME_GET_U_LINE (dest, convert->out_y);
+ du += convert->out_x >> 1;
+ dv = FRAME_GET_V_LINE (dest, convert->out_y);
+ dv += convert->out_x >> 1;
+
+ n_threads = convert->conversion_runner->n_threads;
+ tasks = convert->tasks[0] =
+ g_renew (FConvertPlaneTask, convert->tasks[0], n_threads);
+ tasks_p = convert->tasks_p[0] =
+ g_renew (FConvertPlaneTask *, convert->tasks_p[0], n_threads);
+
+ lines_per_thread = (height + n_threads - 1) / n_threads;
+
+ for (i = 0; i < n_threads; i++) {
+ tasks[i].dstride = FRAME_GET_Y_STRIDE (dest);
+ tasks[i].dustride = FRAME_GET_U_STRIDE (dest);
+ tasks[i].dvstride = FRAME_GET_V_STRIDE (dest);
+ tasks[i].sstride = FRAME_GET_STRIDE (src);
+ tasks[i].d = dy + i * lines_per_thread * tasks[i].dstride;
+ tasks[i].du = du + i * lines_per_thread * tasks[i].dustride;
+ tasks[i].dv = dv + i * lines_per_thread * tasks[i].dvstride;
+ tasks[i].s = s + i * lines_per_thread * tasks[i].sstride;
+
+ tasks[i].width = width;
+ tasks[i].height = (i + 1) * lines_per_thread;
+ tasks[i].height = MIN (tasks[i].height, height);
+ tasks[i].height -= i * lines_per_thread;
+
+ tasks_p[i] = &tasks[i];
+ }
+
+ gst_parallelized_task_runner_run (convert->conversion_runner,
+ (GstParallelizedTaskFunc) convert_UYVY_Y42B_task, (gpointer) tasks_p);
+
+ convert_fill_border (convert, dest);
+ }
+
+ static void
+ convert_UYVY_Y444_task (FConvertPlaneTask * task)
+ {
+ video_orc_convert_UYVY_Y444 (task->d,
+ task->dstride, task->du,
+ task->dustride, task->dv,
+ task->dvstride, task->s,
+ task->sstride, (task->width + 1) / 2, task->height);
+ }
+
+ static void
+ convert_UYVY_Y444 (GstVideoConverter * convert, const GstVideoFrame * src,
+ GstVideoFrame * dest)
+ {
+ gint width = convert->in_width;
+ gint height = convert->in_height;
+ guint8 *s, *dy, *du, *dv;
+ FConvertPlaneTask *tasks;
+ FConvertPlaneTask **tasks_p;
+ gint n_threads;
+ gint lines_per_thread;
+ gint i;
+
+ s = FRAME_GET_LINE (src, convert->in_y);
+ s += (GST_ROUND_UP_2 (convert->in_x) * 2);
+
+ dy = FRAME_GET_Y_LINE (dest, convert->out_y);
+ dy += convert->out_x;
+ du = FRAME_GET_U_LINE (dest, convert->out_y);
+ du += convert->out_x;
+ dv = FRAME_GET_V_LINE (dest, convert->out_y);
+ dv += convert->out_x;
+
+ n_threads = convert->conversion_runner->n_threads;
+ tasks = convert->tasks[0] =
+ g_renew (FConvertPlaneTask, convert->tasks[0], n_threads);
+ tasks_p = convert->tasks_p[0] =
+ g_renew (FConvertPlaneTask *, convert->tasks_p[0], n_threads);
+
+ lines_per_thread = (height + n_threads - 1) / n_threads;
+
+ for (i = 0; i < n_threads; i++) {
+ tasks[i].dstride = FRAME_GET_Y_STRIDE (dest);
+ tasks[i].dustride = FRAME_GET_U_STRIDE (dest);
+ tasks[i].dvstride = FRAME_GET_V_STRIDE (dest);
+ tasks[i].sstride = FRAME_GET_STRIDE (src);
+ tasks[i].d = dy + i * lines_per_thread * tasks[i].dstride;
+ tasks[i].du = du + i * lines_per_thread * tasks[i].dustride;
+ tasks[i].dv = dv + i * lines_per_thread * tasks[i].dvstride;
+ tasks[i].s = s + i * lines_per_thread * tasks[i].sstride;
+
+ tasks[i].width = width;
+ tasks[i].height = (i + 1) * lines_per_thread;
+ tasks[i].height = MIN (tasks[i].height, height);
+ tasks[i].height -= i * lines_per_thread;
+
+ tasks_p[i] = &tasks[i];
+ }
+
+ gst_parallelized_task_runner_run (convert->conversion_runner,
+ (GstParallelizedTaskFunc) convert_UYVY_Y444_task, (gpointer) tasks_p);
+
+ convert_fill_border (convert, dest);
+ }
+
+ static void
+ convert_UYVY_GRAY8_task (FConvertPlaneTask * task)
+ {
+ video_orc_convert_UYVY_GRAY8 (task->d, task->dstride, (guint16 *) task->s,
+ task->sstride, task->width, task->height);
+ }
+
+ static void
+ convert_UYVY_GRAY8 (GstVideoConverter * convert, const GstVideoFrame * src,
+ GstVideoFrame * dest)
+ {
+ gint width = convert->in_width;
+ gint height = convert->in_height;
+ guint8 *s;
+ guint8 *d;
+ FConvertPlaneTask *tasks;
+ FConvertPlaneTask **tasks_p;
+ gint n_threads;
+ gint lines_per_thread;
+ gint i;
+
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
+
+ n_threads = convert->conversion_runner->n_threads;
+ tasks = convert->tasks[0] =
+ g_renew (FConvertPlaneTask, convert->tasks[0], n_threads);
+ tasks_p = convert->tasks_p[0] =
+ g_renew (FConvertPlaneTask *, convert->tasks_p[0], n_threads);
+
+ lines_per_thread = (height + n_threads - 1) / n_threads;
+
+ for (i = 0; i < n_threads; i++) {
+ tasks[i].dstride = FRAME_GET_STRIDE (dest);
+ tasks[i].sstride = FRAME_GET_STRIDE (src);
+ tasks[i].d = d + i * lines_per_thread * tasks[i].dstride;
+ tasks[i].s = s + i * lines_per_thread * tasks[i].sstride;
+
+ tasks[i].width = width;
+ tasks[i].height = (i + 1) * lines_per_thread;
+ tasks[i].height = MIN (tasks[i].height, height);
+ tasks[i].height -= i * lines_per_thread;
+
+ tasks_p[i] = &tasks[i];
+ }
+
+ gst_parallelized_task_runner_run (convert->conversion_runner,
+ (GstParallelizedTaskFunc) convert_UYVY_GRAY8_task, (gpointer) tasks_p);
+
+ convert_fill_border (convert, dest);
+ }
+
static void
convert_AYUV_I420_task (FConvertPlaneTask * task)
{
  /* Convert one band of packed AYUV into I420.  The kernel processes two
   * rows per iteration — the luma/source strides are doubled and the row
   * count halved — so each pass produces two luma lines and one 2x2
   * subsampled chroma line.  Assumes even width and height. */
  video_orc_convert_AYUV_I420 (task->d,
      2 * task->dstride, task->d2,
      2 * task->dstride, task->du,
      task->dustride, task->dv,
      task->dvstride, task->s,
      2 * task->sstride, task->s2,
      2 * task->sstride, task->width / 2, task->height / 2);
}
+
static void
convert_AYUV_I420 (GstVideoConverter * convert, const GstVideoFrame * src,
    GstVideoFrame * dest)
{
  gint width = convert->in_width;
  gint height = convert->in_height;
  guint8 *s1, *s2, *dy1, *dy2, *du, *dv;
  FConvertPlaneTask *tasks;
  FConvertPlaneTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;
  gint i;

  /* Two consecutive source rows (AYUV is 4 bytes/pixel): the task kernel
   * consumes them pairwise to produce one subsampled chroma line. */
  s1 = FRAME_GET_LINE (src, convert->in_y + 0);
  s1 += convert->in_x * 4;
  s2 = FRAME_GET_LINE (src, convert->in_y + 1);
  s2 += convert->in_x * 4;

  dy1 = FRAME_GET_Y_LINE (dest, convert->out_y + 0);
  dy1 += convert->out_x;
  dy2 = FRAME_GET_Y_LINE (dest, convert->out_y + 1);
  dy2 += convert->out_x;
  /* I420 chroma is 2x2 subsampled, hence the halved offsets. */
  du = FRAME_GET_U_LINE (dest, convert->out_y >> 1);
  du += convert->out_x >> 1;
  dv = FRAME_GET_V_LINE (dest, convert->out_y >> 1);
  dv += convert->out_x >> 1;

  /* only for even width/height */

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[0] =
      g_renew (FConvertPlaneTask, convert->tasks[0], n_threads);
  tasks_p = convert->tasks_p[0] =
      g_renew (FConvertPlaneTask *, convert->tasks_p[0], n_threads);

  /* Rounded up to an even count so each thread starts on a line pair. */
  lines_per_thread = GST_ROUND_UP_2 ((height + n_threads - 1) / n_threads);

  for (i = 0; i < n_threads; i++) {
    tasks[i].dstride = FRAME_GET_Y_STRIDE (dest);
    tasks[i].dustride = FRAME_GET_U_STRIDE (dest);
    tasks[i].dvstride = FRAME_GET_V_STRIDE (dest);
    tasks[i].sstride = FRAME_GET_STRIDE (src);
    tasks[i].d = dy1 + i * lines_per_thread * tasks[i].dstride;
    tasks[i].d2 = dy2 + i * lines_per_thread * tasks[i].dstride;
    /* lines_per_thread counts luma lines; chroma advances one line per
     * two luma lines, hence the division by 2. */
    tasks[i].du = du + i * lines_per_thread * tasks[i].dustride / 2;
    tasks[i].dv = dv + i * lines_per_thread * tasks[i].dvstride / 2;
    tasks[i].s = s1 + i * lines_per_thread * tasks[i].sstride;
    tasks[i].s2 = s2 + i * lines_per_thread * tasks[i].sstride;

    tasks[i].width = width;
    tasks[i].height = (i + 1) * lines_per_thread;
    tasks[i].height = MIN (tasks[i].height, height);
    tasks[i].height -= i * lines_per_thread;

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_AYUV_I420_task, (gpointer) tasks_p);

  convert_fill_border (convert, dest);
}
+
+ static void
+ convert_AYUV_YUY2_task (FConvertPlaneTask * task)
+ {
+ video_orc_convert_AYUV_YUY2 (task->d, task->dstride, task->s,
+ task->sstride, task->width / 2, task->height);
+ }
+
+ static void
+ convert_AYUV_YUY2 (GstVideoConverter * convert, const GstVideoFrame * src,
+ GstVideoFrame * dest)
+ {
+ gint width = convert->in_width;
+ gint height = convert->in_height;
+ guint8 *s, *d;
+ FConvertPlaneTask *tasks;
+ FConvertPlaneTask **tasks_p;
+ gint n_threads;
+ gint lines_per_thread;
+ gint i;
+
+ s = FRAME_GET_LINE (src, convert->in_y);
+ s += convert->in_x * 4;
+ d = FRAME_GET_LINE (dest, convert->out_y);
+ d += (GST_ROUND_UP_2 (convert->out_x) * 2);
+
+ /* only for even width */
+ n_threads = convert->conversion_runner->n_threads;
+ tasks = convert->tasks[0] =
+ g_renew (FConvertPlaneTask, convert->tasks[0], n_threads);
+ tasks_p = convert->tasks_p[0] =
+ g_renew (FConvertPlaneTask *, convert->tasks_p[0], n_threads);
+
+ lines_per_thread = (height + n_threads - 1) / n_threads;
+
+ for (i = 0; i < n_threads; i++) {
+ tasks[i].dstride = FRAME_GET_STRIDE (dest);
+ tasks[i].sstride = FRAME_GET_STRIDE (src);
+ tasks[i].d = d + i * lines_per_thread * tasks[i].dstride;
+ tasks[i].s = s + i * lines_per_thread * tasks[i].sstride;
+
+ tasks[i].width = width;
+ tasks[i].height = (i + 1) * lines_per_thread;
+ tasks[i].height = MIN (tasks[i].height, height);
+ tasks[i].height -= i * lines_per_thread;
+
+ tasks_p[i] = &tasks[i];
+ }
+
+ gst_parallelized_task_runner_run (convert->conversion_runner,
+ (GstParallelizedTaskFunc) convert_AYUV_YUY2_task, (gpointer) tasks_p);
+
+ convert_fill_border (convert, dest);
+ }
+
+ static void
+ convert_AYUV_UYVY_task (FConvertPlaneTask * task)
+ {
+ video_orc_convert_AYUV_UYVY (task->d, task->dstride, task->s,
+ task->sstride, task->width / 2, task->height);
+ }
+
+ static void
+ convert_AYUV_UYVY (GstVideoConverter * convert, const GstVideoFrame * src,
+ GstVideoFrame * dest)
+ {
+ gint width = convert->in_width;
+ gint height = convert->in_height;
+ guint8 *s, *d;
+ FConvertPlaneTask *tasks;
+ FConvertPlaneTask **tasks_p;
+ gint n_threads;
+ gint lines_per_thread;
+ gint i;
+
+ s = FRAME_GET_LINE (src, convert->in_y);
+ s += convert->in_x * 4;
+ d = FRAME_GET_LINE (dest, convert->out_y);
+ d += (GST_ROUND_UP_2 (convert->out_x) * 2);
+
+ /* only for even width */
+ n_threads = convert->conversion_runner->n_threads;
+ tasks = convert->tasks[0] =
+ g_renew (FConvertPlaneTask, convert->tasks[0], n_threads);
+ tasks_p = convert->tasks_p[0] =
+ g_renew (FConvertPlaneTask *, convert->tasks_p[0], n_threads);
+
+ lines_per_thread = (height + n_threads - 1) / n_threads;
+
+ for (i = 0; i < n_threads; i++) {
+ tasks[i].dstride = FRAME_GET_STRIDE (dest);
+ tasks[i].sstride = FRAME_GET_STRIDE (src);
+ tasks[i].d = d + i * lines_per_thread * tasks[i].dstride;
+ tasks[i].s = s + i * lines_per_thread * tasks[i].sstride;
+
+ tasks[i].width = width;
+ tasks[i].height = (i + 1) * lines_per_thread;
+ tasks[i].height = MIN (tasks[i].height, height);
+ tasks[i].height -= i * lines_per_thread;
+
+ tasks_p[i] = &tasks[i];
+ }
+
+ gst_parallelized_task_runner_run (convert->conversion_runner,
+ (GstParallelizedTaskFunc) convert_AYUV_UYVY_task, (gpointer) tasks_p);
+
+ convert_fill_border (convert, dest);
+ }
+
+ static void
+ convert_AYUV_Y42B_task (FConvertPlaneTask * task)
+ {
+ video_orc_convert_AYUV_Y42B (task->d, task->dstride, task->du,
+ task->dustride, task->dv, task->dvstride,
+ task->s, task->sstride, task->width / 2, task->height);
+ }
+
+ static void
+ convert_AYUV_Y42B (GstVideoConverter * convert, const GstVideoFrame * src,
+ GstVideoFrame * dest)
+ {
+ gint width = convert->in_width;
+ gint height = convert->in_height;
+ guint8 *s, *dy, *du, *dv;
+ FConvertPlaneTask *tasks;
+ FConvertPlaneTask **tasks_p;
+ gint n_threads;
+ gint lines_per_thread;
+ gint i;
+
+ s = FRAME_GET_LINE (src, convert->in_y);
+ s += convert->in_x * 4;
+
+ dy = FRAME_GET_Y_LINE (dest, convert->out_y);
+ dy += convert->out_x;
+ du = FRAME_GET_U_LINE (dest, convert->out_y);
+ du += convert->out_x >> 1;
+ dv = FRAME_GET_V_LINE (dest, convert->out_y);
+ dv += convert->out_x >> 1;
+
+ /* only works for even width */
+ n_threads = convert->conversion_runner->n_threads;
+ tasks = convert->tasks[0] =
+ g_renew (FConvertPlaneTask, convert->tasks[0], n_threads);
+ tasks_p = convert->tasks_p[0] =
+ g_renew (FConvertPlaneTask *, convert->tasks_p[0], n_threads);
+
+ lines_per_thread = (height + n_threads - 1) / n_threads;
+
+ for (i = 0; i < n_threads; i++) {
+ tasks[i].dstride = FRAME_GET_Y_STRIDE (dest);
+ tasks[i].dustride = FRAME_GET_U_STRIDE (dest);
+ tasks[i].dvstride = FRAME_GET_V_STRIDE (dest);
+ tasks[i].sstride = FRAME_GET_STRIDE (src);
+ tasks[i].d = dy + i * lines_per_thread * tasks[i].dstride;
+ tasks[i].du = du + i * lines_per_thread * tasks[i].dustride;
+ tasks[i].dv = dv + i * lines_per_thread * tasks[i].dvstride;
+ tasks[i].s = s + i * lines_per_thread * tasks[i].sstride;
+
+ tasks[i].width = width;
+ tasks[i].height = (i + 1) * lines_per_thread;
+ tasks[i].height = MIN (tasks[i].height, height);
+ tasks[i].height -= i * lines_per_thread;
+
+ tasks_p[i] = &tasks[i];
+ }
+
+ gst_parallelized_task_runner_run (convert->conversion_runner,
+ (GstParallelizedTaskFunc) convert_AYUV_Y42B_task, (gpointer) tasks_p);
+
+ convert_fill_border (convert, dest);
+ }
+
+ static void
+ convert_AYUV_Y444_task (FConvertPlaneTask * task)
+ {
+ video_orc_convert_AYUV_Y444 (task->d, task->dstride, task->du,
+ task->dustride, task->dv, task->dvstride,
+ task->s, task->sstride, task->width, task->height);
+ }
+
+ static void
+ convert_AYUV_Y444 (GstVideoConverter * convert, const GstVideoFrame * src,
+ GstVideoFrame * dest)
+ {
+ gint width = convert->in_width;
+ gint height = convert->in_height;
+ guint8 *s, *dy, *du, *dv;
+ FConvertPlaneTask *tasks;
+ FConvertPlaneTask **tasks_p;
+ gint n_threads;
+ gint lines_per_thread;
+ gint i;
+
+ s = FRAME_GET_LINE (src, convert->in_y);
+ s += convert->in_x * 4;
+
+ dy = FRAME_GET_Y_LINE (dest, convert->out_y);
+ dy += convert->out_x;
+ du = FRAME_GET_U_LINE (dest, convert->out_y);
+ du += convert->out_x;
+ dv = FRAME_GET_V_LINE (dest, convert->out_y);
+ dv += convert->out_x;
+
+ n_threads = convert->conversion_runner->n_threads;
+ tasks = convert->tasks[0] =
+ g_renew (FConvertPlaneTask, convert->tasks[0], n_threads);
+ tasks_p = convert->tasks_p[0] =
+ g_renew (FConvertPlaneTask *, convert->tasks_p[0], n_threads);
+
+ lines_per_thread = (height + n_threads - 1) / n_threads;
+
+ for (i = 0; i < n_threads; i++) {
+ tasks[i].dstride = FRAME_GET_Y_STRIDE (dest);
+ tasks[i].dustride = FRAME_GET_U_STRIDE (dest);
+ tasks[i].dvstride = FRAME_GET_V_STRIDE (dest);
+ tasks[i].sstride = FRAME_GET_STRIDE (src);
+ tasks[i].d = dy + i * lines_per_thread * tasks[i].dstride;
+ tasks[i].du = du + i * lines_per_thread * tasks[i].dustride;
+ tasks[i].dv = dv + i * lines_per_thread * tasks[i].dvstride;
+ tasks[i].s = s + i * lines_per_thread * tasks[i].sstride;
+
+ tasks[i].width = width;
+ tasks[i].height = (i + 1) * lines_per_thread;
+ tasks[i].height = MIN (tasks[i].height, height);
+ tasks[i].height -= i * lines_per_thread;
+
+ tasks_p[i] = &tasks[i];
+ }
+
+ gst_parallelized_task_runner_run (convert->conversion_runner,
+ (GstParallelizedTaskFunc) convert_AYUV_Y444_task, (gpointer) tasks_p);
+ convert_fill_border (convert, dest);
+ }
+
+ static void
+ convert_Y42B_YUY2_task (FConvertPlaneTask * task)
+ {
+ video_orc_convert_Y42B_YUY2 (task->d, task->dstride,
+ task->s, task->sstride,
+ task->su, task->sustride,
+ task->sv, task->svstride, (task->width + 1) / 2, task->height);
+ }
+
/* Convert a Y42B (planar 4:2:2) frame to packed YUY2, splitting the
 * frame row-wise into one contiguous band per worker thread. */
static void
convert_Y42B_YUY2 (GstVideoConverter * convert, const GstVideoFrame * src,
    GstVideoFrame * dest)
{
  gint width = convert->in_width;
  gint height = convert->in_height;
  guint8 *sy, *su, *sv, *d;
  FConvertPlaneTask *tasks;
  FConvertPlaneTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;
  gint i;

  /* Source plane pointers at the top of the crop region; chroma is
   * horizontally subsampled by 2, hence the in_x >> 1. */
  sy = FRAME_GET_Y_LINE (src, convert->in_y);
  sy += convert->in_x;
  su = FRAME_GET_U_LINE (src, convert->in_y);
  su += convert->in_x >> 1;
  sv = FRAME_GET_V_LINE (src, convert->in_y);
  sv += convert->in_x >> 1;

  /* Packed 4:2:2 destination: 2 bytes/pixel, x aligned to even. */
  d = FRAME_GET_LINE (dest, convert->out_y);
  d += (GST_ROUND_UP_2 (convert->out_x) * 2);

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[0] =
      g_renew (FConvertPlaneTask, convert->tasks[0], n_threads);
  tasks_p = convert->tasks_p[0] =
      g_renew (FConvertPlaneTask *, convert->tasks_p[0], n_threads);

  lines_per_thread = (height + n_threads - 1) / n_threads;

  for (i = 0; i < n_threads; i++) {
    tasks[i].dstride = FRAME_GET_STRIDE (dest);
    tasks[i].sstride = FRAME_GET_Y_STRIDE (src);
    tasks[i].sustride = FRAME_GET_U_STRIDE (src);
    tasks[i].svstride = FRAME_GET_V_STRIDE (src);
    tasks[i].d = d + i * lines_per_thread * tasks[i].dstride;
    tasks[i].s = sy + i * lines_per_thread * tasks[i].sstride;
    tasks[i].su = su + i * lines_per_thread * tasks[i].sustride;
    tasks[i].sv = sv + i * lines_per_thread * tasks[i].svstride;

    tasks[i].width = width;
    /* Clamp the last band so the sum of band heights equals height. */
    tasks[i].height = (i + 1) * lines_per_thread;
    tasks[i].height = MIN (tasks[i].height, height);
    tasks[i].height -= i * lines_per_thread;

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_Y42B_YUY2_task, (gpointer) tasks_p);

  convert_fill_border (convert, dest);
}
+
+ static void
+ convert_Y42B_UYVY_task (FConvertPlaneTask * task)
+ {
+ video_orc_convert_Y42B_UYVY (task->d, task->dstride,
+ task->s, task->sstride,
+ task->su, task->sustride,
+ task->sv, task->svstride, (task->width + 1) / 2, task->height);
+ }
+
/* Convert a Y42B (planar 4:2:2) frame to packed UYVY, splitting the
 * frame row-wise into one contiguous band per worker thread. */
static void
convert_Y42B_UYVY (GstVideoConverter * convert, const GstVideoFrame * src,
    GstVideoFrame * dest)
{
  gint width = convert->in_width;
  gint height = convert->in_height;
  guint8 *sy, *su, *sv, *d;
  FConvertPlaneTask *tasks;
  FConvertPlaneTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;
  gint i;

  /* Chroma is horizontally subsampled by 2, hence the in_x >> 1. */
  sy = FRAME_GET_Y_LINE (src, convert->in_y);
  sy += convert->in_x;
  su = FRAME_GET_U_LINE (src, convert->in_y);
  su += convert->in_x >> 1;
  sv = FRAME_GET_V_LINE (src, convert->in_y);
  sv += convert->in_x >> 1;

  /* Packed 4:2:2 destination: 2 bytes/pixel, x aligned to even. */
  d = FRAME_GET_LINE (dest, convert->out_y);
  d += (GST_ROUND_UP_2 (convert->out_x) * 2);

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[0] =
      g_renew (FConvertPlaneTask, convert->tasks[0], n_threads);
  tasks_p = convert->tasks_p[0] =
      g_renew (FConvertPlaneTask *, convert->tasks_p[0], n_threads);

  lines_per_thread = (height + n_threads - 1) / n_threads;

  for (i = 0; i < n_threads; i++) {
    tasks[i].dstride = FRAME_GET_STRIDE (dest);
    tasks[i].sstride = FRAME_GET_Y_STRIDE (src);
    tasks[i].sustride = FRAME_GET_U_STRIDE (src);
    tasks[i].svstride = FRAME_GET_V_STRIDE (src);
    tasks[i].d = d + i * lines_per_thread * tasks[i].dstride;
    tasks[i].s = sy + i * lines_per_thread * tasks[i].sstride;
    tasks[i].su = su + i * lines_per_thread * tasks[i].sustride;
    tasks[i].sv = sv + i * lines_per_thread * tasks[i].svstride;

    tasks[i].width = width;
    /* Clamp the last band so the sum of band heights equals height. */
    tasks[i].height = (i + 1) * lines_per_thread;
    tasks[i].height = MIN (tasks[i].height, height);
    tasks[i].height -= i * lines_per_thread;

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_Y42B_UYVY_task, (gpointer) tasks_p);

  convert_fill_border (convert, dest);
}
+
+ static void
+ convert_Y42B_AYUV_task (FConvertPlaneTask * task)
+ {
+ video_orc_convert_Y42B_AYUV (task->d, task->dstride, task->s,
+ task->sstride,
+ task->su,
+ task->sustride,
+ task->sv, task->svstride, task->alpha, task->width / 2, task->height);
+ }
+
/* Convert a Y42B (planar 4:2:2) frame to AYUV with a constant alpha,
 * splitting the frame row-wise into one band per worker thread. */
static void
convert_Y42B_AYUV (GstVideoConverter * convert, const GstVideoFrame * src,
    GstVideoFrame * dest)
{
  gint width = convert->in_width;
  gint height = convert->in_height;
  guint8 *sy, *su, *sv, *d;
  guint8 alpha = MIN (convert->alpha_value, 255);
  FConvertPlaneTask *tasks;
  FConvertPlaneTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;
  gint i;

  /* Chroma is horizontally subsampled by 2, hence the in_x >> 1. */
  sy = FRAME_GET_Y_LINE (src, convert->in_y);
  sy += convert->in_x;
  su = FRAME_GET_U_LINE (src, convert->in_y);
  su += convert->in_x >> 1;
  sv = FRAME_GET_V_LINE (src, convert->in_y);
  sv += convert->in_x >> 1;

  /* AYUV destination: 4 bytes per pixel. */
  d = FRAME_GET_LINE (dest, convert->out_y);
  d += convert->out_x * 4;

  /* only for even width: the ORC kernel consumes pixels in pairs */
  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[0] =
      g_renew (FConvertPlaneTask, convert->tasks[0], n_threads);
  tasks_p = convert->tasks_p[0] =
      g_renew (FConvertPlaneTask *, convert->tasks_p[0], n_threads);

  lines_per_thread = (height + n_threads - 1) / n_threads;

  for (i = 0; i < n_threads; i++) {
    tasks[i].dstride = FRAME_GET_STRIDE (dest);
    tasks[i].sstride = FRAME_GET_Y_STRIDE (src);
    tasks[i].sustride = FRAME_GET_U_STRIDE (src);
    tasks[i].svstride = FRAME_GET_V_STRIDE (src);
    tasks[i].d = d + i * lines_per_thread * tasks[i].dstride;
    tasks[i].s = sy + i * lines_per_thread * tasks[i].sstride;
    tasks[i].su = su + i * lines_per_thread * tasks[i].sustride;
    tasks[i].sv = sv + i * lines_per_thread * tasks[i].svstride;

    tasks[i].width = width;
    /* Clamp the last band so the sum of band heights equals height. */
    tasks[i].height = (i + 1) * lines_per_thread;
    tasks[i].height = MIN (tasks[i].height, height);
    tasks[i].height -= i * lines_per_thread;
    tasks[i].alpha = alpha;

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_Y42B_AYUV_task, (gpointer) tasks_p);

  convert_fill_border (convert, dest);
}
+
+ static void
+ convert_Y444_YUY2_task (FConvertPlaneTask * task)
+ {
+ video_orc_convert_Y444_YUY2 (task->d, task->dstride, task->s,
+ task->sstride,
+ task->su,
+ task->sustride, task->sv, task->svstride, task->width / 2, task->height);
+ }
+
/* Convert a Y444 (planar 4:4:4) frame to packed YUY2, splitting the
 * frame row-wise into one contiguous band per worker thread. */
static void
convert_Y444_YUY2 (GstVideoConverter * convert, const GstVideoFrame * src,
    GstVideoFrame * dest)
{
  gint width = convert->in_width;
  gint height = convert->in_height;
  guint8 *sy, *su, *sv, *d;
  FConvertPlaneTask *tasks;
  FConvertPlaneTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;
  gint i;

  /* 4:4:4 source: all three planes share the full-resolution x offset. */
  sy = FRAME_GET_Y_LINE (src, convert->in_y);
  sy += convert->in_x;
  su = FRAME_GET_U_LINE (src, convert->in_y);
  su += convert->in_x;
  sv = FRAME_GET_V_LINE (src, convert->in_y);
  sv += convert->in_x;

  /* Packed 4:2:2 destination: 2 bytes/pixel, x aligned to even. */
  d = FRAME_GET_LINE (dest, convert->out_y);
  d += (GST_ROUND_UP_2 (convert->out_x) * 2);

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[0] =
      g_renew (FConvertPlaneTask, convert->tasks[0], n_threads);
  tasks_p = convert->tasks_p[0] =
      g_renew (FConvertPlaneTask *, convert->tasks_p[0], n_threads);

  lines_per_thread = (height + n_threads - 1) / n_threads;

  for (i = 0; i < n_threads; i++) {
    tasks[i].dstride = FRAME_GET_STRIDE (dest);
    tasks[i].sstride = FRAME_GET_Y_STRIDE (src);
    tasks[i].sustride = FRAME_GET_U_STRIDE (src);
    tasks[i].svstride = FRAME_GET_V_STRIDE (src);
    tasks[i].d = d + i * lines_per_thread * tasks[i].dstride;
    tasks[i].s = sy + i * lines_per_thread * tasks[i].sstride;
    tasks[i].su = su + i * lines_per_thread * tasks[i].sustride;
    tasks[i].sv = sv + i * lines_per_thread * tasks[i].svstride;

    tasks[i].width = width;
    /* Clamp the last band so the sum of band heights equals height. */
    tasks[i].height = (i + 1) * lines_per_thread;
    tasks[i].height = MIN (tasks[i].height, height);
    tasks[i].height -= i * lines_per_thread;

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_Y444_YUY2_task, (gpointer) tasks_p);

  convert_fill_border (convert, dest);
}
+
+ static void
+ convert_Y444_UYVY_task (FConvertPlaneTask * task)
+ {
+ video_orc_convert_Y444_UYVY (task->d, task->dstride, task->s,
+ task->sstride,
+ task->su,
+ task->sustride, task->sv, task->svstride, task->width / 2, task->height);
+ }
+
/* Convert a Y444 (planar 4:4:4) frame to packed UYVY, splitting the
 * frame row-wise into one contiguous band per worker thread. */
static void
convert_Y444_UYVY (GstVideoConverter * convert, const GstVideoFrame * src,
    GstVideoFrame * dest)
{
  gint width = convert->in_width;
  gint height = convert->in_height;
  guint8 *sy, *su, *sv, *d;
  FConvertPlaneTask *tasks;
  FConvertPlaneTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;
  gint i;

  /* 4:4:4 source: all three planes share the full-resolution x offset. */
  sy = FRAME_GET_Y_LINE (src, convert->in_y);
  sy += convert->in_x;
  su = FRAME_GET_U_LINE (src, convert->in_y);
  su += convert->in_x;
  sv = FRAME_GET_V_LINE (src, convert->in_y);
  sv += convert->in_x;

  /* Packed 4:2:2 destination: 2 bytes/pixel, x aligned to even. */
  d = FRAME_GET_LINE (dest, convert->out_y);
  d += (GST_ROUND_UP_2 (convert->out_x) * 2);

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[0] =
      g_renew (FConvertPlaneTask, convert->tasks[0], n_threads);
  tasks_p = convert->tasks_p[0] =
      g_renew (FConvertPlaneTask *, convert->tasks_p[0], n_threads);

  lines_per_thread = (height + n_threads - 1) / n_threads;

  for (i = 0; i < n_threads; i++) {
    tasks[i].dstride = FRAME_GET_STRIDE (dest);
    tasks[i].sstride = FRAME_GET_Y_STRIDE (src);
    tasks[i].sustride = FRAME_GET_U_STRIDE (src);
    tasks[i].svstride = FRAME_GET_V_STRIDE (src);
    tasks[i].d = d + i * lines_per_thread * tasks[i].dstride;
    tasks[i].s = sy + i * lines_per_thread * tasks[i].sstride;
    tasks[i].su = su + i * lines_per_thread * tasks[i].sustride;
    tasks[i].sv = sv + i * lines_per_thread * tasks[i].svstride;

    tasks[i].width = width;
    /* Clamp the last band so the sum of band heights equals height. */
    tasks[i].height = (i + 1) * lines_per_thread;
    tasks[i].height = MIN (tasks[i].height, height);
    tasks[i].height -= i * lines_per_thread;

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_Y444_UYVY_task, (gpointer) tasks_p);

  convert_fill_border (convert, dest);
}
+
+ static void
+ convert_Y444_AYUV_task (FConvertPlaneTask * task)
+ {
+ video_orc_convert_Y444_AYUV (task->d, task->dstride, task->s,
+ task->sstride,
+ task->su,
+ task->sustride,
+ task->sv, task->svstride, task->alpha, task->width, task->height);
+ }
+
/* Convert a Y444 (planar 4:4:4) frame to AYUV with a constant alpha,
 * splitting the frame row-wise into one band per worker thread. */
static void
convert_Y444_AYUV (GstVideoConverter * convert, const GstVideoFrame * src,
    GstVideoFrame * dest)
{
  gint width = convert->in_width;
  gint height = convert->in_height;
  guint8 *sy, *su, *sv, *d;
  guint8 alpha = MIN (convert->alpha_value, 255);
  FConvertPlaneTask *tasks;
  FConvertPlaneTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;
  gint i;

  /* 4:4:4 source: all three planes share the full-resolution x offset. */
  sy = FRAME_GET_Y_LINE (src, convert->in_y);
  sy += convert->in_x;
  su = FRAME_GET_U_LINE (src, convert->in_y);
  su += convert->in_x;
  sv = FRAME_GET_V_LINE (src, convert->in_y);
  sv += convert->in_x;

  /* AYUV destination: 4 bytes per pixel. */
  d = FRAME_GET_LINE (dest, convert->out_y);
  d += convert->out_x * 4;

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[0] =
      g_renew (FConvertPlaneTask, convert->tasks[0], n_threads);
  tasks_p = convert->tasks_p[0] =
      g_renew (FConvertPlaneTask *, convert->tasks_p[0], n_threads);

  lines_per_thread = (height + n_threads - 1) / n_threads;

  for (i = 0; i < n_threads; i++) {
    tasks[i].dstride = FRAME_GET_STRIDE (dest);
    tasks[i].sstride = FRAME_GET_Y_STRIDE (src);
    tasks[i].sustride = FRAME_GET_U_STRIDE (src);
    tasks[i].svstride = FRAME_GET_V_STRIDE (src);
    tasks[i].d = d + i * lines_per_thread * tasks[i].dstride;
    tasks[i].s = sy + i * lines_per_thread * tasks[i].sstride;
    tasks[i].su = su + i * lines_per_thread * tasks[i].sustride;
    tasks[i].sv = sv + i * lines_per_thread * tasks[i].svstride;

    tasks[i].width = width;
    /* Clamp the last band so the sum of band heights equals height. */
    tasks[i].height = (i + 1) * lines_per_thread;
    tasks[i].height = MIN (tasks[i].height, height);
    tasks[i].height -= i * lines_per_thread;
    tasks[i].alpha = alpha;

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_Y444_AYUV_task, (gpointer) tasks_p);

  convert_fill_border (convert, dest);
}
+
+ #if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ static void
+ convert_AYUV_ARGB_task (FConvertPlaneTask * task)
+ {
+ video_orc_convert_AYUV_ARGB (task->d, task->dstride, task->s,
+ task->sstride, task->data->im[0][0], task->data->im[0][2],
+ task->data->im[2][1], task->data->im[1][1], task->data->im[1][2],
+ task->width, task->height);
+ }
+
/* Convert an AYUV frame to ARGB using the converter's color matrix,
 * splitting the frame row-wise into one band per worker thread.
 * Compiled for little-endian only (see the enclosing #if). */
static void
convert_AYUV_ARGB (GstVideoConverter * convert, const GstVideoFrame * src,
    GstVideoFrame * dest)
{
  gint width = convert->in_width;
  gint height = convert->in_height;
  MatrixData *data = &convert->convert_matrix;
  guint8 *s, *d;
  FConvertPlaneTask *tasks;
  FConvertPlaneTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;
  gint i;

  /* Both formats are packed 4 bytes/pixel. */
  s = FRAME_GET_LINE (src, convert->in_y);
  s += (convert->in_x * 4);
  d = FRAME_GET_LINE (dest, convert->out_y);
  d += (convert->out_x * 4);

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[0] =
      g_renew (FConvertPlaneTask, convert->tasks[0], n_threads);
  tasks_p = convert->tasks_p[0] =
      g_renew (FConvertPlaneTask *, convert->tasks_p[0], n_threads);

  lines_per_thread = (height + n_threads - 1) / n_threads;

  for (i = 0; i < n_threads; i++) {
    tasks[i].dstride = FRAME_GET_STRIDE (dest);
    tasks[i].sstride = FRAME_GET_STRIDE (src);
    tasks[i].d = d + i * lines_per_thread * tasks[i].dstride;
    tasks[i].s = s + i * lines_per_thread * tasks[i].sstride;

    tasks[i].width = width;
    /* Clamp the last band so the sum of band heights equals height. */
    tasks[i].height = (i + 1) * lines_per_thread;
    tasks[i].height = MIN (tasks[i].height, height);
    tasks[i].height -= i * lines_per_thread;
    tasks[i].data = data;

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_AYUV_ARGB_task, (gpointer) tasks_p);

  convert_fill_border (convert, dest);
}
+
+ static void
+ convert_AYUV_BGRA_task (FConvertPlaneTask * task)
+ {
+ video_orc_convert_AYUV_BGRA (task->d, task->dstride, task->s,
+ task->sstride, task->data->im[0][0], task->data->im[0][2],
+ task->data->im[2][1], task->data->im[1][1], task->data->im[1][2],
+ task->width, task->height);
+ }
+
/* Convert an AYUV frame to BGRA using the converter's color matrix,
 * splitting the frame row-wise into one band per worker thread.
 * Compiled for little-endian only (see the enclosing #if). */
static void
convert_AYUV_BGRA (GstVideoConverter * convert, const GstVideoFrame * src,
    GstVideoFrame * dest)
{
  gint width = convert->in_width;
  gint height = convert->in_height;
  MatrixData *data = &convert->convert_matrix;
  guint8 *s, *d;
  FConvertPlaneTask *tasks;
  FConvertPlaneTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;
  gint i;

  /* Both formats are packed 4 bytes/pixel. */
  s = FRAME_GET_LINE (src, convert->in_y);
  s += (convert->in_x * 4);
  d = FRAME_GET_LINE (dest, convert->out_y);
  d += (convert->out_x * 4);

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[0] =
      g_renew (FConvertPlaneTask, convert->tasks[0], n_threads);
  tasks_p = convert->tasks_p[0] =
      g_renew (FConvertPlaneTask *, convert->tasks_p[0], n_threads);

  lines_per_thread = (height + n_threads - 1) / n_threads;

  for (i = 0; i < n_threads; i++) {
    tasks[i].dstride = FRAME_GET_STRIDE (dest);
    tasks[i].sstride = FRAME_GET_STRIDE (src);
    tasks[i].d = d + i * lines_per_thread * tasks[i].dstride;
    tasks[i].s = s + i * lines_per_thread * tasks[i].sstride;

    tasks[i].width = width;
    /* Clamp the last band so the sum of band heights equals height. */
    tasks[i].height = (i + 1) * lines_per_thread;
    tasks[i].height = MIN (tasks[i].height, height);
    tasks[i].height -= i * lines_per_thread;
    tasks[i].data = data;

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_AYUV_BGRA_task, (gpointer) tasks_p);

  convert_fill_border (convert, dest);
}
+
+ static void
+ convert_AYUV_ABGR_task (FConvertPlaneTask * task)
+ {
+ video_orc_convert_AYUV_ABGR (task->d, task->dstride, task->s,
+ task->sstride, task->data->im[0][0], task->data->im[0][2],
+ task->data->im[2][1], task->data->im[1][1], task->data->im[1][2],
+ task->width, task->height);
+ }
+
/* Convert an AYUV frame to ABGR using the converter's color matrix,
 * splitting the frame row-wise into one band per worker thread.
 * Compiled for little-endian only (see the enclosing #if). */
static void
convert_AYUV_ABGR (GstVideoConverter * convert, const GstVideoFrame * src,
    GstVideoFrame * dest)
{
  gint width = convert->in_width;
  gint height = convert->in_height;
  MatrixData *data = &convert->convert_matrix;
  guint8 *s, *d;
  FConvertPlaneTask *tasks;
  FConvertPlaneTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;
  gint i;

  /* Both formats are packed 4 bytes/pixel. */
  s = FRAME_GET_LINE (src, convert->in_y);
  s += (convert->in_x * 4);
  d = FRAME_GET_LINE (dest, convert->out_y);
  d += (convert->out_x * 4);

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[0] =
      g_renew (FConvertPlaneTask, convert->tasks[0], n_threads);
  tasks_p = convert->tasks_p[0] =
      g_renew (FConvertPlaneTask *, convert->tasks_p[0], n_threads);

  lines_per_thread = (height + n_threads - 1) / n_threads;

  for (i = 0; i < n_threads; i++) {
    tasks[i].dstride = FRAME_GET_STRIDE (dest);
    tasks[i].sstride = FRAME_GET_STRIDE (src);
    tasks[i].d = d + i * lines_per_thread * tasks[i].dstride;
    tasks[i].s = s + i * lines_per_thread * tasks[i].sstride;

    tasks[i].width = width;
    /* Clamp the last band so the sum of band heights equals height. */
    tasks[i].height = (i + 1) * lines_per_thread;
    tasks[i].height = MIN (tasks[i].height, height);
    tasks[i].height -= i * lines_per_thread;
    tasks[i].data = data;

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_AYUV_ABGR_task, (gpointer) tasks_p);

  convert_fill_border (convert, dest);
}
+
+ static void
+ convert_AYUV_RGBA_task (FConvertPlaneTask * task)
+ {
+ video_orc_convert_AYUV_RGBA (task->d, task->dstride, task->s,
+ task->sstride, task->data->im[0][0], task->data->im[0][2],
+ task->data->im[2][1], task->data->im[1][1], task->data->im[1][2],
+ task->width, task->height);
+ }
+
/* Convert an AYUV frame to RGBA using the converter's color matrix,
 * splitting the frame row-wise into one band per worker thread.
 * Compiled for little-endian only (see the enclosing #if). */
static void
convert_AYUV_RGBA (GstVideoConverter * convert, const GstVideoFrame * src,
    GstVideoFrame * dest)
{
  gint width = convert->in_width;
  gint height = convert->in_height;
  MatrixData *data = &convert->convert_matrix;
  guint8 *s, *d;
  FConvertPlaneTask *tasks;
  FConvertPlaneTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;
  gint i;

  /* Both formats are packed 4 bytes/pixel. */
  s = FRAME_GET_LINE (src, convert->in_y);
  s += (convert->in_x * 4);
  d = FRAME_GET_LINE (dest, convert->out_y);
  d += (convert->out_x * 4);

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[0] =
      g_renew (FConvertPlaneTask, convert->tasks[0], n_threads);
  tasks_p = convert->tasks_p[0] =
      g_renew (FConvertPlaneTask *, convert->tasks_p[0], n_threads);

  lines_per_thread = (height + n_threads - 1) / n_threads;

  for (i = 0; i < n_threads; i++) {
    tasks[i].dstride = FRAME_GET_STRIDE (dest);
    tasks[i].sstride = FRAME_GET_STRIDE (src);
    tasks[i].d = d + i * lines_per_thread * tasks[i].dstride;
    tasks[i].s = s + i * lines_per_thread * tasks[i].sstride;

    tasks[i].width = width;
    /* Clamp the last band so the sum of band heights equals height. */
    tasks[i].height = (i + 1) * lines_per_thread;
    tasks[i].height = MIN (tasks[i].height, height);
    tasks[i].height -= i * lines_per_thread;
    tasks[i].data = data;

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_AYUV_RGBA_task, (gpointer) tasks_p);

  convert_fill_border (convert, dest);
}
+ #endif
+
/* Worker: convert the I420 lines [height_0, height_1) to BGRA, one
 * line at a time.  Chroma planes are subsampled 2x vertically and
 * horizontally, hence the >> 1 on line index and x offset.  The ORC
 * kernels are written in little-endian byte order, so the
 * complementary (ARGB) kernel produces BGRA on big-endian hosts. */
static void
convert_I420_BGRA_task (FConvertTask * task)
{
  gint i;

  for (i = task->height_0; i < task->height_1; i++) {
    guint8 *sy, *su, *sv, *d;

    d = FRAME_GET_LINE (task->dest, i + task->out_y);
    d += (task->out_x * 4);
    sy = FRAME_GET_Y_LINE (task->src, i + task->in_y);
    sy += task->in_x;
    su = FRAME_GET_U_LINE (task->src, (i + task->in_y) >> 1);
    su += (task->in_x >> 1);
    sv = FRAME_GET_V_LINE (task->src, (i + task->in_y) >> 1);
    sv += (task->in_x >> 1);

#if G_BYTE_ORDER == G_LITTLE_ENDIAN
    video_orc_convert_I420_BGRA (d, sy, su, sv,
        task->data->im[0][0], task->data->im[0][2],
        task->data->im[2][1], task->data->im[1][1], task->data->im[1][2],
        task->width);
#else
    video_orc_convert_I420_ARGB (d, sy, su, sv,
        task->data->im[0][0], task->data->im[0][2],
        task->data->im[2][1], task->data->im[1][1], task->data->im[1][2],
        task->width);
#endif
  }
}
+
/* Convert an I420 frame to BGRA, splitting the frame row-wise into
 * one band of lines per worker thread; line pointers are resolved
 * inside the task to keep chroma subsampling handling simple. */
static void
convert_I420_BGRA (GstVideoConverter * convert, const GstVideoFrame * src,
    GstVideoFrame * dest)
{
  int i;
  gint width = convert->in_width;
  gint height = convert->in_height;
  MatrixData *data = &convert->convert_matrix;
  FConvertTask *tasks;
  FConvertTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[0] =
      g_renew (FConvertTask, convert->tasks[0], n_threads);
  tasks_p = convert->tasks_p[0] =
      g_renew (FConvertTask *, convert->tasks_p[0], n_threads);

  lines_per_thread = (height + n_threads - 1) / n_threads;

  for (i = 0; i < n_threads; i++) {
    tasks[i].src = src;
    tasks[i].dest = dest;

    tasks[i].width = width;
    tasks[i].data = data;
    tasks[i].in_x = convert->in_x;
    tasks[i].in_y = convert->in_y;
    tasks[i].out_x = convert->out_x;
    tasks[i].out_y = convert->out_y;

    /* Half-open line range [height_0, height_1), clamped to height. */
    tasks[i].height_0 = i * lines_per_thread;
    tasks[i].height_1 = tasks[i].height_0 + lines_per_thread;
    tasks[i].height_1 = MIN (height, tasks[i].height_1);

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_I420_BGRA_task, (gpointer) tasks_p);

  convert_fill_border (convert, dest);
}
+
/* Worker: convert the I420 lines [height_0, height_1) to ARGB, one
 * line at a time.  Chroma planes are subsampled 2x in both
 * directions, hence the >> 1 on line index and x offset.  The ORC
 * kernels are written in little-endian byte order, so the
 * complementary (BGRA) kernel produces ARGB on big-endian hosts. */
static void
convert_I420_ARGB_task (FConvertTask * task)
{
  gint i;

  for (i = task->height_0; i < task->height_1; i++) {
    guint8 *sy, *su, *sv, *d;

    d = FRAME_GET_LINE (task->dest, i + task->out_y);
    d += (task->out_x * 4);
    sy = FRAME_GET_Y_LINE (task->src, i + task->in_y);
    sy += task->in_x;
    su = FRAME_GET_U_LINE (task->src, (i + task->in_y) >> 1);
    su += (task->in_x >> 1);
    sv = FRAME_GET_V_LINE (task->src, (i + task->in_y) >> 1);
    sv += (task->in_x >> 1);

#if G_BYTE_ORDER == G_LITTLE_ENDIAN
    video_orc_convert_I420_ARGB (d, sy, su, sv,
        task->data->im[0][0], task->data->im[0][2],
        task->data->im[2][1], task->data->im[1][1], task->data->im[1][2],
        task->width);
#else
    video_orc_convert_I420_BGRA (d, sy, su, sv,
        task->data->im[0][0], task->data->im[0][2],
        task->data->im[2][1], task->data->im[1][1], task->data->im[1][2],
        task->width);
#endif
  }
}
+
/* Convert an I420 frame to ARGB, splitting the frame row-wise into
 * one band of lines per worker thread; line pointers are resolved
 * inside the task to keep chroma subsampling handling simple. */
static void
convert_I420_ARGB (GstVideoConverter * convert, const GstVideoFrame * src,
    GstVideoFrame * dest)
{
  int i;
  gint width = convert->in_width;
  gint height = convert->in_height;
  MatrixData *data = &convert->convert_matrix;
  FConvertTask *tasks;
  FConvertTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[0] =
      g_renew (FConvertTask, convert->tasks[0], n_threads);
  tasks_p = convert->tasks_p[0] =
      g_renew (FConvertTask *, convert->tasks_p[0], n_threads);

  lines_per_thread = (height + n_threads - 1) / n_threads;

  for (i = 0; i < n_threads; i++) {
    tasks[i].src = src;
    tasks[i].dest = dest;

    tasks[i].width = width;
    tasks[i].data = data;
    tasks[i].in_x = convert->in_x;
    tasks[i].in_y = convert->in_y;
    tasks[i].out_x = convert->out_x;
    tasks[i].out_y = convert->out_y;

    /* Half-open line range [height_0, height_1), clamped to height. */
    tasks[i].height_0 = i * lines_per_thread;
    tasks[i].height_1 = tasks[i].height_0 + lines_per_thread;
    tasks[i].height_1 = MIN (height, tasks[i].height_1);

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_I420_ARGB_task, (gpointer) tasks_p);

  convert_fill_border (convert, dest);
}
+
/* Worker: convert I420 lines [height_0, height_1) to a generic packed
 * RGB format.  Each line is first converted to host-order ARGB into
 * the per-thread tmpline, then written out through the destination
 * format's pack_func, which handles the final byte layout. */
static void
convert_I420_pack_ARGB_task (FConvertTask * task)
{
  gint i;
  gpointer d[GST_VIDEO_MAX_PLANES];

  /* pack_func addresses lines itself (it gets the line index below),
   * so d[0] is the plane base plus only the horizontal offset. */
  d[0] = FRAME_GET_LINE (task->dest, 0);
  d[0] =
      (guint8 *) d[0] +
      task->out_x * GST_VIDEO_FORMAT_INFO_PSTRIDE (task->dest->info.finfo, 0);

  for (i = task->height_0; i < task->height_1; i++) {
    guint8 *sy, *su, *sv;

    sy = FRAME_GET_Y_LINE (task->src, i + task->in_y);
    sy += task->in_x;
    su = FRAME_GET_U_LINE (task->src, (i + task->in_y) >> 1);
    su += (task->in_x >> 1);
    sv = FRAME_GET_V_LINE (task->src, (i + task->in_y) >> 1);
    sv += (task->in_x >> 1);

#if G_BYTE_ORDER == G_LITTLE_ENDIAN
    video_orc_convert_I420_ARGB (task->tmpline, sy, su, sv,
        task->data->im[0][0], task->data->im[0][2],
        task->data->im[2][1], task->data->im[1][1], task->data->im[1][2],
        task->width);
#else
    video_orc_convert_I420_BGRA (task->tmpline, sy, su, sv,
        task->data->im[0][0], task->data->im[0][2],
        task->data->im[2][1], task->data->im[1][1], task->data->im[1][2],
        task->width);
#endif
    task->dest->info.finfo->pack_func (task->dest->info.finfo,
        (GST_VIDEO_FRAME_IS_INTERLACED (task->dest) ?
            GST_VIDEO_PACK_FLAG_INTERLACED :
            GST_VIDEO_PACK_FLAG_NONE),
        task->tmpline, 0, d, task->dest->info.stride,
        task->dest->info.chroma_site, i + task->out_y, task->width);
  }
}
+
/* Convert an I420 frame to any packed RGB format via the destination's
 * pack_func, splitting the frame row-wise over the worker threads.
 * Each task gets its own tmpline scratch buffer. */
static void
convert_I420_pack_ARGB (GstVideoConverter * convert, const GstVideoFrame * src,
    GstVideoFrame * dest)
{
  int i;
  gint width = convert->in_width;
  gint height = convert->in_height;
  MatrixData *data = &convert->convert_matrix;
  FConvertTask *tasks;
  FConvertTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[0] =
      g_renew (FConvertTask, convert->tasks[0], n_threads);
  tasks_p = convert->tasks_p[0] =
      g_renew (FConvertTask *, convert->tasks_p[0], n_threads);

  lines_per_thread = (height + n_threads - 1) / n_threads;

  for (i = 0; i < n_threads; i++) {
    tasks[i].src = src;
    tasks[i].dest = dest;

    tasks[i].width = width;
    tasks[i].data = data;
    tasks[i].in_x = convert->in_x;
    tasks[i].in_y = convert->in_y;
    tasks[i].out_x = convert->out_x;
    tasks[i].out_y = convert->out_y;
    tasks[i].tmpline = convert->tmpline[i];

    /* Half-open line range [height_0, height_1), clamped to height. */
    tasks[i].height_0 = i * lines_per_thread;
    tasks[i].height_1 = tasks[i].height_0 + lines_per_thread;
    tasks[i].height_1 = MIN (height, tasks[i].height_1);

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_I420_pack_ARGB_task,
      (gpointer) tasks_p);

  convert_fill_border (convert, dest);
}
+
/* Worker: convert A420 (I420 + full-resolution alpha plane) lines
 * [height_0, height_1) to a generic packed RGB format, going through
 * the per-thread tmpline and the destination's pack_func. */
static void
convert_A420_pack_ARGB_task (FConvertTask * task)
{
  gint i;
  gpointer d[GST_VIDEO_MAX_PLANES];

  /* pack_func addresses lines itself (it gets the line index below),
   * so d[0] is the plane base plus only the horizontal offset. */
  d[0] = FRAME_GET_LINE (task->dest, 0);
  d[0] =
      (guint8 *) d[0] +
      task->out_x * GST_VIDEO_FORMAT_INFO_PSTRIDE (task->dest->info.finfo, 0);

  for (i = task->height_0; i < task->height_1; i++) {
    guint8 *sy, *su, *sv, *sa;

    sy = FRAME_GET_Y_LINE (task->src, i + task->in_y);
    sy += task->in_x;
    su = FRAME_GET_U_LINE (task->src, (i + task->in_y) >> 1);
    su += (task->in_x >> 1);
    sv = FRAME_GET_V_LINE (task->src, (i + task->in_y) >> 1);
    sv += (task->in_x >> 1);
    /* Alpha plane is full resolution, no subsampling. */
    sa = FRAME_GET_A_LINE (task->src, i + task->in_y);
    sa += task->in_x;

#if G_BYTE_ORDER == G_LITTLE_ENDIAN
    video_orc_convert_A420_ARGB (task->tmpline, sy, su, sv, sa,
        task->data->im[0][0], task->data->im[0][2],
        task->data->im[2][1], task->data->im[1][1], task->data->im[1][2],
        task->width);
#else
    video_orc_convert_A420_BGRA (task->tmpline, sy, su, sv, sa,
        task->data->im[0][0], task->data->im[0][2],
        task->data->im[2][1], task->data->im[1][1], task->data->im[1][2],
        task->width);
#endif

    task->dest->info.finfo->pack_func (task->dest->info.finfo,
        (GST_VIDEO_FRAME_IS_INTERLACED (task->dest) ?
            GST_VIDEO_PACK_FLAG_INTERLACED :
            GST_VIDEO_PACK_FLAG_NONE),
        task->tmpline, 0, d, task->dest->info.stride,
        task->dest->info.chroma_site, i + task->out_y, task->width);
  }
}
+
/* Convert an A420 frame to any packed RGB format via the destination's
 * pack_func, splitting the frame row-wise over the worker threads.
 * Each task gets its own tmpline scratch buffer. */
static void
convert_A420_pack_ARGB (GstVideoConverter * convert, const GstVideoFrame * src,
    GstVideoFrame * dest)
{
  int i;
  gint width = convert->in_width;
  gint height = convert->in_height;
  MatrixData *data = &convert->convert_matrix;
  FConvertTask *tasks;
  FConvertTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[0] =
      g_renew (FConvertTask, convert->tasks[0], n_threads);
  tasks_p = convert->tasks_p[0] =
      g_renew (FConvertTask *, convert->tasks_p[0], n_threads);

  lines_per_thread = (height + n_threads - 1) / n_threads;

  for (i = 0; i < n_threads; i++) {
    tasks[i].src = src;
    tasks[i].dest = dest;

    tasks[i].width = width;
    tasks[i].data = data;
    tasks[i].in_x = convert->in_x;
    tasks[i].in_y = convert->in_y;
    tasks[i].out_x = convert->out_x;
    tasks[i].out_y = convert->out_y;
    tasks[i].tmpline = convert->tmpline[i];

    /* Half-open line range [height_0, height_1), clamped to height. */
    tasks[i].height_0 = i * lines_per_thread;
    tasks[i].height_1 = tasks[i].height_0 + lines_per_thread;
    tasks[i].height_1 = MIN (height, tasks[i].height_1);

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_A420_pack_ARGB_task,
      (gpointer) tasks_p);

  convert_fill_border (convert, dest);
}
+
/* Worker: convert A420 lines [height_0, height_1) straight to BGRA.
 * Chroma is subsampled 2x both ways (>> 1 on index and offset); the
 * alpha plane is full resolution.  The ORC kernels are little-endian,
 * so the complementary (ARGB) kernel produces BGRA on big-endian. */
static void
convert_A420_BGRA_task (FConvertTask * task)
{
  gint i;

  for (i = task->height_0; i < task->height_1; i++) {
    guint8 *sy, *su, *sv, *sa, *d;

    d = FRAME_GET_LINE (task->dest, i + task->out_y);
    d += (task->out_x * 4);
    sy = FRAME_GET_Y_LINE (task->src, i + task->in_y);
    sy += task->in_x;
    su = FRAME_GET_U_LINE (task->src, (i + task->in_y) >> 1);
    su += (task->in_x >> 1);
    sv = FRAME_GET_V_LINE (task->src, (i + task->in_y) >> 1);
    sv += (task->in_x >> 1);
    sa = FRAME_GET_A_LINE (task->src, i + task->in_y);
    sa += task->in_x;

#if G_BYTE_ORDER == G_LITTLE_ENDIAN
    video_orc_convert_A420_BGRA (d, sy, su, sv, sa,
        task->data->im[0][0], task->data->im[0][2],
        task->data->im[2][1], task->data->im[1][1], task->data->im[1][2],
        task->width);
#else
    video_orc_convert_A420_ARGB (d, sy, su, sv, sa,
        task->data->im[0][0], task->data->im[0][2],
        task->data->im[2][1], task->data->im[1][1], task->data->im[1][2],
        task->width);
#endif
  }
}
+
/* Convert an A420 frame to BGRA, splitting the frame row-wise into
 * one band of lines per worker thread; line pointers are resolved
 * inside the task to keep chroma subsampling handling simple. */
static void
convert_A420_BGRA (GstVideoConverter * convert, const GstVideoFrame * src,
    GstVideoFrame * dest)
{
  int i;
  gint width = convert->in_width;
  gint height = convert->in_height;
  MatrixData *data = &convert->convert_matrix;
  FConvertTask *tasks;
  FConvertTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[0] =
      g_renew (FConvertTask, convert->tasks[0], n_threads);
  tasks_p = convert->tasks_p[0] =
      g_renew (FConvertTask *, convert->tasks_p[0], n_threads);

  lines_per_thread = (height + n_threads - 1) / n_threads;

  for (i = 0; i < n_threads; i++) {
    tasks[i].src = src;
    tasks[i].dest = dest;

    tasks[i].width = width;
    tasks[i].data = data;
    tasks[i].in_x = convert->in_x;
    tasks[i].in_y = convert->in_y;
    tasks[i].out_x = convert->out_x;
    tasks[i].out_y = convert->out_y;

    /* Half-open line range [height_0, height_1), clamped to height. */
    tasks[i].height_0 = i * lines_per_thread;
    tasks[i].height_1 = tasks[i].height_0 + lines_per_thread;
    tasks[i].height_1 = MIN (height, tasks[i].height_1);

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_A420_BGRA_task, (gpointer) tasks_p);

  convert_fill_border (convert, dest);
}
+
/* Fill @n consecutive 3-byte pixels starting at @data with the
 * 3-byte pattern in @col. */
static void
memset_u24 (guint8 * data, guint8 col[3], unsigned int n)
{
  guint8 *p = data;
  unsigned int left;

  for (left = n; left != 0; left--) {
    *p++ = col[0];
    *p++ = col[1];
    *p++ = col[2];
  }
}
+
/* Fill @n 2-byte pixels at @data with the 4-byte pattern in @col,
 * written in 2-pixel (4-byte) groups as packed 4:2:2 requires.  For
 * odd @n only the first 2 bytes of the final group are written, so
 * the buffer is never overrun. */
static void
memset_u32_16 (guint8 * data, guint8 col[4], unsigned int n)
{
  unsigned int i = 0;

  while (i < n) {
    data[0] = col[0];
    data[1] = col[1];
    if (n - i > 1) {
      data[2] = col[2];
      data[3] = col[3];
    }
    data += 4;
    i += 2;
  }
}
+
/* Fill the border of plane @k of @dest with the premade border color
 * @col, using @func as the row-fill primitive:
 *   - whole rows above (0..out_y) and below (out_y + out_height..
 *     out_maxheight) the active area,
 *   - the left (lb_width) and right (rb_width) strips beside every
 *     active row.
 * Expands inside convert_fill_border() where i, k, dest, col, the
 * out_* variables, lb_width, rb_width, r_border and pstride are in
 * scope.  Note: no trailing backslash after the last line — the macro
 * must not swallow the source line that follows its definition. */
#define MAKE_BORDER_FUNC(func)                                          \
    for (i = 0; i < out_y; i++)                                         \
      func (FRAME_GET_PLANE_LINE (dest, k, i), col, out_maxwidth);      \
    if (rb_width || lb_width) {                                         \
      for (i = 0; i < out_height; i++) {                                \
        guint8 *d = FRAME_GET_PLANE_LINE (dest, k, i + out_y);          \
        if (lb_width)                                                   \
          func (d, col, lb_width);                                      \
        if (rb_width)                                                   \
          func (d + (pstride * r_border), col, rb_width);               \
      }                                                                 \
    }                                                                   \
    for (i = out_y + out_height; i < out_maxheight; i++)                \
      func (FRAME_GET_PLANE_LINE (dest, k, i), col, out_maxwidth);
+
/* Fill the areas of @dest outside the converted region with the
 * precomputed border color, plane by plane.  Does nothing unless
 * border filling was requested and a border line was prepared. */
static void
convert_fill_border (GstVideoConverter * convert, GstVideoFrame * dest)
{
  int k, n_planes;
  const GstVideoFormatInfo *out_finfo;

  if (!convert->fill_border || !convert->borderline)
    return;

  out_finfo = convert->out_info.finfo;

  n_planes = GST_VIDEO_FRAME_N_PLANES (dest);

  for (k = 0; k < n_planes; k++) {
    gint comp[GST_VIDEO_MAX_COMPONENTS];
    gint i, out_x, out_y, out_width, out_height, pstride, pgroup;
    gint r_border, lb_width, rb_width;
    gint out_maxwidth, out_maxheight;
    gpointer borders;

    /* Scale all geometry to this plane's subsampling. */
    gst_video_format_info_component (out_finfo, k, comp);
    out_x = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (out_finfo, comp[0],
        convert->out_x);
    out_y = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (out_finfo, comp[0],
        convert->out_y);
    out_width = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (out_finfo, comp[0],
        convert->out_width);
    out_height = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (out_finfo, comp[0],
        convert->out_height);
    out_maxwidth = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (out_finfo, comp[0],
        convert->out_maxwidth);
    out_maxheight = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (out_finfo, comp[0],
        convert->out_maxheight);

    pstride = GST_VIDEO_FORMAT_INFO_PSTRIDE (out_finfo, comp[0]);

    switch (GST_VIDEO_FORMAT_INFO_FORMAT (out_finfo)) {
      case GST_VIDEO_FORMAT_YUY2:
      case GST_VIDEO_FORMAT_YVYU:
      case GST_VIDEO_FORMAT_UYVY:
        /* 42 is a sentinel, not a byte count: packed 4:2:2 formats
         * must be filled in 4-byte (2-pixel) groups. */
        pgroup = 42;
        out_maxwidth = GST_ROUND_UP_2 (out_maxwidth);
        break;
      default:
        pgroup = pstride;
        break;
    }

    r_border = out_x + out_width;
    rb_width = out_maxwidth - r_border;
    lb_width = out_x;

    borders = &convert->borders[k];

    /* Pick the fill primitive matching the pixel-group size. */
    switch (pgroup) {
      case 1:
      {
        guint8 col = ((guint8 *) borders)[0];
        MAKE_BORDER_FUNC (memset);
        break;
      }
      case 2:
      {
        guint16 col = ((guint16 *) borders)[0];
        MAKE_BORDER_FUNC (video_orc_splat_u16);
        break;
      }
      case 3:
      {
        guint8 col[3];
        col[0] = ((guint8 *) borders)[0];
        col[1] = ((guint8 *) borders)[1];
        col[2] = ((guint8 *) borders)[2];
        MAKE_BORDER_FUNC (memset_u24);
        break;
      }
      case 4:
      {
        guint32 col = ((guint32 *) borders)[0];
        MAKE_BORDER_FUNC (video_orc_splat_u32);
        break;
      }
      case 8:
      {
        guint64 col = ((guint64 *) borders)[0];
        MAKE_BORDER_FUNC (video_orc_splat_u64);
        break;
      }
      case 42:
      {
        /* Packed 4:2:2: if the right border starts on an odd pixel,
         * swap the two chroma bytes to keep the U/V phase correct. */
        guint8 col[4];
        col[0] = ((guint8 *) borders)[0];
        col[2] = ((guint8 *) borders)[2];
        col[1] = ((guint8 *) borders)[r_border & 1 ? 3 : 1];
        col[3] = ((guint8 *) borders)[r_border & 1 ? 1 : 3];
        MAKE_BORDER_FUNC (memset_u32_16);
        break;
      }
      default:
        break;
    }
  }
}
+
/* Per-thread work item for the simple (fixed-ratio) plane conversions:
 * one horizontal band of a plane. */
typedef struct
{
  const guint8 *s, *s2;         /* source line(s); s2 is the second line of
                                 * the pair in the vertical 2:1 cases */
  guint8 *d, *d2;               /* destination line(s); d2 is the second
                                 * line written in the vertical 1:2 cases */
  gint sstride, dstride;        /* source / destination plane strides */
  gint width, height;           /* band size, in output pixels / lines */
  gint fill;                    /* fill byte for convert_plane_fill_task */
} FSimpleScaleTask;
+
/* Thread worker: fill the task's band with a constant byte via a 2D
 * memset. */
static void
convert_plane_fill_task (FSimpleScaleTask * task)
{
  video_orc_memset_2d (task->d, task->dstride,
      task->fill, task->width, task->height);
}
+
+ static void
+ convert_plane_fill (GstVideoConverter * convert,
+ const GstVideoFrame * src, GstVideoFrame * dest, gint plane)
+ {
+ guint8 *d;
+ FSimpleScaleTask *tasks;
+ FSimpleScaleTask **tasks_p;
+ gint n_threads;
+ gint lines_per_thread;
+ gint i;
+
+ d = FRAME_GET_PLANE_LINE (dest, plane, convert->fout_y[plane]);
+ d += convert->fout_x[plane];
+
+ n_threads = convert->conversion_runner->n_threads;
+ tasks = convert->tasks[plane] =
+ g_renew (FSimpleScaleTask, convert->tasks[plane], n_threads);
+ tasks_p = convert->tasks_p[plane] =
+ g_renew (FSimpleScaleTask *, convert->tasks_p[plane], n_threads);
+ lines_per_thread = (convert->fout_height[plane] + n_threads - 1) / n_threads;
+
+ for (i = 0; i < n_threads; i++) {
+ tasks[i].d = d + i * lines_per_thread * convert->fout_width[plane];
+
+ tasks[i].fill = convert->ffill[plane];
+ tasks[i].width = convert->fout_width[plane];
+ tasks[i].height = (i + 1) * lines_per_thread;
+ tasks[i].height = MIN (tasks[i].height, convert->fout_height[plane]);
+ tasks[i].height -= i * lines_per_thread;
+ tasks[i].dstride = FRAME_GET_PLANE_STRIDE (dest, plane);
+
+ tasks_p[i] = &tasks[i];
+ }
+
+ gst_parallelized_task_runner_run (convert->conversion_runner,
+ (GstParallelizedTaskFunc) convert_plane_fill_task, (gpointer) tasks_p);
+ }
+
/* Thread worker: horizontal 1:2 chroma upsampling (422 -> 444 layout per
 * the orc kernel name); task->width is the output width, so width / 2
 * source pixels are read per line. */
static void
convert_plane_h_double_task (FSimpleScaleTask * task)
{
  video_orc_planar_chroma_422_444 (task->d,
      task->dstride, task->s, task->sstride, task->width / 2, task->height);
}
+
/* Doubles the horizontal resolution of plane @plane, reading from the
 * matching source plane convert->fsplane[plane].  Work is split into one
 * horizontal band per thread. */
static void
convert_plane_h_double (GstVideoConverter * convert,
    const GstVideoFrame * src, GstVideoFrame * dest, gint plane)
{
  guint8 *s, *d;
  gint splane = convert->fsplane[plane];
  FSimpleScaleTask *tasks;
  FSimpleScaleTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;
  gint i;

  s = FRAME_GET_PLANE_LINE (src, splane, convert->fin_y[splane]);
  s += convert->fin_x[splane];
  d = FRAME_GET_PLANE_LINE (dest, plane, convert->fout_y[plane]);
  d += convert->fout_x[plane];

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[plane] =
      g_renew (FSimpleScaleTask, convert->tasks[plane], n_threads);
  tasks_p = convert->tasks_p[plane] =
      g_renew (FSimpleScaleTask *, convert->tasks_p[plane], n_threads);
  lines_per_thread = (convert->fout_height[plane] + n_threads - 1) / n_threads;

  for (i = 0; i < n_threads; i++) {
    tasks[i].dstride = FRAME_GET_PLANE_STRIDE (dest, plane);
    tasks[i].sstride = FRAME_GET_PLANE_STRIDE (src, splane);

    tasks[i].d = d + i * lines_per_thread * tasks[i].dstride;
    tasks[i].s = s + i * lines_per_thread * tasks[i].sstride;

    tasks[i].width = convert->fout_width[plane];
    /* clamp the last band to the plane height */
    tasks[i].height = (i + 1) * lines_per_thread;
    tasks[i].height = MIN (tasks[i].height, convert->fout_height[plane]);
    tasks[i].height -= i * lines_per_thread;

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_plane_h_double_task,
      (gpointer) tasks_p);
}
+
/* Thread worker: horizontal 2:1 chroma downsampling (444 -> 422 layout per
 * the orc kernel name); task->width is the output (halved) width. */
static void
convert_plane_h_halve_task (FSimpleScaleTask * task)
{
  video_orc_planar_chroma_444_422 (task->d,
      task->dstride, task->s, task->sstride, task->width, task->height);
}
+
/* Halves the horizontal resolution of plane @plane, reading from the
 * matching source plane convert->fsplane[plane].  Work is split into one
 * horizontal band per thread. */
static void
convert_plane_h_halve (GstVideoConverter * convert,
    const GstVideoFrame * src, GstVideoFrame * dest, gint plane)
{
  guint8 *s, *d;
  gint splane = convert->fsplane[plane];
  FSimpleScaleTask *tasks;
  FSimpleScaleTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;
  gint i;

  s = FRAME_GET_PLANE_LINE (src, splane, convert->fin_y[splane]);
  s += convert->fin_x[splane];
  d = FRAME_GET_PLANE_LINE (dest, plane, convert->fout_y[plane]);
  d += convert->fout_x[plane];

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[plane] =
      g_renew (FSimpleScaleTask, convert->tasks[plane], n_threads);
  tasks_p = convert->tasks_p[plane] =
      g_renew (FSimpleScaleTask *, convert->tasks_p[plane], n_threads);
  lines_per_thread = (convert->fout_height[plane] + n_threads - 1) / n_threads;

  for (i = 0; i < n_threads; i++) {
    tasks[i].dstride = FRAME_GET_PLANE_STRIDE (dest, plane);
    tasks[i].sstride = FRAME_GET_PLANE_STRIDE (src, splane);

    tasks[i].d = d + i * lines_per_thread * tasks[i].dstride;
    tasks[i].s = s + i * lines_per_thread * tasks[i].sstride;

    tasks[i].width = convert->fout_width[plane];
    /* clamp the last band to the plane height */
    tasks[i].height = (i + 1) * lines_per_thread;
    tasks[i].height = MIN (tasks[i].height, convert->fout_height[plane]);
    tasks[i].height -= i * lines_per_thread;

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_plane_h_halve_task, (gpointer) tasks_p);
}
+
/* Thread worker: vertical 1:2 upsampling (420 -> 422 layout per the orc
 * kernel name).  Each source line produces the output line pair at d and
 * d2, hence the doubled destination stride and height / 2 source lines. */
static void
convert_plane_v_double_task (FSimpleScaleTask * task)
{
  video_orc_planar_chroma_420_422 (task->d, 2 * task->dstride, task->d2,
      2 * task->dstride, task->s, task->sstride, task->width, task->height / 2);
}
+
/* Doubles the vertical resolution of plane @plane.  d1/d2 point at the two
 * interleaved destination lines written per source line. */
static void
convert_plane_v_double (GstVideoConverter * convert,
    const GstVideoFrame * src, GstVideoFrame * dest, gint plane)
{
  guint8 *s, *d1, *d2;
  gint ds, splane = convert->fsplane[plane];
  FSimpleScaleTask *tasks;
  FSimpleScaleTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;
  gint i;

  s = FRAME_GET_PLANE_LINE (src, splane, convert->fin_y[splane]);
  s += convert->fin_x[splane];
  d1 = FRAME_GET_PLANE_LINE (dest, plane, convert->fout_y[plane]);
  d1 += convert->fout_x[plane];
  d2 = FRAME_GET_PLANE_LINE (dest, plane, convert->fout_y[plane] + 1);
  d2 += convert->fout_x[plane];
  ds = FRAME_GET_PLANE_STRIDE (dest, plane);

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[plane] =
      g_renew (FSimpleScaleTask, convert->tasks[plane], n_threads);
  tasks_p = convert->tasks_p[plane] =
      g_renew (FSimpleScaleTask *, convert->tasks_p[plane], n_threads);
  /* round up so every band starts on an even output line: two output
   * lines are produced per source line */
  lines_per_thread =
      GST_ROUND_UP_2 ((convert->fout_height[plane] + n_threads -
          1) / n_threads);

  for (i = 0; i < n_threads; i++) {
    tasks[i].d = d1 + i * lines_per_thread * ds;
    tasks[i].d2 = d2 + i * lines_per_thread * ds;
    tasks[i].dstride = ds;
    tasks[i].sstride = FRAME_GET_PLANE_STRIDE (src, splane);
    /* one source line is consumed per two output lines */
    tasks[i].s = s + i * lines_per_thread * tasks[i].sstride / 2;

    tasks[i].width = convert->fout_width[plane];
    /* clamp the last band to the plane height */
    tasks[i].height = (i + 1) * lines_per_thread;
    tasks[i].height = MIN (tasks[i].height, convert->fout_height[plane]);
    tasks[i].height -= i * lines_per_thread;

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_plane_v_double_task,
      (gpointer) tasks_p);
}
+
/* Thread worker: vertical 2:1 downsampling (422 -> 420 layout per the orc
 * kernel name).  Each output line is produced from the source line pair at
 * s and s2, hence the doubled source stride. */
static void
convert_plane_v_halve_task (FSimpleScaleTask * task)
{
  video_orc_planar_chroma_422_420 (task->d, task->dstride, task->s,
      2 * task->sstride, task->s2, 2 * task->sstride, task->width,
      task->height);
}
+
/* Halves the vertical resolution of plane @plane.  s1/s2 are the two
 * consecutive source lines combined into each output line. */
static void
convert_plane_v_halve (GstVideoConverter * convert,
    const GstVideoFrame * src, GstVideoFrame * dest, gint plane)
{
  guint8 *s1, *s2, *d;
  gint ss, ds, splane = convert->fsplane[plane];
  FSimpleScaleTask *tasks;
  FSimpleScaleTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;
  gint i;

  s1 = FRAME_GET_PLANE_LINE (src, splane, convert->fin_y[splane]);
  s1 += convert->fin_x[splane];
  s2 = FRAME_GET_PLANE_LINE (src, splane, convert->fin_y[splane] + 1);
  s2 += convert->fin_x[splane];
  d = FRAME_GET_PLANE_LINE (dest, plane, convert->fout_y[plane]);
  d += convert->fout_x[plane];

  ss = FRAME_GET_PLANE_STRIDE (src, splane);
  ds = FRAME_GET_PLANE_STRIDE (dest, plane);

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[plane] =
      g_renew (FSimpleScaleTask, convert->tasks[plane], n_threads);
  tasks_p = convert->tasks_p[plane] =
      g_renew (FSimpleScaleTask *, convert->tasks_p[plane], n_threads);
  lines_per_thread = (convert->fout_height[plane] + n_threads - 1) / n_threads;

  for (i = 0; i < n_threads; i++) {
    tasks[i].d = d + i * lines_per_thread * ds;
    tasks[i].dstride = ds;
    /* two source lines are consumed per output line */
    tasks[i].s = s1 + i * lines_per_thread * ss * 2;
    tasks[i].s2 = s2 + i * lines_per_thread * ss * 2;
    tasks[i].sstride = ss;

    tasks[i].width = convert->fout_width[plane];
    /* clamp the last band to the plane height */
    tasks[i].height = (i + 1) * lines_per_thread;
    tasks[i].height = MIN (tasks[i].height, convert->fout_height[plane]);
    tasks[i].height -= i * lines_per_thread;

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_plane_v_halve_task, (gpointer) tasks_p);
}
+
/* Thread worker: 2x upsampling in both directions (420 -> 444 layout per
 * the orc kernel name).  Writes the output line pair d/d2 per source line;
 * (width + 1) / 2 source pixels are read per line. */
static void
convert_plane_hv_double_task (FSimpleScaleTask * task)
{
  video_orc_planar_chroma_420_444 (task->d, 2 * task->dstride, task->d2,
      2 * task->dstride, task->s, task->sstride, (task->width + 1) / 2,
      task->height / 2);
}
+
/* Doubles both the horizontal and vertical resolution of plane @plane.
 * d1/d2 point at the two interleaved destination lines written per source
 * line. */
static void
convert_plane_hv_double (GstVideoConverter * convert,
    const GstVideoFrame * src, GstVideoFrame * dest, gint plane)
{
  guint8 *s, *d1, *d2;
  gint ss, ds, splane = convert->fsplane[plane];
  FSimpleScaleTask *tasks;
  FSimpleScaleTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;
  gint i;

  s = FRAME_GET_PLANE_LINE (src, splane, convert->fin_y[splane]);
  s += convert->fin_x[splane];
  d1 = FRAME_GET_PLANE_LINE (dest, plane, convert->fout_y[plane]);
  d1 += convert->fout_x[plane];
  d2 = FRAME_GET_PLANE_LINE (dest, plane, convert->fout_y[plane] + 1);
  d2 += convert->fout_x[plane];
  ss = FRAME_GET_PLANE_STRIDE (src, splane);
  ds = FRAME_GET_PLANE_STRIDE (dest, plane);

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[plane] =
      g_renew (FSimpleScaleTask, convert->tasks[plane], n_threads);
  tasks_p = convert->tasks_p[plane] =
      g_renew (FSimpleScaleTask *, convert->tasks_p[plane], n_threads);
  /* round up so every band starts on an even output line: two output
   * lines are produced per source line */
  lines_per_thread =
      GST_ROUND_UP_2 ((convert->fout_height[plane] + n_threads -
          1) / n_threads);

  for (i = 0; i < n_threads; i++) {
    tasks[i].d = d1 + i * lines_per_thread * ds;
    tasks[i].d2 = d2 + i * lines_per_thread * ds;
    tasks[i].dstride = ds;
    tasks[i].sstride = ss;
    /* one source line is consumed per two output lines */
    tasks[i].s = s + i * lines_per_thread * ss / 2;

    tasks[i].width = convert->fout_width[plane];
    /* clamp the last band to the plane height */
    tasks[i].height = (i + 1) * lines_per_thread;
    tasks[i].height = MIN (tasks[i].height, convert->fout_height[plane]);
    tasks[i].height -= i * lines_per_thread;

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_plane_hv_double_task,
      (gpointer) tasks_p);
}
+
/* Thread worker: 2x downsampling in both directions (444 -> 420 layout per
 * the orc kernel name).  Each output line is produced from the source line
 * pair at s and s2, hence the doubled source stride. */
static void
convert_plane_hv_halve_task (FSimpleScaleTask * task)
{
  video_orc_planar_chroma_444_420 (task->d, task->dstride, task->s,
      2 * task->sstride, task->s2, 2 * task->sstride, task->width,
      task->height);
}
+
/* Halves both the horizontal and vertical resolution of plane @plane.
 * s1/s2 are the two consecutive source lines combined into each output
 * line. */
static void
convert_plane_hv_halve (GstVideoConverter * convert,
    const GstVideoFrame * src, GstVideoFrame * dest, gint plane)
{
  guint8 *s1, *s2, *d;
  gint ss, ds, splane = convert->fsplane[plane];
  FSimpleScaleTask *tasks;
  FSimpleScaleTask **tasks_p;
  gint n_threads;
  gint lines_per_thread;
  gint i;

  s1 = FRAME_GET_PLANE_LINE (src, splane, convert->fin_y[splane]);
  s1 += convert->fin_x[splane];
  s2 = FRAME_GET_PLANE_LINE (src, splane, convert->fin_y[splane] + 1);
  s2 += convert->fin_x[splane];
  d = FRAME_GET_PLANE_LINE (dest, plane, convert->fout_y[plane]);
  d += convert->fout_x[plane];
  ss = FRAME_GET_PLANE_STRIDE (src, splane);
  ds = FRAME_GET_PLANE_STRIDE (dest, plane);

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[plane] =
      g_renew (FSimpleScaleTask, convert->tasks[plane], n_threads);
  tasks_p = convert->tasks_p[plane] =
      g_renew (FSimpleScaleTask *, convert->tasks_p[plane], n_threads);
  lines_per_thread = (convert->fout_height[plane] + n_threads - 1) / n_threads;

  for (i = 0; i < n_threads; i++) {
    tasks[i].d = d + i * lines_per_thread * ds;
    tasks[i].dstride = ds;
    /* two source lines are consumed per output line */
    tasks[i].s = s1 + i * lines_per_thread * ss * 2;
    tasks[i].s2 = s2 + i * lines_per_thread * ss * 2;
    tasks[i].sstride = ss;

    tasks[i].width = convert->fout_width[plane];
    /* clamp the last band to the plane height */
    tasks[i].height = (i + 1) * lines_per_thread;
    tasks[i].height = MIN (tasks[i].height, convert->fout_height[plane]);
    tasks[i].height -= i * lines_per_thread;

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_plane_hv_halve_task,
      (gpointer) tasks_p);
}
+
/* Per-thread work item for the generic scaler path: a sub-rectangle
 * (x, y, w, h) of the destination plane plus the scalers to apply.  Either
 * scaler may be NULL when no scaling is needed in that direction (see
 * convert_plane_hv). */
typedef struct
{
  GstVideoScaler *h_scaler, *v_scaler;
  GstVideoFormat format;        /* scale format (see get_scale_format) */
  const guint8 *s;              /* top-left of the source region */
  guint8 *d;                    /* top-left of the destination region */
  gint sstride, dstride;
  guint x, y, w, h;
} FScaleTask;
+
/* Thread worker: run the generic horizontal/vertical scaler over the
 * task's destination rectangle. */
static void
convert_plane_hv_task (FScaleTask * task)
{
  gst_video_scaler_2d (task->h_scaler, task->v_scaler, task->format,
      (guint8 *) task->s, task->sstride,
      task->d, task->dstride, task->x, task->y, task->w, task->h);
}
+
/* Generic plane conversion: copies or scales plane @plane using the
 * per-thread GstVideoScalers prepared by setup_scale ().  A NULL scaler
 * array means no scaling is needed in that direction. */
static void
convert_plane_hv (GstVideoConverter * convert,
    const GstVideoFrame * src, GstVideoFrame * dest, gint plane)
{
  gint in_x, in_y, out_x, out_y, out_width, out_height;
  GstVideoFormat format;
  gint splane = convert->fsplane[plane];
  guint8 *s, *d;
  gint sstride, dstride;
  FScaleTask *tasks;
  FScaleTask **tasks_p;
  gint i, n_threads, lines_per_thread;

  in_x = convert->fin_x[splane];
  in_y = convert->fin_y[splane];
  out_x = convert->fout_x[plane];
  out_y = convert->fout_y[plane];
  out_width = convert->fout_width[plane];
  out_height = convert->fout_height[plane];
  format = convert->fformat[plane];

  s = FRAME_GET_PLANE_LINE (src, splane, in_y);
  s += in_x;
  d = FRAME_GET_PLANE_LINE (dest, plane, out_y);
  d += out_x;

  sstride = FRAME_GET_PLANE_STRIDE (src, splane);
  dstride = FRAME_GET_PLANE_STRIDE (dest, plane);

  n_threads = convert->conversion_runner->n_threads;
  tasks = convert->tasks[plane] =
      g_renew (FScaleTask, convert->tasks[plane], n_threads);
  tasks_p = convert->tasks_p[plane] =
      g_renew (FScaleTask *, convert->tasks_p[plane], n_threads);

  lines_per_thread = (out_height + n_threads - 1) / n_threads;

  for (i = 0; i < n_threads; i++) {
    /* each thread uses its own scaler instance (index i); NULL array
     * means pass-through in that direction */
    tasks[i].h_scaler =
        convert->fh_scaler[plane].scaler ? convert->fh_scaler[plane].scaler[i]
        : NULL;
    tasks[i].v_scaler =
        convert->fv_scaler[plane].scaler ? convert->fv_scaler[plane].scaler[i]
        : NULL;
    tasks[i].format = format;
    tasks[i].s = s;
    tasks[i].d = d;
    tasks[i].sstride = sstride;
    tasks[i].dstride = dstride;

    tasks[i].x = 0;
    tasks[i].w = out_width;

    /* y is the band's first output line; h is its clamped end line */
    tasks[i].y = i * lines_per_thread;
    tasks[i].h = tasks[i].y + lines_per_thread;
    tasks[i].h = MIN (out_height, tasks[i].h);

    tasks_p[i] = &tasks[i];
  }

  gst_parallelized_task_runner_run (convert->conversion_runner,
      (GstParallelizedTaskFunc) convert_plane_hv_task, (gpointer) tasks_p);
}
+
+ static void
+ convert_scale_planes (GstVideoConverter * convert,
+ const GstVideoFrame * src, GstVideoFrame * dest)
+ {
+ int i, n_planes;
+
+ n_planes = GST_VIDEO_FRAME_N_PLANES (dest);
+ for (i = 0; i < n_planes; i++) {
+ if (convert->fconvert[i])
+ convert->fconvert[i] (convert, src, dest, i);
+ }
+ convert_fill_border (convert, dest);
+ }
+
#ifdef USE_TBM
/* Fast path for I420 -> SN12 (Tizen NV12 variant): copies the Y plane line
 * by line and interleaves the separate I420 U/V planes into the single UV
 * plane of @dst.  Uses convert->in_width/in_height for both sides, so it
 * assumes input and output have the same dimensions (no scaling/cropping).
 * NOTE(review): only GST_ROUND_DOWN_2 (height) Y lines are copied, so an
 * odd-height frame loses its last Y line -- confirm callers never hit
 * this. */
static void
convert_I420_SN12 (GstVideoConverter * convert, const GstVideoFrame * src,
    GstVideoFrame * dst)
{
  guint8 *src_Y = NULL;
  guint8 *src_U = NULL;
  guint8 *src_V = NULL;
  guint8 *dst_Y = NULL;
  guint8 *dst_UV = NULL;
  int i = 0;
  int j = 0;

  gint width = convert->in_width;
  gint height = convert->in_height;
  gint loop_count_Y = GST_ROUND_DOWN_2 (height);
  gint loop_count_UV = GST_ROUND_UP_2 (width) >> 1;

  gint src_stride_Y = GST_VIDEO_FRAME_PLANE_STRIDE (src, 0);
  gint src_stride_U = GST_VIDEO_FRAME_PLANE_STRIDE (src, 1);
  gint src_stride_diff = src_stride_U - loop_count_UV;

  gint dst_stride_Y = GST_VIDEO_FRAME_PLANE_STRIDE (dst, 0);
  gint dst_stride_UV = GST_VIDEO_FRAME_PLANE_STRIDE (dst, 1);
  gint dst_stride_diff = GST_ROUND_DOWN_2 (dst_stride_UV - width);

  src_Y = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
  src_U = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
  src_V = GST_VIDEO_FRAME_PLANE_DATA (src, 2);

  dst_Y = GST_VIDEO_FRAME_PLANE_DATA (dst, 0);
  dst_UV = GST_VIDEO_FRAME_PLANE_DATA (dst, 1);

  /* GstVideoConverter is a plain struct, not a GObject, so the _OBJECT
   * logging variant must not be passed @convert; use the plain macro like
   * the rest of this file */
  GST_DEBUG ("size %dx%d, stride src[0:%d,1:%d], dst[0:%d,1:%d]",
      width, height, src_stride_Y, src_stride_U, dst_stride_Y, dst_stride_UV);

  /* two Y lines per iteration, matched by one interleaved UV line */
  for (i = 0; i < loop_count_Y; i += 2) {
    memcpy (dst_Y, src_Y, width);
    src_Y += src_stride_Y;
    dst_Y += dst_stride_Y;

    memcpy (dst_Y, src_Y, width);
    src_Y += src_stride_Y;
    dst_Y += dst_stride_Y;

    for (j = 0; j < loop_count_UV; j++) {
      *dst_UV++ = *src_U++;
      *dst_UV++ = *src_V++;
    }
    /* skip the line padding on all three chroma pointers */
    src_U += src_stride_diff;
    src_V += src_stride_diff;
    dst_UV += dst_stride_diff;
  }
}
#endif
++
/* Maps @format and @plane to the format the scaler kernels should treat
 * that plane as: 8-bit single-component planes scale as GRAY8, 16-bit gray
 * as GRAY16_BE, packed formats as themselves and interleaved UV planes as
 * NV12.  The formats in the final group are never scaled plane-wise;
 * reaching them here is a programming error (g_assert_not_reached). */
static GstVideoFormat
get_scale_format (GstVideoFormat format, gint plane)
{
  GstVideoFormat res = GST_VIDEO_FORMAT_UNKNOWN;

  switch (format) {
      /* every plane is a single 8-bit component */
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_YV12:
    case GST_VIDEO_FORMAT_Y41B:
    case GST_VIDEO_FORMAT_Y42B:
    case GST_VIDEO_FORMAT_Y444:
    case GST_VIDEO_FORMAT_GRAY8:
    case GST_VIDEO_FORMAT_A420:
    case GST_VIDEO_FORMAT_YUV9:
    case GST_VIDEO_FORMAT_YVU9:
    case GST_VIDEO_FORMAT_GBR:
    case GST_VIDEO_FORMAT_GBRA:
    case GST_VIDEO_FORMAT_RGBP:
    case GST_VIDEO_FORMAT_BGRP:
      res = GST_VIDEO_FORMAT_GRAY8;
      break;
    case GST_VIDEO_FORMAT_GRAY16_BE:
    case GST_VIDEO_FORMAT_GRAY16_LE:
      res = GST_VIDEO_FORMAT_GRAY16_BE;
      break;
      /* packed formats scale as themselves */
    case GST_VIDEO_FORMAT_YUY2:
    case GST_VIDEO_FORMAT_UYVY:
    case GST_VIDEO_FORMAT_VYUY:
    case GST_VIDEO_FORMAT_YVYU:
    case GST_VIDEO_FORMAT_AYUV:
    case GST_VIDEO_FORMAT_VUYA:
    case GST_VIDEO_FORMAT_RGBx:
    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_xBGR:
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_ABGR:
    case GST_VIDEO_FORMAT_RGB:
    case GST_VIDEO_FORMAT_BGR:
    case GST_VIDEO_FORMAT_v308:
    case GST_VIDEO_FORMAT_IYU2:
    case GST_VIDEO_FORMAT_ARGB64:
    case GST_VIDEO_FORMAT_AYUV64:
      res = format;
      break;
      /* 2-bytes-per-pixel packed RGB reuses the NV12 (2-byte group)
       * scaling path */
    case GST_VIDEO_FORMAT_RGB15:
    case GST_VIDEO_FORMAT_BGR15:
    case GST_VIDEO_FORMAT_RGB16:
    case GST_VIDEO_FORMAT_BGR16:
      res = GST_VIDEO_FORMAT_NV12;
      break;
      /* semi-planar: 8-bit luma plane, interleaved chroma plane */
    case GST_VIDEO_FORMAT_NV12:
    case GST_VIDEO_FORMAT_NV21:
    case GST_VIDEO_FORMAT_NV16:
    case GST_VIDEO_FORMAT_NV61:
    case GST_VIDEO_FORMAT_NV24:
      res = plane == 0 ? GST_VIDEO_FORMAT_GRAY8 : GST_VIDEO_FORMAT_NV12;
      break;
    case GST_VIDEO_FORMAT_AV12:
      res = (plane == 0
          || plane == 2) ? GST_VIDEO_FORMAT_GRAY8 : GST_VIDEO_FORMAT_NV12;
      break;
      /* never handled by the planar scaler */
    case GST_VIDEO_FORMAT_UNKNOWN:
    case GST_VIDEO_FORMAT_ENCODED:
    case GST_VIDEO_FORMAT_v210:
    case GST_VIDEO_FORMAT_v216:
    case GST_VIDEO_FORMAT_Y210:
    case GST_VIDEO_FORMAT_Y410:
    case GST_VIDEO_FORMAT_UYVP:
    case GST_VIDEO_FORMAT_RGB8P:
    case GST_VIDEO_FORMAT_IYU1:
    case GST_VIDEO_FORMAT_r210:
    case GST_VIDEO_FORMAT_I420_10BE:
    case GST_VIDEO_FORMAT_I420_10LE:
    case GST_VIDEO_FORMAT_I422_10BE:
    case GST_VIDEO_FORMAT_I422_10LE:
    case GST_VIDEO_FORMAT_Y444_10BE:
    case GST_VIDEO_FORMAT_Y444_10LE:
    case GST_VIDEO_FORMAT_I420_12BE:
    case GST_VIDEO_FORMAT_I420_12LE:
    case GST_VIDEO_FORMAT_I422_12BE:
    case GST_VIDEO_FORMAT_I422_12LE:
    case GST_VIDEO_FORMAT_Y444_12BE:
    case GST_VIDEO_FORMAT_Y444_12LE:
    case GST_VIDEO_FORMAT_GBR_10BE:
    case GST_VIDEO_FORMAT_GBR_10LE:
    case GST_VIDEO_FORMAT_GBRA_10BE:
    case GST_VIDEO_FORMAT_GBRA_10LE:
    case GST_VIDEO_FORMAT_GBR_12BE:
    case GST_VIDEO_FORMAT_GBR_12LE:
    case GST_VIDEO_FORMAT_GBRA_12BE:
    case GST_VIDEO_FORMAT_GBRA_12LE:
    case GST_VIDEO_FORMAT_NV12_64Z32:
    case GST_VIDEO_FORMAT_NV12_4L4:
    case GST_VIDEO_FORMAT_NV12_32L32:
    case GST_VIDEO_FORMAT_A420_10BE:
    case GST_VIDEO_FORMAT_A420_10LE:
    case GST_VIDEO_FORMAT_A422_10BE:
    case GST_VIDEO_FORMAT_A422_10LE:
    case GST_VIDEO_FORMAT_A444_10BE:
    case GST_VIDEO_FORMAT_A444_10LE:
    case GST_VIDEO_FORMAT_P010_10BE:
    case GST_VIDEO_FORMAT_P010_10LE:
    case GST_VIDEO_FORMAT_GRAY10_LE32:
    case GST_VIDEO_FORMAT_NV12_10LE32:
    case GST_VIDEO_FORMAT_NV16_10LE32:
    case GST_VIDEO_FORMAT_NV12_10LE40:
    case GST_VIDEO_FORMAT_BGR10A2_LE:
#ifdef USE_TBM
      /* Tizen TBM formats are handled by dedicated fast paths (see
       * setup_scale), never by the planar scaler */
    case GST_VIDEO_FORMAT_SN12:
    case GST_VIDEO_FORMAT_ST12:
#endif
    case GST_VIDEO_FORMAT_RGB10A2_LE:
    case GST_VIDEO_FORMAT_Y444_16BE:
    case GST_VIDEO_FORMAT_Y444_16LE:
    case GST_VIDEO_FORMAT_P016_BE:
    case GST_VIDEO_FORMAT_P016_LE:
    case GST_VIDEO_FORMAT_P012_BE:
    case GST_VIDEO_FORMAT_P012_LE:
    case GST_VIDEO_FORMAT_Y212_BE:
    case GST_VIDEO_FORMAT_Y212_LE:
    case GST_VIDEO_FORMAT_Y412_BE:
    case GST_VIDEO_FORMAT_Y412_LE:
      res = format;
      g_assert_not_reached ();
      break;
#ifdef TIZEN_FEATURE_VIDEO_MODIFICATION
      /* extra formats added by Tizen make the switch non-exhaustive */
    default:
      break;
#endif
  }
  return res;
}
+
+ static gboolean
+ is_merge_yuv (GstVideoInfo * info)
+ {
+ switch (GST_VIDEO_INFO_FORMAT (info)) {
+ case GST_VIDEO_FORMAT_YUY2:
+ case GST_VIDEO_FORMAT_YVYU:
+ case GST_VIDEO_FORMAT_UYVY:
+ case GST_VIDEO_FORMAT_VYUY:
+ return TRUE;
+ default:
+ return FALSE;
+ }
+ }
+
+ static gboolean
+ setup_scale (GstVideoConverter * convert)
+ {
+ int i, n_planes;
+ gint method, cr_method, in_width, in_height, out_width, out_height;
+ guint taps;
+ GstVideoInfo *in_info, *out_info;
+ const GstVideoFormatInfo *in_finfo, *out_finfo;
+ GstVideoFormat in_format, out_format;
+ gboolean interlaced;
+ guint n_threads = convert->conversion_runner->n_threads;
+
+ in_info = &convert->in_info;
+ out_info = &convert->out_info;
+
+ in_finfo = in_info->finfo;
+ out_finfo = out_info->finfo;
+
+ n_planes = GST_VIDEO_INFO_N_PLANES (out_info);
+
+ interlaced = GST_VIDEO_INFO_IS_INTERLACED (&convert->in_info)
+ && GST_VIDEO_INFO_INTERLACE_MODE (&convert->in_info) !=
+ GST_VIDEO_INTERLACE_MODE_ALTERNATE;
+
+ method = GET_OPT_RESAMPLER_METHOD (convert);
+ if (method == GST_VIDEO_RESAMPLER_METHOD_NEAREST)
+ cr_method = method;
+ else
+ cr_method = GET_OPT_CHROMA_RESAMPLER_METHOD (convert);
+ taps = GET_OPT_RESAMPLER_TAPS (convert);
+
+ in_format = GST_VIDEO_INFO_FORMAT (in_info);
+ out_format = GST_VIDEO_INFO_FORMAT (out_info);
+
++#ifdef USE_TBM
++ if(out_format == GST_VIDEO_FORMAT_SN12) {
++ /* do nothing for SN12 output format */
++ return TRUE;
++ }
++#endif
+ switch (in_format) {
+ case GST_VIDEO_FORMAT_RGB15:
+ case GST_VIDEO_FORMAT_RGB16:
+ case GST_VIDEO_FORMAT_BGR15:
+ case GST_VIDEO_FORMAT_BGR16:
+ #if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ case GST_VIDEO_FORMAT_GRAY16_BE:
+ #else
+ case GST_VIDEO_FORMAT_GRAY16_LE:
+ #endif
+ if (method != GST_VIDEO_RESAMPLER_METHOD_NEAREST) {
+ GST_DEBUG ("%s only with nearest resampling",
+ gst_video_format_to_string (in_format));
+ return FALSE;
+ }
+ break;
++#ifdef USE_TBM
++ case GST_VIDEO_FORMAT_SN12:
++ return TRUE; /* do nothing for SN12 format */
++#endif
+ default:
+ break;
+ }
+
+ in_width = convert->in_width;
+ in_height = convert->in_height;
+ out_width = convert->out_width;
+ out_height = convert->out_height;
+
+ if (n_planes == 1 && !GST_VIDEO_FORMAT_INFO_IS_GRAY (out_finfo)) {
+ gint pstride;
+ guint j;
+
+ if (is_merge_yuv (in_info)) {
+ GstVideoScaler *y_scaler, *uv_scaler;
+
+ if (in_width != out_width) {
+ convert->fh_scaler[0].scaler = g_new (GstVideoScaler *, n_threads);
+ for (j = 0; j < n_threads; j++) {
+ y_scaler =
+ gst_video_scaler_new (method, GST_VIDEO_SCALER_FLAG_NONE, taps,
+ GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (in_finfo, GST_VIDEO_COMP_Y,
+ in_width), GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (out_finfo,
+ GST_VIDEO_COMP_Y, out_width), convert->config);
+ uv_scaler =
+ gst_video_scaler_new (method, GST_VIDEO_SCALER_FLAG_NONE,
+ gst_video_scaler_get_max_taps (y_scaler),
+ GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (in_finfo, GST_VIDEO_COMP_U,
+ in_width), GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (out_finfo,
+ GST_VIDEO_COMP_U, out_width), convert->config);
+
+ convert->fh_scaler[0].scaler[j] =
+ gst_video_scaler_combine_packed_YUV (y_scaler, uv_scaler,
+ in_format, out_format);
+
+ gst_video_scaler_free (y_scaler);
+ gst_video_scaler_free (uv_scaler);
+ }
+ } else {
+ convert->fh_scaler[0].scaler = NULL;
+ }
+
+ pstride = GST_VIDEO_FORMAT_INFO_PSTRIDE (out_finfo, GST_VIDEO_COMP_Y);
+ convert->fin_x[0] = GST_ROUND_UP_2 (convert->in_x) * pstride;
+ convert->fout_x[0] = GST_ROUND_UP_2 (convert->out_x) * pstride;
+
+ } else {
+ if (in_width != out_width && in_width != 0 && out_width != 0) {
+ convert->fh_scaler[0].scaler = g_new (GstVideoScaler *, n_threads);
+ for (j = 0; j < n_threads; j++) {
+ convert->fh_scaler[0].scaler[j] =
+ gst_video_scaler_new (method, GST_VIDEO_SCALER_FLAG_NONE, taps,
+ in_width, out_width, convert->config);
+ }
+ } else {
+ convert->fh_scaler[0].scaler = NULL;
+ }
+
+ pstride = GST_VIDEO_FORMAT_INFO_PSTRIDE (out_finfo, GST_VIDEO_COMP_R);
+ convert->fin_x[0] = convert->in_x * pstride;
+ convert->fout_x[0] = convert->out_x * pstride;
+ }
+
+ if (in_height != out_height && in_height != 0 && out_height != 0) {
+ convert->fv_scaler[0].scaler = g_new (GstVideoScaler *, n_threads);
+
+ for (j = 0; j < n_threads; j++) {
+ convert->fv_scaler[0].scaler[j] =
+ gst_video_scaler_new (method,
+ interlaced ?
+ GST_VIDEO_SCALER_FLAG_INTERLACED : GST_VIDEO_SCALER_FLAG_NONE, taps,
+ in_height, out_height, convert->config);
+ }
+ } else {
+ convert->fv_scaler[0].scaler = NULL;
+ }
+
+ convert->fin_y[0] = convert->in_y;
+ convert->fout_y[0] = convert->out_y;
+ convert->fout_width[0] = out_width;
+ convert->fout_height[0] = out_height;
+ convert->fconvert[0] = convert_plane_hv;
+ convert->fformat[0] = get_scale_format (in_format, 0);
+ convert->fsplane[0] = 0;
+ } else {
+ for (i = 0; i < n_planes; i++) {
+ gint out_comp[GST_VIDEO_MAX_COMPONENTS];
+ gint comp, j, iw, ih, ow, oh, pstride;
+ gboolean need_v_scaler, need_h_scaler;
+ GstStructure *config;
+ gint resample_method;
+
+ gst_video_format_info_component (out_finfo, i, out_comp);
+ ow = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (out_finfo, out_comp[0],
+ out_width);
+ oh = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (out_finfo, out_comp[0],
+ out_height);
+ pstride = GST_VIDEO_FORMAT_INFO_PSTRIDE (out_finfo, out_comp[0]);
+
+ /* find the component in this plane and map it to the plane of
+ * the source */
+ if (out_comp[0] < GST_VIDEO_FORMAT_INFO_N_COMPONENTS (in_finfo)) {
+ comp = out_comp[0];
+ iw = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (in_finfo, comp, in_width);
+ ih = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (in_finfo, comp, in_height);
+ convert->fin_x[i] = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (in_finfo, comp,
+ convert->in_x);
+ convert->fin_x[i] *= pstride;
+ convert->fin_y[i] = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (in_finfo, comp,
+ convert->in_y);
+ } else {
+ /* we will use a fill instead, setting the parameters to an invalid
+ * size to reduce confusion */
+ comp = -1;
+ iw = ih = -1;
+ convert->fin_x[i] = -1;
+ convert->fin_y[i] = -1;
+ }
+
+ convert->fout_width[i] = ow;
+ convert->fout_height[i] = oh;
+
+ convert->fout_x[i] = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (out_finfo,
+ out_comp[0], convert->out_x);
+ convert->fout_x[i] *= pstride;
+ convert->fout_y[i] = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (out_finfo,
+ out_comp[0], convert->out_y);
+
+ GST_DEBUG ("plane %d: %dx%d -> %dx%d", i, iw, ih, ow, oh);
+ GST_DEBUG ("plane %d: pstride %d", i, pstride);
+ GST_DEBUG ("plane %d: in_x %d, in_y %d", i, convert->fin_x[i],
+ convert->fin_y[i]);
+ GST_DEBUG ("plane %d: out_x %d, out_y %d", i, convert->fout_x[i],
+ convert->fout_y[i]);
+
+ if (comp == -1) {
+ convert->fconvert[i] = convert_plane_fill;
+ if (GST_VIDEO_INFO_IS_YUV (out_info)) {
+ if (i == 3)
+ convert->ffill[i] = convert->alpha_value;
+ if (i == 0)
+ convert->ffill[i] = 0x00;
+ else
+ convert->ffill[i] = 0x80;
+ } else {
+ if (i == 3)
+ convert->ffill[i] = convert->alpha_value;
+ else
+ convert->ffill[i] = 0x00;
+ }
+ GST_DEBUG ("plane %d fill %02x", i, convert->ffill[i]);
+ continue;
+ } else {
+ convert->fsplane[i] = GST_VIDEO_FORMAT_INFO_PLANE (in_finfo, comp);
+ GST_DEBUG ("plane %d -> %d (comp %d)", i, convert->fsplane[i], comp);
+ }
+
+ config = gst_structure_copy (convert->config);
+
+ resample_method = (i == 0 ? method : cr_method);
+
+ need_v_scaler = FALSE;
+ need_h_scaler = FALSE;
+ if (iw == ow) {
+ if (!interlaced && ih == oh) {
+ convert->fconvert[i] = convert_plane_hv;
+ GST_DEBUG ("plane %d: copy", i);
+ } else if (!interlaced && ih == 2 * oh && pstride == 1
+ && resample_method == GST_VIDEO_RESAMPLER_METHOD_LINEAR) {
+ convert->fconvert[i] = convert_plane_v_halve;
+ GST_DEBUG ("plane %d: vertical halve", i);
+ } else if (!interlaced && 2 * ih == oh && pstride == 1
+ && resample_method == GST_VIDEO_RESAMPLER_METHOD_NEAREST) {
+ convert->fconvert[i] = convert_plane_v_double;
+ GST_DEBUG ("plane %d: vertical double", i);
+ } else {
+ convert->fconvert[i] = convert_plane_hv;
+ GST_DEBUG ("plane %d: vertical scale", i);
+ need_v_scaler = TRUE;
+ }
+ } else if (ih == oh) {
+ if (!interlaced && iw == 2 * ow && pstride == 1
+ && resample_method == GST_VIDEO_RESAMPLER_METHOD_LINEAR) {
+ convert->fconvert[i] = convert_plane_h_halve;
+ GST_DEBUG ("plane %d: horizontal halve", i);
+ } else if (!interlaced && 2 * iw == ow && pstride == 1
+ && resample_method == GST_VIDEO_RESAMPLER_METHOD_NEAREST) {
+ convert->fconvert[i] = convert_plane_h_double;
+ GST_DEBUG ("plane %d: horizontal double", i);
+ } else {
+ convert->fconvert[i] = convert_plane_hv;
+ GST_DEBUG ("plane %d: horizontal scale", i);
+ need_h_scaler = TRUE;
+ }
+ } else {
+ if (!interlaced && iw == 2 * ow && ih == 2 * oh && pstride == 1
+ && resample_method == GST_VIDEO_RESAMPLER_METHOD_LINEAR) {
+ convert->fconvert[i] = convert_plane_hv_halve;
+ GST_DEBUG ("plane %d: horizontal/vertical halve", i);
+ } else if (!interlaced && 2 * iw == ow && 2 * ih == oh && pstride == 1
+ && resample_method == GST_VIDEO_RESAMPLER_METHOD_NEAREST) {
+ convert->fconvert[i] = convert_plane_hv_double;
+ GST_DEBUG ("plane %d: horizontal/vertical double", i);
+ } else {
+ convert->fconvert[i] = convert_plane_hv;
+ GST_DEBUG ("plane %d: horizontal/vertical scale", i);
+ need_v_scaler = TRUE;
+ need_h_scaler = TRUE;
+ }
+ }
+
+ if (need_h_scaler && iw != 0 && ow != 0) {
+ convert->fh_scaler[i].scaler = g_new (GstVideoScaler *, n_threads);
+
+ for (j = 0; j < n_threads; j++) {
+ convert->fh_scaler[i].scaler[j] =
+ gst_video_scaler_new (resample_method, GST_VIDEO_SCALER_FLAG_NONE,
+ taps, iw, ow, config);
+ }
+ } else {
+ convert->fh_scaler[i].scaler = NULL;
+ }
+
+ if (need_v_scaler && ih != 0 && oh != 0) {
+ convert->fv_scaler[i].scaler = g_new (GstVideoScaler *, n_threads);
+
+ for (j = 0; j < n_threads; j++) {
+ convert->fv_scaler[i].scaler[j] =
+ gst_video_scaler_new (resample_method,
+ interlaced ?
+ GST_VIDEO_SCALER_FLAG_INTERLACED : GST_VIDEO_SCALER_FLAG_NONE,
+ taps, ih, oh, config);
+ }
+ } else {
+ convert->fv_scaler[i].scaler = NULL;
+ }
+
+ gst_structure_free (config);
+ convert->fformat[i] = get_scale_format (in_format, i);
+ }
+ }
+
+ return TRUE;
+ }
+
+ /* Fast paths */
+
+ /* One fastpath conversion rule: a fixed in/out format pair plus the
+  * constraints under which the fastpath may be chosen by
+  * video_converter_lookup_fastpath(). */
+ typedef struct
+ {
+ GstVideoFormat in_format;
+ GstVideoFormat out_format;
+ /* TRUE if the convert function handles interlaced content */
+ gboolean keeps_interlaced;
+ /* TRUE if the convert function applies the color matrix itself */
+ gboolean needs_color_matrix;
+ /* TRUE if the convert function cannot scale (in size must equal out size) */
+ gboolean keeps_size;
+ /* TRUE if the convert function supports input cropping */
+ gboolean do_crop;
+ /* TRUE if the convert function supports output borders */
+ gboolean do_border;
+ /* which alpha modes the convert function supports */
+ gboolean alpha_copy;
+ gboolean alpha_set;
+ gboolean alpha_mult;
+ /* alignment masks: width/height must have these bits clear */
+ gint width_align, height_align;
+ void (*convert) (GstVideoConverter * convert, const GstVideoFrame * src,
+ GstVideoFrame * dest);
+ } VideoTransform;
+
+ /* Fastpath lookup table, scanned in order by
+  * video_converter_lookup_fastpath().  Column order follows the
+  * VideoTransform fields: in_format, out_format, keeps_interlaced,
+  * needs_color_matrix, keeps_size, do_crop, do_border, alpha_copy,
+  * alpha_set, alpha_mult, width_align, height_align, convert. */
+ static const VideoTransform transforms[] = {
+ /* planar -> packed */
+ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_YUY2, TRUE, FALSE, TRUE, FALSE,
+ FALSE, FALSE, FALSE, FALSE, 0, 0, convert_I420_YUY2},
+ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_UYVY, TRUE, FALSE, TRUE, FALSE,
+ FALSE, FALSE, FALSE, FALSE, 0, 0, convert_I420_UYVY},
+ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_AYUV, TRUE, FALSE, TRUE, FALSE,
+ FALSE, FALSE, TRUE, FALSE, 0, 0, convert_I420_AYUV},
+
+ {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_YUY2, TRUE, FALSE, TRUE, FALSE,
+ FALSE, FALSE, FALSE, FALSE, 0, 0, convert_I420_YUY2},
+ {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_UYVY, TRUE, FALSE, TRUE, FALSE,
+ FALSE, FALSE, FALSE, FALSE, 0, 0, convert_I420_UYVY},
+ {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_AYUV, TRUE, FALSE, TRUE, FALSE,
+ FALSE, FALSE, TRUE, FALSE, 0, 0, convert_I420_AYUV},
+
+ {GST_VIDEO_FORMAT_Y42B, GST_VIDEO_FORMAT_YUY2, TRUE, FALSE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_Y42B_YUY2},
+ {GST_VIDEO_FORMAT_Y42B, GST_VIDEO_FORMAT_UYVY, TRUE, FALSE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_Y42B_UYVY},
+ {GST_VIDEO_FORMAT_Y42B, GST_VIDEO_FORMAT_AYUV, TRUE, FALSE, TRUE, TRUE,
+ TRUE, FALSE, TRUE, FALSE, 1, 0, convert_Y42B_AYUV},
+
+ {GST_VIDEO_FORMAT_Y444, GST_VIDEO_FORMAT_YUY2, TRUE, FALSE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 1, 0, convert_Y444_YUY2},
+ {GST_VIDEO_FORMAT_Y444, GST_VIDEO_FORMAT_UYVY, TRUE, FALSE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 1, 0, convert_Y444_UYVY},
+ {GST_VIDEO_FORMAT_Y444, GST_VIDEO_FORMAT_AYUV, TRUE, FALSE, TRUE, TRUE,
+ TRUE, FALSE, TRUE, FALSE, 0, 0, convert_Y444_AYUV},
+
+ /* packed -> packed */
+ {GST_VIDEO_FORMAT_YUY2, GST_VIDEO_FORMAT_YUY2, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_YUY2, GST_VIDEO_FORMAT_UYVY, TRUE, FALSE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_UYVY_YUY2}, /* alias */
+ {GST_VIDEO_FORMAT_YUY2, GST_VIDEO_FORMAT_AYUV, TRUE, FALSE, TRUE, TRUE,
+ TRUE, FALSE, TRUE, FALSE, 1, 0, convert_YUY2_AYUV},
+
+ {GST_VIDEO_FORMAT_UYVY, GST_VIDEO_FORMAT_UYVY, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_UYVY, GST_VIDEO_FORMAT_YUY2, TRUE, FALSE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_UYVY_YUY2},
+ {GST_VIDEO_FORMAT_UYVY, GST_VIDEO_FORMAT_AYUV, TRUE, FALSE, TRUE, TRUE,
+ TRUE, FALSE, TRUE, FALSE, 0, 0, convert_UYVY_AYUV},
+
+ {GST_VIDEO_FORMAT_AYUV, GST_VIDEO_FORMAT_AYUV, TRUE, FALSE, FALSE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_AYUV, GST_VIDEO_FORMAT_YUY2, TRUE, FALSE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 1, 0, convert_AYUV_YUY2},
+ {GST_VIDEO_FORMAT_AYUV, GST_VIDEO_FORMAT_UYVY, TRUE, FALSE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 1, 0, convert_AYUV_UYVY},
+
+ {GST_VIDEO_FORMAT_v210, GST_VIDEO_FORMAT_UYVY, TRUE, FALSE, TRUE, FALSE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_v210_UYVY},
+ {GST_VIDEO_FORMAT_v210, GST_VIDEO_FORMAT_YUY2, TRUE, FALSE, TRUE, FALSE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_v210_YUY2},
+
+ /* packed -> planar */
+ {GST_VIDEO_FORMAT_YUY2, GST_VIDEO_FORMAT_I420, TRUE, FALSE, TRUE, FALSE,
+ FALSE, FALSE, FALSE, FALSE, 0, 0, convert_YUY2_I420},
+ {GST_VIDEO_FORMAT_YUY2, GST_VIDEO_FORMAT_YV12, TRUE, FALSE, TRUE, FALSE,
+ FALSE, FALSE, FALSE, FALSE, 0, 0, convert_YUY2_I420},
+ {GST_VIDEO_FORMAT_YUY2, GST_VIDEO_FORMAT_Y42B, TRUE, FALSE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_YUY2_Y42B},
+ {GST_VIDEO_FORMAT_YUY2, GST_VIDEO_FORMAT_Y444, TRUE, FALSE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_YUY2_Y444},
+ {GST_VIDEO_FORMAT_UYVY, GST_VIDEO_FORMAT_GRAY8, TRUE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_UYVY_GRAY8},
+
+ {GST_VIDEO_FORMAT_UYVY, GST_VIDEO_FORMAT_I420, TRUE, FALSE, TRUE, FALSE,
+ FALSE, FALSE, FALSE, FALSE, 0, 0, convert_UYVY_I420},
+ {GST_VIDEO_FORMAT_UYVY, GST_VIDEO_FORMAT_YV12, TRUE, FALSE, TRUE, FALSE,
+ FALSE, FALSE, FALSE, FALSE, 0, 0, convert_UYVY_I420},
+ {GST_VIDEO_FORMAT_UYVY, GST_VIDEO_FORMAT_Y42B, TRUE, FALSE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_UYVY_Y42B},
+ {GST_VIDEO_FORMAT_UYVY, GST_VIDEO_FORMAT_Y444, TRUE, FALSE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_UYVY_Y444},
+
+ {GST_VIDEO_FORMAT_AYUV, GST_VIDEO_FORMAT_I420, FALSE, FALSE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 1, 1, convert_AYUV_I420},
+ {GST_VIDEO_FORMAT_AYUV, GST_VIDEO_FORMAT_YV12, FALSE, FALSE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 1, 1, convert_AYUV_I420},
+ {GST_VIDEO_FORMAT_AYUV, GST_VIDEO_FORMAT_Y42B, TRUE, FALSE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 1, 0, convert_AYUV_Y42B},
+ {GST_VIDEO_FORMAT_AYUV, GST_VIDEO_FORMAT_Y444, TRUE, FALSE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_AYUV_Y444},
+
+ {GST_VIDEO_FORMAT_v210, GST_VIDEO_FORMAT_I420, TRUE, FALSE, TRUE, FALSE,
+ FALSE, FALSE, FALSE, FALSE, 0, 0, convert_v210_I420},
+ {GST_VIDEO_FORMAT_v210, GST_VIDEO_FORMAT_YV12, TRUE, FALSE, TRUE, FALSE,
+ FALSE, FALSE, FALSE, FALSE, 0, 0, convert_v210_I420},
+ {GST_VIDEO_FORMAT_v210, GST_VIDEO_FORMAT_Y42B, TRUE, FALSE, TRUE, FALSE,
+ FALSE, FALSE, FALSE, FALSE, 0, 0, convert_v210_Y42B},
+
+ /* planar -> planar */
+ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_I420, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_YV12, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_Y41B, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_Y42B, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_Y444, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
++#ifdef USE_TBM
++ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_SN12, FALSE, FALSE, FALSE, TRUE,
++ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_SN12},
++#endif
+ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_GRAY8, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_A420, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, TRUE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_YUV9, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_YVU9, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+
+ {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_I420, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_YV12, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_Y41B, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_Y42B, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_Y444, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_GRAY8, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_A420, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, TRUE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_YUV9, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_YVU9, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+
+ {GST_VIDEO_FORMAT_Y41B, GST_VIDEO_FORMAT_I420, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_Y41B, GST_VIDEO_FORMAT_YV12, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_Y41B, GST_VIDEO_FORMAT_Y41B, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_Y41B, GST_VIDEO_FORMAT_Y42B, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_Y41B, GST_VIDEO_FORMAT_Y444, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_Y41B, GST_VIDEO_FORMAT_GRAY8, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_Y41B, GST_VIDEO_FORMAT_A420, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, TRUE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_Y41B, GST_VIDEO_FORMAT_YUV9, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_Y41B, GST_VIDEO_FORMAT_YVU9, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+
+ {GST_VIDEO_FORMAT_Y42B, GST_VIDEO_FORMAT_I420, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_Y42B, GST_VIDEO_FORMAT_YV12, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_Y42B, GST_VIDEO_FORMAT_Y41B, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_Y42B, GST_VIDEO_FORMAT_Y42B, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_Y42B, GST_VIDEO_FORMAT_Y444, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_Y42B, GST_VIDEO_FORMAT_GRAY8, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_Y42B, GST_VIDEO_FORMAT_A420, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, TRUE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_Y42B, GST_VIDEO_FORMAT_YUV9, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_Y42B, GST_VIDEO_FORMAT_YVU9, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+
+ {GST_VIDEO_FORMAT_Y444, GST_VIDEO_FORMAT_I420, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_Y444, GST_VIDEO_FORMAT_YV12, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_Y444, GST_VIDEO_FORMAT_Y41B, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_Y444, GST_VIDEO_FORMAT_Y42B, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_Y444, GST_VIDEO_FORMAT_Y444, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_Y444, GST_VIDEO_FORMAT_GRAY8, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_Y444, GST_VIDEO_FORMAT_A420, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, TRUE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_Y444, GST_VIDEO_FORMAT_YUV9, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_Y444, GST_VIDEO_FORMAT_YVU9, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+
+ {GST_VIDEO_FORMAT_GRAY8, GST_VIDEO_FORMAT_I420, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_GRAY8, GST_VIDEO_FORMAT_YV12, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_GRAY8, GST_VIDEO_FORMAT_Y41B, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_GRAY8, GST_VIDEO_FORMAT_Y42B, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_GRAY8, GST_VIDEO_FORMAT_Y444, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_GRAY8, GST_VIDEO_FORMAT_GRAY8, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_GRAY8, GST_VIDEO_FORMAT_A420, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, TRUE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_GRAY8, GST_VIDEO_FORMAT_YUV9, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_GRAY8, GST_VIDEO_FORMAT_YVU9, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+
+ {GST_VIDEO_FORMAT_A420, GST_VIDEO_FORMAT_I420, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_A420, GST_VIDEO_FORMAT_YV12, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_A420, GST_VIDEO_FORMAT_Y41B, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_A420, GST_VIDEO_FORMAT_Y42B, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_A420, GST_VIDEO_FORMAT_Y444, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_A420, GST_VIDEO_FORMAT_GRAY8, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_A420, GST_VIDEO_FORMAT_A420, TRUE, FALSE, FALSE, TRUE,
+ TRUE, TRUE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_A420, GST_VIDEO_FORMAT_YUV9, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_A420, GST_VIDEO_FORMAT_YVU9, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+
+ {GST_VIDEO_FORMAT_YUV9, GST_VIDEO_FORMAT_I420, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_YUV9, GST_VIDEO_FORMAT_YV12, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_YUV9, GST_VIDEO_FORMAT_Y41B, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_YUV9, GST_VIDEO_FORMAT_Y42B, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_YUV9, GST_VIDEO_FORMAT_Y444, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_YUV9, GST_VIDEO_FORMAT_GRAY8, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_YUV9, GST_VIDEO_FORMAT_A420, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, TRUE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_YUV9, GST_VIDEO_FORMAT_YUV9, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_YUV9, GST_VIDEO_FORMAT_YVU9, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+
+ {GST_VIDEO_FORMAT_YVU9, GST_VIDEO_FORMAT_I420, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_YVU9, GST_VIDEO_FORMAT_YV12, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_YVU9, GST_VIDEO_FORMAT_Y41B, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_YVU9, GST_VIDEO_FORMAT_Y42B, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_YVU9, GST_VIDEO_FORMAT_Y444, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_YVU9, GST_VIDEO_FORMAT_GRAY8, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_YVU9, GST_VIDEO_FORMAT_A420, FALSE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, TRUE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_YVU9, GST_VIDEO_FORMAT_YUV9, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_YVU9, GST_VIDEO_FORMAT_YVU9, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+
+ /* semiplanar -> semiplanar */
+ {GST_VIDEO_FORMAT_NV12, GST_VIDEO_FORMAT_NV12, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_NV12, GST_VIDEO_FORMAT_NV16, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_NV12, GST_VIDEO_FORMAT_NV24, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+
+ {GST_VIDEO_FORMAT_NV21, GST_VIDEO_FORMAT_NV21, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+
+ {GST_VIDEO_FORMAT_NV16, GST_VIDEO_FORMAT_NV12, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_NV16, GST_VIDEO_FORMAT_NV16, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_NV16, GST_VIDEO_FORMAT_NV24, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+
+ {GST_VIDEO_FORMAT_NV61, GST_VIDEO_FORMAT_NV61, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+
+ {GST_VIDEO_FORMAT_NV24, GST_VIDEO_FORMAT_NV12, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_NV24, GST_VIDEO_FORMAT_NV16, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_NV24, GST_VIDEO_FORMAT_NV24, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+
+ #if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ {GST_VIDEO_FORMAT_AYUV, GST_VIDEO_FORMAT_ARGB, TRUE, TRUE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, 0, 0, convert_AYUV_ARGB},
+ {GST_VIDEO_FORMAT_AYUV, GST_VIDEO_FORMAT_BGRA, TRUE, TRUE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, 0, 0, convert_AYUV_BGRA},
+ {GST_VIDEO_FORMAT_AYUV, GST_VIDEO_FORMAT_xRGB, TRUE, TRUE, TRUE, TRUE, TRUE,
+ FALSE, FALSE, FALSE, 0, 0, convert_AYUV_ARGB}, /* alias */
+ {GST_VIDEO_FORMAT_AYUV, GST_VIDEO_FORMAT_BGRx, TRUE, TRUE, TRUE, TRUE, TRUE,
+ FALSE, FALSE, FALSE, 0, 0, convert_AYUV_BGRA}, /* alias */
+ {GST_VIDEO_FORMAT_AYUV, GST_VIDEO_FORMAT_ABGR, TRUE, TRUE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, 0, 0, convert_AYUV_ABGR},
+ {GST_VIDEO_FORMAT_AYUV, GST_VIDEO_FORMAT_RGBA, TRUE, TRUE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, 0, 0, convert_AYUV_RGBA},
+ {GST_VIDEO_FORMAT_AYUV, GST_VIDEO_FORMAT_xBGR, TRUE, TRUE, TRUE, TRUE, TRUE,
+ FALSE, FALSE, FALSE, 0, 0, convert_AYUV_ABGR}, /* alias */
+ {GST_VIDEO_FORMAT_AYUV, GST_VIDEO_FORMAT_RGBx, TRUE, TRUE, TRUE, TRUE, TRUE,
+ FALSE, FALSE, FALSE, 0, 0, convert_AYUV_RGBA}, /* alias */
+ #endif
+
+ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_BGRA, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_BGRA},
+ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_BGRx, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_BGRA},
+ {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_BGRA, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_BGRA},
+ {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_BGRx, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_BGRA},
+
+ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_ARGB, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_ARGB},
+ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_xRGB, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_ARGB},
+ {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_ARGB, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_ARGB},
+ {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_xRGB, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_ARGB},
+
+ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_ABGR, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_pack_ARGB},
+ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_xBGR, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_pack_ARGB},
+ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_RGBA, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_pack_ARGB},
+ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_RGBx, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_pack_ARGB},
+ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_RGB, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_pack_ARGB},
+ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_BGR, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_pack_ARGB},
+ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_RGB15, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_pack_ARGB},
+ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_BGR15, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_pack_ARGB},
+ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_RGB16, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_pack_ARGB},
+ {GST_VIDEO_FORMAT_I420, GST_VIDEO_FORMAT_BGR16, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_pack_ARGB},
+
+ {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_ABGR, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_pack_ARGB},
+ {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_xBGR, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_pack_ARGB},
+ {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_RGBA, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_pack_ARGB},
+ {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_RGBx, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_pack_ARGB},
+ {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_RGB, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_pack_ARGB},
+ {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_BGR, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_pack_ARGB},
+ {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_RGB15, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_pack_ARGB},
+ {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_BGR15, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_pack_ARGB},
+ {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_RGB16, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_pack_ARGB},
+ {GST_VIDEO_FORMAT_YV12, GST_VIDEO_FORMAT_BGR16, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_pack_ARGB},
+
+ {GST_VIDEO_FORMAT_A420, GST_VIDEO_FORMAT_ABGR, FALSE, TRUE, TRUE, TRUE,
+ TRUE, TRUE, FALSE, FALSE, 0, 0, convert_A420_pack_ARGB},
+ {GST_VIDEO_FORMAT_A420, GST_VIDEO_FORMAT_RGBA, FALSE, TRUE, TRUE, TRUE,
+ TRUE, TRUE, FALSE, FALSE, 0, 0, convert_A420_pack_ARGB},
+ {GST_VIDEO_FORMAT_A420, GST_VIDEO_FORMAT_BGRA, FALSE, TRUE, TRUE, TRUE,
+ TRUE, TRUE, FALSE, FALSE, 0, 0, convert_A420_BGRA},
+ /* A420 to non-alpha RGB formats, reuse I420_* method */
+ {GST_VIDEO_FORMAT_A420, GST_VIDEO_FORMAT_BGRx, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_BGRA},
+ {GST_VIDEO_FORMAT_A420, GST_VIDEO_FORMAT_xBGR, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_pack_ARGB},
+ {GST_VIDEO_FORMAT_A420, GST_VIDEO_FORMAT_RGBx, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_pack_ARGB},
+ {GST_VIDEO_FORMAT_A420, GST_VIDEO_FORMAT_RGB, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_pack_ARGB},
+ {GST_VIDEO_FORMAT_A420, GST_VIDEO_FORMAT_BGR, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_pack_ARGB},
+ {GST_VIDEO_FORMAT_A420, GST_VIDEO_FORMAT_RGB15, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_pack_ARGB},
+ {GST_VIDEO_FORMAT_A420, GST_VIDEO_FORMAT_BGR16, FALSE, TRUE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_I420_pack_ARGB},
+
+ /* scalers */
+ {GST_VIDEO_FORMAT_GBR, GST_VIDEO_FORMAT_GBR, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_GBRA, GST_VIDEO_FORMAT_GBRA, TRUE, FALSE, FALSE, TRUE,
+ TRUE, TRUE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_RGBP, GST_VIDEO_FORMAT_RGBP, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_BGRP, GST_VIDEO_FORMAT_BGRP, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+
+ {GST_VIDEO_FORMAT_YVYU, GST_VIDEO_FORMAT_YVYU, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+
+ {GST_VIDEO_FORMAT_RGB15, GST_VIDEO_FORMAT_RGB15, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_RGB16, GST_VIDEO_FORMAT_RGB16, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_BGR15, GST_VIDEO_FORMAT_BGR15, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_BGR16, GST_VIDEO_FORMAT_BGR16, TRUE, FALSE, FALSE, TRUE,
+ TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+
+ {GST_VIDEO_FORMAT_RGB, GST_VIDEO_FORMAT_RGB, TRUE, FALSE, FALSE, TRUE, TRUE,
+ FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_BGR, GST_VIDEO_FORMAT_BGR, TRUE, FALSE, FALSE, TRUE, TRUE,
+ FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_v308, GST_VIDEO_FORMAT_v308, TRUE, FALSE, FALSE, TRUE, TRUE,
+ FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_IYU2, GST_VIDEO_FORMAT_IYU2, TRUE, FALSE, FALSE, TRUE, TRUE,
+ FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+
+ {GST_VIDEO_FORMAT_ARGB, GST_VIDEO_FORMAT_ARGB, TRUE, FALSE, FALSE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_xRGB, GST_VIDEO_FORMAT_xRGB, TRUE, FALSE, FALSE, TRUE, TRUE,
+ FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_ABGR, GST_VIDEO_FORMAT_ABGR, TRUE, FALSE, FALSE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_xBGR, GST_VIDEO_FORMAT_xBGR, TRUE, FALSE, FALSE, TRUE, TRUE,
+ FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_RGBA, GST_VIDEO_FORMAT_RGBA, TRUE, FALSE, FALSE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_RGBx, GST_VIDEO_FORMAT_RGBx, TRUE, FALSE, FALSE, TRUE, TRUE,
+ FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_BGRA, GST_VIDEO_FORMAT_BGRA, TRUE, FALSE, FALSE, TRUE, TRUE,
+ TRUE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_BGRx, GST_VIDEO_FORMAT_BGRx, TRUE, FALSE, FALSE, TRUE, TRUE,
+ FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+
+ {GST_VIDEO_FORMAT_ARGB64, GST_VIDEO_FORMAT_ARGB64, TRUE, FALSE, FALSE, TRUE,
+ TRUE, TRUE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_AYUV64, GST_VIDEO_FORMAT_AYUV64, TRUE, FALSE, FALSE, TRUE,
+ TRUE, TRUE, FALSE, FALSE, 0, 0, convert_scale_planes},
+
+ {GST_VIDEO_FORMAT_GRAY16_LE, GST_VIDEO_FORMAT_GRAY16_LE, TRUE, FALSE, FALSE,
+ TRUE, TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ {GST_VIDEO_FORMAT_GRAY16_BE, GST_VIDEO_FORMAT_GRAY16_BE, TRUE, FALSE, FALSE,
+ TRUE, TRUE, FALSE, FALSE, FALSE, 0, 0, convert_scale_planes},
+ };
+
+ /* Scan the transforms[] table for a fastpath matching the converter's
+  * configuration.  On a match, install the fastpath convert function
+  * (plus per-thread temp lines, scalers and border setup as needed) and
+  * return TRUE; return FALSE when the generic conversion path must be
+  * used instead. */
+ static gboolean
+ video_converter_lookup_fastpath (GstVideoConverter * convert)
+ {
+ int i;
+ GstVideoFormat in_format, out_format;
+ GstVideoTransferFunction in_transf, out_transf;
+ gboolean interlaced, same_matrix, same_primaries, same_size, crop, border;
+ gboolean need_copy, need_set, need_mult;
+ gint width, height;
+ guint in_bpp, out_bpp;
+
+ width = GST_VIDEO_INFO_WIDTH (&convert->in_info);
+ height = GST_VIDEO_INFO_FIELD_HEIGHT (&convert->in_info);
+
+ /* fastpaths can't do dithering */
+ if (GET_OPT_DITHER_QUANTIZATION (convert) != 1)
+ return FALSE;
+
+ in_bpp = convert->in_info.finfo->bits;
+ out_bpp = convert->out_info.finfo->bits;
+
+ /* we don't do gamma conversion in fastpath */
+ in_transf = convert->in_info.colorimetry.transfer;
+ out_transf = convert->out_info.colorimetry.transfer;
+
+ same_size = (width == convert->out_width && height == convert->out_height);
+
+ /* fastpaths don't do gamma */
+ if (CHECK_GAMMA_REMAP (convert) && (!same_size
+ || !gst_video_transfer_function_is_equivalent (in_transf, in_bpp,
+ out_transf, out_bpp)))
+ return FALSE;
+
+ /* which alpha operations the conversion requires */
+ need_copy = (convert->alpha_mode & ALPHA_MODE_COPY) == ALPHA_MODE_COPY;
+ need_set = (convert->alpha_mode & ALPHA_MODE_SET) == ALPHA_MODE_SET;
+ need_mult = (convert->alpha_mode & ALPHA_MODE_MULT) == ALPHA_MODE_MULT;
+ GST_DEBUG ("alpha copy %d, set %d, mult %d", need_copy, need_set, need_mult);
+
+ in_format = GST_VIDEO_INFO_FORMAT (&convert->in_info);
+ out_format = GST_VIDEO_INFO_FORMAT (&convert->out_info);
+
+ if (CHECK_MATRIX_NONE (convert)) {
+ same_matrix = TRUE;
+ } else {
+ GstVideoColorMatrix in_matrix, out_matrix;
+
+ in_matrix = convert->in_info.colorimetry.matrix;
+ out_matrix = convert->out_info.colorimetry.matrix;
+ same_matrix = in_matrix == out_matrix;
+ }
+
+ if (CHECK_PRIMARIES_NONE (convert)) {
+ same_primaries = TRUE;
+ } else {
+ GstVideoColorPrimaries in_primaries, out_primaries;
+
+ in_primaries = convert->in_info.colorimetry.primaries;
+ out_primaries = convert->out_info.colorimetry.primaries;
+ same_primaries = in_primaries == out_primaries;
+ }
+
+ /* either side interlaced means the fastpath must handle interlacing */
+ interlaced = GST_VIDEO_INFO_IS_INTERLACED (&convert->in_info);
+ interlaced |= GST_VIDEO_INFO_IS_INTERLACED (&convert->out_info);
+
+ /* does the input need cropping / the output need a border? */
+ crop = convert->in_x || convert->in_y
+ || convert->in_width < convert->in_maxwidth
+ || convert->in_height < convert->in_maxheight;
+ border = convert->out_x || convert->out_y
+ || convert->out_width < convert->out_maxwidth
+ || convert->out_height < convert->out_maxheight;
+
+ for (i = 0; i < G_N_ELEMENTS (transforms); i++) {
+ /* width_align/height_align are bit masks: the dimension must have
+ * those bits clear for the entry to apply */
+ if (transforms[i].in_format == in_format &&
+ transforms[i].out_format == out_format &&
+ (transforms[i].keeps_interlaced || !interlaced) &&
+ (transforms[i].needs_color_matrix || (same_matrix && same_primaries))
+ && (!transforms[i].keeps_size || same_size)
+ && (transforms[i].width_align & width) == 0
+ && (transforms[i].height_align & height) == 0
+ && (transforms[i].do_crop || !crop)
+ && (transforms[i].do_border || !border)
+ && (transforms[i].alpha_copy || !need_copy)
+ && (transforms[i].alpha_set || !need_set)
+ && (transforms[i].alpha_mult || !need_mult)) {
+ guint j;
+
+ GST_DEBUG ("using fastpath");
+ if (transforms[i].needs_color_matrix)
+ video_converter_compute_matrix (convert);
+ convert->convert = transforms[i].convert;
+
+ /* per-thread temporary line buffers used by the convert functions */
+ convert->tmpline =
+ g_new (guint16 *, convert->conversion_runner->n_threads);
+ for (j = 0; j < convert->conversion_runner->n_threads; j++)
+ convert->tmpline[j] = g_malloc0 (sizeof (guint16) * (width + 8) * 4);
+
+ if (!transforms[i].keeps_size)
+ if (!setup_scale (convert))
+ return FALSE;
+ if (border)
+ setup_borderline (convert);
+ return TRUE;
+ }
+ }
+ GST_DEBUG ("no fastpath found");
+ return FALSE;
+ }
--- /dev/null
-
+ /* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Library <2002> Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * Copyright (C) 2007 David A. Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ # include "config.h"
+ #endif
+
+ #include <string.h>
+ #include <stdio.h>
+
+ #include "video-format.h"
+ #include "video-orc.h"
+
+ #ifndef restrict
+ #if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+ /* restrict should be available */
+ #elif defined(__GNUC__) && __GNUC__ >= 4
+ #define restrict __restrict__
+ #elif defined(_MSC_VER) && _MSC_VER >= 1500
+ #define restrict __restrict
+ #else
+ #define restrict /* no op */
+ #endif
+ #endif
+
/* Line conversion to AYUV */

/* Helpers for addressing lines inside the per-plane (data, stride) arrays
 * that every pack/unpack function receives as parameters.
 *
 * Fix: GET_PLANE_STRIDE used function-call syntax `stride(plane)` on the
 * `stride` array parameter, which would fail to compile wherever the macro
 * is expanded; index the array like GET_PLANE_LINE does. */
#define GET_PLANE_STRIDE(plane) (stride[plane])
#define GET_PLANE_LINE(plane, line) \
  (gpointer)(((guint8*)(data[plane])) + stride[plane] * (line))

#define GET_COMP_STRIDE(comp) \
  GST_VIDEO_FORMAT_INFO_STRIDE (info, stride, comp)
#define GET_COMP_DATA(comp) \
  GST_VIDEO_FORMAT_INFO_DATA (info, data, comp)

#define GET_COMP_LINE(comp, line) \
  (gpointer)(((guint8*)GET_COMP_DATA (comp)) + \
      GET_COMP_STRIDE(comp) * (line))

#define GET_LINE(line) GET_PLANE_LINE (0, line)

#define GET_Y_LINE(line) GET_COMP_LINE(GST_VIDEO_COMP_Y, line)
#define GET_U_LINE(line) GET_COMP_LINE(GST_VIDEO_COMP_U, line)
#define GET_V_LINE(line) GET_COMP_LINE(GST_VIDEO_COMP_V, line)

#define GET_R_LINE(line) GET_COMP_LINE(GST_VIDEO_COMP_R, line)
#define GET_G_LINE(line) GET_COMP_LINE(GST_VIDEO_COMP_G, line)
#define GET_B_LINE(line) GET_COMP_LINE(GST_VIDEO_COMP_B, line)

#define GET_A_LINE(line) GET_COMP_LINE(GST_VIDEO_COMP_A, line)

/* Map a luma line number to its 4:2:0 / 4:1:0 chroma line; for interlaced
 * packing the fields are interleaved, so the field-parity bit (line & 1)
 * is preserved while halving/quartering. */
#define GET_UV_420(line, flags) \
    (flags & GST_VIDEO_PACK_FLAG_INTERLACED ? \
        ((line & ~3) >> 1) + (line & 1) : \
        line >> 1)
#define GET_UV_410(line, flags) \
    (flags & GST_VIDEO_PACK_FLAG_INTERLACED ? \
        ((line & ~7) >> 2) + (line & 1) : \
        line >> 2)

/* TRUE when this luma line is the one that carries the subsampled chroma
 * samples for its group of lines. */
#define IS_CHROMA_LINE_420(line, flags) \
    (flags & GST_VIDEO_PACK_FLAG_INTERLACED ? \
        !(line & 2) : !(line & 1))
#define IS_CHROMA_LINE_410(line, flags) \
    (flags & GST_VIDEO_PACK_FLAG_INTERLACED ? \
        !(line & 6) : !(line & 3))

#define IS_ALIGNED(x,n) ((((guintptr)(x)&((n)-1))) == 0)
+
#define PACK_420 GST_VIDEO_FORMAT_AYUV, unpack_planar_420, 1, pack_planar_420
/* Unpack a line of planar 4:2:0 (I420-style) into AYUV, alpha forced to
 * 0xff.  One chroma sample is shared by 2x2 luma samples; GET_UV_420 picks
 * the chroma line (interlace-aware). */
static void
unpack_planar_420 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  gint uv = GET_UV_420 (y, flags);
  const guint8 *restrict sy = GET_Y_LINE (y);
  const guint8 *restrict su = GET_U_LINE (uv);
  const guint8 *restrict sv = GET_V_LINE (uv);
  guint8 *restrict d = dest;

  sy += x;
  su += x >> 1;                 /* one chroma sample per 2 luma samples */
  sv += x >> 1;

  if (x & 1) {
    /* odd start x: emit the second pixel of a chroma pair by hand so the
     * orc loop below starts on a pair boundary */
    d[0] = 0xff;
    d[1] = *sy++;
    d[2] = *su++;
    d[3] = *sv++;
    width--;
    d += 4;
  }
  video_orc_unpack_I420 (d, sy, su, sv, width);
}
+
/* Pack a line of AYUV (A Y U V per 4 bytes) into planar 4:2:0.  Chroma is
 * only written on the chroma-carrying lines (IS_CHROMA_LINE_420); other
 * lines store luma only, so chroma there is dropped (no averaging). */
static void
pack_planar_420 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  gint uv = GET_UV_420 (y, flags);
  guint8 *dy = GET_Y_LINE (y);
  guint8 *du = GET_U_LINE (uv);
  guint8 *dv = GET_V_LINE (uv);
  const guint8 *s = src;

  if (IS_CHROMA_LINE_420 (y, flags)) {
    /* orc fastpath needs an 8-byte aligned source; otherwise fall back to
     * the scalar loop taking chroma from the first pixel of each pair */
    if (IS_ALIGNED (s, 8))
      video_orc_pack_I420 (dy, du, dv, s, width / 2);
    else {
      gint i;

      for (i = 0; i < width / 2; i++) {
        dy[i * 2 + 0] = s[i * 8 + 1];
        dy[i * 2 + 1] = s[i * 8 + 5];
        du[i] = s[i * 8 + 2];
        dv[i] = s[i * 8 + 3];
      }
    }
    /* trailing pixel when width is odd */
    if (width & 1) {
      gint i = width - 1;

      dy[i] = s[i * 4 + 1];
      du[i >> 1] = s[i * 4 + 2];
      dv[i >> 1] = s[i * 4 + 3];
    }
  } else
    video_orc_pack_Y (dy, s, width);
}
+
#define PACK_YUY2 GST_VIDEO_FORMAT_AYUV, unpack_YUY2, 1, pack_YUY2
/* Unpack packed 4:2:2 YUY2 (Y0 U Y1 V per 4-byte group) into AYUV.
 * An odd start x lands on the second luma of a group, so that pixel is
 * emitted by hand first; the orc fastpath runs only when the destination
 * is 8-byte aligned. */
static void
unpack_YUY2 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  const guint8 *restrict s = GET_LINE (y);
  guint8 *restrict d = dest;

  /* round x down to the 2-pixel group boundary: 4 bytes per group */
  s += (x & ~1) << 1;
  if (x & 1) {
    /* leading odd pixel: Y1 of the group, chroma shared with Y0 */
    d[0] = 0xff;
    d[1] = s[2];
    d[2] = s[1];
    d[3] = s[3];
    s += 4;
    d += 4;
    width--;
  }

  if (IS_ALIGNED (d, 8))
    video_orc_unpack_YUY2 (d, s, width / 2);
  else {
    gint i;

    for (i = 0; i < width / 2; i++) {
      d[i * 8 + 0] = 0xff;
      d[i * 8 + 1] = s[i * 4 + 0];
      d[i * 8 + 2] = s[i * 4 + 1];
      d[i * 8 + 3] = s[i * 4 + 3];
      d[i * 8 + 4] = 0xff;
      d[i * 8 + 5] = s[i * 4 + 2];
      d[i * 8 + 6] = s[i * 4 + 1];
      d[i * 8 + 7] = s[i * 4 + 3];
    }
  }

  /* trailing odd pixel when width is odd */
  if (width & 1) {
    gint i = width - 1;

    d[i * 4 + 0] = 0xff;
    d[i * 4 + 1] = s[i * 2 + 0];
    d[i * 4 + 2] = s[i * 2 + 1];
    d[i * 4 + 3] = s[i * 2 + 3];
  }
}
+
/* Pack a line of AYUV into YUY2 (Y0 U Y1 V per 4 bytes); chroma is taken
 * from the first pixel of each pair (no averaging), alpha is dropped. */
static void
pack_YUY2 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  guint8 *restrict d = GET_LINE (y);
  const guint8 *restrict s = src;

  if (IS_ALIGNED (s, 8))
    video_orc_pack_YUY2 (d, s, width / 2);
  else {
    gint i;
    for (i = 0; i < width / 2; i++) {
      d[i * 4 + 0] = s[i * 8 + 1];
      d[i * 4 + 1] = s[i * 8 + 2];
      d[i * 4 + 2] = s[i * 8 + 5];
      d[i * 4 + 3] = s[i * 8 + 3];
    }
  }

  /* trailing odd pixel: write Y0, U and V of the last (partial) group;
   * the unused Y1 byte at d[i * 2 + 2] is intentionally left untouched */
  if (width & 1) {
    gint i = width - 1;

    d[i * 2 + 0] = s[i * 4 + 1];
    d[i * 2 + 1] = s[i * 4 + 2];
    d[i * 2 + 3] = s[i * 4 + 3];
  }
}
+
#define PACK_UYVY GST_VIDEO_FORMAT_AYUV, unpack_UYVY, 1, pack_UYVY
/* Unpack packed 4:2:2 UYVY (U Y0 V Y1 per 4-byte group) into AYUV. */
static void
unpack_UYVY (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  const guint8 *s = GET_LINE (y);
  guint8 *d = dest;

  /* round x down to the 2-pixel group boundary: 4 bytes per group */
  s += (x & ~1) << 1;
  if (x & 1) {
    /* leading odd pixel: Y1 of the group, chroma shared with Y0 */
    d[0] = 0xff;
    d[1] = s[3];
    d[2] = s[0];
    d[3] = s[2];
    s += 4;
    d += 4;
    width--;
  }

  if (IS_ALIGNED (d, 8))
    video_orc_unpack_UYVY (d, s, width / 2);
  else {
    gint i;

    for (i = 0; i < width / 2; i++) {
      d[i * 8 + 0] = 0xff;
      d[i * 8 + 1] = s[i * 4 + 1];
      d[i * 8 + 2] = s[i * 4 + 0];
      d[i * 8 + 3] = s[i * 4 + 2];
      d[i * 8 + 4] = 0xff;
      d[i * 8 + 5] = s[i * 4 + 3];
      d[i * 8 + 6] = s[i * 4 + 0];
      d[i * 8 + 7] = s[i * 4 + 2];
    }
  }

  /* trailing odd pixel when width is odd */
  if (width & 1) {
    gint i = width - 1;

    d[i * 4 + 0] = 0xff;
    d[i * 4 + 1] = s[i * 2 + 1];
    d[i * 4 + 2] = s[i * 2 + 0];
    d[i * 4 + 3] = s[i * 2 + 2];
  }
}
+
/* Pack a line of AYUV into UYVY (U Y0 V Y1 per 4 bytes); chroma comes from
 * the first pixel of each pair, alpha is dropped. */
static void
pack_UYVY (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  guint8 *restrict d = GET_LINE (y);
  const guint8 *restrict s = src;

  if (IS_ALIGNED (s, 8))
    video_orc_pack_UYVY (d, s, width / 2);
  else {
    gint i;
    for (i = 0; i < width / 2; i++) {
      d[i * 4 + 0] = s[i * 8 + 2];
      d[i * 4 + 1] = s[i * 8 + 1];
      d[i * 4 + 2] = s[i * 8 + 3];
      d[i * 4 + 3] = s[i * 8 + 5];
    }
  }
  /* trailing odd pixel: U, Y0, V of the last partial group; Y1 untouched */
  if (width & 1) {
    gint i = width - 1;

    d[i * 2 + 0] = s[i * 4 + 2];
    d[i * 2 + 1] = s[i * 4 + 1];
    d[i * 2 + 2] = s[i * 4 + 3];
  }
}
+
#define PACK_VYUY GST_VIDEO_FORMAT_AYUV, unpack_VYUY, 1, pack_VYUY
/* Unpack packed 4:2:2 VYUY into AYUV.
 * NOTE(review): the scalar paths here index bytes exactly like unpack_UYVY
 * (U at offset 0, V at offset 2), which does not match a V-first VYUY
 * layout — confirm against video_orc_unpack_VYUY and the format's
 * definition before relying on the non-orc fallback. */
static void
unpack_VYUY (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  const guint8 *s = GET_LINE (y);
  guint8 *d = dest;

  /* round x down to the 2-pixel group boundary: 4 bytes per group */
  s += (x & ~1) << 1;
  if (x & 1) {
    d[0] = 0xff;
    d[1] = s[3];
    d[2] = s[0];
    d[3] = s[2];
    s += 4;
    d += 4;
    width--;
  }

  if (IS_ALIGNED (d, 8))
    video_orc_unpack_VYUY (d, s, width / 2);
  else {
    gint i;

    for (i = 0; i < width / 2; i++) {
      d[i * 8 + 0] = 0xff;
      d[i * 8 + 1] = s[i * 4 + 1];
      d[i * 8 + 2] = s[i * 4 + 0];
      d[i * 8 + 3] = s[i * 4 + 2];
      d[i * 8 + 4] = 0xff;
      d[i * 8 + 5] = s[i * 4 + 3];
      d[i * 8 + 6] = s[i * 4 + 0];
      d[i * 8 + 7] = s[i * 4 + 2];
    }
  }

  /* trailing odd pixel when width is odd */
  if (width & 1) {
    gint i = width - 1;

    d[i * 4 + 0] = 0xff;
    d[i * 4 + 1] = s[i * 2 + 1];
    d[i * 4 + 2] = s[i * 2 + 0];
    d[i * 4 + 3] = s[i * 2 + 2];
  }
}
+
/* Pack a line of AYUV into VYUY.
 * NOTE(review): the scalar fallback writes the same byte order as
 * pack_UYVY (U first, not V) — verify against video_orc_pack_VYUY. */
static void
pack_VYUY (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  guint8 *restrict d = GET_LINE (y);
  const guint8 *restrict s = src;

  if (IS_ALIGNED (s, 8))
    video_orc_pack_VYUY (d, s, width / 2);
  else {
    gint i;
    for (i = 0; i < width / 2; i++) {
      d[i * 4 + 0] = s[i * 8 + 2];
      d[i * 4 + 1] = s[i * 8 + 1];
      d[i * 4 + 2] = s[i * 8 + 3];
      d[i * 4 + 3] = s[i * 8 + 5];
    }
  }
  /* trailing odd pixel; the pairing luma byte is intentionally untouched */
  if (width & 1) {
    gint i = width - 1;

    d[i * 2 + 0] = s[i * 4 + 2];
    d[i * 2 + 1] = s[i * 4 + 1];
    d[i * 2 + 2] = s[i * 4 + 3];
  }
}
+
#define PACK_YVYU GST_VIDEO_FORMAT_AYUV, unpack_YVYU, 1, pack_YVYU
/* Unpack packed 4:2:2 YVYU (Y0 V Y1 U per 4-byte group) into AYUV. */
static void
unpack_YVYU (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  const guint8 *restrict s = GET_LINE (y);
  guint8 *restrict d = dest;

  /* round x down to the 2-pixel group boundary: 4 bytes per group */
  s += (x & ~1) << 1;
  if (x & 1) {
    /* leading odd pixel: Y1 of the group, chroma shared with Y0 */
    d[0] = 0xff;
    d[1] = s[2];
    d[2] = s[3];
    d[3] = s[1];
    s += 4;
    d += 4;
    width--;
  }

  if (IS_ALIGNED (d, 8))
    video_orc_unpack_YVYU (d, s, width / 2);
  else {
    gint i;

    for (i = 0; i < width / 2; i++) {
      d[i * 8 + 0] = 0xff;
      d[i * 8 + 1] = s[i * 4 + 0];
      d[i * 8 + 2] = s[i * 4 + 3];
      d[i * 8 + 3] = s[i * 4 + 1];
      d[i * 8 + 4] = 0xff;
      d[i * 8 + 5] = s[i * 4 + 2];
      d[i * 8 + 6] = s[i * 4 + 3];
      d[i * 8 + 7] = s[i * 4 + 1];
    }
  }

  /* trailing odd pixel when width is odd */
  if (width & 1) {
    gint i = width - 1;

    d[i * 4 + 0] = 0xff;
    d[i * 4 + 1] = s[i * 2 + 0];
    d[i * 4 + 2] = s[i * 2 + 3];
    d[i * 4 + 3] = s[i * 2 + 1];
  }
}
+
/* Pack a line of AYUV into YVYU (Y0 V Y1 U per 4 bytes); chroma comes from
 * the first pixel of each pair, alpha is dropped. */
static void
pack_YVYU (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  guint8 *restrict d = GET_LINE (y);
  const guint8 *restrict s = src;

  if (IS_ALIGNED (s, 8))
    video_orc_pack_YVYU (d, s, width / 2);
  else {
    gint i;
    for (i = 0; i < width / 2; i++) {
      d[i * 4 + 0] = s[i * 8 + 1];
      d[i * 4 + 1] = s[i * 8 + 3];
      d[i * 4 + 2] = s[i * 8 + 5];
      d[i * 4 + 3] = s[i * 8 + 2];
    }
  }

  /* trailing odd pixel: Y0, V, U of the last partial group; the unused Y1
   * byte at d[i * 2 + 2] is intentionally left untouched */
  if (width & 1) {
    gint i = width - 1;

    d[i * 2 + 0] = s[i * 4 + 1];
    d[i * 2 + 1] = s[i * 4 + 3];
    d[i * 2 + 3] = s[i * 4 + 2];
  }
}
+
+ #define PACK_v308 GST_VIDEO_FORMAT_AYUV, unpack_v308, 1, pack_v308
+ static void
+ unpack_v308 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ int i;
+ const guint8 *restrict s = GET_LINE (y);
+ guint8 *restrict d = dest;
+
+ s += x * 3;
+
+ for (i = 0; i < width; i++) {
+ d[i * 4 + 0] = 0xff;
+ d[i * 4 + 1] = s[i * 3 + 0];
+ d[i * 4 + 2] = s[i * 3 + 1];
+ d[i * 4 + 3] = s[i * 3 + 2];
+ }
+ }
+
+ static void
+ pack_v308 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ int i;
+ guint8 *restrict d = GET_LINE (y);
+ const guint8 *restrict s = src;
+
+ for (i = 0; i < width; i++) {
+ d[i * 3 + 0] = s[i * 4 + 1];
+ d[i * 3 + 1] = s[i * 4 + 2];
+ d[i * 3 + 2] = s[i * 4 + 3];
+ }
+ }
+
+ #define PACK_IYU2 GST_VIDEO_FORMAT_AYUV, unpack_IYU2, 1, pack_IYU2
+ static void
+ unpack_IYU2 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ int i;
+ const guint8 *restrict s = GET_LINE (y);
+ guint8 *restrict d = dest;
+
+ s += x * 3;
+
+ for (i = 0; i < width; i++) {
+ d[i * 4 + 0] = 0xff;
+ d[i * 4 + 1] = s[i * 3 + 1];
+ d[i * 4 + 2] = s[i * 3 + 0];
+ d[i * 4 + 3] = s[i * 3 + 2];
+ }
+ }
+
+ static void
+ pack_IYU2 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ int i;
+ guint8 *restrict d = GET_LINE (y);
+ const guint8 *restrict s = src;
+
+ for (i = 0; i < width; i++) {
+ d[i * 3 + 0] = s[i * 4 + 2];
+ d[i * 3 + 1] = s[i * 4 + 1];
+ d[i * 3 + 2] = s[i * 4 + 3];
+ }
+ }
+
+ #define PACK_AYUV GST_VIDEO_FORMAT_AYUV, unpack_copy4, 1, pack_copy4
+ #define PACK_ARGB GST_VIDEO_FORMAT_ARGB, unpack_copy4, 1, pack_copy4
+ static void
+ unpack_copy4 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ const guint8 *restrict s = GET_LINE (y);
+
+ s += x * 4;
+
+ memcpy (dest, s, width * 4);
+ }
+
+ static void
+ pack_copy4 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ guint8 *restrict d = GET_LINE (y);
+
+ memcpy (d, src, width * 4);
+ }
+
#define PACK_v210 GST_VIDEO_FORMAT_AYUV64, unpack_v210, 1, pack_v210
/* Unpack v210 (10-bit 4:2:2, 6 pixels packed in four 32-bit LE words = 16
 * bytes) into 16-bit AYUV64.  The 10-bit values are shifted into the top
 * bits; unless TRUNCATE_RANGE is set the low 6 bits are filled by
 * replicating the top bits so full-scale stays full-scale. */
static void
unpack_v210 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  int i;
  const guint8 *restrict s = GET_LINE (y);
  guint16 *restrict d = dest;
  guint32 a0, a1, a2, a3;
  guint16 y0, y1, y2, y3, y4, y5;
  guint16 u0, u2, u4;
  guint16 v0, v2, v4;

  /* FIXME: v210 is 16 bytes per 6 pixels; this offset is only right for
   * x on a 6-pixel group boundary — review for arbitrary x */
  s += x * 2;

  for (i = 0; i < width; i += 6) {
    a0 = GST_READ_UINT32_LE (s + (i / 6) * 16 + 0);
    a1 = GST_READ_UINT32_LE (s + (i / 6) * 16 + 4);
    a2 = GST_READ_UINT32_LE (s + (i / 6) * 16 + 8);
    a3 = GST_READ_UINT32_LE (s + (i / 6) * 16 + 12);

    /* each word carries three 10-bit components */
    u0 = ((a0 >> 0) & 0x3ff) << 6;
    y0 = ((a0 >> 10) & 0x3ff) << 6;
    v0 = ((a0 >> 20) & 0x3ff) << 6;
    y1 = ((a1 >> 0) & 0x3ff) << 6;

    u2 = ((a1 >> 10) & 0x3ff) << 6;
    y2 = ((a1 >> 20) & 0x3ff) << 6;
    v2 = ((a2 >> 0) & 0x3ff) << 6;
    y3 = ((a2 >> 10) & 0x3ff) << 6;

    u4 = ((a2 >> 20) & 0x3ff) << 6;
    y4 = ((a3 >> 0) & 0x3ff) << 6;
    v4 = ((a3 >> 10) & 0x3ff) << 6;
    y5 = ((a3 >> 20) & 0x3ff) << 6;

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      /* replicate top bits into the low 6 bits (10 -> 16 bit scaling) */
      y0 |= (y0 >> 10);
      y1 |= (y1 >> 10);
      u0 |= (u0 >> 10);
      v0 |= (v0 >> 10);

      y2 |= (y2 >> 10);
      y3 |= (y3 >> 10);
      u2 |= (u2 >> 10);
      v2 |= (v2 >> 10);

      y4 |= (y4 >> 10);
      y5 |= (y5 >> 10);
      u4 |= (u4 >> 10);
      v4 |= (v4 >> 10);
    }

    /* emit up to 6 pixels, guarding against a partial final group */
    d[4 * (i + 0) + 0] = 0xffff;
    d[4 * (i + 0) + 1] = y0;
    d[4 * (i + 0) + 2] = u0;
    d[4 * (i + 0) + 3] = v0;

    if (i < width - 1) {
      d[4 * (i + 1) + 0] = 0xffff;
      d[4 * (i + 1) + 1] = y1;
      d[4 * (i + 1) + 2] = u0;
      d[4 * (i + 1) + 3] = v0;
    }
    if (i < width - 2) {
      d[4 * (i + 2) + 0] = 0xffff;
      d[4 * (i + 2) + 1] = y2;
      d[4 * (i + 2) + 2] = u2;
      d[4 * (i + 2) + 3] = v2;
    }
    if (i < width - 3) {
      d[4 * (i + 3) + 0] = 0xffff;
      d[4 * (i + 3) + 1] = y3;
      d[4 * (i + 3) + 2] = u2;
      d[4 * (i + 3) + 3] = v2;
    }
    if (i < width - 4) {
      d[4 * (i + 4) + 0] = 0xffff;
      d[4 * (i + 4) + 1] = y4;
      d[4 * (i + 4) + 2] = u4;
      d[4 * (i + 4) + 3] = v4;
    }
    if (i < width - 5) {
      d[4 * (i + 5) + 0] = 0xffff;
      d[4 * (i + 5) + 1] = y5;
      d[4 * (i + 5) + 2] = u4;
      d[4 * (i + 5) + 3] = v4;
    }
  }
}
+
/* Pack 16-bit AYUV64 into v210: 6 pixels into four 32-bit LE words.  The
 * 16-bit components are truncated to their top 10 bits; a partial final
 * group is completed by replicating the last available samples. */
static void
pack_v210 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  int i;
  guint8 *restrict d = GET_LINE (y);
  const guint16 *restrict s = src;
  guint32 a0, a1, a2, a3;
  guint16 y0, y1, y2, y3, y4, y5;
  guint16 u0, u1, u2;
  guint16 v0, v1, v2;

  /* full groups of 6 pixels */
  for (i = 0; i < width - 5; i += 6) {
    y0 = s[4 * (i + 0) + 1] >> 6;
    y1 = s[4 * (i + 1) + 1] >> 6;
    y2 = s[4 * (i + 2) + 1] >> 6;
    y3 = s[4 * (i + 3) + 1] >> 6;
    y4 = s[4 * (i + 4) + 1] >> 6;
    y5 = s[4 * (i + 5) + 1] >> 6;

    /* chroma taken from every second pixel (4:2:2 subsampling) */
    u0 = s[4 * (i + 0) + 2] >> 6;
    u1 = s[4 * (i + 2) + 2] >> 6;
    u2 = s[4 * (i + 4) + 2] >> 6;

    v0 = s[4 * (i + 0) + 3] >> 6;
    v1 = s[4 * (i + 2) + 3] >> 6;
    v2 = s[4 * (i + 4) + 3] >> 6;

    a0 = u0 | (y0 << 10) | (v0 << 20);
    a1 = y1 | (u1 << 10) | (y2 << 20);
    a2 = v1 | (y3 << 10) | (u2 << 20);
    a3 = y4 | (v2 << 10) | (y5 << 20);

    GST_WRITE_UINT32_LE (d + (i / 6) * 16 + 0, a0);
    GST_WRITE_UINT32_LE (d + (i / 6) * 16 + 4, a1);
    GST_WRITE_UINT32_LE (d + (i / 6) * 16 + 8, a2);
    GST_WRITE_UINT32_LE (d + (i / 6) * 16 + 12, a3);
  }
  /* partial trailing group: pad missing samples by repeating the last
   * valid luma/chroma so the full 16-byte group can still be written */
  if (i < width) {
    y0 = s[4 * (i + 0) + 1] >> 6;
    u0 = s[4 * (i + 0) + 2] >> 6;
    v0 = s[4 * (i + 0) + 3] >> 6;
    if (i < width - 1)
      y1 = s[4 * (i + 1) + 1] >> 6;
    else
      y1 = y0;
    if (i < width - 2) {
      y2 = s[4 * (i + 2) + 1] >> 6;
      u1 = s[4 * (i + 2) + 2] >> 6;
      v1 = s[4 * (i + 2) + 3] >> 6;
    } else {
      y2 = y1;
      u1 = u0;
      v1 = v0;
    }
    if (i < width - 3)
      y3 = s[4 * (i + 3) + 1] >> 6;
    else
      y3 = y2;
    if (i < width - 4) {
      y4 = s[4 * (i + 4) + 1] >> 6;
      u2 = s[4 * (i + 4) + 2] >> 6;
      v2 = s[4 * (i + 4) + 3] >> 6;
    } else {
      y4 = y3;
      u2 = u1;
      v2 = v1;
    }
    y5 = y4;

    a0 = u0 | (y0 << 10) | (v0 << 20);
    a1 = y1 | (u1 << 10) | (y2 << 20);
    a2 = v1 | (y3 << 10) | (u2 << 20);
    a3 = y4 | (v2 << 10) | (y5 << 20);

    GST_WRITE_UINT32_LE (d + (i / 6) * 16 + 0, a0);
    GST_WRITE_UINT32_LE (d + (i / 6) * 16 + 4, a1);
    GST_WRITE_UINT32_LE (d + (i / 6) * 16 + 8, a2);
    GST_WRITE_UINT32_LE (d + (i / 6) * 16 + 12, a3);
  }
}
+
#define PACK_v216 GST_VIDEO_FORMAT_AYUV64, unpack_v216, 1, pack_v216
/* Unpack v216 (16-bit 4:2:2, U0 Y0 V0 Y1 as little-endian 16-bit words,
 * 8 bytes per 2 pixels) into AYUV64. */
static void
unpack_v216 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  int i;
  const guint8 *restrict s = GET_LINE (y);
  guint16 *restrict d = dest;

  /* round x down to the 2-pixel group boundary: 8 bytes per group */
  s += (x & ~1) << 2;
  if (x & 1) {
    /* leading odd pixel: Y1 of the group, chroma shared with Y0 */
    d[0] = 0xffff;
    d[1] = GST_READ_UINT16_LE (s + 6);
    d[2] = GST_READ_UINT16_LE (s + 0);
    d[3] = GST_READ_UINT16_LE (s + 4);
    s += 8;
    d += 4;
    width--;
  }

  for (i = 0; i < width; i++) {
    d[i * 4 + 0] = 0xffff;
    d[i * 4 + 1] = GST_READ_UINT16_LE (s + i * 4 + 2);
    d[i * 4 + 2] = GST_READ_UINT16_LE (s + (i >> 1) * 8 + 0);
    d[i * 4 + 3] = GST_READ_UINT16_LE (s + (i >> 1) * 8 + 4);
  }
}
+
/* Pack AYUV64 into v216 (U0 Y0 V0 Y1, LE 16-bit words); chroma from the
 * first pixel of each pair.  An odd final pixel duplicates its luma into
 * the Y1 slot so the full 8-byte group is written. */
static void
pack_v216 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  int i;
  guint8 *restrict d = GET_LINE (y);
  const guint16 *restrict s = src;

  for (i = 0; i < width - 1; i += 2) {
    GST_WRITE_UINT16_LE (d + i * 4 + 0, s[(i + 0) * 4 + 2]);
    GST_WRITE_UINT16_LE (d + i * 4 + 2, s[(i + 0) * 4 + 1]);
    GST_WRITE_UINT16_LE (d + i * 4 + 4, s[(i + 0) * 4 + 3]);
    GST_WRITE_UINT16_LE (d + i * 4 + 6, s[(i + 1) * 4 + 1]);
  }
  if (i == width - 1) {
    GST_WRITE_UINT16_LE (d + i * 4 + 0, s[i * 4 + 2]);
    GST_WRITE_UINT16_LE (d + i * 4 + 2, s[i * 4 + 1]);
    GST_WRITE_UINT16_LE (d + i * 4 + 4, s[i * 4 + 3]);
    GST_WRITE_UINT16_LE (d + i * 4 + 6, s[i * 4 + 1]);
  }
}
+
#define PACK_Y210 GST_VIDEO_FORMAT_AYUV64, unpack_Y210, 1, pack_Y210
/* Unpack Y210 (10-bit 4:2:2 stored in the high bits of LE 16-bit words,
 * Y0 U Y1 V per 8-byte group) into AYUV64.  Unless TRUNCATE_RANGE is set
 * the top bits are replicated into the low bits for 10 -> 16 scaling. */
static void
unpack_Y210 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  int i;
  const guint8 *restrict s = GET_LINE (y);
  guint16 *restrict d = dest;
  guint Y0, Y1, U, V;

  /* 4 bytes per pixel; round x down to the 2-pixel group boundary */
  s += GST_ROUND_DOWN_2 (x) * 4;

  if (x & 1) {
    /* leading odd pixel: Y1 of the group, chroma shared with Y0 */
    Y1 = GST_READ_UINT16_LE (s + 4);
    U = GST_READ_UINT16_LE (s + 2);
    V = GST_READ_UINT16_LE (s + 6);

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      Y1 |= (Y1 >> 10);
      U |= (U >> 10);
      V |= (V >> 10);
    }

    d[0] = 0xffff;
    d[1] = Y1;
    d[2] = U;
    d[3] = V;
    s += 8;
    d += 4;
    width--;
  }

  for (i = 0; i < width / 2; i++) {
    Y0 = GST_READ_UINT16_LE (s + i * 8 + 0);
    U = GST_READ_UINT16_LE (s + i * 8 + 2);
    V = GST_READ_UINT16_LE (s + i * 8 + 6);
    Y1 = GST_READ_UINT16_LE (s + i * 8 + 4);

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      Y0 |= (Y0 >> 10);
      U |= (U >> 10);
      V |= (V >> 10);
    }

    d[i * 8 + 0] = 0xffff;
    d[i * 8 + 1] = Y0;
    d[i * 8 + 2] = U;
    d[i * 8 + 3] = V;

    d[i * 8 + 4] = 0xffff;
    d[i * 8 + 5] = Y1;
    d[i * 8 + 6] = U;
    d[i * 8 + 7] = V;
  }

  /* trailing odd pixel when width is odd */
  if (width & 1) {
    i = width - 1;

    Y0 = GST_READ_UINT16_LE (s + i * 4 + 0);
    U = GST_READ_UINT16_LE (s + i * 4 + 2);
    V = GST_READ_UINT16_LE (s + i * 4 + 6);

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      Y0 |= (Y0 >> 10);
      U |= (U >> 10);
      V |= (V >> 10);
    }

    d[i * 4 + 0] = 0xffff;
    d[i * 4 + 1] = Y0;
    d[i * 4 + 2] = U;
    d[i * 4 + 3] = V;
  }
}
+
+ static void
+ pack_Y210 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ int i;
+ guint16 Y0, Y1, U, V;
+ guint8 *restrict d = GET_LINE (y);
+ const guint16 *restrict s = src;
+
+ for (i = 0; i < width; i += 2) {
+ Y0 = s[i * 4 + 1] & 0xffc0;
+ U = s[i * 4 + 2] & 0xffc0;
+ V = s[i * 4 + 3] & 0xffc0;
+ if (i == width - 1)
+ Y1 = s[i * 4 + 1] & 0xffc0;
+ else
+ Y1 = s[(i + 1) * 4 + 1] & 0xffc0;
+
+ GST_WRITE_UINT16_LE (d + i * 4 + 0, Y0);
+ GST_WRITE_UINT16_LE (d + i * 4 + 2, U);
+ GST_WRITE_UINT16_LE (d + i * 4 + 4, Y1);
+ GST_WRITE_UINT16_LE (d + i * 4 + 6, V);
+ }
+ }
+
+ #define PACK_Y410 GST_VIDEO_FORMAT_AYUV64, unpack_Y410, 1, pack_Y410
+ static void
+ unpack_Y410 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ int i;
+ const guint8 *restrict s = GET_LINE (y);
+ guint16 *restrict d = dest;
+ guint32 AVYU;
+ guint16 A, Y, U, V;
+
+ s += x * 4;
+
+ for (i = 0; i < width; i++) {
+ AVYU = GST_READ_UINT32_LE (s + 4 * i);
+
+ U = ((AVYU >> 0) & 0x3ff) << 6;
+ Y = ((AVYU >> 10) & 0x3ff) << 6;
+ V = ((AVYU >> 20) & 0x3ff) << 6;
+ A = ((AVYU >> 30) & 0x03) << 14;
+
+ if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+ U |= (U >> 10);
+ Y |= (Y >> 10);
+ V |= (V >> 10);
+ A |= (A >> 10);
+ }
+
+ d[4 * i + 0] = A;
+ d[4 * i + 1] = Y;
+ d[4 * i + 2] = U;
+ d[4 * i + 3] = V;
+ }
+ }
+
+ static void
+ pack_Y410 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ int i;
+ guint32 *restrict d = GET_LINE (y);
+ const guint16 *restrict s = src;
+ guint32 AVYU;
+ guint16 A, Y, U, V;
+
+ for (i = 0; i < width; i++) {
+ A = s[4 * i] & 0xc000;
+ Y = s[4 * i + 1] & 0xffc0;
+ U = s[4 * i + 2] & 0xffc0;
+ V = s[4 * i + 3] & 0xffc0;
+
+ AVYU = (U >> 6) | (Y << 4) | (V << 14) | (A << 16);
+
+ GST_WRITE_UINT32_LE (d + i, AVYU);
+ }
+ }
+
+ #define PACK_Y41B GST_VIDEO_FORMAT_AYUV, unpack_Y41B, 1, pack_Y41B
+ static void
+ unpack_Y41B (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ const guint8 *restrict sy = GET_Y_LINE (y);
+ const guint8 *restrict su = GET_U_LINE (y);
+ const guint8 *restrict sv = GET_V_LINE (y);
+ guint8 *restrict d = dest;
+
+ sy += x;
+ su += x >> 2;
+ sv += x >> 2;
+
+ if (x & 3) {
+ for (; x & 3; x++) {
+ d[0] = 0xff;
+ d[1] = *sy++;
+ d[2] = *su;
+ d[3] = *sv;
+ width--;
+ d += 4;
+ }
+ su++;
+ sy++;
+ }
+
+ if (IS_ALIGNED (d, 8))
+ video_orc_unpack_YUV9 (d, sy, su, sv, width / 2);
+ else {
+ gint i;
+ for (i = 0; i < width / 2; i++) {
+ d[i * 8 + 0] = 0xff;
+ d[i * 8 + 1] = sy[i * 2 + 0];
+ d[i * 8 + 2] = su[i >> 1];
+ d[i * 8 + 3] = sv[i >> 1];
+ d[i * 8 + 4] = 0xff;
+ d[i * 8 + 5] = sy[i * 2 + 1];
+ d[i * 8 + 6] = su[i >> 1];
+ d[i * 8 + 7] = sv[i >> 1];
+ }
+ }
+
+ if (width & 1) {
+ gint i = width - 1;
+
+ d[i * 4 + 0] = 0xff;
+ d[i * 4 + 1] = sy[i];
+ d[i * 4 + 2] = su[i >> 2];
+ d[i * 4 + 3] = sv[i >> 2];
+ }
+ }
+
/* Pack AYUV into planar 4:1:1 Y41B; chroma is taken from the first pixel
 * of each group of 4 (no averaging), alpha is dropped. */
static void
pack_Y41B (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  int i;
  guint8 *restrict dy = GET_Y_LINE (y);
  guint8 *restrict du = GET_U_LINE (y);
  guint8 *restrict dv = GET_V_LINE (y);
  const guint8 *restrict s = src;

  /* full groups of 4 pixels */
  for (i = 0; i < width - 3; i += 4) {
    dy[i] = s[i * 4 + 1];
    dy[i + 1] = s[i * 4 + 5];
    dy[i + 2] = s[i * 4 + 9];
    dy[i + 3] = s[i * 4 + 13];

    du[i >> 2] = s[i * 4 + 2];
    dv[i >> 2] = s[i * 4 + 3];
  }
  /* partial trailing group (1-3 pixels) */
  if (i < width) {
    dy[i] = s[i * 4 + 1];
    du[i >> 2] = s[i * 4 + 2];
    dv[i >> 2] = s[i * 4 + 3];
    if (i < width - 1)
      dy[i + 1] = s[i * 4 + 5];
    if (i < width - 2)
      dy[i + 2] = s[i * 4 + 9];
  }
}
+
#define PACK_Y42B GST_VIDEO_FORMAT_AYUV, unpack_Y42B, 1, pack_Y42B
/* Unpack planar 4:2:2 Y42B (one chroma sample per 2 luma samples, full
 * vertical resolution) into AYUV, alpha forced to 0xff. */
static void
unpack_Y42B (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  const guint8 *restrict sy = GET_Y_LINE (y);
  const guint8 *restrict su = GET_U_LINE (y);
  const guint8 *restrict sv = GET_V_LINE (y);
  guint8 *restrict d = dest;

  sy += x;
  su += x >> 1;                 /* one chroma sample per 2 luma samples */
  sv += x >> 1;

  if (x & 1) {
    /* odd start x: emit the second pixel of a chroma pair by hand */
    d[0] = 0xff;
    d[1] = *sy++;
    d[2] = *su++;
    d[3] = *sv++;
    width--;
    d += 4;
  }

  if (IS_ALIGNED (d, 8))
    video_orc_unpack_Y42B (d, sy, su, sv, width / 2);
  else {
    gint i;
    for (i = 0; i < width / 2; i++) {
      d[i * 8 + 0] = 0xff;
      d[i * 8 + 1] = sy[i * 2 + 0];
      d[i * 8 + 2] = su[i];
      d[i * 8 + 3] = sv[i];
      d[i * 8 + 4] = 0xff;
      d[i * 8 + 5] = sy[i * 2 + 1];
      d[i * 8 + 6] = su[i];
      d[i * 8 + 7] = sv[i];
    }
  }

  /* trailing odd pixel when width is odd */
  if (width & 1) {
    gint i = width - 1;

    d[i * 4 + 0] = 0xff;
    d[i * 4 + 1] = sy[i];
    d[i * 4 + 2] = su[i >> 1];
    d[i * 4 + 3] = sv[i >> 1];
  }
}
+
/* Pack AYUV into planar 4:2:2 Y42B; chroma from the first pixel of each
 * pair, alpha dropped. */
static void
pack_Y42B (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  guint8 *restrict dy = GET_Y_LINE (y);
  guint8 *restrict du = GET_U_LINE (y);
  guint8 *restrict dv = GET_V_LINE (y);
  const guint8 *restrict s = src;

  /* orc fastpath needs an 8-byte aligned source */
  if (IS_ALIGNED (s, 8))
    video_orc_pack_Y42B (dy, du, dv, s, width / 2);
  else {
    gint i;
    for (i = 0; i < width / 2; i++) {
      dy[i * 2 + 0] = s[i * 8 + 1];
      dy[i * 2 + 1] = s[i * 8 + 5];
      du[i] = s[i * 8 + 2];
      dv[i] = s[i * 8 + 3];
    }
  }

  /* trailing odd pixel when width is odd */
  if (width & 1) {
    gint i = width - 1;

    dy[i] = s[i * 4 + 1];
    du[i >> 1] = s[i * 4 + 2];
    dv[i >> 1] = s[i * 4 + 3];
  }
}
+
+ #define PACK_Y444 GST_VIDEO_FORMAT_AYUV, unpack_Y444, 1, pack_Y444
+ static void
+ unpack_Y444 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ const guint8 *restrict sy = GET_Y_LINE (y);
+ const guint8 *restrict su = GET_U_LINE (y);
+ const guint8 *restrict sv = GET_V_LINE (y);
+
+ sy += x;
+ su += x;
+ sv += x;
+
+ video_orc_unpack_Y444 (dest, sy, su, sv, width);
+ }
+
+ static void
+ pack_Y444 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ guint8 *restrict dy = GET_Y_LINE (y);
+ guint8 *restrict du = GET_U_LINE (y);
+ guint8 *restrict dv = GET_V_LINE (y);
+
+ video_orc_pack_Y444 (dy, du, dv, src, width);
+ }
+
+ #define PACK_GBR GST_VIDEO_FORMAT_ARGB, unpack_GBR, 1, pack_GBR
+ static void
+ unpack_GBR (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ const guint8 *restrict sr = GET_R_LINE (y);
+ const guint8 *restrict sg = GET_G_LINE (y);
+ const guint8 *restrict sb = GET_B_LINE (y);
+
+ sr += x;
+ sg += x;
+ sb += x;
+
+ video_orc_unpack_Y444 (dest, sr, sg, sb, width);
+ }
+
+ static void
+ pack_GBR (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ guint8 *restrict dr = GET_R_LINE (y);
+ guint8 *restrict dg = GET_G_LINE (y);
+ guint8 *restrict db = GET_B_LINE (y);
+
+ video_orc_pack_Y444 (dr, dg, db, src, width);
+ }
+
+ #define PACK_GBRA GST_VIDEO_FORMAT_ARGB, unpack_GBRA, 1, pack_GBRA
+ static void
+ unpack_GBRA (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ int i;
+ const guint8 *sg = GET_G_LINE (y);
+ const guint8 *sb = GET_B_LINE (y);
+ const guint8 *sr = GET_R_LINE (y);
+ const guint8 *sa = GET_A_LINE (y);
+ guint8 *d = dest, G, B, R, A;
+
+ sg += x;
+ sb += x;
+ sr += x;
+ sa += x;
+
+ for (i = 0; i < width; i++) {
+ G = GST_READ_UINT8 (sg + i);
+ B = GST_READ_UINT8 (sb + i);
+ R = GST_READ_UINT8 (sr + i);
+ A = GST_READ_UINT8 (sa + i);
+
+ d[i * 4 + 0] = A;
+ d[i * 4 + 1] = R;
+ d[i * 4 + 2] = G;
+ d[i * 4 + 3] = B;
+ }
+ }
+
+ static void
+ pack_GBRA (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ int i;
+ guint8 *restrict dg = GET_G_LINE (y);
+ guint8 *restrict db = GET_B_LINE (y);
+ guint8 *restrict dr = GET_R_LINE (y);
+ guint8 *restrict da = GET_A_LINE (y);
+ guint8 G, B, R, A;
+ const guint8 *restrict s = src;
+
+ for (i = 0; i < width; i++) {
+ G = (s[i * 4 + 2]);
+ B = (s[i * 4 + 3]);
+ R = (s[i * 4 + 1]);
+ A = (s[i * 4 + 0]);
+
+ GST_WRITE_UINT8 (dg + i, G);
+ GST_WRITE_UINT8 (db + i, B);
+ GST_WRITE_UINT8 (dr + i, R);
+ GST_WRITE_UINT8 (da + i, A);
+ }
+ }
+
+ #define PACK_GRAY8 GST_VIDEO_FORMAT_AYUV, unpack_GRAY8, 1, pack_GRAY8
+ static void
+ unpack_GRAY8 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ const guint8 *restrict s = GET_LINE (y);
+
+ s += x;
+
+ video_orc_unpack_GRAY8 (dest, s, width);
+ }
+
+ static void
+ pack_GRAY8 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ guint8 *restrict d = GET_LINE (y);
+
+ video_orc_pack_GRAY8 (d, src, width);
+ }
+
+ #define PACK_GRAY16_BE GST_VIDEO_FORMAT_AYUV64, unpack_GRAY16_BE, 1, pack_GRAY16_BE
+ static void
+ unpack_GRAY16_BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ int i;
+ const guint16 *restrict s = GET_LINE (y);
+ guint16 *restrict d = dest;
+
+ s += x;
+
+ for (i = 0; i < width; i++) {
+ d[i * 4 + 0] = 0xffff;
+ d[i * 4 + 1] = GST_READ_UINT16_BE (s + i);
+ d[i * 4 + 2] = 0x8000;
+ d[i * 4 + 3] = 0x8000;
+ }
+ }
+
+ static void
+ pack_GRAY16_BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ int i;
+ guint16 *restrict d = GET_LINE (y);
+ const guint16 *restrict s = src;
+
+ for (i = 0; i < width; i++) {
+ GST_WRITE_UINT16_BE (d + i, s[i * 4 + 1]);
+ }
+ }
+
+ #define PACK_GRAY16_LE GST_VIDEO_FORMAT_AYUV64, unpack_GRAY16_LE, 1, pack_GRAY16_LE
+ static void
+ unpack_GRAY16_LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ int i;
+ const guint16 *restrict s = GET_LINE (y);
+ guint16 *restrict d = dest;
+
+ s += x;
+
+ for (i = 0; i < width; i++) {
+ d[i * 4 + 0] = 0xffff;
+ d[i * 4 + 1] = GST_READ_UINT16_LE (s + i);
+ d[i * 4 + 2] = 0x8000;
+ d[i * 4 + 3] = 0x8000;
+ }
+ }
+
+ static void
+ pack_GRAY16_LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ int i;
+ guint16 *restrict d = GET_LINE (y);
+ const guint16 *restrict s = src;
+
+ for (i = 0; i < width; i++) {
+ GST_WRITE_UINT16_LE (d + i, s[i * 4 + 1]);
+ }
+ }
+
+ #define PACK_RGB16 GST_VIDEO_FORMAT_ARGB, unpack_RGB16, 1, pack_RGB16
+ static void
+ unpack_RGB16 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ const guint16 *restrict s = GET_LINE (y);
+
+ if (flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)
+ video_orc_unpack_RGB16_trunc (dest, s + x, width);
+ else
+ video_orc_unpack_RGB16 (dest, s + x, width);
+ }
+
+ static void
+ pack_RGB16 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ guint16 *restrict d = GET_LINE (y);
+
+ #if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ video_orc_pack_RGB16_le (d, src, width);
+ #else
+ video_orc_pack_RGB16_be (d, src, width);
+ #endif
+ }
+
+ #define PACK_BGR16 GST_VIDEO_FORMAT_ARGB, unpack_BGR16, 1, pack_BGR16
+ static void
+ unpack_BGR16 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ const guint16 *restrict s = GET_LINE (y);
+
+ if (flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)
+ video_orc_unpack_BGR16_trunc (dest, s + x, width);
+ else
+ video_orc_unpack_BGR16 (dest, s + x, width);
+ }
+
+ static void
+ pack_BGR16 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ guint16 *restrict d = GET_LINE (y);
+
+ #if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ video_orc_pack_BGR16_le (d, src, width);
+ #else
+ video_orc_pack_BGR16_be (d, src, width);
+ #endif
+ }
+
+ #define PACK_RGB15 GST_VIDEO_FORMAT_ARGB, unpack_RGB15, 1, pack_RGB15
+ static void
+ unpack_RGB15 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ const guint16 *restrict s = GET_LINE (y);
+
+ #if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ if (flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)
+ video_orc_unpack_RGB15_le_trunc (dest, s + x, width);
+ else
+ video_orc_unpack_RGB15_le (dest, s + x, width);
+ #else
+ if (flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)
+ video_orc_unpack_RGB15_be_trunc (dest, s + x, width);
+ else
+ video_orc_unpack_RGB15_be (dest, s + x, width);
+ #endif
+ }
+
+ static void
+ pack_RGB15 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ guint16 *restrict d = GET_LINE (y);
+
+ #if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ video_orc_pack_RGB15_le (d, src, width);
+ #else
+ video_orc_pack_RGB15_be (d, src, width);
+ #endif
+ }
+
+ #define PACK_BGR15 GST_VIDEO_FORMAT_ARGB, unpack_BGR15, 1, pack_BGR15
+ static void
+ unpack_BGR15 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ const guint16 *restrict s = GET_LINE (y);
+
+ #if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ if (flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)
+ video_orc_unpack_BGR15_le_trunc (dest, s + x, width);
+ else
+ video_orc_unpack_BGR15_le (dest, s + x, width);
+ #else
+ if (flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)
+ video_orc_unpack_BGR15_be_trunc (dest, s + x, width);
+ else
+ video_orc_unpack_BGR15_be (dest, s + x, width);
+ #endif
+ }
+
+ static void
+ pack_BGR15 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ guint16 *restrict d = GET_LINE (y);
+
+ #if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ video_orc_pack_BGR15_le (d, src, width);
+ #else
+ video_orc_pack_BGR15_be (d, src, width);
+ #endif
+ }
+
+ #define PACK_BGRA GST_VIDEO_FORMAT_ARGB, unpack_BGRA, 1, pack_BGRA
+ static void
+ unpack_BGRA (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ const guint8 *restrict s = GET_LINE (y);
+
+ s += x * 4;
+
+ video_orc_unpack_BGRA (dest, s, width);
+ }
+
+ static void
+ pack_BGRA (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ guint8 *restrict d = GET_LINE (y);
+
+ video_orc_pack_BGRA (d, src, width);
+ }
+
+ #define PACK_ABGR GST_VIDEO_FORMAT_ARGB, unpack_ABGR, 1, pack_ABGR
+ static void
+ unpack_ABGR (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ const guint8 *restrict s = GET_LINE (y);
+
+ s += x * 4;
+
+ #if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ video_orc_unpack_ABGR_le (dest, s, width);
+ #else
+ video_orc_unpack_ABGR_be (dest, s, width);
+ #endif
+ }
+
+ static void
+ pack_ABGR (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ guint8 *restrict d = GET_LINE (y);
+
+ #if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ video_orc_pack_ABGR_le (d, src, width);
+ #else
+ video_orc_pack_ABGR_be (d, src, width);
+ #endif
+ }
+
+ #define PACK_RGBA GST_VIDEO_FORMAT_ARGB, unpack_RGBA, 1, pack_RGBA
+ static void
+ unpack_RGBA (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ const guint8 *restrict s = GET_LINE (y);
+
+ s += x * 4;
+
+ #if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ video_orc_unpack_RGBA_le (dest, s, width);
+ #else
+ video_orc_unpack_RGBA_be (dest, s, width);
+ #endif
+ }
+
+ static void
+ pack_RGBA (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ guint8 *restrict d = GET_LINE (y);
+
+ #if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ video_orc_pack_RGBA_le (d, src, width);
+ #else
+ video_orc_pack_RGBA_be (d, src, width);
+ #endif
+ }
+
+ #define PACK_RGB GST_VIDEO_FORMAT_ARGB, unpack_RGB, 1, pack_RGB
+ static void
+ unpack_RGB (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ int i;
+ const guint8 *restrict s = GET_LINE (y);
+ guint8 *restrict d = dest;
+
+ s += x * 3;
+
+ for (i = 0; i < width; i++) {
+ d[i * 4 + 0] = 0xff;
+ d[i * 4 + 1] = s[i * 3 + 0];
+ d[i * 4 + 2] = s[i * 3 + 1];
+ d[i * 4 + 3] = s[i * 3 + 2];
+ }
+ }
+
+ static void
+ pack_RGB (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ int i;
+ guint8 *restrict d = GET_LINE (y);
+ const guint8 *restrict s = src;
+
+ for (i = 0; i < width; i++) {
+ d[i * 3 + 0] = s[i * 4 + 1];
+ d[i * 3 + 1] = s[i * 4 + 2];
+ d[i * 3 + 2] = s[i * 4 + 3];
+ }
+ }
+
+ #define PACK_BGR GST_VIDEO_FORMAT_ARGB, unpack_BGR, 1, pack_BGR
+ static void
+ unpack_BGR (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ int i;
+ const guint8 *restrict s = GET_LINE (y);
+ guint8 *restrict d = dest;
+
+ s += x * 3;
+
+ for (i = 0; i < width; i++) {
+ d[i * 4 + 0] = 0xff;
+ d[i * 4 + 1] = s[i * 3 + 2];
+ d[i * 4 + 2] = s[i * 3 + 1];
+ d[i * 4 + 3] = s[i * 3 + 0];
+ }
+ }
+
+ static void
+ pack_BGR (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ int i;
+ guint8 *restrict d = GET_LINE (y);
+ const guint8 *restrict s = src;
+
+ for (i = 0; i < width; i++) {
+ d[i * 3 + 0] = s[i * 4 + 3];
+ d[i * 3 + 1] = s[i * 4 + 2];
+ d[i * 3 + 2] = s[i * 4 + 1];
+ }
+ }
+
+ #define PACK_NV12 GST_VIDEO_FORMAT_AYUV, unpack_NV12, 1, pack_NV12
static void
unpack_NV12 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  /* Unpack semi-planar 4:2:0 NV12 (Y plane + interleaved UV plane) into
   * packed AYUV.  Each UV pair is shared by two horizontally adjacent
   * luma samples; alpha is filled with 0xff. */
  gint uv = GET_UV_420 (y, flags);      /* chroma line for this luma line */
  const guint8 *restrict sy = GET_PLANE_LINE (0, y);
  const guint8 *restrict suv = GET_PLANE_LINE (1, uv);
  guint8 *restrict d = dest;

  sy += x;
  suv += (x & ~1);              /* chroma advances one pair per two pixels */

  if (x & 1) {
    /* odd start: emit the first pixel by hand so the code below can work
     * on full 2-pixel groups */
    d[0] = 0xff;
    d[1] = *sy++;
    d[2] = suv[0];
    d[3] = suv[1];
    width--;
    d += 4;
    suv += 2;
  }

  if (IS_ALIGNED (d, 8))
    /* fast path: the orc kernel needs an 8-byte aligned destination */
    video_orc_unpack_NV12 (d, sy, suv, width / 2);
  else {
    gint i;
    for (i = 0; i < width / 2; i++) {
      d[i * 8 + 0] = 0xff;
      d[i * 8 + 1] = sy[i * 2 + 0];
      d[i * 8 + 2] = suv[i * 2 + 0];
      d[i * 8 + 3] = suv[i * 2 + 1];
      d[i * 8 + 4] = 0xff;
      d[i * 8 + 5] = sy[i * 2 + 1];
      d[i * 8 + 6] = suv[i * 2 + 0];
      d[i * 8 + 7] = suv[i * 2 + 1];
    }
  }

  if (width & 1) {
    /* odd width: the last pixel was not covered by the pair loop */
    gint i = width - 1;

    d[i * 4 + 0] = 0xff;
    d[i * 4 + 1] = sy[i];
    d[i * 4 + 2] = suv[i + 0];
    d[i * 4 + 3] = suv[i + 1];
  }
}
+
static void
pack_NV12 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  /* Pack AYUV into semi-planar 4:2:0 NV12.  The UV plane is only written
   * on chroma lines; other lines store luma only.  Chroma is subsampled
   * by taking the first pixel's U/V of each horizontal pair. */
  gint uv = GET_UV_420 (y, flags);
  guint8 *restrict dy = GET_PLANE_LINE (0, y);
  guint8 *restrict duv = GET_PLANE_LINE (1, uv);
  const guint8 *restrict s = src;

  if (IS_CHROMA_LINE_420 (y, flags)) {
    if (IS_ALIGNED (s, 8))
      /* fast path: the orc kernel needs an 8-byte aligned source */
      video_orc_pack_NV12 (dy, duv, s, width / 2);
    else {
      gint i;
      for (i = 0; i < width / 2; i++) {
        dy[i * 2 + 0] = s[i * 8 + 1];
        dy[i * 2 + 1] = s[i * 8 + 5];
        duv[i * 2 + 0] = s[i * 8 + 2];
        duv[i * 2 + 1] = s[i * 8 + 3];
      }
    }
    if (width & 1) {
      /* odd width: handle the last pixel the pair loop skipped */
      gint i = width - 1;

      dy[i] = s[i * 4 + 1];
      duv[i + 0] = s[i * 4 + 2];
      duv[i + 1] = s[i * 4 + 3];
    }
  } else
    video_orc_pack_Y (dy, s, width);
}
+
+ #define PACK_NV21 GST_VIDEO_FORMAT_AYUV, unpack_NV21, 1, pack_NV21
static void
unpack_NV21 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  /* Unpack semi-planar 4:2:0 NV21 into packed AYUV.  Identical to NV12
   * except the chroma plane stores V first: U is read from suv[1] and V
   * from suv[0]. */
  gint uv = GET_UV_420 (y, flags);      /* chroma line for this luma line */
  const guint8 *restrict sy = GET_PLANE_LINE (0, y);
  const guint8 *restrict suv = GET_PLANE_LINE (1, uv);
  guint8 *restrict d = dest;

  sy += x;
  suv += (x & ~1);              /* one VU pair per two luma samples */

  if (x & 1) {
    /* odd start: emit one pixel by hand so the code below works on pairs */
    d[0] = 0xff;
    d[1] = *sy++;
    d[2] = suv[1];
    d[3] = suv[0];
    width--;
    d += 4;
    suv += 2;
  }

  if (IS_ALIGNED (d, 8))
    /* fast path: the orc kernel needs an 8-byte aligned destination */
    video_orc_unpack_NV21 (d, sy, suv, width / 2);
  else {
    gint i;
    for (i = 0; i < width / 2; i++) {
      d[i * 8 + 0] = 0xff;
      d[i * 8 + 1] = sy[i * 2 + 0];
      d[i * 8 + 2] = suv[i * 2 + 1];
      d[i * 8 + 3] = suv[i * 2 + 0];
      d[i * 8 + 4] = 0xff;
      d[i * 8 + 5] = sy[i * 2 + 1];
      d[i * 8 + 6] = suv[i * 2 + 1];
      d[i * 8 + 7] = suv[i * 2 + 0];
    }
  }

  if (width & 1) {
    /* odd width: the last pixel was not covered by the pair loop */
    gint i = width - 1;

    d[i * 4 + 0] = 0xff;
    d[i * 4 + 1] = sy[i];
    d[i * 4 + 2] = suv[i + 1];
    d[i * 4 + 3] = suv[i + 0];
  }
}
+
+ #define PACK_AV12 GST_VIDEO_FORMAT_AYUV, unpack_AV12, 1, pack_AV12
static void
unpack_AV12 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  /* Unpack AV12 (NV12 plus a full-resolution alpha plane) into packed
   * AYUV.  Layout and odd-x/odd-width handling match unpack_NV12; the
   * only difference is that alpha comes from plane 2 instead of being
   * hard-coded to 0xff. */
  gint uv = GET_UV_420 (y, flags);
  const guint8 *restrict sy = GET_PLANE_LINE (0, y);
  const guint8 *restrict suv = GET_PLANE_LINE (1, uv);
  const guint8 *restrict sa = GET_PLANE_LINE (2, y);    /* a is for 'alpha' */
  guint8 *restrict d = dest;

  sy += x;
  sa += x;
  suv += (x & ~1);              /* one UV pair per two luma samples */

  if (x & 1) {
    /* odd start: emit one pixel by hand so the code below works on pairs */
    d[0] = *sa++;
    d[1] = *sy++;
    d[2] = suv[0];
    d[3] = suv[1];
    width--;
    d += 4;
    suv += 2;
  }

  if (IS_ALIGNED (d, 8)) {
    /* fast path: the orc kernel needs an 8-byte aligned destination */
    video_orc_unpack_AV12 (d, sy, suv, sa, width / 2);
  } else {
    gint i;
    for (i = 0; i < width / 2; i++) {
      d[i * 8 + 0] = sa[i * 2 + 0];
      d[i * 8 + 1] = sy[i * 2 + 0];
      d[i * 8 + 2] = suv[i * 2 + 0];
      d[i * 8 + 3] = suv[i * 2 + 1];
      d[i * 8 + 4] = sa[i * 2 + 1];
      d[i * 8 + 5] = sy[i * 2 + 1];
      d[i * 8 + 6] = suv[i * 2 + 0];
      d[i * 8 + 7] = suv[i * 2 + 1];
    }
  }

  if (width & 1) {
    /* odd width: the last pixel was not covered by the pair loop */
    gint i = width - 1;

    d[i * 4 + 0] = sa[i];
    d[i * 4 + 1] = sy[i];
    d[i * 4 + 2] = suv[i + 0];
    d[i * 4 + 3] = suv[i + 1];
  }
}
+
static void
pack_AV12 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  /* Pack AYUV into AV12 (NV12 + full-resolution alpha plane).  Chroma is
   * written only on chroma lines; alpha and luma are written on every
   * line. */
  gint uv = GET_UV_420 (y, flags);
  guint8 *restrict dy = GET_PLANE_LINE (0, y);
  guint8 *restrict duv = GET_PLANE_LINE (1, uv);
  guint8 *restrict da = GET_PLANE_LINE (2, y);  /* a is for 'alpha' */
  const guint8 *restrict s = src;

  if (IS_CHROMA_LINE_420 (y, flags)) {
    if (IS_ALIGNED (s, 8)) {
      /* fast path: the orc kernel needs an 8-byte aligned source */
      video_orc_pack_AV12 (dy, duv, da, s, width / 2);
    } else {
      gint i;
      for (i = 0; i < width / 2; i++) {
        /* AYUV_AYUV: alpha is on bytes 0 and 4 */
        da[i * 2 + 0] = s[i * 8 + 0];
        da[i * 2 + 1] = s[i * 8 + 4];
        dy[i * 2 + 0] = s[i * 8 + 1];
        dy[i * 2 + 1] = s[i * 8 + 5];
        duv[i * 2 + 0] = s[i * 8 + 2];
        duv[i * 2 + 1] = s[i * 8 + 3];
      }
    }
    if (width & 1) {
      /* odd width: handle the last pixel the pair loop skipped */
      gint i = width - 1;

      da[i] = s[i * 4 + 0];     /* AYUV: alpha is byte 0 */
      dy[i] = s[i * 4 + 1];
      duv[i + 0] = s[i * 4 + 2];
      duv[i + 1] = s[i * 4 + 3];
    }
  } else {
    /* NOTE(review): pack_A420 uses video_orc_pack_AY for its non-chroma
     * lines; confirm video_orc_pack_YA exists and takes (dy, da, src, n)
     * in this argument order. */
    video_orc_pack_YA (dy, da, s, width);
  }
}
+
static void
pack_NV21 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  /* Pack AYUV into semi-planar 4:2:0 NV21 (V stored before U in the
   * chroma plane).  Chroma is written only on chroma lines; other lines
   * store luma only. */
  gint uv = GET_UV_420 (y, flags);
  guint8 *restrict dy = GET_PLANE_LINE (0, y);
  guint8 *restrict duv = GET_PLANE_LINE (1, uv);
  const guint8 *restrict s = src;

  if (IS_CHROMA_LINE_420 (y, flags)) {
    if (IS_ALIGNED (s, 8))
      /* fast path: the orc kernel needs an 8-byte aligned source */
      video_orc_pack_NV21 (dy, duv, s, width / 2);
    else {
      gint i;
      for (i = 0; i < width / 2; i++) {
        dy[i * 2 + 0] = s[i * 8 + 1];
        dy[i * 2 + 1] = s[i * 8 + 5];
        duv[i * 2 + 0] = s[i * 8 + 3];  /* V first */
        duv[i * 2 + 1] = s[i * 8 + 2];
      }
    }
    if (width & 1) {
      /* odd width: handle the last pixel the pair loop skipped */
      gint i = width - 1;

      dy[i] = s[i * 4 + 1];
      duv[i + 0] = s[i * 4 + 3];
      duv[i + 1] = s[i * 4 + 2];
    }
  } else
    video_orc_pack_Y (dy, s, width);
}
+
+ #define PACK_NV16 GST_VIDEO_FORMAT_AYUV, unpack_NV16, 1, pack_NV16
static void
unpack_NV16 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  /* Unpack semi-planar 4:2:2 NV16 into packed AYUV.  Each line has its
   * own chroma line (no vertical subsampling), so the per-line byte
   * layout matches NV12 and the NV12 orc kernel is reused. */
  const guint8 *restrict sy = GET_PLANE_LINE (0, y);
  const guint8 *restrict suv = GET_PLANE_LINE (1, y);
  guint8 *restrict d = dest;

  sy += x;
  suv += (x & ~1);              /* one UV pair per two luma samples */

  if (x & 1) {
    /* odd start: emit one pixel by hand so the code below works on pairs */
    d[0] = 0xff;
    d[1] = *sy++;
    d[2] = suv[0];
    d[3] = suv[1];
    width--;
    d += 4;
    suv += 2;
  }

  if (IS_ALIGNED (d, 8))
    /* per-line layout is the same as NV12, reuse its kernel */
    video_orc_unpack_NV12 (d, sy, suv, width / 2);
  else {
    gint i;
    for (i = 0; i < width / 2; i++) {
      d[i * 8 + 0] = 0xff;
      d[i * 8 + 1] = sy[i * 2 + 0];
      d[i * 8 + 2] = suv[i * 2 + 0];
      d[i * 8 + 3] = suv[i * 2 + 1];
      d[i * 8 + 4] = 0xff;
      d[i * 8 + 5] = sy[i * 2 + 1];
      d[i * 8 + 6] = suv[i * 2 + 0];
      d[i * 8 + 7] = suv[i * 2 + 1];
    }
  }

  if (width & 1) {
    /* odd width: the last pixel was not covered by the pair loop */
    gint i = width - 1;

    d[i * 4 + 0] = 0xff;
    d[i * 4 + 1] = sy[i];
    d[i * 4 + 2] = suv[i + 0];
    d[i * 4 + 3] = suv[i + 1];
  }
}
+
static void
pack_NV16 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  /* Pack AYUV into semi-planar 4:2:2 NV16.  Every line carries chroma,
   * and the per-line layout matches NV12, so the NV12 kernel is reused. */
  guint8 *restrict dy = GET_PLANE_LINE (0, y);
  guint8 *restrict duv = GET_PLANE_LINE (1, y);
  const guint8 *restrict s = src;

  if (IS_ALIGNED (s, 8))
    /* fast path: the orc kernel needs an 8-byte aligned source */
    video_orc_pack_NV12 (dy, duv, s, width / 2);
  else {
    gint i;
    for (i = 0; i < width / 2; i++) {
      dy[i * 2 + 0] = s[i * 8 + 1];
      dy[i * 2 + 1] = s[i * 8 + 5];
      duv[i * 2 + 0] = s[i * 8 + 2];
      duv[i * 2 + 1] = s[i * 8 + 3];
    }
  }

  if (width & 1) {
    /* odd width: handle the last pixel the pair loop skipped */
    gint i = width - 1;

    dy[i] = s[i * 4 + 1];
    duv[i + 0] = s[i * 4 + 2];
    duv[i + 1] = s[i * 4 + 3];
  }
}
+
+ #define PACK_NV61 GST_VIDEO_FORMAT_AYUV, unpack_NV61, 1, pack_NV61
static void
unpack_NV61 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  /* Unpack semi-planar 4:2:2 NV61 into packed AYUV.  Like NV16 but with
   * V stored before U, so the NV21 kernel is reused. */
  const guint8 *restrict sy = GET_PLANE_LINE (0, y);
  const guint8 *restrict svu = GET_PLANE_LINE (1, y);
  guint8 *restrict d = dest;

  sy += x;
  svu += (x & ~1);              /* one VU pair per two luma samples */

  if (x & 1) {
    /* odd start: emit one pixel by hand so the code below works on pairs */
    d[0] = 0xff;
    d[1] = *sy++;
    d[2] = svu[1];
    d[3] = svu[0];
    width--;
    d += 4;
    svu += 2;
  }

  if (IS_ALIGNED (d, 8)) {
    /* per-line layout matches NV21, reuse its kernel */
    video_orc_unpack_NV21 (d, sy, svu, width / 2);
  } else {
    gint i;

    for (i = 0; i < width / 2; i++) {
      d[i * 8 + 0] = 0xff;
      d[i * 8 + 1] = sy[i * 2 + 0];
      d[i * 8 + 2] = svu[i * 2 + 1];
      d[i * 8 + 3] = svu[i * 2 + 0];
      d[i * 8 + 4] = 0xff;
      d[i * 8 + 5] = sy[i * 2 + 1];
      d[i * 8 + 6] = svu[i * 2 + 1];
      d[i * 8 + 7] = svu[i * 2 + 0];
    }
  }

  if (width & 1) {
    /* odd width: the last pixel was not covered by the pair loop */
    gint i = width - 1;

    d[i * 4 + 0] = 0xff;
    d[i * 4 + 1] = sy[i];
    d[i * 4 + 2] = svu[i + 1];
    d[i * 4 + 3] = svu[i + 0];
  }
}
+
static void
pack_NV61 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  /* Pack AYUV into semi-planar 4:2:2 NV61 (V before U).  Every line
   * carries chroma; the per-line layout matches NV21, so its kernel is
   * reused. */
  const guint8 *restrict s = src;
  guint8 *restrict dy = GET_PLANE_LINE (0, y);
  guint8 *restrict dvu = GET_PLANE_LINE (1, y);

  if (IS_ALIGNED (s, 8)) {
    /* fast path: the orc kernel needs an 8-byte aligned source */
    video_orc_pack_NV21 (dy, dvu, s, width / 2);
  } else {
    gint i;

    for (i = 0; i < width / 2; i++) {
      dy[i * 2 + 0] = s[i * 8 + 1];
      dy[i * 2 + 1] = s[i * 8 + 5];
      dvu[i * 2 + 0] = s[i * 8 + 3];    /* V first */
      dvu[i * 2 + 1] = s[i * 8 + 2];
    }
  }

  if (width & 1) {
    /* odd width: handle the last pixel the pair loop skipped */
    gint i = width - 1;

    dy[i] = s[i * 4 + 1];
    dvu[i + 0] = s[i * 4 + 2];
    dvu[i + 1] = s[i * 4 + 3];
  }
}
+
+ #define PACK_NV24 GST_VIDEO_FORMAT_AYUV, unpack_NV24, 1, pack_NV24
+ static void
+ unpack_NV24 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ const guint8 *restrict sy = GET_PLANE_LINE (0, y);
+ const guint8 *restrict suv = GET_PLANE_LINE (1, y);
+
+ sy += x;
+ suv += x << 1;
+
+ video_orc_unpack_NV24 (dest, sy, suv, width);
+ }
+
+ static void
+ pack_NV24 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ guint8 *restrict dy = GET_PLANE_LINE (0, y);
+ guint8 *restrict duv = GET_PLANE_LINE (1, y);
+
+ video_orc_pack_NV24 (dy, duv, src, width);
+ }
+
+ #define PACK_UYVP GST_VIDEO_FORMAT_AYUV64, unpack_UYVP, 1, pack_UYVP
static void
unpack_UYVP (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  /* Unpack UYVP (10-bit 4:2:2, two pixels packed in 5 bytes) into
   * AYUV64.  Each 10-bit component is shifted into the top bits of a
   * 16-bit value; unless TRUNCATE_RANGE is set, the top bits are
   * replicated into the low bits to scale 10 bits up to 16. */
  int i;
  const guint8 *restrict s = GET_LINE (y);
  guint16 *restrict d = dest;

  /* FIXME: this advance assumes 2 bytes per pixel, but UYVP stores 2
   * pixels in 5 bytes, so a non-zero x lands on the wrong byte. */
  s += x << 1;

  for (i = 0; i < width; i += 2) {
    guint16 y0, y1;
    guint16 u0;
    guint16 v0;

    /* 5-byte group: u0[9:2] | u0[1:0],y0[9:4] | y0[3:0],v0[9:6] |
     * v0[5:0],y1[9:8] | y1[7:0]; each value ends up in bits 15..6 */
    u0 = ((s[(i / 2) * 5 + 0] << 2) | (s[(i / 2) * 5 + 1] >> 6)) << 6;
    y0 = (((s[(i / 2) * 5 + 1] & 0x3f) << 4) | (s[(i / 2) * 5 + 2] >> 4)) << 6;
    v0 = (((s[(i / 2) * 5 + 2] & 0x0f) << 6) | (s[(i / 2) * 5 + 3] >> 2)) << 6;
    y1 = (((s[(i / 2) * 5 + 3] & 0x03) << 8) | s[(i / 2) * 5 + 4]) << 6;

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      /* replicate the top bits so full-scale 10-bit maps to 0xffff */
      y0 |= (y0 >> 10);
      y1 |= (y1 >> 10);
      u0 |= (u0 >> 10);
      v0 |= (v0 >> 10);
    }

    d[i * 4 + 0] = 0xffff;
    d[i * 4 + 1] = y0;
    d[i * 4 + 2] = u0;
    d[i * 4 + 3] = v0;

    if (i < width - 1) {
      /* second pixel of the pair shares the chroma samples */
      d[i * 4 + 4] = 0xffff;
      d[i * 4 + 5] = y1;
      d[i * 4 + 6] = u0;
      d[i * 4 + 7] = v0;
    }
  }
}
+
static void
pack_UYVP (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  /* Pack AYUV64 into UYVP: two pixels per 5-byte group, keeping the top
   * 10 bits of each 16-bit component.  Chroma is taken from the first
   * pixel of each pair; an odd trailing pixel repeats its own luma. */
  int i;
  guint8 *restrict d = GET_LINE (y);
  const guint16 *restrict s = src;

  for (i = 0; i < width; i += 2) {
    guint16 y0, y1;
    guint16 u0;
    guint16 v0;

    y0 = s[4 * (i + 0) + 1];
    if (i < width - 1)
      y1 = s[4 * (i + 1) + 1];
    else
      y1 = y0;

    u0 = s[4 * (i + 0) + 2];
    v0 = s[4 * (i + 0) + 3];

    /* components live in bits 15..6; distribute their 10 bits over the
     * 5-byte group (see unpack_UYVP for the bit layout) */
    d[(i / 2) * 5 + 0] = u0 >> 8;
    d[(i / 2) * 5 + 1] = (u0 & 0xc0) | y0 >> 10;
    d[(i / 2) * 5 + 2] = ((y0 & 0x3c0) >> 2) | (v0 >> 12);
    d[(i / 2) * 5 + 3] = ((v0 & 0xfc0) >> 4) | (y1 >> 14);
    d[(i / 2) * 5 + 4] = (y1 >> 6);
  }
}
+
+ #define PACK_A420 GST_VIDEO_FORMAT_AYUV, unpack_A420, 1, pack_A420
static void
unpack_A420 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  /* Unpack planar 4:2:0 + alpha (A420) into packed AYUV.  U and V are
   * half horizontal resolution; alpha is full resolution. */
  gint uv = GET_UV_420 (y, flags);      /* chroma line for this luma line */
  const guint8 *restrict sy = GET_Y_LINE (y);
  const guint8 *restrict su = GET_U_LINE (uv);
  const guint8 *restrict sv = GET_V_LINE (uv);
  const guint8 *restrict sa = GET_A_LINE (y);
  guint8 *restrict d = dest;

  sy += x;
  su += x >> 1;                 /* one chroma sample per two pixels */
  sv += x >> 1;
  sa += x;

  if (x & 1) {
    /* odd start: emit one pixel by hand so the orc kernel starts on an
     * even pixel boundary */
    d[0] = *sa++;
    d[1] = *sy++;
    d[2] = *su++;
    d[3] = *sv++;
    width--;
    d += 4;
  }
  video_orc_unpack_A420 (d, sy, su, sv, sa, width);
}
+
static void
pack_A420 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  /* Pack AYUV into planar 4:2:0 + alpha (A420).  Chroma is written only
   * on chroma lines and taken from the first pixel of each pair; luma
   * and alpha are written on every line. */
  gint uv = GET_UV_420 (y, flags);
  guint8 *restrict dy = GET_Y_LINE (y);
  guint8 *restrict du = GET_U_LINE (uv);
  guint8 *restrict dv = GET_V_LINE (uv);
  guint8 *restrict da = GET_A_LINE (y);
  const guint8 *restrict s = src;

  if (IS_CHROMA_LINE_420 (y, flags)) {
    if (IS_ALIGNED (s, 8))
      /* fast path: the orc kernel needs an 8-byte aligned source */
      video_orc_pack_A420 (dy, du, dv, da, s, width / 2);
    else {
      gint i;
      for (i = 0; i < width / 2; i++) {
        da[i * 2 + 0] = s[i * 8 + 0];
        dy[i * 2 + 0] = s[i * 8 + 1];
        da[i * 2 + 1] = s[i * 8 + 4];
        dy[i * 2 + 1] = s[i * 8 + 5];
        du[i] = s[i * 8 + 2];
        dv[i] = s[i * 8 + 3];
      }
    }

    if (width & 1) {
      /* odd width: handle the last pixel the pair loop skipped */
      gint i = width - 1;

      da[i] = s[i * 4 + 0];
      dy[i] = s[i * 4 + 1];
      du[i >> 1] = s[i * 4 + 2];
      dv[i >> 1] = s[i * 4 + 3];
    }
  } else
    video_orc_pack_AY (dy, da, s, width);
}
+
+ #define PACK_RGB8P GST_VIDEO_FORMAT_ARGB, unpack_RGB8P, 1, pack_RGB8P
+ static void
+ unpack_RGB8P (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ int i;
+ const guint8 *restrict s = GET_LINE (y);
+ const guint32 *restrict p = data[1];
+ guint8 *restrict d = dest;
+
+ s += x;
+
+ for (i = 0; i < width; i++) {
+ guint32 v = p[s[i]];
+ d[i * 4 + 0] = (v >> 24) & 0xff;
+ d[i * 4 + 1] = (v >> 16) & 0xff;
+ d[i * 4 + 2] = (v >> 8) & 0xff;
+ d[i * 4 + 3] = (v) & 0xff;
+ }
+ }
+
/* Default palette for RGB8P: a 6x6x6 color cube (indices 0-215) with
 * component levels 0x00,0x33,0x66,0x99,0xcc,0xff stored as 0xAARRGGBB.
 * Index 216 is fully transparent black; the remaining entries pad the
 * table to 256 with opaque black. */
static const guint32 std_palette_RGB8P[] = {
  0xff000000, 0xff000033, 0xff000066, 0xff000099, 0xff0000cc, 0xff0000ff,
  0xff003300, 0xff003333, 0xff003366, 0xff003399, 0xff0033cc, 0xff0033ff,
  0xff006600, 0xff006633, 0xff006666, 0xff006699, 0xff0066cc, 0xff0066ff,
  0xff009900, 0xff009933, 0xff009966, 0xff009999, 0xff0099cc, 0xff0099ff,
  0xff00cc00, 0xff00cc33, 0xff00cc66, 0xff00cc99, 0xff00cccc, 0xff00ccff,
  0xff00ff00, 0xff00ff33, 0xff00ff66, 0xff00ff99, 0xff00ffcc, 0xff00ffff,
  0xff330000, 0xff330033, 0xff330066, 0xff330099, 0xff3300cc, 0xff3300ff,
  0xff333300, 0xff333333, 0xff333366, 0xff333399, 0xff3333cc, 0xff3333ff,
  0xff336600, 0xff336633, 0xff336666, 0xff336699, 0xff3366cc, 0xff3366ff,
  0xff339900, 0xff339933, 0xff339966, 0xff339999, 0xff3399cc, 0xff3399ff,
  0xff33cc00, 0xff33cc33, 0xff33cc66, 0xff33cc99, 0xff33cccc, 0xff33ccff,
  0xff33ff00, 0xff33ff33, 0xff33ff66, 0xff33ff99, 0xff33ffcc, 0xff33ffff,
  0xff660000, 0xff660033, 0xff660066, 0xff660099, 0xff6600cc, 0xff6600ff,
  0xff663300, 0xff663333, 0xff663366, 0xff663399, 0xff6633cc, 0xff6633ff,
  0xff666600, 0xff666633, 0xff666666, 0xff666699, 0xff6666cc, 0xff6666ff,
  0xff669900, 0xff669933, 0xff669966, 0xff669999, 0xff6699cc, 0xff6699ff,
  0xff66cc00, 0xff66cc33, 0xff66cc66, 0xff66cc99, 0xff66cccc, 0xff66ccff,
  0xff66ff00, 0xff66ff33, 0xff66ff66, 0xff66ff99, 0xff66ffcc, 0xff66ffff,
  0xff990000, 0xff990033, 0xff990066, 0xff990099, 0xff9900cc, 0xff9900ff,
  0xff993300, 0xff993333, 0xff993366, 0xff993399, 0xff9933cc, 0xff9933ff,
  0xff996600, 0xff996633, 0xff996666, 0xff996699, 0xff9966cc, 0xff9966ff,
  0xff999900, 0xff999933, 0xff999966, 0xff999999, 0xff9999cc, 0xff9999ff,
  0xff99cc00, 0xff99cc33, 0xff99cc66, 0xff99cc99, 0xff99cccc, 0xff99ccff,
  0xff99ff00, 0xff99ff33, 0xff99ff66, 0xff99ff99, 0xff99ffcc, 0xff99ffff,
  0xffcc0000, 0xffcc0033, 0xffcc0066, 0xffcc0099, 0xffcc00cc, 0xffcc00ff,
  0xffcc3300, 0xffcc3333, 0xffcc3366, 0xffcc3399, 0xffcc33cc, 0xffcc33ff,
  0xffcc6600, 0xffcc6633, 0xffcc6666, 0xffcc6699, 0xffcc66cc, 0xffcc66ff,
  0xffcc9900, 0xffcc9933, 0xffcc9966, 0xffcc9999, 0xffcc99cc, 0xffcc99ff,
  0xffcccc00, 0xffcccc33, 0xffcccc66, 0xffcccc99, 0xffcccccc, 0xffccccff,
  0xffccff00, 0xffccff33, 0xffccff66, 0xffccff99, 0xffccffcc, 0xffccffff,
  0xffff0000, 0xffff0033, 0xffff0066, 0xffff0099, 0xffff00cc, 0xffff00ff,
  0xffff3300, 0xffff3333, 0xffff3366, 0xffff3399, 0xffff33cc, 0xffff33ff,
  0xffff6600, 0xffff6633, 0xffff6666, 0xffff6699, 0xffff66cc, 0xffff66ff,
  0xffff9900, 0xffff9933, 0xffff9966, 0xffff9999, 0xffff99cc, 0xffff99ff,
  0xffffcc00, 0xffffcc33, 0xffffcc66, 0xffffcc99, 0xffffcccc, 0xffffccff,
  0xffffff00, 0xffffff33, 0xffffff66, 0xffffff99, 0xffffffcc, 0xffffffff,
  0x00000000, 0xff000000, 0xff000000, 0xff000000, 0xff000000, 0xff000000,
  0xff000000, 0xff000000, 0xff000000, 0xff000000, 0xff000000, 0xff000000,
  0xff000000, 0xff000000, 0xff000000, 0xff000000, 0xff000000, 0xff000000,
  0xff000000, 0xff000000, 0xff000000, 0xff000000, 0xff000000, 0xff000000,
  0xff000000, 0xff000000, 0xff000000, 0xff000000, 0xff000000, 0xff000000,
  0xff000000, 0xff000000, 0xff000000, 0xff000000, 0xff000000, 0xff000000,
  0xff000000, 0xff000000, 0xff000000, 0xff000000
};
+
+ static void
+ pack_RGB8P (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ int i;
+ guint8 *restrict d = GET_LINE (y);
+ const guint8 *restrict s = src;
+
+ /* Use our poor man's palette, taken from ffmpegcolorspace too */
+ for (i = 0; i < width; i++) {
+ /* crude approximation for alpha ! */
+ if (s[i * 4 + 0] < 0x80)
+ d[i] = 6 * 6 * 6;
+ else
+ d[i] =
+ ((((s[i * 4 + 1]) / 47) % 6) * 6 * 6 + (((s[i * 4 +
+ 2]) / 47) % 6) * 6 + (((s[i * 4 + 3]) / 47) % 6));
+ }
+ }
+
+ #define PACK_410 GST_VIDEO_FORMAT_AYUV, unpack_410, 1, pack_410
+ /* Unpack 4:1:0 planar (YUV9-style, one U/V per 4x4 pixels) into AYUV. */
+ static void
+ unpack_410 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+   gint uv = GET_UV_410 (y, flags);
+   const guint8 *restrict sy = GET_Y_LINE (y);
+   const guint8 *restrict su = GET_U_LINE (uv);
+   const guint8 *restrict sv = GET_V_LINE (uv);
+   guint8 *restrict d = dest;
+
+   sy += x;
+   su += x >> 2;
+   sv += x >> 2;
+
+   /* consume pixels up to the next 4-pixel chroma boundary so the fast
+    * path below starts chroma-aligned */
+   if (x & 3) {
+     for (; x & 3; x++) {
+       d[0] = 0xff;
+       d[1] = *sy++;
+       d[2] = *su;
+       d[3] = *sv;
+       width--;
+       d += 4;
+     }
+     /* FIX: step both chroma pointers past the partially consumed group.
+      * The previous code did 'su++; sy++;' — but sy was already advanced
+      * inside the loop, so a luma sample was skipped and sv kept pointing
+      * at stale chroma. */
+     su++;
+     sv++;
+   }
+
+   if (IS_ALIGNED (d, 8))
+     video_orc_unpack_YUV9 (d, sy, su, sv, width / 2);
+   else {
+     gint i;
+     /* scalar fallback for an unaligned destination */
+     for (i = 0; i < width / 2; i++) {
+       d[i * 8 + 0] = 0xff;
+       d[i * 8 + 1] = sy[i * 2 + 0];
+       d[i * 8 + 2] = su[i >> 1];
+       d[i * 8 + 3] = sv[i >> 1];
+       d[i * 8 + 4] = 0xff;
+       d[i * 8 + 5] = sy[i * 2 + 1];
+       d[i * 8 + 6] = su[i >> 1];
+       d[i * 8 + 7] = sv[i >> 1];
+     }
+   }
+
+   /* odd-width tail: emit the last pixel that the pairwise loop missed */
+   if (width & 1) {
+     gint i = width - 1;
+
+     d[i * 4 + 0] = 0xff;
+     d[i * 4 + 1] = sy[i];
+     d[i * 4 + 2] = su[i >> 2];
+     d[i * 4 + 3] = sv[i >> 2];
+   }
+ }
+
+ /* Pack AYUV back into 4:1:0 planar layout: one U and one V sample per
+  * 4x4 block of pixels; chroma is only stored on lines that carry it. */
+ static void
+ pack_410 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+     gint y, gint width)
+ {
+   int i;
+   gint uv = GET_UV_410 (y, flags);
+   guint8 *restrict dy = GET_Y_LINE (y);
+   guint8 *restrict du = GET_U_LINE (uv);
+   guint8 *restrict dv = GET_V_LINE (uv);
+   const guint8 *restrict s = src;
+
+   /* main loop: 4 luma samples per iteration; chroma is taken from the
+    * first pixel of each group and only written on chroma-carrying lines */
+   for (i = 0; i < width - 3; i += 4) {
+     dy[i] = s[i * 4 + 1];
+     dy[i + 1] = s[i * 4 + 5];
+     dy[i + 2] = s[i * 4 + 9];
+     dy[i + 3] = s[i * 4 + 13];
+     if (IS_CHROMA_LINE_410 (y, flags)) {
+       du[i >> 2] = s[i * 4 + 2];
+       dv[i >> 2] = s[i * 4 + 3];
+     }
+   }
+   /* tail: 1-3 leftover pixels when width is not a multiple of 4 */
+   if (i < width) {
+     dy[i] = s[i * 4 + 1];
+     if (IS_CHROMA_LINE_410 (y, flags)) {
+       du[i >> 2] = s[i * 4 + 2];
+       dv[i >> 2] = s[i * 4 + 3];
+     }
+     if (i < width - 1)
+       dy[i + 1] = s[i * 4 + 5];
+     if (i < width - 2)
+       dy[i + 2] = s[i * 4 + 9];
+   }
+ }
+
+ #define PACK_IYU1 GST_VIDEO_FORMAT_AYUV, unpack_IYU1, 1, pack_IYU1
+ /* Unpack IYU1 (4:1:1 packed: U Y0 Y1 V Y2 Y3, 4 pixels in 6 bytes)
+  * into AYUV with full alpha. */
+ static void
+ unpack_IYU1 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+   int i;
+   const guint8 *restrict s = GET_LINE (y);
+   guint8 *restrict d = dest;
+   guint8 y0, y1, y2, y3;
+   guint8 u0;
+   guint8 v0;
+
+   /* FIXME: x is scaled as if the source held 4 bytes per pixel, but
+    * IYU1 packs 4 pixels into 6 bytes — this looks wrong for x > 0;
+    * confirm against callers before relying on a non-zero x here */
+   s += x * 4;
+
+   /* main loop: one 6-byte group -> 4 AYUV pixels sharing u0/v0 */
+   for (i = 0; i < width - 3; i += 4) {
+     y0 = s[(i >> 2) * 6 + 1];
+     y1 = s[(i >> 2) * 6 + 2];
+     y2 = s[(i >> 2) * 6 + 4];
+     y3 = s[(i >> 2) * 6 + 5];
+
+     u0 = s[(i >> 2) * 6 + 0];
+     v0 = s[(i >> 2) * 6 + 3];
+
+     d[i * 4 + 0] = 0xff;
+     d[i * 4 + 1] = y0;
+     d[i * 4 + 2] = u0;
+     d[i * 4 + 3] = v0;
+
+     d[i * 4 + 4] = 0xff;
+     d[i * 4 + 5] = y1;
+     d[i * 4 + 6] = u0;
+     d[i * 4 + 7] = v0;
+
+     d[i * 4 + 8] = 0xff;
+     d[i * 4 + 9] = y2;
+     d[i * 4 + 10] = u0;
+     d[i * 4 + 11] = v0;
+
+     d[i * 4 + 12] = 0xff;
+     d[i * 4 + 13] = y3;
+     d[i * 4 + 14] = u0;
+     d[i * 4 + 15] = v0;
+   }
+   /* tail: 1-3 leftover pixels of the final partial group */
+   if (i < width) {
+     u0 = s[(i >> 2) * 6 + 0];
+     v0 = s[(i >> 2) * 6 + 3];
+
+     d[i * 4 + 0] = 0xff;
+     d[i * 4 + 1] = s[(i >> 2) * 6 + 1];
+     d[i * 4 + 2] = u0;
+     d[i * 4 + 3] = v0;
+
+     if (i < width - 1) {
+       d[i * 4 + 4] = 0xff;
+       d[i * 4 + 5] = s[(i >> 2) * 6 + 2];
+       d[i * 4 + 6] = u0;
+       d[i * 4 + 7] = v0;
+     }
+     if (i < width - 2) {
+       d[i * 4 + 8] = 0xff;
+       d[i * 4 + 9] = s[(i >> 2) * 6 + 4];
+       d[i * 4 + 10] = u0;
+       d[i * 4 + 11] = v0;
+     }
+   }
+ }
+
+ /* Pack AYUV into IYU1: write U Y0 Y1 V Y2 Y3 (6 bytes per 4 pixels),
+  * taking chroma from the first pixel of each 4-pixel group. */
+ static void
+ pack_IYU1 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+     gint y, gint width)
+ {
+   int i;
+   guint8 *restrict d = GET_LINE (y);
+   const guint8 *restrict s = src;
+
+   for (i = 0; i < width - 3; i += 4) {
+     d[(i >> 2) * 6 + 0] = s[i * 4 + 2];
+     d[(i >> 2) * 6 + 1] = s[i * 4 + 1];
+     d[(i >> 2) * 6 + 2] = s[i * 4 + 5];
+     d[(i >> 2) * 6 + 3] = s[i * 4 + 3];
+     d[(i >> 2) * 6 + 4] = s[i * 4 + 9];
+     d[(i >> 2) * 6 + 5] = s[i * 4 + 13];
+   }
+   /* tail: partial final group — only the luma bytes that exist are
+    * written, plus the group's U and V */
+   if (i < width) {
+     d[(i >> 2) * 6 + 1] = s[i * 4 + 1];
+     d[(i >> 2) * 6 + 0] = s[i * 4 + 2];
+     d[(i >> 2) * 6 + 3] = s[i * 4 + 3];
+     if (i < width - 1)
+       d[(i >> 2) * 6 + 2] = s[i * 4 + 5];
+     if (i < width - 2)
+       d[(i >> 2) * 6 + 4] = s[i * 4 + 9];
+   }
+ }
+
+ #define PACK_ARGB64 GST_VIDEO_FORMAT_ARGB64, unpack_copy8, 1, pack_copy8
+ #define PACK_AYUV64 GST_VIDEO_FORMAT_AYUV64, unpack_copy8, 1, pack_copy8
+ /* 16-bit 4-component formats are already in unpack order: plain copy
+  * of 8 bytes per pixel, starting at the x offset. */
+ static void
+ unpack_copy8 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+   const guint8 *line = GET_LINE (y);
+
+   memcpy (dest, line + x * 8, width * 8);
+ }
+
+ /* Inverse of unpack_copy8: straight 8-bytes-per-pixel copy back into
+  * the destination line. */
+ static void
+ pack_copy8 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+     gint y, gint width)
+ {
+   guint8 *restrict line = GET_LINE (y);
+
+   memcpy (line, src, width * 8);
+ }
+
+ #define PACK_r210 GST_VIDEO_FORMAT_ARGB64, unpack_r210, 1, pack_r210
+ /* Unpack big-endian r210 (2-bit pad + 10-bit R, G, B) to ARGB64. */
+ static void
+ unpack_r210 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+   int i;
+   const guint8 *restrict s = GET_LINE (y);
+   guint16 *restrict d = dest, R, G, B;
+
+   s += x * 4;
+
+   for (i = 0; i < width; i++) {
+     /* renamed from 'x': the old local shadowed the parameter 'x' used
+      * just above (-Wshadow), and was easy to misread */
+     guint32 pix = GST_READ_UINT32_BE (s + i * 4);
+
+     /* left-justify the 10-bit components in 16 bits */
+     R = ((pix >> 14) & 0xffc0);
+     G = ((pix >> 4) & 0xffc0);
+     B = ((pix << 6) & 0xffc0);
+
+     if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+       /* replicate high bits into the low bits so full scale is 0xffff */
+       R |= (R >> 10);
+       G |= (G >> 10);
+       B |= (B >> 10);
+     }
+
+     d[i * 4 + 0] = 0xffff;
+     d[i * 4 + 1] = R;
+     d[i * 4 + 2] = G;
+     d[i * 4 + 3] = B;
+   }
+ }
+
+ /* Pack ARGB64 into big-endian r210: keep the top 10 bits of each of
+  * R, G and B; alpha is dropped. */
+ static void
+ pack_r210 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+     gint y, gint width)
+ {
+   int j;
+   guint8 *restrict d = GET_LINE (y);
+   const guint16 *restrict p = src;
+
+   for (j = 0; j < width; j++, p += 4, d += 4) {
+     guint32 w = ((guint32) (p[1] & 0xffc0) << 14)
+         | ((guint32) (p[2] & 0xffc0) << 4)
+         | ((p[3] & 0xffc0) >> 6);
+
+     GST_WRITE_UINT32_BE (d, w);
+   }
+ }
+
+ #define PACK_GBR_10LE GST_VIDEO_FORMAT_ARGB64, unpack_GBR_10LE, 1, pack_GBR_10LE
+ /* Unpack little-endian 10-bit planar GBR to MSB-aligned ARGB64. */
+ static void
+ unpack_GBR_10LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+   int i;
+   /* 'restrict' added for consistency with unpack_GBR_10BE: the source
+    * planes and the destination never alias */
+   const guint16 *restrict sg = GET_G_LINE (y);
+   const guint16 *restrict sb = GET_B_LINE (y);
+   const guint16 *restrict sr = GET_R_LINE (y);
+   guint16 *restrict d = dest, G, B, R;
+
+   sg += x;
+   sb += x;
+   sr += x;
+
+   for (i = 0; i < width; i++) {
+     /* left-justify the 10-bit samples in 16 bits */
+     G = GST_READ_UINT16_LE (sg + i) << 6;
+     B = GST_READ_UINT16_LE (sb + i) << 6;
+     R = GST_READ_UINT16_LE (sr + i) << 6;
+
+     if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+       /* replicate high bits into the low bits so full scale is 0xffff */
+       R |= (R >> 10);
+       G |= (G >> 10);
+       B |= (B >> 10);
+     }
+
+     d[i * 4 + 0] = 0xffff;
+     d[i * 4 + 1] = R;
+     d[i * 4 + 2] = G;
+     d[i * 4 + 3] = B;
+   }
+ }
+
+ /* Pack ARGB64 into little-endian 10-bit planar GBR: drop the 6 low
+  * bits of each component, discard alpha. */
+ static void
+ pack_GBR_10LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+     gint y, gint width)
+ {
+   int j;
+   guint16 *restrict dg = GET_G_LINE (y);
+   guint16 *restrict db = GET_B_LINE (y);
+   guint16 *restrict dr = GET_R_LINE (y);
+   const guint16 *restrict p = src;
+
+   for (j = 0; j < width; j++, p += 4) {
+     GST_WRITE_UINT16_LE (dg + j, p[2] >> 6);
+     GST_WRITE_UINT16_LE (db + j, p[3] >> 6);
+     GST_WRITE_UINT16_LE (dr + j, p[1] >> 6);
+   }
+ }
+
+ #define PACK_GBR_10BE GST_VIDEO_FORMAT_ARGB64, unpack_GBR_10BE, 1, pack_GBR_10BE
+ /* Unpack big-endian 10-bit planar GBR to MSB-aligned ARGB64. */
+ static void
+ unpack_GBR_10BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+   int j;
+   const guint16 *restrict sg = GET_G_LINE (y);
+   const guint16 *restrict sb = GET_B_LINE (y);
+   const guint16 *restrict sr = GET_R_LINE (y);
+   guint16 *restrict out = dest;
+   guint16 g16, b16, r16;
+
+   sg += x;
+   sb += x;
+   sr += x;
+
+   for (j = 0; j < width; j++, out += 4) {
+     /* left-justify the 10-bit samples in 16 bits */
+     g16 = GST_READ_UINT16_BE (sg + j) << 6;
+     b16 = GST_READ_UINT16_BE (sb + j) << 6;
+     r16 = GST_READ_UINT16_BE (sr + j) << 6;
+
+     if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+       /* replicate high bits so full scale reaches 0xffff */
+       r16 |= (r16 >> 10);
+       g16 |= (g16 >> 10);
+       b16 |= (b16 >> 10);
+     }
+
+     out[0] = 0xffff;
+     out[1] = r16;
+     out[2] = g16;
+     out[3] = b16;
+   }
+ }
+
+ /* Pack ARGB64 into big-endian 10-bit planar GBR: drop the 6 low bits
+  * of each component, discard alpha. */
+ static void
+ pack_GBR_10BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+     gint y, gint width)
+ {
+   int j;
+   guint16 *restrict dg = GET_G_LINE (y);
+   guint16 *restrict db = GET_B_LINE (y);
+   guint16 *restrict dr = GET_R_LINE (y);
+   const guint16 *restrict p = src;
+
+   for (j = 0; j < width; j++, p += 4) {
+     GST_WRITE_UINT16_BE (dg + j, p[2] >> 6);
+     GST_WRITE_UINT16_BE (db + j, p[3] >> 6);
+     GST_WRITE_UINT16_BE (dr + j, p[1] >> 6);
+   }
+ }
+
+ #define PACK_GBRA_10LE GST_VIDEO_FORMAT_ARGB64, unpack_GBRA_10LE, 1, pack_GBRA_10LE
+ /* Unpack little-endian 10-bit planar GBRA to MSB-aligned ARGB64. */
+ static void
+ unpack_GBRA_10LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+   int i;
+   /* 'restrict' added for consistency with unpack_GBRA_10BE: the four
+    * source planes and the destination never alias */
+   const guint16 *restrict sg = GET_G_LINE (y);
+   const guint16 *restrict sb = GET_B_LINE (y);
+   const guint16 *restrict sr = GET_R_LINE (y);
+   const guint16 *restrict sa = GET_A_LINE (y);
+   guint16 *restrict d = dest, G, B, R, A;
+
+   sg += x;
+   sb += x;
+   sr += x;
+   sa += x;
+
+   for (i = 0; i < width; i++) {
+     /* left-justify the 10-bit samples in 16 bits */
+     G = GST_READ_UINT16_LE (sg + i) << 6;
+     B = GST_READ_UINT16_LE (sb + i) << 6;
+     R = GST_READ_UINT16_LE (sr + i) << 6;
+     A = GST_READ_UINT16_LE (sa + i) << 6;
+
+     if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+       /* replicate high bits into the low bits so full scale is 0xffff */
+       R |= (R >> 10);
+       G |= (G >> 10);
+       B |= (B >> 10);
+       A |= (A >> 10);
+     }
+
+     d[i * 4 + 0] = A;
+     d[i * 4 + 1] = R;
+     d[i * 4 + 2] = G;
+     d[i * 4 + 3] = B;
+   }
+ }
+
+ /* Pack ARGB64 into little-endian 10-bit planar GBRA: drop the 6 low
+  * bits of each component, including alpha. */
+ static void
+ pack_GBRA_10LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+     gint y, gint width)
+ {
+   int j;
+   guint16 *restrict dg = GET_G_LINE (y);
+   guint16 *restrict db = GET_B_LINE (y);
+   guint16 *restrict dr = GET_R_LINE (y);
+   guint16 *restrict da = GET_A_LINE (y);
+   const guint16 *restrict p = src;
+
+   for (j = 0; j < width; j++, p += 4) {
+     GST_WRITE_UINT16_LE (dg + j, p[2] >> 6);
+     GST_WRITE_UINT16_LE (db + j, p[3] >> 6);
+     GST_WRITE_UINT16_LE (dr + j, p[1] >> 6);
+     GST_WRITE_UINT16_LE (da + j, p[0] >> 6);
+   }
+ }
+
+ #define PACK_GBRA_10BE GST_VIDEO_FORMAT_ARGB64, unpack_GBRA_10BE, 1, pack_GBRA_10BE
+ /* Unpack big-endian 10-bit planar GBRA to MSB-aligned ARGB64. */
+ static void
+ unpack_GBRA_10BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+   int j;
+   const guint16 *restrict sg = GET_G_LINE (y);
+   const guint16 *restrict sb = GET_B_LINE (y);
+   const guint16 *restrict sr = GET_R_LINE (y);
+   const guint16 *restrict sa = GET_A_LINE (y);
+   guint16 *restrict out = dest;
+   guint16 g16, b16, r16, a16;
+
+   sg += x;
+   sb += x;
+   sr += x;
+   sa += x;
+
+   for (j = 0; j < width; j++, out += 4) {
+     /* left-justify the 10-bit samples in 16 bits */
+     g16 = GST_READ_UINT16_BE (sg + j) << 6;
+     b16 = GST_READ_UINT16_BE (sb + j) << 6;
+     r16 = GST_READ_UINT16_BE (sr + j) << 6;
+     a16 = GST_READ_UINT16_BE (sa + j) << 6;
+
+     if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+       /* replicate high bits so full scale reaches 0xffff */
+       r16 |= (r16 >> 10);
+       g16 |= (g16 >> 10);
+       b16 |= (b16 >> 10);
+       a16 |= (a16 >> 10);
+     }
+
+     out[0] = a16;
+     out[1] = r16;
+     out[2] = g16;
+     out[3] = b16;
+   }
+ }
+
+ /* Pack ARGB64 into big-endian 10-bit planar GBRA: drop the 6 low bits
+  * of each component, including alpha. */
+ static void
+ pack_GBRA_10BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+     gint y, gint width)
+ {
+   int j;
+   guint16 *restrict dg = GET_G_LINE (y);
+   guint16 *restrict db = GET_B_LINE (y);
+   guint16 *restrict dr = GET_R_LINE (y);
+   guint16 *restrict da = GET_A_LINE (y);
+   const guint16 *restrict p = src;
+
+   for (j = 0; j < width; j++, p += 4) {
+     GST_WRITE_UINT16_BE (dg + j, p[2] >> 6);
+     GST_WRITE_UINT16_BE (db + j, p[3] >> 6);
+     GST_WRITE_UINT16_BE (dr + j, p[1] >> 6);
+     GST_WRITE_UINT16_BE (da + j, p[0] >> 6);
+   }
+ }
+
+ #define PACK_GBR_12LE GST_VIDEO_FORMAT_ARGB64, unpack_GBR_12LE, 1, pack_GBR_12LE
+ /* Unpack little-endian 12-bit planar GBR to MSB-aligned ARGB64. */
+ static void
+ unpack_GBR_12LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+   int i;
+   /* 'restrict' added for consistency with unpack_GBR_12BE: the source
+    * planes and the destination never alias */
+   const guint16 *restrict sg = GET_G_LINE (y);
+   const guint16 *restrict sb = GET_B_LINE (y);
+   const guint16 *restrict sr = GET_R_LINE (y);
+   guint16 *restrict d = dest, G, B, R;
+
+   sg += x;
+   sb += x;
+   sr += x;
+
+   for (i = 0; i < width; i++) {
+     /* left-justify the 12-bit samples in 16 bits */
+     G = GST_READ_UINT16_LE (sg + i) << 4;
+     B = GST_READ_UINT16_LE (sb + i) << 4;
+     R = GST_READ_UINT16_LE (sr + i) << 4;
+
+     if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+       /* replicate high bits into the low bits so full scale is 0xffff */
+       R |= (R >> 12);
+       G |= (G >> 12);
+       B |= (B >> 12);
+     }
+
+     d[i * 4 + 0] = 0xffff;
+     d[i * 4 + 1] = R;
+     d[i * 4 + 2] = G;
+     d[i * 4 + 3] = B;
+   }
+ }
+
+ /* Pack ARGB64 into little-endian 12-bit planar GBR: drop the 4 low
+  * bits of each component, discard alpha. */
+ static void
+ pack_GBR_12LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+     gint y, gint width)
+ {
+   int j;
+   guint16 *restrict dg = GET_G_LINE (y);
+   guint16 *restrict db = GET_B_LINE (y);
+   guint16 *restrict dr = GET_R_LINE (y);
+   const guint16 *restrict p = src;
+
+   for (j = 0; j < width; j++, p += 4) {
+     GST_WRITE_UINT16_LE (dg + j, p[2] >> 4);
+     GST_WRITE_UINT16_LE (db + j, p[3] >> 4);
+     GST_WRITE_UINT16_LE (dr + j, p[1] >> 4);
+   }
+ }
+
+ #define PACK_GBR_12BE GST_VIDEO_FORMAT_ARGB64, unpack_GBR_12BE, 1, pack_GBR_12BE
+ /* Unpack big-endian 12-bit planar GBR to MSB-aligned ARGB64. */
+ static void
+ unpack_GBR_12BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+   int j;
+   const guint16 *restrict sg = GET_G_LINE (y);
+   const guint16 *restrict sb = GET_B_LINE (y);
+   const guint16 *restrict sr = GET_R_LINE (y);
+   guint16 *restrict out = dest;
+   guint16 g16, b16, r16;
+
+   sg += x;
+   sb += x;
+   sr += x;
+
+   for (j = 0; j < width; j++, out += 4) {
+     /* left-justify the 12-bit samples in 16 bits */
+     g16 = GST_READ_UINT16_BE (sg + j) << 4;
+     b16 = GST_READ_UINT16_BE (sb + j) << 4;
+     r16 = GST_READ_UINT16_BE (sr + j) << 4;
+
+     if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+       /* replicate high bits so full scale reaches 0xffff */
+       r16 |= (r16 >> 12);
+       g16 |= (g16 >> 12);
+       b16 |= (b16 >> 12);
+     }
+
+     out[0] = 0xffff;
+     out[1] = r16;
+     out[2] = g16;
+     out[3] = b16;
+   }
+ }
+
+ /* Pack ARGB64 into big-endian 12-bit planar GBR: drop the 4 low bits
+  * of each component, discard alpha. */
+ static void
+ pack_GBR_12BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+     gint y, gint width)
+ {
+   int j;
+   guint16 *restrict dg = GET_G_LINE (y);
+   guint16 *restrict db = GET_B_LINE (y);
+   guint16 *restrict dr = GET_R_LINE (y);
+   const guint16 *restrict p = src;
+
+   for (j = 0; j < width; j++, p += 4) {
+     GST_WRITE_UINT16_BE (dg + j, p[2] >> 4);
+     GST_WRITE_UINT16_BE (db + j, p[3] >> 4);
+     GST_WRITE_UINT16_BE (dr + j, p[1] >> 4);
+   }
+ }
+
+ #define PACK_GBRA_12LE GST_VIDEO_FORMAT_ARGB64, unpack_GBRA_12LE, 1, pack_GBRA_12LE
+ /* Unpack little-endian 12-bit planar GBRA to MSB-aligned ARGB64. */
+ static void
+ unpack_GBRA_12LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+   int i;
+   /* 'restrict' added for consistency with unpack_GBRA_12BE: the four
+    * source planes and the destination never alias */
+   const guint16 *restrict sg = GET_G_LINE (y);
+   const guint16 *restrict sb = GET_B_LINE (y);
+   const guint16 *restrict sr = GET_R_LINE (y);
+   const guint16 *restrict sa = GET_A_LINE (y);
+   guint16 *restrict d = dest, G, B, R, A;
+
+   sg += x;
+   sb += x;
+   sr += x;
+   sa += x;
+
+   for (i = 0; i < width; i++) {
+     /* left-justify the 12-bit samples in 16 bits */
+     G = GST_READ_UINT16_LE (sg + i) << 4;
+     B = GST_READ_UINT16_LE (sb + i) << 4;
+     R = GST_READ_UINT16_LE (sr + i) << 4;
+     A = GST_READ_UINT16_LE (sa + i) << 4;
+
+     if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+       /* replicate high bits into the low bits so full scale is 0xffff */
+       A |= (A >> 12);
+       R |= (R >> 12);
+       G |= (G >> 12);
+       B |= (B >> 12);
+     }
+
+     d[i * 4 + 0] = A;
+     d[i * 4 + 1] = R;
+     d[i * 4 + 2] = G;
+     d[i * 4 + 3] = B;
+   }
+ }
+
+ /* Pack ARGB64 into little-endian 12-bit planar GBRA: drop the 4 low
+  * bits of each component, including alpha. */
+ static void
+ pack_GBRA_12LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+     gint y, gint width)
+ {
+   int j;
+   guint16 *restrict dg = GET_G_LINE (y);
+   guint16 *restrict db = GET_B_LINE (y);
+   guint16 *restrict dr = GET_R_LINE (y);
+   guint16 *restrict da = GET_A_LINE (y);
+   const guint16 *restrict p = src;
+
+   for (j = 0; j < width; j++, p += 4) {
+     GST_WRITE_UINT16_LE (dg + j, p[2] >> 4);
+     GST_WRITE_UINT16_LE (db + j, p[3] >> 4);
+     GST_WRITE_UINT16_LE (dr + j, p[1] >> 4);
+     GST_WRITE_UINT16_LE (da + j, p[0] >> 4);
+   }
+ }
+
+ #define PACK_GBRA_12BE GST_VIDEO_FORMAT_ARGB64, unpack_GBRA_12BE, 1, pack_GBRA_12BE
+ /* Unpack big-endian 12-bit planar GBRA to MSB-aligned ARGB64. */
+ static void
+ unpack_GBRA_12BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+   int j;
+   const guint16 *restrict sg = GET_G_LINE (y);
+   const guint16 *restrict sb = GET_B_LINE (y);
+   const guint16 *restrict sr = GET_R_LINE (y);
+   const guint16 *restrict sa = GET_A_LINE (y);
+   guint16 *restrict out = dest;
+   guint16 g16, b16, r16, a16;
+
+   sg += x;
+   sb += x;
+   sr += x;
+   sa += x;
+
+   for (j = 0; j < width; j++, out += 4) {
+     /* left-justify the 12-bit samples in 16 bits */
+     g16 = GST_READ_UINT16_BE (sg + j) << 4;
+     b16 = GST_READ_UINT16_BE (sb + j) << 4;
+     r16 = GST_READ_UINT16_BE (sr + j) << 4;
+     a16 = GST_READ_UINT16_BE (sa + j) << 4;
+
+     if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+       /* replicate high bits so full scale reaches 0xffff */
+       r16 |= (r16 >> 12);
+       g16 |= (g16 >> 12);
+       b16 |= (b16 >> 12);
+       a16 |= (a16 >> 12);
+     }
+
+     out[0] = a16;
+     out[1] = r16;
+     out[2] = g16;
+     out[3] = b16;
+   }
+ }
+
+ /* Pack ARGB64 into big-endian 12-bit planar GBRA: drop the 4 low bits
+  * of each component, including alpha. */
+ static void
+ pack_GBRA_12BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+     gint y, gint width)
+ {
+   int j;
+   guint16 *restrict dg = GET_G_LINE (y);
+   guint16 *restrict db = GET_B_LINE (y);
+   guint16 *restrict dr = GET_R_LINE (y);
+   guint16 *restrict da = GET_A_LINE (y);
+   const guint16 *restrict p = src;
+
+   for (j = 0; j < width; j++, p += 4) {
+     GST_WRITE_UINT16_BE (dg + j, p[2] >> 4);
+     GST_WRITE_UINT16_BE (db + j, p[3] >> 4);
+     GST_WRITE_UINT16_BE (dr + j, p[1] >> 4);
+     GST_WRITE_UINT16_BE (da + j, p[0] >> 4);
+   }
+ }
+
+ #define PACK_Y444_10LE GST_VIDEO_FORMAT_AYUV64, unpack_Y444_10LE, 1, pack_Y444_10LE
+ /* Unpack little-endian 10-bit 4:4:4 planar YUV to AYUV64. */
+ static void
+ unpack_Y444_10LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+   int i;
+   /* 'const' added for consistency with unpack_Y444_10BE: the source
+    * planes are only read here */
+   const guint16 *restrict sy = GET_Y_LINE (y);
+   const guint16 *restrict su = GET_U_LINE (y);
+   const guint16 *restrict sv = GET_V_LINE (y);
+   guint16 *restrict d = dest, Y, U, V;
+
+   sy += x;
+   su += x;
+   sv += x;
+
+   for (i = 0; i < width; i++) {
+     /* left-justify the 10-bit samples in 16 bits */
+     Y = GST_READ_UINT16_LE (sy + i) << 6;
+     U = GST_READ_UINT16_LE (su + i) << 6;
+     V = GST_READ_UINT16_LE (sv + i) << 6;
+
+     if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+       /* replicate high bits into the low bits so full scale is 0xffff */
+       Y |= (Y >> 10);
+       U |= (U >> 10);
+       V |= (V >> 10);
+     }
+
+     d[i * 4 + 0] = 0xffff;
+     d[i * 4 + 1] = Y;
+     d[i * 4 + 2] = U;
+     d[i * 4 + 3] = V;
+   }
+ }
+
+ /* Pack AYUV64 into little-endian 10-bit 4:4:4 planar YUV: drop the 6
+  * low bits of each component, discard alpha. */
+ static void
+ pack_Y444_10LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+     gint y, gint width)
+ {
+   int j;
+   guint16 *restrict dy = GET_Y_LINE (y);
+   guint16 *restrict du = GET_U_LINE (y);
+   guint16 *restrict dv = GET_V_LINE (y);
+   const guint16 *restrict p = src;
+
+   for (j = 0; j < width; j++, p += 4) {
+     GST_WRITE_UINT16_LE (dy + j, p[1] >> 6);
+     GST_WRITE_UINT16_LE (du + j, p[2] >> 6);
+     GST_WRITE_UINT16_LE (dv + j, p[3] >> 6);
+   }
+ }
+
+ #define PACK_Y444_10BE GST_VIDEO_FORMAT_AYUV64, unpack_Y444_10BE, 1, pack_Y444_10BE
+ /* Unpack big-endian 10-bit 4:4:4 planar YUV to AYUV64. */
+ static void
+ unpack_Y444_10BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+   int j;
+   const guint16 *restrict sy = GET_Y_LINE (y);
+   const guint16 *restrict su = GET_U_LINE (y);
+   const guint16 *restrict sv = GET_V_LINE (y);
+   guint16 *restrict out = dest;
+   guint16 y16, u16, v16;
+
+   sy += x;
+   su += x;
+   sv += x;
+
+   for (j = 0; j < width; j++, out += 4) {
+     /* left-justify the 10-bit samples in 16 bits */
+     y16 = GST_READ_UINT16_BE (sy + j) << 6;
+     u16 = GST_READ_UINT16_BE (su + j) << 6;
+     v16 = GST_READ_UINT16_BE (sv + j) << 6;
+
+     if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+       /* replicate high bits so full scale reaches 0xffff */
+       y16 |= (y16 >> 10);
+       u16 |= (u16 >> 10);
+       v16 |= (v16 >> 10);
+     }
+
+     out[0] = 0xffff;
+     out[1] = y16;
+     out[2] = u16;
+     out[3] = v16;
+   }
+ }
+
+ /* Pack AYUV64 into big-endian 10-bit 4:4:4 planar YUV: drop the 6 low
+  * bits of each component, discard alpha. */
+ static void
+ pack_Y444_10BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+     gint y, gint width)
+ {
+   int j;
+   guint16 *restrict dy = GET_Y_LINE (y);
+   guint16 *restrict du = GET_U_LINE (y);
+   guint16 *restrict dv = GET_V_LINE (y);
+   const guint16 *restrict p = src;
+
+   for (j = 0; j < width; j++, p += 4) {
+     GST_WRITE_UINT16_BE (dy + j, p[1] >> 6);
+     GST_WRITE_UINT16_BE (du + j, p[2] >> 6);
+     GST_WRITE_UINT16_BE (dv + j, p[3] >> 6);
+   }
+ }
+
+ #define PACK_I420_10LE GST_VIDEO_FORMAT_AYUV64, unpack_I420_10LE, 1, pack_I420_10LE
+ /* Unpack little-endian 10-bit 4:2:0 planar YUV to AYUV64. */
+ static void
+ unpack_I420_10LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+   int i;
+   gint uv = GET_UV_420 (y, flags);
+   const guint16 *restrict sy = GET_Y_LINE (y);
+   const guint16 *restrict su = GET_U_LINE (uv);
+   const guint16 *restrict sv = GET_V_LINE (uv);
+   guint16 *restrict d = dest, Y, U, V;
+
+   sy += x;
+   su += x >> 1;
+   sv += x >> 1;
+
+   for (i = 0; i < width; i++) {
+     /* left-justify the 10-bit samples in 16 bits */
+     Y = GST_READ_UINT16_LE (sy + i) << 6;
+     U = GST_READ_UINT16_LE (su + (i >> 1)) << 6;
+     V = GST_READ_UINT16_LE (sv + (i >> 1)) << 6;
+
+     if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+       /* replicate high bits into the low bits so full scale is 0xffff */
+       Y |= (Y >> 10);
+       U |= (U >> 10);
+       V |= (V >> 10);
+     }
+
+     d[i * 4 + 0] = 0xffff;
+     d[i * 4 + 1] = Y;
+     d[i * 4 + 2] = U;
+     d[i * 4 + 3] = V;
+
+     /* odd x origin: after the first pixel, bump the chroma pointers so
+      * the (i >> 1) indexing stays phase-aligned; x is zeroed so this
+      * branch runs at most once */
+     if (x & 1) {
+       x = 0;
+       su++;
+       sv++;
+     }
+   }
+ }
+
+ /* Pack AYUV64 into little-endian 10-bit 4:2:0 planar YUV; chroma is
+  * taken from even pixels and only stored on chroma-carrying lines. */
+ static void
+ pack_I420_10LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+     gint y, gint width)
+ {
+   int i;
+   gint uv = GET_UV_420 (y, flags);
+   guint16 *restrict dy = GET_Y_LINE (y);
+   guint16 *restrict du = GET_U_LINE (uv);
+   guint16 *restrict dv = GET_V_LINE (uv);
+   guint16 Y0, Y1, U, V;
+   const guint16 *restrict s = src;
+
+   if (IS_CHROMA_LINE_420 (y, flags)) {
+     /* chroma line: store two luma samples plus one U/V per pair */
+     for (i = 0; i < width - 1; i += 2) {
+       Y0 = s[i * 4 + 1] >> 6;
+       Y1 = s[i * 4 + 5] >> 6;
+       U = s[i * 4 + 2] >> 6;
+       V = s[i * 4 + 3] >> 6;
+
+       GST_WRITE_UINT16_LE (dy + i + 0, Y0);
+       GST_WRITE_UINT16_LE (dy + i + 1, Y1);
+       GST_WRITE_UINT16_LE (du + (i >> 1), U);
+       GST_WRITE_UINT16_LE (dv + (i >> 1), V);
+     }
+     /* odd-width tail: last pixel still contributes its own chroma */
+     if (i == width - 1) {
+       Y0 = s[i * 4 + 1] >> 6;
+       U = s[i * 4 + 2] >> 6;
+       V = s[i * 4 + 3] >> 6;
+
+       GST_WRITE_UINT16_LE (dy + i, Y0);
+       GST_WRITE_UINT16_LE (du + (i >> 1), U);
+       GST_WRITE_UINT16_LE (dv + (i >> 1), V);
+     }
+   } else {
+     /* non-chroma line: only luma is stored, chroma is dropped */
+     for (i = 0; i < width; i++) {
+       Y0 = s[i * 4 + 1] >> 6;
+       GST_WRITE_UINT16_LE (dy + i, Y0);
+     }
+   }
+ }
+
+ #define PACK_I420_10BE GST_VIDEO_FORMAT_AYUV64, unpack_I420_10BE, 1, pack_I420_10BE
+ /* Unpack big-endian 10-bit 4:2:0 planar YUV to AYUV64. */
+ static void
+ unpack_I420_10BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+   int i;
+   gint uv = GET_UV_420 (y, flags);
+   const guint16 *restrict sy = GET_Y_LINE (y);
+   const guint16 *restrict su = GET_U_LINE (uv);
+   const guint16 *restrict sv = GET_V_LINE (uv);
+   guint16 *restrict d = dest, Y, U, V;
+
+   sy += x;
+   su += x >> 1;
+   sv += x >> 1;
+
+   for (i = 0; i < width; i++) {
+     /* left-justify the 10-bit samples in 16 bits */
+     Y = GST_READ_UINT16_BE (sy + i) << 6;
+     U = GST_READ_UINT16_BE (su + (i >> 1)) << 6;
+     V = GST_READ_UINT16_BE (sv + (i >> 1)) << 6;
+
+     if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+       /* replicate high bits into the low bits so full scale is 0xffff */
+       Y |= (Y >> 10);
+       U |= (U >> 10);
+       V |= (V >> 10);
+     }
+
+     d[i * 4 + 0] = 0xffff;
+     d[i * 4 + 1] = Y;
+     d[i * 4 + 2] = U;
+     d[i * 4 + 3] = V;
+
+     /* odd x origin: after the first pixel, bump the chroma pointers so
+      * the (i >> 1) indexing stays phase-aligned; x is zeroed so this
+      * branch runs at most once */
+     if (x & 1) {
+       x = 0;
+       su++;
+       sv++;
+     }
+   }
+ }
+
+ /* Pack AYUV64 into big-endian 10-bit 4:2:0 planar YUV; chroma is
+  * taken from even pixels and only stored on chroma-carrying lines. */
+ static void
+ pack_I420_10BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+     gint y, gint width)
+ {
+   int i;
+   gint uv = GET_UV_420 (y, flags);
+   guint16 *restrict dy = GET_Y_LINE (y);
+   guint16 *restrict du = GET_U_LINE (uv);
+   guint16 *restrict dv = GET_V_LINE (uv);
+   guint16 Y0, Y1, U, V;
+   const guint16 *restrict s = src;
+
+   if (IS_CHROMA_LINE_420 (y, flags)) {
+     /* chroma line: store two luma samples plus one U/V per pair */
+     for (i = 0; i < width - 1; i += 2) {
+       Y0 = s[i * 4 + 1] >> 6;
+       Y1 = s[i * 4 + 5] >> 6;
+       U = s[i * 4 + 2] >> 6;
+       V = s[i * 4 + 3] >> 6;
+
+       GST_WRITE_UINT16_BE (dy + i + 0, Y0);
+       GST_WRITE_UINT16_BE (dy + i + 1, Y1);
+       GST_WRITE_UINT16_BE (du + (i >> 1), U);
+       GST_WRITE_UINT16_BE (dv + (i >> 1), V);
+     }
+     /* odd-width tail: last pixel still contributes its own chroma */
+     if (i == width - 1) {
+       Y0 = s[i * 4 + 1] >> 6;
+       U = s[i * 4 + 2] >> 6;
+       V = s[i * 4 + 3] >> 6;
+
+       GST_WRITE_UINT16_BE (dy + i, Y0);
+       GST_WRITE_UINT16_BE (du + (i >> 1), U);
+       GST_WRITE_UINT16_BE (dv + (i >> 1), V);
+     }
+   } else {
+     /* non-chroma line: only luma is stored, chroma is dropped */
+     for (i = 0; i < width; i++) {
+       Y0 = s[i * 4 + 1] >> 6;
+       GST_WRITE_UINT16_BE (dy + i, Y0);
+     }
+   }
+ }
+
+ #define PACK_I422_10LE GST_VIDEO_FORMAT_AYUV64, unpack_I422_10LE, 1, pack_I422_10LE
+ /* Unpack little-endian 10-bit 4:2:2 planar YUV to AYUV64 (chroma is
+  * horizontally subsampled only, so every line carries chroma). */
+ static void
+ unpack_I422_10LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+   int i;
+   const guint16 *restrict sy = GET_Y_LINE (y);
+   const guint16 *restrict su = GET_U_LINE (y);
+   const guint16 *restrict sv = GET_V_LINE (y);
+   guint16 *restrict d = dest, Y, U, V;
+
+   sy += x;
+   su += x >> 1;
+   sv += x >> 1;
+
+   for (i = 0; i < width; i++) {
+     /* left-justify the 10-bit samples in 16 bits */
+     Y = GST_READ_UINT16_LE (sy + i) << 6;
+     U = GST_READ_UINT16_LE (su + (i >> 1)) << 6;
+     V = GST_READ_UINT16_LE (sv + (i >> 1)) << 6;
+
+     if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+       /* replicate high bits into the low bits so full scale is 0xffff */
+       Y |= (Y >> 10);
+       U |= (U >> 10);
+       V |= (V >> 10);
+     }
+
+     d[i * 4 + 0] = 0xffff;
+     d[i * 4 + 1] = Y;
+     d[i * 4 + 2] = U;
+     d[i * 4 + 3] = V;
+
+     /* odd x origin: after the first pixel, bump the chroma pointers so
+      * the (i >> 1) indexing stays phase-aligned; x is zeroed so this
+      * branch runs at most once */
+     if (x & 1) {
+       x = 0;
+       su++;
+       sv++;
+     }
+   }
+ }
+
+ /* Pack AYUV64 into little-endian 10-bit 4:2:2 planar YUV; chroma is
+  * taken from even pixels. */
+ static void
+ pack_I422_10LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+     gint y, gint width)
+ {
+   int i;
+   guint16 *restrict dy = GET_Y_LINE (y);
+   guint16 *restrict du = GET_U_LINE (y);
+   guint16 *restrict dv = GET_V_LINE (y);
+   guint16 Y0, Y1, U, V;
+   const guint16 *restrict s = src;
+
+   /* main loop: two luma samples plus one U/V per pixel pair */
+   for (i = 0; i < width - 1; i += 2) {
+     Y0 = s[i * 4 + 1] >> 6;
+     Y1 = s[i * 4 + 5] >> 6;
+     U = s[i * 4 + 2] >> 6;
+     V = s[i * 4 + 3] >> 6;
+
+     GST_WRITE_UINT16_LE (dy + i + 0, Y0);
+     GST_WRITE_UINT16_LE (dy + i + 1, Y1);
+     GST_WRITE_UINT16_LE (du + (i >> 1), U);
+     GST_WRITE_UINT16_LE (dv + (i >> 1), V);
+   }
+   /* odd-width tail: last pixel still contributes its own chroma */
+   if (i == width - 1) {
+     Y0 = s[i * 4 + 1] >> 6;
+     U = s[i * 4 + 2] >> 6;
+     V = s[i * 4 + 3] >> 6;
+
+     GST_WRITE_UINT16_LE (dy + i, Y0);
+     GST_WRITE_UINT16_LE (du + (i >> 1), U);
+     GST_WRITE_UINT16_LE (dv + (i >> 1), V);
+   }
+ }
+
+ #define PACK_I422_10BE GST_VIDEO_FORMAT_AYUV64, unpack_I422_10BE, 1, pack_I422_10BE
+ /* Unpack big-endian 10-bit 4:2:2 planar YUV to AYUV64 (chroma is
+  * horizontally subsampled only, so every line carries chroma). */
+ static void
+ unpack_I422_10BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+   int i;
+   const guint16 *restrict sy = GET_Y_LINE (y);
+   const guint16 *restrict su = GET_U_LINE (y);
+   const guint16 *restrict sv = GET_V_LINE (y);
+   guint16 *restrict d = dest, Y, U, V;
+
+   sy += x;
+   su += x >> 1;
+   sv += x >> 1;
+
+   for (i = 0; i < width; i++) {
+     /* left-justify the 10-bit samples in 16 bits */
+     Y = GST_READ_UINT16_BE (sy + i) << 6;
+     U = GST_READ_UINT16_BE (su + (i >> 1)) << 6;
+     V = GST_READ_UINT16_BE (sv + (i >> 1)) << 6;
+
+     if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+       /* replicate high bits into the low bits so full scale is 0xffff */
+       Y |= (Y >> 10);
+       U |= (U >> 10);
+       V |= (V >> 10);
+     }
+
+     d[i * 4 + 0] = 0xffff;
+     d[i * 4 + 1] = Y;
+     d[i * 4 + 2] = U;
+     d[i * 4 + 3] = V;
+
+     /* odd x origin: after the first pixel, bump the chroma pointers so
+      * the (i >> 1) indexing stays phase-aligned; x is zeroed so this
+      * branch runs at most once */
+     if (x & 1) {
+       x = 0;
+       su++;
+       sv++;
+     }
+   }
+ }
+
+ /* Pack AYUV64 into big-endian 10-bit 4:2:2 planar YUV; chroma is
+  * taken from even pixels. */
+ static void
+ pack_I422_10BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+     gint y, gint width)
+ {
+   int i;
+   guint16 *restrict dy = GET_Y_LINE (y);
+   guint16 *restrict du = GET_U_LINE (y);
+   guint16 *restrict dv = GET_V_LINE (y);
+   guint16 Y0, Y1, U, V;
+   const guint16 *restrict s = src;
+
+   /* main loop: two luma samples plus one U/V per pixel pair */
+   for (i = 0; i < width - 1; i += 2) {
+     Y0 = s[i * 4 + 1] >> 6;
+     Y1 = s[i * 4 + 5] >> 6;
+     U = s[i * 4 + 2] >> 6;
+     V = s[i * 4 + 3] >> 6;
+
+     GST_WRITE_UINT16_BE (dy + i + 0, Y0);
+     GST_WRITE_UINT16_BE (dy + i + 1, Y1);
+     GST_WRITE_UINT16_BE (du + (i >> 1), U);
+     GST_WRITE_UINT16_BE (dv + (i >> 1), V);
+   }
+   /* odd-width tail: last pixel still contributes its own chroma */
+   if (i == width - 1) {
+     Y0 = s[i * 4 + 1] >> 6;
+     U = s[i * 4 + 2] >> 6;
+     V = s[i * 4 + 3] >> 6;
+
+     GST_WRITE_UINT16_BE (dy + i, Y0);
+     GST_WRITE_UINT16_BE (du + (i >> 1), U);
+     GST_WRITE_UINT16_BE (dv + (i >> 1), V);
+   }
+ }
+
+ #define PACK_Y444_12LE GST_VIDEO_FORMAT_AYUV64, unpack_Y444_12LE, 1, pack_Y444_12LE
+ /* Unpack little-endian 12-bit 4:4:4 planar YUV to AYUV64. */
+ static void
+ unpack_Y444_12LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+   int i;
+   /* 'const' added for consistency with unpack_Y444_12BE: the source
+    * planes are only read here */
+   const guint16 *restrict sy = GET_Y_LINE (y);
+   const guint16 *restrict su = GET_U_LINE (y);
+   const guint16 *restrict sv = GET_V_LINE (y);
+   guint16 *restrict d = dest, Y, U, V;
+
+   sy += x;
+   su += x;
+   sv += x;
+
+   for (i = 0; i < width; i++) {
+     /* left-justify the 12-bit samples in 16 bits */
+     Y = GST_READ_UINT16_LE (sy + i) << 4;
+     U = GST_READ_UINT16_LE (su + i) << 4;
+     V = GST_READ_UINT16_LE (sv + i) << 4;
+
+     if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+       /* replicate high bits into the low bits so full scale is 0xffff */
+       Y |= (Y >> 12);
+       U |= (U >> 12);
+       V |= (V >> 12);
+     }
+
+     d[i * 4 + 0] = 0xffff;
+     d[i * 4 + 1] = Y;
+     d[i * 4 + 2] = U;
+     d[i * 4 + 3] = V;
+   }
+ }
+
+ /* Pack AYUV64 into little-endian 12-bit 4:4:4 planar YUV: drop the 4
+  * low bits of each component, discard alpha. */
+ static void
+ pack_Y444_12LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+     gint y, gint width)
+ {
+   int j;
+   guint16 *restrict dy = GET_Y_LINE (y);
+   guint16 *restrict du = GET_U_LINE (y);
+   guint16 *restrict dv = GET_V_LINE (y);
+   const guint16 *restrict p = src;
+
+   for (j = 0; j < width; j++, p += 4) {
+     GST_WRITE_UINT16_LE (dy + j, p[1] >> 4);
+     GST_WRITE_UINT16_LE (du + j, p[2] >> 4);
+     GST_WRITE_UINT16_LE (dv + j, p[3] >> 4);
+   }
+ }
+
+ #define PACK_Y444_12BE GST_VIDEO_FORMAT_AYUV64, unpack_Y444_12BE, 1, pack_Y444_12BE
/* Unpack one line of planar 4:4:4 12-bit big-endian YUV into AYUV64
 * (host-order guint16 A,Y,U,V per pixel, alpha forced to 0xffff). */
static void
unpack_Y444_12BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  int i;
  const guint16 *restrict sy = GET_Y_LINE (y);
  const guint16 *restrict su = GET_U_LINE (y);
  const guint16 *restrict sv = GET_V_LINE (y);
  guint16 *restrict d = dest, Y, U, V;

  /* 4:4:4: one chroma sample per luma sample, so all planes skip x */
  sy += x;
  su += x;
  sv += x;

  for (i = 0; i < width; i++) {
    /* widen 12-bit samples to the 16-bit unpack format */
    Y = GST_READ_UINT16_BE (sy + i) << 4;
    U = GST_READ_UINT16_BE (su + i) << 4;
    V = GST_READ_UINT16_BE (sv + i) << 4;

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      /* replicate the top bits into the 4 new low bits so the maximum
       * 12-bit value maps to the maximum 16-bit value */
      Y |= (Y >> 12);
      U |= (U >> 12);
      V |= (V >> 12);
    }

    d[i * 4 + 0] = 0xffff;
    d[i * 4 + 1] = Y;
    d[i * 4 + 2] = U;
    d[i * 4 + 3] = V;
  }
}
+
+ static void
+ pack_Y444_12BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ int i;
+ guint16 *restrict dy = GET_Y_LINE (y);
+ guint16 *restrict du = GET_U_LINE (y);
+ guint16 *restrict dv = GET_V_LINE (y);
+ guint16 Y, U, V;
+ const guint16 *restrict s = src;
+
+ for (i = 0; i < width; i++) {
+ Y = s[i * 4 + 1] >> 4;
+ U = s[i * 4 + 2] >> 4;
+ V = s[i * 4 + 3] >> 4;
+
+ GST_WRITE_UINT16_BE (dy + i, Y);
+ GST_WRITE_UINT16_BE (du + i, U);
+ GST_WRITE_UINT16_BE (dv + i, V);
+ }
+ }
+
+ #define PACK_I420_12LE GST_VIDEO_FORMAT_AYUV64, unpack_I420_12LE, 1, pack_I420_12LE
/* Unpack one line of planar 4:2:0 12-bit little-endian YUV into AYUV64;
 * each chroma sample is shared by two horizontally adjacent luma pixels. */
static void
unpack_I420_12LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  int i;
  gint uv = GET_UV_420 (y, flags);      /* chroma line for this luma line */
  const guint16 *restrict sy = GET_Y_LINE (y);
  const guint16 *restrict su = GET_U_LINE (uv);
  const guint16 *restrict sv = GET_V_LINE (uv);
  guint16 *restrict d = dest, Y, U, V;

  sy += x;
  /* chroma is horizontally subsampled by 2 */
  su += x >> 1;
  sv += x >> 1;

  for (i = 0; i < width; i++) {
    /* widen 12-bit samples to the 16-bit unpack format */
    Y = GST_READ_UINT16_LE (sy + i) << 4;
    U = GST_READ_UINT16_LE (su + (i >> 1)) << 4;
    V = GST_READ_UINT16_LE (sv + (i >> 1)) << 4;

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      /* replicate high bits into the new low bits to cover full range */
      Y |= (Y >> 12);
      U |= (U >> 12);
      V |= (V >> 12);
    }

    d[i * 4 + 0] = 0xffff;
    d[i * 4 + 1] = Y;
    d[i * 4 + 2] = U;
    d[i * 4 + 3] = V;

    if (x & 1) {
      /* we started on an odd pixel: after emitting it, step the chroma
       * pointers once so the (i >> 1) indexing stays aligned; only taken
       * on the first iteration since x is cleared here */
      x = 0;
      su++;
      sv++;
    }
  }
}
+
/* Pack one AYUV64 line into planar 4:2:0 12-bit LE.  Chroma is written only
 * on chroma-carrying lines (vertical 2x subsampling) and is taken from the
 * even pixel of each pair (horizontal 2x subsampling). */
static void
pack_I420_12LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  int i;
  gint uv = GET_UV_420 (y, flags);      /* chroma line for this luma line */
  guint16 *restrict dy = GET_Y_LINE (y);
  guint16 *restrict du = GET_U_LINE (uv);
  guint16 *restrict dv = GET_V_LINE (uv);
  guint16 Y0, Y1, U, V;
  const guint16 *restrict s = src;

  if (IS_CHROMA_LINE_420 (y, flags)) {
    /* luma + chroma line: process pixels in pairs */
    for (i = 0; i < width - 1; i += 2) {
      /* truncate 16-bit AYUV64 components to 12 bits */
      Y0 = s[i * 4 + 1] >> 4;
      Y1 = s[i * 4 + 5] >> 4;
      U = s[i * 4 + 2] >> 4;
      V = s[i * 4 + 3] >> 4;

      GST_WRITE_UINT16_LE (dy + i + 0, Y0);
      GST_WRITE_UINT16_LE (dy + i + 1, Y1);
      GST_WRITE_UINT16_LE (du + (i >> 1), U);
      GST_WRITE_UINT16_LE (dv + (i >> 1), V);
    }
    /* odd width: trailing pixel still contributes luma and chroma */
    if (i == width - 1) {
      Y0 = s[i * 4 + 1] >> 4;
      U = s[i * 4 + 2] >> 4;
      V = s[i * 4 + 3] >> 4;

      GST_WRITE_UINT16_LE (dy + i, Y0);
      GST_WRITE_UINT16_LE (du + (i >> 1), U);
      GST_WRITE_UINT16_LE (dv + (i >> 1), V);
    }
  } else {
    /* luma-only line: chroma of this row is dropped */
    for (i = 0; i < width; i++) {
      Y0 = s[i * 4 + 1] >> 4;
      GST_WRITE_UINT16_LE (dy + i, Y0);
    }
  }
}
+
+ #define PACK_I420_12BE GST_VIDEO_FORMAT_AYUV64, unpack_I420_12BE, 1, pack_I420_12BE
/* Unpack one line of planar 4:2:0 12-bit big-endian YUV into AYUV64;
 * each chroma sample is shared by two horizontally adjacent luma pixels. */
static void
unpack_I420_12BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  int i;
  gint uv = GET_UV_420 (y, flags);      /* chroma line for this luma line */
  const guint16 *restrict sy = GET_Y_LINE (y);
  const guint16 *restrict su = GET_U_LINE (uv);
  const guint16 *restrict sv = GET_V_LINE (uv);
  guint16 *restrict d = dest, Y, U, V;

  sy += x;
  /* chroma is horizontally subsampled by 2 */
  su += x >> 1;
  sv += x >> 1;

  for (i = 0; i < width; i++) {
    /* widen 12-bit samples to the 16-bit unpack format */
    Y = GST_READ_UINT16_BE (sy + i) << 4;
    U = GST_READ_UINT16_BE (su + (i >> 1)) << 4;
    V = GST_READ_UINT16_BE (sv + (i >> 1)) << 4;

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      /* replicate high bits into the new low bits to cover full range */
      Y |= (Y >> 12);
      U |= (U >> 12);
      V |= (V >> 12);
    }

    d[i * 4 + 0] = 0xffff;
    d[i * 4 + 1] = Y;
    d[i * 4 + 2] = U;
    d[i * 4 + 3] = V;

    if (x & 1) {
      /* odd start pixel: realign chroma pointers once so (i >> 1)
       * indexing matches from the next iteration on */
      x = 0;
      su++;
      sv++;
    }
  }
}
+
/* Pack one AYUV64 line into planar 4:2:0 12-bit BE.  Chroma is written only
 * on chroma-carrying lines and is taken from the even pixel of each pair. */
static void
pack_I420_12BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  int i;
  gint uv = GET_UV_420 (y, flags);      /* chroma line for this luma line */
  guint16 *restrict dy = GET_Y_LINE (y);
  guint16 *restrict du = GET_U_LINE (uv);
  guint16 *restrict dv = GET_V_LINE (uv);
  guint16 Y0, Y1, U, V;
  const guint16 *restrict s = src;

  if (IS_CHROMA_LINE_420 (y, flags)) {
    /* luma + chroma line: process pixels in pairs */
    for (i = 0; i < width - 1; i += 2) {
      /* truncate 16-bit AYUV64 components to 12 bits */
      Y0 = s[i * 4 + 1] >> 4;
      Y1 = s[i * 4 + 5] >> 4;
      U = s[i * 4 + 2] >> 4;
      V = s[i * 4 + 3] >> 4;

      GST_WRITE_UINT16_BE (dy + i + 0, Y0);
      GST_WRITE_UINT16_BE (dy + i + 1, Y1);
      GST_WRITE_UINT16_BE (du + (i >> 1), U);
      GST_WRITE_UINT16_BE (dv + (i >> 1), V);
    }
    /* odd width: trailing pixel still contributes luma and chroma */
    if (i == width - 1) {
      Y0 = s[i * 4 + 1] >> 4;
      U = s[i * 4 + 2] >> 4;
      V = s[i * 4 + 3] >> 4;

      GST_WRITE_UINT16_BE (dy + i, Y0);
      GST_WRITE_UINT16_BE (du + (i >> 1), U);
      GST_WRITE_UINT16_BE (dv + (i >> 1), V);
    }
  } else {
    /* luma-only line: chroma of this row is dropped */
    for (i = 0; i < width; i++) {
      Y0 = s[i * 4 + 1] >> 4;
      GST_WRITE_UINT16_BE (dy + i, Y0);
    }
  }
}
+
+ #define PACK_I422_12LE GST_VIDEO_FORMAT_AYUV64, unpack_I422_12LE, 1, pack_I422_12LE
/* Unpack one line of planar 4:2:2 12-bit little-endian YUV into AYUV64;
 * chroma is horizontally subsampled by 2, full vertical resolution. */
static void
unpack_I422_12LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  int i;
  const guint16 *restrict sy = GET_Y_LINE (y);
  const guint16 *restrict su = GET_U_LINE (y);
  const guint16 *restrict sv = GET_V_LINE (y);
  guint16 *restrict d = dest, Y, U, V;

  sy += x;
  /* chroma is horizontally subsampled by 2 */
  su += x >> 1;
  sv += x >> 1;

  for (i = 0; i < width; i++) {
    /* widen 12-bit samples to the 16-bit unpack format */
    Y = GST_READ_UINT16_LE (sy + i) << 4;
    U = GST_READ_UINT16_LE (su + (i >> 1)) << 4;
    V = GST_READ_UINT16_LE (sv + (i >> 1)) << 4;

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      /* replicate high bits into the new low bits to cover full range */
      Y |= (Y >> 12);
      U |= (U >> 12);
      V |= (V >> 12);
    }

    d[i * 4 + 0] = 0xffff;
    d[i * 4 + 1] = Y;
    d[i * 4 + 2] = U;
    d[i * 4 + 3] = V;

    if (x & 1) {
      /* odd start pixel: realign chroma pointers once so (i >> 1)
       * indexing matches from the next iteration on */
      x = 0;
      su++;
      sv++;
    }
  }
}
+
/* Pack one AYUV64 line into planar 4:2:2 12-bit LE; chroma is taken from
 * the even pixel of each horizontal pair. */
static void
pack_I422_12LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  int i;
  guint16 *restrict dy = GET_Y_LINE (y);
  guint16 *restrict du = GET_U_LINE (y);
  guint16 *restrict dv = GET_V_LINE (y);
  guint16 Y0, Y1, U, V;
  const guint16 *restrict s = src;

  /* process pixels in pairs: two luma, one shared chroma */
  for (i = 0; i < width - 1; i += 2) {
    /* truncate 16-bit AYUV64 components to 12 bits */
    Y0 = s[i * 4 + 1] >> 4;
    Y1 = s[i * 4 + 5] >> 4;
    U = s[i * 4 + 2] >> 4;
    V = s[i * 4 + 3] >> 4;

    GST_WRITE_UINT16_LE (dy + i + 0, Y0);
    GST_WRITE_UINT16_LE (dy + i + 1, Y1);
    GST_WRITE_UINT16_LE (du + (i >> 1), U);
    GST_WRITE_UINT16_LE (dv + (i >> 1), V);
  }
  /* odd width: trailing single pixel */
  if (i == width - 1) {
    Y0 = s[i * 4 + 1] >> 4;
    U = s[i * 4 + 2] >> 4;
    V = s[i * 4 + 3] >> 4;

    GST_WRITE_UINT16_LE (dy + i, Y0);
    GST_WRITE_UINT16_LE (du + (i >> 1), U);
    GST_WRITE_UINT16_LE (dv + (i >> 1), V);
  }
}
+
+ #define PACK_I422_12BE GST_VIDEO_FORMAT_AYUV64, unpack_I422_12BE, 1, pack_I422_12BE
/* Unpack one line of planar 4:2:2 12-bit big-endian YUV into AYUV64;
 * chroma is horizontally subsampled by 2, full vertical resolution. */
static void
unpack_I422_12BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  int i;
  const guint16 *restrict sy = GET_Y_LINE (y);
  const guint16 *restrict su = GET_U_LINE (y);
  const guint16 *restrict sv = GET_V_LINE (y);
  guint16 *restrict d = dest, Y, U, V;

  sy += x;
  /* chroma is horizontally subsampled by 2 */
  su += x >> 1;
  sv += x >> 1;

  for (i = 0; i < width; i++) {
    /* widen 12-bit samples to the 16-bit unpack format */
    Y = GST_READ_UINT16_BE (sy + i) << 4;
    U = GST_READ_UINT16_BE (su + (i >> 1)) << 4;
    V = GST_READ_UINT16_BE (sv + (i >> 1)) << 4;

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      /* replicate high bits into the new low bits to cover full range */
      Y |= (Y >> 12);
      U |= (U >> 12);
      V |= (V >> 12);
    }

    d[i * 4 + 0] = 0xffff;
    d[i * 4 + 1] = Y;
    d[i * 4 + 2] = U;
    d[i * 4 + 3] = V;

    if (x & 1) {
      /* odd start pixel: realign chroma pointers once so (i >> 1)
       * indexing matches from the next iteration on */
      x = 0;
      su++;
      sv++;
    }
  }
}
+
/* Pack one AYUV64 line into planar 4:2:2 12-bit BE; chroma is taken from
 * the even pixel of each horizontal pair. */
static void
pack_I422_12BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  int i;
  guint16 *restrict dy = GET_Y_LINE (y);
  guint16 *restrict du = GET_U_LINE (y);
  guint16 *restrict dv = GET_V_LINE (y);
  guint16 Y0, Y1, U, V;
  const guint16 *restrict s = src;

  /* process pixels in pairs: two luma, one shared chroma */
  for (i = 0; i < width - 1; i += 2) {
    /* truncate 16-bit AYUV64 components to 12 bits */
    Y0 = s[i * 4 + 1] >> 4;
    Y1 = s[i * 4 + 5] >> 4;
    U = s[i * 4 + 2] >> 4;
    V = s[i * 4 + 3] >> 4;

    GST_WRITE_UINT16_BE (dy + i + 0, Y0);
    GST_WRITE_UINT16_BE (dy + i + 1, Y1);
    GST_WRITE_UINT16_BE (du + (i >> 1), U);
    GST_WRITE_UINT16_BE (dv + (i >> 1), V);
  }
  /* odd width: trailing single pixel */
  if (i == width - 1) {
    Y0 = s[i * 4 + 1] >> 4;
    U = s[i * 4 + 2] >> 4;
    V = s[i * 4 + 3] >> 4;

    GST_WRITE_UINT16_BE (dy + i, Y0);
    GST_WRITE_UINT16_BE (du + (i >> 1), U);
    GST_WRITE_UINT16_BE (dv + (i >> 1), V);
  }
}
+
+ #define PACK_A444_10LE GST_VIDEO_FORMAT_AYUV64, unpack_A444_10LE, 1, pack_A444_10LE
+ static void
+ unpack_A444_10LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ int i;
+ guint16 *restrict sa = GET_A_LINE (y);
+ guint16 *restrict sy = GET_Y_LINE (y);
+ guint16 *restrict su = GET_U_LINE (y);
+ guint16 *restrict sv = GET_V_LINE (y);
+ guint16 *restrict d = dest, A, Y, U, V;
+
+ sa += x;
+ sy += x;
+ su += x;
+ sv += x;
+
+ for (i = 0; i < width; i++) {
+ A = GST_READ_UINT16_LE (sa + i) << 6;
+ Y = GST_READ_UINT16_LE (sy + i) << 6;
+ U = GST_READ_UINT16_LE (su + i) << 6;
+ V = GST_READ_UINT16_LE (sv + i) << 6;
+
+ if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+ A |= (A >> 10);
+ Y |= (Y >> 10);
+ U |= (U >> 10);
+ V |= (V >> 10);
+ }
+
+ d[i * 4 + 0] = A;
+ d[i * 4 + 1] = Y;
+ d[i * 4 + 2] = U;
+ d[i * 4 + 3] = V;
+ }
+ }
+
+ static void
+ pack_A444_10LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ int i;
+ guint16 *restrict da = GET_A_LINE (y);
+ guint16 *restrict dy = GET_Y_LINE (y);
+ guint16 *restrict du = GET_U_LINE (y);
+ guint16 *restrict dv = GET_V_LINE (y);
+ guint16 A, Y, U, V;
+ const guint16 *restrict s = src;
+
+ for (i = 0; i < width; i++) {
+ A = (s[i * 4 + 0]) >> 6;
+ Y = (s[i * 4 + 1]) >> 6;
+ U = (s[i * 4 + 2]) >> 6;
+ V = (s[i * 4 + 3]) >> 6;
+
+ GST_WRITE_UINT16_LE (da + i, A);
+ GST_WRITE_UINT16_LE (dy + i, Y);
+ GST_WRITE_UINT16_LE (du + i, U);
+ GST_WRITE_UINT16_LE (dv + i, V);
+ }
+ }
+
+ #define PACK_A444_10BE GST_VIDEO_FORMAT_AYUV64, unpack_A444_10BE, 1, pack_A444_10BE
/* Unpack one line of planar 4:4:4:4 10-bit big-endian AYUV into AYUV64
 * (host-order guint16 A,Y,U,V per pixel). */
static void
unpack_A444_10BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  int i;
  const guint16 *restrict sa = GET_A_LINE (y);
  const guint16 *restrict sy = GET_Y_LINE (y);
  const guint16 *restrict su = GET_U_LINE (y);
  const guint16 *restrict sv = GET_V_LINE (y);
  guint16 *restrict d = dest, A, Y, U, V;

  /* 4:4:4:4: one sample per pixel in every plane, so all planes skip x */
  sa += x;
  sy += x;
  su += x;
  sv += x;

  for (i = 0; i < width; i++) {
    /* widen 10-bit samples to the 16-bit unpack format */
    A = GST_READ_UINT16_BE (sa + i) << 6;
    Y = GST_READ_UINT16_BE (sy + i) << 6;
    U = GST_READ_UINT16_BE (su + i) << 6;
    V = GST_READ_UINT16_BE (sv + i) << 6;

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      /* replicate the top bits into the 6 new low bits so the maximum
       * 10-bit value maps to the maximum 16-bit value */
      A |= (A >> 10);
      Y |= (Y >> 10);
      U |= (U >> 10);
      V |= (V >> 10);
    }

    d[i * 4 + 0] = A;
    d[i * 4 + 1] = Y;
    d[i * 4 + 2] = U;
    d[i * 4 + 3] = V;
  }
}
+
+ static void
+ pack_A444_10BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ int i;
+ guint16 *restrict da = GET_A_LINE (y);
+ guint16 *restrict dy = GET_Y_LINE (y);
+ guint16 *restrict du = GET_U_LINE (y);
+ guint16 *restrict dv = GET_V_LINE (y);
+ guint16 A, Y, U, V;
+ const guint16 *restrict s = src;
+
+ for (i = 0; i < width; i++) {
+ A = s[i * 4 + 0] >> 6;
+ Y = s[i * 4 + 1] >> 6;
+ U = s[i * 4 + 2] >> 6;
+ V = s[i * 4 + 3] >> 6;
+
+ GST_WRITE_UINT16_BE (da + i, A);
+ GST_WRITE_UINT16_BE (dy + i, Y);
+ GST_WRITE_UINT16_BE (du + i, U);
+ GST_WRITE_UINT16_BE (dv + i, V);
+ }
+ }
+
+ #define PACK_A420_10LE GST_VIDEO_FORMAT_AYUV64, unpack_A420_10LE, 1, pack_A420_10LE
/* Unpack one line of planar 4:2:0 10-bit little-endian AYUV into AYUV64;
 * alpha and luma are full resolution, chroma is shared by 2x2 pixels. */
static void
unpack_A420_10LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  int i;
  gint uv = GET_UV_420 (y, flags);      /* chroma line for this luma line */
  const guint16 *restrict sa = GET_A_LINE (y);
  const guint16 *restrict sy = GET_Y_LINE (y);
  const guint16 *restrict su = GET_U_LINE (uv);
  const guint16 *restrict sv = GET_V_LINE (uv);
  guint16 *restrict d = dest, A, Y, U, V;

  sa += x;
  sy += x;
  /* chroma is horizontally subsampled by 2 */
  su += x >> 1;
  sv += x >> 1;

  for (i = 0; i < width; i++) {
    /* widen 10-bit samples to the 16-bit unpack format */
    A = GST_READ_UINT16_LE (sa + i) << 6;
    Y = GST_READ_UINT16_LE (sy + i) << 6;
    U = GST_READ_UINT16_LE (su + (i >> 1)) << 6;
    V = GST_READ_UINT16_LE (sv + (i >> 1)) << 6;

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      /* replicate high bits into the new low bits to cover full range */
      A |= (A >> 10);
      Y |= (Y >> 10);
      U |= (U >> 10);
      V |= (V >> 10);
    }

    d[i * 4 + 0] = A;
    d[i * 4 + 1] = Y;
    d[i * 4 + 2] = U;
    d[i * 4 + 3] = V;

    if (x & 1) {
      /* odd start pixel: realign chroma pointers once so (i >> 1)
       * indexing matches from the next iteration on */
      x = 0;
      su++;
      sv++;
    }
  }
}
+
/* Pack one AYUV64 line into planar 4:2:0 10-bit LE AYUV.  Chroma is
 * written only on chroma-carrying lines, from the even pixel of each pair. */
static void
pack_A420_10LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  int i;
  gint uv = GET_UV_420 (y, flags);      /* chroma line for this luma line */
  guint16 *restrict da = GET_A_LINE (y);
  guint16 *restrict dy = GET_Y_LINE (y);
  guint16 *restrict du = GET_U_LINE (uv);
  guint16 *restrict dv = GET_V_LINE (uv);
  guint16 A0, Y0, A1, Y1, U, V;
  const guint16 *restrict s = src;

  if (IS_CHROMA_LINE_420 (y, flags)) {
    /* alpha/luma + chroma line: process pixels in pairs */
    for (i = 0; i < width - 1; i += 2) {
      /* truncate 16-bit AYUV64 components to 10 bits */
      A0 = s[i * 4 + 0] >> 6;
      Y0 = s[i * 4 + 1] >> 6;
      A1 = s[i * 4 + 4] >> 6;
      Y1 = s[i * 4 + 5] >> 6;
      U = s[i * 4 + 2] >> 6;
      V = s[i * 4 + 3] >> 6;

      GST_WRITE_UINT16_LE (da + i + 0, A0);
      GST_WRITE_UINT16_LE (dy + i + 0, Y0);
      GST_WRITE_UINT16_LE (da + i + 1, A1);
      GST_WRITE_UINT16_LE (dy + i + 1, Y1);
      GST_WRITE_UINT16_LE (du + (i >> 1), U);
      GST_WRITE_UINT16_LE (dv + (i >> 1), V);
    }
    /* odd width: trailing single pixel */
    if (i == width - 1) {
      A0 = s[i * 4 + 0] >> 6;
      Y0 = s[i * 4 + 1] >> 6;
      U = s[i * 4 + 2] >> 6;
      V = s[i * 4 + 3] >> 6;

      GST_WRITE_UINT16_LE (da + i, A0);
      GST_WRITE_UINT16_LE (dy + i, Y0);
      GST_WRITE_UINT16_LE (du + (i >> 1), U);
      GST_WRITE_UINT16_LE (dv + (i >> 1), V);
    }
  } else {
    /* alpha/luma-only line: chroma of this row is dropped */
    for (i = 0; i < width; i++) {
      A0 = s[i * 4 + 0] >> 6;
      Y0 = s[i * 4 + 1] >> 6;
      GST_WRITE_UINT16_LE (da + i, A0);
      GST_WRITE_UINT16_LE (dy + i, Y0);
    }
  }
}
+
+ #define PACK_A420_10BE GST_VIDEO_FORMAT_AYUV64, unpack_A420_10BE, 1, pack_A420_10BE
/* Unpack one line of planar 4:2:0 10-bit big-endian AYUV into AYUV64;
 * alpha and luma are full resolution, chroma is shared by 2x2 pixels. */
static void
unpack_A420_10BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  int i;
  gint uv = GET_UV_420 (y, flags);      /* chroma line for this luma line */
  const guint16 *restrict sa = GET_A_LINE (y);
  const guint16 *restrict sy = GET_Y_LINE (y);
  const guint16 *restrict su = GET_U_LINE (uv);
  const guint16 *restrict sv = GET_V_LINE (uv);
  guint16 *restrict d = dest, A, Y, U, V;

  sa += x;
  sy += x;
  /* chroma is horizontally subsampled by 2 */
  su += x >> 1;
  sv += x >> 1;

  for (i = 0; i < width; i++) {
    /* widen 10-bit samples to the 16-bit unpack format */
    A = GST_READ_UINT16_BE (sa + i) << 6;
    Y = GST_READ_UINT16_BE (sy + i) << 6;
    U = GST_READ_UINT16_BE (su + (i >> 1)) << 6;
    V = GST_READ_UINT16_BE (sv + (i >> 1)) << 6;

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      /* replicate high bits into the new low bits to cover full range */
      A |= (A >> 10);
      Y |= (Y >> 10);
      U |= (U >> 10);
      V |= (V >> 10);
    }

    d[i * 4 + 0] = A;
    d[i * 4 + 1] = Y;
    d[i * 4 + 2] = U;
    d[i * 4 + 3] = V;

    if (x & 1) {
      /* odd start pixel: realign chroma pointers once so (i >> 1)
       * indexing matches from the next iteration on */
      x = 0;
      su++;
      sv++;
    }
  }
}
+
/* Pack one AYUV64 line into planar 4:2:0 10-bit BE AYUV.  Chroma is
 * written only on chroma-carrying lines, from the even pixel of each pair. */
static void
pack_A420_10BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  int i;
  gint uv = GET_UV_420 (y, flags);      /* chroma line for this luma line */
  guint16 *restrict da = GET_A_LINE (y);
  guint16 *restrict dy = GET_Y_LINE (y);
  guint16 *restrict du = GET_U_LINE (uv);
  guint16 *restrict dv = GET_V_LINE (uv);
  guint16 A0, Y0, A1, Y1, U, V;
  const guint16 *restrict s = src;

  if (IS_CHROMA_LINE_420 (y, flags)) {
    /* alpha/luma + chroma line: process pixels in pairs */
    for (i = 0; i < width - 1; i += 2) {
      /* truncate 16-bit AYUV64 components to 10 bits */
      A0 = s[i * 4 + 0] >> 6;
      Y0 = s[i * 4 + 1] >> 6;
      A1 = s[i * 4 + 4] >> 6;
      Y1 = s[i * 4 + 5] >> 6;
      U = s[i * 4 + 2] >> 6;
      V = s[i * 4 + 3] >> 6;

      GST_WRITE_UINT16_BE (da + i + 0, A0);
      GST_WRITE_UINT16_BE (dy + i + 0, Y0);
      GST_WRITE_UINT16_BE (da + i + 1, A1);
      GST_WRITE_UINT16_BE (dy + i + 1, Y1);
      GST_WRITE_UINT16_BE (du + (i >> 1), U);
      GST_WRITE_UINT16_BE (dv + (i >> 1), V);
    }
    /* odd width: trailing single pixel */
    if (i == width - 1) {
      A0 = s[i * 4 + 0] >> 6;
      Y0 = s[i * 4 + 1] >> 6;
      U = s[i * 4 + 2] >> 6;
      V = s[i * 4 + 3] >> 6;

      GST_WRITE_UINT16_BE (da + i, A0);
      GST_WRITE_UINT16_BE (dy + i, Y0);
      GST_WRITE_UINT16_BE (du + (i >> 1), U);
      GST_WRITE_UINT16_BE (dv + (i >> 1), V);
    }
  } else {
    /* alpha/luma-only line: chroma of this row is dropped */
    for (i = 0; i < width; i++) {
      A0 = s[i * 4 + 0] >> 6;
      Y0 = s[i * 4 + 1] >> 6;
      GST_WRITE_UINT16_BE (da + i, A0);
      GST_WRITE_UINT16_BE (dy + i, Y0);
    }
  }
}
+
+ #define PACK_A422_10LE GST_VIDEO_FORMAT_AYUV64, unpack_A422_10LE, 1, pack_A422_10LE
/* Unpack one line of planar 4:2:2 10-bit little-endian AYUV into AYUV64;
 * alpha and luma are full resolution, chroma is horizontally halved. */
static void
unpack_A422_10LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  int i;
  const guint16 *restrict sa = GET_A_LINE (y);
  const guint16 *restrict sy = GET_Y_LINE (y);
  const guint16 *restrict su = GET_U_LINE (y);
  const guint16 *restrict sv = GET_V_LINE (y);
  guint16 *restrict d = dest, A, Y, U, V;

  sa += x;
  sy += x;
  /* chroma is horizontally subsampled by 2 */
  su += x >> 1;
  sv += x >> 1;

  for (i = 0; i < width; i++) {
    /* widen 10-bit samples to the 16-bit unpack format */
    A = GST_READ_UINT16_LE (sa + i) << 6;
    Y = GST_READ_UINT16_LE (sy + i) << 6;
    U = GST_READ_UINT16_LE (su + (i >> 1)) << 6;
    V = GST_READ_UINT16_LE (sv + (i >> 1)) << 6;

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      /* replicate high bits into the new low bits to cover full range */
      A |= (A >> 10);
      Y |= (Y >> 10);
      U |= (U >> 10);
      V |= (V >> 10);
    }

    d[i * 4 + 0] = A;
    d[i * 4 + 1] = Y;
    d[i * 4 + 2] = U;
    d[i * 4 + 3] = V;

    if (x & 1) {
      /* odd start pixel: realign chroma pointers once so (i >> 1)
       * indexing matches from the next iteration on */
      x = 0;
      su++;
      sv++;
    }
  }
}
+
/* Pack one AYUV64 line into planar 4:2:2 10-bit LE AYUV; chroma is taken
 * from the even pixel of each horizontal pair. */
static void
pack_A422_10LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  int i;
  guint16 *restrict da = GET_A_LINE (y);
  guint16 *restrict dy = GET_Y_LINE (y);
  guint16 *restrict du = GET_U_LINE (y);
  guint16 *restrict dv = GET_V_LINE (y);
  guint16 A0, Y0, A1, Y1, U, V;
  const guint16 *restrict s = src;

  /* process pixels in pairs: two alpha/luma, one shared chroma */
  for (i = 0; i < width - 1; i += 2) {
    /* truncate 16-bit AYUV64 components to 10 bits */
    A0 = s[i * 4 + 0] >> 6;
    Y0 = s[i * 4 + 1] >> 6;
    A1 = s[i * 4 + 4] >> 6;
    Y1 = s[i * 4 + 5] >> 6;
    U = s[i * 4 + 2] >> 6;
    V = s[i * 4 + 3] >> 6;

    GST_WRITE_UINT16_LE (da + i + 0, A0);
    GST_WRITE_UINT16_LE (dy + i + 0, Y0);
    GST_WRITE_UINT16_LE (da + i + 1, A1);
    GST_WRITE_UINT16_LE (dy + i + 1, Y1);
    GST_WRITE_UINT16_LE (du + (i >> 1), U);
    GST_WRITE_UINT16_LE (dv + (i >> 1), V);
  }
  /* odd width: trailing single pixel */
  if (i == width - 1) {
    A0 = s[i * 4 + 0] >> 6;
    Y0 = s[i * 4 + 1] >> 6;
    U = s[i * 4 + 2] >> 6;
    V = s[i * 4 + 3] >> 6;

    GST_WRITE_UINT16_LE (da + i, A0);
    GST_WRITE_UINT16_LE (dy + i, Y0);
    GST_WRITE_UINT16_LE (du + (i >> 1), U);
    GST_WRITE_UINT16_LE (dv + (i >> 1), V);
  }
}
+
+ #define PACK_A422_10BE GST_VIDEO_FORMAT_AYUV64, unpack_A422_10BE, 1, pack_A422_10BE
/* Unpack one line of planar 4:2:2 10-bit big-endian AYUV into AYUV64;
 * alpha and luma are full resolution, chroma is horizontally halved. */
static void
unpack_A422_10BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  int i;
  const guint16 *restrict sa = GET_A_LINE (y);
  const guint16 *restrict sy = GET_Y_LINE (y);
  const guint16 *restrict su = GET_U_LINE (y);
  const guint16 *restrict sv = GET_V_LINE (y);
  guint16 *restrict d = dest, A, Y, U, V;

  sa += x;
  sy += x;
  /* chroma is horizontally subsampled by 2 */
  su += x >> 1;
  sv += x >> 1;

  for (i = 0; i < width; i++) {
    /* widen 10-bit samples to the 16-bit unpack format */
    A = GST_READ_UINT16_BE (sa + i) << 6;
    Y = GST_READ_UINT16_BE (sy + i) << 6;
    U = GST_READ_UINT16_BE (su + (i >> 1)) << 6;
    V = GST_READ_UINT16_BE (sv + (i >> 1)) << 6;

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      /* replicate high bits into the new low bits to cover full range */
      A |= (A >> 10);
      Y |= (Y >> 10);
      U |= (U >> 10);
      V |= (V >> 10);
    }

    d[i * 4 + 0] = A;
    d[i * 4 + 1] = Y;
    d[i * 4 + 2] = U;
    d[i * 4 + 3] = V;

    if (x & 1) {
      /* odd start pixel: realign chroma pointers once so (i >> 1)
       * indexing matches from the next iteration on */
      x = 0;
      su++;
      sv++;
    }
  }
}
+
/* Pack one AYUV64 line into planar 4:2:2 10-bit BE AYUV; chroma is taken
 * from the even pixel of each horizontal pair. */
static void
pack_A422_10BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  int i;
  guint16 *restrict da = GET_A_LINE (y);
  guint16 *restrict dy = GET_Y_LINE (y);
  guint16 *restrict du = GET_U_LINE (y);
  guint16 *restrict dv = GET_V_LINE (y);
  guint16 A0, Y0, A1, Y1, U, V;
  const guint16 *restrict s = src;

  /* process pixels in pairs: two alpha/luma, one shared chroma */
  for (i = 0; i < width - 1; i += 2) {
    /* truncate 16-bit AYUV64 components to 10 bits */
    A0 = s[i * 4 + 0] >> 6;
    Y0 = s[i * 4 + 1] >> 6;
    A1 = s[i * 4 + 4] >> 6;
    Y1 = s[i * 4 + 5] >> 6;
    U = s[i * 4 + 2] >> 6;
    V = s[i * 4 + 3] >> 6;

    GST_WRITE_UINT16_BE (da + i + 0, A0);
    GST_WRITE_UINT16_BE (dy + i + 0, Y0);
    GST_WRITE_UINT16_BE (da + i + 1, A1);
    GST_WRITE_UINT16_BE (dy + i + 1, Y1);
    GST_WRITE_UINT16_BE (du + (i >> 1), U);
    GST_WRITE_UINT16_BE (dv + (i >> 1), V);
  }
  /* odd width: trailing single pixel */
  if (i == width - 1) {
    A0 = s[i * 4 + 0] >> 6;
    Y0 = s[i * 4 + 1] >> 6;
    U = s[i * 4 + 2] >> 6;
    V = s[i * 4 + 3] >> 6;

    GST_WRITE_UINT16_BE (da + i, A0);
    GST_WRITE_UINT16_BE (dy + i, Y0);
    GST_WRITE_UINT16_BE (du + (i >> 1), U);
    GST_WRITE_UINT16_BE (dv + (i >> 1), V);
  }
}
+
/* Resolve the memory location of the tile at tile coordinates (tx, ty) for
 * tiled NV12: fills tile_data[0]/[1] with pointers to the Y and UV tile and
 * sets both tile strides to tile_width.  ts is log2 of the tile size in
 * bytes, so (index << ts) converts a tile index into a byte offset. */
static void
get_tile_NV12 (gint tile_width, gint ts, gint tx, gint ty,
    GstVideoTileMode mode,
    const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES],
    gpointer tile_data[GST_VIDEO_MAX_PLANES],
    gint tile_stride[GST_VIDEO_MAX_PLANES])
{
  gsize offset;

  /* index of Y tile */
  offset = gst_video_tile_get_index (mode,
      tx, ty, GST_VIDEO_TILE_X_TILES (stride[0]),
      GST_VIDEO_TILE_Y_TILES (stride[0]));
  offset <<= ts;
  tile_data[0] = ((guint8 *) data[0]) + offset;

  /* index of UV tile; one UV tile covers two Y tile rows (ty >> 1) */
  offset = gst_video_tile_get_index (mode,
      tx, ty >> 1, GST_VIDEO_TILE_X_TILES (stride[1]),
      GST_VIDEO_TILE_Y_TILES (stride[1]));
  offset <<= ts;
  /* On odd rows we return the second part of the UV tile */
  offset |= (ty & 1) << (ts - 1);
  tile_data[1] = ((guint8 *) data[1]) + offset;

  tile_stride[0] = tile_stride[1] = tile_width;
}
+
+ #define PACK_NV12_TILED GST_VIDEO_FORMAT_AYUV, unpack_NV12_TILED, 1, pack_NV12_TILED
/* Unpack one line of tiled NV12 into AYUV by walking the tiles that cover
 * [x, x+width) and delegating each tile's span to the linear NV12 unpack
 * function with per-tile data pointers and strides. */
static void
unpack_NV12_TILED (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  const GstVideoFormatInfo *unpack_info, *finfo;
  guint8 *line = dest;
  gint ws, hs, ts, tile_width;
  gint ntx, tx, ty;
  gint unpack_pstride;

  /* tile dimensions as log2: ws = width shift, hs = height shift,
   * ts = log2 of the tile size in bytes */
  ws = GST_VIDEO_FORMAT_INFO_TILE_WS (info);
  hs = GST_VIDEO_FORMAT_INFO_TILE_HS (info);
  ts = ws + hs;

  tile_width = 1 << ws;

  /* we reuse these unpack functions */
  finfo = gst_video_format_get_info (GST_VIDEO_FORMAT_NV12);

  /* get pstride of unpacked format */
  unpack_info = gst_video_format_get_info (info->unpack_format);
  unpack_pstride = GST_VIDEO_FORMAT_INFO_PSTRIDE (unpack_info, 0);

  /* first x tile to convert */
  tx = x >> ws;
  /* Last tile to convert */
  ntx = ((x + width - 1) >> ws) + 1;
  /* The row we are going to convert */
  ty = y >> hs;

  /* y position in a tile */
  y = y & ((1 << hs) - 1);
  /* x position in a tile */
  x = x & (tile_width - 1);

  for (; tx < ntx; tx++) {
    gpointer tdata[GST_VIDEO_MAX_PLANES];
    gint tstride[GST_VIDEO_MAX_PLANES];
    gint unpack_width;

    get_tile_NV12 (tile_width, ts, tx, ty, info->tile_mode,
        data, stride, tdata, tstride);

    /* the number of bytes left to unpack */
    unpack_width = MIN (width - x, tile_width - x);

    finfo->unpack_func (finfo, flags, line, tdata, tstride, x, y, unpack_width);

    /* only the first tile can start mid-tile; subsequent ones start at 0 */
    x = 0;
    width -= unpack_width;
    line += unpack_width * unpack_pstride;
  }
}
+
/* Pack one AYUV line into tiled NV12 by walking the tiles that cover
 * [0, width) and delegating each tile's span to the linear NV12 pack
 * function with per-tile data pointers and strides. */
static void
pack_NV12_TILED (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  const GstVideoFormatInfo *pack_info, *finfo;
  guint8 *line = src;
  gint ws, hs, ts, tile_width;
  gint ntx, tx, ty;
  gint pack_pstride;

  /* tile dimensions as log2: ws = width shift, hs = height shift,
   * ts = log2 of the tile size in bytes */
  ws = GST_VIDEO_FORMAT_INFO_TILE_WS (info);
  hs = GST_VIDEO_FORMAT_INFO_TILE_HS (info);
  ts = ws + hs;

  tile_width = 1 << ws;

  /* we reuse these pack functions */
  finfo = gst_video_format_get_info (GST_VIDEO_FORMAT_NV12);

  /* get pstride of packed format */
  pack_info = gst_video_format_get_info (info->unpack_format);
  pack_pstride = GST_VIDEO_FORMAT_INFO_PSTRIDE (pack_info, 0);

  /* Last tile to convert */
  ntx = ((width - 1) >> ws) + 1;
  /* The row we are going to convert */
  ty = y >> hs;

  /* y position in a tile */
  y = y & ((1 << hs) - 1);

  for (tx = 0; tx < ntx; tx++) {
    gpointer tdata[GST_VIDEO_MAX_PLANES];
    gint tstride[GST_VIDEO_MAX_PLANES];
    gint pack_width;

    get_tile_NV12 (tile_width, ts, tx, ty, info->tile_mode,
        data, stride, tdata, tstride);

    /* the number of bytes left to pack */
    pack_width = MIN (width, tile_width);

    finfo->pack_func (finfo, flags, line, sstride, tdata, tstride,
        chroma_site, y, pack_width);

    width -= pack_width;
    line += pack_width * pack_pstride;
  }
}
+
+ #define PACK_P010_10BE GST_VIDEO_FORMAT_AYUV64, unpack_P010_10BE, 1, pack_P010_10BE
/* Unpack one line of P010 big-endian (semi-planar 4:2:0, 10 bits stored in
 * the high bits of each 16-bit word) into AYUV64.  Handles an odd start x
 * with a one-pixel lead-in, then the main pairwise loop, then an optional
 * odd-width tail pixel. */
static void
unpack_P010_10BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  int i;
  gint uv = GET_UV_420 (y, flags);      /* chroma line for this luma line */
  const guint16 *restrict sy = GET_PLANE_LINE (0, y);
  const guint16 *restrict suv = GET_PLANE_LINE (1, uv);
  guint16 *restrict d = dest, Y0, Y1, U, V;

  sy += x;
  /* UV is interleaved per pixel pair: round x down to the pair start */
  suv += (x & ~1);

  if (x & 1) {
    /* odd start: emit the second pixel of its pair, then advance to the
     * next full pair */
    Y0 = GST_READ_UINT16_BE (sy);
    U = GST_READ_UINT16_BE (suv);
    V = GST_READ_UINT16_BE (suv + 1);

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      /* P010 keeps 10 bits in the MSBs; replicate them into the low bits
       * so full-scale 10-bit maps to full-scale 16-bit */
      Y0 |= (Y0 >> 10);
      U |= (U >> 10);
      V |= (V >> 10);
    }

    d[0] = 0xffff;
    d[1] = Y0;
    d[2] = U;
    d[3] = V;
    width--;
    d += 4;
    sy += 1;
    suv += 2;
  }

  /* main loop: two luma samples share one UV pair */
  for (i = 0; i < width / 2; i++) {
    Y0 = GST_READ_UINT16_BE (sy + 2 * i);
    Y1 = GST_READ_UINT16_BE (sy + 2 * i + 1);
    U = GST_READ_UINT16_BE (suv + 2 * i);
    V = GST_READ_UINT16_BE (suv + 2 * i + 1);

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      Y0 |= (Y0 >> 10);
      Y1 |= (Y1 >> 10);
      U |= (U >> 10);
      V |= (V >> 10);
    }

    d[i * 8 + 0] = 0xffff;
    d[i * 8 + 1] = Y0;
    d[i * 8 + 2] = U;
    d[i * 8 + 3] = V;
    d[i * 8 + 4] = 0xffff;
    d[i * 8 + 5] = Y1;
    d[i * 8 + 6] = U;
    d[i * 8 + 7] = V;
  }

  if (width & 1) {
    /* odd width: one trailing luma with its UV pair */
    gint i = width - 1;

    Y0 = GST_READ_UINT16_BE (sy + i);
    U = GST_READ_UINT16_BE (suv + i);
    V = GST_READ_UINT16_BE (suv + i + 1);

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      Y0 |= (Y0 >> 10);
      U |= (U >> 10);
      V |= (V >> 10);
    }

    d[i * 4 + 0] = 0xffff;
    d[i * 4 + 1] = Y0;
    d[i * 4 + 2] = U;
    d[i * 4 + 3] = V;
  }
}
+
/* Pack one AYUV64 line into P010 big-endian: 10 significant bits are kept
 * in the MSBs of each 16-bit word (mask 0xffc0).  Chroma is written only
 * on chroma-carrying lines, interleaved as U,V per pixel pair. */
static void
pack_P010_10BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  int i;
  gint uv = GET_UV_420 (y, flags);      /* chroma line for this luma line */
  guint16 *restrict dy = GET_PLANE_LINE (0, y);
  guint16 *restrict duv = GET_PLANE_LINE (1, uv);
  guint16 Y0, Y1, U, V;
  const guint16 *restrict s = src;

  if (IS_CHROMA_LINE_420 (y, flags)) {
    /* luma + chroma line: process pixels in pairs */
    for (i = 0; i < width / 2; i++) {
      /* keep the top 10 bits, zero the rest (P010 layout) */
      Y0 = s[i * 8 + 1] & 0xffc0;
      Y1 = s[i * 8 + 5] & 0xffc0;
      U = s[i * 8 + 2] & 0xffc0;
      V = s[i * 8 + 3] & 0xffc0;

      GST_WRITE_UINT16_BE (dy + i * 2 + 0, Y0);
      GST_WRITE_UINT16_BE (dy + i * 2 + 1, Y1);
      GST_WRITE_UINT16_BE (duv + i * 2 + 0, U);
      GST_WRITE_UINT16_BE (duv + i * 2 + 1, V);
    }
    /* odd width: trailing pixel writes its own UV pair */
    if (width & 1) {
      gint i = width - 1;

      Y0 = s[i * 4 + 1] & 0xffc0;
      U = s[i * 4 + 2] & 0xffc0;
      V = s[i * 4 + 3] & 0xffc0;

      GST_WRITE_UINT16_BE (dy + i, Y0);
      GST_WRITE_UINT16_BE (duv + i + 0, U);
      GST_WRITE_UINT16_BE (duv + i + 1, V);
    }
  } else {
    /* luma-only line: chroma of this row is dropped */
    for (i = 0; i < width; i++) {
      Y0 = s[i * 4 + 1] & 0xffc0;
      GST_WRITE_UINT16_BE (dy + i, Y0);
    }
  }
}
+
+ #define PACK_P010_10LE GST_VIDEO_FORMAT_AYUV64, unpack_P010_10LE, 1, pack_P010_10LE
/* Unpack one line of P010 little-endian (semi-planar 4:2:0, 10 bits stored
 * in the high bits of each 16-bit word) into AYUV64.  Handles an odd start
 * x with a one-pixel lead-in, then the main pairwise loop, then an optional
 * odd-width tail pixel. */
static void
unpack_P010_10LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  int i;
  gint uv = GET_UV_420 (y, flags);      /* chroma line for this luma line */
  const guint16 *restrict sy = GET_PLANE_LINE (0, y);
  const guint16 *restrict suv = GET_PLANE_LINE (1, uv);
  guint16 *restrict d = dest, Y0, Y1, U, V;

  sy += x;
  /* UV is interleaved per pixel pair: round x down to the pair start */
  suv += (x & ~1);

  if (x & 1) {
    /* odd start: emit the second pixel of its pair, then advance to the
     * next full pair */
    Y0 = GST_READ_UINT16_LE (sy);
    U = GST_READ_UINT16_LE (suv);
    V = GST_READ_UINT16_LE (suv + 1);

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      /* P010 keeps 10 bits in the MSBs; replicate them into the low bits
       * so full-scale 10-bit maps to full-scale 16-bit */
      Y0 |= (Y0 >> 10);
      U |= (U >> 10);
      V |= (V >> 10);
    }

    d[0] = 0xffff;
    d[1] = Y0;
    d[2] = U;
    d[3] = V;
    width--;
    d += 4;
    sy += 1;
    suv += 2;
  }

  /* main loop: two luma samples share one UV pair */
  for (i = 0; i < width / 2; i++) {
    Y0 = GST_READ_UINT16_LE (sy + 2 * i);
    Y1 = GST_READ_UINT16_LE (sy + 2 * i + 1);
    U = GST_READ_UINT16_LE (suv + 2 * i);
    V = GST_READ_UINT16_LE (suv + 2 * i + 1);

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      Y0 |= (Y0 >> 10);
      Y1 |= (Y1 >> 10);
      U |= (U >> 10);
      V |= (V >> 10);
    }

    d[i * 8 + 0] = 0xffff;
    d[i * 8 + 1] = Y0;
    d[i * 8 + 2] = U;
    d[i * 8 + 3] = V;
    d[i * 8 + 4] = 0xffff;
    d[i * 8 + 5] = Y1;
    d[i * 8 + 6] = U;
    d[i * 8 + 7] = V;
  }

  if (width & 1) {
    /* odd width: one trailing luma with its UV pair */
    gint i = width - 1;

    Y0 = GST_READ_UINT16_LE (sy + i);
    U = GST_READ_UINT16_LE (suv + i);
    V = GST_READ_UINT16_LE (suv + i + 1);

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      Y0 |= (Y0 >> 10);
      U |= (U >> 10);
      V |= (V >> 10);
    }

    d[i * 4 + 0] = 0xffff;
    d[i * 4 + 1] = Y0;
    d[i * 4 + 2] = U;
    d[i * 4 + 3] = V;
  }
}
+
+ static void
+ pack_P010_10LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ int i;
+ gint uv = GET_UV_420 (y, flags);
+ guint16 *restrict dy = GET_PLANE_LINE (0, y);
+ guint16 *restrict duv = GET_PLANE_LINE (1, uv);
+ guint16 Y0, Y1, U, V;
+ const guint16 *restrict s = src;
+
+ if (IS_CHROMA_LINE_420 (y, flags)) {
+ for (i = 0; i < width / 2; i++) {
+ Y0 = s[i * 8 + 1] & 0xffc0;
+ Y1 = s[i * 8 + 5] & 0xffc0;
+ U = s[i * 8 + 2] & 0xffc0;
+ V = s[i * 8 + 3] & 0xffc0;
+
+ GST_WRITE_UINT16_LE (dy + i * 2 + 0, Y0);
+ GST_WRITE_UINT16_LE (dy + i * 2 + 1, Y1);
+ GST_WRITE_UINT16_LE (duv + i * 2 + 0, U);
+ GST_WRITE_UINT16_LE (duv + i * 2 + 1, V);
+ }
+ if (width & 1) {
+ gint i = width - 1;
+
+ Y0 = s[i * 4 + 1] & 0xffc0;
+ U = s[i * 4 + 2] & 0xffc0;
+ V = s[i * 4 + 3] & 0xffc0;
+
+ GST_WRITE_UINT16_LE (dy + i, Y0);
+ GST_WRITE_UINT16_LE (duv + i + 0, U);
+ GST_WRITE_UINT16_LE (duv + i + 1, V);
+ }
+ } else {
+ for (i = 0; i < width; i++) {
+ Y0 = s[i * 4 + 1] & 0xffc0;
+ GST_WRITE_UINT16_LE (dy + i, Y0);
+ }
+ }
+ }
+
#define PACK_GRAY10_LE32 GST_VIDEO_FORMAT_AYUV64, unpack_GRAY10_LE32, 1, pack_GRAY10_LE32
/* Unpack GRAY10_LE32 (three 10-bit luma samples per little-endian 32-bit
 * word, 2 MSBs of each word are padding) into AYUV64 with neutral chroma.
 *
 * NOTE(review): when x > 0, the `pix + c < x` skip does not advance `doff`
 * while `doff` restarts at `pix * 4` for each word, so destination placement
 * only lines up for x == 0 (or a multiple of 3) — confirm callers' x
 * alignment before relying on other offsets. */
static void
unpack_GRAY10_LE32 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  gint i;
  const guint32 *restrict sy = GET_PLANE_LINE (0, y);
  guint16 *restrict d = dest;
  gint num_words = (width + 2) / 3;     /* ceil(width / 3) */

  /* Y data is packed into little endian 32bit words, with the 2 MSB being
   * padding. There is only 1 pattern.
   * -> padding | Y1 | Y2 | Y3
   */

  for (i = 0; i < num_words; i++) {
    gint num_comps = MIN (3, width - i * 3);    /* samples in this word */
    guint pix = i * 3;
    gsize doff = pix * 4;
    gint c;
    guint32 Y;

    Y = GST_READ_UINT32_LE (sy + i);

    for (c = 0; c < num_comps; c++) {
      guint16 Yn;

      /* For Y, we simply read 10 bit and shift it out */
      Yn = (Y & 0x03ff) << 6;
      Y >>= 10;

      if (G_UNLIKELY (pix + c < x))
        continue;

      if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE))
        /* replicate the 10 MSBs into the 6 padding LSBs */
        Yn |= Yn >> 10;

      d[doff + 0] = 0xffff;     /* opaque alpha */
      d[doff + 1] = Yn;
      d[doff + 2] = 0x8000;     /* neutral U */
      d[doff + 3] = 0x8000;     /* neutral V */

      doff += 4;
    }
  }
}
+
+ static void
+ pack_GRAY10_LE32 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ gint i;
+ guint32 *restrict dy = GET_PLANE_LINE (0, y);
+ const guint16 *restrict s = src;
+ gint num_words = (width + 2) / 3;
+
+ for (i = 0; i < num_words; i++) {
+ gint num_comps = MIN (3, width - i * 3);
+ guint pix = i * 3;
+ gsize soff = pix * 4;
+ gint c;
+ guint32 Y = 0;
+
+ for (c = 0; c < num_comps; c++) {
+ Y |= s[soff + 1] >> 6 << (10 * c);
+ soff += 4;
+ }
+
+ GST_WRITE_UINT32_LE (dy + i, Y);
+ }
+ }
+
#define PACK_NV12_10LE32 GST_VIDEO_FORMAT_AYUV64, unpack_NV12_10LE32, 1, pack_NV12_10LE32
/* Unpack NV12_10LE32 (semi-planar 4:2:0; 10-bit samples packed three to a
 * little-endian 32-bit word with 2 padding MSBs) into AYUV64.
 *
 * NOTE(review): as in unpack_GRAY10_LE32, the `pix + c < x` skip does not
 * advance `doff`, so destination placement only lines up for x == 0 (or a
 * multiple of 3) — confirm callers' x alignment. */
static void
unpack_NV12_10LE32 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  gint i;
  gint uv = GET_UV_420 (y, flags);      /* chroma line for luma line y */
  const guint32 *restrict sy = GET_PLANE_LINE (0, y);
  const guint32 *restrict suv = GET_PLANE_LINE (1, uv);
  guint16 *restrict d = dest;
  gint num_words = (width + 2) / 3;     /* ceil(width / 3) */
  guint32 UV = 0;
  guint16 Un = 0, Vn = 0;

  /* Y data is packed into little endian 32bit words, with the 2 MSB being
   * padding. There is only 1 pattern.
   * -> padding | Y1 | Y2 | Y3
   *
   * UV is packed the same way, though we end up with 2 patterns:
   * -> U | V | U | padding
   * -> V | U | V | padding
   */

  /* FIXME unroll the 6 states ? */

  for (i = 0; i < num_words; i++) {
    gint num_comps = MIN (3, width - i * 3);    /* pixels in this Y word */
    guint pix = i * 3;
    gsize doff = pix * 4;
    gint c;
    guint32 Y;

    Y = GST_READ_UINT32_LE (sy + i);

    for (c = 0; c < num_comps; c++) {
      guint16 Yn;

      /* For Y, we simply read 10 bit and shift it out */
      Yn = (Y & 0x03ff) << 6;
      Y >>= 10;

      /* Unpacking UV has been reduced to a cycle of 6 states. The following
       * code is a reduce version of:
       * 0: - Read first UV word (UVU)
       *      Unpack U and V
       * 1: - Reused U/V from 1 (sub-sampling)
       * 2: - Unpack remaining U value
       *    - Read following UV word (VUV)
       *    - Unpack V value
       * 3: - Reuse U/V from 2 (sub-sampling)
       * 4: - Unpack remaining U
       *    - Unpack remaining V
       * 5: - Reuse UV/V from 4 (sub-sampling)
       */
      switch ((pix + c) % 6) {
        case 0:
          UV = GST_READ_UINT32_LE (suv + i);
          /* fallthrough */
        case 4:
          Un = (UV & 0x03ff) << 6;
          UV >>= 10;
          Vn = (UV & 0x03ff) << 6;
          UV >>= 10;
          break;
        case 2:
          Un = (UV & 0x03ff) << 6;
          UV = GST_READ_UINT32_LE (suv + i + 1);
          Vn = (UV & 0x03ff) << 6;
          UV >>= 10;
          break;
        default:
          /* keep value */
          break;
      }

      if (G_UNLIKELY (pix + c < x))
        continue;

      if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
        /* replicate the 10 MSBs into the 6 padding LSBs */
        Yn |= Yn >> 10;
        Un |= Un >> 10;
        Vn |= Vn >> 10;
      }

      d[doff + 0] = 0xffff;
      d[doff + 1] = Yn;
      d[doff + 2] = Un;
      d[doff + 3] = Vn;

      doff += 4;
    }
  }
}
+
/* Pack AYUV64 into NV12_10LE32: three 10-bit samples per little-endian
 * 32-bit word.  Chroma is written on 4:2:0 chroma lines only; the UV word
 * is assembled across a 6-pixel cycle (case 2 finishes one word and starts
 * the next, case 4 finishes a word), and a partially filled last word is
 * flushed after the loop. */
static void
pack_NV12_10LE32 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  gint i;
  gint uv = GET_UV_420 (y, flags);
  guint32 *restrict dy = GET_PLANE_LINE (0, y);
  guint32 *restrict duv = GET_PLANE_LINE (1, uv);
  const guint16 *restrict s = src;
  gint num_words = (width + 2) / 3;     /* ceil(width / 3) */
  guint32 UV = 0;

  /* FIXME unroll the 6 states ? */

  for (i = 0; i < num_words; i++) {
    gint num_comps = MIN (3, width - i * 3);    /* pixels in this Y word */
    guint pix = i * 3;
    gsize soff = pix * 4;
    gint c;
    guint32 Y = 0;

    for (c = 0; c < num_comps; c++) {
      /* accumulate the 10 luma MSBs into slot c of the Y word */
      Y |= s[soff + 1] >> 6 << (10 * c);

      if (IS_CHROMA_LINE_420 (y, flags)) {
        switch ((pix + c) % 6) {
          case 0:
            /* start a new UV word with U0 | V0 */
            UV = s[soff + 2] >> 6;
            UV |= s[soff + 3] >> 6 << 10;
            break;
          case 2:
            /* complete the word with U1, write it, start the next with V1 */
            UV |= s[soff + 2] >> 6 << 20;
            GST_WRITE_UINT32_LE (duv + i, UV);
            UV = s[soff + 3] >> 6;
            break;
          case 4:
            /* complete the second word with U2 | V2 and write it */
            UV |= s[soff + 2] >> 6 << 10;
            UV |= s[soff + 3] >> 6 << 20;
            GST_WRITE_UINT32_LE (duv + i, UV);
            break;
          default:
            /* keep value */
            break;
        }
      }

      soff += 4;
    }

    GST_WRITE_UINT32_LE (dy + i, Y);

    /* flush a partially accumulated UV word for a short final group */
    if (IS_CHROMA_LINE_420 (y, flags) && num_comps < 3)
      GST_WRITE_UINT32_LE (duv + i, UV);

  }
}
+
#define PACK_NV16_10LE32 GST_VIDEO_FORMAT_AYUV64, unpack_NV16_10LE32, 1, pack_NV16_10LE32
/* Unpack NV16_10LE32 (semi-planar 4:2:2; 10-bit samples packed three to a
 * little-endian 32-bit word with 2 padding MSBs) into AYUV64.  Unlike the
 * NV12 variant, chroma has full vertical resolution, so the UV plane is
 * addressed with line y directly.
 *
 * NOTE(review): same x-offset caveat as unpack_NV12_10LE32 — the skip path
 * does not advance `doff`, so only x == 0 (or a multiple of 3) lines up. */
static void
unpack_NV16_10LE32 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  gint i;
  const guint32 *restrict sy = GET_PLANE_LINE (0, y);
  const guint32 *restrict suv = GET_PLANE_LINE (1, y);
  guint16 *restrict d = dest;
  gint num_words = (width + 2) / 3;     /* ceil(width / 3) */
  guint32 UV = 0;
  guint16 Un = 0, Vn = 0;

  /* Y data is packed into little endian 32bit words, with the 2 MSB being
   * padding. There is only 1 pattern.
   * -> padding | Y1 | Y2 | Y3
   *
   * UV is packed the same way, though we end up with 2 patterns:
   * -> U | V | U | padding
   * -> V | U | V | padding
   */

  /* FIXME unroll the 6 states ? */

  for (i = 0; i < num_words; i++) {
    gint num_comps = MIN (3, width - i * 3);    /* pixels in this Y word */
    guint pix = i * 3;
    gsize doff = pix * 4;
    gint c;
    guint32 Y;

    Y = GST_READ_UINT32_LE (sy + i);

    for (c = 0; c < num_comps; c++) {
      guint16 Yn;

      /* For Y, we simply read 10 bit and shift it out */
      Yn = (Y & 0x03ff) << 6;
      Y >>= 10;

      /* Unpacking UV has been reduced to a cycle of 6 states. The following
       * code is a reduce version of:
       * 0: - Read first UV word (UVU)
       *      Unpack U and V
       * 1: - Reused U/V from 1 (sub-sampling)
       * 2: - Unpack remaining U value
       *    - Read following UV word (VUV)
       *    - Unpack V value
       * 3: - Reuse U/V from 2 (sub-sampling)
       * 4: - Unpack remaining U
       *    - Unpack remaining V
       * 5: - Reuse UV/V from 4 (sub-sampling)
       */
      switch ((pix + c) % 6) {
        case 0:
          UV = GST_READ_UINT32_LE (suv + i);
          /* fallthrough */
        case 4:
          Un = (UV & 0x03ff) << 6;
          UV >>= 10;
          Vn = (UV & 0x03ff) << 6;
          UV >>= 10;
          break;
        case 2:
          Un = (UV & 0x03ff) << 6;
          UV = GST_READ_UINT32_LE (suv + i + 1);
          Vn = (UV & 0x03ff) << 6;
          UV >>= 10;
          break;
        default:
          /* keep value */
          break;
      }

      if (G_UNLIKELY (pix + c < x))
        continue;

      if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
        /* replicate the 10 MSBs into the 6 padding LSBs */
        Yn |= Yn >> 10;
        Un |= Un >> 10;
        Vn |= Vn >> 10;
      }

      d[doff + 0] = 0xffff;
      d[doff + 1] = Yn;
      d[doff + 2] = Un;
      d[doff + 3] = Vn;

      doff += 4;
    }
  }
}
+
/* Pack AYUV64 into NV16_10LE32 (4:2:2): three 10-bit samples per
 * little-endian 32-bit word.  Chroma is written on every line; the UV word
 * is assembled across a 6-pixel cycle and a partially filled last word is
 * flushed after the loop. */
static void
pack_NV16_10LE32 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  gint i;
  guint32 *restrict dy = GET_PLANE_LINE (0, y);
  guint32 *restrict duv = GET_PLANE_LINE (1, y);
  const guint16 *restrict s = src;
  gint num_words = (width + 2) / 3;     /* ceil(width / 3) */
  guint32 UV = 0;

  /* FIXME unroll the 6 states ? */

  for (i = 0; i < num_words; i++) {
    gint num_comps = MIN (3, width - i * 3);    /* pixels in this Y word */
    guint pix = i * 3;
    gsize soff = pix * 4;
    gint c;
    guint32 Y = 0;

    for (c = 0; c < num_comps; c++) {
      /* accumulate the 10 luma MSBs into slot c of the Y word */
      Y |= s[soff + 1] >> 6 << (10 * c);

      switch ((pix + c) % 6) {
        case 0:
          /* start a new UV word with U0 | V0 */
          UV = s[soff + 2] >> 6;
          UV |= s[soff + 3] >> 6 << 10;
          break;
        case 2:
          /* complete the word with U1, write it, start the next with V1 */
          UV |= s[soff + 2] >> 6 << 20;
          GST_WRITE_UINT32_LE (duv + i, UV);
          UV = s[soff + 3] >> 6;
          break;
        case 4:
          /* complete the second word with U2 | V2 and write it */
          UV |= s[soff + 2] >> 6 << 10;
          UV |= s[soff + 3] >> 6 << 20;
          GST_WRITE_UINT32_LE (duv + i, UV);
          break;
        default:
          /* keep value */
          break;
      }

      soff += 4;
    }

    GST_WRITE_UINT32_LE (dy + i, Y);

    /* flush a partially accumulated UV word for a short final group */
    if (num_comps < 3)
      GST_WRITE_UINT32_LE (duv + i, UV);
  }
}
+
#define PACK_NV12_10LE40 GST_VIDEO_FORMAT_AYUV64, unpack_NV12_10LE40, 1, pack_NV12_10LE40
/* Unpack NV12_10LE40 (fully packed semi-planar 4:2:0; 10-bit samples with
 * no padding, so 4 samples occupy exactly 5 bytes) into AYUV64.  The bit
 * slicing repeats with a 4-pixel period, handled by the switch below.
 *
 * NOTE(review): the x start offset parameter is not used — unpacking always
 * begins at the left edge of the line; confirm callers never pass x > 0 for
 * this format. */
static void
unpack_NV12_10LE40 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  gint i;
  gint uv = GET_UV_420 (y, flags);
  guint16 *restrict d = dest;
  const guint8 *restrict sy = GET_PLANE_LINE (0, y);
  const guint8 *restrict suv = GET_PLANE_LINE (1, uv);
  guint16 Y0 = 0, Y1 = 0, Yn = 0, Un = 0, Vn = 0;
  guint32 UV = 0;

  for (i = 0; i < width; i++) {
    gboolean update_c = FALSE;  /* whether Un/Vn were refreshed this pixel */

    switch (i & 3) {
      case 0:
        /* pixel 0 of the group: Y bits 0-9 of a fresh 16-bit read */
        Y0 = GST_READ_UINT16_LE (sy);
        Yn = Y0 & 0x3ff;
        sy += 2;

        /* read 32 chroma bits: U0 (0-9) and V0 (10-19) */
        UV = GST_READ_UINT32_LE (suv);
        Un = UV & 0x3ff;
        Vn = (UV >> 10) & 0x3ff;
        suv += 4;

        Yn <<= 6;
        Un <<= 6;
        Vn <<= 6;
        update_c = TRUE;
        break;
      case 1:
        /* pixel 1: Y straddles the two reads (6 + 4 bits) */
        Y1 = GST_READ_UINT16_LE (sy);
        Yn = (Y0 >> 10) | ((Y1 & 0xf) << 6);
        sy += 2;

        Yn <<= 6;
        break;
      case 2:
        /* pixel 2: Y bits 4-13 of the second read; U1 from bits 20-29,
         * V1 straddles the word boundary (2 + 8 bits from the next byte) */
        Yn = (Y1 >> 4) & 0x3ff;

        Un = (UV >> 20) & 0x3ff;
        Vn = (UV >> 30);
        UV = GST_READ_UINT8 (suv);
        Vn |= (UV << 2);
        suv++;

        Yn <<= 6;
        Un <<= 6;
        Vn <<= 6;
        update_c = TRUE;
        break;
      case 3:
        /* pixel 3: last 2 bits of the previous read plus one more byte */
        Y0 = GST_READ_UINT8 (sy);
        Yn = (Y1 >> 14) | (Y0 << 2);
        sy++;

        Yn <<= 6;
        break;
    }

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      /* replicate the 10 MSBs into the 6 padding LSBs */
      Yn |= Yn >> 10;
      if (update_c) {
        Un |= Un >> 10;
        Vn |= Vn >> 10;
      }
    }

    d[i * 4 + 0] = 0xffff;
    d[i * 4 + 1] = Yn;
    d[i * 4 + 2] = Un;
    d[i * 4 + 3] = Vn;
  }
}
+
/* Pack AYUV64 into NV12_10LE40: 10-bit samples tightly packed, 4 samples
 * per 5 bytes.  The main loop emits whole bytes as they become complete;
 * the trailing switch flushes the leftover bits of a final group shorter
 * than 4 pixels. */
static void
pack_NV12_10LE40 (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
    gint y, gint width)
{
  gint i;
  gint uv = GET_UV_420 (y, flags);
  guint8 *restrict dy = GET_PLANE_LINE (0, y);
  guint8 *restrict duv = GET_PLANE_LINE (1, uv);
  guint16 Y0 = 0, Y1 = 0, Y2 = 0, Y3 = 0, U0, V0 = 0, U1 = 0, V1 = 0;
  const guint16 *restrict s = src;

  for (i = 0; i < width; i++) {
    switch (i & 3) {
      case 0:
        /* Y0 bits 0-7 complete the first luma byte */
        Y0 = s[i * 4 + 1] >> 6;
        GST_WRITE_UINT8 (dy, Y0 & 0xff);
        dy++;

        if (IS_CHROMA_LINE_420 (y, flags)) {
          U0 = s[i * 4 + 2] >> 6;
          V0 = s[i * 4 + 3] >> 6;

          /* U0 bits 0-7, then U0 bits 8-9 + V0 bits 0-5 */
          GST_WRITE_UINT8 (duv, U0 & 0xff);
          duv++;

          GST_WRITE_UINT8 (duv, (U0 >> 8) | ((V0 & 0x3f) << 2));
          duv++;
        }
        break;
      case 1:
        /* Y0 bits 8-9 + Y1 bits 0-5 */
        Y1 = s[i * 4 + 1] >> 6;
        GST_WRITE_UINT8 (dy, (Y0 >> 8) | ((Y1 & 0x3f) << 2));
        dy++;
        break;
      case 2:
        /* Y1 bits 6-9 + Y2 bits 0-3 */
        Y2 = s[i * 4 + 1] >> 6;
        GST_WRITE_UINT8 (dy, (Y1 >> 6) | ((Y2 & 0xf) << 4));
        dy++;

        if (IS_CHROMA_LINE_420 (y, flags)) {
          U1 = s[i * 4 + 2] >> 6;
          V1 = s[i * 4 + 3] >> 6;

          /* V0 bits 6-9 + U1 bits 0-3, U1 bits 4-9 + V1 bits 0-1,
           * then V1 bits 2-9 — completes the 5-byte chroma group */
          GST_WRITE_UINT8 (duv, (V0 >> 6) | ((U1 & 0xf) << 4));
          duv++;

          GST_WRITE_UINT8 (duv, (U1 >> 4) | ((V1 & 0x3) << 6));
          duv++;

          GST_WRITE_UINT8 (duv, V1 >> 2);
          duv++;
        }
        break;
      case 3:
        /* Y2 bits 4-9 + Y3 bits 0-1, then Y3 bits 2-9 — completes the
         * 5-byte luma group */
        Y3 = s[i * 4 + 1] >> 6;
        GST_WRITE_UINT8 (dy, (Y2 >> 4) | ((Y3 & 0x3) << 6));
        dy++;
        GST_WRITE_UINT8 (dy, (Y3 >> 2));
        dy++;
        break;
    }
  }

  /* flush the bits of a final group shorter than 4 pixels */
  switch (width & 3) {
    case 0:
      break;
    case 1:
      GST_WRITE_UINT8 (dy, Y0 >> 8);
      if (IS_CHROMA_LINE_420 (y, flags))
        GST_WRITE_UINT8 (duv, V0 >> 6);
      break;
    case 2:
      GST_WRITE_UINT8 (dy, Y1 >> 6);
      if (IS_CHROMA_LINE_420 (y, flags))
        GST_WRITE_UINT8 (duv, V0 >> 6);
      break;
    case 3:
      GST_WRITE_UINT8 (dy, Y2 >> 4);
      break;
  }
}
+
+ #define PACK_VUYA GST_VIDEO_FORMAT_AYUV, unpack_VUYA, 1, pack_VUYA
+ static void
+ unpack_VUYA (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ const guint8 *restrict s = GET_LINE (y);
+ guint8 *restrict d = dest;
+
+ s += x * 4;
+
+ video_orc_unpack_VUYA (d, s, width);
+ }
+
+ static void
+ pack_VUYA (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ const guint8 *restrict s = src;
+ guint8 *restrict d = GET_LINE (y);
+
+ video_orc_pack_VUYA (d, s, width);
+ }
+
+ #define PACK_BGR10A2_LE GST_VIDEO_FORMAT_ARGB64, unpack_bgr10a2_le, 1, pack_bgr10a2_le
+ static void
+ unpack_bgr10a2_le (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ int i;
+ const guint8 *restrict s = GET_LINE (y);
+ guint16 *restrict d = dest;
+ guint32 ARGB;
+ guint16 A, R, G, B;
+
+ s += x * 4;
+
+ for (i = 0; i < width; i++) {
+ ARGB = GST_READ_UINT32_LE (s + 4 * i);
+
+ B = ((ARGB >> 0) & 0x3ff) << 6;
+ G = ((ARGB >> 10) & 0x3ff) << 6;
+ R = ((ARGB >> 20) & 0x3ff) << 6;
+ A = ((ARGB >> 30) & 0x03) << 14;
+
+ if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+ B |= (B >> 10);
+ G |= (G >> 10);
+ R |= (R >> 10);
+ A |= (A >> 10);
+ }
+
+ d[4 * i + 0] = A;
+ d[4 * i + 1] = R;
+ d[4 * i + 2] = G;
+ d[4 * i + 3] = B;
+ }
+ }
+
+ static void
+ pack_bgr10a2_le (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ int i;
+ guint32 *restrict d = GET_LINE (y);
+ const guint16 *restrict s = src;
+ guint32 ARGB;
+ guint16 A, R, G, B;
+
+ for (i = 0; i < width; i++) {
+ A = s[4 * i] & 0xc000;
+ R = s[4 * i + 1] & 0xffc0;
+ G = s[4 * i + 2] & 0xffc0;
+ B = s[4 * i + 3] & 0xffc0;
+
+ ARGB = (B >> 6) | (G << 4) | (R << 14) | (A << 16);
+
+ GST_WRITE_UINT32_LE (d + i, ARGB);
+ }
+ }
+
++#define PACK_INVZ16_LE GST_VIDEO_FORMAT_AYUV64, unpack_INVZ16_LE, 1, pack_INVZ16_LE
++static void
++unpack_INVZ16_LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
++ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
++ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
++{
++ int i;
++ const guint16 *restrict s = GET_LINE (y);
++ guint16 *restrict d = dest;
++
++ s += x;
++
++ for (i = 0; i < width; i++) {
++ d[i * 4 + 0] = 0xffff;
++ d[i * 4 + 1] = GST_READ_UINT16_LE (s + i);
++ d[i * 4 + 2] = 0x8000;
++ d[i * 4 + 3] = 0x8000;
++ }
++}
++
++static void
++pack_INVZ16_LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
++ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
++ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
++ gint y, gint width)
++{
++ int i;
++ guint16 *restrict d = GET_LINE (y);
++ const guint16 *restrict s = src;
++
++ for (i = 0; i < width; i++) {
++ GST_WRITE_UINT16_LE (d + i, s[i * 4 + 1]);
++ }
++}
++
+ #define PACK_RGB10A2_LE GST_VIDEO_FORMAT_ARGB64, unpack_rgb10a2_le, 1, pack_rgb10a2_le
+ static void
+ unpack_rgb10a2_le (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ int i;
+ const guint8 *restrict s = GET_LINE (y);
+ guint16 *restrict d = dest;
+ guint32 ARGB;
+ guint16 A, R, G, B;
+
+ s += x * 4;
+
+ for (i = 0; i < width; i++) {
+ ARGB = GST_READ_UINT32_LE (s + 4 * i);
+
+ R = ((ARGB >> 0) & 0x3ff) << 6;
+ G = ((ARGB >> 10) & 0x3ff) << 6;
+ B = ((ARGB >> 20) & 0x3ff) << 6;
+ A = ((ARGB >> 30) & 0x03) << 14;
+
+ if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+ R |= (R >> 10);
+ G |= (G >> 10);
+ B |= (B >> 10);
+ A |= (A >> 10);
+ }
+
+ d[4 * i + 0] = A;
+ d[4 * i + 1] = R;
+ d[4 * i + 2] = G;
+ d[4 * i + 3] = B;
+ }
+ }
+
+ static void
+ pack_rgb10a2_le (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ int i;
+ guint32 *restrict d = GET_LINE (y);
+ const guint16 *restrict s = src;
+ guint32 ARGB;
+ guint16 A, R, G, B;
+
+ for (i = 0; i < width; i++) {
+ A = s[4 * i] & 0xc000;
+ R = s[4 * i + 1] & 0xffc0;
+ G = s[4 * i + 2] & 0xffc0;
+ B = s[4 * i + 3] & 0xffc0;
+
+ ARGB = (R >> 6) | (G << 4) | (B << 14) | (A << 16);
+
+ GST_WRITE_UINT32_LE (d + i, ARGB);
+ }
+ }
+
/* Variant of the MAKE_*_FORMAT table helpers for depth/infrared formats:
 * builds a GstVideoFormatInfo entry flagged GRAY | LE from the given fourcc
 * and per-plane layout (added alongside the INVZ16 depth format above). */
#define MAKE_DEPTH_FORMAT(name, desc, fourcc, depth, pstride, plane, offs, sub, pack) \
 { fourcc, {GST_VIDEO_FORMAT_ ##name, G_STRINGIFY(name), desc, GST_VIDEO_FORMAT_FLAG_GRAY | GST_VIDEO_FORMAT_FLAG_LE, depth, pstride, plane, offs, sub, pack } }
++
+ #define PACK_Y444_16BE GST_VIDEO_FORMAT_AYUV64, unpack_Y444_16BE, 1, pack_Y444_16BE
+ static void
+ unpack_Y444_16BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ int i;
+ guint16 *restrict sy = GET_Y_LINE (y);
+ guint16 *restrict su = GET_U_LINE (y);
+ guint16 *restrict sv = GET_V_LINE (y);
+ guint16 *restrict d = dest, Y, U, V;
+
+ sy += x;
+ su += x;
+ sv += x;
+
+ for (i = 0; i < width; i++) {
+ Y = GST_READ_UINT16_BE (sy + i);
+ U = GST_READ_UINT16_BE (su + i);
+ V = GST_READ_UINT16_BE (sv + i);
+
+ d[i * 4 + 0] = 0xffff;
+ d[i * 4 + 1] = Y;
+ d[i * 4 + 2] = U;
+ d[i * 4 + 3] = V;
+ }
+ }
+
+ static void
+ pack_Y444_16BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ int i;
+ guint16 *restrict dy = GET_Y_LINE (y);
+ guint16 *restrict du = GET_U_LINE (y);
+ guint16 *restrict dv = GET_V_LINE (y);
+ guint16 Y, U, V;
+ const guint16 *restrict s = src;
+
+ for (i = 0; i < width; i++) {
+ Y = s[i * 4 + 1];
+ U = s[i * 4 + 2];
+ V = s[i * 4 + 3];
+
+ GST_WRITE_UINT16_BE (dy + i, Y);
+ GST_WRITE_UINT16_BE (du + i, U);
+ GST_WRITE_UINT16_BE (dv + i, V);
+ }
+ }
+
+ #define PACK_Y444_16LE GST_VIDEO_FORMAT_AYUV64, unpack_Y444_16LE, 1, pack_Y444_16LE
+ static void
+ unpack_Y444_16LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+ int i;
+ guint16 *restrict sy = GET_Y_LINE (y);
+ guint16 *restrict su = GET_U_LINE (y);
+ guint16 *restrict sv = GET_V_LINE (y);
+ guint16 *restrict d = dest, Y, U, V;
+
+ sy += x;
+ su += x;
+ sv += x;
+
+ for (i = 0; i < width; i++) {
+ Y = GST_READ_UINT16_LE (sy + i);
+ U = GST_READ_UINT16_LE (su + i);
+ V = GST_READ_UINT16_LE (sv + i);
+
+ d[i * 4 + 0] = 0xffff;
+ d[i * 4 + 1] = Y;
+ d[i * 4 + 2] = U;
+ d[i * 4 + 3] = V;
+ }
+ }
+
+ static void
+ pack_Y444_16LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ int i;
+ guint16 *restrict dy = GET_Y_LINE (y);
+ guint16 *restrict du = GET_U_LINE (y);
+ guint16 *restrict dv = GET_V_LINE (y);
+ guint16 Y, U, V;
+ const guint16 *restrict s = src;
+
+ for (i = 0; i < width; i++) {
+ Y = s[i * 4 + 1];
+ U = s[i * 4 + 2];
+ V = s[i * 4 + 3];
+
+ GST_WRITE_UINT16_LE (dy + i, Y);
+ GST_WRITE_UINT16_LE (du + i, U);
+ GST_WRITE_UINT16_LE (dv + i, V);
+ }
+ }
+
#define PACK_P016_BE GST_VIDEO_FORMAT_AYUV64, unpack_P016_BE, 1, pack_P016_BE
/* Unpack P016_BE (semi-planar 4:2:0 with full 16-bit big-endian samples)
 * into AYUV64.  Samples are already full range, so no LSB replication is
 * needed; U/V are duplicated for both pixels of a horizontal pair. */
static void
unpack_P016_BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  int i;
  gint uv = GET_UV_420 (y, flags);      /* chroma line matching luma line y */
  const guint16 *restrict sy = GET_PLANE_LINE (0, y);
  const guint16 *restrict suv = GET_PLANE_LINE (1, uv);
  guint16 *restrict d = dest, Y0, Y1, U, V;

  sy += x;
  suv += (x & ~1);              /* chroma is shared by even/odd pixel pairs */

  if (x & 1) {
    /* Odd start position: emit the second pixel of its chroma pair first,
     * so the main loop below can run pair-aligned. */
    Y0 = GST_READ_UINT16_BE (sy);
    U = GST_READ_UINT16_BE (suv);
    V = GST_READ_UINT16_BE (suv + 1);

    d[0] = 0xffff;
    d[1] = Y0;
    d[2] = U;
    d[3] = V;
    width--;
    d += 4;
    sy += 1;
    suv += 2;
  }

  /* Main loop: two pixels per iteration sharing one U/V sample pair. */
  for (i = 0; i < width / 2; i++) {
    Y0 = GST_READ_UINT16_BE (sy + 2 * i);
    Y1 = GST_READ_UINT16_BE (sy + 2 * i + 1);
    U = GST_READ_UINT16_BE (suv + 2 * i);
    V = GST_READ_UINT16_BE (suv + 2 * i + 1);

    d[i * 8 + 0] = 0xffff;
    d[i * 8 + 1] = Y0;
    d[i * 8 + 2] = U;
    d[i * 8 + 3] = V;
    d[i * 8 + 4] = 0xffff;
    d[i * 8 + 5] = Y1;
    d[i * 8 + 6] = U;
    d[i * 8 + 7] = V;
  }

  if (width & 1) {
    /* Trailing odd pixel: first pixel of a chroma pair. */
    gint i = width - 1;

    Y0 = GST_READ_UINT16_BE (sy + i);
    U = GST_READ_UINT16_BE (suv + i);
    V = GST_READ_UINT16_BE (suv + i + 1);

    d[i * 4 + 0] = 0xffff;
    d[i * 4 + 1] = Y0;
    d[i * 4 + 2] = U;
    d[i * 4 + 3] = V;
  }
}
+
+ static void
+ pack_P016_BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ int i;
+ gint uv = GET_UV_420 (y, flags);
+ guint16 *restrict dy = GET_PLANE_LINE (0, y);
+ guint16 *restrict duv = GET_PLANE_LINE (1, uv);
+ guint16 Y0, Y1, U, V;
+ const guint16 *restrict s = src;
+
+ if (IS_CHROMA_LINE_420 (y, flags)) {
+ for (i = 0; i < width / 2; i++) {
+ Y0 = s[i * 8 + 1];
+ Y1 = s[i * 8 + 5];
+ U = s[i * 8 + 2];
+ V = s[i * 8 + 3];
+
+ GST_WRITE_UINT16_BE (dy + i * 2 + 0, Y0);
+ GST_WRITE_UINT16_BE (dy + i * 2 + 1, Y1);
+ GST_WRITE_UINT16_BE (duv + i * 2 + 0, U);
+ GST_WRITE_UINT16_BE (duv + i * 2 + 1, V);
+ }
+ if (width & 1) {
+ gint i = width - 1;
+
+ Y0 = s[i * 4 + 1];
+ U = s[i * 4 + 2];
+ V = s[i * 4 + 3];
+
+ GST_WRITE_UINT16_BE (dy + i, Y0);
+ GST_WRITE_UINT16_BE (duv + i + 0, U);
+ GST_WRITE_UINT16_BE (duv + i + 1, V);
+ }
+ } else {
+ for (i = 0; i < width; i++) {
+ Y0 = s[i * 4 + 1];
+ GST_WRITE_UINT16_BE (dy + i, Y0);
+ }
+ }
+ }
+
#define PACK_P016_LE GST_VIDEO_FORMAT_AYUV64, unpack_P016_LE, 1, pack_P016_LE
/* Unpack P016_LE (semi-planar 4:2:0 with full 16-bit little-endian samples)
 * into AYUV64.  Samples are already full range, so no LSB replication is
 * needed; U/V are duplicated for both pixels of a horizontal pair. */
static void
unpack_P016_LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  int i;
  gint uv = GET_UV_420 (y, flags);      /* chroma line matching luma line y */
  const guint16 *restrict sy = GET_PLANE_LINE (0, y);
  const guint16 *restrict suv = GET_PLANE_LINE (1, uv);
  guint16 *restrict d = dest, Y0, Y1, U, V;

  sy += x;
  suv += (x & ~1);              /* chroma is shared by even/odd pixel pairs */

  if (x & 1) {
    /* Odd start position: emit the second pixel of its chroma pair first,
     * so the main loop below can run pair-aligned. */
    Y0 = GST_READ_UINT16_LE (sy);
    U = GST_READ_UINT16_LE (suv);
    V = GST_READ_UINT16_LE (suv + 1);

    d[0] = 0xffff;
    d[1] = Y0;
    d[2] = U;
    d[3] = V;
    width--;
    d += 4;
    sy += 1;
    suv += 2;
  }

  /* Main loop: two pixels per iteration sharing one U/V sample pair. */
  for (i = 0; i < width / 2; i++) {
    Y0 = GST_READ_UINT16_LE (sy + 2 * i);
    Y1 = GST_READ_UINT16_LE (sy + 2 * i + 1);
    U = GST_READ_UINT16_LE (suv + 2 * i);
    V = GST_READ_UINT16_LE (suv + 2 * i + 1);

    d[i * 8 + 0] = 0xffff;
    d[i * 8 + 1] = Y0;
    d[i * 8 + 2] = U;
    d[i * 8 + 3] = V;
    d[i * 8 + 4] = 0xffff;
    d[i * 8 + 5] = Y1;
    d[i * 8 + 6] = U;
    d[i * 8 + 7] = V;
  }

  if (width & 1) {
    /* Trailing odd pixel: first pixel of a chroma pair. */
    gint i = width - 1;

    Y0 = GST_READ_UINT16_LE (sy + i);
    U = GST_READ_UINT16_LE (suv + i);
    V = GST_READ_UINT16_LE (suv + i + 1);

    d[i * 4 + 0] = 0xffff;
    d[i * 4 + 1] = Y0;
    d[i * 4 + 2] = U;
    d[i * 4 + 3] = V;
  }
}
+
+ static void
+ pack_P016_LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ int i;
+ gint uv = GET_UV_420 (y, flags);
+ guint16 *restrict dy = GET_PLANE_LINE (0, y);
+ guint16 *restrict duv = GET_PLANE_LINE (1, uv);
+ guint16 Y0, Y1, U, V;
+ const guint16 *restrict s = src;
+
+ if (IS_CHROMA_LINE_420 (y, flags)) {
+ for (i = 0; i < width / 2; i++) {
+ Y0 = s[i * 8 + 1];
+ Y1 = s[i * 8 + 5];
+ U = s[i * 8 + 2];
+ V = s[i * 8 + 3];
+
+ GST_WRITE_UINT16_LE (dy + i * 2 + 0, Y0);
+ GST_WRITE_UINT16_LE (dy + i * 2 + 1, Y1);
+ GST_WRITE_UINT16_LE (duv + i * 2 + 0, U);
+ GST_WRITE_UINT16_LE (duv + i * 2 + 1, V);
+ }
+ if (width & 1) {
+ gint i = width - 1;
+
+ Y0 = s[i * 4 + 1];
+ U = s[i * 4 + 2];
+ V = s[i * 4 + 3];
+
+ GST_WRITE_UINT16_LE (dy + i, Y0);
+ GST_WRITE_UINT16_LE (duv + i + 0, U);
+ GST_WRITE_UINT16_LE (duv + i + 1, V);
+ }
+ } else {
+ for (i = 0; i < width; i++) {
+ Y0 = s[i * 4 + 1];
+ GST_WRITE_UINT16_LE (dy + i, Y0);
+ }
+ }
+ }
+
#define PACK_P012_BE GST_VIDEO_FORMAT_AYUV64, unpack_P012_BE, 1, pack_P012_BE
/* Unpack P012_BE (semi-planar 4:2:0; 12 valid bits stored in the MSBs of
 * big-endian 16-bit words) into AYUV64.  U/V are duplicated for both pixels
 * of a horizontal pair. */
static void
unpack_P012_BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  int i;
  gint uv = GET_UV_420 (y, flags);      /* chroma line matching luma line y */
  const guint16 *restrict sy = GET_PLANE_LINE (0, y);
  const guint16 *restrict suv = GET_PLANE_LINE (1, uv);
  guint16 *restrict d = dest, Y0, Y1, U, V;

  sy += x;
  suv += (x & ~1);              /* chroma is shared by even/odd pixel pairs */

  if (x & 1) {
    /* Odd start position: emit the second pixel of its chroma pair first,
     * so the main loop below can run pair-aligned. */
    Y0 = GST_READ_UINT16_BE (sy);
    U = GST_READ_UINT16_BE (suv);
    V = GST_READ_UINT16_BE (suv + 1);

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      /* Replicate the 12 MSBs into the 4 padding LSBs so the value covers
       * the full 16-bit range. */
      Y0 |= (Y0 >> 12);
      U |= (U >> 12);
      V |= (V >> 12);
    }

    d[0] = 0xffff;
    d[1] = Y0;
    d[2] = U;
    d[3] = V;
    width--;
    d += 4;
    sy += 1;
    suv += 2;
  }

  /* Main loop: two pixels per iteration sharing one U/V sample pair. */
  for (i = 0; i < width / 2; i++) {
    Y0 = GST_READ_UINT16_BE (sy + 2 * i);
    Y1 = GST_READ_UINT16_BE (sy + 2 * i + 1);
    U = GST_READ_UINT16_BE (suv + 2 * i);
    V = GST_READ_UINT16_BE (suv + 2 * i + 1);

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      Y0 |= (Y0 >> 12);
      Y1 |= (Y1 >> 12);
      U |= (U >> 12);
      V |= (V >> 12);
    }

    d[i * 8 + 0] = 0xffff;
    d[i * 8 + 1] = Y0;
    d[i * 8 + 2] = U;
    d[i * 8 + 3] = V;
    d[i * 8 + 4] = 0xffff;
    d[i * 8 + 5] = Y1;
    d[i * 8 + 6] = U;
    d[i * 8 + 7] = V;
  }

  if (width & 1) {
    /* Trailing odd pixel: first pixel of a chroma pair. */
    gint i = width - 1;

    Y0 = GST_READ_UINT16_BE (sy + i);
    U = GST_READ_UINT16_BE (suv + i);
    V = GST_READ_UINT16_BE (suv + i + 1);

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      Y0 |= (Y0 >> 12);
      U |= (U >> 12);
      V |= (V >> 12);
    }

    d[i * 4 + 0] = 0xffff;
    d[i * 4 + 1] = Y0;
    d[i * 4 + 2] = U;
    d[i * 4 + 3] = V;
  }
}
+
+ static void
+ pack_P012_BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+ const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+ gint y, gint width)
+ {
+ int i;
+ gint uv = GET_UV_420 (y, flags);
+ guint16 *restrict dy = GET_PLANE_LINE (0, y);
+ guint16 *restrict duv = GET_PLANE_LINE (1, uv);
+ guint16 Y0, Y1, U, V;
+ const guint16 *restrict s = src;
+
+ if (IS_CHROMA_LINE_420 (y, flags)) {
+ for (i = 0; i < width / 2; i++) {
+ Y0 = s[i * 8 + 1] & 0xfff0;
+ Y1 = s[i * 8 + 5] & 0xfff0;
+ U = s[i * 8 + 2] & 0xfff0;
+ V = s[i * 8 + 3] & 0xfff0;
+
+ GST_WRITE_UINT16_BE (dy + i * 2 + 0, Y0);
+ GST_WRITE_UINT16_BE (dy + i * 2 + 1, Y1);
+ GST_WRITE_UINT16_BE (duv + i * 2 + 0, U);
+ GST_WRITE_UINT16_BE (duv + i * 2 + 1, V);
+ }
+ if (width & 1) {
+ gint i = width - 1;
+
+ Y0 = s[i * 4 + 1] & 0xfff0;
+ U = s[i * 4 + 2] & 0xfff0;
+ V = s[i * 4 + 3] & 0xfff0;
+
+ GST_WRITE_UINT16_BE (dy + i, Y0);
+ GST_WRITE_UINT16_BE (duv + i + 0, U);
+ GST_WRITE_UINT16_BE (duv + i + 1, V);
+ }
+ } else {
+ for (i = 0; i < width; i++) {
+ Y0 = s[i * 4 + 1] & 0xfff0;
+ GST_WRITE_UINT16_BE (dy + i, Y0);
+ }
+ }
+ }
+
#define PACK_P012_LE GST_VIDEO_FORMAT_AYUV64, unpack_P012_LE, 1, pack_P012_LE
/* Unpack P012_LE (semi-planar 4:2:0; 12 valid bits stored in the MSBs of
 * little-endian 16-bit words) into AYUV64.  U/V are duplicated for both
 * pixels of a horizontal pair. */
static void
unpack_P012_LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
    gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
    const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
{
  int i;
  gint uv = GET_UV_420 (y, flags);      /* chroma line matching luma line y */
  const guint16 *restrict sy = GET_PLANE_LINE (0, y);
  const guint16 *restrict suv = GET_PLANE_LINE (1, uv);
  guint16 *restrict d = dest, Y0, Y1, U, V;

  sy += x;
  suv += (x & ~1);              /* chroma is shared by even/odd pixel pairs */

  if (x & 1) {
    /* Odd start position: emit the second pixel of its chroma pair first,
     * so the main loop below can run pair-aligned. */
    Y0 = GST_READ_UINT16_LE (sy);
    U = GST_READ_UINT16_LE (suv);
    V = GST_READ_UINT16_LE (suv + 1);

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      /* Replicate the 12 MSBs into the 4 padding LSBs so the value covers
       * the full 16-bit range. */
      Y0 |= (Y0 >> 12);
      U |= (U >> 12);
      V |= (V >> 12);
    }

    d[0] = 0xffff;
    d[1] = Y0;
    d[2] = U;
    d[3] = V;
    width--;
    d += 4;
    sy += 1;
    suv += 2;
  }

  /* Main loop: two pixels per iteration sharing one U/V sample pair. */
  for (i = 0; i < width / 2; i++) {
    Y0 = GST_READ_UINT16_LE (sy + 2 * i);
    Y1 = GST_READ_UINT16_LE (sy + 2 * i + 1);
    U = GST_READ_UINT16_LE (suv + 2 * i);
    V = GST_READ_UINT16_LE (suv + 2 * i + 1);

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      Y0 |= (Y0 >> 12);
      Y1 |= (Y1 >> 12);
      U |= (U >> 12);
      V |= (V >> 12);
    }

    d[i * 8 + 0] = 0xffff;
    d[i * 8 + 1] = Y0;
    d[i * 8 + 2] = U;
    d[i * 8 + 3] = V;
    d[i * 8 + 4] = 0xffff;
    d[i * 8 + 5] = Y1;
    d[i * 8 + 6] = U;
    d[i * 8 + 7] = V;
  }

  if (width & 1) {
    /* Trailing odd pixel: first pixel of a chroma pair. */
    gint i = width - 1;

    Y0 = GST_READ_UINT16_LE (sy + i);
    U = GST_READ_UINT16_LE (suv + i);
    V = GST_READ_UINT16_LE (suv + i + 1);

    if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
      Y0 |= (Y0 >> 12);
      U |= (U >> 12);
      V |= (V >> 12);
    }

    d[i * 4 + 0] = 0xffff;
    d[i * 4 + 1] = Y0;
    d[i * 4 + 2] = U;
    d[i * 4 + 3] = V;
  }
}
+
+ /* Pack one AYUV64 line back into P012_LE.  The luma plane is written on
+  * every line; the interleaved UV plane only on 4:2:0 chroma lines
+  * (IS_CHROMA_LINE_420), taking the chroma of the first pixel of each
+  * 2-pixel group.  Only the 12 MSBs are kept (low 4 bits zeroed). */
+ static void
+ pack_P012_LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+     gint y, gint width)
+ {
+   int i;
+   gint uv = GET_UV_420 (y, flags);
+   guint16 *restrict dy = GET_PLANE_LINE (0, y);
+   guint16 *restrict duv = GET_PLANE_LINE (1, uv);
+   guint16 Y0, Y1, U, V;
+   const guint16 *restrict s = src;
+
+   if (IS_CHROMA_LINE_420 (y, flags)) {
+     for (i = 0; i < width / 2; i++) {
+       Y0 = s[i * 8 + 1] & 0xfff0;
+       Y1 = s[i * 8 + 5] & 0xfff0;
+       U = s[i * 8 + 2] & 0xfff0;        /* chroma of the first pixel only */
+       V = s[i * 8 + 3] & 0xfff0;
+
+       GST_WRITE_UINT16_LE (dy + i * 2 + 0, Y0);
+       GST_WRITE_UINT16_LE (dy + i * 2 + 1, Y1);
+       GST_WRITE_UINT16_LE (duv + i * 2 + 0, U);
+       GST_WRITE_UINT16_LE (duv + i * 2 + 1, V);
+     }
+     if (width & 1) {
+       /* trailing odd pixel: write its luma and its own UV pair */
+       gint i = width - 1;
+
+       Y0 = s[i * 4 + 1] & 0xfff0;
+       U = s[i * 4 + 2] & 0xfff0;
+       V = s[i * 4 + 3] & 0xfff0;
+
+       GST_WRITE_UINT16_LE (dy + i, Y0);
+       GST_WRITE_UINT16_LE (duv + i + 0, U);
+       GST_WRITE_UINT16_LE (duv + i + 1, V);
+     }
+   } else {
+     /* non-chroma line: only the luma plane is written */
+     for (i = 0; i < width; i++) {
+       Y0 = s[i * 4 + 1] & 0xfff0;
+       GST_WRITE_UINT16_LE (dy + i, Y0);
+     }
+   }
+ }
+
+ #define PACK_Y212_BE GST_VIDEO_FORMAT_AYUV64, unpack_Y212_BE, 1, pack_Y212_BE
+ /* Unpack Y212_BE (packed 4:2:2, layout Y0 U Y1 V, 12 significant bits
+  * MSB-aligned in big-endian 16-bit words) into AYUV64.  Alpha is set to
+  * 0xffff.  Unless GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE is set, the 12
+  * MSBs are replicated into the 4 padding LSBs to cover the full 16-bit
+  * range. */
+ static void
+ unpack_Y212_BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+   int i;
+   const guint8 *restrict s = GET_LINE (y);
+   guint16 *restrict d = dest;
+   guint Y0, Y1, U, V;
+
+   /* 8 bytes per 2-pixel group */
+   s += GST_ROUND_DOWN_2 (x) * 4;
+
+   if (x & 1) {
+     /* leading odd pixel: take Y1 plus the UV pair of its group */
+     Y1 = GST_READ_UINT16_BE (s + 4);
+     U = GST_READ_UINT16_BE (s + 2);
+     V = GST_READ_UINT16_BE (s + 6);
+
+     if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+       Y1 |= (Y1 >> 12);
+       U |= (U >> 12);
+       V |= (V >> 12);
+     }
+
+     d[0] = 0xffff;
+     d[1] = Y1;
+     d[2] = U;
+     d[3] = V;
+     s += 8;
+     d += 4;
+     width--;
+   }
+
+   for (i = 0; i < width / 2; i++) {
+     Y0 = GST_READ_UINT16_BE (s + i * 8 + 0);
+     U = GST_READ_UINT16_BE (s + i * 8 + 2);
+     V = GST_READ_UINT16_BE (s + i * 8 + 6);
+     Y1 = GST_READ_UINT16_BE (s + i * 8 + 4);
+
+     if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+       Y0 |= (Y0 >> 12);
+       /* fix: Y1 must be range-extended like every other component */
+       Y1 |= (Y1 >> 12);
+       U |= (U >> 12);
+       V |= (V >> 12);
+     }
+
+     d[i * 8 + 0] = 0xffff;
+     d[i * 8 + 1] = Y0;
+     d[i * 8 + 2] = U;
+     d[i * 8 + 3] = V;
+
+     d[i * 8 + 4] = 0xffff;
+     d[i * 8 + 5] = Y1;
+     d[i * 8 + 6] = U;
+     d[i * 8 + 7] = V;
+   }
+
+   if (width & 1) {
+     /* trailing odd pixel: Y0 plus the UV pair of its group */
+     i = width - 1;
+
+     Y0 = GST_READ_UINT16_BE (s + i * 4 + 0);
+     U = GST_READ_UINT16_BE (s + i * 4 + 2);
+     V = GST_READ_UINT16_BE (s + i * 4 + 6);
+
+     if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+       Y0 |= (Y0 >> 12);
+       U |= (U >> 12);
+       V |= (V >> 12);
+     }
+
+     d[i * 4 + 0] = 0xffff;
+     d[i * 4 + 1] = Y0;
+     d[i * 4 + 2] = U;
+     d[i * 4 + 3] = V;
+   }
+ }
+
+ /* Pack AYUV64 back into Y212_BE (Y0 U Y1 V per 2-pixel group, 12
+  * significant bits MSB-aligned per 16-bit word).  Chroma is taken from
+  * the first pixel of each pair; an odd trailing pixel has its luma
+  * duplicated into both Y slots. */
+ static void
+ pack_Y212_BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+     gint y, gint width)
+ {
+   gint j;
+   guint16 luma0, luma1, cb, cr;
+   guint8 *restrict dp = GET_LINE (y);
+   const guint16 *restrict sp = src;
+
+   for (j = 0; j < width; j += 2) {
+     luma0 = sp[j * 4 + 1] & 0xfff0;
+     cb = sp[j * 4 + 2] & 0xfff0;
+     cr = sp[j * 4 + 3] & 0xfff0;
+     luma1 = (j == width - 1) ? luma0 : (sp[(j + 1) * 4 + 1] & 0xfff0);
+
+     GST_WRITE_UINT16_BE (dp + j * 4 + 0, luma0);
+     GST_WRITE_UINT16_BE (dp + j * 4 + 2, cb);
+     GST_WRITE_UINT16_BE (dp + j * 4 + 4, luma1);
+     GST_WRITE_UINT16_BE (dp + j * 4 + 6, cr);
+   }
+ }
+
+ #define PACK_Y212_LE GST_VIDEO_FORMAT_AYUV64, unpack_Y212_LE, 1, pack_Y212_LE
+ /* Unpack Y212_LE (packed 4:2:2, layout Y0 U Y1 V, 12 significant bits
+  * MSB-aligned in little-endian 16-bit words) into AYUV64.  Alpha is set
+  * to 0xffff.  Unless GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE is set, the 12
+  * MSBs are replicated into the 4 padding LSBs to cover the full 16-bit
+  * range. */
+ static void
+ unpack_Y212_LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+   int i;
+   const guint8 *restrict s = GET_LINE (y);
+   guint16 *restrict d = dest;
+   guint Y0, Y1, U, V;
+
+   /* 8 bytes per 2-pixel group */
+   s += GST_ROUND_DOWN_2 (x) * 4;
+
+   if (x & 1) {
+     /* leading odd pixel: take Y1 plus the UV pair of its group */
+     Y1 = GST_READ_UINT16_LE (s + 4);
+     U = GST_READ_UINT16_LE (s + 2);
+     V = GST_READ_UINT16_LE (s + 6);
+
+     if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+       Y1 |= (Y1 >> 12);
+       U |= (U >> 12);
+       V |= (V >> 12);
+     }
+
+     d[0] = 0xffff;
+     d[1] = Y1;
+     d[2] = U;
+     d[3] = V;
+     s += 8;
+     d += 4;
+     width--;
+   }
+
+   for (i = 0; i < width / 2; i++) {
+     Y0 = GST_READ_UINT16_LE (s + i * 8 + 0);
+     U = GST_READ_UINT16_LE (s + i * 8 + 2);
+     V = GST_READ_UINT16_LE (s + i * 8 + 6);
+     Y1 = GST_READ_UINT16_LE (s + i * 8 + 4);
+
+     if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+       Y0 |= (Y0 >> 12);
+       /* fix: Y1 must be range-extended like every other component */
+       Y1 |= (Y1 >> 12);
+       U |= (U >> 12);
+       V |= (V >> 12);
+     }
+
+     d[i * 8 + 0] = 0xffff;
+     d[i * 8 + 1] = Y0;
+     d[i * 8 + 2] = U;
+     d[i * 8 + 3] = V;
+
+     d[i * 8 + 4] = 0xffff;
+     d[i * 8 + 5] = Y1;
+     d[i * 8 + 6] = U;
+     d[i * 8 + 7] = V;
+   }
+
+   if (width & 1) {
+     /* trailing odd pixel: Y0 plus the UV pair of its group */
+     i = width - 1;
+
+     Y0 = GST_READ_UINT16_LE (s + i * 4 + 0);
+     U = GST_READ_UINT16_LE (s + i * 4 + 2);
+     V = GST_READ_UINT16_LE (s + i * 4 + 6);
+
+     if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+       Y0 |= (Y0 >> 12);
+       U |= (U >> 12);
+       V |= (V >> 12);
+     }
+
+     d[i * 4 + 0] = 0xffff;
+     d[i * 4 + 1] = Y0;
+     d[i * 4 + 2] = U;
+     d[i * 4 + 3] = V;
+   }
+ }
+
+ /* Pack AYUV64 back into Y212_LE (Y0 U Y1 V per 2-pixel group, 12
+  * significant bits MSB-aligned per 16-bit word).  Chroma is taken from
+  * the first pixel of each pair; an odd trailing pixel has its luma
+  * duplicated into both Y slots. */
+ static void
+ pack_Y212_LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+     gint y, gint width)
+ {
+   gint j;
+   guint16 luma0, luma1, cb, cr;
+   guint8 *restrict dp = GET_LINE (y);
+   const guint16 *restrict sp = src;
+
+   for (j = 0; j < width; j += 2) {
+     luma0 = sp[j * 4 + 1] & 0xfff0;
+     cb = sp[j * 4 + 2] & 0xfff0;
+     cr = sp[j * 4 + 3] & 0xfff0;
+     luma1 = (j == width - 1) ? luma0 : (sp[(j + 1) * 4 + 1] & 0xfff0);
+
+     GST_WRITE_UINT16_LE (dp + j * 4 + 0, luma0);
+     GST_WRITE_UINT16_LE (dp + j * 4 + 2, cb);
+     GST_WRITE_UINT16_LE (dp + j * 4 + 4, luma1);
+     GST_WRITE_UINT16_LE (dp + j * 4 + 6, cr);
+   }
+ }
+
+ #define PACK_Y412_BE GST_VIDEO_FORMAT_AYUV64, unpack_Y412_BE, 1, pack_Y412_BE
+ /* Unpack Y412_BE (packed U-Y-V-A, 12 significant bits MSB-aligned in
+  * big-endian 16-bit words) into AYUV64. */
+ static void
+ unpack_Y412_BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+   gint j;
+   const guint16 *restrict sp = GET_LINE (y);
+   guint16 *restrict dp = dest;
+   guint16 alpha, luma, cb, cr;
+
+   /* 4 components of 16 bits per pixel */
+   sp += x * 4;
+
+   for (j = 0; j < width; j++, sp += 4, dp += 4) {
+     cb = GST_READ_UINT16_BE (sp + 0) & 0xfff0;
+     luma = GST_READ_UINT16_BE (sp + 1) & 0xfff0;
+     cr = GST_READ_UINT16_BE (sp + 2) & 0xfff0;
+     alpha = GST_READ_UINT16_BE (sp + 3) & 0xfff0;
+
+     if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+       /* replicate the 12 MSBs into the 4 padding LSBs */
+       cb |= cb >> 12;
+       luma |= luma >> 12;
+       cr |= cr >> 12;
+       alpha |= alpha >> 12;
+     }
+
+     dp[0] = alpha;
+     dp[1] = luma;
+     dp[2] = cb;
+     dp[3] = cr;
+   }
+ }
+
+ /* Pack AYUV64 back into Y412_BE, storing U-Y-V-A with the 12
+  * significant bits MSB-aligned (low 4 bits zeroed). */
+ static void
+ pack_Y412_BE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+     gint y, gint width)
+ {
+   gint j;
+   guint16 *restrict dp = GET_LINE (y);
+   const guint16 *restrict sp = src;
+
+   /* source is A, Y, U, V; destination order is U, Y, V, A */
+   for (j = 0; j < width; j++, sp += 4, dp += 4) {
+     GST_WRITE_UINT16_BE (dp + 0, sp[2] & 0xfff0);
+     GST_WRITE_UINT16_BE (dp + 1, sp[1] & 0xfff0);
+     GST_WRITE_UINT16_BE (dp + 2, sp[3] & 0xfff0);
+     GST_WRITE_UINT16_BE (dp + 3, sp[0] & 0xfff0);
+   }
+ }
+
+ #define PACK_Y412_LE GST_VIDEO_FORMAT_AYUV64, unpack_Y412_LE, 1, pack_Y412_LE
+ /* Unpack Y412_LE (packed U-Y-V-A, 12 significant bits MSB-aligned in
+  * little-endian 16-bit words) into AYUV64. */
+ static void
+ unpack_Y412_LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+   gint j;
+   const guint16 *restrict sp = GET_LINE (y);
+   guint16 *restrict dp = dest;
+   guint16 alpha, luma, cb, cr;
+
+   /* 4 components of 16 bits per pixel */
+   sp += x * 4;
+
+   for (j = 0; j < width; j++, sp += 4, dp += 4) {
+     cb = GST_READ_UINT16_LE (sp + 0) & 0xfff0;
+     luma = GST_READ_UINT16_LE (sp + 1) & 0xfff0;
+     cr = GST_READ_UINT16_LE (sp + 2) & 0xfff0;
+     alpha = GST_READ_UINT16_LE (sp + 3) & 0xfff0;
+
+     if (!(flags & GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE)) {
+       /* replicate the 12 MSBs into the 4 padding LSBs */
+       cb |= cb >> 12;
+       luma |= luma >> 12;
+       cr |= cr >> 12;
+       alpha |= alpha >> 12;
+     }
+
+     dp[0] = alpha;
+     dp[1] = luma;
+     dp[2] = cb;
+     dp[3] = cr;
+   }
+ }
+
+ /* Pack AYUV64 back into Y412_LE, storing U-Y-V-A with the 12
+  * significant bits MSB-aligned (low 4 bits zeroed). */
+ static void
+ pack_Y412_LE (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+     gint y, gint width)
+ {
+   gint j;
+   guint16 *restrict dp = GET_LINE (y);
+   const guint16 *restrict sp = src;
+
+   /* source is A, Y, U, V; destination order is U, Y, V, A */
+   for (j = 0; j < width; j++, sp += 4, dp += 4) {
+     GST_WRITE_UINT16_LE (dp + 0, sp[2] & 0xfff0);
+     GST_WRITE_UINT16_LE (dp + 1, sp[1] & 0xfff0);
+     GST_WRITE_UINT16_LE (dp + 2, sp[3] & 0xfff0);
+     GST_WRITE_UINT16_LE (dp + 3, sp[0] & 0xfff0);
+   }
+ }
+
+ #define PACK_RGBP GST_VIDEO_FORMAT_ARGB, unpack_RGBP, 1, pack_RGBP
+ /* Unpack planar RGB (8 bits per sample) by feeding the three component
+  * planes through the planar-4:4:4 unpack kernel. */
+ static void
+ unpack_RGBP (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+   const guint8 *restrict rp = GET_R_LINE (y);
+   const guint8 *restrict gp = GET_G_LINE (y);
+   const guint8 *restrict bp = GET_B_LINE (y);
+
+   video_orc_unpack_Y444 (dest, rp + x, gp + x, bp + x, width);
+ }
+
+ /* Pack back into planar RGB via the planar-4:4:4 pack kernel. */
+ static void
+ pack_RGBP (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+     gint y, gint width)
+ {
+   guint8 *restrict rp = GET_R_LINE (y);
+   guint8 *restrict gp = GET_G_LINE (y);
+   guint8 *restrict bp = GET_B_LINE (y);
+
+   video_orc_pack_Y444 (rp, gp, bp, src, width);
+ }
+
+ #define PACK_BGRP GST_VIDEO_FORMAT_ARGB, unpack_BGRP, 1, pack_BGRP
+ /* Unpack planar BGR (8 bits per sample).  GET_R/G/B_LINE resolve the
+  * plane order through the format's plane table, so the body is the same
+  * as the RGBP variant. */
+ static void
+ unpack_BGRP (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     gpointer dest, const gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], gint x, gint y, gint width)
+ {
+   const guint8 *restrict rp = GET_R_LINE (y);
+   const guint8 *restrict gp = GET_G_LINE (y);
+   const guint8 *restrict bp = GET_B_LINE (y);
+
+   video_orc_unpack_Y444 (dest, rp + x, gp + x, bp + x, width);
+ }
+
+ /* Pack back into planar BGR via the planar-4:4:4 pack kernel; plane
+  * order is resolved by GET_R/G/B_LINE through the format's plane table. */
+ static void
+ pack_BGRP (const GstVideoFormatInfo * info, GstVideoPackFlags flags,
+     const gpointer src, gint sstride, gpointer data[GST_VIDEO_MAX_PLANES],
+     const gint stride[GST_VIDEO_MAX_PLANES], GstVideoChromaSite chroma_site,
+     gint y, gint width)
+ {
+   guint8 *restrict rp = GET_R_LINE (y);
+   guint8 *restrict gp = GET_G_LINE (y);
+   guint8 *restrict bp = GET_B_LINE (y);
+
+   video_orc_pack_Y444 (rp, gp, bp, src, width);
+ }
+
+ /* One entry of the format table below: a fourcc (0 when the format has
+  * no fourcc mapping) plus the full pack/unpack description. */
+ typedef struct
+ {
+   guint32 fourcc;
+   GstVideoFormatInfo info;
+ } VideoFormat;
+
+ /* depths: bits, n_components, shift, depth */
+ /* The name encodes per-component depths; the _HI variants store the
+  * significant bits MSB-aligned in 16-bit words (non-zero shifts). */
+ #define DPTH0 0, 0, { 0, 0, 0, 0 }, { 0, 0, 0, 0 }
+ #define DPTH8 8, 1, { 0, 0, 0, 0 }, { 8, 0, 0, 0 }
+ #define DPTH8_32 8, 2, { 0, 0, 0, 0 }, { 8, 32, 0, 0 }
+ #define DPTH888 8, 3, { 0, 0, 0, 0 }, { 8, 8, 8, 0 }
+ #define DPTH8888 8, 4, { 0, 0, 0, 0 }, { 8, 8, 8, 8 }
+ #define DPTH8880 8, 4, { 0, 0, 0, 0 }, { 8, 8, 8, 0 }
+ #define DPTH10 10, 1, { 0, 0, 0, 0 }, { 10, 0, 0, 0 }
+ #define DPTH10_10_10 10, 3, { 0, 0, 0, 0 }, { 10, 10, 10, 0 }
+ #define DPTH10_10_10_10 10, 4, { 0, 0, 0, 0 }, { 10, 10, 10, 10 }
+ #define DPTH10_10_10_HI 16, 3, { 6, 6, 6, 0 }, { 10, 10, 10, 0 }
+ #define DPTH10_10_10_2 10, 4, { 0, 0, 0, 0 }, { 10, 10, 10, 2}
+ #define DPTH12_12_12 12, 3, { 0, 0, 0, 0 }, { 12, 12, 12, 0 }
+ #define DPTH12_12_12_HI 16, 3, { 4, 4, 4, 0 }, { 12, 12, 12, 0 }
+ #define DPTH12_12_12_12 12, 4, { 0, 0, 0, 0 }, { 12, 12, 12, 12 }
+ #define DPTH12_12_12_12_HI 16, 4, { 4, 4, 4, 4 }, { 12, 12, 12, 12 }
+ #define DPTH16 16, 1, { 0, 0, 0, 0 }, { 16, 0, 0, 0 }
+ #define DPTH16_16_16 16, 3, { 0, 0, 0, 0 }, { 16, 16, 16, 0 }
+ #define DPTH16_16_16_16 16, 4, { 0, 0, 0, 0 }, { 16, 16, 16, 16 }
+ #define DPTH555 5, 3, { 10, 5, 0, 0 }, { 5, 5, 5, 0 }
+ #define DPTH565 6, 3, { 11, 5, 0, 0 }, { 5, 6, 5, 0 }
+
+ /* pixel strides */
+ /* byte distance between two consecutive pixels, per component; 0 for
+  * complex layouts with no fixed per-pixel stride */
+ #define PSTR0 { 0, 0, 0, 0 }
+ #define PSTR1 { 1, 0, 0, 0 }
+ #define PSTR14 { 1, 4, 0, 0 }
+ #define PSTR111 { 1, 1, 1, 0 }
+ #define PSTR1111 { 1, 1, 1, 1 }
+ #define PSTR122 { 1, 2, 2, 0 }
+ #define PSTR1221 { 1, 2, 2, 1 }
+ #define PSTR2 { 2, 0, 0, 0 }
+ #define PSTR222 { 2, 2, 2, 0 }
+ #define PSTR2222 { 2, 2, 2, 2 }
+ #define PSTR244 { 2, 4, 4, 0 }
+ #define PSTR444 { 4, 4, 4, 0 }
+ #define PSTR4444 { 4, 4, 4, 4 }
+ #define PSTR333 { 3, 3, 3, 0 }
+ #define PSTR488 { 4, 8, 8, 0 }
+ #define PSTR8888 { 8, 8, 8, 8 }
+
+ /* planes, in what plane do we find component N */
+ /* first value is the number of planes */
+ #define PLANE_NA 0, { 0, 0, 0, 0 }
+ #define PLANE0 1, { 0, 0, 0, 0 }
+ #define PLANE01 2, { 0, 1, 0, 0 }
+ #define PLANE011 2, { 0, 1, 1, 0 }
+ #define PLANE0112 3, { 0, 1, 1, 2 }
+ #define PLANE012 3, { 0, 1, 2, 0 }
+ #define PLANE0123 4, { 0, 1, 2, 3 }
+ #define PLANE021 3, { 0, 2, 1, 0 }
+ #define PLANE201 3, { 2, 0, 1, 0 }
+ #define PLANE2013 4, { 2, 0, 1, 3 }
+ #define PLANE210 3, { 2, 1, 0, 0 }
+
+ /* offsets */
+ /* byte offset of each component within its pixel group / plane line */
+ #define OFFS0 { 0, 0, 0, 0 }
+ #define OFFS013 { 0, 1, 3, 0 }
+ #define OFFS102 { 1, 0, 2, 0 }
+ #define OFFS1230 { 1, 2, 3, 0 }
+ #define OFFS012 { 0, 1, 2, 0 }
+ #define OFFS210 { 2, 1, 0, 0 }
+ #define OFFS123 { 1, 2, 3, 0 }
+ #define OFFS321 { 3, 2, 1, 0 }
+ #define OFFS0123 { 0, 1, 2, 3 }
+ #define OFFS2103 { 2, 1, 0, 3 }
+ #define OFFS3210 { 3, 2, 1, 0 }
+ #define OFFS031 { 0, 3, 1, 0 }
+ #define OFFS204 { 2, 0, 4, 0 }
+ #define OFFS001 { 0, 0, 1, 0 }
+ #define OFFS010 { 0, 1, 0, 0 }
+ #define OFFS104 { 1, 0, 4, 0 }
+ #define OFFS2460 { 2, 4, 6, 0 }
+
+ /* subsampling, w_sub, h_sub */
+ /* right-shift applied to the image width/height to get each
+  * component's size; SUB4/SUB44/SUB444/SUB4444 are all unsubsampled and
+  * differ only in the component count implied by the name */
+ #define SUB410 { 0, 2, 2, 0 }, { 0, 2, 2, 0 }
+ #define SUB411 { 0, 2, 2, 0 }, { 0, 0, 0, 0 }
+ #define SUB420 { 0, 1, 1, 0 }, { 0, 1, 1, 0 }
+ #define SUB422 { 0, 1, 1, 0 }, { 0, 0, 0, 0 }
+ #define SUB4 { 0, 0, 0, 0 }, { 0, 0, 0, 0 }
+ #define SUB44 { 0, 0, 0, 0 }, { 0, 0, 0, 0 }
+ #define SUB444 { 0, 0, 0, 0 }, { 0, 0, 0, 0 }
+ #define SUB4444 { 0, 0, 0, 0 }, { 0, 0, 0, 0 }
+ #define SUB4204 { 0, 1, 1, 0 }, { 0, 1, 1, 0 }
+ #define SUB4224 { 0, 1, 1, 0 }, { 0, 0, 0, 0 }
+
+ /* tile_mode, tile_ws (width shift), tile_hs (height shift) */
+ /* the shifts are log2 of the tile dimensions in pixels */
+ #define TILE_4x4(mode) GST_VIDEO_TILE_MODE_ ##mode, 2, 2
+ #define TILE_32x32(mode) GST_VIDEO_TILE_MODE_ ##mode, 5, 5
+ #define TILE_64x32(mode) GST_VIDEO_TILE_MODE_ ##mode, 6, 5
+
+ /* Table-entry builders: each expands to a { fourcc, GstVideoFormatInfo }
+  * initializer.  The name encodes the flag set: color class
+  * (YUV/RGB/GRAY), A = alpha, LE = little-endian storage, PACK = the
+  * format is a preferred unpack target, C = complex (no per-pixel
+  * addressing), T = tiled, P = palette. */
+ #define MAKE_YUV_FORMAT(name, desc, fourcc, depth, pstride, plane, offs, sub, pack ) \
+ { fourcc, {GST_VIDEO_FORMAT_ ##name, G_STRINGIFY(name), desc, GST_VIDEO_FORMAT_FLAG_YUV, depth, pstride, plane, offs, sub, pack } }
+ #define MAKE_YUV_LE_FORMAT(name, desc, fourcc, depth, pstride, plane, offs, sub, pack ) \
+ { fourcc, {GST_VIDEO_FORMAT_ ##name, G_STRINGIFY(name), desc, GST_VIDEO_FORMAT_FLAG_YUV | GST_VIDEO_FORMAT_FLAG_LE, depth, pstride, plane, offs, sub, pack } }
+ #define MAKE_YUVA_FORMAT(name, desc, fourcc, depth, pstride, plane, offs, sub, pack) \
+ { fourcc, {GST_VIDEO_FORMAT_ ##name, G_STRINGIFY(name), desc, GST_VIDEO_FORMAT_FLAG_YUV | GST_VIDEO_FORMAT_FLAG_ALPHA, depth, pstride, plane, offs, sub, pack } }
+ #define MAKE_YUVA_LE_FORMAT(name, desc, fourcc, depth, pstride, plane, offs, sub, pack ) \
+ { fourcc, {GST_VIDEO_FORMAT_ ##name, G_STRINGIFY(name), desc, GST_VIDEO_FORMAT_FLAG_YUV | GST_VIDEO_FORMAT_FLAG_ALPHA | GST_VIDEO_FORMAT_FLAG_LE, depth, pstride, plane, offs, sub, pack } }
+ #define MAKE_YUVA_PACK_FORMAT(name, desc, fourcc, depth, pstride, plane, offs, sub, pack) \
+ { fourcc, {GST_VIDEO_FORMAT_ ##name, G_STRINGIFY(name), desc, GST_VIDEO_FORMAT_FLAG_YUV | GST_VIDEO_FORMAT_FLAG_ALPHA | GST_VIDEO_FORMAT_FLAG_UNPACK, depth, pstride, plane, offs, sub, pack } }
+ #define MAKE_YUVA_LE_PACK_FORMAT(name, desc, fourcc, depth, pstride, plane, offs, sub, pack) \
+ { fourcc, {GST_VIDEO_FORMAT_ ##name, G_STRINGIFY(name), desc, GST_VIDEO_FORMAT_FLAG_YUV | GST_VIDEO_FORMAT_FLAG_ALPHA | GST_VIDEO_FORMAT_FLAG_UNPACK | GST_VIDEO_FORMAT_FLAG_LE, depth, pstride, plane, offs, sub, pack } }
+ #define MAKE_YUV_C_FORMAT(name, desc, fourcc, depth, pstride, plane, offs, sub, pack) \
+ { fourcc, {GST_VIDEO_FORMAT_ ##name, G_STRINGIFY(name), desc, GST_VIDEO_FORMAT_FLAG_YUV | GST_VIDEO_FORMAT_FLAG_COMPLEX, depth, pstride, plane, offs, sub, pack } }
+ #define MAKE_YUV_C_LE_FORMAT(name, desc, fourcc, depth, pstride, plane, offs, sub, pack) \
+ { fourcc, {GST_VIDEO_FORMAT_ ##name, G_STRINGIFY(name), desc, GST_VIDEO_FORMAT_FLAG_YUV | GST_VIDEO_FORMAT_FLAG_COMPLEX | GST_VIDEO_FORMAT_FLAG_LE, depth, pstride, plane, offs, sub, pack } }
+ #define MAKE_YUV_T_FORMAT(name, desc, fourcc, depth, pstride, plane, offs, sub, pack, tile) \
+ { fourcc, {GST_VIDEO_FORMAT_ ##name, G_STRINGIFY(name), desc, GST_VIDEO_FORMAT_FLAG_YUV | GST_VIDEO_FORMAT_FLAG_COMPLEX | GST_VIDEO_FORMAT_FLAG_TILED, depth, pstride, plane, offs, sub, pack, tile } }
+
+ #define MAKE_RGB_FORMAT(name, desc, depth, pstride, plane, offs, sub, pack) \
+ { 0x00000000, {GST_VIDEO_FORMAT_ ##name, G_STRINGIFY(name), desc, GST_VIDEO_FORMAT_FLAG_RGB, depth, pstride, plane, offs, sub, pack } }
+ #define MAKE_RGB_LE_FORMAT(name, desc, depth, pstride, plane, offs, sub, pack) \
+ { 0x00000000, {GST_VIDEO_FORMAT_ ##name, G_STRINGIFY(name), desc, GST_VIDEO_FORMAT_FLAG_RGB | GST_VIDEO_FORMAT_FLAG_LE, depth, pstride, plane, offs, sub, pack } }
+ #define MAKE_RGBA_FORMAT(name, desc, depth, pstride, plane, offs, sub, pack) \
+ { 0x00000000, {GST_VIDEO_FORMAT_ ##name, G_STRINGIFY(name), desc, GST_VIDEO_FORMAT_FLAG_RGB | GST_VIDEO_FORMAT_FLAG_ALPHA, depth, pstride, plane, offs, sub, pack } }
+ #define MAKE_RGBA_LE_FORMAT(name, desc, depth, pstride, plane, offs, sub, pack) \
+ { 0x00000000, {GST_VIDEO_FORMAT_ ##name, G_STRINGIFY(name), desc, GST_VIDEO_FORMAT_FLAG_RGB | GST_VIDEO_FORMAT_FLAG_ALPHA | GST_VIDEO_FORMAT_FLAG_LE, depth, pstride, plane, offs, sub, pack } }
+ #define MAKE_RGBAP_FORMAT(name, desc, depth, pstride, plane, offs, sub, pack) \
+ { 0x00000000, {GST_VIDEO_FORMAT_ ##name, G_STRINGIFY(name), desc, GST_VIDEO_FORMAT_FLAG_RGB | GST_VIDEO_FORMAT_FLAG_ALPHA | GST_VIDEO_FORMAT_FLAG_PALETTE, depth, pstride, plane, offs, sub, pack } }
+ #define MAKE_RGBA_PACK_FORMAT(name, desc, depth, pstride, plane, offs, sub, pack) \
+ { 0x00000000, {GST_VIDEO_FORMAT_ ##name, G_STRINGIFY(name), desc, GST_VIDEO_FORMAT_FLAG_RGB | GST_VIDEO_FORMAT_FLAG_ALPHA | GST_VIDEO_FORMAT_FLAG_UNPACK, depth, pstride, plane, offs, sub, pack } }
+ #define MAKE_RGBA_LE_PACK_FORMAT(name, desc, depth, pstride, plane, offs, sub, pack) \
+ { 0x00000000, {GST_VIDEO_FORMAT_ ##name, G_STRINGIFY(name), desc, GST_VIDEO_FORMAT_FLAG_RGB | GST_VIDEO_FORMAT_FLAG_ALPHA | GST_VIDEO_FORMAT_FLAG_UNPACK | GST_VIDEO_FORMAT_FLAG_LE, depth, pstride, plane, offs, sub, pack } }
+
+ #define MAKE_GRAY_FORMAT(name, desc, depth, pstride, plane, offs, sub, pack) \
+ { 0x00000000, {GST_VIDEO_FORMAT_ ##name, G_STRINGIFY(name), desc, GST_VIDEO_FORMAT_FLAG_GRAY, depth, pstride, plane, offs, sub, pack } }
+ #define MAKE_GRAY_LE_FORMAT(name, desc, depth, pstride, plane, offs, sub, pack) \
+ { 0x00000000, {GST_VIDEO_FORMAT_ ##name, G_STRINGIFY(name), desc, GST_VIDEO_FORMAT_FLAG_GRAY | GST_VIDEO_FORMAT_FLAG_LE, depth, pstride, plane, offs, sub, pack } }
+ #define MAKE_GRAY_C_LE_FORMAT(name, desc, depth, pstride, plane, offs, sub, pack) \
+ { 0x00000000, {GST_VIDEO_FORMAT_ ##name, G_STRINGIFY(name), desc, GST_VIDEO_FORMAT_FLAG_GRAY | GST_VIDEO_FORMAT_FLAG_COMPLEX | GST_VIDEO_FORMAT_FLAG_LE, depth, pstride, plane, offs, sub, pack } }
+
+ static const VideoFormat formats[] = {
+ {0x00000000, {GST_VIDEO_FORMAT_UNKNOWN, "UNKNOWN", "unknown video", 0, DPTH0,
+ PSTR0, PLANE_NA, OFFS0}},
+ {0x00000000, {GST_VIDEO_FORMAT_ENCODED, "ENCODED", "encoded video",
+ GST_VIDEO_FORMAT_FLAG_COMPLEX, DPTH0, PSTR0, PLANE_NA, OFFS0}},
+
+ MAKE_YUV_FORMAT (I420, "raw video", GST_MAKE_FOURCC ('I', '4', '2', '0'),
+ DPTH888, PSTR111, PLANE012, OFFS0, SUB420, PACK_420),
++ MAKE_YUV_FORMAT (S420, "raw video", GST_MAKE_FOURCC ('S', '4', '2', '0'),
++ DPTH888, PSTR111, PLANE012, OFFS0, SUB420, PACK_420),
+ MAKE_YUV_FORMAT (YV12, "raw video", GST_MAKE_FOURCC ('Y', 'V', '1', '2'),
+ DPTH888, PSTR111, PLANE021, OFFS0, SUB420, PACK_420),
+ MAKE_YUV_FORMAT (YUY2, "raw video", GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'),
+ DPTH888, PSTR244, PLANE0, OFFS013, SUB422, PACK_YUY2),
+ MAKE_YUV_FORMAT (UYVY, "raw video", GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'),
+ DPTH888, PSTR244, PLANE0, OFFS102, SUB422, PACK_UYVY),
++ MAKE_YUV_FORMAT (ITLV, "raw video", GST_MAKE_FOURCC ('I', 'T', 'L', 'V'),
++ DPTH888, PSTR244, PLANE0, OFFS012, SUB422, PACK_UYVY),
+ MAKE_YUVA_PACK_FORMAT (AYUV, "raw video", GST_MAKE_FOURCC ('A', 'Y', 'U',
+ 'V'), DPTH8888, PSTR4444, PLANE0, OFFS1230, SUB4444, PACK_AYUV),
+ MAKE_RGB_FORMAT (RGBx, "raw video", DPTH888, PSTR444, PLANE0, OFFS012,
+ SUB444, PACK_RGBA),
+ MAKE_RGB_FORMAT (BGRx, "raw video", DPTH888, PSTR444, PLANE0, OFFS210,
+ SUB444, PACK_BGRA),
+ MAKE_RGB_FORMAT (xRGB, "raw video", DPTH888, PSTR444, PLANE0, OFFS123,
+ SUB444, PACK_ARGB),
+ MAKE_RGB_FORMAT (xBGR, "raw video", DPTH888, PSTR444, PLANE0, OFFS321,
+ SUB444, PACK_ABGR),
+ MAKE_RGBA_FORMAT (RGBA, "raw video", DPTH8888, PSTR4444, PLANE0, OFFS0123,
+ SUB4444, PACK_RGBA),
+ MAKE_RGBA_FORMAT (BGRA, "raw video", DPTH8888, PSTR4444, PLANE0, OFFS2103,
+ SUB4444, PACK_BGRA),
++ MAKE_RGBA_FORMAT (SR32, "raw video", DPTH8888, PSTR4444, PLANE0, OFFS2103,
++ SUB4444, PACK_BGRA),
+ MAKE_RGBA_PACK_FORMAT (ARGB, "raw video", DPTH8888, PSTR4444, PLANE0,
+ OFFS1230, SUB4444, PACK_ARGB),
+ MAKE_RGBA_FORMAT (ABGR, "raw video", DPTH8888, PSTR4444, PLANE0, OFFS3210,
+ SUB4444, PACK_ABGR),
+ MAKE_RGB_FORMAT (RGB, "raw video", DPTH888, PSTR333, PLANE0, OFFS012, SUB444,
+ PACK_RGB),
+ MAKE_RGB_FORMAT (BGR, "raw video", DPTH888, PSTR333, PLANE0, OFFS210, SUB444,
+ PACK_BGR),
+
+ MAKE_YUV_FORMAT (Y41B, "raw video", GST_MAKE_FOURCC ('Y', '4', '1', 'B'),
+ DPTH888, PSTR111, PLANE012, OFFS0, SUB411, PACK_Y41B),
+ MAKE_YUV_FORMAT (Y42B, "raw video", GST_MAKE_FOURCC ('Y', '4', '2', 'B'),
+ DPTH888, PSTR111, PLANE012, OFFS0, SUB422, PACK_Y42B),
+ MAKE_YUV_FORMAT (YVYU, "raw video", GST_MAKE_FOURCC ('Y', 'V', 'Y', 'U'),
+ DPTH888, PSTR244, PLANE0, OFFS031, SUB422, PACK_YVYU),
+ MAKE_YUV_FORMAT (Y444, "raw video", GST_MAKE_FOURCC ('Y', '4', '4', '4'),
+ DPTH888, PSTR111, PLANE012, OFFS0, SUB444, PACK_Y444),
+ MAKE_YUV_C_FORMAT (v210, "raw video", GST_MAKE_FOURCC ('v', '2', '1', '0'),
+ DPTH10_10_10, PSTR0, PLANE0, OFFS0, SUB422, PACK_v210),
+ MAKE_YUV_FORMAT (v216, "raw video", GST_MAKE_FOURCC ('v', '2', '1', '6'),
+ DPTH16_16_16, PSTR488, PLANE0, OFFS204, SUB422, PACK_v216),
+ MAKE_YUV_FORMAT (NV12, "raw video", GST_MAKE_FOURCC ('N', 'V', '1', '2'),
+ DPTH888, PSTR122, PLANE011, OFFS001, SUB420, PACK_NV12),
++ MAKE_YUV_FORMAT (SN12, "raw video", GST_MAKE_FOURCC ('S', 'N', '1', '2'),
++ DPTH888, PSTR122, PLANE011, OFFS001, SUB420, PACK_NV12),
++ MAKE_YUV_FORMAT (ST12, "raw video", GST_MAKE_FOURCC ('S', 'T', '1', '2'),
++ DPTH888, PSTR122, PLANE011, OFFS001, SUB420, PACK_NV12),
+ MAKE_YUV_FORMAT (NV21, "raw video", GST_MAKE_FOURCC ('N', 'V', '2', '1'),
+ DPTH888, PSTR122, PLANE011, OFFS010, SUB420, PACK_NV21),
++ MAKE_YUV_FORMAT (SN21, "raw video", GST_MAKE_FOURCC ('S', 'N', '2', '1'),
++ DPTH888, PSTR122, PLANE011, OFFS010, SUB420, PACK_NV21),
+
+ MAKE_GRAY_FORMAT (GRAY8, "raw video", DPTH8, PSTR1, PLANE0, OFFS0, SUB4,
+ PACK_GRAY8),
+ MAKE_GRAY_FORMAT (GRAY16_BE, "raw video", DPTH16, PSTR2, PLANE0, OFFS0, SUB4,
+ PACK_GRAY16_BE),
+ MAKE_GRAY_LE_FORMAT (GRAY16_LE, "raw video", DPTH16, PSTR2, PLANE0, OFFS0,
+ SUB4, PACK_GRAY16_LE),
+
+ MAKE_YUV_FORMAT (v308, "raw video", GST_MAKE_FOURCC ('v', '3', '0', '8'),
+ DPTH888, PSTR333, PLANE0, OFFS012, SUB444, PACK_v308),
+
+ #if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ MAKE_RGB_LE_FORMAT (RGB16, "raw video", DPTH565, PSTR222, PLANE0, OFFS0,
+ SUB444, PACK_RGB16),
+ MAKE_RGB_LE_FORMAT (BGR16, "raw video", DPTH565, PSTR222, PLANE0, OFFS0,
+ SUB444, PACK_BGR16),
+ MAKE_RGB_LE_FORMAT (RGB15, "raw video", DPTH555, PSTR222, PLANE0, OFFS0,
+ SUB444, PACK_RGB15),
+ MAKE_RGB_LE_FORMAT (BGR15, "raw video", DPTH555, PSTR222, PLANE0, OFFS0,
+ SUB444, PACK_BGR15),
+ #else
+ MAKE_RGB_FORMAT (RGB16, "raw video", DPTH565, PSTR222, PLANE0, OFFS0, SUB444,
+ PACK_RGB16),
+ MAKE_RGB_FORMAT (BGR16, "raw video", DPTH565, PSTR222, PLANE0, OFFS0, SUB444,
+ PACK_BGR16),
+ MAKE_RGB_FORMAT (RGB15, "raw video", DPTH555, PSTR222, PLANE0, OFFS0, SUB444,
+ PACK_RGB15),
+ MAKE_RGB_FORMAT (BGR15, "raw video", DPTH555, PSTR222, PLANE0, OFFS0, SUB444,
+ PACK_BGR15),
+ #endif
+
+ MAKE_YUV_C_FORMAT (UYVP, "raw video", GST_MAKE_FOURCC ('U', 'Y', 'V', 'P'),
+ DPTH10_10_10, PSTR0, PLANE0, OFFS0, SUB422, PACK_UYVP),
+ MAKE_YUVA_FORMAT (A420, "raw video", GST_MAKE_FOURCC ('A', '4', '2', '0'),
+ DPTH8888, PSTR1111, PLANE0123, OFFS0, SUB4204, PACK_A420),
+ MAKE_RGBAP_FORMAT (RGB8P, "raw video", DPTH8_32, PSTR14, PLANE01,
+ OFFS0, SUB44, PACK_RGB8P),
+ MAKE_YUV_FORMAT (YUV9, "raw video", GST_MAKE_FOURCC ('Y', 'U', 'V', '9'),
+ DPTH888, PSTR111, PLANE012, OFFS0, SUB410, PACK_410),
+ MAKE_YUV_FORMAT (YVU9, "raw video", GST_MAKE_FOURCC ('Y', 'V', 'U', '9'),
+ DPTH888, PSTR111, PLANE021, OFFS0, SUB410, PACK_410),
+ MAKE_YUV_FORMAT (IYU1, "raw video", GST_MAKE_FOURCC ('I', 'Y', 'U', '1'),
+ DPTH888, PSTR0, PLANE0, OFFS104, SUB411, PACK_IYU1),
+ #if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ MAKE_RGBA_LE_PACK_FORMAT (ARGB64, "raw video", DPTH16_16_16_16, PSTR8888,
+ PLANE0,
+ OFFS2460, SUB444, PACK_ARGB64),
+ MAKE_YUVA_LE_PACK_FORMAT (AYUV64, "raw video", 0x00000000, DPTH16_16_16_16,
+ PSTR8888, PLANE0, OFFS2460, SUB444, PACK_AYUV64),
+ #else
+ MAKE_RGBA_PACK_FORMAT (ARGB64, "raw video", DPTH16_16_16_16, PSTR8888, PLANE0,
+ OFFS2460, SUB444, PACK_ARGB64),
+ MAKE_YUVA_PACK_FORMAT (AYUV64, "raw video", 0x00000000, DPTH16_16_16_16,
+ PSTR8888, PLANE0, OFFS2460, SUB444, PACK_AYUV64),
+ #endif
+ MAKE_RGB_FORMAT (r210, "raw video", DPTH10_10_10, PSTR444, PLANE0, OFFS0,
+ SUB444, PACK_r210),
+ MAKE_YUV_FORMAT (I420_10BE, "raw video", 0x00000000, DPTH10_10_10,
+ PSTR222, PLANE012, OFFS0, SUB420, PACK_I420_10BE),
+ MAKE_YUV_LE_FORMAT (I420_10LE, "raw video", 0x00000000, DPTH10_10_10,
+ PSTR222, PLANE012, OFFS0, SUB420, PACK_I420_10LE),
+ MAKE_YUV_FORMAT (I422_10BE, "raw video", 0x00000000, DPTH10_10_10,
+ PSTR222, PLANE012, OFFS0, SUB422, PACK_I422_10BE),
+ MAKE_YUV_LE_FORMAT (I422_10LE, "raw video", 0x00000000, DPTH10_10_10,
+ PSTR222, PLANE012, OFFS0, SUB422, PACK_I422_10LE),
+ MAKE_YUV_FORMAT (Y444_10BE, "raw video", 0x00000000, DPTH10_10_10,
+ PSTR222, PLANE012, OFFS0, SUB444, PACK_Y444_10BE),
+ MAKE_YUV_LE_FORMAT (Y444_10LE, "raw video", 0x00000000, DPTH10_10_10,
+ PSTR222, PLANE012, OFFS0, SUB444, PACK_Y444_10LE),
+ MAKE_RGB_FORMAT (GBR, "raw video", DPTH888, PSTR111, PLANE201, OFFS0, SUB444,
+ PACK_GBR),
+ MAKE_RGB_FORMAT (GBR_10BE, "raw video", DPTH10_10_10, PSTR222, PLANE201,
+ OFFS0, SUB444, PACK_GBR_10BE),
+ MAKE_RGB_LE_FORMAT (GBR_10LE, "raw video", DPTH10_10_10, PSTR222, PLANE201,
+ OFFS0, SUB444, PACK_GBR_10LE),
+ MAKE_YUV_FORMAT (NV16, "raw video", GST_MAKE_FOURCC ('N', 'V', '1', '6'),
+ DPTH888, PSTR122, PLANE011, OFFS001, SUB422, PACK_NV16),
+ MAKE_YUV_FORMAT (NV24, "raw video", GST_MAKE_FOURCC ('N', 'V', '2', '4'),
+ DPTH888, PSTR122, PLANE011, OFFS001, SUB444, PACK_NV24),
+ MAKE_YUV_T_FORMAT (NV12_64Z32, "raw video",
+ GST_MAKE_FOURCC ('T', 'M', '1', '2'), DPTH888, PSTR122, PLANE011,
+ OFFS001, SUB420, PACK_NV12_TILED, TILE_64x32 (ZFLIPZ_2X2)),
+ MAKE_YUVA_FORMAT (A420_10BE, "raw video", 0x00000000, DPTH10_10_10_10,
+ PSTR2222, PLANE0123, OFFS0, SUB4204, PACK_A420_10BE),
+ MAKE_YUVA_LE_FORMAT (A420_10LE, "raw video", 0x00000000, DPTH10_10_10_10,
+ PSTR2222, PLANE0123, OFFS0, SUB4204, PACK_A420_10LE),
+ MAKE_YUVA_FORMAT (A422_10BE, "raw video", 0x00000000, DPTH10_10_10_10,
+ PSTR2222, PLANE0123, OFFS0, SUB4224, PACK_A422_10BE),
+ MAKE_YUVA_LE_FORMAT (A422_10LE, "raw video", 0x00000000, DPTH10_10_10_10,
+ PSTR2222, PLANE0123, OFFS0, SUB4224, PACK_A422_10LE),
+ MAKE_YUVA_FORMAT (A444_10BE, "raw video", 0x00000000, DPTH10_10_10_10,
+ PSTR2222, PLANE0123, OFFS0, SUB4444, PACK_A444_10BE),
+ MAKE_YUVA_LE_FORMAT (A444_10LE, "raw video", 0x00000000, DPTH10_10_10_10,
+ PSTR2222, PLANE0123, OFFS0, SUB4444, PACK_A444_10LE),
+ MAKE_YUV_FORMAT (NV61, "raw video", GST_MAKE_FOURCC ('N', 'V', '6', '1'),
+ DPTH888, PSTR122, PLANE011, OFFS010, SUB422, PACK_NV61),
+ MAKE_YUV_FORMAT (P010_10BE, "raw video", 0x00000000, DPTH10_10_10_HI,
+ PSTR244, PLANE011, OFFS001, SUB420, PACK_P010_10BE),
+ MAKE_YUV_LE_FORMAT (P010_10LE, "raw video", 0x00000000, DPTH10_10_10_HI,
+ PSTR244, PLANE011, OFFS001, SUB420, PACK_P010_10LE),
+ MAKE_YUV_FORMAT (IYU2, "raw video", GST_MAKE_FOURCC ('I', 'Y', 'U', '2'),
+ DPTH888, PSTR333, PLANE0, OFFS102, SUB444, PACK_IYU2),
+ MAKE_YUV_FORMAT (VYUY, "raw video", GST_MAKE_FOURCC ('V', 'Y', 'U', 'Y'),
+ DPTH888, PSTR244, PLANE0, OFFS102, SUB422, PACK_VYUY),
+ MAKE_RGBA_FORMAT (GBRA, "raw video", DPTH8888, PSTR1111, PLANE2013,
+ OFFS0, SUB4444, PACK_GBRA),
+ MAKE_RGBA_FORMAT (GBRA_10BE, "raw video", DPTH10_10_10_10, PSTR2222,
+ PLANE2013, OFFS0, SUB4444, PACK_GBRA_10BE),
+ MAKE_RGBA_LE_FORMAT (GBRA_10LE, "raw video", DPTH10_10_10_10, PSTR2222,
+ PLANE2013, OFFS0, SUB4444, PACK_GBRA_10LE),
+ MAKE_RGB_FORMAT (GBR_12BE, "raw video", DPTH12_12_12, PSTR222, PLANE201,
+ OFFS0, SUB444, PACK_GBR_12BE),
+ MAKE_RGB_LE_FORMAT (GBR_12LE, "raw video", DPTH12_12_12, PSTR222, PLANE201,
+ OFFS0, SUB444, PACK_GBR_12LE),
+ MAKE_RGBA_FORMAT (GBRA_12BE, "raw video", DPTH12_12_12_12, PSTR2222,
+ PLANE2013, OFFS0, SUB4444, PACK_GBRA_12BE),
+ MAKE_RGBA_LE_PACK_FORMAT (GBRA_12LE, "raw video", DPTH12_12_12_12, PSTR2222,
+ PLANE2013, OFFS0, SUB4444, PACK_GBRA_12LE),
+ MAKE_YUV_FORMAT (I420_12BE, "raw video", 0x00000000, DPTH12_12_12,
+ PSTR222, PLANE012, OFFS0, SUB420, PACK_I420_12BE),
+ MAKE_YUV_LE_FORMAT (I420_12LE, "raw video", 0x00000000, DPTH12_12_12,
+ PSTR222, PLANE012, OFFS0, SUB420, PACK_I420_12LE),
+ MAKE_YUV_FORMAT (I422_12BE, "raw video", 0x00000000, DPTH12_12_12,
+ PSTR222, PLANE012, OFFS0, SUB422, PACK_I422_12BE),
+ MAKE_YUV_LE_FORMAT (I422_12LE, "raw video", 0x00000000, DPTH12_12_12,
+ PSTR222, PLANE012, OFFS0, SUB422, PACK_I422_12LE),
+ MAKE_YUV_FORMAT (Y444_12BE, "raw video", 0x00000000, DPTH12_12_12,
+ PSTR222, PLANE012, OFFS0, SUB444, PACK_Y444_12BE),
+ MAKE_YUV_LE_FORMAT (Y444_12LE, "raw video", 0x00000000, DPTH12_12_12,
+ PSTR222, PLANE012, OFFS0, SUB444, PACK_Y444_12LE),
+ MAKE_GRAY_C_LE_FORMAT (GRAY10_LE32, "raw video", DPTH10, PSTR0, PLANE0, OFFS0,
+ SUB4, PACK_GRAY10_LE32),
+ MAKE_YUV_C_LE_FORMAT (NV12_10LE32, "raw video",
+ GST_MAKE_FOURCC ('X', 'V', '1', '5'), DPTH10_10_10, PSTR0, PLANE011,
+ OFFS001, SUB420, PACK_NV12_10LE32),
+ MAKE_YUV_C_LE_FORMAT (NV16_10LE32, "raw video",
+ GST_MAKE_FOURCC ('X', 'V', '2', '0'), DPTH10_10_10, PSTR0, PLANE011,
+ OFFS001, SUB422, PACK_NV16_10LE32),
+ MAKE_YUV_C_LE_FORMAT (NV12_10LE40, "raw video",
+ GST_MAKE_FOURCC ('R', 'K', '2', '0'), DPTH10_10_10, PSTR0, PLANE011,
+ OFFS0, SUB420, PACK_NV12_10LE40),
+ MAKE_YUV_FORMAT (Y210, "raw video", GST_MAKE_FOURCC ('Y', '2', '1', '0'),
+ DPTH10_10_10, PSTR488, PLANE0, OFFS0, SUB422, PACK_Y210),
+ MAKE_YUV_FORMAT (Y410, "raw video", GST_MAKE_FOURCC ('Y', '4', '1', '0'),
+ DPTH10_10_10_2, PSTR4444, PLANE0, OFFS0, SUB4444, PACK_Y410),
+ MAKE_YUVA_PACK_FORMAT (VUYA, "raw video", GST_MAKE_FOURCC ('V', 'U', 'Y',
+ 'A'), DPTH8888, PSTR4444, PLANE0, OFFS2103, SUB4444, PACK_VUYA),
+ MAKE_RGBA_LE_PACK_FORMAT (BGR10A2_LE, "raw video", DPTH10_10_10_2, PSTR4444,
+ PLANE0,
+ OFFS0, SUB4444, PACK_BGR10A2_LE),
++ MAKE_DEPTH_FORMAT (INVZ, "raw video", GST_MAKE_FOURCC ('I', 'N', 'V', 'Z'),
++ DPTH16, PSTR2, PLANE0, OFFS0, SUB4, PACK_INVZ16_LE),
++#ifdef TIZEN_PROFILE_TV
++ MAKE_YUV_FORMAT (STV0, "raw video", GST_MAKE_FOURCC ('S', 'T', 'V', '0'),
++ DPTH888, PSTR111, PLANE012, OFFS0, SUB420, PACK_420),
++ MAKE_YUV_FORMAT (STV1, "raw video", GST_MAKE_FOURCC ('S', 'T', 'V', '1'),
++ DPTH888, PSTR111, PLANE012, OFFS0, SUB420, PACK_420),
++#endif
+ MAKE_RGBA_LE_PACK_FORMAT (RGB10A2_LE, "raw video", DPTH10_10_10_2, PSTR4444,
+ PLANE0, OFFS0, SUB4444, PACK_RGB10A2_LE),
+ MAKE_YUV_FORMAT (Y444_16BE, "raw video", 0x00000000, DPTH16_16_16,
+ PSTR222, PLANE012, OFFS0, SUB444, PACK_Y444_16BE),
+ MAKE_YUV_LE_FORMAT (Y444_16LE, "raw video", 0x00000000, DPTH16_16_16,
+ PSTR222, PLANE012, OFFS0, SUB444, PACK_Y444_16LE),
+ MAKE_YUV_FORMAT (P016_BE, "raw video", 0x00000000, DPTH16_16_16,
+ PSTR244, PLANE011, OFFS001, SUB420, PACK_P016_BE),
+ MAKE_YUV_LE_FORMAT (P016_LE, "raw video", 0x00000000, DPTH16_16_16,
+ PSTR244, PLANE011, OFFS001, SUB420, PACK_P016_LE),
+ MAKE_YUV_FORMAT (P012_BE, "raw video", 0x00000000, DPTH12_12_12_HI,
+ PSTR244, PLANE011, OFFS001, SUB420, PACK_P012_BE),
+ MAKE_YUV_LE_FORMAT (P012_LE, "raw video", 0x00000000, DPTH12_12_12_HI,
+ PSTR244, PLANE011, OFFS001, SUB420, PACK_P012_LE),
+ MAKE_YUV_FORMAT (Y212_BE, "raw video", 0x00000000, DPTH12_12_12_HI,
+ PSTR488, PLANE0, OFFS0, SUB422, PACK_Y212_BE),
+ MAKE_YUV_LE_FORMAT (Y212_LE, "raw video", 0x00000000, DPTH12_12_12_HI,
+ PSTR488, PLANE0, OFFS0, SUB422, PACK_Y212_LE),
+ MAKE_YUV_FORMAT (Y412_BE, "raw video", 0x00000000, DPTH12_12_12_12_HI,
+ PSTR8888, PLANE0, OFFS0, SUB4444, PACK_Y412_BE),
+ MAKE_YUV_LE_FORMAT (Y412_LE, "raw video", 0x00000000, DPTH12_12_12_12_HI,
+ PSTR8888, PLANE0, OFFS0, SUB4444, PACK_Y412_LE),
+ MAKE_YUV_T_FORMAT (NV12_4L4, "raw video",
+ GST_MAKE_FOURCC ('V', 'T', '1', '2'), DPTH888, PSTR122, PLANE011,
+ OFFS001, SUB420, PACK_NV12_TILED, TILE_4x4 (LINEAR)),
+ MAKE_YUV_T_FORMAT (NV12_32L32, "raw video",
+ GST_MAKE_FOURCC ('S', 'T', '1', '2'), DPTH888, PSTR122, PLANE011,
+ OFFS001, SUB420, PACK_NV12_TILED, TILE_32x32 (LINEAR)),
+ MAKE_RGB_FORMAT (RGBP, "raw video", DPTH888, PSTR111, PLANE012, OFFS0, SUB444,
+ PACK_RGBP),
+ MAKE_RGB_FORMAT (BGRP, "raw video", DPTH888, PSTR111, PLANE210, OFFS0, SUB444,
+ PACK_BGRP),
+ MAKE_YUV_FORMAT (AV12, "raw video", GST_MAKE_FOURCC ('A', 'V', '1', '2'),
+ DPTH8888, PSTR1221, PLANE0112, OFFS001, SUB4204, PACK_AV12),
+ };
+
+ static GstVideoFormat
+ gst_video_format_from_rgb32_masks (int red_mask, int green_mask, int blue_mask)
+ {
+ if (red_mask == 0xff000000 && green_mask == 0x00ff0000 &&
+ blue_mask == 0x0000ff00) {
+ return GST_VIDEO_FORMAT_RGBx;
+ }
+ if (red_mask == 0x0000ff00 && green_mask == 0x00ff0000 &&
+ blue_mask == 0xff000000) {
+ return GST_VIDEO_FORMAT_BGRx;
+ }
+ if (red_mask == 0x00ff0000 && green_mask == 0x0000ff00 &&
+ blue_mask == 0x000000ff) {
+ return GST_VIDEO_FORMAT_xRGB;
+ }
+ if (red_mask == 0x000000ff && green_mask == 0x0000ff00 &&
+ blue_mask == 0x00ff0000) {
+ return GST_VIDEO_FORMAT_xBGR;
+ }
+
+ return GST_VIDEO_FORMAT_UNKNOWN;
+ }
+
+ static GstVideoFormat
+ gst_video_format_from_rgba32_masks (int red_mask, int green_mask,
+ int blue_mask, int alpha_mask)
+ {
+ if (red_mask == 0xff000000 && green_mask == 0x00ff0000 &&
+ blue_mask == 0x0000ff00 && alpha_mask == 0x000000ff) {
+ return GST_VIDEO_FORMAT_RGBA;
+ }
+ if (red_mask == 0x0000ff00 && green_mask == 0x00ff0000 &&
+ blue_mask == 0xff000000 && alpha_mask == 0x000000ff) {
+ return GST_VIDEO_FORMAT_BGRA;
+ }
+ if (red_mask == 0x00ff0000 && green_mask == 0x0000ff00 &&
+ blue_mask == 0x000000ff && alpha_mask == 0xff000000) {
+ return GST_VIDEO_FORMAT_ARGB;
+ }
+ if (red_mask == 0x000000ff && green_mask == 0x0000ff00 &&
+ blue_mask == 0x00ff0000 && alpha_mask == 0xff000000) {
+ return GST_VIDEO_FORMAT_ABGR;
+ }
+ return GST_VIDEO_FORMAT_UNKNOWN;
+ }
+
+ static GstVideoFormat
+ gst_video_format_from_rgb24_masks (int red_mask, int green_mask, int blue_mask)
+ {
+ if (red_mask == 0xff0000 && green_mask == 0x00ff00 && blue_mask == 0x0000ff) {
+ return GST_VIDEO_FORMAT_RGB;
+ }
+ if (red_mask == 0x0000ff && green_mask == 0x00ff00 && blue_mask == 0xff0000) {
+ return GST_VIDEO_FORMAT_BGR;
+ }
+
+ return GST_VIDEO_FORMAT_UNKNOWN;
+ }
+
+ #define GST_VIDEO_COMP1_MASK_16_INT 0xf800
+ #define GST_VIDEO_COMP2_MASK_16_INT 0x07e0
+ #define GST_VIDEO_COMP3_MASK_16_INT 0x001f
+
+ #define GST_VIDEO_COMP1_MASK_15_INT 0x7c00
+ #define GST_VIDEO_COMP2_MASK_15_INT 0x03e0
+ #define GST_VIDEO_COMP3_MASK_15_INT 0x001f
+
+ static GstVideoFormat
+ gst_video_format_from_rgb16_masks (int red_mask, int green_mask, int blue_mask)
+ {
+ if (red_mask == GST_VIDEO_COMP1_MASK_16_INT
+ && green_mask == GST_VIDEO_COMP2_MASK_16_INT
+ && blue_mask == GST_VIDEO_COMP3_MASK_16_INT) {
+ return GST_VIDEO_FORMAT_RGB16;
+ }
+ if (red_mask == GST_VIDEO_COMP3_MASK_16_INT
+ && green_mask == GST_VIDEO_COMP2_MASK_16_INT
+ && blue_mask == GST_VIDEO_COMP1_MASK_16_INT) {
+ return GST_VIDEO_FORMAT_BGR16;
+ }
+ if (red_mask == GST_VIDEO_COMP1_MASK_15_INT
+ && green_mask == GST_VIDEO_COMP2_MASK_15_INT
+ && blue_mask == GST_VIDEO_COMP3_MASK_15_INT) {
+ return GST_VIDEO_FORMAT_RGB15;
+ }
+ if (red_mask == GST_VIDEO_COMP3_MASK_15_INT
+ && green_mask == GST_VIDEO_COMP2_MASK_15_INT
+ && blue_mask == GST_VIDEO_COMP1_MASK_15_INT) {
+ return GST_VIDEO_FORMAT_BGR15;
+ }
+ return GST_VIDEO_FORMAT_UNKNOWN;
+ }
+
/**
 * gst_video_format_from_masks:
 * @depth: the amount of bits used for a pixel
 * @bpp: the amount of bits used to store a pixel. This value is bigger than
 *   @depth
 * @endianness: the endianness of the masks, #G_LITTLE_ENDIAN or #G_BIG_ENDIAN
 * @red_mask: the red mask
 * @green_mask: the green mask
 * @blue_mask: the blue mask
 * @alpha_mask: the alpha mask, or 0 if no alpha mask
 *
 * Find the #GstVideoFormat for the given parameters.
 *
 * Returns: a #GstVideoFormat or GST_VIDEO_FORMAT_UNKNOWN when the parameters do
 * not specify a known format.
 */
GstVideoFormat
gst_video_format_from_masks (gint depth, gint bpp, gint endianness,
    guint red_mask, guint green_mask, guint blue_mask, guint alpha_mask)
{
  GstVideoFormat format;

  /* our caps system handles 24/32bpp RGB as big-endian.
   * The 10-10-10-2 layouts (alpha_mask == 0xc0000000) are matched as
   * little-endian formats below, so they are excluded from this
   * normalization. */
  if ((bpp == 24 || bpp == 32) && endianness == G_LITTLE_ENDIAN &&
      alpha_mask != 0xc0000000) {
    red_mask = GUINT32_TO_BE (red_mask);
    green_mask = GUINT32_TO_BE (green_mask);
    blue_mask = GUINT32_TO_BE (blue_mask);
    alpha_mask = GUINT32_TO_BE (alpha_mask);
    endianness = G_BIG_ENDIAN;
    if (bpp == 24) {
      /* 24bpp masks only occupy 3 bytes; drop the byte the 32-bit
       * byte swap shifted them into. */
      red_mask >>= 8;
      green_mask >>= 8;
      blue_mask >>= 8;
    }
  }

  if (depth == 32 && bpp == 32 && alpha_mask == 0xc0000000 &&
      endianness == G_LITTLE_ENDIAN) {
    /* 2 alpha bits in the MSBs: distinguish RGB10A2_LE from BGR10A2_LE
     * by where the red component sits. */
    if (red_mask == 0x3ff00000)
      format = GST_VIDEO_FORMAT_RGB10A2_LE;
    else
      format = GST_VIDEO_FORMAT_BGR10A2_LE;
  } else if (depth == 30 && bpp == 32) {
    format = GST_VIDEO_FORMAT_r210;
  } else if (depth == 24 && bpp == 32) {
    format = gst_video_format_from_rgb32_masks (red_mask, green_mask,
        blue_mask);
  } else if (depth == 32 && bpp == 32 && alpha_mask) {
    format = gst_video_format_from_rgba32_masks (red_mask, green_mask,
        blue_mask, alpha_mask);
  } else if (depth == 24 && bpp == 24) {
    format = gst_video_format_from_rgb24_masks (red_mask, green_mask,
        blue_mask);
  } else if ((depth == 15 || depth == 16) && bpp == 16 &&
      endianness == G_BYTE_ORDER) {
    format = gst_video_format_from_rgb16_masks (red_mask, green_mask,
        blue_mask);
  } else if (depth == 8 && bpp == 8) {
    format = GST_VIDEO_FORMAT_RGB8P;
  } else if (depth == 64 && bpp == 64) {
    /* Only 16-bit-per-channel ARGB is representable: reuse the 32-bit
     * mask matcher and accept only the ARGB ordering. */
    format = gst_video_format_from_rgba32_masks (red_mask, green_mask,
        blue_mask, alpha_mask);
    if (format == GST_VIDEO_FORMAT_ARGB) {
      format = GST_VIDEO_FORMAT_ARGB64;
    } else {
      format = GST_VIDEO_FORMAT_UNKNOWN;
    }
  } else {
    format = GST_VIDEO_FORMAT_UNKNOWN;
  }
  return format;
}
+
+ /**
+ * gst_video_format_from_fourcc:
+ * @fourcc: a FOURCC value representing raw YUV video
+ *
+ * Converts a FOURCC value into the corresponding #GstVideoFormat.
+ * If the FOURCC cannot be represented by #GstVideoFormat,
+ * #GST_VIDEO_FORMAT_UNKNOWN is returned.
+ *
+ * Returns: the #GstVideoFormat describing the FOURCC value
+ */
+ GstVideoFormat
+ gst_video_format_from_fourcc (guint32 fourcc)
+ {
+ switch (fourcc) {
+ case GST_MAKE_FOURCC ('I', '4', '2', '0'):
+ return GST_VIDEO_FORMAT_I420;
++ case GST_MAKE_FOURCC ('S', '4', '2', '0'):
++ return GST_VIDEO_FORMAT_S420;
++#ifdef TIZEN_PROFILE_TV
++ case GST_MAKE_FOURCC ('S', 'T', 'V', '0'):
++ return GST_VIDEO_FORMAT_STV0;
++ case GST_MAKE_FOURCC ('S', 'T', 'V', '1'):
++ return GST_VIDEO_FORMAT_STV1;
++#endif
+ case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
+ return GST_VIDEO_FORMAT_YV12;
+ case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
+ return GST_VIDEO_FORMAT_YUY2;
+ case GST_MAKE_FOURCC ('Y', 'V', 'Y', 'U'):
+ return GST_VIDEO_FORMAT_YVYU;
+ case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
+ return GST_VIDEO_FORMAT_UYVY;
++ case GST_MAKE_FOURCC ('I', 'T', 'L', 'V'):
++ return GST_VIDEO_FORMAT_ITLV;
+ case GST_MAKE_FOURCC ('V', 'Y', 'U', 'Y'):
+ return GST_VIDEO_FORMAT_VYUY;
+ case GST_MAKE_FOURCC ('A', 'Y', 'U', 'V'):
+ return GST_VIDEO_FORMAT_AYUV;
+ case GST_MAKE_FOURCC ('Y', '4', '1', 'B'):
+ return GST_VIDEO_FORMAT_Y41B;
+ case GST_MAKE_FOURCC ('Y', '4', '2', 'B'):
+ return GST_VIDEO_FORMAT_Y42B;
+ case GST_MAKE_FOURCC ('Y', '4', '4', '4'):
+ return GST_VIDEO_FORMAT_Y444;
+ case GST_MAKE_FOURCC ('v', '2', '1', '0'):
+ return GST_VIDEO_FORMAT_v210;
+ case GST_MAKE_FOURCC ('v', '2', '1', '6'):
+ return GST_VIDEO_FORMAT_v216;
+ case GST_MAKE_FOURCC ('Y', '2', '1', '0'):
+ return GST_VIDEO_FORMAT_Y210;
+ case GST_MAKE_FOURCC ('N', 'V', '1', '2'):
+ return GST_VIDEO_FORMAT_NV12;
++ case GST_MAKE_FOURCC ('S', 'N', '1', '2'):
++ return GST_VIDEO_FORMAT_SN12;
++ case GST_MAKE_FOURCC ('S', 'T', '1', '2'):
++ return GST_VIDEO_FORMAT_ST12;
+ case GST_MAKE_FOURCC ('N', 'V', '2', '1'):
+ return GST_VIDEO_FORMAT_NV21;
++ case GST_MAKE_FOURCC ('S', 'N', '2', '1'):
++ return GST_VIDEO_FORMAT_SN21;
+ case GST_MAKE_FOURCC ('N', 'V', '1', '6'):
+ return GST_VIDEO_FORMAT_NV16;
+ case GST_MAKE_FOURCC ('N', 'V', '6', '1'):
+ return GST_VIDEO_FORMAT_NV61;
+ case GST_MAKE_FOURCC ('N', 'V', '2', '4'):
+ return GST_VIDEO_FORMAT_NV24;
+ case GST_MAKE_FOURCC ('v', '3', '0', '8'):
+ return GST_VIDEO_FORMAT_v308;
+ case GST_MAKE_FOURCC ('I', 'Y', 'U', '2'):
+ return GST_VIDEO_FORMAT_IYU2;
+ case GST_MAKE_FOURCC ('Y', '8', '0', '0'):
+ case GST_MAKE_FOURCC ('Y', '8', ' ', ' '):
+ case GST_MAKE_FOURCC ('G', 'R', 'E', 'Y'):
+ return GST_VIDEO_FORMAT_GRAY8;
+ case GST_MAKE_FOURCC ('Y', '1', '6', ' '):
+ return GST_VIDEO_FORMAT_GRAY16_LE;
+ case GST_MAKE_FOURCC ('U', 'Y', 'V', 'P'):
+ return GST_VIDEO_FORMAT_UYVP;
+ case GST_MAKE_FOURCC ('A', '4', '2', '0'):
+ return GST_VIDEO_FORMAT_A420;
+ case GST_MAKE_FOURCC ('Y', 'U', 'V', '9'):
+ return GST_VIDEO_FORMAT_YUV9;
+ case GST_MAKE_FOURCC ('Y', 'V', 'U', '9'):
+ return GST_VIDEO_FORMAT_YVU9;
+ case GST_MAKE_FOURCC ('I', 'Y', 'U', '1'):
+ return GST_VIDEO_FORMAT_IYU1;
+ case GST_MAKE_FOURCC ('A', 'Y', '6', '4'):
+ return GST_VIDEO_FORMAT_AYUV64;
+ case GST_MAKE_FOURCC ('X', 'V', '1', '0'):
+ return GST_VIDEO_FORMAT_GRAY10_LE32;
+ case GST_MAKE_FOURCC ('X', 'V', '1', '5'):
+ return GST_VIDEO_FORMAT_NV12_10LE32;
+ case GST_MAKE_FOURCC ('X', 'V', '2', '0'):
+ return GST_VIDEO_FORMAT_NV16_10LE32;
+ case GST_MAKE_FOURCC ('R', 'K', '2', '0'):
+ return GST_VIDEO_FORMAT_NV12_10LE40;
+ case GST_MAKE_FOURCC ('Y', '4', '1', '0'):
+ return GST_VIDEO_FORMAT_Y410;
+ case GST_MAKE_FOURCC ('V', 'U', 'Y', 'A'):
+ return GST_VIDEO_FORMAT_VUYA;
+ case GST_MAKE_FOURCC ('A', 'R', '3', '0'):
+ return GST_VIDEO_FORMAT_BGR10A2_LE;
++ case GST_MAKE_FOURCC ('I', 'N', 'V', 'Z'):
++ return GST_VIDEO_FORMAT_INVZ;
+ default:
+ return GST_VIDEO_FORMAT_UNKNOWN;
+ }
+ }
+
+ /**
+ * gst_video_format_from_string:
+ * @format: a format string
+ *
+ * Convert the @format string to its #GstVideoFormat.
+ *
+ * Returns: the #GstVideoFormat for @format or GST_VIDEO_FORMAT_UNKNOWN when the
+ * string is not a known format.
+ */
+ GstVideoFormat
+ gst_video_format_from_string (const gchar * format)
+ {
+ guint i;
+
+ g_return_val_if_fail (format != NULL, GST_VIDEO_FORMAT_UNKNOWN);
+
+ for (i = 0; i < G_N_ELEMENTS (formats); i++) {
+ if (strcmp (GST_VIDEO_FORMAT_INFO_NAME (&formats[i].info), format) == 0)
+ return GST_VIDEO_FORMAT_INFO_FORMAT (&formats[i].info);
+ }
+ return GST_VIDEO_FORMAT_UNKNOWN;
+ }
+
+
+ /**
+ * gst_video_format_to_fourcc:
+ * @format: a #GstVideoFormat video format
+ *
+ * Converts a #GstVideoFormat value into the corresponding FOURCC. Only
+ * a few YUV formats have corresponding FOURCC values. If @format has
+ * no corresponding FOURCC value, 0 is returned.
+ *
+ * Returns: the FOURCC corresponding to @format
+ */
+ guint32
+ gst_video_format_to_fourcc (GstVideoFormat format)
+ {
+ g_return_val_if_fail (format != GST_VIDEO_FORMAT_UNKNOWN, 0);
+
+ if ((gint) format >= G_N_ELEMENTS (formats))
+ return 0;
+
+ return formats[format].fourcc;
+ }
+
+ /**
+ * gst_video_format_to_string:
+ * @format: a #GstVideoFormat video format
+ *
+ * Returns a string containing a descriptive name for
+ * the #GstVideoFormat if there is one, or NULL otherwise.
+ *
+ * Returns: the name corresponding to @format
+ */
+ const gchar *
+ gst_video_format_to_string (GstVideoFormat format)
+ {
+ g_return_val_if_fail (format != GST_VIDEO_FORMAT_UNKNOWN, NULL);
+
+ if ((gint) format >= G_N_ELEMENTS (formats))
+ return NULL;
+
+ return GST_VIDEO_FORMAT_INFO_NAME (&formats[format].info);
+ }
+
/**
 * gst_video_format_get_info:
 * @format: a #GstVideoFormat
 *
 * Get the #GstVideoFormatInfo for @format
 *
 * Returns: The #GstVideoFormatInfo for @format.
 */
const GstVideoFormatInfo *
gst_video_format_get_info (GstVideoFormat format)
{
  /* formats[] is indexed directly by the GstVideoFormat enum value;
   * reject out-of-range values before indexing. */
  g_return_val_if_fail ((gint) format < G_N_ELEMENTS (formats), NULL);

  return &formats[format].info;
}
+
+ /**
+ * gst_video_format_get_palette:
+ * @format: a #GstVideoFormat
+ * @size: (out): size of the palette in bytes
+ *
+ * Get the default palette of @format. This the palette used in the pack
+ * function for paletted formats.
+ *
+ * Returns: (transfer none): the default palette of @format or %NULL when
+ * @format does not have a palette.
+ *
+ * Since: 1.2
+ */
+ gconstpointer
+ gst_video_format_get_palette (GstVideoFormat format, gsize * size)
+ {
+ g_return_val_if_fail ((gint) format < G_N_ELEMENTS (formats), NULL);
+ g_return_val_if_fail (size != NULL, NULL);
+
+ switch (format) {
+ case GST_VIDEO_FORMAT_RGB8P:
+ *size = sizeof (std_palette_RGB8P);
+ return std_palette_RGB8P;
+ default:
+ return NULL;
+ }
+ }
+
+ /**
+ * gst_video_format_info_component:
+ * @info: #GstVideoFormatInfo
+ * @plane: a plane number
+ * @components: (out): array used to store component numbers
+ *
+ * Fill @components with the number of all the components packed in plane @p
+ * for the format @info. A value of -1 in @components indicates that no more
+ * components are packed in the plane.
+ *
+ * Since: 1.18
+ */
+ void
+ gst_video_format_info_component (const GstVideoFormatInfo * info, guint plane,
+ gint components[GST_VIDEO_MAX_COMPONENTS])
+ {
+ guint c, i = 0;
+
+ /* Reverse mapping of info->plane */
+ for (c = 0; c < GST_VIDEO_FORMAT_INFO_N_COMPONENTS (info); c++) {
+ if (GST_VIDEO_FORMAT_INFO_PLANE (info, c) == plane) {
+ components[i] = c;
+ i++;
+ }
+ }
+
+ for (c = i; c < GST_VIDEO_MAX_COMPONENTS; c++)
+ components[c] = -1;
+ }
+
+ struct RawVideoFormats
+ {
+ GstVideoFormat *formats;
+ guint n;
+ };
+
+ static gpointer
+ generate_raw_video_formats (gpointer data)
+ {
+ GValue list = G_VALUE_INIT;
+ struct RawVideoFormats *all = g_new (struct RawVideoFormats, 1);
+ gchar *tmp;
+ guint i;
+ gboolean res G_GNUC_UNUSED;
+
+ g_value_init (&list, GST_TYPE_LIST);
+ /* Workaround a bug in our parser that would lead to segfaults
+ * when deserializing container types using static strings,
+ * see https://gitlab.freedesktop.org/gstreamer/gstreamer/-/issues/446 */
+ tmp = g_strdup (GST_VIDEO_FORMATS_ALL);
+ res = gst_value_deserialize (&list, tmp);
+ g_assert (res);
+ g_free (tmp);
+
+ all->n = gst_value_list_get_size (&list);
+ all->formats = g_new (GstVideoFormat, all->n);
+
+ for (i = 0; i < all->n; i++) {
+ const GValue *v = gst_value_list_get_value (&list, i);
+
+ all->formats[i] = gst_video_format_from_string (g_value_get_string (v));
+ g_assert (all->formats[i] != GST_VIDEO_FORMAT_UNKNOWN
+ && all->formats[i] != GST_VIDEO_FORMAT_ENCODED);
+ }
+
+ g_value_unset (&list);
+
+ return all;
+ }
+
+ /**
+ * gst_video_formats_raw:
+ * @len: (out): the number of elements in the returned array
+ *
+ * Return all the raw video formats supported by GStreamer.
+ *
+ * Returns: (transfer none) (array length=len): an array of #GstVideoFormat
+ * Since: 1.18
+ */
+ const GstVideoFormat *
+ gst_video_formats_raw (guint * len)
+ {
+ static GOnce raw_video_formats_once = G_ONCE_INIT;
+ struct RawVideoFormats *all;
+
+ g_return_val_if_fail (len, NULL);
+
+ g_once (&raw_video_formats_once, generate_raw_video_formats, NULL);
+
+ all = raw_video_formats_once.retval;
+ *len = all->n;
+ return all->formats;
+ }
+
/**
 * gst_video_make_raw_caps:
 * @formats: (array length=len) (nullable): an array of raw #GstVideoFormat, or %NULL
 * @len: the size of @formats
 *
 * Return a generic raw video caps for formats defined in @formats.
 * If @formats is %NULL returns a caps for all the supported raw video formats,
 * see gst_video_formats_raw().
 *
 * Returns: (transfer full): a video #GstCaps
 * Since: 1.18
 */
GstCaps *
gst_video_make_raw_caps (const GstVideoFormat formats[], guint len)
{
  /* Convenience wrapper: same as the _with_features() variant with no
   * extra caps features. */
  return gst_video_make_raw_caps_with_features (formats, len, NULL);
}
+
+ /**
+ * gst_video_make_raw_caps_with_features:
+ * @formats: (array length=len) (nullable): an array of raw #GstVideoFormat, or %NULL
+ * @len: the size of @formats
+ * @features: (transfer full) (allow-none): the #GstCapsFeatures to set on the caps
+ *
+ * Return a generic raw video caps for formats defined in @formats with features
+ * @features.
+ * If @formats is %NULL returns a caps for all the supported video formats,
+ * see gst_video_formats_raw().
+ *
+ * Returns: (transfer full): a video @GstCaps
+ * Since: 1.18
+ */
+ GstCaps *
+ gst_video_make_raw_caps_with_features (const GstVideoFormat formats[],
+ guint len, GstCapsFeatures * features)
+ {
+ GstStructure *s;
+ GValue format = G_VALUE_INIT;
+ GstCaps *caps;
+
+ g_return_val_if_fail ((formats && len > 0) || (!formats && len == 0), NULL);
+
+ if (!formats) {
+ formats = gst_video_formats_raw (&len);
+ }
+
+ if (len > 1) {
+ guint i;
+
+ g_value_init (&format, GST_TYPE_LIST);
+
+ for (i = 0; i < len; i++) {
+ GValue v = G_VALUE_INIT;
+
+ g_return_val_if_fail (formats[i] != GST_VIDEO_FORMAT_UNKNOWN
+ && formats[i] != GST_VIDEO_FORMAT_ENCODED, NULL);
+
+ g_value_init (&v, G_TYPE_STRING);
+ g_value_set_static_string (&v, gst_video_format_to_string (formats[i]));
+ gst_value_list_append_and_take_value (&format, &v);
+ }
+ } else {
+ g_value_init (&format, G_TYPE_STRING);
+
+ g_value_set_static_string (&format,
+ gst_video_format_to_string (formats[0]));
+ }
+
+ s = gst_structure_new ("video/x-raw",
+ "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
+ "height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
+ "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
+
+ gst_structure_take_value (s, "format", &format);
+
+ caps = gst_caps_new_full (s, NULL);
+
+ if (features)
+ gst_caps_set_features (caps, 0, features);
+
+ return caps;
+ }
--- /dev/null
+ /* GStreamer
+ * Copyright (C) <2011> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifndef __GST_VIDEO_FORMAT_H__
+ #define __GST_VIDEO_FORMAT_H__
+
+ #include <gst/gst.h>
+
+ G_BEGIN_DECLS
+
+ #include <gst/video/video-enumtypes.h>
+ #include <gst/video/video-tile.h>
+
+ /**
+ * GstVideoFormat:
+ * @GST_VIDEO_FORMAT_UNKNOWN: Unknown or unset video format id
+ * @GST_VIDEO_FORMAT_ENCODED: Encoded video format. Only ever use that in caps for
+ * special video formats in combination with non-system
+ * memory GstCapsFeatures where it does not make sense
+ * to specify a real video format.
+ * @GST_VIDEO_FORMAT_I420: planar 4:2:0 YUV
+ * @GST_VIDEO_FORMAT_YV12: planar 4:2:0 YVU (like I420 but UV planes swapped)
+ * @GST_VIDEO_FORMAT_YUY2: packed 4:2:2 YUV (Y0-U0-Y1-V0 Y2-U2-Y3-V2 Y4 ...)
+ * @GST_VIDEO_FORMAT_UYVY: packed 4:2:2 YUV (U0-Y0-V0-Y1 U2-Y2-V2-Y3 U4 ...)
+ * @GST_VIDEO_FORMAT_VYUY: packed 4:2:2 YUV (V0-Y0-U0-Y1 V2-Y2-U2-Y3 V4 ...)
+ * @GST_VIDEO_FORMAT_AYUV: packed 4:4:4 YUV with alpha channel (A0-Y0-U0-V0 ...)
+ * @GST_VIDEO_FORMAT_RGBx: sparse rgb packed into 32 bit, space last
+ * @GST_VIDEO_FORMAT_BGRx: sparse reverse rgb packed into 32 bit, space last
+ * @GST_VIDEO_FORMAT_xRGB: sparse rgb packed into 32 bit, space first
+ * @GST_VIDEO_FORMAT_xBGR: sparse reverse rgb packed into 32 bit, space first
+ * @GST_VIDEO_FORMAT_RGBA: rgb with alpha channel last
+ * @GST_VIDEO_FORMAT_BGRA: reverse rgb with alpha channel last
+ * @GST_VIDEO_FORMAT_ARGB: rgb with alpha channel first
+ * @GST_VIDEO_FORMAT_ABGR: reverse rgb with alpha channel first
+ * @GST_VIDEO_FORMAT_RGB: RGB packed into 24 bits without padding (`R-G-B-R-G-B`)
+ * @GST_VIDEO_FORMAT_BGR: reverse RGB packed into 24 bits without padding (`B-G-R-B-G-R`)
+ * @GST_VIDEO_FORMAT_Y41B: planar 4:1:1 YUV
+ * @GST_VIDEO_FORMAT_Y42B: planar 4:2:2 YUV
+ * @GST_VIDEO_FORMAT_YVYU: packed 4:2:2 YUV (Y0-V0-Y1-U0 Y2-V2-Y3-U2 Y4 ...)
+ * @GST_VIDEO_FORMAT_Y444: planar 4:4:4 YUV
+ * @GST_VIDEO_FORMAT_v210: packed 4:2:2 10-bit YUV, complex format
+ * @GST_VIDEO_FORMAT_v216: packed 4:2:2 16-bit YUV, Y0-U0-Y1-V1 order
+ * @GST_VIDEO_FORMAT_NV12: planar 4:2:0 YUV with interleaved UV plane
+ * @GST_VIDEO_FORMAT_NV21: planar 4:2:0 YUV with interleaved VU plane
+ * @GST_VIDEO_FORMAT_NV12_10LE32: 10-bit variant of @GST_VIDEO_FORMAT_NV12, packed into 32bit words (MSB 2 bits padding) (Since: 1.14)
+ * @GST_VIDEO_FORMAT_GRAY8: 8-bit grayscale
+ * @GST_VIDEO_FORMAT_GRAY10_LE32: 10-bit grayscale, packed into 32bit words (2 bits padding) (Since: 1.14)
+ * @GST_VIDEO_FORMAT_GRAY16_BE: 16-bit grayscale, most significant byte first
+ * @GST_VIDEO_FORMAT_GRAY16_LE: 16-bit grayscale, least significant byte first
+ * @GST_VIDEO_FORMAT_v308: packed 4:4:4 YUV (Y-U-V ...)
+ * @GST_VIDEO_FORMAT_IYU2: packed 4:4:4 YUV (U-Y-V ...) (Since: 1.10)
+ * @GST_VIDEO_FORMAT_RGB16: rgb 5-6-5 bits per component
+ * @GST_VIDEO_FORMAT_BGR16: reverse rgb 5-6-5 bits per component
+ * @GST_VIDEO_FORMAT_RGB15: rgb 5-5-5 bits per component
+ * @GST_VIDEO_FORMAT_BGR15: reverse rgb 5-5-5 bits per component
+ * @GST_VIDEO_FORMAT_UYVP: packed 10-bit 4:2:2 YUV (U0-Y0-V0-Y1 U2-Y2-V2-Y3 U4 ...)
+ * @GST_VIDEO_FORMAT_A420: planar 4:4:2:0 AYUV
+ * @GST_VIDEO_FORMAT_RGB8P: 8-bit paletted RGB
+ * @GST_VIDEO_FORMAT_YUV9: planar 4:1:0 YUV
+ * @GST_VIDEO_FORMAT_YVU9: planar 4:1:0 YUV (like YUV9 but UV planes swapped)
+ * @GST_VIDEO_FORMAT_IYU1: packed 4:1:1 YUV (Cb-Y0-Y1-Cr-Y2-Y3 ...)
+ * @GST_VIDEO_FORMAT_ARGB64: rgb with alpha channel first, 16 bits per channel
+ * @GST_VIDEO_FORMAT_AYUV64: packed 4:4:4 YUV with alpha channel, 16 bits per channel (A0-Y0-U0-V0 ...)
+ * @GST_VIDEO_FORMAT_r210: packed 4:4:4 RGB, 10 bits per channel
+ * @GST_VIDEO_FORMAT_I420_10BE: planar 4:2:0 YUV, 10 bits per channel
+ * @GST_VIDEO_FORMAT_I420_10LE: planar 4:2:0 YUV, 10 bits per channel
+ * @GST_VIDEO_FORMAT_I422_10BE: planar 4:2:2 YUV, 10 bits per channel
+ * @GST_VIDEO_FORMAT_I422_10LE: planar 4:2:2 YUV, 10 bits per channel
+ * @GST_VIDEO_FORMAT_Y444_10BE: planar 4:4:4 YUV, 10 bits per channel (Since: 1.2)
+ * @GST_VIDEO_FORMAT_Y444_10LE: planar 4:4:4 YUV, 10 bits per channel (Since: 1.2)
+ * @GST_VIDEO_FORMAT_GBR: planar 4:4:4 RGB, 8 bits per channel (Since: 1.2)
+ * @GST_VIDEO_FORMAT_GBR_10BE: planar 4:4:4 RGB, 10 bits per channel (Since: 1.2)
+ * @GST_VIDEO_FORMAT_GBR_10LE: planar 4:4:4 RGB, 10 bits per channel (Since: 1.2)
+ * @GST_VIDEO_FORMAT_NV16: planar 4:2:2 YUV with interleaved UV plane (Since: 1.2)
+ * @GST_VIDEO_FORMAT_NV16_10LE32: 10-bit variant of @GST_VIDEO_FORMAT_NV16, packed into 32bit words (MSB 2 bits padding) (Since: 1.14)
+ * @GST_VIDEO_FORMAT_NV24: planar 4:4:4 YUV with interleaved UV plane (Since: 1.2)
+ * @GST_VIDEO_FORMAT_NV12_64Z32: NV12 with 64x32 tiling in zigzag pattern (Since: 1.4)
+ * @GST_VIDEO_FORMAT_A420_10BE: planar 4:4:2:0 YUV, 10 bits per channel (Since: 1.6)
+ * @GST_VIDEO_FORMAT_A420_10LE: planar 4:4:2:0 YUV, 10 bits per channel (Since: 1.6)
+ * @GST_VIDEO_FORMAT_A422_10BE: planar 4:4:2:2 YUV, 10 bits per channel (Since: 1.6)
+ * @GST_VIDEO_FORMAT_A422_10LE: planar 4:4:2:2 YUV, 10 bits per channel (Since: 1.6)
+ * @GST_VIDEO_FORMAT_A444_10BE: planar 4:4:4:4 YUV, 10 bits per channel (Since: 1.6)
+ * @GST_VIDEO_FORMAT_A444_10LE: planar 4:4:4:4 YUV, 10 bits per channel (Since: 1.6)
+ * @GST_VIDEO_FORMAT_NV61: planar 4:2:2 YUV with interleaved VU plane (Since: 1.6)
+ * @GST_VIDEO_FORMAT_P010_10BE: planar 4:2:0 YUV with interleaved UV plane, 10 bits per channel (Since: 1.10)
+ * @GST_VIDEO_FORMAT_P010_10LE: planar 4:2:0 YUV with interleaved UV plane, 10 bits per channel (Since: 1.10)
+ * @GST_VIDEO_FORMAT_GBRA: planar 4:4:4:4 ARGB, 8 bits per channel (Since: 1.12)
+ * @GST_VIDEO_FORMAT_GBRA_10BE: planar 4:4:4:4 ARGB, 10 bits per channel (Since: 1.12)
+ * @GST_VIDEO_FORMAT_GBRA_10LE: planar 4:4:4:4 ARGB, 10 bits per channel (Since: 1.12)
+ * @GST_VIDEO_FORMAT_GBR_12BE: planar 4:4:4 RGB, 12 bits per channel (Since: 1.12)
+ * @GST_VIDEO_FORMAT_GBR_12LE: planar 4:4:4 RGB, 12 bits per channel (Since: 1.12)
+ * @GST_VIDEO_FORMAT_GBRA_12BE: planar 4:4:4:4 ARGB, 12 bits per channel (Since: 1.12)
+ * @GST_VIDEO_FORMAT_GBRA_12LE: planar 4:4:4:4 ARGB, 12 bits per channel (Since: 1.12)
+ * @GST_VIDEO_FORMAT_I420_12BE: planar 4:2:0 YUV, 12 bits per channel (Since: 1.12)
+ * @GST_VIDEO_FORMAT_I420_12LE: planar 4:2:0 YUV, 12 bits per channel (Since: 1.12)
+ * @GST_VIDEO_FORMAT_I422_12BE: planar 4:2:2 YUV, 12 bits per channel (Since: 1.12)
+ * @GST_VIDEO_FORMAT_I422_12LE: planar 4:2:2 YUV, 12 bits per channel (Since: 1.12)
+ * @GST_VIDEO_FORMAT_Y444_12BE: planar 4:4:4 YUV, 12 bits per channel (Since: 1.12)
+ * @GST_VIDEO_FORMAT_Y444_12LE: planar 4:4:4 YUV, 12 bits per channel (Since: 1.12)
+ * @GST_VIDEO_FORMAT_NV12_10LE40: Fully packed variant of NV12_10LE32 (Since: 1.16)
+ * @GST_VIDEO_FORMAT_Y210: packed 4:2:2 YUV, 10 bits per channel (Since: 1.16)
+ * @GST_VIDEO_FORMAT_Y410: packed 4:4:4 YUV, 10 bits per channel(A-V-Y-U...) (Since: 1.16)
+ * @GST_VIDEO_FORMAT_VUYA: packed 4:4:4 YUV with alpha channel (V0-U0-Y0-A0...) (Since: 1.16)
+ * @GST_VIDEO_FORMAT_BGR10A2_LE: packed 4:4:4 RGB with alpha channel(B-G-R-A), 10 bits for R/G/B channel and MSB 2 bits for alpha channel (Since: 1.16)
+ * @GST_VIDEO_FORMAT_RGB10A2_LE: packed 4:4:4 RGB with alpha channel(R-G-B-A), 10 bits for R/G/B channel and MSB 2 bits for alpha channel (Since: 1.18)
+ * @GST_VIDEO_FORMAT_Y444_16BE: planar 4:4:4 YUV, 16 bits per channel (Since: 1.18)
+ * @GST_VIDEO_FORMAT_Y444_16LE: planar 4:4:4 YUV, 16 bits per channel (Since: 1.18)
+ * @GST_VIDEO_FORMAT_P016_BE: planar 4:2:0 YUV with interleaved UV plane, 16 bits per channel (Since: 1.18)
+ * @GST_VIDEO_FORMAT_P016_LE: planar 4:2:0 YUV with interleaved UV plane, 16 bits per channel (Since: 1.18)
+ * @GST_VIDEO_FORMAT_P012_BE: planar 4:2:0 YUV with interleaved UV plane, 12 bits per channel (Since: 1.18)
+ * @GST_VIDEO_FORMAT_P012_LE: planar 4:2:0 YUV with interleaved UV plane, 12 bits per channel (Since: 1.18)
+ * @GST_VIDEO_FORMAT_Y212_BE: packed 4:2:2 YUV, 12 bits per channel (Y-U-Y-V) (Since: 1.18)
+ * @GST_VIDEO_FORMAT_Y212_LE: packed 4:2:2 YUV, 12 bits per channel (Y-U-Y-V) (Since: 1.18)
+ * @GST_VIDEO_FORMAT_Y412_BE: packed 4:4:4:4 YUV, 12 bits per channel(U-Y-V-A...) (Since: 1.18)
+ * @GST_VIDEO_FORMAT_Y412_LE: packed 4:4:4:4 YUV, 12 bits per channel(U-Y-V-A...) (Since: 1.18)
+ * @GST_VIDEO_FORMAT_NV12_4L4: NV12 with 4x4 tiles in linear order (Since: 1.18)
+ * @GST_VIDEO_FORMAT_NV12_32L32: NV12 with 32x32 tiles in linear order (Since: 1.18)
+ * @GST_VIDEO_FORMAT_RGBP: planar 4:4:4 RGB, 8 bits per channel (Since: 1.20)
+ * @GST_VIDEO_FORMAT_BGRP: planar 4:4:4 RGB, 8 bits per channel (Since: 1.20)
+ * @GST_VIDEO_FORMAT_AV12: Planar 4:2:0 YUV with interleaved UV plane with alpha as 3rd plane (Since: 1.20)
+ *
+ * Enum value describing the most common video formats.
+ *
+ * See the [GStreamer raw video format design document](https://gstreamer.freedesktop.org/documentation/additional/design/mediatype-video-raw.html#formats)
+ * for details about the layout and packing of these formats in memory.
+ */
/* NOTE(review): this fork inserts Tizen-specific values in the MIDDLE of the
 * enum (S420, ITLV, SR32, SN12, ST12, SN21, ...), which shifts every later
 * value relative to upstream GStreamer and breaks binary compatibility with
 * unpatched libgstvideo consumers -- confirm this is intentional. */
typedef enum {
  GST_VIDEO_FORMAT_UNKNOWN,
  GST_VIDEO_FORMAT_ENCODED,
  GST_VIDEO_FORMAT_I420,
  /* Tizen addition, FOURCC 'S420' (see gst_video_format_from_fourcc());
   * presumably a TBM/zero-copy variant of I420 -- confirm. */
  GST_VIDEO_FORMAT_S420,
  GST_VIDEO_FORMAT_YV12,
  GST_VIDEO_FORMAT_YUY2,
  GST_VIDEO_FORMAT_UYVY,
  /* Tizen addition, FOURCC 'ITLV'; exact layout not visible here --
   * confirm against the Tizen format table. */
  GST_VIDEO_FORMAT_ITLV,
  GST_VIDEO_FORMAT_AYUV,
  GST_VIDEO_FORMAT_RGBx,
  GST_VIDEO_FORMAT_BGRx,
  GST_VIDEO_FORMAT_xRGB,
  GST_VIDEO_FORMAT_xBGR,
  GST_VIDEO_FORMAT_RGBA,
  GST_VIDEO_FORMAT_BGRA,
  /* Tizen addition; no FOURCC mapping in gst_video_format_from_fourcc().
   * Semantics not visible in this file -- confirm. */
  GST_VIDEO_FORMAT_SR32,
  GST_VIDEO_FORMAT_ARGB,
  GST_VIDEO_FORMAT_ABGR,
  GST_VIDEO_FORMAT_RGB,
  GST_VIDEO_FORMAT_BGR,
  GST_VIDEO_FORMAT_Y41B,
  GST_VIDEO_FORMAT_Y42B,
  GST_VIDEO_FORMAT_YVYU,
  GST_VIDEO_FORMAT_Y444,
  GST_VIDEO_FORMAT_v210,
  GST_VIDEO_FORMAT_v216,
  GST_VIDEO_FORMAT_NV12,
  /* Tizen additions, FOURCCs 'SN12'/'ST12'; presumably TBM-backed NV12
   * variants -- confirm. NOTE(review): FOURCC 'ST12' is also the fourcc of
   * GST_VIDEO_FORMAT_NV12_32L32 in the formats table, but
   * gst_video_format_from_fourcc() resolves it to ST12 -- potential
   * ambiguity. */
  GST_VIDEO_FORMAT_SN12,
  GST_VIDEO_FORMAT_ST12,
  GST_VIDEO_FORMAT_NV21,
  /* Tizen addition, FOURCC 'SN21'; presumably TBM-backed NV21 -- confirm. */
  GST_VIDEO_FORMAT_SN21,
  GST_VIDEO_FORMAT_GRAY8,
  GST_VIDEO_FORMAT_GRAY16_BE,
  GST_VIDEO_FORMAT_GRAY16_LE,
  GST_VIDEO_FORMAT_v308,
  GST_VIDEO_FORMAT_RGB16,
  GST_VIDEO_FORMAT_BGR16,
  GST_VIDEO_FORMAT_RGB15,
  GST_VIDEO_FORMAT_BGR15,
  GST_VIDEO_FORMAT_UYVP,
  GST_VIDEO_FORMAT_A420,
  GST_VIDEO_FORMAT_RGB8P,
  GST_VIDEO_FORMAT_YUV9,
  GST_VIDEO_FORMAT_YVU9,
  GST_VIDEO_FORMAT_IYU1,
  GST_VIDEO_FORMAT_ARGB64,
  GST_VIDEO_FORMAT_AYUV64,
  GST_VIDEO_FORMAT_r210,
  GST_VIDEO_FORMAT_I420_10BE,
  GST_VIDEO_FORMAT_I420_10LE,
  GST_VIDEO_FORMAT_I422_10BE,
  GST_VIDEO_FORMAT_I422_10LE,
  GST_VIDEO_FORMAT_Y444_10BE,
  GST_VIDEO_FORMAT_Y444_10LE,
  GST_VIDEO_FORMAT_GBR,
  GST_VIDEO_FORMAT_GBR_10BE,
  GST_VIDEO_FORMAT_GBR_10LE,
  GST_VIDEO_FORMAT_NV16,
  GST_VIDEO_FORMAT_NV24,
  GST_VIDEO_FORMAT_NV12_64Z32,
  GST_VIDEO_FORMAT_A420_10BE,
  GST_VIDEO_FORMAT_A420_10LE,
  GST_VIDEO_FORMAT_A422_10BE,
  GST_VIDEO_FORMAT_A422_10LE,
  GST_VIDEO_FORMAT_A444_10BE,
  GST_VIDEO_FORMAT_A444_10LE,
  GST_VIDEO_FORMAT_NV61,
  GST_VIDEO_FORMAT_P010_10BE,
  GST_VIDEO_FORMAT_P010_10LE,
  GST_VIDEO_FORMAT_IYU2,
  GST_VIDEO_FORMAT_VYUY,
  GST_VIDEO_FORMAT_GBRA,
  GST_VIDEO_FORMAT_GBRA_10BE,
  GST_VIDEO_FORMAT_GBRA_10LE,
  GST_VIDEO_FORMAT_GBR_12BE,
  GST_VIDEO_FORMAT_GBR_12LE,
  GST_VIDEO_FORMAT_GBRA_12BE,
  GST_VIDEO_FORMAT_GBRA_12LE,
  GST_VIDEO_FORMAT_I420_12BE,
  GST_VIDEO_FORMAT_I420_12LE,
  GST_VIDEO_FORMAT_I422_12BE,
  GST_VIDEO_FORMAT_I422_12LE,
  GST_VIDEO_FORMAT_Y444_12BE,
  GST_VIDEO_FORMAT_Y444_12LE,
  GST_VIDEO_FORMAT_GRAY10_LE32,
  GST_VIDEO_FORMAT_NV12_10LE32,
  GST_VIDEO_FORMAT_NV16_10LE32,
  GST_VIDEO_FORMAT_NV12_10LE40,
  GST_VIDEO_FORMAT_Y210,
  GST_VIDEO_FORMAT_Y410,
  GST_VIDEO_FORMAT_VUYA,
  GST_VIDEO_FORMAT_BGR10A2_LE,
  /* Tizen addition: 16-bit depth-map format, FOURCC 'INVZ', little-endian
   * packing (PACK_INVZ16_LE in the formats table). */
  GST_VIDEO_FORMAT_INVZ,
  /* TIZEN_PROFILE_TV: 8-bit planar 4:2:0 formats (DPTH888/SUB420 in the
   * formats table). Kept unconditional here so later enum values stay
   * stable whether or not the ifdef is set. */
  GST_VIDEO_FORMAT_STV0, /* TIZEN_PROFILE_TV */
  GST_VIDEO_FORMAT_STV1, /* TIZEN_PROFILE_TV */
  GST_VIDEO_FORMAT_RGB10A2_LE,
  GST_VIDEO_FORMAT_Y444_16BE,
  GST_VIDEO_FORMAT_Y444_16LE,
  GST_VIDEO_FORMAT_P016_BE,
  GST_VIDEO_FORMAT_P016_LE,
  GST_VIDEO_FORMAT_P012_BE,
  GST_VIDEO_FORMAT_P012_LE,
  GST_VIDEO_FORMAT_Y212_BE,
  GST_VIDEO_FORMAT_Y212_LE,
  GST_VIDEO_FORMAT_Y412_BE,
  GST_VIDEO_FORMAT_Y412_LE,
  /**
   * GST_VIDEO_FORMAT_NV12_4L4:
   *
   * NV12 with 4x4 tiles in linear order.
   *
   * Since: 1.18
   */
  GST_VIDEO_FORMAT_NV12_4L4,
  /**
   * GST_VIDEO_FORMAT_NV12_32L32:
   *
   * NV12 with 32x32 tiles in linear order.
   *
   * Since: 1.18
   */
  GST_VIDEO_FORMAT_NV12_32L32,

  /**
   * GST_VIDEO_FORMAT_RGBP:
   *
   * Planar 4:4:4 RGB, R-G-B order
   *
   * Since: 1.20
   */
  GST_VIDEO_FORMAT_RGBP,

  /**
   * GST_VIDEO_FORMAT_BGRP:
   *
   * Planar 4:4:4 RGB, B-G-R order
   *
   * Since: 1.20
   */
  GST_VIDEO_FORMAT_BGRP,

  /**
   * GST_VIDEO_FORMAT_AV12:
   *
   * Planar 4:2:0 YUV with interleaved UV plane with alpha as
   * 3rd plane.
   *
   * Since: 1.20
   */
  GST_VIDEO_FORMAT_AV12,
} GstVideoFormat;
+
+ #define GST_VIDEO_MAX_PLANES 4
+ #define GST_VIDEO_MAX_COMPONENTS 4
+
+ typedef struct _GstVideoFormatInfo GstVideoFormatInfo;
+
+ /**
+ * GstVideoFormatFlags:
+ * @GST_VIDEO_FORMAT_FLAG_YUV: The video format is YUV, components are numbered
+ * 0=Y, 1=U, 2=V.
+ * @GST_VIDEO_FORMAT_FLAG_RGB: The video format is RGB, components are numbered
+ * 0=R, 1=G, 2=B.
+ * @GST_VIDEO_FORMAT_FLAG_GRAY: The video is gray, there is one gray component
+ * with index 0.
+ * @GST_VIDEO_FORMAT_FLAG_ALPHA: The video format has an alpha components with
+ * the number 3.
+ * @GST_VIDEO_FORMAT_FLAG_LE: The video format has data stored in little
+ * endianness.
+ * @GST_VIDEO_FORMAT_FLAG_PALETTE: The video format has a palette. The palette
+ * is stored in the second plane and indexes are stored in the first plane.
+ * @GST_VIDEO_FORMAT_FLAG_COMPLEX: The video format has a complex layout that
+ * can't be described with the usual information in the #GstVideoFormatInfo.
+ * @GST_VIDEO_FORMAT_FLAG_UNPACK: This format can be used in a
+ * #GstVideoFormatUnpack and #GstVideoFormatPack function.
+ * @GST_VIDEO_FORMAT_FLAG_TILED: The format is tiled, there is tiling information
+ * in the last plane.
+ *
+ * The different video flags that a format info can have.
+ */
+ /* Bit flags: a format's flags field is a bitwise OR of these values. */
+ typedef enum
+ {
+ GST_VIDEO_FORMAT_FLAG_YUV = (1 << 0),
+ GST_VIDEO_FORMAT_FLAG_RGB = (1 << 1),
+ GST_VIDEO_FORMAT_FLAG_GRAY = (1 << 2),
+ GST_VIDEO_FORMAT_FLAG_ALPHA = (1 << 3),
+ GST_VIDEO_FORMAT_FLAG_LE = (1 << 4),
+ GST_VIDEO_FORMAT_FLAG_PALETTE = (1 << 5),
+ GST_VIDEO_FORMAT_FLAG_COMPLEX = (1 << 6),
+ GST_VIDEO_FORMAT_FLAG_UNPACK = (1 << 7),
+ GST_VIDEO_FORMAT_FLAG_TILED = (1 << 8)
+ } GstVideoFormatFlags;
+
+ /* YUV components */
+ #define GST_VIDEO_COMP_Y 0
+ #define GST_VIDEO_COMP_U 1
+ #define GST_VIDEO_COMP_V 2
+
+ /* RGB components */
+ #define GST_VIDEO_COMP_R 0
+ #define GST_VIDEO_COMP_G 1
+ #define GST_VIDEO_COMP_B 2
+
+ /* alpha component */
+ #define GST_VIDEO_COMP_A 3
+
+ /* palette components */
+ #define GST_VIDEO_COMP_INDEX 0
+ #define GST_VIDEO_COMP_PALETTE 1
+
+ #include <gst/video/video-chroma.h>
+
+ /**
+ * GstVideoPackFlags:
+ * @GST_VIDEO_PACK_FLAG_NONE: No flag
+ * @GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE: When the source has a smaller depth
+ * than the target format, set the least significant bits of the target
+ * to 0. This is likely slightly faster but less accurate. When this flag
+ * is not specified, the most significant bits of the source are duplicated
+ * in the least significant bits of the destination.
+ * @GST_VIDEO_PACK_FLAG_INTERLACED: The source is interlaced. The unpacked
+ * format will be interlaced as well with each line containing
+ * information from alternating fields. (Since: 1.2)
+ *
+ * The different flags that can be used when packing and unpacking.
+ */
+ /* Bit flags passed to GstVideoFormatPack/GstVideoFormatUnpack functions;
+ * combinable with bitwise OR. */
+ typedef enum
+ {
+ GST_VIDEO_PACK_FLAG_NONE = 0,
+ GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE = (1 << 0),
+ GST_VIDEO_PACK_FLAG_INTERLACED = (1 << 1)
+ } GstVideoPackFlags;
+
+ /**
+ * GstVideoFormatUnpack:
+ * @info: a #GstVideoFormatInfo
+ * @flags: flags to control the unpacking
+ * @dest: a destination array
+ * @data: pointers to the data planes
+ * @stride: strides of the planes
+ * @x: the x position in the image to start from
+ * @y: the y position in the image to start from
+ * @width: the amount of pixels to unpack.
+ *
+ * Unpacks @width pixels from the given planes and strides containing data of
+ * format @info. The pixels will be unpacked into @dest with each component
+ * interleaved as per @info's unpack_format, which will usually be one of
+ * #GST_VIDEO_FORMAT_ARGB, #GST_VIDEO_FORMAT_AYUV, #GST_VIDEO_FORMAT_ARGB64 or
+ * #GST_VIDEO_FORMAT_AYUV64 depending on the format to unpack.
+ * @dest should at least be big enough to hold @width * bytes_per_pixel bytes
+ * where bytes_per_pixel relates to the unpack format and will usually be
+ * either 4 or 8 depending on the unpack format. bytes_per_pixel will be
+ * the same as the pixel stride for plane 0 for the above formats.
+ *
+ * For subsampled formats, the components will be duplicated in the destination
+ * array. Reconstruction of the missing components can be performed in a
+ * separate step after unpacking.
+ */
+ typedef void (*GstVideoFormatUnpack) (const GstVideoFormatInfo *info,
+ GstVideoPackFlags flags, gpointer dest,
+ const gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES],
+ gint x, gint y, gint width);
+ /**
+ * GstVideoFormatPack:
+ * @info: a #GstVideoFormatInfo
+ * @flags: flags to control the packing
+ * @src: a source array
+ * @sstride: the source array stride
+ * @data: pointers to the destination data planes
+ * @stride: strides of the destination planes
+ * @chroma_site: the chroma siting of the target when subsampled (not used)
+ * @y: the y position in the image to pack to
+ * @width: the amount of pixels to pack.
+ *
+ * Packs @width pixels from @src to the given planes and strides in the
+ * format @info. The pixels from source have each component interleaved
+ * and will be packed into the planes in @data.
+ *
+ * This function operates on pack_lines lines, meaning that @src should
+ * contain at least pack_lines lines with a stride of @sstride and @y
+ * should be a multiple of pack_lines.
+ *
+ * Subsampled formats will use the horizontally and vertically cosited
+ * component from the source. Subsampling should be performed before
+ * packing.
+ *
+ * Because this function does not have a x coordinate, it is not possible to
+ * pack pixels starting from an unaligned position. For tiled images this
+ * means that packing should start from a tile coordinate. For subsampled
+ * formats this means that a complete pixel needs to be packed.
+ */
+ /* FIXME(2.0): remove the chroma_site, it is unused and is not relevant for
+ * packing, chroma subsampling based on chroma-site should be done in a separate
+ * step before packing*/
+ typedef void (*GstVideoFormatPack) (const GstVideoFormatInfo *info,
+ GstVideoPackFlags flags,
+ const gpointer src, gint sstride,
+ gpointer data[GST_VIDEO_MAX_PLANES],
+ const gint stride[GST_VIDEO_MAX_PLANES],
+ GstVideoChromaSite chroma_site,
+ gint y, gint width);
+
+ /**
+ * GstVideoFormatInfo:
+ * @format: #GstVideoFormat
+ * @name: string representation of the format
+ * @description: user readable description of the format
+ * @flags: #GstVideoFormatFlags
+ * @bits: The number of bits used to pack data items. This can be less than 8
+ * when multiple pixels are stored in a byte. for values > 8 multiple bytes
+ * should be read according to the endianness flag before applying the shift
+ * and mask.
+ * @n_components: the number of components in the video format.
+ * @shift: the number of bits to shift away to get the component data
+ * @depth: the depth in bits for each component
+ * @pixel_stride: the pixel stride of each component. This is the amount of
+ * bytes to the pixel immediately to the right. When bits < 8, the stride is
+ * expressed in bits. For 24-bit RGB, this would be 3 bytes, for example,
+ * while it would be 4 bytes for RGBx or ARGB.
+ * @n_planes: the number of planes for this format. The number of planes can be
+ * less than the amount of components when multiple components are packed into
+ * one plane.
+ * @plane: the plane number where a component can be found
+ * @poffset: the offset in the plane where the first pixel of the components
+ * can be found.
+ * @w_sub: subsampling factor of the width for the component. Use
+ * GST_VIDEO_SUB_SCALE to scale a width.
+ * @h_sub: subsampling factor of the height for the component. Use
+ * GST_VIDEO_SUB_SCALE to scale a height.
+ * @unpack_format: the format of the unpacked pixels. This format must have the
+ * #GST_VIDEO_FORMAT_FLAG_UNPACK flag set.
+ * @unpack_func: an unpack function for this format
+ * @pack_lines: the amount of lines that will be packed
+ * @pack_func: a pack function for this format
+ * @tile_mode: The tiling mode
+ * @tile_ws: The width of a tile, in bytes, represented as a shift
+ * @tile_hs: The height of a tile, in bytes, represented as a shift
+ *
+ * Information for a video format.
+ */
+ struct _GstVideoFormatInfo {
+ GstVideoFormat format; /* the format this info describes */
+ const gchar *name; /* string representation of the format */
+ const gchar *description; /* user readable description */
+ GstVideoFormatFlags flags; /* OR of GstVideoFormatFlags */
+ guint bits; /* bits per packed item; <8 means several pixels per byte */
+ guint n_components; /* number of components in the format */
+ guint shift[GST_VIDEO_MAX_COMPONENTS]; /* bit shift to reach each component */
+ guint depth[GST_VIDEO_MAX_COMPONENTS]; /* depth in bits of each component */
+ gint pixel_stride[GST_VIDEO_MAX_COMPONENTS]; /* bytes (bits when bits<8) to the next pixel */
+ guint n_planes; /* number of planes; can be < n_components for packed planes */
+ guint plane[GST_VIDEO_MAX_COMPONENTS]; /* plane index holding each component */
+ guint poffset[GST_VIDEO_MAX_COMPONENTS]; /* offset of the component's first pixel in its plane */
+ guint w_sub[GST_VIDEO_MAX_COMPONENTS]; /* width subsampling shift, use GST_VIDEO_SUB_SCALE */
+ guint h_sub[GST_VIDEO_MAX_COMPONENTS]; /* height subsampling shift, use GST_VIDEO_SUB_SCALE */
+
+ GstVideoFormat unpack_format; /* format produced by unpack_func; has the UNPACK flag */
+ GstVideoFormatUnpack unpack_func; /* unpack function for this format */
+ gint pack_lines; /* number of lines packed at once by pack_func */
+ GstVideoFormatPack pack_func; /* pack function for this format */
+
+ GstVideoTileMode tile_mode; /* tiling mode, for TILED formats */
+ guint tile_ws; /* tile width in bytes, expressed as a shift */
+ guint tile_hs; /* tile height in bytes, expressed as a shift */
+
+ /*< private >*/
+ gpointer _gst_reserved[GST_PADDING];
+ };
+
+ #define GST_VIDEO_FORMAT_INFO_FORMAT(info) ((info)->format)
+ #define GST_VIDEO_FORMAT_INFO_NAME(info) ((info)->name)
+ #define GST_VIDEO_FORMAT_INFO_FLAGS(info) ((info)->flags)
+
+ #define GST_VIDEO_FORMAT_INFO_IS_YUV(info) ((info)->flags & GST_VIDEO_FORMAT_FLAG_YUV)
+ #define GST_VIDEO_FORMAT_INFO_IS_RGB(info) ((info)->flags & GST_VIDEO_FORMAT_FLAG_RGB)
+ #define GST_VIDEO_FORMAT_INFO_IS_GRAY(info) ((info)->flags & GST_VIDEO_FORMAT_FLAG_GRAY)
+ #define GST_VIDEO_FORMAT_INFO_HAS_ALPHA(info) ((info)->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)
+ #define GST_VIDEO_FORMAT_INFO_IS_LE(info) ((info)->flags & GST_VIDEO_FORMAT_FLAG_LE)
+ #define GST_VIDEO_FORMAT_INFO_HAS_PALETTE(info) ((info)->flags & GST_VIDEO_FORMAT_FLAG_PALETTE)
+ #define GST_VIDEO_FORMAT_INFO_IS_COMPLEX(info) ((info)->flags & GST_VIDEO_FORMAT_FLAG_COMPLEX)
+ #define GST_VIDEO_FORMAT_INFO_IS_TILED(info) ((info)->flags & GST_VIDEO_FORMAT_FLAG_TILED)
+
+ #define GST_VIDEO_FORMAT_INFO_BITS(info) ((info)->bits)
+ #define GST_VIDEO_FORMAT_INFO_N_COMPONENTS(info) ((info)->n_components)
+ #define GST_VIDEO_FORMAT_INFO_SHIFT(info,c) ((info)->shift[c])
+ #define GST_VIDEO_FORMAT_INFO_DEPTH(info,c) ((info)->depth[c])
+ /**
+ * GST_VIDEO_FORMAT_INFO_PSTRIDE:
+ * @info: a #GstVideoFormatInfo
+ * @c: the component index
+ *
+ * pixel stride for the given component. This is the amount of bytes to the
+ * pixel immediately to the right, so basically bytes from one pixel to the
+ * next. When bits < 8, the stride is expressed in bits.
+ *
+ * Examples: for 24-bit RGB, the pixel stride would be 3 bytes, while it
+ * would be 4 bytes for RGBx or ARGB, and 8 bytes for ARGB64 or AYUV64.
+ * For planar formats such as I420 the pixel stride is usually 1. For
+ * YUY2 it would be 2 bytes.
+ */
+ #define GST_VIDEO_FORMAT_INFO_PSTRIDE(info,c) ((info)->pixel_stride[c])
+ /**
+ * GST_VIDEO_FORMAT_INFO_N_PLANES:
+ * @info: a #GstVideoFormatInfo
+ *
+ * Number of planes. This is the number of planes the pixel layout is
+ * organized in in memory. The number of planes can be less than the
+ * number of components (e.g. Y,U,V,A or R, G, B, A) when multiple
+ * components are packed into one plane.
+ *
+ * Examples: RGB/RGBx/RGBA: 1 plane, 3/3/4 components;
+ * I420: 3 planes, 3 components; NV21/NV12: 2 planes, 3 components.
+ */
+ #define GST_VIDEO_FORMAT_INFO_N_PLANES(info) ((info)->n_planes)
+ /**
+ * GST_VIDEO_FORMAT_INFO_PLANE:
+ * @info: a #GstVideoFormatInfo
+ * @c: the component index
+ *
+ * Plane number where the given component can be found. A plane may
+ * contain data for multiple components.
+ */
+ #define GST_VIDEO_FORMAT_INFO_PLANE(info,c) ((info)->plane[c])
+ #define GST_VIDEO_FORMAT_INFO_POFFSET(info,c) ((info)->poffset[c])
+ #define GST_VIDEO_FORMAT_INFO_W_SUB(info,c) ((info)->w_sub[c])
+ #define GST_VIDEO_FORMAT_INFO_H_SUB(info,c) ((info)->h_sub[c])
+
+ /* rounds up */
+ #define GST_VIDEO_SUB_SCALE(scale,val) (-((-((gint)(val)))>>(scale)))
+
+ #define GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(info,c,w) GST_VIDEO_SUB_SCALE ((info)->w_sub[c],(w))
+ #define GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(info,c,h) GST_VIDEO_SUB_SCALE ((info)->h_sub[c],(h))
+
+ #define GST_VIDEO_FORMAT_INFO_DATA(info,planes,comp) \
+ (((guint8*)(planes)[(info)->plane[comp]]) + (info)->poffset[comp])
+ /**
+ * GST_VIDEO_FORMAT_INFO_STRIDE:
+ * @info: a #GstVideoFormatInfo
+ * @strides: an array of strides
+ * @comp: the component index
+ *
+ * Row stride in bytes, that is number of bytes from the first pixel component
+ * of a row to the first pixel component in the next row. This might include
+ * some row padding (memory not actually used for anything, to make sure the
+ * beginning of the next row is aligned in a particular way).
+ */
+ #define GST_VIDEO_FORMAT_INFO_STRIDE(info,strides,comp) ((strides)[(info)->plane[comp]])
+ #define GST_VIDEO_FORMAT_INFO_OFFSET(info,offsets,comp) \
+ (((offsets)[(info)->plane[comp]]) + (info)->poffset[comp])
+
+ #define GST_VIDEO_FORMAT_INFO_TILE_MODE(info) ((info)->tile_mode)
+ #define GST_VIDEO_FORMAT_INFO_TILE_WS(info) ((info)->tile_ws)
+ #define GST_VIDEO_FORMAT_INFO_TILE_HS(info) ((info)->tile_hs)
+
+ GST_VIDEO_API
+ void gst_video_format_info_component (const GstVideoFormatInfo *info, guint plane, gint components[GST_VIDEO_MAX_COMPONENTS]);
+
+ /* format properties */
+
+ GST_VIDEO_API
+ GstVideoFormat gst_video_format_from_masks (gint depth, gint bpp, gint endianness,
+ guint red_mask, guint green_mask,
+ guint blue_mask, guint alpha_mask) G_GNUC_CONST;
+
+ GST_VIDEO_API
+ GstVideoFormat gst_video_format_from_fourcc (guint32 fourcc) G_GNUC_CONST;
+
+ GST_VIDEO_API
+ GstVideoFormat gst_video_format_from_string (const gchar *format) G_GNUC_CONST;
+
+ GST_VIDEO_API
+ guint32 gst_video_format_to_fourcc (GstVideoFormat format) G_GNUC_CONST;
+
+ GST_VIDEO_API
+ const gchar * gst_video_format_to_string (GstVideoFormat format) G_GNUC_CONST;
+
+ GST_VIDEO_API
+ const GstVideoFormatInfo *
+ gst_video_format_get_info (GstVideoFormat format) G_GNUC_CONST;
+
+ GST_VIDEO_API
+ gconstpointer gst_video_format_get_palette (GstVideoFormat format, gsize *size);
+
+ #define GST_VIDEO_SIZE_RANGE "(int) [ 1, max ]"
+ #define GST_VIDEO_FPS_RANGE "(fraction) [ 0, max ]"
+
+ #if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ # define GST_VIDEO_NE(s) G_STRINGIFY(s)"_LE"
+ # define GST_VIDEO_OE(s) G_STRINGIFY(s)"_BE"
+ #else
+ # define GST_VIDEO_NE(s) G_STRINGIFY(s)"_BE"
+ # define GST_VIDEO_OE(s) G_STRINGIFY(s)"_LE"
+ #endif
+
+ /**
+ * GST_VIDEO_FORMATS_ALL:
+ *
+ * List of all video formats, for use in template caps strings.
+ *
+ * Formats are sorted by decreasing "quality", using these criteria by priority:
+ * - number of components
+ * - depth
+ * - subsampling factor of the width
+ * - subsampling factor of the height
+ * - number of planes
+ * - native endianness preferred
+ * - pixel stride
+ * - poffset
+ * - prefer non-complex formats
+ * - prefer YUV formats over RGB ones
+ * - prefer I420 over YV12
+ * - format name
+ */
+ #if G_BYTE_ORDER == G_BIG_ENDIAN
+ #define GST_VIDEO_FORMATS_ALL "{ AYUV64, ARGB64, GBRA_12BE, GBRA_12LE, Y412_BE, " \
+ "Y412_LE, A444_10BE, GBRA_10BE, A444_10LE, GBRA_10LE, A422_10BE, A422_10LE, " \
+ "A420_10BE, A420_10LE, Y410, RGB10A2_LE, BGR10A2_LE, GBRA, ABGR, VUYA, BGRA, " \
+ "AYUV, ARGB, RGBA, A420, AV12, Y444_16BE, Y444_16LE, v216, P016_BE, P016_LE, Y444_12BE, " \
+ "GBR_12BE, Y444_12LE, GBR_12LE, I422_12BE, I422_12LE, Y212_BE, Y212_LE, I420_12BE, " \
+ "I420_12LE, P012_BE, P012_LE, Y444_10BE, GBR_10BE, Y444_10LE, GBR_10LE, r210, " \
+ "I422_10BE, I422_10LE, NV16_10LE32, Y210, v210, UYVP, I420_10BE, I420_10LE, " \
+ "P010_10BE, P010_10LE, NV12_10LE32, NV12_10LE40, Y444, RGBP, GBR, BGRP, NV24, xBGR, BGRx, " \
+ "xRGB, RGBx, BGR, IYU2, v308, RGB, Y42B, NV61, NV16, VYUY, UYVY, YVYU, YUY2, I420, " \
+ "YV12, NV21, NV12, NV12_64Z32, NV12_4L4, NV12_32L32, Y41B, IYU1, YVU9, YUV9, RGB16, " \
++ "S420, ITLV, SN12, ST12, SN21, INVZ, STV0, STV1, " \
+ "BGR16, RGB15, BGR15, RGB8P, GRAY16_BE, GRAY16_LE, GRAY10_LE32, GRAY8 }"
+ #elif G_BYTE_ORDER == G_LITTLE_ENDIAN
+ #define GST_VIDEO_FORMATS_ALL "{ AYUV64, ARGB64, GBRA_12LE, GBRA_12BE, Y412_LE, " \
+ "Y412_BE, A444_10LE, GBRA_10LE, A444_10BE, GBRA_10BE, A422_10LE, A422_10BE, " \
+ "A420_10LE, A420_10BE, RGB10A2_LE, BGR10A2_LE, Y410, GBRA, ABGR, VUYA, BGRA, " \
+ "AYUV, ARGB, RGBA, A420, AV12, Y444_16LE, Y444_16BE, v216, P016_LE, P016_BE, Y444_12LE, " \
+ "GBR_12LE, Y444_12BE, GBR_12BE, I422_12LE, I422_12BE, Y212_LE, Y212_BE, I420_12LE, " \
+ "I420_12BE, P012_LE, P012_BE, Y444_10LE, GBR_10LE, Y444_10BE, GBR_10BE, r210, " \
+ "I422_10LE, I422_10BE, NV16_10LE32, Y210, v210, UYVP, I420_10LE, I420_10BE, " \
+ "P010_10LE, NV12_10LE32, NV12_10LE40, P010_10BE, Y444, RGBP, GBR, BGRP, NV24, xBGR, BGRx, " \
+ "xRGB, RGBx, BGR, IYU2, v308, RGB, Y42B, NV61, NV16, VYUY, UYVY, YVYU, YUY2, I420, " \
+ "YV12, NV21, NV12, NV12_64Z32, NV12_4L4, NV12_32L32, Y41B, IYU1, YVU9, YUV9, RGB16, " \
++ "S420, ITLV, SN12, ST12, SN21, INVZ, STV0, STV1, " \
+ "BGR16, RGB15, BGR15, RGB8P, GRAY16_LE, GRAY16_BE, GRAY10_LE32, GRAY8 }"
+ #endif
+
+ GST_VIDEO_API
+ const GstVideoFormat * gst_video_formats_raw (guint * len);
+
+ /**
+ * GST_VIDEO_CAPS_MAKE:
+ * @format: string format that describes the pixel layout, as string
+ * (e.g. "I420", "RGB", "YV12", "YUY2", "AYUV", etc.)
+ *
+ * Generic caps string for video, for use in pad templates.
+ */
+ #define GST_VIDEO_CAPS_MAKE(format) \
+ "video/x-raw, " \
+ "format = (string) " format ", " \
+ "width = " GST_VIDEO_SIZE_RANGE ", " \
+ "height = " GST_VIDEO_SIZE_RANGE ", " \
+ "framerate = " GST_VIDEO_FPS_RANGE
+
+ /**
+ * GST_VIDEO_CAPS_MAKE_WITH_FEATURES:
+ * @format: string format that describes the pixel layout, as string
+ * (e.g. "I420", "RGB", "YV12", "YUY2", "AYUV", etc.)
+ * @features: Requires caps features as a string, e.g.
+ * "memory:SystemMemory".
+ *
+ * Generic caps string for video, for use in pad templates.
+ *
+ * Since: 1.2
+ */
+ #define GST_VIDEO_CAPS_MAKE_WITH_FEATURES(features,format) \
+ "video/x-raw(" features "), " \
+ "format = (string) " format ", " \
+ "width = " GST_VIDEO_SIZE_RANGE ", " \
+ "height = " GST_VIDEO_SIZE_RANGE ", " \
+ "framerate = " GST_VIDEO_FPS_RANGE
+
+ GST_VIDEO_API
+ GstCaps * gst_video_make_raw_caps (const GstVideoFormat formats[], guint len);
+
+ GST_VIDEO_API
+ GstCaps * gst_video_make_raw_caps_with_features (const GstVideoFormat formats[], guint len,
+ GstCapsFeatures * features);
+
+ G_END_DECLS
+
+ #endif /* __GST_VIDEO_FORMAT_H__ */
--- /dev/null
+ /* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Library <2002> Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * Copyright (C) 2007 David A. Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /**
+ * SECTION:video-info
+ * @title: GstVideoInfo
+ * @short_description: Structures and enumerations to describe raw images
+ */
+
+ #ifdef HAVE_CONFIG_H
+ # include "config.h"
+ #endif
+
+ #include <string.h>
+ #include <stdio.h>
+
+ #include "video-info.h"
+ #include "video-tile.h"
+
+ #ifndef GST_DISABLE_GST_DEBUG
+ #define GST_CAT_DEFAULT ensure_debug_category()
+ /* Lazily creates the "video-info" debug category exactly once.
+ * The category pointer is stored in a gsize so it can go through the
+ * thread-safe g_once_init_enter()/g_once_init_leave() one-shot pattern. */
+ static GstDebugCategory *
+ ensure_debug_category (void)
+ {
+ static gsize cat_gonce = 0;
+
+ if (g_once_init_enter (&cat_gonce)) {
+ gsize cat_done;
+
+ cat_done = (gsize) _gst_debug_category_new ("video-info", 0,
+ "video-info structure");
+
+ g_once_init_leave (&cat_gonce, cat_done);
+ }
+
+ return (GstDebugCategory *) cat_gonce;
+ }
+ #else
+ #define ensure_debug_category() /* NOOP */
+ #endif /* GST_DISABLE_GST_DEBUG */
+
+ /**
+ * gst_video_info_copy:
+ * @info: a #GstVideoInfo
+ *
+ * Copy a GstVideoInfo structure.
+ *
+ * Returns: a new #GstVideoInfo. free with gst_video_info_free.
+ *
+ * Since: 1.6
+ */
+ GstVideoInfo *
+ gst_video_info_copy (const GstVideoInfo * info)
+ {
+ /* A shallow slice-duplicate is sufficient: the finfo member points at
+ * const format info owned by the library (gst_video_format_get_info()),
+ * so nothing needs a deep copy. */
+ return g_slice_dup (GstVideoInfo, info);
+ }
+
+ /**
+ * gst_video_info_free:
+ * @info: a #GstVideoInfo
+ *
+ * Free a GstVideoInfo structure previously allocated with gst_video_info_new()
+ * or gst_video_info_copy().
+ *
+ * Since: 1.6
+ */
+ void
+ gst_video_info_free (GstVideoInfo * info)
+ {
+ /* Releases the slice allocated by gst_video_info_new() or
+ * gst_video_info_copy(). */
+ g_slice_free (GstVideoInfo, info);
+ }
+
+ /* Registers GstVideoInfo as a boxed GType so it can be stored in GValues
+ * and object properties; copy/free delegate to the functions above. */
+ G_DEFINE_BOXED_TYPE (GstVideoInfo, gst_video_info,
+ (GBoxedCopyFunc) gst_video_info_copy, (GBoxedFreeFunc) gst_video_info_free);
+
+ /**
+ * gst_video_info_new:
+ *
+ * Allocate a new #GstVideoInfo that is also initialized with
+ * gst_video_info_init().
+ *
+ * Returns: a new #GstVideoInfo. free with gst_video_info_free().
+ *
+ * Since: 1.6
+ */
+ GstVideoInfo *
+ gst_video_info_new (void)
+ {
+ GstVideoInfo *info;
+
+ /* Slice-allocate and reset to the documented defaults so the caller
+ * never sees uninitialized fields. */
+ info = g_slice_new (GstVideoInfo);
+ gst_video_info_init (info);
+
+ return info;
+ }
+
+ static gboolean fill_planes (GstVideoInfo * info,
+ gsize plane_size[GST_VIDEO_MAX_PLANES]);
+
+ /**
+ * gst_video_info_init:
+ * @info: (out caller-allocates): a #GstVideoInfo
+ *
+ * Initialize @info with default values.
+ */
+ void
+ gst_video_info_init (GstVideoInfo * info)
+ {
+ g_return_if_fail (info != NULL);
+
+ /* Zero everything first; fields below only override the non-zero defaults. */
+ memset (info, 0, sizeof (GstVideoInfo));
+
+ info->finfo = gst_video_format_get_info (GST_VIDEO_FORMAT_UNKNOWN);
+
+ info->views = 1;
+ /* arrange for sensible defaults, e.g. if turned into caps */
+ info->fps_n = 0; /* 0/1 framerate marks an unknown/variable rate */
+ info->fps_d = 1;
+ info->par_n = 1; /* 1/1 pixel-aspect-ratio: square pixels */
+ info->par_d = 1;
+ GST_VIDEO_INFO_MULTIVIEW_MODE (info) = GST_VIDEO_MULTIVIEW_MODE_NONE;
+ GST_VIDEO_INFO_MULTIVIEW_FLAGS (info) = GST_VIDEO_MULTIVIEW_FLAGS_NONE;
+ GST_VIDEO_INFO_FIELD_ORDER (info) = GST_VIDEO_FIELD_ORDER_UNKNOWN;
+ }
+
+ /* Builds a GstVideoColorimetry initializer by token-pasting the
+ * range/matrix/transfer/primaries enum suffixes. */
+ #define MAKE_COLORIMETRY(r,m,t,p) { \
+ GST_VIDEO_COLOR_RANGE ##r, GST_VIDEO_COLOR_MATRIX_ ##m, \
+ GST_VIDEO_TRANSFER_ ##t, GST_VIDEO_COLOR_PRIMARIES_ ##p }
+
+ /* Indices into default_color[]; must stay in sync with the table below. */
+ #define DEFAULT_YUV_SD 0
+ #define DEFAULT_YUV_HD 1
+ #define DEFAULT_RGB 2
+ #define DEFAULT_GRAY 3
+ #define DEFAULT_UNKNOWN 4
+ #define DEFAULT_YUV_UHD 5
+
+ /* Default colorimetry per content class, selected in
+ * set_default_colorimetry(). */
+ static const GstVideoColorimetry default_color[] = {
+ MAKE_COLORIMETRY (_16_235, BT601, BT601, SMPTE170M),
+ MAKE_COLORIMETRY (_16_235, BT709, BT709, BT709),
+ MAKE_COLORIMETRY (_0_255, RGB, SRGB, BT709),
+ MAKE_COLORIMETRY (_0_255, BT601, UNKNOWN, UNKNOWN),
+ MAKE_COLORIMETRY (_UNKNOWN, UNKNOWN, UNKNOWN, UNKNOWN),
+ MAKE_COLORIMETRY (_16_235, BT2020, BT2020_12, BT2020),
+ };
+
+ /* Picks a default colorimetry (and chroma site for YUV) for @info based on
+ * the format class and, for YUV, a height heuristic: >=2160 is treated as
+ * UHD (BT2020), >576 as HD (BT709), anything else as SD (BT601). */
+ static void
+ set_default_colorimetry (GstVideoInfo * info)
+ {
+ const GstVideoFormatInfo *finfo = info->finfo;
+
+ if (GST_VIDEO_FORMAT_INFO_IS_YUV (finfo)) {
+ if (info->height >= 2160) {
+ info->chroma_site = GST_VIDEO_CHROMA_SITE_H_COSITED;
+ info->colorimetry = default_color[DEFAULT_YUV_UHD];
+ } else if (info->height > 576) {
+ info->chroma_site = GST_VIDEO_CHROMA_SITE_H_COSITED;
+ info->colorimetry = default_color[DEFAULT_YUV_HD];
+ } else {
+ info->chroma_site = GST_VIDEO_CHROMA_SITE_NONE;
+ info->colorimetry = default_color[DEFAULT_YUV_SD];
+ }
+ } else if (GST_VIDEO_FORMAT_INFO_IS_GRAY (finfo)) {
+ info->colorimetry = default_color[DEFAULT_GRAY];
+ } else if (GST_VIDEO_FORMAT_INFO_IS_RGB (finfo)) {
+ info->colorimetry = default_color[DEFAULT_RGB];
+ } else {
+ info->colorimetry = default_color[DEFAULT_UNKNOWN];
+ }
+ }
+
+ /* Sanity-checks the colorimetry/format combination: the RGB matrix is only
+ * valid for RGB formats, and YUV formats require a known color matrix.
+ * Returns FALSE (with a warning) on mismatch. */
+ static gboolean
+ validate_colorimetry (GstVideoInfo * info)
+ {
+ const GstVideoFormatInfo *finfo = info->finfo;
+
+ if (!GST_VIDEO_FORMAT_INFO_IS_RGB (finfo) &&
+ info->colorimetry.matrix == GST_VIDEO_COLOR_MATRIX_RGB) {
+ GST_WARNING
+ ("color matrix RGB is only supported with RGB format, %s is not",
+ finfo->name);
+ return FALSE;
+ }
+
+ if (GST_VIDEO_FORMAT_INFO_IS_YUV (finfo) &&
+ info->colorimetry.matrix == GST_VIDEO_COLOR_MATRIX_UNKNOWN) {
+ GST_WARNING ("Need to specify a color matrix when using YUV format (%s)",
+ finfo->name);
+ return FALSE;
+ }
+
+ return TRUE;
+ }
+
+ /* Shared body of gst_video_info_set_format() and
+ * gst_video_info_set_interlaced_format(): resets @info, installs the
+ * format info and dimensions, and applies default colorimetry. Does NOT
+ * fill planes; callers do that afterwards. */
+ static gboolean
+ gst_video_info_set_format_common (GstVideoInfo * info, GstVideoFormat format,
+ guint width, guint height)
+ {
+ g_return_val_if_fail (info != NULL, FALSE);
+ g_return_val_if_fail (format != GST_VIDEO_FORMAT_UNKNOWN, FALSE);
+
+ /* dimensions are stored as signed ints, so reject anything larger */
+ if (width > G_MAXINT || height > G_MAXINT)
+ return FALSE;
+
+ gst_video_info_init (info);
+
+ info->finfo = gst_video_format_get_info (format);
+ info->width = width;
+ info->height = height;
+ info->views = 1;
+
+ set_default_colorimetry (info);
+
+ return TRUE;
+ }
+
+ /**
+ * gst_video_info_set_format:
+ * @info: a #GstVideoInfo
+ * @format: the format
+ * @width: a width
+ * @height: a height
+ *
+ * Set the default info for a video frame of @format and @width and @height.
+ *
+ * Note: This initializes @info first, no values are preserved. This function
+ * does not set the offsets correctly for interlaced vertically
+ * subsampled formats.
+ *
+ * Returns: %FALSE if the returned video info is invalid, e.g. because the
+ * size of a frame can't be represented as a 32 bit integer (Since: 1.12)
+ */
+ gboolean
+ gst_video_info_set_format (GstVideoInfo * info, GstVideoFormat format,
+ guint width, guint height)
+ {
+ if (!gst_video_info_set_format_common (info, format, width, height))
+ return FALSE;
+
+ /* compute per-plane strides/offsets/size; NULL means the caller does not
+ * need the individual plane sizes back */
+ return fill_planes (info, NULL);
+ }
+
+ /**
+ * gst_video_info_set_interlaced_format:
+ * @info: a #GstVideoInfo
+ * @format: the format
+ * @mode: a #GstVideoInterlaceMode
+ * @width: a width
+ * @height: a height
+ *
+ * Same as #gst_video_info_set_format but also allowing to set the interlaced
+ * mode.
+ *
+ * Returns: %FALSE if the returned video info is invalid, e.g. because the
+ * size of a frame can't be represented as a 32 bit integer.
+ *
+ * Since: 1.16
+ */
+ gboolean
+ gst_video_info_set_interlaced_format (GstVideoInfo * info,
+ GstVideoFormat format, GstVideoInterlaceMode mode, guint width,
+ guint height)
+ {
+ if (!gst_video_info_set_format_common (info, format, width, height))
+ return FALSE;
+
+ /* the interlace mode must be set before filling planes, since plane
+ * layout can depend on it */
+ GST_VIDEO_INFO_INTERLACE_MODE (info) = mode;
+ return fill_planes (info, NULL);
+ }
+
+ /* String names indexed by GstVideoInterlaceMode; entry order must match
+ * the enum values (used by the to_string/from_string converters below). */
+ static const gchar *interlace_mode[] = {
+ "progressive",
+ "interleaved",
+ "mixed",
+ "fields",
+ "alternate"
+ };
+
+ /**
+ * gst_video_interlace_mode_to_string:
+ * @mode: a #GstVideoInterlaceMode
+ *
+ * Convert @mode to its string representation.
+ *
+ * Returns: @mode as a string or NULL if @mode is invalid.
+ *
+ * Since: 1.6
+ */
+ const gchar *
+ gst_video_interlace_mode_to_string (GstVideoInterlaceMode mode)
+ {
+ /* cast to guint so negative values also fall out of range */
+ if (((guint) mode) >= G_N_ELEMENTS (interlace_mode))
+ return NULL;
+
+ return interlace_mode[mode];
+ }
+
+ /**
+ * gst_video_interlace_mode_from_string:
+ * @mode: a mode
+ *
+ * Convert @mode to a #GstVideoInterlaceMode
+ *
+ * Returns: the #GstVideoInterlaceMode of @mode or
+ * #GST_VIDEO_INTERLACE_MODE_PROGRESSIVE when @mode is not a valid
+ * string representation for a #GstVideoInterlaceMode.
+ *
+ * Since: 1.6
+ */
+ GstVideoInterlaceMode
+ gst_video_interlace_mode_from_string (const gchar * mode)
+ {
+ gint i;
+ /* linear search over the name table; the matching index IS the enum
+ * value. NOTE(review): assumes @mode is non-NULL — g_str_equal does not
+ * accept NULL. */
+ for (i = 0; i < G_N_ELEMENTS (interlace_mode); i++) {
+ if (g_str_equal (interlace_mode[i], mode))
+ return i;
+ }
+ return GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
+ }
+
+ /* String names indexed by GstVideoFieldOrder; entry order must match the
+ * enum values (used by the to_string/from_string converters below). */
+ static const gchar *field_order[] = {
+ "unknown",
+ "top-field-first",
+ "bottom-field-first"
+ };
+
+ /**
+ * gst_video_field_order_to_string:
+ * @order: a #GstVideoFieldOrder
+ *
+ * Convert @order to its string representation.
+ *
+ * Returns: @order as a string or NULL if @order is invalid.
+ *
+ * Since: 1.12
+ */
+ const gchar *
+ gst_video_field_order_to_string (GstVideoFieldOrder order)
+ {
+ /* cast to guint so negative values also fall out of range */
+ if (((guint) order) >= G_N_ELEMENTS (field_order))
+ return NULL;
+
+ return field_order[order];
+ }
+
+ /**
+ * gst_video_field_order_from_string:
+ * @order: a field order
+ *
+ * Convert @order to a #GstVideoFieldOrder
+ *
+ * Returns: the #GstVideoFieldOrder of @order or
+ * #GST_VIDEO_FIELD_ORDER_UNKNOWN when @order is not a valid
+ * string representation for a #GstVideoFieldOrder.
+ *
+ * Since: 1.12
+ */
+ GstVideoFieldOrder
+ gst_video_field_order_from_string (const gchar * order)
+ {
+ gint i;
+ /* linear search over the name table; the matching index IS the enum
+ * value. NOTE(review): assumes @order is non-NULL — g_str_equal does not
+ * accept NULL. */
+ for (i = 0; i < G_N_ELEMENTS (field_order); i++) {
+ if (g_str_equal (field_order[i], order))
+ return i;
+ }
+ return GST_VIDEO_FIELD_ORDER_UNKNOWN;
+ }
+
+ /**
+ * gst_video_info_from_caps:
+ * @info: (out caller-allocates): #GstVideoInfo
+ * @caps: a #GstCaps
+ *
+ * Parse @caps and update @info.
+ *
+ * Returns: TRUE if @caps could be parsed
+ */
+ gboolean
+ gst_video_info_from_caps (GstVideoInfo * info, const GstCaps * caps)
+ {
+ GstStructure *structure;
+ const gchar *s;
+ GstVideoFormat format = GST_VIDEO_FORMAT_UNKNOWN;
+ gint width = 0, height = 0;
+ gint fps_n, fps_d;
+ gint par_n, par_d;
+ guint multiview_flags;
+
+ g_return_val_if_fail (info != NULL, FALSE);
+ g_return_val_if_fail (caps != NULL, FALSE);
+ g_return_val_if_fail (gst_caps_is_fixed (caps), FALSE);
+
+ GST_DEBUG ("parsing caps %" GST_PTR_FORMAT, caps);
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ if (gst_structure_has_name (structure, "video/x-raw")) {
+ if (!(s = gst_structure_get_string (structure, "format")))
+ goto no_format;
+
+ format = gst_video_format_from_string (s);
+ if (format == GST_VIDEO_FORMAT_UNKNOWN)
+ goto unknown_format;
+
+ } else if (g_str_has_prefix (gst_structure_get_name (structure), "video/") ||
+ g_str_has_prefix (gst_structure_get_name (structure), "image/")) {
+ format = GST_VIDEO_FORMAT_ENCODED;
+ } else {
+ goto wrong_name;
+ }
+
+ /* width and height are mandatory, except for non-raw-formats */
+ if (!gst_structure_get_int (structure, "width", &width) &&
+ format != GST_VIDEO_FORMAT_ENCODED)
+ goto no_width;
+ if (!gst_structure_get_int (structure, "height", &height) &&
+ format != GST_VIDEO_FORMAT_ENCODED)
+ goto no_height;
+
+ gst_video_info_init (info);
+
+ info->finfo = gst_video_format_get_info (format);
+ info->width = width;
+ info->height = height;
+
+ if (gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d)) {
+ if (fps_n == 0) {
+ /* variable framerate */
+ info->flags |= GST_VIDEO_FLAG_VARIABLE_FPS;
+ /* see if we have a max-framerate */
+ gst_structure_get_fraction (structure, "max-framerate", &fps_n, &fps_d);
+ }
+ info->fps_n = fps_n;
+ info->fps_d = fps_d;
+ } else {
+ /* unspecified is variable framerate */
+ info->fps_n = 0;
+ info->fps_d = 1;
+ }
+
+ if (gst_structure_get_fraction (structure, "pixel-aspect-ratio",
+ &par_n, &par_d)) {
+ info->par_n = par_n;
+ info->par_d = par_d;
+ } else {
+ info->par_n = 1;
+ info->par_d = 1;
+ }
+
+ if ((s = gst_structure_get_string (structure, "interlace-mode")))
+ info->interlace_mode = gst_video_interlace_mode_from_string (s);
+ else
+ info->interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
+
+ /* Interlaced feature is mandatory for raw alternate streams */
+ if (info->interlace_mode == GST_VIDEO_INTERLACE_MODE_ALTERNATE &&
+ format != GST_VIDEO_FORMAT_ENCODED) {
+ GstCapsFeatures *f;
+
+ f = gst_caps_get_features (caps, 0);
+ if (!f
+ || !gst_caps_features_contains (f, GST_CAPS_FEATURE_FORMAT_INTERLACED))
+ goto alternate_no_feature;
+ }
+
+ if (GST_VIDEO_INFO_IS_INTERLACED (info) &&
+ (s = gst_structure_get_string (structure, "field-order"))) {
+ GST_VIDEO_INFO_FIELD_ORDER (info) = gst_video_field_order_from_string (s);
+ } else {
+ GST_VIDEO_INFO_FIELD_ORDER (info) = GST_VIDEO_FIELD_ORDER_UNKNOWN;
+ }
+
+ {
+ if ((s = gst_structure_get_string (structure, "multiview-mode")))
+ GST_VIDEO_INFO_MULTIVIEW_MODE (info) =
+ gst_video_multiview_mode_from_caps_string (s);
+ else
+ GST_VIDEO_INFO_MULTIVIEW_MODE (info) = GST_VIDEO_MULTIVIEW_MODE_NONE;
+
+ if (gst_structure_get_flagset (structure, "multiview-flags",
+ &multiview_flags, NULL))
+ GST_VIDEO_INFO_MULTIVIEW_FLAGS (info) = multiview_flags;
+
+ if (!gst_structure_get_int (structure, "views", &info->views))
+ info->views = 1;
+
+ /* At one point, I tried normalising the half-aspect flag here,
+ * but it behaves weird for GstVideoInfo operations other than
+ * directly converting to/from caps - sometimes causing the
+ * PAR to be doubled/halved too many times */
+ }
+
+ if ((s = gst_structure_get_string (structure, "chroma-site")))
+ info->chroma_site = gst_video_chroma_site_from_string (s);
+ else
+ info->chroma_site = GST_VIDEO_CHROMA_SITE_UNKNOWN;
+
+ if ((s = gst_structure_get_string (structure, "colorimetry"))) {
+ if (!gst_video_colorimetry_from_string (&info->colorimetry, s)) {
+ GST_WARNING ("unparsable colorimetry, using default");
+ set_default_colorimetry (info);
+ } else if (!validate_colorimetry (info)) {
+ GST_WARNING ("invalid colorimetry, using default");
+ set_default_colorimetry (info);
+ } else {
+ /* force RGB matrix for RGB formats */
+ if (GST_VIDEO_FORMAT_INFO_IS_RGB (info->finfo) &&
+ info->colorimetry.matrix != GST_VIDEO_COLOR_MATRIX_RGB) {
+ GST_WARNING ("invalid matrix %d for RGB format, using RGB",
+ info->colorimetry.matrix);
+ info->colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_RGB;
+ }
+ }
+ } else {
+ GST_DEBUG ("no colorimetry, using default");
+ set_default_colorimetry (info);
+ }
+
+ if (!fill_planes (info, NULL))
+ return FALSE;
+
+ return TRUE;
+
+ /* ERROR */
+ wrong_name:
+ {
+ GST_ERROR ("wrong name '%s', expected video/ or image/",
+ gst_structure_get_name (structure));
+ return FALSE;
+ }
+ no_format:
+ {
+ GST_ERROR ("no format given");
+ return FALSE;
+ }
+ unknown_format:
+ {
+ GST_ERROR ("unknown format '%s' given", s);
+ return FALSE;
+ }
+ no_width:
+ {
+ GST_ERROR ("no width property given");
+ return FALSE;
+ }
+ no_height:
+ {
+ GST_ERROR ("no height property given");
+ return FALSE;
+ }
+ alternate_no_feature:
+ {
+ GST_ERROR
+ ("caps has 'interlace-mode=alternate' but doesn't have the Interlaced feature");
+ return FALSE;
+ }
+ }
+
+ /**
+ * gst_video_info_is_equal:
+ * @info: a #GstVideoInfo
+ * @other: a #GstVideoInfo
+ *
+ * Compares two #GstVideoInfo and returns whether they are equal or not
+ *
+ * Returns: %TRUE if @info and @other are equal, else %FALSE.
+ */
+ gboolean
+ gst_video_info_is_equal (const GstVideoInfo * info, const GstVideoInfo * other)
+ {
+ gint i;
+
+ if (GST_VIDEO_INFO_FORMAT (info) != GST_VIDEO_INFO_FORMAT (other))
+ return FALSE;
+ if (GST_VIDEO_INFO_INTERLACE_MODE (info) !=
+ GST_VIDEO_INFO_INTERLACE_MODE (other))
+ return FALSE;
+ if (GST_VIDEO_INFO_FLAGS (info) != GST_VIDEO_INFO_FLAGS (other))
+ return FALSE;
+ if (GST_VIDEO_INFO_WIDTH (info) != GST_VIDEO_INFO_WIDTH (other))
+ return FALSE;
+ if (GST_VIDEO_INFO_HEIGHT (info) != GST_VIDEO_INFO_HEIGHT (other))
+ return FALSE;
+ if (GST_VIDEO_INFO_SIZE (info) != GST_VIDEO_INFO_SIZE (other))
+ return FALSE;
+ if (GST_VIDEO_INFO_PAR_N (info) != GST_VIDEO_INFO_PAR_N (other))
+ return FALSE;
+ if (GST_VIDEO_INFO_PAR_D (info) != GST_VIDEO_INFO_PAR_D (other))
+ return FALSE;
+ if (GST_VIDEO_INFO_FPS_N (info) != GST_VIDEO_INFO_FPS_N (other))
+ return FALSE;
+ if (GST_VIDEO_INFO_FPS_D (info) != GST_VIDEO_INFO_FPS_D (other))
+ return FALSE;
+ if (!gst_video_colorimetry_is_equal (&GST_VIDEO_INFO_COLORIMETRY (info),
+ &GST_VIDEO_INFO_COLORIMETRY (other)))
+ return FALSE;
+ if (GST_VIDEO_INFO_CHROMA_SITE (info) != GST_VIDEO_INFO_CHROMA_SITE (other))
+ return FALSE;
+ if (GST_VIDEO_INFO_MULTIVIEW_MODE (info) !=
+ GST_VIDEO_INFO_MULTIVIEW_MODE (other))
+ return FALSE;
+ if (GST_VIDEO_INFO_MULTIVIEW_FLAGS (info) !=
+ GST_VIDEO_INFO_MULTIVIEW_FLAGS (other))
+ return FALSE;
+ if (GST_VIDEO_INFO_VIEWS (info) != GST_VIDEO_INFO_VIEWS (other))
+ return FALSE;
+
+ for (i = 0; i < info->finfo->n_planes; i++) {
+ if (info->stride[i] != other->stride[i])
+ return FALSE;
+ if (info->offset[i] != other->offset[i])
+ return FALSE;
+ }
+
+ return TRUE;
+ }
+
+ /**
+ * gst_video_info_to_caps:
+ * @info: a #GstVideoInfo
+ *
+ * Convert the values of @info into a #GstCaps.
+ *
+ * Returns: a new #GstCaps containing the info of @info.
+ */
GstCaps *
gst_video_info_to_caps (const GstVideoInfo * info)
{
  GstCaps *caps;
  const gchar *format;
  gchar *color;
  gint par_n, par_d;
  GstVideoColorimetry colorimetry;

  g_return_val_if_fail (info != NULL, NULL);
  g_return_val_if_fail (info->finfo != NULL, NULL);
  g_return_val_if_fail (info->finfo->format != GST_VIDEO_FORMAT_UNKNOWN, NULL);

  format = gst_video_format_to_string (info->finfo->format);
  g_return_val_if_fail (format != NULL, NULL);

  /* Mandatory fields for raw video caps: format, width and height. */
  caps = gst_caps_new_simple ("video/x-raw",
      "format", G_TYPE_STRING, format,
      "width", G_TYPE_INT, info->width,
      "height", G_TYPE_INT, info->height, NULL);

  /* Work on local copies: the half-aspect multiview flag below may
   * scale the PAR before it is written into the caps. */
  par_n = info->par_n;
  par_d = info->par_d;

  gst_caps_set_simple (caps, "interlace-mode", G_TYPE_STRING,
      gst_video_interlace_mode_to_string (info->interlace_mode), NULL);

  /* field-order only makes sense for interleaved/alternate modes and
   * only when it is actually known. */
  if ((info->interlace_mode == GST_VIDEO_INTERLACE_MODE_INTERLEAVED ||
      info->interlace_mode == GST_VIDEO_INTERLACE_MODE_ALTERNATE) &&
      GST_VIDEO_INFO_FIELD_ORDER (info) != GST_VIDEO_FIELD_ORDER_UNKNOWN) {
    gst_caps_set_simple (caps, "field-order", G_TYPE_STRING,
        gst_video_field_order_to_string (GST_VIDEO_INFO_FIELD_ORDER (info)),
        NULL);
  }

  if (info->interlace_mode == GST_VIDEO_INTERLACE_MODE_ALTERNATE) {
    /* 'alternate' mode must always be accompanied by interlaced caps feature.
     */
    GstCapsFeatures *features;

    features = gst_caps_features_new (GST_CAPS_FEATURE_FORMAT_INTERLACED, NULL);
    gst_caps_set_features (caps, 0, features);
  }

  if (GST_VIDEO_INFO_MULTIVIEW_MODE (info) != GST_VIDEO_MULTIVIEW_MODE_NONE) {
    const gchar *caps_str = NULL;
    GstVideoMultiviewFlags multiview_flags =
        GST_VIDEO_INFO_MULTIVIEW_FLAGS (info);

    /* If the half-aspect flag is set, applying it into the PAR of the
     * resulting caps now seems safe, and helps with automatic behaviour
     * in elements that aren't explicitly multiview aware */
    if (multiview_flags & GST_VIDEO_MULTIVIEW_FLAGS_HALF_ASPECT) {
      multiview_flags &= ~GST_VIDEO_MULTIVIEW_FLAGS_HALF_ASPECT;
      switch (GST_VIDEO_INFO_MULTIVIEW_MODE (info)) {
        case GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE:
        case GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE_QUINCUNX:
        case GST_VIDEO_MULTIVIEW_MODE_COLUMN_INTERLEAVED:
        case GST_VIDEO_MULTIVIEW_MODE_CHECKERBOARD:
          par_n *= 2;           /* double the width / half the height */
          break;
        case GST_VIDEO_MULTIVIEW_MODE_ROW_INTERLEAVED:
        case GST_VIDEO_MULTIVIEW_MODE_TOP_BOTTOM:
          par_d *= 2;           /* half the width / double the height */
          break;
        default:
          break;
      }
    }

    caps_str =
        gst_video_multiview_mode_to_caps_string (GST_VIDEO_INFO_MULTIVIEW_MODE
        (info));
    if (caps_str != NULL) {
      gst_caps_set_simple (caps, "multiview-mode", G_TYPE_STRING,
          caps_str, "multiview-flags", GST_TYPE_VIDEO_MULTIVIEW_FLAGSET,
          multiview_flags, GST_FLAG_SET_MASK_EXACT, NULL);
    }
  }

  gst_caps_set_simple (caps, "pixel-aspect-ratio",
      GST_TYPE_FRACTION, par_n, par_d, NULL);

  /* chroma-site is optional; skip it (with a warning) if it cannot be
   * expressed as a string. */
  if (info->chroma_site != GST_VIDEO_CHROMA_SITE_UNKNOWN) {
    gchar *chroma_site = gst_video_chroma_site_to_string (info->chroma_site);

    if (!chroma_site) {
      GST_WARNING ("Couldn't convert chroma-site 0x%x to string",
          info->chroma_site);
    } else {
      gst_caps_set_simple (caps,
          "chroma-site", G_TYPE_STRING, chroma_site, NULL);
      g_free (chroma_site);
    }
  }

  /* make sure we set the RGB matrix for RGB formats */
  colorimetry = info->colorimetry;
  if (GST_VIDEO_FORMAT_INFO_IS_RGB (info->finfo) &&
      colorimetry.matrix != GST_VIDEO_COLOR_MATRIX_RGB) {
    GST_WARNING ("invalid matrix %d for RGB format, using RGB",
        colorimetry.matrix);
    colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_RGB;
  }
  if ((color = gst_video_colorimetry_to_string (&colorimetry))) {
    gst_caps_set_simple (caps, "colorimetry", G_TYPE_STRING, color, NULL);
    g_free (color);
  }

  /* views is only serialized when there is more than one. */
  if (info->views > 1)
    gst_caps_set_simple (caps, "views", G_TYPE_INT, info->views, NULL);

  if (info->flags & GST_VIDEO_FLAG_VARIABLE_FPS && info->fps_n != 0) {
    /* variable fps with a max-framerate */
    gst_caps_set_simple (caps, "framerate", GST_TYPE_FRACTION, 0, 1,
        "max-framerate", GST_TYPE_FRACTION, info->fps_n, info->fps_d, NULL);
  } else {
    /* no variable fps or no max-framerate */
    gst_caps_set_simple (caps, "framerate", GST_TYPE_FRACTION,
        info->fps_n, info->fps_d, NULL);
  }

  return caps;
}
+
/* fill_planes:
 * Compute the per-plane stride and offset and the total frame size of @info
 * from its format, width and field height.  When @plane_size is non-NULL it
 * is additionally filled with the size in bytes of each plane (zero for
 * unused plane slots).
 *
 * Returns FALSE when the format is unknown or the frame size would overflow.
 *
 * Note (Tizen patch): the SR32/INVZ/S420/SN12/ST12/SN21 (and TV-profile
 * STV0/STV1) formats are laid out like their standard counterparts, and the
 * NV12 family uses a simplified 3/2 size computation under
 * TIZEN_FEATURE_VIDEO_MODIFICATION. */
static gboolean
fill_planes (GstVideoInfo * info, gsize plane_size[GST_VIDEO_MAX_PLANES])
{
  gsize width, height, cr_h;
  gint bpp = 0, i;

  width = (gsize) info->width;
  /* for alternate interlacing, a buffer carries a single field */
  height = (gsize) GST_VIDEO_INFO_FIELD_HEIGHT (info);

  /* Sanity check the resulting frame size for overflows */
  for (i = 0; i < GST_VIDEO_INFO_N_COMPONENTS (info); i++)
    bpp += GST_VIDEO_INFO_COMP_DEPTH (info, i);
  bpp = GST_ROUND_UP_8 (bpp) / 8;
  if (bpp > 0 && GST_ROUND_UP_128 ((guint64) width) * ((guint64) height) >=
      G_MAXUINT / bpp) {
    GST_ERROR ("Frame size %ux%u would overflow", info->width, info->height);
    return FALSE;
  }

  switch (info->finfo->format) {
      /* packed 4:2:2, 2 bytes per pixel */
    case GST_VIDEO_FORMAT_YUY2:
    case GST_VIDEO_FORMAT_YVYU:
    case GST_VIDEO_FORMAT_UYVY:
    case GST_VIDEO_FORMAT_VYUY:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
      /* packed formats, 4 bytes per pixel */
    case GST_VIDEO_FORMAT_AYUV:
    case GST_VIDEO_FORMAT_RGBx:
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_SR32:
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_xBGR:
    case GST_VIDEO_FORMAT_ABGR:
    case GST_VIDEO_FORMAT_r210:
    case GST_VIDEO_FORMAT_Y410:
    case GST_VIDEO_FORMAT_VUYA:
    case GST_VIDEO_FORMAT_BGR10A2_LE:
    case GST_VIDEO_FORMAT_RGB10A2_LE:
      info->stride[0] = width * 4;
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
      /* packed 16-bit RGB variants */
    case GST_VIDEO_FORMAT_RGB16:
    case GST_VIDEO_FORMAT_BGR16:
    case GST_VIDEO_FORMAT_RGB15:
    case GST_VIDEO_FORMAT_BGR15:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
      /* packed formats, 3 bytes per pixel */
    case GST_VIDEO_FORMAT_RGB:
    case GST_VIDEO_FORMAT_BGR:
    case GST_VIDEO_FORMAT_v308:
    case GST_VIDEO_FORMAT_IYU2:
      info->stride[0] = GST_ROUND_UP_4 (width * 3);
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
      /* v210: 6 pixels packed in 16 bytes, 48-pixel groups of 128 bytes */
    case GST_VIDEO_FORMAT_v210:
      info->stride[0] = ((width + 47) / 48) * 128;
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
    case GST_VIDEO_FORMAT_v216:
    case GST_VIDEO_FORMAT_Y210:
    case GST_VIDEO_FORMAT_Y212_BE:
    case GST_VIDEO_FORMAT_Y212_LE:
      info->stride[0] = GST_ROUND_UP_8 (width * 4);
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
    case GST_VIDEO_FORMAT_GRAY8:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
    case GST_VIDEO_FORMAT_GRAY16_BE:
    case GST_VIDEO_FORMAT_GRAY16_LE:
    case GST_VIDEO_FORMAT_INVZ:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
    case GST_VIDEO_FORMAT_UYVP:
      info->stride[0] = GST_ROUND_UP_4 ((width * 2 * 5 + 3) / 4);
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
      /* paletted: plane 1 is the 256-entry, 4-bytes-per-entry palette */
    case GST_VIDEO_FORMAT_RGB8P:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = 4;
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->size = info->offset[1] + (4 * 256);
      break;
    case GST_VIDEO_FORMAT_IYU1:
      info->stride[0] = GST_ROUND_UP_4 (GST_ROUND_UP_4 (width) +
          GST_ROUND_UP_4 (width) / 2);
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
      /* packed 64-bit formats, 8 bytes per pixel */
    case GST_VIDEO_FORMAT_ARGB64:
    case GST_VIDEO_FORMAT_AYUV64:
    case GST_VIDEO_FORMAT_Y412_BE:
    case GST_VIDEO_FORMAT_Y412_LE:
      info->stride[0] = width * 8;
      info->offset[0] = 0;
      info->size = info->stride[0] * height;
      break;
      /* planar 4:2:0, 3 planes */
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_S420:
    case GST_VIDEO_FORMAT_YV12:        /* same as I420, but plane 1+2 swapped */
#ifdef TIZEN_PROFILE_TV
    case GST_VIDEO_FORMAT_STV0:
    case GST_VIDEO_FORMAT_STV1:
#endif
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = GST_ROUND_UP_4 (GST_ROUND_UP_2 (width) / 2);
      info->stride[2] = info->stride[1];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      cr_h = GST_ROUND_UP_2 (height) / 2;
      if (GST_VIDEO_INFO_IS_INTERLACED (info))
        cr_h = GST_ROUND_UP_2 (cr_h);
      info->offset[2] = info->offset[1] + info->stride[1] * cr_h;
      info->size = info->offset[2] + info->stride[2] * cr_h;
      break;
    case GST_VIDEO_FORMAT_Y41B:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = GST_ROUND_UP_16 (width) / 4;
      info->stride[2] = info->stride[1];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->offset[2] = info->offset[1] + info->stride[1] * height;
      /* simplification of ROUNDUP4(w)*h + 2*((ROUNDUP16(w)/4)*h */
      info->size = (info->stride[0] + (GST_ROUND_UP_16 (width) / 2)) * height;
      break;
    case GST_VIDEO_FORMAT_Y42B:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = GST_ROUND_UP_8 (width) / 2;
      info->stride[2] = info->stride[1];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->offset[2] = info->offset[1] + info->stride[1] * height;
      /* simplification of ROUNDUP4(w)*h + 2*(ROUNDUP8(w)/2)*h */
      info->size = (info->stride[0] + GST_ROUND_UP_8 (width)) * height;
      break;
      /* planar 4:4:4, 3 planes of equal size */
    case GST_VIDEO_FORMAT_Y444:
    case GST_VIDEO_FORMAT_GBR:
    case GST_VIDEO_FORMAT_RGBP:
    case GST_VIDEO_FORMAT_BGRP:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = info->stride[0];
      info->stride[2] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->offset[2] = info->offset[1] * 2;
      info->size = info->stride[0] * height * 3;
      break;
    case GST_VIDEO_FORMAT_GBRA:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = info->stride[0];
      info->stride[2] = info->stride[0];
      info->stride[3] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->offset[2] = info->offset[1] * 2;
      info->offset[3] = info->offset[1] * 3;
      info->size = info->stride[0] * height * 4;
      break;
      /* semi-planar 4:2:0 */
    case GST_VIDEO_FORMAT_NV12:
#ifdef TIZEN_FEATURE_VIDEO_MODIFICATION
    case GST_VIDEO_FORMAT_SN12:
    case GST_VIDEO_FORMAT_ST12:
    case GST_VIDEO_FORMAT_SN21:
#endif
    case GST_VIDEO_FORMAT_NV21:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
#ifdef TIZEN_FEATURE_VIDEO_MODIFICATION
      /* Tizen: fixed 3/2 layout, no extra interlaced chroma rounding */
      info->size = info->stride[0] * GST_ROUND_UP_2 (height) * 3 / 2;
#else
      cr_h = GST_ROUND_UP_2 (height) / 2;
      if (GST_VIDEO_INFO_IS_INTERLACED (info))
        cr_h = GST_ROUND_UP_2 (cr_h);
      info->size = info->offset[1] + info->stride[0] * cr_h;
#endif
      break;
      /* NV12 plus a full-size alpha plane */
    case GST_VIDEO_FORMAT_AV12:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = info->stride[0];
      info->stride[2] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      info->offset[2] =
          info->offset[1] + (info->stride[1] * GST_ROUND_UP_2 (height) / 2);
      info->size = info->offset[2] + info->stride[2] * GST_ROUND_UP_2 (height);
      break;
      /* semi-planar 4:2:2 */
    case GST_VIDEO_FORMAT_NV16:
    case GST_VIDEO_FORMAT_NV61:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->size = info->stride[0] * height * 2;
      break;
      /* semi-planar 4:4:4 */
    case GST_VIDEO_FORMAT_NV24:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = GST_ROUND_UP_4 (width * 2);
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->size = info->stride[0] * height + info->stride[1] * height;
      break;
      /* I420 plus a full-size alpha plane */
    case GST_VIDEO_FORMAT_A420:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = GST_ROUND_UP_4 (GST_ROUND_UP_2 (width) / 2);
      info->stride[2] = info->stride[1];
      info->stride[3] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      cr_h = GST_ROUND_UP_2 (height) / 2;
      if (GST_VIDEO_INFO_IS_INTERLACED (info))
        cr_h = GST_ROUND_UP_2 (cr_h);
      info->offset[2] = info->offset[1] + info->stride[1] * cr_h;
      info->offset[3] = info->offset[2] + info->stride[2] * cr_h;
      info->size = info->offset[3] + info->stride[0] * GST_ROUND_UP_2 (height);
      break;
      /* planar 4:1:0, chroma subsampled by 4 in both directions */
    case GST_VIDEO_FORMAT_YUV9:
    case GST_VIDEO_FORMAT_YVU9:
      info->stride[0] = GST_ROUND_UP_4 (width);
      info->stride[1] = GST_ROUND_UP_4 (GST_ROUND_UP_4 (width) / 4);
      info->stride[2] = info->stride[1];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      cr_h = GST_ROUND_UP_4 (height) / 4;
      if (GST_VIDEO_INFO_IS_INTERLACED (info))
        cr_h = GST_ROUND_UP_2 (cr_h);
      info->offset[2] = info->offset[1] + info->stride[1] * cr_h;
      info->size = info->offset[2] + info->stride[2] * cr_h;
      break;
      /* 10/12-bit planar 4:2:0, 2 bytes per sample */
    case GST_VIDEO_FORMAT_I420_10LE:
    case GST_VIDEO_FORMAT_I420_10BE:
    case GST_VIDEO_FORMAT_I420_12LE:
    case GST_VIDEO_FORMAT_I420_12BE:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->stride[1] = GST_ROUND_UP_4 (width);
      info->stride[2] = info->stride[1];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      cr_h = GST_ROUND_UP_2 (height) / 2;
      if (GST_VIDEO_INFO_IS_INTERLACED (info))
        cr_h = GST_ROUND_UP_2 (cr_h);
      info->offset[2] = info->offset[1] + info->stride[1] * cr_h;
      info->size = info->offset[2] + info->stride[2] * cr_h;
      break;
      /* 10/12-bit planar 4:2:2 */
    case GST_VIDEO_FORMAT_I422_10LE:
    case GST_VIDEO_FORMAT_I422_10BE:
    case GST_VIDEO_FORMAT_I422_12LE:
    case GST_VIDEO_FORMAT_I422_12BE:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->stride[1] = GST_ROUND_UP_4 (width);
      info->stride[2] = info->stride[1];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      info->offset[2] = info->offset[1] +
          info->stride[1] * GST_ROUND_UP_2 (height);
      info->size = info->offset[2] + info->stride[2] * GST_ROUND_UP_2 (height);
      break;
      /* high-depth planar 4:4:4, 3 planes of equal size */
    case GST_VIDEO_FORMAT_Y444_10LE:
    case GST_VIDEO_FORMAT_Y444_10BE:
    case GST_VIDEO_FORMAT_Y444_12LE:
    case GST_VIDEO_FORMAT_Y444_12BE:
    case GST_VIDEO_FORMAT_GBR_10LE:
    case GST_VIDEO_FORMAT_GBR_10BE:
    case GST_VIDEO_FORMAT_GBR_12LE:
    case GST_VIDEO_FORMAT_GBR_12BE:
    case GST_VIDEO_FORMAT_Y444_16LE:
    case GST_VIDEO_FORMAT_Y444_16BE:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->stride[1] = info->stride[0];
      info->stride[2] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->offset[2] = info->offset[1] * 2;
      info->size = info->stride[0] * height * 3;
      break;
    case GST_VIDEO_FORMAT_GBRA_10LE:
    case GST_VIDEO_FORMAT_GBRA_10BE:
    case GST_VIDEO_FORMAT_GBRA_12LE:
    case GST_VIDEO_FORMAT_GBRA_12BE:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->stride[1] = info->stride[0];
      info->stride[2] = info->stride[0];
      info->stride[3] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->offset[2] = info->offset[1] * 2;
      info->offset[3] = info->offset[1] * 3;
      info->size = info->stride[0] * height * 4;
      break;
      /* tiled NV12 with fixed 64x32 tiles */
    case GST_VIDEO_FORMAT_NV12_64Z32:
      info->stride[0] =
          GST_VIDEO_TILE_MAKE_STRIDE (GST_ROUND_UP_128 (width) / 64,
          GST_ROUND_UP_32 (height) / 32);
      info->stride[1] =
          GST_VIDEO_TILE_MAKE_STRIDE (GST_ROUND_UP_128 (width) / 64,
          GST_ROUND_UP_64 (height) / 64);
      info->offset[0] = 0;
      info->offset[1] = GST_ROUND_UP_128 (width) * GST_ROUND_UP_32 (height);
      info->size = info->offset[1] +
          GST_ROUND_UP_128 (width) * (GST_ROUND_UP_64 (height) / 2);
      break;
      /* tiled NV12 with format-defined tile shift (ws/hs are log2 sizes) */
    case GST_VIDEO_FORMAT_NV12_4L4:
    case GST_VIDEO_FORMAT_NV12_32L32:
    {
      gint ws = GST_VIDEO_FORMAT_INFO_TILE_WS (info->finfo);
      gint hs = GST_VIDEO_FORMAT_INFO_TILE_HS (info->finfo);
      info->stride[0] =
          GST_VIDEO_TILE_MAKE_STRIDE (GST_ROUND_UP_N (width, 1 << ws) >> ws,
          GST_ROUND_UP_N (height, 1 << hs) >> hs);
      info->stride[1] =
          GST_VIDEO_TILE_MAKE_STRIDE (GST_ROUND_UP_N (width, 1 << ws) >> ws,
          GST_ROUND_UP_N (height, 1 << (hs + 1)) >> (hs + 1));
      info->offset[0] = 0;
      info->offset[1] =
          GST_ROUND_UP_N (width, 1 << ws) * GST_ROUND_UP_N (height, 1 << hs);
      info->size = info->offset[1] +
          GST_ROUND_UP_N (width, 1 << ws) *
          (GST_ROUND_UP_N (height, 1 << (hs + 1)) / 2);
      break;
    }
      /* 10-bit I420 plus alpha plane */
    case GST_VIDEO_FORMAT_A420_10LE:
    case GST_VIDEO_FORMAT_A420_10BE:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->stride[1] = GST_ROUND_UP_4 (width);
      info->stride[2] = info->stride[1];
      info->stride[3] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      cr_h = GST_ROUND_UP_2 (height) / 2;
      if (GST_VIDEO_INFO_IS_INTERLACED (info))
        cr_h = GST_ROUND_UP_2 (cr_h);
      info->offset[2] = info->offset[1] + info->stride[1] * cr_h;
      info->offset[3] = info->offset[2] + info->stride[2] * cr_h;
      info->size = info->offset[3] + info->stride[0] * GST_ROUND_UP_2 (height);
      break;
    case GST_VIDEO_FORMAT_A422_10LE:
    case GST_VIDEO_FORMAT_A422_10BE:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->stride[1] = GST_ROUND_UP_4 (width);
      info->stride[2] = info->stride[1];
      info->stride[3] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      info->offset[2] = info->offset[1] +
          info->stride[1] * GST_ROUND_UP_2 (height);
      info->offset[3] =
          info->offset[2] + info->stride[2] * GST_ROUND_UP_2 (height);
      info->size = info->offset[3] + info->stride[0] * GST_ROUND_UP_2 (height);
      break;
    case GST_VIDEO_FORMAT_A444_10LE:
    case GST_VIDEO_FORMAT_A444_10BE:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->stride[1] = info->stride[0];
      info->stride[2] = info->stride[0];
      info->stride[3] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->offset[2] = info->offset[1] * 2;
      info->offset[3] = info->offset[1] * 3;
      info->size = info->stride[0] * height * 4;
      break;
      /* semi-planar 4:2:0, 2 bytes per sample */
    case GST_VIDEO_FORMAT_P010_10LE:
    case GST_VIDEO_FORMAT_P010_10BE:
    case GST_VIDEO_FORMAT_P016_LE:
    case GST_VIDEO_FORMAT_P016_BE:
    case GST_VIDEO_FORMAT_P012_LE:
    case GST_VIDEO_FORMAT_P012_BE:
      info->stride[0] = GST_ROUND_UP_4 (width * 2);
      info->stride[1] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      cr_h = GST_ROUND_UP_2 (height) / 2;
      info->size = info->offset[1] + info->stride[0] * cr_h;
      break;
      /* 10-bit packed: 3 samples per 32-bit word */
    case GST_VIDEO_FORMAT_GRAY10_LE32:
      info->stride[0] = (width + 2) / 3 * 4;
      info->offset[0] = 0;
      info->size = info->stride[0] * GST_ROUND_UP_2 (height);
      break;
    case GST_VIDEO_FORMAT_NV12_10LE32:
      info->stride[0] = (width + 2) / 3 * 4;
      info->stride[1] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      cr_h = GST_ROUND_UP_2 (height) / 2;
      if (GST_VIDEO_INFO_IS_INTERLACED (info))
        cr_h = GST_ROUND_UP_2 (cr_h);
      info->size = info->offset[1] + info->stride[0] * cr_h;
      break;
    case GST_VIDEO_FORMAT_NV16_10LE32:
      info->stride[0] = (width + 2) / 3 * 4;
      info->stride[1] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * height;
      info->size = info->stride[0] * height * 2;
      break;
      /* 10-bit packed: 4 samples per 5 bytes */
    case GST_VIDEO_FORMAT_NV12_10LE40:
      info->stride[0] = ((width * 5 >> 2) + 4) / 5 * 5;
      info->stride[1] = info->stride[0];
      info->offset[0] = 0;
      info->offset[1] = info->stride[0] * GST_ROUND_UP_2 (height);
      cr_h = GST_ROUND_UP_2 (height) / 2;
      if (GST_VIDEO_INFO_IS_INTERLACED (info))
        cr_h = GST_ROUND_UP_2 (cr_h);
      info->size = info->offset[1] + info->stride[0] * cr_h;
      break;

      /* encoded formats have no fixed plane layout */
    case GST_VIDEO_FORMAT_ENCODED:
      break;
    case GST_VIDEO_FORMAT_UNKNOWN:
#ifdef TIZEN_FEATURE_VIDEO_MODIFICATION
    default:
#endif
      GST_ERROR ("invalid format");
      g_warning ("invalid format");
      return FALSE;
      break;
  }

  /* Optionally report the size of each plane: rows scaled to the plane's
   * (sub)sampled height times the plane stride. */
  if (plane_size) {
    for (i = 0; i < GST_VIDEO_MAX_PLANES; i++) {
      if (i < GST_VIDEO_INFO_N_PLANES (info)) {
        gint comp[GST_VIDEO_MAX_COMPONENTS];
        guint plane_height;

        /* Convert plane index to component index */
        gst_video_format_info_component (info->finfo, i, comp);
        plane_height =
            GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (info->finfo, comp[0],
            GST_VIDEO_INFO_FIELD_HEIGHT (info));
        plane_size[i] = plane_height * GST_VIDEO_INFO_PLANE_STRIDE (info, i);
      } else {
        plane_size[i] = 0;
      }
    }
  }

  return TRUE;
}
+
+ /**
+ * gst_video_info_convert:
+ * @info: a #GstVideoInfo
+ * @src_format: #GstFormat of the @src_value
+ * @src_value: value to convert
+ * @dest_format: #GstFormat of the @dest_value
+ * @dest_value: (out): pointer to destination value
+ *
+ * Converts among various #GstFormat types. This function handles
+ * GST_FORMAT_BYTES, GST_FORMAT_TIME, and GST_FORMAT_DEFAULT. For
+ * raw video, GST_FORMAT_DEFAULT corresponds to video frames. This
+ * function can be used to handle pad queries of the type GST_QUERY_CONVERT.
+ *
+ * Returns: TRUE if the conversion was successful.
+ */
+ gboolean
+ gst_video_info_convert (const GstVideoInfo * info,
+ GstFormat src_format, gint64 src_value,
+ GstFormat dest_format, gint64 * dest_value)
+ {
+ gboolean ret = FALSE;
+ int fps_n, fps_d;
+ gsize size;
+
+ g_return_val_if_fail (info != NULL, 0);
+ g_return_val_if_fail (info->finfo != NULL, 0);
+ g_return_val_if_fail (info->finfo->format != GST_VIDEO_FORMAT_UNKNOWN, 0);
+ g_return_val_if_fail (info->size > 0, 0);
+
+ size = info->size;
+ fps_n = info->fps_n;
+ fps_d = info->fps_d;
+
+ GST_DEBUG ("converting value %" G_GINT64_FORMAT " from %s to %s",
+ src_value, gst_format_get_name (src_format),
+ gst_format_get_name (dest_format));
+
+ if (src_format == dest_format) {
+ *dest_value = src_value;
+ ret = TRUE;
+ goto done;
+ }
+
+ if (src_value == -1) {
+ *dest_value = -1;
+ ret = TRUE;
+ goto done;
+ }
+
+ /* bytes to frames */
+ if (src_format == GST_FORMAT_BYTES && dest_format == GST_FORMAT_DEFAULT) {
+ if (size != 0) {
+ *dest_value = gst_util_uint64_scale (src_value, 1, size);
+ } else {
+ GST_ERROR ("blocksize is 0");
+ *dest_value = 0;
+ }
+ ret = TRUE;
+ goto done;
+ }
+
+ /* frames to bytes */
+ if (src_format == GST_FORMAT_DEFAULT && dest_format == GST_FORMAT_BYTES) {
+ *dest_value = gst_util_uint64_scale (src_value, size, 1);
+ ret = TRUE;
+ goto done;
+ }
+
+ /* time to frames */
+ if (src_format == GST_FORMAT_TIME && dest_format == GST_FORMAT_DEFAULT) {
+ if (fps_d != 0) {
+ *dest_value = gst_util_uint64_scale (src_value,
+ fps_n, GST_SECOND * fps_d);
+ } else {
+ GST_ERROR ("framerate denominator is 0");
+ *dest_value = 0;
+ }
+ ret = TRUE;
+ goto done;
+ }
+
+ /* frames to time */
+ if (src_format == GST_FORMAT_DEFAULT && dest_format == GST_FORMAT_TIME) {
+ if (fps_n != 0) {
+ *dest_value = gst_util_uint64_scale (src_value,
+ GST_SECOND * fps_d, fps_n);
+ } else {
+ GST_ERROR ("framerate numerator is 0");
+ *dest_value = 0;
+ }
+ ret = TRUE;
+ goto done;
+ }
+
+ /* time to bytes */
+ if (src_format == GST_FORMAT_TIME && dest_format == GST_FORMAT_BYTES) {
+ if (fps_d != 0) {
+ *dest_value = gst_util_uint64_scale (src_value,
+ fps_n * size, GST_SECOND * fps_d);
+ } else {
+ GST_ERROR ("framerate denominator is 0");
+ *dest_value = 0;
+ }
+ ret = TRUE;
+ goto done;
+ }
+
+ /* bytes to time */
+ if (src_format == GST_FORMAT_BYTES && dest_format == GST_FORMAT_TIME) {
+ if (fps_n != 0 && size != 0) {
+ *dest_value = gst_util_uint64_scale (src_value,
+ GST_SECOND * fps_d, fps_n * size);
+ } else {
+ GST_ERROR ("framerate denominator and/or blocksize is 0");
+ *dest_value = 0;
+ }
+ ret = TRUE;
+ }
+
+ done:
+
+ GST_DEBUG ("ret=%d result %" G_GINT64_FORMAT, ret, *dest_value);
+
+ return ret;
+ }
+
+ /**
+ * gst_video_info_align_full:
+ * @info: a #GstVideoInfo
+ * @align: alignment parameters
+ * @plane_size: (out) (allow-none): array used to store the plane sizes
+ *
+ * Extra padding will be added to the right side when stride alignment padding
+ * is required and @align will be updated with the new padding values.
+ *
+ * This variant of gst_video_info_align() provides the updated size, in bytes,
+ * of each video plane after the alignment, including all horizontal and vertical
+ * paddings.
+ *
+ * In case of GST_VIDEO_INTERLACE_MODE_ALTERNATE info, the returned sizes are the
+ * ones used to hold a single field, not the full frame.
+ *
+ * Returns: %FALSE if alignment could not be applied, e.g. because the
+ * size of a frame can't be represented as a 32 bit integer
+ *
+ * Since: 1.18
+ */
gboolean
gst_video_info_align_full (GstVideoInfo * info, GstVideoAlignment * align,
    gsize plane_size[GST_VIDEO_MAX_PLANES])
{
  const GstVideoFormatInfo *vinfo = info->finfo;
  gint width, height;
  gint padded_width, padded_height;
  gint i, n_planes;
  gboolean aligned;

  width = GST_VIDEO_INFO_WIDTH (info);
  height = GST_VIDEO_INFO_HEIGHT (info);

  GST_LOG ("padding %u-%ux%u-%u", align->padding_top,
      align->padding_left, align->padding_right, align->padding_bottom);

  n_planes = GST_VIDEO_INFO_N_PLANES (info);

  /* the last plane of a paletted format is the palette; it carries no
   * pixel data and is never stride-aligned */
  if (GST_VIDEO_FORMAT_INFO_HAS_PALETTE (vinfo))
    n_planes--;

  /* first make sure the left padding does not cause alignment problems later */
  do {
    GST_LOG ("left padding %u", align->padding_left);
    aligned = TRUE;
    for (i = 0; i < n_planes; i++) {
      gint comp[GST_VIDEO_MAX_COMPONENTS];
      gint hedge;

      /* this is the amount of pixels to add as left padding */
      gst_video_format_info_component (vinfo, i, comp);
      hedge = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (vinfo, comp[0],
          align->padding_left);
      hedge *= GST_VIDEO_FORMAT_INFO_PSTRIDE (vinfo, comp[0]);

      GST_LOG ("plane %d, padding %d, alignment %u", i, hedge,
          align->stride_align[i]);
      aligned &= (hedge & align->stride_align[i]) == 0;
    }
    if (aligned)
      break;

    GST_LOG ("unaligned padding, increasing padding");
    /* increase padded_width; adding the lowest set bit of padding_left
     * (x & ~(x-1)) grows the padding in power-of-two steps until the
     * per-plane byte edge satisfies every stride_align mask */
    align->padding_left += align->padding_left & ~(align->padding_left - 1);
  } while (!aligned);

  /* add the padding */
  padded_width = width + align->padding_left + align->padding_right;
  padded_height = height + align->padding_top + align->padding_bottom;

  /* recompute the plane layout at the padded size, widening until every
   * plane's stride satisfies its alignment mask */
  do {
    GST_LOG ("padded dimension %u-%u", padded_width, padded_height);

    info->width = padded_width;
    info->height = padded_height;

    if (!fill_planes (info, plane_size))
      return FALSE;

    /* check alignment */
    aligned = TRUE;
    for (i = 0; i < n_planes; i++) {
      GST_LOG ("plane %d, stride %d, alignment %u", i, info->stride[i],
          align->stride_align[i]);
      aligned &= (info->stride[i] & align->stride_align[i]) == 0;
    }
    if (aligned)
      break;

    GST_LOG ("unaligned strides, increasing dimension");
    /* increase padded_width by its lowest set bit (power-of-two step) */
    padded_width += padded_width & ~(padded_width - 1);
  } while (!aligned);

  /* report back how much right padding the widening actually produced */
  align->padding_right = padded_width - width - align->padding_left;

  /* restore the caller-visible dimensions; only offsets/strides keep the
   * padded layout */
  info->width = width;
  info->height = height;

  /* shift each plane's offset past the top/left padding so offset[i]
   * points at the first real pixel */
  for (i = 0; i < n_planes; i++) {
    gint comp[GST_VIDEO_MAX_COMPONENTS];
    gint vedge, hedge;

    gst_video_format_info_component (info->finfo, i, comp);
    hedge =
        GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (vinfo, comp[0], align->padding_left);
    vedge =
        GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (vinfo, comp[0], align->padding_top);

    GST_DEBUG ("plane %d: comp: %d, hedge %d vedge %d align %d stride %d", i,
        comp[0], hedge, vedge, align->stride_align[i], info->stride[i]);

    info->offset[i] += (vedge * info->stride[i]) +
        (hedge * GST_VIDEO_FORMAT_INFO_PSTRIDE (vinfo, comp[0]));
  }

  return TRUE;
}
+
+ /**
+ * gst_video_info_align:
+ * @info: a #GstVideoInfo
+ * @align: alignment parameters
+ *
+ * Adjust the offset and stride fields in @info so that the padding and
+ * stride alignment in @align is respected.
+ *
+ * Extra padding will be added to the right side when stride alignment padding
+ * is required and @align will be updated with the new padding values.
+ *
+ * Returns: %FALSE if alignment could not be applied, e.g. because the
+ * size of a frame can't be represented as a 32 bit integer (Since: 1.12)
+ */
+ gboolean
+ gst_video_info_align (GstVideoInfo * info, GstVideoAlignment * align)
+ {
+ return gst_video_info_align_full (info, align, NULL);
+ }
--- /dev/null
+ /* GStreamer Video Overlay interface
+ * Copyright (C) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * Copyright (C) 2011 Tim-Philipp Müller <tim@centricular.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+ /**
+ * SECTION:gstvideooverlay
+ * @title: GstVideoOverlay
+ * @short_description: Interface for setting/getting a window system resource
+ * on elements supporting it to configure a window into which to render a
+ * video.
+ *
 * The #GstVideoOverlay interface is used for 2 main purposes:
+ *
+ * * To get a grab on the Window where the video sink element is going to render.
+ * This is achieved by either being informed about the Window identifier that
+ * the video sink element generated, or by forcing the video sink element to use
+ * a specific Window identifier for rendering.
+ * * To force a redrawing of the latest video frame the video sink element
+ * displayed on the Window. Indeed if the #GstPipeline is in #GST_STATE_PAUSED
+ * state, moving the Window around will damage its content. Application
+ * developers will want to handle the Expose events themselves and force the
+ * video sink element to refresh the Window's content.
+ *
+ * Using the Window created by the video sink is probably the simplest scenario,
+ * in some cases, though, it might not be flexible enough for application
+ * developers if they need to catch events such as mouse moves and button
+ * clicks.
+ *
+ * Setting a specific Window identifier on the video sink element is the most
+ * flexible solution but it has some issues. Indeed the application needs to set
+ * its Window identifier at the right time to avoid internal Window creation
+ * from the video sink element. To solve this issue a #GstMessage is posted on
+ * the bus to inform the application that it should set the Window identifier
+ * immediately. Here is an example on how to do that correctly:
+ * |[
+ * static GstBusSyncReply
+ * create_window (GstBus * bus, GstMessage * message, GstPipeline * pipeline)
+ * {
+ * // ignore anything but 'prepare-window-handle' element messages
+ * if (!gst_is_video_overlay_prepare_window_handle_message (message))
+ * return GST_BUS_PASS;
+ *
+ * win = XCreateSimpleWindow (disp, root, 0, 0, 320, 240, 0, 0, 0);
+ *
+ * XSetWindowBackgroundPixmap (disp, win, None);
+ *
+ * XMapRaised (disp, win);
+ *
+ * XSync (disp, FALSE);
+ *
+ * gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (GST_MESSAGE_SRC (message)),
+ * win);
+ *
+ * gst_message_unref (message);
+ *
+ * return GST_BUS_DROP;
+ * }
+ * ...
+ * int
+ * main (int argc, char **argv)
+ * {
+ * ...
+ * bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
 * gst_bus_set_sync_handler (bus, (GstBusSyncHandler) create_window, pipeline,
 *     NULL);
+ * ...
+ * }
+ * ]|
+ *
+ * ## Two basic usage scenarios
+ *
+ * There are two basic usage scenarios: in the simplest case, the application
+ * uses #playbin or #playsink or knows exactly what particular element is used
+ * for video output, which is usually the case when the application creates
+ * the videosink to use (e.g. #xvimagesink, #ximagesink, etc.) itself; in this
+ * case, the application can just create the videosink element, create and
+ * realize the window to render the video on and then
+ * call gst_video_overlay_set_window_handle() directly with the XID or native
+ * window handle, before starting up the pipeline.
+ * As #playbin and #playsink implement the video overlay interface and proxy
+ * it transparently to the actual video sink even if it is created later, this
+ * case also applies when using these elements.
+ *
+ * In the other and more common case, the application does not know in advance
+ * what GStreamer video sink element will be used for video output. This is
+ * usually the case when an element such as #autovideosink is used.
+ * In this case, the video sink element itself is created
+ * asynchronously from a GStreamer streaming thread some time after the
+ * pipeline has been started up. When that happens, however, the video sink
+ * will need to know right then whether to render onto an already existing
+ * application window or whether to create its own window. This is when it
+ * posts a prepare-window-handle message, and that is also why this message needs
+ * to be handled in a sync bus handler which will be called from the streaming
+ * thread directly (because the video sink will need an answer right then).
+ *
+ * As response to the prepare-window-handle element message in the bus sync
+ * handler, the application may use gst_video_overlay_set_window_handle() to tell
+ * the video sink to render onto an existing window surface. At this point the
+ * application should already have obtained the window handle / XID, so it
+ * just needs to set it. It is generally not advisable to call any GUI toolkit
+ * functions or window system functions from the streaming thread in which the
+ * prepare-window-handle message is handled, because most GUI toolkits and
+ * windowing systems are not thread-safe at all and a lot of care would be
+ * required to co-ordinate the toolkit and window system calls of the
+ * different threads (Gtk+ users please note: prior to Gtk+ 2.18
+ * `GDK_WINDOW_XID` was just a simple structure access, so generally fine to do
+ * within the bus sync handler; this macro was changed to a function call in
+ * Gtk+ 2.18 and later, which is likely to cause problems when called from a
+ * sync handler; see below for a better approach without `GDK_WINDOW_XID`
+ * used in the callback).
+ *
+ * ## GstVideoOverlay and Gtk+
+ *
+ * |[
+ * #include <gst/video/videooverlay.h>
+ * #include <gtk/gtk.h>
+ * #ifdef GDK_WINDOWING_X11
+ * #include <gdk/gdkx.h> // for GDK_WINDOW_XID
+ * #endif
+ * #ifdef GDK_WINDOWING_WIN32
+ * #include <gdk/gdkwin32.h> // for GDK_WINDOW_HWND
+ * #endif
+ * ...
+ * static guintptr video_window_handle = 0;
+ * ...
+ * static GstBusSyncReply
+ * bus_sync_handler (GstBus * bus, GstMessage * message, gpointer user_data)
+ * {
+ * // ignore anything but 'prepare-window-handle' element messages
+ * if (!gst_is_video_overlay_prepare_window_handle_message (message))
+ * return GST_BUS_PASS;
+ *
+ * if (video_window_handle != 0) {
+ * GstVideoOverlay *overlay;
+ *
+ * // GST_MESSAGE_SRC (message) will be the video sink element
+ * overlay = GST_VIDEO_OVERLAY (GST_MESSAGE_SRC (message));
+ * gst_video_overlay_set_window_handle (overlay, video_window_handle);
+ * } else {
+ * g_warning ("Should have obtained video_window_handle by now!");
+ * }
+ *
+ * gst_message_unref (message);
+ * return GST_BUS_DROP;
+ * }
+ * ...
+ * static void
+ * video_widget_realize_cb (GtkWidget * widget, gpointer data)
+ * {
+ * #if GTK_CHECK_VERSION(2,18,0)
+ * // Tell Gtk+/Gdk to create a native window for this widget instead of
+ * // drawing onto the parent widget.
+ * // This is here just for pedagogical purposes, GDK_WINDOW_XID will call
+ * // it as well in newer Gtk versions
+ * if (!gdk_window_ensure_native (widget->window))
+ * g_error ("Couldn't create native window needed for GstVideoOverlay!");
+ * #endif
+ *
+ * #ifdef GDK_WINDOWING_X11
+ * {
+ * gulong xid = GDK_WINDOW_XID (gtk_widget_get_window (video_window));
+ * video_window_handle = xid;
+ * }
+ * #endif
+ * #ifdef GDK_WINDOWING_WIN32
+ * {
+ * HWND wnd = GDK_WINDOW_HWND (gtk_widget_get_window (video_window));
+ * video_window_handle = (guintptr) wnd;
+ * }
+ * #endif
+ * }
+ * ...
+ * int
+ * main (int argc, char **argv)
+ * {
+ * GtkWidget *video_window;
+ * GtkWidget *app_window;
+ * ...
+ * app_window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
+ * ...
+ * video_window = gtk_drawing_area_new ();
+ * g_signal_connect (video_window, "realize",
+ * G_CALLBACK (video_widget_realize_cb), NULL);
+ * gtk_widget_set_double_buffered (video_window, FALSE);
+ * ...
+ * // usually the video_window will not be directly embedded into the
+ * // application window like this, but there will be many other widgets
+ * // and the video window will be embedded in one of them instead
 * gtk_container_add (GTK_CONTAINER (app_window), video_window);
+ * ...
+ * // show the GUI
+ * gtk_widget_show_all (app_window);
+ *
+ * // realize window now so that the video window gets created and we can
+ * // obtain its XID/HWND before the pipeline is started up and the videosink
+ * // asks for the XID/HWND of the window to render onto
+ * gtk_widget_realize (video_window);
+ *
+ * // we should have the XID/HWND now
+ * g_assert (video_window_handle != 0);
+ * ...
+ * // set up sync handler for setting the xid once the pipeline is started
+ * bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
+ * gst_bus_set_sync_handler (bus, (GstBusSyncHandler) bus_sync_handler, NULL,
+ * NULL);
+ * gst_object_unref (bus);
+ * ...
+ * gst_element_set_state (pipeline, GST_STATE_PLAYING);
+ * ...
+ * }
+ * ]|
+ *
+ * ## GstVideoOverlay and Qt
+ *
+ * |[
 * #include <glib.h>
 * #include <gst/gst.h>
 * #include <gst/video/videooverlay.h>
 *
 * #include <QApplication>
 * #include <QTimer>
 * #include <QWidget>
+ *
+ * int main(int argc, char *argv[])
+ * {
+ * if (!g_thread_supported ())
+ * g_thread_init (NULL);
+ *
+ * gst_init (&argc, &argv);
+ * QApplication app(argc, argv);
+ * app.connect(&app, SIGNAL(lastWindowClosed()), &app, SLOT(quit ()));
+ *
+ * // prepare the pipeline
+ *
+ * GstElement *pipeline = gst_pipeline_new ("xvoverlay");
+ * GstElement *src = gst_element_factory_make ("videotestsrc", NULL);
+ * GstElement *sink = gst_element_factory_make ("xvimagesink", NULL);
+ * gst_bin_add_many (GST_BIN (pipeline), src, sink, NULL);
+ * gst_element_link (src, sink);
+ *
+ * // prepare the ui
+ *
+ * QWidget window;
+ * window.resize(320, 240);
+ * window.show();
+ *
+ * WId xwinid = window.winId();
+ * gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (sink), xwinid);
+ *
+ * // run the pipeline
+ *
+ * GstStateChangeReturn sret = gst_element_set_state (pipeline,
+ * GST_STATE_PLAYING);
+ * if (sret == GST_STATE_CHANGE_FAILURE) {
+ * gst_element_set_state (pipeline, GST_STATE_NULL);
+ * gst_object_unref (pipeline);
+ * // Exit application
+ * QTimer::singleShot(0, QApplication::activeWindow(), SLOT(quit()));
+ * }
+ *
+ * int ret = app.exec();
+ *
+ * window.hide();
+ * gst_element_set_state (pipeline, GST_STATE_NULL);
+ * gst_object_unref (pipeline);
+ *
+ * return ret;
+ * }
+ * ]|
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include "videooverlay.h"
+
/* Property offsets relative to the caller-supplied last_prop_id; used by
 * gst_video_overlay_install_properties() and
 * gst_video_overlay_set_property(). */
enum
{
  PROP_RENDER_RECTANGLE,
};
+
+ GST_DEBUG_CATEGORY_STATIC (gst_video_overlay_debug);
+ #define GST_CAT_DEFAULT gst_video_overlay_debug
+
+ GType
+ gst_video_overlay_get_type (void)
+ {
+ static GType gst_video_overlay_type = 0;
+
+ if (!gst_video_overlay_type) {
+ static const GTypeInfo gst_video_overlay_info = {
+ sizeof (GstVideoOverlayInterface),
+ NULL,
+ NULL,
+ NULL,
+ NULL,
+ NULL,
+ 0,
+ 0,
+ NULL,
+ };
+
+ gst_video_overlay_type = g_type_register_static (G_TYPE_INTERFACE,
+ "GstVideoOverlay", &gst_video_overlay_info, 0);
+
+ GST_DEBUG_CATEGORY_INIT (gst_video_overlay_debug, "videooverlay", 0,
+ "videooverlay interface");
+ }
+
+ return gst_video_overlay_type;
+ }
+
++#ifdef TIZEN_FEATURE_WAYLAND_ENHANCEMENT
/**
 * gst_video_overlay_set_wl_window_wl_surface_id:
 * @overlay: a #GstVideoOverlay to set the window on.
 * @wl_surface_id: a global resource id of the wl_surface referencing the
 *     wayland window.
 *
 * This will call the video overlay's set_wl_window_wl_surface_id method.
 * Use this method to tell an overlay to display its video output to a
 * specific window (e.g. a Wayland window on Wayland). Sinks that do not
 * implement this method silently ignore the call.
 */
void
gst_video_overlay_set_wl_window_wl_surface_id (GstVideoOverlay * overlay,
    gint wl_surface_id)
{
  GstVideoOverlayInterface *iface;

  g_return_if_fail (overlay != NULL);
  g_return_if_fail (GST_IS_VIDEO_OVERLAY (overlay));

  iface = GST_VIDEO_OVERLAY_GET_INTERFACE (overlay);

  /* optional vfunc: only forward when the implementation provides it */
  if (iface->set_wl_window_wl_surface_id) {
    iface->set_wl_window_wl_surface_id (overlay, wl_surface_id);
  }
}
++
/**
 * gst_video_overlay_set_wl_window_exported_shell_handle:
 * @overlay: a #GstVideoOverlay to set the window on.
 * @exported_shell_handle: a shell handle exported by the wayland window, used
 *     for synchronization between UI and video.
 *
 * This will call the video overlay's set_wl_window_exported_shell_handle
 * method. Use this method to tell an overlay to display its video output to
 * a specific window (e.g. a Wayland window on Wayland). Sinks that do not
 * implement this method silently ignore the call.
 */
void
gst_video_overlay_set_wl_window_exported_shell_handle (GstVideoOverlay * overlay,
    const char *exported_shell_handle)
{
  GstVideoOverlayInterface *iface;

  g_return_if_fail (overlay != NULL);
  g_return_if_fail (GST_IS_VIDEO_OVERLAY (overlay));

  iface = GST_VIDEO_OVERLAY_GET_INTERFACE (overlay);

  /* optional vfunc: only forward when the implementation provides it */
  if (iface->set_wl_window_exported_shell_handle) {
    iface->set_wl_window_exported_shell_handle (overlay, exported_shell_handle);
  }
}
++
++
++
++/**
++ * gst_video_overlay_set_display_roi_area:
++ * @overlay: a #GstVideoOverlay
++ * @x: the horizontal offset of the render area inside the window
++ * @y: the vertical offset of the render area inside the window
++ * @width: the width of the render area inside the window
++ * @height: the height of the render area inside the window
++ *
++ * Set the ROI(Region of Interest) area of wayland window.
++ * Returns: %FALSE if not supported by the sink.
++ */
++gboolean
++gst_video_overlay_set_display_roi_area (GstVideoOverlay * overlay,
++ gint x, gint y, gint width, gint height)
++{
++ GstVideoOverlayInterface *iface;
++
++ g_return_val_if_fail (overlay != NULL, FALSE);
++ g_return_val_if_fail (GST_IS_VIDEO_OVERLAY (overlay), FALSE);
++ g_return_val_if_fail (width > 0 && height > 0, FALSE);
++
++ iface = GST_VIDEO_OVERLAY_GET_INTERFACE (overlay);
++
++ if (iface->set_display_roi_area) {
++ iface->set_display_roi_area (overlay, x, y, width, height);
++ return TRUE;
++ }
++ return FALSE;
++}
++
/**
 * gst_video_overlay_set_video_roi_area:
 * @overlay: a #GstVideoOverlay
 * @x_scale: x coordinate ratio value of video source area
 *     based on video width size, valid range is from 0.0 to 1.0.
 * @y_scale: y coordinate ratio value of video source area
 *     based on video height size, valid range is from 0.0 to 1.0.
 * @w_scale: width ratio value of the video source area
 *     based on video width size, valid range is from greater than 0.0 to 1.0.
 * @h_scale: height ratio value of the video source area
 *     based on video height size, valid range is from greater than 0.0 to 1.0.
 *
 * Sets the ROI (Region Of Interest) area of the video source.
 * NOTE(review): the original doc said "width" for all four parameters;
 * @y_scale/@h_scale presumably scale against the video height — confirm
 * against the sink implementation.
 *
 * Returns: %FALSE if not supported by the sink.
 **/
gboolean
gst_video_overlay_set_video_roi_area (GstVideoOverlay * overlay,
    gdouble x_scale, gdouble y_scale, gdouble w_scale, gdouble h_scale)
{
  GstVideoOverlayInterface *iface;

  g_return_val_if_fail (overlay != NULL, FALSE);
  g_return_val_if_fail (GST_IS_VIDEO_OVERLAY (overlay), FALSE);
  g_return_val_if_fail (x_scale >= 0.0 && x_scale <= 1.0, FALSE);
  g_return_val_if_fail (y_scale >= 0.0 && y_scale <= 1.0, FALSE);
  g_return_val_if_fail (w_scale > 0.0 && w_scale <= 1.0, FALSE);
  g_return_val_if_fail (h_scale > 0.0 && h_scale <= 1.0, FALSE);

  iface = GST_VIDEO_OVERLAY_GET_INTERFACE (overlay);

  /* optional vfunc: report lack of support to the caller */
  if (iface->set_video_roi_area) {
    iface->set_video_roi_area (overlay, x_scale, y_scale, w_scale, h_scale);
    return TRUE;
  }
  return FALSE;
}
++#endif
+ /**
+ * gst_video_overlay_set_window_handle:
+ * @overlay: a #GstVideoOverlay to set the window on.
+ * @handle: a handle referencing the window.
+ *
+ * This will call the video overlay's set_window_handle method. You
+ * should use this method to tell to an overlay to display video output to a
+ * specific window (e.g. an XWindow on X11). Passing 0 as the @handle will
+ * tell the overlay to stop using that window and create an internal one.
+ */
+ void
+ gst_video_overlay_set_window_handle (GstVideoOverlay * overlay, guintptr handle)
+ {
+ GstVideoOverlayInterface *iface;
+
+ g_return_if_fail (overlay != NULL);
+ g_return_if_fail (GST_IS_VIDEO_OVERLAY (overlay));
+
+ iface = GST_VIDEO_OVERLAY_GET_INTERFACE (overlay);
+
+ if (iface->set_window_handle) {
+ iface->set_window_handle (overlay, handle);
+ }
+ }
+
+ /**
+ * gst_video_overlay_got_window_handle:
+ * @overlay: a #GstVideoOverlay which got a window
+ * @handle: a platform-specific handle referencing the window
+ *
+ * This will post a "have-window-handle" element message on the bus.
+ *
+ * This function should only be used by video overlay plugin developers.
+ */
+ void
+ gst_video_overlay_got_window_handle (GstVideoOverlay * overlay, guintptr handle)
+ {
+ GstStructure *s;
+ GstMessage *msg;
+
+ g_return_if_fail (overlay != NULL);
+ g_return_if_fail (GST_IS_VIDEO_OVERLAY (overlay));
+
+ GST_LOG_OBJECT (GST_OBJECT (overlay), "window_handle = %p", (gpointer)
+ handle);
+ s = gst_structure_new ("have-window-handle",
+ "window-handle", G_TYPE_UINT64, (guint64) handle, NULL);
+ msg = gst_message_new_element (GST_OBJECT (overlay), s);
+ gst_element_post_message (GST_ELEMENT (overlay), msg);
+ }
+
+ /**
+ * gst_video_overlay_prepare_window_handle:
+ * @overlay: a #GstVideoOverlay which does not yet have an Window handle set
+ *
+ * This will post a "prepare-window-handle" element message on the bus
+ * to give applications an opportunity to call
+ * gst_video_overlay_set_window_handle() before a plugin creates its own
+ * window.
+ *
+ * This function should only be used by video overlay plugin developers.
+ */
+ void
+ gst_video_overlay_prepare_window_handle (GstVideoOverlay * overlay)
+ {
+ GstStructure *s;
+ GstMessage *msg;
+
+ g_return_if_fail (overlay != NULL);
+ g_return_if_fail (GST_IS_VIDEO_OVERLAY (overlay));
+
+ GST_LOG_OBJECT (GST_OBJECT (overlay), "prepare window handle");
+ s = gst_structure_new_empty ("prepare-window-handle");
+ msg = gst_message_new_element (GST_OBJECT (overlay), s);
+ gst_element_post_message (GST_ELEMENT (overlay), msg);
+ }
+
+ /**
+ * gst_video_overlay_expose:
+ * @overlay: a #GstVideoOverlay to expose.
+ *
+ * Tell an overlay that it has been exposed. This will redraw the current frame
+ * in the drawable even if the pipeline is PAUSED.
+ */
+ void
+ gst_video_overlay_expose (GstVideoOverlay * overlay)
+ {
+ GstVideoOverlayInterface *iface;
+
+ g_return_if_fail (overlay != NULL);
+ g_return_if_fail (GST_IS_VIDEO_OVERLAY (overlay));
+
+ iface = GST_VIDEO_OVERLAY_GET_INTERFACE (overlay);
+
+ if (iface->expose) {
+ iface->expose (overlay);
+ }
+ }
+
+ /**
+ * gst_video_overlay_handle_events:
+ * @overlay: a #GstVideoOverlay to expose.
+ * @handle_events: a #gboolean indicating if events should be handled or not.
+ *
+ * Tell an overlay that it should handle events from the window system. These
+ * events are forwarded upstream as navigation events. In some window system,
+ * events are not propagated in the window hierarchy if a client is listening
+ * for them. This method allows you to disable events handling completely
+ * from the #GstVideoOverlay.
+ */
+ void
+ gst_video_overlay_handle_events (GstVideoOverlay * overlay,
+ gboolean handle_events)
+ {
+ GstVideoOverlayInterface *iface;
+
+ g_return_if_fail (overlay != NULL);
+ g_return_if_fail (GST_IS_VIDEO_OVERLAY (overlay));
+
+ iface = GST_VIDEO_OVERLAY_GET_INTERFACE (overlay);
+
+ if (iface->handle_events) {
+ iface->handle_events (overlay, handle_events);
+ }
+ }
+
+ /**
+ * gst_video_overlay_set_render_rectangle:
+ * @overlay: a #GstVideoOverlay
+ * @x: the horizontal offset of the render area inside the window
+ * @y: the vertical offset of the render area inside the window
+ * @width: the width of the render area inside the window
+ * @height: the height of the render area inside the window
+ *
+ * Configure a subregion as a video target within the window set by
+ * gst_video_overlay_set_window_handle(). If this is not used or not supported
+ * the video will fill the area of the window set as the overlay to 100%.
+ * By specifying the rectangle, the video can be overlayed to a specific region
+ * of that window only. After setting the new rectangle one should call
+ * gst_video_overlay_expose() to force a redraw. To unset the region pass -1 for
+ * the @width and @height parameters.
+ *
+ * This method is needed for non fullscreen video overlay in UI toolkits that
+ * do not support subwindows.
+ *
+ * Returns: %FALSE if not supported by the sink.
+ */
+ gboolean
+ gst_video_overlay_set_render_rectangle (GstVideoOverlay * overlay,
+ gint x, gint y, gint width, gint height)
+ {
+ GstVideoOverlayInterface *iface;
+
+ g_return_val_if_fail (overlay != NULL, FALSE);
+ g_return_val_if_fail (GST_IS_VIDEO_OVERLAY (overlay), FALSE);
+ g_return_val_if_fail ((width == -1 && height == -1) ||
+ (width > 0 && height > 0), FALSE);
+
+ iface = GST_VIDEO_OVERLAY_GET_INTERFACE (overlay);
+
+ if (iface->set_render_rectangle) {
+ iface->set_render_rectangle (overlay, x, y, width, height);
+ return TRUE;
+ }
+ return FALSE;
+ }
+
+ /**
+ * gst_is_video_overlay_prepare_window_handle_message:
+ * @msg: a #GstMessage
+ *
+ * Convenience function to check if the given message is a
+ * "prepare-window-handle" message from a #GstVideoOverlay.
+ *
+ * Returns: whether @msg is a "prepare-window-handle" message
+ */
+ gboolean
+ gst_is_video_overlay_prepare_window_handle_message (GstMessage * msg)
+ {
+ g_return_val_if_fail (msg != NULL, FALSE);
+
+ if (GST_MESSAGE_TYPE (msg) != GST_MESSAGE_ELEMENT)
+ return FALSE;
+
+ return gst_message_has_name (msg, "prepare-window-handle");
+ }
+
+
+ /**
+ * gst_video_overlay_install_properties:
+ * @oclass: The class on which the properties will be installed
+ * @last_prop_id: The first free property ID to use
+ *
+ * This helper shall be used by classes implementing the #GstVideoOverlay
+ * interface that want the render rectangle to be controllable using
+ * properties. This helper will install "render-rectangle" property into the
+ * class.
+ *
+ * Since: 1.14
+ */
+ void
+ gst_video_overlay_install_properties (GObjectClass * oclass, gint last_prop_id)
+ {
+ g_object_class_install_property (oclass, last_prop_id + PROP_RENDER_RECTANGLE,
+ gst_param_spec_array ("render-rectangle", "Render Rectangle",
+ "The render rectangle ('<x, y, width, height>')",
+ g_param_spec_int ("rect-value", "Rectangle Value",
+ "One of x, y, width or height value.", G_MININT, G_MAXINT, -1,
+ G_PARAM_WRITABLE | G_PARAM_STATIC_STRINGS),
+ G_PARAM_WRITABLE | G_PARAM_STATIC_STRINGS));
+ }
+
+ /**
+ * gst_video_overlay_set_property:
+ * @object: The instance on which the property is set
+ * @last_prop_id: The highest property ID.
+ * @property_id: The property ID
+ * @value: The #GValue to be set
+ *
+ * This helper shall be used by classes implementing the #GstVideoOverlay
+ * interface that want the render rectangle to be controllable using
+ * properties. This helper will parse and set the render rectangle calling
+ * gst_video_overlay_set_render_rectangle().
+ *
+ * Returns: %TRUE if the @property_id matches the GstVideoOverlay property
+ *
+ * Since: 1.14
+ */
+ gboolean
+ gst_video_overlay_set_property (GObject * object, gint last_prop_id,
+ guint property_id, const GValue * value)
+ {
+ gboolean ret = FALSE;
+
+ if (property_id == last_prop_id) {
+ const GValue *v;
+ gint rect[4], i;
+
+ ret = TRUE;
+
+ if (gst_value_array_get_size (value) != 4)
+ goto wrong_format;
+
+ for (i = 0; i < 4; i++) {
+ v = gst_value_array_get_value (value, i);
+ if (!G_VALUE_HOLDS_INT (v))
+ goto wrong_format;
+
+ rect[i] = g_value_get_int (v);
+ }
+
+ gst_video_overlay_set_render_rectangle (GST_VIDEO_OVERLAY (object),
+ rect[0], rect[1], rect[2], rect[3]);
+ }
+
+ return ret;
+
+ wrong_format:
+ {
+ GValue string = G_VALUE_INIT;
+
+ g_value_init (&string, G_TYPE_STRING);
+ g_value_transform (value, &string);
+
+ g_critical ("Badly formatted rectangle, must contains four gint (got '%s')",
+ g_value_get_string (&string));
+
+ g_value_unset (&string);
+ return TRUE;
+ }
+ }
--- /dev/null
+ /* GStreamer Video Overlay Interface
+ * Copyright (C) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * Copyright (C) 2003 Julien Moutte <julien@moutte.net>
+ * Copyright (C) 2011 Tim-Philipp Müller <tim@centricular.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifndef __GST_VIDEO_OVERLAY_H__
+ #define __GST_VIDEO_OVERLAY_H__
+
+ #include <gst/gst.h>
+ #include <gst/video/gstvideosink.h>
+
+ G_BEGIN_DECLS
+
+ #define GST_TYPE_VIDEO_OVERLAY \
+ (gst_video_overlay_get_type ())
+ #define GST_VIDEO_OVERLAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VIDEO_OVERLAY, GstVideoOverlay))
+ #define GST_IS_VIDEO_OVERLAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VIDEO_OVERLAY))
+ #define GST_VIDEO_OVERLAY_GET_INTERFACE(inst) \
+ (G_TYPE_INSTANCE_GET_INTERFACE ((inst), GST_TYPE_VIDEO_OVERLAY, GstVideoOverlayInterface))
+
+ /**
+ * GstVideoOverlay:
+ *
+ * Opaque #GstVideoOverlay interface structure
+ */
+ typedef struct _GstVideoOverlay GstVideoOverlay;
+ typedef struct _GstVideoOverlayInterface GstVideoOverlayInterface;
+
+ /**
+ * GstVideoOverlayInterface:
+ * @iface: parent interface type.
+ * @expose: virtual method to handle expose events
+ * @handle_events: virtual method to handle events
+ * @set_render_rectangle: virtual method to set the render rectangle
+ * @set_window_handle: virtual method to configure the window handle
+ *
+ * #GstVideoOverlay interface
+ */
struct _GstVideoOverlayInterface {
  GTypeInterface iface;

  /* virtual functions */
  void (*expose) (GstVideoOverlay *overlay);

  void (*handle_events) (GstVideoOverlay *overlay, gboolean handle_events);

  void (*set_render_rectangle) (GstVideoOverlay *overlay,
      gint x, gint y,
      gint width, gint height);

  void (*set_window_handle) (GstVideoOverlay *overlay, guintptr handle);
#ifdef TIZEN_FEATURE_WAYLAND_ENHANCEMENT
  /* Tizen extension: attach to an existing wayland window identified by a
   * global wl_surface resource id */
  void (*set_wl_window_wl_surface_id) (GstVideoOverlay * overlay, gint wl_surface_id);
  /* Tizen extension: pass a shell handle exported by the wayland window for
   * synchronization between UI and video */
  void (*set_wl_window_exported_shell_handle) (GstVideoOverlay * overlay, const char *exported_shell_handle);
  /* Tizen extension: set the display ROI (region of interest) rectangle
   * inside the window */
  void (*set_display_roi_area) (GstVideoOverlay *overlay,
      gint x, gint y,
      gint width, gint height);

  /* Tizen extension: set the source-video ROI as ratios; scales are in
   * (0.0, 1.0] / [0.0, 1.0] per the wrapper's preconditions */
  void (*set_video_roi_area) (GstVideoOverlay *overlay,
      gdouble x_scale, gdouble y_scale,
      gdouble w_scale, gdouble h_scale);
#endif
};
+
+ GST_VIDEO_API
+ GType gst_video_overlay_get_type (void);
+
+ /* virtual function wrappers */
+
+ GST_VIDEO_API
+ gboolean gst_video_overlay_set_render_rectangle (GstVideoOverlay * overlay,
+ gint x,
+ gint y,
+ gint width,
+ gint height);
+
+ GST_VIDEO_API
+ void gst_video_overlay_expose (GstVideoOverlay * overlay);
+
+ GST_VIDEO_API
+ void gst_video_overlay_handle_events (GstVideoOverlay * overlay,
+ gboolean handle_events);
+
+ GST_VIDEO_API
+ void gst_video_overlay_set_window_handle (GstVideoOverlay * overlay,
+ guintptr handle);
+
+ /* public methods to dispatch bus messages */
+
+ GST_VIDEO_API
+ void gst_video_overlay_got_window_handle (GstVideoOverlay * overlay,
+ guintptr handle);
+
+ GST_VIDEO_API
+ void gst_video_overlay_prepare_window_handle (GstVideoOverlay * overlay);
+
+ GST_VIDEO_API
+ gboolean gst_is_video_overlay_prepare_window_handle_message (GstMessage * msg);
+
+ GST_VIDEO_API
+ void gst_video_overlay_install_properties (GObjectClass * oclass,
+ gint last_prop_id);
+
+ GST_VIDEO_API
+ gboolean gst_video_overlay_set_property (GObject * object,
+ gint last_prop_id,
+ guint property_id,
+ const GValue * value);
++#ifdef TIZEN_FEATURE_WAYLAND_ENHANCEMENT
++GST_VIDEO_API
++void gst_video_overlay_set_wl_window_wl_surface_id (GstVideoOverlay * overlay,
++ gint wl_surface_id);
+
++GST_VIDEO_API
++void gst_video_overlay_set_wl_window_exported_shell_handle (GstVideoOverlay * overlay,
++ const char *exported_shell_handle);
++
++GST_VIDEO_API
++gboolean gst_video_overlay_set_display_roi_area (GstVideoOverlay * overlay,
++ gint x, gint y,
++ gint width, gint height);
++
++GST_VIDEO_API
++gboolean gst_video_overlay_set_video_roi_area (GstVideoOverlay * overlay,
++ gdouble x_scale, gdouble y_scale,
++ gdouble w_scale, gdouble h_scale);
++#endif
+ G_END_DECLS
+
+ #endif /* __GST_VIDEO_OVERLAY_H__ */
--- /dev/null
+ /* GStreamer
+ * Copyright (C) <2006> Edward Hervey <edward@fluendo.com>
+ * Copyright (C) <2009> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ * Copyright (C) <2011> Hewlett-Packard Development Company, L.P.
+ * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>, Collabora Ltd.
+ * Copyright (C) <2013> Collabora Ltd.
+ * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /**
+ * SECTION:element-decodebin
+ * @title: decodebin
+ *
+ * #GstBin that auto-magically constructs a decoding pipeline using available
+ * decoders and demuxers via auto-plugging.
+ *
+ * decodebin is considered stable now and replaces the old #decodebin element.
+ * #uridecodebin uses decodebin internally and is often more convenient to
+ * use, as it creates a suitable source element as well.
+ */
+
+ /* Implementation notes:
+ *
+ * The following section describes how decodebin works internally.
+ *
+ * The first part of decodebin is its typefind element, which tries
+ * to determine the media type of the input stream. If the type is found
+ * autoplugging starts.
+ *
+ * decodebin internally organizes the elements it autoplugged into GstDecodeChains
+ * and GstDecodeGroups. A decode chain is a single chain of decoding, this
+ * means that if decodebin ever autoplugs an element with two+ srcpads
+ * (e.g. a demuxer) this will end the chain and everything following this
+ * demuxer will be put into decode groups below the chain. Otherwise,
+ * if an element has a single srcpad that outputs raw data the decode chain
+ * is ended too and a GstDecodePad is stored and blocked.
+ *
+ * A decode group combines a number of chains that are created by a
+ * demuxer element. All those chains are connected through a multiqueue to
+ * the demuxer. A new group for the same demuxer is only created if the
+ * demuxer has signaled no-more-pads, in which case all following pads
+ * create a new chain in the new group.
+ *
+ * This continues until the top-level decode chain is complete. A decode
+ * chain is complete if it either ends with a blocked endpad, if autoplugging
+ * stopped because no suitable plugins could be found or if the active group
+ * is complete. A decode group on the other hand is complete if all child
+ * chains are complete.
+ *
+ * If this happens at some point, all endpads of all active groups are exposed.
+ * For this decodebin adds the endpads, signals no-more-pads and then unblocks
+ * them. Now playback starts.
+ *
+ * If one of the chains that end on a endpad receives EOS decodebin checks
+ * if all chains and groups are drained. In that case everything goes into EOS.
+ * If there is a chain where the active group is drained but there exist next
+ * groups, the active group is hidden (endpads are removed) and the next group
+ * is exposed. This means that in some cases more pads may be created even
+ * after the initial no-more-pads signal. This happens for example with
+ * so-called "chained oggs", most commonly found among ogg/vorbis internet
+ * radio streams.
+ *
+ * Note 1: If we're talking about blocked endpads this really means that the
+ * *target* pads of the endpads are blocked. Pads that are exposed to the outside
+ * should never ever be blocked!
+ *
+ * Note 2: If a group is complete and the parent's chain demuxer adds new pads
+ * but never signaled no-more-pads, these additional pads will be ignored!
+ *
+ */
+
+ /* FIXME 0.11: suppress warnings for deprecated API such as GValueArray
+ * with newer GLib versions (>= 2.31.0) */
+ #define GLIB_DISABLE_DEPRECATION_WARNINGS
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <gst/gst-i18n-plugin.h>
+
+ #include <string.h>
+ #include <gst/gst.h>
+ #include <gst/pbutils/pbutils.h>
+
+ #include "gstplay-enum.h"
+ #include "gstplaybackelements.h"
+ #include "gstrawcaps.h"
+ #include "gstplaybackutils.h"
+
+ /* generic templates */
+ /* sink: single always-pad accepting any caps; typefind determines the
+ * actual media type (see implementation notes above) */
+ static GstStaticPadTemplate decoder_bin_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+ /* src_%u: sometimes-pads, exposed once decode chains complete */
+ static GstStaticPadTemplate decoder_bin_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+ GST_DEBUG_CATEGORY_STATIC (gst_decode_bin_debug);
+ #define GST_CAT_DEFAULT gst_decode_bin_debug
+
+ typedef struct _GstPendingPad GstPendingPad;
+ typedef struct _GstDecodeElement GstDecodeElement;
+ typedef struct _GstDemuxerPad GstDemuxerPad;
+ typedef struct _GstDecodeChain GstDecodeChain;
+ typedef struct _GstDecodeGroup GstDecodeGroup;
+ typedef struct _GstDecodePad GstDecodePad;
+ typedef GstGhostPadClass GstDecodePadClass;
+ typedef struct _GstDecodeBin GstDecodeBin;
+ typedef struct _GstDecodeBinClass GstDecodeBinClass;
+
+ #define GST_TYPE_DECODE_BIN (gst_decode_bin_get_type())
+ #define GST_DECODE_BIN_CAST(obj) ((GstDecodeBin*)(obj))
+ #define GST_DECODE_BIN(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_DECODE_BIN,GstDecodeBin))
+ #define GST_DECODE_BIN_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_DECODE_BIN,GstDecodeBinClass))
+ #define GST_IS_DECODE_BIN(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_DECODE_BIN))
+ #define GST_IS_DECODE_BIN_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_DECODE_BIN))
+
+ /**
+ * GstDecodeBin:
+ *
+ * The opaque #GstDecodeBin data structure
+ */
+ struct _GstDecodeBin
+ {
+ GstBin bin; /* we extend GstBin */
+
+ /* properties */
+ GstCaps *caps; /* caps on which to stop decoding */
+ gchar *encoding; /* encoding of subtitles */
+ gboolean use_buffering; /* configure buffering on multiqueues */
+ gboolean force_sw_decoders; /* ignore "Hardware" klass decoders while autoplugging */
+ gint low_percent; /* low threshold for buffering to start */
+ gint high_percent; /* high threshold for buffering to finish */
+ guint max_size_bytes; /* max bytes in the multiqueues (0 = automatic) */
+ guint max_size_buffers; /* max buffers in the multiqueues (0 = automatic) */
+ guint64 max_size_time; /* max time in the multiqueues (0 = automatic) */
+ gboolean post_stream_topology; /* see PROP_POST_STREAM_TOPOLOGY */
+ guint64 connection_speed; /* network connection speed hint, see PROP_CONNECTION_SPEED */
+
+ GstElement *typefind; /* this holds the typefind object */
+
+ GMutex expose_lock; /* Protects exposal and removal of groups */
+ GstDecodeChain *decode_chain; /* Top level decode chain */
+ guint nbpads; /* unique identifier for source pads */
+
+ GMutex factories_lock; /* presumably protects the two factory fields below -- confirm */
+ guint32 factories_cookie; /* Cookie from last time when factories was updated */
+ GList *factories; /* factories we can use for selecting elements */
+
+ GMutex subtitle_lock; /* Protects changes to subtitles and encoding */
+ GList *subtitles; /* List of elements with subtitle-encoding,
+ * protected by above mutex! */
+
+ gboolean have_type; /* if we received the have_type signal */
+ guint have_type_id; /* signal id for have-type from typefind */
+
+ gboolean async_pending; /* async-start has been emitted */
+
+ GMutex dyn_lock; /* lock protecting pad blocking */
+ gboolean shutdown; /* if we are shutting down */
+ GList *blocked_pads; /* pads that have set to block */
+
+ gboolean expose_allstreams; /* Whether to expose unknown type streams or not */
+
++#ifdef TIZEN_FEATURE_TRUSTZONE
++ /*tzmultiqueue patch : use trustzone flag*/
++ gboolean use_trustzone;
++#endif
+ GList *filtered; /* elements for which error messages are filtered */
+ GList *filtered_errors; /* filtered error messages */
+
+ GList *buffering_status; /* element currently buffering messages */
+ GMutex buffering_lock; /* presumably protects buffering_status (see BUFFERING_LOCK) -- confirm */
+ GMutex buffering_post_lock; /* NOTE(review): looks like it serializes posting of
+ * buffering messages -- confirm */
+
+ GMutex cleanup_lock; /* Mutex used to protect the cleanup thread */
+ GThread *cleanup_thread; /* thread used to free chains asynchronously.
+ * We store it to make sure we end up joining it
+ * before stopping the element.
+ * Protected by the object lock */
+ GList *cleanup_groups; /* List of groups to free */
+ };
+
+ /* Class structure: the vmethods below are the default handlers of the
+ * signals of the same name registered in gst_decode_bin_class_init(). */
+ struct _GstDecodeBinClass
+ {
+ GstBinClass parent_class;
+
+ /* signal fired when we found a pad that we cannot decode */
+ void (*unknown_type) (GstElement * element, GstPad * pad, GstCaps * caps);
+
+ /* signal fired to know if we continue trying to decode the given caps */
+ gboolean (*autoplug_continue) (GstElement * element, GstPad * pad,
+ GstCaps * caps);
+ /* signal fired to get a list of factories to try to autoplug */
+ GValueArray *(*autoplug_factories) (GstElement * element, GstPad * pad,
+ GstCaps * caps);
+ /* signal fired to sort the factories */
+ GValueArray *(*autoplug_sort) (GstElement * element, GstPad * pad,
+ GstCaps * caps, GValueArray * factories);
+ /* signal fired to select from the proposed list of factories */
+ GstAutoplugSelectResult (*autoplug_select) (GstElement * element,
+ GstPad * pad, GstCaps * caps, GstElementFactory * factory);
+ /* signal fired when a autoplugged element that is not linked downstream
+ * or exposed wants to query something */
+ gboolean (*autoplug_query) (GstElement * element, GstPad * pad,
+ GstQuery * query);
+
+ /* fired when the last group is drained */
+ void (*drained) (GstElement * element);
+ };
+
+ /* signals */
+ enum
+ {
+ SIGNAL_UNKNOWN_TYPE,
+ SIGNAL_AUTOPLUG_CONTINUE,
+ SIGNAL_AUTOPLUG_FACTORIES,
+ SIGNAL_AUTOPLUG_SELECT,
+ SIGNAL_AUTOPLUG_SORT,
+ SIGNAL_AUTOPLUG_QUERY,
+ SIGNAL_DRAINED,
+ LAST_SIGNAL
+ };
+
+ /* automatic sizes, while prerolling we buffer up to 2MB, we ignore time
+ * and buffers in this case. */
+ #define AUTO_PREROLL_SIZE_BYTES 2 * 1024 * 1024
+ #define AUTO_PREROLL_SIZE_BUFFERS 0
+ #define AUTO_PREROLL_NOT_SEEKABLE_SIZE_TIME 10 * GST_SECOND
+ #define AUTO_PREROLL_SEEKABLE_SIZE_TIME 0
+
+ /* when playing, keep a max of 2MB of data but try to keep the number of buffers
+ * as low as possible (try to aim for 5 buffers) */
+ #define AUTO_PLAY_SIZE_BYTES 2 * 1024 * 1024
+ #define AUTO_PLAY_SIZE_BUFFERS 5
+ #define AUTO_PLAY_SIZE_TIME 0
+
+ #define DEFAULT_SUBTITLE_ENCODING NULL
+ #define DEFAULT_USE_BUFFERING FALSE
+ #define DEFAULT_FORCE_SW_DECODERS FALSE
+ #define DEFAULT_LOW_PERCENT 10
+ #define DEFAULT_HIGH_PERCENT 99
+ /* by default we use the automatic values above */
+ #define DEFAULT_MAX_SIZE_BYTES 0
+ #define DEFAULT_MAX_SIZE_BUFFERS 0
+ #define DEFAULT_MAX_SIZE_TIME 0
+ #define DEFAULT_POST_STREAM_TOPOLOGY FALSE
+ #define DEFAULT_EXPOSE_ALL_STREAMS TRUE
+ #define DEFAULT_CONNECTION_SPEED 0
+
+ /* Properties */
+ enum
+ {
+ PROP_0,
+ PROP_CAPS,
+ PROP_SUBTITLE_ENCODING,
+ PROP_SINK_CAPS,
+ PROP_USE_BUFFERING,
+ PROP_FORCE_SW_DECODERS,
+ PROP_LOW_PERCENT,
+ PROP_HIGH_PERCENT,
+ PROP_MAX_SIZE_BYTES,
+ PROP_MAX_SIZE_BUFFERS,
+ PROP_MAX_SIZE_TIME,
+ PROP_POST_STREAM_TOPOLOGY,
+ PROP_EXPOSE_ALL_STREAMS,
+ PROP_CONNECTION_SPEED
++#ifdef TIZEN_FEATURE_TRUSTZONE
++ /*tzmultiqueue patch : use trustzone flag*/
++ ,PROP_USE_TRUSTZONE
++#endif
+ };
+
+ static GstBinClass *parent_class;
+ static guint gst_decode_bin_signals[LAST_SIGNAL] = { 0 };
+
+ static GstStaticCaps default_raw_caps = GST_STATIC_CAPS (DEFAULT_RAW_CAPS);
+
+ static void do_async_start (GstDecodeBin * dbin);
+ static void do_async_done (GstDecodeBin * dbin);
+
+ static void type_found (GstElement * typefind, guint probability,
+ GstCaps * caps, GstDecodeBin * decode_bin);
+
+ static void decodebin_set_queue_size (GstDecodeBin * dbin,
+ GstElement * multiqueue, gboolean preroll, gboolean seekable);
+ static void decodebin_set_queue_size_full (GstDecodeBin * dbin,
+ GstElement * multiqueue, gboolean use_buffering, gboolean preroll,
+ gboolean seekable);
+
+ static gboolean gst_decode_bin_autoplug_continue (GstElement * element,
+ GstPad * pad, GstCaps * caps);
+ static GValueArray *gst_decode_bin_autoplug_factories (GstElement *
+ element, GstPad * pad, GstCaps * caps);
+ static GValueArray *gst_decode_bin_autoplug_sort (GstElement * element,
+ GstPad * pad, GstCaps * caps, GValueArray * factories);
+ static GstAutoplugSelectResult gst_decode_bin_autoplug_select (GstElement *
+ element, GstPad * pad, GstCaps * caps, GstElementFactory * factory);
+ static gboolean gst_decode_bin_autoplug_query (GstElement * element,
+ GstPad * pad, GstQuery * query);
+
+ static void gst_decode_bin_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+ static void gst_decode_bin_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+ static void gst_decode_bin_set_caps (GstDecodeBin * dbin, GstCaps * caps);
+ static GstCaps *gst_decode_bin_get_caps (GstDecodeBin * dbin);
+ static void caps_notify_cb (GstPad * pad, GParamSpec * unused,
+ GstDecodeChain * chain);
+
+ static void flush_chain (GstDecodeChain * chain, gboolean flushing);
+ static void flush_group (GstDecodeGroup * group, gboolean flushing);
+ static GstPad *find_sink_pad (GstElement * element);
+ static GstStateChangeReturn gst_decode_bin_change_state (GstElement * element,
+ GstStateChange transition);
+ static void gst_decode_bin_handle_message (GstBin * bin, GstMessage * message);
+ static gboolean gst_decode_bin_remove_element (GstBin * bin,
+ GstElement * element);
+
+ static gboolean check_upstream_seekable (GstDecodeBin * dbin, GstPad * pad);
+
+ static GstCaps *get_pad_caps (GstPad * pad);
+ static void unblock_pads (GstDecodeBin * dbin);
+
+ #define EXPOSE_LOCK(dbin) G_STMT_START { \
+ GST_LOG_OBJECT (dbin, \
+ "expose locking from thread %p", \
+ g_thread_self ()); \
+ g_mutex_lock (&GST_DECODE_BIN_CAST(dbin)->expose_lock); \
+ GST_LOG_OBJECT (dbin, \
+ "expose locked from thread %p", \
+ g_thread_self ()); \
+ } G_STMT_END
+
+ #define EXPOSE_UNLOCK(dbin) G_STMT_START { \
+ GST_LOG_OBJECT (dbin, \
+ "expose unlocking from thread %p", \
+ g_thread_self ()); \
+ g_mutex_unlock (&GST_DECODE_BIN_CAST(dbin)->expose_lock); \
+ } G_STMT_END
+
+ #define DYN_LOCK(dbin) G_STMT_START { \
+ GST_LOG_OBJECT (dbin, \
+ "dynlocking from thread %p", \
+ g_thread_self ()); \
+ g_mutex_lock (&GST_DECODE_BIN_CAST(dbin)->dyn_lock); \
+ GST_LOG_OBJECT (dbin, \
+ "dynlocked from thread %p", \
+ g_thread_self ()); \
+ } G_STMT_END
+
+ #define DYN_UNLOCK(dbin) G_STMT_START { \
+ GST_LOG_OBJECT (dbin, \
+ "dynunlocking from thread %p", \
+ g_thread_self ()); \
+ g_mutex_unlock (&GST_DECODE_BIN_CAST(dbin)->dyn_lock); \
+ } G_STMT_END
+
+ #define SUBTITLE_LOCK(dbin) G_STMT_START { \
+ GST_LOG_OBJECT (dbin, \
+ "subtitle locking from thread %p", \
+ g_thread_self ()); \
+ g_mutex_lock (&GST_DECODE_BIN_CAST(dbin)->subtitle_lock); \
+ GST_LOG_OBJECT (dbin, \
+ "subtitle lock from thread %p", \
+ g_thread_self ()); \
+ } G_STMT_END
+
+ #define SUBTITLE_UNLOCK(dbin) G_STMT_START { \
+ GST_LOG_OBJECT (dbin, \
+ "subtitle unlocking from thread %p", \
+ g_thread_self ()); \
+ g_mutex_unlock (&GST_DECODE_BIN_CAST(dbin)->subtitle_lock); \
+ } G_STMT_END
+
+ #define BUFFERING_LOCK(dbin) G_STMT_START { \
+ GST_LOG_OBJECT (dbin, \
+ "buffering locking from thread %p", \
+ g_thread_self ()); \
+ g_mutex_lock (&GST_DECODE_BIN_CAST(dbin)->buffering_lock); \
+ GST_LOG_OBJECT (dbin, \
+ "buffering lock from thread %p", \
+ g_thread_self ()); \
+ } G_STMT_END
+
+ #define BUFFERING_UNLOCK(dbin) G_STMT_START { \
+ GST_LOG_OBJECT (dbin, \
+ "buffering unlocking from thread %p", \
+ g_thread_self ()); \
+ g_mutex_unlock (&GST_DECODE_BIN_CAST(dbin)->buffering_lock); \
+ } G_STMT_END
+
+ /* A source pad whose caps are not fixed yet; kept in
+ * GstDecodeChain.pending_pads until the caps settle. */
+ struct _GstPendingPad
+ {
+ GstPad *pad; /* the pad with non-fixed caps */
+ GstDecodeChain *chain; /* chain this pad belongs to */
+ gulong event_probe_id; /* event probe id on the pad (presumably pad_event_cb -- confirm) */
+ gulong notify_caps_id; /* notify::caps handler id (presumably caps_notify_cb -- confirm) */
+ };
+
+ /* An element that was autoplugged into a chain, together with the signal
+ * handler ids connected on it (named after the corresponding signals). */
+ struct _GstDecodeElement
+ {
+ GstElement *element;
+ GstElement *capsfilter; /* Optional capsfilter for Parser/Convert */
+ gulong pad_added_id; /* "pad-added" handler id */
+ gulong pad_removed_id; /* "pad-removed" handler id */
+ gulong no_more_pads_id; /* "no-more-pads" handler id */
+ };
+
+ /* Bookkeeping for a demuxer pad: a weak reference to the pad plus the
+ * probe ids installed on it (see GstDecodeGroup.demuxer_pad_probe_ids). */
+ struct _GstDemuxerPad
+ {
+ GWeakRef weakPad; /* weak ref: the demuxer owns the pad */
+ gulong event_probe_id; /* event probe id on the pad */
+ gulong query_probe_id; /* query probe id on the pad */
+ };
+
+
+ /* GstDecodeGroup
+ *
+ * Streams belonging to the same group/chain of a media file
+ *
+ * When changing something here lock the parent chain!
+ */
+ struct _GstDecodeGroup
+ {
+ GstDecodeBin *dbin; /* decodebin we belong to */
+ GstDecodeChain *parent; /* chain that created this group */
+
+ GstElement *multiqueue; /* Used for linking all child chains */
+ gulong overrunsig; /* the overrun signal for multiqueue */
+
+ gboolean overrun; /* TRUE if the multiqueue signaled overrun. This
+ * means that we should really expose the group */
+
+ gboolean no_more_pads; /* TRUE if the demuxer signaled no-more-pads */
+ gboolean drained; /* TRUE if the all children are drained */
+
+ GList *children; /* List of GstDecodeChains in this group */
+ GList *demuxer_pad_probe_ids; /* presumably GstDemuxerPad entries for the
+ * demuxer's pads -- confirm */
+
+ GList *reqpads; /* List of RequestPads for multiqueue, there is
+ * exactly one RequestPad per child chain */
+ };
+
+ struct _GstDecodeChain
+ {
+ GstDecodeGroup *parent; /* group we belong to (presumably NULL for the
+ * top-level chain -- confirm) */
+ GstDecodeBin *dbin; /* decodebin we belong to */
+
+ gint refs; /* Number of references to this object */
+
+ GMutex lock; /* Protects this chain and its groups */
+
+ GstPad *pad; /* srcpad that caused creation of this chain */
+ gulong pad_probe_id; /* id for the demuxer_source_pad_probe probe */
+
+ gboolean drained; /* TRUE if the all children are drained */
+ gboolean demuxer; /* TRUE if elements->data is a demuxer */
+ gboolean adaptive_demuxer; /* TRUE if elements->data is an adaptive streaming demuxer */
+ gboolean seekable; /* TRUE if this chain ends on a demuxer and is seekable */
+ GList *elements; /* All elements in this group, first
+ is the latest and most downstream element */
+
+ /* Note: there are only groups if the last element of this chain
+ * is a demuxer, otherwise the chain will end with an endpad.
+ * The other way around this means, that endpad only exists if this
+ * chain doesn't end with a demuxer! */
+
+ GstDecodeGroup *active_group; /* Currently active group */
+ GList *next_groups; /* head is newest group, tail is next group.
+ a new group will be created only if the head
+ group had no-more-pads. If it's only exposed
+ all new pads will be ignored! */
+ GList *pending_pads; /* Pads that have no fixed caps yet */
+
+ GstDecodePad *current_pad; /* Current ending pad of the chain that can't
+ * be exposed yet but would be the same as endpad
+ * once it can be exposed */
+ GstDecodePad *endpad; /* Pad of this chain that could be exposed */
+ gboolean deadend; /* This chain is incomplete and can't be completed,
+ e.g. no suitable decoder could be found
+ e.g. stream got EOS without buffers
+ */
+ gchar *deadend_details; /* human-readable details about the deadend
+ * (presumably missing-plugin info -- confirm) */
+ GstCaps *endcaps; /* Caps that were used when linking to the endpad
+ or that resulted in the deadend
+ */
+
+ /* FIXME: This should be done directly via a thread! */
+ GList *old_groups; /* Groups that should be freed later */
+ };
+
+ static GstDecodeChain *gst_decode_chain_ref (GstDecodeChain * chain);
+ static void gst_decode_chain_unref (GstDecodeChain * chain);
+ static void gst_decode_chain_free (GstDecodeChain * chain);
+ static GstDecodeChain *gst_decode_chain_new (GstDecodeBin * dbin,
+ GstDecodeGroup * group, GstPad * pad);
+ static void gst_decode_group_hide (GstDecodeGroup * group);
+ static void gst_decode_group_free (GstDecodeGroup * group);
+ static GstDecodeGroup *gst_decode_group_new (GstDecodeBin * dbin,
+ GstDecodeChain * chain);
+ static gboolean gst_decode_chain_is_complete (GstDecodeChain * chain);
+ static gboolean gst_decode_chain_expose (GstDecodeChain * chain,
+ GList ** endpads, gboolean * missing_plugin,
+ GString * missing_plugin_details, gboolean * last_group);
+ static gboolean gst_decode_chain_is_drained (GstDecodeChain * chain);
+ static gboolean gst_decode_chain_reset_buffering (GstDecodeChain * chain);
+ static gboolean gst_decode_group_is_complete (GstDecodeGroup * group);
+ static GstPad *gst_decode_group_control_demuxer_pad (GstDecodeGroup * group,
+ GstPad * pad);
+ static gboolean gst_decode_group_is_drained (GstDecodeGroup * group);
+ static gboolean gst_decode_group_reset_buffering (GstDecodeGroup * group);
+
+ static gboolean gst_decode_bin_expose (GstDecodeBin * dbin);
+ static void gst_decode_bin_reset_buffering (GstDecodeBin * dbin);
+
+ #define CHAIN_MUTEX_LOCK(chain) G_STMT_START { \
+ GST_LOG_OBJECT (chain->dbin, \
+ "locking chain %p from thread %p", \
+ chain, g_thread_self ()); \
+ g_mutex_lock (&chain->lock); \
+ GST_LOG_OBJECT (chain->dbin, \
+ "locked chain %p from thread %p", \
+ chain, g_thread_self ()); \
+ } G_STMT_END
+
+ #define CHAIN_MUTEX_UNLOCK(chain) G_STMT_START { \
+ GST_LOG_OBJECT (chain->dbin, \
+ "unlocking chain %p from thread %p", \
+ chain, g_thread_self ()); \
+ g_mutex_unlock (&chain->lock); \
+ } G_STMT_END
+
+ /* GstDecodePad
+ *
+ * GstPad private used for source pads of chains
+ */
+ struct _GstDecodePad
+ {
+ GstGhostPad parent; /* we extend GstGhostPad */
+ GstDecodeBin *dbin; /* decodebin we belong to */
+ GstDecodeChain *chain; /* chain that ends on this pad */
+
+ gboolean blocked; /* the *target* pad is blocked */
+ gboolean exposed; /* the pad is exposed */
+ gboolean drained; /* an EOS has been seen on the pad */
+
+ gulong block_id; /* presumably the probe id used to (un)block the
+ * target pad -- confirm */
+ };
+
+ GType gst_decode_pad_get_type (void);
+ G_DEFINE_TYPE (GstDecodePad, gst_decode_pad, GST_TYPE_GHOST_PAD);
+ #define GST_TYPE_DECODE_PAD (gst_decode_pad_get_type ())
+ #define GST_DECODE_PAD(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_DECODE_PAD,GstDecodePad))
+
+ static GstDecodePad *gst_decode_pad_new (GstDecodeBin * dbin,
+ GstDecodeChain * chain);
+ static void gst_decode_pad_activate (GstDecodePad * dpad,
+ GstDecodeChain * chain);
+ static void gst_decode_pad_unblock (GstDecodePad * dpad);
+ static void gst_decode_pad_set_blocked (GstDecodePad * dpad, gboolean blocked);
+ static gboolean gst_decode_pad_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+ static gboolean gst_decode_pad_is_exposable (GstDecodePad * endpad);
+
+ static void gst_pending_pad_free (GstPendingPad * ppad);
+ static GstPadProbeReturn pad_event_cb (GstPad * pad, GstPadProbeInfo * info,
+ gpointer data);
+
+ /********************************
+ * Standard GObject boilerplate *
+ ********************************/
+
+ static void gst_decode_bin_dispose (GObject * object);
+ static void gst_decode_bin_finalize (GObject * object);
+
+ /* Register some quarks here for the stream topology message */
+ static GQuark topology_structure_name = 0;
+ static GQuark topology_caps = 0;
+ static GQuark topology_next = 0;
+ static GQuark topology_pad = 0;
+ static GQuark topology_element_srcpad = 0;
+
+ GType gst_decode_bin_get_type (void);
+ G_DEFINE_TYPE (GstDecodeBin, gst_decode_bin, GST_TYPE_BIN);
+ #define _do_init \
+ GST_DEBUG_CATEGORY_INIT (gst_decode_bin_debug, "decodebin", 0, "decoder bin");\
+ topology_structure_name = g_quark_from_static_string ("stream-topology"); \
+ topology_caps = g_quark_from_static_string ("caps");\
+ topology_next = g_quark_from_static_string ("next");\
+ topology_pad = g_quark_from_static_string ("pad");\
+ topology_element_srcpad = g_quark_from_static_string ("element-srcpad");\
+ playback_element_init (plugin);\
+
+ GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (decodebin, "decodebin", GST_RANK_NONE,
+ GST_TYPE_DECODE_BIN, _do_init);
+
+ static gboolean
+ _gst_boolean_accumulator (GSignalInvocationHint * ihint,
+     GValue * return_accu, const GValue * handler_return, gpointer dummy)
+ {
+   /* Copy the handler's boolean result into the accumulator; emission
+    * continues only while handlers keep returning TRUE. */
+   gboolean result = g_value_get_boolean (handler_return);
+
+   g_value_set_boolean (return_accu, result);
+
+   /* returning FALSE here aborts the signal emission */
+   return result;
+ }
+
+ static gboolean
+ _gst_boolean_or_accumulator (GSignalInvocationHint * ihint,
+     GValue * return_accu, const GValue * handler_return, gpointer dummy)
+ {
+   /* OR the handler's result into the accumulated value; always keep
+    * emitting so every connected handler gets a chance to return TRUE. */
+   gboolean current = g_value_get_boolean (handler_return);
+   gboolean accumulated = g_value_get_boolean (return_accu);
+
+   g_value_set_boolean (return_accu, accumulated || current);
+
+   return TRUE;
+ }
+
+ /* we collect the first result */
+ static gboolean
+ _gst_array_accumulator (GSignalInvocationHint * ihint,
+     GValue * return_accu, const GValue * handler_return, gpointer dummy)
+ {
+   /* Take whatever boxed array the first handler produced and stop the
+    * emission immediately. */
+   g_value_set_boxed (return_accu, g_value_get_boxed (handler_return));
+
+   return FALSE;
+ }
+
+ static gboolean
+ _gst_select_accumulator (GSignalInvocationHint * ihint,
+     GValue * return_accu, const GValue * handler_return, gpointer dummy)
+ {
+   GstAutoplugSelectResult decision = g_value_get_enum (handler_return);
+
+   g_value_set_enum (return_accu, decision);
+
+   /* Keep calling further handlers only while the current one returned
+    * TRY; any other decision ends the emission. This makes it possible
+    * to register separate autoplug-select handlers that implement
+    * different TRY/EXPOSE/SKIP strategies. */
+   return decision == GST_AUTOPLUG_SELECT_TRY;
+ }
+
+ static gboolean
+ _gst_array_hasvalue_accumulator (GSignalInvocationHint * ihint,
+     GValue * return_accu, const GValue * handler_return, gpointer dummy)
+ {
+   /* Store the handler's boxed array and keep emitting until some handler
+    * actually produces a non-NULL array. */
+   gpointer boxed = g_value_get_boxed (handler_return);
+
+   g_value_set_boxed (return_accu, boxed);
+
+   /* continue the emission only while no value has been produced yet */
+   return boxed == NULL;
+ }
+
+ static void
+ gst_decode_bin_class_init (GstDecodeBinClass * klass)
+ {
+ GObjectClass *gobject_klass;
+ GstElementClass *gstelement_klass;
+ GstBinClass *gstbin_klass;
+
+ gobject_klass = (GObjectClass *) klass;
+ gstelement_klass = (GstElementClass *) klass;
+ gstbin_klass = (GstBinClass *) klass;
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ gobject_klass->dispose = gst_decode_bin_dispose;
+ gobject_klass->finalize = gst_decode_bin_finalize;
+ gobject_klass->set_property = gst_decode_bin_set_property;
+ gobject_klass->get_property = gst_decode_bin_get_property;
+
+ /**
+ * GstDecodeBin::unknown-type:
+ * @bin: The decodebin.
+ * @pad: The new pad containing caps that cannot be resolved to a 'final'
+ * stream type.
+ * @caps: The #GstCaps of the pad that cannot be resolved.
+ *
+ * This signal is emitted when a pad for which there is no further possible
+ * decoding is added to the decodebin.
+ */
+ gst_decode_bin_signals[SIGNAL_UNKNOWN_TYPE] =
+ g_signal_new ("unknown-type", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstDecodeBinClass, unknown_type),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, GST_TYPE_PAD, GST_TYPE_CAPS);
+
+ /**
+ * GstDecodeBin::autoplug-continue:
+ * @bin: The decodebin.
+ * @pad: The #GstPad.
+ * @caps: The #GstCaps found.
+ *
+ * This signal is emitted whenever decodebin finds a new stream. It is
+ * emitted before looking for any elements that can handle that stream.
+ *
+ * > Invocation of signal handlers stops after the first signal handler
+ * > returns %FALSE. Signal handlers are invoked in the order they were
+ * > connected in.
+ *
+ * Returns: %TRUE if you wish decodebin to look for elements that can
+ * handle the given @caps. If %FALSE, those caps will be considered as
+ * final and the pad will be exposed as such (see 'pad-added' signal of
+ * #GstElement).
+ */
+ gst_decode_bin_signals[SIGNAL_AUTOPLUG_CONTINUE] =
+ g_signal_new ("autoplug-continue", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstDecodeBinClass, autoplug_continue),
+ _gst_boolean_accumulator, NULL, NULL, G_TYPE_BOOLEAN, 2, GST_TYPE_PAD,
+ GST_TYPE_CAPS);
+
+ /**
+ * GstDecodeBin::autoplug-factories:
+ * @bin: The decodebin.
+ * @pad: The #GstPad.
+ * @caps: The #GstCaps found.
+ *
+ * This signal is emitted when an array of possible factories for @caps on
+ * @pad is needed. Decodebin will by default return an array with all
+ * compatible factories, sorted by rank.
+ *
+ * If this function returns NULL, @pad will be exposed as a final caps.
+ *
+ * If this function returns an empty array, the pad will be considered as
+ * having an unhandled media type.
+ *
+ * > Only the signal handler that is connected first will ever be invoked.
+ * > Don't connect signal handlers with the #G_CONNECT_AFTER flag to this
+ * > signal, they will never be invoked!
+ *
+ * Returns: a #GValueArray* with a list of factories to try. The factories are
+ * by default tried in the returned order or based on the index returned by
+ * "autoplug-select".
+ */
+ gst_decode_bin_signals[SIGNAL_AUTOPLUG_FACTORIES] =
+ g_signal_new ("autoplug-factories", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstDecodeBinClass,
+ autoplug_factories), _gst_array_accumulator, NULL,
+ NULL, G_TYPE_VALUE_ARRAY, 2, GST_TYPE_PAD, GST_TYPE_CAPS);
+
+ /**
+ * GstDecodeBin::autoplug-sort:
+ * @bin: The decodebin.
+ * @pad: The #GstPad.
+ * @caps: The #GstCaps.
+ * @factories: A #GValueArray of possible #GstElementFactory to use.
+ *
+ * Once decodebin has found the possible #GstElementFactory objects to try
+ * for @caps on @pad, this signal is emitted. The purpose of the signal is for
+ * the application to perform additional sorting or filtering on the element
+ * factory array.
+ *
+ * The callee should copy and modify @factories or return %NULL if the
+ * order should not change.
+ *
+ * > Invocation of signal handlers stops after one signal handler has
+ * > returned something else than %NULL. Signal handlers are invoked in
+ * > the order they were connected in.
+ * > Don't connect signal handlers with the #G_CONNECT_AFTER flag to this
+ * > signal, they will never be invoked!
+ *
+ * Returns: A new sorted array of #GstElementFactory objects.
+ */
+ gst_decode_bin_signals[SIGNAL_AUTOPLUG_SORT] =
+ g_signal_new ("autoplug-sort", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstDecodeBinClass, autoplug_sort),
+ _gst_array_hasvalue_accumulator, NULL,
+ NULL, G_TYPE_VALUE_ARRAY, 3, GST_TYPE_PAD, GST_TYPE_CAPS,
+ G_TYPE_VALUE_ARRAY | G_SIGNAL_TYPE_STATIC_SCOPE);
+
+ /**
+ * GstDecodeBin::autoplug-select:
+ * @bin: The decodebin.
+ * @pad: The #GstPad.
+ * @caps: The #GstCaps.
+ * @factory: A #GstElementFactory to use.
+ *
+ * This signal is emitted once decodebin has found all the possible
+ * #GstElementFactory that can be used to handle the given @caps. For each of
+ * those factories, this signal is emitted.
+ *
+ * The signal handler should return a #GstAutoplugSelectResult enum
+ * value indicating what decodebin should do next.
+ *
+ * A value of #GstAutoplugSelectResult::try will try to autoplug an element from
+ * @factory.
+ *
+ * A value of #GstAutoplugSelectResult::expose will expose @pad without plugging
+ * any element to it.
+ *
+ * A value of #GstAutoplugSelectResult::skip will skip @factory and move to the
+ * next factory.
+ *
+ * > The signal handler will not be invoked if any of the previously
+ * > registered signal handlers (if any) return a value other than
+ * > GST_AUTOPLUG_SELECT_TRY. Which also means that if you return
+ * > GST_AUTOPLUG_SELECT_TRY from one signal handler, handlers that get
+ * > registered next (again, if any) can override that decision.
+ *
+ * Returns: a #GstAutoplugSelectResult that indicates the required
+ * operation. the default handler will always return
+ * #GstAutoplugSelectResult::try.
+ */
+ gst_decode_bin_signals[SIGNAL_AUTOPLUG_SELECT] =
+ g_signal_new ("autoplug-select", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstDecodeBinClass, autoplug_select),
+ _gst_select_accumulator, NULL,
+ NULL, GST_TYPE_AUTOPLUG_SELECT_RESULT, 3, GST_TYPE_PAD, GST_TYPE_CAPS,
+ GST_TYPE_ELEMENT_FACTORY);
+
+ /**
+ * GstDecodeBin::autoplug-query:
+ * @bin: The decodebin.
+ * @pad: The #GstPad.
+ * @child: The child element doing the query
+ * @query: The #GstQuery.
+ *
+ * This signal is emitted whenever an autoplugged element that is
+ * not linked downstream yet and not exposed does a query. It can
+ * be used to tell the element about the downstream supported caps
+ * for example.
+ *
+ * Returns: %TRUE if the query was handled, %FALSE otherwise.
+ */
+ gst_decode_bin_signals[SIGNAL_AUTOPLUG_QUERY] =
+ g_signal_new ("autoplug-query", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstDecodeBinClass, autoplug_query),
+ _gst_boolean_or_accumulator, NULL, NULL, G_TYPE_BOOLEAN, 3, GST_TYPE_PAD,
+ GST_TYPE_ELEMENT, GST_TYPE_QUERY | G_SIGNAL_TYPE_STATIC_SCOPE);
+
+ /**
+ * GstDecodeBin::drained
+ * @bin: The decodebin
+ *
+ * This signal is emitted once decodebin has finished decoding all the data.
+ */
+ gst_decode_bin_signals[SIGNAL_DRAINED] =
+ g_signal_new ("drained", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstDecodeBinClass, drained),
+ NULL, NULL, NULL, G_TYPE_NONE, 0, G_TYPE_NONE);
+
+ g_object_class_install_property (gobject_klass, PROP_CAPS,
+ g_param_spec_boxed ("caps", "Caps", "The caps on which to stop decoding.",
+ GST_TYPE_CAPS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_klass, PROP_SUBTITLE_ENCODING,
+ g_param_spec_string ("subtitle-encoding", "subtitle encoding",
+ "Encoding to assume if input subtitles are not in UTF-8 encoding. "
+ "If not set, the GST_SUBTITLE_ENCODING environment variable will "
+ "be checked for an encoding to use. If that is not set either, "
+ "ISO-8859-15 will be assumed.", NULL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_klass, PROP_SINK_CAPS,
+ g_param_spec_boxed ("sink-caps", "Sink Caps",
+ "The caps of the input data. (NULL = use typefind element)",
+ GST_TYPE_CAPS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstDecodeBin::use-buffering
+ *
+ * Activate buffering in decodebin. This will instruct the multiqueues behind
+ * decoders to emit BUFFERING messages.
+ */
+ g_object_class_install_property (gobject_klass, PROP_USE_BUFFERING,
+ g_param_spec_boolean ("use-buffering", "Use Buffering",
+ "Emit GST_MESSAGE_BUFFERING based on low-/high-percent thresholds",
+ DEFAULT_USE_BUFFERING, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstDecodeBin::force-sw-decoders:
+ *
+ * While auto-plugging, if set to %TRUE, those decoders within
+ * "Hardware" klass will be ignored. Otherwise they will be tried.
+ *
+ * Since: 1.18
+ */
+ g_object_class_install_property (gobject_klass, PROP_FORCE_SW_DECODERS,
+ g_param_spec_boolean ("force-sw-decoders", "Software Docoders Only",
+ "Use only sofware decoders to process streams",
+ DEFAULT_FORCE_SW_DECODERS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstDecodeBin:low-percent
+ *
+ * Low threshold percent for buffering to start.
+ */
+ g_object_class_install_property (gobject_klass, PROP_LOW_PERCENT,
+ g_param_spec_int ("low-percent", "Low percent",
+ "Low threshold for buffering to start", 0, 100,
+ DEFAULT_LOW_PERCENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstDecodeBin:high-percent
+ *
+ * High threshold percent for buffering to finish.
+ */
+ g_object_class_install_property (gobject_klass, PROP_HIGH_PERCENT,
+ g_param_spec_int ("high-percent", "High percent",
+ "High threshold for buffering to finish", 0, 100,
+ DEFAULT_HIGH_PERCENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstDecodeBin:max-size-bytes
+ *
+ * Max amount of bytes in the queue (0=automatic).
+ */
+ g_object_class_install_property (gobject_klass, PROP_MAX_SIZE_BYTES,
+ g_param_spec_uint ("max-size-bytes", "Max. size (bytes)",
+ "Max. amount of bytes in the queue (0=automatic)",
+ 0, G_MAXUINT, DEFAULT_MAX_SIZE_BYTES,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstDecodeBin:max-size-buffers
+ *
+ * Max amount of buffers in the queue (0=automatic).
+ */
+ g_object_class_install_property (gobject_klass, PROP_MAX_SIZE_BUFFERS,
+ g_param_spec_uint ("max-size-buffers", "Max. size (buffers)",
+ "Max. number of buffers in the queue (0=automatic)",
+ 0, G_MAXUINT, DEFAULT_MAX_SIZE_BUFFERS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstDecodeBin:max-size-time
+ *
+ * Max amount of time in the queue (in ns, 0=automatic).
+ */
+ g_object_class_install_property (gobject_klass, PROP_MAX_SIZE_TIME,
+ g_param_spec_uint64 ("max-size-time", "Max. size (ns)",
+ "Max. amount of data in the queue (in ns, 0=automatic)",
+ 0, G_MAXUINT64,
+ DEFAULT_MAX_SIZE_TIME, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstDecodeBin::post-stream-topology
+ *
+ * Post stream-topology messages on the bus every time the topology changes.
+ */
+ g_object_class_install_property (gobject_klass, PROP_POST_STREAM_TOPOLOGY,
+ g_param_spec_boolean ("post-stream-topology", "Post Stream Topology",
+ "Post stream-topology messages",
+ DEFAULT_POST_STREAM_TOPOLOGY,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstDecodeBin::expose-all-streams
+ *
+ * Expose streams of unknown type.
+ *
+ * If set to %FALSE, then only the streams that can be decoded to the final
+ * caps (see 'caps' property) will have a pad exposed. Streams that do not
+ * match those caps but could have been decoded will not have decoder plugged
+ * in internally and will not have a pad exposed.
+ */
+ g_object_class_install_property (gobject_klass, PROP_EXPOSE_ALL_STREAMS,
+ g_param_spec_boolean ("expose-all-streams", "Expose All Streams",
+ "Expose all streams, including those of unknown type or that don't match the 'caps' property",
+ DEFAULT_EXPOSE_ALL_STREAMS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
++#ifdef TIZEN_FEATURE_TRUSTZONE
++ /*tzmultiqueue patch : use-trustzone property*/
++ g_object_class_install_property (gobject_klass, PROP_USE_TRUSTZONE,
++ g_param_spec_boolean ("use-trustzone", "Add trustzone flag to decodebin2",
++ "Add trustzone flag to decodebin2 to add tzmultiqueue element",
++ 0,
++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++#endif
+ /**
+ * GstDecodeBin2::connection-speed
+ *
+ * Network connection speed in kbps (0 = unknownw)
+ */
+ g_object_class_install_property (gobject_klass, PROP_CONNECTION_SPEED,
+ g_param_spec_uint64 ("connection-speed", "Connection Speed",
+ "Network connection speed in kbps (0 = unknown)",
+ 0, G_MAXUINT64 / 1000, DEFAULT_CONNECTION_SPEED,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+
+
+ klass->autoplug_continue =
+ GST_DEBUG_FUNCPTR (gst_decode_bin_autoplug_continue);
+ klass->autoplug_factories =
+ GST_DEBUG_FUNCPTR (gst_decode_bin_autoplug_factories);
+ klass->autoplug_sort = GST_DEBUG_FUNCPTR (gst_decode_bin_autoplug_sort);
+ klass->autoplug_select = GST_DEBUG_FUNCPTR (gst_decode_bin_autoplug_select);
+ klass->autoplug_query = GST_DEBUG_FUNCPTR (gst_decode_bin_autoplug_query);
+
+ gst_element_class_add_static_pad_template (gstelement_klass,
+ &decoder_bin_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_klass,
+ &decoder_bin_src_template);
+
+ gst_element_class_set_static_metadata (gstelement_klass,
+ "Decoder Bin", "Generic/Bin/Decoder",
+ "Autoplug and decode to raw media",
+ "Edward Hervey <edward.hervey@collabora.co.uk>, "
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+ gstelement_klass->change_state =
+ GST_DEBUG_FUNCPTR (gst_decode_bin_change_state);
+
+ gstbin_klass->handle_message =
+ GST_DEBUG_FUNCPTR (gst_decode_bin_handle_message);
+
+ gstbin_klass->remove_element =
+ GST_DEBUG_FUNCPTR (gst_decode_bin_remove_element);
+
+ g_type_class_ref (GST_TYPE_DECODE_PAD);
+
+ gst_type_mark_as_plugin_api (GST_TYPE_AUTOPLUG_SELECT_RESULT, 0);
+ }
+
+ /* Must be called with factories lock!
+ *
+ * Refreshes dbin->factories, the cached list of decodable element
+ * factories. The registry feature-list cookie is used as a cheap
+ * invalidation check: the cached list is rebuilt only when the cookie
+ * has changed (or no list exists yet). When force-sw-decoders is set,
+ * factories classified as Hardware are dropped from the list. The
+ * resulting list is sorted with the shared playback-utils comparator. */
+ static void
+ gst_decode_bin_update_factories_list (GstDecodeBin * dbin)
+ {
+ guint cookie;
+ GList *factories, *tmp;
+
+ cookie = gst_registry_get_feature_list_cookie (gst_registry_get ());
+ if (!dbin->factories || dbin->factories_cookie != cookie) {
+ /* cache is stale (or empty): free the old list before rebuilding */
+ if (dbin->factories)
+ gst_plugin_feature_list_free (dbin->factories);
+ factories =
+ gst_element_factory_list_get_elements
+ (GST_ELEMENT_FACTORY_TYPE_DECODABLE, GST_RANK_MARGINAL);
+
+ if (dbin->force_sw_decoders) {
+ /* filter out Hardware class elements */
+ dbin->factories = NULL;
+ for (tmp = factories; tmp; tmp = g_list_next (tmp)) {
+ GstElementFactory *factory = GST_ELEMENT_FACTORY_CAST (tmp->data);
+ if (!gst_element_factory_list_is_type (factory,
+ GST_ELEMENT_FACTORY_TYPE_HARDWARE)) {
+ dbin->factories = g_list_prepend (dbin->factories, factory);
+ } else {
+ /* hardware factory: drop our reference, it is not kept */
+ gst_object_unref (factory);
+ }
+ }
+ g_list_free (factories);
+ } else {
+ dbin->factories = factories;
+ }
+
+ dbin->factories =
+ g_list_sort (dbin->factories,
+ gst_playback_utils_compare_factories_func);
+ dbin->factories_cookie = cookie;
+ }
+ }
+
+ /* Instance init: creates the internal typefind element, ghosts its sink
+ * pad as the bin's "sink" pad, initializes all mutexes and sets every
+ * property field to its default value. */
+ static void
+ gst_decode_bin_init (GstDecodeBin * decode_bin)
+ {
+ /* first filter out the interesting element factories */
+ g_mutex_init (&decode_bin->factories_lock);
+
+ /* we create the typefind element only once */
+ decode_bin->typefind = gst_element_factory_make ("typefind", "typefind");
+ if (!decode_bin->typefind) {
+ g_warning ("can't find typefind element, decodebin will not work");
+ } else {
+ GstPad *pad;
+ GstPad *gpad;
+ GstPadTemplate *pad_tmpl;
+
+ /* add the typefind element */
+ if (!gst_bin_add (GST_BIN (decode_bin), decode_bin->typefind)) {
+ g_warning ("Could not add typefind element, decodebin will not work");
+ gst_object_unref (decode_bin->typefind);
+ decode_bin->typefind = NULL;
+ }
+
+ /* NOTE(review): if gst_bin_add() failed above, typefind is NULL here
+ * and gst_element_get_static_pad() would be called on NULL — confirm
+ * whether this path should bail out instead (same latent issue exists
+ * upstream; cannot be changed inside this patch). */
+ /* get the sinkpad */
+ pad = gst_element_get_static_pad (decode_bin->typefind, "sink");
+
+ /* get the pad template */
+ pad_tmpl = gst_static_pad_template_get (&decoder_bin_sink_template);
+
+ /* ghost the sink pad to ourself */
+ gpad = gst_ghost_pad_new_from_template ("sink", pad, pad_tmpl);
+ gst_pad_set_active (gpad, TRUE);
+ gst_element_add_pad (GST_ELEMENT (decode_bin), gpad);
+
+ gst_object_unref (pad_tmpl);
+ gst_object_unref (pad);
+ }
+
+ g_mutex_init (&decode_bin->expose_lock);
+ decode_bin->decode_chain = NULL;
+
+ g_mutex_init (&decode_bin->dyn_lock);
+ decode_bin->shutdown = FALSE;
+ decode_bin->blocked_pads = NULL;
+
+ g_mutex_init (&decode_bin->subtitle_lock);
+ g_mutex_init (&decode_bin->buffering_lock);
+ g_mutex_init (&decode_bin->buffering_post_lock);
+
+ g_mutex_init (&decode_bin->cleanup_lock);
+ decode_bin->cleanup_thread = NULL;
+
+ /* default property values (DEFAULT_* macros defined earlier in the file) */
+ decode_bin->encoding = g_strdup (DEFAULT_SUBTITLE_ENCODING);
+ decode_bin->caps = gst_static_caps_get (&default_raw_caps);
+ decode_bin->use_buffering = DEFAULT_USE_BUFFERING;
+ decode_bin->force_sw_decoders = DEFAULT_FORCE_SW_DECODERS;
+ decode_bin->low_percent = DEFAULT_LOW_PERCENT;
+ decode_bin->high_percent = DEFAULT_HIGH_PERCENT;
+
+ decode_bin->max_size_bytes = DEFAULT_MAX_SIZE_BYTES;
+ decode_bin->max_size_buffers = DEFAULT_MAX_SIZE_BUFFERS;
+ decode_bin->max_size_time = DEFAULT_MAX_SIZE_TIME;
+
+ decode_bin->expose_allstreams = DEFAULT_EXPOSE_ALL_STREAMS;
+ decode_bin->connection_speed = DEFAULT_CONNECTION_SPEED;
+ }
+
+ /* GObject dispose: releases everything that holds references (factory
+ * list, decode chain, caps, subtitle list) and unblocks any blocked
+ * pads. Fields are NULLed so a repeated dispose is harmless. */
+ static void
+ gst_decode_bin_dispose (GObject * object)
+ {
+ GstDecodeBin *decode_bin;
+
+ decode_bin = GST_DECODE_BIN (object);
+
+ if (decode_bin->factories)
+ gst_plugin_feature_list_free (decode_bin->factories);
+ decode_bin->factories = NULL;
+
+ if (decode_bin->decode_chain)
+ gst_decode_chain_free (decode_bin->decode_chain);
+ decode_bin->decode_chain = NULL;
+
+ if (decode_bin->caps)
+ gst_caps_unref (decode_bin->caps);
+ decode_bin->caps = NULL;
+
+ g_free (decode_bin->encoding);
+ decode_bin->encoding = NULL;
+
+ /* list elements are not owned here; only the list itself is freed */
+ g_list_free (decode_bin->subtitles);
+ decode_bin->subtitles = NULL;
+
+ unblock_pads (decode_bin);
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+ }
+
+ /* GObject finalize: clears all mutexes initialized in _init() and
+ * chains up. Runs after dispose, when no references remain. */
+ static void
+ gst_decode_bin_finalize (GObject * object)
+ {
+ GstDecodeBin *decode_bin;
+
+ decode_bin = GST_DECODE_BIN (object);
+
+ g_mutex_clear (&decode_bin->expose_lock);
+ g_mutex_clear (&decode_bin->dyn_lock);
+ g_mutex_clear (&decode_bin->subtitle_lock);
+ g_mutex_clear (&decode_bin->buffering_lock);
+ g_mutex_clear (&decode_bin->buffering_post_lock);
+ g_mutex_clear (&decode_bin->factories_lock);
+ g_mutex_clear (&decode_bin->cleanup_lock);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ /* _set_caps
+ * Changes the caps on which decodebin will stop decoding.
+ * Will unref the previously set one. The refcount of the given caps will be
+ * increased.
+ * @caps can be NULL.
+ *
+ * MT-safe (takes the object lock around the replace)
+ */
+ static void
+ gst_decode_bin_set_caps (GstDecodeBin * dbin, GstCaps * caps)
+ {
+ GST_DEBUG_OBJECT (dbin, "Setting new caps: %" GST_PTR_FORMAT, caps);
+
+ GST_OBJECT_LOCK (dbin);
+ /* gst_caps_replace refs the new caps and unrefs the old ones */
+ gst_caps_replace (&dbin->caps, caps);
+ GST_OBJECT_UNLOCK (dbin);
+ }
+
+ /* _get_caps
+ * Returns the currently configured caps on which decodebin will stop decoding.
+ * The returned caps (if not NULL), will have its refcount incremented;
+ * the caller owns that reference.
+ *
+ * MT-safe (takes the object lock while reading dbin->caps)
+ */
+ static GstCaps *
+ gst_decode_bin_get_caps (GstDecodeBin * dbin)
+ {
+ GstCaps *caps;
+
+ GST_DEBUG_OBJECT (dbin, "Getting currently set caps");
+
+ GST_OBJECT_LOCK (dbin);
+ caps = dbin->caps;
+ if (caps)
+ gst_caps_ref (caps);
+ GST_OBJECT_UNLOCK (dbin);
+
+ return caps;
+ }
+
+ /* Forwards the "sink-caps" property to the internal typefind element's
+ * "force-caps" property, bypassing type detection when set. */
+ static void
+ gst_decode_bin_set_sink_caps (GstDecodeBin * dbin, GstCaps * caps)
+ {
+ GST_DEBUG_OBJECT (dbin, "Setting new caps: %" GST_PTR_FORMAT, caps);
+
+ g_object_set (dbin->typefind, "force-caps", caps, NULL);
+ }
+
+ /* Reads back typefind's "force-caps" property. Returns a caps reference
+ * owned by the caller (g_object_get copies boxed values), or NULL. */
+ static GstCaps *
+ gst_decode_bin_get_sink_caps (GstDecodeBin * dbin)
+ {
+ GstCaps *caps;
+
+ GST_DEBUG_OBJECT (dbin, "Getting currently set caps");
+
+ g_object_get (dbin->typefind, "force-caps", &caps, NULL);
+
+ return caps;
+ }
+
+ /* Stores a copy of @encoding under the subtitle lock and pushes it to
+ * every subtitle element already added to the bin. @encoding may be
+ * NULL (g_strdup (NULL) yields NULL). */
+ static void
+ gst_decode_bin_set_subs_encoding (GstDecodeBin * dbin, const gchar * encoding)
+ {
+ GList *walk;
+
+ GST_DEBUG_OBJECT (dbin, "Setting new encoding: %s", GST_STR_NULL (encoding));
+
+ SUBTITLE_LOCK (dbin);
+ g_free (dbin->encoding);
+ dbin->encoding = g_strdup (encoding);
+
+ /* set the subtitle encoding on all added elements */
+ for (walk = dbin->subtitles; walk; walk = g_list_next (walk)) {
+ g_object_set (G_OBJECT (walk->data), "subtitle-encoding", dbin->encoding,
+ NULL);
+ }
+ SUBTITLE_UNLOCK (dbin);
+ }
+
+ /* Returns a newly-allocated copy of the current subtitle encoding
+ * (caller frees with g_free), taken under the subtitle lock. */
+ static gchar *
+ gst_decode_bin_get_subs_encoding (GstDecodeBin * dbin)
+ {
+ gchar *encoding;
+
+ GST_DEBUG_OBJECT (dbin, "Getting currently set encoding");
+
+ SUBTITLE_LOCK (dbin);
+ encoding = g_strdup (dbin->encoding);
+ SUBTITLE_UNLOCK (dbin);
+
+ return encoding;
+ }
+
+ /* GObject property setter. Most scalar fields are written without a
+ * lock (matching upstream); connection-speed takes the object lock and
+ * is stored internally in bps (kbps * 1000). */
+ static void
+ gst_decode_bin_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+ GstDecodeBin *dbin;
+
+ dbin = GST_DECODE_BIN (object);
+
+ switch (prop_id) {
+ case PROP_CAPS:
+ gst_decode_bin_set_caps (dbin, g_value_get_boxed (value));
+ break;
+ case PROP_SUBTITLE_ENCODING:
+ gst_decode_bin_set_subs_encoding (dbin, g_value_get_string (value));
+ break;
+ case PROP_SINK_CAPS:
+ gst_decode_bin_set_sink_caps (dbin, g_value_get_boxed (value));
+ break;
+ case PROP_USE_BUFFERING:
+ dbin->use_buffering = g_value_get_boolean (value);
+ break;
+ case PROP_FORCE_SW_DECODERS:
+ dbin->force_sw_decoders = g_value_get_boolean (value);
+ break;
+ case PROP_LOW_PERCENT:
+ dbin->low_percent = g_value_get_int (value);
+ break;
+ case PROP_HIGH_PERCENT:
+ dbin->high_percent = g_value_get_int (value);
+ break;
+ case PROP_MAX_SIZE_BYTES:
+ dbin->max_size_bytes = g_value_get_uint (value);
+ break;
+ case PROP_MAX_SIZE_BUFFERS:
+ dbin->max_size_buffers = g_value_get_uint (value);
+ break;
+ case PROP_MAX_SIZE_TIME:
+ dbin->max_size_time = g_value_get_uint64 (value);
+ break;
+ case PROP_POST_STREAM_TOPOLOGY:
+ dbin->post_stream_topology = g_value_get_boolean (value);
+ break;
+ case PROP_EXPOSE_ALL_STREAMS:
+ dbin->expose_allstreams = g_value_get_boolean (value);
+ break;
++#ifdef TIZEN_FEATURE_TRUSTZONE
++ /*tzmultiqueue patch : use-trustzone property*/
++ case PROP_USE_TRUSTZONE:
++ GST_DEBUG_OBJECT (dbin, "set dbin->use_trustzone to %d", g_value_get_boolean(value));
++ dbin->use_trustzone = g_value_get_boolean (value);
++ break;
++#endif
+ case PROP_CONNECTION_SPEED:
+ GST_OBJECT_LOCK (dbin);
+ /* property is exposed in kbps, stored in bps */
+ dbin->connection_speed = g_value_get_uint64 (value) * 1000;
+ GST_OBJECT_UNLOCK (dbin);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ /* GObject property getter; mirrors the setter above. g_value_take_*
+ * transfers ownership of the references returned by the _get helpers
+ * to the GValue. connection-speed converts back from bps to kbps. */
+ static void
+ gst_decode_bin_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+ {
+ GstDecodeBin *dbin;
+
+ dbin = GST_DECODE_BIN (object);
+ switch (prop_id) {
+ case PROP_CAPS:
+ g_value_take_boxed (value, gst_decode_bin_get_caps (dbin));
+ break;
+ case PROP_SUBTITLE_ENCODING:
+ g_value_take_string (value, gst_decode_bin_get_subs_encoding (dbin));
+ break;
+ case PROP_SINK_CAPS:
+ g_value_take_boxed (value, gst_decode_bin_get_sink_caps (dbin));
+ break;
+ case PROP_USE_BUFFERING:
+ g_value_set_boolean (value, dbin->use_buffering);
+ break;
+ case PROP_FORCE_SW_DECODERS:
+ g_value_set_boolean (value, dbin->force_sw_decoders);
+ break;
+ case PROP_LOW_PERCENT:
+ g_value_set_int (value, dbin->low_percent);
+ break;
+ case PROP_HIGH_PERCENT:
+ g_value_set_int (value, dbin->high_percent);
+ break;
+ case PROP_MAX_SIZE_BYTES:
+ g_value_set_uint (value, dbin->max_size_bytes);
+ break;
+ case PROP_MAX_SIZE_BUFFERS:
+ g_value_set_uint (value, dbin->max_size_buffers);
+ break;
+ case PROP_MAX_SIZE_TIME:
+ g_value_set_uint64 (value, dbin->max_size_time);
+ break;
+ case PROP_POST_STREAM_TOPOLOGY:
+ g_value_set_boolean (value, dbin->post_stream_topology);
+ break;
+ case PROP_EXPOSE_ALL_STREAMS:
+ g_value_set_boolean (value, dbin->expose_allstreams);
+ break;
++#ifdef TIZEN_FEATURE_TRUSTZONE
++ /*tzmultiqueue patch : use-trustzone property*/
++ case PROP_USE_TRUSTZONE:
++ g_value_set_boolean (value, dbin->use_trustzone);
++ break;
++#endif
+ case PROP_CONNECTION_SPEED:
+ GST_OBJECT_LOCK (dbin);
+ g_value_set_uint64 (value, dbin->connection_speed / 1000);
+ GST_OBJECT_UNLOCK (dbin);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+
+ /*****
+ * Default autoplug signal handlers
+ *****/
+
+ /* Default "autoplug-continue" handler: always keep autoplugging.
+ * @pad and @caps are unused here; applications override the signal
+ * to stop autoplugging at specific caps. */
+ static gboolean
+ gst_decode_bin_autoplug_continue (GstElement * element, GstPad * pad,
+ GstCaps * caps)
+ {
+ GST_DEBUG_OBJECT (element, "autoplug-continue returns TRUE");
+
+ /* by default we always continue */
+ return TRUE;
+ }
+
+ /* Default "autoplug-factories" handler: returns all cached factories
+ * whose sink pads are compatible with @caps, wrapped in a GValueArray
+ * (the signal's declared return type). The factories lock guards the
+ * cached list while it is refreshed and filtered. Caller (the signal
+ * machinery / analyze_new_pad) frees the returned array. */
+ static GValueArray *
+ gst_decode_bin_autoplug_factories (GstElement * element, GstPad * pad,
+ GstCaps * caps)
+ {
+ GList *list, *tmp;
+ GValueArray *result;
+ GstDecodeBin *dbin = GST_DECODE_BIN_CAST (element);
+
+ GST_DEBUG_OBJECT (element, "finding factories");
+
+ /* return all compatible factories for caps */
+ g_mutex_lock (&dbin->factories_lock);
+ gst_decode_bin_update_factories_list (dbin);
+ list =
+ gst_element_factory_list_filter (dbin->factories, caps, GST_PAD_SINK,
+ gst_caps_is_fixed (caps));
+ g_mutex_unlock (&dbin->factories_lock);
+
+ result = g_value_array_new (g_list_length (list));
+ for (tmp = list; tmp; tmp = tmp->next) {
+ GstElementFactory *factory = GST_ELEMENT_FACTORY_CAST (tmp->data);
+ GValue val = { 0, };
+
+ /* each GValue takes its own reference on the factory */
+ g_value_init (&val, G_TYPE_OBJECT);
+ g_value_set_object (&val, factory);
+ g_value_array_append (result, &val);
+ g_value_unset (&val);
+ }
+ gst_plugin_feature_list_free (list);
+
+ GST_DEBUG_OBJECT (element, "autoplug-factories returns %p", result);
+
+ return result;
+ }
+
+ /* Default "autoplug-sort" handler: NULL means "keep the factory list
+ * as-is" (the caller only replaces the list on a non-NULL return). */
+ static GValueArray *
+ gst_decode_bin_autoplug_sort (GstElement * element, GstPad * pad,
+ GstCaps * caps, GValueArray * factories)
+ {
+ return NULL;
+ }
+
+ /* Default "autoplug-select" handler: always try the proposed factory. */
+ static GstAutoplugSelectResult
+ gst_decode_bin_autoplug_select (GstElement * element, GstPad * pad,
+ GstCaps * caps, GstElementFactory * factory)
+ {
+ GST_DEBUG_OBJECT (element, "default autoplug-select returns TRY");
+
+ /* Try factory. */
+ return GST_AUTOPLUG_SELECT_TRY;
+ }
+
+ /* Default "autoplug-query" handler: FALSE = query not handled. */
+ static gboolean
+ gst_decode_bin_autoplug_query (GstElement * element, GstPad * pad,
+ GstQuery * query)
+ {
+ /* No query handled here */
+ return FALSE;
+ }
+
+ /********
+ * Discovery methods
+ *
+ * Forward declarations for the stream-discovery/autoplugging helpers
+ * defined further below in this file.
+ *****/
+
+ /* caps / element classification predicates */
+ static gboolean are_final_caps (GstDecodeBin * dbin, GstCaps * caps);
+ static gboolean is_demuxer_element (GstElement * srcelement);
+ static gboolean is_adaptive_demuxer_element (GstElement * srcelement);
+
+ /* plugging helpers used by analyze_new_pad() */
+ static gboolean connect_pad (GstDecodeBin * dbin, GstElement * src,
+ GstDecodePad * dpad, GstPad * pad, GstCaps * caps, GValueArray * factories,
+ GstDecodeChain * chain, gchar ** deadend_details);
+ static GList *connect_element (GstDecodeBin * dbin, GstDecodeElement * delem,
+ GstDecodeChain * chain);
+ static void expose_pad (GstDecodeBin * dbin, GstElement * src,
+ GstDecodePad * dpad, GstPad * pad, GstCaps * caps, GstDecodeChain * chain);
+
+ /* element signal callbacks attached to autoplugged elements */
+ static void pad_added_cb (GstElement * element, GstPad * pad,
+ GstDecodeChain * chain);
+ static void pad_removed_cb (GstElement * element, GstPad * pad,
+ GstDecodeChain * chain);
+ static void no_more_pads_cb (GstElement * element, GstDecodeChain * chain);
+
+ static GstDecodeGroup *gst_decode_chain_get_current_group (GstDecodeChain *
+ chain);
+
+ /* gst_pad_sticky_events_foreach callback: drops every sticky event on
+ * the pad (used when a ghost pad's target is removed). Returning TRUE
+ * continues iteration. */
+ static gboolean
+ clear_sticky_events (GstPad * pad, GstEvent ** event, gpointer user_data)
+ {
+ GST_DEBUG_OBJECT (pad, "clearing sticky event %" GST_PTR_FORMAT, *event);
+ gst_event_unref (*event);
+ *event = NULL;
+ return TRUE;
+ }
+
+ /* gst_pad_sticky_events_foreach callback: re-stores each sticky event
+ * from the target pad onto the ghost pad passed as @user_data. */
+ static gboolean
+ copy_sticky_events (GstPad * pad, GstEvent ** event, gpointer user_data)
+ {
+ GstPad *gpad = GST_PAD_CAST (user_data);
+
+ GST_DEBUG_OBJECT (gpad, "store sticky event %" GST_PTR_FORMAT, *event);
+ gst_pad_store_sticky_event (gpad, *event);
+
+ return TRUE;
+ }
+
+ /* (Re)targets the decode ghost pad. A NULL target clears the pad's
+ * sticky events; a non-NULL target copies the target's sticky events
+ * onto the ghost pad so downstream sees a consistent event history. */
+ static void
+ decode_pad_set_target (GstDecodePad * dpad, GstPad * target)
+ {
+ gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (dpad), target);
+ if (target == NULL)
+ gst_pad_sticky_events_foreach (GST_PAD_CAST (dpad), clear_sticky_events,
+ NULL);
+ else
+ gst_pad_sticky_events_foreach (target, copy_sticky_events, dpad);
+ }
+
+ /* called when a new pad is discovered. It will perform some basic actions
+ * before trying to link something to it.
+ *
+ * - Check the caps, don't do anything when there are no caps or when they have
+ * no good type.
+ * - signal AUTOPLUG_CONTINUE to check if we need to continue autoplugging this
+ * pad.
+ * - if the caps are non-fixed, setup a handler to continue autoplugging when
+ * the caps become fixed (connect to notify::caps).
+ * - get list of factories to autoplug.
+ * - continue autoplugging to one of the factories.
+ *
+ * Reference handling: from the gst_caps_ref() below onwards this
+ * function owns one reference to @caps; every exit path either unrefs
+ * it or transfers it to chain->endcaps.
+ */
+ /* returns whether to expose the pad */
+ static gboolean
+ analyze_new_pad (GstDecodeBin * dbin, GstElement * src, GstPad * pad,
+ GstCaps * caps, GstDecodeChain * chain, GstDecodeChain ** new_chain)
+ {
+ gboolean apcontinue = TRUE;
+ GValueArray *factories = NULL, *result = NULL;
+ GstDecodePad *dpad;
+ GstElementFactory *factory;
+ const gchar *classification;
+ gboolean is_parser_converter = FALSE;
+ gboolean res;
+ gchar *deadend_details = NULL;
+
+ GST_DEBUG_OBJECT (dbin, "Pad %s:%s caps:%" GST_PTR_FORMAT,
+ GST_DEBUG_PAD_NAME (pad), caps);
+
+ if (new_chain)
+ *new_chain = chain;
+
+ /* a new pad may only come from the last element (or its capsfilter)
+ * in the chain; anything else indicates inconsistent state */
+ if (chain->elements
+ && src != ((GstDecodeElement *) chain->elements->data)->element
+ && src != ((GstDecodeElement *) chain->elements->data)->capsfilter) {
+ GST_ERROR_OBJECT (dbin, "New pad from not the last element in this chain");
+ return FALSE;
+ }
+
+ if (chain->endpad) {
+ GST_ERROR_OBJECT (dbin, "New pad in a chain that is already complete");
+ return FALSE;
+ }
+
+ if (chain->demuxer) {
+ GstDecodeGroup *group;
+ GstDecodeChain *oldchain = chain;
+ GstDecodeElement *demux = (chain->elements ? chain->elements->data : NULL);
+
+ if (chain->current_pad)
+ gst_object_unref (chain->current_pad);
+ chain->current_pad = NULL;
+
+ /* we are adding a new pad for a demuxer (see is_demuxer_element(),
+ * start a new chain for it */
+ CHAIN_MUTEX_LOCK (oldchain);
+ group = gst_decode_chain_get_current_group (chain);
+ if (group && !g_list_find (group->children, chain)) {
+ g_assert (new_chain != NULL);
+ *new_chain = chain = gst_decode_chain_new (dbin, group, pad);
+ group->children = g_list_prepend (group->children, chain);
+ }
+ CHAIN_MUTEX_UNLOCK (oldchain);
+ if (!group) {
+ GST_WARNING_OBJECT (dbin, "No current group");
+ return FALSE;
+ }
+
+ /* If this is not a dynamic pad demuxer, we're no-more-pads
+ * already before anything else happens
+ */
+ if (demux == NULL || !demux->no_more_pads_id)
+ group->no_more_pads = TRUE;
+ }
+
+ /* From here on we own a reference to the caps as
+ * we might create new caps below and would need
+ * to unref them later */
+ if (caps)
+ gst_caps_ref (caps);
+
+ if ((caps == NULL) || gst_caps_is_empty (caps))
+ goto unknown_type;
+
+ if (gst_caps_is_any (caps))
+ goto any_caps;
+
+ if (!chain->current_pad)
+ chain->current_pad = gst_decode_pad_new (dbin, chain);
+
+ dpad = gst_object_ref (chain->current_pad);
+ gst_pad_set_active (GST_PAD_CAST (dpad), TRUE);
+ decode_pad_set_target (dpad, pad);
+
+ /* 1. Emit 'autoplug-continue' the result will tell us if this pads needs
+ * further autoplugging. Only do this for fixed caps, for unfixed caps
+ * we will later come here again from the notify::caps handler. The
+ * problem with unfixed caps is that, we can't reliably tell if the output
+ * is e.g. accepted by a sink because only parts of the possible final
+ * caps might be accepted by the sink. */
+ if (gst_caps_is_fixed (caps))
+ g_signal_emit (G_OBJECT (dbin),
+ gst_decode_bin_signals[SIGNAL_AUTOPLUG_CONTINUE], 0, dpad, caps,
+ &apcontinue);
+ else
+ apcontinue = TRUE;
+
+ /* 1.a if autoplug-continue is FALSE or caps is a raw format, goto pad_is_final */
+ if ((!apcontinue) || are_final_caps (dbin, caps))
+ goto expose_pad;
+
+ /* 1.b For Parser/Converter that can output different stream formats
+ * we insert a capsfilter with the sorted caps of all possible next
+ * elements and continue with the capsfilter srcpad */
+ factory = gst_element_get_factory (src);
+ classification =
+ gst_element_factory_get_metadata (factory, GST_ELEMENT_METADATA_KLASS);
+ is_parser_converter = (strstr (classification, "Parser")
+ && strstr (classification, "Converter"));
+
+ /* 1.c when the caps are not fixed yet, we can't be sure what element to
+ * connect. We delay autoplugging until the caps are fixed */
+ if (!is_parser_converter && !gst_caps_is_fixed (caps)) {
+ goto non_fixed;
+ } else if (!is_parser_converter) {
+ gst_caps_unref (caps);
+ caps = gst_pad_get_current_caps (pad);
+ if (!caps) {
+ GST_DEBUG_OBJECT (dbin, "No final caps set yet, delaying autoplugging");
+ gst_object_unref (dpad);
+ goto setup_caps_delay;
+ }
+ }
+
+ /* 1.d else get the factories and if there's no compatible factory goto
+ * unknown_type */
+ g_signal_emit (G_OBJECT (dbin),
+ gst_decode_bin_signals[SIGNAL_AUTOPLUG_FACTORIES], 0, dpad, caps,
+ &factories);
+
+ /* NULL means that we can expose the pad */
+ if (factories == NULL)
+ goto expose_pad;
+
+ /* if the array is empty, we have a type for which we have no decoder */
+ if (factories->n_values == 0) {
+ if (!dbin->expose_allstreams) {
+ GstCaps *raw = gst_static_caps_get (&default_raw_caps);
+
+ /* If the caps are raw, this just means we don't want to expose them */
+ if (gst_caps_is_subset (caps, raw)) {
+ g_value_array_free (factories);
+ gst_caps_unref (raw);
+ gst_object_unref (dpad);
+ goto discarded_type;
+ }
+ gst_caps_unref (raw);
+ }
+
+ /* if not we have a unhandled type with no compatible factories */
+ g_value_array_free (factories);
+ gst_object_unref (dpad);
+ goto unknown_type;
+ }
+
+ /* 1.e sort some more. */
+ g_signal_emit (G_OBJECT (dbin),
+ gst_decode_bin_signals[SIGNAL_AUTOPLUG_SORT], 0, dpad, caps, factories,
+ &result);
+ if (result) {
+ g_value_array_free (factories);
+ factories = result;
+ }
+
+ /* At this point we have a potential decoder, but we might not need it
+ * if it doesn't match the output caps */
+ if (!dbin->expose_allstreams && gst_caps_is_fixed (caps)) {
+ guint i;
+ const GList *tmps;
+ gboolean dontuse = FALSE;
+
+ GST_DEBUG ("Checking if we can abort early");
+
+ /* 1.f Do an early check to see if the candidates are potential decoders, but
+ * due to the fact that they decode to a mediatype that is not final we don't
+ * need them */
+
+ for (i = 0; i < factories->n_values && !dontuse; i++) {
+ GstElementFactory *factory =
+ g_value_get_object (g_value_array_get_nth (factories, i));
+ GstCaps *tcaps;
+
+ /* We are only interested in skipping decoders */
+ if (strstr (gst_element_factory_get_metadata (factory,
+ GST_ELEMENT_METADATA_KLASS), "Decoder")) {
+
+ GST_DEBUG ("Trying factory %s",
+ gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)));
+
+ /* Check the source pad template caps to see if they match raw caps but don't match
+ * our final caps*/
+ for (tmps = gst_element_factory_get_static_pad_templates (factory);
+ tmps && !dontuse; tmps = tmps->next) {
+ GstStaticPadTemplate *st = (GstStaticPadTemplate *) tmps->data;
+ if (st->direction != GST_PAD_SRC)
+ continue;
+ tcaps = gst_static_pad_template_get_caps (st);
+
+ apcontinue = TRUE;
+
+ /* Emit autoplug-continue to see if the caps are considered to be raw caps */
+ g_signal_emit (G_OBJECT (dbin),
+ gst_decode_bin_signals[SIGNAL_AUTOPLUG_CONTINUE], 0, dpad, tcaps,
+ &apcontinue);
+
+ /* If autoplug-continue returns TRUE and the caps are not final, don't use them */
+ if (apcontinue && !are_final_caps (dbin, tcaps))
+ dontuse = TRUE;
+ gst_caps_unref (tcaps);
+ }
+ }
+ }
+
+ if (dontuse) {
+ gst_object_unref (dpad);
+ g_value_array_free (factories);
+ goto discarded_type;
+ }
+ }
+
+ /* 1.g now get the factory template caps and insert the capsfilter if this
+ * is a parser/converter
+ */
+ if (is_parser_converter) {
+ GstCaps *filter_caps;
+ gint i;
+ GstElement *capsfilter;
+ GstPad *p;
+ GstDecodeElement *delem;
+
+ filter_caps = gst_caps_new_empty ();
+ for (i = 0; i < factories->n_values; i++) {
+ GstElementFactory *factory =
+ g_value_get_object (g_value_array_get_nth (factories, i));
+ GstCaps *tcaps, *intersection;
+ const GList *tmps;
+
+ GST_DEBUG ("Trying factory %s",
+ gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)));
+
+ if (gst_element_get_factory (src) == factory ||
+ gst_element_factory_list_is_type (factory,
+ GST_ELEMENT_FACTORY_TYPE_PARSER)) {
+ GST_DEBUG ("Skipping factory");
+ continue;
+ }
+
+ for (tmps = gst_element_factory_get_static_pad_templates (factory); tmps;
+ tmps = tmps->next) {
+ GstStaticPadTemplate *st = (GstStaticPadTemplate *) tmps->data;
+ if (st->direction != GST_PAD_SINK || st->presence != GST_PAD_ALWAYS)
+ continue;
+ tcaps = gst_static_pad_template_get_caps (st);
+ intersection =
+ gst_caps_intersect_full (tcaps, caps, GST_CAPS_INTERSECT_FIRST);
+ filter_caps = gst_caps_merge (filter_caps, intersection);
+ gst_caps_unref (tcaps);
+ }
+ }
+
+ /* Append the parser caps to prevent any not-negotiated errors */
+ filter_caps = gst_caps_merge (filter_caps, gst_caps_ref (caps));
+
+ if (chain->elements) {
+ delem = (GstDecodeElement *) chain->elements->data;
+ capsfilter = delem->capsfilter =
+ gst_element_factory_make ("capsfilter", NULL);
+ } else {
+ delem = g_slice_new0 (GstDecodeElement);
+ capsfilter = delem->element =
+ gst_element_factory_make ("capsfilter", NULL);
+ delem->capsfilter = NULL;
+ chain->elements = g_list_prepend (chain->elements, delem);
+ }
+
+ g_object_set (G_OBJECT (capsfilter), "caps", filter_caps, NULL);
+ gst_caps_unref (filter_caps);
+ gst_element_set_state (capsfilter, GST_STATE_PAUSED);
+ gst_bin_add (GST_BIN_CAST (dbin), gst_object_ref (capsfilter));
+
+ decode_pad_set_target (dpad, NULL);
+ p = gst_element_get_static_pad (capsfilter, "sink");
+ gst_pad_link_full (pad, p, GST_PAD_LINK_CHECK_NOTHING);
+ gst_object_unref (p);
+ p = gst_element_get_static_pad (capsfilter, "src");
+ decode_pad_set_target (dpad, p);
+ /* from here on, continue discovery from the capsfilter srcpad;
+ * this reference is released near the end / in setup_caps_delay */
+ pad = p;
+
+ gst_caps_unref (caps);
+
+ caps = gst_pad_get_current_caps (pad);
+ if (!caps) {
+ GST_DEBUG_OBJECT (dbin, "No final caps set yet, delaying autoplugging");
+ gst_object_unref (dpad);
+ g_value_array_free (factories);
+ goto setup_caps_delay;
+ }
+ }
+
+ /* 1.h else continue autoplugging something from the list. */
+ GST_LOG_OBJECT (pad, "Let's continue discovery on this pad");
+ res =
+ connect_pad (dbin, src, dpad, pad, caps, factories, chain,
+ &deadend_details);
+
+ /* Need to unref the capsfilter srcpad here if
+ * we inserted a capsfilter */
+ if (is_parser_converter)
+ gst_object_unref (pad);
+
+ gst_object_unref (dpad);
+ g_value_array_free (factories);
+
+ if (!res)
+ goto unknown_type;
+
+ gst_caps_unref (caps);
+
+ return FALSE;
+
+ expose_pad:
+ {
+ GST_LOG_OBJECT (dbin, "Pad is final and should expose the pad. "
+ "autoplug-continue:%d", apcontinue);
+ gst_object_unref (dpad);
+ gst_caps_unref (caps);
+ return TRUE;
+ }
+
+ discarded_type:
+ {
+ GST_LOG_OBJECT (pad, "Known type, but discarded because not final caps");
+ chain->deadend = TRUE;
+ /* ownership of our caps reference moves to chain->endcaps */
+ chain->endcaps = caps;
+ gst_object_replace ((GstObject **) & chain->current_pad, NULL);
+
+ /* Try to expose anything */
+ EXPOSE_LOCK (dbin);
+ if (dbin->decode_chain) {
+ if (gst_decode_chain_is_complete (dbin->decode_chain)) {
+ gst_decode_bin_expose (dbin);
+ }
+ }
+ EXPOSE_UNLOCK (dbin);
+ do_async_done (dbin);
+
+ return FALSE;
+ }
+
+ unknown_type:
+ {
+ GST_LOG_OBJECT (pad, "Unknown type, posting message and firing signal");
+
+ chain->deadend_details = deadend_details;
+ chain->deadend = TRUE;
+ /* ownership of our caps reference moves to chain->endcaps */
+ chain->endcaps = caps;
+ gst_object_replace ((GstObject **) & chain->current_pad, NULL);
+
+ gst_element_post_message (GST_ELEMENT_CAST (dbin),
+ gst_missing_decoder_message_new (GST_ELEMENT_CAST (dbin), caps));
+
+ g_signal_emit (G_OBJECT (dbin),
+ gst_decode_bin_signals[SIGNAL_UNKNOWN_TYPE], 0, pad, caps);
+
+ /* Try to expose anything */
+ EXPOSE_LOCK (dbin);
+ if (dbin->decode_chain) {
+ if (gst_decode_chain_is_complete (dbin->decode_chain)) {
+ gst_decode_bin_expose (dbin);
+ }
+ }
+ EXPOSE_UNLOCK (dbin);
+
+ if (src == dbin->typefind) {
+ if (!caps || gst_caps_is_empty (caps)) {
+ GST_ELEMENT_ERROR (dbin, STREAM, TYPE_NOT_FOUND,
+ (_("Could not determine type of stream")), (NULL));
+ }
+ do_async_done (dbin);
+ }
+ return FALSE;
+ }
+ non_fixed:
+ {
+ GST_DEBUG_OBJECT (pad, "pad has non-fixed caps delay autoplugging");
+ gst_object_unref (dpad);
+ goto setup_caps_delay;
+ }
+ any_caps:
+ {
+ GST_DEBUG_OBJECT (pad, "pad has ANY caps, delaying auto-pluggin");
+ goto setup_caps_delay;
+ }
+ setup_caps_delay:
+ {
+ GstPendingPad *ppad;
+
+ /* connect to caps notification: retry analyze_new_pad() via
+ * caps_notify_cb once the pad's caps become usable */
+ CHAIN_MUTEX_LOCK (chain);
+ GST_LOG_OBJECT (dbin, "Chain %p has now %d dynamic pads", chain,
+ g_list_length (chain->pending_pads));
+ ppad = g_slice_new0 (GstPendingPad);
+ ppad->pad = gst_object_ref (pad);
+ ppad->chain = chain;
+ ppad->event_probe_id =
+ gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
+ pad_event_cb, ppad, NULL);
+ chain->pending_pads = g_list_prepend (chain->pending_pads, ppad);
+ ppad->notify_caps_id = g_signal_connect (pad, "notify::caps",
+ G_CALLBACK (caps_notify_cb), chain);
+ CHAIN_MUTEX_UNLOCK (chain);
+
+ /* If we're here because we have a Parser/Converter
+ * we have to unref the pad */
+ if (is_parser_converter)
+ gst_object_unref (pad);
+ if (caps)
+ gst_caps_unref (caps);
+
+ return FALSE;
+ }
+ }
+
/* add_error_filter:
 *
 * Register @element so that its error messages are intercepted (filtered)
 * while we probe it; see remove_error_filter() for retrieval of the
 * captured messages.  Protected by the bin's object lock.
 */
static void
add_error_filter (GstDecodeBin * dbin, GstElement * element)
{
  GST_OBJECT_LOCK (dbin);
  dbin->filtered = g_list_prepend (dbin->filtered, element);
  GST_OBJECT_UNLOCK (dbin);
}
+
+ static void
+ remove_error_filter (GstDecodeBin * dbin, GstElement * element,
+ GstMessage ** error)
+ {
+ GList *l;
+
+ GST_OBJECT_LOCK (dbin);
+ dbin->filtered = g_list_remove (dbin->filtered, element);
+
+ if (error)
+ *error = NULL;
+
+ l = dbin->filtered_errors;
+ while (l) {
+ GstMessage *msg = l->data;
+
+ if (GST_MESSAGE_SRC (msg) == GST_OBJECT_CAST (element)) {
+ /* Get the last error of this element, i.e. the earliest */
+ if (error)
+ gst_message_replace (error, msg);
+ gst_message_unref (msg);
+ l = dbin->filtered_errors = g_list_delete_link (dbin->filtered_errors, l);
+ } else {
+ l = l->next;
+ }
+ }
+ GST_OBJECT_UNLOCK (dbin);
+ }
+
/* State shared with the send_sticky_event() foreach callback. */
typedef struct
{
  gboolean ret;                 /* FALSE as soon as one event is refused */
  GstPad *peer;                 /* peer pad receiving the sticky events */
} SendStickyEventsData;
+
+ static gboolean
+ send_sticky_event (GstPad * pad, GstEvent ** event, gpointer user_data)
+ {
+ SendStickyEventsData *data = user_data;
+ gboolean ret;
+
+ ret = gst_pad_send_event (data->peer, gst_event_ref (*event));
+ if (!ret)
+ data->ret = FALSE;
+
+ return data->ret;
+ }
+
+ static gboolean
+ send_sticky_events (GstDecodeBin * dbin, GstPad * pad)
+ {
+ SendStickyEventsData data;
+
+ data.ret = TRUE;
+ data.peer = gst_pad_get_peer (pad);
+
+ gst_pad_sticky_events_foreach (pad, send_sticky_event, &data);
+
+ gst_object_unref (data.peer);
+
+ return data.ret;
+ }
+
+ static gchar *
+ error_message_to_string (GstMessage * msg)
+ {
+ GError *err;
+ gchar *debug, *message, *full_message;
+
+ gst_message_parse_error (msg, &err, &debug);
+
+ message = gst_error_get_message (err->domain, err->code);
+
+ if (debug)
+ full_message = g_strdup_printf ("%s\n%s\n%s", message, err->message, debug);
+ else
+ full_message = g_strdup_printf ("%s\n%s", message, err->message);
+
+ g_free (message);
+ g_free (debug);
+ g_clear_error (&err);
+
+ return full_message;
+ }
+
/* demuxer_source_pad_probe:
 *
 * Flush probe installed on a demuxer source pad (see connect_pad()).
 * When the pad belongs to the currently active group the event passes
 * through untouched.  For a non-active group, flush-start/stop events are
 * proxied into the multiqueue request pads of the active group and of all
 * other non-active groups, and the owning chain is flushed, so flushing
 * still propagates downstream of decodebin.
 */
static GstPadProbeReturn
demuxer_source_pad_probe (GstPad * pad, GstPadProbeInfo * info,
    gpointer user_data)
{
  GstEvent *event = GST_PAD_PROBE_INFO_EVENT (info);
  GstDecodeGroup *group = (GstDecodeGroup *) user_data;
  GstDecodeChain *parent_chain = group->parent;

  GST_DEBUG_OBJECT (pad, "Saw event %s", GST_EVENT_TYPE_NAME (event));
  /* Check if we are the active group, if not we need to proxy the flush
   * events to the other groups (of which at least one is exposed, ensuring
   * flushing properly propagates downstream of decodebin */
  if (parent_chain->active_group == group)
    return GST_PAD_PROBE_OK;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_START:
    case GST_EVENT_FLUSH_STOP:
    {
      GList *tmp;
      GST_DEBUG_OBJECT (pad, "Proxying flush events to inactive groups");
      /* Proxy to active group */
      for (tmp = parent_chain->active_group->reqpads; tmp; tmp = tmp->next) {
        GstPad *reqpad = (GstPad *) tmp->data;
        gst_pad_send_event (reqpad, gst_event_ref (event));
      }
      /* Proxy to other non-active groups (except ourself) */
      for (tmp = parent_chain->next_groups; tmp; tmp = tmp->next) {
        GList *tmp2;
        GstDecodeGroup *tmpgroup = (GstDecodeGroup *) tmp->data;
        if (tmpgroup != group) {
          for (tmp2 = tmpgroup->reqpads; tmp2; tmp2 = tmp2->next) {
            GstPad *reqpad = (GstPad *) tmp2->data;
            gst_pad_send_event (reqpad, gst_event_ref (event));
          }
        }
      }
      /* TRUE means "flushing" for FLUSH_START, FALSE (stop) otherwise */
      flush_chain (parent_chain,
          GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_START);
    }
      break;
    default:
      break;
  }

  return GST_PAD_PROBE_OK;
}
+
/* A pad scheduled for exposure once its element reached PAUSED
 * (collected in connect_pad()'s to_expose list). */
typedef struct
{
  GstDecodeChain *chain;        /* chain that will expose the pad */
  GstPad *pad;                  /* pad to expose (holds a ref) */
} PadExposeData;
+
/* connect_pad:
 *
 * Try to connect the given pad to an element created from one of the factories,
 * and recursively.
 *
 * Note that dpad is ghosting pad, and so pad is linked; be sure to unset dpad's
 * target before trying to link pad.
 *
 * On failure, *deadend_details receives a newly-allocated string describing
 * why every candidate factory was rejected (or NULL if nothing was tried);
 * on success it is set to NULL.
 *
 * Returns TRUE if an element was properly created and linked
 */
static gboolean
connect_pad (GstDecodeBin * dbin, GstElement * src, GstDecodePad * dpad,
    GstPad * pad, GstCaps * caps, GValueArray * factories,
    GstDecodeChain * chain, gchar ** deadend_details)
{
  gboolean res = FALSE;
  GstPad *mqpad = NULL;
  gboolean is_demuxer = chain->parent && !chain->elements;      /* First pad after the demuxer */
  GString *error_details = NULL;

  g_return_val_if_fail (factories != NULL, FALSE);
  g_return_val_if_fail (factories->n_values > 0, FALSE);

  GST_DEBUG_OBJECT (dbin,
      "pad %s:%s , chain:%p, %d factories, caps %" GST_PTR_FORMAT,
      GST_DEBUG_PAD_NAME (pad), chain, factories->n_values, caps);

  /* 1. is element demuxer or parser */
  if (is_demuxer) {
    GST_LOG_OBJECT (src,
        "is a demuxer, connecting the pad through multiqueue '%s'",
        GST_OBJECT_NAME (chain->parent->multiqueue));

    /* Set a flush-start/-stop probe on the downstream events */
    chain->pad_probe_id =
        gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_EVENT_FLUSH,
        demuxer_source_pad_probe, chain->parent, NULL);

    decode_pad_set_target (dpad, NULL);
    if (!(mqpad = gst_decode_group_control_demuxer_pad (chain->parent, pad)))
      goto beach;
    src = chain->parent->multiqueue;
    /* Forward sticky events to mq src pad to allow factory initialization */
    gst_pad_sticky_events_foreach (pad, copy_sticky_events, mqpad);
    pad = mqpad;
    decode_pad_set_target (dpad, pad);
  }

  error_details = g_string_new ("");

  /* 2. Try to create an element and link to it */
  while (factories->n_values > 0) {
    GstAutoplugSelectResult ret;
    GstElementFactory *factory;
    GstDecodeElement *delem;
    GstElement *element;
    GstPad *sinkpad;
    GParamSpec *pspec;
    gboolean subtitle;
    GList *to_connect = NULL;
    GList *to_expose = NULL;
    gboolean is_parser = FALSE;
    gboolean is_decoder = FALSE;

    /* Set dpad target to pad again, it might've been unset
     * below but we came back here because something failed
     */
    decode_pad_set_target (dpad, pad);

    /* take first factory */
    factory = g_value_get_object (g_value_array_get_nth (factories, 0));
    /* Remove selected factory from the list. */
    g_value_array_remove (factories, 0);

    GST_LOG_OBJECT (src, "trying factory %" GST_PTR_FORMAT, factory);

    /* Check if the caps are really supported by the factory. The
     * factory list is non-empty-subset filtered while caps
     * are only accepted by a pad if they are a subset of the
     * pad caps.
     *
     * FIXME: Only do this for fixed caps here. Non-fixed caps
     * can happen if a Parser/Converter was autoplugged before
     * this. We then assume that it will be able to convert to
     * everything that the decoder would want.
     *
     * A subset check will fail here because the parser caps
     * will be generic and while the decoder will only
     * support a subset of the parser caps.
     */
    if (gst_caps_is_fixed (caps)) {
      const GList *templs;
      gboolean skip = FALSE;

      templs = gst_element_factory_get_static_pad_templates (factory);

      while (templs) {
        GstStaticPadTemplate *templ = (GstStaticPadTemplate *) templs->data;

        if (templ->direction == GST_PAD_SINK) {
          GstCaps *templcaps = gst_static_caps_get (&templ->static_caps);

          if (!gst_caps_is_subset (caps, templcaps)) {
            GST_DEBUG_OBJECT (src,
                "caps %" GST_PTR_FORMAT " not subset of %" GST_PTR_FORMAT, caps,
                templcaps);
            gst_caps_unref (templcaps);
            skip = TRUE;
            break;
          }

          gst_caps_unref (templcaps);
        }
        templs = g_list_next (templs);
      }
      if (skip)
        continue;
    }

    /* If the factory is for a parser we first check if the factory
     * was already used for the current chain. If it was used already
     * we would otherwise create an infinite loop here because the
     * parser apparently accepts its own output as input.
     * This is only done for parsers because it's perfectly valid
     * to have other element classes after each other because a
     * parser is the only one that does not change the data. A
     * valid example for this would be multiple id3demux in a row.
     */
    is_parser = strstr (gst_element_factory_get_metadata (factory,
            GST_ELEMENT_METADATA_KLASS), "Parser") != NULL;

    if (is_parser) {
      gboolean skip = FALSE;
      GList *l;

      CHAIN_MUTEX_LOCK (chain);
      for (l = chain->elements; l; l = l->next) {
        /* NOTE(review): this inner 'delem' shadows the outer loop variable
         * of the same name (-Wshadow); intentional scoping, but easy to
         * misread. */
        GstDecodeElement *delem = (GstDecodeElement *) l->data;
        GstElement *otherelement = delem->element;

        if (gst_element_get_factory (otherelement) == factory) {
          skip = TRUE;
          break;
        }
      }

      if (!skip && chain->parent && chain->parent->parent) {
        GstDecodeChain *parent_chain = chain->parent->parent;
        GstDecodeElement *pelem =
            parent_chain->elements ? parent_chain->elements->data : NULL;

        if (pelem && gst_element_get_factory (pelem->element) == factory)
          skip = TRUE;
      }
      CHAIN_MUTEX_UNLOCK (chain);
      if (skip) {
        GST_DEBUG_OBJECT (dbin,
            "Skipping factory '%s' because it was already used in this chain",
            gst_plugin_feature_get_name (GST_PLUGIN_FEATURE_CAST (factory)));
        continue;
      }

    }

    /* emit autoplug-select to see what we should do with it. */
    g_signal_emit (G_OBJECT (dbin),
        gst_decode_bin_signals[SIGNAL_AUTOPLUG_SELECT],
        0, dpad, caps, factory, &ret);

    switch (ret) {
      case GST_AUTOPLUG_SELECT_TRY:
        GST_DEBUG_OBJECT (dbin, "autoplug select requested try");
        break;
      case GST_AUTOPLUG_SELECT_EXPOSE:
        GST_DEBUG_OBJECT (dbin, "autoplug select requested expose");
        /* expose the pad, we don't have the source element */
        expose_pad (dbin, src, dpad, pad, caps, chain);
        res = TRUE;
        goto beach;
      case GST_AUTOPLUG_SELECT_SKIP:
        GST_DEBUG_OBJECT (dbin, "autoplug select requested skip");
        continue;
      default:
        GST_WARNING_OBJECT (dbin, "autoplug select returned unhandled %d", ret);
        break;
    }

    /* 2.0. Unlink pad */
    decode_pad_set_target (dpad, NULL);

    /* 2.1. Try to create an element */
    if ((element = gst_element_factory_create (factory, NULL)) == NULL) {
      GST_WARNING_OBJECT (dbin, "Could not create an element from %s",
          gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)));
      g_string_append_printf (error_details,
          "Could not create an element from %s\n",
          gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)));
      continue;
    }

    /* Filter errors, this will prevent the element from causing the pipeline
     * to error while we test it using READY state. */
    add_error_filter (dbin, element);

    /* We don't yet want the bin to control the element's state */
    gst_element_set_locked_state (element, TRUE);

    /* ... add it ... */
    if (!(gst_bin_add (GST_BIN_CAST (dbin), element))) {
      GST_WARNING_OBJECT (dbin, "Couldn't add %s to the bin",
          GST_ELEMENT_NAME (element));
      remove_error_filter (dbin, element, NULL);
      g_string_append_printf (error_details, "Couldn't add %s to the bin\n",
          GST_ELEMENT_NAME (element));
      gst_object_unref (element);
      continue;
    }

    /* Find its sink pad. */
    if (!(sinkpad = find_sink_pad (element))) {
      GST_WARNING_OBJECT (dbin, "Element %s doesn't have a sink pad",
          GST_ELEMENT_NAME (element));
      remove_error_filter (dbin, element, NULL);
      g_string_append_printf (error_details,
          "Element %s doesn't have a sink pad", GST_ELEMENT_NAME (element));
      gst_bin_remove (GST_BIN (dbin), element);
      continue;
    }

    /* ... and try to link */
    if ((gst_pad_link_full (pad, sinkpad,
                GST_PAD_LINK_CHECK_NOTHING)) != GST_PAD_LINK_OK) {
      GST_WARNING_OBJECT (dbin, "Link failed on pad %s:%s",
          GST_DEBUG_PAD_NAME (sinkpad));
      remove_error_filter (dbin, element, NULL);
      g_string_append_printf (error_details, "Link failed on pad %s:%s",
          GST_DEBUG_PAD_NAME (sinkpad));
      gst_object_unref (sinkpad);
      gst_bin_remove (GST_BIN (dbin), element);
      continue;
    }

    /* ... activate it ... */
    if ((gst_element_set_state (element,
                GST_STATE_READY)) == GST_STATE_CHANGE_FAILURE) {
      GstMessage *error_msg;

      GST_WARNING_OBJECT (dbin, "Couldn't set %s to READY",
          GST_ELEMENT_NAME (element));
      remove_error_filter (dbin, element, &error_msg);

      if (error_msg) {
        gchar *error_string = error_message_to_string (error_msg);
        g_string_append_printf (error_details, "Couldn't set %s to READY:\n%s",
            GST_ELEMENT_NAME (element), error_string);
        gst_message_unref (error_msg);
        g_free (error_string);
      } else {
        g_string_append_printf (error_details, "Couldn't set %s to READY",
            GST_ELEMENT_NAME (element));
      }
      gst_object_unref (sinkpad);
      gst_bin_remove (GST_BIN (dbin), element);
      continue;
    }

    /* check if we still accept the caps on the pad after setting
     * the element to READY */
    if (!gst_pad_query_accept_caps (sinkpad, caps)) {
      GstMessage *error_msg;

      GST_WARNING_OBJECT (dbin, "Element %s does not accept caps",
          GST_ELEMENT_NAME (element));

      remove_error_filter (dbin, element, &error_msg);

      if (error_msg) {
        gchar *error_string = error_message_to_string (error_msg);
        g_string_append_printf (error_details,
            "Element %s does not accept caps:\n%s", GST_ELEMENT_NAME (element),
            error_string);
        gst_message_unref (error_msg);
        g_free (error_string);
      } else {
        g_string_append_printf (error_details,
            "Element %s does not accept caps", GST_ELEMENT_NAME (element));
      }

      gst_element_set_state (element, GST_STATE_NULL);
      gst_object_unref (sinkpad);
      gst_bin_remove (GST_BIN (dbin), element);
      continue;
    }

    /* Drop our sinkpad ref; the pad stays alive because the element holds
     * it, and we only use it below for the stream lock. */
    gst_object_unref (sinkpad);
    GST_LOG_OBJECT (dbin, "linked on pad %s:%s", GST_DEBUG_PAD_NAME (pad));

    CHAIN_MUTEX_LOCK (chain);
    delem = g_slice_new0 (GstDecodeElement);
    delem->element = gst_object_ref (element);
    delem->capsfilter = NULL;
    chain->elements = g_list_prepend (chain->elements, delem);
    chain->demuxer = is_demuxer_element (element);
    chain->adaptive_demuxer = is_adaptive_demuxer_element (element);

    is_decoder = strstr (gst_element_factory_get_metadata (factory,
            GST_ELEMENT_METADATA_KLASS), "Decoder") != NULL;

    /* For adaptive streaming demuxer we insert a multiqueue after
     * this demuxer.
     * Now for the case where we have a container stream inside these
     * buffers, another demuxer will be plugged and after this second
     * demuxer there will be a second multiqueue. This second multiqueue
     * will get smaller buffers and will be the one emitting buffering
     * messages.
     * If we don't have a container stream inside the fragment buffers,
     * we'll insert a multiqueue below right after the next element after
     * the adaptive streaming demuxer. This is going to be a parser or
     * decoder, and will output smaller buffers.
     */
    if (chain->parent && chain->parent->parent) {
      GstDecodeChain *parent_chain = chain->parent->parent;

      if (parent_chain->adaptive_demuxer && (is_parser || is_decoder))
        chain->demuxer = TRUE;
    }

    /* If we are configured to use buffering and there is no demuxer in the
     * chain, we still want a multiqueue, otherwise we will ignore the
     * use-buffering property. In that case, we will insert a multiqueue after
     * the parser or decoder - not elsewhere, otherwise we won't have
     * timestamps.
     */

    if (!chain->parent && (is_parser || is_decoder) && dbin->use_buffering) {
      chain->demuxer = TRUE;
      if (is_decoder) {
        GST_WARNING_OBJECT (dbin,
            "Buffering messages used for decoded and non-parsed data");
      }
    }

    CHAIN_MUTEX_UNLOCK (chain);

    /* Set connection-speed property if needed */
    if (chain->demuxer) {
      /* NOTE(review): this inner 'pspec' shadows the outer 'pspec' used for
       * the subtitle-encoding lookup further down (-Wshadow). */
      GParamSpec *pspec;

      if ((pspec = g_object_class_find_property (G_OBJECT_GET_CLASS (element),
                  "connection-speed"))) {
        guint64 speed = dbin->connection_speed / 1000;
        gboolean wrong_type = FALSE;

        /* Clamp the configured speed into the property's declared range,
         * whatever integer flavour the demuxer chose for it. */
        if (G_PARAM_SPEC_TYPE (pspec) == G_TYPE_PARAM_UINT) {
          GParamSpecUInt *pspecuint = G_PARAM_SPEC_UINT (pspec);

          speed = CLAMP (speed, pspecuint->minimum, pspecuint->maximum);
        } else if (G_PARAM_SPEC_TYPE (pspec) == G_TYPE_PARAM_INT) {
          GParamSpecInt *pspecint = G_PARAM_SPEC_INT (pspec);

          speed = CLAMP (speed, pspecint->minimum, pspecint->maximum);
        } else if (G_PARAM_SPEC_TYPE (pspec) == G_TYPE_PARAM_UINT64) {
          GParamSpecUInt64 *pspecuint = G_PARAM_SPEC_UINT64 (pspec);

          speed = CLAMP (speed, pspecuint->minimum, pspecuint->maximum);
        } else if (G_PARAM_SPEC_TYPE (pspec) == G_TYPE_PARAM_INT64) {
          GParamSpecInt64 *pspecint = G_PARAM_SPEC_INT64 (pspec);

          speed = CLAMP (speed, pspecint->minimum, pspecint->maximum);
        } else {
          GST_WARNING_OBJECT (dbin,
              "The connection speed property %" G_GUINT64_FORMAT " of type %s"
              " is not useful not setting it", speed,
              g_type_name (G_PARAM_SPEC_TYPE (pspec)));
          wrong_type = TRUE;
        }

        if (!wrong_type) {
          GST_DEBUG_OBJECT (dbin, "setting connection-speed=%" G_GUINT64_FORMAT
              " to demuxer element", speed);

          g_object_set (element, "connection-speed", speed, NULL);
        }
      }
    }

    /* try to configure the subtitle encoding property when we can */
    pspec = g_object_class_find_property (G_OBJECT_GET_CLASS (element),
        "subtitle-encoding");
    if (pspec && G_PARAM_SPEC_VALUE_TYPE (pspec) == G_TYPE_STRING) {
      SUBTITLE_LOCK (dbin);
      GST_DEBUG_OBJECT (dbin,
          "setting subtitle-encoding=%s to element", dbin->encoding);
      g_object_set (G_OBJECT (element), "subtitle-encoding", dbin->encoding,
          NULL);
      SUBTITLE_UNLOCK (dbin);
      subtitle = TRUE;
    } else {
      subtitle = FALSE;
    }

    /* link this element further */
    to_connect = connect_element (dbin, delem, chain);

    while (to_connect) {
      GstPad *opad = to_connect->data;
      gboolean expose_pad = FALSE;
      GstDecodeChain *new_chain;
      GstCaps *ocaps;

      ocaps = get_pad_caps (opad);
      expose_pad =
          analyze_new_pad (dbin, delem->element, opad, ocaps, chain,
          &new_chain);

      if (ocaps)
        gst_caps_unref (ocaps);

      if (expose_pad) {
        PadExposeData *expose_data = g_new0 (PadExposeData, 1);
        expose_data->chain = new_chain;
        expose_data->pad = gst_object_ref (opad);
        to_expose = g_list_prepend (to_expose, expose_data);
      }

      gst_object_unref (opad);
      to_connect = g_list_delete_link (to_connect, to_connect);
    }
    /* any pads left in to_expose are to be exposed */

    /* Bring the element to the state of the parent */

    /* First lock element's sinkpad stream lock so no data reaches
     * the possible new element added when caps are sent by element
     * while we're still sending sticky events */
    GST_PAD_STREAM_LOCK (sinkpad);

    if ((gst_element_set_state (element,
                GST_STATE_PAUSED)) == GST_STATE_CHANGE_FAILURE ||
        !send_sticky_events (dbin, pad)) {
      GstDecodeElement *dtmp = NULL;
      GstElement *tmp = NULL;
      GstMessage *error_msg;

      GST_PAD_STREAM_UNLOCK (sinkpad);

      GST_WARNING_OBJECT (dbin, "Couldn't set %s to PAUSED",
          GST_ELEMENT_NAME (element));

      /* Drop the pads we had scheduled for exposure */
      while (to_expose) {
        PadExposeData *expose_data = to_expose->data;
        gst_object_unref (expose_data->pad);
        g_free (expose_data);
        to_expose = g_list_delete_link (to_expose, to_expose);
      }

      remove_error_filter (dbin, element, &error_msg);

      if (error_msg) {
        gchar *error_string = error_message_to_string (error_msg);
        g_string_append_printf (error_details, "Couldn't set %s to PAUSED:\n%s",
            GST_ELEMENT_NAME (element), error_string);
        gst_message_unref (error_msg);
        g_free (error_string);
      } else {
        g_string_append_printf (error_details, "Couldn't set %s to PAUSED",
            GST_ELEMENT_NAME (element));
      }

      /* Remove all elements in this chain that were just added. No
       * other thread could've added elements in the meantime */
      CHAIN_MUTEX_LOCK (chain);
      do {
        GList *l;

        dtmp = chain->elements->data;
        tmp = dtmp->element;

        /* Disconnect any signal handlers that might be connected
         * in connect_element() or analyze_pad() */
        if (dtmp->pad_added_id)
          g_signal_handler_disconnect (tmp, dtmp->pad_added_id);
        if (dtmp->pad_removed_id)
          g_signal_handler_disconnect (tmp, dtmp->pad_removed_id);
        if (dtmp->no_more_pads_id)
          g_signal_handler_disconnect (tmp, dtmp->no_more_pads_id);

        for (l = chain->pending_pads; l;) {
          GstPendingPad *pp = l->data;
          GList *n;

          if (GST_PAD_PARENT (pp->pad) != tmp) {
            l = l->next;
            continue;
          }

          gst_pending_pad_free (pp);

          /* Remove element from the list, update list head and go to the
           * next element in the list */
          n = l->next;
          chain->pending_pads = g_list_delete_link (chain->pending_pads, l);
          l = n;
        }

        if (dtmp->capsfilter) {
          gst_bin_remove (GST_BIN (dbin), dtmp->capsfilter);
          gst_element_set_state (dtmp->capsfilter, GST_STATE_NULL);
          gst_object_unref (dtmp->capsfilter);
        }

        gst_bin_remove (GST_BIN (dbin), tmp);
        gst_element_set_state (tmp, GST_STATE_NULL);

        gst_object_unref (tmp);
        g_slice_free (GstDecodeElement, dtmp);

        chain->elements = g_list_delete_link (chain->elements, chain->elements);
      } while (tmp != element);
      CHAIN_MUTEX_UNLOCK (chain);

      continue;
    } else {
      /* Everything went well, the spice must flow now */
      GST_PAD_STREAM_UNLOCK (sinkpad);
    }

    /* Remove error filter now, from now on we can't gracefully
     * handle errors of the element anymore */
    remove_error_filter (dbin, element, NULL);

    /* Now let the bin handle the state */
    gst_element_set_locked_state (element, FALSE);

    if (subtitle) {
      SUBTITLE_LOCK (dbin);
      /* we added the element now, add it to the list of subtitle-encoding
       * elements when we can set the property */
      dbin->subtitles = g_list_prepend (dbin->subtitles, element);
      SUBTITLE_UNLOCK (dbin);
    }

    while (to_expose) {
      PadExposeData *expose_data = to_expose->data;
      GstCaps *ocaps;

      ocaps = get_pad_caps (expose_data->pad);
      expose_pad (dbin, delem->element, expose_data->chain->current_pad,
          expose_data->pad, ocaps, expose_data->chain);

      if (ocaps)
        gst_caps_unref (ocaps);

      gst_object_unref (expose_data->pad);
      g_free (expose_data);
      to_expose = g_list_delete_link (to_expose, to_expose);
    }

    res = TRUE;
    break;
  }

beach:
  if (mqpad)
    gst_object_unref (mqpad);

  /* g_string_free with free_segment=TRUE returns NULL, so the caller gets
   * the accumulated error text only when we failed AND something was
   * recorded; NULL otherwise. */
  if (error_details)
    *deadend_details = g_string_free (error_details, (error_details->len == 0
            || res));
  else
    *deadend_details = NULL;

  return res;
}
+
+ static GstCaps *
+ get_pad_caps (GstPad * pad)
+ {
+ GstCaps *caps;
+
+ /* first check the pad caps, if this is set, we are positively sure it is
+ * fixed and exactly what the element will produce. */
+ caps = gst_pad_get_current_caps (pad);
+
+ /* then use the getcaps function if we don't have caps. These caps might not
+ * be fixed in some cases, in which case analyze_new_pad will set up a
+ * notify::caps signal to continue autoplugging. */
+ if (caps == NULL)
+ caps = gst_pad_query_caps (pad, NULL);
+
+ return caps;
+ }
+
+ /* Returns a list of pads that can be connected to already and
+ * connects to pad-added and related signals */
+ static GList *
+ connect_element (GstDecodeBin * dbin, GstDecodeElement * delem,
+ GstDecodeChain * chain)
+ {
+ GstElement *element = delem->element;
+ GList *pads;
+ gboolean dynamic = FALSE;
+ GList *to_connect = NULL;
+
+ GST_DEBUG_OBJECT (dbin, "Attempting to connect element %s [chain:%p] further",
+ GST_ELEMENT_NAME (element), chain);
+
+ /* 1. Loop over pad templates, grabbing existing pads along the way */
+ for (pads = GST_ELEMENT_GET_CLASS (element)->padtemplates; pads;
+ pads = g_list_next (pads)) {
+ GstPadTemplate *templ = GST_PAD_TEMPLATE (pads->data);
+ const gchar *templ_name;
+
+ /* we are only interested in source pads */
+ if (GST_PAD_TEMPLATE_DIRECTION (templ) != GST_PAD_SRC)
+ continue;
+
+ templ_name = GST_PAD_TEMPLATE_NAME_TEMPLATE (templ);
+ GST_DEBUG_OBJECT (dbin, "got a source pad template %s", templ_name);
+
+ /* figure out what kind of pad this is */
+ switch (GST_PAD_TEMPLATE_PRESENCE (templ)) {
+ case GST_PAD_ALWAYS:
+ {
+ /* get the pad that we need to autoplug */
+ GstPad *pad = gst_element_get_static_pad (element, templ_name);
+
+ if (pad) {
+ GST_DEBUG_OBJECT (dbin, "got the pad for always template %s",
+ templ_name);
+ /* here is the pad, we need to autoplug it */
+ to_connect = g_list_prepend (to_connect, pad);
+ } else {
+ /* strange, pad is marked as always but it's not
+ * there. Fix the element */
+ GST_WARNING_OBJECT (dbin,
+ "could not get the pad for always template %s", templ_name);
+ }
+ break;
+ }
+ case GST_PAD_SOMETIMES:
+ {
+ /* try to get the pad to see if it is already created or
+ * not */
+ GstPad *pad = gst_element_get_static_pad (element, templ_name);
+
+ if (pad) {
+ GST_DEBUG_OBJECT (dbin, "got the pad for sometimes template %s",
+ templ_name);
+ /* the pad is created, we need to autoplug it */
+ to_connect = g_list_prepend (to_connect, pad);
+ } else {
+ GST_DEBUG_OBJECT (dbin,
+ "did not get the sometimes pad of template %s", templ_name);
+ /* we have an element that will create dynamic pads */
+ dynamic = TRUE;
+ }
+ break;
+ }
+ case GST_PAD_REQUEST:
+ /* ignore request pads */
+ GST_DEBUG_OBJECT (dbin, "ignoring request padtemplate %s", templ_name);
+ break;
+ }
+ }
+
+ /* 2. if there are more potential pads, connect to relevant signals */
+ if (dynamic) {
+ GST_LOG_OBJECT (dbin, "Adding signals to element %s in chain %p",
+ GST_ELEMENT_NAME (element), chain);
+ delem->pad_added_id = g_signal_connect (element, "pad-added",
+ G_CALLBACK (pad_added_cb), chain);
+ delem->pad_removed_id = g_signal_connect (element, "pad-removed",
+ G_CALLBACK (pad_removed_cb), chain);
+ delem->no_more_pads_id = g_signal_connect (element, "no-more-pads",
+ G_CALLBACK (no_more_pads_cb), chain);
+ }
+
+ /* 3. return all pads that can be connected to already */
+
+ return to_connect;
+ }
+
+ /* expose_pad:
+ *
+ * Expose the given pad on the chain as a decoded pad.
+ */
+ static void
+ expose_pad (GstDecodeBin * dbin, GstElement * src, GstDecodePad * dpad,
+ GstPad * pad, GstCaps * caps, GstDecodeChain * chain)
+ {
+ GstPad *mqpad = NULL;
+
+ GST_DEBUG_OBJECT (dbin, "pad %s:%s, chain:%p",
+ GST_DEBUG_PAD_NAME (pad), chain);
+
+ /* If this is the first pad for this chain, there are no other elements
+ * and the source element is not the multiqueue we must link through the
+ * multiqueue.
+ *
+ * This is the case if a demuxer directly exposed a raw pad.
+ */
+ if (chain->parent && !chain->elements && src != chain->parent->multiqueue) {
+ GST_LOG_OBJECT (src, "connecting the pad through multiqueue");
+
+ decode_pad_set_target (dpad, NULL);
+ if (!(mqpad = gst_decode_group_control_demuxer_pad (chain->parent, pad)))
+ goto beach;
+ pad = mqpad;
+ decode_pad_set_target (dpad, pad);
+ }
+
+ gst_decode_pad_activate (dpad, chain);
+ chain->endpad = gst_object_ref (dpad);
+ chain->endcaps = gst_caps_ref (caps);
+
+ EXPOSE_LOCK (dbin);
+ if (dbin->decode_chain) {
+ if (gst_decode_chain_is_complete (dbin->decode_chain)) {
+ gst_decode_bin_expose (dbin);
+ }
+ }
+ EXPOSE_UNLOCK (dbin);
+
+ if (mqpad)
+ gst_object_unref (mqpad);
+
+ beach:
+ return;
+ }
+
+ /* check_upstream_seekable:
+ *
+ * Check if upstream is seekable.
+ */
+ static gboolean
+ check_upstream_seekable (GstDecodeBin * dbin, GstPad * pad)
+ {
+ GstQuery *query;
+ gint64 start = -1, stop = -1;
+ gboolean seekable = FALSE;
+
+ query = gst_query_new_seeking (GST_FORMAT_BYTES);
+ if (!gst_pad_peer_query (pad, query)) {
+ GST_DEBUG_OBJECT (dbin, "seeking query failed");
+ goto done;
+ }
+
+ gst_query_parse_seeking (query, NULL, &seekable, &start, &stop);
+
+ /* try harder to query upstream size if we didn't get it the first time */
+ if (seekable && stop == -1) {
+ GST_DEBUG_OBJECT (dbin, "doing duration query to fix up unset stop");
+ gst_pad_peer_query_duration (pad, GST_FORMAT_BYTES, &stop);
+ }
+
+ /* if upstream doesn't know the size, it's likely that it's not seekable in
+ * practice even if it technically may be seekable */
+ if (seekable && (start != 0 || stop <= start)) {
+ GST_DEBUG_OBJECT (dbin, "seekable but unknown start/stop -> disable");
+ seekable = FALSE;
+ } else {
+ GST_DEBUG_OBJECT (dbin, "upstream seekable: %d", seekable);
+ }
+
+ done:
+ gst_query_unref (query);
+ return seekable;
+ }
+
+ static void
+ type_found (GstElement * typefind, guint probability,
+ GstCaps * caps, GstDecodeBin * decode_bin)
+ {
+ GstPad *pad, *sink_pad;
+ GstDecodeChain *chain;
+
+ GST_DEBUG_OBJECT (decode_bin, "typefind found caps %" GST_PTR_FORMAT, caps);
+
+ /* If the typefinder (but not something else) finds text/plain - i.e. that's
+ * the top-level type of the file - then error out.
+ */
+ if (gst_structure_has_name (gst_caps_get_structure (caps, 0), "text/plain")) {
+ GST_ELEMENT_ERROR (decode_bin, STREAM, WRONG_TYPE,
+ (_("This appears to be a text file")),
+ ("decodebin cannot decode plain text files"));
+ goto exit;
+ }
+
+ pad = gst_element_get_static_pad (typefind, "src");
+ sink_pad = gst_element_get_static_pad (typefind, "sink");
+
+ /* need some lock here to prevent race with shutdown state change
+ * which might yank away e.g. decode_chain while building stuff here.
+ * In typical cases, STREAM_LOCK is held and handles that, it need not
+ * be held (if called from a proxied setcaps), so grab it anyway */
+ GST_PAD_STREAM_LOCK (sink_pad);
+ /* FIXME: we can only deal with one type, we don't yet support dynamically changing
+ * caps from the typefind element */
+ if (decode_bin->have_type || decode_bin->decode_chain) {
+ } else {
+ decode_bin->have_type = TRUE;
+
+ decode_bin->decode_chain = gst_decode_chain_new (decode_bin, NULL, pad);
+ chain = gst_decode_chain_ref (decode_bin->decode_chain);
+
+ if (analyze_new_pad (decode_bin, typefind, pad, caps,
+ decode_bin->decode_chain, NULL))
+ expose_pad (decode_bin, typefind, decode_bin->decode_chain->current_pad,
+ pad, caps, decode_bin->decode_chain);
+
+ gst_decode_chain_unref (chain);
+ }
+
+ GST_PAD_STREAM_UNLOCK (sink_pad);
+ gst_object_unref (sink_pad);
+ gst_object_unref (pad);
+
+ exit:
+ return;
+ }
+
+ static GstPadProbeReturn
+ pad_event_cb (GstPad * pad, GstPadProbeInfo * info, gpointer data)
+ {
+ GstEvent *event = GST_PAD_PROBE_INFO_EVENT (info);
+ GstPendingPad *ppad = (GstPendingPad *) data;
+ GstDecodeChain *chain = ppad->chain;
+ GstDecodeBin *dbin = chain->dbin;
+
+ g_assert (ppad);
+ g_assert (chain);
+ g_assert (dbin);
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_EOS:
+ GST_DEBUG_OBJECT (pad, "Received EOS on a non final pad, this stream "
+ "ended too early");
+ chain->deadend = TRUE;
+ chain->drained = TRUE;
+ gst_object_replace ((GstObject **) & chain->current_pad, NULL);
+ /* we don't set the endcaps because NULL endcaps means early EOS */
+
+ EXPOSE_LOCK (dbin);
+ if (dbin->decode_chain)
+ if (gst_decode_chain_is_complete (dbin->decode_chain))
+ gst_decode_bin_expose (dbin);
+ EXPOSE_UNLOCK (dbin);
+ break;
+ default:
+ break;
+ }
+ return GST_PAD_PROBE_OK;
+ }
+
+ static void
+ pad_added_cb (GstElement * element, GstPad * pad, GstDecodeChain * chain)
+ {
+ GstCaps *caps;
+ GstDecodeBin *dbin;
+ GstDecodeChain *new_chain;
+
+ dbin = chain->dbin;
+
+ GST_DEBUG_OBJECT (pad, "pad added, chain:%p", chain);
+ GST_PAD_STREAM_LOCK (pad);
+ if (!gst_pad_is_active (pad)) {
+ GST_PAD_STREAM_UNLOCK (pad);
+ GST_DEBUG_OBJECT (pad, "Ignoring pad-added from a deactivated pad");
+ return;
+ }
+
+ caps = get_pad_caps (pad);
+ if (analyze_new_pad (dbin, element, pad, caps, chain, &new_chain))
+ expose_pad (dbin, element, new_chain->current_pad, pad, caps, new_chain);
+ if (caps)
+ gst_caps_unref (caps);
+
+ GST_PAD_STREAM_UNLOCK (pad);
+ }
+
/* Upstream event probe on a group's multiqueue sink pad.
 *
 * While the pad is still linked, the event simply continues (PROBE_OK).  If we
 * are unlinked (the demuxer pad feeding us disappeared, e.g. during a chained
 * stream switch), try to re-route the event through a request pad of the
 * newest pending group so that upstream still receives it. */
static GstPadProbeReturn
sink_pad_event_probe (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
  GstDecodeGroup *group = (GstDecodeGroup *) user_data;
  GstEvent *event = GST_PAD_PROBE_INFO_EVENT (info);
  GstPad *peer = gst_pad_get_peer (pad);
  GstPadProbeReturn proberet = GST_PAD_PROBE_OK;

  GST_DEBUG_OBJECT (pad, "Got upstream event %s", GST_EVENT_TYPE_NAME (event));

  if (peer == NULL) {
    GST_DEBUG_OBJECT (pad, "We are unlinked !");
    if (group->parent && group->parent->next_groups) {
      /* Pick the newest (last) pending group of the parent chain */
      GstDecodeGroup *last_group =
          g_list_last (group->parent->next_groups)->data;
      GST_DEBUG_OBJECT (pad, "We could send the event to another group (%p)",
          last_group);
      /* Grab another sinkpad for that last group through which we will forward this event */
      if (last_group->reqpads) {
        GstPad *sinkpad = (GstPad *) last_group->reqpads->data;
        GstPad *otherpeer = gst_pad_get_peer (sinkpad);
        if (otherpeer) {
          GST_DEBUG_OBJECT (otherpeer, "Attempting to forward event");
          if (gst_pad_send_event (otherpeer, gst_event_ref (event))) {
            /* Delivered through the other group: drop our reference and tell
             * the probe machinery the event was handled here */
            gst_event_unref (event);
            proberet = GST_PAD_PROBE_HANDLED;
          }
          gst_object_unref (otherpeer);
        }
      } else {
        GST_DEBUG_OBJECT (pad, "No request pads, can't forward event");
      }
    }
  } else {
    gst_object_unref (peer);
  }

  return proberet;
}
+
/* Upstream query probe on a group's multiqueue sink pad.
 *
 * Mirror of sink_pad_event_probe for queries: if we are unlinked, attempt to
 * answer the query through a request pad of the newest pending group.  A
 * successful forwarded query is reported as PROBE_HANDLED so the result is
 * used as-is. */
static GstPadProbeReturn
sink_pad_query_probe (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
  GstDecodeGroup *group = (GstDecodeGroup *) user_data;
  GstPad *peer = gst_pad_get_peer (pad);
  GstQuery *query = GST_PAD_PROBE_INFO_QUERY (info);
  GstPadProbeReturn proberet = GST_PAD_PROBE_OK;

  GST_DEBUG_OBJECT (pad, "Got upstream query %s", GST_QUERY_TYPE_NAME (query));

  if (peer == NULL) {
    GST_DEBUG_OBJECT (pad, "We are unlinked !");
    if (group->parent && group->parent->next_groups) {
      /* Pick the newest (last) pending group of the parent chain */
      GstDecodeGroup *last_group =
          g_list_last (group->parent->next_groups)->data;
      GST_DEBUG_OBJECT (pad, "We could send the query to another group");
      /* Grab another sinkpad for that last group through which we will forward this event */
      if (last_group->reqpads) {
        GstPad *sinkpad = (GstPad *) last_group->reqpads->data;
        GstPad *otherpeer = gst_pad_get_peer (sinkpad);
        if (otherpeer) {
          GST_DEBUG_OBJECT (otherpeer, "Attempting to forward query");
          if (gst_pad_query (otherpeer, query)) {
            proberet = GST_PAD_PROBE_HANDLED;
          } else
            GST_DEBUG ("FAILURE");
          gst_object_unref (otherpeer);
        } else
          GST_DEBUG_OBJECT (sinkpad, "request pad not connected ??");
      } else
        GST_DEBUG_OBJECT (pad, "No request pads ???");
    }
  } else
    gst_object_unref (peer);

  return proberet;
}
+
+ static void
+ pad_removed_cb (GstElement * element, GstPad * pad, GstDecodeChain * chain)
+ {
+ GList *l;
+
+ GST_LOG_OBJECT (pad, "pad removed, chain:%p", chain);
+
+ /* In fact, we don't have to do anything here, the active group will be
+ * removed when the group's multiqueue is drained */
+ CHAIN_MUTEX_LOCK (chain);
+ for (l = chain->pending_pads; l; l = l->next) {
+ GstPendingPad *ppad = l->data;
+ GstPad *opad = ppad->pad;
+
+ if (pad == opad) {
+ gst_pending_pad_free (ppad);
+ chain->pending_pads = g_list_delete_link (chain->pending_pads, l);
+ break;
+ }
+ }
+ CHAIN_MUTEX_UNLOCK (chain);
+ }
+
/* "no-more-pads" handler: the demuxer at the head of @chain announced it will
 * create no further source pads, so the corresponding group can be marked
 * complete and its multiqueue switched to the "playing" buffering limits. */
static void
no_more_pads_cb (GstElement * element, GstDecodeChain * chain)
{
  GstDecodeGroup *group = NULL;

  GST_LOG_OBJECT (element, "got no more pads");

  CHAIN_MUTEX_LOCK (chain);
  /* Only react when the signal comes from the current chain head, and only
   * if that head is a demuxer (others don't produce groups) */
  if (!chain->elements
      || ((GstDecodeElement *) chain->elements->data)->element != element) {
    GST_LOG_OBJECT (chain->dbin, "no-more-pads from old chain element '%s'",
        GST_OBJECT_NAME (element));
    CHAIN_MUTEX_UNLOCK (chain);
    return;
  } else if (!chain->demuxer) {
    GST_LOG_OBJECT (chain->dbin, "no-more-pads from a non-demuxer element '%s'",
        GST_OBJECT_NAME (element));
    CHAIN_MUTEX_UNLOCK (chain);
    return;
  }

  /* when we received no_more_pads, we can complete the pads of the chain */
  if (!chain->next_groups && chain->active_group) {
    group = chain->active_group;
  } else if (chain->next_groups) {
    /* Pick the first pending group that hasn't seen no-more-pads yet */
    GList *iter;
    for (iter = chain->next_groups; iter; iter = g_list_next (iter)) {
      group = iter->data;
      if (!group->no_more_pads)
        break;
    }
  }
  if (!group) {
    GST_ERROR_OBJECT (chain->dbin, "can't find group for element");
    CHAIN_MUTEX_UNLOCK (chain);
    return;
  }

  GST_DEBUG_OBJECT (element, "Setting group %p to complete", group);

  group->no_more_pads = TRUE;
  /* this group has prerolled enough to not need more pads,
   * we can probably set its buffering state to playing now */
  GST_DEBUG_OBJECT (group->dbin, "Setting group %p multiqueue to "
      "'playing' buffering mode", group);
  decodebin_set_queue_size (group->dbin, group->multiqueue, FALSE,
      (group->parent ? group->parent->seekable : TRUE));
  CHAIN_MUTEX_UNLOCK (chain);

  /* The group may now be complete: try exposing decodebin's pads.  The chain
   * lock must be released before taking the expose lock. */
  EXPOSE_LOCK (chain->dbin);
  if (chain->dbin->decode_chain) {
    if (gst_decode_chain_is_complete (chain->dbin->decode_chain)) {
      gst_decode_bin_expose (chain->dbin);
    }
  }
  EXPOSE_UNLOCK (chain->dbin);
}
+
/* "notify::caps" handler on a pending pad: once the pad has caps it can be
 * analyzed like a newly added pad, so drop it from the pending list and
 * re-run the pad-added logic. */
static void
caps_notify_cb (GstPad * pad, GParamSpec * unused, GstDecodeChain * chain)
{
  GstElement *element;
  GList *l;

  GST_LOG_OBJECT (pad, "Notified caps for pad %s:%s", GST_DEBUG_PAD_NAME (pad));

  /* Disconnect this; if we still need it, we'll reconnect to this in
   * analyze_new_pad */
  /* NOTE(review): assumes the pad still has a parent element here;
   * gst_pad_get_parent() returning NULL would crash below — confirm callers
   * guarantee the parent is alive for the duration of this notify */
  element = GST_ELEMENT_CAST (gst_pad_get_parent (pad));

  CHAIN_MUTEX_LOCK (chain);
  /* The pad is no longer caps-less: remove its pending entry (this also
   * disconnects the pending-pad signal handlers via gst_pending_pad_free) */
  for (l = chain->pending_pads; l; l = l->next) {
    GstPendingPad *ppad = l->data;
    if (ppad->pad == pad) {
      gst_pending_pad_free (ppad);
      chain->pending_pads = g_list_delete_link (chain->pending_pads, l);
      break;
    }
  }
  CHAIN_MUTEX_UNLOCK (chain);

  pad_added_cb (element, pad, chain);

  gst_object_unref (element);
}
+
+ /* Decide whether an element is a demuxer based on the
+ * klass and number/type of src pad templates it has */
+ static gboolean
+ is_demuxer_element (GstElement * srcelement)
+ {
+ GstElementFactory *srcfactory;
+ GstElementClass *elemclass;
+ GList *walk;
+ const gchar *klass;
+ gint potential_src_pads = 0;
+
+ srcfactory = gst_element_get_factory (srcelement);
+ klass =
+ gst_element_factory_get_metadata (srcfactory, GST_ELEMENT_METADATA_KLASS);
+
+ /* Can't be a demuxer unless it has Demux in the klass name */
+ if (!strstr (klass, "Demux"))
+ return FALSE;
+
+ /* Walk the src pad templates and count how many the element
+ * might produce */
+ elemclass = GST_ELEMENT_GET_CLASS (srcelement);
+
+ walk = gst_element_class_get_pad_template_list (elemclass);
+ while (walk != NULL) {
+ GstPadTemplate *templ;
+
+ templ = (GstPadTemplate *) walk->data;
+ if (GST_PAD_TEMPLATE_DIRECTION (templ) == GST_PAD_SRC) {
+ switch (GST_PAD_TEMPLATE_PRESENCE (templ)) {
+ case GST_PAD_ALWAYS:
+ case GST_PAD_SOMETIMES:
+ if (strstr (GST_PAD_TEMPLATE_NAME_TEMPLATE (templ), "%"))
+ potential_src_pads += 2; /* Might make multiple pads */
+ else
+ potential_src_pads += 1;
+ break;
+ case GST_PAD_REQUEST:
+ potential_src_pads += 2;
+ break;
+ }
+ }
+ walk = g_list_next (walk);
+ }
+
+ if (potential_src_pads < 2)
+ return FALSE;
+
+ return TRUE;
+ }
+
+ static gboolean
+ is_adaptive_demuxer_element (GstElement * srcelement)
+ {
+ GstElementFactory *srcfactory;
+ const gchar *klass;
+
+ srcfactory = gst_element_get_factory (srcelement);
+ klass =
+ gst_element_factory_get_metadata (srcfactory, GST_ELEMENT_METADATA_KLASS);
+
+ /* Can't be a demuxer unless it has Demux in the klass name */
+ if (!strstr (klass, "Demux") || !strstr (klass, "Adaptive"))
+ return FALSE;
+
+ return TRUE;
+ }
+
+ /* Returns TRUE if the caps are compatible with the caps specified in the 'caps'
+ * property (which by default are the raw caps)
+ *
+ * The decodebin_lock should be taken !
+ */
+ static gboolean
+ are_final_caps (GstDecodeBin * dbin, GstCaps * caps)
+ {
+ gboolean res;
+
+ GST_LOG_OBJECT (dbin, "Checking with caps %" GST_PTR_FORMAT, caps);
+
+ /* lock for getting the caps */
+ GST_OBJECT_LOCK (dbin);
+ res = gst_caps_is_subset (caps, dbin->caps);
+ GST_OBJECT_UNLOCK (dbin);
+
+ GST_LOG_OBJECT (dbin, "Caps are %sfinal caps", res ? "" : "not ");
+
+ return res;
+ }
+
+ /* gst_decode_bin_reset_buffering:
+ *
+ * Enables buffering on the last multiqueue of each group only,
+ * disabling the rest
+ *
+ */
+ static void
+ gst_decode_bin_reset_buffering (GstDecodeBin * dbin)
+ {
+ if (!dbin->use_buffering)
+ return;
+
+ GST_DEBUG_OBJECT (dbin, "Resetting multiqueues buffering");
+ if (dbin->decode_chain) {
+ CHAIN_MUTEX_LOCK (dbin->decode_chain);
+ gst_decode_chain_reset_buffering (dbin->decode_chain);
+ CHAIN_MUTEX_UNLOCK (dbin->decode_chain);
+ }
+ }
+
+ /****
+ * GstDecodeChain functions
+ ****/
+
+ static gboolean
+ gst_decode_chain_reset_buffering (GstDecodeChain * chain)
+ {
+ GstDecodeGroup *group;
+
+ group = chain->active_group;
+ GST_LOG_OBJECT (chain->dbin, "Resetting chain %p buffering, active group: %p",
+ chain, group);
+ if (group) {
+ return gst_decode_group_reset_buffering (group);
+ }
+ return FALSE;
+ }
+
/* gst_decode_chain_get_current_group:
 *
 * Returns the current group of this chain, to which
 * new chains should be attached or NULL if the last
 * group didn't have no-more-pads.
 *
 * Not MT-safe: Call with parent chain lock!
 */
static GstDecodeGroup *
gst_decode_chain_get_current_group (GstDecodeChain * chain)
{
  GstDecodeGroup *group;

  /* An exposed (overrun) active group that never signalled no-more-pads
   * cannot accept new pads anymore: refuse with a warning */
  if (!chain->next_groups && chain->active_group
      && chain->active_group->overrun && !chain->active_group->no_more_pads) {
    GST_WARNING_OBJECT (chain->dbin,
        "Currently active group %p is exposed"
        " and wants to add a new pad without having signaled no-more-pads",
        chain->active_group);
    return NULL;
  }

  /* Same check for the oldest pending group */
  if (chain->next_groups && (group = chain->next_groups->data) && group->overrun
      && !group->no_more_pads) {
    GST_WARNING_OBJECT (chain->dbin,
        "Currently newest pending group %p "
        "had overflow but didn't signal no-more-pads", group);
    return NULL;
  }

  /* Now we know that we can really return something useful */
  if (!chain->active_group) {
    /* No group yet: create the first one and make it active */
    chain->active_group = group = gst_decode_group_new (chain->dbin, chain);
  } else if (!chain->active_group->overrun
      && !chain->active_group->no_more_pads) {
    /* Active group is still open for new pads */
    group = chain->active_group;
  } else {
    /* Active group is closed: look for an open pending group */
    GList *iter;
    group = NULL;
    for (iter = chain->next_groups; iter; iter = g_list_next (iter)) {
      GstDecodeGroup *next_group = iter->data;

      if (!next_group->overrun && !next_group->no_more_pads) {
        group = next_group;
        break;
      }
    }
  }
  /* Every candidate is closed: start a fresh pending group */
  if (!group) {
    group = gst_decode_group_new (chain->dbin, chain);
    chain->next_groups = g_list_append (chain->next_groups, group);
  }

  return group;
}
+
+ static void gst_decode_group_free_internal (GstDecodeGroup * group,
+ gboolean hide);
+
+ static void
+ gst_decode_chain_unref (GstDecodeChain * chain)
+ {
+ if (g_atomic_int_dec_and_test (&chain->refs)) {
+ g_mutex_clear (&chain->lock);
+ g_slice_free (GstDecodeChain, chain);
+ }
+ }
+
+ static GstDecodeChain *
+ gst_decode_chain_ref (GstDecodeChain * chain)
+ {
+ g_atomic_int_inc (&chain->refs);
+ return chain;
+ }
+
/* Shared teardown for a chain.
 *
 * With hide=TRUE the chain is only deactivated (signals disconnected,
 * elements removed from the bin, endpad hidden) but all objects are kept so
 * the chain can be revived; with hide=FALSE everything is freed.  Elements
 * are only set to NULL state *after* the chain lock is released, to avoid
 * deadlocks with streaming threads. */
static void
gst_decode_chain_free_internal (GstDecodeChain * chain, gboolean hide)
{
  GList *l, *set_to_null = NULL;

  CHAIN_MUTEX_LOCK (chain);

  GST_DEBUG_OBJECT (chain->dbin, "%s chain %p", (hide ? "Hiding" : "Freeing"),
      chain);

  /* Tear down the active group first */
  if (chain->active_group) {
    gst_decode_group_free_internal (chain->active_group, hide);
    if (!hide)
      chain->active_group = NULL;
  }

  /* ... then all pending groups */
  for (l = chain->next_groups; l; l = l->next) {
    gst_decode_group_free_internal ((GstDecodeGroup *) l->data, hide);
    if (!hide)
      l->data = NULL;
  }
  if (!hide) {
    g_list_free (chain->next_groups);
    chain->next_groups = NULL;
  }

  /* Previously hidden groups are only fully freed on a real free */
  if (!hide) {
    for (l = chain->old_groups; l; l = l->next) {
      GstDecodeGroup *group = l->data;

      gst_decode_group_free (group);
    }
    g_list_free (chain->old_groups);
    chain->old_groups = NULL;
  }

  /* Pending pads are dropped even when hiding */
  for (l = chain->pending_pads; l; l = l->next) {
    GstPendingPad *ppad = l->data;
    gst_pending_pad_free (ppad);
    l->data = NULL;
  }
  g_list_free (chain->pending_pads);
  chain->pending_pads = NULL;

  /* Disconnect signals and detach every element of the chain from the bin */
  for (l = chain->elements; l; l = l->next) {
    GstDecodeElement *delem = l->data;
    GstElement *element = delem->element;

    if (delem->pad_added_id)
      g_signal_handler_disconnect (element, delem->pad_added_id);
    delem->pad_added_id = 0;
    if (delem->pad_removed_id)
      g_signal_handler_disconnect (element, delem->pad_removed_id);
    delem->pad_removed_id = 0;
    if (delem->no_more_pads_id)
      g_signal_handler_disconnect (element, delem->no_more_pads_id);
    delem->no_more_pads_id = 0;

    if (delem->capsfilter) {
      if (GST_OBJECT_PARENT (delem->capsfilter) ==
          GST_OBJECT_CAST (chain->dbin))
        gst_bin_remove (GST_BIN_CAST (chain->dbin), delem->capsfilter);
      if (!hide) {
        /* keep a ref so the element can be set to NULL state after unlock */
        set_to_null =
            g_list_append (set_to_null, gst_object_ref (delem->capsfilter));
      }
    }

    if (GST_OBJECT_PARENT (element) == GST_OBJECT_CAST (chain->dbin))
      gst_bin_remove (GST_BIN_CAST (chain->dbin), element);
    if (!hide) {
      set_to_null = g_list_append (set_to_null, gst_object_ref (element));
    }

    SUBTITLE_LOCK (chain->dbin);
    /* remove possible subtitle element */
    chain->dbin->subtitles = g_list_remove (chain->dbin->subtitles, element);
    SUBTITLE_UNLOCK (chain->dbin);

    if (!hide) {
      if (delem->capsfilter) {
        gst_object_unref (delem->capsfilter);
        delem->capsfilter = NULL;
      }

      gst_object_unref (element);
      l->data = NULL;

      g_slice_free (GstDecodeElement, delem);
    }
  }
  if (!hide) {
    g_list_free (chain->elements);
    chain->elements = NULL;
  }

  /* Withdraw the exposed endpad from decodebin and unlink it */
  if (chain->endpad) {
    if (chain->endpad->exposed) {
      gst_element_remove_pad (GST_ELEMENT_CAST (chain->dbin),
          GST_PAD_CAST (chain->endpad));
    }

    decode_pad_set_target (chain->endpad, NULL);
    chain->endpad->exposed = FALSE;
    if (!hide) {
      gst_object_unref (chain->endpad);
      chain->endpad = NULL;
    }
  }

  if (!hide && chain->current_pad) {
    gst_object_unref (chain->current_pad);
    chain->current_pad = NULL;
  }

  /* The chain's own upstream pad is always released */
  if (chain->pad) {
    gst_object_unref (chain->pad);
    chain->pad = NULL;
  }

  if (chain->endcaps) {
    gst_caps_unref (chain->endcaps);
    chain->endcaps = NULL;
  }
  g_free (chain->deadend_details);
  chain->deadend_details = NULL;

  GST_DEBUG_OBJECT (chain->dbin, "%s chain %p", (hide ? "Hidden" : "Freed"),
      chain);
  CHAIN_MUTEX_UNLOCK (chain);

  /* State changes happen outside the chain lock: they may block on
   * streaming threads that in turn want this lock */
  while (set_to_null) {
    GstElement *element = set_to_null->data;
    set_to_null = g_list_delete_link (set_to_null, set_to_null);
    gst_element_set_state (element, GST_STATE_NULL);
    gst_object_unref (element);
  }

  if (!hide)
    gst_decode_chain_unref (chain);
}
+
/* gst_decode_chain_free:
 *
 * Completely frees and removes the chain and all
 * child groups from decodebin.
 *
 * MT-safe, don't hold the chain lock or any child chain's lock
 * when calling this!
 */
static void
gst_decode_chain_free (GstDecodeChain * chain)
{
  /* hide=FALSE: full teardown, drops the caller's reference on @chain */
  gst_decode_chain_free_internal (chain, FALSE);
}
+
+ /* gst_decode_chain_new:
+ *
+ * Creates a new decode chain and initializes it.
+ *
+ * It's up to the caller to add it to the list of child chains of
+ * a group!
+ */
+ static GstDecodeChain *
+ gst_decode_chain_new (GstDecodeBin * dbin, GstDecodeGroup * parent,
+ GstPad * pad)
+ {
+ GstDecodeChain *chain = g_slice_new0 (GstDecodeChain);
+
+ GST_DEBUG_OBJECT (dbin, "Creating new chain %p with parent group %p", chain,
+ parent);
+
+ chain->dbin = dbin;
+ chain->parent = parent;
+ chain->refs = 1;
+ g_mutex_init (&chain->lock);
+ chain->pad = gst_object_ref (pad);
+
+ return chain;
+ }
+
+ /****
+ * GstDecodeGroup functions
+ ****/
+
+ /* The overrun callback is used to expose groups that have not yet had their
+ * no_more_pads called while the (large) multiqueue overflowed. When this
+ * happens we must assume that the no_more_pads will not arrive anymore and we
+ * must expose the pads that we have.
+ */
+ static void
+ multi_queue_overrun_cb (GstElement * queue, GstDecodeGroup * group)
+ {
+ GstDecodeBin *dbin;
+
+ dbin = group->dbin;
+
+ GST_LOG_OBJECT (dbin, "multiqueue '%s' (%p) is full", GST_OBJECT_NAME (queue),
+ queue);
+
+ group->overrun = TRUE;
+ /* this group has prerolled enough to not need more pads,
+ * we can probably set its buffering state to playing now */
+ GST_DEBUG_OBJECT (group->dbin, "Setting group %p multiqueue to "
+ "'playing' buffering mode", group);
+ decodebin_set_queue_size (group->dbin, group->multiqueue, FALSE,
+ (group->parent ? group->parent->seekable : TRUE));
+
+ /* FIXME: We should make sure that everything gets exposed now
+ * even if child chains are not complete because the will never
+ * be complete! Ignore any non-complete chains when exposing
+ * and never expose them later
+ */
+
+ EXPOSE_LOCK (dbin);
+ if (dbin->decode_chain) {
+ if (gst_decode_chain_is_complete (dbin->decode_chain)) {
+ if (!gst_decode_bin_expose (dbin))
+ GST_WARNING_OBJECT (dbin, "Couldn't expose group");
+ }
+ }
+ EXPOSE_UNLOCK (dbin);
+ }
+
/* Shared teardown for a group.
 *
 * With hide=TRUE only the child chains are hidden and the multiqueue is
 * removed from the bin (but kept alive); with hide=FALSE the demuxer pad
 * probes, children, request pads and the multiqueue are all fully released
 * and the group struct itself is freed. */
static void
gst_decode_group_free_internal (GstDecodeGroup * group, gboolean hide)
{
  GList *l;

  GST_DEBUG_OBJECT (group->dbin, "%s group %p", (hide ? "Hiding" : "Freeing"),
      group);

  /* Remove the upstream event/query probes installed on the multiqueue
   * sink pads (only on a real free) */
  if (!hide) {
    for (l = group->demuxer_pad_probe_ids; l != NULL; l = l->next) {
      GstDemuxerPad *demuxer_pad = l->data;
      GstPad *sinkpad = g_weak_ref_get (&demuxer_pad->weakPad);

      if (sinkpad != NULL) {
        gst_pad_remove_probe (sinkpad, demuxer_pad->event_probe_id);
        gst_pad_remove_probe (sinkpad, demuxer_pad->query_probe_id);
        g_weak_ref_clear (&demuxer_pad->weakPad);
        gst_object_unref (sinkpad);
      }
      g_free (demuxer_pad);
    }
    g_list_free (group->demuxer_pad_probe_ids);
    group->demuxer_pad_probe_ids = NULL;
  }

  /* Propagate teardown to the child chains */
  for (l = group->children; l; l = l->next) {
    GstDecodeChain *chain = (GstDecodeChain *) l->data;

    gst_decode_chain_free_internal (chain, hide);
    if (!hide)
      l->data = NULL;
  }
  if (!hide) {
    g_list_free (group->children);
    group->children = NULL;
  }

  /* Release the multiqueue request pads we grabbed for the demuxer */
  if (!hide) {
    for (l = group->reqpads; l; l = l->next) {
      GstPad *pad = l->data;

      gst_element_release_request_pad (group->multiqueue, pad);
      gst_object_unref (pad);
      l->data = NULL;
    }
    g_list_free (group->reqpads);
    group->reqpads = NULL;
  }

  if (group->multiqueue) {
    /* Stop overrun notifications before taking the queue down */
    if (group->overrunsig) {
      g_signal_handler_disconnect (group->multiqueue, group->overrunsig);
      group->overrunsig = 0;
    }

    if (GST_OBJECT_PARENT (group->multiqueue) == GST_OBJECT_CAST (group->dbin))
      gst_bin_remove (GST_BIN_CAST (group->dbin), group->multiqueue);
    if (!hide) {
      gst_element_set_state (group->multiqueue, GST_STATE_NULL);
      gst_object_unref (group->multiqueue);
      group->multiqueue = NULL;
    }
  }

  GST_DEBUG_OBJECT (group->dbin, "%s group %p", (hide ? "Hid" : "Freed"),
      group);
  if (!hide)
    g_slice_free (GstDecodeGroup, group);
}
+
/* gst_decode_group_free:
 *
 * Completely frees and removes the decode group and all
 * its children.
 *
 * Never call this from any streaming thread!
 *
 * Not MT-safe, call with parent's chain lock!
 */
static void
gst_decode_group_free (GstDecodeGroup * group)
{
  /* hide=FALSE: full teardown, including element state changes to NULL */
  gst_decode_group_free_internal (group, FALSE);
}
+
/* gst_decode_group_hide:
 *
 * Hide the decode group only, this means that
 * all child endpads are removed from decodebin
 * and all signals are unconnected.
 *
 * No element is set to NULL state and completely
 * unrefed here.
 *
 * Can be called from streaming threads.
 *
 * Not MT-safe, call with parent's chain lock!
 */
static void
gst_decode_group_hide (GstDecodeGroup * group)
{
  /* hide=TRUE: keep all objects alive so the group can be freed later */
  gst_decode_group_free_internal (group, TRUE);
}
+
+ /* gst_decode_chain_free_hidden_groups:
+ *
+ * Frees any decode groups that were hidden previously.
+ * This allows keeping memory use from ballooning when
+ * switching chains repeatedly.
+ *
+ * A new throwaway thread will be created to free the
+ * groups, so any delay does not block the setup of a
+ * new group.
+ *
+ * Not MT-safe, call with parent's chain lock!
+ */
+ static void
+ gst_decode_chain_free_hidden_groups (GList * old_groups)
+ {
+ GList *l;
+
+ for (l = old_groups; l; l = l->next) {
+ GstDecodeGroup *group = l->data;
+
+ gst_decode_group_free (group);
+ }
+ g_list_free (old_groups);
+ }
+
/* Spawn (or defer) the background cleanup of @chain's hidden groups.
 *
 * Takes ownership of chain->old_groups.  During shutdown the groups are
 * handed to the state-change handler instead of a thread; if thread creation
 * fails the list is put back so nothing leaks. */
static void
gst_decode_chain_start_free_hidden_groups_thread (GstDecodeChain * chain)
{
  GThread *thread;
  GError *error = NULL;
  GList *old_groups;
  GstDecodeBin *dbin = chain->dbin;

  old_groups = chain->old_groups;
  if (!old_groups)
    return;

  /* If we already have a thread running, wait for it to finish */
  g_mutex_lock (&dbin->cleanup_lock);
  if (dbin->cleanup_thread) {
    g_thread_join (dbin->cleanup_thread);
    dbin->cleanup_thread = NULL;
  }

  chain->old_groups = NULL;

  if (dbin->shutdown) {
    /* If we're shutting down, add the groups to be cleaned up in the
     * state change handler (which *is* another thread). Also avoids
     * playing racy games with the state change handler */
    dbin->cleanup_groups = g_list_concat (dbin->cleanup_groups, old_groups);
    g_mutex_unlock (&dbin->cleanup_lock);
    return;
  }

  thread = g_thread_try_new ("free-hidden-groups",
      (GThreadFunc) gst_decode_chain_free_hidden_groups, old_groups, &error);
  if (!thread || error) {
    GST_ERROR ("Failed to start free-hidden-groups thread: %s",
        error ? error->message : "unknown reason");
    g_clear_error (&error);
    /* restore ownership so the groups are freed on a later attempt */
    chain->old_groups = old_groups;
    g_mutex_unlock (&dbin->cleanup_lock);
    return;
  }

  dbin->cleanup_thread = thread;
  g_mutex_unlock (&dbin->cleanup_lock);

  GST_DEBUG_OBJECT (chain->dbin, "Started free-hidden-groups thread");
}
+
+ static void
+ decodebin_set_queue_size (GstDecodeBin * dbin, GstElement * multiqueue,
+ gboolean preroll, gboolean seekable)
+ {
+ gboolean use_buffering;
+
+ /* get the current config from the multiqueue */
+ g_object_get (multiqueue, "use-buffering", &use_buffering, NULL);
+
+ decodebin_set_queue_size_full (dbin, multiqueue, use_buffering, preroll,
+ seekable);
+ }
+
/* configure queue sizes, this depends on the buffering method and if we are
 * playing or prerolling. */
static void
decodebin_set_queue_size_full (GstDecodeBin * dbin, GstElement * multiqueue,
    gboolean use_buffering, gboolean preroll, gboolean seekable)
{
  guint max_bytes, max_buffers;
  guint64 max_time;

  GST_DEBUG_OBJECT (multiqueue, "use buffering %d", use_buffering);

  /* Note: the "(max_x = dbin->max_size_x) == 0" pattern below copies the
   * property value and falls back to the AUTO_* default when it is unset
   * (0 means "automatic") */
  if (preroll || use_buffering) {
    /* takes queue limits, initially we only queue up up to the max bytes limit,
     * with a default of 2MB. we use the same values for buffering mode. */
    if (preroll || (max_bytes = dbin->max_size_bytes) == 0)
      max_bytes = AUTO_PREROLL_SIZE_BYTES;
    if (preroll || (max_buffers = dbin->max_size_buffers) == 0)
      max_buffers = AUTO_PREROLL_SIZE_BUFFERS;
    if (preroll || (max_time = dbin->max_size_time) == 0) {
      if (dbin->use_buffering && !preroll)
        max_time = 5 * GST_SECOND;
      else
        max_time = seekable ? AUTO_PREROLL_SEEKABLE_SIZE_TIME :
            AUTO_PREROLL_NOT_SEEKABLE_SIZE_TIME;
    }
  } else {
    /* update runtime limits. At runtime, we try to keep the amount of buffers
     * in the queues as low as possible (but at least 5 buffers). */
    if (dbin->use_buffering)
      max_bytes = 0;
    else if ((max_bytes = dbin->max_size_bytes) == 0)
      max_bytes = AUTO_PLAY_SIZE_BYTES;
    if ((max_buffers = dbin->max_size_buffers) == 0)
      max_buffers = AUTO_PLAY_SIZE_BUFFERS;
    /* this is a multiqueue with disabled buffering, don't limit max_time */
    if (dbin->use_buffering)
      max_time = 0;
    else if ((max_time = dbin->max_size_time) == 0)
      max_time = AUTO_PLAY_SIZE_TIME;
  }

  GST_DEBUG_OBJECT (multiqueue, "setting limits %u bytes, %u buffers, "
      "%" G_GUINT64_FORMAT " time", max_bytes, max_buffers, max_time);
  g_object_set (multiqueue,
      "max-size-bytes", max_bytes, "max-size-time", max_time,
      "max-size-buffers", max_buffers, NULL);
}
+
/* gst_decode_group_new:
 * @dbin: Parent decodebin
 * @parent: Parent chain or %NULL
 *
 * Creates a new GstDecodeGroup. It is up to the caller to add it to the list
 * of groups.
 *
 * The group gets its own multiqueue (configured with preroll limits), which
 * is added to the bin in PAUSED state with the overrun handler connected.
 */
static GstDecodeGroup *
gst_decode_group_new (GstDecodeBin * dbin, GstDecodeChain * parent)
{
  GstDecodeGroup *group = g_slice_new0 (GstDecodeGroup);
  GstElement *mq;
  gboolean seekable;

  GST_DEBUG_OBJECT (dbin, "Creating new group %p with parent chain %p", group,
      parent);

  group->dbin = dbin;
  group->parent = parent;

#ifdef TIZEN_FEATURE_TRUSTZONE
  /*tzmultiqueue patch : when this flag is set to TRUE, we will use tzmultiqueue instead of multiqueue element in pipeline*/
  if (dbin->use_trustzone) {
    GST_DEBUG_OBJECT (dbin, "decodebin2 use tzmultiqueue");
    mq = group->multiqueue = gst_element_factory_make ("tzmultiqueue", NULL);
  }
  else
#endif
  mq = group->multiqueue = gst_element_factory_make ("multiqueue", NULL);
  if (G_UNLIKELY (!group->multiqueue))
    goto missing_multiqueue;

  /* configure queue sizes for preroll */
  /* preroll limits depend on whether upstream is seekable, which we probe
   * via the demuxer's sink pad */
  seekable = FALSE;
  if (parent && parent->demuxer) {
    GstElement *element =
        ((GstDecodeElement *) parent->elements->data)->element;
    GstPad *pad = gst_element_get_static_pad (element, "sink");
    if (pad) {
      seekable = parent->seekable = check_upstream_seekable (dbin, pad);
      gst_object_unref (pad);
    }
  }
  decodebin_set_queue_size_full (dbin, mq, FALSE, TRUE, seekable);

  group->overrunsig = g_signal_connect (mq, "overrun",
      G_CALLBACK (multi_queue_overrun_cb), group);
  group->demuxer_pad_probe_ids = NULL;

  gst_element_set_state (mq, GST_STATE_PAUSED);
  gst_bin_add (GST_BIN (dbin), gst_object_ref (mq));

  return group;

  /* ERRORS */
missing_multiqueue:
  {
    gst_element_post_message (GST_ELEMENT_CAST (dbin),
        gst_missing_element_message_new (GST_ELEMENT_CAST (dbin),
            "multiqueue"));
    GST_ELEMENT_ERROR (dbin, CORE, MISSING_PLUGIN, (NULL), ("no multiqueue!"));
    g_slice_free (GstDecodeGroup, group);
    return NULL;
  }
}
+
/* gst_decode_group_control_demuxer_pad
 *
 * Adds a new demuxer srcpad to the given group.
 *
 * Requests a multiqueue sink pad, links the demuxer pad to it, installs the
 * upstream event/query probes, and records the request pad on the group.
 *
 * Returns the srcpad of the multiqueue corresponding the given pad.
 * Returns NULL if there was an error.
 */
static GstPad *
gst_decode_group_control_demuxer_pad (GstDecodeGroup * group, GstPad * pad)
{
  GstDecodeBin *dbin;
  GstDemuxerPad *demuxer_pad;
  GstPad *srcpad, *sinkpad;
  GstIterator *it = NULL;
  GValue item = { 0, };

  dbin = group->dbin;

  GST_LOG_OBJECT (dbin, "group:%p pad %s:%s", group, GST_DEBUG_PAD_NAME (pad));

  srcpad = NULL;

  if (G_UNLIKELY (!group->multiqueue))
    return NULL;

  if (!(sinkpad =
          gst_element_request_pad_simple (group->multiqueue, "sink_%u"))) {
    GST_ERROR_OBJECT (dbin, "Couldn't get sinkpad from multiqueue");
    return NULL;
  }

  /* CHECK_NOTHING: caps/flow checks are skipped for speed; the caps were
   * already validated when the pad was analyzed */
  if ((gst_pad_link_full (pad, sinkpad,
              GST_PAD_LINK_CHECK_NOTHING) != GST_PAD_LINK_OK)) {
    GST_ERROR_OBJECT (dbin, "Couldn't link demuxer and multiqueue");
    goto error;
  }

  /* Find the multiqueue srcpad that pairs with the requested sinkpad */
  it = gst_pad_iterate_internal_links (sinkpad);

  if (!it || (gst_iterator_next (it, &item)) != GST_ITERATOR_OK
      || ((srcpad = g_value_dup_object (&item)) == NULL)) {
    GST_ERROR_OBJECT (dbin,
        "Couldn't get srcpad from multiqueue for sinkpad %" GST_PTR_FORMAT,
        sinkpad);
    goto error;
  }

  CHAIN_MUTEX_LOCK (group->parent);

  /* Note: GWeakRefs can't be moved in memory once they're in use, so do a
   * dedicated alloc for the GstDemuxerPad struct that contains it */
  demuxer_pad = g_new0 (GstDemuxerPad, 1);
  demuxer_pad->event_probe_id = gst_pad_add_probe (sinkpad,
      GST_PAD_PROBE_TYPE_EVENT_UPSTREAM, sink_pad_event_probe, group, NULL);
  demuxer_pad->query_probe_id = gst_pad_add_probe (sinkpad,
      GST_PAD_PROBE_TYPE_QUERY_UPSTREAM, sink_pad_query_probe, group, NULL);

  g_weak_ref_set (&demuxer_pad->weakPad, sinkpad);
  group->demuxer_pad_probe_ids =
      g_list_prepend (group->demuxer_pad_probe_ids, demuxer_pad);

  group->reqpads = g_list_prepend (group->reqpads, gst_object_ref (sinkpad));
  CHAIN_MUTEX_UNLOCK (group->parent);

beach:
  if (G_IS_VALUE (&item))
    g_value_unset (&item);
  if (it)
    gst_iterator_free (it);
  gst_object_unref (sinkpad);
  return srcpad;

error:
  /* give the request pad back; falls through to the common cleanup */
  gst_element_release_request_pad (group->multiqueue, sinkpad);
  goto beach;
}
+
+ /* gst_decode_group_is_complete:
+ *
+ * Checks if the group is complete, this means that
+ * a) overrun of the multiqueue or no-more-pads happened
+ * b) all child chains are complete
+ *
+ * Not MT-safe, always call with decodebin expose lock
+ */
+ static gboolean
+ gst_decode_group_is_complete (GstDecodeGroup * group)
+ {
+ GList *l;
+ gboolean complete = TRUE;
+
+ if (!group->overrun && !group->no_more_pads) {
+ complete = FALSE;
+ goto out;
+ }
+
+ for (l = group->children; l; l = l->next) {
+ GstDecodeChain *chain = l->data;
+
+ if (!gst_decode_chain_is_complete (chain)) {
+ complete = FALSE;
+ goto out;
+ }
+ }
+
+ out:
+ GST_DEBUG_OBJECT (group->dbin, "Group %p is complete: %d", group, complete);
+ return complete;
+ }
+
+ /* gst_decode_chain_is_complete:
+ *
+ * Returns TRUE if the chain is complete, this means either
+ * a) This chain is a dead end, i.e. we have no suitable plugins
+ * b) This chain ends in an endpad and this is blocked or exposed
+ *
+ * Not MT-safe, always call with decodebin expose lock
+ */
+ static gboolean
+ gst_decode_chain_is_complete (GstDecodeChain * chain)
+ {
+ gboolean complete = FALSE;
+
+ CHAIN_MUTEX_LOCK (chain);
+ if (chain->dbin->shutdown)
+ goto out;
+
+ if (chain->deadend) {
+ complete = TRUE;
+ goto out;
+ }
+
+ if (chain->endpad && gst_decode_pad_is_exposable (chain->endpad)) {
+ complete = TRUE;
+ goto out;
+ }
+
+ if (chain->demuxer) {
+ if (chain->active_group
+ && gst_decode_group_is_complete (chain->active_group)) {
+ complete = TRUE;
+ goto out;
+ }
+ }
+
+ out:
+ CHAIN_MUTEX_UNLOCK (chain);
+ GST_DEBUG_OBJECT (chain->dbin, "Chain %p is complete: %d", chain, complete);
+ return complete;
+ }
+
+ /* Flushing group/chains */
+ static void
+ flush_group (GstDecodeGroup * group, gboolean flushing)
+ {
+ GList *tmp;
+
+ GST_DEBUG ("group %p flushing:%d", group, flushing);
+
+ if (group->drained == flushing)
+ return;
+ for (tmp = group->children; tmp; tmp = tmp->next) {
+ GstDecodeChain *chain = (GstDecodeChain *) tmp->data;
+ flush_chain (chain, flushing);
+ }
+ GST_DEBUG ("Setting group %p to drained:%d", group, flushing);
+ group->drained = flushing;
+ }
+
/* Propagate a flush (or un-flush) through @chain.
 *
 * On un-flush, if pending groups exist, the chain switches to the newest
 * pending group: all other groups (including the previously active one) are
 * hidden and queued for background freeing. */
static void
flush_chain (GstDecodeChain * chain, gboolean flushing)
{
  GList *tmp;
  GstDecodeBin *dbin = chain->dbin;

  GST_DEBUG_OBJECT (dbin, "chain %p (pad %s:%s) flushing:%d", chain,
      GST_DEBUG_PAD_NAME (chain->pad), flushing);
  if (chain->drained == flushing)
    return;
  /* if unflushing, check if we should switch to last group */
  if (flushing == FALSE && chain->next_groups) {
    GstDecodeGroup *target_group =
        (GstDecodeGroup *) g_list_last (chain->next_groups)->data;
    /* Free previously hidden groups in a background thread first */
    gst_decode_chain_start_free_hidden_groups_thread (chain);
    /* Hide active group (we're sure it's not that one we'll be using) */
    GST_DEBUG_OBJECT (dbin, "Switching from active group %p to group %p",
        chain->active_group, target_group);
    gst_decode_group_hide (chain->active_group);
    chain->old_groups = g_list_prepend (chain->old_groups, chain->active_group);
    chain->active_group = target_group;
    /* Hide all groups but the target_group */
    for (tmp = chain->next_groups; tmp; tmp = tmp->next) {
      GstDecodeGroup *group = (GstDecodeGroup *) tmp->data;
      if (group != target_group) {
        gst_decode_group_hide (group);
        chain->old_groups = g_list_prepend (chain->old_groups, group);
      }
    }
    /* Clear next groups */
    g_list_free (chain->next_groups);
    chain->next_groups = NULL;
  }
  /* Mark all groups as flushing */
  if (chain->active_group)
    flush_group (chain->active_group, flushing);
  for (tmp = chain->next_groups; tmp; tmp = tmp->next) {
    GstDecodeGroup *group = (GstDecodeGroup *) tmp->data;
    flush_group (group, flushing);
  }
  GST_DEBUG ("Setting chain %p to drained:%d", chain, flushing);
  chain->drained = flushing;
}
+
+ static gboolean
+ drain_and_switch_chains (GstDecodeChain * chain, GstDecodePad * drainpad,
+ gboolean * last_group, gboolean * drained, gboolean * switched);
+ /* drain_and_switch_chains/groups:
+ *
+ * CALL WITH CHAIN LOCK (or group parent) TAKEN !
+ *
+ * Goes down the chains/groups until it finds the chain
+ * to which the drainpad belongs.
+ *
+ * It marks that pad/chain as drained and then will figure
+ * out which group to switch to or not.
+ *
+ * last_chain will be set to TRUE if the group to which the
+ * pad belongs is the last one.
+ *
+ * drained will be set to TRUE if the chain/group is drained.
+ *
+ * Returns: TRUE if the chain contained the target pad */
+ /* drain_and_switch_group:
+  * Recursive helper of drain_and_switch_chains () — see the contract
+  * comment above the forward declaration.  Marks @group drained iff all
+  * of its child chains report drained, and returns TRUE if the target
+  * @drainpad was found somewhere in this subtree. */
+ static gboolean
+ drain_and_switch_group (GstDecodeGroup * group, GstDecodePad * drainpad,
+ gboolean * last_group, gboolean * drained, gboolean * switched)
+ {
+ gboolean handled = FALSE;
+ GList *tmp;
+
+ GST_DEBUG ("Checking group %p (target pad %s:%s)",
+ group, GST_DEBUG_PAD_NAME (drainpad));
+
+ /* Definitely can't be in drained groups */
+ if (G_UNLIKELY (group->drained)) {
+ goto beach;
+ }
+
+ /* Figure out if all our chains are drained with the
+ * new information */
+ group->drained = TRUE;
+ for (tmp = group->children; tmp; tmp = tmp->next) {
+ GstDecodeChain *chain = (GstDecodeChain *) tmp->data;
+ gboolean subdrained = FALSE;
+
+ handled |=
+ drain_and_switch_chains (chain, drainpad, last_group, &subdrained,
+ switched);
+ if (!subdrained)
+ group->drained = FALSE;
+ }
+
+ beach:
+ GST_DEBUG ("group %p (last_group:%d, drained:%d, switched:%d, handled:%d)",
+ group, *last_group, group->drained, *switched, handled);
+ *drained = group->drained;
+ return handled;
+ }
+
+ /* drain_and_switch_chains:
+  * See the contract comment above the forward declaration.  Descends to
+  * @drainpad's chain, marks it drained, then decides whether the
+  * enclosing chain should switch to its next group. */
+ static gboolean
+ drain_and_switch_chains (GstDecodeChain * chain, GstDecodePad * drainpad,
+ gboolean * last_group, gboolean * drained, gboolean * switched)
+ {
+ gboolean handled = FALSE;
+ GstDecodeBin *dbin = chain->dbin;
+
+ GST_DEBUG ("Checking chain %p %s:%s (target pad %s:%s)",
+ chain, GST_DEBUG_PAD_NAME (chain->pad), GST_DEBUG_PAD_NAME (drainpad));
+
+ CHAIN_MUTEX_LOCK (chain);
+
+ /* Remove a previously installed probe on the chain's pad, if any */
+ if (chain->pad_probe_id) {
+ gst_pad_remove_probe (chain->pad, chain->pad_probe_id);
+ chain->pad_probe_id = 0;
+ }
+
+ /* Definitely can't be in drained chains */
+ if (G_UNLIKELY (chain->drained)) {
+ goto beach;
+ }
+
+ /* Leaf chain: only the target end pad (if this is it) gets marked */
+ if (chain->endpad) {
+ /* Check if we're reached the target endchain */
+ if (drainpad != NULL && chain == drainpad->chain) {
+ GST_DEBUG ("Found the target chain");
+ drainpad->drained = TRUE;
+ handled = TRUE;
+ }
+
+ chain->drained = chain->endpad->drained;
+ goto beach;
+ }
+
+ /* We known there are groups to switch to */
+ if (chain->next_groups)
+ *last_group = FALSE;
+
+ /* Check the active group */
+ if (chain->active_group) {
+ gboolean subdrained = FALSE;
+ handled = drain_and_switch_group (chain->active_group, drainpad,
+ last_group, &subdrained, switched);
+
+ /* The group is drained, see if we can switch to another */
+ if ((handled || drainpad == NULL) && subdrained && !*switched) {
+ if (chain->next_groups) {
+ /* Switch to next group */
+ GST_DEBUG_OBJECT (dbin, "Hiding current group %p", chain->active_group);
+ gst_decode_group_hide (chain->active_group);
+ chain->old_groups =
+ g_list_prepend (chain->old_groups, chain->active_group);
+ GST_DEBUG_OBJECT (dbin, "Switching to next group %p",
+ chain->next_groups->data);
+ chain->active_group = chain->next_groups->data;
+ chain->next_groups =
+ g_list_delete_link (chain->next_groups, chain->next_groups);
+ gst_decode_chain_start_free_hidden_groups_thread (chain);
+ *switched = TRUE;
+ chain->drained = FALSE;
+ } else {
+ GST_DEBUG ("Group %p was the last in chain %p", chain->active_group,
+ chain);
+ chain->drained = TRUE;
+ /* We're drained ! */
+ }
+ } else {
+ if (subdrained && !chain->next_groups)
+ *drained = TRUE;
+ }
+ }
+
+ beach:
+ CHAIN_MUTEX_UNLOCK (chain);
+
+ GST_DEBUG ("Chain %p (handled:%d, last_group:%d, drained:%d, switched:%d)",
+ chain, handled, *last_group, chain->drained, *switched);
+
+ *drained = chain->drained;
+
+ /* Top-level chain fully drained: notify the application */
+ if (*drained && !chain->parent) /* only emit signal from top chain */
+ g_signal_emit (dbin, gst_decode_bin_signals[SIGNAL_DRAINED], 0, NULL);
+
+ return handled;
+ }
+
+ /* gst_decode_pad_handle_eos:
+  * Called from the event probe when @pad sees EOS.  First sends a
+  * stream-group-done event downstream (so blocked downstream elements
+  * can unblock before we drain), then drains the chain tree and, if a
+  * group switch resulted, exposes the newly completed chain.
+  *
+  * Returns TRUE if this was the last group, i.e. the EOS event should
+  * be forwarded downstream. */
+ static gboolean
+ gst_decode_pad_handle_eos (GstDecodePad * pad)
+ {
+ gboolean last_group = TRUE;
+ gboolean switched = FALSE;
+ gboolean drained = FALSE;
+ GstDecodeChain *chain = pad->chain;
+ GstDecodeBin *dbin = chain->dbin;
+ GstEvent *tmp;
+
+ GST_LOG_OBJECT (dbin, "pad %p", pad);
+
+ /* Send a stream-group-done event in case downstream needs
+ * to unblock before we can drain */
+ tmp = gst_pad_get_sticky_event (GST_PAD (pad), GST_EVENT_STREAM_START, 0);
+ if (tmp) {
+ guint group_id;
+ if (gst_event_parse_group_id (tmp, &group_id)) {
+ GstPad *peer = gst_pad_get_peer (GST_PAD (pad));
+
+ if (peer) {
+ GST_DEBUG_OBJECT (dbin,
+ "Sending stream-group-done for group %u to pad %"
+ GST_PTR_FORMAT, group_id, pad);
+ gst_pad_send_event (peer, gst_event_new_stream_group_done (group_id));
+ gst_object_unref (peer);
+ }
+ } else {
+ GST_DEBUG_OBJECT (dbin,
+ "No group ID to send stream-group-done on pad %" GST_PTR_FORMAT, pad);
+ }
+ gst_event_unref (tmp);
+ }
+
+ EXPOSE_LOCK (dbin);
+ if (dbin->decode_chain) {
+ drain_and_switch_chains (dbin->decode_chain, pad, &last_group, &drained,
+ &switched);
+
+ if (switched) {
+ /* If we resulted in a group switch, expose what's needed */
+ if (gst_decode_chain_is_complete (dbin->decode_chain))
+ gst_decode_bin_expose (dbin);
+ }
+ }
+ EXPOSE_UNLOCK (dbin);
+
+ return last_group;
+ }
+
+ /* gst_decode_group_is_drained:
+  *
+  * Returns (and caches) whether every child chain of @group is drained.
+  * A previously cached TRUE is trusted and returned immediately; the
+  * cache is only updated after a full pass over the children.
+  *
+  * Not MT-safe, call with group->parent's lock */
+ static gboolean
+ gst_decode_group_is_drained (GstDecodeGroup * group)
+ {
+   GList *walk;
+   gboolean all_drained = TRUE;
+ 
+   if (group->drained) {
+     all_drained = TRUE;
+     goto done;
+   }
+ 
+   for (walk = group->children; walk != NULL; walk = walk->next) {
+     GstDecodeChain *child = (GstDecodeChain *) walk->data;
+     gboolean child_drained;
+ 
+     CHAIN_MUTEX_LOCK (child);
+     child_drained = gst_decode_chain_is_drained (child);
+     CHAIN_MUTEX_UNLOCK (child);
+ 
+     if (!child_drained) {
+       all_drained = FALSE;
+       goto done;
+     }
+   }
+   group->drained = all_drained;
+ 
+ done:
+   GST_DEBUG_OBJECT (group->dbin, "Group %p is drained: %d", group,
+       all_drained);
+   return all_drained;
+ }
+
+ /* gst_decode_chain_is_drained:
+  *
+  * A chain counts as drained when either
+  *
+  * a) its endpad is drained, or
+  * b) it has no pending pads, its active group is drained and no next
+  *    groups are queued.
+  *
+  * Not MT-safe, call with chain lock
+  */
+ static gboolean
+ gst_decode_chain_is_drained (GstDecodeChain * chain)
+ {
+   gboolean result;
+ 
+   if (chain->endpad)
+     result = chain->endpad->drained;
+   else if (chain->pending_pads)
+     result = FALSE;
+   else
+     result = (chain->active_group != NULL
+         && gst_decode_group_is_drained (chain->active_group)
+         && chain->next_groups == NULL);
+ 
+   GST_DEBUG_OBJECT (chain->dbin, "Chain %p is drained: %d", chain, result);
+   return result;
+ }
+
+ /* gst_decode_group_reset_buffering:
+  * Re-evaluates who should do the buffering for @group: if every child
+  * chain already reset/handles buffering itself (all
+  * gst_decode_chain_reset_buffering () calls returned TRUE), buffering
+  * is disabled on this group's multiqueue, otherwise it is enabled with
+  * the configured low/high percent thresholds.
+  * Always returns TRUE. */
+ static gboolean
+ gst_decode_group_reset_buffering (GstDecodeGroup * group)
+ {
+ GList *l;
+ gboolean ret = TRUE;
+
+ GST_DEBUG_OBJECT (group->dbin, "Group reset buffering %p %s", group,
+ GST_ELEMENT_NAME (group->multiqueue));
+ for (l = group->children; l; l = l->next) {
+ GstDecodeChain *chain = l->data;
+
+ CHAIN_MUTEX_LOCK (chain);
+ if (!gst_decode_chain_reset_buffering (chain)) {
+ ret = FALSE;
+ }
+ CHAIN_MUTEX_UNLOCK (chain);
+ }
+
+ decodebin_set_queue_size_full (group->dbin, group->multiqueue, !ret,
+ FALSE, (group->parent ? group->parent->seekable : TRUE));
+
+ if (ret) {
+ /* all chains are buffering already, no need to do it here */
+ g_object_set (group->multiqueue, "use-buffering", FALSE, NULL);
+ } else {
+ g_object_set (group->multiqueue, "use-buffering", TRUE,
+ "low-percent", group->dbin->low_percent,
+ "high-percent", group->dbin->high_percent, NULL);
+ }
+
+ GST_DEBUG_OBJECT (group->dbin, "Setting %s buffering to %d",
+ GST_ELEMENT_NAME (group->multiqueue), !ret);
+ return TRUE;
+ }
+
+
+ /* sort_end_pads:
+  * GCompareFunc to use with lists of GstPad.
+  * Sorts pads by mime type.
+  * First video (raw, then non-raw), then audio (raw, then non-raw),
+  * then others; ties are broken by comparing stream-ids.
+  *
+  * Return: negative if a<b, 0 if a==b, positive if a>b
+  */
+ 
+ /* Rank of a caps structure name for end-pad ordering (lower sorts first). */
+ static gint
+ sort_end_pads_media_rank (const gchar * name)
+ {
+   if (g_strrstr (name, "video/x-raw"))
+     return 0;
+   if (g_strrstr (name, "video/"))
+     return 1;
+   if (g_strrstr (name, "audio/x-raw"))
+     return 2;
+   if (g_strrstr (name, "audio/"))
+     return 3;
+   return 4;
+ }
+ 
+ static gint
+ sort_end_pads (GstDecodePad * da, GstDecodePad * db)
+ {
+   GstCaps *capsa, *capsb;
+   gint ranka, rankb;
+   gchar *ida, *idb;
+   gint ret;
+ 
+   capsa = get_pad_caps (GST_PAD_CAST (da));
+   capsb = get_pad_caps (GST_PAD_CAST (db));
+ 
+   /* rank both pads while the caps references are still alive */
+   ranka = sort_end_pads_media_rank (gst_structure_get_name
+       (gst_caps_get_structure ((const GstCaps *) capsa, 0)));
+   rankb = sort_end_pads_media_rank (gst_structure_get_name
+       (gst_caps_get_structure ((const GstCaps *) capsb, 0)));
+ 
+   gst_caps_unref (capsa);
+   gst_caps_unref (capsb);
+ 
+   if (ranka != rankb)
+     return ranka - rankb;
+ 
+   /* if otherwise the same, sort by stream-id (NULL ids sort last) */
+   ida = gst_pad_get_stream_id (GST_PAD_CAST (da));
+   idb = gst_pad_get_stream_id (GST_PAD_CAST (db));
+   if (ida == NULL)
+     ret = 1;
+   else if (idb == NULL)
+     ret = -1;
+   else
+     ret = strcmp (ida, idb);
+   g_free (ida);
+   g_free (idb);
+ 
+   return ret;
+ }
+
+ /* _gst_element_get_linked_caps:
+  * Returns the current caps of the first src pad of @src that is linked
+  * to @sink (or to @capsfilter, when one is given), and stores a ref to
+  * that src pad in @srcpad.  Returns NULL — leaving *srcpad untouched —
+  * when no such link is found.  Caller owns the returned caps/pad refs. */
+ static GstCaps *
+ _gst_element_get_linked_caps (GstElement * src, GstElement * sink,
+ GstElement * capsfilter, GstPad ** srcpad)
+ {
+ GstIterator *it;
+ GstElement *parent;
+ GstPad *pad, *peer;
+ gboolean done = FALSE;
+ GstCaps *caps = NULL;
+ GValue item = { 0, };
+
+ it = gst_element_iterate_src_pads (src);
+ while (!done) {
+ switch (gst_iterator_next (it, &item)) {
+ case GST_ITERATOR_OK:
+ pad = g_value_get_object (&item);
+ peer = gst_pad_get_peer (pad);
+ if (peer) {
+ parent = gst_pad_get_parent_element (peer);
+ if (parent == sink || (capsfilter != NULL && parent == capsfilter)) {
+ caps = gst_pad_get_current_caps (pad);
+ *srcpad = gst_object_ref (pad);
+ done = TRUE;
+ }
+
+ if (parent)
+ gst_object_unref (parent);
+ gst_object_unref (peer);
+ }
+ g_value_reset (&item);
+ break;
+ case GST_ITERATOR_RESYNC:
+ gst_iterator_resync (it);
+ break;
+ case GST_ITERATOR_ERROR:
+ case GST_ITERATOR_DONE:
+ done = TRUE;
+ break;
+ }
+ }
+ g_value_unset (&item);
+ gst_iterator_free (it);
+
+ return caps;
+ }
+
+ /* FIXME: Invent gst_structure_take_structure() to prevent all the
+ * structure copying for nothing
+ */
+ /* gst_decode_chain_get_topology:
+  * Recursively builds the stream-topology GstStructure for @chain (used
+  * for the element message posted on the bus).  Returns NULL for an end
+  * chain without caps; otherwise the caller owns the returned structure. */
+ static GstStructure *
+ gst_decode_chain_get_topology (GstDecodeChain * chain)
+ {
+ GstStructure *s, *u;
+ GList *l;
+ GstCaps *caps;
+
+ if (G_UNLIKELY ((chain->endpad || chain->deadend)
+ && (chain->endcaps == NULL))) {
+ GST_WARNING ("End chain without valid caps !");
+ return NULL;
+ }
+
+ u = gst_structure_new_id_empty (topology_structure_name);
+
+ /* Now at the last element */
+ if ((chain->elements || !chain->active_group) &&
+ (chain->endpad || chain->deadend)) {
+ GstPad *srcpad;
+
+ s = gst_structure_new_id_empty (topology_structure_name);
+ gst_structure_id_set (u, topology_caps, GST_TYPE_CAPS, chain->endcaps,
+ NULL);
+
+ if (chain->endpad) {
+ gst_structure_id_set (u, topology_pad, GST_TYPE_PAD, chain->endpad, NULL);
+
+ srcpad = gst_ghost_pad_get_target (GST_GHOST_PAD_CAST (chain->endpad));
+ gst_structure_id_set (u, topology_element_srcpad, GST_TYPE_PAD,
+ srcpad, NULL);
+
+ gst_object_unref (srcpad);
+ }
+
+ gst_structure_id_set (s, topology_next, GST_TYPE_STRUCTURE, u, NULL);
+ gst_structure_free (u);
+ u = s;
+ } else if (chain->active_group) {
+ /* Recurse into the children of the active group */
+ GValue list = { 0, };
+ GValue item = { 0, };
+
+ g_value_init (&list, GST_TYPE_LIST);
+ g_value_init (&item, GST_TYPE_STRUCTURE);
+ for (l = chain->active_group->children; l; l = l->next) {
+ s = gst_decode_chain_get_topology (l->data);
+ if (s) {
+ gst_value_set_structure (&item, s);
+ gst_value_list_append_value (&list, &item);
+ g_value_reset (&item);
+ gst_structure_free (s);
+ }
+ }
+ gst_structure_id_set_value (u, topology_next, &list);
+ g_value_unset (&list);
+ g_value_unset (&item);
+ }
+
+ /* Get caps between all elements in this chain */
+ l = (chain->elements && chain->elements->next) ? chain->elements : NULL;
+ for (; l && l->next; l = l->next) {
+ GstDecodeElement *delem, *delem_next;
+ GstElement *elem, *capsfilter, *elem_next;
+ GstCaps *caps;
+ GstPad *srcpad;
+
+ delem = l->data;
+ elem = delem->element;
+ delem_next = l->next->data;
+ elem_next = delem_next->element;
+ capsfilter = delem_next->capsfilter;
+ srcpad = NULL;
+
+ caps = _gst_element_get_linked_caps (elem_next, elem, capsfilter, &srcpad);
+
+ if (caps) {
+ s = gst_structure_new_id_empty (topology_structure_name);
+ gst_structure_id_set (u, topology_caps, GST_TYPE_CAPS, caps, NULL);
+ gst_caps_unref (caps);
+
+ gst_structure_id_set (s, topology_next, GST_TYPE_STRUCTURE, u, NULL);
+ gst_structure_free (u);
+ u = s;
+ }
+
+ if (srcpad) {
+ gst_structure_id_set (u, topology_element_srcpad, GST_TYPE_PAD, srcpad,
+ NULL);
+ gst_object_unref (srcpad);
+ }
+ }
+
+ /* Caps that resulted in this chain */
+ caps = get_pad_caps (chain->pad);
+ if (G_UNLIKELY (!caps)) {
+ GST_WARNING_OBJECT (chain->pad, "Couldn't get the caps of decode chain");
+ return u;
+ }
+ gst_structure_id_set (u, topology_caps, GST_TYPE_CAPS, caps, NULL);
+ gst_structure_id_set (u, topology_element_srcpad, GST_TYPE_PAD, chain->pad,
+ NULL);
+ gst_caps_unref (caps);
+
+ return u;
+ }
+
+ /* gst_decode_bin_post_topology_message:
+  * Builds the stream-topology structure of the current decode chain and
+  * posts it as an element message on the bus. */
+ static void
+ gst_decode_bin_post_topology_message (GstDecodeBin * dbin)
+ {
+   GstStructure *s;
+   GstMessage *msg;
+ 
+   /* Guard against a vanished decode chain (e.g. a shutdown race):
+    * gst_decode_chain_get_topology () dereferences the chain
+    * unconditionally. */
+   if (G_UNLIKELY (dbin->decode_chain == NULL))
+     return;
+ 
+   s = gst_decode_chain_get_topology (dbin->decode_chain);
+ 
+   /* NULL means the topology could not be determined */
+   if (G_UNLIKELY (s == NULL))
+     return;
+   msg = gst_message_new_element (GST_OBJECT (dbin), s);
+   gst_element_post_message (GST_ELEMENT (dbin), msg);
+ }
+
+ /* gst_pad_sticky_events_foreach callback: logs every sticky event found
+  * on @pad.  Always returns TRUE so iteration visits all events. */
+ static gboolean
+ debug_sticky_event (GstPad * pad, GstEvent ** event, gpointer user_data)
+ {
+   GstEvent *ev = *event;
+ 
+   GST_DEBUG_OBJECT (pad, "sticky event %s (%p)", GST_EVENT_TYPE_NAME (ev), ev);
+   return TRUE;
+ }
+
+
+ /* Must only be called if the toplevel chain is complete and blocked! */
+ /* Not MT-safe, call with decodebin expose lock! */
+ /* gst_decode_bin_expose:
+  * Exposes the endpads of all currently active chains/groups as source
+  * pads of the bin: collects them, sorts them (video, then audio, then
+  * others), adds them to the element, signals no-more-pads, optionally
+  * posts the stream-topology message and finally unblocks the pads.
+  * Returns TRUE when everything was exposed successfully. */
+ static gboolean
+ gst_decode_bin_expose (GstDecodeBin * dbin)
+ {
+   GList *tmp, *endpads;
+   gboolean missing_plugin;
+   GString *missing_plugin_details;
+   gboolean already_exposed;
+   gboolean last_group;
+ 
+ retry:
+   endpads = NULL;
+   missing_plugin = FALSE;
+   already_exposed = TRUE;
+   last_group = TRUE;
+ 
+   missing_plugin_details = g_string_new ("");
+ 
+   GST_DEBUG_OBJECT (dbin, "Exposing currently active chains/groups");
+ 
+   /* Don't expose if we're currently shutting down */
+   DYN_LOCK (dbin);
+   if (G_UNLIKELY (dbin->shutdown)) {
+     GST_WARNING_OBJECT (dbin, "Currently, shutting down, aborting exposing");
+     DYN_UNLOCK (dbin);
+     /* Fix: don't leak the details string when aborting here */
+     g_string_free (missing_plugin_details, TRUE);
+     return FALSE;
+   }
+   DYN_UNLOCK (dbin);
+ 
+   /* Get the pads that we're going to expose and mark things as exposed */
+   if (!gst_decode_chain_expose (dbin->decode_chain, &endpads, &missing_plugin,
+           missing_plugin_details, &last_group)) {
+     g_list_foreach (endpads, (GFunc) gst_object_unref, NULL);
+     g_list_free (endpads);
+     g_string_free (missing_plugin_details, TRUE);
+     /* Failures could be due to the fact that we are currently shutting down (recheck) */
+     DYN_LOCK (dbin);
+     if (G_UNLIKELY (dbin->shutdown)) {
+       GST_WARNING_OBJECT (dbin, "Currently, shutting down, aborting exposing");
+       DYN_UNLOCK (dbin);
+       return FALSE;
+     }
+     DYN_UNLOCK (dbin);
+     GST_ERROR_OBJECT (dbin, "Broken chain/group tree");
+     g_return_val_if_reached (FALSE);
+     return FALSE;
+   }
+   if (endpads == NULL) {
+     /* Nothing to expose: either plugins are missing or the streams
+      * ended before producing any buffers */
+     if (missing_plugin) {
+       if (missing_plugin_details->len > 0) {
+         gchar *details = g_string_free (missing_plugin_details, FALSE);
+         GST_ELEMENT_ERROR (dbin, CORE, MISSING_PLUGIN, (NULL),
+             ("no suitable plugins found:\n%s", details));
+         g_free (details);
+       } else {
+         g_string_free (missing_plugin_details, TRUE);
+         GST_ELEMENT_ERROR (dbin, CORE, MISSING_PLUGIN, (NULL),
+             ("no suitable plugins found"));
+       }
+     } else {
+       /* in this case, the stream ended without buffers,
+        * just post a warning */
+       g_string_free (missing_plugin_details, TRUE);
+ 
+       GST_WARNING_OBJECT (dbin, "All streams finished without buffers. "
+           "Last group: %d", last_group);
+       if (last_group) {
+         GST_ELEMENT_ERROR (dbin, STREAM, FAILED, (NULL),
+             ("all streams without buffers"));
+       } else {
+         gboolean switched = FALSE;
+         gboolean drained = FALSE;
+ 
+         drain_and_switch_chains (dbin->decode_chain, NULL, &last_group,
+             &drained, &switched);
+         GST_ELEMENT_WARNING (dbin, STREAM, FAILED, (NULL),
+             ("all streams without buffers"));
+         if (switched) {
+           if (gst_decode_chain_is_complete (dbin->decode_chain))
+             goto retry;
+           else
+             return FALSE;
+         }
+       }
+     }
+ 
+     do_async_done (dbin);
+     return FALSE;
+   }
+ 
+   g_string_free (missing_plugin_details, TRUE);
+ 
+   /* Check if this was called when everything was exposed already */
+   for (tmp = endpads; tmp && already_exposed; tmp = tmp->next) {
+     GstDecodePad *dpad = tmp->data;
+ 
+     already_exposed &= dpad->exposed;
+     if (!already_exposed)
+       break;
+   }
+   if (already_exposed) {
+     GST_DEBUG_OBJECT (dbin, "Everything was exposed already!");
+     g_list_foreach (endpads, (GFunc) gst_object_unref, NULL);
+     g_list_free (endpads);
+     return TRUE;
+   }
+ 
+   /* going to expose something, reset buffering */
+   gst_decode_bin_reset_buffering (dbin);
+ 
+   /* Set all already exposed pads to blocked */
+   for (tmp = endpads; tmp; tmp = tmp->next) {
+     GstDecodePad *dpad = tmp->data;
+ 
+     if (dpad->exposed) {
+       GST_DEBUG_OBJECT (dpad, "blocking exposed pad");
+       gst_decode_pad_set_blocked (dpad, TRUE);
+     }
+   }
+ 
+   /* re-order pads : video, then audio, then others */
+   endpads = g_list_sort (endpads, (GCompareFunc) sort_end_pads);
+ 
+   /* Don't add pads if we are shutting down */
+   DYN_LOCK (dbin);
+   if (G_UNLIKELY (dbin->shutdown)) {
+     GST_WARNING_OBJECT (dbin, "Currently, shutting down, aborting exposing");
+     DYN_UNLOCK (dbin);
+     /* Fix: drop the pad references collected in endpads when aborting */
+     g_list_foreach (endpads, (GFunc) gst_object_unref, NULL);
+     g_list_free (endpads);
+     return FALSE;
+   }
+ 
+   /* Expose pads */
+   for (tmp = endpads; tmp; tmp = tmp->next) {
+     GstDecodePad *dpad = (GstDecodePad *) tmp->data;
+     gchar *padname;
+ 
+     /* 1. rewrite name */
+     padname = g_strdup_printf ("src_%u", dbin->nbpads);
+     dbin->nbpads++;
+     GST_DEBUG_OBJECT (dbin, "About to expose dpad %s as %s",
+         GST_OBJECT_NAME (dpad), padname);
+     gst_object_set_name (GST_OBJECT (dpad), padname);
+     g_free (padname);
+ 
+     gst_pad_sticky_events_foreach (GST_PAD_CAST (dpad), debug_sticky_event,
+         dpad);
+ 
+     /* 2. activate and add */
+     if (!dpad->exposed) {
+       dpad->exposed = TRUE;
+       if (!gst_element_add_pad (GST_ELEMENT (dbin), GST_PAD_CAST (dpad))) {
+         /* not really fatal, we can try to add the other pads */
+         g_warning ("error adding pad to decodebin");
+         dpad->exposed = FALSE;
+         continue;
+       }
+     }
+ 
+     /* 3. emit signal */
+     GST_INFO_OBJECT (dpad, "added new decoded pad");
+   }
+   DYN_UNLOCK (dbin);
+ 
+   /* 4. Signal no-more-pads. This allows the application to hook stuff to the
+    * exposed pads */
+   GST_LOG_OBJECT (dbin, "signaling no-more-pads");
+   gst_element_no_more_pads (GST_ELEMENT (dbin));
+ 
+   /* 5. Send a custom element message with the stream topology */
+   if (dbin->post_stream_topology)
+     gst_decode_bin_post_topology_message (dbin);
+ 
+   /* 6. Unblock internal pads. The application should have connected stuff now
+    * so that streaming can continue. */
+   for (tmp = endpads; tmp; tmp = tmp->next) {
+     GstDecodePad *dpad = (GstDecodePad *) tmp->data;
+ 
+     GST_DEBUG_OBJECT (dpad, "unblocking");
+     gst_decode_pad_unblock (dpad);
+     GST_DEBUG_OBJECT (dpad, "unblocked");
+     gst_object_unref (dpad);
+   }
+   g_list_free (endpads);
+ 
+   do_async_done (dbin);
+   GST_DEBUG_OBJECT (dbin, "Exposed everything");
+   return TRUE;
+ }
+
+ /* gst_decode_chain_expose:
+  *
+  * Check if the chain can be exposed and add all endpads
+  * to the endpads list.
+  *
+  * Also update the active group's multiqueue to the
+  * runtime limits.
+  *
+  * Not MT-safe, call with decodebin expose lock! *
+  */
+ static gboolean
+ gst_decode_chain_expose (GstDecodeChain * chain, GList ** endpads,
+ gboolean * missing_plugin, GString * missing_plugin_details,
+ gboolean * last_group)
+ {
+ GstDecodeGroup *group;
+ GList *l;
+ GstDecodeBin *dbin;
+
+ /* Dead end: nothing to expose, but record missing-plugin details */
+ if (chain->deadend) {
+ if (chain->endcaps) {
+ if (chain->deadend_details) {
+ g_string_append (missing_plugin_details, chain->deadend_details);
+ g_string_append_c (missing_plugin_details, '\n');
+ } else {
+ gchar *desc = gst_pb_utils_get_codec_description (chain->endcaps);
+ gchar *caps_str = gst_caps_to_string (chain->endcaps);
+ g_string_append_printf (missing_plugin_details,
+ "Missing decoder: %s (%s)\n", desc, caps_str);
+ g_free (caps_str);
+ g_free (desc);
+ }
+ *missing_plugin = TRUE;
+ }
+ return TRUE;
+ }
+
+ /* Leaf chain: collect its end pad if it is ready or already exposed */
+ if (chain->endpad) {
+ if (!gst_decode_pad_is_exposable (chain->endpad) && !chain->endpad->exposed)
+ return FALSE;
+ *endpads = g_list_prepend (*endpads, gst_object_ref (chain->endpad));
+ return TRUE;
+ }
+
+ if (chain->next_groups)
+ *last_group = FALSE;
+
+ group = chain->active_group;
+ if (!group)
+ return FALSE;
+ /* Don't expose a group that may still grow new pads */
+ if (!group->no_more_pads && !group->overrun)
+ return FALSE;
+
+ dbin = group->dbin;
+
+ /* we can now disconnect any overrun signal, which is used to expose the
+ * group. */
+ if (group->overrunsig) {
+ GST_LOG_OBJECT (dbin, "Disconnecting overrun");
+ g_signal_handler_disconnect (group->multiqueue, group->overrunsig);
+ group->overrunsig = 0;
+ }
+
+ for (l = group->children; l; l = l->next) {
+ GstDecodeChain *childchain = l->data;
+
+ if (!gst_decode_chain_expose (childchain, endpads, missing_plugin,
+ missing_plugin_details, last_group))
+ return FALSE;
+ }
+
+ return TRUE;
+ }
+
+ /*************************
+ * GstDecodePad functions
+ *************************/
+
+ /* Class init: GstDecodePad adds no properties or virtual methods, so
+  * there is nothing to set up here. */
+ static void
+ gst_decode_pad_class_init (GstDecodePadClass * klass)
+ {
+ }
+
+ /* Instance init: a fresh decode pad has no owning chain yet and is
+  * neither blocked, exposed nor drained.  The floating reference is
+  * sunk right away so ownership is explicit. */
+ static void
+ gst_decode_pad_init (GstDecodePad * pad)
+ {
+   pad->chain = NULL;
+   pad->exposed = FALSE;
+   pad->blocked = FALSE;
+   pad->drained = FALSE;
+   gst_object_ref_sink (pad);
+ }
+
+ /* source_pad_blocked_cb:
+  * Blocking probe installed on the target of a not-yet-exposed decode
+  * pad.  Non-serialized events/queries are passed through; sticky
+  * events are copied onto the ghost pad first.  Once a blockable item
+  * arrives the pad is marked blocked and an expose attempt is made if
+  * the toplevel chain is complete. */
+ static GstPadProbeReturn
+ source_pad_blocked_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
+ {
+ GstDecodePad *dpad = user_data;
+ GstDecodeChain *chain;
+ GstDecodeBin *dbin;
+ GstPadProbeReturn ret = GST_PAD_PROBE_OK;
+
+ if (GST_PAD_PROBE_INFO_TYPE (info) & GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM) {
+ GstEvent *event = GST_PAD_PROBE_INFO_EVENT (info);
+
+ GST_LOG_OBJECT (pad, "Seeing event '%s'", GST_EVENT_TYPE_NAME (event));
+
+ if (!GST_EVENT_IS_SERIALIZED (event)) {
+ /* do not block on sticky or out of band events otherwise the allocation query
+ from demuxer might block the loop thread */
+ GST_LOG_OBJECT (pad, "Letting OOB event through");
+ return GST_PAD_PROBE_PASS;
+ }
+
+ if (GST_EVENT_IS_STICKY (event) && GST_EVENT_TYPE (event) != GST_EVENT_EOS) {
+ /* manually push sticky events to ghost pad to avoid exposing pads
+ * that don't have the sticky events. Handle EOS separately as we
+ * want to block the pad on it if we didn't get any buffers before
+ * EOS and expose the pad then. */
+ gst_pad_push_event (GST_PAD_CAST (dpad), gst_event_ref (event));
+
+ /* let the sticky events pass */
+ ret = GST_PAD_PROBE_PASS;
+
+ /* we only want to try to expose on CAPS events */
+ if (GST_EVENT_TYPE (event) != GST_EVENT_CAPS) {
+ GST_LOG_OBJECT (pad, "Letting sticky non-CAPS event through");
+ goto done;
+ }
+ }
+ } else if (GST_PAD_PROBE_INFO_TYPE (info) &
+ GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM) {
+ GstQuery *query = GST_PAD_PROBE_INFO_QUERY (info);
+
+ if (!GST_QUERY_IS_SERIALIZED (query)) {
+ /* do not block on non-serialized queries */
+ GST_LOG_OBJECT (pad, "Letting non-serialized query through");
+ return GST_PAD_PROBE_PASS;
+ }
+ if (!gst_pad_has_current_caps (pad)) {
+ /* do not block on allocation queries before we have caps,
+ * this would deadlock because we are doing no autoplugging
+ * without caps.
+ * TODO: Try to do autoplugging based on the query caps
+ */
+ GST_LOG_OBJECT (pad, "Letting serialized query before caps through");
+ return GST_PAD_PROBE_PASS;
+ }
+ }
+ chain = dpad->chain;
+ dbin = chain->dbin;
+
+ GST_LOG_OBJECT (dpad, "blocked: dpad->chain:%p", chain);
+
+ dpad->blocked = TRUE;
+
+ /* Pad has data: try to expose if the whole chain tree is ready */
+ EXPOSE_LOCK (dbin);
+ if (dbin->decode_chain) {
+ if (gst_decode_chain_is_complete (dbin->decode_chain)) {
+ if (!gst_decode_bin_expose (dbin))
+ GST_WARNING_OBJECT (dbin, "Couldn't expose group");
+ }
+ }
+ EXPOSE_UNLOCK (dbin);
+
+ done:
+ return ret;
+ }
+
+ /* Downstream event probe on exposed decode pads.  On EOS it asks
+  * gst_decode_pad_handle_eos () whether all pads are drained:
+  * * If there is no next group, we will let the EOS go through.
+  * * If there is a next group but the current group isn't completely
+  *   drained, we will drop the EOS event.
+  * * If there is a next group to expose and this was the last non-drained
+  *   pad for that group, we will remove the ghostpad of the current group
+  *   first, which unlinks the peer and so drops the EOS. */
+ static GstPadProbeReturn
+ source_pad_event_probe (GstPad * pad, GstPadProbeInfo * info,
+     gpointer user_data)
+ {
+   GstEvent *event = GST_PAD_PROBE_INFO_EVENT (info);
+   GstDecodePad *dpad = user_data;
+   gboolean forward = TRUE;
+ 
+   GST_LOG_OBJECT (pad, "%s dpad:%p", GST_EVENT_TYPE_NAME (event), dpad);
+ 
+   if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
+     GST_DEBUG_OBJECT (pad, "we received EOS");
+ 
+     forward = gst_decode_pad_handle_eos (dpad);
+   }
+ 
+   return forward ? GST_PAD_PROBE_OK : GST_PAD_PROBE_DROP;
+ }
+
+ /* gst_decode_pad_set_blocked:
+  * (Un)installs the blocking probe on @dpad's target pad and keeps
+  * dbin->blocked_pads (with a ref per entry) in sync.  During shutdown
+  * the pad is deactivated instead of blocked, forcing flushing to
+  * prevent NOT_LINKED errors.  Takes the DYN lock. */
+ static void
+ gst_decode_pad_set_blocked (GstDecodePad * dpad, gboolean blocked)
+ {
+ GstDecodeBin *dbin = dpad->dbin;
+ GstPad *opad;
+
+ DYN_LOCK (dbin);
+
+ GST_DEBUG_OBJECT (dpad, "blocking pad: %d", blocked);
+
+ opad = gst_ghost_pad_get_target (GST_GHOST_PAD_CAST (dpad));
+ if (!opad)
+ goto out;
+
+ /* do not block if shutting down.
+ * we do not consider/expect it blocked further below, but use other trick */
+ if (!blocked || !dbin->shutdown) {
+ if (blocked) {
+ if (dpad->block_id == 0)
+ dpad->block_id =
+ gst_pad_add_probe (opad,
+ GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM |
+ GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM, source_pad_blocked_cb,
+ gst_object_ref (dpad), (GDestroyNotify) gst_object_unref);
+ } else {
+ if (dpad->block_id != 0) {
+ gst_pad_remove_probe (opad, dpad->block_id);
+ dpad->block_id = 0;
+ }
+ dpad->blocked = FALSE;
+ }
+ }
+
+ if (blocked) {
+ if (dbin->shutdown) {
+ /* deactivate to force flushing state to prevent NOT_LINKED errors */
+ gst_pad_set_active (GST_PAD_CAST (dpad), FALSE);
+ /* note that deactivating the target pad would have no effect here,
+ * since elements are typically connected first (and pads exposed),
+ * and only then brought to PAUSED state (so pads activated) */
+ } else {
+ gst_object_ref (dpad);
+ dbin->blocked_pads = g_list_prepend (dbin->blocked_pads, dpad);
+ }
+ } else {
+ GList *l;
+
+ /* drop the list reference taken when the pad was blocked */
+ if ((l = g_list_find (dbin->blocked_pads, dpad))) {
+ gst_object_unref (dpad);
+ dbin->blocked_pads = g_list_delete_link (dbin->blocked_pads, l);
+ }
+ }
+ gst_object_unref (opad);
+ out:
+ DYN_UNLOCK (dbin);
+ }
+
+ /* Installs a downstream event probe (source_pad_event_probe) on @dpad
+  * to watch for EOS.  NOTE(review): the probe id is not stored, so the
+  * probe is presumably intended to live as long as the pad. */
+ static void
+ gst_decode_pad_add_drained_check (GstDecodePad * dpad)
+ {
+ gst_pad_add_probe (GST_PAD_CAST (dpad), GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
+ source_pad_event_probe, dpad, NULL);
+ }
+
+ /* gst_decode_pad_activate:
+  * Ties @dpad to @chain, activates the pad, blocks it (so nothing flows
+  * before it is exposed) and installs the EOS/drain probe. */
+ static void
+ gst_decode_pad_activate (GstDecodePad * dpad, GstDecodeChain * chain)
+ {
+ g_return_if_fail (chain != NULL);
+
+ dpad->chain = chain;
+ gst_pad_set_active (GST_PAD_CAST (dpad), TRUE);
+ gst_decode_pad_set_blocked (dpad, TRUE);
+ gst_decode_pad_add_drained_check (dpad);
+ }
+
+ /* Convenience wrapper: lift the block installed on @dpad. */
+ static void
+ gst_decode_pad_unblock (GstDecodePad * dpad)
+ {
+ gst_decode_pad_set_blocked (dpad, FALSE);
+ }
+
+ /* Event handler of exposed decodebin source pads.  SEEK events are
+  * routed straight to an adaptive streaming demuxer when the current
+  * chain has one; everything else takes the default event path. */
+ static gboolean
+ gst_decode_pad_event (GstPad * pad, GstObject * parent, GstEvent * event)
+ {
+ GstDecodeBin *dbin = GST_DECODE_BIN (parent);
+
+ if (GST_EVENT_TYPE (event) == GST_EVENT_SEEK && dbin && dbin->decode_chain) {
+ GstElement *demuxer = NULL;
+
+ /* For adaptive demuxers we send the seek event directly to the demuxer.
+ * See https://bugzilla.gnome.org/show_bug.cgi?id=606382
+ */
+ CHAIN_MUTEX_LOCK (dbin->decode_chain);
+ if (dbin->decode_chain->adaptive_demuxer) {
+ GstDecodeElement *delem = dbin->decode_chain->elements->data;
+ demuxer = gst_object_ref (delem->element);
+ }
+ CHAIN_MUTEX_UNLOCK (dbin->decode_chain);
+
+ if (demuxer) {
+ gboolean ret;
+
+ GST_DEBUG_OBJECT (dbin,
+ "Sending SEEK event directly to adaptive streaming demuxer %s",
+ GST_OBJECT_NAME (demuxer));
+ ret = gst_element_send_event (demuxer, event);
+ gst_object_unref (demuxer);
+ return ret;
+ }
+ }
+
+ return gst_pad_event_default (pad, parent, event);
+ }
+
+ /* Query handler installed on the ghost pad's internal proxy pad.
+  * While the pad is not yet exposed, the application gets a chance to
+  * answer queries through the autoplug-query signal; if nothing handles
+  * the query, the default handler runs. */
+ static gboolean
+ gst_decode_pad_query (GstPad * pad, GstObject * parent, GstQuery * query)
+ {
+ GstDecodePad *dpad = GST_DECODE_PAD (parent);
+ gboolean ret = FALSE;
+
+ CHAIN_MUTEX_LOCK (dpad->chain);
+ if (!dpad->exposed && !dpad->dbin->shutdown && !dpad->chain->deadend
+ && dpad->chain->elements) {
+ GstDecodeElement *delem = dpad->chain->elements->data;
+
+ ret = FALSE;
+ GST_DEBUG_OBJECT (dpad->dbin,
+ "calling autoplug-query for %s (element %s): %" GST_PTR_FORMAT,
+ GST_PAD_NAME (dpad), GST_ELEMENT_NAME (delem->element), query);
+ g_signal_emit (G_OBJECT (dpad->dbin),
+ gst_decode_bin_signals[SIGNAL_AUTOPLUG_QUERY], 0, dpad, delem->element,
+ query, &ret);
+
+ if (ret)
+ GST_DEBUG_OBJECT (dpad->dbin,
+ "autoplug-query returned %d: %" GST_PTR_FORMAT, ret, query);
+ else
+ GST_DEBUG_OBJECT (dpad->dbin, "autoplug-query returned %d", ret);
+ }
+ CHAIN_MUTEX_UNLOCK (dpad->chain);
+
+ /* If exposed or nothing handled the query use the default handler */
+ if (!ret)
+ ret = gst_pad_query_default (pad, parent, query);
+
+ return ret;
+ }
+
+ /* A decode pad can be exposed once it is blocked, already exposed, or
+  * has negotiated caps. */
+ static gboolean
+ gst_decode_pad_is_exposable (GstDecodePad * endpad)
+ {
+   return endpad->blocked || endpad->exposed
+       || gst_pad_has_current_caps (GST_PAD_CAST (endpad));
+ }
+
+ /* gst_decode_pad_new:
+  *
+  * Creates a new GstDecodePad (a src ghost pad) belonging to @chain.
+  * The query function goes on the internal proxy pad, the event
+  * function on the ghost pad itself. */
+ static GstDecodePad *
+ gst_decode_pad_new (GstDecodeBin * dbin, GstDecodeChain * chain)
+ {
+ GstDecodePad *dpad;
+ GstProxyPad *ppad;
+ GstPadTemplate *pad_tmpl;
+
+ GST_DEBUG_OBJECT (dbin, "making new decodepad");
+ pad_tmpl = gst_static_pad_template_get (&decoder_bin_src_template);
+ dpad =
+ g_object_new (GST_TYPE_DECODE_PAD, "direction", GST_PAD_SRC,
+ "template", pad_tmpl, NULL);
+ dpad->chain = chain;
+ dpad->dbin = dbin;
+ gst_object_unref (pad_tmpl);
+
+ /* queries arrive on the internal proxy pad, events on the ghost pad */
+ ppad = gst_proxy_pad_get_internal (GST_PROXY_PAD (dpad));
+ gst_pad_set_query_function (GST_PAD_CAST (ppad), gst_decode_pad_query);
+ gst_pad_set_event_function (GST_PAD_CAST (dpad), gst_decode_pad_event);
+ gst_object_unref (ppad);
+
+ return dpad;
+ }
+
+ /* gst_pending_pad_free:
+  * Releases a GstPendingPad: removes its event probe and caps-notify
+  * handler, drops the pad reference and frees the struct. */
+ static void
+ gst_pending_pad_free (GstPendingPad * ppad)
+ {
+ g_assert (ppad);
+ g_assert (ppad->pad);
+
+ if (ppad->event_probe_id != 0)
+ gst_pad_remove_probe (ppad->pad, ppad->event_probe_id);
+ if (ppad->notify_caps_id)
+ g_signal_handler_disconnect (ppad->pad, ppad->notify_caps_id);
+ gst_object_unref (ppad->pad);
+ g_slice_free (GstPendingPad, ppad);
+ }
+
+ /*****
+ * Element add/remove
+ *****/
+
+ /* Marks an async state change as pending and posts async-start through
+  * the parent bin's message handling. */
+ static void
+ do_async_start (GstDecodeBin * dbin)
+ {
+ GstMessage *message;
+
+ dbin->async_pending = TRUE;
+
+ message = gst_message_new_async_start (GST_OBJECT_CAST (dbin));
+ parent_class->handle_message (GST_BIN_CAST (dbin), message);
+ }
+
+ /* do_async_done:
+  *
+  * If an async state change is pending, posts an async-done message
+  * through the bin's message handler and clears the pending flag. */
+ static void
+ do_async_done (GstDecodeBin * dbin)
+ {
+ GstMessage *message;
+
+ if (dbin->async_pending) {
+ message =
+ gst_message_new_async_done (GST_OBJECT_CAST (dbin),
+ GST_CLOCK_TIME_NONE);
+ parent_class->handle_message (GST_BIN_CAST (dbin), message);
+
+ dbin->async_pending = FALSE;
+ }
+ }
+
+ /*****
+ * convenience functions
+ *****/
+
+ /* find_sink_pad
+ *
+ * Returns the first sink pad of the given element, or NULL if it doesn't have
+ * any.
+ */
+
+ static GstPad *
+ find_sink_pad (GstElement * element)
+ {
+ GstIterator *it;
+ GstPad *pad = NULL;
+ GValue item = { 0, };
+
+ it = gst_element_iterate_sink_pads (element);
+
+ /* Only the first sink pad is taken; the returned pad carries a
+  * reference that the caller must release */
+ if ((gst_iterator_next (it, &item)) == GST_ITERATOR_OK)
+ pad = g_value_dup_object (&item);
+ g_value_unset (&item);
+ gst_iterator_free (it);
+
+ return pad;
+ }
+
+ /* call with dyn_lock held */
+ /* unblock_pads:
+  *
+  * Removes the block probe from every pad on dbin->blocked_pads and
+  * deactivates each ghost pad (making it flushing). The DYN lock is
+  * temporarily released around the deactivation so streaming threads
+  * sitting in our probes can stop. Consumes the list's pad references. */
+ static void
+ unblock_pads (GstDecodeBin * dbin)
+ {
+ GST_LOG_OBJECT (dbin, "unblocking pads");
+
+ while (dbin->blocked_pads) {
+ GList *tmp = dbin->blocked_pads;
+ GstDecodePad *dpad = (GstDecodePad *) tmp->data;
+ GstPad *opad;
+
+ dbin->blocked_pads = g_list_delete_link (dbin->blocked_pads, tmp);
+ opad = gst_ghost_pad_get_target (GST_GHOST_PAD_CAST (dpad));
+ if (opad) {
+
+ GST_DEBUG_OBJECT (dpad, "unblocking");
+ if (dpad->block_id != 0) {
+ gst_pad_remove_probe (opad, dpad->block_id);
+ dpad->block_id = 0;
+ }
+ gst_object_unref (opad);
+ }
+
+ dpad->blocked = FALSE;
+
+ /* We release the dyn lock since we want to allow the streaming threads
+ * to properly stop and not be blocked in our various probes */
+ DYN_UNLOCK (dbin);
+ /* make flushing, prevent NOT_LINKED */
+ gst_pad_set_active (GST_PAD_CAST (dpad), FALSE);
+ DYN_LOCK (dbin);
+
+ GST_DEBUG_OBJECT (dpad, "unblocked");
+ gst_object_unref (dpad);
+ }
+ }
+
+ /* gst_decode_chain_stop:
+  *
+  * Recursively collects a reference to every element of @chain (and of
+  * all its next/active/old child groups) into @elements. For the
+  * toplevel call (@elements == NULL) a local queue is used instead, and
+  * the collected elements are then state-locked and shut down to NULL,
+  * with the EXPOSE lock released while doing so. */
+ static void
+ gst_decode_chain_stop (GstDecodeBin * dbin, GstDecodeChain * chain,
+ GQueue * elements)
+ {
+ GQueue *internal_elements, internal_elements_ = G_QUEUE_INIT;
+ GList *l;
+
+ CHAIN_MUTEX_LOCK (chain);
+ if (elements) {
+ internal_elements = elements;
+ } else {
+ internal_elements = &internal_elements_;
+ }
+
+ for (l = chain->next_groups; l; l = l->next) {
+ GstDecodeGroup *group = l->data;
+ GList *m;
+
+ for (m = group->children; m; m = m->next) {
+ GstDecodeChain *chain2 = m->data;
+ gst_decode_chain_stop (dbin, chain2, internal_elements);
+ }
+ if (group->multiqueue)
+ g_queue_push_head (internal_elements, gst_object_ref (group->multiqueue));
+ }
+
+ if (chain->active_group) {
+ for (l = chain->active_group->children; l; l = l->next) {
+ GstDecodeChain *chain2 = l->data;
+ gst_decode_chain_stop (dbin, chain2, internal_elements);
+ }
+ if (chain->active_group->multiqueue)
+ g_queue_push_head (internal_elements,
+ gst_object_ref (chain->active_group->multiqueue));
+ }
+
+ for (l = chain->old_groups; l; l = l->next) {
+ GstDecodeGroup *group = l->data;
+ GList *m;
+
+ for (m = group->children; m; m = m->next) {
+ GstDecodeChain *chain2 = m->data;
+ gst_decode_chain_stop (dbin, chain2, internal_elements);
+ }
+ if (group->multiqueue)
+ g_queue_push_head (internal_elements, gst_object_ref (group->multiqueue));
+ }
+
+ for (l = chain->elements; l; l = l->next) {
+ GstDecodeElement *delem = l->data;
+
+ if (delem->capsfilter)
+ g_queue_push_head (internal_elements, gst_object_ref (delem->capsfilter));
+ g_queue_push_head (internal_elements, gst_object_ref (delem->element));
+ }
+
+ CHAIN_MUTEX_UNLOCK (chain);
+
+ if (!elements) {
+ GstElement *element;
+
+ /* Release the EXPOSE lock so streaming threads can drain and stop */
+ EXPOSE_UNLOCK (dbin);
+ /* Shut down from bottom to top */
+ while ((element = g_queue_pop_tail (internal_elements))) {
+ /* The bin must never ever change the state of this element anymore */
+ gst_element_set_locked_state (element, TRUE);
+ gst_element_set_state (element, GST_STATE_NULL);
+ gst_object_unref (element);
+ }
+ g_queue_clear (internal_elements);
+ EXPOSE_LOCK (dbin);
+ }
+ }
+
+ /* gst_decode_bin_change_state:
+  *
+  * State change handling for decodebin. READY->PAUSED is reported as
+  * ASYNC until typefind reports a type; PAUSED->READY stops and frees
+  * the current decode chain and flushes aggregated buffering messages. */
+ static GstStateChangeReturn
+ gst_decode_bin_change_state (GstElement * element, GstStateChange transition)
+ {
+ GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
+ GstDecodeBin *dbin = GST_DECODE_BIN (element);
+ GstDecodeChain *chain_to_free = NULL;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ /* We cannot operate without the internal typefind element */
+ if (dbin->typefind == NULL)
+ goto missing_typefind;
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ /* Make sure we've cleared all existing chains */
+ EXPOSE_LOCK (dbin);
+ if (dbin->decode_chain) {
+ gst_decode_chain_free (dbin->decode_chain);
+ dbin->decode_chain = NULL;
+ }
+ EXPOSE_UNLOCK (dbin);
+ DYN_LOCK (dbin);
+ GST_LOG_OBJECT (dbin, "clearing shutdown flag");
+ dbin->shutdown = FALSE;
+ DYN_UNLOCK (dbin);
+ dbin->have_type = FALSE;
+ /* Stay ASYNC until a type has been found and pads exposed */
+ ret = GST_STATE_CHANGE_ASYNC;
+ do_async_start (dbin);
+
+
+ /* connect a signal to find out when the typefind element found
+ * a type */
+ dbin->have_type_id =
+ g_signal_connect (dbin->typefind, "have-type",
+ G_CALLBACK (type_found), dbin);
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ if (dbin->have_type_id)
+ g_signal_handler_disconnect (dbin->typefind, dbin->have_type_id);
+ dbin->have_type_id = 0;
+ DYN_LOCK (dbin);
+ GST_LOG_OBJECT (dbin, "setting shutdown flag");
+ dbin->shutdown = TRUE;
+ unblock_pads (dbin);
+ DYN_UNLOCK (dbin);
+
+ /* Make sure we don't have cleanup races where
+ * we might be trying to deactivate pads (in the cleanup thread)
+ * at the same time as the default element deactivation
+ * (in PAUSED=>READY) */
+ g_mutex_lock (&dbin->cleanup_lock);
+ if (dbin->cleanup_thread) {
+ g_thread_join (dbin->cleanup_thread);
+ dbin->cleanup_thread = NULL;
+ }
+ g_mutex_unlock (&dbin->cleanup_lock);
+ /* fall through (default case is a no-op) */
+ default:
+ break;
+ }
+
+ {
+ GstStateChangeReturn bret;
+
+ bret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ if (G_UNLIKELY (bret == GST_STATE_CHANGE_FAILURE))
+ goto activate_failed;
+ else if (G_UNLIKELY (bret == GST_STATE_CHANGE_NO_PREROLL)) {
+ /* NO_PREROLL: no async completion will follow, so finish it now */
+ do_async_done (dbin);
+ ret = bret;
+ }
+ }
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ do_async_done (dbin);
+ /* Stop the chain under the EXPOSE lock, free it outside of it */
+ EXPOSE_LOCK (dbin);
+ if (dbin->decode_chain) {
+ gst_decode_chain_stop (dbin, dbin->decode_chain, NULL);
+ chain_to_free = dbin->decode_chain;
+ gst_decode_chain_free_internal (dbin->decode_chain, TRUE);
+ dbin->decode_chain = NULL;
+ }
+ EXPOSE_UNLOCK (dbin);
+ if (chain_to_free)
+ gst_decode_chain_free (chain_to_free);
+ /* Drop any aggregated buffering messages */
+ g_list_free_full (dbin->buffering_status,
+ (GDestroyNotify) gst_message_unref);
+ dbin->buffering_status = NULL;
+ /* Let's do a final check of leftover groups to free */
+ g_mutex_lock (&dbin->cleanup_lock);
+ if (dbin->cleanup_groups) {
+ gst_decode_chain_free_hidden_groups (dbin->cleanup_groups);
+ dbin->cleanup_groups = NULL;
+ }
+ g_mutex_unlock (&dbin->cleanup_lock);
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ /* Let's do a final check of leftover groups to free */
+ g_mutex_lock (&dbin->cleanup_lock);
+ if (dbin->cleanup_groups) {
+ gst_decode_chain_free_hidden_groups (dbin->cleanup_groups);
+ dbin->cleanup_groups = NULL;
+ }
+ g_mutex_unlock (&dbin->cleanup_lock);
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+
+ /* ERRORS */
+ missing_typefind:
+ {
+ gst_element_post_message (element,
+ gst_missing_element_message_new (element, "typefind"));
+ GST_ELEMENT_ERROR (dbin, CORE, MISSING_PLUGIN, (NULL), ("no typefind!"));
+ return GST_STATE_CHANGE_FAILURE;
+ }
+ activate_failed:
+ {
+ GST_DEBUG_OBJECT (element,
+ "element failed to change states -- activation problem?");
+ do_async_done (dbin);
+ return GST_STATE_CHANGE_FAILURE;
+ }
+ }
+
+ /* gst_decode_bin_handle_message:
+  *
+  * Intercepts messages from children: ERROR messages are dropped while
+  * shutting down (or stored aside when coming from "filtered" elements),
+  * and BUFFERING messages from the various multiqueues are aggregated so
+  * that only the smallest percentage is forwarded. */
+ static void
+ gst_decode_bin_handle_message (GstBin * bin, GstMessage * msg)
+ {
+ GstDecodeBin *dbin = GST_DECODE_BIN (bin);
+ gboolean drop = FALSE;
+
+ if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ERROR) {
+ /* Don't pass errors when shutting down. Sometimes,
+ * elements can generate spurious errors because we set the
+ * output pads to flushing, and they can't detect that if they
+ * send an event at exactly the wrong moment */
+ DYN_LOCK (dbin);
+ drop = dbin->shutdown;
+ DYN_UNLOCK (dbin);
+
+ /* Errors from filtered elements are stored for later instead of
+  * being forwarded */
+ if (!drop) {
+ GST_OBJECT_LOCK (dbin);
+ drop = (g_list_find (dbin->filtered, GST_MESSAGE_SRC (msg)) != NULL);
+ if (drop)
+ dbin->filtered_errors =
+ g_list_prepend (dbin->filtered_errors, gst_message_ref (msg));
+ GST_OBJECT_UNLOCK (dbin);
+ }
+ } else if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_BUFFERING) {
+ gint perc, msg_perc;
+ gint smaller_perc = 100;
+ GstMessage *smaller = NULL;
+ GList *found = NULL;
+ GList *iter;
+
+ /* buffering messages must be aggregated as there might be multiple
+ * multiqueue in the pipeline and their independent buffering messages
+ * will confuse the application
+ *
+ * decodebin keeps a list of messages received from elements that are
+ * buffering.
+ * Rules are:
+ * 1) Always post the smaller buffering %
+ * 2) If an element posts a 100% buffering message, remove it from the list
+ * 3) When there are no more messages on the list, post 100% message
+ * 4) When an element posts a new buffering message, update the one
+ * on the list to this new value
+ */
+
+ BUFFERING_LOCK (dbin);
+ gst_message_parse_buffering (msg, &msg_perc);
+
+ GST_DEBUG_OBJECT (dbin, "Got buffering msg %" GST_PTR_FORMAT, msg);
+
+ g_mutex_lock (&dbin->buffering_post_lock);
+
+ /*
+ * Single loop for 2 things:
+ * 1) Look for a message with the same source
+ * 1.1) If the received message is 100%, remove it from the list
+ * 2) Find the minimum buffering from the list
+ */
+ for (iter = dbin->buffering_status; iter;) {
+ GstMessage *bufstats = iter->data;
+ if (GST_MESSAGE_SRC (bufstats) == GST_MESSAGE_SRC (msg)) {
+ found = iter;
+ if (msg_perc < 100) {
+ GST_DEBUG_OBJECT (dbin, "Replacing old buffering msg %"
+ GST_PTR_FORMAT, iter->data);
+ gst_message_unref (iter->data);
+ bufstats = iter->data = gst_message_ref (msg);
+ } else {
+ GList *current = iter;
+
+ /* remove the element here and avoid confusing the loop */
+ iter = g_list_next (iter);
+
+ GST_DEBUG_OBJECT (dbin, "Deleting old buffering msg %"
+ GST_PTR_FORMAT, current->data);
+
+ gst_message_unref (current->data);
+ dbin->buffering_status =
+ g_list_delete_link (dbin->buffering_status, current);
+
+ continue;
+ }
+ }
+
+ gst_message_parse_buffering (bufstats, &perc);
+ if (perc < smaller_perc) {
+ smaller_perc = perc;
+ smaller = bufstats;
+ }
+ iter = g_list_next (iter);
+ }
+
+ /* First message from this source: store it if it is below 100% */
+ if (found == NULL && msg_perc < 100) {
+ if (msg_perc < smaller_perc) {
+ smaller_perc = msg_perc;
+ smaller = msg;
+ }
+ GST_DEBUG_OBJECT (dbin, "Storing buffering msg %" GST_PTR_FORMAT, msg);
+ dbin->buffering_status =
+ g_list_prepend (dbin->buffering_status, gst_message_ref (msg));
+ }
+
+ /* now compute the buffering message that should be posted */
+ if (smaller_perc == 100) {
+ g_assert (dbin->buffering_status == NULL);
+ /* we are posting the original received msg */
+ } else {
+ gst_message_replace (&msg, smaller);
+ }
+ BUFFERING_UNLOCK (dbin);
+
+ GST_DEBUG_OBJECT (dbin, "Forwarding buffering msg %" GST_PTR_FORMAT, msg);
+ GST_BIN_CLASS (parent_class)->handle_message (bin, msg);
+
+ g_mutex_unlock (&dbin->buffering_post_lock);
+ return;
+ }
+
+ /* Either swallow the message or forward it to GstBin */
+ if (drop) {
+ gst_message_unref (msg);
+ } else {
+ GST_DEBUG_OBJECT (dbin, "Forwarding msg %" GST_PTR_FORMAT, msg);
+ GST_BIN_CLASS (parent_class)->handle_message (bin, msg);
+ }
+ }
+
+ /* gst_decode_bin_remove_element:
+  *
+  * Drops any aggregated buffering message that originated from @element
+  * (or one of its descendants). If that empties the buffering list, a
+  * final 100% buffering message is posted before chaining up to GstBin. */
+ static gboolean
+ gst_decode_bin_remove_element (GstBin * bin, GstElement * element)
+ {
+ GstDecodeBin *dbin = GST_DECODE_BIN (bin);
+ gboolean removed = FALSE, post = FALSE;
+ GList *iter;
+
+ BUFFERING_LOCK (bin);
+ g_mutex_lock (&dbin->buffering_post_lock);
+ for (iter = dbin->buffering_status; iter; iter = iter->next) {
+ GstMessage *bufstats = iter->data;
+
+ if (GST_MESSAGE_SRC (bufstats) == GST_OBJECT_CAST (element) ||
+ gst_object_has_as_ancestor (GST_MESSAGE_SRC (bufstats),
+ GST_OBJECT_CAST (element))) {
+ gst_message_unref (bufstats);
+ dbin->buffering_status =
+ g_list_delete_link (dbin->buffering_status, iter);
+ removed = TRUE;
+ break;
+ }
+ }
+
+ if (removed && dbin->buffering_status == NULL)
+ post = TRUE;
+ BUFFERING_UNLOCK (bin);
+
+ if (post) {
+ gst_element_post_message (GST_ELEMENT_CAST (bin),
+ gst_message_new_buffering (GST_OBJECT_CAST (dbin), 100));
+ }
+ g_mutex_unlock (&dbin->buffering_post_lock);
+
+ return GST_BIN_CLASS (parent_class)->remove_element (bin, element);
+ }
--- /dev/null
- gint (*select_stream) (GstDecodebin3 * dbin,
- GstStreamCollection * collection, GstStream * stream);
+ /* GStreamer
+ *
+ * Copyright (C) <2015> Centricular Ltd
+ * @author: Edward Hervey <edward@centricular.com>
+ * @author: Jan Schmidt <jan@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <glib.h>
+ #include <glib-object.h>
+ #include <glib/gprintf.h>
+ #include <gst/gst.h>
+ #include <gst/pbutils/pbutils.h>
+
+ #include "gstplaybackelements.h"
+ #include "gstplay-enum.h"
+ #include "gstrawcaps.h"
+
+ /**
+ * SECTION:element-decodebin3
+ * @title: decodebin3
+ *
+ * #GstBin that auto-magically constructs a decoding pipeline using available
+ * decoders and demuxers via auto-plugging. The output is raw audio, video
+ * or subtitle streams.
+ *
+ * decodebin3 differs from the previous decodebin (decodebin2) in important ways:
+ *
+ * * supports publication and selection of stream information via
+ * GstStreamCollection messages and #GST_EVENT_SELECT_STREAMS events.
+ *
+ * * dynamically switches stream connections internally, and
+ * reuses decoder elements when stream selections change, so that in
+ * the normal case it maintains 1 decoder of each type (video/audio/subtitle)
+ * and only creates new elements when streams change and an existing decoder
+ * is not capable of handling the new format.
+ *
+ * * supports multiple input pads for the parallel decoding of auxiliary streams
+ * not muxed with the primary stream.
+ *
+ * * does not handle network stream buffering. decodebin3 expects that network stream
+ * buffering is handled upstream, before data is passed to it.
+ *
+ * > decodebin3 is still experimental API and a technology preview.
+ * > Its behaviour and exposed API is subject to change.
+ *
+ */
+
+ /*
+ * Global design
+ *
+ * 1) From sink pad to elementary streams (GstParseBin)
+ *
+ * The input sink pads are fed to GstParseBin. GstParseBin will feed them
+ * through typefind. When the caps are detected (or changed) we recursively
+ * figure out which demuxer, parser or depayloader is needed until we get to
+ * elementary streams.
+ *
+ * All elementary streams (whether decoded or not, whether exposed or not) are
+ * fed through multiqueue. There is only *one* multiqueue in decodebin3.
+ *
+ * => MultiQueue is the cornerstone.
+ * => No buffering before multiqueue
+ *
+ * 2) Elementary streams
+ *
+ * After GstParseBin, there are 3 main components:
+ * 1) Input Streams (provided by GstParseBin)
+ * 2) Multiqueue slots
+ * 3) Output Streams
+ *
+ * Input Streams correspond to the stream coming from GstParseBin and that gets
+ * fed into a multiqueue slot.
+ *
+ * Output Streams correspond to the combination of a (optional) decoder and an
+ * output ghostpad. Output Streams can be moved from one multiqueue slot to
+ * another, can reconfigure itself (different decoders), and can be
+ * added/removed depending on the configuration (all streams outputted, only one
+ * of each type, ...).
+ *
+ * Multiqueue slots correspond to a pair of sink/src pad from multiqueue. For
+ * each 'active' Input Stream there is a corresponding slot.
+ * Slots might have different streams on input and output (due to internal
+ * buffering).
+ *
+ * Due to internal queuing/buffering/..., all those components (might) behave
+ * asynchronously. Therefore probes will be used on each component source pad to
+ * detect various key-points:
+ * * EOS :
+ * the stream is done => Mark that component as done, optionally freeing/removing it
+ * * STREAM_START :
+ * a new stream is starting => link it further if needed
+ *
+ * 3) Gradual replacement
+ *
+ * If the caps change at any point in decodebin (input sink pad, demuxer output,
+ * multiqueue output, ..), we gradually replace (if needed) the following elements.
+ *
+ * This is handled by the probes in various locations:
+ * a) typefind output
+ * b) multiqueue input (source pad of Input Streams)
+ * c) multiqueue output (source pad of Multiqueue Slots)
+ * d) final output (target of source ghostpads)
+ *
+ * When CAPS event arrive at those points, one of three things can happen:
+ * a) There is no elements downstream yet, just create/link-to following elements
+ * b) There are downstream elements, do a ACCEPT_CAPS query
+ * b.1) The new CAPS are accepted, keep current configuration
+ * b.2) The new CAPS are not accepted, remove following elements then do a)
+ *
+ * Components:
+ *
+ * MultiQ Output
+ * Input(s) Slots Streams
+ * /-------------------------------------------\ /-----\ /------------- \
+ *
+ * +-------------------------------------------------------------------------+
+ * | |
+ * | +---------------------------------------------+ |
+ * | | GstParseBin(s) | |
+ * | | +--------------+ | +-----+ |
+ * | | | |---[parser]-[|--| Mul |---[ decoder ]-[|
+ * |]--[ typefind ]---| demuxer(s) |------------[| | ti | |
+ * | | | (if needed) |---[parser]-[|--| qu | |
+ * | | | |---[parser]-[|--| eu |---[ decoder ]-[|
+ * | | +--------------+ | +------ ^ |
+ * | +---------------------------------------------+ ^ | |
+ * | ^ | | |
+ * +-----------------------------------------------+--------+-------------+--+
+ * | | |
+ * | | |
+ * Probes --/--------/-------------/
+ *
+ * ATOMIC SWITCHING
+ *
+ * We want to ensure we re-use decoders when switching streams. This takes place
+ * at the multiqueue output level.
+ *
+ * MAIN CONCEPTS
+ * 1) Activating a stream (i.e. linking a slot to an output) is only done within
+ * the streaming thread in the multiqueue_src_probe() and only if the
+ * stream is in the REQUESTED selection.
+ * 2) Deactivating a stream (i.e. unlinking a slot from an output) is also done
+ * within the stream thread, but only in a purposefully called IDLE probe
+ * that calls reassign_slot().
+ *
+ * Based on those two principles, 3 "selection" of streams (stream-id) are used:
+ * 1) requested_selection
+ * All streams within that list should be activated
+ * 2) active_selection
+ * List of streams that are exposed by decodebin
+ * 3) to_activate
+ * List of streams that will be moved to requested_selection in the
+ * reassign_slot() method (i.e. once a stream was deactivated, and the output
+ * was retargeted)
+ */
+
+
+ GST_DEBUG_CATEGORY_STATIC (decodebin3_debug);
+ #define GST_CAT_DEFAULT decodebin3_debug
+
+ #define GST_TYPE_DECODEBIN3 (gst_decodebin3_get_type ())
+
+ #define EXTRA_DEBUG 1
+
+ #define CUSTOM_FINAL_EOS_QUARK _custom_final_eos_quark_get ()
+ #define CUSTOM_FINAL_EOS_QUARK_DATA "custom-final-eos"
+ /* Returns the quark used to tag the custom "final" EOS events,
+  * performing a thread-safe one-time initialization of the static
+  * value via g_once_init_enter/leave. */
+ static GQuark
+ _custom_final_eos_quark_get (void)
+ {
+ static gsize g_quark;
+
+ if (g_once_init_enter (&g_quark)) {
+ gsize quark =
+ (gsize) g_quark_from_static_string ("decodebin3-custom-final-eos");
+ g_once_init_leave (&g_quark, quark);
+ }
+ return g_quark;
+ }
+
+ typedef struct _GstDecodebin3 GstDecodebin3;
+ typedef struct _GstDecodebin3Class GstDecodebin3Class;
+
+ typedef struct _DecodebinInputStream DecodebinInputStream;
+ typedef struct _DecodebinInput DecodebinInput;
+ typedef struct _DecodebinOutputStream DecodebinOutputStream;
+
+ struct _GstDecodebin3
+ {
+ GstBin bin;
+
+ /* input_lock protects the following variables */
+ GMutex input_lock;
+ /* Main input (static sink pad) */
+ DecodebinInput *main_input;
+ /* Supplementary input (request sink pads) */
+ GList *other_inputs;
+ /* counter for input */
+ guint32 input_counter;
+ /* Current stream group_id (default : GST_GROUP_ID_INVALID) */
+ /* FIXME : Needs to be reset appropriately (when upstream changes ?) */
+ guint32 current_group_id;
+ /* End of variables protected by input_lock */
+
+ GstElement *multiqueue;
+ GstClockTime default_mq_min_interleave;
+ GstClockTime current_mq_min_interleave;
+
+ /* selection_lock protects access to following variables */
+ GMutex selection_lock;
+ GList *input_streams; /* List of DecodebinInputStream for active collection */
+ GList *output_streams; /* List of DecodebinOutputStream used for output */
+ GList *slots; /* List of MultiQueueSlot */
+ guint slot_id;
+
+ /* Active collection */
+ GstStreamCollection *collection;
+ /* requested selection of stream-id to activate post-multiqueue */
+ GList *requested_selection;
+ /* list of stream-id currently activated in output */
+ GList *active_selection;
+ /* List of stream-id that need to be activated (after a stream switch for ex) */
+ GList *to_activate;
+ /* Pending select streams event */
+ guint32 select_streams_seqnum;
+ /* pending list of streams to select (from downstream) */
+ GList *pending_select_streams;
+ /* TRUE if requested_selection was updated, will become FALSE once
+ * it has fully transitioned to active */
+ gboolean selection_updated;
+ /* End of variables protected by selection_lock */
+
+ /* List of pending collections.
+ * FIXME : Is this really needed ? */
+ GList *pending_collection;
+
+ /* Factories */
+ GMutex factories_lock;
+ guint32 factories_cookie;
+ /* All DECODABLE factories */
+ GList *factories;
+ /* Only DECODER factories */
+ GList *decoder_factories;
+ /* DECODABLE but not DECODER factories */
+ GList *decodable_factories;
+
+ /* counters for pads */
+ guint32 apadcount, vpadcount, tpadcount, opadcount;
+
+ /* Properties */
+ GstCaps *caps;
++#ifdef TIZEN_FEATURE_FORCE_SW_DECODER
++ gboolean force_sw_decoders_for_video;
++ gboolean force_sw_decoders_for_audio;
++#else
++ gboolean force_sw_decoders;
++#endif
+ };
+
+ struct _GstDecodebin3Class
+ {
+ GstBinClass class;
+
- PROP_CAPS
++ gint (*select_stream) (GstDecodebin3 * dbin,
++ GstStreamCollection * collection, GstStream * stream);
++
++#ifdef TIZEN_FEATURE_RESOURCE_MANAGER
++ gboolean (*request_resource) (GstDecodebin3 * dbin,
++ GstStreamCollection * collection, GstStream * stream);
++#endif
+ };
+
+ /* Input of decodebin, controls input pad and parsebin */
+ struct _DecodebinInput
+ {
+ GstDecodebin3 *dbin;
+
+ gboolean is_main;
+
+ GstPad *ghost_sink;
+ GstPad *parsebin_sink;
+
+ GstStreamCollection *collection; /* Active collection */
+
+ guint group_id;
+
+ GstElement *parsebin;
+
+ gulong pad_added_sigid;
+ gulong pad_removed_sigid;
+ gulong drained_sigid;
+
+ /* TRUE if the input got drained
+ * FIXME : When do we reset it if re-used ?
+ */
+ gboolean drained;
+
+ /* HACK : Remove these fields */
+ /* List of PendingPad structures */
+ GList *pending_pads;
+ };
+
+ /* Multiqueue Slots */
+ typedef struct _MultiQueueSlot
+ {
+ guint id;
+
+ GstDecodebin3 *dbin;
+ /* Type of stream handled by this slot */
+ GstStreamType type;
+
+ /* Linked input and output */
+ DecodebinInputStream *input;
+
+ /* pending => last stream received on sink pad */
+ GstStream *pending_stream;
+ /* active => last stream outputted on source pad */
+ GstStream *active_stream;
+
+ GstPad *sink_pad, *src_pad;
+
+ /* id of the MQ src_pad event probe */
+ gulong probe_id;
+
+ gboolean is_drained;
+
+ DecodebinOutputStream *output;
+ } MultiQueueSlot;
+
+ /* Streams that are exposed downstream (i.e. output) */
+ struct _DecodebinOutputStream
+ {
+ GstDecodebin3 *dbin;
+ /* The type of stream handled by this output stream */
+ GstStreamType type;
+
+ /* The slot to which this output stream is currently connected to */
+ MultiQueueSlot *slot;
+
+ GstElement *decoder; /* Optional */
+ GstPad *decoder_sink, *decoder_src;
+ gboolean linked;
+
+ /* ghostpad */
+ GstPad *src_pad;
+ /* Flag if ghost pad is exposed */
+ gboolean src_exposed;
+
+ /* Reported decoder latency */
+ GstClockTime decoder_latency;
+
+ /* keyframe dropping probe */
+ gulong drop_probe_id;
+ };
+
+ /* Pending pads from parsebin */
+ typedef struct _PendingPad
+ {
+ GstDecodebin3 *dbin;
+ DecodebinInput *input;
+ GstPad *pad;
+
+ gulong buffer_probe;
+ gulong event_probe;
+ gboolean saw_eos;
+ } PendingPad;
+
+ /* properties */
+ enum
+ {
+ PROP_0,
-
++ PROP_CAPS,
++#ifdef TIZEN_FEATURE_FORCE_SW_DECODER
++ PROP_FORCE_SW_DECODERS_FOR_VIDEO,
++ PROP_FORCE_SW_DECODERS_FOR_AUDIO,
++#else
++ PROP_FORCE_SW_DECODERS,
++#endif
+ };
+
+ /* signals */
+ enum
+ {
+ SIGNAL_SELECT_STREAM,
+ SIGNAL_ABOUT_TO_FINISH,
++#ifdef TIZEN_FEATURE_RESOURCE_MANAGER
++ SIGNAL_REQUEST_RESOURCE,
++#endif
+ LAST_SIGNAL
+ };
+ static guint gst_decodebin3_signals[LAST_SIGNAL] = { 0 };
+
+ #define SELECTION_LOCK(dbin) G_STMT_START { \
+ GST_LOG_OBJECT (dbin, \
+ "selection locking from thread %p", \
+ g_thread_self ()); \
+ g_mutex_lock (&dbin->selection_lock); \
+ GST_LOG_OBJECT (dbin, \
+ "selection locked from thread %p", \
+ g_thread_self ()); \
+ } G_STMT_END
+
+ #define SELECTION_UNLOCK(dbin) G_STMT_START { \
+ GST_LOG_OBJECT (dbin, \
+ "selection unlocking from thread %p", \
+ g_thread_self ()); \
+ g_mutex_unlock (&dbin->selection_lock); \
+ } G_STMT_END
+
+ #define INPUT_LOCK(dbin) G_STMT_START { \
+ GST_LOG_OBJECT (dbin, \
+ "input locking from thread %p", \
+ g_thread_self ()); \
+ g_mutex_lock (&dbin->input_lock); \
+ GST_LOG_OBJECT (dbin, \
+ "input locked from thread %p", \
+ g_thread_self ()); \
+ } G_STMT_END
+
+ #define INPUT_UNLOCK(dbin) G_STMT_START { \
+ GST_LOG_OBJECT (dbin, \
+ "input unlocking from thread %p", \
+ g_thread_self ()); \
+ g_mutex_unlock (&dbin->input_lock); \
+ } G_STMT_END
+
+ GType gst_decodebin3_get_type (void);
+ #define gst_decodebin3_parent_class parent_class
+ G_DEFINE_TYPE (GstDecodebin3, gst_decodebin3, GST_TYPE_BIN);
+ #define _do_init \
+ GST_DEBUG_CATEGORY_INIT (decodebin3_debug, "decodebin3", 0, "decoder bin");\
+ playback_element_init (plugin);
+ GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (decodebin3, "decodebin3", GST_RANK_NONE,
+ GST_TYPE_DECODEBIN3, _do_init);
+
+ static GstStaticCaps default_raw_caps = GST_STATIC_CAPS (DEFAULT_RAW_CAPS);
+
++#define DEFAULT_FORCE_SW_DECODERS FALSE
++
+ static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+ static GstStaticPadTemplate request_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink_%u",
+ GST_PAD_SINK,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS_ANY);
+
+ static GstStaticPadTemplate video_src_template =
+ GST_STATIC_PAD_TEMPLATE ("video_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+ static GstStaticPadTemplate audio_src_template =
+ GST_STATIC_PAD_TEMPLATE ("audio_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+ static GstStaticPadTemplate text_src_template =
+ GST_STATIC_PAD_TEMPLATE ("text_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+ static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+
+ static void gst_decodebin3_dispose (GObject * object);
+ static void gst_decodebin3_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+ static void gst_decodebin3_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
++#ifdef TIZEN_FEATURE_RESOURCE_MANAGER
++static gboolean gst_decodebin3_request_resource (GstElement * element,
++ GstStreamCollection * collection, GstStream * stream);
++#endif
+
+ static gboolean parsebin_autoplug_continue_cb (GstElement *
+ parsebin, GstPad * pad, GstCaps * caps, GstDecodebin3 * dbin);
+
+ /* Default handler for the select-stream signal: returning -1 lets
+  * decodebin3 decide by itself what to do with the stream. */
+ static gint
+ gst_decodebin3_select_stream (GstDecodebin3 * dbin,
+ GstStreamCollection * collection, GstStream * stream)
+ {
+ GST_LOG_OBJECT (dbin, "default select-stream, returning -1");
+
+ return -1;
+ }
+
+ static GstPad *gst_decodebin3_request_new_pad (GstElement * element,
+ GstPadTemplate * temp, const gchar * name, const GstCaps * caps);
+ static void gst_decodebin3_handle_message (GstBin * bin, GstMessage * message);
+ static GstStateChangeReturn gst_decodebin3_change_state (GstElement * element,
+ GstStateChange transition);
+ static gboolean gst_decodebin3_send_event (GstElement * element,
+ GstEvent * event);
+
+ static void gst_decode_bin_update_factories_list (GstDecodebin3 * dbin);
+ #if 0
+ static gboolean have_factory (GstDecodebin3 * dbin, GstCaps * caps,
+ GstElementFactoryListType ftype);
+ #endif
+
+ static void free_input (GstDecodebin3 * dbin, DecodebinInput * input);
+ static void free_input_async (GstDecodebin3 * dbin, DecodebinInput * input);
+ static DecodebinInput *create_new_input (GstDecodebin3 * dbin, gboolean main);
+ static gboolean set_input_group_id (DecodebinInput * input, guint32 * group_id);
+
+ static void reconfigure_output_stream (DecodebinOutputStream * output,
+ MultiQueueSlot * slot);
+ static void free_output_stream (GstDecodebin3 * dbin,
+ DecodebinOutputStream * output);
+ static DecodebinOutputStream *create_output_stream (GstDecodebin3 * dbin,
+ GstStreamType type);
+
+ static GstPadProbeReturn slot_unassign_probe (GstPad * pad,
+ GstPadProbeInfo * info, MultiQueueSlot * slot);
+ static gboolean reassign_slot (GstDecodebin3 * dbin, MultiQueueSlot * slot);
+ static MultiQueueSlot *get_slot_for_input (GstDecodebin3 * dbin,
+ DecodebinInputStream * input);
+ static void link_input_to_slot (DecodebinInputStream * input,
+ MultiQueueSlot * slot);
+ static void free_multiqueue_slot (GstDecodebin3 * dbin, MultiQueueSlot * slot);
+ static void free_multiqueue_slot_async (GstDecodebin3 * dbin,
+ MultiQueueSlot * slot);
+
+ static GstStreamCollection *get_merged_collection (GstDecodebin3 * dbin);
+ static void update_requested_selection (GstDecodebin3 * dbin);
+
+ /* FIXME: Really make all the parser stuff a self-contained helper object */
+ #include "gstdecodebin3-parse.c"
+
++#ifdef TIZEN_FEATURE_RESOURCE_MANAGER
++/* Default handler for the request-resource signal: always grants the
++ * request (no resource limit is enforced here). */
++static gboolean gst_decodebin3_request_resource (GstElement * element,
++ GstStreamCollection * collection, GstStream * stream)
++{
++ /* do not consider the resource limit */
++ return TRUE;
++}
++
++/* Boolean signal accumulator: stores the handler's return value (except
++ * during cleanup emission) and keeps the emission running while handlers
++ * return TRUE, stopping at the first FALSE. */
++static gboolean
++_gst_boolean_accumulator (GSignalInvocationHint * ihint,
++ GValue * return_accu, const GValue * handler_return, gpointer dummy)
++{
++ gboolean myboolean;
++
++ myboolean = g_value_get_boolean (handler_return);
++ if (!(ihint->run_type & G_SIGNAL_RUN_CLEANUP))
++ g_value_set_boolean (return_accu, myboolean);
++
++ return myboolean;
++}
++#endif
++
+ /* Integer signal accumulator (used for select-stream): stores the
+  * handler's return value; emission continues only while handlers
+  * return -1 ("no opinion") and stops at the first definitive answer. */
+ static gboolean
+ _gst_int_accumulator (GSignalInvocationHint * ihint,
+ GValue * return_accu, const GValue * handler_return, gpointer dummy)
+ {
+ gint res = g_value_get_int (handler_return);
+
+ g_value_set_int (return_accu, res);
+
+ if (res == -1)
+ return TRUE;
+
+ return FALSE;
+ }
+
+ /* GObject class initialisation: installs properties, signals, pad
+  * templates and default vfunc implementations for decodebin3. */
+ static void
+ gst_decodebin3_class_init (GstDecodebin3Class * klass)
+ {
+   GObjectClass *gobject_klass = (GObjectClass *) klass;
+   GstElementClass *element_class = (GstElementClass *) klass;
+   GstBinClass *bin_klass = (GstBinClass *) klass;
+ 
+   gobject_klass->dispose = gst_decodebin3_dispose;
+   gobject_klass->set_property = gst_decodebin3_set_property;
+   gobject_klass->get_property = gst_decodebin3_get_property;
+ 
+   /* FIXME : ADD PROPERTIES ! */
+   g_object_class_install_property (gobject_klass, PROP_CAPS,
+       g_param_spec_boxed ("caps", "Caps",
+           "The caps on which to stop decoding. (NULL = default)",
+           GST_TYPE_CAPS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-           GST_ELEMENT_FACTORY_TYPE_DECODER))
-         dbin->decoder_factories = g_list_append (dbin->decoder_factories, fact);
-       else
++#ifdef TIZEN_FEATURE_FORCE_SW_DECODER
++  /* Tizen splits the upstream "force-sw-decoders" property per media type
++   * so hardware video and audio decoders can be disabled independently. */
++  g_object_class_install_property (gobject_klass, PROP_FORCE_SW_DECODERS_FOR_VIDEO,
++      g_param_spec_boolean ("force-sw-decoders-for-video", "Video Software Decoders Only",
++          "Use only software decoders for video to process streams",
++          DEFAULT_FORCE_SW_DECODERS,
++          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++  g_object_class_install_property (gobject_klass, PROP_FORCE_SW_DECODERS_FOR_AUDIO,
++      g_param_spec_boolean ("force-sw-decoders-for-audio", "Audio Software Decoders Only",
++          "Use only software decoders for audio to process streams",
++          DEFAULT_FORCE_SW_DECODERS,
++          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++#else
++  /**
++   * GstDecodeBin::force-sw-decoders:
++   *
++   * While auto-plugging, if set to %TRUE, those decoders within
++   * "Hardware" klass will be ignored. Otherwise they will be tried.
++   *
++   * Since: 1.18
++   */
++  g_object_class_install_property (gobject_klass, PROP_FORCE_SW_DECODERS,
++      g_param_spec_boolean ("force-sw-decoders", "Software Decoders Only",
++          "Use only software decoders to process streams",
++          DEFAULT_FORCE_SW_DECODERS,
++          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++#endif
+   /* FIXME : ADD SIGNALS ! */
+   /**
+    * GstDecodebin3::select-stream
+    * @decodebin: a #GstDecodebin3
+    * @collection: a #GstStreamCollection
+    * @stream: a #GstStream
+    *
+    * This signal is emitted whenever @decodebin needs to decide whether
+    * to expose a @stream of a given @collection.
+    *
+    * Note that the prefered way to select streams is to listen to
+    * GST_MESSAGE_STREAM_COLLECTION on the bus and send a
+    * GST_EVENT_SELECT_STREAMS with the streams the user wants.
+    *
+    * Returns: 1 if the stream should be selected, 0 if it shouldn't be selected.
+    * A value of -1 (default) lets @decodebin decide what to do with the stream.
+    * */
+   gst_decodebin3_signals[SIGNAL_SELECT_STREAM] =
+       g_signal_new ("select-stream", G_TYPE_FROM_CLASS (klass),
+       G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstDecodebin3Class, select_stream),
+       _gst_int_accumulator, NULL, NULL,
+       G_TYPE_INT, 2, GST_TYPE_STREAM_COLLECTION, GST_TYPE_STREAM);
+ 
+   /**
+    * GstDecodebin3::about-to-finish:
+    *
+    * This signal is emitted when the data for the selected URI is
+    * entirely buffered and it is safe to specify another URI.
+    */
+   gst_decodebin3_signals[SIGNAL_ABOUT_TO_FINISH] =
+       g_signal_new ("about-to-finish", G_TYPE_FROM_CLASS (klass),
+       G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, G_TYPE_NONE, 0, G_TYPE_NONE);
+ 
++#ifdef TIZEN_FEATURE_RESOURCE_MANAGER
++  /* Emitted before plugging a decoder so the platform resource manager
++   * can approve (TRUE) or deny (FALSE) use of a hardware resource. */
++  gst_decodebin3_signals[SIGNAL_REQUEST_RESOURCE] =
++      g_signal_new ("request-resource", G_TYPE_FROM_CLASS (klass),
++      G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstDecodebin3Class, request_resource),
++      _gst_boolean_accumulator, NULL, g_cclosure_marshal_generic,
++      G_TYPE_BOOLEAN, 2, GST_TYPE_STREAM_COLLECTION, GST_TYPE_STREAM);
++#endif
+ 
+   element_class->request_new_pad =
+       GST_DEBUG_FUNCPTR (gst_decodebin3_request_new_pad);
+   element_class->change_state = GST_DEBUG_FUNCPTR (gst_decodebin3_change_state);
+   element_class->send_event = GST_DEBUG_FUNCPTR (gst_decodebin3_send_event);
+ 
+   gst_element_class_add_pad_template (element_class,
+       gst_static_pad_template_get (&sink_template));
+   gst_element_class_add_pad_template (element_class,
+       gst_static_pad_template_get (&request_sink_template));
+   gst_element_class_add_pad_template (element_class,
+       gst_static_pad_template_get (&video_src_template));
+   gst_element_class_add_pad_template (element_class,
+       gst_static_pad_template_get (&audio_src_template));
+   gst_element_class_add_pad_template (element_class,
+       gst_static_pad_template_get (&text_src_template));
+   gst_element_class_add_pad_template (element_class,
+       gst_static_pad_template_get (&src_template));
+ 
+   gst_element_class_set_static_metadata (element_class,
+       "Decoder Bin 3", "Generic/Bin/Decoder",
+       "Autoplug and decode to raw media",
+       "Edward Hervey <edward@centricular.com>");
+ 
+   bin_klass->handle_message = gst_decodebin3_handle_message;
+ 
+   klass->select_stream = gst_decodebin3_select_stream;
++
++#ifdef TIZEN_FEATURE_RESOURCE_MANAGER
++  klass->request_resource = gst_decodebin3_request_resource;
++#endif
+ }
+
+ /* Instance initialisation: creates the main input, the central
+  * multiqueue, the locks and the default property values. */
+ static void
+ gst_decodebin3_init (GstDecodebin3 * dbin)
+ {
+   /* Create main input */
+   dbin->main_input = create_new_input (dbin, TRUE);
+ 
+   dbin->multiqueue = gst_element_factory_make ("multiqueue", NULL);
+   /* Remember multiqueue's default min-interleave so latency-driven
+    * adjustments have a baseline (see gst_decodebin3_update_min_interleave). */
+   g_object_get (dbin->multiqueue, "min-interleave-time",
+       &dbin->default_mq_min_interleave, NULL);
+   dbin->current_mq_min_interleave = dbin->default_mq_min_interleave;
+   g_object_set (dbin->multiqueue, "sync-by-running-time", TRUE,
+       "max-size-buffers", 0, "use-interleave", TRUE, NULL);
+   gst_bin_add ((GstBin *) dbin, dbin->multiqueue);
+ 
+   dbin->current_group_id = GST_GROUP_ID_INVALID;
+ 
+   g_mutex_init (&dbin->factories_lock);
+   g_mutex_init (&dbin->selection_lock);
+   g_mutex_init (&dbin->input_lock);
+ 
+   dbin->caps = gst_static_caps_get (&default_raw_caps);
++#ifdef TIZEN_FEATURE_FORCE_SW_DECODER
++  dbin->force_sw_decoders_for_video = DEFAULT_FORCE_SW_DECODERS;
++  dbin->force_sw_decoders_for_audio = DEFAULT_FORCE_SW_DECODERS;
++#else
++  dbin->force_sw_decoders = DEFAULT_FORCE_SW_DECODERS;
++#endif
+ 
+   GST_OBJECT_FLAG_SET (dbin, GST_BIN_FLAG_STREAMS_AWARE);
+ }
+
+ /* GObject dispose: releases factory lists, selections, the collection
+  * and all inputs. dispose() may be invoked more than once, so every
+  * freed pointer is cleared to make a second invocation a no-op instead
+  * of a double-free. */
+ static void
+ gst_decodebin3_dispose (GObject * object)
+ {
+   GstDecodebin3 *dbin = (GstDecodebin3 *) object;
+   GList *walk, *next;
+ 
+   if (dbin->factories) {
+     gst_plugin_feature_list_free (dbin->factories);
+     dbin->factories = NULL;
+   }
+   if (dbin->decoder_factories) {
+     g_list_free (dbin->decoder_factories);
+     dbin->decoder_factories = NULL;
+   }
+   if (dbin->decodable_factories) {
+     g_list_free (dbin->decodable_factories);
+     dbin->decodable_factories = NULL;
+   }
+   g_list_free_full (dbin->requested_selection, g_free);
+   dbin->requested_selection = NULL;
+   g_list_free (dbin->active_selection);
+   dbin->active_selection = NULL;
+   g_list_free (dbin->to_activate);
+   dbin->to_activate = NULL;
+   g_list_free (dbin->pending_select_streams);
+   dbin->pending_select_streams = NULL;
+   g_clear_object (&dbin->collection);
+ 
+   if (dbin->main_input) {
+     free_input (dbin, dbin->main_input);
+     dbin->main_input = NULL;
+   }
+ 
+   for (walk = dbin->other_inputs; walk; walk = next) {
+     DecodebinInput *input = walk->data;
+ 
+     next = g_list_next (walk);
+ 
+     free_input (dbin, input);
+     dbin->other_inputs = g_list_delete_link (dbin->other_inputs, walk);
+   }
+ 
+   G_OBJECT_CLASS (parent_class)->dispose (object);
+ }
+
+ /* GObject property setter for "caps" and the force-sw-decoders
+  * variants. Only "caps" is protected by the object lock; the boolean
+  * flags are written unlocked (simple word writes). */
+ static void
+ gst_decodebin3_set_property (GObject * object, guint prop_id,
+     const GValue * value, GParamSpec * pspec)
+ {
+   GstDecodebin3 *dbin = (GstDecodebin3 *) object;
+ 
+   /* FIXME : IMPLEMENT */
+   switch (prop_id) {
+     case PROP_CAPS:
+       GST_OBJECT_LOCK (dbin);
+       if (dbin->caps)
+         gst_caps_unref (dbin->caps);
+       dbin->caps = g_value_dup_boxed (value);
+       GST_OBJECT_UNLOCK (dbin);
+       break;
++#ifdef TIZEN_FEATURE_FORCE_SW_DECODER
++    case PROP_FORCE_SW_DECODERS_FOR_VIDEO:
++      dbin->force_sw_decoders_for_video = g_value_get_boolean (value);
++      break;
++    case PROP_FORCE_SW_DECODERS_FOR_AUDIO:
++      dbin->force_sw_decoders_for_audio = g_value_get_boolean (value);
++      break;
++#else
++    case PROP_FORCE_SW_DECODERS:
++      dbin->force_sw_decoders = g_value_get_boolean (value);
++      break;
++#endif
+     default:
+       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+       break;
+   }
+ }
+ 
+ /* GObject property getter; mirrors gst_decodebin3_set_property. */
+ static void
+ gst_decodebin3_get_property (GObject * object, guint prop_id, GValue * value,
+     GParamSpec * pspec)
+ {
+   GstDecodebin3 *dbin = (GstDecodebin3 *) object;
+ 
+   /* FIXME : IMPLEMENT */
+   switch (prop_id) {
+     case PROP_CAPS:
+       GST_OBJECT_LOCK (dbin);
+       g_value_set_boxed (value, dbin->caps);
+       GST_OBJECT_UNLOCK (dbin);
+       break;
++#ifdef TIZEN_FEATURE_FORCE_SW_DECODER
++    case PROP_FORCE_SW_DECODERS_FOR_VIDEO:
++      g_value_set_boolean (value, dbin->force_sw_decoders_for_video);
++      break;
++    case PROP_FORCE_SW_DECODERS_FOR_AUDIO:
++      g_value_set_boolean (value, dbin->force_sw_decoders_for_audio);
++      break;
++#else
++    case PROP_FORCE_SW_DECODERS:
++      g_value_set_boolean (value, dbin->force_sw_decoders);
++      break;
++#endif
+     default:
+       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+       break;
+   }
+ }
+
+ /* "autoplug-continue" handler for the internal parsebin: returning
+  * FALSE stops parsebin from plugging further once the pad caps already
+  * intersect our target (output) caps. */
+ static gboolean
+ parsebin_autoplug_continue_cb (GstElement * parsebin, GstPad * pad,
+     GstCaps * caps, GstDecodebin3 * dbin)
+ {
+   GST_DEBUG_OBJECT (pad, "caps %" GST_PTR_FORMAT, caps);
+ 
+   /* If it matches our target caps, expose it */
+   if (gst_caps_can_intersect (caps, dbin->caps))
+     return FALSE;
+ 
+   return TRUE;
+ }
+
+ /* This method should be called whenever a STREAM_START event
+  * comes out of a given parsebin.
+  * The caller shall replace the group_id if the function returns TRUE */
+ static gboolean
+ set_input_group_id (DecodebinInput * input, guint32 * group_id)
+ {
+   GstDecodebin3 *dbin = input->dbin;
+ 
+   /* Record (and warn about) per-input group-id changes. */
+   if (input->group_id != *group_id) {
+     if (input->group_id != GST_GROUP_ID_INVALID)
+       GST_WARNING_OBJECT (dbin,
+           "Group id changed (%" G_GUINT32_FORMAT " -> %" G_GUINT32_FORMAT
+           ") on input %p ", input->group_id, *group_id, input);
+     input->group_id = *group_id;
+   }
+ 
+   /* Normalize every input onto the bin-wide group id: adopt the first
+    * one seen, then rewrite any differing incoming id to the current one. */
+   if (*group_id != dbin->current_group_id) {
+     if (dbin->current_group_id == GST_GROUP_ID_INVALID) {
+       GST_DEBUG_OBJECT (dbin, "Setting current group id to %" G_GUINT32_FORMAT,
+           *group_id);
+       dbin->current_group_id = *group_id;
+     }
+     *group_id = dbin->current_group_id;
+     return TRUE;
+   }
+ 
+   return FALSE;
+ }
+
+ /* "drained" handler for an input's parsebin: marks the input drained
+  * and, once the main input and every other input are drained, emits
+  * "about-to-finish" so the application can set a new URI. */
+ static void
+ parsebin_drained_cb (GstElement * parsebin, DecodebinInput * input)
+ {
+   GstDecodebin3 *dbin = input->dbin;
+   gboolean all_drained;
+   GList *tmp;
+ 
+   GST_INFO_OBJECT (dbin, "input %p drained", input);
+   input->drained = TRUE;
+ 
+   all_drained = dbin->main_input->drained;
+   for (tmp = dbin->other_inputs; tmp; tmp = tmp->next) {
+     DecodebinInput *data = (DecodebinInput *) tmp->data;
+ 
+     all_drained &= data->drained;
+   }
+ 
+   if (all_drained) {
+     GST_INFO_OBJECT (dbin, "All inputs drained. Posting about-to-finish");
+     g_signal_emit (dbin, gst_decodebin3_signals[SIGNAL_ABOUT_TO_FINISH], 0,
+         NULL);
+   }
+ }
+
+ /* Call with INPUT_LOCK taken */
+ /* Lazily creates the input's parsebin (connecting pad-added/removed,
+  * drained and autoplug-continue handlers), adds it to the bin and
+  * targets the ghost sink pad at it.
+  * Returns FALSE (posting a missing-element message) when the parsebin
+  * factory is unavailable. */
+ static gboolean
+ ensure_input_parsebin (GstDecodebin3 * dbin, DecodebinInput * input)
+ {
+   gboolean set_state = FALSE;
+ 
+   if (input->parsebin == NULL) {
+     input->parsebin = gst_element_factory_make ("parsebin", NULL);
+     if (input->parsebin == NULL)
+       goto no_parsebin;
+     /* Extra ref so the input keeps parsebin alive even after it is
+      * removed from the bin (released in free_input / pad_unlink). */
+     input->parsebin = gst_object_ref (input->parsebin);
+     input->parsebin_sink = gst_element_get_static_pad (input->parsebin, "sink");
+     input->pad_added_sigid =
+         g_signal_connect (input->parsebin, "pad-added",
+         (GCallback) parsebin_pad_added_cb, input);
+     input->pad_removed_sigid =
+         g_signal_connect (input->parsebin, "pad-removed",
+         (GCallback) parsebin_pad_removed_cb, input);
+     input->drained_sigid =
+         g_signal_connect (input->parsebin, "drained",
+         (GCallback) parsebin_drained_cb, input);
+     g_signal_connect (input->parsebin, "autoplug-continue",
+         (GCallback) parsebin_autoplug_continue_cb, dbin);
+   }
+ 
+   if (GST_OBJECT_PARENT (GST_OBJECT (input->parsebin)) != GST_OBJECT (dbin)) {
+     /* The state lock is taken so that we ensure we are the one (de)activating
+      * parsebin. We need to do this to ensure any activation taking place in
+      * parsebin (including by elements doing upstream activation) are done
+      * within the same thread. */
+     GST_STATE_LOCK (input->parsebin);
+     gst_bin_add (GST_BIN (dbin), input->parsebin);
+     set_state = TRUE;
+   }
+ 
+   gst_ghost_pad_set_target (GST_GHOST_PAD (input->ghost_sink),
+       input->parsebin_sink);
+ 
+   if (set_state) {
+     gst_element_sync_state_with_parent (input->parsebin);
+     GST_STATE_UNLOCK (input->parsebin);
+   }
+ 
+   return TRUE;
+ 
+   /* ERRORS */
+ no_parsebin:
+   {
+     gst_element_post_message ((GstElement *) dbin,
+         gst_missing_element_message_new ((GstElement *) dbin, "parsebin"));
+     return FALSE;
+   }
+ }
+
+ /* Ghost-sink-pad link function: on link, retrieves the DecodebinInput
+  * stored on the pad and makes sure its parsebin exists; refuses the
+  * link if parsebin cannot be created. */
+ static GstPadLinkReturn
+ gst_decodebin3_input_pad_link (GstPad * pad, GstObject * parent, GstPad * peer)
+ {
+   GstDecodebin3 *dbin = (GstDecodebin3 *) parent;
+   GstPadLinkReturn res = GST_PAD_LINK_OK;
+   DecodebinInput *input;
+ 
+   GST_LOG_OBJECT (parent, "Got link on input pad %" GST_PTR_FORMAT
+       ". Creating parsebin if needed", pad);
+ 
+   /* The input was attached to the pad in create_new_input(). */
+   if ((input = g_object_get_data (G_OBJECT (pad), "decodebin.input")) == NULL)
+     goto fail;
+ 
+   INPUT_LOCK (dbin);
+   if (!ensure_input_parsebin (dbin, input))
+     res = GST_PAD_LINK_REFUSED;
+   INPUT_UNLOCK (dbin);
+ 
+   return res;
+ fail:
+   GST_ERROR_OBJECT (parent, "Failed to retrieve input state from ghost pad");
+   return GST_PAD_LINK_REFUSED;
+ }
+
+ /* Drop duration query during _input_pad_unlink */
+ /* Pad probe that swallows upstream DURATION queries (answering HANDLED
+  * so they do not reach the element being torn down). */
+ static GstPadProbeReturn
+ query_duration_drop_probe (GstPad * pad, GstPadProbeInfo * info,
+     DecodebinInput * input)
+ {
+   GstPadProbeReturn ret = GST_PAD_PROBE_OK;
+ 
+   if (GST_IS_QUERY (GST_PAD_PROBE_INFO_DATA (info))) {
+     GstQuery *query = GST_PAD_PROBE_INFO_QUERY (info);
+     if (GST_QUERY_TYPE (query) == GST_QUERY_DURATION) {
+       GST_LOG_OBJECT (pad, "stop forwarding query duration");
+       ret = GST_PAD_PROBE_HANDLED;
+     }
+   }
+ 
+   return ret;
+ }
+
+ /* Ghost-sink-pad unlink function: tears down the input's parsebin,
+  * re-merges and (if changed) re-posts the stream collection, and frees
+  * non-main inputs asynchronously. */
+ static void
+ gst_decodebin3_input_pad_unlink (GstPad * pad, GstObject * parent)
+ {
+   GstDecodebin3 *dbin = (GstDecodebin3 *) parent;
+   DecodebinInput *input;
+ 
+   GST_LOG_OBJECT (parent, "Got unlink on input pad %" GST_PTR_FORMAT
+       ". Removing parsebin.", pad);
+ 
+   if ((input = g_object_get_data (G_OBJECT (pad), "decodebin.input")) == NULL)
+     goto fail;
+ 
+   INPUT_LOCK (dbin);
+   if (input->parsebin == NULL) {
+     INPUT_UNLOCK (dbin);
+     return;
+   }
+ 
+   if (GST_OBJECT_PARENT (GST_OBJECT (input->parsebin)) == GST_OBJECT (dbin)) {
+     GstStreamCollection *collection = NULL;
+     /* Block duration queries while the parsebin goes away so they are
+      * not answered by a half-torn-down element. */
+     gulong probe_id = gst_pad_add_probe (input->parsebin_sink,
+         GST_PAD_PROBE_TYPE_QUERY_UPSTREAM,
+         (GstPadProbeCallback) query_duration_drop_probe, input, NULL);
+ 
+     /* Clear stream-collection corresponding to current INPUT and post new
+      * stream-collection message, if needed */
+     if (input->collection) {
+       gst_object_unref (input->collection);
+       input->collection = NULL;
+     }
+ 
+     SELECTION_LOCK (dbin);
+     collection = get_merged_collection (dbin);
+     if (collection && collection != dbin->collection) {
+       GstMessage *msg;
+       GST_DEBUG_OBJECT (dbin, "Update Stream Collection");
+ 
+       if (dbin->collection)
+         gst_object_unref (dbin->collection);
+       dbin->collection = collection;
+       dbin->select_streams_seqnum = GST_SEQNUM_INVALID;
+ 
+       msg =
+           gst_message_new_stream_collection ((GstObject *) dbin,
+           dbin->collection);
+ 
+       /* Post the message and recompute the selection outside the
+        * selection lock. */
+       SELECTION_UNLOCK (dbin);
+       gst_element_post_message (GST_ELEMENT_CAST (dbin), msg);
+       update_requested_selection (dbin);
+     } else
+       SELECTION_UNLOCK (dbin);
+ 
+     gst_bin_remove (GST_BIN (dbin), input->parsebin);
+     gst_element_set_state (input->parsebin, GST_STATE_NULL);
+     g_signal_handler_disconnect (input->parsebin, input->pad_removed_sigid);
+     g_signal_handler_disconnect (input->parsebin, input->pad_added_sigid);
+     g_signal_handler_disconnect (input->parsebin, input->drained_sigid);
+     gst_pad_remove_probe (input->parsebin_sink, probe_id);
+     gst_object_unref (input->parsebin);
+     gst_object_unref (input->parsebin_sink);
+ 
+     input->parsebin = NULL;
+     input->parsebin_sink = NULL;
+ 
+     /* Request pads disappear with their link; the main sink pad stays. */
+     if (!input->is_main) {
+       dbin->other_inputs = g_list_remove (dbin->other_inputs, input);
+       free_input_async (dbin, input);
+     }
+   }
+   INPUT_UNLOCK (dbin);
+   return;
+ 
+ fail:
+   GST_ERROR_OBJECT (parent, "Failed to retrieve input state from ghost pad");
+   return;
+ }
+
+ /* Frees a DecodebinInput: removes its ghost pad from the bin, shuts
+  * down and unrefs its parsebin (if any), drops its collection and
+  * frees the struct itself. */
+ static void
+ free_input (GstDecodebin3 * dbin, DecodebinInput * input)
+ {
+   GST_DEBUG ("Freeing input %p", input);
+   gst_ghost_pad_set_target (GST_GHOST_PAD (input->ghost_sink), NULL);
+   gst_element_remove_pad (GST_ELEMENT (dbin), input->ghost_sink);
+   if (input->parsebin) {
+     g_signal_handler_disconnect (input->parsebin, input->pad_removed_sigid);
+     g_signal_handler_disconnect (input->parsebin, input->pad_added_sigid);
+     g_signal_handler_disconnect (input->parsebin, input->drained_sigid);
+     gst_element_set_state (input->parsebin, GST_STATE_NULL);
+     gst_object_unref (input->parsebin);
+     gst_object_unref (input->parsebin_sink);
+   }
+   if (input->collection)
+     gst_object_unref (input->collection);
+   g_free (input);
+ }
+ 
+ /* Defers free_input() to the element's thread pool — needed when the
+  * caller is in a streaming-thread/pad callback where freeing pads
+  * directly could deadlock. */
+ static void
+ free_input_async (GstDecodebin3 * dbin, DecodebinInput * input)
+ {
+   GST_LOG_OBJECT (dbin, "pushing input %p on thread pool to free", input);
+   gst_element_call_async (GST_ELEMENT_CAST (dbin),
+       (GstElementCallAsyncFunc) free_input, input, NULL);
+ }
+
+ /* Call with INPUT_LOCK taken */
+ /* Allocates a DecodebinInput and its targetless ghost sink pad.
+  * @main: TRUE for the always-present "sink" pad, FALSE for a
+  * "sink_%u" request pad. The input is attached to the pad as
+  * "decodebin.input" so link/unlink callbacks can retrieve it. */
+ static DecodebinInput *
+ create_new_input (GstDecodebin3 * dbin, gboolean main)
+ {
+   DecodebinInput *input;
+ 
+   input = g_new0 (DecodebinInput, 1);
+   input->dbin = dbin;
+   input->is_main = main;
+   input->group_id = GST_GROUP_ID_INVALID;
+   if (main)
+     input->ghost_sink = gst_ghost_pad_new_no_target ("sink", GST_PAD_SINK);
+   else {
+     gchar *pad_name = g_strdup_printf ("sink_%u", dbin->input_counter++);
+     input->ghost_sink = gst_ghost_pad_new_no_target (pad_name, GST_PAD_SINK);
+     g_free (pad_name);
+   }
+   g_object_set_data (G_OBJECT (input->ghost_sink), "decodebin.input", input);
+   gst_pad_set_link_function (input->ghost_sink, gst_decodebin3_input_pad_link);
+   gst_pad_set_unlink_function (input->ghost_sink,
+       gst_decodebin3_input_pad_unlink);
+ 
+   gst_pad_set_active (input->ghost_sink, TRUE);
+   gst_element_add_pad ((GstElement *) dbin, input->ghost_sink);
+ 
+   return input;
+ 
+ }
+
+ /* request_new_pad vfunc: creates an additional (non-main) input and
+  * returns its ghost sink pad. The requested name and caps are ignored. */
+ static GstPad *
+ gst_decodebin3_request_new_pad (GstElement * element, GstPadTemplate * temp,
+     const gchar * name, const GstCaps * caps)
+ {
+   GstDecodebin3 *dbin = (GstDecodebin3 *) element;
+   DecodebinInput *input;
+   GstPad *res = NULL;
+ 
+   /* We are ignoring names for the time being, not sure it makes any sense
+    * within the context of decodebin3 ... */
+   input = create_new_input (dbin, FALSE);
+   if (input) {
+     INPUT_LOCK (dbin);
+     dbin->other_inputs = g_list_append (dbin->other_inputs, input);
+     res = input->ghost_sink;
+     INPUT_UNLOCK (dbin);
+   }
+ 
+   return res;
+ }
+
+ /* Must be called with factories lock! */
+ /* Refreshes the cached decoder/decodable factory lists when the
+  * registry cookie has changed. Hardware decoders are filtered out
+  * according to the force-sw-decoders properties (per-media-type on
+  * Tizen, global upstream). */
+ static void
+ gst_decode_bin_update_factories_list (GstDecodebin3 * dbin)
+ {
+   guint cookie;
+ 
+   cookie = gst_registry_get_feature_list_cookie (gst_registry_get ());
+   if (!dbin->factories || dbin->factories_cookie != cookie) {
+     GList *tmp;
+     if (dbin->factories)
+       gst_plugin_feature_list_free (dbin->factories);
+     if (dbin->decoder_factories)
+       g_list_free (dbin->decoder_factories);
+     if (dbin->decodable_factories)
+       g_list_free (dbin->decodable_factories);
+     dbin->factories =
+         gst_element_factory_list_get_elements
+         (GST_ELEMENT_FACTORY_TYPE_DECODABLE, GST_RANK_MARGINAL);
+     dbin->factories =
+         g_list_sort (dbin->factories, gst_plugin_feature_rank_compare_func);
+     dbin->factories_cookie = cookie;
+ 
+     /* Filter decoder and other decodables */
+     dbin->decoder_factories = NULL;
+     dbin->decodable_factories = NULL;
+     for (tmp = dbin->factories; tmp; tmp = tmp->next) {
+       GstElementFactory *fact = (GstElementFactory *) tmp->data;
+       if (gst_element_factory_list_is_type (fact,
++              GST_ELEMENT_FACTORY_TYPE_DECODER)) {
++#ifdef TIZEN_FEATURE_FORCE_SW_DECODER
++        /* Skip a decoder only when it is BOTH hardware and of a media
++         * type whose force-sw flag is set; everything else is kept. */
++        if (!(dbin->force_sw_decoders_for_video &&
++                gst_element_factory_list_is_type (fact, GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO) &&
++                gst_element_factory_list_is_type (fact, GST_ELEMENT_FACTORY_TYPE_HARDWARE)) &&
++            !(dbin->force_sw_decoders_for_audio &&
++                gst_element_factory_list_is_type (fact, GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO) &&
++                gst_element_factory_list_is_type (fact, GST_ELEMENT_FACTORY_TYPE_HARDWARE))) {
++          dbin->decoder_factories =
++              g_list_append (dbin->decoder_factories, fact);
++        } else {
++          GST_WARNING("%s is skipped", GST_OBJECT_NAME(fact));
++        }
++#else
++        if (!(dbin->force_sw_decoders
++                && gst_element_factory_list_is_type (fact,
++                    GST_ELEMENT_FACTORY_TYPE_HARDWARE))) {
++          dbin->decoder_factories =
++              g_list_append (dbin->decoder_factories, fact);
++        }
++#endif
++      } else {
+         dbin->decodable_factories =
+             g_list_append (dbin->decodable_factories, fact);
++      }
+     }
+   }
+ }
+
+ /* Must be called with appropriate lock if list is a protected variable */
+ /* Linear search for stream-id @sid in a GList of stream-id strings;
+  * returns the stored string on match, NULL otherwise. */
+ static const gchar *
+ stream_in_list (GList * list, const gchar * sid)
+ {
+   GList *tmp;
+ 
+ #if EXTRA_DEBUG
+   for (tmp = list; tmp; tmp = tmp->next) {
+     gchar *osid = (gchar *) tmp->data;
+     GST_DEBUG ("Checking %s against %s", sid, osid);
+   }
+ #endif
+ 
+   for (tmp = list; tmp; tmp = tmp->next) {
+     const gchar *osid = (gchar *) tmp->data;
+     if (!g_strcmp0 (sid, osid))
+       return osid;
+   }
+ 
+   return NULL;
+ }
+
+ /* Recomputes dbin->requested_selection from the current collection:
+  * keeps streams requested via "select-stream" or already present in
+  * the requested/active selections, then (unless the user selected
+  * everything explicitly) auto-picks one stream per remaining type. */
+ static void
+ update_requested_selection (GstDecodebin3 * dbin)
+ {
+   guint i, nb;
+   GList *tmp = NULL;
+   gboolean all_user_selected = TRUE;
+   GstStreamType used_types = 0;
+   GstStreamCollection *collection;
+ 
+   /* 1. Is there a pending SELECT_STREAMS we can return straight away since
+    *  the switch handler will take care of the pending selection */
+   SELECTION_LOCK (dbin);
+   if (dbin->pending_select_streams) {
+     GST_DEBUG_OBJECT (dbin,
+         "No need to create pending selection, SELECT_STREAMS underway");
+     goto beach;
+   }
+ 
+   collection = dbin->collection;
+   if (G_UNLIKELY (collection == NULL)) {
+     GST_DEBUG_OBJECT (dbin, "No current GstStreamCollection");
+     goto beach;
+   }
+   nb = gst_stream_collection_get_size (collection);
+ 
+   /* 2. If not, are we in EXPOSE_ALL_MODE ? If so, match everything */
+   GST_FIXME_OBJECT (dbin, "Implement EXPOSE_ALL_MODE");
+ 
+   /* 3. If not, check if we already have some of the streams in the
+    * existing active/requested selection */
+   for (i = 0; i < nb; i++) {
+     GstStream *stream = gst_stream_collection_get_stream (collection, i);
+     const gchar *sid = gst_stream_get_stream_id (stream);
+     gint request = -1;
+     /* Fire select-stream signal to see if outside components want to
+      * hint at which streams should be selected */
+     g_signal_emit (G_OBJECT (dbin),
+         gst_decodebin3_signals[SIGNAL_SELECT_STREAM], 0, collection, stream,
+         &request);
+     GST_DEBUG_OBJECT (dbin, "stream %s , request:%d", sid, request);
+ 
+     if (request == -1)
+       all_user_selected = FALSE;
+     if (request == 1 || (request == -1
+             && (stream_in_list (dbin->requested_selection, sid)
+                 || stream_in_list (dbin->active_selection, sid)))) {
+       GstStreamType curtype = gst_stream_get_stream_type (stream);
+       if (request == 1)
+         GST_DEBUG_OBJECT (dbin,
+             "Using stream requested by 'select-stream' signal : %s", sid);
+       else
+         GST_DEBUG_OBJECT (dbin,
+             "Re-using stream already present in requested or active selection : %s",
+             sid);
+       tmp = g_list_append (tmp, (gchar *) sid);
+       used_types |= curtype;
+     }
+   }
+ 
+   /* 4. If the user didn't explicitly selected all streams, match one stream of each type */
+   if (!all_user_selected && dbin->select_streams_seqnum == GST_SEQNUM_INVALID) {
+     for (i = 0; i < nb; i++) {
+       GstStream *stream = gst_stream_collection_get_stream (collection, i);
+       GstStreamType curtype = gst_stream_get_stream_type (stream);
+       if (!(used_types & curtype)) {
+         const gchar *sid = gst_stream_get_stream_id (stream);
+         GST_DEBUG_OBJECT (dbin,
+             "Automatically selecting stream '%s' of type %s", sid,
+             gst_stream_type_get_name (curtype));
+         tmp = g_list_append (tmp, (gchar *) sid);
+         used_types |= curtype;
+       }
+     }
+   }
+ 
+ beach:
+   /* Finally set the requested selection */
+   if (tmp) {
+     if (dbin->requested_selection) {
+       GST_FIXME_OBJECT (dbin,
+           "Replacing non-NULL requested_selection, what should we do ??");
+       g_list_free_full (dbin->requested_selection, g_free);
+     }
+     /* tmp borrows the stream-id strings from the collection; the stored
+      * selection owns deep copies. */
+     dbin->requested_selection =
+         g_list_copy_deep (tmp, (GCopyFunc) g_strdup, NULL);
+     dbin->selection_updated = TRUE;
+     g_list_free (tmp);
+   }
+   SELECTION_UNLOCK (dbin);
+ }
+
+ /* sort_streams:
+  * GCompareFunc to use with lists of GstStream.
+  * Sorts GstStreams by stream type and SELECT flag and stream-id
+  * First video, then audio, then others.
+  *
+  * Return: negative if a<b, 0 if a==b, positive if a>b
+  */
+ static gint
+ sort_streams (GstStream * sa, GstStream * sb)
+ {
+   GstStreamType typea, typeb;
+   GstStreamFlags flaga, flagb;
+   const gchar *ida, *idb;
+   gint ret = 0;
+ 
+   typea = gst_stream_get_stream_type (sa);
+   typeb = gst_stream_get_stream_type (sb);
+ 
+   GST_LOG ("sa(%s), sb(%s)", gst_stream_get_stream_id (sa),
+       gst_stream_get_stream_id (sb));
+ 
+   /* Sort by stream type. First video, then audio, then others(text, container, unknown) */
+   if (typea != typeb) {
+     if (typea & GST_STREAM_TYPE_VIDEO)
+       ret = -1;
+     else if (typea & GST_STREAM_TYPE_AUDIO)
+       ret = (!(typeb & GST_STREAM_TYPE_VIDEO)) ? -1 : 1;
+     else if (typea & GST_STREAM_TYPE_TEXT)
+       ret = (!(typeb & GST_STREAM_TYPE_VIDEO)
+           && !(typeb & GST_STREAM_TYPE_AUDIO)) ? -1 : 1;
+     else if (typea & GST_STREAM_TYPE_CONTAINER)
+       ret = (typeb & GST_STREAM_TYPE_UNKNOWN) ? -1 : 1;
+     else
+       ret = 1;
+ 
+     if (ret != 0) {
+       GST_LOG ("Sort by stream-type: %d", ret);
+       return ret;
+     }
+   }
+ 
+   /* Sort by SELECT flag, if stream type is same.
+    * A stream carrying GST_STREAM_FLAG_SELECT sorts before one that
+    * does not. */
+   flaga = gst_stream_get_stream_flags (sa);
+   flagb = gst_stream_get_stream_flags (sb);
+ 
+   ret =
+       (flaga & GST_STREAM_FLAG_SELECT) ? ((flagb & GST_STREAM_FLAG_SELECT) ? 0 :
+       -1) : ((flagb & GST_STREAM_FLAG_SELECT) ? 1 : 0);
+ 
+   if (ret != 0) {
+     GST_LOG ("Sort by SELECT flag: %d", ret);
+     return ret;
+   }
+ 
+   /* Sort by stream-id, if otherwise the same. */
+   ida = gst_stream_get_stream_id (sa);
+   idb = gst_stream_get_stream_id (sb);
+   ret = g_strcmp0 (ida, idb);
+ 
+   GST_LOG ("Sort by stream-id: %d", ret);
+ 
+   return ret;
+ }
+
+ /* Call with INPUT_LOCK taken */
+ /* Returns (transfer full) the collection covering all inputs: when at
+  * most one input has a collection it is simply reffed and returned;
+  * otherwise a new merged collection is built, deduplicated and sorted
+  * (video, audio, others — see sort_streams). May return NULL. */
+ static GstStreamCollection *
+ get_merged_collection (GstDecodebin3 * dbin)
+ {
+   gboolean needs_merge = FALSE;
+   GstStreamCollection *res = NULL;
+   GList *tmp;
+   GList *unsorted_streams = NULL;
+   guint i, nb_stream;
+ 
+   /* First check if we need to do a merge or just return the only collection */
+   res = dbin->main_input->collection;
+ 
+   for (tmp = dbin->other_inputs; tmp; tmp = tmp->next) {
+     DecodebinInput *input = (DecodebinInput *) tmp->data;
+     if (input->collection) {
+       if (res) {
+         needs_merge = TRUE;
+         break;
+       }
+       res = input->collection;
+     }
+   }
+ 
+   if (!needs_merge) {
+     GST_DEBUG_OBJECT (dbin, "No need to merge, returning %p", res);
+     return res ? gst_object_ref (res) : NULL;
+   }
+ 
+   /* We really need to create a new collection */
+   /* FIXME : Some numbering scheme maybe ?? */
+   res = gst_stream_collection_new ("decodebin3");
+   if (dbin->main_input->collection) {
+     nb_stream = gst_stream_collection_get_size (dbin->main_input->collection);
+     GST_DEBUG_OBJECT (dbin, "main input %p %d", dbin->main_input, nb_stream);
+     for (i = 0; i < nb_stream; i++) {
+       GstStream *stream =
+           gst_stream_collection_get_stream (dbin->main_input->collection, i);
+       unsorted_streams = g_list_append (unsorted_streams, stream);
+     }
+   }
+ 
+   for (tmp = dbin->other_inputs; tmp; tmp = tmp->next) {
+     DecodebinInput *input = (DecodebinInput *) tmp->data;
+     GST_DEBUG_OBJECT (dbin, "input %p , collection %p", input,
+         input->collection);
+     if (input->collection) {
+       nb_stream = gst_stream_collection_get_size (input->collection);
+       GST_DEBUG_OBJECT (dbin, "nb_stream : %d", nb_stream);
+       for (i = 0; i < nb_stream; i++) {
+         GstStream *stream =
+             gst_stream_collection_get_stream (input->collection, i);
+         /* Only add if not already present in the list */
+         if (!g_list_find (unsorted_streams, stream))
+           unsorted_streams = g_list_append (unsorted_streams, stream);
+       }
+     }
+   }
+ 
+   /* re-order streams : video, then audio, then others */
+   unsorted_streams =
+       g_list_sort (unsorted_streams, (GCompareFunc) sort_streams);
+   for (tmp = unsorted_streams; tmp; tmp = tmp->next) {
+     GstStream *stream = (GstStream *) tmp->data;
+     GST_DEBUG_OBJECT (dbin, "Adding #stream(%s) to collection",
+         gst_stream_get_stream_id (stream));
+     gst_stream_collection_add_stream (res, gst_object_ref (stream));
+   }
+ 
+   if (unsorted_streams)
+     g_list_free (unsorted_streams);
+ 
+   return res;
+ }
+
+ /* Call with INPUT_LOCK taken */
+ /* Walks up the parent chain from @child (a message source somewhere
+  * inside a parsebin) until it matches one of our inputs' parsebins or
+  * reaches the decodebin itself. Returns the matching input or NULL. */
+ static DecodebinInput *
+ find_message_parsebin (GstDecodebin3 * dbin, GstElement * child)
+ {
+   DecodebinInput *input = NULL;
+   GstElement *parent = gst_object_ref (child);
+   GList *tmp;
+ 
+   do {
+     GstElement *next_parent;
+ 
+     GST_DEBUG_OBJECT (dbin, "parent %s",
+         parent ? GST_ELEMENT_NAME (parent) : "<NONE>");
+ 
+     if (parent == dbin->main_input->parsebin) {
+       input = dbin->main_input;
+       break;
+     }
+     for (tmp = dbin->other_inputs; tmp; tmp = tmp->next) {
+       DecodebinInput *cur = (DecodebinInput *) tmp->data;
+       if (parent == cur->parsebin) {
+         input = cur;
+         break;
+       }
+     }
+     next_parent = (GstElement *) gst_element_get_parent (parent);
+     gst_object_unref (parent);
+     parent = next_parent;
+ 
+   } while (parent && parent != (GstElement *) dbin);
+ 
+   if (parent)
+     gst_object_unref (parent);
+ 
+   return input;
+ }
+ 
+ /* Searches the current (merged) collection for stream-id @sid;
+  * returns the collection's stream-id string on match, NULL otherwise. */
+ static const gchar *
+ stream_in_collection (GstDecodebin3 * dbin, gchar * sid)
+ {
+   guint i, len;
+ 
+   if (dbin->collection == NULL)
+     return NULL;
+   len = gst_stream_collection_get_size (dbin->collection);
+   for (i = 0; i < len; i++) {
+     GstStream *stream = gst_stream_collection_get_stream (dbin->collection, i);
+     const gchar *osid = gst_stream_get_stream_id (stream);
+     if (!g_strcmp0 (sid, osid))
+       return osid;
+   }
+ 
+   return NULL;
+ }
+
+ /* Call with INPUT_LOCK taken */
+ /* Handles a GST_MESSAGE_STREAM_COLLECTION posted by one of the input
+  * parsebins: replaces that input's collection, merges all inputs'
+  * collections, and stores the result as the bin-wide collection. */
+ static void
+ handle_stream_collection (GstDecodebin3 * dbin,
+     GstStreamCollection * collection, GstElement * child)
+ {
+ #ifndef GST_DISABLE_GST_DEBUG
+   const gchar *upstream_id;
+   guint i;
+ #endif
+   DecodebinInput *input = find_message_parsebin (dbin, child);
+ 
+   if (!input) {
+     GST_DEBUG_OBJECT (dbin,
+         "Couldn't find corresponding input, most likely shutting down");
+     return;
+   }
+ 
+   /* Replace collection in input */
+   if (input->collection)
+     gst_object_unref (input->collection);
+   input->collection = gst_object_ref (collection);
+   GST_DEBUG_OBJECT (dbin, "Setting collection %p on input %p", collection,
+       input);
+ 
+   /* Merge collection if needed */
+   /* NOTE: from here on @collection is the merged collection (owned ref),
+    * no longer the caller's argument. */
+   collection = get_merged_collection (dbin);
+ 
+ #ifndef GST_DISABLE_GST_DEBUG
+   /* Just some debugging */
+   upstream_id = gst_stream_collection_get_upstream_id (collection);
+   GST_DEBUG ("Received Stream Collection. Upstream_id : %s", upstream_id);
+   GST_DEBUG ("From input %p", input);
+   GST_DEBUG ("  %d streams", gst_stream_collection_get_size (collection));
+   for (i = 0; i < gst_stream_collection_get_size (collection); i++) {
+     GstStream *stream = gst_stream_collection_get_stream (collection, i);
+     GstTagList *taglist;
+     GstCaps *caps;
+ 
+     GST_DEBUG ("   Stream '%s'", gst_stream_get_stream_id (stream));
+     GST_DEBUG ("     type  : %s",
+         gst_stream_type_get_name (gst_stream_get_stream_type (stream)));
+     GST_DEBUG ("     flags : 0x%x", gst_stream_get_stream_flags (stream));
+     taglist = gst_stream_get_tags (stream);
+     GST_DEBUG ("     tags  : %" GST_PTR_FORMAT, taglist);
+     caps = gst_stream_get_caps (stream);
+     GST_DEBUG ("     caps  : %" GST_PTR_FORMAT, caps);
+     if (taglist)
+       gst_tag_list_unref (taglist);
+     if (caps)
+       gst_caps_unref (caps);
+   }
+ #endif
+ 
+   /* Store collection for later usage */
+   SELECTION_LOCK (dbin);
+   if (dbin->collection == NULL) {
+     dbin->collection = collection;
+   } else {
+     /* We need to check who emitted this collection (the owner).
+      * If we already had a collection from that user, this one is an update,
+      * that is to say that we need to figure out how we are going to re-use
+      * the streams/slot */
+     GST_FIXME_OBJECT (dbin, "New collection but already had one ...");
+     /* FIXME : When do we switch from pending collection to active collection ?
+      * When all streams from active collection are drained in multiqueue output ? */
+     gst_object_unref (dbin->collection);
+     dbin->collection = collection;
+     /* dbin->pending_collection = */
+     /*     g_list_append (dbin->pending_collection, collection); */
+   }
+   dbin->select_streams_seqnum = GST_SEQNUM_INVALID;
+   SELECTION_UNLOCK (dbin);
+ }
+
+ /* Must be called with the selection lock taken */
+ /* Recomputes the maximum reported decoder latency across all output
+  * streams and, if it changed, pushes it (plus a 100ms safety margin)
+  * to multiqueue's "min-interleave-time". */
+ static void
+ gst_decodebin3_update_min_interleave (GstDecodebin3 * dbin)
+ {
+   GstClockTime max_latency = GST_CLOCK_TIME_NONE;
+   GList *tmp;
+ 
+   GST_DEBUG_OBJECT (dbin, "Recalculating max latency of decoders");
+   for (tmp = dbin->output_streams; tmp; tmp = tmp->next) {
+     DecodebinOutputStream *out = (DecodebinOutputStream *) tmp->data;
+     if (GST_CLOCK_TIME_IS_VALID (out->decoder_latency)) {
+       if (max_latency == GST_CLOCK_TIME_NONE
+           || out->decoder_latency > max_latency)
+         max_latency = out->decoder_latency;
+     }
+   }
+   GST_DEBUG_OBJECT (dbin, "max latency of all decoders: %" GST_TIME_FORMAT,
+       GST_TIME_ARGS (max_latency));
+ 
+   if (!GST_CLOCK_TIME_IS_VALID (max_latency))
+     return;
+ 
+   /* Make sure we keep an extra overhead */
+   max_latency += 100 * GST_MSECOND;
+   if (max_latency == dbin->current_mq_min_interleave)
+     return;
+ 
+   dbin->current_mq_min_interleave = max_latency;
+   GST_DEBUG_OBJECT (dbin, "Setting mq min-interleave to %" GST_TIME_FORMAT,
+       GST_TIME_ARGS (dbin->current_mq_min_interleave));
+   g_object_set (dbin->multiqueue, "min-interleave-time",
+       dbin->current_mq_min_interleave, NULL);
+ }
+
+ static void
+ gst_decodebin3_handle_message (GstBin * bin, GstMessage * message)
+ {
+ GstDecodebin3 *dbin = (GstDecodebin3 *) bin;
+ gboolean posting_collection = FALSE;
+
+ GST_DEBUG_OBJECT (bin, "Got Message %s", GST_MESSAGE_TYPE_NAME (message));
+
+ switch (GST_MESSAGE_TYPE (message)) {
+ case GST_MESSAGE_STREAM_COLLECTION:
+ {
+ GstStreamCollection *collection = NULL;
+ gst_message_parse_stream_collection (message, &collection);
+ if (collection) {
+ INPUT_LOCK (dbin);
+ handle_stream_collection (dbin, collection,
+ (GstElement *) GST_MESSAGE_SRC (message));
+ posting_collection = TRUE;
+ INPUT_UNLOCK (dbin);
+ }
+
+ SELECTION_LOCK (dbin);
+ if (dbin->collection) {
+ /* Replace collection message, we most likely aggregated it */
+ GstMessage *new_msg;
+ new_msg =
+ gst_message_new_stream_collection ((GstObject *) dbin,
+ dbin->collection);
+ gst_message_unref (message);
+ message = new_msg;
+ }
+ SELECTION_UNLOCK (dbin);
+
+ if (collection)
+ gst_object_unref (collection);
+ break;
+ }
+ case GST_MESSAGE_LATENCY:
+ {
+ GList *tmp;
+ /* Check if this is from one of our decoders */
+ SELECTION_LOCK (dbin);
+ for (tmp = dbin->output_streams; tmp; tmp = tmp->next) {
+ DecodebinOutputStream *out = (DecodebinOutputStream *) tmp->data;
+ if (out->decoder == (GstElement *) GST_MESSAGE_SRC (message)) {
+ GstClockTime min, max;
+ if (GST_IS_VIDEO_DECODER (out->decoder)) {
+ gst_video_decoder_get_latency (GST_VIDEO_DECODER (out->decoder),
+ &min, &max);
+ GST_DEBUG_OBJECT (dbin,
+ "Got latency update from one of our decoders. min: %"
+ GST_TIME_FORMAT " max: %" GST_TIME_FORMAT, GST_TIME_ARGS (min),
+ GST_TIME_ARGS (max));
+ out->decoder_latency = min;
+ /* Trigger recalculation */
+ gst_decodebin3_update_min_interleave (dbin);
+ }
+ break;
+ }
+ }
+ SELECTION_UNLOCK (dbin);
+ }
+ default:
+ break;
+ }
+
+ GST_BIN_CLASS (parent_class)->handle_message (bin, message);
+
+ if (posting_collection) {
+ /* Figure out a selection for that collection */
+ update_requested_selection (dbin);
+ }
+ }
+
+ static DecodebinOutputStream *
+ find_free_compatible_output (GstDecodebin3 * dbin, GstStream * stream)
+ {
+ GList *tmp;
+ GstStreamType stype = gst_stream_get_stream_type (stream);
+
+ for (tmp = dbin->output_streams; tmp; tmp = tmp->next) {
+ DecodebinOutputStream *output = (DecodebinOutputStream *) tmp->data;
+ if (output->type == stype && output->slot && output->slot->active_stream) {
+ GstStream *tstream = output->slot->active_stream;
+ if (!stream_in_list (dbin->requested_selection,
+ (gchar *) gst_stream_get_stream_id (tstream))) {
+ return output;
+ }
+ }
+ }
+
+ return NULL;
+ }
+
/* Give a certain slot, figure out if it should be linked to an
 * output stream
 * CALL WITH SELECTION LOCK TAKEN !*/
static DecodebinOutputStream *
get_output_for_slot (MultiQueueSlot * slot)
{
  GstDecodebin3 *dbin = slot->dbin;
  DecodebinOutputStream *output = NULL;
  const gchar *stream_id;
  GstCaps *caps;
  gchar *id_in_list = NULL;

  /* If we already have a configured output, just use it */
  if (slot->output != NULL)
    return slot->output;

  /*
   * FIXME
   *
   * This method needs to be split into multiple parts
   *
   * 1) Figure out whether stream should be exposed or not
   *    This is based on autoplug-continue, EXPOSE_ALL_MODE, or presence
   *    in the default stream attribution
   *
   * 2) Figure out whether an output stream should be created, whether
   *    we can re-use the output stream already linked to the slot, or
   *    whether we need to get re-assigned another (currently used) output
   *    stream.
   */

  stream_id = gst_stream_get_stream_id (slot->active_stream);
  caps = gst_stream_get_caps (slot->active_stream);
  GST_DEBUG_OBJECT (dbin, "stream %s , %" GST_PTR_FORMAT, stream_id, caps);
  gst_caps_unref (caps);

  /* 0. Emit autoplug-continue signal for pending caps ? */
  GST_FIXME_OBJECT (dbin, "emit autoplug-continue");

  /* 1. if in EXPOSE_ALL_MODE, just accept */
  GST_FIXME_OBJECT (dbin, "Handle EXPOSE_ALL_MODE");

#if 0
  /* FIXME : The idea around this was to avoid activating a stream for
   * which we have no decoder. Unfortunately it is way too
   * expensive. Need to figure out a better solution */
  /* 2. Is there a potential decoder (if one is required) */
  if (!gst_caps_can_intersect (caps, dbin->caps)
      && !have_factory (dbin, (GstCaps *) caps,
          GST_ELEMENT_FACTORY_TYPE_DECODER)) {
    GST_WARNING_OBJECT (dbin, "Don't have a decoder for %" GST_PTR_FORMAT,
        caps);
    SELECTION_UNLOCK (dbin);
    gst_element_post_message (GST_ELEMENT_CAST (dbin),
        gst_missing_decoder_message_new (GST_ELEMENT_CAST (dbin), caps));
    SELECTION_LOCK (dbin);
    return NULL;
  }
#endif

  /* 3. In default mode check if we should expose */
  /* id_in_list points at the string owned by the requested_selection list;
   * it is only freed below after being removed from that list */
  id_in_list = (gchar *) stream_in_list (dbin->requested_selection, stream_id);
  if (id_in_list) {
    /* Check if we can steal an existing output stream we could re-use.
     * that is:
     * * an output stream whose slot->stream is not in requested
     * * and is of the same type as this stream
     */
    output = find_free_compatible_output (dbin, slot->active_stream);
    if (output) {
      /* Move this output from its current slot to this slot */
      dbin->to_activate =
          g_list_append (dbin->to_activate, (gchar *) stream_id);
      dbin->requested_selection =
          g_list_remove (dbin->requested_selection, id_in_list);
      g_free (id_in_list);
      /* The selection lock is deliberately dropped while installing the
       * IDLE probe: the unassign probe callback itself takes the lock */
      SELECTION_UNLOCK (dbin);
      gst_pad_add_probe (output->slot->src_pad, GST_PAD_PROBE_TYPE_IDLE,
          (GstPadProbeCallback) slot_unassign_probe, output->slot, NULL);
      SELECTION_LOCK (dbin);
      /* NULL: the re-assignment will happen asynchronously from the probe */
      return NULL;
    }

    /* No output to steal: create a brand new one and link it to this slot */
    output = create_output_stream (dbin, slot->type);
    output->slot = slot;
    GST_DEBUG ("Linking slot %p to new output %p", slot, output);
    slot->output = output;
    dbin->active_selection =
        g_list_append (dbin->active_selection, (gchar *) stream_id);
  } else
    GST_DEBUG ("Not creating any output for slot %p", slot);

  return output;
}
+
+ /* Returns SELECTED_STREAMS message if active_selection is equal to
+ * requested_selection, else NULL.
+ * Must be called with LOCK taken */
+ static GstMessage *
+ is_selection_done (GstDecodebin3 * dbin)
+ {
+ GList *tmp;
+ GstMessage *msg;
+
+ if (!dbin->selection_updated)
+ return NULL;
+
+ GST_LOG_OBJECT (dbin, "Checking");
+
+ if (dbin->to_activate != NULL) {
+ GST_DEBUG ("Still have streams to activate");
+ return NULL;
+ }
+ for (tmp = dbin->requested_selection; tmp; tmp = tmp->next) {
+ GST_DEBUG ("Checking requested stream %s", (gchar *) tmp->data);
+ if (!stream_in_list (dbin->active_selection, (gchar *) tmp->data)) {
+ GST_DEBUG ("Not in active selection, returning");
+ return NULL;
+ }
+ }
+
+ GST_DEBUG_OBJECT (dbin, "Selection active, creating message");
+
+ /* We are completely active */
+ msg = gst_message_new_streams_selected ((GstObject *) dbin, dbin->collection);
+ if (dbin->select_streams_seqnum != GST_SEQNUM_INVALID) {
+ gst_message_set_seqnum (msg, dbin->select_streams_seqnum);
+ }
+ for (tmp = dbin->output_streams; tmp; tmp = tmp->next) {
+ DecodebinOutputStream *output = (DecodebinOutputStream *) tmp->data;
+ if (output->slot) {
+ GST_DEBUG_OBJECT (dbin, "Adding stream %s",
+ gst_stream_get_stream_id (output->slot->active_stream));
+
+ gst_message_streams_selected_add (msg, output->slot->active_stream);
+ } else
+ GST_WARNING_OBJECT (dbin, "No valid slot for output %p", output);
+ }
+ dbin->selection_updated = FALSE;
+ return msg;
+ }
+
/* Check whether playback is completely drained and, if so, push the final
 * EOS downstream.
 * Must be called with SELECTION_LOCK taken */
static void
check_all_slot_for_eos (GstDecodebin3 * dbin)
{
  gboolean all_drained = TRUE;
  GList *iter;

  GST_DEBUG_OBJECT (dbin, "check slot for eos");

  /* Step 1: every slot that feeds an output must be drained */
  for (iter = dbin->slots; iter; iter = iter->next) {
    MultiQueueSlot *slot = iter->data;

    /* Slots without an output don't hold back EOS */
    if (!slot->output)
      continue;

    if (slot->is_drained) {
      GST_LOG_OBJECT (slot->sink_pad, "slot %p is drained", slot);
      continue;
    }

    all_drained = FALSE;
    break;
  }

  /* Step 2: no input may still have pending (non-EOS) pads.
   * Note the lock order: SELECTION lock (held by caller) then INPUT lock */
  if (all_drained) {
    INPUT_LOCK (dbin);
    if (!pending_pads_are_eos (dbin->main_input))
      all_drained = FALSE;

    if (all_drained) {
      for (iter = dbin->other_inputs; iter; iter = iter->next) {
        if (!pending_pads_are_eos ((DecodebinInput *) iter->data)) {
          all_drained = FALSE;
          break;
        }
      }
    }
    INPUT_UNLOCK (dbin);
  }

  /* Step 3: everything is drained, send a flushing STREAM_START followed by
   * a specially-tagged EOS into every input stream */
  if (all_drained) {
    GST_DEBUG_OBJECT (dbin,
        "All active slots are drained, and no pending input, push EOS");

    for (iter = dbin->input_streams; iter; iter = iter->next) {
      DecodebinInputStream *input = (DecodebinInputStream *) iter->data;
      GstPad *peer = gst_pad_get_peer (input->srcpad);

      /* Send EOS to all slots */
      if (peer) {
        GstEvent *stream_start, *eos;

        stream_start =
            gst_pad_get_sticky_event (input->srcpad, GST_EVENT_STREAM_START, 0);

        /* First forward a custom STREAM_START event to reset the EOS status (if any) */
        if (stream_start) {
          GstStructure *s;
          GstEvent *custom_stream_start = gst_event_copy (stream_start);
          gst_event_unref (stream_start);
          s = (GstStructure *) gst_event_get_structure (custom_stream_start);
          /* multiqueue_src_probe() recognizes this field and swallows the
           * event instead of propagating it */
          gst_structure_set (s, "decodebin3-flushing-stream-start",
              G_TYPE_BOOLEAN, TRUE, NULL);
          gst_pad_send_event (peer, custom_stream_start);
        }

        /* Tag the EOS with the "final" quark so multiqueue_src_probe()
         * propagates it downstream instead of dropping it like a regular
         * input EOS */
        eos = gst_event_new_eos ();
        gst_mini_object_set_qdata (GST_MINI_OBJECT_CAST (eos),
            CUSTOM_FINAL_EOS_QUARK, (gchar *) CUSTOM_FINAL_EOS_QUARK_DATA,
            NULL);
        gst_pad_send_event (peer, eos);
        gst_object_unref (peer);
      } else
        GST_DEBUG_OBJECT (dbin, "no output");
    }
  }
}
+
/* Downstream event/query probe installed on every multiqueue source pad
 * (see create_new_slot()).
 *
 * Responsibilities:
 * - STREAM_START: track the slot's active GstStream, swallow the custom
 *   flushing stream-starts emitted by check_all_slot_for_eos()
 * - CAPS: assign/configure an output for the slot and post the
 *   streams-selected message once the selection is complete
 * - EOS: distinguish custom (slot-removal) EOS, final EOS, and regular EOS
 * - CAPS/ACCEPT_CAPS queries: answer permissively so upstream never fails
 *   negotiation before a decoder is attached */
static GstPadProbeReturn
multiqueue_src_probe (GstPad * pad, GstPadProbeInfo * info,
    MultiQueueSlot * slot)
{
  GstPadProbeReturn ret = GST_PAD_PROBE_OK;
  GstDecodebin3 *dbin = slot->dbin;

  if (GST_IS_EVENT (GST_PAD_PROBE_INFO_DATA (info))) {
    GstEvent *ev = GST_PAD_PROBE_INFO_EVENT (info);

    GST_DEBUG_OBJECT (pad, "Got event %p %s", ev, GST_EVENT_TYPE_NAME (ev));
    switch (GST_EVENT_TYPE (ev)) {
      case GST_EVENT_STREAM_START:
      {
        GstStream *stream = NULL;
        const GstStructure *s = gst_event_get_structure (ev);

        /* Drop STREAM_START events used to cleanup multiqueue */
        if (s
            && gst_structure_has_field (s,
                "decodebin3-flushing-stream-start")) {
          ret = GST_PAD_PROBE_HANDLED;
          gst_event_unref (ev);
          break;
        }

        gst_event_parse_stream (ev, &stream);
        if (stream == NULL) {
          GST_ERROR_OBJECT (pad,
              "Got a STREAM_START event without a GstStream");
          break;
        }
        /* A new stream-start resets the drained status of the slot */
        slot->is_drained = FALSE;
        GST_DEBUG_OBJECT (pad, "Stream Start '%s'",
            gst_stream_get_stream_id (stream));
        /* Take over the reference returned by gst_event_parse_stream() when
         * the active stream changes, drop it otherwise */
        if (slot->active_stream == NULL) {
          slot->active_stream = stream;
        } else if (slot->active_stream != stream) {
          GST_FIXME_OBJECT (pad, "Handle stream changes (%s => %s) !",
              gst_stream_get_stream_id (slot->active_stream),
              gst_stream_get_stream_id (stream));
          gst_object_unref (slot->active_stream);
          slot->active_stream = stream;
        } else
          gst_object_unref (stream);
#if 0                           /* Disabled because stream-start is pushed for every buffer on every unlinked pad */
        /* NOTE(review): dead code — references an undeclared 'stream_id';
         * would need fixing before re-enabling */
        {
          gboolean is_active, is_requested;
          /* Quick check to see if we're in the current selection */
          /* FIXME : Re-check all slot<=>output mappings based on requested_selection */
          SELECTION_LOCK (dbin);
          GST_DEBUG_OBJECT (dbin, "Checking active selection");
          is_active = stream_in_list (dbin->active_selection, stream_id);
          GST_DEBUG_OBJECT (dbin, "Checking requested selection");
          is_requested = stream_in_list (dbin->requested_selection, stream_id);
          SELECTION_UNLOCK (dbin);
          if (is_active)
            GST_DEBUG_OBJECT (pad, "Slot in ACTIVE selection (output:%p)",
                slot->output);
          if (is_requested)
            GST_DEBUG_OBJECT (pad, "Slot in REQUESTED selection (output:%p)",
                slot->output);
          else if (slot->output) {
            GST_DEBUG_OBJECT (pad,
                "Slot needs to be deactivated ? It's no longer in requested selection");
          } else if (!is_active)
            GST_DEBUG_OBJECT (pad,
                "Slot in neither active nor requested selection");
        }
#endif
      }
        break;
      case GST_EVENT_CAPS:
      {
        /* Configure the output slot if needed */
        DecodebinOutputStream *output;
        GstMessage *msg = NULL;
        SELECTION_LOCK (dbin);
        output = get_output_for_slot (slot);
        if (output) {
          reconfigure_output_stream (output, slot);
          msg = is_selection_done (dbin);
        }
        SELECTION_UNLOCK (dbin);
        /* Post outside the lock to avoid re-entrancy issues */
        if (msg)
          gst_element_post_message ((GstElement *) slot->dbin, msg);
      }
        break;
      case GST_EVENT_EOS:
      {
        gboolean was_drained = slot->is_drained;
        slot->is_drained = TRUE;

        /* Custom EOS handling first */
        if (gst_mini_object_get_qdata (GST_MINI_OBJECT_CAST (ev),
                CUSTOM_EOS_QUARK)) {
          /* remove custom-eos */
          gst_mini_object_set_qdata (GST_MINI_OBJECT_CAST (ev),
              CUSTOM_EOS_QUARK, NULL, NULL);
          GST_LOG_OBJECT (pad, "Received custom EOS");
          ret = GST_PAD_PROBE_HANDLED;
          SELECTION_LOCK (dbin);
          if (slot->input == NULL) {
            GST_DEBUG_OBJECT (pad,
                "Got custom-eos from null input stream, remove output stream");
            /* Remove the output */
            if (slot->output) {
              DecodebinOutputStream *output = slot->output;
              dbin->output_streams =
                  g_list_remove (dbin->output_streams, output);
              free_output_stream (dbin, output);
              /* Reacalculate min interleave */
              gst_decodebin3_update_min_interleave (dbin);
            }
            /* Zero the probe id: PROBE_REMOVE below detaches this probe */
            slot->probe_id = 0;
            dbin->slots = g_list_remove (dbin->slots, slot);
            free_multiqueue_slot_async (dbin, slot);
            ret = GST_PAD_PROBE_REMOVE;
          } else if (!was_drained) {
            check_all_slot_for_eos (dbin);
          }
          if (ret == GST_PAD_PROBE_HANDLED)
            gst_event_unref (ev);
          SELECTION_UNLOCK (dbin);
          break;
        }

        GST_FIXME_OBJECT (pad, "EOS on multiqueue source pad. input:%p",
            slot->input);
        if (slot->input == NULL) {
          GstPad *peer;
          GST_DEBUG_OBJECT (pad,
              "last EOS for input, forwarding and removing slot");
          peer = gst_pad_get_peer (pad);
          if (peer) {
            gst_pad_send_event (peer, ev);
            gst_object_unref (peer);
          } else {
            gst_event_unref (ev);
          }
          SELECTION_LOCK (dbin);
          /* FIXME : Shouldn't we try to re-assign the output instead of just
           * removing it ? */
          /* Remove the output */
          if (slot->output) {
            DecodebinOutputStream *output = slot->output;
            dbin->output_streams = g_list_remove (dbin->output_streams, output);
            free_output_stream (dbin, output);
          }
          slot->probe_id = 0;
          dbin->slots = g_list_remove (dbin->slots, slot);
          SELECTION_UNLOCK (dbin);

          free_multiqueue_slot_async (dbin, slot);
          ret = GST_PAD_PROBE_REMOVE;
        } else if (gst_mini_object_get_qdata (GST_MINI_OBJECT_CAST (ev),
                CUSTOM_FINAL_EOS_QUARK)) {
          /* Final EOS from check_all_slot_for_eos(): let it through */
          GST_DEBUG_OBJECT (pad, "Got final eos, propagating downstream");
        } else {
          GST_DEBUG_OBJECT (pad, "Got regular eos (all_inputs_are_eos)");
          /* drop current event as eos will be sent in check_all_slot_for_eos
           * when all output streams are also eos */
#ifndef TIZEN_FEATURE_DISABLE_EOS_DROP
          ret = GST_PAD_PROBE_DROP;
#endif
          SELECTION_LOCK (dbin);
          check_all_slot_for_eos (dbin);
          SELECTION_UNLOCK (dbin);
        }
      }
        break;
      default:
        break;
    }
  } else if (GST_IS_QUERY (GST_PAD_PROBE_INFO_DATA (info))) {
    GstQuery *query = GST_PAD_PROBE_INFO_QUERY (info);
    switch (GST_QUERY_TYPE (query)) {
      case GST_QUERY_CAPS:
      {
        GST_DEBUG_OBJECT (pad, "Intercepting CAPS query");
        gst_query_set_caps_result (query, GST_CAPS_ANY);
        ret = GST_PAD_PROBE_HANDLED;
      }
        break;

      case GST_QUERY_ACCEPT_CAPS:
      {
        GST_DEBUG_OBJECT (pad, "Intercepting Accept Caps query");
        /* If the current decoder doesn't accept caps, we'll reconfigure
         * on the actual caps event. So accept any caps. */
        gst_query_set_accept_caps_result (query, TRUE);
        ret = GST_PAD_PROBE_HANDLED;
      }
        /* NOTE(review): falls through to default (which only breaks) —
         * harmless, but an explicit break would be clearer */
      default:
        break;
    }
  }

  return ret;
}
+
+ /* Create a new multiqueue slot for the given type
+ *
+ * It is up to the caller to know whether that slot is needed or not
+ * (and release it when no longer needed) */
+ static MultiQueueSlot *
+ create_new_slot (GstDecodebin3 * dbin, GstStreamType type)
+ {
+ MultiQueueSlot *slot;
+ GstIterator *it = NULL;
+ GValue item = { 0, };
+
+ GST_DEBUG_OBJECT (dbin, "Creating new slot for type %s",
+ gst_stream_type_get_name (type));
+ slot = g_new0 (MultiQueueSlot, 1);
+ slot->dbin = dbin;
+
+ slot->id = dbin->slot_id++;
+
+ slot->type = type;
+ slot->sink_pad = gst_element_request_pad_simple (dbin->multiqueue, "sink_%u");
+ if (slot->sink_pad == NULL)
+ goto fail;
+
+ it = gst_pad_iterate_internal_links (slot->sink_pad);
+ if (!it || (gst_iterator_next (it, &item)) != GST_ITERATOR_OK
+ || ((slot->src_pad = g_value_dup_object (&item)) == NULL)) {
+ GST_ERROR ("Couldn't get srcpad from multiqueue for sink pad %s:%s",
+ GST_DEBUG_PAD_NAME (slot->src_pad));
+ goto fail;
+ }
+ gst_iterator_free (it);
+ g_value_reset (&item);
+
+ g_object_set (slot->sink_pad, "group-id", (guint) type, NULL);
+
+ /* Add event probe */
+ slot->probe_id =
+ gst_pad_add_probe (slot->src_pad,
+ GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM | GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM,
+ (GstPadProbeCallback) multiqueue_src_probe, slot, NULL);
+
+ GST_DEBUG ("Created new slot %u (%p) (%s:%s)", slot->id, slot,
+ GST_DEBUG_PAD_NAME (slot->src_pad));
+
+ dbin->slots = g_list_append (dbin->slots, slot);
+
+ return slot;
+
+ /* ERRORS */
+ fail:
+ {
+ if (slot->sink_pad)
+ gst_element_release_request_pad (dbin->multiqueue, slot->sink_pad);
+ g_free (slot);
+ return NULL;
+ }
+ }
+
+ /* Must be called with SELECTION_LOCK */
+ static MultiQueueSlot *
+ get_slot_for_input (GstDecodebin3 * dbin, DecodebinInputStream * input)
+ {
+ GList *tmp;
+ MultiQueueSlot *empty_slot = NULL;
+ GstStreamType input_type = 0;
+ gchar *stream_id = NULL;
+
+ GST_DEBUG_OBJECT (dbin, "input %p (stream %p %s)",
+ input, input->active_stream,
+ input->
+ active_stream ? gst_stream_get_stream_id (input->active_stream) : "");
+
+ if (input->active_stream) {
+ input_type = gst_stream_get_stream_type (input->active_stream);
+ stream_id = (gchar *) gst_stream_get_stream_id (input->active_stream);
+ }
+
+ /* Go over existing slots and check if there is already one for it */
+ for (tmp = dbin->slots; tmp; tmp = tmp->next) {
+ MultiQueueSlot *slot = (MultiQueueSlot *) tmp->data;
+ /* Already used input, return that one */
+ if (slot->input == input) {
+ GST_DEBUG_OBJECT (dbin, "Returning already specified slot %d", slot->id);
+ return slot;
+ }
+ }
+
+ /* Go amongst all unused slots of the right type and try to find a candidate */
+ for (tmp = dbin->slots; tmp; tmp = tmp->next) {
+ MultiQueueSlot *slot = (MultiQueueSlot *) tmp->data;
+ if (slot->input == NULL && input_type == slot->type) {
+ /* Remember this empty slot for later */
+ empty_slot = slot;
+ /* Check if available slot is of the same stream_id */
+ GST_LOG_OBJECT (dbin, "Checking candidate slot %d (active_stream:%p)",
+ slot->id, slot->active_stream);
+ if (stream_id && slot->active_stream) {
+ gchar *ostream_id =
+ (gchar *) gst_stream_get_stream_id (slot->active_stream);
+ GST_DEBUG_OBJECT (dbin, "Checking slot %d %s against %s", slot->id,
+ ostream_id, stream_id);
+ if (!g_strcmp0 (stream_id, ostream_id))
+ break;
+ }
+ }
+ }
+
+ if (empty_slot) {
+ GST_DEBUG_OBJECT (dbin, "Re-using existing unused slot %d", empty_slot->id);
+ empty_slot->input = input;
+ return empty_slot;
+ }
+
+ if (input_type)
+ return create_new_slot (dbin, input_type);
+
+ return NULL;
+ }
+
+ static void
+ link_input_to_slot (DecodebinInputStream * input, MultiQueueSlot * slot)
+ {
+ if (slot->input != NULL && slot->input != input) {
+ GST_ERROR_OBJECT (slot->dbin,
+ "Trying to link input to an already used slot");
+ return;
+ }
+ gst_pad_link_full (input->srcpad, slot->sink_pad, GST_PAD_LINK_CHECK_NOTHING);
+ slot->pending_stream = input->active_stream;
+ slot->input = input;
+ }
+
#if 0
/* Disabled: kept for reference by the (also disabled) decoder-availability
 * check in get_output_for_slot(). Returns TRUE if at least one factory of
 * @ftype can handle @caps on its sink pad. */
static gboolean
have_factory (GstDecodebin3 * dbin, GstCaps * caps,
    GstElementFactoryListType ftype)
{
  gboolean ret = FALSE;
  GList *res;

  /* factories_lock protects the cached factory lists */
  g_mutex_lock (&dbin->factories_lock);
  gst_decode_bin_update_factories_list (dbin);
  if (ftype == GST_ELEMENT_FACTORY_TYPE_DECODER)
    res =
        gst_element_factory_list_filter (dbin->decoder_factories,
        caps, GST_PAD_SINK, TRUE);
  else
    res =
        gst_element_factory_list_filter (dbin->decodable_factories,
        caps, GST_PAD_SINK, TRUE);
  g_mutex_unlock (&dbin->factories_lock);

  if (res) {
    ret = TRUE;
    gst_plugin_feature_list_free (res);
  }

  return ret;
}
#endif
+
+ static GList *
+ create_decoder_factory_list (GstDecodebin3 * dbin, GstCaps * caps)
+ {
+ GList *res;
+
+ g_mutex_lock (&dbin->factories_lock);
+ gst_decode_bin_update_factories_list (dbin);
+ res = gst_element_factory_list_filter (dbin->decoder_factories,
+ caps, GST_PAD_SINK, TRUE);
+ g_mutex_unlock (&dbin->factories_lock);
+ return res;
+ }
+
+ static GstPadProbeReturn
+ keyframe_waiter_probe (GstPad * pad, GstPadProbeInfo * info,
+ DecodebinOutputStream * output)
+ {
+ GstBuffer *buf = GST_PAD_PROBE_INFO_BUFFER (info);
+ /* If we have a keyframe, remove the probe and let all data through */
+ /* FIXME : HANDLE HEADER BUFFER ?? */
+ if (!GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT) ||
+ GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_HEADER)) {
+ GST_DEBUG_OBJECT (pad,
+ "Buffer is keyframe or header, letting through and removing probe");
+ output->drop_probe_id = 0;
+ return GST_PAD_PROBE_REMOVE;
+ }
+ GST_DEBUG_OBJECT (pad, "Buffer is not a keyframe, dropping");
+ return GST_PAD_PROBE_DROP;
+ }
+
+ static void
+ reconfigure_output_stream (DecodebinOutputStream * output,
+ MultiQueueSlot * slot)
+ {
+ GstDecodebin3 *dbin = output->dbin;
+ GstCaps *new_caps = (GstCaps *) gst_stream_get_caps (slot->active_stream);
+ gboolean needs_decoder;
+
+ needs_decoder = gst_caps_can_intersect (new_caps, dbin->caps) != TRUE;
+
+ GST_DEBUG_OBJECT (dbin,
+ "Reconfiguring output %p to slot %p, needs_decoder:%d", output, slot,
+ needs_decoder);
+
+ /* FIXME : Maybe make the output un-hook itself automatically ? */
+ if (output->slot != NULL && output->slot != slot) {
+ GST_WARNING_OBJECT (dbin,
+ "Output still linked to another slot (%p)", output->slot);
+ gst_caps_unref (new_caps);
+ return;
+ }
+
+ /* Check if existing config is reusable as-is by checking if
+ * the existing decoder accepts the new caps, if not delete
+ * it and create a new one */
+ if (output->decoder) {
+ gboolean can_reuse_decoder;
+
+ if (needs_decoder) {
+ can_reuse_decoder =
+ gst_pad_query_accept_caps (output->decoder_sink, new_caps);
+ } else
+ can_reuse_decoder = FALSE;
+
+ if (can_reuse_decoder) {
+ if (output->type & GST_STREAM_TYPE_VIDEO && output->drop_probe_id == 0) {
+ GST_DEBUG_OBJECT (dbin, "Adding keyframe-waiter probe");
+ output->drop_probe_id =
+ gst_pad_add_probe (slot->src_pad, GST_PAD_PROBE_TYPE_BUFFER,
+ (GstPadProbeCallback) keyframe_waiter_probe, output, NULL);
+ }
+ GST_DEBUG_OBJECT (dbin, "Reusing existing decoder for slot %p", slot);
+ if (output->linked == FALSE) {
+ gst_pad_link_full (slot->src_pad, output->decoder_sink,
+ GST_PAD_LINK_CHECK_NOTHING);
+ output->linked = TRUE;
+ }
+ gst_caps_unref (new_caps);
+ return;
+ }
+
+ GST_DEBUG_OBJECT (dbin, "Removing old decoder for slot %p", slot);
+
+ if (output->linked)
+ gst_pad_unlink (slot->src_pad, output->decoder_sink);
+ output->linked = FALSE;
+ if (output->drop_probe_id) {
+ gst_pad_remove_probe (slot->src_pad, output->drop_probe_id);
+ output->drop_probe_id = 0;
+ }
+
+ if (!gst_ghost_pad_set_target ((GstGhostPad *) output->src_pad, NULL)) {
+ GST_ERROR_OBJECT (dbin, "Could not release decoder pad");
+ gst_caps_unref (new_caps);
+ goto cleanup;
+ }
+
+ gst_element_set_locked_state (output->decoder, TRUE);
+ gst_element_set_state (output->decoder, GST_STATE_NULL);
+
+ gst_bin_remove ((GstBin *) dbin, output->decoder);
+ output->decoder = NULL;
+ output->decoder_latency = GST_CLOCK_TIME_NONE;
+ } else if (output->linked) {
+ /* Otherwise if we have no decoder yet but the output is linked make
+ * sure that the ghost pad is really unlinked in case no decoder was
+ * needed previously */
+ if (!gst_ghost_pad_set_target ((GstGhostPad *) output->src_pad, NULL)) {
+ GST_ERROR_OBJECT (dbin, "Could not release ghost pad");
+ gst_caps_unref (new_caps);
+ goto cleanup;
+ }
+ }
+
+ gst_object_replace ((GstObject **) & output->decoder_sink, NULL);
+ gst_object_replace ((GstObject **) & output->decoder_src, NULL);
+
+ /* If a decoder is required, create one */
+ if (needs_decoder) {
+ GList *factories, *next_factory;
+
+ factories = next_factory = create_decoder_factory_list (dbin, new_caps);
+ while (!output->decoder) {
+ gboolean decoder_failed = FALSE;
+
+ /* If we don't have a decoder yet, instantiate one */
+ if (next_factory) {
++#ifdef TIZEN_FEATURE_RESOURCE_MANAGER
++ if (gst_element_factory_list_is_type (next_factory->data,
++ GST_ELEMENT_FACTORY_TYPE_HARDWARE)) {
++ gboolean result = FALSE;
++ g_signal_emit (G_OBJECT (dbin),
++ gst_decodebin3_signals[SIGNAL_REQUEST_RESOURCE], 0, dbin->collection, slot->active_stream,
++ &result);
++ if (!result) {
++ GstCaps *caps;
++ GST_WARNING_OBJECT (dbin, "Failed to get HW resource.");
++ SELECTION_UNLOCK (dbin);
++ caps = gst_stream_get_caps (slot->active_stream);
++ gst_element_post_message (GST_ELEMENT_CAST (dbin),
++ gst_missing_decoder_message_new (GST_ELEMENT_CAST (dbin), caps));
++ gst_caps_unref (caps);
++ SELECTION_LOCK (dbin);
++ goto cleanup;
++ }
++ }
++#endif
+ output->decoder = gst_element_factory_create ((GstElementFactory *)
+ next_factory->data, NULL);
+ GST_DEBUG ("Created decoder '%s'", GST_ELEMENT_NAME (output->decoder));
+ } else
+ GST_DEBUG ("Could not find an element for caps %" GST_PTR_FORMAT,
+ new_caps);
+
+ if (output->decoder == NULL) {
+ GstCaps *caps;
+
+ SELECTION_UNLOCK (dbin);
+ /* FIXME : Should we be smarter if there's a missing decoder ?
+ * Should we deactivate that stream ? */
+ caps = gst_stream_get_caps (slot->active_stream);
+ gst_element_post_message (GST_ELEMENT_CAST (dbin),
+ gst_missing_decoder_message_new (GST_ELEMENT_CAST (dbin), caps));
+ gst_caps_unref (caps);
+ SELECTION_LOCK (dbin);
+ goto cleanup;
+ }
+ if (!gst_bin_add ((GstBin *) dbin, output->decoder)) {
+ GST_ERROR_OBJECT (dbin, "could not add decoder to pipeline");
+ goto cleanup;
+ }
+ output->decoder_sink =
+ gst_element_get_static_pad (output->decoder, "sink");
+ output->decoder_src = gst_element_get_static_pad (output->decoder, "src");
+ if (output->type & GST_STREAM_TYPE_VIDEO) {
+ GST_DEBUG_OBJECT (dbin, "Adding keyframe-waiter probe");
+ output->drop_probe_id =
+ gst_pad_add_probe (slot->src_pad, GST_PAD_PROBE_TYPE_BUFFER,
+ (GstPadProbeCallback) keyframe_waiter_probe, output, NULL);
+ }
+ if (gst_pad_link_full (slot->src_pad, output->decoder_sink,
+ GST_PAD_LINK_CHECK_NOTHING) != GST_PAD_LINK_OK) {
+ GST_ERROR_OBJECT (dbin, "could not link to %s:%s",
+ GST_DEBUG_PAD_NAME (output->decoder_sink));
+ goto cleanup;
+ }
+ if (gst_element_set_state (output->decoder,
+ GST_STATE_READY) == GST_STATE_CHANGE_FAILURE) {
+ GST_DEBUG_OBJECT (dbin,
+ "Decoder '%s' failed to reach READY state, trying the next type",
+ GST_ELEMENT_NAME (output->decoder));
+ decoder_failed = TRUE;
+ }
+ if (!gst_pad_query_accept_caps (output->decoder_sink, new_caps)) {
+ GST_DEBUG_OBJECT (dbin,
+ "Decoder '%s' did not accept the caps, trying the next type",
+ GST_ELEMENT_NAME (output->decoder));
+ decoder_failed = TRUE;
+ }
+ if (decoder_failed) {
+ gst_pad_unlink (slot->src_pad, output->decoder_sink);
+ if (output->drop_probe_id) {
+ gst_pad_remove_probe (slot->src_pad, output->drop_probe_id);
+ output->drop_probe_id = 0;
+ }
+
+ gst_element_set_locked_state (output->decoder, TRUE);
+ gst_element_set_state (output->decoder, GST_STATE_NULL);
+
+ gst_bin_remove ((GstBin *) dbin, output->decoder);
+ output->decoder = NULL;
+ }
+ next_factory = next_factory->next;
+ }
+ gst_plugin_feature_list_free (factories);
+ } else {
+ output->decoder_src = gst_object_ref (slot->src_pad);
+ output->decoder_sink = NULL;
+ }
+ gst_caps_unref (new_caps);
+
+ output->linked = TRUE;
+ if (!gst_ghost_pad_set_target ((GstGhostPad *) output->src_pad,
+ output->decoder_src)) {
+ GST_ERROR_OBJECT (dbin, "Could not expose decoder pad");
+ goto cleanup;
+ }
+ if (output->src_exposed == FALSE) {
+ GstEvent *stream_start;
+
+ stream_start = gst_pad_get_sticky_event (slot->src_pad,
+ GST_EVENT_STREAM_START, 0);
+
+ /* Ensure GstStream is accesiable from pad-added callback */
+ if (stream_start) {
+ gst_pad_store_sticky_event (output->src_pad, stream_start);
+ gst_event_unref (stream_start);
+ } else {
+ GST_WARNING_OBJECT (slot->src_pad,
+ "Pad has no stored stream-start event");
+ }
+
+ output->src_exposed = TRUE;
+ gst_element_add_pad (GST_ELEMENT_CAST (dbin), output->src_pad);
+ }
+
+ if (output->decoder)
+ gst_element_sync_state_with_parent (output->decoder);
+
+ output->slot = slot;
+ return;
+
+ cleanup:
+ {
+ GST_DEBUG_OBJECT (dbin, "Cleanup");
+ if (output->decoder_sink) {
+ gst_object_unref (output->decoder_sink);
+ output->decoder_sink = NULL;
+ }
+ if (output->decoder_src) {
+ gst_object_unref (output->decoder_src);
+ output->decoder_src = NULL;
+ }
+ if (output->decoder) {
+ gst_element_set_state (output->decoder, GST_STATE_NULL);
+ gst_bin_remove ((GstBin *) dbin, output->decoder);
+ output->decoder = NULL;
+ }
+ }
+ }
+
+ static GstPadProbeReturn
+ idle_reconfigure (GstPad * pad, GstPadProbeInfo * info, MultiQueueSlot * slot)
+ {
+ GstMessage *msg = NULL;
+ DecodebinOutputStream *output;
+
+ SELECTION_LOCK (slot->dbin);
+ output = get_output_for_slot (slot);
+
+ GST_DEBUG_OBJECT (pad, "output : %p", output);
+
+ if (output) {
+ reconfigure_output_stream (output, slot);
+ msg = is_selection_done (slot->dbin);
+ }
+ SELECTION_UNLOCK (slot->dbin);
+ if (msg)
+ gst_element_post_message ((GstElement *) slot->dbin, msg);
+
+ return GST_PAD_PROBE_REMOVE;
+ }
+
+ static MultiQueueSlot *
+ find_slot_for_stream_id (GstDecodebin3 * dbin, const gchar * sid)
+ {
+ GList *tmp;
+
+ for (tmp = dbin->slots; tmp; tmp = tmp->next) {
+ MultiQueueSlot *slot = (MultiQueueSlot *) tmp->data;
+ const gchar *stream_id;
+ if (slot->active_stream) {
+ stream_id = gst_stream_get_stream_id (slot->active_stream);
+ if (!g_strcmp0 (sid, stream_id))
+ return slot;
+ }
+ if (slot->pending_stream && slot->pending_stream != slot->active_stream) {
+ stream_id = gst_stream_get_stream_id (slot->pending_stream);
+ if (!g_strcmp0 (sid, stream_id))
+ return slot;
+ }
+ }
+
+ return NULL;
+ }
+
+ /* This function handles the reassignment of a slot. Call this from
+ * the streaming thread of a slot. */
+ static gboolean
+ reassign_slot (GstDecodebin3 * dbin, MultiQueueSlot * slot)
+ {
+ DecodebinOutputStream *output;
+ MultiQueueSlot *target_slot = NULL;
+ GList *tmp;
+ const gchar *sid, *tsid;
+
+ SELECTION_LOCK (dbin);
+ output = slot->output;
+
+ if (G_UNLIKELY (slot->active_stream == NULL)) {
+ GST_DEBUG_OBJECT (slot->src_pad,
+ "Called on inactive slot (active_stream == NULL)");
+ SELECTION_UNLOCK (dbin);
+ return FALSE;
+ }
+
+ if (G_UNLIKELY (output == NULL)) {
+ GST_DEBUG_OBJECT (slot->src_pad,
+ "Slot doesn't have any output to be removed");
+ SELECTION_UNLOCK (dbin);
+ return FALSE;
+ }
+
+ sid = gst_stream_get_stream_id (slot->active_stream);
+ GST_DEBUG_OBJECT (slot->src_pad, "slot %s %p", sid, slot);
+
+ /* Recheck whether this stream is still in the list of streams to deactivate */
+ if (stream_in_list (dbin->requested_selection, sid)) {
+ /* Stream is in the list of requested streams, don't remove */
+ SELECTION_UNLOCK (dbin);
+ GST_DEBUG_OBJECT (slot->src_pad,
+ "Stream '%s' doesn't need to be deactivated", sid);
+ return FALSE;
+ }
+
+ /* Unlink slot from output */
+ /* FIXME : Handle flushing ? */
+ /* FIXME : Handle outputs without decoders */
+ GST_DEBUG_OBJECT (slot->src_pad, "Unlinking from decoder %p",
+ output->decoder_sink);
+ if (output->decoder_sink)
+ gst_pad_unlink (slot->src_pad, output->decoder_sink);
+ output->linked = FALSE;
+ slot->output = NULL;
+ output->slot = NULL;
+ /* Remove sid from active selection */
+ for (tmp = dbin->active_selection; tmp; tmp = tmp->next)
+ if (!g_strcmp0 (sid, tmp->data)) {
+ dbin->active_selection = g_list_delete_link (dbin->active_selection, tmp);
+ break;
+ }
+
+ /* Can we re-assign this output to a requested stream ? */
+ GST_DEBUG_OBJECT (slot->src_pad, "Attempting to re-assing output stream");
+ for (tmp = dbin->to_activate; tmp; tmp = tmp->next) {
+ MultiQueueSlot *tslot = find_slot_for_stream_id (dbin, tmp->data);
+ GST_LOG_OBJECT (tslot->src_pad, "Checking slot %p (output:%p , stream:%s)",
+ tslot, tslot->output, gst_stream_get_stream_id (tslot->active_stream));
+ if (tslot && tslot->type == output->type && tslot->output == NULL) {
+ GST_DEBUG_OBJECT (tslot->src_pad, "Using as reassigned slot");
+ target_slot = tslot;
+ tsid = tmp->data;
+ /* Pass target stream id to requested selection */
+ dbin->requested_selection =
+ g_list_append (dbin->requested_selection, g_strdup (tmp->data));
+ dbin->to_activate = g_list_remove (dbin->to_activate, tmp->data);
+ break;
+ }
+ }
+
+ if (target_slot) {
+ GST_DEBUG_OBJECT (slot->src_pad, "Assigning output to slot %p '%s'",
+ target_slot, tsid);
+ target_slot->output = output;
+ output->slot = target_slot;
+ dbin->active_selection =
+ g_list_append (dbin->active_selection, (gchar *) tsid);
+ SELECTION_UNLOCK (dbin);
+
+ /* Wakeup the target slot so that it retries to send events/buffers
+ * thereby triggering the output reconfiguration codepath */
+ gst_pad_add_probe (target_slot->src_pad, GST_PAD_PROBE_TYPE_IDLE,
+ (GstPadProbeCallback) idle_reconfigure, target_slot, NULL);
+ /* gst_pad_send_event (target_slot->src_pad, gst_event_new_reconfigure ()); */
+ } else {
+ GstMessage *msg;
+
+ dbin->output_streams = g_list_remove (dbin->output_streams, output);
+ free_output_stream (dbin, output);
+ msg = is_selection_done (slot->dbin);
+ SELECTION_UNLOCK (dbin);
+
+ if (msg)
+ gst_element_post_message ((GstElement *) slot->dbin, msg);
+ }
+
+ return TRUE;
+ }
+
+ /* Idle probe called when a slot should be unassigned from its output stream.
+ * This is needed to ensure nothing is flowing when unlinking the slot.
+ *
+ * Also, this method will search for a pending stream which could re-use
+ * the output stream. */
+ static GstPadProbeReturn
+ slot_unassign_probe (GstPad * pad, GstPadProbeInfo * info,
+ MultiQueueSlot * slot)
+ {
+ GstDecodebin3 *dbin = slot->dbin;
+
+ reassign_slot (dbin, slot);
+
+ return GST_PAD_PROBE_REMOVE;
+ }
+
/* Core stream-switch handler: compares the requested stream selection
 * against the current multiqueue slot assignments and computes which
 * slots must be activated, deactivated, or have their output reassigned.
 *
 * @dbin: the decodebin3 instance
 * @select_streams: list of requested stream-ids (list and strings owned
 *   by the caller)
 * @seqnum: seqnum of the SELECT_STREAMS event being handled; a mismatch
 *   with dbin->select_streams_seqnum means a newer request superseded us.
 *
 * The actual (un)assignment work happens later, from idle probes added
 * on the relevant slot src pads at the bottom of this function.
 * Returns TRUE (always, currently). */
static gboolean
handle_stream_switch (GstDecodebin3 * dbin, GList * select_streams,
    guint32 seqnum)
{
  gboolean ret = TRUE;
  GList *tmp;
  /* List of slots to (de)activate. */
  GList *to_deactivate = NULL;
  GList *to_activate = NULL;
  /* List of unknown stream id, most likely means the event
   * should be sent upstream so that elements can expose the requested stream */
  GList *unknown = NULL;
  GList *to_reassign = NULL;
  GList *future_request_streams = NULL;
  GList *pending_streams = NULL;
  GList *slots_to_reassign = NULL;

  SELECTION_LOCK (dbin);
  if (G_UNLIKELY (seqnum != dbin->select_streams_seqnum)) {
    GST_DEBUG_OBJECT (dbin, "New SELECT_STREAMS has arrived in the meantime");
    SELECTION_UNLOCK (dbin);
    return TRUE;
  }
  /* Remove pending select_streams */
  g_list_free (dbin->pending_select_streams);
  dbin->pending_select_streams = NULL;

  /* COMPARE the requested streams to the active and requested streams
   * on multiqueue. */

  /* First check the slots to activate and which ones are unknown */
  for (tmp = select_streams; tmp; tmp = tmp->next) {
    const gchar *sid = (const gchar *) tmp->data;
    MultiQueueSlot *slot;
    GST_DEBUG_OBJECT (dbin, "Checking stream '%s'", sid);
    slot = find_slot_for_stream_id (dbin, sid);
    /* Find the corresponding slot */
    if (slot == NULL) {
      /* No slot yet: either the stream exists in the collection (and a
       * slot will appear later) or it is completely unknown to us */
      if (stream_in_collection (dbin, (gchar *) sid)) {
        pending_streams = g_list_append (pending_streams, (gchar *) sid);
      } else {
        GST_DEBUG_OBJECT (dbin, "We don't have a slot for stream '%s'", sid);
        unknown = g_list_append (unknown, (gchar *) sid);
      }
    } else if (slot->output == NULL) {
      GST_DEBUG_OBJECT (dbin, "We need to activate slot %p for stream '%s')",
          slot, sid);
      to_activate = g_list_append (to_activate, slot);
    } else {
      GST_DEBUG_OBJECT (dbin,
          "Stream '%s' from slot %p is already active on output %p", sid, slot,
          slot->output);
      future_request_streams =
          g_list_append (future_request_streams, (gchar *) sid);
    }
  }

  /* Second pass: any slot currently owning an output whose streams are
   * no longer requested must be deactivated */
  for (tmp = dbin->slots; tmp; tmp = tmp->next) {
    MultiQueueSlot *slot = (MultiQueueSlot *) tmp->data;
    /* For slots that have an output, check if it's part of the streams to
     * be active */
    if (slot->output) {
      gboolean slot_to_deactivate = TRUE;

      if (slot->active_stream) {
        if (stream_in_list (select_streams,
                gst_stream_get_stream_id (slot->active_stream)))
          slot_to_deactivate = FALSE;
      }
      if (slot_to_deactivate && slot->pending_stream
          && slot->pending_stream != slot->active_stream) {
        if (stream_in_list (select_streams,
                gst_stream_get_stream_id (slot->pending_stream)))
          slot_to_deactivate = FALSE;
      }
      if (slot_to_deactivate) {
        GST_DEBUG_OBJECT (dbin,
            "Slot %p (%s) should be deactivated, no longer used", slot,
            slot->
            active_stream ? gst_stream_get_stream_id (slot->active_stream) :
            "NULL");
        to_deactivate = g_list_append (to_deactivate, slot);
      }
    }
  }

  if (to_deactivate != NULL) {
    GST_DEBUG_OBJECT (dbin, "Check if we can reassign slots");
    /* We need to compare what needs to be activated and deactivated in order
     * to determine whether there are outputs that can be transferred */
    /* Take the stream-id of the slots that are to be activated, for which there
     * is a slot of the same type that needs to be deactivated */
    tmp = to_deactivate;
    while (tmp) {
      MultiQueueSlot *slot_to_deactivate = (MultiQueueSlot *) tmp->data;
      gboolean removeit = FALSE;
      GList *tmp2, *next;
      GST_DEBUG_OBJECT (dbin,
          "Checking if slot to deactivate (%p) has a candidate slot to activate",
          slot_to_deactivate);
      for (tmp2 = to_activate; tmp2; tmp2 = tmp2->next) {
        MultiQueueSlot *slot_to_activate = (MultiQueueSlot *) tmp2->data;
        GST_DEBUG_OBJECT (dbin, "Comparing to slot %p", slot_to_activate);
        if (slot_to_activate->type == slot_to_deactivate->type) {
          GST_DEBUG_OBJECT (dbin, "Re-using");
          to_reassign = g_list_append (to_reassign, (gchar *)
              gst_stream_get_stream_id (slot_to_activate->active_stream));
          slots_to_reassign =
              g_list_append (slots_to_reassign, slot_to_deactivate);
          to_activate = g_list_remove (to_activate, slot_to_activate);
          removeit = TRUE;
          break;
        }
      }
      /* grab ->next before a potential delete_link invalidates tmp */
      next = tmp->next;
      if (removeit)
        to_deactivate = g_list_delete_link (to_deactivate, tmp);
      tmp = next;
    }
  }

  /* Whatever is left in to_deactivate has no reusable counterpart; it
   * will simply be unassigned via the idle probe below */
  for (tmp = to_deactivate; tmp; tmp = tmp->next) {
    MultiQueueSlot *slot = (MultiQueueSlot *) tmp->data;
    GST_DEBUG_OBJECT (dbin,
        "Really need to deactivate slot %p, but no available alternative",
        slot);

    slots_to_reassign = g_list_append (slots_to_reassign, slot);
  }

  /* The only slots left to activate are the ones that won't be reassigned and
   * therefore really need to have a new output created */
  for (tmp = to_activate; tmp; tmp = tmp->next) {
    MultiQueueSlot *slot = (MultiQueueSlot *) tmp->data;
    if (slot->active_stream)
      future_request_streams =
          g_list_append (future_request_streams,
          (gchar *) gst_stream_get_stream_id (slot->active_stream));
    else if (slot->pending_stream)
      future_request_streams =
          g_list_append (future_request_streams,
          (gchar *) gst_stream_get_stream_id (slot->pending_stream));
    else
      GST_ERROR_OBJECT (dbin, "No stream for slot %p !!", slot);
  }

  /* Update the requested selection. Note: dbin->requested_selection owns
   * its strings (deep copies), while the local lists above only borrow. */
  if (to_activate == NULL && pending_streams != NULL) {
    GST_DEBUG_OBJECT (dbin, "Stream switch requested for future collection");
    if (dbin->requested_selection)
      g_list_free_full (dbin->requested_selection, g_free);
    dbin->requested_selection =
        g_list_copy_deep (select_streams, (GCopyFunc) g_strdup, NULL);
    g_list_free (to_deactivate);
    g_list_free (pending_streams);
    to_deactivate = NULL;
    pending_streams = NULL;
  } else {
    if (dbin->requested_selection)
      g_list_free_full (dbin->requested_selection, g_free);
    dbin->requested_selection =
        g_list_copy_deep (future_request_streams, (GCopyFunc) g_strdup, NULL);
    dbin->requested_selection =
        g_list_concat (dbin->requested_selection,
        g_list_copy_deep (pending_streams, (GCopyFunc) g_strdup, NULL));
    if (dbin->to_activate)
      g_list_free (dbin->to_activate);
    dbin->to_activate = g_list_copy (to_reassign);
  }

  dbin->selection_updated = TRUE;
  SELECTION_UNLOCK (dbin);

  if (unknown) {
    GST_FIXME_OBJECT (dbin, "Got request for an unknown stream");
    g_list_free (unknown);
  }

  /* Slots needing a brand new output: wake them up with an idle probe so
   * reconfiguration happens from their streaming thread */
  if (to_activate && !slots_to_reassign) {
    for (tmp = to_activate; tmp; tmp = tmp->next) {
      MultiQueueSlot *slot = (MultiQueueSlot *) tmp->data;
      gst_pad_add_probe (slot->src_pad, GST_PAD_PROBE_TYPE_IDLE,
          (GstPadProbeCallback) idle_reconfigure, slot, NULL);
    }
  }

  /* For all streams to deactivate, add an idle probe where we will do
   * the unassignment and switch over */
  for (tmp = slots_to_reassign; tmp; tmp = tmp->next) {
    MultiQueueSlot *slot = (MultiQueueSlot *) tmp->data;
    gst_pad_add_probe (slot->src_pad, GST_PAD_PROBE_TYPE_IDLE,
        (GstPadProbeCallback) slot_unassign_probe, slot, NULL);
  }

  /* Free the temporary (shallow) lists; the elements are borrowed */
  if (to_deactivate)
    g_list_free (to_deactivate);
  if (to_activate)
    g_list_free (to_activate);
  if (to_reassign)
    g_list_free (to_reassign);
  if (future_request_streams)
    g_list_free (future_request_streams);
  if (pending_streams)
    g_list_free (pending_streams);
  if (slots_to_reassign)
    g_list_free (slots_to_reassign);

  return ret;
}
+
/* Upstream event probe installed on the internal proxy pad of every
 * exposed ghost source pad.
 *
 * Intercepts GST_EVENT_SELECT_STREAMS: records the requested streams
 * (deduplicated by seqnum so the same event arriving on several pads is
 * only handled once), forwards the event upstream, then performs the
 * switch locally via handle_stream_switch().
 *
 * Returns GST_PAD_PROBE_HANDLED for SELECT_STREAMS (the event's
 * ownership is taken over here), GST_PAD_PROBE_OK otherwise. */
static GstPadProbeReturn
ghost_pad_event_probe (GstPad * pad, GstPadProbeInfo * info,
    DecodebinOutputStream * output)
{
  GstPadProbeReturn ret = GST_PAD_PROBE_OK;
  GstDecodebin3 *dbin = output->dbin;
  GstEvent *event = GST_PAD_PROBE_INFO_EVENT (info);

  GST_DEBUG_OBJECT (pad, "Got event %p %s", event, GST_EVENT_TYPE_NAME (event));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SELECT_STREAMS:
    {
      GstPad *peer;
      GList *streams = NULL;
      guint32 seqnum = gst_event_get_seqnum (event);

      SELECTION_LOCK (dbin);
      /* Same seqnum as the last request: duplicate event, drop it */
      if (seqnum == dbin->select_streams_seqnum) {
        SELECTION_UNLOCK (dbin);
        GST_DEBUG_OBJECT (pad,
            "Already handled/handling that SELECT_STREAMS event");
        gst_event_unref (event);
        ret = GST_PAD_PROBE_HANDLED;
        break;
      }
      dbin->select_streams_seqnum = seqnum;
      if (dbin->pending_select_streams != NULL) {
        GST_LOG_OBJECT (dbin, "Replacing pending select streams");
        g_list_free (dbin->pending_select_streams);
        dbin->pending_select_streams = NULL;
      }
      gst_event_parse_select_streams (event, &streams);
      dbin->pending_select_streams = g_list_copy (streams);
      SELECTION_UNLOCK (dbin);

      /* Send event upstream (consumes the event either way) */
      if ((peer = gst_pad_get_peer (pad))) {
        gst_pad_send_event (peer, event);
        gst_object_unref (peer);
      } else {
        gst_event_unref (event);
      }
      /* Finally handle the switch */
      if (streams) {
        handle_stream_switch (dbin, streams, seqnum);
        g_list_free_full (streams, g_free);
      }
      ret = GST_PAD_PROBE_HANDLED;
    }
      break;
    default:
      break;
  }

  return ret;
}
+
/* GstElement::send_event implementation.
 *
 * Handles GST_EVENT_SELECT_STREAMS sent directly on the element (as
 * opposed to arriving via a source pad, which ghost_pad_event_probe
 * handles); all other events go to the parent class.
 *
 * Mirrors the probe's logic: dedupe by seqnum, record the pending
 * selection, then run handle_stream_switch(). Always consumes the
 * event and returns TRUE for SELECT_STREAMS. */
static gboolean
gst_decodebin3_send_event (GstElement * element, GstEvent * event)
{
  GST_DEBUG_OBJECT (element, "event %s", GST_EVENT_TYPE_NAME (event));
  if (GST_EVENT_TYPE (event) == GST_EVENT_SELECT_STREAMS) {
    GstDecodebin3 *dbin = (GstDecodebin3 *) element;
    GList *streams = NULL;
    guint32 seqnum = gst_event_get_seqnum (event);

    SELECTION_LOCK (dbin);
    /* Same seqnum as the last request: already being handled */
    if (seqnum == dbin->select_streams_seqnum) {
      SELECTION_UNLOCK (dbin);
      GST_DEBUG_OBJECT (dbin,
          "Already handled/handling that SELECT_STREAMS event");
      return TRUE;
    }
    dbin->select_streams_seqnum = seqnum;
    if (dbin->pending_select_streams != NULL) {
      GST_LOG_OBJECT (dbin, "Replacing pending select streams");
      g_list_free (dbin->pending_select_streams);
      dbin->pending_select_streams = NULL;
    }
    gst_event_parse_select_streams (event, &streams);
    dbin->pending_select_streams = g_list_copy (streams);
    SELECTION_UNLOCK (dbin);

    /* FIXME : We don't have an upstream ?? */
#if 0
    /* Send event upstream */
    if ((peer = gst_pad_get_peer (pad))) {
      gst_pad_send_event (peer, event);
      gst_object_unref (peer);
    }
#endif
    /* Finally handle the switch */
    if (streams) {
      handle_stream_switch (dbin, streams, seqnum);
      g_list_free_full (streams, g_free);
    }

    gst_event_unref (event);
    return TRUE;
  }
  return GST_ELEMENT_CLASS (parent_class)->send_event (element, event);
}
+
+
+ static void
+ free_multiqueue_slot (GstDecodebin3 * dbin, MultiQueueSlot * slot)
+ {
+ if (slot->probe_id)
+ gst_pad_remove_probe (slot->src_pad, slot->probe_id);
+ if (slot->input) {
+ if (slot->input->srcpad)
+ gst_pad_unlink (slot->input->srcpad, slot->sink_pad);
+ }
+
+ gst_element_release_request_pad (dbin->multiqueue, slot->sink_pad);
+ gst_object_replace ((GstObject **) & slot->sink_pad, NULL);
+ gst_object_replace ((GstObject **) & slot->src_pad, NULL);
+ gst_object_replace ((GstObject **) & slot->active_stream, NULL);
+ g_free (slot);
+ }
+
/* Schedule free_multiqueue_slot() on the element's async helper thread.
 * Used when the caller's context (e.g. a streaming thread of the slot
 * itself) cannot free the slot directly. */
static void
free_multiqueue_slot_async (GstDecodebin3 * dbin, MultiQueueSlot * slot)
{
  GST_LOG_OBJECT (dbin, "pushing multiqueue slot on thread pool to free");
  gst_element_call_async (GST_ELEMENT_CAST (dbin),
      (GstElementCallAsyncFunc) free_multiqueue_slot, slot, NULL);
}
+
+ /* Create a DecodebinOutputStream for a given type
+ * Note: It will be empty initially, it needs to be configured
+ * afterwards */
+ static DecodebinOutputStream *
+ create_output_stream (GstDecodebin3 * dbin, GstStreamType type)
+ {
+ DecodebinOutputStream *res = g_new0 (DecodebinOutputStream, 1);
+ gchar *pad_name;
+ const gchar *prefix;
+ GstStaticPadTemplate *templ;
+ GstPadTemplate *ptmpl;
+ guint32 *counter;
+ GstPad *internal_pad;
+
+ GST_DEBUG_OBJECT (dbin, "Created new output stream %p for type %s",
+ res, gst_stream_type_get_name (type));
+
+ res->type = type;
+ res->dbin = dbin;
+ res->decoder_latency = GST_CLOCK_TIME_NONE;
+
+ if (type & GST_STREAM_TYPE_VIDEO) {
+ templ = &video_src_template;
+ counter = &dbin->vpadcount;
+ prefix = "video";
+ } else if (type & GST_STREAM_TYPE_AUDIO) {
+ templ = &audio_src_template;
+ counter = &dbin->apadcount;
+ prefix = "audio";
+ } else if (type & GST_STREAM_TYPE_TEXT) {
+ templ = &text_src_template;
+ counter = &dbin->tpadcount;
+ prefix = "text";
+ } else {
+ templ = &src_template;
+ counter = &dbin->opadcount;
+ prefix = "src";
+ }
+
+ pad_name = g_strdup_printf ("%s_%u", prefix, *counter);
+ *counter += 1;
+ ptmpl = gst_static_pad_template_get (templ);
+ res->src_pad = gst_ghost_pad_new_no_target_from_template (pad_name, ptmpl);
+ gst_object_unref (ptmpl);
+ g_free (pad_name);
+ gst_pad_set_active (res->src_pad, TRUE);
+ /* Put an event probe on the internal proxy pad to detect upstream
+ * events */
+ internal_pad =
+ (GstPad *) gst_proxy_pad_get_internal ((GstProxyPad *) res->src_pad);
+ gst_pad_add_probe (internal_pad, GST_PAD_PROBE_TYPE_EVENT_UPSTREAM,
+ (GstPadProbeCallback) ghost_pad_event_probe, res, NULL);
+ gst_object_unref (internal_pad);
+
+ dbin->output_streams = g_list_append (dbin->output_streams, res);
+
+ return res;
+ }
+
+ static void
+ free_output_stream (GstDecodebin3 * dbin, DecodebinOutputStream * output)
+ {
+ if (output->slot) {
+ if (output->decoder_sink && output->decoder)
+ gst_pad_unlink (output->slot->src_pad, output->decoder_sink);
+
+ output->slot->output = NULL;
+ output->slot = NULL;
+ }
+ gst_object_replace ((GstObject **) & output->decoder_sink, NULL);
+ gst_ghost_pad_set_target ((GstGhostPad *) output->src_pad, NULL);
+ gst_object_replace ((GstObject **) & output->decoder_src, NULL);
+ if (output->src_exposed) {
+ gst_element_remove_pad ((GstElement *) dbin, output->src_pad);
+ }
+ if (output->decoder) {
+ gst_element_set_locked_state (output->decoder, TRUE);
+ gst_element_set_state (output->decoder, GST_STATE_NULL);
+ gst_bin_remove ((GstBin *) dbin, output->decoder);
+ }
+ g_free (output);
+ }
+
/* GstElement::change_state implementation.
 *
 * Upward transitions currently need no special handling; on
 * PAUSED_TO_READY (after the parent class has stopped dataflow) all
 * output streams and multiqueue slots are freed and per-stream state
 * is reset so a new URI can be played. */
static GstStateChangeReturn
gst_decodebin3_change_state (GstElement * element, GstStateChange transition)
{
  GstDecodebin3 *dbin = (GstDecodebin3 *) element;
  GstStateChangeReturn ret;

  /* Upwards */
  switch (transition) {
    default:
      break;
  }
  /* Chain up first so teardown below happens after dataflow stopped */
  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
  if (ret == GST_STATE_CHANGE_FAILURE)
    goto beach;

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:
    {
      GList *tmp;

      /* Free output streams */
      for (tmp = dbin->output_streams; tmp; tmp = tmp->next) {
        DecodebinOutputStream *output = (DecodebinOutputStream *) tmp->data;
        free_output_stream (dbin, output);
      }
      g_list_free (dbin->output_streams);
      dbin->output_streams = NULL;
      /* Free multiqueue slots */
      for (tmp = dbin->slots; tmp; tmp = tmp->next) {
        MultiQueueSlot *slot = (MultiQueueSlot *) tmp->data;
        free_multiqueue_slot (dbin, slot);
      }
      g_list_free (dbin->slots);
      dbin->slots = NULL;
      dbin->current_group_id = GST_GROUP_ID_INVALID;
      /* Free inputs */
      /* Reset the main input group id since it will get a new id on a new stream */
      dbin->main_input->group_id = GST_GROUP_ID_INVALID;
      /* Reset multiqueue to default interleave */
      g_object_set (dbin->multiqueue, "min-interleave-time",
          dbin->default_mq_min_interleave, NULL);
      dbin->current_mq_min_interleave = dbin->default_mq_min_interleave;
    }
      break;
    default:
      break;
  }
beach:
  return ret;
}
--- /dev/null
- g_object_set (playbin->curr_group->uridecodebin, "download",
- (g_value_get_flags (value) & GST_PLAY_FLAG_DOWNLOAD) != 0, NULL);
+ /* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ * Copyright (C) <2011> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ * Copyright (C) <2013> Collabora Ltd.
+ * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ * Copyright (C) <2015> Jan Schmidt <jan@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+ /**
+ * SECTION:element-playbin3
+ * @title: playbin3
+ *
+ * playbin3 provides a stand-alone everything-in-one abstraction for an
+ * audio and/or video player. It differs from the previous playbin (playbin2)
+ * by supporting publication and selection of available streams via the
+ * #GstStreamCollection message and #GST_EVENT_SELECT_STREAMS event API.
+ *
+ * > playbin3 is still experimental API and a technology preview.
+ * > Its behaviour and exposed API is subject to change.
+ *
+ * playbin3 can handle both audio and video files and features
+ *
+ * * automatic file type recognition and based on that automatic
+ * selection and usage of the right audio/video/subtitle demuxers/decoders
+ *
+ * * auxiliary files - such as external subtitles and audio tracks
+ * * visualisations for audio files
 * * subtitle support for video files. Subtitles can be stored in external
 * files.
+ * * stream selection between different video/audio/subtitles streams
+ * * meta info (tag) extraction
+ * * easy access to the last video sample
+ * * buffering when playing streams over a network
+ * * volume control with mute option
+ *
+ * ## Usage
+ *
+ * A playbin element can be created just like any other element using
+ * gst_element_factory_make(). The file/URI to play should be set via the #GstPlayBin3:uri
+ * property. This must be an absolute URI, relative file paths are not allowed.
+ * Example URIs are file:///home/joe/movie.avi or http://www.joedoe.com/foo.ogg
+ *
+ * Playbin3 is a #GstPipeline. It will notify the application of everything
+ * that's happening (errors, end of stream, tags found, state changes, etc.)
+ * by posting messages on its #GstBus. The application needs to watch the
+ * bus.
+ *
+ * Playback can be initiated by setting the element to PLAYING state using
+ * gst_element_set_state(). Note that the state change will take place in
+ * the background in a separate thread, when the function returns playback
+ * is probably not happening yet and any errors might not have occurred yet.
 * Applications using playbin3 should ideally be written to deal with things
 * completely asynchronously.
+ *
+ * When playback has finished (an EOS message has been received on the bus)
+ * or an error has occurred (an ERROR message has been received on the bus) or
+ * the user wants to play a different track, playbin3 should be set back to
+ * READY or NULL state, then the #GstPlayBin3:uri property should be set to the
+ * new location and then playbin3 be set to PLAYING state again.
+ *
+ * Seeking can be done using gst_element_seek_simple() or gst_element_seek()
+ * on the playbin3 element. Again, the seek will not be executed
+ * instantaneously, but will be done in a background thread. When the seek
+ * call returns the seek will most likely still be in process. An application
+ * may wait for the seek to finish (or fail) using gst_element_get_state() with
+ * -1 as the timeout, but this will block the user interface and is not
+ * recommended at all.
+ *
+ * Applications may query the current position and duration of the stream
+ * via gst_element_query_position() and gst_element_query_duration() and
+ * setting the format passed to GST_FORMAT_TIME. If the query was successful,
+ * the duration or position will have been returned in units of nanoseconds.
+ *
+ * ## Selecting streams
+ *
+ * The recommended way to select streams (instead of the default selection) is
+ * to listen to GST_MESSAGE_STREAM_COLLECTION messages on the GstBus and send a
+ * GST_EVENT_SELECT_STREAMS on the pipeline with the selected streams. This
+ * provides more information and flexibility compared to the legacy #GstPlayBin
+ * property and signal-based mechanism.
+ *
+ * Note: The application should not assume that collections will not change
+ * throughout a single file. If it wishes to modify the default selection, it
+ * should always respond to new collections posted on the bus with a
+ * GST_EVENT_SELECT_STREAMS.
+ *
+ * ## Advanced Usage: specifying the audio and video sink
+ *
+ * By default, if no audio sink or video sink has been specified via the
+ * #GstPlayBin3:audio-sink or #GstPlayBin3:video-sink property, playbin3 will use the autoaudiosink
+ * and autovideosink elements to find the first-best available output method.
+ * This should work in most cases, but is not always desirable. Often either
+ * the user or application might want to specify more explicitly what to use
+ * for audio and video output.
+ *
+ * If the application wants more control over how audio or video should be
+ * output, it may create the audio/video sink elements itself (for example
+ * using gst_element_factory_make()) and provide them to playbin3 using the
+ * #GstPlayBin3:audio-sink or #GstPlayBin3:video-sink property.
+ *
+ * GNOME-based applications, for example, will usually want to create
+ * gconfaudiosink and gconfvideosink elements and make playbin3 use those,
+ * so that output happens to whatever the user has configured in the GNOME
+ * Multimedia System Selector configuration dialog.
+ *
+ * The sink elements do not necessarily need to be ready-made sinks. It is
+ * possible to create container elements that look like a sink to playbin3,
+ * but in reality contain a number of custom elements linked together. This
+ * can be achieved by creating a #GstBin and putting elements in there and
+ * linking them, and then creating a sink #GstGhostPad for the bin and pointing
+ * it to the sink pad of the first element within the bin. This can be used
+ * for a number of purposes, for example to force output to a particular
+ * format or to modify or observe the data before it is output.
+ *
+ * It is also possible to 'suppress' audio and/or video output by using
+ * 'fakesink' elements (or capture it from there using the fakesink element's
+ * "handoff" signal, which, nota bene, is fired from the streaming thread!).
+ *
+ * ## Retrieving Tags and Other Meta Data
+ *
+ * Most of the common meta data (artist, title, etc.) can be retrieved by
+ * watching for TAG messages on the pipeline's bus (see above).
+ *
+ * Other more specific meta information like width/height/framerate of video
+ * streams or samplerate/number of channels of audio streams can be obtained
+ * from the negotiated caps on the sink pads of the sinks.
+ *
+ * ## Buffering
+ * Playbin3 handles buffering automatically for the most part, but applications
+ * need to handle parts of the buffering process as well. Whenever playbin3 is
+ * buffering, it will post BUFFERING messages on the bus with a percentage
+ * value that shows the progress of the buffering process. Applications need
+ * to set playbin3 to PLAYING or PAUSED state in response to these messages.
+ * They may also want to convey the buffering progress to the user in some
+ * way. Here is how to extract the percentage information from the message:
+ * |[
+ * switch (GST_MESSAGE_TYPE (msg)) {
+ * case GST_MESSAGE_BUFFERING: {
+ * gint percent = 0;
+ * gst_message_parse_buffering (msg, &percent);
+ * g_print ("Buffering (%u percent done)", percent);
+ * break;
+ * }
+ * ...
+ * }
+ * ]|
+ *
+ * Note that applications should keep/set the pipeline in the PAUSED state when
+ * a BUFFERING message is received with a buffer percent value < 100 and set
+ * the pipeline back to PLAYING state when a BUFFERING message with a value
+ * of 100 percent is received (if PLAYING is the desired state, that is).
+ *
+ * ## Embedding the video window in your application
+ * By default, playbin3 (or rather the video sinks used) will create their own
+ * window. Applications will usually want to force output to a window of their
+ * own, however. This can be done using the #GstVideoOverlay interface, which most
+ * video sinks implement. See the documentation there for more details.
+ *
+ * ## Specifying which CD/DVD device to use
+ *
+ * The device to use for CDs/DVDs needs to be set on the source element playbin3
+ * creates before it is opened. The most generic way of doing this is to connect
+ * to playbin3's "source-setup" signal, which will be emitted by playbin3 when
+ * it has created the source element for a particular URI. In the signal
+ * callback you can check if the source element has a "device" property and set
+ * it appropriately. In some cases the device can also be set as part of the
+ * URI, but it depends on the elements involved if this will work or not. For
+ * example, for DVD menu playback, the following syntax might work (if the
+ * resindvd plugin is used): dvd://[/path/to/device]
+ *
+ * ## Handling redirects
+ *
+ * Some elements may post 'redirect' messages on the bus to tell the
+ * application to open another location. These are element messages containing
+ * a structure named 'redirect' along with a 'new-location' field of string
+ * type. The new location may be a relative or an absolute URI. Examples
+ * for such redirects can be found in many quicktime movie trailers.
+ *
+ * ## Examples
+ * |[
+ * gst-launch-1.0 -v playbin3 uri=file:///path/to/somefile.mp4
+ * ]|
 * This will play back the given media file, given that the video and
+ * audio decoders required to decode the content are installed. Since no
+ * special audio sink or video sink is supplied (via playbin3's audio-sink or
+ * video-sink properties) playbin3 will try to find a suitable audio and
+ * video sink automatically using the autoaudiosink and autovideosink elements.
+ * |[
+ * gst-launch-1.0 -v playbin3 uri=cdda://4
+ * ]|
+ * This will play back track 4 on an audio CD in your disc drive (assuming
+ * the drive is detected automatically by the plugin).
+ * |[
+ * gst-launch-1.0 -v playbin3 uri=dvd://
+ * ]|
+ * This will play back the DVD in your disc drive (assuming
+ * the drive is detected automatically by the plugin).
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <string.h>
+ #include <gst/gst.h>
+
+ #include <gst/gst-i18n-plugin.h>
+ #include <gst/pbutils/pbutils.h>
+ #include <gst/audio/streamvolume.h>
+ #include <gst/video/video-info.h>
+ #include <gst/video/video-multiview.h>
+ #include <gst/video/videooverlay.h>
+ #include <gst/video/navigation.h>
+ #include <gst/video/colorbalance.h>
+ #include "gstplay-enum.h"
+ #include "gstplaybackelements.h"
+ #include "gstplaysink.h"
+ #include "gstsubtitleoverlay.h"
+ #include "gstplaybackutils.h"
+
+ GST_DEBUG_CATEGORY_STATIC (gst_play_bin3_debug);
+ #define GST_CAT_DEFAULT gst_play_bin3_debug
+
+ #define GST_TYPE_PLAY_BIN (gst_play_bin3_get_type())
+ #define GST_PLAY_BIN3(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_PLAY_BIN,GstPlayBin3))
+ #define GST_PLAY_BIN3_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_PLAY_BIN,GstPlayBin3Class))
+ #define GST_IS_PLAY_BIN(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_PLAY_BIN))
+ #define GST_IS_PLAY_BIN_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_PLAY_BIN))
+
+ #define ULONG_TO_POINTER(number) ((gpointer) (guintptr) (number))
+ #define POINTER_TO_ULONG(number) ((guintptr) (number))
+
+ #define VOLUME_MAX_DOUBLE 10.0
+
+ typedef struct _GstPlayBin3 GstPlayBin3;
+ typedef struct _GstPlayBin3Class GstPlayBin3Class;
+ typedef struct _GstSourceGroup GstSourceGroup;
+ typedef struct _GstSourceCombine GstSourceCombine;
+ typedef struct _SourcePad SourcePad;
+
+ typedef GstCaps *(*SourceCombineGetMediaCapsFunc) (void);
+
/* GstSourceCombine controls all the information regarding a certain
 * media type (audio, video or text).
 *
 * It can control a custom combiner element (by default none).
 */
struct _GstSourceCombine
{
  const gchar *media_type;      /* the media type for the combiner */
  SourceCombineGetMediaCapsFunc get_media_caps;Â /* more complex caps for the combiner */
  GstPlaySinkType type;         /* the sink pad type of the combiner */
  GstStreamType stream_type;    /* The GstStreamType of the combiner */

  GstElement *combiner;         /* the combiner */
  GPtrArray *channels;          /* Array of GstPad ? */

  GstPad *srcpad;               /* the source pad of the combiner */
  GstPad *sinkpad;              /* the sinkpad of the sink when the combiner
                                 * is linked */

  GPtrArray *streams;           /* Sorted array of GstStream for the given type */

  gboolean has_active_pad;      /* stream combiner has the "active-pad" property */

  gboolean is_concat;           /* The stream combiner is the 'concat' element */
};
+
+ #define GST_SOURCE_GROUP_GET_LOCK(group) (&((GstSourceGroup*)(group))->lock)
+ #define GST_SOURCE_GROUP_LOCK(group) (g_mutex_lock (GST_SOURCE_GROUP_GET_LOCK(group)))
+ #define GST_SOURCE_GROUP_UNLOCK(group) (g_mutex_unlock (GST_SOURCE_GROUP_GET_LOCK(group)))
+
/* Indices of the per-media-type combiners; used to index both the
 * combiner array and stream_type_names below. */
enum
{
  PLAYBIN_STREAM_AUDIO = 0,
  PLAYBIN_STREAM_VIDEO,
  PLAYBIN_STREAM_TEXT,
  PLAYBIN_STREAM_LAST
};

/* names matching the enum above (must stay in the same order) */
static const gchar *stream_type_names[] = {
  "audio", "video", "text"
};
+
+
+ #define STREAM_TYPES_FORMAT "s%s%s"
+ #define STREAM_TYPES_ARGS(s) (s) & GST_STREAM_TYPE_AUDIO ? "audio " : "", \
+ (s) & GST_STREAM_TYPE_VIDEO ? "video " : "", \
+ (s) & GST_STREAM_TYPE_TEXT ? "text " : ""
+
+
+
#if 0                           /* AUTOPLUG DISABLED */
static void avelements_free (gpointer data);
static GSequence *avelements_create (GstPlayBin3 * playbin,
    gboolean isaudioelement);
#endif

/* The GstAudioVideoElement structure holding the audio/video decoder
 * and the audio/video sink factories together with field indicating
 * the number of common caps features */
typedef struct
{
  GstElementFactory *dec;       /* audio:video decoder */
  GstElementFactory *sink;      /* audio:video sink */
  guint n_comm_cf;              /* number of common caps features */
} GstAVElement;
+
/* a structure to hold information about a uridecodebin pad */
struct _SourcePad
{
  GstPad *pad;                  /* The controlled pad */
  GstStreamType stream_type;    /* stream type of the controlled pad */
  gulong event_probe_id;        /* probe id, used to remove the probe on cleanup */
};
+
/* a structure to hold the objects for decoding a uri and the subtitle uri
 */
struct _GstSourceGroup
{
  GstPlayBin3 *playbin;         /* owning playbin (backpointer, not reffed) */

  GMutex lock;                  /* protects the fields of this group */

  gboolean valid;               /* the group has valid info to start playback */
  gboolean active;              /* the group is active */

  gboolean playing;             /* the group is currently playing
                                 * (outputted on the sinks) */

  /* properties */
  gchar *uri;
  gchar *suburi;

  /* The currently outputted group_id */
  guint group_id;

  /* Bit-wise set of stream types we have requested from uridecodebin3 */
  GstStreamType selected_stream_types;

  /* Bit-wise set of stream types for which pads are present */
  GstStreamType present_stream_types;

  /* TRUE if a 'about-to-finish' needs to be posted once we have
   * got source pads for all requested stream types
   *
   * FIXME : Move this logic to uridecodebin3 later */
  gboolean pending_about_to_finish;

  /* uridecodebin to handle uri and suburi */
  GstElement *uridecodebin;

  /* Active sinks for each media type. These are initialized with
   * the configured or currently used sink, otherwise
   * left as NULL and playbin tries to automatically
   * select a good sink */
  GstElement *audio_sink;
  GstElement *video_sink;
  GstElement *text_sink;

  /* List of source pads (SourcePad, see above) */
  GList *source_pads;

  /* uridecodebin signals (handler ids, disconnected on deactivation) */
  gulong pad_added_id;
  gulong pad_removed_id;
  gulong select_stream_id;
  gulong source_setup_id;
  gulong about_to_finish_id;

#if 0                           /* AUTOPLUG DISABLED */
  gulong autoplug_factories_id;
  gulong autoplug_select_id;
  gulong autoplug_continue_id;
  gulong autoplug_query_id;
#endif

  gboolean stream_changed_pending;

  /* Active stream collection */
  GstStreamCollection *collection;


  /* buffering message stored for after switching */
  GstMessage *pending_buffering_msg;
};
+
/* Accessors for the recursive playbin lock protecting group switching */
#define GST_PLAY_BIN3_GET_LOCK(bin) (&((GstPlayBin3*)(bin))->lock)
#define GST_PLAY_BIN3_LOCK(bin) (g_rec_mutex_lock (GST_PLAY_BIN3_GET_LOCK(bin)))
#define GST_PLAY_BIN3_UNLOCK(bin) (g_rec_mutex_unlock (GST_PLAY_BIN3_GET_LOCK(bin)))

/* lock to protect dynamic callbacks, like no-more-pads */
#define GST_PLAY_BIN3_DYN_LOCK(bin)    g_mutex_lock (&(bin)->dyn_lock)
#define GST_PLAY_BIN3_DYN_UNLOCK(bin)  g_mutex_unlock (&(bin)->dyn_lock)
+
/* lock for shutdown: takes the DYN lock unless a shutdown is in progress,
 * in which case it jumps to @label. The shutdown flag is re-checked after
 * acquiring the lock to close the race with a concurrent shutdown. */
#define GST_PLAY_BIN3_SHUTDOWN_LOCK(bin,label)           \
  G_STMT_START {                                         \
    if (G_UNLIKELY (g_atomic_int_get (&bin->shutdown)))  \
      goto label;                                        \
    GST_PLAY_BIN3_DYN_LOCK (bin);                        \
    if (G_UNLIKELY (g_atomic_int_get (&bin->shutdown))) {\
      GST_PLAY_BIN3_DYN_UNLOCK (bin);                    \
      goto label;                                        \
    }                                                    \
  } G_STMT_END

/* unlock for shutdown.
 * Note: the original definition ended with a stray '\' line continuation,
 * which silently glued whatever source line followed the macro into its
 * definition. The continuation has been removed. */
#define GST_PLAY_BIN3_SHUTDOWN_UNLOCK(bin)  \
  GST_PLAY_BIN3_DYN_UNLOCK (bin);
+
/**
 * GstPlayBin3:
 *
 * playbin element structure
 */
struct _GstPlayBin3
{
  GstPipeline parent;

  GRecMutex lock;               /* to protect group switching */

  /* the input groups, we use a double buffer to switch between current and next */
  GstSourceGroup groups[2];     /* array with group info */
  GstSourceGroup *curr_group;   /* pointer to the currently playing group */
  GstSourceGroup *next_group;   /* pointer to the next group */

  /* Array of GstPad controlled by each combiner, indexed by the
   * PLAYBIN_STREAM_* enum */
  GPtrArray *channels[PLAYBIN_STREAM_LAST];     /* links to combiner pads */

  /* combiners for different streams, indexed by the PLAYBIN_STREAM_* enum */
  GstSourceCombine combiner[PLAYBIN_STREAM_LAST];

  /* Bit-wise set of stream types we have requested from uridecodebin3.
   * Calculated as the combination of the 'selected_stream_types' of
   * each sourcegroup */
  GstStreamType selected_stream_types;

  /* Bit-wise set of configured output stream types (i.e. active
     playsink inputs and combiners) */
  GstStreamType active_stream_types;

  /* properties */
  guint64 connection_speed;     /* connection speed in bits/sec (0 = unknown) */
  gint current_video;           /* the currently selected stream */
  gint current_audio;           /* the currently selected stream */
  gint current_text;            /* the currently selected stream */

  gboolean do_stream_selections;        /* Set to TRUE when any of current-{video|audio|text} are set to
                                           say playbin should do backwards-compatibility behaviours */

  guint64 buffer_duration;      /* When buffering, the max buffer duration (ns) */
  guint buffer_size;            /* When buffering, the max buffer size (bytes) */
  gboolean force_aspect_ratio;

  /* Multiview/stereoscopic overrides */
  GstVideoMultiviewFramePacking multiview_mode;
  GstVideoMultiviewFlags multiview_flags;

  /* our play sink */
  GstPlaySink *playsink;

  /* Task for (de)activating groups, protected by the activation lock */
  GstTask *activation_task;
  GRecMutex activation_lock;

  /* lock protecting dynamic adding/removing */
  GMutex dyn_lock;
  /* if we are shutting down or not */
  gint shutdown;
  gboolean async_pending;       /* async-start has been emitted */

  GMutex elements_lock;
  guint32 elements_cookie;      /* registry cookie to detect stale 'elements' list */
  GList *elements;              /* factories we can use for selecting elements */

  gboolean have_selector;       /* set to FALSE when we fail to create an
                                 * input-selector, so that we only post a
                                 * warning once */

  gboolean video_pending_flush_finish;  /* whether we are pending to send a custom
                                         * custom-video-flush-finish event
                                         * on pad activation */
  gboolean audio_pending_flush_finish;  /* whether we are pending to send a custom
                                         * custom-audio-flush-finish event
                                         * on pad activation */
  gboolean text_pending_flush_finish;   /* whether we are pending to send a custom
                                         * custom-subtitle-flush-finish event
                                         * on pad activation */

  GstElement *audio_sink;       /* configured audio sink, or NULL */
  GstElement *video_sink;       /* configured video sink, or NULL */
  GstElement *text_sink;        /* configured text sink, or NULL */

  GstElement *audio_stream_combiner;    /* configured audio stream combiner, or NULL */
  GstElement *video_stream_combiner;    /* configured video stream combiner, or NULL */
  GstElement *text_stream_combiner;     /* configured text stream combiner, or NULL */

  GSequence *aelements;         /* a list of GstAVElements for audio stream */
  GSequence *velements;         /* a list of GstAVElements for video stream */

  guint64 ring_buffer_max_size; /* 0 means disabled */

  gboolean is_live;             /* Whether our current group is live */
};
+
struct _GstPlayBin3Class
{
  GstPipelineClass parent_class;

  /* notify app that the current uri finished decoding and it is possible to
   * queue a new one for gapless playback */
  void (*about_to_finish) (GstPlayBin3 * playbin);

  /* get the last video sample and convert it to the given caps */
  GstSample *(*convert_sample) (GstPlayBin3 * playbin, GstCaps * caps);
};
+
/* props: default values for the installed properties below */
#define DEFAULT_URI               NULL
#define DEFAULT_SUBURI            NULL
/* Parenthesized so the macro survives expansion inside a larger
 * expression (the original `A | B | ...` form would misparse in e.g.
 * `DEFAULT_FLAGS & mask`) */
#define DEFAULT_FLAGS             (GST_PLAY_FLAG_AUDIO | GST_PLAY_FLAG_VIDEO | GST_PLAY_FLAG_TEXT | \
                                   GST_PLAY_FLAG_SOFT_VOLUME | GST_PLAY_FLAG_DEINTERLACE | \
                                   GST_PLAY_FLAG_SOFT_COLORBALANCE | GST_PLAY_FLAG_BUFFERING)
#define DEFAULT_CURRENT_VIDEO     (-1)
#define DEFAULT_CURRENT_AUDIO     (-1)
#define DEFAULT_CURRENT_TEXT      (-1)
#define DEFAULT_SUBTITLE_ENCODING NULL
#define DEFAULT_AUDIO_SINK        NULL
#define DEFAULT_VIDEO_SINK        NULL
#define DEFAULT_VIS_PLUGIN        NULL
#define DEFAULT_TEXT_SINK         NULL
#define DEFAULT_VOLUME            1.0
#define DEFAULT_MUTE              FALSE
#define DEFAULT_FRAME             NULL
#define DEFAULT_FONT_DESC         NULL
#define DEFAULT_CONNECTION_SPEED  0
#define DEFAULT_BUFFER_DURATION   (-1)
#define DEFAULT_BUFFER_SIZE       (-1)
#define DEFAULT_RING_BUFFER_MAX_SIZE 0
+
/* property ids, used by set_property/get_property */
enum
{
  PROP_0,
  PROP_URI,
  PROP_CURRENT_URI,
  PROP_SUBURI,
  PROP_CURRENT_SUBURI,
  PROP_FLAGS,
  PROP_SUBTITLE_ENCODING,
  PROP_AUDIO_SINK,
  PROP_VIDEO_SINK,
  PROP_VIS_PLUGIN,
  PROP_TEXT_SINK,
  PROP_VIDEO_STREAM_COMBINER,
  PROP_AUDIO_STREAM_COMBINER,
  PROP_TEXT_STREAM_COMBINER,
  PROP_VOLUME,
  PROP_MUTE,
  PROP_SAMPLE,
  PROP_FONT_DESC,
  PROP_CONNECTION_SPEED,
  PROP_BUFFER_SIZE,
  PROP_BUFFER_DURATION,
  PROP_AV_OFFSET,
  PROP_TEXT_OFFSET,
  PROP_RING_BUFFER_MAX_SIZE,
  PROP_FORCE_ASPECT_RATIO,
  PROP_AUDIO_FILTER,
  PROP_VIDEO_FILTER,
  PROP_MULTIVIEW_MODE,
  PROP_MULTIVIEW_FLAGS
};
+
/* signals: indices into the gst_play_bin3_signals array below */
enum
{
  SIGNAL_ABOUT_TO_FINISH,
  SIGNAL_CONVERT_SAMPLE,
  SIGNAL_SOURCE_SETUP,
  SIGNAL_ELEMENT_SETUP,
  LAST_SIGNAL
};

#if 0                           /* AUTOPLUG DISABLED */
static GstStaticCaps raw_audio_caps = GST_STATIC_CAPS ("audio/x-raw(ANY)");
static GstStaticCaps raw_video_caps = GST_STATIC_CAPS ("video/x-raw(ANY)");
#endif
+
/* Forward declarations of the GObject/GstElement/GstBin vfunc
 * implementations and internal helpers defined further below */
static void gst_play_bin3_finalize (GObject * object);

static void gst_play_bin3_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * spec);
static void gst_play_bin3_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * spec);

static GstStateChangeReturn gst_play_bin3_change_state (GstElement * element,
    GstStateChange transition);

static void gst_play_bin3_handle_message (GstBin * bin, GstMessage * message);
static void gst_play_bin3_deep_element_added (GstBin * playbin,
    GstBin * sub_bin, GstElement * child);
static gboolean gst_play_bin3_send_event (GstElement * element,
    GstEvent * event);

static GstSample *gst_play_bin3_convert_sample (GstPlayBin3 * playbin,
    GstCaps * caps);

static GstStateChangeReturn setup_next_source (GstPlayBin3 * playbin);

static void gst_play_bin3_check_group_status (GstPlayBin3 * playbin);
static void emit_about_to_finish (GstPlayBin3 * playbin);
static void reconfigure_output (GstPlayBin3 * playbin);
static void pad_removed_cb (GstElement * decodebin, GstPad * pad,
    GstSourceGroup * group);

static gint select_stream_cb (GstElement * decodebin,
    GstStreamCollection * collection, GstStream * stream,
    GstSourceGroup * group);

static void do_stream_selection (GstPlayBin3 * playbin, GstSourceGroup * group);

/* parent class vtable, looked up in class_init */
static GstElementClass *parent_class;

/* signal ids, filled in by class_init */
static guint gst_play_bin3_signals[LAST_SIGNAL] = { 0 };
+
/* Disconnect signal handler @id from @obj (if connected) and reset @id
 * to 0 so a later REMOVE_SIGNAL on the same id is a no-op.
 * Wrapped in G_STMT_START/G_STMT_END (do { } while (0)) so the macro
 * acts as a single statement; the original bare `if (id) { ... }` form
 * broke `if (cond) REMOVE_SIGNAL (o, id); else ...` by capturing the
 * else branch (dangling-else hazard). */
#define REMOVE_SIGNAL(obj,id)                 \
  G_STMT_START {                              \
    if (id) {                                 \
      g_signal_handler_disconnect (obj, id);  \
      id = 0;                                 \
    }                                         \
  } G_STMT_END
+
/* Interface init callbacks registered in _do_init_type () below */
static void gst_play_bin3_overlay_init (gpointer g_iface,
    gpointer g_iface_data);
static void gst_play_bin3_navigation_init (gpointer g_iface,
    gpointer g_iface_data);
static void gst_play_bin3_colorbalance_init (gpointer g_iface,
    gpointer g_iface_data);
+
+ static void
+ _do_init_type (GType type)
+ {
+ static const GInterfaceInfo svol_info = {
+ NULL, NULL, NULL
+ };
+ static const GInterfaceInfo ov_info = {
+ gst_play_bin3_overlay_init,
+ NULL, NULL
+ };
+ static const GInterfaceInfo nav_info = {
+ gst_play_bin3_navigation_init,
+ NULL, NULL
+ };
+ static const GInterfaceInfo col_info = {
+ gst_play_bin3_colorbalance_init,
+ NULL, NULL
+ };
+
+ g_type_add_interface_static (type, GST_TYPE_STREAM_VOLUME, &svol_info);
+ g_type_add_interface_static (type, GST_TYPE_VIDEO_OVERLAY, &ov_info);
+ g_type_add_interface_static (type, GST_TYPE_NAVIGATION, &nav_info);
+ g_type_add_interface_static (type, GST_TYPE_COLOR_BALANCE, &col_info);
+ }
+
/* GType registration; _do_init_type () adds the extra interfaces */
static GType gst_play_bin3_get_type (void);
G_DEFINE_TYPE_WITH_CODE (GstPlayBin3, gst_play_bin3, GST_TYPE_PIPELINE,
    _do_init_type (g_define_type_id));

GST_ELEMENT_REGISTER_DEFINE_CUSTOM (playbin3,
    gst_play_bin3_custom_element_init);
+
/* Class initialisation: installs all properties and signals, hooks up
 * the GObject/GstElement/GstBin vfuncs and sets the element metadata. */
static void
gst_play_bin3_class_init (GstPlayBin3Class * klass)
{
  GObjectClass *gobject_klass;
  GstElementClass *gstelement_klass;
  GstBinClass *gstbin_klass;

  gobject_klass = (GObjectClass *) klass;
  gstelement_klass = (GstElementClass *) klass;
  gstbin_klass = (GstBinClass *) klass;

  parent_class = g_type_class_peek_parent (klass);

  gobject_klass->set_property = gst_play_bin3_set_property;
  gobject_klass->get_property = gst_play_bin3_get_property;

  gobject_klass->finalize = gst_play_bin3_finalize;

  /**
   * GstPlayBin3:uri
   *
   * Set the next URI that playbin will play. This property can be set from the
   * about-to-finish signal to queue the next media file.
   */
  g_object_class_install_property (gobject_klass, PROP_URI,
      g_param_spec_string ("uri", "URI", "URI of the media to play",
          NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /**
   * GstPlayBin3:current-uri
   *
   * The currently playing uri.
   */
  g_object_class_install_property (gobject_klass, PROP_CURRENT_URI,
      g_param_spec_string ("current-uri", "Current URI",
          "The currently playing URI", NULL,
          G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));

  /**
   * GstPlayBin3:suburi
   *
   * Set the next subtitle URI that playbin will play. This property can be
   * set from the about-to-finish signal to queue the next subtitle media file.
   */
  g_object_class_install_property (gobject_klass, PROP_SUBURI,
      g_param_spec_string ("suburi", ".sub-URI", "Optional URI of a subtitle",
          NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /**
   * GstPlayBin3:current-suburi
   *
   * The currently playing subtitle uri.
   */
  g_object_class_install_property (gobject_klass, PROP_CURRENT_SUBURI,
      g_param_spec_string ("current-suburi", "Current .sub-URI",
          "The currently playing URI of a subtitle",
          NULL, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));

  /**
   * GstPlayBin3:flags
   *
   * Control the behaviour of playbin.
   */
  g_object_class_install_property (gobject_klass, PROP_FLAGS,
      g_param_spec_flags ("flags", "Flags", "Flags to control behaviour",
          GST_TYPE_PLAY_FLAGS, DEFAULT_FLAGS,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_klass, PROP_SUBTITLE_ENCODING,
      g_param_spec_string ("subtitle-encoding", "subtitle encoding",
          "Encoding to assume if input subtitles are not in UTF-8 encoding. "
          "If not set, the GST_SUBTITLE_ENCODING environment variable will "
          "be checked for an encoding to use. If that is not set either, "
          "ISO-8859-15 will be assumed.", NULL,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_klass, PROP_VIDEO_FILTER,
      g_param_spec_object ("video-filter", "Video filter",
          "the video filter(s) to apply, if possible",
          GST_TYPE_ELEMENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_klass, PROP_AUDIO_FILTER,
      g_param_spec_object ("audio-filter", "Audio filter",
          "the audio filter(s) to apply, if possible",
          GST_TYPE_ELEMENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  /**
   * GstPlayBin3:video-sink
   *
   * Get or set the video sink to use for video output. If set to
   * NULL, one will be auto-selected. To disable video entirely, unset
   * the VIDEO flag in the #GstPlayBin3:flags property.
   *
   */
  g_object_class_install_property (gobject_klass, PROP_VIDEO_SINK,
      g_param_spec_object ("video-sink", "Video Sink",
          "the video output element to use (NULL = default sink)",
          GST_TYPE_ELEMENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  /**
   * GstPlayBin3:audio-sink
   *
   * Get or set the audio sink to use for audio output. If set to
   * NULL, one will be auto-selected. To disable audio entirely, unset
   * the AUDIO flag in the #GstPlayBin3:flags property.
   *
   */
  g_object_class_install_property (gobject_klass, PROP_AUDIO_SINK,
      g_param_spec_object ("audio-sink", "Audio Sink",
          "the audio output element to use (NULL = default sink)",
          GST_TYPE_ELEMENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_klass, PROP_VIS_PLUGIN,
      g_param_spec_object ("vis-plugin", "Vis plugin",
          "the visualization element to use (NULL = default)",
          GST_TYPE_ELEMENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_klass, PROP_TEXT_SINK,
      g_param_spec_object ("text-sink", "Text plugin",
          "the text output element to use (NULL = default subtitleoverlay)",
          GST_TYPE_ELEMENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  /**
   * GstPlayBin3:video-stream-combiner
   *
   * Get or set the current video stream combiner. By default, no
   * element is used and the selected stream is used directly.
   */
  g_object_class_install_property (gobject_klass, PROP_VIDEO_STREAM_COMBINER,
      g_param_spec_object ("video-stream-combiner", "Video stream combiner",
          "Current video stream combiner (default: none)",
          GST_TYPE_ELEMENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /**
   * GstPlayBin3:audio-stream-combiner
   *
   * Get or set the current audio stream combiner. By default, no
   * element is used and the selected stream is used directly.
   */
  /* NOTE(review): blurb below has a doubled closing paren
   * ("none))") — looks like a typo in the user-visible string; left
   * untouched here, confirm before changing */
  g_object_class_install_property (gobject_klass, PROP_AUDIO_STREAM_COMBINER,
      g_param_spec_object ("audio-stream-combiner", "Audio stream combiner",
          "Current audio stream combiner (default: none))",
          GST_TYPE_ELEMENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /**
   * GstPlayBin3:text-stream-combiner
   *
   * Get or set the current text stream combiner. By default, no
   * element is used and the selected stream is used directly.
   */
  g_object_class_install_property (gobject_klass, PROP_TEXT_STREAM_COMBINER,
      g_param_spec_object ("text-stream-combiner", "Text stream combiner",
          "Current text stream combiner (default: none)",
          GST_TYPE_ELEMENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /**
   * GstPlayBin3:volume:
   *
   * Get or set the current audio stream volume. 1.0 means 100%,
   * 0.0 means mute. This uses a linear volume scale.
   *
   */
  g_object_class_install_property (gobject_klass, PROP_VOLUME,
      g_param_spec_double ("volume", "Volume", "The audio volume, 1.0=100%",
          0.0, VOLUME_MAX_DOUBLE, 1.0,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_klass, PROP_MUTE,
      g_param_spec_boolean ("mute", "Mute",
          "Mute the audio channel without changing the volume", FALSE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /**
   * GstPlayBin3:sample:
   * @playbin: a #GstPlayBin3
   *
   * Get the currently rendered or prerolled sample in the video sink.
   * The #GstCaps in the sample will describe the format of the buffer.
   */
  g_object_class_install_property (gobject_klass, PROP_SAMPLE,
      g_param_spec_boxed ("sample", "Sample",
          "The last sample (NULL = no video available)",
          GST_TYPE_SAMPLE, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_klass, PROP_FONT_DESC,
      g_param_spec_string ("subtitle-font-desc",
          "Subtitle font description",
          "Pango font description of font "
          "to be used for subtitle rendering", NULL,
          G_PARAM_WRITABLE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_klass, PROP_CONNECTION_SPEED,
      g_param_spec_uint64 ("connection-speed", "Connection Speed",
          "Network connection speed in kbps (0 = unknown)",
          0, G_MAXUINT64 / 1000, DEFAULT_CONNECTION_SPEED,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_klass, PROP_BUFFER_SIZE,
      g_param_spec_int ("buffer-size", "Buffer size (bytes)",
          "Buffer size when buffering network streams",
          -1, G_MAXINT, DEFAULT_BUFFER_SIZE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_klass, PROP_BUFFER_DURATION,
      g_param_spec_int64 ("buffer-duration", "Buffer duration (ns)",
          "Buffer duration when buffering network streams",
          -1, G_MAXINT64, DEFAULT_BUFFER_DURATION,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  /**
   * GstPlayBin3:av-offset:
   *
   * Control the synchronisation offset between the audio and video streams.
   * Positive values make the audio ahead of the video and negative values make
   * the audio go behind the video.
   */
  g_object_class_install_property (gobject_klass, PROP_AV_OFFSET,
      g_param_spec_int64 ("av-offset", "AV Offset",
          "The synchronisation offset between audio and video in nanoseconds",
          G_MININT64, G_MAXINT64, 0,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  /**
   * GstPlayBin3:text-offset:
   *
   * Control the synchronisation offset between the text and video streams.
   * Positive values make the text ahead of the video and negative values make
   * the text go behind the video.
   */
  g_object_class_install_property (gobject_klass, PROP_TEXT_OFFSET,
      g_param_spec_int64 ("text-offset", "Text Offset",
          "The synchronisation offset between text and video in nanoseconds",
          G_MININT64, G_MAXINT64, 0,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /**
   * GstPlayBin3:ring-buffer-max-size
   *
   * The maximum size of the ring buffer in bytes. If set to 0, the ring
   * buffer is disabled. Default 0.
   */
  /* NOTE(review): this is a uint64 pspec but the maximum is G_MAXUINT
   * (32-bit), effectively capping the property at ~4GB — presumably
   * intentional/legacy, confirm against upstream before changing */
  g_object_class_install_property (gobject_klass, PROP_RING_BUFFER_MAX_SIZE,
      g_param_spec_uint64 ("ring-buffer-max-size",
          "Max. ring buffer size (bytes)",
          "Max. amount of data in the ring buffer (bytes, 0 = ring buffer disabled)",
          0, G_MAXUINT, DEFAULT_RING_BUFFER_MAX_SIZE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /**
   * GstPlayBin3::force-aspect-ratio:
   *
   * Requests the video sink to enforce the video display aspect ratio.
   */
  g_object_class_install_property (gobject_klass, PROP_FORCE_ASPECT_RATIO,
      g_param_spec_boolean ("force-aspect-ratio", "Force Aspect Ratio",
          "When enabled, scaling will respect original aspect ratio", TRUE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /**
   * GstPlayBin3::video-multiview-mode:
   *
   * Set the stereoscopic mode for video streams that don't contain
   * any information in the stream, so they can be correctly played
   * as 3D streams. If a video already has multiview information
   * encoded, this property can override other modes in the set,
   * but cannot be used to re-interpret MVC or mixed-mono streams.
   *
   * See Also: The #GstPlayBin3::video-multiview-flags property
   *
   */
  g_object_class_install_property (gobject_klass, PROP_MULTIVIEW_MODE,
      g_param_spec_enum ("video-multiview-mode",
          "Multiview Mode Override",
          "Re-interpret a video stream as one of several frame-packed stereoscopic modes.",
          GST_TYPE_VIDEO_MULTIVIEW_FRAME_PACKING,
          GST_VIDEO_MULTIVIEW_FRAME_PACKING_NONE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /**
   * GstPlayBin3::video-multiview-flags:
   *
   * When overriding the multiview mode of an input stream,
   * these flags modify details of the view layout.
   *
   * See Also: The #GstPlayBin3::video-multiview-mode property
   */
  g_object_class_install_property (gobject_klass, PROP_MULTIVIEW_FLAGS,
      g_param_spec_flags ("video-multiview-flags",
          "Multiview Flags Override",
          "Override details of the multiview frame layout",
          GST_TYPE_VIDEO_MULTIVIEW_FLAGS, GST_VIDEO_MULTIVIEW_FLAGS_NONE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /**
   * GstPlayBin3::about-to-finish
   * @playbin: a #GstPlayBin3
   *
   * This signal is emitted when the current uri is about to finish. You can
   * set the uri and suburi to make sure that playback continues.
   *
   * This signal is emitted from the context of a GStreamer streaming thread.
   */
  /* NOTE(review): n_params is 0 yet a trailing G_TYPE_NONE is passed to
   * the varargs — harmless (ignored with n_params == 0) but redundant */
  gst_play_bin3_signals[SIGNAL_ABOUT_TO_FINISH] =
      g_signal_new ("about-to-finish", G_TYPE_FROM_CLASS (klass),
      G_SIGNAL_RUN_LAST,
      G_STRUCT_OFFSET (GstPlayBin3Class, about_to_finish), NULL, NULL,
      NULL, G_TYPE_NONE, 0, G_TYPE_NONE);


  /**
   * GstPlayBin3::source-setup:
   * @playbin: a #GstPlayBin3
   * @source: source element
   *
   * This signal is emitted after the source element has been created, so
   * it can be configured by setting additional properties (e.g. set a
   * proxy server for an http source, or set the device and read speed for
   * an audio cd source). This is functionally equivalent to connecting to
   * the notify::source signal, but more convenient.
   *
   * This signal is usually emitted from the context of a GStreamer streaming
   * thread.
   */
  gst_play_bin3_signals[SIGNAL_SOURCE_SETUP] =
      g_signal_new ("source-setup", G_TYPE_FROM_CLASS (klass),
      G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, G_TYPE_NONE, 1, GST_TYPE_ELEMENT);

  /**
   * GstPlayBin3::element-setup:
   * @playbin: a #GstPlayBin3
   * @element: an element that was added to the playbin hierarchy
   *
   * This signal is emitted when a new element is added to playbin or any of
   * its sub-bins. This signal can be used to configure elements, e.g. to set
   * properties on decoders. This is functionally equivalent to connecting to
   * the deep-element-added signal, but more convenient.
   *
   * This signal is usually emitted from the context of a GStreamer streaming
   * thread, so might be called at the same time as code running in the main
   * application thread.
   *
   * Since: 1.10
   */
  gst_play_bin3_signals[SIGNAL_ELEMENT_SETUP] =
      g_signal_new ("element-setup", G_TYPE_FROM_CLASS (klass),
      G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, G_TYPE_NONE, 1, GST_TYPE_ELEMENT);

  /**
   * GstPlayBin3::convert-sample
   * @playbin: a #GstPlayBin3
   * @caps: the target format of the frame
   *
   * Action signal to retrieve the currently playing video frame in the format
   * specified by @caps.
   * If @caps is %NULL, no conversion will be performed and this function is
   * equivalent to the #GstPlayBin3:sample property.
   *
   * Returns: a #GstSample of the current video frame converted to #caps.
   * The caps on the sample will describe the final layout of the buffer data.
   * %NULL is returned when no current buffer can be retrieved or when the
   * conversion failed.
   */
  gst_play_bin3_signals[SIGNAL_CONVERT_SAMPLE] =
      g_signal_new ("convert-sample", G_TYPE_FROM_CLASS (klass),
      G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
      G_STRUCT_OFFSET (GstPlayBin3Class, convert_sample), NULL, NULL,
      NULL, GST_TYPE_SAMPLE, 1, GST_TYPE_CAPS);

  klass->convert_sample = gst_play_bin3_convert_sample;

  gst_element_class_set_static_metadata (gstelement_klass,
      "Player Bin 3", "Generic/Bin/Player",
      "Autoplug and play media from an uri",
      "Wim Taymans <wim.taymans@gmail.com>");

  gstelement_klass->change_state =
      GST_DEBUG_FUNCPTR (gst_play_bin3_change_state);
  gstelement_klass->send_event = GST_DEBUG_FUNCPTR (gst_play_bin3_send_event);

  gstbin_klass->handle_message =
      GST_DEBUG_FUNCPTR (gst_play_bin3_handle_message);
  gstbin_klass->deep_element_added =
      GST_DEBUG_FUNCPTR (gst_play_bin3_deep_element_added);
}
+
+ static void
+ do_async_start (GstPlayBin3 * playbin)
+ {
+ GstMessage *message;
+
+ playbin->async_pending = TRUE;
+
+ message = gst_message_new_async_start (GST_OBJECT_CAST (playbin));
+ GST_BIN_CLASS (parent_class)->handle_message (GST_BIN_CAST (playbin),
+ message);
+ }
+
+ static void
+ do_async_done (GstPlayBin3 * playbin)
+ {
+ GstMessage *message;
+
+ if (playbin->async_pending) {
+ GST_DEBUG_OBJECT (playbin, "posting ASYNC_DONE");
+ message =
+ gst_message_new_async_done (GST_OBJECT_CAST (playbin),
+ GST_CLOCK_TIME_NONE);
+ GST_BIN_CLASS (parent_class)->handle_message (GST_BIN_CAST (playbin),
+ message);
+
+ playbin->async_pending = FALSE;
+ }
+ }
+
+ /* init combiners. The combiner is found by finding the first prefix that
+ * matches the media. */
+ static void
+ init_combiners (GstPlayBin3 * playbin)
+ {
+ gint i;
+
+ /* store the array for the different channels */
+ for (i = 0; i < PLAYBIN_STREAM_LAST; i++)
+ playbin->channels[i] = g_ptr_array_new ();
+
+ playbin->combiner[PLAYBIN_STREAM_AUDIO].media_type = "audio";
+ playbin->combiner[PLAYBIN_STREAM_AUDIO].type = GST_PLAY_SINK_TYPE_AUDIO;
+ playbin->combiner[PLAYBIN_STREAM_AUDIO].stream_type = GST_STREAM_TYPE_AUDIO;
+ playbin->combiner[PLAYBIN_STREAM_AUDIO].channels = playbin->channels[0];
+ playbin->combiner[PLAYBIN_STREAM_AUDIO].streams =
+ g_ptr_array_new_with_free_func ((GDestroyNotify) gst_object_unref);
+
+ playbin->combiner[PLAYBIN_STREAM_VIDEO].media_type = "video";
+ playbin->combiner[PLAYBIN_STREAM_VIDEO].type = GST_PLAY_SINK_TYPE_VIDEO;
+ playbin->combiner[PLAYBIN_STREAM_VIDEO].stream_type = GST_STREAM_TYPE_VIDEO;
+ playbin->combiner[PLAYBIN_STREAM_VIDEO].channels = playbin->channels[1];
+ playbin->combiner[PLAYBIN_STREAM_VIDEO].streams =
+ g_ptr_array_new_with_free_func ((GDestroyNotify) gst_object_unref);
+
+ playbin->combiner[PLAYBIN_STREAM_TEXT].media_type = "text";
+ playbin->combiner[PLAYBIN_STREAM_TEXT].get_media_caps =
+ gst_subtitle_overlay_create_factory_caps;
+ playbin->combiner[PLAYBIN_STREAM_TEXT].type = GST_PLAY_SINK_TYPE_TEXT;
+ playbin->combiner[PLAYBIN_STREAM_TEXT].stream_type = GST_STREAM_TYPE_TEXT;
+ playbin->combiner[PLAYBIN_STREAM_TEXT].channels = playbin->channels[2];
+ playbin->combiner[PLAYBIN_STREAM_TEXT].streams =
+ g_ptr_array_new_with_free_func ((GDestroyNotify) gst_object_unref);
+ }
+
+ /* Update the combiner information to be in sync with the current collection
+ *
+ * FIXME : "current" collection doesn't mean anything until we have a "combined"
+ * collection of all groups */
+ static void
+ update_combiner_info (GstPlayBin3 * playbin, GstStreamCollection * collection)
+ {
+ guint i, len;
+
+ if (collection == NULL)
+ return;
+
+ GST_DEBUG_OBJECT (playbin, "Updating combiner info");
+
+ /* Wipe current combiner streams */
+ g_ptr_array_free (playbin->combiner[PLAYBIN_STREAM_AUDIO].streams, TRUE);
+ g_ptr_array_free (playbin->combiner[PLAYBIN_STREAM_VIDEO].streams, TRUE);
+ g_ptr_array_free (playbin->combiner[PLAYBIN_STREAM_TEXT].streams, TRUE);
+ playbin->combiner[PLAYBIN_STREAM_AUDIO].streams =
+ g_ptr_array_new_with_free_func ((GDestroyNotify) gst_object_unref);
+ playbin->combiner[PLAYBIN_STREAM_VIDEO].streams =
+ g_ptr_array_new_with_free_func ((GDestroyNotify) gst_object_unref);
+ playbin->combiner[PLAYBIN_STREAM_TEXT].streams =
+ g_ptr_array_new_with_free_func ((GDestroyNotify) gst_object_unref);
+
+ len = gst_stream_collection_get_size (collection);
+ for (i = 0; i < len; i++) {
+ GstStream *stream = gst_stream_collection_get_stream (collection, i);
+ GstStreamType stype = gst_stream_get_stream_type (stream);
+
+ if (stype & GST_STREAM_TYPE_AUDIO) {
+ g_ptr_array_add (playbin->combiner[PLAYBIN_STREAM_AUDIO].streams,
+ gst_object_ref (stream));
+ } else if (stype & GST_STREAM_TYPE_VIDEO) {
+ g_ptr_array_add (playbin->combiner[PLAYBIN_STREAM_VIDEO].streams,
+ gst_object_ref (stream));
+ } else if (stype & GST_STREAM_TYPE_TEXT) {
+ g_ptr_array_add (playbin->combiner[PLAYBIN_STREAM_TEXT].streams,
+ gst_object_ref (stream));
+ }
+ }
+
+ GST_DEBUG_OBJECT (playbin, "There are %d audio streams",
+ playbin->combiner[PLAYBIN_STREAM_AUDIO].streams->len);
+ GST_DEBUG_OBJECT (playbin, "There are %d video streams",
+ playbin->combiner[PLAYBIN_STREAM_VIDEO].streams->len);
+ GST_DEBUG_OBJECT (playbin, "There are %d text streams",
+ playbin->combiner[PLAYBIN_STREAM_TEXT].streams->len);
+ }
+
/* Dump the state of both source groups to the debug log; compiles to a
 * no-op when GStreamer debugging is disabled */
#ifndef GST_DISABLE_GST_DEBUG
#define debug_groups(playbin) G_STMT_START {                            \
    guint i;                                                            \
                                                                        \
    for (i = 0; i < 2; i++) {                                           \
      GstSourceGroup *group = &playbin->groups[i];                      \
                                                                        \
      GST_DEBUG ("GstSourceGroup #%d (%s)", i, (group == playbin->curr_group) ? "current" : (group == playbin->next_group) ? "next" : "unused"); \
      GST_DEBUG ("  valid:%d , active:%d , playing:%d", group->valid, group->active, group->playing); \
      GST_DEBUG ("  uri:%s", group->uri);                               \
      GST_DEBUG ("  suburi:%s", group->suburi);                         \
      GST_DEBUG ("  group_id:%d", group->group_id);                     \
      GST_DEBUG ("  pending_about_to_finish:%d", group->pending_about_to_finish); \
    }                                                                   \
  } G_STMT_END
#else
#define debug_groups(p) {}
#endif
+
+ /* One-time initialisation of a source group: create its lock, clear the
+  * stream-changed flag, mark the group id as not yet assigned and link
+  * the group back to its owning playbin. */
+ static void
+ init_group (GstPlayBin3 * playbin, GstSourceGroup * group)
+ {
+ g_mutex_init (&group->lock);
+ 
+ group->stream_changed_pending = FALSE;
+ group->group_id = GST_GROUP_ID_INVALID;
+ 
+ group->playbin = playbin;
+ }
+
+ /* Release everything a source group owns: the URI strings, the group
+  * lock, any queued buffering message, the stream collection and the
+  * per-group sink references (all cleared via gst_object_replace so the
+  * pointers end up NULL). */
+ static void
+ free_group (GstPlayBin3 * playbin, GstSourceGroup * group)
+ {
+ g_free (group->uri);
+ g_free (group->suburi);
+ 
+ g_mutex_clear (&group->lock);
+ group->stream_changed_pending = FALSE;
+ 
+ if (group->pending_buffering_msg)
+ gst_message_unref (group->pending_buffering_msg);
+ group->pending_buffering_msg = NULL;
+ 
+ gst_object_replace ((GstObject **) & group->collection, NULL);
+ 
+ gst_object_replace ((GstObject **) & group->audio_sink, NULL);
+ gst_object_replace ((GstObject **) & group->video_sink, NULL);
+ gst_object_replace ((GstObject **) & group->text_sink, NULL);
+ }
+
+ /* playsink notify::volume handler: re-emit the notification on the
+  * playbin instance so applications see the proxied "volume" property. */
+ static void
+ notify_volume_cb (GObject * object, GParamSpec * param, GstPlayBin3 * playbin)
+ {
+   g_object_notify (G_OBJECT (playbin), "volume");
+ }
+
+ /* playsink notify::mute handler: re-emit the notification on the
+  * playbin instance so applications see the proxied "mute" property. */
+ static void
+ notify_mute_cb (GObject * object, GParamSpec * param, GstPlayBin3 * playbin)
+ {
+   g_object_notify (G_OBJECT (playbin), "mute");
+ }
+
+ /* Forward a colour-balance value change from the underlying balance
+  * element to listeners of playbin's own GstColorBalance interface. */
+ static void
+ colorbalance_value_changed_cb (GstColorBalance * balance,
+ GstColorBalanceChannel * channel, gint value, GstPlayBin3 * playbin)
+ {
+ gst_color_balance_value_changed (GST_COLOR_BALANCE (playbin), channel, value);
+ }
+
+ /* NOTE(review): dead code, compiled out while autoplugging is disabled
+  * in playbin3. Kept for reference: compare_factories_func orders
+  * element factories sinks-first, then parsers, then by rank/name, and
+  * gst_play_bin3_update_elements_list refreshes the cached factory
+  * lists whenever the registry feature-list cookie changes. */
+ #if 0 /* AUTOPLUG DISABLED */
+ static gint
+ compare_factories_func (gconstpointer p1, gconstpointer p2)
+ {
+ GstPluginFeature *f1, *f2;
+ gboolean is_sink1, is_sink2;
+ gboolean is_parser1, is_parser2;
+ 
+ f1 = (GstPluginFeature *) p1;
+ f2 = (GstPluginFeature *) p2;
+ 
+ is_sink1 = gst_element_factory_list_is_type (GST_ELEMENT_FACTORY_CAST (f1),
+ GST_ELEMENT_FACTORY_TYPE_SINK);
+ is_sink2 = gst_element_factory_list_is_type (GST_ELEMENT_FACTORY_CAST (f2),
+ GST_ELEMENT_FACTORY_TYPE_SINK);
+ is_parser1 = gst_element_factory_list_is_type (GST_ELEMENT_FACTORY_CAST (f1),
+ GST_ELEMENT_FACTORY_TYPE_PARSER);
+ is_parser2 = gst_element_factory_list_is_type (GST_ELEMENT_FACTORY_CAST (f2),
+ GST_ELEMENT_FACTORY_TYPE_PARSER);
+ 
+ /* First we want all sinks as we prefer a sink if it directly
+ * supports the current caps */
+ if (is_sink1 && !is_sink2)
+ return -1;
+ else if (!is_sink1 && is_sink2)
+ return 1;
+ 
+ /* Then we want all parsers as we always want to plug parsers
+ * before decoders */
+ if (is_parser1 && !is_parser2)
+ return -1;
+ else if (!is_parser1 && is_parser2)
+ return 1;
+ 
+ /* And if it's a both a parser or sink we first sort by rank
+ * and then by factory name */
+ return gst_plugin_feature_rank_compare_func (p1, p2);
+ }
+ 
+ /* Must be called with elements lock! */
+ static void
+ gst_play_bin3_update_elements_list (GstPlayBin3 * playbin)
+ {
+ GList *res, *tmp;
+ guint cookie;
+ 
+ cookie = gst_registry_get_feature_list_cookie (gst_registry_get ());
+ 
+ if (!playbin->elements || playbin->elements_cookie != cookie) {
+ if (playbin->elements)
+ gst_plugin_feature_list_free (playbin->elements);
+ res =
+ gst_element_factory_list_get_elements
+ (GST_ELEMENT_FACTORY_TYPE_DECODABLE, GST_RANK_MARGINAL);
+ tmp =
+ gst_element_factory_list_get_elements
+ (GST_ELEMENT_FACTORY_TYPE_AUDIOVIDEO_SINKS, GST_RANK_MARGINAL);
+ playbin->elements = g_list_concat (res, tmp);
+ playbin->elements = g_list_sort (playbin->elements, compare_factories_func);
+ }
+ 
+ if (!playbin->aelements || playbin->elements_cookie != cookie) {
+ if (playbin->aelements)
+ g_sequence_free (playbin->aelements);
+ playbin->aelements = avelements_create (playbin, TRUE);
+ }
+ 
+ if (!playbin->velements || playbin->elements_cookie != cookie) {
+ if (playbin->velements)
+ g_sequence_free (playbin->velements);
+ playbin->velements = avelements_create (playbin, FALSE);
+ }
+ 
+ playbin->elements_cookie = cookie;
+ }
+ #endif
+
+ /* GObject instance init: create the locks and stream combiners, set up
+  * the two alternating source groups, instantiate the internal playsink
+  * (with send-event-mode=1) and connect the signal handlers that proxy
+  * volume/mute/colour-balance, then seed all properties with their
+  * default values. */
+ static void
+ gst_play_bin3_init (GstPlayBin3 * playbin)
+ {
+ g_rec_mutex_init (&playbin->lock);
+ g_mutex_init (&playbin->dyn_lock);
+ 
+ /* assume we can create an input-selector */
+ playbin->have_selector = TRUE;
+ 
+ init_combiners (playbin);
+ 
+ /* init groups */
+ playbin->curr_group = &playbin->groups[0];
+ playbin->next_group = &playbin->groups[1];
+ init_group (playbin, &playbin->groups[0]);
+ init_group (playbin, &playbin->groups[1]);
+ 
+ /* first filter out the interesting element factories */
+ g_mutex_init (&playbin->elements_lock);
+ 
+ g_rec_mutex_init (&playbin->activation_lock);
+ 
+ /* add sink */
+ playbin->playsink =
+ g_object_new (GST_TYPE_PLAY_SINK, "name", "playsink", "send-event-mode",
+ 1, NULL);
+ gst_bin_add (GST_BIN_CAST (playbin), GST_ELEMENT_CAST (playbin->playsink));
+ gst_play_sink_set_flags (playbin->playsink, DEFAULT_FLAGS);
+ /* Connect to notify::volume and notify::mute signals for proxying */
+ g_signal_connect (playbin->playsink, "notify::volume",
+ G_CALLBACK (notify_volume_cb), playbin);
+ g_signal_connect (playbin->playsink, "notify::mute",
+ G_CALLBACK (notify_mute_cb), playbin);
+ g_signal_connect (playbin->playsink, "value-changed",
+ G_CALLBACK (colorbalance_value_changed_cb), playbin);
+ 
+ playbin->current_video = DEFAULT_CURRENT_VIDEO;
+ playbin->current_audio = DEFAULT_CURRENT_AUDIO;
+ playbin->current_text = DEFAULT_CURRENT_TEXT;
+ 
+ playbin->buffer_duration = DEFAULT_BUFFER_DURATION;
+ playbin->buffer_size = DEFAULT_BUFFER_SIZE;
+ playbin->ring_buffer_max_size = DEFAULT_RING_BUFFER_MAX_SIZE;
+ 
+ playbin->force_aspect_ratio = TRUE;
+ 
+ playbin->multiview_mode = GST_VIDEO_MULTIVIEW_FRAME_PACKING_NONE;
+ playbin->multiview_flags = GST_VIDEO_MULTIVIEW_FLAGS_NONE;
+ 
+ playbin->is_live = FALSE;
+ }
+
+ /* GObject finalize: tear down both source groups, the channel and
+  * combiner stream arrays, any application-provided sinks/combiners
+  * (shut down to NULL state first) and every lock, then chain up. */
+ static void
+ gst_play_bin3_finalize (GObject * object)
+ {
+ GstPlayBin3 *playbin;
+ gint i;
+ 
+ playbin = GST_PLAY_BIN3 (object);
+ 
+ free_group (playbin, &playbin->groups[0]);
+ free_group (playbin, &playbin->groups[1]);
+ 
+ for (i = 0; i < PLAYBIN_STREAM_LAST; i++)
+ g_ptr_array_free (playbin->channels[i], TRUE);
+ 
+ /* Setting states to NULL is safe here because playsink
+ * will already be gone and none of these sinks will be
+ * a child of playsink
+ */
+ if (playbin->video_sink) {
+ gst_element_set_state (playbin->video_sink, GST_STATE_NULL);
+ gst_object_unref (playbin->video_sink);
+ }
+ if (playbin->audio_sink) {
+ gst_element_set_state (playbin->audio_sink, GST_STATE_NULL);
+ gst_object_unref (playbin->audio_sink);
+ }
+ if (playbin->text_sink) {
+ gst_element_set_state (playbin->text_sink, GST_STATE_NULL);
+ gst_object_unref (playbin->text_sink);
+ }
+ 
+ if (playbin->video_stream_combiner) {
+ gst_element_set_state (playbin->video_stream_combiner, GST_STATE_NULL);
+ gst_object_unref (playbin->video_stream_combiner);
+ }
+ if (playbin->audio_stream_combiner) {
+ gst_element_set_state (playbin->audio_stream_combiner, GST_STATE_NULL);
+ gst_object_unref (playbin->audio_stream_combiner);
+ }
+ if (playbin->text_stream_combiner) {
+ gst_element_set_state (playbin->text_stream_combiner, GST_STATE_NULL);
+ gst_object_unref (playbin->text_stream_combiner);
+ }
+ 
+ /* assumes init_combiners() always allocated these arrays */
+ g_ptr_array_free (playbin->combiner[PLAYBIN_STREAM_AUDIO].streams, TRUE);
+ g_ptr_array_free (playbin->combiner[PLAYBIN_STREAM_VIDEO].streams, TRUE);
+ g_ptr_array_free (playbin->combiner[PLAYBIN_STREAM_TEXT].streams, TRUE);
+ 
+ if (playbin->elements)
+ gst_plugin_feature_list_free (playbin->elements);
+ 
+ if (playbin->aelements)
+ g_sequence_free (playbin->aelements);
+ 
+ if (playbin->velements)
+ g_sequence_free (playbin->velements);
+ 
+ g_rec_mutex_clear (&playbin->activation_lock);
+ g_rec_mutex_clear (&playbin->lock);
+ g_mutex_clear (&playbin->dyn_lock);
+ g_mutex_clear (&playbin->elements_lock);
+ 
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+ }
+
+ /* Check that @uri has a valid protocol and contains no spaces and no
+  * non-printable ASCII characters (those must be %-escaped). Logs a
+  * warning naming the offending character offset on failure. */
+ static gboolean
+ gst_playbin_uri_is_valid (GstPlayBin3 * playbin, const gchar * uri)
+ {
+   const gchar *p;
+ 
+   GST_LOG_OBJECT (playbin, "checking uri '%s'", uri);
+ 
+   /* this just checks the protocol */
+   if (!gst_uri_is_valid (uri))
+     return FALSE;
+ 
+   for (p = uri; *p != '\0'; p++) {
+     if (*p == ' ' || !g_ascii_isprint (*p)) {
+       GST_WARNING_OBJECT (playbin, "uri '%s' not valid, character #%u",
+           uri, (guint) ((guintptr) p - (guintptr) uri));
+       return FALSE;
+     }
+   }
+ 
+   return TRUE;
+ }
+
+ /* Store @uri on the next source group, to be used when playback (re)
+  * starts. A NULL uri is rejected with a g_warning; a malformed uri is
+  * only warned about (with extra help for "file:" URIs) but still
+  * stored, preserving historical tolerance. */
+ static void
+ gst_play_bin3_set_uri (GstPlayBin3 * playbin, const gchar * uri)
+ {
+ GstSourceGroup *group;
+ 
+ if (uri == NULL) {
+ g_warning ("cannot set NULL uri");
+ return;
+ }
+ 
+ if (!gst_playbin_uri_is_valid (playbin, uri)) {
+ if (g_str_has_prefix (uri, "file:")) {
+ GST_WARNING_OBJECT (playbin, "not entirely correct file URI '%s' - make "
+ "sure to escape spaces and non-ASCII characters properly and specify "
+ "an absolute path. Use gst_filename_to_uri() to convert filenames "
+ "to URIs", uri);
+ } else {
+ /* GST_ERROR_OBJECT (playbin, "malformed URI '%s'", uri); */
+ }
+ }
+ 
+ GST_PLAY_BIN3_LOCK (playbin);
+ group = playbin->next_group;
+ 
+ GST_SOURCE_GROUP_LOCK (group);
+ /* store the uri in the next group we will play */
+ g_free (group->uri);
+ group->uri = g_strdup (uri);
+ group->valid = TRUE;
+ GST_SOURCE_GROUP_UNLOCK (group);
+ 
+ GST_DEBUG ("set new uri to %s", uri);
+ GST_PLAY_BIN3_UNLOCK (playbin);
+ }
+
+ /* Store the subtitle URI on the next source group; NULL clears it. */
+ static void
+ gst_play_bin3_set_suburi (GstPlayBin3 * playbin, const gchar * suburi)
+ {
+ GstSourceGroup *group;
+ 
+ GST_PLAY_BIN3_LOCK (playbin);
+ group = playbin->next_group;
+ 
+ GST_SOURCE_GROUP_LOCK (group);
+ g_free (group->suburi);
+ group->suburi = g_strdup (suburi);
+ GST_SOURCE_GROUP_UNLOCK (group);
+ 
+ GST_DEBUG ("setting new .sub uri to %s", suburi);
+ 
+ GST_PLAY_BIN3_UNLOCK (playbin);
+ }
+
+ /* Apply new playback flags to playsink and trigger a reconfiguration,
+  * but only when the flags actually changed. */
+ static void
+ gst_play_bin3_set_flags (GstPlayBin3 * playbin, GstPlayFlags flags)
+ {
+   if (gst_play_sink_get_flags (playbin->playsink) != flags) {
+     gst_play_sink_set_flags (playbin->playsink, flags);
+     gst_play_sink_reconfigure (playbin->playsink);
+   }
+ }
+
+ /* Return the playback flags currently set on the internal playsink. */
+ static GstPlayFlags
+ gst_play_bin3_get_flags (GstPlayBin3 * playbin)
+ {
+   return gst_play_sink_get_flags (playbin->playsink);
+ }
+
+ /* get the currently playing group or if nothing is playing, the next
+  * group. Must be called with the PLAY_BIN_LOCK. */
+ static GstSourceGroup *
+ get_group (GstPlayBin3 * playbin)
+ {
+   GstSourceGroup *group = playbin->curr_group;
+ 
+   if (group == NULL)
+     group = playbin->next_group;
+ 
+   return group;
+ }
+
+
+ /* Convert the last rendered sample to @caps; simply delegates to
+  * gst_play_sink_convert_sample() on the internal playsink. */
+ static GstSample *
+ gst_play_bin3_convert_sample (GstPlayBin3 * playbin, GstCaps * caps)
+ {
+ return gst_play_sink_convert_sample (playbin->playsink, caps);
+ }
+
+ /* Send a custom downstream out-of-band event carrying an empty
+  * structure named @event_name from @combiner's source pad to its peer.
+  *
+  * Returns TRUE when the combiner exposed a linked "src" pad and the
+  * event was pushed; the result of the send itself is deliberately
+  * ignored (the flush events this carries are best-effort). */
+ static gboolean
+ gst_play_bin3_send_custom_event (GstObject * combiner, const gchar * event_name)
+ {
+   GstPad *src;
+   GstPad *peer;
+   GstStructure *s;
+   GstEvent *event;
+   gboolean ret = FALSE;
+ 
+   src = gst_element_get_static_pad (GST_ELEMENT_CAST (combiner), "src");
+   /* Guard against combiners without a static "src" pad: without this,
+    * NULL would be handed to gst_pad_get_peer() and gst_object_unref() */
+   if (src == NULL)
+     return FALSE;
+ 
+   peer = gst_pad_get_peer (src);
+   if (peer) {
+     s = gst_structure_new_empty (event_name);
+     event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM_OOB, s);
+     gst_pad_send_event (peer, event);
+     gst_object_unref (peer);
+     ret = TRUE;
+   }
+   gst_object_unref (src);
+   return ret;
+ }
+
+ /* Switch the active stream of @stream_type to index @stream (or -1 to
+  * deselect). When the combiner is absent or the transparent concat
+  * element, just record the value and redo stream selection. With an
+  * old-style combiner exposing "active-pad", send a custom flush event
+  * towards playsink (setting *flush_marker if it was delivered) and
+  * then activate the requested sink pad. Returns FALSE when the
+  * combiner has no "active-pad" property or no channels exist. */
+ static gboolean
+ gst_play_bin3_set_current_stream (GstPlayBin3 * playbin,
+ gint stream_type, gint * current_value, gint stream,
+ gboolean * flush_marker)
+ {
+ GstSourceCombine *combine;
+ GPtrArray *channels;
+ GstPad *sinkpad;
+ 
+ GST_PLAY_BIN3_LOCK (playbin);
+ /* This function is only called if the app sets
+ * one of the current-* properties, which means it doesn't
+ * handle collections or select-streams yet */
+ playbin->do_stream_selections = TRUE;
+ 
+ combine = playbin->combiner + stream_type;
+ channels = playbin->channels[stream_type];
+ 
+ GST_DEBUG_OBJECT (playbin, "Changing current %s stream %d -> %d",
+ stream_type_names[stream_type], *current_value, stream);
+ 
+ if (combine->combiner == NULL || combine->is_concat) {
+ /* FIXME: Check that the current_value is within range */
+ *current_value = stream;
+ do_stream_selection (playbin, playbin->curr_group);
+ GST_PLAY_BIN3_UNLOCK (playbin);
+ return TRUE;
+ }
+ 
+ GST_DEBUG_OBJECT (playbin, "Using old style combiner");
+ 
+ if (!combine->has_active_pad)
+ goto no_active_pad;
+ if (channels == NULL)
+ goto no_channels;
+ 
+ if (stream == -1 || channels->len <= stream) {
+ sinkpad = NULL;
+ } else {
+ /* take channel from selected stream */
+ sinkpad = g_ptr_array_index (channels, stream);
+ }
+ 
+ /* ref before dropping the playbin lock so the pad stays alive */
+ if (sinkpad)
+ gst_object_ref (sinkpad);
+ GST_PLAY_BIN3_UNLOCK (playbin);
+ 
+ if (sinkpad) {
+ GstObject *combiner;
+ 
+ if ((combiner = gst_pad_get_parent (sinkpad))) {
+ GstPad *old_sinkpad;
+ 
+ g_object_get (combiner, "active-pad", &old_sinkpad, NULL);
+ 
+ if (old_sinkpad != sinkpad) {
+ /* FIXME: Is there actually any reason playsink
+ * needs special names for each type of stream we flush? */
+ gchar *flush_event_name = g_strdup_printf ("playsink-custom-%s-flush",
+ stream_type_names[stream_type]);
+ if (gst_play_bin3_send_custom_event (combiner, flush_event_name))
+ *flush_marker = TRUE;
+ g_free (flush_event_name);
+ 
+ /* activate the selected pad */
+ g_object_set (combiner, "active-pad", sinkpad, NULL);
+ }
+ 
+ if (old_sinkpad)
+ gst_object_unref (old_sinkpad);
+ 
+ gst_object_unref (combiner);
+ }
+ gst_object_unref (sinkpad);
+ }
+ return TRUE;
+ 
+ no_active_pad:
+ {
+ GST_PLAY_BIN3_UNLOCK (playbin);
+ GST_WARNING_OBJECT (playbin,
+ "can't switch %s, the stream combiner's sink pads don't have the \"active-pad\" property",
+ stream_type_names[stream_type]);
+ return FALSE;
+ }
+ no_channels:
+ {
+ GST_PLAY_BIN3_UNLOCK (playbin);
+ GST_DEBUG_OBJECT (playbin, "can't switch video, we have no channels");
+ return FALSE;
+ }
+ }
+
+ /* current-video property setter: delegate to the generic switcher. */
+ static gboolean
+ gst_play_bin3_set_current_video_stream (GstPlayBin3 * playbin, gint stream)
+ {
+ return gst_play_bin3_set_current_stream (playbin, PLAYBIN_STREAM_VIDEO,
+ &playbin->current_video, stream, &playbin->video_pending_flush_finish);
+ }
+
+ /* current-audio property setter: delegate to the generic switcher. */
+ static gboolean
+ gst_play_bin3_set_current_audio_stream (GstPlayBin3 * playbin, gint stream)
+ {
+ return gst_play_bin3_set_current_stream (playbin, PLAYBIN_STREAM_AUDIO,
+ &playbin->current_audio, stream, &playbin->audio_pending_flush_finish);
+ }
+
+ /* current-text property setter: delegate to the generic switcher. */
+ static gboolean
+ gst_play_bin3_set_current_text_stream (GstPlayBin3 * playbin, gint stream)
+ {
+ return gst_play_bin3_set_current_stream (playbin, PLAYBIN_STREAM_TEXT,
+ &playbin->current_text, stream, &playbin->text_pending_flush_finish);
+ }
+
+
+ /* Record @sink as the application-provided sink of the given @type:
+  * hand it to playsink and keep our own reference in *@elem, releasing
+  * whatever reference was stored there before. @sink may be NULL to
+  * clear the custom sink. */
+ static void
+ gst_play_bin3_set_sink (GstPlayBin3 * playbin, GstPlaySinkType type,
+     const gchar * dbg, GstElement ** elem, GstElement * sink)
+ {
+   GstElement *old = *elem;
+ 
+   GST_INFO_OBJECT (playbin, "Setting %s sink to %" GST_PTR_FORMAT, dbg, sink);
+ 
+   gst_play_sink_set_sink (playbin->playsink, type, sink);
+ 
+   *elem = (sink != NULL) ? gst_object_ref (sink) : NULL;
+   if (old)
+     gst_object_unref (old);
+ }
+
+ /* Store a custom stream combiner element in *@elem (sink-reffed),
+  * dropping the previous one. Only updates the stored pointer; the
+  * running pipeline picks it up on the next reconfiguration. */
+ static void
+ gst_play_bin3_set_stream_combiner (GstPlayBin3 * playbin, GstElement ** elem,
+ const gchar * dbg, GstElement * combiner)
+ {
+ GST_INFO_OBJECT (playbin, "Setting %s stream combiner to %" GST_PTR_FORMAT,
+ dbg, combiner);
+ 
+ GST_PLAY_BIN3_LOCK (playbin);
+ if (*elem != combiner) {
+ GstElement *old;
+ 
+ old = *elem;
+ if (combiner)
+ gst_object_ref_sink (combiner);
+ 
+ *elem = combiner;
+ if (old)
+ gst_object_unref (old);
+ }
+ GST_LOG_OBJECT (playbin, "%s stream combiner now %" GST_PTR_FORMAT, dbg,
+ *elem);
+ GST_PLAY_BIN3_UNLOCK (playbin);
+ }
+
+ /* Forward the subtitle character encoding to playsink under the
+  * playbin lock. */
+ static void
+ gst_play_bin3_set_encoding (GstPlayBin3 * playbin, const gchar * encoding)
+ {
+ GST_PLAY_BIN3_LOCK (playbin);
+ gst_play_sink_set_subtitle_encoding (playbin->playsink, encoding);
+ GST_PLAY_BIN3_UNLOCK (playbin);
+ }
+
+ /* GObject set_property dispatcher. Most properties are proxied to the
+  * internal playsink; uri/suburi are stored on the next source group;
+  * flags and ring-buffer-max-size are additionally pushed down to the
+  * current group's uridecodebin when it already exists.
+  * NOTE(review): the hunk below mixes first- and second-level patch
+  * markers as found in the original patch stack; left untouched. */
+ static void
+ gst_play_bin3_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+ GstPlayBin3 *playbin = GST_PLAY_BIN3 (object);
+ 
+ switch (prop_id) {
+ case PROP_URI:
+ gst_play_bin3_set_uri (playbin, g_value_get_string (value));
+ break;
+ case PROP_SUBURI:
+ gst_play_bin3_set_suburi (playbin, g_value_get_string (value));
+ break;
+ case PROP_FLAGS:
+ gst_play_bin3_set_flags (playbin, g_value_get_flags (value));
+ if (playbin->curr_group) {
+ GST_SOURCE_GROUP_LOCK (playbin->curr_group);
+ if (playbin->curr_group->uridecodebin) {
-create_decoders_list (GList * factory_list, GSequence * avelements)
++ guint flags = g_value_get_flags (value);
++ g_object_set (playbin->curr_group->uridecodebin,
++ "download", (flags & GST_PLAY_FLAG_DOWNLOAD) != 0,
++ "force-sw-decoders",
++ (flags & GST_PLAY_FLAG_FORCE_SW_DECODERS) != 0, NULL);
+ }
+ GST_SOURCE_GROUP_UNLOCK (playbin->curr_group);
+ }
+ break;
+ case PROP_SUBTITLE_ENCODING:
+ gst_play_bin3_set_encoding (playbin, g_value_get_string (value));
+ break;
+ case PROP_VIDEO_FILTER:
+ gst_play_sink_set_filter (playbin->playsink, GST_PLAY_SINK_TYPE_VIDEO,
+ GST_ELEMENT (g_value_get_object (value)));
+ break;
+ case PROP_AUDIO_FILTER:
+ gst_play_sink_set_filter (playbin->playsink, GST_PLAY_SINK_TYPE_AUDIO,
+ GST_ELEMENT (g_value_get_object (value)));
+ break;
+ case PROP_VIDEO_SINK:
+ gst_play_bin3_set_sink (playbin, GST_PLAY_SINK_TYPE_VIDEO, "video",
+ &playbin->video_sink, g_value_get_object (value));
+ break;
+ case PROP_AUDIO_SINK:
+ gst_play_bin3_set_sink (playbin, GST_PLAY_SINK_TYPE_AUDIO, "audio",
+ &playbin->audio_sink, g_value_get_object (value));
+ break;
+ case PROP_VIS_PLUGIN:
+ gst_play_sink_set_vis_plugin (playbin->playsink,
+ g_value_get_object (value));
+ break;
+ case PROP_TEXT_SINK:
+ gst_play_bin3_set_sink (playbin, GST_PLAY_SINK_TYPE_TEXT, "text",
+ &playbin->text_sink, g_value_get_object (value));
+ break;
+ case PROP_VIDEO_STREAM_COMBINER:
+ gst_play_bin3_set_stream_combiner (playbin,
+ &playbin->video_stream_combiner, "video", g_value_get_object (value));
+ break;
+ case PROP_AUDIO_STREAM_COMBINER:
+ gst_play_bin3_set_stream_combiner (playbin,
+ &playbin->audio_stream_combiner, "audio", g_value_get_object (value));
+ break;
+ case PROP_TEXT_STREAM_COMBINER:
+ gst_play_bin3_set_stream_combiner (playbin,
+ &playbin->text_stream_combiner, "text", g_value_get_object (value));
+ break;
+ case PROP_VOLUME:
+ gst_play_sink_set_volume (playbin->playsink, g_value_get_double (value));
+ break;
+ case PROP_MUTE:
+ gst_play_sink_set_mute (playbin->playsink, g_value_get_boolean (value));
+ break;
+ case PROP_FONT_DESC:
+ gst_play_sink_set_font_desc (playbin->playsink,
+ g_value_get_string (value));
+ break;
+ case PROP_CONNECTION_SPEED:
+ GST_PLAY_BIN3_LOCK (playbin);
+ playbin->connection_speed = g_value_get_uint64 (value) * 1000;
+ GST_PLAY_BIN3_UNLOCK (playbin);
+ break;
+ case PROP_BUFFER_SIZE:
+ playbin->buffer_size = g_value_get_int (value);
+ break;
+ case PROP_BUFFER_DURATION:
+ playbin->buffer_duration = g_value_get_int64 (value);
+ break;
+ case PROP_AV_OFFSET:
+ gst_play_sink_set_av_offset (playbin->playsink,
+ g_value_get_int64 (value));
+ break;
+ case PROP_TEXT_OFFSET:
+ gst_play_sink_set_text_offset (playbin->playsink,
+ g_value_get_int64 (value));
+ break;
+ case PROP_RING_BUFFER_MAX_SIZE:
+ playbin->ring_buffer_max_size = g_value_get_uint64 (value);
+ if (playbin->curr_group) {
+ GST_SOURCE_GROUP_LOCK (playbin->curr_group);
+ if (playbin->curr_group->uridecodebin) {
+ g_object_set (playbin->curr_group->uridecodebin,
+ "ring-buffer-max-size", playbin->ring_buffer_max_size, NULL);
+ }
+ GST_SOURCE_GROUP_UNLOCK (playbin->curr_group);
+ }
+ break;
+ case PROP_FORCE_ASPECT_RATIO:
+ g_object_set (playbin->playsink, "force-aspect-ratio",
+ g_value_get_boolean (value), NULL);
+ break;
+ case PROP_MULTIVIEW_MODE:
+ GST_PLAY_BIN3_LOCK (playbin);
+ playbin->multiview_mode = g_value_get_enum (value);
+ GST_PLAY_BIN3_UNLOCK (playbin);
+ break;
+ case PROP_MULTIVIEW_FLAGS:
+ GST_PLAY_BIN3_LOCK (playbin);
+ playbin->multiview_flags = g_value_get_flags (value);
+ GST_PLAY_BIN3_UNLOCK (playbin);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ /* Return the sink playsink is actually using for @type, falling back
+  * to the application-provided sink stored in *@elem. The returned
+  * element (if any) is reffed for the caller. */
+ static GstElement *
+ gst_play_bin3_get_current_sink (GstPlayBin3 * playbin, GstElement ** elem,
+ const gchar * dbg, GstPlaySinkType type)
+ {
+ GstElement *sink = gst_play_sink_get_sink (playbin->playsink, type);
+ 
+ GST_LOG_OBJECT (playbin, "play_sink_get_sink() returned %s sink %"
+ GST_PTR_FORMAT ", the originally set %s sink is %" GST_PTR_FORMAT,
+ dbg, sink, dbg, *elem);
+ 
+ if (sink == NULL) {
+ GST_PLAY_BIN3_LOCK (playbin);
+ if ((sink = *elem))
+ gst_object_ref (sink);
+ GST_PLAY_BIN3_UNLOCK (playbin);
+ }
+ 
+ return sink;
+ }
+
+ /* Return the stream combiner in use for @stream_type (reffed), or the
+  * application-provided one from *@elem; NULL when the active combiner
+  * is the internal concat element, which is never exposed. */
+ static GstElement *
+ gst_play_bin3_get_current_stream_combiner (GstPlayBin3 * playbin,
+ GstElement ** elem, const gchar * dbg, int stream_type)
+ {
+ GstElement *combiner;
+ 
+ GST_PLAY_BIN3_LOCK (playbin);
+ /* The special concat element should never be returned */
+ if (playbin->combiner[stream_type].is_concat)
+ combiner = NULL;
+ else if ((combiner = playbin->combiner[stream_type].combiner))
+ gst_object_ref (combiner);
+ else if ((combiner = *elem))
+ gst_object_ref (combiner);
+ GST_PLAY_BIN3_UNLOCK (playbin);
+ 
+ return combiner;
+ }
+
+ /* GObject get_property dispatcher: mirrors set_property, reading
+  * values back from playsink, the source groups or cached fields.
+  * Note the mix of locks: PLAY_BIN3_LOCK for group/speed data,
+  * OBJECT_LOCK for plain scalar fields. */
+ static void
+ gst_play_bin3_get_property (GObject * object, guint prop_id, GValue * value,
+ GParamSpec * pspec)
+ {
+ GstPlayBin3 *playbin = GST_PLAY_BIN3 (object);
+ 
+ switch (prop_id) {
+ case PROP_URI:
+ {
+ GstSourceGroup *group;
+ 
+ GST_PLAY_BIN3_LOCK (playbin);
+ group = playbin->next_group;
+ g_value_set_string (value, group->uri);
+ GST_PLAY_BIN3_UNLOCK (playbin);
+ break;
+ }
+ case PROP_CURRENT_URI:
+ {
+ GstSourceGroup *group;
+ 
+ GST_PLAY_BIN3_LOCK (playbin);
+ group = get_group (playbin);
+ g_value_set_string (value, group->uri);
+ GST_PLAY_BIN3_UNLOCK (playbin);
+ break;
+ }
+ case PROP_SUBURI:
+ {
+ GstSourceGroup *group;
+ 
+ GST_PLAY_BIN3_LOCK (playbin);
+ group = playbin->next_group;
+ g_value_set_string (value, group->suburi);
+ GST_PLAY_BIN3_UNLOCK (playbin);
+ break;
+ }
+ case PROP_CURRENT_SUBURI:
+ {
+ GstSourceGroup *group;
+ 
+ GST_PLAY_BIN3_LOCK (playbin);
+ group = get_group (playbin);
+ g_value_set_string (value, group->suburi);
+ GST_PLAY_BIN3_UNLOCK (playbin);
+ break;
+ }
+ case PROP_FLAGS:
+ g_value_set_flags (value, gst_play_bin3_get_flags (playbin));
+ break;
+ case PROP_SUBTITLE_ENCODING:
+ GST_PLAY_BIN3_LOCK (playbin);
+ g_value_take_string (value,
+ gst_play_sink_get_subtitle_encoding (playbin->playsink));
+ GST_PLAY_BIN3_UNLOCK (playbin);
+ break;
+ case PROP_VIDEO_FILTER:
+ g_value_take_object (value,
+ gst_play_sink_get_filter (playbin->playsink,
+ GST_PLAY_SINK_TYPE_VIDEO));
+ break;
+ case PROP_AUDIO_FILTER:
+ g_value_take_object (value,
+ gst_play_sink_get_filter (playbin->playsink,
+ GST_PLAY_SINK_TYPE_AUDIO));
+ break;
+ case PROP_VIDEO_SINK:
+ g_value_take_object (value,
+ gst_play_bin3_get_current_sink (playbin, &playbin->video_sink,
+ "video", GST_PLAY_SINK_TYPE_VIDEO));
+ break;
+ case PROP_AUDIO_SINK:
+ g_value_take_object (value,
+ gst_play_bin3_get_current_sink (playbin, &playbin->audio_sink,
+ "audio", GST_PLAY_SINK_TYPE_AUDIO));
+ break;
+ case PROP_VIS_PLUGIN:
+ g_value_take_object (value,
+ gst_play_sink_get_vis_plugin (playbin->playsink));
+ break;
+ case PROP_TEXT_SINK:
+ g_value_take_object (value,
+ gst_play_bin3_get_current_sink (playbin, &playbin->text_sink,
+ "text", GST_PLAY_SINK_TYPE_TEXT));
+ break;
+ case PROP_VIDEO_STREAM_COMBINER:
+ g_value_take_object (value,
+ gst_play_bin3_get_current_stream_combiner (playbin,
+ &playbin->video_stream_combiner, "video", PLAYBIN_STREAM_VIDEO));
+ break;
+ case PROP_AUDIO_STREAM_COMBINER:
+ g_value_take_object (value,
+ gst_play_bin3_get_current_stream_combiner (playbin,
+ &playbin->audio_stream_combiner, "audio", PLAYBIN_STREAM_AUDIO));
+ break;
+ case PROP_TEXT_STREAM_COMBINER:
+ g_value_take_object (value,
+ gst_play_bin3_get_current_stream_combiner (playbin,
+ &playbin->text_stream_combiner, "text", PLAYBIN_STREAM_TEXT));
+ break;
+ case PROP_VOLUME:
+ g_value_set_double (value, gst_play_sink_get_volume (playbin->playsink));
+ break;
+ case PROP_MUTE:
+ g_value_set_boolean (value, gst_play_sink_get_mute (playbin->playsink));
+ break;
+ case PROP_SAMPLE:
+ gst_value_take_sample (value,
+ gst_play_sink_get_last_sample (playbin->playsink));
+ break;
+ case PROP_FONT_DESC:
+ g_value_take_string (value,
+ gst_play_sink_get_font_desc (playbin->playsink));
+ break;
+ case PROP_CONNECTION_SPEED:
+ GST_PLAY_BIN3_LOCK (playbin);
+ g_value_set_uint64 (value, playbin->connection_speed / 1000);
+ GST_PLAY_BIN3_UNLOCK (playbin);
+ break;
+ case PROP_BUFFER_SIZE:
+ GST_OBJECT_LOCK (playbin);
+ g_value_set_int (value, playbin->buffer_size);
+ GST_OBJECT_UNLOCK (playbin);
+ break;
+ case PROP_BUFFER_DURATION:
+ GST_OBJECT_LOCK (playbin);
+ g_value_set_int64 (value, playbin->buffer_duration);
+ GST_OBJECT_UNLOCK (playbin);
+ break;
+ case PROP_AV_OFFSET:
+ g_value_set_int64 (value,
+ gst_play_sink_get_av_offset (playbin->playsink));
+ break;
+ case PROP_TEXT_OFFSET:
+ g_value_set_int64 (value,
+ gst_play_sink_get_text_offset (playbin->playsink));
+ break;
+ case PROP_RING_BUFFER_MAX_SIZE:
+ g_value_set_uint64 (value, playbin->ring_buffer_max_size);
+ break;
+ case PROP_FORCE_ASPECT_RATIO:{
+ gboolean v;
+ 
+ g_object_get (playbin->playsink, "force-aspect-ratio", &v, NULL);
+ g_value_set_boolean (value, v);
+ break;
+ }
+ case PROP_MULTIVIEW_MODE:
+ GST_OBJECT_LOCK (playbin);
+ g_value_set_enum (value, playbin->multiview_mode);
+ GST_OBJECT_UNLOCK (playbin);
+ break;
+ case PROP_MULTIVIEW_FLAGS:
+ GST_OBJECT_LOCK (playbin);
+ g_value_set_flags (value, playbin->multiview_flags);
+ GST_OBJECT_UNLOCK (playbin);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ /* Return the index, inside @combine->streams, of the first combiner
+  * stream whose stream id appears in @full_list, or -1 when none of the
+  * combiner's streams is listed. */
+ static gint
+ get_combiner_stream_id (GstPlayBin3 * playbin, GstSourceCombine * combine,
+     GList * full_list)
+ {
+   gint idx;
+ 
+   for (idx = 0; idx < combine->streams->len; idx++) {
+     GstStream *stream = g_ptr_array_index (combine->streams, idx);
+     const gchar *sid = gst_stream_get_stream_id (stream);
+     GList *walk;
+ 
+     for (walk = full_list; walk != NULL; walk = walk->next) {
+       if (g_strcmp0 ((const gchar *) walk->data, sid) == 0)
+         return idx;
+     }
+   }
+ 
+   /* Fallback */
+   return -1;
+ }
+
+ /* Append to @list the stream id of every stream in @collection whose
+  * type equals @stype exactly and that is not already listed. Takes
+  * ownership of @list and returns the (possibly re-allocated) head;
+  * added ids are freshly g_strdup'ed. */
+ static GList *
+ extend_list_of_streams (GstPlayBin3 * playbin, GstStreamType stype,
+ GList * list, GstStreamCollection * collection)
+ {
+ GList *tmp, *res;
+ gint i, nb;
+ 
+ res = list;
+ 
+ nb = gst_stream_collection_get_size (collection);
+ for (i = 0; i < nb; i++) {
+ GstStream *stream = gst_stream_collection_get_stream (collection, i);
+ GstStreamType curtype = gst_stream_get_stream_type (stream);
+ if (stype == curtype) {
+ gboolean already_there = FALSE;
+ const gchar *sid = gst_stream_get_stream_id (stream);
+ for (tmp = res; tmp; tmp = tmp->next) {
+ const gchar *other = (const gchar *) tmp->data;
+ if (!g_strcmp0 (sid, other)) {
+ already_there = TRUE;
+ break;
+ }
+ }
+ if (!already_there) {
+ GST_DEBUG_OBJECT (playbin, "Adding stream %s", sid);
+ res = g_list_append (res, g_strdup (sid));
+ }
+ }
+ }
+ 
+ return res;
+ }
+
+ /* Rewrite a SELECT_STREAMS event when custom stream combiners are in
+  * use: extend the selection with all streams of each combined type
+  * (the combiner does the actual per-stream switching) and point the
+  * relevant current-* property at the requested stream. Consumes the
+  * incoming event and returns a new one; returns the original event
+  * unchanged when no custom combiner or no collection exists. */
+ static GstEvent *
+ update_select_streams_event (GstPlayBin3 * playbin, GstEvent * event,
+ GstSourceGroup * group)
+ {
+ GList *streams = NULL;
+ GList *to_use;
+ gint combine_id;
+ 
+ if (!playbin->audio_stream_combiner && !playbin->video_stream_combiner &&
+ !playbin->text_stream_combiner) {
+ /* Nothing to do */
+ GST_DEBUG_OBJECT (playbin,
+ "No custom combiners, no need to modify SELECT_STREAMS event");
+ return event;
+ }
+ 
+ if (!group->collection) {
+ GST_DEBUG_OBJECT (playbin,
+ "No stream collection for group, no need to modify SELECT_STREAMS event");
+ return event;
+ }
+ 
+ gst_event_parse_select_streams (event, &streams);
+ to_use = g_list_copy_deep (streams, (GCopyFunc) g_strdup, NULL);
+ 
+ /* For each combiner, we want to add all streams of that type to the
+ * selection */
+ if (playbin->audio_stream_combiner) {
+ to_use =
+ extend_list_of_streams (playbin, GST_STREAM_TYPE_AUDIO, to_use,
+ group->collection);
+ combine_id =
+ get_combiner_stream_id (playbin,
+ &playbin->combiner[PLAYBIN_STREAM_AUDIO], streams);
+ if (combine_id != -1)
+ gst_play_bin3_set_current_audio_stream (playbin, combine_id);
+ }
+ if (playbin->video_stream_combiner) {
+ to_use =
+ extend_list_of_streams (playbin, GST_STREAM_TYPE_VIDEO, to_use,
+ group->collection);
+ combine_id =
+ get_combiner_stream_id (playbin,
+ &playbin->combiner[PLAYBIN_STREAM_VIDEO], streams);
+ if (combine_id != -1)
+ gst_play_bin3_set_current_video_stream (playbin, combine_id);
+ }
+ if (playbin->text_stream_combiner) {
+ to_use =
+ extend_list_of_streams (playbin, GST_STREAM_TYPE_TEXT, to_use,
+ group->collection);
+ combine_id =
+ get_combiner_stream_id (playbin,
+ &playbin->combiner[PLAYBIN_STREAM_TEXT], streams);
+ if (combine_id != -1)
+ gst_play_bin3_set_current_text_stream (playbin, combine_id);
+ }
+ 
+ gst_event_unref (event);
+ event = gst_event_new_select_streams (to_use);
+ 
+ if (streams)
+ g_list_free_full (streams, g_free);
+ if (to_use)
+ g_list_free_full (to_use, g_free);
+ 
+ return event;
+ }
+
+ /* Returns TRUE if the given list of streams belongs to the stream collection */
+ static gboolean
+ gst_streams_belong_to_collection (GList * streams,
+     GstStreamCollection * collection)
+ {
+   GList *walk;
+   guint nb, i;
+ 
+   if (!streams || !collection)
+     return FALSE;
+ 
+   nb = gst_stream_collection_get_size (collection);
+   if (nb == 0)
+     return FALSE;
+ 
+   /* every candidate id must match some stream of the collection */
+   for (walk = streams; walk; walk = walk->next) {
+     const gchar *cand = (const gchar *) walk->data;
+     gboolean found = FALSE;
+ 
+     for (i = 0; i < nb && !found; i++) {
+       GstStream *stream = gst_stream_collection_get_stream (collection, i);
+ 
+       found = (g_strcmp0 (cand, gst_stream_get_stream_id (stream)) == 0);
+     }
+     if (!found)
+       return FALSE;
+   }
+ 
+   return TRUE;
+ }
+
+ /* Figure out which source group (current or next) a SELECT_STREAMS
+  * event targets, by matching its stream ids against each group's
+  * collection. Returns NULL when neither collection matches. */
+ static GstSourceGroup *
+ get_source_group_for_streams (GstPlayBin3 * playbin, GstEvent * event)
+ {
+ GList *streams;
+ GstSourceGroup *res = NULL;
+ 
+ gst_event_parse_select_streams (event, &streams);
+ if (playbin->curr_group->collection &&
+ gst_streams_belong_to_collection (streams,
+ playbin->curr_group->collection))
+ res = playbin->curr_group;
+ else if (playbin->next_group->collection &&
+ gst_streams_belong_to_collection (streams,
+ playbin->next_group->collection))
+ res = playbin->next_group;
+ g_list_free_full (streams, g_free);
+ 
+ return res;
+ }
+
+ /* Compute the union of stream types (as a GstStreamType bitmask) of
+  * all streams a SELECT_STREAMS event picks from @collection. */
+ static GstStreamType
+ get_stream_type_for_event (GstStreamCollection * collection, GstEvent * event)
+ {
+ GList *stream_list = NULL;
+ GList *tmp;
+ GstStreamType res = 0;
+ guint i, len;
+ 
+ gst_event_parse_select_streams (event, &stream_list);
+ len = gst_stream_collection_get_size (collection);
+ for (tmp = stream_list; tmp; tmp = tmp->next) {
+ gchar *stid = (gchar *) tmp->data;
+ 
+ for (i = 0; i < len; i++) {
+ GstStream *stream = gst_stream_collection_get_stream (collection, i);
+ if (!g_strcmp0 (stid, gst_stream_get_stream_id (stream))) {
+ res |= gst_stream_get_stream_type (stream);
+ }
+ }
+ }
+ g_list_free_full (stream_list, g_free);
+ 
+ return res;
+ }
+
+ /* GstElement send_event vfunc. SELECT_STREAMS events are routed to the
+  * matching uridecodebin (after optional rewriting for custom
+  * combiners) and disable playbin's own automatic stream selection.
+  * Other upstream events go straight to playsink to avoid GstBin's
+  * sink iteration re-sending them during pipeline reconfiguration. */
+ static gboolean
+ gst_play_bin3_send_event (GstElement * element, GstEvent * event)
+ {
+ GstPlayBin3 *playbin = GST_PLAY_BIN3 (element);
+ 
+ if (GST_EVENT_TYPE (event) == GST_EVENT_SELECT_STREAMS) {
+ gboolean res;
+ GstSourceGroup *group;
+ 
+ GST_PLAY_BIN3_LOCK (playbin);
+ GST_LOG_OBJECT (playbin,
+ "App sent select-streams, we won't do anything ourselves now");
+ /* This is probably already false, but it doesn't hurt to be sure */
+ playbin->do_stream_selections = FALSE;
+ 
+ group = get_source_group_for_streams (playbin, event);
+ if (group == NULL) {
+ GST_WARNING_OBJECT (playbin,
+ "Can't figure out to which uridecodebin the select-streams event should be sent to");
+ GST_PLAY_BIN3_UNLOCK (playbin);
+ return FALSE;
+ }
+ 
+ /* If we have custom combiners, we need to extend the selection with
+ * the list of all streams for that given type since we will be handling
+ * the selection with that combiner */
+ event = update_select_streams_event (playbin, event, group);
+ 
+ if (group->collection) {
+ group->selected_stream_types =
+ get_stream_type_for_event (group->collection, event);
+ playbin->selected_stream_types =
+ playbin->groups[0].selected_stream_types | playbin->groups[1].
+ selected_stream_types;
+ if (playbin->active_stream_types != playbin->selected_stream_types)
+ reconfigure_output (playbin);
+ }
+ 
+ /* Send this event directly to uridecodebin, so it works even
+ * if uridecodebin didn't add any pads yet */
+ res = gst_element_send_event (group->uridecodebin, event);
+ GST_PLAY_BIN3_UNLOCK (playbin);
+ 
+ return res;
+ }
+ 
+ /* Send event directly to playsink instead of letting GstBin iterate
+ * over all sink elements. The latter might send the event multiple times
+ * in case the SEEK causes a reconfiguration of the pipeline, as can easily
+ * happen with adaptive streaming demuxers.
+ *
+ * What would then happen is that the iterator would be reset, we send the
+ * event again, and on the second time it will fail in the majority of cases
+ * because the pipeline is still being reconfigured
+ */
+ if (GST_EVENT_IS_UPSTREAM (event)) {
+ return gst_element_send_event (GST_ELEMENT_CAST (playbin->playsink), event);
+ }
+ 
+ return GST_ELEMENT_CLASS (parent_class)->send_event (element, event);
+ }
+
+ /* Called with playbin lock held.
+  *
+  * Walk the group's stream collection and decide which streams to activate,
+  * based on the current-audio/video/text properties (or all streams of a
+  * type when a custom combiner is installed for it). Sends a SELECT_STREAMS
+  * event to the group's uridecodebin when we are in charge of selection,
+  * then updates the selected-type bookkeeping and reconfigures outputs. */
+ static void
+ do_stream_selection (GstPlayBin3 * playbin, GstSourceGroup * group)
+ {
+   GstStreamCollection *collection;
+   guint i, nb_streams;
+   GList *streams = NULL;
+   gint nb_video = 0, nb_audio = 0, nb_text = 0;
+   GstStreamType chosen_stream_types = 0;
+ 
+   if (group == NULL)
+     return;
+ 
+   collection = group->collection;
+   if (collection == NULL) {
+     GST_LOG_OBJECT (playbin, "No stream collection. Not doing stream-select");
+     return;
+   }
+ 
+   nb_streams = gst_stream_collection_get_size (collection);
+   /* NOTE(review): an empty collection is only logged here and execution
+    * falls through; the loop below then iterates zero times — presumably
+    * intentional so the type bookkeeping still gets reset. TODO confirm. */
+   if (nb_streams == 0) {
+     GST_INFO_OBJECT (playbin, "Empty collection received! Ignoring");
+   }
+ 
+   GST_DEBUG_OBJECT (playbin, "Doing selection on collection with %d streams",
+       nb_streams);
+ 
+   /* Iterate the collection and choose the streams that match
+    * either the current-* setting, or all streams of a type if there's
+    * a combiner for that type */
+   for (i = 0; i < nb_streams; i++) {
+     GstStream *stream = gst_stream_collection_get_stream (collection, i);
+     GstStreamType stream_type = gst_stream_get_stream_type (stream);
+     const gchar *stream_id = gst_stream_get_stream_id (stream);
+     gint pb_stream_type = -1;
+     gboolean select_this = FALSE;
+ 
+     GST_LOG_OBJECT (playbin, "Looking at stream #%d : %s", i, stream_id);
+ 
+     if (stream_type & GST_STREAM_TYPE_AUDIO) {
+       pb_stream_type = PLAYBIN_STREAM_AUDIO;
+       /* Select the stream if it's the current one or if there's a custom selector */
+       select_this =
+           (nb_audio == playbin->current_audio ||
+           (playbin->current_audio == -1 && nb_audio == 0) ||
+           playbin->audio_stream_combiner != NULL);
+       nb_audio++;
+     } else if (stream_type & GST_STREAM_TYPE_VIDEO) {
+       pb_stream_type = PLAYBIN_STREAM_VIDEO;
+       select_this =
+           (nb_video == playbin->current_video ||
+           (playbin->current_video == -1 && nb_video == 0) ||
+           playbin->video_stream_combiner != NULL);
+       nb_video++;
+     } else if (stream_type & GST_STREAM_TYPE_TEXT) {
+       pb_stream_type = PLAYBIN_STREAM_TEXT;
+       select_this =
+           (nb_text == playbin->current_text ||
+           (playbin->current_text == -1 && nb_text == 0) ||
+           playbin->text_stream_combiner != NULL);
+       nb_text++;
+     }
+     /* Neither audio, video nor text: not our concern */
+     if (pb_stream_type < 0) {
+       GST_DEBUG_OBJECT (playbin,
+           "Stream %d (id %s) of unhandled type %s. Ignoring", i, stream_id,
+           gst_stream_type_get_name (stream_type));
+       continue;
+     }
+     if (select_this) {
+       GST_DEBUG_OBJECT (playbin, "Selecting stream %s of type %s",
+           stream_id, gst_stream_type_get_name (stream_type));
+       /* Don't build the list if we're not in charge of stream selection */
+       if (playbin->do_stream_selections)
+         streams = g_list_append (streams, (gpointer) stream_id);
+       chosen_stream_types |= stream_type;
+     }
+   }
+ 
+   /* The list borrows the collection's stream-id strings, so free only the
+    * list cells, not the data */
+   if (streams) {
+     if (group->uridecodebin) {
+       GstEvent *ev = gst_event_new_select_streams (streams);
+       gst_element_send_event (group->uridecodebin, ev);
+     }
+     g_list_free (streams);
+   }
+ 
+   group->selected_stream_types = chosen_stream_types;
+   /* Update global selected_stream_types */
+   playbin->selected_stream_types =
+       playbin->groups[0].selected_stream_types | playbin->groups[1].
+       selected_stream_types;
+   if (playbin->active_stream_types != playbin->selected_stream_types)
+     reconfigure_output (playbin);
+ }
+
+ /* Return the GstSourceGroup to which this element belongs
+  * Can be NULL (if it belongs to playsink for example) */
+ static GstSourceGroup *
+ find_source_group_owner (GstPlayBin3 * playbin, GstObject * element)
+ {
+   GstSourceGroup *candidates[2];
+   guint i;
+ 
+   /* Check the current group first, then the pending one */
+   candidates[0] = playbin->curr_group;
+   candidates[1] = playbin->next_group;
+ 
+   for (i = 0; i < G_N_ELEMENTS (candidates); i++) {
+     GstSourceGroup *group = candidates[i];
+ 
+     if (group->uridecodebin
+         && gst_object_has_as_ancestor (element,
+             GST_OBJECT_CAST (group->uridecodebin)))
+       return group;
+   }
+ 
+   return NULL;
+ }
+
+ /* GstBin::handle_message implementation.
+  *
+  * Intercepts STREAM_START (group switching), BUFFERING (only forwarded for
+  * the playing group), STREAM_COLLECTION (stores the collection and runs
+  * stream selection) and RESET_TIME (live base-time resampling). Unhandled
+  * messages are chained up to GstBin. */
+ static void
+ gst_play_bin3_handle_message (GstBin * bin, GstMessage * msg)
+ {
+   GstPlayBin3 *playbin = GST_PLAY_BIN3 (bin);
+   gboolean do_reset_time = FALSE;
+ 
+   if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_STREAM_START) {
+     GstSourceGroup *group = NULL, *other_group = NULL;
+     gboolean changed = FALSE;
+     guint group_id;
+     GstMessage *buffering_msg;
+ 
+     if (!gst_message_parse_group_id (msg, &group_id)) {
+       GST_ERROR_OBJECT (bin,
+           "Could not get group_id from STREAM_START message !");
+       goto beach;
+     }
+     GST_DEBUG_OBJECT (bin, "STREAM_START group_id:%u", group_id);
+ 
+     /* Figure out to which group this group_id corresponds */
+     GST_PLAY_BIN3_LOCK (playbin);
+     if (playbin->groups[0].group_id == group_id) {
+       group = &playbin->groups[0];
+       other_group = &playbin->groups[1];
+     } else if (playbin->groups[1].group_id == group_id) {
+       group = &playbin->groups[1];
+       other_group = &playbin->groups[0];
+     }
+     if (group == NULL) {
+       GST_ERROR_OBJECT (bin, "group_id %u is not provided by any group !",
+           group_id);
+       GST_PLAY_BIN3_UNLOCK (playbin);
+       goto beach;
+     }
+ 
+     debug_groups (playbin);
+ 
+     /* Do the switch now ! */
+     playbin->curr_group = group;
+     playbin->next_group = other_group;
+ 
+     GST_SOURCE_GROUP_LOCK (group);
+     if (group->playing == FALSE)
+       changed = TRUE;
+     group->playing = TRUE;
+     /* Take over any buffering message stored while the group was pending */
+     buffering_msg = group->pending_buffering_msg;
+     group->pending_buffering_msg = NULL;
+     GST_SOURCE_GROUP_UNLOCK (group);
+ 
+     GST_SOURCE_GROUP_LOCK (other_group);
+     other_group->playing = FALSE;
+     GST_SOURCE_GROUP_UNLOCK (other_group);
+ 
+     debug_groups (playbin);
+     GST_PLAY_BIN3_UNLOCK (playbin);
+     if (changed)
+       gst_play_bin3_check_group_status (playbin);
+     else
+       GST_DEBUG_OBJECT (bin, "Groups didn't changed");
+     /* If there was a pending buffering message to send, do it now */
+     if (buffering_msg)
+       GST_BIN_CLASS (parent_class)->handle_message (bin, buffering_msg);
+   } else if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_BUFFERING) {
+     GstSourceGroup *group;
+ 
+     /* Only post buffering messages for group which is currently playing */
+     group = find_source_group_owner (playbin, msg->src);
+     /* FIX: find_source_group_owner() returns NULL for elements that do not
+      * belong to either uridecodebin (e.g. inside playsink); locking a NULL
+      * group would crash. Such messages are simply posted as-is below. */
+     if (group) {
+       GST_SOURCE_GROUP_LOCK (group);
+       if (!group->playing) {
+         GST_DEBUG_OBJECT (playbin,
+             "Storing buffering message from pending group " "%p %"
+             GST_PTR_FORMAT, group, msg);
+         gst_message_replace (&group->pending_buffering_msg, msg);
+         gst_message_unref (msg);
+         msg = NULL;
+       }
+       GST_SOURCE_GROUP_UNLOCK (group);
+     }
+   } else if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_STREAM_COLLECTION) {
+     GstStreamCollection *collection = NULL;
+ 
+     gst_message_parse_stream_collection (msg, &collection);
+ 
+     if (collection) {
+       gboolean pstate = playbin->do_stream_selections;
+       GstSourceGroup *target_group = NULL;
+ 
+       GST_PLAY_BIN3_LOCK (playbin);
+       GST_DEBUG_OBJECT (playbin,
+           "STREAM_COLLECTION: Got a collection from %" GST_PTR_FORMAT,
+           msg->src);
+       target_group = find_source_group_owner (playbin, msg->src);
+       if (target_group)
+         gst_object_replace ((GstObject **) & target_group->collection,
+             (GstObject *) collection);
+       /* FIXME: Only do the following if it's the current group? */
+       if (target_group == playbin->curr_group)
+         update_combiner_info (playbin, target_group->collection);
+       /* Temporarily disable automatic selection while we run it ourselves */
+       if (pstate)
+         playbin->do_stream_selections = FALSE;
+       do_stream_selection (playbin, target_group);
+       if (pstate)
+         playbin->do_stream_selections = TRUE;
+       GST_PLAY_BIN3_UNLOCK (playbin);
+ 
+       gst_object_unref (collection);
+     }
+   } else if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_RESET_TIME) {
+     if (playbin->is_live && GST_STATE_TARGET (playbin) == GST_STATE_PLAYING) {
+       do_reset_time = TRUE;
+     }
+   }
+ 
+ beach:
+   if (msg)
+     GST_BIN_CLASS (parent_class)->handle_message (bin, msg);
+ 
+   if (do_reset_time) {
+     /* If we are live, sample a new base_time immediately */
+     gst_element_change_state (GST_ELEMENT (playbin),
+         GST_STATE_CHANGE_PAUSED_TO_PLAYING);
+   }
+ }
+
+ /* GstBin::deep_element_added implementation: re-emit every element added
+  * anywhere in the hierarchy as the "element-setup" signal so applications
+  * can tweak elements (e.g. set properties) before they are used, then
+  * chain up to GstBin. */
+ static void
+ gst_play_bin3_deep_element_added (GstBin * playbin, GstBin * sub_bin,
+     GstElement * child)
+ {
+   GST_LOG_OBJECT (playbin, "element %" GST_PTR_FORMAT " was added to "
+       "%" GST_PTR_FORMAT, child, sub_bin);
+ 
+   g_signal_emit (playbin, gst_play_bin3_signals[SIGNAL_ELEMENT_SETUP], 0,
+       child);
+ 
+   GST_BIN_CLASS (parent_class)->deep_element_added (playbin, sub_bin, child);
+ }
+
+ /* Returns current stream number, or -1 if none has been selected yet.
+  *
+  * Scans @channels (the combiner sink pads of one stream type) and returns
+  * the index of the pad that is the combiner's "active-pad". */
+ static int
+ get_current_stream_number (GstPlayBin3 * playbin, GstSourceCombine * combine,
+     GPtrArray * channels)
+ {
+   /* Internal API cleanup would make this easier... */
+   guint i;
+   GstPad *pad, *current;
+   GstObject *combiner = NULL;
+   int ret = -1;
+ 
+   if (!combine->has_active_pad) {
+     GST_WARNING_OBJECT (playbin,
+         "combiner doesn't have the \"active-pad\" property");
+     return ret;
+   }
+ 
+   for (i = 0; i < channels->len; i++) {
+     pad = g_ptr_array_index (channels, i);
+     if ((combiner = gst_pad_get_parent (pad))) {
+       /* FIX: "&current" had been corrupted into the mojibake "¤t"
+        * (HTML-entity mangling of "&curren"); restore the address-of */
+       g_object_get (combiner, "active-pad", &current, NULL);
+       gst_object_unref (combiner);
+ 
+       /* g_object_get returned a new ref on the active pad; drop it */
+       if (pad == current) {
+         gst_object_unref (current);
+         ret = i;
+         break;
+       }
+ 
+       if (current)
+         gst_object_unref (current);
+     }
+   }
+ 
+   return ret;
+ }
+
+ /* notify::active-pad callback on a stream combiner: update the matching
+  * current-audio/video/text index and, if a custom flush was pending,
+  * send the corresponding playsink-custom-*-flush-finish event. */
+ static void
+ combiner_active_pad_changed (GObject * combiner, GParamSpec * pspec,
+     GstPlayBin3 * playbin)
+ {
+   GstSourceCombine *combine = NULL;
+   GPtrArray *channels = NULL;
+   int i;
+ 
+   GST_PLAY_BIN3_LOCK (playbin);
+ 
+   /* Find which combine slot (and its channel array) this combiner is */
+   for (i = 0; i < PLAYBIN_STREAM_LAST; i++) {
+     if (combiner == G_OBJECT (playbin->combiner[i].combiner)) {
+       combine = &playbin->combiner[i];
+       channels = playbin->channels[i];
+     }
+   }
+ 
+   /* We got a pad-change after our group got switched out; no need to notify */
+   if (!combine) {
+     GST_PLAY_BIN3_UNLOCK (playbin);
+     return;
+   }
+ 
+   switch (combine->type) {
+     case GST_PLAY_SINK_TYPE_VIDEO:
+       playbin->current_video = get_current_stream_number (playbin,
+           combine, channels);
+ 
+       if (playbin->video_pending_flush_finish) {
+         playbin->video_pending_flush_finish = FALSE;
+         GST_PLAY_BIN3_UNLOCK (playbin);
+         gst_play_bin3_send_custom_event (GST_OBJECT (combiner),
+             "playsink-custom-video-flush-finish");
+         /* FIX: the lock was already released above; falling through to the
+          * trailing UNLOCK would unlock an unlocked mutex (undefined
+          * behaviour for GMutex) — return instead */
+         return;
+       }
+       break;
+     case GST_PLAY_SINK_TYPE_AUDIO:
+       playbin->current_audio = get_current_stream_number (playbin,
+           combine, channels);
+ 
+       if (playbin->audio_pending_flush_finish) {
+         playbin->audio_pending_flush_finish = FALSE;
+         GST_PLAY_BIN3_UNLOCK (playbin);
+         gst_play_bin3_send_custom_event (GST_OBJECT (combiner),
+             "playsink-custom-audio-flush-finish");
+         /* FIX: see video case — avoid double unlock */
+         return;
+       }
+       break;
+     case GST_PLAY_SINK_TYPE_TEXT:
+       playbin->current_text = get_current_stream_number (playbin,
+           combine, channels);
+ 
+       if (playbin->text_pending_flush_finish) {
+         playbin->text_pending_flush_finish = FALSE;
+         GST_PLAY_BIN3_UNLOCK (playbin);
+         gst_play_bin3_send_custom_event (GST_OBJECT (combiner),
+             "playsink-custom-subtitle-flush-finish");
+         /* FIX: see video case — avoid double unlock */
+         return;
+       }
+       break;
+     default:
+       break;
+   }
+   GST_PLAY_BIN3_UNLOCK (playbin);
+ }
+
+ /* If the application configured a multiview-mode override, return a copy of
+  * @caps with multiview-mode/multiview-flags replaced by the override.
+  * Returns NULL when no override is set or when the caps already carry an
+  * explicit (non-frame-packing) multiview annotation that must be kept.
+  * The returned caps are a new reference owned by the caller. */
+ static GstCaps *
+ update_video_multiview_caps (GstPlayBin3 * playbin, GstCaps * caps)
+ {
+   GstVideoMultiviewMode mv_mode;
+   GstVideoMultiviewMode cur_mv_mode;
+   guint mv_flags, cur_mv_flags;
+   GstStructure *s;
+   const gchar *mview_mode_str;
+   GstCaps *out_caps;
+ 
+   /* Snapshot the configured override under the object lock */
+   GST_OBJECT_LOCK (playbin);
+   mv_mode = (GstVideoMultiviewMode) playbin->multiview_mode;
+   mv_flags = playbin->multiview_flags;
+   GST_OBJECT_UNLOCK (playbin);
+ 
+   if (mv_mode == GST_VIDEO_MULTIVIEW_MODE_NONE)
+     return NULL;
+ 
+   cur_mv_mode = GST_VIDEO_MULTIVIEW_MODE_NONE;
+   cur_mv_flags = GST_VIDEO_MULTIVIEW_FLAGS_NONE;
+ 
+   s = gst_caps_get_structure (caps, 0);
+ 
+   gst_structure_get_flagset (s, "multiview-flags", &cur_mv_flags, NULL);
+   if ((mview_mode_str = gst_structure_get_string (s, "multiview-mode")))
+     cur_mv_mode = gst_video_multiview_mode_from_caps_string (mview_mode_str);
+ 
+   /* We can't override an existing annotated multiview mode, except
+    * maybe (in the future) we could change some flags. */
+   if ((gint) cur_mv_mode > GST_VIDEO_MULTIVIEW_MAX_FRAME_PACKING) {
+     GST_INFO_OBJECT (playbin, "Cannot override existing multiview mode");
+     return NULL;
+   }
+ 
+   mview_mode_str = gst_video_multiview_mode_to_caps_string (mv_mode);
+   g_assert (mview_mode_str != NULL);
+   /* Work on a writable copy; @caps itself is not modified */
+   out_caps = gst_caps_copy (caps);
+   s = gst_caps_get_structure (out_caps, 0);
+ 
+   gst_structure_set (s, "multiview-mode", G_TYPE_STRING, mview_mode_str,
+       "multiview-flags", GST_TYPE_VIDEO_MULTIVIEW_FLAGSET, mv_flags,
+       GST_FLAG_SET_MASK_EXACT, NULL);
+ 
+   return out_caps;
+ }
+
+ /* Emit the "about-to-finish" signal (giving the application a chance to
+  * set a new uri for gapless playback) and then try to activate the next
+  * source group. The ordering matters: the signal handler is expected to
+  * have configured the next uri before setup_next_source() runs. */
+ static void
+ emit_about_to_finish (GstPlayBin3 * playbin)
+ {
+   GST_DEBUG_OBJECT (playbin, "Emitting about-to-finish");
+ 
+   /* after this call, we should have a next group to activate or we EOS */
+   g_signal_emit (G_OBJECT (playbin),
+       gst_play_bin3_signals[SIGNAL_ABOUT_TO_FINISH], 0, NULL);
+ 
+   debug_groups (playbin);
+ 
+   /* now activate the next group. If the app did not set a uri, this will
+    * fail and we can do EOS */
+   setup_next_source (playbin);
+ }
+
+ /* Look up the SourcePad bookkeeping entry tracking @target in @group.
+  * Returns NULL when the pad is not one we control. */
+ static SourcePad *
+ find_source_pad (GstSourceGroup * group, GstPad * target)
+ {
+   GList *walk;
+ 
+   for (walk = group->source_pads; walk != NULL; walk = g_list_next (walk)) {
+     SourcePad *candidate = (SourcePad *) walk->data;
+ 
+     if (candidate->pad == target)
+       return candidate;
+   }
+ 
+   return NULL;
+ }
+
+ /* Downstream event probe installed on every controlled decodebin source pad
+  * (see control_source_pad()). Rewrites video CAPS events when a multiview
+  * override is configured, and tracks the group-id from STREAM_START events
+  * on the owning source group. Always returns GST_PAD_PROBE_OK. */
+ static GstPadProbeReturn
+ _decodebin_event_probe (GstPad * pad, GstPadProbeInfo * info, gpointer udata)
+ {
+   GstPadProbeReturn ret = GST_PAD_PROBE_OK;
+   GstSourceGroup *group = (GstSourceGroup *) udata;
+   GstPlayBin3 *playbin = group->playbin;
+   GstEvent *event = GST_PAD_PROBE_INFO_DATA (info);
+ 
+   switch (GST_EVENT_TYPE (event)) {
+     case GST_EVENT_CAPS:{
+       GstCaps *caps = NULL;
+       const GstStructure *s;
+       const gchar *name;
+ 
+       gst_event_parse_caps (event, &caps);
+       /* If video caps, check if we should override multiview flags */
+       s = gst_caps_get_structure (caps, 0);
+       name = gst_structure_get_name (s);
+       if (g_str_has_prefix (name, "video/")) {
+         /* update_video_multiview_caps() returns NULL when no override
+          * applies; otherwise swap the probe's event for a new CAPS event */
+         caps = update_video_multiview_caps (playbin, caps);
+         if (caps) {
+           gst_event_unref (event);
+           event = gst_event_new_caps (caps);
+           GST_PAD_PROBE_INFO_DATA (info) = event;
+           gst_caps_unref (caps);
+         }
+       }
+       break;
+     }
+     case GST_EVENT_STREAM_START:
+     {
+       guint group_id;
+       /* Remember (or update) the group-id these pads belong to; used to
+        * match STREAM_START bus messages back to a source group */
+       if (gst_event_parse_group_id (event, &group_id)) {
+         GST_LOG_OBJECT (pad, "STREAM_START group_id:%u", group_id);
+         if (group->group_id == GST_GROUP_ID_INVALID)
+           group->group_id = group_id;
+         else if (group->group_id != group_id) {
+           GST_DEBUG_OBJECT (pad, "group_id changing from %u to %u",
+               group->group_id, group_id);
+           group->group_id = group_id;
+         }
+       }
+       break;
+     }
+     default:
+       break;
+   }
+ 
+   return ret;
+ }
+
+ /* Start tracking a decodebin source @pad of @group: record its stream type
+  * and install the downstream event probe (CAPS / STREAM_START handling). */
+ static void
+ control_source_pad (GstSourceGroup * group, GstPad * pad,
+     GstStreamType stream_type)
+ {
+   SourcePad *entry;
+ 
+   entry = g_slice_new0 (SourcePad);
+   entry->pad = pad;
+   entry->stream_type = stream_type;
+   entry->event_probe_id =
+       gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
+       _decodebin_event_probe, group, NULL);
+ 
+   group->source_pads = g_list_append (group->source_pads, entry);
+ }
+
+ /* Tear down the combiner of @combine: release all request sink pads we
+  * took, shut the element down and remove it from the playbin bin.
+  * No-op when no combiner exists. */
+ static void
+ remove_combiner (GstPlayBin3 * playbin, GstSourceCombine * combine)
+ {
+   /* FIX: use guint to match channels->len (avoids signed/unsigned compare) */
+   guint n;
+ 
+   if (combine->combiner == NULL) {
+     GST_DEBUG_OBJECT (playbin, "No combiner element to remove");
+     return;
+   }
+ 
+   /* Go over all sink pads and release them ! */
+   for (n = 0; n < combine->channels->len; n++) {
+     GstPad *sinkpad = g_ptr_array_index (combine->channels, n);
+ 
+     gst_element_release_request_pad (combine->combiner, sinkpad);
+     gst_object_unref (sinkpad);
+   }
+   g_ptr_array_set_size (combine->channels, 0);
+ 
+   /* Shut the element down before gst_bin_remove() drops the bin's ref */
+   gst_element_set_state (combine->combiner, GST_STATE_NULL);
+   gst_bin_remove (GST_BIN_CAST (playbin), combine->combiner);
+   combine->combiner = NULL;
+ }
+
+ /* Create the combiner element if needed for the given combine.
+  *
+  * Uses the application-provided custom combiner for the stream type when
+  * one is set, otherwise falls back to a 'concat' element (gapless glue).
+  * The element is added to the playbin bin and synced to its state; the
+  * 'active-pad' machinery is only wired up for real (non-concat) combiners. */
+ static void
+ create_combiner (GstPlayBin3 * playbin, GstSourceCombine * combine)
+ {
+   GstElement *custom_combiner = NULL;
+ 
+   if (combine->combiner) {
+     GST_WARNING_OBJECT (playbin, "Combiner element already exists!");
+     return;
+   }
+ 
+   if (combine->stream_type == GST_STREAM_TYPE_VIDEO)
+     custom_combiner = playbin->video_stream_combiner;
+   else if (combine->stream_type == GST_STREAM_TYPE_AUDIO)
+     custom_combiner = playbin->audio_stream_combiner;
+   else if (combine->stream_type == GST_STREAM_TYPE_TEXT)
+     custom_combiner = playbin->text_stream_combiner;
+ 
+   /* NOTE(review): the custom combiner pointer is used as-is — presumably
+    * the playbin property setter holds the ref that gst_bin_add() below
+    * will sink/consume; verify ref ownership against the property code. */
+   combine->combiner = custom_combiner;
+ 
+   if (!combine->combiner) {
+     gchar *concat_name;
+     GST_DEBUG_OBJECT (playbin,
+         "No custom combiner requested, using 'concat' element");
+     concat_name = g_strdup_printf ("%s-concat", combine->media_type);
+     combine->combiner = gst_element_factory_make ("concat", concat_name);
+     g_object_set (combine->combiner, "adjust-base", FALSE, NULL);
+     g_free (concat_name);
+     combine->is_concat = TRUE;
+   }
+ 
+   combine->srcpad = gst_element_get_static_pad (combine->combiner, "src");
+ 
+   /* We only want to use 'active-pad' if it's a regular combiner that
+    * will consume all streams, and not concat (which is just used for
+    * gapless) */
+   if (!combine->is_concat) {
+     combine->has_active_pad =
+         g_object_class_find_property (G_OBJECT_GET_CLASS (combine->combiner),
+         "active-pad") != NULL;
+ 
+     if (combine->has_active_pad)
+       g_signal_connect (combine->combiner, "notify::active-pad",
+           G_CALLBACK (combiner_active_pad_changed), playbin);
+   }
+ 
+   GST_DEBUG_OBJECT (playbin, "adding new stream combiner %" GST_PTR_FORMAT,
+       combine->combiner);
+   gst_bin_add (GST_BIN_CAST (playbin), combine->combiner);
+   gst_element_sync_state_with_parent (combine->combiner);
+ }
+
+ /* Link a decodebin source pad into the output path for its stream type:
+  * either into a requested sink pad of the combiner (tracked in
+  * combine->channels), or — when no combiner exists — directly into the
+  * playsink sink pad. Returns FALSE (and posts an element error) on any
+  * link/request failure. */
+ static gboolean
+ combiner_control_pad (GstPlayBin3 * playbin, GstSourceCombine * combine,
+     GstPad * srcpad)
+ {
+   GstPadLinkReturn res;
+ 
+   GST_DEBUG_OBJECT (playbin, "srcpad %" GST_PTR_FORMAT, srcpad);
+ 
+   if (combine->combiner) {
+     GstPad *sinkpad =
+         gst_element_request_pad_simple (combine->combiner, "sink_%u");
+ 
+     if (sinkpad == NULL)
+       goto request_pad_failed;
+ 
+     GST_DEBUG_OBJECT (playbin, "Got new combiner pad %" GST_PTR_FORMAT,
+         sinkpad);
+ 
+     /* store the pad in the array */
+     GST_DEBUG_OBJECT (playbin, "pad %" GST_PTR_FORMAT " added to array",
+         sinkpad);
+     g_ptr_array_add (combine->channels, sinkpad);
+ 
+     res = gst_pad_link (srcpad, sinkpad);
+     if (GST_PAD_LINK_FAILED (res))
+       goto failed_combiner_link;
+ 
+     GST_DEBUG_OBJECT (playbin,
+         "linked pad %" GST_PTR_FORMAT " to combiner %" GST_PTR_FORMAT, srcpad,
+         combine->combiner);
+ 
+   } else {
+     GST_LOG_OBJECT (playbin, "combine->sinkpad:%" GST_PTR_FORMAT,
+         combine->sinkpad);
+     g_assert (combine->sinkpad != NULL);
+     /* Connect directly to playsink */
+     if (gst_pad_is_linked (combine->sinkpad))
+       goto sinkpad_already_linked;
+ 
+     GST_DEBUG_OBJECT (playbin, "Linking new pad straight to playsink");
+     res = gst_pad_link (srcpad, combine->sinkpad);
+ 
+     if (res != GST_PAD_LINK_OK)
+       goto failed_sinkpad_link;
+   }
+ 
+   return TRUE;
+ 
+   /* Failure cases */
+ request_pad_failed:
+   GST_ELEMENT_ERROR (playbin, CORE, PAD,
+       ("Internal playbin error."),
+       ("Failed to get request pad from combiner %p.", combine->combiner));
+   return FALSE;
+ 
+ 
+ sinkpad_already_linked:
+   GST_ELEMENT_ERROR (playbin, CORE, PAD,
+       ("Internal playbin error."), ("playsink pad already used !"));
+   return FALSE;
+ 
+ failed_sinkpad_link:
+   GST_ELEMENT_ERROR (playbin, CORE, PAD,
+       ("Internal playbin error."),
+       ("Failed to link pad to sink. Error %d", res));
+   return FALSE;
+ 
+ failed_combiner_link:
+   GST_ELEMENT_ERROR (playbin, CORE, PAD,
+       ("Internal playbin error."),
+       ("Failed to link pad to combiner. Error %d", res));
+   return FALSE;
+ }
+
+ /* Detach @pad from whatever it feeds: release the matching request pad on
+  * the combiner (and forget it from combine->channels), or — without a
+  * combiner — unlink the pad from the playsink sink pad. */
+ static void
+ combiner_release_pad (GstPlayBin3 * playbin, GstSourceCombine * combine,
+     GstPad * pad)
+ {
+   GstPad *peer;
+ 
+   if (combine->combiner == NULL) {
+     /* Release direct link if present */
+     if (combine->sinkpad) {
+       GST_DEBUG_OBJECT (playbin, "Unlinking pad from playsink sinkpad");
+       gst_pad_unlink (pad, combine->sinkpad);
+     }
+     return;
+   }
+ 
+   peer = gst_pad_get_peer (pad);
+   if (peer == NULL)
+     return;
+ 
+   GST_DEBUG_OBJECT (playbin, "Removing combiner pad %" GST_PTR_FORMAT, peer);
+   g_ptr_array_remove (combine->channels, peer);
+   gst_element_release_request_pad (combine->combiner, peer);
+   gst_object_unref (peer);
+ }
+
+ /* Call after pad was unlinked from (potential) combiner.
+  * Drops the pad's probe, removes its bookkeeping entry and recomputes the
+  * union of stream types still present on the group. */
+ static void
+ release_source_pad (GstPlayBin3 * playbin, GstSourceGroup * group, GstPad * pad)
+ {
+   SourcePad *sourcepad = find_source_pad (group, pad);
+   GstStreamType remaining = 0;
+   GList *walk;
+ 
+   if (sourcepad == NULL) {
+     GST_DEBUG_OBJECT (playbin, "Not a pad controlled by us ?");
+     return;
+   }
+ 
+   /* Drop our event probe before forgetting the pad */
+   if (sourcepad->event_probe_id != 0) {
+     gst_pad_remove_probe (pad, sourcepad->event_probe_id);
+     sourcepad->event_probe_id = 0;
+   }
+ 
+   /* Remove from list of controlled pads and check again for EOS status */
+   group->source_pads = g_list_remove (group->source_pads, sourcepad);
+   g_slice_free (SourcePad, sourcepad);
+ 
+   /* Update present stream types from the remaining controlled pads */
+   for (walk = group->source_pads; walk; walk = walk->next)
+     remaining |= ((SourcePad *) walk->data)->stream_type;
+   group->present_stream_types = remaining;
+ }
+
+ /* this function is called when a new pad is added to decodebin. We check the
+  * type of the pad and add it to the combiner element
+  *
+  * Runs under the shutdown lock (GST_PLAY_BIN3_SHUTDOWN_LOCK jumps to the
+  * 'shutdown' label when playbin is shutting down). May emit a delayed
+  * about-to-finish once all selected stream types are present. */
+ static void
+ pad_added_cb (GstElement * uridecodebin, GstPad * pad, GstSourceGroup * group)
+ {
+   GstSourceCombine *combine = NULL;
+   gint pb_stream_type = -1;
+   gchar *pad_name;
+   GstPlayBin3 *playbin = group->playbin;
+ 
+   GST_PLAY_BIN3_SHUTDOWN_LOCK (playbin, shutdown);
+ 
+   pad_name = gst_object_get_name (GST_OBJECT (pad));
+ 
+   GST_DEBUG_OBJECT (playbin, "decoded pad %s:%s added",
+       GST_DEBUG_PAD_NAME (pad));
+ 
+   /* major type of the pad, this determines the combiner to use,
+      try exact match first */
+   if (g_str_has_prefix (pad_name, "video")) {
+     pb_stream_type = PLAYBIN_STREAM_VIDEO;
+   } else if (g_str_has_prefix (pad_name, "audio")) {
+     pb_stream_type = PLAYBIN_STREAM_AUDIO;
+   } else if (g_str_has_prefix (pad_name, "text")) {
+     pb_stream_type = PLAYBIN_STREAM_TEXT;
+   }
+ 
+   g_free (pad_name);
+ 
+   /* no stream type found for the media type, don't bother linking it to a
+    * combiner. This will leave the pad unlinked and thus ignored. */
+   if (pb_stream_type < 0) {
+     GST_PLAY_BIN3_SHUTDOWN_UNLOCK (playbin);
+     goto unknown_type;
+   }
+ 
+   combine = &playbin->combiner[pb_stream_type];
+ 
+   /* Link the pad into the combiner (or straight to playsink) */
+   combiner_control_pad (playbin, combine, pad);
+ 
+   /* Track the pad and install its event probe */
+   control_source_pad (group, pad, combine->stream_type);
+ 
+   /* Update present stream_types and check whether we should post a pending about-to-finish */
+   group->present_stream_types |= combine->stream_type;
+ 
+   if (group->playing && group->pending_about_to_finish
+       && group->present_stream_types == group->selected_stream_types) {
+     group->pending_about_to_finish = FALSE;
+     emit_about_to_finish (playbin);
+   }
+ 
+   GST_PLAY_BIN3_SHUTDOWN_UNLOCK (playbin);
+ 
+   return;
+ 
+   /* ERRORS */
+ unknown_type:
+   GST_DEBUG_OBJECT (playbin, "Ignoring pad with unknown type");
+   return;
+ 
+ shutdown:
+   {
+     GST_DEBUG ("ignoring, we are shutting down. Pad will be left unlinked");
+     /* not going to done as we didn't request the caps */
+     return;
+   }
+ }
+
+ /* called when a pad is removed from the decodebin. We unlink the pad from
+  * the combiner. */
+ static void
+ pad_removed_cb (GstElement * decodebin, GstPad * pad, GstSourceGroup * group)
+ {
+   GstSourceCombine *combine;
+   GstPlayBin3 *playbin = group->playbin;
+ 
+   GST_DEBUG_OBJECT (playbin,
+       "decoded pad %s:%s removed", GST_DEBUG_PAD_NAME (pad));
+ 
+   GST_PLAY_BIN3_LOCK (playbin);
+ 
+   /* Get combiner for pad */
+   if (g_str_has_prefix (GST_PAD_NAME (pad), "video"))
+     combine = &playbin->combiner[PLAYBIN_STREAM_VIDEO];
+   else if (g_str_has_prefix (GST_PAD_NAME (pad), "audio"))
+     combine = &playbin->combiner[PLAYBIN_STREAM_AUDIO];
+   else if (g_str_has_prefix (GST_PAD_NAME (pad), "text"))
+     combine = &playbin->combiner[PLAYBIN_STREAM_TEXT];
+   else {
+     /* FIX: the non-TIZEN build previously returned here while still holding
+      * the playbin lock (the TIZEN_FEATURE_PLAYBIN3_MODIFICATION ifdef only
+      * fixed one branch). Always release the lock before returning; the
+      * ifdef is no longer needed since the correct behaviour is identical
+      * for both builds. */
+     GST_PLAY_BIN3_UNLOCK (playbin);
+     return;
+   }
+ 
+   combiner_release_pad (playbin, combine, pad);
+   release_source_pad (playbin, group, pad);
+ 
+   GST_PLAY_BIN3_UNLOCK (playbin);
+ }
+
+
+ /* decodebin3 "select-stream" callback: when the application installed a
+  * custom combiner for this stream's type, request activation of every such
+  * stream (return 1) since the combiner handles the actual selection;
+  * otherwise return -1 to let decodebin3 decide. */
+ static gint
+ select_stream_cb (GstElement * decodebin, GstStreamCollection * collection,
+     GstStream * stream, GstSourceGroup * group)
+ {
+   GstStreamType stype = gst_stream_get_stream_type (stream);
+   GstElement *combiner = NULL;
+   GstPlayBin3 *playbin = group->playbin;
+ 
+   if ((stype & GST_STREAM_TYPE_AUDIO) != 0)
+     combiner = playbin->audio_stream_combiner;
+   else if ((stype & GST_STREAM_TYPE_VIDEO) != 0)
+     combiner = playbin->video_stream_combiner;
+   else if ((stype & GST_STREAM_TYPE_TEXT) != 0)
+     combiner = playbin->text_stream_combiner;
+ 
+   if (combiner == NULL) {
+     /* Let decodebin3 decide otherwise */
+     return -1;
+   }
+ 
+   GST_DEBUG_OBJECT (playbin, "Got a combiner, requesting stream activation");
+   return 1;
+ }
+
+ /* We get called when the selected stream types change and
+  * reconfiguration of output (i.e. playsink and potential combiners)
+  * are required.
+  *
+  * For every stream type: tears down the combiner/playsink pad of types that
+  * are no longer selected, and creates/links them for newly selected types.
+  * Finally syncs active_stream_types and reconfigures playsink. */
+ static void
+ reconfigure_output (GstPlayBin3 * playbin)
+ {
+   GstPadLinkReturn res;
+   gint i;
+ 
+   g_assert (playbin->selected_stream_types != playbin->active_stream_types);
+ 
+   GST_DEBUG_OBJECT (playbin, "selected_stream_types : %" STREAM_TYPES_FORMAT,
+       STREAM_TYPES_ARGS (playbin->selected_stream_types));
+   GST_DEBUG_OBJECT (playbin, "active_stream_types : %" STREAM_TYPES_FORMAT,
+       STREAM_TYPES_ARGS (playbin->active_stream_types));
+ 
+   GST_PLAY_BIN3_LOCK (playbin);
+ 
+   /* Make sure combiners/playsink are in sync with selected stream types */
+   for (i = 0; i < PLAYBIN_STREAM_LAST; i++) {
+     GstSourceCombine *combine = &playbin->combiner[i];
+     gboolean is_selected =
+         (combine->stream_type & playbin->selected_stream_types) ==
+         combine->stream_type;
+     gboolean is_active =
+         (combine->stream_type & playbin->active_stream_types) ==
+         combine->stream_type;
+ 
+     GST_DEBUG_OBJECT (playbin, "Stream type status: '%s' %s %s",
+         combine->media_type, is_selected ? "selected" : "NOT selected",
+         is_active ? "active" : "NOT active");
+     /* FIXME : Remove asserts below once enough testing has been done */
+ 
+     if (is_selected && is_active) {
+       GST_DEBUG_OBJECT (playbin, "Stream type '%s' already active",
+           combine->media_type);
+     } else if (is_active && !is_selected) {
+       /* Deactivation: unlink and release everything for this type */
+       GST_DEBUG_OBJECT (playbin, "Stream type '%s' is no longer requested",
+           combine->media_type);
+ 
+       /* Unlink combiner from sink */
+       if (combine->srcpad) {
+         GST_LOG_OBJECT (playbin, "Unlinking from sink");
+         if (combine->sinkpad)
+           gst_pad_unlink (combine->srcpad, combine->sinkpad);
+         gst_object_unref (combine->srcpad);
+         combine->srcpad = NULL;
+       }
+ 
+       if (combine->sinkpad) {
+         /* Release playsink sink pad */
+         GST_LOG_OBJECT (playbin, "Releasing playsink pad");
+         gst_play_sink_release_pad (playbin->playsink, combine->sinkpad);
+         gst_object_unref (combine->sinkpad);
+         combine->sinkpad = NULL;
+       }
+ 
+       /* Release combiner */
+       GST_FIXME_OBJECT (playbin, "Release combiner");
+       remove_combiner (playbin, combine);
+     } else if (!is_active && is_selected) {
+       /* Activation: request a playsink pad and wire up a combiner */
+       GST_DEBUG_OBJECT (playbin, "Stream type '%s' is now requested",
+           combine->media_type);
+ 
+       /* If we are shutting down, do *not* add more combiners */
+       if (g_atomic_int_get (&playbin->shutdown))
+         continue;
+ 
+       g_assert (combine->sinkpad == NULL);
+ 
+       /* Request playsink sink pad */
+       combine->sinkpad =
+           gst_play_sink_request_pad (playbin->playsink, combine->type);
+       gst_object_ref (combine->sinkpad);
+       /* Create combiner if needed and link it */
+       create_combiner (playbin, combine);
+       if (combine->combiner) {
+         res = gst_pad_link (combine->srcpad, combine->sinkpad);
+         GST_DEBUG_OBJECT (playbin, "linked type %s, result: %d",
+             combine->media_type, res);
+         if (res != GST_PAD_LINK_OK) {
+           GST_ELEMENT_ERROR (playbin, CORE, PAD,
+               ("Internal playbin error."),
+               ("Failed to link combiner to sink. Error %d", res));
+         }
+ 
+       }
+     }
+   }
+ 
+   playbin->active_stream_types = playbin->selected_stream_types;
+ 
+   GST_PLAY_BIN3_UNLOCK (playbin);
+ 
+   /* Done outside the lock: playsink reconfiguration and async-done */
+   gst_play_sink_reconfigure (playbin->playsink);
+ 
+   do_async_done (playbin);
+ 
+   GST_DEBUG_OBJECT (playbin, "selected_stream_types : %" STREAM_TYPES_FORMAT,
+       STREAM_TYPES_ARGS (playbin->selected_stream_types));
+   GST_DEBUG_OBJECT (playbin, "active_stream_types : %" STREAM_TYPES_FORMAT,
+       STREAM_TYPES_ARGS (playbin->active_stream_types));
+ 
+   return;
+ }
+
+ /* uridecodebin "about-to-finish" callback: emit our own about-to-finish
+  * right away when the group has all its selected streams present,
+  * otherwise flag it so pad_added_cb() emits it once the group is ready. */
+ static void
+ about_to_finish_cb (GstElement * uridecodebin, GstSourceGroup * group)
+ {
+   GstPlayBin3 *playbin = group->playbin;
+   gboolean ready;
+ 
+   GST_DEBUG_OBJECT (playbin, "about to finish in group %p", group);
+ 
+   GST_LOG_OBJECT (playbin, "selected_stream_types:%" STREAM_TYPES_FORMAT,
+       STREAM_TYPES_ARGS (group->selected_stream_types));
+   GST_LOG_OBJECT (playbin, "present_stream_types:%" STREAM_TYPES_FORMAT,
+       STREAM_TYPES_ARGS (group->present_stream_types));
+ 
+   ready = (group->selected_stream_types != 0)
+       && (group->selected_stream_types == group->present_stream_types);
+ 
+   if (ready) {
+     emit_about_to_finish (playbin);
+   } else {
+     GST_LOG_OBJECT (playbin,
+         "Delaying emission of signal until this group is ready");
+     group->pending_about_to_finish = TRUE;
+   }
+ }
+
+ #if 0 /* AUTOPLUG DISABLED */
+ /* Like gst_element_factory_can_sink_any_caps() but doesn't
+  * allow ANY caps on the sinkpad template.
+  * (Dead code: this whole section is compiled out by the surrounding
+  * '#if 0 / AUTOPLUG DISABLED' guard.) */
+ static gboolean
+ _factory_can_sink_caps (GstElementFactory * factory, GstCaps * caps)
+ {
+   const GList *templs;
+ 
+   templs = gst_element_factory_get_static_pad_templates (factory);
+ 
+   while (templs) {
+     GstStaticPadTemplate *templ = (GstStaticPadTemplate *) templs->data;
+ 
+     if (templ->direction == GST_PAD_SINK) {
+       GstCaps *templcaps = gst_static_caps_get (&templ->static_caps);
+ 
+       /* Accept only sink templates that are a real (non-ANY) superset */
+       if (!gst_caps_is_any (templcaps)
+           && gst_caps_is_subset (caps, templcaps)) {
+         gst_caps_unref (templcaps);
+         return TRUE;
+       }
+       gst_caps_unref (templcaps);
+     }
+     templs = g_list_next (templs);
+   }
+ 
+   return FALSE;
+ }
+
+ /* GDestroyNotify for GstAVElement entries: drop the decoder and sink
+  * factory references and release the slice. */
+ static void
+ avelements_free (gpointer avelement)
+ {
+   GstAVElement *ave = (GstAVElement *) avelement;
+ 
+   if (ave->dec != NULL)
+     gst_object_unref (ave->dec);
+   if (ave->sink != NULL)
+     gst_object_unref (ave->sink);
+ 
+   g_slice_free (GstAVElement, ave);
+ }
+
+ /* GCompareDataFunc ordering GstAVElement entries alphabetically by the
+  * factory name of their decoder. @user_data is unused. */
+ static gint
+ avelement_compare_decoder (gconstpointer p1, gconstpointer p2,
+     gpointer user_data)
+ {
+   GstAVElement *ave1 = (GstAVElement *) p1;
+   GstAVElement *ave2 = (GstAVElement *) p2;
+ 
+   return strcmp (GST_OBJECT_NAME (ave1->dec), GST_OBJECT_NAME (ave2->dec));
+ }
+
+ /* GCompareDataFunc used to look up a GstAVElement entry (p1) by a decoder
+  * factory (p2): compares the entry's decoder factory name with the name of
+  * the given factory. Returns 0 on a name match. */
+ static gint
+ avelement_lookup_decoder (gconstpointer p1, gconstpointer p2,
+     gpointer user_data)
+ {
+   GstAVElement *v1;
+   GstElementFactory *f2;
+ 
+   v1 = (GstAVElement *) p1;
+   f2 = (GstElementFactory *) p2;
+ 
+   return strcmp (GST_OBJECT_NAME (v1->dec), GST_OBJECT_NAME (f2));
+ }
+
+ /* GCompareFunc ordering GstAVElement entries by desirability:
+  * 1) higher combined rank first (decoder rank x sink rank when both have a
+  *    sink, decoder rank alone otherwise),
+  * 2) more common caps-features first,
+  * 3) then alphabetically by sink name, then by decoder name (stable
+  *    tie-break). */
+ static gint
+ avelement_compare (gconstpointer p1, gconstpointer p2)
+ {
+   GstAVElement *v1, *v2;
+   GstPluginFeature *fd1, *fd2, *fs1, *fs2;
+   gint64 diff, v1_rank, v2_rank;
+ 
+   v1 = (GstAVElement *) p1;
+   v2 = (GstAVElement *) p2;
+ 
+   fd1 = (GstPluginFeature *) v1->dec;
+   fd2 = (GstPluginFeature *) v2->dec;
+ 
+   /* If both have a sink, we also compare their ranks */
+   if (v1->sink && v2->sink) {
+     fs1 = (GstPluginFeature *) v1->sink;
+     fs2 = (GstPluginFeature *) v2->sink;
+     v1_rank = (gint64) gst_plugin_feature_get_rank (fd1) *
+         gst_plugin_feature_get_rank (fs1);
+     v2_rank = (gint64) gst_plugin_feature_get_rank (fd2) *
+         gst_plugin_feature_get_rank (fs2);
+   } else {
+     v1_rank = gst_plugin_feature_get_rank (fd1);
+     v2_rank = gst_plugin_feature_get_rank (fd2);
+     fs1 = fs2 = NULL;
+   }
+ 
+   /* comparison based on the rank (higher rank sorts first) */
+   diff = v2_rank - v1_rank;
+   if (diff < 0)
+     return -1;
+   else if (diff > 0)
+     return 1;
+ 
+   /* comparison based on number of common caps features */
+   diff = v2->n_comm_cf - v1->n_comm_cf;
+   if (diff != 0)
+     return diff;
+ 
+   if (fs1 && fs2) {
+     /* comparison based on the name of sink elements */
+     diff = strcmp (GST_OBJECT_NAME (fs1), GST_OBJECT_NAME (fs2));
+     if (diff != 0)
+       return diff;
+   }
+ 
+   /* comparison based on the name of decoder elements */
+   return strcmp (GST_OBJECT_NAME (fd1), GST_OBJECT_NAME (fd2));
+ }
+
+ static GSequence *
+ avelements_create (GstPlayBin3 * playbin, gboolean isaudioelement)
+ {
+ GstElementFactory *d_factory, *s_factory;
+ GList *dec_list, *sink_list, *dl, *sl;
+ GSequence *ave_seq = NULL;
+ GstAVElement *ave;
+ guint n_common_cf = 0;
+
+ if (isaudioelement) {
+ sink_list = gst_element_factory_list_get_elements
+ (GST_ELEMENT_FACTORY_TYPE_SINK |
+ GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO, GST_RANK_MARGINAL);
+ dec_list =
+ gst_element_factory_list_get_elements (GST_ELEMENT_FACTORY_TYPE_DECODER
+ | GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO, GST_RANK_MARGINAL);
+ } else {
+ sink_list = gst_element_factory_list_get_elements
+ (GST_ELEMENT_FACTORY_TYPE_SINK |
+ GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO |
+ GST_ELEMENT_FACTORY_TYPE_MEDIA_IMAGE, GST_RANK_MARGINAL);
+
+ dec_list =
+ gst_element_factory_list_get_elements (GST_ELEMENT_FACTORY_TYPE_DECODER
+ | GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO |
+ GST_ELEMENT_FACTORY_TYPE_MEDIA_IMAGE, GST_RANK_MARGINAL);
+ }
+
+ /* create a list of audio/video elements. Each element in the list
+ * is holding an audio/video decoder and an audio/video sink in which
+ * the decoders srcpad template caps and sink element's sinkpad template
+ * caps are compatible */
+ dl = dec_list;
+ sl = sink_list;
+
+ ave_seq = g_sequence_new ((GDestroyNotify) avelements_free);
+
+ for (; dl; dl = dl->next) {
+ d_factory = (GstElementFactory *) dl->data;
+ for (; sl; sl = sl->next) {
+ s_factory = (GstElementFactory *) sl->data;
+
+ n_common_cf =
+ gst_playback_utils_get_n_common_capsfeatures (d_factory, s_factory,
+ gst_play_bin3_get_flags (playbin), isaudioelement);
+ if (n_common_cf < 1)
+ continue;
+
+ ave = g_slice_new (GstAVElement);
+ ave->dec = gst_object_ref (d_factory);
+ ave->sink = gst_object_ref (s_factory);
+ ave->n_comm_cf = n_common_cf;
+ g_sequence_append (ave_seq, ave);
+ }
+ sl = sink_list;
+ }
+ g_sequence_sort (ave_seq, (GCompareDataFunc) avelement_compare_decoder, NULL);
+
+ gst_plugin_feature_list_free (dec_list);
+ gst_plugin_feature_list_free (sink_list);
+
+ return ave_seq;
+ }
+
+ static gboolean
+ avelement_iter_is_equal (GSequenceIter * iter, GstElementFactory * factory)
+ {
+ GstAVElement *ave;
+
+ if (!iter)
+ return FALSE;
+
+ ave = g_sequence_get (iter);
+ if (!ave)
+ return FALSE;
+
+ return strcmp (GST_OBJECT_NAME (ave->dec), GST_OBJECT_NAME (factory)) == 0;
+ }
+
/* Turns @factory_list (as returned by gst_element_factory_list_filter())
 * into an ordered decoder list:
 *  - parsers and sinks are kept and placed first;
 *  - hardware decoders are dropped when GST_PLAY_FLAG_FORCE_SW_DECODERS is
 *    set in @flags;
 *  - remaining decoders are ordered by their best-ranked GstAVElement entry
 *    from @avelements (see avelement_compare()).
 * Takes ownership of @factory_list (it is freed); the returned list holds
 * new refs that the caller must release. */
static GList *
create_decoders_list (GList * factory_list, GSequence * avelements,
    GstPlayFlags flags)
{
  GList *dec_list = NULL, *tmp;
  GList *ave_list = NULL;
  GList *ave_free_list = NULL;
  GstAVElement *ave, *best_ave;

  g_return_val_if_fail (factory_list != NULL, NULL);
  g_return_val_if_fail (avelements != NULL, NULL);

  for (tmp = factory_list; tmp; tmp = tmp->next) {
    GstElementFactory *factory = (GstElementFactory *) tmp->data;

    /* if there are parsers or sink elements, add them first */
    if (gst_element_factory_list_is_type (factory,
            GST_ELEMENT_FACTORY_TYPE_PARSER) ||
        gst_element_factory_list_is_type (factory,
            GST_ELEMENT_FACTORY_TYPE_SINK)) {
      dec_list = g_list_prepend (dec_list, gst_object_ref (factory));
    } else if (!(((flags & GST_PLAY_FLAG_FORCE_SW_DECODERS) != 0)
            && gst_element_factory_list_is_type (factory,
                GST_ELEMENT_FACTORY_TYPE_HARDWARE))) {
      GSequenceIter *seq_iter;

      seq_iter =
          g_sequence_lookup (avelements, factory,
          (GCompareDataFunc) avelement_lookup_decoder, NULL);
      if (!seq_iter) {
        /* decoder has no precomputed sink pairing: synthesize a minimal
         * entry so it still takes part in the rank-based sort below */
        GstAVElement *ave = g_slice_new0 (GstAVElement);

        ave->dec = factory;
        ave->sink = NULL;
        /* There's at least raw */
        ave->n_comm_cf = 1;

        ave_list = g_list_prepend (ave_list, ave);

        /* We need to free these later */
        ave_free_list = g_list_prepend (ave_free_list, ave);
        continue;
      }

      /* Go to first iter with that decoder */
      do {
        GSequenceIter *tmp_seq_iter;

        tmp_seq_iter = g_sequence_iter_prev (seq_iter);
        if (!avelement_iter_is_equal (tmp_seq_iter, factory))
          break;
        seq_iter = tmp_seq_iter;
      } while (!g_sequence_iter_is_begin (seq_iter));

      /* Get the best ranked GstAVElement for that factory */
      best_ave = NULL;
      while (!g_sequence_iter_is_end (seq_iter)
          && avelement_iter_is_equal (seq_iter, factory)) {
        ave = g_sequence_get (seq_iter);

        if (!best_ave || avelement_compare (ave, best_ave) < 0)
          best_ave = ave;

        seq_iter = g_sequence_iter_next (seq_iter);
      }
      ave_list = g_list_prepend (ave_list, best_ave);
    }
  }

  /* Sort all GstAVElements by their relative ranks and insert
   * into the decoders list */
  ave_list = g_list_sort (ave_list, (GCompareFunc) avelement_compare);
  for (tmp = ave_list; tmp; tmp = tmp->next) {
    ave = (GstAVElement *) tmp->data;
    dec_list = g_list_prepend (dec_list, gst_object_ref (ave->dec));
  }
  g_list_free (ave_list);
  gst_plugin_feature_list_free (factory_list);

  /* synthesized entries hold no refs of their own, only the slices die */
  for (tmp = ave_free_list; tmp; tmp = tmp->next)
    g_slice_free (GstAVElement, tmp->data);
  g_list_free (ave_free_list);

  dec_list = g_list_reverse (dec_list);

  return dec_list;
}
+
+ /* Called when we must provide a list of factories to plug to @pad with @caps.
+ * We first check if we have a sink that can handle the format and if we do, we
+ * return NULL, to expose the pad. If we have no sink (or the sink does not
+ * work), we return the list of elements that can connect. */
+ static GValueArray *
+ autoplug_factories_cb (GstElement * decodebin, GstPad * pad,
+ GstCaps * caps, GstSourceGroup * group)
+ {
+ GstPlayBin3 *playbin;
+ GList *factory_list, *tmp;
+ GValueArray *result;
+ gboolean unref_caps = FALSE;
+ gboolean isaudiodeclist = FALSE;
+ gboolean isvideodeclist = FALSE;
+
+ if (!caps) {
+ caps = gst_caps_new_any ();
+ unref_caps = TRUE;
+ }
+
+ playbin = group->playbin;
+
+ GST_DEBUG_OBJECT (playbin, "factories group %p for %s:%s, %" GST_PTR_FORMAT,
+ group, GST_DEBUG_PAD_NAME (pad), caps);
+
+ /* filter out the elements based on the caps. */
+ g_mutex_lock (&playbin->elements_lock);
+ gst_play_bin3_update_elements_list (playbin);
+ factory_list =
+ gst_element_factory_list_filter (playbin->elements, caps, GST_PAD_SINK,
+ gst_caps_is_fixed (caps));
+ g_mutex_unlock (&playbin->elements_lock);
+
+ GST_DEBUG_OBJECT (playbin, "found factories %p", factory_list);
+ GST_PLUGIN_FEATURE_LIST_DEBUG (factory_list);
+
+ /* check whether the caps are asking for a list of audio/video decoders */
+ tmp = factory_list;
+ if (!gst_caps_is_any (caps)) {
+ for (; tmp; tmp = tmp->next) {
+ GstElementFactory *factory = (GstElementFactory *) tmp->data;
+
+ isvideodeclist = gst_element_factory_list_is_type (factory,
+ GST_ELEMENT_FACTORY_TYPE_DECODER |
+ GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO |
+ GST_ELEMENT_FACTORY_TYPE_MEDIA_IMAGE);
+ isaudiodeclist = gst_element_factory_list_is_type (factory,
+ GST_ELEMENT_FACTORY_TYPE_DECODER |
+ GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO);
+
+ if (isaudiodeclist || isvideodeclist)
+ break;
+ }
+ }
+
+ if (isaudiodeclist || isvideodeclist) {
+ GSequence **ave_list;
++ GstPlayFlags flags;
++
+ if (isaudiodeclist)
+ ave_list = &playbin->aelements;
+ else
+ ave_list = &playbin->velements;
+
++ flags = gst_play_bin_get_flags (playbin);
++
+ g_mutex_lock (&playbin->elements_lock);
+ /* sort factory_list based on the GstAVElement list priority */
++ factory_list = create_decoders_list (factory_list, *ave_list, flags);
+ g_mutex_unlock (&playbin->elements_lock);
+ }
+
+ /* 2 additional elements for the already set audio/video sinks */
+ result = g_value_array_new (g_list_length (factory_list) + 2);
+
+ /* Check if we already have an audio/video sink and if this is the case
+ * put it as the first element of the array */
+ if (group->audio_sink) {
+ GstElementFactory *factory = gst_element_get_factory (group->audio_sink);
+
+ if (factory && _factory_can_sink_caps (factory, caps)) {
+ GValue val = { 0, };
+
+ g_value_init (&val, G_TYPE_OBJECT);
+ g_value_set_object (&val, factory);
+ result = g_value_array_append (result, &val);
+ g_value_unset (&val);
+ }
+ }
+
+ if (group->video_sink) {
+ GstElementFactory *factory = gst_element_get_factory (group->video_sink);
+
+ if (factory && _factory_can_sink_caps (factory, caps)) {
+ GValue val = { 0, };
+
+ g_value_init (&val, G_TYPE_OBJECT);
+ g_value_set_object (&val, factory);
+ result = g_value_array_append (result, &val);
+ g_value_unset (&val);
+ }
+ }
+
+ for (tmp = factory_list; tmp; tmp = tmp->next) {
+ GstElementFactory *factory = GST_ELEMENT_FACTORY_CAST (tmp->data);
+ GValue val = { 0, };
+
+ if (group->audio_sink && gst_element_factory_list_is_type (factory,
+ GST_ELEMENT_FACTORY_TYPE_SINK |
+ GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO)) {
+ continue;
+ }
+ if (group->video_sink && gst_element_factory_list_is_type (factory,
+ GST_ELEMENT_FACTORY_TYPE_SINK | GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO
+ | GST_ELEMENT_FACTORY_TYPE_MEDIA_IMAGE)) {
+ continue;
+ }
+
+ g_value_init (&val, G_TYPE_OBJECT);
+ g_value_set_object (&val, factory);
+ g_value_array_append (result, &val);
+ g_value_unset (&val);
+ }
+ gst_plugin_feature_list_free (factory_list);
+
+ if (unref_caps)
+ gst_caps_unref (caps);
+
+ return result;
+ }
+ #endif
+
/* Sync handler for the temporary bus used while probing a sink in
 * activate_sink(). Errors from a user-provided (fixed) sink are proxied to
 * the playbin bus so the app sees them early; other errors are swallowed.
 * HAVE_CONTEXT messages are applied to playbin and forwarded so contexts
 * discovered during probing are not lost.
 * NOTE(review): gst_element_post_message() takes ownership of @msg while we
 * still return GST_BUS_DROP — this mirrors the upstream pattern; confirm
 * against GstBus sync-handler ownership rules before changing. */
static GstBusSyncReply
activate_sink_bus_handler (GstBus * bus, GstMessage * msg,
    GstPlayBin3 * playbin)
{
  if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ERROR) {
    /* Only proxy errors from a fixed sink. If that fails we can just error out
     * early as stuff will fail later anyway */
    if (playbin->audio_sink
        && gst_object_has_as_ancestor (GST_MESSAGE_SRC (msg),
            GST_OBJECT_CAST (playbin->audio_sink)))
      gst_element_post_message (GST_ELEMENT_CAST (playbin), msg);
    else if (playbin->video_sink
        && gst_object_has_as_ancestor (GST_MESSAGE_SRC (msg),
            GST_OBJECT_CAST (playbin->video_sink)))
      gst_element_post_message (GST_ELEMENT_CAST (playbin), msg);
    else if (playbin->text_sink
        && gst_object_has_as_ancestor (GST_MESSAGE_SRC (msg),
            GST_OBJECT_CAST (playbin->text_sink)))
      gst_element_post_message (GST_ELEMENT_CAST (playbin), msg);
    else
      gst_message_unref (msg);
  } else if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_HAVE_CONTEXT) {
    GstContext *context;

    gst_message_parse_have_context (msg, &context);
    gst_element_set_context (GST_ELEMENT_CAST (playbin), context);
    gst_context_unref (context);
    gst_element_post_message (GST_ELEMENT_CAST (playbin), msg);
  } else {
    /* forward everything else to the playbin bus */
    gst_element_post_message (GST_ELEMENT_CAST (playbin), msg);
  }

  /* Doesn't really matter, nothing is using this bus */
  return GST_BUS_DROP;
}
+
/* Brings @sink to at least READY so it can be probed (accept-caps, caps
 * queries). Returns TRUE on success. @activated (optional) is set to TRUE
 * only if this call actually changed the sink's state, so the caller knows
 * it must bring the sink back to NULL afterwards. */
static gboolean
activate_sink (GstPlayBin3 * playbin, GstElement * sink, gboolean * activated)
{
  GstState state;
  GstBus *bus = NULL;
  GstStateChangeReturn sret;
  gboolean ret = FALSE;

  if (activated)
    *activated = FALSE;

  GST_OBJECT_LOCK (sink);
  state = GST_STATE (sink);
  GST_OBJECT_UNLOCK (sink);
  /* already READY or above: nothing to do, and *activated stays FALSE */
  if (state >= GST_STATE_READY) {
    ret = TRUE;
    goto done;
  }

  /* an unparented sink has no bus; give it a temporary one so errors and
   * contexts produced during the state change reach our sync handler */
  if (!GST_OBJECT_PARENT (sink)) {
    bus = gst_bus_new ();
    gst_bus_set_sync_handler (bus,
        (GstBusSyncHandler) activate_sink_bus_handler, playbin, NULL);
    gst_element_set_bus (sink, bus);
  }

  sret = gst_element_set_state (sink, GST_STATE_READY);
  if (sret == GST_STATE_CHANGE_FAILURE)
    goto done;

  if (activated)
    *activated = TRUE;
  ret = TRUE;

done:
  /* detach the temporary bus again in every exit path */
  if (bus) {
    gst_element_set_bus (sink, NULL);
    gst_object_unref (bus);
  }

  return ret;
}
+
+ #if 0 /* AUTOPLUG DISABLED */
+ /* autoplug-continue decides, if a pad has raw caps that can be exposed
+ * directly or if further decoding is necessary. We use this to expose
+ * supported subtitles directly */
+
+ /* FIXME 0.11: Remove the checks for ANY caps, a sink should specify
+ * explicitly the caps it supports and if it claims to support ANY
+ * caps it really should support everything */
/* Returns TRUE to continue autoplugging (decode further) or FALSE to stop
 * and expose @pad as-is. The pad is exposed (FALSE) when one of the
 * configured text/audio/video sinks directly accepts @caps, or when @caps
 * are subtitle caps the internal subtitle overlay can render. */
static gboolean
autoplug_continue_cb (GstElement * element, GstPad * pad, GstCaps * caps,
    GstSourceGroup * group)
{
  gboolean ret = TRUE;
  GstPad *sinkpad = NULL;
  gboolean activated_sink;

  GST_SOURCE_GROUP_LOCK (group);

  if (group->text_sink &&
      activate_sink (group->playbin, group->text_sink, &activated_sink)) {
    sinkpad = gst_element_get_static_pad (group->text_sink, "sink");
    if (sinkpad) {
      GstCaps *sinkcaps;

      sinkcaps = gst_pad_query_caps (sinkpad, NULL);
      /* ANY-caps sinks are not trusted here (see FIXME above): only stop
       * when the sink advertises concrete caps and accepts ours */
      if (!gst_caps_is_any (sinkcaps))
        ret = !gst_pad_query_accept_caps (sinkpad, caps);
      gst_caps_unref (sinkcaps);
      gst_object_unref (sinkpad);
    }
    if (activated_sink)
      gst_element_set_state (group->text_sink, GST_STATE_NULL);
  } else {
    /* no custom text sink: expose the pad if the subtitle overlay's
     * factory caps can render these caps directly */
    GstCaps *subcaps = gst_subtitle_overlay_create_factory_caps ();
    ret = !gst_caps_is_subset (caps, subcaps);
    gst_caps_unref (subcaps);
  }
  /* If autoplugging can stop don't do additional checks */
  if (!ret)
    goto done;

  if (group->audio_sink &&
      activate_sink (group->playbin, group->audio_sink, &activated_sink)) {

    sinkpad = gst_element_get_static_pad (group->audio_sink, "sink");
    if (sinkpad) {
      GstCaps *sinkcaps;

      sinkcaps = gst_pad_query_caps (sinkpad, NULL);
      if (!gst_caps_is_any (sinkcaps))
        ret = !gst_pad_query_accept_caps (sinkpad, caps);
      gst_caps_unref (sinkcaps);
      gst_object_unref (sinkpad);
    }
    if (activated_sink)
      gst_element_set_state (group->audio_sink, GST_STATE_NULL);
  }
  if (!ret)
    goto done;

  if (group->video_sink
      && activate_sink (group->playbin, group->video_sink, &activated_sink)) {
    sinkpad = gst_element_get_static_pad (group->video_sink, "sink");
    if (sinkpad) {
      GstCaps *sinkcaps;

      sinkcaps = gst_pad_query_caps (sinkpad, NULL);
      if (!gst_caps_is_any (sinkcaps))
        ret = !gst_pad_query_accept_caps (sinkpad, caps);
      gst_caps_unref (sinkcaps);
      gst_object_unref (sinkpad);
    }
    if (activated_sink)
      gst_element_set_state (group->video_sink, GST_STATE_NULL);
  }

done:
  GST_SOURCE_GROUP_UNLOCK (group);

  GST_DEBUG_OBJECT (group->playbin,
      "continue autoplugging group %p for %s:%s, %" GST_PTR_FORMAT ": %d",
      group, GST_DEBUG_PAD_NAME (pad), caps, ret);

  return ret;
}
+
+ static gboolean
+ sink_accepts_caps (GstPlayBin3 * playbin, GstElement * sink, GstCaps * caps)
+ {
+ GstPad *sinkpad;
+
+ if ((sinkpad = gst_element_get_static_pad (sink, "sink"))) {
+ /* Got the sink pad, now let's see if the element actually does accept the
+ * caps that we have */
+ if (!gst_pad_query_accept_caps (sinkpad, caps)) {
+ gst_object_unref (sinkpad);
+ return FALSE;
+ }
+ gst_object_unref (sinkpad);
+ }
+
+ return TRUE;
+ }
+
+ /* We are asked to select an element. See if the next element to check
+ * is a sink. If this is the case, we see if the sink works by setting it to
+ * READY. If the sink works, we return SELECT_EXPOSE to make decodebin
+ * expose the raw pad so that we can setup the mixers. */
+ static GstAutoplugSelectResult
+ autoplug_select_cb (GstElement * decodebin, GstPad * pad,
+ GstCaps * caps, GstElementFactory * factory, GstSourceGroup * group)
+ {
+ GstPlayBin3 *playbin;
+ GstElement *element;
+ const gchar *klass;
+ GstPlaySinkType type;
+ GstElement **sinkp;
+ GList *ave_list = NULL, *l;
+ GstAVElement *ave = NULL;
+ GSequence *ave_seq = NULL;
+ GSequenceIter *seq_iter;
+
+ playbin = group->playbin;
+
+ GST_DEBUG_OBJECT (playbin, "select group %p for %s:%s, %" GST_PTR_FORMAT,
+ group, GST_DEBUG_PAD_NAME (pad), caps);
+
+ GST_DEBUG_OBJECT (playbin, "checking factory %s", GST_OBJECT_NAME (factory));
+
+ /* if it's not a sink, we make sure the element is compatible with
+ * the fixed sink */
+ if (!gst_element_factory_list_is_type (factory,
+ GST_ELEMENT_FACTORY_TYPE_SINK)) {
+ gboolean isvideodec = gst_element_factory_list_is_type (factory,
+ GST_ELEMENT_FACTORY_TYPE_DECODER |
+ GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO |
+ GST_ELEMENT_FACTORY_TYPE_MEDIA_IMAGE);
+ gboolean isaudiodec = gst_element_factory_list_is_type (factory,
+ GST_ELEMENT_FACTORY_TYPE_DECODER |
+ GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO);
+
+ if (!isvideodec && !isaudiodec)
+ return GST_AUTOPLUG_SELECT_TRY;
+
+ GST_SOURCE_GROUP_LOCK (group);
+ g_mutex_lock (&playbin->elements_lock);
+
+ if (isaudiodec) {
+ ave_seq = playbin->aelements;
+ sinkp = &group->audio_sink;
+ } else {
+ ave_seq = playbin->velements;
+ sinkp = &group->video_sink;
+ }
+
+ seq_iter =
+ g_sequence_lookup (ave_seq, factory,
+ (GCompareDataFunc) avelement_lookup_decoder, NULL);
+ if (seq_iter) {
+ /* Go to first iter with that decoder */
+ do {
+ GSequenceIter *tmp_seq_iter;
+
+ tmp_seq_iter = g_sequence_iter_prev (seq_iter);
+ if (!avelement_iter_is_equal (tmp_seq_iter, factory))
+ break;
+ seq_iter = tmp_seq_iter;
+ } while (!g_sequence_iter_is_begin (seq_iter));
+
+ while (!g_sequence_iter_is_end (seq_iter)
+ && avelement_iter_is_equal (seq_iter, factory)) {
+ ave = g_sequence_get (seq_iter);
+ ave_list = g_list_prepend (ave_list, ave);
+ seq_iter = g_sequence_iter_next (seq_iter);
+ }
+
+ /* Sort all GstAVElements by their relative ranks and insert
+ * into the decoders list */
+ ave_list = g_list_sort (ave_list, (GCompareFunc) avelement_compare);
+ } else {
+ ave_list = g_list_prepend (ave_list, NULL);
+ }
+
+ /* if it is a decoder and we don't have a fixed sink, then find out
+ * the matching audio/video sink from GstAVElements list */
+ for (l = ave_list; l; l = l->next) {
+ gboolean created_sink = FALSE;
+
+ ave = (GstAVElement *) l->data;
+
+ if (((isaudiodec && !group->audio_sink) ||
+ (isvideodec && !group->video_sink))) {
+ if (ave && ave->sink) {
+ GST_DEBUG_OBJECT (playbin,
+ "Trying to create sink '%s' for decoder '%s'",
+ gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (ave->sink)),
+ gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)));
+ if ((*sinkp = gst_element_factory_create (ave->sink, NULL)) == NULL) {
+ GST_WARNING_OBJECT (playbin,
+ "Could not create an element from %s",
+ gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (ave->sink)));
+ continue;
+ } else {
+ if (!activate_sink (playbin, *sinkp, NULL)) {
+ gst_object_unref (*sinkp);
+ *sinkp = NULL;
+ GST_WARNING_OBJECT (playbin,
+ "Could not activate sink %s",
+ gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (ave->sink)));
+ continue;
+ }
+ gst_object_ref_sink (*sinkp);
+ created_sink = TRUE;
+ }
+ }
+ }
+
+ /* If it is a decoder and we have a fixed sink for the media
+ * type it outputs, check that the decoder is compatible with this sink */
+ if ((isaudiodec && group->audio_sink) || (isvideodec
+ && group->video_sink)) {
+ gboolean compatible = FALSE;
+ GstPad *sinkpad;
+ GstCaps *caps;
+ GstElement *sink;
+
+ sink = *sinkp;
+
+ if ((sinkpad = gst_element_get_static_pad (sink, "sink"))) {
+ GstPlayFlags flags = gst_play_bin3_get_flags (playbin);
+ GstCaps *raw_caps =
+ (isaudiodec) ? gst_static_caps_get (&raw_audio_caps) :
+ gst_static_caps_get (&raw_video_caps);
+
+ caps = gst_pad_query_caps (sinkpad, NULL);
+
+ /* If the sink supports raw audio/video, we first check
+ * if the decoder could output any raw audio/video format
+ * and assume it is compatible with the sink then. We don't
+ * do a complete compatibility check here if converters
+ * are plugged between the decoder and the sink because
+ * the converters will convert between raw formats and
+ * even if the decoder format is not supported by the decoder
+ * a converter will convert it.
+ *
+ * We assume here that the converters can convert between
+ * any raw format.
+ */
+ if ((isaudiodec && !(flags & GST_PLAY_FLAG_NATIVE_AUDIO)
+ && gst_caps_can_intersect (caps, raw_caps)) || (!isaudiodec
+ && !(flags & GST_PLAY_FLAG_NATIVE_VIDEO)
+ && gst_caps_can_intersect (caps, raw_caps))) {
+ compatible =
+ gst_element_factory_can_src_any_caps (factory, raw_caps)
+ || gst_element_factory_can_src_any_caps (factory, caps);
+ } else {
+ compatible = gst_element_factory_can_src_any_caps (factory, caps);
+ }
+
+ gst_object_unref (sinkpad);
+ gst_caps_unref (caps);
+ }
+
+ if (compatible)
+ break;
+
+ GST_DEBUG_OBJECT (playbin, "%s not compatible with the fixed sink",
+ GST_OBJECT_NAME (factory));
+
+ /* If it is not compatible, either continue with the next possible
+ * sink or if we have a fixed sink, skip the decoder */
+ if (created_sink) {
+ gst_element_set_state (*sinkp, GST_STATE_NULL);
+ gst_object_unref (*sinkp);
+ *sinkp = NULL;
+ } else {
+ g_mutex_unlock (&playbin->elements_lock);
+ GST_SOURCE_GROUP_UNLOCK (group);
+ return GST_AUTOPLUG_SELECT_SKIP;
+ }
+ }
+ }
+ g_list_free (ave_list);
+ g_mutex_unlock (&playbin->elements_lock);
+ GST_SOURCE_GROUP_UNLOCK (group);
+ return GST_AUTOPLUG_SELECT_TRY;
+ }
+
+ /* it's a sink, see if an instance of it actually works */
+ GST_DEBUG_OBJECT (playbin, "we found a sink '%s'", GST_OBJECT_NAME (factory));
+
+ klass =
+ gst_element_factory_get_metadata (factory, GST_ELEMENT_METADATA_KLASS);
+
+ /* figure out the klass */
+ if (strstr (klass, "Audio")) {
+ GST_DEBUG_OBJECT (playbin, "we found an audio sink");
+ type = GST_PLAY_SINK_TYPE_AUDIO;
+ sinkp = &group->audio_sink;
+ } else if (strstr (klass, "Video")) {
+ GST_DEBUG_OBJECT (playbin, "we found a video sink");
+ type = GST_PLAY_SINK_TYPE_VIDEO;
+ sinkp = &group->video_sink;
+ } else {
+ /* unknown klass, skip this element */
+ GST_WARNING_OBJECT (playbin, "unknown sink klass %s found", klass);
+ return GST_AUTOPLUG_SELECT_SKIP;
+ }
+
+ /* if we are asked to do visualisations and it's an audio sink, skip the
+ * element. We can only do visualisations with raw sinks */
+ if (gst_play_sink_get_flags (playbin->playsink) & GST_PLAY_FLAG_VIS) {
+ if (type == GST_PLAY_SINK_TYPE_AUDIO) {
+ GST_DEBUG_OBJECT (playbin, "skip audio sink because of vis");
+ return GST_AUTOPLUG_SELECT_SKIP;
+ }
+ }
+
+ /* now see if we already have a sink element */
+ GST_SOURCE_GROUP_LOCK (group);
+ if (*sinkp && GST_STATE (*sinkp) >= GST_STATE_READY) {
+ GstElement *sink = gst_object_ref (*sinkp);
+
+ if (sink_accepts_caps (playbin, sink, caps)) {
+ GST_DEBUG_OBJECT (playbin,
+ "Existing sink '%s' accepts caps: %" GST_PTR_FORMAT,
+ GST_ELEMENT_NAME (sink), caps);
+ gst_object_unref (sink);
+ GST_SOURCE_GROUP_UNLOCK (group);
+ return GST_AUTOPLUG_SELECT_EXPOSE;
+ } else {
+ GST_DEBUG_OBJECT (playbin,
+ "Existing sink '%s' does not accept caps: %" GST_PTR_FORMAT,
+ GST_ELEMENT_NAME (sink), caps);
+ gst_object_unref (sink);
+ GST_SOURCE_GROUP_UNLOCK (group);
+ return GST_AUTOPLUG_SELECT_SKIP;
+ }
+ }
+ GST_DEBUG_OBJECT (playbin, "we have no pending sink, try to create '%s'",
+ gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)));
+
+ if ((*sinkp = gst_element_factory_create (factory, NULL)) == NULL) {
+ GST_WARNING_OBJECT (playbin, "Could not create an element from %s",
+ gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)));
+ GST_SOURCE_GROUP_UNLOCK (group);
+ return GST_AUTOPLUG_SELECT_SKIP;
+ }
+
+ element = *sinkp;
+
+ if (!activate_sink (playbin, element, NULL)) {
+ GST_WARNING_OBJECT (playbin, "Could not activate sink %s",
+ gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)));
+ *sinkp = NULL;
+ gst_object_unref (element);
+ GST_SOURCE_GROUP_UNLOCK (group);
+ return GST_AUTOPLUG_SELECT_SKIP;
+ }
+
+ /* Check if the selected sink actually supports the
+ * caps and can be set to READY*/
+ if (!sink_accepts_caps (playbin, element, caps)) {
+ *sinkp = NULL;
+ gst_element_set_state (element, GST_STATE_NULL);
+ gst_object_unref (element);
+ GST_SOURCE_GROUP_UNLOCK (group);
+ return GST_AUTOPLUG_SELECT_SKIP;
+ }
+
+ /* remember the sink in the group now, the element is floating, we take
+ * ownership now
+ *
+ * store the sink in the group, we will configure it later when we
+ * reconfigure the sink */
+ GST_DEBUG_OBJECT (playbin, "remember sink");
+ gst_object_ref_sink (element);
+ GST_SOURCE_GROUP_UNLOCK (group);
+
+ /* tell decodebin to expose the pad because we are going to use this
+ * sink */
+ GST_DEBUG_OBJECT (playbin, "we found a working sink, expose pad");
+
+ return GST_AUTOPLUG_SELECT_EXPOSE;
+ }
+
/* Intersects @caps with @filter (when a filter is set), replacing @caps
 * in place with the intersection; the old @caps ref is released. */
#define GST_PLAY_BIN3_FILTER_CAPS(filter,caps) G_STMT_START {                  \
  if ((filter)) {                                                             \
    GstCaps *intersection =                                                   \
        gst_caps_intersect_full ((filter), (caps), GST_CAPS_INTERSECT_FIRST); \
    gst_caps_unref ((caps));                                                  \
    (caps) = intersection;                                                    \
  }                                                                           \
} G_STMT_END
+
/* Answers a CAPS query made through uridecodebin's autoplug-query signal.
 * Builds the result from (in order): the matching configured sink's caps,
 * or — when no sink is set — the sink pad templates of all factories that
 * could be autoplugged, and finally the queried element's own pad template
 * caps so the result can never become empty. Returns FALSE when the query
 * cannot be answered (unknown factory/media type or empty result). */
static gboolean
autoplug_query_caps (GstElement * uridecodebin, GstPad * pad,
    GstElement * element, GstQuery * query, GstSourceGroup * group)
{
  GstCaps *filter, *result = NULL;
  GstElement *sink;
  GstPad *sinkpad = NULL;
  GstElementFactory *factory;
  GstElementFactoryListType factory_type;
  gboolean have_sink = FALSE;

  GST_SOURCE_GROUP_LOCK (group);
  gst_query_parse_caps (query, &filter);

  factory = gst_element_get_factory (element);
  if (!factory)
    goto done;

  if (gst_element_factory_list_is_type (factory,
          GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO |
          GST_ELEMENT_FACTORY_TYPE_MEDIA_IMAGE)) {
    factory_type =
        GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO |
        GST_ELEMENT_FACTORY_TYPE_MEDIA_IMAGE;

    if ((sink = group->video_sink)) {
      sinkpad = gst_element_get_static_pad (sink, "sink");
      if (sinkpad) {
        GstCaps *sinkcaps;

        sinkcaps = gst_pad_query_caps (sinkpad, filter);
        /* ANY caps carry no information; don't merge them in */
        if (!gst_caps_is_any (sinkcaps)) {
          if (!result)
            result = sinkcaps;
          else
            result = gst_caps_merge (result, sinkcaps);
        } else {
          gst_caps_unref (sinkcaps);
        }
        gst_object_unref (sinkpad);
      }
      have_sink = TRUE;
    }
  } else if (gst_element_factory_list_is_type (factory,
          GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO)) {
    factory_type = GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO;

    if ((sink = group->audio_sink)) {
      sinkpad = gst_element_get_static_pad (sink, "sink");
      if (sinkpad) {
        GstCaps *sinkcaps;

        sinkcaps = gst_pad_query_caps (sinkpad, filter);
        if (!gst_caps_is_any (sinkcaps)) {
          if (!result)
            result = sinkcaps;
          else
            result = gst_caps_merge (result, sinkcaps);
        } else {
          gst_caps_unref (sinkcaps);
        }
        gst_object_unref (sinkpad);
      }
      have_sink = TRUE;
    }
  } else if (gst_element_factory_list_is_type (factory,
          GST_ELEMENT_FACTORY_TYPE_MEDIA_SUBTITLE)) {
    factory_type = GST_ELEMENT_FACTORY_TYPE_MEDIA_SUBTITLE;

    if ((sink = group->playbin->text_sink)) {
      sinkpad = gst_element_get_static_pad (sink, "sink");
      if (sinkpad) {
        GstCaps *sinkcaps;

        sinkcaps = gst_pad_query_caps (sinkpad, filter);
        if (!gst_caps_is_any (sinkcaps)) {
          if (!result)
            result = sinkcaps;
          else
            result = gst_caps_merge (result, sinkcaps);
        } else {
          gst_caps_unref (sinkcaps);
        }
        gst_object_unref (sinkpad);
      }
      have_sink = TRUE;
    } else {
      /* no custom text sink: fall back to the subtitle overlay's caps */
      GstCaps *subcaps = gst_subtitle_overlay_create_factory_caps ();
      GST_PLAY_BIN3_FILTER_CAPS (filter, subcaps);
      if (!result)
        result = subcaps;
      else
        result = gst_caps_merge (result, subcaps);
    }
  } else {
    goto done;
  }

  if (!have_sink) {
    GValueArray *factories;
    gint i, n;

    /* no sink configured: accumulate the sink template caps of every
     * factory that could be autoplugged for this media type */
    factories = autoplug_factories_cb (uridecodebin, pad, NULL, group);
    n = factories->n_values;
    for (i = 0; i < n; i++) {
      GValue *v = g_value_array_get_nth (factories, i);
      GstElementFactory *f = g_value_get_object (v);
      const GList *templates;
      const GList *l;
      GstCaps *templ_caps;

      if (!gst_element_factory_list_is_type (f, factory_type))
        continue;

      templates = gst_element_factory_get_static_pad_templates (f);

      for (l = templates; l; l = l->next) {
        templ_caps = gst_static_pad_template_get_caps (l->data);

        if (!gst_caps_is_any (templ_caps)) {
          GST_PLAY_BIN3_FILTER_CAPS (filter, templ_caps);
          if (!result)
            result = templ_caps;
          else
            result = gst_caps_merge (result, templ_caps);
        } else {
          gst_caps_unref (templ_caps);
        }
      }
    }
    g_value_array_free (factories);
  }

done:
  GST_SOURCE_GROUP_UNLOCK (group);

  if (!result)
    return FALSE;

  /* Add the actual decoder/parser/etc caps at the very end to
   * make sure we don't cause empty caps to be returned, e.g.
   * if a parser asks us but a decoder is required after it
   * because no sink can handle the format directly.
   */
  {
    GstPad *target = gst_ghost_pad_get_target (GST_GHOST_PAD (pad));

    if (target) {
      GstCaps *target_caps = gst_pad_get_pad_template_caps (target);
      GST_PLAY_BIN3_FILTER_CAPS (filter, target_caps);
      result = gst_caps_merge (result, target_caps);
      gst_object_unref (target);
    }
  }


  gst_query_set_caps_result (query, result);
  gst_caps_unref (result);

  return TRUE;
}
+
+ static gboolean
+ autoplug_query_context (GstElement * uridecodebin, GstPad * pad,
+ GstElement * element, GstQuery * query, GstSourceGroup * group)
+ {
+ GstElement *sink;
+ GstPad *sinkpad = NULL;
+ GstElementFactory *factory;
+ gboolean res = FALSE;
+
+ GST_SOURCE_GROUP_LOCK (group);
+
+ factory = gst_element_get_factory (element);
+ if (!factory)
+ goto done;
+
+ if (gst_element_factory_list_is_type (factory,
+ GST_ELEMENT_FACTORY_TYPE_MEDIA_VIDEO |
+ GST_ELEMENT_FACTORY_TYPE_MEDIA_IMAGE)) {
+ if ((sink = group->video_sink)) {
+ sinkpad = gst_element_get_static_pad (sink, "sink");
+ if (sinkpad) {
+ res = gst_pad_query (sinkpad, query);
+ gst_object_unref (sinkpad);
+ }
+ }
+ } else if (gst_element_factory_list_is_type (factory,
+ GST_ELEMENT_FACTORY_TYPE_MEDIA_AUDIO)) {
+ if ((sink = group->audio_sink)) {
+ sinkpad = gst_element_get_static_pad (sink, "sink");
+ if (sinkpad) {
+ res = gst_pad_query (sinkpad, query);
+ gst_object_unref (sinkpad);
+ }
+ }
+ } else if (gst_element_factory_list_is_type (factory,
+ GST_ELEMENT_FACTORY_TYPE_MEDIA_SUBTITLE)) {
+ if ((sink = group->playbin->text_sink)) {
+ sinkpad = gst_element_get_static_pad (sink, "sink");
+ if (sinkpad) {
+ res = gst_pad_query (sinkpad, query);
+ gst_object_unref (sinkpad);
+ }
+ }
+ } else {
+ goto done;
+ }
+
+ done:
+ GST_SOURCE_GROUP_UNLOCK (group);
+
+ return res;
+ }
+
+ static gboolean
+ autoplug_query_cb (GstElement * uridecodebin, GstPad * pad,
+ GstElement * element, GstQuery * query, GstSourceGroup * group)
+ {
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:
+ return autoplug_query_caps (uridecodebin, pad, element, query, group);
+ case GST_QUERY_CONTEXT:
+ return autoplug_query_context (uridecodebin, pad, element, query, group);
+ default:
+ return FALSE;
+ }
+ }
+ #endif
+
+ /* must be called with the group lock */
+ static gboolean
+ group_set_locked_state_unlocked (GstPlayBin3 * playbin, GstSourceGroup * group,
+ gboolean locked)
+ {
+ GST_DEBUG_OBJECT (playbin, "locked_state %d on group %p", locked, group);
+
+ if (group->uridecodebin)
+ gst_element_set_locked_state (group->uridecodebin, locked);
+
+ return TRUE;
+ }
+
/* Ensures *@elem holds a usable element named @name inside @playbin:
 * either resets the previously kept instance to READY or creates a new one.
 * Returns FALSE if the factory @name cannot create an element. On success
 * *@elem carries one ref owned by playbin plus the ref the bin adds. */
static gboolean
make_or_reuse_element (GstPlayBin3 * playbin, const gchar * name,
    GstElement ** elem)
{
  if (*elem) {
    GST_DEBUG_OBJECT (playbin, "reusing existing %s", name);
    gst_element_set_state (*elem, GST_STATE_READY);
    /* no need to take extra ref, we already have one
     * and the bin will add one since it is no longer floating,
     * as we added a non-floating ref when removing it from the
     * bin earlier */
  } else {
    GstElement *new_elem;
    GST_DEBUG_OBJECT (playbin, "making new %s", name);
    new_elem = gst_element_factory_make (name, NULL);
    if (!new_elem)
      return FALSE;
    /* keep our own ref; gst_bin_add below sinks the floating ref */
    *elem = gst_object_ref (new_elem);
  }

  /* add to the bin unless it is already parented there */
  if (GST_OBJECT_PARENT (*elem) != GST_OBJECT_CAST (playbin))
    gst_bin_add (GST_BIN_CAST (playbin), *elem);
  return TRUE;
}
+
+
/* Proxies uridecodebin3's "source-setup" signal as playbin3's own
 * source-setup signal so applications can configure the source element. */
static void
source_setup_cb (GstElement * element, GstElement * source,
    GstSourceGroup * group)
{
  g_signal_emit (group->playbin, gst_play_bin3_signals[SIGNAL_SOURCE_SETUP], 0,
      source);
}
+
/* Activate @group: set up the custom sinks, create (or reuse) the
 * uridecodebin3, configure it from the playbin properties/flags, connect all
 * signal handlers and bring it to PAUSED.
 *
 * On success the group is marked active and its elements are unlocked so
 * further playbin state changes propagate to them. On any failure the
 * partially configured sinks/uridecodebin are torn down again and
 * GST_STATE_CHANGE_FAILURE is returned.
 *
 * must be called with PLAY_BIN_LOCK */
static GstStateChangeReturn
activate_group (GstPlayBin3 * playbin, GstSourceGroup * group)
{
  GstElement *uridecodebin = NULL;
  GstPlayFlags flags;
  gboolean audio_sink_activated = FALSE;
  gboolean video_sink_activated = FALSE;
  gboolean text_sink_activated = FALSE;
  GstStateChangeReturn state_ret;

  g_return_val_if_fail (group->valid, GST_STATE_CHANGE_FAILURE);
  g_return_val_if_fail (!group->active, GST_STATE_CHANGE_FAILURE);

  GST_DEBUG_OBJECT (playbin, "activating group %p", group);

  GST_SOURCE_GROUP_LOCK (group);

  /* First set up the custom sinks */
  if (playbin->audio_sink)
    group->audio_sink = gst_object_ref (playbin->audio_sink);
  else
    group->audio_sink =
        gst_play_sink_get_sink (playbin->playsink, GST_PLAY_SINK_TYPE_AUDIO);

  if (group->audio_sink) {
    if (!activate_sink (playbin, group->audio_sink, &audio_sink_activated)) {
      if (group->audio_sink == playbin->audio_sink) {
        /* failure of a user-provided sink is fatal */
        goto sink_failure;
      } else {
        /* an automatically obtained sink that fails is simply dropped */
        gst_object_unref (group->audio_sink);
        group->audio_sink = NULL;
      }
    }
  }

  if (playbin->video_sink)
    group->video_sink = gst_object_ref (playbin->video_sink);
  else
    group->video_sink =
        gst_play_sink_get_sink (playbin->playsink, GST_PLAY_SINK_TYPE_VIDEO);

  if (group->video_sink) {
    if (!activate_sink (playbin, group->video_sink, &video_sink_activated)) {
      if (group->video_sink == playbin->video_sink) {
        goto sink_failure;
      } else {
        gst_object_unref (group->video_sink);
        group->video_sink = NULL;
      }
    }
  }

  if (playbin->text_sink)
    group->text_sink = gst_object_ref (playbin->text_sink);
  else
    group->text_sink =
        gst_play_sink_get_sink (playbin->playsink, GST_PLAY_SINK_TYPE_TEXT);

  if (group->text_sink) {
    if (!activate_sink (playbin, group->text_sink, &text_sink_activated)) {
      if (group->text_sink == playbin->text_sink) {
        goto sink_failure;
      } else {
        gst_object_unref (group->text_sink);
        group->text_sink = NULL;
      }
    }
  }


  if (!make_or_reuse_element (playbin, "uridecodebin3", &group->uridecodebin))
    goto no_uridecodebin;
  uridecodebin = group->uridecodebin;

  flags = gst_play_sink_get_flags (playbin->playsink);

  g_object_set (uridecodebin,
      /* configure connection speed */
      "connection-speed", playbin->connection_speed / 1000,
      /* configure uri */
      "uri", group->uri,
      /* configure download buffering */
      "download", ((flags & GST_PLAY_FLAG_DOWNLOAD) != 0),
      /* configure buffering of demuxed/parsed data */
      "use-buffering", ((flags & GST_PLAY_FLAG_BUFFERING) != 0),
      /* configure usage of hardware elements */
      "force-sw-decoders", ((flags & GST_PLAY_FLAG_FORCE_SW_DECODERS) != 0),
      /* configure buffering parameters */
      "buffer-duration", playbin->buffer_duration,
      "buffer-size", playbin->buffer_size,
      "ring-buffer-max-size", playbin->ring_buffer_max_size, NULL);

  /* Track the handler ids so they can be disconnected on deactivation */
  group->pad_added_id = g_signal_connect (uridecodebin, "pad-added",
      G_CALLBACK (pad_added_cb), group);
  group->pad_removed_id = g_signal_connect (uridecodebin,
      "pad-removed", G_CALLBACK (pad_removed_cb), group);
  group->select_stream_id = g_signal_connect (uridecodebin, "select-stream",
      G_CALLBACK (select_stream_cb), group);
  group->source_setup_id = g_signal_connect (uridecodebin, "source-setup",
      G_CALLBACK (source_setup_cb), group);
  group->about_to_finish_id =
      g_signal_connect (uridecodebin, "about-to-finish",
      G_CALLBACK (about_to_finish_cb), group);

  if (group->suburi)
    g_object_set (group->uridecodebin, "suburi", group->suburi, NULL);

  /* release the group lock before setting the state of the source bins, they
   * might fire signals in this thread that we need to handle with the
   * group_lock taken. */
  GST_SOURCE_GROUP_UNLOCK (group);

  if ((state_ret =
          gst_element_set_state (uridecodebin,
              GST_STATE_PAUSED)) == GST_STATE_CHANGE_FAILURE)
    goto uridecodebin_failure;

  GST_SOURCE_GROUP_LOCK (group);
  /* allow state changes of the playbin affect the group elements now */
  group_set_locked_state_unlocked (playbin, group, FALSE);
  group->active = TRUE;
  GST_SOURCE_GROUP_UNLOCK (group);

  return state_ret;

  /* ERRORS */
no_uridecodebin:
  {
    GstMessage *msg;

    /* post the missing-plugin machinery without holding the group lock */
    GST_SOURCE_GROUP_UNLOCK (group);
    msg =
        gst_missing_element_message_new (GST_ELEMENT_CAST (playbin),
        "uridecodebin3");
    gst_element_post_message (GST_ELEMENT_CAST (playbin), msg);

    GST_ELEMENT_ERROR (playbin, CORE, MISSING_PLUGIN,
        (_("Could not create \"uridecodebin3\" element.")), (NULL));

    GST_SOURCE_GROUP_LOCK (group);

    goto error_cleanup;
  }
uridecodebin_failure:
  {
    GST_DEBUG_OBJECT (playbin, "failed state change of uridecodebin");
    /* re-take the group lock; error_cleanup expects it held */
    GST_SOURCE_GROUP_LOCK (group);
    goto error_cleanup;
  }
sink_failure:
  {
    GST_ERROR_OBJECT (playbin, "failed to activate sinks");
    goto error_cleanup;
  }

error_cleanup:
  {
    /* reached with the group lock held from all three error paths */
    group->selected_stream_types = 0;

    /* delete any custom sinks we might have */
    if (group->audio_sink) {
      /* If this is a automatically created sink set it to NULL */
      if (audio_sink_activated)
        gst_element_set_state (group->audio_sink, GST_STATE_NULL);
      gst_object_unref (group->audio_sink);
    }
    group->audio_sink = NULL;

    if (group->video_sink) {
      /* If this is a automatically created sink set it to NULL */
      if (video_sink_activated)
        gst_element_set_state (group->video_sink, GST_STATE_NULL);
      gst_object_unref (group->video_sink);
    }
    group->video_sink = NULL;

    if (group->text_sink) {
      /* If this is a automatically created sink set it to NULL */
      if (text_sink_activated)
        gst_element_set_state (group->text_sink, GST_STATE_NULL);
      gst_object_unref (group->text_sink);
    }
    group->text_sink = NULL;

    if (uridecodebin) {
      REMOVE_SIGNAL (group->uridecodebin, group->pad_added_id);
      REMOVE_SIGNAL (group->uridecodebin, group->pad_removed_id);
      REMOVE_SIGNAL (group->uridecodebin, group->select_stream_id);
      REMOVE_SIGNAL (group->uridecodebin, group->source_setup_id);
      REMOVE_SIGNAL (group->uridecodebin, group->about_to_finish_id);
#if 0
      REMOVE_SIGNAL (group->urisourcebin, group->autoplug_factories_id);
      REMOVE_SIGNAL (group->urisourcebin, group->autoplug_select_id);
      REMOVE_SIGNAL (group->urisourcebin, group->autoplug_continue_id);
      REMOVE_SIGNAL (group->urisourcebin, group->autoplug_query_id);
#endif

      gst_element_set_state (uridecodebin, GST_STATE_NULL);
      /* NOTE(review): the group keeps its own ref on uridecodebin (taken in
       * make_or_reuse_element), so removing it from the bin here does not
       * destroy it and it can be reused on the next activation */
      gst_bin_remove (GST_BIN_CAST (playbin), uridecodebin);
    }

    GST_SOURCE_GROUP_UNLOCK (group);

    return GST_STATE_CHANGE_FAILURE;
  }
}
+
/* Deactivate @group: mark it inactive, recompute the globally selected
 * stream types (reconfiguring the outputs if that changed), disconnect the
 * uridecodebin signal handlers, shut the uridecodebin down to NULL and take
 * it out of the bin. The group's own ref on uridecodebin is kept so it can
 * be reused by a later activate_group ().
 *
 * must be called with PLAY_BIN_LOCK */
static gboolean
deactivate_group (GstPlayBin3 * playbin, GstSourceGroup * group)
{
  g_return_val_if_fail (group->active, FALSE);
  g_return_val_if_fail (group->valid, FALSE);

  GST_DEBUG_OBJECT (playbin, "unlinking group %p", group);

  GST_SOURCE_GROUP_LOCK (group);
  group->active = FALSE;
  group->playing = FALSE;
  group->group_id = GST_GROUP_ID_INVALID;

  group->selected_stream_types = 0;
  /* Update global selected_stream_types */
  playbin->selected_stream_types =
      playbin->groups[0].selected_stream_types | playbin->groups[1].
      selected_stream_types;
  if (playbin->active_stream_types != playbin->selected_stream_types)
    reconfigure_output (playbin);

#if 0
  /* delete any custom sinks we might have.
   * conditionally set them to null if they aren't inside playsink yet */
  if (group->audio_sink) {
    if (!gst_object_has_as_ancestor (GST_OBJECT_CAST (group->audio_sink),
            GST_OBJECT_CAST (playbin->playsink))) {
      gst_element_set_state (group->audio_sink, GST_STATE_NULL);
    }
    gst_object_unref (group->audio_sink);
  }
  group->audio_sink = NULL;
  if (group->video_sink) {
    if (!gst_object_has_as_ancestor (GST_OBJECT_CAST (group->video_sink),
            GST_OBJECT_CAST (playbin->playsink))) {
      gst_element_set_state (group->video_sink, GST_STATE_NULL);
    }
    gst_object_unref (group->video_sink);
  }
  group->video_sink = NULL;
  if (group->text_sink) {
    if (!gst_object_has_as_ancestor (GST_OBJECT_CAST (group->text_sink),
            GST_OBJECT_CAST (playbin->playsink))) {
      gst_element_set_state (group->text_sink, GST_STATE_NULL);
    }
    gst_object_unref (group->text_sink);
  }
  group->text_sink = NULL;
#endif

  if (group->uridecodebin) {
    REMOVE_SIGNAL (group->uridecodebin, group->select_stream_id);
    REMOVE_SIGNAL (group->uridecodebin, group->source_setup_id);
    REMOVE_SIGNAL (group->uridecodebin, group->about_to_finish_id);

    gst_element_set_state (group->uridecodebin, GST_STATE_NULL);
    gst_bin_remove (GST_BIN_CAST (playbin), group->uridecodebin);

    /* NOTE(review): pad-added/pad-removed are disconnected only after the
     * element went to NULL — presumably so pad-removed callbacks still run
     * during teardown; confirm before reordering */
    REMOVE_SIGNAL (group->uridecodebin, group->pad_added_id);
    REMOVE_SIGNAL (group->uridecodebin, group->pad_removed_id);
#if 0
    REMOVE_SIGNAL (group->urisourcebin, group->autoplug_factories_id);
    REMOVE_SIGNAL (group->urisourcebin, group->autoplug_select_id);
    REMOVE_SIGNAL (group->urisourcebin, group->autoplug_continue_id);
    REMOVE_SIGNAL (group->urisourcebin, group->autoplug_query_id);
#endif
  }

  GST_SOURCE_GROUP_UNLOCK (group);

  GST_DEBUG_OBJECT (playbin, "Done");

  return TRUE;
}
+
+ /* setup the next group to play, this assumes the next_group is valid and
+ * configured. It swaps out the current_group and activates the valid
+ * next_group. */
+ static GstStateChangeReturn
+ setup_next_source (GstPlayBin3 * playbin)
+ {
+ GstSourceGroup *new_group;
+ GstStateChangeReturn state_ret;
+
+ GST_DEBUG_OBJECT (playbin, "setup next source");
+
+ debug_groups (playbin);
+
+ /* see if there is a next group */
+ GST_PLAY_BIN3_LOCK (playbin);
+ new_group = playbin->next_group;
+ if (!new_group || !new_group->valid || new_group->active)
+ goto no_next_group;
+
+ /* activate the new group */
+ state_ret = activate_group (playbin, new_group);
+ if (state_ret == GST_STATE_CHANGE_FAILURE)
+ goto activate_failed;
+
+ GST_PLAY_BIN3_UNLOCK (playbin);
+
+ debug_groups (playbin);
+
+ return state_ret;
+
+ /* ERRORS */
+ no_next_group:
+ {
+ GST_DEBUG_OBJECT (playbin, "no next group");
+ GST_PLAY_BIN3_UNLOCK (playbin);
+ return GST_STATE_CHANGE_FAILURE;
+ }
+ activate_failed:
+ {
+ new_group->stream_changed_pending = FALSE;
+ GST_DEBUG_OBJECT (playbin, "activate failed");
+ new_group->valid = FALSE;
+ GST_PLAY_BIN3_UNLOCK (playbin);
+ return GST_STATE_CHANGE_FAILURE;
+ }
+ }
+
+ /* The group that is currently playing is copied again to the
+ * next_group so that it will start playing the next time.
+ */
+ static gboolean
+ save_current_group (GstPlayBin3 * playbin)
+ {
+ GstSourceGroup *curr_group;
+
+ GST_DEBUG_OBJECT (playbin, "save current group");
+
+ /* see if there is a current group */
+ GST_PLAY_BIN3_LOCK (playbin);
+ curr_group = playbin->curr_group;
+ if (curr_group && curr_group->valid && curr_group->active) {
+ /* unlink our pads with the sink */
+ deactivate_group (playbin, curr_group);
+ }
+ /* swap old and new */
+ playbin->curr_group = playbin->next_group;
+ playbin->next_group = curr_group;
+ GST_PLAY_BIN3_UNLOCK (playbin);
+
+ return TRUE;
+ }
+
+ /* clear the locked state from all groups. This function is called before a
+ * state change to NULL is performed on them. */
+ static gboolean
+ groups_set_locked_state (GstPlayBin3 * playbin, gboolean locked)
+ {
+ GST_DEBUG_OBJECT (playbin, "setting locked state to %d on all groups",
+ locked);
+
+ GST_PLAY_BIN3_LOCK (playbin);
+ GST_SOURCE_GROUP_LOCK (playbin->curr_group);
+ group_set_locked_state_unlocked (playbin, playbin->curr_group, locked);
+ GST_SOURCE_GROUP_UNLOCK (playbin->curr_group);
+ GST_SOURCE_GROUP_LOCK (playbin->next_group);
+ group_set_locked_state_unlocked (playbin, playbin->next_group, locked);
+ GST_SOURCE_GROUP_UNLOCK (playbin->next_group);
+ GST_PLAY_BIN3_UNLOCK (playbin);
+
+ return TRUE;
+ }
+
+ static void
+ gst_play_bin3_check_group_status (GstPlayBin3 * playbin)
+ {
+ if (playbin->activation_task)
+ gst_task_start (playbin->activation_task);
+ }
+
/* Body of the activation task: deactivates a stale next_group and
 * propagates a pending about-to-finish signal, then pauses itself until
 * gst_play_bin3_check_group_status () restarts it. */
static void
gst_play_bin3_activation_thread (GstPlayBin3 * playbin)
{
  GST_DEBUG_OBJECT (playbin, "starting");

  debug_groups (playbin);

  /* Check if next_group needs to be deactivated */
  GST_PLAY_BIN3_LOCK (playbin);
  if (playbin->next_group->active) {
    deactivate_group (playbin, playbin->next_group);
    playbin->next_group->valid = FALSE;
  }

  /* Is there a pending about-to-finish to be emitted ? */
  GST_SOURCE_GROUP_LOCK (playbin->curr_group);
  if (playbin->curr_group->pending_about_to_finish) {
    GST_LOG_OBJECT (playbin, "Propagating about-to-finish");
    playbin->curr_group->pending_about_to_finish = FALSE;
    /* drop the group lock before emitting — signal handlers may call back
     * into code that needs it (the playbin lock stays held) */
    GST_SOURCE_GROUP_UNLOCK (playbin->curr_group);
    /* This will activate the next source afterwards */
    emit_about_to_finish (playbin);
  } else
    GST_SOURCE_GROUP_UNLOCK (playbin->curr_group);

  GST_LOG_OBJECT (playbin, "Pausing task");
  /* activation_task may have been cleared by gst_play_bin3_stop () */
  if (playbin->activation_task)
    gst_task_pause (playbin->activation_task);
  GST_PLAY_BIN3_UNLOCK (playbin);

  GST_DEBUG_OBJECT (playbin, "done");
  return;
}
+
+ static gboolean
+ gst_play_bin3_start (GstPlayBin3 * playbin)
+ {
+ GST_DEBUG_OBJECT (playbin, "starting");
+
+ GST_PLAY_BIN3_LOCK (playbin);
+
+ if (playbin->activation_task == NULL) {
+ playbin->activation_task =
+ gst_task_new ((GstTaskFunction) gst_play_bin3_activation_thread,
+ playbin, NULL);
+ if (playbin->activation_task == NULL)
+ goto task_error;
+ gst_task_set_lock (playbin->activation_task, &playbin->activation_lock);
+ }
+ GST_LOG_OBJECT (playbin, "clearing shutdown flag");
+ g_atomic_int_set (&playbin->shutdown, 0);
+ do_async_start (playbin);
+
+ GST_PLAY_BIN3_UNLOCK (playbin);
+
+ return TRUE;
+
+ task_error:
+ {
+ GST_PLAY_BIN3_UNLOCK (playbin);
+ GST_ERROR_OBJECT (playbin, "Failed to create task");
+ return FALSE;
+ }
+ }
+
/* Shut playback down: raise the shutdown flag, wait for in-flight dynamic
 * callbacks to drain, then stop and join the activation task. */
static void
gst_play_bin3_stop (GstPlayBin3 * playbin)
{
  GstTask *task;

  GST_DEBUG_OBJECT (playbin, "stopping");

  /* FIXME unlock our waiting groups */
  GST_LOG_OBJECT (playbin, "setting shutdown flag");
  g_atomic_int_set (&playbin->shutdown, 1);

  /* wait for all callbacks to end by taking the lock.
   * No dynamic (critical) new callbacks will
   * be able to happen as we set the shutdown flag. */
  GST_PLAY_BIN3_DYN_LOCK (playbin);
  GST_LOG_OBJECT (playbin, "dynamic lock taken, we can continue shutdown");
  GST_PLAY_BIN3_DYN_UNLOCK (playbin);

  /* Stop the activation task */
  GST_PLAY_BIN3_LOCK (playbin);
  if ((task = playbin->activation_task)) {
    /* clear the field first so the task body (which checks it under the
     * playbin lock) will not touch the task again */
    playbin->activation_task = NULL;
    /* release the playbin lock: the task function takes it too and joining
     * while holding it would deadlock */
    GST_PLAY_BIN3_UNLOCK (playbin);

    gst_task_stop (task);

    /* Make sure task is not running */
    g_rec_mutex_lock (&playbin->activation_lock);
    g_rec_mutex_unlock (&playbin->activation_lock);

    /* Wait for task to finish and unref it */
    gst_task_join (task);
    gst_object_unref (task);

    GST_PLAY_BIN3_LOCK (playbin);
  }
  GST_PLAY_BIN3_UNLOCK (playbin);
}
+
/* GstElement::change_state implementation.
 *
 * Upward: READY->PAUSED starts the activation machinery and activates the
 * next source group (returning ASYNC since preroll happens later).
 * Downward: PAUSED->READY stops the machinery and saves the current group;
 * READY->NULL additionally replays the missed PAUSED->READY teardown when
 * the element never reached PAUSED (via the async_down label and do_save). */
static GstStateChangeReturn
gst_play_bin3_change_state (GstElement * element, GstStateChange transition)
{
  GstStateChangeReturn ret;
  GstPlayBin3 *playbin;
  gboolean do_save = FALSE;

  playbin = GST_PLAY_BIN3 (element);

  switch (transition) {
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      if (!gst_play_bin3_start (playbin))
        return GST_STATE_CHANGE_FAILURE;
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
    async_down:
      gst_play_bin3_stop (playbin);
      if (!do_save)
        break;
      /* fall through — do_save is only TRUE when we jumped back here from
       * the READY_TO_NULL case below; continue with its cleanup */
    case GST_STATE_CHANGE_READY_TO_NULL:
      /* we go async to PAUSED, so if that fails, we never make it to PAUSED
       * and we will never be called with the GST_STATE_CHANGE_PAUSED_TO_READY.
       * Make sure we do go through the same steps (see above) for
       * proper cleanup */
      if (!g_atomic_int_get (&playbin->shutdown)) {
        do_save = TRUE;
        goto async_down;
      }

      /* unlock so that all groups go to NULL */
      groups_set_locked_state (playbin, FALSE);
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
  if (ret == GST_STATE_CHANGE_FAILURE)
    goto failure;

  switch (transition) {
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      if ((ret = setup_next_source (playbin)) == GST_STATE_CHANGE_FAILURE)
        goto failure;
      /* preroll completes later, so report ASYNC instead of SUCCESS */
      if (ret == GST_STATE_CHANGE_SUCCESS)
        ret = GST_STATE_CHANGE_ASYNC;

      break;
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      do_async_done (playbin);
      /* FIXME Release audio device when we implement that */
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      playbin->is_live = FALSE;
      save_current_group (playbin);
      break;
    case GST_STATE_CHANGE_READY_TO_NULL:
    {
      guint i;

      /* also do missed state change down to READY */
      if (do_save)
        save_current_group (playbin);
      /* Deactivate the groups, set uridecodebin to NULL and unref it */
      for (i = 0; i < 2; i++) {
        if (playbin->groups[i].active && playbin->groups[i].valid) {
          deactivate_group (playbin, &playbin->groups[i]);
          playbin->groups[i].valid = FALSE;
        }

        if (playbin->groups[i].uridecodebin) {
          gst_element_set_state (playbin->groups[i].uridecodebin,
              GST_STATE_NULL);
          gst_object_unref (playbin->groups[i].uridecodebin);
          playbin->groups[i].uridecodebin = NULL;
        }

      }

      /* Set our sinks back to NULL, they might not be child of playbin */
      if (playbin->audio_sink)
        gst_element_set_state (playbin->audio_sink, GST_STATE_NULL);
      if (playbin->video_sink)
        gst_element_set_state (playbin->video_sink, GST_STATE_NULL);
      if (playbin->text_sink)
        gst_element_set_state (playbin->text_sink, GST_STATE_NULL);

      if (playbin->video_stream_combiner)
        gst_element_set_state (playbin->video_stream_combiner, GST_STATE_NULL);
      if (playbin->audio_stream_combiner)
        gst_element_set_state (playbin->audio_stream_combiner, GST_STATE_NULL);
      if (playbin->text_stream_combiner)
        gst_element_set_state (playbin->text_stream_combiner, GST_STATE_NULL);

      /* make sure the groups don't perform a state change anymore until we
       * enable them again */
      groups_set_locked_state (playbin, TRUE);
      break;
    }
    default:
      break;
  }

  /* a NO_PREROLL result while going to PAUSED means a live source */
  if (GST_STATE_TRANSITION_NEXT (transition) == GST_STATE_PAUSED)
    playbin->is_live = ret == GST_STATE_CHANGE_NO_PREROLL;

  if (ret == GST_STATE_CHANGE_NO_PREROLL)
    do_async_done (playbin);

  return ret;

  /* ERRORS */
failure:
  {
    do_async_done (playbin);

    if (transition == GST_STATE_CHANGE_READY_TO_PAUSED) {
      GstSourceGroup *curr_group;

      curr_group = playbin->curr_group;
      if (curr_group) {
        if (curr_group->active && curr_group->valid) {
          /* unlink our pads with the sink */
          deactivate_group (playbin, curr_group);
        }
        curr_group->valid = FALSE;
      }

      /* Swap current and next group back */
      playbin->curr_group = playbin->next_group;
      playbin->next_group = curr_group;
    }
    return ret;
  }
}
+
+ static void
+ gst_play_bin3_overlay_expose (GstVideoOverlay * overlay)
+ {
+ GstPlayBin3 *playbin = GST_PLAY_BIN3 (overlay);
+
+ gst_video_overlay_expose (GST_VIDEO_OVERLAY (playbin->playsink));
+ }
+
+ static void
+ gst_play_bin3_overlay_handle_events (GstVideoOverlay * overlay,
+ gboolean handle_events)
+ {
+ GstPlayBin3 *playbin = GST_PLAY_BIN3 (overlay);
+
+ gst_video_overlay_handle_events (GST_VIDEO_OVERLAY (playbin->playsink),
+ handle_events);
+ }
+
+ static void
+ gst_play_bin3_overlay_set_render_rectangle (GstVideoOverlay * overlay, gint x,
+ gint y, gint width, gint height)
+ {
+ GstPlayBin3 *playbin = GST_PLAY_BIN3 (overlay);
+
+ gst_video_overlay_set_render_rectangle (GST_VIDEO_OVERLAY (playbin->playsink),
+ x, y, width, height);
+ }
+
+ static void
+ gst_play_bin3_overlay_set_window_handle (GstVideoOverlay * overlay,
+ guintptr handle)
+ {
+ GstPlayBin3 *playbin = GST_PLAY_BIN3 (overlay);
+
+ gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (playbin->playsink),
+ handle);
+ }
+
+ static void
+ gst_play_bin3_overlay_init (gpointer g_iface, gpointer g_iface_data)
+ {
+ GstVideoOverlayInterface *iface = (GstVideoOverlayInterface *) g_iface;
+ iface->expose = gst_play_bin3_overlay_expose;
+ iface->handle_events = gst_play_bin3_overlay_handle_events;
+ iface->set_render_rectangle = gst_play_bin3_overlay_set_render_rectangle;
+ iface->set_window_handle = gst_play_bin3_overlay_set_window_handle;
+ }
+
+ static void
+ gst_play_bin3_navigation_send_event (GstNavigation * navigation,
+ GstStructure * structure)
+ {
+ GstPlayBin3 *playbin = GST_PLAY_BIN3 (navigation);
+
+ gst_navigation_send_event (GST_NAVIGATION (playbin->playsink), structure);
+ }
+
+ static void
+ gst_play_bin3_navigation_init (gpointer g_iface, gpointer g_iface_data)
+ {
+ GstNavigationInterface *iface = (GstNavigationInterface *) g_iface;
+
+ iface->send_event = gst_play_bin3_navigation_send_event;
+ }
+
+ static const GList *
+ gst_play_bin3_colorbalance_list_channels (GstColorBalance * balance)
+ {
+ GstPlayBin3 *playbin = GST_PLAY_BIN3 (balance);
+
+ return
+ gst_color_balance_list_channels (GST_COLOR_BALANCE (playbin->playsink));
+ }
+
+ static void
+ gst_play_bin3_colorbalance_set_value (GstColorBalance * balance,
+ GstColorBalanceChannel * channel, gint value)
+ {
+ GstPlayBin3 *playbin = GST_PLAY_BIN3 (balance);
+
+ gst_color_balance_set_value (GST_COLOR_BALANCE (playbin->playsink), channel,
+ value);
+ }
+
+ static gint
+ gst_play_bin3_colorbalance_get_value (GstColorBalance * balance,
+ GstColorBalanceChannel * channel)
+ {
+ GstPlayBin3 *playbin = GST_PLAY_BIN3 (balance);
+
+ return gst_color_balance_get_value (GST_COLOR_BALANCE (playbin->playsink),
+ channel);
+ }
+
+ static GstColorBalanceType
+ gst_play_bin3_colorbalance_get_balance_type (GstColorBalance * balance)
+ {
+ GstPlayBin3 *playbin = GST_PLAY_BIN3 (balance);
+
+ return
+ gst_color_balance_get_balance_type (GST_COLOR_BALANCE
+ (playbin->playsink));
+ }
+
+ static void
+ gst_play_bin3_colorbalance_init (gpointer g_iface, gpointer g_iface_data)
+ {
+ GstColorBalanceInterface *iface = (GstColorBalanceInterface *) g_iface;
+
+ iface->list_channels = gst_play_bin3_colorbalance_list_channels;
+ iface->set_value = gst_play_bin3_colorbalance_set_value;
+ iface->get_value = gst_play_bin3_colorbalance_get_value;
+ iface->get_balance_type = gst_play_bin3_colorbalance_get_balance_type;
+ }
+
+ gboolean
+ gst_play_bin3_custom_element_init (GstPlugin * plugin)
+ {
+ gboolean ret = TRUE;
+
+ GST_DEBUG_CATEGORY_INIT (gst_play_bin3_debug, "playbin3", 0, "play bin3");
+
+ playback_element_init (plugin);
+
+ if (g_getenv ("USE_PLAYBIN3"))
+ ret &= gst_element_register (plugin, "playbin", GST_RANK_NONE,
+ GST_TYPE_PLAY_BIN);
+ else
+ ret &= gst_element_register (plugin, "playbin3", GST_RANK_NONE,
+ GST_TYPE_PLAY_BIN);
+
+ return ret;
+ }
--- /dev/null
+ /* GStreamer
+ * Copyright (C) <2009> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+ #ifndef __GST_RAW_CAPS_H__
+ #define __GST_RAW_CAPS_H__
+
+ #include <gst/gst.h>
+
+ G_BEGIN_DECLS
+
/* Caps that the playback elements treat as "raw" (already decoded).
 * The Tizen TV profile additionally lists pango markup and plain text so
 * those subtitle formats pass through without further decoding. */
#ifdef TIZEN_PROFILE_TV
#define DEFAULT_RAW_CAPS \
    "video/x-raw(ANY); " \
    "audio/x-raw(ANY); " \
    "text/x-raw(ANY); " \
    "text/x-pango-markup; " \
    "text/plain; " \
    "subpicture/x-dvd; " \
    "subpicture/x-dvb; " \
    "subpicture/x-xsub; " \
    "subpicture/x-pgs; " \
    "closedcaption/x-cea-608; " \
    "closedcaption/x-cea-708"
#else
#define DEFAULT_RAW_CAPS \
    "video/x-raw(ANY); " \
    "audio/x-raw(ANY); " \
    "text/x-raw(ANY); " \
    "subpicture/x-dvd; " \
    "subpicture/x-dvb; " \
    "subpicture/x-xsub; " \
    "subpicture/x-pgs; " \
    "closedcaption/x-cea-608; " \
    "closedcaption/x-cea-708"
#endif
+
+ G_END_DECLS
+
+ #endif /* __GST_RAW_CAPS__ */
--- /dev/null
+ /* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /* TODO/FIXME:
+ *
+ * * BUFFERING MESSAGES
+ * ** How/Where do we deal with buffering messages from a new/prerolling
+ * source ? Ideally we want to re-use the same sourcebin ?
+ * ** Remember last buffering messages per source handler, if the SourceEntry
+ * group_id is the one being currently outputted on the source ghostpads,
+ * post the (last) buffering messages.
+ * If no group_id is being outputted (still prerolling), then output
+ * the messages directly
+ *
+ * * ASYNC HANDLING
+ * ** URIDECODEBIN3 is not async-aware.
+ *
+ * * GAPLESS HANDLING
+ * ** Correlate group_id and URI to know when/which stream is being outputted/started
+ */
+
+ /**
+ * SECTION:element-uridecodebin3
+ * @title: uridecodebin3
+ *
+ * Decodes data from a URI into raw media. It selects a source element that can
+ * handle the given #GstURIDecodeBin3:uri scheme and connects it to a decodebin.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ # include "config.h"
+ #endif
+
+ #include <string.h>
+
+ #include <gst/gst.h>
+ #include <gst/gst-i18n-plugin.h>
+ #include <gst/pbutils/missing-plugins.h>
+
+ #include "gstplay-enum.h"
+ #include "gstrawcaps.h"
+ #include "gstplaybackelements.h"
+ #include "gstplaybackutils.h"
+
+ #define GST_TYPE_URI_DECODE_BIN3 \
+ (gst_uri_decode_bin3_get_type())
+ #define GST_URI_DECODE_BIN3(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_URI_DECODE_BIN3,GstURIDecodeBin3))
+ #define GST_URI_DECODE_BIN3_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_URI_DECODE_BIN3,GstURIDecodeBin3Class))
+ #define GST_IS_URI_DECODE_BIN3(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_URI_DECODE_BIN3))
+ #define GST_IS_URI_DECODE_BIN3_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_URI_DECODE_BIN3))
+ #define GST_URI_DECODE_BIN3_CAST(obj) ((GstURIDecodeBin3 *) (obj))
+
+ typedef struct _GstSourceGroup GstSourceGroup;
+ typedef struct _GstURIDecodeBin3 GstURIDecodeBin3;
+ typedef struct _GstURIDecodeBin3Class GstURIDecodeBin3Class;
+
+ #define GST_URI_DECODE_BIN3_LOCK(dec) (g_mutex_lock(&((GstURIDecodeBin3*)(dec))->lock))
+ #define GST_URI_DECODE_BIN3_UNLOCK(dec) (g_mutex_unlock(&((GstURIDecodeBin3*)(dec))->lock))
+
typedef struct _GstPlayItem GstPlayItem;
typedef struct _GstSourceItem GstSourceItem;
typedef struct _GstSourceHandler GstSourceHandler;
typedef struct _OutputPad OutputPad;

/* A structure describing a play item, which travels through the elements
 * over time. */
struct _GstPlayItem
{
  /* The uridecodebin3 this item belongs to */
  GstURIDecodeBin3 *uridecodebin;

  /* Main URI */
  GstSourceItem *main_item;

  /* Auxiliary URI */
  /* FIXME : Replace by a list later */
  GstSourceItem *sub_item;

  /* The group_id used to identify this play item via STREAM_START events
   * This is the group_id which will be used externally (i.e. rewritten
   * to outgoing STREAM_START events and in emitted signals).
   * The urisourcebin-specific group_id is located in GstSourceItem */
  guint group_id;

  /* Is this play item the one being currently outputted by decodebin3
   * and on our source ghostpads */
  gboolean currently_outputted;
};

/* One URI of a play item and the urisourcebin feeding it */
struct _GstSourceItem
{
  /* The GstPlayItem to which this GstSourceItem belongs to */
  GstPlayItem *play_item;

  gchar *uri;

  /* The urisourcebin controlling this uri
   * Can be NULL */
  GstSourceHandler *handler;

  /* Last buffering information */
  gint last_perc;
  GstMessage *last_buffering_message;

  /* The groupid created by urisourcebin for this uri */
  guint internal_groupid;

  /* FIXME : Add tag lists and other uri-specific items here ? */
};

/* Structure wrapping everything related to a urisourcebin */
struct _GstSourceHandler
{
  /* The owning uridecodebin3 */
  GstURIDecodeBin3 *uridecodebin;

  GstElement *urisourcebin;

  /* Signal handlers */
  gulong pad_added_id;
  gulong pad_removed_id;
  gulong source_setup_id;
  gulong about_to_finish_id;

  /* TRUE if the controlled urisourcebin was added to uridecodebin */
  gboolean active;

  /* whether urisourcebin is drained or not.
   * Reset if/when setting a new URI */
  gboolean drained;

  /* Whether urisourcebin posted EOS on all pads and
   * there is no pending entry */
  gboolean is_eos;

  /* TRUE if the urisourcebin handles main item */
  gboolean is_main_source;

  /* buffering message stored for after switching */
  GstMessage *pending_buffering_msg;
};

/* Controls an output source pad */
struct _OutputPad
{
  /* The owning uridecodebin3 */
  GstURIDecodeBin3 *uridecodebin;

  /* decodebin3 source pad proxied by ghost_pad */
  GstPad *target_pad;
  GstPad *ghost_pad;

  /* Downstream event probe id */
  gulong probe_id;

  /* TRUE if the pad saw EOS. Reset to FALSE on STREAM_START */
  gboolean is_eos;

  /* The last seen (i.e. current) group_id
   * Can be (guint)-1 if no group_id was seen yet */
  guint current_group_id;
};
+
/**
 * GstURIDecodeBin3
 *
 * uridecodebin3 element struct
 */
struct _GstURIDecodeBin3
{
  GstBin parent_instance;

  GMutex lock;                  /* lock for constructing */

  /* Properties */
  GstElement *source;
  guint64 connection_speed;     /* In bits/sec (0 = unknown) */
  GstCaps *caps;
  guint64 buffer_duration;      /* When buffering, buffer duration (ns) */
  guint buffer_size;            /* When buffering, buffer size (bytes) */
  gboolean download;
  gboolean use_buffering;
  gboolean force_sw_decoders;   /* avoid hardware decoder elements when TRUE */
  guint64 ring_buffer_max_size;

  GList *play_items;            /* List of GstPlayItem ordered by time of
                                 * creation. Head of list is therefore the
                                 * current (or pending if initial) one being
                                 * outputted */
  GstPlayItem *current;         /* Currently active GstPlayItem. Can be NULL
                                 * if no entry is active yet (i.e. no source
                                 * pads) */

  /* sources.
   * FIXME : Replace by a more modular system later on */
  GstSourceHandler *main_handler;
  GstSourceHandler *sub_handler;

  /* URI handling
   * FIXME : Switch to a playlist-based API */
  gchar *uri;
  gboolean uri_changed;         /* TRUE if uri changed */
  gchar *suburi;
  gboolean suburi_changed;      /* TRUE if suburi changed */

  /* A global decodebin3 that's used to actually do decoding */
  GstElement *decodebin;

  /* db3 signals */
  gulong db_pad_added_id;
  gulong db_pad_removed_id;
  gulong db_select_stream_id;
  gulong db_about_to_finish_id;

  GList *output_pads;           /* List of OutputPad */

  GList *source_handlers;       /* List of SourceHandler */

  /* Whether we already signalled about-to-finish or not
   * FIXME: Track this by group-id ! */
  gboolean posted_about_to_finish;
};
+
+ static gint
+ gst_uridecodebin3_select_stream (GstURIDecodeBin3 * dbin,
+ GstStreamCollection * collection, GstStream * stream)
+ {
+ GST_LOG_OBJECT (dbin, "default select-stream, returning -1");
+
+ return -1;
+ }
+
/* Class structure; select_stream is the overridable default handler for
 * the "select-stream" signal */
struct _GstURIDecodeBin3Class
{
  GstBinClass parent_class;

    gint (*select_stream) (GstURIDecodeBin3 * dbin,
      GstStreamCollection * collection, GstStream * stream);
};
+
+ GST_DEBUG_CATEGORY_STATIC (gst_uri_decode_bin3_debug);
+ #define GST_CAT_DEFAULT gst_uri_decode_bin3_debug
+
/* signals — indices into gst_uri_decode_bin3_signals[] */
enum
{
  SIGNAL_SELECT_STREAM,
  SIGNAL_SOURCE_SETUP,
  SIGNAL_ABOUT_TO_FINISH,
  LAST_SIGNAL
};
+
+ #if 0
+ static GstStaticCaps raw_audio_caps = GST_STATIC_CAPS ("audio/x-raw(ANY)");
+ static GstStaticCaps raw_video_caps = GST_STATIC_CAPS ("video/x-raw(ANY)");
+ #endif
+
/* properties — default values (-1 presumably means "automatic/unlimited";
 * TODO confirm against the property installation code) */
#define DEFAULT_PROP_URI            NULL
#define DEFAULT_PROP_SUBURI            NULL
#define DEFAULT_CONNECTION_SPEED    0
#define DEFAULT_CAPS                (gst_static_caps_get (&default_raw_caps))
#define DEFAULT_BUFFER_DURATION     -1
#define DEFAULT_BUFFER_SIZE         -1
#define DEFAULT_DOWNLOAD            FALSE
#define DEFAULT_USE_BUFFERING       FALSE
#define DEFAULT_FORCE_SW_DECODERS   FALSE
#define DEFAULT_RING_BUFFER_MAX_SIZE 0
+
+ enum
+ {
+ PROP_0,
+ PROP_URI,
+ PROP_CURRENT_URI,
+ PROP_SUBURI,
+ PROP_CURRENT_SUBURI,
+ PROP_SOURCE,
+ PROP_CONNECTION_SPEED,
+ PROP_BUFFER_SIZE,
+ PROP_BUFFER_DURATION,
+ PROP_DOWNLOAD,
+ PROP_USE_BUFFERING,
++ PROP_FORCE_SW_DECODERS,
+ PROP_RING_BUFFER_MAX_SIZE,
+ PROP_CAPS
+ };
+
+ static guint gst_uri_decode_bin3_signals[LAST_SIGNAL] = { 0 };
+
+ static GstStaticCaps default_raw_caps = GST_STATIC_CAPS (DEFAULT_RAW_CAPS);
+
+ static GstStaticPadTemplate video_src_template =
+ GST_STATIC_PAD_TEMPLATE ("video_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+ static GstStaticPadTemplate audio_src_template =
+ GST_STATIC_PAD_TEMPLATE ("audio_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+ static GstStaticPadTemplate text_src_template =
+ GST_STATIC_PAD_TEMPLATE ("text_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+ static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+ GType gst_uri_decode_bin3_get_type (void);
+ #define gst_uri_decode_bin3_parent_class parent_class
+ G_DEFINE_TYPE (GstURIDecodeBin3, gst_uri_decode_bin3, GST_TYPE_BIN);
+
+ #define _do_init \
+ GST_DEBUG_CATEGORY_INIT (gst_uri_decode_bin3_debug, "uridecodebin3", 0, "URI decoder element 3"); \
+ playback_element_init (plugin);
+ GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (uridecodebin3, "uridecodebin3",
+ GST_RANK_NONE, GST_TYPE_URI_DECODE_BIN3, _do_init);
+
+ #define REMOVE_SIGNAL(obj,id) \
+ if (id) { \
+ g_signal_handler_disconnect (obj, id); \
+ id = 0; \
+ }
+
+ static void gst_uri_decode_bin3_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+ static void gst_uri_decode_bin3_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+ static void gst_uri_decode_bin3_finalize (GObject * obj);
+ static GstSourceHandler *new_source_handler (GstURIDecodeBin3 * uridecodebin,
+ gboolean is_main);
+
+ static GstStateChangeReturn gst_uri_decode_bin3_change_state (GstElement *
+ element, GstStateChange transition);
+ static gboolean gst_uri_decodebin3_send_event (GstElement * element,
+ GstEvent * event);
+
+ static gboolean
+ _gst_int_accumulator (GSignalInvocationHint * ihint,
+ GValue * return_accu, const GValue * handler_return, gpointer dummy)
+ {
+ gint res = g_value_get_int (handler_return);
+
+ g_value_set_int (return_accu, res);
+
+ if (res == -1)
+ return TRUE;
+
+ return FALSE;
+ }
+
+
+ static void
+ gst_uri_decode_bin3_class_init (GstURIDecodeBin3Class * klass)
+ {
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+
+ gobject_class = G_OBJECT_CLASS (klass);
+ gstelement_class = GST_ELEMENT_CLASS (klass);
+
+ gobject_class->set_property = gst_uri_decode_bin3_set_property;
+ gobject_class->get_property = gst_uri_decode_bin3_get_property;
+ gobject_class->finalize = gst_uri_decode_bin3_finalize;
+
+ g_object_class_install_property (gobject_class, PROP_URI,
+ g_param_spec_string ("uri", "URI", "URI to decode",
+ DEFAULT_PROP_URI, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_CURRENT_URI,
+ g_param_spec_string ("current-uri", "Current URI",
+ "The currently playing URI", NULL,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_SUBURI,
+ g_param_spec_string ("suburi", ".sub-URI", "Optional URI of a subtitle",
+ NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_CURRENT_SUBURI,
+ g_param_spec_string ("current-suburi", "Current .sub-URI",
+ "The currently playing URI of a subtitle",
+ NULL, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_SOURCE,
+ g_param_spec_object ("source", "Source", "Source object used",
+ GST_TYPE_ELEMENT, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_CONNECTION_SPEED,
+ g_param_spec_uint64 ("connection-speed", "Connection Speed",
+ "Network connection speed in kbps (0 = unknown)",
+ 0, G_MAXUINT64 / 1000, DEFAULT_CONNECTION_SPEED,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_BUFFER_SIZE,
+ g_param_spec_int ("buffer-size", "Buffer size (bytes)",
+ "Buffer size when buffering streams (-1 default value)",
+ -1, G_MAXINT, DEFAULT_BUFFER_SIZE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_BUFFER_DURATION,
+ g_param_spec_int64 ("buffer-duration", "Buffer duration (ns)",
+ "Buffer duration when buffering streams (-1 default value)",
+ -1, G_MAXINT64, DEFAULT_BUFFER_DURATION,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstURIDecodeBin3::download:
+ *
+ * For certain media type, enable download buffering.
+ */
+ g_object_class_install_property (gobject_class, PROP_DOWNLOAD,
+ g_param_spec_boolean ("download", "Download",
+ "Attempt download buffering when buffering network streams",
+ DEFAULT_DOWNLOAD, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstURIDecodeBin3::use-buffering:
+ *
+ * Emit BUFFERING messages based on low-/high-percent thresholds of the
+ * demuxed or parsed data.
+ * When download buffering is activated and used for the current media
+ * type, this property does nothing. Otherwise perform buffering on the
+ * demuxed or parsed media.
+ */
+ g_object_class_install_property (gobject_class, PROP_USE_BUFFERING,
+ g_param_spec_boolean ("use-buffering", "Use Buffering",
+ "Perform buffering on demuxed/parsed media",
+ DEFAULT_USE_BUFFERING, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
++ /**
++ * GstURIDecodeBin3::force-sw-decoders:
++ *
++ * While auto-plugging, if set to %TRUE, those decoders within
++ * "Hardware" klass will be ignored. Otherwise they will be tried.
++ *
++ * Since: 1.18
++ */
++ g_object_class_install_property (gobject_class, PROP_FORCE_SW_DECODERS,
++ g_param_spec_boolean ("force-sw-decoders", "Software Decoders Only",
++ "Use only software decoders to process streams",
++ DEFAULT_FORCE_SW_DECODERS,
++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++
++
+ /**
+ * GstURIDecodeBin3::ring-buffer-max-size
+ *
+ * The maximum size of the ring buffer in bytes. If set to 0, the ring
+ * buffer is disabled. Default is 0.
+ */
+ g_object_class_install_property (gobject_class, PROP_RING_BUFFER_MAX_SIZE,
+ g_param_spec_uint64 ("ring-buffer-max-size",
+ "Max. ring buffer size (bytes)",
+ "Max. amount of data in the ring buffer (bytes, 0 = ring buffer disabled)",
+ 0, G_MAXUINT, DEFAULT_RING_BUFFER_MAX_SIZE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_CAPS,
+ g_param_spec_boxed ("caps", "Caps",
+ "The caps on which to stop decoding. (NULL = default)",
+ GST_TYPE_CAPS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstURIDecodeBin3::select-stream:
+ * @decodebin: a #GstURIDecodeBin3
+ * @collection: a #GstStreamCollection
+ * @stream: a #GstStream
+ *
+ * This signal is emitted whenever @decodebin needs to decide whether
+ * to expose a @stream of a given @collection.
+ *
+ * Note that the preferred way to select streams is to listen to
+ * GST_MESSAGE_STREAM_COLLECTION on the bus and send a
+ * GST_EVENT_SELECT_STREAMS with the streams the user wants.
+ *
+ * Returns: 1 if the stream should be selected, 0 if it shouldn't be selected.
+ * A value of -1 (default) lets @decodebin decide what to do with the stream.
+ */
+ gst_uri_decode_bin3_signals[SIGNAL_SELECT_STREAM] =
+ g_signal_new ("select-stream", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstURIDecodeBin3Class, select_stream),
+ _gst_int_accumulator, NULL, NULL, G_TYPE_INT, 2,
+ GST_TYPE_STREAM_COLLECTION, GST_TYPE_STREAM);
+
+ /**
+ * GstURIDecodeBin3::source-setup:
+ * @bin: the uridecodebin.
+ * @source: source element
+ *
+ * This signal is emitted after a source element has been created, so
+ * it can be configured by setting additional properties (e.g. set a
+ * proxy server for an http source, or set the device and read speed for
+ * an audio cd source).
+ */
+ gst_uri_decode_bin3_signals[SIGNAL_SOURCE_SETUP] =
+ g_signal_new ("source-setup", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, G_TYPE_NONE, 1, GST_TYPE_ELEMENT);
+ /**
+ * GstURIDecodeBin3::about-to-finish:
+ *
+ * This signal is emitted when the data for the selected URI is
+ * entirely buffered and it is safe to specify another URI.
+ */
+ gst_uri_decode_bin3_signals[SIGNAL_ABOUT_TO_FINISH] =
+ g_signal_new ("about-to-finish", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, G_TYPE_NONE, 0, G_TYPE_NONE);
+
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &video_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &audio_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &text_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class, &src_template);
+ gst_element_class_set_static_metadata (gstelement_class,
+ "URI Decoder", "Generic/Bin/Decoder",
+ "Autoplug and decode an URI to raw media",
+ "Edward Hervey <edward@centricular.com>, Jan Schmidt <jan@centricular.com>");
+
+ gstelement_class->change_state = gst_uri_decode_bin3_change_state;
+ gstelement_class->send_event =
+ GST_DEBUG_FUNCPTR (gst_uri_decodebin3_send_event);
+
+ klass->select_stream = gst_uridecodebin3_select_stream;
+ }
+
+ static GstPadProbeReturn
+ db_src_probe (GstPad * pad, GstPadProbeInfo * info, OutputPad * output)
+ {
+ /* FIXME : IMPLEMENT */
+
+ /* EOS : Mark pad as EOS */
+
+ /* STREAM_START : Store group_id and check if currently active
+ * PlayEntry changed */
+
+ return GST_PAD_PROBE_OK;
+ }
+
+ static OutputPad *
+ add_output_pad (GstURIDecodeBin3 * dec, GstPad * target_pad)
+ {
+ OutputPad *output;
+ gchar *pad_name;
+ GstEvent *stream_start;
+
+ output = g_slice_new0 (OutputPad);
+
+ GST_LOG_OBJECT (dec, "Created output %p", output);
+
+ output->uridecodebin = dec;
+ output->target_pad = target_pad;
+ output->current_group_id = (guint) - 1;
+ pad_name = gst_pad_get_name (target_pad);
+ output->ghost_pad = gst_ghost_pad_new (pad_name, target_pad);
+ g_free (pad_name);
+
+ gst_pad_set_active (output->ghost_pad, TRUE);
+
+ stream_start = gst_pad_get_sticky_event (target_pad,
+ GST_EVENT_STREAM_START, 0);
+ if (stream_start) {
+ gst_pad_store_sticky_event (output->ghost_pad, stream_start);
+ gst_event_unref (stream_start);
+ } else {
+ GST_WARNING_OBJECT (target_pad,
+ "Exposing pad without stored stream-start event");
+ }
+
+ gst_element_add_pad (GST_ELEMENT (dec), output->ghost_pad);
+
+ output->probe_id =
+ gst_pad_add_probe (output->target_pad,
+ GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, (GstPadProbeCallback) db_src_probe,
+ output, NULL);
+
+ /* FIXME: LOCK TO PROTECT PAD LIST */
+ dec->output_pads = g_list_append (dec->output_pads, output);
+
+ return output;
+ }
+
+ static void
+ db_pad_added_cb (GstElement * element, GstPad * pad, GstURIDecodeBin3 * dec)
+ {
+ GST_DEBUG_OBJECT (dec, "Wrapping new pad %s:%s", GST_DEBUG_PAD_NAME (pad));
+
+ if (GST_PAD_IS_SRC (pad))
+ add_output_pad (dec, pad);
+ }
+
+ static void
+ db_pad_removed_cb (GstElement * element, GstPad * pad, GstURIDecodeBin3 * dec)
+ {
+ GList *tmp;
+ OutputPad *output = NULL;
+
+ if (!GST_PAD_IS_SRC (pad))
+ return;
+
+ GST_DEBUG_OBJECT (dec, "pad %s:%s", GST_DEBUG_PAD_NAME (pad));
+ /* FIXME: LOCK for list access */
+
+ for (tmp = dec->output_pads; tmp; tmp = tmp->next) {
+ OutputPad *cand = (OutputPad *) tmp->data;
+
+ if (cand->target_pad == pad) {
+ output = cand;
+ dec->output_pads = g_list_delete_link (dec->output_pads, tmp);
+ break;
+ }
+ }
+
+ if (output) {
+ GST_LOG_OBJECT (element, "Removing output %p", output);
+ /* Remove source ghost pad */
+ gst_ghost_pad_set_target ((GstGhostPad *) output->ghost_pad, NULL);
+ gst_element_remove_pad ((GstElement *) dec, output->ghost_pad);
+
+ /* FIXME : Update global/current PlayEntry group_id (did we switch ?) */
+
+ /* Remove event probe */
+ gst_pad_remove_probe (output->target_pad, output->probe_id);
+
+ g_slice_free (OutputPad, output);
+ }
+ }
+
+ static gint
+ db_select_stream_cb (GstElement * decodebin,
+ GstStreamCollection * collection, GstStream * stream,
+ GstURIDecodeBin3 * uridecodebin)
+ {
+ gint response = -1;
+
+ g_signal_emit (uridecodebin,
+ gst_uri_decode_bin3_signals[SIGNAL_SELECT_STREAM], 0, collection, stream,
+ &response);
+ return response;
+ }
+
+ static void
+ db_about_to_finish_cb (GstElement * decodebin, GstURIDecodeBin3 * uridecodebin)
+ {
+ if (!uridecodebin->posted_about_to_finish) {
+ uridecodebin->posted_about_to_finish = TRUE;
+ g_signal_emit (uridecodebin,
+ gst_uri_decode_bin3_signals[SIGNAL_ABOUT_TO_FINISH], 0, NULL);
+ }
+ }
+
+ static void
+ gst_uri_decode_bin3_init (GstURIDecodeBin3 * dec)
+ {
+ g_mutex_init (&dec->lock);
+
+ dec->uri = DEFAULT_PROP_URI;
+ dec->suburi = DEFAULT_PROP_SUBURI;
+ dec->connection_speed = DEFAULT_CONNECTION_SPEED;
+ dec->caps = DEFAULT_CAPS;
+ dec->buffer_duration = DEFAULT_BUFFER_DURATION;
+ dec->buffer_size = DEFAULT_BUFFER_SIZE;
+ dec->download = DEFAULT_DOWNLOAD;
+ dec->use_buffering = DEFAULT_USE_BUFFERING;
+ dec->ring_buffer_max_size = DEFAULT_RING_BUFFER_MAX_SIZE;
+
+ dec->decodebin = gst_element_factory_make ("decodebin3", NULL);
+ gst_bin_add (GST_BIN_CAST (dec), dec->decodebin);
+ dec->db_pad_added_id =
+ g_signal_connect (dec->decodebin, "pad-added",
+ G_CALLBACK (db_pad_added_cb), dec);
+ dec->db_pad_removed_id =
+ g_signal_connect (dec->decodebin, "pad-removed",
+ G_CALLBACK (db_pad_removed_cb), dec);
+ dec->db_select_stream_id =
+ g_signal_connect (dec->decodebin, "select-stream",
+ G_CALLBACK (db_select_stream_cb), dec);
+ dec->db_about_to_finish_id =
+ g_signal_connect (dec->decodebin, "about-to-finish",
+ G_CALLBACK (db_about_to_finish_cb), dec);
+
+ GST_OBJECT_FLAG_SET (dec, GST_ELEMENT_FLAG_SOURCE);
+ gst_bin_set_suppressed_flags (GST_BIN (dec),
+ GST_ELEMENT_FLAG_SOURCE | GST_ELEMENT_FLAG_SINK);
+ }
+
+ static void
+ gst_uri_decode_bin3_finalize (GObject * obj)
+ {
+ GstURIDecodeBin3 *dec = GST_URI_DECODE_BIN3 (obj);
+
+ g_mutex_clear (&dec->lock);
+ g_free (dec->uri);
+ g_free (dec->suburi);
+
+ G_OBJECT_CLASS (parent_class)->finalize (obj);
+ }
+
+ static GstStateChangeReturn
+ activate_source_item (GstSourceItem * item)
+ {
+ GstSourceHandler *handler = item->handler;
+
+ if (handler == NULL) {
+ GST_WARNING ("Can't activate item without a handler");
+ return GST_STATE_CHANGE_FAILURE;
+ }
+
+ g_object_set (handler->urisourcebin, "uri", item->uri, NULL);
+ if (!handler->active) {
+ gst_bin_add ((GstBin *) handler->uridecodebin, handler->urisourcebin);
+ /* if (!gst_element_sync_state_with_parent (handler->urisourcebin)) */
+ /* return GST_STATE_CHANGE_FAILURE; */
+ handler->active = TRUE;
+ }
+
+ return GST_STATE_CHANGE_SUCCESS;
+ }
+
+ static void
+ src_pad_added_cb (GstElement * element, GstPad * pad,
+ GstSourceHandler * handler)
+ {
+ GstURIDecodeBin3 *uridecodebin;
+ GstPad *sinkpad = NULL;
+ GstPadLinkReturn res;
+ GstPlayItem *current_play_item;
+ GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
+
+ uridecodebin = handler->uridecodebin;
+ current_play_item = uridecodebin->current;
+
+ GST_DEBUG_OBJECT (uridecodebin,
+ "New pad %" GST_PTR_FORMAT " from source %" GST_PTR_FORMAT, pad, element);
+
++#ifdef TIZEN_FEATURE_U3_AVOID_DEADLOCK
++ /* to sync the process with typefind task */
++ gst_element_get_state (GST_ELEMENT_CAST (uridecodebin), NULL, NULL, 1 * GST_SECOND);
++#endif
++
+ /* FIXME: Add probe to unify group_id and detect EOS */
+
+ /* Try to link to main sink pad only if it's from a main handler */
+ if (handler->is_main_source) {
+ sinkpad = gst_element_get_static_pad (uridecodebin->decodebin, "sink");
+ if (gst_pad_is_linked (sinkpad)) {
+ gst_object_unref (sinkpad);
+ sinkpad = NULL;
+ }
+ }
+
+ if (sinkpad == NULL)
+ sinkpad =
+ gst_element_request_pad_simple (uridecodebin->decodebin, "sink_%u");
+
+ if (sinkpad) {
+ GST_DEBUG_OBJECT (uridecodebin,
+ "Linking %" GST_PTR_FORMAT " to %" GST_PTR_FORMAT, pad, sinkpad);
+ res = gst_pad_link (pad, sinkpad);
+ gst_object_unref (sinkpad);
+ if (GST_PAD_LINK_FAILED (res))
+ goto link_failed;
+ }
+
+ /* Activate sub_item after the main source activation was finished */
+ if (handler->is_main_source && current_play_item->sub_item
+ && !current_play_item->sub_item->handler) {
+ current_play_item->sub_item->handler =
+ new_source_handler (uridecodebin, FALSE);
+ ret = activate_source_item (current_play_item->sub_item);
+ if (ret == GST_STATE_CHANGE_FAILURE)
+ goto sub_item_activation_failed;
+ }
+
+ return;
+
+ link_failed:
+ {
+ GST_ERROR_OBJECT (uridecodebin,
+ "failed to link pad %s:%s to decodebin, reason %s (%d)",
+ GST_DEBUG_PAD_NAME (pad), gst_pad_link_get_name (res), res);
+ return;
+ }
+ sub_item_activation_failed:
+ {
+ GST_ERROR_OBJECT (uridecodebin,
+ "failed to activate subtitle playback item");
+ return;
+ }
+ }
+
+ static void
+ src_pad_removed_cb (GstElement * element, GstPad * pad,
+ GstSourceHandler * handler)
+ {
+ /* FIXME : IMPLEMENT */
+ }
+
+ static void
+ src_source_setup_cb (GstElement * element, GstElement * source,
+ GstSourceHandler * handler)
+ {
+ g_signal_emit (handler->uridecodebin,
+ gst_uri_decode_bin3_signals[SIGNAL_SOURCE_SETUP], 0, source, NULL);
+ }
+
+ static void
+ src_about_to_finish_cb (GstElement * element, GstSourceHandler * handler)
+ {
+ /* FIXME : check if all sources are done */
+ if (!handler->uridecodebin->posted_about_to_finish) {
+ handler->uridecodebin->posted_about_to_finish = TRUE;
+ g_signal_emit (handler->uridecodebin,
+ gst_uri_decode_bin3_signals[SIGNAL_ABOUT_TO_FINISH], 0, NULL);
+ }
+ }
+
+ static GstSourceHandler *
+ new_source_handler (GstURIDecodeBin3 * uridecodebin, gboolean is_main)
+ {
+ GstSourceHandler *handler;
+
+ handler = g_slice_new0 (GstSourceHandler);
+
+ handler->uridecodebin = uridecodebin;
+ handler->is_main_source = is_main;
+ handler->urisourcebin = gst_element_factory_make ("urisourcebin", NULL);
+ /* Set pending properties */
+ g_object_set (handler->urisourcebin,
+ "connection-speed", uridecodebin->connection_speed / 1000,
+ "download", uridecodebin->download,
+ "use-buffering", uridecodebin->use_buffering,
+ "buffer-duration", uridecodebin->buffer_duration,
+ "buffer-size", uridecodebin->buffer_size,
+ "ring-buffer-max-size", uridecodebin->ring_buffer_max_size, NULL);
+
+ handler->pad_added_id =
+ g_signal_connect (handler->urisourcebin, "pad-added",
+ (GCallback) src_pad_added_cb, handler);
+ handler->pad_removed_id =
+ g_signal_connect (handler->urisourcebin, "pad-removed",
+ (GCallback) src_pad_removed_cb, handler);
+ handler->source_setup_id =
+ g_signal_connect (handler->urisourcebin, "source-setup",
+ (GCallback) src_source_setup_cb, handler);
+ handler->about_to_finish_id =
+ g_signal_connect (handler->urisourcebin, "about-to-finish",
+ (GCallback) src_about_to_finish_cb, handler);
+
+ uridecodebin->source_handlers =
+ g_list_append (uridecodebin->source_handlers, handler);
+
+ return handler;
+ }
+
+ static void
+ gst_uri_decode_bin3_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+ GstURIDecodeBin3 *dec = GST_URI_DECODE_BIN3 (object);
+
+ switch (prop_id) {
+ case PROP_URI:
+ if (dec->uri)
+ g_free (dec->uri);
+ dec->uri = g_value_dup_string (value);
+ break;
+ case PROP_SUBURI:
+ if (dec->suburi)
+ g_free (dec->suburi);
+ dec->suburi = g_value_dup_string (value);
+ break;
+ case PROP_CONNECTION_SPEED:
+ GST_URI_DECODE_BIN3_LOCK (dec);
+ dec->connection_speed = g_value_get_uint64 (value) * 1000;
+ GST_URI_DECODE_BIN3_UNLOCK (dec);
+ break;
+ case PROP_BUFFER_SIZE:
+ dec->buffer_size = g_value_get_int (value);
+ break;
+ case PROP_BUFFER_DURATION:
+ dec->buffer_duration = g_value_get_int64 (value);
+ break;
+ case PROP_DOWNLOAD:
+ dec->download = g_value_get_boolean (value);
+ break;
+ case PROP_USE_BUFFERING:
+ dec->use_buffering = g_value_get_boolean (value);
+ break;
++ case PROP_FORCE_SW_DECODERS:
++ if (dec->decodebin) {
++ g_object_set_property (G_OBJECT (dec->decodebin), "force-sw-decoders",
++ value);
++ }
++ break;
+ case PROP_RING_BUFFER_MAX_SIZE:
+ dec->ring_buffer_max_size = g_value_get_uint64 (value);
+ break;
+ case PROP_CAPS:
+ GST_OBJECT_LOCK (dec);
+ if (dec->caps)
+ gst_caps_unref (dec->caps);
+ dec->caps = g_value_dup_boxed (value);
+ GST_OBJECT_UNLOCK (dec);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static void
+ gst_uri_decode_bin3_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+ {
+ GstURIDecodeBin3 *dec = GST_URI_DECODE_BIN3 (object);
+
+ switch (prop_id) {
+ case PROP_URI:
+ {
+ g_value_set_string (value, dec->uri);
+ break;
+ }
+ case PROP_CURRENT_URI:
+ {
+ if (dec->current && dec->current->main_item) {
+ g_value_set_string (value, dec->current->main_item->uri);
+ } else {
+ g_value_set_string (value, NULL);
+ }
+ break;
+ }
+ case PROP_SUBURI:
+ {
+ g_value_set_string (value, dec->suburi);
+ break;
+ }
+ case PROP_CURRENT_SUBURI:
+ {
+ if (dec->current && dec->current->sub_item) {
+ g_value_set_string (value, dec->current->sub_item->uri);
+ } else {
+ g_value_set_string (value, NULL);
+ }
+ break;
+ }
+ case PROP_SOURCE:
+ {
+ GST_OBJECT_LOCK (dec);
+ g_value_set_object (value, dec->source);
+ GST_OBJECT_UNLOCK (dec);
+ break;
+ }
+ case PROP_CONNECTION_SPEED:
+ GST_URI_DECODE_BIN3_LOCK (dec);
+ g_value_set_uint64 (value, dec->connection_speed / 1000);
+ GST_URI_DECODE_BIN3_UNLOCK (dec);
+ break;
+ case PROP_BUFFER_SIZE:
+ GST_OBJECT_LOCK (dec);
+ g_value_set_int (value, dec->buffer_size);
+ GST_OBJECT_UNLOCK (dec);
+ break;
+ case PROP_BUFFER_DURATION:
+ GST_OBJECT_LOCK (dec);
+ g_value_set_int64 (value, dec->buffer_duration);
+ GST_OBJECT_UNLOCK (dec);
+ break;
+ case PROP_DOWNLOAD:
+ g_value_set_boolean (value, dec->download);
+ break;
+ case PROP_USE_BUFFERING:
+ g_value_set_boolean (value, dec->use_buffering);
+ break;
++ case PROP_FORCE_SW_DECODERS:
++ if (dec->decodebin) {
++ g_object_get_property (G_OBJECT (dec->decodebin), "force-sw-decoders",
++ value);
++ }
++ break;
+ case PROP_RING_BUFFER_MAX_SIZE:
+ g_value_set_uint64 (value, dec->ring_buffer_max_size);
+ break;
+ case PROP_CAPS:
+ GST_OBJECT_LOCK (dec);
+ g_value_set_boxed (value, dec->caps);
+ GST_OBJECT_UNLOCK (dec);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static void
+ free_source_handler (GstURIDecodeBin3 * uridecodebin,
+ GstSourceHandler * handler)
+ {
+ GST_LOG_OBJECT (uridecodebin, "source handler %p", handler);
+ if (handler->active) {
+ GST_LOG_OBJECT (uridecodebin, "Removing %" GST_PTR_FORMAT,
+ handler->urisourcebin);
+ gst_element_set_state (handler->urisourcebin, GST_STATE_NULL);
+ gst_bin_remove ((GstBin *) uridecodebin, handler->urisourcebin);
+ }
+ uridecodebin->source_handlers =
+ g_list_remove (uridecodebin->source_handlers, handler);
+ g_slice_free (GstSourceHandler, handler);
+ }
+
+ static GstSourceItem *
+ new_source_item (GstURIDecodeBin3 * dec, GstPlayItem * item, gchar * uri)
+ {
+ GstSourceItem *sourceitem = g_slice_new0 (GstSourceItem);
+
+ sourceitem->play_item = item;
+ sourceitem->uri = uri;
+
+ return sourceitem;
+ }
+
+ static void
+ free_source_item (GstURIDecodeBin3 * uridecodebin, GstSourceItem * item)
+ {
+ GST_LOG_OBJECT (uridecodebin, "source item %p", item);
+ if (item->handler)
+ free_source_handler (uridecodebin, item->handler);
+ g_slice_free (GstSourceItem, item);
+ }
+
+ static GstPlayItem *
+ new_play_item (GstURIDecodeBin3 * dec, gchar * uri, gchar * suburi)
+ {
+ GstPlayItem *item = g_slice_new0 (GstPlayItem);
+
+ item->uridecodebin = dec;
+ item->main_item = new_source_item (dec, item, uri);
+ if (suburi)
+ item->sub_item = new_source_item (dec, item, suburi);
+
+ return item;
+ }
+
+ static void
+ free_play_item (GstURIDecodeBin3 * dec, GstPlayItem * item)
+ {
+ GST_LOG_OBJECT (dec, "play item %p", item);
+ if (item->main_item)
+ free_source_item (dec, item->main_item);
+ if (item->sub_item)
+ free_source_item (dec, item->sub_item);
+
+ g_slice_free (GstPlayItem, item);
+ }
+
+ /* Sync source handlers for the given play item. Might require creating/removing some
+ * and/or configure the handlers accordingly */
+ static GstStateChangeReturn
+ assign_handlers_to_item (GstURIDecodeBin3 * dec, GstPlayItem * item)
+ {
+ GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
+
+ /* FIXME : Go over existing handlers to see if we can assign some to the
+ * given item */
+
+ /* Create missing handlers */
+ if (item->main_item->handler == NULL) {
+ item->main_item->handler = new_source_handler (dec, TRUE);
+ ret = activate_source_item (item->main_item);
+ if (ret == GST_STATE_CHANGE_FAILURE)
+ return ret;
+ }
+
+ return ret;
+ }
+
+ /* Called to activate the next play item */
+ static GstStateChangeReturn
+ activate_next_play_item (GstURIDecodeBin3 * dec)
+ {
+ GstPlayItem *item;
+ GstStateChangeReturn ret;
+
+ /* If there is no current play entry, create one from the uri/suburi
+ * FIXME : Use a playlist API in the future */
+ item = new_play_item (dec, dec->uri, dec->suburi);
+
+ ret = assign_handlers_to_item (dec, item);
+ if (ret == GST_STATE_CHANGE_FAILURE) {
+ free_play_item (dec, item);
+ return ret;
+ }
+
+ dec->play_items = g_list_append (dec->play_items, item);
+ dec->current = dec->play_items->data;
+
+ return ret;
+ }
+
+ static void
+ free_play_items (GstURIDecodeBin3 * dec)
+ {
+ GList *tmp;
+
+ for (tmp = dec->play_items; tmp; tmp = tmp->next) {
+ GstPlayItem *item = (GstPlayItem *) tmp->data;
+ free_play_item (dec, item);
+ }
+
+ g_list_free (dec->play_items);
+ dec->play_items = NULL;
+ }
+
+ static GstStateChangeReturn
+ gst_uri_decode_bin3_change_state (GstElement * element,
+ GstStateChange transition)
+ {
+ GstStateChangeReturn ret;
+ GstURIDecodeBin3 *uridecodebin = (GstURIDecodeBin3 *) element;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ g_object_set (uridecodebin->decodebin, "caps", uridecodebin->caps, NULL);
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ ret = activate_next_play_item (uridecodebin);
+ if (ret == GST_STATE_CHANGE_FAILURE)
+ goto failure;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ if (ret == GST_STATE_CHANGE_FAILURE)
+ goto failure;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ /* FIXME: Cleanup everything */
+ free_play_items (uridecodebin);
+ /* Free play item */
+ uridecodebin->posted_about_to_finish = FALSE;
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+
+ /* ERRORS */
+ failure:
+ {
+ if (transition == GST_STATE_CHANGE_READY_TO_PAUSED)
+ free_play_items (uridecodebin);
+ return ret;
+ }
+ }
+
+ static gboolean
+ gst_uri_decodebin3_send_event (GstElement * element, GstEvent * event)
+ {
+ GstURIDecodeBin3 *self = GST_URI_DECODE_BIN3 (element);
+
+ if (GST_EVENT_IS_UPSTREAM (event) && self->decodebin)
+ return gst_element_send_event (self->decodebin, event);
+
+ return GST_ELEMENT_CLASS (parent_class)->send_event (element, event);
+ }
--- /dev/null
- PROP_VIDEOFPS
+ /* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) 2004 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
+ * Copyright (C) 2006 Tim-Philipp Müller <tim centricular net>
+ * Copyright (C) 2016 Philippe Normand <pnormand@igalia.com>
+ * Copyright (C) 2016 Jan Schmidt <jan@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <stdio.h>
+ #include <stdlib.h>
+ #include <string.h>
+ #include <sys/types.h>
+ #include <glib.h>
+
+ #include "gstsubparse.h"
+
+ #include "gstssaparse.h"
+ #include "samiparse.h"
+ #include "tmplayerparse.h"
+ #include "mpl2parse.h"
+ #include "qttextparse.h"
+ #include "gstsubparseelements.h"
+
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++#define SUBPARSE_SEEK_GET_LOCK(elem) (&elem->seek_lock)
++#define SUBPARSE_SEEK_LOCK(elem) g_mutex_lock(SUBPARSE_SEEK_GET_LOCK(elem))
++#define SUBPARSE_SEEK_TRYLOCK(elem) g_mutex_trylock(SUBPARSE_SEEK_GET_LOCK(elem))
++#define SUBPARSE_SEEK_UNLOCK(elem) g_mutex_unlock(SUBPARSE_SEEK_GET_LOCK(elem))
++#endif
+ #define DEFAULT_ENCODING NULL
+ #define ATTRIBUTE_REGEX "\\s?[a-zA-Z0-9\\. \t\\(\\)]*"
+ static const gchar *allowed_srt_tags[] = { "i", "b", "u", NULL };
+ static const gchar *allowed_vtt_tags[] =
+ { "i", "b", "c", "u", "v", "ruby", "rt", NULL };
+
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++#define DEFAULT_CURRENT_LANGUAGE NULL
++#endif
++#ifdef TIZEN_FEATURE_SUBPARSE_DROP_OUT_OF_SEGMENT
++#define DEFAULT_DROP_OUT_OF_SEGMENT TRUE
++#endif
+ enum
+ {
+ PROP_0,
+ PROP_ENCODING,
-
++ PROP_VIDEOFPS,
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++ PROP_EXTSUB_CURRENT_LANGUAGE,
++#endif
++#ifdef TIZEN_FEATURE_SUBPARSE_DROP_OUT_OF_SEGMENT
++ PROP_DROP_OUT_OF_SEGMENT,
++#endif
+ };
++#ifdef TIZEN_FEATURE_HLS_WEBVTT
++#define MPEGTIME_TO_GSTTIME(t) ((t) * (guint64)100000 / 9)
++#endif
+
+ static void
+ gst_sub_parse_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+ static void
+ gst_sub_parse_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+
+ static GstStaticPadTemplate sink_templ = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-subtitle; application/x-subtitle-sami; "
+ "application/x-subtitle-tmplayer; application/x-subtitle-mpl2; "
+ "application/x-subtitle-dks; application/x-subtitle-qttext;"
+ "application/x-subtitle-lrc; application/x-subtitle-vtt")
+ );
+
+ static GstStaticPadTemplate src_templ = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("text/x-raw, format= { pango-markup, utf8 }")
+ );
+
+
+ static gboolean gst_sub_parse_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+ static gboolean gst_sub_parse_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+ static gboolean gst_sub_parse_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+
+ static GstStateChangeReturn gst_sub_parse_change_state (GstElement * element,
+ GstStateChange transition);
+
+ static GstFlowReturn gst_sub_parse_chain (GstPad * sinkpad, GstObject * parent,
+ GstBuffer * buf);
-
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++static gboolean gst_sub_parse_check_byte_seekability (GstSubParse * subparse);
++#endif
+ #define gst_sub_parse_parent_class parent_class
+ G_DEFINE_TYPE (GstSubParse, gst_sub_parse, GST_TYPE_ELEMENT);
+
+ GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (subparse, "subparse",
+ GST_RANK_PRIMARY, GST_TYPE_SUBPARSE, sub_parse_element_init (plugin))
+
+
+ /* GObject dispose handler: releases everything the parser owns
+ * (encoding strings, the GstAdapter, the accumulated text buffer),
+ * NULLing each pointer so a repeated dispose is harmless, then
+ * chains up to the parent class. */
+ static void gst_sub_parse_dispose (GObject * object)
+ {
+ GstSubParse *subparse = GST_SUBPARSE (object);
+
+ GST_DEBUG_OBJECT (subparse, "cleaning up subtitle parser");
+
+ if (subparse->encoding) {
+ g_free (subparse->encoding);
+ subparse->encoding = NULL;
+ }
+
+ if (subparse->detected_encoding) {
+ g_free (subparse->detected_encoding);
+ subparse->detected_encoding = NULL;
+ }
+
+ if (subparse->adapter) {
+ g_object_unref (subparse->adapter);
+ subparse->adapter = NULL;
+ }
+
+ if (subparse->textbuf) {
+ g_string_free (subparse->textbuf, TRUE);
+ subparse->textbuf = NULL;
+ }
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++ /* Tizen: drop the current-language string and the seek lock created
++ * in _init. NOTE(review): g_mutex_clear() on an already-cleared mutex
++ * is undefined if dispose runs twice — confirm dispose is single-shot
++ * here, or guard it. */
++ g_free (subparse->state.current_language);
++ subparse->state.current_language = NULL;
+
++ g_mutex_clear (&subparse->seek_lock);
++#endif
+ GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
+ }
+
+ /* Class initializer: wires up GObject vfuncs and property accessors,
+ * registers the static sink/src pad templates and element metadata,
+ * installs the element state-change handler, and declares the
+ * "subtitle-encoding" and "video-fps" properties (plus Tizen-only
+ * "current-language" and "drop-out-of-segment" when enabled). */
+ static void
+ gst_sub_parse_class_init (GstSubParseClass * klass)
+ {
+ GObjectClass *object_class = G_OBJECT_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
+ object_class->dispose = gst_sub_parse_dispose;
+ object_class->set_property = gst_sub_parse_set_property;
+ object_class->get_property = gst_sub_parse_get_property;
+
+ gst_element_class_add_static_pad_template (element_class, &sink_templ);
+ gst_element_class_add_static_pad_template (element_class, &src_templ);
+ gst_element_class_set_static_metadata (element_class,
+ "Subtitle parser", "Codec/Parser/Subtitle",
+ "Parses subtitle (.sub) files into text streams",
+ "Gustavo J. A. M. Carneiro <gjc@inescporto.pt>, "
+ "GStreamer maintainers <gstreamer-devel@lists.freedesktop.org>");
+
+ element_class->change_state = gst_sub_parse_change_state;
+
+ g_object_class_install_property (object_class, PROP_ENCODING,
+ g_param_spec_string ("subtitle-encoding", "subtitle charset encoding",
+ "Encoding to assume if input subtitles are not in UTF-8 or any other "
+ "Unicode encoding. If not set, the GST_SUBTITLE_ENCODING environment "
+ "variable will be checked for an encoding to use. If that is not set "
+ "either, ISO-8859-15 will be assumed.", DEFAULT_ENCODING,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (object_class, PROP_VIDEOFPS,
+ gst_param_spec_fraction ("video-fps", "Video framerate",
+ "Framerate of the video stream. This is needed by some subtitle "
+ "formats to synchronize subtitles and video properly. If not set "
+ "and the subtitle format requires it subtitles may be out of sync.",
+ 0, 1, G_MAXINT, 1, 24000, 1001,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++ /* Tizen: language selection for external (e.g. SAMI) subtitles. */
++ g_object_class_install_property (object_class, PROP_EXTSUB_CURRENT_LANGUAGE,
++ g_param_spec_string ("current-language", "Current language",
++ "Current language of the subtitle in external subtitle case.",
++ DEFAULT_CURRENT_LANGUAGE,
++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++#endif
++
++#ifdef TIZEN_FEATURE_SUBPARSE_DROP_OUT_OF_SEGMENT
++ /* Tizen: toggle for segment clipping of parsed cues. */
++ g_object_class_install_property (object_class, PROP_DROP_OUT_OF_SEGMENT,
++ g_param_spec_boolean ("drop-out-of-segment",
++ "Drop out-of-segment buffers",
++ "Drop and don't send out-of-segment buffers",
++ DEFAULT_DROP_OUT_OF_SEGMENT,
++ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
++#endif
+ }
+
+ /* Instance initializer: creates the always-present sink/src pads from
+ * the static templates, attaches chain/event/query handlers, and seeds
+ * the parser state (unknown format, TIME segment, default encoding,
+ * 24000/1001 fps default matching the "video-fps" property default). */
+ static void
+ gst_sub_parse_init (GstSubParse * subparse)
+ {
+ subparse->sinkpad = gst_pad_new_from_static_template (&sink_templ, "sink");
+ gst_pad_set_chain_function (subparse->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_sub_parse_chain));
+ gst_pad_set_event_function (subparse->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_sub_parse_sink_event));
+ gst_element_add_pad (GST_ELEMENT (subparse), subparse->sinkpad);
+
+ subparse->srcpad = gst_pad_new_from_static_template (&src_templ, "src");
+ gst_pad_set_event_function (subparse->srcpad,
+ GST_DEBUG_FUNCPTR (gst_sub_parse_src_event));
+ gst_pad_set_query_function (subparse->srcpad,
+ GST_DEBUG_FUNCPTR (gst_sub_parse_src_query));
+ gst_element_add_pad (GST_ELEMENT (subparse), subparse->srcpad);
+
+ subparse->textbuf = g_string_new (NULL);
+ subparse->parser_type = GST_SUB_PARSE_FORMAT_UNKNOWN;
+ subparse->strip_pango_markup = FALSE;
+ subparse->flushing = FALSE;
+ gst_segment_init (&subparse->segment, GST_FORMAT_TIME);
+ subparse->need_segment = TRUE;
+ subparse->encoding = g_strdup (DEFAULT_ENCODING);
+ subparse->detected_encoding = NULL;
+ subparse->adapter = gst_adapter_new ();
+
+ subparse->fps_n = 24000;
+ subparse->fps_d = 1001;
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++ /* Tizen: language tracking state + the lock serializing seek handling
++ * (cleared again in dispose). */
++ subparse->state.language_list = NULL;
++ subparse->state.current_language = NULL;
++ subparse->state.langlist_msg_posted = FALSE;
++ g_mutex_init (&subparse->seek_lock);
++#endif
++#ifdef TIZEN_FEATURE_SUBPARSE_DROP_OUT_OF_SEGMENT
++ subparse->state.drop_out_of_segment = DEFAULT_DROP_OUT_OF_SEGMENT;
++#endif
+ }
+
+ /*
+ * Source pad functions.
+ */
+
+ /* Src pad query handler.
+ * POSITION: answered locally in TIME from segment.position; any other
+ * format is forwarded to the upstream peer.
+ * SEEKING: a TIME seeking query is answered by probing whether the
+ * upstream peer is BYTE-seekable (time seeks are implemented as a
+ * byte seek to 0 plus re-parse, see gst_sub_parse_src_event).
+ * Everything else falls through to the default handler. */
+ static gboolean
+ gst_sub_parse_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
+ {
+ GstSubParse *self = GST_SUBPARSE (parent);
+ gboolean ret = FALSE;
+
+ GST_DEBUG ("Handling %s query", GST_QUERY_TYPE_NAME (query));
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_POSITION:{
+ GstFormat fmt;
+
+ gst_query_parse_position (query, &fmt, NULL);
+ if (fmt != GST_FORMAT_TIME) {
+ ret = gst_pad_peer_query (self->sinkpad, query);
+ } else {
+ ret = TRUE;
+ gst_query_set_position (query, GST_FORMAT_TIME, self->segment.position);
+ }
+ break;
+ }
+ case GST_QUERY_SEEKING:
+ {
+ GstFormat fmt;
+ gboolean seekable = FALSE;
+
+ ret = TRUE;
+
+ gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
+ if (fmt == GST_FORMAT_TIME) {
+ GstQuery *peerquery = gst_query_new_seeking (GST_FORMAT_BYTES);
+
+ seekable = gst_pad_peer_query (self->sinkpad, peerquery);
+ if (seekable)
+ gst_query_parse_seeking (peerquery, NULL, &seekable, NULL, NULL);
+ gst_query_unref (peerquery);
+ }
+
+ /* seekable range is the whole stream (0..-1) when byte seeks work */
+ gst_query_set_seeking (query, fmt, seekable, seekable ? 0 : -1, -1);
+ break;
+ }
+ default:
+ ret = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+
+ return ret;
+ }
+
+ /* Src pad event handler. A TIME seek is translated into a BYTE seek
+ * to offset 0 pushed upstream (the whole file is re-read and cues are
+ * skipped until the requested time); on success the requested seek is
+ * applied to our own TIME segment. Non-TIME seeks are rejected.
+ * Tizen builds first verify upstream byte-seekability (falling back to
+ * the default handler otherwise) and serialize the whole operation
+ * with the seek lock. All other events use the default handler. */
+ static gboolean
+ gst_sub_parse_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
+ {
+ GstSubParse *self = GST_SUBPARSE (parent);
+ gboolean ret = FALSE;
+
+ GST_DEBUG ("Handling %s event", GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEEK:
+ {
+ GstFormat format;
+ GstSeekFlags flags;
+ GstSeekType start_type, stop_type;
+ gint64 start, stop;
+ gdouble rate;
+ gboolean update;
+
+ gst_event_parse_seek (event, &rate, &format, &flags,
+ &start_type, &start, &stop_type, &stop);
+
+ if (format != GST_FORMAT_TIME) {
+ GST_WARNING_OBJECT (self, "we only support seeking in TIME format");
+ gst_event_unref (event);
+ goto beach;
+ }
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++ /* Tizen: if upstream can't seek in bytes, let the default handler
++ * try instead of pushing a doomed byte seek. */
++ if (!gst_sub_parse_check_byte_seekability (self)) {
++ ret = gst_pad_event_default (pad, parent, event);
++ break;
++ }
+
++ SUBPARSE_SEEK_LOCK (self);
++#endif
+ /* Convert that seek to a seeking in bytes at position 0,
+ FIXME: could use an index */
+ ret = gst_pad_push_event (self->sinkpad,
+ gst_event_new_seek (rate, GST_FORMAT_BYTES, flags,
+ GST_SEEK_TYPE_SET, 0, GST_SEEK_TYPE_NONE, 0));
+
+ if (ret) {
+ /* Apply the seek to our segment */
+ gst_segment_do_seek (&self->segment, rate, format, flags,
+ start_type, start, stop_type, stop, &update);
+
+ GST_DEBUG_OBJECT (self, "segment after seek: %" GST_SEGMENT_FORMAT,
+ &self->segment);
+
+ /* will mark need_segment when receiving segment from upstream,
+ * after FLUSH and all that has happened,
+ * rather than racing with chain */
+ } else {
+ GST_WARNING_OBJECT (self, "seek to 0 bytes failed");
+ }
+
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++ SUBPARSE_SEEK_UNLOCK (self);
++#endif
++
+ gst_event_unref (event);
+ break;
+ }
+ default:
+ ret = gst_pad_event_default (pad, parent, event);
+ break;
+ }
+
+ beach:
+ return ret;
+ }
+
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++/* Tizen helper: asks the sink pad's peer a BYTES seeking query and
++ * returns whether upstream reports itself byte-seekable. Returns FALSE
++ * (not an error) when the query itself fails. */
++static gboolean
++gst_sub_parse_check_byte_seekability (GstSubParse * subparse)
++{
++ GstQuery *query;
++ gboolean seekable = FALSE;
++
++ query = gst_query_new_seeking (GST_FORMAT_BYTES);
++
++ if (gst_pad_peer_query (subparse->sinkpad, query)) {
++ gst_query_parse_seeking (query, NULL, &seekable, NULL, NULL);
++ } else {
++ GST_DEBUG_OBJECT (subparse, "seeking query failed");
++ }
++
++ gst_query_unref (query);
++
++ GST_INFO_OBJECT (subparse, "byte seekable: %d", seekable);
++
++ return seekable;
++}
++#endif
++
++
+ /* GObject property setter, serialized under the object lock.
+ * "video-fps" only propagates into the parser state when no framerate
+ * was detected from the file itself (have_internal_fps). The Tizen
+ * "current-language" setter also pokes the SAMI context so an already
+ * running parse switches language. */
+ static void
+ gst_sub_parse_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+ GstSubParse *subparse = GST_SUBPARSE (object);
+
+ GST_OBJECT_LOCK (subparse);
+ switch (prop_id) {
+ case PROP_ENCODING:
+ g_free (subparse->encoding);
+ subparse->encoding = g_value_dup_string (value);
+ GST_LOG_OBJECT (object, "subtitle encoding set to %s",
+ GST_STR_NULL (subparse->encoding));
+ break;
+ case PROP_VIDEOFPS:
+ {
+ subparse->fps_n = gst_value_get_fraction_numerator (value);
+ subparse->fps_d = gst_value_get_fraction_denominator (value);
+ GST_DEBUG_OBJECT (object, "video framerate set to %d/%d", subparse->fps_n,
+ subparse->fps_d);
+
+ if (!subparse->state.have_internal_fps) {
+ subparse->state.fps_n = subparse->fps_n;
+ subparse->state.fps_d = subparse->fps_d;
+ }
+ break;
+ }
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++ case PROP_EXTSUB_CURRENT_LANGUAGE:
++ g_free (subparse->state.current_language);
++ subparse->state.current_language = g_value_dup_string (value);
++ GST_LOG_OBJECT (subparse, "subtitle current language set to %s",
++ GST_STR_NULL (subparse->state.current_language));
++ sami_context_change_language (&subparse->state);
++ break;
++#endif
++#ifdef TIZEN_FEATURE_SUBPARSE_DROP_OUT_OF_SEGMENT
++ case PROP_DROP_OUT_OF_SEGMENT:
++ subparse->state.drop_out_of_segment = g_value_get_boolean (value);
++ GST_DEBUG_OBJECT (object, "Drop out of segment set to %d",
++ subparse->state.drop_out_of_segment);
++ break;
++#endif
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ GST_OBJECT_UNLOCK (subparse);
+ }
+
+ /* GObject property getter, mirror of the setter above; reads are
+ * taken under the object lock. */
+ static void
+ gst_sub_parse_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+ {
+ GstSubParse *subparse = GST_SUBPARSE (object);
+
+ GST_OBJECT_LOCK (subparse);
+ switch (prop_id) {
+ case PROP_ENCODING:
+ g_value_set_string (value, subparse->encoding);
+ break;
+ case PROP_VIDEOFPS:
+ gst_value_set_fraction (value, subparse->fps_n, subparse->fps_d);
+ break;
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++ case PROP_EXTSUB_CURRENT_LANGUAGE:
++ g_value_set_string (value, subparse->state.current_language);
++ break;
++#endif
++#ifdef TIZEN_FEATURE_SUBPARSE_DROP_OUT_OF_SEGMENT
++ case PROP_DROP_OUT_OF_SEGMENT:
++ g_value_set_boolean (value, subparse->state.drop_out_of_segment);
++ break;
++#endif
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ GST_OBJECT_UNLOCK (subparse);
+ }
+
+ /* Maps a GstSubParseFormat enum value to a human-readable codec name
+ * (used for tags/debug); returns NULL for UNKNOWN or unhandled values.
+ * Returned strings are static — callers must not free them. */
+ static const gchar *
+ gst_sub_parse_get_format_description (GstSubParseFormat format)
+ {
+ switch (format) {
+ case GST_SUB_PARSE_FORMAT_MDVDSUB:
+ return "MicroDVD";
+ case GST_SUB_PARSE_FORMAT_SUBRIP:
+ return "SubRip";
+ case GST_SUB_PARSE_FORMAT_MPSUB:
+ return "MPSub";
+ case GST_SUB_PARSE_FORMAT_SAMI:
+ return "SAMI";
+ case GST_SUB_PARSE_FORMAT_TMPLAYER:
+ return "TMPlayer";
+ case GST_SUB_PARSE_FORMAT_MPL2:
+ return "MPL2";
+ case GST_SUB_PARSE_FORMAT_SUBVIEWER:
+ return "SubViewer";
+ case GST_SUB_PARSE_FORMAT_DKS:
+ return "DKS";
+ case GST_SUB_PARSE_FORMAT_VTT:
+ return "WebVTT";
+ case GST_SUB_PARSE_FORMAT_QTTEXT:
+ return "QTtext";
+ case GST_SUB_PARSE_FORMAT_LRC:
+ return "LRC";
+ default:
+ case GST_SUB_PARSE_FORMAT_UNKNOWN:
+ break;
+ }
+ return NULL;
+ }
+
+
+
+
+
+ /* Converts @len bytes of subtitle data to UTF-8, writing the number of
+ * consumed input bytes to @consumed (0 on total failure). Strategy, in
+ * order: (1) a previously detected encoding (discarded on failure),
+ * (2) pass-through if the data already validates as UTF-8, (3) the
+ * "subtitle-encoding" property / GST_SUBTITLE_ENCODING env var /
+ * ISO-8859-15 when the locale charset is UTF-8, and finally (4) a
+ * forced ISO-8859-15 conversion which always succeeds. Returns a newly
+ * allocated string owned by the caller. */
+ static gchar *
+ convert_encoding (GstSubParse * self, const gchar * str, gsize len,
+ gsize * consumed)
+ {
+ const gchar *encoding;
+ GError *err = NULL;
+ gchar *ret = NULL;
+
+ *consumed = 0;
+
+ /* First try any detected encoding */
+ if (self->detected_encoding) {
+ ret =
+ gst_sub_parse_gst_convert_to_utf8 (str, len, self->detected_encoding,
+ consumed, &err);
+
+ if (!err)
+ return ret;
+
+ GST_WARNING_OBJECT (self, "could not convert string from '%s' to UTF-8: %s",
+ self->detected_encoding, err->message);
+ g_free (self->detected_encoding);
+ self->detected_encoding = NULL;
+ g_clear_error (&err);
+ }
+
+ /* Otherwise check if it's UTF8 */
+ if (self->valid_utf8) {
+ if (g_utf8_validate (str, len, NULL)) {
+ GST_LOG_OBJECT (self, "valid UTF-8, no conversion needed");
+ *consumed = len;
+ return g_strndup (str, len);
+ }
+ GST_INFO_OBJECT (self, "invalid UTF-8!");
+ self->valid_utf8 = FALSE;
+ }
+
+ /* Else try fallback */
+ encoding = self->encoding;
+ if (encoding == NULL || *encoding == '\0') {
+ encoding = g_getenv ("GST_SUBTITLE_ENCODING");
+ }
+ if (encoding == NULL || *encoding == '\0') {
+ /* if local encoding is UTF-8 and no encoding specified
+ * via the environment variable, assume ISO-8859-15 */
+ if (g_get_charset (&encoding)) {
+ encoding = "ISO-8859-15";
+ }
+ }
+
+ ret = gst_sub_parse_gst_convert_to_utf8 (str, len, encoding, consumed, &err);
+
+ if (err) {
+ GST_WARNING_OBJECT (self, "could not convert string from '%s' to UTF-8: %s",
+ encoding, err->message);
+ g_clear_error (&err);
+
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++ /* Tizen: EUC-KR often really is CP949 (a superset); retry once with
++ * CP949 and remember it as the element's encoding for future calls. */
++ if (!g_strcmp0 (self->encoding, "EUC-KR")) {
++ GST_LOG_OBJECT (self, "use CP949 as fallback");
++ g_free (self->encoding);
++ self->encoding = g_strdup ("CP949");
++ encoding = self->encoding;
++ ret = gst_sub_parse_gst_convert_to_utf8 (str, len, encoding, consumed, &err);
++ } else
++#endif
+ /* invalid input encoding, fall back to ISO-8859-15 (always succeeds) */
+ ret =
+ gst_sub_parse_gst_convert_to_utf8 (str, len, "ISO-8859-15", consumed,
+ NULL);
+ }
+
+ GST_LOG_OBJECT (self,
+ "successfully converted %" G_GSIZE_FORMAT " characters from %s to UTF-8"
+ "%s", len, encoding, (err) ? " , using ISO-8859-15 as fallback" : "");
+
+ return ret;
+ }
+
+ /* Pops the next '\n'-terminated line off self->textbuf and returns it
+ * as a newly allocated string without the line terminator ('\r\n' is
+ * consumed as a pair). Returns NULL when no complete line has been
+ * accumulated yet, leaving the buffer untouched. */
+ static gchar *
+ get_next_line (GstSubParse * self)
+ {
+ char *line = NULL;
+ const char *line_end;
+ int line_len;
+ gboolean have_r = FALSE;
+
+ line_end = strchr (self->textbuf->str, '\n');
+
+ if (!line_end) {
+ /* end-of-line not found; return for more data */
+ return NULL;
+ }
+
+ /* get rid of '\r' */
+ if (line_end != self->textbuf->str && *(line_end - 1) == '\r') {
+ line_end--;
+ have_r = TRUE;
+ }
+
+ line_len = line_end - self->textbuf->str;
+ line = g_strndup (self->textbuf->str, line_len);
+ /* erase the line plus its terminator(s): '\r\n' = 2 bytes, '\n' = 1 */
+ self->textbuf = g_string_erase (self->textbuf, 0,
+ line_len + (have_r ? 2 : 1));
+ return line;
+ }
+
+ /* Parses one MicroDVD (.sub) line of the form "{start}{end}text" where
+ * start/end are frame numbers converted to time via state->fps_n/fps_d.
+ * A "{1}{1}<fps>" line sets the file's own framerate instead of
+ * producing output. '|' splits multiple display lines; "{y:i}", "{y:b}",
+ * "{s:N}" and leading/trailing '/' select italic/bold/size styling.
+ * Fills state->start_time/duration and returns newly allocated
+ * pango-markup text, or NULL (header line, parse error, or cue clipped
+ * away by the segment). */
+ static gchar *
+ parse_mdvdsub (ParserState * state, const gchar * line)
+ {
+ const gchar *line_split;
+ gchar *line_chunk;
+ guint start_frame, end_frame;
+ guint64 clip_start = 0, clip_stop = 0;
+ gboolean in_seg = FALSE;
+ GString *markup;
+ gchar *ret;
+
+ /* style variables */
+ gboolean italic;
+ gboolean bold;
+ guint fontsize;
+ gdouble fps = 0.0;
+
+ if (sscanf (line, "{%u}{%u}", &start_frame, &end_frame) != 2) {
+ g_warning ("Parse of the following line, assumed to be in microdvd .sub"
+ " format, failed:\n%s", line);
+ return NULL;
+ }
+
+ /* skip the {%u}{%u} part */
+ line = strchr (line, '}') + 1;
+ line = strchr (line, '}') + 1;
+
+ /* see if there's a first line with a framerate */
+ if (start_frame == 1 && end_frame == 1) {
+ gchar *rest, *end = NULL;
+
+ /* some files use ',' as the decimal separator — normalize first */
+ rest = g_strdup (line);
+ g_strdelimit (rest, ",", '.');
+ fps = g_ascii_strtod (rest, &end);
+ if (end != rest) {
+ gst_util_double_to_fraction (fps, &state->fps_n, &state->fps_d);
+ GST_INFO ("framerate from file: %d/%d ('%s')", state->fps_n,
+ state->fps_d, rest);
+ }
+ g_free (rest);
+ return NULL;
+ }
+
+ state->start_time =
+ gst_util_uint64_scale (start_frame, GST_SECOND * state->fps_d,
+ state->fps_n);
+ state->duration =
+ gst_util_uint64_scale (end_frame - start_frame, GST_SECOND * state->fps_d,
+ state->fps_n);
+
+ /* Check our segment start/stop */
++#ifdef TIZEN_FEATURE_SUBPARSE_DROP_OUT_OF_SEGMENT
++ /* Tizen: segment clipping is skipped when drop_out_of_segment is off */
++ if (state->drop_out_of_segment) {
++#endif
+ in_seg = gst_segment_clip (state->segment, GST_FORMAT_TIME,
+ state->start_time, state->start_time + state->duration, &clip_start,
+ &clip_stop);
+
+ /* No need to parse that text if it's out of segment */
+ if (in_seg) {
+ state->start_time = clip_start;
+ state->duration = clip_stop - clip_start;
+ } else {
+ return NULL;
+ }
++#ifdef TIZEN_FEATURE_SUBPARSE_DROP_OUT_OF_SEGMENT
++ }
++#endif
+
+ markup = g_string_new (NULL);
+ while (1) {
+ italic = FALSE;
+ bold = FALSE;
+ fontsize = 0;
+ /* parse style markup */
+ if (strncmp (line, "{y:i}", 5) == 0) {
+ italic = TRUE;
+ line = strchr (line, '}') + 1;
+ }
+ if (strncmp (line, "{y:b}", 5) == 0) {
+ bold = TRUE;
+ line = strchr (line, '}') + 1;
+ }
+ if (sscanf (line, "{s:%u}", &fontsize) == 1) {
+ line = strchr (line, '}') + 1;
+ }
+ /* forward slashes at beginning/end signify italics too */
+ if (g_str_has_prefix (line, "/")) {
+ italic = TRUE;
+ ++line;
+ }
+ if ((line_split = strchr (line, '|')))
+ line_chunk = g_markup_escape_text (line, line_split - line);
+ else
+ line_chunk = g_markup_escape_text (line, strlen (line));
+
+ /* Remove italics markers at end of line/stanza (CHECKME: are end slashes
+ * always at the end of a line or can they span multiple lines?) */
+ if (g_str_has_suffix (line_chunk, "/")) {
+ line_chunk[strlen (line_chunk) - 1] = '\0';
+ }
+
+ markup = g_string_append (markup, "<span");
+ if (italic)
+ g_string_append (markup, " style=\"italic\"");
+ if (bold)
+ g_string_append (markup, " weight=\"bold\"");
+ if (fontsize)
+ g_string_append_printf (markup, " size=\"%u\"", fontsize * 1000);
+ g_string_append_printf (markup, ">%s</span>", line_chunk);
+ g_free (line_chunk);
+ if (line_split) {
+ g_string_append (markup, "\n");
+ line = line_split + 1;
+ } else {
+ break;
+ }
+ }
+ /* hand the GString's buffer to the caller without copying */
+ ret = markup->str;
+ g_string_free (markup, FALSE);
+ GST_DEBUG ("parse_mdvdsub returning (%f+%f): %s",
+ state->start_time / (double) GST_SECOND,
+ state->duration / (double) GST_SECOND, ret);
+ return ret;
+ }
+
+ /* Truncates trailing '\n' characters from @txt in place (NULL-safe).
+ * The `len > 1` bound deliberately leaves a string that is only "\n"
+ * untouched. */
+ static void
+ strip_trailing_newlines (gchar * txt)
+ {
+ if (txt) {
+ guint len;
+
+ len = strlen (txt);
+ while (len > 1 && txt[len - 1] == '\n') {
+ txt[len - 1] = '\0';
+ --len;
+ }
+ }
+ }
+
+ /* we want to escape text in general, but retain basic markup like
+ * <i></i>, <u></u>, and <b></b>. The easiest and safest way is to
+ * just unescape a white list of allowed markups again after
+ * escaping everything (the text between these simple markers isn't
+ * necessarily escaped, so it seems best to do it like this) */
+ /* Unescapes the whitelisted formatting tags inside @txt in place.
+ * After g_markup_escape_text() the cue text contains "&lt;i&gt;" etc.;
+ * this turns allowed tags (optionally with attributes when
+ * @allows_tag_attributes) back into real "<i>" markup while leaving
+ * everything else escaped for subrip_remove_unhandled_tags().
+ * NOTE: the markers searched for are the *escaped* forms "&lt;"/"&gt;"
+ * — the input is escaped text, so literal '<' cannot occur here. */
+ static void
+ subrip_unescape_formatting (gchar * txt, gconstpointer allowed_tags_ptr,
+ gboolean allows_tag_attributes)
+ {
+ gchar *res;
+ GRegex *tag_regex;
+ gchar *allowed_tags_pattern, *search_pattern;
+ const gchar *replace_pattern;
+
+ /* No processing needed if no escaped tag marker found in the string. */
+ if (strstr (txt, "&lt;") == NULL)
+ return;
+
+ /* Build a list of alternates for our regexp.
+ * FIXME: Could be built once and stored */
+ allowed_tags_pattern = g_strjoinv ("|", (gchar **) allowed_tags_ptr);
+ /* Look for starting/ending escaped tags with optional attributes. */
+ search_pattern = g_strdup_printf ("&lt;(/)?\\ *(%s)(%s)&gt;",
+ allowed_tags_pattern, ATTRIBUTE_REGEX);
+ /* And unescape appropriately */
+ if (allows_tag_attributes) {
+ replace_pattern = "<\\1\\2\\3>";
+ } else {
+ replace_pattern = "<\\1\\2>";
+ }
+
+ tag_regex = g_regex_new (search_pattern, 0, 0, NULL);
+ res = g_regex_replace (tag_regex, txt, strlen (txt), 0,
+ replace_pattern, 0, NULL);
+
+ /* res will always be shorter than the input or identical, so this
+ * copy is OK */
+ strcpy (txt, res);
+
+ g_free (res);
+ g_free (search_pattern);
+ g_free (allowed_tags_pattern);
+
+ g_regex_unref (tag_regex);
+ }
+
+
+ /* Removes one escaped tag spanning [@start, @stop) from the string in
+ * place (via memmove of the tail) if it looks like a real tag, i.e. the
+ * first character after "&lt;" (and an optional '/') is a letter.
+ * Returns TRUE when something was removed. @start points at "&lt;",
+ * @stop one past the closing "&gt;". */
+ static gboolean
+ subrip_remove_unhandled_tag (gchar * start, gchar * stop)
+ {
+ gchar *tag, saved;
+
+ /* skip the escaped opening bracket, which is 4 bytes: "&lt;" */
+ tag = start + strlen ("&lt;");
+ if (*tag == '/')
+ ++tag;
+
+ if (g_ascii_tolower (*tag) < 'a' || g_ascii_tolower (*tag) > 'z')
+ return FALSE;
+
+ /* temporarily terminate so the whole tag can be logged */
+ saved = *stop;
+ *stop = '\0';
+ GST_LOG ("removing unhandled tag '%s'", start);
+ *stop = saved;
+ memmove (start, stop, strlen (stop) + 1);
+ return TRUE;
+ }
+
+ /* remove tags we haven't explicitly allowed earlier on, like font tags
+ * for example. At this point allowed tags have been unescaped to real
+ * markup by subrip_unescape_formatting(); anything still in escaped
+ * "&lt;...&gt;" form is unhandled and gets stripped in place. */
+ static void
+ subrip_remove_unhandled_tags (gchar * txt)
+ {
+ gchar *pos, *gt;
+
+ for (pos = txt; pos != NULL && *pos != '\0'; ++pos) {
+ /* "&lt;" is 4 bytes, hence the strncmp length and the pos + 4 */
+ if (strncmp (pos, "&lt;", 4) == 0 && (gt = strstr (pos + 4, "&gt;"))) {
+ if (subrip_remove_unhandled_tag (pos, gt + strlen ("&gt;")))
+ --pos;
+ }
+ }
+ }
+
+ /* we only allow a fixed set of tags like <i>, <u> and <b>, so let's
+ * take a simple approach. This code assumes the input has been
+ * escaped and subrip_unescape_formatting() has then been run over the
+ * input! This function adds missing closing markup tags and removes
+ * broken closing tags for tags that have never been opened. */
+ /* Balances the markup in *p_txt: walks the text tracking which
+ * whitelisted tags are currently open, deletes closing tags that were
+ * never opened, and appends missing closing tags at the end (possibly
+ * reallocating *p_txt). Assumes the input has already been through
+ * subrip_unescape_formatting(), so allowed tags are real '<...>'
+ * markup here. */
+ static void
+ subrip_fix_up_markup (gchar ** p_txt, gconstpointer allowed_tags_ptr)
+ {
+ gchar *cur, *next_tag;
+ GPtrArray *open_tags = NULL;
+ guint num_open_tags = 0;
+ const gchar *iter_tag;
+ guint offset = 0;
+ guint index;
+ gchar *cur_tag;
+ gchar *end_tag;
+ GRegex *tag_regex;
+ GMatchInfo *match_info;
+ gchar **allowed_tags = (gchar **) allowed_tags_ptr;
+
+ g_assert (*p_txt != NULL);
+
+ open_tags = g_ptr_array_new_with_free_func (g_free);
+ cur = *p_txt;
+ while (*cur != '\0') {
+ next_tag = strchr (cur, '<');
+ if (next_tag == NULL)
+ break;
+ offset = 0;
+ index = 0;
+ while (index < g_strv_length (allowed_tags)) {
+ iter_tag = allowed_tags[index];
+ /* Look for a white listed tag */
+ cur_tag = g_strconcat ("<", iter_tag, ATTRIBUTE_REGEX, ">", NULL);
+ tag_regex = g_regex_new (cur_tag, 0, 0, NULL);
+ (void) g_regex_match (tag_regex, next_tag, 0, &match_info);
+
+ if (g_match_info_matches (match_info)) {
+ gint start_pos, end_pos;
+ gchar *word = g_match_info_fetch (match_info, 0);
+ g_match_info_fetch_pos (match_info, 0, &start_pos, &end_pos);
+ /* only count a match that begins exactly at next_tag */
+ if (start_pos == 0) {
+ offset = strlen (word);
+ }
+ g_free (word);
+ }
+ g_match_info_free (match_info);
+ g_regex_unref (tag_regex);
+ g_free (cur_tag);
+ index++;
+ if (offset) {
+ /* OK we found a tag, let's keep track of it */
+ g_ptr_array_add (open_tags, g_ascii_strdown (iter_tag, -1));
+ ++num_open_tags;
+ break;
+ }
+ }
+
+ if (offset) {
+ /* skip over the matched opening tag and continue scanning */
+ next_tag += offset;
+ cur = next_tag;
+ continue;
+ }
+
+ if (*next_tag == '<' && *(next_tag + 1) == '/') {
+ /* a closing tag: either it closes the innermost open tag, or it is
+ * spurious and gets spliced out of the string */
+ end_tag = strchr (cur, '>');
+ if (end_tag) {
+ const gchar *last = NULL;
+ if (num_open_tags > 0)
+ last = g_ptr_array_index (open_tags, num_open_tags - 1);
+ if (num_open_tags == 0
+ || g_ascii_strncasecmp (end_tag - 1, last, strlen (last))) {
+ GST_LOG ("broken input, closing tag '%s' is not open", end_tag - 1);
+ memmove (next_tag, end_tag + 1, strlen (end_tag) + 1);
+ next_tag -= strlen (end_tag);
+ } else {
+ --num_open_tags;
+ g_ptr_array_remove_index (open_tags, num_open_tags);
+ }
+ }
+ }
+ ++next_tag;
+ cur = next_tag;
+ }
+
+ if (num_open_tags > 0) {
+ GString *s;
+
+ /* append "</tag>" for every tag still open, innermost first */
+ s = g_string_new (*p_txt);
+ while (num_open_tags > 0) {
+ GST_LOG ("adding missing closing tag '%s'",
+ (char *) g_ptr_array_index (open_tags, num_open_tags - 1));
+ g_string_append_c (s, '<');
+ g_string_append_c (s, '/');
+ g_string_append (s, g_ptr_array_index (open_tags, num_open_tags - 1));
+ g_string_append_c (s, '>');
+ --num_open_tags;
+ }
+ g_free (*p_txt);
+ *p_txt = g_string_free (s, FALSE);
+ }
+ g_ptr_array_free (open_tags, TRUE);
+ }
+
+ /* Parses a SubRip/WebVTT timestamp ("hh:mm:ss,mmm", '.' accepted for
+ * ',', hours optional per WebVTT) from @ts_string into *t. Anything
+ * from "-->" onwards is ignored, embedded spaces are treated as '0',
+ * and the millisecond field is normalized to exactly three digits.
+ * Returns FALSE (and logs) on malformed input, leaving *t unset. */
+ static gboolean
+ parse_subrip_time (const gchar * ts_string, GstClockTime * t)
+ {
+ gchar s[128] = { '\0', };
+ gchar *end, *p;
+ guint hour, min, sec, msec, len;
+
+ while (*ts_string == ' ')
+ ++ts_string;
+
+ /* work on a bounded local copy, truncated at "-->" */
+ g_strlcpy (s, ts_string, sizeof (s));
+ if ((end = strstr (s, "-->")))
+ *end = '\0';
+ g_strchomp (s);
+
+ /* ms may be in these formats:
+ * hh:mm:ss,500 = 500ms
+ * hh:mm:ss, 5 = 5ms
+ * hh:mm:ss, 5 = 50ms
+ * hh:mm:ss, 50 = 50ms
+ * hh:mm:ss,5 = 500ms
+ * and the same with . instead of ,.
+ * sscanf() doesn't differentiate between ' 5' and '5' so munge
+ * the white spaces within the timestamp to '0' (I'm sure there's a
+ * way to make sscanf() do this for us, but how?)
+ */
+ g_strdelimit (s, " ", '0');
+ g_strdelimit (s, ".", ',');
+
+ /* make sure we have exactly three digits after he comma */
+ p = strchr (s, ',');
+ if (p == NULL) {
+ /* If there isn't a ',' the timestamp is broken */
+ /* https://gitlab.freedesktop.org/gstreamer/gst-plugins-base/issues/532#note_100179 */
+ GST_WARNING ("failed to parse subrip timestamp string '%s'", s);
+ return FALSE;
+ }
+
+ /* pad or truncate the fraction to three digits ("5" -> "500") */
+ ++p;
+ len = strlen (p);
+ if (len > 3) {
+ p[3] = '\0';
+ } else
+ while (len < 3) {
+ g_strlcat (&p[len], "0", 2);
+ ++len;
+ }
+
+ GST_LOG ("parsing timestamp '%s'", s);
+ if (sscanf (s, "%u:%u:%u,%u", &hour, &min, &sec, &msec) != 4) {
+ /* https://www.w3.org/TR/webvtt1/#webvtt-timestamp
+ *
+ * The hours component is optional with webVTT, for example
+ * mm:ss,500 is a valid webVTT timestamp. When not present,
+ * hours is 0.
+ */
+ hour = 0;
+
+ if (sscanf (s, "%u:%u,%u", &min, &sec, &msec) != 3) {
+ GST_WARNING ("failed to parse subrip timestamp string '%s'", s);
+ return FALSE;
+ }
+ }
+
+ *t = ((hour * 3600) + (min * 60) + sec) * GST_SECOND + msec * GST_MSECOND;
+ return TRUE;
+ }
+
+ /* cue settings are part of the WebVTT specification. They are
+ * declared after the time interval in the first line of the
+ * cue. Example: 00:00:01,000 --> 00:00:02,000 D:vertical-lr A:start
+ * See also http://www.whatwg.org/specs/web-apps/current-work/webvtt.html
+ */
+ /* Parses the legacy WebVTT cue-settings string (space/tab separated
+ * "K:value" pairs) into the parser state: T = text position %,
+ * D = vertical direction, L = line position (% or absolute number),
+ * S = text size %, A = alignment. Unrecognized settings are logged and
+ * skipped; missing D/A reset state->vertical/alignment to "". */
+ static void
+ parse_webvtt_cue_settings (ParserState * state, const gchar * settings)
+ {
+ gchar **splitted_settings = g_strsplit_set (settings, " \t", -1);
+ gint i = 0;
+ gint16 text_position, text_size;
+ gint16 line_position;
+ gboolean vertical_found = FALSE;
+ gboolean alignment_found = FALSE;
+
+ while (i < g_strv_length (splitted_settings)) {
+ gboolean valid_tag = FALSE;
+ switch (splitted_settings[i][0]) {
+ case 'T':
+ if (sscanf (splitted_settings[i], "T:%" G_GINT16_FORMAT "%%",
+ &text_position) > 0) {
+ state->text_position = (guint8) text_position;
+ valid_tag = TRUE;
+ }
+ break;
+ case 'D':
+ if (strlen (splitted_settings[i]) > 2) {
+ vertical_found = TRUE;
+ g_free (state->vertical);
+ state->vertical = g_strdup (splitted_settings[i] + 2);
+ valid_tag = TRUE;
+ }
+ break;
+ case 'L':
+ /* trailing '%' distinguishes percentage from line number */
+ if (g_str_has_suffix (splitted_settings[i], "%")) {
+ if (sscanf (splitted_settings[i], "L:%" G_GINT16_FORMAT "%%",
+ &line_position) > 0) {
+ state->line_position = line_position;
+ valid_tag = TRUE;
+ }
+ } else {
+ if (sscanf (splitted_settings[i], "L:%" G_GINT16_FORMAT,
+ &line_position) > 0) {
+ state->line_number = line_position;
+ valid_tag = TRUE;
+ }
+ }
+ break;
+ case 'S':
+ if (sscanf (splitted_settings[i], "S:%" G_GINT16_FORMAT "%%",
+ &text_size) > 0) {
+ state->text_size = (guint8) text_size;
+ valid_tag = TRUE;
+ }
+ break;
+ case 'A':
+ if (strlen (splitted_settings[i]) > 2) {
+ g_free (state->alignment);
+ state->alignment = g_strdup (splitted_settings[i] + 2);
+ alignment_found = TRUE;
+ valid_tag = TRUE;
+ }
+ break;
+ default:
+ break;
+ }
+ if (!valid_tag) {
+ GST_LOG ("Invalid or unrecognised setting found: %s",
+ splitted_settings[i]);
+ }
+ i++;
+ }
+ g_strfreev (splitted_settings);
+ if (!vertical_found) {
+ g_free (state->vertical);
+ state->vertical = g_strdup ("");
+ }
+ if (!alignment_found) {
+ g_free (state->alignment);
+ state->alignment = g_strdup ("");
+ }
+ }
+
++#ifdef TIZEN_FEATURE_HLS_WEBVTT
++/* Tizen HLS: parses a WebVTT "X-TIMESTAMP-MAP=LOCAL:...,MPEGTS:..."
++ * header line into state->local (cue-local time) and state->mpegts
++ * (90kHz MPEG-TS ticks). Missing fields keep their zero defaults;
++ * the pair is later used to offset cue times onto the stream clock. */
++static void
++parse_timestamp_map (ParserState * state, const gchar * timestamp_map)
++{
++ GstClockTime local = 0;
++ guint64 mpegts = 0;
++ gchar *local_start = NULL;
++ gchar *mpegts_start = NULL;
++
++ if (!timestamp_map)
++ return;
++
++ local_start = g_strrstr (timestamp_map, "LOCAL:");
++ if (local_start)
++ parse_subrip_time (local_start + strlen ("LOCAL:"), &local);
++
++ mpegts_start = g_strrstr (timestamp_map, "MPEGTS:");
++ if (mpegts_start)
++ mpegts = g_ascii_strtoull (mpegts_start + strlen ("MPEGTS:"), NULL, 10);
++
++ GST_LOG ("parsed local time %" GST_TIME_FORMAT " MPEGTS: %" G_GUINT64_FORMAT,
++ GST_TIME_ARGS (local), mpegts);
++
++ state->local = local;
++ state->mpegts = mpegts;
++}
++
++/* Tizen HLS: pushes a custom downstream "fragment_timestamp" event
++ * carrying @timestamp (as G_TYPE_UINT64) on the src pad; no-op for an
++ * invalid timestamp. */
++static void
++send_fragment_timestamp_event (GstSubParse * self, GstClockTime timestamp)
++{
++ GstEvent *event = NULL;
++
++ if (!GST_CLOCK_TIME_IS_VALID (timestamp))
++ return;
++
++ GST_LOG ("send fragment_timestamp %" GST_TIME_FORMAT,
++ GST_TIME_ARGS (timestamp));
++
++ event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
++ gst_structure_new ("fragment_timestamp",
++ "timestamp", G_TYPE_UINT64, timestamp, NULL));
++
++ gst_pad_push_event (self->srcpad, event);
++}
++#endif
++
++
+ /* SubRip line-state machine, one call per input line.
+ * state 0: expect a numeric cue ID (consumed, value unused);
+ * state 1: expect "start --> end" — fills start_time/duration, with
+ * Tizen HLS builds additionally shifting by the MPEGTS/LOCAL
+ * offset from X-TIMESTAMP-MAP;
+ * state 2: accumulate text lines until an empty line, then escape the
+ * text, unescape/strip/balance markup, and return the newly
+ * allocated cue text (NULL on all other calls).
+ * Segment clipping in state 2 is subject to the Tizen
+ * drop-out-of-segment switch. */
+ static gchar *
+ parse_subrip (ParserState * state, const gchar * line)
+ {
+ gchar *ret;
+
+ switch (state->state) {
+ case 0:{
+ char *endptr;
+ guint64 id;
+
+ /* looking for a single integer as a Cue ID, but we
+ * don't actually use it */
+ errno = 0;
+ id = g_ascii_strtoull (line, &endptr, 10);
+ if (id == G_MAXUINT64 && errno == ERANGE)
+ state->state = 1;
+ else if (id == 0 && errno == EINVAL)
+ state->state = 1;
+ else if (endptr != line && *endptr == '\0')
+ state->state = 1;
+ return NULL;
+ }
+ case 1:
+ {
+ GstClockTime ts_start, ts_end;
+ gchar *end_time;
+
+ /* looking for start_time --> end_time */
+ if ((end_time = strstr (line, " --> ")) &&
+ parse_subrip_time (line, &ts_start) &&
+ parse_subrip_time (end_time + strlen (" --> "), &ts_end) &&
++#ifdef TIZEN_FEATURE_HLS_WEBVTT
++ state->start_time <=
++ ts_end + MPEGTIME_TO_GSTTIME (state->mpegts) - state->local) {
++#else
+ state->start_time <= ts_end) {
++#endif
+ state->state = 2;
+ state->start_time = ts_start;
++#ifdef TIZEN_FEATURE_HLS_WEBVTT
++ /* map cue-local time onto the MPEG-TS derived stream clock */
++ state->start_time += MPEGTIME_TO_GSTTIME (state->mpegts) - state->local;
++#endif
+ state->duration = ts_end - ts_start;
+ } else {
+ GST_DEBUG ("error parsing subrip time line '%s'", line);
+ state->state = 0;
+ }
+ return NULL;
+ }
+ case 2:
+ {
+ /* No need to parse that text if it's out of segment */
++#ifdef TIZEN_FEATURE_SUBPARSE_DROP_OUT_OF_SEGMENT
++ if (state->drop_out_of_segment) {
++#endif
+ guint64 clip_start = 0, clip_stop = 0;
+ gboolean in_seg = FALSE;
+
+ /* Check our segment start/stop */
+ in_seg = gst_segment_clip (state->segment, GST_FORMAT_TIME,
+ state->start_time, state->start_time + state->duration,
+ &clip_start, &clip_stop);
+
+ if (in_seg) {
+ state->start_time = clip_start;
+ state->duration = clip_stop - clip_start;
+ } else {
+ state->state = 0;
+ return NULL;
+ }
++#ifdef TIZEN_FEATURE_SUBPARSE_DROP_OUT_OF_SEGMENT
++ }
++#endif
+ }
+ /* looking for subtitle text; empty line ends this subtitle entry */
+ if (state->buf->len)
+ g_string_append_c (state->buf, '\n');
+ g_string_append (state->buf, line);
+ if (strlen (line) == 0) {
+ ret = g_markup_escape_text (state->buf->str, state->buf->len);
+ g_string_truncate (state->buf, 0);
+ state->state = 0;
+ subrip_unescape_formatting (ret, state->allowed_tags,
+ state->allows_tag_attributes);
+ subrip_remove_unhandled_tags (ret);
+ strip_trailing_newlines (ret);
+ subrip_fix_up_markup (&ret, state->allowed_tags);
+ return ret;
+ }
+ return NULL;
+ default:
+ g_return_val_if_reached (NULL);
+ }
+ }
+
+ /* Parses one LRC lyric line "[mm:ss.xx]text" or "[mm:ss.xxx]text".
+ * The ']' position (offset 9 means 2-digit fraction = centiseconds)
+ * decides the multiplier. Sets start_time, leaves duration unknown
+ * (GST_CLOCK_TIME_NONE) and returns a copy of the text after ']',
+ * or NULL when the line doesn't match.
+ * NOTE(review): m/s/c are gint scanned with %u — a signed/unsigned
+ * specifier mismatch inherited from upstream; harmless for valid
+ * non-negative timestamps but technically UB per C. */
+ static gchar *
+ parse_lrc (ParserState * state, const gchar * line)
+ {
+ gint m, s, c;
+ const gchar *start;
+ gint milli;
+
+ if (line[0] != '[')
+ return NULL;
+
+ if (sscanf (line, "[%u:%02u.%03u]", &m, &s, &c) != 3 &&
+ sscanf (line, "[%u:%02u.%02u]", &m, &s, &c) != 3)
+ return NULL;
+
+ start = strchr (line, ']');
+ if (start - line == 9)
+ milli = 10;
+ else
+ milli = 1;
+
+ state->start_time = gst_util_uint64_scale (m, 60 * GST_SECOND, 1)
+ + gst_util_uint64_scale (s, GST_SECOND, 1)
+ + gst_util_uint64_scale (c, milli * GST_MSECOND, 1);
+ state->duration = GST_CLOCK_TIME_NONE;
+
+ return g_strdup (start + 1);
+ }
+
+ /* WebVTT is a new subtitle format for the upcoming HTML5 video track
+ * element. This format is similar to Subrip, the biggest differences
+ * are that there can be cue settings detailing how to display the cue
+ * text and more markup tags are allowed.
+ * See also http://www.whatwg.org/specs/web-apps/current-work/webvtt.html
+ */
static gchar *
parse_webvtt (ParserState * state, const gchar * line)
{
  /* Parse one line of a WebVTT stream.  States 0/1 wait for a cue id or
   * a timing line; once timing is found the cue-text states are handled
   * by the subrip parser.  Returns newly-allocated cue text (caller
   * frees) or NULL when more input is needed. */
  /* Cue IDs are optional in WebVTT, but not in subrip,
   * so when in state 0 (cue ID), also check if we're
   * already at the start --> end time marker */
  if (state->state == 0 || state->state == 1) {
    GstClockTime ts_start, ts_end;
    gchar *end_time;
    gchar *cue_settings = NULL;

    /* looking for start_time --> end_time */
    if ((end_time = strstr (line, " --> ")) &&
        parse_subrip_time (line, &ts_start) &&
        parse_subrip_time (end_time + strlen (" --> "), &ts_end) &&
        state->start_time <= ts_end) {
      state->state = 2;
      state->start_time = ts_start;
#ifdef TIZEN_FEATURE_HLS_WEBVTT
      /* shift cue times by the offset recorded from the last
       * X-TIMESTAMP-MAP header (MPEG-TS clock minus cue-local clock) */
      state->start_time += MPEGTIME_TO_GSTTIME (state->mpegts) - state->local;
#endif
      state->duration = ts_end - ts_start;
      /* anything after the end time on the same line is cue settings */
      cue_settings = strstr (end_time + strlen (" --> "), " ");
#ifdef TIZEN_FEATURE_HLS_WEBVTT
    } else if (strstr (line, "X-TIMESTAMP-MAP")) {
      /* HLS WebVTT header mapping cue-local time to MPEG-TS time;
       * remember it for later cues rather than treating it as a bad
       * timing line */
      GST_DEBUG ("got X-TIMESTAMP-MAP '%s'", line);
      parse_timestamp_map (state, line);
      state->state = 0;
#endif
    } else {
      GST_DEBUG ("error parsing subrip time line '%s'", line);
      state->state = 0;
    }

    /* reset per-cue positioning before (re-)reading cue settings */
    state->text_position = 0;
    state->text_size = 0;
    state->line_position = 0;
    state->line_number = 0;

    if (cue_settings)
      parse_webvtt_cue_settings (state, cue_settings + 1);
    else {
      g_free (state->vertical);
      state->vertical = g_strdup ("");
      g_free (state->alignment);
      state->alignment = g_strdup ("");
    }

    return NULL;
  } else
    /* cue text accumulation and end-of-cue are identical to subrip */
    return parse_subrip (state, line);
}
+
static void
unescape_newlines_br (char *read)
{
  char *write = read;

  /* Version 2 of the subviewer format marks line breaks with the literal
   * token "[br]"; rewrite the string in place, turning every occurrence
   * into a real newline.  Strings shorter than the token cannot contain
   * it, so leave them untouched. */
  if (strlen (read) < 4)
    return;

  while (*read != '\0') {
    if (read[0] == '[' && strncmp (read + 1, "br]", 3) == 0) {
      *write++ = '\n';
      read += 4;
    } else {
      *write++ = *read++;
    }
  }

  *write = '\0';
}
+
static gchar *
parse_subviewer (ParserState * state, const gchar * line)
{
  /* Parse one line of a SubViewer file.  State 0 waits for a
   * "hh:mm:ss.ms,hh:mm:ss.ms" timing line; state 1 accumulates text
   * until an empty line terminates the entry.  Returns newly-allocated
   * subtitle text, or NULL when more input is needed. */
  guint h1, m1, s1, ms1;
  guint h2, m2, s2, ms2;
  gchar *ret;

  /* TODO: Maybe also parse the fields in the header, especially DELAY.
   * For examples see the unit test or
   * http://www.doom9.org/index.html?/sub.htm */

  switch (state->state) {
    case 0:
      /* looking for start_time,end_time */
      if (sscanf (line, "%u:%u:%u.%u,%u:%u:%u.%u",
              &h1, &m1, &s1, &ms1, &h2, &m2, &s2, &ms2) == 8) {
        state->state = 1;
        state->start_time =
            (((guint64) h1) * 3600 + m1 * 60 + s1) * GST_SECOND +
            ms1 * GST_MSECOND;
        state->duration =
            (((guint64) h2) * 3600 + m2 * 60 + s2) * GST_SECOND +
            ms2 * GST_MSECOND - state->start_time;
      }
      return NULL;
    case 1:
    {
      /* No need to parse that text if it's out of segment */
#ifdef TIZEN_FEATURE_SUBPARSE_DROP_OUT_OF_SEGMENT
      /* Tizen: dropping of out-of-segment cues can be disabled at runtime */
      if (state->drop_out_of_segment) {
#endif
        guint64 clip_start = 0, clip_stop = 0;
        gboolean in_seg = FALSE;

        /* Check our segment start/stop */
        in_seg = gst_segment_clip (state->segment, GST_FORMAT_TIME,
            state->start_time, state->start_time + state->duration,
            &clip_start, &clip_stop);

        if (in_seg) {
          state->start_time = clip_start;
          state->duration = clip_stop - clip_start;
        } else {
          /* entirely outside the segment: discard and wait for the next
           * timing line */
          state->state = 0;
          return NULL;
        }
#ifdef TIZEN_FEATURE_SUBPARSE_DROP_OUT_OF_SEGMENT
      }
#endif
    }
      /* looking for subtitle text; empty line ends this subtitle entry */
      if (state->buf->len)
        g_string_append_c (state->buf, '\n');
      g_string_append (state->buf, line);
      if (strlen (line) == 0) {
        ret = g_strdup (state->buf->str);
        unescape_newlines_br (ret);
        strip_trailing_newlines (ret);
        g_string_truncate (state->buf, 0);
        state->state = 0;
        return ret;
      }
      return NULL;
    default:
      g_assert_not_reached ();
      return NULL;
  }
}
+
static gchar *
parse_mpsub (ParserState * state, const gchar * line)
{
  /* Parse one line of an MPSub file.  Timing lines carry two floats:
   * the gap since the previous subtitle ended and the duration, so
   * start_time advances cumulatively.  State 1 accumulates text until
   * an empty line ends the entry.  Returns newly-allocated subtitle
   * text, or NULL when more input is needed. */
  gchar *ret;
  float t1, t2;

  switch (state->state) {
    case 0:
      /* looking for two floats (offset, duration) */
      if (sscanf (line, "%f %f", &t1, &t2) == 2) {
        state->state = 1;
        /* t1 is relative to the END of the previous subtitle */
        state->start_time += state->duration + GST_SECOND * t1;
        state->duration = GST_SECOND * t2;
      }
      return NULL;
    case 1:
    {                           /* No need to parse that text if it's out of segment */
#ifdef TIZEN_FEATURE_SUBPARSE_DROP_OUT_OF_SEGMENT
      /* Tizen: dropping of out-of-segment cues can be disabled at runtime */
      if (state->drop_out_of_segment) {
#endif
        guint64 clip_start = 0, clip_stop = 0;
        gboolean in_seg = FALSE;

        /* Check our segment start/stop */
        in_seg = gst_segment_clip (state->segment, GST_FORMAT_TIME,
            state->start_time, state->start_time + state->duration,
            &clip_start, &clip_stop);

        if (in_seg) {
          state->start_time = clip_start;
          state->duration = clip_stop - clip_start;
        } else {
          state->state = 0;
          return NULL;
        }
#ifdef TIZEN_FEATURE_SUBPARSE_DROP_OUT_OF_SEGMENT
      }
#endif
    }
      /* looking for subtitle text; empty line ends this
       * subtitle entry */
      if (state->buf->len)
        g_string_append_c (state->buf, '\n');
      g_string_append (state->buf, line);
      if (strlen (line) == 0) {
        ret = g_strdup (state->buf->str);
        g_string_truncate (state->buf, 0);
        state->state = 0;
        return ret;
      }
      return NULL;
    default:
      g_assert_not_reached ();
      return NULL;
  }
}
+
static const char *
dks_skip_timestamp (const char *line)
{
  /* Skip the leading "[h:m:s]" timestamp of a DKS line: return the
   * first character after the first ']', or the end of the string when
   * no ']' is present. */
  const char *close_bracket = strchr (line, ']');

  return close_bracket != NULL ? close_bracket + 1 : line + strlen (line);
}
+
+ static gchar *
+ parse_dks (ParserState * state, const gchar * line)
+ {
+ guint h, m, s;
+
+ switch (state->state) {
+ case 0:
+ /* Looking for the start time and text */
+ if (sscanf (line, "[%u:%u:%u]", &h, &m, &s) == 3) {
+ const gchar *text;
+ state->start_time = (((guint64) h) * 3600 + m * 60 + s) * GST_SECOND;
+ text = dks_skip_timestamp (line);
+ if (*text) {
+ state->state = 1;
+ g_string_append (state->buf, text);
+ }
+ }
+ return NULL;
+ case 1:
+ {
+ guint64 clip_start = 0, clip_stop = 0;
+ gboolean in_seg;
+ gchar *ret;
+
+ /* Looking for the end time */
+ if (sscanf (line, "[%u:%u:%u]", &h, &m, &s) == 3) {
+ state->state = 0;
+ state->duration = (((guint64) h) * 3600 + m * 60 + s) * GST_SECOND -
+ state->start_time;
+ } else {
+ GST_WARNING ("Failed to parse subtitle end time");
+ return NULL;
+ }
-
++#ifdef TIZEN_FEATURE_SUBPARSE_DROP_OUT_OF_SEGMENT
++ if (state->drop_out_of_segment) {
++#endif
+ /* Check if this subtitle is out of the current segment */
+ in_seg = gst_segment_clip (state->segment, GST_FORMAT_TIME,
+ state->start_time, state->start_time + state->duration,
+ &clip_start, &clip_stop);
+
+ if (!in_seg) {
+ return NULL;
+ }
++#ifdef TIZEN_FEATURE_SUBPARSE_DROP_OUT_OF_SEGMENT
++ }
++#endif
+
+ state->start_time = clip_start;
+ state->duration = clip_stop - clip_start;
+
+ ret = g_strdup (state->buf->str);
+ g_string_truncate (state->buf, 0);
+ unescape_newlines_br (ret);
+ return ret;
+ }
+ default:
+ g_assert_not_reached ();
+ return NULL;
+ }
+ }
+
static void
parser_state_init (ParserState * state)
{
  /* Reset the per-stream parser state; called at startup and on
   * discontinuities.  Reuses the existing text accumulator when one is
   * already allocated. */
  GST_DEBUG ("initialising parser");

  if (state->buf) {
    g_string_truncate (state->buf, 0);
  } else {
    state->buf = g_string_new (NULL);
  }

  state->start_time = 0;
  state->duration = 0;
  state->max_duration = 0;      /* no limit */
  state->state = 0;
  state->segment = NULL;
#ifdef TIZEN_FEATURE_HLS_WEBVTT
  /* forget X-TIMESTAMP-MAP offsets from a previous fragment */
  state->local = 0;
  state->mpegts = 0;
#endif
}
+
static void
parser_state_dispose (GstSubParse * self, ParserState * state)
{
  /* Free everything owned by the parser state; format-specific
   * user_data is released via the matching context deinit for the
   * current parser type. */
  if (state->buf) {
    g_string_free (state->buf, TRUE);
    state->buf = NULL;
  }

  g_free (state->vertical);
  state->vertical = NULL;
  g_free (state->alignment);
  state->alignment = NULL;

  if (state->user_data) {
    switch (self->parser_type) {
      case GST_SUB_PARSE_FORMAT_QTTEXT:
        qttext_context_deinit (state);
        break;
      case GST_SUB_PARSE_FORMAT_SAMI:
        sami_context_deinit (state);
        break;
      default:
        break;
    }
  }
  /* allowed_tags points at a shared tag table set during format
   * detection; it is not owned here, so just clear the pointer */
  state->allowed_tags = NULL;
}
+
+
+
static GstCaps *
gst_sub_parse_format_autodetect (GstSubParse * self)
{
  /* Detect the subtitle format from the start of the decoded text,
   * install the matching line parser and (re)initialise the parser
   * state, and return the preferred source caps for that format
   * (pango-markup or plain utf8).  Returns NULL when there is not
   * enough data yet, or when the format is unknown (in which case an
   * element error is posted). */
  gchar *data;
  GstSubParseFormat format;

  if (strlen (self->textbuf->str) < 6) {
    GST_DEBUG ("File too small to be a subtitles file");
    return NULL;
  }

  /* only the first 35 characters are inspected for detection */
  data = g_strndup (self->textbuf->str, 35);
  format = gst_sub_parse_data_format_autodetect (data);
  g_free (data);

  self->parser_type = format;
  self->subtitle_codec = gst_sub_parse_get_format_description (format);
  parser_state_init (&self->state);
  self->state.allowed_tags = NULL;

  switch (format) {
    case GST_SUB_PARSE_FORMAT_MDVDSUB:
      self->parse_line = parse_mdvdsub;
      return gst_caps_new_simple ("text/x-raw",
          "format", G_TYPE_STRING, "pango-markup", NULL);
    case GST_SUB_PARSE_FORMAT_SUBRIP:
      self->state.allowed_tags = (gpointer) allowed_srt_tags;
      self->state.allows_tag_attributes = FALSE;
      self->parse_line = parse_subrip;
      return gst_caps_new_simple ("text/x-raw",
          "format", G_TYPE_STRING, "pango-markup", NULL);
    case GST_SUB_PARSE_FORMAT_MPSUB:
      self->parse_line = parse_mpsub;
      return gst_caps_new_simple ("text/x-raw",
          "format", G_TYPE_STRING, "utf8", NULL);
    case GST_SUB_PARSE_FORMAT_SAMI:
      self->parse_line = parse_sami;
      sami_context_init (&self->state);
      return gst_caps_new_simple ("text/x-raw",
          "format", G_TYPE_STRING, "pango-markup", NULL);
    case GST_SUB_PARSE_FORMAT_TMPLAYER:
      self->parse_line = parse_tmplayer;
      /* tmplayer has no per-entry duration; cap it (see handle_buffer) */
      self->state.max_duration = 5 * GST_SECOND;
      return gst_caps_new_simple ("text/x-raw",
          "format", G_TYPE_STRING, "utf8", NULL);
    case GST_SUB_PARSE_FORMAT_MPL2:
      self->parse_line = parse_mpl2;
      return gst_caps_new_simple ("text/x-raw",
          "format", G_TYPE_STRING, "pango-markup", NULL);
    case GST_SUB_PARSE_FORMAT_DKS:
      self->parse_line = parse_dks;
      return gst_caps_new_simple ("text/x-raw",
          "format", G_TYPE_STRING, "utf8", NULL);
    case GST_SUB_PARSE_FORMAT_VTT:
      self->state.allowed_tags = (gpointer) allowed_vtt_tags;
      self->state.allows_tag_attributes = TRUE;
      self->parse_line = parse_webvtt;
      return gst_caps_new_simple ("text/x-raw",
          "format", G_TYPE_STRING, "pango-markup", NULL);
    case GST_SUB_PARSE_FORMAT_SUBVIEWER:
      self->parse_line = parse_subviewer;
      return gst_caps_new_simple ("text/x-raw",
          "format", G_TYPE_STRING, "utf8", NULL);
    case GST_SUB_PARSE_FORMAT_QTTEXT:
      self->parse_line = parse_qttext;
      qttext_context_init (&self->state);
      return gst_caps_new_simple ("text/x-raw",
          "format", G_TYPE_STRING, "pango-markup", NULL);
    case GST_SUB_PARSE_FORMAT_LRC:
      self->parse_line = parse_lrc;
      return gst_caps_new_simple ("text/x-raw",
          "format", G_TYPE_STRING, "utf8", NULL);
    case GST_SUB_PARSE_FORMAT_UNKNOWN:
    default:
      GST_DEBUG ("no subtitle format detected");
      GST_ELEMENT_ERROR (self, STREAM, WRONG_TYPE,
          ("The input is not a valid/supported subtitle file"), (NULL));
      return NULL;
  }
}
+
static void
feed_textbuf (GstSubParse * self, GstBuffer * buf)
{
  /* Append the buffer to the adapter, convert whatever can be decoded
   * to UTF-8 and append that to self->textbuf.  A discontinuity
   * (DISCONT flag or unexpected byte offset) flushes all pending state
   * first.  Takes ownership of buf (via gst_adapter_push). */
  gboolean discont;
  gsize consumed;
  gchar *input = NULL;
  const guint8 *data;
  gsize avail;

  discont = GST_BUFFER_IS_DISCONT (buf);

  if (GST_BUFFER_OFFSET_IS_VALID (buf) &&
      GST_BUFFER_OFFSET (buf) != self->offset) {
    self->offset = GST_BUFFER_OFFSET (buf);
    discont = TRUE;
  }

  if (discont) {
    GST_INFO ("discontinuity");
    /* flush the parser state */
    parser_state_init (&self->state);
    g_string_truncate (self->textbuf, 0);
    gst_adapter_clear (self->adapter);
    if (self->parser_type == GST_SUB_PARSE_FORMAT_SAMI)
      sami_context_reset (&self->state);
    /* we could set a flag to make sure that the next buffer we push out also
     * has the DISCONT flag set, but there's no point really given that it's
     * subtitles which are discontinuous by nature. */
  }

  self->offset += gst_buffer_get_size (buf);

  gst_adapter_push (self->adapter, buf);

  avail = gst_adapter_available (self->adapter);
  data = gst_adapter_map (self->adapter, avail);
  input = convert_encoding (self, (const gchar *) data, avail, &consumed);

  if (input && consumed > 0) {
    self->textbuf = g_string_append (self->textbuf, input);
    gst_adapter_unmap (self->adapter);
    gst_adapter_flush (self->adapter, consumed);
  } else {
    /* nothing decodable yet (e.g. an incomplete multi-byte sequence):
     * keep the bytes in the adapter for the next buffer */
    gst_adapter_unmap (self->adapter);
  }

  g_free (input);
}
+
+
+ static void
+ xml_text (GMarkupParseContext * context,
+ const gchar * text, gsize text_len, gpointer user_data, GError ** error)
+ {
+ gchar **accum = (gchar **) user_data;
+ gchar *concat;
+
+ if (*accum) {
+ concat = g_strconcat (*accum, text, NULL);
+ g_free (*accum);
+ *accum = concat;
+ } else {
+ *accum = g_strdup (text);
+ }
+ }
+
static gchar *
strip_pango_markup (gchar * markup, GError ** error)
{
  /* Remove pango markup tags from a string by feeding it through a
   * GMarkup parser that only collects text nodes (xml_text above).
   * The input is wrapped in a dummy <root> element so plain text forms
   * a valid XML document.  Returns a newly-allocated plain string, or
   * NULL with *error set on malformed markup. */
  GMarkupParser parser = { 0, };
  GMarkupParseContext *context;
  gchar *accum = NULL;

  parser.text = xml_text;
  context = g_markup_parse_context_new (&parser, 0, &accum, NULL);

  g_markup_parse_context_parse (context, "<root>", 6, NULL);
  g_markup_parse_context_parse (context, markup, strlen (markup), error);
  g_markup_parse_context_parse (context, "</root>", 7, NULL);
  if (*error)
    goto error;

  g_markup_parse_context_end_parse (context, error);
  if (*error)
    goto error;

done:
  g_markup_parse_context_free (context);
  return accum;

error:
  /* free the partial accumulation, then share the cleanup path */
  g_free (accum);
  accum = NULL;
  goto done;
}
+
static gboolean
gst_sub_parse_negotiate (GstSubParse * self, GstCaps * preferred)
{
  /* Fixate output caps against what downstream accepts.  'preferred'
   * is what the detected format would like to produce; when it is
   * plain utf8 the allowed caps are intersected with it.  If the
   * parser produces pango-markup but downstream fixated to utf8,
   * remember to strip the markup later.  Returns TRUE when caps were
   * set on the source pad. */
  GstCaps *caps;
  gboolean ret = FALSE;
  const GstStructure *s1, *s2;

  caps = gst_pad_get_allowed_caps (self->srcpad);

  s1 = gst_caps_get_structure (preferred, 0);

  if (!g_strcmp0 (gst_structure_get_string (s1, "format"), "utf8")) {
    GstCaps *intersected = gst_caps_intersect (caps, preferred);
    gst_caps_unref (caps);
    caps = intersected;
  }

  caps = gst_caps_fixate (caps);

  if (gst_caps_is_empty (caps)) {
    goto done;
  }

  s2 = gst_caps_get_structure (caps, 0);

  /* strip markup iff we produce pango-markup but downstream wants utf8 */
  self->strip_pango_markup =
      !g_strcmp0 (gst_structure_get_string (s2, "format"), "utf8")
      && !g_strcmp0 (gst_structure_get_string (s1, "format"), "pango-markup");

  if (self->strip_pango_markup) {
    GST_INFO_OBJECT (self, "We will convert from pango-markup to utf8");
  }

  ret = gst_pad_set_caps (self->srcpad, caps);

done:
  gst_caps_unref (caps);
  return ret;
}
+
static GstFlowReturn
handle_buffer (GstSubParse * self, GstBuffer * buf)
{
  /* Core processing: feed the buffer into the text accumulator,
   * autodetect the format on first data, negotiate caps, then parse
   * complete lines and push one output buffer per finished subtitle.
   * Takes ownership of buf.  Returns the flow result of the last push
   * (or a negotiation error). */
  GstFlowReturn ret = GST_FLOW_OK;
  gchar *line, *subtitle;
  gboolean need_tags = FALSE;
#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
  GstMessage *m = NULL;
#endif
#ifdef TIZEN_FEATURE_HLS_WEBVTT
  GstClockTime fragment_timestamp = GST_CLOCK_TIME_NONE;
#endif

  if (self->first_buffer) {
    GstMapInfo map;

    /* sniff the character encoding from the very first buffer */
    gst_buffer_map (buf, &map, GST_MAP_READ);
    self->detected_encoding =
        gst_sub_parse_detect_encoding ((gchar *) map.data, map.size);
    gst_buffer_unmap (buf, &map);
    self->first_buffer = FALSE;
    self->state.fps_n = self->fps_n;
    self->state.fps_d = self->fps_d;
  }
#ifdef TIZEN_FEATURE_HLS_WEBVTT
  /* remember the timestamp starting a new HLS fragment so it can be
   * relayed downstream before this fragment's cues */
  if (GST_BUFFER_IS_DISCONT (buf) && GST_BUFFER_PTS_IS_VALID (buf))
    fragment_timestamp = GST_BUFFER_PTS (buf);
#endif

  feed_textbuf (self, buf);

  /* make sure we know the format */
  if (G_UNLIKELY (self->parser_type == GST_SUB_PARSE_FORMAT_UNKNOWN)) {
    GstCaps *preferred;

    if (!(preferred = gst_sub_parse_format_autodetect (self))) {
      return GST_FLOW_NOT_NEGOTIATED;
    }

    if (!gst_sub_parse_negotiate (self, preferred)) {
      gst_caps_unref (preferred);
      return GST_FLOW_NOT_NEGOTIATED;
    }

    gst_caps_unref (preferred);

    need_tags = TRUE;
  }

  /* Push newsegment if needed */
  if (self->need_segment) {
    GST_LOG_OBJECT (self, "pushing newsegment event with %" GST_SEGMENT_FORMAT,
        &self->segment);

    gst_pad_push_event (self->srcpad, gst_event_new_segment (&self->segment));
    self->need_segment = FALSE;
  }

  if (need_tags) {
    /* push tags */
    if (self->subtitle_codec != NULL) {
      GstTagList *tags;

      tags = gst_tag_list_new (GST_TAG_SUBTITLE_CODEC, self->subtitle_codec,
          NULL);
      gst_pad_push_event (self->srcpad, gst_event_new_tag (tags));
    }
  }
#ifdef TIZEN_FEATURE_HLS_WEBVTT
  if (self->parser_type == GST_SUB_PARSE_FORMAT_VTT)
    send_fragment_timestamp_event (self, fragment_timestamp);
#endif

  while (!self->flushing && (line = get_next_line (self))) {
    guint offset = 0;

    /* Set segment on our parser state machine */
    self->state.segment = &self->segment;
    /* Now parse the line, out of segment lines will just return NULL */
    GST_LOG_OBJECT (self, "State %d. Parsing line '%s'", self->state.state,
        line + offset);
    subtitle = self->parse_line (&self->state, line + offset);
    g_free (line);
#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
    /* Tizen: post the language list discovered by the SAMI parser to
     * the application, exactly once per stream */
    if (!self->state.langlist_msg_posted && self->state.language_list) {
      m = gst_message_new_element (GST_OBJECT_CAST (self),
          gst_structure_new ("Ext_Sub_Language_List", "lang_list",
              G_TYPE_POINTER, self->state.language_list, NULL));

      gst_element_post_message (GST_ELEMENT_CAST (self), m);
      self->state.langlist_msg_posted = TRUE;
      GST_DEBUG_OBJECT (self, "curr lang as : %s ",
          GST_STR_NULL (self->state.current_language));
    }
#endif
    if (subtitle) {
      guint subtitle_len;

      if (self->strip_pango_markup) {
        GError *error = NULL;
        gchar *stripped;

        /* on failure keep the marked-up text rather than dropping it */
        if ((stripped = strip_pango_markup (subtitle, &error))) {
          g_free (subtitle);
          subtitle = stripped;
        } else {
          GST_WARNING_OBJECT (self, "Failed to strip pango markup: %s",
              error->message);
        }
      }

      subtitle_len = strlen (subtitle);

      /* +1 for terminating NUL character */
      buf = gst_buffer_new_and_alloc (subtitle_len + 1);

      /* copy terminating NUL character as well */
      gst_buffer_fill (buf, 0, subtitle, subtitle_len + 1);
      gst_buffer_set_size (buf, subtitle_len);

      GST_BUFFER_TIMESTAMP (buf) = self->state.start_time;
      GST_BUFFER_DURATION (buf) = self->state.duration;

      /* in some cases (e.g. tmplayer) we can only determine the duration
       * of a text chunk from the timestamp of the next text chunk; in those
       * cases, we probably want to limit the duration to something
       * reasonable, so we don't end up showing some text for e.g. 40 seconds
       * just because nothing else is being said during that time */
      if (self->state.max_duration > 0 && GST_BUFFER_DURATION_IS_VALID (buf)) {
        if (GST_BUFFER_DURATION (buf) > self->state.max_duration)
          GST_BUFFER_DURATION (buf) = self->state.max_duration;
      }

      self->segment.position = self->state.start_time;

      GST_DEBUG_OBJECT (self, "Sending text '%s', %" GST_TIME_FORMAT " + %"
          GST_TIME_FORMAT, subtitle, GST_TIME_ARGS (self->state.start_time),
          GST_TIME_ARGS (self->state.duration));

      g_free (self->state.vertical);
      self->state.vertical = NULL;
      g_free (self->state.alignment);
      self->state.alignment = NULL;

      ret = gst_pad_push (self->srcpad, buf);

      /* move this forward (the tmplayer parser needs this) */
      if (self->state.duration != GST_CLOCK_TIME_NONE)
        self->state.start_time += self->state.duration;

      g_free (subtitle);
      subtitle = NULL;

      if (ret != GST_FLOW_OK) {
        GST_DEBUG_OBJECT (self, "flow: %s", gst_flow_get_name (ret));
        break;
      }
    }
  }

  return ret;
}
+
+ static GstFlowReturn
+ gst_sub_parse_chain (GstPad * sinkpad, GstObject * parent, GstBuffer * buf)
+ {
+ GstFlowReturn ret;
+ GstSubParse *self;
+
+ self = GST_SUBPARSE (parent);
+
+ ret = handle_buffer (self, buf);
+
+ return ret;
+ }
+
static gboolean
gst_sub_parse_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  /* Sink pad event handler.  EOS/STREAM_GROUP_DONE first flush out a
   * possibly unterminated last chunk; SEGMENT events are swallowed here
   * and replayed from handle_buffer(); FLUSH_START/STOP toggle the
   * flushing flag. */
  GstSubParse *self = GST_SUBPARSE (parent);
  gboolean ret = FALSE;

  GST_LOG_OBJECT (self, "%s event", GST_EVENT_TYPE_NAME (event));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_STREAM_GROUP_DONE:
    case GST_EVENT_EOS:{
      /* Make sure the last subrip chunk is pushed out even
       * if the file does not have an empty line at the end */
      if (self->parser_type == GST_SUB_PARSE_FORMAT_SUBRIP ||
          self->parser_type == GST_SUB_PARSE_FORMAT_TMPLAYER ||
          self->parser_type == GST_SUB_PARSE_FORMAT_MPL2 ||
          self->parser_type == GST_SUB_PARSE_FORMAT_QTTEXT ||
          self->parser_type == GST_SUB_PARSE_FORMAT_VTT) {
        gchar term_chars[] = { '\n', '\n', '\0' };
        GstBuffer *buf = gst_buffer_new_and_alloc (2 + 1);

        GST_DEBUG_OBJECT (self, "%s: force pushing of any remaining text",
            GST_EVENT_TYPE_NAME (event));

        /* two newlines look like an empty line, which terminates the
         * pending entry */
        gst_buffer_fill (buf, 0, term_chars, 3);
        gst_buffer_set_size (buf, 2);

        GST_BUFFER_OFFSET (buf) = self->offset;
        gst_sub_parse_chain (pad, parent, buf);
      }
      ret = gst_pad_event_default (pad, parent, event);
      break;
    }
    case GST_EVENT_SEGMENT:
    {
      const GstSegment *s;

#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
      /* Tizen: don't let a stale segment event race a seek in
       * progress; before any data has arrived only try the lock and
       * drop the event if a seek currently holds it */
      if (self->first_buffer) {
        if (!SUBPARSE_SEEK_TRYLOCK (self)) {
          /* new seeking request is in process */
          GST_WARNING_OBJECT (self, "ignore the old newsegment event");
          ret = TRUE;
          gst_event_unref (event);
          break;
        }
      } else {
        SUBPARSE_SEEK_LOCK (self);
      }
#endif

      gst_event_parse_segment (event, &s);
      if (s->format == GST_FORMAT_TIME)
        gst_event_copy_segment (event, &self->segment);
      GST_DEBUG_OBJECT (self, "newsegment (%s)",
          gst_format_get_name (self->segment.format));

      /* if not time format, we'll either start with a 0 timestamp anyway or
       * it's following a seek in which case we'll have saved the requested
       * seek segment and don't want to overwrite it (remember that on a seek
       * we always just seek back to the start in BYTES format and just throw
       * away all text that's before the requested position; if the subtitles
       * come from an upstream demuxer, it won't be able to handle our BYTES
       * seek request and instead send us a newsegment from the seek request
       * it received via its video pads instead, so all is fine then too) */
      ret = TRUE;
      gst_event_unref (event);
      /* in either case, let's not simply discard this event;
       * trigger sending of the saved requested seek segment
       * or the one taken here from upstream */
      self->need_segment = TRUE;

#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
      SUBPARSE_SEEK_UNLOCK (self);
#endif

      break;
    }
    case GST_EVENT_FLUSH_START:
    {
      self->flushing = TRUE;

      ret = gst_pad_event_default (pad, parent, event);
      break;
    }
    case GST_EVENT_FLUSH_STOP:
    {
      self->flushing = FALSE;

      ret = gst_pad_event_default (pad, parent, event);
      break;
    }
    default:
      ret = gst_pad_event_default (pad, parent, event);
      break;
  }

  return ret;
}
+
+
static GstStateChangeReturn
gst_sub_parse_change_state (GstElement * element, GstStateChange transition)
{
  /* Element state handling: reset all per-stream state going
   * READY->PAUSED (the format is re-detected on the next data),
   * release parser resources going PAUSED->READY. */
  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
  GstSubParse *self = GST_SUBPARSE (element);

  switch (transition) {
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      /* format detection will init the parser state */
      self->offset = 0;
      self->parser_type = GST_SUB_PARSE_FORMAT_UNKNOWN;
      self->strip_pango_markup = FALSE;
      self->valid_utf8 = TRUE;
      self->first_buffer = TRUE;
      g_free (self->detected_encoding);
      self->detected_encoding = NULL;
      g_string_truncate (self->textbuf, 0);
      gst_adapter_clear (self->adapter);
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
  if (ret == GST_STATE_CHANGE_FAILURE)
    return ret;

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      parser_state_dispose (self, &self->state);
      self->parser_type = GST_SUB_PARSE_FORMAT_UNKNOWN;
      break;
    default:
      break;
  }

  return ret;
}
--- /dev/null
-
+ /* GStreamer
+ * Copyright (C) <2002> David A. Schleef <ds@schleef.org>
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifndef __GST_SUBPARSE_H__
+ #define __GST_SUBPARSE_H__
+
+ #include <gst/gst.h>
+ #include <gst/base/gstadapter.h>
+
+ #include "gstsubparseelements.h"
+
+ G_BEGIN_DECLS
+
+ #define GST_TYPE_SUBPARSE (gst_sub_parse_get_type ())
+ G_DECLARE_FINAL_TYPE (GstSubParse, gst_sub_parse, GST, SUBPARSE, GstElement)
+
+
/* Per-stream state shared by all line parsers (one instance embedded in
 * GstSubParse). */
typedef struct {
  int state;                    /* format-specific state-machine state */
  GString *buf;                 /* accumulates multi-line subtitle text */
  guint64 start_time;           /* start of the current cue (ns) */
  guint64 duration;             /* duration of the current cue (ns) */
  guint64 max_duration;         /* to clamp duration, 0 = no limit (used by tmplayer parser) */
  GstSegment *segment;          /* current segment, for out-of-segment clipping */
  gpointer user_data;           /* format-specific context (SAMI/qttext) */
  gboolean have_internal_fps;   /* If TRUE don't overwrite fps by property */
  gint fps_n, fps_d;            /* used by frame based parsers */
  guint8 line_position;         /* percent value */
  gint line_number;             /* line number, can be positive or negative */
  guint8 text_position;         /* percent value */
  guint8 text_size;             /* percent value */
  gchar *vertical;              /* "", "vertical", "vertical-lr" */
  gchar *alignment;             /* "", "start", "middle", "end" */
  gconstpointer allowed_tags;   /* list of markup tags allowed in the cue text. */
  gboolean allows_tag_attributes;
#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
  GList *language_list;         /* languages found in a SAMI file, posted to
                                 * the app via "Ext_Sub_Language_List" */
  gchar *current_language;      /* language currently being parsed */
  gboolean langlist_msg_posted; /* language-list message posted already */
#endif
#ifdef TIZEN_FEATURE_HLS_WEBVTT
  guint64 local;                /* cue-local time from X-TIMESTAMP-MAP (HLS WebVTT) */
  guint64 mpegts;               /* MPEG-TS time from X-TIMESTAMP-MAP (HLS WebVTT) */
#endif
#ifdef TIZEN_FEATURE_SUBPARSE_DROP_OUT_OF_SEGMENT
  gboolean drop_out_of_segment; /* if TRUE, discard cues outside the segment */
#endif
} ParserState;
+
+ typedef gchar* (*Parser) (ParserState *state, const gchar *line);
+
struct _GstSubParse {
  GstElement element;

  GstPad *sinkpad, *srcpad;

  /* contains the input in the input encoding */
  GstAdapter *adapter;
  /* contains the UTF-8 decoded input */
  GString *textbuf;

  GstSubParseFormat parser_type;        /* detected subtitle format */
  gboolean parser_detected;
  const gchar *subtitle_codec;          /* human-readable format name for tags */

  Parser parse_line;                    /* line parser for the detected format */
  ParserState state;

  /* seek */
  guint64 offset;                       /* byte offset of the next expected buffer */

  /* Segment */
  GstSegment segment;
  gboolean need_segment;                /* push a segment event before next data */

  gboolean flushing;
  gboolean valid_utf8;
  gchar *detected_encoding;             /* sniffed from the first buffer */
  gchar *encoding;                      /* encoding forced via property */
  gboolean strip_pango_markup;          /* downstream wants utf8 only */

  gboolean first_buffer;

  /* used by frame based parsers */
  gint fps_n, fps_d;
#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
  GMutex seek_lock;                     /* serialises segment-event handling
                                         * against new seek requests */
#endif
};
+
+ G_END_DECLS
+
+ #endif /* __GST_SUBPARSE_H__ */
--- /dev/null
- if (context->has_result) {
- if (context->rubybuf->len) {
- context->rubybuf = g_string_append_c (context->rubybuf, '\n');
- g_string_prepend (context->resultbuf, context->rubybuf->str);
- context->rubybuf = g_string_truncate (context->rubybuf, 0);
+ /* GStreamer SAMI subtitle parser
+ * Copyright (c) 2006, 2013 Young-Ho Cha <ganadist at gmail com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
++#ifdef HAVE_CONFIG_H
++#include "config.h"
++#endif
+ #include "samiparse.h"
+
+ #include <glib.h>
+ #include <string.h>
+ #include <stdlib.h>
+
+ #define ITALIC_TAG 'i'
+ #define SPAN_TAG 's'
+ #define RUBY_TAG 'r'
+ #define RT_TAG 't'
+ #define CLEAR_TAG '0'
+
+ typedef struct _HtmlParser HtmlParser;
+ typedef struct _HtmlContext HtmlContext;
+ typedef struct _GstSamiContext GstSamiContext;
#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
typedef struct _LanguageStruct GstLangStruct;
/* One entry of the SAMI language list handed to the application. */
struct _LanguageStruct
{
  gchar *language_code;         /* language code from the SAMI head; exact
                                 * format not visible here — presumably an
                                 * RFC-3066-style code, confirm against users */
  gchar *language_key;          /* class/key naming the language in the markup */
};
#endif
+
/* Parsing state for one SAMI document; stored in ParserState->user_data. */
struct _GstSamiContext
{
  GString *buf;                 /* buffer to collect content */
  GString *rubybuf;             /* buffer to collect ruby content */
  GString *resultbuf;           /* when opening the next 'sync' tag, move
                                 * from 'buf' to avoid to append following
                                 * content */
  GString *state;               /* in many sami files there are tags that
                                 * are not closed, so for each open tag the
                                 * parser will append a tag flag here so
                                 * that tags can be closed properly on
                                 * 'sync' tags. See _context_push_state()
                                 * and _context_pop_state(). */
  HtmlContext *htmlctxt;        /* html parser context */
  gboolean has_result;          /* set when ready to push out result */
  gboolean in_sync;             /* flag to avoid appending anything except the
                                 * content of the sync elements to buf */
  guint64 time1;                /* previous start attribute in sync tag */
  guint64 time2;                /* current start attribute in sync tag */
#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
  guint64 time3;                /* To store the last current time when language is changed */
  GList *lang_list;             /* Language list for an external subtitle file */
  gchar *current_language;      /* Current language parsed */
  gchar *desired_language;      /* Language set by user */
  gboolean language_changed;    /* language changed signal */
  gboolean end_body;            /* </BODY> reached */
#endif
};
+
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++#define strcasestr _gst_ascii_strcasestr
++
++/* From https://github.com/freebsd/freebsd/blob/master/contrib/file/src/strcasestr.c
++ * Updated to use GLib types and GLib string functions
++ *
++ * Copyright (c) 1990, 1993
++ * The Regents of the University of California. All rights reserved.
++ *
++ * This code is derived from software contributed to Berkeley by
++ * Chris Torek.
++ *
++ * Redistribution and use in source and binary forms, with or without
++ * modification, are permitted provided that the following conditions
++ * are met:
++ * 1. Redistributions of source code must retain the above copyright
++ * notice, this list of conditions and the following disclaimer.
++ * 2. Redistributions in binary form must reproduce the above copyright
++ * notice, this list of conditions and the following disclaimer in the
++ * documentation and/or other materials provided with the distribution.
++ * 3. Neither the name of the University nor the names of its contributors
++ * may be used to endorse or promote products derived from this software
++ * without specific prior written permission.
++ *
++ * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
++ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
++ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
++ * ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
++ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
++ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
++ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
++ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
++ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
++ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
++ * SUCH DAMAGE.
++ */
++
++/*
++ * Find the first occurrence of find in s, ignore case.
++ */
++
/* ASCII-only tolower(), matching g_ascii_tolower() semantics: only the
 * characters 'A'..'Z' are affected, independent of locale. */
static char
_sami_ascii_lower (char c)
{
  return (c >= 'A' && c <= 'Z') ? (char) (c - 'A' + 'a') : c;
}

/*
 * Find the first occurrence of find in s, ignoring ASCII case.
 * An empty needle matches at the start of s; returns NULL when there
 * is no match, otherwise a pointer into s.
 */
static char *
_gst_ascii_strcasestr (const char *s, const char *find)
{
  char first = *find;

  if (first != '\0') {
    size_t rest_len = strlen (find + 1);

    first = _sami_ascii_lower (first);
    for (;; s++) {
      size_t i;

      if (*s == '\0')
        return NULL;
      if (_sami_ascii_lower (*s) != first)
        continue;
      /* first character matched: compare the remainder */
      for (i = 0; i < rest_len; i++) {
        if (_sami_ascii_lower (s[1 + i]) != _sami_ascii_lower (find[1 + i]))
          break;
      }
      if (i == rest_len)
        break;
    }
  }
  return (char *) s;
}
++#endif
++
+ /* Callback table for the minimal HTML/SAMI push parser below; every
+  * callback receives the user_data passed to html_context_new(). */
+ struct _HtmlParser
+ {
+   /* opening tag; attr is a NULL-terminated name/value string array */
+   void (*start_element) (HtmlContext * ctx,
+       const gchar * name, const gchar ** attr, gpointer user_data);
+   /* closing tag */
+   void (*end_element) (HtmlContext * ctx,
+       const gchar * name, gpointer user_data);
+   /* character data between tags */
+   void (*text) (HtmlContext * ctx,
+       const gchar * text, gsize text_len, gpointer user_data);
+ };
+
+ /* Parser state: the callbacks, their user data, and a carry-over
+  * buffer holding input that could not be consumed yet (e.g. a tag
+  * split across two feeds). */
+ struct _HtmlContext
+ {
+   const HtmlParser *parser;
+   gpointer user_data;
+   GString *buf;
+ };
+
+ /* Create a parser context driving @parser's callbacks with @user_data;
+  * free with html_context_free(). */
+ static HtmlContext *
+ html_context_new (HtmlParser * parser, gpointer user_data)
+ {
+   HtmlContext *ctxt = (HtmlContext *) g_new0 (HtmlContext, 1);
+   ctxt->parser = parser;
+   ctxt->user_data = user_data;
+   ctxt->buf = g_string_new (NULL);
+   return ctxt;
+ }
+
+ /* Free a context from html_context_new(), dropping buffered input. */
+ static void
+ html_context_free (HtmlContext * ctxt)
+ {
+   g_string_free (ctxt->buf, TRUE);
+   g_free (ctxt);
+ }
+
+ /* Code point -> named character reference (without '&', with ';'). */
+ struct EntityMap
+ {
+   const gunichar unescaped;
+   const gchar *escaped;
+ };
+ 
+ /* Predefined XML entities: deliberately kept escaped by
+  * unescape_string() so pango markup parsing resolves them later. */
+ struct EntityMap XmlEntities[] = {
+   {34, "quot;"},
+   {38, "amp;"},
+   {39, "apos;"},
+   {60, "lt;"},
+   {62, "gt;"},
+   {0, NULL},
+ };
+
+ struct EntityMap HtmlEntities[] = {
+ /* nbsp will handle manually
+ { 160, "nbsp;" }, */
+ {161, "iexcl;"},
+ {162, "cent;"},
+ {163, "pound;"},
+ {164, "curren;"},
+ {165, "yen;"},
+ {166, "brvbar;"},
+ {167, "sect;"},
+ {168, "uml;"},
+ {169, "copy;"},
+ {170, "ordf;"},
+ {171, "laquo;"},
+ {172, "not;"},
+ {173, "shy;"},
+ {174, "reg;"},
+ {175, "macr;"},
+ {176, "deg;"},
+ {177, "plusmn;"},
+ {178, "sup2;"},
+ {179, "sup3;"},
+ {180, "acute;"},
+ {181, "micro;"},
+ {182, "para;"},
+ {183, "middot;"},
+ {184, "cedil;"},
+ {185, "sup1;"},
+ {186, "ordm;"},
+ {187, "raquo;"},
+ {188, "frac14;"},
+ {189, "frac12;"},
+ {190, "frac34;"},
+ {191, "iquest;"},
+ {192, "Agrave;"},
+ {193, "Aacute;"},
+ {194, "Acirc;"},
+ {195, "Atilde;"},
+ {196, "Auml;"},
+ {197, "Aring;"},
+ {198, "AElig;"},
+ {199, "Ccedil;"},
+ {200, "Egrave;"},
+ {201, "Eacute;"},
+ {202, "Ecirc;"},
+ {203, "Euml;"},
+ {204, "Igrave;"},
+ {205, "Iacute;"},
+ {206, "Icirc;"},
+ {207, "Iuml;"},
+ {208, "ETH;"},
+ {209, "Ntilde;"},
+ {210, "Ograve;"},
+ {211, "Oacute;"},
+ {212, "Ocirc;"},
+ {213, "Otilde;"},
+ {214, "Ouml;"},
+ {215, "times;"},
+ {216, "Oslash;"},
+ {217, "Ugrave;"},
+ {218, "Uacute;"},
+ {219, "Ucirc;"},
+ {220, "Uuml;"},
+ {221, "Yacute;"},
+ {222, "THORN;"},
+ {223, "szlig;"},
+ {224, "agrave;"},
+ {225, "aacute;"},
+ {226, "acirc;"},
+ {227, "atilde;"},
+ {228, "auml;"},
+ {229, "aring;"},
+ {230, "aelig;"},
+ {231, "ccedil;"},
+ {232, "egrave;"},
+ {233, "eacute;"},
+ {234, "ecirc;"},
+ {235, "euml;"},
+ {236, "igrave;"},
+ {237, "iacute;"},
+ {238, "icirc;"},
+ {239, "iuml;"},
+ {240, "eth;"},
+ {241, "ntilde;"},
+ {242, "ograve;"},
+ {243, "oacute;"},
+ {244, "ocirc;"},
+ {245, "otilde;"},
+ {246, "ouml;"},
+ {247, "divide;"},
+ {248, "oslash;"},
+ {249, "ugrave;"},
+ {250, "uacute;"},
+ {251, "ucirc;"},
+ {252, "uuml;"},
+ {253, "yacute;"},
+ {254, "thorn;"},
+ {255, "yuml;"},
+ {338, "OElig;"},
+ {339, "oelig;"},
+ {352, "Scaron;"},
+ {353, "scaron;"},
+ {376, "Yuml;"},
+ {402, "fnof;"},
+ {710, "circ;"},
+ {732, "tilde;"},
+ {913, "Alpha;"},
+ {914, "Beta;"},
+ {915, "Gamma;"},
+ {916, "Delta;"},
+ {917, "Epsilon;"},
+ {918, "Zeta;"},
+ {919, "Eta;"},
+ {920, "Theta;"},
+ {921, "Iota;"},
+ {922, "Kappa;"},
+ {923, "Lambda;"},
+ {924, "Mu;"},
+ {925, "Nu;"},
+ {926, "Xi;"},
+ {927, "Omicron;"},
+ {928, "Pi;"},
+ {929, "Rho;"},
+ {931, "Sigma;"},
+ {932, "Tau;"},
+ {933, "Upsilon;"},
+ {934, "Phi;"},
+ {935, "Chi;"},
+ {936, "Psi;"},
+ {937, "Omega;"},
+ {945, "alpha;"},
+ {946, "beta;"},
+ {947, "gamma;"},
+ {948, "delta;"},
+ {949, "epsilon;"},
+ {950, "zeta;"},
+ {951, "eta;"},
+ {952, "theta;"},
+ {953, "iota;"},
+ {954, "kappa;"},
+ {955, "lambda;"},
+ {956, "mu;"},
+ {957, "nu;"},
+ {958, "xi;"},
+ {959, "omicron;"},
+ {960, "pi;"},
+ {961, "rho;"},
+ {962, "sigmaf;"},
+ {963, "sigma;"},
+ {964, "tau;"},
+ {965, "upsilon;"},
+ {966, "phi;"},
+ {967, "chi;"},
+ {968, "psi;"},
+ {969, "omega;"},
+ {977, "thetasym;"},
+ {978, "upsih;"},
+ {982, "piv;"},
+ {8194, "ensp;"},
+ {8195, "emsp;"},
+ {8201, "thinsp;"},
+ {8204, "zwnj;"},
+ {8205, "zwj;"},
+ {8206, "lrm;"},
+ {8207, "rlm;"},
+ {8211, "ndash;"},
+ {8212, "mdash;"},
+ {8216, "lsquo;"},
+ {8217, "rsquo;"},
+ {8218, "sbquo;"},
+ {8220, "ldquo;"},
+ {8221, "rdquo;"},
+ {8222, "bdquo;"},
+ {8224, "dagger;"},
+ {8225, "Dagger;"},
+ {8226, "bull;"},
+ {8230, "hellip;"},
+ {8240, "permil;"},
+ {8242, "prime;"},
+ {8243, "Prime;"},
+ {8249, "lsaquo;"},
+ {8250, "rsaquo;"},
+ {8254, "oline;"},
+ {8260, "frasl;"},
+ {8364, "euro;"},
+ {8465, "image;"},
+ {8472, "weierp;"},
+ {8476, "real;"},
+ {8482, "trade;"},
+ {8501, "alefsym;"},
+ {8592, "larr;"},
+ {8593, "uarr;"},
+ {8594, "rarr;"},
+ {8595, "darr;"},
+ {8596, "harr;"},
+ {8629, "crarr;"},
+ {8656, "lArr;"},
+ {8657, "uArr;"},
+ {8658, "rArr;"},
+ {8659, "dArr;"},
+ {8660, "hArr;"},
+ {8704, "forall;"},
+ {8706, "part;"},
+ {8707, "exist;"},
+ {8709, "empty;"},
+ {8711, "nabla;"},
+ {8712, "isin;"},
+ {8713, "notin;"},
+ {8715, "ni;"},
+ {8719, "prod;"},
+ {8721, "sum;"},
+ {8722, "minus;"},
+ {8727, "lowast;"},
+ {8730, "radic;"},
+ {8733, "prop;"},
+ {8734, "infin;"},
+ {8736, "ang;"},
+ {8743, "and;"},
+ {8744, "or;"},
+ {8745, "cap;"},
+ {8746, "cup;"},
+ {8747, "int;"},
+ {8756, "there4;"},
+ {8764, "sim;"},
+ {8773, "cong;"},
+ {8776, "asymp;"},
+ {8800, "ne;"},
+ {8801, "equiv;"},
+ {8804, "le;"},
+ {8805, "ge;"},
+ {8834, "sub;"},
+ {8835, "sup;"},
+ {8836, "nsub;"},
+ {8838, "sube;"},
+ {8839, "supe;"},
+ {8853, "oplus;"},
+ {8855, "otimes;"},
+ {8869, "perp;"},
+ {8901, "sdot;"},
+ {8968, "lceil;"},
+ {8969, "rceil;"},
+ {8970, "lfloor;"},
+ {8971, "rfloor;"},
+ {9001, "lang;"},
+ {9002, "rang;"},
+ {9674, "loz;"},
+ {9824, "spades;"},
+ {9827, "clubs;"},
+ {9829, "hearts;"},
+ {9830, "diams;"},
+ {0, NULL},
+ };
+
+ /* Decode HTML character references in @text to UTF-8 and collapse runs
+  * of whitespace into a single space.  "&nbsp" / "&nbsp;" and numeric
+  * references ("&#123;", "&#x7B;") are decoded here; the predefined XML
+  * entities are re-emitted escaped (pango markup resolves them later);
+  * a '&' that matches nothing passes through verbatim.  Returns a newly
+  * allocated string, free with g_free(). */
+ static gchar *
+ unescape_string (const gchar * text)
+ {
+   gint i;
+   GString *unescaped = g_string_new (NULL);
+ 
+   while (*text) {
+     if (*text == '&') {
+       text++;
+ 
+       /* unescape "&nbsp" and "&nbsp;" */
+       if (!g_ascii_strncasecmp (text, "nbsp", 4)) {
+         unescaped = g_string_append_unichar (unescaped, 160);
+         text += 4;
+         if (*text == ';') {
+           text++;
+         }
+         goto next;
+       }
+ 
+       /* pass xml entities. these will be processed as pango markup */
+       for (i = 0; XmlEntities[i].escaped; i++) {
+         gssize len = strlen (XmlEntities[i].escaped);
+         if (!g_ascii_strncasecmp (text, XmlEntities[i].escaped, len)) {
+           unescaped = g_string_append_c (unescaped, '&');
+           unescaped =
+               g_string_append_len (unescaped, XmlEntities[i].escaped, len);
+           text += len;
+           goto next;
+         }
+       }
+ 
+       /* convert html entities (strncmp: names are case-sensitive,
+        * e.g. Agrave vs agrave) */
+       for (i = 0; HtmlEntities[i].escaped; i++) {
+         gssize len = strlen (HtmlEntities[i].escaped);
+         if (!strncmp (text, HtmlEntities[i].escaped, len)) {
+           unescaped =
+               g_string_append_unichar (unescaped, HtmlEntities[i].unescaped);
+           text += len;
+           goto next;
+         }
+       }
+ 
+       /* numeric character reference, decimal or hexadecimal */
+       if (*text == '#') {
+         gboolean is_hex = FALSE;
+         gunichar l;
+         gchar *end = NULL;
+ 
+         text++;
+         if (*text == 'x') {
+           is_hex = TRUE;
+           text++;
+         }
+         errno = 0;
+         if (is_hex) {
+           l = strtoul (text, &end, 16);
+         } else {
+           l = strtoul (text, &end, 10);
+         }
+ 
+         if (text == end || errno != 0) {
+           /* error occurred. pass it */
+           goto next;
+         }
+         unescaped = g_string_append_unichar (unescaped, l);
+         text = end;
+ 
+         if (*text == ';') {
+           text++;
+         }
+         goto next;
+       }
+ 
+       /* escape & */
+       unescaped = g_string_append (unescaped, "&");
+ 
+     next:
+       continue;
+ 
+     } else if (g_ascii_isspace (*text)) {
+       unescaped = g_string_append_c (unescaped, ' ');
+       /* strip whitespace */
+       do {
+         text++;
+       } while ((*text) && g_ascii_isspace (*text));
+     } else {
+       unescaped = g_string_append_c (unescaped, *text);
+       text++;
+     }
+   }
+ 
+   return g_string_free (unescaped, FALSE);
+ }
+
+ /* Copy the part of @string preceding the first @delimiter into newly
+  * allocated *@first (the whole string when the delimiter is absent).
+  * Returns a pointer to the delimiter within @string, or NULL when it
+  * was not found.  Caller frees *@first. */
+ static const gchar *
+ string_token (const gchar * string, const gchar * delimiter, gchar ** first)
+ {
+   gchar *next = strstr (string, delimiter);
+   if (next) {
+     *first = g_strndup (string, next - string);
+   } else {
+     *first = g_strdup (string);
+   }
+   return next;
+ }
+
+ /* Handle one element (the text between '<' and '>'): split the name
+  * from its attributes, emit start_element and, when @must_close is set
+  * (i.e. "<blah/>"), end_element too.  Attribute values are stripped of
+  * surrounding single or double quotes.  The TIZEN block additionally
+  * recognises the SAMI "<!-- P { ... } -->" style block and extracts
+  * "lang:" declarations as class-name/language-code attribute pairs. */
+ static void
+ html_context_handle_element (HtmlContext * ctxt,
+     const gchar * string, gboolean must_close)
+ {
+   gchar *name = NULL;
+   gint count = 0, i;
+   gchar **attrs;
+   const gchar *found, *next;
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++  const gchar *name_temp = NULL;
++  gint j = 0;
++#endif
+ 
+   /* split element name and attributes */
+   next = string_token (string, " ", &name);
+ 
+   if (next) {
+     /* count attributes by counting '=' signs */
+     found = next + 1;
+     while (TRUE) {
+       found = strchr (found, '=');
+       if (!found)
+         break;
+       found++;
+       count++;
+     }
+   } else {
+     count = 0;
+   }
+ 
+   attrs = g_new0 (gchar *, (count + 1) * 2);
+ 
+   for (i = 0; i < count && next != NULL; i += 2) {
+     gchar *attr_name = NULL, *attr_value = NULL;
+     gsize length;
+     next = string_token (next + 1, "=", &attr_name);
+     next = string_token (next + 1, " ", &attr_value);
+ 
+     /* strip " or ' from attribute value */
+     if (attr_value[0] == '"' || attr_value[0] == '\'') {
+       gchar *tmp = g_strdup (attr_value + 1);
+       g_free (attr_value);
+       attr_value = tmp;
+     }
+ 
+     length = strlen (attr_value);
+     if (length > 0 && (attr_value[length - 1] == '"'
+             || attr_value[length - 1] == '\'')) {
+       attr_value[length - 1] = '\0';
+     }
+ 
+     attrs[i] = attr_name;
+     attrs[i + 1] = attr_value;
+   }
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++  /* sometimes spaces can be there in between !-- and P
++   * that also we have to take care */
++  if (!g_ascii_strcasecmp ("!--", name)) {
++    gchar *tempchar = (gchar *) (string + 3);
++    while (*tempchar == ' ') {
++      tempchar++;
++      if (*tempchar == 'P' || *tempchar == 'p') {
++        /* rebuild the name: the previous in-place write of name[3] and
++         * name[4] overflowed the 4-byte "!--" allocation returned by
++         * g_strndup() */
++        gchar *fixed = g_strdup_printf ("!--%c", *tempchar);
++        g_free (name);
++        name = fixed;
++        next = tempchar + 1;
++        break;
++      }
++    }
++  }
++  if (next && (!g_ascii_strcasecmp ("!--P", name))) {
++    gint attrindex = 0;
++    count = 0;
++    /* count the "lang:" declarations */
++    found = next + 1;
++    while (TRUE) {
++      found = (gchar *) strcasestr (found, "lang:");
++      if (!found)
++        break;
++      found++;
++      count++;
++    }
++    g_strfreev (attrs);
++
++    /* one extra pair: start_element() walks the array up to NULL */
++    attrs = g_new0 (gchar *, (count + 1) * 2);
++
++    for (i = 0; i < count; i++) {
++      gchar *attr_name = NULL, *attr_value = NULL;
++
++      next = (gchar *) strcasestr (next, "lang:");
++      if (!next)
++        break;
++
++      attr_value = (gchar *) g_malloc0 (3);
++      next = next + 5;
++      /* skip spaces */
++      while (*next == ' ')
++        next++;
++      /* two-letter language code, e.g. "en" */
++      strncpy (attr_value, next, 2);
++      attr_value[2] = '\0';
++      GST_LOG ("Language value comes as %s", attr_value);
++      /* scan back to the enclosing '{', then to the '.' starting the
++       * CSS class name; stay within the element string instead of
++       * walking off its start on malformed input */
++      name_temp = next;
++      while (name_temp > string && *name_temp != '{')
++        name_temp--;
++      if (*name_temp == '{') {
++        gint character_count = 0;
++        const gchar *scan = name_temp;
++
++        while (scan > string) {
++          scan--;
++          if (*scan == '.') {
++            attr_name = (gchar *) g_malloc0 (character_count + 1);
++            break;
++          } else if (*scan != ' ') {
++            character_count++;
++          }
++        }
++        name_temp = scan + 1;
++      }
++      if (attr_name != NULL) {
++        for (j = 0; *(name_temp + j) != ' '; j++) {
++          attr_name[j] = *(name_temp + j);
++        }
++        attr_name[j] = '\0';
++        attrs[attrindex++] = attr_name;
++        attrs[attrindex++] = attr_value;
++      } else {
++        /* no class name found; don't leak the language value */
++        g_free (attr_value);
++      }
++    }
++  } else {
++    count = 0;
++  }
+ 
++#endif
+   ctxt->parser->start_element (ctxt, name,
+       (const gchar **) attrs, ctxt->user_data);
+   if (must_close) {
+     ctxt->parser->end_element (ctxt, name, ctxt->user_data);
+   }
+   g_strfreev (attrs);
+   g_free (name);
+ }
+
+ /* Feed @text_len bytes into the parser.  Input accumulates in
+  * ctxt->buf so tags split across calls are handled: complete tags and
+  * text runs are dispatched to the callbacks, the unconsumed tail is
+  * kept for the next call. */
+ static void
+ html_context_parse (HtmlContext * ctxt, gchar * text, gsize text_len)
+ {
+   const gchar *next = NULL;
+   ctxt->buf = g_string_append_len (ctxt->buf, text, text_len);
+   next = ctxt->buf->str;
+   while (TRUE) {
+     if (next[0] == '<') {
+       gchar *element = NULL;
+       /* find <blahblah> */
+       if (!strchr (next, '>')) {
+         /* no tag end point. buffer will be process in next time */
+         return;
+       }
+ 
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++      /* after seeking,
++         the subtitle file will be delivered from the start again. */
++      if (strcasestr (next, "<SAMI>"))
++        next = strcasestr (next, "<SAMI>");
++#endif
++
+       next = string_token (next, ">", &element);
+       next++;
+       /* test the element itself, not the remaining buffer: a
+        * self-closing tag looks like "<blah/" once '>' is split off */
+       if (g_str_has_suffix (element, "/")) {
+         /* handle <blah/> */
+         element[strlen (element) - 1] = '\0';
+         html_context_handle_element (ctxt, element + 1, TRUE);
+       } else if (element[1] == '/') {
+         /* handle </blah> */
+         ctxt->parser->end_element (ctxt, element + 2, ctxt->user_data);
+       } else {
+         /* handle <blah> */
+         html_context_handle_element (ctxt, element + 1, FALSE);
+       }
+       g_free (element);
+     } else if (strchr (next, '<')) {
+       gchar *text = NULL;
+       gsize length;
+       next = string_token (next, "<", &text);
+       text = g_strstrip (text);
+       length = strlen (text);
+       ctxt->parser->text (ctxt, text, length, ctxt->user_data);
+       g_free (text);
+ 
+     } else {
+       gchar *text = (gchar *) next;
+       gsize length;
+       text = g_strstrip (text);
+       length = strlen (text);
+       ctxt->parser->text (ctxt, text, length, ctxt->user_data);
+       ctxt->buf = g_string_assign (ctxt->buf, "");
+       return;
+     }
+   }
+ 
+   /* not reached: the loop above only exits via return */
+   ctxt->buf = g_string_assign (ctxt->buf, next);
+ }
+
+ /* Return the last occurrence of state tag @tag in @str, or NULL. */
+ static gchar *
+ has_tag (GString * str, const gchar tag)
+ {
+   return strrchr (str->str, tag);
+ }
+
+ /* Push a formatting tag (ITALIC_TAG, SPAN_TAG, ...) onto the open-tag
+  * stack, which is kept as a string of tag characters. */
+ static void
+ sami_context_push_state (GstSamiContext * sctx, char state)
+ {
+   GST_LOG ("state %c", state);
+   g_string_append_c (sctx->state, state);
+ }
+
+ /* Pop tags off the open-tag stack down to and including @state,
+  * appending the matching closing pango markup to sctx->buf (and to
+  * sctx->rubybuf for ruby text).  CLEAR_TAG empties the whole stack. */
+ static void
+ sami_context_pop_state (GstSamiContext * sctx, char state)
+ {
+   GString *str = g_string_new ("");
+   GString *context_state = sctx->state;
+   int i;
+ 
+   GST_LOG ("state %c", state);
+   for (i = context_state->len - 1; i >= 0; i--) {
+     switch (context_state->str[i]) {
+       case ITALIC_TAG:         /* <i> */
+       {
+         g_string_append (str, "</i>");
+         break;
+       }
+       case SPAN_TAG:           /* <span foreground= > */
+       {
+         g_string_append (str, "</span>");
+         break;
+       }
+       case RUBY_TAG:           /* <span size= > -- ruby */
+       {
+         break;
+       }
+       case RT_TAG:             /* ruby */
+       {
+         /* FIXME: support for furigana/ruby once implemented in pango */
+         g_string_append (sctx->rubybuf, "</span>");
+         if (has_tag (context_state, ITALIC_TAG)) {
+           g_string_append (sctx->rubybuf, "</i>");
+         }
+ 
+         break;
+       }
+       default:
+         break;
+     }
+     if (context_state->str[i] == state) {
+       /* requested tag found: flush the closers, shrink the stack */
+       g_string_append (sctx->buf, str->str);
+       g_string_free (str, TRUE);
+       g_string_truncate (context_state, i);
+       return;
+     }
+   }
+   if (state == CLEAR_TAG) {
+     g_string_append (sctx->buf, str->str);
+     g_string_truncate (context_state, 0);
+   }
+   g_string_free (str, TRUE);
+ }
+
+ /* <SYNC Start=ms>: close all open formatting, flush the text collected
+  * since the previous sync into resultbuf and advance the cue times
+  * (time1 = cue start, time2 = this sync's time, i.e. cue end). */
+ static void
+ handle_start_sync (GstSamiContext * sctx, const gchar ** atts)
+ {
+   int i;
+ 
+   sami_context_pop_state (sctx, CLEAR_TAG);
+   if (atts != NULL) {
+     for (i = 0; (atts[i] != NULL); i += 2) {
+       const gchar *key, *value;
+ 
+       key = atts[i];
+       value = atts[i + 1];
+ 
+       if (!value)
+         continue;
+       if (!g_ascii_strcasecmp ("start", key)) {
+         /* Only set a new start time if we don't have text pending */
+         if (sctx->resultbuf->len == 0)
+           sctx->time1 = sctx->time2;
+ 
+         sctx->time2 = atoi ((const char *) value) * GST_MSECOND;
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++        /* remember the raw sync time; handle_p() rewinds time2 to it
++         * when the subtitle language changes */
++        sctx->time3 = sctx->time2;
++#endif
+         sctx->time2 = MAX (sctx->time2, sctx->time1);
+         g_string_append (sctx->resultbuf, sctx->buf->str);
+         sctx->has_result = (sctx->resultbuf->len != 0) ? TRUE : FALSE;
+         g_string_truncate (sctx->buf, 0);
+       }
+     }
+   }
+ }
+
+ /* <FONT color=... face=...>: translate to a pango <span> with
+  * foreground / font_family attributes, repairing common broken colour
+  * values found in SAMI files along the way. */
+ static void
+ handle_start_font (GstSamiContext * sctx, const gchar ** atts)
+ {
+   int i;
+ 
+   sami_context_pop_state (sctx, SPAN_TAG);
+   if (atts != NULL) {
+     g_string_append (sctx->buf, "<span");
+     for (i = 0; (atts[i] != NULL); i += 2) {
+       const gchar *key, *value;
+ 
+       key = atts[i];
+       value = atts[i + 1];
+ 
+       if (!value)
+         continue;
+       if (!g_ascii_strcasecmp ("color", key)) {
+         /*
+          * There are invalid color value in many
+          * sami files.
+          * It will fix hex color value that start without '#'
+          */
+         const gchar *sharp = "";
+         int len = strlen (value);
+ 
+         if (!(*value == '#' && len == 7)) {
+           gchar *r;
+ 
+           /* check if it looks like hex */
+           if (strtol ((const char *) value, &r, 16) >= 0 &&
+               ((gchar *) r == (value + 6) && len == 6)) {
+             sharp = "#";
+           }
+         }
+         /* some colours can be found in many sami files, but X RGB database
+          * doesn't contain a colour by this name, so map explicitly */
+         if (!g_ascii_strcasecmp ("aqua", value)) {
+           value = "#00ffff";
+         } else if (!g_ascii_strcasecmp ("crimson", value)) {
+           value = "#dc143c";
+         } else if (!g_ascii_strcasecmp ("fuchsia", value)) {
+           value = "#ff00ff";
+         } else if (!g_ascii_strcasecmp ("indigo", value)) {
+           value = "#4b0082";
+         } else if (!g_ascii_strcasecmp ("lime", value)) {
+           value = "#00ff00";
+         } else if (!g_ascii_strcasecmp ("olive", value)) {
+           value = "#808000";
+         } else if (!g_ascii_strcasecmp ("silver", value)) {
+           value = "#c0c0c0";
+         } else if (!g_ascii_strcasecmp ("teal", value)) {
+           value = "#008080";
+         }
+         g_string_append_printf (sctx->buf, " foreground=\"%s%s\"", sharp,
+             value);
+       } else if (!g_ascii_strcasecmp ("face", key)) {
+         g_string_append_printf (sctx->buf, " font_family=\"%s\"", value);
+       }
+     }
+     g_string_append_c (sctx->buf, '>');
+     sami_context_push_state (sctx, SPAN_TAG);
+   }
+ }
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++/* <P Class=lang>: track which language the following text belongs to.
++ * Copies the Class attribute into current_language, initialises
++ * desired_language on first sight, and when the language changed while
++ * a cue is still open (time1 == time2) rewinds time2 to the raw sync
++ * time (time3) so the cue is re-timed for the new language.
++ * NOTE(review): current_language is a fixed 128-byte buffer — longer
++ * class names are silently truncated. */
++static void
++handle_p (GstSamiContext * sctx, const gchar ** atts)
++{
++  int i;
++  const int str_size = 128;
++
++  if (atts != NULL) {
++    for (i = 0; (atts[i] != NULL); i += 2) {
++      const gchar *key, *value;
++
++      key = atts[i];
++      value = atts[i + 1];
++
++      if (sctx->current_language && value && strcmp(sctx->current_language, value)
++          && (sctx->time1 == sctx->time2))
++        sctx->language_changed = TRUE;
++      else if (!sctx->current_language)
++        sctx->current_language = (gchar*) g_malloc0 (str_size);
++
++      if (key && !g_ascii_strcasecmp ("class", key) && value) {
++        strncpy (sctx->current_language, value, str_size - 1);
++
++        if (sctx->desired_language == NULL) {
++          sctx->desired_language = g_strdup(value);
++          GST_LOG("no language list was found and desired lang was set to %s", sctx->desired_language);
++        }
++      }
++      if (sctx->language_changed)
++      {
++        sctx->time1 = 0;
++        sctx->time2 = sctx->time3;
++        sctx->language_changed = FALSE;
++      }
++      if (!value)
++        continue;
++    }
++  }
++}
++
++/* Build the language list from the "!--P" pseudo element: every
++ * key/value attribute pair (CSS class name / language code) becomes a
++ * GstLangStruct appended to sctx->lang_list; the first key also
++ * initialises desired_language.  Runs at most once — bails out when a
++ * list already exists. */
++static void
++handle_start_language_list (GstSamiContext * sctx, const gchar ** atts)
++{
++  int i = 0;
++  int attrIndex = 0;
++  GstLangStruct *new = NULL;
++  GstLangStruct *temp = NULL;
++
++  if (atts != NULL) {
++    if (g_list_length (sctx->lang_list)) {
++      GST_LOG ("We already got the language list");
++      return;
++    }
++    for (i = 0; (atts[attrIndex] != NULL); i++) {
++      const gchar *key, *value;
++
++      key = atts[attrIndex++];
++      value = atts[attrIndex++];
++
++      GST_LOG ("Inside handle_start_language_list key: %s, value: %s", key, value);
++
++      if (!value)
++        continue;
++
++      new = g_new0 (GstLangStruct, 1);
++      new->language_code = (gchar*) g_malloc0 (strlen(value) + 1);
++      if (new->language_code && value)
++        g_strlcpy (new->language_code, value, strlen(value) + 1);
++      new->language_key = (gchar*) g_malloc0 (strlen(key) + 1);
++      if (new->language_key && key)
++        g_strlcpy (new->language_key, key, strlen(key) + 1);
++      sctx->lang_list = g_list_append (sctx->lang_list, new);
++      temp = g_list_nth_data (sctx->lang_list, i);
++      if (sctx->desired_language == NULL && key){
++        sctx->desired_language = g_strdup(key);
++        GST_WARNING("set desired lang %s", sctx->desired_language);
++      }
++
++      if (temp)
++        GST_LOG ("Inside handle_start_language_list of glist key: %s, value: %s",
++            temp->language_key, temp->language_code);
++    }
++  }
++}
++#endif
+
+ /* start_element callback: translate SAMI tags into pango markup state.
+  * With TIZEN, <BR>/<I> output and <P>/"!--P" handling are gated on the
+  * currently desired subtitle language. */
+ static void
+ handle_start_element (HtmlContext * ctx, const gchar * name,
+     const char **atts, gpointer user_data)
+ {
+   GstSamiContext *sctx = (GstSamiContext *) user_data;
+ 
+   GST_LOG ("name:%s", name);
+ 
+   if (!g_ascii_strcasecmp ("sync", name)) {
+     handle_start_sync (sctx, atts);
+     sctx->in_sync = TRUE;
+   } else if (!g_ascii_strcasecmp ("font", name)) {
+     handle_start_font (sctx, atts);
+   } else if (!g_ascii_strcasecmp ("ruby", name)) {
+     sami_context_push_state (sctx, RUBY_TAG);
+   } else if (!g_ascii_strcasecmp ("br", name)) {
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++    if (sctx->current_language && sctx->desired_language &&
++        !strcmp(sctx->current_language, sctx->desired_language))
++#endif
+     g_string_append_c (sctx->buf, '\n');
+     /* FIXME: support for furigana/ruby once implemented in pango */
+   } else if (!g_ascii_strcasecmp ("rt", name)) {
+     if (has_tag (sctx->state, ITALIC_TAG)) {
+       g_string_append (sctx->rubybuf, "<i>");
+     }
+     g_string_append (sctx->rubybuf, "<span size='xx-small' rise='-100'>");
+     sami_context_push_state (sctx, RT_TAG);
+   } else if (!g_ascii_strcasecmp ("i", name)) {
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++    if (sctx->current_language && sctx->desired_language &&
++        !strcmp(sctx->current_language, sctx->desired_language)) {
++#endif
+     g_string_append (sctx->buf, "<i>");
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++    }
++#endif
+     sami_context_push_state (sctx, ITALIC_TAG);
+   } else if (!g_ascii_strcasecmp ("p", name)) {
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++    handle_p (sctx, atts);
++  } else if (!g_ascii_strcasecmp ("!--P", name)) {
++    handle_start_language_list (sctx, atts);
++#endif
+   }
+ }
+
+ /* end_element callback: </SYNC> leaves sync scope; </BODY> and </SAMI>
+  * flush the last pending cue with an open-ended end time; the others
+  * pop the matching formatting tag off the state stack. */
+ static void
+ handle_end_element (HtmlContext * ctx, const char *name, gpointer user_data)
+ {
+   GstSamiContext *sctx = (GstSamiContext *) user_data;
+ 
+   GST_LOG ("name:%s", name);
+ 
+   if (!g_ascii_strcasecmp ("sync", name)) {
+     sctx->in_sync = FALSE;
+   } else if ((!g_ascii_strcasecmp ("body", name)) ||
+       (!g_ascii_strcasecmp ("sami", name))) {
+     /* We will usually have one buffer left when the body is closed
+      * as we need the next sync to actually send it */
++
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++    /* lets parse_sami() push the final cue even when the current
++     * language does not match the desired one */
++    sctx->end_body = TRUE;
++#endif
++
+     if (sctx->buf->len != 0) {
+       /* Only set a new start time if we don't have text pending */
+       if (sctx->resultbuf->len == 0)
+         sctx->time1 = sctx->time2;
+ 
+       sctx->time2 = GST_CLOCK_TIME_NONE;
+       g_string_append (sctx->resultbuf, sctx->buf->str);
+       sctx->has_result = (sctx->resultbuf->len != 0) ? TRUE : FALSE;
+       g_string_truncate (sctx->buf, 0);
+     }
+   } else if (!g_ascii_strcasecmp ("font", name)) {
+     sami_context_pop_state (sctx, SPAN_TAG);
+   } else if (!g_ascii_strcasecmp ("ruby", name)) {
+     sami_context_pop_state (sctx, RUBY_TAG);
+   } else if (!g_ascii_strcasecmp ("i", name)) {
+     sami_context_pop_state (sctx, ITALIC_TAG);
+   }
+ }
+
+ /* text callback: route character data into the ruby buffer (inside
+  * <RT>) or the cue buffer; anything outside a <SYNC> is dropped.  With
+  * TIZEN, text not belonging to the desired language is dropped too. */
+ static void
+ handle_text (HtmlContext * ctx, const gchar * text, gsize text_len,
+     gpointer user_data)
+ {
+   GstSamiContext *sctx = (GstSamiContext *) user_data;
+ 
+   /* Skip everything except content of the sync elements */
+   if (!sctx->in_sync)
+     return;
+ 
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++  if (has_tag (sctx->state, RT_TAG) && (sctx->current_language && sctx->desired_language &&
++      !strcmp(sctx->current_language, sctx->desired_language))) {
++#else
+   if (has_tag (sctx->state, RT_TAG)) {
++#endif
+     g_string_append_c (sctx->rubybuf, ' ');
+     g_string_append (sctx->rubybuf, text);
+     g_string_append_c (sctx->rubybuf, ' ');
+   } else {
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++    if (sctx->current_language && sctx->desired_language &&
++        !strcmp(sctx->current_language, sctx->desired_language))
++#endif
+     g_string_append (sctx->buf, text);
+   }
+ }
+
+ /* Callback table wiring the generic HTML parser to the SAMI handlers. */
+ static HtmlParser samiParser = {
+   handle_start_element,         /* start_element */
+   handle_end_element,           /* end_element */
+   handle_text,                  /* text */
+ };
+
+ /* Allocate a fresh GstSamiContext and attach it to @state->user_data.
+  * NOTE(review): the TIZEN block frees/clears fields of a context that
+  * g_new0() just zero-allocated — those statements are no-ops kept as a
+  * mirror image of sami_context_deinit(). */
+ void
+ sami_context_init (ParserState * state)
+ {
+   GstSamiContext *context;
+ 
+   g_assert (state->user_data == NULL);
+ 
+   context = g_new0 (GstSamiContext, 1);
+ 
+   context->htmlctxt = html_context_new (&samiParser, context);
+   context->buf = g_string_new ("");
+   context->rubybuf = g_string_new ("");
+   context->resultbuf = g_string_new ("");
+   context->state = g_string_new ("");
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++  g_free(context->current_language);
++  context->current_language = NULL;
++
++  g_free(context->desired_language);
++  context->desired_language = NULL;
++
++  if (context->lang_list) {
++    GstLangStruct *temp = NULL;
++    int i = 0;
++
++    while ((temp = g_list_nth_data (context->lang_list, i))) {
++      g_free (temp->language_code);
++      temp->language_code = NULL;
++
++      g_free (temp->language_key);
++      temp->language_key = NULL;
++
++      g_free (temp);
++      i++;
++    }
++    g_list_free (context->lang_list);
++  }
++  context->lang_list = NULL;
++
++  context->language_changed = FALSE;
++  context->end_body = FALSE;
++#endif
+ 
+   state->user_data = context;
+ }
+
+ /* Free the context installed by sami_context_init(): the HTML parser,
+  * all string buffers and (TIZEN) the language list and strings. */
+ void
+ sami_context_deinit (ParserState * state)
+ {
+   GstSamiContext *context = (GstSamiContext *) state->user_data;
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++  GstLangStruct *temp = NULL;
++  int i = 0;
++#endif
+ 
+   if (context) {
+     html_context_free (context->htmlctxt);
+     context->htmlctxt = NULL;
+     g_string_free (context->buf, TRUE);
+     g_string_free (context->rubybuf, TRUE);
+     g_string_free (context->resultbuf, TRUE);
+     g_string_free (context->state, TRUE);
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++    if (context->lang_list) {
++      while ((temp = g_list_nth_data (context->lang_list, i))) {
++        g_free (temp->language_code);
++        temp->language_code = NULL;
++
++        g_free (temp->language_key);
++        temp->language_key = NULL;
++
++        g_free (temp);
++        i++;
++      }
++      g_list_free (context->lang_list);
++    }
++    context->lang_list = NULL;
++
++    g_free (context->current_language);
++    context->current_language = NULL;
++
++    g_free (context->desired_language);
++    context->desired_language = NULL;
++#endif
++
+     g_free (context);
+     state->user_data = NULL;
+   }
+ }
+
+ /* Reset per-stream parse state (buffers, tag stack, cue times) while
+  * keeping the context alive, e.g. on a flushing seek.
+  * NOTE(review): the language fields and TIZEN's time3 are left alone,
+  * presumably so the language selection survives the seek — confirm. */
+ void
+ sami_context_reset (ParserState * state)
+ {
+   GstSamiContext *context = (GstSamiContext *) state->user_data;
+ 
+   if (context) {
+     g_string_truncate (context->buf, 0);
+     g_string_truncate (context->rubybuf, 0);
+     g_string_truncate (context->resultbuf, 0);
+     g_string_truncate (context->state, 0);
+     context->has_result = FALSE;
+     context->in_sync = FALSE;
+     context->time1 = 0;
+     context->time2 = 0;
+   }
+ }
+
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++/* Adopt the language the application selected (state->current_language)
++ * as the parser's desired language for subsequent cues. */
++void
++sami_context_change_language (ParserState * state)
++{
++  GstSamiContext *context = (GstSamiContext *) state->user_data;
++
++  /* the context only exists between init and deinit; be defensive like
++   * the other public entry points (sami_context_reset/deinit) */
++  if (context == NULL)
++    return;
++
++  if (context->desired_language) {
++    GST_LOG ("desired language was %s", context->desired_language);
++    g_free (context->desired_language);
++    context->desired_language = NULL;
++  }
++  if (state->current_language) {
++    context->desired_language = g_strdup (state->current_language);
++    GST_WARNING ("desired language changed to %s",
++        GST_STR_NULL (context->desired_language));
++  }
++}
++#endif
++
+ /* Parse one line of a SAMI file.  Returns a newly allocated pango
+  * markup string when a cue completed on this line (also setting
+  * state->start_time / state->duration), or NULL otherwise.  With
+  * TIZEN, the cue is only emitted when its language matches the desired
+  * one, or unconditionally once </BODY> has been seen. */
+ gchar *
+ parse_sami (ParserState * state, const gchar * line)
+ {
+   gchar *ret = NULL;
+   GstSamiContext *context = (GstSamiContext *) state->user_data;
+ 
+   gchar *unescaped = unescape_string (line);
+   html_context_parse (context->htmlctxt, (gchar *) unescaped,
+       strlen (unescaped));
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++  if (context->lang_list)
++    state->language_list = context->lang_list;
++
++  if (context->desired_language && g_strcmp0(context->desired_language, state->current_language)) {
++    g_free (state->current_language);
++    state->current_language = g_strdup(context->desired_language);
++    GST_WARNING("current lang is updated %s", state->current_language);
++  }
++#endif
+   g_free (unescaped);
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++  if (context->desired_language && context->current_language) {
++    if ((!strcmp(context->current_language, context->desired_language)) || context->end_body) {
++#endif
++      if (context->has_result) {
++        if (context->rubybuf->len) {
++          context->rubybuf = g_string_append_c (context->rubybuf, '\n');
++          g_string_prepend (context->resultbuf, context->rubybuf->str);
++          context->rubybuf = g_string_truncate (context->rubybuf, 0);
++        }
+ 
-
- ret = g_string_free (context->resultbuf, FALSE);
- context->resultbuf = g_string_new ("");
- state->start_time = context->time1;
- state->duration = context->time2 - context->time1;
- context->has_result = FALSE;
++        ret = g_string_free (context->resultbuf, FALSE);
++        context->resultbuf = g_string_new ("");
++        state->start_time = context->time1;
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++        if (!GST_CLOCK_TIME_IS_VALID(context->time2))
++          state->duration = GST_CLOCK_TIME_NONE;
++        else
++          state->duration = context->time2 - context->time1;
++#else
++        state->duration = context->time2 - context->time1;
++#endif
++        context->has_result = FALSE;
++      }
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++      context->end_body = FALSE;
++    }
++  }
++#endif
++
+   return ret;
+ }
--- /dev/null
+ /* GStreamer SAMI subtitle parser
+ * Copyright (c) 2006 Young-Ho Cha <ganadist chollian net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifndef _SAMI_PARSE_H_
+ #define _SAMI_PARSE_H_
+
+ #include "gstsubparse.h"
+
+ G_BEGIN_DECLS
+
+ gchar * parse_sami (ParserState * state, const gchar * line);
+
+ void sami_context_init (ParserState * state);
+
+ void sami_context_deinit (ParserState * state);
+
+ void sami_context_reset (ParserState * state);
++#ifdef TIZEN_FEATURE_SUBPARSE_MODIFICATION
++void sami_context_change_language (ParserState * state);
++#endif
+
+ G_END_DECLS
+
+ #endif /* _SAMI_PARSE_H_ */
+
--- /dev/null
-
+ /* GStreamer
+ * Copyright (C) 2003 Benjamin Otte <in7y118@public.uni-hamburg.de>
+ * Copyright (C) 2005-2009 Tim-Philipp Müller <tim centricular net>
+ * Copyright (C) 2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * gsttypefindfunctions.c: collection of various typefind functions
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <glib.h>
+ #include <glib/gprintf.h>
+
+ /* don't want to add gio xdgmime typefinder if gio was disabled via configure */
+ #ifdef HAVE_GIO
+ #include <gio/gio.h>
+ #define USE_GIO
+ #endif
+
+ #include <gst/gst.h>
+
+ #include <stdio.h>
+ #include <string.h>
+ #include <ctype.h>
+
+ #include <gst/pbutils/pbutils.h>
+ #include <gst/base/gstbytereader.h>
+
+ #include "gsttypefindfunctionsplugin.h"
+
+ /* DataScanCtx: helper for typefind functions that scan through data
+ * step-by-step, to avoid doing a peek at each and every offset */
+
+ #define DATA_SCAN_CTX_CHUNK_SIZE 4096
+
+ typedef struct
+ {
+   guint64 offset;               /* absolute stream offset of @data */
+   const guint8 *data;           /* current window into the stream */
+   guint size;                   /* valid bytes remaining at @data */
+ } DataScanCtx;
+
+ /* Advance the scan window by @bytes_to_skip.  The offset always moves;
+  * the cached data pointer stays valid only while the skip fits inside
+  * the buffered window — otherwise size drops to 0 and the next
+  * data_scan_ctx_ensure_data() call re-peeks at the new offset. */
+ static inline void
+ data_scan_ctx_advance (GstTypeFind * tf, DataScanCtx * c, guint bytes_to_skip)
+ {
+   c->offset += bytes_to_skip;
+   if (G_LIKELY (c->size > bytes_to_skip)) {
+     c->size -= bytes_to_skip;
+     c->data += bytes_to_skip;
+   } else {
+     c->data += c->size;
+     c->size = 0;
+   }
+ }
+
+ /* Ensure at least @min_len bytes are buffered at the current offset,
+  * peeking a full chunk when possible and otherwise whatever the stream
+  * still has (but never less than @min_len).  Returns FALSE when that
+  * much data cannot be obtained. */
+ static inline gboolean
+ data_scan_ctx_ensure_data (GstTypeFind * tf, DataScanCtx * c, guint min_len)
+ {
+   const guint8 *data;
+   guint64 len;
+   guint chunk_len = MAX (DATA_SCAN_CTX_CHUNK_SIZE, min_len);
+ 
+   if (G_LIKELY (c->size >= min_len))
+     return TRUE;
+ 
+   data = gst_type_find_peek (tf, c->offset, chunk_len);
+   if (G_LIKELY (data != NULL)) {
+     c->data = data;
+     c->size = chunk_len;
+     return TRUE;
+   }
+ 
+   /* if there's less than our chunk size, try to get as much as we can, but
+    * always at least min_len bytes (we might be typefinding the first buffer
+    * of the stream and not have as much data available as we'd like) */
+   len = gst_type_find_get_length (tf);
+   if (len > 0) {
+     len = CLAMP (len - c->offset, min_len, chunk_len);
+   } else {
+     len = min_len;
+   }
+ 
+   data = gst_type_find_peek (tf, c->offset, len);
+   if (data != NULL) {
+     c->data = data;
+     c->size = len;
+     return TRUE;
+   }
+ 
+   return FALSE;
+ }
+
+ static inline gboolean
+ data_scan_ctx_memcmp (GstTypeFind * tf, DataScanCtx * c, guint offset,
+ const gchar * data, guint len)
+ {
+ if (G_UNLIKELY (offset + len >= G_MAXUINT32))
+ return FALSE;
+
+ if (!data_scan_ctx_ensure_data (tf, c, offset + len))
+ return FALSE;
+
+ return (memcmp (c->data + offset, data, len) == 0);
+ }
+
+ /*** text/plain ***/
+ static gboolean xml_check_first_element (GstTypeFind * tf,
+ const gchar * element, guint elen, gboolean strict);
+ static gboolean sdp_check_header (GstTypeFind * tf);
+
+ static GstStaticCaps utf8_caps = GST_STATIC_CAPS ("text/plain");
+
+ #define UTF8_CAPS gst_static_caps_get(&utf8_caps)
+
/* Check whether the stream contains valid UTF-8 at 'offset'.  Tries to
 * validate a 32 kB window; if that much data cannot be peeked, the window
 * is repeatedly halved (lowering the reported probability each time) down
 * to a 16-byte minimum.  On success returns TRUE and stores the
 * probability in *prob; otherwise stores 0 and returns FALSE. */
static gboolean
utf8_type_find_have_valid_utf8_at_offset (GstTypeFind * tf, guint64 offset,
    GstTypeFindProbability * prob)
{
  const guint8 *data;

  /* randomly decided values */
  guint min_size = 16;          /* minimum size */
  guint size = 32 * 1024;       /* starting size */
  guint probability = 95;       /* starting probability */
  guint step = 10;              /* how much we reduce probability in each
                                 * iteration */

  while (probability > step && size > min_size) {
    data = gst_type_find_peek (tf, offset, size);
    if (data) {
      gchar *end;
      gchar *start = (gchar *) data;

      /* accept if everything validates, or if the only failure is a
       * multi-byte sequence cut off within 4 bytes of the window end */
      if (g_utf8_validate (start, size, (const gchar **) &end) || (end - start + 4 > size)) {   /* allow last char to be cut off */
        *prob = probability;
        return TRUE;
      }
      *prob = 0;
      return FALSE;
    }
    /* could not peek that much data: shrink window and lower confidence */
    size /= 2;
    probability -= step;
  }
  *prob = 0;
  return FALSE;
}
+
/* Typefinder for text/plain (UTF-8).  Defers to the XML and SDP
 * typefinders, validates UTF-8 at the start of the stream and — when the
 * stream is long enough and its length is known — at its middle,
 * averaging the two probabilities. */
static void
utf8_type_find (GstTypeFind * tf, gpointer unused)
{
  GstTypeFindProbability start_prob, mid_prob;
  guint64 length;

  /* leave xml to the xml typefinders */
  if (xml_check_first_element (tf, "", 0, TRUE))
    return;

  /* leave sdp to the sdp typefinders */
  if (sdp_check_header (tf))
    return;

  /* check beginning of stream */
  if (!utf8_type_find_have_valid_utf8_at_offset (tf, 0, &start_prob))
    return;

  GST_LOG ("start is plain text with probability of %u", start_prob);

  /* POSSIBLE is the highest probability we ever return if we can't
   * probe into the middle of the file and don't know its length */

  /* 0 or (guint64)-1 both mean "length unknown" */
  length = gst_type_find_get_length (tf);
  if (length == 0 || length == (guint64) - 1) {
    gst_type_find_suggest (tf, MIN (start_prob, GST_TYPE_FIND_POSSIBLE),
        UTF8_CAPS);
    return;
  }

  if (length < 64 * 1024) {
    /* short file: the start check covered most of it already */
    gst_type_find_suggest (tf, start_prob, UTF8_CAPS);
    return;
  }

  /* check middle of stream */
  if (!utf8_type_find_have_valid_utf8_at_offset (tf, length / 2, &mid_prob))
    return;

  GST_LOG ("middle is plain text with probability of %u", mid_prob);
  gst_type_find_suggest (tf, (start_prob + mid_prob) / 2, UTF8_CAPS);
}
+
+ /*** text/utf-16 and text/utf-32} ***/
+ /* While UTF-8 is unicode too, using text/plain for UTF-16 and UTF-32
+ is going to break stuff. */
+
+ typedef struct
+ {
+ size_t bomlen;
+ const char *const bom;
+ gboolean (*checker) (const guint8 *, gint, gint);
+ int boost;
+ int endianness;
+ } GstUnicodeTester;
+
/* Validate 'len' bytes as UTF-16 in the given endianness: length must be
 * even, every high surrogate must be followed by a low surrogate, and no
 * low surrogate may appear on its own. */
static gboolean
check_utf16 (const guint8 * data, gint len, gint endianness)
{
  GstByteReader br;
  guint16 high, low;

  low = high = 0;

  /* UTF-16 is a sequence of 16-bit units: length must be even */
  if (len & 1)
    return FALSE;

  gst_byte_reader_init (&br, data, len);
  while (len >= 2) {
    /* test first for a single 16 bit value in the BMP */
    if (endianness == G_BIG_ENDIAN)
      high = gst_byte_reader_get_uint16_be_unchecked (&br);
    else
      high = gst_byte_reader_get_uint16_le_unchecked (&br);
    if (high >= 0xD800 && high <= 0xDBFF) {
      /* start of a surrogate pair */
      if (len < 4)
        return FALSE;
      len -= 2;
      if (endianness == G_BIG_ENDIAN)
        low = gst_byte_reader_get_uint16_be_unchecked (&br);
      else
        low = gst_byte_reader_get_uint16_le_unchecked (&br);
      if (low >= 0xDC00 && low <= 0xDFFF) {
        /* second half of the surrogate pair */
      } else
        return FALSE;
    } else {
      /* a lone low surrogate is not valid UTF-16 */
      if (high >= 0xDC00 && high <= 0xDFFF)
        return FALSE;
    }
    len -= 2;
  }
  return TRUE;
}
+
+ static gboolean
+ check_utf32 (const guint8 * data, gint len, gint endianness)
+ {
+ if (len & 3)
+ return FALSE;
+ while (len > 3) {
+ guint32 v;
+ if (endianness == G_BIG_ENDIAN)
+ v = GST_READ_UINT32_BE (data);
+ else
+ v = GST_READ_UINT32_LE (data);
+ if (v >= 0x10FFFF)
+ return FALSE;
+ data += 4;
+ len -= 4;
+ }
+ return TRUE;
+ }
+
/* Shared driver for the UTF-16/UTF-32 typefinders.  Peeks as much data
 * as possible (up to 256 kB), runs each tester's validity checker, and
 * suggests 'media_type' with an endianness field for the best-scoring
 * tester.  A BOM match adds the tester's boost; with require_bom=TRUE
 * streams without a BOM are never suggested. */
static void
unicode_type_find (GstTypeFind * tf, const GstUnicodeTester * tester,
    guint n_tester, const char *media_type, gboolean require_bom)
{
  gsize n;
  gsize len = 4;
  const guint8 *data = gst_type_find_peek (tf, 0, len);
  int prob = -1;
  const gint max_scan_size = 256 * 1024;
  int endianness = 0;

  if (!data) {
    /* not even 4 bytes available: retry with 2 (enough for a UTF-16 BOM) */
    len = 2;
    data = gst_type_find_peek (tf, 0, len);
    if (!data)
      return;
  }

  /* find a large enough size that works */
  while (len < max_scan_size) {
    size_t newlen = len << 1;
    const guint8 *newdata = gst_type_find_peek (tf, 0, newlen);
    if (!newdata)
      break;
    len = newlen;
    data = newdata;
  }

  for (n = 0; n < n_tester; ++n) {
    int bom_boost = 0, tmpprob;
    if (len >= tester[n].bomlen) {
      if (!memcmp (data, tester[n].bom, tester[n].bomlen))
        bom_boost = tester[n].boost;
    }
    if (require_bom && bom_boost == 0)
      continue;
    if (!(*tester[n].checker) (data, len, tester[n].endianness))
      continue;
    /* keep the highest-probability tester; the BOM boost breaks ties */
    tmpprob = GST_TYPE_FIND_POSSIBLE - 20 + bom_boost;
    if (tmpprob > prob) {
      prob = tmpprob;
      endianness = tester[n].endianness;
    }
  }

  if (prob > 0) {
    GST_DEBUG ("This is valid %s %s", media_type,
        endianness == G_BIG_ENDIAN ? "be" : "le");
    gst_type_find_suggest_simple (tf, prob, media_type,
        "endianness", G_TYPE_INT, endianness, NULL);
  }
}
+
+ static GstStaticCaps utf16_caps = GST_STATIC_CAPS ("text/utf-16");
+
+ #define UTF16_CAPS gst_static_caps_get(&utf16_caps)
+
+ static void
+ utf16_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ static const GstUnicodeTester utf16tester[2] = {
+ {2, "\xff\xfe", check_utf16, 10, G_LITTLE_ENDIAN},
+ {2, "\xfe\xff", check_utf16, 20, G_BIG_ENDIAN},
+ };
+ unicode_type_find (tf, utf16tester, G_N_ELEMENTS (utf16tester),
+ "text/utf-16", TRUE);
+ }
+
+ static GstStaticCaps utf32_caps = GST_STATIC_CAPS ("text/utf-32");
+
+ #define UTF32_CAPS gst_static_caps_get(&utf32_caps)
+
+ static void
+ utf32_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ static const GstUnicodeTester utf32tester[2] = {
+ {4, "\xff\xfe\x00\x00", check_utf32, 10, G_LITTLE_ENDIAN},
+ {4, "\x00\x00\xfe\xff", check_utf32, 20, G_BIG_ENDIAN}
+ };
+ unicode_type_find (tf, utf32tester, G_N_ELEMENTS (utf32tester),
+ "text/utf-32", TRUE);
+ }
+
+ /*** text/uri-list ***/
-
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ static GstStaticCaps uri_caps = GST_STATIC_CAPS ("text/uri-list");
+
+ #define URI_CAPS (gst_static_caps_get(&uri_caps))
+ #define BUFFER_SIZE 16 /* If the string is < 16 bytes we're screwed */
+ #define INC_BUFFER { \
+ pos++; \
+ if (pos == BUFFER_SIZE) { \
+ pos = 0; \
+ offset += BUFFER_SIZE; \
+ data = gst_type_find_peek (tf, offset, BUFFER_SIZE); \
+ if (data == NULL) return; \
+ } else { \
+ data++; \
+ } \
+ }
++
/* Typefinder for text/uri-list: skips leading '#' comment lines, then
 * expects a URI scheme ([alpha][alnum]* followed by ':') and a '/' in
 * one of the next two bytes.  Note: INC_BUFFER hides a 'return' when
 * more data cannot be peeked. */
static void
uri_type_find (GstTypeFind * tf, gpointer unused)
{
  const guint8 *data = gst_type_find_peek (tf, 0, BUFFER_SIZE);
  guint pos = 0;
  guint offset = 0;

  if (data) {
    /* Search for # comment lines */
    while (*data == '#') {
      /* Goto end of line */
      while (*data != '\n') {
        INC_BUFFER;
      }

      INC_BUFFER;
    }

    if (!g_ascii_isalpha (*data)) {
      /* Had a non alpha char - can't be uri-list */
      return;
    }

    INC_BUFFER;

    /* the scheme continues with alphanumeric characters */
    while (g_ascii_isalnum (*data)) {
      INC_BUFFER;
    }

    if (*data != ':') {
      /* First non alpha char is not a : */
      return;
    }

    /* Get the next 2 bytes as well */
    data = gst_type_find_peek (tf, offset + pos, 3);
    if (data == NULL)
      return;

    /* NOTE(review): this accepts any ':' followed by a '/' in either of
     * the next two positions, not strictly "://" — confirm the looser
     * test is intentional */
    if (data[1] != '/' && data[2] != '/') {
      return;
    }

    gst_type_find_suggest (tf, GST_TYPE_FIND_LIKELY, URI_CAPS);
  }
}
++#endif
+
+ /*** application/itc ***/
+ static GstStaticCaps itc_caps = GST_STATIC_CAPS ("application/itc");
+ #define ITC_CAPS (gst_static_caps_get(&itc_caps))
+
/* Typefinder for application/itc (iTunes cover art).  The probability is
 * raised step by step as successive header structures check out: magic,
 * three preambles, artwork marker, 256 zero padding bytes and finally an
 * "item" marker.  Once the magic matched, whatever level was reached is
 * suggested via the single 'done' exit. */
static void
itc_type_find (GstTypeFind * tf, gpointer unused)
{
  DataScanCtx c = { 0, NULL, 0 };
  guint8 magic[8] = { 0x00, 0x00, 0x01, 0x1C, 0x69, 0x74, 0x63, 0x68 };
  guint8 preamble[4] = { 0x00, 0x00, 0x00, 0x02 };
  guint8 artwork_marker[8] = { 0x00, 0x00, 0x00, 0x00, 0x61, 0x72, 0x74, 0x77 };
  guint8 item_marker[4] = { 0x69, 0x74, 0x65, 0x6D };
  GstTypeFindProbability itc_prob = GST_TYPE_FIND_NONE;
  int i;

  if (G_UNLIKELY (!data_scan_ctx_ensure_data (tf, &c, 8)))
    return;

  /* bytes spell "....itch" */
  if (memcmp (c.data, magic, 8))
    return;

  /* At least we found the right magic */
  itc_prob = GST_TYPE_FIND_MINIMUM;
  data_scan_ctx_advance (tf, &c, 8);

  if (G_UNLIKELY (!data_scan_ctx_ensure_data (tf, &c, 12)))
    goto done;

  /* Check preamble 3 consecutive times */
  for (i = 0; i < 3; i++) {
    if (memcmp (c.data, preamble, 4))
      goto done;
    data_scan_ctx_advance (tf, &c, 4);
  }

  itc_prob = GST_TYPE_FIND_POSSIBLE;

  if (G_UNLIKELY (!data_scan_ctx_ensure_data (tf, &c, 8)))
    goto done;

  /* bytes spell "....artw" */
  if (memcmp (c.data, artwork_marker, 8))
    goto done;

  itc_prob = GST_TYPE_FIND_LIKELY;
  data_scan_ctx_advance (tf, &c, 8);

  if (G_UNLIKELY (!data_scan_ctx_ensure_data (tf, &c, 256)))
    goto done;

  /* ...and 256 0x00 padding bytes on what looks like the header's end */
  for (i = 0; i < 256; i++) {
    if (c.data[i])
      goto done;
  }

  itc_prob = GST_TYPE_FIND_NEARLY_CERTAIN;
  data_scan_ctx_advance (tf, &c, 256);

  if (G_UNLIKELY (!data_scan_ctx_ensure_data (tf, &c, 8)))
    goto done;

  /* "item" marker 4 bytes in */
  if (memcmp (c.data + 4, item_marker, 4))
    goto done;

  itc_prob = GST_TYPE_FIND_MAXIMUM;

done:
  gst_type_find_suggest (tf, itc_prob, ITC_CAPS);
}
+
+ /*** application/x-hls ***/
+
+ static GstStaticCaps hls_caps = GST_STATIC_CAPS ("application/x-hls");
+ #define HLS_CAPS (gst_static_caps_get(&hls_caps))
+
+ /* See http://tools.ietf.org/html/draft-pantos-http-live-streaming-05 */
/* Typefinder for application/x-hls.  Requires the "#EXTM3U" signature and
 * at least one HLS-specific tag within the first 4 kB, so that plain M3U
 * playlists are not misdetected as HLS. */
static void
hls_type_find (GstTypeFind * tf, gpointer unused)
{
  DataScanCtx c = { 0, NULL, 0 };

  /* Minimum useful size is #EXTM3U\n + 1 tag + ':' = 30 bytes */
  if (G_UNLIKELY (!data_scan_ctx_ensure_data (tf, &c, 30)))
    return;

  if (memcmp (c.data, "#EXTM3U", 7))
    return;

  data_scan_ctx_advance (tf, &c, 7);

  /* Check only the first 4KB */
  while (c.offset < 4096) {
    if (G_UNLIKELY (!data_scan_ctx_ensure_data (tf, &c, 21)))
      return;

    /* Search for # comment lines */
    if (c.data[0] == '#' && (memcmp (c.data, "#EXT-X-TARGETDURATION", 21) == 0
            || memcmp (c.data, "#EXT-X-STREAM-INF", 17) == 0
            || memcmp (c.data, "#EXT-X-MEDIA", 12) == 0)) {
      gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, HLS_CAPS);
      return;
    }

    /* advance byte-wise: tags may start anywhere after a newline */
    data_scan_ctx_advance (tf, &c, 1);
  }
}
+
+
+ /*** application/xml **********************************************************/
+
+ #define XML_BUFFER_SIZE 16
+ #define XML_INC_BUFFER { \
+ pos++; \
+ if (pos == XML_BUFFER_SIZE) { \
+ pos = 0; \
+ offset += XML_BUFFER_SIZE; \
+ data = gst_type_find_peek (tf, offset, XML_BUFFER_SIZE); \
+ if (data == NULL) return FALSE; \
+ } else { \
+ data++; \
+ } \
+ }
+
+ #define XML_INC_BUFFER_DATA { \
+ pos++; \
+ if (pos >= length) { \
+ return FALSE; \
+ } else { \
+ data++; \
+ } \
+ }
+
/* Check, from an in-memory buffer, whether the first XML element matches
 * 'element' (prefix compare over 'elen' chars).  With strict=TRUE the
 * buffer must begin with an "<?xml" declaration.  Gives up if no element
 * is found within the first 4 kB.  Note: XML_INC_BUFFER_DATA hides a
 * 'return FALSE' when the buffer end is reached. */
static gboolean
xml_check_first_element_from_data (const guint8 * data, guint length,
    const gchar * element, guint elen, gboolean strict)
{
  gboolean got_xmldec;
  guint pos = 0;

  g_return_val_if_fail (data != NULL, FALSE);

  /* need more than strlen("<?xml") bytes to decide anything */
  if (length <= 5)
    return FALSE;

  /* look for the XMLDec
   * see XML spec 2.8, Prolog and Document Type Declaration
   * http://www.w3.org/TR/2004/REC-xml-20040204/#sec-prolog-dtd */
  got_xmldec = (memcmp (data, "<?xml", 5) == 0);

  if (strict && !got_xmldec)
    return FALSE;

  /* skip XMLDec in any case if we've got one */
  if (got_xmldec) {
    pos += 5;
    data += 5;
  }

  /* look for the first element, it has to be the requested element. Bail
   * out if it is not within the first 4kB. */
  while (pos < MIN (4096, length)) {
    while (*data != '<' && pos < MIN (4096, length)) {
      XML_INC_BUFFER_DATA;
    }

    XML_INC_BUFFER_DATA;
    if (!g_ascii_isalpha (*data)) {
      /* if not alphabetic, it's a PI or an element / attribute declaration
       * like <?xxx or <!xxx */
      XML_INC_BUFFER_DATA;
      continue;
    }

    /* the first normal element, check if it's the one asked for */
    if (pos + elen + 1 >= length)
      return FALSE;
    return (element && strncmp ((const char *) data, element, elen) == 0);
  }

  return FALSE;
}
+
/* Check whether the stream's first XML element matches 'element' (prefix
 * compare over 'elen' chars), peeking data in XML_BUFFER_SIZE windows.
 * With strict=TRUE the stream must begin with an "<?xml" declaration.
 * Gives up after 4 kB.  Note: XML_INC_BUFFER hides a 'return FALSE' when
 * no more data can be peeked. */
static gboolean
xml_check_first_element (GstTypeFind * tf, const gchar * element, guint elen,
    gboolean strict)
{
  gboolean got_xmldec;
  const guint8 *data;
  guint offset = 0;
  guint pos = 0;

  data = gst_type_find_peek (tf, 0, XML_BUFFER_SIZE);
  if (!data)
    return FALSE;

  /* look for the XMLDec
   * see XML spec 2.8, Prolog and Document Type Declaration
   * http://www.w3.org/TR/2004/REC-xml-20040204/#sec-prolog-dtd */
  got_xmldec = (memcmp (data, "<?xml", 5) == 0);

  if (strict && !got_xmldec)
    return FALSE;

  /* skip XMLDec in any case if we've got one */
  if (got_xmldec) {
    pos += 5;
    data += 5;
  }

  /* look for the first element, it has to be the requested element. Bail
   * out if it is not within the first 4kB. */
  while (data && (offset + pos) < 4096) {
    while (*data != '<' && (offset + pos) < 4096) {
      XML_INC_BUFFER;
    }

    XML_INC_BUFFER;
    if (!g_ascii_isalpha (*data)) {
      /* if not alphabetic, it's a PI or an element / attribute declaration
       * like <?xxx or <!xxx */
      XML_INC_BUFFER;
      continue;
    }

    /* the first normal element, check if it's the one asked for */
    data = gst_type_find_peek (tf, offset + pos, elen + 1);
    return (data && element && strncmp ((char *) data, element, elen) == 0);
  }

  return FALSE;
}
+
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ static GstStaticCaps generic_xml_caps = GST_STATIC_CAPS ("application/xml");
+
+ #define GENERIC_XML_CAPS (gst_static_caps_get(&generic_xml_caps))
+ static void
+ xml_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ if (xml_check_first_element (tf, "", 0, TRUE)) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MINIMUM, GENERIC_XML_CAPS);
+ }
+ }
-
++#endif
+ /*** application/dash+xml ****************************************************/
+
+ static GstStaticCaps dash_caps = GST_STATIC_CAPS ("application/dash+xml");
+
+ #define DASH_CAPS gst_static_caps_get (&dash_caps)
+
+ static void
+ dash_mpd_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ if (xml_check_first_element (tf, "MPD", 3, FALSE) ||
+ xml_check_first_element (tf, "mpd", 3, FALSE)) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, DASH_CAPS);
+ }
+ }
+
+ /*** application/xges ****************************************************/
+
+ static GstStaticCaps xges_caps = GST_STATIC_CAPS ("application/xges");
+
+ #define XGES_CAPS gst_static_caps_get (&xges_caps)
+
+ static void
+ xges_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ if (xml_check_first_element (tf, "ges", 3, FALSE)) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, XGES_CAPS);
+ }
+ }
+
+ /***application/vnd.apple-fcp+xml ****************************************************/
+
+ static GstStaticCaps fcpxml_caps =
+ GST_STATIC_CAPS ("application/vnd.apple-fcp+xml");
+
+ #define FCPXML_CAPS gst_static_caps_get (&fcpxml_caps)
+
+ static void
+ fcpxml_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ if (xml_check_first_element (tf, "fcpxml", 3, FALSE)) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, FCPXML_CAPS);
+ }
+ }
+
+ /*** application/vnd.apple-xmeml+xml ****************************************************/
+
+ static GstStaticCaps xmeml_caps =
+ GST_STATIC_CAPS ("application/vnd.apple-xmeml+xml");
+
+ #define XMEML_CAPS gst_static_caps_get (&xmeml_caps)
+
+ static void
+ xmeml_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ if (xml_check_first_element (tf, "xmeml", 3, FALSE)) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, XMEML_CAPS);
+ }
+ }
+
+ /*** application/otio ****************************************************/
+
+ static GstStaticCaps otio_caps =
+ GST_STATIC_CAPS ("application/vnd.pixar.opentimelineio+json");
+
+ #define OTIO_CAPS gst_static_caps_get (&otio_caps)
+
+ static void
+ otio_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const gchar *data, *tmp;
+
+ data = (const gchar *) gst_type_find_peek (tf, 0, 30);
+ if (!data)
+ return;
+
+ tmp = (const gchar *) memchr (data, '{', 30);
+ if (!tmp)
+ return;
+
+ data = (const gchar *) gst_type_find_peek (tf, tmp - data, 30);
+ if (!data)
+ return;
+
+ tmp = (const gchar *) memchr (data, '"', 30);
+ if (!tmp)
+ return;
+
+ data = (const gchar *) gst_type_find_peek (tf, tmp - data, 14);
+ if (!data)
+ return;
+
+ if (memcmp (data, "\"OTIO_SCHEMA\":", 14) == 0) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, OTIO_CAPS);
+ }
+ }
+
+
+ /*** application/sdp *********************************************************/
+
+ static GstStaticCaps sdp_caps = GST_STATIC_CAPS ("application/sdp");
+
+ #define SDP_CAPS (gst_static_caps_get(&sdp_caps))
+ static gboolean
+ sdp_check_header (GstTypeFind * tf)
+ {
+ const guint8 *data;
+
+ data = gst_type_find_peek (tf, 0, 5);
+ if (!data)
+ return FALSE;
+
+ /* sdp must start with v=0[\r]\n */
+ if (memcmp (data, "v=0", 3))
+ return FALSE;
+
+ if (data[3] == '\r' && data[4] == '\n')
+ return TRUE;
+ if (data[3] == '\n')
+ return TRUE;
+
+ return FALSE;
+ }
+
+ static void
+ sdp_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ if (sdp_check_header (tf))
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, SDP_CAPS);
+ }
+
+ /*** application/smil *********************************************************/
-
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ static GstStaticCaps smil_caps = GST_STATIC_CAPS ("application/smil");
+
+ #define SMIL_CAPS (gst_static_caps_get(&smil_caps))
+ static void
+ smil_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ if (xml_check_first_element (tf, "smil", 4, FALSE)) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, SMIL_CAPS);
+ }
+ }
+
+ /*** application/ttml+xml *****************************************************/
+
+ static GstStaticCaps ttml_xml_caps = GST_STATIC_CAPS ("application/ttml+xml");
+
+ #define TTML_XML_CAPS (gst_static_caps_get(&ttml_xml_caps))
+ static void
+ ttml_xml_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ if (xml_check_first_element (tf, "tt", 2, FALSE)) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, TTML_XML_CAPS);
+ }
+ }
+
+ /*** text/html ***/
+
+ static GstStaticCaps html_caps = GST_STATIC_CAPS ("text/html");
+
+ #define HTML_CAPS gst_static_caps_get (&html_caps)
+
/* Typefinder for text/html: accepts an HTML doctype, an <html> first XML
 * element, or a literal "<html>" tag within the first 16 bytes. */
static void
html_type_find (GstTypeFind * tf, gpointer unused)
{
  const gchar *d, *data;

  data = (const gchar *) gst_type_find_peek (tf, 0, 16);
  if (!data)
    return;

  if (!g_ascii_strncasecmp (data, "<!DOCTYPE HTML", 14)) {
    gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, HTML_CAPS);
  } else if (xml_check_first_element (tf, "html", 4, FALSE)) {
    gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, HTML_CAPS);
  } else if ((d = memchr (data, '<', 16))) {
    /* found a '<' in the first 16 bytes: re-peek 6 bytes at its absolute
     * offset and match "<html>" case-insensitively */
    data = (const gchar *) gst_type_find_peek (tf, d - data, 6);
    if (data && g_ascii_strncasecmp (data, "<html>", 6) == 0) {
      gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, HTML_CAPS);
    }
  }
}
-
++#endif
+ /*** audio/midi ***/
+
+ static GstStaticCaps mid_caps = GST_STATIC_CAPS ("audio/midi");
+
+ #define MID_CAPS gst_static_caps_get(&mid_caps)
+ static void
+ mid_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 4);
+
+ /* http://jedi.ks.uiuc.edu/~johns/links/music/midifile.html */
+ if (data && data[0] == 'M' && data[1] == 'T' && data[2] == 'h'
+ && data[3] == 'd')
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, MID_CAPS);
+ }
+
+ /*** audio/mobile-xmf ***/
+
+ static GstStaticCaps mxmf_caps = GST_STATIC_CAPS ("audio/mobile-xmf");
+
+ #define MXMF_CAPS gst_static_caps_get(&mxmf_caps)
+ static void
+ mxmf_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = NULL;
+
+ /* Search FileId "XMF_" 4 bytes */
+ data = gst_type_find_peek (tf, 0, 4);
+ if (data && data[0] == 'X' && data[1] == 'M' && data[2] == 'F'
+ && data[3] == '_') {
+ /* Search Format version "2.00" 4 bytes */
+ data = gst_type_find_peek (tf, 4, 4);
+ if (data && data[0] == '2' && data[1] == '.' && data[2] == '0'
+ && data[3] == '0') {
+ /* Search TypeId 2 1 byte */
+ data = gst_type_find_peek (tf, 11, 1);
+ if (data && data[0] == 2) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, MXMF_CAPS);
+ }
+ }
+ }
+ }
+
+
+ /*** video/x-fli ***/
-
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ static GstStaticCaps flx_caps = GST_STATIC_CAPS ("video/x-fli");
+
+ #define FLX_CAPS gst_static_caps_get(&flx_caps)
+ static void
+ flx_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 134);
+
+ if (data) {
+ /* check magic and the frame type of the first frame */
+ if ((data[4] == 0x11 || data[4] == 0x12 ||
+ data[4] == 0x30 || data[4] == 0x44) &&
+ data[5] == 0xaf &&
+ ((data[132] == 0x00 || data[132] == 0xfa) && data[133] == 0xf1)) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, FLX_CAPS);
+ }
+ return;
+ }
+ data = gst_type_find_peek (tf, 0, 6);
+ if (data) {
+ /* check magic only */
+ if ((data[4] == 0x11 || data[4] == 0x12 ||
+ data[4] == 0x30 || data[4] == 0x44) && data[5] == 0xaf) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_LIKELY, FLX_CAPS);
+ }
+ return;
+ }
+ }
-
++#endif
+ /*** application/x-id3 ***/
+
+ static GstStaticCaps id3_caps = GST_STATIC_CAPS ("application/x-id3");
+
+ #define ID3_CAPS gst_static_caps_get(&id3_caps)
+ static void
+ id3v2_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 10);
+
+ if (data && memcmp (data, "ID3", 3) == 0 &&
+ data[3] != 0xFF && data[4] != 0xFF &&
+ (data[6] & 0x80) == 0 && (data[7] & 0x80) == 0 &&
+ (data[8] & 0x80) == 0 && (data[9] & 0x80) == 0) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, ID3_CAPS);
+ }
+ }
+
+ static void
+ id3v1_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = gst_type_find_peek (tf, -128, 3);
+
+ if (data && memcmp (data, "TAG", 3) == 0) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, ID3_CAPS);
+ }
+ }
+
+ /*** application/x-ape ***/
+
+ static GstStaticCaps apetag_caps = GST_STATIC_CAPS ("application/x-apetag");
+
+ #define APETAG_CAPS gst_static_caps_get(&apetag_caps)
+ static void
+ apetag_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data;
+
+ /* APEv1/2 at start of file */
+ data = gst_type_find_peek (tf, 0, 8);
+ if (data && !memcmp (data, "APETAGEX", 8)) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, APETAG_CAPS);
+ return;
+ }
+
+ /* APEv1/2 at end of file */
+ data = gst_type_find_peek (tf, -32, 8);
+ if (data && !memcmp (data, "APETAGEX", 8)) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, APETAG_CAPS);
+ return;
+ }
+ }
+
+ /*** audio/x-ttafile ***/
-
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ static GstStaticCaps tta_caps = GST_STATIC_CAPS ("audio/x-ttafile");
+
+ #define TTA_CAPS gst_static_caps_get(&tta_caps)
+ static void
+ tta_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 3);
+
+ if (data) {
+ if (memcmp (data, "TTA", 3) == 0) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, TTA_CAPS);
+ return;
+ }
+ }
+ }
-
++#endif
+ /*** audio/x-flac ***/
+ static GstStaticCaps flac_caps = GST_STATIC_CAPS ("audio/x-flac");
+
+ #define FLAC_CAPS (gst_static_caps_get(&flac_caps))
+
/* Typefinder for audio/x-flac: recognises a plain FLAC stream by its
 * "fLaC" stream marker, or the legacy FLAC-in-Ogg mapping by its
 * "\177FLAC\001" header.  Header-less detection is intentionally
 * disabled (see the #if 0 block below). */
static void
flac_type_find (GstTypeFind * tf, gpointer unused)
{
  DataScanCtx c = { 0, NULL, 0 };

  if (G_UNLIKELY (!data_scan_ctx_ensure_data (tf, &c, 4)))
    return;

  /* standard flac (also old/broken flac-in-ogg with an initial 4-byte marker
   * packet and without the usual packet framing) */
  if (memcmp (c.data, "fLaC", 4) == 0) {
    gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, FLAC_CAPS);
    return;
  }

  if (G_UNLIKELY (!data_scan_ctx_ensure_data (tf, &c, 6)))
    return;

  /* flac-in-ogg, see http://flac.sourceforge.net/ogg_mapping.html */
  if (memcmp (c.data, "\177FLAC\001", 6) == 0) {
    gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, FLAC_CAPS);
    return;
  }

/* disabled because it happily typefinds /dev/urandom as audio/x-flac, and
 * because I yet have to see header-less flac in the wild */
#if 0
  /* flac without headers (subset format) */
  /* 64K should be enough */
  while (c.offset < (64 * 1024)) {
    if (G_UNLIKELY (!data_scan_ctx_ensure_data (tf, &c, 4)))
      break;

    /* look for frame header,
     * http://flac.sourceforge.net/format.html#frame_header
     */
    if (c.data[0] == 0xff && (c.data[1] >> 2) == 0x3e) {
      /* bit 15 in the header must be 0 */
      if (((c.data[1] >> 1) & 0x01) == 0x01)
        goto advance;

      /* blocksize must be != 0x00 */
      if ((c.data[2] >> 4) == 0x00)
        goto advance;

      /* samplerate must be != 0x0f */
      if ((c.data[2] & 0x0f) == 0x0f)
        goto advance;
      /* also 0 is invalid, as it means get the info from the header and we
       * don't have headers if we are here */
      if ((c.data[2] & 0x0f) == 0x00)
        goto advance;

      /* channel assignment must be < 11 */
      if ((c.data[3] >> 4) >= 11)
        goto advance;

      /* sample size must be != 0x07 and != 0x05 */
      if (((c.data[3] >> 1) & 0x07) == 0x07)
        goto advance;
      if (((c.data[3] >> 1) & 0x07) == 0x05)
        goto advance;
      /* also 0 is invalid, as it means get the info from the header and we
       * don't have headers if we are here */
      if (((c.data[3] >> 1) & 0x07) == 0x00)
        goto advance;

      /* next bit must be 0 */
      if ((c.data[3] & 0x01) == 0x01)
        goto advance;

      /* FIXME: shouldn't we include the crc check ? */

      GST_DEBUG ("Found flac without headers at %d", (gint) c.offset);
      gst_type_find_suggest (tf, GST_TYPE_FIND_POSSIBLE, FLAC_CAPS);
      return;
    }
  advance:
    data_scan_ctx_advance (tf, &c, 1);
  }
#endif
}
+
+ /* TODO: we could probably make a generic function for this.. */
/* Count consecutive LOAS EPAudioSyncStream frames starting at *scan_ctx
 * (which is left unmodified).  Each frame's 13-bit length field must lead
 * straight to the next 0x4de1 sync word.  Stops after max_frames frames
 * or 64 kB, whichever comes first; returns the number of frames found. */
static gint
aac_type_find_scan_loas_frames_ep (GstTypeFind * tf, DataScanCtx * scan_ctx,
    gint max_frames)
{
  DataScanCtx c = *scan_ctx;
  guint16 snc;
  guint len;
  gint count = 0;

  do {
    if (!data_scan_ctx_ensure_data (tf, &c, 5))
      break;

    /* EPAudioSyncStream: 13-bit frame length split over bytes 2-4 */
    len = ((c.data[2] & 0x0f) << 9) | (c.data[3] << 1) |
        ((c.data[4] & 0x80) >> 7);

    if (len == 0 || !data_scan_ctx_ensure_data (tf, &c, len + 2)) {
      GST_DEBUG ("Wrong sync or next frame not within reach, len=%u", len);
      break;
    }

    /* check length of frame */
    snc = GST_READ_UINT16_BE (c.data + len);
    if (snc != 0x4de1) {
      GST_DEBUG ("No sync found at 0x%" G_GINT64_MODIFIER "x", c.offset + len);
      break;
    }

    ++count;

    GST_DEBUG ("Found LOAS syncword #%d at offset 0x%" G_GINT64_MODIFIER "x, "
        "framelen %u", count, c.offset, len);

    data_scan_ctx_advance (tf, &c, len);
  } while (count < max_frames && (c.offset - scan_ctx->offset) < 64 * 1024);

  GST_DEBUG ("found %d consecutive frames", count);
  return count;
}
+
/* Count consecutive LOAS AudioSyncStream frames starting at *scan_ctx
 * (which is left unmodified).  The 13-bit frame length (plus the 3-byte
 * sync header) must lead to the next 11-bit 0x2b7 sync word (0x56ex in
 * the top bits).  Stops after max_frames frames or 64 kB; returns the
 * number of frames found. */
static gint
aac_type_find_scan_loas_frames (GstTypeFind * tf, DataScanCtx * scan_ctx,
    gint max_frames)
{
  DataScanCtx c = *scan_ctx;
  guint16 snc;
  guint len;
  gint count = 0;

  do {
    if (!data_scan_ctx_ensure_data (tf, &c, 3))
      break;

    /* AudioSyncStream: 13-bit payload length in bytes 1-2 */
    len = ((c.data[1] & 0x1f) << 8) | c.data[2];
    /* add size of sync stream header */
    len += 3;

    if (len == 0 || !data_scan_ctx_ensure_data (tf, &c, len + 2)) {
      GST_DEBUG ("Wrong sync or next frame not within reach, len=%u", len);
      break;
    }

    /* check length of frame */
    snc = GST_READ_UINT16_BE (c.data + len);
    if ((snc & 0xffe0) != 0x56e0) {
      GST_DEBUG ("No sync found at 0x%" G_GINT64_MODIFIER "x", c.offset + len);
      break;
    }

    ++count;

    GST_DEBUG ("Found LOAS syncword #%d at offset 0x%" G_GINT64_MODIFIER "x, "
        "framelen %u", count, c.offset, len);

    data_scan_ctx_advance (tf, &c, len);
  } while (count < max_frames && (c.offset - scan_ctx->offset) < 64 * 1024);

  GST_DEBUG ("found %d consecutive frames", count);
  return count;
}
+
+ /*** audio/mpeg version 2, 4 ***/
+
+ static GstStaticCaps aac_caps = GST_STATIC_CAPS ("audio/mpeg, "
+ "mpegversion = (int) { 2, 4 }, framed = (bool) false");
+ #define AAC_CAPS (gst_static_caps_get(&aac_caps))
+ #define AAC_AMOUNT (4096)
+ static void
+ aac_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ DataScanCtx c = { 0, NULL, 0 };
+ GstTypeFindProbability best_probability = GST_TYPE_FIND_NONE;
+ GstCaps *best_caps = NULL;
+ gint best_count = 0;
+
+ while (c.offset < AAC_AMOUNT) {
+ guint snc, len, offset, i;
+
+ /* detect adts header or adif header.
+ * The ADIF header is 4 bytes, that should be OK. The ADTS header, on
+ * the other hand, is 14 bits only, so we require one valid frame with
+ * again a valid syncpoint on the next one (28 bits) for certainty. We
+ * require 4 kB, which is quite a lot, since frames are generally 200-400
+ * bytes.
+ * LOAS has 2 possible syncwords, which are 11 bits and 16 bits long.
+ * The following stream syntax depends on which one is found.
+ */
+ if (G_UNLIKELY (!data_scan_ctx_ensure_data (tf, &c, 6)))
+ break;
+
+ snc = GST_READ_UINT16_BE (c.data);
+ if (G_UNLIKELY ((snc & 0xfff6) == 0xfff0)) {
+ /* ADTS header - find frame length */
+ GST_DEBUG ("Found one ADTS syncpoint at offset 0x%" G_GINT64_MODIFIER
+ "x, tracing next...", c.offset);
+ len = ((c.data[3] & 0x03) << 11) |
+ (c.data[4] << 3) | ((c.data[5] & 0xe0) >> 5);
+
+ if (len == 0 || !data_scan_ctx_ensure_data (tf, &c, len + 6)) {
+ GST_DEBUG ("Wrong sync or next frame not within reach, len=%u", len);
+ goto next;
+ }
+
+ offset = len;
+ /* check if there's a second ADTS frame */
+ snc = GST_READ_UINT16_BE (c.data + offset);
+ if ((snc & 0xfff6) == 0xfff0) {
+ GstCaps *caps;
+ guint mpegversion, sample_freq_idx, channel_config, profile_idx, rate;
+ guint8 audio_config[2];
+
+ mpegversion = (c.data[1] & 0x08) ? 2 : 4;
+ profile_idx = c.data[2] >> 6;
+ sample_freq_idx = ((c.data[2] & 0x3c) >> 2);
+ channel_config = ((c.data[2] & 0x01) << 2) + (c.data[3] >> 6);
+
+ GST_DEBUG ("Found second ADTS-%d syncpoint at offset 0x%"
+ G_GINT64_MODIFIER "x, framelen %u", mpegversion, c.offset, len);
+
+ /* 0xd and 0xe are reserved. 0xf means the sample frequency is directly
+ * specified in the header, but that's not allowed for ADTS */
+ if (sample_freq_idx > 0xc) {
+ GST_DEBUG ("Unexpected sample frequency index %d or wrong sync",
+ sample_freq_idx);
+ goto next;
+ }
+
+ rate = gst_codec_utils_aac_get_sample_rate_from_index (sample_freq_idx);
+ GST_LOG ("ADTS: profile=%u, rate=%u", profile_idx, rate);
+
+ /* The ADTS frame header is slightly different from the
+ * AudioSpecificConfig defined for the MPEG-4 container, so we just
+ * construct enough of it for getting the level here. */
+ /* ADTS counts profiles from 0 instead of 1 to save bits */
+ audio_config[0] = (profile_idx + 1) << 3;
+ audio_config[0] |= (sample_freq_idx >> 1) & 0x7;
+ audio_config[1] = (sample_freq_idx & 0x1) << 7;
+ audio_config[1] |= (channel_config & 0xf) << 3;
+
+ caps = gst_caps_new_simple ("audio/mpeg",
+ "framed", G_TYPE_BOOLEAN, FALSE,
+ "mpegversion", G_TYPE_INT, mpegversion,
+ "stream-format", G_TYPE_STRING, "adts", NULL);
+
+ gst_codec_utils_aac_caps_set_level_and_profile (caps, audio_config, 2);
+
+ /* add rate and number of channels if we can */
+ if (channel_config != 0 && channel_config <= 7) {
+ const guint channels_map[] = { 0, 1, 2, 3, 4, 5, 6, 8 };
+
+ gst_caps_set_simple (caps, "channels", G_TYPE_INT,
+ channels_map[channel_config], "rate", G_TYPE_INT, rate, NULL);
+ }
+
+ /* length of the second ADTS frame */
+ len = ((c.data[offset + 3] & 0x03) << 11) |
+ (c.data[offset + 4] << 3) | ((c.data[offset + 5] & 0xe0) >> 5);
+
+ if (len == 0 || !data_scan_ctx_ensure_data (tf, &c, offset + len + 6)) {
+ GST_DEBUG ("Wrong sync or next frame not within reach, len=%u", len);
+ gst_type_find_suggest (tf, GST_TYPE_FIND_LIKELY, caps);
+ } else {
+ offset += len;
+ /* find more aac sync to select correctly */
+ /* check if there's a third/fourth/fifth/sixth ADTS frame, if there is a sixth frame, set probability to maximum:100% */
+ for (i = 3; i <= 6; i++) {
+ len = ((c.data[offset + 3] & 0x03) << 11) |
+ (c.data[offset + 4] << 3) | ((c.data[offset + 5] & 0xe0) >> 5);
+ if (len == 0
+ || !data_scan_ctx_ensure_data (tf, &c, offset + len + 6)) {
+ GST_DEBUG ("Wrong sync or next frame not within reach, len=%u",
+ len);
+ break;
+ }
+ snc = GST_READ_UINT16_BE (c.data + offset);
+ if ((snc & 0xfff6) == 0xfff0) {
+ GST_DEBUG ("Find %und Sync..probability is %u ", i,
+ GST_TYPE_FIND_LIKELY + 5 * (i - 2));
+ offset += len;
+ } else {
+ break;
+ }
+ }
+ gst_type_find_suggest (tf, GST_TYPE_FIND_LIKELY + 5 * (i - 3), caps);
+
+ }
+ gst_caps_unref (caps);
+ break;
+ }
+
+ GST_DEBUG ("No next frame found... (should have been at 0x%x)", len);
+ } else if (G_UNLIKELY ((snc & 0xffe0) == 0x56e0 || snc == 0x4de1)) {
+ gint count;
+
+ /* LOAS frame */
+ GST_INFO ("Possible LOAS syncword at offset 0x%" G_GINT64_MODIFIER
+ "x, scanning for more frames...", c.offset);
+
+ if (snc == 0x4de1)
+ count = aac_type_find_scan_loas_frames_ep (tf, &c, 20);
+ else
+ count = aac_type_find_scan_loas_frames (tf, &c, 20);
+
+ if (count >= 3 && count > best_count) {
+ gst_caps_replace (&best_caps, NULL);
+ best_caps = gst_caps_new_simple ("audio/mpeg",
+ "framed", G_TYPE_BOOLEAN, FALSE,
+ "mpegversion", G_TYPE_INT, 4,
+ "stream-format", G_TYPE_STRING, "loas", NULL);
+ best_count = count;
+ best_probability = GST_TYPE_FIND_POSSIBLE - 10 + count * 3;
+ if (best_probability >= GST_TYPE_FIND_LIKELY)
+ break;
+ }
+ } else if (!memcmp (c.data, "ADIF", 4)) {
+ /* ADIF header */
+ gst_type_find_suggest_simple (tf, GST_TYPE_FIND_LIKELY, "audio/mpeg",
+ "framed", G_TYPE_BOOLEAN, FALSE, "mpegversion", G_TYPE_INT, 4,
+ "stream-format", G_TYPE_STRING, "adif", NULL);
+ break;
+ }
+
+ next:
+
+ data_scan_ctx_advance (tf, &c, 1);
+ }
+
+ if (best_probability > GST_TYPE_FIND_NONE) {
+ gst_type_find_suggest (tf, best_probability, best_caps);
+ gst_caps_unref (best_caps);
+ }
+ }
+
+ /*** audio/mpeg version 1 ***/
+
+ /*
+ * The chance that random data is identified as a valid mp3 header is 63 / 2^18
+ * (0.024%) per try. This makes the function for calculating false positives
+ * 1 - (1 - ((63 / 2 ^18) ^ GST_MP3_TYPEFIND_MIN_HEADERS)) ^ buffersize)
+ * This has the following probabilities of false positives:
+ * datasize MIN_HEADERS
+ * (bytes) 1 2 3 4
+ * 4096 62.6% 0.02% 0% 0%
+ * 16384 98% 0.09% 0% 0%
+ * 1 MiB 100% 5.88% 0% 0%
+ * 1 GiB 100% 100% 1.44% 0%
+ * 1 TiB 100% 100% 100% 0.35%
 * This means that the current choice (3 headers with, most of the time,
 * 4096 byte buffers) is pretty safe for now.
+ *
+ * The max. size of each frame is 1440 bytes, which means that for N frames to
+ * be detected, we need 1440 * GST_MP3_TYPEFIND_MIN_HEADERS + 3 bytes of data.
+ * Assuming we step into the stream right after the frame header, this
+ * means we need 1440 * (GST_MP3_TYPEFIND_MIN_HEADERS + 1) - 1 + 3 bytes
+ * of data (5762) to always detect any mp3.
+ */
+
/* MPEG audio bitrate tables in kbit/s, indexed as
 * [MPEG-1 ? 0 : 1][layer - 1][bitrate index].
 * Index 0 means "free format"; index 15 is invalid (rejected by
 * mp3_type_frame_length_from_header), so only 15 values are listed. */
static const guint mp3types_bitrates[2][3][16] =
    { {{0, 32, 64, 96, 128, 160, 192, 224, 256, 288, 320, 352, 384, 416, 448,},
    {0, 32, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384,},
    {0, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320,}},
{{0, 32, 48, 56, 64, 80, 96, 112, 128, 144, 160, 176, 192, 224, 256,},
    {0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160,},
    {0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160,}},
};
+
/* Sample rates in Hz, indexed as [version row][samplerate index]:
 * row 0 = MPEG-2.5, row 1 = MPEG-2, row 2 = MPEG-1 (callers map the header
 * version field 0/2/3 onto rows 0/1/2). */
static const guint mp3types_freqs[3][3] = { {11025, 12000, 8000},
{22050, 24000, 16000},
{44100, 48000, 32000}
};
+
/* Parse a 32-bit MPEG audio frame header (4 bytes read big-endian) and
 * compute the frame length in bytes.
 *
 * put_layer / put_channels / put_bitrate / put_samplerate: optional out
 *   parameters, written only when the header is valid (non-zero return).
 * may_be_free_format: set to TRUE when the bitrate index is 0 and no
 *   free-format frame length is known yet, telling the caller to measure
 *   the distance to the next sync.  NOTE(review): dereferenced without a
 *   NULL check — callers passing NULL must pass possible_free_framelen
 *   != -1 (the only in-file caller doing so passes 0).
 * possible_free_framelen: measured free-format frame length, or -1 if
 *   unknown.
 *
 * Returns 0 if the header is not a valid MPEG audio frame header. */
static inline guint
mp3_type_frame_length_from_header (guint32 header, guint * put_layer,
    guint * put_channels, guint * put_bitrate, guint * put_samplerate,
    gboolean * may_be_free_format, gint possible_free_framelen)
{
  guint bitrate, layer, length, mode, samplerate, version, channels;

  /* all 11 sync bits must be set */
  if ((header & 0xffe00000) != 0xffe00000)
    return 0;

  /* we don't need extension, copyright, original or
   * emphasis for the frame length */
  header >>= 6;

  /* mode */
  mode = header & 0x3;
  header >>= 3;

  /* padding (reused as the initial value of the length accumulator) */
  length = header & 0x1;
  header >>= 1;

  /* sampling frequency (index 3 is reserved) */
  samplerate = header & 0x3;
  if (samplerate == 3)
    return 0;
  header >>= 2;

  /* bitrate index */
  bitrate = header & 0xF;
  if (bitrate == 0 && possible_free_framelen == -1) {
    GST_LOG ("Possibly a free format mp3 - signaling");
    *may_be_free_format = TRUE;
  }
  if (bitrate == 15 || (bitrate == 0 && possible_free_framelen == -1))
    return 0;

  /* ignore error correction, too */
  header >>= 5;

  /* layer (header field 0 is reserved, hence the == 4 rejection) */
  layer = 4 - (header & 0x3);
  if (layer == 4)
    return 0;
  header >>= 2;

  /* version 0=MPEG2.5; 2=MPEG2; 3=MPEG1 (1 is reserved) */
  version = header & 0x3;
  if (version == 1)
    return 0;

  /* lookup */
  channels = (mode == 3) ? 1 : 2;       /* mode 3 = single channel (mono) */
  samplerate = mp3types_freqs[version > 0 ? version - 1 : 0][samplerate];
  if (bitrate == 0) {
    /* possible freeform mp3: derive the bitrate from the measured frame
     * length instead of the (absent) bitrate index */
    if (layer == 1) {
      length *= 4;
      length += possible_free_framelen;
      bitrate = length * samplerate / 48000;
    } else {
      length += possible_free_framelen;
      bitrate = length * samplerate /
          ((layer == 3 && version != 3) ? 72000 : 144000);
    }
    /* freeform mp3 should have a higher-than-usually-allowed bitrate */
    GST_LOG ("calculated bitrate: %u, max usually: %u", bitrate,
        mp3types_bitrates[version == 3 ? 0 : 1][layer - 1][14]);
    if (bitrate < mp3types_bitrates[version == 3 ? 0 : 1][layer - 1][14])
      return 0;
  } else {
    /* calculating */
    bitrate = mp3types_bitrates[version == 3 ? 0 : 1][layer - 1][bitrate];
    if (layer == 1) {
      length = ((12000 * bitrate / samplerate) + length) * 4;
    } else {
      length += ((layer == 3
              && version != 3) ? 72000 : 144000) * bitrate / samplerate;
    }
  }

  GST_LOG ("mp3typefind: calculated mp3 frame length of %u bytes", length);
  GST_LOG
      ("mp3typefind: samplerate = %u - bitrate = %u - layer = %u - version = %u"
      " - channels = %u", samplerate, bitrate, layer, version, channels);

  if (put_layer)
    *put_layer = layer;
  if (put_channels)
    *put_channels = channels;
  if (put_bitrate)
    *put_bitrate = bitrate;
  if (put_samplerate)
    *put_samplerate = samplerate;

  return length;
}
+
+
static GstStaticCaps mp3_caps = GST_STATIC_CAPS ("audio/mpeg, "
    "mpegversion = (int) 1, layer = (int) [ 1, 3 ]");
#define MP3_CAPS (gst_static_caps_get(&mp3_caps))
/*
 * random values for typefinding
 * if no more data is available, we will return a probability of
 * (found_headers/TRY_HEADERS) * (MAXIMUM * (TRY_SYNC - bytes_skipped)
 * / TRY_SYNC)
 * if found_headers >= MIN_HEADERS
 */
#define GST_MP3_TYPEFIND_MIN_HEADERS (2)        /* min. consecutive headers for a guess */
#define GST_MP3_TYPEFIND_TRY_HEADERS (5)        /* headers needed for full confidence */
#define GST_MP3_TYPEFIND_TRY_SYNC (GST_TYPE_FIND_MAXIMUM * 100) /* 10kB */
#define GST_MP3_TYPEFIND_SYNC_SIZE (2048)       /* peek window size while syncing */
#define GST_MP3_WRONG_HEADER (10)       /* probability penalty when the stream
                                         * did not start with a valid header */
+
/* Scan for up to GST_MP3_TYPEFIND_TRY_HEADERS consecutive valid MP3 frame
 * headers, starting anywhere within the first GST_MP3_TYPEFIND_TRY_SYNC
 * bytes after @start_off.
 *
 * On success *found_layer is the detected MPEG layer and *found_prob a
 * probability that decreases with the number of bytes skipped before the
 * first header; both are left 0 when nothing usable was found. */
static void
mp3_type_find_at_offset (GstTypeFind * tf, guint64 start_off,
    guint * found_layer, GstTypeFindProbability * found_prob)
{
  const guint8 *data = NULL;
  const guint8 *data_end = NULL;
  guint size;
  guint64 skipped;
  gint last_free_offset = -1;
  gint last_free_framelen = -1;
  gboolean headerstart = TRUE;

  *found_layer = 0;
  *found_prob = 0;

  size = 0;
  skipped = 0;
  while (skipped < GST_MP3_TYPEFIND_TRY_SYNC) {
    if (size <= 0) {
      /* refill the peek window; halve the request until the peek succeeds,
       * since less data may be available near EOF */
      size = GST_MP3_TYPEFIND_SYNC_SIZE * 2;
      do {
        size /= 2;
        data = gst_type_find_peek (tf, skipped + start_off, size);
      } while (size > 10 && !data);
      if (!data)
        break;
      data_end = data + size;
    }
    if (*data == 0xFF) {
      /* candidate sync byte: try to follow a chain of frame headers here */
      const guint8 *head_data = NULL;
      guint layer = 0, bitrate, samplerate, channels;
      guint found = 0;          /* number of valid headers found */
      guint64 offset = skipped;
      gboolean changed = FALSE;
      guint prev_layer = 0;
      guint prev_channels = 0, prev_samplerate = 0;

      while (found < GST_MP3_TYPEFIND_TRY_HEADERS) {
        guint32 head;
        guint length;
        gboolean free = FALSE;

        /* read the next header from the buffered window when it lies
         * within it, otherwise peek the 4 bytes directly */
        if ((gint64) (offset - skipped + 4) >= 0 &&
            data + offset - skipped + 4 < data_end) {
          head_data = data + offset - skipped;
        } else {
          head_data = gst_type_find_peek (tf, offset + start_off, 4);
        }
        if (!head_data)
          break;
        head = GST_READ_UINT32_BE (head_data);
        if (!(length = mp3_type_frame_length_from_header (head, &layer,
                    &channels, &bitrate, &samplerate, &free,
                    last_free_framelen))) {
          if (free) {
            /* free-format candidate: remember the first sync position; on
             * the second one, the distance between them is the frame length */
            if (last_free_offset == -1)
              last_free_offset = offset;
            else {
              last_free_framelen = offset - last_free_offset;
              offset = last_free_offset;
              continue;
            }
          } else {
            last_free_framelen = -1;
          }

          /* Mark the fact that we didn't find a valid header at the beginning */
          if (found == 0)
            headerstart = FALSE;

          GST_LOG ("%d. header at offset %" G_GUINT64_FORMAT
              " (0x%" G_GINT64_MODIFIER "x) was not an mp3 header "
              "(possibly-free: %s)", found + 1, start_off + offset,
              start_off + offset, free ? "yes" : "no");
          break;
        }
        if ((prev_layer && prev_layer != layer) ||
            /* (prev_bitrate && prev_bitrate != bitrate) || <-- VBR */
            (prev_samplerate && prev_samplerate != samplerate) ||
            (prev_channels && prev_channels != channels)) {
          /* this means an invalid property, or a change, which might mean
           * that this is not a mp3 but just a random bytestream. It could
           * be a freaking funky encoded mp3 though. We'll just not count
           * this header*/
          if (prev_layer)
            changed = TRUE;
        } else {
          found++;
          GST_LOG ("found %d. header at offset %" G_GUINT64_FORMAT " (0x%"
              G_GINT64_MODIFIER "X)", found, start_off + offset,
              start_off + offset);
        }
        prev_layer = layer;
        prev_channels = channels;
        prev_samplerate = samplerate;

        offset += length;
      }
      g_assert (found <= GST_MP3_TYPEFIND_TRY_HEADERS);
      if (found != 0 && head_data == NULL &&
          gst_type_find_peek (tf, offset + start_off - 1, 1) == NULL)
        /* Incomplete last frame - don't count it. */
        found--;
      if (found == GST_MP3_TYPEFIND_TRY_HEADERS ||
          (found >= GST_MP3_TYPEFIND_MIN_HEADERS && head_data == NULL)) {
        /* we can make a valid guess */
        guint probability = found * GST_TYPE_FIND_MAXIMUM *
            (GST_MP3_TYPEFIND_TRY_SYNC - skipped) /
            GST_MP3_TYPEFIND_TRY_HEADERS / GST_MP3_TYPEFIND_TRY_SYNC;

        /* penalise streams whose first candidate header was invalid */
        if (!headerstart
            && probability > (GST_TYPE_FIND_MINIMUM + GST_MP3_WRONG_HEADER))
          probability -= GST_MP3_WRONG_HEADER;
        if (probability < GST_TYPE_FIND_MINIMUM)
          probability = GST_TYPE_FIND_MINIMUM;
        if (start_off > 0)
          probability /= 2;
        if (!changed)
          probability = (probability + GST_TYPE_FIND_MAXIMUM) / 2;

        GST_INFO
            ("audio/mpeg calculated %u = %u * %u / %u * (%u - %"
            G_GUINT64_FORMAT ") / %u", probability, GST_TYPE_FIND_MAXIMUM,
            found, GST_MP3_TYPEFIND_TRY_HEADERS, GST_MP3_TYPEFIND_TRY_SYNC,
            (guint64) skipped, GST_MP3_TYPEFIND_TRY_SYNC);
        /* make sure we're not id3 tagged (ID3v1 "TAG" 128 bytes before EOF) */
        head_data = gst_type_find_peek (tf, -128, 3);
        if (head_data && (memcmp (head_data, "TAG", 3) == 0)) {
          probability = 0;
        }
        g_assert (probability <= GST_TYPE_FIND_MAXIMUM);

        *found_prob = probability;
        if (probability > 0)
          *found_layer = layer;
        return;
      }
    }
    data++;
    skipped++;
    size--;
  }
}
+
+ static void
+ mp3_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ GstTypeFindProbability prob, mid_prob;
+ const guint8 *data;
+ guint layer, mid_layer;
+ guint64 length;
+
+ mp3_type_find_at_offset (tf, 0, &layer, &prob);
+ length = gst_type_find_get_length (tf);
+
+ if (length == 0 || length == (guint64) - 1) {
+ if (prob != 0)
+ goto suggest;
+ return;
+ }
+
+ /* if we're pretty certain already, skip the additional check */
+ if (prob >= GST_TYPE_FIND_LIKELY)
+ goto suggest;
+
+ mp3_type_find_at_offset (tf, length / 2, &mid_layer, &mid_prob);
+
+ if (mid_prob > 0) {
+ if (prob == 0) {
+ GST_LOG ("detected audio/mpeg only in the middle (p=%u)", mid_prob);
+ layer = mid_layer;
+ prob = mid_prob;
+ goto suggest;
+ }
+
+ if (layer != mid_layer) {
+ GST_WARNING ("audio/mpeg layer discrepancy: %u vs. %u", layer, mid_layer);
+ return; /* FIXME: or should we just go with the one in the middle? */
+ }
+
+ /* detected mpeg audio both in middle of the file and at the start */
+ prob = (prob + mid_prob) / 2;
+ goto suggest;
+ }
+
+ /* a valid header right at the start makes it more likely
+ * that this is actually plain mpeg-1 audio */
+ if (prob > 0) {
+ data = gst_type_find_peek (tf, 0, 4); /* use min. frame size? */
+ if (data && mp3_type_frame_length_from_header (GST_READ_UINT32_BE (data),
+ &layer, NULL, NULL, NULL, NULL, 0) != 0) {
+ prob = MIN (prob + 10, GST_TYPE_FIND_MAXIMUM);
+ }
+ }
+
+ if (prob > 0)
+ goto suggest;
+
+ return;
+
+ suggest:
+ {
+ g_return_if_fail (layer >= 1 && layer <= 3);
+
+ gst_type_find_suggest_simple (tf, prob, "audio/mpeg",
+ "mpegversion", G_TYPE_INT, 1, "layer", G_TYPE_INT, layer,
+ "parsed", G_TYPE_BOOLEAN, FALSE, NULL);
+ }
+ }
+
+ /*** audio/x-musepack ***/
+
/* Musepack: stream version 7 files start with "MP+", version 8 with "MPCK" */
static GstStaticCaps musepack_caps =
    GST_STATIC_CAPS ("audio/x-musepack, streamversion= (int) { 7, 8 }");

#define MUSEPACK_CAPS (gst_static_caps_get(&musepack_caps))
+ static void
+ musepack_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 4);
+ GstTypeFindProbability prop = GST_TYPE_FIND_MINIMUM;
+ gint streamversion = -1;
+
+ if (data && memcmp (data, "MP+", 3) == 0) {
+ streamversion = 7;
+ if ((data[3] & 0x7f) == 7) {
+ prop = GST_TYPE_FIND_MAXIMUM;
+ } else {
+ prop = GST_TYPE_FIND_LIKELY + 10;
+ }
+ } else if (data && memcmp (data, "MPCK", 4) == 0) {
+ streamversion = 8;
+ prop = GST_TYPE_FIND_MAXIMUM;
+ }
+
+ if (streamversion != -1) {
+ gst_type_find_suggest_simple (tf, prop, "audio/x-musepack",
+ "streamversion", G_TYPE_INT, streamversion, NULL);
+ }
+ }
+
/*** audio/x-ac3 ***/
/* FIXME 0.11: should be audio/ac3, but isn't for backwards compatibility */
static GstStaticCaps ac3_caps = GST_STATIC_CAPS ("audio/x-ac3");

#define AC3_CAPS (gst_static_caps_get(&ac3_caps))

static GstStaticCaps eac3_caps = GST_STATIC_CAPS ("audio/x-eac3");

#define EAC3_CAPS (gst_static_caps_get(&eac3_caps))

/* one entry per frmsizecod value */
struct ac3_frmsize
{
  unsigned short bit_rate;      /* nominal bitrate */
  unsigned short frm_size[3];   /* frame size in 16-bit words, indexed by
                                 * fscod (callers multiply by 2 for bytes) */
};
+
/* AC-3 frame sizes indexed by frmsizecod; entries come in pairs that differ
 * only in the middle column (presumably the 44.1 kHz padding-bit variant per
 * the A/52 spec — not derivable from this file). */
static const struct ac3_frmsize ac3_frmsizecod_tbl[] = {
  {32, {64, 69, 96}},
  {32, {64, 70, 96}},
  {40, {80, 87, 120}},
  {40, {80, 88, 120}},
  {48, {96, 104, 144}},
  {48, {96, 105, 144}},
  {56, {112, 121, 168}},
  {56, {112, 122, 168}},
  {64, {128, 139, 192}},
  {64, {128, 140, 192}},
  {80, {160, 174, 240}},
  {80, {160, 175, 240}},
  {96, {192, 208, 288}},
  {96, {192, 209, 288}},
  {112, {224, 243, 336}},
  {112, {224, 244, 336}},
  {128, {256, 278, 384}},
  {128, {256, 279, 384}},
  {160, {320, 348, 480}},
  {160, {320, 349, 480}},
  {192, {384, 417, 576}},
  {192, {384, 418, 576}},
  {224, {448, 487, 672}},
  {224, {448, 488, 672}},
  {256, {512, 557, 768}},
  {256, {512, 558, 768}},
  {320, {640, 696, 960}},
  {320, {640, 697, 960}},
  {384, {768, 835, 1152}},
  {384, {768, 836, 1152}},
  {448, {896, 975, 1344}},
  {448, {896, 976, 1344}},
  {512, {1024, 1114, 1536}},
  {512, {1024, 1115, 1536}},
  {576, {1152, 1253, 1728}},
  {576, {1152, 1254, 1728}},
  {640, {1280, 1393, 1920}},
  {640, {1280, 1394, 1920}}
};
+
/* Typefind for (E-)AC-3: scan the first 1024 bytes for a 0x0B77 sync word,
 * classify via the bsid field (<= 8 -> AC-3, 11..16 -> E-AC-3) and confirm
 * by checking for a second sync word one frame length further on. */
static void
ac3_type_find (GstTypeFind * tf, gpointer unused)
{
  DataScanCtx c = { 0, NULL, 0 };

  /* Search for an ac3 frame; not necessarily right at the start, but give it
   * a lower probability if not found right at the start. Check that the
   * frame is followed by a second frame at the expected offset.
   * We could also check the two ac3 CRCs, but we don't do that right now */
  while (c.offset < 1024) {
    if (G_UNLIKELY (!data_scan_ctx_ensure_data (tf, &c, 6)))
      break;

    if (c.data[0] == 0x0b && c.data[1] == 0x77) {
      guint bsid = c.data[5] >> 3;

      if (bsid <= 8) {
        /* ac3 */
        guint fscod = c.data[4] >> 6;
        guint frmsizecod = c.data[4] & 0x3f;

        if (fscod < 3 && frmsizecod < 38) {
          DataScanCtx c_next = c;
          guint frame_size;

          /* table value is in 16-bit words, hence the "* 2" below */
          frame_size = ac3_frmsizecod_tbl[frmsizecod].frm_size[fscod];
          GST_LOG ("possible AC3 frame sync at offset %"
              G_GUINT64_FORMAT ", size=%u", c.offset, frame_size);
          if (data_scan_ctx_ensure_data (tf, &c_next, (frame_size * 2) + 5)) {
            data_scan_ctx_advance (tf, &c_next, frame_size * 2);

            if (c_next.data[0] == 0x0b && c_next.data[1] == 0x77) {
              fscod = c_next.data[4] >> 6;
              frmsizecod = c_next.data[4] & 0x3f;

              if (fscod < 3 && frmsizecod < 38) {
                GstTypeFindProbability prob;

                GST_LOG ("found second AC3 frame (size=%u), looks good",
                    ac3_frmsizecod_tbl[frmsizecod].frm_size[fscod]);
                /* sync at the very start is worth full certainty */
                if (c.offset == 0)
                  prob = GST_TYPE_FIND_MAXIMUM;
                else
                  prob = GST_TYPE_FIND_NEARLY_CERTAIN;

                gst_type_find_suggest (tf, prob, AC3_CAPS);
                return;
              }
            } else {
              GST_LOG ("no second AC3 frame found, false sync");
            }
          }
        }
      } else if (bsid <= 16 && bsid > 10) {
        /* eac3 */
        DataScanCtx c_next = c;
        guint frame_size;

        /* E-AC-3 carries an explicit frame size field (in 16-bit words - 1) */
        frame_size = (((c.data[2] & 0x07) << 8) + c.data[3]) + 1;
        GST_LOG ("possible E-AC3 frame sync at offset %"
            G_GUINT64_FORMAT ", size=%u", c.offset, frame_size);
        if (data_scan_ctx_ensure_data (tf, &c_next, (frame_size * 2) + 5)) {
          data_scan_ctx_advance (tf, &c_next, frame_size * 2);

          if (c_next.data[0] == 0x0b && c_next.data[1] == 0x77) {
            GstTypeFindProbability prob;

            GST_LOG ("found second E-AC3 frame, looks good");
            if (c.offset == 0)
              prob = GST_TYPE_FIND_MAXIMUM;
            else
              prob = GST_TYPE_FIND_NEARLY_CERTAIN;

            gst_type_find_suggest (tf, prob, EAC3_CAPS);
            return;
          } else {
            GST_LOG ("no second E-AC3 frame found, false sync");
          }
        }
      } else {
        /* bsid 9, 10 and > 16 are not handled */
        GST_LOG ("invalid AC3 BSID: %u", bsid);
      }
    }
    data_scan_ctx_advance (tf, &c, 1);
  }
}
+
/*** audio/x-dts ***/
static GstStaticCaps dts_caps = GST_STATIC_CAPS ("audio/x-dts");
#define DTS_CAPS (gst_static_caps_get (&dts_caps))
#define DTS_MIN_FRAMESIZE 96    /* smallest valid core frame, in bytes */
#define DTS_MAX_FRAMESIZE 18725 /* 16384*16/14 */
+
/* Parse a DTS core frame header at the current scan position (c->data must
 * hold at least 16 bytes).
 *
 * Recognises all four sync markers: raw 16-bit big/little endian and the
 * 14-bit-in-16-bit big/little endian variants; for the 14-bit variants the
 * header words are repacked into plain 16-bit values first.
 *
 * On success returns TRUE and fills frame_size (bytes), sample_rate (Hz),
 * channels (0 when the channel arrangement index is out of table range),
 * depth (14 or 16) and endianness (G_BIG_ENDIAN / G_LITTLE_ENDIAN). */
static gboolean
dts_parse_frame_header (DataScanCtx * c, guint * frame_size,
    guint * sample_rate, guint * channels, guint * depth, guint * endianness)
{
  static const int sample_rates[16] = { 0, 8000, 16000, 32000, 0, 0, 11025,
    22050, 44100, 0, 0, 12000, 24000, 48000, 96000, 192000
  };
  static const guint8 channels_table[16] = { 1, 2, 2, 2, 2, 3, 3, 4, 4, 5,
    6, 6, 6, 7, 8, 8
  };
  guint16 hdr[8];
  guint32 marker;
  guint num_blocks, chans, lfe, i;

  marker = GST_READ_UINT32_BE (c->data);

  /* raw big endian or 14-bit big endian */
  if (marker == 0x7FFE8001 || marker == 0x1FFFE800) {
    *endianness = G_BIG_ENDIAN;
    for (i = 0; i < G_N_ELEMENTS (hdr); ++i)
      hdr[i] = GST_READ_UINT16_BE (c->data + (i * sizeof (guint16)));
  } else
    /* raw little endian or 14-bit little endian */
  if (marker == 0xFE7F0180 || marker == 0xFF1F00E8) {
    *endianness = G_LITTLE_ENDIAN;
    for (i = 0; i < G_N_ELEMENTS (hdr); ++i)
      hdr[i] = GST_READ_UINT16_LE (c->data + (i * sizeof (guint16)));
  } else {
    return FALSE;
  }

  GST_LOG ("dts sync marker 0x%08x at offset %u", marker, (guint) c->offset);

  /* 14-bit mode: each 16-bit word only carries 14 payload bits, so compact
   * the payload back into full 16-bit words */
  if (marker == 0x1FFFE800 || marker == 0xFF1F00E8) {
    if ((hdr[2] & 0xFFF0) != 0x07F0)
      return FALSE;
    /* discard top 2 bits (2 void), shift in 2 */
    hdr[0] = (hdr[0] << 2) | ((hdr[1] >> 12) & 0x0003);
    /* discard top 4 bits (2 void, 2 shifted into hdr[0]), shift in 4 etc. */
    hdr[1] = (hdr[1] << 4) | ((hdr[2] >> 10) & 0x000F);
    hdr[2] = (hdr[2] << 6) | ((hdr[3] >> 8) & 0x003F);
    hdr[3] = (hdr[3] << 8) | ((hdr[4] >> 6) & 0x00FF);
    hdr[4] = (hdr[4] << 10) | ((hdr[5] >> 4) & 0x03FF);
    hdr[5] = (hdr[5] << 12) | ((hdr[6] >> 2) & 0x0FFF);
    hdr[6] = (hdr[6] << 14) | ((hdr[7] >> 0) & 0x3FFF);
    g_assert (hdr[0] == 0x7FFE && hdr[1] == 0x8001);
    *depth = 14;
  } else {
    *depth = 16;
  }

  GST_LOG ("frame header: %04x%04x%04x%04x", hdr[2], hdr[3], hdr[4], hdr[5]);

  num_blocks = (hdr[2] >> 2) & 0x7F;
  *frame_size = (((hdr[2] & 0x03) << 12) | (hdr[3] >> 4)) + 1;
  chans = ((hdr[3] & 0x0F) << 2) | (hdr[4] >> 14);
  *sample_rate = sample_rates[(hdr[4] >> 10) & 0x0F];
  lfe = (hdr[5] >> 9) & 0x03;

  /* reject implausible headers: too few sample blocks, too-small frame
   * or a reserved sample rate index (table entry 0) */
  if (num_blocks < 5 || *frame_size < 96 || *sample_rate == 0)
    return FALSE;

  /* in 14-bit mode the size field counts 14-bit words: scale to bytes */
  if (marker == 0x1FFFE800 || marker == 0xFF1F00E8)
    *frame_size = (*frame_size * 16) / 14;      /* FIXME: round up? */

  if (chans < G_N_ELEMENTS (channels_table))
    *channels = channels_table[chans] + ((lfe) ? 1 : 0);
  else
    *channels = 0;

  return TRUE;
}
+
+ static void
+ dts_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ DataScanCtx c = { 0, NULL, 0 };
+
+ /* Search for an dts frame; not necessarily right at the start, but give it
+ * a lower probability if not found right at the start. Check that the
+ * frame is followed by a second frame at the expected offset. */
+ while (c.offset <= DTS_MAX_FRAMESIZE) {
+ guint frame_size = 0, rate = 0, chans = 0, depth = 0, endianness = 0;
+
+ if (G_UNLIKELY (!data_scan_ctx_ensure_data (tf, &c, DTS_MIN_FRAMESIZE)))
+ return;
+
+ if (G_UNLIKELY (dts_parse_frame_header (&c, &frame_size, &rate, &chans,
+ &depth, &endianness))) {
+ GstTypeFindProbability prob;
+ DataScanCtx next_c;
+
+ prob = (c.offset == 0) ? GST_TYPE_FIND_LIKELY : GST_TYPE_FIND_POSSIBLE;
+
+ /* check for second frame sync */
+ next_c = c;
+ data_scan_ctx_advance (tf, &next_c, frame_size);
+ if (data_scan_ctx_ensure_data (tf, &next_c, 4)) {
+ GST_LOG ("frame size: %u 0x%04x", frame_size, frame_size);
+ GST_MEMDUMP ("second frame sync", next_c.data, 4);
+ if (GST_READ_UINT32_BE (c.data) == GST_READ_UINT32_BE (next_c.data))
+ prob = GST_TYPE_FIND_MAXIMUM;
+ }
+
+ if (chans > 0) {
+ gst_type_find_suggest_simple (tf, prob, "audio/x-dts",
+ "rate", G_TYPE_INT, rate, "channels", G_TYPE_INT, chans,
+ "depth", G_TYPE_INT, depth, "endianness", G_TYPE_INT, endianness,
+ "framed", G_TYPE_BOOLEAN, FALSE, NULL);
+ } else {
+ gst_type_find_suggest_simple (tf, prob, "audio/x-dts",
+ "rate", G_TYPE_INT, rate, "depth", G_TYPE_INT, depth,
+ "endianness", G_TYPE_INT, endianness,
+ "framed", G_TYPE_BOOLEAN, FALSE, NULL);
+ }
+
+ return;
+ }
+
+ data_scan_ctx_advance (tf, &c, 1);
+ }
+ }
+
+ /*** gsm ***/
-
/* Tizen: everything up to the matching #endif is compiled out when
 * TIZEN_FEATURE_DISABLE_MIME_TYPES is defined */
#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
/* can only be detected by using the extension, in which case we use the default
 * GSM properties */
static GstStaticCaps gsm_caps =
    GST_STATIC_CAPS ("audio/x-gsm, rate=8000, channels=1");

#define GSM_CAPS (gst_static_caps_get(&gsm_caps))
+
/*** wavpack ***/

/* unframed WavPack audio bitstream */
static GstStaticCaps wavpack_caps =
    GST_STATIC_CAPS ("audio/x-wavpack, framed = (boolean) false");

#define WAVPACK_CAPS (gst_static_caps_get(&wavpack_caps))

/* unframed WavPack correction stream */
static GstStaticCaps wavpack_correction_caps =
    GST_STATIC_CAPS ("audio/x-wavpack-correction, framed = (boolean) false");

#define WAVPACK_CORRECTION_CAPS (gst_static_caps_get(&wavpack_correction_caps))
+
/* Typefind for WavPack: check for the "wvpk" magic at the start, then walk
 * the metadata sub-chunks of the first block to decide between a regular
 * audio stream and a correction stream, confirming with the second block
 * header when reachable. */
static void
wavpack_type_find (GstTypeFind * tf, gpointer unused)
{
  GstTypeFindProbability base_prob = GST_TYPE_FIND_POSSIBLE;
  guint64 offset;
  guint32 blocksize;
  const guint8 *data;
  guint count_wv, count_wvc;

  data = gst_type_find_peek (tf, 0, 32);
  if (!data)
    return;

  if (data[0] != 'w' || data[1] != 'v' || data[2] != 'p' || data[3] != 'k')
    return;

  /* Note: wavpack blocks can be fairly large (easily 60-110k), possibly
   * larger than the max. limits imposed by certain typefinding elements
   * like id3demux or apedemux, so typefinding is most likely only going to
   * work in pull-mode */
  blocksize = GST_READ_UINT32_LE (data + 4);
  GST_LOG ("wavpack header, blocksize=0x%04x", blocksize);
  /* If bigger than maximum allowed blocksize, refuse */
  if (blocksize > 131072)
    return;
  count_wv = 0;
  count_wvc = 0;
  offset = 32;                  /* sub-chunks start after the 32-byte block header */
  while (offset < 8 + blocksize) {
    guint32 sublen;

    /* get chunk header: data[0] = id/flags, data[1..] = length in 16-bit
     * words (the 0x80 flag selects a 3-byte length field) */
    GST_LOG ("peeking at chunk at offset 0x%04x", (guint) offset);
    data = gst_type_find_peek (tf, offset, 4);
    if (data == NULL)
      break;
    sublen = ((guint32) data[1]) << 1;
    if (data[0] & 0x80) {
      sublen |= (((guint32) data[2]) << 9) | (((guint32) data[3]) << 17);
      sublen += 1 + 3;          /* id + length */
    } else {
      sublen += 1 + 1;          /* id + length */
    }
    if (offset + sublen > 8 + blocksize) {
      GST_LOG ("chunk length too big (%u > %" G_GUINT64_FORMAT ")", sublen,
          blocksize - offset);
      break;
    }
    /* NOTE(review): 0x20 presumably marks optional/extended metadata ids -
     * only plain ids are counted here; confirm against the WavPack spec */
    if ((data[0] & 0x20) == 0) {
      switch (data[0] & 0x0f) {
        case 0xa:              /* ID_WV_BITSTREAM */
        case 0xc:              /* ID_WVX_BITSTREAM */
          ++count_wv;
          break;
        case 0xb:              /* ID_WVC_BITSTREAM */
          ++count_wvc;
          break;
        default:
          break;
      }
      if (count_wv >= 5 || count_wvc >= 5)
        break;
    }
    offset += sublen;
  }

  /* check for second block header */
  data = gst_type_find_peek (tf, 8 + blocksize, 4);
  if (data != NULL && memcmp (data, "wvpk", 4) == 0) {
    GST_DEBUG ("found second block sync");
    base_prob = GST_TYPE_FIND_LIKELY;
  }

  GST_DEBUG ("wvc=%d, wv=%d", count_wvc, count_wv);

  /* more correction chunks than audio chunks -> correction stream */
  if (count_wvc > 0 && count_wvc > count_wv) {
    gst_type_find_suggest (tf,
        MIN (base_prob + 5 * count_wvc, GST_TYPE_FIND_NEARLY_CERTAIN),
        WAVPACK_CORRECTION_CAPS);
  } else if (count_wv > 0) {
    gst_type_find_suggest (tf,
        MIN (base_prob + 5 * count_wv, GST_TYPE_FIND_NEARLY_CERTAIN),
        WAVPACK_CAPS);
  }
}
+
/*** application/postscript ***/
/* caps for PostScript documents */
static GstStaticCaps postscript_caps =
    GST_STATIC_CAPS ("application/postscript");

#define POSTSCRIPT_CAPS (gst_static_caps_get(&postscript_caps))
+
+ static void
+ postscript_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 3);
+ if (!data)
+ return;
+
+ if (data[0] == 0x04)
+ data++;
+ if (data[0] == '%' && data[1] == '!')
+ gst_type_find_suggest (tf, GST_TYPE_FIND_POSSIBLE, POSTSCRIPT_CAPS);
+
+ }
+
+ /*** image/svg+xml ***/
/* caps for Scalable Vector Graphics documents */
static GstStaticCaps svg_caps = GST_STATIC_CAPS ("image/svg+xml");

#define SVG_CAPS (gst_static_caps_get(&svg_caps))
+
+ static void
+ svg_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ static const gchar svg_doctype[] = "!DOCTYPE svg";
+ static const gchar svg_tag[] = "<svg";
+ DataScanCtx c = { 0, NULL, 0 };
+
+ while (c.offset <= 1024) {
+ if (G_UNLIKELY (!data_scan_ctx_ensure_data (tf, &c, 12)))
+ break;
+
+ if (memcmp (svg_doctype, c.data, 12) == 0) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, SVG_CAPS);
+ return;
+ } else if (memcmp (svg_tag, c.data, 4) == 0) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_LIKELY, SVG_CAPS);
+ return;
+ }
+ data_scan_ctx_advance (tf, &c, 1);
+ }
+ }
+
+ /*** multipart/x-mixed-replace mimestream ***/
+
/* caps for multipart MIME streams (e.g. MJPEG-over-HTTP) */
static GstStaticCaps multipart_caps =
    GST_STATIC_CAPS ("multipart/x-mixed-replace");
#define MULTIPART_CAPS gst_static_caps_get(&multipart_caps)
+
+ /* multipart/x-mixed replace is:
+ * <maybe some whitespace>--<some ascii chars>[\r]\n
+ * <more ascii chars>[\r]\nContent-type:<more ascii>[\r]\n */
+ static void
+ multipart_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data;
+ const guint8 *x;
+
+ #define MULTIPART_MAX_BOUNDARY_OFFSET 16
+ data = gst_type_find_peek (tf, 0, MULTIPART_MAX_BOUNDARY_OFFSET);
+ if (!data)
+ return;
+
+ for (x = data;
+ x - data < MULTIPART_MAX_BOUNDARY_OFFSET - 2 && g_ascii_isspace (*x);
+ x++);
+ if (x[0] != '-' || x[1] != '-')
+ return;
+
+ /* Could be okay, peek what should be enough for a complete header */
+ #define MULTIPART_MAX_HEADER_SIZE 256
+ data = gst_type_find_peek (tf, 0, MULTIPART_MAX_HEADER_SIZE);
+ if (!data)
+ return;
+
+ for (x = data; x - data < MULTIPART_MAX_HEADER_SIZE - 14; x++) {
+ if (!isascii (*x)) {
+ return;
+ }
+ if (*x == '\n' &&
+ !g_ascii_strncasecmp ("content-type:", (gchar *) x + 1, 13)) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, MULTIPART_CAPS);
+ return;
+ }
+ }
+ }
++#endif
+
+ /*** video/mpeg systemstream ***/
#if defined(TIZEN_PROFILE_TV) || !defined(TIZEN_FEATURE_DISABLE_MIME_TYPES)
static GstStaticCaps mpeg_sys_caps = GST_STATIC_CAPS ("video/mpeg, "
    "systemstream = (boolean) true, mpegversion = (int) [ 1, 2 ]");

#define MPEG_SYS_CAPS gst_static_caps_get(&mpeg_sys_caps)
#endif
/* matches the MPEG start code prefix 00 00 01 */
#define IS_MPEG_HEADER(data) (G_UNLIKELY((((guint8 *)(data))[0] == 0x00) &&  \
    (((guint8 *)(data))[1] == 0x00) && \
    (((guint8 *)(data))[2] == 0x01)))
#if defined(TIZEN_PROFILE_TV) || !defined(TIZEN_FEATURE_DISABLE_MIME_TYPES)
/* 0xBA = pack start code, 0xBB = system header start code */
#define IS_MPEG_PACK_CODE(b) ((b) == 0xBA)
#define IS_MPEG_SYS_CODE(b) ((b) == 0xBB)
#define IS_MPEG_PACK_HEADER(data) (IS_MPEG_HEADER (data) && \
    IS_MPEG_PACK_CODE (((guint8 *)(data))[3]))

/* video (0xE0-0xEF), audio (0xC0-0xCF) and other (>= 0xBC) stream ids */
#define IS_MPEG_PES_CODE(b) (((b) & 0xF0) == 0xE0 || ((b) & 0xF0) == 0xC0 || \
    (b) >= 0xBC)
#define IS_MPEG_PES_HEADER(data) (IS_MPEG_HEADER (data) && \
    IS_MPEG_PES_CODE (((guint8 *)(data))[3]))

#define MPEG2_MAX_PROBE_LENGTH (128 * 1024)     /* 128kB should be 64 packs of the
                                                 * most common 2kB pack size. */

#define MPEG2_MIN_SYS_HEADERS 2
#define MPEG2_MAX_SYS_HEADERS 5
#endif
+
/* Validate a pack header at @data (the 4-byte start code has already been
 * checked).  Handles both MPEG-2 ('01' marker, 14 bytes + stuffing) and
 * MPEG-1 ('0010' marker, fixed 12 bytes) packs and, when enough data is
 * present, requires another start code to follow immediately.  On success
 * *pack_size is set to the total pack header length in bytes. */
static gboolean
mpeg_sys_is_valid_pack (GstTypeFind * tf, const guint8 * data, guint len,
    guint * pack_size)
{
  /* Check the pack header @ offset for validity, assuming that the 4 byte header
   * itself has already been checked. */
  guint8 stuff_len;

  if (len < 12)
    return FALSE;

  /* Check marker bits */
  if ((data[4] & 0xC4) == 0x44) {
    /* MPEG-2 PACK */
    if (len < 14)
      return FALSE;

    /* fixed marker bits inside the SCR / mux-rate fields */
    if ((data[6] & 0x04) != 0x04 ||
        (data[8] & 0x04) != 0x04 ||
        (data[9] & 0x01) != 0x01 || (data[12] & 0x03) != 0x03)
      return FALSE;

    /* number of stuffing bytes appended after the 14 header bytes */
    stuff_len = data[13] & 0x07;

    /* Check the following header bytes, if we can */
    if ((14 + stuff_len + 4) <= len) {
      if (!IS_MPEG_HEADER (data + 14 + stuff_len))
        return FALSE;
    }
    if (pack_size)
      *pack_size = 14 + stuff_len;
    return TRUE;
  } else if ((data[4] & 0xF1) == 0x21) {
    /* MPEG-1 PACK */
    if ((data[6] & 0x01) != 0x01 ||
        (data[8] & 0x01) != 0x01 ||
        (data[9] & 0x80) != 0x80 || (data[11] & 0x01) != 0x01)
      return FALSE;

    /* Check the following header bytes, if we can */
    if ((12 + 4) <= len) {
      if (!IS_MPEG_HEADER (data + 12))
        return FALSE;
    }
    if (pack_size)
      *pack_size = 12;
    return TRUE;
  }

  return FALSE;
}
+
++#if defined(TIZEN_PROFILE_TV) || !defined(TIZEN_FEATURE_DISABLE_MIME_TYPES)
+ static gboolean
+ mpeg_sys_is_valid_pes (GstTypeFind * tf, const guint8 * data, guint len,
+ guint * pack_size)
+ {
+ guint pes_packet_len;
+
+ /* Check the PES header at the given position, assuming the header code itself
+ * was already checked */
+ if (len < 6)
+ return FALSE;
+
+ /* For MPEG Program streams, unbounded PES is not allowed, so we must have a
+ * valid length present */
+ pes_packet_len = GST_READ_UINT16_BE (data + 4);
+ if (pes_packet_len == 0)
+ return FALSE;
+
+ /* Check the following header, if we can */
+ if (6 + pes_packet_len + 4 <= len) {
+ if (!IS_MPEG_HEADER (data + 6 + pes_packet_len))
+ return FALSE;
+ }
+
+ if (pack_size)
+ *pack_size = 6 + pes_packet_len;
+ return TRUE;
+ }
+
+ /* Validate an MPEG Program Stream System header starting at @data (the
+ * 00 00 01 BB start code; the code byte was already checked by the caller).
+ * @len is the number of bytes available. On success, *pack_size (if
+ * non-NULL) receives the total header size (6 + header_length). */
+ static gboolean
+ mpeg_sys_is_valid_sys (GstTypeFind * tf, const guint8 * data, guint len,
+ guint * pack_size)
+ {
+ guint sys_hdr_len;
+
+ /* Check the System header at the given position, assuming the header code itself
+ * was already checked */
+ if (len < 6)
+ return FALSE;
+ /* header_length field; must cover at least the fixed 6-byte part */
+ sys_hdr_len = GST_READ_UINT16_BE (data + 4);
+ if (sys_hdr_len < 6)
+ return FALSE;
+
+ /* Check the following header, if we can */
+ if (6 + sys_hdr_len + 4 <= len) {
+ if (!IS_MPEG_HEADER (data + 6 + sys_hdr_len))
+ return FALSE;
+ }
+
+ if (pack_size)
+ *pack_size = 6 + sys_hdr_len;
+
+ return TRUE;
+ }
+
+ /* calculation of possibility to identify random data as mpeg systemstream:
+ * bits that must match in header detection: 32 (or more)
+ * chance that random data is identifed: 1/2^32
+ * chance that MPEG2_MIN_PACK_HEADERS headers are identified:
+ * 1/2^(32*MPEG2_MIN_PACK_HEADERS)
+ * chance that this happens in MPEG2_MAX_PROBE_LENGTH bytes:
+ * 1-(1+1/2^(32*MPEG2_MIN_PACK_HEADERS)^MPEG2_MAX_PROBE_LENGTH)
+ * for current values:
+ * 1-(1+1/2^(32*4)^101024)
+ * = <some_number>
+ * Since we also check marker bits and pes packet lengths, this probability is a
+ * very coarse upper bound.
+ */
+ /* Typefinder for MPEG-1/2 program ("system") streams: scans for 00 00 01
+ * start-code prefixes, validates pack/system/PES headers found there, and
+ * suggests video/mpeg with systemstream=TRUE once enough contiguous valid
+ * headers (at least one of them a PES header) have been seen. */
+ static void
+ mpeg_sys_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data, *data0, *first_sync, *end;
+ gint mpegversion = 0;
+ guint pack_headers = 0;
+ guint pes_headers = 0;
+ guint pack_size;
+ guint since_last_sync = 0;
+ guint32 sync_word = 0xffffffff;
+ guint potential_headers = 0;
+
+ /* Peek the largest prefix we can get, halving the request until the
+ * source can satisfy it (or giving up below 16 bytes). */
+ G_STMT_START {
+ gint len;
+
+ len = MPEG2_MAX_PROBE_LENGTH;
+
+ while (len >= 16) {
+ data = gst_type_find_peek (tf, 0, 5 + len);
+ if (data != NULL)
+ break;
+ len = len / 2;
+ }
+
+ if (!data)
+ return;
+
+ end = data + len;
+ }
+ G_STMT_END;
+
+ data0 = data;
+ first_sync = NULL;
+
+ while (data < end) {
+ sync_word <<= 8;
+ /* After the shift, sync_word == 0x00000100 iff the previous three bytes
+ * were the 00 00 01 start-code prefix; *data is then the code byte. */
+ if (sync_word == 0x00000100) {
+ /* Found potential sync word */
+ if (first_sync == NULL)
+ first_sync = data - 3;
+
+ if (since_last_sync > 4) {
+ /* If more than 4 bytes since the last sync word, reset our counters,
+ * as we're only interested in counting contiguous packets */
+ pes_headers = pack_headers = 0;
+ }
+ pack_size = 0;
+
+ potential_headers++;
+ if (IS_MPEG_PACK_CODE (data[0])) {
+ if ((data[1] & 0xC0) == 0x40) {
+ /* MPEG-2 */
+ mpegversion = 2;
+ } else if ((data[1] & 0xF0) == 0x20) {
+ mpegversion = 1;
+ }
+ if (mpegversion != 0 &&
+ mpeg_sys_is_valid_pack (tf, data - 3, end - data + 3, &pack_size)) {
+ pack_headers++;
+ }
+ } else if (IS_MPEG_PES_CODE (data[0])) {
+ /* PES stream */
+ if (mpeg_sys_is_valid_pes (tf, data - 3, end - data + 3, &pack_size)) {
+ pes_headers++;
+ if (mpegversion == 0)
+ mpegversion = 2;
+ }
+ } else if (IS_MPEG_SYS_CODE (data[0])) {
+ if (mpeg_sys_is_valid_sys (tf, data - 3, end - data + 3, &pack_size)) {
+ pack_headers++;
+ }
+ }
+
+ /* If we found a packet with a known size, skip the bytes in it and loop
+ * around to check the next packet. */
+ if (pack_size != 0) {
+ data += pack_size - 3;
+ sync_word = 0xffffffff;
+ since_last_sync = 0;
+ continue;
+ }
+ }
+
+ sync_word |= data[0];
+ since_last_sync++;
+ data++;
+
+ /* If we have found MAX headers, and *some* were pes headers (pack headers
+ * are optional in an mpeg system stream) then return our high-probability
+ * result */
+ if (pes_headers > 0 && (pack_headers + pes_headers) > MPEG2_MAX_SYS_HEADERS)
+ goto suggest;
+ }
+
+ /* If we at least saw MIN headers, and *some* were pes headers (pack headers
+ * are optional in an mpeg system stream) then return a lower-probability
+ * result */
+ if (pes_headers > 0 && (pack_headers + pes_headers) > MPEG2_MIN_SYS_HEADERS)
+ goto suggest;
+
+ return;
+ suggest:
+ {
+ guint prob;
+
+ /* Base probability grows with the number of valid headers seen. */
+ prob = GST_TYPE_FIND_POSSIBLE + (10 * (pack_headers + pes_headers));
+ prob = MIN (prob, GST_TYPE_FIND_MAXIMUM);
+
+ /* With the above test, we get into problems when we try to typefind
+ a MPEG stream from a small amount of data, which can happen when
+ we get data pushed from a HTTP source. We thus make a second test
+ to give higher probability if all the potential headers were either
+ pack or pes headers (ie, no potential header was unrecognized). */
+ if (potential_headers == pack_headers + pes_headers) {
+ GST_LOG ("Only %u headers, but all were recognized", potential_headers);
+ prob += 10;
+ prob = MIN (prob, GST_TYPE_FIND_MAXIMUM);
+ }
+
+ /* lower probability if the first packet wasn't right at the start */
+ if (data0 != first_sync && prob >= 10)
+ prob -= 10;
+
+ GST_LOG ("Suggesting MPEG %d system stream, %d packs, %d pes, prob %u%%",
+ mpegversion, pack_headers, pes_headers, prob);
+
+ gst_type_find_suggest_simple (tf, prob, "video/mpeg",
+ "systemstream", G_TYPE_BOOLEAN, TRUE,
+ "mpegversion", G_TYPE_INT, mpegversion, NULL);
+ }
+ };
++#endif
+
+ /*** video/mpegts Transport Stream ***/
+ static GstStaticCaps mpegts_caps = GST_STATIC_CAPS ("video/mpegts, "
+ "systemstream = (boolean) true, packetsize = (int) [ 188, 208 ]");
+ #define MPEGTS_CAPS gst_static_caps_get(&mpegts_caps)
+
+ #define GST_MPEGTS_TYPEFIND_MIN_HEADERS 4
+ #define GST_MPEGTS_TYPEFIND_MAX_HEADERS 10
+ #define GST_MPEGTS_MAX_PACKET_SIZE 208
+ #define GST_MPEGTS_TYPEFIND_SYNC_SIZE \
+ (GST_MPEGTS_TYPEFIND_MIN_HEADERS * GST_MPEGTS_MAX_PACKET_SIZE)
+ #define GST_MPEGTS_TYPEFIND_MAX_SYNC \
+ (GST_MPEGTS_TYPEFIND_MAX_HEADERS * GST_MPEGTS_MAX_PACKET_SIZE)
+ #define GST_MPEGTS_TYPEFIND_SCAN_LENGTH \
+ (GST_MPEGTS_TYPEFIND_MAX_SYNC * 4)
+
+ #define MPEGTS_HDR_SIZE 4
+ /* Check for sync byte, error_indicator == 0 and packet has payload.
+ * Adaptation control field (data[3] & 0x30) may be zero for TS packets with
+ * null PIDs. Still, these streams are valid TS streams (for null packets,
+ * AFC is supposed to be 0x1, but the spec also says decoders should just
+ * discard any packets with AFC = 0x00) */
+ #define IS_MPEGTS_HEADER(data) (data[0] == 0x47 && \
+ (data[1] & 0x80) == 0x00 && \
+ ((data[3] & 0x30) != 0x00 || \
+ ((data[3] & 0x30) == 0x00 && (data[1] & 0x1f) == 0x1f && (data[2] & 0xff) == 0xff)))
+
+ /* Helper function to search ahead at intervals of packet_size for mpegts
+ * headers */
+ /* Returns the number of consecutive valid TS headers found, counting the
+ * one already known to exist at @offset; stops at the first mismatch or
+ * when the peek fails, capping at GST_MPEGTS_TYPEFIND_MAX_HEADERS. */
+ static gint
+ mpeg_ts_probe_headers (GstTypeFind * tf, guint64 offset, gint packet_size)
+ {
+ /* We always enter this function having found at least one header already */
+ gint found = 1;
+ const guint8 *data = NULL;
+
+ GST_LOG ("looking for mpeg-ts packets of size %u", packet_size);
+ while (found < GST_MPEGTS_TYPEFIND_MAX_HEADERS) {
+ offset += packet_size;
+
+ data = gst_type_find_peek (tf, offset, MPEGTS_HDR_SIZE);
+ if (data == NULL || !IS_MPEGTS_HEADER (data))
+ return found;
+
+ found++;
+ GST_LOG ("mpeg-ts sync #%2d at offset %" G_GUINT64_FORMAT, found, offset);
+ }
+
+ return found;
+ }
+
+ /* Try and detect at least 4 packets in at most 10 packets worth of
+ * data. Need to try several possible packet sizes */
+ static void
+ mpeg_ts_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ /* TS packet sizes to test: normal, DVHS packet size and
+ * FEC with 16 or 20 byte codes packet size. */
+ const gint pack_sizes[] = { 188, 192, 204, 208 };
+ const guint8 *data = NULL;
+ guint size = 0;
+ guint64 skipped = 0;
+
+ /* Slide a 1-byte-at-a-time window over the stream, refilling the peek
+ * buffer whenever fewer than MPEGTS_HDR_SIZE bytes remain in it. */
+ while (skipped < GST_MPEGTS_TYPEFIND_SCAN_LENGTH) {
+ if (size < MPEGTS_HDR_SIZE) {
+ data = gst_type_find_peek (tf, skipped, GST_MPEGTS_TYPEFIND_SYNC_SIZE);
+ if (!data)
+ break;
+ size = GST_MPEGTS_TYPEFIND_SYNC_SIZE;
+ }
+
+ /* Have at least MPEGTS_HDR_SIZE bytes at this point */
+ if (IS_MPEGTS_HEADER (data)) {
+ gsize p;
+
+ GST_LOG ("possible mpeg-ts sync at offset %" G_GUINT64_FORMAT, skipped);
+
+ for (p = 0; p < G_N_ELEMENTS (pack_sizes); p++) {
+ gint found;
+
+ /* Probe ahead at size pack_sizes[p] */
+ found = mpeg_ts_probe_headers (tf, skipped, pack_sizes[p]);
+ if (found >= GST_MPEGTS_TYPEFIND_MIN_HEADERS) {
+ gint probability;
+
+ /* found at least 4 headers. 10 headers = MAXIMUM probability.
+ * Arbitrarily, I assigned 10% probability for each header we
+ * found, 40% -> 100% */
+ probability = MIN (10 * found, GST_TYPE_FIND_MAXIMUM);
+
+ gst_type_find_suggest_simple (tf, probability, "video/mpegts",
+ "systemstream", G_TYPE_BOOLEAN, TRUE,
+ "packetsize", G_TYPE_INT, pack_sizes[p], NULL);
+ return;
+ }
+ }
+ }
+ data++;
+ skipped++;
+ size--;
+ }
+ }
+
+ #define GST_MPEGVID_TYPEFIND_TRY_PICTURES 6
+ #define GST_MPEGVID_TYPEFIND_TRY_SYNC (100 * 1024) /* 100 kB */
+
+ /* Scan ahead a maximum of max_extra_offset bytes until the next IS_MPEG_HEADER
+ * offset. After the call, offset will be after the 0x000001, i.e. at the 4th
+ * byte of the MPEG header. Returns TRUE if a header was found, FALSE if not.
+ */
+ static gboolean
+ mpeg_find_next_header (GstTypeFind * tf, DataScanCtx * c,
+ guint64 max_extra_offset)
+ {
+ guint64 extra_offset;
+
+ for (extra_offset = 0; extra_offset <= max_extra_offset; ++extra_offset) {
+ /* need 4 bytes to match the 00 00 01 xx start code */
+ if (!data_scan_ctx_ensure_data (tf, c, 4))
+ return FALSE;
+ if (IS_MPEG_HEADER (c->data)) {
+ /* skip the 3-byte prefix so c->data[0] is the start-code byte */
+ data_scan_ctx_advance (tf, c, 3);
+ return TRUE;
+ }
+ data_scan_ctx_advance (tf, c, 1);
+ }
+ return FALSE;
+ }
+
+ /*** video/mpeg MPEG-4 elementary video stream ***/
+
+ static GstStaticCaps mpeg4_video_caps = GST_STATIC_CAPS ("video/mpeg, "
+ "systemstream=(boolean)false, mpegversion=4, parsed=(boolean)false");
+ #define MPEG4_VIDEO_CAPS gst_static_caps_get(&mpeg4_video_caps)
+
+ /*
+ * This typefind is based on the elementary video header defined in
+ * http://xhelmboyx.tripod.com/formats/mpeg-layout.txt
+ * In addition, it allows the visual object sequence header to be
+ * absent, and even the VOS header to be absent. In the latter case,
+ * a number of VOPs have to be present.
+ */
+ static void
+ mpeg4_video_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ DataScanCtx c = { 0, NULL, 0 };
+ gboolean seen_vios_at_0 = FALSE;
+ gboolean seen_vios = FALSE;
+ gboolean seen_vos = FALSE;
+ gboolean seen_vol = FALSE;
+ guint num_vop_headers = 0;
+ guint8 sc;
+
+ /* Walk the start codes, tallying which header types we encounter; the
+ * mix of headers seen determines the suggested probability below. */
+ while (c.offset < GST_MPEGVID_TYPEFIND_TRY_SYNC) {
+ if (num_vop_headers >= GST_MPEGVID_TYPEFIND_TRY_PICTURES)
+ break;
+
+ if (!mpeg_find_next_header (tf, &c,
+ GST_MPEGVID_TYPEFIND_TRY_SYNC - c.offset))
+ break;
+
+ /* start-code byte following the 00 00 01 prefix */
+ sc = c.data[0];
+
+ /* visual_object_sequence_start_code */
+ if (sc == 0xB0) {
+ if (seen_vios)
+ break; /* Terminate at second vios */
+ if (c.offset == 0)
+ seen_vios_at_0 = TRUE;
+ seen_vios = TRUE;
+ data_scan_ctx_advance (tf, &c, 2);
+ if (!mpeg_find_next_header (tf, &c, 0))
+ break;
+
+ sc = c.data[0];
+
+ /* Optional metadata */
+ if (sc == 0xB2)
+ if (!mpeg_find_next_header (tf, &c, 24))
+ break;
+ }
+
+ /* visual_object_start_code (consider it optional) */
+ if (sc == 0xB5) {
+ data_scan_ctx_advance (tf, &c, 2);
+ /* may contain ID marker and YUV clamping */
+ if (!mpeg_find_next_header (tf, &c, 7))
+ break;
+
+ sc = c.data[0];
+ }
+
+ /* video_object_start_code */
+ if (sc <= 0x1F) {
+ if (seen_vos)
+ break; /* Terminate at second vos */
+ seen_vos = TRUE;
+ data_scan_ctx_advance (tf, &c, 2);
+ continue;
+ }
+
+ /* video_object_layer_start_code */
+ if (sc >= 0x20 && sc <= 0x2F) {
+ seen_vol = TRUE;
+ data_scan_ctx_advance (tf, &c, 5);
+ continue;
+ }
+
+ /* video_object_plane_start_code */
+ if (sc == 0xB6) {
+ num_vop_headers++;
+ data_scan_ctx_advance (tf, &c, 2);
+ continue;
+ }
+
+ /* Unknown start code. */
+ }
+
+ if (num_vop_headers > 0 || seen_vol) {
+ GstTypeFindProbability probability = 0;
+
+ GST_LOG ("Found %d pictures, vios: %d, vos:%d, vol:%d", num_vop_headers,
+ seen_vios, seen_vos, seen_vol);
+
+ /* Rank by how complete the header chain is: VOS at offset 0 with all
+ * layers is strongest, bare VOPs alone weakest. */
+ if (num_vop_headers >= GST_MPEGVID_TYPEFIND_TRY_PICTURES && seen_vios_at_0
+ && seen_vos && seen_vol)
+ probability = GST_TYPE_FIND_MAXIMUM - 1;
+ else if (num_vop_headers >= GST_MPEGVID_TYPEFIND_TRY_PICTURES && seen_vios
+ && seen_vos && seen_vol)
+ probability = GST_TYPE_FIND_NEARLY_CERTAIN - 1;
+ else if (seen_vios_at_0 && seen_vos && seen_vol)
+ probability = GST_TYPE_FIND_NEARLY_CERTAIN - 6;
+ else if (num_vop_headers >= GST_MPEGVID_TYPEFIND_TRY_PICTURES && seen_vos
+ && seen_vol)
+ probability = GST_TYPE_FIND_NEARLY_CERTAIN - 6;
+ else if (num_vop_headers >= GST_MPEGVID_TYPEFIND_TRY_PICTURES && seen_vol)
+ probability = GST_TYPE_FIND_NEARLY_CERTAIN - 9;
+ else if (num_vop_headers >= GST_MPEGVID_TYPEFIND_TRY_PICTURES)
+ probability = GST_TYPE_FIND_LIKELY - 1;
+ else if (num_vop_headers > 2 && seen_vios && seen_vos && seen_vol)
+ probability = GST_TYPE_FIND_LIKELY - 9;
+ else if (seen_vios && seen_vos && seen_vol)
+ probability = GST_TYPE_FIND_LIKELY - 20;
+ else if (num_vop_headers > 0 && seen_vos && seen_vol)
+ probability = GST_TYPE_FIND_POSSIBLE;
+ else if (num_vop_headers > 0)
+ probability = GST_TYPE_FIND_POSSIBLE - 10;
+ else if (seen_vos && seen_vol)
+ probability = GST_TYPE_FIND_POSSIBLE - 20;
+
+ gst_type_find_suggest (tf, probability, MPEG4_VIDEO_CAPS);
+ }
+ }
+
+ /*** video/x-h263 H263 video stream ***/
+ static GstStaticCaps h263_video_caps =
+ GST_STATIC_CAPS ("video/x-h263, variant=(string)itu");
+
+ #define H263_VIDEO_CAPS gst_static_caps_get(&h263_video_caps)
+
+ #define H263_MAX_PROBE_LENGTH (128 * 1024)
+
+ /* Typefinder for raw ITU H.263: counts Picture Start Codes with plausible
+ * PTYPE fields ("good") versus implausible ones ("bad") and suggests the
+ * caps only when good sightings clearly outnumber bad ones. */
+ static void
+ h263_video_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ DataScanCtx c = { 0, NULL, 0 };
+ guint64 data = 0xffff; /* prevents false positive for first 2 bytes */
+ guint64 psc = 0;
+ guint8 ptype = 0;
+ guint format;
+ guint good = 0;
+ guint bad = 0;
+ guint pc_type, pb_mode;
+
+ while (c.offset < H263_MAX_PROBE_LENGTH) {
+ if (G_UNLIKELY (!data_scan_ctx_ensure_data (tf, &c, 4)))
+ break;
+
+ /* Find the picture start code */
+ data = (data << 8) + c.data[0];
+ /* keep the top 22 bits of the 40-bit window; 0x800000 is the PSC
+ * pattern aligned at bit 18 */
+ psc = data & G_GUINT64_CONSTANT (0xfffffc0000);
+ if (psc == 0x800000) {
+ /* Found PSC */
+ /* PTYPE */
+ ptype = (data & 0x3fc) >> 2;
+ /* Source Format */
+ format = ptype & 0x07;
+
+ /* Now that we have a Valid PSC, check if we also have a valid PTYPE and
+ the Source Format, which should range between 1 and 5 */
+ if (((ptype >> 6) == 0x2) && (format > 0 && format < 6)) {
+ pc_type = data & 0x02;
+ /* NOTE(review): '>>' binds tighter than '&', so this evaluates as
+ * c.data[1] & (0x20 >> 4) == c.data[1] & 0x02, not
+ * (c.data[1] & 0x20) >> 4 — looks unintended; confirm against the
+ * H.263 PB-frames bit position before changing. */
+ pb_mode = c.data[1] & 0x20 >> 4;
+ if (!pc_type && pb_mode)
+ bad++;
+ else
+ good++;
+ } else
+ bad++;
+
+ /* FIXME: maybe bail out early if we get mostly bad syncs ? */
+ }
+
+ data_scan_ctx_advance (tf, &c, 1);
+ }
+
+ GST_LOG ("good: %d, bad: %d", good, bad);
+
+ if (good > 2 * bad)
+ gst_type_find_suggest (tf, GST_TYPE_FIND_POSSIBLE, H263_VIDEO_CAPS);
+
+ return;
+ }
+
+ /*** video/x-h264 H264 elementary video stream ***/
+
+ static GstStaticCaps h264_video_caps =
+ GST_STATIC_CAPS ("video/x-h264,stream-format=byte-stream");
+
+ #define H264_VIDEO_CAPS gst_static_caps_get(&h264_video_caps)
+
+ #define H264_MAX_PROBE_LENGTH (128 * 1024) /* 128kB for HD should be enough. */
+
+ /* Typefinder for H.264 Annex-B byte-streams: walks 00 00 01 start codes,
+ * classifies NAL unit types as good/bad, and suggests LIKELY as soon as
+ * SPS + PPS + IDR plus enough good NALs were seen (POSSIBLE otherwise). */
+ static void
+ h264_video_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ DataScanCtx c = { 0, NULL, 0 };
+
+ /* Stream consists of: a series of sync codes (00 00 00 01) followed
+ * by NALs
+ */
+ gboolean seen_idr = FALSE;
+ gboolean seen_sps = FALSE;
+ gboolean seen_pps = FALSE;
+ gboolean seen_ssps = FALSE;
+ int nut, ref;
+ int good = 0;
+ int bad = 0;
+
+ while (c.offset < H264_MAX_PROBE_LENGTH) {
+ if (G_UNLIKELY (!data_scan_ctx_ensure_data (tf, &c, 4)))
+ break;
+
+ if (IS_MPEG_HEADER (c.data)) {
+ nut = c.data[3] & 0x9f; /* forbiden_zero_bit | nal_unit_type */
+ ref = c.data[3] & 0x60; /* nal_ref_idc */
+
+ /* if forbidden bit is different to 0 won't be h264 */
+ if (nut > 0x1f) {
+ bad++;
+ break;
+ }
+
+ /* collect statistics about the NAL types */
+ if ((nut >= 1 && nut <= 13) || nut == 19) {
+ /* IDR slices must be reference; SEI/AU-delimiter/end NALs must not */
+ if ((nut == 5 && ref == 0) ||
+ ((nut == 6 || (nut >= 9 && nut <= 12)) && ref != 0)) {
+ bad++;
+ } else {
+ if (nut == 7)
+ seen_sps = TRUE;
+ else if (nut == 8)
+ seen_pps = TRUE;
+ else if (nut == 5)
+ seen_idr = TRUE;
+
+ good++;
+ }
+ } else if (nut >= 14 && nut <= 33) {
+ if (nut == 15) {
+ seen_ssps = TRUE;
+ good++;
+ } else if (nut == 14 || nut == 20) {
+ /* Sometimes we see NAL 14 or 20 without SSPS
+ * if dropped into the middle of a stream -
+ * just ignore those (don't add to bad count) */
+ if (seen_ssps)
+ good++;
+ } else {
+ /* reserved */
+ /* Theoretically these are good, since if they exist in the
+ stream it merely means that a newer backwards-compatible
+ h.264 stream. But we should be identifying that separately. */
+ bad++;
+ }
+ } else {
+ /* unspecified, application specific */
+ /* don't consider these bad */
+ }
+
+ GST_LOG ("good:%d, bad:%d, pps:%d, sps:%d, idr:%d ssps:%d", good, bad,
+ seen_pps, seen_sps, seen_idr, seen_ssps);
+
+ if (seen_sps && seen_pps && seen_idr && good >= 10 && bad < 4) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_LIKELY, H264_VIDEO_CAPS);
+ return;
+ }
+
+ data_scan_ctx_advance (tf, &c, 4);
+ }
+ data_scan_ctx_advance (tf, &c, 1);
+ }
+
+ GST_LOG ("good:%d, bad:%d, pps:%d, sps:%d, idr:%d ssps=%d", good, bad,
+ seen_pps, seen_sps, seen_idr, seen_ssps);
+
+ /* ran out of data: a few clean NALs still make it a plausible match */
+ if (good >= 2 && bad == 0) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_POSSIBLE, H264_VIDEO_CAPS);
+ }
+ }
+
+ /*** video/x-h265 H265 elementary video stream ***/
+
+ static GstStaticCaps h265_video_caps =
+ GST_STATIC_CAPS ("video/x-h265,stream-format=byte-stream");
+
+ #define H265_VIDEO_CAPS gst_static_caps_get(&h265_video_caps)
+
+ #define H265_MAX_PROBE_LENGTH (128 * 1024) /* 128kB for HD should be enough. */
+
+ /* Typefinder for H.265/HEVC Annex-B byte-streams: walks start codes,
+ * validates the 2-byte NAL header, classifies NAL types, and suggests
+ * LIKELY once SPS + PPS + an IRAP picture and enough good NALs are seen. */
+ static void
+ h265_video_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ DataScanCtx c = { 0, NULL, 0 };
+
+ /* Stream consists of: a series of sync codes (00 00 00 01) followed
+ * by NALs
+ */
+ gboolean seen_irap = FALSE;
+ gboolean seen_vps = FALSE;
+ gboolean seen_sps = FALSE;
+ gboolean seen_pps = FALSE;
+ int nut;
+ int good = 0;
+ int bad = 0;
+
+ while (c.offset < H265_MAX_PROBE_LENGTH) {
+ if (G_UNLIKELY (!data_scan_ctx_ensure_data (tf, &c, 5)))
+ break;
+
+ if (IS_MPEG_HEADER (c.data)) {
+ /* forbiden_zero_bit | nal_unit_type */
+ nut = c.data[3] & 0xfe;
+
+ /* if forbidden bit is different to 0 won't be h265 */
+ if (nut > 0x7e) {
+ bad++;
+ break;
+ }
+ nut = nut >> 1;
+
+ /* if nuh_layer_id is not zero or nuh_temporal_id_plus1 is zero then
+ * it won't be h265 */
+ if ((c.data[3] & 0x01) || (c.data[4] & 0xf8) || !(c.data[4] & 0x07)) {
+ bad++;
+ break;
+ }
+
+ /* collect statistics about the NAL types */
+ if ((nut >= 0 && nut <= 9) || (nut >= 16 && nut <= 21) || (nut >= 32
+ && nut <= 40)) {
+ if (nut == 32)
+ seen_vps = TRUE;
+ else if (nut == 33)
+ seen_sps = TRUE;
+ else if (nut == 34)
+ seen_pps = TRUE;
+ else if (nut >= 16 && nut <= 21) {
+ /* BLA, IDR and CRA pictures are belongs to be IRAP picture */
+ /* we are not counting the reserved IRAP pictures (22 and 23) to good */
+ seen_irap = TRUE;
+ }
+
+ good++;
+ } else if ((nut >= 10 && nut <= 15) || (nut >= 22 && nut <= 31)
+ || (nut >= 41 && nut <= 47)) {
+ /* reserved values are counting as bad */
+ bad++;
+ } else {
+ /* unspecified (48..63), application specific */
+ /* don't consider these as bad */
+ }
+
+ GST_LOG ("good:%d, bad:%d, pps:%d, sps:%d, vps:%d, irap:%d", good, bad,
+ seen_pps, seen_sps, seen_vps, seen_irap);
+
+ if (seen_sps && seen_pps && seen_irap && good >= 10 && bad < 4) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_LIKELY, H265_VIDEO_CAPS);
+ return;
+ }
+
+ data_scan_ctx_advance (tf, &c, 5);
+ }
+ data_scan_ctx_advance (tf, &c, 1);
+ }
+
+ GST_LOG ("good:%d, bad:%d, pps:%d, sps:%d, vps:%d, irap:%d", good, bad,
+ seen_pps, seen_sps, seen_vps, seen_irap);
+
+ /* ran out of data: a few clean NALs still make it a plausible match */
+ if (good >= 2 && bad == 0) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_POSSIBLE, H265_VIDEO_CAPS);
+ }
+ }
+
+ /*** video/mpeg video stream ***/
+
+ static GstStaticCaps mpeg_video_caps = GST_STATIC_CAPS ("video/mpeg, "
+ "systemstream = (boolean) false");
+ #define MPEG_VIDEO_CAPS gst_static_caps_get(&mpeg_video_caps)
+
+ /*
+ * Idea is the same as MPEG system stream typefinding: We check each
+ * byte of the stream to see if - from that point on - the stream
+ * matches a predefined set of marker bits as defined in the MPEG
+ * video specs.
+ *
+ * I'm sure someone will do a chance calculation here too.
+ */
+
+ static void
+ mpeg_video_stream_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ DataScanCtx c = { 0, NULL, 0 };
+ gboolean seen_seq_at_0 = FALSE;
+ gboolean seen_seq = FALSE;
+ gboolean seen_gop = FALSE;
+ guint64 last_pic_offset = 0;
+ gint num_pic_headers = 0;
+ gint found = 0;
+
+ /* Scan start codes, counting sequence (0xB3), GOP (0xB8) and picture
+ * (0x00) headers plus matching first slices (0x01); 'found' counts
+ * picture+slice pairs, which carry the most weight below. */
+ while (c.offset < GST_MPEGVID_TYPEFIND_TRY_SYNC) {
+ if (found >= GST_MPEGVID_TYPEFIND_TRY_PICTURES)
+ break;
+
+ if (!data_scan_ctx_ensure_data (tf, &c, 5))
+ break;
+
+ if (!IS_MPEG_HEADER (c.data))
+ goto next;
+
+ /* a pack header indicates that this isn't an elementary stream */
+ if (c.data[3] == 0xBA && mpeg_sys_is_valid_pack (tf, c.data, c.size, NULL))
+ return;
+
+ /* do we have a sequence header? */
+ if (c.data[3] == 0xB3) {
+ seen_seq_at_0 = seen_seq_at_0 || (c.offset == 0);
+ seen_seq = TRUE;
+ data_scan_ctx_advance (tf, &c, 4 + 8);
+ continue;
+ }
+
+ /* or a GOP header */
+ if (c.data[3] == 0xB8) {
+ seen_gop = TRUE;
+ data_scan_ctx_advance (tf, &c, 8);
+ continue;
+ }
+
+ /* but what we'd really like to see is a picture header */
+ if (c.data[3] == 0x00) {
+ ++num_pic_headers;
+ last_pic_offset = c.offset;
+ data_scan_ctx_advance (tf, &c, 8);
+ continue;
+ }
+
+ /* ... each followed by a slice header with slice_vertical_pos=1 that's
+ * not too far away from the previously seen picture header. */
+ if (c.data[3] == 0x01 && num_pic_headers > found &&
+ (c.offset - last_pic_offset) >= 4 &&
+ (c.offset - last_pic_offset) <= 64) {
+ data_scan_ctx_advance (tf, &c, 4);
+ found += 1;
+ continue;
+ }
+
+ next:
+
+ data_scan_ctx_advance (tf, &c, 1);
+ }
+
+ if (found > 0 || seen_seq) {
+ GstTypeFindProbability probability = 0;
+
+ GST_LOG ("Found %d pictures, seq:%d, gop:%d", found, seen_seq, seen_gop);
+
+ if (found >= GST_MPEGVID_TYPEFIND_TRY_PICTURES && seen_seq && seen_gop)
+ probability = GST_TYPE_FIND_NEARLY_CERTAIN - 1;
+ else if (found >= GST_MPEGVID_TYPEFIND_TRY_PICTURES && seen_seq)
+ probability = GST_TYPE_FIND_NEARLY_CERTAIN - 9;
+ else if (found >= GST_MPEGVID_TYPEFIND_TRY_PICTURES)
+ probability = GST_TYPE_FIND_LIKELY;
+ else if (seen_seq_at_0 && seen_gop && found > 2)
+ probability = GST_TYPE_FIND_LIKELY - 10;
+ else if (seen_seq && seen_gop && found > 2)
+ probability = GST_TYPE_FIND_LIKELY - 20;
+ else if (seen_seq_at_0 && found > 0)
+ probability = GST_TYPE_FIND_POSSIBLE;
+ else if (seen_seq && found > 0)
+ probability = GST_TYPE_FIND_POSSIBLE - 5;
+ else if (found > 0)
+ probability = GST_TYPE_FIND_POSSIBLE - 10;
+ else if (seen_seq)
+ probability = GST_TYPE_FIND_POSSIBLE - 20;
+
+ gst_type_find_suggest_simple (tf, probability, "video/mpeg",
+ "systemstream", G_TYPE_BOOLEAN, FALSE,
+ "mpegversion", G_TYPE_INT, 1, "parsed", G_TYPE_BOOLEAN, FALSE, NULL);
+ }
+ }
+
+ /*** audio/x-aiff ***/
+
+ static GstStaticCaps aiff_caps = GST_STATIC_CAPS ("audio/x-aiff");
+
+ #define AIFF_CAPS gst_static_caps_get(&aiff_caps)
+ /* Detect AIFF/AIFC: an IFF "FORM" container whose form type (at offset 8)
+ * is "AIFF" or "AIFC". */
+ static void
+ aiff_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 16);
+
+ if (data && memcmp (data, "FORM", 4) == 0) {
+ data += 8;
+ if (memcmp (data, "AIFF", 4) == 0 || memcmp (data, "AIFC", 4) == 0)
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, AIFF_CAPS);
+ }
+ }
+
+ /*** audio/x-svx ***/
-
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ static GstStaticCaps svx_caps = GST_STATIC_CAPS ("audio/x-svx");
+
+ #define SVX_CAPS gst_static_caps_get(&svx_caps)
+ /* Detect Amiga IFF 8SVX/16SV audio: "FORM" container with form type
+ * "8SVX" or "16SV" at offset 8. */
+ static void
+ svx_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 16);
+
+ if (data && memcmp (data, "FORM", 4) == 0) {
+ data += 8;
+ if (memcmp (data, "8SVX", 4) == 0 || memcmp (data, "16SV", 4) == 0)
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, SVX_CAPS);
+ }
+ }
+
+ /*** audio/x-shorten ***/
+
+ static GstStaticCaps shn_caps = GST_STATIC_CAPS ("audio/x-shorten");
+
+ #define SHN_CAPS gst_static_caps_get(&shn_caps)
+ /* Detect Shorten audio: "ajkg" magic at the start, or the "SHNAMPSK"
+ * seek-table trailer in the last 8 bytes of the file. */
+ static void
+ shn_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 4);
+
+ if (data && memcmp (data, "ajkg", 4) == 0) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, SHN_CAPS);
+ }
+ /* negative offset peeks relative to the end of the stream */
+ data = gst_type_find_peek (tf, -8, 8);
+ if (data && memcmp (data, "SHNAMPSK", 8) == 0) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, SHN_CAPS);
+ }
+ }
+
+ /*** application/x-ape ***/
+
+ static GstStaticCaps ape_caps = GST_STATIC_CAPS ("application/x-ape");
+
+ #define APE_CAPS gst_static_caps_get(&ape_caps)
+ /* Detect Monkey's Audio: "MAC " magic at offset 0. Suggested below
+ * MAXIMUM because the 4-byte magic alone is a fairly weak signal. */
+ static void
+ ape_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 4);
+
+ if (data && memcmp (data, "MAC ", 4) == 0) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_LIKELY + 10, APE_CAPS);
+ }
+ }
-
++#endif
+ /*** ISO FORMATS ***/
+
+ /*** audio/x-m4a ***/
+
+ static GstStaticCaps m4a_caps = GST_STATIC_CAPS ("audio/x-m4a");
+
+ #define M4A_CAPS (gst_static_caps_get(&m4a_caps))
+ /* Detect M4A: an ISO BMFF file whose 'ftyp' major brand (bytes 4..11)
+ * is "M4A " (note the trailing space in the brand). */
+ static void
+ m4a_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 4, 8);
+
+ if (data && (memcmp (data, "ftypM4A ", 8) == 0)) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, M4A_CAPS);
+ }
+ }
+
+ /*** application/x-3gp ***/
+
+ /* The Q is there because variables can't start with a number. */
+ static GstStaticCaps q3gp_caps = GST_STATIC_CAPS ("application/x-3gp");
+ #define Q3GP_CAPS (gst_static_caps_get(&q3gp_caps))
+
+ /* Map the first three bytes of a 3GPP brand code ("3gg"/"3gp"/"3gs"/"3gr",
+ * release digit ignored) to its profile string, or NULL if it is not a
+ * known 3GPP brand. @data must provide at least 3 readable bytes. */
+ static const gchar *
+ q3gp_type_find_get_profile (const guint8 * data)
+ {
+ switch (GST_MAKE_FOURCC (data[0], data[1], data[2], 0)) {
+ case GST_MAKE_FOURCC ('3', 'g', 'g', 0):
+ return "general";
+ case GST_MAKE_FOURCC ('3', 'g', 'p', 0):
+ return "basic";
+ case GST_MAKE_FOURCC ('3', 'g', 's', 0):
+ return "streaming-server";
+ case GST_MAKE_FOURCC ('3', 'g', 'r', 0):
+ return "progressive-download";
+ default:
+ break;
+ }
+ return NULL;
+ }
+
+ /* Detect 3GPP files: checks the 'ftyp' box's major brand first, then
+ * falls back to scanning the compatible-brands list (from byte 16 of the
+ * ftyp box) for a known 3GPP brand. Suggests application/x-3gp with the
+ * matching "profile" field. */
+ static void
+ q3gp_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const gchar *profile;
+ guint32 ftyp_size = 0;
+ guint32 offset = 0;
+ const guint8 *data = NULL;
+
+ if ((data = gst_type_find_peek (tf, 0, 12)) == NULL) {
+ return;
+ }
+
+ data += 4;
+ if (memcmp (data, "ftyp", 4) != 0) {
+ return;
+ }
+
+ /* check major brand */
+ data += 4;
+ if ((profile = q3gp_type_find_get_profile (data))) {
+ gst_type_find_suggest_simple (tf, GST_TYPE_FIND_MAXIMUM,
+ "application/x-3gp", "profile", G_TYPE_STRING, profile, NULL);
+ return;
+ }
+
+ /* check compatible brands */
+ if ((data = gst_type_find_peek (tf, 0, 4)) != NULL) {
+ ftyp_size = GST_READ_UINT32_BE (data);
+ }
+ if ((data = gst_type_find_peek (tf, 0, ftyp_size)) != NULL) {
+ /* compatible brands start at offset 16, one fourcc every 4 bytes */
+ for (offset = 16; offset + 4 < ftyp_size; offset += 4) {
+ if ((profile = q3gp_type_find_get_profile (data + offset))) {
+ gst_type_find_suggest_simple (tf, GST_TYPE_FIND_MAXIMUM,
+ "application/x-3gp", "profile", G_TYPE_STRING, profile, NULL);
+ return;
+ }
+ }
+ }
+
+ return;
+
+ }
+
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ /*** video/mj2 and image/jp2 ***/
+ static GstStaticCaps mj2_caps = GST_STATIC_CAPS ("video/mj2");
+
+ #define MJ2_CAPS gst_static_caps_get(&mj2_caps)
+
+ static GstStaticCaps jp2_caps = GST_STATIC_CAPS ("image/jp2");
+
+ #define JP2_CAPS gst_static_caps_get(&jp2_caps)
+
+ /* Detect JPEG-2000 containers: the 12-byte JP2 signature box followed by
+ * an 'ftyp' box whose brand distinguishes still images ("jp2 ") from
+ * Motion JPEG-2000 ("mjp2"). */
+ static void
+ jp2_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data;
+
+ data = gst_type_find_peek (tf, 0, 24);
+ if (!data)
+ return;
+
+ /* jp2 signature */
+ if (memcmp (data, "\000\000\000\014jP \015\012\207\012", 12) != 0)
+ return;
+
+ /* check ftyp box */
+ data += 12;
+ if (memcmp (data + 4, "ftyp", 4) == 0) {
+ if (memcmp (data + 8, "jp2 ", 4) == 0)
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, JP2_CAPS);
+ else if (memcmp (data + 8, "mjp2", 4) == 0)
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, MJ2_CAPS);
+ }
+ }
+
+
+ static GstStaticCaps jpc_caps = GST_STATIC_CAPS ("image/x-jpc");
+
+ #define JPC_CAPS gst_static_caps_get(&jpc_caps)
+
+ /* Detect a raw JPEG-2000 codestream: requires the SOC+SIZ marker pair at
+ * the start, then walks the main-header marker segments (each prefixed by
+ * a 16-bit length) until SOT; COD and QCD must both have been seen. */
+ static void
+ jpc_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ gboolean found_cod = FALSE;
+ gboolean found_qcd = FALSE;
+ gboolean found_sot = FALSE;
+ const guint8 *data;
+ gint offset = 0;
+ const guint8 soc_siz[] = { 0xff, 0x4f, 0xff, 0x51 };
+
+ #define GST_TYPE_FIND_JPC_MARKER_SOT 0xFF90
+ #define GST_TYPE_FIND_JPC_MARKER_COD 0xFF52
+ #define GST_TYPE_FIND_JPC_MARKER_QCD 0xFF5C
+ #define GST_TYPE_FIND_JPC_MARKER_COC 0xFF53
+ #define GST_TYPE_FIND_JPC_MARKER_RGN 0xFF5E
+ #define GST_TYPE_FIND_JPC_MARKER_QCC 0xFF5D
+ #define GST_TYPE_FIND_JPC_MARKER_POC 0xFF5F
+ #define GST_TYPE_FIND_JPC_MARKER_PLM 0xFF57
+ #define GST_TYPE_FIND_JPC_MARKER_PPM 0xFF60
+ #define GST_TYPE_FIND_JPC_MARKER_TLM 0xFF55
+ #define GST_TYPE_FIND_JPC_MARKER_CRG 0xFF63
+ #define GST_TYPE_FIND_JPC_MARKER_COM 0xFF64
+ #define GST_TYPE_FIND_JPC_MARKER_CBD 0xFF78
+ #define GST_TYPE_FIND_JPC_MARKER_MCC 0xFF75
+ #define GST_TYPE_FIND_JPC_MARKER_MCT 0xFF74
+ #define GST_TYPE_FIND_JPC_MARKER_MCO 0xFF77
+
+
+ /* SOC marker + SIZ marker */
+ if ((data = gst_type_find_peek (tf, 0, 4)) != NULL) {
+ if (memcmp (data, soc_siz, 4) != 0)
+ return;
+ offset += 4;
+ } else {
+ return;
+ }
+
+ while (!found_sot) {
+
+ /* skip actual marker data */
+ if ((data = gst_type_find_peek (tf, offset, 2)) != NULL) {
+ offset += GST_READ_UINT16_BE (data);
+ } else {
+ return;
+ }
+
+ /* read marker */
+ if ((data = gst_type_find_peek (tf, offset, 2)) != NULL) {
+ guint16 marker = GST_READ_UINT16_BE (data);
+ switch (marker) {
+ case GST_TYPE_FIND_JPC_MARKER_SOT:
+ found_sot = TRUE;
+ break;
+ case GST_TYPE_FIND_JPC_MARKER_COD:
+ found_cod = TRUE;
+ break;
+ case GST_TYPE_FIND_JPC_MARKER_QCD:
+ found_qcd = TRUE;
+ break;
+ /* optional header markers */
+ case GST_TYPE_FIND_JPC_MARKER_COC:
+ case GST_TYPE_FIND_JPC_MARKER_RGN:
+ case GST_TYPE_FIND_JPC_MARKER_QCC:
+ case GST_TYPE_FIND_JPC_MARKER_POC:
+ case GST_TYPE_FIND_JPC_MARKER_PLM:
+ case GST_TYPE_FIND_JPC_MARKER_PPM:
+ case GST_TYPE_FIND_JPC_MARKER_TLM:
+ case GST_TYPE_FIND_JPC_MARKER_CRG:
+ case GST_TYPE_FIND_JPC_MARKER_COM:
+ case GST_TYPE_FIND_JPC_MARKER_CBD:
+ case GST_TYPE_FIND_JPC_MARKER_MCC:
+ case GST_TYPE_FIND_JPC_MARKER_MCT:
+ case GST_TYPE_FIND_JPC_MARKER_MCO:
+ break;
+ /* unrecognized marker */
+ default:
+ return;
+ }
+ offset += 2;
+ } else {
+ return;
+ }
+ }
+
+ /* found_sot is TRUE here; COD and QCD are the mandatory companions */
+ if (found_cod && found_qcd && found_sot)
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, JPC_CAPS);
+ }
-
++#endif
+
+ /*** video/quicktime ***/
+
+ static GstStaticCaps qt_caps = GST_STATIC_CAPS ("video/quicktime");
+
+ #define QT_CAPS gst_static_caps_get(&qt_caps)
+ #define STRNCMP(x,y,z) (strncmp ((char*)(x), (char*)(y), z))
+
+ /* FIXME 0.11: go through http://www.ftyps.com/ */
+ /* Detect QuickTime/ISO-BMFF files by walking top-level boxes: known
+ * 'ftyp' major brands (and compatible brands) short-circuit to MAXIMUM
+ * with a "variant" field; otherwise confidence accumulates with each
+ * recognized box, peaking once both 'moov' and 'mdat' were seen. */
+ static void
+ qt_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data;
+ guint tip = 0;
+ guint atoms_in_a_row = 0;
+ gboolean have_moov = FALSE, have_mdat = FALSE;
+ guint64 offset = 0;
+ guint64 size;
+ const gchar *variant = NULL;
+
+ while ((data = gst_type_find_peek (tf, offset, 12)) != NULL) {
+ guint64 new_offset;
+
+ if (STRNCMP (&data[4], "ftypqt ", 8) == 0) {
+ tip = GST_TYPE_FIND_MAXIMUM;
+ break;
+ }
+
+ if (STRNCMP (&data[4], "ftypisom", 8) == 0 ||
+ STRNCMP (&data[4], "ftypavc1", 8) == 0 ||
++#ifdef TIZEN_FEATURE_TYPEFIND_ENHANCEMENT
++ STRNCMP (&data[4], "ftypwmf ", 8) == 0 ||
++#endif
+ STRNCMP (&data[4], "ftypmp42", 8) == 0) {
+ tip = GST_TYPE_FIND_MAXIMUM;
+ variant = "iso";
+ break;
+ }
+
+ if (STRNCMP (&data[4], "ftypisml", 8) == 0 ||
+ STRNCMP (&data[4], "ftypavc3", 8) == 0) {
+ tip = GST_TYPE_FIND_MAXIMUM;
+ variant = "iso-fragmented";
+ break;
+ }
+
+ if (STRNCMP (&data[4], "ftypccff", 8) == 0) {
+ tip = GST_TYPE_FIND_MAXIMUM;
+ variant = "ccff";
+ break;
+ }
+
+ if (STRNCMP (&data[4], "ftypmif1", 8) == 0) {
+ tip = GST_TYPE_FIND_MAXIMUM;
+ variant = "heif";
+ break;
+ }
+
+ /* box/atom types that are in common with ISO base media file format */
+ if (STRNCMP (&data[4], "moov", 4) == 0 ||
+ STRNCMP (&data[4], "mdat", 4) == 0 ||
+ STRNCMP (&data[4], "ftyp", 4) == 0 ||
+ STRNCMP (&data[4], "free", 4) == 0 ||
+ STRNCMP (&data[4], "uuid", 4) == 0 ||
+ STRNCMP (&data[4], "moof", 4) == 0 ||
+ STRNCMP (&data[4], "skip", 4) == 0) {
+ if (tip == 0) {
+ tip = GST_TYPE_FIND_LIKELY;
+ } else {
+ tip = GST_TYPE_FIND_NEARLY_CERTAIN;
+ }
+
+ if (STRNCMP (&data[4], "moov", 4) == 0)
+ have_moov = TRUE;
+ if (STRNCMP (&data[4], "mdat", 4) == 0)
+ have_mdat = TRUE;
+
+ atoms_in_a_row += 1;
+ if ((have_moov && have_mdat) || atoms_in_a_row >= 5) {
+ tip = GST_TYPE_FIND_MAXIMUM;
+ break;
+ }
+ }
+ /* other box/atom types, apparently quicktime specific */
+ else if (STRNCMP (&data[4], "pnot", 4) == 0 ||
+ STRNCMP (&data[4], "PICT", 4) == 0 ||
+ STRNCMP (&data[4], "wide", 4) == 0 ||
+ STRNCMP (&data[4], "prfl", 4) == 0) {
+ tip = GST_TYPE_FIND_MAXIMUM;
+ break;
+ } else {
+ if (atoms_in_a_row >= 3)
+ tip = GST_TYPE_FIND_LIKELY;
+ else
+ tip = 0;
+ break;
+ }
+
+ size = GST_READ_UINT32_BE (data);
+ if (size + offset >= G_MAXINT64)
+ break;
+ /* check compatible brands rather than ever expanding major brands above */
+ if ((STRNCMP (&data[4], "ftyp", 4) == 0) && (size >= 16)) {
+ data = gst_type_find_peek (tf, offset, size);
+ if (data == NULL)
+ goto done;
+ /* compatible brands start at offset 12 inside the ftyp box */
+ new_offset = 12;
+ while (new_offset + 4 <= size) {
+ if (STRNCMP (&data[new_offset], "isom", 4) == 0 ||
+ STRNCMP (&data[new_offset], "dash", 4) == 0 ||
+ STRNCMP (&data[new_offset], "avc1", 4) == 0 ||
+ STRNCMP (&data[new_offset], "avc3", 4) == 0 ||
+ STRNCMP (&data[new_offset], "mp41", 4) == 0 ||
+ STRNCMP (&data[new_offset], "mp42", 4) == 0) {
+ tip = GST_TYPE_FIND_MAXIMUM;
+ variant = "iso";
+ goto done;
+ } else if (STRNCMP (&data[new_offset], "mif1", 4) == 0) {
+ tip = GST_TYPE_FIND_MAXIMUM;
+ variant = "heif";
+ goto done;
+ }
+ new_offset += 4;
+ }
+ }
+ /* size == 1 means a 64-bit "largesize" field follows the box type */
+ if (size == 1) {
+ const guint8 *sizedata;
+
+ sizedata = gst_type_find_peek (tf, offset + 8, 8);
+ if (sizedata == NULL)
+ break;
+
+ size = GST_READ_UINT64_BE (sizedata);
+ } else {
+ if (size < 8)
+ break;
+ }
+ /* guard against overflow / non-advancing offsets */
+ new_offset = offset + size;
+ if (new_offset <= offset)
+ break;
+ if (new_offset + 16 >= G_MAXINT64)
+ break;
+ offset = new_offset;
+ }
+
+ done:
+ if (tip > 0) {
+ if (variant) {
+ GstCaps *caps = gst_caps_copy (QT_CAPS);
+
+ gst_caps_set_simple (caps, "variant", G_TYPE_STRING, variant, NULL);
+ gst_type_find_suggest (tf, tip, caps);
+ gst_caps_unref (caps);
+ } else {
+ gst_type_find_suggest (tf, tip, QT_CAPS);
+ }
+ }
+ };
+
+
+ /*** image/x-quicktime ***/
-
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ static GstStaticCaps qtif_caps = GST_STATIC_CAPS ("image/x-quicktime");
+
+ #define QTIF_CAPS gst_static_caps_get(&qtif_caps)
+
+ /* how many atoms we check before we give up */
+ #define QTIF_MAXROUNDS 25
+
+ static void
+ qtif_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data;
+ gboolean found_idsc = FALSE;
+ gboolean found_idat = FALSE;
+ guint64 offset = 0;
+ guint rounds = 0;
+
+ while ((data = gst_type_find_peek (tf, offset, 8)) != NULL) {
+ guint64 size;
+
+ size = GST_READ_UINT32_BE (data);
+ if (size == 1) {
+ const guint8 *sizedata;
+
+ sizedata = gst_type_find_peek (tf, offset + 8, 8);
+ if (sizedata == NULL)
+ break;
+
+ size = GST_READ_UINT64_BE (sizedata);
+ }
+ if (size < 8)
+ break;
+
+ if (STRNCMP (data + 4, "idsc", 4) == 0)
+ found_idsc = TRUE;
+ if (STRNCMP (data + 4, "idat", 4) == 0)
+ found_idat = TRUE;
+
+ if (found_idsc && found_idat) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, QTIF_CAPS);
+ return;
+ }
+
+ offset += size;
+ if (offset + 8 >= G_MAXINT64)
+ break;
+ if (++rounds > QTIF_MAXROUNDS)
+ break;
+ }
+
+ if (found_idsc || found_idat) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_LIKELY, QTIF_CAPS);
+ return;
+ }
+ };
+
+ /*** audio/x-mod ***/
+
+ static GstStaticCaps mod_caps = GST_STATIC_CAPS ("audio/x-mod");
+
+ #define MOD_CAPS gst_static_caps_get(&mod_caps)
+ /* FIXME: M15 CheckType to do */
+ static void
+ mod_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data;
+ GstTypeFindProbability probability;
+ const char *mod_type = NULL;
+
+ /* MOD */
+ if ((data = gst_type_find_peek (tf, 1080, 4)) != NULL) {
+ /* Protracker and variants */
+ if ((memcmp (data, "M.K.", 4) == 0) ||
+ (memcmp (data, "M!K!", 4) == 0) ||
+ (memcmp (data, "M&K!", 4) == 0) || (memcmp (data, "N.T.", 4) == 0) ||
+ /* Star Tracker */
+ (memcmp (data, "FLT", 3) == 0 && isdigit (data[3])) ||
+ (memcmp (data, "EXO", 3) == 0 && isdigit (data[3])) ||
+ /* Oktalyzer (Amiga) */
+ (memcmp (data, "OKTA", 4) == 0) || (memcmp (data, "OCTA", 4) == 0) ||
+ /* Oktalyser (Atari) */
+ (memcmp (data, "CD81", 4) == 0) ||
+ /* Taketracker */
+ (memcmp (data, "TDZ", 3) == 0 && isdigit (data[3])) ||
+ /* Fasttracker */
+ (memcmp (data + 1, "CHN", 3) == 0 && isdigit (data[0])) ||
+ /* Fasttracker or Taketracker */
+ (memcmp (data + 2, "CH", 2) == 0 && isdigit (data[0])
+ && isdigit (data[1])) || (memcmp (data + 2, "CN", 2) == 0
+ && isdigit (data[0]) && isdigit (data[1]))) {
+ mod_type = "mod";
+ probability = GST_TYPE_FIND_MAXIMUM;
+ goto suggest_audio_mod_caps;
+ }
+ }
+ /* J2B (Jazz Jackrabbit 2) */
+ if ((data = gst_type_find_peek (tf, 0, 8)) != NULL) {
+ if ((memcmp (data, "MUSE\xDE\xAD", 4) == 0) &&
+ ((memcmp (data + 6, "\xBE\xEF", 2) == 0) ||
+ (memcmp (data + 6, "\xBA\xBE", 2) == 0))) {
+ mod_type = "j2b";
+ probability = GST_TYPE_FIND_MAXIMUM;
+ goto suggest_audio_mod_caps;
+ }
+ }
+ /* AMS (Velvet Studio) */
+ if ((data = gst_type_find_peek (tf, 0, 7)) != NULL) {
+ if (memcmp (data, "AMShdr\x1A", 7) == 0) {
+ mod_type = "velvet-ams";
+ probability = GST_TYPE_FIND_MAXIMUM;
+ goto suggest_audio_mod_caps;
+ }
+ }
+ /* AMS (Extreme Tracker) */
+ if ((data = gst_type_find_peek (tf, 0, 9)) != NULL) {
+ if ((memcmp (data, "Extreme", 7) == 0) && (data[8] == 1)) {
+ mod_type = "extreme-ams";
+ probability = GST_TYPE_FIND_LIKELY;
+ goto suggest_audio_mod_caps;
+ }
+ }
+ /* ULT (Ultratracker) */
+ if ((data = gst_type_find_peek (tf, 0, 14)) != NULL) {
+ if (memcmp (data, "MAS_UTrack_V00", 14) == 0) {
+ mod_type = "ult";
+ probability = GST_TYPE_FIND_MAXIMUM;
+ goto suggest_audio_mod_caps;
+ }
+ }
+ /* DIGI (DigiBooster) */
+ if ((data = gst_type_find_peek (tf, 0, 20)) != NULL) {
+ if (memcmp (data, "DIGI Booster module\0", 20) == 0) {
+ mod_type = "digi";
+ probability = GST_TYPE_FIND_MAXIMUM;
+ goto suggest_audio_mod_caps;
+ }
+ }
+ /* PTM (PolyTracker) */
+ if ((data = gst_type_find_peek (tf, 0x2C, 4)) != NULL) {
+ if (memcmp (data, "PTMF", 4) == 0) {
+ mod_type = "ptm";
+ probability = GST_TYPE_FIND_LIKELY;
+ goto suggest_audio_mod_caps;
+ }
+ }
+ /* XM */
+ if ((data = gst_type_find_peek (tf, 0, 38)) != NULL) {
+ if ((memcmp (data, "Extended Module: ", 17) == 0) && (data[37] == 0x1A)) {
+ mod_type = "xm";
+ probability = GST_TYPE_FIND_MAXIMUM;
+ goto suggest_audio_mod_caps;
+ }
+ }
+ /* OKT */
+ if (data || (data = gst_type_find_peek (tf, 0, 8)) != NULL) {
+ if (memcmp (data, "OKTASONG", 8) == 0) {
+ mod_type = "okt";
+ probability = GST_TYPE_FIND_MAXIMUM;
+ goto suggest_audio_mod_caps;
+ }
+ }
+ /* Various formats with a 4-byte magic ID at the beginning of the file */
+ if (data || (data = gst_type_find_peek (tf, 0, 4)) != NULL) {
+ /* PSM (Protracker Studio PSM) */
+ if (memcmp (data, "PSM", 3) == 0) {
+ unsigned char fbyte = data[3];
+ if ((fbyte == ' ') || (fbyte == 254)) {
+ mod_type = "psm";
+ probability = GST_TYPE_FIND_MAXIMUM;
+ goto suggest_audio_mod_caps;
+ }
+ }
+ /* 669 */
+ if ((memcmp (data, "if", 2) == 0) || (memcmp (data, "JN", 2) == 0)) {
+ mod_type = "669";
+ probability = GST_TYPE_FIND_LIKELY;
+ goto suggest_audio_mod_caps;
+ }
+ /* AMF */
+ if ((memcmp (data, "AMF", 3) == 0) && (data[3] > 10) && (data[3] < 14)) {
+ mod_type = "dsmi-amf";
+ probability = GST_TYPE_FIND_MAXIMUM;
+ goto suggest_audio_mod_caps;
+ }
+ /* IT */
+ if (memcmp (data, "IMPM", 4) == 0) {
+ mod_type = "it";
+ probability = GST_TYPE_FIND_MAXIMUM;
+ goto suggest_audio_mod_caps;
+ }
+ /* DBM (DigiBooster Pro) */
+ if (memcmp (data, "DBM0", 4) == 0) {
+ mod_type = "dbm";
+ probability = GST_TYPE_FIND_MAXIMUM;
+ goto suggest_audio_mod_caps;
+ }
+ /* MDL (DigiTrakker) */
+ if (memcmp (data, "DMDL", 4) == 0) {
+ mod_type = "mdl";
+ probability = GST_TYPE_FIND_MAXIMUM;
+ goto suggest_audio_mod_caps;
+ }
+ /* MT2 (MadTracker 2.0) */
+ if (memcmp (data, "MT20", 4) == 0) {
+ mod_type = "mt2";
+ probability = GST_TYPE_FIND_MAXIMUM;
+ goto suggest_audio_mod_caps;
+ }
+ /* DMF (X-Tracker) */
+ if (memcmp (data, "DDMF", 4) == 0) {
+ mod_type = "dmf";
+ probability = GST_TYPE_FIND_MAXIMUM;
+ goto suggest_audio_mod_caps;
+ }
+ /* MED */
+ if ((memcmp (data, "MMD0", 4) == 0) || (memcmp (data, "MMD1", 4) == 0)) {
+ mod_type = "med";
+ probability = GST_TYPE_FIND_MAXIMUM;
+ goto suggest_audio_mod_caps;
+ }
+ /* MTM */
+ if (memcmp (data, "MTM", 3) == 0) {
+ mod_type = "mtm";
+ probability = GST_TYPE_FIND_MAXIMUM;
+ goto suggest_audio_mod_caps;
+ }
+ /* DSM */
+ if (memcmp (data, "RIFF", 4) == 0) {
+ const guint8 *data2 = gst_type_find_peek (tf, 8, 4);
+
+ if (data2) {
+ if (memcmp (data2, "DSMF", 4) == 0) {
+ mod_type = "dsm";
+ probability = GST_TYPE_FIND_MAXIMUM;
+ goto suggest_audio_mod_caps;
+ }
+ }
+ }
+ /* FAR (Farandole) */
+ if (memcmp (data, "FAR\xFE", 4) == 0) {
+ mod_type = "far";
+ probability = GST_TYPE_FIND_MAXIMUM;
+ goto suggest_audio_mod_caps;
+ }
+ /* FAM */
+ if (memcmp (data, "FAM\xFE", 4) == 0) {
+ const guint8 *data2 = gst_type_find_peek (tf, 44, 3);
+
+ if (data2) {
+ if (memcmp (data2, "compare", 3) == 0) {
+ mod_type = "fam";
+ probability = GST_TYPE_FIND_MAXIMUM;
+ goto suggest_audio_mod_caps;
+ }
+ /* otherwise do not suggest anything */
+ } else {
+ mod_type = "fam";
+ probability = GST_TYPE_FIND_LIKELY;
+ goto suggest_audio_mod_caps;
+ }
+ }
+ /* GDM */
+ if (memcmp (data, "GDM\xFE", 4) == 0) {
+ const guint8 *data2 = gst_type_find_peek (tf, 71, 4);
+
+ if (data2) {
+ if (memcmp (data2, "GMFS", 4) == 0) {
+ mod_type = "gdm";
+ probability = GST_TYPE_FIND_MAXIMUM;
+ goto suggest_audio_mod_caps;
+ }
+ /* otherwise do not suggest anything */
+ } else {
+ mod_type = "gdm";
+ probability = GST_TYPE_FIND_LIKELY;
+ goto suggest_audio_mod_caps;
+ }
+ }
+ /* UMX */
+ if (memcmp (data, "\xC1\x83\x2A\x9E", 4) == 0) {
+ mod_type = "umx";
+ probability = GST_TYPE_FIND_POSSIBLE;
+ goto suggest_audio_mod_caps;
+ }
+ }
+ /* FAR (Farandole) (secondary detection) */
+ if ((data = gst_type_find_peek (tf, 44, 3)) != NULL) {
+ if (memcmp (data, "\x0D\x0A\x1A", 3) == 0) {
+ mod_type = "far";
+ probability = GST_TYPE_FIND_POSSIBLE;
+ goto suggest_audio_mod_caps;
+ }
+ }
+ /* IMF */
+ if ((data = gst_type_find_peek (tf, 60, 4)) != NULL) {
+ if (memcmp (data, "IM10", 4) == 0) {
+ mod_type = "imf";
+ probability = GST_TYPE_FIND_MAXIMUM;
+ goto suggest_audio_mod_caps;
+ }
+ }
+ /* S3M */
+ if ((data = gst_type_find_peek (tf, 44, 4)) != NULL) {
+ if (memcmp (data, "SCRM", 4) == 0) {
+ mod_type = "s3m";
+ probability = GST_TYPE_FIND_MAXIMUM;
+ goto suggest_audio_mod_caps;
+ }
+ }
+ /* STM */
+ if ((data = gst_type_find_peek (tf, 20, 8)) != NULL) {
+ if (g_ascii_strncasecmp ((gchar *) data, "!Scream!", 8) == 0 ||
+ g_ascii_strncasecmp ((gchar *) data, "BMOD2STM", 8) == 0) {
+ const guint8 *id, *stmtype;
+
+ if ((id = gst_type_find_peek (tf, 28, 1)) == NULL)
+ return;
+ if ((stmtype = gst_type_find_peek (tf, 29, 1)) == NULL)
+ return;
+ if (*id == 0x1A && *stmtype == 2) {
+ mod_type = "stm";
+ probability = GST_TYPE_FIND_MAXIMUM;
+ goto suggest_audio_mod_caps;
+ }
+ }
+ }
+ /* AMF */
+ if ((data = gst_type_find_peek (tf, 0, 19)) != NULL) {
+ if (memcmp (data, "ASYLUM Music Format", 19) == 0) {
+ mod_type = "asylum-amf";
+ probability = GST_TYPE_FIND_MAXIMUM;
+ goto suggest_audio_mod_caps;
+ }
+ }
+
+ suggest_audio_mod_caps:
+ if (mod_type != NULL) {
+ GstCaps *caps = gst_caps_new_simple ("audio/x-mod",
+ "type", G_TYPE_STRING, mod_type, NULL);
+
+ gst_type_find_suggest (tf, probability, caps);
+ gst_caps_unref (caps);
+ }
+ }
+
+ /*** application/x-shockwave-flash ***/
+
+ static GstStaticCaps swf_caps =
+ GST_STATIC_CAPS ("application/x-shockwave-flash");
+ #define SWF_CAPS (gst_static_caps_get(&swf_caps))
+ static void
+ swf_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 4);
+
+ if (data && (data[0] == 'F' || data[0] == 'C') &&
+ data[1] == 'W' && data[2] == 'S') {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, SWF_CAPS);
+ }
+ }
-
++#endif
+ /*** application/vnd.ms-sstr+xml ***/
+
+ static void
+ mss_manifest_load_utf16 (gunichar2 * utf16_ne, const guint8 * utf16_data,
+ gsize data_size, guint data_endianness)
+ {
+ memcpy (utf16_ne, utf16_data, data_size);
+ if (data_endianness != G_BYTE_ORDER) {
+ guint i;
+
+ for (i = 0; i < data_size / 2; ++i)
+ utf16_ne[i] = GUINT16_SWAP_LE_BE (utf16_ne[i]);
+ }
+ }
+
static GstStaticCaps mss_manifest_caps =
    GST_STATIC_CAPS ("application/vnd.ms-sstr+xml");
#define MSS_MANIFEST_CAPS (gst_static_caps_get(&mss_manifest_caps))
/* Typefinder for Microsoft Smooth Streaming manifests.
 *
 * First tries a plain (ASCII/UTF-8 without BOM) XML check for the
 * "SmoothStreamingMedia" root element; failing that, detects a UTF-8 or
 * UTF-16 BOM, converts the head of the file to UTF-8 if needed, and
 * re-checks the root element.
 */
static void
mss_manifest_type_find (GstTypeFind * tf, gpointer unused)
{
  gunichar2 utf16_ne[512];
  const guint8 *data;
  guint data_endianness = 0;
  glong n_read = 0, size = 0;
  /* NOTE(review): gst_type_find_get_length() returns a 64-bit length but is
   * stored in a guint here — presumably fine since only the first KB is
   * inspected, but confirm truncation is harmless */
  guint length;
  gchar *utf8;
  gboolean utf8_bom_detected = FALSE;

  /* fast path: BOM-less UTF-8/ASCII manifest */
  if (xml_check_first_element (tf, "SmoothStreamingMedia", 20, TRUE)) {
    gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, MSS_MANIFEST_CAPS);
    return;
  }

  length = gst_type_find_get_length (tf);

  /* try detecting the charset */
  data = gst_type_find_peek (tf, 0, 3);

  if (data == NULL)
    return;

  /* look for a possible BOM; bail out when there is none, since the
   * BOM-less case was already handled above */
  if (data[0] == 0xEF && data[1] == 0xBB && data[2] == 0xBF)
    utf8_bom_detected = TRUE;
  else if (data[0] == 0xFF && data[1] == 0xFE)
    data_endianness = G_LITTLE_ENDIAN;
  else if (data[0] == 0xFE && data[1] == 0xFF)
    data_endianness = G_BIG_ENDIAN;
  else
    return;

  /* try a default that should be enough */
  if (length == 0)
    length = 512;
  else if (length < 64)
    return;
  else /* the first few bytes should be enough */
    length = MIN (1024, length);

  data = gst_type_find_peek (tf, 0, length);

  if (data == NULL)
    return;

  /* skip the BOM (2 bytes for UTF-16, plus one more below for UTF-8) */
  data += 2;
  length -= 2;

  if (utf8_bom_detected) {
    /* skip last byte of the BOM */
    data++;
    length--;

    if (xml_check_first_element_from_data (data, length,
            "SmoothStreamingMedia", 20, TRUE))
      gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, MSS_MANIFEST_CAPS);
  } else {
    /* UTF-16: make the byte count even before pairing into code units */
    length = GST_ROUND_DOWN_2 (length);

    /* convert to native endian UTF-16 */
    mss_manifest_load_utf16 (utf16_ne, data, length, data_endianness);

    /* and now convert to UTF-8 */
    utf8 = g_utf16_to_utf8 (utf16_ne, length / 2, &n_read, &size, NULL);
    if (utf8 != NULL && n_read > 0) {
      if (xml_check_first_element_from_data ((const guint8 *) utf8, size,
              "SmoothStreamingMedia", 20, TRUE))
        gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, MSS_MANIFEST_CAPS);
    }
    g_free (utf8);
  }
}
+
+ /*** image/jpeg ***/
+
/* SOF markers are 0xc0-0xcf excluding DHT (0xc4), JPG (0xc8), DAC (0xcc) */
#define JPEG_MARKER_IS_START_OF_FRAME(x) \
    ((x)>=0xc0 && (x) <= 0xcf && (x)!=0xc4 && (x)!=0xc8 && (x)!=0xcc)

static GstStaticCaps jpeg_caps = GST_STATIC_CAPS ("image/jpeg");

#define JPEG_CAPS (gst_static_caps_get(&jpeg_caps))
/* Typefinder for JPEG images.
 *
 * Requires the SOI marker (0xFFD8), then walks the marker segments up to
 * 200 KiB. JFIF/Exif APP markers raise confidence to MAXIMUM; reaching a
 * start-of-frame marker also extracts width/height and the SOF variant
 * into the suggested caps.
 */
static void
jpeg_type_find (GstTypeFind * tf, gpointer unused)
{
  GstTypeFindProbability prob = GST_TYPE_FIND_POSSIBLE;
  DataScanCtx c = { 0, NULL, 0 };
  GstCaps *caps;
  guint num_markers;

  if (G_UNLIKELY (!data_scan_ctx_ensure_data (tf, &c, 2)))
    return;

  /* must start with the SOI marker 0xFFD8 */
  if (c.data[0] != 0xff || c.data[1] != 0xd8)
    return;

  num_markers = 1;
  data_scan_ctx_advance (tf, &c, 2);

  caps = gst_caps_copy (JPEG_CAPS);

  while (data_scan_ctx_ensure_data (tf, &c, 4) && c.offset < (200 * 1024)) {
    guint16 len;
    guint8 marker;

    if (c.data[0] != 0xff)
      break;

    marker = c.data[1];
    /* 0xFF fill bytes may pad between markers */
    if (G_UNLIKELY (marker == 0xff)) {
      data_scan_ctx_advance (tf, &c, 1);
      continue;
    }

    data_scan_ctx_advance (tf, &c, 2);

    /* we assume all markers we'll see before SOF have a payload length; if
     * that's not the case we'll just detect a false sync and bail out, but
     * still report POSSIBLE probability */
    len = GST_READ_UINT16_BE (c.data);

    GST_LOG ("possible JPEG marker 0x%02x (@0x%04x), segment length %u",
        marker, (guint) c.offset, len);

    if (!data_scan_ctx_ensure_data (tf, &c, len))
      break;

    if (marker == 0xc4 ||       /* DEFINE_HUFFMAN_TABLES */
        marker == 0xcc ||       /* DEFINE_ARITHMETIC_CONDITIONING */
        marker == 0xdb ||       /* DEFINE_QUANTIZATION_TABLES */
        marker == 0xdd ||       /* DEFINE_RESTART_INTERVAL */
        marker == 0xfe) {       /* COMMENT */
      data_scan_ctx_advance (tf, &c, len);
      ++num_markers;
    } else if (marker == 0xe0 && len >= (2 + 4) &&      /* APP0 */
        data_scan_ctx_memcmp (tf, &c, 2, "JFIF", 4)) {
      GST_LOG ("found JFIF tag");
      prob = GST_TYPE_FIND_MAXIMUM;
      data_scan_ctx_advance (tf, &c, len);
      ++num_markers;
      /* we continue until we find a start of frame marker */
    } else if (marker == 0xe1 && len >= (2 + 4) &&      /* APP1 */
        data_scan_ctx_memcmp (tf, &c, 2, "Exif", 4)) {
      GST_LOG ("found Exif tag");
      prob = GST_TYPE_FIND_MAXIMUM;
      data_scan_ctx_advance (tf, &c, len);
      ++num_markers;
      /* we continue until we find a start of frame marker */
    } else if (marker >= 0xe0 && marker <= 0xef) {      /* APPn */
      data_scan_ctx_advance (tf, &c, len);
      ++num_markers;
    } else if (JPEG_MARKER_IS_START_OF_FRAME (marker) && len >= (2 + 8)) {
      int h, w;

      /* SOF payload: length(2) precision(1) height(2) width(2) ... */
      h = GST_READ_UINT16_BE (c.data + 2 + 1);
      w = GST_READ_UINT16_BE (c.data + 2 + 1 + 2);
      if (h == 0 || w == 0) {
        GST_WARNING ("bad width %u and/or height %u in SOF header", w, h);
        break;
      }

      GST_LOG ("SOF at offset %" G_GUINT64_FORMAT ", num_markers=%d, "
          "WxH=%dx%d", c.offset - 2, num_markers, w, h);

      if (num_markers >= 5 || prob == GST_TYPE_FIND_MAXIMUM)
        prob = GST_TYPE_FIND_MAXIMUM;
      else
        prob = GST_TYPE_FIND_LIKELY;

      gst_caps_set_simple (caps, "width", G_TYPE_INT, w,
          "height", G_TYPE_INT, h, "sof-marker", G_TYPE_INT, marker & 0xf,
          NULL);

      break;
    } else {
      GST_WARNING ("bad length or unexpected JPEG marker 0xff 0x%02x", marker);
      break;
    }
  }

  gst_type_find_suggest (tf, prob, caps);
  gst_caps_unref (caps);
}
+
+ /*** image/bmp ***/
+
static GstStaticCaps bmp_caps = GST_STATIC_CAPS ("image/bmp");

#define BMP_CAPS (gst_static_caps_get(&bmp_caps))
/* Typefinder for BMP images: checks the 'BM' magic, reserved field and
 * pixel-data offset, then reads the DIB header (several known sizes) and
 * sanity-checks width/height/planes/bpp before suggesting caps with the
 * extracted dimensions. */
static void
bmp_type_find (GstTypeFind * tf, gpointer unused)
{
  DataScanCtx c = { 0, NULL, 0 };
  guint32 struct_size, w, h, planes, bpp;

  /* 54 = 14-byte file header + 40-byte BITMAPINFOHEADER */
  if (G_UNLIKELY (!data_scan_ctx_ensure_data (tf, &c, 54)))
    return;

  if (c.data[0] != 'B' || c.data[1] != 'M')
    return;

  /* skip marker + size */
  data_scan_ctx_advance (tf, &c, 2 + 4);

  /* reserved, must be 0 */
  if (c.data[0] != 0 || c.data[1] != 0 || c.data[2] != 0 || c.data[3] != 0)
    return;

  data_scan_ctx_advance (tf, &c, 2 + 2);

  /* offset to start of image data in bytes (check for sanity) */
  GST_LOG ("offset=%u", GST_READ_UINT32_LE (c.data));
  if (GST_READ_UINT32_LE (c.data) > (10 * 1024 * 1024))
    return;

  struct_size = GST_READ_UINT32_LE (c.data + 4);
  GST_LOG ("struct_size=%u", struct_size);

  data_scan_ctx_advance (tf, &c, 4 + 4);

  /* 0x0C = BITMAPCOREHEADER (16-bit fields); the others are
   * BITMAPINFOHEADER and its V2-V5 extensions (32-bit fields) */
  if (struct_size == 0x0C) {
    w = GST_READ_UINT16_LE (c.data);
    h = GST_READ_UINT16_LE (c.data + 2);
    planes = GST_READ_UINT16_LE (c.data + 2 + 2);
    bpp = GST_READ_UINT16_LE (c.data + 2 + 2 + 2);
  } else if (struct_size == 40 || struct_size == 64 || struct_size == 108
      || struct_size == 124 || struct_size == 0xF0) {
    w = GST_READ_UINT32_LE (c.data);
    h = GST_READ_UINT32_LE (c.data + 4);
    planes = GST_READ_UINT16_LE (c.data + 4 + 4);
    bpp = GST_READ_UINT16_LE (c.data + 4 + 4 + 2);
  } else {
    return;
  }

  /* image sizes sanity check */
  GST_LOG ("w=%u, h=%u, planes=%u, bpp=%u", w, h, planes, bpp);
  if (w == 0 || w > 0xfffff || h == 0 || h > 0xfffff || planes != 1 ||
      (bpp != 1 && bpp != 4 && bpp != 8 && bpp != 16 && bpp != 24 && bpp != 32))
    return;

  gst_type_find_suggest_simple (tf, GST_TYPE_FIND_MAXIMUM, "image/bmp",
      "width", G_TYPE_INT, w, "height", G_TYPE_INT, h, "bpp", G_TYPE_INT, bpp,
      NULL);
}
+
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ /*** image/tiff ***/
+ static GstStaticCaps tiff_caps = GST_STATIC_CAPS ("image/tiff, "
+ "endianness = (int) { BIG_ENDIAN, LITTLE_ENDIAN }");
+ #define TIFF_CAPS (gst_static_caps_get(&tiff_caps))
+ static GstStaticCaps tiff_be_caps = GST_STATIC_CAPS ("image/tiff, "
+ "endianness = (int) BIG_ENDIAN");
+ #define TIFF_BE_CAPS (gst_static_caps_get(&tiff_be_caps))
+ static GstStaticCaps tiff_le_caps = GST_STATIC_CAPS ("image/tiff, "
+ "endianness = (int) LITTLE_ENDIAN");
+ #define TIFF_LE_CAPS (gst_static_caps_get(&tiff_le_caps))
+ static void
+ tiff_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 8);
+ guint8 le_header[4] = { 0x49, 0x49, 0x2A, 0x00 };
+ guint8 be_header[4] = { 0x4D, 0x4D, 0x00, 0x2A };
+
+ if (data) {
+ if (memcmp (data, le_header, 4) == 0) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, TIFF_LE_CAPS);
+ } else if (memcmp (data, be_header, 4) == 0) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, TIFF_BE_CAPS);
+ }
+ }
+ }
++#endif
+
+ /*** image/x-exr ***/
+ static GstStaticCaps exr_caps = GST_STATIC_CAPS ("image/x-exr");
+ #define EXR_CAPS (gst_static_caps_get(&exr_caps))
+ static void
+ exr_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 8);
+
+ if (data) {
+ guint32 flags;
+
+ if (GST_READ_UINT32_LE (data) != 0x01312f76)
+ return;
+
+ flags = GST_READ_UINT32_LE (data + 4);
+ if ((flags & 0xff) != 1 && (flags & 0xff) != 2)
+ return;
+
+ /* If bit 9 is set, bit 11 and 12 must be 0 */
+ if ((flags & 0x200) && (flags & 0x1800))
+ return;
+
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, EXR_CAPS);
+ }
+ }
+
+
+ /*** PNM ***/
+
+ static GstStaticCaps pnm_caps = GST_STATIC_CAPS ("image/x-portable-bitmap; "
+ "image/x-portable-graymap; image/x-portable-pixmap; "
+ "image/x-portable-anymap");
+
+ #define PNM_CAPS (gst_static_caps_get(&pnm_caps))
+
+ #define IS_PNM_WHITESPACE(c) \
+ ((c) == ' ' || (c) == '\r' || (c) == '\n' || (c) == 't')
+
+ static void
+ pnm_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const gchar *media_type = NULL;
+ DataScanCtx c = { 0, NULL, 0 };
+ guint h = 0, w = 0;
+
+ if (G_UNLIKELY (!data_scan_ctx_ensure_data (tf, &c, 16)))
+ return;
+
+ /* see http://en.wikipedia.org/wiki/Netpbm_format */
+ if (c.data[0] != 'P' || c.data[1] < '1' || c.data[1] > '7' ||
+ !IS_PNM_WHITESPACE (c.data[2]) ||
+ (c.data[3] != '#' && c.data[3] < '0' && c.data[3] > '9'))
+ return;
+
+ switch (c.data[1]) {
+ case '1':
+ media_type = "image/x-portable-bitmap"; /* ASCII */
+ break;
+ case '2':
+ media_type = "image/x-portable-graymap"; /* ASCII */
+ break;
+ case '3':
+ media_type = "image/x-portable-pixmap"; /* ASCII */
+ break;
+ case '4':
+ media_type = "image/x-portable-bitmap"; /* Raw */
+ break;
+ case '5':
+ media_type = "image/x-portable-graymap"; /* Raw */
+ break;
+ case '6':
+ media_type = "image/x-portable-pixmap"; /* Raw */
+ break;
+ case '7':
+ media_type = "image/x-portable-anymap";
+ break;
+ default:
+ g_return_if_reached ();
+ }
+
+ /* try to extract width and height as well */
+ if (c.data[1] != '7') {
+ gchar s[64] = { 0, }
+ , sep1, sep2;
+
+ /* need to skip any comment lines first */
+ data_scan_ctx_advance (tf, &c, 3);
+
+ if (!data_scan_ctx_ensure_data (tf, &c, 1))
+ return;
+
+ while (c.data[0] == '#') { /* we know there's still data left */
+ data_scan_ctx_advance (tf, &c, 1);
+ if (!data_scan_ctx_ensure_data (tf, &c, 1))
+ return;
+
+ while (c.data[0] != '\n' && c.data[0] != '\r') {
+ data_scan_ctx_advance (tf, &c, 1);
+ if (!data_scan_ctx_ensure_data (tf, &c, 1))
+ return;
+ }
+ data_scan_ctx_advance (tf, &c, 1);
+ GST_LOG ("skipped comment line in PNM header");
+ if (!data_scan_ctx_ensure_data (tf, &c, 1))
+ return;
+ }
+
+ if (!data_scan_ctx_ensure_data (tf, &c, 32) &&
+ !data_scan_ctx_ensure_data (tf, &c, 4)) {
+ return;
+ }
+
+ /* need to NUL-terminate data for sscanf */
+ memcpy (s, c.data, MIN (sizeof (s) - 1, c.size));
+ if (sscanf (s, "%u%c%u%c", &w, &sep1, &h, &sep2) == 4 &&
+ IS_PNM_WHITESPACE (sep1) && IS_PNM_WHITESPACE (sep2) &&
+ w > 0 && w < G_MAXINT && h > 0 && h < G_MAXINT) {
+ GST_LOG ("extracted PNM width and height: %dx%d", w, h);
+ } else {
+ w = 0;
+ h = 0;
+ }
+ } else {
+ /* FIXME: extract width + height for anymaps too */
+ }
+
+ if (w > 0 && h > 0) {
+ gst_type_find_suggest_simple (tf, GST_TYPE_FIND_MAXIMUM, media_type,
+ "width", G_TYPE_INT, w, "height", G_TYPE_INT, h, NULL);
+ } else {
+ gst_type_find_suggest_empty_simple (tf, GST_TYPE_FIND_LIKELY, media_type);
+ }
+ }
+
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ static GstStaticCaps sds_caps = GST_STATIC_CAPS ("audio/x-sds");
+
+ #define SDS_CAPS (gst_static_caps_get(&sds_caps))
+ static void
+ sds_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 4);
+ guint8 mask[4] = { 0xFF, 0xFF, 0x80, 0xFF };
+ guint8 match[4] = { 0xF0, 0x7E, 0, 0x01 };
+ gint x;
+
+ if (data) {
+ for (x = 0; x < 4; x++) {
+ if ((data[x] & mask[x]) != match[x]) {
+ return;
+ }
+ }
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, SDS_CAPS);
+ }
+ }
+
+ static GstStaticCaps ircam_caps = GST_STATIC_CAPS ("audio/x-ircam");
+
+ #define IRCAM_CAPS (gst_static_caps_get(&ircam_caps))
+ static void
+ ircam_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 4);
+ guint8 mask[4] = { 0xFF, 0xFF, 0xF8, 0xFF };
+ guint8 match[4] = { 0x64, 0xA3, 0x00, 0x00 };
+ gint x;
+ gboolean matched = TRUE;
+
+ if (!data) {
+ return;
+ }
+ for (x = 0; x < 4; x++) {
+ if ((data[x] & mask[x]) != match[x]) {
+ matched = FALSE;
+ }
+ }
+ if (matched) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, IRCAM_CAPS);
+ return;
+ }
+ /* now try the reverse version */
+ matched = TRUE;
+ for (x = 0; x < 4; x++) {
+ if ((data[x] & mask[3 - x]) != match[3 - x]) {
+ matched = FALSE;
+ }
+ }
+ }
-
++#endif
+ /*** Matroska/WebM ***/
+
/* EBML/Matroska element IDs used during typefinding */
#define EBML_HEADER 0x1A45DFA3
#define EBML_VERSION 0x4286
#define EBML_DOCTYPE 0x4282
#define EBML_DOCTYPE_VERSION 0x4287
#define MATROSKA_SEGMENT 0x18538067
#define MATROSKA_CLUSTER 0x1F43B675
#define MATROSKA_TRACKS 0x1654AE6B
#define MATROSKA_TRACK_ENTRY 0xAE
#define MATROSKA_TRACK_TYPE 0x83
#define MATROSKA_STEREO_MODE 0x53B8

/* maximum number of bytes inspected before giving up */
#define EBML_MAX_LEN (2 * 1024 * 1024)

/* document type read from the EBML DocType element */
typedef enum
{
  EBML_DOCTYPE_UNKNOWN = 0,
  EBML_DOCTYPE_MATROSKA,
  EBML_DOCTYPE_WEBM
} GstEbmlDocType;

/* accumulated information about the streams found while parsing */
typedef struct
{
  GstEbmlDocType doctype;       /* matroska / webm / unknown */
  guint audio;                  /* number of audio tracks seen */
  guint video;                  /* number of video tracks seen */
  guint other;                  /* tracks that are neither audio nor video */
  guint video_stereo;           /* tracks carrying a StereoMode element */
  guint chunks;                 /* total elements parsed */
  guint tracks_ok;              /* if we've seen and fully parsed the TRACKS element */
} GstMatroskaInfo;
+
/* Parse one EBML element header (variable-length ID + variable-length size)
 * at the current scan position.
 *
 * On success, advances @c past the header, stores the element ID in @id and
 * its payload size in @size, and returns the total header length in bytes.
 * Returns 0 on malformed input or when the header would exceed @max_size.
 * An all-ones ("unknown") size is only tolerated for the SEGMENT element,
 * where it is mapped to G_MAXUINT64.
 */
static inline guint
ebml_read_chunk_header (GstTypeFind * tf, DataScanCtx * c, guint max_size,
    guint32 * id, guint64 * size)
{
  guint64 mask;
  guint msbit_set, i, len, id_len;

  if (c->size < 12 || max_size < 1)
    return 0;

  /* element ID: the number of leading zero bits in the first byte
   * determines the ID length (1-4 bytes) */
  *id = c->data[0];
  if ((c->data[0] & 0x80) == 0x80) {
    id_len = 1;
  } else if ((c->data[0] & 0xC0) == 0x40) {
    id_len = 2;
  } else if ((c->data[0] & 0xE0) == 0x20) {
    id_len = 3;
  } else if ((c->data[0] & 0xF0) == 0x10) {
    id_len = 4;
  } else {
    return 0;
  }

  if (max_size < id_len)
    return 0;

  for (i = 1; i < id_len; ++i) {
    *id = (*id << 8) | c->data[i];
  }

  data_scan_ctx_advance (tf, c, id_len);
  max_size -= id_len;

  /* size: same variable-length scheme; a zero first byte is invalid */
  if (max_size < 1 || c->data[0] == 0)
    return 0;

  /* position of the length-descriptor bit gives the size-field length */
  msbit_set = g_bit_nth_msf (c->data[0], 8);
  mask = ((1 << msbit_set) - 1);
  *size = c->data[0] & mask;
  len = 7 - msbit_set;

  if (max_size < 1 + len)
    return 0;
  for (i = 0; i < len; ++i) {
    mask = (mask << 8) | 0xff;
    *size = (*size << 8) | c->data[1 + i];
  }

  data_scan_ctx_advance (tf, c, 1 + len);

  /* undefined/unknown size? (all bits 1) */
  if (*size == mask) {
    /* allow unknown size for SEGMENT chunk, bail out otherwise */
    if (*id == MATROSKA_SEGMENT)
      *size = G_MAXUINT64;
    else
      return 0;
  }

  return id_len + (1 + len);
}
+
+ static gboolean
+ ebml_parse_chunk (GstTypeFind * tf, DataScanCtx * ctx, guint32 chunk_id,
+ guint chunk_size, GstMatroskaInfo * info, guint depth)
+ { /* FIXME: make sure input size is clipped to 32 bit */
+ static const gchar SPACES[] = " ";
+ DataScanCtx c = *ctx;
+ guint64 element_size = 0;
+ guint32 id, hdr_len;
+
+ if (depth >= 8) /* keep SPACES large enough for depth */
+ return FALSE;
+
+ while (chunk_size > 0) {
+ if (c.offset > EBML_MAX_LEN || !data_scan_ctx_ensure_data (tf, &c, 64))
+ return FALSE;
+
+ hdr_len = ebml_read_chunk_header (tf, &c, chunk_size, &id, &element_size);
+ if (hdr_len == 0)
+ return FALSE;
+
+ g_assert (hdr_len <= chunk_size);
+ chunk_size -= hdr_len;
+
+ if (element_size > chunk_size)
+ return FALSE;
+
+ GST_DEBUG ("%s %08x, size %" G_GUINT64_FORMAT " / %" G_GUINT64_FORMAT,
+ SPACES + sizeof (SPACES) - 1 - (2 * depth), id, element_size,
+ hdr_len + element_size);
+
+ if (element_size >= G_MAXUINT32) {
+ GST_DEBUG ("Chunk too big for typefinding");
+ return FALSE;
+ }
+
+ if (!data_scan_ctx_ensure_data (tf, &c, element_size)) {
+ GST_DEBUG ("not enough data");
+ return FALSE;
+ }
+
+ switch (id) {
+ case EBML_DOCTYPE:
+ if (element_size >= 8 && memcmp (c.data, "matroska", 8) == 0)
+ info->doctype = EBML_DOCTYPE_MATROSKA;
+ else if (element_size >= 4 && memcmp (c.data, "webm", 4) == 0)
+ info->doctype = EBML_DOCTYPE_WEBM;
+ break;
+ case MATROSKA_SEGMENT:
+ GST_LOG ("parsing segment");
+ ebml_parse_chunk (tf, &c, id, element_size, info, depth + 1);
+ GST_LOG ("parsed segment, done");
+ return FALSE;
+ case MATROSKA_TRACKS:
+ GST_LOG ("parsing tracks");
+ info->tracks_ok =
+ ebml_parse_chunk (tf, &c, id, element_size, info, depth + 1);
+ GST_LOG ("parsed tracks: %s, done (after %" G_GUINT64_FORMAT " bytes)",
+ info->tracks_ok ? "ok" : "FAIL", c.offset + element_size);
+ return FALSE;
+ case MATROSKA_TRACK_ENTRY:
+ GST_LOG ("parsing track entry");
+ if (!ebml_parse_chunk (tf, &c, id, element_size, info, depth + 1))
+ return FALSE;
+ break;
+ case MATROSKA_TRACK_TYPE:{
+ guint type = 0, i;
+
+ /* is supposed to always be 1-byte, but not everyone's following that */
+ for (i = 0; i < element_size; ++i)
+ type = (type << 8) | c.data[i];
+
+ GST_DEBUG ("%s track type %u",
+ SPACES + sizeof (SPACES) - 1 - (2 * depth), type);
+
+ if (type == 1)
+ ++info->video;
+ else if (c.data[0] == 2)
+ ++info->audio;
+ else
+ ++info->other;
+ break;
+ }
+ case MATROSKA_STEREO_MODE:
+ ++info->video_stereo;
+ break;
+ case MATROSKA_CLUSTER:
+ GST_WARNING ("cluster, bailing out (should've found tracks by now)");
+ return FALSE;
+ default:
+ break;
+ }
+ data_scan_ctx_advance (tf, &c, element_size);
+ chunk_size -= element_size;
+ ++info->chunks;
+ }
+
+ return TRUE;
+ }
+
static GstStaticCaps matroska_caps = GST_STATIC_CAPS ("video/x-matroska");

#define MATROSKA_CAPS (gst_static_caps_get(&matroska_caps))
/* Typefinder for Matroska/WebM.
 *
 * Requires the EBML magic at offset 0, then walks top-level elements
 * (collecting doctype and track counts via ebml_parse_chunk) and suggests
 * audio/video x-matroska, -3d or webm caps accordingly. Unknown doctype
 * lowers the probability to LIKELY.
 */
static void
matroska_type_find (GstTypeFind * tf, gpointer unused)
{
  GstTypeFindProbability prob;
  GstMatroskaInfo info = { 0, };
  const gchar *type_name;
  DataScanCtx c = { 0, NULL, 0 };
  gboolean is_audio;
  guint64 size;
  guint32 id, hdr_len;

  if (!data_scan_ctx_ensure_data (tf, &c, 64))
    return;

  if (GST_READ_UINT32_BE (c.data) != EBML_HEADER)
    return;

  while (c.offset < EBML_MAX_LEN && data_scan_ctx_ensure_data (tf, &c, 64)) {
    hdr_len = ebml_read_chunk_header (tf, &c, c.size, &id, &size);
    if (hdr_len == 0)
      return;

    GST_INFO ("=== top-level chunk %08x, size %" G_GUINT64_FORMAT
        " / %" G_GUINT64_FORMAT, id, size, size + hdr_len);

    /* NOTE(review): 64-bit size is passed into a guint parameter here —
     * see the FIXME on ebml_parse_chunk about clipping to 32 bit */
    if (!ebml_parse_chunk (tf, &c, id, size, &info, 0))
      break;
    data_scan_ctx_advance (tf, &c, size);
    GST_INFO ("=== done with chunk %08x", id);
    /* everything interesting lives inside SEGMENT; stop after it */
    if (id == MATROSKA_SEGMENT)
      break;
  }

  GST_INFO ("audio=%u video=%u other=%u chunks=%u doctype=%d all_tracks=%d",
      info.audio, info.video, info.other, info.chunks, info.doctype,
      info.tracks_ok);

  /* perhaps we should bail out if tracks_ok is FALSE and wait for more data?
   * (we would need new API to signal this properly and prevent other
   * typefinders from taking over the decision then) */
  is_audio = (info.audio > 0 && info.video == 0 && info.other == 0);

  if (info.doctype == EBML_DOCTYPE_WEBM) {
    type_name = (is_audio) ? "audio/webm" : "video/webm";
  } else if (info.video > 0 && info.video_stereo) {
    type_name = "video/x-matroska-3d";
  } else {
    type_name = (is_audio) ? "audio/x-matroska" : "video/x-matroska";
  }

  if (info.doctype == EBML_DOCTYPE_UNKNOWN)
    prob = GST_TYPE_FIND_LIKELY;
  else
    prob = GST_TYPE_FIND_MAXIMUM;

  gst_type_find_suggest_empty_simple (tf, prob, type_name);
}
+
+ /*** application/mxf ***/
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ static GstStaticCaps mxf_caps = GST_STATIC_CAPS ("application/mxf");
+
+ #define MXF_MAX_PROBE_LENGTH (1024 * 64)
+ #define MXF_CAPS (gst_static_caps_get(&mxf_caps))
+
+ /*
+ * MXF files start with a header partition pack key of 16 bytes which is defined
+ * at SMPTE-377M 6.1. Before this there can be up to 64K of run-in which _must_
+ * not contain the partition pack key.
+ */
+ static void
+ mxf_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ static const guint8 partition_pack_key[] =
+ { 0x06, 0x0e, 0x2b, 0x34, 0x02, 0x05, 0x01, 0x01, 0x0d, 0x01, 0x02, 0x01,
+ 0x01
+ };
+ DataScanCtx c = { 0, NULL, 0 };
+
+ while (c.offset <= MXF_MAX_PROBE_LENGTH) {
+ guint i;
+ if (G_UNLIKELY (!data_scan_ctx_ensure_data (tf, &c, 1024)))
+ break;
+
+ /* look over in chunks of 1kbytes to avoid too much overhead */
+
+ for (i = 0; i < 1024 - 16; i++) {
+ /* Check first byte before calling more expensive memcmp function */
+ if (G_UNLIKELY (c.data[i] == 0x06
+ && memcmp (c.data + i, partition_pack_key, 13) == 0)) {
+ /* Header partition pack? */
+ if (c.data[i + 13] != 0x02)
+ goto advance;
+
+ /* Partition status */
+ if (c.data[i + 14] >= 0x05)
+ goto advance;
+
+ /* Reserved, must be 0x00 */
+ if (c.data[i + 15] != 0x00)
+ goto advance;
+
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, MXF_CAPS);
+ return;
+ }
+ }
+
+ advance:
+ data_scan_ctx_advance (tf, &c, 1024 - 16);
+ }
+ }
+
+ /*** video/x-dv ***/
+
+ static GstStaticCaps dv_caps = GST_STATIC_CAPS ("video/x-dv, "
+ "systemstream = (boolean) true");
+ #define DV_CAPS (gst_static_caps_get(&dv_caps))
+ static void
+ dv_type_find (GstTypeFind * tf, gpointer private)
+ {
+ const guint8 *data;
+
+ data = gst_type_find_peek (tf, 0, 5);
+
+ /* check for DIF and DV flag */
+ if (data && (data[0] == 0x1f) && (data[1] == 0x07) && (data[2] == 0x00)) {
+ const gchar *format;
+
+ if (data[3] & 0x80) {
+ format = "PAL";
+ } else {
+ format = "NTSC";
+ }
+
+ gst_type_find_suggest_simple (tf, GST_TYPE_FIND_MAXIMUM, "video/x-dv",
+ "systemstream", G_TYPE_BOOLEAN, TRUE,
+ "format", G_TYPE_STRING, format, NULL);
+ }
+ }
-
++#endif /* !TIZEN_FEATURE_DISABLE_MIME_TYPES */
+
+ /*** Ogg variants ***/
+ static GstStaticCaps ogg_caps =
+ GST_STATIC_CAPS ("application/ogg;video/ogg;audio/ogg;application/kate");
+
+ #define OGG_CAPS (gst_static_caps_get(&ogg_caps))
+
+ typedef enum
+ {
+ OGG_AUDIO = 0,
+ OGG_VIDEO,
+ OGG_KATE,
+ OGG_OTHER,
+ OGG_SKELETON,
+ OGG_ANNODEX,
+ OGG_NUM
+ } GstOggStreamType;
+
+ static void
+ ogganx_type_find (GstTypeFind * tf, gpointer private)
+ {
+ const gchar *media_type;
+ DataScanCtx c = { 0, NULL, 0 };
+ guint ogg_syncs = 0;
+ guint hdr_count[OGG_NUM] = { 0, };
+ static const struct
+ {
+ const gchar marker[10];
+ guint8 marker_size;
+ GstOggStreamType stream_type;
+ } markers[] = {
+ {
+ "\001vorbis", 7, OGG_AUDIO}, {
+ "\200theora", 7, OGG_VIDEO}, {
+ "fLaC", 4, OGG_AUDIO}, {
+ "\177FLAC", 5, OGG_AUDIO}, {
+ "Speex", 5, OGG_AUDIO}, {
+ "CMML\0\0\0\0", 8, OGG_OTHER}, {
+ "PCM ", 8, OGG_AUDIO}, {
+ "Annodex", 7, OGG_ANNODEX}, {
+ "fishead", 7, OGG_SKELETON}, {
+ "AnxData", 7, OGG_ANNODEX}, {
+ "CELT ", 8, OGG_AUDIO}, {
+ "\200kate\0\0\0", 8, OGG_KATE}, {
+ "BBCD\0", 5, OGG_VIDEO}, {
+ "OVP80\1\1", 7, OGG_VIDEO}, {
+ "OpusHead", 8, OGG_AUDIO}, {
+ "\001audio\0\0\0", 9, OGG_AUDIO}, {
+ "\001video\0\0\0", 9, OGG_VIDEO}, {
+ "\001text\0\0\0", 9, OGG_OTHER}
+ };
+
+ while (c.offset < 4096 && data_scan_ctx_ensure_data (tf, &c, 64)) {
+ guint size, i;
+
+ if (memcmp (c.data, "OggS", 5) != 0)
+ break;
+
+ ++ogg_syncs;
+
+ /* check if BOS */
+ if (c.data[5] != 0x02)
+ break;
+
+ /* headers should only have one segment */
+ if (c.data[26] != 1)
+ break;
+
+ size = c.data[27];
+ if (size < 8)
+ break;
+
+ data_scan_ctx_advance (tf, &c, 28);
+
+ if (!data_scan_ctx_ensure_data (tf, &c, MAX (size, 8)))
+ break;
+
+ for (i = 0; i < G_N_ELEMENTS (markers); ++i) {
+ if (memcmp (c.data, markers[i].marker, markers[i].marker_size) == 0) {
+ ++hdr_count[markers[i].stream_type];
+ break;
+ }
+ }
+
+ if (i == G_N_ELEMENTS (markers)) {
+ GST_MEMDUMP ("unknown Ogg stream marker", c.data, size);
+ ++hdr_count[OGG_OTHER];
+ }
+
+ data_scan_ctx_advance (tf, &c, size);
+ }
+
+ if (ogg_syncs == 0)
+ return;
+
+ /* We don't bother with annodex types. FIXME: what about XSPF? */
+ if (hdr_count[OGG_VIDEO] > 0) {
+ media_type = "video/ogg";
+ } else if (hdr_count[OGG_AUDIO] > 0) {
+ media_type = "audio/ogg";
+ } else if (hdr_count[OGG_KATE] > 0 && hdr_count[OGG_OTHER] == 0) {
+ media_type = "application/kate";
+ } else {
+ media_type = "application/ogg";
+ }
+
+ GST_INFO ("found %s (audio:%u, video:%u, annodex:%u, skeleton:%u, other:%u)",
+ media_type, hdr_count[OGG_AUDIO], hdr_count[OGG_VIDEO],
+ hdr_count[OGG_ANNODEX], hdr_count[OGG_SKELETON], hdr_count[OGG_OTHER]);
+
+ gst_type_find_suggest_empty_simple (tf, GST_TYPE_FIND_MAXIMUM, media_type);
+ }
+
+ /*** audio/x-vorbis ***/
+ static GstStaticCaps vorbis_caps = GST_STATIC_CAPS ("audio/x-vorbis");
+
+ #define VORBIS_CAPS (gst_static_caps_get(&vorbis_caps))
+ static void
+ vorbis_type_find (GstTypeFind * tf, gpointer private)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 30);
+
+ if (data) {
+ guint blocksize_0;
+ guint blocksize_1;
+
+ /* 1 byte packet type (identification=0x01)
+ 6 byte string "vorbis"
+ 4 byte vorbis version */
+ if (memcmp (data, "\001vorbis\000\000\000\000", 11) != 0)
+ return;
+ data += 11;
+ /* 1 byte channels must be != 0 */
+ if (data[0] == 0)
+ return;
+ data++;
+ /* 4 byte samplerate must be != 0 */
+ if (GST_READ_UINT32_LE (data) == 0)
+ return;
+ data += 16;
+ /* blocksize checks */
+ blocksize_0 = data[0] & 0x0F;
+ blocksize_1 = (data[0] & 0xF0) >> 4;
+ if (blocksize_0 > blocksize_1)
+ return;
+ if (blocksize_0 < 6 || blocksize_0 > 13)
+ return;
+ if (blocksize_1 < 6 || blocksize_1 > 13)
+ return;
+ data++;
+ /* framing bit */
+ if ((data[0] & 0x01) != 1)
+ return;
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, VORBIS_CAPS);
+ }
+ }
+
+ /*** video/x-theora ***/
+
+ static GstStaticCaps theora_caps = GST_STATIC_CAPS ("video/x-theora");
+
+ #define THEORA_CAPS (gst_static_caps_get(&theora_caps))
+ static void
+ theora_type_find (GstTypeFind * tf, gpointer private)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 7); //42);
+
+ if (data) {
+ if (data[0] != 0x80)
+ return;
+ if (memcmp (&data[1], "theora", 6) != 0)
+ return;
+ /* FIXME: make this more reliable when specs are out */
+
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, THEORA_CAPS);
+ }
+ }
+
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ /*** kate ***/
+ static void
+ kate_type_find (GstTypeFind * tf, gpointer private)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 64);
+ gchar category[16] = { 0, };
+
+ if (G_UNLIKELY (data == NULL))
+ return;
+
+ /* see: http://wiki.xiph.org/index.php/OggKate#Format_specification */
+ if (G_LIKELY (memcmp (data, "\200kate\0\0\0", 8) != 0))
+ return;
+
+ /* make sure we always have a NUL-terminated string */
+ memcpy (category, data + 48, 15);
+ GST_LOG ("kate category: %s", category);
+ /* canonical categories for subtitles: subtitles, spu-subtitles, SUB, K-SPU */
+ if (strcmp (category, "subtitles") == 0 || strcmp (category, "SUB") == 0 ||
+ strcmp (category, "spu-subtitles") == 0 ||
+ strcmp (category, "K-SPU") == 0) {
+ gst_type_find_suggest_empty_simple (tf, GST_TYPE_FIND_MAXIMUM,
+ "subtitle/x-kate");
+ } else {
+ gst_type_find_suggest_empty_simple (tf, GST_TYPE_FIND_MAXIMUM,
+ "application/x-kate");
+ }
+ }
++#endif /* !TIZEN_FEATURE_DISABLE_MIME_TYPES */
+
+ /*** WEBVTTT subtitles ***/
+ static GstStaticCaps webvtt_caps =
+ GST_STATIC_CAPS ("application/x-subtitle-vtt, parsed=(boolean)false");
+ #define WEBVTT_CAPS (gst_static_caps_get(&webvtt_caps))
+
+ static void
+ webvtt_type_find (GstTypeFind * tf, gpointer private)
+ {
+ const guint8 *data;
+
+ data = gst_type_find_peek (tf, 0, 10);
+
+ if (data == NULL)
+ return;
+
+ /* there might be a UTF-8 BOM at the beginning */
+ if (memcmp (data, "WEBVTT", 6) != 0 && memcmp (data + 3, "WEBVTT", 6) != 0) {
+ return;
+ }
+
+ if (data[0] != 'W') {
+ if (data[0] != 0xef || data[1] != 0xbb || data[2] != 0xbf)
+ return; /* Not a UTF-8 BOM */
+ data += 3;
+ }
+
+ /* After the WEBVTT magic must be one of these chars:
+ * 0x20 (space), 0x9 (tab), 0xa (LF) or 0xd (CR) */
+ if (data[6] != 0x20 && data[6] != 0x9 && data[6] != 0xa && data[6] != 0xd) {
+ return;
+ }
+
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, WEBVTT_CAPS);
+ }
+
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ /*** application/x-ogm-video or audio***/
+
+ static GstStaticCaps ogmvideo_caps =
+ GST_STATIC_CAPS ("application/x-ogm-video");
+ #define OGMVIDEO_CAPS (gst_static_caps_get(&ogmvideo_caps))
+ static void
+ ogmvideo_type_find (GstTypeFind * tf, gpointer private)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 9);
+
+ if (data) {
+ if (memcmp (data, "\001video\000\000\000", 9) != 0)
+ return;
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, OGMVIDEO_CAPS);
+ }
+ }
+
+ static GstStaticCaps ogmaudio_caps =
+ GST_STATIC_CAPS ("application/x-ogm-audio");
+ #define OGMAUDIO_CAPS (gst_static_caps_get(&ogmaudio_caps))
+ static void
+ ogmaudio_type_find (GstTypeFind * tf, gpointer private)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 9);
+
+ if (data) {
+ if (memcmp (data, "\001audio\000\000\000", 9) != 0)
+ return;
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, OGMAUDIO_CAPS);
+ }
+ }
+
+ static GstStaticCaps ogmtext_caps = GST_STATIC_CAPS ("application/x-ogm-text");
+
+ #define OGMTEXT_CAPS (gst_static_caps_get(&ogmtext_caps))
+ static void
+ ogmtext_type_find (GstTypeFind * tf, gpointer private)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 9);
+
+ if (data) {
+ if (memcmp (data, "\001text\000\000\000\000", 9) != 0)
+ return;
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, OGMTEXT_CAPS);
+ }
+ }
+
+ /*** audio/x-speex ***/
+
+ static GstStaticCaps speex_caps = GST_STATIC_CAPS ("audio/x-speex");
+
+ #define SPEEX_CAPS (gst_static_caps_get(&speex_caps))
+ static void
+ speex_type_find (GstTypeFind * tf, gpointer private)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 80);
+
+ if (data) {
+ /* 8 byte string "Speex "
+ 24 byte speex version string + int */
+ if (memcmp (data, "Speex ", 8) != 0)
+ return;
+ data += 32;
+
+ /* 4 byte header size >= 80 */
+ if (GST_READ_UINT32_LE (data) < 80)
+ return;
+ data += 4;
+
+ /* 4 byte sample rate <= 48000 */
+ if (GST_READ_UINT32_LE (data) > 48000)
+ return;
+ data += 4;
+
+ /* currently there are only 3 speex modes. */
+ if (GST_READ_UINT32_LE (data) > 3)
+ return;
+ data += 12;
+
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, SPEEX_CAPS);
+ }
+ }
+
+ /*** audio/x-celt ***/
+
+ static GstStaticCaps celt_caps = GST_STATIC_CAPS ("audio/x-celt");
+
+ #define CELT_CAPS (gst_static_caps_get(&celt_caps))
+ static void
+ celt_type_find (GstTypeFind * tf, gpointer private)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 8);
+
+ if (data) {
+ /* 8 byte string "CELT " */
+ if (memcmp (data, "CELT ", 8) != 0)
+ return;
+
+ /* TODO: Check other values of the CELT header */
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, CELT_CAPS);
+ }
+ }
+
+ /*** application/x-ogg-skeleton ***/
+ static GstStaticCaps ogg_skeleton_caps =
+ GST_STATIC_CAPS ("application/x-ogg-skeleton, parsed=(boolean)FALSE");
+ #define OGG_SKELETON_CAPS (gst_static_caps_get(&ogg_skeleton_caps))
+ static void
+ oggskel_type_find (GstTypeFind * tf, gpointer private)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 12);
+
+ if (data) {
+ /* 8 byte string "fishead\0" for the ogg skeleton stream */
+ if (memcmp (data, "fishead\0", 8) != 0)
+ return;
+ data += 8;
+
+ /* Require that the header contains version 3.0 */
+ if (GST_READ_UINT16_LE (data) != 3)
+ return;
+ data += 2;
+ if (GST_READ_UINT16_LE (data) != 0)
+ return;
+
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, OGG_SKELETON_CAPS);
+ }
+ }
+
+ static GstStaticCaps cmml_caps = GST_STATIC_CAPS ("text/x-cmml");
+
+ #define CMML_CAPS (gst_static_caps_get(&cmml_caps))
+ static void
+ cmml_type_find (GstTypeFind * tf, gpointer private)
+ {
+ /* Header is 12 bytes minimum (though we don't check the minor version */
+ const guint8 *data = gst_type_find_peek (tf, 0, 12);
+
+ if (data) {
+
+ /* 8 byte string "CMML\0\0\0\0" for the magic number */
+ if (memcmp (data, "CMML\0\0\0\0", 8) != 0)
+ return;
+ data += 8;
+
+ /* Require that the header contains at least version 2.0 */
+ if (GST_READ_UINT16_LE (data) < 2)
+ return;
+
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, CMML_CAPS);
+ }
+ }
+
+ /*** application/x-tar ***/
+
+ static GstStaticCaps tar_caps = GST_STATIC_CAPS ("application/x-tar");
+
+ #define TAR_CAPS (gst_static_caps_get(&tar_caps))
+ #define OLDGNU_MAGIC "ustar " /* 7 chars and a NUL */
+ #define NEWGNU_MAGIC "ustar" /* 5 chars and a NUL */
+ static void
+ tar_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 257, 8);
+
+ /* of course we are not certain, but we don't want other typefind funcs
+ * to detect formats of files within the tar archive, e.g. mp3s */
+ if (data) {
+ if (memcmp (data, OLDGNU_MAGIC, 8) == 0) { /* sic */
+ gst_type_find_suggest (tf, GST_TYPE_FIND_NEARLY_CERTAIN, TAR_CAPS);
+ } else if (memcmp (data, NEWGNU_MAGIC, 6) == 0 && /* sic */
+ g_ascii_isdigit (data[6]) && g_ascii_isdigit (data[7])) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_NEARLY_CERTAIN, TAR_CAPS);
+ }
+ }
+ }
+
+ /*** application/x-ar ***/
+
+ static GstStaticCaps ar_caps = GST_STATIC_CAPS ("application/x-ar");
+
+ #define AR_CAPS (gst_static_caps_get(&ar_caps))
+ static void
+ ar_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 24);
+
+ if (data && memcmp (data, "!<arch>", 7) == 0) {
+ gint i;
+
+ for (i = 7; i < 24; ++i) {
+ if (!g_ascii_isprint (data[i]) && data[i] != '\n') {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_POSSIBLE, AR_CAPS);
+ }
+ }
+
+ gst_type_find_suggest (tf, GST_TYPE_FIND_NEARLY_CERTAIN, AR_CAPS);
+ }
+ }
++#endif /* !TIZEN_FEATURE_DISABLE_MIME_TYPES */
+
+ /*** audio/x-au ***/
+
+ /* NOTE: we cannot replace this function with TYPE_FIND_REGISTER_START_WITH,
+ * as it is only possible to register one typefind factory per 'name'
+ * (which is in this case the caps), and the first one would be replaced by
+ * the second one. */
+ static GstStaticCaps au_caps = GST_STATIC_CAPS ("audio/x-au");
+
+ #define AU_CAPS (gst_static_caps_get(&au_caps))
+ static void
+ au_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 4);
+
+ if (data) {
+ if (memcmp (data, ".snd", 4) == 0 || memcmp (data, "dns.", 4) == 0) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, AU_CAPS);
+ }
+ }
+ }
+
+
+ /*** video/x-nuv ***/
+
+ /* NOTE: we cannot replace this function with TYPE_FIND_REGISTER_START_WITH,
+ * as it is only possible to register one typefind factory per 'name'
+ * (which is in this case the caps), and the first one would be replaced by
+ * the second one. */
+ static GstStaticCaps nuv_caps = GST_STATIC_CAPS ("video/x-nuv");
+
+ #define NUV_CAPS (gst_static_caps_get(&nuv_caps))
+ static void
+ nuv_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 11);
+
+ if (data) {
+ if (memcmp (data, "MythTVVideo", 11) == 0
+ || memcmp (data, "NuppelVideo", 11) == 0) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, NUV_CAPS);
+ }
+ }
+ }
+
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ /*** audio/x-paris ***/
+ /* NOTE: do not replace this function with two TYPE_FIND_REGISTER_START_WITH */
+ static GstStaticCaps paris_caps = GST_STATIC_CAPS ("audio/x-paris");
+
+ #define PARIS_CAPS (gst_static_caps_get(&paris_caps))
+ static void
+ paris_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 4);
+
+ if (data) {
+ if (memcmp (data, " paf", 4) == 0 || memcmp (data, "fap ", 4) == 0) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, PARIS_CAPS);
+ }
+ }
+ }
+
+ /*** audio/x-sbc ***/
+ static GstStaticCaps sbc_caps = GST_STATIC_CAPS ("audio/x-sbc");
+ #define SBC_CAPS (gst_static_caps_get(&sbc_caps))
+
+ static gsize
+ sbc_check_header (const guint8 * data, gsize len, guint * rate,
+ guint * channels)
+ {
+ static const guint16 sbc_rates[4] = { 16000, 32000, 44100, 48000 };
+ static const guint8 sbc_blocks[4] = { 4, 8, 12, 16 };
+ guint n_blocks, ch_mode, n_subbands, bitpool;
+
+ if (data[0] != 0x9C || len < 4)
+ return 0;
+
+ n_blocks = sbc_blocks[(data[1] >> 4) & 0x03];
+ ch_mode = (data[1] >> 2) & 0x03;
+ n_subbands = (data[1] & 0x01) ? 8 : 4;
+ bitpool = data[2];
+ if (bitpool < 2)
+ return 0;
+
+ *rate = sbc_rates[(data[1] >> 6) & 0x03];
+ *channels = (ch_mode == 0) ? 1 : 2;
+
+ if (ch_mode == 0)
+ return 4 + (n_subbands * 1) / 2 + (n_blocks * 1 * bitpool) / 8;
+ else if (ch_mode == 1)
+ return 4 + (n_subbands * 2) / 2 + (n_blocks * 2 * bitpool) / 8;
+ else if (ch_mode == 2)
+ return 4 + (n_subbands * 2) / 2 + (n_blocks * bitpool) / 8;
+ else if (ch_mode == 3)
+ return 4 + (n_subbands * 2) / 2 + (n_subbands + n_blocks * bitpool) / 8;
+
+ return 0;
+ }
+
+ static void
+ sbc_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data;
+ gsize frame_len;
+ guint i, rate, channels, offset = 0;
+
+ for (i = 0; i < 10; ++i) {
+ data = gst_type_find_peek (tf, offset, 8);
+ if (data == NULL)
+ return;
+
+ frame_len = sbc_check_header (data, 8, &rate, &channels);
+ if (frame_len == 0)
+ return;
+
+ offset += frame_len;
+ }
+ gst_type_find_suggest_simple (tf, GST_TYPE_FIND_POSSIBLE, "audio/x-sbc",
+ "rate", G_TYPE_INT, rate, "channels", G_TYPE_INT, channels,
+ "parsed", G_TYPE_BOOLEAN, FALSE, NULL);
+ }
+
+ /*** audio/iLBC-sh ***/
+ /* NOTE: do not replace this function with two TYPE_FIND_REGISTER_START_WITH */
+ static GstStaticCaps ilbc_caps = GST_STATIC_CAPS ("audio/iLBC-sh");
+
+ #define ILBC_CAPS (gst_static_caps_get(&ilbc_caps))
+ static void
+ ilbc_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 8);
+
+ if (data) {
+ if (memcmp (data, "#!iLBC30", 8) == 0 || memcmp (data, "#!iLBC20", 8) == 0) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_LIKELY, ILBC_CAPS);
+ }
+ }
+ }
+
+ /*** application/x-ms-dos-executable ***/
+
+ static GstStaticCaps msdos_caps =
+ GST_STATIC_CAPS ("application/x-ms-dos-executable");
+ #define MSDOS_CAPS (gst_static_caps_get(&msdos_caps))
+ /* see http://www.madchat.org/vxdevl/papers/winsys/pefile/pefile.htm */
+ static void
+ msdos_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 64);
+
+ if (data && data[0] == 'M' && data[1] == 'Z' &&
+ GST_READ_UINT16_LE (data + 8) == 4) {
+ guint32 pe_offset = GST_READ_UINT32_LE (data + 60);
+
+ data = gst_type_find_peek (tf, pe_offset, 2);
+ if (data && data[0] == 'P' && data[1] == 'E') {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_NEARLY_CERTAIN, MSDOS_CAPS);
+ }
+ }
+ }
+
+ /*** application/x-mmsh ***/
+
+ static GstStaticCaps mmsh_caps = GST_STATIC_CAPS ("application/x-mmsh");
+
+ #define MMSH_CAPS gst_static_caps_get(&mmsh_caps)
+
+ /* This is to recognise mssh-over-http */
+ static void
+ mmsh_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ static const guint8 asf_marker[16] = { 0x30, 0x26, 0xb2, 0x75, 0x8e, 0x66,
+ 0xcf, 0x11, 0xa6, 0xd9, 0x00, 0xaa, 0x00, 0x62, 0xce, 0x6c
+ };
+
+ const guint8 *data;
+
+ data = gst_type_find_peek (tf, 0, 2 + 2 + 4 + 2 + 2 + 16);
+ if (data && data[0] == 0x24 && data[1] == 0x48 &&
+ GST_READ_UINT16_LE (data + 2) > 2 + 2 + 4 + 2 + 2 + 16 &&
+ memcmp (data + 2 + 2 + 4 + 2 + 2, asf_marker, 16) == 0) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_LIKELY, MMSH_CAPS);
+ }
+ }
+
+ /*** video/x-dirac ***/
+
+ /* NOTE: we cannot replace this function with TYPE_FIND_REGISTER_START_WITH,
+ * as it is only possible to register one typefind factory per 'name'
+ * (which is in this case the caps), and the first one would be replaced by
+ * the second one. */
+ static GstStaticCaps dirac_caps = GST_STATIC_CAPS ("video/x-dirac");
+
+ #define DIRAC_CAPS (gst_static_caps_get(&dirac_caps))
+ static void
+ dirac_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 8);
+
+ if (data) {
+ if (memcmp (data, "BBCD", 4) == 0 || memcmp (data, "KW-DIRAC", 8) == 0) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, DIRAC_CAPS);
+ }
+ }
+ }
+
+ /*** audio/x-tap-tap ***/
+
+ /* NOTE: we cannot replace this function with TYPE_FIND_REGISTER_START_WITH,
+ * as it is only possible to register one typefind factory per 'name'
+ * (which is in this case the caps), and the first one would be replaced by
+ * the second one. */
+ static GstStaticCaps tap_caps = GST_STATIC_CAPS ("audio/x-tap-tap");
+
+ #define TAP_CAPS (gst_static_caps_get(&tap_caps))
+ static void
+ tap_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ const guint8 *data = gst_type_find_peek (tf, 0, 16);
+
+ if (data) {
+ if (memcmp (data, "C64-TAPE-RAW", 12) == 0
+ || memcmp (data, "C16-TAPE-RAW", 12) == 0) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, TAP_CAPS);
+ }
+ }
+ }
+
+ /*** video/vivo ***/
+
+ static GstStaticCaps vivo_caps = GST_STATIC_CAPS ("video/vivo");
+
+ #define VIVO_CAPS gst_static_caps_get(&vivo_caps)
+
+ static void
+ vivo_type_find (GstTypeFind * tf, gpointer unused)
+ {
+ static const guint8 vivo_marker[] = { 'V', 'e', 'r', 's', 'i', 'o', 'n',
+ ':', 'V', 'i', 'v', 'o', '/'
+ };
+ const guint8 *data;
+ guint hdr_len, pos;
+
+ data = gst_type_find_peek (tf, 0, 1024);
+ if (data == NULL || data[0] != 0x00)
+ return;
+
+ if ((data[1] & 0x80)) {
+ if ((data[2] & 0x80))
+ return;
+ hdr_len = ((guint) (data[1] & 0x7f)) << 7;
+ hdr_len += data[2];
+ if (hdr_len > 2048)
+ return;
+ pos = 3;
+ } else {
+ hdr_len = data[1];
+ pos = 2;
+ }
+
+ /* 1008 = 1022 - strlen ("Version:Vivo/") - 1 */
+ while (pos < 1008 && data[pos] == '\r' && data[pos + 1] == '\n')
+ pos += 2;
+
+ if (memcmp (data + pos, vivo_marker, sizeof (vivo_marker)) == 0) {
+ gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, VIVO_CAPS);
+ }
+ }
-
++#endif /* !TIZEN_FEATURE_DISABLE_MIME_TYPES */
+ /*** XDG MIME typefinder (to avoid false positives mostly) ***/
+
+ #ifdef USE_GIO
+ static gboolean
+ xdgmime_validate_name (const gchar * name)
+ {
+ const gchar *s;
+
+ if (G_UNLIKELY (!g_ascii_isalpha (*name))) {
+ return FALSE;
+ }
+
+ /* FIXME: test name string more */
+ s = &name[1];
+ while (*s && (g_ascii_isalnum (*s) || strchr ("/-_.:+", *s) != NULL))
+ s++;
+ if (G_UNLIKELY (*s != '\0')) {
+ return FALSE;
+ }
+
+ return TRUE;
+ }
+
+ static void
+ xdgmime_typefind (GstTypeFind * find, gpointer user_data)
+ {
+ gchar *mimetype;
+ gsize length = 16384;
+ guint64 tf_length;
+ const guint8 *data;
+ gchar *tmp;
+
+ if ((tf_length = gst_type_find_get_length (find)) > 0)
+ length = MIN (length, tf_length);
+
+ if ((data = gst_type_find_peek (find, 0, length)) == NULL)
+ return;
+
+ tmp = g_content_type_guess (NULL, data, length, NULL);
+ if (tmp == NULL || g_content_type_is_unknown (tmp)) {
+ g_free (tmp);
+ return;
+ }
+
+ mimetype = g_content_type_get_mime_type (tmp);
+ g_free (tmp);
+
+ if (mimetype == NULL)
+ return;
+
+ GST_DEBUG ("Got mimetype '%s'", mimetype);
+
+ /* Ignore audio/video types:
+ * - our own typefinders in -base are likely to be better at this
+ * (and if they're not, we really want to fix them, that's why we don't
+ * report xdg-detected audio/video types at all, not even with a low
+ * probability)
+ * - we want to detect GStreamer media types and not MIME types
+ * - the purpose of this xdg mime finder is mainly to prevent false
+ * positives of non-media formats, not to typefind audio/video formats */
+ if (g_str_has_prefix (mimetype, "audio/") ||
+ g_str_has_prefix (mimetype, "video/")) {
+ GST_LOG ("Ignoring audio/video mime type");
+ g_free (mimetype);
+ return;
+ }
+
+ if (!xdgmime_validate_name (mimetype)) {
+ GST_LOG ("Ignoring mimetype with invalid structure name");
+ g_free (mimetype);
+ return;
+ }
+
+ /* Again, we mainly want the xdg typefinding to prevent false-positives on
+ * non-media formats, so suggest the type with a probability that trumps
+ * uncertain results of our typefinders, but not more than that. */
+ GST_LOG ("Suggesting '%s' with probability POSSIBLE", mimetype);
+ gst_type_find_suggest_empty_simple (find, GST_TYPE_FIND_POSSIBLE, mimetype);
+ g_free (mimetype);
+ }
+ #endif /* USE_GIO */
+
+ /*** Windows icon typefinder (to avoid false positives mostly) ***/
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ static void
+ windows_icon_typefind (GstTypeFind * find, gpointer user_data)
+ {
+ const guint8 *data;
+ gint64 datalen;
+ guint16 type, nimages;
+ gint32 size, offset;
+
+ datalen = gst_type_find_get_length (find);
+ if (datalen < 22)
+ return;
+ if ((data = gst_type_find_peek (find, 0, 6)) == NULL)
+ return;
+
+ /* header - simple and not enough to rely on it alone */
+ if (GST_READ_UINT16_LE (data) != 0)
+ return;
+ type = GST_READ_UINT16_LE (data + 2);
+ if (type != 1 && type != 2)
+ return;
+ nimages = GST_READ_UINT16_LE (data + 4);
+ if (nimages == 0) /* we can assume we can't have an empty image file ? */
+ return;
+
+ /* first image */
+ if (data[6 + 3] != 0)
+ return;
+ if (type == 1) {
+ guint16 planes = GST_READ_UINT16_LE (data + 6 + 4);
+ if (planes > 1)
+ return;
+ }
+ size = GST_READ_UINT32_LE (data + 6 + 8);
+ offset = GST_READ_UINT32_LE (data + 6 + 12);
+ if (offset < 0 || size <= 0 || size >= datalen || offset >= datalen
+ || size + offset > datalen)
+ return;
+
+ gst_type_find_suggest_empty_simple (find, GST_TYPE_FIND_NEARLY_CERTAIN,
+ "image/x-icon");
+ }
+
+ /*** WAP WBMP typefinder ***/
+
+ static void
+ wbmp_typefind (GstTypeFind * find, gpointer user_data)
+ {
+ const guint8 *data;
+ gint64 datalen;
+ guint w, h, size;
+
+ /* http://en.wikipedia.org/wiki/Wireless_Application_Protocol_Bitmap_Format */
+ datalen = gst_type_find_get_length (find);
+ if (datalen == 0)
+ return;
+
+ data = gst_type_find_peek (find, 0, 5);
+ if (data == NULL)
+ return;
+
+ /* want 0x00 0x00 at start */
+ if (*data++ != 0 || *data++ != 0)
+ return;
+
+ /* min header size */
+ size = 4;
+
+ /* let's assume max width/height is 65536 */
+ w = *data++;
+ if ((w & 0x80)) {
+ w = (w << 8) | *data++;
+ if ((w & 0x80))
+ return;
+ ++size;
+ data = gst_type_find_peek (find, 4, 2);
+ if (data == NULL)
+ return;
+ }
+ h = *data++;
+ if ((h & 0x80)) {
+ h = (h << 8) | *data++;
+ if ((h & 0x80))
+ return;
+ ++size;
+ }
+
+ if (w == 0 || h == 0)
+ return;
+
+ /* now add bitmap size */
+ size += h * (GST_ROUND_UP_8 (w) / 8);
+
+ if (datalen == size) {
+ gst_type_find_suggest_empty_simple (find, GST_TYPE_FIND_POSSIBLE - 10,
+ "image/vnd.wap.wbmp");
+ }
+ }
+
+ /*** DEGAS Atari images (also to avoid false positives, see #625129) ***/
+ static void
+ degas_type_find (GstTypeFind * tf, gpointer private)
+ {
+ /* No magic, but it should have a fixed size and a few invalid values */
+ /* http://www.fileformat.info/format/atari/spec/6ecf9f6eb5be494284a47feb8a214687/view.htm */
+ gint64 len;
+ const guint8 *data;
+ guint16 resolution;
+ int n;
+
+ len = gst_type_find_get_length (tf);
+ if (len < 34) /* smallest header of the lot */
+ return;
+ data = gst_type_find_peek (tf, 0, 4);
+ if (G_UNLIKELY (data == NULL))
+ return;
+ resolution = GST_READ_UINT16_BE (data);
+ if (len == 32034) {
+ /* could be DEGAS */
+ if (resolution <= 2)
+ gst_type_find_suggest_empty_simple (tf, GST_TYPE_FIND_POSSIBLE + 5,
+ "image/x-degas");
+ } else if (len == 32066) {
+ /* could be DEGAS Elite */
+ if (resolution <= 2) {
+ data = gst_type_find_peek (tf, len - 16, 8);
+ if (G_UNLIKELY (data == NULL))
+ return;
+ for (n = 0; n < 4; n++) {
+ if (GST_READ_UINT16_BE (data + n * 2) > 2)
+ return;
+ }
+ gst_type_find_suggest_empty_simple (tf, GST_TYPE_FIND_POSSIBLE + 5,
+ "image/x-degas");
+ }
+ } else if (len >= 66 && len < 32066) {
+ /* could be compressed DEGAS Elite, but it's compressed and so we can't rely on size,
+ it does have 4 16 bytes values near the end that are 0-2 though. */
+ if ((resolution & 0x8000) && (resolution & 0x7fff) <= 2) {
+ data = gst_type_find_peek (tf, len - 16, 8);
+ if (G_UNLIKELY (data == NULL))
+ return;
+ for (n = 0; n < 4; n++) {
+ if (GST_READ_UINT16_BE (data + n * 2) > 2)
+ return;
+ }
+ gst_type_find_suggest_empty_simple (tf, GST_TYPE_FIND_POSSIBLE + 5,
+ "image/x-degas");
+ }
+ }
+ }
+
+ /*** y4m ***/
+
+ static void
+ y4m_typefind (GstTypeFind * tf, gpointer private)
+ {
+ const guint8 *data;
+
+ data = gst_type_find_peek (tf, 0, 10);
+ if (data != NULL && memcmp (data, "YUV4MPEG2 ", 10) == 0) {
+ gst_type_find_suggest_simple (tf, GST_TYPE_FIND_LIKELY,
+ "application/x-yuv4mpeg", "y4mversion", G_TYPE_INT, 2, NULL);
+ }
+ }
++#endif /* !TIZEN_FEATURE_DISABLE_MIME_TYPES */
+
+ /*** DVD ISO images (looks like H.264, see #674069) ***/
+ static void
+ dvdiso_type_find (GstTypeFind * tf, gpointer private)
+ {
+ /* 0x8000 bytes of zeros, then "\001CD001" */
+ gint64 len;
+ const guint8 *data;
+
+ len = gst_type_find_get_length (tf);
+ if (len < 0x8006)
+ return;
+ data = gst_type_find_peek (tf, 0, 0x8006);
+ if (G_UNLIKELY (data == NULL))
+ return;
+ for (len = 0; len < 0x8000; len++)
+ if (data[len])
+ return;
+ /* Can the '1' be anything else ? My three samples all have '1'. */
+ if (memcmp (data + 0x8000, "\001CD001", 6))
+ return;
+
+ /* May need more inspection, we may be able to demux some of them */
+ gst_type_find_suggest_empty_simple (tf, GST_TYPE_FIND_LIKELY,
+ "application/octet-stream");
+ }
+
+ /* SSA/ASS subtitles
+ *
+ * http://en.wikipedia.org/wiki/SubStation_Alpha
+ * http://matroska.org/technical/specs/subtitles/ssa.html
+ */
+ static void
+ ssa_type_find (GstTypeFind * tf, gpointer private)
+ {
+ const gchar *start, *end, *ver_str, *media_type = NULL;
+ const guint8 *data;
+ gchar *str, *script_type, *p = NULL;
+ gint64 len;
+
+ data = gst_type_find_peek (tf, 0, 32);
+
+ if (data == NULL)
+ return;
+
+ /* FIXME: detect utf-16/32 BOM and convert before typefinding the rest */
+
+ /* there might be a UTF-8 BOM at the beginning */
+ if (memcmp (data, "[Script Info]", 13) != 0 &&
+ memcmp (data + 3, "[Script Info]", 13) != 0) {
+ return;
+ }
+
+ /* now check if we have SSA or ASS */
+ len = gst_type_find_get_length (tf);
+ if (len > 8192)
+ len = 8192;
+
+ data = gst_type_find_peek (tf, 0, len);
+ if (data == NULL)
+ return;
+
+ /* skip BOM */
+ start = (gchar *) memchr (data, '[', 5);
+ g_assert (start);
+ len -= (start - (gchar *) data);
+
+ /* ignore anything non-UTF8 for now, in future we might at least allow
+ * other UTF variants that are clearly prefixed with the appropriate BOM */
+ if (!g_utf8_validate (start, len, &end) && (len - (end - start)) > 6) {
+ GST_FIXME ("non-UTF8 SSA/ASS file");
+ return;
+ }
+
+ /* something at start, but not a UTF-8 BOM? */
+ if (data[0] != '[' && (data[0] != 0xEF || data[1] != 0xBB || data[2] != 0xBF))
+ return;
+
+ /* ignore any partial UTF-8 characters at the end */
+ len = end - start;
+
+ /* create a NUL-terminated string so it's easier to process it safely */
+ str = g_strndup (start, len - 1);
+ script_type = strstr (str, "ScriptType:");
+ if (script_type != NULL) {
+ gdouble version;
+
+ ver_str = script_type + 11;
+ while (*ver_str == ' ' || *ver_str == 'v' || *ver_str == 'V')
+ ++ver_str;
+ version = g_ascii_strtod (ver_str, &p);
+ if (version == 4.0 && p != NULL && *p == '+')
+ media_type = "application/x-ass";
+ else if (version >= 1.0 && version <= 4.0)
+ media_type = "application/x-ssa";
+ }
+
+ if (media_type == NULL) {
+ if (strstr (str, "[v4+ Styles]") || strstr (str, "[V4+ Styles]"))
+ media_type = "application/x-ass";
+ else if (strstr (str, "[v4 Styles]") || strstr (str, "[V4 Styles]"))
+ media_type = "application/x-ssa";
+ }
+
+ if (media_type != NULL) {
+ gst_type_find_suggest_simple (tf, GST_TYPE_FIND_MAXIMUM,
+ media_type, "parsed", G_TYPE_BOOLEAN, FALSE, NULL);
+ } else {
+ GST_WARNING ("could not detect SSA/ASS variant");
+ }
+
+ g_free (str);
+ }
+
+ /*** application/x-mcc ***/
+ static GstStaticCaps mcc_caps = GST_STATIC_CAPS ("application/x-mcc");
+
+ #define MCC_CAPS gst_static_caps_get(&mcc_caps)
+
+ /* Typefind helper for MacCaption (.mcc) closed-caption files.
+  * The header is the fixed string "File Format=MacCaption_MCC V" followed
+  * by a "<digit>.<digit>" version; only the major version digit is
+  * reported in the caps "version" field. */
+ static void
+ mcc_type_find (GstTypeFind * tf, gpointer private)
+ {
+ const guint8 *data;
+
+ /* need the 28-byte magic plus 3 bytes of "N.N" version */
+ data = gst_type_find_peek (tf, 0, 31);
+
+ if (data == NULL)
+ return;
+
+ /* MCC files always start with this followed by the version */
+ if (memcmp (data, "File Format=MacCaption_MCC V", 28) != 0 ||
+ !g_ascii_isdigit (data[28]) || data[29] != '.' ||
+ !g_ascii_isdigit (data[30])) {
+ return;
+ }
+
+ gst_type_find_suggest_simple (tf, GST_TYPE_FIND_MAXIMUM,
+ "application/x-mcc", "version", G_TYPE_INT, data[28] - '0', NULL);
+ }
+
+ /*** video/x-pva ***/
+
+ static GstStaticCaps pva_caps = GST_STATIC_CAPS ("video/x-pva");
+
+ #define PVA_CAPS gst_static_caps_get(&pva_caps)
+
+ /* Typefind helper for PVA streams (video/x-pva).
+  * Matches the packet header: 'A', 'V', a byte below 3 at offset 2 and
+  * the 0x55 sync byte at offset 4.  NOTE(review): offset 3 is left
+  * unchecked — presumably a variable counter/flags byte; confirm against
+  * the PVA specification. */
+ static void
+ pva_type_find (GstTypeFind * tf, gpointer private)
+ {
+ const guint8 *data;
+
+ data = gst_type_find_peek (tf, 0, 5);
+
+ if (data == NULL)
+ return;
+
+ if (data[0] == 'A' && data[1] == 'V' && data[2] < 3 && data[4] == 0x55)
+ gst_type_find_suggest (tf, GST_TYPE_FIND_NEARLY_CERTAIN, PVA_CAPS);
+ }
+
+ /*** audio/audible ***/
+
+ /* derived from pyaudibletags
+ * http://code.google.com/p/pyaudibletags/source/browse/trunk/pyaudibletags.py
+ */
+ static GstStaticCaps aa_caps = GST_STATIC_CAPS ("audio/x-audible");
+
+ #define AA_CAPS gst_static_caps_get(&aa_caps)
+
+ /* Typefind helper for Audible audio books (audio/x-audible).
+  * Looks for the 0x57907536 magic at offset 4.  The first 32-bit
+  * big-endian word appears to hold the total file size: when it equals
+  * the known stream length the suggestion is upgraded from POSSIBLE to
+  * NEARLY_CERTAIN. */
+ static void
+ aa_type_find (GstTypeFind * tf, gpointer private)
+ {
+ const guint8 *data;
+
+ data = gst_type_find_peek (tf, 0, 12);
+ if (data == NULL)
+ return;
+
+ if (GST_READ_UINT32_BE (data + 4) == 0x57907536) {
+ guint64 media_len;
+
+ /* a length of 0 means the total stream length is not known */
+ media_len = gst_type_find_get_length (tf);
+ if (media_len > 0 && GST_READ_UINT32_BE (data) == media_len)
+ gst_type_find_suggest (tf, GST_TYPE_FIND_NEARLY_CERTAIN, AA_CAPS);
+ else
+ gst_type_find_suggest (tf, GST_TYPE_FIND_POSSIBLE, AA_CAPS);
+ }
+ }
+
+ /* Typefind definitions using dedicated typefind functions */
+ GST_TYPE_FIND_REGISTER_DEFINE (musepack, "audio/x-musepack", GST_RANK_PRIMARY,
+ musepack_type_find, "mpc,mpp,mp+", MUSEPACK_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (au, "audio/x-au", GST_RANK_MARGINAL,
+ au_type_find, "au,snd", AU_CAPS, NULL, NULL);
+
+ GST_TYPE_FIND_REGISTER_DEFINE (mcc, "application/x-mcc", GST_RANK_PRIMARY,
+ mcc_type_find, "mcc", MCC_CAPS, NULL, NULL);
+ #if 0
+ GST_TYPE_FIND_REGISTER_START_WITH_DEFINE (smoke, "video/x-smoke",
+ GST_RANK_PRIMARY, NULL, "\x80smoke\x00\x01\x00", 6, GST_TYPE_FIND_MAXIMUM);
+ #endif
+ GST_TYPE_FIND_REGISTER_DEFINE (mid, "audio/midi", GST_RANK_PRIMARY,
+ mid_type_find, "mid,midi", MID_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (mxmf, "audio/mobile-xmf", GST_RANK_PRIMARY,
+ mxmf_type_find, "mxmf", MXMF_CAPS, NULL, NULL);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER_DEFINE (flx, "video/x-fli", GST_RANK_MARGINAL,
+ flx_type_find, "flc,fli", FLX_CAPS, NULL, NULL);
++#endif
+ GST_TYPE_FIND_REGISTER_DEFINE (id3v2, "application/x-id3v2",
+ GST_RANK_PRIMARY + 103, id3v2_type_find, "mp3,mp2,mp1,mpga,ogg,flac,tta",
+ ID3_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (id3v1, "application/x-id3v1",
+ GST_RANK_PRIMARY + 101, id3v1_type_find, "mp3,mp2,mp1,mpga,ogg,flac,tta",
+ ID3_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (apetag, "application/x-apetag",
+ GST_RANK_PRIMARY + 102, apetag_type_find, "mp3,ape,mpc,wv", APETAG_CAPS,
+ NULL, NULL);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER_DEFINE (tta, "audio/x-ttafile", GST_RANK_PRIMARY,
+ tta_type_find, "tta", TTA_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (mod, "audio/x-mod", GST_RANK_SECONDARY,
+ mod_type_find,
+ "669,amf,ams,dbm,digi,dmf,dsm,gdm,far,imf,it,j2b,mdl,med,mod,mt2,mtm,"
+ "okt,psm,ptm,sam,s3m,stm,stx,ult,umx,xm", MOD_CAPS, NULL, NULL);
++#endif
+ GST_TYPE_FIND_REGISTER_DEFINE (mp3, "audio/mpeg", GST_RANK_PRIMARY,
+ mp3_type_find, "mp3,mp2,mp1,mpga", MP3_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (ac3, "audio/x-ac3", GST_RANK_PRIMARY,
+ ac3_type_find, "ac3,eac3", AC3_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (dts, "audio/x-dts", GST_RANK_SECONDARY,
+ dts_type_find, "dts", DTS_CAPS, NULL, NULL);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER_DEFINE (gsm, "audio/x-gsm", GST_RANK_PRIMARY, NULL,
+ "gsm", GSM_CAPS, NULL, NULL);
++#endif
++#ifdef TIZEN_PROFILE_TV
+ GST_TYPE_FIND_REGISTER_DEFINE (mpeg_sys, "video/mpeg-sys", GST_RANK_PRIMARY,
+ mpeg_sys_type_find, "mpe,mpeg,mpg", MPEG_SYS_CAPS, NULL, NULL);
++#endif
+ GST_TYPE_FIND_REGISTER_DEFINE (mpeg_ts, "video/mpegts", GST_RANK_PRIMARY,
+ mpeg_ts_type_find, "ts,mts", MPEGTS_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (ogganx, "application/ogg", GST_RANK_PRIMARY,
+ ogganx_type_find, "ogg,oga,ogv,ogm,ogx,spx,anx,axa,axv", OGG_CAPS, NULL,
+ NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (mpeg_video_stream, "video/mpeg-elementary",
+ GST_RANK_MARGINAL, mpeg_video_stream_type_find, "mpv,mpeg,mpg",
+ MPEG_VIDEO_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (mpeg4_video, "video/mpeg4", GST_RANK_PRIMARY,
+ mpeg4_video_type_find, "m4v", MPEG_VIDEO_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (h263_video, "video/x-h263", GST_RANK_SECONDARY,
+ h263_video_type_find, "h263,263", H263_VIDEO_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (h264_video, "video/x-h264", GST_RANK_PRIMARY,
+ h264_video_type_find, "h264,x264,264", H264_VIDEO_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (h265_video, "video/x-h265", GST_RANK_PRIMARY,
+ h265_video_type_find, "h265,x265,265", H265_VIDEO_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (nuv, "video/x-nuv", GST_RANK_SECONDARY,
+ nuv_type_find, "nuv", NUV_CAPS, NULL, NULL);
+ /* ISO formats */
+ GST_TYPE_FIND_REGISTER_DEFINE (m4a, "audio/x-m4a", GST_RANK_PRIMARY,
+ m4a_type_find, "m4a", M4A_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (q3gp, "application/x-3gp", GST_RANK_PRIMARY,
+ q3gp_type_find, "3gp", Q3GP_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (qt, "video/quicktime", GST_RANK_PRIMARY,
+ qt_type_find, "mov,mp4", QT_CAPS, NULL, NULL);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER_DEFINE (qtif, "image/x-quicktime", GST_RANK_SECONDARY,
+ qtif_type_find, "qif,qtif,qti", QTIF_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (jp2, "image/jp2", GST_RANK_PRIMARY,
+ jp2_type_find, "jp2", JP2_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (jpc, "image/x-jpc", GST_RANK_PRIMARY,
+ jpc_type_find, "jpc,j2k", JPC_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (mj2, "video/mj2", GST_RANK_PRIMARY,
+ jp2_type_find, "mj2", MJ2_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (html, "text/html", GST_RANK_SECONDARY,
+ html_type_find, "htm,html", HTML_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (swf, "application/x-shockwave-flash",
+ GST_RANK_SECONDARY, swf_type_find, "swf,swfl", SWF_CAPS, NULL, NULL);
++#endif
+ GST_TYPE_FIND_REGISTER_DEFINE (xges, "application/xges",
+ GST_RANK_PRIMARY, xges_type_find, "xges", XGES_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (xmeml, "application/vnd.apple-xmeml+xml",
+ GST_RANK_SECONDARY, xmeml_type_find, "xmeml", XMEML_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (fcpxml, "application/vnd.apple-fcp+xml",
+ GST_RANK_SECONDARY, fcpxml_type_find, "fcpxml", FCPXML_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (otio,
+ "application/vnd.pixar.opentimelineio+json", GST_RANK_SECONDARY,
+ otio_type_find, "otio", OTIO_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (dash_mpd, "application/dash+xml",
+ GST_RANK_PRIMARY, dash_mpd_type_find, "mpd,MPD", DASH_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (mss_manifest, "application/vnd.ms-sstr+xml",
+ GST_RANK_PRIMARY, mss_manifest_type_find, NULL, MSS_MANIFEST_CAPS, NULL,
+ NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (utf8, "text/plain", GST_RANK_MARGINAL,
+ utf8_type_find, "txt", UTF8_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (utf16, "text/utf-16", GST_RANK_MARGINAL,
+ utf16_type_find, "txt", UTF16_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (utf32, "text/utf-32", GST_RANK_MARGINAL,
+ utf32_type_find, "txt", UTF32_CAPS, NULL, NULL);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER_DEFINE (uri, "text/uri-list", GST_RANK_MARGINAL,
+ uri_type_find, "ram", URI_CAPS, NULL, NULL);
++#endif
+ GST_TYPE_FIND_REGISTER_DEFINE (itc, "application/itc", GST_RANK_SECONDARY,
+ itc_type_find, "itc", ITC_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (hls, "application/x-hls", GST_RANK_MARGINAL,
+ hls_type_find, "m3u8", HLS_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (sdp, "application/sdp", GST_RANK_SECONDARY,
+ sdp_type_find, "sdp", SDP_CAPS, NULL, NULL);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER_DEFINE (smil, "application/smil", GST_RANK_SECONDARY,
+ smil_type_find, "smil", SMIL_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (ttml_xml, "application/ttml+xml",
+ GST_RANK_SECONDARY, ttml_xml_type_find, "ttml+xml", TTML_XML_CAPS, NULL,
+ NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (xml, "application/xml", GST_RANK_MARGINAL,
+ xml_type_find, "xml", GENERIC_XML_CAPS, NULL, NULL);
++#endif
+ GST_TYPE_FIND_REGISTER_DEFINE (aiff, "audio/x-aiff", GST_RANK_SECONDARY,
+ aiff_type_find, "aiff,aif,aifc", AIFF_CAPS, NULL, NULL);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER_DEFINE (svx, "audio/x-svx", GST_RANK_SECONDARY,
+ svx_type_find, "iff,svx", SVX_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (paris, "audio/x-paris", GST_RANK_SECONDARY,
+ paris_type_find, "paf", PARIS_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (sds, "audio/x-sds", GST_RANK_SECONDARY,
+ sds_type_find, "sds", SDS_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (ircam, "audio/x-ircam", GST_RANK_SECONDARY,
+ ircam_type_find, "sf", IRCAM_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (shn, "audio/x-shorten", GST_RANK_SECONDARY,
+ shn_type_find, "shn", SHN_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (ape, "application/x-ape", GST_RANK_SECONDARY,
+ ape_type_find, "ape", APE_CAPS, NULL, NULL);
++#endif
+ GST_TYPE_FIND_REGISTER_DEFINE (jpeg, "image/jpeg", GST_RANK_PRIMARY + 15,
+ jpeg_type_find, "jpg,jpe,jpeg", JPEG_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (bmp, "image/bmp", GST_RANK_PRIMARY,
+ bmp_type_find, "bmp", BMP_CAPS, NULL, NULL);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER_DEFINE (tiff, "image/tiff", GST_RANK_PRIMARY,
+ tiff_type_find, "tif,tiff", TIFF_CAPS, NULL, NULL);
++#endif
+ GST_TYPE_FIND_REGISTER_DEFINE (exr, "image/x-exr", GST_RANK_PRIMARY,
+ exr_type_find, "exr", EXR_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (pnm, "image/x-portable-pixmap",
+ GST_RANK_SECONDARY, pnm_type_find, "pnm,ppm,pgm,pbm", PNM_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (matroska, "video/x-matroska", GST_RANK_PRIMARY,
+ matroska_type_find, "mkv,mka,mk3d,webm", MATROSKA_CAPS, NULL, NULL);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER_DEFINE (mxf, "application/mxf", GST_RANK_PRIMARY,
+ mxf_type_find, "mxf", MXF_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (dv, "video/x-dv", GST_RANK_SECONDARY,
+ dv_type_find, "dv,dif", DV_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (ilbc, "audio/iLBC-sh", GST_RANK_PRIMARY,
+ ilbc_type_find, "ilbc", ILBC_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (sbc, "audio/x-sbc", GST_RANK_MARGINAL,
+ sbc_type_find, "sbc", SBC_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (kate, "subtitle/x-kate", GST_RANK_MARGINAL,
+ kate_type_find, NULL, NULL, NULL, NULL);
++#endif
+ GST_TYPE_FIND_REGISTER_DEFINE (webvtt, "application/x-subtitle-vtt",
+ GST_RANK_MARGINAL, webvtt_type_find, "vtt", WEBVTT_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (flac, "audio/x-flac", GST_RANK_PRIMARY,
+ flac_type_find, "flac", FLAC_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (vorbis, "audio/x-vorbis", GST_RANK_PRIMARY,
+ vorbis_type_find, NULL, VORBIS_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (theora, "video/x-theora", GST_RANK_PRIMARY,
+ theora_type_find, NULL, THEORA_CAPS, NULL, NULL);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER_DEFINE (ogmvideo, "application/x-ogm-video",
+ GST_RANK_PRIMARY, ogmvideo_type_find, NULL, OGMVIDEO_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (ogmaudio, "application/x-ogm-audio",
+ GST_RANK_PRIMARY, ogmaudio_type_find, NULL, OGMAUDIO_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (ogmtext, "application/x-ogm-text",
+ GST_RANK_PRIMARY, ogmtext_type_find, NULL, OGMTEXT_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (speex, "audio/x-speex", GST_RANK_PRIMARY,
+ speex_type_find, NULL, SPEEX_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (celt, "audio/x-celt", GST_RANK_PRIMARY,
+ celt_type_find, NULL, CELT_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (oggskel, "application/x-ogg-skeleton",
+ GST_RANK_PRIMARY, oggskel_type_find, NULL, OGG_SKELETON_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (cmml, "text/x-cmml", GST_RANK_PRIMARY,
+ cmml_type_find, NULL, CMML_CAPS, NULL, NULL);
++#endif
+ GST_TYPE_FIND_REGISTER_DEFINE (aac, "audio/aac", GST_RANK_SECONDARY,
+ aac_type_find, "aac,adts,adif,loas", AAC_CAPS, NULL, NULL);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER_DEFINE (wavpack_wvp, "audio/x-wavpack",
+ GST_RANK_SECONDARY, wavpack_type_find, "wv,wvp", WAVPACK_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (wavpack_wvc, "audio/x-wavpack-correction",
+ GST_RANK_SECONDARY, wavpack_type_find, "wvc", WAVPACK_CORRECTION_CAPS, NULL,
+ NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (postscript, "application/postscript",
+ GST_RANK_SECONDARY, postscript_type_find, "ps", POSTSCRIPT_CAPS, NULL,
+ NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (svg, "image/svg+xml", GST_RANK_SECONDARY,
+ svg_type_find, "svg", SVG_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (tar, "application/x-tar", GST_RANK_SECONDARY,
+ tar_type_find, "tar", TAR_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (ar, "application/x-ar", GST_RANK_SECONDARY,
+ ar_type_find, "a", AR_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (msdos, "application/x-ms-dos-executable",
+ GST_RANK_SECONDARY, msdos_type_find, "dll,exe,ocx,sys,scr,msstyles,cpl",
+ MSDOS_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (dirac, "video/x-dirac", GST_RANK_PRIMARY,
+ dirac_type_find, NULL, DIRAC_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (multipart, "multipart/x-mixed-replace",
+ GST_RANK_SECONDARY, multipart_type_find, NULL, MULTIPART_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (mmsh, "application/x-mmsh", GST_RANK_SECONDARY,
+ mmsh_type_find, NULL, MMSH_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (vivo, "video/vivo", GST_RANK_SECONDARY,
+ vivo_type_find, "viv", VIVO_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (wbmp, "image/vnd.wap.wbmp", GST_RANK_MARGINAL,
+ wbmp_typefind, NULL, NULL, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (y4m, "application/x-yuv4mpeg",
+ GST_RANK_SECONDARY, y4m_typefind, NULL, NULL, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (windows_icon, "image/x-icon", GST_RANK_MARGINAL,
+ windows_icon_typefind, NULL, NULL, NULL, NULL);
++#endif
+ #ifdef USE_GIO
+ GST_TYPE_FIND_REGISTER_DEFINE (xdgmime, "xdgmime-base", GST_RANK_MARGINAL,
+ xdgmime_typefind, NULL, NULL, NULL, NULL);
+ #endif
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER_DEFINE (degas, "image/x-degas", GST_RANK_MARGINAL,
+ degas_type_find, NULL, NULL, NULL, NULL);
++#endif
+ GST_TYPE_FIND_REGISTER_DEFINE (dvdiso, "application/octet-stream",
+ GST_RANK_MARGINAL, dvdiso_type_find, NULL, NULL, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (ssa, "application/x-ssa", GST_RANK_SECONDARY,
+ ssa_type_find, "ssa,ass", NULL, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (pva, "video/x-pva", GST_RANK_SECONDARY,
+ pva_type_find, "pva", PVA_CAPS, NULL, NULL);
+ GST_TYPE_FIND_REGISTER_DEFINE (aa, "audio/audible", GST_RANK_MARGINAL,
+ aa_type_find, "aa,aax", AA_CAPS, NULL, NULL);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER_DEFINE (tap, "audio/x-tap-tap", GST_RANK_PRIMARY,
+ tap_type_find, "tap", TAP_CAPS, NULL, NULL);
++#endif
--- /dev/null
-
+ /* GStreamer
+ * Copyright (C) 2003 Benjamin Otte <in7y118@public.uni-hamburg.de>
+ * Copyright (C) 2005-2009 Tim-Philipp Müller <tim centricular net>
+ * Copyright (C) 2020 Huawei Technologies Co., Ltd.
+ * @Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * gsttypefindfunctions.c: collection of various typefind functions
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include "gsttypefindfunctionsplugin.h"
+
+ #include <gst/gst.h>
+
+ GST_DEBUG_CATEGORY (type_find_functions_debug);
+
+ /* plugin_init:
+  * Registers every typefind function of this plugin.  The Tizen build
+  * compiles out the formats guarded by TIZEN_FEATURE_DISABLE_MIME_TYPES,
+  * and enables mpeg_sys only when TIZEN_PROFILE_TV is defined. */
+ static gboolean
+ plugin_init (GstPlugin * plugin)
+ {
+ /* can't initialize this via a struct as caps can't be statically initialized */
+ GST_DEBUG_CATEGORY_INIT (type_find_functions_debug, "typefindfunctions",
+ GST_DEBUG_FG_GREEN | GST_DEBUG_BG_RED, "generic type find functions");
+ /* note: asx/wax/wmx are XML files, asf doesn't handle them */
+ /* must use strings, macros don't accept initializers */
+
+ /* RIFF-based typefind registration */
+ GST_TYPE_FIND_REGISTER (avi, plugin);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER (qcp, plugin);
+ GST_TYPE_FIND_REGISTER (cdxa, plugin);
++#endif
+ GST_TYPE_FIND_REGISTER (riff_mid, plugin);
+ GST_TYPE_FIND_REGISTER (wav, plugin);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER (webp, plugin);
++#endif
+ /* 'Start with' (fixed prefix) typefind registration */
+ GST_TYPE_FIND_REGISTER (asf, plugin);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER (vcd, plugin);
++#endif
+ GST_TYPE_FIND_REGISTER (imelody, plugin);
+ GST_TYPE_FIND_REGISTER (scc, plugin);
+ #if 0
+ GST_TYPE_FIND_REGISTER (smoke, plugin);
+ #endif
+ GST_TYPE_FIND_REGISTER (rmf, plugin);
+ GST_TYPE_FIND_REGISTER (ram, plugin);
+ GST_TYPE_FIND_REGISTER (flv, plugin);
+ GST_TYPE_FIND_REGISTER (nist, plugin);
+ GST_TYPE_FIND_REGISTER (voc, plugin);
+ GST_TYPE_FIND_REGISTER (w64, plugin);
+ GST_TYPE_FIND_REGISTER (rf64, plugin);
+ GST_TYPE_FIND_REGISTER (gif, plugin);
+ GST_TYPE_FIND_REGISTER (png, plugin);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER (mve, plugin);
++#endif
+ GST_TYPE_FIND_REGISTER (amr, plugin);
+ GST_TYPE_FIND_REGISTER (amr_wb, plugin);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER (sid, plugin);
+ GST_TYPE_FIND_REGISTER (xcf, plugin);
+ GST_TYPE_FIND_REGISTER (mng, plugin);
+ GST_TYPE_FIND_REGISTER (jng, plugin);
+ GST_TYPE_FIND_REGISTER (xpm, plugin);
+ GST_TYPE_FIND_REGISTER (ras, plugin);
+ GST_TYPE_FIND_REGISTER (bz2, plugin);
+ GST_TYPE_FIND_REGISTER (gz, plugin);
+ GST_TYPE_FIND_REGISTER (zip, plugin);
+ GST_TYPE_FIND_REGISTER (z, plugin);
++#endif
+ GST_TYPE_FIND_REGISTER (elf, plugin);
+ GST_TYPE_FIND_REGISTER (spc, plugin);
+ GST_TYPE_FIND_REGISTER (caf, plugin);
+ GST_TYPE_FIND_REGISTER (rar, plugin);
+ GST_TYPE_FIND_REGISTER (nsf, plugin);
+ GST_TYPE_FIND_REGISTER (gym, plugin);
+ GST_TYPE_FIND_REGISTER (ay, plugin);
+ GST_TYPE_FIND_REGISTER (gbs, plugin);
+ GST_TYPE_FIND_REGISTER (vgm, plugin);
+ GST_TYPE_FIND_REGISTER (sap, plugin);
+ GST_TYPE_FIND_REGISTER (ivf, plugin);
+ GST_TYPE_FIND_REGISTER (kss, plugin);
+ GST_TYPE_FIND_REGISTER (pdf, plugin);
+ GST_TYPE_FIND_REGISTER (doc, plugin);
+ /* Mac OS X .DS_Store files tend to be taken for video/mpeg */
+ GST_TYPE_FIND_REGISTER (ds_store, plugin);
+ GST_TYPE_FIND_REGISTER (psd, plugin);
+ GST_TYPE_FIND_REGISTER (xi, plugin);
+ GST_TYPE_FIND_REGISTER (dmp, plugin);
+
+ /* Function-based typefind registration */
+ GST_TYPE_FIND_REGISTER (musepack, plugin);
+ GST_TYPE_FIND_REGISTER (au, plugin);
+ GST_TYPE_FIND_REGISTER (mcc, plugin);
+ GST_TYPE_FIND_REGISTER (mid, plugin);
+ GST_TYPE_FIND_REGISTER (mxmf, plugin);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER (flx, plugin);
++#endif
+ GST_TYPE_FIND_REGISTER (id3v2, plugin);
+ GST_TYPE_FIND_REGISTER (id3v1, plugin);
+ GST_TYPE_FIND_REGISTER (apetag, plugin);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER (tta, plugin);
+ GST_TYPE_FIND_REGISTER (mod, plugin);
++#endif
+ GST_TYPE_FIND_REGISTER (mp3, plugin);
+ GST_TYPE_FIND_REGISTER (ac3, plugin);
+ GST_TYPE_FIND_REGISTER (dts, plugin);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER (gsm, plugin);
++#endif
++#ifdef TIZEN_PROFILE_TV
+ GST_TYPE_FIND_REGISTER (mpeg_sys, plugin);
++#endif
+ GST_TYPE_FIND_REGISTER (mpeg_ts, plugin);
+ GST_TYPE_FIND_REGISTER (ogganx, plugin);
+ GST_TYPE_FIND_REGISTER (mpeg_video_stream, plugin);
+ GST_TYPE_FIND_REGISTER (mpeg4_video, plugin);
+ GST_TYPE_FIND_REGISTER (h263_video, plugin);
+ GST_TYPE_FIND_REGISTER (h264_video, plugin);
+ GST_TYPE_FIND_REGISTER (h265_video, plugin);
+ GST_TYPE_FIND_REGISTER (nuv, plugin);
+ /* ISO formats */
+ GST_TYPE_FIND_REGISTER (m4a, plugin);
+ GST_TYPE_FIND_REGISTER (q3gp, plugin);
+ GST_TYPE_FIND_REGISTER (qt, plugin);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER (qtif, plugin);
+ GST_TYPE_FIND_REGISTER (jp2, plugin);
+ GST_TYPE_FIND_REGISTER (jpc, plugin);
+ GST_TYPE_FIND_REGISTER (mj2, plugin);
+ GST_TYPE_FIND_REGISTER (html, plugin);
+ GST_TYPE_FIND_REGISTER (swf, plugin);
++#endif
+ GST_TYPE_FIND_REGISTER (xges, plugin);
+ GST_TYPE_FIND_REGISTER (xmeml, plugin);
+ GST_TYPE_FIND_REGISTER (fcpxml, plugin);
+ GST_TYPE_FIND_REGISTER (otio, plugin);
+ GST_TYPE_FIND_REGISTER (dash_mpd, plugin);
+ GST_TYPE_FIND_REGISTER (mss_manifest, plugin);
+ GST_TYPE_FIND_REGISTER (utf8, plugin);
+ GST_TYPE_FIND_REGISTER (utf16, plugin);
+ GST_TYPE_FIND_REGISTER (utf32, plugin);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER (uri, plugin);
++#endif
+ GST_TYPE_FIND_REGISTER (itc, plugin);
+ GST_TYPE_FIND_REGISTER (hls, plugin);
+ GST_TYPE_FIND_REGISTER (sdp, plugin);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER (smil, plugin);
+ GST_TYPE_FIND_REGISTER (ttml_xml, plugin);
+ GST_TYPE_FIND_REGISTER (xml, plugin);
++#endif
+ GST_TYPE_FIND_REGISTER (aiff, plugin);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER (svx, plugin);
+ GST_TYPE_FIND_REGISTER (paris, plugin);
+ GST_TYPE_FIND_REGISTER (sds, plugin);
+ GST_TYPE_FIND_REGISTER (ircam, plugin);
+ GST_TYPE_FIND_REGISTER (shn, plugin);
+ GST_TYPE_FIND_REGISTER (ape, plugin);
++#endif
+ GST_TYPE_FIND_REGISTER (jpeg, plugin);
+ GST_TYPE_FIND_REGISTER (bmp, plugin);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER (tiff, plugin);
++#endif
+ GST_TYPE_FIND_REGISTER (exr, plugin);
+ GST_TYPE_FIND_REGISTER (pnm, plugin);
+ GST_TYPE_FIND_REGISTER (matroska, plugin);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER (mxf, plugin);
+ GST_TYPE_FIND_REGISTER (dv, plugin);
+ GST_TYPE_FIND_REGISTER (ilbc, plugin);
+ GST_TYPE_FIND_REGISTER (sbc, plugin);
+ GST_TYPE_FIND_REGISTER (kate, plugin);
++#endif
+ GST_TYPE_FIND_REGISTER (webvtt, plugin);
+ GST_TYPE_FIND_REGISTER (flac, plugin);
+ GST_TYPE_FIND_REGISTER (vorbis, plugin);
+ GST_TYPE_FIND_REGISTER (theora, plugin);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER (ogmvideo, plugin);
+ GST_TYPE_FIND_REGISTER (ogmaudio, plugin);
+ GST_TYPE_FIND_REGISTER (ogmtext, plugin);
+ GST_TYPE_FIND_REGISTER (speex, plugin);
+ GST_TYPE_FIND_REGISTER (celt, plugin);
+ GST_TYPE_FIND_REGISTER (oggskel, plugin);
+ GST_TYPE_FIND_REGISTER (cmml, plugin);
++#endif
+ GST_TYPE_FIND_REGISTER (aac, plugin);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER (wavpack_wvp, plugin);
+ GST_TYPE_FIND_REGISTER (wavpack_wvc, plugin);
+ GST_TYPE_FIND_REGISTER (postscript, plugin);
+ GST_TYPE_FIND_REGISTER (svg, plugin);
+ GST_TYPE_FIND_REGISTER (tar, plugin);
+ GST_TYPE_FIND_REGISTER (ar, plugin);
+ GST_TYPE_FIND_REGISTER (msdos, plugin);
+ GST_TYPE_FIND_REGISTER (dirac, plugin);
+ GST_TYPE_FIND_REGISTER (multipart, plugin);
+ GST_TYPE_FIND_REGISTER (mmsh, plugin);
+ GST_TYPE_FIND_REGISTER (vivo, plugin);
+ GST_TYPE_FIND_REGISTER (wbmp, plugin);
+ GST_TYPE_FIND_REGISTER (y4m, plugin);
+ GST_TYPE_FIND_REGISTER (windows_icon, plugin);
++#endif
+ #ifdef USE_GIO
+ GST_TYPE_FIND_REGISTER (xdgmime, plugin);
+ #endif
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER (degas, plugin);
++#endif
+ GST_TYPE_FIND_REGISTER (dvdiso, plugin);
+ GST_TYPE_FIND_REGISTER (ssa, plugin);
+ GST_TYPE_FIND_REGISTER (pva, plugin);
+ GST_TYPE_FIND_REGISTER (aa, plugin);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ GST_TYPE_FIND_REGISTER (tap, plugin);
++#endif
+
+ return TRUE;
+ }
+
+ /* Plugin descriptor: exposes plugin_init above as the entry point of the
+  * "typefindfunctions" plugin. */
+ GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ typefindfunctions,
+ "default typefind functions",
+ plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
--- /dev/null
+ /* GStreamer
+ * Copyright (C) 2003 Benjamin Otte <in7y118@public.uni-hamburg.de>
+ * Copyright (C) 2005-2009 Tim-Philipp Müller <tim centricular net>
+ * Copyright (C) 2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ * Copyright (C) 2020 Huawei Technologies Co., Ltd.
+ * @Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * gsttypefindfunctionsriff.c: collection of various typefind functions
+ * based on riff format.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <gst/gst.h>
+
+ #include "gsttypefindfunctionsplugin.h"
+ #include "gsttypefindfunctionsdata.h"
+
+ /*** same for riff types ***/
+ /* Generic typefinder for RIFF-style containers.  Checks the "RIFF"
+  * container magic (or the "AVF0" variant), then compares the
+  * four-character form type at offset 8 against the pattern supplied in
+  * the per-registration GstTypeFindData. */
+ static void
+ riff_type_find (GstTypeFind * tf, gpointer private)
+ {
+ GstTypeFindData *riff_data = (GstTypeFindData *) private;
+ const guint8 *data = gst_type_find_peek (tf, 0, 12);
+
+ if (data && (memcmp (data, "RIFF", 4) == 0 || memcmp (data, "AVF0", 4) == 0)) {
+ /* skip the container tag and the 32-bit chunk size */
+ data += 8;
+ if (memcmp (data, riff_data->data, 4) == 0)
+ gst_type_find_suggest (tf, riff_data->probability, riff_data->caps);
+ }
+ }
+
+ /* TYPE_FIND_REGISTER_RIFF_DEFINE:
+  * Emits the registration function for one RIFF-based format.  It
+  * allocates a GstTypeFindData carrying the 4-byte form type (_data),
+  * caps built from @name and a fixed MAXIMUM probability, then registers
+  * riff_type_find with it; sw_data_destroy reclaims the data when
+  * registration fails (and via GDestroyNotify otherwise). */
+ #define TYPE_FIND_REGISTER_RIFF_DEFINE(typefind_name, name, rank, ext, _data) \
+ G_BEGIN_DECLS \
+ static gboolean \
+ G_PASTE(_private_type_find_riff_, typefind_name) (GstPlugin * plugin) \
+ { \
+ GstTypeFindData *sw_data = g_slice_new (GstTypeFindData); \
+ sw_data->data = (gpointer)_data; \
+ sw_data->size = 4; \
+ sw_data->probability = GST_TYPE_FIND_MAXIMUM; \
+ sw_data->caps = gst_caps_new_empty_simple (name); \
+ if (!gst_type_find_register (plugin, name, rank, riff_type_find, \
+ ext, sw_data->caps, sw_data, \
+ (GDestroyNotify) (sw_data_destroy))) { \
+ sw_data_destroy (sw_data); \
+ return FALSE; \
+ } \
+ return TRUE; \
+ } \
+ GST_TYPE_FIND_REGISTER_DEFINE_CUSTOM (typefind_name, G_PASTE(_private_type_find_riff_, typefind_name)); \
+ G_END_DECLS
+
+ /* RIFF typefind definitions */
+ TYPE_FIND_REGISTER_RIFF_DEFINE (avi, "video/x-msvideo", GST_RANK_PRIMARY,
+ "avi", "AVI ");
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ TYPE_FIND_REGISTER_RIFF_DEFINE (qcp, "audio/qcelp", GST_RANK_PRIMARY,
+ "qcp", "QLCM");
+ TYPE_FIND_REGISTER_RIFF_DEFINE (cdxa, "video/x-cdxa", GST_RANK_PRIMARY,
+ "dat", "CDXA");
++#endif
+ TYPE_FIND_REGISTER_RIFF_DEFINE (riff_mid, "audio/riff-midi",
+ GST_RANK_PRIMARY, "mid,midi", "RMID");
+ TYPE_FIND_REGISTER_RIFF_DEFINE (wav, "audio/x-wav", GST_RANK_PRIMARY, "wav",
+ "WAVE");
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ TYPE_FIND_REGISTER_RIFF_DEFINE (webp, "image/webp", GST_RANK_PRIMARY,
+ "webp", "WEBP");
++#endif
--- /dev/null
+ /* GStreamer
+ * Copyright (C) 2003 Benjamin Otte <in7y118@public.uni-hamburg.de>
+ * Copyright (C) 2005-2009 Tim-Philipp Müller <tim centricular net>
+ * Copyright (C) 2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ * Copyright (C) 2020 Huawei Technologies Co., Ltd.
+ * @Author: Stéphane Cerveau <scerveau@collabora.com>
+ *
+ * gsttypefindfunctionsstartwith.c: collection of various typefind functions
+ * using the start with pattern
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <gst/gst.h>
+
+ #include "gsttypefindfunctionsplugin.h"
+ #include "gsttypefindfunctionsdata.h"
+
+ /* Generic "starts with" typefinder: suggests the caps stored in the
+  * per-registration GstTypeFindData when the first size bytes of the
+  * stream equal the registered pattern. */
+ static void
+ start_with_type_find (GstTypeFind * tf, gpointer private)
+ {
+ GstTypeFindData *start_with = (GstTypeFindData *) private;
+ const guint8 *data;
+
+ GST_LOG ("trying to find mime type %s with the first %u bytes of data",
+ gst_structure_get_name (gst_caps_get_structure (start_with->caps, 0)),
+ start_with->size);
+ data = gst_type_find_peek (tf, 0, start_with->size);
+ if (data && memcmp (data, start_with->data, start_with->size) == 0) {
+ gst_type_find_suggest (tf, start_with->probability, start_with->caps);
+ }
+ }
+
+ /* TYPE_FIND_REGISTER_START_WITH_DEFINE:
+  * Emits the registration function for one fixed-prefix format.  It
+  * allocates a GstTypeFindData carrying the _size-byte prefix (_data),
+  * caps built from @name and the given _probability, then registers
+  * start_with_type_find with it; sw_data_destroy reclaims the data when
+  * registration fails (and via GDestroyNotify otherwise). */
+ #define TYPE_FIND_REGISTER_START_WITH_DEFINE(typefind_name, name, rank, ext, _data, _size, _probability)\
+ G_BEGIN_DECLS \
+ static gboolean \
+ G_PASTE(_private_type_find_start_with_, typefind_name) (GstPlugin * plugin) \
+ { \
+ GstTypeFindData *sw_data = g_slice_new (GstTypeFindData); \
+ sw_data->data = (const guint8 *)_data; \
+ sw_data->size = _size; \
+ sw_data->probability = _probability; \
+ sw_data->caps = gst_caps_new_empty_simple (name); \
+ if (!gst_type_find_register (plugin, name, rank, start_with_type_find,\
+ ext, sw_data->caps, sw_data, \
+ (GDestroyNotify) (sw_data_destroy))) { \
+ sw_data_destroy (sw_data); \
+ return FALSE; \
+ } \
+ return TRUE; \
+ }\
+ GST_TYPE_FIND_REGISTER_DEFINE_CUSTOM (typefind_name, G_PASTE(_private_type_find_start_with_, typefind_name)); \
+ G_END_DECLS
+
+ /* 'Start with' typefind definitions */
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (asf, "video/x-ms-asf",
+ GST_RANK_SECONDARY, "asf,wm,wma,wmv",
+ "\060\046\262\165\216\146\317\021\246\331\000\252\000\142\316\154", 16,
+ GST_TYPE_FIND_MAXIMUM);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (vcd, "video/x-vcd", GST_RANK_PRIMARY,
+ "dat", "\000\377\377\377\377\377\377\377\377\377\377\000", 12,
+ GST_TYPE_FIND_MAXIMUM);
++#endif
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (imelody, "audio/x-imelody",
+ GST_RANK_PRIMARY, "imy,ime,imelody", "BEGIN:IMELODY", 13,
+ GST_TYPE_FIND_MAXIMUM);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (scc, "application/x-scc",
+ GST_RANK_PRIMARY, "scc", "Scenarist_SCC V1.0", 18, GST_TYPE_FIND_MAXIMUM);
+ #if 0
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (smoke, "video/x-smoke",
+ GST_RANK_PRIMARY, NULL, "\x80smoke\x00\x01\x00", 6, GST_TYPE_FIND_MAXIMUM);
+ #endif
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (rmf, "application/vnd.rn-realmedia",
+ GST_RANK_SECONDARY, "ra,ram,rm,rmvb", ".RMF", 4, GST_TYPE_FIND_MAXIMUM);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (ram, "application/x-pn-realaudio",
+ GST_RANK_SECONDARY, "ra,ram,rm,rmvb", ".ra\375", 4, GST_TYPE_FIND_MAXIMUM);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (flv, "video/x-flv",
+ GST_RANK_SECONDARY, "flv", "FLV", 3, GST_TYPE_FIND_MAXIMUM);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (nist, "audio/x-nist",
+ GST_RANK_SECONDARY, "nist", "NIST", 4, GST_TYPE_FIND_MAXIMUM);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (voc, "audio/x-voc",
+ GST_RANK_SECONDARY, "voc", "Creative", 8, GST_TYPE_FIND_MAXIMUM);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (w64, "audio/x-w64",
+ GST_RANK_SECONDARY, "w64", "riff", 4, GST_TYPE_FIND_MAXIMUM);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (rf64, "audio/x-rf64",
+ GST_RANK_PRIMARY, "rf64", "RF64", 4, GST_TYPE_FIND_MAXIMUM);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (gif, "image/gif", GST_RANK_PRIMARY,
+ "gif", "GIF8", 4, GST_TYPE_FIND_MAXIMUM);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (png, "image/png",
+ GST_RANK_PRIMARY + 14, "png", "\211PNG\015\012\032\012", 8,
+ GST_TYPE_FIND_MAXIMUM);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (mve, "video/x-mve",
+ GST_RANK_SECONDARY, "mve",
+ "Interplay MVE File\032\000\032\000\000\001\063\021", 26,
+ GST_TYPE_FIND_MAXIMUM);
++#endif
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (amr, "audio/x-amr-nb-sh",
+ GST_RANK_PRIMARY, "amr", "#!AMR", 5, GST_TYPE_FIND_LIKELY);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (amr_wb, "audio/x-amr-wb-sh",
+ GST_RANK_PRIMARY, "amr", "#!AMR-WB", 7, GST_TYPE_FIND_MAXIMUM);
++#ifndef TIZEN_FEATURE_DISABLE_MIME_TYPES
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (sid, "audio/x-sid", GST_RANK_MARGINAL,
+ "sid", "PSID", 4, GST_TYPE_FIND_MAXIMUM);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (xcf, "image/x-xcf",
+ GST_RANK_SECONDARY, "xcf", "gimp xcf", 8, GST_TYPE_FIND_MAXIMUM);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (mng, "video/x-mng",
+ GST_RANK_SECONDARY, "mng", "\212MNG\015\012\032\012", 8,
+ GST_TYPE_FIND_MAXIMUM);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (jng, "image/x-jng",
+ GST_RANK_SECONDARY, "jng", "\213JNG\015\012\032\012", 8,
+ GST_TYPE_FIND_MAXIMUM);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (xpm, "image/x-xpixmap",
+ GST_RANK_SECONDARY, "xpm", "/* XPM */", 9, GST_TYPE_FIND_MAXIMUM);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (ras, "image/x-sun-raster",
+ GST_RANK_SECONDARY, "ras", "\131\246\152\225", 4, GST_TYPE_FIND_MAXIMUM);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (bz2, "application/x-bzip",
+ GST_RANK_SECONDARY, "bz2", "BZh", 3, GST_TYPE_FIND_LIKELY);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (gz, "application/x-gzip",
+ GST_RANK_SECONDARY, "gz", "\037\213", 2, GST_TYPE_FIND_LIKELY);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (zip, "application/zip",
+ GST_RANK_SECONDARY, "zip", "PK\003\004", 4, GST_TYPE_FIND_LIKELY);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (z, "application/x-compress",
+ GST_RANK_SECONDARY, "Z", "\037\235", 2, GST_TYPE_FIND_LIKELY);
++#endif
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (elf, "application/x-executable",
+ GST_RANK_MARGINAL, NULL, "\177ELF", 4, GST_TYPE_FIND_MAXIMUM);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (spc, "audio/x-spc",
+ GST_RANK_SECONDARY, "spc", "SNES-SPC700 Sound File Data", 27,
+ GST_TYPE_FIND_MAXIMUM);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (caf, "audio/x-caf",
+ GST_RANK_SECONDARY, "caf", "caff\000\001", 6, GST_TYPE_FIND_MAXIMUM);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (rar, "application/x-rar",
+ GST_RANK_SECONDARY, "rar", "Rar!", 4, GST_TYPE_FIND_LIKELY);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (nsf, "audio/x-nsf",
+ GST_RANK_SECONDARY, "nsf", "NESM\x1a", 5, GST_TYPE_FIND_MAXIMUM);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (gym, "audio/x-gym",
+ GST_RANK_SECONDARY, "gym", "GYMX", 4, GST_TYPE_FIND_MAXIMUM);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (ay, "audio/x-ay", GST_RANK_SECONDARY,
+ "ay", "ZXAYEMUL", 8, GST_TYPE_FIND_MAXIMUM);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (gbs, "audio/x-gbs",
+ GST_RANK_SECONDARY, "gbs", "GBS\x01", 4, GST_TYPE_FIND_MAXIMUM);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (vgm, "audio/x-vgm",
+ GST_RANK_SECONDARY, "vgm", "Vgm\x20", 4, GST_TYPE_FIND_MAXIMUM);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (sap, "audio/x-sap",
+ GST_RANK_SECONDARY, "sap", "SAP\x0d\x0a" "AUTHOR\x20", 12,
+ GST_TYPE_FIND_MAXIMUM);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (ivf, "video/x-ivf",
+ GST_RANK_SECONDARY, "ivf", "DKIF", 4, GST_TYPE_FIND_NEARLY_CERTAIN);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (kss, "audio/x-kss",
+ GST_RANK_SECONDARY, "kss", "KSSX\0", 5, GST_TYPE_FIND_MAXIMUM);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (pdf, "application/pdf",
+ GST_RANK_SECONDARY, "pdf", "%PDF-", 5, GST_TYPE_FIND_LIKELY);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (doc, "application/msword",
+ GST_RANK_SECONDARY, "doc", "\320\317\021\340\241\261\032\341", 8,
+ GST_TYPE_FIND_LIKELY);
+ /* Mac OS X .DS_Store files tend to be taken for video/mpeg */
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (ds_store, "application/octet-stream",
+ GST_RANK_SECONDARY, "DS_Store", "\000\000\000\001Bud1", 8,
+ GST_TYPE_FIND_LIKELY);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (psd, "image/vnd.adobe.photoshop",
+ GST_RANK_SECONDARY, "psd", "8BPS\000\001\000\000\000\000", 10,
+ GST_TYPE_FIND_LIKELY);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (xi, "audio/x-xi", GST_RANK_SECONDARY,
+ "xi", "Extended Instrument: ", 21, GST_TYPE_FIND_MAXIMUM);
+ TYPE_FIND_REGISTER_START_WITH_DEFINE (dmp, "audio/x-tap-dmp",
+ GST_RANK_SECONDARY, "dmp", "DC2N-TAP-RAW", 12, GST_TYPE_FIND_LIKELY);
--- /dev/null
- GST_VIDEO_CAPS_MAKE_WITH_FEATURES ("ANY", GST_VIDEO_FORMATS_ALL)
+ /* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * This file:
+ * Copyright (C) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * Copyright (C) 2010 David Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /**
+ * SECTION:element-videoconvert
+ * @title: videoconvert
+ *
+ * Convert video frames between a great variety of video formats.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v videotestsrc ! video/x-raw,format=YUY2 ! videoconvert ! autovideosink
+ * ]|
+ * This will output a test video (generated in YUY2 format) in a video
+ * window. If the video sink selected does not support YUY2 videoconvert will
+ * automatically convert the video to a format understood by the video sink.
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ # include "config.h"
+ #endif
+
+ #include "gstvideoconvert.h"
+
+ #include <gst/video/video.h>
+ #include <gst/video/gstvideometa.h>
+ #include <gst/video/gstvideopool.h>
+
++#ifdef USE_TBM
++#include <gst/allocators/gsttizenbufferpool.h>
++#endif
++
+ #include <string.h>
+
+ GST_DEBUG_CATEGORY (videoconvert_debug);
+ #define GST_CAT_DEFAULT videoconvert_debug
+ GST_DEBUG_CATEGORY_STATIC (CAT_PERFORMANCE);
+
+ static GQuark _colorspace_quark;
+
+ #define gst_video_convert_parent_class parent_class
+ G_DEFINE_TYPE (GstVideoConvert, gst_video_convert, GST_TYPE_VIDEO_FILTER);
+ GST_ELEMENT_REGISTER_DEFINE (videoconvert, "videoconvert",
+ GST_RANK_NONE, GST_TYPE_VIDEO_CONVERT);
+
+ #define DEFAULT_PROP_DITHER GST_VIDEO_DITHER_BAYER
+ #define DEFAULT_PROP_DITHER_QUANTIZATION 1
+ #define DEFAULT_PROP_CHROMA_RESAMPLER GST_VIDEO_RESAMPLER_METHOD_LINEAR
+ #define DEFAULT_PROP_ALPHA_MODE GST_VIDEO_ALPHA_MODE_COPY
+ #define DEFAULT_PROP_ALPHA_VALUE 1.0
+ #define DEFAULT_PROP_CHROMA_MODE GST_VIDEO_CHROMA_MODE_FULL
+ #define DEFAULT_PROP_MATRIX_MODE GST_VIDEO_MATRIX_MODE_FULL
+ #define DEFAULT_PROP_GAMMA_MODE GST_VIDEO_GAMMA_MODE_NONE
+ #define DEFAULT_PROP_PRIMARIES_MODE GST_VIDEO_PRIMARIES_MODE_NONE
+ #define DEFAULT_PROP_N_THREADS 1
+
+ enum
+ {
+ PROP_0,
+ PROP_DITHER,
+ PROP_DITHER_QUANTIZATION,
+ PROP_CHROMA_RESAMPLER,
+ PROP_ALPHA_MODE,
+ PROP_ALPHA_VALUE,
+ PROP_CHROMA_MODE,
+ PROP_MATRIX_MODE,
+ PROP_GAMMA_MODE,
+ PROP_PRIMARIES_MODE,
+ PROP_N_THREADS
+ };
+
+ #define CSP_VIDEO_CAPS GST_VIDEO_CAPS_MAKE (GST_VIDEO_FORMATS_ALL) ";" \
- GST_STATIC_CAPS (CSP_VIDEO_CAPS)
++ GST_VIDEO_CAPS_MAKE_WITH_FEATURES ("ANY", GST_VIDEO_FORMATS_ALL) ";" \
++ GST_VIDEO_CAPS_MAKE("{ SUYV , SYVY , S420 , ITLV }") ";" \
++ GST_VIDEO_CAPS_MAKE_WITH_FEATURES ("ANY", "{ SUYV , SYVY , S420 , ITLV }")
++
++#define CSP_VIDEO_SRC_CAPS GST_VIDEO_CAPS_MAKE (GST_VIDEO_FORMATS_ALL) ";" \
++ GST_VIDEO_CAPS_MAKE_WITH_FEATURES ("ANY", GST_VIDEO_FORMATS_ALL) ";" \
++ GST_VIDEO_CAPS_MAKE("{ SUYV , SYVY , S420 , ITLV , SN12 }") ";" \
++ GST_VIDEO_CAPS_MAKE_WITH_FEATURES ("ANY", "{ SUYV , SYVY , S420 , ITLV , SN12 }")
+
+ static GstStaticPadTemplate gst_video_convert_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
++ GST_STATIC_CAPS (CSP_VIDEO_SRC_CAPS)
+ );
+
+ static GstStaticPadTemplate gst_video_convert_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (CSP_VIDEO_CAPS)
+ );
+
+ static void gst_video_convert_set_property (GObject * object,
+ guint property_id, const GValue * value, GParamSpec * pspec);
+ static void gst_video_convert_get_property (GObject * object,
+ guint property_id, GValue * value, GParamSpec * pspec);
+
+ static gboolean gst_video_convert_set_info (GstVideoFilter * filter,
+ GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
+ GstVideoInfo * out_info);
+ static GstFlowReturn gst_video_convert_transform_frame (GstVideoFilter * filter,
+ GstVideoFrame * in_frame, GstVideoFrame * out_frame);
+
++#ifdef USE_TBM
++static gboolean gst_video_convert_decide_allocation (GstBaseTransform * bsrc,
++ GstQuery * query);
++static GstFlowReturn gst_video_convert_prepare_output_buffer (GstBaseTransform * trans,
++ GstBuffer *input, GstBuffer **outbuf);
++#endif
+ static GstCapsFeatures *features_format_interlaced,
+ *features_format_interlaced_sysmem;
+
+ /* copies the given caps */
+ static GstCaps *
+ gst_video_convert_caps_remove_format_info (GstCaps * caps)
+ {
+ GstStructure *st;
+ GstCapsFeatures *f;
+ gint i, n;
+ GstCaps *res;
+
+ res = gst_caps_new_empty ();
+
+ n = gst_caps_get_size (caps);
+ for (i = 0; i < n; i++) {
+ st = gst_caps_get_structure (caps, i);
+ f = gst_caps_get_features (caps, i);
+
+ /* If this is already expressed by the existing caps
+ * skip this structure */
+ if (i > 0 && gst_caps_is_subset_structure_full (res, st, f))
+ continue;
+
+ st = gst_structure_copy (st);
+ /* Only remove format info for the cases when we can actually convert */
+ if (!gst_caps_features_is_any (f)
+ && (gst_caps_features_is_equal (f,
+ GST_CAPS_FEATURES_MEMORY_SYSTEM_MEMORY)
+ || gst_caps_features_is_equal (f, features_format_interlaced)
+ || gst_caps_features_is_equal (f,
+ features_format_interlaced_sysmem))) {
+ gst_structure_remove_fields (st, "format", "colorimetry", "chroma-site",
+ NULL);
+ }
+
+ gst_caps_append_structure_full (res, st, gst_caps_features_copy (f));
+ }
+
+ return res;
+ }
+
+ /*
+ * This is an incomplete matrix of in formats and a score for the preferred output
+ * format.
+ *
+ * out: RGB24 RGB16 ARGB AYUV YUV444 YUV422 YUV420 YUV411 YUV410 PAL GRAY
+ * in
+ * RGB24 0 2 1 2 2 3 4 5 6 7 8
+ * RGB16 1 0 1 2 2 3 4 5 6 7 8
+ * ARGB 2 3 0 1 4 5 6 7 8 9 10
+ * AYUV 3 4 1 0 2 5 6 7 8 9 10
+ * YUV444 2 4 3 1 0 5 6 7 8 9 10
+ * YUV422 3 5 4 2 1 0 6 7 8 9 10
+ * YUV420 4 6 5 3 2 1 0 7 8 9 10
+ * YUV411 4 6 5 3 2 1 7 0 8 9 10
+ * YUV410 6 8 7 5 4 3 2 1 0 9 10
+ * PAL 1 3 2 6 4 6 7 8 9 0 10
+ * GRAY 1 4 3 2 1 5 6 7 8 9 0
+ *
+ * PAL or GRAY are never preferred, if we can we would convert to PAL instead
+ * of GRAY, though
+ * less subsampling is preferred and if any, preferably horizontal
+ * We would like to keep the alpha, even if we would need to do colorspace conversion
+ * or lose depth.
+ */
+ #define SCORE_FORMAT_CHANGE 1
+ #define SCORE_DEPTH_CHANGE 1
+ #define SCORE_ALPHA_CHANGE 1
+ #define SCORE_CHROMA_W_CHANGE 1
+ #define SCORE_CHROMA_H_CHANGE 1
+ #define SCORE_PALETTE_CHANGE 1
+
+ #define SCORE_COLORSPACE_LOSS 2 /* RGB <-> YUV */
+ #define SCORE_DEPTH_LOSS 4 /* change bit depth */
+ #define SCORE_ALPHA_LOSS 8 /* lose the alpha channel */
+ #define SCORE_CHROMA_W_LOSS 16 /* horizontal subsample */
+ #define SCORE_CHROMA_H_LOSS 32 /* vertical subsample */
+ #define SCORE_PALETTE_LOSS 64 /* convert to palette format */
+ #define SCORE_COLOR_LOSS 128 /* convert to GRAY */
+
+ #define COLORSPACE_MASK (GST_VIDEO_FORMAT_FLAG_YUV | \
+ GST_VIDEO_FORMAT_FLAG_RGB | GST_VIDEO_FORMAT_FLAG_GRAY)
+ #define ALPHA_MASK (GST_VIDEO_FORMAT_FLAG_ALPHA)
+ #define PALETTE_MASK (GST_VIDEO_FORMAT_FLAG_PALETTE)
+
+ /* calculate how much loss a conversion would be */
+ static void
+ score_value (GstBaseTransform * base, const GstVideoFormatInfo * in_info,
+ const GValue * val, gint * min_loss, const GstVideoFormatInfo ** out_info)
+ {
+ const gchar *fname;
+ const GstVideoFormatInfo *t_info;
+ GstVideoFormatFlags in_flags, t_flags;
+ gint loss;
+
+ fname = g_value_get_string (val);
+ t_info = gst_video_format_get_info (gst_video_format_from_string (fname));
+ if (!t_info)
+ return;
+
+ /* accept input format immediately without loss */
+ if (in_info == t_info) {
+ *min_loss = 0;
+ *out_info = t_info;
+ return;
+ }
+
+ loss = SCORE_FORMAT_CHANGE;
+
+ in_flags = GST_VIDEO_FORMAT_INFO_FLAGS (in_info);
+ in_flags &= ~GST_VIDEO_FORMAT_FLAG_LE;
+ in_flags &= ~GST_VIDEO_FORMAT_FLAG_COMPLEX;
+ in_flags &= ~GST_VIDEO_FORMAT_FLAG_UNPACK;
+
+ t_flags = GST_VIDEO_FORMAT_INFO_FLAGS (t_info);
+ t_flags &= ~GST_VIDEO_FORMAT_FLAG_LE;
+ t_flags &= ~GST_VIDEO_FORMAT_FLAG_COMPLEX;
+ t_flags &= ~GST_VIDEO_FORMAT_FLAG_UNPACK;
+
+ if ((t_flags & PALETTE_MASK) != (in_flags & PALETTE_MASK)) {
+ loss += SCORE_PALETTE_CHANGE;
+ if (t_flags & PALETTE_MASK)
+ loss += SCORE_PALETTE_LOSS;
+ }
+
+ if ((t_flags & COLORSPACE_MASK) != (in_flags & COLORSPACE_MASK)) {
+ loss += SCORE_COLORSPACE_LOSS;
+ if (t_flags & GST_VIDEO_FORMAT_FLAG_GRAY)
+ loss += SCORE_COLOR_LOSS;
+ }
+
+ if ((t_flags & ALPHA_MASK) != (in_flags & ALPHA_MASK)) {
+ loss += SCORE_ALPHA_CHANGE;
+ if (in_flags & ALPHA_MASK)
+ loss += SCORE_ALPHA_LOSS;
+ }
+
+ if ((in_info->h_sub[1]) != (t_info->h_sub[1])) {
+ loss += SCORE_CHROMA_H_CHANGE;
+ if ((in_info->h_sub[1]) < (t_info->h_sub[1]))
+ loss += SCORE_CHROMA_H_LOSS;
+ }
+ if ((in_info->w_sub[1]) != (t_info->w_sub[1])) {
+ loss += SCORE_CHROMA_W_CHANGE;
+ if ((in_info->w_sub[1]) < (t_info->w_sub[1]))
+ loss += SCORE_CHROMA_W_LOSS;
+ }
+
+ if ((in_info->bits) != (t_info->bits)) {
+ loss += SCORE_DEPTH_CHANGE;
+ if ((in_info->bits) > (t_info->bits))
+ loss += SCORE_DEPTH_LOSS;
+ }
+
+ GST_DEBUG_OBJECT (base, "score %s -> %s = %d",
+ GST_VIDEO_FORMAT_INFO_NAME (in_info),
+ GST_VIDEO_FORMAT_INFO_NAME (t_info), loss);
+
+ if (loss < *min_loss) {
+ GST_DEBUG_OBJECT (base, "found new best %d", loss);
+ *out_info = t_info;
+ *min_loss = loss;
+ }
+ }
+
+ static void
+ gst_video_convert_fixate_format (GstBaseTransform * base, GstCaps * caps,
+ GstCaps * result)
+ {
+ GstStructure *ins, *outs;
+ const gchar *in_format;
+ const GstVideoFormatInfo *in_info, *out_info = NULL;
+ gint min_loss = G_MAXINT;
+ guint i, capslen;
+
+ ins = gst_caps_get_structure (caps, 0);
+ in_format = gst_structure_get_string (ins, "format");
+ if (!in_format)
+ return;
+
+ GST_DEBUG_OBJECT (base, "source format %s", in_format);
+
+ in_info =
+ gst_video_format_get_info (gst_video_format_from_string (in_format));
+ if (!in_info)
+ return;
+
+ outs = gst_caps_get_structure (result, 0);
+
+ capslen = gst_caps_get_size (result);
+ GST_DEBUG_OBJECT (base, "iterate %d structures", capslen);
+ for (i = 0; i < capslen; i++) {
+ GstStructure *tests;
+ const GValue *format;
+
+ tests = gst_caps_get_structure (result, i);
+ format = gst_structure_get_value (tests, "format");
+ /* should not happen */
+ if (format == NULL)
+ continue;
+
+ if (GST_VALUE_HOLDS_LIST (format)) {
+ gint j, len;
+
+ len = gst_value_list_get_size (format);
+ GST_DEBUG_OBJECT (base, "have %d formats", len);
+ for (j = 0; j < len; j++) {
+ const GValue *val;
+
+ val = gst_value_list_get_value (format, j);
+ if (G_VALUE_HOLDS_STRING (val)) {
+ score_value (base, in_info, val, &min_loss, &out_info);
+ if (min_loss == 0)
+ break;
+ }
+ }
+ } else if (G_VALUE_HOLDS_STRING (format)) {
+ score_value (base, in_info, format, &min_loss, &out_info);
+ }
+ }
+ if (out_info)
+ gst_structure_set (outs, "format", G_TYPE_STRING,
+ GST_VIDEO_FORMAT_INFO_NAME (out_info), NULL);
+ }
+
+ static gboolean
+ subsampling_unchanged (GstVideoInfo * in_info, GstVideoInfo * out_info)
+ {
+ gint i;
+ const GstVideoFormatInfo *in_format, *out_format;
+
+ if (GST_VIDEO_INFO_N_COMPONENTS (in_info) !=
+ GST_VIDEO_INFO_N_COMPONENTS (out_info))
+ return FALSE;
+
+ in_format = in_info->finfo;
+ out_format = out_info->finfo;
+
+ for (i = 0; i < GST_VIDEO_INFO_N_COMPONENTS (in_info); i++) {
+ if (GST_VIDEO_FORMAT_INFO_W_SUB (in_format,
+ i) != GST_VIDEO_FORMAT_INFO_W_SUB (out_format, i))
+ return FALSE;
+ if (GST_VIDEO_FORMAT_INFO_H_SUB (in_format,
+ i) != GST_VIDEO_FORMAT_INFO_H_SUB (out_format, i))
+ return FALSE;
+ }
+
+ return TRUE;
+ }
+
+ static void
+ transfer_colorimetry_from_input (GstBaseTransform * trans, GstCaps * in_caps,
+ GstCaps * out_caps)
+ {
+ GstStructure *out_caps_s = gst_caps_get_structure (out_caps, 0);
+ GstStructure *in_caps_s = gst_caps_get_structure (in_caps, 0);
+ gboolean have_colorimetry =
+ gst_structure_has_field (out_caps_s, "colorimetry");
+ gboolean have_chroma_site =
+ gst_structure_has_field (out_caps_s, "chroma-site");
+
+ /* If the output already has colorimetry and chroma-site, stop,
+ * otherwise try and transfer what we can from the input caps */
+ if (have_colorimetry && have_chroma_site)
+ return;
+
+ {
+ GstVideoInfo in_info, out_info;
+ const GValue *in_colorimetry =
+ gst_structure_get_value (in_caps_s, "colorimetry");
+
+ if (!gst_video_info_from_caps (&in_info, in_caps)) {
+ GST_WARNING_OBJECT (trans,
+ "Failed to convert sink pad caps to video info");
+ return;
+ }
+ if (!gst_video_info_from_caps (&out_info, out_caps)) {
+ GST_WARNING_OBJECT (trans,
+ "Failed to convert src pad caps to video info");
+ return;
+ }
+
+ if (!have_colorimetry && in_colorimetry != NULL) {
+ if ((GST_VIDEO_INFO_IS_YUV (&out_info)
+ && GST_VIDEO_INFO_IS_YUV (&in_info))
+ || (GST_VIDEO_INFO_IS_RGB (&out_info)
+ && GST_VIDEO_INFO_IS_RGB (&in_info))
+ || (GST_VIDEO_INFO_IS_GRAY (&out_info)
+ && GST_VIDEO_INFO_IS_GRAY (&in_info))) {
+ /* Can transfer the colorimetry intact from the input if it has it */
+ gst_structure_set_value (out_caps_s, "colorimetry", in_colorimetry);
+ } else {
+ gchar *colorimetry_str;
+
+ /* Changing between YUV/RGB - forward primaries and transfer function, but use
+ * default range and matrix.
+ * the primaries are used for conversion between RGB and XYZ (CIE 1931 coordinates).
+ * the transfer function could be another reference (e.g., HDR)
+ */
+ out_info.colorimetry.primaries = in_info.colorimetry.primaries;
+ out_info.colorimetry.transfer = in_info.colorimetry.transfer;
+
+ colorimetry_str =
+ gst_video_colorimetry_to_string (&out_info.colorimetry);
+ gst_caps_set_simple (out_caps, "colorimetry", G_TYPE_STRING,
+ colorimetry_str, NULL);
+ g_free (colorimetry_str);
+ }
+ }
+
+ /* Only YUV output needs chroma-site. If the input was also YUV and had the same chroma
+ * subsampling, transfer the siting. If the sub-sampling is changing, then the planes get
+ * scaled anyway so there's no real reason to prefer the input siting. */
+ if (!have_chroma_site && GST_VIDEO_INFO_IS_YUV (&out_info)) {
+ if (GST_VIDEO_INFO_IS_YUV (&in_info)) {
+ const GValue *in_chroma_site =
+ gst_structure_get_value (in_caps_s, "chroma-site");
+ if (in_chroma_site != NULL
+ && subsampling_unchanged (&in_info, &out_info))
+ gst_structure_set_value (out_caps_s, "chroma-site", in_chroma_site);
+ }
+ }
+ }
+ }
+
+ static GstCaps *
+ gst_video_convert_fixate_caps (GstBaseTransform * trans,
+ GstPadDirection direction, GstCaps * caps, GstCaps * othercaps)
+ {
+ GstCaps *result;
+
+ GST_DEBUG_OBJECT (trans, "trying to fixate othercaps %" GST_PTR_FORMAT
+ " based on caps %" GST_PTR_FORMAT, othercaps, caps);
+
+ result = gst_caps_intersect (othercaps, caps);
+ if (gst_caps_is_empty (result)) {
+ gst_caps_unref (result);
+ result = othercaps;
+ } else {
+ gst_caps_unref (othercaps);
+ }
+
+ GST_DEBUG_OBJECT (trans, "now fixating %" GST_PTR_FORMAT, result);
+
+ result = gst_caps_make_writable (result);
+ gst_video_convert_fixate_format (trans, caps, result);
+
+ /* fixate remaining fields */
+ result = gst_caps_fixate (result);
+
+ if (direction == GST_PAD_SINK) {
+ if (gst_caps_is_subset (caps, result)) {
+ gst_caps_replace (&result, caps);
+ } else {
+ /* Try and preserve input colorimetry / chroma information */
+ transfer_colorimetry_from_input (trans, caps, result);
+ }
+ }
+
+ return result;
+ }
+
+ static gboolean
+ gst_video_convert_filter_meta (GstBaseTransform * trans, GstQuery * query,
+ GType api, const GstStructure * params)
+ {
+ /* This element cannot passthrough the crop meta, because it would convert the
+ * wrong sub-region of the image, and worst, our output image may not be large
+ * enough for the crop to be applied later */
+ if (api == GST_VIDEO_CROP_META_API_TYPE)
+ return FALSE;
+
+ /* propose all other metadata upstream */
+ return TRUE;
+ }
+
+ /* The caps can be transformed into any other caps with format info removed.
+ * However, we should prefer passthrough, so if passthrough is possible,
+ * put it first in the list. */
+ static GstCaps *
+ gst_video_convert_transform_caps (GstBaseTransform * btrans,
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter)
+ {
+ GstCaps *tmp, *tmp2;
+ GstCaps *result;
+
+ /* Get all possible caps that we can transform to */
+ tmp = gst_video_convert_caps_remove_format_info (caps);
+
+ if (filter) {
+ tmp2 = gst_caps_intersect_full (filter, tmp, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (tmp);
+ tmp = tmp2;
+ }
+
+ result = tmp;
+
+ GST_DEBUG_OBJECT (btrans, "transformed %" GST_PTR_FORMAT " into %"
+ GST_PTR_FORMAT, caps, result);
+
+ return result;
+ }
+
+ static gboolean
+ gst_video_convert_transform_meta (GstBaseTransform * trans, GstBuffer * outbuf,
+ GstMeta * meta, GstBuffer * inbuf)
+ {
+ const GstMetaInfo *info = meta->info;
+ gboolean ret;
+
+ if (gst_meta_api_type_has_tag (info->api, _colorspace_quark)) {
+ /* don't copy colorspace specific metadata, FIXME, we need a MetaTransform
+ * for the colorspace metadata. */
+ ret = FALSE;
+ } else {
+ /* copy other metadata */
+ ret = TRUE;
+ }
+ return ret;
+ }
+
+ static gboolean
+ gst_video_convert_set_info (GstVideoFilter * filter,
+ GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
+ GstVideoInfo * out_info)
+ {
+ GstVideoConvert *space;
+ GstBaseTransformClass *gstbasetransform_class =
+ GST_BASE_TRANSFORM_GET_CLASS (filter);
+ GstVideoInfo tmp_info;
+
+ space = GST_VIDEO_CONVERT_CAST (filter);
+
+ if (space->convert) {
+ gst_video_converter_free (space->convert);
+ space->convert = NULL;
+ }
+
+ /* these must match */
+ if (in_info->width != out_info->width || in_info->height != out_info->height
+ || in_info->fps_n != out_info->fps_n || in_info->fps_d != out_info->fps_d)
+ goto format_mismatch;
+
+ /* if present, these must match too */
+ if (in_info->par_n != out_info->par_n || in_info->par_d != out_info->par_d)
+ goto format_mismatch;
+
+ /* if present, these must match too */
+ if (in_info->interlace_mode != out_info->interlace_mode)
+ goto format_mismatch;
+
+ /* if the only thing different in the caps is the transfer function, and
+ * we're converting between equivalent transfer functions, do passthrough */
+ tmp_info = *in_info;
+ tmp_info.colorimetry.transfer = out_info->colorimetry.transfer;
+ if (gst_video_info_is_equal (&tmp_info, out_info)) {
+ if (gst_video_transfer_function_is_equivalent (in_info->
+ colorimetry.transfer, in_info->finfo->bits,
+ out_info->colorimetry.transfer, out_info->finfo->bits)) {
+ gstbasetransform_class->passthrough_on_same_caps = FALSE;
+ gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (filter), TRUE);
+ return TRUE;
+ }
+ }
+ gstbasetransform_class->passthrough_on_same_caps = TRUE;
+ gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (filter), FALSE);
+
+ space->convert = gst_video_converter_new (in_info, out_info,
+ gst_structure_new ("GstVideoConvertConfig",
+ GST_VIDEO_CONVERTER_OPT_DITHER_METHOD, GST_TYPE_VIDEO_DITHER_METHOD,
+ space->dither,
+ GST_VIDEO_CONVERTER_OPT_DITHER_QUANTIZATION, G_TYPE_UINT,
+ space->dither_quantization,
+ GST_VIDEO_CONVERTER_OPT_CHROMA_RESAMPLER_METHOD,
+ GST_TYPE_VIDEO_RESAMPLER_METHOD, space->chroma_resampler,
+ GST_VIDEO_CONVERTER_OPT_ALPHA_MODE,
+ GST_TYPE_VIDEO_ALPHA_MODE, space->alpha_mode,
+ GST_VIDEO_CONVERTER_OPT_ALPHA_VALUE,
+ G_TYPE_DOUBLE, space->alpha_value,
+ GST_VIDEO_CONVERTER_OPT_CHROMA_MODE,
+ GST_TYPE_VIDEO_CHROMA_MODE, space->chroma_mode,
+ GST_VIDEO_CONVERTER_OPT_MATRIX_MODE,
+ GST_TYPE_VIDEO_MATRIX_MODE, space->matrix_mode,
+ GST_VIDEO_CONVERTER_OPT_GAMMA_MODE,
+ GST_TYPE_VIDEO_GAMMA_MODE, space->gamma_mode,
+ GST_VIDEO_CONVERTER_OPT_PRIMARIES_MODE,
+ GST_TYPE_VIDEO_PRIMARIES_MODE, space->primaries_mode,
+ GST_VIDEO_CONVERTER_OPT_THREADS, G_TYPE_UINT,
+ space->n_threads, NULL));
+ if (space->convert == NULL)
+ goto no_convert;
+
+ GST_DEBUG_OBJECT (filter, "converting format %s -> %s",
+ gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (in_info)),
+ gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (out_info)));
+
+ return TRUE;
+
+ /* ERRORS */
+ format_mismatch:
+ {
+ GST_ERROR_OBJECT (space, "input and output formats do not match");
+ return FALSE;
+ }
+ no_convert:
+ {
+ GST_ERROR_OBJECT (space, "could not create converter");
+ return FALSE;
+ }
+ }
+
+ static void
+ gst_video_convert_finalize (GObject * obj)
+ {
+ GstVideoConvert *space = GST_VIDEO_CONVERT (obj);
+
++#ifdef USE_TBM
++ if (space->pool) {
++ gst_buffer_pool_set_active (space->pool, FALSE);
++ gst_object_unref (space->pool);
++ space->pool = NULL;
++ }
++#endif
++
+ if (space->convert) {
+ gst_video_converter_free (space->convert);
+ }
+
+ G_OBJECT_CLASS (parent_class)->finalize (obj);
+ }
+
+ static void
+ gst_video_convert_class_init (GstVideoConvertClass * klass)
+ {
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+ GstBaseTransformClass *gstbasetransform_class =
+ (GstBaseTransformClass *) klass;
+ GstVideoFilterClass *gstvideofilter_class = (GstVideoFilterClass *) klass;
+
+ gobject_class->set_property = gst_video_convert_set_property;
+ gobject_class->get_property = gst_video_convert_get_property;
+ gobject_class->finalize = gst_video_convert_finalize;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_video_convert_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_video_convert_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "Colorspace converter", "Filter/Converter/Video",
+ "Converts video from one colorspace to another",
+ "GStreamer maintainers <gstreamer-devel@lists.freedesktop.org>");
+
+ gstbasetransform_class->transform_caps =
+ GST_DEBUG_FUNCPTR (gst_video_convert_transform_caps);
+ gstbasetransform_class->fixate_caps =
+ GST_DEBUG_FUNCPTR (gst_video_convert_fixate_caps);
+ gstbasetransform_class->filter_meta =
+ GST_DEBUG_FUNCPTR (gst_video_convert_filter_meta);
+ gstbasetransform_class->transform_meta =
+ GST_DEBUG_FUNCPTR (gst_video_convert_transform_meta);
+
+ gstbasetransform_class->passthrough_on_same_caps = TRUE;
+
+ gstvideofilter_class->set_info =
+ GST_DEBUG_FUNCPTR (gst_video_convert_set_info);
+ gstvideofilter_class->transform_frame =
+ GST_DEBUG_FUNCPTR (gst_video_convert_transform_frame);
+
++#ifdef USE_TBM
++ gstbasetransform_class->decide_allocation = gst_video_convert_decide_allocation;
++ gstbasetransform_class->prepare_output_buffer = gst_video_convert_prepare_output_buffer;
++#endif
++
+ g_object_class_install_property (gobject_class, PROP_DITHER,
+ g_param_spec_enum ("dither", "Dither", "Apply dithering while converting",
+ gst_video_dither_method_get_type (), DEFAULT_PROP_DITHER,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_DITHER_QUANTIZATION,
+ g_param_spec_uint ("dither-quantization", "Dither Quantize",
+ "Quantizer to use", 0, G_MAXUINT, DEFAULT_PROP_DITHER_QUANTIZATION,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_CHROMA_RESAMPLER,
+ g_param_spec_enum ("chroma-resampler", "Chroma resampler",
+ "Chroma resampler method", gst_video_resampler_method_get_type (),
+ DEFAULT_PROP_CHROMA_RESAMPLER,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_ALPHA_MODE,
+ g_param_spec_enum ("alpha-mode", "Alpha Mode",
+ "Alpha Mode to use", gst_video_alpha_mode_get_type (),
+ DEFAULT_PROP_ALPHA_MODE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_ALPHA_VALUE,
+ g_param_spec_double ("alpha-value", "Alpha Value",
+ "Alpha Value to use", 0.0, 1.0,
+ DEFAULT_PROP_ALPHA_VALUE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_CHROMA_MODE,
+ g_param_spec_enum ("chroma-mode", "Chroma Mode", "Chroma Resampling Mode",
+ gst_video_chroma_mode_get_type (), DEFAULT_PROP_CHROMA_MODE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_MATRIX_MODE,
+ g_param_spec_enum ("matrix-mode", "Matrix Mode", "Matrix Conversion Mode",
+ gst_video_matrix_mode_get_type (), DEFAULT_PROP_MATRIX_MODE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_GAMMA_MODE,
+ g_param_spec_enum ("gamma-mode", "Gamma Mode", "Gamma Conversion Mode",
+ gst_video_gamma_mode_get_type (), DEFAULT_PROP_GAMMA_MODE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_PRIMARIES_MODE,
+ g_param_spec_enum ("primaries-mode", "Primaries Mode",
+ "Primaries Conversion Mode", gst_video_primaries_mode_get_type (),
+ DEFAULT_PROP_PRIMARIES_MODE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_N_THREADS,
+ g_param_spec_uint ("n-threads", "Threads",
+ "Maximum number of threads to use", 0, G_MAXUINT,
+ DEFAULT_PROP_N_THREADS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ }
+
+ /* Instance init: reset every conversion property to its DEFAULT_PROP_*
+  * value. No converter or pool is allocated here. */
+ static void
+ gst_video_convert_init (GstVideoConvert * space)
+ {
++#ifdef USE_TBM
++ /* Tizen: the TBM buffer pool is created later in decide_allocation(). */
++ space->pool = NULL;
++#endif
+ space->dither = DEFAULT_PROP_DITHER;
+ space->dither_quantization = DEFAULT_PROP_DITHER_QUANTIZATION;
+ space->chroma_resampler = DEFAULT_PROP_CHROMA_RESAMPLER;
+ space->alpha_mode = DEFAULT_PROP_ALPHA_MODE;
+ space->alpha_value = DEFAULT_PROP_ALPHA_VALUE;
+ space->chroma_mode = DEFAULT_PROP_CHROMA_MODE;
+ space->matrix_mode = DEFAULT_PROP_MATRIX_MODE;
+ space->gamma_mode = DEFAULT_PROP_GAMMA_MODE;
+ space->primaries_mode = DEFAULT_PROP_PRIMARIES_MODE;
+ space->n_threads = DEFAULT_PROP_N_THREADS;
+ }
+
+ /* GObject set_property vfunc: store the new value in the matching
+  * GstVideoConvert field. Values take effect the next time the converter
+  * is (re)configured; no locking is visible here — NOTE(review): confirm
+  * the property/streaming-thread interaction elsewhere in the file. */
+ void
+ gst_video_convert_set_property (GObject * object, guint property_id,
+ const GValue * value, GParamSpec * pspec)
+ {
+ GstVideoConvert *csp;
+
+ csp = GST_VIDEO_CONVERT (object);
+
+ switch (property_id) {
+ case PROP_DITHER:
+ csp->dither = g_value_get_enum (value);
+ break;
+ case PROP_CHROMA_RESAMPLER:
+ csp->chroma_resampler = g_value_get_enum (value);
+ break;
+ case PROP_ALPHA_MODE:
+ csp->alpha_mode = g_value_get_enum (value);
+ break;
+ case PROP_ALPHA_VALUE:
+ csp->alpha_value = g_value_get_double (value);
+ break;
+ case PROP_CHROMA_MODE:
+ csp->chroma_mode = g_value_get_enum (value);
+ break;
+ case PROP_MATRIX_MODE:
+ csp->matrix_mode = g_value_get_enum (value);
+ break;
+ case PROP_GAMMA_MODE:
+ csp->gamma_mode = g_value_get_enum (value);
+ break;
+ case PROP_PRIMARIES_MODE:
+ csp->primaries_mode = g_value_get_enum (value);
+ break;
+ case PROP_DITHER_QUANTIZATION:
+ csp->dither_quantization = g_value_get_uint (value);
+ break;
+ case PROP_N_THREADS:
+ csp->n_threads = g_value_get_uint (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
+ break;
+ }
+ }
+
+ /* GObject get_property vfunc: mirror of set_property — copy the current
+  * field value into @value. */
+ void
+ gst_video_convert_get_property (GObject * object, guint property_id,
+ GValue * value, GParamSpec * pspec)
+ {
+ GstVideoConvert *csp;
+
+ csp = GST_VIDEO_CONVERT (object);
+
+ switch (property_id) {
+ case PROP_DITHER:
+ g_value_set_enum (value, csp->dither);
+ break;
+ case PROP_CHROMA_RESAMPLER:
+ g_value_set_enum (value, csp->chroma_resampler);
+ break;
+ case PROP_ALPHA_MODE:
+ g_value_set_enum (value, csp->alpha_mode);
+ break;
+ case PROP_ALPHA_VALUE:
+ g_value_set_double (value, csp->alpha_value);
+ break;
+ case PROP_CHROMA_MODE:
+ g_value_set_enum (value, csp->chroma_mode);
+ break;
+ case PROP_MATRIX_MODE:
+ g_value_set_enum (value, csp->matrix_mode);
+ break;
+ case PROP_GAMMA_MODE:
+ g_value_set_enum (value, csp->gamma_mode);
+ break;
+ case PROP_PRIMARIES_MODE:
+ g_value_set_enum (value, csp->primaries_mode);
+ break;
+ case PROP_DITHER_QUANTIZATION:
+ g_value_set_uint (value, csp->dither_quantization);
+ break;
+ case PROP_N_THREADS:
+ g_value_set_uint (value, csp->n_threads);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
+ break;
+ }
+ }
+
+ /* GstVideoFilter transform_frame vfunc: run the pre-configured
+  * GstVideoConverter (space->convert) to convert @in_frame into
+  * @out_frame. Assumes space->convert was set up during negotiation —
+  * NOTE(review): the setup code is outside this chunk. Always returns
+  * GST_FLOW_OK. */
+ static GstFlowReturn
+ gst_video_convert_transform_frame (GstVideoFilter * filter,
+ GstVideoFrame * in_frame, GstVideoFrame * out_frame)
+ {
+ GstVideoConvert *space;
+
+ space = GST_VIDEO_CONVERT_CAST (filter);
+
+ GST_CAT_DEBUG_OBJECT (CAT_PERFORMANCE, filter,
+ "doing colorspace conversion from %s -> to %s",
+ GST_VIDEO_INFO_NAME (&filter->in_info),
+ GST_VIDEO_INFO_NAME (&filter->out_info));
+
+ gst_video_converter_frame (space->convert, in_frame, out_frame);
+
+ return GST_FLOW_OK;
+ }
+
++#ifdef USE_TBM
++/* decide_allocation():
++ * For SN12 output (Tizen tiled NV12), create and activate a TBM-backed
++ * buffer pool that prepare_output_buffer() draws output buffers from.
++ * For every other format this is a plain pass-through to the parent
++ * class implementation.
++ *
++ * Returns: FALSE when the allocation caps are missing or unparsable,
++ * otherwise whatever the parent class decide_allocation() returns. */
++static gboolean
++gst_video_convert_decide_allocation (GstBaseTransform * trans,
++    GstQuery * query)
++{
++  GstVideoConvert *vc = GST_VIDEO_CONVERT_CAST (trans);
++  GstVideoFilter *filter = GST_VIDEO_FILTER_CAST (trans);
++
++  if (filter->out_info.finfo->format == GST_VIDEO_FORMAT_SN12) {
++    guint size;
++    GstStructure *config;
++    GstCaps *caps = NULL;
++    GstVideoInfo vinfo;
++
++    gst_query_parse_allocation (query, &caps, NULL);
++
++    if (!caps) {
++      GST_ERROR("Not using our internal pool and copying buffers for downstream");
++      return FALSE;
++    }
++
++    /* Parse the caps only after the NULL check: the original code called
++     * gst_video_info_from_caps() before checking caps, dereferencing NULL
++     * when the query carried no caps. Also check the parse result. */
++    gst_video_info_init (&vinfo);
++    if (!gst_video_info_from_caps (&vinfo, caps)) {
++      GST_ERROR ("Failed to parse video info from allocation caps");
++      return FALSE;
++    }
++    size = vinfo.size;
++
++    /* Release any pool from a previous negotiation so repeated
++     * renegotiation does not leak pools. */
++    if (vc->pool) {
++      gst_buffer_pool_set_active (vc->pool, FALSE);
++      gst_object_unref (vc->pool);
++      vc->pool = NULL;
++    }
++
++    vc->pool = gst_tizen_buffer_pool_new ();
++    config = gst_buffer_pool_get_config (vc->pool);
++
++    /* min 4 / max 10 buffers, video meta enabled (same as before). */
++    gst_buffer_pool_config_set_params (config, caps, size, 4, 10);
++    gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
++    gst_buffer_pool_set_config (vc->pool, config);
++
++    if (!gst_buffer_pool_set_active (vc->pool, TRUE)) {
++      /* Leave vc->pool NULL; prepare_output_buffer() checks it and falls
++       * back to the base-class allocation path. */
++      gst_object_unref (vc->pool);
++      vc->pool = NULL;
++      GST_INFO ("Failed to activate internal pool");
++    }
++  }
++  GST_DEBUG("[%s]Creating Tizen Buffer Pool", __FUNCTION__);
++
++  return GST_BASE_TRANSFORM_CLASS (parent_class)->decide_allocation (trans, query);
++}
++
++/* prepare_output_buffer():
++ * For SN12 output, acquire the output buffer from the TBM pool created in
++ * decide_allocation() instead of letting the base class allocate one.
++ * Falls back to the parent implementation for other formats, or when the
++ * pool could not be created/activated (vc->pool == NULL). */
++static GstFlowReturn
++gst_video_convert_prepare_output_buffer (GstBaseTransform * trans,
++    GstBuffer *input, GstBuffer **outbuf)
++{
++  GstBuffer *buf = NULL;
++  GstVideoConvert *vc = GST_VIDEO_CONVERT_CAST (trans);
++  GstVideoFilter *filter = GST_VIDEO_FILTER_CAST (trans);
++
++  /* Guard on vc->pool: decide_allocation() leaves it NULL when pool
++   * activation fails, and acquiring from a NULL pool would crash
++   * (the original code did not check). */
++  if (filter->out_info.finfo->format == GST_VIDEO_FORMAT_SN12 && vc->pool) {
++    if (gst_buffer_pool_acquire_buffer (vc->pool, &buf, 0) != GST_FLOW_OK) {
++      GST_ERROR("[%s] memory prepare failed.",__FUNCTION__);
++      return GST_FLOW_ERROR;
++    }
++
++    /* Carry timestamps/flags/meta over from the input buffer. */
++    if (input != buf)
++      GST_BASE_TRANSFORM_CLASS (parent_class)->copy_metadata (trans, input, buf);
++    *outbuf = buf;
++
++    return GST_FLOW_OK;
++  }
++  return GST_BASE_TRANSFORM_CLASS (parent_class)->prepare_output_buffer(trans, input, outbuf);
++}
++#endif
++
+ /* Plugin entry point: set up debug categories, the shared colorspace
+  * quark and the interlaced caps-feature singletons, then register the
+  * videoconvert element. */
+ static gboolean
+ plugin_init (GstPlugin * plugin)
+ {
+ GST_DEBUG_CATEGORY_INIT (videoconvert_debug, "videoconvert", 0,
+ "Colorspace Converter");
+
+ GST_DEBUG_CATEGORY_GET (CAT_PERFORMANCE, "GST_PERFORMANCE");
+
+ _colorspace_quark = g_quark_from_static_string ("colorspace");
+
+ features_format_interlaced =
+ gst_caps_features_new (GST_CAPS_FEATURE_FORMAT_INTERLACED, NULL);
+ features_format_interlaced_sysmem =
+ gst_caps_features_copy (features_format_interlaced);
+ gst_caps_features_add (features_format_interlaced_sysmem,
+ GST_CAPS_FEATURE_MEMORY_SYSTEM_MEMORY);
+
+ return GST_ELEMENT_REGISTER (videoconvert, plugin);
+ }
+
+ /* Register this shared object as the "videoconvert" GStreamer plugin. */
+ GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ videoconvert, "Colorspace conversion", plugin_init, VERSION, GST_LICENSE,
+ GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
--- /dev/null
+ /* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * This file:
+ * Copyright (C) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifndef __GST_VIDEOCONVERT_H__
+ #define __GST_VIDEOCONVERT_H__
+
+ #include <gst/gst.h>
+ #include <gst/video/video.h>
+ #include <gst/video/gstvideofilter.h>
+
+ G_BEGIN_DECLS
+
+ #define GST_TYPE_VIDEO_CONVERT (gst_video_convert_get_type())
+ #define GST_VIDEO_CONVERT_CAST(obj) ((GstVideoConvert *)(obj))
+ G_DECLARE_FINAL_TYPE (GstVideoConvert, gst_video_convert, GST, VIDEO_CONVERT,
+ GstVideoFilter)
+
+ /**
+ * GstVideoConvert:
+ *
+ * Opaque object data structure.
+ */
+ struct _GstVideoConvert {
+ GstVideoFilter element;
+
+ /* Active converter used by transform_frame(); created during caps
+  * negotiation — NOTE(review): setup code not visible in this chunk. */
+ GstVideoConverter *convert;
+ /* Property backing fields (see the PROP_* handlers in the .c file). */
+ GstVideoDitherMethod dither;
+ guint dither_quantization;
+ GstVideoResamplerMethod chroma_resampler;
+ GstVideoAlphaMode alpha_mode;
+ GstVideoChromaMode chroma_mode;
+ GstVideoMatrixMode matrix_mode;
+ GstVideoGammaMode gamma_mode;
+ GstVideoPrimariesMode primaries_mode;
+ gdouble alpha_value;
+ gint n_threads;
++#ifdef USE_TBM
++ /* Tizen TBM output buffer pool, created in decide_allocation() for SN12
++  * output; NULL otherwise. NOTE(review): no finalize releasing this pool
++  * is visible in this patch — verify it is unreffed on destruction. */
++ GstBufferPool *pool;
++#endif
+ };
+
+ GST_ELEMENT_REGISTER_DECLARE (videoconvert);
+
+ G_END_DECLS
+
+ #endif /* __GST_VIDEOCONVERT_H__ */
--- /dev/null
- dependencies : [video_dep],
+ vconvert_sources = [
+ 'gstvideoconvert.c',
+ ]
+
+ gstvideoconvert = library('gstvideoconvert',
+ vconvert_sources,
+ c_args : gst_plugins_base_args,
+ include_directories: [configinc, libsinc],
++ # Tizen: allocators_dep is needed for the TBM buffer pool (USE_TBM).
++ dependencies : [video_dep, allocators_dep],
+ install : true,
+ install_dir : plugins_install_dir,
+ )
+ pkgconfig.generate(gstvideoconvert, install_dir : plugins_pkgconfig_install_dir)
+ plugins += [gstvideoconvert]
--- /dev/null
+ /* -*- c-basic-offset: 2 -*-
+ * vi:si:et:sw=2:sts=8:ts=8:expandtab
+ *
+ * GStreamer
+ * Copyright (C) 1999-2001 Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) 2005 Andy Wingo <wingo@pobox.com>
+ * Copyright (C) 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /**
+ * SECTION:element-volume
+ * @title: volume
+ *
+ * The volume element changes the volume of the audio data.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v -m audiotestsrc ! volume volume=0.5 ! level ! fakesink silent=TRUE
+ * ]|
+ * This pipeline shows that the level of audiotestsrc has been halved
+ * (peak values are around -6 dB and RMS around -9 dB) compared to
+ * the same pipeline without the volume element.
+ *
+ */
+
+ #ifdef HAVE_CONFIG_H
+ #include "config.h"
+ #endif
+
+ #include <string.h>
+ #include <gst/gst.h>
+ #include <gst/base/gstbasetransform.h>
+ #include <gst/audio/audio.h>
+ #include <gst/audio/gstaudiofilter.h>
+
+ #ifdef HAVE_ORC
+ #include <orc/orcfunctions.h>
+ #else
+ #define orc_memset memset
+ #endif
+
+ #include "gstvolumeorc.h"
+ #include "gstvolume.h"
+
+ /* some defines for audio processing */
+ /* the volume factor is a range from 0.0 to (arbitrary) VOLUME_MAX_DOUBLE = 10.0
+ * we map 1.0 to VOLUME_UNITY_INT*
+ */
+ #define VOLUME_UNITY_INT8 8 /* internal int for unity 2^(8-5) */
+ #define VOLUME_UNITY_INT8_BIT_SHIFT 3 /* number of bits to shift for unity */
+ #define VOLUME_UNITY_INT16 2048 /* internal int for unity 2^(16-5) */
+ #define VOLUME_UNITY_INT16_BIT_SHIFT 11 /* number of bits to shift for unity */
+ #define VOLUME_UNITY_INT24 524288 /* internal int for unity 2^(24-5) */
+ #define VOLUME_UNITY_INT24_BIT_SHIFT 19 /* number of bits to shift for unity */
+ #define VOLUME_UNITY_INT32 134217728 /* internal int for unity 2^(32-5) */
+ #define VOLUME_UNITY_INT32_BIT_SHIFT 27
+ #define VOLUME_MAX_DOUBLE 10.0
+ #define VOLUME_MAX_INT8 G_MAXINT8
+ #define VOLUME_MIN_INT8 G_MININT8
+ #define VOLUME_MAX_INT16 G_MAXINT16
+ #define VOLUME_MIN_INT16 G_MININT16
+ #define VOLUME_MAX_INT24 8388607
+ #define VOLUME_MIN_INT24 -8388608
+ #define VOLUME_MAX_INT32 G_MAXINT32
+ #define VOLUME_MIN_INT32 G_MININT32
+
+ #define GST_CAT_DEFAULT gst_volume_debug
+ GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
+
+ /* Filter signals and args */
+ enum
+ {
+ /* FILL ME */
+ LAST_SIGNAL
+ };
+
+ #define DEFAULT_PROP_MUTE FALSE
+ #define DEFAULT_PROP_VOLUME 1.0
+
+ enum
+ {
+ PROP_0,
+ PROP_MUTE,
+ PROP_VOLUME
+ };
+
+ #if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ #define ALLOWED_CAPS \
+ GST_AUDIO_CAPS_MAKE ("{ F32LE, F64LE, S8, S16LE, S24LE, S32LE }") \
+ ", layout = (string) interleaved"
+ #else
+ #define ALLOWED_CAPS \
+ GST_AUDIO_CAPS_MAKE ("{ F32BE, F64BE, S8, S16BE, S24BE, S32BE }") \
+ ", layout = (string) { interleaved, non-interleaved }"
+ #endif
+
+ #define gst_volume_parent_class parent_class
+ G_DEFINE_TYPE_WITH_CODE (GstVolume, gst_volume,
+ GST_TYPE_AUDIO_FILTER,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_STREAM_VOLUME, NULL));
+ GST_ELEMENT_REGISTER_DEFINE (volume, "volume", GST_RANK_NONE, GST_TYPE_VOLUME);
+
+ static void volume_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+ static void volume_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+ static void volume_before_transform (GstBaseTransform * base,
+ GstBuffer * buffer);
+ static GstFlowReturn volume_transform_ip (GstBaseTransform * base,
+ GstBuffer * outbuf);
+ static gboolean volume_stop (GstBaseTransform * base);
+ static gboolean volume_setup (GstAudioFilter * filter,
+ const GstAudioInfo * info);
+
+ static void volume_process_double (GstVolume * self, gpointer bytes,
+ guint n_bytes);
+ static void volume_process_controlled_double (GstVolume * self, gpointer bytes,
+ gdouble * volume, guint channels, guint n_bytes);
+ static void volume_process_float (GstVolume * self, gpointer bytes,
+ guint n_bytes);
+ static void volume_process_controlled_float (GstVolume * self, gpointer bytes,
+ gdouble * volume, guint channels, guint n_bytes);
+ static void volume_process_int32 (GstVolume * self, gpointer bytes,
+ guint n_bytes);
+ static void volume_process_int32_clamp (GstVolume * self, gpointer bytes,
+ guint n_bytes);
+ static void volume_process_controlled_int32_clamp (GstVolume * self,
+ gpointer bytes, gdouble * volume, guint channels, guint n_bytes);
+ static void volume_process_int24 (GstVolume * self, gpointer bytes,
+ guint n_bytes);
+ static void volume_process_int24_clamp (GstVolume * self, gpointer bytes,
+ guint n_bytes);
+ static void volume_process_controlled_int24_clamp (GstVolume * self,
+ gpointer bytes, gdouble * volume, guint channels, guint n_bytes);
+ static void volume_process_int16 (GstVolume * self, gpointer bytes,
+ guint n_bytes);
+ static void volume_process_int16_clamp (GstVolume * self, gpointer bytes,
+ guint n_bytes);
+ static void volume_process_controlled_int16_clamp (GstVolume * self,
+ gpointer bytes, gdouble * volume, guint channels, guint n_bytes);
+ static void volume_process_int8 (GstVolume * self, gpointer bytes,
+ guint n_bytes);
+ static void volume_process_int8_clamp (GstVolume * self, gpointer bytes,
+ guint n_bytes);
+ static void volume_process_controlled_int8_clamp (GstVolume * self,
+ gpointer bytes, gdouble * volume, guint channels, guint n_bytes);
+
+
+ /* helper functions */
+
+ /* Select the per-format processing functions for the negotiated audio
+  * format. The non-clamping variant is used while the current gain cannot
+  * overflow (gain <= unity); controlled (per-sample) processing always
+  * clamps. Returns FALSE for unknown/unsupported formats. */
+ static gboolean
+ volume_choose_func (GstVolume * self, const GstAudioInfo * info)
+ {
+ GstAudioFormat format;
+
+ self->process = NULL;
+ self->process_controlled = NULL;
+
+ format = GST_AUDIO_INFO_FORMAT (info);
+
+ if (format == GST_AUDIO_FORMAT_UNKNOWN)
+ return FALSE;
+
+ switch (format) {
+ case GST_AUDIO_FORMAT_S32:
+ /* only clamp if the gain is greater than 1.0 */
+ if (self->current_vol_i32 > VOLUME_UNITY_INT32) {
+ self->process = volume_process_int32_clamp;
+ } else {
+ self->process = volume_process_int32;
+ }
+ self->process_controlled = volume_process_controlled_int32_clamp;
+ break;
+ case GST_AUDIO_FORMAT_S24:
+ /* only clamp if the gain is greater than 1.0 */
+ if (self->current_vol_i24 > VOLUME_UNITY_INT24) {
+ self->process = volume_process_int24_clamp;
+ } else {
+ self->process = volume_process_int24;
+ }
+ self->process_controlled = volume_process_controlled_int24_clamp;
+ break;
+ case GST_AUDIO_FORMAT_S16:
+ /* only clamp if the gain is greater than 1.0 */
+ if (self->current_vol_i16 > VOLUME_UNITY_INT16) {
+ self->process = volume_process_int16_clamp;
+ } else {
+ self->process = volume_process_int16;
+ }
+ self->process_controlled = volume_process_controlled_int16_clamp;
+ break;
+ case GST_AUDIO_FORMAT_S8:
+ /* only clamp if the gain is greater than 1.0 */
+ if (self->current_vol_i8 > VOLUME_UNITY_INT8) {
+ self->process = volume_process_int8_clamp;
+ } else {
+ self->process = volume_process_int8;
+ }
+ self->process_controlled = volume_process_controlled_int8_clamp;
+ break;
+ case GST_AUDIO_FORMAT_F32:
+ self->process = volume_process_float;
+ self->process_controlled = volume_process_controlled_float;
+ break;
+ case GST_AUDIO_FORMAT_F64:
+ self->process = volume_process_double;
+ self->process_controlled = volume_process_controlled_double;
+ break;
+ default:
+ break;
+ }
+
+ return (self->process != NULL);
+ }
+
+ /* Recompute the cached integer gain factors from @volume/@mute, decide
+  * passthrough (only when gain is exactly unity and no controller is
+  * bound), and (re)select the processing functions. Returns FALSE when no
+  * processing function exists for @info's format. */
+ static gboolean
+ volume_update_volume (GstVolume * self, const GstAudioInfo * info,
+ gdouble volume, gboolean mute)
+ {
+ gboolean passthrough;
+ gboolean res;
+
+ GST_DEBUG_OBJECT (self, "configure mute %d, volume %f", mute, volume);
+
+ if (mute) {
+ self->current_mute = TRUE;
+ self->current_volume = 0.0;
+
+ self->current_vol_i8 = 0;
+ self->current_vol_i16 = 0;
+ self->current_vol_i24 = 0;
+ self->current_vol_i32 = 0;
+
+ passthrough = FALSE;
+ } else {
+ self->current_mute = FALSE;
+ self->current_volume = volume;
+
+ self->current_vol_i8 =
+ (gint) ((gdouble) volume * (gdouble) VOLUME_UNITY_INT8);
+ self->current_vol_i16 =
+ (gint) ((gdouble) volume * (gdouble) VOLUME_UNITY_INT16);
+ self->current_vol_i24 =
+ (gint) ((gdouble) volume * (gdouble) VOLUME_UNITY_INT24);
+ self->current_vol_i32 =
+ (gint) ((gdouble) volume * (gdouble) VOLUME_UNITY_INT32);
+
+ /* unity is detected via the 16-bit factor (2048 == gain 1.0) */
+ passthrough = (self->current_vol_i16 == VOLUME_UNITY_INT16);
+ }
+
+ /* If a controller is used, never use passthrough mode
+ * because the property can change from 1.0 to something
+ * else in the middle of a buffer.
+ */
+ passthrough &= !gst_object_has_active_control_bindings (GST_OBJECT (self));
+
+ GST_DEBUG_OBJECT (self, "set passthrough %d", passthrough);
+
+ gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (self), passthrough);
+
+ res = self->negotiated = volume_choose_func (self, info);
+
+ return res;
+ }
+
+ /* Element class */
+
+ /* GObject dispose: release the tracklist. NOTE(review): only the first
+  * element's data is unreffed before the list is freed — presumably the
+  * list never holds more than one entry; verify against where tracklist
+  * is populated (not visible in this chunk). */
+ static void
+ gst_volume_dispose (GObject * object)
+ {
+ GstVolume *volume = GST_VOLUME (object);
+
+ if (volume->tracklist) {
+ if (volume->tracklist->data)
+ g_object_unref (volume->tracklist->data);
+ g_list_free (volume->tracklist);
+ volume->tracklist = NULL;
+ }
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+ }
+
+ /* Class init: install the mute/volume properties (both controllable),
+  * element metadata, pad templates for the supported raw-audio caps, and
+  * the GstBaseTransform/GstAudioFilter vfuncs. */
+ static void
+ gst_volume_class_init (GstVolumeClass * klass)
+ {
+ GObjectClass *gobject_class;
+ GstElementClass *element_class;
+ GstBaseTransformClass *trans_class;
+ GstAudioFilterClass *filter_class;
+ GstCaps *caps;
+
+ gobject_class = (GObjectClass *) klass;
+ element_class = (GstElementClass *) klass;
+ trans_class = (GstBaseTransformClass *) klass;
+ filter_class = (GstAudioFilterClass *) (klass);
+
+ gobject_class->set_property = volume_set_property;
+ gobject_class->get_property = volume_get_property;
+ gobject_class->dispose = gst_volume_dispose;
+
+ g_object_class_install_property (gobject_class, PROP_MUTE,
+ g_param_spec_boolean ("mute", "Mute", "mute channel",
+ DEFAULT_PROP_MUTE,
+ G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_VOLUME,
+ g_param_spec_double ("volume", "Volume", "volume factor, 1.0=100%",
+ 0.0, VOLUME_MAX_DOUBLE, DEFAULT_PROP_VOLUME,
+ G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_set_static_metadata (element_class, "Volume",
+ "Filter/Effect/Audio",
+ "Set volume on audio/raw streams", "Andy Wingo <wingo@pobox.com>");
+
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (filter_class, caps);
+ gst_caps_unref (caps);
+
+ trans_class->before_transform = GST_DEBUG_FUNCPTR (volume_before_transform);
+ trans_class->transform_ip = GST_DEBUG_FUNCPTR (volume_transform_ip);
+ trans_class->stop = GST_DEBUG_FUNCPTR (volume_stop);
+ /* still call transform_ip in passthrough so controllers can be synced */
+ trans_class->transform_ip_on_passthrough = FALSE;
+
+ filter_class->setup = GST_DEBUG_FUNCPTR (volume_setup);
+ }
+
+ /* Instance init: property defaults, empty tracklist, not negotiated yet.
+  * GAP-aware so silent stretches can skip processing. */
+ static void
+ gst_volume_init (GstVolume * self)
+ {
+ self->mute = DEFAULT_PROP_MUTE;
+ self->volume = DEFAULT_PROP_VOLUME;
+
+ self->tracklist = NULL;
+ self->negotiated = FALSE;
+
+ gst_base_transform_set_gap_aware (GST_BASE_TRANSFORM (self), TRUE);
+ }
+
+ /* F64: in-place scalar multiply of all samples by current_volume (orc). */
+ static void
+ volume_process_double (GstVolume * self, gpointer bytes, guint n_bytes)
+ {
+ gdouble *data = (gdouble *) bytes;
+ guint num_samples = n_bytes / sizeof (gdouble);
+
+ volume_orc_scalarmultiply_f64_ns (data, self->current_volume, num_samples);
+ }
+
+ /* F64 with per-frame gains from @volume (one gain per frame, applied to
+  * all channels of that frame). Orc fast path for mono. */
+ static void
+ volume_process_controlled_double (GstVolume * self, gpointer bytes,
+ gdouble * volume, guint channels, guint n_bytes)
+ {
+ gdouble *data = (gdouble *) bytes;
+ guint num_samples = n_bytes / (sizeof (gdouble) * channels);
+ guint i, j;
+ gdouble vol;
+
+ if (channels == 1) {
+ volume_orc_process_controlled_f64_1ch (data, volume, num_samples);
+ } else {
+ for (i = 0; i < num_samples; i++) {
+ vol = *volume++;
+ for (j = 0; j < channels; j++) {
+ *data++ *= vol;
+ }
+ }
+ }
+ }
+
+ /* F32: in-place scalar multiply of all samples by current_volume (orc). */
+ static void
+ volume_process_float (GstVolume * self, gpointer bytes, guint n_bytes)
+ {
+ gfloat *data = (gfloat *) bytes;
+ guint num_samples = n_bytes / sizeof (gfloat);
+
+ volume_orc_scalarmultiply_f32_ns (data, self->current_volume, num_samples);
+ }
+
+ /* F32 with per-frame gains from @volume. Orc fast paths for mono and
+  * stereo; generic loop otherwise. */
+ static void
+ volume_process_controlled_float (GstVolume * self, gpointer bytes,
+ gdouble * volume, guint channels, guint n_bytes)
+ {
+ gfloat *data = (gfloat *) bytes;
+ guint num_samples = n_bytes / (sizeof (gfloat) * channels);
+ guint i, j;
+ gdouble vol;
+
+ if (channels == 1) {
+ volume_orc_process_controlled_f32_1ch (data, volume, num_samples);
+ } else if (channels == 2) {
+ volume_orc_process_controlled_f32_2ch (data, volume, num_samples);
+ } else {
+ for (i = 0; i < num_samples; i++) {
+ vol = *volume++;
+ for (j = 0; j < channels; j++) {
+ *data++ *= vol;
+ }
+ }
+ }
+ }
+
+ /* S32, no clamping (safe only for gain <= unity, see volume_choose_func).
+  * NOTE(review): uses sizeof (gint) where sizeof (gint32) is meant — the
+  * two are the same size on all supported platforms. */
+ static void
+ volume_process_int32 (GstVolume * self, gpointer bytes, guint n_bytes)
+ {
+ gint32 *data = (gint32 *) bytes;
+ guint num_samples = n_bytes / sizeof (gint);
+
+ /* hard coded in volume.orc */
+ g_assert (VOLUME_UNITY_INT32_BIT_SHIFT == 27);
+ volume_orc_process_int32 (data, self->current_vol_i32, num_samples);
+ }
+
+ /* S32 with saturation, used when gain > unity could overflow. */
+ static void
+ volume_process_int32_clamp (GstVolume * self, gpointer bytes, guint n_bytes)
+ {
+ gint32 *data = (gint32 *) bytes;
+ guint num_samples = n_bytes / sizeof (gint);
+
+ /* hard coded in volume.orc */
+ g_assert (VOLUME_UNITY_INT32_BIT_SHIFT == 27);
+
+ volume_orc_process_int32_clamp (data, self->current_vol_i32, num_samples);
+ }
+
+ /* S32 with per-frame gains, always saturating. Orc fast path for mono. */
+ static void
+ volume_process_controlled_int32_clamp (GstVolume * self, gpointer bytes,
+ gdouble * volume, guint channels, guint n_bytes)
+ {
+ gint32 *data = (gint32 *) bytes;
+ guint i, j;
+ guint num_samples = n_bytes / (sizeof (gint32) * channels);
+ gdouble vol, val;
+
+ if (channels == 1) {
+ volume_orc_process_controlled_int32_1ch (data, volume, num_samples);
+ } else {
+ for (i = 0; i < num_samples; i++) {
+ vol = *volume++;
+ for (j = 0; j < channels; j++) {
+ val = *data * vol;
+ *data++ = (gint32) CLAMP (val, VOLUME_MIN_INT32, VOLUME_MAX_INT32);
+ }
+ }
+ }
+ }
+
+ /* Byte-wise helpers for packed 24-bit samples (3 bytes, native endian,
+  * possibly unaligned). get_unaligned_i24 sign-extends via the gint8 cast
+  * on the most significant byte; write_unaligned_u24 advances _x by 3. */
+ #if (G_BYTE_ORDER == G_LITTLE_ENDIAN)
+ #define get_unaligned_i24(_x) ( (((guint8*)_x)[0]) | ((((guint8*)_x)[1]) << 8) | ((((gint8*)_x)[2]) << 16) )
+
+ #define write_unaligned_u24(_x,samp) \
+ G_STMT_START { \
+ *(_x)++ = samp & 0xFF; \
+ *(_x)++ = (samp >> 8) & 0xFF; \
+ *(_x)++ = (samp >> 16) & 0xFF; \
+ } G_STMT_END
+
+ #else /* BIG ENDIAN */
+ #define get_unaligned_i24(_x) ( (((guint8*)_x)[2]) | ((((guint8*)_x)[1]) << 8) | ((((gint8*)_x)[0]) << 16) )
+ #define write_unaligned_u24(_x,samp) \
+ G_STMT_START { \
+ *(_x)++ = (samp >> 16) & 0xFF; \
+ *(_x)++ = (samp >> 8) & 0xFF; \
+ *(_x)++ = samp & 0xFF; \
+ } G_STMT_END
+ #endif
+
+ /* Packed S24, no clamping: read each 3-byte sample, scale by the fixed
+  * point gain (64-bit intermediate, >> 19), write it back in place. */
+ static void
+ volume_process_int24 (GstVolume * self, gpointer bytes, guint n_bytes)
+ {
+ gint8 *data = (gint8 *) bytes; /* treat the data as a byte stream */
+ guint i, num_samples;
+ guint32 samp;
+ gint64 val;
+
+ num_samples = n_bytes / (sizeof (gint8) * 3);
+ for (i = 0; i < num_samples; i++) {
+ samp = get_unaligned_i24 (data);
+
+ val = (gint32) samp;
+ val =
+ (((gint64) self->current_vol_i24 *
+ val) >> VOLUME_UNITY_INT24_BIT_SHIFT);
+ samp = (guint32) val;
+
+ /* write the value back into the stream */
+ write_unaligned_u24 (data, samp);
+ }
+ }
+
+ /* Packed S24 with saturation to the 24-bit range. */
+ static void
+ volume_process_int24_clamp (GstVolume * self, gpointer bytes, guint n_bytes)
+ {
+ gint8 *data = (gint8 *) bytes; /* treat the data as a byte stream */
+ guint i, num_samples;
+ guint32 samp;
+ gint64 val;
+
+ num_samples = n_bytes / (sizeof (gint8) * 3);
+ for (i = 0; i < num_samples; i++) {
+ samp = get_unaligned_i24 (data);
+
+ val = (gint32) samp;
+ val =
+ (((gint64) self->current_vol_i24 *
+ val) >> VOLUME_UNITY_INT24_BIT_SHIFT);
+ samp = (guint32) CLAMP (val, VOLUME_MIN_INT24, VOLUME_MAX_INT24);
+
+ /* write the value back into the stream */
+ write_unaligned_u24 (data, samp);
+ }
+ }
+
+ /* Packed S24 with per-frame gains, always saturating (no orc path). */
+ static void
+ volume_process_controlled_int24_clamp (GstVolume * self, gpointer bytes,
+ gdouble * volume, guint channels, guint n_bytes)
+ {
+ gint8 *data = (gint8 *) bytes; /* treat the data as a byte stream */
+ guint i, j;
+ guint num_samples = n_bytes / (sizeof (gint8) * 3 * channels);
+ gdouble vol, val;
+
+ for (i = 0; i < num_samples; i++) {
+ vol = *volume++;
+ for (j = 0; j < channels; j++) {
+ val = get_unaligned_i24 (data) * vol;
+ val = CLAMP (val, VOLUME_MIN_INT24, VOLUME_MAX_INT24);
+ write_unaligned_u24 (data, (gint32) val);
+ }
+ }
+ }
+
+ /* S16, no clamping (gain <= unity only). */
+ static void
+ volume_process_int16 (GstVolume * self, gpointer bytes, guint n_bytes)
+ {
+ gint16 *data = (gint16 *) bytes;
+ guint num_samples = n_bytes / sizeof (gint16);
+
+ /* hard coded in volume.orc */
+ g_assert (VOLUME_UNITY_INT16_BIT_SHIFT == 11);
+
+ volume_orc_process_int16 (data, self->current_vol_i16, num_samples);
+ }
+
+ /* S16 with saturation, used when gain > unity could overflow. */
+ static void
+ volume_process_int16_clamp (GstVolume * self, gpointer bytes, guint n_bytes)
+ {
+ gint16 *data = (gint16 *) bytes;
+ guint num_samples = n_bytes / sizeof (gint16);
+
+ /* hard coded in volume.orc */
+ g_assert (VOLUME_UNITY_INT16_BIT_SHIFT == 11);
+
+ volume_orc_process_int16_clamp (data, self->current_vol_i16, num_samples);
+ }
+
+ /* S16 with per-frame gains, always saturating. Orc fast paths for mono
+  * and stereo. */
+ static void
+ volume_process_controlled_int16_clamp (GstVolume * self, gpointer bytes,
+ gdouble * volume, guint channels, guint n_bytes)
+ {
+ gint16 *data = (gint16 *) bytes;
+ guint i, j;
+ guint num_samples = n_bytes / (sizeof (gint16) * channels);
+ gdouble vol, val;
+
+ if (channels == 1) {
+ volume_orc_process_controlled_int16_1ch (data, volume, num_samples);
+ } else if (channels == 2) {
+ volume_orc_process_controlled_int16_2ch (data, volume, num_samples);
+ } else {
+ for (i = 0; i < num_samples; i++) {
+ vol = *volume++;
+ for (j = 0; j < channels; j++) {
+ val = *data * vol;
+ *data++ = (gint16) CLAMP (val, VOLUME_MIN_INT16, VOLUME_MAX_INT16);
+ }
+ }
+ }
+ }
+
+ /* S8, no clamping (gain <= unity only). */
+ static void
+ volume_process_int8 (GstVolume * self, gpointer bytes, guint n_bytes)
+ {
+ gint8 *data = (gint8 *) bytes;
+ guint num_samples = n_bytes / sizeof (gint8);
+
+ /* hard coded in volume.orc */
+ g_assert (VOLUME_UNITY_INT8_BIT_SHIFT == 3);
+
+ volume_orc_process_int8 (data, self->current_vol_i8, num_samples);
+ }
+
+ /* S8 with saturation, used when gain > unity could overflow. */
+ static void
+ volume_process_int8_clamp (GstVolume * self, gpointer bytes, guint n_bytes)
+ {
+ gint8 *data = (gint8 *) bytes;
+ guint num_samples = n_bytes / sizeof (gint8);
+
+ /* hard coded in volume.orc */
+ g_assert (VOLUME_UNITY_INT8_BIT_SHIFT == 3);
+
+ volume_orc_process_int8_clamp (data, self->current_vol_i8, num_samples);
+ }
+
+ /* S8 with per-frame gains, always saturating. Orc fast paths for mono
+  * and stereo. */
+ static void
+ volume_process_controlled_int8_clamp (GstVolume * self, gpointer bytes,
+ gdouble * volume, guint channels, guint n_bytes)
+ {
+ gint8 *data = (gint8 *) bytes;
+ guint i, j;
+ guint num_samples = n_bytes / (sizeof (gint8) * channels);
+ gdouble val, vol;
+
+ if (channels == 1) {
+ volume_orc_process_controlled_int8_1ch (data, volume, num_samples);
+ } else if (channels == 2) {
+ volume_orc_process_controlled_int8_2ch (data, volume, num_samples);
+ } else {
+ for (i = 0; i < num_samples; i++) {
+ vol = *volume++;
+ for (j = 0; j < channels; j++) {
+ val = *data * vol;
+ *data++ = (gint8) CLAMP (val, VOLUME_MIN_INT8, VOLUME_MAX_INT8);
+ }
+ }
+ }
+ }
+
+ /* GstBaseTransform vmethod implementations */
+
+ /* get notified of caps and plug in the correct process function */
+ /* get notified of caps and plug in the correct process function */
+ /* GstAudioFilter setup vfunc: snapshot volume/mute under the object lock
+  * and reconfigure. Posts a NEGOTIATION error when the format has no
+  * processing function. */
+ static gboolean
+ volume_setup (GstAudioFilter * filter, const GstAudioInfo * info)
+ {
+ gboolean res;
+ GstVolume *self = GST_VOLUME (filter);
+ gdouble volume;
+ gboolean mute;
+
+ GST_OBJECT_LOCK (self);
+ volume = self->volume;
+ mute = self->mute;
+ GST_OBJECT_UNLOCK (self);
+
+ res = volume_update_volume (self, info, volume, mute);
+ if (!res) {
+ GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
+ ("Invalid incoming format"), (NULL));
+ }
+ self->negotiated = res;
+
+ return res;
+ }
+
+ /* GstBaseTransform stop vfunc: free the controller value arrays and
+  * chain up to the parent (defaulting to TRUE). */
+ static gboolean
+ volume_stop (GstBaseTransform * base)
+ {
+ GstVolume *self = GST_VOLUME (base);
+
+ g_free (self->volumes);
+ self->volumes = NULL;
+ self->volumes_count = 0;
+
+ g_free (self->mutes);
+ self->mutes = NULL;
+ self->mutes_count = 0;
+
+ return GST_CALL_PARENT_WITH_DEFAULT (GST_BASE_TRANSFORM_CLASS, stop, (base),
+ TRUE);
+ }
+
+ /* GstBaseTransform before_transform vfunc: sync controller-bound
+  * properties to the buffer's stream time, then reconfigure if volume or
+  * mute changed since the last buffer. */
+ static void
+ volume_before_transform (GstBaseTransform * base, GstBuffer * buffer)
+ {
+ GstClockTime timestamp;
+ GstVolume *self = GST_VOLUME (base);
+ gdouble volume;
+ gboolean mute;
+
+ timestamp = GST_BUFFER_TIMESTAMP (buffer);
+ timestamp =
+ gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);
+
+ GST_DEBUG_OBJECT (base, "sync to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (timestamp));
+
+ if (GST_CLOCK_TIME_IS_VALID (timestamp))
+ gst_object_sync_values (GST_OBJECT (self), timestamp);
+
+ /* get latest values */
+ GST_OBJECT_LOCK (self);
+ volume = self->volume;
+ mute = self->mute;
+ GST_OBJECT_UNLOCK (self);
+
+ if ((volume != self->current_volume) || (mute != self->current_mute)) {
+ /* the volume or mute was updated, update our internal state before
+ * we continue processing. */
+ volume_update_volume (self, GST_AUDIO_FILTER_INFO (self), volume, mute);
+ }
+ }
+
+ /* call the plugged-in process function for this instance
+ * needs to be done with this indirection since volume_transform is
+ * a class-global method
+ */
+ static GstFlowReturn
+ volume_transform_ip (GstBaseTransform * base, GstBuffer * outbuf)
+ {
+ GstAudioFilter *filter = GST_AUDIO_FILTER_CAST (base);
+ GstVolume *self = GST_VOLUME (base);
+ GstMapInfo map;
+ GstClockTime ts;
+
+ if (G_UNLIKELY (!self->negotiated))
+ goto not_negotiated;
+
+ /* don't process data with GAP */
+ if (GST_BUFFER_FLAG_IS_SET (outbuf, GST_BUFFER_FLAG_GAP))
+ return GST_FLOW_OK;
+
+ gst_buffer_map (outbuf, &map, GST_MAP_READWRITE);
+ ts = GST_BUFFER_TIMESTAMP (outbuf);
+ ts = gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, ts);
+
+ if (GST_CLOCK_TIME_IS_VALID (ts)) {
+ GstControlBinding *mute_cb, *volume_cb;
+
+ mute_cb = gst_object_get_control_binding (GST_OBJECT (self), "mute");
+ volume_cb = gst_object_get_control_binding (GST_OBJECT (self), "volume");
+
+ if (mute_cb || (volume_cb && !self->current_mute)) {
+ gint rate = GST_AUDIO_INFO_RATE (&filter->info);
+ gint width = GST_AUDIO_FORMAT_INFO_WIDTH (filter->info.finfo) / 8;
+ gint channels = GST_AUDIO_INFO_CHANNELS (&filter->info);
+ guint nsamples = map.size / (width * channels);
+ GstClockTime interval = gst_util_uint64_scale_int (1, GST_SECOND, rate);
+ gboolean have_mutes = FALSE;
+ gboolean have_volumes = FALSE;
+
+ if (self->mutes_count < nsamples && mute_cb) {
+ self->mutes = g_realloc (self->mutes, sizeof (gboolean) * nsamples);
+ self->mutes_count = nsamples;
+ }
+
+ if (self->volumes_count < nsamples) {
+ self->volumes = g_realloc (self->volumes, sizeof (gdouble) * nsamples);
+ self->volumes_count = nsamples;
+ }
+
+ if (volume_cb && self->volumes) {
+ have_volumes =
+ gst_control_binding_get_value_array (volume_cb, ts, interval,
+ nsamples, (gpointer) self->volumes);
+ gst_object_replace ((GstObject **) & volume_cb, NULL);
+ }
+ if (!have_volumes) {
+ volume_orc_memset_f64 (self->volumes, self->current_volume, nsamples);
+ }
+
+ if (mute_cb && self->mutes) {
+ have_mutes = gst_control_binding_get_value_array (mute_cb, ts, interval,
+ nsamples, (gpointer) self->mutes);
+ gst_object_replace ((GstObject **) & mute_cb, NULL);
+ }
+ if (have_mutes) {
+ volume_orc_prepare_volumes (self->volumes, self->mutes, nsamples);
+ } else {
+ g_free (self->mutes);
+ self->mutes = NULL;
+ self->mutes_count = 0;
+ }
+
+ self->process_controlled (self, map.data, self->volumes, channels,
+ map.size);
+
+ goto done;
+ } else if (volume_cb) {
+ gst_object_unref (volume_cb);
+ }
+ }
+
+ if (self->current_volume == 0.0 || self->current_mute) {
+ orc_memset (map.data, 0, map.size);
++#ifndef TIZEN_FEATURE_VOLUME_MODIFICATION
++ /* if this flag is set, the buffer will be dropped at baseaudiosink, which causes pa to close. */
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);
++#endif
+ } else if (self->current_volume != 1.0) {
+ self->process (self, map.data, map.size);
+ }
+
+ done:
+ gst_buffer_unmap (outbuf, &map);
+
+ return GST_FLOW_OK;
+
+ /* ERRORS */
+ not_negotiated:
+ {
+ GST_ELEMENT_ERROR (self, CORE, NEGOTIATION,
+ ("No format was negotiated"), (NULL));
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+ }
+
+ static void
+ volume_set_property (GObject * object, guint prop_id, const GValue * value,
+ GParamSpec * pspec)
+ {
+ GstVolume *self = GST_VOLUME (object);
+
+ switch (prop_id) {
+ case PROP_MUTE:
+ GST_OBJECT_LOCK (self);
+ self->mute = g_value_get_boolean (value);
+ GST_OBJECT_UNLOCK (self);
+ break;
+ case PROP_VOLUME:
+ GST_OBJECT_LOCK (self);
+ self->volume = g_value_get_double (value);
+ GST_OBJECT_UNLOCK (self);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static void
+ volume_get_property (GObject * object, guint prop_id, GValue * value,
+ GParamSpec * pspec)
+ {
+ GstVolume *self = GST_VOLUME (object);
+
+ switch (prop_id) {
+ case PROP_MUTE:
+ GST_OBJECT_LOCK (self);
+ g_value_set_boolean (value, self->mute);
+ GST_OBJECT_UNLOCK (self);
+ break;
+ case PROP_VOLUME:
+ GST_OBJECT_LOCK (self);
+ g_value_set_double (value, self->volume);
+ GST_OBJECT_UNLOCK (self);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ }
+
+ static gboolean
+ plugin_init (GstPlugin * plugin)
+ {
+ GST_DEBUG_CATEGORY_INIT (GST_CAT_DEFAULT, "volume", 0, "Volume gain");
+
+ return GST_ELEMENT_REGISTER (volume, plugin);
+ }
+
+ GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ volume,
+ "plugin for controlling audio volume",
+ plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
--- /dev/null
+ project('gst-plugins-base', 'c',
+ version : '1.19.2',
+ meson_version : '>= 0.54',
+ default_options : [ 'warning_level=1',
+ 'buildtype=debugoptimized' ])
+
+ gst_version = meson.project_version()
+ version_arr = gst_version.split('.')
+ gst_version_major = version_arr[0].to_int()
+ gst_version_minor = version_arr[1].to_int()
+ gst_version_micro = version_arr[2].to_int()
+ if version_arr.length() == 4
+ gst_version_nano = version_arr[3].to_int()
+ else
+ gst_version_nano = 0
+ endif
+ gst_version_is_dev = gst_version_minor % 2 == 1 and gst_version_micro < 90
+
+ host_system = host_machine.system()
+
+ have_cxx = add_languages('cpp', native: false, required: false)
+
+ if host_system in ['ios', 'darwin']
+ have_objc = add_languages('objc', native: false)
+ else
+ have_objc = false
+ endif
+
+ glib_req = '>= 2.56.0'
+ orc_req = '>= 0.4.24'
+ gst_req = '>= @0@.@1@.0'.format(gst_version_major, gst_version_minor)
+
+ api_version = '1.0'
+ soversion = 0
+ # maintaining compatibility with the previous libtool versioning
+ # current = minor * 100 + micro
+ curversion = gst_version_minor * 100 + gst_version_micro
+ libversion = '@0@.@1@.0'.format(soversion, curversion)
+ osxversion = curversion + 1
+
+ plugins_install_dir = join_paths(get_option('libdir'), 'gstreamer-1.0')
+ plugins = []
+
+ cc = meson.get_compiler('c')
+
+ if cc.get_id() == 'msvc'
+ msvc_args = [
+ # Ignore several spurious warnings for things gstreamer does very commonly
+ # If a warning is completely useless and spammy, use '/wdXXXX' to suppress it
+ # If a warning is harmless but hard to fix, use '/woXXXX' so it's shown once
+ # NOTE: Only add warnings here if you are sure they're spurious
+ '/wd4018', # implicit signed/unsigned conversion
+ '/wd4146', # unary minus on unsigned (beware INT_MIN)
+ '/wd4244', # lossy type conversion (e.g. double -> int)
+ '/wd4305', # truncating type conversion (e.g. double -> float)
+ cc.get_supported_arguments(['/utf-8']), # set the input encoding to utf-8
+
+ # Enable some warnings on MSVC to match GCC/Clang behaviour
+ '/w14062', # enumerator 'identifier' in switch of enum 'enumeration' is not handled
+ '/w14101', # 'identifier' : unreferenced local variable
+ '/w14189', # 'identifier' : local variable is initialized but not referenced
+ ]
+ add_project_arguments(msvc_args, language: ['c', 'cpp'])
+ # Disable SAFESEH with MSVC for plugins and libs that use external deps that
+ # are built with MinGW
+ noseh_link_args = ['/SAFESEH:NO']
+ else
+ noseh_link_args = []
+ endif
+
+ if cc.has_link_argument('-Wl,-Bsymbolic-functions')
+ add_project_link_arguments('-Wl,-Bsymbolic-functions', language : 'c')
+ endif
+
+ core_conf = configuration_data()
+
+ # Symbol visibility
+ if cc.get_id() == 'msvc'
+ export_define = '__declspec(dllexport) extern'
+ elif cc.has_argument('-fvisibility=hidden')
+ add_project_arguments('-fvisibility=hidden', language: 'c')
+ if have_objc
+ add_project_arguments('-fvisibility=hidden', language: 'objc')
+ endif
+ export_define = 'extern __attribute__ ((visibility ("default")))'
+ else
+ export_define = 'extern'
+ endif
+
+ # Passing this through the command line would be too messy
+ core_conf.set('GST_API_EXPORT', export_define)
+
+ # Disable strict aliasing
+ if cc.has_argument('-fno-strict-aliasing')
+ add_project_arguments('-fno-strict-aliasing', language: 'c')
+ endif
+
+ # Define G_DISABLE_DEPRECATED for development versions
+ if gst_version_is_dev
+ message('Disabling deprecated GLib API')
+ add_project_arguments('-DG_DISABLE_DEPRECATED', language: 'c')
+ endif
+
+ cast_checks = get_option('gobject-cast-checks')
+ if cast_checks.disabled() or (cast_checks.auto() and not gst_version_is_dev)
+ message('Disabling GLib cast checks')
+ add_project_arguments('-DG_DISABLE_CAST_CHECKS', language: 'c')
+ endif
+
+ glib_asserts = get_option('glib-asserts')
+ if glib_asserts.disabled() or (glib_asserts.auto() and not gst_version_is_dev)
+ message('Disabling GLib asserts')
+ add_project_arguments('-DG_DISABLE_ASSERT', language: 'c')
+ endif
+
+ glib_checks = get_option('glib-checks')
+ if glib_checks.disabled() or (glib_checks.auto() and not gst_version_is_dev)
+ message('Disabling GLib checks')
+ add_project_arguments('-DG_DISABLE_CHECKS', language: 'c')
+ endif
+
+ # These are only needed/used by the ABI tests from core
+ host_defines = [
+ [ 'x86', 'HAVE_CPU_I386' ],
+ [ 'x86_64', 'HAVE_CPU_X86_64' ],
+ [ 'arm', 'HAVE_CPU_ARM' ],
+ [ 'aarch64', 'HAVE_CPU_AARCH64' ],
+ [ 'mips', 'HAVE_CPU_MIPS' ],
+ [ 'powerpc', 'HAVE_CPU_PPC' ],
+ [ 'powerpc64', 'HAVE_CPU_PPC64' ],
+ [ 'alpha', 'HAVE_CPU_ALPHA' ],
+ [ 'sparc', 'HAVE_CPU_SPARC' ],
+ [ 'ia64', 'HAVE_CPU_IA64' ],
+ [ 'hppa', 'HAVE_CPU_HPPA' ],
+ [ 'm68k', 'HAVE_CPU_M68K' ],
+ [ 's390', 'HAVE_CPU_S390' ],
+ ]
+ foreach h : host_defines
+ if h.get(0) == host_machine.cpu_family()
+ core_conf.set(h.get(1), 1)
+ endif
+ endforeach
+ # FIXME: should really be called HOST_CPU or such
+ core_conf.set_quoted('TARGET_CPU', host_machine.cpu())
+
+ check_headers = [
+ ['HAVE_DLFCN_H', 'dlfcn.h'],
+ ['HAVE_EMMINTRIN_H', 'emmintrin.h'],
+ ['HAVE_INTTYPES_H', 'inttypes.h'],
+ ['HAVE_MEMORY_H', 'memory.h'],
+ ['HAVE_NETINET_IN_H', 'netinet/in.h'],
+ ['HAVE_NETINET_TCP_H', 'netinet/tcp.h'],
+ ['HAVE_PROCESS_H', 'process.h'],
+ ['HAVE_SMMINTRIN_H', 'smmintrin.h'],
+ ['HAVE_STDINT_H', 'stdint.h'],
+ ['HAVE_STRINGS_H', 'strings.h'],
+ ['HAVE_STRING_H', 'string.h'],
+ ['HAVE_SYS_SOCKET_H', 'sys/socket.h'],
+ ['HAVE_SYS_STAT_H', 'sys/stat.h'],
+ ['HAVE_SYS_TYPES_H', 'sys/types.h'],
+ ['HAVE_SYS_WAIT_H', 'sys/wait.h'],
+ ['HAVE_UNISTD_H', 'unistd.h'],
+ ['HAVE_WINSOCK2_H', 'winsock2.h'],
+ ['HAVE_XMMINTRIN_H', 'xmmintrin.h'],
+ ['HAVE_LINUX_DMA_BUF_H', 'linux/dma-buf.h'],
+ ]
+ foreach h : check_headers
+ if cc.has_header(h.get(1))
+ core_conf.set(h.get(0), 1)
+ endif
+ endforeach
+
+ check_functions = [
+ ['HAVE_DCGETTEXT', 'dcgettext', '#include<libintl.h>'],
+ ['HAVE_GMTIME_R', 'gmtime_r', '#include<time.h>'],
+ ['HAVE_LOCALTIME_R', 'localtime_r', '#include<time.h>'],
+ ['HAVE_LRINTF', 'lrintf', '#include<math.h>'],
+ ['HAVE_MMAP', 'mmap', '#include<sys/mman.h>'],
+ ['HAVE_LOG2', 'log2', '#include<math.h>'],
+ ]
+
+ libm = cc.find_library('m', required : false)
+ foreach f : check_functions
+ if cc.has_function(f.get(1), prefix : f.get(2), dependencies : libm)
+ core_conf.set(f.get(0), 1)
+ endif
+ endforeach
+
+ core_conf.set('SIZEOF_CHAR', cc.sizeof('char'))
+ core_conf.set('SIZEOF_INT', cc.sizeof('int'))
+ core_conf.set('SIZEOF_LONG', cc.sizeof('long'))
+ core_conf.set('SIZEOF_SHORT', cc.sizeof('short'))
+ core_conf.set('SIZEOF_VOIDP', cc.sizeof('void*'))
+
+ core_conf.set_quoted('GETTEXT_PACKAGE', 'gst-plugins-base-1.0')
+ core_conf.set_quoted('LOCALEDIR', join_paths(get_option('prefix'), get_option('localedir')))
+ core_conf.set_quoted('PACKAGE', 'gst-plugins-base')
+ core_conf.set_quoted('VERSION', gst_version)
+ core_conf.set_quoted('PACKAGE_VERSION', gst_version)
+ core_conf.set_quoted('GST_API_VERSION', api_version)
+ core_conf.set_quoted('GST_DATADIR', join_paths(get_option('prefix'), get_option('datadir')))
+ core_conf.set_quoted('GST_LICENSE', 'LGPL')
+
+ install_plugins_helper = get_option('install_plugins_helper')
+ if install_plugins_helper == ''
+ install_plugins_helper = join_paths(get_option('prefix'),
+ get_option('libexecdir'),
+ 'gst-install-plugins-helper')
+ endif
+ core_conf.set_quoted('GST_INSTALL_PLUGINS_HELPER', install_plugins_helper)
+
+ warning_flags = [
+ '-Wmissing-declarations',
+ '-Wredundant-decls',
+ '-Wundef',
+ '-Wwrite-strings',
+ '-Wformat',
+ '-Wformat-nonliteral',
+ '-Wformat-security',
+ '-Winit-self',
+ '-Wmissing-include-dirs',
+ '-Waddress',
+ '-Wno-multichar',
+ '-Wvla',
+ '-Wpointer-arith',
+ ]
+
+ warning_c_flags = [
+ '-Wmissing-prototypes',
+ '-Wdeclaration-after-statement',
+ ]
+
+ warning_cxx_flags = [
+ '-Waggregate-return',
+ ]
+
+ if have_cxx
+ cxx = meson.get_compiler('cpp')
+ foreach extra_arg : warning_cxx_flags
+ if cxx.has_argument (extra_arg)
+ add_project_arguments([extra_arg], language: 'cpp')
+ endif
+ endforeach
+ endif
+
+ foreach extra_arg : warning_flags
+ if cc.has_argument (extra_arg)
+ add_project_arguments([extra_arg], language: 'c')
+ endif
+ if have_cxx and cxx.has_argument (extra_arg)
+ add_project_arguments([extra_arg], language: 'cpp')
+ endif
+ endforeach
+
+ foreach extra_arg : warning_c_flags
+ if cc.has_argument (extra_arg)
+ add_project_arguments([extra_arg], language: 'c')
+ endif
+ endforeach
+
+ # GStreamer package name and origin url
+ gst_package_name = get_option('package-name')
+ if gst_package_name == ''
+ if gst_version_nano == 0
+ gst_package_name = 'GStreamer Base Plug-ins source release'
+ elif gst_version_nano == 1
+ gst_package_name = 'GStreamer Base Plug-ins git'
+ else
+ gst_package_name = 'GStreamer Base Plug-ins prerelease'
+ endif
+ endif
+ core_conf.set_quoted('GST_PACKAGE_NAME', gst_package_name)
+ core_conf.set_quoted('GST_PACKAGE_ORIGIN', get_option('package-origin'))
+
+ # FIXME: These should be configure options
+ core_conf.set_quoted('DEFAULT_VIDEOSINK', 'autovideosink')
+ core_conf.set_quoted('DEFAULT_AUDIOSINK', 'autoaudiosink')
+
+ # Set whether the audioresampling method should be detected at runtime
+ core_conf.set('AUDIORESAMPLE_FORMAT_' + get_option('audioresample_format').to_upper(), true)
+
+ gst_plugins_base_args = ['-DHAVE_CONFIG_H']
+ if get_option('default_library') == 'static'
+ gst_plugins_base_args += ['-DGST_STATIC_COMPILATION']
+ endif
+
+ # X11 checks are for sys/ and tests/
+ x11_dep = dependency('x11', required : get_option('x11'))
+ # GLib checks are for the entire project
+ # Almost everything that uses glib also uses gobject
+ glib_deps = [dependency('glib-2.0', version : glib_req, fallback: ['glib', 'libglib_dep']),
+ dependency('gobject-2.0', fallback: ['glib', 'libgobject_dep'])]
+ # GIO is used by the GIO plugin, and by the TCP, SDP, and RTSP plugins
+ gio_dep = dependency('gio-2.0', fallback: ['glib', 'libgio_dep'])
+ giounix_dep = dependency('', required: false)
+ if host_system != 'windows'
+ giounix_dep = dependency('gio-unix-2.0', version : glib_req,
+ fallback: ['glib', 'libgiounix_dep'])
+ endif
+ gmodule_dep = dependency('gmodule-no-export-2.0',
+ fallback: ['glib', 'libgmodule_dep'])
+
+ # some of the examples can use gdk-pixbuf and GTK+3
+ gdk_pixbuf_dep = dependency('gdk-pixbuf-2.0', required : get_option('examples'))
+ gtk_dep = dependency('gtk+-3.0', version : '>= 3.10', required : get_option('examples'))
+ # TODO: https://github.com/mesonbuild/meson/issues/3941
+ if not get_option('x11').disabled()
+ gtk_x11_dep = dependency('gtk+-x11-3.0', version : '>= 3.10', required : get_option('examples'))
+ else
+ gtk_x11_dep = dependency('', required : false)
+ endif
+ # gtk+ quartz backend is only available on macOS
+ if host_system == 'darwin'
+ gtk_quartz_dep = dependency('gtk+-quartz-3.0', version : '>= 3.10', required : get_option('examples'))
+ else
+ gtk_quartz_dep = dependency('', required : false)
+ endif
+
+ core_conf.set('HAVE_X11', x11_dep.found())
+ core_conf.set('HAVE_GIO_UNIX_2_0', giounix_dep.found())
+
+ if gio_dep.type_name() == 'pkgconfig'
+ core_conf.set_quoted('GIO_MODULE_DIR',
+ gio_dep.get_pkgconfig_variable('giomoduledir'))
+ core_conf.set_quoted('GIO_LIBDIR',
+ gio_dep.get_pkgconfig_variable('libdir'))
+ core_conf.set_quoted('GIO_PREFIX',
+ gio_dep.get_pkgconfig_variable('prefix'))
+ else
+ core_conf.set_quoted('GIO_MODULE_DIR', join_paths(get_option('prefix'),
+ get_option('libdir'), 'gio/modules'))
+ core_conf.set_quoted('GIO_LIBDIR', join_paths(get_option('prefix'),
+ get_option('libdir')))
+ core_conf.set_quoted('GIO_PREFIX', join_paths(get_option('prefix')))
+ endif
+
+ configinc = include_directories('.')
+ libsinc = include_directories('gst-libs')
+
+ # To use the subproject make subprojects directory
+ # and put gstreamer meson git there (symlinking is fine)
+ gst_dep = dependency('gstreamer-1.0', version : gst_req,
+ fallback : ['gstreamer', 'gst_dep'])
+ gst_base_dep = dependency('gstreamer-base-1.0', version : gst_req,
+ fallback : ['gstreamer', 'gst_base_dep'])
+ gst_net_dep = dependency('gstreamer-net-1.0', version : gst_req,
+ fallback : ['gstreamer', 'gst_net_dep'])
+ gst_check_dep = dependency('gstreamer-check-1.0', version : gst_req,
+ required : get_option('tests'),
+ fallback : ['gstreamer', 'gst_check_dep'])
+ gst_controller_dep = dependency('gstreamer-controller-1.0', version : gst_req,
+ fallback : ['gstreamer', 'gst_controller_dep'])
+
+ have_orcc = false
+ orcc_args = []
+ orc_targets = []
+ # Used by various libraries/elements that use Orc code
+ orc_dep = dependency('orc-0.4', version : orc_req, required : get_option('orc'),
+ fallback : ['orc', 'orc_dep'])
+ orcc = find_program('orcc', required : get_option('orc'))
+ if orc_dep.found() and orcc.found()
+ have_orcc = true
+ orcc_args = [orcc, '--include', 'glib.h']
+ core_conf.set('HAVE_ORC', 1)
+ else
+ message('Orc Compiler not found or disabled, will use backup C code')
+ core_conf.set('DISABLE_ORC', 1)
+ endif
+
++# TIZEN_BUILD_OPTION
++core_conf.set('TIZEN_FEATURE_WAYLAND_ENHANCEMENT', true)
++core_conf.set('TIZEN_FEATURE_TYPEFIND_ENHANCEMENT', true)
++core_conf.set('TIZEN_FEATURE_AUDIODECODER_MODIFICATION', true)
++core_conf.set('TIZEN_FEATURE_DISABLE_MIME_TYPES', true)
++core_conf.set('TIZEN_FEATURE_VIDEO_MODIFICATION', true)
++core_conf.set('TIZEN_FEATURE_SUBPARSE_MODIFICATION', true)
++core_conf.set('TIZEN_FEATURE_SUBPARSE_DROP_OUT_OF_SEGMENT', true)
++core_conf.set('TIZEN_FEATURE_HLS_WEBVTT', true)
++core_conf.set('TIZEN_FEATURE_VOLUME_MODIFICATION', true)
++core_conf.set('TIZEN_FEATURE_FORCE_SW_DECODER', true)
++core_conf.set('TIZEN_FEATURE_U3_AVOID_DEADLOCK', true)
++core_conf.set('TIZEN_FEATURE_PLAYBIN3_MODIFICATION', true)
++core_conf.set('TIZEN_FEATURE_DISABLE_EOS_DROP', true)
++
++tbm_dep = dependency('libtbm', required : get_option('tbm'))
++if tbm_dep.found()
++ core_conf.set('USE_TBM', true)
++endif
++
++if get_option('tv-profile')
++ core_conf.set('TIZEN_TV_PROFILE', true)
++ core_conf.set('TIZEN_FEATURE_TRUSTZONE', true)
++else
++ core_conf.set('TIZEN_FEATURE_RESOURCE_MANAGER', true)
++endif
++
++# TIZEN_BUILD_OPTION end
++
+ # Used to build SSE* things in audio-resampler
+ sse_args = '-msse'
+ sse2_args = '-msse2'
+ sse41_args = '-msse4.1'
+
+ have_sse = cc.has_argument(sse_args)
+ have_sse2 = cc.has_argument(sse2_args)
+ have_sse41 = cc.has_argument(sse41_args)
+
+ if host_machine.cpu_family() == 'arm'
+ if cc.compiles('''
+ #include <arm_neon.h>
+ int32x4_t testfunc(int16_t *a, int16_t *b) {
+ asm volatile ("vmull.s16 q0, d0, d0" : : : "q0");
+ return vmull_s16(vld1_s16(a), vld1_s16(b));
+ }
+ ''', name : 'NEON support')
+ core_conf.set('HAVE_ARM_NEON', true)
+ endif
+ endif
+
+ if gst_dep.type_name() == 'internal'
+ gst_proj = subproject('gstreamer')
+
+ if not gst_proj.get_variable('gst_debug')
+ message('GStreamer debug system is disabled')
+ add_project_arguments('-Wno-unused', language: 'c')
+ else
+ message('GStreamer debug system is enabled')
+ endif
+ else
+ # We can't check that in the case of subprojects as we won't
+ # be able to build against an internal dependency (which is not built yet)
+ if not cc.compiles('''
+ #include <gst/gstconfig.h>
+ #ifdef GST_DISABLE_GST_DEBUG
+ #error "debugging disabled, make compiler fail"
+ #endif''' , dependencies: gst_dep)
+ message('GStreamer debug system is disabled')
+ add_project_arguments('-Wno-unused', language: 'c')
+ else
+ message('GStreamer debug system is enabled')
+ endif
+ endif
+
+ if cc.has_member('struct tcp_info', '__tcpi_reordering', prefix: '#include <netinet/tcp.h>')
+ core_conf.set('HAVE_BSD_TCP_INFO', true)
+ endif
+
+ if cc.has_member('struct tcp_info', 'tcpi_reordering', prefix: '#include <netinet/tcp.h>')
+ core_conf.set('HAVE_LINUX_TCP_INFO', true)
+ endif
+
+ gir = find_program('g-ir-scanner', required : get_option('introspection'))
+ gnome = import('gnome')
+ build_gir = gir.found() and (not meson.is_cross_build() or get_option('introspection').enabled())
+ gir_init_section = [ '--add-init-section=extern void gst_init(gint*,gchar**);' + \
+ 'g_setenv("GST_REGISTRY_DISABLE", "yes", TRUE);' + \
+ 'g_setenv("GST_REGISTRY_1.0", "@0@", TRUE);'.format(meson.current_build_dir() + '/gir_empty_registry.reg') + \
+ 'g_setenv("GST_PLUGIN_PATH_1_0", "", TRUE);' + \
+ 'g_setenv("GST_PLUGIN_SYSTEM_PATH_1_0", "", TRUE);' + \
+ 'gst_init(NULL,NULL);', '--quiet']
+
+ pkgconfig = import('pkgconfig')
+ plugins_pkgconfig_install_dir = join_paths(plugins_install_dir, 'pkgconfig')
+ if get_option('default_library') == 'shared'
+ # If we don't build static plugins there is no need to generate pc files
+ plugins_pkgconfig_install_dir = disabler()
+ endif
+
+ pkgconfig_variables = ['exec_prefix=${prefix}',
+ 'toolsdir=${exec_prefix}/bin',
+ 'pluginsdir=${libdir}/gstreamer-1.0',
+ 'datarootdir=${prefix}/share',
+ 'datadir=${datarootdir}',
+ 'girdir=${datadir}/gir-1.0',
+ 'typelibdir=${libdir}/girepository-1.0',
+ 'libexecdir=${prefix}/libexec']
+ pkgconfig_subdirs = ['gstreamer-1.0']
+
+ meson_pkg_config_file_fixup_script = find_program('scripts/meson-pkg-config-file-fixup.py')
+
+ python3 = import('python').find_installation()
+ subdir('gst-libs')
+ subdir('gst')
+ subdir('ext')
+ subdir('sys')
+ if not get_option('tools').disabled()
+ subdir('tools')
+ endif
+ subdir('tests')
+
+ # xgettext is optional (on Windows for instance)
+ if find_program('xgettext', required : get_option('nls')).found()
+ core_conf.set('ENABLE_NLS', 1)
+ subdir('po')
+ endif
+ subdir('docs')
+ subdir('scripts')
+
+ base_libraries = ['allocators', 'app', 'audio', 'fft', 'pbutils', 'riff', 'rtp', 'rtsp', 'sdp', 'tag', 'video']
+ if build_gstgl
+ base_libraries += 'gl'
+ endif
+
+ pkgconfig_plugins_base_libs_variables = [
+ 'libraries=' + ' '.join(base_libraries),
+ ]
+
+ pkgconfig.generate(
+ libraries : [gst_dep],
+ variables : pkgconfig_variables + pkgconfig_plugins_base_libs_variables,
+ uninstalled_variables : pkgconfig_plugins_base_libs_variables,
+ subdirs : pkgconfig_subdirs,
+ name : 'gstreamer-plugins-base-1.0',
+ description : 'Streaming media framework, base plugins libraries',
+ )
+
+ # Desperate times, desperate measures... fix up escaping of our variables
+ run_command(meson_pkg_config_file_fixup_script,
+ 'gstreamer-plugins-base-1.0', 'libraries',
+ check: true)
+
+ if have_orcc
+ update_orc_dist_files = find_program('scripts/update-orc-dist-files.py')
+
+ orc_update_targets = []
+ foreach t : orc_targets
+ orc_name = t.get('name')
+ orc_file = t.get('orc-source')
+ header = t.get('header')
+ source = t.get('source')
+ # alias_target() only works with build targets, so can't use run_target() here
+ orc_update_targets += [
+ custom_target('update-orc-@0@'.format(orc_name),
+ input: [header, source],
+ command: [update_orc_dist_files, orc_file, header, source],
+ output: ['@0@-dist.c'.format(orc_name)]) # not entirely true
+ ]
+ endforeach
+
+ if meson.version().version_compare('>= 0.52')
+ update_orc_dist_target = alias_target('update-orc-dist', orc_update_targets)
+ endif
+ endif
+
+ # Set release date
+ if gst_version_nano == 0
+ extract_release_date = find_program('scripts/extract-release-date-from-doap-file.py')
+ run_result = run_command(extract_release_date, gst_version, files('gst-plugins-base.doap'))
+ if run_result.returncode() == 0
+ release_date = run_result.stdout().strip()
+ core_conf.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
+ message('Package release date: ' + release_date)
+ else
+ # Error out if our release can't be found in the .doap file
+ error(run_result.stderr())
+ endif
+ endif
+
+ if gio_dep.version().version_compare('< 2.67.4')
+ core_conf.set('g_memdup2(ptr,sz)', '(G_LIKELY(((guint64)(sz)) < G_MAXUINT)) ? g_memdup(ptr,sz) : (g_abort(),NULL)')
+ endif
+
+ # Use core_conf after all subdirs have set values
+ configure_file(output : 'config.h', configuration : core_conf)
+
+ run_command(python3, '-c', 'import shutil; shutil.copy("hooks/pre-commit.hook", ".git/hooks/pre-commit")')
+
+ if meson.version().version_compare('>= 0.54')
+ plugin_names = []
+ foreach plugin: plugins
+ # FIXME: Use str.subtring() when we can depend on Meson 0.56
+ split = plugin.name().split('gst')
+ if split.length() == 2
+ plugin_names += [split[1]]
+ else
+ warning('Need substring API in meson >= 0.56 to properly parse plugin name: ' + plugin.name())
+ plugin_names += [plugin.name()]
+ endif
+ endforeach
+ summary({'Plugins':plugin_names}, list_sep: ', ')
+ endif
--- /dev/null
+ option('audioresample_format', type : 'combo',
+ choices : ['int', 'float', 'auto'], value : 'auto')
+ option('install_plugins_helper', type: 'string', value: '',
+ description: 'Path of distro helper script to call to install missing plugins')
+ option('iso-codes', type : 'feature', value : 'auto',
+ description: 'Use iso-codes in libgsttag (for language names)')
+
+ # OpenGL integration library options
+ option('gl_api', type : 'array', choices : ['opengl', 'gles2', 'auto'], value : ['auto'],
+ description : 'A comma separated list of opengl APIs to enable building against')
+ option('gl_platform', type : 'array',
+ choices : ['glx', 'egl', 'cgl', 'wgl', 'eagl', 'auto'], value : ['auto'],
+ description : 'A comma separated list of opengl platforms to enable building against')
+ option('gl_winsys', type : 'array',
+ choices : ['x11', 'wayland', 'win32', 'winrt', 'cocoa', 'dispmanx', 'egl', 'viv-fb', 'gbm', 'android', 'auto'], value : ['auto'],
+ description : 'A comma separated list of opengl windows systems to enable building against. Supported values are x11, wayland, win32, winrt, cocoa, dispmanx, egl, viv-fb, gbm, and android')
+ option('egl_module_name', type : 'string', value : '',
+ description : 'The file to pass to g_module_open to open the libEGL library (default: libEGL)')
+ option('opengl_module_name', type : 'string', value : '',
+ description : 'The file to pass to g_module_open to open the libGL library (default: libGL)')
+ option('gles2_module_name', type : 'string', value : '',
+ description : 'The file to pass to g_module_open to open the libGLESv2 library (default: libGLESv2)')
+
+ # Feature option for opengl plugin and integration library
+ option('gl', type : 'feature', value : 'auto', description : 'OpenGL integration library and OpenGL plugin')
+ option('gl-graphene', type : 'feature', value : 'auto', description : 'Use Graphene in OpenGL plugin')
+ option('gl-jpeg', type : 'feature', value : 'auto', description : 'Use libjpeg in OpenGL plugin')
+ option('gl-png', type : 'feature', value : 'auto', description : 'Use libpng in OpenGL plugin')
+
+ # Feature options for plugins with no external deps
+ option('adder', type : 'feature', value : 'auto')
+ option('app', type : 'feature', value : 'auto')
+ option('audioconvert', type : 'feature', value : 'auto')
+ option('audiomixer', type : 'feature', value : 'auto')
+ option('audiorate', type : 'feature', value : 'auto')
+ option('audioresample', type : 'feature', value : 'auto')
+ option('audiotestsrc', type : 'feature', value : 'auto')
+ option('compositor', type : 'feature', value : 'auto')
+ option('encoding', type : 'feature', value : 'auto')
+ option('gio', type : 'feature', value : 'auto')
+ option('gio-typefinder', type : 'feature', value : 'auto')
+ option('overlaycomposition', type : 'feature', value : 'auto')
+ option('pbtypes', type : 'feature', value : 'auto')
+ option('playback', type : 'feature', value : 'auto')
+ option('rawparse', type : 'feature', value : 'auto')
+ option('subparse', type : 'feature', value : 'auto')
+ option('tcp', type : 'feature', value : 'auto')
+ option('typefind', type : 'feature', value : 'auto')
+ option('videoconvert', type : 'feature', value : 'auto')
+ option('videorate', type : 'feature', value : 'auto')
+ option('videoscale', type : 'feature', value : 'auto')
+ option('videotestsrc', type : 'feature', value : 'auto')
+ option('volume', type : 'feature', value : 'auto')
+
+ # Feature options for plugins with external deps
+ option('alsa', type : 'feature', value : 'auto', description : 'ALSA audio source/sink plugin')
+ option('cdparanoia', type : 'feature', value : 'auto', description : 'cdparanoia plugin')
+ option('libvisual', type : 'feature', value : 'auto', description : 'libvisual audio visualization plugin')
+ option('ogg', type : 'feature', value : 'auto', description : 'ogg parser, muxer, demuxer plugin')
+ option('opus', type : 'feature', value : 'auto', description : 'OPUS audio codec plugin')
+ option('pango', type : 'feature', value : 'auto', description : 'Pango text rendering and overlay plugin')
+ option('theora', type : 'feature', value : 'auto', description : 'Theora video parser and codec plugin')
+ option('tremor', type : 'feature', value : 'auto', description : 'Integer Vorbis decoder plugin for devices without floating point')
+ option('vorbis', type : 'feature', value : 'auto', description : 'Vorbis audio parser, tagger, and codec plugin')
+ option('x11', type : 'feature', value : 'auto', description : 'X11 ximagesink plugin, and X11 support in libraries, plugins, examples')
+ option('xshm', type : 'feature', value : 'auto', description : 'X11 shared memory support for X11 plugins')
+ option('xvideo', type : 'feature', value : 'auto', description : 'X11 XVideo xvimagesink plugin')
+
+ # Common feature options
+ option('examples', type : 'feature', value : 'auto', yield : true)
+ option('tests', type : 'feature', value : 'auto', yield : true)
+ option('tools', type : 'feature', value : 'auto', yield : true)
+ option('introspection', type : 'feature', value : 'auto', yield : true, description : 'Generate gobject-introspection bindings')
+ option('nls', type : 'feature', value : 'auto', yield: true, description : 'Enable native language support (translations)')
+ option('orc', type : 'feature', value : 'auto', yield : true)
+ option('gobject-cast-checks', type : 'feature', value : 'auto', yield : true,
+ description: 'Enable run-time GObject cast checks (auto = enabled for development, disabled for stable releases)')
+ option('glib-asserts', type : 'feature', value : 'enabled', yield : true,
+ description: 'Enable GLib assertion (auto = enabled for development, disabled for stable releases)')
+ option('glib-checks', type : 'feature', value : 'enabled', yield : true,
+ description: 'Enable GLib checks such as API guards (auto = enabled for development, disabled for stable releases)')
+ option('qt5', type : 'feature', value : 'auto', yield : true, description : 'Qt5 QML examples')
+
+ # Common options
+ option('package-name', type : 'string', yield : true,
+ description : 'package name to use in plugins')
+ option('package-origin', type : 'string', value : 'Unknown package origin', yield : true,
+ description : 'package origin URL to use in plugins')
+ option('doc', type : 'feature', value : 'auto', yield: true,
+ description: 'Enable documentation.')
++
++# Tizen Options
++option('tbm', type : 'boolean', value : true,
++ description : 'tizen buffer manager')
++option('tv-profile', type : 'boolean', value : false,
++ description : 'tv-profile')
--- /dev/null
-# Copyright (C) 2011, 2016 Free Software Foundation, Inc.
+ # Bulgarian translation of gst-plugins-base.
+ # Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
-# Alexander Shopov <ash@kambanaria.org>, 2011, 2016.
++# Copyright (C) 2011, 2016, 2019 Free Software Foundation, Inc.
+ # This file is distributed under the same license as the gst-plugins-base package.
+ # Alexander Shopov <ash@kambanaria.org>, 2005, 2006, 2007, 2008, 2009, 2010.
-"Project-Id-Version: gst-plugins-base 1.7.2\n"
++# Alexander Shopov <ash@kambanaria.org>, 2011, 2016, 2019.
+ #
+ #
+ msgid ""
+ msgstr ""
-"POT-Creation-Date: 2019-02-26 11:43+0000\n"
-"PO-Revision-Date: 2016-02-21 21:03+0200\n"
++"Project-Id-Version: gst-plugins-base 1.16.0\n"
+ "Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-msgid ""
-"Could not open audio device for playback. Device is being used by another "
-"application."
-msgstr ""
-"Звуковото устройство не може да се отвори за изпълнение, защото се ползва от "
-"друго приложение."
++"POT-Creation-Date: 2019-04-19 00:20+0100\n"
++"PO-Revision-Date: 2019-09-04 10:08+0200\n"
+ "Last-Translator: Alexander Shopov <ash@kambanaria.org>\n"
+ "Language-Team: Bulgarian <dict@ludost.net>\n"
+ "Language: bg\n"
+ "MIME-Version: 1.0\n"
+ "Content-Type: text/plain; charset=UTF-8\n"
+ "Content-Transfer-Encoding: 8bit\n"
++"X-Bugs: Report translation errors to the Language-Team address.\n"
+ "Plural-Forms: nplurals=2; plural=n != 1;\n"
+
++#: ext/alsa/gstalsasink.c:579
+ msgid "Could not open device for playback in mono mode."
+ msgstr "Устройството не може да се отвори за изпълнение в моно режим."
+
++#: ext/alsa/gstalsasink.c:581
+ msgid "Could not open device for playback in stereo mode."
+ msgstr "Устройството не може да се отвори за изпълнение в стерео режим."
+
++#: ext/alsa/gstalsasink.c:585
+ #, c-format
+ msgid "Could not open device for playback in %d-channel mode."
+ msgstr "Устройството не може да се отвори за изпълнение в %d-канален режим."
+
-msgid ""
-"Could not open audio device for recording. Device is being used by another "
-"application."
-msgstr ""
-"Звуковото устройство не може да се отвори за запис, защото се ползва от "
-"друго приложение."
++#: ext/alsa/gstalsasink.c:858
++msgid "Could not open audio device for playback. Device is being used by another application."
++msgstr "Звуковото устройство не може да се отвори за изпълнение, защото се ползва от друго приложение."
+
++#: ext/alsa/gstalsasink.c:863
+ msgid "Could not open audio device for playback."
+ msgstr "Звуковото устройство не може да се отвори за изпълнение."
+
++#: ext/alsa/gstalsasink.c:1076
+ msgid "Error outputting to audio device. The device has been disconnected."
+ msgstr "Грешка при извеждане към аудио устройството. То не е свързано."
+
++#: ext/alsa/gstalsasrc.c:444
+ msgid "Could not open device for recording in mono mode."
+ msgstr "Устройството не може да се отвори за запис в моно режим."
+
++#: ext/alsa/gstalsasrc.c:446
+ msgid "Could not open device for recording in stereo mode."
+ msgstr "Устройството не може да се отвори за запис в стерео режим."
+
++#: ext/alsa/gstalsasrc.c:450
+ #, c-format
+ msgid "Could not open device for recording in %d-channel mode"
+ msgstr "Устройството не може да се отвори за запис в %d-канален режим."
+
-msgstr ""
++#: ext/alsa/gstalsasrc.c:741
++msgid "Could not open audio device for recording. Device is being used by another application."
++msgstr "Звуковото устройство не може да се отвори за запис, защото се ползва от друго приложение."
+
++#: ext/alsa/gstalsasrc.c:746
+ msgid "Could not open audio device for recording."
+ msgstr "Звуковото устройство не може да се отвори за запис."
+
++#: ext/alsa/gstalsasrc.c:992
+ msgid "Error recording from audio device. The device has been disconnected."
+ msgstr "Грешка при запис от аудио устройството. То не е свързано."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:277 ext/cdparanoia/gstcdparanoiasrc.c:283
+ msgid "Could not open CD device for reading."
+ msgstr "Устройството за CD-та не може да се отвори за четене."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:409
+ msgid "Could not seek CD."
+ msgstr "По CD-то не може да се търси."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:417
+ msgid "Could not read CD."
+ msgstr "CD-то не може да бъде прочетено."
+
++#: ext/gl/gstgltestsrc.c:491
+ msgid "failed to draw pattern"
-msgstr ""
++msgstr "шарката не може да се начертае"
+
++#: ext/gl/gstgltestsrc.c:492
+ msgid "A GL error occurred"
-msgstr ""
-
++msgstr "Грешка от графичната система GL"
+
++#: ext/gl/gstgltestsrc.c:498
+ msgid "format wasn't negotiated before get function"
-#, fuzzy
++msgstr "форматът не е бил уточнен преди извикването на функцията за получаване"
++
++#: gst/encoding/gstencodebin.c:1636 gst/playback/gstplaybin2.c:3399
++#: gst/playback/gstplaysink.c:1522 gst/playback/gstplaysink.c:1535
++#: gst/playback/gstplaysink.c:1872 gst/playback/gstplaysink.c:1904
++#: gst/playback/gstplaysink.c:2488 gst/playback/gstplaysink.c:2537
++#: gst/playback/gstplaysink.c:2552 gst/playback/gstplaysink.c:2577
++#: gst/playback/gstplaysink.c:2609 gst/playback/gstplaysink.c:2757
++#: gst/playback/gstplaysink.c:2788 gst/playback/gstplaysink.c:3166
++#: gst/playback/gstplaysink.c:3175 gst/playback/gstplaysink.c:3184
++#: gst/playback/gstplaysink.c:3193 gst/playback/gstplaysink.c:3606
++#: gst/playback/gstplaysink.c:4526 gst/playback/gstplaysinkconvertbin.c:97
++#: gst/playback/gstplaysinkconvertbin.c:117
++#: gst/playback/gsturidecodebin.c:1472 gst/playback/gsturisourcebin.c:1597
+ #, c-format
+ msgid "Missing element '%s' - check your GStreamer installation."
+ msgstr "Елементът „%s“ липсва — проверете инсталацията на GStreamer."
+
++#: gst/playback/gstdecodebin2.c:1883 gst/playback/gstparsebin.c:1553
+ msgid "Could not determine type of stream"
+ msgstr "Видът на потока не може да бъде определен"
+
++#: gst/playback/gstdecodebin2.c:2861 gst/playback/gstparsebin.c:2385
+ msgid "This appears to be a text file"
+ msgstr "Това изглежда е текстов файл"
+
++#: gst/playback/gstplaybin2.c:5452
+ msgid "Could not create \"uridecodebin\" element."
+ msgstr "Не може да се създаде елемент „uridecodebin“."
+
-msgstr "Не може да се създаде елемент „uridecodebin“."
++#: gst/playback/gstplaybin3.c:4558
+ msgid "Could not create \"uridecodebin3\" element."
-msgstr ""
++msgstr "Не може да се създаде елемент „uridecodebin3“."
+
++#: gst/playback/gstplaysink.c:2003
+ #, c-format
+ msgid "Both autovideosink and %s elements are missing."
+ msgstr "Едновременно липсват елементите-приемници „autovideosink“ и „%s“."
+
++#: gst/playback/gstplaysink.c:2007
+ msgid "The autovideosink element is missing."
+ msgstr "Липсва елементът-приемник „autovideosink“."
+
++#: gst/playback/gstplaysink.c:2012
+ #, c-format
+ msgid "Configured videosink %s is not working."
+ msgstr "Настроеният елемент-приемник за видео %s не работи."
+
++#: gst/playback/gstplaysink.c:2016
+ #, c-format
+ msgid "Both autovideosink and %s elements are not working."
+ msgstr "Едновременно не работят елементите-приемници „autovideosink“ и „%s“."
+
++#: gst/playback/gstplaysink.c:2020
+ msgid "The autovideosink element is not working."
+ msgstr "Елементът-приемник за автоматично видео „autovideosink“ не работи."
+
++#: gst/playback/gstplaysink.c:2525
+ msgid "Custom text sink element is not usable."
+ msgstr "Потребителският елемент-приемник за текст е неизползваем."
+
++#: gst/playback/gstplaysink.c:2903
+ msgid "No volume control found"
+ msgstr "Липсва управление на силата на звука"
+
++#: gst/playback/gstplaysink.c:2933
+ #, c-format
+ msgid "Both autoaudiosink and %s elements are missing."
+ msgstr "Едновременно липсват елементите-приемници „autoaudiosink“ и „%s“."
+
++#: gst/playback/gstplaysink.c:2937
+ msgid "The autoaudiosink element is missing."
+ msgstr "Липсва елементът „autoaudiosink“."
+
++#: gst/playback/gstplaysink.c:2942
+ #, c-format
+ msgid "Configured audiosink %s is not working."
+ msgstr "Настроеният елемент-приемник за аудио %s не работи."
+
++#: gst/playback/gstplaysink.c:2946
+ #, c-format
+ msgid "Both autoaudiosink and %s elements are not working."
+ msgstr "Едновременно не работят елементите „autoaudiosink“ и „%s“."
+
++#: gst/playback/gstplaysink.c:2950
+ msgid "The autoaudiosink element is not working."
+ msgstr "Елементът „autoaudiosink“ не работи."
+
++#: gst/playback/gstplaysink.c:3269 gst/playback/gstplaysink.c:3274
+ msgid "Can't play a text file without video or visualizations."
+ msgstr "Текстов файл не може да се изпълни без видео или визуализация."
+
++#: gst/playback/gsturidecodebin.c:921
+ #, c-format
+ msgid "No decoder available for type '%s'."
+ msgstr "Липсва декодер за вида „%s“."
+
++#: gst/playback/gsturidecodebin.c:1378 gst/playback/gsturisourcebin.c:1491
+ msgid "No URI specified to play from."
+ msgstr "Не е указан адрес за пускане."
+
++#: gst/playback/gsturidecodebin.c:1384 gst/playback/gsturisourcebin.c:1497
+ #, c-format
+ msgid "Invalid URI \"%s\"."
+ msgstr "Грешен адрес „%s“."
+
++#: gst/playback/gsturidecodebin.c:1391 gst/playback/gsturisourcebin.c:1504
+ msgid "This stream type cannot be played yet."
+ msgstr "Потоци от този тип все още не могат да бъдат изпълнявани."
+
++#: gst/playback/gsturidecodebin.c:1409 gst/playback/gsturisourcebin.c:1522
+ #, c-format
+ msgid "No URI handler implemented for \"%s\"."
+ msgstr "Обработката на адреси „%s“ не е реализирана."
+
++#: gst/playback/gsturidecodebin.c:2289 gst/playback/gsturisourcebin.c:2228
+ msgid "Source element is invalid."
+ msgstr "Елементът-източник е грешен."
+
++#: gst/tcp/gsttcpclientsink.c:216
+ #, c-format
+ msgid "Error while sending data to \"%s:%d\"."
+ msgstr "Грешка при изпращане на данни към „%s:%d“."
+
++#: gst-libs/gst/audio/gstaudiobasesrc.c:842
+ msgid "Can't record audio fast enough"
+ msgstr "Звукът не може да бъде записан достатъчно бързо"
+
++#: gst-libs/gst/audio/gstaudiocdsrc.c:1625
+ msgid "This CD has no audio tracks"
+ msgstr "В това CD липсва аудио"
+
++#: gst-libs/gst/pbutils/descriptions.c:89
+ msgid "ID3 tag"
+ msgstr "Етикет ID3"
+
++#: gst-libs/gst/pbutils/descriptions.c:90
++#: gst-libs/gst/pbutils/descriptions.c:91
+ msgid "APE tag"
+ msgstr "Етикет APE"
+
++#: gst-libs/gst/pbutils/descriptions.c:92
+ msgid "ICY internet radio"
+ msgstr "Радио в Интернет по ICY"
+
++#: gst-libs/gst/pbutils/descriptions.c:127
+ msgid "Apple Lossless Audio (ALAC)"
+ msgstr "Аудио на Apple без загуба на качество (ALAC)"
+
++#: gst-libs/gst/pbutils/descriptions.c:143
+ msgid "Free Lossless Audio Codec (FLAC)"
+ msgstr "Свободен кодер за аудио без загуба на качество (FLAC)"
+
++#: gst-libs/gst/pbutils/descriptions.c:173
++#: gst-libs/gst/pbutils/descriptions.c:174
+ msgid "Lossless True Audio (TTA)"
+ msgstr "Истинско аудио без загуба на качество (TTA)"
+
++#: gst-libs/gst/pbutils/descriptions.c:182
+ msgid "Windows Media Speech"
+ msgstr "Говор, формат Windows Media"
+
++#: gst-libs/gst/pbutils/descriptions.c:199
+ msgid "CYUV Lossless"
+ msgstr "CYUV без загуба на качество"
+
++#: gst-libs/gst/pbutils/descriptions.c:203
+ msgid "FFMpeg v1"
+ msgstr "FFMpeg, версия 1"
+
++#: gst-libs/gst/pbutils/descriptions.c:217
+ msgid "Lossless MSZH"
+ msgstr "MSZH без загуба на качество"
+
++#: gst-libs/gst/pbutils/descriptions.c:228
+ msgid "Run-length encoding"
+ msgstr "Последователно кодиране RLE"
+
++#: gst-libs/gst/pbutils/descriptions.c:274
+ msgid "Timed Text"
+ msgstr "Текст по време"
+
++#: gst-libs/gst/pbutils/descriptions.c:278
+ msgid "Subtitle"
+ msgstr "Субтитри"
+
++#: gst-libs/gst/pbutils/descriptions.c:279
+ msgid "MPL2 subtitle format"
+ msgstr "Субтитри, формат MPL2"
+
++#: gst-libs/gst/pbutils/descriptions.c:280
+ msgid "DKS subtitle format"
+ msgstr "Субтитри, формат DKS"
+
++#: gst-libs/gst/pbutils/descriptions.c:281
+ msgid "QTtext subtitle format"
+ msgstr "Субтитри, формат QText"
+
++#: gst-libs/gst/pbutils/descriptions.c:282
+ msgid "Sami subtitle format"
+ msgstr "Субтитри, формат Sami"
+
++#: gst-libs/gst/pbutils/descriptions.c:283
+ msgid "TMPlayer subtitle format"
+ msgstr "Субтитри, формат TMPlayer"
+
++#: gst-libs/gst/pbutils/descriptions.c:287
+ msgid "CEA 608 Closed Caption"
-msgstr ""
++msgstr "Субтитри, формат CEA 608"
+
++#: gst-libs/gst/pbutils/descriptions.c:288
+ msgid "CEA 708 Closed Caption"
-#, fuzzy
++msgstr "Субтитри, формат CEA 708"
+
++#: gst-libs/gst/pbutils/descriptions.c:289
+ msgid "Kate subtitle format"
+ msgstr "Субтитри, формат Kate"
+
-msgstr "Субтитри, формат Kate"
++#: gst-libs/gst/pbutils/descriptions.c:290
+ msgid "WebVTT subtitle format"
-msgstr "Аудио, необработен %d-битов %s"
++msgstr "Субтитри, формат WebVTT"
+
++#: gst-libs/gst/pbutils/descriptions.c:455
++#: gst-libs/gst/pbutils/descriptions.c:458
++#: gst-libs/gst/pbutils/descriptions.c:508
+ msgid "Uncompressed video"
+ msgstr "Некомпресирано видео"
+
++#: gst-libs/gst/pbutils/descriptions.c:463
+ msgid "Uncompressed gray"
+ msgstr "Некомпресирано видео с нива на сивото"
+
++#: gst-libs/gst/pbutils/descriptions.c:486
+ #, c-format
+ msgid "Uncompressed packed YUV %s"
+ msgstr "Некомпресирано видео пакетирано YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:488
+ #, c-format
+ msgid "Uncompressed semi-planar YUV %s"
+ msgstr "Некомпресирано видео по равнини (презредово) YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:490
+ #, c-format
+ msgid "Uncompressed planar YUV %s"
+ msgstr "Некомпресирано видео по равнини YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:501
+ #, c-format
+ msgid "Uncompressed palettized %d-bit %s"
+ msgstr "Некомпресирано видео, индексирано, %d-битово %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:504
+ #, c-format
+ msgid "Uncompressed %d-bit %s"
+ msgstr "Некомпресирано видео, %d-битово %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:586
+ #, c-format
+ msgid "DivX MPEG-4 Version %d"
+ msgstr "DivX MPEG-4, версия %d"
+
++#: gst-libs/gst/pbutils/descriptions.c:834
+ msgid "Uncompressed audio"
+ msgstr "Некомпресирано аудио"
+
++#: gst-libs/gst/pbutils/descriptions.c:840
+ #, c-format
+ msgid "Raw %d-bit %s audio"
-#, fuzzy
++msgstr "Аудио, необработено %d-битово %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:942
+ msgid "Audio CD source"
+ msgstr "Елемент-източник — CD"
+
++#: gst-libs/gst/pbutils/descriptions.c:945
+ msgid "DVD source"
+ msgstr "Елемент-източник — DVD"
+
++#: gst-libs/gst/pbutils/descriptions.c:948
+ msgid "Real Time Streaming Protocol (RTSP) source"
+ msgstr "Елемент-източник — RTSP (поточен протокол в реално време)"
+
++#: gst-libs/gst/pbutils/descriptions.c:952
+ msgid "Microsoft Media Server (MMS) protocol source"
+ msgstr "Елемент-източник — MMS (сървър за медия на Microsoft)"
+
++#: gst-libs/gst/pbutils/descriptions.c:960
+ #, c-format
+ msgid "%s protocol source"
+ msgstr "Елемент-източник, протокол %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1034
+ #, c-format
+ msgid "%s video RTP depayloader"
+ msgstr "%s елемент, разтоварващ видеото от RTP"
+
++#: gst-libs/gst/pbutils/descriptions.c:1036
+ #, c-format
+ msgid "%s audio RTP depayloader"
+ msgstr "%s елемент, разтоварващ аудиото от RTP"
+
++#: gst-libs/gst/pbutils/descriptions.c:1038
+ #, c-format
+ msgid "%s RTP depayloader"
+ msgstr "%s елемент, разтоварващ данни от RTP"
+
++#: gst-libs/gst/pbutils/descriptions.c:1045
+ #, c-format
+ msgid "%s demuxer"
+ msgstr "Разделител (демултиплексор) %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1047
+ #, c-format
+ msgid "%s decoder"
+ msgstr "Декодер %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1086
+ #, c-format
+ msgid "%s video RTP payloader"
+ msgstr "%s елемент, товарещ видеото по RTP"
+
++#: gst-libs/gst/pbutils/descriptions.c:1088
+ #, c-format
+ msgid "%s audio RTP payloader"
+ msgstr "%s елемент, товарещ аудиото по RTP"
+
++#: gst-libs/gst/pbutils/descriptions.c:1090
+ #, c-format
+ msgid "%s RTP payloader"
+ msgstr "%s елемент, товарещ данни по RTP"
+
++#: gst-libs/gst/pbutils/descriptions.c:1097
+ #, c-format
+ msgid "%s muxer"
+ msgstr "Уплътнител (мултиплексор) %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1099
+ #, c-format
+ msgid "%s encoder"
+ msgstr "Кодер %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1133
+ #, c-format
+ msgid "GStreamer element %s"
+ msgstr "Елемент на GStreamer „%s“"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:558
+ msgid "Unknown source element"
+ msgstr "Непознат елемент-източник"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:561
+ msgid "Unknown sink element"
+ msgstr "Непознат елемент-приемник"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:564
+ msgid "Unknown element"
+ msgstr "Непознат елемент"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:567
+ msgid "Unknown decoder element"
+ msgstr "Непознат елемент-декодер"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:570
+ msgid "Unknown encoder element"
+ msgstr "Непознат елемент-кодер"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:575
+ msgid "Plugin or element of unknown type"
+ msgstr "Приставка или елемент от непознат вид"
+
++#: gst-libs/gst/tag/gsttagdemux.c:1243
+ msgid "Failed to read tag: not enough data"
+ msgstr "Неуспешно прочитане на етикет: липсват достатъчно данни"
+
++#: gst-libs/gst/tag/tags.c:77
+ msgid "track ID"
+ msgstr "идентификатор на песен"
+
++#: gst-libs/gst/tag/tags.c:77
+ msgid "MusicBrainz track ID"
+ msgstr "идентификатор на песен според MusicBrainz"
+
++#: gst-libs/gst/tag/tags.c:79
+ msgid "artist ID"
+ msgstr "идентификатор на изпълнител"
+
++#: gst-libs/gst/tag/tags.c:79
+ msgid "MusicBrainz artist ID"
+ msgstr "идентификатор на изпълнител според MusicBrainz"
+
++#: gst-libs/gst/tag/tags.c:81
+ msgid "album ID"
+ msgstr "идентификатор на албум"
+
++#: gst-libs/gst/tag/tags.c:81
+ msgid "MusicBrainz album ID"
+ msgstr "идентификатор на албум според MusicBrainz"
+
++#: gst-libs/gst/tag/tags.c:84
+ msgid "album artist ID"
+ msgstr "идентификатор на изпълнител в албума"
+
++#: gst-libs/gst/tag/tags.c:84
+ msgid "MusicBrainz album artist ID"
+ msgstr "идентификатор на изпълнител в албума според MusicBrainz"
+
++#: gst-libs/gst/tag/tags.c:86
+ msgid "track TRM ID"
+ msgstr "индустриален идентификатор на песен (TRM)"
+
++#: gst-libs/gst/tag/tags.c:86
+ msgid "MusicBrainz TRM ID"
+ msgstr "индустриален идентификатор на песен според MusicBrainz (TRM)"
+
++#: gst-libs/gst/tag/tags.c:108
+ msgid "capturing shutter speed"
+ msgstr "скорост на затвора"
+
++#: gst-libs/gst/tag/tags.c:109
+ msgid "Shutter speed used when capturing an image, in seconds"
+ msgstr "Скорост на затвора при заснемане в секунди"
+
++#: gst-libs/gst/tag/tags.c:112
+ msgid "capturing focal ratio"
+ msgstr "относителна бленда"
+
++#: gst-libs/gst/tag/tags.c:113
+ msgid "Focal ratio (f-number) used when capturing the image"
+ msgstr "Относителна бленда при заснемане"
+
++#: gst-libs/gst/tag/tags.c:116
+ msgid "capturing focal length"
+ msgstr "фокусно разстояние"
+
++#: gst-libs/gst/tag/tags.c:117
+ msgid "Focal length of the lens used capturing the image, in mm"
+ msgstr "Фокусно разстояние на обектива при заснемане в милиметри"
+
-msgstr "фокусно разстояние"
++#: gst-libs/gst/tag/tags.c:121
+ msgid "capturing 35 mm equivalent focal length"
-#, fuzzy
-msgid ""
-"35 mm equivalent focal length of the lens used capturing the image, in mm"
-msgstr "Фокусно разстояние на обектива при заснемане в милиметри"
++msgstr "приравнено към 35mm фокусно разстояние"
+
-msgid ""
-"The metering mode used while determining exposure for capturing an image"
++#: gst-libs/gst/tag/tags.c:122
++msgid "35 mm equivalent focal length of the lens used capturing the image, in mm"
++msgstr "Приравнено към 35mm фокусно разстояние на обектива при заснемане в милиметри"
+
++#: gst-libs/gst/tag/tags.c:126
+ msgid "capturing digital zoom ratio"
+ msgstr "цифрово увеличение"
+
++#: gst-libs/gst/tag/tags.c:127
+ msgid "Digital zoom ratio used when capturing an image"
+ msgstr "Цифрово увеличение при заснемане"
+
++#: gst-libs/gst/tag/tags.c:130
+ msgid "capturing iso speed"
+ msgstr "светлочувствителност по ISO"
+
++#: gst-libs/gst/tag/tags.c:131
+ msgid "The ISO speed used when capturing an image"
+ msgstr "Светлочувствителност по ISO при заснемане"
+
++#: gst-libs/gst/tag/tags.c:134
+ msgid "capturing exposure program"
+ msgstr "програма на експозиция"
+
++#: gst-libs/gst/tag/tags.c:135
+ msgid "The exposure program used when capturing an image"
+ msgstr "Програма на експозиция при заснемане"
+
++#: gst-libs/gst/tag/tags.c:138
+ msgid "capturing exposure mode"
+ msgstr "режим на експозиция"
+
++#: gst-libs/gst/tag/tags.c:139
+ msgid "The exposure mode used when capturing an image"
+ msgstr "Режим на експозиция при заснемане"
+
++#: gst-libs/gst/tag/tags.c:142
+ msgid "capturing exposure compensation"
+ msgstr "компенсация на експозицията"
+
++#: gst-libs/gst/tag/tags.c:143
+ msgid "The exposure compensation used when capturing an image"
+ msgstr "Компенсацията на експозицията при заснемане"
+
++#: gst-libs/gst/tag/tags.c:146
+ msgid "capturing scene capture type"
+ msgstr "вид сцена"
+
++#: gst-libs/gst/tag/tags.c:147
+ msgid "The scene capture mode used when capturing an image"
+ msgstr "Вид сцена при заснемане"
+
++#: gst-libs/gst/tag/tags.c:150
+ msgid "capturing gain adjustment"
+ msgstr "корекция с усилване"
+
++#: gst-libs/gst/tag/tags.c:151
+ msgid "The overall gain adjustment applied on an image"
+ msgstr "Общата корекция с усилване при заснемане"
+
++#: gst-libs/gst/tag/tags.c:154
+ msgid "capturing white balance"
+ msgstr "баланс на бялото"
+
++#: gst-libs/gst/tag/tags.c:155
+ msgid "The white balance mode set when capturing an image"
+ msgstr "Режим на баланс на бялото при заснемане"
+
++#: gst-libs/gst/tag/tags.c:158
+ msgid "capturing contrast"
+ msgstr "контраст"
+
++#: gst-libs/gst/tag/tags.c:159
+ msgid "The direction of contrast processing applied when capturing an image"
+ msgstr "Посоката на обработката за контраст при заснемане"
+
++#: gst-libs/gst/tag/tags.c:163
+ msgid "capturing saturation"
+ msgstr "наситеност"
+
++#: gst-libs/gst/tag/tags.c:164
+ msgid "The direction of saturation processing applied when capturing an image"
+ msgstr "Посоката на обработката за наситеност при заснемане"
+
++#: gst-libs/gst/tag/tags.c:168
+ msgid "capturing sharpness"
+ msgstr "острота"
+
++#: gst-libs/gst/tag/tags.c:169
+ msgid "The direction of sharpness processing applied when capturing an image"
+ msgstr "Посоката на обработката за острота при заснемане"
+
++#: gst-libs/gst/tag/tags.c:173
+ msgid "capturing flash fired"
+ msgstr "светкавица"
+
++#: gst-libs/gst/tag/tags.c:174
+ msgid "If the flash fired while capturing an image"
+ msgstr "Дали е ползвана светкавица при заснемане"
+
++#: gst-libs/gst/tag/tags.c:177
+ msgid "capturing flash mode"
+ msgstr "режим на светкавица"
+
++#: gst-libs/gst/tag/tags.c:178
+ msgid "The selected flash mode while capturing an image"
+ msgstr "Режимът на светкавица при заснемане"
+
++#: gst-libs/gst/tag/tags.c:181
+ msgid "capturing metering mode"
+ msgstr "режим на заснемане"
+
-msgid ""
-"Don't exit after showing the initial device list, but wait for devices to "
-"added/removed."
-msgstr ""
-"След извеждане на първоначалния списък с устройства, да се чака за добавяне/"
-"махане на устройства."
++#: gst-libs/gst/tag/tags.c:182
++msgid "The metering mode used while determining exposure for capturing an image"
+ msgstr "Режим на заснемане при определянето на експозиция"
+
++#: gst-libs/gst/tag/tags.c:186
+ msgid "capturing source"
+ msgstr "източник"
+
++#: gst-libs/gst/tag/tags.c:187
+ msgid "The source or type of device used for the capture"
+ msgstr "Видът на устройството-източник при заснемане"
+
++#: gst-libs/gst/tag/tags.c:190
+ msgid "image horizontal ppi"
+ msgstr "точки на инч по хоризонтал"
+
++#: gst-libs/gst/tag/tags.c:191
+ msgid "Media (image/video) intended horizontal pixel density in ppi"
+ msgstr "Целевата разделителна способност на медията по хоризонтал"
+
++#: gst-libs/gst/tag/tags.c:194
+ msgid "image vertical ppi"
+ msgstr "точки на инч по вертикал"
+
++#: gst-libs/gst/tag/tags.c:195
+ msgid "Media (image/video) intended vertical pixel density in ppi"
+ msgstr "Целевата разделителна способност на медията по вертикал"
+
++#: gst-libs/gst/tag/tags.c:198
+ msgid "ID3v2 frame"
+ msgstr "Кадър за ID3v2"
+
++#: gst-libs/gst/tag/tags.c:198
+ msgid "unparsed id3v2 tag frame"
+ msgstr "неанализиран кадър за етикети id3v2"
+
++#: gst-libs/gst/tag/tags.c:202
+ msgid "musical-key"
+ msgstr "лад"
+
++#: gst-libs/gst/tag/tags.c:202
+ msgid "Initial key in which the sound starts"
+ msgstr "Тоналността, в която почва музиката"
+
++#: tools/gst-device-monitor.c:260 tools/gst-play.c:1444
+ msgid "Print version information and exit"
+ msgstr "Извеждане на информация за версията и спиране на програмата"
+
-msgstr ""
++#: tools/gst-device-monitor.c:262
++msgid "Don't exit after showing the initial device list, but wait for devices to added/removed."
++msgstr "След извеждане на първоначалния списък с устройства, да се чака за добавяне/махане на устройства."
+
++#: tools/gst-play.c:310
+ #, c-format
+ msgid "Volume: %.0f%%"
+ msgstr "Сила на звука: %.0f%%"
+
++#: tools/gst-play.c:349
+ msgid "Buffering..."
+ msgstr "Буфериране…"
+
++#: tools/gst-play.c:370
+ msgid "Clock lost, selecting a new one\n"
+ msgstr "Часовникът се загуби, избира се нов\n"
+
++#: tools/gst-play.c:400 tools/gst-play.c:446 tools/gst-play.c:883
++#: tools/gst-play.c:1347
+ msgid "Reached end of play list."
+ msgstr "Стигнат е края на списъка за изпълнение."
+
++#: tools/gst-play.c:613
+ msgid "Paused"
+ msgstr "Пауза"
+
++#: tools/gst-play.c:671
+ #, c-format
+ msgid "Now playing %s\n"
+ msgstr "В момента се изпълнява „%s“\n"
+
++#: tools/gst-play.c:734
+ #, c-format
+ msgid "About to finish, preparing next title: %s"
+ msgstr "Изпълнението е към края, приготвя се следващото заглавие „%s“"
+
++#: tools/gst-play.c:979
+ #, c-format
+ msgid "Playback rate: %.2f"
+ msgstr "Скорост на изпълнение: %.2f"
+
++#: tools/gst-play.c:983
+ #, c-format
+ msgid "Could not change playback rate to %.2f"
+ msgstr "Скоростта на изпълнение не може да се зададе да е %.2f"
+
++#: tools/gst-play.c:1287
+ msgid "space"
+ msgstr "интервал"
+
++#: tools/gst-play.c:1287
+ msgid "pause/unpause"
+ msgstr "пауза/изпълнение"
+
++#: tools/gst-play.c:1288
+ msgid "q or ESC"
+ msgstr "„q“ или „ESC“"
+
++#: tools/gst-play.c:1288
+ msgid "quit"
+ msgstr "спиране"
+
++#: tools/gst-play.c:1289
+ msgid "> or n"
-msgstr ""
++msgstr "„>“ или „n“"
+
++#: tools/gst-play.c:1289
+ msgid "play next"
+ msgstr "следващо"
+
++#: tools/gst-play.c:1290
+ msgid "< or b"
-msgstr ""
-"Управление на поведението при изпълнение чрез задаване на свойството "
-"„flags“ (флагове)"
++msgstr "„<“ или „b“"
+
++#: tools/gst-play.c:1290
+ msgid "play previous"
+ msgstr "предишно"
+
++#: tools/gst-play.c:1291
+ msgid "seek forward"
+ msgstr "търсене напред"
+
++#: tools/gst-play.c:1292
+ msgid "seek backward"
+ msgstr "търсене назад"
+
++#: tools/gst-play.c:1293
+ msgid "volume up"
+ msgstr "увеличаване на звука"
+
++#: tools/gst-play.c:1294
+ msgid "volume down"
+ msgstr "намаляване на звука"
+
++#: tools/gst-play.c:1295
+ msgid "increase playback rate"
+ msgstr "забързване на изпълнението"
+
++#: tools/gst-play.c:1296
+ msgid "decrease playback rate"
+ msgstr "забавяне на изпълнението"
+
++#: tools/gst-play.c:1297
+ msgid "change playback direction"
+ msgstr "смяна на посоката на изпълнение"
+
++#: tools/gst-play.c:1298
+ msgid "enable/disable trick modes"
+ msgstr "включване/изключване на ефекти"
+
++#: tools/gst-play.c:1299
+ msgid "change audio track"
+ msgstr "смяна на песента"
+
++#: tools/gst-play.c:1300
+ msgid "change video track"
+ msgstr "смяна на видеото"
+
++#: tools/gst-play.c:1301
+ msgid "change subtitle track"
+ msgstr "смяна на субтитрите"
+
++#: tools/gst-play.c:1302
+ msgid "seek to beginning"
+ msgstr "търсене към началото"
+
++#: tools/gst-play.c:1303
+ msgid "show keyboard shortcuts"
+ msgstr "извеждане на клавишните комбинации"
+
++#: tools/gst-play.c:1306
+ msgid "Interactive mode - keyboard controls:"
+ msgstr "Интерактивен режим, управление с клавиатурата:"
+
++#: tools/gst-play.c:1439
+ msgid "Output status information and property notifications"
+ msgstr "Извеждане на информация за състоянието и свойствата"
+
++#: tools/gst-play.c:1441
+ msgid "Control playback behaviour setting playbin 'flags' property"
-msgstr ""
++msgstr "Управление на поведението при изпълнение чрез задаване на свойството „flags“ (флагове)"
+
++#: tools/gst-play.c:1446
+ msgid "Video sink to use (default is autovideosink)"
+ msgstr "Елемент-приемник за видео (стандартно е „autovideosink“)"
+
++#: tools/gst-play.c:1448
+ msgid "Audio sink to use (default is autoaudiosink)"
+ msgstr "Елемент-приемник за аудио (стандартно е „autoaudiosink“)"
+
++#: tools/gst-play.c:1450
+ msgid "Enable gapless playback"
+ msgstr "Изпълнение без прекъсвания"
+
++#: tools/gst-play.c:1452
+ msgid "Shuffle playlist"
+ msgstr "Разбъркано изпълнение"
+
++#: tools/gst-play.c:1455
+ msgid "Disable interactive control via the keyboard"
+ msgstr "Без управление от клавиатурата"
+
++#: tools/gst-play.c:1457
+ msgid "Volume"
+ msgstr "Сила на звука"
+
++#: tools/gst-play.c:1459
+ msgid "Playlist file containing input media files"
+ msgstr "Файл със списък за изпълнение"
+
++#: tools/gst-play.c:1461
+ msgid "Do not print any output (apart from errors)"
+ msgstr "Без извеждане на информация (освен грешките)"
+
++#: tools/gst-play.c:1463
+ msgid "Use playbin3 pipeline"
-msgstr ""
++msgstr "Конвейер с „playbin3“"
+
++#: tools/gst-play.c:1464
+ msgid "(default varies depending on 'USE_PLAYBIN' env variable)"
-
-#, fuzzy
-#~ msgid "Could not create \"decodebin3\" element."
-#~ msgstr "Не може да се създаде елемент „uridecodebin“."
-
-#, fuzzy
-#~ msgid "Could not create \"urisourcebin\" element."
-#~ msgstr "Не може да се създаде елемент „uridecodebin“."
-
-#~ msgid "Internal data stream error."
-#~ msgstr "Вътрешна грешка на потока от данни."
++msgstr "(стандартният вариант зависи от променливата на средата „USE_PLAYBIN“)"
++
++#: tools/gst-play.c:1468
++msgid "Keep showing the last frame on EOS until quit or playlist change command (gapless is ignored)"
++msgstr "Последният кадър в края на поток да се показва до получаването на команда за спиране или смяна на списъка за изпълнение (в този случай настройката за изпълнение без прекъсвания се пренебрегва)"
+
++#: tools/gst-play.c:1546
+ #, c-format
+ msgid "Usage: %s FILE1|URI1 [FILE2|URI2] [FILE3|URI3] ..."
+ msgstr "Употреба: %s ФАЙЛ_1|АДРЕС_1 [ФАЙЛ_2|АДРЕС_2] [ФАЙЛ_3|АДРЕС_3]…"
+
++#: tools/gst-play.c:1550
+ msgid "You must provide at least one filename or URI to play."
+ msgstr "Трябва да укажете поне един файл или адрес за изпълнение."
+
++#: tools/gst-play.c:1590
+ msgid "Press 'k' to see a list of keyboard shortcuts.\n"
+ msgstr "Натиснете „k“, за да видите списъка с клавишни комбинации.\n"
--- /dev/null
-"Project-Id-Version: gst-plugins-base 1.15.1\n"
+ # German messages for gst-plugins-base 1.15.1
+ # Copyright © 2006 Karl Eichwalder
+ # This file is distributed under the same license as the gst-plugins-base package.
+ # Karl Eichwalder <ke@suse.de>, 2006.
+ # Mario Blättermann <mariobl@gnome.org>, 2010.
+ # Christian Kirbach <christian.kirbach@gmail.com>, 2009, 2010, 2011, 2012, 2013, 2016, 2019.
+ #
+ msgid ""
+ msgstr ""
-"POT-Creation-Date: 2019-02-26 11:43+0000\n"
-"PO-Revision-Date: 2019-02-10 15:50+0100\n"
++"Project-Id-Version: gst-plugins-base 1.16.0\n"
+ "Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"X-Generator: Poedit 2.2\n"
++"POT-Creation-Date: 2019-04-19 00:20+0100\n"
++"PO-Revision-Date: 2019-09-08 12:27+0200\n"
+ "Last-Translator: Christian Kirbach <christian.kirbach@gmail.com>\n"
+ "Language-Team: German <translation-team-de@lists.sourceforge.net>\n"
+ "Language: de\n"
+ "MIME-Version: 1.0\n"
+ "Content-Type: text/plain; charset=UTF-8\n"
+ "Content-Transfer-Encoding: 8bit\n"
+ "X-Bugs: Report translation errors to the Language-Team address.\n"
+ "Plural-Forms: nplurals=2; plural=(n != 1);\n"
-msgid ""
-"Could not open audio device for playback. Device is being used by another "
-"application."
-msgstr ""
-"Audio-Gerät konnte nicht zur Wiedergabe geöffnet werden. Es wird durch eine "
-"andere Anwendung verwendet."
++"X-Generator: Poedit 2.2.1\n"
+
++#: ext/alsa/gstalsasink.c:579
+ msgid "Could not open device for playback in mono mode."
+ msgstr "Gerät konnte nicht zur Wiedergabe in Mono geöffnet werden."
+
++#: ext/alsa/gstalsasink.c:581
+ msgid "Could not open device for playback in stereo mode."
+ msgstr "Gerät konnte nicht zur Wiedergabe in Stereo geöffnet werden."
+
++#: ext/alsa/gstalsasink.c:585
+ #, c-format
+ msgid "Could not open device for playback in %d-channel mode."
+ msgstr "Gerät konnte nicht zur Wiedergabe im %d-Kanalmodus geöffnet werden."
+
-msgid ""
-"Could not open audio device for recording. Device is being used by another "
-"application."
-msgstr ""
-"Audio-Gerät konnte nicht zur Aufnahme geöffnet werden. Es wird durch eine "
-"andere Anwendung verwendet."
++#: ext/alsa/gstalsasink.c:858
++msgid "Could not open audio device for playback. Device is being used by another application."
++msgstr "Audio-Gerät konnte nicht zur Wiedergabe geöffnet werden. Es wird durch eine andere Anwendung verwendet."
+
++#: ext/alsa/gstalsasink.c:863
+ msgid "Could not open audio device for playback."
+ msgstr "Audio-Gerät konnte nicht zur Wiedergabe geöffnet werden."
+
++#: ext/alsa/gstalsasink.c:1076
+ msgid "Error outputting to audio device. The device has been disconnected."
+ msgstr "Fehler bei Ausgabe aus dem Audio-Gerät. Das Gerät wurde getrennt."
+
++#: ext/alsa/gstalsasrc.c:444
+ msgid "Could not open device for recording in mono mode."
+ msgstr "Gerät konnte nicht zur Aufnahme in Mono geöffnet werden."
+
++#: ext/alsa/gstalsasrc.c:446
+ msgid "Could not open device for recording in stereo mode."
+ msgstr "Gerät konnte nicht zur Aufnahme in Stereo geöffnet werden."
+
++#: ext/alsa/gstalsasrc.c:450
+ #, c-format
+ msgid "Could not open device for recording in %d-channel mode"
+ msgstr "Gerät konnte nicht zur Aufnahme im %d-Kanalmodus geöffnet werden."
+
-msgstr ""
-"Das Element »%s« fehlt - überprüfen Sie Ihre Installation von GStreamer."
++#: ext/alsa/gstalsasrc.c:741
++msgid "Could not open audio device for recording. Device is being used by another application."
++msgstr "Audio-Gerät konnte nicht zur Aufnahme geöffnet werden. Es wird durch eine andere Anwendung verwendet."
+
++#: ext/alsa/gstalsasrc.c:746
+ msgid "Could not open audio device for recording."
+ msgstr "Audio-Gerät konnte nicht zur Aufnahme geöffnet werden."
+
++#: ext/alsa/gstalsasrc.c:992
+ msgid "Error recording from audio device. The device has been disconnected."
+ msgstr "Fehler bei Aufnahme vom Audio-Gerät. Das Gerät wurde getrennt."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:277 ext/cdparanoia/gstcdparanoiasrc.c:283
+ msgid "Could not open CD device for reading."
+ msgstr "Das CD-Laufwerk konnte nicht zum Lesen geöffnet werden."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:409
+ msgid "Could not seek CD."
+ msgstr "Suchvorgang auf der CD ist fehlgeschlagen."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:417
+ msgid "Could not read CD."
+ msgstr "CD konnte nicht gelesen werden."
+
++#: ext/gl/gstgltestsrc.c:491
+ msgid "failed to draw pattern"
+ msgstr "Zeichnen des Musters ist gescheitert"
+
++#: ext/gl/gstgltestsrc.c:492
+ msgid "A GL error occurred"
+ msgstr "Es ist ein GL-Fehler aufgetreten"
+
++#: ext/gl/gstgltestsrc.c:498
+ msgid "format wasn't negotiated before get function"
+ msgstr "Format wurde nicht vor der get-Funktion erkannt"
+
++#: gst/encoding/gstencodebin.c:1636 gst/playback/gstplaybin2.c:3399
++#: gst/playback/gstplaysink.c:1522 gst/playback/gstplaysink.c:1535
++#: gst/playback/gstplaysink.c:1872 gst/playback/gstplaysink.c:1904
++#: gst/playback/gstplaysink.c:2488 gst/playback/gstplaysink.c:2537
++#: gst/playback/gstplaysink.c:2552 gst/playback/gstplaysink.c:2577
++#: gst/playback/gstplaysink.c:2609 gst/playback/gstplaysink.c:2757
++#: gst/playback/gstplaysink.c:2788 gst/playback/gstplaysink.c:3166
++#: gst/playback/gstplaysink.c:3175 gst/playback/gstplaysink.c:3184
++#: gst/playback/gstplaysink.c:3193 gst/playback/gstplaysink.c:3606
++#: gst/playback/gstplaysink.c:4526 gst/playback/gstplaysinkconvertbin.c:97
++#: gst/playback/gstplaysinkconvertbin.c:117
++#: gst/playback/gsturidecodebin.c:1472 gst/playback/gsturisourcebin.c:1597
+ #, c-format
+ msgid "Missing element '%s' - check your GStreamer installation."
-msgstr ""
-"Es kann keine Textdatei ohne Video oder Visualisierungen abgespielt werden."
++msgstr "Das Element »%s« fehlt - überprüfen Sie Ihre Installation von GStreamer."
+
++#: gst/playback/gstdecodebin2.c:1883 gst/playback/gstparsebin.c:1553
+ msgid "Could not determine type of stream"
+ msgstr "Der Typ des Datenstroms konnte nicht bestimmt werden"
+
++#: gst/playback/gstdecodebin2.c:2861 gst/playback/gstparsebin.c:2385
+ msgid "This appears to be a text file"
+ msgstr "Dies scheint eine Textdatei zu sein"
+
++#: gst/playback/gstplaybin2.c:5452
+ msgid "Could not create \"uridecodebin\" element."
+ msgstr "Das Element »uridecodebin« konnte nicht erstellt werden."
+
++#: gst/playback/gstplaybin3.c:4558
+ msgid "Could not create \"uridecodebin3\" element."
+ msgstr "Das Element »uridecodebin3« konnte nicht erstellt werden."
+
++#: gst/playback/gstplaysink.c:2003
+ #, c-format
+ msgid "Both autovideosink and %s elements are missing."
+ msgstr "Beide Elemente »autovideosink« und »%s« fehlen."
+
++#: gst/playback/gstplaysink.c:2007
+ msgid "The autovideosink element is missing."
+ msgstr "Das Element »autovideosink« fehlt."
+
++#: gst/playback/gstplaysink.c:2012
+ #, c-format
+ msgid "Configured videosink %s is not working."
+ msgstr "Das konfigurierte Videoziel »%s« funktioniert nicht."
+
++#: gst/playback/gstplaysink.c:2016
+ #, c-format
+ msgid "Both autovideosink and %s elements are not working."
+ msgstr "Beide Elemente »autovideosink« und »%s« funktionieren nicht."
+
++#: gst/playback/gstplaysink.c:2020
+ msgid "The autovideosink element is not working."
+ msgstr "Das Element »autovideosink« funktioniert nicht."
+
++#: gst/playback/gstplaysink.c:2525
+ msgid "Custom text sink element is not usable."
+ msgstr "Das eigene Element des Textziels ist nicht verwendbar."
+
++#: gst/playback/gstplaysink.c:2903
+ msgid "No volume control found"
+ msgstr "Keine Lautstärkeregelung gefunden"
+
++#: gst/playback/gstplaysink.c:2933
+ #, c-format
+ msgid "Both autoaudiosink and %s elements are missing."
+ msgstr "Beide Elemente »autoaudiosink« und »%s« fehlen."
+
++#: gst/playback/gstplaysink.c:2937
+ msgid "The autoaudiosink element is missing."
+ msgstr "Das Element »autoaudiosink« fehlt."
+
++#: gst/playback/gstplaysink.c:2942
+ #, c-format
+ msgid "Configured audiosink %s is not working."
+ msgstr "Das konfigurierte Audioziel »%s« funktioniert nicht."
+
++#: gst/playback/gstplaysink.c:2946
+ #, c-format
+ msgid "Both autoaudiosink and %s elements are not working."
+ msgstr "Beide Elemente »autoaudiosink« und »%s« funktionieren nicht."
+
++#: gst/playback/gstplaysink.c:2950
+ msgid "The autoaudiosink element is not working."
+ msgstr "Das Element »autoaudiosink« funktioniert nicht."
+
++#: gst/playback/gstplaysink.c:3269 gst/playback/gstplaysink.c:3274
+ msgid "Can't play a text file without video or visualizations."
-msgid ""
-"35 mm equivalent focal length of the lens used capturing the image, in mm"
-msgstr ""
-"eine zu 35 mm äquivalente Brennweite des Objektivs bei Bildaufnahme, in mm"
++msgstr "Es kann keine Textdatei ohne Video oder Visualisierungen abgespielt werden."
+
++#: gst/playback/gsturidecodebin.c:921
+ #, c-format
+ msgid "No decoder available for type '%s'."
+ msgstr "Für den Typ »%s« ist kein Decoder vorhanden."
+
++#: gst/playback/gsturidecodebin.c:1378 gst/playback/gsturisourcebin.c:1491
+ msgid "No URI specified to play from."
+ msgstr "Keine Adresse (URI) zum Abspielen angegeben."
+
++#: gst/playback/gsturidecodebin.c:1384 gst/playback/gsturisourcebin.c:1497
+ #, c-format
+ msgid "Invalid URI \"%s\"."
+ msgstr "Ungültige Adresse (URI) »%s«."
+
++#: gst/playback/gsturidecodebin.c:1391 gst/playback/gsturisourcebin.c:1504
+ msgid "This stream type cannot be played yet."
+ msgstr "Dieser Datenstrom kann noch nicht abgespielt werden."
+
++#: gst/playback/gsturidecodebin.c:1409 gst/playback/gsturisourcebin.c:1522
+ #, c-format
+ msgid "No URI handler implemented for \"%s\"."
+ msgstr "Es ist kein URI-Handler für »%s« implementiert."
+
++#: gst/playback/gsturidecodebin.c:2289 gst/playback/gsturisourcebin.c:2228
+ msgid "Source element is invalid."
+ msgstr "Das Quellelement ist ungültig."
+
++#: gst/tcp/gsttcpclientsink.c:216
+ #, c-format
+ msgid "Error while sending data to \"%s:%d\"."
+ msgstr "Fehler beim Senden der Daten nach »%s:%d«."
+
++#: gst-libs/gst/audio/gstaudiobasesrc.c:842
+ msgid "Can't record audio fast enough"
+ msgstr "Der Ton kann nicht schnell genug aufgezeichnet werden"
+
++#: gst-libs/gst/audio/gstaudiocdsrc.c:1625
+ msgid "This CD has no audio tracks"
+ msgstr "Auf dieser CD befinden sich keine Audio-Titel"
+
++#: gst-libs/gst/pbutils/descriptions.c:89
+ msgid "ID3 tag"
+ msgstr "ID3-Kennzeichnung"
+
++#: gst-libs/gst/pbutils/descriptions.c:90
++#: gst-libs/gst/pbutils/descriptions.c:91
+ msgid "APE tag"
+ msgstr "APE-Kennzeichnung"
+
++#: gst-libs/gst/pbutils/descriptions.c:92
+ msgid "ICY internet radio"
+ msgstr "ICY Internet-Radio"
+
++#: gst-libs/gst/pbutils/descriptions.c:127
+ msgid "Apple Lossless Audio (ALAC)"
+ msgstr "Apple Lossless Audio (ALAC)"
+
++#: gst-libs/gst/pbutils/descriptions.c:143
+ msgid "Free Lossless Audio Codec (FLAC)"
+ msgstr "Free Lossless Audio Codec (FLAC)"
+
++#: gst-libs/gst/pbutils/descriptions.c:173
++#: gst-libs/gst/pbutils/descriptions.c:174
+ msgid "Lossless True Audio (TTA)"
+ msgstr "Lossless True Audio (TTA)"
+
++#: gst-libs/gst/pbutils/descriptions.c:182
+ msgid "Windows Media Speech"
+ msgstr "Windows Media Speech"
+
++#: gst-libs/gst/pbutils/descriptions.c:199
+ msgid "CYUV Lossless"
+ msgstr "CYUV verlustlos"
+
++#: gst-libs/gst/pbutils/descriptions.c:203
+ msgid "FFMpeg v1"
+ msgstr "FFMpeg v1"
+
++#: gst-libs/gst/pbutils/descriptions.c:217
+ msgid "Lossless MSZH"
+ msgstr "verlustloses MSZH"
+
++#: gst-libs/gst/pbutils/descriptions.c:228
+ msgid "Run-length encoding"
+ msgstr "RLE-Codierung"
+
++#: gst-libs/gst/pbutils/descriptions.c:274
+ msgid "Timed Text"
+ msgstr "Zeitlich abgepasster Text"
+
++#: gst-libs/gst/pbutils/descriptions.c:278
+ msgid "Subtitle"
+ msgstr "Untertitel"
+
++#: gst-libs/gst/pbutils/descriptions.c:279
+ msgid "MPL2 subtitle format"
+ msgstr "Untertitel-Format MPL2"
+
++#: gst-libs/gst/pbutils/descriptions.c:280
+ msgid "DKS subtitle format"
+ msgstr "Untertitel-Format DKS"
+
++#: gst-libs/gst/pbutils/descriptions.c:281
+ msgid "QTtext subtitle format"
+ msgstr "Untertitel-Format QTtext"
+
++#: gst-libs/gst/pbutils/descriptions.c:282
+ msgid "Sami subtitle format"
+ msgstr "Untertitel-Format Sami"
+
++#: gst-libs/gst/pbutils/descriptions.c:283
+ msgid "TMPlayer subtitle format"
+ msgstr "Untertitel-Format TMPlayer"
+
++#: gst-libs/gst/pbutils/descriptions.c:287
+ msgid "CEA 608 Closed Caption"
+ msgstr "CEA 608 Untertitel"
+
++#: gst-libs/gst/pbutils/descriptions.c:288
+ msgid "CEA 708 Closed Caption"
+ msgstr "CEA 708 Untertitel"
+
++#: gst-libs/gst/pbutils/descriptions.c:289
+ msgid "Kate subtitle format"
+ msgstr "Untertitel-Format Kate"
+
++#: gst-libs/gst/pbutils/descriptions.c:290
+ msgid "WebVTT subtitle format"
+ msgstr "Untertitel-Format WebVTT"
+
++#: gst-libs/gst/pbutils/descriptions.c:455
++#: gst-libs/gst/pbutils/descriptions.c:458
++#: gst-libs/gst/pbutils/descriptions.c:508
+ msgid "Uncompressed video"
+ msgstr "Unkomprimiertes Video"
+
++#: gst-libs/gst/pbutils/descriptions.c:463
+ msgid "Uncompressed gray"
+ msgstr "Unkomprimiertes Grau"
+
++#: gst-libs/gst/pbutils/descriptions.c:486
+ #, c-format
+ msgid "Uncompressed packed YUV %s"
+ msgstr "Unkomprimiertes gepacktes YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:488
+ #, c-format
+ msgid "Uncompressed semi-planar YUV %s"
+ msgstr "Unkomprimiertes halbflaches YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:490
+ #, c-format
+ msgid "Uncompressed planar YUV %s"
+ msgstr "Unkomprimiertes flaches YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:501
+ #, c-format
+ msgid "Uncompressed palettized %d-bit %s"
+ msgstr "Unkomprimiertes palettiertes %d-Bit %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:504
+ #, c-format
+ msgid "Uncompressed %d-bit %s"
+ msgstr "Unkomprimiertes %d-Bit %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:586
+ #, c-format
+ msgid "DivX MPEG-4 Version %d"
+ msgstr "DivX MPEG-4 Version %d"
+
++#: gst-libs/gst/pbutils/descriptions.c:834
+ msgid "Uncompressed audio"
+ msgstr "Unkomprimiertes Audio"
+
++#: gst-libs/gst/pbutils/descriptions.c:840
+ #, c-format
+ msgid "Raw %d-bit %s audio"
+ msgstr "Rohes %d-Bit %s Audio"
+
++#: gst-libs/gst/pbutils/descriptions.c:942
+ msgid "Audio CD source"
+ msgstr "Musik-CD-Quelle"
+
++#: gst-libs/gst/pbutils/descriptions.c:945
+ msgid "DVD source"
+ msgstr "DVD-Quelle"
+
++#: gst-libs/gst/pbutils/descriptions.c:948
+ msgid "Real Time Streaming Protocol (RTSP) source"
+ msgstr "Real Time Streaming Protocol (RTSP)-Quelle"
+
++#: gst-libs/gst/pbutils/descriptions.c:952
+ msgid "Microsoft Media Server (MMS) protocol source"
+ msgstr "Microsoft Media Server (MMS)-Protokollquelle"
+
++#: gst-libs/gst/pbutils/descriptions.c:960
+ #, c-format
+ msgid "%s protocol source"
+ msgstr "%s Protokollquelle"
+
++#: gst-libs/gst/pbutils/descriptions.c:1034
+ #, c-format
+ msgid "%s video RTP depayloader"
+ msgstr "%s-Video RTP-Payload-Entnahme"
+
++#: gst-libs/gst/pbutils/descriptions.c:1036
+ #, c-format
+ msgid "%s audio RTP depayloader"
+ msgstr "%s-Audio RTP-Payload-Entnahme"
+
++#: gst-libs/gst/pbutils/descriptions.c:1038
+ #, c-format
+ msgid "%s RTP depayloader"
+ msgstr "%s-RTP-Payload-Entnahme"
+
++#: gst-libs/gst/pbutils/descriptions.c:1045
+ #, c-format
+ msgid "%s demuxer"
+ msgstr "%s-Demuxer"
+
++#: gst-libs/gst/pbutils/descriptions.c:1047
+ #, c-format
+ msgid "%s decoder"
+ msgstr "%s-Decoder"
+
++#: gst-libs/gst/pbutils/descriptions.c:1086
+ #, c-format
+ msgid "%s video RTP payloader"
+ msgstr "%s-Video RTP-Payloader"
+
++#: gst-libs/gst/pbutils/descriptions.c:1088
+ #, c-format
+ msgid "%s audio RTP payloader"
+ msgstr "%s-Audio RTP-Payloader"
+
++#: gst-libs/gst/pbutils/descriptions.c:1090
+ #, c-format
+ msgid "%s RTP payloader"
+ msgstr "%s-RTP-Payloader"
+
++#: gst-libs/gst/pbutils/descriptions.c:1097
+ #, c-format
+ msgid "%s muxer"
+ msgstr "%s-Multiplexer"
+
++#: gst-libs/gst/pbutils/descriptions.c:1099
+ #, c-format
+ msgid "%s encoder"
+ msgstr "%s-Encoder"
+
++#: gst-libs/gst/pbutils/descriptions.c:1133
+ #, c-format
+ msgid "GStreamer element %s"
+ msgstr "GStreamer-Element %s"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:558
+ msgid "Unknown source element"
+ msgstr "Unbekanntes Quellelement"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:561
+ msgid "Unknown sink element"
+ msgstr "Unbekanntes Zielelement"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:564
+ msgid "Unknown element"
+ msgstr "Unbekanntes Element"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:567
+ msgid "Unknown decoder element"
+ msgstr "Unbekanntes Decoder-Element"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:570
+ msgid "Unknown encoder element"
+ msgstr "Unbekanntes Encoder-Element"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:575
+ msgid "Plugin or element of unknown type"
+ msgstr "Plugin oder Element unbekannten Typs"
+
++#: gst-libs/gst/tag/gsttagdemux.c:1243
+ msgid "Failed to read tag: not enough data"
+ msgstr "Lesen der Kennzeichnung schlug fehl: Nicht genug Daten"
+
++#: gst-libs/gst/tag/tags.c:77
+ msgid "track ID"
+ msgstr "Titelkennung"
+
++#: gst-libs/gst/tag/tags.c:77
+ msgid "MusicBrainz track ID"
+ msgstr "Titelkennung von MusicBrainz"
+
+ # Hach, hier haben wir ja noch mehr »Interpreten«!
++#: gst-libs/gst/tag/tags.c:79
+ msgid "artist ID"
+ msgstr "Künstlerkennung"
+
++#: gst-libs/gst/tag/tags.c:79
+ msgid "MusicBrainz artist ID"
+ msgstr "Künstlerkennung von MusicBrainz"
+
++#: gst-libs/gst/tag/tags.c:81
+ msgid "album ID"
+ msgstr "Albenkennung"
+
++#: gst-libs/gst/tag/tags.c:81
+ msgid "MusicBrainz album ID"
+ msgstr "Albenkennung von MusicBrainz"
+
++#: gst-libs/gst/tag/tags.c:84
+ msgid "album artist ID"
+ msgstr "Kennung des Albenkünstlers"
+
++#: gst-libs/gst/tag/tags.c:84
+ msgid "MusicBrainz album artist ID"
+ msgstr "Kennung des Albenkünstlers von MusicBrainz"
+
++#: gst-libs/gst/tag/tags.c:86
+ msgid "track TRM ID"
+ msgstr "TRM-Kennung des Titels"
+
++#: gst-libs/gst/tag/tags.c:86
+ msgid "MusicBrainz TRM ID"
+ msgstr "MusicBrainz-TRM-Kennung"
+
++#: gst-libs/gst/tag/tags.c:108
+ msgid "capturing shutter speed"
+ msgstr "Belichtungszeit"
+
++#: gst-libs/gst/tag/tags.c:109
+ msgid "Shutter speed used when capturing an image, in seconds"
+ msgstr "Belichtungszeit bei Aufnahme eines Bildes in Sekunden"
+
++#: gst-libs/gst/tag/tags.c:112
+ msgid "capturing focal ratio"
+ msgstr "Blendenwert"
+
++#: gst-libs/gst/tag/tags.c:113
+ msgid "Focal ratio (f-number) used when capturing the image"
+ msgstr "Der verwendete Blendenwert bei Bildaufnahme"
+
++#: gst-libs/gst/tag/tags.c:116
+ msgid "capturing focal length"
+ msgstr "Brennweite"
+
++#: gst-libs/gst/tag/tags.c:117
+ msgid "Focal length of the lens used capturing the image, in mm"
+ msgstr "Die verwendete Brennweite des Objektivs bei Bildaufnahme in mm"
+
++#: gst-libs/gst/tag/tags.c:121
+ msgid "capturing 35 mm equivalent focal length"
+ msgstr "Aufnahme entspricht einer äquivalenten Brennweite von 35 mm"
+
-msgstr ""
-"Die Einstellung zur angewendeten Sättigungsbearbeitung bei Bildaufnahme"
++#: gst-libs/gst/tag/tags.c:122
++msgid "35 mm equivalent focal length of the lens used capturing the image, in mm"
++msgstr "eine zu 35 mm äquivalente Brennweite des Objektivs bei Bildaufnahme, in mm"
+
++#: gst-libs/gst/tag/tags.c:126
+ msgid "capturing digital zoom ratio"
+ msgstr "Digitale Vergrößerung"
+
++#: gst-libs/gst/tag/tags.c:127
+ msgid "Digital zoom ratio used when capturing an image"
+ msgstr "Die verwendete digitale Vergrößerung bei Bildaufnahme"
+
++#: gst-libs/gst/tag/tags.c:130
+ msgid "capturing iso speed"
+ msgstr "ISO-Empfindlichkeit"
+
++#: gst-libs/gst/tag/tags.c:131
+ msgid "The ISO speed used when capturing an image"
+ msgstr "Die verwendete ISO-Empfindlichkeit bei Bildaufnahme"
+
++#: gst-libs/gst/tag/tags.c:134
+ msgid "capturing exposure program"
+ msgstr "Belichtungsprogramm"
+
++#: gst-libs/gst/tag/tags.c:135
+ msgid "The exposure program used when capturing an image"
+ msgstr "Das verwendete Belichtungsprogramm bei Bildaufnahme"
+
++#: gst-libs/gst/tag/tags.c:138
+ msgid "capturing exposure mode"
+ msgstr "Belichtungsmodus der Aufnahme"
+
++#: gst-libs/gst/tag/tags.c:139
+ msgid "The exposure mode used when capturing an image"
+ msgstr "Der verwendete Belichtungsmodus bei Bildaufnahme"
+
++#: gst-libs/gst/tag/tags.c:142
+ msgid "capturing exposure compensation"
+ msgstr "Belichtungskorrektur der Aufnahme"
+
++#: gst-libs/gst/tag/tags.c:143
+ msgid "The exposure compensation used when capturing an image"
+ msgstr "Die verwendete Belichtungskorrektur bei Bildaufnahme"
+
++#: gst-libs/gst/tag/tags.c:146
+ msgid "capturing scene capture type"
+ msgstr "Motivwahl"
+
++#: gst-libs/gst/tag/tags.c:147
+ msgid "The scene capture mode used when capturing an image"
+ msgstr "Die verwendete Motivwahl bei Bildaufnahme"
+
++#: gst-libs/gst/tag/tags.c:150
+ msgid "capturing gain adjustment"
+ msgstr "Anpassung der Aufnahmebelichtung"
+
+ # »Gain adjustment« kenne ich eigentlich nur bei Audiodateien. Müsste sich hier um eine Anpassung der Belichtungswerte handeln.
++#: gst-libs/gst/tag/tags.c:151
+ msgid "The overall gain adjustment applied on an image"
+ msgstr "Die auf das Bild angewendete Anpassung der Belichtungswerte"
+
++#: gst-libs/gst/tag/tags.c:154
+ msgid "capturing white balance"
+ msgstr "Weißabgleich"
+
++#: gst-libs/gst/tag/tags.c:155
+ msgid "The white balance mode set when capturing an image"
+ msgstr "Der eingestellte Weißabgleich bei Bildaufnahme"
+
++#: gst-libs/gst/tag/tags.c:158
+ msgid "capturing contrast"
+ msgstr "Kontrast"
+
++#: gst-libs/gst/tag/tags.c:159
+ msgid "The direction of contrast processing applied when capturing an image"
+ msgstr "Die Einstellung zur angewendeten Kontrastbearbeitung bei Bildaufnahme"
+
++#: gst-libs/gst/tag/tags.c:163
+ msgid "capturing saturation"
+ msgstr "Sättigung"
+
++#: gst-libs/gst/tag/tags.c:164
+ msgid "The direction of saturation processing applied when capturing an image"
-msgid ""
-"The metering mode used while determining exposure for capturing an image"
-msgstr ""
-"Die während der Bestimmung der Belichtungszeit eingesetzte Messmethode zur "
-"Aufnahme eines Bildes"
++msgstr "Die Einstellung zur angewendeten Sättigungsbearbeitung bei Bildaufnahme"
+
++#: gst-libs/gst/tag/tags.c:168
+ msgid "capturing sharpness"
+ msgstr "Schärfe"
+
++#: gst-libs/gst/tag/tags.c:169
+ msgid "The direction of sharpness processing applied when capturing an image"
+ msgstr "Die Einstellung zur angewendeten Schärfebearbeitung bei Bildaufnahme"
+
++#: gst-libs/gst/tag/tags.c:173
+ msgid "capturing flash fired"
+ msgstr "Blitzauslösung"
+
++#: gst-libs/gst/tag/tags.c:174
+ msgid "If the flash fired while capturing an image"
+ msgstr "Gibt an, ob der Blitz bei der Bildaufnahme ausgelöst wurde"
+
++#: gst-libs/gst/tag/tags.c:177
+ msgid "capturing flash mode"
+ msgstr "Blitzmodus"
+
++#: gst-libs/gst/tag/tags.c:178
+ msgid "The selected flash mode while capturing an image"
+ msgstr "Der gewählte Blitzmodus bei Bildaufnahme"
+
++#: gst-libs/gst/tag/tags.c:181
+ msgid "capturing metering mode"
+ msgstr "Messmethode der Aufnahme"
+
-msgid ""
-"Don't exit after showing the initial device list, but wait for devices to "
-"added/removed."
-msgstr ""
-"Nicht nach dem Anzeigen der anfänglichen Geräteliste beenden, sondern auf "
-"das Hinzufügen/Entfernen von Geräten warten."
++#: gst-libs/gst/tag/tags.c:182
++msgid "The metering mode used while determining exposure for capturing an image"
++msgstr "Die während der Bestimmung der Belichtungszeit eingesetzte Messmethode zur Aufnahme eines Bildes"
+
++#: gst-libs/gst/tag/tags.c:186
+ msgid "capturing source"
+ msgstr "Aufnahmequelle"
+
++#: gst-libs/gst/tag/tags.c:187
+ msgid "The source or type of device used for the capture"
+ msgstr "Die Quelle oder der Gerätetyp, mit dem aufgenommen wird"
+
++#: gst-libs/gst/tag/tags.c:190
+ msgid "image horizontal ppi"
+ msgstr "Horizontale Bildauflösung"
+
++#: gst-libs/gst/tag/tags.c:191
+ msgid "Media (image/video) intended horizontal pixel density in ppi"
+ msgstr "Horizontale Auflösung des Bildes oder Videos in ppi (Pixel pro Zoll)"
+
++#: gst-libs/gst/tag/tags.c:194
+ msgid "image vertical ppi"
+ msgstr "Vertikale Bildauflösung"
+
++#: gst-libs/gst/tag/tags.c:195
+ msgid "Media (image/video) intended vertical pixel density in ppi"
+ msgstr "Vertikale Auflösung des Bildes oder Videos in ppi (Pixel pro Zoll)"
+
++#: gst-libs/gst/tag/tags.c:198
+ msgid "ID3v2 frame"
+ msgstr "ID3v2-Rahmen"
+
++#: gst-libs/gst/tag/tags.c:198
+ msgid "unparsed id3v2 tag frame"
+ msgstr "unverarbeiteter ID3v2-Kennzeichnungsrahmen"
+
++#: gst-libs/gst/tag/tags.c:202
+ msgid "musical-key"
+ msgstr "musical-key"
+
++#: gst-libs/gst/tag/tags.c:202
+ msgid "Initial key in which the sound starts"
+ msgstr "Anfänglicher Schlüssel, in dem der Ton startet"
+
++#: tools/gst-device-monitor.c:260 tools/gst-play.c:1444
+ msgid "Print version information and exit"
+ msgstr "Versionsinformationen ausgeben und beenden"
+
-msgstr ""
-"Sie müssen mindestens einen Dateinamen oder eine Adresse zur Wiedergabe "
-"angeben."
++#: tools/gst-device-monitor.c:262
++msgid "Don't exit after showing the initial device list, but wait for devices to added/removed."
++msgstr "Nicht nach dem Anzeigen der anfänglichen Geräteliste beenden, sondern auf das Hinzufügen/Entfernen von Geräten warten."
+
++#: tools/gst-play.c:310
+ #, c-format
+ msgid "Volume: %.0f%%"
+ msgstr "Lautstärke: %.0f%%"
+
++#: tools/gst-play.c:349
+ msgid "Buffering..."
+ msgstr "Zwischenspeichern …"
+
++#: tools/gst-play.c:370
+ msgid "Clock lost, selecting a new one\n"
+ msgstr "Uhrzeit verloren, es wird eine neue gewählt\n"
+
++#: tools/gst-play.c:400 tools/gst-play.c:446 tools/gst-play.c:883
++#: tools/gst-play.c:1347
+ msgid "Reached end of play list."
+ msgstr "Das Ende der Wiedergabeliste wurde erreicht."
+
++#: tools/gst-play.c:613
+ msgid "Paused"
+ msgstr "Angehalten"
+
++#: tools/gst-play.c:671
+ #, c-format
+ msgid "Now playing %s\n"
+ msgstr "Momentan wird %s wiedergegeben\n"
+
++#: tools/gst-play.c:734
+ #, c-format
+ msgid "About to finish, preparing next title: %s"
+ msgstr "Kurz vor dem Abschluss. Nächster Titel wird vorbereitet: %s"
+
++#: tools/gst-play.c:979
+ #, c-format
+ msgid "Playback rate: %.2f"
+ msgstr "Wiedergabegeschwindigkeit: %.2f"
+
++#: tools/gst-play.c:983
+ #, c-format
+ msgid "Could not change playback rate to %.2f"
+ msgstr "Wiedergabegeschwindigkeit konnte nicht auf %.2f geändert werden"
+
++#: tools/gst-play.c:1287
+ msgid "space"
+ msgstr "Leertaste"
+
++#: tools/gst-play.c:1287
+ msgid "pause/unpause"
+ msgstr "Anhalten/Fortsetzen"
+
++#: tools/gst-play.c:1288
+ msgid "q or ESC"
+ msgstr "q oder ESC"
+
++#: tools/gst-play.c:1288
+ msgid "quit"
+ msgstr "Beenden"
+
++#: tools/gst-play.c:1289
+ msgid "> or n"
+ msgstr "> oder n"
+
++#: tools/gst-play.c:1289
+ msgid "play next"
+ msgstr "Nächsten wiedergeben"
+
++#: tools/gst-play.c:1290
+ msgid "< or b"
+ msgstr "< oder b"
+
++#: tools/gst-play.c:1290
+ msgid "play previous"
+ msgstr "Vorherigen wiedergeben"
+
++#: tools/gst-play.c:1291
+ msgid "seek forward"
+ msgstr "Vorspulen"
+
++#: tools/gst-play.c:1292
+ msgid "seek backward"
+ msgstr "Zurückspulen"
+
++#: tools/gst-play.c:1293
+ msgid "volume up"
+ msgstr "Lauter"
+
++#: tools/gst-play.c:1294
+ msgid "volume down"
+ msgstr "Leiser"
+
++#: tools/gst-play.c:1295
+ msgid "increase playback rate"
+ msgstr "Schneller wiedergeben"
+
++#: tools/gst-play.c:1296
+ msgid "decrease playback rate"
+ msgstr "Langsamer wiedergeben"
+
++#: tools/gst-play.c:1297
+ msgid "change playback direction"
+ msgstr "Wiedergaberichtung ändern"
+
++#: tools/gst-play.c:1298
+ msgid "enable/disable trick modes"
+ msgstr "Trickmodi einschalten/ausschalten"
+
++#: tools/gst-play.c:1299
+ msgid "change audio track"
+ msgstr "Audio-Titel wechseln"
+
++#: tools/gst-play.c:1300
+ msgid "change video track"
+ msgstr "Video-Titel wechseln"
+
++#: tools/gst-play.c:1301
+ msgid "change subtitle track"
+ msgstr "Untertitel wechseln"
+
++#: tools/gst-play.c:1302
+ msgid "seek to beginning"
+ msgstr "An den Anfang springen"
+
++#: tools/gst-play.c:1303
+ msgid "show keyboard shortcuts"
+ msgstr "Tastenkombinationen anzeigen"
+
++#: tools/gst-play.c:1306
+ msgid "Interactive mode - keyboard controls:"
+ msgstr "Interaktiver Modus - Tastatursteuerung:"
+
++#: tools/gst-play.c:1439
+ msgid "Output status information and property notifications"
+ msgstr "Zustandsinformation und Eigenschaftsmitteilungen ausgeben"
+
++#: tools/gst-play.c:1441
+ msgid "Control playback behaviour setting playbin 'flags' property"
+ msgstr "Steuerung der Wiedergabe mittels der playbin-Eigenschaft »flags«"
+
++#: tools/gst-play.c:1446
+ msgid "Video sink to use (default is autovideosink)"
+ msgstr "Zu verwendendes Video-Ziel (Voreinstellung ist »autovideosink«)"
+
++#: tools/gst-play.c:1448
+ msgid "Audio sink to use (default is autoaudiosink)"
+ msgstr "Zu verwendendes Audio-Ziel (Voreinstellung ist »autoaudiosink«)"
+
++#: tools/gst-play.c:1450
+ msgid "Enable gapless playback"
+ msgstr "Lückenlose Wiedergabe einschalten"
+
++#: tools/gst-play.c:1452
+ msgid "Shuffle playlist"
+ msgstr "Wiedergabeliste mischen"
+
++#: tools/gst-play.c:1455
+ msgid "Disable interactive control via the keyboard"
+ msgstr "Interaktive Steuerung via Tastatur deaktivieren"
+
++#: tools/gst-play.c:1457
+ msgid "Volume"
+ msgstr "Lautstärke"
+
++#: tools/gst-play.c:1459
+ msgid "Playlist file containing input media files"
+ msgstr "Datei mit Wiedergabeliste enthält Eingabe-Mediendateien"
+
++#: tools/gst-play.c:1461
+ msgid "Do not print any output (apart from errors)"
+ msgstr "Nichts ausgeben (außer Fehlermeldungen)"
+
++#: tools/gst-play.c:1463
+ msgid "Use playbin3 pipeline"
+ msgstr "playbin3-Weiterleitung verwenden"
+
++#: tools/gst-play.c:1464
+ msgid "(default varies depending on 'USE_PLAYBIN' env variable)"
+ msgstr "(die Vorgabe hängt von der Umgebungsvariablen »USE_PLAYBIN« ab)"
+
++#: tools/gst-play.c:1468
++msgid "Keep showing the last frame on EOS until quit or playlist change command (gapless is ignored)"
++msgstr "Bei Stromende das letzte Einzelbild dargestellt lassen bis zum Beenden oder Wechseln der Wiedergabeliste (lückenlose Wiedergabe wird ignoriert)"
++
++#: tools/gst-play.c:1546
+ #, c-format
+ msgid "Usage: %s FILE1|URI1 [FILE2|URI2] [FILE3|URI3] ..."
+ msgstr "Aufruf: %s DATEI1|URI1 [DATEI2|URI2] [DATEI3|URI3] ..."
+
++#: tools/gst-play.c:1550
+ msgid "You must provide at least one filename or URI to play."
-#~ msgstr ""
-#~ "Ein Plugin »%s« wird zum Abspielen dieses Datenstroms benötigt, ist aber "
-#~ "nicht installiert."
++msgstr "Sie müssen mindestens einen Dateinamen oder eine Adresse zur Wiedergabe angeben."
+
++#: tools/gst-play.c:1590
+ msgid "Press 'k' to see a list of keyboard shortcuts.\n"
+ msgstr "Geben Sie »k« ein, um die Liste der Tastenkombinationen zu sehen.\n"
+
+ #~ msgid "Could not create \"decodebin3\" element."
+ #~ msgstr "Das Element »decodebin3« konnte nicht erstellt werden."
+
+ #~ msgid "Could not create \"urisourcebin\" element."
+ #~ msgstr "Das Element »urisourcebin« konnte nicht erstellt werden."
+
+ #~ msgid "Internal data stream error."
+ #~ msgstr "Interner Fehler im Datenstrom."
+
+ #~ msgid "A %s plugin is required to play this stream, but not installed."
-#~ msgid ""
-#~ "Only a subtitle stream was detected. Either you are loading a subtitle "
-#~ "file or some other type of text file, or the media file was not "
-#~ "recognized."
-#~ msgstr ""
-#~ "Es wurde nur ein Datenstrom mit Untertiteln erkannt. Entweder laden Sie "
-#~ "eine Datei mit Untertiteln oder eine andere Art Textdatei oder die "
-#~ "Mediendatei wurde nicht erkannt."
-
-#~ msgid ""
-#~ "You do not have a decoder installed to handle this file. You might need "
-#~ "to install the necessary plugins."
-#~ msgstr ""
-#~ "Sie haben keinen Decoder für den Umgang mit dieser Datei installiert. Sie "
-#~ "sollten die erforderlichen Plugins installieren."
++#~ msgstr "Ein Plugin »%s« wird zum Abspielen dieses Datenstroms benötigt, ist aber nicht installiert."
+
+ #~ msgid "Uncompressed %s YUV %s"
+ #~ msgstr "Unkomprimiertes %s YUV %s"
+
+ #~ msgid "Master"
+ #~ msgstr "Hauptregler"
+
+ #~ msgid "Bass"
+ #~ msgstr "Bass"
+
+ #~ msgid "Treble"
+ #~ msgstr "Höhen"
+
+ #~ msgid "PCM"
+ #~ msgstr "PCM"
+
+ #~ msgid "Synth"
+ #~ msgstr "Synth"
+
+ #~ msgid "Line-in"
+ #~ msgstr "Line-Eingang"
+
+ #~ msgid "CD"
+ #~ msgstr "CD"
+
+ #~ msgid "Microphone"
+ #~ msgstr "Mikrofon"
+
+ #~ msgid "PC Speaker"
+ #~ msgstr "PC-Lautsprecher"
+
+ #~ msgid "Playback"
+ #~ msgstr "Wiedergabe"
+
+ #~ msgid "Capture"
+ #~ msgstr "Aufnahme"
+
+ #~ msgid "Could not open vfs file \"%s\" for writing: %s."
+ #~ msgstr "VFS-Datei »%s« konnte nicht zum Schreiben geöffnet werden: %s."
+
+ #~ msgid "No filename given"
+ #~ msgstr "Kein Dateiname angegeben"
+
+ #~ msgid "Could not close vfs file \"%s\"."
+ #~ msgstr "VFS-Datei »%s« konnte nicht geschlossen werden."
+
+ #~ msgid "Error while writing to file \"%s\"."
+ #~ msgstr "Fehler beim Schreiben in die Datei »%s«."
+
+ #~ msgid "Invalid subtitle URI \"%s\", subtitles disabled."
+ #~ msgstr "Ungültige Untertitel-URI »%s«, Untertitel werden deaktiviert."
+
+ #~ msgid "RTSP streams cannot be played yet."
+ #~ msgstr "RTSP-Datenströme können noch nicht abgespielt werden."
+
-#~ msgstr ""
-#~ "Das Gerät »%s« konnte nicht zum Lesen und Schreiben geöffnet werden."
++#~ msgid "Only a subtitle stream was detected. Either you are loading a subtitle file or some other type of text file, or the media file was not recognized."
++#~ msgstr "Es wurde nur ein Datenstrom mit Untertiteln erkannt. Entweder laden Sie eine Datei mit Untertiteln oder eine andere Art Textdatei oder die Mediendatei wurde nicht erkannt."
++
++#~ msgid "You do not have a decoder installed to handle this file. You might need to install the necessary plugins."
++#~ msgstr "Sie haben keinen Decoder für den Umgang mit dieser Datei installiert. Sie sollten die erforderlichen Plugins installieren."
+
+ #~ msgid "This is not a media file"
+ #~ msgstr "Dies ist keine Mediendatei"
+
+ #~ msgid "A subtitle stream was detected, but no video stream."
+ #~ msgstr "Ein Datenstrom mit Untertiteln wurde erkannt, aber kein Videostrom."
+
+ #~ msgid "Both autovideosink and xvimagesink elements are missing."
+ #~ msgstr "Beide Elemente »autovideosink« und »xvimagesink« fehlen."
+
+ #~ msgid "Both autoaudiosink and alsasink elements are missing."
+ #~ msgstr "Beide Elemente »autoaudiosink« und »alsasink« fehlen."
+
+ #~ msgid "Error while sending gdp header data to \"%s:%d\"."
+ #~ msgstr "Fehler beim Senden der GDP-Kopfdaten nach »%s:%d«."
+
+ #~ msgid "Error while sending gdp payload data to \"%s:%d\"."
+ #~ msgstr "Fehler beim Senden der GDP-Nutzdaten nach »%s:%d«."
+
+ #~ msgid "Connection to %s:%d refused."
+ #~ msgstr "Verbindung nach %s:%d wurde verweigert."
+
+ #~ msgid "Uncompressed planar YVU 4:2:0"
+ #~ msgstr "Unkomprimiertes ungepacktes YVU 4:2:0"
+
+ #~ msgid "Uncompressed packed YUV 4:1:0"
+ #~ msgstr "Unkomprimiertes gepacktes YUV 4:1:0"
+
+ #~ msgid "Uncompressed packed YVU 4:1:0"
+ #~ msgstr "Unkomprimiertes gepacktes YVU 4:1:0"
+
+ #~ msgid "Uncompressed packed YUV 4:1:1"
+ #~ msgstr "Unkomprimiertes gepacktes YUV 4:1:1"
+
+ #~ msgid "Uncompressed packed YUV 4:4:4"
+ #~ msgstr "Unkomprimiertes gepacktes YUV 4:4:4"
+
+ #~ msgid "Uncompressed planar YUV 4:2:2"
+ #~ msgstr "Unkomprimiertes ungepacktes YUV 4:2:2"
+
+ #~ msgid "Uncompressed planar YUV 4:1:1"
+ #~ msgstr "Unkomprimiertes ungepacktes YUV 4:1:1"
+
+ #~ msgid "Uncompressed black and white Y-plane"
+ #~ msgstr "Unkomprimiertes schwarz-weißes Y-plane"
+
+ #~ msgid "Raw PCM audio"
+ #~ msgstr "Rohes PCM-Audio"
+
+ #~ msgid "Raw %d-bit floating-point audio"
+ #~ msgstr "Rohes %d-Bit Gleitkomma-Audio"
+
+ #~ msgid "Raw floating-point audio"
+ #~ msgstr "Rohes Gleitkomma-Audio"
+
+ #~ msgid "No device specified."
+ #~ msgstr "Kein Gerät angegeben."
+
+ #~ msgid "Device \"%s\" does not exist."
+ #~ msgstr "Das Gerät »%s« existiert nicht."
+
+ #~ msgid "Device \"%s\" is already being used."
+ #~ msgstr "Das Gerät »%s« wird bereits verwendet."
+
+ #~ msgid "Could not open device \"%s\" for reading and writing."
-#~ msgstr ""
-#~ "Es können nicht Untertiteltexte und Kleinbilder zugleich angezeigt werden."
++#~ msgstr "Das Gerät »%s« konnte nicht zum Lesen und Schreiben geöffnet werden."
+
+ #~ msgid "Can't display both text subtitles and subpictures."
++#~ msgstr "Es können nicht Untertiteltexte und Kleinbilder zugleich angezeigt werden."
+
+ #~ msgid "No Temp directory specified."
+ #~ msgstr "Kein temporärer Ordner angegeben."
+
+ #~ msgid "Could not create temp file \"%s\"."
+ #~ msgstr "Temporäre Datei »%s« konnte nicht erstellt werden."
+
+ #~ msgid "Could not open file \"%s\" for reading."
+ #~ msgstr "Die Datei »%s« konnte zum Lesen nicht geöffnet werden."
+
+ #~ msgid "Internal data flow error."
+ #~ msgstr "Interner Fehler im Datenstrom."
+
+ #~ msgid "Could not create \"queue2\" element."
+ #~ msgstr "Das Element »queue2« konnte nicht erstellt werden."
+
+ #~ msgid "Could not create \"typefind\" element."
+ #~ msgstr "Das Element »typefind« konnte nicht erstellt werden."
+
+ #~ msgid "No file name specified."
+ #~ msgstr "Kein Dateiname angegeben."
--- /dev/null
-# Stéphane Aulery <lkppo@free.fr>, 2015-2016.
+ # Translation of gst-plugins-base to French
+ # Copyright (C) 2003-2011 GStreamer core team
+ # This file is distributed under the same license as the gst-plugins-base package.
+ #
+ # Nicolas Velin <nicolas@velin.fr>, 2008.
+ # Claude Paroz <claude@2xlibre.net>, 2008-2011.
-"Project-Id-Version: gst-plugins-base 1.10.0\n"
++# Stéphane Aulery <lkppo@free.fr>, 2015-2016, 2019.
+ #
+ msgid ""
+ msgstr ""
-"POT-Creation-Date: 2019-02-26 11:43+0000\n"
-"PO-Revision-Date: 2016-12-23 20:39+0100\n"
++"Project-Id-Version: gst-plugins-base 1.16.0\n"
+ "Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-msgstr ""
-"Impossible d’utiliser le périphérique pour la lecture en mode %d voie(s)."
++"POT-Creation-Date: 2019-04-19 00:20+0100\n"
++"PO-Revision-Date: 2019-09-03 20:19+0200\n"
+ "Last-Translator: Stéphane Aulery <lkppo@free.fr>\n"
+ "Language-Team: French <traduc@traduc.org>\n"
+ "Language: fr\n"
+ "MIME-Version: 1.0\n"
+ "Content-Type: text/plain; charset=UTF-8\n"
+ "Content-Transfer-Encoding: 8bit\n"
+ "X-Bugs: Report translation errors to the Language-Team address.\n"
+ "Plural-Forms: nplurals=2; plural=n>1;\n"
+
++#: ext/alsa/gstalsasink.c:579
+ msgid "Could not open device for playback in mono mode."
+ msgstr "Impossible d’utiliser le périphérique pour la lecture en mode mono."
+
++#: ext/alsa/gstalsasink.c:581
+ msgid "Could not open device for playback in stereo mode."
+ msgstr "Impossible d’utiliser le périphérique pour la lecture en mode stéréo."
+
++#: ext/alsa/gstalsasink.c:585
+ #, c-format
+ msgid "Could not open device for playback in %d-channel mode."
-msgid ""
-"Could not open audio device for playback. Device is being used by another "
-"application."
-msgstr ""
-"Impossible d’utiliser le périphérique audio pour la lecture. Celui-ci est "
-"occupé par une autre application."
++msgstr "Impossible d’utiliser le périphérique pour la lecture en mode %d voie(s)."
+
-msgstr ""
-"Erreur de sortie vers le périphérique audio. Le périphérique a été "
-"déconnecté."
++#: ext/alsa/gstalsasink.c:858
++msgid "Could not open audio device for playback. Device is being used by another application."
++msgstr "Impossible d’utiliser le périphérique audio pour la lecture. Celui-ci est occupé par une autre application."
+
++#: ext/alsa/gstalsasink.c:863
+ msgid "Could not open audio device for playback."
+ msgstr "Impossible d’utiliser le périphérique audio pour la lecture."
+
++#: ext/alsa/gstalsasink.c:1076
+ msgid "Error outputting to audio device. The device has been disconnected."
-msgstr ""
-"Impossible d’utiliser le périphérique pour l’enregistrement en mode mono."
++msgstr "Erreur de sortie vers le périphérique audio. Le périphérique a été déconnecté."
+
++#: ext/alsa/gstalsasrc.c:444
+ msgid "Could not open device for recording in mono mode."
-msgstr ""
-"Impossible d’utiliser le périphérique pour l’enregistrement en mode stéréo."
++msgstr "Impossible d’utiliser le périphérique pour l’enregistrement en mode mono."
+
++#: ext/alsa/gstalsasrc.c:446
+ msgid "Could not open device for recording in stereo mode."
-msgstr ""
-"Impossible d’utiliser le périphérique pour l'enregistrement en mode %d "
-"voie(s)."
++msgstr "Impossible d’utiliser le périphérique pour l’enregistrement en mode stéréo."
+
++#: ext/alsa/gstalsasrc.c:450
+ #, c-format
+ msgid "Could not open device for recording in %d-channel mode"
-msgid ""
-"Could not open audio device for recording. Device is being used by another "
-"application."
-msgstr ""
-"Impossible d’utiliser le périphérique audio pour l’enregistrement. Celui-ci "
-"est occupé par une autre application."
++msgstr "Impossible d’utiliser le périphérique pour l'enregistrement en mode %d voie(s)."
+
-msgstr ""
-"Erreur d’enregistrement de puis le périphèrique audio. Le périphèrique a été "
-"déconnecté."
++#: ext/alsa/gstalsasrc.c:741
++msgid "Could not open audio device for recording. Device is being used by another application."
++msgstr "Impossible d’utiliser le périphérique audio pour l’enregistrement. Celui-ci est occupé par une autre application."
+
++#: ext/alsa/gstalsasrc.c:746
+ msgid "Could not open audio device for recording."
+ msgstr "Impossible d’utiliser le périphérique audio pour l’enregistrement."
+
++#: ext/alsa/gstalsasrc.c:992
+ msgid "Error recording from audio device. The device has been disconnected."
-msgstr ""
++msgstr "Erreur d’enregistrement depuis le périphérique audio. Le périphérique a été déconnecté."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:277 ext/cdparanoia/gstcdparanoiasrc.c:283
+ msgid "Could not open CD device for reading."
+ msgstr "Impossible d’utiliser le lecteur CD pour la lecture."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:409
+ msgid "Could not seek CD."
+ msgstr "Impossible de rechercher sur le CD."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:417
+ msgid "Could not read CD."
+ msgstr "Impossible de lire le CD."
+
++#: ext/gl/gstgltestsrc.c:491
+ msgid "failed to draw pattern"
-msgstr ""
++msgstr "Échec du tracé du motif"
+
++#: ext/gl/gstgltestsrc.c:492
+ msgid "A GL error occurred"
-msgstr ""
-
++msgstr "Erreur de rendu GL"
+
++#: ext/gl/gstgltestsrc.c:498
+ msgid "format wasn't negotiated before get function"
-#, fuzzy
++msgstr "le format n'a pas été négocié avant l'appel de fonction"
++
++#: gst/encoding/gstencodebin.c:1636 gst/playback/gstplaybin2.c:3399
++#: gst/playback/gstplaysink.c:1522 gst/playback/gstplaysink.c:1535
++#: gst/playback/gstplaysink.c:1872 gst/playback/gstplaysink.c:1904
++#: gst/playback/gstplaysink.c:2488 gst/playback/gstplaysink.c:2537
++#: gst/playback/gstplaysink.c:2552 gst/playback/gstplaysink.c:2577
++#: gst/playback/gstplaysink.c:2609 gst/playback/gstplaysink.c:2757
++#: gst/playback/gstplaysink.c:2788 gst/playback/gstplaysink.c:3166
++#: gst/playback/gstplaysink.c:3175 gst/playback/gstplaysink.c:3184
++#: gst/playback/gstplaysink.c:3193 gst/playback/gstplaysink.c:3606
++#: gst/playback/gstplaysink.c:4526 gst/playback/gstplaysinkconvertbin.c:97
++#: gst/playback/gstplaysinkconvertbin.c:117
++#: gst/playback/gsturidecodebin.c:1472 gst/playback/gsturisourcebin.c:1597
+ #, c-format
+ msgid "Missing element '%s' - check your GStreamer installation."
+ msgstr "Élément « %s » manquant — Vérifiez votre installation de GStreamer."
+
++#: gst/playback/gstdecodebin2.c:1883 gst/playback/gstparsebin.c:1553
+ msgid "Could not determine type of stream"
+ msgstr "Impossible de déterminer le type de flux"
+
++#: gst/playback/gstdecodebin2.c:2861 gst/playback/gstparsebin.c:2385
+ msgid "This appears to be a text file"
+ msgstr "Ce fichier semble être un fichier texte"
+
++#: gst/playback/gstplaybin2.c:5452
+ msgid "Could not create \"uridecodebin\" element."
+ msgstr "Impossible de créer un élément « uridecodebin »."
+
-msgstr "Impossible de créer un élément « uridecodebin »."
++#: gst/playback/gstplaybin3.c:4558
+ msgid "Could not create \"uridecodebin3\" element."
-msgstr ""
++msgstr "Impossible de créer un élément « uridecodebin3 »."
+
++#: gst/playback/gstplaysink.c:2003
+ #, c-format
+ msgid "Both autovideosink and %s elements are missing."
+ msgstr "Les éléments autovideosink et %s sont tous deux manquants."
+
++#: gst/playback/gstplaysink.c:2007
+ msgid "The autovideosink element is missing."
+ msgstr "L'élément autovideosink est manquant."
+
++#: gst/playback/gstplaysink.c:2012
+ #, c-format
+ msgid "Configured videosink %s is not working."
+ msgstr "L’élément videosink %s configuré ne fonctionne pas."
+
++#: gst/playback/gstplaysink.c:2016
+ #, c-format
+ msgid "Both autovideosink and %s elements are not working."
+ msgstr "Les éléments autovideosink et %s ne fonctionnent pas."
+
++#: gst/playback/gstplaysink.c:2020
+ msgid "The autovideosink element is not working."
+ msgstr "L’élément autovideosink ne fonctionne pas."
+
++#: gst/playback/gstplaysink.c:2525
+ msgid "Custom text sink element is not usable."
+ msgstr "L’élément récepteur de texte personnalisé n’est pas utilisable."
+
++#: gst/playback/gstplaysink.c:2903
+ msgid "No volume control found"
+ msgstr "Aucun contrôle de volume trouvé"
+
++#: gst/playback/gstplaysink.c:2933
+ #, c-format
+ msgid "Both autoaudiosink and %s elements are missing."
+ msgstr "Les éléments autoaudiosink et %s sont tous deux manquants."
+
++#: gst/playback/gstplaysink.c:2937
+ msgid "The autoaudiosink element is missing."
+ msgstr "L'élément autoaudiosink est manquant."
+
++#: gst/playback/gstplaysink.c:2942
+ #, c-format
+ msgid "Configured audiosink %s is not working."
+ msgstr "L’élément audiosink %s configuré ne fonctionne pas."
+
++#: gst/playback/gstplaysink.c:2946
+ #, c-format
+ msgid "Both autoaudiosink and %s elements are not working."
+ msgstr "Les éléments autoaudiosink et %s ne fonctionnent pas."
+
++#: gst/playback/gstplaysink.c:2950
+ msgid "The autoaudiosink element is not working."
+ msgstr "L’élément autoaudiosink ne fonctionne pas."
+
++#: gst/playback/gstplaysink.c:3269 gst/playback/gstplaysink.c:3274
+ msgid "Can't play a text file without video or visualizations."
+ msgstr "Impossible de lire un fichier texte sans vidéo ou aperçus."
+
++#: gst/playback/gsturidecodebin.c:921
+ #, c-format
+ msgid "No decoder available for type '%s'."
+ msgstr "Aucun décodeur n’est disponible pour le type « %s »."
+
++#: gst/playback/gsturidecodebin.c:1378 gst/playback/gsturisourcebin.c:1491
+ msgid "No URI specified to play from."
+ msgstr "Aucun URI source indiquée pour la lecture."
+
++#: gst/playback/gsturidecodebin.c:1384 gst/playback/gsturisourcebin.c:1497
+ #, c-format
+ msgid "Invalid URI \"%s\"."
+ msgstr "URI « %s » non valide."
+
++#: gst/playback/gsturidecodebin.c:1391 gst/playback/gsturisourcebin.c:1504
+ msgid "This stream type cannot be played yet."
+ msgstr "Ce type de flux ne peut pas encore être lu."
+
++#: gst/playback/gsturidecodebin.c:1409 gst/playback/gsturisourcebin.c:1522
+ #, c-format
+ msgid "No URI handler implemented for \"%s\"."
+ msgstr "Aucun gestionnaire d’URI implémenté pour « %s »."
+
++#: gst/playback/gsturidecodebin.c:2289 gst/playback/gsturisourcebin.c:2228
+ msgid "Source element is invalid."
+ msgstr "Élément source non valide."
+
++#: gst/tcp/gsttcpclientsink.c:216
+ #, c-format
+ msgid "Error while sending data to \"%s:%d\"."
+ msgstr "Erreur lors de l’envoi de données vers « %s:%d »."
+
++#: gst-libs/gst/audio/gstaudiobasesrc.c:842
+ msgid "Can't record audio fast enough"
+ msgstr "Impossible d’enregistrer assez rapidement les données audio"
+
++#: gst-libs/gst/audio/gstaudiocdsrc.c:1625
+ msgid "This CD has no audio tracks"
+ msgstr "Ce CD ne contient aucune piste audio"
+
++#: gst-libs/gst/pbutils/descriptions.c:89
+ msgid "ID3 tag"
+ msgstr "Étiquette ID3"
+
++#: gst-libs/gst/pbutils/descriptions.c:90
++#: gst-libs/gst/pbutils/descriptions.c:91
+ msgid "APE tag"
+ msgstr "Étiquette APE"
+
++#: gst-libs/gst/pbutils/descriptions.c:92
+ msgid "ICY internet radio"
+ msgstr "Radio Internet ICY"
+
++#: gst-libs/gst/pbutils/descriptions.c:127
+ msgid "Apple Lossless Audio (ALAC)"
+ msgstr "ALAC (Apple Lossless Audio)"
+
++#: gst-libs/gst/pbutils/descriptions.c:143
+ msgid "Free Lossless Audio Codec (FLAC)"
+ msgstr "FLAC (Free Lossless Audio Codec)"
+
++#: gst-libs/gst/pbutils/descriptions.c:173
++#: gst-libs/gst/pbutils/descriptions.c:174
+ msgid "Lossless True Audio (TTA)"
+ msgstr "TTA (Lossless True Audio)"
+
++#: gst-libs/gst/pbutils/descriptions.c:182
+ msgid "Windows Media Speech"
+ msgstr "Windows Media Speech"
+
++#: gst-libs/gst/pbutils/descriptions.c:199
+ msgid "CYUV Lossless"
+ msgstr "CYUV sans perte"
+
++#: gst-libs/gst/pbutils/descriptions.c:203
+ msgid "FFMpeg v1"
+ msgstr "FFMpeg v1"
+
++#: gst-libs/gst/pbutils/descriptions.c:217
+ msgid "Lossless MSZH"
+ msgstr "MSZH sans perte"
+
+ # http://fr.wikipedia.org/wiki/Run-length_encoding
++#: gst-libs/gst/pbutils/descriptions.c:228
+ msgid "Run-length encoding"
+ msgstr "Codage par plages"
+
++#: gst-libs/gst/pbutils/descriptions.c:274
+ msgid "Timed Text"
+ msgstr "Timed Text"
+
++#: gst-libs/gst/pbutils/descriptions.c:278
+ msgid "Subtitle"
+ msgstr "Sous-titre"
+
++#: gst-libs/gst/pbutils/descriptions.c:279
+ msgid "MPL2 subtitle format"
+ msgstr "Format de sous-titres MPL2"
+
++#: gst-libs/gst/pbutils/descriptions.c:280
+ msgid "DKS subtitle format"
+ msgstr "Format de sous-titres DKS"
+
++#: gst-libs/gst/pbutils/descriptions.c:281
+ msgid "QTtext subtitle format"
+ msgstr "Format de sous-titres QTtext"
+
++#: gst-libs/gst/pbutils/descriptions.c:282
+ msgid "Sami subtitle format"
+ msgstr "Format de sous-titres Sami"
+
++#: gst-libs/gst/pbutils/descriptions.c:283
+ msgid "TMPlayer subtitle format"
+ msgstr "Format de sous-titres TMPlayer"
+
++#: gst-libs/gst/pbutils/descriptions.c:287
+ msgid "CEA 608 Closed Caption"
-msgstr ""
++msgstr "Sous-titres CEA 608"
+
++#: gst-libs/gst/pbutils/descriptions.c:288
+ msgid "CEA 708 Closed Caption"
-msgstr ""
-"Distance focale de l’optique utilisée pour la capture de l’image (en mm)"
++msgstr "Sous-titres CEA 708"
+
++#: gst-libs/gst/pbutils/descriptions.c:289
+ msgid "Kate subtitle format"
+ msgstr "Format de sous-titres Kate"
+
++#: gst-libs/gst/pbutils/descriptions.c:290
+ msgid "WebVTT subtitle format"
+ msgstr "Format de sous-titres WebVTT"
+
++#: gst-libs/gst/pbutils/descriptions.c:455
++#: gst-libs/gst/pbutils/descriptions.c:458
++#: gst-libs/gst/pbutils/descriptions.c:508
+ msgid "Uncompressed video"
+ msgstr "Vidéo non compressée"
+
++#: gst-libs/gst/pbutils/descriptions.c:463
+ msgid "Uncompressed gray"
+ msgstr "Niveaux de gris non compressés"
+
++#: gst-libs/gst/pbutils/descriptions.c:486
+ #, c-format
+ msgid "Uncompressed packed YUV %s"
+ msgstr "YUV empaqueté %s non compressé"
+
++#: gst-libs/gst/pbutils/descriptions.c:488
+ #, c-format
+ msgid "Uncompressed semi-planar YUV %s"
+ msgstr "YUV semi-planaire %s non compressé"
+
++#: gst-libs/gst/pbutils/descriptions.c:490
+ #, c-format
+ msgid "Uncompressed planar YUV %s"
+ msgstr "YUV planaire %s non compressé"
+
++#: gst-libs/gst/pbutils/descriptions.c:501
+ #, c-format
+ msgid "Uncompressed palettized %d-bit %s"
+ msgstr "%2$s %1$d bits en palette non compressé"
+
++#: gst-libs/gst/pbutils/descriptions.c:504
+ #, c-format
+ msgid "Uncompressed %d-bit %s"
+ msgstr "%2$s %1$d bits non compressé"
+
++#: gst-libs/gst/pbutils/descriptions.c:586
+ #, c-format
+ msgid "DivX MPEG-4 Version %d"
+ msgstr "MPEG-4 DivX version %d"
+
++#: gst-libs/gst/pbutils/descriptions.c:834
+ msgid "Uncompressed audio"
+ msgstr "Audio non compressé"
+
++#: gst-libs/gst/pbutils/descriptions.c:840
+ #, c-format
+ msgid "Raw %d-bit %s audio"
+ msgstr "Audio brut %2$s %1$d bits"
+
++#: gst-libs/gst/pbutils/descriptions.c:942
+ msgid "Audio CD source"
+ msgstr "Source CD audio"
+
++#: gst-libs/gst/pbutils/descriptions.c:945
+ msgid "DVD source"
+ msgstr "Source DVD"
+
++#: gst-libs/gst/pbutils/descriptions.c:948
+ msgid "Real Time Streaming Protocol (RTSP) source"
+ msgstr "Source RTSP (Real Time Streaming Protocol)"
+
++#: gst-libs/gst/pbutils/descriptions.c:952
+ msgid "Microsoft Media Server (MMS) protocol source"
+ msgstr "Source protocole MMS (Microsoft Media Server)"
+
++#: gst-libs/gst/pbutils/descriptions.c:960
+ #, c-format
+ msgid "%s protocol source"
+ msgstr "Source protocole %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1034
+ #, c-format
+ msgid "%s video RTP depayloader"
+ msgstr "Depayloader RTP vidéo %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1036
+ #, c-format
+ msgid "%s audio RTP depayloader"
+ msgstr "Depayloader RTP audio %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1038
+ #, c-format
+ msgid "%s RTP depayloader"
+ msgstr "Depayloader RTP %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1045
+ #, c-format
+ msgid "%s demuxer"
+ msgstr "Démultiplexeur %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1047
+ #, c-format
+ msgid "%s decoder"
+ msgstr "Décodeur %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1086
+ #, c-format
+ msgid "%s video RTP payloader"
+ msgstr "Payloader RTP vidéo %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1088
+ #, c-format
+ msgid "%s audio RTP payloader"
+ msgstr "Payloader RTP audio %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1090
+ #, c-format
+ msgid "%s RTP payloader"
+ msgstr "Payloader RTP %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1097
+ #, c-format
+ msgid "%s muxer"
+ msgstr "Multiplexeur %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1099
+ #, c-format
+ msgid "%s encoder"
+ msgstr "Encodeur %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1133
+ #, c-format
+ msgid "GStreamer element %s"
+ msgstr "Élément GStreamer %s"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:558
+ msgid "Unknown source element"
+ msgstr "Élément source inconnu"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:561
+ msgid "Unknown sink element"
+ msgstr "Élément d’entrée inconnu"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:564
+ msgid "Unknown element"
+ msgstr "Élément inconnu"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:567
+ msgid "Unknown decoder element"
+ msgstr "Élément décodeur inconnu"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:570
+ msgid "Unknown encoder element"
+ msgstr "Élément codeur inconnu"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:575
+ msgid "Plugin or element of unknown type"
+ msgstr "Greffon ou élément de type inconnu"
+
++#: gst-libs/gst/tag/gsttagdemux.c:1243
+ msgid "Failed to read tag: not enough data"
+ msgstr "Échec de lecture de l’étiquette : données insuffisantes"
+
++#: gst-libs/gst/tag/tags.c:77
+ msgid "track ID"
+ msgstr "identifiant de piste"
+
++#: gst-libs/gst/tag/tags.c:77
+ msgid "MusicBrainz track ID"
+ msgstr "identifiant de piste MusicBrainz"
+
++#: gst-libs/gst/tag/tags.c:79
+ msgid "artist ID"
+ msgstr "identifiant d’artiste"
+
++#: gst-libs/gst/tag/tags.c:79
+ msgid "MusicBrainz artist ID"
+ msgstr "identifiant d’artiste MusicBrainz"
+
++#: gst-libs/gst/tag/tags.c:81
+ msgid "album ID"
+ msgstr "identifiant d’album"
+
++#: gst-libs/gst/tag/tags.c:81
+ msgid "MusicBrainz album ID"
+ msgstr "identifiant d’album MusicBrainz"
+
++#: gst-libs/gst/tag/tags.c:84
+ msgid "album artist ID"
+ msgstr "identifiant d’artiste de l’album"
+
++#: gst-libs/gst/tag/tags.c:84
+ msgid "MusicBrainz album artist ID"
+ msgstr "identifiant d’artiste de l’album MusicBrainz"
+
++#: gst-libs/gst/tag/tags.c:86
+ msgid "track TRM ID"
+ msgstr "identifiant TRM de piste"
+
++#: gst-libs/gst/tag/tags.c:86
+ msgid "MusicBrainz TRM ID"
+ msgstr "identifiant TRM MusicBrainz"
+
++#: gst-libs/gst/tag/tags.c:108
+ msgid "capturing shutter speed"
+ msgstr "vitesse d’obturation de la capture"
+
++#: gst-libs/gst/tag/tags.c:109
+ msgid "Shutter speed used when capturing an image, in seconds"
+ msgstr "Vitesse d’obturation utilisée lors de la capture de l’image (en s)"
+
+ # http://www.blog-couleur.com/?Qu-est-ce-que-l-ouverture-en
++#: gst-libs/gst/tag/tags.c:112
+ msgid "capturing focal ratio"
+ msgstr "Focal de capture"
+
++#: gst-libs/gst/tag/tags.c:113
+ msgid "Focal ratio (f-number) used when capturing the image"
+ msgstr "Focal (nombre décimal) utilisée lors de la capture de l’image"
+
++#: gst-libs/gst/tag/tags.c:116
+ msgid "capturing focal length"
+ msgstr "distance focale de la capture"
+
++#: gst-libs/gst/tag/tags.c:117
+ msgid "Focal length of the lens used capturing the image, in mm"
-msgid ""
-"35 mm equivalent focal length of the lens used capturing the image, in mm"
-msgstr ""
-"Distance focale de l’optique équivalente à 35 mm, utilisée pour la capture "
-"de l’image (en mm)"
++msgstr "Distance focale de l’optique utilisée pour la capture de l’image (en mm)"
+
++#: gst-libs/gst/tag/tags.c:121
+ msgid "capturing 35 mm equivalent focal length"
+ msgstr "distance focale de la capture équivalente à 35 mm"
+
-msgstr ""
-"La direction du traitement de contraste appliqué lors de la capture d’une "
-"image"
++#: gst-libs/gst/tag/tags.c:122
++msgid "35 mm equivalent focal length of the lens used capturing the image, in mm"
++msgstr "Distance focale de l’optique équivalente à 35 mm, utilisée pour la capture de l’image (en mm)"
+
++#: gst-libs/gst/tag/tags.c:126
+ msgid "capturing digital zoom ratio"
+ msgstr "valeur du zoom numérique de la capture"
+
++#: gst-libs/gst/tag/tags.c:127
+ msgid "Digital zoom ratio used when capturing an image"
+ msgstr "Valeur du zoom numérique utilisé lors de la capture de l’image"
+
++#: gst-libs/gst/tag/tags.c:130
+ msgid "capturing iso speed"
+ msgstr "sensibilité ISO de la capture"
+
++#: gst-libs/gst/tag/tags.c:131
+ msgid "The ISO speed used when capturing an image"
+ msgstr "La sensibilité ISO utilisée lors de la capture de l’image"
+
++#: gst-libs/gst/tag/tags.c:134
+ msgid "capturing exposure program"
+ msgstr "programme d’exposition de la capture"
+
++#: gst-libs/gst/tag/tags.c:135
+ msgid "The exposure program used when capturing an image"
+ msgstr "Le programme d’exposition utilisé lors de la capture de l’image"
+
++#: gst-libs/gst/tag/tags.c:138
+ msgid "capturing exposure mode"
+ msgstr "mode d’exposition de la capture"
+
++#: gst-libs/gst/tag/tags.c:139
+ msgid "The exposure mode used when capturing an image"
+ msgstr "Le mode d'exposition utilisé lors de la capture de l’image"
+
++#: gst-libs/gst/tag/tags.c:142
+ msgid "capturing exposure compensation"
+ msgstr "compensation d’exposition de la capture"
+
++#: gst-libs/gst/tag/tags.c:143
+ msgid "The exposure compensation used when capturing an image"
+ msgstr "La compensation d'exposition utilisée lors de la capture de l’image"
+
++#: gst-libs/gst/tag/tags.c:146
+ msgid "capturing scene capture type"
+ msgstr "type de mode scène de la capture"
+
++#: gst-libs/gst/tag/tags.c:147
+ msgid "The scene capture mode used when capturing an image"
+ msgstr "Le mode scène utilisé lors de la capture de l’image"
+
++#: gst-libs/gst/tag/tags.c:150
+ msgid "capturing gain adjustment"
+ msgstr "ajustement du gain de la capture"
+
++#: gst-libs/gst/tag/tags.c:151
+ msgid "The overall gain adjustment applied on an image"
+ msgstr "L'ajustement général du gain appliqué à une image"
+
++#: gst-libs/gst/tag/tags.c:154
+ msgid "capturing white balance"
+ msgstr "balance des blancs de la capture"
+
++#: gst-libs/gst/tag/tags.c:155
+ msgid "The white balance mode set when capturing an image"
+ msgstr "Le mode de balance des blancs défini lors de la capture d’une image"
+
++#: gst-libs/gst/tag/tags.c:158
+ msgid "capturing contrast"
+ msgstr "contraste de la capture"
+
++#: gst-libs/gst/tag/tags.c:159
+ msgid "The direction of contrast processing applied when capturing an image"
-msgstr ""
-"La direction du traitement de saturation appliqué lors de la capture d’une "
-"image"
++msgstr "La direction du traitement de contraste appliqué lors de la capture d’une image"
+
++#: gst-libs/gst/tag/tags.c:163
+ msgid "capturing saturation"
+ msgstr "saturation de la capture"
+
++#: gst-libs/gst/tag/tags.c:164
+ msgid "The direction of saturation processing applied when capturing an image"
-msgstr ""
-"La direction du traitement de netteté appliqué lors de la capture d’une image"
++msgstr "La direction du traitement de saturation appliqué lors de la capture d’une image"
+
++#: gst-libs/gst/tag/tags.c:168
+ msgid "capturing sharpness"
+ msgstr "netteté de la capture"
+
++#: gst-libs/gst/tag/tags.c:169
+ msgid "The direction of sharpness processing applied when capturing an image"
-msgid ""
-"The metering mode used while determining exposure for capturing an image"
-msgstr ""
-"Le mode de mesure utilisé lors de l’évaluation de l’exposition pour la "
-"capture d’une image"
++msgstr "La direction du traitement de netteté appliqué lors de la capture d’une image"
+
++#: gst-libs/gst/tag/tags.c:173
+ msgid "capturing flash fired"
+ msgstr "flash utilisé pour la capture"
+
++#: gst-libs/gst/tag/tags.c:174
+ msgid "If the flash fired while capturing an image"
+ msgstr "Indique si le flash a été utilisé pour capturer une image"
+
++#: gst-libs/gst/tag/tags.c:177
+ msgid "capturing flash mode"
+ msgstr "mode de flash de la capture"
+
++#: gst-libs/gst/tag/tags.c:178
+ msgid "The selected flash mode while capturing an image"
+ msgstr "Le mode de flash sélectionné lors de la capture d’une image"
+
++#: gst-libs/gst/tag/tags.c:181
+ msgid "capturing metering mode"
+ msgstr "mode de mesure de la capture"
+
-msgstr ""
-"Densité de pixels horizontale annoncée par le média (image/vidéo), en points "
-"par pouce"
++#: gst-libs/gst/tag/tags.c:182
++msgid "The metering mode used while determining exposure for capturing an image"
++msgstr "Le mode de mesure utilisé lors de l’évaluation de l’exposition pour la capture d’une image"
+
++#: gst-libs/gst/tag/tags.c:186
+ msgid "capturing source"
+ msgstr "source de capture"
+
++#: gst-libs/gst/tag/tags.c:187
+ msgid "The source or type of device used for the capture"
+ msgstr "La source ou le type d’appareil utilisé pour la capture"
+
++#: gst-libs/gst/tag/tags.c:190
+ msgid "image horizontal ppi"
+ msgstr "ppp horizontal de l'image"
+
++#: gst-libs/gst/tag/tags.c:191
+ msgid "Media (image/video) intended horizontal pixel density in ppi"
-msgstr ""
-"Densité de pixels verticale annoncée par le média (image/vidéo), en points "
-"par pouce"
++msgstr "Densité de pixels horizontale annoncée par le média (image/vidéo), en points par pouce"
+
++#: gst-libs/gst/tag/tags.c:194
+ msgid "image vertical ppi"
+ msgstr "ppp vertical de l'image"
+
++#: gst-libs/gst/tag/tags.c:195
+ msgid "Media (image/video) intended vertical pixel density in ppi"
-msgid ""
-"Don't exit after showing the initial device list, but wait for devices to "
-"added/removed."
-msgstr ""
-"Ne pas quitter après l’affichage de la liste initiale des périphèriques, "
-"mais attendre l’ajout ou la suppression de périphèriques."
++msgstr "Densité de pixels verticale annoncée par le média (image/vidéo), en points par pouce"
+
++#: gst-libs/gst/tag/tags.c:198
+ msgid "ID3v2 frame"
+ msgstr "Frame ID3v2"
+
++#: gst-libs/gst/tag/tags.c:198
+ msgid "unparsed id3v2 tag frame"
+ msgstr "frame tag id3v2 non analysé"
+
++#: gst-libs/gst/tag/tags.c:202
+ msgid "musical-key"
+ msgstr "Clef-musicale"
+
++#: gst-libs/gst/tag/tags.c:202
+ msgid "Initial key in which the sound starts"
+ msgstr "Clef initiale dans laquelle démarre le son"
+
++#: tools/gst-device-monitor.c:260 tools/gst-play.c:1444
+ msgid "Print version information and exit"
+ msgstr "Affiche la version et quitte"
+
-msgstr ""
-"Contrôler le comportement de lecture en modifiant la pripriété du "
-"« drapeau » playbin"
++#: tools/gst-device-monitor.c:262
++msgid "Don't exit after showing the initial device list, but wait for devices to added/removed."
++msgstr "Ne pas quitter après l’affichage de la liste initiale des périphériques, mais attendre l’ajout ou la suppression de périphériques."
+
++#: tools/gst-play.c:310
+ #, c-format
+ msgid "Volume: %.0f%%"
+ msgstr "Volume : %.0f%%"
+
++#: tools/gst-play.c:349
+ msgid "Buffering..."
+ msgstr "Mise en cache…"
+
++#: tools/gst-play.c:370
+ msgid "Clock lost, selecting a new one\n"
+ msgstr "Horloge perdue, sélection d’une nouvelle\n"
+
++#: tools/gst-play.c:400 tools/gst-play.c:446 tools/gst-play.c:883
++#: tools/gst-play.c:1347
+ msgid "Reached end of play list."
+ msgstr "Liste de lecture terminée."
+
++#: tools/gst-play.c:613
+ msgid "Paused"
+ msgstr "En pause"
+
++#: tools/gst-play.c:671
+ #, c-format
+ msgid "Now playing %s\n"
+ msgstr "Lecture en cours de %s\n"
+
++#: tools/gst-play.c:734
+ #, c-format
+ msgid "About to finish, preparing next title: %s"
+ msgstr "Fin de piste, préparation du titre suivant : %s"
+
++#: tools/gst-play.c:979
+ #, c-format
+ msgid "Playback rate: %.2f"
+ msgstr "Vitesse de lecture : %.2f"
+
++#: tools/gst-play.c:983
+ #, c-format
+ msgid "Could not change playback rate to %.2f"
+ msgstr "Impossible de modifier la vitesse de lecture à %.2f"
+
++#: tools/gst-play.c:1287
+ msgid "space"
+ msgstr "espace"
+
++#: tools/gst-play.c:1287
+ msgid "pause/unpause"
+ msgstr "pause / lecture"
+
++#: tools/gst-play.c:1288
+ msgid "q or ESC"
+ msgstr "q ou ECHAP"
+
++#: tools/gst-play.c:1288
+ msgid "quit"
+ msgstr "quitter"
+
++#: tools/gst-play.c:1289
+ msgid "> or n"
+ msgstr "> ou n"
+
++#: tools/gst-play.c:1289
+ msgid "play next"
+ msgstr "lire la suivante"
+
++#: tools/gst-play.c:1290
+ msgid "< or b"
+ msgstr "< ou b"
+
++#: tools/gst-play.c:1290
+ msgid "play previous"
+ msgstr "lire la précédente"
+
++#: tools/gst-play.c:1291
+ msgid "seek forward"
+ msgstr "chercher en avant"
+
++#: tools/gst-play.c:1292
+ msgid "seek backward"
+ msgstr "chercher en arrière"
+
++#: tools/gst-play.c:1293
+ msgid "volume up"
+ msgstr "augmenter le volume"
+
++#: tools/gst-play.c:1294
+ msgid "volume down"
+ msgstr "baisser le volume"
+
++#: tools/gst-play.c:1295
+ msgid "increase playback rate"
+ msgstr "augmenter la vitesse de lecture"
+
++#: tools/gst-play.c:1296
+ msgid "decrease playback rate"
+ msgstr "baisser la vitesse de lecture"
+
++#: tools/gst-play.c:1297
+ msgid "change playback direction"
+ msgstr "modifier le sens de lecture"
+
++#: tools/gst-play.c:1298
+ msgid "enable/disable trick modes"
+ msgstr "activer / désactiver les astuces"
+
++#: tools/gst-play.c:1299
+ msgid "change audio track"
+ msgstr "modifier la piste audio"
+
++#: tools/gst-play.c:1300
+ msgid "change video track"
+ msgstr "modifier la piste vidéo"
+
++#: tools/gst-play.c:1301
+ msgid "change subtitle track"
+ msgstr "modifier la piste de sous-titre"
+
++#: tools/gst-play.c:1302
+ msgid "seek to beginning"
+ msgstr "chercher depuis le début"
+
++#: tools/gst-play.c:1303
+ msgid "show keyboard shortcuts"
+ msgstr "afficher les raccourcis clavier"
+
++#: tools/gst-play.c:1306
+ msgid "Interactive mode - keyboard controls:"
+ msgstr "Mode interactif — commandes clavier :"
+
++#: tools/gst-play.c:1439
+ msgid "Output status information and property notifications"
+ msgstr "Afficher des informations sur l’état et les notifications de propriété"
+
++#: tools/gst-play.c:1441
+ msgid "Control playback behaviour setting playbin 'flags' property"
-msgstr ""
++msgstr "Contrôler le comportement de lecture en modifiant la propriété du « drapeau » playbin"
+
++#: tools/gst-play.c:1446
+ msgid "Video sink to use (default is autovideosink)"
+ msgstr "Récepteur vidéo à utiliser (autovideosink par défaut)"
+
++#: tools/gst-play.c:1448
+ msgid "Audio sink to use (default is autoaudiosink)"
+ msgstr "Récepteur audio à utiliser (autoaudiosink par défaut)"
+
++#: tools/gst-play.c:1450
+ msgid "Enable gapless playback"
+ msgstr "Activer la lecture sans blanc"
+
++#: tools/gst-play.c:1452
+ msgid "Shuffle playlist"
+ msgstr "Mélanger les morceaux"
+
++#: tools/gst-play.c:1455
+ msgid "Disable interactive control via the keyboard"
+ msgstr "Désactiver les commandes interactives au clavier"
+
++#: tools/gst-play.c:1457
+ msgid "Volume"
+ msgstr "Volume"
+
++#: tools/gst-play.c:1459
+ msgid "Playlist file containing input media files"
+ msgstr "Fichier de lecture contenant des fichiers de media d’entrée"
+
++#: tools/gst-play.c:1461
+ msgid "Do not print any output (apart from errors)"
+ msgstr "Ne pas afficher de message en sortie (sauf les erreurs)"
+
++#: tools/gst-play.c:1463
+ msgid "Use playbin3 pipeline"
-msgstr ""
++msgstr "Utiliser un pipeline playbin3"
+
++#: tools/gst-play.c:1464
+ msgid "(default varies depending on 'USE_PLAYBIN' env variable)"
-
-#~ msgid "Could not create \"decodebin3\" element."
-#~ msgstr "Impossible de créer un élément « decodebin3 »."
-
-#~ msgid "Could not create \"urisourcebin\" element."
-#~ msgstr "Impossible de créer un élément « urisourcebin »."
-
-#~ msgid "Internal data stream error."
-#~ msgstr "Erreur du flux de données interne."
++msgstr "(la valeur par défaut dépend de la variable d'environnement USE_PLAYBIN)"
+
++#: tools/gst-play.c:1468
++msgid "Keep showing the last frame on EOS until quit or playlist change command (gapless is ignored)"
++msgstr "Continuer à afficher la dernière image après la fin de lecture jusqu'à la fin du programme ou une nouvelle commande de lecture (la lecture sans blanc est ignorée)"
++
++#: tools/gst-play.c:1546
+ #, c-format
+ msgid "Usage: %s FILE1|URI1 [FILE2|URI2] [FILE3|URI3] ..."
+ msgstr "Usage : %s FICHIER1|URI1 [FICHIER2|URI2] [FICHIER3|URI3] …"
+
++#: tools/gst-play.c:1550
+ msgid "You must provide at least one filename or URI to play."
+ msgstr "Vous devez fournir au moins un nom de fichier ou une URI à lire."
+
++#: tools/gst-play.c:1590
+ msgid "Press 'k' to see a list of keyboard shortcuts.\n"
+ msgstr "Pressez k pour voir la liste des raccourcis clavier.\n"
--- /dev/null
-"POT-Creation-Date: 2019-02-26 11:43+0000\n"
-"PO-Revision-Date: 2019-03-03 13:33-0800\n"
+ # Translation of gst-plugins-base messages to Croatian.
+ # This file is put in the public domain.
+ # Copyright (C) 2004-2010, 2019 GStreamer core team.
+ # This file is distributed under the same license as the gst-plugins-base package.
+ #
+ # Tomislav Krznar <tomislav.krznar@gmail.com>, 2012.
+ # Božidar Putanec <bozidarp@yahoo.com>, 2016, 2018, 2019.
+ msgid ""
+ msgstr ""
+ "Project-Id-Version: gst-plugins-base-1.15.1\n"
+ "Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n"
-"%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);\n"
++"POT-Creation-Date: 2019-01-17 01:50+0000\n"
++"PO-Revision-Date: 2019-05-31 17:42-0700\n"
+ "Last-Translator: Božidar Putanec <bozidarp@yahoo.com>\n"
+ "Language-Team: Croatian <lokalizacija@linux.hr>\n"
+ "Language: hr\n"
+ "MIME-Version: 1.0\n"
+ "Content-Type: text/plain; charset=UTF-8\n"
+ "Content-Transfer-Encoding: 8bit\n"
+ "X-Bugs: Report translation errors to the Language-Team address.\n"
-msgstr "Uređaj za reprodukciju nije moguće otvoriti u mono mȏdu."
++"Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);\n"
+ "X-Generator: Poedit 2.2.1\n"
+
++#: ext/alsa/gstalsasink.c:579
+ msgid "Could not open device for playback in mono mode."
-msgstr "Uređaj za reprodukciju nije moguće otvoriti u stereo mȏdu."
++msgstr "Uređaj za reprodukciju nije moguće otvoriti u mono načinu."
+
++#: ext/alsa/gstalsasink.c:581
+ msgid "Could not open device for playback in stereo mode."
-msgstr "Uređaj za reprodukciju nije moguće otvoriti u %d-kanalnom mȏdu."
++msgstr "Uređaj za reprodukciju nije moguće otvoriti u stereo načinu."
+
++#: ext/alsa/gstalsasink.c:585
+ #, c-format
+ msgid "Could not open device for playback in %d-channel mode."
-msgid ""
-"Could not open audio device for playback. Device is being used by another "
-"application."
-msgstr ""
-"Audiouređaj nije moguće otvoriti za reprodukciju jer ga koristi neka druga "
-"aplikacija."
++msgstr "Uređaj za reprodukciju nije moguće otvoriti u %d-kanalnom načinu."
+
-msgstr "Uređaj za snimanje nije moguće otvoriti u mono mȏdu."
++#: ext/alsa/gstalsasink.c:858
++msgid "Could not open audio device for playback. Device is being used by another application."
++msgstr "Audiouređaj nije moguće otvoriti za reprodukciju jer ga koristi neka druga aplikacija."
+
++#: ext/alsa/gstalsasink.c:863
+ msgid "Could not open audio device for playback."
+ msgstr "Audiouređaj nije moguće otvoriti za reprodukciju."
+
++#: ext/alsa/gstalsasink.c:1076
+ msgid "Error outputting to audio device. The device has been disconnected."
+ msgstr "Greška na ulazu audiouređaja. Uređaj nije spojen."
+
++#: ext/alsa/gstalsasrc.c:444
+ msgid "Could not open device for recording in mono mode."
-msgstr "Uređaj za snimanje nije moguće otvoriti u stereo mȏdu."
++msgstr "Uređaj za snimanje nije moguće otvoriti u mono načinu."
+
++#: ext/alsa/gstalsasrc.c:446
+ msgid "Could not open device for recording in stereo mode."
-msgstr "Uređaj za snimanje nije moguće otvoriti u %d-kanalnom mȏdu"
++msgstr "Uređaj za snimanje nije moguće otvoriti u stereo načinu."
+
++#: ext/alsa/gstalsasrc.c:450
+ #, c-format
+ msgid "Could not open device for recording in %d-channel mode"
-msgid ""
-"Could not open audio device for recording. Device is being used by another "
-"application."
-msgstr ""
-"Uređaj nije moguće otvoriti za snimanje. Uređaj trenutačno koristi neka "
-"druga aplikacija."
++msgstr "Uređaj za snimanje nije moguće otvoriti u %d-kanalnom načinu"
+
-msgstr ""
-"Nije moguće reproducirati tekstualnu datoteku bez videa ili vizualizacije."
++#: ext/alsa/gstalsasrc.c:741
++msgid "Could not open audio device for recording. Device is being used by another application."
++msgstr "Uređaj nije moguće otvoriti za snimanje. Uređaj trenutačno koristi neka druga aplikacija."
+
++#: ext/alsa/gstalsasrc.c:746
+ msgid "Could not open audio device for recording."
+ msgstr "Audiouređaj nije moguće otvoriti za snimanje."
+
++#: ext/alsa/gstalsasrc.c:992
+ msgid "Error recording from audio device. The device has been disconnected."
+ msgstr "Greška pri snimanju iz audiouređaja. Uređaj nije spojen."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:277 ext/cdparanoia/gstcdparanoiasrc.c:283
+ msgid "Could not open CD device for reading."
+ msgstr "CD uređaj nije moguće otvoriti za čitanje."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:409
+ msgid "Could not seek CD."
+ msgstr "Nije moguće pozicioniranje (skočiti na poziciju) na CD-u."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:417
+ msgid "Could not read CD."
+ msgstr "CD nije moguće čitati."
+
++#: ext/gl/gstgltestsrc.c:491
+ msgid "failed to draw pattern"
+ msgstr "nije uspjelo iscrtati uzorak"
+
++#: ext/gl/gstgltestsrc.c:492
+ msgid "A GL error occurred"
+ msgstr "Dogodila se GL greška"
+
++#: ext/gl/gstgltestsrc.c:498
+ msgid "format wasn't negotiated before get function"
+ msgstr "format nije dogovoren prije pozivanja funkcije"
+
++#: gst/encoding/gstencodebin.c:1636 gst/playback/gstplaybin2.c:3399
++#: gst/playback/gstplaysink.c:1522 gst/playback/gstplaysink.c:1535
++#: gst/playback/gstplaysink.c:1872 gst/playback/gstplaysink.c:1904
++#: gst/playback/gstplaysink.c:2488 gst/playback/gstplaysink.c:2537
++#: gst/playback/gstplaysink.c:2552 gst/playback/gstplaysink.c:2577
++#: gst/playback/gstplaysink.c:2609 gst/playback/gstplaysink.c:2757
++#: gst/playback/gstplaysink.c:2788 gst/playback/gstplaysink.c:3166
++#: gst/playback/gstplaysink.c:3175 gst/playback/gstplaysink.c:3184
++#: gst/playback/gstplaysink.c:3193 gst/playback/gstplaysink.c:3606
++#: gst/playback/gstplaysink.c:4526 gst/playback/gstplaysinkconvertbin.c:97
++#: gst/playback/gstplaysinkconvertbin.c:117
++#: gst/playback/gsturidecodebin.c:1472 gst/playback/gsturisourcebin.c:1596
+ #, c-format
+ msgid "Missing element '%s' - check your GStreamer installation."
+ msgstr "Nema elementa „%s“ -- provjerite vašu GStreamer instalaciju."
+
++#: gst/playback/gstdecodebin2.c:1873 gst/playback/gstparsebin.c:1553
+ msgid "Could not determine type of stream"
+ msgstr "Nije moguće odrediti vrstu protoka"
+
++#: gst/playback/gstdecodebin2.c:2851 gst/playback/gstparsebin.c:2385
+ msgid "This appears to be a text file"
+ msgstr "Ovo izgleda kao tekstualna datoteka"
+
++#: gst/playback/gstplaybin2.c:5452
+ msgid "Could not create \"uridecodebin\" element."
+ msgstr "Nije moguće stvoriti element „uridecodebin“."
+
++#: gst/playback/gstplaybin3.c:4558
+ msgid "Could not create \"uridecodebin3\" element."
+ msgstr "Nije moguće stvoriti element „uridecodebin3“."
+
++#: gst/playback/gstplaysink.c:2003
+ #, c-format
+ msgid "Both autovideosink and %s elements are missing."
+ msgstr "Nema ni elementa autovideosink ni elementa %s."
+
++#: gst/playback/gstplaysink.c:2007
+ msgid "The autovideosink element is missing."
+ msgstr "Nema elementa autovideosink."
+
++#: gst/playback/gstplaysink.c:2012
+ #, c-format
+ msgid "Configured videosink %s is not working."
+ msgstr "Konfigurirani videosink %s ne radi."
+
++#: gst/playback/gstplaysink.c:2016
+ #, c-format
+ msgid "Both autovideosink and %s elements are not working."
+ msgstr "Ne radi ni element autovideosink ni element %s."
+
++#: gst/playback/gstplaysink.c:2020
+ msgid "The autovideosink element is not working."
+ msgstr "Element autovideosink ne radi."
+
++#: gst/playback/gstplaysink.c:2525
+ msgid "Custom text sink element is not usable."
+ msgstr "Prilagođeni tekstualni element-ponor nije upotrebljiv."
+
++#: gst/playback/gstplaysink.c:2903
+ msgid "No volume control found"
+ msgstr "Nema regulatora glasnoće"
+
++#: gst/playback/gstplaysink.c:2933
+ #, c-format
+ msgid "Both autoaudiosink and %s elements are missing."
+ msgstr "Nema ni elementa autoaudiosink ni elementa %s."
+
++#: gst/playback/gstplaysink.c:2937
+ msgid "The autoaudiosink element is missing."
+ msgstr "Nema elementa autoaudiosink."
+
++#: gst/playback/gstplaysink.c:2942
+ #, c-format
+ msgid "Configured audiosink %s is not working."
+ msgstr "Konfigurirani audiosink %s ne radi."
+
++#: gst/playback/gstplaysink.c:2946
+ #, c-format
+ msgid "Both autoaudiosink and %s elements are not working."
+ msgstr "Ne radi ni element autoaudiosink ni element %s."
+
++#: gst/playback/gstplaysink.c:2950
+ msgid "The autoaudiosink element is not working."
+ msgstr "Element autoaudiosink ne radi."
+
++#: gst/playback/gstplaysink.c:3269 gst/playback/gstplaysink.c:3274
+ msgid "Can't play a text file without video or visualizations."
-#, fuzzy
++msgstr "Nije moguće reproducirati tekstualnu datoteku bez videa ili vizualizacije."
+
++#: gst/playback/gsturidecodebin.c:921
+ #, c-format
+ msgid "No decoder available for type '%s'."
+ msgstr "Dekoder za vrstu „%s“ nije na raspolaganju."
+
++#: gst/playback/gsturidecodebin.c:1378 gst/playback/gsturisourcebin.c:1490
+ msgid "No URI specified to play from."
+ msgstr "Nije naveden URI izvor za reprodukciju."
+
++#: gst/playback/gsturidecodebin.c:1384 gst/playback/gsturisourcebin.c:1496
+ #, c-format
+ msgid "Invalid URI \"%s\"."
+ msgstr "URI adresa „%s“ nije valjana."
+
++#: gst/playback/gsturidecodebin.c:1391 gst/playback/gsturisourcebin.c:1503
+ msgid "This stream type cannot be played yet."
+ msgstr "Ova vrsta protoka još se ne može reproducirati."
+
++#: gst/playback/gsturidecodebin.c:1409 gst/playback/gsturisourcebin.c:1521
+ #, c-format
+ msgid "No URI handler implemented for \"%s\"."
+ msgstr "Nijedan URI rukovatelj nije realiziran za „%s“."
+
++#: gst/playback/gsturidecodebin.c:2280 gst/playback/gsturisourcebin.c:2227
+ msgid "Source element is invalid."
+ msgstr "Element izvor nije valjan."
+
++#: gst/tcp/gsttcpclientsink.c:216
+ #, c-format
+ msgid "Error while sending data to \"%s:%d\"."
+ msgstr "Greška pri slanju podataka u „%s:%d“."
+
++#: gst-libs/gst/audio/gstaudiobasesrc.c:842
+ msgid "Can't record audio fast enough"
+ msgstr "Audio se ne može dovoljno brzo snimati"
+
++#: gst-libs/gst/audio/gstaudiocdsrc.c:1625
+ msgid "This CD has no audio tracks"
+ msgstr "Ovaj CD nema audio zapisa"
+
++#: gst-libs/gst/pbutils/descriptions.c:89
+ msgid "ID3 tag"
+ msgstr "ID3 tag"
+
++#: gst-libs/gst/pbutils/descriptions.c:90
++#: gst-libs/gst/pbutils/descriptions.c:91
+ msgid "APE tag"
+ msgstr "APE tag"
+
++#: gst-libs/gst/pbutils/descriptions.c:92
+ msgid "ICY internet radio"
+ msgstr "ICY internetski radio"
+
++#: gst-libs/gst/pbutils/descriptions.c:127
+ msgid "Apple Lossless Audio (ALAC)"
+ msgstr "Apple Lossless Audio (ALAC)"
+
++#: gst-libs/gst/pbutils/descriptions.c:143
+ msgid "Free Lossless Audio Codec (FLAC)"
+ msgstr "Free Lossless Audio Codec (FLAC)"
+
++#: gst-libs/gst/pbutils/descriptions.c:173
++#: gst-libs/gst/pbutils/descriptions.c:174
+ msgid "Lossless True Audio (TTA)"
+ msgstr "Lossless True Audio (TTA)"
+
++#: gst-libs/gst/pbutils/descriptions.c:182
+ msgid "Windows Media Speech"
+ msgstr "Windows Media Speech"
+
++#: gst-libs/gst/pbutils/descriptions.c:198
+ msgid "CYUV Lossless"
+ msgstr "CYUV Lossless"
+
++#: gst-libs/gst/pbutils/descriptions.c:202
+ msgid "FFMpeg v1"
+ msgstr "FFMpeg v1"
+
++#: gst-libs/gst/pbutils/descriptions.c:216
+ msgid "Lossless MSZH"
+ msgstr "Lossless MSZH"
+
++#: gst-libs/gst/pbutils/descriptions.c:227
+ msgid "Run-length encoding"
+ msgstr "RLE (Run-length encoding)"
+
+ # https://en.wikipedia.org/wiki/Timed_text
++#: gst-libs/gst/pbutils/descriptions.c:273
+ msgid "Timed Text"
+ msgstr "Timed Text (vremenski usklađeni tekst)"
+
++#: gst-libs/gst/pbutils/descriptions.c:277
+ msgid "Subtitle"
+ msgstr "Podnaslov"
+
++#: gst-libs/gst/pbutils/descriptions.c:278
+ msgid "MPL2 subtitle format"
+ msgstr "Format podnaslova MPL2"
+
++#: gst-libs/gst/pbutils/descriptions.c:279
+ msgid "DKS subtitle format"
+ msgstr "Format podnaslova DKS"
+
++#: gst-libs/gst/pbutils/descriptions.c:280
+ msgid "QTtext subtitle format"
+ msgstr "Format podnaslova QText"
+
++#: gst-libs/gst/pbutils/descriptions.c:281
+ msgid "Sami subtitle format"
+ msgstr "Format podnaslova Sami"
+
++#: gst-libs/gst/pbutils/descriptions.c:282
+ msgid "TMPlayer subtitle format"
+ msgstr "Format podnaslova TMPlayer"
+
++#: gst-libs/gst/pbutils/descriptions.c:286
+ msgid "CEA 608 Closed Caption"
+ msgstr "CEA 608 Closed Caption (titlovanje)"
+
++#: gst-libs/gst/pbutils/descriptions.c:287
+ msgid "CEA 708 Closed Caption"
+ msgstr "CEA 708 Closed Caption (titlovanje)"
+
++#: gst-libs/gst/pbutils/descriptions.c:288
+ msgid "Kate subtitle format"
+ msgstr "Format naslova Kate"
+
++#: gst-libs/gst/pbutils/descriptions.c:289
+ msgid "WebVTT subtitle format"
+ msgstr "Format naslova WebVTT"
+
++#: gst-libs/gst/pbutils/descriptions.c:454
++#: gst-libs/gst/pbutils/descriptions.c:457
++#: gst-libs/gst/pbutils/descriptions.c:507
+ msgid "Uncompressed video"
+ msgstr "Nekomprimirani video"
+
++#: gst-libs/gst/pbutils/descriptions.c:462
+ msgid "Uncompressed gray"
+ msgstr "Nekomprimirano sivilo"
+
++#: gst-libs/gst/pbutils/descriptions.c:485
+ #, c-format
+ msgid "Uncompressed packed YUV %s"
+ msgstr "Nekomprimirani pakirani YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:487
+ #, c-format
+ msgid "Uncompressed semi-planar YUV %s"
+ msgstr "Nekomprimirani polu-planarni YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:489
+ #, c-format
+ msgid "Uncompressed planar YUV %s"
+ msgstr "Nekomprimirani planarni YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:500
+ #, c-format
+ msgid "Uncompressed palettized %d-bit %s"
+ msgstr "Nekomprimirani paletizirani %d-bitni %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:503
+ #, c-format
+ msgid "Uncompressed %d-bit %s"
+ msgstr "Nekomprimirani %d-bitni %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:585
+ #, c-format
+ msgid "DivX MPEG-4 Version %d"
+ msgstr "DivX MPEG-4 inačica %d"
+
++#: gst-libs/gst/pbutils/descriptions.c:833
+ msgid "Uncompressed audio"
+ msgstr "Nekomprimirani audio"
+
++#: gst-libs/gst/pbutils/descriptions.c:839
+ #, c-format
+ msgid "Raw %d-bit %s audio"
+ msgstr "Sirovi (neobrađeni) %d-bitni %s audio"
+
++#: gst-libs/gst/pbutils/descriptions.c:941
+ msgid "Audio CD source"
+ msgstr "Izvor je CD audio"
+
++#: gst-libs/gst/pbutils/descriptions.c:944
+ msgid "DVD source"
+ msgstr "Izvor je DVD"
+
++#: gst-libs/gst/pbutils/descriptions.c:947
+ msgid "Real Time Streaming Protocol (RTSP) source"
+ msgstr "Izvor je Real Time Streaming Protocol (RTSP)"
+
++#: gst-libs/gst/pbutils/descriptions.c:951
+ msgid "Microsoft Media Server (MMS) protocol source"
+ msgstr "Izvor je Microsoft Media Server (MMS)"
+
++#: gst-libs/gst/pbutils/descriptions.c:959
+ #, c-format
+ msgid "%s protocol source"
+ msgstr "Protokol izvora je %s"
+
+ # https://en.wikipedia.org/wiki/Payload_(computing)
+ # http://www.rfc-editor.org/rfc/rfc3984.txt
++#: gst-libs/gst/pbutils/descriptions.c:1033
+ #, c-format
+ msgid "%s video RTP depayloader"
+ msgstr "Depayloader RTP %s video"
+
++#: gst-libs/gst/pbutils/descriptions.c:1035
+ #, c-format
+ msgid "%s audio RTP depayloader"
+ msgstr "Depayloader RTP %s audio"
+
++#: gst-libs/gst/pbutils/descriptions.c:1037
+ #, c-format
+ msgid "%s RTP depayloader"
+ msgstr "Depayloader RTP %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1044
+ #, c-format
+ msgid "%s demuxer"
+ msgstr "Demultipleksor %s (demuxer)"
+
++#: gst-libs/gst/pbutils/descriptions.c:1046
+ #, c-format
+ msgid "%s decoder"
+ msgstr "Dekoder %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1085
+ #, c-format
+ msgid "%s video RTP payloader"
+ msgstr "Payloader RTP %s video"
+
++#: gst-libs/gst/pbutils/descriptions.c:1087
+ #, c-format
+ msgid "%s audio RTP payloader"
+ msgstr "%s audio RTP payloader (RPT-utovarivač audia)"
+
++#: gst-libs/gst/pbutils/descriptions.c:1089
+ #, c-format
+ msgid "%s RTP payloader"
+ msgstr "Payloader RTP %s audio"
+
++#: gst-libs/gst/pbutils/descriptions.c:1096
+ #, c-format
+ msgid "%s muxer"
+ msgstr "Multipleksor %s (muxer)"
+
++#: gst-libs/gst/pbutils/descriptions.c:1098
+ #, c-format
+ msgid "%s encoder"
+ msgstr "Koder %s (encoder)"
+
++#: gst-libs/gst/pbutils/descriptions.c:1132
+ #, c-format
+ msgid "GStreamer element %s"
+ msgstr "Element GStreamera %s"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:558
+ msgid "Unknown source element"
+ msgstr "Nepoznati element-izvor"
+
-msgstr "Nepoznati element"
++#: gst-libs/gst/pbutils/missing-plugins.c:561
+ msgid "Unknown sink element"
-msgid ""
-"35 mm equivalent focal length of the lens used capturing the image, in mm"
++msgstr "Nepoznati element-ponor"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:564
+ msgid "Unknown element"
+ msgstr "Nepoznati element"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:567
+ msgid "Unknown decoder element"
+ msgstr "Nepoznati element dekoder"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:570
+ msgid "Unknown encoder element"
+ msgstr "Nepoznati element koder"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:575
+ msgid "Plugin or element of unknown type"
+ msgstr "Plugin ili element nepoznate vrste"
+
++#: gst-libs/gst/tag/gsttagdemux.c:1243
+ msgid "Failed to read tag: not enough data"
+ msgstr "Nije uspjelo pročitati tag: nema dovoljno podataka"
+
++#: gst-libs/gst/tag/tags.c:77
+ msgid "track ID"
+ msgstr "ID zapisa"
+
++#: gst-libs/gst/tag/tags.c:77
+ msgid "MusicBrainz track ID"
+ msgstr "MusicBrainz ID zapisa"
+
++#: gst-libs/gst/tag/tags.c:79
+ msgid "artist ID"
+ msgstr "ID izvođača"
+
++#: gst-libs/gst/tag/tags.c:79
+ msgid "MusicBrainz artist ID"
+ msgstr "MusicBrainz ID izvođača"
+
++#: gst-libs/gst/tag/tags.c:81
+ msgid "album ID"
+ msgstr "ID albuma"
+
++#: gst-libs/gst/tag/tags.c:81
+ msgid "MusicBrainz album ID"
+ msgstr "MusicBrainz ID albuma"
+
++#: gst-libs/gst/tag/tags.c:84
+ msgid "album artist ID"
+ msgstr "ID izvođača albuma"
+
++#: gst-libs/gst/tag/tags.c:84
+ msgid "MusicBrainz album artist ID"
+ msgstr "MusicBrainz ID izvođača albuma"
+
++#: gst-libs/gst/tag/tags.c:86
+ msgid "track TRM ID"
+ msgstr "ID TRM zapisa"
+
++#: gst-libs/gst/tag/tags.c:86
+ msgid "MusicBrainz TRM ID"
+ msgstr "MusicBrainz TRM ID"
+
++#: gst-libs/gst/tag/tags.c:108
+ msgid "capturing shutter speed"
+ msgstr "ekspozicija"
+
++#: gst-libs/gst/tag/tags.c:109
+ msgid "Shutter speed used when capturing an image, in seconds"
+ msgstr "Primijenjena ekspozicija pri snimanju slike u sekundama"
+
++#: gst-libs/gst/tag/tags.c:112
+ msgid "capturing focal ratio"
+ msgstr "otvor blende (f broj)"
+
++#: gst-libs/gst/tag/tags.c:113
+ msgid "Focal ratio (f-number) used when capturing the image"
+ msgstr "Primijenjeni otvor blende (f-broj) pri snimanju slike"
+
++#: gst-libs/gst/tag/tags.c:116
+ msgid "capturing focal length"
+ msgstr "žarišna duljina"
+
++#: gst-libs/gst/tag/tags.c:117
+ msgid "Focal length of the lens used capturing the image, in mm"
+ msgstr "Primijenjena žarišna duljina leće pri snimanju slike u mm"
+
++#: gst-libs/gst/tag/tags.c:121
+ msgid "capturing 35 mm equivalent focal length"
+ msgstr "snimanje s 35 mm ekvivalentne žarišne duljine"
+
-msgstr "mȏd ekspozicije"
++#: gst-libs/gst/tag/tags.c:122
++msgid "35 mm equivalent focal length of the lens used capturing the image, in mm"
+ msgstr "snimanje s 35 mm ekvivalentne žarišne duljine pri snimanju u mm"
+
++#: gst-libs/gst/tag/tags.c:126
+ msgid "capturing digital zoom ratio"
+ msgstr "omjer digitalnog zuma"
+
++#: gst-libs/gst/tag/tags.c:127
+ msgid "Digital zoom ratio used when capturing an image"
+ msgstr "Primijenjeni omjer digitalnog zuma pri snimanju slike"
+
++#: gst-libs/gst/tag/tags.c:130
+ msgid "capturing iso speed"
+ msgstr "osjetljivost ISO"
+
++#: gst-libs/gst/tag/tags.c:131
+ msgid "The ISO speed used when capturing an image"
+ msgstr "Primijenjena osjetljivost ISO pri snimanju slike"
+
++#: gst-libs/gst/tag/tags.c:134
+ msgid "capturing exposure program"
+ msgstr "program ekspozicije"
+
++#: gst-libs/gst/tag/tags.c:135
+ msgid "The exposure program used when capturing an image"
+ msgstr "Primijenjeni program ekspozicije pri snimanju slike"
+
++#: gst-libs/gst/tag/tags.c:138
+ msgid "capturing exposure mode"
-msgstr "Primijenjeni mȏd ekspozicije pri snimanju"
++msgstr "način ekspozicije"
+
++#: gst-libs/gst/tag/tags.c:139
+ msgid "The exposure mode used when capturing an image"
-msgstr "mȏd bljeskalice"
++msgstr "Primijenjeni način ekspozicije pri snimanju"
+
++#: gst-libs/gst/tag/tags.c:142
+ msgid "capturing exposure compensation"
+ msgstr "korektura ekspozicije"
+
++#: gst-libs/gst/tag/tags.c:143
+ msgid "The exposure compensation used when capturing an image"
+ msgstr "Primijenjena korektura ekspozicije pri snimanju"
+
++#: gst-libs/gst/tag/tags.c:146
+ msgid "capturing scene capture type"
+ msgstr "izbor motiva"
+
++#: gst-libs/gst/tag/tags.c:147
+ msgid "The scene capture mode used when capturing an image"
+ msgstr "Primijenjeni izbor motiva pri snimanju slike"
+
++#: gst-libs/gst/tag/tags.c:150
+ msgid "capturing gain adjustment"
+ msgstr "korekcija osvjetljenja"
+
++#: gst-libs/gst/tag/tags.c:151
+ msgid "The overall gain adjustment applied on an image"
+ msgstr "Ukupna korekcija osvjetljenja primijenjena na sliku"
+
++#: gst-libs/gst/tag/tags.c:154
+ msgid "capturing white balance"
+ msgstr "balans bijele"
+
++#: gst-libs/gst/tag/tags.c:155
+ msgid "The white balance mode set when capturing an image"
+ msgstr "Primijenjeni balans bijele pri snimanju slike"
+
++#: gst-libs/gst/tag/tags.c:158
+ msgid "capturing contrast"
+ msgstr "kontrast"
+
++#: gst-libs/gst/tag/tags.c:159
+ msgid "The direction of contrast processing applied when capturing an image"
+ msgstr "Postavke za obradu kontrasta primijenjene pri snimanju slike"
+
++#: gst-libs/gst/tag/tags.c:163
+ msgid "capturing saturation"
+ msgstr "zasićenje"
+
++#: gst-libs/gst/tag/tags.c:164
+ msgid "The direction of saturation processing applied when capturing an image"
+ msgstr "Postavke za obradu zasićenja primijenjene pri snimanju slike"
+
++#: gst-libs/gst/tag/tags.c:168
+ msgid "capturing sharpness"
+ msgstr "oštrina"
+
++#: gst-libs/gst/tag/tags.c:169
+ msgid "The direction of sharpness processing applied when capturing an image"
+ msgstr "Postavke za obradu oštrine primijenjene pri snimanju slike"
+
++#: gst-libs/gst/tag/tags.c:173
+ msgid "capturing flash fired"
+ msgstr "upotreba bljeskalice"
+
++#: gst-libs/gst/tag/tags.c:174
+ msgid "If the flash fired while capturing an image"
+ msgstr "Je li se bljeskalica aktivirala pri snimanju slike"
+
++#: gst-libs/gst/tag/tags.c:177
+ msgid "capturing flash mode"
-msgstr "Odabrani mȏd bljeskalice pri snimanju slike"
++msgstr "način bljeskalice"
+
++#: gst-libs/gst/tag/tags.c:178
+ msgid "The selected flash mode while capturing an image"
-msgstr "mȏd mjerenja"
++msgstr "Odabrani način bljeskalice pri snimanju slike"
+
++#: gst-libs/gst/tag/tags.c:181
+ msgid "capturing metering mode"
-msgid ""
-"The metering mode used while determining exposure for capturing an image"
-msgstr ""
-"Primijenjeni mȏd mjerenja za određivanje ekspozicije pri snimanju slike"
++msgstr "način mjerenja"
+
-msgstr ""
-"Horizontalna rezolucija medija (nositelja slike/videa) u ppi (piksel/col)"
++#: gst-libs/gst/tag/tags.c:182
++msgid "The metering mode used while determining exposure for capturing an image"
++msgstr "Primijenjeni način mjerenja za određivanje ekspozicije pri snimanju slike"
+
++#: gst-libs/gst/tag/tags.c:186
+ msgid "capturing source"
+ msgstr "izvor za snimanje"
+
++#: gst-libs/gst/tag/tags.c:187
+ msgid "The source or type of device used for the capture"
+ msgstr "Izvor ili vrsta uređaja korištena za snimanje"
+
++#: gst-libs/gst/tag/tags.c:190
+ msgid "image horizontal ppi"
+ msgstr "horizontalna rezolucija slike (ppi)"
+
++#: gst-libs/gst/tag/tags.c:191
+ msgid "Media (image/video) intended horizontal pixel density in ppi"
-msgstr ""
-"Vertikalna rezolucija medija (nositelja slike/videa) u ppi (piksel/col)"
++msgstr "Horizontalna rezolucija medija (nositelja slike/videa) u ppi (piksel/col)"
+
++#: gst-libs/gst/tag/tags.c:194
+ msgid "image vertical ppi"
+ msgstr "vertikalna rezolucija slike (ppi)"
+
++#: gst-libs/gst/tag/tags.c:195
+ msgid "Media (image/video) intended vertical pixel density in ppi"
-msgid ""
-"Don't exit after showing the initial device list, but wait for devices to "
-"added/removed."
-msgstr ""
-"Ne izlazi nakon prikaza popisa početnih uređaja već pričeka da se uređaji "
-"dodaju ili uklone."
++msgstr "Vertikalna rezolucija medija (nositelja slike/videa) u ppi (piksel/col)"
+
++#: gst-libs/gst/tag/tags.c:198
+ msgid "ID3v2 frame"
+ msgstr "Okvir ID3v2"
+
++#: gst-libs/gst/tag/tags.c:198
+ msgid "unparsed id3v2 tag frame"
+ msgstr "neobrađeni okvir taga id3v2"
+
++#: gst-libs/gst/tag/tags.c:202
+ msgid "musical-key"
+ msgstr "glazbeni ključ"
+
++#: gst-libs/gst/tag/tags.c:202
+ msgid "Initial key in which the sound starts"
+ msgstr "Početni ključ u kojem započinje audio"
+
++#: tools/gst-device-monitor.c:255 tools/gst-play.c:1442
+ msgid "Print version information and exit"
+ msgstr "ispiše inačicu ovog programa i iziđe"
+
-msgstr ""
-"omogući/onemogući „trick modes“ (simulira brzo premotavanje analognih VCR)"
++#: tools/gst-device-monitor.c:257
++msgid "Don't exit after showing the initial device list, but wait for devices to added/removed."
++msgstr "Ne izlazi nakon prikaza popisa početnih uređaja već pričeka da se uređaji dodaju ili uklone."
+
++#: tools/gst-play.c:308
+ #, c-format
+ msgid "Volume: %.0f%%"
+ msgstr "Glasnoća: %.0f%%"
+
++#: tools/gst-play.c:347
+ msgid "Buffering..."
+ msgstr "Punjenje međuspremnika..."
+
++#: tools/gst-play.c:368
+ msgid "Clock lost, selecting a new one\n"
+ msgstr "Vrijeme (sat) je izgubljeno, bira se novo\n"
+
++#: tools/gst-play.c:398 tools/gst-play.c:444 tools/gst-play.c:881
++#: tools/gst-play.c:1345
+ msgid "Reached end of play list."
+ msgstr "Dostignut je kraj popisa za reprodukciju."
+
++#: tools/gst-play.c:611
+ msgid "Paused"
+ msgstr "Pauziranje"
+
++#: tools/gst-play.c:669
+ #, c-format
+ msgid "Now playing %s\n"
+ msgstr "Upravo se reproducira %s\n"
+
++#: tools/gst-play.c:732
+ #, c-format
+ msgid "About to finish, preparing next title: %s"
+ msgstr "Bliži se kraj, priprema se novi naslov: %s"
+
++#: tools/gst-play.c:977
+ #, c-format
+ msgid "Playback rate: %.2f"
+ msgstr "Brzina reproduciranja: %.2f"
+
++#: tools/gst-play.c:981
+ #, c-format
+ msgid "Could not change playback rate to %.2f"
+ msgstr "Brzinu reprodukcije nije moguće promijeniti na %.2f"
+
++#: tools/gst-play.c:1285
+ msgid "space"
+ msgstr "tipka za razmak"
+
++#: tools/gst-play.c:1285
+ msgid "pause/unpause"
+ msgstr "stani/nastavi"
+
++#: tools/gst-play.c:1286
+ msgid "q or ESC"
+ msgstr "q ili ESC"
+
++#: tools/gst-play.c:1286
+ msgid "quit"
+ msgstr "svršetak"
+
++#: tools/gst-play.c:1287
+ msgid "> or n"
+ msgstr "> ili n"
+
++#: tools/gst-play.c:1287
+ msgid "play next"
+ msgstr "sljedeći zapis (glazba/video/...)"
+
++#: tools/gst-play.c:1288
+ msgid "< or b"
+ msgstr "< ili b"
+
++#: tools/gst-play.c:1288
+ msgid "play previous"
+ msgstr "prethodni zapis (glazba/video/...)"
+
++#: tools/gst-play.c:1289
+ msgid "seek forward"
+ msgstr "traži (brzo) poziciju prema naprijed"
+
++#: tools/gst-play.c:1290
+ msgid "seek backward"
+ msgstr "traži (brzo) poziciju prema natrag"
+
++#: tools/gst-play.c:1291
+ msgid "volume up"
+ msgstr "glasnije"
+
++#: tools/gst-play.c:1292
+ msgid "volume down"
+ msgstr "tiše"
+
++#: tools/gst-play.c:1293
+ msgid "increase playback rate"
+ msgstr "brže"
+
++#: tools/gst-play.c:1294
+ msgid "decrease playback rate"
+ msgstr "sporije"
+
++#: tools/gst-play.c:1295
+ msgid "change playback direction"
+ msgstr "promjeni smjer reprodukcije"
+
+ # A feature of digital video systems that mimics the visual feedback given during fast forward and rewind operations that were provided by analog systems such as VCRs.
++#: tools/gst-play.c:1296
+ msgid "enable/disable trick modes"
-msgstr "Interaktivni mȏd - tipkovničko upravljanje:"
++msgstr "omogući/onemogući „trick modes“ (simulira brzo premotavanje analognih VCR)"
+
++#: tools/gst-play.c:1297
+ msgid "change audio track"
+ msgstr "promijeni audio zapis"
+
++#: tools/gst-play.c:1298
+ msgid "change video track"
+ msgstr "promijeni video zapis"
+
++#: tools/gst-play.c:1299
+ msgid "change subtitle track"
+ msgstr "promijeni (zapis) podnaslov"
+
++#: tools/gst-play.c:1300
+ msgid "seek to beginning"
+ msgstr "skoči (premota) na početak"
+
++#: tools/gst-play.c:1301
+ msgid "show keyboard shortcuts"
+ msgstr "tipkovnički prečaci"
+
++#: tools/gst-play.c:1304
+ msgid "Interactive mode - keyboard controls:"
-msgstr ""
-"Upravljajte ponašanjem reprodukcije postavkama za „playbin“ via „flags“"
++msgstr "Interaktivni način - tipkovničko upravljanje:"
+
++#: tools/gst-play.c:1437
+ msgid "Output status information and property notifications"
+ msgstr "ispiše status i obavijesti o svojstvima"
+
++#: tools/gst-play.c:1439
+ msgid "Control playback behaviour setting playbin 'flags' property"
-msgstr ""
-"Datoteka s popisom za reprodukciju koja sadrži ulazne multimedijalne datoteke"
++msgstr "Upravljajte ponašanjem reprodukcije postavkama za „playbin“ via „flags“"
+
++#: tools/gst-play.c:1444
+ msgid "Video sink to use (default is autovideosink)"
+ msgstr "Videosink koji će se koristiti (zadano: autovideosink)"
+
++#: tools/gst-play.c:1446
+ msgid "Audio sink to use (default is autoaudiosink)"
+ msgstr "Audiosink koji će se koristiti (zadano: autoaudiosink)"
+
++#: tools/gst-play.c:1448
+ msgid "Enable gapless playback"
+ msgstr "Omogući reprodukciju bez pauza (između zapisa)"
+
++#: tools/gst-play.c:1450
+ msgid "Shuffle playlist"
+ msgstr "Izmiješati popis za reprodukciju"
+
++#: tools/gst-play.c:1453
+ msgid "Disable interactive control via the keyboard"
+ msgstr "Onemogući interaktivno upravljanje pomoću tipkovnice"
+
++#: tools/gst-play.c:1455
+ msgid "Volume"
+ msgstr "Glasnoća"
+
++#: tools/gst-play.c:1457
+ msgid "Playlist file containing input media files"
-#~ msgstr ""
-#~ "Potreban je priključak %s za reprodukciju ovog niza, ali nije instaliran."
++msgstr "Datoteka s popisom za reprodukciju koja sadrži ulazne multimedijalne datoteke"
+
++#: tools/gst-play.c:1459
+ msgid "Do not print any output (apart from errors)"
 + msgstr "Ne ispisuje nikakve izlazne informacije (osim grešaka)"
+
++#: tools/gst-play.c:1461
+ msgid "Use playbin3 pipeline"
+ msgstr "Rabi playbin3 cjevovod"
+
++#: tools/gst-play.c:1462
+ msgid "(default varies depending on 'USE_PLAYBIN' env variable)"
+ msgstr "(zadano: varira, ovisno o varijabli okružja „USE_PLAYBIN“)"
+
++#: tools/gst-play.c:1536
+ #, c-format
+ msgid "Usage: %s FILE1|URI1 [FILE2|URI2] [FILE3|URI3] ..."
+ msgstr "Uporaba: %s DATOTEKA1|URI1 [DATOTEKA2|URI2] [DATOTEKA3|URI3] ..."
+
++#: tools/gst-play.c:1540
+ msgid "You must provide at least one filename or URI to play."
+ msgstr "Morate navesti barem jednu datoteku ili URI adresu za reprodukciju."
+
++#: tools/gst-play.c:1580
+ msgid "Press 'k' to see a list of keyboard shortcuts.\n"
+ msgstr "Pritisnite „k“ da pogledate listu tipkovničkih prečaca.\n"
+
+ #~ msgid "Unknown ponor element"
+ #~ msgstr "Nepoznati element-ponor"
+
+ #~ msgid "Could not create \"decodebin3\" element."
+ #~ msgstr "Element „decodebin3“ nije bilo moguće napraviti."
+
+ #~ msgid "Could not create \"urisourcebin\" element."
+ #~ msgstr "Element „urisourcebin“ nije bilo moguće napraviti."
+
+ #~ msgid "Internal data stream error."
+ #~ msgstr "Interna greška toka (stream) podataka."
+
+ #~ msgid "Master"
+ #~ msgstr "Glavni"
+
+ #~ msgid "Bass"
+ #~ msgstr "Niski"
+
+ #~ msgid "Treble"
+ #~ msgstr "Visoki"
+
+ #~ msgid "PCM"
+ #~ msgstr "PCM"
+
+ #~ msgid "Synth"
+ #~ msgstr "Sintetizator"
+
+ #~ msgid "Line-in"
+ #~ msgstr "Ulazna linija"
+
+ #~ msgid "CD"
+ #~ msgstr "CD"
+
+ #~ msgid "Microphone"
+ #~ msgstr "Mikrofon"
+
+ #~ msgid "PC Speaker"
+ #~ msgstr "PC zvučnik"
+
+ #~ msgid "Playback"
+ #~ msgstr "Reprodukcija"
+
+ #~ msgid "Capture"
+ #~ msgstr "Snimanje"
+
+ #~ msgid "Could not open vfs file \"%s\" for writing: %s."
+ #~ msgstr "Ne mogu otvoriti vfs datoteku „%s” za pisanje: %s."
+
+ #~ msgid "No filename given"
+ #~ msgstr "Nije zadano ime datoteke"
+
+ #~ msgid "Could not close vfs file \"%s\"."
+ #~ msgstr "Ne mogu zatvoriti vfs datoteku „%s”."
+
+ #~ msgid "Error while writing to file \"%s\"."
+ #~ msgstr "Greška pri pisanju u datoteku „%s”."
+
+ #~ msgid "A %s plugin is required to play this stream, but not installed."
-#~ msgid ""
-#~ "Only a subtitle stream was detected. Either you are loading a subtitle "
-#~ "file or some other type of text file, or the media file was not "
-#~ "recognized."
-#~ msgstr ""
-#~ "Pronađen je samo niz titlova. Ili ste učitali datoteku titlova ili neku "
-#~ "drugu vrstu tekstualne datoteke, ili medijska datoteka nije prepoznata."
-
-#~ msgid ""
-#~ "You do not have a decoder installed to handle this file. You might need "
-#~ "to install the necessary plugins."
-#~ msgstr ""
-#~ "Nemate instaliran dekoder za upravljanje ovom datotekom. Trebate "
-#~ "instalirati potrebne priključke."
++#~ msgstr "Potreban je priključak %s za reprodukciju ovog niza, ali nije instaliran."
+
+ #~ msgid "Invalid subtitle URI \"%s\", subtitles disabled."
+ #~ msgstr "Neispravan URI titlova „%s”, titlovi su onemogućeni."
+
+ #~ msgid "RTSP streams cannot be played yet."
+ #~ msgstr "Još ne mogu reproducirati RTSP nizove."
+
++#~ msgid "Only a subtitle stream was detected. Either you are loading a subtitle file or some other type of text file, or the media file was not recognized."
++#~ msgstr "Pronađen je samo niz titlova. Ili ste učitali datoteku titlova ili neku drugu vrstu tekstualne datoteke, ili medijska datoteka nije prepoznata."
++
++#~ msgid "You do not have a decoder installed to handle this file. You might need to install the necessary plugins."
++#~ msgstr "Nemate instaliran dekoder za upravljanje ovom datotekom. Trebate instalirati potrebne priključke."
+
+ #~ msgid "This is not a media file"
+ #~ msgstr "Ovo nije medijska datoteka"
+
+ #~ msgid "A subtitle stream was detected, but no video stream."
+ #~ msgstr "Pronađen je niz titlova, ali nije video niz."
+
+ #~ msgid "Both autovideosink and xvimagesink elements are missing."
+ #~ msgstr "Nedostaju elementi autovideosink i xvimagesink."
+
+ #~ msgid "Both autoaudiosink and alsasink elements are missing."
+ #~ msgstr "Nedostaju elementi autoaudiosink i alsasink."
+
+ #~ msgid "Error while sending gdp header data to \"%s:%d\"."
+ #~ msgstr "Greška pri slanju podataka gdp zaglavlja u „%s:%d”."
+
+ #~ msgid "Error while sending gdp payload data to \"%s:%d\"."
+ #~ msgstr "Greška pri slanju podataka gdp opterećenja „%s:%d”."
+
+ #~ msgid "Connection to %s:%d refused."
+ #~ msgstr "Spajanje na %s:%d odbijeno."
+
+ #~ msgid "Uncompressed planar YVU 4:2:0"
+ #~ msgstr "Nekomprimirani ravninski YVU 4:2:0"
+
+ #~ msgid "Uncompressed packed YUV 4:1:0"
+ #~ msgstr "Nekomprimirani pakirani YUV 4:1:0"
+
+ #~ msgid "Uncompressed packed YVU 4:1:0"
+ #~ msgstr "Nekomprimirani pakirani YVU 4:1:0"
+
+ #~ msgid "Uncompressed packed YUV 4:1:1"
+ #~ msgstr "Nekomprimirani pakirani YUV 4:1:1"
+
+ #~ msgid "Uncompressed packed YUV 4:4:4"
+ #~ msgstr "Nekomprimirani pakirani YUV 4:4:4"
+
+ #~ msgid "Uncompressed planar YUV 4:2:2"
+ #~ msgstr "Nekomprimirani ravninski YUV 4:2:2"
+
+ #~ msgid "Uncompressed planar YUV 4:1:1"
+ #~ msgstr "Nekomprimirani ravninski YUV 4:1:1"
+
+ #~ msgid "Uncompressed black and white Y-plane"
+ #~ msgstr "Nekomprimirana crno-bijela Y-ravnina"
+
+ #~ msgid "Raw PCM audio"
+ #~ msgstr "Sirovi PCM zvuk"
+
+ #~ msgid "Raw %d-bit floating-point audio"
+ #~ msgstr "Sirovi %d-bitni zvuk s pomičnim zarezom"
+
+ #~ msgid "Raw floating-point audio"
+ #~ msgstr "Sirovi zvuk s pomičnim zarezom"
+
+ #~ msgid "No device specified."
+ #~ msgstr "Nije naveden uređaj."
+
+ #~ msgid "Device \"%s\" does not exist."
+ #~ msgstr "Uređaj „%s” ne postoji."
+
+ #~ msgid "Device \"%s\" is already being used."
+ #~ msgstr "Uređaj „%s” se već koristi."
+
+ #~ msgid "Could not open device \"%s\" for reading and writing."
+ #~ msgstr "Ne mogu otvoriti uređaj „%s” za čitanje i pisanje."
--- /dev/null
-# Hungarian translation of gst-plugins-base
-# Copyright (C) 2004, 2006, 2007, 2008, 2009, 2010, 2012, 2014, 2015, 2016, 2017 Free Software Foundation, Inc.
++# Hungarian translation for gst-plugins-base.
++# Copyright (C) 2004, 2006, 2007, 2008, 2009, 2010, 2012, 2014, 2015, 2016, 2017, 2019 Free Software Foundation, Inc.
+ # This file is distributed under the same license as the gst-plugins-base package.
+ #
+ # Laszlo Dvornik <dvornik@invitel.hu>, 2004.
+ # Gabor Kelemen <kelemeng@gnome.hu>, 2006, 2007, 2008, 2009, 2012, 2016.
-# Balázs Úr <urbalazs@gmail.com>, 2014, 2015, 2017.
++# Balázs Úr <ur.balazs@fsf.hu>, 2014, 2015, 2017, 2019.
+ msgid ""
+ msgstr ""
-"Project-Id-Version: gst-plugins-base 1.10.0\n"
++"Project-Id-Version: gst-plugins-base 1.16.0\n"
+ "Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2019-02-26 11:43+0000\n"
-"PO-Revision-Date: 2017-03-19 00:47+0100\n"
-"Last-Translator: Balázs Úr <urbalazs@gmail.com>\n"
++"POT-Creation-Date: 2019-04-19 00:20+0100\n"
++"PO-Revision-Date: 2019-11-23 21:59+0100\n"
++"Last-Translator: Balázs Úr <ur.balazs@fsf.hu>\n"
+ "Language-Team: Hungarian <translation-team-hu@lists.sourceforge.net>\n"
+ "Language: hu\n"
+ "MIME-Version: 1.0\n"
+ "Content-Type: text/plain; charset=UTF-8\n"
+ "Content-Transfer-Encoding: 8bit\n"
+ "X-Bugs: Report translation errors to the Language-Team address.\n"
+ "X-Rosetta-Export-Date: 2007-03-10 00:18+0000\n"
-"X-Generator: Lokalize 1.2\n"
++"X-Generator: Lokalize 19.04.3\n"
+ "Plural-Forms: nplurals=2; plural=(n != 1);\n"
+
++#: ext/alsa/gstalsasink.c:579
+ msgid "Could not open device for playback in mono mode."
-msgstr "Nem lehet lejátszásra megnyitni az eszközt mono módban."
++msgstr "Nem sikerült lejátszásra megnyitni az eszközt mono módban."
+
++#: ext/alsa/gstalsasink.c:581
+ msgid "Could not open device for playback in stereo mode."
-msgstr "Nem lehet lejátszásra megnyitni az eszközt sztereó módban."
++msgstr "Nem sikerült lejátszásra megnyitni az eszközt sztereó módban."
+
++#: ext/alsa/gstalsasink.c:585
+ #, c-format
+ msgid "Could not open device for playback in %d-channel mode."
-msgstr "Nem lehet lejátszásra megnyitni az eszközt %d csatornás módban."
++msgstr "Nem sikerült lejátszásra megnyitni az eszközt %d csatornás módban."
+
-msgid ""
-"Could not open audio device for playback. Device is being used by another "
-"application."
-msgstr ""
-"Nem lehet lejátszásra megnyitni a hangeszközt. Az eszközt másik alkalmazás "
-"használja."
++#: ext/alsa/gstalsasink.c:858
++msgid "Could not open audio device for playback. Device is being used by another application."
++msgstr "Nem sikerült lejátszásra megnyitni a hangeszközt. Az eszközt másik alkalmazás használja."
+
++#: ext/alsa/gstalsasink.c:863
+ msgid "Could not open audio device for playback."
-msgstr "Nem lehet lejátszásra megnyitni a hangeszközt."
++msgstr "Nem sikerült lejátszásra megnyitni a hangeszközt."
+
++#: ext/alsa/gstalsasink.c:1076
+ msgid "Error outputting to audio device. The device has been disconnected."
+ msgstr "Hiba a hangeszközre való kimenetküldéskor. Az eszközt leválasztották."
+
++#: ext/alsa/gstalsasrc.c:444
+ msgid "Could not open device for recording in mono mode."
-msgstr "Nem lehet felvételre megnyitni az eszközt mono módban."
++msgstr "Nem sikerült felvételre megnyitni az eszközt mono módban."
+
++#: ext/alsa/gstalsasrc.c:446
+ msgid "Could not open device for recording in stereo mode."
-msgstr "Nem lehet felvételre megnyitni az eszközt sztereó módban."
++msgstr "Nem sikerült felvételre megnyitni az eszközt sztereó módban."
+
++#: ext/alsa/gstalsasrc.c:450
+ #, c-format
+ msgid "Could not open device for recording in %d-channel mode"
-msgstr "Az eszköz nem nyitható meg felvételre %d csatornás módban."
++msgstr "Nem sikerült felvételre megnyitni az eszközt %d csatornás módban"
+
-msgid ""
-"Could not open audio device for recording. Device is being used by another "
-"application."
-msgstr ""
-"Nem lehet felvételre megnyitni a hangeszközt. Az eszközt másik alkalmazás "
-"használja."
++#: ext/alsa/gstalsasrc.c:741
++msgid "Could not open audio device for recording. Device is being used by another application."
++msgstr "Nem sikerült felvételre megnyitni a hangeszközt. Az eszközt másik alkalmazás használja."
+
++#: ext/alsa/gstalsasrc.c:746
+ msgid "Could not open audio device for recording."
-msgstr "Nem lehet felvételre megnyitni a hangeszközt."
++msgstr "Nem sikerült felvételre megnyitni a hangeszközt."
+
++#: ext/alsa/gstalsasrc.c:992
+ msgid "Error recording from audio device. The device has been disconnected."
+ msgstr "Hiba a hangeszközről való felvételkor. Az eszközt leválasztották."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:277 ext/cdparanoia/gstcdparanoiasrc.c:283
+ msgid "Could not open CD device for reading."
-msgstr "Nem lehet olvasásra megnyitni a CD-eszközt."
++msgstr "Nem sikerült olvasásra megnyitni a CD-eszközt."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:409
+ msgid "Could not seek CD."
-msgstr "Nem kereshető a CD."
++msgstr "Nem sikerült pozicionálni a CD-t."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:417
+ msgid "Could not read CD."
-msgstr "Nem olvasható a CD."
++msgstr "Nem sikerült olvasni a CD-t."
+
++#: ext/gl/gstgltestsrc.c:491
+ msgid "failed to draw pattern"
-msgstr ""
++msgstr "a minta rajzolása sikertelen"
+
++#: ext/gl/gstgltestsrc.c:492
+ msgid "A GL error occurred"
-msgstr ""
++msgstr "Egy GL hiba történt"
+
++#: ext/gl/gstgltestsrc.c:498
+ msgid "format wasn't negotiated before get function"
-msgstr ""
-
++msgstr "a formátum nem lett egyeztetve a függvény lekérése előtt"
++
++#: gst/encoding/gstencodebin.c:1636 gst/playback/gstplaybin2.c:3399
++#: gst/playback/gstplaysink.c:1522 gst/playback/gstplaysink.c:1535
++#: gst/playback/gstplaysink.c:1872 gst/playback/gstplaysink.c:1904
++#: gst/playback/gstplaysink.c:2488 gst/playback/gstplaysink.c:2537
++#: gst/playback/gstplaysink.c:2552 gst/playback/gstplaysink.c:2577
++#: gst/playback/gstplaysink.c:2609 gst/playback/gstplaysink.c:2757
++#: gst/playback/gstplaysink.c:2788 gst/playback/gstplaysink.c:3166
++#: gst/playback/gstplaysink.c:3175 gst/playback/gstplaysink.c:3184
++#: gst/playback/gstplaysink.c:3193 gst/playback/gstplaysink.c:3606
++#: gst/playback/gstplaysink.c:4526 gst/playback/gstplaysinkconvertbin.c:97
++#: gst/playback/gstplaysinkconvertbin.c:117
++#: gst/playback/gsturidecodebin.c:1472 gst/playback/gsturisourcebin.c:1597
+ #, c-format
+ msgid "Missing element '%s' - check your GStreamer installation."
+ msgstr "A(z) „%s” elem hiányzik - ellenőrizze a Gstreamer telepítését."
+
++#: gst/playback/gstdecodebin2.c:1883 gst/playback/gstparsebin.c:1553
+ msgid "Could not determine type of stream"
-msgstr "Nem határozható meg az adatfolyam típusa"
++msgstr "Nem sikerült meghatározni az adatfolyam típusát"
+
++#: gst/playback/gstdecodebin2.c:2861 gst/playback/gstparsebin.c:2385
+ msgid "This appears to be a text file"
+ msgstr "Ez egy szövegfájlnak tűnik"
+
++#: gst/playback/gstplaybin2.c:5452
+ msgid "Could not create \"uridecodebin\" element."
-msgstr "Nem hozható létre „uridecodebin” elem."
++msgstr "Nem sikerült létrehozni „uridecodebin” elemet."
+
-#, fuzzy
++#: gst/playback/gstplaybin3.c:4558
+ msgid "Could not create \"uridecodebin3\" element."
-msgstr "Nem hozható létre „uridecodebin” elem."
++msgstr "Nem sikerült létrehozni „uridecodebin3” elemet."
+
++#: gst/playback/gstplaysink.c:2003
+ #, c-format
+ msgid "Both autovideosink and %s elements are missing."
+ msgstr "Az autovideosink és a(z) %s elem is hiányzik."
+
++#: gst/playback/gstplaysink.c:2007
+ msgid "The autovideosink element is missing."
+ msgstr "Az autovideosink elem hiányzik."
+
++#: gst/playback/gstplaysink.c:2012
+ #, c-format
+ msgid "Configured videosink %s is not working."
+ msgstr "A beállított %s videosink elem nem működik."
+
++#: gst/playback/gstplaysink.c:2016
+ #, c-format
+ msgid "Both autovideosink and %s elements are not working."
+ msgstr "Sem az autovideosink, sem a(z) %s elem nem működik."
+
++#: gst/playback/gstplaysink.c:2020
+ msgid "The autovideosink element is not working."
+ msgstr "Az autovideosink elem nem működik."
+
++#: gst/playback/gstplaysink.c:2525
+ msgid "Custom text sink element is not usable."
+ msgstr "Az egyéni szövegnyelő elem nem használható."
+
++#: gst/playback/gstplaysink.c:2903
+ msgid "No volume control found"
+ msgstr "Nem található hangerőszabályzó"
+
++#: gst/playback/gstplaysink.c:2933
+ #, c-format
+ msgid "Both autoaudiosink and %s elements are missing."
+ msgstr "Az autoaudiosink és a(z) %s elem is hiányzik."
+
++#: gst/playback/gstplaysink.c:2937
+ msgid "The autoaudiosink element is missing."
+ msgstr "Az autoaudiosink elem hiányzik."
+
++#: gst/playback/gstplaysink.c:2942
+ #, c-format
+ msgid "Configured audiosink %s is not working."
+ msgstr "A beállított %s audiosink elem nem működik."
+
++#: gst/playback/gstplaysink.c:2946
+ #, c-format
+ msgid "Both autoaudiosink and %s elements are not working."
+ msgstr "Sem az autoaudiosink, sem a(z) %s elem nem működik."
+
++#: gst/playback/gstplaysink.c:2950
+ msgid "The autoaudiosink element is not working."
+ msgstr "Az autoaudiosink elem nem működik."
+
++#: gst/playback/gstplaysink.c:3269 gst/playback/gstplaysink.c:3274
+ msgid "Can't play a text file without video or visualizations."
+ msgstr "A szövegfájl nem játszható le videó vagy vizualizációk nélkül."
+
++#: gst/playback/gsturidecodebin.c:921
+ #, c-format
+ msgid "No decoder available for type '%s'."
+ msgstr "Nem érhető el dekódoló a(z) „%s” típushoz."
+
++#: gst/playback/gsturidecodebin.c:1378 gst/playback/gsturisourcebin.c:1491
+ msgid "No URI specified to play from."
+ msgstr "Nincs megadva URI a lejátszáshoz."
+
++#: gst/playback/gsturidecodebin.c:1384 gst/playback/gsturisourcebin.c:1497
+ #, c-format
+ msgid "Invalid URI \"%s\"."
+ msgstr "Érvénytelen URI: „%s”."
+
++#: gst/playback/gsturidecodebin.c:1391 gst/playback/gsturisourcebin.c:1504
+ msgid "This stream type cannot be played yet."
+ msgstr "Ez az adatfolyamtípus még nem játszható le."
+
++#: gst/playback/gsturidecodebin.c:1409 gst/playback/gsturisourcebin.c:1522
+ #, c-format
+ msgid "No URI handler implemented for \"%s\"."
+ msgstr "Nincs URI kezelő megvalósítva a következőhöz: „%s”."
+
++#: gst/playback/gsturidecodebin.c:2289 gst/playback/gsturisourcebin.c:2228
+ msgid "Source element is invalid."
+ msgstr "A forráselem érvénytelen."
+
++#: gst/tcp/gsttcpclientsink.c:216
+ #, c-format
+ msgid "Error while sending data to \"%s:%d\"."
+ msgstr "Hiba adatok küldése során a következőnek: „%s:%d”."
+
++#: gst-libs/gst/audio/gstaudiobasesrc.c:842
+ msgid "Can't record audio fast enough"
+ msgstr "Nem lehet elég gyorsan rögzíteni a hangot"
+
++#: gst-libs/gst/audio/gstaudiocdsrc.c:1625
+ msgid "This CD has no audio tracks"
+ msgstr "Ez a CD nem rendelkezik hangsávokkal"
+
++#: gst-libs/gst/pbutils/descriptions.c:89
+ msgid "ID3 tag"
+ msgstr "ID3 címke"
+
++#: gst-libs/gst/pbutils/descriptions.c:90
++#: gst-libs/gst/pbutils/descriptions.c:91
+ msgid "APE tag"
+ msgstr "APE címke"
+
++#: gst-libs/gst/pbutils/descriptions.c:92
+ msgid "ICY internet radio"
+ msgstr "ICY internetrádió"
+
++#: gst-libs/gst/pbutils/descriptions.c:127
+ msgid "Apple Lossless Audio (ALAC)"
+ msgstr "Apple veszteségmentes hang (ALAC)"
+
++#: gst-libs/gst/pbutils/descriptions.c:143
+ msgid "Free Lossless Audio Codec (FLAC)"
+ msgstr "Szabad veszteségmentes hangkodek (FLAC)"
+
++#: gst-libs/gst/pbutils/descriptions.c:173
++#: gst-libs/gst/pbutils/descriptions.c:174
+ msgid "Lossless True Audio (TTA)"
+ msgstr "Veszteségmentes valódi hang (TTA)"
+
++#: gst-libs/gst/pbutils/descriptions.c:182
+ msgid "Windows Media Speech"
+ msgstr "Windows Media beszéd"
+
++#: gst-libs/gst/pbutils/descriptions.c:199
+ msgid "CYUV Lossless"
+ msgstr "CYUV veszteségmentes"
+
++#: gst-libs/gst/pbutils/descriptions.c:203
+ msgid "FFMpeg v1"
+ msgstr "FFMpeg v1"
+
++#: gst-libs/gst/pbutils/descriptions.c:217
+ msgid "Lossless MSZH"
+ msgstr "Veszteségmentes MSZH"
+
++#: gst-libs/gst/pbutils/descriptions.c:228
+ msgid "Run-length encoding"
+ msgstr "Műsorhossz-kódolás"
+
++#: gst-libs/gst/pbutils/descriptions.c:274
+ msgid "Timed Text"
+ msgstr "Időzített szöveg"
+
++#: gst-libs/gst/pbutils/descriptions.c:278
+ msgid "Subtitle"
+ msgstr "Felirat"
+
++#: gst-libs/gst/pbutils/descriptions.c:279
+ msgid "MPL2 subtitle format"
+ msgstr "MPL2 feliratformátum"
+
++#: gst-libs/gst/pbutils/descriptions.c:280
+ msgid "DKS subtitle format"
+ msgstr "DKS feliratformátum"
+
++#: gst-libs/gst/pbutils/descriptions.c:281
+ msgid "QTtext subtitle format"
+ msgstr "QTtext feliratformátum"
+
++#: gst-libs/gst/pbutils/descriptions.c:282
+ msgid "Sami subtitle format"
+ msgstr "Sami feliratformátum"
+
++#: gst-libs/gst/pbutils/descriptions.c:283
+ msgid "TMPlayer subtitle format"
+ msgstr "TMPlayer feliratformátum"
+
++#: gst-libs/gst/pbutils/descriptions.c:287
+ msgid "CEA 608 Closed Caption"
-msgstr ""
++msgstr "CEA 608 lezárt felirat"
+
++#: gst-libs/gst/pbutils/descriptions.c:288
+ msgid "CEA 708 Closed Caption"
-msgstr ""
++msgstr "CEA 708 lezárt felirat"
+
++#: gst-libs/gst/pbutils/descriptions.c:289
+ msgid "Kate subtitle format"
+ msgstr "Kate feliratformátum"
+
++#: gst-libs/gst/pbutils/descriptions.c:290
+ msgid "WebVTT subtitle format"
+ msgstr "WebVTT feliratformátum"
+
++#: gst-libs/gst/pbutils/descriptions.c:455
++#: gst-libs/gst/pbutils/descriptions.c:458
++#: gst-libs/gst/pbutils/descriptions.c:508
+ msgid "Uncompressed video"
+ msgstr "Tömörítetlen videó"
+
++#: gst-libs/gst/pbutils/descriptions.c:463
+ msgid "Uncompressed gray"
+ msgstr "Tömörítetlen szürke"
+
++#: gst-libs/gst/pbutils/descriptions.c:486
+ #, c-format
+ msgid "Uncompressed packed YUV %s"
+ msgstr "Tömörítetlen csomagolt YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:488
+ #, c-format
+ msgid "Uncompressed semi-planar YUV %s"
+ msgstr "Tömörítetlen félig síkbeli YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:490
+ #, c-format
+ msgid "Uncompressed planar YUV %s"
+ msgstr "Tömörítetlen síkbeli YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:501
+ #, c-format
+ msgid "Uncompressed palettized %d-bit %s"
+ msgstr "Tömörítetlen, %d-bites palettázott %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:504
+ #, c-format
+ msgid "Uncompressed %d-bit %s"
+ msgstr "Tömörítetlen, %d-bites %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:586
+ #, c-format
+ msgid "DivX MPEG-4 Version %d"
+ msgstr "DivX MPEG-4 %d. verzió"
+
++#: gst-libs/gst/pbutils/descriptions.c:834
+ msgid "Uncompressed audio"
+ msgstr "Tömörítetlen hang"
+
++#: gst-libs/gst/pbutils/descriptions.c:840
+ #, c-format
+ msgid "Raw %d-bit %s audio"
+ msgstr "Nyers %d bites %s hang"
+
++#: gst-libs/gst/pbutils/descriptions.c:942
+ msgid "Audio CD source"
+ msgstr "Hang CD forrás"
+
++#: gst-libs/gst/pbutils/descriptions.c:945
+ msgid "DVD source"
+ msgstr "DVD forrás"
+
++#: gst-libs/gst/pbutils/descriptions.c:948
+ msgid "Real Time Streaming Protocol (RTSP) source"
+ msgstr "Valósidejű adatfolyam-protokoll (RTSP) forrás"
+
++#: gst-libs/gst/pbutils/descriptions.c:952
+ msgid "Microsoft Media Server (MMS) protocol source"
+ msgstr "Microsoft Media Server (MMS) protokollforrás"
+
++#: gst-libs/gst/pbutils/descriptions.c:960
+ #, c-format
+ msgid "%s protocol source"
+ msgstr "%s protokollforrás"
+
++#: gst-libs/gst/pbutils/descriptions.c:1034
+ #, c-format
+ msgid "%s video RTP depayloader"
+ msgstr "%s videó RTP dekódoló"
+
++#: gst-libs/gst/pbutils/descriptions.c:1036
+ #, c-format
+ msgid "%s audio RTP depayloader"
+ msgstr "%s hang RTP dekódoló"
+
++#: gst-libs/gst/pbutils/descriptions.c:1038
+ #, c-format
+ msgid "%s RTP depayloader"
+ msgstr "%s RTP dekódoló"
+
++#: gst-libs/gst/pbutils/descriptions.c:1045
+ #, c-format
+ msgid "%s demuxer"
+ msgstr "%s szétválasztó"
+
++#: gst-libs/gst/pbutils/descriptions.c:1047
+ #, c-format
+ msgid "%s decoder"
+ msgstr "%s dekódoló"
+
++#: gst-libs/gst/pbutils/descriptions.c:1086
+ #, c-format
+ msgid "%s video RTP payloader"
+ msgstr "%s videó RTP kódoló"
+
++#: gst-libs/gst/pbutils/descriptions.c:1088
+ #, c-format
+ msgid "%s audio RTP payloader"
+ msgstr "%s hang RTP kódoló"
+
++#: gst-libs/gst/pbutils/descriptions.c:1090
+ #, c-format
+ msgid "%s RTP payloader"
+ msgstr "%s RTP kódoló"
+
++#: gst-libs/gst/pbutils/descriptions.c:1097
+ #, c-format
+ msgid "%s muxer"
+ msgstr "%s egyesítő"
+
++#: gst-libs/gst/pbutils/descriptions.c:1099
+ #, c-format
+ msgid "%s encoder"
+ msgstr "%s kódoló"
+
++#: gst-libs/gst/pbutils/descriptions.c:1133
+ #, c-format
+ msgid "GStreamer element %s"
+ msgstr "GStreamer elem: %s"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:558
+ msgid "Unknown source element"
+ msgstr "Ismeretlen forráselem"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:561
+ msgid "Unknown sink element"
+ msgstr "Ismeretlen nyelőelem"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:564
+ msgid "Unknown element"
+ msgstr "Ismeretlen elem"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:567
+ msgid "Unknown decoder element"
+ msgstr "Ismeretlen dekódolóelem"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:570
+ msgid "Unknown encoder element"
+ msgstr "Ismeretlen kódolóelem"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:575
+ msgid "Plugin or element of unknown type"
+ msgstr "Ismeretlen típusú bővítmény vagy elem"
+
++#: gst-libs/gst/tag/gsttagdemux.c:1243
+ msgid "Failed to read tag: not enough data"
+ msgstr "A címke nem olvasható: nincs elég adat"
+
++#: gst-libs/gst/tag/tags.c:77
+ msgid "track ID"
+ msgstr "számazonosító"
+
++#: gst-libs/gst/tag/tags.c:77
+ msgid "MusicBrainz track ID"
+ msgstr "MusicBrainz számazonosító"
+
++#: gst-libs/gst/tag/tags.c:79
+ msgid "artist ID"
+ msgstr "előadó-azonosító"
+
++#: gst-libs/gst/tag/tags.c:79
+ msgid "MusicBrainz artist ID"
+ msgstr "MusicBrainz előadó-azonosító"
+
++#: gst-libs/gst/tag/tags.c:81
+ msgid "album ID"
 + msgstr "albumazonosító"
+
++#: gst-libs/gst/tag/tags.c:81
+ msgid "MusicBrainz album ID"
+ msgstr "MusicBrainz albumazonosító"
+
++#: gst-libs/gst/tag/tags.c:84
+ msgid "album artist ID"
 + msgstr "albumelőadó azonosítója"
+
++#: gst-libs/gst/tag/tags.c:84
+ msgid "MusicBrainz album artist ID"
+ msgstr "MusicBrainz albumelőadó azonosítója"
+
++#: gst-libs/gst/tag/tags.c:86
+ msgid "track TRM ID"
+ msgstr "szám TRM azonosítója"
+
++#: gst-libs/gst/tag/tags.c:86
+ msgid "MusicBrainz TRM ID"
+ msgstr "MusicBrainz szám TRM azonosítója"
+
++#: gst-libs/gst/tag/tags.c:108
+ msgid "capturing shutter speed"
+ msgstr "felvétel zársebessége"
+
++#: gst-libs/gst/tag/tags.c:109
+ msgid "Shutter speed used when capturing an image, in seconds"
+ msgstr "Kép felvételéhez használt zársebesség másodpercben"
+
++#: gst-libs/gst/tag/tags.c:112
+ msgid "capturing focal ratio"
+ msgstr "felvétel fókuszaránya"
+
++#: gst-libs/gst/tag/tags.c:113
+ msgid "Focal ratio (f-number) used when capturing the image"
+ msgstr "A kép felvételéhez használt fókuszarány (f-szám)"
+
++#: gst-libs/gst/tag/tags.c:116
+ msgid "capturing focal length"
+ msgstr "felvétel fókusztávolsága"
+
++#: gst-libs/gst/tag/tags.c:117
+ msgid "Focal length of the lens used capturing the image, in mm"
+ msgstr "A kép felvételéhez használt lencse fókusztávolsága mm-ben"
+
++#: gst-libs/gst/tag/tags.c:121
+ msgid "capturing 35 mm equivalent focal length"
+ msgstr "felvétel 35 mm egyenértékes fókusztávolsága"
+
-msgid ""
-"35 mm equivalent focal length of the lens used capturing the image, in mm"
-msgstr ""
-"A kép felvételéhez használt lencse 35 mm egyenértékes fókusztávolsága mm-ben"
++#: gst-libs/gst/tag/tags.c:122
++msgid "35 mm equivalent focal length of the lens used capturing the image, in mm"
++msgstr "A kép felvételéhez használt lencse 35 mm egyenértékes fókusztávolsága mm-ben"
+
++#: gst-libs/gst/tag/tags.c:126
+ msgid "capturing digital zoom ratio"
+ msgstr "felvétel digitális nagyítási aránya"
+
++#: gst-libs/gst/tag/tags.c:127
+ msgid "Digital zoom ratio used when capturing an image"
+ msgstr "A kép felvételéhez használt digitális nagyítási arány"
+
++#: gst-libs/gst/tag/tags.c:130
+ msgid "capturing iso speed"
+ msgstr "felvétel ISO sebessége"
+
++#: gst-libs/gst/tag/tags.c:131
+ msgid "The ISO speed used when capturing an image"
+ msgstr "A kép felvételéhez használt ISO sebesség"
+
++#: gst-libs/gst/tag/tags.c:134
+ msgid "capturing exposure program"
+ msgstr "felvétel expozíciós programja"
+
++#: gst-libs/gst/tag/tags.c:135
+ msgid "The exposure program used when capturing an image"
+ msgstr "A kép felvételéhez használt expozíciós program"
+
++#: gst-libs/gst/tag/tags.c:138
+ msgid "capturing exposure mode"
+ msgstr "felvétel expozíciós módja"
+
++#: gst-libs/gst/tag/tags.c:139
+ msgid "The exposure mode used when capturing an image"
+ msgstr "A kép felvételéhez használt expozíciós mód"
+
++#: gst-libs/gst/tag/tags.c:142
+ msgid "capturing exposure compensation"
+ msgstr "felvétel expozíciós kompenzációja"
+
++#: gst-libs/gst/tag/tags.c:143
+ msgid "The exposure compensation used when capturing an image"
+ msgstr "A kép felvételéhez használt expozíciós kompenzáció"
+
++#: gst-libs/gst/tag/tags.c:146
+ msgid "capturing scene capture type"
+ msgstr "felvétel helyszínfelvételi módja"
+
++#: gst-libs/gst/tag/tags.c:147
+ msgid "The scene capture mode used when capturing an image"
+ msgstr "A kép felvételéhez használt helyszínfelvételi mód"
+
++#: gst-libs/gst/tag/tags.c:150
+ msgid "capturing gain adjustment"
+ msgstr "felvétel erősítésmódosítása"
+
++#: gst-libs/gst/tag/tags.c:151
+ msgid "The overall gain adjustment applied on an image"
+ msgstr "A képre alkalmazott általános erősítésmódosítás"
+
++#: gst-libs/gst/tag/tags.c:154
+ msgid "capturing white balance"
+ msgstr "felvétel fehéregyensúlya"
+
++#: gst-libs/gst/tag/tags.c:155
+ msgid "The white balance mode set when capturing an image"
+ msgstr "A kép felvételéhez használt fehéregyensúlymód"
+
++#: gst-libs/gst/tag/tags.c:158
+ msgid "capturing contrast"
+ msgstr "felvétel kontrasztja"
+
++#: gst-libs/gst/tag/tags.c:159
+ msgid "The direction of contrast processing applied when capturing an image"
+ msgstr "A kép felvételekor használt kontrasztfeldolgozás iránya"
+
++#: gst-libs/gst/tag/tags.c:163
+ msgid "capturing saturation"
+ msgstr "felvétel telítettsége"
+
++#: gst-libs/gst/tag/tags.c:164
+ msgid "The direction of saturation processing applied when capturing an image"
+ msgstr "A kép felvételekor alkalmazott telítettségfeldolgozás iránya"
+
++#: gst-libs/gst/tag/tags.c:168
+ msgid "capturing sharpness"
+ msgstr "felvétel élessége"
+
++#: gst-libs/gst/tag/tags.c:169
+ msgid "The direction of sharpness processing applied when capturing an image"
+ msgstr "A kép felvételekor alkalmazott élességfeldolgozás iránya"
+
++#: gst-libs/gst/tag/tags.c:173
+ msgid "capturing flash fired"
+ msgstr "felvétel vakuja"
+
++#: gst-libs/gst/tag/tags.c:174
+ msgid "If the flash fired while capturing an image"
+ msgstr "A kép felvételéhez használt vaku"
+
++#: gst-libs/gst/tag/tags.c:177
+ msgid "capturing flash mode"
+ msgstr "felvétel vakumódja"
+
++#: gst-libs/gst/tag/tags.c:178
+ msgid "The selected flash mode while capturing an image"
+ msgstr "A kép felvételéhez használt kijelölt vakumód"
+
++#: gst-libs/gst/tag/tags.c:181
+ msgid "capturing metering mode"
+ msgstr "felvétel mérési módja"
+
-msgid ""
-"The metering mode used while determining exposure for capturing an image"
++#: gst-libs/gst/tag/tags.c:182
++msgid "The metering mode used while determining exposure for capturing an image"
+ msgstr "A kép felvételekor az expozíció meghatározásához használt mérési mód"
+
++#: gst-libs/gst/tag/tags.c:186
+ msgid "capturing source"
+ msgstr "felvétel forrása"
+
++#: gst-libs/gst/tag/tags.c:187
+ msgid "The source or type of device used for the capture"
+ msgstr "A felvételhez használt forrás vagy eszköz típusa"
+
++#: gst-libs/gst/tag/tags.c:190
+ msgid "image horizontal ppi"
+ msgstr "kép vízszintes ppi"
+
++#: gst-libs/gst/tag/tags.c:191
+ msgid "Media (image/video) intended horizontal pixel density in ppi"
+ msgstr "A média (kép/videó) tervezett vízszintes képpontsűrűsége ppi-ben"
+
++#: gst-libs/gst/tag/tags.c:194
+ msgid "image vertical ppi"
+ msgstr "kép függőleges ppi"
+
++#: gst-libs/gst/tag/tags.c:195
+ msgid "Media (image/video) intended vertical pixel density in ppi"
+ msgstr "A média (kép/videó) tervezett függőleges képpontsűrűsége ppi-ben"
+
++#: gst-libs/gst/tag/tags.c:198
+ msgid "ID3v2 frame"
+ msgstr "ID3v2 keret"
+
++#: gst-libs/gst/tag/tags.c:198
+ msgid "unparsed id3v2 tag frame"
+ msgstr "feldolgozatlan id3v2 címke keret"
+
++#: gst-libs/gst/tag/tags.c:202
+ msgid "musical-key"
+ msgstr "zenei kulcs"
+
++#: gst-libs/gst/tag/tags.c:202
+ msgid "Initial key in which the sound starts"
+ msgstr "Kezdeti kulcs, amelyben a hang indul"
+
++#: tools/gst-device-monitor.c:260 tools/gst-play.c:1444
+ msgid "Print version information and exit"
+ msgstr "Verzióinformációk kiírása és kilépés"
+
-msgid ""
-"Don't exit after showing the initial device list, but wait for devices to "
-"added/removed."
-msgstr ""
-"Ne lépjen ki a kezdeti eszközlista megjelenítése után, de várjon az eszközök "
-"hozzáadására/eltávolítására."
++#: tools/gst-device-monitor.c:262
++msgid "Don't exit after showing the initial device list, but wait for devices to added/removed."
++msgstr "Ne lépjen ki a kezdeti eszközlista megjelenítése után, de várjon az eszközök hozzáadására/eltávolítására."
+
++#: tools/gst-play.c:310
+ #, c-format
+ msgid "Volume: %.0f%%"
+ msgstr "Hangerő: %.0f%%"
+
++#: tools/gst-play.c:349
+ msgid "Buffering..."
+ msgstr "Pufferelés…"
+
++#: tools/gst-play.c:370
+ msgid "Clock lost, selecting a new one\n"
+ msgstr "Óra elveszítve, egy új kiválasztása\n"
+
++#: tools/gst-play.c:400 tools/gst-play.c:446 tools/gst-play.c:883
++#: tools/gst-play.c:1347
+ msgid "Reached end of play list."
+ msgstr "A lejátszólista vége elérve."
+
++#: tools/gst-play.c:613
+ msgid "Paused"
+ msgstr "Szüneteltetve"
+
++#: tools/gst-play.c:671
+ #, c-format
+ msgid "Now playing %s\n"
+ msgstr "Most játszott: %s\n"
+
++#: tools/gst-play.c:734
+ #, c-format
+ msgid "About to finish, preparing next title: %s"
+ msgstr "Hamarosan befejeződik, következő cím előkészítése: %s"
+
++#: tools/gst-play.c:979
+ #, c-format
+ msgid "Playback rate: %.2f"
+ msgstr "Lejátszási sebesség: %.2f"
+
++#: tools/gst-play.c:983
+ #, c-format
+ msgid "Could not change playback rate to %.2f"
-msgstr "Nem lehet megváltoztatni a lejátszási sebességet erre: %.2f"
++msgstr "Nem sikerült megváltoztatni a lejátszási sebességet erre: %.2f"
+
++#: tools/gst-play.c:1287
+ msgid "space"
+ msgstr "szóköz"
+
++#: tools/gst-play.c:1287
+ msgid "pause/unpause"
+ msgstr "szünet/folytatás"
+
++#: tools/gst-play.c:1288
+ msgid "q or ESC"
+ msgstr "q vagy ESC"
+
++#: tools/gst-play.c:1288
+ msgid "quit"
+ msgstr "kilépés"
+
++#: tools/gst-play.c:1289
+ msgid "> or n"
+ msgstr "> vagy n"
+
++#: tools/gst-play.c:1289
+ msgid "play next"
+ msgstr "következő lejátszása"
+
++#: tools/gst-play.c:1290
+ msgid "< or b"
+ msgstr "< vagy b"
+
++#: tools/gst-play.c:1290
+ msgid "play previous"
+ msgstr "előző lejátszása"
+
++#: tools/gst-play.c:1291
+ msgid "seek forward"
+ msgstr "tekerés előre"
+
++#: tools/gst-play.c:1292
+ msgid "seek backward"
+ msgstr "tekerés hátra"
+
++#: tools/gst-play.c:1293
+ msgid "volume up"
+ msgstr "hangerő fel"
+
++#: tools/gst-play.c:1294
+ msgid "volume down"
+ msgstr "hangerő le"
+
++#: tools/gst-play.c:1295
+ msgid "increase playback rate"
+ msgstr "lejátszási sebesség növelése"
+
++#: tools/gst-play.c:1296
+ msgid "decrease playback rate"
+ msgstr "lejátszási sebesség csökkentése"
+
++#: tools/gst-play.c:1297
+ msgid "change playback direction"
+ msgstr "lejátszási irány megváltoztatása"
+
++#: tools/gst-play.c:1298
+ msgid "enable/disable trick modes"
+ msgstr "trükk módok engedélyezése/letiltása"
+
++#: tools/gst-play.c:1299
+ msgid "change audio track"
+ msgstr "hangsáv megváltoztatása"
+
++#: tools/gst-play.c:1300
+ msgid "change video track"
+ msgstr "videosáv megváltoztatása"
+
++#: tools/gst-play.c:1301
+ msgid "change subtitle track"
+ msgstr "feliratsáv megváltoztatása"
+
++#: tools/gst-play.c:1302
+ msgid "seek to beginning"
+ msgstr "tekerés az elejére"
+
++#: tools/gst-play.c:1303
+ msgid "show keyboard shortcuts"
+ msgstr "gyorsbillentyűk megjelenítése"
+
++#: tools/gst-play.c:1306
+ msgid "Interactive mode - keyboard controls:"
+ msgstr "Interaktív mód - billentyűzetvezérlők:"
+
++#: tools/gst-play.c:1439
+ msgid "Output status information and property notifications"
+ msgstr "Állapotinformációk és tulajdonságértesítések kiírása"
+
++#: tools/gst-play.c:1441
+ msgid "Control playback behaviour setting playbin 'flags' property"
-msgstr ""
-"Lejátszás viselkedésének módosítása a playbin „flags” tulajdonságának "
-"beállításával"
++msgstr "Lejátszás viselkedésének módosítása a playbin „flags” tulajdonságának beállításával"
+
++#: tools/gst-play.c:1446
+ msgid "Video sink to use (default is autovideosink)"
+ msgstr "Használandó videosüllyesztés (az alapértelmezett az autovideosink)"
+
++#: tools/gst-play.c:1448
+ msgid "Audio sink to use (default is autoaudiosink)"
+ msgstr "Használandó hangsüllyesztés (az alapértelmezett az autoaudiosink)"
+
++#: tools/gst-play.c:1450
+ msgid "Enable gapless playback"
+ msgstr "Szünetmentes lejátszás bekapcsolása"
+
++#: tools/gst-play.c:1452
+ msgid "Shuffle playlist"
+ msgstr "Lejátszólista keverése"
+
++#: tools/gst-play.c:1455
+ msgid "Disable interactive control via the keyboard"
+ msgstr "Interaktív vezérlés letiltása a billentyűzeten keresztül"
+
++#: tools/gst-play.c:1457
+ msgid "Volume"
+ msgstr "Hangerő"
+
++#: tools/gst-play.c:1459
+ msgid "Playlist file containing input media files"
+ msgstr "Bemeneti médiafájlokat tartalmazó lejátszólista fájl"
+
++#: tools/gst-play.c:1461
+ msgid "Do not print any output (apart from errors)"
+ msgstr "Ne írjon ki semmilyen kimenetet (a hibákon kívül)"
+
++#: tools/gst-play.c:1463
+ msgid "Use playbin3 pipeline"
-msgstr ""
++msgstr "Playbin3 csővezeték használata"
+
++#: tools/gst-play.c:1464
+ msgid "(default varies depending on 'USE_PLAYBIN' env variable)"
-msgstr ""
++msgstr "(az alapértelmezett a „USE_PLAYBIN” környezeti változótól függően változik)"
++
++#: tools/gst-play.c:1468
++msgid "Keep showing the last frame on EOS until quit or playlist change command (gapless is ignored)"
++msgstr "Az utolsó képkocka láthatóan tartása az adatfolyam végén a kilépésig vagy a lejátszólista megváltoztatása parancsig (szünetmentes mellőzve)"
+
++#: tools/gst-play.c:1546
+ #, c-format
+ msgid "Usage: %s FILE1|URI1 [FILE2|URI2] [FILE3|URI3] ..."
+ msgstr "Használat: %s FÁJL1|URI1 [FÁJL2|URI2] [FÁJL3|URI3] …"
+
++#: tools/gst-play.c:1550
+ msgid "You must provide at least one filename or URI to play."
+ msgstr "Meg kell adnia legalább egy fájlnevet vagy URI-t a lejátszáshoz."
+
++#: tools/gst-play.c:1590
+ msgid "Press 'k' to see a list of keyboard shortcuts.\n"
-msgstr ""
-"Nyomja meg a „k” billentyűt a gyorsbillentyűk listájának megtekintéséhez.\n"
-
-#~ msgid "Could not create \"decodebin3\" element."
-#~ msgstr "Nem hozható létre „decodebin3” elem."
-
-#~ msgid "Could not create \"urisourcebin\" element."
-#~ msgstr "Nem hozható létre „urisourcebin” elem."
++msgstr "Nyomja meg a „k” billentyűt a gyorsbillentyűk listájának megtekintéséhez.\n"
--- /dev/null
-"Project-Id-Version: gst-plugins-base-1.15.1\n"
+ # Italian translation for gst-plugins-base package of GStreamer project.
+ # Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2019 GStreamer team
+ # This file is distributed under the same license as the gst-plugins-base package.
+ # Luca Ferretti <elle.uca@infinito.it>, 2004, 2005, 2006, 2007, 2008, 2009, 2010.
+ # Milo Casagrande <milo@milo.name>, 2019.
+ #
+ msgid ""
+ msgstr ""
-"POT-Creation-Date: 2019-02-26 11:43+0000\n"
-"PO-Revision-Date: 2019-01-29 17:52+0100\n"
++"Project-Id-Version: gst-plugins-base-1.16.0\n"
+ "Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"X-Generator: Poedit 2.2.1\n"
++"POT-Creation-Date: 2019-04-19 00:20+0100\n"
++"PO-Revision-Date: 2019-09-03 10:01+0200\n"
+ "Last-Translator: Milo Casagrande <milo@milo.name>\n"
+ "Language-Team: Italian <tp@lists.linux.it>\n"
+ "Language: it\n"
+ "MIME-Version: 1.0\n"
+ "Content-Type: text/plain; charset=UTF-8\n"
+ "Content-Transfer-Encoding: 8bit\n"
+ "X-Bugs: Report translation errors to the Language-Team address.\n"
-msgstr ""
-"Impossibile aprire il dispositivo per la riproduzione in modalità mono."
++"X-Generator: Poedit 2.2.3\n"
+
++#: ext/alsa/gstalsasink.c:579
+ msgid "Could not open device for playback in mono mode."
-msgstr ""
-"Impossibile aprire il dispositivo per la riproduzione in modalità stereo."
++msgstr "Impossibile aprire il dispositivo per la riproduzione in modalità mono."
+
++#: ext/alsa/gstalsasink.c:581
+ msgid "Could not open device for playback in stereo mode."
-msgstr ""
-"Impossibile aprire il dispositivo per la riproduzione in modalità %d-canali."
++msgstr "Impossibile aprire il dispositivo per la riproduzione in modalità stereo."
+
++#: ext/alsa/gstalsasink.c:585
+ #, c-format
+ msgid "Could not open device for playback in %d-channel mode."
-msgid ""
-"Could not open audio device for playback. Device is being used by another "
-"application."
-msgstr ""
-"Impossibile aprire il dispositivo audio per la riproduzione: è utilizzato da "
-"un'altra applicazione."
++msgstr "Impossibile aprire il dispositivo per la riproduzione in modalità %d-canali."
+
-msgstr ""
-"Errore nell'inviare dati audio al dispositivo di uscita: è stato scollegato."
++#: ext/alsa/gstalsasink.c:858
++msgid "Could not open audio device for playback. Device is being used by another application."
++msgstr "Impossibile aprire il dispositivo audio per la riproduzione: è utilizzato da un'altra applicazione."
+
++#: ext/alsa/gstalsasink.c:863
+ msgid "Could not open audio device for playback."
+ msgstr "Impossibile aprire il dispositivo audio per la riproduzione."
+
++#: ext/alsa/gstalsasink.c:1076
+ msgid "Error outputting to audio device. The device has been disconnected."
-msgstr ""
-"Impossibile aprire il dispositivo per la registrazione in modalità mono."
++msgstr "Errore nell'inviare dati audio al dispositivo di uscita: è stato scollegato."
+
++#: ext/alsa/gstalsasrc.c:444
+ msgid "Could not open device for recording in mono mode."
-msgstr ""
-"Impossibile aprire il dispositivo per la registrazione in modalità stereo."
++msgstr "Impossibile aprire il dispositivo per la registrazione in modalità mono."
+
++#: ext/alsa/gstalsasrc.c:446
+ msgid "Could not open device for recording in stereo mode."
-msgstr ""
-"Impossibile aprire il dispositivo per la registrazione in modalità %d-canali."
++msgstr "Impossibile aprire il dispositivo per la registrazione in modalità stereo."
+
++#: ext/alsa/gstalsasrc.c:450
+ #, c-format
+ msgid "Could not open device for recording in %d-channel mode"
-msgid ""
-"Could not open audio device for recording. Device is being used by another "
-"application."
-msgstr ""
-"Impossibile aprire il dispositivo audio per la registrazione: è utilizzato "
-"da un'altra applicazione."
++msgstr "Impossibile aprire il dispositivo per la registrazione in modalità %d-canali."
+
-msgstr ""
-"Elemento «%s» mancante - verificare la propria installazione di GStreamer."
++#: ext/alsa/gstalsasrc.c:741
++msgid "Could not open audio device for recording. Device is being used by another application."
++msgstr "Impossibile aprire il dispositivo audio per la registrazione: è utilizzato da un'altra applicazione."
+
++#: ext/alsa/gstalsasrc.c:746
+ msgid "Could not open audio device for recording."
+ msgstr "Impossibile aprire il dispositivo audio per la registrazione."
+
++#: ext/alsa/gstalsasrc.c:992
+ msgid "Error recording from audio device. The device has been disconnected."
+ msgstr "Errore nel registrare dal dispositivo audio: è stato scollegato."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:277 ext/cdparanoia/gstcdparanoiasrc.c:283
+ msgid "Could not open CD device for reading."
+ msgstr "Impossibile aprire il device CD in lettura."
+
+ # seek --> posizionamento (come in glib/gio)
++#: ext/cdparanoia/gstcdparanoiasrc.c:409
+ msgid "Could not seek CD."
+ msgstr "Impossibile effettuare il posizionamento nel CD."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:417
+ msgid "Could not read CD."
+ msgstr "Impossibile leggere il CD."
+
++#: ext/gl/gstgltestsrc.c:491
+ msgid "failed to draw pattern"
+ msgstr "generazione modello non riuscita"
+
++#: ext/gl/gstgltestsrc.c:492
+ msgid "A GL error occurred"
+ msgstr "Si è verificato un errore GL"
+
++#: ext/gl/gstgltestsrc.c:498
+ msgid "format wasn't negotiated before get function"
+ msgstr "il formato non è stato concordato prima della funzione get"
+
++#: gst/encoding/gstencodebin.c:1636 gst/playback/gstplaybin2.c:3399
++#: gst/playback/gstplaysink.c:1522 gst/playback/gstplaysink.c:1535
++#: gst/playback/gstplaysink.c:1872 gst/playback/gstplaysink.c:1904
++#: gst/playback/gstplaysink.c:2488 gst/playback/gstplaysink.c:2537
++#: gst/playback/gstplaysink.c:2552 gst/playback/gstplaysink.c:2577
++#: gst/playback/gstplaysink.c:2609 gst/playback/gstplaysink.c:2757
++#: gst/playback/gstplaysink.c:2788 gst/playback/gstplaysink.c:3166
++#: gst/playback/gstplaysink.c:3175 gst/playback/gstplaysink.c:3184
++#: gst/playback/gstplaysink.c:3193 gst/playback/gstplaysink.c:3606
++#: gst/playback/gstplaysink.c:4526 gst/playback/gstplaysinkconvertbin.c:97
++#: gst/playback/gstplaysinkconvertbin.c:117
++#: gst/playback/gsturidecodebin.c:1472 gst/playback/gsturisourcebin.c:1597
+ #, c-format
+ msgid "Missing element '%s' - check your GStreamer installation."
-msgid ""
-"35 mm equivalent focal length of the lens used capturing the image, in mm"
-msgstr ""
-"Lunghezza focale equivalente a 35 mm delle lenti utilizzate per catturare "
-"un'immagine, in mm"
++msgstr "Elemento «%s» mancante - verificare la propria installazione di GStreamer."
+
++#: gst/playback/gstdecodebin2.c:1883 gst/playback/gstparsebin.c:1553
+ msgid "Could not determine type of stream"
+ msgstr "Impossibile determinare il tipo di flusso"
+
++#: gst/playback/gstdecodebin2.c:2861 gst/playback/gstparsebin.c:2385
+ msgid "This appears to be a text file"
+ msgstr "Questo sembra essere un file di testo"
+
++#: gst/playback/gstplaybin2.c:5452
+ msgid "Could not create \"uridecodebin\" element."
+ msgstr "Impossibile creare l'elemento «uridecodebin»."
+
++#: gst/playback/gstplaybin3.c:4558
+ msgid "Could not create \"uridecodebin3\" element."
+ msgstr "Impossibile creare l'elemento «uridecodebin3»."
+
++#: gst/playback/gstplaysink.c:2003
+ #, c-format
+ msgid "Both autovideosink and %s elements are missing."
+ msgstr "Risultano mancanti entrambi gli elementi «autovideosink» e «%s»."
+
++#: gst/playback/gstplaysink.c:2007
+ msgid "The autovideosink element is missing."
+ msgstr "Risulta mancante l'elemento «autovideosink»."
+
++#: gst/playback/gstplaysink.c:2012
+ #, c-format
+ msgid "Configured videosink %s is not working."
+ msgstr "L'elemento videosink «%s» non è operativo."
+
++#: gst/playback/gstplaysink.c:2016
+ #, c-format
+ msgid "Both autovideosink and %s elements are not working."
+ msgstr "Entrambi gli elementi «autovideosink» e «%s» non sono operativi."
+
++#: gst/playback/gstplaysink.c:2020
+ msgid "The autovideosink element is not working."
+ msgstr "L'elemento «autovideosink» non è operativo."
+
++#: gst/playback/gstplaysink.c:2525
+ msgid "Custom text sink element is not usable."
+ msgstr "L'elemento sink di testo personalizzato non è utilizzabile."
+
++#: gst/playback/gstplaysink.c:2903
+ msgid "No volume control found"
+ msgstr "Non è stato trovato alcun controllo del volume"
+
++#: gst/playback/gstplaysink.c:2933
+ #, c-format
+ msgid "Both autoaudiosink and %s elements are missing."
+ msgstr "Risultano mancanti entrambi gli elementi «autoaudiosink» e «%s»."
+
++#: gst/playback/gstplaysink.c:2937
+ msgid "The autoaudiosink element is missing."
+ msgstr "Risulta mancante l'elemento «autoaudiosink»."
+
++#: gst/playback/gstplaysink.c:2942
+ #, c-format
+ msgid "Configured audiosink %s is not working."
+ msgstr "L'elemento audiosink «%s» non è operativo."
+
++#: gst/playback/gstplaysink.c:2946
+ #, c-format
+ msgid "Both autoaudiosink and %s elements are not working."
+ msgstr "Entrambi gli elementi «autoaudiosink» e «%s» non sono operativi."
+
++#: gst/playback/gstplaysink.c:2950
+ msgid "The autoaudiosink element is not working."
+ msgstr "L'elemento «autoaudiosink» non è operativo."
+
++#: gst/playback/gstplaysink.c:3269 gst/playback/gstplaysink.c:3274
+ msgid "Can't play a text file without video or visualizations."
+ msgstr "Impossibile riprodurre un file di testo senza video o visualizzazioni."
+
++#: gst/playback/gsturidecodebin.c:921
+ #, c-format
+ msgid "No decoder available for type '%s'."
+ msgstr "Nessun decoder disponibile per il tipo «%s»."
+
++#: gst/playback/gsturidecodebin.c:1378 gst/playback/gsturisourcebin.c:1491
+ msgid "No URI specified to play from."
+ msgstr "Non è stato specificato alcun URI da cui riprodurre."
+
++#: gst/playback/gsturidecodebin.c:1384 gst/playback/gsturisourcebin.c:1497
+ #, c-format
+ msgid "Invalid URI \"%s\"."
+ msgstr "URI «%s» non valido."
+
++#: gst/playback/gsturidecodebin.c:1391 gst/playback/gsturisourcebin.c:1504
+ msgid "This stream type cannot be played yet."
+ msgstr "Non è ancora possibile riprodurre questo tipo di stream."
+
++#: gst/playback/gsturidecodebin.c:1409 gst/playback/gsturisourcebin.c:1522
+ #, c-format
+ msgid "No URI handler implemented for \"%s\"."
+ msgstr "Nessun gestore di URI implementato per «%s»."
+
++#: gst/playback/gsturidecodebin.c:2289 gst/playback/gsturisourcebin.c:2228
+ msgid "Source element is invalid."
+ msgstr "L'elemento sorgente non è valido."
+
++#: gst/tcp/gsttcpclientsink.c:216
+ #, c-format
+ msgid "Error while sending data to \"%s:%d\"."
+ msgstr "Errore durante l'invio dei dati a \"%s:%d\"."
+
+ # una parafrasi, ma mi pare non perda... -Luca
++#: gst-libs/gst/audio/gstaudiobasesrc.c:842
+ msgid "Can't record audio fast enough"
+ msgstr "Impossibile registrare l'audio a velocità adeguata"
+
++#: gst-libs/gst/audio/gstaudiocdsrc.c:1625
+ msgid "This CD has no audio tracks"
+ msgstr "Questo CD non presenta alcuna traccia audio"
+
++#: gst-libs/gst/pbutils/descriptions.c:89
+ msgid "ID3 tag"
+ msgstr "Tag ID3"
+
++#: gst-libs/gst/pbutils/descriptions.c:90
++#: gst-libs/gst/pbutils/descriptions.c:91
+ msgid "APE tag"
+ msgstr "Tag APE"
+
++#: gst-libs/gst/pbutils/descriptions.c:92
+ msgid "ICY internet radio"
+ msgstr "Radio internet ICY"
+
++#: gst-libs/gst/pbutils/descriptions.c:127
+ msgid "Apple Lossless Audio (ALAC)"
+ msgstr "ALAC (Apple Lossless Audio)"
+
++#: gst-libs/gst/pbutils/descriptions.c:143
+ msgid "Free Lossless Audio Codec (FLAC)"
+ msgstr "FLAC (Free Lossless Audio Codec)"
+
++#: gst-libs/gst/pbutils/descriptions.c:173
++#: gst-libs/gst/pbutils/descriptions.c:174
+ msgid "Lossless True Audio (TTA)"
+ msgstr "TTA (Lossless True Audio)"
+
++#: gst-libs/gst/pbutils/descriptions.c:182
+ msgid "Windows Media Speech"
+ msgstr "Windows Media Speech"
+
++#: gst-libs/gst/pbutils/descriptions.c:199
+ msgid "CYUV Lossless"
+ msgstr "CYUV senza perdita"
+
++#: gst-libs/gst/pbutils/descriptions.c:203
+ msgid "FFMpeg v1"
+ msgstr "FFMpeg v1"
+
++#: gst-libs/gst/pbutils/descriptions.c:217
+ msgid "Lossless MSZH"
+ msgstr "MSZH senza perdita"
+
+ # cfr http://en.wikipedia.org/wiki/Run_length_encoding
++#: gst-libs/gst/pbutils/descriptions.c:228
+ msgid "Run-length encoding"
+ msgstr "RLE (Run-Length Encoding)"
+
++#: gst-libs/gst/pbutils/descriptions.c:274
+ msgid "Timed Text"
+ msgstr "Testo temporizzato"
+
++#: gst-libs/gst/pbutils/descriptions.c:278
+ msgid "Subtitle"
+ msgstr "Sottotitolo"
+
++#: gst-libs/gst/pbutils/descriptions.c:279
+ msgid "MPL2 subtitle format"
+ msgstr "Sottotitoli formato MPL2"
+
++#: gst-libs/gst/pbutils/descriptions.c:280
+ msgid "DKS subtitle format"
+ msgstr "Sottotitoli formato DKS"
+
++#: gst-libs/gst/pbutils/descriptions.c:281
+ msgid "QTtext subtitle format"
+ msgstr "Sottotitoli formato QTtext"
+
+ # grazie San Google
++#: gst-libs/gst/pbutils/descriptions.c:282
+ msgid "Sami subtitle format"
+ msgstr "Sottotitoli formato SAMI"
+
++#: gst-libs/gst/pbutils/descriptions.c:283
+ msgid "TMPlayer subtitle format"
+ msgstr "Sottotitoli formato TMPlayer"
+
++#: gst-libs/gst/pbutils/descriptions.c:287
+ msgid "CEA 608 Closed Caption"
+ msgstr "Sottotitolo per non udenti CEA 608"
+
++#: gst-libs/gst/pbutils/descriptions.c:288
+ msgid "CEA 708 Closed Caption"
+ msgstr "Sottotitolo per non udenti CEA 708"
+
++#: gst-libs/gst/pbutils/descriptions.c:289
+ msgid "Kate subtitle format"
+ msgstr "Sottotitoli formato Kate"
+
++#: gst-libs/gst/pbutils/descriptions.c:290
+ msgid "WebVTT subtitle format"
+ msgstr "Sottotitoli formato WebVTT"
+
++#: gst-libs/gst/pbutils/descriptions.c:455
++#: gst-libs/gst/pbutils/descriptions.c:458
++#: gst-libs/gst/pbutils/descriptions.c:508
+ msgid "Uncompressed video"
+ msgstr "Video non compresso"
+
+ # (ndt) forse scala di grigi?
++#: gst-libs/gst/pbutils/descriptions.c:463
+ msgid "Uncompressed gray"
+ msgstr "Grigio non compresso"
+
++#: gst-libs/gst/pbutils/descriptions.c:486
+ #, c-format
+ msgid "Uncompressed packed YUV %s"
+ msgstr "YUV %s packed non compresso"
+
+ # cfr http://en.wikipedia.org/wiki/YUV
+ # http://support.microsoft.com/kb/281188/it
+ # http://support.microsoft.com/kb/294880/it (traduz automatica) :-(
+ # http://www.benis.it/dvd/agg3.htm
+ #
+ # Dal secondo e terzo, evinco YUV compresso o planare sarebbero
+ # buone abbreviazioni, ma tengo planare/packed + compresso/noncompresso
++#: gst-libs/gst/pbutils/descriptions.c:488
+ #, c-format
+ msgid "Uncompressed semi-planar YUV %s"
+ msgstr "YUV %s semi-planare non compresso"
+
+ # cfr http://en.wikipedia.org/wiki/YUV
+ # http://support.microsoft.com/kb/281188/it
+ # http://support.microsoft.com/kb/294880/it (traduz automatica) :-(
+ # http://www.benis.it/dvd/agg3.htm
+ #
+ # Dal secondo e terzo, evinco YUV compresso o planare sarebbero
+ # buone abbreviazioni, ma tengo planare/packed + compresso/noncompresso
++#: gst-libs/gst/pbutils/descriptions.c:490
+ #, c-format
+ msgid "Uncompressed planar YUV %s"
+ msgstr "YUV %s planare non compresso"
+
++#: gst-libs/gst/pbutils/descriptions.c:501
+ #, c-format
+ msgid "Uncompressed palettized %d-bit %s"
+ msgstr "%2$s %1$d-bit con tavolozza non compresso"
+
++#: gst-libs/gst/pbutils/descriptions.c:504
+ #, c-format
+ msgid "Uncompressed %d-bit %s"
+ msgstr "%2$s %1$d-bit non compresso"
+
++#: gst-libs/gst/pbutils/descriptions.c:586
+ #, c-format
+ msgid "DivX MPEG-4 Version %d"
+ msgstr "MPEG-4 DivX versione %d"
+
++#: gst-libs/gst/pbutils/descriptions.c:834
+ msgid "Uncompressed audio"
+ msgstr "Audio non compresso"
+
++#: gst-libs/gst/pbutils/descriptions.c:840
+ #, c-format
+ msgid "Raw %d-bit %s audio"
+ msgstr "Audio raw %2$s %1$d-bit"
+
++#: gst-libs/gst/pbutils/descriptions.c:942
+ msgid "Audio CD source"
+ msgstr "Sorgente CD audio"
+
++#: gst-libs/gst/pbutils/descriptions.c:945
+ msgid "DVD source"
+ msgstr "Sorgente DVD"
+
++#: gst-libs/gst/pbutils/descriptions.c:948
+ msgid "Real Time Streaming Protocol (RTSP) source"
+ msgstr "Sorgente protocollo RTSP (Real Time Streaming Protocol)"
+
++#: gst-libs/gst/pbutils/descriptions.c:952
+ msgid "Microsoft Media Server (MMS) protocol source"
+ msgstr "Sorgente protocollo MMS (Microsoft Media Server)"
+
++#: gst-libs/gst/pbutils/descriptions.c:960
+ #, c-format
+ msgid "%s protocol source"
+ msgstr "Sorgente protocollo %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1034
+ #, c-format
+ msgid "%s video RTP depayloader"
+ msgstr "Depayloader RTP %s video"
+
++#: gst-libs/gst/pbutils/descriptions.c:1036
+ #, c-format
+ msgid "%s audio RTP depayloader"
+ msgstr "Depayloader RTP %s audio"
+
++#: gst-libs/gst/pbutils/descriptions.c:1038
+ #, c-format
+ msgid "%s RTP depayloader"
+ msgstr "Depayloader RTP %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1045
+ #, c-format
+ msgid "%s demuxer"
+ msgstr "Demuxer %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1047
+ #, c-format
+ msgid "%s decoder"
+ msgstr "Decoder %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1086
+ #, c-format
+ msgid "%s video RTP payloader"
+ msgstr "Payloader RTP %s video"
+
++#: gst-libs/gst/pbutils/descriptions.c:1088
+ #, c-format
+ msgid "%s audio RTP payloader"
+ msgstr "Payloader RTP %s audio"
+
++#: gst-libs/gst/pbutils/descriptions.c:1090
+ #, c-format
+ msgid "%s RTP payloader"
+ msgstr "Payloader RTP %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1097
+ #, c-format
+ msgid "%s muxer"
+ msgstr "Muxer %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1099
+ #, c-format
+ msgid "%s encoder"
+ msgstr "Encoder %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1133
+ #, c-format
+ msgid "GStreamer element %s"
+ msgstr "Elemento GStreamer %s"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:558
+ msgid "Unknown source element"
+ msgstr "Elemento sorgente sconosciuto"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:561
+ msgid "Unknown sink element"
+ msgstr "Elemento sink sconosciuto"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:564
+ msgid "Unknown element"
+ msgstr "Elemento sconosciuto"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:567
+ msgid "Unknown decoder element"
+ msgstr "Elemento di decodifica sconosciuto"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:570
+ msgid "Unknown encoder element"
+ msgstr "Elemento di codifica sconosciuto"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:575
+ msgid "Plugin or element of unknown type"
+ msgstr "Plugin o elemento di tipo sconosciuto"
+
++#: gst-libs/gst/tag/gsttagdemux.c:1243
+ msgid "Failed to read tag: not enough data"
+ msgstr "Lettura del tag non riuscita: dati insufficienti"
+
++#: gst-libs/gst/tag/tags.c:77
+ msgid "track ID"
+ msgstr "ID traccia"
+
++#: gst-libs/gst/tag/tags.c:77
+ msgid "MusicBrainz track ID"
+ msgstr "ID MusicBrainz della traccia"
+
++#: gst-libs/gst/tag/tags.c:79
+ msgid "artist ID"
+ msgstr "ID artista"
+
++#: gst-libs/gst/tag/tags.c:79
+ msgid "MusicBrainz artist ID"
+ msgstr "ID MusicBrainz dell'artista"
+
++#: gst-libs/gst/tag/tags.c:81
+ msgid "album ID"
+ msgstr "ID album"
+
++#: gst-libs/gst/tag/tags.c:81
+ msgid "MusicBrainz album ID"
+ msgstr "ID MusicBrainz dell'album"
+
++#: gst-libs/gst/tag/tags.c:84
+ msgid "album artist ID"
+ msgstr "ID artista dell'album"
+
++#: gst-libs/gst/tag/tags.c:84
+ msgid "MusicBrainz album artist ID"
+ msgstr "ID MusicBrainz dell'artista dell'album"
+
++#: gst-libs/gst/tag/tags.c:86
+ msgid "track TRM ID"
+ msgstr "ID TRM della traccia"
+
++#: gst-libs/gst/tag/tags.c:86
+ msgid "MusicBrainz TRM ID"
+ msgstr "ID MusicBrainz del TRM"
+
++#: gst-libs/gst/tag/tags.c:108
+ msgid "capturing shutter speed"
+ msgstr "velocità di scatto di cattura"
+
++#: gst-libs/gst/tag/tags.c:109
+ msgid "Shutter speed used when capturing an image, in seconds"
+ msgstr "Velocità di scatto usata nel catturare un'immagine, in secondi"
+
++#: gst-libs/gst/tag/tags.c:112
+ msgid "capturing focal ratio"
+ msgstr "rapporto focale di cattura"
+
++#: gst-libs/gst/tag/tags.c:113
+ msgid "Focal ratio (f-number) used when capturing the image"
+ msgstr "Rapporto focale (numero f) usata nel catturare un'immagine"
+
++#: gst-libs/gst/tag/tags.c:116
+ msgid "capturing focal length"
+ msgstr "lunghezza focale di cattura"
+
++#: gst-libs/gst/tag/tags.c:117
+ msgid "Focal length of the lens used capturing the image, in mm"
+ msgstr "Lunghezza focale dell'obiettivo usato per catturare un'immagine, in mm"
+
++#: gst-libs/gst/tag/tags.c:121
+ msgid "capturing 35 mm equivalent focal length"
+ msgstr "lunghezza focale di cattura equivalente 35 mm"
+
-msgstr ""
-"La modalità di bilanciamento del bianco usata nel catturare un'immagine"
++#: gst-libs/gst/tag/tags.c:122
++msgid "35 mm equivalent focal length of the lens used capturing the image, in mm"
++msgstr "Lunghezza focale equivalente a 35 mm delle lenti utilizzate per catturare un'immagine, in mm"
+
++#: gst-libs/gst/tag/tags.c:126
+ msgid "capturing digital zoom ratio"
+ msgstr "rapporto ingrandimento digitale di cattura"
+
++#: gst-libs/gst/tag/tags.c:127
+ msgid "Digital zoom ratio used when capturing an image"
+ msgstr "Rapporto ingrandimento digitale usato nel catturare un'immagine"
+
++#: gst-libs/gst/tag/tags.c:130
+ msgid "capturing iso speed"
+ msgstr "velocità iso di cattura"
+
++#: gst-libs/gst/tag/tags.c:131
+ msgid "The ISO speed used when capturing an image"
+ msgstr "La velocità ISO usata nel catturare un'immagine"
+
++#: gst-libs/gst/tag/tags.c:134
+ msgid "capturing exposure program"
+ msgstr "programma esposizione di cattura"
+
++#: gst-libs/gst/tag/tags.c:135
+ msgid "The exposure program used when capturing an image"
+ msgstr "Il programma di esposizione usato nel catturare un'immagine"
+
++#: gst-libs/gst/tag/tags.c:138
+ msgid "capturing exposure mode"
+ msgstr "modalità esposizione di cattura"
+
++#: gst-libs/gst/tag/tags.c:139
+ msgid "The exposure mode used when capturing an image"
+ msgstr "La modalità di esposizione usata nel catturare un'immagine"
+
++#: gst-libs/gst/tag/tags.c:142
+ msgid "capturing exposure compensation"
+ msgstr "compensazione esposizione di cattura"
+
++#: gst-libs/gst/tag/tags.c:143
+ msgid "The exposure compensation used when capturing an image"
+ msgstr "La compensazione dell'esposizione usata nel catturare un'immagine"
+
++#: gst-libs/gst/tag/tags.c:146
+ msgid "capturing scene capture type"
+ msgstr "tipo di scena di cattura"
+
++#: gst-libs/gst/tag/tags.c:147
+ msgid "The scene capture mode used when capturing an image"
+ msgstr "Il tipo di scena usato nel catturare un'immagine"
+
++#: gst-libs/gst/tag/tags.c:150
+ msgid "capturing gain adjustment"
+ msgstr "regolazione guadagno di cattura"
+
++#: gst-libs/gst/tag/tags.c:151
+ msgid "The overall gain adjustment applied on an image"
+ msgstr "La regolazione del guadagno applicata a un'immagine"
+
++#: gst-libs/gst/tag/tags.c:154
+ msgid "capturing white balance"
+ msgstr "bilanciamento bianco di cattura"
+
++#: gst-libs/gst/tag/tags.c:155
+ msgid "The white balance mode set when capturing an image"
-msgstr ""
-"La direzione di elaborazione del contrasto applicata nella cattura di "
-"un'immagine"
++msgstr "La modalità di bilanciamento del bianco usata nel catturare un'immagine"
+
++#: gst-libs/gst/tag/tags.c:158
+ msgid "capturing contrast"
+ msgstr "contrasto di cattura"
+
++#: gst-libs/gst/tag/tags.c:159
+ msgid "The direction of contrast processing applied when capturing an image"
-msgstr ""
-"La direzione di elaborazione della saturazione applicata nella cattura di "
-"un'immagine"
++msgstr "La direzione di elaborazione del contrasto applicata nella cattura di un'immagine"
+
++#: gst-libs/gst/tag/tags.c:163
+ msgid "capturing saturation"
+ msgstr "saturazione di cattura"
+
++#: gst-libs/gst/tag/tags.c:164
+ msgid "The direction of saturation processing applied when capturing an image"
-msgstr ""
-"La direzione di elaborazione della nitidezza applicata nella cattura di "
-"un'immagine"
++msgstr "La direzione di elaborazione della saturazione applicata nella cattura di un'immagine"
+
++#: gst-libs/gst/tag/tags.c:168
+ msgid "capturing sharpness"
+ msgstr "nitidezza di cattura"
+
++#: gst-libs/gst/tag/tags.c:169
+ msgid "The direction of sharpness processing applied when capturing an image"
-msgid ""
-"The metering mode used while determining exposure for capturing an image"
-msgstr ""
-"La modalità di misurazione dell'esposizione usata nel catturare un'immagine"
++msgstr "La direzione di elaborazione della nitidezza applicata nella cattura di un'immagine"
+
++#: gst-libs/gst/tag/tags.c:173
+ msgid "capturing flash fired"
+ msgstr "flash utilizzato in cattura"
+
++#: gst-libs/gst/tag/tags.c:174
+ msgid "If the flash fired while capturing an image"
+ msgstr "Indica se è stato usato il flash nel catturare un'immagine"
+
++#: gst-libs/gst/tag/tags.c:177
+ msgid "capturing flash mode"
+ msgstr "modalità flash di cattura"
+
++#: gst-libs/gst/tag/tags.c:178
+ msgid "The selected flash mode while capturing an image"
+ msgstr "La modalità selezionata del flash nel catturare un'immagine"
+
++#: gst-libs/gst/tag/tags.c:181
+ msgid "capturing metering mode"
+ msgstr "modalità misurazione esposizione in cattura"
+
-msgstr ""
-"Densità pixel orizzontale in ppi per elementi multimediali (image/video)"
++#: gst-libs/gst/tag/tags.c:182
++msgid "The metering mode used while determining exposure for capturing an image"
++msgstr "La modalità di misurazione dell'esposizione usata nel catturare un'immagine"
+
++#: gst-libs/gst/tag/tags.c:186
+ msgid "capturing source"
+ msgstr "sorgente di cattura"
+
++#: gst-libs/gst/tag/tags.c:187
+ msgid "The source or type of device used for the capture"
+ msgstr "La sorgente o il tipo di dispositivo usato per la cattura"
+
++#: gst-libs/gst/tag/tags.c:190
+ msgid "image horizontal ppi"
+ msgstr "ppi orizzontale immagine"
+
++#: gst-libs/gst/tag/tags.c:191
+ msgid "Media (image/video) intended horizontal pixel density in ppi"
-msgid ""
-"Don't exit after showing the initial device list, but wait for devices to "
-"added/removed."
-msgstr ""
-"Non esce dopo aver mostrato l'elenco dispositivi, ma attende dispositivi "
-"aggiunti/rimossi"
++msgstr "Densità pixel orizzontale in ppi per elementi multimediali (image/video)"
+
++#: gst-libs/gst/tag/tags.c:194
+ msgid "image vertical ppi"
+ msgstr "ppi verticale immagine"
+
++#: gst-libs/gst/tag/tags.c:195
+ msgid "Media (image/video) intended vertical pixel density in ppi"
+ msgstr "Densità pixel verticale in ppi per elementi multimediali (image/video)"
+
++#: gst-libs/gst/tag/tags.c:198
+ msgid "ID3v2 frame"
+ msgstr "Frame ID3v2"
+
++#: gst-libs/gst/tag/tags.c:198
+ msgid "unparsed id3v2 tag frame"
+ msgstr "Tag frame id3v2 non analizzato"
+
++#: gst-libs/gst/tag/tags.c:202
+ msgid "musical-key"
+ msgstr "chiave musicale"
+
++#: gst-libs/gst/tag/tags.c:202
+ msgid "Initial key in which the sound starts"
+ msgstr "Chiave iniziale con cui inizia l'audio"
+
++#: tools/gst-device-monitor.c:260 tools/gst-play.c:1444
+ msgid "Print version information and exit"
+ msgstr "Stampa informazioni di versione ed esce"
+
-msgstr ""
-"Controlla il comportamento di riproduzione impostando la proprietà «flags» "
-"di playbin"
++#: tools/gst-device-monitor.c:262
++msgid "Don't exit after showing the initial device list, but wait for devices to added/removed."
++msgstr "Non esce dopo aver mostrato l'elenco dispositivi, ma attende dispositivi aggiunti/rimossi"
+
++#: tools/gst-play.c:310
+ #, c-format
+ msgid "Volume: %.0f%%"
+ msgstr "Volume: %.0f%%"
+
++#: tools/gst-play.c:349
+ msgid "Buffering..."
+ msgstr "Riempimento buffer..."
+
++#: tools/gst-play.c:370
+ msgid "Clock lost, selecting a new one\n"
+ msgstr "Orologio perso, ne viene selezionato uno nuovo\n"
+
++#: tools/gst-play.c:400 tools/gst-play.c:446 tools/gst-play.c:883
++#: tools/gst-play.c:1347
+ msgid "Reached end of play list."
+ msgstr "Raggiunta la fine della coda di riproduzione."
+
++#: tools/gst-play.c:613
+ msgid "Paused"
+ msgstr "In pausa"
+
++#: tools/gst-play.c:671
+ #, c-format
+ msgid "Now playing %s\n"
+ msgstr "Ora in riproduzione %s\n"
+
++#: tools/gst-play.c:734
+ #, c-format
+ msgid "About to finish, preparing next title: %s"
+ msgstr "Quasi finito, preparazione del prossimo titolo: %s"
+
++#: tools/gst-play.c:979
+ #, c-format
+ msgid "Playback rate: %.2f"
+ msgstr "Velocità riproduzione: %.2f"
+
++#: tools/gst-play.c:983
+ #, c-format
+ msgid "Could not change playback rate to %.2f"
+ msgstr "Impossibile cambiare la velocità di riproduzione a %.2f"
+
++#: tools/gst-play.c:1287
+ msgid "space"
+ msgstr "spazio"
+
++#: tools/gst-play.c:1287
+ msgid "pause/unpause"
+ msgstr "pausa/riproduci"
+
++#: tools/gst-play.c:1288
+ msgid "q or ESC"
+ msgstr "q o Esc"
+
++#: tools/gst-play.c:1288
+ msgid "quit"
+ msgstr "esci"
+
++#: tools/gst-play.c:1289
+ msgid "> or n"
+ msgstr "> o n"
+
++#: tools/gst-play.c:1289
+ msgid "play next"
+ msgstr "riproduci successivo"
+
++#: tools/gst-play.c:1290
+ msgid "< or b"
+ msgstr "< o b"
+
++#: tools/gst-play.c:1290
+ msgid "play previous"
+ msgstr "riproduci precedente"
+
++#: tools/gst-play.c:1291
+ msgid "seek forward"
+ msgstr "posiziona avanti"
+
++#: tools/gst-play.c:1292
+ msgid "seek backward"
+ msgstr "posiziona indietro"
+
++#: tools/gst-play.c:1293
+ msgid "volume up"
+ msgstr "alza volume"
+
++#: tools/gst-play.c:1294
+ msgid "volume down"
+ msgstr "abbassa volume"
+
++#: tools/gst-play.c:1295
+ msgid "increase playback rate"
+ msgstr "aumenta velocità riproduzione"
+
++#: tools/gst-play.c:1296
+ msgid "decrease playback rate"
+ msgstr "diminuisci velocità riproduzione"
+
++#: tools/gst-play.c:1297
+ msgid "change playback direction"
+ msgstr "cambia direzione di riproduzione"
+
++#: tools/gst-play.c:1298
+ msgid "enable/disable trick modes"
+ msgstr "abilita/disabilita le modalità trick"
+
++#: tools/gst-play.c:1299
+ msgid "change audio track"
+ msgstr "cambia traccia audio"
+
++#: tools/gst-play.c:1300
+ msgid "change video track"
+ msgstr "cambia traccia video"
+
++#: tools/gst-play.c:1301
+ msgid "change subtitle track"
+ msgstr "cambia traccia sottotitoli"
+
++#: tools/gst-play.c:1302
+ msgid "seek to beginning"
+ msgstr "vai all'inizio"
+
++#: tools/gst-play.c:1303
+ msgid "show keyboard shortcuts"
+ msgstr "mostra scorciatoie da tastiera"
+
++#: tools/gst-play.c:1306
+ msgid "Interactive mode - keyboard controls:"
+ msgstr "Modalità interattiva - controlli da tastiera:"
+
++#: tools/gst-play.c:1439
+ msgid "Output status information and property notifications"
+ msgstr "Stampa informazioni di stato e notifiche delle proprietà"
+
++#: tools/gst-play.c:1441
+ msgid "Control playback behaviour setting playbin 'flags' property"
-
-#~ msgid "Master"
-#~ msgstr "Principale"
-
-#~ msgid "Bass"
-#~ msgstr "Bassi"
-
-#~ msgid "Treble"
-#~ msgstr "Acuti"
-
-#~ msgid "PCM"
-#~ msgstr "PCM"
-
-#~ msgid "Synth"
-#~ msgstr "Sintetiz"
-
-#~ msgid "Line-in"
-#~ msgstr "Linea in"
-
-#~ msgid "CD"
-#~ msgstr "CD"
-
-#~ msgid "Microphone"
-#~ msgstr "Microfono"
-
-#~ msgid "PC Speaker"
-#~ msgstr "Altoparlanti PC"
-
-#~ msgid "Playback"
-#~ msgstr "Riproduzione"
-
-#~ msgid "Capture"
-#~ msgstr "Cattura"
-
-#~ msgid "Could not open vfs file \"%s\" for writing: %s."
-#~ msgstr "Impossibile aprire il file vfs «%s» in scrittura: %s."
-
-#~ msgid "No filename given"
-#~ msgstr "Nessun nome di file fornito"
-
-#~ msgid "Could not close vfs file \"%s\"."
-#~ msgstr "Impossibile chiudere il file vfs «%s»."
-
-#~ msgid "Error while writing to file \"%s\"."
-#~ msgstr "Errore durante la scrittura sul file «%s»."
-
-#~ msgid "Internal data stream error."
-#~ msgstr "Errore interno nel flusso di dati."
-
-#~ msgid "A %s plugin is required to play this stream, but not installed."
-#~ msgstr ""
-#~ "Per riprodurre questo stream è richiesto un plugin %s, che però non "
-#~ "risulta installato. "
-
-#~ msgid "Invalid subtitle URI \"%s\", subtitles disabled."
-#~ msgstr "URI dei sottotitoli «%s» non valido, sottotitoli disabilitati."
-
-#~ msgid "RTSP streams cannot be played yet."
-#~ msgstr "Non è ancora possibile riprodurre gli stream RTSP."
-
-#~ msgid ""
-#~ "Only a subtitle stream was detected. Either you are loading a subtitle "
-#~ "file or some other type of text file, or the media file was not "
-#~ "recognized."
-#~ msgstr ""
-#~ "È stato rilevato unicamente uno stream di sottotitoli. Ciò significa che "
-#~ "si sta caricando un file di sottotitoli o un altro tipo di file di testo, "
-#~ "oppure che il file multimediale non è stato riconosciuto."
-
-#~ msgid ""
-#~ "You do not have a decoder installed to handle this file. You might need "
-#~ "to install the necessary plugins."
-#~ msgstr ""
-#~ "Non risulta installato alcun decoder in grado di gestire questo file. "
-#~ "Potrebbe essere necessario installare gli opportuni plugin."
-
-#~ msgid "This is not a media file"
-#~ msgstr "Questo non è un file multimediale"
-
-#~ msgid "A subtitle stream was detected, but no video stream."
-#~ msgstr ""
-#~ "È stato rilevato uno stream di sottotitoli, ma nessuno stream video."
-
-#~ msgid "Both autovideosink and xvimagesink elements are missing."
-#~ msgstr ""
-#~ "Risultano mancanti entrambi gli elementi «autovideosink» e «xvimagesink»."
-
-#~ msgid "Both autoaudiosink and alsasink elements are missing."
-#~ msgstr ""
-#~ "Risultano mancanti entrambi gli elementi «autoaudiosink» e «alsasink»."
-
-#~ msgid "Error while sending gdp header data to \"%s:%d\"."
-#~ msgstr "Errore durante l'invio di dati header gdp a \"%s:%d\"."
-
-#~ msgid "Error while sending gdp payload data to \"%s:%d\"."
-#~ msgstr "Errore durante l'invio di dati payload gdp a \"%s:%d\"."
-
-#~ msgid "Connection to %s:%d refused."
-#~ msgstr "Connessione a %s:%d rifiutata."
-
-#~ msgid "Uncompressed planar YVU 4:2:0"
-#~ msgstr "YVU 4:2:0 planare non compresso"
-
-#~ msgid "Uncompressed packed YUV 4:1:0"
-#~ msgstr "YUV 4:1:0 packed non compresso"
-
-#~ msgid "Uncompressed packed YVU 4:1:0"
-#~ msgstr "YVU 4:1:0 packed non compresso"
-
-#~ msgid "Uncompressed packed YUV 4:1:1"
-#~ msgstr "YUV 4:1:1 packed non compresso"
-
-#~ msgid "Uncompressed packed YUV 4:4:4"
-#~ msgstr "YUV 4:4:4 packed non compresso"
-
-#~ msgid "Uncompressed planar YUV 4:2:2"
-#~ msgstr "YUV 4:2:2 planare non compresso"
-
-#~ msgid "Uncompressed planar YUV 4:1:1"
-#~ msgstr "YUV 4:1:1 planare non compresso"
-
-#~ msgid "Uncompressed black and white Y-plane"
-#~ msgstr "Bianco e nero Y-plane non compresso"
-
-#~ msgid "Raw PCM audio"
-#~ msgstr "Audio raw PCM"
-
-#~ msgid "Raw %d-bit floating-point audio"
-#~ msgstr "Audio raw floating-point %d-bit"
-
-#~ msgid "Raw floating-point audio"
-#~ msgstr "Audio raw floating-point"
-
-#~ msgid "No device specified."
-#~ msgstr "Nessun device specificato."
-
-#~ msgid "Device \"%s\" does not exist."
-#~ msgstr "Il device «%s» non esiste."
-
-#~ msgid "Device \"%s\" is already being used."
-#~ msgstr "Il device «%s» è già in uso."
-
-#~ msgid "Could not open device \"%s\" for reading and writing."
-#~ msgstr "Impossibile aprire il device «%s» in lettura e scrittura."
++msgstr "Controlla il comportamento di riproduzione impostando la proprietà «flags» di playbin"
+
++#: tools/gst-play.c:1446
+ msgid "Video sink to use (default is autovideosink)"
+ msgstr "Sink video da usare (predefinito autovideosink)"
+
++#: tools/gst-play.c:1448
+ msgid "Audio sink to use (default is autoaudiosink)"
+ msgstr "Sink audio da usare (predefinito autoaudiosink)"
+
++#: tools/gst-play.c:1450
+ msgid "Enable gapless playback"
+ msgstr "Abilita riproduzione senza interruzioni"
+
++#: tools/gst-play.c:1452
+ msgid "Shuffle playlist"
+ msgstr "Mischia playlist"
+
++#: tools/gst-play.c:1455
+ msgid "Disable interactive control via the keyboard"
+ msgstr "Disabilita il controllo interattivo da tastiera"
+
++#: tools/gst-play.c:1457
+ msgid "Volume"
+ msgstr "Volume"
+
++#: tools/gst-play.c:1459
+ msgid "Playlist file containing input media files"
+ msgstr "File playlist contenente i file multimediali di ingresso"
+
++#: tools/gst-play.c:1461
+ msgid "Do not print any output (apart from errors)"
+ msgstr "Non stampare nulla (a parte gli errori)"
+
++#: tools/gst-play.c:1463
+ msgid "Use playbin3 pipeline"
+ msgstr "Usa pipeline playbin3"
+
++#: tools/gst-play.c:1464
+ msgid "(default varies depending on 'USE_PLAYBIN' env variable)"
+ msgstr "(il predefinito varia in base alla variabile USE_PLAYBIN)"
+
++#: tools/gst-play.c:1468
++msgid "Keep showing the last frame on EOS until quit or playlist change command (gapless is ignored)"
++msgstr "Continua a mostrare l'ultimo fotogramma su EOS fino all'uscita o al comando di modifica della playlist (gapless viene ignorato)"
++
++#: tools/gst-play.c:1546
+ #, c-format
+ msgid "Usage: %s FILE1|URI1 [FILE2|URI2] [FILE3|URI3] ..."
+ msgstr "Utilizzo: %s FILE1|URI1 [FILE2|URI2] [FILE3|URI3] ..."
+
++#: tools/gst-play.c:1550
+ msgid "You must provide at least one filename or URI to play."
+ msgstr "È necessario fornire almeno un nome file o un URI da riprodurre."
+
++#: tools/gst-play.c:1590
+ msgid "Press 'k' to see a list of keyboard shortcuts.\n"
+ msgstr "Premere «k» per consultare le scorciatoie da tastiera.\n"
--- /dev/null
-# Johnny A. Solbu <johnny@solbu.net>, 2012-2017
+ # Norwegian bokmaal translation of gst-utils.
+ # This file is put in the public domain.
+ #
+ # Kjartan Maraas <kmaraas@gnome.org>, 2004-2010.
-"Project-Id-Version: gst-plugins-base 1.10.0\n"
++# Johnny A. Solbu <johnny@solbu.net>, 2012-2019
+ #
+ msgid ""
+ msgstr ""
-"POT-Creation-Date: 2019-02-26 11:43+0000\n"
-"PO-Revision-Date: 2017-01-05 01:29+0100\n"
++"Project-Id-Version: gst-plugins-base 1.16.0\n"
+ "Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-msgid ""
-"Could not open audio device for playback. Device is being used by another "
-"application."
-msgstr ""
-"Kunne ikke åpne lydenheten for avspilling. Enheten brukes av et annet "
-"program."
++"POT-Creation-Date: 2019-04-19 00:20+0100\n"
++"PO-Revision-Date: 2019-09-03 09:24+0200\n"
+ "Last-Translator: Johnny A. Solbu <johnny@solbu.net>\n"
+ "Language-Team: Norwegian Bokmaal <i18n-nb@lister.ping.uio.no>\n"
+ "Language: nb_NO\n"
+ "MIME-Version: 1.0\n"
+ "Content-Type: text/plain; charset=UTF-8\n"
+ "Content-Transfer-Encoding: 8bit\n"
+ "X-Bugs: Report translation errors to the Language-Team address.\n"
+ "Plural-Forms: nplurals=2; plural=(n != 1);\n"
+ "X-Generator: Poedit 1.8.7.1\n"
+
++#: ext/alsa/gstalsasink.c:579
+ msgid "Could not open device for playback in mono mode."
+ msgstr "Kunne ikke åpne enheten for avspilling i mono-modus."
+
++#: ext/alsa/gstalsasink.c:581
+ msgid "Could not open device for playback in stereo mode."
+ msgstr "Kunne ikke åpne enheten for avspilling i stereo-modus."
+
++#: ext/alsa/gstalsasink.c:585
+ #, c-format
+ msgid "Could not open device for playback in %d-channel mode."
+ msgstr "Kunne ikke åpne enhet for avspilling i %d-kanalmodus."
+
-msgid ""
-"Could not open audio device for recording. Device is being used by another "
-"application."
-msgstr ""
-"Kunne ikke åpne lydenheten for opptak. Enheten brukes av et annet program."
++#: ext/alsa/gstalsasink.c:858
++msgid "Could not open audio device for playback. Device is being used by another application."
++msgstr "Kunne ikke åpne lydenheten for avspilling. Enheten brukes av et annet program."
+
++#: ext/alsa/gstalsasink.c:863
+ msgid "Could not open audio device for playback."
+ msgstr "Kunne ikke åpne lydenheten for avspilling."
+
++#: ext/alsa/gstalsasink.c:1076
+ msgid "Error outputting to audio device. The device has been disconnected."
+ msgstr "Feil ved sending til lydenhet. Enheten er frakoblet."
+
++#: ext/alsa/gstalsasrc.c:444
+ msgid "Could not open device for recording in mono mode."
+ msgstr "Kunne ikke åpne enhet for opptak i monomodus."
+
++#: ext/alsa/gstalsasrc.c:446
+ msgid "Could not open device for recording in stereo mode."
+ msgstr "Kunne ikke åpne enhet for opptak i stereomodus."
+
++#: ext/alsa/gstalsasrc.c:450
+ #, c-format
+ msgid "Could not open device for recording in %d-channel mode"
+ msgstr "Kunne ikke åpne enhet for opptak i %d-kanalmodus."
+
-msgstr ""
++#: ext/alsa/gstalsasrc.c:741
++msgid "Could not open audio device for recording. Device is being used by another application."
++msgstr "Kunne ikke åpne lydenheten for opptak. Enheten brukes av et annet program."
+
++#: ext/alsa/gstalsasrc.c:746
+ msgid "Could not open audio device for recording."
+ msgstr "Kunne ikke åpne lydenheten for opptak."
+
++#: ext/alsa/gstalsasrc.c:992
+ msgid "Error recording from audio device. The device has been disconnected."
+ msgstr "Feil ved opptak fra lydenhet. Enheten er frakoblet."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:277 ext/cdparanoia/gstcdparanoiasrc.c:283
+ msgid "Could not open CD device for reading."
+ msgstr "Kunne ikke åpne CD-enheten for lesing."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:409
+ msgid "Could not seek CD."
+ msgstr "Kunne ikke søke på CD."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:417
+ msgid "Could not read CD."
+ msgstr "Kunne ikke lese CD."
+
++#: ext/gl/gstgltestsrc.c:491
+ msgid "failed to draw pattern"
-msgstr ""
++msgstr "Kunne ikke tegne mønster"
+
++#: ext/gl/gstgltestsrc.c:492
+ msgid "A GL error occurred"
-msgstr ""
-
++msgstr "En GL-feil oppsto"
+
++#: ext/gl/gstgltestsrc.c:498
+ msgid "format wasn't negotiated before get function"
-#, fuzzy
++msgstr "formatet ble ikke forhandlet før get-funksjon"
++
++#: gst/encoding/gstencodebin.c:1636 gst/playback/gstplaybin2.c:3399
++#: gst/playback/gstplaysink.c:1522 gst/playback/gstplaysink.c:1535
++#: gst/playback/gstplaysink.c:1872 gst/playback/gstplaysink.c:1904
++#: gst/playback/gstplaysink.c:2488 gst/playback/gstplaysink.c:2537
++#: gst/playback/gstplaysink.c:2552 gst/playback/gstplaysink.c:2577
++#: gst/playback/gstplaysink.c:2609 gst/playback/gstplaysink.c:2757
++#: gst/playback/gstplaysink.c:2788 gst/playback/gstplaysink.c:3166
++#: gst/playback/gstplaysink.c:3175 gst/playback/gstplaysink.c:3184
++#: gst/playback/gstplaysink.c:3193 gst/playback/gstplaysink.c:3606
++#: gst/playback/gstplaysink.c:4526 gst/playback/gstplaysinkconvertbin.c:97
++#: gst/playback/gstplaysinkconvertbin.c:117
++#: gst/playback/gsturidecodebin.c:1472 gst/playback/gsturisourcebin.c:1597
+ #, c-format
+ msgid "Missing element '%s' - check your GStreamer installation."
+ msgstr "Mangler elementet «%s» - kontroller GStreamer-installasjonen."
+
++#: gst/playback/gstdecodebin2.c:1883 gst/playback/gstparsebin.c:1553
+ msgid "Could not determine type of stream"
+ msgstr "Kunne ikke bestemme type strøm."
+
++#: gst/playback/gstdecodebin2.c:2861 gst/playback/gstparsebin.c:2385
+ msgid "This appears to be a text file"
+ msgstr "Dette ser ut som en tekstfil"
+
++#: gst/playback/gstplaybin2.c:5452
+ msgid "Could not create \"uridecodebin\" element."
+ msgstr "Kunne ikke opprette elementet «uridecodebin»"
+
-msgstr "Kunne ikke opprette elementet «uridecodebin»"
++#: gst/playback/gstplaybin3.c:4558
+ msgid "Could not create \"uridecodebin3\" element."
-msgstr ""
++msgstr "Kunne ikke opprette elementet «uridecodebin3»"
+
++#: gst/playback/gstplaysink.c:2003
+ #, c-format
+ msgid "Both autovideosink and %s elements are missing."
+ msgstr "Både autobildesluk- og %s-elementer mangler."
+
++#: gst/playback/gstplaysink.c:2007
+ msgid "The autovideosink element is missing."
+ msgstr "Autobildeslukelementet mangler."
+
++#: gst/playback/gstplaysink.c:2012
+ #, c-format
+ msgid "Configured videosink %s is not working."
+ msgstr "Konfigurert bildesluk %s fungerer ikke."
+
++#: gst/playback/gstplaysink.c:2016
+ #, c-format
+ msgid "Both autovideosink and %s elements are not working."
+ msgstr "Både autobildesluk- og %s-elementer virker ikke."
+
++#: gst/playback/gstplaysink.c:2020
+ msgid "The autovideosink element is not working."
+ msgstr "autobildeslukelementet fungerer ikke."
+
++#: gst/playback/gstplaysink.c:2525
+ msgid "Custom text sink element is not usable."
+ msgstr "Tilpasset tekstslukelement er ikke brukbart."
+
++#: gst/playback/gstplaysink.c:2903
+ msgid "No volume control found"
+ msgstr "Fant ikke volumkontroll"
+
++#: gst/playback/gstplaysink.c:2933
+ #, c-format
+ msgid "Both autoaudiosink and %s elements are missing."
+ msgstr "Både autolydsluk- og %s-elementer mangler."
+
++#: gst/playback/gstplaysink.c:2937
+ msgid "The autoaudiosink element is missing."
+ msgstr "Autolydslukelementet mangler"
+
++#: gst/playback/gstplaysink.c:2942
+ #, c-format
+ msgid "Configured audiosink %s is not working."
+ msgstr "Konfigurert lydsluk %s fungerer ikke."
+
++#: gst/playback/gstplaysink.c:2946
+ #, c-format
+ msgid "Both autoaudiosink and %s elements are not working."
+ msgstr "Både autolydsluk- og %s-elementer virker ikke."
+
++#: gst/playback/gstplaysink.c:2950
+ msgid "The autoaudiosink element is not working."
+ msgstr "Autolydslukelementet fungerer ikke."
+
++#: gst/playback/gstplaysink.c:3269 gst/playback/gstplaysink.c:3274
+ msgid "Can't play a text file without video or visualizations."
+ msgstr "Kan ikke spille av en tekstfil uten film eller visualiseringer"
+
++#: gst/playback/gsturidecodebin.c:921
+ #, c-format
+ msgid "No decoder available for type '%s'."
+ msgstr "Ingen dekoder tilgjengelig for type «%s»."
+
++#: gst/playback/gsturidecodebin.c:1378 gst/playback/gsturisourcebin.c:1491
+ msgid "No URI specified to play from."
+ msgstr "Ingen URI for avspilling oppgitt."
+
++#: gst/playback/gsturidecodebin.c:1384 gst/playback/gsturisourcebin.c:1497
+ #, c-format
+ msgid "Invalid URI \"%s\"."
+ msgstr "Ugyldig URI «%s»."
+
++#: gst/playback/gsturidecodebin.c:1391 gst/playback/gsturisourcebin.c:1504
+ msgid "This stream type cannot be played yet."
+ msgstr "Denne strømtypen kan ikke spilles ennå."
+
++#: gst/playback/gsturidecodebin.c:1409 gst/playback/gsturisourcebin.c:1522
+ #, c-format
+ msgid "No URI handler implemented for \"%s\"."
+ msgstr "Ingen URI-håndterer er implementert for «%s»."
+
++#: gst/playback/gsturidecodebin.c:2289 gst/playback/gsturisourcebin.c:2228
+ msgid "Source element is invalid."
+ msgstr "Kildeelement er ugyldig."
+
++#: gst/tcp/gsttcpclientsink.c:216
+ #, c-format
+ msgid "Error while sending data to \"%s:%d\"."
+ msgstr "Feil ved sending av data til «%s:%d»."
+
++#: gst-libs/gst/audio/gstaudiobasesrc.c:842
+ msgid "Can't record audio fast enough"
+ msgstr "Kan ikke ta opp lyd rask nok"
+
++#: gst-libs/gst/audio/gstaudiocdsrc.c:1625
+ msgid "This CD has no audio tracks"
+ msgstr "Denne CDen har ingen lydspor"
+
++#: gst-libs/gst/pbutils/descriptions.c:89
+ msgid "ID3 tag"
+ msgstr "ID3-merke"
+
++#: gst-libs/gst/pbutils/descriptions.c:90
++#: gst-libs/gst/pbutils/descriptions.c:91
+ msgid "APE tag"
+ msgstr "APE-merke"
+
++#: gst-libs/gst/pbutils/descriptions.c:92
+ msgid "ICY internet radio"
+ msgstr "ICY internettradio"
+
++#: gst-libs/gst/pbutils/descriptions.c:127
+ msgid "Apple Lossless Audio (ALAC)"
+ msgstr "Apple Lossless Audio (ALAC)"
+
++#: gst-libs/gst/pbutils/descriptions.c:143
+ msgid "Free Lossless Audio Codec (FLAC)"
+ msgstr "Free Lossless Audio Codec (FLAC)"
+
++#: gst-libs/gst/pbutils/descriptions.c:173
++#: gst-libs/gst/pbutils/descriptions.c:174
+ msgid "Lossless True Audio (TTA)"
+ msgstr "Tapsfri True Audio (TTA)"
+
++#: gst-libs/gst/pbutils/descriptions.c:182
+ msgid "Windows Media Speech"
+ msgstr "Windows Mediatale"
+
++#: gst-libs/gst/pbutils/descriptions.c:199
+ msgid "CYUV Lossless"
+ msgstr "CYUV tapsfritt"
+
++#: gst-libs/gst/pbutils/descriptions.c:203
+ msgid "FFMpeg v1"
+ msgstr "FFMpeg v1"
+
++#: gst-libs/gst/pbutils/descriptions.c:217
+ msgid "Lossless MSZH"
+ msgstr "Tapsfritt MSZH"
+
++#: gst-libs/gst/pbutils/descriptions.c:228
+ msgid "Run-length encoding"
+ msgstr "Kjørelengde-koding"
+
++#: gst-libs/gst/pbutils/descriptions.c:274
+ msgid "Timed Text"
+ msgstr "Tidsbestemt tekst"
+
++#: gst-libs/gst/pbutils/descriptions.c:278
+ msgid "Subtitle"
+ msgstr "Undertekst"
+
++#: gst-libs/gst/pbutils/descriptions.c:279
+ msgid "MPL2 subtitle format"
+ msgstr "MPL2 undertekstformat"
+
++#: gst-libs/gst/pbutils/descriptions.c:280
+ msgid "DKS subtitle format"
+ msgstr "DKS undertekstformat"
+
++#: gst-libs/gst/pbutils/descriptions.c:281
+ msgid "QTtext subtitle format"
+ msgstr "QTtext undertekstformat"
+
++#: gst-libs/gst/pbutils/descriptions.c:282
+ msgid "Sami subtitle format"
+ msgstr "Samisk undertekstformat"
+
++#: gst-libs/gst/pbutils/descriptions.c:283
+ msgid "TMPlayer subtitle format"
+ msgstr "TMPlayer undertekstformat"
+
++#: gst-libs/gst/pbutils/descriptions.c:287
+ msgid "CEA 608 Closed Caption"
-msgstr ""
++msgstr "CEA 608 teksting"
+
++#: gst-libs/gst/pbutils/descriptions.c:288
+ msgid "CEA 708 Closed Caption"
-msgid ""
-"35 mm equivalent focal length of the lens used capturing the image, in mm"
-msgstr ""
-"35 mm ekvivalent brennvidde til objektivet som brukes når du tar bildet, i mm"
++msgstr "CEA 708 teksting"
+
++#: gst-libs/gst/pbutils/descriptions.c:289
+ msgid "Kate subtitle format"
+ msgstr "Kate undertekstformat"
+
++#: gst-libs/gst/pbutils/descriptions.c:290
+ msgid "WebVTT subtitle format"
+ msgstr "WebVTT undertekstformat"
+
++#: gst-libs/gst/pbutils/descriptions.c:455
++#: gst-libs/gst/pbutils/descriptions.c:458
++#: gst-libs/gst/pbutils/descriptions.c:508
+ msgid "Uncompressed video"
+ msgstr "Ukomprimert video"
+
++#: gst-libs/gst/pbutils/descriptions.c:463
+ msgid "Uncompressed gray"
+ msgstr "Ukomprimert grå"
+
++#: gst-libs/gst/pbutils/descriptions.c:486
+ #, c-format
+ msgid "Uncompressed packed YUV %s"
+ msgstr "Ukomprimert pakket YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:488
+ #, c-format
+ msgid "Uncompressed semi-planar YUV %s"
+ msgstr "Ukomprimert semi-planar YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:490
+ #, c-format
+ msgid "Uncompressed planar YUV %s"
+ msgstr "Ukomprimert planar YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:501
+ #, c-format
+ msgid "Uncompressed palettized %d-bit %s"
+ msgstr "Ukomprimert palettopmimert %d-bit %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:504
+ #, c-format
+ msgid "Uncompressed %d-bit %s"
+ msgstr "Ukomprimert %d-bit %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:586
+ #, c-format
+ msgid "DivX MPEG-4 Version %d"
+ msgstr "DivX MPEG-4-versjon %d"
+
++#: gst-libs/gst/pbutils/descriptions.c:834
+ msgid "Uncompressed audio"
+ msgstr "Ukomprimert lyd"
+
++#: gst-libs/gst/pbutils/descriptions.c:840
+ #, c-format
+ msgid "Raw %d-bit %s audio"
+ msgstr "Rå %d-bit %s-lyd"
+
++#: gst-libs/gst/pbutils/descriptions.c:942
+ msgid "Audio CD source"
+ msgstr "Lyd-CD-kilde"
+
++#: gst-libs/gst/pbutils/descriptions.c:945
+ msgid "DVD source"
+ msgstr "DVD-kilde"
+
++#: gst-libs/gst/pbutils/descriptions.c:948
+ msgid "Real Time Streaming Protocol (RTSP) source"
+ msgstr "Sanntidsdirekteavspillingsprotokoll (RTSP) kilde"
+
++#: gst-libs/gst/pbutils/descriptions.c:952
+ msgid "Microsoft Media Server (MMS) protocol source"
+ msgstr "Microsoft Media Server (MMS) protokollkilde"
+
++#: gst-libs/gst/pbutils/descriptions.c:960
+ #, c-format
+ msgid "%s protocol source"
+ msgstr "%s protokollkilde"
+
++#: gst-libs/gst/pbutils/descriptions.c:1034
+ #, c-format
+ msgid "%s video RTP depayloader"
+ msgstr "%s bilde RTP-depayloader"
+
++#: gst-libs/gst/pbutils/descriptions.c:1036
+ #, c-format
+ msgid "%s audio RTP depayloader"
+ msgstr "%s lyd RTP-depayloader"
+
++#: gst-libs/gst/pbutils/descriptions.c:1038
+ #, c-format
+ msgid "%s RTP depayloader"
+ msgstr "%s RTP-depayloader"
+
++#: gst-libs/gst/pbutils/descriptions.c:1045
+ #, c-format
+ msgid "%s demuxer"
+ msgstr "%s demultiplekser"
+
++#: gst-libs/gst/pbutils/descriptions.c:1047
+ #, c-format
+ msgid "%s decoder"
+ msgstr "%s dekoder"
+
++#: gst-libs/gst/pbutils/descriptions.c:1086
+ #, c-format
+ msgid "%s video RTP payloader"
+ msgstr "%s bilde RTP-payloader"
+
++#: gst-libs/gst/pbutils/descriptions.c:1088
+ #, c-format
+ msgid "%s audio RTP payloader"
+ msgstr "%s lyd RTP-payloader"
+
++#: gst-libs/gst/pbutils/descriptions.c:1090
+ #, c-format
+ msgid "%s RTP payloader"
+ msgstr "%s RTP-payloader"
+
++#: gst-libs/gst/pbutils/descriptions.c:1097
+ #, c-format
+ msgid "%s muxer"
+ msgstr "%s multiplekser"
+
++#: gst-libs/gst/pbutils/descriptions.c:1099
+ #, c-format
+ msgid "%s encoder"
+ msgstr "%s koder"
+
++#: gst-libs/gst/pbutils/descriptions.c:1133
+ #, c-format
+ msgid "GStreamer element %s"
+ msgstr "GStreamer-element %s"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:558
+ msgid "Unknown source element"
+ msgstr "Ukjent kildeelement"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:561
+ msgid "Unknown sink element"
+ msgstr "Ukjent slukelement"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:564
+ msgid "Unknown element"
+ msgstr "Ukjent element"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:567
+ msgid "Unknown decoder element"
+ msgstr "Ukjent dekoderelement"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:570
+ msgid "Unknown encoder element"
+ msgstr "Ukjent element for koding"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:575
+ msgid "Plugin or element of unknown type"
+ msgstr "Tillegg eller element av ukjent type"
+
++#: gst-libs/gst/tag/gsttagdemux.c:1243
+ msgid "Failed to read tag: not enough data"
+ msgstr "Klarte ikke å lese merket: Ikke nok data"
+
++#: gst-libs/gst/tag/tags.c:77
+ msgid "track ID"
+ msgstr "spor-ID"
+
++#: gst-libs/gst/tag/tags.c:77
+ msgid "MusicBrainz track ID"
+ msgstr "MusicBrainz spor-id"
+
++#: gst-libs/gst/tag/tags.c:79
+ msgid "artist ID"
+ msgstr "artist-ID"
+
++#: gst-libs/gst/tag/tags.c:79
+ msgid "MusicBrainz artist ID"
+ msgstr "MusicBrainz artist-id"
+
++#: gst-libs/gst/tag/tags.c:81
+ msgid "album ID"
+ msgstr "album-ID"
+
++#: gst-libs/gst/tag/tags.c:81
+ msgid "MusicBrainz album ID"
+ msgstr "MusicBrainz album-id"
+
++#: gst-libs/gst/tag/tags.c:84
+ msgid "album artist ID"
+ msgstr "albumartist-id"
+
++#: gst-libs/gst/tag/tags.c:84
+ msgid "MusicBrainz album artist ID"
+ msgstr "MusicBrainz albumartist-id"
+
++#: gst-libs/gst/tag/tags.c:86
+ msgid "track TRM ID"
+ msgstr "spor TRM-id"
+
++#: gst-libs/gst/tag/tags.c:86
+ msgid "MusicBrainz TRM ID"
+ msgstr "MusicBrainz TRM-id"
+
++#: gst-libs/gst/tag/tags.c:108
+ msgid "capturing shutter speed"
+ msgstr "opptakslukkerhastighet"
+
++#: gst-libs/gst/tag/tags.c:109
+ msgid "Shutter speed used when capturing an image, in seconds"
+ msgstr "Lukkehastighet som benyttes når du tar et bilde, i sekunder"
+
++#: gst-libs/gst/tag/tags.c:112
+ msgid "capturing focal ratio"
+ msgstr "opptaksbrennvidder"
+
++#: gst-libs/gst/tag/tags.c:113
+ msgid "Focal ratio (f-number) used when capturing the image"
+ msgstr "Brennvidden (f-tall) som benyttes når du tar bildet"
+
++#: gst-libs/gst/tag/tags.c:116
+ msgid "capturing focal length"
+ msgstr "opptaksbrennvidde"
+
++#: gst-libs/gst/tag/tags.c:117
+ msgid "Focal length of the lens used capturing the image, in mm"
+ msgstr "Brennvidden til objektivet som brukes når du tar bildet, i mm"
+
++#: gst-libs/gst/tag/tags.c:121
+ msgid "capturing 35 mm equivalent focal length"
+ msgstr "35 mm ekvivalent opptaksbrennvidde"
+
-msgid ""
-"The metering mode used while determining exposure for capturing an image"
-msgstr ""
-"Målemodus som brukes mens man bestemmer eksponeringen for å ta et bilde"
++#: gst-libs/gst/tag/tags.c:122
++msgid "35 mm equivalent focal length of the lens used capturing the image, in mm"
++msgstr "35 mm ekvivalent brennvidde til objektivet som brukes når du tar bildet, i mm"
+
++#: gst-libs/gst/tag/tags.c:126
+ msgid "capturing digital zoom ratio"
+ msgstr "fanger digitalt zoomforhold"
+
++#: gst-libs/gst/tag/tags.c:127
+ msgid "Digital zoom ratio used when capturing an image"
+ msgstr "Digitalt zoomforhold som benyttes når du tar et bilde"
+
++#: gst-libs/gst/tag/tags.c:130
+ msgid "capturing iso speed"
+ msgstr "opptaks-ISO-hastighet"
+
++#: gst-libs/gst/tag/tags.c:131
+ msgid "The ISO speed used when capturing an image"
+ msgstr "ISO-hastigheten som benyttes når du tar et bilde"
+
++#: gst-libs/gst/tag/tags.c:134
+ msgid "capturing exposure program"
+ msgstr "opptakseksponeringsprogram"
+
++#: gst-libs/gst/tag/tags.c:135
+ msgid "The exposure program used when capturing an image"
+ msgstr "Eksponeringsprogrammet som benyttes når du tar et bilde"
+
++#: gst-libs/gst/tag/tags.c:138
+ msgid "capturing exposure mode"
+ msgstr "opptakseksponeringsmodus"
+
++#: gst-libs/gst/tag/tags.c:139
+ msgid "The exposure mode used when capturing an image"
+ msgstr "Eksponeringsmodusen som benyttes når du tar et bilde"
+
++#: gst-libs/gst/tag/tags.c:142
+ msgid "capturing exposure compensation"
+ msgstr "opptakseksponeringskompensasjon"
+
++#: gst-libs/gst/tag/tags.c:143
+ msgid "The exposure compensation used when capturing an image"
+ msgstr "Eksponeringskompensasjonen som benyttes når du tar et bilde"
+
++#: gst-libs/gst/tag/tags.c:146
+ msgid "capturing scene capture type"
+ msgstr "opptaksscene opptakstype"
+
++#: gst-libs/gst/tag/tags.c:147
+ msgid "The scene capture mode used when capturing an image"
+ msgstr "Sceneopptaksmodus som anvendes når du tar et bilde"
+
++#: gst-libs/gst/tag/tags.c:150
+ msgid "capturing gain adjustment"
+ msgstr "opptaksstyrkejustering"
+
++#: gst-libs/gst/tag/tags.c:151
+ msgid "The overall gain adjustment applied on an image"
+ msgstr "Den samlede styrkejusteringen som anvendes på et bilde"
+
++#: gst-libs/gst/tag/tags.c:154
+ msgid "capturing white balance"
+ msgstr "opptakshvitbalanse"
+
++#: gst-libs/gst/tag/tags.c:155
+ msgid "The white balance mode set when capturing an image"
+ msgstr "Hvitbalanseinnstillingen når du tar et bilde"
+
++#: gst-libs/gst/tag/tags.c:158
+ msgid "capturing contrast"
+ msgstr "opptakskontrast"
+
++#: gst-libs/gst/tag/tags.c:159
+ msgid "The direction of contrast processing applied when capturing an image"
+ msgstr "Retningen av kontrastbehandling som anvendes når du tar et bilde"
+
++#: gst-libs/gst/tag/tags.c:163
+ msgid "capturing saturation"
+ msgstr "opptaksmetning"
+
++#: gst-libs/gst/tag/tags.c:164
+ msgid "The direction of saturation processing applied when capturing an image"
+ msgstr "Retningen av metningsbehandling som anvendes når du tar et bilde"
+
++#: gst-libs/gst/tag/tags.c:168
+ msgid "capturing sharpness"
+ msgstr "opptaksskarphet"
+
++#: gst-libs/gst/tag/tags.c:169
+ msgid "The direction of sharpness processing applied when capturing an image"
+ msgstr "Retningen av skarphetsbehandling som anvendes når du tar et bilde"
+
++#: gst-libs/gst/tag/tags.c:173
+ msgid "capturing flash fired"
+ msgstr "opptaksblits avfyrt"
+
++#: gst-libs/gst/tag/tags.c:174
+ msgid "If the flash fired while capturing an image"
+ msgstr "Om blitsen avfyres mens du tar et bilde"
+
++#: gst-libs/gst/tag/tags.c:177
+ msgid "capturing flash mode"
+ msgstr "opptaksblitsmodus"
+
++#: gst-libs/gst/tag/tags.c:178
+ msgid "The selected flash mode while capturing an image"
+ msgstr "Valgt blitsmodus mens du tar et bilde"
+
++#: gst-libs/gst/tag/tags.c:181
+ msgid "capturing metering mode"
+ msgstr "opptaksmålemodus"
+
-msgid ""
-"Don't exit after showing the initial device list, but wait for devices to "
-"added/removed."
-msgstr ""
-"Ikke avslutt etter å ha vist den første enheteslisten, men vent på at "
-"enheter blir lagt til/fjernet."
++#: gst-libs/gst/tag/tags.c:182
++msgid "The metering mode used while determining exposure for capturing an image"
++msgstr "Målemodus som brukes mens man bestemmer eksponeringen for å ta et bilde"
+
++#: gst-libs/gst/tag/tags.c:186
+ msgid "capturing source"
+ msgstr "opptakskilde"
+
++#: gst-libs/gst/tag/tags.c:187
+ msgid "The source or type of device used for the capture"
+ msgstr "Kilden eller type enhet som brukes for opptak"
+
++#: gst-libs/gst/tag/tags.c:190
+ msgid "image horizontal ppi"
+ msgstr "Bilde horisontalt (ppi)"
+
++#: gst-libs/gst/tag/tags.c:191
+ msgid "Media (image/video) intended horizontal pixel density in ppi"
+ msgstr "Media (bilde/video) beregnet horisontal pikseltetthet i ppi"
+
++#: gst-libs/gst/tag/tags.c:194
+ msgid "image vertical ppi"
+ msgstr "Bilde vertikalt (ppi)"
+
++#: gst-libs/gst/tag/tags.c:195
+ msgid "Media (image/video) intended vertical pixel density in ppi"
+ msgstr "Media (bilde/video) beregnet vertikal piksel tetthet i ppi"
+
++#: gst-libs/gst/tag/tags.c:198
+ msgid "ID3v2 frame"
+ msgstr "ID3v2-ramme"
+
++#: gst-libs/gst/tag/tags.c:198
+ msgid "unparsed id3v2 tag frame"
+ msgstr "Utolket id3v2 merkeramme"
+
++#: gst-libs/gst/tag/tags.c:202
+ msgid "musical-key"
+ msgstr "toneart"
+
++#: gst-libs/gst/tag/tags.c:202
+ msgid "Initial key in which the sound starts"
+ msgstr "Innledende toneart der lyden begynner"
+
++#: tools/gst-device-monitor.c:260 tools/gst-play.c:1444
+ msgid "Print version information and exit"
+ msgstr "Skriv ut versjonsinformasjon og avslutt"
+
-msgstr ""
++#: tools/gst-device-monitor.c:262
++msgid "Don't exit after showing the initial device list, but wait for devices to added/removed."
++msgstr "Ikke avslutt etter å ha vist den første enheteslisten, men vent på at enheter blir lagt til/fjernet."
+
++#: tools/gst-play.c:310
+ #, c-format
+ msgid "Volume: %.0f%%"
+ msgstr "Volum: %.0f%%"
+
++#: tools/gst-play.c:349
+ msgid "Buffering..."
+ msgstr "Bufrer ..."
+
++#: tools/gst-play.c:370
+ msgid "Clock lost, selecting a new one\n"
+ msgstr "Klokke tapt, velger en ny\n"
+
++#: tools/gst-play.c:400 tools/gst-play.c:446 tools/gst-play.c:883
++#: tools/gst-play.c:1347
+ msgid "Reached end of play list."
+ msgstr "Nådde slutten av spillelisten."
+
++#: tools/gst-play.c:613
+ msgid "Paused"
+ msgstr "Pauset"
+
++#: tools/gst-play.c:671
+ #, c-format
+ msgid "Now playing %s\n"
+ msgstr "Spiller nå %s\n"
+
++#: tools/gst-play.c:734
+ #, c-format
+ msgid "About to finish, preparing next title: %s"
+ msgstr "I ferd med å fullføre, forbereder neste tittel: %s"
+
++#: tools/gst-play.c:979
+ #, c-format
+ msgid "Playback rate: %.2f"
+ msgstr "Avspillingshastighet: %.2f"
+
++#: tools/gst-play.c:983
+ #, c-format
+ msgid "Could not change playback rate to %.2f"
+ msgstr "Kunne ikke endre avspillingshastigheten til %.2f"
+
++#: tools/gst-play.c:1287
+ msgid "space"
+ msgstr "mellomrom"
+
++#: tools/gst-play.c:1287
+ msgid "pause/unpause"
+ msgstr "pause/opphev pause"
+
++#: tools/gst-play.c:1288
+ msgid "q or ESC"
+ msgstr "q eller ESC"
+
++#: tools/gst-play.c:1288
+ msgid "quit"
+ msgstr "Avslutt"
+
++#: tools/gst-play.c:1289
+ msgid "> or n"
+ msgstr "> eller n"
+
++#: tools/gst-play.c:1289
+ msgid "play next"
+ msgstr "spill neste"
+
++#: tools/gst-play.c:1290
+ msgid "< or b"
+ msgstr "< eller b"
+
++#: tools/gst-play.c:1290
+ msgid "play previous"
+ msgstr "spill forrige"
+
++#: tools/gst-play.c:1291
+ msgid "seek forward"
+ msgstr "søk forover"
+
++#: tools/gst-play.c:1292
+ msgid "seek backward"
+ msgstr "søk bakover"
+
++#: tools/gst-play.c:1293
+ msgid "volume up"
+ msgstr "Volum opp"
+
++#: tools/gst-play.c:1294
+ msgid "volume down"
+ msgstr "volum ned"
+
++#: tools/gst-play.c:1295
+ msgid "increase playback rate"
+ msgstr "øk avspillingshastigheten"
+
++#: tools/gst-play.c:1296
+ msgid "decrease playback rate"
+ msgstr "reduser avspillingshastigheten"
+
++#: tools/gst-play.c:1297
+ msgid "change playback direction"
+ msgstr "endre avspillingsretningen"
+
++#: tools/gst-play.c:1298
+ msgid "enable/disable trick modes"
+ msgstr "aktivere/deaktivere trikse-moduser"
+
++#: tools/gst-play.c:1299
+ msgid "change audio track"
+ msgstr "endre lydspor"
+
++#: tools/gst-play.c:1300
+ msgid "change video track"
+ msgstr "endre videospor"
+
++#: tools/gst-play.c:1301
+ msgid "change subtitle track"
+ msgstr "endre undertekstspor"
+
++#: tools/gst-play.c:1302
+ msgid "seek to beginning"
+ msgstr "Søk til begynnelsen"
+
++#: tools/gst-play.c:1303
+ msgid "show keyboard shortcuts"
+ msgstr "vis hurtigtaster"
+
++#: tools/gst-play.c:1306
+ msgid "Interactive mode - keyboard controls:"
+ msgstr "Interaktiv modus - tastaturkontroller:"
+
++#: tools/gst-play.c:1439
+ msgid "Output status information and property notifications"
+ msgstr "Utdata-statusinformasjon og egenskapsvarslinger"
+
++#: tools/gst-play.c:1441
+ msgid "Control playback behaviour setting playbin 'flags' property"
+ msgstr "Kontroller avspillingsoppførsel ved å sette playbin «flagg»-egenskap"
+
++#: tools/gst-play.c:1446
+ msgid "Video sink to use (default is autovideosink)"
+ msgstr "Video-sink som skal brukes (standard er autovideosink)"
+
++#: tools/gst-play.c:1448
+ msgid "Audio sink to use (default is autoaudiosink)"
+ msgstr "Lyd-sink som skal brukes (standard er autoaudiosink)"
+
++#: tools/gst-play.c:1450
+ msgid "Enable gapless playback"
+ msgstr "Aktiver sømløs avspilling"
+
++#: tools/gst-play.c:1452
+ msgid "Shuffle playlist"
+ msgstr "Stokke spilleliste"
+
++#: tools/gst-play.c:1455
+ msgid "Disable interactive control via the keyboard"
+ msgstr "Deaktiver interaktiv kontroll via tastaturet"
+
++#: tools/gst-play.c:1457
+ msgid "Volume"
+ msgstr "Volum"
+
++#: tools/gst-play.c:1459
+ msgid "Playlist file containing input media files"
+ msgstr "Spillelistefil som inneholder inngangsmediefiler"
+
++#: tools/gst-play.c:1461
+ msgid "Do not print any output (apart from errors)"
+ msgstr "Ikke skriv noe utdata (bortsett fra feil)"
+
++#: tools/gst-play.c:1463
+ msgid "Use playbin3 pipeline"
-msgstr ""
++msgstr "Bruk playbin3-datakanal"
+
++#: tools/gst-play.c:1464
+ msgid "(default varies depending on 'USE_PLAYBIN' env variable)"
-#~ msgstr ""
-#~ "Et %s-tillegg kreves for å spille av denne strømmen, men det er ikke "
-#~ "installert."
++msgstr "(standard varierer avhengig av miljøvariabelen «USE_PLAYBIN»)"
+
++#: tools/gst-play.c:1468
++msgid "Keep showing the last frame on EOS until quit or playlist change command (gapless is ignored)"
++msgstr "Fortsett å vise den siste rammen på EOS til du avslutter eller endrer kommandoen for spillelisten (gapless ignoreres)"
++
++#: tools/gst-play.c:1546
+ #, c-format
+ msgid "Usage: %s FILE1|URI1 [FILE2|URI2] [FILE3|URI3] ..."
+ msgstr "Bruk: %s FIL1|URI1 [FIL2|URI2] [FIL3|URI3] ..."
+
++#: tools/gst-play.c:1550
+ msgid "You must provide at least one filename or URI to play."
+ msgstr "Du må ha med minst ett filnavn eller nettadresse å spille."
+
++#: tools/gst-play.c:1590
+ msgid "Press 'k' to see a list of keyboard shortcuts.\n"
+ msgstr "Trykk «k» for å se en liste over hurtigtaster.\n"
+
+ #~ msgid "Could not create \"decodebin3\" element."
+ #~ msgstr "Kunne ikke opprette elementet «decodebin3»"
+
+ #~ msgid "Could not create \"urisourcebin\" element."
+ #~ msgstr "Kunne ikke opprette elementet «urisourcebin»"
+
+ #~ msgid "Internal data stream error."
+ #~ msgstr "Intern feil i datastrøm."
+
+ #~ msgid "A %s plugin is required to play this stream, but not installed."
-#~ msgid ""
-#~ "Only a subtitle stream was detected. Either you are loading a subtitle "
-#~ "file or some other type of text file, or the media file was not "
-#~ "recognized."
-#~ msgstr ""
-#~ "Kun en undertekststrøm ble oppdaget. Enten laster du inn en undertekstfil "
-#~ "eller en annen type tekstfil, eller mediafilen ble ikke gjenkjent."
-
-#~ msgid ""
-#~ "You do not have a decoder installed to handle this file. You might need "
-#~ "to install the necessary plugins."
-#~ msgstr ""
-#~ "Du har ikke en dekoder installert for å håndtere denne filen. Du må "
-#~ "kanskje installere de nødvendige programtilleggene."
++#~ msgstr "Et %s-tillegg kreves for å spille av denne strømmen, men det er ikke installert."
+
+ #~ msgid "Uncompressed %s YUV %s"
+ #~ msgstr "Ukomprimert %s YUV %s"
+
+ #~ msgid "Master"
+ #~ msgstr "Hovedvolum"
+
+ #~ msgid "Bass"
+ #~ msgstr "Bass"
+
+ #~ msgid "Treble"
+ #~ msgstr "Diskant"
+
+ #~ msgid "PCM"
+ #~ msgstr "PCM"
+
+ #~ msgid "Synth"
+ #~ msgstr "Synth"
+
+ #~ msgid "Line-in"
+ #~ msgstr "Linje inn"
+
+ #~ msgid "CD"
+ #~ msgstr "CD"
+
+ #~ msgid "Microphone"
+ #~ msgstr "Mikrofon"
+
+ #~ msgid "PC Speaker"
+ #~ msgstr "PC-høyttaler"
+
+ #~ msgid "Playback"
+ #~ msgstr "Spill av"
+
+ #~ msgid "Capture"
+ #~ msgstr "Ta opp"
+
+ #~ msgid "Could not open vfs file \"%s\" for writing: %s."
+ #~ msgstr "Kunne ikke åpne VFS-fil «%s» for skriving: %s."
+
+ #~ msgid "No filename given"
+ #~ msgstr "Ingen filnavn oppgitt"
+
+ #~ msgid "Could not close vfs file \"%s\"."
+ #~ msgstr "Kunne ikke lukke VFS-fil «%s»."
+
+ #~ msgid "Error while writing to file \"%s\"."
+ #~ msgstr "Feil under skriving til fil «%s»."
+
+ #~ msgid "Invalid subtitle URI \"%s\", subtitles disabled."
+ #~ msgstr "Ugyldig URI «%s» til undertekst. Undertekst er slått av."
+
+ #~ msgid "RTSP streams cannot be played yet."
+ #~ msgstr "RTSP-strømmer kan ikke spilles av ennå."
+
++#~ msgid "Only a subtitle stream was detected. Either you are loading a subtitle file or some other type of text file, or the media file was not recognized."
++#~ msgstr "Kun en undertekststrøm ble oppdaget. Enten laster du inn en undertekstfil eller en annen type tekstfil, eller mediafilen ble ikke gjenkjent."
++
++#~ msgid "You do not have a decoder installed to handle this file. You might need to install the necessary plugins."
++#~ msgstr "Du har ikke en dekoder installert for å håndtere denne filen. Du må kanskje installere de nødvendige programtilleggene."
+
+ #~ msgid "This is not a media file"
+ #~ msgstr "Dette er ikke en mediefil"
+
+ #~ msgid "A subtitle stream was detected, but no video stream."
+ #~ msgstr "En undertekststrøm ble oppdaget, men ingen videostrøm."
+
+ #~ msgid "Both autovideosink and xvimagesink elements are missing."
+ #~ msgstr "Både autobildesluk- og xvbildeslukelementene mangler."
+
+ #~ msgid "Both autoaudiosink and alsasink elements are missing."
+ #~ msgstr "Både autolydsluk- og alsaslukelementene mangler."
+
+ #~ msgid "Error while sending gdp header data to \"%s:%d\"."
+ #~ msgstr "Feil ved sending av GDP overskriftsdata til «%s:%d»"
+
+ #~ msgid "Error while sending gdp payload data to \"%s:%d\"."
+ #~ msgstr "Feil ved sending av GDP nyttelast data til «%s:%d»"
+
+ #~ msgid "Connection to %s:%d refused."
+ #~ msgstr "Forbindelse til %s:%d ble avvist"
+
+ #~ msgid "Uncompressed planar YVU 4:2:0"
+ #~ msgstr "Ukomprimert pakket YVU 4:2:0"
+
+ #~ msgid "Uncompressed packed YUV 4:1:0"
+ #~ msgstr "Ukomprimert pakket YUV 4:1:0"
+
+ #~ msgid "Uncompressed packed YVU 4:1:0"
+ #~ msgstr "Ukomprimert pakket YVU 4:1:0"
+
+ #~ msgid "Uncompressed packed YUV 4:1:1"
+ #~ msgstr "Ukomprimert pakket YUV 4:1:1"
+
+ #~ msgid "Uncompressed packed YUV 4:4:4"
+ #~ msgstr "Ukomprimert pakket YUV 4:4:4"
+
+ #~ msgid "Uncompressed planar YUV 4:2:2"
+ #~ msgstr "Ukomprimert planar YUV 4:2:2"
+
+ #~ msgid "Uncompressed planar YUV 4:1:1"
+ #~ msgstr "Ukomprimert planar YUV 4:1:1"
+
+ #~ msgid "Uncompressed black and white Y-plane"
+ #~ msgstr "Ukomprimert svart og hvitt Y-flate"
+
+ #~ msgid "Raw PCM audio"
+ #~ msgstr "Rå PCM-lyd"
+
+ #~ msgid "Raw %d-bit floating-point audio"
+ #~ msgstr "Rå %d-bit flyttallslyd"
+
+ #~ msgid "Raw floating-point audio"
+ #~ msgstr "Rå flyttallslyd"
+
+ #~ msgid "No device specified."
+ #~ msgstr "Ingen enhet oppgitt."
+
+ #~ msgid "Device \"%s\" does not exist."
+ #~ msgstr "Enheten «%s» eksisterer ikke."
+
+ #~ msgid "Device \"%s\" is already being used."
+ #~ msgstr "Enheten «%s» er allerede i bruk."
+
+ #~ msgid "Could not open device \"%s\" for reading and writing."
+ #~ msgstr "Kunne ikke åpne enhet «%s» for lesing og skriving."
--- /dev/null
-"Project-Id-Version: gst-plugins-base 1.15.1\n"
+ # Polish translation for gst-plugins-base.
+ # This file is distributed under the same license as the gst-plugins-base package.
+ # Jakub Bogusz <qboosh@pld-linux.org>, 2007-2019.
+ #
+ msgid ""
+ msgstr ""
-"POT-Creation-Date: 2019-02-26 11:43+0000\n"
-"PO-Revision-Date: 2019-01-25 05:30+0100\n"
++"Project-Id-Version: gst-plugins-base 1.16.0\n"
+ "Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-msgstr ""
-"Nie udało się otworzyć urządzenia dźwiękowego do odtwarzania w trybie mono."
++"POT-Creation-Date: 2019-04-19 00:20+0100\n"
++"PO-Revision-Date: 2019-09-03 15:57+0200\n"
+ "Last-Translator: Jakub Bogusz <qboosh@pld-linux.org>\n"
+ "Language-Team: Polish <translation-team-pl@lists.sourceforge.net>\n"
+ "Language: pl\n"
+ "MIME-Version: 1.0\n"
+ "Content-Type: text/plain; charset=UTF-8\n"
+ "Content-Transfer-Encoding: 8bit\n"
+ "X-Bugs: Report translation errors to the Language-Team address.\n"
+
++#: ext/alsa/gstalsasink.c:579
+ msgid "Could not open device for playback in mono mode."
-msgstr ""
-"Nie udało się otworzyć urządzenia dźwiękowego do odtwarzania w trybie stereo."
++msgstr "Nie udało się otworzyć urządzenia dźwiękowego do odtwarzania w trybie mono."
+
++#: ext/alsa/gstalsasink.c:581
+ msgid "Could not open device for playback in stereo mode."
-msgstr ""
-"Nie udało się otworzyć urządzenia dźwiękowego do odtwarzania w trybie %d-"
-"kanałowym."
++msgstr "Nie udało się otworzyć urządzenia dźwiękowego do odtwarzania w trybie stereo."
+
++#: ext/alsa/gstalsasink.c:585
+ #, c-format
+ msgid "Could not open device for playback in %d-channel mode."
-msgid ""
-"Could not open audio device for playback. Device is being used by another "
-"application."
-msgstr ""
-"Nie udało się otworzyć urządzenia dźwiękowego do odtwarzania. Urządzenie "
-"jest używane przez inną aplikację."
++msgstr "Nie udało się otworzyć urządzenia dźwiękowego do odtwarzania w trybie %d-kanałowym."
+
-msgstr ""
-"Błąd przekazywania danych do urządzenia dźwiękowego. Urządzenie zostało "
-"odłączone."
++#: ext/alsa/gstalsasink.c:858
++msgid "Could not open audio device for playback. Device is being used by another application."
++msgstr "Nie udało się otworzyć urządzenia dźwiękowego do odtwarzania. Urządzenie jest używane przez inną aplikację."
+
++#: ext/alsa/gstalsasink.c:863
+ msgid "Could not open audio device for playback."
+ msgstr "Nie udało się otworzyć urządzenia dźwiękowego do odtwarzania."
+
++#: ext/alsa/gstalsasink.c:1076
+ msgid "Error outputting to audio device. The device has been disconnected."
-msgstr ""
-"Nie udało się otworzyć urządzenia dźwiękowego do nagrywania w trybie mono."
++msgstr "Błąd przekazywania danych do urządzenia dźwiękowego. Urządzenie zostało odłączone."
+
++#: ext/alsa/gstalsasrc.c:444
+ msgid "Could not open device for recording in mono mode."
-msgstr ""
-"Nie udało się otworzyć urządzenia dźwiękowego do nagrywania w trybie stereo."
++msgstr "Nie udało się otworzyć urządzenia dźwiękowego do nagrywania w trybie mono."
+
++#: ext/alsa/gstalsasrc.c:446
+ msgid "Could not open device for recording in stereo mode."
-msgstr ""
-"Nie udało się otworzyć urządzenia dźwiękowego do nagrywania w trybie %d-"
-"kanałowym."
++msgstr "Nie udało się otworzyć urządzenia dźwiękowego do nagrywania w trybie stereo."
+
++#: ext/alsa/gstalsasrc.c:450
+ #, c-format
+ msgid "Could not open device for recording in %d-channel mode"
-msgid ""
-"Could not open audio device for recording. Device is being used by another "
-"application."
-msgstr ""
-"Nie udało się otworzyć urządzenia dźwiękowego do nagrywania. Urządzenie jest "
-"używane przez inną aplikację."
++msgstr "Nie udało się otworzyć urządzenia dźwiękowego do nagrywania w trybie %d-kanałowym."
+
-msgstr ""
-"Błąd nagrywania z urządzenia dźwiękowego. Urządzenie zostało odłączone."
++#: ext/alsa/gstalsasrc.c:741
++msgid "Could not open audio device for recording. Device is being used by another application."
++msgstr "Nie udało się otworzyć urządzenia dźwiękowego do nagrywania. Urządzenie jest używane przez inną aplikację."
+
++#: ext/alsa/gstalsasrc.c:746
+ msgid "Could not open audio device for recording."
+ msgstr "Nie udało się otworzyć urządzenia dźwiękowego do nagrywania."
+
++#: ext/alsa/gstalsasrc.c:992
+ msgid "Error recording from audio device. The device has been disconnected."
-msgid ""
-"35 mm equivalent focal length of the lens used capturing the image, in mm"
-msgstr ""
-"Długość ogniskowej obiektywu użyta przy robieniu zdjęcia, w mm, będąca "
-"odpowiednikiem 35 mm"
++msgstr "Błąd nagrywania z urządzenia dźwiękowego. Urządzenie zostało odłączone."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:277 ext/cdparanoia/gstcdparanoiasrc.c:283
+ msgid "Could not open CD device for reading."
+ msgstr "Nie udało się otworzyć urządzenia CD do odczytu."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:409
+ msgid "Could not seek CD."
+ msgstr "Nie udało się ustawić położenia na CD."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:417
+ msgid "Could not read CD."
+ msgstr "Nie udał się odczyt CD."
+
++#: ext/gl/gstgltestsrc.c:491
+ msgid "failed to draw pattern"
+ msgstr "nie udało się narysować wzoru"
+
++#: ext/gl/gstgltestsrc.c:492
+ msgid "A GL error occurred"
+ msgstr "Wystąpił błąd GL"
+
++#: ext/gl/gstgltestsrc.c:498
+ msgid "format wasn't negotiated before get function"
+ msgstr "format nie został wynegocjowany przed funkcją pobierającą"
+
++#: gst/encoding/gstencodebin.c:1636 gst/playback/gstplaybin2.c:3399
++#: gst/playback/gstplaysink.c:1522 gst/playback/gstplaysink.c:1535
++#: gst/playback/gstplaysink.c:1872 gst/playback/gstplaysink.c:1904
++#: gst/playback/gstplaysink.c:2488 gst/playback/gstplaysink.c:2537
++#: gst/playback/gstplaysink.c:2552 gst/playback/gstplaysink.c:2577
++#: gst/playback/gstplaysink.c:2609 gst/playback/gstplaysink.c:2757
++#: gst/playback/gstplaysink.c:2788 gst/playback/gstplaysink.c:3166
++#: gst/playback/gstplaysink.c:3175 gst/playback/gstplaysink.c:3184
++#: gst/playback/gstplaysink.c:3193 gst/playback/gstplaysink.c:3606
++#: gst/playback/gstplaysink.c:4526 gst/playback/gstplaysinkconvertbin.c:97
++#: gst/playback/gstplaysinkconvertbin.c:117
++#: gst/playback/gsturidecodebin.c:1472 gst/playback/gsturisourcebin.c:1597
+ #, c-format
+ msgid "Missing element '%s' - check your GStreamer installation."
+ msgstr "Brak elementu '%s' - proszę sprawdzić instalację GStreamera."
+
++#: gst/playback/gstdecodebin2.c:1883 gst/playback/gstparsebin.c:1553
+ msgid "Could not determine type of stream"
+ msgstr "Nie udało się określić typu strumienia"
+
++#: gst/playback/gstdecodebin2.c:2861 gst/playback/gstparsebin.c:2385
+ msgid "This appears to be a text file"
+ msgstr "To wygląda na plik tekstowy"
+
++#: gst/playback/gstplaybin2.c:5452
+ msgid "Could not create \"uridecodebin\" element."
+ msgstr "Nie udało się utworzyć elementu \"uridecodebin\"."
+
++#: gst/playback/gstplaybin3.c:4558
+ msgid "Could not create \"uridecodebin3\" element."
+ msgstr "Nie udało się utworzyć elementu \"uridecodebin3\"."
+
++#: gst/playback/gstplaysink.c:2003
+ #, c-format
+ msgid "Both autovideosink and %s elements are missing."
+ msgstr "Nie ma żadnego z elementów autovideosink i %s."
+
++#: gst/playback/gstplaysink.c:2007
+ msgid "The autovideosink element is missing."
+ msgstr "Nie ma elementu autovideosink."
+
++#: gst/playback/gstplaysink.c:2012
+ #, c-format
+ msgid "Configured videosink %s is not working."
+ msgstr "Skonfigurowany element videosink %s nie działa."
+
++#: gst/playback/gstplaysink.c:2016
+ #, c-format
+ msgid "Both autovideosink and %s elements are not working."
+ msgstr "Żaden z elementów autovideosink i %s nie działa."
+
++#: gst/playback/gstplaysink.c:2020
+ msgid "The autovideosink element is not working."
+ msgstr "Element autovideosink nie działa."
+
++#: gst/playback/gstplaysink.c:2525
+ msgid "Custom text sink element is not usable."
+ msgstr "Własny element text sink nie jest użyteczny."
+
++#: gst/playback/gstplaysink.c:2903
+ msgid "No volume control found"
+ msgstr "Nie znaleziono sterowania głośnością"
+
++#: gst/playback/gstplaysink.c:2933
+ #, c-format
+ msgid "Both autoaudiosink and %s elements are missing."
+ msgstr "Nie ma żadnego z elementów autoaudiosink i %s."
+
++#: gst/playback/gstplaysink.c:2937
+ msgid "The autoaudiosink element is missing."
+ msgstr "Nie ma elementu autoaudiosink."
+
++#: gst/playback/gstplaysink.c:2942
+ #, c-format
+ msgid "Configured audiosink %s is not working."
+ msgstr "Skonfigurowany element audiosink %s nie działa."
+
++#: gst/playback/gstplaysink.c:2946
+ #, c-format
+ msgid "Both autoaudiosink and %s elements are not working."
+ msgstr "Żaden z elementów autoaudiosink i %s nie działa."
+
++#: gst/playback/gstplaysink.c:2950
+ msgid "The autoaudiosink element is not working."
+ msgstr "Element autoaudiosink nie działa."
+
++#: gst/playback/gstplaysink.c:3269 gst/playback/gstplaysink.c:3274
+ msgid "Can't play a text file without video or visualizations."
+ msgstr "Nie można odtwarzać pliku tekstowego bez obrazu lub wizualizacji."
+
++#: gst/playback/gsturidecodebin.c:921
+ #, c-format
+ msgid "No decoder available for type '%s'."
+ msgstr "Brak dostępnego dekodera dla typu '%s'."
+
++#: gst/playback/gsturidecodebin.c:1378 gst/playback/gsturisourcebin.c:1491
+ msgid "No URI specified to play from."
+ msgstr "Nie podano URI do odtwarzania."
+
++#: gst/playback/gsturidecodebin.c:1384 gst/playback/gsturisourcebin.c:1497
+ #, c-format
+ msgid "Invalid URI \"%s\"."
+ msgstr "Niepoprawne URI \"%s\"."
+
++#: gst/playback/gsturidecodebin.c:1391 gst/playback/gsturisourcebin.c:1504
+ msgid "This stream type cannot be played yet."
+ msgstr "Tego strumienia jeszcze nie można odtworzyć."
+
++#: gst/playback/gsturidecodebin.c:1409 gst/playback/gsturisourcebin.c:1522
+ #, c-format
+ msgid "No URI handler implemented for \"%s\"."
+ msgstr "Nie ma zaimplementowanej obsługi URI dla \"%s\"."
+
++#: gst/playback/gsturidecodebin.c:2289 gst/playback/gsturisourcebin.c:2228
+ msgid "Source element is invalid."
+ msgstr "Element źródłowy jest niepoprawny."
+
++#: gst/tcp/gsttcpclientsink.c:216
+ #, c-format
+ msgid "Error while sending data to \"%s:%d\"."
+ msgstr "Błąd podczas wysyłania danych do \"%s:%d\"."
+
++#: gst-libs/gst/audio/gstaudiobasesrc.c:842
+ msgid "Can't record audio fast enough"
+ msgstr "Nie można zapisywać danych dźwiękowych wystarczająco szybko"
+
++#: gst-libs/gst/audio/gstaudiocdsrc.c:1625
+ msgid "This CD has no audio tracks"
+ msgstr "Ta płyta CD nie ma ścieżek dźwiękowych"
+
++#: gst-libs/gst/pbutils/descriptions.c:89
+ msgid "ID3 tag"
+ msgstr "Znacznik ID3"
+
++#: gst-libs/gst/pbutils/descriptions.c:90
++#: gst-libs/gst/pbutils/descriptions.c:91
+ msgid "APE tag"
+ msgstr "Znacznik APE"
+
++#: gst-libs/gst/pbutils/descriptions.c:92
+ msgid "ICY internet radio"
+ msgstr "Radio internetowe ICY"
+
++#: gst-libs/gst/pbutils/descriptions.c:127
+ msgid "Apple Lossless Audio (ALAC)"
+ msgstr "Apple Lossless Audio (ALAC)"
+
++#: gst-libs/gst/pbutils/descriptions.c:143
+ msgid "Free Lossless Audio Codec (FLAC)"
+ msgstr "Free Lossless Audio Codec (FLAC)"
+
++#: gst-libs/gst/pbutils/descriptions.c:173
++#: gst-libs/gst/pbutils/descriptions.c:174
+ msgid "Lossless True Audio (TTA)"
+ msgstr "Lossless True Audio (TTA)"
+
++#: gst-libs/gst/pbutils/descriptions.c:182
+ msgid "Windows Media Speech"
+ msgstr "Windows Media Speech"
+
++#: gst-libs/gst/pbutils/descriptions.c:199
+ msgid "CYUV Lossless"
+ msgstr "CYUV Lossless (bezstratne CYUV)"
+
++#: gst-libs/gst/pbutils/descriptions.c:203
+ msgid "FFMpeg v1"
+ msgstr "FFMpeg v1"
+
++#: gst-libs/gst/pbutils/descriptions.c:217
+ msgid "Lossless MSZH"
+ msgstr "Lossless MSZH"
+
++#: gst-libs/gst/pbutils/descriptions.c:228
+ msgid "Run-length encoding"
+ msgstr "Kodowanie RLE"
+
++#: gst-libs/gst/pbutils/descriptions.c:274
+ msgid "Timed Text"
+ msgstr "Tekst w czasie"
+
++#: gst-libs/gst/pbutils/descriptions.c:278
+ msgid "Subtitle"
+ msgstr "Podpisy"
+
++#: gst-libs/gst/pbutils/descriptions.c:279
+ msgid "MPL2 subtitle format"
+ msgstr "Format podpisów MPL2"
+
++#: gst-libs/gst/pbutils/descriptions.c:280
+ msgid "DKS subtitle format"
+ msgstr "Format podpisów DKS"
+
++#: gst-libs/gst/pbutils/descriptions.c:281
+ msgid "QTtext subtitle format"
+ msgstr "Format podpisów QTtext"
+
++#: gst-libs/gst/pbutils/descriptions.c:282
+ msgid "Sami subtitle format"
+ msgstr "Format podpisów Sami"
+
++#: gst-libs/gst/pbutils/descriptions.c:283
+ msgid "TMPlayer subtitle format"
+ msgstr "Format podpisów TMPlayer"
+
++#: gst-libs/gst/pbutils/descriptions.c:287
+ msgid "CEA 608 Closed Caption"
+ msgstr "Podpisy CEA 608"
+
++#: gst-libs/gst/pbutils/descriptions.c:288
+ msgid "CEA 708 Closed Caption"
+ msgstr "Podpisy CEA 708"
+
++#: gst-libs/gst/pbutils/descriptions.c:289
+ msgid "Kate subtitle format"
+ msgstr "Format podpisów Kate"
+
++#: gst-libs/gst/pbutils/descriptions.c:290
+ msgid "WebVTT subtitle format"
+ msgstr "Format podpisów WebVTT"
+
++#: gst-libs/gst/pbutils/descriptions.c:455
++#: gst-libs/gst/pbutils/descriptions.c:458
++#: gst-libs/gst/pbutils/descriptions.c:508
+ msgid "Uncompressed video"
+ msgstr "Nieskompresowany obraz"
+
++#: gst-libs/gst/pbutils/descriptions.c:463
+ msgid "Uncompressed gray"
+ msgstr "Nieskompresowane szarości"
+
++#: gst-libs/gst/pbutils/descriptions.c:486
+ #, c-format
+ msgid "Uncompressed packed YUV %s"
+ msgstr "Nieskompresowany upakowany YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:488
+ #, c-format
+ msgid "Uncompressed semi-planar YUV %s"
+ msgstr "Nieskompresowany dwuwarstwowy YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:490
+ #, c-format
+ msgid "Uncompressed planar YUV %s"
+ msgstr "Nieskompresowany warstwowy YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:501
+ #, c-format
+ msgid "Uncompressed palettized %d-bit %s"
+ msgstr "Nieskompresowany z paletą %d-bitowy %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:504
+ #, c-format
+ msgid "Uncompressed %d-bit %s"
+ msgstr "Nieskompresowany %d-bitowy %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:586
+ #, c-format
+ msgid "DivX MPEG-4 Version %d"
+ msgstr "DivX MPEG-4 w wersji %d"
+
++#: gst-libs/gst/pbutils/descriptions.c:834
+ msgid "Uncompressed audio"
+ msgstr "Nieskompresowany dźwięk"
+
++#: gst-libs/gst/pbutils/descriptions.c:840
+ #, c-format
+ msgid "Raw %d-bit %s audio"
+ msgstr "Surowy %d-bitowy dźwięk %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:942
+ msgid "Audio CD source"
+ msgstr "Źródło Audio CD"
+
++#: gst-libs/gst/pbutils/descriptions.c:945
+ msgid "DVD source"
+ msgstr "Źródło DVD"
+
++#: gst-libs/gst/pbutils/descriptions.c:948
+ msgid "Real Time Streaming Protocol (RTSP) source"
+ msgstr "Źródło RTSP (Real Time Streaming Protocol)"
+
++#: gst-libs/gst/pbutils/descriptions.c:952
+ msgid "Microsoft Media Server (MMS) protocol source"
+ msgstr "Źródło protokołu MMS (Microsoft Media Server)"
+
++#: gst-libs/gst/pbutils/descriptions.c:960
+ #, c-format
+ msgid "%s protocol source"
+ msgstr "Źródło protokołu %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1034
+ #, c-format
+ msgid "%s video RTP depayloader"
+ msgstr "Depayloader obrazu RTP %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1036
+ #, c-format
+ msgid "%s audio RTP depayloader"
+ msgstr "Depayloader dźwięku RTP %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1038
+ #, c-format
+ msgid "%s RTP depayloader"
+ msgstr "Depayloader RTP %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1045
+ #, c-format
+ msgid "%s demuxer"
+ msgstr "Demuxer %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1047
+ #, c-format
+ msgid "%s decoder"
+ msgstr "Dekoder %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1086
+ #, c-format
+ msgid "%s video RTP payloader"
+ msgstr "Payloader obrazu RTP %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1088
+ #, c-format
+ msgid "%s audio RTP payloader"
+ msgstr "Payloader dźwięku RTP %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1090
+ #, c-format
+ msgid "%s RTP payloader"
+ msgstr "Payloader RTP %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1097
+ #, c-format
+ msgid "%s muxer"
+ msgstr "Muxer %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1099
+ #, c-format
+ msgid "%s encoder"
+ msgstr "Koder %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1133
+ #, c-format
+ msgid "GStreamer element %s"
+ msgstr "Element GStreamera %s"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:558
+ msgid "Unknown source element"
+ msgstr "Nieznany element źródłowy"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:561
+ msgid "Unknown sink element"
+ msgstr "Nieznany element pochłaniający"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:564
+ msgid "Unknown element"
+ msgstr "Nieznany element"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:567
+ msgid "Unknown decoder element"
+ msgstr "Nieznany element dekodujący"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:570
+ msgid "Unknown encoder element"
+ msgstr "Nieznany element kodujący"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:575
+ msgid "Plugin or element of unknown type"
+ msgstr "Wtyczka lub element nieznanego typu"
+
++#: gst-libs/gst/tag/gsttagdemux.c:1243
+ msgid "Failed to read tag: not enough data"
+ msgstr "Nie udało się odczytać znacznika: za mało danych"
+
++#: gst-libs/gst/tag/tags.c:77
+ msgid "track ID"
+ msgstr "ID ścieżki"
+
++#: gst-libs/gst/tag/tags.c:77
+ msgid "MusicBrainz track ID"
+ msgstr "ID ścieżki wg MusicBrainz"
+
++#: gst-libs/gst/tag/tags.c:79
+ msgid "artist ID"
+ msgstr "ID artysty"
+
++#: gst-libs/gst/tag/tags.c:79
+ msgid "MusicBrainz artist ID"
+ msgstr "ID artysty wg MusicBrainz"
+
++#: gst-libs/gst/tag/tags.c:81
+ msgid "album ID"
+ msgstr "ID albumu"
+
++#: gst-libs/gst/tag/tags.c:81
+ msgid "MusicBrainz album ID"
+ msgstr "ID albumu wg MusicBrainz"
+
++#: gst-libs/gst/tag/tags.c:84
+ msgid "album artist ID"
+ msgstr "ID artysty albumu"
+
++#: gst-libs/gst/tag/tags.c:84
+ msgid "MusicBrainz album artist ID"
+ msgstr "ID artysty albumu wg MusicBrainz"
+
++#: gst-libs/gst/tag/tags.c:86
+ msgid "track TRM ID"
+ msgstr "TRM ID ścieżki"
+
++#: gst-libs/gst/tag/tags.c:86
+ msgid "MusicBrainz TRM ID"
+ msgstr "TRM ID wg MusicBrainz"
+
++#: gst-libs/gst/tag/tags.c:108
+ msgid "capturing shutter speed"
+ msgstr "szybkość migawki"
+
++#: gst-libs/gst/tag/tags.c:109
+ msgid "Shutter speed used when capturing an image, in seconds"
+ msgstr "Szybkość migawki zastosowana przy robieniu zdjęcia, w sekundach"
+
++#: gst-libs/gst/tag/tags.c:112
+ msgid "capturing focal ratio"
+ msgstr "współczynnik przysłony"
+
++#: gst-libs/gst/tag/tags.c:113
+ msgid "Focal ratio (f-number) used when capturing the image"
+ msgstr "Przysłona (liczba f) użyta przy robieniu zdjęcia"
+
++#: gst-libs/gst/tag/tags.c:116
+ msgid "capturing focal length"
+ msgstr "ogniskowa obiektywu"
+
++#: gst-libs/gst/tag/tags.c:117
+ msgid "Focal length of the lens used capturing the image, in mm"
+ msgstr "Długość ogniskowej obiektywu użyta przy robieniu zdjęcia, w mm"
+
++#: gst-libs/gst/tag/tags.c:121
+ msgid "capturing 35 mm equivalent focal length"
+ msgstr "ogniskowa obiektywu - odpowiednik 35 mm"
+
-msgid ""
-"The metering mode used while determining exposure for capturing an image"
++#: gst-libs/gst/tag/tags.c:122
++msgid "35 mm equivalent focal length of the lens used capturing the image, in mm"
++msgstr "Długość ogniskowej obiektywu użyta przy robieniu zdjęcia, w mm, będąca odpowiednikiem 35 mm"
+
++#: gst-libs/gst/tag/tags.c:126
+ msgid "capturing digital zoom ratio"
+ msgstr "współczynnik powiększenia cyfrowego"
+
++#: gst-libs/gst/tag/tags.c:127
+ msgid "Digital zoom ratio used when capturing an image"
+ msgstr "Współczynnik powiększenia cyfrowego użytego przy robieniu zdjęcia"
+
++#: gst-libs/gst/tag/tags.c:130
+ msgid "capturing iso speed"
+ msgstr "czułość ISO"
+
++#: gst-libs/gst/tag/tags.c:131
+ msgid "The ISO speed used when capturing an image"
+ msgstr "Czułość ISO użyta przy robieniu zdjęcia"
+
++#: gst-libs/gst/tag/tags.c:134
+ msgid "capturing exposure program"
+ msgstr "program ekspozycji"
+
++#: gst-libs/gst/tag/tags.c:135
+ msgid "The exposure program used when capturing an image"
+ msgstr "Program ekspozycji użyty przy robieniu zdjęcia"
+
++#: gst-libs/gst/tag/tags.c:138
+ msgid "capturing exposure mode"
+ msgstr "tryb ekspozycji"
+
++#: gst-libs/gst/tag/tags.c:139
+ msgid "The exposure mode used when capturing an image"
+ msgstr "Tryb ekspozycji użyty przy robieniu zdjęcia"
+
++#: gst-libs/gst/tag/tags.c:142
+ msgid "capturing exposure compensation"
+ msgstr "kompensacja ekspozycji"
+
++#: gst-libs/gst/tag/tags.c:143
+ msgid "The exposure compensation used when capturing an image"
+ msgstr "Kompensacja ekspozycji użyta przy robieniu zdjęcia"
+
++#: gst-libs/gst/tag/tags.c:146
+ msgid "capturing scene capture type"
+ msgstr "Tryb sceny zdjęcia"
+
++#: gst-libs/gst/tag/tags.c:147
+ msgid "The scene capture mode used when capturing an image"
+ msgstr "Tryb sceny użyty przy robieniu zdjęcia"
+
++#: gst-libs/gst/tag/tags.c:150
+ msgid "capturing gain adjustment"
+ msgstr "korekcja zdjęcia"
+
++#: gst-libs/gst/tag/tags.c:151
+ msgid "The overall gain adjustment applied on an image"
+ msgstr "Całkowita korekcja zastosowana dla zdjęcia"
+
++#: gst-libs/gst/tag/tags.c:154
+ msgid "capturing white balance"
+ msgstr "balans bieli"
+
++#: gst-libs/gst/tag/tags.c:155
+ msgid "The white balance mode set when capturing an image"
+ msgstr "Tryb balansu bieli ustawiony przy robieniu zdjęcia"
+
++#: gst-libs/gst/tag/tags.c:158
+ msgid "capturing contrast"
+ msgstr "kontrast zdjęcia"
+
++#: gst-libs/gst/tag/tags.c:159
+ msgid "The direction of contrast processing applied when capturing an image"
+ msgstr "Kierunek korekty kontrastu zastosowanej przy robieniu zdjęcia"
+
++#: gst-libs/gst/tag/tags.c:163
+ msgid "capturing saturation"
+ msgstr "nasycenie zdjęcia"
+
++#: gst-libs/gst/tag/tags.c:164
+ msgid "The direction of saturation processing applied when capturing an image"
+ msgstr "Kierunek korekty nasycenia zastosowanej przy robieniu zdjęcia"
+
++#: gst-libs/gst/tag/tags.c:168
+ msgid "capturing sharpness"
+ msgstr "ostrość zdjęcia"
+
++#: gst-libs/gst/tag/tags.c:169
+ msgid "The direction of sharpness processing applied when capturing an image"
+ msgstr "Kierunek korekty ostrości zastosowanej przy robieniu zdjęcia"
+
++#: gst-libs/gst/tag/tags.c:173
+ msgid "capturing flash fired"
+ msgstr "użycie flesza"
+
++#: gst-libs/gst/tag/tags.c:174
+ msgid "If the flash fired while capturing an image"
+ msgstr "Czy flesz był uruchomiony przy robieniu zdjęcia"
+
++#: gst-libs/gst/tag/tags.c:177
+ msgid "capturing flash mode"
+ msgstr "tryb flesza"
+
++#: gst-libs/gst/tag/tags.c:178
+ msgid "The selected flash mode while capturing an image"
+ msgstr "Tryb flesza wybrany przy robieniu zdjęcia"
+
++#: gst-libs/gst/tag/tags.c:181
+ msgid "capturing metering mode"
+ msgstr "tryb pomiaru dla zdjęcia"
+
-msgid ""
-"Don't exit after showing the initial device list, but wait for devices to "
-"added/removed."
-msgstr ""
-"Bez kończenia po wyświetleniu początkowej listy urządzeń, oczekiwanie na "
-"dodanie/usunięcie urządzeń."
++#: gst-libs/gst/tag/tags.c:182
++msgid "The metering mode used while determining exposure for capturing an image"
+ msgstr "Tryb pomiaru użyty przy określaniu ekspozycji dla zdjęcia"
+
++#: gst-libs/gst/tag/tags.c:186
+ msgid "capturing source"
+ msgstr "źródło zdjęcia"
+
++#: gst-libs/gst/tag/tags.c:187
+ msgid "The source or type of device used for the capture"
+ msgstr "Źródło lub rodzaj urządzenia użytego do zrobienia zdjęcia"
+
++#: gst-libs/gst/tag/tags.c:190
+ msgid "image horizontal ppi"
+ msgstr "rozdzielczość pozioma"
+
++#: gst-libs/gst/tag/tags.c:191
+ msgid "Media (image/video) intended horizontal pixel density in ppi"
+ msgstr "Rozdzielczość pozioma nośnika w pikselach na cal"
+
++#: gst-libs/gst/tag/tags.c:194
+ msgid "image vertical ppi"
+ msgstr "rozdzielczość pionowa"
+
++#: gst-libs/gst/tag/tags.c:195
+ msgid "Media (image/video) intended vertical pixel density in ppi"
+ msgstr "Rozdzielczość pionowa nośnika w pikselach na cal"
+
++#: gst-libs/gst/tag/tags.c:198
+ msgid "ID3v2 frame"
+ msgstr "ramka ID3v2"
+
++#: gst-libs/gst/tag/tags.c:198
+ msgid "unparsed id3v2 tag frame"
+ msgstr "ramka znacznika id3v2"
+
++#: gst-libs/gst/tag/tags.c:202
+ msgid "musical-key"
+ msgstr "klucz muzyczny"
+
++#: gst-libs/gst/tag/tags.c:202
+ msgid "Initial key in which the sound starts"
+ msgstr "Początkowy klucz, w którym zaczyna się dźwięk"
+
++#: tools/gst-device-monitor.c:260 tools/gst-play.c:1444
+ msgid "Print version information and exit"
+ msgstr "Wypisanie informacji o wersji i zakończenie"
+
-msgstr ""
-"Sterowanie zachowaniem odtwarzania przez ustawianie własności 'flags' "
-"elementu playbin"
++#: tools/gst-device-monitor.c:262
++msgid "Don't exit after showing the initial device list, but wait for devices to added/removed."
++msgstr "Bez kończenia po wyświetleniu początkowej listy urządzeń, oczekiwanie na dodanie/usunięcie urządzeń."
+
++#: tools/gst-play.c:310
+ #, c-format
+ msgid "Volume: %.0f%%"
+ msgstr "Głośność: %.0f%%"
+
++#: tools/gst-play.c:349
+ msgid "Buffering..."
+ msgstr "Buforowanie..."
+
++#: tools/gst-play.c:370
+ msgid "Clock lost, selecting a new one\n"
+ msgstr "Utracono zegar, wybieranie nowego\n"
+
++#: tools/gst-play.c:400 tools/gst-play.c:446 tools/gst-play.c:883
++#: tools/gst-play.c:1347
+ msgid "Reached end of play list."
+ msgstr "Osiągnięto koniec listy odtwarzania."
+
++#: tools/gst-play.c:613
+ msgid "Paused"
+ msgstr "Pauza"
+
++#: tools/gst-play.c:671
+ #, c-format
+ msgid "Now playing %s\n"
+ msgstr "Trwa odtwarzanie %s\n"
+
++#: tools/gst-play.c:734
+ #, c-format
+ msgid "About to finish, preparing next title: %s"
+ msgstr "Blisko końca, przygotowywanie następnego tytułu: %s"
+
++#: tools/gst-play.c:979
+ #, c-format
+ msgid "Playback rate: %.2f"
+ msgstr "Tempo odtwarzania: %.2f"
+
++#: tools/gst-play.c:983
+ #, c-format
+ msgid "Could not change playback rate to %.2f"
+ msgstr "Nie udało się zmienić tempa odtwarzania na %.2f"
+
++#: tools/gst-play.c:1287
+ msgid "space"
+ msgstr "spacja"
+
++#: tools/gst-play.c:1287
+ msgid "pause/unpause"
+ msgstr "pauza/wznowienie"
+
++#: tools/gst-play.c:1288
+ msgid "q or ESC"
+ msgstr "q lub ESC"
+
++#: tools/gst-play.c:1288
+ msgid "quit"
+ msgstr "wyjście"
+
++#: tools/gst-play.c:1289
+ msgid "> or n"
+ msgstr "> lub n"
+
++#: tools/gst-play.c:1289
+ msgid "play next"
+ msgstr "odtworzenie następnego"
+
++#: tools/gst-play.c:1290
+ msgid "< or b"
+ msgstr "< lub b"
+
++#: tools/gst-play.c:1290
+ msgid "play previous"
+ msgstr "odtworzenie poprzedniego"
+
++#: tools/gst-play.c:1291
+ msgid "seek forward"
+ msgstr "przewinięcie w przód"
+
++#: tools/gst-play.c:1292
+ msgid "seek backward"
+ msgstr "przewinięcie w tył"
+
++#: tools/gst-play.c:1293
+ msgid "volume up"
+ msgstr "większa głośność"
+
++#: tools/gst-play.c:1294
+ msgid "volume down"
+ msgstr "mniejsza głośność"
+
++#: tools/gst-play.c:1295
+ msgid "increase playback rate"
+ msgstr "większe tempo odtwarzania"
+
++#: tools/gst-play.c:1296
+ msgid "decrease playback rate"
+ msgstr "mniejsze tempo odtwarzania"
+
++#: tools/gst-play.c:1297
+ msgid "change playback direction"
+ msgstr "zmiana kierunku odtwarzania"
+
++#: tools/gst-play.c:1298
+ msgid "enable/disable trick modes"
+ msgstr "włączenie/wyłączenie trybów trików"
+
++#: tools/gst-play.c:1299
+ msgid "change audio track"
+ msgstr "zmiana ścieżki dźwiękowej"
+
++#: tools/gst-play.c:1300
+ msgid "change video track"
+ msgstr "zmiana ścieżki filmowej"
+
++#: tools/gst-play.c:1301
+ msgid "change subtitle track"
+ msgstr "zmiana ścieżki podpisów"
+
++#: tools/gst-play.c:1302
+ msgid "seek to beginning"
+ msgstr "przewinięcie na początek"
+
++#: tools/gst-play.c:1303
+ msgid "show keyboard shortcuts"
+ msgstr "wyświetlenie skrótów klawiatury"
+
++#: tools/gst-play.c:1306
+ msgid "Interactive mode - keyboard controls:"
+ msgstr "Tryb interaktywny - sterowanie z klawiatury:"
+
++#: tools/gst-play.c:1439
+ msgid "Output status information and property notifications"
+ msgstr "Informacje o stanie wyjścia i powiadomienia o własnościach"
+
++#: tools/gst-play.c:1441
+ msgid "Control playback behaviour setting playbin 'flags' property"
++msgstr "Sterowanie zachowaniem odtwarzania przez ustawianie własności 'flags' elementu playbin"
+
++#: tools/gst-play.c:1446
+ msgid "Video sink to use (default is autovideosink)"
+ msgstr "Element pochłaniający obraz (domyślny to autovideosink)"
+
++#: tools/gst-play.c:1448
+ msgid "Audio sink to use (default is autoaudiosink)"
+ msgstr "Element pochłaniający dźwięk (domyślny to autoaudiosink)"
+
++#: tools/gst-play.c:1450
+ msgid "Enable gapless playback"
+ msgstr "Włączenie odtwarzania bez przerw"
+
++#: tools/gst-play.c:1452
+ msgid "Shuffle playlist"
+ msgstr "Przetasowanie listy odtwarzania"
+
++#: tools/gst-play.c:1455
+ msgid "Disable interactive control via the keyboard"
+ msgstr "Wyłączenie interaktywnego sterowania z klawiatury"
+
++#: tools/gst-play.c:1457
+ msgid "Volume"
+ msgstr "Głośność"
+
++#: tools/gst-play.c:1459
+ msgid "Playlist file containing input media files"
+ msgstr "Plik listy odtwarzania zawierający wejściowe pliki multimedialne"
+
++#: tools/gst-play.c:1461
+ msgid "Do not print any output (apart from errors)"
+ msgstr "Bez wypisywania żadnego wyjścia (poza błędami)"
+
++#: tools/gst-play.c:1463
+ msgid "Use playbin3 pipeline"
+ msgstr "Użycie potoku playbin3"
+
++#: tools/gst-play.c:1464
+ msgid "(default varies depending on 'USE_PLAYBIN' env variable)"
+ msgstr "(wartość domyślna zależy od zmiennej środowiskowej 'USE_PLAYBIN')"
+
++#: tools/gst-play.c:1468
++msgid "Keep showing the last frame on EOS until quit or playlist change command (gapless is ignored)"
++msgstr "Pokazywanie ostatniej klatki po EOS aż do polecenia zakończenia lub zmiany listy odtwarzania (brak przerw jest ignorowany)"
++
++#: tools/gst-play.c:1546
+ #, c-format
+ msgid "Usage: %s FILE1|URI1 [FILE2|URI2] [FILE3|URI3] ..."
+ msgstr "Składnia: %s PLIK1|URI1 [PLIK2|URI2] [PLIK3|URI3] ..."
+
++#: tools/gst-play.c:1550
+ msgid "You must provide at least one filename or URI to play."
+ msgstr "Trzeba podać przynajmniej jedną nazwę pliku lub URI do odtworzenia."
+
++#: tools/gst-play.c:1590
+ msgid "Press 'k' to see a list of keyboard shortcuts.\n"
+ msgstr "Klawisz 'k' wyświetla listę skrótów klawiatury.\n"
--- /dev/null
-"Project-Id-Version: gst-plugins-base 1.15.1\n"
+ # Translation for gst-plugins-base messages to Russian
+ # This file is put in the public domain.
+ #
+ # Артём Попов <artfwo@gmail.com>, 2009.
+ # Pavel Maryanov <acid_jack@ukr.net>, 2009.
+ # Yuri Kozlov <yuray@komyakino.ru>, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2019.
+ msgid ""
+ msgstr ""
-"POT-Creation-Date: 2019-02-26 11:43+0000\n"
-"PO-Revision-Date: 2019-01-25 19:47+0300\n"
++"Project-Id-Version: gst-plugins-base 1.16.0\n"
+ "Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n"
-"%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);\n"
++"POT-Creation-Date: 2019-04-19 00:20+0100\n"
++"PO-Revision-Date: 2019-09-03 18:48+0300\n"
+ "Last-Translator: Yuri Kozlov <yuray@komyakino.ru>\n"
+ "Language-Team: Russian <gnu@d07.ru>\n"
+ "Language: ru\n"
+ "MIME-Version: 1.0\n"
+ "Content-Type: text/plain; charset=UTF-8\n"
+ "Content-Transfer-Encoding: 8bit\n"
+ "X-Bugs: Report translation errors to the Language-Team address.\n"
+ "X-Generator: Lokalize 2.0\n"
-msgstr ""
-"Не удалось открыть устройство для воспроизведения в %d-канальном режиме."
++"Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);\n"
+
++#: ext/alsa/gstalsasink.c:579
+ msgid "Could not open device for playback in mono mode."
+ msgstr "Не удалось открыть устройство для воспроизведения в режиме моно."
+
++#: ext/alsa/gstalsasink.c:581
+ msgid "Could not open device for playback in stereo mode."
+ msgstr "Не удалось открыть устройство для воспроизведения в режиме стерео."
+
++#: ext/alsa/gstalsasink.c:585
+ #, c-format
+ msgid "Could not open device for playback in %d-channel mode."
-msgid ""
-"Could not open audio device for playback. Device is being used by another "
-"application."
-msgstr ""
-"Не удалось открыть аудио-устройство для воспроизведения. Устройство "
-"используется другим приложением."
++msgstr "Не удалось открыть устройство для воспроизведения в %d-канальном режиме."
+
-msgid ""
-"Could not open audio device for recording. Device is being used by another "
-"application."
-msgstr ""
-"Не удалось открыть устройство для записи. Устройство используется другим "
-"приложением."
++#: ext/alsa/gstalsasink.c:858
++msgid "Could not open audio device for playback. Device is being used by another application."
++msgstr "Не удалось открыть аудио-устройство для воспроизведения. Устройство используется другим приложением."
+
++#: ext/alsa/gstalsasink.c:863
+ msgid "Could not open audio device for playback."
+ msgstr "Не удалось открыть аудио-устройство для воспроизведения."
+
++#: ext/alsa/gstalsasink.c:1076
+ msgid "Error outputting to audio device. The device has been disconnected."
+ msgstr "Ошибка вывода в аудио-устройство. Устройство отсоединено."
+
++#: ext/alsa/gstalsasrc.c:444
+ msgid "Could not open device for recording in mono mode."
+ msgstr "Не удалось открыть устройство для записи в режиме моно."
+
++#: ext/alsa/gstalsasrc.c:446
+ msgid "Could not open device for recording in stereo mode."
+ msgstr "Не удалось открыть устройство для записи в режиме стерео."
+
++#: ext/alsa/gstalsasrc.c:450
+ #, c-format
+ msgid "Could not open device for recording in %d-channel mode"
+ msgstr "Не удалось открыть устройство для записи в %d-канальном режиме."
+
-msgstr ""
-"Воспроизведение текстового файла без видео или визуализации невозможно."
++#: ext/alsa/gstalsasrc.c:741
++msgid "Could not open audio device for recording. Device is being used by another application."
++msgstr "Не удалось открыть устройство для записи. Устройство используется другим приложением."
+
++#: ext/alsa/gstalsasrc.c:746
+ msgid "Could not open audio device for recording."
+ msgstr "Не удалось открыть устройство для записи."
+
++#: ext/alsa/gstalsasrc.c:992
+ msgid "Error recording from audio device. The device has been disconnected."
+ msgstr "Ошибка записи из аудио-устройства. Устройство отсоединено."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:277 ext/cdparanoia/gstcdparanoiasrc.c:283
+ msgid "Could not open CD device for reading."
+ msgstr "Не удалось открыть CD-устройство для чтения."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:409
+ msgid "Could not seek CD."
+ msgstr "Не удалось сменить позицию воспроизведения CD."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:417
+ msgid "Could not read CD."
+ msgstr "Не удалось прочитать CD."
+
++#: ext/gl/gstgltestsrc.c:491
+ msgid "failed to draw pattern"
+ msgstr "ошибка при рисовании шаблона"
+
++#: ext/gl/gstgltestsrc.c:492
+ msgid "A GL error occurred"
+ msgstr "Произошла ошибка GL"
+
++#: ext/gl/gstgltestsrc.c:498
+ msgid "format wasn't negotiated before get function"
+ msgstr "перед функцией получения не был согласован формат"
+
++#: gst/encoding/gstencodebin.c:1636 gst/playback/gstplaybin2.c:3399
++#: gst/playback/gstplaysink.c:1522 gst/playback/gstplaysink.c:1535
++#: gst/playback/gstplaysink.c:1872 gst/playback/gstplaysink.c:1904
++#: gst/playback/gstplaysink.c:2488 gst/playback/gstplaysink.c:2537
++#: gst/playback/gstplaysink.c:2552 gst/playback/gstplaysink.c:2577
++#: gst/playback/gstplaysink.c:2609 gst/playback/gstplaysink.c:2757
++#: gst/playback/gstplaysink.c:2788 gst/playback/gstplaysink.c:3166
++#: gst/playback/gstplaysink.c:3175 gst/playback/gstplaysink.c:3184
++#: gst/playback/gstplaysink.c:3193 gst/playback/gstplaysink.c:3606
++#: gst/playback/gstplaysink.c:4526 gst/playback/gstplaysinkconvertbin.c:97
++#: gst/playback/gstplaysinkconvertbin.c:117
++#: gst/playback/gsturidecodebin.c:1472 gst/playback/gsturisourcebin.c:1597
+ #, c-format
+ msgid "Missing element '%s' - check your GStreamer installation."
+ msgstr "Отсутствует элемент «%s» — проверьте правильность установки GStreamer."
+
++#: gst/playback/gstdecodebin2.c:1883 gst/playback/gstparsebin.c:1553
+ msgid "Could not determine type of stream"
+ msgstr "Не удалось определить тип потока"
+
++#: gst/playback/gstdecodebin2.c:2861 gst/playback/gstparsebin.c:2385
+ msgid "This appears to be a text file"
+ msgstr "Этот файл является текстовым"
+
++#: gst/playback/gstplaybin2.c:5452
+ msgid "Could not create \"uridecodebin\" element."
+ msgstr "Не удалось создать элемент «uridecodebin»."
+
++#: gst/playback/gstplaybin3.c:4558
+ msgid "Could not create \"uridecodebin3\" element."
+ msgstr "Не удалось создать элемент «uridecodebin3»."
+
++#: gst/playback/gstplaysink.c:2003
+ #, c-format
+ msgid "Both autovideosink and %s elements are missing."
+ msgstr "Отсутствуют элементы autovideosink и %s."
+
++#: gst/playback/gstplaysink.c:2007
+ msgid "The autovideosink element is missing."
+ msgstr "Отсутствует элемент autovideosink."
+
++#: gst/playback/gstplaysink.c:2012
+ #, c-format
+ msgid "Configured videosink %s is not working."
+ msgstr "Настроенный videosink %s не работает."
+
++#: gst/playback/gstplaysink.c:2016
+ #, c-format
+ msgid "Both autovideosink and %s elements are not working."
+ msgstr "Элементы autovideosink и %s не работают."
+
++#: gst/playback/gstplaysink.c:2020
+ msgid "The autovideosink element is not working."
+ msgstr "Элемент autovideosink не работает."
+
++#: gst/playback/gstplaysink.c:2525
+ msgid "Custom text sink element is not usable."
+ msgstr "Входной элемент пользовательского текста не работоспособен."
+
++#: gst/playback/gstplaysink.c:2903
+ msgid "No volume control found"
+ msgstr "Не найден элемент управления громкостью"
+
++#: gst/playback/gstplaysink.c:2933
+ #, c-format
+ msgid "Both autoaudiosink and %s elements are missing."
+ msgstr "Отсутствуют элементы autoaudiosink и %s."
+
++#: gst/playback/gstplaysink.c:2937
+ msgid "The autoaudiosink element is missing."
+ msgstr "Отсутствует элемент autoaudiosink."
+
++#: gst/playback/gstplaysink.c:2942
+ #, c-format
+ msgid "Configured audiosink %s is not working."
+ msgstr "Настроенный audiosink %s не работает."
+
++#: gst/playback/gstplaysink.c:2946
+ #, c-format
+ msgid "Both autoaudiosink and %s elements are not working."
+ msgstr "Элементы autoaudiosink и %s не работают."
+
++#: gst/playback/gstplaysink.c:2950
+ msgid "The autoaudiosink element is not working."
+ msgstr "Элемент autoaudiosink не работает."
+
++#: gst/playback/gstplaysink.c:3269 gst/playback/gstplaysink.c:3274
+ msgid "Can't play a text file without video or visualizations."
-msgid ""
-"35 mm equivalent focal length of the lens used capturing the image, in mm"
-msgstr ""
-"35 мм эквивалент фокусного расстояния зеркала при съёмке изображения, в мм"
++msgstr "Воспроизведение текстового файла без видео или визуализации невозможно."
+
++#: gst/playback/gsturidecodebin.c:921
+ #, c-format
+ msgid "No decoder available for type '%s'."
+ msgstr "Для типа «%s» недоступен декодер."
+
++#: gst/playback/gsturidecodebin.c:1378 gst/playback/gsturisourcebin.c:1491
+ msgid "No URI specified to play from."
+ msgstr "Не указан URI источника воспроизведения."
+
++#: gst/playback/gsturidecodebin.c:1384 gst/playback/gsturisourcebin.c:1497
+ #, c-format
+ msgid "Invalid URI \"%s\"."
+ msgstr "Неверный URI «%s»."
+
++#: gst/playback/gsturidecodebin.c:1391 gst/playback/gsturisourcebin.c:1504
+ msgid "This stream type cannot be played yet."
+ msgstr "Воспроизведение этого потока в данный момент невозможно."
+
++#: gst/playback/gsturidecodebin.c:1409 gst/playback/gsturisourcebin.c:1522
+ #, c-format
+ msgid "No URI handler implemented for \"%s\"."
+ msgstr "Для «%s» не реализован обработчик URI."
+
++#: gst/playback/gsturidecodebin.c:2289 gst/playback/gsturisourcebin.c:2228
+ msgid "Source element is invalid."
+ msgstr "Неверный элемент источника."
+
++#: gst/tcp/gsttcpclientsink.c:216
+ #, c-format
+ msgid "Error while sending data to \"%s:%d\"."
+ msgstr "Ошибка отправки данных в «%s:%d»."
+
++#: gst-libs/gst/audio/gstaudiobasesrc.c:842
+ msgid "Can't record audio fast enough"
+ msgstr "Невозможна запись аудио с достаточной скоростью"
+
++#: gst-libs/gst/audio/gstaudiocdsrc.c:1625
+ msgid "This CD has no audio tracks"
+ msgstr "На CD нет звуковых дорожек"
+
++#: gst-libs/gst/pbutils/descriptions.c:89
+ msgid "ID3 tag"
+ msgstr "ID3-тег"
+
++#: gst-libs/gst/pbutils/descriptions.c:90
++#: gst-libs/gst/pbutils/descriptions.c:91
+ msgid "APE tag"
+ msgstr "APE-тег"
+
++#: gst-libs/gst/pbutils/descriptions.c:92
+ msgid "ICY internet radio"
+ msgstr "Интернет-радио ICY"
+
++#: gst-libs/gst/pbutils/descriptions.c:127
+ msgid "Apple Lossless Audio (ALAC)"
+ msgstr "Apple Lossless Audio (ALAC)"
+
++#: gst-libs/gst/pbutils/descriptions.c:143
+ msgid "Free Lossless Audio Codec (FLAC)"
+ msgstr "Free Lossless Audio Codec (FLAC)"
+
++#: gst-libs/gst/pbutils/descriptions.c:173
++#: gst-libs/gst/pbutils/descriptions.c:174
+ msgid "Lossless True Audio (TTA)"
+ msgstr "Lossless True Audio (TTA)"
+
++#: gst-libs/gst/pbutils/descriptions.c:182
+ msgid "Windows Media Speech"
+ msgstr "Windows Media Speech"
+
++#: gst-libs/gst/pbutils/descriptions.c:199
+ msgid "CYUV Lossless"
+ msgstr "CYUV Lossless"
+
++#: gst-libs/gst/pbutils/descriptions.c:203
+ msgid "FFMpeg v1"
+ msgstr "FFMpeg v1"
+
++#: gst-libs/gst/pbutils/descriptions.c:217
+ msgid "Lossless MSZH"
+ msgstr "Lossless MSZH"
+
++#: gst-libs/gst/pbutils/descriptions.c:228
+ msgid "Run-length encoding"
+ msgstr "RLE-сжатие"
+
++#: gst-libs/gst/pbutils/descriptions.c:274
+ msgid "Timed Text"
+ msgstr "Timed Text"
+
++#: gst-libs/gst/pbutils/descriptions.c:278
+ msgid "Subtitle"
+ msgstr "Субтитры"
+
++#: gst-libs/gst/pbutils/descriptions.c:279
+ msgid "MPL2 subtitle format"
+ msgstr "Формат субтитров MPL2"
+
++#: gst-libs/gst/pbutils/descriptions.c:280
+ msgid "DKS subtitle format"
+ msgstr "Формат субтитров DKS"
+
++#: gst-libs/gst/pbutils/descriptions.c:281
+ msgid "QTtext subtitle format"
+ msgstr "Формат субтитров QTtext"
+
++#: gst-libs/gst/pbutils/descriptions.c:282
+ msgid "Sami subtitle format"
+ msgstr "Формат субтитров Sami"
+
++#: gst-libs/gst/pbutils/descriptions.c:283
+ msgid "TMPlayer subtitle format"
+ msgstr "Формат субтитров TMPlayer"
+
++#: gst-libs/gst/pbutils/descriptions.c:287
+ msgid "CEA 608 Closed Caption"
+ msgstr "Скрытые субтитры CEA 608"
+
++#: gst-libs/gst/pbutils/descriptions.c:288
+ msgid "CEA 708 Closed Caption"
+ msgstr "Скрытые субтитры CEA 708"
+
++#: gst-libs/gst/pbutils/descriptions.c:289
+ msgid "Kate subtitle format"
+ msgstr "Формат субтитров Kate"
+
++#: gst-libs/gst/pbutils/descriptions.c:290
+ msgid "WebVTT subtitle format"
+ msgstr "Формат субтитров WebVTT"
+
++#: gst-libs/gst/pbutils/descriptions.c:455
++#: gst-libs/gst/pbutils/descriptions.c:458
++#: gst-libs/gst/pbutils/descriptions.c:508
+ msgid "Uncompressed video"
+ msgstr "Несжатое видео"
+
++#: gst-libs/gst/pbutils/descriptions.c:463
+ msgid "Uncompressed gray"
+ msgstr "Несжатый серый"
+
++#: gst-libs/gst/pbutils/descriptions.c:486
+ #, c-format
+ msgid "Uncompressed packed YUV %s"
+ msgstr "Несжатый смешанный YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:488
+ #, c-format
+ msgid "Uncompressed semi-planar YUV %s"
+ msgstr "Несжатый полу-планарный YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:490
+ #, c-format
+ msgid "Uncompressed planar YUV %s"
+ msgstr "Несжатый планарный YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:501
+ #, c-format
+ msgid "Uncompressed palettized %d-bit %s"
+ msgstr "Несжатое %d-битное %s с палитрой"
+
++#: gst-libs/gst/pbutils/descriptions.c:504
+ #, c-format
+ msgid "Uncompressed %d-bit %s"
+ msgstr "Несжатое %d-битное %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:586
+ #, c-format
+ msgid "DivX MPEG-4 Version %d"
+ msgstr "DivX MPEG-4 Версия %d"
+
++#: gst-libs/gst/pbutils/descriptions.c:834
+ msgid "Uncompressed audio"
+ msgstr "Несжатое аудио"
+
++#: gst-libs/gst/pbutils/descriptions.c:840
+ #, c-format
+ msgid "Raw %d-bit %s audio"
+ msgstr "Необработанное %d-битное аудио %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:942
+ msgid "Audio CD source"
+ msgstr "Источник аудио-CD"
+
++#: gst-libs/gst/pbutils/descriptions.c:945
+ msgid "DVD source"
+ msgstr "Источник DVD"
+
++#: gst-libs/gst/pbutils/descriptions.c:948
+ msgid "Real Time Streaming Protocol (RTSP) source"
+ msgstr "Источник протокола Real Time Streaming Protocol (RTSP)"
+
++#: gst-libs/gst/pbutils/descriptions.c:952
+ msgid "Microsoft Media Server (MMS) protocol source"
+ msgstr "Источник протокола Microsoft Media Server (MMS)"
+
++#: gst-libs/gst/pbutils/descriptions.c:960
+ #, c-format
+ msgid "%s protocol source"
+ msgstr "Источник протокола %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1034
+ #, c-format
+ msgid "%s video RTP depayloader"
+ msgstr "RTP-разгрузчик %s-видео"
+
++#: gst-libs/gst/pbutils/descriptions.c:1036
+ #, c-format
+ msgid "%s audio RTP depayloader"
+ msgstr "RTP-разгрузчик %s-аудио"
+
++#: gst-libs/gst/pbutils/descriptions.c:1038
+ #, c-format
+ msgid "%s RTP depayloader"
+ msgstr "RTP-разгрузчик %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1045
+ #, c-format
+ msgid "%s demuxer"
+ msgstr "Демуксер %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1047
+ #, c-format
+ msgid "%s decoder"
+ msgstr "Декодер %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1086
+ #, c-format
+ msgid "%s video RTP payloader"
+ msgstr "RTP-нагрузчик %s-видео"
+
++#: gst-libs/gst/pbutils/descriptions.c:1088
+ #, c-format
+ msgid "%s audio RTP payloader"
+ msgstr "RTP-нагрузчик %s-аудио"
+
++#: gst-libs/gst/pbutils/descriptions.c:1090
+ #, c-format
+ msgid "%s RTP payloader"
+ msgstr "RTP-нагрузчик %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1097
+ #, c-format
+ msgid "%s muxer"
+ msgstr "Муксер %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1099
+ #, c-format
+ msgid "%s encoder"
+ msgstr "Кодировщик %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1133
+ #, c-format
+ msgid "GStreamer element %s"
+ msgstr "Элемент GStreamer %s"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:558
+ msgid "Unknown source element"
+ msgstr "Неизвестный элемент-источник"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:561
+ msgid "Unknown sink element"
+ msgstr "Неизвестный элемент-приёмник"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:564
+ msgid "Unknown element"
+ msgstr "Неизвестный элемент"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:567
+ msgid "Unknown decoder element"
+ msgstr "Неизвестный элемент-декодер"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:570
+ msgid "Unknown encoder element"
+ msgstr "Неизвестный элемент-кодировщик"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:575
+ msgid "Plugin or element of unknown type"
+ msgstr "Модуль или элемент неизвестного типа"
+
++#: gst-libs/gst/tag/gsttagdemux.c:1243
+ msgid "Failed to read tag: not enough data"
+ msgstr "Не удалось прочитать тег: недостаточно данных"
+
++#: gst-libs/gst/tag/tags.c:77
+ msgid "track ID"
+ msgstr "ID дорожки"
+
++#: gst-libs/gst/tag/tags.c:77
+ msgid "MusicBrainz track ID"
+ msgstr "MusicBrainz ID дорожки"
+
++#: gst-libs/gst/tag/tags.c:79
+ msgid "artist ID"
+ msgstr "ID исполнителя"
+
++#: gst-libs/gst/tag/tags.c:79
+ msgid "MusicBrainz artist ID"
+ msgstr "MusicBrainz ID исполнителя"
+
++#: gst-libs/gst/tag/tags.c:81
+ msgid "album ID"
+ msgstr "ID альбома"
+
++#: gst-libs/gst/tag/tags.c:81
+ msgid "MusicBrainz album ID"
+ msgstr "MusicBrainz ID альбома"
+
++#: gst-libs/gst/tag/tags.c:84
+ msgid "album artist ID"
+ msgstr "ID исполнителя альбома"
+
++#: gst-libs/gst/tag/tags.c:84
+ msgid "MusicBrainz album artist ID"
+ msgstr "MusicBrainz ID исполнителя альбома"
+
++#: gst-libs/gst/tag/tags.c:86
+ msgid "track TRM ID"
+ msgstr "ID TRM дорожки"
+
++#: gst-libs/gst/tag/tags.c:86
+ msgid "MusicBrainz TRM ID"
+ msgstr "MusicBrainz TRM ID"
+
++#: gst-libs/gst/tag/tags.c:108
+ msgid "capturing shutter speed"
+ msgstr "выдержка при съёмке"
+
++#: gst-libs/gst/tag/tags.c:109
+ msgid "Shutter speed used when capturing an image, in seconds"
+ msgstr "Выдержка при съёмке изображения, в секундах"
+
++#: gst-libs/gst/tag/tags.c:112
+ msgid "capturing focal ratio"
+ msgstr "диафрагменное число при съёмке"
+
++#: gst-libs/gst/tag/tags.c:113
+ msgid "Focal ratio (f-number) used when capturing the image"
+ msgstr "Диафрагменное число (f) при съёмке изображения"
+
++#: gst-libs/gst/tag/tags.c:116
+ msgid "capturing focal length"
+ msgstr "фокусное расстояние при съёмке"
+
++#: gst-libs/gst/tag/tags.c:117
+ msgid "Focal length of the lens used capturing the image, in mm"
+ msgstr "Фокусное расстояние зеркала при съёмке изображения, в мм"
+
++#: gst-libs/gst/tag/tags.c:121
+ msgid "capturing 35 mm equivalent focal length"
+ msgstr "съёмка при 35 мм эквивалентном фокусном расстоянии"
+
-msgid ""
-"The metering mode used while determining exposure for capturing an image"
-msgstr ""
-"Режим замера, использованный для определения экспозиции при съёмке "
-"изображения"
++#: gst-libs/gst/tag/tags.c:122
++msgid "35 mm equivalent focal length of the lens used capturing the image, in mm"
++msgstr "35 мм эквивалент фокусного расстояния зеркала при съёмке изображения, в мм"
+
++#: gst-libs/gst/tag/tags.c:126
+ msgid "capturing digital zoom ratio"
+ msgstr "коэффициент цифрового трансфокатора при съёмке"
+
++#: gst-libs/gst/tag/tags.c:127
+ msgid "Digital zoom ratio used when capturing an image"
+ msgstr "Коэффициент цифрового трансфокатора при съёмке изображения"
+
++#: gst-libs/gst/tag/tags.c:130
+ msgid "capturing iso speed"
+ msgstr "чувствительность ISO при съёмке"
+
++#: gst-libs/gst/tag/tags.c:131
+ msgid "The ISO speed used when capturing an image"
+ msgstr "Чувствительность ISO при съёмке изображения"
+
++#: gst-libs/gst/tag/tags.c:134
+ msgid "capturing exposure program"
+ msgstr "программа экспозиции при съёмке"
+
++#: gst-libs/gst/tag/tags.c:135
+ msgid "The exposure program used when capturing an image"
+ msgstr "Программа экспозиции при съёмке изображения"
+
++#: gst-libs/gst/tag/tags.c:138
+ msgid "capturing exposure mode"
+ msgstr "режим экспозиции при съёмке"
+
++#: gst-libs/gst/tag/tags.c:139
+ msgid "The exposure mode used when capturing an image"
+ msgstr "Режим экспозиции при съёмке изображения"
+
++#: gst-libs/gst/tag/tags.c:142
+ msgid "capturing exposure compensation"
+ msgstr "компенсация экспозиции при съёмке"
+
++#: gst-libs/gst/tag/tags.c:143
+ msgid "The exposure compensation used when capturing an image"
+ msgstr "Компенсация экспозиции при съёмке изображения"
+
++#: gst-libs/gst/tag/tags.c:146
+ msgid "capturing scene capture type"
+ msgstr "тип сцены при съёмке"
+
++#: gst-libs/gst/tag/tags.c:147
+ msgid "The scene capture mode used when capturing an image"
+ msgstr "Тип сцены при съёмке изображения"
+
++#: gst-libs/gst/tag/tags.c:150
+ msgid "capturing gain adjustment"
+ msgstr "регулировка усиления при съёмке"
+
++#: gst-libs/gst/tag/tags.c:151
+ msgid "The overall gain adjustment applied on an image"
+ msgstr "Итоговая настройка усиления, применяемая к изображению"
+
++#: gst-libs/gst/tag/tags.c:154
+ msgid "capturing white balance"
+ msgstr "баланс белого при съёмке"
+
++#: gst-libs/gst/tag/tags.c:155
+ msgid "The white balance mode set when capturing an image"
+ msgstr "Режим баланса белого при съёмке изображения"
+
++#: gst-libs/gst/tag/tags.c:158
+ msgid "capturing contrast"
+ msgstr "контрастность при съёмке"
+
++#: gst-libs/gst/tag/tags.c:159
+ msgid "The direction of contrast processing applied when capturing an image"
+ msgstr "Направление обработки контраста при съёмке изображения"
+
++#: gst-libs/gst/tag/tags.c:163
+ msgid "capturing saturation"
+ msgstr "насыщенность при съёмке"
+
++#: gst-libs/gst/tag/tags.c:164
+ msgid "The direction of saturation processing applied when capturing an image"
+ msgstr "Направление обработки насыщенности при съёмке изображения"
+
++#: gst-libs/gst/tag/tags.c:168
+ msgid "capturing sharpness"
+ msgstr "резкость при съёмке"
+
++#: gst-libs/gst/tag/tags.c:169
+ msgid "The direction of sharpness processing applied when capturing an image"
+ msgstr "Направление обработки резкости при съёмке изображения"
+
++#: gst-libs/gst/tag/tags.c:173
+ msgid "capturing flash fired"
+ msgstr "вспышка при съёмке"
+
++#: gst-libs/gst/tag/tags.c:174
+ msgid "If the flash fired while capturing an image"
+ msgstr "Сработала ли вспышка при съёмке изображения"
+
++#: gst-libs/gst/tag/tags.c:177
+ msgid "capturing flash mode"
+ msgstr "режим вспышки при съёмке"
+
++#: gst-libs/gst/tag/tags.c:178
+ msgid "The selected flash mode while capturing an image"
+ msgstr "Выбранный режим для вспышки при съёмке изображения"
+
++#: gst-libs/gst/tag/tags.c:181
+ msgid "capturing metering mode"
+ msgstr "режим замера при съёмке"
+
-msgid ""
-"Don't exit after showing the initial device list, but wait for devices to "
-"added/removed."
-msgstr ""
-"Не завершать работу после показа начального списка устройств, а ждать их "
-"добавления/удаления."
++#: gst-libs/gst/tag/tags.c:182
++msgid "The metering mode used while determining exposure for capturing an image"
++msgstr "Режим замера, использованный для определения экспозиции при съёмке изображения"
+
++#: gst-libs/gst/tag/tags.c:186
+ msgid "capturing source"
+ msgstr "источник для съёмки"
+
++#: gst-libs/gst/tag/tags.c:187
+ msgid "The source or type of device used for the capture"
+ msgstr "Источник или тип устройства, использованный для съёмки"
+
++#: gst-libs/gst/tag/tags.c:190
+ msgid "image horizontal ppi"
+ msgstr "изображение по горизонтали в ppi"
+
++#: gst-libs/gst/tag/tags.c:191
+ msgid "Media (image/video) intended horizontal pixel density in ppi"
+ msgstr "Горизонтальная плотность носителя (изображение/видео) в ppi"
+
++#: gst-libs/gst/tag/tags.c:194
+ msgid "image vertical ppi"
+ msgstr "изображение по вертикали в ppi"
+
++#: gst-libs/gst/tag/tags.c:195
+ msgid "Media (image/video) intended vertical pixel density in ppi"
+ msgstr "Вертикальная плотность носителя (изображение/видео) в ppi"
+
++#: gst-libs/gst/tag/tags.c:198
+ msgid "ID3v2 frame"
+ msgstr "кадр ID3v2"
+
++#: gst-libs/gst/tag/tags.c:198
+ msgid "unparsed id3v2 tag frame"
+ msgstr "неразбираемый тег кадра id3v2"
+
++#: gst-libs/gst/tag/tags.c:202
+ msgid "musical-key"
+ msgstr "музыкальный ключ"
+
++#: gst-libs/gst/tag/tags.c:202
+ msgid "Initial key in which the sound starts"
+ msgstr "Начальный ключ, с которого начинается звук"
+
++#: tools/gst-device-monitor.c:260 tools/gst-play.c:1444
+ msgid "Print version information and exit"
+ msgstr "Вывод информации о версии и выход из программы"
+
-msgstr ""
-"Управлять настройкой поведения воспроизведения playbin через свойство «flags»"
++#: tools/gst-device-monitor.c:262
++msgid "Don't exit after showing the initial device list, but wait for devices to added/removed."
++msgstr "Не завершать работу после показа начального списка устройств, а ждать их добавления/удаления."
+
++#: tools/gst-play.c:310
+ #, c-format
+ msgid "Volume: %.0f%%"
+ msgstr "Громкость: %.0f%%"
+
++#: tools/gst-play.c:349
+ msgid "Buffering..."
+ msgstr "Буферизация…"
+
++#: tools/gst-play.c:370
+ msgid "Clock lost, selecting a new one\n"
+ msgstr "Часы потеряны, выбираем новые\n"
+
++#: tools/gst-play.c:400 tools/gst-play.c:446 tools/gst-play.c:883
++#: tools/gst-play.c:1347
+ msgid "Reached end of play list."
+ msgstr "Достигнут конец списка воспроизведения."
+
++#: tools/gst-play.c:613
+ msgid "Paused"
+ msgstr "Пауза"
+
++#: tools/gst-play.c:671
+ #, c-format
+ msgid "Now playing %s\n"
+ msgstr "Сейчас проигрывается %s\n"
+
++#: tools/gst-play.c:734
+ #, c-format
+ msgid "About to finish, preparing next title: %s"
+ msgstr "Почти закончено, подготавливается следующая часть: %s"
+
++#: tools/gst-play.c:979
+ #, c-format
+ msgid "Playback rate: %.2f"
+ msgstr "Скорость воспроизведения: %.2f"
+
++#: tools/gst-play.c:983
+ #, c-format
+ msgid "Could not change playback rate to %.2f"
+ msgstr "Не удалось изменить скорость воспроизведения на %.2f"
+
++#: tools/gst-play.c:1287
+ msgid "space"
+ msgstr "пробел"
+
++#: tools/gst-play.c:1287
+ msgid "pause/unpause"
+ msgstr "пауза/продолжение"
+
++#: tools/gst-play.c:1288
+ msgid "q or ESC"
+ msgstr "q или ESC"
+
++#: tools/gst-play.c:1288
+ msgid "quit"
+ msgstr "выход"
+
++#: tools/gst-play.c:1289
+ msgid "> or n"
+ msgstr "> или n"
+
++#: tools/gst-play.c:1289
+ msgid "play next"
+ msgstr "воспроизвести следующее"
+
++#: tools/gst-play.c:1290
+ msgid "< or b"
+ msgstr "< или b"
+
++#: tools/gst-play.c:1290
+ msgid "play previous"
+ msgstr "воспроизвести предыдущее"
+
++#: tools/gst-play.c:1291
+ msgid "seek forward"
+ msgstr "перемотать вперед"
+
++#: tools/gst-play.c:1292
+ msgid "seek backward"
+ msgstr "перемотать назад"
+
++#: tools/gst-play.c:1293
+ msgid "volume up"
+ msgstr "увеличить громкость"
+
++#: tools/gst-play.c:1294
+ msgid "volume down"
+ msgstr "уменьшить громкость"
+
++#: tools/gst-play.c:1295
+ msgid "increase playback rate"
+ msgstr "увеличить скорость воспроизведения"
+
++#: tools/gst-play.c:1296
+ msgid "decrease playback rate"
+ msgstr "уменьшить скорость воспроизведения"
+
++#: tools/gst-play.c:1297
+ msgid "change playback direction"
+ msgstr "изменить направление воспроизведения"
+
++#: tools/gst-play.c:1298
+ msgid "enable/disable trick modes"
+ msgstr "включить/выключить режим рекомендаций"
+
++#: tools/gst-play.c:1299
+ msgid "change audio track"
+ msgstr "изменить звуковую дорожку"
+
++#: tools/gst-play.c:1300
+ msgid "change video track"
+ msgstr "изменить видео дорожку"
+
++#: tools/gst-play.c:1301
+ msgid "change subtitle track"
+ msgstr "изменить дорожку субтитров"
+
++#: tools/gst-play.c:1302
+ msgid "seek to beginning"
+ msgstr "перемотать в начало"
+
++#: tools/gst-play.c:1303
+ msgid "show keyboard shortcuts"
+ msgstr "показать клавиатурные сокращения"
+
++#: tools/gst-play.c:1306
+ msgid "Interactive mode - keyboard controls:"
+ msgstr "Интерактивный режим — управление с клавиатуры:"
+
++#: tools/gst-play.c:1439
+ msgid "Output status information and property notifications"
+ msgstr "Выводить информацию о состоянии и уведомления о свойствах"
+
++#: tools/gst-play.c:1441
+ msgid "Control playback behaviour setting playbin 'flags' property"
-#~ msgstr ""
-#~ "Для воспроизведения этого потока требуется модуль %s, но он не установлен."
++msgstr "Управлять настройкой поведения воспроизведения playbin через свойство «flags»"
+
++#: tools/gst-play.c:1446
+ msgid "Video sink to use (default is autovideosink)"
+ msgstr "Используемый приёмник видео (по умолчанию autovideosink)"
+
++#: tools/gst-play.c:1448
+ msgid "Audio sink to use (default is autoaudiosink)"
+ msgstr "Используемый приёмник аудио (по умолчанию autoaudiosink)"
+
++#: tools/gst-play.c:1450
+ msgid "Enable gapless playback"
+ msgstr "Включить непрерывное воспроизведение"
+
++#: tools/gst-play.c:1452
+ msgid "Shuffle playlist"
+ msgstr "Перемешать список воспроизведения"
+
++#: tools/gst-play.c:1455
+ msgid "Disable interactive control via the keyboard"
+ msgstr "Отключить интерактивное управление с клавиатуры"
+
++#: tools/gst-play.c:1457
+ msgid "Volume"
+ msgstr "Громкость"
+
++#: tools/gst-play.c:1459
+ msgid "Playlist file containing input media files"
+ msgstr "Файл списка воспроизведения с входными медиа-файлами"
+
++#: tools/gst-play.c:1461
+ msgid "Do not print any output (apart from errors)"
+ msgstr "Не выводить ничего в поток стандартного вывода (кроме ошибок)"
+
++#: tools/gst-play.c:1463
+ msgid "Use playbin3 pipeline"
+ msgstr "Использовать конвейер playbin3"
+
++#: tools/gst-play.c:1464
+ msgid "(default varies depending on 'USE_PLAYBIN' env variable)"
+ msgstr "(значение по умолчанию зависит от переменной окружения «USE_PLAYBIN»)"
+
++#: tools/gst-play.c:1468
++msgid "Keep showing the last frame on EOS until quit or playlist change command (gapless is ignored)"
++msgstr "Показывать последний кадр при EOS до завершения работы или команды смены списка воспроизведения (непрерывность воспроизведения игнорируется)"
++
++#: tools/gst-play.c:1546
+ #, c-format
+ msgid "Usage: %s FILE1|URI1 [FILE2|URI2] [FILE3|URI3] ..."
+ msgstr "Использование: %s ФАЙЛ1|URI1 [ФАЙЛ2|URI2] [ФАЙЛ3|URI3] …"
+
++#: tools/gst-play.c:1550
+ msgid "You must provide at least one filename or URI to play."
+ msgstr "Вы должны указать не менее одного имени файла или URI."
+
++#: tools/gst-play.c:1590
+ msgid "Press 'k' to see a list of keyboard shortcuts.\n"
+ msgstr "Нажмите «k» для показа списка клавиатурных комбинаций.\n"
+
+ #~ msgid "Could not create \"decodebin3\" element."
+ #~ msgstr "Не удалось создать элемент «decodebin3»."
+
+ #~ msgid "Could not create \"urisourcebin\" element."
+ #~ msgstr "Не удалось создать элемент «urisourcebin»."
+
+ #~ msgid "Internal data stream error."
+ #~ msgstr "Внутренняя ошибка потока данных."
+
+ #~ msgid "A %s plugin is required to play this stream, but not installed."
-#~ msgid ""
-#~ "Only a subtitle stream was detected. Either you are loading a subtitle "
-#~ "file or some other type of text file, or the media file was not "
-#~ "recognized."
-#~ msgstr ""
-#~ "Обнаружен только поток субтитров. Либо вы загружаете файл субтитров или "
-#~ "иной текстовый файл, либо медиа-файл не распознан."
-
-#~ msgid ""
-#~ "You do not have a decoder installed to handle this file. You might need "
-#~ "to install the necessary plugins."
-#~ msgstr ""
-#~ "Не установлен требуемый декодер для обработки этого файла. Возможно, вам "
-#~ "следует поставить необходимые модули."
++#~ msgstr "Для воспроизведения этого потока требуется модуль %s, но он не установлен."
+
+ #~ msgid "Uncompressed %s YUV %s"
+ #~ msgstr "Несжатый %s YUV %s"
+
+ #~ msgid "Master"
+ #~ msgstr "Общий"
+
+ #~ msgid "Bass"
+ #~ msgstr "Низкие"
+
+ #~ msgid "Treble"
+ #~ msgstr "Высокие"
+
+ #~ msgid "PCM"
+ #~ msgstr "PCM"
+
+ #~ msgid "Synth"
+ #~ msgstr "Синтезатор"
+
+ #~ msgid "Line-in"
+ #~ msgstr "Линейный вход"
+
+ #~ msgid "CD"
+ #~ msgstr "CD"
+
+ #~ msgid "Microphone"
+ #~ msgstr "Микрофон"
+
+ #~ msgid "PC Speaker"
+ #~ msgstr "Внутренний динамик"
+
+ #~ msgid "Playback"
+ #~ msgstr "Воспроизведение"
+
+ #~ msgid "Capture"
+ #~ msgstr "Съёмка"
+
+ #~ msgid "Could not open vfs file \"%s\" for writing: %s."
+ #~ msgstr "Не удалось открыть vfs-файл «%s» для записи: %s."
+
+ #~ msgid "No filename given"
+ #~ msgstr "Не указано имя файла"
+
+ #~ msgid "Could not close vfs file \"%s\"."
+ #~ msgstr "Не удалось закрыть vfs-файл «%s»."
+
+ #~ msgid "Error while writing to file \"%s\"."
+ #~ msgstr "Ошибка записи в файл «%s»."
+
+ #~ msgid "Invalid subtitle URI \"%s\", subtitles disabled."
+ #~ msgstr "Неверный URI субтитров «%s», субтитры выключены."
+
+ #~ msgid "RTSP streams cannot be played yet."
+ #~ msgstr "Воспроизведение RTSP-потоков в данный момент невозможно."
+
++#~ msgid "Only a subtitle stream was detected. Either you are loading a subtitle file or some other type of text file, or the media file was not recognized."
++#~ msgstr "Обнаружен только поток субтитров. Либо вы загружаете файл субтитров или иной текстовый файл, либо медиа-файл не распознан."
++
++#~ msgid "You do not have a decoder installed to handle this file. You might need to install the necessary plugins."
++#~ msgstr "Не установлен требуемый декодер для обработки этого файла. Возможно, вам следует поставить необходимые модули."
+
+ #~ msgid "This is not a media file"
+ #~ msgstr "Файл не является медиа-файлом"
+
+ #~ msgid "A subtitle stream was detected, but no video stream."
+ #~ msgstr "Обнаружен только поток субтитров, поток видео не обнаружен."
+
+ #~ msgid "Both autovideosink and xvimagesink elements are missing."
+ #~ msgstr "Отсутствуют элементы autovideosink и xvimagesink."
+
+ #~ msgid "Both autoaudiosink and alsasink elements are missing."
+ #~ msgstr "Отсутствуют элементы autoaudiosink и alsasink."
+
+ #~ msgid "Error while sending gdp header data to \"%s:%d\"."
+ #~ msgstr "Ошибка отправки данных заголовка gdp в «%s:%d»."
+
+ #~ msgid "Error while sending gdp payload data to \"%s:%d\"."
+ #~ msgstr "Ошибка отправки данных gdp в «%s:%d»."
+
+ #~ msgid "Connection to %s:%d refused."
+ #~ msgstr "В соединении с %s:%d отказано."
+
+ #~ msgid "Uncompressed planar YVU 4:2:0"
+ #~ msgstr "Несжатый планарный YVU 4:2:0"
+
+ #~ msgid "Uncompressed packed YUV 4:1:0"
+ #~ msgstr "Несжатый смешанный YUV 4:1:0"
+
+ #~ msgid "Uncompressed packed YVU 4:1:0"
+ #~ msgstr "Несжатый смешанный YVU 4:1:0"
+
+ #~ msgid "Uncompressed packed YUV 4:1:1"
+ #~ msgstr "Несжатый смешанный YUV 4:1:1"
+
+ #~ msgid "Uncompressed packed YUV 4:4:4"
+ #~ msgstr "Несжатый смешанный YUV 4:4:4"
+
+ #~ msgid "Uncompressed planar YUV 4:2:2"
+ #~ msgstr "Несжатый планарный YUV 4:2:2"
+
+ #~ msgid "Uncompressed planar YUV 4:1:1"
+ #~ msgstr "Несжатый планарный YUV 4:1:1"
+
+ #~ msgid "Uncompressed black and white Y-plane"
+ #~ msgstr "Несжатая чёрно-белая Y-плоскость"
+
+ #~ msgid "Raw PCM audio"
+ #~ msgstr "Несжатое PCM-аудио"
+
+ #~ msgid "Raw %d-bit floating-point audio"
+ #~ msgstr "Несжатое %d-битное аудио с плавающей точкой"
+
+ #~ msgid "Raw floating-point audio"
+ #~ msgstr "Несжатое аудио с плавающей точкой"
+
+ #~ msgid "No device specified."
+ #~ msgstr "Не указано устройство."
+
+ #~ msgid "Device \"%s\" does not exist."
+ #~ msgstr "Устройство «%s» не существует."
+
+ #~ msgid "Device \"%s\" is already being used."
+ #~ msgstr "Устройство «%s» уже используется."
+
+ #~ msgid "Could not open device \"%s\" for reading and writing."
+ #~ msgstr "Не удалось открыть устройство «%s» для чтения и записи."
+
+ #~ msgid "No file name specified."
+ #~ msgstr "Не указано имя файла."
+
+ #~ msgid "Could not open file \"%s\" for reading."
+ #~ msgstr "Не удалось открыть файл «%s» для чтения."
+
+ #~ msgid "Could not create \"queue2\" element."
+ #~ msgstr "Не удалось создать элемент «queue2»."
--- /dev/null
-"Project-Id-Version: gst-plugins-base 1.15.1\n"
+ # Swedish messages for gst-plugins-base.
+ # Copyright © 2004, 2005, 2007, 2008, 2009, 2014, 2015, 2016, 2017, 2019 Free Software Foundation, Inc.
+ # This file is distributed under the same license as the gst-plugins-base package.
+ # Christian Rose <menthos@menthos.com>, 2004, 2005.
+ # Daniel Nylander <po@danielnylander.se>, 2007, 2008, 2009.
+ # Sebastian Rasmussen <sebras@gmail.com>, 2014, 2015, 2016, 2017, 2019.
+ #
+ msgid ""
+ msgstr ""
-"POT-Creation-Date: 2019-02-26 11:43+0000\n"
-"PO-Revision-Date: 2019-02-05 18:23+0100\n"
++"Project-Id-Version: gst-plugins-base 1.16.0\n"
+ "Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"X-Generator: Poedit 2.2.1\n"
++"POT-Creation-Date: 2019-04-19 00:20+0100\n"
++"PO-Revision-Date: 2019-09-03 18:26+0200\n"
+ "Last-Translator: Sebastian Rasmussen <sebras@gmail.com>\n"
+ "Language-Team: Swedish <tp-sv@listor.tp-sv.se>\n"
+ "Language: sv\n"
+ "MIME-Version: 1.0\n"
+ "Content-Type: text/plain; charset=UTF-8\n"
+ "Content-Transfer-Encoding: 8bit\n"
+ "X-Bugs: Report translation errors to the Language-Team address.\n"
-msgid ""
-"Could not open audio device for playback. Device is being used by another "
-"application."
-msgstr ""
-"Kunde inte öppna ljudenheten för uppspelning. Enheten används av ett annat "
-"program."
++"X-Generator: Poedit 2.2.3\n"
+
++#: ext/alsa/gstalsasink.c:579
+ msgid "Could not open device for playback in mono mode."
+ msgstr "Kunde inte öppna enheten för uppspelning i monoläge."
+
++#: ext/alsa/gstalsasink.c:581
+ msgid "Could not open device for playback in stereo mode."
+ msgstr "Kunde inte öppna enheten för uppspelning i stereoläge."
+
++#: ext/alsa/gstalsasink.c:585
+ #, c-format
+ msgid "Could not open device for playback in %d-channel mode."
+ msgstr "Kunde inte öppna enheten för uppspelning i %d-kanalsläge."
+
-msgid ""
-"Could not open audio device for recording. Device is being used by another "
-"application."
-msgstr ""
-"Kunde inte öppna ljudenheten för inspelning. Enheten används av ett annat "
-"program."
++#: ext/alsa/gstalsasink.c:858
++msgid "Could not open audio device for playback. Device is being used by another application."
++msgstr "Kunde inte öppna ljudenheten för uppspelning. Enheten används av ett annat program."
+
++#: ext/alsa/gstalsasink.c:863
+ msgid "Could not open audio device for playback."
+ msgstr "Kunde inte öppna ljudenheten för uppspelning."
+
++#: ext/alsa/gstalsasink.c:1076
+ msgid "Error outputting to audio device. The device has been disconnected."
+ msgstr "Fel vid utmatning till ljudenhet. Enheten har kopplats loss."
+
++#: ext/alsa/gstalsasrc.c:444
+ msgid "Could not open device for recording in mono mode."
+ msgstr "Kunde inte öppna enheten för inspelning i monoläge."
+
++#: ext/alsa/gstalsasrc.c:446
+ msgid "Could not open device for recording in stereo mode."
+ msgstr "Kunde inte öppna enheten för inspelning i stereoläge."
+
++#: ext/alsa/gstalsasrc.c:450
+ #, c-format
+ msgid "Could not open device for recording in %d-channel mode"
+ msgstr "Kunde inte öppna enheten för inspelning i %d-kanalsläge"
+
-msgid ""
-"35 mm equivalent focal length of the lens used capturing the image, in mm"
-msgstr ""
-"35 mm-ekvivalent brännvidd för linsen som används när bild infångas, i mm"
++#: ext/alsa/gstalsasrc.c:741
++msgid "Could not open audio device for recording. Device is being used by another application."
++msgstr "Kunde inte öppna ljudenheten för inspelning. Enheten används av ett annat program."
+
++#: ext/alsa/gstalsasrc.c:746
+ msgid "Could not open audio device for recording."
+ msgstr "Kunde inte öppna ljudenheten för inspelning."
+
++#: ext/alsa/gstalsasrc.c:992
+ msgid "Error recording from audio device. The device has been disconnected."
+ msgstr "Fel vid inspelning från ljudenhet. Enheten har kopplats loss."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:277 ext/cdparanoia/gstcdparanoiasrc.c:283
+ msgid "Could not open CD device for reading."
+ msgstr "Kunde inte öppna cd-enheten för läsning."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:409
+ msgid "Could not seek CD."
+ msgstr "Kunde inte söka på cd-skivan."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:417
+ msgid "Could not read CD."
+ msgstr "Kunde inte läsa cd-skivan."
+
++#: ext/gl/gstgltestsrc.c:491
+ msgid "failed to draw pattern"
+ msgstr "misslyckades med att rita mönster"
+
++#: ext/gl/gstgltestsrc.c:492
+ msgid "A GL error occurred"
+ msgstr "Ett GL-fel uppstod"
+
++#: ext/gl/gstgltestsrc.c:498
+ msgid "format wasn't negotiated before get function"
+ msgstr "formatet förhandlades inte fram innan hämtfunktionen"
+
++#: gst/encoding/gstencodebin.c:1636 gst/playback/gstplaybin2.c:3399
++#: gst/playback/gstplaysink.c:1522 gst/playback/gstplaysink.c:1535
++#: gst/playback/gstplaysink.c:1872 gst/playback/gstplaysink.c:1904
++#: gst/playback/gstplaysink.c:2488 gst/playback/gstplaysink.c:2537
++#: gst/playback/gstplaysink.c:2552 gst/playback/gstplaysink.c:2577
++#: gst/playback/gstplaysink.c:2609 gst/playback/gstplaysink.c:2757
++#: gst/playback/gstplaysink.c:2788 gst/playback/gstplaysink.c:3166
++#: gst/playback/gstplaysink.c:3175 gst/playback/gstplaysink.c:3184
++#: gst/playback/gstplaysink.c:3193 gst/playback/gstplaysink.c:3606
++#: gst/playback/gstplaysink.c:4526 gst/playback/gstplaysinkconvertbin.c:97
++#: gst/playback/gstplaysinkconvertbin.c:117
++#: gst/playback/gsturidecodebin.c:1472 gst/playback/gsturisourcebin.c:1597
+ #, c-format
+ msgid "Missing element '%s' - check your GStreamer installation."
+ msgstr "Saknar elementet ”%s” - kontrollera din GStreamer-installation."
+
++#: gst/playback/gstdecodebin2.c:1883 gst/playback/gstparsebin.c:1553
+ msgid "Could not determine type of stream"
+ msgstr "Kunde inte fastställa typ av ström"
+
++#: gst/playback/gstdecodebin2.c:2861 gst/playback/gstparsebin.c:2385
+ msgid "This appears to be a text file"
+ msgstr "Den här ser ut att vara en textfil"
+
++#: gst/playback/gstplaybin2.c:5452
+ msgid "Could not create \"uridecodebin\" element."
+ msgstr "Kunde inte skapa ”uridecodebin”-element."
+
++#: gst/playback/gstplaybin3.c:4558
+ msgid "Could not create \"uridecodebin3\" element."
+ msgstr "Kunde inte skapa ”uridecodebin3”-element."
+
++#: gst/playback/gstplaysink.c:2003
+ #, c-format
+ msgid "Both autovideosink and %s elements are missing."
+ msgstr "Både autovideosink- och %s-elementet saknas."
+
++#: gst/playback/gstplaysink.c:2007
+ msgid "The autovideosink element is missing."
+ msgstr "Elementet autovideosink saknas."
+
++#: gst/playback/gstplaysink.c:2012
+ #, c-format
+ msgid "Configured videosink %s is not working."
+ msgstr "Konfigurerat videosink %s fungerar inte."
+
++#: gst/playback/gstplaysink.c:2016
+ #, c-format
+ msgid "Both autovideosink and %s elements are not working."
+ msgstr "Det finns både autovideosink- och %s-element som inte fungerar."
+
++#: gst/playback/gstplaysink.c:2020
+ msgid "The autovideosink element is not working."
+ msgstr "Elementet autovideosink fungerar inte."
+
++#: gst/playback/gstplaysink.c:2525
+ msgid "Custom text sink element is not usable."
+ msgstr "Anpassat text utgångs-element är inte användbart."
+
++#: gst/playback/gstplaysink.c:2903
+ msgid "No volume control found"
+ msgstr "Ingen volymkontroll hittades"
+
++#: gst/playback/gstplaysink.c:2933
+ #, c-format
+ msgid "Both autoaudiosink and %s elements are missing."
+ msgstr "Både autoaudiosink- och %s-elementet saknas."
+
++#: gst/playback/gstplaysink.c:2937
+ msgid "The autoaudiosink element is missing."
+ msgstr "Elementet autoaudiosink saknas."
+
++#: gst/playback/gstplaysink.c:2942
+ #, c-format
+ msgid "Configured audiosink %s is not working."
+ msgstr "Konfigurerat audiosink %s fungerar inte."
+
++#: gst/playback/gstplaysink.c:2946
+ #, c-format
+ msgid "Both autoaudiosink and %s elements are not working."
+ msgstr "Det finns både autoaudiosink- och %s-element som inte fungerar."
+
++#: gst/playback/gstplaysink.c:2950
+ msgid "The autoaudiosink element is not working."
+ msgstr "Elementet autoaudiosink fungerar inte."
+
++#: gst/playback/gstplaysink.c:3269 gst/playback/gstplaysink.c:3274
+ msgid "Can't play a text file without video or visualizations."
+ msgstr "Kan inte spela upp en textfil utan video eller visualiseringar."
+
++#: gst/playback/gsturidecodebin.c:921
+ #, c-format
+ msgid "No decoder available for type '%s'."
+ msgstr "Ingen avkodare finns tillgänglig för typen ”%s”."
+
++#: gst/playback/gsturidecodebin.c:1378 gst/playback/gsturisourcebin.c:1491
+ msgid "No URI specified to play from."
+ msgstr "Ingen uri angiven att spela upp från."
+
++#: gst/playback/gsturidecodebin.c:1384 gst/playback/gsturisourcebin.c:1497
+ #, c-format
+ msgid "Invalid URI \"%s\"."
+ msgstr "Ogiltig uri ”%s”."
+
++#: gst/playback/gsturidecodebin.c:1391 gst/playback/gsturisourcebin.c:1504
+ msgid "This stream type cannot be played yet."
+ msgstr "Den här strömtypen kan inte spelas upp än."
+
++#: gst/playback/gsturidecodebin.c:1409 gst/playback/gsturisourcebin.c:1522
+ #, c-format
+ msgid "No URI handler implemented for \"%s\"."
+ msgstr "Ingen URI-hanterare implementerad för ”%s”."
+
++#: gst/playback/gsturidecodebin.c:2289 gst/playback/gsturisourcebin.c:2228
+ msgid "Source element is invalid."
+ msgstr "Källelementet är ogiltigt."
+
++#: gst/tcp/gsttcpclientsink.c:216
+ #, c-format
+ msgid "Error while sending data to \"%s:%d\"."
+ msgstr "Fel vid sändning av data till ”%s:%d”."
+
++#: gst-libs/gst/audio/gstaudiobasesrc.c:842
+ msgid "Can't record audio fast enough"
+ msgstr "Kan inte spela in ljud tillräckligt snabbt"
+
++#: gst-libs/gst/audio/gstaudiocdsrc.c:1625
+ msgid "This CD has no audio tracks"
+ msgstr "Den här cd-skivan saknar ljudspår"
+
++#: gst-libs/gst/pbutils/descriptions.c:89
+ msgid "ID3 tag"
+ msgstr "ID3-tagg"
+
++#: gst-libs/gst/pbutils/descriptions.c:90
++#: gst-libs/gst/pbutils/descriptions.c:91
+ msgid "APE tag"
+ msgstr "APE-tagg"
+
++#: gst-libs/gst/pbutils/descriptions.c:92
+ msgid "ICY internet radio"
+ msgstr "ICY internetradio"
+
++#: gst-libs/gst/pbutils/descriptions.c:127
+ msgid "Apple Lossless Audio (ALAC)"
+ msgstr "Apple Lossless Audio (ALAC)"
+
++#: gst-libs/gst/pbutils/descriptions.c:143
+ msgid "Free Lossless Audio Codec (FLAC)"
+ msgstr "Free Lossless Audio Codec (FLAC)"
+
++#: gst-libs/gst/pbutils/descriptions.c:173
++#: gst-libs/gst/pbutils/descriptions.c:174
+ msgid "Lossless True Audio (TTA)"
+ msgstr "Lossless True Audio (TTA)"
+
++#: gst-libs/gst/pbutils/descriptions.c:182
+ msgid "Windows Media Speech"
+ msgstr "Windows Media Speech"
+
++#: gst-libs/gst/pbutils/descriptions.c:199
+ msgid "CYUV Lossless"
+ msgstr "Förlustfri CYUV"
+
++#: gst-libs/gst/pbutils/descriptions.c:203
+ msgid "FFMpeg v1"
+ msgstr "FFMpeg v1"
+
++#: gst-libs/gst/pbutils/descriptions.c:217
+ msgid "Lossless MSZH"
+ msgstr "Förlustfri MSZH"
+
++#: gst-libs/gst/pbutils/descriptions.c:228
+ msgid "Run-length encoding"
+ msgstr "Skurlängdskodning"
+
++#: gst-libs/gst/pbutils/descriptions.c:274
+ msgid "Timed Text"
+ msgstr "Textning"
+
++#: gst-libs/gst/pbutils/descriptions.c:278
+ msgid "Subtitle"
+ msgstr "Undertext"
+
++#: gst-libs/gst/pbutils/descriptions.c:279
+ msgid "MPL2 subtitle format"
+ msgstr "Undertextformatet MPL2"
+
++#: gst-libs/gst/pbutils/descriptions.c:280
+ msgid "DKS subtitle format"
+ msgstr "Undertextformatet DKS"
+
++#: gst-libs/gst/pbutils/descriptions.c:281
+ msgid "QTtext subtitle format"
+ msgstr "Undertextformatet QTtext"
+
++#: gst-libs/gst/pbutils/descriptions.c:282
+ msgid "Sami subtitle format"
+ msgstr "Undertextformatet Sami"
+
++#: gst-libs/gst/pbutils/descriptions.c:283
+ msgid "TMPlayer subtitle format"
+ msgstr "Undertextformatet TMPlayer"
+
++#: gst-libs/gst/pbutils/descriptions.c:287
+ msgid "CEA 608 Closed Caption"
+ msgstr "CEA 608-undertexter"
+
++#: gst-libs/gst/pbutils/descriptions.c:288
+ msgid "CEA 708 Closed Caption"
+ msgstr "CEA 708-undertexter"
+
++#: gst-libs/gst/pbutils/descriptions.c:289
+ msgid "Kate subtitle format"
+ msgstr "Undertextformatet Kate"
+
++#: gst-libs/gst/pbutils/descriptions.c:290
+ msgid "WebVTT subtitle format"
+ msgstr "WebVTT-undertextformat"
+
++#: gst-libs/gst/pbutils/descriptions.c:455
++#: gst-libs/gst/pbutils/descriptions.c:458
++#: gst-libs/gst/pbutils/descriptions.c:508
+ msgid "Uncompressed video"
+ msgstr "Okomprimerad video"
+
++#: gst-libs/gst/pbutils/descriptions.c:463
+ msgid "Uncompressed gray"
+ msgstr "Okomprimerad gråskala"
+
++#: gst-libs/gst/pbutils/descriptions.c:486
+ #, c-format
+ msgid "Uncompressed packed YUV %s"
+ msgstr "Okomprimerad paketerad YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:488
+ #, c-format
+ msgid "Uncompressed semi-planar YUV %s"
+ msgstr "Okomprimerad semi-planär YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:490
+ #, c-format
+ msgid "Uncompressed planar YUV %s"
+ msgstr "Okomprimerad planär YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:501
+ #, c-format
+ msgid "Uncompressed palettized %d-bit %s"
+ msgstr "Okomprimerad palettiserad %d-bitars %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:504
+ #, c-format
+ msgid "Uncompressed %d-bit %s"
+ msgstr "Okomprimerad %d-bitars %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:586
+ #, c-format
+ msgid "DivX MPEG-4 Version %d"
+ msgstr "DivX MPEG-4 version %d"
+
++#: gst-libs/gst/pbutils/descriptions.c:834
+ msgid "Uncompressed audio"
+ msgstr "Okomprimerad audio"
+
++#: gst-libs/gst/pbutils/descriptions.c:840
+ #, c-format
+ msgid "Raw %d-bit %s audio"
+ msgstr "Rått %d-bitars %s-ljud"
+
++#: gst-libs/gst/pbutils/descriptions.c:942
+ msgid "Audio CD source"
+ msgstr "Ljud-cd-källa"
+
++#: gst-libs/gst/pbutils/descriptions.c:945
+ msgid "DVD source"
+ msgstr "Dvd-källa"
+
++#: gst-libs/gst/pbutils/descriptions.c:948
+ msgid "Real Time Streaming Protocol (RTSP) source"
+ msgstr "Real Time Streaming Protocol-källa (RTSP)"
+
++#: gst-libs/gst/pbutils/descriptions.c:952
+ msgid "Microsoft Media Server (MMS) protocol source"
+ msgstr "Microsoft Media Server-protokollkälla (MMS)"
+
++#: gst-libs/gst/pbutils/descriptions.c:960
+ #, c-format
+ msgid "%s protocol source"
+ msgstr "%s-protokollkälla"
+
++#: gst-libs/gst/pbutils/descriptions.c:1034
+ #, c-format
+ msgid "%s video RTP depayloader"
+ msgstr "%s RTP-uppackare (video)"
+
++#: gst-libs/gst/pbutils/descriptions.c:1036
+ #, c-format
+ msgid "%s audio RTP depayloader"
+ msgstr "%s RTP-uppackare (ljud)"
+
++#: gst-libs/gst/pbutils/descriptions.c:1038
+ #, c-format
+ msgid "%s RTP depayloader"
+ msgstr "%s RTP-uppackare"
+
++#: gst-libs/gst/pbutils/descriptions.c:1045
+ #, c-format
+ msgid "%s demuxer"
+ msgstr "%s-avmultiplexor"
+
++#: gst-libs/gst/pbutils/descriptions.c:1047
+ #, c-format
+ msgid "%s decoder"
+ msgstr "%s-avkodare"
+
++#: gst-libs/gst/pbutils/descriptions.c:1086
+ #, c-format
+ msgid "%s video RTP payloader"
+ msgstr "%s RTP-packare (video)"
+
++#: gst-libs/gst/pbutils/descriptions.c:1088
+ #, c-format
+ msgid "%s audio RTP payloader"
+ msgstr "%s RTP-packare (ljud)"
+
++#: gst-libs/gst/pbutils/descriptions.c:1090
+ #, c-format
+ msgid "%s RTP payloader"
+ msgstr "%s RTP-packare"
+
++#: gst-libs/gst/pbutils/descriptions.c:1097
+ #, c-format
+ msgid "%s muxer"
+ msgstr "%s-multiplexor"
+
++#: gst-libs/gst/pbutils/descriptions.c:1099
+ #, c-format
+ msgid "%s encoder"
+ msgstr "%s-kodare"
+
++#: gst-libs/gst/pbutils/descriptions.c:1133
+ #, c-format
+ msgid "GStreamer element %s"
+ msgstr "GStreamer-elementet %s"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:558
+ msgid "Unknown source element"
+ msgstr "Okänt käll-element"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:561
+ msgid "Unknown sink element"
+ msgstr "Okänt utgångs-element"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:564
+ msgid "Unknown element"
+ msgstr "Okänt element"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:567
+ msgid "Unknown decoder element"
+ msgstr "Okänt avkodarelement"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:570
+ msgid "Unknown encoder element"
+ msgstr "Okänt kodarelement"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:575
+ msgid "Plugin or element of unknown type"
+ msgstr "Insticksmodul eller element av okänd typ"
+
++#: gst-libs/gst/tag/gsttagdemux.c:1243
+ msgid "Failed to read tag: not enough data"
+ msgstr "Misslyckades med att läsa tagg: inte tillräckligt mycket data"
+
++#: gst-libs/gst/tag/tags.c:77
+ msgid "track ID"
+ msgstr "spår-ID"
+
++#: gst-libs/gst/tag/tags.c:77
+ msgid "MusicBrainz track ID"
+ msgstr "MusicBrainz spår-ID"
+
++#: gst-libs/gst/tag/tags.c:79
+ msgid "artist ID"
+ msgstr "artist-ID"
+
++#: gst-libs/gst/tag/tags.c:79
+ msgid "MusicBrainz artist ID"
+ msgstr "MusicBrainz artist-ID"
+
++#: gst-libs/gst/tag/tags.c:81
+ msgid "album ID"
+ msgstr "album-ID"
+
++#: gst-libs/gst/tag/tags.c:81
+ msgid "MusicBrainz album ID"
+ msgstr "MusicBrainz album-ID"
+
++#: gst-libs/gst/tag/tags.c:84
+ msgid "album artist ID"
+ msgstr "albumartist-ID"
+
++#: gst-libs/gst/tag/tags.c:84
+ msgid "MusicBrainz album artist ID"
+ msgstr "MusicBrainz albumartist-ID"
+
++#: gst-libs/gst/tag/tags.c:86
+ msgid "track TRM ID"
+ msgstr "spår TRM-ID"
+
++#: gst-libs/gst/tag/tags.c:86
+ msgid "MusicBrainz TRM ID"
+ msgstr "MusicBrainz TRM-ID"
+
++#: gst-libs/gst/tag/tags.c:108
+ msgid "capturing shutter speed"
+ msgstr "slutarhastighet"
+
++#: gst-libs/gst/tag/tags.c:109
+ msgid "Shutter speed used when capturing an image, in seconds"
+ msgstr "Slutarhastighet i sekunder som används när bild infångas"
+
++#: gst-libs/gst/tag/tags.c:112
+ msgid "capturing focal ratio"
+ msgstr "bländarvärde"
+
++#: gst-libs/gst/tag/tags.c:113
+ msgid "Focal ratio (f-number) used when capturing the image"
+ msgstr "Bländarvärde (f-tal) som används när bild infångas"
+
++#: gst-libs/gst/tag/tags.c:116
+ msgid "capturing focal length"
+ msgstr "brännvidd"
+
++#: gst-libs/gst/tag/tags.c:117
+ msgid "Focal length of the lens used capturing the image, in mm"
+ msgstr "Brännvidd för linsen i mm när bild infångas"
+
++#: gst-libs/gst/tag/tags.c:121
+ msgid "capturing 35 mm equivalent focal length"
+ msgstr "fångar 35mm-ekvivalent brännvidd"
+
-msgid ""
-"The metering mode used while determining exposure for capturing an image"
++#: gst-libs/gst/tag/tags.c:122
++msgid "35 mm equivalent focal length of the lens used capturing the image, in mm"
++msgstr "35 mm-ekvivalent brännvidd för linsen som används när bild infångas, i mm"
+
++#: gst-libs/gst/tag/tags.c:126
+ msgid "capturing digital zoom ratio"
+ msgstr "digitalt zoom-förhållande"
+
++#: gst-libs/gst/tag/tags.c:127
+ msgid "Digital zoom ratio used when capturing an image"
+ msgstr "Digitalt zoom-förhållande som används när bild infångas"
+
++#: gst-libs/gst/tag/tags.c:130
+ msgid "capturing iso speed"
+ msgstr "ISO-tal"
+
++#: gst-libs/gst/tag/tags.c:131
+ msgid "The ISO speed used when capturing an image"
+ msgstr "ISO-tal som används när bild infångas"
+
++#: gst-libs/gst/tag/tags.c:134
+ msgid "capturing exposure program"
+ msgstr "exponeringsprogram"
+
++#: gst-libs/gst/tag/tags.c:135
+ msgid "The exposure program used when capturing an image"
+ msgstr "Det exponeringsprogram som används när bild infångas"
+
++#: gst-libs/gst/tag/tags.c:138
+ msgid "capturing exposure mode"
+ msgstr "exponeringsläge"
+
++#: gst-libs/gst/tag/tags.c:139
+ msgid "The exposure mode used when capturing an image"
+ msgstr "Exponeringsläge som används när bild infångas"
+
++#: gst-libs/gst/tag/tags.c:142
+ msgid "capturing exposure compensation"
+ msgstr "exponeringskompensation"
+
++#: gst-libs/gst/tag/tags.c:143
+ msgid "The exposure compensation used when capturing an image"
+ msgstr "Exponeringskompensationen som används när bild infångas"
+
++#: gst-libs/gst/tag/tags.c:146
+ msgid "capturing scene capture type"
+ msgstr "scenläge"
+
++#: gst-libs/gst/tag/tags.c:147
+ msgid "The scene capture mode used when capturing an image"
+ msgstr "Scenläge som används när bild infångas"
+
++#: gst-libs/gst/tag/tags.c:150
+ msgid "capturing gain adjustment"
+ msgstr "förstärkningsjustering"
+
++#: gst-libs/gst/tag/tags.c:151
+ msgid "The overall gain adjustment applied on an image"
+ msgstr "Förstärkningsjustering som appliceras på en bild"
+
++#: gst-libs/gst/tag/tags.c:154
+ msgid "capturing white balance"
+ msgstr "vitbalans"
+
++#: gst-libs/gst/tag/tags.c:155
+ msgid "The white balance mode set when capturing an image"
+ msgstr "Vitbalansläge som sätts när bild infångas"
+
++#: gst-libs/gst/tag/tags.c:158
+ msgid "capturing contrast"
+ msgstr "kontrast"
+
++#: gst-libs/gst/tag/tags.c:159
+ msgid "The direction of contrast processing applied when capturing an image"
+ msgstr "Riktning för kontrastbehandling som appliceras när bild infångas"
+
++#: gst-libs/gst/tag/tags.c:163
+ msgid "capturing saturation"
+ msgstr "mättnad"
+
++#: gst-libs/gst/tag/tags.c:164
+ msgid "The direction of saturation processing applied when capturing an image"
+ msgstr "Riktning för mättnadsbehandling som appliceras när bild infångas"
+
++#: gst-libs/gst/tag/tags.c:168
+ msgid "capturing sharpness"
+ msgstr "skärpning"
+
++#: gst-libs/gst/tag/tags.c:169
+ msgid "The direction of sharpness processing applied when capturing an image"
+ msgstr "Riktningen på skärpningsbehandling som appliceras när bild infångas"
+
++#: gst-libs/gst/tag/tags.c:173
+ msgid "capturing flash fired"
+ msgstr "blixtavfyrning"
+
++#: gst-libs/gst/tag/tags.c:174
+ msgid "If the flash fired while capturing an image"
+ msgstr "Om blixten avfyrats när bild infångas"
+
++#: gst-libs/gst/tag/tags.c:177
+ msgid "capturing flash mode"
+ msgstr "blixtläge"
+
++#: gst-libs/gst/tag/tags.c:178
+ msgid "The selected flash mode while capturing an image"
+ msgstr "Det valda blixtläget när bild infångas"
+
++#: gst-libs/gst/tag/tags.c:181
+ msgid "capturing metering mode"
+ msgstr "ljusmätarläge"
+
-msgid ""
-"Don't exit after showing the initial device list, but wait for devices to "
-"added/removed."
-msgstr ""
-"Avsluta inte efter att den inledande enhetslistan visats, utan vänta på att "
-"enheter läggs till/tas bort."
++#: gst-libs/gst/tag/tags.c:182
++msgid "The metering mode used while determining exposure for capturing an image"
+ msgstr "Ljusmätarläge som används då exponering bestäms när bild infångas"
+
++#: gst-libs/gst/tag/tags.c:186
+ msgid "capturing source"
+ msgstr "infångstkälla"
+
++#: gst-libs/gst/tag/tags.c:187
+ msgid "The source or type of device used for the capture"
+ msgstr "Källa eller typ av enhet som används för infångsten"
+
++#: gst-libs/gst/tag/tags.c:190
+ msgid "image horizontal ppi"
+ msgstr "bild horisontell ppi"
+
++#: gst-libs/gst/tag/tags.c:191
+ msgid "Media (image/video) intended horizontal pixel density in ppi"
+ msgstr "Media (bild/video) avsedd horisontell pixeldensitet i ppi"
+
++#: gst-libs/gst/tag/tags.c:194
+ msgid "image vertical ppi"
+ msgstr "bild vertikal ppi"
+
++#: gst-libs/gst/tag/tags.c:195
+ msgid "Media (image/video) intended vertical pixel density in ppi"
+ msgstr "Media (bild/video) avsedd vertikal pixeldensitet i ppi"
+
++#: gst-libs/gst/tag/tags.c:198
+ msgid "ID3v2 frame"
+ msgstr "ID3v2-ram"
+
++#: gst-libs/gst/tag/tags.c:198
+ msgid "unparsed id3v2 tag frame"
+ msgstr "otolkad id3v2-tagg-ram"
+
++#: gst-libs/gst/tag/tags.c:202
+ msgid "musical-key"
+ msgstr "musikalisk tonart"
+
++#: gst-libs/gst/tag/tags.c:202
+ msgid "Initial key in which the sound starts"
+ msgstr "Inledande tonart med vilken ljudet börjar"
+
++#: tools/gst-device-monitor.c:260 tools/gst-play.c:1444
+ msgid "Print version information and exit"
+ msgstr "Skriv ut versionsinformation och avsluta"
+
-msgstr ""
-"Styr uppspelningsbeteende genom att ställa in playbins egenskap ”flags”"
++#: tools/gst-device-monitor.c:262
++msgid "Don't exit after showing the initial device list, but wait for devices to added/removed."
++msgstr "Avsluta inte efter att den inledande enhetslistan visats, utan vänta på att enheter läggs till/tas bort."
+
++#: tools/gst-play.c:310
+ #, c-format
+ msgid "Volume: %.0f%%"
+ msgstr "Volym: %.0f%%"
+
++#: tools/gst-play.c:349
+ msgid "Buffering..."
+ msgstr "Buffrar..."
+
++#: tools/gst-play.c:370
+ msgid "Clock lost, selecting a new one\n"
+ msgstr "Klocka förlorad, väljer en ny\n"
+
++#: tools/gst-play.c:400 tools/gst-play.c:446 tools/gst-play.c:883
++#: tools/gst-play.c:1347
+ msgid "Reached end of play list."
+ msgstr "Nådde slutet av spellistan."
+
++#: tools/gst-play.c:613
+ msgid "Paused"
+ msgstr "Pausad"
+
++#: tools/gst-play.c:671
+ #, c-format
+ msgid "Now playing %s\n"
+ msgstr "Spelar nu %s\n"
+
++#: tools/gst-play.c:734
+ #, c-format
+ msgid "About to finish, preparing next title: %s"
+ msgstr "Slutar snart, förbereder nästa titel: %s"
+
++#: tools/gst-play.c:979
+ #, c-format
+ msgid "Playback rate: %.2f"
+ msgstr "Uppspelningshastighet: %.2f"
+
++#: tools/gst-play.c:983
+ #, c-format
+ msgid "Could not change playback rate to %.2f"
+ msgstr "Kunde inte ändra uppspelningshastigheten till %.2f"
+
++#: tools/gst-play.c:1287
+ msgid "space"
+ msgstr "blanksteg"
+
++#: tools/gst-play.c:1287
+ msgid "pause/unpause"
+ msgstr "pausa/fortsätt"
+
++#: tools/gst-play.c:1288
+ msgid "q or ESC"
+ msgstr "q eller ESC"
+
++#: tools/gst-play.c:1288
+ msgid "quit"
+ msgstr "avsluta"
+
++#: tools/gst-play.c:1289
+ msgid "> or n"
+ msgstr "> eller n"
+
++#: tools/gst-play.c:1289
+ msgid "play next"
+ msgstr "spela nästa"
+
++#: tools/gst-play.c:1290
+ msgid "< or b"
+ msgstr "< eller b"
+
++#: tools/gst-play.c:1290
+ msgid "play previous"
+ msgstr "spela föregående"
+
++#: tools/gst-play.c:1291
+ msgid "seek forward"
+ msgstr "sök framåt"
+
++#: tools/gst-play.c:1292
+ msgid "seek backward"
+ msgstr "sök bakåt"
+
++#: tools/gst-play.c:1293
+ msgid "volume up"
+ msgstr "höj volym"
+
++#: tools/gst-play.c:1294
+ msgid "volume down"
+ msgstr "sänk volym"
+
++#: tools/gst-play.c:1295
+ msgid "increase playback rate"
+ msgstr "öka uppspelningshastighet"
+
++#: tools/gst-play.c:1296
+ msgid "decrease playback rate"
+ msgstr "sänk uppspelningshastighet"
+
++#: tools/gst-play.c:1297
+ msgid "change playback direction"
+ msgstr "ändra uppspelningsriktning"
+
++#: tools/gst-play.c:1298
+ msgid "enable/disable trick modes"
+ msgstr "aktivera/inaktivera tricklägen"
+
++#: tools/gst-play.c:1299
+ msgid "change audio track"
+ msgstr "byt ljudspår"
+
++#: tools/gst-play.c:1300
+ msgid "change video track"
+ msgstr "byt videospår"
+
++#: tools/gst-play.c:1301
+ msgid "change subtitle track"
+ msgstr "byt undertextspår"
+
++#: tools/gst-play.c:1302
+ msgid "seek to beginning"
+ msgstr "sök till början"
+
++#: tools/gst-play.c:1303
+ msgid "show keyboard shortcuts"
+ msgstr "visa tangentbordsgenvägar"
+
++#: tools/gst-play.c:1306
+ msgid "Interactive mode - keyboard controls:"
+ msgstr "Interaktivt läge: tangentbordsstyrning:"
+
++#: tools/gst-play.c:1439
+ msgid "Output status information and property notifications"
+ msgstr "Mata ut statusinformation och egenskapsaviseringar"
+
++#: tools/gst-play.c:1441
+ msgid "Control playback behaviour setting playbin 'flags' property"
-#~ msgstr ""
-#~ "En %s-insticksmodul krävs för att spela upp den här strömmen men är inte "
-#~ "installerad."
++msgstr "Styr uppspelningsbeteende genom att ställa in playbins egenskap ”flags”"
+
++#: tools/gst-play.c:1446
+ msgid "Video sink to use (default is autovideosink)"
+ msgstr "Videoutgång som ska användas (standard är autovideosink)"
+
++#: tools/gst-play.c:1448
+ msgid "Audio sink to use (default is autoaudiosink)"
+ msgstr "Ljudutgång som ska användas (standard är autoaudiosink)"
+
++#: tools/gst-play.c:1450
+ msgid "Enable gapless playback"
+ msgstr "Aktivera mellanrumslös uppspelning"
+
++#: tools/gst-play.c:1452
+ msgid "Shuffle playlist"
+ msgstr "Blanda spellista"
+
++#: tools/gst-play.c:1455
+ msgid "Disable interactive control via the keyboard"
+ msgstr "Inaktivera interaktiv styrning via tangentbordet"
+
++#: tools/gst-play.c:1457
+ msgid "Volume"
+ msgstr "Volym"
+
++#: tools/gst-play.c:1459
+ msgid "Playlist file containing input media files"
+ msgstr "Fil med spellista innehållandes inkommande mediafiler"
+
++#: tools/gst-play.c:1461
+ msgid "Do not print any output (apart from errors)"
+ msgstr "Mata inte ut någonting (bortsett från fel)"
+
++#: tools/gst-play.c:1463
+ msgid "Use playbin3 pipeline"
+ msgstr "Använd playbin3-rörledning"
+
++#: tools/gst-play.c:1464
+ msgid "(default varies depending on 'USE_PLAYBIN' env variable)"
+ msgstr "(standardvärdet beror på miljövariabeln ”USE_PLAYBIN”)"
+
++#: tools/gst-play.c:1468
++msgid "Keep showing the last frame on EOS until quit or playlist change command (gapless is ignored)"
++msgstr "Fortsätt visa den sista ramen vid EOS tills avslutning eller ändringskommando för spellista (mellanrumslös hoppas över)"
++
++#: tools/gst-play.c:1546
+ #, c-format
+ msgid "Usage: %s FILE1|URI1 [FILE2|URI2] [FILE3|URI3] ..."
+ msgstr "Användning: %s FIL1|URI1 [FIL2|URI2] [FIL3|URI3] ..."
+
++#: tools/gst-play.c:1550
+ msgid "You must provide at least one filename or URI to play."
+ msgstr "Du måste ange åtminstone ett filnamn eller en URI att spela."
+
++#: tools/gst-play.c:1590
+ msgid "Press 'k' to see a list of keyboard shortcuts.\n"
+ msgstr "Tryck på 'k' för att se en lista över tangentbordsgenvägar.\n"
+
+ #~ msgid "Could not create \"decodebin3\" element."
+ #~ msgstr "Kunde inte skapa \"decodebin3\"-element."
+
+ #~ msgid "Could not create \"urisourcebin\" element."
+ #~ msgstr "Kunde inte skapa \"urisourcebin\"-element."
+
+ #~ msgid "Internal data stream error."
+ #~ msgstr "Internt dataströmningsfel."
+
+ #~ msgid "A %s plugin is required to play this stream, but not installed."
++#~ msgstr "En %s-insticksmodul krävs för att spela upp den här strömmen men är inte installerad."
--- /dev/null
-# translation of gst-plugins-base-1.10.0.po to Turkish
++# translation of gst-plugins-base-1.16.0.po to Turkish
+ # This file is put in the public domain.
+ # This file is distributed under the same license as the gst-plugins-base package.
+ # Server Acim <serveracim@gmail.com>, 2010.
+ # Server Acim <serveracim@gmail.com>, 2013, 2015.
+ # Emin Tufan Çetin <etcetin@gmail.com>, 2017.
+ # Mehmet Kececi <mkececi@mehmetkececi.com>, 2017, 2019.
+ msgid ""
+ msgstr ""
-"Project-Id-Version: gst-plugins-base 1.15.1\n"
++"Project-Id-Version: gst-plugins-base 1.16.0\n"
+ "Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"POT-Creation-Date: 2019-02-26 11:43+0000\n"
-"PO-Revision-Date: 2019-01-25 12:06+0300\n"
++"POT-Creation-Date: 2019-04-19 00:20+0100\n"
++"PO-Revision-Date: 2019-09-03 19:42+0300\n"
+ "Last-Translator: Mehmet Kececi <mkececi@mehmetkececi.com>\n"
+ "Language-Team: Turkish <gnu-tr-u12a@lists.sourceforge.net>\n"
+ "Language: tr\n"
+ "MIME-Version: 1.0\n"
+ "Content-Type: text/plain; charset=UTF-8\n"
+ "Content-Transfer-Encoding: 8bit\n"
+ "Plural-Forms: nplurals=1; plural=0;\n"
+ "X-Generator: Virtaal 0.7.1\n"
+ "X-Bugs: Report translation errors to the Language-Team address.\n"
+ "X-Project-Style: gnome\n"
+
++#: ext/alsa/gstalsasink.c:579
+ msgid "Could not open device for playback in mono mode."
+ msgstr "Mono kipte çalmak için aygıt açılamıyor."
+
++#: ext/alsa/gstalsasink.c:581
+ msgid "Could not open device for playback in stereo mode."
+ msgstr "Stereo kipte çalmak için aygıt açılamıyor."
+
++#: ext/alsa/gstalsasink.c:585
+ #, c-format
+ msgid "Could not open device for playback in %d-channel mode."
+ msgstr "Aygıtı %d- kanal kipinde çalmak için açamıyor."
+
-msgid ""
-"Could not open audio device for playback. Device is being used by another "
-"application."
-msgstr ""
-"Aygıtı çalmak için için açamıyor. Aygıt başka bir uygulama tarafından "
-"kullanılmakta."
++#: ext/alsa/gstalsasink.c:858
++msgid "Could not open audio device for playback. Device is being used by another application."
++msgstr "Aygıtı çalmak için açamıyor. Aygıt başka bir uygulama tarafından kullanılmakta."
+
++#: ext/alsa/gstalsasink.c:863
+ msgid "Could not open audio device for playback."
+ msgstr "Aygıtı çalma için açamıyor."
+
++#: ext/alsa/gstalsasink.c:1076
+ msgid "Error outputting to audio device. The device has been disconnected."
+ msgstr "Ses aygıtına çıktı gönderilirken hata. Aygıt bağlantısı kesildi."
+
++#: ext/alsa/gstalsasrc.c:444
+ msgid "Could not open device for recording in mono mode."
+ msgstr "Mono kipte kaydetmek için aygıtı açamıyor."
+
++#: ext/alsa/gstalsasrc.c:446
+ msgid "Could not open device for recording in stereo mode."
+ msgstr "Stereo kipte kaydetmek için aygıtı açamıyor."
+
++#: ext/alsa/gstalsasrc.c:450
+ #, c-format
+ msgid "Could not open device for recording in %d-channel mode"
+ msgstr "Aygıtı %d-kanal kipinde kaydetmek için açamıyor"
+
-msgid ""
-"Could not open audio device for recording. Device is being used by another "
-"application."
-msgstr ""
-"Aygıtı kaydetmek için açamıyor. Aygıt başka bir uygulama tarafından "
-"kullanılmakta."
++#: ext/alsa/gstalsasrc.c:741
++msgid "Could not open audio device for recording. Device is being used by another application."
++msgstr "Aygıtı kaydetmek için açamıyor. Aygıt başka bir uygulama tarafından kullanılmakta."
+
++#: ext/alsa/gstalsasrc.c:746
+ msgid "Could not open audio device for recording."
+ msgstr "Kayıt için ses aygıtı açılamıyor."
+
++#: ext/alsa/gstalsasrc.c:992
+ msgid "Error recording from audio device. The device has been disconnected."
+ msgstr "Ses aygıtından kayıt sırasında hata. Aygıt bağlantısı kesildi."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:277 ext/cdparanoia/gstcdparanoiasrc.c:283
+ msgid "Could not open CD device for reading."
+ msgstr "Okumak için CD aygıtı açılamıyor."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:409
+ msgid "Could not seek CD."
+ msgstr "CD bulunamıyor."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:417
+ msgid "Could not read CD."
+ msgstr "CD okunamıyor."
+
++#: ext/gl/gstgltestsrc.c:491
+ msgid "failed to draw pattern"
+ msgstr "desen çizilemedi"
+
++#: ext/gl/gstgltestsrc.c:492
+ msgid "A GL error occurred"
+ msgstr "Bir GL hatası oluştu"
+
++#: ext/gl/gstgltestsrc.c:498
+ msgid "format wasn't negotiated before get function"
+ msgstr "İşlev alınmadan önce biçim anlaşması yapılmadı"
+
++#: gst/encoding/gstencodebin.c:1636 gst/playback/gstplaybin2.c:3399
++#: gst/playback/gstplaysink.c:1522 gst/playback/gstplaysink.c:1535
++#: gst/playback/gstplaysink.c:1872 gst/playback/gstplaysink.c:1904
++#: gst/playback/gstplaysink.c:2488 gst/playback/gstplaysink.c:2537
++#: gst/playback/gstplaysink.c:2552 gst/playback/gstplaysink.c:2577
++#: gst/playback/gstplaysink.c:2609 gst/playback/gstplaysink.c:2757
++#: gst/playback/gstplaysink.c:2788 gst/playback/gstplaysink.c:3166
++#: gst/playback/gstplaysink.c:3175 gst/playback/gstplaysink.c:3184
++#: gst/playback/gstplaysink.c:3193 gst/playback/gstplaysink.c:3606
++#: gst/playback/gstplaysink.c:4526 gst/playback/gstplaysinkconvertbin.c:97
++#: gst/playback/gstplaysinkconvertbin.c:117
++#: gst/playback/gsturidecodebin.c:1472 gst/playback/gsturisourcebin.c:1597
+ #, c-format
+ msgid "Missing element '%s' - check your GStreamer installation."
+ msgstr "Kayıp öge '%s' - GStreamer kurulumunu denetleyin."
+
++#: gst/playback/gstdecodebin2.c:1883 gst/playback/gstparsebin.c:1553
+ msgid "Could not determine type of stream"
+ msgstr "Akışın türü belirlenemiyor"
+
++#: gst/playback/gstdecodebin2.c:2861 gst/playback/gstparsebin.c:2385
+ msgid "This appears to be a text file"
+ msgstr "Bu bir metin dosyasına benziyor"
+
++#: gst/playback/gstplaybin2.c:5452
+ msgid "Could not create \"uridecodebin\" element."
+ msgstr "\"uridecodebin\" ögesi oluşturulamıyor."
+
++#: gst/playback/gstplaybin3.c:4558
+ msgid "Could not create \"uridecodebin3\" element."
+ msgstr "\"uridecodebin3\" ögesi oluşturulamıyor."
+
++#: gst/playback/gstplaysink.c:2003
+ #, c-format
+ msgid "Both autovideosink and %s elements are missing."
+ msgstr "autovideosink ve %s ögelerininin ikisi de kayıp."
+
++#: gst/playback/gstplaysink.c:2007
+ msgid "The autovideosink element is missing."
+ msgstr "autovideosink ögesi kayıp."
+
++#: gst/playback/gstplaysink.c:2012
+ #, c-format
+ msgid "Configured videosink %s is not working."
+ msgstr "Yapılandırılmış olan videosink ögesi %s çalışmıyor."
+
++#: gst/playback/gstplaysink.c:2016
+ #, c-format
+ msgid "Both autovideosink and %s elements are not working."
+ msgstr "autovideosink ve %s ögelerinin ikisi de çalışmıyor."
+
++#: gst/playback/gstplaysink.c:2020
+ msgid "The autovideosink element is not working."
+ msgstr "autovideosink ögesi çalışmıyor."
+
++#: gst/playback/gstplaysink.c:2525
+ msgid "Custom text sink element is not usable."
+ msgstr "Özel metin gömme ögesi kullanılamaz durumda."
+
++#: gst/playback/gstplaysink.c:2903
+ msgid "No volume control found"
+ msgstr "Ses denetimi bulunamadı"
+
++#: gst/playback/gstplaysink.c:2933
+ #, c-format
+ msgid "Both autoaudiosink and %s elements are missing."
+ msgstr "autoaudiosink ve %s ögelerinin ikisi de kayıp."
+
++#: gst/playback/gstplaysink.c:2937
+ msgid "The autoaudiosink element is missing."
+ msgstr "autoaudiosink ögesi kayıp."
+
++#: gst/playback/gstplaysink.c:2942
+ #, c-format
+ msgid "Configured audiosink %s is not working."
+ msgstr "Yapılandırılmış olan audiosink ögesi %s çalışmıyor."
+
++#: gst/playback/gstplaysink.c:2946
+ #, c-format
+ msgid "Both autoaudiosink and %s elements are not working."
+ msgstr "autoaudiosink ve %s ögelerinin ikisi de çalışmıyor."
+
++#: gst/playback/gstplaysink.c:2950
+ msgid "The autoaudiosink element is not working."
+ msgstr "autoaudiosink ögesi çalışmıyor."
+
++#: gst/playback/gstplaysink.c:3269 gst/playback/gstplaysink.c:3274
+ msgid "Can't play a text file without video or visualizations."
+ msgstr "Video veya görsellik olmadan metin dosyasını oynatamaz."
+
++#: gst/playback/gsturidecodebin.c:921
+ #, c-format
+ msgid "No decoder available for type '%s'."
+ msgstr "'%s' türü için kod çözücü bulunamadı."
+
++#: gst/playback/gsturidecodebin.c:1378 gst/playback/gsturisourcebin.c:1491
+ msgid "No URI specified to play from."
+ msgstr "Çalmak için bir ağ adresi belirtilmedi."
+
++#: gst/playback/gsturidecodebin.c:1384 gst/playback/gsturisourcebin.c:1497
+ #, c-format
+ msgid "Invalid URI \"%s\"."
+ msgstr "Geçersiz URI \"%s\"."
+
++#: gst/playback/gsturidecodebin.c:1391 gst/playback/gsturisourcebin.c:1504
+ msgid "This stream type cannot be played yet."
+ msgstr "Bu akış türü henüz çalınamıyor."
+
++#: gst/playback/gsturidecodebin.c:1409 gst/playback/gsturisourcebin.c:1522
+ #, c-format
+ msgid "No URI handler implemented for \"%s\"."
+ msgstr "\"%s\" için hiçbir URI işleyicisi geliştirilmemiş."
+
++#: gst/playback/gsturidecodebin.c:2289 gst/playback/gsturisourcebin.c:2228
+ msgid "Source element is invalid."
+ msgstr "Kaynak ögesi geçersiz."
+
++#: gst/tcp/gsttcpclientsink.c:216
+ #, c-format
+ msgid "Error while sending data to \"%s:%d\"."
+ msgstr "Dosyayı şuraya gönderirken hata \"%s:%d\"."
+
++#: gst-libs/gst/audio/gstaudiobasesrc.c:842
+ msgid "Can't record audio fast enough"
+ msgstr "Sesi yeterince hızlı kaydedemiyor"
+
++#: gst-libs/gst/audio/gstaudiocdsrc.c:1625
+ msgid "This CD has no audio tracks"
+ msgstr "Bu CD hiç ses izi içermiyor"
+
++#: gst-libs/gst/pbutils/descriptions.c:89
+ msgid "ID3 tag"
+ msgstr "ID3 etiketi"
+
++#: gst-libs/gst/pbutils/descriptions.c:90
++#: gst-libs/gst/pbutils/descriptions.c:91
+ msgid "APE tag"
+ msgstr "APE etiketi"
+
++#: gst-libs/gst/pbutils/descriptions.c:92
+ msgid "ICY internet radio"
+ msgstr "ICY internet radyosu"
+
++#: gst-libs/gst/pbutils/descriptions.c:127
+ msgid "Apple Lossless Audio (ALAC)"
+ msgstr "Apple Lossless Audio (ALAC)"
+
++#: gst-libs/gst/pbutils/descriptions.c:143
+ msgid "Free Lossless Audio Codec (FLAC)"
+ msgstr "Free Lossless Audio Codec (FLAC)"
+
++#: gst-libs/gst/pbutils/descriptions.c:173
++#: gst-libs/gst/pbutils/descriptions.c:174
+ msgid "Lossless True Audio (TTA)"
+ msgstr "Lossless True Audio (TTA)"
+
++#: gst-libs/gst/pbutils/descriptions.c:182
+ msgid "Windows Media Speech"
+ msgstr "Windows Media Hecelemesi"
+
++#: gst-libs/gst/pbutils/descriptions.c:199
+ msgid "CYUV Lossless"
+ msgstr "CYUV Kayıpsız"
+
++#: gst-libs/gst/pbutils/descriptions.c:203
+ msgid "FFMpeg v1"
+ msgstr "FFMpeg v1"
+
++#: gst-libs/gst/pbutils/descriptions.c:217
+ msgid "Lossless MSZH"
+ msgstr "Kayıpsız MSZH"
+
++#: gst-libs/gst/pbutils/descriptions.c:228
+ msgid "Run-length encoding"
+ msgstr "Çalıştırma-uzunluğu kodlaması"
+
++#: gst-libs/gst/pbutils/descriptions.c:274
+ msgid "Timed Text"
+ msgstr "Zamanlanmış Metin"
+
++#: gst-libs/gst/pbutils/descriptions.c:278
+ msgid "Subtitle"
+ msgstr "Alt yazı"
+
++#: gst-libs/gst/pbutils/descriptions.c:279
+ msgid "MPL2 subtitle format"
+ msgstr "MPL2 alt yazı kipi"
+
++#: gst-libs/gst/pbutils/descriptions.c:280
+ msgid "DKS subtitle format"
+ msgstr "DKS alt yazı kipi"
+
++#: gst-libs/gst/pbutils/descriptions.c:281
+ msgid "QTtext subtitle format"
+ msgstr "QTtext alt yazı kipi"
+
++#: gst-libs/gst/pbutils/descriptions.c:282
+ msgid "Sami subtitle format"
+ msgstr "Sami alt yazı kipi"
+
++#: gst-libs/gst/pbutils/descriptions.c:283
+ msgid "TMPlayer subtitle format"
+ msgstr "TMPlayer alt yazı kipi"
+
++#: gst-libs/gst/pbutils/descriptions.c:287
+ msgid "CEA 608 Closed Caption"
+ msgstr "CEA 608 Kapalı Başlık"
+
++#: gst-libs/gst/pbutils/descriptions.c:288
+ msgid "CEA 708 Closed Caption"
+ msgstr "CEA 708 Kapalı Başlık"
+
++#: gst-libs/gst/pbutils/descriptions.c:289
+ msgid "Kate subtitle format"
+ msgstr "Kate alt yazı kipi"
+
++#: gst-libs/gst/pbutils/descriptions.c:290
+ msgid "WebVTT subtitle format"
+ msgstr "WebVTT alt yazı biçimi"
+
++#: gst-libs/gst/pbutils/descriptions.c:455
++#: gst-libs/gst/pbutils/descriptions.c:458
++#: gst-libs/gst/pbutils/descriptions.c:508
+ msgid "Uncompressed video"
+ msgstr "Sıkıştırılmamış video"
+
++#: gst-libs/gst/pbutils/descriptions.c:463
+ msgid "Uncompressed gray"
+ msgstr "Sıkıştırılmamış gri resim"
+
++#: gst-libs/gst/pbutils/descriptions.c:486
+ #, c-format
+ msgid "Uncompressed packed YUV %s"
+ msgstr "Sıkıştırılmamış paket YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:488
+ #, c-format
+ msgid "Uncompressed semi-planar YUV %s"
+ msgstr "Sıkıştırılmamış yarı düzlem YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:490
+ #, c-format
+ msgid "Uncompressed planar YUV %s"
+ msgstr "Sıkıştırılmamış düzlem YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:501
+ #, c-format
+ msgid "Uncompressed palettized %d-bit %s"
+ msgstr "Sıkıştırılmamış paletlenmiş %d-bit %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:504
+ #, c-format
+ msgid "Uncompressed %d-bit %s"
+ msgstr "Şıkıştırılmamış %d-bit %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:586
+ #, c-format
+ msgid "DivX MPEG-4 Version %d"
+ msgstr "DivX MPEG-4 Versiyonu %d"
+
++#: gst-libs/gst/pbutils/descriptions.c:834
+ msgid "Uncompressed audio"
+ msgstr "Sıkıştırılmamış ses"
+
++#: gst-libs/gst/pbutils/descriptions.c:840
+ #, c-format
+ msgid "Raw %d-bit %s audio"
+ msgstr "Ham %d-bit %s audio"
+
++#: gst-libs/gst/pbutils/descriptions.c:942
+ msgid "Audio CD source"
+ msgstr "Ses CD'si kaynağı"
+
++#: gst-libs/gst/pbutils/descriptions.c:945
+ msgid "DVD source"
+ msgstr "DVD kaynağı"
+
++#: gst-libs/gst/pbutils/descriptions.c:948
+ msgid "Real Time Streaming Protocol (RTSP) source"
+ msgstr "Gerçek Zamanlı Akış Protokolu (RTSP) kaynağı"
+
++#: gst-libs/gst/pbutils/descriptions.c:952
+ msgid "Microsoft Media Server (MMS) protocol source"
+ msgstr "Microsoft Ortam Sunucusu (MMS) protokol kaynağı"
+
++#: gst-libs/gst/pbutils/descriptions.c:960
+ #, c-format
+ msgid "%s protocol source"
+ msgstr "%s protokol kaynağı"
+
++#: gst-libs/gst/pbutils/descriptions.c:1034
+ #, c-format
+ msgid "%s video RTP depayloader"
+ msgstr "%s video RTP depayloader"
+
++#: gst-libs/gst/pbutils/descriptions.c:1036
+ #, c-format
+ msgid "%s audio RTP depayloader"
+ msgstr "%s audio RTP depayloader"
+
++#: gst-libs/gst/pbutils/descriptions.c:1038
+ #, c-format
+ msgid "%s RTP depayloader"
+ msgstr "%s RTP depayloader"
+
++#: gst-libs/gst/pbutils/descriptions.c:1045
+ #, c-format
+ msgid "%s demuxer"
+ msgstr "%s demuxer"
+
++#: gst-libs/gst/pbutils/descriptions.c:1047
+ #, c-format
+ msgid "%s decoder"
+ msgstr "%s kod çözücü"
+
++#: gst-libs/gst/pbutils/descriptions.c:1086
+ #, c-format
+ msgid "%s video RTP payloader"
+ msgstr "%s video RTP payloader"
+
++#: gst-libs/gst/pbutils/descriptions.c:1088
+ #, c-format
+ msgid "%s audio RTP payloader"
+ msgstr "%s audio RTP payloader"
+
++#: gst-libs/gst/pbutils/descriptions.c:1090
+ #, c-format
+ msgid "%s RTP payloader"
+ msgstr "%s RTP payloader"
+
++#: gst-libs/gst/pbutils/descriptions.c:1097
+ #, c-format
+ msgid "%s muxer"
+ msgstr "%s muxer"
+
++#: gst-libs/gst/pbutils/descriptions.c:1099
+ #, c-format
+ msgid "%s encoder"
+ msgstr "%s kodlayıcı"
+
++#: gst-libs/gst/pbutils/descriptions.c:1133
+ #, c-format
+ msgid "GStreamer element %s"
+ msgstr "GStreamer ögesi %s"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:558
+ msgid "Unknown source element"
+ msgstr "Bilinmeyen kaynak ögesi"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:561
+ msgid "Unknown sink element"
+ msgstr "Bilinmeyen gömme ögesi"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:564
+ msgid "Unknown element"
+ msgstr "Bilinmeyen öge"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:567
+ msgid "Unknown decoder element"
+ msgstr "Bilinmeyen kod çözücü ögesi"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:570
+ msgid "Unknown encoder element"
+ msgstr "Bilinmeyen kodlama ögesi"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:575
+ msgid "Plugin or element of unknown type"
+ msgstr "Bilinmeyen türde eklenti veya öge"
+
++#: gst-libs/gst/tag/gsttagdemux.c:1243
+ msgid "Failed to read tag: not enough data"
+ msgstr "Etiketi okunamadı: yetersiz veri"
+
++#: gst-libs/gst/tag/tags.c:77
+ msgid "track ID"
+ msgstr "iz KİMLİĞİ"
+
++#: gst-libs/gst/tag/tags.c:77
+ msgid "MusicBrainz track ID"
+ msgstr "MusicBrainz iz KİMLİĞİ"
+
++#: gst-libs/gst/tag/tags.c:79
+ msgid "artist ID"
+ msgstr "sanatçı KİMLİĞİ"
+
++#: gst-libs/gst/tag/tags.c:79
+ msgid "MusicBrainz artist ID"
+ msgstr "MusicBrainz sanatçı KİMLİĞİ"
+
++#: gst-libs/gst/tag/tags.c:81
+ msgid "album ID"
+ msgstr "albüm KİMLİĞİ"
+
++#: gst-libs/gst/tag/tags.c:81
+ msgid "MusicBrainz album ID"
+ msgstr "MusicBrainz albümü KİMLİĞİ"
+
++#: gst-libs/gst/tag/tags.c:84
+ msgid "album artist ID"
+ msgstr "albüm sanatçısı KİMLİĞİ"
+
++#: gst-libs/gst/tag/tags.c:84
+ msgid "MusicBrainz album artist ID"
+ msgstr "MusicBrainz albüm sanatçısı KİMLİĞİ"
+
++#: gst-libs/gst/tag/tags.c:86
+ msgid "track TRM ID"
+ msgstr "iz TRM KİMLİĞİ"
+
++#: gst-libs/gst/tag/tags.c:86
+ msgid "MusicBrainz TRM ID"
+ msgstr "MusicBrainz TRM BAŞLIĞI"
+
++#: gst-libs/gst/tag/tags.c:108
+ msgid "capturing shutter speed"
+ msgstr "deklanşör hızını yakalıyor"
+
++#: gst-libs/gst/tag/tags.c:109
+ msgid "Shutter speed used when capturing an image, in seconds"
+ msgstr "Saniye bazında, bir resim çekerken kullanılan objektif hızı"
+
++#: gst-libs/gst/tag/tags.c:112
+ msgid "capturing focal ratio"
+ msgstr "odaksal oranı yakalıyor"
+
++#: gst-libs/gst/tag/tags.c:113
+ msgid "Focal ratio (f-number) used when capturing the image"
+ msgstr "Resim çekerken kullanılan odaksal oran (f-numarası)"
+
++#: gst-libs/gst/tag/tags.c:116
+ msgid "capturing focal length"
+ msgstr "odaksal uzunluğu yakalıyor"
+
++#: gst-libs/gst/tag/tags.c:117
+ msgid "Focal length of the lens used capturing the image, in mm"
+ msgstr "Mm bazında, resim çekerken kullanılan odaksal uzunluk"
+
++#: gst-libs/gst/tag/tags.c:121
+ msgid "capturing 35 mm equivalent focal length"
+ msgstr "35 mm eşdeğer odak uzaklığı yakalıyor"
+
-msgid ""
-"35 mm equivalent focal length of the lens used capturing the image, in mm"
-msgstr ""
-"resim çekerken kullanılan merceğin 35 mm eşdeğer odak uzaklığı, mm bazında"
++#: gst-libs/gst/tag/tags.c:122
++msgid "35 mm equivalent focal length of the lens used capturing the image, in mm"
++msgstr "resim çekerken kullanılan merceğin 35 mm eşdeğer odak uzaklığı, mm bazında"
+
++#: gst-libs/gst/tag/tags.c:126
+ msgid "capturing digital zoom ratio"
+ msgstr "sayısal zum oranını yakalıyor"
+
++#: gst-libs/gst/tag/tags.c:127
+ msgid "Digital zoom ratio used when capturing an image"
+ msgstr "Bir resim çekerken kullanılan sayısal yakınlaştırma oranı"
+
++#: gst-libs/gst/tag/tags.c:130
+ msgid "capturing iso speed"
+ msgstr "iso hızını yakalıyor"
+
++#: gst-libs/gst/tag/tags.c:131
+ msgid "The ISO speed used when capturing an image"
+ msgstr "Bir resim çekerken kullanılan ISO hızı"
+
++#: gst-libs/gst/tag/tags.c:134
+ msgid "capturing exposure program"
+ msgstr "pozlandırma programını yakalıyor"
+
++#: gst-libs/gst/tag/tags.c:135
+ msgid "The exposure program used when capturing an image"
+ msgstr "Bir resim çekerken kullanılan pozlandırma programı"
+
++#: gst-libs/gst/tag/tags.c:138
+ msgid "capturing exposure mode"
+ msgstr "pozlandırma kipini yakalıyor"
+
++#: gst-libs/gst/tag/tags.c:139
+ msgid "The exposure mode used when capturing an image"
+ msgstr "Bir resim çekerken kullanılan pozlandırma kipi"
+
++#: gst-libs/gst/tag/tags.c:142
+ msgid "capturing exposure compensation"
+ msgstr "pozlandırma telafisi yakalanıyor"
+
++#: gst-libs/gst/tag/tags.c:143
+ msgid "The exposure compensation used when capturing an image"
+ msgstr "Bir resim çekerken kullanılan pozlandırma kipi"
+
++#: gst-libs/gst/tag/tags.c:146
+ msgid "capturing scene capture type"
+ msgstr "manzara çekme türünü yakalıyor"
+
++#: gst-libs/gst/tag/tags.c:147
+ msgid "The scene capture mode used when capturing an image"
+ msgstr "Bir resim çekerken kullanılan manzara yakalama kipi"
+
++#: gst-libs/gst/tag/tags.c:150
+ msgid "capturing gain adjustment"
+ msgstr "kazanç ayarı yakalanıyor"
+
++#: gst-libs/gst/tag/tags.c:151
+ msgid "The overall gain adjustment applied on an image"
+ msgstr "Bir resime uygulanan kapsamlı kazanç ayarı"
+
++#: gst-libs/gst/tag/tags.c:154
+ msgid "capturing white balance"
+ msgstr "beyaz dengesi yakalanıyor"
+
++#: gst-libs/gst/tag/tags.c:155
+ msgid "The white balance mode set when capturing an image"
+ msgstr "Bir resim çekerken kullanılan beyaz dengesi kipi"
+
++#: gst-libs/gst/tag/tags.c:158
+ msgid "capturing contrast"
+ msgstr "zıtlık yakalanıyor"
+
++#: gst-libs/gst/tag/tags.c:159
+ msgid "The direction of contrast processing applied when capturing an image"
+ msgstr "Bir resim çekerken uygulanan zıtlık işleminin yönü"
+
++#: gst-libs/gst/tag/tags.c:163
+ msgid "capturing saturation"
+ msgstr "canlılık yakalanıyor"
+
++#: gst-libs/gst/tag/tags.c:164
+ msgid "The direction of saturation processing applied when capturing an image"
+ msgstr "Bir resim çekerken uygulanan doygunluk işleminin yönü"
+
++#: gst-libs/gst/tag/tags.c:168
+ msgid "capturing sharpness"
+ msgstr "keskinlik yakalanıyor"
+
++#: gst-libs/gst/tag/tags.c:169
+ msgid "The direction of sharpness processing applied when capturing an image"
+ msgstr "Bir resim çekerken uygulanan keskinlik işleminin yönü"
+
++#: gst-libs/gst/tag/tags.c:173
+ msgid "capturing flash fired"
+ msgstr "flaşın yanışı yakalanıyor"
+
++#: gst-libs/gst/tag/tags.c:174
+ msgid "If the flash fired while capturing an image"
+ msgstr "Bir resim çekerken eğer flaş patlamışsa"
+
++#: gst-libs/gst/tag/tags.c:177
+ msgid "capturing flash mode"
+ msgstr "flaş kipi yakalanıyor"
+
++#: gst-libs/gst/tag/tags.c:178
+ msgid "The selected flash mode while capturing an image"
+ msgstr "Bir resim çekerken kullanılan flaş kipi"
+
++#: gst-libs/gst/tag/tags.c:181
+ msgid "capturing metering mode"
+ msgstr "metreleme kipi yakalanıyor"
+
-msgid ""
-"The metering mode used while determining exposure for capturing an image"
++#: gst-libs/gst/tag/tags.c:182
++msgid "The metering mode used while determining exposure for capturing an image"
+ msgstr "Bir resim çekmek için pozlandırmanın metreleme kipi"
+
++#: gst-libs/gst/tag/tags.c:186
+ msgid "capturing source"
+ msgstr "kaynak yakalanıyor"
+
++#: gst-libs/gst/tag/tags.c:187
+ msgid "The source or type of device used for the capture"
+ msgstr "Resim çekerken kullanılan aygıtın kaynak türü"
+
++#: gst-libs/gst/tag/tags.c:190
+ msgid "image horizontal ppi"
+ msgstr "yatay resim ppi değeri"
+
++#: gst-libs/gst/tag/tags.c:191
+ msgid "Media (image/video) intended horizontal pixel density in ppi"
+ msgstr "Ortam (resim/video)'nın ppi değerinden yatay piksel yoğunluğu"
+
++#: gst-libs/gst/tag/tags.c:194
+ msgid "image vertical ppi"
+ msgstr "resim dikey ppi değeri"
+
++#: gst-libs/gst/tag/tags.c:195
+ msgid "Media (image/video) intended vertical pixel density in ppi"
+ msgstr "Ortam (resim/video)'nun ppi değerinden dikey piksel yoğunluğu"
+
++#: gst-libs/gst/tag/tags.c:198
+ msgid "ID3v2 frame"
+ msgstr "ID3v2 çerçevesi"
+
++#: gst-libs/gst/tag/tags.c:198
+ msgid "unparsed id3v2 tag frame"
+ msgstr "ayrıştırılmamış id3v2 etiket çerçevesi"
+
++#: gst-libs/gst/tag/tags.c:202
+ msgid "musical-key"
+ msgstr "müzik anahtarı"
+
++#: gst-libs/gst/tag/tags.c:202
+ msgid "Initial key in which the sound starts"
+ msgstr "Sesin başladığı başlangıç anahtarı"
+
++#: tools/gst-device-monitor.c:260 tools/gst-play.c:1444
+ msgid "Print version information and exit"
+ msgstr "Sürüm bilgisini yaz ve çık"
+
-msgid ""
-"Don't exit after showing the initial device list, but wait for devices to "
-"added/removed."
-msgstr ""
-"Başlangıç aygıt listesini gösterdikten sonra kapanma, eklenecek/çıkartılacak "
-"aygıtları bekle."
++#: tools/gst-device-monitor.c:262
++msgid "Don't exit after showing the initial device list, but wait for devices to added/removed."
++msgstr "Başlangıç aygıt listesini gösterdikten sonra kapanma, eklenecek/çıkartılacak aygıtları bekle."
+
++#: tools/gst-play.c:310
+ #, c-format
+ msgid "Volume: %.0f%%"
+ msgstr "Ses: %%%.0f"
+
++#: tools/gst-play.c:349
+ msgid "Buffering..."
+ msgstr "Tampon belleğe alınıyor..."
+
++#: tools/gst-play.c:370
+ msgid "Clock lost, selecting a new one\n"
+ msgstr "Saat kayboldu, yenisi seçiliyor\n"
+
++#: tools/gst-play.c:400 tools/gst-play.c:446 tools/gst-play.c:883
++#: tools/gst-play.c:1347
+ msgid "Reached end of play list."
+ msgstr "Çalma listesinin sonuna ulaşıldı."
+
++#: tools/gst-play.c:613
+ msgid "Paused"
+ msgstr "Duraklatıldı"
+
++#: tools/gst-play.c:671
+ #, c-format
+ msgid "Now playing %s\n"
+ msgstr "Şimdi %s oynatılıyor\n"
+
++#: tools/gst-play.c:734
+ #, c-format
+ msgid "About to finish, preparing next title: %s"
+ msgstr "Bitmek üzere, sonraki başlık hazırlanıyor: %s"
+
++#: tools/gst-play.c:979
+ #, c-format
+ msgid "Playback rate: %.2f"
+ msgstr "Oynatma hızı: %.2f"
+
++#: tools/gst-play.c:983
+ #, c-format
+ msgid "Could not change playback rate to %.2f"
+ msgstr "Oynatma hızı %.2f'e değiştirilemiyor"
+
++#: tools/gst-play.c:1287
+ msgid "space"
+ msgstr "boşluk"
+
++#: tools/gst-play.c:1287
+ msgid "pause/unpause"
+ msgstr "duraklat/sürdür"
+
++#: tools/gst-play.c:1288
+ msgid "q or ESC"
+ msgstr "q veya ESC"
+
++#: tools/gst-play.c:1288
+ msgid "quit"
+ msgstr "çık"
+
++#: tools/gst-play.c:1289
+ msgid "> or n"
+ msgstr "> veya n"
+
++#: tools/gst-play.c:1289
+ msgid "play next"
+ msgstr "sonrakini oynat"
+
++#: tools/gst-play.c:1290
+ msgid "< or b"
+ msgstr "< veya b"
+
++#: tools/gst-play.c:1290
+ msgid "play previous"
+ msgstr "öncekini oynat"
+
++#: tools/gst-play.c:1291
+ msgid "seek forward"
+ msgstr "ileri sar"
+
++#: tools/gst-play.c:1292
+ msgid "seek backward"
+ msgstr "geri sar"
+
++#: tools/gst-play.c:1293
+ msgid "volume up"
+ msgstr "sesi yükselt"
+
++#: tools/gst-play.c:1294
+ msgid "volume down"
+ msgstr "sesi alçalt"
+
++#: tools/gst-play.c:1295
+ msgid "increase playback rate"
+ msgstr "oynatma hızını artır"
+
++#: tools/gst-play.c:1296
+ msgid "decrease playback rate"
+ msgstr "oynatma hızını azalt"
+
++#: tools/gst-play.c:1297
+ msgid "change playback direction"
+ msgstr "oynatma yönünü değiştir"
+
++#: tools/gst-play.c:1298
+ msgid "enable/disable trick modes"
+ msgstr "hile modlarını etkinleştir/devre dışı bırak"
+
++#: tools/gst-play.c:1299
+ msgid "change audio track"
+ msgstr "ses izini değiştir"
+
++#: tools/gst-play.c:1300
+ msgid "change video track"
+ msgstr "video izini değiştir"
+
++#: tools/gst-play.c:1301
+ msgid "change subtitle track"
+ msgstr "alt yazı izini değiştir"
+
++#: tools/gst-play.c:1302
+ msgid "seek to beginning"
+ msgstr "başa sar"
+
++#: tools/gst-play.c:1303
+ msgid "show keyboard shortcuts"
+ msgstr "klavye kısayollarını göster"
+
++#: tools/gst-play.c:1306
+ msgid "Interactive mode - keyboard controls:"
+ msgstr "Etkileşimli kip - klavye denetimleri:"
+
++#: tools/gst-play.c:1439
+ msgid "Output status information and property notifications"
+ msgstr "Çıktı durum bilgisi ve özellik bildirimleri"
+
++#: tools/gst-play.c:1441
+ msgid "Control playback behaviour setting playbin 'flags' property"
+ msgstr "Oynatma davranışını kontrol etme ayarı playbin 'bayrakları' özelliği"
+
++#: tools/gst-play.c:1446
+ msgid "Video sink to use (default is autovideosink)"
+ msgstr "Kullanılacak video alıcı (varsayılan autovideosink)"
+
++#: tools/gst-play.c:1448
+ msgid "Audio sink to use (default is autoaudiosink)"
+ msgstr "Kullanılacak ses alıcı (varsayılan autoaudiosink)"
+
++#: tools/gst-play.c:1450
+ msgid "Enable gapless playback"
+ msgstr "Aralıksız çalmayı etkinleştir"
+
++#: tools/gst-play.c:1452
+ msgid "Shuffle playlist"
+ msgstr "Çalma listesini karıştır"
+
++#: tools/gst-play.c:1455
+ msgid "Disable interactive control via the keyboard"
+ msgstr "Klavye aracılığıyla etkileşimli denetimi devre dışı bırak"
+
++#: tools/gst-play.c:1457
+ msgid "Volume"
+ msgstr "Ses"
+
++#: tools/gst-play.c:1459
+ msgid "Playlist file containing input media files"
+ msgstr "Çalma listesi dosyası girdi ortam dosyaları içeriyor"
+
++#: tools/gst-play.c:1461
+ msgid "Do not print any output (apart from errors)"
+ msgstr "Herhangi bir çıktı yazdırma (hatalar dışında)"
+
++#: tools/gst-play.c:1463
+ msgid "Use playbin3 pipeline"
+ msgstr "playbin3 ardışık düzenini kullan"
+
++#: tools/gst-play.c:1464
+ msgid "(default varies depending on 'USE_PLAYBIN' env variable)"
+ msgstr "(varsayılan 'USE_PLAYBIN' env değişkenine bağlı olarak değişir)"
+
++#: tools/gst-play.c:1468
++msgid "Keep showing the last frame on EOS until quit or playlist change command (gapless is ignored)"
++msgstr "Çıkma veya çalma listesi değiştirme komutuna kadar son kareyi EOS'da göstermeye devam et (aralıksız oynatma göz ardı edilir)"
++
++#: tools/gst-play.c:1546
+ #, c-format
+ msgid "Usage: %s FILE1|URI1 [FILE2|URI2] [FILE3|URI3] ..."
+ msgstr "Kullanım: %s DOSYA1|URI1 [DOSYA2|URI2] [DOSYA3|URI3] ..."
+
++#: tools/gst-play.c:1550
+ msgid "You must provide at least one filename or URI to play."
+ msgstr "Çalmak için en azından bir dosya adı veya URI sağlamalısınız."
+
++#: tools/gst-play.c:1590
+ msgid "Press 'k' to see a list of keyboard shortcuts.\n"
+ msgstr "Klavye kısayollarının bir listesini görmek için 'k'ye basın.\n"
+
+ #~ msgid "Could not create \"decodebin3\" element."
+ #~ msgstr "\"decodebin3\" ögesi oluşturamıyor."
+
+ #~ msgid "Could not create \"urisourcebin\" element."
+ #~ msgstr "\"urisourcebin\" ögesi oluşturulamıyor."
+
+ #~ msgid "Internal data stream error."
+ #~ msgstr "İç veri akışı hatası."
+
+ #~ msgid "A %s plugin is required to play this stream, but not installed."
+ #~ msgstr "Bir %s eklenti bu akışı çalmak için gerekli, fakat kurulu değil."
+
+ #~ msgid "Uncompressed %s YUV %s"
+ #~ msgstr "Sıkıştırılmamış %s YUV %s"
+
+ #~ msgid "Master"
+ #~ msgstr "Ana"
+
+ #~ msgid "Bass"
+ #~ msgstr "Bas"
+
+ #~ msgid "Treble"
+ #~ msgstr "Tiz"
+
+ #~ msgid "PCM"
+ #~ msgstr "PCM"
+
+ #~ msgid "Synth"
+ #~ msgstr "Synthizer"
+
+ #~ msgid "Line-in"
+ #~ msgstr "Hat-girişi"
+
+ #~ msgid "CD"
+ #~ msgstr "CD"
+
+ #~ msgid "Microphone"
+ #~ msgstr "Mikrofon"
+
+ #~ msgid "PC Speaker"
+ #~ msgstr "PC Hoparlörü"
+
+ #~ msgid "Playback"
+ #~ msgstr "Çalma"
+
+ #~ msgid "Capture"
+ #~ msgstr "Yakala"
+
+ #~ msgid "Could not open vfs file \"%s\" for writing: %s."
+ #~ msgstr "Vfs dosyası \"%s\" yazmak için açılamıyor: %s."
+
+ #~ msgid "No filename given"
+ #~ msgstr "Dosya adı verilmedi"
+
+ #~ msgid "Could not close vfs file \"%s\"."
+ #~ msgstr "Vfs dosyası kapatılamıyor \"%s\"."
+
+ #~ msgid "Error while writing to file \"%s\"."
+ #~ msgstr "Dosyaya yazmada hata \"%s\"."
+
+ #~ msgid "Invalid subtitle URI \"%s\", subtitles disabled."
+ #~ msgstr "Geçersiz altyazı adresi \"%s\", altyazılar kaldırıldı."
+
+ #~ msgid "RTSP streams cannot be played yet."
+ #~ msgstr "RTSP akışları henüz çalınamıyor."
+
-#~ msgid ""
-#~ "Only a subtitle stream was detected. Either you are loading a subtitle "
-#~ "file or some other type of text file, or the media file was not "
-#~ "recognized."
-#~ msgstr ""
-#~ "Bir altyazı dosyası algılandı. Siz ya bir altyazı veya başka bir metin "
-#~ "dosyası yüklemek üzeresiniz, veya çokluortam dosyası tanınamadı."
-
-#~ msgid ""
-#~ "You do not have a decoder installed to handle this file. You might need "
-#~ "to install the necessary plugins."
-#~ msgstr ""
-#~ "Bu yüklü dosyası çözümleyecek bir kod çözücüye sahip değilsiniz. Gerekli "
-#~ "eklentileri kurmak zorundasınız."
++#~ msgid "Only a subtitle stream was detected. Either you are loading a subtitle file or some other type of text file, or the media file was not recognized."
++#~ msgstr "Bir altyazı dosyası algılandı. Siz ya bir altyazı veya başka bir metin dosyası yüklemek üzeresiniz, veya çokluortam dosyası tanınamadı."
++
++#~ msgid "You do not have a decoder installed to handle this file. You might need to install the necessary plugins."
++#~ msgstr "Bu yüklü dosyası çözümleyecek bir kod çözücüye sahip değilsiniz. Gerekli eklentileri kurmak zorundasınız."
+
+ #~ msgid "This is not a media file"
+ #~ msgstr "Bu bir çokluortam dosyası değil."
+
+ #~ msgid "A subtitle stream was detected, but no video stream."
+ #~ msgstr "Bir altyazı akışı algılanda, ancak vidyo akışı bulunamadı."
+
+ #~ msgid "Both autovideosink and xvimagesink elements are missing."
+ #~ msgstr "Autovideosink ve xvimagesink öğelerininin ikisi de kayıp."
+
+ #~ msgid "Both autoaudiosink and alsasink elements are missing."
+ #~ msgstr "Autoaudiosink ve alsasink öğelerinin ikisi de kayıp."
+
+ #~ msgid "Error while sending gdp header data to \"%s:%d\"."
+ #~ msgstr "Bir gdp başlık verisini şuraya gönderirken hata \"%s:%d\"."
+
+ #~ msgid "Error while sending gdp payload data to \"%s:%d\"."
+ #~ msgstr "Bir gdp verisini şuraya gönderirken hata \"%s:%d\"."
+
+ #~ msgid "Connection to %s:%d refused."
+ #~ msgstr "Şuraya bağlantı %s:%d reddedildi."
+
+ #~ msgid "Uncompressed planar YVU 4:2:0"
+ #~ msgstr "Sıkıştırılmamış planar YVU 4:2:0"
+
+ #~ msgid "Uncompressed packed YUV 4:1:0"
+ #~ msgstr "Sıkıştırılmamış paket YUV 4:1:0"
+
+ #~ msgid "Uncompressed packed YVU 4:1:0"
+ #~ msgstr "Sıkıştırılmamış paket YVU 4:1:0"
+
+ #~ msgid "Uncompressed packed YUV 4:1:1"
+ #~ msgstr "Sıkıştırılmamış paket YUV 4:1:1"
+
+ #~ msgid "Uncompressed packed YUV 4:4:4"
+ #~ msgstr "Sıkıştırılmamış paket YUV 4:4:4"
+
+ #~ msgid "Uncompressed planar YUV 4:2:2"
+ #~ msgstr "Sıkıştırılmamış planar YUV 4:2:2"
+
+ #~ msgid "Uncompressed planar YUV 4:1:1"
+ #~ msgstr "Sıkıştırılmamış planar YUV 4:1:1"
+
+ #~ msgid "Uncompressed black and white Y-plane"
+ #~ msgstr "Sıkıştırılmamış siyah ve beyaz Y"
+
+ #~ msgid "Raw PCM audio"
+ #~ msgstr "Ham PCM ses"
+
+ #~ msgid "Raw %d-bit floating-point audio"
+ #~ msgstr "Ham %d-bit yüzer-noktalı ses"
+
+ #~ msgid "Raw floating-point audio"
+ #~ msgstr "Ham yüzer-noktalı ses"
+
+ #~ msgid "No device specified."
+ #~ msgstr "Aygıt belirtilmedi."
+
+ #~ msgid "Device \"%s\" does not exist."
+ #~ msgstr "Aygıt \"%s\" bulunamadı."
+
+ #~ msgid "Device \"%s\" is already being used."
+ #~ msgstr "Aygıt \"%s\" zaten kullanımda."
+
+ #~ msgid "Could not open device \"%s\" for reading and writing."
+ #~ msgstr "Şu aygıtı\"%s\" for okuma veya yazma için açamıyor."
+
+ #~ msgid "Can't display both text subtitles and subpictures."
+ #~ msgstr "Metin altyazılarını altgörüntülerini oynatamıyor."
+
+ #~ msgid "No Temp directory specified."
+ #~ msgstr "Geçici Temp dizini belirtilmedi."
+
+ #~ msgid "Could not create temp file \"%s\"."
+ #~ msgstr "Geçici dosya oluşturulamıyor \"%s\"."
+
+ #~ msgid "Could not open file \"%s\" for reading."
+ #~ msgstr "Dosyayı \"%s\" okumak için açamıyor."
+
+ #~ msgid "Internal data flow error."
+ #~ msgstr "İç veri akışı hatası."
+
+ #~ msgid "Could not create \"queue2\" element."
+ #~ msgstr "Bir \"queue2\" öğesi oluşturamıyor."
+
+ #~ msgid "Could not create \"typefind\" element."
+ #~ msgstr "Bir \"typefind\" öğesi oluşturamıyor."
+
+ #~ msgid "No file name specified."
+ #~ msgstr "Dosya adı belirtilmedi."
--- /dev/null
-"Project-Id-Version: gst-plugins-base 1.15.1\n"
+ # Ukrainian translation to gst-plugins.
+ # Copyright (C) 2004 Free Software Foundation, Inc.
+ # This file is distributed under the same license as the gst-plugins-base package.
+ #
+ # Maxim V. Dziumanenko <dziumanenko@gmail.com>, 2004-2007.
+ # Yuri Chornoivan <yurchor@ukr.net>, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2019.
+ msgid ""
+ msgstr ""
-"POT-Creation-Date: 2019-02-26 11:43+0000\n"
-"PO-Revision-Date: 2019-01-24 22:01+0200\n"
++"Project-Id-Version: gst-plugins-base 1.16.0\n"
+ "Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-"Language-Team: Ukrainian <translation-team-uk@lists.sourceforge.net>\n"
++"POT-Creation-Date: 2019-04-19 00:20+0100\n"
++"PO-Revision-Date: 2019-09-03 10:25+0300\n"
+ "Last-Translator: Yuri Chornoivan <yurchor@ukr.net>\n"
-"X-Generator: Lokalize 2.0\n"
++"Language-Team: Ukrainian <trans-uk@lists.fedoraproject.org>\n"
+ "Language: uk\n"
+ "MIME-Version: 1.0\n"
+ "Content-Type: text/plain; charset=UTF-8\n"
+ "Content-Transfer-Encoding: 8bit\n"
+ "X-Bugs: Report translation errors to the Language-Team address.\n"
+ "Plural-Forms: nplurals=1; plural=0;\n"
-msgid ""
-"Could not open audio device for playback. Device is being used by another "
-"application."
-msgstr ""
-"Не вдалося відкрити пристрій для відтворення. Пристрій використовується "
-"сторонньою програмою."
++"X-Generator: Lokalize 19.04.0\n"
+
++#: ext/alsa/gstalsasink.c:579
+ msgid "Could not open device for playback in mono mode."
+ msgstr "Не вдалося відкрити пристрій відтворення у режимі моно."
+
++#: ext/alsa/gstalsasink.c:581
+ msgid "Could not open device for playback in stereo mode."
+ msgstr "Не вдалося відкрити пристрій відтворення у режимі стерео."
+
++#: ext/alsa/gstalsasink.c:585
+ #, c-format
+ msgid "Could not open device for playback in %d-channel mode."
+ msgstr "Не вдалося відкрити пристрій відтворення у %d канальному режимі."
+
-msgstr ""
-"Помилка під час спроби вивести дані на звуковий пристрій. Цей пристрій було "
-"від’єднано."
++#: ext/alsa/gstalsasink.c:858
++msgid "Could not open audio device for playback. Device is being used by another application."
++msgstr "Не вдалося відкрити пристрій для відтворення. Пристрій використовується сторонньою програмою."
+
++#: ext/alsa/gstalsasink.c:863
+ msgid "Could not open audio device for playback."
+ msgstr "Не вдалося відкрити пристрій для відтворення."
+
++#: ext/alsa/gstalsasink.c:1076
+ msgid "Error outputting to audio device. The device has been disconnected."
-msgid ""
-"Could not open audio device for recording. Device is being used by another "
-"application."
-msgstr ""
-"Не вдалося відкрити пристрій для запису. Пристрій використовується "
-"сторонньою програмою."
++msgstr "Помилка під час спроби вивести дані на звуковий пристрій. Цей пристрій було від’єднано."
+
++#: ext/alsa/gstalsasrc.c:444
+ msgid "Could not open device for recording in mono mode."
+ msgstr "Не вдалося відкрити пристрій запису у режимі моно."
+
++#: ext/alsa/gstalsasrc.c:446
+ msgid "Could not open device for recording in stereo mode."
+ msgstr "Не вдалося відкрити пристрій запису у режимі стерео."
+
++#: ext/alsa/gstalsasrc.c:450
+ #, c-format
+ msgid "Could not open device for recording in %d-channel mode"
+ msgstr "Не вдалося відкрити пристрій запису у %d-канальному режимі"
+
-msgstr ""
-"Помилка під час записати дані зі звукового пристрою. Цей пристрій було "
-"від’єднано."
++#: ext/alsa/gstalsasrc.c:741
++msgid "Could not open audio device for recording. Device is being used by another application."
++msgstr "Не вдалося відкрити пристрій для запису. Пристрій використовується сторонньою програмою."
+
++#: ext/alsa/gstalsasrc.c:746
+ msgid "Could not open audio device for recording."
+ msgstr "Не вдалося відкрити пристрій для запису."
+
++#: ext/alsa/gstalsasrc.c:992
+ msgid "Error recording from audio device. The device has been disconnected."
-msgstr ""
-"Відсутній елемент «%s». Перевірте, чи встановлено GStreamer належним чином."
++msgstr "Помилка під час спроби записати дані зі звукового пристрою. Цей пристрій було від’єднано."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:277 ext/cdparanoia/gstcdparanoiasrc.c:283
+ msgid "Could not open CD device for reading."
+ msgstr "Не вдалося відкрити пристрій компакт-дисків для читання."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:409
+ msgid "Could not seek CD."
+ msgstr "Не вдалося встановити позицію на CD."
+
++#: ext/cdparanoia/gstcdparanoiasrc.c:417
+ msgid "Could not read CD."
+ msgstr "Не вдалося прочитати CD."
+
++#: ext/gl/gstgltestsrc.c:491
+ msgid "failed to draw pattern"
+ msgstr "не вдалося намалювати візерунок"
+
++#: ext/gl/gstgltestsrc.c:492
+ msgid "A GL error occurred"
+ msgstr "Сталася помилка GL"
+
++#: ext/gl/gstgltestsrc.c:498
+ msgid "format wasn't negotiated before get function"
+ msgstr "формат не було узгоджено до функції отримання"
+
++#: gst/encoding/gstencodebin.c:1636 gst/playback/gstplaybin2.c:3399
++#: gst/playback/gstplaysink.c:1522 gst/playback/gstplaysink.c:1535
++#: gst/playback/gstplaysink.c:1872 gst/playback/gstplaysink.c:1904
++#: gst/playback/gstplaysink.c:2488 gst/playback/gstplaysink.c:2537
++#: gst/playback/gstplaysink.c:2552 gst/playback/gstplaysink.c:2577
++#: gst/playback/gstplaysink.c:2609 gst/playback/gstplaysink.c:2757
++#: gst/playback/gstplaysink.c:2788 gst/playback/gstplaysink.c:3166
++#: gst/playback/gstplaysink.c:3175 gst/playback/gstplaysink.c:3184
++#: gst/playback/gstplaysink.c:3193 gst/playback/gstplaysink.c:3606
++#: gst/playback/gstplaysink.c:4526 gst/playback/gstplaysinkconvertbin.c:97
++#: gst/playback/gstplaysinkconvertbin.c:117
++#: gst/playback/gsturidecodebin.c:1472 gst/playback/gsturisourcebin.c:1597
+ #, c-format
+ msgid "Missing element '%s' - check your GStreamer installation."
-msgstr ""
-"Нетиповий елемент приймача текстових даних непридатний до використання."
++msgstr "Відсутній елемент «%s». Перевірте, чи встановлено GStreamer належним чином."
+
++#: gst/playback/gstdecodebin2.c:1883 gst/playback/gstparsebin.c:1553
+ msgid "Could not determine type of stream"
+ msgstr "Не вдалося визначити тип даних потоку"
+
++#: gst/playback/gstdecodebin2.c:2861 gst/playback/gstparsebin.c:2385
+ msgid "This appears to be a text file"
+ msgstr "Файл схожий на текстовий файл"
+
++#: gst/playback/gstplaybin2.c:5452
+ msgid "Could not create \"uridecodebin\" element."
+ msgstr "Не вдалося створити елемент \"uridecodebin\"."
+
++#: gst/playback/gstplaybin3.c:4558
+ msgid "Could not create \"uridecodebin3\" element."
+ msgstr "Не вдалося створити елемент \"uridecodebin3\"."
+
++#: gst/playback/gstplaysink.c:2003
+ #, c-format
+ msgid "Both autovideosink and %s elements are missing."
+ msgstr "Не вказано елементів autovideosink і %s."
+
++#: gst/playback/gstplaysink.c:2007
+ msgid "The autovideosink element is missing."
+ msgstr "Не вказано елемента autovideosink."
+
++#: gst/playback/gstplaysink.c:2012
+ #, c-format
+ msgid "Configured videosink %s is not working."
+ msgstr "Налаштований приймач відеоданих, %s, не працює."
+
++#: gst/playback/gstplaysink.c:2016
+ #, c-format
+ msgid "Both autovideosink and %s elements are not working."
+ msgstr "Непридатними є обидва елементи: autovideosink і %s."
+
++#: gst/playback/gstplaysink.c:2020
+ msgid "The autovideosink element is not working."
+ msgstr "Елемент autovideosink є непридатним."
+
++#: gst/playback/gstplaysink.c:2525
+ msgid "Custom text sink element is not usable."
-msgstr ""
-"Фокальний коефіцієнт (число діафрагми), використаний під час створення "
-"зображення"
++msgstr "Нетиповий елемент приймача текстових даних непридатний до використання."
+
++#: gst/playback/gstplaysink.c:2903
+ msgid "No volume control found"
+ msgstr "Не знайдено регулятора гучності"
+
++#: gst/playback/gstplaysink.c:2933
+ #, c-format
+ msgid "Both autoaudiosink and %s elements are missing."
+ msgstr "Не вказано обох елементів: autoaudiosink і %s."
+
++#: gst/playback/gstplaysink.c:2937
+ msgid "The autoaudiosink element is missing."
+ msgstr "Не вказано елемента autoaudiosink."
+
++#: gst/playback/gstplaysink.c:2942
+ #, c-format
+ msgid "Configured audiosink %s is not working."
+ msgstr "Налаштований приймач звукових даних, %s, не працює."
+
++#: gst/playback/gstplaysink.c:2946
+ #, c-format
+ msgid "Both autoaudiosink and %s elements are not working."
+ msgstr "Елементи autoaudiosink і %s є непридатними."
+
++#: gst/playback/gstplaysink.c:2950
+ msgid "The autoaudiosink element is not working."
+ msgstr "Елемент autoaudiosink є непридатним."
+
++#: gst/playback/gstplaysink.c:3269 gst/playback/gstplaysink.c:3274
+ msgid "Can't play a text file without video or visualizations."
+ msgstr "Відтворення текстового файла без відео або візуалізації неможливе."
+
++#: gst/playback/gsturidecodebin.c:921
+ #, c-format
+ msgid "No decoder available for type '%s'."
+ msgstr "Для типу '%s' немає доступних декодерів."
+
++#: gst/playback/gsturidecodebin.c:1378 gst/playback/gsturisourcebin.c:1491
+ msgid "No URI specified to play from."
+ msgstr "Не вказано URI для відтворення."
+
++#: gst/playback/gsturidecodebin.c:1384 gst/playback/gsturisourcebin.c:1497
+ #, c-format
+ msgid "Invalid URI \"%s\"."
+ msgstr "Неправильний URI «%s»."
+
++#: gst/playback/gsturidecodebin.c:1391 gst/playback/gsturisourcebin.c:1504
+ msgid "This stream type cannot be played yet."
+ msgstr "Цей потік поки-що не може бути відтворений."
+
++#: gst/playback/gsturidecodebin.c:1409 gst/playback/gsturisourcebin.c:1522
+ #, c-format
+ msgid "No URI handler implemented for \"%s\"."
+ msgstr "Не реалізовано обробник URI для «%s»."
+
++#: gst/playback/gsturidecodebin.c:2289 gst/playback/gsturisourcebin.c:2228
+ msgid "Source element is invalid."
+ msgstr "Неправильний вхідний елемент."
+
++#: gst/tcp/gsttcpclientsink.c:216
+ #, c-format
+ msgid "Error while sending data to \"%s:%d\"."
+ msgstr "Помилка при надсиланні даних до \"%s:%d\"."
+
++#: gst-libs/gst/audio/gstaudiobasesrc.c:842
+ msgid "Can't record audio fast enough"
+ msgstr "Не вдалося записати аудіодані з потрібною швидкістю"
+
++#: gst-libs/gst/audio/gstaudiocdsrc.c:1625
+ msgid "This CD has no audio tracks"
+ msgstr "На цьому компакт-диску немає звукових доріжок"
+
++#: gst-libs/gst/pbutils/descriptions.c:89
+ msgid "ID3 tag"
+ msgstr "Мітка ID3"
+
++#: gst-libs/gst/pbutils/descriptions.c:90
++#: gst-libs/gst/pbutils/descriptions.c:91
+ msgid "APE tag"
+ msgstr "Мітка APE"
+
++#: gst-libs/gst/pbutils/descriptions.c:92
+ msgid "ICY internet radio"
+ msgstr "Інтернет-радіо ICY"
+
++#: gst-libs/gst/pbutils/descriptions.c:127
+ msgid "Apple Lossless Audio (ALAC)"
+ msgstr "Аудіо без втрат від Apple (ALAC)"
+
++#: gst-libs/gst/pbutils/descriptions.c:143
+ msgid "Free Lossless Audio Codec (FLAC)"
+ msgstr "Вільний алгоритм аудіо без втрат (FLAC)"
+
++#: gst-libs/gst/pbutils/descriptions.c:173
++#: gst-libs/gst/pbutils/descriptions.c:174
+ msgid "Lossless True Audio (TTA)"
+ msgstr "Lossless True Audio (TTA)"
+
++#: gst-libs/gst/pbutils/descriptions.c:182
+ msgid "Windows Media Speech"
+ msgstr "Windows Media Speech"
+
++#: gst-libs/gst/pbutils/descriptions.c:199
+ msgid "CYUV Lossless"
+ msgstr "CYUV без втрат"
+
++#: gst-libs/gst/pbutils/descriptions.c:203
+ msgid "FFMpeg v1"
+ msgstr "FFMpeg v1"
+
++#: gst-libs/gst/pbutils/descriptions.c:217
+ msgid "Lossless MSZH"
+ msgstr "MSZH без втрат"
+
++#: gst-libs/gst/pbutils/descriptions.c:228
+ msgid "Run-length encoding"
+ msgstr "Кодування Run-length"
+
++#: gst-libs/gst/pbutils/descriptions.c:274
+ msgid "Timed Text"
+ msgstr "Timed Text"
+
++#: gst-libs/gst/pbutils/descriptions.c:278
+ msgid "Subtitle"
+ msgstr "Субтитри"
+
++#: gst-libs/gst/pbutils/descriptions.c:279
+ msgid "MPL2 subtitle format"
+ msgstr "Формат субтитрів MPL2"
+
++#: gst-libs/gst/pbutils/descriptions.c:280
+ msgid "DKS subtitle format"
+ msgstr "Формат субтитрів DKS"
+
++#: gst-libs/gst/pbutils/descriptions.c:281
+ msgid "QTtext subtitle format"
+ msgstr "Формат субтитрів QTtext"
+
++#: gst-libs/gst/pbutils/descriptions.c:282
+ msgid "Sami subtitle format"
+ msgstr "Формат субтитрів Sami"
+
++#: gst-libs/gst/pbutils/descriptions.c:283
+ msgid "TMPlayer subtitle format"
+ msgstr "Формат субтитрів TMPlayer"
+
++#: gst-libs/gst/pbutils/descriptions.c:287
+ msgid "CEA 608 Closed Caption"
+ msgstr "Субтитри CEA 608"
+
++#: gst-libs/gst/pbutils/descriptions.c:288
+ msgid "CEA 708 Closed Caption"
+ msgstr "Субтитри CEA 708"
+
++#: gst-libs/gst/pbutils/descriptions.c:289
+ msgid "Kate subtitle format"
+ msgstr "Формат субтитрів Kate"
+
++#: gst-libs/gst/pbutils/descriptions.c:290
+ msgid "WebVTT subtitle format"
+ msgstr "Формат субтитрів WebVTT"
+
++#: gst-libs/gst/pbutils/descriptions.c:455
++#: gst-libs/gst/pbutils/descriptions.c:458
++#: gst-libs/gst/pbutils/descriptions.c:508
+ msgid "Uncompressed video"
+ msgstr "Нестиснене відео"
+
++#: gst-libs/gst/pbutils/descriptions.c:463
+ msgid "Uncompressed gray"
+ msgstr "Нестиснене сіре зображення"
+
++#: gst-libs/gst/pbutils/descriptions.c:486
+ #, c-format
+ msgid "Uncompressed packed YUV %s"
+ msgstr "Нестиснений пакетний YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:488
+ #, c-format
+ msgid "Uncompressed semi-planar YUV %s"
+ msgstr "Нестиснений напівпланарний YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:490
+ #, c-format
+ msgid "Uncompressed planar YUV %s"
+ msgstr "Нестиснений планарний YUV %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:501
+ #, c-format
+ msgid "Uncompressed palettized %d-bit %s"
+ msgstr "Нестиснений пакетний %d-бітовий %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:504
+ #, c-format
+ msgid "Uncompressed %d-bit %s"
+ msgstr "Нестиснений %d-бітовий %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:586
+ #, c-format
+ msgid "DivX MPEG-4 Version %d"
+ msgstr "DivX MPEG-4 версії %d"
+
++#: gst-libs/gst/pbutils/descriptions.c:834
+ msgid "Uncompressed audio"
+ msgstr "Нестиснені звукові дані"
+
++#: gst-libs/gst/pbutils/descriptions.c:840
+ #, c-format
+ msgid "Raw %d-bit %s audio"
+ msgstr "Необроблені %d-бітові звукові дані %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:942
+ msgid "Audio CD source"
+ msgstr "Джерело аудіо-CD"
+
++#: gst-libs/gst/pbutils/descriptions.c:945
+ msgid "DVD source"
+ msgstr "Джерело DVD"
+
++#: gst-libs/gst/pbutils/descriptions.c:948
+ msgid "Real Time Streaming Protocol (RTSP) source"
+ msgstr "Джерело потокового протоколу реального часу (RTSP)"
+
++#: gst-libs/gst/pbutils/descriptions.c:952
+ msgid "Microsoft Media Server (MMS) protocol source"
+ msgstr "Джерело протоколу Microsoft Media Server (MMS)"
+
++#: gst-libs/gst/pbutils/descriptions.c:960
+ #, c-format
+ msgid "%s protocol source"
+ msgstr "Джерело протоколу %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1034
+ #, c-format
+ msgid "%s video RTP depayloader"
+ msgstr "RTP модуль розпаковування відеоданих %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1036
+ #, c-format
+ msgid "%s audio RTP depayloader"
+ msgstr "RTP модуль розпаковування аудіоданих %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1038
+ #, c-format
+ msgid "%s RTP depayloader"
+ msgstr "RTP модуль розпаковування даних %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1045
+ #, c-format
+ msgid "%s demuxer"
+ msgstr "демультиплексор %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1047
+ #, c-format
+ msgid "%s decoder"
+ msgstr "декодер %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1086
+ #, c-format
+ msgid "%s video RTP payloader"
+ msgstr "RTP модуль запаковування відеоданих %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1088
+ #, c-format
+ msgid "%s audio RTP payloader"
+ msgstr "RTP модуль запаковування аудіоданих %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1090
+ #, c-format
+ msgid "%s RTP payloader"
+ msgstr "RTP модуль запаковування даних %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1097
+ #, c-format
+ msgid "%s muxer"
+ msgstr "мультиплексор %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1099
+ #, c-format
+ msgid "%s encoder"
+ msgstr "кодер %s"
+
++#: gst-libs/gst/pbutils/descriptions.c:1133
+ #, c-format
+ msgid "GStreamer element %s"
+ msgstr "Елемент GStreamer %s"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:558
+ msgid "Unknown source element"
+ msgstr "Невідомий елемент джерела"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:561
+ msgid "Unknown sink element"
+ msgstr "Невідомий елемент приймача"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:564
+ msgid "Unknown element"
+ msgstr "Невідомий елемент"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:567
+ msgid "Unknown decoder element"
+ msgstr "Невідомий елемент декодера"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:570
+ msgid "Unknown encoder element"
+ msgstr "Невідомий елемент кодера"
+
++#: gst-libs/gst/pbutils/missing-plugins.c:575
+ msgid "Plugin or element of unknown type"
+ msgstr "Модуль або елемент невідомого типу"
+
++#: gst-libs/gst/tag/gsttagdemux.c:1243
+ msgid "Failed to read tag: not enough data"
+ msgstr "Не вдалося прочитати мітку: недостатньо даних"
+
++#: gst-libs/gst/tag/tags.c:77
+ msgid "track ID"
+ msgstr "ідентифікатор доріжки"
+
++#: gst-libs/gst/tag/tags.c:77
+ msgid "MusicBrainz track ID"
+ msgstr "Ідентифікатор доріжки у MusicBrainz"
+
++#: gst-libs/gst/tag/tags.c:79
+ msgid "artist ID"
+ msgstr "ідентифікатор виконавця"
+
++#: gst-libs/gst/tag/tags.c:79
+ msgid "MusicBrainz artist ID"
+ msgstr "Ідентифікатор виконавця у MusicBrainz"
+
++#: gst-libs/gst/tag/tags.c:81
+ msgid "album ID"
+ msgstr "ідентифікатор альбому"
+
++#: gst-libs/gst/tag/tags.c:81
+ msgid "MusicBrainz album ID"
+ msgstr "Ідентифікатор альбому у MusicBrainz"
+
++#: gst-libs/gst/tag/tags.c:84
+ msgid "album artist ID"
+ msgstr "ідентифікатор виконавця альбому"
+
++#: gst-libs/gst/tag/tags.c:84
+ msgid "MusicBrainz album artist ID"
+ msgstr "Ідентифікатор виконавця альбому у MusicBrainz"
+
++#: gst-libs/gst/tag/tags.c:86
+ msgid "track TRM ID"
+ msgstr "ідентифікатор доріжки TRM"
+
++#: gst-libs/gst/tag/tags.c:86
+ msgid "MusicBrainz TRM ID"
+ msgstr "Ідентифікатор доріжки TRM у MusicBrainz"
+
++#: gst-libs/gst/tag/tags.c:108
+ msgid "capturing shutter speed"
+ msgstr "швидкість затвору"
+
++#: gst-libs/gst/tag/tags.c:109
+ msgid "Shutter speed used when capturing an image, in seconds"
+ msgstr "Швидкість затвору (діафрагми) під час створення зображення, у секундах"
+
++#: gst-libs/gst/tag/tags.c:112
+ msgid "capturing focal ratio"
+ msgstr "фокальний коефіцієнт"
+
++#: gst-libs/gst/tag/tags.c:113
+ msgid "Focal ratio (f-number) used when capturing the image"
-msgstr ""
-"Фокальна відстань об’єктива, використаного при створенні зображення, у мм"
++msgstr "Фокальний коефіцієнт (число діафрагми), використаний під час створення зображення"
+
++#: gst-libs/gst/tag/tags.c:116
+ msgid "capturing focal length"
+ msgstr "фокальна відстань"
+
++#: gst-libs/gst/tag/tags.c:117
+ msgid "Focal length of the lens used capturing the image, in mm"
-msgid ""
-"35 mm equivalent focal length of the lens used capturing the image, in mm"
-msgstr ""
-"35-мм еквівалент фокальної відстані об’єктива, який буде використано для "
-"створення зображення, у мм"
++msgstr "Фокальна відстань об’єктива, використаного при створенні зображення, у мм"
+
++#: gst-libs/gst/tag/tags.c:121
+ msgid "capturing 35 mm equivalent focal length"
+ msgstr "захоплюємо у 35-мм еквіваленті фокальної відстані"
+
-msgstr ""
-"Коефіцієнт цифрового збільшення, використаний під час створення зображення"
++#: gst-libs/gst/tag/tags.c:122
++msgid "35 mm equivalent focal length of the lens used capturing the image, in mm"
++msgstr "35-мм еквівалент фокальної відстані об’єктива, який буде використано для створення зображення, у мм"
+
++#: gst-libs/gst/tag/tags.c:126
+ msgid "capturing digital zoom ratio"
+ msgstr "коефіцієнт цифрового збільшення"
+
++#: gst-libs/gst/tag/tags.c:127
+ msgid "Digital zoom ratio used when capturing an image"
-msgstr ""
-"Режим балансування білого кольору, використаний під час створення зображення"
++msgstr "Коефіцієнт цифрового збільшення, використаний під час створення зображення"
+
++#: gst-libs/gst/tag/tags.c:130
+ msgid "capturing iso speed"
+ msgstr "світлочутливість ISO"
+
++#: gst-libs/gst/tag/tags.c:131
+ msgid "The ISO speed used when capturing an image"
+ msgstr "Світлочутливість ISO, використана під час створення зображення"
+
++#: gst-libs/gst/tag/tags.c:134
+ msgid "capturing exposure program"
+ msgstr "програма експозиції"
+
++#: gst-libs/gst/tag/tags.c:135
+ msgid "The exposure program used when capturing an image"
+ msgstr "Програма експозиції, використана під час створення зображення"
+
++#: gst-libs/gst/tag/tags.c:138
+ msgid "capturing exposure mode"
+ msgstr "режим експозиції"
+
++#: gst-libs/gst/tag/tags.c:139
+ msgid "The exposure mode used when capturing an image"
+ msgstr "Режим експозиції, використаний під час створення зображення"
+
++#: gst-libs/gst/tag/tags.c:142
+ msgid "capturing exposure compensation"
+ msgstr "компенсація експозиції під час захоплення"
+
++#: gst-libs/gst/tag/tags.c:143
+ msgid "The exposure compensation used when capturing an image"
+ msgstr "Компенсація часу експозиції, потрібного для захоплення зображення"
+
++#: gst-libs/gst/tag/tags.c:146
+ msgid "capturing scene capture type"
+ msgstr "тип сцени"
+
++#: gst-libs/gst/tag/tags.c:147
+ msgid "The scene capture mode used when capturing an image"
+ msgstr "Режим типу сцени, використаний під час створення зображення"
+
++#: gst-libs/gst/tag/tags.c:150
+ msgid "capturing gain adjustment"
+ msgstr "коригування підсилання"
+
++#: gst-libs/gst/tag/tags.c:151
+ msgid "The overall gain adjustment applied on an image"
+ msgstr "Загальне коригування підсилення, застосоване до зображення"
+
++#: gst-libs/gst/tag/tags.c:154
+ msgid "capturing white balance"
+ msgstr "баланс білого"
+
++#: gst-libs/gst/tag/tags.c:155
+ msgid "The white balance mode set when capturing an image"
-msgid ""
-"The metering mode used while determining exposure for capturing an image"
-msgstr ""
-"Режим вимірювання, застосований для визначення експозиції створеного "
-"зображення"
++msgstr "Режим балансування білого кольору, використаний під час створення зображення"
+
++#: gst-libs/gst/tag/tags.c:158
+ msgid "capturing contrast"
+ msgstr "контрастність"
+
++#: gst-libs/gst/tag/tags.c:159
+ msgid "The direction of contrast processing applied when capturing an image"
+ msgstr "Напрям обробки контрастності, застосований при створенні зображення"
+
++#: gst-libs/gst/tag/tags.c:163
+ msgid "capturing saturation"
+ msgstr "насиченість"
+
++#: gst-libs/gst/tag/tags.c:164
+ msgid "The direction of saturation processing applied when capturing an image"
+ msgstr "Напрям обробки насиченості, застосований при створенні зображення"
+
++#: gst-libs/gst/tag/tags.c:168
+ msgid "capturing sharpness"
+ msgstr "різкість"
+
++#: gst-libs/gst/tag/tags.c:169
+ msgid "The direction of sharpness processing applied when capturing an image"
+ msgstr "Напрям обробки різкості, застосований при створенні зображення"
+
++#: gst-libs/gst/tag/tags.c:173
+ msgid "capturing flash fired"
+ msgstr "спалах"
+
++#: gst-libs/gst/tag/tags.c:174
+ msgid "If the flash fired while capturing an image"
+ msgstr "Чи було використано спалах при створенні зображення"
+
++#: gst-libs/gst/tag/tags.c:177
+ msgid "capturing flash mode"
+ msgstr "режим спалаху"
+
++#: gst-libs/gst/tag/tags.c:178
+ msgid "The selected flash mode while capturing an image"
+ msgstr "Вибраний режим спалаху під час створення зображення"
+
++#: gst-libs/gst/tag/tags.c:181
+ msgid "capturing metering mode"
+ msgstr "режим вимірювання"
+
-msgstr ""
-"Горизонтальна щільність пікселів на матриці (зображення або відео), у точках "
-"на дюйм"
++#: gst-libs/gst/tag/tags.c:182
++msgid "The metering mode used while determining exposure for capturing an image"
++msgstr "Режим вимірювання, застосований для визначення експозиції створеного зображення"
+
++#: gst-libs/gst/tag/tags.c:186
+ msgid "capturing source"
+ msgstr "джерело"
+
++#: gst-libs/gst/tag/tags.c:187
+ msgid "The source or type of device used for the capture"
+ msgstr "Тип пристрою-джерела зображення"
+
++#: gst-libs/gst/tag/tags.c:190
+ msgid "image horizontal ppi"
+ msgstr "горизонтальна щільність пікселів"
+
++#: gst-libs/gst/tag/tags.c:191
+ msgid "Media (image/video) intended horizontal pixel density in ppi"
-msgstr ""
-"Вертикальна щільність пікселів на матриці (зображення або відео), у точках "
-"на дюйм"
++msgstr "Горизонтальна щільність пікселів на матриці (зображення або відео), у точках на дюйм"
+
++#: gst-libs/gst/tag/tags.c:194
+ msgid "image vertical ppi"
+ msgstr "вертикальна щільність пікселів"
+
++#: gst-libs/gst/tag/tags.c:195
+ msgid "Media (image/video) intended vertical pixel density in ppi"
-msgid ""
-"Don't exit after showing the initial device list, but wait for devices to "
-"added/removed."
-msgstr ""
-"Не завершувати роботу після показу початкового списку пристроїв, чекати на "
-"додавання або вилучення пристроїв."
++msgstr "Вертикальна щільність пікселів на матриці (зображення або відео), у точках на дюйм"
+
++#: gst-libs/gst/tag/tags.c:198
+ msgid "ID3v2 frame"
+ msgstr "Блок ID3v2"
+
++#: gst-libs/gst/tag/tags.c:198
+ msgid "unparsed id3v2 tag frame"
+ msgstr "необроблений блок теґу id3v2"
+
++#: gst-libs/gst/tag/tags.c:202
+ msgid "musical-key"
+ msgstr "музична-тональність"
+
++#: gst-libs/gst/tag/tags.c:202
+ msgid "Initial key in which the sound starts"
+ msgstr "Початкова тональність, якою розпочинається звук"
+
++#: tools/gst-device-monitor.c:260 tools/gst-play.c:1444
+ msgid "Print version information and exit"
+ msgstr "Показати дані щодо версії і завершити роботу"
+
-msgstr ""
-"Керувати параметром поведінки відтворення встановленням властивості «flags»"
++#: tools/gst-device-monitor.c:262
++msgid "Don't exit after showing the initial device list, but wait for devices to added/removed."
++msgstr "Не завершувати роботу після показу початкового списку пристроїв, чекати на додавання або вилучення пристроїв."
+
++#: tools/gst-play.c:310
+ #, c-format
+ msgid "Volume: %.0f%%"
+ msgstr "Гучність: %.0f%%"
+
++#: tools/gst-play.c:349
+ msgid "Buffering..."
+ msgstr "Буферизація…"
+
++#: tools/gst-play.c:370
+ msgid "Clock lost, selecting a new one\n"
+ msgstr "Доступ до годинника втрачено, вибираємо новий\n"
+
++#: tools/gst-play.c:400 tools/gst-play.c:446 tools/gst-play.c:883
++#: tools/gst-play.c:1347
+ msgid "Reached end of play list."
+ msgstr "Відтворення списку завершено."
+
++#: tools/gst-play.c:613
+ msgid "Paused"
+ msgstr "Призупинено"
+
++#: tools/gst-play.c:671
+ #, c-format
+ msgid "Now playing %s\n"
+ msgstr "Відтворюється %s\n"
+
++#: tools/gst-play.c:734
+ #, c-format
+ msgid "About to finish, preparing next title: %s"
+ msgstr "Майже завершили, готуємо наступну композицію: %s"
+
++#: tools/gst-play.c:979
+ #, c-format
+ msgid "Playback rate: %.2f"
+ msgstr "Частота відтворення: %.2f"
+
++#: tools/gst-play.c:983
+ #, c-format
+ msgid "Could not change playback rate to %.2f"
+ msgstr "Не вдалося змінити частоту відтворення на %.2f"
+
++#: tools/gst-play.c:1287
+ msgid "space"
+ msgstr "пробіл"
+
++#: tools/gst-play.c:1287
+ msgid "pause/unpause"
+ msgstr "призупинити/поновити"
+
++#: tools/gst-play.c:1288
+ msgid "q or ESC"
+ msgstr "q або ESC"
+
++#: tools/gst-play.c:1288
+ msgid "quit"
+ msgstr "вийти"
+
++#: tools/gst-play.c:1289
+ msgid "> or n"
+ msgstr "> або n"
+
++#: tools/gst-play.c:1289
+ msgid "play next"
+ msgstr "наступна"
+
++#: tools/gst-play.c:1290
+ msgid "< or b"
+ msgstr "< або b"
+
++#: tools/gst-play.c:1290
+ msgid "play previous"
+ msgstr "попередня"
+
++#: tools/gst-play.c:1291
+ msgid "seek forward"
+ msgstr "до кінця"
+
++#: tools/gst-play.c:1292
+ msgid "seek backward"
+ msgstr "до початку"
+
++#: tools/gst-play.c:1293
+ msgid "volume up"
+ msgstr "гучніше"
+
++#: tools/gst-play.c:1294
+ msgid "volume down"
+ msgstr "тихіше"
+
++#: tools/gst-play.c:1295
+ msgid "increase playback rate"
+ msgstr "збільшити частоту відтворення"
+
++#: tools/gst-play.c:1296
+ msgid "decrease playback rate"
+ msgstr "зменшити частоту відтворення"
+
++#: tools/gst-play.c:1297
+ msgid "change playback direction"
+ msgstr "змінити напрям відтворення"
+
++#: tools/gst-play.c:1298
+ msgid "enable/disable trick modes"
+ msgstr "увімкнути або вимкнути особливі режими"
+
++#: tools/gst-play.c:1299
+ msgid "change audio track"
+ msgstr "змінити звукову доріжку"
+
++#: tools/gst-play.c:1300
+ msgid "change video track"
+ msgstr "змінити відеодоріжку"
+
++#: tools/gst-play.c:1301
+ msgid "change subtitle track"
+ msgstr "змінити доріжку субтитрів"
+
++#: tools/gst-play.c:1302
+ msgid "seek to beginning"
+ msgstr "позиціювати на початок"
+
++#: tools/gst-play.c:1303
+ msgid "show keyboard shortcuts"
+ msgstr "показати клавіатурні скорочення"
+
++#: tools/gst-play.c:1306
+ msgid "Interactive mode - keyboard controls:"
+ msgstr "Інтерактивне керування за допомогою клавіатури:"
+
++#: tools/gst-play.c:1439
+ msgid "Output status information and property notifications"
+ msgstr "Виводити дані щодо стану та сповіщення щодо властивостей"
+
++#: tools/gst-play.c:1441
+ msgid "Control playback behaviour setting playbin 'flags' property"
-msgstr ""
-"Вам слід вказати принаймні одну назву файла або адресу для відтворення."
++msgstr "Керувати параметром поведінки відтворення встановленням властивості «flags»"
+
++#: tools/gst-play.c:1446
+ msgid "Video sink to use (default is autovideosink)"
+ msgstr "Приймач відеоданих, яким слід скористатися (типовим є autovideosink)"
+
++#: tools/gst-play.c:1448
+ msgid "Audio sink to use (default is autoaudiosink)"
+ msgstr "Приймач аудіоданих, яким слід скористатися (типовим є autoaudiosink)"
+
++#: tools/gst-play.c:1450
+ msgid "Enable gapless playback"
+ msgstr "Увімкнути неперервне відтворення"
+
++#: tools/gst-play.c:1452
+ msgid "Shuffle playlist"
+ msgstr "Перемішати список відтворення"
+
++#: tools/gst-play.c:1455
+ msgid "Disable interactive control via the keyboard"
+ msgstr "Вимкнути інтерактивне керування за допомогою клавіатури"
+
++#: tools/gst-play.c:1457
+ msgid "Volume"
+ msgstr "Гучність"
+
++#: tools/gst-play.c:1459
+ msgid "Playlist file containing input media files"
+ msgstr "Файл списку відтворення, що містить мультимедійні файли вхідних даних"
+
++#: tools/gst-play.c:1461
+ msgid "Do not print any output (apart from errors)"
+ msgstr "Не виводити повідомлень (окрім повідомлень щодо помилок)"
+
++#: tools/gst-play.c:1463
+ msgid "Use playbin3 pipeline"
+ msgstr "Використовувати конвеєр playbin3"
+
++#: tools/gst-play.c:1464
+ msgid "(default varies depending on 'USE_PLAYBIN' env variable)"
+ msgstr "(типове значення залежить від змінної середовища USE_PLAYBIN)"
+
++#: tools/gst-play.c:1468
++msgid "Keep showing the last frame on EOS until quit or playlist change command (gapless is ignored)"
++msgstr "Показувати останній кадр на EOS до команди виходу або зміни списку відтворення (gapless буде проігноровано)"
++
++#: tools/gst-play.c:1546
+ #, c-format
+ msgid "Usage: %s FILE1|URI1 [FILE2|URI2] [FILE3|URI3] ..."
+ msgstr "Користування: %s ФАЙЛ1|АДРЕСА1 [ФАЙЛ2|АДРЕСА2] [ФАЙЛ3|АДРЕСА3] ..."
+
++#: tools/gst-play.c:1550
+ msgid "You must provide at least one filename or URI to play."
-#~ msgstr ""
-#~ "Не вдалося відкрити файл віртуальної файлової системи «%s» для запису: %s."
++msgstr "Вам слід вказати принаймні одну назву файла або адресу для відтворення."
+
++#: tools/gst-play.c:1590
+ msgid "Press 'k' to see a list of keyboard shortcuts.\n"
+ msgstr "Натисніть «k», щоб переглянути список клавіатурних скорочень.\n"
+
+ #~ msgid "Could not create \"decodebin3\" element."
+ #~ msgstr "Не вдалося створити елемент \"decodebin3\"."
+
+ #~ msgid "Could not create \"urisourcebin\" element."
+ #~ msgstr "Не вдалося створити елемент \"urisourcebin\"."
+
+ #~ msgid "Internal data stream error."
+ #~ msgstr "Помилка внутрішнього потоку даних."
+
+ #~ msgid "A %s plugin is required to play this stream, but not installed."
+ #~ msgstr "Для відтворення потоку потрібен модуль %s, але він не встановлений."
+
+ #~ msgid "Uncompressed %s YUV %s"
+ #~ msgstr "Нестиснене %s, YUV %s"
+
+ #~ msgid "Master"
+ #~ msgstr "Головний"
+
+ #~ msgid "Bass"
+ #~ msgstr "Низькі"
+
+ #~ msgid "Treble"
+ #~ msgstr "Високі"
+
+ #~ msgid "PCM"
+ #~ msgstr "КІМ"
+
+ #~ msgid "Synth"
+ #~ msgstr "Синтезатор"
+
+ #~ msgid "Line-in"
+ #~ msgstr "Лінійний вхід"
+
+ #~ msgid "CD"
+ #~ msgstr "Компакт-диск"
+
+ #~ msgid "Microphone"
+ #~ msgstr "Мікрофон"
+
+ #~ msgid "PC Speaker"
+ #~ msgstr "Гучномовець"
+
+ #~ msgid "Playback"
+ #~ msgstr "Відтворення"
+
+ #~ msgid "Capture"
+ #~ msgstr "Запис"
+
+ #~ msgid "Could not open vfs file \"%s\" for writing: %s."
-#~ msgid ""
-#~ "Only a subtitle stream was detected. Either you are loading a subtitle "
-#~ "file or some other type of text file, or the media file was not "
-#~ "recognized."
-#~ msgstr ""
-#~ "Виявлено лише потік субтитрів. Або ви завантажили файл субтитрів чи інший "
-#~ "текстовий файл, або носій не розпізнано."
-
-#~ msgid ""
-#~ "You do not have a decoder installed to handle this file. You might need "
-#~ "to install the necessary plugins."
-#~ msgstr ""
-#~ "Не встановлено декодер для обробки цього файлу. Можливо потрібно "
-#~ "встановити додаткові модулі."
++#~ msgstr "Не вдалося відкрити файл віртуальної файлової системи «%s» для запису: %s."
+
+ #~ msgid "No filename given"
+ #~ msgstr "Не вказано файл"
+
+ #~ msgid "Could not close vfs file \"%s\"."
+ #~ msgstr "Не вдалося закрити файл віртуальної файлової системи «%s»."
+
+ #~ msgid "Error while writing to file \"%s\"."
+ #~ msgstr "Помилка при записі у файл «%s»."
+
+ #~ msgid "Invalid subtitle URI \"%s\", subtitles disabled."
+ #~ msgstr "Неправильний URI субтитру «%s», субтитри вимкнено."
+
+ #~ msgid "RTSP streams cannot be played yet."
+ #~ msgstr "Відтворення потоків RTSP поки-що не підтримується."
+
++#~ msgid "Only a subtitle stream was detected. Either you are loading a subtitle file or some other type of text file, or the media file was not recognized."
++#~ msgstr "Виявлено лише потік субтитрів. Або ви завантажили файл субтитрів чи інший текстовий файл, або носій не розпізнано."
++
++#~ msgid "You do not have a decoder installed to handle this file. You might need to install the necessary plugins."
++#~ msgstr "Не встановлено декодер для обробки цього файлу. Можливо потрібно встановити додаткові модулі."
+
+ #~ msgid "This is not a media file"
+ #~ msgstr "Файл не є мультимедійним"
+
+ #~ msgid "A subtitle stream was detected, but no video stream."
+ #~ msgstr "Виявлено потік субтитрів, але не відеопотік."
+
+ #~ msgid "Both autovideosink and xvimagesink elements are missing."
+ #~ msgstr "Відсутні елементи autovideosink та xvimagesink."
+
+ #~ msgid "Both autoaudiosink and alsasink elements are missing."
+ #~ msgstr "Відсутні елементи autoaudiosink та alsasink."
+
+ #~ msgid "Error while sending gdp header data to \"%s:%d\"."
+ #~ msgstr "Помилка при надсиланні заголовку gdp даних до \"%s:%d\"."
+
+ #~ msgid "Error while sending gdp payload data to \"%s:%d\"."
+ #~ msgstr "Помилка при надсиланні даних gdp до «%s:%d»."
+
+ #~ msgid "Connection to %s:%d refused."
+ #~ msgstr "У з'єднанні з %s:%d відмовлено."
+
+ #~ msgid "Uncompressed planar YVU 4:2:0"
+ #~ msgstr "Нестиснений планарний YVU 4:2:0"
+
+ #~ msgid "Uncompressed packed YUV 4:1:0"
+ #~ msgstr "Нестиснений пакетний YUV 4:1:0"
+
+ #~ msgid "Uncompressed packed YVU 4:1:0"
+ #~ msgstr "Нестиснений пакетний YVU 4:1:0"
+
+ #~ msgid "Uncompressed packed YUV 4:1:1"
+ #~ msgstr "Нестиснений пакетний YUV 4:1:1"
+
+ #~ msgid "Uncompressed packed YUV 4:4:4"
+ #~ msgstr "Нестиснений пакетний YUV 4:4:4"
+
+ #~ msgid "Uncompressed planar YUV 4:2:2"
+ #~ msgstr "Нестиснений планарний YUV 4:2:2"
+
+ #~ msgid "Uncompressed planar YUV 4:1:1"
+ #~ msgstr "Нестиснений планарний YUV 4:1:1"
+
+ #~ msgid "Uncompressed black and white Y-plane"
+ #~ msgstr "Нестиснений чорно-білий Y-проекція"
+
+ #~ msgid "Raw PCM audio"
+ #~ msgstr "Необроблений PCM-звук"
+
+ #~ msgid "Raw %d-bit floating-point audio"
+ #~ msgstr "Необроблений %d-бітовий звук (дані з плаваючою комою)"
+
+ #~ msgid "Raw floating-point audio"
+ #~ msgstr "Необроблений звук (дані з плаваючою комою)"
+
+ #~ msgid "No device specified."
+ #~ msgstr "Пристрій не вказано."
+
+ #~ msgid "Device \"%s\" does not exist."
+ #~ msgstr "Пристрою «%s» не існує."
+
+ #~ msgid "Device \"%s\" is already being used."
+ #~ msgstr "Пристрій «%s» вже використовується іншою програмою."
+
+ #~ msgid "Could not open device \"%s\" for reading and writing."
+ #~ msgstr "Не вдалося відкрити пристрій «%s» для читання чи запису."
+
+ #~ msgid "No file name specified."
+ #~ msgstr "Не вказано назву файлу."
+
+ #~ msgid "Could not open file \"%s\" for reading."
+ #~ msgstr "Не вдається відкрити файл \"%s\" для читання."
+
+ #~ msgid "Could not create \"queue2\" element."
+ #~ msgstr "Не вдається створити елемент \"queue2\"."
+
+ #~ msgid "artist sortname"
+ #~ msgstr "категорія виконавця"
+
+ #~ msgid "MusicBrainz artist sortname"
+ #~ msgstr "Категорія виконавця у MusicBrainz"
--- /dev/null
-"Project-Id-Version: gst-plugins-base 1.15.1\n"
+ # Chinese (simplified) translation about gst-plugin-base.
+ # This file is put in the public domain.
+ # Funda Wang <fundawang@linux.net.cn>, 2005.
+ # Ji ZhengYu <zhengyuji@gmail.com>, 2008.
+ # Tianze Wang <zwpwjwtz@126.com>, 2015.
+ msgid ""
+ msgstr ""
-"POT-Creation-Date: 2019-01-17 01:50+0000\n"
-"PO-Revision-Date: 2019-03-05 16:30+0100\n"
++"Project-Id-Version: gst-plugins-base 1.16.0\n"
+ "Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
-#: gst/playback/gsturidecodebin.c:1472 gst/playback/gsturisourcebin.c:1596
++"POT-Creation-Date: 2019-04-19 00:20+0100\n"
++"PO-Revision-Date: 2019-09-22 08:34+0200\n"
+ "Last-Translator: Tianze Wang <zwpwjwtz@126.com>\n"
+ "Language-Team: Chinese (simplified) <i18n-zh@googlegroups.com>\n"
+ "Language: zh_CN\n"
+ "MIME-Version: 1.0\n"
+ "Content-Type: text/plain; charset=UTF-8\n"
+ "Content-Transfer-Encoding: 8bit\n"
+ "X-Bugs: Report translation errors to the Language-Team address.\n"
+ "X-Generator: Poedit 2.0.7\n"
+
+ #: ext/alsa/gstalsasink.c:579
+ msgid "Could not open device for playback in mono mode."
+ msgstr "无法在单声道模式打开设备播放。"
+
+ #: ext/alsa/gstalsasink.c:581
+ msgid "Could not open device for playback in stereo mode."
+ msgstr "无法在立体声模式打开设备播放。"
+
+ #: ext/alsa/gstalsasink.c:585
+ #, c-format
+ msgid "Could not open device for playback in %d-channel mode."
+ msgstr "无法在 %d 声道模式打开设备播放。"
+
+ #: ext/alsa/gstalsasink.c:858
+ msgid "Could not open audio device for playback. Device is being used by another application."
+ msgstr "无法打开音频设备用于回放。目前有另一个程序正在使用该设备。"
+
+ #: ext/alsa/gstalsasink.c:863
+ msgid "Could not open audio device for playback."
+ msgstr "无法打开音频设备播放。"
+
+ #: ext/alsa/gstalsasink.c:1076
+ msgid "Error outputting to audio device. The device has been disconnected."
+ msgstr "输出到音频设备错误。设备已断开连接。"
+
+ #: ext/alsa/gstalsasrc.c:444
+ msgid "Could not open device for recording in mono mode."
+ msgstr "无法在单声道模式打开设备录音。"
+
+ #: ext/alsa/gstalsasrc.c:446
+ msgid "Could not open device for recording in stereo mode."
+ msgstr "无法在立体声模式打开设备录音。"
+
+ #: ext/alsa/gstalsasrc.c:450
+ #, c-format
+ msgid "Could not open device for recording in %d-channel mode"
+ msgstr "无法在 %d 声道模式打开设备录音。"
+
+ #: ext/alsa/gstalsasrc.c:741
+ msgid "Could not open audio device for recording. Device is being used by another application."
+ msgstr "无法打开音频设备用于录音。目前有另一个程序正在使用该设备。"
+
+ #: ext/alsa/gstalsasrc.c:746
+ msgid "Could not open audio device for recording."
+ msgstr "无法打开音频设备录音。"
+
+ #: ext/alsa/gstalsasrc.c:992
+ msgid "Error recording from audio device. The device has been disconnected."
+ msgstr "从音频设备输入错误。设备已断开连接。"
+
+ #: ext/cdparanoia/gstcdparanoiasrc.c:277 ext/cdparanoia/gstcdparanoiasrc.c:283
+ msgid "Could not open CD device for reading."
+ msgstr "无法打开 CD 设备读取。"
+
+ #: ext/cdparanoia/gstcdparanoiasrc.c:409
+ msgid "Could not seek CD."
+ msgstr "无法搜索 CD。"
+
+ #: ext/cdparanoia/gstcdparanoiasrc.c:417
+ msgid "Could not read CD."
+ msgstr "无法读取 CD。"
+
+ #: ext/gl/gstgltestsrc.c:491
+ msgid "failed to draw pattern"
+ msgstr "无法绘制图案"
+
+ #: ext/gl/gstgltestsrc.c:492
+ msgid "A GL error occurred"
+ msgstr "发生了 GL 错误"
+
+ #: ext/gl/gstgltestsrc.c:498
+ msgid "format wasn't negotiated before get function"
+ msgstr "在调用 get 函数之前未确定合适的格式"
+
+ #: gst/encoding/gstencodebin.c:1636 gst/playback/gstplaybin2.c:3399
+ #: gst/playback/gstplaysink.c:1522 gst/playback/gstplaysink.c:1535
+ #: gst/playback/gstplaysink.c:1872 gst/playback/gstplaysink.c:1904
+ #: gst/playback/gstplaysink.c:2488 gst/playback/gstplaysink.c:2537
+ #: gst/playback/gstplaysink.c:2552 gst/playback/gstplaysink.c:2577
+ #: gst/playback/gstplaysink.c:2609 gst/playback/gstplaysink.c:2757
+ #: gst/playback/gstplaysink.c:2788 gst/playback/gstplaysink.c:3166
+ #: gst/playback/gstplaysink.c:3175 gst/playback/gstplaysink.c:3184
+ #: gst/playback/gstplaysink.c:3193 gst/playback/gstplaysink.c:3606
+ #: gst/playback/gstplaysink.c:4526 gst/playback/gstplaysinkconvertbin.c:97
+ #: gst/playback/gstplaysinkconvertbin.c:117
-#: gst/playback/gstdecodebin2.c:1873 gst/playback/gstparsebin.c:1553
++#: gst/playback/gsturidecodebin.c:1472 gst/playback/gsturisourcebin.c:1597
+ #, c-format
+ msgid "Missing element '%s' - check your GStreamer installation."
+ msgstr "未找到组件‘%s’-请检查您的 GStreamer 安装情况。"
+
-#: gst/playback/gstdecodebin2.c:2851 gst/playback/gstparsebin.c:2385
++#: gst/playback/gstdecodebin2.c:1883 gst/playback/gstparsebin.c:1553
+ msgid "Could not determine type of stream"
+ msgstr "无法确定流的类型"
+
-#: gst/playback/gsturidecodebin.c:1378 gst/playback/gsturisourcebin.c:1490
++#: gst/playback/gstdecodebin2.c:2861 gst/playback/gstparsebin.c:2385
+ msgid "This appears to be a text file"
+ msgstr "此文件是个文本文件"
+
+ #: gst/playback/gstplaybin2.c:5452
+ msgid "Could not create \"uridecodebin\" element."
+ msgstr "无法创建“uridecodebin”组件。"
+
+ #: gst/playback/gstplaybin3.c:4558
+ msgid "Could not create \"uridecodebin3\" element."
+ msgstr "无法创建“uridecodebin3”组件。"
+
+ #: gst/playback/gstplaysink.c:2003
+ #, c-format
+ msgid "Both autovideosink and %s elements are missing."
+ msgstr "缺少 autovideosink 和 %s 组件。"
+
+ #: gst/playback/gstplaysink.c:2007
+ msgid "The autovideosink element is missing."
+ msgstr "缺少 autovideosink 组件。"
+
+ #: gst/playback/gstplaysink.c:2012
+ #, c-format
+ msgid "Configured videosink %s is not working."
+ msgstr "配置后的 videosink %s 无法工作。"
+
+ #: gst/playback/gstplaysink.c:2016
+ #, c-format
+ msgid "Both autovideosink and %s elements are not working."
+ msgstr "autovideosink 和 %s 组件都无法工作。"
+
+ #: gst/playback/gstplaysink.c:2020
+ msgid "The autovideosink element is not working."
+ msgstr "autovideosink 组件无法工作。"
+
+ #: gst/playback/gstplaysink.c:2525
+ msgid "Custom text sink element is not usable."
+ msgstr "自定义文字汇组件不可用。"
+
+ #: gst/playback/gstplaysink.c:2903
+ msgid "No volume control found"
+ msgstr "未找到音量控制"
+
+ #: gst/playback/gstplaysink.c:2933
+ #, c-format
+ msgid "Both autoaudiosink and %s elements are missing."
+ msgstr "缺少 autovideosink 和 %s 组件。"
+
+ #: gst/playback/gstplaysink.c:2937
+ msgid "The autoaudiosink element is missing."
+ msgstr "缺少 autovideosink 组件。"
+
+ #: gst/playback/gstplaysink.c:2942
+ #, c-format
+ msgid "Configured audiosink %s is not working."
+ msgstr "配置后的 audiosink %s 无法工作。"
+
+ #: gst/playback/gstplaysink.c:2946
+ #, c-format
+ msgid "Both autoaudiosink and %s elements are not working."
+ msgstr "autovideosink 和 %s 组件都无法工作。"
+
+ #: gst/playback/gstplaysink.c:2950
+ msgid "The autoaudiosink element is not working."
+ msgstr "autovideosink 组件无法工作。"
+
+ #: gst/playback/gstplaysink.c:3269 gst/playback/gstplaysink.c:3274
+ msgid "Can't play a text file without video or visualizations."
+ msgstr "无法播放不带视频或可视化效果的文本文件。"
+
+ #: gst/playback/gsturidecodebin.c:921
+ #, c-format
+ msgid "No decoder available for type '%s'."
+ msgstr "找不到‘%s’类型可用的解码器。"
+
-#: gst/playback/gsturidecodebin.c:1384 gst/playback/gsturisourcebin.c:1496
++#: gst/playback/gsturidecodebin.c:1378 gst/playback/gsturisourcebin.c:1491
+ msgid "No URI specified to play from."
+ msgstr "未指定要播放的 URI。"
+
-#: gst/playback/gsturidecodebin.c:1391 gst/playback/gsturisourcebin.c:1503
++#: gst/playback/gsturidecodebin.c:1384 gst/playback/gsturisourcebin.c:1497
+ #, c-format
+ msgid "Invalid URI \"%s\"."
+ msgstr "无效的 URI“%s”。"
+
-#: gst/playback/gsturidecodebin.c:1409 gst/playback/gsturisourcebin.c:1521
++#: gst/playback/gsturidecodebin.c:1391 gst/playback/gsturisourcebin.c:1504
+ msgid "This stream type cannot be played yet."
+ msgstr "此类型的数据流尚不能播放。"
+
-#: gst/playback/gsturidecodebin.c:2280 gst/playback/gsturisourcebin.c:2227
++#: gst/playback/gsturidecodebin.c:1409 gst/playback/gsturisourcebin.c:1522
+ #, c-format
+ msgid "No URI handler implemented for \"%s\"."
+ msgstr "“%s”未实现 URI 处理器。"
+
-#: gst-libs/gst/pbutils/descriptions.c:198
++#: gst/playback/gsturidecodebin.c:2289 gst/playback/gsturisourcebin.c:2228
+ msgid "Source element is invalid."
+ msgstr "无效的源组件。"
+
+ #: gst/tcp/gsttcpclientsink.c:216
+ #, c-format
+ msgid "Error while sending data to \"%s:%d\"."
+ msgstr "发送数据至“%s:%d”时出错。"
+
+ #: gst-libs/gst/audio/gstaudiobasesrc.c:842
+ msgid "Can't record audio fast enough"
+ msgstr "无法足够快的录音"
+
+ #: gst-libs/gst/audio/gstaudiocdsrc.c:1625
+ msgid "This CD has no audio tracks"
+ msgstr "此 CD 无音轨"
+
+ #: gst-libs/gst/pbutils/descriptions.c:89
+ msgid "ID3 tag"
+ msgstr "ID3 标识"
+
+ #: gst-libs/gst/pbutils/descriptions.c:90
+ #: gst-libs/gst/pbutils/descriptions.c:91
+ msgid "APE tag"
+ msgstr "APE 标识"
+
+ #: gst-libs/gst/pbutils/descriptions.c:92
+ msgid "ICY internet radio"
+ msgstr "ICY 网络电台"
+
+ #: gst-libs/gst/pbutils/descriptions.c:127
+ msgid "Apple Lossless Audio (ALAC)"
+ msgstr "Apple 无损音频(ALAC)"
+
+ #: gst-libs/gst/pbutils/descriptions.c:143
+ msgid "Free Lossless Audio Codec (FLAC)"
+ msgstr "自由无损音频编码(FLAC)"
+
+ #: gst-libs/gst/pbutils/descriptions.c:173
+ #: gst-libs/gst/pbutils/descriptions.c:174
+ msgid "Lossless True Audio (TTA)"
+ msgstr "无损高保真音频(TTA)"
+
+ #: gst-libs/gst/pbutils/descriptions.c:182
+ msgid "Windows Media Speech"
+ msgstr "Windows Media 语音"
+
-#: gst-libs/gst/pbutils/descriptions.c:202
++#: gst-libs/gst/pbutils/descriptions.c:199
+ msgid "CYUV Lossless"
+ msgstr "CYUV 无损"
+
-#: gst-libs/gst/pbutils/descriptions.c:216
++#: gst-libs/gst/pbutils/descriptions.c:203
+ msgid "FFMpeg v1"
+ msgstr "FFMpeg v1"
+
-#: gst-libs/gst/pbutils/descriptions.c:227
++#: gst-libs/gst/pbutils/descriptions.c:217
+ msgid "Lossless MSZH"
+ msgstr "无损 MSZH"
+
-#: gst-libs/gst/pbutils/descriptions.c:273
++#: gst-libs/gst/pbutils/descriptions.c:228
+ msgid "Run-length encoding"
+ msgstr "Run-length 编码"
+
-#: gst-libs/gst/pbutils/descriptions.c:277
++#: gst-libs/gst/pbutils/descriptions.c:274
+ msgid "Timed Text"
+ msgstr "定时文本"
+
-#: gst-libs/gst/pbutils/descriptions.c:278
++#: gst-libs/gst/pbutils/descriptions.c:278
+ msgid "Subtitle"
+ msgstr "字幕"
+
-#: gst-libs/gst/pbutils/descriptions.c:279
++#: gst-libs/gst/pbutils/descriptions.c:279
+ msgid "MPL2 subtitle format"
+ msgstr "MPL2 字幕格式"
+
-#: gst-libs/gst/pbutils/descriptions.c:280
++#: gst-libs/gst/pbutils/descriptions.c:280
+ msgid "DKS subtitle format"
+ msgstr "DKS 字幕格式"
+
-#: gst-libs/gst/pbutils/descriptions.c:281
++#: gst-libs/gst/pbutils/descriptions.c:281
+ msgid "QTtext subtitle format"
+ msgstr "QTtext 字幕格式"
+
-#: gst-libs/gst/pbutils/descriptions.c:282
++#: gst-libs/gst/pbutils/descriptions.c:282
+ msgid "Sami subtitle format"
+ msgstr "Sami 字幕格式"
+
-#: gst-libs/gst/pbutils/descriptions.c:286
++#: gst-libs/gst/pbutils/descriptions.c:283
+ msgid "TMPlayer subtitle format"
+ msgstr "TMPlayer 字幕格式"
+
-#: gst-libs/gst/pbutils/descriptions.c:287
++#: gst-libs/gst/pbutils/descriptions.c:287
+ msgid "CEA 608 Closed Caption"
+ msgstr "CEA 608 隐藏式字幕"
+
-#: gst-libs/gst/pbutils/descriptions.c:288
++#: gst-libs/gst/pbutils/descriptions.c:288
+ msgid "CEA 708 Closed Caption"
+ msgstr "CEA 708 隐藏式字幕"
+
-#: gst-libs/gst/pbutils/descriptions.c:289
++#: gst-libs/gst/pbutils/descriptions.c:289
+ msgid "Kate subtitle format"
+ msgstr "Kate 字幕格式"
+
-#: gst-libs/gst/pbutils/descriptions.c:454
-#: gst-libs/gst/pbutils/descriptions.c:457
-#: gst-libs/gst/pbutils/descriptions.c:507
++#: gst-libs/gst/pbutils/descriptions.c:290
+ msgid "WebVTT subtitle format"
+ msgstr "WebVTT 字幕格式"
+
-#: gst-libs/gst/pbutils/descriptions.c:462
++#: gst-libs/gst/pbutils/descriptions.c:455
++#: gst-libs/gst/pbutils/descriptions.c:458
++#: gst-libs/gst/pbutils/descriptions.c:508
+ msgid "Uncompressed video"
+ msgstr "未压缩的视频"
+
-#: gst-libs/gst/pbutils/descriptions.c:485
++#: gst-libs/gst/pbutils/descriptions.c:463
+ msgid "Uncompressed gray"
+ msgstr "未压缩的灰度图像"
+
-#: gst-libs/gst/pbutils/descriptions.c:487
++#: gst-libs/gst/pbutils/descriptions.c:486
+ #, c-format
+ msgid "Uncompressed packed YUV %s"
+ msgstr "未压缩的紧缩 YUV %s"
+
-#: gst-libs/gst/pbutils/descriptions.c:489
++#: gst-libs/gst/pbutils/descriptions.c:488
+ #, c-format
+ msgid "Uncompressed semi-planar YUV %s"
+ msgstr "未压缩的半平面 YUV %s"
+
-#: gst-libs/gst/pbutils/descriptions.c:500
++#: gst-libs/gst/pbutils/descriptions.c:490
+ #, c-format
+ msgid "Uncompressed planar YUV %s"
+ msgstr "未压缩的平面 YUV %s"
+
-#: gst-libs/gst/pbutils/descriptions.c:503
++#: gst-libs/gst/pbutils/descriptions.c:501
+ #, c-format
+ msgid "Uncompressed palettized %d-bit %s"
+ msgstr "未压缩的调色板映射 %d位 %s"
+
-#: gst-libs/gst/pbutils/descriptions.c:585
++#: gst-libs/gst/pbutils/descriptions.c:504
+ #, c-format
+ msgid "Uncompressed %d-bit %s"
+ msgstr "未压缩的 %d位 %s"
+
-#: gst-libs/gst/pbutils/descriptions.c:833
++#: gst-libs/gst/pbutils/descriptions.c:586
+ #, c-format
+ msgid "DivX MPEG-4 Version %d"
+ msgstr "DivX MPEG-4 第 %d 版"
+
-#: gst-libs/gst/pbutils/descriptions.c:839
++#: gst-libs/gst/pbutils/descriptions.c:834
+ msgid "Uncompressed audio"
+ msgstr "未压缩的音频"
+
-#: gst-libs/gst/pbutils/descriptions.c:941
++#: gst-libs/gst/pbutils/descriptions.c:840
+ #, c-format
+ msgid "Raw %d-bit %s audio"
+ msgstr "原始 %d位 %s 音频"
+
-#: gst-libs/gst/pbutils/descriptions.c:944
++#: gst-libs/gst/pbutils/descriptions.c:942
+ msgid "Audio CD source"
+ msgstr "音频 CD 源"
+
-#: gst-libs/gst/pbutils/descriptions.c:947
++#: gst-libs/gst/pbutils/descriptions.c:945
+ msgid "DVD source"
+ msgstr "DVD 源"
+
-#: gst-libs/gst/pbutils/descriptions.c:951
++#: gst-libs/gst/pbutils/descriptions.c:948
+ msgid "Real Time Streaming Protocol (RTSP) source"
+ msgstr "实时流协议(RTSP)源"
+
-#: gst-libs/gst/pbutils/descriptions.c:959
++#: gst-libs/gst/pbutils/descriptions.c:952
+ msgid "Microsoft Media Server (MMS) protocol source"
+ msgstr "Microsoft 媒体服务协议(MMS)源"
+
-#: gst-libs/gst/pbutils/descriptions.c:1033
++#: gst-libs/gst/pbutils/descriptions.c:960
+ #, c-format
+ msgid "%s protocol source"
+ msgstr "%s 协议源"
+
-#: gst-libs/gst/pbutils/descriptions.c:1035
++#: gst-libs/gst/pbutils/descriptions.c:1034
+ #, c-format
+ msgid "%s video RTP depayloader"
+ msgstr "%s 视频 RTP 去负载器"
+
-#: gst-libs/gst/pbutils/descriptions.c:1037
++#: gst-libs/gst/pbutils/descriptions.c:1036
+ #, c-format
+ msgid "%s audio RTP depayloader"
+ msgstr "%s 音频 RTP 去负载器"
+
-#: gst-libs/gst/pbutils/descriptions.c:1044
++#: gst-libs/gst/pbutils/descriptions.c:1038
+ #, c-format
+ msgid "%s RTP depayloader"
+ msgstr "%s RTP 去负载器"
+
-#: gst-libs/gst/pbutils/descriptions.c:1046
++#: gst-libs/gst/pbutils/descriptions.c:1045
+ #, c-format
+ msgid "%s demuxer"
+ msgstr "%s 多路分配器"
+
-#: gst-libs/gst/pbutils/descriptions.c:1085
++#: gst-libs/gst/pbutils/descriptions.c:1047
+ #, c-format
+ msgid "%s decoder"
+ msgstr "%s 解码器"
+
-#: gst-libs/gst/pbutils/descriptions.c:1087
++#: gst-libs/gst/pbutils/descriptions.c:1086
+ #, c-format
+ msgid "%s video RTP payloader"
+ msgstr "%s 视频 RTP 负载器"
+
-#: gst-libs/gst/pbutils/descriptions.c:1089
++#: gst-libs/gst/pbutils/descriptions.c:1088
+ #, c-format
+ msgid "%s audio RTP payloader"
+ msgstr "%s 音频 RTP 负载器"
+
-#: gst-libs/gst/pbutils/descriptions.c:1096
++#: gst-libs/gst/pbutils/descriptions.c:1090
+ #, c-format
+ msgid "%s RTP payloader"
+ msgstr "%s RTP 负载器"
+
-#: gst-libs/gst/pbutils/descriptions.c:1098
++#: gst-libs/gst/pbutils/descriptions.c:1097
+ #, c-format
+ msgid "%s muxer"
+ msgstr "%s 复用器"
+
-#: gst-libs/gst/pbutils/descriptions.c:1132
++#: gst-libs/gst/pbutils/descriptions.c:1099
+ #, c-format
+ msgid "%s encoder"
+ msgstr "%s 编码器"
+
-#: tools/gst-device-monitor.c:255 tools/gst-play.c:1442
++#: gst-libs/gst/pbutils/descriptions.c:1133
+ #, c-format
+ msgid "GStreamer element %s"
+ msgstr "GStreamer 组件 %s"
+
+ #: gst-libs/gst/pbutils/missing-plugins.c:558
+ msgid "Unknown source element"
+ msgstr "未知源组件"
+
+ #: gst-libs/gst/pbutils/missing-plugins.c:561
+ msgid "Unknown sink element"
+ msgstr "未知的汇组件"
+
+ #: gst-libs/gst/pbutils/missing-plugins.c:564
+ msgid "Unknown element"
+ msgstr "未知组件"
+
+ #: gst-libs/gst/pbutils/missing-plugins.c:567
+ msgid "Unknown decoder element"
+ msgstr "未知的解码组件"
+
+ #: gst-libs/gst/pbutils/missing-plugins.c:570
+ msgid "Unknown encoder element"
+ msgstr "未知的编码组件"
+
+ #: gst-libs/gst/pbutils/missing-plugins.c:575
+ msgid "Plugin or element of unknown type"
+ msgstr "未知类型的插件或组件"
+
+ #: gst-libs/gst/tag/gsttagdemux.c:1243
+ msgid "Failed to read tag: not enough data"
+ msgstr "无法读取标签: 数据不足"
+
+ #: gst-libs/gst/tag/tags.c:77
+ msgid "track ID"
+ msgstr "音轨 ID"
+
+ #: gst-libs/gst/tag/tags.c:77
+ msgid "MusicBrainz track ID"
+ msgstr "MusicBrainz 音轨 ID"
+
+ #: gst-libs/gst/tag/tags.c:79
+ msgid "artist ID"
+ msgstr "艺人 ID"
+
+ #: gst-libs/gst/tag/tags.c:79
+ msgid "MusicBrainz artist ID"
+ msgstr "MusicBrainz 艺人 ID"
+
+ #: gst-libs/gst/tag/tags.c:81
+ msgid "album ID"
+ msgstr "专辑 ID"
+
+ #: gst-libs/gst/tag/tags.c:81
+ msgid "MusicBrainz album ID"
+ msgstr "MusicBrainz 专辑 ID"
+
+ #: gst-libs/gst/tag/tags.c:84
+ msgid "album artist ID"
+ msgstr "专辑艺人 ID"
+
+ #: gst-libs/gst/tag/tags.c:84
+ msgid "MusicBrainz album artist ID"
+ msgstr "MusicBrainz 专辑艺人 ID"
+
+ #: gst-libs/gst/tag/tags.c:86
+ msgid "track TRM ID"
+ msgstr "音轨 TRM ID"
+
+ #: gst-libs/gst/tag/tags.c:86
+ msgid "MusicBrainz TRM ID"
+ msgstr "MusicBrainz TRM ID"
+
+ #: gst-libs/gst/tag/tags.c:108
+ msgid "capturing shutter speed"
+ msgstr "捕捉快门速度"
+
+ #: gst-libs/gst/tag/tags.c:109
+ msgid "Shutter speed used when capturing an image, in seconds"
+ msgstr "用于捕捉图像的快门速度,单位为秒"
+
+ #: gst-libs/gst/tag/tags.c:112
+ msgid "capturing focal ratio"
+ msgstr "捕捉焦比"
+
+ #: gst-libs/gst/tag/tags.c:113
+ msgid "Focal ratio (f-number) used when capturing the image"
+ msgstr "捕捉图像时的焦比(f值)"
+
+ #: gst-libs/gst/tag/tags.c:116
+ msgid "capturing focal length"
+ msgstr "捕捉焦距"
+
+ #: gst-libs/gst/tag/tags.c:117
+ msgid "Focal length of the lens used capturing the image, in mm"
+ msgstr "捕捉图像时的等效焦距(单位为 mm)"
+
+ #: gst-libs/gst/tag/tags.c:121
+ msgid "capturing 35 mm equivalent focal length"
+ msgstr "捕捉 35mm 等效焦距"
+
+ #: gst-libs/gst/tag/tags.c:122
+ msgid "35 mm equivalent focal length of the lens used capturing the image, in mm"
+ msgstr "捕捉图像时使用 35mm 等效焦距"
+
+ #: gst-libs/gst/tag/tags.c:126
+ msgid "capturing digital zoom ratio"
+ msgstr "捕捉数码变焦比"
+
+ #: gst-libs/gst/tag/tags.c:127
+ msgid "Digital zoom ratio used when capturing an image"
+ msgstr "捕捉图像时的数码变焦比"
+
+ #: gst-libs/gst/tag/tags.c:130
+ msgid "capturing iso speed"
+ msgstr "捕捉ISO感光度"
+
+ #: gst-libs/gst/tag/tags.c:131
+ msgid "The ISO speed used when capturing an image"
+ msgstr "捕捉图像时使用的ISO感光度"
+
+ #: gst-libs/gst/tag/tags.c:134
+ msgid "capturing exposure program"
+ msgstr "捕捉曝光程序"
+
+ #: gst-libs/gst/tag/tags.c:135
+ msgid "The exposure program used when capturing an image"
+ msgstr "用于捕捉图像的曝光程序"
+
+ #: gst-libs/gst/tag/tags.c:138
+ msgid "capturing exposure mode"
+ msgstr "捕捉曝光模式"
+
+ #: gst-libs/gst/tag/tags.c:139
+ msgid "The exposure mode used when capturing an image"
+ msgstr "捕捉图像时使用的曝光模式"
+
+ #: gst-libs/gst/tag/tags.c:142
+ msgid "capturing exposure compensation"
+ msgstr "捕捉曝光补偿"
+
+ #: gst-libs/gst/tag/tags.c:143
+ msgid "The exposure compensation used when capturing an image"
+ msgstr "捕捉图像时使用的曝光补偿值"
+
+ #: gst-libs/gst/tag/tags.c:146
+ msgid "capturing scene capture type"
+ msgstr "捕捉场景模式"
+
+ #: gst-libs/gst/tag/tags.c:147
+ msgid "The scene capture mode used when capturing an image"
+ msgstr "用于捕捉图像的场景模式"
+
+ #: gst-libs/gst/tag/tags.c:150
+ msgid "capturing gain adjustment"
+ msgstr "捕捉增益调节"
+
+ #: gst-libs/gst/tag/tags.c:151
+ msgid "The overall gain adjustment applied on an image"
+ msgstr "调整应用于图像的总体增益"
+
+ #: gst-libs/gst/tag/tags.c:154
+ msgid "capturing white balance"
+ msgstr "捕捉白平衡"
+
+ #: gst-libs/gst/tag/tags.c:155
+ msgid "The white balance mode set when capturing an image"
+ msgstr "捕捉图像时的白平衡"
+
+ #: gst-libs/gst/tag/tags.c:158
+ msgid "capturing contrast"
+ msgstr "捕捉对比度"
+
+ #: gst-libs/gst/tag/tags.c:159
+ msgid "The direction of contrast processing applied when capturing an image"
+ msgstr "捕捉图像时所使用的对比度"
+
+ #: gst-libs/gst/tag/tags.c:163
+ msgid "capturing saturation"
+ msgstr "捕捉饱和度"
+
+ #: gst-libs/gst/tag/tags.c:164
+ msgid "The direction of saturation processing applied when capturing an image"
+ msgstr "捕捉图像时所使用的饱和度"
+
+ #: gst-libs/gst/tag/tags.c:168
+ msgid "capturing sharpness"
+ msgstr "捕捉锐度"
+
+ #: gst-libs/gst/tag/tags.c:169
+ msgid "The direction of sharpness processing applied when capturing an image"
+ msgstr "捕捉图像时所使用的锐度"
+
+ #: gst-libs/gst/tag/tags.c:173
+ msgid "capturing flash fired"
+ msgstr "捕捉闪光灯"
+
+ #: gst-libs/gst/tag/tags.c:174
+ msgid "If the flash fired while capturing an image"
+ msgstr "设定在捕捉图像时是否使用闪光灯补光"
+
+ #: gst-libs/gst/tag/tags.c:177
+ msgid "capturing flash mode"
+ msgstr "捕捉闪光模式"
+
+ #: gst-libs/gst/tag/tags.c:178
+ msgid "The selected flash mode while capturing an image"
+ msgstr "捕捉图像时使用的闪光模式"
+
+ #: gst-libs/gst/tag/tags.c:181
+ msgid "capturing metering mode"
+ msgstr "捕捉测光"
+
+ #: gst-libs/gst/tag/tags.c:182
+ msgid "The metering mode used while determining exposure for capturing an image"
+ msgstr "用于确定要捕捉图像的曝光度的测光模式"
+
+ #: gst-libs/gst/tag/tags.c:186
+ msgid "capturing source"
+ msgstr "捕捉源"
+
+ #: gst-libs/gst/tag/tags.c:187
+ msgid "The source or type of device used for the capture"
+ msgstr "指定用于捕捉的源或设备类型"
+
+ #: gst-libs/gst/tag/tags.c:190
+ msgid "image horizontal ppi"
+ msgstr "图像横向ppi"
+
+ #: gst-libs/gst/tag/tags.c:191
+ msgid "Media (image/video) intended horizontal pixel density in ppi"
+ msgstr "指定媒体(图片/视频)的横向像素密度,单位是ppi"
+
+ #: gst-libs/gst/tag/tags.c:194
+ msgid "image vertical ppi"
+ msgstr "图像纵向ppi"
+
+ #: gst-libs/gst/tag/tags.c:195
+ msgid "Media (image/video) intended vertical pixel density in ppi"
+ msgstr "指定媒体(图片/视频)的纵向像素密度,单位是ppi"
+
+ #: gst-libs/gst/tag/tags.c:198
+ msgid "ID3v2 frame"
+ msgstr "ID3v2 帧"
+
+ #: gst-libs/gst/tag/tags.c:198
+ msgid "unparsed id3v2 tag frame"
+ msgstr "未被解析的 ID3v2 标识帧"
+
+ #: gst-libs/gst/tag/tags.c:202
+ msgid "musical-key"
+ msgstr "音调"
+
+ #: gst-libs/gst/tag/tags.c:202
+ msgid "Initial key in which the sound starts"
+ msgstr "音乐开始时的音调"
+
-#: tools/gst-device-monitor.c:257
++#: tools/gst-device-monitor.c:260 tools/gst-play.c:1444
+ msgid "Print version information and exit"
+ msgstr "显示版本信息,然后退出。"
+
-#: tools/gst-play.c:308
++#: tools/gst-device-monitor.c:262
+ msgid "Don't exit after showing the initial device list, but wait for devices to added/removed."
+ msgstr "显示初始设备列表后不退出,而是等待设备被添加或移除。"
+
-#: tools/gst-play.c:347
++#: tools/gst-play.c:310
+ #, c-format
+ msgid "Volume: %.0f%%"
+ msgstr "音量:%.0f%%"
+
-#: tools/gst-play.c:368
++#: tools/gst-play.c:349
+ msgid "Buffering..."
+ msgstr "缓冲中..."
+
-#: tools/gst-play.c:398 tools/gst-play.c:444 tools/gst-play.c:881
-#: tools/gst-play.c:1345
++#: tools/gst-play.c:370
+ msgid "Clock lost, selecting a new one\n"
+ msgstr "时钟已丢失,请选择一个新的\n"
+
-#: tools/gst-play.c:611
++#: tools/gst-play.c:400 tools/gst-play.c:446 tools/gst-play.c:883
++#: tools/gst-play.c:1347
+ msgid "Reached end of play list."
+ msgstr "到达播放列表结尾。"
+
-#: tools/gst-play.c:669
++#: tools/gst-play.c:613
+ msgid "Paused"
+ msgstr "已暂停"
+
-#: tools/gst-play.c:732
++#: tools/gst-play.c:671
+ #, c-format
+ msgid "Now playing %s\n"
+ msgstr "正在播放 %s\n"
+
-#: tools/gst-play.c:977
++#: tools/gst-play.c:734
+ #, c-format
+ msgid "About to finish, preparing next title: %s"
+ msgstr "即将结束,准备下一标题:%s"
+
-#: tools/gst-play.c:981
++#: tools/gst-play.c:979
+ #, c-format
+ msgid "Playback rate: %.2f"
+ msgstr "回放速率:%.2f"
+
-#: tools/gst-play.c:1285
++#: tools/gst-play.c:983
+ #, c-format
+ msgid "Could not change playback rate to %.2f"
+ msgstr "无法将回放速率改变为%.2f"
+
-#: tools/gst-play.c:1285
++#: tools/gst-play.c:1287
+ msgid "space"
+ msgstr "空格"
+
-#: tools/gst-play.c:1286
++#: tools/gst-play.c:1287
+ msgid "pause/unpause"
+ msgstr "暂停/继续"
+
-#: tools/gst-play.c:1286
++#: tools/gst-play.c:1288
+ msgid "q or ESC"
+ msgstr "q 或 ESC"
+
-#: tools/gst-play.c:1287
++#: tools/gst-play.c:1288
+ msgid "quit"
+ msgstr "退出"
+
-#: tools/gst-play.c:1287
++#: tools/gst-play.c:1289
+ msgid "> or n"
+ msgstr "> 或 n"
+
-#: tools/gst-play.c:1288
++#: tools/gst-play.c:1289
+ msgid "play next"
+ msgstr "下一曲"
+
-#: tools/gst-play.c:1288
++#: tools/gst-play.c:1290
+ msgid "< or b"
+ msgstr "< 或 b"
+
-#: tools/gst-play.c:1289
++#: tools/gst-play.c:1290
+ msgid "play previous"
+ msgstr "上一曲"
+
-#: tools/gst-play.c:1290
++#: tools/gst-play.c:1291
+ msgid "seek forward"
+ msgstr "前进"
+
-#: tools/gst-play.c:1291
++#: tools/gst-play.c:1292
+ msgid "seek backward"
+ msgstr "后退"
+
-#: tools/gst-play.c:1292
++#: tools/gst-play.c:1293
+ msgid "volume up"
+ msgstr "增大音量"
+
-#: tools/gst-play.c:1293
++#: tools/gst-play.c:1294
+ msgid "volume down"
+ msgstr "减小音量"
+
-#: tools/gst-play.c:1294
++#: tools/gst-play.c:1295
+ msgid "increase playback rate"
+ msgstr "增大回放速率"
+
-#: tools/gst-play.c:1295
++#: tools/gst-play.c:1296
+ msgid "decrease playback rate"
+ msgstr "减小回放速率"
+
-#: tools/gst-play.c:1296
++#: tools/gst-play.c:1297
+ msgid "change playback direction"
+ msgstr "改变回放方向"
+
-#: tools/gst-play.c:1297
++#: tools/gst-play.c:1298
+ msgid "enable/disable trick modes"
+ msgstr "启用/禁用特效模式"
+
-#: tools/gst-play.c:1298
++#: tools/gst-play.c:1299
+ msgid "change audio track"
+ msgstr "改变音频轨"
+
-#: tools/gst-play.c:1299
++#: tools/gst-play.c:1300
+ msgid "change video track"
+ msgstr "改变视频轨"
+
-#: tools/gst-play.c:1300
++#: tools/gst-play.c:1301
+ msgid "change subtitle track"
+ msgstr "改变字幕轨"
+
-#: tools/gst-play.c:1301
++#: tools/gst-play.c:1302
+ msgid "seek to beginning"
+ msgstr "定位至开始处"
+
-#: tools/gst-play.c:1304
++#: tools/gst-play.c:1303
+ msgid "show keyboard shortcuts"
+ msgstr "显示键盘快捷键"
+
-#: tools/gst-play.c:1437
++#: tools/gst-play.c:1306
+ msgid "Interactive mode - keyboard controls:"
+ msgstr "交互模式 - 键盘控制:"
+
-#: tools/gst-play.c:1439
++#: tools/gst-play.c:1439
+ msgid "Output status information and property notifications"
+ msgstr "输出状态信息和属性通知"
+
-#: tools/gst-play.c:1444
++#: tools/gst-play.c:1441
+ msgid "Control playback behaviour setting playbin 'flags' property"
+ msgstr "控制回放行为设置中 playbin 的“flags”属性"
+
-#: tools/gst-play.c:1446
++#: tools/gst-play.c:1446
+ msgid "Video sink to use (default is autovideosink)"
+ msgstr "要使用的视频接收组件(默认为autovideosink)"
+
-#: tools/gst-play.c:1448
++#: tools/gst-play.c:1448
+ msgid "Audio sink to use (default is autoaudiosink)"
+ msgstr "要使用的音频接收组件(默认为autoaudiosink)"
+
-#: tools/gst-play.c:1450
++#: tools/gst-play.c:1450
+ msgid "Enable gapless playback"
+ msgstr "启用无缝回放"
+
-#: tools/gst-play.c:1453
++#: tools/gst-play.c:1452
+ msgid "Shuffle playlist"
+ msgstr "随机播放列表"
+
-#: tools/gst-play.c:1455
++#: tools/gst-play.c:1455
+ msgid "Disable interactive control via the keyboard"
+ msgstr "禁用键盘交互操作"
+
-#: tools/gst-play.c:1457
++#: tools/gst-play.c:1457
+ msgid "Volume"
+ msgstr "音量"
+
-#: tools/gst-play.c:1459
++#: tools/gst-play.c:1459
+ msgid "Playlist file containing input media files"
+ msgstr "播放列表文件含有媒体文件"
+
-#: tools/gst-play.c:1461
++#: tools/gst-play.c:1461
+ msgid "Do not print any output (apart from errors)"
+ msgstr "不显示任何输出(除了错误信息)"
+
-#: tools/gst-play.c:1462
++#: tools/gst-play.c:1463
+ msgid "Use playbin3 pipeline"
+ msgstr "使用 playbin3 管道"
+
-#: tools/gst-play.c:1536
++#: tools/gst-play.c:1464
+ msgid "(default varies depending on 'USE_PLAYBIN' env variable)"
+ msgstr "(默认配置取决于“USE_PLAYBIN”环境变量)"
+
-#: tools/gst-play.c:1540
++#: tools/gst-play.c:1468
++msgid "Keep showing the last frame on EOS until quit or playlist change command (gapless is ignored)"
++msgstr "播放结束后,在 EOS 上维持最后一帧,直到用户手动退出或者切换播放列表(忽略“无缝切换”选项)"
++
++#: tools/gst-play.c:1546
+ #, c-format
+ msgid "Usage: %s FILE1|URI1 [FILE2|URI2] [FILE3|URI3] ..."
+ msgstr "用法:%s 文件1|URI1 [文件2|URI2] [文件3|URI3] ..."
+
-#: tools/gst-play.c:1580
++#: tools/gst-play.c:1550
+ msgid "You must provide at least one filename or URI to play."
+ msgstr "你必须提供至少一个文件名或URI来播放。"
+
++#: tools/gst-play.c:1590
+ msgid "Press 'k' to see a list of keyboard shortcuts.\n"
+ msgstr "按“k”键来显示键盘快捷键列表。\n"
+
+ #~ msgid "Could not create \"decodebin3\" element."
+ #~ msgstr "无法创建“decodebin3”组件。"
+
+ #~ msgid "Could not create \"urisourcebin\" element."
+ #~ msgstr "无法创建“urisourcebin”组件。"
+
+ #~ msgid "Internal data stream error."
+ #~ msgstr "内部数据流错误。"
+
+ #~ msgid "Master"
+ #~ msgstr "主音量"
+
+ #~ msgid "Bass"
+ #~ msgstr "低音(Bass)"
+
+ #~ msgid "Treble"
+ #~ msgstr "高音(Treble)"
+
+ #~ msgid "PCM"
+ #~ msgstr "波形"
+
+ #~ msgid "Synth"
+ #~ msgstr "合成器"
+
+ #~ msgid "Line-in"
+ #~ msgstr "线路输入"
+
+ #~ msgid "CD"
+ #~ msgstr "CD"
+
+ #~ msgid "Microphone"
+ #~ msgstr "话筒"
+
+ #~ msgid "PC Speaker"
+ #~ msgstr "PC 扬声器"
+
+ #~ msgid "Playback"
+ #~ msgstr "回放"
+
+ #~ msgid "Capture"
+ #~ msgstr "录音"
+
+ #~ msgid "Could not open vfs file \"%s\" for writing: %s."
+ #~ msgstr "无法打开 vfs 文件“%s”写入:%s。"
+
+ #~ msgid "No filename given"
+ #~ msgstr "没有给定文件名"
+
+ #~ msgid "Could not close vfs file \"%s\"."
+ #~ msgstr "无法关闭 vfs 文件“%s”。"
+
+ #~ msgid "Error while writing to file \"%s\"."
+ #~ msgstr "写入文件“%s”时出错。"
+
+ #~ msgid "A %s plugin is required to play this stream, but not installed."
+ #~ msgstr "播放此数据流需要 %s 插件,但它未安装。"
+
+ #~ msgid "Invalid subtitle URI \"%s\", subtitles disabled."
+ #~ msgstr "无效的字幕 URI “%s”,不使用字幕。"
+
+ #~ msgid "RTSP streams cannot be played yet."
+ #~ msgstr "RTSP 流尚不能播放。"
+
+ #~ msgid "Only a subtitle stream was detected. Either you are loading a subtitle file or some other type of text file, or the media file was not recognized."
+ #~ msgstr "仅检测到字幕流。您可能装入的是字幕文件或者其它类型文本文件,又或者是媒体文件无法识别。"
+
+ #~ msgid "You do not have a decoder installed to handle this file. You might need to install the necessary plugins."
+ #~ msgstr "您没有安装可处理此文件的解码器。您可能需要安装必要的插件。"
+
+ #~ msgid "This is not a media file"
+ #~ msgstr "此文件不是媒体文件"
+
+ #~ msgid "A subtitle stream was detected, but no video stream."
+ #~ msgstr "检测到了字幕流,但无视频流。"
+
+ #~ msgid "No file name specified."
+ #~ msgstr "未给定文件名。"
+
+ #~ msgid "Could not open file \"%s\" for reading."
+ #~ msgstr "无法打开文件“%s”读取。"
+
+ #~ msgid "Could not create \"queue2\" element."
+ #~ msgstr "无法创建“queue2”组件。"
+
+ #~ msgid "Error while sending gdp header data to \"%s:%d\"."
+ #~ msgstr "发送 gdp 头部数据至“%s:%d”时出错。"
+
+ #~ msgid "Error while sending gdp payload data to \"%s:%d\"."
+ #~ msgstr "发送 gdp 负载数据至“%s:%d”时出错。"
+
+ #~ msgid "Connection to %s:%d refused."
+ #~ msgstr "拒绝连接至 %s:%d。"
+
+ #~ msgid "Uncompressed planar YVU 4:2:0"
+ #~ msgstr "未压缩的平面 YVU 4:2:0"
+
+ #~ msgid "Uncompressed packed YUV 4:1:0"
+ #~ msgstr "未压缩的紧缩 YUV 4:1:0"
+
+ #~ msgid "Uncompressed packed YVU 4:1:0"
+ #~ msgstr "未压缩的紧缩 YVU 4:1:0"
+
+ #~ msgid "Uncompressed packed YUV 4:1:1"
+ #~ msgstr "未压缩的紧缩 YUV 4:1:1"
+
+ #~ msgid "Uncompressed packed YUV 4:4:4"
+ #~ msgstr "未压缩的紧缩 YUV 4:4:4"
+
+ #~ msgid "Uncompressed planar YUV 4:2:2"
+ #~ msgstr "未压缩的平面 YUV 4:2:2"
+
+ #~ msgid "Uncompressed planar YUV 4:1:1"
+ #~ msgstr "未压缩的平面 YUV 4:1:1"
+
+ #~ msgid "Uncompressed black and white Y-plane"
+ #~ msgstr "未压缩的黑白 Y-plane"
+
+ #~ msgid "Raw PCM audio"
+ #~ msgstr "原始 PCM 音频"
+
+ #~ msgid "Raw %d-bit floating-point audio"
+ #~ msgstr "原始 %d位浮点音频"
+
+ #~ msgid "Raw floating-point audio"
+ #~ msgstr "原始浮点音频"
+
+ #~ msgid "No device specified."
+ #~ msgstr "未指定设备。"
+
+ #~ msgid "Device \"%s\" does not exist."
+ #~ msgstr "设备“%s”不存在。"
+
+ #~ msgid "Device \"%s\" is already being used."
+ #~ msgstr "设备“%s”正被使用。"
+
+ #~ msgid "Could not open device \"%s\" for reading and writing."
+ #~ msgstr "无法打开设备“%s”读写。"
+
+ #~ msgid "Uncompressed %s YUV %s"
+ #~ msgstr "未压缩的 %s YUV %s"
+
+ #~ msgid "Both autovideosink and xvimagesink elements are missing."
+ #~ msgstr "缺少 autovideosink 和 xvimagesink 组件。"
+
+ #~ msgid "Both autoaudiosink and alsasink elements are missing."
+ #~ msgstr "缺少 autoaudiosink 和 alsasink 组件。"
+
+ #~ msgid "Could not create \"typefind\" element."
+ #~ msgstr "无法创建“typefind”组件。"
+
+ #~ msgid "Can't display both text subtitles and subpictures."
+ #~ msgstr "无法同时播放文本格式的子标题和子画面。"
+
+ #~ msgid "No Temp directory specified."
+ #~ msgstr "未指定临时目录。"
+
+ #~ msgid "Could not create temp file \"%s\"."
+ #~ msgstr "无法创建临时文件“%s”。"
+
+ #~ msgid "Internal data flow error."
+ #~ msgstr "内部数据流错误。"