libgstrawparse_la_SOURCES = \
gstrawparse.c \
gstunalignedaudioparse.c \
+ gstunalignedvideoparse.c \
gstaudioparse.c \
gstvideoparse.c \
+ gstrawbaseparse.c \
+ gstrawaudioparse.c \
+ gstrawvideoparse.c \
plugin.c
libgstrawparse_la_CFLAGS = \
$(GST_PLUGINS_BASE_CFLAGS) \
noinst_HEADERS = \
unalignedaudio.h \
+ unalignedvideo.h \
gstunalignedaudioparse.h \
+ gstunalignedvideoparse.h \
gstaudioparse.h \
gstrawparse.h \
- gstvideoparse.h
+ gstvideoparse.h \
+ gstrawbaseparse.h \
+ gstrawaudioparse.h \
+ gstrawvideoparse.h
--- /dev/null
+/* GStreamer
+ * Copyright (C) <2016> Carlos Rafael Giani <dv at pseudoterminal dot org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rawaudioparse
+ *
+ * This element parses incoming data as raw audio samples and timestamps it.
+ * It also handles seek queries in said raw audio data, and ensures that output
+ * buffers contain an integer number of samples, even if the input buffers don't.
+ * For example, with sample format S16LE and 2 channels, an input buffer of 411
+ * bytes contains 102.75 samples. rawaudioparse will then output 102 samples
+ * (= 408 bytes) and keep the remaining 3 bytes. These will then be prepended to
+ * the next input data.
+ *
+ * The element implements the properties and sink caps configuration as specified
+ * in the #GstRawBaseParse documentation. The properties configuration can be
+ * modified by using the sample-rate, num-channels, channel-positions, format,
+ * and pcm-format properties.
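+ *
+ * For example (the rawaudioparse pointer below is only a placeholder for an
+ * instance of this element), the properties configuration could be set up
+ * from application code with a plain g_object_set() call:
+ * |[
+ * // Configure the parser for 48 kHz stereo S16LE PCM
+ * // (the format property already defaults to pcm).
+ * g_object_set (G_OBJECT (rawaudioparse),
+ *     "pcm-format", GST_AUDIO_FORMAT_S16LE,
+ *     "sample-rate", 48000,
+ *     "num-channels", 2,
+ *     NULL);
+ * ]|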
+ *
+ * Currently, this parser supports raw data in a-law, mu-law, or linear PCM format.
+ *
+ * To facilitate operation with the unalignedaudioparse element, rawaudioparse
+ * supports the "audio/x-unaligned-raw" media type. This is treated identically to
+ * "audio/x-raw", except that it is used by source elements which do not guarantee
+ * that the buffers they push out are timestamped and contain an integer amount of
+ * samples (see the 411 bytes example above). By using a different media type, it
+ * is guaranteed that unalignedaudioparse is autoplugged, making sure that the
+ * autoplugged chain does not push unparsed content downstream. The source caps'
+ * media type with linear PCM data is always "audio/x-raw", even if the sink caps
+ * use "audio/x-unaligned-raw".
+ *
+ * The channel-positions property can be used to set explicit position information
+ * for each channel. If the array that is passed to this property does not match
+ * the number of channels indicated by num-channels, then said number of channels
+ * is updated to the array length. If channel-positions is NULL, then the default
+ * GStreamer positioning is used. This property is also useful for swapping left
+ * and right in a stereo signal for example.
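+ *
+ * A rough sketch of how an application might pass such an explicit position
+ * array (the rawaudioparse pointer is again only a placeholder):
+ * |[
+ * // Swap left and right in a stereo stream by passing explicit positions.
+ * // GValueArray is used here because that is the property's type.
+ * GValueArray *positions = g_value_array_new (2);
+ * GValue val = G_VALUE_INIT;
+ *
+ * g_value_init (&val, GST_TYPE_AUDIO_CHANNEL_POSITION);
+ * g_value_set_enum (&val, GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT);
+ * g_value_array_append (positions, &val);
+ * g_value_set_enum (&val, GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT);
+ * g_value_array_append (positions, &val);
+ * g_value_unset (&val);
+ *
+ * g_object_set (G_OBJECT (rawaudioparse), "channel-positions", positions, NULL);
+ * g_value_array_free (positions);
+ * ]|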
+ *
+ * <refsect2>
+ * <title>Example pipelines</title>
+ * |[
+ * gst-launch-1.0 souphttpsrc location=http://my-dlna-server/track.l16 ! \
+ * rawaudioparse use-sink-caps=true ! audioconvert ! audioresample ! autoaudiosink
+ * ]| Receive L16 data from a DLNA server, parse and timestamp it with
+ * rawaudioparse, and play it. use-sink-caps is set to true since souphttpsrc
+ * will set its source pad's caps to audio/x-unaligned-raw for the L16 stream.
+ * |[
+ * gst-launch-1.0 filesrc location=audio.raw ! rawaudioparse use-sink-caps=false \
+ * format=pcm pcm-format=s16le sample-rate=48000 num-channels=2 ! \
+ * audioconvert ! audioresample ! autoaudiosink
+ * ]| Read raw data from a local file and parse it as PCM data with 48000 Hz sample
+ * rate, signed 16 bit integer samples, and 2 channels. use-sink-caps is set to
+ * false to ensure the property information is used and the parser does not expect
+ * audio/x-raw or audio/x-unaligned-raw caps.
+ * </refsect2>
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+/* FIXME: GValueArray is deprecated, but there is currently no viable alternative.
+ * See https://bugzilla.gnome.org/show_bug.cgi?id=667228 */
+#define GLIB_DISABLE_DEPRECATION_WARNINGS
+
+#include <string.h>
+#include "gstrawaudioparse.h"
+#include "unalignedaudio.h"
+
+
+GST_DEBUG_CATEGORY_STATIC (raw_audio_parse_debug);
+#define GST_CAT_DEFAULT raw_audio_parse_debug
+
+
+enum
+{
+ PROP_0,
+ PROP_FORMAT,
+ PROP_PCM_FORMAT,
+ PROP_SAMPLE_RATE,
+ PROP_NUM_CHANNELS,
+ PROP_INTERLEAVED,
+ PROP_CHANNEL_POSITIONS
+};
+
+
+#define DEFAULT_FORMAT GST_RAW_AUDIO_PARSE_FORMAT_PCM
+#define DEFAULT_PCM_FORMAT GST_AUDIO_FORMAT_S16
+#define DEFAULT_SAMPLE_RATE 44100
+#define DEFAULT_NUM_CHANNELS 2
+#define DEFAULT_INTERLEAVED TRUE
+
+
+#define GST_RAW_AUDIO_PARSE_CAPS \
+ GST_AUDIO_CAPS_MAKE(GST_AUDIO_FORMATS_ALL) \
+ ", layout = (string) { interleaved, non-interleaved }; " \
+ "audio/x-alaw, rate = (int) [ 1, MAX ], channels = (int) [ 1, MAX ]; " \
+ "audio/x-mulaw, rate = (int) [ 1, MAX ], channels = (int) [ 1, MAX ]; "
+
+
+static GstStaticPadTemplate static_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_UNALIGNED_RAW_AUDIO_CAPS "; " GST_RAW_AUDIO_PARSE_CAPS)
+ );
+
+
+static GstStaticPadTemplate static_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_RAW_AUDIO_PARSE_CAPS)
+ );
+
+
+#define gst_raw_audio_parse_parent_class parent_class
+G_DEFINE_TYPE (GstRawAudioParse, gst_raw_audio_parse, GST_TYPE_RAW_BASE_PARSE);
+
+
+static void gst_raw_audio_parse_set_property (GObject * object, guint prop_id,
+ GValue const *value, GParamSpec * pspec);
+static void gst_raw_audio_parse_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static gboolean gst_raw_audio_parse_stop (GstBaseParse * parse);
+
+static gboolean gst_raw_audio_parse_set_current_config (GstRawBaseParse *
+ raw_base_parse, GstRawBaseParseConfig config);
+static GstRawBaseParseConfig
+gst_raw_audio_parse_get_current_config (GstRawBaseParse * raw_base_parse);
+static gboolean gst_raw_audio_parse_set_config_from_caps (GstRawBaseParse *
+ raw_base_parse, GstRawBaseParseConfig config, GstCaps * caps);
+static gboolean gst_raw_audio_parse_get_caps_from_config (GstRawBaseParse *
+ raw_base_parse, GstRawBaseParseConfig config, GstCaps ** caps);
+static gsize gst_raw_audio_parse_get_config_frame_size (GstRawBaseParse *
+ raw_base_parse, GstRawBaseParseConfig config);
+static gboolean gst_raw_audio_parse_is_config_ready (GstRawBaseParse *
+ raw_base_parse, GstRawBaseParseConfig config);
+static gboolean gst_raw_audio_parse_process (GstRawBaseParse * raw_base_parse,
+ GstRawBaseParseConfig config, GstBuffer * in_data, gsize total_num_in_bytes,
+ gsize num_valid_in_bytes, GstBuffer ** processed_data);
+static gboolean gst_raw_audio_parse_is_unit_format_supported (GstRawBaseParse *
+ raw_base_parse, GstFormat format);
+static void gst_raw_audio_parse_get_units_per_second (GstRawBaseParse *
+ raw_base_parse, GstFormat format, GstRawBaseParseConfig config,
+ gsize * units_per_sec_n, gsize * units_per_sec_d);
+
+static gboolean gst_raw_audio_parse_is_using_sink_caps (GstRawAudioParse *
+ raw_audio_parse);
+static GstRawAudioParseConfig
+ * gst_raw_audio_parse_get_config_ptr (GstRawAudioParse * raw_audio_parse,
+ GstRawBaseParseConfig config);
+
+static void gst_raw_audio_parse_init_config (GstRawAudioParseConfig * config);
+static gboolean gst_raw_audio_parse_set_config_channels (GstRawAudioParseConfig
+ * config, guint num_channels, guint64 channel_mask, gboolean set_positions);
+static gboolean
+gst_raw_audio_parse_update_channel_reordering_flag (GstRawAudioParseConfig *
+ config);
+static void gst_raw_audio_parse_update_config_bpf (GstRawAudioParseConfig *
+ config);
+static gboolean gst_raw_audio_parse_caps_to_config (GstRawAudioParse *
+ raw_audio_parse, GstCaps * caps, GstRawAudioParseConfig * config);
+static gboolean gst_raw_audio_parse_config_to_caps (GstRawAudioParse *
+ raw_audio_parse, GstCaps ** caps, GstRawAudioParseConfig * config);
+
+
+
+static void
+gst_raw_audio_parse_class_init (GstRawAudioParseClass * klass)
+{
+ GObjectClass *object_class;
+ GstElementClass *element_class;
+ GstBaseParseClass *baseparse_class;
+ GstRawBaseParseClass *rawbaseparse_class;
+
+ GST_DEBUG_CATEGORY_INIT (raw_audio_parse_debug, "rawaudioparse", 0,
+ "rawaudioparse element");
+
+ object_class = G_OBJECT_CLASS (klass);
+ element_class = GST_ELEMENT_CLASS (klass);
+ baseparse_class = GST_BASE_PARSE_CLASS (klass);
+ rawbaseparse_class = GST_RAW_BASE_PARSE_CLASS (klass);
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&static_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&static_src_template));
+
+ object_class->set_property =
+ GST_DEBUG_FUNCPTR (gst_raw_audio_parse_set_property);
+ object_class->get_property =
+ GST_DEBUG_FUNCPTR (gst_raw_audio_parse_get_property);
+
+ baseparse_class->stop = GST_DEBUG_FUNCPTR (gst_raw_audio_parse_stop);
+
+ rawbaseparse_class->set_current_config =
+ GST_DEBUG_FUNCPTR (gst_raw_audio_parse_set_current_config);
+ rawbaseparse_class->get_current_config =
+ GST_DEBUG_FUNCPTR (gst_raw_audio_parse_get_current_config);
+ rawbaseparse_class->set_config_from_caps =
+ GST_DEBUG_FUNCPTR (gst_raw_audio_parse_set_config_from_caps);
+ rawbaseparse_class->get_caps_from_config =
+ GST_DEBUG_FUNCPTR (gst_raw_audio_parse_get_caps_from_config);
+ rawbaseparse_class->get_config_frame_size =
+ GST_DEBUG_FUNCPTR (gst_raw_audio_parse_get_config_frame_size);
+ rawbaseparse_class->is_config_ready =
+ GST_DEBUG_FUNCPTR (gst_raw_audio_parse_is_config_ready);
+ rawbaseparse_class->process = GST_DEBUG_FUNCPTR (gst_raw_audio_parse_process);
+ rawbaseparse_class->is_unit_format_supported =
+ GST_DEBUG_FUNCPTR (gst_raw_audio_parse_is_unit_format_supported);
+ rawbaseparse_class->get_units_per_second =
+ GST_DEBUG_FUNCPTR (gst_raw_audio_parse_get_units_per_second);
+
+ g_object_class_install_property (object_class,
+ PROP_FORMAT,
+ g_param_spec_enum ("format",
+ "Format",
+ "Format of the raw audio stream",
+ gst_raw_audio_parse_format_get_type (),
+ GST_RAW_AUDIO_PARSE_FORMAT_PCM,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+ g_object_class_install_property (object_class,
+ PROP_PCM_FORMAT,
+ g_param_spec_enum ("pcm-format",
+ "PCM format",
+ "Format of audio samples in PCM stream (ignored if format property is not set to pcm)",
+ GST_TYPE_AUDIO_FORMAT,
+ DEFAULT_PCM_FORMAT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+ g_object_class_install_property (object_class,
+ PROP_SAMPLE_RATE,
+ g_param_spec_int ("sample-rate",
+ "Sample rate",
+ "Rate of audio samples in raw stream",
+ 1, INT_MAX,
+ DEFAULT_SAMPLE_RATE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+ g_object_class_install_property (object_class,
+ PROP_NUM_CHANNELS,
+ g_param_spec_int ("num-channels",
+ "Number of channels",
+ "Number of channels in raw stream",
+ 1, INT_MAX,
+ DEFAULT_NUM_CHANNELS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+ g_object_class_install_property (object_class,
+ PROP_INTERLEAVED,
+ g_param_spec_boolean ("interleaved",
+ "Interleaved layout",
+ "True if audio has interleaved layout",
+ DEFAULT_INTERLEAVED, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+ g_object_class_install_property (object_class,
+ PROP_CHANNEL_POSITIONS,
+ g_param_spec_value_array ("channel-positions",
+ "Channel positions",
+ "Channel positions used on the output",
+ g_param_spec_enum ("channel-position",
+ "Channel position",
+ "Channel position of the n-th input",
+ GST_TYPE_AUDIO_CHANNEL_POSITION,
+ GST_AUDIO_CHANNEL_POSITION_NONE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS),
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+
+ gst_element_class_set_static_metadata (element_class,
+ "rawaudioparse",
+ "Codec/Parser/Audio",
+ "Converts unformatted data streams into timestamped raw audio frames",
+ "Carlos Rafael Giani <dv@pseudoterminal.org>");
+}
+
+
+static void
+gst_raw_audio_parse_init (GstRawAudioParse * raw_audio_parse)
+{
+ /* Setup configs and select which one shall be the current one from the start. */
+ gst_raw_audio_parse_init_config (&(raw_audio_parse->properties_config));
+ gst_raw_audio_parse_init_config (&(raw_audio_parse->sink_caps_config));
+ /* As required by GstRawBaseParse, ensure that the current configuration
+ * is initially set to be the properties config */
+ raw_audio_parse->current_config = &(raw_audio_parse->properties_config);
+
+ /* Properties config must be valid from the start, so set its ready value
+ * to TRUE, and make sure its bpf value is valid. */
+ raw_audio_parse->properties_config.ready = TRUE;
+ gst_raw_audio_parse_update_config_bpf (&(raw_audio_parse->properties_config));
+}
+
+
+static void
+gst_raw_audio_parse_set_property (GObject * object, guint prop_id,
+ GValue const *value, GParamSpec * pspec)
+{
+ GstBaseParse *base_parse = GST_BASE_PARSE (object);
+ GstRawBaseParse *raw_base_parse = GST_RAW_BASE_PARSE (object);
+ GstRawAudioParse *raw_audio_parse = GST_RAW_AUDIO_PARSE (object);
+
+ /* All properties are handled similarly:
+ * - if the new value is the same as the current value, nothing is done
+ * - the parser lock is held while the new value is set
+ * - if the properties config is the current config, the source caps are
+ * invalidated to ensure that the code in handle_frame pushes a new CAPS
+ * event out
+ * - properties that affect the bpf value call the function to update
+ * the bpf and also call gst_base_parse_set_min_frame_size() to ensure
+ * that the minimum frame size can hold 1 frame (= one sample for each
+ * channel)
+ */
+
+ switch (prop_id) {
+ case PROP_FORMAT:
+ {
+ GstRawAudioParseFormat new_format = g_value_get_enum (value);
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+
+ if (new_format != raw_audio_parse->properties_config.format) {
+ raw_audio_parse->properties_config.format = new_format;
+ gst_raw_audio_parse_update_config_bpf (&
+ (raw_audio_parse->properties_config));
+
+ if (!gst_raw_audio_parse_is_using_sink_caps (raw_audio_parse)) {
+ gst_raw_base_parse_invalidate_src_caps (raw_base_parse);
+ gst_base_parse_set_min_frame_size (base_parse,
+ raw_audio_parse->properties_config.bpf);
+ }
+ }
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+ }
+
+ case PROP_PCM_FORMAT:
+ {
+ GstAudioFormat new_pcm_format = g_value_get_enum (value);
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+
+ if (new_pcm_format != raw_audio_parse->properties_config.pcm_format) {
+ raw_audio_parse->properties_config.pcm_format = new_pcm_format;
+ gst_raw_audio_parse_update_config_bpf (&
+ (raw_audio_parse->properties_config));
+
+ if (!gst_raw_audio_parse_is_using_sink_caps (raw_audio_parse)) {
+ gst_raw_base_parse_invalidate_src_caps (raw_base_parse);
+ gst_base_parse_set_min_frame_size (base_parse,
+ raw_audio_parse->properties_config.bpf);
+ }
+ }
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+ }
+
+ case PROP_SAMPLE_RATE:
+ {
+ guint new_sample_rate = g_value_get_int (value);
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+
+ if (new_sample_rate != raw_audio_parse->properties_config.sample_rate) {
+ raw_audio_parse->properties_config.sample_rate = new_sample_rate;
+
+ if (!gst_raw_audio_parse_is_using_sink_caps (raw_audio_parse))
+ gst_raw_base_parse_invalidate_src_caps (raw_base_parse);
+ }
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+ }
+
+ case PROP_NUM_CHANNELS:
+ {
+ guint new_num_channels = g_value_get_int (value);
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+
+ if (new_num_channels != raw_audio_parse->properties_config.num_channels) {
+ gst_raw_audio_parse_set_config_channels (&
+ (raw_audio_parse->properties_config), new_num_channels, 0, TRUE);
+
+ raw_audio_parse->properties_config.num_channels = new_num_channels;
+ gst_raw_audio_parse_update_config_bpf (&
+ (raw_audio_parse->properties_config));
+
+ if (!gst_raw_audio_parse_is_using_sink_caps (raw_audio_parse)) {
+ gst_raw_base_parse_invalidate_src_caps (raw_base_parse);
+ gst_base_parse_set_min_frame_size (base_parse,
+ raw_audio_parse->properties_config.bpf);
+ }
+ }
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+ }
+
+ case PROP_INTERLEAVED:
+ {
+ gboolean new_interleaved = g_value_get_boolean (value);
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+
+ if (new_interleaved != raw_audio_parse->properties_config.interleaved) {
+ raw_audio_parse->properties_config.interleaved = new_interleaved;
+
+ if (!gst_raw_audio_parse_is_using_sink_caps (raw_audio_parse))
+ gst_raw_base_parse_invalidate_src_caps (raw_base_parse);
+ }
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+ }
+
+ case PROP_CHANNEL_POSITIONS:
+ {
+ GValueArray *valarray = g_value_get_boxed (value);
+ GstRawAudioParseConfig *config = &(raw_audio_parse->properties_config);
+
+ /* Sanity check - reject empty arrays */
+ if ((valarray != NULL) && (valarray->n_values == 0)) {
+ GST_ELEMENT_ERROR (raw_audio_parse, LIBRARY, SETTINGS,
+ ("channel position property holds an empty array"), (NULL));
+ break;
+ }
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+
+ if ((valarray == NULL) && (config->num_channels > 0)) {
+ /* NULL value given, and number of channels is nonzero.
+ * Use the default GStreamer positioning. Call
+ * set_config_channels with the set_positions parameter
+ * set to TRUE to ensure the position values are filled. */
+ gst_raw_audio_parse_set_config_channels (&
+ (raw_audio_parse->properties_config), config->num_channels, 0,
+ TRUE);
+ } else {
+ /* Non-NULL value given. Make sure the channel_positions
+ * array in the properties config has enough room, and that
+ * the num_channels value equals the array length. Then copy
+ * the values from the valarray to channel_positions, and
+ * produce a copy of that array in case its channel positions
+ * are not in a valid GStreamer order (to be able to apply
+ * channel reordering later).
+ */
+
+ guint i;
+
+ if (valarray->n_values != config->num_channels) {
+ /* Call with set_positions == FALSE to ensure that
+ * the array is properly allocated but not filled
+ * (it is filled below) */
+ gst_raw_audio_parse_set_config_channels (config, valarray->n_values,
+ 0, FALSE);
+ }
+
+ for (i = 0; i < config->num_channels; ++i) {
+ GValue *val = g_value_array_get_nth (valarray, i);
+ config->channel_positions[i] = g_value_get_enum (val);
+ }
+
+ gst_raw_audio_parse_update_channel_reordering_flag (config);
+ }
+
+ gst_raw_audio_parse_update_config_bpf (&
+ (raw_audio_parse->properties_config));
+
+ if (!gst_raw_audio_parse_is_using_sink_caps (raw_audio_parse)) {
+ gst_raw_base_parse_invalidate_src_caps (raw_base_parse);
+ gst_base_parse_set_min_frame_size (base_parse,
+ raw_audio_parse->properties_config.bpf);
+ }
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+ }
+
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+
+static void
+gst_raw_audio_parse_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstRawAudioParse *raw_audio_parse = GST_RAW_AUDIO_PARSE (object);
+
+ switch (prop_id) {
+ case PROP_FORMAT:
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+ g_value_set_enum (value, raw_audio_parse->properties_config.format);
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+
+ case PROP_PCM_FORMAT:
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+ g_value_set_enum (value, raw_audio_parse->properties_config.pcm_format);
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+
+ case PROP_SAMPLE_RATE:
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+ g_value_set_int (value, raw_audio_parse->properties_config.sample_rate);
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+
+ case PROP_NUM_CHANNELS:
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+ g_value_set_int (value, raw_audio_parse->properties_config.num_channels);
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+
+ case PROP_INTERLEAVED:
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+ g_value_set_boolean (value,
+ raw_audio_parse->properties_config.interleaved);
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+
+ case PROP_CHANNEL_POSITIONS:
+ {
+ GstRawAudioParseConfig *config;
+ GValueArray *valarray;
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+
+ valarray = NULL;
+ config = &(raw_audio_parse->properties_config);
+
+ /* Copy channel positions into the valuearray */
+ if (config->num_channels > 0) {
+ guint i;
+ GValue val = G_VALUE_INIT;
+ g_assert (config->channel_positions);
+
+ g_value_init (&val, GST_TYPE_AUDIO_CHANNEL_POSITION);
+ valarray = g_value_array_new (config->num_channels);
+
+ for (i = 0; i < config->num_channels; ++i) {
+ g_value_set_enum (&val, config->channel_positions[i]);
+ g_value_array_insert (valarray, i, &val);
+ }
+
+ g_value_unset (&val);
+ }
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+
+ /* Pass on ownership to the value array,
+ * since we don't need it anymore */
+ g_value_take_boxed (value, valarray);
+
+ break;
+ }
+
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+
+static gboolean
+gst_raw_audio_parse_stop (GstBaseParse * parse)
+{
+ GstRawAudioParse *raw_audio_parse = GST_RAW_AUDIO_PARSE (parse);
+
+ /* Sink caps config is not ready until caps come in.
+ * We are stopping processing, the element is being reset,
+ * so the config has to be un-readied.
+ * (Since the properties config is not depending on caps,
+ * its ready status is always TRUE.) */
+ raw_audio_parse->sink_caps_config.ready = FALSE;
+
+ return GST_BASE_PARSE_CLASS (parent_class)->stop (parse);
+}
+
+
+static gboolean
+gst_raw_audio_parse_set_current_config (GstRawBaseParse * raw_base_parse,
+ GstRawBaseParseConfig config)
+{
+ GstRawAudioParse *raw_audio_parse = GST_RAW_AUDIO_PARSE (raw_base_parse);
+
+ switch (config) {
+ case GST_RAW_BASE_PARSE_CONFIG_PROPERTIES:
+ raw_audio_parse->current_config = &(raw_audio_parse->properties_config);
+ break;
+
+ case GST_RAW_BASE_PARSE_CONFIG_SINKCAPS:
+ raw_audio_parse->current_config = &(raw_audio_parse->sink_caps_config);
+ break;
+
+ default:
+ g_assert_not_reached ();
+ }
+
+ return TRUE;
+}
+
+
+static GstRawBaseParseConfig
+gst_raw_audio_parse_get_current_config (GstRawBaseParse * raw_base_parse)
+{
+ GstRawAudioParse *raw_audio_parse = GST_RAW_AUDIO_PARSE (raw_base_parse);
+ return gst_raw_audio_parse_is_using_sink_caps (raw_audio_parse) ?
+ GST_RAW_BASE_PARSE_CONFIG_SINKCAPS : GST_RAW_BASE_PARSE_CONFIG_PROPERTIES;
+}
+
+
+static gboolean
+gst_raw_audio_parse_set_config_from_caps (GstRawBaseParse * raw_base_parse,
+ GstRawBaseParseConfig config, GstCaps * caps)
+{
+ GstRawAudioParse *raw_audio_parse = GST_RAW_AUDIO_PARSE (raw_base_parse);
+ return gst_raw_audio_parse_caps_to_config (raw_audio_parse, caps,
+ gst_raw_audio_parse_get_config_ptr (raw_audio_parse, config));
+}
+
+
+static gboolean
+gst_raw_audio_parse_get_caps_from_config (GstRawBaseParse * raw_base_parse,
+ GstRawBaseParseConfig config, GstCaps ** caps)
+{
+ GstRawAudioParse *raw_audio_parse = GST_RAW_AUDIO_PARSE (raw_base_parse);
+ return gst_raw_audio_parse_config_to_caps (raw_audio_parse, caps,
+ gst_raw_audio_parse_get_config_ptr (raw_audio_parse, config));
+}
+
+
+static gsize
+gst_raw_audio_parse_get_config_frame_size (GstRawBaseParse * raw_base_parse,
+ GstRawBaseParseConfig config)
+{
+ GstRawAudioParse *raw_audio_parse = GST_RAW_AUDIO_PARSE (raw_base_parse);
+ return gst_raw_audio_parse_get_config_ptr (raw_audio_parse, config)->bpf;
+}
+
+
+static gboolean
+gst_raw_audio_parse_is_config_ready (GstRawBaseParse * raw_base_parse,
+ GstRawBaseParseConfig config)
+{
+ GstRawAudioParse *raw_audio_parse = GST_RAW_AUDIO_PARSE (raw_base_parse);
+ return gst_raw_audio_parse_get_config_ptr (raw_audio_parse, config)->ready;
+}
+
+
+static gboolean
+gst_raw_audio_parse_process (GstRawBaseParse * raw_base_parse,
+ GstRawBaseParseConfig config, GstBuffer * in_data, gsize total_num_in_bytes,
+ gsize num_valid_in_bytes, GstBuffer ** processed_data)
+{
+ GstRawAudioParse *raw_audio_parse = GST_RAW_AUDIO_PARSE (raw_base_parse);
+ GstRawAudioParseConfig *config_ptr =
+ gst_raw_audio_parse_get_config_ptr (raw_audio_parse, config);
+
+ if ((config_ptr->format == GST_RAW_AUDIO_PARSE_FORMAT_PCM)
+ && config_ptr->needs_channel_reordering) {
+ /* Need to reorder samples, since they are in an invalid
+ * channel order. */
+
+ GstBuffer *outbuf;
+
+ GST_LOG_OBJECT (raw_audio_parse,
+ "using %" G_GSIZE_FORMAT " bytes out of the %" G_GSIZE_FORMAT
+ " bytes from the input buffer with reordering", num_valid_in_bytes,
+ total_num_in_bytes);
+
+ outbuf =
+ gst_buffer_copy_region (in_data,
+ GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS |
+ GST_BUFFER_COPY_META | GST_BUFFER_COPY_MEMORY, 0, num_valid_in_bytes);
+
+ gst_audio_buffer_reorder_channels (outbuf,
+ config_ptr->pcm_format,
+ config_ptr->num_channels,
+ config_ptr->channel_positions, config_ptr->reordered_channel_positions);
+
+ *processed_data = outbuf;
+ } else {
+ /* Nothing needs to be done with the sample data.
+ * Instruct the baseparse class to just take out_size bytes
+ * from the input buffer */
+
+ GST_LOG_OBJECT (raw_audio_parse,
+ "using %" G_GSIZE_FORMAT " bytes out of the %" G_GSIZE_FORMAT
+ " bytes from the input buffer without reordering", num_valid_in_bytes,
+ total_num_in_bytes);
+
+ *processed_data = NULL;
+ }
+
+ return TRUE;
+}
+
+
+static gboolean
+gst_raw_audio_parse_is_unit_format_supported (G_GNUC_UNUSED GstRawBaseParse *
+ raw_base_parse, GstFormat format)
+{
+ switch (format) {
+ case GST_FORMAT_BYTES:
+ case GST_FORMAT_DEFAULT:
+ return TRUE;
+ default:
+ return FALSE;
+ }
+}
+
+
+static void
+gst_raw_audio_parse_get_units_per_second (GstRawBaseParse * raw_base_parse,
+ GstFormat format, GstRawBaseParseConfig config, gsize * units_per_sec_n,
+ gsize * units_per_sec_d)
+{
+ GstRawAudioParse *raw_audio_parse = GST_RAW_AUDIO_PARSE (raw_base_parse);
+ GstRawAudioParseConfig *config_ptr =
+ gst_raw_audio_parse_get_config_ptr (raw_audio_parse, config);
+
+ switch (format) {
+ case GST_FORMAT_BYTES:
+ *units_per_sec_n = config_ptr->sample_rate * config_ptr->bpf;
+ *units_per_sec_d = 1;
+ break;
+
+ case GST_FORMAT_DEFAULT:
+ *units_per_sec_n = config_ptr->sample_rate;
+ *units_per_sec_d = 1;
+ break;
+
+ default:
+ g_assert_not_reached ();
+ }
+}
+
+
+static gboolean
+gst_raw_audio_parse_is_using_sink_caps (GstRawAudioParse * raw_audio_parse)
+{
+ return raw_audio_parse->current_config ==
+ &(raw_audio_parse->sink_caps_config);
+}
+
+
+static GstRawAudioParseConfig *
+gst_raw_audio_parse_get_config_ptr (GstRawAudioParse * raw_audio_parse,
+ GstRawBaseParseConfig config)
+{
+ g_assert (raw_audio_parse->current_config != NULL);
+
+ switch (config) {
+ case GST_RAW_BASE_PARSE_CONFIG_PROPERTIES:
+ return &(raw_audio_parse->properties_config);
+
+ case GST_RAW_BASE_PARSE_CONFIG_SINKCAPS:
+ return &(raw_audio_parse->sink_caps_config);
+
+ default:
+ g_assert (raw_audio_parse->current_config != NULL);
+ return raw_audio_parse->current_config;
+ }
+}
+
+
+static void
+gst_raw_audio_parse_init_config (GstRawAudioParseConfig * config)
+{
+ config->ready = FALSE;
+ config->format = DEFAULT_FORMAT;
+ config->pcm_format = DEFAULT_PCM_FORMAT;
+ config->bpf = 0;
+ config->sample_rate = DEFAULT_SAMPLE_RATE;
+ config->num_channels = DEFAULT_NUM_CHANNELS;
+ config->interleaved = DEFAULT_INTERLEAVED;
+ config->needs_channel_reordering = FALSE;
+
+ gst_raw_audio_parse_set_config_channels (config, config->num_channels, 0,
+ TRUE);
+}
+
+
+static gboolean
+gst_raw_audio_parse_set_config_channels (GstRawAudioParseConfig * config,
+ guint num_channels, guint64 channel_mask, gboolean set_positions)
+{
+ g_assert (num_channels > 0);
+
+ config->num_channels = num_channels;
+ /* Setting this to FALSE, since initially, after setting the channels,
+ * the default GStreamer channel ordering is used. */
+ config->needs_channel_reordering = FALSE;
+
+ /* Set the channel positions based on the given channel mask if set_positions
+ * is set to TRUE. A channel mask of 0 signifies that a fallback mask should be
+ * used for the given number of channels. */
+ if (set_positions) {
+ if (channel_mask == 0)
+ channel_mask = gst_audio_channel_get_fallback_mask (config->num_channels);
+
+ return gst_audio_channel_positions_from_mask (config->num_channels,
+ channel_mask, config->channel_positions);
+ } else {
+ return TRUE;
+ }
+}
+
+
+static gboolean
+gst_raw_audio_parse_update_channel_reordering_flag (GstRawAudioParseConfig *
+ config)
+{
+ g_assert (config->num_channels > 0);
+
+ /* If the channel_positions array contains channel positions which are in an
+ * order that conforms to the valid GStreamer order, ensure that channel
+ * reordering is disabled.
+ * Otherwise, if the order of the positions in the channel_positions array
+ * does not conform to the GStreamer order, ensure it is enabled.
+ */
+
+ if (gst_audio_check_valid_channel_positions (config->channel_positions,
+ config->num_channels, TRUE)) {
+
+ config->needs_channel_reordering = FALSE;
+
+ return TRUE;
+ } else {
+ config->needs_channel_reordering = TRUE;
+ memcpy (config->reordered_channel_positions, config->channel_positions,
+ sizeof (GstAudioChannelPosition) * config->num_channels);
+ return
+ gst_audio_channel_positions_to_valid_order
+ (config->reordered_channel_positions, config->num_channels);
+ }
+}
+
+
+static void
+gst_raw_audio_parse_update_config_bpf (GstRawAudioParseConfig * config)
+{
+ switch (config->format) {
+ case GST_RAW_AUDIO_PARSE_FORMAT_PCM:
+ {
+ GstAudioFormatInfo const *fmt_info =
+ gst_audio_format_get_info (config->pcm_format);
+ g_assert (fmt_info != NULL);
+
+ config->bpf =
+ GST_AUDIO_FORMAT_INFO_WIDTH (fmt_info) * config->num_channels / 8;
+
+ break;
+ }
+
+ case GST_RAW_AUDIO_PARSE_FORMAT_ALAW:
+ case GST_RAW_AUDIO_PARSE_FORMAT_MULAW:
+ /* A-law and mu-law both use 1 byte per sample */
+ config->bpf = 1 * config->num_channels;
+ break;
+
+ default:
+ g_assert_not_reached ();
+ }
+}
+
+
+static gboolean
+gst_raw_audio_parse_caps_to_config (GstRawAudioParse * raw_audio_parse,
+ GstCaps * caps, GstRawAudioParseConfig * config)
+{
+ gboolean ret = FALSE;
+ GstStructure *structure;
+
+ /* Caps might get copied, and the copy needs to be unref'd.
+ * Also, the caller retains ownership over the original caps.
+ * So, to make this mechanism also work with cases where the
+ * caps are *not* copied, ref the original caps here first. */
+ gst_caps_ref (caps);
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ /* For unaligned raw data, the output caps stay the same,
+ * except that audio/x-unaligned-raw becomes audio/x-raw,
+ * since the parser aligns the sample data */
+ if (gst_structure_has_name (structure, "audio/x-unaligned-raw")) {
+ /* Copy the caps to be able to modify them */
+ GstCaps *new_caps = gst_caps_copy (caps);
+ gst_caps_unref (caps);
+ caps = new_caps;
+
+ /* Change the media type to audio/x-raw , otherwise
+ * gst_audio_info_from_caps() won't work */
+ structure = gst_caps_get_structure (caps, 0);
+ gst_structure_set_name (structure, "audio/x-raw");
+ }
+
+ if (gst_structure_has_name (structure, "audio/x-raw")) {
+ guint num_channels;
+ GstAudioInfo info;
+ if (!gst_audio_info_from_caps (&info, caps)) {
+ GST_ERROR_OBJECT (raw_audio_parse,
+ "failed to parse caps %" GST_PTR_FORMAT, (gpointer) caps);
+ goto done;
+ }
+
+ num_channels = GST_AUDIO_INFO_CHANNELS (&info);
+
+ config->format = GST_RAW_AUDIO_PARSE_FORMAT_PCM;
+ config->pcm_format = GST_AUDIO_INFO_FORMAT (&info);
+ config->bpf = GST_AUDIO_INFO_BPF (&info);
+ config->sample_rate = GST_AUDIO_INFO_RATE (&info);
+ config->interleaved =
+ (GST_AUDIO_INFO_LAYOUT (&info) == GST_AUDIO_LAYOUT_INTERLEAVED);
+
+ gst_raw_audio_parse_set_config_channels (config, num_channels, 0, FALSE);
+ memcpy (config->channel_positions, &(GST_AUDIO_INFO_POSITION (&info, 0)),
+ sizeof (GstAudioChannelPosition) * num_channels);
+ } else if (gst_structure_has_name (structure, "audio/x-alaw")
+ || gst_structure_has_name (structure, "audio/x-mulaw")) {
+ gint i;
+ guint64 channel_mask;
+ guint num_channels;
+
+ config->format =
+ gst_structure_has_name (structure,
+ "audio/x-alaw") ? GST_RAW_AUDIO_PARSE_FORMAT_ALAW :
+ GST_RAW_AUDIO_PARSE_FORMAT_MULAW;
+
+ if (!gst_structure_get_int (structure, "rate", &i)) {
+ GST_ERROR_OBJECT (raw_audio_parse,
+ "missing rate value in caps %" GST_PTR_FORMAT, (gpointer) caps);
+ goto done;
+ }
+ config->sample_rate = i;
+
+ if (!gst_structure_get_int (structure, "channels", &i)) {
+ GST_ERROR_OBJECT (raw_audio_parse,
+ "missing channels value in caps %" GST_PTR_FORMAT, (gpointer) caps);
+ goto done;
+ }
+ num_channels = i;
+
+ if (!gst_structure_get (structure, "channel-mask", GST_TYPE_BITMASK,
+ &channel_mask, NULL)) {
+ channel_mask = gst_audio_channel_get_fallback_mask (num_channels);
+ GST_DEBUG_OBJECT (raw_audio_parse,
+ "input caps have no channel mask - using fallback mask %#lx for %u channels",
+ channel_mask, num_channels);
+ }
+
+ if (!gst_raw_audio_parse_set_config_channels (config, num_channels,
+ channel_mask, TRUE)) {
+ GST_ERROR_OBJECT (raw_audio_parse,
+ "could not use channel mask %#lx for channel positions",
+ channel_mask);
+ goto done;
+ }
+
+ /* A-law and mu-law both use 1 byte per sample */
+ config->bpf = 1 * num_channels;
+ } else {
+ GST_ERROR_OBJECT (raw_audio_parse,
+ "caps %" GST_PTR_FORMAT " have an unsupported media type",
+ (gpointer) caps);
+ goto done;
+ }
+
+ ret = TRUE;
+
+done:
+ gst_caps_unref (caps);
+ if (ret)
+ config->ready = TRUE;
+ return ret;
+}
+
+
+static gboolean
+gst_raw_audio_parse_config_to_caps (GstRawAudioParse * raw_audio_parse,
+ GstCaps ** caps, GstRawAudioParseConfig * config)
+{
+ gboolean ret = TRUE;
+ GstAudioChannelPosition *channel_positions;
+
+ g_assert (caps != NULL);
+
+ if (config->bpf == 0) {
+ GST_ERROR_OBJECT (raw_audio_parse,
+ "cannot convert config to caps - config not filled with valid values");
+ *caps = NULL;
+ return FALSE;
+ }
+
+ channel_positions =
+ config->needs_channel_reordering ? &(config->
+ reordered_channel_positions[0]) : &(config->channel_positions[0]);
+
+ switch (config->format) {
+ case GST_RAW_AUDIO_PARSE_FORMAT_PCM:
+ {
+ GstAudioInfo info;
+ gst_audio_info_init (&info);
+ gst_audio_info_set_format (&info,
+ config->pcm_format,
+ config->sample_rate, config->num_channels, channel_positions);
+
+ *caps = gst_audio_info_to_caps (&info);
+
+ break;
+ }
+
+ case GST_RAW_AUDIO_PARSE_FORMAT_ALAW:
+ case GST_RAW_AUDIO_PARSE_FORMAT_MULAW:
+ {
+ guint64 channel_mask;
+
+ if (!gst_audio_channel_positions_to_mask (channel_positions,
+ config->num_channels, TRUE, &channel_mask)) {
+ GST_ERROR_OBJECT (raw_audio_parse, "invalid channel positions");
+ ret = FALSE;
+ break;
+ }
+
+ *caps = gst_caps_new_simple (
+ (config->format ==
+ GST_RAW_AUDIO_PARSE_FORMAT_ALAW) ? "audio/x-alaw" :
+ "audio/x-mulaw", "rate", G_TYPE_INT, config->sample_rate, "channels",
+ G_TYPE_INT, config->num_channels, "channel-mask", GST_TYPE_BITMASK,
+ channel_mask, NULL);
+
+ break;
+ }
+
+ default:
+ g_assert_not_reached ();
+ ret = FALSE;
+ }
+
+ if (!ret)
+ *caps = NULL;
+
+ return ret;
+}
+
+
+
+
+GType
+gst_raw_audio_parse_format_get_type (void)
+{
+ static GType audio_parse_format_gtype = 0;
+ static const GEnumValue types[] = {
+ {GST_RAW_AUDIO_PARSE_FORMAT_PCM, "PCM", "pcm"},
+ {GST_RAW_AUDIO_PARSE_FORMAT_ALAW, "A-Law", "alaw"},
+ {GST_RAW_AUDIO_PARSE_FORMAT_MULAW, "\302\265-Law", "mulaw"},
+ {0, NULL, NULL}
+ };
+
+ if (!audio_parse_format_gtype)
+ audio_parse_format_gtype =
+ g_enum_register_static ("GstRawAudioParseFormat", types);
+
+ return audio_parse_format_gtype;
+}
--- /dev/null
+/* GStreamer
+ * Copyright (C) <2016> Carlos Rafael Giani <dv at pseudoterminal dot org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RAW_AUDIO_PARSE_H__
+#define __GST_RAW_AUDIO_PARSE_H__
+
+#include <gst/gst.h>
+#include <gst/audio/audio.h>
+#include "gstrawbaseparse.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RAW_AUDIO_PARSE \
+ (gst_raw_audio_parse_get_type())
+#define GST_RAW_AUDIO_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_RAW_AUDIO_PARSE, GstRawAudioParse))
+#define GST_RAW_AUDIO_PARSE_CAST(obj) \
+ ((GstRawAudioParse *)(obj))
+#define GST_RAW_AUDIO_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_RAW_AUDIO_PARSE, GstRawAudioParseClass))
+#define GST_IS_RAW_AUDIO_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_RAW_AUDIO_PARSE))
+#define GST_IS_RAW_AUDIO_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_RAW_AUDIO_PARSE))
+
+
+typedef enum _GstRawAudioParseFormat GstRawAudioParseFormat;
+
+typedef struct _GstRawAudioParseConfig GstRawAudioParseConfig;
+typedef struct _GstRawAudioParse GstRawAudioParse;
+typedef struct _GstRawAudioParseClass GstRawAudioParseClass;
+
+
+enum _GstRawAudioParseFormat
+{
+ GST_RAW_AUDIO_PARSE_FORMAT_PCM,
+ GST_RAW_AUDIO_PARSE_FORMAT_MULAW,
+ GST_RAW_AUDIO_PARSE_FORMAT_ALAW
+};
+
+
+/* Contains information about the sample rate, format, and channel count to use. */
+struct _GstRawAudioParseConfig
+{
+ /* If TRUE, then this configuration is ready to use */
+ gboolean ready;
+ /* Format of the configuration. Can be PCM, a-law, mu-law. */
+ GstRawAudioParseFormat format;
+ /* If format is set to PCM, this specifies the exact PCM format in use.
+ * Meaningless if format is set to anything other than PCM. */
+ GstAudioFormat pcm_format;
+ /* Bytes per frame. Calculated as: bpf = bytes_per_sample * num_channels
+ * Must be nonzero. This is the size of one frame, the value returned
+ * by the GstRawBaseParseClass get_config_frame_size() vfunc. */
+ guint bpf;
+ /* Sample rate in Hz - must be nonzero */
+ guint sample_rate;
+ /* Number of channels - must be nonzero */
+ guint num_channels;
+ /* TRUE if the data is interleaved, FALSE otherwise */
+ gboolean interleaved;
+
+ /* Array of channel positions, one position per channel; its first
+ * num_channels values are valid. They are computed out of the number
+ * of channels if no positions are explicitly given. */
+ GstAudioChannelPosition channel_positions[64];
+
+ /* If the channel_positions are in a valid GStreamer channel order, then
+ * this is not used, and needs_channel_reordering is FALSE. Otherwise,
+ * this contains the same positions as in channel_positions, but in the
+ * order GStreamer expects. needs_channel_reordering will be TRUE in that
+ * case. This is used for reordering samples in outgoing buffers if
+ * necessary. */
+ GstAudioChannelPosition reordered_channel_positions[64];
+
+ /* TRUE if channel reordering is necessary, FALSE otherwise. See above
+ * for details. */
+ gboolean needs_channel_reordering;
+};
+
+
+struct _GstRawAudioParse
+{
+ GstRawBaseParse parent;
+
+ /*< private > */
+
+ /* Configuration controlled by the object properties. Its ready value
+ * is set to TRUE from the start, so it can be used right away.
+ */
+ GstRawAudioParseConfig properties_config;
+ /* Configuration controlled by the sink caps. Its ready value is
+ * initially set to FALSE until valid sink caps come in. It is set to
+ * FALSE again when the stream-start event is observed.
+ */
+ GstRawAudioParseConfig sink_caps_config;
+ /* Currently active configuration. Points either to properties_config
+ * or to sink_caps_config. This is never NULL. */
+ GstRawAudioParseConfig *current_config;
+};
+
+
+struct _GstRawAudioParseClass
+{
+ GstRawBaseParseClass parent_class;
+};
+
+
+GType gst_raw_audio_parse_get_type (void);
+GType gst_raw_audio_parse_format_get_type (void);
+
+
+G_END_DECLS
+
+#endif
--- /dev/null
+/* GStreamer
+ * Copyright (C) <2016> Carlos Rafael Giani <dv at pseudoterminal dot org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:gstrawbaseparse
+ * @short_description: Base class for raw media data parsers
+ *
+ * This base class is for parsers which read raw media data and output
+ * timestamped buffers with an integer number of frames inside.
+ *
+ * The format of the raw media data is specified in one of two ways: either,
+ * the information from the sink pad's caps is taken, or the information from
+ * the properties is used (this is chosen by the use-sink-caps property).
+ * These two ways are internally referred to as "configurations". The configuration
+ * that receives its information from the sink pad's caps is called the
+ * "sink caps configuration", while the one that depends on the information from
+ * the properties is the "properties configuration". Configurations have a
+ * "readiness". A configuration is "ready" when it contains valid information.
+ * For example, with an audio parser, a configuration is not ready unless it
+ * contains a valid sample rate, sample format, and channel count.
+ *
+ * The properties configuration must always be ready, even right from the start.
+ * Subclasses must ensure this. The underlying reason is that properties have valid
+ * values right from the start, and with the properties configuration, there is
+ * nothing that readies it before actual data is sent (unlike with the sink caps
+ * configuration, where a sink caps event will ready it before data is pushed
+ * downstream).
+ *
+ * It is possible to switch between the configurations during a stream by
+ * setting the use-sink-caps property. Subclasses typically allow for updating the
+ * properties configuration during a stream by setting the various properties
+ * (like sample-rate for a raw audio parser).
+ * In these cases, the parser will produce a new CAPS event and push it downstream
+ * to announce the caps for the new configuration. This also happens if the sink
+ * caps change.
+ *
+ * A common mistake when trying to parse raw data with no input caps (for example,
+ * a file with raw PCM samples when using rawaudioparse) is to forget to set the
+ * use-sink-caps property to FALSE. In this case, the parser will report an error
+ * when it tries to access the current configuration (because then the sink caps
+ * configuration will be the current one and it will not contain valid values
+ * since no sink caps were seen at this point).
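+ *
+ * For example, an application that feeds such headerless raw data could
+ * simply disable the sink caps configuration up front (the parser pointer
+ * below is only a placeholder for a rawaudioparse-style subclass instance):
+ * |[
+ * // Switch to the properties configuration so no sink caps are required.
+ * g_object_set (G_OBJECT (parser), "use-sink-caps", FALSE, NULL);
+ * ]|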
+ *
+ * Subclasses must ensure that the properties configuration is the default one.
+ *
+ * The sink caps configuration is mostly useful with push-based sources, because these
+ * will produce caps events and send them downstream. With pull-based sources, it is
+ * possible that this doesn't happen. Since the sink caps configuration requires a caps
+ * event to arrive at the sinkpad, this will cause the parser to fail then.
+ *
+ * The base class identifies the configurations by means of the GstRawBaseParseConfig
+ * enum. It instructs the subclass to switch between configurations this way, and
+ * also requests information about the current configuration, a configuration's
+ * frame size, its readiness, etc. Subclasses are not required to use any particular
+ * structure for the configuration implementations.
+ *
+ * Use the GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK and GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK
+ * macros to protect configuration modifications.
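+ *
+ * A minimal sketch of a property setter in a subclass that uses these macros
+ * (the type and property names are illustrative, not actual API):
+ * |[
+ * static void
+ * my_parser_set_property (GObject * object, guint prop_id,
+ *     GValue const * value, GParamSpec * pspec)
+ * {
+ *   MyParser *self = MY_PARSER (object);
+ *
+ *   switch (prop_id) {
+ *     case PROP_SAMPLE_RATE:
+ *       GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+ *       self->properties_config.sample_rate = g_value_get_int (value);
+ *       // The subclasses in this plugin additionally check that the
+ *       // properties configuration is the current one before invalidating
+ *       // the source caps, so a new CAPS event gets pushed downstream.
+ *       gst_raw_base_parse_invalidate_src_caps (GST_RAW_BASE_PARSE (object));
+ *       GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ *       break;
+ *     default:
+ *       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ *       break;
+ *   }
+ * }
+ * ]|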
+ *
+ * <listitem>
+ * <itemizedlist>
+ * <title>Summary of the subclass requirements</title>
+ * <listitem><para>
+ * Sink caps and properties configurations must both be
+ * implemented and supported. It must also be ensured that there is a
+ * "current" configuration.
+ * </para></listitem>
+ * Modifications to the configurations must be protected with the
+ * GstRawBaseParse lock. This is typically necessary when the
+ * properties configuration is modified by setting new property values.
+ * (Note that the lock is held during *all* vfunc calls.)
+ * <listitem><para>
+ * If the properties configuration is updated (typically by
+ * setting new property values), gst_raw_base_parse_invalidate_src_caps()
+ * must be called if the properties config is the current one. This is
+ * necessary to ensure that GstBaseParse pushes a new caps event downstream
+ * which contains caps from the updated configuration.
+ * </para></listitem>
+ * <listitem><para>
+ * In case there are bytes in each frame that aren't part of the actual
+ * payload, the get_overhead_size() vfunc must be defined, and the
+ * @get_config_frame_size() vfunc must return a frame size that includes
+ * the number of non-payload bytes (= the overhead). Otherwise, the
+ * timestamps will incorrectly include the overhead bytes.
+ * </para></listitem>
+ * </listitem>
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+#include "gstrawbaseparse.h"
+
+
+GST_DEBUG_CATEGORY_STATIC (raw_base_parse_debug);
+#define GST_CAT_DEFAULT raw_base_parse_debug
+
+
+enum
+{
+ PROP_0,
+ PROP_USE_SINK_CAPS
+};
+
+
+#define DEFAULT_USE_SINK_CAPS FALSE
+#define INITIAL_PARSER_CONFIG \
+ ((DEFAULT_USE_SINK_CAPS) ? GST_RAW_BASE_PARSE_CONFIG_SINKCAPS : \
+ GST_RAW_BASE_PARSE_CONFIG_PROPERTIES)
+
+
+#define gst_raw_base_parse_parent_class parent_class
+G_DEFINE_ABSTRACT_TYPE (GstRawBaseParse, gst_raw_base_parse,
+ GST_TYPE_BASE_PARSE);
+
+
+static void gst_raw_base_parse_finalize (GObject * object);
+static void gst_raw_base_parse_set_property (GObject * object, guint prop_id,
+ GValue const *value, GParamSpec * pspec);
+static void gst_raw_base_parse_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static gboolean gst_raw_base_parse_start (GstBaseParse * parse);
+static gboolean gst_raw_base_parse_stop (GstBaseParse * parse);
+static gboolean gst_raw_base_parse_set_sink_caps (GstBaseParse * parse,
+ GstCaps * caps);
+static GstFlowReturn gst_raw_base_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize);
+static gboolean gst_raw_base_parse_convert (GstBaseParse * parse,
+ GstFormat src_format, gint64 src_value, GstFormat dest_format,
+ gint64 * dest_value);
+
+static gboolean gst_raw_base_parse_is_using_sink_caps (GstRawBaseParse *
+ raw_base_parse);
+static gboolean gst_raw_base_parse_is_gstformat_supported (GstRawBaseParse *
+ raw_base_parse, GstFormat format);
+
+
+
+static void
+gst_raw_base_parse_class_init (GstRawBaseParseClass * klass)
+{
+ GObjectClass *object_class;
+ GstBaseParseClass *baseparse_class;
+
+ GST_DEBUG_CATEGORY_INIT (raw_base_parse_debug, "rawbaseparse", 0,
+ "raw base parse class");
+
+ object_class = G_OBJECT_CLASS (klass);
+ baseparse_class = GST_BASE_PARSE_CLASS (klass);
+
+ object_class->finalize = GST_DEBUG_FUNCPTR (gst_raw_base_parse_finalize);
+ object_class->set_property =
+ GST_DEBUG_FUNCPTR (gst_raw_base_parse_set_property);
+ object_class->get_property =
+ GST_DEBUG_FUNCPTR (gst_raw_base_parse_get_property);
+
+ baseparse_class->start = GST_DEBUG_FUNCPTR (gst_raw_base_parse_start);
+ baseparse_class->stop = GST_DEBUG_FUNCPTR (gst_raw_base_parse_stop);
+ baseparse_class->set_sink_caps =
+ GST_DEBUG_FUNCPTR (gst_raw_base_parse_set_sink_caps);
+ baseparse_class->handle_frame =
+ GST_DEBUG_FUNCPTR (gst_raw_base_parse_handle_frame);
+ baseparse_class->convert = GST_DEBUG_FUNCPTR (gst_raw_base_parse_convert);
+
+ /**
+ * GstRawBaseParse:use-sink-caps:
+ *
+ * Use sink caps configuration. If set to false, the parser
+ * will use the properties configuration instead. It is possible
+ * to switch between these during playback.
+ */
+ g_object_class_install_property (object_class,
+ PROP_USE_SINK_CAPS,
+ g_param_spec_boolean ("use-sink-caps",
+ "Use sink caps",
+ "Use the sink caps for defining the output format",
+ DEFAULT_USE_SINK_CAPS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+}
+
+
+static void
+gst_raw_base_parse_init (GstRawBaseParse * raw_base_parse)
+{
+ raw_base_parse->src_caps_set = FALSE;
+ g_mutex_init (&(raw_base_parse->config_mutex));
+}
+
+
+static void
+gst_raw_base_parse_finalize (GObject * object)
+{
+ GstRawBaseParse *raw_base_parse = GST_RAW_BASE_PARSE (object);
+
+ g_mutex_clear (&(raw_base_parse->config_mutex));
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+
+static void
+gst_raw_base_parse_set_property (GObject * object, guint prop_id,
+ GValue const *value, GParamSpec * pspec)
+{
+ GstBaseParse *base_parse = GST_BASE_PARSE (object);
+ GstRawBaseParse *raw_base_parse = GST_RAW_BASE_PARSE (object);
+ GstRawBaseParseClass *klass = GST_RAW_BASE_PARSE_GET_CLASS (object);
+
+ g_assert (klass->is_config_ready);
+ g_assert (klass->set_current_config);
+
+ switch (prop_id) {
+ case PROP_USE_SINK_CAPS:
+ {
+ gboolean new_state, cur_state;
+ GstRawBaseParseConfig new_config;
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+
+ /* Check to ensure nothing is done if the value stays the same */
+ new_state = g_value_get_boolean (value);
+ cur_state = gst_raw_base_parse_is_using_sink_caps (raw_base_parse);
+ if (new_state == cur_state) {
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+ }
+
+ GST_DEBUG_OBJECT (raw_base_parse, "switching to %s config",
+ new_state ? "sink caps" : "properties");
+ new_config =
+ new_state ? GST_RAW_BASE_PARSE_CONFIG_SINKCAPS :
+ GST_RAW_BASE_PARSE_CONFIG_PROPERTIES;
+
+ if (!klass->set_current_config (raw_base_parse, new_config)) {
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ GST_ELEMENT_ERROR (raw_base_parse, STREAM, FAILED,
+ ("could not set new current config"), ("use-sink-caps property: %d",
+ new_state));
+ break;
+ }
+
+ /* Update the minimum frame size if the config is ready. This ensures that
+ * the next buffer that is passed to handle_frame contains complete frames.
+ * If the current config is the properties config, then it will always be
+ * ready, and its frame size will be valid. Ensure that the baseparse minimum
+ * frame size is set properly then.
+ * If the current config is the sink caps config, then it will initially not
+ * be ready until the sink caps are set, so the minimum frame size cannot be
+ * set right here. However, since the caps always come in *before* the actual
+ * data, the config will be readied in the set_sink_caps function, and be ready
+ * by the time handle_frame is called. There, the minimum frame size is set as
+ * well. */
+ if (klass->is_config_ready (raw_base_parse,
+ GST_RAW_BASE_PARSE_CONFIG_CURRENT)) {
+ gsize frame_size = klass->get_config_frame_size (raw_base_parse,
+ GST_RAW_BASE_PARSE_CONFIG_CURRENT);
+ gst_base_parse_set_min_frame_size (base_parse, frame_size);
+ }
+
+ /* Since the current config was switched, the source caps change. Ensure the
+ * new caps are pushed downstream by setting src_caps_set to FALSE: This way,
+ * the next handle_frame call will take care of that. */
+ raw_base_parse->src_caps_set = FALSE;
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+
+ break;
+ }
+
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+
+static void
+gst_raw_base_parse_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstRawBaseParse *raw_base_parse = GST_RAW_BASE_PARSE (object);
+
+ switch (prop_id) {
+ case PROP_USE_SINK_CAPS:
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+ g_value_set_boolean (value,
+ gst_raw_base_parse_is_using_sink_caps (raw_base_parse));
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+
+static gboolean
+gst_raw_base_parse_start (GstBaseParse * parse)
+{
+ GstBaseParse *base_parse = GST_BASE_PARSE (parse);
+ GstRawBaseParse *raw_base_parse = GST_RAW_BASE_PARSE (parse);
+ GstRawBaseParseClass *klass = GST_RAW_BASE_PARSE_GET_CLASS (parse);
+
+ g_assert (klass->set_current_config);
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (raw_base_parse);
+
+ /* If the config is ready from the start, set the min frame size
+ * (this will happen with the properties config) */
+ if (klass->is_config_ready (raw_base_parse,
+ GST_RAW_BASE_PARSE_CONFIG_CURRENT)) {
+ gsize frame_size = klass->get_config_frame_size (raw_base_parse,
+ GST_RAW_BASE_PARSE_CONFIG_CURRENT);
+ gst_base_parse_set_min_frame_size (base_parse, frame_size);
+ }
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (raw_base_parse);
+
+ return TRUE;
+}
+
+
+static gboolean
+gst_raw_base_parse_stop (GstBaseParse * parse)
+{
+ GstRawBaseParse *raw_base_parse = GST_RAW_BASE_PARSE (parse);
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (raw_base_parse);
+ raw_base_parse->src_caps_set = FALSE;
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (raw_base_parse);
+
+ return TRUE;
+}
+
+
+static gboolean
+gst_raw_base_parse_set_sink_caps (GstBaseParse * parse, GstCaps * caps)
+{
+ gboolean ret = FALSE;
+ GstRawBaseParse *raw_base_parse = GST_RAW_BASE_PARSE (parse);
+ GstRawBaseParseClass *klass = GST_RAW_BASE_PARSE_GET_CLASS (parse);
+
+ g_assert (klass->set_config_from_caps);
+ g_assert (klass->get_caps_from_config);
+ g_assert (klass->get_config_frame_size);
+
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (raw_base_parse);
+
+ GST_DEBUG_OBJECT (parse, "getting config from new sink caps");
+
+ /* Convert the new sink caps to sink caps config. This also
+ * readies the config. */
+ ret =
+ klass->set_config_from_caps (raw_base_parse,
+ GST_RAW_BASE_PARSE_CONFIG_SINKCAPS, caps);
+ if (!ret) {
+ GST_ERROR_OBJECT (raw_base_parse, "could not get config from sink caps");
+ goto done;
+ }
+
+ /* If the sink caps config is currently active, push caps downstream,
+ * set the minimum frame size (to guarantee that input buffers hold
+ * complete frames), and update the src_caps_set flag. If the sink
+ * caps config isn't the currently active config, just exit, since in
+ * that case, the caps will always be pushed downstream in handle_frame. */
+ if (gst_raw_base_parse_is_using_sink_caps (raw_base_parse)) {
+ GstCaps *new_src_caps;
+ gsize frame_size;
+
+ GST_DEBUG_OBJECT (parse,
+ "sink caps config is the current one; trying to push new caps downstream");
+
+ /* Convert back to caps. The caps may have changed, for example
+ * audio/x-unaligned-raw may have been replaced with audio/x-raw.
+ * (Also, this keeps the behavior in sync with that of the block
+ * in handle_frame that pushes caps downstream if not done already.) */
+ if (!klass->get_caps_from_config (raw_base_parse,
+ GST_RAW_BASE_PARSE_CONFIG_CURRENT, &new_src_caps)) {
+ GST_ERROR_OBJECT (raw_base_parse,
+ "could not get src caps from current config");
+ goto done;
+ }
+
+ GST_DEBUG_OBJECT (raw_base_parse,
+ "got new sink caps; updating src caps to %" GST_PTR_FORMAT,
+ (gpointer) new_src_caps);
+
+ frame_size =
+ klass->get_config_frame_size (raw_base_parse,
+ GST_RAW_BASE_PARSE_CONFIG_CURRENT);
+ gst_base_parse_set_min_frame_size (parse, frame_size);
+
+ raw_base_parse->src_caps_set = TRUE;
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (raw_base_parse);
+
+ /* Push caps outside of the lock */
+ gst_pad_push_event (GST_BASE_PARSE_SRC_PAD (raw_base_parse),
+ gst_event_new_caps (new_src_caps)
+ );
+
+ gst_caps_unref (new_src_caps);
+ } else {
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (raw_base_parse);
+ }
+
+ ret = TRUE;
+
+done:
+ return ret;
+}
+
+
+static GstFlowReturn
+gst_raw_base_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize)
+{
+ gsize in_size, out_size;
+ guint frame_size;
+ guint num_out_frames;
+ gsize units_n, units_d;
+ guint64 buffer_duration;
+ GstFlowReturn flow_ret = GST_FLOW_OK;
+ GstEvent *new_caps_event = NULL;
+ GstRawBaseParse *raw_base_parse = GST_RAW_BASE_PARSE (parse);
+ GstRawBaseParseClass *klass = GST_RAW_BASE_PARSE_GET_CLASS (parse);
+
+ g_assert (klass->is_config_ready);
+ g_assert (klass->get_caps_from_config);
+ g_assert (klass->get_config_frame_size);
+ g_assert (klass->get_units_per_second);
+
+
+ /* We never skip any bytes this way. Instead, subclass takes care
+ * of skipping any overhead (necessary, since the way it needs to
+ * be skipped is completely subclass specific). */
+ *skipsize = 0;
+
+
+ /* The operations below access the current config. Protect
+ * against race conditions by using the object lock. */
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (raw_base_parse);
+
+
+ /* If the source pad caps haven't been set yet, or need to be
+ * set again, do so now, BEFORE any buffers are pushed out */
+ if (G_UNLIKELY (!raw_base_parse->src_caps_set)) {
+ GstCaps *new_src_caps;
+
+ if (G_UNLIKELY (!klass->is_config_ready (raw_base_parse,
+ GST_RAW_BASE_PARSE_CONFIG_CURRENT))) {
+ /* The current configuration is not ready. No caps can be
+ * generated out of it.
+ * The most likely reason for this is that the sink caps config
+ * is the current one and no valid sink caps have been pushed
+ * by upstream. Report the problem and exit. */
+
+ if (gst_raw_base_parse_is_using_sink_caps (raw_base_parse)) {
+ goto config_not_ready;
+ } else {
+ /* This should not be reached if the property config is active */
+ g_assert_not_reached ();
+ }
+ }
+
+ GST_DEBUG_OBJECT (parse,
+ "setting src caps since this has not been done yet");
+
+ /* Convert the current config to a caps structure to
+ * inform downstream about the new format */
+ if (!klass->get_caps_from_config (raw_base_parse,
+ GST_RAW_BASE_PARSE_CONFIG_CURRENT, &new_src_caps)) {
+ GST_ERROR_OBJECT (raw_base_parse,
+ "could not get src caps from current config");
+ flow_ret = GST_FLOW_NOT_NEGOTIATED;
+ goto error_locked;
+ }
+
+ new_caps_event = gst_event_new_caps (new_src_caps);
+ gst_caps_unref (new_src_caps);
+
+ raw_base_parse->src_caps_set = TRUE;
+ }
+
+ frame_size =
+ klass->get_config_frame_size (raw_base_parse,
+ GST_RAW_BASE_PARSE_CONFIG_CURRENT);
+
+
+ in_size = gst_buffer_get_size (frame->buffer);
+
+ /* gst_base_parse_set_min_frame_size() is called when the current
+ * configuration changes and the change affects the frame size. This
+ * means that a buffer must contain at least as many bytes as indicated
+ * by the frame size. If the buffer holds fewer bytes, an error occurred;
+ * either something in the parser went wrong, or the min frame size
+ * wasn't updated properly. */
+ g_assert (in_size >= frame_size);
+
+ /* Determine how many complete frames would fit in the input buffer.
+ * Then check if this amount exceeds the maximum number of frames
+ * as indicated by the subclass. */
+ num_out_frames = (in_size / frame_size);
+ if (klass->get_max_frames_per_buffer) {
+ guint max_num_out_frames = klass->get_max_frames_per_buffer (raw_base_parse,
+ GST_RAW_BASE_PARSE_CONFIG_CURRENT);
+ num_out_frames = MIN (num_out_frames, max_num_out_frames);
+ }
+
+ /* Ensure that the size of the buffers that get pushed downstream
+ * is always an integer multiple of the frame size to prevent cases
+ * where downstream gets buffers with incomplete frames. */
+ out_size = num_out_frames * frame_size;
+
+ /* Set the overhead size to ensure that timestamping excludes these
+ * extra overhead bytes. */
+ frame->overhead =
+ klass->get_overhead_size ? klass->get_overhead_size (raw_base_parse,
+ GST_RAW_BASE_PARSE_CONFIG_CURRENT) : 0;
+
+ g_assert (out_size >= (guint) (frame->overhead));
+ out_size -= frame->overhead;
+
+ GST_LOG_OBJECT (raw_base_parse,
+ "%" G_GSIZE_FORMAT " bytes input %" G_GSIZE_FORMAT
+ " bytes output (%u frame(s)) %d bytes overhead", in_size, out_size,
+ num_out_frames, frame->overhead);
+
+ /* Calculate buffer duration */
+ klass->get_units_per_second (raw_base_parse, GST_FORMAT_BYTES,
+ GST_RAW_BASE_PARSE_CONFIG_CURRENT, &units_n, &units_d);
+ if (units_n == 0 || units_d == 0)
+ buffer_duration = GST_CLOCK_TIME_NONE;
+ else
+ buffer_duration =
+ gst_util_uint64_scale (out_size, GST_SECOND * units_d, units_n);
+
+ if (klass->process) {
+ GstBuffer *processed_data = NULL;
+
+ if (!klass->process (raw_base_parse, GST_RAW_BASE_PARSE_CONFIG_CURRENT,
+ frame->buffer, in_size, out_size, &processed_data))
+ goto process_error;
+
+ frame->out_buffer = processed_data;
+ } else {
+ frame->out_buffer = NULL;
+ }
+
+ /* Set the duration of the output buffer, or if none exists, of
+ * the input buffer. Do this after the process() call, since in
+ * case out_buffer is set, the subclass has created a new buffer.
+ * Instead of requiring subclasses to set the duration (which
+ * anyway must always be buffer_duration), let's do it here. */
+ if (frame->out_buffer != NULL)
+ GST_BUFFER_DURATION (frame->out_buffer) = buffer_duration;
+ else
+ GST_BUFFER_DURATION (frame->buffer) = buffer_duration;
+
+ /* Access to the current config is not needed in subsequent
+ * operations, so the lock can be released */
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (raw_base_parse);
+
+
+ /* If any new caps have to be pushed downstream, do so
+ * *before* the frame is finished */
+ if (G_UNLIKELY (new_caps_event != NULL)) {
+ gst_pad_push_event (GST_BASE_PARSE_SRC_PAD (raw_base_parse),
+ new_caps_event);
+ new_caps_event = NULL;
+ }
+
+ gst_base_parse_finish_frame (parse, frame, out_size + frame->overhead);
+
+
+ return flow_ret;
+
+
+config_not_ready:
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (raw_base_parse);
+ GST_ELEMENT_ERROR (parse, STREAM, FORMAT,
+ ("sink caps config is the current config, and it is not ready -"
+ "upstream may not have pushed a caps event yet"), (NULL));
+ flow_ret = GST_FLOW_ERROR;
+ goto error_end;
+
+process_error:
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (raw_base_parse);
+ GST_ELEMENT_ERROR (parse, STREAM, DECODE, ("could not process data"), (NULL));
+ flow_ret = GST_FLOW_ERROR;
+ goto error_end;
+
+error_locked:
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (raw_base_parse);
+ goto error_end;
+
+error_end:
+ frame->flags |= GST_BASE_PARSE_FRAME_FLAG_DROP;
+ if (new_caps_event != NULL)
+ gst_event_unref (new_caps_event);
+ return flow_ret;
+}
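+
+/* A worked example for the buffer duration calculation in handle_frame above
+ * (a sketch, not taken from any particular subclass): assume the subclass
+ * reports 176400 units per second for GST_FORMAT_BYTES (for instance S16LE
+ * stereo at 44100 Hz, 4 bytes per frame) and out_size is 408 bytes. Then
+ * buffer_duration = gst_util_uint64_scale (408, GST_SECOND * 1, 176400),
+ * which is roughly 2.31 milliseconds. */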
+
+
+static gboolean
+gst_raw_base_parse_convert (GstBaseParse * parse, GstFormat src_format,
+ gint64 src_value, GstFormat dest_format, gint64 * dest_value)
+{
+ GstRawBaseParse *raw_base_parse = GST_RAW_BASE_PARSE (parse);
+ GstRawBaseParseClass *klass = GST_RAW_BASE_PARSE_GET_CLASS (parse);
+ gboolean ret = TRUE;
+ gsize units_n, units_d;
+
+ g_assert (klass->is_config_ready);
+ g_assert (klass->get_units_per_second);
+
+
+ /* The operations below access the current config. Protect
+ * against race conditions by using the object lock. */
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (raw_base_parse);
+
+
+ if (!klass->is_config_ready (raw_base_parse,
+ GST_RAW_BASE_PARSE_CONFIG_CURRENT)) {
+ if (gst_raw_base_parse_is_using_sink_caps (raw_base_parse)) {
+ goto config_not_ready;
+ } else {
+ /* This should not be reached if the property config is active */
+ g_assert_not_reached ();
+ }
+ }
+
+ if (G_UNLIKELY (src_format == dest_format)) {
+ *dest_value = src_value;
+ } else if ((src_format == GST_FORMAT_TIME
+ && gst_raw_base_parse_is_gstformat_supported (raw_base_parse,
+ dest_format))
+ || (dest_format == GST_FORMAT_TIME
+ && gst_raw_base_parse_is_gstformat_supported (raw_base_parse,
+ src_format))) {
+ /* Perform conversions here if either the src or dest format
+ * are GST_FORMAT_TIME and the other format is supported by
+ * the subclass. This is because we perform TIME<->non-TIME
+ * conversions here. Typically, subclasses only support
+ * BYTES and DEFAULT formats. */
+
+ if (src_format == GST_FORMAT_TIME) {
+ /* The source format is time, so perform a TIME -> non-TIME conversion */
+ klass->get_units_per_second (raw_base_parse, dest_format,
+ GST_RAW_BASE_PARSE_CONFIG_CURRENT, &units_n, &units_d);
+ *dest_value = (units_n == 0
+ || units_d == 0) ? src_value : gst_util_uint64_scale (src_value,
+ units_n, GST_SECOND * units_d);
+ } else {
+ /* The dest format is time, so perform a non-TIME -> TIME conversion */
+ klass->get_units_per_second (raw_base_parse, src_format,
+ GST_RAW_BASE_PARSE_CONFIG_CURRENT, &units_n, &units_d);
+ *dest_value = (units_n == 0
+ || units_d == 0) ? src_value : gst_util_uint64_scale (src_value,
+ GST_SECOND * units_d, units_n);
+ }
+ } else {
+ /* Fallback for other conversions */
+ ret =
+ gst_base_parse_convert_default (parse, src_format, src_value,
+ dest_format, dest_value);
+ }
+
+ GST_DEBUG_OBJECT (parse,
+ "converted %s -> %s %" G_GINT64_FORMAT " -> %" GST_TIME_FORMAT,
+ gst_format_get_name (src_format), gst_format_get_name (dest_format),
+ src_value, GST_TIME_ARGS (*dest_value));
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (raw_base_parse);
+ return ret;
+
+
+config_not_ready:
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (raw_base_parse);
+ GST_ELEMENT_ERROR (parse, STREAM, FORMAT,
+ ("sink caps config is the current config, and it is not ready - "
+ "upstream may not have pushed a caps event yet"), (NULL));
+ return FALSE;
+}
+
+
+static gboolean
+gst_raw_base_parse_is_using_sink_caps (GstRawBaseParse * raw_base_parse)
+{
+ /* must be called with lock */
+ GstRawBaseParseClass *klass = GST_RAW_BASE_PARSE_GET_CLASS (raw_base_parse);
+ g_assert (klass->get_current_config);
+ return klass->get_current_config (raw_base_parse) ==
+ GST_RAW_BASE_PARSE_CONFIG_SINKCAPS;
+}
+
+
+static gboolean
+gst_raw_base_parse_is_gstformat_supported (GstRawBaseParse * raw_base_parse,
+ GstFormat format)
+{
+ /* must be called with lock */
+ GstRawBaseParseClass *klass = GST_RAW_BASE_PARSE_GET_CLASS (raw_base_parse);
+ g_assert (klass->is_unit_format_supported);
+ return klass->is_unit_format_supported (raw_base_parse, format);
+}
+
+
+
+
+
+/**
+ * gst_raw_base_parse_invalidate_src_caps:
+ * @raw_base_parse: a #GstRawBaseParse instance
+ *
+ * Flags the current source caps as invalid. Before the next downstream
+ * buffer push, @get_caps_from_config is called, and the created caps are
+ * pushed downstream in a new caps event. This is used if, for example, the
+ * properties configuration is modified in the subclass.
+ *
+ * Note that this must be called with the parser lock held. Use the
+ * GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK() and GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK()
+ * macros for this purpose.
+ */
+void
+gst_raw_base_parse_invalidate_src_caps (GstRawBaseParse * raw_base_parse)
+{
+ /* must be called with lock */
+ g_assert (raw_base_parse != NULL);
+ raw_base_parse->src_caps_set = FALSE;
+}
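+
+/* A usage sketch (hypothetical subclass code; the type name, function name
+ * and field name are invented for illustration): a setter that modifies the
+ * properties configuration and invalidates the source caps while holding
+ * the parser lock, as required by the documentation above.
+ *
+ * |[
+ * static void
+ * my_parse_set_some_property (MyParse * my_parse, int new_value)
+ * {
+ *   GstRawBaseParse *raw_base_parse = GST_RAW_BASE_PARSE (my_parse);
+ *
+ *   GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (raw_base_parse);
+ *   my_parse->properties_config_value = new_value;
+ *   gst_raw_base_parse_invalidate_src_caps (raw_base_parse);
+ *   GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (raw_base_parse);
+ * }
+ * ]|
+ */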
--- /dev/null
+/* GStreamer
+ * Copyright (C) <2016> Carlos Rafael Giani <dv at pseudoterminal dot org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RAW_BASE_PARSE_H__
+#define __GST_RAW_BASE_PARSE_H__
+
+#include <gst/gst.h>
+#include <gst/base/base.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RAW_BASE_PARSE \
+ (gst_raw_base_parse_get_type())
+#define GST_RAW_BASE_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_RAW_BASE_PARSE, GstRawBaseParse))
+#define GST_RAW_BASE_PARSE_CAST(obj) \
+ ((GstRawBaseParse *)(obj))
+#define GST_RAW_BASE_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_RAW_BASE_PARSE, GstRawBaseParseClass))
+#define GST_RAW_BASE_PARSE_GET_CLASS(obj) \
+ (G_TYPE_INSTANCE_GET_CLASS((obj), GST_TYPE_RAW_BASE_PARSE, GstRawBaseParseClass))
+#define GST_IS_RAW_BASE_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_RAW_BASE_PARSE))
+#define GST_IS_RAW_BASE_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_RAW_BASE_PARSE))
+
+#define GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK(obj) g_mutex_lock(&(((GstRawBaseParse *)(obj))->config_mutex))
+#define GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK(obj) g_mutex_unlock(&(((GstRawBaseParse *)(obj))->config_mutex))
+
+
+typedef enum _GstRawBaseParseConfig GstRawBaseParseConfig;
+typedef struct _GstRawBaseParse GstRawBaseParse;
+typedef struct _GstRawBaseParseClass GstRawBaseParseClass;
+
+
+/**
+ * GstRawBaseParseConfig:
+ * @GST_RAW_BASE_PARSE_CONFIG_CURRENT: configuration that is currently active
+ * @GST_RAW_BASE_PARSE_CONFIG_SINKCAPS: configuration that is defined by the input sink caps
+ * @GST_RAW_BASE_PARSE_CONFIG_PROPERTIES: configuration that is defined by class properties
+ *
+ * Identifier for the type of parser configuration.
+ */
+enum _GstRawBaseParseConfig
+{
+ GST_RAW_BASE_PARSE_CONFIG_CURRENT = 1,
+ GST_RAW_BASE_PARSE_CONFIG_SINKCAPS,
+ GST_RAW_BASE_PARSE_CONFIG_PROPERTIES
+};
+
+
+/**
+ * GstRawBaseParse:
+ *
+ * The opaque #GstRawBaseParse data structure.
+ */
+struct _GstRawBaseParse
+{
+ GstBaseParse parent;
+
+ /*< private > */
+
+ /* TRUE if the source pad caps have been set already. This is used
+ * for checking if the source pad caps have to be set. */
+ gboolean src_caps_set;
+
+ /* Mutex which protects access to and modifications on the configs. */
+ GMutex config_mutex;
+};
+
+
+/**
+ * GstRawBaseParseClass:
+ * @parent_class: The parent class structure
+ * @set_current_config: Sets the new current configuration. Subclasses must internally
+ * switch to this new configuration. Return FALSE if this failed,
+ * TRUE otherwise.
+ * @get_current_config: Gets the current configuration. All return values
+ * except GST_RAW_BASE_PARSE_CONFIG_CURRENT are valid.
+ * @set_config_from_caps: Parses the caps and copies its information to the configuration.
+ * Returns FALSE if this failed, TRUE otherwise. Specified caps
+ * are not unref'd.
+ * @get_caps_from_config: Creates a new caps structure out of the information from the
+ * specified configuration. Ownership over the returned caps is
+ * transferred to the caller. If something fails during the caps
+ * creation, the vfunc must make sure to destroy any partially
+ * created caps; the *caps value is always set to NULL in case of
+ * failure. Returns FALSE in case of failure,
+ * TRUE in case of success.
+ * @get_config_frame_size: Gets the size of one frame, in bytes, from the specified
+ * configuration. This must be the size of the complete frame,
+ * including any overhead (metadata, headers, padding bytes etc.).
+ * @get_max_frames_per_buffer: Optional.
+ * Returns up to how many complete frames one output buffer may
+ * contain. The value must be nonzero. This is useful for example
+ * with video parsers which need to ensure that one output buffer
+ * contains only one video frame, even if the input buffer contains
+ * several complete frames. If this vfunc is not set, then there
+ * is no maximum number of frames per buffer - the parser reads
+ * as many complete frames as possible from the input buffer.
+ * @is_config_ready: Returns TRUE if the specified configuration is ready, FALSE
+ * otherwise.
+ * @process: Optional.
+ * This is useful to do any last minute processing before the
+ * data is pushed downstream. One example is channel reordering
+ * in audio parsers.
+ * in_data is the complete input buffer, total_num_in_bytes is
+ * the total amount of bytes this input buffer contains (including
+ * excess bytes that form an incomplete frame). num_valid_in_bytes
+ * is the subset of these bytes that are to be pushed downstream.
+ * If for example the frame size is 4, and total_num_in_bytes is
+ * 411, then num_valid_in_bytes will be 408, since the last 3
+ * bytes form an incomplete frame.
+ * The value of num_valid_in_bytes excludes the overhead bytes
+ * indicated by @get_overhead_size.
+ * If the subclass creates a new buffer here, *processed_data
+ * must be set to the new buffer's pointer. If the subclass does
+ * not create any new buffer, and just expects the first
+ * num_valid_in_bytes of the input buffer to be pushed downstream,
+ * then *processed_data must be set to NULL.
+ * If this vfunc is not set, then the parser behaves as if this
+ * vfunc set *processed_data to NULL.
+ * @is_unit_format_supported: Returns TRUE if the given format is supported by the
+ * @get_units_per_second function, FALSE otherwise.
+ * @get_units_per_second: Returns how many units per second exist for a given format.
+ * For example, with an audio parser and format DEFAULT, the units
+ * per second are typically the number of samples per second
+ * (= the sample rate). For video parsers, this would be the frame
+ * rate. If BYTES or TIME are used as format, then the result must
+ * not include any extra overhead (metadata, headers, padding etc.)
+ * @get_overhead_size: Optional.
+ * Returns the number of bytes that make up the portion of a frame
+ * that isn't payload. Examples are padding bytes, headers, and
+ * other kinds of metadata. If this vfunc isn't defined, then an
+ * overhead size of 0 bytes is assumed.
+ *
+ * Subclasses are required to override all vfuncs except for those marked as
+ * optional: @get_max_frames_per_buffer, @process, and @get_overhead_size.
+ * A sketch of a minimal @process implementation follows the struct below.
+ * The raw base parser lock is held during all vfunc calls.
+ */
+struct _GstRawBaseParseClass
+{
+ GstBaseParseClass parent_class;
+
+ gboolean (*set_current_config) (GstRawBaseParse *raw_base_parse,
+ GstRawBaseParseConfig config);
+ GstRawBaseParseConfig (*get_current_config) (GstRawBaseParse *raw_base_parse);
+
+ gboolean (*set_config_from_caps) (GstRawBaseParse * raw_base_parse,
+ GstRawBaseParseConfig config,
+ GstCaps * caps);
+ gboolean (*get_caps_from_config) (GstRawBaseParse * raw_base_parse,
+ GstRawBaseParseConfig config,
+ GstCaps ** caps);
+
+ gsize (*get_config_frame_size) (GstRawBaseParse * raw_base_parse,
+ GstRawBaseParseConfig config);
+ guint (*get_max_frames_per_buffer) (GstRawBaseParse * raw_base_parse,
+ GstRawBaseParseConfig config);
+
+ gboolean (*is_config_ready) (GstRawBaseParse * raw_base_parse,
+ GstRawBaseParseConfig config);
+
+ gboolean (*process) (GstRawBaseParse * raw_base_parse,
+ GstRawBaseParseConfig config,
+ GstBuffer * in_data,
+ gsize total_num_in_bytes,
+ gsize num_valid_in_bytes,
+ GstBuffer ** processed_data);
+
+ gboolean (*is_unit_format_supported) (GstRawBaseParse * raw_base_parse,
+ GstFormat format);
+ void (*get_units_per_second) (GstRawBaseParse * raw_base_parse,
+ GstFormat format,
+ GstRawBaseParseConfig config,
+ gsize * units_per_sec_n,
+ gsize * units_per_sec_d);
+
+ gint (*get_overhead_size) (GstRawBaseParse * raw_base_parse,
+ GstRawBaseParseConfig config);
+};
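+
+/* A minimal sketch of a @process implementation in a hypothetical subclass
+ * (the function name is invented for illustration). It leaves the data
+ * untouched and sets *processed_data to NULL, so the base class simply
+ * pushes the first num_valid_in_bytes of the input buffer downstream.
+ *
+ * |[
+ * static gboolean
+ * my_parse_process (GstRawBaseParse * raw_base_parse,
+ *     GstRawBaseParseConfig config, GstBuffer * in_data,
+ *     gsize total_num_in_bytes, gsize num_valid_in_bytes,
+ *     GstBuffer ** processed_data)
+ * {
+ *   *processed_data = NULL;
+ *   return TRUE;
+ * }
+ * ]|
+ */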
+
+
+void gst_raw_base_parse_invalidate_src_caps (GstRawBaseParse * raw_base_parse);
+
+
+GType gst_raw_base_parse_get_type (void);
+
+
+G_END_DECLS
+
+#endif
--- /dev/null
+/* GStreamer
+ * Copyright (C) <2016> Carlos Rafael Giani <dv at pseudoterminal dot org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rawvideoparse
+ *
+ * This element parses incoming data as raw video frames and timestamps these.
+ * It also handles seek queries in said raw video data, and ensures that output
+ * buffers contain exactly one frame, even if the input buffers contain only
+ * partial frames or multiple frames. In the former case, it will continue to
+ * receive buffers until there is enough input data to output one frame. In the
+ * latter case, it will extract the first frame in the buffer and output it, then
+ * the second one etc. until the remaining unparsed bytes aren't enough to form
+ * a complete frame, and it will then continue as described in the earlier case.
+ *
+ * The element implements the properties and sink caps configuration as specified
+ * in the #GstRawBaseParse documentation. The properties configuration can be
+ * modified by using the width, height, pixel-aspect-ratio, framerate, interlaced,
+ * top-field-first, plane-strides, plane-offsets, and frame-stride properties.
+ *
+ * If the properties configuration is used, be sure to set valid plane strides
+ * and plane offsets, otherwise the produced frames will not have a correct size.
+ * Merely setting the format is not enough.
+ *
+ * The frame stride property is useful in cases where there is extra data between
+ * the frames (for example, trailing metadata, or headers). The parser calculates
+ * the actual frame size out of the other properties and compares it with this
+ * frame-stride value. If the frame stride is larger than the calculated size,
+ * then the extra bytes after the end of the frame are skipped. For example, with
+ * 8-bit grayscale frames and a frame size of 100x10 pixels and a frame stride of
+ * 1500 bytes, there are 500 excess bytes at the end of the actual frame which
+ * are then skipped. It is safe to set the frame stride to a value that is smaller
+ * than the actual frame size (in fact, its default value is 0); if it is smaller,
+ * then no trailing data will be skipped.
+ *
+ * If a framerate of 0 Hz is set (for example, 0/1), then output buffers will have
+ * no duration set. The first output buffer will have a PTS of 0, and all
+ * subsequent ones an unset PTS.
+ *
+ * <refsect2>
+ * <title>Example pipelines</title>
+ * |[
+ * gst-launch-1.0 filesrc location=video.raw ! rawvideoparse use-sink-caps=false \
+ * width=500 height=400 format=y444 ! autovideosink
+ * ]| Read raw data from a local file and parse it as video data with 500x400 pixels
+ * and Y444 video format.
+ * |[
+ * gst-launch-1.0 filesrc location=video.raw ! queue ! "video/x-raw, width=320, \
+ * height=240, format=I420, framerate=1/1" ! rawvideoparse \
+ * use-sink-caps=true ! autovideosink
+ * ]| Read raw data from a local file and parse it as video data with 320x240 pixels
+ * and I420 video format. The queue element here is to force push based scheduling.
+ * See the documentation in #GstRawBaseParse for the reason why.
+ * </refsect2>
+ */
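+
+/* The plane-offsets and plane-strides properties are GValueArrays of uints.
+ * A sketch of how they could be filled from application code (the
+ * "rawvideoparse" variable stands for an instance of this element obtained
+ * elsewhere; the offset values shown are the I420 320x240 defaults):
+ *
+ * |[
+ * GValueArray *offsets = g_value_array_new (3);
+ * GValue v = G_VALUE_INIT;
+ * guint example_offsets[3] = { 0, 76800, 96000 };
+ * gint i;
+ *
+ * g_value_init (&v, G_TYPE_UINT);
+ * for (i = 0; i < 3; ++i) {
+ *   g_value_set_uint (&v, example_offsets[i]);
+ *   g_value_array_append (offsets, &v);
+ * }
+ * g_value_unset (&v);
+ *
+ * g_object_set (G_OBJECT (rawvideoparse), "plane-offsets", offsets, NULL);
+ * g_value_array_free (offsets);
+ * ]|
+ */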
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+/* FIXME: GValueArray is deprecated, but there is currently no viable alternative
+ * See https://bugzilla.gnome.org/show_bug.cgi?id=667228 */
+#define GLIB_DISABLE_DEPRECATION_WARNINGS
+
+#include <string.h>
+#include "gstrawvideoparse.h"
+#include "unalignedvideo.h"
+
+
+GST_DEBUG_CATEGORY_STATIC (raw_video_parse_debug);
+#define GST_CAT_DEFAULT raw_video_parse_debug
+
+
+enum
+{
+ PROP_0,
+ PROP_WIDTH,
+ PROP_HEIGHT,
+ PROP_FORMAT,
+ PROP_PIXEL_ASPECT_RATIO,
+ PROP_FRAMERATE,
+ PROP_INTERLACED,
+ PROP_TOP_FIELD_FIRST,
+ PROP_PLANE_STRIDES,
+ PROP_PLANE_OFFSETS,
+ PROP_FRAME_STRIDE
+};
+
+
+#define DEFAULT_WIDTH 320
+#define DEFAULT_HEIGHT 240
+#define DEFAULT_FORMAT GST_VIDEO_FORMAT_I420
+#define DEFAULT_PIXEL_ASPECT_RATIO_N 1
+#define DEFAULT_PIXEL_ASPECT_RATIO_D 1
+#define DEFAULT_FRAMERATE_N 25
+#define DEFAULT_FRAMERATE_D 1
+#define DEFAULT_INTERLACED FALSE
+#define DEFAULT_TOP_FIELD_FIRST FALSE
+#define DEFAULT_FRAME_STRIDE 0
+
+
+#define GST_RAW_VIDEO_PARSE_CAPS \
+ GST_VIDEO_CAPS_MAKE(GST_VIDEO_FORMATS_ALL) "; "
+
+
+static GstStaticPadTemplate static_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_UNALIGNED_RAW_VIDEO_CAPS "; " GST_RAW_VIDEO_PARSE_CAPS)
+ );
+
+
+static GstStaticPadTemplate static_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_RAW_VIDEO_PARSE_CAPS)
+ );
+
+
+#define gst_raw_video_parse_parent_class parent_class
+G_DEFINE_TYPE (GstRawVideoParse, gst_raw_video_parse, GST_TYPE_RAW_BASE_PARSE);
+
+
+static void gst_raw_video_parse_set_property (GObject * object, guint prop_id,
+ GValue const *value, GParamSpec * pspec);
+static void gst_raw_video_parse_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static gboolean gst_raw_video_parse_stop (GstBaseParse * parse);
+
+static gboolean gst_raw_video_parse_set_current_config (GstRawBaseParse *
+ raw_base_parse, GstRawBaseParseConfig config);
+static GstRawBaseParseConfig
+gst_raw_video_parse_get_current_config (GstRawBaseParse * raw_base_parse);
+static gboolean gst_raw_video_parse_set_config_from_caps (GstRawBaseParse *
+ raw_base_parse, GstRawBaseParseConfig config, GstCaps * caps);
+static gboolean gst_raw_video_parse_get_caps_from_config (GstRawBaseParse *
+ raw_base_parse, GstRawBaseParseConfig config, GstCaps ** caps);
+static gsize gst_raw_video_parse_get_config_frame_size (GstRawBaseParse *
+ raw_base_parse, GstRawBaseParseConfig config);
+static guint gst_raw_video_parse_get_max_frames_per_buffer (GstRawBaseParse *
+ raw_base_parse, GstRawBaseParseConfig config);
+static gboolean gst_raw_video_parse_is_config_ready (GstRawBaseParse *
+ raw_base_parse, GstRawBaseParseConfig config);
+static gboolean gst_raw_video_parse_process (GstRawBaseParse * raw_base_parse,
+ GstRawBaseParseConfig config, GstBuffer * in_data, gsize total_num_in_bytes,
+ gsize num_valid_in_bytes, GstBuffer ** processed_data);
+static gboolean gst_raw_video_parse_is_unit_format_supported (GstRawBaseParse *
+ raw_base_parse, GstFormat format);
+static void gst_raw_video_parse_get_units_per_second (GstRawBaseParse *
+ raw_base_parse, GstFormat format, GstRawBaseParseConfig config,
+ gsize * units_per_sec_n, gsize * units_per_sec_d);
+
+static gint gst_raw_video_parse_get_overhead_size (GstRawBaseParse *
+ raw_base_parse, GstRawBaseParseConfig config);
+
+static gboolean gst_raw_video_parse_is_using_sink_caps (GstRawVideoParse *
+ raw_video_parse);
+static GstRawVideoParseConfig
+ * gst_raw_video_parse_get_config_ptr (GstRawVideoParse * raw_video_parse,
+ GstRawBaseParseConfig config);
+
+static void gst_raw_video_parse_init_config (GstRawVideoParseConfig * config);
+static void gst_raw_video_parse_update_info (GstRawVideoParseConfig * config);
+
+
+
+static void
+gst_raw_video_parse_class_init (GstRawVideoParseClass * klass)
+{
+ GObjectClass *object_class;
+ GstElementClass *element_class;
+ GstBaseParseClass *baseparse_class;
+ GstRawBaseParseClass *rawbaseparse_class;
+
+ GST_DEBUG_CATEGORY_INIT (raw_video_parse_debug, "rawvideoparse", 0,
+ "rawvideoparse element");
+
+ object_class = G_OBJECT_CLASS (klass);
+ element_class = GST_ELEMENT_CLASS (klass);
+ baseparse_class = GST_BASE_PARSE_CLASS (klass);
+ rawbaseparse_class = GST_RAW_BASE_PARSE_CLASS (klass);
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&static_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&static_src_template));
+
+ object_class->set_property =
+ GST_DEBUG_FUNCPTR (gst_raw_video_parse_set_property);
+ object_class->get_property =
+ GST_DEBUG_FUNCPTR (gst_raw_video_parse_get_property);
+
+ baseparse_class->stop = GST_DEBUG_FUNCPTR (gst_raw_video_parse_stop);
+
+ rawbaseparse_class->set_current_config =
+ GST_DEBUG_FUNCPTR (gst_raw_video_parse_set_current_config);
+ rawbaseparse_class->get_current_config =
+ GST_DEBUG_FUNCPTR (gst_raw_video_parse_get_current_config);
+ rawbaseparse_class->set_config_from_caps =
+ GST_DEBUG_FUNCPTR (gst_raw_video_parse_set_config_from_caps);
+ rawbaseparse_class->get_caps_from_config =
+ GST_DEBUG_FUNCPTR (gst_raw_video_parse_get_caps_from_config);
+ rawbaseparse_class->get_config_frame_size =
+ GST_DEBUG_FUNCPTR (gst_raw_video_parse_get_config_frame_size);
+ rawbaseparse_class->get_max_frames_per_buffer =
+ GST_DEBUG_FUNCPTR (gst_raw_video_parse_get_max_frames_per_buffer);
+ rawbaseparse_class->is_config_ready =
+ GST_DEBUG_FUNCPTR (gst_raw_video_parse_is_config_ready);
+ rawbaseparse_class->process = GST_DEBUG_FUNCPTR (gst_raw_video_parse_process);
+ rawbaseparse_class->is_unit_format_supported =
+ GST_DEBUG_FUNCPTR (gst_raw_video_parse_is_unit_format_supported);
+ rawbaseparse_class->get_units_per_second =
+ GST_DEBUG_FUNCPTR (gst_raw_video_parse_get_units_per_second);
+ rawbaseparse_class->get_overhead_size =
+ GST_DEBUG_FUNCPTR (gst_raw_video_parse_get_overhead_size);
+
+ g_object_class_install_property (object_class,
+ PROP_WIDTH,
+ g_param_spec_int ("width",
+ "Width",
+ "Width of frames in raw stream",
+ 0, G_MAXINT, DEFAULT_WIDTH,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+ g_object_class_install_property (object_class,
+ PROP_HEIGHT,
+ g_param_spec_int ("height",
+ "Height",
+ "Height of frames in raw stream",
+ 0, G_MAXINT,
+ DEFAULT_HEIGHT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+ g_object_class_install_property (object_class,
+ PROP_FORMAT,
+ g_param_spec_enum ("format",
+ "Format",
+ "Format of frames in raw stream",
+ GST_TYPE_VIDEO_FORMAT,
+ DEFAULT_FORMAT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+ g_object_class_install_property (object_class,
+ PROP_FRAMERATE,
+ gst_param_spec_fraction ("framerate",
+ "Frame rate",
+ "Rate of frames in raw stream",
+ 0, 1, G_MAXINT, 1,
+ DEFAULT_FRAMERATE_N, DEFAULT_FRAMERATE_D,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+ g_object_class_install_property (object_class,
+ PROP_PIXEL_ASPECT_RATIO,
+ gst_param_spec_fraction ("pixel-aspect-ratio",
+ "Pixel aspect ratio",
+ "Pixel aspect ratio of frames in raw stream",
+ 1, 100, 100, 1,
+ DEFAULT_PIXEL_ASPECT_RATIO_N, DEFAULT_PIXEL_ASPECT_RATIO_D,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+ g_object_class_install_property (object_class,
+ PROP_INTERLACED,
+ g_param_spec_boolean ("interlaced",
+ "Interlaced flag",
+ "True if frames in raw stream are interlaced",
+ DEFAULT_INTERLACED, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+ g_object_class_install_property (object_class,
+ PROP_TOP_FIELD_FIRST,
+ g_param_spec_boolean ("top-field-first",
+ "Top field first",
+ "True if top field in frames in raw stream come first (not used if frames aren't interlaced)",
+ DEFAULT_INTERLACED, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+ g_object_class_install_property (object_class,
+ PROP_PLANE_STRIDES,
+ g_param_spec_value_array ("plane-strides",
+ "Plane strides",
+ "Strides of the planets in bytes",
+ g_param_spec_uint ("plane-stride",
+ "Plane stride",
+ "Stride of the n-th plane in bytes (0 = stride equals width*bytes-per-pixel)",
+ 0, G_MAXUINT,
+ 0,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS),
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+ g_object_class_install_property (object_class,
+ PROP_PLANE_OFFSETS,
+ g_param_spec_value_array ("plane-offsets",
+ "Plane offsets",
+ "Offsets of the planets in bytes",
+ g_param_spec_uint ("plane-offset",
+ "Plane offset",
+ "Offset of the n-th plane in bytes",
+ 0, G_MAXUINT,
+ 0,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS),
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+ g_object_class_install_property (object_class,
+ PROP_FRAME_STRIDE,
+ g_param_spec_uint ("frame-stride",
+ "Frame stride",
+ "Stride between whole frames (0 = frames are tightly packed together)",
+ 0, G_MAXUINT,
+ DEFAULT_FRAME_STRIDE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+
+ gst_element_class_set_static_metadata (element_class,
+ "rawvideoparse",
+ "Codec/Parser/Video",
+ "Converts unformatted data streams into timestamped raw video frames",
+ "Carlos Rafael Giani <dv@pseudoterminal.org>");
+}
+
+
+static void
+gst_raw_video_parse_init (GstRawVideoParse * raw_video_parse)
+{
+ gst_raw_video_parse_init_config (&(raw_video_parse->properties_config));
+ gst_raw_video_parse_init_config (&(raw_video_parse->sink_caps_config));
+
+ /* As required by GstRawBaseParse, ensure that the current configuration
+ * is initially set to be the properties config */
+ raw_video_parse->current_config = &(raw_video_parse->properties_config);
+
+ /* Properties config must be valid from the start, so set its ready value
+ * to TRUE, and fill in the remaining default values */
+ raw_video_parse->properties_config.ready = TRUE;
+ raw_video_parse->properties_config.top_field_first = DEFAULT_TOP_FIELD_FIRST;
+ raw_video_parse->properties_config.frame_stride = DEFAULT_FRAME_STRIDE;
+}
+
+
+static void
+gst_raw_video_parse_set_property (GObject * object, guint prop_id,
+ GValue const *value, GParamSpec * pspec)
+{
+ GstBaseParse *base_parse = GST_BASE_PARSE (object);
+ GstRawBaseParse *raw_base_parse = GST_RAW_BASE_PARSE (object);
+ GstRawVideoParse *raw_video_parse = GST_RAW_VIDEO_PARSE (object);
+ GstRawVideoParseConfig *props_cfg = &(raw_video_parse->properties_config);
+
+ /* All properties are handled similarly:
+ * - if the new value is the same as the current value, nothing is done
+ * - the parser lock is held while the new value is set
+ * - if the properties config is the current config, the source caps are
+ * invalidated to ensure that the code in handle_frame pushes a new CAPS
+ * event out
+ * - properties that affect the video frame size call the function to update
+ * the info and also call gst_base_parse_set_min_frame_size() to ensure
+ * that the minimum frame size can hold 1 complete video frame; to ensure
+ * that the min frame size includes any extra padding,
+ * it is set to the result of gst_raw_video_parse_get_config_frame_size()
+ * - property configuration values that require video info updates aren't
+ * written directly into the video info structure, but into the extra
+ * fields instead (gst_raw_video_parse_update_info() then copies the values
+ * from these fields into the video info); see the documentation inside
+ * gst_raw_video_parse_update_info() for the reason why
+ */
+
+ switch (prop_id) {
+ case PROP_WIDTH:
+ {
+ gint new_width = g_value_get_int (value);
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+
+ if (new_width != props_cfg->width) {
+ props_cfg->width = new_width;
+ gst_raw_video_parse_update_info (props_cfg);
+
+ if (!gst_raw_video_parse_is_using_sink_caps (raw_video_parse)) {
+ gst_raw_base_parse_invalidate_src_caps (raw_base_parse);
+ gst_base_parse_set_min_frame_size (base_parse,
+ gst_raw_video_parse_get_config_frame_size (raw_base_parse,
+ GST_RAW_BASE_PARSE_CONFIG_PROPERTIES));
+ }
+ }
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+ }
+
+ case PROP_HEIGHT:
+ {
+ gint new_height = g_value_get_int (value);
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+
+ if (new_height != props_cfg->height) {
+ props_cfg->height = new_height;
+ gst_raw_video_parse_update_info (props_cfg);
+
+ if (!gst_raw_video_parse_is_using_sink_caps (raw_video_parse)) {
+ gst_raw_base_parse_invalidate_src_caps (raw_base_parse);
+ gst_base_parse_set_min_frame_size (base_parse,
+ gst_raw_video_parse_get_config_frame_size (raw_base_parse,
+ GST_RAW_BASE_PARSE_CONFIG_PROPERTIES));
+ }
+ }
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+ }
+
+ case PROP_FORMAT:
+ {
+ GstVideoFormat new_format = g_value_get_enum (value);
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+
+ if (new_format != props_cfg->format) {
+ props_cfg->format = new_format;
+ gst_raw_video_parse_update_info (props_cfg);
+
+ if (!gst_raw_video_parse_is_using_sink_caps (raw_video_parse)) {
+ gst_raw_base_parse_invalidate_src_caps (raw_base_parse);
+ gst_base_parse_set_min_frame_size (base_parse,
+ gst_raw_video_parse_get_config_frame_size (raw_base_parse,
+ GST_RAW_BASE_PARSE_CONFIG_PROPERTIES));
+ }
+ }
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+ }
+
+ case PROP_PIXEL_ASPECT_RATIO:
+ {
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+
+ /* The pixel aspect ratio does not affect the video frame size,
+ * so it is just set directly without any updates */
+ props_cfg->pixel_aspect_ratio_n =
+ GST_VIDEO_INFO_PAR_N (&(props_cfg->info)) =
+ gst_value_get_fraction_numerator (value);
+ props_cfg->pixel_aspect_ratio_d =
+ GST_VIDEO_INFO_PAR_D (&(props_cfg->info)) =
+ gst_value_get_fraction_denominator (value);
+ GST_DEBUG_OBJECT (raw_video_parse, "setting pixel aspect ratio to %u/%u",
+ props_cfg->pixel_aspect_ratio_n, props_cfg->pixel_aspect_ratio_d);
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+ }
+
+ case PROP_FRAMERATE:
+ {
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+
+ /* The framerate does not affect the video frame size,
+ * so it is just set directly without any updates */
+ props_cfg->framerate_n = GST_VIDEO_INFO_FPS_N (&(props_cfg->info)) =
+ gst_value_get_fraction_numerator (value);
+ props_cfg->framerate_d = GST_VIDEO_INFO_FPS_D (&(props_cfg->info)) =
+ gst_value_get_fraction_denominator (value);
+ GST_DEBUG_OBJECT (raw_video_parse, "setting framerate to %u/%u",
+ props_cfg->framerate_n, props_cfg->framerate_d);
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+ }
+
+ case PROP_INTERLACED:
+ {
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+
+ /* Interlacing does not affect the video frame size,
+ * so it is just set directly without any updates */
+ props_cfg->interlaced = g_value_get_boolean (value);
+ GST_VIDEO_INFO_INTERLACE_MODE (&(props_cfg->info)) =
+ props_cfg->interlaced ? GST_VIDEO_INTERLACE_MODE_INTERLEAVED :
+ GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+
+ break;
+ }
+
+ case PROP_TOP_FIELD_FIRST:
+ {
+ /* The top-field-first flag is a detail related to
+ * interlacing, so no video info update is needed */
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+ props_cfg->top_field_first = g_value_get_boolean (value);
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+ }
+
+ case PROP_PLANE_STRIDES:
+ {
+ GValueArray *valarray = g_value_get_boxed (value);
+ guint n_planes;
+ guint i;
+
+ /* Sanity check - reject NULL and empty arrays */
+ if ((valarray == NULL) || (valarray->n_values == 0)) {
+ GST_ELEMENT_ERROR (raw_video_parse, LIBRARY, SETTINGS,
+ ("plane strides property holds an empty array"), (NULL));
+ break;
+ }
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+
+ n_planes = GST_VIDEO_INFO_N_PLANES (&(props_cfg->info));
+
+ /* Check that the valarray holds the right number of values */
+ if (valarray->n_values != n_planes) {
+ GST_ELEMENT_ERROR (raw_video_parse, LIBRARY, SETTINGS,
+ ("incorrect number of elements in plane strides property"),
+ ("expected: %u, got: %u", n_planes, valarray->n_values));
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+ }
+
+ /* Copy the values to the stride array */
+ for (i = 0; i < n_planes; ++i) {
+ GValue *val = g_value_array_get_nth (valarray, i);
+ props_cfg->plane_strides[i] = g_value_get_uint (val);
+ GST_DEBUG_OBJECT (raw_video_parse, "plane #%u stride: %d", i,
+ props_cfg->plane_strides[i]);
+ }
+
+ gst_raw_video_parse_update_info (props_cfg);
+
+ if (!gst_raw_video_parse_is_using_sink_caps (raw_video_parse))
+ gst_base_parse_set_min_frame_size (base_parse,
+ gst_raw_video_parse_get_config_frame_size (raw_base_parse,
+ GST_RAW_BASE_PARSE_CONFIG_PROPERTIES));
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+ }
+
+ case PROP_PLANE_OFFSETS:
+ {
+ GValueArray *valarray = g_value_get_boxed (value);
+ guint n_planes;
+ guint i;
+
+ /* Sanity check - reject NULL and empty arrays */
+ if ((valarray == NULL) || (valarray->n_values == 0)) {
+ GST_ELEMENT_ERROR (raw_video_parse, LIBRARY, SETTINGS,
+ ("plane offsets property holds an empty array"), (NULL));
+ break;
+ }
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+
+ n_planes = GST_VIDEO_INFO_N_PLANES (&(props_cfg->info));
+
+ /* Check that the valarray holds the right number of values */
+ if (valarray->n_values != n_planes) {
+ GST_ELEMENT_ERROR (raw_video_parse, LIBRARY, SETTINGS,
+ ("incorrect number of elements in plane offsets property"),
+ ("expected: %u, got: %u", n_planes, valarray->n_values));
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+ }
+
+ /* Copy the values to the offset array */
+ for (i = 0; i < n_planes; ++i) {
+ GValue *val = g_value_array_get_nth (valarray, i);
+ props_cfg->plane_offsets[i] = g_value_get_uint (val);
+ GST_DEBUG_OBJECT (raw_video_parse, "plane #%u offset: %" G_GSIZE_FORMAT,
+ i, props_cfg->plane_offsets[i]);
+ }
+
+ gst_raw_video_parse_update_info (props_cfg);
+
+ if (!gst_raw_video_parse_is_using_sink_caps (raw_video_parse))
+ gst_base_parse_set_min_frame_size (base_parse,
+ gst_raw_video_parse_get_config_frame_size (raw_base_parse,
+ GST_RAW_BASE_PARSE_CONFIG_PROPERTIES));
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+ }
+
+ case PROP_FRAME_STRIDE:
+ {
+ /* The frame stride does not change the video info itself, but it can
+ * enlarge the effective frame size, so the info and the minimum
+ * frame size are updated here */
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+ props_cfg->frame_stride = g_value_get_uint (value);
+ gst_raw_video_parse_update_info (props_cfg);
+ if (!gst_raw_video_parse_is_using_sink_caps (raw_video_parse))
+ gst_base_parse_set_min_frame_size (base_parse,
+ gst_raw_video_parse_get_config_frame_size (raw_base_parse,
+ GST_RAW_BASE_PARSE_CONFIG_PROPERTIES));
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+
+ break;
+ }
+
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+
+static void
+gst_raw_video_parse_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstRawVideoParse *raw_video_parse = GST_RAW_VIDEO_PARSE (object);
+ GstRawVideoParseConfig *props_cfg = &(raw_video_parse->properties_config);
+
+ switch (prop_id) {
+ case PROP_WIDTH:
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+ g_value_set_int (value, props_cfg->width);
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+
+ case PROP_HEIGHT:
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+ g_value_set_int (value, props_cfg->height);
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+
+ case PROP_FORMAT:
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+ g_value_set_enum (value, props_cfg->format);
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+
+ case PROP_PIXEL_ASPECT_RATIO:
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+ gst_value_set_fraction (value, props_cfg->pixel_aspect_ratio_n,
+ props_cfg->pixel_aspect_ratio_d);
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+
+ break;
+
+ case PROP_FRAMERATE:
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+ gst_value_set_fraction (value, props_cfg->framerate_n,
+ props_cfg->framerate_d);
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+
+ case PROP_INTERLACED:
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+ g_value_set_boolean (value, props_cfg->interlaced);
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+
+ case PROP_TOP_FIELD_FIRST:
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+ g_value_set_boolean (value, props_cfg->top_field_first);
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+
+ case PROP_PLANE_STRIDES:
+ {
+ guint i, n_planes;
+ GValue val = G_VALUE_INIT;
+ GValueArray *valarray;
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+
+ n_planes = GST_VIDEO_INFO_N_PLANES (&(props_cfg->info));
+ valarray = g_value_array_new (n_planes);
+ g_value_init (&val, G_TYPE_UINT);
+
+ for (i = 0; i < n_planes; ++i) {
+ g_value_set_uint (&val, props_cfg->plane_strides[i]);
+ g_value_array_insert (valarray, i, &val);
+ }
+
+ g_value_unset (&val);
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+
+ /* Pass on ownership to the value array,
+ * since we don't need it anymore */
+ g_value_take_boxed (value, valarray);
+ break;
+ }
+
+ case PROP_PLANE_OFFSETS:
+ {
+ guint i, n_planes;
+ GValue val = G_VALUE_INIT;
+ GValueArray *valarray;
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+
+ n_planes = GST_VIDEO_INFO_N_PLANES (&(props_cfg->info));
+ valarray = g_value_array_new (n_planes);
+ g_value_init (&val, G_TYPE_UINT);
+
+ for (i = 0; i < n_planes; ++i) {
+ g_value_set_uint (&val, props_cfg->plane_offsets[i]);
+ g_value_array_insert (valarray, i, &val);
+ }
+
+ g_value_unset (&val);
+
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+
+ /* Pass on ownership to the value array,
+ * since we don't need it anymore */
+ g_value_take_boxed (value, valarray);
+ break;
+ }
+
+ case PROP_FRAME_STRIDE:
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_LOCK (object);
+ g_value_set_uint (value, raw_video_parse->properties_config.frame_stride);
+ GST_RAW_BASE_PARSE_CONFIG_MUTEX_UNLOCK (object);
+ break;
+
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+
+static gboolean
+gst_raw_video_parse_stop (GstBaseParse * parse)
+{
+ GstRawVideoParse *raw_video_parse = GST_RAW_VIDEO_PARSE (parse);
+
+ /* Sink caps config is not ready until caps come in.
+ * We are stopping processing, the element is being reset,
+ * so the config has to be un-readied.
+ * (Since the properties config does not depend on caps,
+ * its ready status is always TRUE.) */
+ raw_video_parse->sink_caps_config.ready = FALSE;
+
+ return GST_BASE_PARSE_CLASS (parent_class)->stop (parse);
+}
+
+
+static gboolean
+gst_raw_video_parse_set_current_config (GstRawBaseParse * raw_base_parse,
+ GstRawBaseParseConfig config)
+{
+ GstRawVideoParse *raw_video_parse = GST_RAW_VIDEO_PARSE (raw_base_parse);
+
+ switch (config) {
+ case GST_RAW_BASE_PARSE_CONFIG_PROPERTIES:
+ raw_video_parse->current_config = &(raw_video_parse->properties_config);
+ break;
+
+ case GST_RAW_BASE_PARSE_CONFIG_SINKCAPS:
+ raw_video_parse->current_config = &(raw_video_parse->sink_caps_config);
+ break;
+
+ default:
+ g_assert_not_reached ();
+ }
+
+ return TRUE;
+}
+
+
+static GstRawBaseParseConfig
+gst_raw_video_parse_get_current_config (GstRawBaseParse * raw_base_parse)
+{
+ GstRawVideoParse *raw_video_parse = GST_RAW_VIDEO_PARSE (raw_base_parse);
+ return gst_raw_video_parse_is_using_sink_caps (raw_video_parse) ?
+ GST_RAW_BASE_PARSE_CONFIG_SINKCAPS : GST_RAW_BASE_PARSE_CONFIG_PROPERTIES;
+}
+
+
+static gboolean
+gst_raw_video_parse_set_config_from_caps (GstRawBaseParse * raw_base_parse,
+ GstRawBaseParseConfig config, GstCaps * caps)
+{
+ int i;
+ GstStructure *structure;
+ GstRawVideoParse *raw_video_parse = GST_RAW_VIDEO_PARSE (raw_base_parse);
+ GstRawVideoParseConfig *config_ptr =
+ gst_raw_video_parse_get_config_ptr (raw_video_parse, config);
+
+ g_assert (caps != NULL);
+
+ /* Caps might get copied, and the copy needs to be unref'd.
+ * Also, the caller retains ownership over the original caps.
+ * So, to make this mechanism also work with cases where the
+ * caps are *not* copied, ref the original caps here first. */
+ gst_caps_ref (caps);
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ /* For unaligned raw data, the output caps stay the same,
+ * except that video/x-unaligned-raw becomes video/x-raw,
+ * since the parser aligns the frame data */
+ if (gst_structure_has_name (structure, "video/x-unaligned-raw")) {
+ /* Copy the caps to be able to modify them */
+ GstCaps *new_caps = gst_caps_copy (caps);
+ gst_caps_unref (caps);
+ caps = new_caps;
+
+ /* Change the media type to video/x-raw , otherwise
+ * gst_video_info_from_caps() won't work */
+ structure = gst_caps_get_structure (caps, 0);
+ gst_structure_set_name (structure, "video/x-raw");
+ }
+
+ config_ptr->ready = gst_video_info_from_caps (&(config_ptr->info), caps);
+
+ if (config_ptr->ready) {
+ config_ptr->width = GST_VIDEO_INFO_WIDTH (&(config_ptr->info));
+ config_ptr->height = GST_VIDEO_INFO_HEIGHT (&(config_ptr->info));
+ config_ptr->pixel_aspect_ratio_n =
+ GST_VIDEO_INFO_PAR_N (&(config_ptr->info));
+ config_ptr->pixel_aspect_ratio_d =
+ GST_VIDEO_INFO_PAR_D (&(config_ptr->info));
+ config_ptr->framerate_n = GST_VIDEO_INFO_FPS_N (&(config_ptr->info));
+ config_ptr->framerate_d = GST_VIDEO_INFO_FPS_D (&(config_ptr->info));
+ config_ptr->interlaced = GST_VIDEO_INFO_IS_INTERLACED (&(config_ptr->info));
+ config_ptr->top_field_first = 0;
+ config_ptr->frame_stride = 0;
+
+ for (i = 0; i < GST_VIDEO_MAX_PLANES; ++i) {
+ config_ptr->plane_offsets[i] =
+ GST_VIDEO_INFO_PLANE_OFFSET (&(config_ptr->info), i);
+ config_ptr->plane_strides[i] =
+ GST_VIDEO_INFO_PLANE_STRIDE (&(config_ptr->info), i);
+ }
+ }
+
+ gst_caps_unref (caps);
+
+ return config_ptr->ready;
+}
+
+
+static gboolean
+gst_raw_video_parse_get_caps_from_config (GstRawBaseParse * raw_base_parse,
+ GstRawBaseParseConfig config, GstCaps ** caps)
+{
+ GstRawVideoParse *raw_video_parse = GST_RAW_VIDEO_PARSE (raw_base_parse);
+ GstRawVideoParseConfig *config_ptr =
+ gst_raw_video_parse_get_config_ptr (raw_video_parse, config);
+
+ g_assert (caps != NULL);
+
+ *caps = gst_video_info_to_caps (&(config_ptr->info));
+
+ return *caps != NULL;
+}
+
+
+static gsize
+gst_raw_video_parse_get_config_frame_size (GstRawBaseParse * raw_base_parse,
+ GstRawBaseParseConfig config)
+{
+ GstRawVideoParse *raw_video_parse = GST_RAW_VIDEO_PARSE (raw_base_parse);
+ GstRawVideoParseConfig *config_ptr =
+ gst_raw_video_parse_get_config_ptr (raw_video_parse, config);
+ return MAX (GST_VIDEO_INFO_SIZE (&(config_ptr->info)),
+ (gsize) (config_ptr->frame_stride));
+}
+
+
+static guint
+gst_raw_video_parse_get_max_frames_per_buffer (G_GNUC_UNUSED GstRawBaseParse *
+ raw_base_parse, G_GNUC_UNUSED GstRawBaseParseConfig config)
+{
+ /* We want exactly one frame per buffer */
+ return 1;
+}
+
+
+static gboolean
+gst_raw_video_parse_is_config_ready (GstRawBaseParse * raw_base_parse,
+ GstRawBaseParseConfig config)
+{
+ GstRawVideoParse *raw_video_parse = GST_RAW_VIDEO_PARSE (raw_base_parse);
+ return gst_raw_video_parse_get_config_ptr (raw_video_parse, config)->ready;
+}
+
+
+static gboolean
+gst_raw_video_parse_process (GstRawBaseParse * raw_base_parse,
+ GstRawBaseParseConfig config, GstBuffer * in_data,
+ G_GNUC_UNUSED gsize total_num_in_bytes,
+ G_GNUC_UNUSED gsize num_valid_in_bytes, GstBuffer ** processed_data)
+{
+ GstRawVideoParse *raw_video_parse = GST_RAW_VIDEO_PARSE (raw_base_parse);
+ GstRawVideoParseConfig *config_ptr =
+ gst_raw_video_parse_get_config_ptr (raw_video_parse, config);
+ guint frame_flags = 0;
+ GstVideoInfo *video_info = &(config_ptr->info);
+ GstVideoMeta *videometa;
+ GstBuffer *out_data;
+
+ /* In case of extra padding bytes, get a subbuffer without the padding bytes.
+ * Otherwise, just add the video meta. */
+ if (GST_VIDEO_INFO_SIZE (video_info) < config_ptr->frame_stride) {
+ *processed_data = out_data =
+ gst_buffer_copy_region (in_data,
+ GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS |
+ GST_BUFFER_COPY_MEMORY, 0, GST_VIDEO_INFO_SIZE (video_info));
+ } else {
+ out_data = in_data;
+ *processed_data = NULL;
+ }
+
+ if (config_ptr->interlaced) {
+ GST_BUFFER_FLAG_SET (out_data, GST_VIDEO_BUFFER_FLAG_INTERLACED);
+ frame_flags |= GST_VIDEO_FRAME_FLAG_INTERLACED;
+
+ if (config_ptr->top_field_first) {
+ GST_BUFFER_FLAG_SET (out_data, GST_VIDEO_BUFFER_FLAG_TFF);
+ frame_flags |= GST_VIDEO_FRAME_FLAG_TFF;
+ } else
+ GST_BUFFER_FLAG_UNSET (out_data, GST_VIDEO_BUFFER_FLAG_TFF);
+ }
+
+ /* Remove any existing videometa - it will be replaced by the new videometa
+ * from here */
+ while ((videometa = gst_buffer_get_video_meta (out_data))) {
+ GST_LOG_OBJECT (raw_base_parse, "removing existing videometa from buffer");
+ gst_buffer_remove_meta (out_data, (GstMeta *) videometa);
+ }
+
+ gst_buffer_add_video_meta_full (out_data,
+ frame_flags,
+ config_ptr->format,
+ config_ptr->width,
+ config_ptr->height,
+ GST_VIDEO_INFO_N_PLANES (video_info),
+ config_ptr->plane_offsets, config_ptr->plane_strides);
+
+
+ return TRUE;
+}
+
+
+static gboolean
+gst_raw_video_parse_is_unit_format_supported (G_GNUC_UNUSED GstRawBaseParse *
+ raw_base_parse, GstFormat format)
+{
+ switch (format) {
+ case GST_FORMAT_BYTES:
+ case GST_FORMAT_DEFAULT:
+ return TRUE;
+ default:
+ return FALSE;
+ }
+}
+
+
+static void
+gst_raw_video_parse_get_units_per_second (GstRawBaseParse * raw_base_parse,
+ GstFormat format, GstRawBaseParseConfig config, gsize * units_per_sec_n,
+ gsize * units_per_sec_d)
+{
+ GstRawVideoParse *raw_video_parse = GST_RAW_VIDEO_PARSE (raw_base_parse);
+ GstRawVideoParseConfig *config_ptr =
+ gst_raw_video_parse_get_config_ptr (raw_video_parse, config);
+
+ switch (format) {
+ case GST_FORMAT_BYTES:
+ {
+ gsize framesize = GST_VIDEO_INFO_SIZE (&(config_ptr->info));
+ gint64 n = framesize * config_ptr->framerate_n;
+ gint64 d = config_ptr->framerate_d;
+ gint64 common_div = gst_util_greatest_common_divisor_int64 (n, d);
+ GST_DEBUG_OBJECT (raw_video_parse,
+ "n: %" G_GINT64_FORMAT " d: %" G_GINT64_FORMAT " common divisor: %"
+ G_GINT64_FORMAT, n, d, common_div);
+
+ /* Divide numerator and denominator by greatest common divisor.
+ * This minimizes the risk of integer overflows in the baseparse class. */
+ *units_per_sec_n = n / common_div;
+ *units_per_sec_d = d / common_div;
+
+ break;
+ }
+
+ case GST_FORMAT_DEFAULT:
+ {
+ *units_per_sec_n = config_ptr->framerate_n;
+ *units_per_sec_d = config_ptr->framerate_d;
+ break;
+ }
+
+ default:
+ g_assert_not_reached ();
+ }
+}
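+
+/* For example, with the default properties configuration (I420, 320x240,
+ * 25/1 fps), the frame size computed by gst_raw_video_parse_update_info ()
+ * is 115200 bytes, so for GST_FORMAT_BYTES n = 115200 * 25 = 2880000 and
+ * d = 1 (the greatest common divisor is 1 here), while for
+ * GST_FORMAT_DEFAULT the result is simply 25/1. */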
+
+
+static gint
+gst_raw_video_parse_get_overhead_size (GstRawBaseParse * raw_base_parse,
+ GstRawBaseParseConfig config)
+{
+ GstRawVideoParse *raw_video_parse = GST_RAW_VIDEO_PARSE (raw_base_parse);
+ GstRawVideoParseConfig *config_ptr =
+ gst_raw_video_parse_get_config_ptr (raw_video_parse, config);
+ gint64 frame_size = GST_VIDEO_INFO_SIZE (&(config_ptr->info));
+ gint64 frame_stride = config_ptr->frame_stride;
+
+ /* In the video parser, the overhead is defined by the difference between
+ * the frame stride and the actual frame size. If the former is larger,
+ * then the additional bytes are considered padding bytes and get ignored
+ * by the base class. */
+
+ GST_LOG_OBJECT (raw_video_parse,
+ "frame size: %" G_GINT64_FORMAT " frame stride: %" G_GINT64_FORMAT,
+ frame_size, frame_stride);
+
+ return (frame_size < frame_stride) ? (gint) (frame_stride - frame_size) : 0;
+}
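+
+/* Example, matching the frame-stride description in the element docs: with
+ * 8-bit grayscale 100x10 frames (1000 bytes) and a frame stride of 1500
+ * bytes, the overhead is 1500 - 1000 = 500 bytes. */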
+
+
+static gboolean
+gst_raw_video_parse_is_using_sink_caps (GstRawVideoParse * raw_video_parse)
+{
+ return raw_video_parse->current_config ==
+ &(raw_video_parse->sink_caps_config);
+}
+
+
+static GstRawVideoParseConfig *
+gst_raw_video_parse_get_config_ptr (GstRawVideoParse * raw_video_parse,
+ GstRawBaseParseConfig config)
+{
+ g_assert (raw_video_parse->current_config != NULL);
+
+ switch (config) {
+ case GST_RAW_BASE_PARSE_CONFIG_PROPERTIES:
+ return &(raw_video_parse->properties_config);
+
+ case GST_RAW_BASE_PARSE_CONFIG_SINKCAPS:
+ return &(raw_video_parse->sink_caps_config);
+
+ default:
+ g_assert (raw_video_parse->current_config != NULL);
+ return raw_video_parse->current_config;
+ }
+}
+
+
+static void
+gst_raw_video_parse_init_config (GstRawVideoParseConfig * config)
+{
+ int i;
+
+ config->ready = FALSE;
+ config->width = DEFAULT_WIDTH;
+ config->height = DEFAULT_HEIGHT;
+ config->format = DEFAULT_FORMAT;
+ config->pixel_aspect_ratio_n = DEFAULT_PIXEL_ASPECT_RATIO_N;
+ config->pixel_aspect_ratio_d = DEFAULT_PIXEL_ASPECT_RATIO_D;
+ config->framerate_n = DEFAULT_FRAMERATE_N;
+ config->framerate_d = DEFAULT_FRAMERATE_D;
+ config->interlaced = DEFAULT_INTERLACED;
+
+ config->top_field_first = DEFAULT_TOP_FIELD_FIRST;
+ config->frame_stride = DEFAULT_FRAME_STRIDE;
+
+ gst_video_info_set_format (&(config->info), DEFAULT_FORMAT, DEFAULT_WIDTH,
+ DEFAULT_HEIGHT);
+ for (i = 0; i < GST_VIDEO_MAX_PLANES; ++i) {
+ config->plane_offsets[i] = GST_VIDEO_INFO_PLANE_OFFSET (&(config->info), i);
+ config->plane_strides[i] = GST_VIDEO_INFO_PLANE_STRIDE (&(config->info), i);
+ }
+}
+
+
+static void
+gst_raw_video_parse_update_info (GstRawVideoParseConfig * config)
+{
+ int i;
+ guint n_planes;
+ gsize last_plane_offset, last_plane_size;
+ GstVideoInfo *info = &(config->info);
+
+ gst_video_info_set_format (info, config->format, config->width,
+ config->height);
+
+ GST_VIDEO_INFO_PAR_N (info) = config->pixel_aspect_ratio_n;
+ GST_VIDEO_INFO_PAR_D (info) = config->pixel_aspect_ratio_d;
+ GST_VIDEO_INFO_FPS_N (info) = config->framerate_n;
+ GST_VIDEO_INFO_FPS_D (info) = config->framerate_d;
+ GST_VIDEO_INFO_INTERLACE_MODE (info) =
+ config->interlaced ? GST_VIDEO_INTERLACE_MODE_INTERLEAVED :
+ GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
+ for (i = 0; i < GST_VIDEO_MAX_PLANES; ++i) {
+ GST_VIDEO_INFO_PLANE_OFFSET (info, i) = config->plane_offsets[i];
+ GST_VIDEO_INFO_PLANE_STRIDE (info, i) = config->plane_strides[i];
+ }
+
+ n_planes = GST_VIDEO_INFO_N_PLANES (info);
+ if (n_planes < 1)
+ n_planes = 1;
+
+ last_plane_offset = GST_VIDEO_INFO_PLANE_OFFSET (info, n_planes - 1);
+ last_plane_size =
+ GST_VIDEO_INFO_PLANE_STRIDE (info,
+ n_planes - 1) * GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (info->finfo,
+ n_planes - 1, config->height);
+
+ GST_VIDEO_INFO_SIZE (info) = last_plane_offset + last_plane_size;
+
+ GST_DEBUG ("last plane offset: %" G_GSIZE_FORMAT " last plane size: %"
+ G_GSIZE_FORMAT " => frame size minus extra padding: %" G_GSIZE_FORMAT,
+ last_plane_offset, last_plane_size, GST_VIDEO_INFO_SIZE (info));
+}
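+
+/* Worked example (informational comment; figures taken from the unit tests
+ * added in this patch): with the 8x8 Y444 sink-caps configuration, each
+ * plane has a stride of 8 bytes and the plane offsets are 0, 64 and 128,
+ * so the frame size becomes 128 + 8 * 8 = 192 bytes. With the properties
+ * configuration (stride 10, plane offsets 0, 100 and 200), it becomes
+ * 200 + 10 * 8 = 280 bytes; anything between that and the configured frame
+ * stride is treated as padding by the overhead calculation above. */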
--- /dev/null
+/* GStreamer
+ * Copyright (C) <2016> Carlos Rafael Giani <dv at pseudoterminal dot org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RAW_VIDEO_PARSE_H__
+#define __GST_RAW_VIDEO_PARSE_H__
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+#include "gstrawbaseparse.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RAW_VIDEO_PARSE \
+ (gst_raw_video_parse_get_type())
+#define GST_RAW_VIDEO_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_RAW_VIDEO_PARSE, GstRawVideoParse))
+#define GST_RAW_VIDEO_PARSE_CAST(obj) \
+ ((GstRawVideoParse *)(obj))
+#define GST_RAW_VIDEO_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_RAW_VIDEO_PARSE, GstRawVideoParseClass))
+#define GST_IS_RAW_VIDEO_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_RAW_VIDEO_PARSE))
+#define GST_IS_RAW_VIDEO_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_RAW_VIDEO_PARSE))
+
+
+typedef struct _GstRawVideoParseConfig GstRawVideoParseConfig;
+typedef struct _GstRawVideoParse GstRawVideoParse;
+typedef struct _GstRawVideoParseClass GstRawVideoParseClass;
+
+
+/* Contains information about the video frame format. */
+struct _GstRawVideoParseConfig
+{
+ /* If TRUE, then this configuration is ready to use */
+ gboolean ready;
+
+ /* FIXME: These values should not be necessary, since there's
+ * GstVideoInfo. However, setting these values in the video
+ * info independently is currently difficult. For example,
+ * setting the video format requires the gst_video_info_set_format()
+ * function, but this function also overwrites plane strides
+ * and offsets. */
+ gint width, height;
+ GstVideoFormat format;
+ gint pixel_aspect_ratio_n, pixel_aspect_ratio_d;
+ gint framerate_n, framerate_d;
+ gboolean interlaced;
+ gsize plane_offsets[GST_VIDEO_MAX_PLANES];
+ gint plane_strides[GST_VIDEO_MAX_PLANES];
+
+ /* If TRUE, then TFF flags are added to outgoing buffers and
+ * their video metadata */
+ gboolean top_field_first;
+
+ /* Distance between the start of each frame, in bytes. If this value
+ * is larger than the actual size of a frame, then the extra bytes
+ * are skipped. For example, with frames that have 115200 bytes, a
+ * frame_stride value of 120000 means that 4800 trailing bytes are
+ * skipped after the 115200 frame bytes. This is useful to skip
+ * metadata in between frames. */
+ guint frame_stride;
+
+ GstVideoInfo info;
+};
+
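+/* Usage sketch (informational; "parse" stands for an already created
+ * rawvideoparse instance): the 120000-byte frame stride from the example
+ * above could be configured through the corresponding object property:
+ * |[
+ * g_object_set (G_OBJECT (parse), "frame-stride", 120000, NULL);
+ * ]|
+ */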
+
+struct _GstRawVideoParse
+{
+ GstRawBaseParse parent;
+
+ /*< private > */
+
+ /* Configuration controlled by the object properties. Its ready value
+ * is set to TRUE from the start, so it can be used right away.
+ */
+ GstRawVideoParseConfig properties_config;
+ /* Configuration controlled by the sink caps. Its ready value is
+ * initially set to FALSE until valid sink caps come in. It is set to
+ * FALSE again when the stream-start event is observed.
+ */
+ GstRawVideoParseConfig sink_caps_config;
+ /* Currently active configuration. Points either to properties_config
+ * or to sink_caps_config. This is never NULL. */
+ GstRawVideoParseConfig *current_config;
+};
+
+
+struct _GstRawVideoParseClass
+{
+ GstRawBaseParseClass parent_class;
+};
+
+
+GType gst_raw_video_parse_get_type (void);
+
+
+G_END_DECLS
+
+#endif
GstPad *ghostpad;
unaligned_audio_parse->inner_parser =
- gst_element_factory_make ("audioparse", "inner_parser");
+ gst_element_factory_make ("rawaudioparse", "inner_parser");
g_assert (unaligned_audio_parse->inner_parser != NULL);
g_object_set (G_OBJECT (unaligned_audio_parse->inner_parser),
--- /dev/null
+/* GStreamer
+ * Copyright (C) 2016 Carlos Rafael Giani <dv@pseudoterminal.org>
+ *
+ * gstunalignedvideoparse.c:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include <string.h>
+#include <stdio.h>
+#include <gst/gst.h>
+#include <gst/video/video.h>
+#include "gstunalignedvideoparse.h"
+#include "unalignedvideo.h"
+
+
+GST_DEBUG_CATEGORY (unaligned_video_parse_debug);
+#define GST_CAT_DEFAULT unaligned_video_parse_debug
+
+
+struct _GstUnalignedVideoParse
+{
+ GstBin parent;
+ GstElement *inner_parser;
+};
+
+
+struct _GstUnalignedVideoParseClass
+{
+ GstBinClass parent_class;
+};
+
+
+static GstStaticPadTemplate static_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_UNALIGNED_RAW_VIDEO_CAPS)
+ );
+
+
+static GstStaticPadTemplate static_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (GST_VIDEO_FORMATS_ALL))
+ );
+
+
+G_DEFINE_TYPE (GstUnalignedVideoParse, gst_unaligned_video_parse, GST_TYPE_BIN);
+
+
+static void
+gst_unaligned_video_parse_class_init (GstUnalignedVideoParseClass * klass)
+{
+ GstElementClass *element_class;
+
+ GST_DEBUG_CATEGORY_INIT (unaligned_video_parse_debug, "unalignedvideoparse",
+ 0, "Unaligned raw video parser");
+
+ element_class = GST_ELEMENT_CLASS (klass);
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&static_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&static_src_template));
+
+ gst_element_class_set_static_metadata (element_class,
+ "unalignedvideoparse",
+ "Codec/Parser/Bin/Video",
+ "Parse unaligned raw video data",
+ "Carlos Rafael Giani <dv@pseudoterminal.org>");
+}
+
+
+static void
+gst_unaligned_video_parse_init (GstUnalignedVideoParse * unaligned_video_parse)
+{
+ GstPad *inner_pad;
+ GstPad *ghostpad;
+
+ unaligned_video_parse->inner_parser =
+ gst_element_factory_make ("rawvideoparse", "inner_parser");
+ g_assert (unaligned_video_parse->inner_parser != NULL);
+
+ g_object_set (G_OBJECT (unaligned_video_parse->inner_parser),
+ "use-sink-caps", TRUE, NULL);
+
+ gst_bin_add (GST_BIN (unaligned_video_parse),
+ unaligned_video_parse->inner_parser);
+
+ inner_pad =
+ gst_element_get_static_pad (unaligned_video_parse->inner_parser, "sink");
+ ghostpad =
+ gst_ghost_pad_new_from_template ("sink", inner_pad,
+ gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS
+ (unaligned_video_parse), "sink"));
+ gst_element_add_pad (GST_ELEMENT (unaligned_video_parse), ghostpad);
+ gst_object_unref (GST_OBJECT (inner_pad));
+
+ inner_pad = gst_element_get_static_pad (unaligned_video_parse->inner_parser,
+ "src");
+ ghostpad =
+ gst_ghost_pad_new_from_template ("src", inner_pad,
+ gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS
+ (unaligned_video_parse), "src"));
+ gst_element_add_pad (GST_ELEMENT (unaligned_video_parse), ghostpad);
+ gst_object_unref (GST_OBJECT (inner_pad));
+}
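+
+
+/* Informational note on the resulting topology: unalignedvideoparse is a
+ * thin bin around a rawvideoparse instance,
+ *
+ *   unalignedvideoparse (bin)
+ *     "sink" ghost pad ---> rawvideoparse sink pad
+ *     "src"  ghost pad <--- rawvideoparse src pad
+ *
+ * with the inner parser switched to "use-sink-caps" mode, so the frame
+ * layout is taken from the caps negotiated on the bin's sink ghost pad
+ * rather than from rawvideoparse's properties. */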
--- /dev/null
+/* GStreamer
+ * Copyright (C) 2016 Carlos Rafael Giani <dv@pseudoterminal.org>
+ *
+ * gstunalignedvideoparse.h:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_UNALIGNED_VIDEO_PARSE_H__
+#define __GST_UNALIGNED_VIDEO_PARSE_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_UNALIGNED_VIDEO_PARSE \
+ (gst_unaligned_video_parse_get_type())
+#define GST_UNALIGNED_VIDEO_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_UNALIGNED_VIDEO_PARSE, GstUnalignedVideoParse))
+#define GST_UNALIGNED_VIDEO_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_UNALIGNED_VIDEO_PARSE, GstUnalignedVideoParseClass))
+#define GST_UNALIGNED_VIDEO_PARSE_CAST(obj) \
+ ((GstUnalignedVideoParse *)(obj))
+#define GST_IS_UNALIGNED_VIDEO_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_UNALIGNED_VIDEO_PARSE))
+#define GST_IS_UNALIGNED_VIDEO_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_UNALIGNED_VIDEO_PARSE))
+
+typedef struct _GstUnalignedVideoParse GstUnalignedVideoParse;
+typedef struct _GstUnalignedVideoParseClass GstUnalignedVideoParseClass;
+
+GType gst_unaligned_video_parse_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_UNALIGNED_VIDEO_PARSE_H__ */
#endif
#include <gst/gst.h>
+#include "gstrawaudioparse.h"
+#include "gstrawvideoparse.h"
#include "gstunalignedaudioparse.h"
+#include "gstunalignedvideoparse.h"
#include "gstaudioparse.h"
#include "gstvideoparse.h"
gst_audio_parse_get_type ());
ret &= gst_element_register (plugin, "unalignedaudioparse", GST_RANK_MARGINAL,
gst_unaligned_audio_parse_get_type ());
+ ret &= gst_element_register (plugin, "unalignedvideoparse", GST_RANK_MARGINAL,
+ gst_unaligned_video_parse_get_type ());
+ ret &= gst_element_register (plugin, "rawaudioparse", GST_RANK_NONE,
+ gst_raw_audio_parse_get_type ());
+ ret &= gst_element_register (plugin, "rawvideoparse", GST_RANK_NONE,
+ gst_raw_video_parse_get_type ());
return ret;
}
--- /dev/null
+/* GStreamer
+ * Copyright (C) 2016 Carlos Rafael Giani <dv@pseudoterminal.org>
+ *
+ * unalignedvideo.h:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_UNALIGNED_VIDEO_H__
+#define __GST_UNALIGNED_VIDEO_H__
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+
+#define GST_UNALIGNED_RAW_VIDEO_CAPS \
+ "video/x-unaligned-raw" \
+ ", format = (string) " GST_VIDEO_FORMATS_ALL \
+ ", width = " GST_VIDEO_SIZE_RANGE \
+ ", height = " GST_VIDEO_SIZE_RANGE \
+ ", framerate = " GST_VIDEO_FPS_RANGE
+
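+/* Informational note: gstunalignedvideoparse.c wraps this string in
+ * GST_STATIC_CAPS for its sink pad template, so the bin accepts
+ * "video/x-unaligned-raw" buffers of any raw video format, size and
+ * framerate, while its source pad template advertises regular
+ * "video/x-raw" caps. */
+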
+#endif /* __GST_UNALIGNED_VIDEO_H__ */
--- /dev/null
+/* GStreamer
+ *
+ * unit test for rawaudioparse
+ *
+ * Copyright (C) <2016> Carlos Rafael Giani <dv at pseudoterminal dot org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/* FIXME: GValueArray is deprecated, but there is currently no viable alternative
+ * See https://bugzilla.gnome.org/show_bug.cgi?id=667228 */
+#define GLIB_DISABLE_DEPRECATION_WARNINGS
+
+#include <gst/check/gstcheck.h>
+#include <gst/audio/audio.h>
+
+/* Checks are hardcoded to expect stereo 16-bit data. However, some tests
+ * deviate from the default sample rate of 40 kHz to exercise the
+ * differences in the calculated buffer durations. */
+#define NUM_TEST_SAMPLES 512
+#define NUM_TEST_CHANNELS 2
+#define TEST_SAMPLE_RATE 40000
+#define TEST_SAMPLE_FORMAT GST_AUDIO_FORMAT_S16
+
+/* For ease of programming we use globals to keep refs for our floating
+ * src and sink pads we create; otherwise we always have to do get_pad,
+ * get_peer, and then remove references in every test function */
+static GstPad *mysrcpad, *mysinkpad;
+
+typedef struct
+{
+ GstElement *rawaudioparse;
+ GstAdapter *test_data_adapter;
+}
+RawAudParseTestCtx;
+
+/* Sets up a rawaudioparse element and a GstAdapter that contains 512 test
+ * audio samples. The samples form a monotonically increasing sequence, from
+ * 0 to 511 for the left channel and from 512 to 1023 for the right channel.
+ * The result is a GstAdapter that contains the interleaved 16-bit integer
+ * values 0,512,1,513,2,514, ... 511,1023. This set is used in the checks to see
+ * if rawaudioparse's output buffers contain valid data. */
+static void
+setup_rawaudioparse (RawAudParseTestCtx * testctx, gboolean use_sink_caps,
+ gboolean set_properties, GstCaps * incaps, GstFormat format)
+{
+ GstElement *rawaudioparse;
+ GstAdapter *test_data_adapter;
+ GstBuffer *buffer;
+ guint i;
+ guint16 samples[NUM_TEST_SAMPLES * NUM_TEST_CHANNELS];
+
+
+ /* Setup the rawaudioparse element and the pads */
+
+ static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_AUDIO_CAPS_MAKE (GST_AUDIO_FORMATS_ALL))
+ );
+ static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+ rawaudioparse = gst_check_setup_element ("rawaudioparse");
+
+ g_object_set (G_OBJECT (rawaudioparse), "use-sink-caps", use_sink_caps, NULL);
+ if (set_properties)
+ g_object_set (G_OBJECT (rawaudioparse), "sample-rate", TEST_SAMPLE_RATE,
+ "num-channels", NUM_TEST_CHANNELS, "pcm-format", TEST_SAMPLE_FORMAT,
+ NULL);
+
+ fail_unless (gst_element_set_state (rawaudioparse,
+ GST_STATE_PAUSED) == GST_STATE_CHANGE_SUCCESS,
+ "could not set to paused");
+
+ mysrcpad = gst_check_setup_src_pad (rawaudioparse, &srctemplate);
+ mysinkpad = gst_check_setup_sink_pad (rawaudioparse, &sinktemplate);
+
+ gst_pad_set_active (mysrcpad, TRUE);
+ gst_pad_set_active (mysinkpad, TRUE);
+
+ gst_check_setup_events (mysrcpad, rawaudioparse, incaps, format);
+ if (incaps)
+ gst_caps_unref (incaps);
+
+
+ /* Fill the adapter with the interleaved 0..511 and
+ * 512..1023 samples */
+ for (i = 0; i < NUM_TEST_SAMPLES; ++i) {
+ guint c;
+ for (c = 0; c < NUM_TEST_CHANNELS; ++c)
+ samples[i * NUM_TEST_CHANNELS + c] = c * NUM_TEST_SAMPLES + i;
+ }
+
+ test_data_adapter = gst_adapter_new ();
+ buffer = gst_buffer_new_allocate (NULL, sizeof (samples), NULL);
+ gst_buffer_fill (buffer, 0, samples, sizeof (samples));
+ gst_adapter_push (test_data_adapter, buffer);
+
+
+ testctx->rawaudioparse = rawaudioparse;
+ testctx->test_data_adapter = test_data_adapter;
+}
+
+static void
+cleanup_rawaudioparse (RawAudParseTestCtx * testctx)
+{
+ gst_pad_set_active (mysrcpad, FALSE);
+ gst_pad_set_active (mysinkpad, FALSE);
+ gst_check_teardown_src_pad (testctx->rawaudioparse);
+ gst_check_teardown_sink_pad (testctx->rawaudioparse);
+ gst_check_teardown_element (testctx->rawaudioparse);
+
+ g_object_unref (G_OBJECT (testctx->test_data_adapter));
+}
+
+
+static void
+push_data_and_check_output (RawAudParseTestCtx * testctx, gsize num_in_bytes,
+ gsize expected_num_out_bytes, gint64 expected_pts, gint64 expected_dur,
+ guint expected_num_buffers_in_list, guint bpf, guint16 channel0_start,
+ guint16 channel1_start)
+{
+ GstBuffer *inbuf, *outbuf;
+ guint num_buffers;
+
+ /* Simulate upstream input by taking num_in_bytes bytes from the adapter */
+ inbuf = gst_adapter_take_buffer (testctx->test_data_adapter, num_in_bytes);
+ fail_unless (inbuf != NULL);
+
+ /* Push the input data and check that the output buffers list grew as
+ * expected */
+ fail_unless (gst_pad_push (mysrcpad, inbuf) == GST_FLOW_OK);
+ num_buffers = g_list_length (buffers);
+ fail_unless_equals_int (num_buffers, expected_num_buffers_in_list);
+
+ /* Take the latest output buffer */
+ outbuf = g_list_nth_data (buffers, num_buffers - 1);
+ fail_unless (outbuf != NULL);
+
+ /* Verify size, PTS, duration of the output buffer */
+ fail_unless_equals_uint64 (expected_num_out_bytes,
+ gst_buffer_get_size (outbuf));
+ fail_unless_equals_uint64 (expected_pts, GST_BUFFER_PTS (outbuf));
+ fail_unless_equals_uint64 (expected_dur, GST_BUFFER_DURATION (outbuf));
+
+ /* Go through all of the samples in the output buffer and check that they are
+ * valid. The samples are interleaved. The offsets specified by channel0_start
+ * and channel1_start are the expected values of the first sample for each
+ * channel in the buffer. So, if channel0_start is 512, then sample #0 in the
+ * buffer must have value 512, and if channel1_start is 700, then sample #1
+ * in the buffer must have value 700 etc. */
+ {
+ guint i, num_frames;
+ guint16 *s;
+ GstMapInfo map_info;
+ guint channel_starts[2] = { channel0_start, channel1_start };
+
+ gst_buffer_map (outbuf, &map_info, GST_MAP_READ);
+ num_frames = map_info.size / bpf;
+ s = (guint16 *) (map_info.data);
+
+ for (i = 0; i < num_frames; ++i) {
+ guint c;
+
+ for (c = 0; c < NUM_TEST_CHANNELS; ++c) {
+ guint16 expected = channel_starts[c] + i;
+ guint16 actual = s[i * NUM_TEST_CHANNELS + c];
+
+ fail_unless_equals_int (expected, actual);
+ }
+ }
+
+ gst_buffer_unmap (outbuf, &map_info);
+ }
+}
+
+
+GST_START_TEST (test_push_unaligned_data_properties_config)
+{
+ RawAudParseTestCtx testctx;
+
+ setup_rawaudioparse (&testctx, FALSE, TRUE, NULL, GST_FORMAT_BYTES);
+
+ /* Send in data buffers that are not aligned to multiples of the
+ * frame size (= sample size * num_channels). This tests if rawaudioparse
+ * aligns output data properly.
+ *
+ * The second line sends in 99 bytes, and expects 100 bytes in the
+ * output buffer. This is because the first buffer contains 45 bytes,
+ * and rawaudioparse is expected to output 44 bytes (which is an integer
+ * multiple of the frame size). The leftover 1 byte then gets prepended
+ * to the input buffer with 99 bytes, resulting in 100 bytes, which is
+ * an integer multiple of the frame size.
+ */
+
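+ /* For reference (informational comment): with stereo S16 at 40 kHz, one
+ * frame is 4 bytes, so 44 bytes are 11 frames = 275 us, 100 bytes are
+ * 25 frames = 625 us (starting at PTS 275 us), and 16 bytes are 4 frames
+ * = 100 us (starting at PTS 275 + 625 = 900 us). */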
+ push_data_and_check_output (&testctx, 45, 44, GST_USECOND * 0,
+ GST_USECOND * 275, 1, 4, 0, 512);
+ push_data_and_check_output (&testctx, 99, 100, GST_USECOND * 275,
+ GST_USECOND * 625, 2, 4, 11, 523);
+ push_data_and_check_output (&testctx, 18, 16, GST_USECOND * 900,
+ GST_USECOND * 100, 3, 4, 36, 548);
+
+ cleanup_rawaudioparse (&testctx);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_push_unaligned_data_sink_caps_config)
+{
+ RawAudParseTestCtx testctx;
+ GstAudioInfo ainfo;
+ GstCaps *caps;
+
+ /* This test is essentially the same as test_push_unaligned_data_properties_config,
+ * except that rawaudioparse uses the sink caps config instead of the property config. */
+
+ gst_audio_info_set_format (&ainfo, TEST_SAMPLE_FORMAT, TEST_SAMPLE_RATE,
+ NUM_TEST_CHANNELS, NULL);
+ caps = gst_audio_info_to_caps (&ainfo);
+
+ setup_rawaudioparse (&testctx, TRUE, FALSE, caps, GST_FORMAT_BYTES);
+
+ push_data_and_check_output (&testctx, 45, 44, GST_USECOND * 0,
+ GST_USECOND * 275, 1, 4, 0, 512);
+ push_data_and_check_output (&testctx, 99, 100, GST_USECOND * 275,
+ GST_USECOND * 625, 2, 4, 11, 523);
+ push_data_and_check_output (&testctx, 18, 16, GST_USECOND * 900,
+ GST_USECOND * 100, 3, 4, 36, 548);
+
+ cleanup_rawaudioparse (&testctx);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_push_swapped_channels)
+{
+ RawAudParseTestCtx testctx;
+ GValueArray *valarray;
+ GValue val = G_VALUE_INIT;
+
+ /* Send in 40 bytes and use a nonstandard channel order (left and right channels
+ * swapped). Expected behavior is for rawaudioparse to reorder the samples inside
+ * output buffers to conform to the GStreamer channel order. For this reason,
+ * channel0 offset is 512 and channel1 offset is 0 in the check below. */
+
+ setup_rawaudioparse (&testctx, FALSE, TRUE, NULL, GST_FORMAT_BYTES);
+
+ valarray = g_value_array_new (2);
+ g_value_init (&val, GST_TYPE_AUDIO_CHANNEL_POSITION);
+ g_value_set_enum (&val, GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT);
+ g_value_array_insert (valarray, 0, &val);
+ g_value_set_enum (&val, GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT);
+ g_value_array_insert (valarray, 1, &val);
+ g_object_set (G_OBJECT (testctx.rawaudioparse), "channel-positions",
+ valarray, NULL);
+ g_value_array_free (valarray);
+ g_value_unset (&val);
+
+ push_data_and_check_output (&testctx, 40, 40, GST_USECOND * 0,
+ GST_USECOND * 250, 1, 4, 512, 0);
+
+ cleanup_rawaudioparse (&testctx);
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_config_switch)
+{
+ RawAudParseTestCtx testctx;
+ GstAudioInfo ainfo;
+ GstCaps *caps;
+
+ /* Start processing with the properties config active, then mid-stream switch to
+ * the sink caps config. The properties config is altered to have a different
+ * sample rate than the sink caps to be able to detect the switch. The net effect
+ * is that output buffer durations are altered. For example, 40 bytes equal
+ * 10 samples, and this equals 500 us with 20 kHz or 250 us with 40 kHz. */
+
+ gst_audio_info_set_format (&ainfo, TEST_SAMPLE_FORMAT, TEST_SAMPLE_RATE,
+ NUM_TEST_CHANNELS, NULL);
+ caps = gst_audio_info_to_caps (&ainfo);
+
+ setup_rawaudioparse (&testctx, FALSE, TRUE, caps, GST_FORMAT_BYTES);
+
+ g_object_set (G_OBJECT (testctx.rawaudioparse), "sample-rate", 20000, NULL);
+
+ /* Push in data with properties config active, expecting duration calculations
+ * to be based on the 20 kHz sample rate */
+ push_data_and_check_output (&testctx, 40, 40, GST_USECOND * 0,
+ GST_USECOND * 500, 1, 4, 0, 512);
+ push_data_and_check_output (&testctx, 20, 20, GST_USECOND * 500,
+ GST_USECOND * 250, 2, 4, 10, 522);
+
+ /* Perform the switch */
+ g_object_set (G_OBJECT (testctx.rawaudioparse), "use-sink-caps", TRUE, NULL);
+
+ /* Push in data with sink caps config active, expecting duration calculations
+ * to be based on the 40 kHz sample rate */
+ push_data_and_check_output (&testctx, 40, 40, GST_USECOND * 750,
+ GST_USECOND * 250, 3, 4, 15, 527);
+
+ cleanup_rawaudioparse (&testctx);
+}
+
+GST_END_TEST;
+
+
+static Suite *
+rawaudioparse_suite (void)
+{
+ Suite *s = suite_create ("rawaudioparse");
+ TCase *tc_chain = tcase_create ("general");
+
+ suite_add_tcase (s, tc_chain);
+ tcase_add_test (tc_chain, test_push_unaligned_data_properties_config);
+ tcase_add_test (tc_chain, test_push_unaligned_data_sink_caps_config);
+ tcase_add_test (tc_chain, test_push_swapped_channels);
+ tcase_add_test (tc_chain, test_config_switch);
+
+ return s;
+}
+
+GST_CHECK_MAIN (rawaudioparse);
--- /dev/null
+/* GStreamer
+ *
+ * unit test for rawvideoparse
+ *
+ * Copyright (C) <2016> Carlos Rafael Giani <dv at pseudoterminal dot org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/* FIXME: GValueArray is deprecated, but there is currently no viable alternative
+ * See https://bugzilla.gnome.org/show_bug.cgi?id=667228 */
+#define GLIB_DISABLE_DEPRECATION_WARNINGS
+
+#include <gst/check/gstcheck.h>
+#include <gst/video/video.h>
+
+/* The checks use as test data an 8x8 Y444 image, with 25 Hz framerate. In the
+ * sink caps configuration, the stride is 8 bytes, and the frames are tightly
+ * packed together. In the properties configuration, the stride is 10 bytes, the
+ * planes aren't tightly packed (there are 20 bytes between the planes), and the
+ * frames overall have padding between them (the overall frame size is
+ * stride (10) * height (8) * num-planes (3) + bytes-between-planes (20) * 2
+ * = 280 bytes, and the frame stride is 500 bytes, so there are 220 bytes of
+ * extra padding between frames).
+ *
+ * In the test 8x8 frame, the pixels are all set to #000000, except for two
+ * pixels: (xofs+1 yofs+0) is set to #8899AA, (xofs+0 yofs+1) is set to #112233.
+ * The first frame uses the offsets xofs=0 yofs=0. The second frame uses
+ * xofs=1 yofs=0 etc. For each configuration, there is a separate set of frames,
+ * each stored in the GstAdapter in the Context struct.
+ *
+ * During the tests, as part of the checks, the pixels are verified to have the
+ * right values. The pattern of the pixels was chosen to easily detect stride
+ * errors, incorrect plane offsets etc.
+ */
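+
+/* For reference (informational comment): with the byte layout produced by
+ * set_pixel() below, the first properties-config frame (xofs=0, yofs=0)
+ * carries 0x88 / 0x99 / 0xAA at pixel (1,0) of planes 0 / 1 / 2, and
+ * 0x11 / 0x22 / 0x33 at pixel (0,1); every other pixel byte in the planes
+ * is zero. */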
+
+#define TEST_WIDTH 8
+#define TEST_HEIGHT 8
+#define TEST_FRAMERATE_N 25
+#define TEST_FRAMERATE_D 1
+#define TEST_FRAME_FORMAT GST_VIDEO_FORMAT_Y444
+#define NUM_TEST_PLANES 3
+
+#define PROP_CTX_PLANE_STRIDE 10
+#define PROP_CTX_FRAME_STRIDE 500
+#define PROP_CTX_PLANE_PADDING 20
+#define PROP_CTX_PLANE_SIZE (PROP_CTX_PLANE_STRIDE * TEST_HEIGHT + PROP_CTX_PLANE_PADDING)
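+
+/* Informational comment: PROP_CTX_PLANE_SIZE evaluates to 10 * 8 + 20 =
+ * 100 bytes, so the logical frame in the properties configuration spans
+ * 100 + 100 + 80 = 280 bytes (the trailing padding of the last plane is
+ * not counted), and the 500-byte frame stride leaves 220 bytes of extra
+ * padding between frames. */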
+
+static GstElement *rawvideoparse;
+
+/* For ease of programming we use globals to keep refs for our floating
+ * src and sink pads we create; otherwise we always have to do get_pad,
+ * get_peer, and then remove references in every test function */
+static GstPad *mysrcpad, *mysinkpad;
+
+typedef struct
+{
+ GstAdapter *data;
+ guint plane_stride;
+ guint plane_size;
+}
+Context;
+
+static Context properties_ctx, sinkcaps_ctx;
+
+static void
+set_pixel (Context const *ctx, guint8 * pixels, guint x, guint y, guint32 color)
+{
+ guint i;
+ guint ofs = y * ctx->plane_stride + x;
+ for (i = 0; i < NUM_TEST_PLANES; ++i)
+ pixels[ctx->plane_size * i + ofs] =
+ (color >> ((NUM_TEST_PLANES - 1 - i) * 8)) & 0xFF;
+}
+
+static guint32
+get_pixel (Context const *ctx, const guint8 * pixels, guint x, guint y)
+{
+ guint i;
+ guint ofs = y * ctx->plane_stride + x;
+ guint32 color = 0;
+ for (i = 0; i < NUM_TEST_PLANES; ++i)
+ color |=
+ ((guint32) (pixels[ctx->plane_size * i + ofs])) << ((NUM_TEST_PLANES -
+ 1 - i) * 8);
+ return color;
+}
+
+static void
+fill_test_pattern (Context const *ctx, GstBuffer * buffer, guint xofs,
+ guint yofs)
+{
+ guint8 *pixels;
+ GstMapInfo map_info;
+
+ gst_buffer_map (buffer, &map_info, GST_MAP_WRITE);
+ pixels = map_info.data;
+
+ memset (pixels, 0, ctx->plane_size * NUM_TEST_PLANES);
+ set_pixel (ctx, pixels, 1 + xofs, 0 + yofs, 0x8899AA);
+ set_pixel (ctx, pixels, 0 + xofs, 1 + yofs, 0x112233);
+
+ gst_buffer_unmap (buffer, &map_info);
+}
+
+static void
+check_test_pattern (Context const *ctx, GstBuffer * buffer, guint xofs,
+ guint yofs)
+{
+ guint x, y;
+ guint8 *pixels;
+ GstMapInfo map_info;
+
+ gst_buffer_map (buffer, &map_info, GST_MAP_READ);
+ pixels = map_info.data;
+
+ fail_unless_equals_uint64_hex (get_pixel (ctx, pixels, 1 + xofs, 0 + yofs),
+ 0x8899AA);
+ fail_unless_equals_uint64_hex (get_pixel (ctx, pixels, 0 + xofs, 1 + yofs),
+ 0x112233);
+
+ for (y = 0; y < TEST_HEIGHT; ++y) {
+ for (x = 0; x < TEST_WIDTH; ++x) {
+ if ((x == (1 + xofs) && y == (0 + yofs)) || (x == (0 + xofs)
+ && y == (1 + yofs)))
+ continue;
+
+ fail_unless_equals_uint64_hex (get_pixel (ctx, pixels, x, y), 0x000000);
+ }
+ }
+
+ gst_buffer_unmap (buffer, &map_info);
+}
+
+
+static void
+setup_rawvideoparse (gboolean use_sink_caps,
+ gboolean set_properties, GstCaps * incaps, GstFormat format)
+{
+ guint i;
+
+
+ /* Setup the rawvideoparse element and the pads */
+
+ static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (GST_VIDEO_FORMATS_ALL))
+ );
+ static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+ rawvideoparse = gst_check_setup_element ("rawvideoparse");
+
+ properties_ctx.plane_stride = PROP_CTX_PLANE_STRIDE;
+ properties_ctx.plane_size = PROP_CTX_PLANE_SIZE;
+ properties_ctx.data = gst_adapter_new ();
+
+ sinkcaps_ctx.plane_stride = TEST_WIDTH;
+ sinkcaps_ctx.plane_size = TEST_WIDTH * TEST_HEIGHT;
+ sinkcaps_ctx.data = gst_adapter_new ();
+
+ g_object_set (G_OBJECT (rawvideoparse), "use-sink-caps", use_sink_caps, NULL);
+ if (set_properties) {
+ GValueArray *plane_offsets, *plane_strides;
+ GValue val = G_VALUE_INIT;
+
+ g_value_init (&val, G_TYPE_UINT);
+
+ plane_offsets = g_value_array_new (NUM_TEST_PLANES);
+ for (i = 0; i < NUM_TEST_PLANES; ++i) {
+ g_value_set_uint (&val, properties_ctx.plane_size * i);
+ g_value_array_insert (plane_offsets, i, &val);
+ }
+
+ plane_strides = g_value_array_new (NUM_TEST_PLANES);
+ for (i = 0; i < NUM_TEST_PLANES; ++i) {
+ g_value_set_uint (&val, properties_ctx.plane_stride);
+ g_value_array_insert (plane_strides, i, &val);
+ }
+
+ g_value_unset (&val);
+
+ g_object_set (G_OBJECT (rawvideoparse), "width", TEST_WIDTH, "height",
+ TEST_HEIGHT, "frame-stride", PROP_CTX_FRAME_STRIDE, "framerate",
+ TEST_FRAMERATE_N, TEST_FRAMERATE_D, "plane-offsets", plane_offsets,
+ "plane-strides", plane_strides, "format", TEST_FRAME_FORMAT, NULL);
+
+ g_value_array_free (plane_offsets);
+ g_value_array_free (plane_strides);
+ }
+
+ /* Check that the plane stride/offset values are correct */
+ {
+ GValueArray *plane_offsets_array;
+ GValueArray *plane_strides_array;
+ /* By default, 320x240 I420 is used as the format */
+ guint plane_offsets[3] = { 0, 76800, 96000 };
+ guint plane_strides[3] = { 320, 160, 160 };
+
+ if (set_properties) {
+ /* When properties are explicitly set, we use Y444 as the video format,
+ * so in that case, the plane stride values are all the same */
+ plane_offsets[0] = properties_ctx.plane_size * 0;
+ plane_offsets[1] = properties_ctx.plane_size * 1;
+ plane_offsets[2] = properties_ctx.plane_size * 2;
+ plane_strides[0] = plane_strides[1] = plane_strides[2] =
+ properties_ctx.plane_stride;
+ }
+
+ g_object_get (G_OBJECT (rawvideoparse), "plane-offsets",
+ &plane_offsets_array, "plane-strides", &plane_strides_array, NULL);
+ fail_unless (plane_offsets_array != NULL);
+ fail_unless (plane_strides_array != NULL);
+ fail_unless (plane_offsets_array->n_values ==
+ plane_strides_array->n_values);
+
+ for (i = 0; i < plane_offsets_array->n_values; ++i) {
+ GValue *gvalue;
+
+ gvalue = g_value_array_get_nth (plane_offsets_array, i);
+ fail_unless (gvalue != NULL);
+ fail_unless_equals_uint64 (plane_offsets[i], g_value_get_uint (gvalue));
+
+ gvalue = g_value_array_get_nth (plane_strides_array, i);
+ fail_unless (gvalue != NULL);
+ fail_unless_equals_uint64 (plane_strides[i], g_value_get_uint (gvalue));
+ }
+
+ g_value_array_free (plane_offsets_array);
+ g_value_array_free (plane_strides_array);
+ }
+
+ fail_unless (gst_element_set_state (rawvideoparse,
+ GST_STATE_PAUSED) == GST_STATE_CHANGE_SUCCESS,
+ "could not set to paused");
+
+ mysrcpad = gst_check_setup_src_pad (rawvideoparse, &srctemplate);
+ mysinkpad = gst_check_setup_sink_pad (rawvideoparse, &sinktemplate);
+
+ gst_pad_set_active (mysrcpad, TRUE);
+ gst_pad_set_active (mysinkpad, TRUE);
+
+ gst_check_setup_events (mysrcpad, rawvideoparse, incaps, format);
+ if (incaps)
+ gst_caps_unref (incaps);
+
+
+ /* Fill the adapters with test frames */
+
+ for (i = 0; i < 10; ++i) {
+ GstBuffer *buffer =
+ gst_buffer_new_allocate (NULL, PROP_CTX_FRAME_STRIDE, NULL);
+ gst_buffer_memset (buffer, 0, 0xCC, gst_buffer_get_size (buffer));
+ fill_test_pattern (&properties_ctx, buffer, i, 0);
+ gst_adapter_push (properties_ctx.data, buffer);
+ }
+
+ for (i = 0; i < 10; ++i) {
+ GstBuffer *buffer =
+ gst_buffer_new_allocate (NULL, sinkcaps_ctx.plane_size * 3, NULL);
+ gst_buffer_memset (buffer, 0, 0xCC, gst_buffer_get_size (buffer));
+ fill_test_pattern (&sinkcaps_ctx, buffer, i, 0);
+ gst_adapter_push (sinkcaps_ctx.data, buffer);
+ }
+}
+
+static void
+cleanup_rawvideoparse (void)
+{
+ gst_pad_set_active (mysrcpad, FALSE);
+ gst_pad_set_active (mysinkpad, FALSE);
+ gst_check_teardown_src_pad (rawvideoparse);
+ gst_check_teardown_sink_pad (rawvideoparse);
+ gst_check_teardown_element (rawvideoparse);
+
+ g_object_unref (G_OBJECT (properties_ctx.data));
+ g_object_unref (G_OBJECT (sinkcaps_ctx.data));
+}
+
+static void
+push_data_and_check_output (Context * ctx, gsize num_in_bytes,
+ gsize expected_num_out_bytes, gint64 expected_pts, gint64 expected_dur,
+ guint expected_num_buffers_in_list, guint buf_idx, guint xofs, guint yofs)
+{
+ GstBuffer *inbuf, *outbuf;
+ guint num_buffers;
+
+ /* Simulate upstream input by taking num_in_bytes bytes from the adapter */
+ inbuf = gst_adapter_take_buffer (ctx->data, num_in_bytes);
+ fail_unless (inbuf != NULL);
+
+ /* Push the input data and check that the output buffers list grew as
+ * expected */
+ fail_unless (gst_pad_push (mysrcpad, inbuf) == GST_FLOW_OK);
+ num_buffers = g_list_length (buffers);
+ fail_unless_equals_int (num_buffers, expected_num_buffers_in_list);
+
+ /* Take the output buffer */
+ outbuf = g_list_nth_data (buffers, buf_idx);
+ fail_unless (outbuf != NULL);
+
+ /* Verify size, PTS, duration of the output buffer */
+ fail_unless_equals_uint64 (expected_num_out_bytes,
+ gst_buffer_get_size (outbuf));
+ fail_unless_equals_uint64 (expected_pts, GST_BUFFER_PTS (outbuf));
+ fail_unless_equals_uint64 (expected_dur, GST_BUFFER_DURATION (outbuf));
+
+ /* Check that the pixels have the correct values */
+ check_test_pattern (ctx, outbuf, xofs, yofs);
+}
+
+
+GST_START_TEST (test_push_unaligned_data_properties_config)
+{
+ setup_rawvideoparse (FALSE, TRUE, NULL, GST_FORMAT_BYTES);
+
+ /* Send in data buffers that are not aligned to multiples of the
+ * frame size. This tests if rawvideoparse
+ * aligns output data properly.
+ *
+ * The second line sends a buffer with multiple frames inside.
+ * rawvideoparse will then parse this buffer repeatedly (and prepend
+ * leftover data from the earlier parse iteration), explaining why
+ * all of a sudden there are 4 output buffers, compared to just one
+ * earlier. The output data is expected to be 280 bytes large, since this
+ * is the size of the actual frame, without extra padding at the end.
+ */
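+ /* For reference (informational comment): the frame stride is 500 bytes,
+ * so the first 511-byte push yields one 280-byte frame (plus 220 skipped
+ * padding bytes) with 11 bytes left over; pushing 1940 more bytes gives
+ * 1951 pending bytes, i.e. three more complete 500-byte strides, which is
+ * why the buffer list jumps from 1 to 4; the final 10-byte push leaves
+ * only 461 pending bytes, so no new buffer is produced and the
+ * already-produced buffer #2 (PTS 80 ms) is checked instead. */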
+ push_data_and_check_output (&properties_ctx, 511, 280, GST_MSECOND * 0,
+ GST_MSECOND * 40, 1, 0, 0, 0);
+ push_data_and_check_output (&properties_ctx, 1940, 280, GST_MSECOND * 40,
+ GST_MSECOND * 40, 4, 1, 1, 0);
+ push_data_and_check_output (&properties_ctx, 10, 280, GST_MSECOND * 80,
+ GST_MSECOND * 40, 4, 2, 2, 0);
+
+ cleanup_rawvideoparse ();
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_push_unaligned_data_sink_caps_config)
+{
+ GstVideoInfo vinfo;
+ GstCaps *caps;
+
+ /* This test is essentially the same as test_push_unaligned_data_properties_config,
+ * except that rawvideoparse uses the sink caps config instead of the property config.
+ * Also, the input sizes are different, since the sink caps config does not use extra
+ * padding between planes and does use a stride that directly corresponds to the width,
+ * resulting in a smaller frame size (192 bytes vs. 280 bytes). */
+
+ gst_video_info_set_format (&vinfo, TEST_FRAME_FORMAT, TEST_WIDTH,
+ TEST_HEIGHT);
+ GST_VIDEO_INFO_FPS_N (&vinfo) = 25;
+ GST_VIDEO_INFO_FPS_D (&vinfo) = 1;
+ caps = gst_video_info_to_caps (&vinfo);
+
+ setup_rawvideoparse (TRUE, FALSE, caps, GST_FORMAT_BYTES);
+
+ push_data_and_check_output (&sinkcaps_ctx, 250, 192, GST_MSECOND * 0,
+ GST_MSECOND * 40, 1, 0, 0, 0);
+ push_data_and_check_output (&sinkcaps_ctx, 811, 192, GST_MSECOND * 40,
+ GST_MSECOND * 40, 5, 1, 1, 0);
+ push_data_and_check_output (&sinkcaps_ctx, 10, 192, GST_MSECOND * 80,
+ GST_MSECOND * 40, 5, 2, 2, 0);
+
+ cleanup_rawvideoparse ();
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_config_switch)
+{
+ GstVideoInfo vinfo;
+ GstCaps *caps;
+
+ /* Start processing with the properties config active, then mid-stream switch to
+ * the sink caps config. Since the sink caps config does not use padding, its
+ * frame size is smaller. The buffer duration stays the same (since it only depends
+ * on the framerate), but the expected output buffer size is different. */
+
+ gst_video_info_set_format (&vinfo, TEST_FRAME_FORMAT, TEST_WIDTH,
+ TEST_HEIGHT);
+ GST_VIDEO_INFO_FPS_N (&vinfo) = 25;
+ GST_VIDEO_INFO_FPS_D (&vinfo) = 1;
+ caps = gst_video_info_to_caps (&vinfo);
+
+ setup_rawvideoparse (FALSE, TRUE, caps, GST_FORMAT_BYTES);
+
+ /* Push in data with properties config active */
+ push_data_and_check_output (&properties_ctx, 500, 280, GST_MSECOND * 0,
+ GST_MSECOND * 40, 1, 0, 0, 0);
+ push_data_and_check_output (&properties_ctx, 500, 280, GST_MSECOND * 40,
+ GST_MSECOND * 40, 2, 1, 1, 0);
+
+ /* Perform the switch */
+ g_object_set (G_OBJECT (rawvideoparse), "use-sink-caps", TRUE, NULL);
+
+ /* Push in data with sink caps config active, expecting a different frame size */
+ push_data_and_check_output (&sinkcaps_ctx, 192, 192, GST_MSECOND * 80,
+ GST_MSECOND * 40, 3, 2, 0, 0);
+
+ cleanup_rawvideoparse ();
+}
+
+GST_END_TEST;
+
+GST_START_TEST (test_push_with_no_framerate)
+{
+ /* Test the special case when no framerate is set. The parser is expected to
+ * still work then, but without setting duration or PTS/DTS (it cannot do that,
+ * because these require a nonzero framerate). The first output buffer has
+ * PTS 0; all subsequent ones have no set PTS. */
+
+ setup_rawvideoparse (FALSE, TRUE, NULL, GST_FORMAT_BYTES);
+ g_object_set (G_OBJECT (rawvideoparse), "framerate", 0, 1, NULL);
+
+ push_data_and_check_output (&properties_ctx, 500, 280, 0, GST_CLOCK_TIME_NONE,
+ 1, 0, 0, 0);
+ push_data_and_check_output (&properties_ctx, 500, 280, GST_CLOCK_TIME_NONE,
+ GST_CLOCK_TIME_NONE, 2, 1, 1, 0);
+
+ cleanup_rawvideoparse ();
+}
+
+GST_END_TEST;
+
+
+static Suite *
+rawvideoparse_suite (void)
+{
+ Suite *s = suite_create ("rawvideoparse");
+ TCase *tc_chain = tcase_create ("general");
+
+ suite_add_tcase (s, tc_chain);
+ tcase_add_test (tc_chain, test_push_unaligned_data_properties_config);
+ tcase_add_test (tc_chain, test_push_unaligned_data_sink_caps_config);
+ tcase_add_test (tc_chain, test_config_switch);
+ tcase_add_test (tc_chain, test_push_with_no_framerate);
+
+ return s;
+}
+
+GST_CHECK_MAIN (rawvideoparse);