/* GStreamer
+ * Copyright (C) <2008> Mindfruit B.V.
+ * @author Sjoerd Simons <sjoerd@luon.net>
* Copyright (C) <2007> Julien Moutte <julien@fluendo.com>
*
* This library is free software; you can redistribute it and/or
#include "config.h"
#endif
+#include <string.h>
#include "mpeg4videoparse.h"
GST_DEBUG_CATEGORY_STATIC (mpeg4v_parse_debug);
"parsed = (boolean) false, " "systemstream = (boolean) false")
);
+/* Properties */
+#define DEFAULT_PROP_DROP TRUE
+
+enum
+{
+ PROP_0,
+ PROP_DROP,
+ PROP_LAST
+};
+
GST_BOILERPLATE (GstMpeg4VParse, gst_mpeg4vparse, GstElement, GST_TYPE_ELEMENT);
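+
+/* Build the srcpad caps from the parsed configuration: profile, codec_data,
+ * framerate, pixel-aspect-ratio and frame size. Also remembers the frame
+ * duration and sets the new caps downstream. */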
+static gboolean
+gst_mpeg4vparse_set_new_caps (GstMpeg4VParse * parse,
+ guint16 time_increment_resolution, guint16 fixed_time_increment,
+ gint aspect_ratio_width, gint aspect_ratio_height, gint width, gint height)
+{
+ gboolean res;
+
+ GstCaps *out_caps = gst_caps_new_simple ("video/mpeg",
+ "mpegversion", G_TYPE_INT, 4,
+ "systemstream", G_TYPE_BOOLEAN, FALSE,
+ "parsed", G_TYPE_BOOLEAN, TRUE, NULL);
+
+ if (parse->profile != 0) {
+ gchar *profile = NULL;
+
+ /* FIXME does it make sense to expose the profile in the caps ? */
+ profile = g_strdup_printf ("%d", parse->profile);
+ gst_caps_set_simple (out_caps, "profile-level-id",
+ G_TYPE_STRING, profile, NULL);
+ g_free (profile);
+ }
+
+ if (parse->config != NULL) {
+ gst_caps_set_simple (out_caps, "codec_data",
+ GST_TYPE_BUFFER, parse->config, NULL);
+ }
+
+ if (fixed_time_increment != 0) {
+ /* we have a framerate */
+ gst_caps_set_simple (out_caps, "framerate",
+ GST_TYPE_FRACTION, time_increment_resolution, fixed_time_increment,
+ NULL);
+ parse->frame_duration = gst_util_uint64_scale_int (GST_SECOND,
+ fixed_time_increment, time_increment_resolution);
+ } else {
+ /* unknown duration */
+ parse->frame_duration = 0;
+ }
+
+ if (aspect_ratio_width > 0 && aspect_ratio_height > 0) {
+ gst_caps_set_simple (out_caps, "pixel-aspect-ratio",
+ GST_TYPE_FRACTION, aspect_ratio_width, aspect_ratio_height, NULL);
+ }
+
+ if (width > 0 && height > 0) {
+ gst_caps_set_simple (out_caps,
+ "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, NULL);
+ }
+
+ GST_DEBUG_OBJECT (parse, "setting downstream caps to %" GST_PTR_FORMAT,
+ out_caps);
+ res = gst_pad_set_caps (parse->srcpad, out_caps);
+ gst_caps_unref (out_caps);
+
+ return res;
+}
+
static void
gst_mpeg4vparse_align (GstMpeg4VParse * parse)
{
/* If we have enough data, ensure we're aligned to a start code */
const guint8 *data = gst_adapter_peek (parse->adapter, 4);
- if (data[0] == 0 && data[1] == 0 && data[2] == 1) {
+ if (G_LIKELY (data[0] == 0 && data[1] == 0 && data[2] == 1)) {
GST_LOG_OBJECT (parse, "found start code with type %02X", data[3]);
parse->state = PARSE_START_FOUND;
break;
}
}
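+
+/* MPEG-4 part 2 start codes: the byte that follows the 0x000001 prefix */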
+#define VOS_STARTCODE 0xB0
+#define VOS_ENDCODE 0xB1
+#define USER_DATA_STARTCODE 0xB2
+#define GOP_STARTCODE 0xB3
+#define VISUAL_OBJECT_STARTCODE 0xB5
+#define VOP_STARTCODE 0xB6
+
+#define START_MARKER 0x000001
+#define VISUAL_OBJECT_STARTCODE_MARKER ((START_MARKER << 8) + VISUAL_OBJECT_STARTCODE)
+
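+/* Minimal bit reader used to parse the configuration (VOS/VOL) headers */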
+typedef struct
+{
+ const guint8 *data;
+ /* byte offset */
+ gsize offset;
+ /* bit offset */
+ gsize b_offset;
+
+ /* size in bytes */
+ gsize size;
+} bitstream_t;
+
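+/* Read num bits, MSB first, from the bitstream into *bits and advance the
+ * byte/bit offsets. Returns FALSE when not enough data is left. */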
+static gboolean
+get_bits (bitstream_t * b, int num, guint32 * bits)
+{
+
+ *bits = 0;
+
+ /* round up to whole bytes so a read that ends mid-byte is bounds-checked */
+ if (b->offset + ((b->b_offset + num + 7) / 8) > b->size)
+ return FALSE;
+
+ if (b->b_offset + num <= 8) {
+ *bits = b->data[b->offset];
+ *bits = (*bits >> (8 - num - b->b_offset)) & (((1 << num)) - 1);
+
+ b->offset += (b->b_offset + num) / 8;
+ b->b_offset = (b->b_offset + num) % 8;
+ return TRUE;
+ } else {
+ /* the read crosses a byte boundary; assemble it in chunks */
+ int next;
+
+ next = (8 - b->b_offset);
+ do {
+ guint32 t;
+
+ if (!get_bits (b, next, &t))
+ return FALSE;
+ *bits <<= next;
+ *bits |= t;
+ num -= next;
+ next = MIN (8, num);
+ } while (num > 0);
+
+ return TRUE;
+ }
+}
+
+#define GET_BITS(b, num, bits) G_STMT_START { \
+ if (!get_bits(b, num, bits)) \
+ goto failed; \
+} G_STMT_END
+
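+/* A marker bit must always read as '1' */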
+#define MARKER_BIT(b) G_STMT_START { \
+ guint32 i; \
+ GET_BITS(b, 1, &i); \
+ if (i != 0x1) \
+ goto failed; \
+} G_STMT_END
+
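+/* Consume the stuffing bits up to the next byte boundary: a single '0' bit
+ * followed by '1' bits */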
+static inline gboolean
+next_start_code (bitstream_t * b)
+{
+ guint32 bits;
+
+ GET_BITS (b, 1, &bits);
+ if (bits != 0)
+ goto failed;
+
+ while (b->b_offset != 0) {
+ GET_BITS (b, 1, &bits);
+ if (bits != 0x1)
+ goto failed;
+ }
+
+ return TRUE;
+
+failed:
+ return FALSE;
+}
+
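+/* Pixel aspect ratios for the predefined aspect_ratio_info codes 1-5;
+ * code 0 is forbidden */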
+static gint aspect_ratio_table[6][2] = {
+ {-1, -1}, {1, 1}, {12, 11}, {10, 11}, {16, 11}, {40, 33}
+};
+
+
+/* Returns whether we successfully set the caps downstream if needed */
+static gboolean
+gst_mpeg4vparse_handle_vos (GstMpeg4VParse * parse, const guint8 * data,
+ gsize size)
+{
+ /* Skip the startcode */
+ guint32 bits;
+ guint16 time_increment_resolution = 0;
+ guint16 fixed_time_increment = 0;
+ gint aspect_ratio_width = -1, aspect_ratio_height = -1;
+ gint height = -1, width = -1;
+ guint8 profile;
+ gboolean equal;
+ bitstream_t bs = { data, 0, 0, size };
+
+ /* Parse the config from the VOS frame */
+ bs.offset = 5;
+
+ profile = data[4];
+
+ /* invalid profile, yikes */
+ if (profile == 0)
+ return FALSE;
+
+ equal = FALSE;
+ if (G_LIKELY (parse->config != NULL &&
+ GST_BUFFER_SIZE (parse->config) == size &&
+ memcmp (GST_BUFFER_DATA (parse->config), data, size) == 0))
+ equal = TRUE;
+
+ if (G_LIKELY (parse->profile == profile && equal)) {
+ /* We know this profile and config data, so we can just keep the same caps
+ */
+ return TRUE;
+ }
+
+ /* Even if we fail to parse, some other element downstream might still
+ * succeed, so always store the VOS in the config */
+ parse->profile = profile;
+ if (parse->config != NULL)
+ gst_buffer_unref (parse->config);
+
+ parse->config = gst_buffer_new_and_alloc (size);
+ memcpy (GST_BUFFER_DATA (parse->config), data, size);
+
+ /* Expect Visual Object startcode */
+ GET_BITS (&bs, 32, &bits);
+
+ /* get_bits () already returns the start code in host integer order */
+ if (bits != VISUAL_OBJECT_STARTCODE_MARKER)
+ goto failed;
+
+ GET_BITS (&bs, 1, &bits);
+ if (bits == 0x1) {
+ /* Skip visual_object_verid and priority */
+ GET_BITS (&bs, 7, &bits);
+ }
+
+ GET_BITS (&bs, 4, &bits);
+ /* Only support video ID */
+ if (bits != 0x1)
+ goto failed;
+
+ /* video signal type */
+ GET_BITS (&bs, 1, &bits);
+
+ if (bits == 0x1) {
+ /* video signal type, ignore format and range */
+ GET_BITS (&bs, 4, &bits);
+
+ GET_BITS (&bs, 1, &bits);
+ if (bits == 0x1) {
+ /* ignore color description */
+ GET_BITS (&bs, 24, &bits);
+ }
+ }
+
+ if (!next_start_code (&bs))
+ goto failed;
+
+ /* expecting a video object startcode */
+ GET_BITS (&bs, 32, &bits);
+ if (bits < 0x100 || bits > 0x11F)
+ goto failed;
+
+ /* expecting a video object layer startcode */
+ GET_BITS (&bs, 32, &bits);
+ if (bits < 0x120 || bits > 0x12F)
+ goto failed;
+
+ /* ignore random accessible vol and video object type indication */
+ GET_BITS (&bs, 9, &bits);
+
+ GET_BITS (&bs, 1, &bits);
+ if (bits) {
+ /* skip video object layer verid and priority */
+ GET_BITS (&bs, 7, &bits);
+ }
+
+ /* aspect ratio info */
+ GET_BITS (&bs, 4, &bits);
+ if (bits == 0)
+ goto failed;
+
+ /* check if aspect ratio info is extended par */
+ if (bits == 0xf) {
+ /* extended PAR: 8 bit par_width and par_height */
+ GET_BITS (&bs, 8, &bits);
+ aspect_ratio_width = bits;
+ GET_BITS (&bs, 8, &bits);
+ aspect_ratio_height = bits;
+ } else if (bits < 0x6) {
+ aspect_ratio_width = aspect_ratio_table[bits][0];
+ aspect_ratio_height = aspect_ratio_table[bits][1];
+ }
+
+ GET_BITS (&bs, 1, &bits);
+ if (bits) {
+ /* vol control parameters, skip chroma and low delay */
+ GET_BITS (&bs, 3, &bits);
+ GET_BITS (&bs, 1, &bits);
+ if (bits) {
+ /* skip vbv_parameters */
+ GET_BITS (&bs, 79, &bits);
+ }
+ }
+
+ /* layer shape */
+ GET_BITS (&bs, 2, &bits);
+ /* only support rectangular */
+ if (bits != 0)
+ goto failed;
+
+ MARKER_BIT (&bs);
+ GET_BITS (&bs, 16, &bits);
+ time_increment_resolution = bits;
+ MARKER_BIT (&bs);
+
+ GST_DEBUG_OBJECT (parse, "time increment resolution %d",
+ time_increment_resolution);
+
+ GET_BITS (&bs, 1, &bits);
+ if (bits) {
+ /* fixed time increment */
+ int n;
+
+ /* Length of the time increment is the minimal number of bits needed to
+ * represent time_increment_resolution */
+ for (n = 0; (time_increment_resolution >> n) != 0; n++);
+ GET_BITS (&bs, n, &bits);
+
+ fixed_time_increment = bits;
+ } else {
+ fixed_time_increment = 1;
+ }
+ GST_DEBUG_OBJECT (parse, "fixed time increment %d", fixed_time_increment);
+
+ /* assuming rectangular shape */
+ MARKER_BIT (&bs);
+ GET_BITS (&bs, 13, &bits);
+ width = bits;
+ MARKER_BIT (&bs);
+ GET_BITS (&bs, 13, &bits);
+ height = bits;
+ MARKER_BIT (&bs);
+
+out:
+ return gst_mpeg4vparse_set_new_caps (parse, time_increment_resolution,
+ fixed_time_increment, aspect_ratio_width, aspect_ratio_height,
+ width, height);
+
+failed:
+ GST_WARNING_OBJECT (parse, "Failed to parse config data");
+ goto out;
+}
+
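+/* Push size bytes from the adapter downstream as one buffer, or drop them
+ * when no configuration data has been seen yet and the drop property is
+ * enabled, then reset the parser state for the next VOP */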
+static void
+gst_mpeg4vparse_push (GstMpeg4VParse * parse, gsize size)
+{
+ if (G_UNLIKELY (parse->config == NULL && parse->drop)) {
+ GST_LOG_OBJECT (parse, "Dropping %u bytes", (guint) size);
+ gst_adapter_flush (parse->adapter, size);
+ } else {
+ GstBuffer *out_buf;
+
+ out_buf = gst_adapter_take_buffer (parse->adapter, size);
+
+ if (out_buf) {
+ /* Set GST_BUFFER_FLAG_DELTA_UNIT if it's not an intra frame */
+ if (!parse->intra_frame) {
+ GST_BUFFER_FLAG_SET (out_buf, GST_BUFFER_FLAG_DELTA_UNIT);
+ }
+ gst_buffer_set_caps (out_buf, GST_PAD_CAPS (parse->srcpad));
+ GST_BUFFER_TIMESTAMP (out_buf) = parse->timestamp;
+ gst_pad_push (parse->srcpad, out_buf);
+ }
+ }
+
+ /* Restart now that we flushed data */
+ parse->offset = 0;
+ parse->state = PARSE_START_FOUND;
+ parse->intra_frame = FALSE;
+}
+
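+/* Scan the adapter for start codes: VOS sections are parsed for
+ * configuration data and one buffer is pushed per complete VOP found */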
static GstFlowReturn
-gst_mpeg4vparse_drain (GstMpeg4VParse * parse)
+gst_mpeg4vparse_drain (GstMpeg4VParse * parse, GstBuffer * last_buffer)
{
GstFlowReturn ret = GST_FLOW_OK;
const guint8 *data = NULL;
available = gst_adapter_available (parse->adapter);
data = gst_adapter_peek (parse->adapter, available);
- while (parse->offset < available - 4) {
- /* We generate packets based on VOP end code (next start code) */
+ /* We need at least 5 more bytes: 4 for the next start code plus 1 to
+ * determine the VOP coding type when a VOP start code is found */
+ while (parse->offset < available - 5) {
if (data[parse->offset] == 0 && data[parse->offset + 1] == 0 &&
data[parse->offset + 2] == 1) {
+
switch (parse->state) {
case PARSE_START_FOUND:
- if (data[parse->offset + 3] == 0xB6) {
- GST_LOG_OBJECT (parse, "found VOP start marker at %u",
- parse->offset);
- parse->state = PARSE_VOP_FOUND;
+ switch (data[parse->offset + 3]) {
+ case VOP_STARTCODE:
+ GST_LOG_OBJECT (parse, "found VOP start marker at %u",
+ parse->offset);
+ /* vop_coding_type 0 is an intra-coded VOP */
+ parse->intra_frame = (((data[parse->offset + 4] >> 6) & 0x3) == 0);
+ /* Ensure that the timestamp of the outgoing buffer is the same
+ * as that of the buffer in which the VOP header was found */
+ parse->timestamp = GST_BUFFER_TIMESTAMP (last_buffer);
+ parse->state = PARSE_VOP_FOUND;
+ break;
+ case VOS_STARTCODE:
+ GST_LOG_OBJECT (parse, "found VOS start marker at %u",
+ parse->offset);
+ parse->vos_offset = parse->offset;
+ parse->state = PARSE_VOS_FOUND;
+ break;
}
/* Jump over it */
parse->offset += 4;
break;
+ case PARSE_VOS_FOUND:
+ switch (data[parse->offset + 3]) {
+ case GOP_STARTCODE:
+ case VOP_STARTCODE:
+ /* end of VOS found, interpret the config data and restart the
+ * search for the VOP */
+ gst_mpeg4vparse_handle_vos (parse, data + parse->vos_offset,
+ parse->offset - parse->vos_offset);
+ parse->state = PARSE_START_FOUND;
+ break;
+ default:
+ parse->offset += 4;
+ }
+ break;
case PARSE_VOP_FOUND:
{ /* We were in a VOP already, any start code marks the end of it */
- GstBuffer *out_buf = gst_adapter_take_buffer (parse->adapter,
- parse->offset);
+ if (data[parse->offset + 3] == USER_DATA_STARTCODE) {
+ /* User data shouldn't come directly after a VOP, but some
+ * implementations do it anyway :( Skipping it here prevents
+ * pushing USER_DATA as the first bytes of a buffer, which
+ * confuses other elements */
+ parse->offset += 4;
+ break;
+ }
GST_LOG_OBJECT (parse, "found VOP end marker at %u", parse->offset);
- if (out_buf) {
- gst_buffer_set_caps (out_buf, GST_PAD_CAPS (parse->srcpad));
- gst_pad_push (parse->srcpad, out_buf);
- }
- /* Restart now that we flushed data */
- parse->offset = 0;
- parse->state = PARSE_START_FOUND;
+
+ gst_mpeg4vparse_push (parse, parse->offset);
+
available = gst_adapter_available (parse->adapter);
data = gst_adapter_peek (parse->adapter, available);
break;
}
/* Drain the accumulated blocks frame per frame */
- ret = gst_mpeg4vparse_drain (parse);
+ ret = gst_mpeg4vparse_drain (parse, buffer);
beach:
gst_object_unref (parse);
gst_mpeg4vparse_sink_setcaps (GstPad * pad, GstCaps * caps)
{
gboolean res = TRUE;
- GstCaps *out_caps = NULL;
GstMpeg4VParse *parse = GST_MPEG4VIDEOPARSE (gst_pad_get_parent (pad));
+ GstStructure *s;
+ const GValue *value;
GST_DEBUG_OBJECT (parse, "setcaps called with %" GST_PTR_FORMAT, caps);
- out_caps = gst_caps_new_simple ("video/mpeg",
- "mpegversion", G_TYPE_INT, 4,
- "systemstream", G_TYPE_BOOLEAN, FALSE,
- "parsed", G_TYPE_BOOLEAN, TRUE, NULL);
+ s = gst_caps_get_structure (caps, 0);
+
+ if ((value = gst_structure_get_value (s, "codec_data")) != NULL
+ && G_VALUE_HOLDS (value, GST_TYPE_BUFFER)) {
+ GstBuffer *buf = gst_value_get_buffer (value);
- if (out_caps) {
- GST_DEBUG_OBJECT (parse, "setting downstream caps to %" GST_PTR_FORMAT,
- caps);
- res = gst_pad_set_caps (parse->srcpad, out_caps);
- gst_caps_unref (out_caps);
+ res = gst_mpeg4vparse_handle_vos (parse, GST_BUFFER_DATA (buf),
+ GST_BUFFER_SIZE (buf));
+ } else {
+ /* No codec_data, set minimal new caps; the VOS parsing later will fill
+ * in the other fields */
+ res = gst_mpeg4vparse_set_new_caps (parse, 0, 0, 0, 0, 0, 0);
}
gst_object_unref (parse);
-
return res;
}
GST_EVENT_TYPE_NAME (event));
switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_EOS:
+ if (parse->state == PARSE_VOP_FOUND) {
+ /* If we've found the start of a VOP, assume what's left in the
+ * adapter is the complete VOP. This might cause us to send out an
+ * incomplete VOP, but prevents the last video frame from
+ * potentially being dropped */
+ gst_mpeg4vparse_push (parse, gst_adapter_available (parse->adapter));
+ }
+ /* fallthrough */
default:
res = gst_pad_event_default (pad, event);
break;
return res;
}
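+
+/* srcpad query handler: latency queries are forwarded upstream and one
+ * frame duration is added to the result */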
+static gboolean
+gst_mpeg4vparse_src_query (GstPad * pad, GstQuery * query)
+{
+ GstMpeg4VParse *parse = GST_MPEG4VIDEOPARSE (gst_pad_get_parent (pad));
+ gboolean res;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_LATENCY:
+ {
+ /* We need to send the query upstream and add the returned latency to our
+ * own */
+ GstClockTime min_latency, max_latency;
+ gboolean us_live;
+ GstClockTime our_latency;
+
+ if ((res = gst_pad_peer_query (parse->sinkpad, query))) {
+ gst_query_parse_latency (query, &us_live, &min_latency, &max_latency);
+
+ GST_DEBUG_OBJECT (parse, "Peer latency: min %"
+ GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));
+
+ /* our latency is 1 frame, find the frame duration */
+ our_latency = parse->frame_duration;
+
+ GST_DEBUG_OBJECT (parse, "Our latency: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (our_latency));
+
+ /* we add some latency */
+ min_latency += our_latency;
+ if (max_latency != GST_CLOCK_TIME_NONE)
+ max_latency += our_latency;
+
+ GST_DEBUG_OBJECT (parse, "Calculated total latency : min %"
+ GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));
+
+ gst_query_set_latency (query, us_live, min_latency, max_latency);
+ }
+ break;
+ }
+ default:
+ res = gst_pad_peer_query (parse->sinkpad, query);
+ break;
+ }
+ gst_object_unref (parse);
+
+ return res;
+}
+
static void
gst_mpeg4vparse_cleanup (GstMpeg4VParse * parse)
{
}
static void
+gst_mpeg4vparse_set_property (GObject * object, guint property_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstMpeg4VParse *parse = GST_MPEG4VIDEOPARSE (object);
+
+ switch (property_id) {
+ case PROP_DROP:
+ parse->drop = g_value_get_boolean (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
+ }
+}
+
+static void
+gst_mpeg4vparse_get_property (GObject * object, guint property_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstMpeg4VParse *parse = GST_MPEG4VIDEOPARSE (object);
+
+ switch (property_id) {
+ case PROP_DROP:
+ g_value_set_boolean (value, parse->drop);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
+ }
+}
+
+static void
gst_mpeg4vparse_class_init (GstMpeg4VParseClass * klass)
{
GObjectClass *gobject_class;
parent_class = g_type_class_peek_parent (klass);
gobject_class->dispose = GST_DEBUG_FUNCPTR (gst_mpeg4vparse_dispose);
+
+ gobject_class->set_property = gst_mpeg4vparse_set_property;
+ gobject_class->get_property = gst_mpeg4vparse_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_DROP,
+ g_param_spec_boolean ("drop", "drop",
+ "Drop data until valid configuration data is received either "
+ "in the stream or through caps", DEFAULT_PROP_DROP,
+ G_PARAM_CONSTRUCT | G_PARAM_READWRITE));
+
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_mpeg4vparse_change_state);
}
GST_DEBUG_FUNCPTR (gst_mpeg4vparse_sink_event));
gst_pad_set_setcaps_function (parse->sinkpad,
GST_DEBUG_FUNCPTR (gst_mpeg4vparse_sink_setcaps));
+ gst_pad_use_fixed_caps (parse->sinkpad);
gst_element_add_pad (GST_ELEMENT (parse), parse->sinkpad);
parse->srcpad = gst_pad_new_from_static_template (&src_template, "src");
- gst_pad_use_fixed_caps (parse->srcpad);
+ gst_pad_set_query_function (parse->srcpad,
+ GST_DEBUG_FUNCPTR (gst_mpeg4vparse_src_query));
gst_element_add_pad (GST_ELEMENT (parse), parse->srcpad);
parse->adapter = gst_adapter_new ();