/* GStreamer
* Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
* Copyright (C) <2009> Tim-Philipp Müller <tim centricular net>
+ * Copyright (C) 2012 Collabora Ltd.
+ * Author : Edward Hervey <edward@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
*/
/**
* <refsect2>
* <title>Example launch line</title>
* |[
- * gst-launch -v v4l2src ! jpegdec ! ffmpegcolorspace ! xvimagesink
- * ]| The above pipeline reads a motion JPEG stream from a v4l2 camera
- * and renders it to the screen.
+ * gst-launch-1.0 -v filesrc location=mjpeg.avi ! avidemux ! queue ! jpegdec ! videoconvert ! videoscale ! autovideosink
+ * ]| The above pipeline decodes the MJPEG stream and renders it to the screen.
* </refsect2>
*/
#include "gstjpegdec.h"
#include "gstjpeg.h"
#include <gst/video/video.h>
+#include <gst/video/gstvideometa.h>
+#include <gst/video/gstvideopool.h>
#include "gst/gst-i18n-plugin.h"
#include <jerror.h>
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
("{ I420, RGB, BGR, RGBx, xRGB, BGRx, xBGR, GRAY8 }"))
);
-
/* *INDENT-ON* */
/* FIXME: sof-marker is for IJG libjpeg 8, should be different for 6.2 */
+/* FIXME: add back "sof-marker = (int) { 0, 1, 2, 5, 6, 7, 9, 10, 13, 14 }"
+ * once we have a parser and/or demuxer set caps properly */
static GstStaticPadTemplate gst_jpeg_dec_sink_pad_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS ("image/jpeg, "
- "width = (int) [ " G_STRINGIFY (MIN_WIDTH) ", " G_STRINGIFY (MAX_WIDTH)
- " ], " "height = (int) [ " G_STRINGIFY (MIN_HEIGHT) ", "
- G_STRINGIFY (MAX_HEIGHT) " ], framerate = (fraction) [ 0/1, MAX ], "
- "sof-marker = (int) { 0, 1, 2, 5, 6, 7, 9, 10, 13, 14 }")
+ GST_STATIC_CAPS ("image/jpeg")
);
GST_DEBUG_CATEGORY_STATIC (jpeg_dec_debug);
static void gst_jpeg_dec_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
-static GstFlowReturn gst_jpeg_dec_chain (GstPad * pad, GstObject * parent,
- GstBuffer * buffer);
-static GstCaps *gst_jpeg_dec_getcaps (GstPad * pad, GstCaps * filter);
-static gboolean gst_jpeg_dec_sink_query (GstPad * pad, GstObject * parent,
+static gboolean gst_jpeg_dec_set_format (GstVideoDecoder * dec,
+ GstVideoCodecState * state);
+static gboolean gst_jpeg_dec_start (GstVideoDecoder * bdec);
+static gboolean gst_jpeg_dec_stop (GstVideoDecoder * bdec);
+static gboolean gst_jpeg_dec_flush (GstVideoDecoder * bdec);
+static GstFlowReturn gst_jpeg_dec_parse (GstVideoDecoder * bdec,
+ GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos);
+static GstFlowReturn gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec,
+ GstVideoCodecFrame * frame);
+static gboolean gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec,
GstQuery * query);
-static gboolean gst_jpeg_dec_sink_event (GstPad * pad, GstObject * parent,
- GstEvent * event);
-static gboolean gst_jpeg_dec_src_event (GstPad * pad, GstObject * parent,
+static gboolean gst_jpeg_dec_sink_event (GstVideoDecoder * bdec,
GstEvent * event);
-static GstStateChangeReturn gst_jpeg_dec_change_state (GstElement * element,
- GstStateChange transition);
-static void gst_jpeg_dec_update_qos (GstJpegDec * dec, gdouble proportion,
- GstClockTimeDiff diff, GstClockTime ts);
-static void gst_jpeg_dec_reset_qos (GstJpegDec * dec);
-static void gst_jpeg_dec_read_qos (GstJpegDec * dec, gdouble * proportion,
- GstClockTime * time);
#define gst_jpeg_dec_parent_class parent_class
-G_DEFINE_TYPE (GstJpegDec, gst_jpeg_dec, GST_TYPE_ELEMENT);
+G_DEFINE_TYPE (GstJpegDec, gst_jpeg_dec, GST_TYPE_VIDEO_DECODER);
static void
gst_jpeg_dec_finalize (GObject * object)
GstJpegDec *dec = GST_JPEG_DEC (object);
jpeg_destroy_decompress (&dec->cinfo);
-
- g_object_unref (dec->adapter);
+ if (dec->input_state)
+ gst_video_codec_state_unref (dec->input_state);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
gst_jpeg_dec_class_init (GstJpegDecClass * klass)
{
- GstElementClass *gstelement_class;
GObjectClass *gobject_class;
+ GstElementClass *element_class;
+ GstVideoDecoderClass *vdec_class;
- gstelement_class = (GstElementClass *) klass;
gobject_class = (GObjectClass *) klass;
+ element_class = (GstElementClass *) klass;
+ vdec_class = (GstVideoDecoderClass *) klass;
parent_class = g_type_class_peek_parent (klass);
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
/**
- * GstJpegDec:max-errors
+ * GstJpegDec:max-errors:
*
* Error out after receiving N consecutive decoding errors
* (-1 = never error out, 0 = automatic, 1 = fail on first error, etc.)
*
- * Since: 0.10.27
- **/
+ * Deprecated: 1.3.1: Property wasn't used internally
+ */
+#ifndef GST_REMOVE_DEPRECATED
g_object_class_install_property (gobject_class, PROP_MAX_ERRORS,
g_param_spec_int ("max-errors", "Maximum Consecutive Decoding Errors",
- "Error out after receiving N consecutive decoding errors "
- "(-1 = never fail, 0 = automatic, 1 = fail on first error)",
+ "(Deprecated) Error out after receiving N consecutive decoding errors"
+ " (-1 = never fail, 0 = automatic, 1 = fail on first error)",
-1, G_MAXINT, JPEG_DEFAULT_MAX_ERRORS,
- G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-
- gst_element_class_add_pad_template (gstelement_class,
- gst_static_pad_template_get (&gst_jpeg_dec_src_pad_template));
- gst_element_class_add_pad_template (gstelement_class,
- gst_static_pad_template_get (&gst_jpeg_dec_sink_pad_template));
- gst_element_class_set_details_simple (gstelement_class, "JPEG image decoder",
- "Codec/Decoder/Image",
- "Decode images from JPEG format", "Wim Taymans <wim@fluendo.com>");
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED));
+#endif
- gstelement_class->change_state =
- GST_DEBUG_FUNCPTR (gst_jpeg_dec_change_state);
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_jpeg_dec_src_pad_template);
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_jpeg_dec_sink_pad_template);
+ gst_element_class_set_static_metadata (element_class, "JPEG image decoder",
+ "Codec/Decoder/Image", "Decode images from JPEG format",
+ "Wim Taymans <wim@fluendo.com>");
+
+ vdec_class->start = gst_jpeg_dec_start;
+ vdec_class->stop = gst_jpeg_dec_stop;
+ vdec_class->flush = gst_jpeg_dec_flush;
+ vdec_class->parse = gst_jpeg_dec_parse;
+ vdec_class->set_format = gst_jpeg_dec_set_format;
+ vdec_class->handle_frame = gst_jpeg_dec_handle_frame;
+ vdec_class->decide_allocation = gst_jpeg_dec_decide_allocation;
+ vdec_class->sink_event = gst_jpeg_dec_sink_event;
GST_DEBUG_CATEGORY_INIT (jpeg_dec_debug, "jpegdec", 0, "JPEG decoder");
GST_DEBUG_CATEGORY_GET (GST_CAT_PERFORMANCE, "GST_PERFORMANCE");
}
-static void
-gst_jpeg_dec_clear_error (GstJpegDec * dec)
-{
- g_free (dec->error_msg);
- dec->error_msg = NULL;
- dec->error_line = 0;
- dec->error_func = NULL;
-}
-
-static void
-gst_jpeg_dec_set_error_va (GstJpegDec * dec, const gchar * func, gint line,
- const gchar * debug_msg_format, va_list args)
-{
-#ifndef GST_DISABLE_GST_DEBUG
- gst_debug_log_valist (GST_CAT_DEFAULT, GST_LEVEL_WARNING, __FILE__, func,
- line, (GObject *) dec, debug_msg_format, args);
-#endif
-
- g_free (dec->error_msg);
- if (debug_msg_format)
- dec->error_msg = g_strdup_vprintf (debug_msg_format, args);
- else
- dec->error_msg = NULL;
-
- dec->error_line = line;
- dec->error_func = func;
-}
-
-static void
-gst_jpeg_dec_set_error (GstJpegDec * dec, const gchar * func, gint line,
- const gchar * debug_msg_format, ...)
-{
- va_list va;
-
- va_start (va, debug_msg_format);
- gst_jpeg_dec_set_error_va (dec, func, line, debug_msg_format, va);
- va_end (va);
-}
-
-static GstFlowReturn
-gst_jpeg_dec_post_error_or_warning (GstJpegDec * dec)
-{
- GstFlowReturn ret;
- int max_errors;
-
- ++dec->error_count;
- max_errors = g_atomic_int_get (&dec->max_errors);
-
- if (max_errors < 0) {
- ret = GST_FLOW_OK;
- } else if (max_errors == 0) {
- /* FIXME: do something more clever in "automatic mode" */
- if (dec->packetized) {
- ret = (dec->error_count < 3) ? GST_FLOW_OK : GST_FLOW_ERROR;
- } else {
- ret = GST_FLOW_ERROR;
- }
- } else {
- ret = (dec->error_count < max_errors) ? GST_FLOW_OK : GST_FLOW_ERROR;
- }
-
- GST_INFO_OBJECT (dec, "decoding error %d/%d (%s)", dec->error_count,
- max_errors, (ret == GST_FLOW_OK) ? "ignoring error" : "erroring out");
-
- gst_element_message_full (GST_ELEMENT (dec),
- (ret == GST_FLOW_OK) ? GST_MESSAGE_WARNING : GST_MESSAGE_ERROR,
- GST_STREAM_ERROR, GST_STREAM_ERROR_DECODE,
- g_strdup (_("Failed to decode JPEG image")), dec->error_msg,
- __FILE__, dec->error_func, dec->error_line);
-
- dec->error_msg = NULL;
- gst_jpeg_dec_clear_error (dec);
- return ret;
-}
-
static boolean
gst_jpeg_dec_fill_input_buffer (j_decompress_ptr cinfo)
{
- GstJpegDec *dec;
- guint av;
-
- dec = CINFO_GET_JPEGDEC (cinfo);
- g_return_val_if_fail (dec != NULL, FALSE);
-
- av = gst_adapter_available_fast (dec->adapter);
- GST_DEBUG_OBJECT (dec, "fill_input_buffer: fast av=%u, remaining=%u", av,
- dec->rem_img_len);
-
- if (av == 0) {
- GST_DEBUG_OBJECT (dec, "Out of data");
- return FALSE;
- }
-
- if (dec->rem_img_len < av)
- av = dec->rem_img_len;
- dec->rem_img_len -= av;
-
- g_free (dec->cur_buf);
- dec->cur_buf = gst_adapter_take (dec->adapter, av);
-
- cinfo->src->next_input_byte = dec->cur_buf;
- cinfo->src->bytes_in_buffer = av;
-
- return TRUE;
+ /* We pass in the full frame initially; if this gets called, the frame is most
+ * likely corrupted */
+ return FALSE;
}
static void
if (num_bytes > 0 && cinfo->src->bytes_in_buffer >= num_bytes) {
cinfo->src->next_input_byte += (size_t) num_bytes;
cinfo->src->bytes_in_buffer -= (size_t) num_bytes;
- } else if (num_bytes > 0) {
- gint available;
-
- num_bytes -= cinfo->src->bytes_in_buffer;
- cinfo->src->next_input_byte += (size_t) cinfo->src->bytes_in_buffer;
- cinfo->src->bytes_in_buffer = 0;
-
- available = gst_adapter_available (dec->adapter);
- if (available < num_bytes || available < dec->rem_img_len) {
- GST_WARNING_OBJECT (dec, "Less bytes to skip than available in the "
- "adapter or the remaining image length %ld < %d or %u",
- num_bytes, available, dec->rem_img_len);
- }
- num_bytes = MIN (MIN (num_bytes, available), dec->rem_img_len);
- gst_adapter_flush (dec->adapter, num_bytes);
- dec->rem_img_len -= num_bytes;
}
}
{
GST_DEBUG ("initializing");
- /* create the sink and src pads */
- dec->sinkpad =
- gst_pad_new_from_static_template (&gst_jpeg_dec_sink_pad_template,
- "sink");
- gst_element_add_pad (GST_ELEMENT (dec), dec->sinkpad);
- gst_pad_set_chain_function (dec->sinkpad,
- GST_DEBUG_FUNCPTR (gst_jpeg_dec_chain));
- gst_pad_set_event_function (dec->sinkpad,
- GST_DEBUG_FUNCPTR (gst_jpeg_dec_sink_event));
- gst_pad_set_query_function (dec->sinkpad,
- GST_DEBUG_FUNCPTR (gst_jpeg_dec_sink_query));
-
- dec->srcpad =
- gst_pad_new_from_static_template (&gst_jpeg_dec_src_pad_template, "src");
- gst_pad_set_event_function (dec->srcpad,
- GST_DEBUG_FUNCPTR (gst_jpeg_dec_src_event));
- gst_pad_use_fixed_caps (dec->srcpad);
- gst_element_add_pad (GST_ELEMENT (dec), dec->srcpad);
-
/* setup jpeglib */
memset (&dec->cinfo, 0, sizeof (dec->cinfo));
memset (&dec->jerr, 0, sizeof (dec->jerr));
dec->idct_method = JPEG_DEFAULT_IDCT_METHOD;
dec->max_errors = JPEG_DEFAULT_MAX_ERRORS;
- dec->adapter = gst_adapter_new ();
-}
-
-static gboolean
-gst_jpeg_dec_ensure_header (GstJpegDec * dec)
-{
- gint av;
- gint offset;
-
- av = gst_adapter_available (dec->adapter);
- /* we expect at least 4 bytes, first of which start marker */
- offset = gst_adapter_masked_scan_uint32 (dec->adapter, 0xffffff00, 0xffd8ff00,
- 0, av);
- if (G_UNLIKELY (offset < 0)) {
- GST_DEBUG_OBJECT (dec, "No JPEG header in current buffer");
- /* not found */
- if (av > 4)
- gst_adapter_flush (dec->adapter, av - 4);
- return FALSE;
- }
-
- if (offset > 0) {
- GST_LOG_OBJECT (dec, "Skipping %u bytes.", offset);
- gst_adapter_flush (dec->adapter, offset);
- }
- GST_DEBUG_OBJECT (dec, "Found JPEG header");
-
- return TRUE;
+ gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
+ (dec), TRUE);
+ GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (dec));
}
static inline gboolean
return FALSE;
}
-/* returns image length in bytes if parsed successfully,
- * otherwise 0 if more data needed,
- * if < 0 the absolute value needs to be flushed */
-static gint
-gst_jpeg_dec_parse_image_data (GstJpegDec * dec)
+static GstFlowReturn
+gst_jpeg_dec_parse (GstVideoDecoder * bdec, GstVideoCodecFrame * frame,
+ GstAdapter * adapter, gboolean at_eos)
{
guint size;
+ gint toadd = 0;
gboolean resync;
- GstAdapter *adapter = dec->adapter;
- gint offset, noffset;
+ gint offset = 0, noffset;
+ GstJpegDec *dec = (GstJpegDec *) bdec;
- size = gst_adapter_available (adapter);
+ GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
- /* we expect at least 4 bytes, first of which start marker */
- if (gst_adapter_masked_scan_uint32 (adapter, 0xffff0000, 0xffd80000, 0, 4))
- return 0;
+ /* FIXME : The overhead of using scan_uint32 is massive */
+ size = gst_adapter_available (adapter);
GST_DEBUG ("Parsing jpeg image data (%u bytes)", size);
- GST_DEBUG ("Parse state: offset=%d, resync=%d, entropy len=%d",
- dec->parse_offset, dec->parse_resync, dec->parse_entropy_len);
+ if (at_eos) {
+ GST_DEBUG ("Flushing all data out");
+ toadd = size;
- /* offset is 2 less than actual offset;
- * - adapter needs at least 4 bytes for scanning,
- * - start and end marker ensure at least that much
- */
- /* resume from state offset */
- offset = dec->parse_offset;
+ /* If we have leftover data, throw it away */
+ if (!dec->saw_header)
+ goto drop_frame;
+ goto have_full_frame;
+ }
+
+ if (size < 8)
+ goto need_more_data;
+
+ if (!dec->saw_header) {
+ gint ret;
+ /* we expect at least 4 bytes, first of which start marker */
+ ret =
+ gst_adapter_masked_scan_uint32 (adapter, 0xffff0000, 0xffd80000, 0,
+ size - 4);
+
+ GST_DEBUG ("ret:%d", ret);
+ if (ret < 0)
+ goto need_more_data;
+
+ if (ret) {
+ gst_adapter_flush (adapter, ret);
+ size -= ret;
+ }
+ dec->saw_header = TRUE;
+ }
while (1) {
guint frame_len;
guint32 value;
+ GST_DEBUG ("offset:%d, size:%d", offset, size);
+
noffset =
gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
offset, size - offset, &value);
+
/* lost sync if 0xff marker not where expected */
if ((resync = (noffset != offset))) {
GST_DEBUG ("Lost sync at 0x%08x, resyncing", offset + 2);
if (value == 0xd9) {
GST_DEBUG ("0x%08x: EOI marker", offset + 2);
/* clear parse state */
+ dec->saw_header = FALSE;
dec->parse_resync = FALSE;
- dec->parse_offset = 0;
- return (offset + 4);
- } else if (value == 0xd8) {
- /* Skip this frame if we found another SOI marker */
- GST_DEBUG ("0x%08x: SOI marker before EOI, skipping", offset + 2);
+ toadd = offset + 4;
+ goto have_full_frame;
+ }
+ if (value == 0xd8) {
+ GST_DEBUG ("0x%08x: SOI marker before EOI marker", offset + 2);
+
+ /* clear parse state */
+ dec->saw_header = FALSE;
dec->parse_resync = FALSE;
- dec->parse_offset = 0;
- return -(offset + 2);
+ toadd = offset;
+ goto have_full_frame;
}
if (gst_jpeg_dec_parse_tag_has_entropy_segment (value)) {
guint eseglen = dec->parse_entropy_len;
- GST_DEBUG ("0x%08x: finding entropy segment length", offset + 2);
+ GST_DEBUG ("0x%08x: finding entropy segment length (eseglen:%d)",
+ offset + 2, eseglen);
+ if (size < offset + 2 + frame_len + eseglen)
+ goto need_more_data;
noffset = offset + 2 + frame_len + dec->parse_entropy_len;
while (1) {
+ GST_DEBUG ("noffset:%d, size:%d, size - noffset:%d",
+ noffset, size, size - noffset);
noffset = gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00,
0x0000ff00, noffset, size - noffset, &value);
if (noffset < 0) {
GST_DEBUG ("found sync at 0x%x", offset + 2);
}
+ /* Add current data to output buffer */
+ toadd += frame_len + 2;
offset += frame_len + 2;
}
- /* EXITS */
need_more_data:
- {
- dec->parse_offset = offset;
- dec->parse_resync = resync;
- return 0;
- }
+ if (toadd)
+ gst_video_decoder_add_to_frame (bdec, toadd);
+ return GST_VIDEO_DECODER_FLOW_NEED_DATA;
+
+have_full_frame:
+ if (toadd)
+ gst_video_decoder_add_to_frame (bdec, toadd);
+ GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
+ return gst_video_decoder_have_frame (bdec);
+
+drop_frame:
+ gst_adapter_flush (adapter, size);
+ return GST_FLOW_OK;
}
+
/* shamelessly ripped from jpegutils.c in mjpegtools */
static void
add_huff_table (j_decompress_ptr dinfo,
}
static gboolean
-gst_jpeg_dec_setcaps (GstJpegDec * dec, GstCaps * caps)
+gst_jpeg_dec_set_format (GstVideoDecoder * dec, GstVideoCodecState * state)
{
- GstStructure *s;
- const GValue *framerate;
-
- s = gst_caps_get_structure (caps, 0);
-
- if ((framerate = gst_structure_get_value (s, "framerate")) != NULL) {
- dec->in_fps_n = gst_value_get_fraction_numerator (framerate);
- dec->in_fps_d = gst_value_get_fraction_denominator (framerate);
- dec->packetized = TRUE;
- GST_DEBUG ("got framerate of %d/%d fps => packetized mode",
- dec->in_fps_n, dec->in_fps_d);
- }
-
- /* do not extract width/height here. we do that in the chain
- * function on a per-frame basis (including the line[] array
- * setup) */
+ GstJpegDec *jpeg = GST_JPEG_DEC (dec);
- /* But we can take the framerate values and set them on the src pad */
+ if (jpeg->input_state)
+ gst_video_codec_state_unref (jpeg->input_state);
+ jpeg->input_state = gst_video_codec_state_ref (state);
return TRUE;
}
-static GstCaps *
-gst_jpeg_dec_getcaps (GstPad * pad, GstCaps * filter)
-{
- GstJpegDec *dec;
- GstCaps *caps;
- GstPad *peer;
-
- dec = GST_JPEG_DEC (GST_OBJECT_PARENT (pad));
-
- if (gst_pad_has_current_caps (pad))
- return gst_pad_get_current_caps (pad);
-
- peer = gst_pad_get_peer (dec->srcpad);
-
- if (peer) {
- GstCaps *peer_caps;
- const GstCaps *templ_caps;
- GstStructure *s;
- guint i, n;
-
- peer_caps = gst_pad_query_caps (peer, filter);
-
- /* Translate peercaps to image/jpeg */
- peer_caps = gst_caps_make_writable (peer_caps);
- n = gst_caps_get_size (peer_caps);
- for (i = 0; i < n; i++) {
- s = gst_caps_get_structure (peer_caps, i);
-
- gst_structure_set_name (s, "image/jpeg");
- }
-
- templ_caps = gst_pad_get_pad_template_caps (pad);
- caps = gst_caps_intersect_full (peer_caps, templ_caps,
- GST_CAPS_INTERSECT_FIRST);
-
- gst_object_unref (peer);
- } else {
- caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad));
- }
-
- return caps;
-}
-
/* yuk */
static void
{
gint i;
- if (G_LIKELY (dec->idr_width_allocated >= maxrowbytes))
+ if (G_LIKELY (dec->idr_width_allocated == maxrowbytes))
return TRUE;
/* FIXME: maybe just alloc one or three blocks altogether? */
}
static void
-gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, GstVideoFrame * frame)
+gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, GstVideoFrame * frame,
+ guint field, guint num_fields)
{
guchar *rows[16];
guchar **scanarray[1] = { rows };
GST_DEBUG_OBJECT (dec, "indirect decoding of grayscale");
width = GST_VIDEO_FRAME_WIDTH (frame);
- height = GST_VIDEO_FRAME_HEIGHT (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame) / num_fields;
if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
return;
base[0] = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
+ if (field == 2) {
+ base[0] += GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
+ }
+
pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
- rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
+ rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0) * num_fields;
memcpy (rows, dec->idr_y, 16 * sizeof (gpointer));
}
static void
-gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame)
+gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame,
+ guint field, guint num_fields)
{
guchar *r_rows[16], *g_rows[16], *b_rows[16];
guchar **scanarray[3] = { r_rows, g_rows, b_rows };
GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");
width = GST_VIDEO_FRAME_WIDTH (frame);
- height = GST_VIDEO_FRAME_HEIGHT (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame) / num_fields;
if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
return;
- for (i = 0; i < 3; i++)
+ for (i = 0; i < 3; i++) {
base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
+ if (field == 2)
+ base[i] += GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
+ }
pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
- rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
+ rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0) * num_fields;
memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
}
static void
-gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame,
- gint r_v, gint r_h, gint comp)
+gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame, gint r_v,
+ gint r_h, gint comp, guint field, guint num_fields)
{
guchar *y_rows[16], *u_rows[16], *v_rows[16];
guchar **scanarray[3] = { y_rows, u_rows, v_rows };
gint i, j, k;
gint lines;
guchar *base[3], *last[3];
- gint stride[3];
+ gint rowsize[3], stride[3];
gint width, height;
GST_DEBUG_OBJECT (dec,
for (i = 0; i < 3; i++) {
base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
- stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
+ stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i) * num_fields;
+ rowsize[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
/* make sure we don't make jpeglib write beyond our buffer,
* which might happen if (height % (r_v*DCTSIZE)) != 0 */
last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
(GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
+
+ if (field == 2) {
+ base[i] += GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
+ }
}
memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer));
if (G_LIKELY (lines > 0)) {
for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) {
if (G_LIKELY (base[0] <= last[0])) {
- memcpy (base[0], y_rows[j], stride[0]);
+ memcpy (base[0], y_rows[j], rowsize[0]);
base[0] += stride[0];
}
if (r_v == 2) {
if (G_LIKELY (base[0] <= last[0])) {
- memcpy (base[0], y_rows[j + 1], stride[0]);
+ memcpy (base[0], y_rows[j + 1], rowsize[0]);
base[0] += stride[0];
}
}
if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) {
if (r_h == 2) {
- memcpy (base[1], u_rows[k], stride[1]);
- memcpy (base[2], v_rows[k], stride[2]);
+ memcpy (base[1], u_rows[k], rowsize[1]);
+ memcpy (base[2], v_rows[k], rowsize[2]);
} else if (r_h == 1) {
- hresamplecpy1 (base[1], u_rows[k], stride[1]);
- hresamplecpy1 (base[2], v_rows[k], stride[2]);
+ hresamplecpy1 (base[1], u_rows[k], rowsize[1]);
+ hresamplecpy1 (base[2], v_rows[k], rowsize[2]);
} else {
/* FIXME: implement (at least we avoid crashing by doing nothing) */
}
}
static GstFlowReturn
-gst_jpeg_dec_decode_direct (GstJpegDec * dec, GstVideoFrame * frame)
+gst_jpeg_dec_decode_direct (GstJpegDec * dec, GstVideoFrame * frame,
+ guint field, guint num_fields)
{
guchar **line[3]; /* the jpeg line buffer */
guchar *y[4 * DCTSIZE] = { NULL, }; /* alloc enough for the lines */
for (i = 0; i < 3; i++) {
base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
- stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
+ stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i) * num_fields;
/* make sure we don't make jpeglib write beyond our buffer,
* which might happen if (height % (r_v*DCTSIZE)) != 0 */
last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
(GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
+
+ if (field == 2) {
+ base[i] += GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
+ }
}
/* let jpeglib decode directly into our final buffer */
format_not_supported:
{
- gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
- "Unsupported subsampling schema: v_samp factors: %u %u %u",
- v_samp[0], v_samp[1], v_samp[2]);
- return GST_FLOW_ERROR;
- }
-}
-
-static void
-gst_jpeg_dec_update_qos (GstJpegDec * dec, gdouble proportion,
- GstClockTimeDiff diff, GstClockTime ts)
-{
- GST_OBJECT_LOCK (dec);
- dec->proportion = proportion;
- if (G_LIKELY (ts != GST_CLOCK_TIME_NONE)) {
- if (G_UNLIKELY (diff > dec->qos_duration))
- dec->earliest_time = ts + 2 * diff + dec->qos_duration;
- else
- dec->earliest_time = ts + diff;
- } else {
- dec->earliest_time = GST_CLOCK_TIME_NONE;
- }
- GST_OBJECT_UNLOCK (dec);
-}
-
-static void
-gst_jpeg_dec_reset_qos (GstJpegDec * dec)
-{
- gst_jpeg_dec_update_qos (dec, 0.5, 0, GST_CLOCK_TIME_NONE);
-}
-
-static void
-gst_jpeg_dec_read_qos (GstJpegDec * dec, gdouble * proportion,
- GstClockTime * time)
-{
- GST_OBJECT_LOCK (dec);
- *proportion = dec->proportion;
- *time = dec->earliest_time;
- GST_OBJECT_UNLOCK (dec);
-}
-
-/* Perform qos calculations before decoding the next frame. Returns TRUE if the
- * frame should be decoded, FALSE if the frame can be dropped entirely */
-static gboolean
-gst_jpeg_dec_do_qos (GstJpegDec * dec, GstClockTime timestamp)
-{
- GstClockTime qostime, earliest_time;
- gdouble proportion;
-
- /* no timestamp, can't do QoS => decode frame */
- if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (timestamp))) {
- GST_LOG_OBJECT (dec, "invalid timestamp, can't do QoS, decode frame");
- return TRUE;
- }
-
- /* get latest QoS observation values */
- gst_jpeg_dec_read_qos (dec, &proportion, &earliest_time);
-
- /* skip qos if we have no observation (yet) => decode frame */
- if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (earliest_time))) {
- GST_LOG_OBJECT (dec, "no observation yet, decode frame");
- return TRUE;
- }
-
- /* qos is done on running time */
- qostime = gst_segment_to_running_time (&dec->segment, GST_FORMAT_TIME,
- timestamp);
+ gboolean ret = GST_FLOW_OK;
- /* see how our next timestamp relates to the latest qos timestamp */
- GST_LOG_OBJECT (dec, "qostime %" GST_TIME_FORMAT ", earliest %"
- GST_TIME_FORMAT, GST_TIME_ARGS (qostime), GST_TIME_ARGS (earliest_time));
+ GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
+ (_("Failed to decode JPEG image")),
+ ("Unsupported subsampling schema: v_samp factors: %u %u %u", v_samp[0],
+ v_samp[1], v_samp[2]), ret);
- if (qostime != GST_CLOCK_TIME_NONE && qostime <= earliest_time) {
- GST_DEBUG_OBJECT (dec, "we are late, drop frame");
- return FALSE;
+ return ret;
}
-
- GST_LOG_OBJECT (dec, "decode frame");
- return TRUE;
}
-static gboolean
-gst_jpeg_dec_buffer_pool (GstJpegDec * dec, GstCaps * caps)
+static void
+gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc,
+ gboolean interlaced)
{
- GstQuery *query;
- GstBufferPool *pool = NULL;
- guint size, min, max, prefix, alignment;
- GstStructure *config;
-
- GST_DEBUG_OBJECT (dec, "setting up bufferpool");
-
- /* find a pool for the negotiated caps now */
- query = gst_query_new_allocation (caps, TRUE);
-
- if (gst_pad_peer_query (dec->srcpad, query)) {
- /* we got configuration from our peer, parse them */
- gst_query_parse_allocation_params (query, &size, &min, &max, &prefix,
- &alignment, &pool);
- size = MAX (size, dec->info.size);
- } else {
- GST_DEBUG_OBJECT (dec, "peer query failed, using defaults");
- size = dec->info.size;
- min = max = 0;
- prefix = 0;
- alignment = 15;
- }
- gst_query_unref (query);
+ GstVideoCodecState *outstate;
+ GstVideoInfo *info;
+ GstVideoFormat format;
- if (pool == NULL) {
- /* we did not get a pool, make one ourselves then */
- pool = gst_buffer_pool_new ();
+ switch (clrspc) {
+ case JCS_RGB:
+ format = GST_VIDEO_FORMAT_RGB;
+ break;
+ case JCS_GRAYSCALE:
+ format = GST_VIDEO_FORMAT_GRAY8;
+ break;
+ default:
+ format = GST_VIDEO_FORMAT_I420;
+ break;
}
- config = gst_buffer_pool_get_config (pool);
- gst_buffer_pool_config_set (config, caps, size, min, max, prefix,
- alignment | 15);
- /* and store */
- gst_buffer_pool_set_config (pool, config);
+ /* Compare to currently configured output state */
+ outstate = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (dec));
+ if (outstate) {
+ info = &outstate->info;
- if (dec->pool) {
- gst_buffer_pool_set_active (dec->pool, FALSE);
- gst_object_unref (dec->pool);
+ if (width == GST_VIDEO_INFO_WIDTH (info) &&
+ height == GST_VIDEO_INFO_HEIGHT (info) &&
+ format == GST_VIDEO_INFO_FORMAT (info)) {
+ gst_video_codec_state_unref (outstate);
+ return;
+ }
+ gst_video_codec_state_unref (outstate);
}
- dec->pool = pool;
-
- /* and activate */
- gst_buffer_pool_set_active (pool, TRUE);
-
- return TRUE;
-}
-static gboolean
-gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc)
-{
- GstCaps *caps;
- GstVideoFormat format;
- GstVideoInfo info;
+ outstate =
+ gst_video_decoder_set_output_state (GST_VIDEO_DECODER (dec), format,
+ width, height, dec->input_state);
- if (G_UNLIKELY (width == dec->info.width && height == dec->info.height &&
- dec->in_fps_n == dec->info.fps_n && dec->in_fps_d == dec->info.fps_d
- && clrspc == dec->clrspc))
- return TRUE;
-
- gst_video_info_init (&info);
-
- /* framerate == 0/1 is a still frame */
- if (dec->in_fps_d == 0) {
- info.fps_n = 0;
- info.fps_d = 1;
- } else {
- info.fps_n = dec->in_fps_n;
- info.fps_d = dec->in_fps_d;
+ switch (clrspc) {
+ case JCS_RGB:
+ case JCS_GRAYSCALE:
+ break;
+ default:
+ outstate->info.colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;
+ outstate->info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
+ outstate->info.colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN;
+ outstate->info.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
+ break;
}
- /* calculate or assume an average frame duration for QoS purposes */
- GST_OBJECT_LOCK (dec);
- if (info.fps_n != 0) {
- dec->qos_duration =
- gst_util_uint64_scale (GST_SECOND, info.fps_d, info.fps_n);
- dec->duration = dec->qos_duration;
- } else {
- /* if not set just use 25fps */
- dec->qos_duration = gst_util_uint64_scale (GST_SECOND, 1, 25);
- dec->duration = GST_CLOCK_TIME_NONE;
+ if (interlaced) {
+ outstate->info.interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
+ GST_VIDEO_INFO_FIELD_ORDER (&outstate->info) =
+ GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST;
}
- GST_OBJECT_UNLOCK (dec);
- if (dec->cinfo.jpeg_color_space == JCS_RGB) {
- gint i;
- GstCaps *allowed_caps;
- GstVideoInfo tmpinfo;
-
- GST_DEBUG_OBJECT (dec, "selecting RGB format");
- /* retrieve allowed caps, and find the first one that reasonably maps
- * to the parameters of the colourspace */
- caps = gst_pad_get_allowed_caps (dec->srcpad);
- if (!caps) {
- GST_DEBUG_OBJECT (dec, "... but no peer, using template caps");
- /* need to copy because get_allowed_caps returns a ref,
- * and get_pad_template_caps doesn't */
- caps = gst_caps_copy (gst_pad_get_pad_template_caps (dec->srcpad));
- }
- /* avoid lists of formats, etc */
- allowed_caps = gst_caps_normalize (caps);
- gst_caps_unref (caps);
- caps = NULL;
- GST_LOG_OBJECT (dec, "allowed source caps %" GST_PTR_FORMAT, allowed_caps);
-
- for (i = 0; i < gst_caps_get_size (allowed_caps); i++) {
- if (caps)
- gst_caps_unref (caps);
- caps = gst_caps_copy_nth (allowed_caps, i);
- /* sigh, ds and _parse_caps need fixed caps for parsing, fixate */
- gst_caps_fixate (caps);
- GST_LOG_OBJECT (dec, "checking caps %" GST_PTR_FORMAT, caps);
-
- if (!gst_video_info_from_caps (&tmpinfo, caps))
- continue;
- /* we'll settle for the first (preferred) downstream rgb format */
- if (GST_VIDEO_INFO_IS_RGB (&tmpinfo))
- break;
- /* default fall-back */
- format = GST_VIDEO_FORMAT_RGB;
- }
- if (caps)
- gst_caps_unref (caps);
- gst_caps_unref (allowed_caps);
- } else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
- /* TODO is anything else then 8bit supported in jpeg? */
- format = GST_VIDEO_FORMAT_GRAY8;
- } else {
- /* go for plain and simple I420 */
- /* TODO other YUV cases ? */
- format = GST_VIDEO_FORMAT_I420;
- }
+ gst_video_codec_state_unref (outstate);
- gst_video_info_set_format (&info, format, width, height);
- caps = gst_video_info_to_caps (&info);
+ gst_video_decoder_negotiate (GST_VIDEO_DECODER (dec));
- GST_DEBUG_OBJECT (dec, "setting caps %" GST_PTR_FORMAT, caps);
GST_DEBUG_OBJECT (dec, "max_v_samp_factor=%d", dec->cinfo.max_v_samp_factor);
GST_DEBUG_OBJECT (dec, "max_h_samp_factor=%d", dec->cinfo.max_h_samp_factor);
-
- gst_pad_set_caps (dec->srcpad, caps);
-
- dec->info = info;
- dec->clrspc = clrspc;
-
- gst_jpeg_dec_buffer_pool (dec, caps);
- gst_caps_unref (caps);
-
- return TRUE;
}
static GstFlowReturn
-gst_jpeg_dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
+gst_jpeg_dec_prepare_decode (GstJpegDec * dec)
{
- GstFlowReturn ret = GST_FLOW_OK;
- GstJpegDec *dec;
- GstBuffer *outbuf = NULL;
- gint img_len;
- gint width, height;
- gint r_h, r_v;
- guint code, hdr_ok;
- GstClockTime timestamp, duration;
- GstVideoFrame frame;
-
- dec = GST_JPEG_DEC (parent);
-
- timestamp = GST_BUFFER_TIMESTAMP (buf);
- duration = GST_BUFFER_DURATION (buf);
-
- if (GST_CLOCK_TIME_IS_VALID (timestamp))
- dec->next_ts = timestamp;
-
- if (GST_BUFFER_IS_DISCONT (buf)) {
- GST_DEBUG_OBJECT (dec, "buffer has DISCONT flag set");
- dec->discont = TRUE;
- if (!dec->packetized && gst_adapter_available (dec->adapter)) {
- GST_WARNING_OBJECT (dec, "DISCONT buffer in non-packetized mode, bad");
- gst_adapter_clear (dec->adapter);
- }
- }
-
- gst_adapter_push (dec->adapter, buf);
- buf = NULL;
-
- /* If we are non-packetized and know the total incoming size in bytes,
- * just wait until we have enough before doing any processing. */
-
- if (!dec->packetized && (dec->segment.format == GST_FORMAT_BYTES) &&
- (dec->segment.stop != -1) &&
- (gst_adapter_available (dec->adapter) < dec->segment.stop)) {
- /* We assume that non-packetized input in bytes is *one* single jpeg image */
- GST_DEBUG ("Non-packetized mode. Got %" G_GSIZE_FORMAT " bytes, "
- "need %" G_GINT64_FORMAT, gst_adapter_available (dec->adapter),
- dec->segment.stop);
- goto need_more_data;
- }
-
-again:
- if (!gst_jpeg_dec_ensure_header (dec))
- goto need_more_data;
-
- /* If we know that each input buffer contains data
- * for a whole jpeg image (e.g. MJPEG streams), just
- * do some sanity checking instead of parsing all of
- * the jpeg data */
- if (dec->packetized) {
- img_len = gst_adapter_available (dec->adapter);
- } else {
- /* Parse jpeg image to handle jpeg input that
- * is not aligned to buffer boundaries */
- img_len = gst_jpeg_dec_parse_image_data (dec);
-
- if (img_len == 0) {
- goto need_more_data;
- } else if (img_len < 0) {
- gst_adapter_flush (dec->adapter, -img_len);
- goto again;
- }
- }
-
- dec->rem_img_len = img_len;
-
- GST_LOG_OBJECT (dec, "image size = %u", img_len);
-
- /* QoS: if we're too late anyway, skip decoding */
- if (dec->packetized && !gst_jpeg_dec_do_qos (dec, timestamp))
- goto skip_decoding;
-
-#ifndef GST_DISABLE_GST_DEBUG
- {
- guchar data[4];
-
- gst_adapter_copy (dec->adapter, data, 0, 4);
- GST_LOG_OBJECT (dec, "reading header %02x %02x %02x %02x", data[0], data[1],
- data[2], data[3]);
- }
-#endif
-
- gst_jpeg_dec_fill_input_buffer (&dec->cinfo);
-
- if (setjmp (dec->jerr.setjmp_buffer)) {
- code = dec->jerr.pub.msg_code;
-
- if (code == JERR_INPUT_EOF) {
- GST_DEBUG ("jpeg input EOF error, we probably need more data");
- goto need_more_data;
- }
- goto decode_error;
- }
+ G_GNUC_UNUSED GstFlowReturn ret;
+ guint r_h, r_v, hdr_ok;
/* read header */
hdr_ok = jpeg_read_header (&dec->cinfo, TRUE);
break;
}
- width = dec->cinfo.output_width;
- height = dec->cinfo.output_height;
-
- if (G_UNLIKELY (width < MIN_WIDTH || width > MAX_WIDTH ||
- height < MIN_HEIGHT || height > MAX_HEIGHT))
+ if (G_UNLIKELY (dec->cinfo.output_width < MIN_WIDTH ||
+ dec->cinfo.output_width > MAX_WIDTH ||
+ dec->cinfo.output_height < MIN_HEIGHT ||
+ dec->cinfo.output_height > MAX_HEIGHT))
goto wrong_size;
- gst_jpeg_dec_negotiate (dec, width, height, dec->cinfo.jpeg_color_space);
-
- ret = gst_buffer_pool_acquire_buffer (dec->pool, &outbuf, NULL);
- if (G_UNLIKELY (ret != GST_FLOW_OK))
- goto alloc_failed;
-
- if (!gst_video_frame_map (&frame, &dec->info, outbuf, GST_MAP_READWRITE))
- goto invalid_frame;
-
- GST_LOG_OBJECT (dec, "width %d, height %d", width, height);
-
- GST_BUFFER_TIMESTAMP (outbuf) = dec->next_ts;
+ return GST_FLOW_OK;
- if (dec->packetized && GST_CLOCK_TIME_IS_VALID (dec->next_ts)) {
- if (GST_CLOCK_TIME_IS_VALID (duration)) {
- /* use duration from incoming buffer for outgoing buffer */
- dec->next_ts += duration;
- } else if (GST_CLOCK_TIME_IS_VALID (dec->duration)) {
- duration = dec->duration;
- dec->next_ts += dec->duration;
- } else {
- duration = GST_CLOCK_TIME_NONE;
- dec->next_ts = GST_CLOCK_TIME_NONE;
- }
- } else {
- duration = GST_CLOCK_TIME_NONE;
- dec->next_ts = GST_CLOCK_TIME_NONE;
+/* ERRORS */
+wrong_size:
+ {
+ ret = GST_FLOW_ERROR;
+ GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
+ (_("Failed to decode JPEG image")),
+ ("Picture is too small or too big (%ux%u)", dec->cinfo.output_width,
+ dec->cinfo.output_height), ret);
+ return GST_FLOW_ERROR;
+ }
+components_not_supported:
+ {
+ ret = GST_FLOW_ERROR;
+ GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
+ (_("Failed to decode JPEG image")),
+ ("number of components not supported: %d (max 3)",
+ dec->cinfo.num_components), ret);
+ jpeg_abort_decompress (&dec->cinfo);
+ return GST_FLOW_ERROR;
+ }
+unsupported_colorspace:
+ {
+ ret = GST_FLOW_ERROR;
+ GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
+ (_("Failed to decode JPEG image")),
+ ("Picture has unknown or unsupported colourspace"), ret);
+ jpeg_abort_decompress (&dec->cinfo);
+ return GST_FLOW_ERROR;
+ }
+invalid_yuvrgbgrayscale:
+ {
+ ret = GST_FLOW_ERROR;
+ GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
+ (_("Failed to decode JPEG image")),
+ ("Picture is corrupt or unhandled YUV/RGB/grayscale layout"), ret);
+ jpeg_abort_decompress (&dec->cinfo);
+ return GST_FLOW_ERROR;
}
- GST_BUFFER_DURATION (outbuf) = duration;
+}
+
+static GstFlowReturn
+gst_jpeg_dec_decode (GstJpegDec * dec, GstVideoFrame * vframe, guint width,
+ guint height, guint field, guint num_fields)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
if (dec->cinfo.jpeg_color_space == JCS_RGB) {
- gst_jpeg_dec_decode_rgb (dec, &frame);
+ gst_jpeg_dec_decode_rgb (dec, vframe, field, num_fields);
} else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
- gst_jpeg_dec_decode_grayscale (dec, &frame);
+ gst_jpeg_dec_decode_grayscale (dec, vframe, field, num_fields);
} else {
- GST_LOG_OBJECT (dec, "decompressing (reqired scanline buffer height = %u)",
+ GST_LOG_OBJECT (dec, "decompressing (required scanline buffer height = %u)",
dec->cinfo.rec_outbuf_height);
/* For some widths jpeglib requires more horizontal padding than I420
|| dec->cinfo.comp_info[2].h_samp_factor != 1)) {
GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
"indirect decoding using extra buffer copy");
- gst_jpeg_dec_decode_indirect (dec, &frame, r_v, r_h,
- dec->cinfo.num_components);
+ gst_jpeg_dec_decode_indirect (dec, vframe,
+ dec->cinfo.comp_info[0].v_samp_factor,
+ dec->cinfo.comp_info[0].h_samp_factor, dec->cinfo.num_components,
+ field, num_fields);
} else {
- ret = gst_jpeg_dec_decode_direct (dec, &frame);
- if (G_UNLIKELY (ret != GST_FLOW_OK))
- goto decode_direct_failed;
+ ret = gst_jpeg_dec_decode_direct (dec, vframe, field, num_fields);
}
}
- GST_LOG_OBJECT (dec, "decompressing finished");
- jpeg_finish_decompress (&dec->cinfo);
+ GST_LOG_OBJECT (dec, "decompressing finished: %s", gst_flow_get_name (ret));
+
+ if (G_UNLIKELY (ret != GST_FLOW_OK)) {
+ jpeg_abort_decompress (&dec->cinfo);
+ } else {
+ jpeg_finish_decompress (&dec->cinfo);
+ }
+
+ return ret;
+}
+
+static GstFlowReturn
+gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec, GstVideoCodecFrame * frame)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstJpegDec *dec = (GstJpegDec *) bdec;
+ GstVideoFrame vframe;
+ gint num_fields; /* number of fields (1 or 2) */
+ gint output_height; /* height of output image (one or two fields) */
+ gint height; /* height of current frame (whole image or a field) */
+ gint width;
+ guint code;
+ gboolean need_unmap = TRUE;
+ GstVideoCodecState *state = NULL;
+ gboolean release_frame = TRUE;
+ gboolean has_eoi;
+ guint8 *data;
+ gsize nbytes;
+
+ gst_buffer_map (frame->input_buffer, &dec->current_frame_map, GST_MAP_READ);
+
+  data = dec->current_frame_map.data;
+  nbytes = dec->current_frame_map.size;
+  /* TRUE iff the buffer already ends with an EOI marker (0xff 0xd9).
+   * NOTE(review): assumes nbytes >= 2 — confirm that empty/1-byte input
+   * buffers cannot reach this point. */
+  has_eoi = ((data[nbytes - 2] == 0xff) && (data[nbytes - 1] == 0xd9));
+
+  /* some cameras fail to send an end-of-image marker (EOI),
+   * add it if that is the case. */
+  if (!has_eoi) {
+ GstMapInfo map;
+ GstBuffer *eoibuf = gst_buffer_new_and_alloc (2);
+
+ /* unmap, will add EOI and remap at the end */
+ gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
+
+ gst_buffer_map (eoibuf, &map, GST_MAP_WRITE);
+ map.data[0] = 0xff;
+ map.data[1] = 0xd9;
+ gst_buffer_unmap (eoibuf, &map);
+
+ /* append to input buffer, and remap */
+ frame->input_buffer = gst_buffer_append (frame->input_buffer, eoibuf);
+
+ gst_buffer_map (frame->input_buffer, &dec->current_frame_map, GST_MAP_READ);
+ GST_DEBUG ("fixup EOI marker added");
+ }
- gst_video_frame_unmap (&frame);
+ dec->current_frame = frame;
+ dec->cinfo.src->next_input_byte = dec->current_frame_map.data;
+ dec->cinfo.src->bytes_in_buffer = dec->current_frame_map.size;
- /* Clipping */
- if (dec->segment.format == GST_FORMAT_TIME) {
- guint64 start, stop, clip_start, clip_stop;
+ if (setjmp (dec->jerr.setjmp_buffer)) {
+ code = dec->jerr.pub.msg_code;
- GST_LOG_OBJECT (dec, "Attempting clipping");
+ if (code == JERR_INPUT_EOF) {
+ GST_DEBUG ("jpeg input EOF error, we probably need more data");
+ goto need_more_data;
+ }
+ goto decode_error;
+ }
- start = GST_BUFFER_TIMESTAMP (outbuf);
- if (GST_BUFFER_DURATION (outbuf) == GST_CLOCK_TIME_NONE)
- stop = start;
- else
- stop = start + GST_BUFFER_DURATION (outbuf);
+ /* read header and check values */
+ ret = gst_jpeg_dec_prepare_decode (dec);
+ if (G_UNLIKELY (ret == GST_FLOW_ERROR))
+ goto done;
- if (gst_segment_clip (&dec->segment, GST_FORMAT_TIME,
- start, stop, &clip_start, &clip_stop)) {
- GST_LOG_OBJECT (dec, "Clipping start to %" GST_TIME_FORMAT,
- GST_TIME_ARGS (clip_start));
- GST_BUFFER_TIMESTAMP (outbuf) = clip_start;
- if (GST_BUFFER_DURATION (outbuf) != GST_CLOCK_TIME_NONE) {
- GST_LOG_OBJECT (dec, "Clipping duration to %" GST_TIME_FORMAT,
- GST_TIME_ARGS (clip_stop - clip_start));
- GST_BUFFER_DURATION (outbuf) = clip_stop - clip_start;
- }
- } else
- goto drop_buffer;
+ width = dec->cinfo.output_width;
+ height = dec->cinfo.output_height;
+
+ /* is it interlaced MJPEG? (we really don't want to scan the jpeg data
+ * to see if there are two SOF markers in the packet to detect this) */
+ if (gst_video_decoder_get_packetized (bdec) &&
+ dec->input_state->info.height > height &&
+ dec->input_state->info.height <= (height * 2)
+ && dec->input_state->info.width == width) {
+ GST_LOG_OBJECT (dec,
+ "looks like an interlaced image: "
+ "input width/height of %dx%d with JPEG frame width/height of %dx%d",
+ dec->input_state->info.width, dec->input_state->info.height, width,
+ height);
+ output_height = dec->input_state->info.height;
+ height = dec->input_state->info.height / 2;
+ num_fields = 2;
+ GST_LOG_OBJECT (dec, "field height=%d", height);
+ } else {
+ output_height = height;
+ num_fields = 1;
+ }
+
+ gst_jpeg_dec_negotiate (dec, width, output_height,
+ dec->cinfo.jpeg_color_space, num_fields == 2);
+
+ state = gst_video_decoder_get_output_state (bdec);
+ ret = gst_video_decoder_allocate_output_frame (bdec, frame);
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
+ goto alloc_failed;
+
+ if (!gst_video_frame_map (&vframe, &state->info, frame->output_buffer,
+ GST_MAP_READWRITE))
+ goto alloc_failed;
+
+ if (setjmp (dec->jerr.setjmp_buffer)) {
+ code = dec->jerr.pub.msg_code;
+ gst_video_frame_unmap (&vframe);
+ goto decode_error;
}
- /* reset error count on successful decode */
- dec->error_count = 0;
+ GST_LOG_OBJECT (dec, "width %d, height %d, fields %d", width, output_height,
+ num_fields);
- ++dec->good_count;
+ ret = gst_jpeg_dec_decode (dec, &vframe, width, height, 1, num_fields);
+ if (G_UNLIKELY (ret != GST_FLOW_OK)) {
+ gst_video_frame_unmap (&vframe);
+ goto decode_failed;
+ }
+
+ if (setjmp (dec->jerr.setjmp_buffer)) {
+ code = dec->jerr.pub.msg_code;
+ gst_video_frame_unmap (&vframe);
+ goto decode_error;
+ }
+
+ /* decode second field if there is one */
+ if (num_fields == 2) {
+ GstVideoFormat field2_format;
- GST_LOG_OBJECT (dec, "pushing buffer (ts=%" GST_TIME_FORMAT ", dur=%"
- GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
- GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)));
+ /* skip any chunk or padding bytes before the next SOI marker; both fields
+ * are in one single buffer here, so direct access should be fine here */
+ while (dec->jsrc.pub.bytes_in_buffer > 2 &&
+ GST_READ_UINT16_BE (dec->jsrc.pub.next_input_byte) != 0xffd8) {
+ --dec->jsrc.pub.bytes_in_buffer;
+ ++dec->jsrc.pub.next_input_byte;
+ }
+
+ if (gst_jpeg_dec_prepare_decode (dec) != GST_FLOW_OK) {
+ GST_WARNING_OBJECT (dec, "problem reading jpeg header of 2nd field");
+ /* FIXME: post a warning message here? */
+ gst_video_frame_unmap (&vframe);
+ goto decode_failed;
+ }
- ret = gst_pad_push (dec->srcpad, outbuf);
+ /* check if format has changed for the second field */
+ switch (dec->cinfo.jpeg_color_space) {
+ case JCS_RGB:
+ field2_format = GST_VIDEO_FORMAT_RGB;
+ break;
+ case JCS_GRAYSCALE:
+ field2_format = GST_VIDEO_FORMAT_GRAY8;
+ break;
+ default:
+ field2_format = GST_VIDEO_FORMAT_I420;
+ break;
+ }
+
+ GST_LOG_OBJECT (dec,
+ "got for second field of interlaced image: "
+ "input width/height of %dx%d with JPEG frame width/height of %dx%d",
+ dec->input_state->info.width, dec->input_state->info.height,
+ dec->cinfo.output_width, dec->cinfo.output_height);
+
+ if (dec->cinfo.output_width != GST_VIDEO_INFO_WIDTH (&state->info) ||
+ GST_VIDEO_INFO_HEIGHT (&state->info) <= dec->cinfo.output_height ||
+ GST_VIDEO_INFO_HEIGHT (&state->info) > (dec->cinfo.output_height * 2) ||
+ field2_format != GST_VIDEO_INFO_FORMAT (&state->info)) {
+ GST_WARNING_OBJECT (dec, "second field has different format than first");
+ gst_video_frame_unmap (&vframe);
+ goto decode_failed;
+ }
+
+ ret = gst_jpeg_dec_decode (dec, &vframe, width, height, 2, 2);
+ if (G_UNLIKELY (ret != GST_FLOW_OK)) {
+ gst_video_frame_unmap (&vframe);
+ goto decode_failed;
+ }
+ }
+ gst_video_frame_unmap (&vframe);
+
+ gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
+ ret = gst_video_decoder_finish_frame (bdec, frame);
+ release_frame = FALSE;
+ need_unmap = FALSE;
-skip_decoding:
done:
- gst_adapter_flush (dec->adapter, dec->rem_img_len);
exit:
- if (G_UNLIKELY (ret == GST_FLOW_ERROR)) {
- jpeg_abort_decompress (&dec->cinfo);
- ret = gst_jpeg_dec_post_error_or_warning (dec);
- }
+ if (need_unmap)
+ gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
+
+ if (release_frame)
+ gst_video_decoder_release_frame (bdec, frame);
+
+ if (state)
+ gst_video_codec_state_unref (state);
return ret;
need_more_data:
{
GST_LOG_OBJECT (dec, "we need more data");
- if (outbuf) {
- gst_buffer_unref (outbuf);
- outbuf = NULL;
- }
ret = GST_FLOW_OK;
goto exit;
}
/* ERRORS */
-wrong_size:
- {
- gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
- "Picture is too small or too big (%ux%u)", width, height);
- ret = GST_FLOW_ERROR;
- goto done;
- }
decode_error:
{
gchar err_msg[JMSG_LENGTH_MAX];
dec->jerr.pub.format_message ((j_common_ptr) (&dec->cinfo), err_msg);
- gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
- "Decode error #%u: %s", code, err_msg);
+ GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
+ (_("Failed to decode JPEG image")), ("Decode error #%u: %s", code,
+ err_msg), ret);
+
+ gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
+ gst_video_decoder_drop_frame (bdec, frame);
+ release_frame = FALSE;
+ need_unmap = FALSE;
+ jpeg_abort_decompress (&dec->cinfo);
- if (outbuf) {
- gst_buffer_unref (outbuf);
- outbuf = NULL;
- }
- ret = GST_FLOW_ERROR;
goto done;
}
-decode_direct_failed:
+decode_failed:
{
/* already posted an error message */
- jpeg_abort_decompress (&dec->cinfo);
- gst_buffer_replace (&outbuf, NULL);
goto done;
}
alloc_failed:
GST_DEBUG_OBJECT (dec, "failed to alloc buffer, reason %s", reason);
/* Reset for next time */
jpeg_abort_decompress (&dec->cinfo);
- if (ret != GST_FLOW_UNEXPECTED && ret != GST_FLOW_WRONG_STATE &&
+ if (ret != GST_FLOW_EOS && ret != GST_FLOW_FLUSHING &&
ret != GST_FLOW_NOT_LINKED) {
- gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
- "Buffer allocation failed, reason: %s", reason);
+ GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
+ (_("Failed to decode JPEG image")),
+ ("Buffer allocation failed, reason: %s", reason), ret);
+ jpeg_abort_decompress (&dec->cinfo);
}
goto exit;
}
-invalid_frame:
- {
- jpeg_abort_decompress (&dec->cinfo);
- gst_buffer_unref (outbuf);
- ret = GST_FLOW_OK;
- goto exit;
- }
-drop_buffer:
- {
- GST_WARNING_OBJECT (dec, "Outgoing buffer is outside configured segment");
- gst_buffer_unref (outbuf);
- ret = GST_FLOW_OK;
- goto exit;
- }
-components_not_supported:
- {
- gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
- "number of components not supported: %d (max 3)",
- dec->cinfo.num_components);
- ret = GST_FLOW_ERROR;
- goto done;
- }
-unsupported_colorspace:
- {
- gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
- "Picture has unknown or unsupported colourspace");
- ret = GST_FLOW_ERROR;
- goto done;
- }
-invalid_yuvrgbgrayscale:
- {
- gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
- "Picture is corrupt or unhandled YUV/RGB/grayscale layout");
- ret = GST_FLOW_ERROR;
- goto done;
- }
}
static gboolean
-gst_jpeg_dec_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
+gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query)
{
- GstJpegDec *dec;
- gboolean res;
+ GstBufferPool *pool = NULL;
+ GstStructure *config;
- dec = GST_JPEG_DEC (parent);
+ if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query))
+ return FALSE;
- switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_QOS:{
- GstQOSType type;
- GstClockTimeDiff diff;
- GstClockTime timestamp;
- gdouble proportion;
+ if (gst_query_get_n_allocation_pools (query) > 0)
+ gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
- gst_event_parse_qos (event, &type, &proportion, &diff, ×tamp);
- gst_jpeg_dec_update_qos (dec, proportion, diff, timestamp);
- break;
- }
- default:
- break;
- }
+ if (pool == NULL)
+ return FALSE;
- res = gst_pad_push_event (dec->sinkpad, event);
+ config = gst_buffer_pool_get_config (pool);
+ if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
+ gst_buffer_pool_config_add_option (config,
+ GST_BUFFER_POOL_OPTION_VIDEO_META);
+ }
+ gst_buffer_pool_set_config (pool, config);
+ gst_object_unref (pool);
- return res;
+ return TRUE;
}
static gboolean
-gst_jpeg_dec_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+gst_jpeg_dec_sink_event (GstVideoDecoder * bdec, GstEvent * event)
{
- gboolean ret = TRUE, forward = TRUE;
- GstJpegDec *dec = GST_JPEG_DEC (parent);
+ const GstSegment *segment;
- GST_DEBUG_OBJECT (dec, "event : %s", GST_EVENT_TYPE_NAME (event));
-
- switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_FLUSH_STOP:
- GST_DEBUG_OBJECT (dec, "Aborting decompress");
- jpeg_abort_decompress (&dec->cinfo);
- gst_segment_init (&dec->segment, GST_FORMAT_UNDEFINED);
- gst_adapter_clear (dec->adapter);
- g_free (dec->cur_buf);
- dec->cur_buf = NULL;
- dec->parse_offset = 0;
- dec->parse_entropy_len = 0;
- dec->parse_resync = FALSE;
- gst_jpeg_dec_reset_qos (dec);
- break;
- case GST_EVENT_SEGMENT:
- gst_event_copy_segment (event, &dec->segment);
- GST_DEBUG_OBJECT (dec, "Got NEWSEGMENT %" GST_SEGMENT_FORMAT,
- &dec->segment);
- break;
- case GST_EVENT_CAPS:
- {
- GstCaps *caps;
+ if (GST_EVENT_TYPE (event) != GST_EVENT_SEGMENT)
+ goto done;
- gst_event_parse_caps (event, &caps);
- ret = gst_jpeg_dec_setcaps (dec, caps);
- forward = FALSE;
- break;
- }
- default:
- break;
- }
+ gst_event_parse_segment (event, &segment);
- if (forward)
- ret = gst_pad_push_event (dec->srcpad, event);
+ if (segment->format == GST_FORMAT_TIME)
+ gst_video_decoder_set_packetized (bdec, TRUE);
else
- gst_event_unref (event);
+ gst_video_decoder_set_packetized (bdec, FALSE);
- return ret;
+done:
+ return GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (bdec, event);
}
static gboolean
-gst_jpeg_dec_sink_query (GstPad * pad, GstObject * parent, GstQuery * query)
+gst_jpeg_dec_start (GstVideoDecoder * bdec)
{
- gboolean res = FALSE;
-
- switch (GST_QUERY_TYPE (query)) {
- case GST_QUERY_CAPS:
- {
- GstCaps *filter, *caps;
-
- gst_query_parse_caps (query, &filter);
- caps = gst_jpeg_dec_getcaps (pad, filter);
- gst_query_set_caps_result (query, caps);
- gst_caps_unref (caps);
- res = TRUE;
- break;
- }
- default:
- res = gst_pad_query_default (pad, parent, query);
- break;
- }
- return res;
+ GstJpegDec *dec = (GstJpegDec *) bdec;
+
+ dec->saw_header = FALSE;
+ dec->parse_entropy_len = 0;
+ dec->parse_resync = FALSE;
+
+ gst_video_decoder_set_packetized (bdec, FALSE);
+
+ return TRUE;
+}
+
+static gboolean
+gst_jpeg_dec_flush (GstVideoDecoder * bdec)
+{
+ GstJpegDec *dec = (GstJpegDec *) bdec;
+
+ jpeg_abort_decompress (&dec->cinfo);
+ dec->parse_entropy_len = 0;
+ dec->parse_resync = FALSE;
+ dec->saw_header = FALSE;
+
+ return TRUE;
}
static void
case PROP_IDCT_METHOD:
dec->idct_method = g_value_get_enum (value);
break;
+#ifndef GST_REMOVE_DEPRECATED
case PROP_MAX_ERRORS:
g_atomic_int_set (&dec->max_errors, g_value_get_int (value));
break;
-
+#endif
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
case PROP_IDCT_METHOD:
g_value_set_enum (value, dec->idct_method);
break;
+#ifndef GST_REMOVE_DEPRECATED
case PROP_MAX_ERRORS:
g_value_set_int (value, g_atomic_int_get (&dec->max_errors));
break;
-
+#endif
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
-static GstStateChangeReturn
-gst_jpeg_dec_change_state (GstElement * element, GstStateChange transition)
+static gboolean
+gst_jpeg_dec_stop (GstVideoDecoder * bdec)
{
- GstStateChangeReturn ret;
- GstJpegDec *dec;
+ GstJpegDec *dec = (GstJpegDec *) bdec;
- dec = GST_JPEG_DEC (element);
-
- switch (transition) {
- case GST_STATE_CHANGE_READY_TO_PAUSED:
- dec->error_count = 0;
- dec->good_count = 0;
- dec->in_fps_n = 0;
- dec->in_fps_d = 1;
- gst_video_info_init (&dec->info);
- dec->clrspc = -1;
- dec->packetized = FALSE;
- dec->next_ts = 0;
- dec->discont = TRUE;
- dec->parse_offset = 0;
- dec->parse_entropy_len = 0;
- dec->parse_resync = FALSE;
- dec->cur_buf = NULL;
- gst_segment_init (&dec->segment, GST_FORMAT_UNDEFINED);
- gst_jpeg_dec_reset_qos (dec);
- default:
- break;
- }
+ gst_jpeg_dec_free_buffers (dec);
- ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
- if (ret != GST_STATE_CHANGE_SUCCESS)
- return ret;
-
- switch (transition) {
- case GST_STATE_CHANGE_PAUSED_TO_READY:
- gst_adapter_clear (dec->adapter);
- g_free (dec->cur_buf);
- dec->cur_buf = NULL;
- gst_jpeg_dec_free_buffers (dec);
- if (dec->pool) {
- gst_buffer_pool_set_active (dec->pool, FALSE);
- gst_object_unref (dec->pool);
- }
- dec->pool = NULL;
- break;
- default:
- break;
- }
-
- return ret;
+ return TRUE;
}