2 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
3 * Copyright (C) <2009> Tim-Philipp Müller <tim centricular net>
4 * Copyright (C) 2012 Collabora Ltd.
5 * Author : Edward Hervey <edward@collabora.com>
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Library General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Library General Public License for more details.
17 * You should have received a copy of the GNU Library General Public
18 * License along with this library; if not, write to the
19 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
20 * Boston, MA 02111-1307, USA.
24 * SECTION:element-jpegdec
26 * Decodes jpeg images.
29 * <title>Example launch line</title>
31 * gst-launch -v v4l2src ! jpegdec ! ffmpegcolorspace ! xvimagesink
32 * ]| The above pipeline reads a motion JPEG stream from a v4l2 camera
33 * and renders it to the screen.
42 #include "gstjpegdec.h"
44 #include <gst/video/video.h>
45 #include <gst/video/gstvideometa.h>
46 #include <gst/video/gstvideopool.h>
47 #include "gst/gst-i18n-plugin.h"
51 #define MAX_WIDTH 65535
53 #define MAX_HEIGHT 65535
55 #define CINFO_GET_JPEGDEC(cinfo_ptr) \
56 (((struct GstJpegDecSourceMgr*)((cinfo_ptr)->src))->dec)
58 #define JPEG_DEFAULT_IDCT_METHOD JDCT_FASTEST
59 #define JPEG_DEFAULT_MAX_ERRORS 0
/* Source pad: raw video in any of the formats the decoder can emit
 * (I420 for YCbCr JPEGs, RGB variants, GRAY8 for grayscale). */
69 static GstStaticPadTemplate gst_jpeg_dec_src_pad_template =
70 GST_STATIC_PAD_TEMPLATE ("src",
73 GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
74 ("{ I420, RGB, BGR, RGBx, xRGB, BGRx, xBGR, GRAY8 }"))
/* Sink pad: image/jpeg with bounded dimensions; sof-marker lists the
 * SOF (start-of-frame) marker types this decoder accepts. */
78 /* FIXME: sof-marker is for IJG libjpeg 8, should be different for 6.2 */
79 static GstStaticPadTemplate gst_jpeg_dec_sink_pad_template =
80 GST_STATIC_PAD_TEMPLATE ("sink",
83 GST_STATIC_CAPS ("image/jpeg, "
84 "width = (int) [ " G_STRINGIFY (MIN_WIDTH) ", " G_STRINGIFY (MAX_WIDTH)
85 " ], " "height = (int) [ " G_STRINGIFY (MIN_HEIGHT) ", "
86 G_STRINGIFY (MAX_HEIGHT) " ], framerate = (fraction) [ 0/1, MAX ], "
87 "sof-marker = (int) { 0, 1, 2, 5, 6, 7, 9, 10, 13, 14 }")
/* Element debug category plus a local handle on the shared
 * GST_PERFORMANCE category (fetched in class_init). */
90 GST_DEBUG_CATEGORY_STATIC (jpeg_dec_debug);
91 #define GST_CAT_DEFAULT jpeg_dec_debug
92 GST_DEBUG_CATEGORY_STATIC (GST_CAT_PERFORMANCE);
/* GObject property accessors (defined elsewhere in this file). */
94 static void gst_jpeg_dec_set_property (GObject * object, guint prop_id,
95 const GValue * value, GParamSpec * pspec);
96 static void gst_jpeg_dec_get_property (GObject * object, guint prop_id,
97 GValue * value, GParamSpec * pspec);
/* GstVideoDecoder virtual method implementations. */
99 static gboolean gst_jpeg_dec_set_format (GstVideoDecoder * dec,
100 GstVideoCodecState * state);
101 static gboolean gst_jpeg_dec_start (GstVideoDecoder * bdec);
102 static gboolean gst_jpeg_dec_stop (GstVideoDecoder * bdec);
103 static gboolean gst_jpeg_dec_reset (GstVideoDecoder * bdec, gboolean hard);
104 static GstFlowReturn gst_jpeg_dec_parse (GstVideoDecoder * bdec,
105 GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos);
106 static GstFlowReturn gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec,
107 GstVideoCodecFrame * frame);
108 static gboolean gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec,
/* Register GstJpegDec as a GstVideoDecoder subclass. */
111 #define gst_jpeg_dec_parent_class parent_class
112 G_DEFINE_TYPE (GstJpegDec, gst_jpeg_dec, GST_TYPE_VIDEO_DECODER);
/* GObject finalize: tear down the libjpeg decompression context and
 * chain up to the parent class. */
115 gst_jpeg_dec_finalize (GObject * object)
117 GstJpegDec *dec = GST_JPEG_DEC (object);
119 jpeg_destroy_decompress (&dec->cinfo);
121 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Class init: install properties, pad templates, element metadata and
 * hook up the GstVideoDecoder vfuncs. */
125 gst_jpeg_dec_class_init (GstJpegDecClass * klass)
127 GObjectClass *gobject_class;
128 GstElementClass *element_class;
129 GstVideoDecoderClass *vdec_class;
131 gobject_class = (GObjectClass *) klass;
132 element_class = (GstElementClass *) klass;
133 vdec_class = (GstVideoDecoderClass *) klass;
135 parent_class = g_type_class_peek_parent (klass);
137 gobject_class->finalize = gst_jpeg_dec_finalize;
138 gobject_class->set_property = gst_jpeg_dec_set_property;
139 gobject_class->get_property = gst_jpeg_dec_get_property;
/* idct-method: which libjpeg IDCT algorithm to use (speed/quality). */
141 g_object_class_install_property (gobject_class, PROP_IDCT_METHOD,
142 g_param_spec_enum ("idct-method", "IDCT Method",
143 "The IDCT algorithm to use", GST_TYPE_IDCT_METHOD,
144 JPEG_DEFAULT_IDCT_METHOD,
145 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
148 * GstJpegDec:max-errors
150 * Error out after receiving N consecutive decoding errors
151 * (-1 = never error out, 0 = automatic, 1 = fail on first error, etc.)
155 g_object_class_install_property (gobject_class, PROP_MAX_ERRORS,
156 g_param_spec_int ("max-errors", "Maximum Consecutive Decoding Errors",
157 "Error out after receiving N consecutive decoding errors "
158 "(-1 = never fail, 0 = automatic, 1 = fail on first error)",
159 -1, G_MAXINT, JPEG_DEFAULT_MAX_ERRORS,
160 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
162 gst_element_class_add_pad_template (element_class,
163 gst_static_pad_template_get (&gst_jpeg_dec_src_pad_template));
164 gst_element_class_add_pad_template (element_class,
165 gst_static_pad_template_get (&gst_jpeg_dec_sink_pad_template));
166 gst_element_class_set_details_simple (element_class, "JPEG image decoder",
167 "Codec/Decoder/Image",
168 "Decode images from JPEG format", "Wim Taymans <wim@fluendo.com>");
/* Wire up the base-class virtual methods implemented in this file. */
170 vdec_class->start = gst_jpeg_dec_start;
171 vdec_class->stop = gst_jpeg_dec_stop;
172 vdec_class->reset = gst_jpeg_dec_reset;
173 vdec_class->parse = gst_jpeg_dec_parse;
174 vdec_class->set_format = gst_jpeg_dec_set_format;
175 vdec_class->handle_frame = gst_jpeg_dec_handle_frame;
176 vdec_class->decide_allocation = gst_jpeg_dec_decide_allocation;
178 GST_DEBUG_CATEGORY_INIT (jpeg_dec_debug, "jpegdec", 0, "JPEG decoder");
179 GST_DEBUG_CATEGORY_GET (GST_CAT_PERFORMANCE, "GST_PERFORMANCE");
/* Drop any pending (not yet posted) decoder error state: frees the
 * stored message and clears the recorded origin function. */
183 gst_jpeg_dec_clear_error (GstJpegDec * dec)
185 g_free (dec->error_msg);
186 dec->error_msg = NULL;
188 dec->error_func = NULL;
/* Record a pending decoder error (va_list variant): logs it at WARNING
 * level and stores message/function/line so that
 * gst_jpeg_dec_post_error_or_warning() can post it on the bus later. */
192 gst_jpeg_dec_set_error_va (GstJpegDec * dec, const gchar * func, gint line,
193 const gchar * debug_msg_format, va_list args)
195 #ifndef GST_DISABLE_GST_DEBUG
196 gst_debug_log_valist (GST_CAT_DEFAULT, GST_LEVEL_WARNING, __FILE__, func,
197 line, (GObject *) dec, debug_msg_format, args);
/* Replace any previously stored message; a NULL format clears it. */
200 g_free (dec->error_msg);
201 if (debug_msg_format)
202 dec->error_msg = g_strdup_vprintf (debug_msg_format, args);
204 dec->error_msg = NULL;
206 dec->error_line = line;
207 dec->error_func = func;
/* Varargs convenience wrapper around gst_jpeg_dec_set_error_va().
 * NOTE(review): no matching va_end is visible in this chunk — confirm
 * it is present after the _va call. */
211 gst_jpeg_dec_set_error (GstJpegDec * dec, const gchar * func, gint line,
212 const gchar * debug_msg_format, ...)
216 va_start (va, debug_msg_format);
217 gst_jpeg_dec_set_error_va (dec, func, line, debug_msg_format, va);
/* Decide, based on the max-errors property and the running error count,
 * whether the pending error is posted as a WARNING (keep going,
 * GST_FLOW_OK) or an ERROR (abort, GST_FLOW_ERROR).  Consumes the
 * pending error state either way. */
222 gst_jpeg_dec_post_error_or_warning (GstJpegDec * dec)
228 max_errors = g_atomic_int_get (&dec->max_errors);
230 if (max_errors < 0) {
232 } else if (max_errors == 0) {
233 /* FIXME: do something more clever in "automatic mode" */
234 if (gst_video_decoder_get_packetized (GST_VIDEO_DECODER (dec))) {
/* Packetized input: tolerate up to 2 consecutive bad images. */
235 ret = (dec->error_count < 3) ? GST_FLOW_OK : GST_FLOW_ERROR;
237 ret = GST_FLOW_ERROR;
240 ret = (dec->error_count < max_errors) ? GST_FLOW_OK : GST_FLOW_ERROR;
243 GST_INFO_OBJECT (dec, "decoding error %d/%d (%s)", dec->error_count,
244 max_errors, (ret == GST_FLOW_OK) ? "ignoring error" : "erroring out");
/* gst_element_message_full takes ownership of the strings, so the
 * stored error_msg must not be freed again below. */
246 gst_element_message_full (GST_ELEMENT (dec),
247 (ret == GST_FLOW_OK) ? GST_MESSAGE_WARNING : GST_MESSAGE_ERROR,
248 GST_STREAM_ERROR, GST_STREAM_ERROR_DECODE,
249 g_strdup (_("Failed to decode JPEG image")), dec->error_msg,
250 __FILE__, dec->error_func, dec->error_line);
252 dec->error_msg = NULL;
253 gst_jpeg_dec_clear_error (dec);
/* libjpeg source-manager callback: hand libjpeg the entire mapped input
 * buffer of the frame currently being decoded. */
258 gst_jpeg_dec_fill_input_buffer (j_decompress_ptr cinfo)
262 dec = CINFO_GET_JPEGDEC (cinfo);
263 g_return_val_if_fail (dec != NULL, FALSE);
264 g_return_val_if_fail (dec->current_frame != NULL, FALSE);
265 g_return_val_if_fail (dec->current_frame_map.data != NULL, FALSE);
267 cinfo->src->next_input_byte = dec->current_frame_map.data;
268 cinfo->src->bytes_in_buffer = dec->current_frame_map.size;
/* libjpeg source-manager callback: nothing to do, log only. */
274 gst_jpeg_dec_init_source (j_decompress_ptr cinfo)
276 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "init_source");
/* libjpeg source-manager callback: skip num_bytes of input.  First
 * consumes what is left in libjpeg's current buffer; any remainder is
 * flushed from the adapter (clamped to what is actually available and
 * to the remaining image length). */
281 gst_jpeg_dec_skip_input_data (j_decompress_ptr cinfo, glong num_bytes)
283 GstJpegDec *dec = CINFO_GET_JPEGDEC (cinfo);
285 GST_DEBUG_OBJECT (dec, "skip %ld bytes", num_bytes);
287 if (num_bytes > 0 && cinfo->src->bytes_in_buffer >= num_bytes) {
288 cinfo->src->next_input_byte += (size_t) num_bytes;
289 cinfo->src->bytes_in_buffer -= (size_t) num_bytes;
292 else if (num_bytes > 0) {
/* Skip spans past the current buffer: drain it, then flush the rest
 * from the adapter. */
295 num_bytes -= cinfo->src->bytes_in_buffer;
296 cinfo->src->next_input_byte += (size_t) cinfo->src->bytes_in_buffer;
297 cinfo->src->bytes_in_buffer = 0;
299 available = gst_adapter_available (dec->adapter);
300 if (available < num_bytes || available < dec->rem_img_len) {
/* NOTE(review): format specifiers "%ld < %d or %u" — verify they
 * match the actual types of num_bytes/available/rem_img_len. */
301 GST_WARNING_OBJECT (dec, "Less bytes to skip than available in the "
302 "adapter or the remaining image length %ld < %d or %u",
303 num_bytes, available, dec->rem_img_len);
305 num_bytes = MIN (MIN (num_bytes, available), dec->rem_img_len);
306 gst_adapter_flush (dec->adapter, num_bytes);
307 dec->rem_img_len -= num_bytes;
/* libjpeg source-manager callback for restart-marker resync: log only. */
313 gst_jpeg_dec_resync_to_restart (j_decompress_ptr cinfo, gint desired)
315 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "resync_to_start");
/* libjpeg source-manager callback: nothing to release, log only. */
320 gst_jpeg_dec_term_source (j_decompress_ptr cinfo)
322 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "term_source");
/* libjpeg error-manager callback: suppress libjpeg's own stderr output;
 * errors are reported through GStreamer instead. */
327 gst_jpeg_dec_my_output_message (j_common_ptr cinfo)
329 return; /* do nothing */
/* libjpeg error-manager callback for trace/warning messages: ignored
 * (logging left commented out as it can be very noisy). */
333 gst_jpeg_dec_my_emit_message (j_common_ptr cinfo, int msg_level)
335 /* GST_LOG_OBJECT (CINFO_GET_JPEGDEC (&cinfo), "msg_level=%d", msg_level); */
/* libjpeg fatal-error callback: instead of letting libjpeg call exit(),
 * longjmp back to the setjmp point in the decode path so the element
 * can report the error and continue. */
340 gst_jpeg_dec_my_error_exit (j_common_ptr cinfo)
342 struct GstJpegDecErrorMgr *err_mgr = (struct GstJpegDecErrorMgr *) cinfo->err;
344 (*cinfo->err->output_message) (cinfo);
345 longjmp (err_mgr->setjmp_buffer, 1);
/* Instance init: set up pads, the libjpeg decompress context with our
 * custom error and source managers, and property defaults. */
349 gst_jpeg_dec_init (GstJpegDec * dec)
351 GST_DEBUG ("initializing");
/* NOTE(review): manual pad creation and gst_pad_set_setcaps_function
 * look like 0.10-era leftovers — GstVideoDecoder manages pads and caps
 * itself in 1.0; confirm this code is still intended/compilable. */
354 /* create the sink and src pads */
356 gst_pad_new_from_static_template (&gst_jpeg_dec_sink_pad_template,
358 gst_element_add_pad (GST_ELEMENT (dec), dec->sinkpad);
359 gst_pad_set_setcaps_function (dec->sinkpad,
360 GST_DEBUG_FUNCPTR (gst_jpeg_dec_setcaps));
363 gst_pad_new_from_static_template (&gst_jpeg_dec_src_pad_template, "src");
364 gst_pad_use_fixed_caps (dec->srcpad);
365 gst_element_add_pad (GST_ELEMENT (dec), dec->srcpad);
/* Install our error handlers before jpeg_create_decompress so any
 * failure during creation already goes through them. */
369 memset (&dec->cinfo, 0, sizeof (dec->cinfo));
370 memset (&dec->jerr, 0, sizeof (dec->jerr));
371 dec->cinfo.err = jpeg_std_error (&dec->jerr.pub);
372 dec->jerr.pub.output_message = gst_jpeg_dec_my_output_message;
373 dec->jerr.pub.emit_message = gst_jpeg_dec_my_emit_message;
374 dec->jerr.pub.error_exit = gst_jpeg_dec_my_error_exit;
376 jpeg_create_decompress (&dec->cinfo);
/* Custom source manager feeding libjpeg from our buffers/adapter. */
378 dec->cinfo.src = (struct jpeg_source_mgr *) &dec->jsrc;
379 dec->cinfo.src->init_source = gst_jpeg_dec_init_source;
380 dec->cinfo.src->fill_input_buffer = gst_jpeg_dec_fill_input_buffer;
381 dec->cinfo.src->skip_input_data = gst_jpeg_dec_skip_input_data;
382 dec->cinfo.src->resync_to_restart = gst_jpeg_dec_resync_to_restart;
383 dec->cinfo.src->term_source = gst_jpeg_dec_term_source;
386 /* init properties */
387 dec->idct_method = JPEG_DEFAULT_IDCT_METHOD;
388 dec->max_errors = JPEG_DEFAULT_MAX_ERRORS;
/* Scan the adapter for a JPEG SOI marker (0xffd8ff..).  If found, flush
 * any garbage before it so decoding starts at the header; if not found,
 * keep only the last few bytes (a marker could straddle the boundary). */
393 gst_jpeg_dec_ensure_header (GstJpegDec * dec)
398 av = gst_adapter_available (dec->adapter);
399 /* we expect at least 4 bytes, first of which start marker */
400 offset = gst_adapter_masked_scan_uint32 (dec->adapter, 0xffffff00, 0xffd8ff00,
402 if (G_UNLIKELY (offset < 0)) {
403 GST_DEBUG_OBJECT (dec, "No JPEG header in current buffer");
/* Keep the trailing 4 bytes in case the marker is split across input
 * buffers. */
406 gst_adapter_flush (dec->adapter, av - 4);
411 GST_LOG_OBJECT (dec, "Skipping %u bytes.", offset);
412 gst_adapter_flush (dec->adapter, offset);
414 GST_DEBUG_OBJECT (dec, "Found JPEG header");
/* TRUE for markers followed by entropy-coded data with no length field:
 * SOS (0xda) and the restart markers RST0-RST7 (0xd0-0xd7). */
420 static inline gboolean
421 gst_jpeg_dec_parse_tag_has_entropy_segment (guint8 tag)
423 if (tag == 0xda || (tag >= 0xd0 && tag <= 0xd7))
/* GstVideoDecoder::parse — walk the adapter marker by marker to find
 * complete JPEG images in a non-packetized byte stream.  Tracks partial
 * state (saw_header, parse_resync, parse_entropy_len) across calls.
 * Returns NEED_DATA until a full SOI..EOI image has been accumulated. */
429 gst_jpeg_dec_parse (GstVideoDecoder * bdec, GstVideoCodecFrame * frame,
430 GstAdapter * adapter, gboolean at_eos)
435 gint offset = 0, noffset;
436 GstJpegDec *dec = (GstJpegDec *) bdec;
438 /* FIXME : The overhead of using scan_uint32 is massive */
440 size = gst_adapter_available (adapter);
441 GST_DEBUG ("Parsing jpeg image data (%u bytes)", size);
444 GST_DEBUG ("Flushing all data out");
447 /* If we have leftover data, throw it away */
448 if (!dec->saw_header)
450 goto have_full_frame;
/* Locate the SOI marker (0xffd8) once per image. */
456 if (!dec->saw_header) {
458 /* we expect at least 4 bytes, first of which start marker */
460 gst_adapter_masked_scan_uint32 (adapter, 0xffff0000, 0xffd80000, 0,
463 GST_DEBUG ("ret:%d", ret);
468 gst_adapter_flush (adapter, ret);
471 dec->saw_header = TRUE;
/* Main marker loop: expect 0xff at 'offset', read the tag byte, then
 * either finish (EOI), restart (SOI), or skip the segment's payload. */
478 GST_DEBUG ("offset:%d, size:%d", offset, size);
481 gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
482 offset, size - offset, &value);
484 /* lost sync if 0xff marker not where expected */
485 if ((resync = (noffset != offset))) {
486 GST_DEBUG ("Lost sync at 0x%08x, resyncing", offset + 2);
488 /* may have marker, but could have been resyncing */
489 resync = resync || dec->parse_resync;
490 /* Skip over extra 0xff */
491 while ((noffset >= 0) && ((value & 0xff) == 0xff)) {
494 gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
495 noffset, size - noffset, &value);
497 /* enough bytes left for marker? (we need 0xNN after the 0xff) */
499 GST_DEBUG ("at end of input and no EOI marker found, need more data");
503 /* now lock on the marker we found */
505 value = value & 0xff;
507 GST_DEBUG ("0x%08x: EOI marker", offset + 2);
508 /* clear parse state */
509 dec->saw_header = FALSE;
510 dec->parse_resync = FALSE;
512 goto have_full_frame;
515 /* Skip this frame if we found another SOI marker */
516 GST_DEBUG ("0x%08x: SOI marker before EOI, skipping", offset + 2);
517 dec->parse_resync = FALSE;
518 /* FIXME : Need to skip data */
520 goto have_full_frame;
/* Restart markers carry no length field; continue scanning. */
524 if (value >= 0xd0 && value <= 0xd7)
527 /* peek tag and subsequent length */
528 if (offset + 2 + 4 > size)
531 gst_adapter_masked_scan_uint32_peek (adapter, 0x0, 0x0, offset + 2, 4,
533 frame_len = frame_len & 0xffff;
535 GST_DEBUG ("0x%08x: tag %02x, frame_len=%u", offset + 2, value, frame_len);
536 /* the frame length includes the 2 bytes for the length; here we want at
537 * least 2 more bytes at the end for an end marker */
538 if (offset + 2 + 2 + frame_len + 2 > size) {
/* SOS/RST: the payload is entropy-coded; find its end by scanning for
 * the next marker (0xff followed by a non-zero, non-stuffing byte). */
542 if (gst_jpeg_dec_parse_tag_has_entropy_segment (value)) {
543 guint eseglen = dec->parse_entropy_len;
545 GST_DEBUG ("0x%08x: finding entropy segment length (eseglen:%d)",
546 offset + 2, eseglen);
547 if (size < offset + 2 + frame_len + eseglen)
549 noffset = offset + 2 + frame_len + dec->parse_entropy_len;
551 GST_DEBUG ("noffset:%d, size:%d, size - noffset:%d",
552 noffset, size, size - noffset);
553 noffset = gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00,
554 0x0000ff00, noffset, size - noffset, &value);
/* Remember how far we scanned so the next call resumes there. */
557 dec->parse_entropy_len = size - offset - 4 - frame_len - 2;
560 if ((value & 0xff) != 0x00) {
561 eseglen = noffset - offset - frame_len - 2;
566 dec->parse_entropy_len = 0;
567 frame_len += eseglen;
568 GST_DEBUG ("entropy segment length=%u => frame_len=%u", eseglen,
572 /* check if we will still be in sync if we interpret
573 * this as a sync point and skip this frame */
574 noffset = offset + frame_len + 2;
575 noffset = gst_adapter_masked_scan_uint32 (adapter, 0x0000ff00, 0x0000ff00,
578 /* ignore and continue resyncing until we hit the end
579 * of our data or find a sync point that looks okay */
583 GST_DEBUG ("found sync at 0x%x", offset + 2);
586 /* Add current data to output buffer */
587 toadd += frame_len + 2;
588 offset += frame_len + 2;
/* Not enough data for a complete image yet. */
593 gst_video_decoder_add_to_frame (bdec, toadd);
594 return GST_VIDEO_DECODER_FLOW_NEED_DATA;
/* Complete image collected: hand it to the base class. */
598 gst_video_decoder_add_to_frame (bdec, toadd);
599 return gst_video_decoder_have_frame (bdec);
602 return GST_VIDEO_DECODER_FLOW_DROPPED;
606 /* shamelessly ripped from jpegutils.c in mjpegtools */
/* Install one Huffman table into the decompress struct, allocating the
 * slot if needed.  'bits' is the 17-entry code-length histogram, 'val'
 * the symbol values. */
608 add_huff_table (j_decompress_ptr dinfo,
609 JHUFF_TBL ** htblptr, const UINT8 * bits, const UINT8 * val)
610 /* Define a Huffman table */
614 if (*htblptr == NULL)
615 *htblptr = jpeg_alloc_huff_table ((j_common_ptr) dinfo);
619 /* Copy the number-of-symbols-of-each-code-length counts */
620 memcpy ((*htblptr)->bits, bits, sizeof ((*htblptr)->bits));
622 /* Validate the counts. We do this here mainly so we can copy the right
623 * number of symbols from the val[] array, without risking marching off
624 * the end of memory. jchuff.c will do a more thorough test later.
627 for (len = 1; len <= 16; len++)
628 nsymbols += bits[len];
629 if (nsymbols < 1 || nsymbols > 256)
630 g_error ("jpegutils.c: add_huff_table failed badly. ");
632 memcpy ((*htblptr)->huffval, val, nsymbols * sizeof (UINT8));
/* Install the four standard JPEG Huffman tables (DC/AC x luma/chroma)
 * from Annex K.3 of the JPEG spec — used for streams that omit DHT
 * segments (e.g. some MJPEG cameras). */
638 std_huff_tables (j_decompress_ptr dinfo)
639 /* Set up the standard Huffman tables (cf. JPEG standard section K.3) */
640 /* IMPORTANT: these are only valid for 8-bit data precision! */
642 static const UINT8 bits_dc_luminance[17] =
643 { /* 0-base */ 0, 0, 1, 5, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0 };
644 static const UINT8 val_dc_luminance[] =
645 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
647 static const UINT8 bits_dc_chrominance[17] =
648 { /* 0-base */ 0, 0, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0 };
649 static const UINT8 val_dc_chrominance[] =
650 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
652 static const UINT8 bits_ac_luminance[17] =
653 { /* 0-base */ 0, 0, 2, 1, 3, 3, 2, 4, 3, 5, 5, 4, 4, 0, 0, 1, 0x7d };
654 static const UINT8 val_ac_luminance[] =
655 { 0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12,
656 0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07,
657 0x22, 0x71, 0x14, 0x32, 0x81, 0x91, 0xa1, 0x08,
658 0x23, 0x42, 0xb1, 0xc1, 0x15, 0x52, 0xd1, 0xf0,
659 0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16,
660 0x17, 0x18, 0x19, 0x1a, 0x25, 0x26, 0x27, 0x28,
661 0x29, 0x2a, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39,
662 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49,
663 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59,
664 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69,
665 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79,
666 0x7a, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89,
667 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98,
668 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
669 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6,
670 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4, 0xc5,
671 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4,
672 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xe1, 0xe2,
673 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea,
674 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
678 static const UINT8 bits_ac_chrominance[17] =
679 { /* 0-base */ 0, 0, 2, 1, 2, 4, 4, 3, 4, 7, 5, 4, 4, 0, 1, 2, 0x77 };
680 static const UINT8 val_ac_chrominance[] =
681 { 0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21,
682 0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71,
683 0x13, 0x22, 0x32, 0x81, 0x08, 0x14, 0x42, 0x91,
684 0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33, 0x52, 0xf0,
685 0x15, 0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34,
686 0xe1, 0x25, 0xf1, 0x17, 0x18, 0x19, 0x1a, 0x26,
687 0x27, 0x28, 0x29, 0x2a, 0x35, 0x36, 0x37, 0x38,
688 0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
689 0x49, 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58,
690 0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
691 0x69, 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78,
692 0x79, 0x7a, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
693 0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96,
694 0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5,
695 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4,
696 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3,
697 0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2,
698 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda,
699 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9,
700 0xea, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
704 add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[0],
705 bits_dc_luminance, val_dc_luminance);
706 add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[0],
707 bits_ac_luminance, val_ac_luminance);
708 add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[1],
709 bits_dc_chrominance, val_dc_chrominance);
710 add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[1],
711 bits_ac_chrominance, val_ac_chrominance);
/* If the stream defined no Huffman tables at all, install the standard
 * ones so decoding can proceed.  Only triggers when all four slots are
 * empty, so streams with custom tables are untouched. */
717 guarantee_huff_tables (j_decompress_ptr dinfo)
719 if ((dinfo->dc_huff_tbl_ptrs[0] == NULL) &&
720 (dinfo->dc_huff_tbl_ptrs[1] == NULL) &&
721 (dinfo->ac_huff_tbl_ptrs[0] == NULL) &&
722 (dinfo->ac_huff_tbl_ptrs[1] == NULL)) {
723 GST_DEBUG ("Generating standard Huffman tables for this frame.");
724 std_huff_tables (dinfo);
/* GstVideoDecoder::set_format — decide packetized mode from the input
 * framerate and keep a ref on the new input state. */
729 gst_jpeg_dec_set_format (GstVideoDecoder * dec, GstVideoCodecState * state)
731 GstJpegDec *jpeg = GST_JPEG_DEC (dec);
732 GstVideoInfo *info = &state->info;
734 /* FIXME : previously jpegdec would handle input as packetized
735 * if the framerate was present. Here we consider it packetized if
736 * the fps is != 1/1 */
/* fps != 1/1 means N != 1 OR D != 1; '&&' here would wrongly treat
 * common rates like 30/1 as non-packetized. */
737 if (GST_VIDEO_INFO_FPS_N (info) != 1 || GST_VIDEO_INFO_FPS_D (info) != 1)
738 gst_video_decoder_set_packetized (dec, TRUE);
740 gst_video_decoder_set_packetized (dec, FALSE);
/* Replace the previously stored input state (drop old ref first). */
742 if (jpeg->input_state)
743 gst_video_codec_state_unref (jpeg->input_state);
744 jpeg->input_state = gst_video_codec_state_ref (state);
/* Horizontally downsample by 2 while copying: dest[i] = src[2*i].
 * Used to fit 4:2:2-style chroma rows into 4:2:0 output rows. */
752 hresamplecpy1 (guint8 * dest, const guint8 * src, guint len)
756 for (i = 0; i < len; ++i) {
757 /* equivalent to: dest[i] = src[i << 1] */
/* Free the 16 temporary row buffers per component used by the indirect
 * decode paths, and reset the recorded allocation width. */
766 gst_jpeg_dec_free_buffers (GstJpegDec * dec)
770 for (i = 0; i < 16; i++) {
771 g_free (dec->idr_y[i]);
772 g_free (dec->idr_u[i]);
773 g_free (dec->idr_v[i]);
774 dec->idr_y[i] = NULL;
775 dec->idr_u[i] = NULL;
776 dec->idr_v[i] = NULL;
779 dec->idr_width_allocated = 0;
/* (Re)allocate the 16 temporary row buffers per component to hold
 * maxrowbytes each; no-op if already sized correctly. */
782 static inline gboolean
783 gst_jpeg_dec_ensure_buffers (GstJpegDec * dec, guint maxrowbytes)
787 if (G_LIKELY (dec->idr_width_allocated == maxrowbytes))
790 /* FIXME: maybe just alloc one or three blocks altogether? */
791 for (i = 0; i < 16; i++) {
/* NOTE(review): assigning g_try_realloc's result directly loses the
 * old pointer on failure (classic realloc leak) — the NULL check
 * below then leaves earlier rows allocated but this one leaked.
 * Consider a temp-pointer pattern. */
792 dec->idr_y[i] = g_try_realloc (dec->idr_y[i], maxrowbytes);
793 dec->idr_u[i] = g_try_realloc (dec->idr_u[i], maxrowbytes);
794 dec->idr_v[i] = g_try_realloc (dec->idr_v[i], maxrowbytes);
796 if (G_UNLIKELY (!dec->idr_y[i] || !dec->idr_u[i] || !dec->idr_v[i])) {
797 GST_WARNING_OBJECT (dec, "out of memory, i=%d, bytes=%u", i, maxrowbytes);
802 dec->idr_width_allocated = maxrowbytes;
803 GST_LOG_OBJECT (dec, "allocated temp memory, %u bytes/row", maxrowbytes);
/* Decode a grayscale JPEG through the temporary row buffers, then copy
 * pixel by pixel into the (possibly strided/packed) output frame. */
808 gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, GstVideoFrame * frame)
811 guchar **scanarray[1] = { rows };
816 gint pstride, rstride;
818 GST_DEBUG_OBJECT (dec, "indirect decoding of grayscale");
820 width = GST_VIDEO_FRAME_WIDTH (frame);
821 height = GST_VIDEO_FRAME_HEIGHT (frame);
823 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
826 base[0] = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
827 pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
828 rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
/* Point the scanline array at our 16 preallocated row buffers. */
830 memcpy (rows, dec->idr_y, 16 * sizeof (gpointer));
834 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
835 if (G_LIKELY (lines > 0)) {
836 for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
840 for (k = 0; k < width; k++) {
841 base[0][p] = rows[j][k];
847 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Decode an RGB JPEG (no subsampling) through the temporary row
 * buffers, then interleave the three planes into the output frame
 * using its pixel/row strides. */
853 gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame)
855 guchar *r_rows[16], *g_rows[16], *b_rows[16];
856 guchar **scanarray[3] = { r_rows, g_rows, b_rows };
860 guint pstride, rstride;
863 GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");
865 width = GST_VIDEO_FRAME_WIDTH (frame);
866 height = GST_VIDEO_FRAME_HEIGHT (frame);
868 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
871 for (i = 0; i < 3; i++)
872 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
874 pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
875 rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
/* Reuse the Y/U/V scratch rows as R/G/B scratch rows. */
877 memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
878 memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
879 memcpy (b_rows, dec->idr_v, 16 * sizeof (gpointer));
883 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
884 if (G_LIKELY (lines > 0)) {
885 for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
889 for (k = 0; k < width; k++) {
890 base[0][p] = r_rows[j][k];
891 base[1][p] = g_rows[j][k];
892 base[2][p] = b_rows[j][k];
900 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Slow-path YUV decode: libjpeg writes into the temporary row buffers,
 * which are then memcpy'd row by row into the output planes.  Used when
 * width/sampling factors don't allow decoding directly into the frame.
 * r_v/r_h are the component-0 sampling factors. */
906 gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame, gint r_v,
909 guchar *y_rows[16], *u_rows[16], *v_rows[16];
910 guchar **scanarray[3] = { y_rows, u_rows, v_rows };
913 guchar *base[3], *last[3];
917 GST_DEBUG_OBJECT (dec,
918 "unadvantageous width or r_h, taking slow route involving memcpy");
920 width = GST_VIDEO_FRAME_WIDTH (frame);
921 height = GST_VIDEO_FRAME_HEIGHT (frame);
923 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
926 for (i = 0; i < 3; i++) {
927 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
928 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
929 /* make sure we don't make jpeglib write beyond our buffer,
930 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
931 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
932 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
935 memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer));
936 memcpy (u_rows, dec->idr_u, 16 * sizeof (gpointer));
937 memcpy (v_rows, dec->idr_v, 16 * sizeof (gpointer));
939 /* fill chroma components for grayscale */
941 GST_DEBUG_OBJECT (dec, "grayscale, filling chroma");
942 for (i = 0; i < 16; i++) {
/* BUGFIX: memset(ptr, value, size) — the value (0x80 = neutral
 * chroma) and size arguments were swapped, which filled only 0x80
 * bytes with the low byte of the rounded-up width instead of
 * filling the whole row with gray. */
943 memset (u_rows[i], 0x80, GST_ROUND_UP_32 (width));
944 memset (v_rows[i], 0x80, GST_ROUND_UP_32 (width));
948 for (i = 0; i < height; i += r_v * DCTSIZE) {
949 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, r_v * DCTSIZE);
950 if (G_LIKELY (lines > 0)) {
951 for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) {
952 if (G_LIKELY (base[0] <= last[0])) {
953 memcpy (base[0], y_rows[j], stride[0]);
954 base[0] += stride[0];
957 if (G_LIKELY (base[0] <= last[0])) {
958 memcpy (base[0], y_rows[j + 1], stride[0]);
959 base[0] += stride[0];
962 if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) {
964 memcpy (base[1], u_rows[k], stride[1]);
965 memcpy (base[2], v_rows[k], stride[2]);
966 } else if (r_h == 1) {
/* Chroma rows are wider than the output plane: 2x horizontal
 * downsample while copying. */
967 hresamplecpy1 (base[1], u_rows[k], stride[1]);
968 hresamplecpy1 (base[2], v_rows[k], stride[2]);
970 /* FIXME: implement (at least we avoid crashing by doing nothing) */
974 if (r_v == 2 || (k & 1) != 0) {
975 base[1] += stride[1];
976 base[2] += stride[2];
980 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Fast-path YUV decode: build per-component scanline pointer arrays
 * that point straight into the output planes so libjpeg writes the
 * frame without an intermediate copy.  Rejects vertical sampling
 * factors > 2; pointers are clamped to the last row so libjpeg can't
 * write past the buffer when height isn't MCU-aligned. */
986 gst_jpeg_dec_decode_direct (GstJpegDec * dec, GstVideoFrame * frame)
988 guchar **line[3]; /* the jpeg line buffer */
989 guchar *y[4 * DCTSIZE] = { NULL, }; /* alloc enough for the lines */
990 guchar *u[4 * DCTSIZE] = { NULL, }; /* r_v will be <4 */
991 guchar *v[4 * DCTSIZE] = { NULL, };
993 gint lines, v_samp[3];
994 guchar *base[3], *last[3];
1002 v_samp[0] = dec->cinfo.comp_info[0].v_samp_factor;
1003 v_samp[1] = dec->cinfo.comp_info[1].v_samp_factor;
1004 v_samp[2] = dec->cinfo.comp_info[2].v_samp_factor;
1006 if (G_UNLIKELY (v_samp[0] > 2 || v_samp[1] > 2 || v_samp[2] > 2))
1007 goto format_not_supported;
1009 height = GST_VIDEO_FRAME_HEIGHT (frame);
1011 for (i = 0; i < 3; i++) {
1012 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
1013 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
1014 /* make sure we don't make jpeglib write beyond our buffer,
1015 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
1016 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
1017 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
1020 /* let jpeglib decode directly into our final buffer */
1021 GST_DEBUG_OBJECT (dec, "decoding directly into output buffer");
1023 for (i = 0; i < height; i += v_samp[0] * DCTSIZE) {
1024 for (j = 0; j < (v_samp[0] * DCTSIZE); ++j) {
/* Luma: one output row per scanline, clamped to the last row. */
1026 line[0][j] = base[0] + (i + j) * stride[0];
1027 if (G_UNLIKELY (line[0][j] > last[0]))
1028 line[0][j] = last[0];
/* Chroma U: full vertical resolution vs. subsampled layout. */
1030 if (v_samp[1] == v_samp[0]) {
1031 line[1][j] = base[1] + ((i + j) / 2) * stride[1];
1032 } else if (j < (v_samp[1] * DCTSIZE)) {
1033 line[1][j] = base[1] + ((i / 2) + j) * stride[1];
1035 if (G_UNLIKELY (line[1][j] > last[1]))
1036 line[1][j] = last[1];
/* Chroma V: same scheme as U. */
1038 if (v_samp[2] == v_samp[0]) {
1039 line[2][j] = base[2] + ((i + j) / 2) * stride[2];
1040 } else if (j < (v_samp[2] * DCTSIZE)) {
1041 line[2][j] = base[2] + ((i / 2) + j) * stride[2];
1043 if (G_UNLIKELY (line[2][j] > last[2]))
1044 line[2][j] = last[2];
1047 lines = jpeg_read_raw_data (&dec->cinfo, line, v_samp[0] * DCTSIZE);
1048 if (G_UNLIKELY (!lines)) {
1049 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
1054 format_not_supported:
1056 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
1057 "Unsupported subsampling schema: v_samp factors: %u %u %u",
1058 v_samp[0], v_samp[1], v_samp[2]);
1059 return GST_FLOW_ERROR;
/* Configure the output state for the given dimensions and libjpeg
 * colorspace (RGB -> RGB, GRAYSCALE -> GRAY8, YCbCr -> I420).  Early
 * return if the currently configured output already matches. */
1064 gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc)
1066 GstVideoCodecState *outstate;
1068 GstVideoFormat format;
1072 format = GST_VIDEO_FORMAT_RGB;
1075 format = GST_VIDEO_FORMAT_GRAY8;
1078 format = GST_VIDEO_FORMAT_I420;
1082 /* Compare to currently configured output state */
1083 outstate = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (dec));
1085 info = &outstate->info;
1087 if (width == GST_VIDEO_INFO_WIDTH (info) &&
1088 height == GST_VIDEO_INFO_HEIGHT (info) &&
1089 format == GST_VIDEO_INFO_FORMAT (info)) {
1090 gst_video_codec_state_unref (outstate);
1093 gst_video_codec_state_unref (outstate);
1097 gst_video_decoder_set_output_state (GST_VIDEO_DECODER (dec), format,
1098 width, height, dec->input_state);
1100 gst_video_codec_state_unref (outstate);
1102 GST_DEBUG_OBJECT (dec, "max_v_samp_factor=%d", dec->cinfo.max_v_samp_factor);
1103 GST_DEBUG_OBJECT (dec, "max_h_samp_factor=%d", dec->cinfo.max_h_samp_factor);
/* GstVideoDecoder::handle_frame — main decode path: validate the JPEG
 * header, feed the mapped input to libjpeg, sanity-check the stream
 * parameters, negotiate output caps and decode into an allocated
 * output frame.  (Function continues beyond this chunk.) */
1106 static GstFlowReturn
1107 gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec, GstVideoCodecFrame * frame)
1109 GstFlowReturn ret = GST_FLOW_OK;
1110 GstJpegDec *dec = (GstJpegDec *) bdec;
1111 GstVideoFrame vframe;
1115 gboolean need_unmap = TRUE;
1116 GstVideoCodecState *state = NULL;
1120 if (!gst_jpeg_dec_ensure_header (dec))
1121 goto need_more_data;
1123 /* If we know that each input buffer contains data
1124 * for a whole jpeg image (e.g. MJPEG streams), just
1125 * do some sanity checking instead of parsing all of
1127 if (dec->packetized) {
1128 img_len = gst_adapter_available (dec->adapter);
1130 /* Parse jpeg image to handle jpeg input that
1131 * is not aligned to buffer boundaries */
1132 img_len = gst_jpeg_dec_parse_image_data (dec);
1135 goto need_more_data;
1136 } else if (img_len < 0) {
/* Negative length encodes "flush -img_len bytes of garbage". */
1137 gst_adapter_flush (dec->adapter, -img_len);
1143 dec->current_frame = frame;
1144 gst_buffer_map (frame->input_buffer, &dec->current_frame_map, GST_MAP_READ);
1145 gst_jpeg_dec_fill_input_buffer (&dec->cinfo);
/* Fatal libjpeg errors longjmp back here (see my_error_exit). */
1147 if (setjmp (dec->jerr.setjmp_buffer)) {
1148 code = dec->jerr.pub.msg_code;
1150 if (code == JERR_INPUT_EOF) {
1151 GST_DEBUG ("jpeg input EOF error, we probably need more data");
1152 goto need_more_data;
1158 hdr_ok = jpeg_read_header (&dec->cinfo, TRUE);
1159 if (G_UNLIKELY (hdr_ok != JPEG_HEADER_OK)) {
1160 GST_WARNING_OBJECT (dec, "reading the header failed, %d", hdr_ok);
1163 GST_LOG_OBJECT (dec, "num_components=%d", dec->cinfo.num_components);
1164 GST_LOG_OBJECT (dec, "jpeg_color_space=%d", dec->cinfo.jpeg_color_space);
1166 if (!dec->cinfo.num_components || !dec->cinfo.comp_info)
1167 goto components_not_supported;
1169 r_h = dec->cinfo.comp_info[0].h_samp_factor;
1170 r_v = dec->cinfo.comp_info[0].v_samp_factor;
1172 GST_LOG_OBJECT (dec, "r_h = %d, r_v = %d", r_h, r_v);
1174 if (dec->cinfo.num_components > 3)
1175 goto components_not_supported;
1177 /* verify color space expectation to avoid going *boom* or bogus output */
1178 if (dec->cinfo.jpeg_color_space != JCS_YCbCr &&
1179 dec->cinfo.jpeg_color_space != JCS_GRAYSCALE &&
1180 dec->cinfo.jpeg_color_space != JCS_RGB)
1181 goto unsupported_colorspace;
1183 #ifndef GST_DISABLE_GST_DEBUG
1187 for (i = 0; i < dec->cinfo.num_components; ++i) {
1188 GST_LOG_OBJECT (dec, "[%d] h_samp_factor=%d, v_samp_factor=%d, cid=%d",
1189 i, dec->cinfo.comp_info[i].h_samp_factor,
1190 dec->cinfo.comp_info[i].v_samp_factor,
1191 dec->cinfo.comp_info[i].component_id);
1196 /* prepare for raw output */
1197 dec->cinfo.do_fancy_upsampling = FALSE;
1198 dec->cinfo.do_block_smoothing = FALSE;
1199 dec->cinfo.out_color_space = dec->cinfo.jpeg_color_space;
1200 dec->cinfo.dct_method = dec->idct_method;
1201 dec->cinfo.raw_data_out = TRUE;
1203 GST_LOG_OBJECT (dec, "starting decompress");
1204 guarantee_huff_tables (&dec->cinfo);
1205 if (!jpeg_start_decompress (&dec->cinfo)) {
1206 GST_WARNING_OBJECT (dec, "failed to start decompression cycle");
1209 /* sanity checks to get safe and reasonable output */
1210 switch (dec->cinfo.jpeg_color_space) {
1212 if (dec->cinfo.num_components != 1)
1213 goto invalid_yuvrgbgrayscale;
1216 if (dec->cinfo.num_components != 3 || dec->cinfo.max_v_samp_factor > 1 ||
1217 dec->cinfo.max_h_samp_factor > 1)
1218 goto invalid_yuvrgbgrayscale;
1221 if (dec->cinfo.num_components != 3 ||
1222 r_v > 2 || r_v < dec->cinfo.comp_info[0].v_samp_factor ||
1223 r_v < dec->cinfo.comp_info[1].v_samp_factor ||
1224 r_h < dec->cinfo.comp_info[0].h_samp_factor ||
1225 r_h < dec->cinfo.comp_info[1].h_samp_factor)
1226 goto invalid_yuvrgbgrayscale;
1229 g_assert_not_reached ();
1233 width = dec->cinfo.output_width;
1234 height = dec->cinfo.output_height;
1236 if (G_UNLIKELY (width < MIN_WIDTH || width > MAX_WIDTH ||
1237 height < MIN_HEIGHT || height > MAX_HEIGHT))
1240 gst_jpeg_dec_negotiate (dec, width, height, dec->cinfo.jpeg_color_space);
1242 state = gst_video_decoder_get_output_state (bdec);
1243 ret = gst_video_decoder_alloc_output_frame (bdec, frame);
1244 if (G_UNLIKELY (ret != GST_FLOW_OK))
1247 if (!gst_video_frame_map (&vframe, &state->info, frame->output_buffer,
1251 GST_LOG_OBJECT (dec, "width %d, height %d", width, height);
1253 if (dec->cinfo.jpeg_color_space == JCS_RGB) {
1254 gst_jpeg_dec_decode_rgb (dec, &vframe);
1255 } else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
1256 gst_jpeg_dec_decode_grayscale (dec, &vframe);
1258 GST_LOG_OBJECT (dec, "decompressing (reqired scanline buffer height = %u)",
1259 dec->cinfo.rec_outbuf_height);
1261 /* For some widths jpeglib requires more horizontal padding than I420
1262 * provides. In those cases we need to decode into separate buffers and then
1263 * copy over the data into our final picture buffer, otherwise jpeglib might
1264 * write over the end of a line into the beginning of the next line,
1265 * resulting in blocky artifacts on the left side of the picture. */
1266 if (G_UNLIKELY (width % (dec->cinfo.max_h_samp_factor * DCTSIZE) != 0
1267 || dec->cinfo.comp_info[0].h_samp_factor != 2
1268 || dec->cinfo.comp_info[1].h_samp_factor != 1
1269 || dec->cinfo.comp_info[2].h_samp_factor != 1)) {
1270 GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
1271 "indirect decoding using extra buffer copy");
1272 gst_jpeg_dec_decode_indirect (dec, &vframe, r_v, r_h,
1273 dec->cinfo.num_components);
1275 ret = gst_jpeg_dec_decode_direct (dec, &vframe);
1277 if (G_UNLIKELY (ret != GST_FLOW_OK))
1278 goto decode_direct_failed;
1282 gst_video_frame_unmap (&vframe);
1284 GST_LOG_OBJECT (dec, "decompressing finished");
1285 jpeg_finish_decompress (&dec->cinfo);
1287 /* reset error count on successful decode */
1288 dec->error_count = 0;
1290 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1291 ret = gst_video_decoder_finish_frame (bdec, frame);
1298 if (G_UNLIKELY (ret == GST_FLOW_ERROR)) {
1299 jpeg_abort_decompress (&dec->cinfo);
1300 ret = gst_jpeg_dec_post_error_or_warning (dec);
1304 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1307 gst_video_codec_state_unref (state);
1314 GST_LOG_OBJECT (dec, "we need more data");
1321 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
1322 "Picture is too small or too big (%ux%u)", width, height);
1323 ret = GST_FLOW_ERROR;
1328 gchar err_msg[JMSG_LENGTH_MAX];
1330 dec->jerr.pub.format_message ((j_common_ptr) (&dec->cinfo), err_msg);
1332 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
1333 "Decode error #%u: %s", code, err_msg);
1335 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1336 gst_video_decoder_drop_frame (bdec, frame);
1339 ret = GST_FLOW_ERROR;
1342 decode_direct_failed:
1344 /* already posted an error message */
1345 jpeg_abort_decompress (&dec->cinfo);
1350 const gchar *reason;
1352 reason = gst_flow_get_name (ret);
1354 GST_DEBUG_OBJECT (dec, "failed to alloc buffer, reason %s", reason);
1355 /* Reset for next time */
1356 jpeg_abort_decompress (&dec->cinfo);
1357 if (ret != GST_FLOW_EOS && ret != GST_FLOW_FLUSHING &&
1358 ret != GST_FLOW_NOT_LINKED) {
1359 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
1360 "Buffer allocation failed, reason: %s", reason);
1364 components_not_supported:
1366 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
1367 "number of components not supported: %d (max 3)",
1368 dec->cinfo.num_components);
1369 ret = GST_FLOW_ERROR;
1372 unsupported_colorspace:
1374 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
1375 "Picture has unknown or unsupported colourspace");
1376 ret = GST_FLOW_ERROR;
1379 invalid_yuvrgbgrayscale:
1381 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
1382 "Picture is corrupt or unhandled YUV/RGB/grayscale layout");
1383 ret = GST_FLOW_ERROR;
/* gst_jpeg_dec_decide_allocation:
 * GstVideoDecoder::decide_allocation vfunc. Lets the base class pick the
 * buffer pool first, then enables GstVideoMeta on that pool when downstream
 * advertised support for it in the allocation query.
 * NOTE(review): listing is elided (early-return and final return lines not
 * visible here).
 */
1389 gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query)
1391 GstBufferPool *pool;
1392 GstStructure *config;
/* Base class negotiates the pool; bail out if that fails. */
1394 if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query))
1397 g_assert (gst_query_get_n_allocation_pools (query) > 0);
1398 gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
1399 g_assert (pool != NULL);
1401 config = gst_buffer_pool_get_config (pool);
/* Only add the video-meta pool option if downstream can handle it. */
1402 if (gst_query_has_allocation_meta (query, GST_VIDEO_META_API_TYPE)) {
1403 gst_buffer_pool_config_add_option (config,
1404 GST_BUFFER_POOL_OPTION_VIDEO_META);
1406 gst_buffer_pool_set_config (pool, config);
/* gst_query_parse_nth_allocation_pool() returned a ref; release it. */
1407 gst_object_unref (pool);
/* gst_jpeg_dec_reset:
 * GstVideoDecoder::reset vfunc. Aborts any in-progress libjpeg decompression
 * cycle and clears the incremental JPEG-parsing state so the next input
 * starts from a clean slate.
 * NOTE(review): listing is elided — handling of the `hard` flag (if any) and
 * the return statement are not visible here.
 */
1413 gst_jpeg_dec_reset (GstVideoDecoder * bdec, gboolean hard)
1415 GstJpegDec *dec = (GstJpegDec *) bdec;
1417 jpeg_abort_decompress (&dec->cinfo);
1418 dec->parse_offset = 0;
1419 dec->parse_entropy_len = 0;
1420 dec->parse_resync = FALSE;
1421 dec->saw_header = FALSE;
/* gst_jpeg_dec_set_property:
 * GObject property setter for "idct-method" (enum) and "max-errors" (int).
 * max_errors is written atomically because it may be read from the
 * streaming thread while being set from the application thread.
 * NOTE(review): listing is elided — the switch statement and break lines
 * are not fully visible here.
 */
1427 gst_jpeg_dec_set_property (GObject * object, guint prop_id,
1428 const GValue * value, GParamSpec * pspec)
1432 dec = GST_JPEG_DEC (object);
1435 case PROP_IDCT_METHOD:
1436 dec->idct_method = g_value_get_enum (value);
1438 case PROP_MAX_ERRORS:
1439 g_atomic_int_set (&dec->max_errors, g_value_get_int (value));
1443 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* gst_jpeg_dec_get_property:
 * GObject property getter mirroring gst_jpeg_dec_set_property(); reads
 * max_errors atomically to pair with the atomic write in the setter.
 * NOTE(review): listing is elided — the switch statement and break lines
 * are not fully visible here.
 */
1449 gst_jpeg_dec_get_property (GObject * object, guint prop_id, GValue * value,
1454 dec = GST_JPEG_DEC (object);
1457 case PROP_IDCT_METHOD:
1458 g_value_set_enum (value, dec->idct_method);
1460 case PROP_MAX_ERRORS:
1461 g_value_set_int (value, g_atomic_int_get (&dec->max_errors));
1465 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* gst_jpeg_dec_start:
 * GstVideoDecoder::start vfunc. Clears the decode-error counter and the
 * incremental parse state when the element starts streaming.
 * NOTE(review): listing is elided — the return statement is not visible
 * here (presumably returns TRUE; confirm against full source).
 */
1471 gst_jpeg_dec_start (GstVideoDecoder * bdec)
1473 GstJpegDec *dec = (GstJpegDec *) bdec;
1475 dec->error_count = 0;
1476 dec->parse_entropy_len = 0;
1477 dec->parse_resync = FALSE;
1483 gst_jpeg_dec_stop (GstVideoDecoder * bdec)
1485 GstJpegDec *dec = (GstJpegDec *) bdec;
1487 gst_jpeg_dec_free_buffers (dec);