2 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
3 * Copyright (C) <2009> Tim-Philipp Müller <tim centricular net>
4 * Copyright (C) 2012 Collabora Ltd.
5 * Author : Edward Hervey <edward@collabora.com>
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Library General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Library General Public License for more details.
17 * You should have received a copy of the GNU Library General Public
18 * License along with this library; if not, write to the
19 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
20 * Boston, MA 02111-1307, USA.
24 * SECTION:element-jpegdec
26 * Decodes jpeg images.
29 * <title>Example launch line</title>
31 * gst-launch -v filesrc location=mjpeg.avi ! avidemux ! queue ! jpegdec ! videoconvert ! videoscale ! autovideosink
32 * ]| The above pipeline decodes the mjpeg stream and renders it to the screen.
41 #include "gstjpegdec.h"
43 #include <gst/video/video.h>
44 #include <gst/video/gstvideometa.h>
45 #include <gst/video/gstvideopool.h>
46 #include "gst/gst-i18n-plugin.h"
50 #define MAX_WIDTH 65535
52 #define MAX_HEIGHT 65535
54 #define CINFO_GET_JPEGDEC(cinfo_ptr) \
55 (((struct GstJpegDecSourceMgr*)((cinfo_ptr)->src))->dec)
57 #define JPEG_DEFAULT_IDCT_METHOD JDCT_FASTEST
58 #define JPEG_DEFAULT_MAX_ERRORS 0
/* Source pad template: raw video formats jpegdec can produce.
 * NOTE(review): the pad direction/presence arguments of both templates
 * appear to be missing from this excerpt. */
68 static GstStaticPadTemplate gst_jpeg_dec_src_pad_template =
69 GST_STATIC_PAD_TEMPLATE ("src",
72 GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
73 ("{ I420, RGB, BGR, RGBx, xRGB, BGRx, xBGR, GRAY8 }"))
/* Sink pad template: accepts image/jpeg within the supported size range;
 * sof-marker lists the SOF types the parser/decoder can handle. */
77 /* FIXME: sof-marker is for IJG libjpeg 8, should be different for 6.2 */
78 static GstStaticPadTemplate gst_jpeg_dec_sink_pad_template =
79 GST_STATIC_PAD_TEMPLATE ("sink",
82 GST_STATIC_CAPS ("image/jpeg, "
83 "width = (int) [ " G_STRINGIFY (MIN_WIDTH) ", " G_STRINGIFY (MAX_WIDTH)
84 " ], " "height = (int) [ " G_STRINGIFY (MIN_HEIGHT) ", "
85 G_STRINGIFY (MAX_HEIGHT) " ], "
86 "sof-marker = (int) { 0, 1, 2, 5, 6, 7, 9, 10, 13, 14 }")
/* Debug categories: element-specific one plus the shared performance one. */
89 GST_DEBUG_CATEGORY_STATIC (jpeg_dec_debug);
90 #define GST_CAT_DEFAULT jpeg_dec_debug
91 GST_DEBUG_CATEGORY_STATIC (GST_CAT_PERFORMANCE);
/* Forward declarations of GObject property accessors and
 * GstVideoDecoder virtual method implementations. */
93 static void gst_jpeg_dec_set_property (GObject * object, guint prop_id,
94 const GValue * value, GParamSpec * pspec);
95 static void gst_jpeg_dec_get_property (GObject * object, guint prop_id,
96 GValue * value, GParamSpec * pspec);
98 static gboolean gst_jpeg_dec_set_format (GstVideoDecoder * dec,
99 GstVideoCodecState * state);
100 static gboolean gst_jpeg_dec_start (GstVideoDecoder * bdec);
101 static gboolean gst_jpeg_dec_stop (GstVideoDecoder * bdec);
102 static gboolean gst_jpeg_dec_reset (GstVideoDecoder * bdec, gboolean hard);
103 static GstFlowReturn gst_jpeg_dec_parse (GstVideoDecoder * bdec,
104 GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos);
105 static GstFlowReturn gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec,
106 GstVideoCodecFrame * frame);
107 static gboolean gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec,
110 #define gst_jpeg_dec_parent_class parent_class
111 G_DEFINE_TYPE (GstJpegDec, gst_jpeg_dec, GST_TYPE_VIDEO_DECODER);
114 gst_jpeg_dec_finalize (GObject * object)
116 GstJpegDec *dec = GST_JPEG_DEC (object);
118 jpeg_destroy_decompress (&dec->cinfo);
119 if (dec->input_state)
120 gst_video_codec_state_unref (dec->input_state);
122 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Class initialisation: wires up GObject property handling, installs the
 * idct-method and max-errors properties, registers pad templates and
 * element metadata, and hooks up the GstVideoDecoder vfuncs. */
126 gst_jpeg_dec_class_init (GstJpegDecClass * klass)
128 GObjectClass *gobject_class;
129 GstElementClass *element_class;
130 GstVideoDecoderClass *vdec_class;
132 gobject_class = (GObjectClass *) klass;
133 element_class = (GstElementClass *) klass;
134 vdec_class = (GstVideoDecoderClass *) klass;
136 parent_class = g_type_class_peek_parent (klass);
138 gobject_class->finalize = gst_jpeg_dec_finalize;
139 gobject_class->set_property = gst_jpeg_dec_set_property;
140 gobject_class->get_property = gst_jpeg_dec_get_property;
/* idct-method: which libjpeg IDCT implementation to use (speed/quality). */
142 g_object_class_install_property (gobject_class, PROP_IDCT_METHOD,
143 g_param_spec_enum ("idct-method", "IDCT Method",
144 "The IDCT algorithm to use", GST_TYPE_IDCT_METHOD,
145 JPEG_DEFAULT_IDCT_METHOD,
146 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
149 * GstJpegDec:max-errors
151 * Error out after receiving N consecutive decoding errors
152 * (-1 = never error out, 0 = automatic, 1 = fail on first error, etc.)
/* max-errors is read atomically in _post_error_or_warning(), hence the
 * plain int storage accessed via g_atomic_int_get(). */
156 g_object_class_install_property (gobject_class, PROP_MAX_ERRORS,
157 g_param_spec_int ("max-errors", "Maximum Consecutive Decoding Errors",
158 "Error out after receiving N consecutive decoding errors "
159 "(-1 = never fail, 0 = automatic, 1 = fail on first error)",
160 -1, G_MAXINT, JPEG_DEFAULT_MAX_ERRORS,
161 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
163 gst_element_class_add_pad_template (element_class,
164 gst_static_pad_template_get (&gst_jpeg_dec_src_pad_template));
165 gst_element_class_add_pad_template (element_class,
166 gst_static_pad_template_get (&gst_jpeg_dec_sink_pad_template));
167 gst_element_class_set_details_simple (element_class, "JPEG image decoder",
168 "Codec/Decoder/Image",
169 "Decode images from JPEG format", "Wim Taymans <wim@fluendo.com>");
/* GstVideoDecoder virtual methods implemented by this element. */
171 vdec_class->start = gst_jpeg_dec_start;
172 vdec_class->stop = gst_jpeg_dec_stop;
173 vdec_class->reset = gst_jpeg_dec_reset;
174 vdec_class->parse = gst_jpeg_dec_parse;
175 vdec_class->set_format = gst_jpeg_dec_set_format;
176 vdec_class->handle_frame = gst_jpeg_dec_handle_frame;
177 vdec_class->decide_allocation = gst_jpeg_dec_decide_allocation;
179 GST_DEBUG_CATEGORY_INIT (jpeg_dec_debug, "jpegdec", 0, "JPEG decoder");
180 GST_DEBUG_CATEGORY_GET (GST_CAT_PERFORMANCE, "GST_PERFORMANCE");
184 gst_jpeg_dec_clear_error (GstJpegDec * dec)
186 g_free (dec->error_msg);
187 dec->error_msg = NULL;
189 dec->error_func = NULL;
193 gst_jpeg_dec_set_error_va (GstJpegDec * dec, const gchar * func, gint line,
194 const gchar * debug_msg_format, va_list args)
196 #ifndef GST_DISABLE_GST_DEBUG
197 gst_debug_log_valist (GST_CAT_DEFAULT, GST_LEVEL_WARNING, __FILE__, func,
198 line, (GObject *) dec, debug_msg_format, args);
201 g_free (dec->error_msg);
202 if (debug_msg_format)
203 dec->error_msg = g_strdup_vprintf (debug_msg_format, args);
205 dec->error_msg = NULL;
207 dec->error_line = line;
208 dec->error_func = func;
212 gst_jpeg_dec_set_error (GstJpegDec * dec, const gchar * func, gint line,
213 const gchar * debug_msg_format, ...)
217 va_start (va, debug_msg_format);
218 gst_jpeg_dec_set_error_va (dec, func, line, debug_msg_format, va);
223 gst_jpeg_dec_post_error_or_warning (GstJpegDec * dec)
229 max_errors = g_atomic_int_get (&dec->max_errors);
231 if (max_errors < 0) {
233 } else if (max_errors == 0) {
234 /* FIXME: do something more clever in "automatic mode" */
235 if (gst_video_decoder_get_packetized (GST_VIDEO_DECODER (dec))) {
236 ret = (dec->error_count < 3) ? GST_FLOW_OK : GST_FLOW_ERROR;
238 ret = GST_FLOW_ERROR;
241 ret = (dec->error_count < max_errors) ? GST_FLOW_OK : GST_FLOW_ERROR;
244 GST_INFO_OBJECT (dec, "decoding error %d/%d (%s)", dec->error_count,
245 max_errors, (ret == GST_FLOW_OK) ? "ignoring error" : "erroring out");
247 gst_element_message_full (GST_ELEMENT (dec),
248 (ret == GST_FLOW_OK) ? GST_MESSAGE_WARNING : GST_MESSAGE_ERROR,
249 GST_STREAM_ERROR, GST_STREAM_ERROR_DECODE,
250 g_strdup (_("Failed to decode JPEG image")), dec->error_msg,
251 __FILE__, dec->error_func, dec->error_line);
253 dec->error_msg = NULL;
254 gst_jpeg_dec_clear_error (dec);
259 gst_jpeg_dec_fill_input_buffer (j_decompress_ptr cinfo)
263 dec = CINFO_GET_JPEGDEC (cinfo);
264 g_return_val_if_fail (dec != NULL, FALSE);
265 g_return_val_if_fail (dec->current_frame != NULL, FALSE);
266 g_return_val_if_fail (dec->current_frame_map.data != NULL, FALSE);
268 cinfo->src->next_input_byte = dec->current_frame_map.data;
269 cinfo->src->bytes_in_buffer = dec->current_frame_map.size;
275 gst_jpeg_dec_init_source (j_decompress_ptr cinfo)
277 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "init_source");
282 gst_jpeg_dec_skip_input_data (j_decompress_ptr cinfo, glong num_bytes)
284 GstJpegDec *dec = CINFO_GET_JPEGDEC (cinfo);
286 GST_DEBUG_OBJECT (dec, "skip %ld bytes", num_bytes);
288 if (num_bytes > 0 && cinfo->src->bytes_in_buffer >= num_bytes) {
289 cinfo->src->next_input_byte += (size_t) num_bytes;
290 cinfo->src->bytes_in_buffer -= (size_t) num_bytes;
295 gst_jpeg_dec_resync_to_restart (j_decompress_ptr cinfo, gint desired)
297 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "resync_to_start");
302 gst_jpeg_dec_term_source (j_decompress_ptr cinfo)
304 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "term_source");
309 gst_jpeg_dec_my_output_message (j_common_ptr cinfo)
311 return; /* do nothing */
315 gst_jpeg_dec_my_emit_message (j_common_ptr cinfo, int msg_level)
317 /* GST_LOG_OBJECT (CINFO_GET_JPEGDEC (&cinfo), "msg_level=%d", msg_level); */
322 gst_jpeg_dec_my_error_exit (j_common_ptr cinfo)
324 struct GstJpegDecErrorMgr *err_mgr = (struct GstJpegDecErrorMgr *) cinfo->err;
326 (*cinfo->err->output_message) (cinfo);
327 longjmp (err_mgr->setjmp_buffer, 1);
331 gst_jpeg_dec_init (GstJpegDec * dec)
333 GST_DEBUG ("initializing");
336 memset (&dec->cinfo, 0, sizeof (dec->cinfo));
337 memset (&dec->jerr, 0, sizeof (dec->jerr));
338 dec->cinfo.err = jpeg_std_error (&dec->jerr.pub);
339 dec->jerr.pub.output_message = gst_jpeg_dec_my_output_message;
340 dec->jerr.pub.emit_message = gst_jpeg_dec_my_emit_message;
341 dec->jerr.pub.error_exit = gst_jpeg_dec_my_error_exit;
343 jpeg_create_decompress (&dec->cinfo);
345 dec->cinfo.src = (struct jpeg_source_mgr *) &dec->jsrc;
346 dec->cinfo.src->init_source = gst_jpeg_dec_init_source;
347 dec->cinfo.src->fill_input_buffer = gst_jpeg_dec_fill_input_buffer;
348 dec->cinfo.src->skip_input_data = gst_jpeg_dec_skip_input_data;
349 dec->cinfo.src->resync_to_restart = gst_jpeg_dec_resync_to_restart;
350 dec->cinfo.src->term_source = gst_jpeg_dec_term_source;
353 /* init properties */
354 dec->idct_method = JPEG_DEFAULT_IDCT_METHOD;
355 dec->max_errors = JPEG_DEFAULT_MAX_ERRORS;
358 static inline gboolean
359 gst_jpeg_dec_parse_tag_has_entropy_segment (guint8 tag)
361 if (tag == 0xda || (tag >= 0xd0 && tag <= 0xd7))
/* GstVideoDecoder::parse — scans the adapter for JPEG marker structure to
 * delimit one complete image (SOI .. EOI) per frame. Walks marker segments
 * using their length fields, handles entropy-coded segments (whose length
 * is not in the header) by searching for the next marker, and resyncs on
 * corrupt data. Parse state (saw_header, parse_resync, parse_entropy_len)
 * persists across calls since input may arrive in arbitrary chunks. */
367 gst_jpeg_dec_parse (GstVideoDecoder * bdec, GstVideoCodecFrame * frame,
368 GstAdapter * adapter, gboolean at_eos)
373 gint offset = 0, noffset;
374 GstJpegDec *dec = (GstJpegDec *) bdec;
376 /* FIXME : The overhead of using scan_uint32 is massive */
378 size = gst_adapter_available (adapter);
379 GST_DEBUG ("Parsing jpeg image data (%u bytes)", size);
/* at EOS: either push out what we have as a (possibly truncated) frame,
 * or discard leftover bytes that never formed a header */
382 GST_DEBUG ("Flushing all data out");
385 /* If we have leftover data, throw it away */
386 if (!dec->saw_header)
388 goto have_full_frame;
/* first locate the SOI marker (0xffd8) and drop garbage before it */
394 if (!dec->saw_header) {
396 /* we expect at least 4 bytes, first of which start marker */
398 gst_adapter_masked_scan_uint32 (adapter, 0xffff0000, 0xffd80000, 0,
401 GST_DEBUG ("ret:%d", ret);
406 gst_adapter_flush (adapter, ret);
409 dec->saw_header = TRUE;
416 GST_DEBUG ("offset:%d, size:%d", offset, size);
/* expect a 0xff marker byte at the current offset */
419 gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
420 offset, size - offset, &value);
422 /* lost sync if 0xff marker not where expected */
423 if ((resync = (noffset != offset))) {
424 GST_DEBUG ("Lost sync at 0x%08x, resyncing", offset + 2);
426 /* may have marker, but could have been resyncng */
427 resync = resync || dec->parse_resync;
428 /* Skip over extra 0xff */
429 while ((noffset >= 0) && ((value & 0xff) == 0xff)) {
432 gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
433 noffset, size - noffset, &value);
435 /* enough bytes left for marker? (we need 0xNN after the 0xff) */
437 GST_DEBUG ("at end of input and no EOI marker found, need more data");
441 /* now lock on the marker we found */
443 value = value & 0xff;
/* EOI (0xd9): one full image is in the adapter — emit it */
445 GST_DEBUG ("0x%08x: EOI marker", offset + 2);
446 /* clear parse state */
447 dec->saw_header = FALSE;
448 dec->parse_resync = FALSE;
450 goto have_full_frame;
453 /* Skip this frame if we found another SOI marker */
454 GST_DEBUG ("0x%08x: SOI marker before EOI, skipping", offset + 2);
455 dec->parse_resync = FALSE;
456 /* FIXME : Need to skip data */
458 goto have_full_frame;
/* restart markers (RST0..RST7) are standalone, no length field */
462 if (value >= 0xd0 && value <= 0xd7)
465 /* peek tag and subsequent length */
466 if (offset + 2 + 4 > size)
469 gst_adapter_masked_scan_uint32_peek (adapter, 0x0, 0x0, offset + 2, 4,
471 frame_len = frame_len & 0xffff;
473 GST_DEBUG ("0x%08x: tag %02x, frame_len=%u", offset + 2, value, frame_len);
474 /* the frame length includes the 2 bytes for the length; here we want at
475 * least 2 more bytes at the end for an end marker */
476 if (offset + 2 + 2 + frame_len + 2 > size) {
/* segments followed by entropy-coded data: scan forward for the next
 * real marker (0xff followed by non-zero, non-fill byte) */
480 if (gst_jpeg_dec_parse_tag_has_entropy_segment (value)) {
481 guint eseglen = dec->parse_entropy_len;
483 GST_DEBUG ("0x%08x: finding entropy segment length (eseglen:%d)",
484 offset + 2, eseglen);
485 if (size < offset + 2 + frame_len + eseglen)
487 noffset = offset + 2 + frame_len + dec->parse_entropy_len;
489 GST_DEBUG ("noffset:%d, size:%d, size - noffset:%d",
490 noffset, size, size - noffset);
491 noffset = gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00,
492 0x0000ff00, noffset, size - noffset, &value);
/* remember how far we scanned so the next call resumes there */
495 dec->parse_entropy_len = size - offset - 4 - frame_len - 2;
498 if ((value & 0xff) != 0x00) {
499 eseglen = noffset - offset - frame_len - 2;
504 dec->parse_entropy_len = 0;
505 frame_len += eseglen;
506 GST_DEBUG ("entropy segment length=%u => frame_len=%u", eseglen,
510 /* check if we will still be in sync if we interpret
511 * this as a sync point and skip this frame */
512 noffset = offset + frame_len + 2;
513 noffset = gst_adapter_masked_scan_uint32 (adapter, 0x0000ff00, 0x0000ff00,
516 /* ignore and continue resyncing until we hit the end
517 * of our data or find a sync point that looks okay */
521 GST_DEBUG ("found sync at 0x%x", offset + 2);
524 /* Add current data to output buffer */
525 toadd += frame_len + 2;
526 offset += frame_len + 2;
/* not enough data for a complete image yet */
531 gst_video_decoder_add_to_frame (bdec, toadd);
532 return GST_VIDEO_DECODER_FLOW_NEED_DATA;
/* complete image collected: hand it to the base class */
536 gst_video_decoder_add_to_frame (bdec, toadd);
537 return gst_video_decoder_have_frame (bdec);
540 gst_adapter_flush (adapter, size);
545 /* shamelessly ripped from jpegutils.c in mjpegtools */
547 add_huff_table (j_decompress_ptr dinfo,
548 JHUFF_TBL ** htblptr, const UINT8 * bits, const UINT8 * val)
549 /* Define a Huffman table */
553 if (*htblptr == NULL)
554 *htblptr = jpeg_alloc_huff_table ((j_common_ptr) dinfo);
558 /* Copy the number-of-symbols-of-each-code-length counts */
559 memcpy ((*htblptr)->bits, bits, sizeof ((*htblptr)->bits));
561 /* Validate the counts. We do this here mainly so we can copy the right
562 * number of symbols from the val[] array, without risking marching off
563 * the end of memory. jchuff.c will do a more thorough test later.
566 for (len = 1; len <= 16; len++)
567 nsymbols += bits[len];
568 if (nsymbols < 1 || nsymbols > 256)
569 g_error ("jpegutils.c: add_huff_table failed badly. ");
571 memcpy ((*htblptr)->huffval, val, nsymbols * sizeof (UINT8));
577 std_huff_tables (j_decompress_ptr dinfo)
578 /* Set up the standard Huffman tables (cf. JPEG standard section K.3) */
579 /* IMPORTANT: these are only valid for 8-bit data precision! */
581 static const UINT8 bits_dc_luminance[17] =
582 { /* 0-base */ 0, 0, 1, 5, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0 };
583 static const UINT8 val_dc_luminance[] =
584 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
586 static const UINT8 bits_dc_chrominance[17] =
587 { /* 0-base */ 0, 0, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0 };
588 static const UINT8 val_dc_chrominance[] =
589 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
591 static const UINT8 bits_ac_luminance[17] =
592 { /* 0-base */ 0, 0, 2, 1, 3, 3, 2, 4, 3, 5, 5, 4, 4, 0, 0, 1, 0x7d };
593 static const UINT8 val_ac_luminance[] =
594 { 0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12,
595 0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07,
596 0x22, 0x71, 0x14, 0x32, 0x81, 0x91, 0xa1, 0x08,
597 0x23, 0x42, 0xb1, 0xc1, 0x15, 0x52, 0xd1, 0xf0,
598 0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16,
599 0x17, 0x18, 0x19, 0x1a, 0x25, 0x26, 0x27, 0x28,
600 0x29, 0x2a, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39,
601 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49,
602 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59,
603 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69,
604 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79,
605 0x7a, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89,
606 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98,
607 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
608 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6,
609 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4, 0xc5,
610 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4,
611 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xe1, 0xe2,
612 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea,
613 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
617 static const UINT8 bits_ac_chrominance[17] =
618 { /* 0-base */ 0, 0, 2, 1, 2, 4, 4, 3, 4, 7, 5, 4, 4, 0, 1, 2, 0x77 };
619 static const UINT8 val_ac_chrominance[] =
620 { 0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21,
621 0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71,
622 0x13, 0x22, 0x32, 0x81, 0x08, 0x14, 0x42, 0x91,
623 0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33, 0x52, 0xf0,
624 0x15, 0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34,
625 0xe1, 0x25, 0xf1, 0x17, 0x18, 0x19, 0x1a, 0x26,
626 0x27, 0x28, 0x29, 0x2a, 0x35, 0x36, 0x37, 0x38,
627 0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
628 0x49, 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58,
629 0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
630 0x69, 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78,
631 0x79, 0x7a, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
632 0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96,
633 0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5,
634 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4,
635 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3,
636 0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2,
637 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda,
638 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9,
639 0xea, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
643 add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[0],
644 bits_dc_luminance, val_dc_luminance);
645 add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[0],
646 bits_ac_luminance, val_ac_luminance);
647 add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[1],
648 bits_dc_chrominance, val_dc_chrominance);
649 add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[1],
650 bits_ac_chrominance, val_ac_chrominance);
656 guarantee_huff_tables (j_decompress_ptr dinfo)
658 if ((dinfo->dc_huff_tbl_ptrs[0] == NULL) &&
659 (dinfo->dc_huff_tbl_ptrs[1] == NULL) &&
660 (dinfo->ac_huff_tbl_ptrs[0] == NULL) &&
661 (dinfo->ac_huff_tbl_ptrs[1] == NULL)) {
662 GST_DEBUG ("Generating standard Huffman tables for this frame.");
663 std_huff_tables (dinfo);
/* GstVideoDecoder::set_format — decide packetized vs. parsed input from the
 * caps framerate and remember the input state for later negotiation. */
668 gst_jpeg_dec_set_format (GstVideoDecoder * dec, GstVideoCodecState * state)
670 GstJpegDec *jpeg = GST_JPEG_DEC (dec);
671 GstVideoInfo *info = &state->info;
673 /* FIXME : previously jpegdec would handled input as packetized
674 * if the framerate was present. Here we consider it packetized if
675 * the fps is != 1/1 */
/* NOTE(review): the && below does not implement "fps != 1/1" — e.g. 2/1
 * satisfies N != 1 but not D != 1 and is treated as non-packetized.
 * Implementing the stated intent would need ||; confirm which behavior
 * downstream relies on before changing. */
676 if (GST_VIDEO_INFO_FPS_N (info) != 1 && GST_VIDEO_INFO_FPS_D (info) != 1)
677 gst_video_decoder_set_packetized (dec, TRUE);
679 gst_video_decoder_set_packetized (dec, FALSE);
/* keep a ref to the input state; released in finalize or on re-set */
681 if (jpeg->input_state)
682 gst_video_codec_state_unref (jpeg->input_state);
683 jpeg->input_state = gst_video_codec_state_ref (state);
691 hresamplecpy1 (guint8 * dest, const guint8 * src, guint len)
695 for (i = 0; i < len; ++i) {
696 /* equivalent to: dest[i] = src[i << 1] */
705 gst_jpeg_dec_free_buffers (GstJpegDec * dec)
709 for (i = 0; i < 16; i++) {
710 g_free (dec->idr_y[i]);
711 g_free (dec->idr_u[i]);
712 g_free (dec->idr_v[i]);
713 dec->idr_y[i] = NULL;
714 dec->idr_u[i] = NULL;
715 dec->idr_v[i] = NULL;
718 dec->idr_width_allocated = 0;
721 static inline gboolean
722 gst_jpeg_dec_ensure_buffers (GstJpegDec * dec, guint maxrowbytes)
726 if (G_LIKELY (dec->idr_width_allocated == maxrowbytes))
729 /* FIXME: maybe just alloc one or three blocks altogether? */
730 for (i = 0; i < 16; i++) {
731 dec->idr_y[i] = g_try_realloc (dec->idr_y[i], maxrowbytes);
732 dec->idr_u[i] = g_try_realloc (dec->idr_u[i], maxrowbytes);
733 dec->idr_v[i] = g_try_realloc (dec->idr_v[i], maxrowbytes);
735 if (G_UNLIKELY (!dec->idr_y[i] || !dec->idr_u[i] || !dec->idr_v[i])) {
736 GST_WARNING_OBJECT (dec, "out of memory, i=%d, bytes=%u", i, maxrowbytes);
741 dec->idr_width_allocated = maxrowbytes;
742 GST_LOG_OBJECT (dec, "allocated temp memory, %u bytes/row", maxrowbytes);
/* Decode a grayscale JPEG into the single output plane: libjpeg writes
 * DCTSIZE rows at a time into the intermediate idr_y buffers, which are
 * then copied pixel-by-pixel honoring the output pixel/row strides. */
747 gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, GstVideoFrame * frame)
750 guchar **scanarray[1] = { rows };
755 gint pstride, rstride;
757 GST_DEBUG_OBJECT (dec, "indirect decoding of grayscale");
759 width = GST_VIDEO_FRAME_WIDTH (frame);
760 height = GST_VIDEO_FRAME_HEIGHT (frame);
/* row buffers rounded up to 32 so jpeglib can over-write padding safely */
762 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
765 base[0] = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
766 pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
767 rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
769 memcpy (rows, dec->idr_y, 16 * sizeof (gpointer));
773 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
774 if (G_LIKELY (lines > 0)) {
775 for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
779 for (k = 0; k < width; k++) {
780 base[0][p] = rows[j][k];
/* lines == 0: decoder made no progress; avoid spinning */
786 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Decode an RGB JPEG (no subsampling) via intermediate row buffers:
 * libjpeg emits planar R/G/B rows which are interleaved into the packed
 * output using the frame's pixel stride. */
792 gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame)
794 guchar *r_rows[16], *g_rows[16], *b_rows[16];
795 guchar **scanarray[3] = { r_rows, g_rows, b_rows };
799 guint pstride, rstride;
802 GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");
804 width = GST_VIDEO_FRAME_WIDTH (frame);
805 height = GST_VIDEO_FRAME_HEIGHT (frame);
807 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
810 for (i = 0; i < 3; i++)
811 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
/* all components share stride in the packed RGB output */
813 pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
814 rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
/* reuse the idr_* scratch rows as the R/G/B landing buffers */
816 memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
817 memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
818 memcpy (b_rows, dec->idr_v, 16 * sizeof (gpointer));
822 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
823 if (G_LIKELY (lines > 0)) {
824 for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
828 for (k = 0; k < width; k++) {
829 base[0][p] = r_rows[j][k];
830 base[1][p] = g_rows[j][k];
831 base[2][p] = b_rows[j][k];
839 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
845 gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame, gint r_v,
848 guchar *y_rows[16], *u_rows[16], *v_rows[16];
849 guchar **scanarray[3] = { y_rows, u_rows, v_rows };
852 guchar *base[3], *last[3];
856 GST_DEBUG_OBJECT (dec,
857 "unadvantageous width or r_h, taking slow route involving memcpy");
859 width = GST_VIDEO_FRAME_WIDTH (frame);
860 height = GST_VIDEO_FRAME_HEIGHT (frame);
862 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
865 for (i = 0; i < 3; i++) {
866 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
867 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
868 /* make sure we don't make jpeglib write beyond our buffer,
869 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
870 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
871 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
874 memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer));
875 memcpy (u_rows, dec->idr_u, 16 * sizeof (gpointer));
876 memcpy (v_rows, dec->idr_v, 16 * sizeof (gpointer));
878 /* fill chroma components for grayscale */
880 GST_DEBUG_OBJECT (dec, "grayscale, filling chroma");
881 for (i = 0; i < 16; i++) {
882 memset (u_rows[i], GST_ROUND_UP_32 (width), 0x80);
883 memset (v_rows[i], GST_ROUND_UP_32 (width), 0x80);
887 for (i = 0; i < height; i += r_v * DCTSIZE) {
888 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, r_v * DCTSIZE);
889 if (G_LIKELY (lines > 0)) {
890 for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) {
891 if (G_LIKELY (base[0] <= last[0])) {
892 memcpy (base[0], y_rows[j], stride[0]);
893 base[0] += stride[0];
896 if (G_LIKELY (base[0] <= last[0])) {
897 memcpy (base[0], y_rows[j + 1], stride[0]);
898 base[0] += stride[0];
901 if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) {
903 memcpy (base[1], u_rows[k], stride[1]);
904 memcpy (base[2], v_rows[k], stride[2]);
905 } else if (r_h == 1) {
906 hresamplecpy1 (base[1], u_rows[k], stride[1]);
907 hresamplecpy1 (base[2], v_rows[k], stride[2]);
909 /* FIXME: implement (at least we avoid crashing by doing nothing) */
913 if (r_v == 2 || (k & 1) != 0) {
914 base[1] += stride[1];
915 base[2] += stride[2];
919 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Fast path: build per-row pointer tables into the output planes so
 * jpeglib decodes directly into the final buffer (no intermediate copy).
 * Only valid when sampling factors and width padding allow it; pointers
 * beyond the last row are clamped so jpeglib's overshoot stays in-bounds. */
925 gst_jpeg_dec_decode_direct (GstJpegDec * dec, GstVideoFrame * frame)
927 guchar **line[3]; /* the jpeg line buffer */
928 guchar *y[4 * DCTSIZE] = { NULL, }; /* alloc enough for the lines */
929 guchar *u[4 * DCTSIZE] = { NULL, }; /* r_v will be <4 */
930 guchar *v[4 * DCTSIZE] = { NULL, };
932 gint lines, v_samp[3];
933 guchar *base[3], *last[3];
941 v_samp[0] = dec->cinfo.comp_info[0].v_samp_factor;
942 v_samp[1] = dec->cinfo.comp_info[1].v_samp_factor;
943 v_samp[2] = dec->cinfo.comp_info[2].v_samp_factor;
945 if (G_UNLIKELY (v_samp[0] > 2 || v_samp[1] > 2 || v_samp[2] > 2))
946 goto format_not_supported;
948 height = GST_VIDEO_FRAME_HEIGHT (frame);
950 for (i = 0; i < 3; i++) {
951 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
952 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
953 /* make sure we don't make jpeglib write beyond our buffer,
954 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
955 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
956 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
959 /* let jpeglib decode directly into our final buffer */
960 GST_DEBUG_OBJECT (dec, "decoding directly into output buffer");
/* one MCU-row of pointers per iteration; chroma row mapping depends on
 * whether the component shares luma's vertical sampling */
962 for (i = 0; i < height; i += v_samp[0] * DCTSIZE) {
963 for (j = 0; j < (v_samp[0] * DCTSIZE); ++j) {
965 line[0][j] = base[0] + (i + j) * stride[0];
966 if (G_UNLIKELY (line[0][j] > last[0]))
967 line[0][j] = last[0];
969 if (v_samp[1] == v_samp[0]) {
970 line[1][j] = base[1] + ((i + j) / 2) * stride[1];
971 } else if (j < (v_samp[1] * DCTSIZE)) {
972 line[1][j] = base[1] + ((i / 2) + j) * stride[1];
974 if (G_UNLIKELY (line[1][j] > last[1]))
975 line[1][j] = last[1];
977 if (v_samp[2] == v_samp[0]) {
978 line[2][j] = base[2] + ((i + j) / 2) * stride[2];
979 } else if (j < (v_samp[2] * DCTSIZE)) {
980 line[2][j] = base[2] + ((i / 2) + j) * stride[2];
982 if (G_UNLIKELY (line[2][j] > last[2]))
983 line[2][j] = last[2];
986 lines = jpeg_read_raw_data (&dec->cinfo, line, v_samp[0] * DCTSIZE);
987 if (G_UNLIKELY (!lines)) {
988 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* error path: report via the deferred error mechanism */
993 format_not_supported:
995 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
996 "Unsupported subsampling schema: v_samp factors: %u %u %u",
997 v_samp[0], v_samp[1], v_samp[2]);
998 return GST_FLOW_ERROR;
/* Configure/renegotiate the output state for the decoded colorspace and
 * dimensions. No-op when the current output state already matches. */
1003 gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc)
1005 GstVideoCodecState *outstate;
1007 GstVideoFormat format;
/* map libjpeg colorspace -> GStreamer video format:
 * JCS_RGB -> RGB, JCS_GRAYSCALE -> GRAY8, JCS_YCbCr -> I420 */
1011 format = GST_VIDEO_FORMAT_RGB;
1014 format = GST_VIDEO_FORMAT_GRAY8;
1017 format = GST_VIDEO_FORMAT_I420;
1021 /* Compare to currently configured output state */
1022 outstate = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (dec));
1024 info = &outstate->info;
1026 if (width == GST_VIDEO_INFO_WIDTH (info) &&
1027 height == GST_VIDEO_INFO_HEIGHT (info) &&
1028 format == GST_VIDEO_INFO_FORMAT (info)) {
1029 gst_video_codec_state_unref (outstate);
1032 gst_video_codec_state_unref (outstate);
1036 gst_video_decoder_set_output_state (GST_VIDEO_DECODER (dec), format,
1037 width, height, dec->input_state);
/* JPEG YCbCr is full-range BT.601 per JFIF */
1044 outstate->info.colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;
1045 outstate->info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
1046 outstate->info.colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN;
1047 outstate->info.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
1051 gst_video_codec_state_unref (outstate);
1053 gst_video_decoder_negotiate (GST_VIDEO_DECODER (dec));
1055 GST_DEBUG_OBJECT (dec, "max_v_samp_factor=%d", dec->cinfo.max_v_samp_factor);
1056 GST_DEBUG_OBJECT (dec, "max_h_samp_factor=%d", dec->cinfo.max_h_samp_factor);
1059 static GstFlowReturn
1060 gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec, GstVideoCodecFrame * frame)
1062 GstFlowReturn ret = GST_FLOW_OK;
1063 GstJpegDec *dec = (GstJpegDec *) bdec;
1064 GstVideoFrame vframe;
1068 gboolean need_unmap = TRUE;
1069 GstVideoCodecState *state = NULL;
1071 dec->current_frame = frame;
1072 gst_buffer_map (frame->input_buffer, &dec->current_frame_map, GST_MAP_READ);
1073 gst_jpeg_dec_fill_input_buffer (&dec->cinfo);
1075 if (setjmp (dec->jerr.setjmp_buffer)) {
1076 code = dec->jerr.pub.msg_code;
1078 if (code == JERR_INPUT_EOF) {
1079 GST_DEBUG ("jpeg input EOF error, we probably need more data");
1080 goto need_more_data;
1086 hdr_ok = jpeg_read_header (&dec->cinfo, TRUE);
1087 if (G_UNLIKELY (hdr_ok != JPEG_HEADER_OK)) {
1088 GST_WARNING_OBJECT (dec, "reading the header failed, %d", hdr_ok);
1091 GST_LOG_OBJECT (dec, "num_components=%d", dec->cinfo.num_components);
1092 GST_LOG_OBJECT (dec, "jpeg_color_space=%d", dec->cinfo.jpeg_color_space);
1094 if (!dec->cinfo.num_components || !dec->cinfo.comp_info)
1095 goto components_not_supported;
1097 r_h = dec->cinfo.comp_info[0].h_samp_factor;
1098 r_v = dec->cinfo.comp_info[0].v_samp_factor;
1100 GST_LOG_OBJECT (dec, "r_h = %d, r_v = %d", r_h, r_v);
1102 if (dec->cinfo.num_components > 3)
1103 goto components_not_supported;
1105 /* verify color space expectation to avoid going *boom* or bogus output */
1106 if (dec->cinfo.jpeg_color_space != JCS_YCbCr &&
1107 dec->cinfo.jpeg_color_space != JCS_GRAYSCALE &&
1108 dec->cinfo.jpeg_color_space != JCS_RGB)
1109 goto unsupported_colorspace;
1111 #ifndef GST_DISABLE_GST_DEBUG
1115 for (i = 0; i < dec->cinfo.num_components; ++i) {
1116 GST_LOG_OBJECT (dec, "[%d] h_samp_factor=%d, v_samp_factor=%d, cid=%d",
1117 i, dec->cinfo.comp_info[i].h_samp_factor,
1118 dec->cinfo.comp_info[i].v_samp_factor,
1119 dec->cinfo.comp_info[i].component_id);
1124 /* prepare for raw output */
1125 dec->cinfo.do_fancy_upsampling = FALSE;
1126 dec->cinfo.do_block_smoothing = FALSE;
1127 dec->cinfo.out_color_space = dec->cinfo.jpeg_color_space;
1128 dec->cinfo.dct_method = dec->idct_method;
1129 dec->cinfo.raw_data_out = TRUE;
1131 GST_LOG_OBJECT (dec, "starting decompress");
1132 guarantee_huff_tables (&dec->cinfo);
1133 if (!jpeg_start_decompress (&dec->cinfo)) {
1134 GST_WARNING_OBJECT (dec, "failed to start decompression cycle");
1137 /* sanity checks to get safe and reasonable output */
1138 switch (dec->cinfo.jpeg_color_space) {
1140 if (dec->cinfo.num_components != 1)
1141 goto invalid_yuvrgbgrayscale;
1144 if (dec->cinfo.num_components != 3 || dec->cinfo.max_v_samp_factor > 1 ||
1145 dec->cinfo.max_h_samp_factor > 1)
1146 goto invalid_yuvrgbgrayscale;
1149 if (dec->cinfo.num_components != 3 ||
1150 r_v > 2 || r_v < dec->cinfo.comp_info[0].v_samp_factor ||
1151 r_v < dec->cinfo.comp_info[1].v_samp_factor ||
1152 r_h < dec->cinfo.comp_info[0].h_samp_factor ||
1153 r_h < dec->cinfo.comp_info[1].h_samp_factor)
1154 goto invalid_yuvrgbgrayscale;
1157 g_assert_not_reached ();
1161 width = dec->cinfo.output_width;
1162 height = dec->cinfo.output_height;
1164 if (G_UNLIKELY (width < MIN_WIDTH || width > MAX_WIDTH ||
1165 height < MIN_HEIGHT || height > MAX_HEIGHT))
1168 gst_jpeg_dec_negotiate (dec, width, height, dec->cinfo.jpeg_color_space);
1170 state = gst_video_decoder_get_output_state (bdec);
1171 ret = gst_video_decoder_allocate_output_frame (bdec, frame);
1172 if (G_UNLIKELY (ret != GST_FLOW_OK))
1175 if (!gst_video_frame_map (&vframe, &state->info, frame->output_buffer,
1179 GST_LOG_OBJECT (dec, "width %d, height %d", width, height);
1181 if (dec->cinfo.jpeg_color_space == JCS_RGB) {
1182 gst_jpeg_dec_decode_rgb (dec, &vframe);
1183 } else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
1184 gst_jpeg_dec_decode_grayscale (dec, &vframe);
1186 GST_LOG_OBJECT (dec, "decompressing (reqired scanline buffer height = %u)",
1187 dec->cinfo.rec_outbuf_height);
1189 /* For some widths jpeglib requires more horizontal padding than I420
1190 * provides. In those cases we need to decode into separate buffers and then
1191 * copy over the data into our final picture buffer, otherwise jpeglib might
1192 * write over the end of a line into the beginning of the next line,
1193 * resulting in blocky artifacts on the left side of the picture. */
1194 if (G_UNLIKELY (width % (dec->cinfo.max_h_samp_factor * DCTSIZE) != 0
1195 || dec->cinfo.comp_info[0].h_samp_factor != 2
1196 || dec->cinfo.comp_info[1].h_samp_factor != 1
1197 || dec->cinfo.comp_info[2].h_samp_factor != 1)) {
1198 GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
1199 "indirect decoding using extra buffer copy");
1200 gst_jpeg_dec_decode_indirect (dec, &vframe, r_v, r_h,
1201 dec->cinfo.num_components);
1203 ret = gst_jpeg_dec_decode_direct (dec, &vframe);
1205 if (G_UNLIKELY (ret != GST_FLOW_OK))
1206 goto decode_direct_failed;
1210 gst_video_frame_unmap (&vframe);
1212 GST_LOG_OBJECT (dec, "decompressing finished");
1213 jpeg_finish_decompress (&dec->cinfo);
1215 /* reset error count on successful decode */
1216 dec->error_count = 0;
1218 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1219 ret = gst_video_decoder_finish_frame (bdec, frame);
1226 if (G_UNLIKELY (ret == GST_FLOW_ERROR)) {
1227 jpeg_abort_decompress (&dec->cinfo);
1228 ret = gst_jpeg_dec_post_error_or_warning (dec);
1232 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1235 gst_video_codec_state_unref (state);
1242 GST_LOG_OBJECT (dec, "we need more data");
1249 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
1250 "Picture is too small or too big (%ux%u)", width, height);
1251 ret = GST_FLOW_ERROR;
1256 gchar err_msg[JMSG_LENGTH_MAX];
1258 dec->jerr.pub.format_message ((j_common_ptr) (&dec->cinfo), err_msg);
1260 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
1261 "Decode error #%u: %s", code, err_msg);
1263 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1264 gst_video_decoder_drop_frame (bdec, frame);
1267 ret = GST_FLOW_ERROR;
1270 decode_direct_failed:
1272 /* already posted an error message */
1273 jpeg_abort_decompress (&dec->cinfo);
1278 const gchar *reason;
1280 reason = gst_flow_get_name (ret);
1282 GST_DEBUG_OBJECT (dec, "failed to alloc buffer, reason %s", reason);
1283 /* Reset for next time */
1284 jpeg_abort_decompress (&dec->cinfo);
1285 if (ret != GST_FLOW_EOS && ret != GST_FLOW_FLUSHING &&
1286 ret != GST_FLOW_NOT_LINKED) {
1287 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
1288 "Buffer allocation failed, reason: %s", reason);
1292 components_not_supported:
1294 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
1295 "number of components not supported: %d (max 3)",
1296 dec->cinfo.num_components);
1297 ret = GST_FLOW_ERROR;
1300 unsupported_colorspace:
1302 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
1303 "Picture has unknown or unsupported colourspace");
1304 ret = GST_FLOW_ERROR;
1307 invalid_yuvrgbgrayscale:
1309 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
1310 "Picture is corrupt or unhandled YUV/RGB/grayscale layout");
1311 ret = GST_FLOW_ERROR;
1317 gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query)
1319 GstBufferPool *pool = NULL;
1320 GstStructure *config;
1322 if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query))
1325 if (gst_query_get_n_allocation_pools (query) > 0)
1326 gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
1331 config = gst_buffer_pool_get_config (pool);
1332 if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
1333 gst_buffer_pool_config_add_option (config,
1334 GST_BUFFER_POOL_OPTION_VIDEO_META);
1336 gst_buffer_pool_set_config (pool, config);
1337 gst_object_unref (pool);
1343 gst_jpeg_dec_reset (GstVideoDecoder * bdec, gboolean hard)
1345 GstJpegDec *dec = (GstJpegDec *) bdec;
1347 jpeg_abort_decompress (&dec->cinfo);
1348 dec->parse_entropy_len = 0;
1349 dec->parse_resync = FALSE;
1350 dec->saw_header = FALSE;
1356 gst_jpeg_dec_set_property (GObject * object, guint prop_id,
1357 const GValue * value, GParamSpec * pspec)
1361 dec = GST_JPEG_DEC (object);
1364 case PROP_IDCT_METHOD:
1365 dec->idct_method = g_value_get_enum (value);
1367 case PROP_MAX_ERRORS:
1368 g_atomic_int_set (&dec->max_errors, g_value_get_int (value));
1372 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1378 gst_jpeg_dec_get_property (GObject * object, guint prop_id, GValue * value,
1383 dec = GST_JPEG_DEC (object);
1386 case PROP_IDCT_METHOD:
1387 g_value_set_enum (value, dec->idct_method);
1389 case PROP_MAX_ERRORS:
1390 g_value_set_int (value, g_atomic_int_get (&dec->max_errors));
1394 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1400 gst_jpeg_dec_start (GstVideoDecoder * bdec)
1402 GstJpegDec *dec = (GstJpegDec *) bdec;
1404 dec->error_count = 0;
1405 dec->parse_entropy_len = 0;
1406 dec->parse_resync = FALSE;
1412 gst_jpeg_dec_stop (GstVideoDecoder * bdec)
1414 GstJpegDec *dec = (GstJpegDec *) bdec;
1416 gst_jpeg_dec_free_buffers (dec);