2 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
3 * Copyright (C) <2009> Tim-Philipp Müller <tim centricular net>
4 * Copyright (C) 2012 Collabora Ltd.
5 * Author : Edward Hervey <edward@collabora.com>
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Library General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Library General Public License for more details.
17 * You should have received a copy of the GNU Library General Public
18 * License along with this library; if not, write to the
19 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
20 * Boston, MA 02111-1307, USA.
24 * SECTION:element-jpegdec
26 * Decodes jpeg images.
29 * <title>Example launch line</title>
31 * gst-launch -v v4l2src ! jpegdec ! ffmpegcolorspace ! xvimagesink
32 * ]| The above pipeline reads a motion JPEG stream from a v4l2 camera
33 * and renders it to the screen.
42 #include "gstjpegdec.h"
44 #include <gst/video/video.h>
45 #include <gst/video/gstvideometa.h>
46 #include <gst/video/gstvideopool.h>
47 #include "gst/gst-i18n-plugin.h"
51 #define MAX_WIDTH 65535
53 #define MAX_HEIGHT 65535
55 #define CINFO_GET_JPEGDEC(cinfo_ptr) \
56 (((struct GstJpegDecSourceMgr*)((cinfo_ptr)->src))->dec)
58 #define JPEG_DEFAULT_IDCT_METHOD JDCT_FASTEST
59 #define JPEG_DEFAULT_MAX_ERRORS 0
69 static GstStaticPadTemplate gst_jpeg_dec_src_pad_template =
70 GST_STATIC_PAD_TEMPLATE ("src",
73 GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
74 ("{ I420, RGB, BGR, RGBx, xRGB, BGRx, xBGR, GRAY8 }"))
78 /* FIXME: sof-marker is for IJG libjpeg 8, should be different for 6.2 */
79 static GstStaticPadTemplate gst_jpeg_dec_sink_pad_template =
80 GST_STATIC_PAD_TEMPLATE ("sink",
83 GST_STATIC_CAPS ("image/jpeg, "
84 "width = (int) [ " G_STRINGIFY (MIN_WIDTH) ", " G_STRINGIFY (MAX_WIDTH)
85 " ], " "height = (int) [ " G_STRINGIFY (MIN_HEIGHT) ", "
86 G_STRINGIFY (MAX_HEIGHT) " ], "
87 "sof-marker = (int) { 0, 1, 2, 5, 6, 7, 9, 10, 13, 14 }")
90 GST_DEBUG_CATEGORY_STATIC (jpeg_dec_debug);
91 #define GST_CAT_DEFAULT jpeg_dec_debug
92 GST_DEBUG_CATEGORY_STATIC (GST_CAT_PERFORMANCE);
94 static void gst_jpeg_dec_set_property (GObject * object, guint prop_id,
95 const GValue * value, GParamSpec * pspec);
96 static void gst_jpeg_dec_get_property (GObject * object, guint prop_id,
97 GValue * value, GParamSpec * pspec);
99 static gboolean gst_jpeg_dec_set_format (GstVideoDecoder * dec,
100 GstVideoCodecState * state);
101 static gboolean gst_jpeg_dec_start (GstVideoDecoder * bdec);
102 static gboolean gst_jpeg_dec_stop (GstVideoDecoder * bdec);
103 static gboolean gst_jpeg_dec_reset (GstVideoDecoder * bdec, gboolean hard);
104 static GstFlowReturn gst_jpeg_dec_parse (GstVideoDecoder * bdec,
105 GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos);
106 static GstFlowReturn gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec,
107 GstVideoCodecFrame * frame);
108 static gboolean gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec,
111 #define gst_jpeg_dec_parent_class parent_class
112 G_DEFINE_TYPE (GstJpegDec, gst_jpeg_dec, GST_TYPE_VIDEO_DECODER);
115 gst_jpeg_dec_finalize (GObject * object)
117 GstJpegDec *dec = GST_JPEG_DEC (object);
119 jpeg_destroy_decompress (&dec->cinfo);
120 if (dec->input_state)
121 gst_video_codec_state_unref (dec->input_state);
123 G_OBJECT_CLASS (parent_class)->finalize (object);
127 gst_jpeg_dec_class_init (GstJpegDecClass * klass)
129 GObjectClass *gobject_class;
130 GstElementClass *element_class;
131 GstVideoDecoderClass *vdec_class;
133 gobject_class = (GObjectClass *) klass;
134 element_class = (GstElementClass *) klass;
135 vdec_class = (GstVideoDecoderClass *) klass;
137 parent_class = g_type_class_peek_parent (klass);
139 gobject_class->finalize = gst_jpeg_dec_finalize;
140 gobject_class->set_property = gst_jpeg_dec_set_property;
141 gobject_class->get_property = gst_jpeg_dec_get_property;
143 g_object_class_install_property (gobject_class, PROP_IDCT_METHOD,
144 g_param_spec_enum ("idct-method", "IDCT Method",
145 "The IDCT algorithm to use", GST_TYPE_IDCT_METHOD,
146 JPEG_DEFAULT_IDCT_METHOD,
147 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
150 * GstJpegDec:max-errors
152 * Error out after receiving N consecutive decoding errors
153 * (-1 = never error out, 0 = automatic, 1 = fail on first error, etc.)
157 g_object_class_install_property (gobject_class, PROP_MAX_ERRORS,
158 g_param_spec_int ("max-errors", "Maximum Consecutive Decoding Errors",
159 "Error out after receiving N consecutive decoding errors "
160 "(-1 = never fail, 0 = automatic, 1 = fail on first error)",
161 -1, G_MAXINT, JPEG_DEFAULT_MAX_ERRORS,
162 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
164 gst_element_class_add_pad_template (element_class,
165 gst_static_pad_template_get (&gst_jpeg_dec_src_pad_template));
166 gst_element_class_add_pad_template (element_class,
167 gst_static_pad_template_get (&gst_jpeg_dec_sink_pad_template));
168 gst_element_class_set_details_simple (element_class, "JPEG image decoder",
169 "Codec/Decoder/Image",
170 "Decode images from JPEG format", "Wim Taymans <wim@fluendo.com>");
172 vdec_class->start = gst_jpeg_dec_start;
173 vdec_class->stop = gst_jpeg_dec_stop;
174 vdec_class->reset = gst_jpeg_dec_reset;
175 vdec_class->parse = gst_jpeg_dec_parse;
176 vdec_class->set_format = gst_jpeg_dec_set_format;
177 vdec_class->handle_frame = gst_jpeg_dec_handle_frame;
178 vdec_class->decide_allocation = gst_jpeg_dec_decide_allocation;
180 GST_DEBUG_CATEGORY_INIT (jpeg_dec_debug, "jpegdec", 0, "JPEG decoder");
181 GST_DEBUG_CATEGORY_GET (GST_CAT_PERFORMANCE, "GST_PERFORMANCE");
185 gst_jpeg_dec_clear_error (GstJpegDec * dec)
187 g_free (dec->error_msg);
188 dec->error_msg = NULL;
190 dec->error_func = NULL;
194 gst_jpeg_dec_set_error_va (GstJpegDec * dec, const gchar * func, gint line,
195 const gchar * debug_msg_format, va_list args)
197 #ifndef GST_DISABLE_GST_DEBUG
198 gst_debug_log_valist (GST_CAT_DEFAULT, GST_LEVEL_WARNING, __FILE__, func,
199 line, (GObject *) dec, debug_msg_format, args);
202 g_free (dec->error_msg);
203 if (debug_msg_format)
204 dec->error_msg = g_strdup_vprintf (debug_msg_format, args);
206 dec->error_msg = NULL;
208 dec->error_line = line;
209 dec->error_func = func;
213 gst_jpeg_dec_set_error (GstJpegDec * dec, const gchar * func, gint line,
214 const gchar * debug_msg_format, ...)
218 va_start (va, debug_msg_format);
219 gst_jpeg_dec_set_error_va (dec, func, line, debug_msg_format, va);
224 gst_jpeg_dec_post_error_or_warning (GstJpegDec * dec)
230 max_errors = g_atomic_int_get (&dec->max_errors);
232 if (max_errors < 0) {
234 } else if (max_errors == 0) {
235 /* FIXME: do something more clever in "automatic mode" */
236 if (gst_video_decoder_get_packetized (GST_VIDEO_DECODER (dec))) {
237 ret = (dec->error_count < 3) ? GST_FLOW_OK : GST_FLOW_ERROR;
239 ret = GST_FLOW_ERROR;
242 ret = (dec->error_count < max_errors) ? GST_FLOW_OK : GST_FLOW_ERROR;
245 GST_INFO_OBJECT (dec, "decoding error %d/%d (%s)", dec->error_count,
246 max_errors, (ret == GST_FLOW_OK) ? "ignoring error" : "erroring out");
248 gst_element_message_full (GST_ELEMENT (dec),
249 (ret == GST_FLOW_OK) ? GST_MESSAGE_WARNING : GST_MESSAGE_ERROR,
250 GST_STREAM_ERROR, GST_STREAM_ERROR_DECODE,
251 g_strdup (_("Failed to decode JPEG image")), dec->error_msg,
252 __FILE__, dec->error_func, dec->error_line);
254 dec->error_msg = NULL;
255 gst_jpeg_dec_clear_error (dec);
260 gst_jpeg_dec_fill_input_buffer (j_decompress_ptr cinfo)
264 dec = CINFO_GET_JPEGDEC (cinfo);
265 g_return_val_if_fail (dec != NULL, FALSE);
266 g_return_val_if_fail (dec->current_frame != NULL, FALSE);
267 g_return_val_if_fail (dec->current_frame_map.data != NULL, FALSE);
269 cinfo->src->next_input_byte = dec->current_frame_map.data;
270 cinfo->src->bytes_in_buffer = dec->current_frame_map.size;
276 gst_jpeg_dec_init_source (j_decompress_ptr cinfo)
278 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "init_source");
283 gst_jpeg_dec_skip_input_data (j_decompress_ptr cinfo, glong num_bytes)
285 GstJpegDec *dec = CINFO_GET_JPEGDEC (cinfo);
287 GST_DEBUG_OBJECT (dec, "skip %ld bytes", num_bytes);
289 if (num_bytes > 0 && cinfo->src->bytes_in_buffer >= num_bytes) {
290 cinfo->src->next_input_byte += (size_t) num_bytes;
291 cinfo->src->bytes_in_buffer -= (size_t) num_bytes;
296 gst_jpeg_dec_resync_to_restart (j_decompress_ptr cinfo, gint desired)
298 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "resync_to_start");
303 gst_jpeg_dec_term_source (j_decompress_ptr cinfo)
305 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "term_source");
310 gst_jpeg_dec_my_output_message (j_common_ptr cinfo)
312 return; /* do nothing */
316 gst_jpeg_dec_my_emit_message (j_common_ptr cinfo, int msg_level)
318 /* GST_LOG_OBJECT (CINFO_GET_JPEGDEC (&cinfo), "msg_level=%d", msg_level); */
323 gst_jpeg_dec_my_error_exit (j_common_ptr cinfo)
325 struct GstJpegDecErrorMgr *err_mgr = (struct GstJpegDecErrorMgr *) cinfo->err;
327 (*cinfo->err->output_message) (cinfo);
328 longjmp (err_mgr->setjmp_buffer, 1);
332 gst_jpeg_dec_init (GstJpegDec * dec)
334 GST_DEBUG ("initializing");
337 memset (&dec->cinfo, 0, sizeof (dec->cinfo));
338 memset (&dec->jerr, 0, sizeof (dec->jerr));
339 dec->cinfo.err = jpeg_std_error (&dec->jerr.pub);
340 dec->jerr.pub.output_message = gst_jpeg_dec_my_output_message;
341 dec->jerr.pub.emit_message = gst_jpeg_dec_my_emit_message;
342 dec->jerr.pub.error_exit = gst_jpeg_dec_my_error_exit;
344 jpeg_create_decompress (&dec->cinfo);
346 dec->cinfo.src = (struct jpeg_source_mgr *) &dec->jsrc;
347 dec->cinfo.src->init_source = gst_jpeg_dec_init_source;
348 dec->cinfo.src->fill_input_buffer = gst_jpeg_dec_fill_input_buffer;
349 dec->cinfo.src->skip_input_data = gst_jpeg_dec_skip_input_data;
350 dec->cinfo.src->resync_to_restart = gst_jpeg_dec_resync_to_restart;
351 dec->cinfo.src->term_source = gst_jpeg_dec_term_source;
354 /* init properties */
355 dec->idct_method = JPEG_DEFAULT_IDCT_METHOD;
356 dec->max_errors = JPEG_DEFAULT_MAX_ERRORS;
359 static inline gboolean
360 gst_jpeg_dec_parse_tag_has_entropy_segment (guint8 tag)
362 if (tag == 0xda || (tag >= 0xd0 && tag <= 0xd7))
/* Custom GstVideoDecoder::parse() for non-packetized input: scans the
 * adapter for JPEG markers, walking from segment to segment until a
 * complete SOI..EOI image has been accumulated.
 * NOTE(review): several structural lines (braces, declarations, labels)
 * are missing from this extract; only comments were added here. */
gst_jpeg_dec_parse (GstVideoDecoder * bdec, GstVideoCodecFrame * frame,
    GstAdapter * adapter, gboolean at_eos)
  gint offset = 0, noffset;
  GstJpegDec *dec = (GstJpegDec *) bdec;

  /* FIXME : The overhead of using scan_uint32 is massive */

  size = gst_adapter_available (adapter);
  GST_DEBUG ("Parsing jpeg image data (%u bytes)", size);

    GST_DEBUG ("Flushing all data out");
    /* If we have leftover data, throw it away */
    if (!dec->saw_header)
    goto have_full_frame;

  if (!dec->saw_header) {
    /* we expect at least 4 bytes, first of which start marker */
        gst_adapter_masked_scan_uint32 (adapter, 0xffff0000, 0xffd80000, 0,

    GST_DEBUG ("ret:%d", ret);

      /* discard bytes preceding the SOI marker */
      gst_adapter_flush (adapter, ret);

    dec->saw_header = TRUE;

    GST_DEBUG ("offset:%d, size:%d", offset, size);

    /* look for the next 0xff marker byte at the expected position */
        gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
        offset, size - offset, &value);

    /* lost sync if 0xff marker not where expected */
    if ((resync = (noffset != offset))) {
      GST_DEBUG ("Lost sync at 0x%08x, resyncing", offset + 2);

    /* may have marker, but could have been resyncing */
    resync = resync || dec->parse_resync;

    /* Skip over extra 0xff */
    while ((noffset >= 0) && ((value & 0xff) == 0xff)) {
          gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
          noffset, size - noffset, &value);

    /* enough bytes left for marker? (we need 0xNN after the 0xff) */
      GST_DEBUG ("at end of input and no EOI marker found, need more data");

    /* now lock on the marker we found */
    value = value & 0xff;
      GST_DEBUG ("0x%08x: EOI marker", offset + 2);
      /* clear parse state */
      dec->saw_header = FALSE;
      dec->parse_resync = FALSE;
      goto have_full_frame;

      /* Skip this frame if we found another SOI marker */
      GST_DEBUG ("0x%08x: SOI marker before EOI, skipping", offset + 2);
      dec->parse_resync = FALSE;
      /* FIXME : Need to skip data */
      goto have_full_frame;

    /* restart markers (RST0-RST7) carry no length field */
    if (value >= 0xd0 && value <= 0xd7)

    /* peek tag and subsequent length */
    if (offset + 2 + 4 > size)

      gst_adapter_masked_scan_uint32_peek (adapter, 0x0, 0x0, offset + 2, 4,
    /* segment length is the low 16 bits of the peeked word */
    frame_len = frame_len & 0xffff;

    GST_DEBUG ("0x%08x: tag %02x, frame_len=%u", offset + 2, value, frame_len);
    /* the frame length includes the 2 bytes for the length; here we want at
     * least 2 more bytes at the end for an end marker */
    if (offset + 2 + 2 + frame_len + 2 > size) {

    if (gst_jpeg_dec_parse_tag_has_entropy_segment (value)) {
      guint eseglen = dec->parse_entropy_len;

      GST_DEBUG ("0x%08x: finding entropy segment length (eseglen:%d)",
          offset + 2, eseglen);
      if (size < offset + 2 + frame_len + eseglen)
      noffset = offset + 2 + frame_len + dec->parse_entropy_len;

      GST_DEBUG ("noffset:%d, size:%d, size - noffset:%d",
          noffset, size, size - noffset);
      noffset = gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00,
          0x0000ff00, noffset, size - noffset, &value);

        /* remember how far into the entropy data we already scanned so the
         * next call can resume instead of rescanning from the start */
        dec->parse_entropy_len = size - offset - 4 - frame_len - 2;

      if ((value & 0xff) != 0x00) {
        /* a marker (not a stuffed 0xff00) terminates the entropy segment */
        eseglen = noffset - offset - frame_len - 2;

      dec->parse_entropy_len = 0;
      frame_len += eseglen;
      GST_DEBUG ("entropy segment length=%u => frame_len=%u", eseglen,

      /* check if we will still be in sync if we interpret
       * this as a sync point and skip this frame */
      noffset = offset + frame_len + 2;
      noffset = gst_adapter_masked_scan_uint32 (adapter, 0x0000ff00, 0x0000ff00,

      /* ignore and continue resyncing until we hit the end
       * of our data or find a sync point that looks okay */
      GST_DEBUG ("found sync at 0x%x", offset + 2);

    /* Add current data to output buffer */
    toadd += frame_len + 2;
    offset += frame_len + 2;

  /* not enough data yet: hand what we have to the frame and ask for more */
  gst_video_decoder_add_to_frame (bdec, toadd);
  return GST_VIDEO_DECODER_FLOW_NEED_DATA;

  /* complete image accumulated: push it to handle_frame */
  gst_video_decoder_add_to_frame (bdec, toadd);
  return gst_video_decoder_have_frame (bdec);

  /* drop everything (resync/skip path) */
  gst_adapter_flush (adapter, size);
546 /* shamelessly ripped from jpegutils.c in mjpegtools */
548 add_huff_table (j_decompress_ptr dinfo,
549 JHUFF_TBL ** htblptr, const UINT8 * bits, const UINT8 * val)
550 /* Define a Huffman table */
554 if (*htblptr == NULL)
555 *htblptr = jpeg_alloc_huff_table ((j_common_ptr) dinfo);
559 /* Copy the number-of-symbols-of-each-code-length counts */
560 memcpy ((*htblptr)->bits, bits, sizeof ((*htblptr)->bits));
562 /* Validate the counts. We do this here mainly so we can copy the right
563 * number of symbols from the val[] array, without risking marching off
564 * the end of memory. jchuff.c will do a more thorough test later.
567 for (len = 1; len <= 16; len++)
568 nsymbols += bits[len];
569 if (nsymbols < 1 || nsymbols > 256)
570 g_error ("jpegutils.c: add_huff_table failed badly. ");
572 memcpy ((*htblptr)->huffval, val, nsymbols * sizeof (UINT8));
578 std_huff_tables (j_decompress_ptr dinfo)
579 /* Set up the standard Huffman tables (cf. JPEG standard section K.3) */
580 /* IMPORTANT: these are only valid for 8-bit data precision! */
582 static const UINT8 bits_dc_luminance[17] =
583 { /* 0-base */ 0, 0, 1, 5, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0 };
584 static const UINT8 val_dc_luminance[] =
585 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
587 static const UINT8 bits_dc_chrominance[17] =
588 { /* 0-base */ 0, 0, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0 };
589 static const UINT8 val_dc_chrominance[] =
590 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
592 static const UINT8 bits_ac_luminance[17] =
593 { /* 0-base */ 0, 0, 2, 1, 3, 3, 2, 4, 3, 5, 5, 4, 4, 0, 0, 1, 0x7d };
594 static const UINT8 val_ac_luminance[] =
595 { 0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12,
596 0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07,
597 0x22, 0x71, 0x14, 0x32, 0x81, 0x91, 0xa1, 0x08,
598 0x23, 0x42, 0xb1, 0xc1, 0x15, 0x52, 0xd1, 0xf0,
599 0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16,
600 0x17, 0x18, 0x19, 0x1a, 0x25, 0x26, 0x27, 0x28,
601 0x29, 0x2a, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39,
602 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49,
603 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59,
604 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69,
605 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79,
606 0x7a, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89,
607 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98,
608 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
609 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6,
610 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4, 0xc5,
611 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4,
612 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xe1, 0xe2,
613 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea,
614 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
618 static const UINT8 bits_ac_chrominance[17] =
619 { /* 0-base */ 0, 0, 2, 1, 2, 4, 4, 3, 4, 7, 5, 4, 4, 0, 1, 2, 0x77 };
620 static const UINT8 val_ac_chrominance[] =
621 { 0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21,
622 0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71,
623 0x13, 0x22, 0x32, 0x81, 0x08, 0x14, 0x42, 0x91,
624 0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33, 0x52, 0xf0,
625 0x15, 0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34,
626 0xe1, 0x25, 0xf1, 0x17, 0x18, 0x19, 0x1a, 0x26,
627 0x27, 0x28, 0x29, 0x2a, 0x35, 0x36, 0x37, 0x38,
628 0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
629 0x49, 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58,
630 0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
631 0x69, 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78,
632 0x79, 0x7a, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
633 0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96,
634 0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5,
635 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4,
636 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3,
637 0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2,
638 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda,
639 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9,
640 0xea, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
644 add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[0],
645 bits_dc_luminance, val_dc_luminance);
646 add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[0],
647 bits_ac_luminance, val_ac_luminance);
648 add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[1],
649 bits_dc_chrominance, val_dc_chrominance);
650 add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[1],
651 bits_ac_chrominance, val_ac_chrominance);
657 guarantee_huff_tables (j_decompress_ptr dinfo)
659 if ((dinfo->dc_huff_tbl_ptrs[0] == NULL) &&
660 (dinfo->dc_huff_tbl_ptrs[1] == NULL) &&
661 (dinfo->ac_huff_tbl_ptrs[0] == NULL) &&
662 (dinfo->ac_huff_tbl_ptrs[1] == NULL)) {
663 GST_DEBUG ("Generating standard Huffman tables for this frame.");
664 std_huff_tables (dinfo);
669 gst_jpeg_dec_set_format (GstVideoDecoder * dec, GstVideoCodecState * state)
671 GstJpegDec *jpeg = GST_JPEG_DEC (dec);
672 GstVideoInfo *info = &state->info;
674 /* FIXME : previously jpegdec would handled input as packetized
675 * if the framerate was present. Here we consider it packetized if
676 * the fps is != 1/1 */
677 if (GST_VIDEO_INFO_FPS_N (info) != 1 && GST_VIDEO_INFO_FPS_D (info) != 1)
678 gst_video_decoder_set_packetized (dec, TRUE);
680 gst_video_decoder_set_packetized (dec, FALSE);
682 if (jpeg->input_state)
683 gst_video_codec_state_unref (jpeg->input_state);
684 jpeg->input_state = gst_video_codec_state_ref (state);
692 hresamplecpy1 (guint8 * dest, const guint8 * src, guint len)
696 for (i = 0; i < len; ++i) {
697 /* equivalent to: dest[i] = src[i << 1] */
706 gst_jpeg_dec_free_buffers (GstJpegDec * dec)
710 for (i = 0; i < 16; i++) {
711 g_free (dec->idr_y[i]);
712 g_free (dec->idr_u[i]);
713 g_free (dec->idr_v[i]);
714 dec->idr_y[i] = NULL;
715 dec->idr_u[i] = NULL;
716 dec->idr_v[i] = NULL;
719 dec->idr_width_allocated = 0;
722 static inline gboolean
723 gst_jpeg_dec_ensure_buffers (GstJpegDec * dec, guint maxrowbytes)
727 if (G_LIKELY (dec->idr_width_allocated == maxrowbytes))
730 /* FIXME: maybe just alloc one or three blocks altogether? */
731 for (i = 0; i < 16; i++) {
732 dec->idr_y[i] = g_try_realloc (dec->idr_y[i], maxrowbytes);
733 dec->idr_u[i] = g_try_realloc (dec->idr_u[i], maxrowbytes);
734 dec->idr_v[i] = g_try_realloc (dec->idr_v[i], maxrowbytes);
736 if (G_UNLIKELY (!dec->idr_y[i] || !dec->idr_u[i] || !dec->idr_v[i])) {
737 GST_WARNING_OBJECT (dec, "out of memory, i=%d, bytes=%u", i, maxrowbytes);
742 dec->idr_width_allocated = maxrowbytes;
743 GST_LOG_OBJECT (dec, "allocated temp memory, %u bytes/row", maxrowbytes);
748 gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, GstVideoFrame * frame)
751 guchar **scanarray[1] = { rows };
756 gint pstride, rstride;
758 GST_DEBUG_OBJECT (dec, "indirect decoding of grayscale");
760 width = GST_VIDEO_FRAME_WIDTH (frame);
761 height = GST_VIDEO_FRAME_HEIGHT (frame);
763 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
766 base[0] = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
767 pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
768 rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
770 memcpy (rows, dec->idr_y, 16 * sizeof (gpointer));
774 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
775 if (G_LIKELY (lines > 0)) {
776 for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
780 for (k = 0; k < width; k++) {
781 base[0][p] = rows[j][k];
787 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
793 gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame)
795 guchar *r_rows[16], *g_rows[16], *b_rows[16];
796 guchar **scanarray[3] = { r_rows, g_rows, b_rows };
800 guint pstride, rstride;
803 GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");
805 width = GST_VIDEO_FRAME_WIDTH (frame);
806 height = GST_VIDEO_FRAME_HEIGHT (frame);
808 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
811 for (i = 0; i < 3; i++)
812 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
814 pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
815 rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
817 memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
818 memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
819 memcpy (b_rows, dec->idr_v, 16 * sizeof (gpointer));
823 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
824 if (G_LIKELY (lines > 0)) {
825 for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
829 for (k = 0; k < width; k++) {
830 base[0][p] = r_rows[j][k];
831 base[1][p] = g_rows[j][k];
832 base[2][p] = b_rows[j][k];
840 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
846 gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame, gint r_v,
849 guchar *y_rows[16], *u_rows[16], *v_rows[16];
850 guchar **scanarray[3] = { y_rows, u_rows, v_rows };
853 guchar *base[3], *last[3];
857 GST_DEBUG_OBJECT (dec,
858 "unadvantageous width or r_h, taking slow route involving memcpy");
860 width = GST_VIDEO_FRAME_WIDTH (frame);
861 height = GST_VIDEO_FRAME_HEIGHT (frame);
863 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
866 for (i = 0; i < 3; i++) {
867 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
868 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
869 /* make sure we don't make jpeglib write beyond our buffer,
870 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
871 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
872 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
875 memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer));
876 memcpy (u_rows, dec->idr_u, 16 * sizeof (gpointer));
877 memcpy (v_rows, dec->idr_v, 16 * sizeof (gpointer));
879 /* fill chroma components for grayscale */
881 GST_DEBUG_OBJECT (dec, "grayscale, filling chroma");
882 for (i = 0; i < 16; i++) {
883 memset (u_rows[i], GST_ROUND_UP_32 (width), 0x80);
884 memset (v_rows[i], GST_ROUND_UP_32 (width), 0x80);
888 for (i = 0; i < height; i += r_v * DCTSIZE) {
889 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, r_v * DCTSIZE);
890 if (G_LIKELY (lines > 0)) {
891 for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) {
892 if (G_LIKELY (base[0] <= last[0])) {
893 memcpy (base[0], y_rows[j], stride[0]);
894 base[0] += stride[0];
897 if (G_LIKELY (base[0] <= last[0])) {
898 memcpy (base[0], y_rows[j + 1], stride[0]);
899 base[0] += stride[0];
902 if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) {
904 memcpy (base[1], u_rows[k], stride[1]);
905 memcpy (base[2], v_rows[k], stride[2]);
906 } else if (r_h == 1) {
907 hresamplecpy1 (base[1], u_rows[k], stride[1]);
908 hresamplecpy1 (base[2], v_rows[k], stride[2]);
910 /* FIXME: implement (at least we avoid crashing by doing nothing) */
914 if (r_v == 2 || (k & 1) != 0) {
915 base[1] += stride[1];
916 base[2] += stride[2];
920 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
926 gst_jpeg_dec_decode_direct (GstJpegDec * dec, GstVideoFrame * frame)
928 guchar **line[3]; /* the jpeg line buffer */
929 guchar *y[4 * DCTSIZE] = { NULL, }; /* alloc enough for the lines */
930 guchar *u[4 * DCTSIZE] = { NULL, }; /* r_v will be <4 */
931 guchar *v[4 * DCTSIZE] = { NULL, };
933 gint lines, v_samp[3];
934 guchar *base[3], *last[3];
942 v_samp[0] = dec->cinfo.comp_info[0].v_samp_factor;
943 v_samp[1] = dec->cinfo.comp_info[1].v_samp_factor;
944 v_samp[2] = dec->cinfo.comp_info[2].v_samp_factor;
946 if (G_UNLIKELY (v_samp[0] > 2 || v_samp[1] > 2 || v_samp[2] > 2))
947 goto format_not_supported;
949 height = GST_VIDEO_FRAME_HEIGHT (frame);
951 for (i = 0; i < 3; i++) {
952 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
953 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
954 /* make sure we don't make jpeglib write beyond our buffer,
955 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
956 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
957 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
960 /* let jpeglib decode directly into our final buffer */
961 GST_DEBUG_OBJECT (dec, "decoding directly into output buffer");
963 for (i = 0; i < height; i += v_samp[0] * DCTSIZE) {
964 for (j = 0; j < (v_samp[0] * DCTSIZE); ++j) {
966 line[0][j] = base[0] + (i + j) * stride[0];
967 if (G_UNLIKELY (line[0][j] > last[0]))
968 line[0][j] = last[0];
970 if (v_samp[1] == v_samp[0]) {
971 line[1][j] = base[1] + ((i + j) / 2) * stride[1];
972 } else if (j < (v_samp[1] * DCTSIZE)) {
973 line[1][j] = base[1] + ((i / 2) + j) * stride[1];
975 if (G_UNLIKELY (line[1][j] > last[1]))
976 line[1][j] = last[1];
978 if (v_samp[2] == v_samp[0]) {
979 line[2][j] = base[2] + ((i + j) / 2) * stride[2];
980 } else if (j < (v_samp[2] * DCTSIZE)) {
981 line[2][j] = base[2] + ((i / 2) + j) * stride[2];
983 if (G_UNLIKELY (line[2][j] > last[2]))
984 line[2][j] = last[2];
987 lines = jpeg_read_raw_data (&dec->cinfo, line, v_samp[0] * DCTSIZE);
988 if (G_UNLIKELY (!lines)) {
989 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
994 format_not_supported:
996 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
997 "Unsupported subsampling schema: v_samp factors: %u %u %u",
998 v_samp[0], v_samp[1], v_samp[2]);
999 return GST_FLOW_ERROR;
1004 gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc)
1006 GstVideoCodecState *outstate;
1008 GstVideoFormat format;
1012 format = GST_VIDEO_FORMAT_RGB;
1015 format = GST_VIDEO_FORMAT_GRAY8;
1018 format = GST_VIDEO_FORMAT_I420;
1022 /* Compare to currently configured output state */
1023 outstate = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (dec));
1025 info = &outstate->info;
1027 if (width == GST_VIDEO_INFO_WIDTH (info) &&
1028 height == GST_VIDEO_INFO_HEIGHT (info) &&
1029 format == GST_VIDEO_INFO_FORMAT (info)) {
1030 gst_video_codec_state_unref (outstate);
1033 gst_video_codec_state_unref (outstate);
1037 gst_video_decoder_set_output_state (GST_VIDEO_DECODER (dec), format,
1038 width, height, dec->input_state);
1045 outstate->info.colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;
1046 outstate->info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
1047 outstate->info.colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN;
1048 outstate->info.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
1052 gst_video_codec_state_unref (outstate);
1054 gst_video_decoder_negotiate (GST_VIDEO_DECODER (dec));
1056 GST_DEBUG_OBJECT (dec, "max_v_samp_factor=%d", dec->cinfo.max_v_samp_factor);
1057 GST_DEBUG_OBJECT (dec, "max_h_samp_factor=%d", dec->cinfo.max_h_samp_factor);
1060 static GstFlowReturn
1061 gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec, GstVideoCodecFrame * frame)
1063 GstFlowReturn ret = GST_FLOW_OK;
1064 GstJpegDec *dec = (GstJpegDec *) bdec;
1065 GstVideoFrame vframe;
1069 gboolean need_unmap = TRUE;
1070 GstVideoCodecState *state = NULL;
1072 dec->current_frame = frame;
1073 gst_buffer_map (frame->input_buffer, &dec->current_frame_map, GST_MAP_READ);
1074 gst_jpeg_dec_fill_input_buffer (&dec->cinfo);
1076 if (setjmp (dec->jerr.setjmp_buffer)) {
1077 code = dec->jerr.pub.msg_code;
1079 if (code == JERR_INPUT_EOF) {
1080 GST_DEBUG ("jpeg input EOF error, we probably need more data");
1081 goto need_more_data;
1087 hdr_ok = jpeg_read_header (&dec->cinfo, TRUE);
1088 if (G_UNLIKELY (hdr_ok != JPEG_HEADER_OK)) {
1089 GST_WARNING_OBJECT (dec, "reading the header failed, %d", hdr_ok);
1092 GST_LOG_OBJECT (dec, "num_components=%d", dec->cinfo.num_components);
1093 GST_LOG_OBJECT (dec, "jpeg_color_space=%d", dec->cinfo.jpeg_color_space);
1095 if (!dec->cinfo.num_components || !dec->cinfo.comp_info)
1096 goto components_not_supported;
1098 r_h = dec->cinfo.comp_info[0].h_samp_factor;
1099 r_v = dec->cinfo.comp_info[0].v_samp_factor;
1101 GST_LOG_OBJECT (dec, "r_h = %d, r_v = %d", r_h, r_v);
1103 if (dec->cinfo.num_components > 3)
1104 goto components_not_supported;
1106 /* verify color space expectation to avoid going *boom* or bogus output */
1107 if (dec->cinfo.jpeg_color_space != JCS_YCbCr &&
1108 dec->cinfo.jpeg_color_space != JCS_GRAYSCALE &&
1109 dec->cinfo.jpeg_color_space != JCS_RGB)
1110 goto unsupported_colorspace;
1112 #ifndef GST_DISABLE_GST_DEBUG
1116 for (i = 0; i < dec->cinfo.num_components; ++i) {
1117 GST_LOG_OBJECT (dec, "[%d] h_samp_factor=%d, v_samp_factor=%d, cid=%d",
1118 i, dec->cinfo.comp_info[i].h_samp_factor,
1119 dec->cinfo.comp_info[i].v_samp_factor,
1120 dec->cinfo.comp_info[i].component_id);
1125 /* prepare for raw output */
1126 dec->cinfo.do_fancy_upsampling = FALSE;
1127 dec->cinfo.do_block_smoothing = FALSE;
1128 dec->cinfo.out_color_space = dec->cinfo.jpeg_color_space;
1129 dec->cinfo.dct_method = dec->idct_method;
1130 dec->cinfo.raw_data_out = TRUE;
1132 GST_LOG_OBJECT (dec, "starting decompress");
1133 guarantee_huff_tables (&dec->cinfo);
1134 if (!jpeg_start_decompress (&dec->cinfo)) {
1135 GST_WARNING_OBJECT (dec, "failed to start decompression cycle");
1138 /* sanity checks to get safe and reasonable output */
1139 switch (dec->cinfo.jpeg_color_space) {
1141 if (dec->cinfo.num_components != 1)
1142 goto invalid_yuvrgbgrayscale;
1145 if (dec->cinfo.num_components != 3 || dec->cinfo.max_v_samp_factor > 1 ||
1146 dec->cinfo.max_h_samp_factor > 1)
1147 goto invalid_yuvrgbgrayscale;
1150 if (dec->cinfo.num_components != 3 ||
1151 r_v > 2 || r_v < dec->cinfo.comp_info[0].v_samp_factor ||
1152 r_v < dec->cinfo.comp_info[1].v_samp_factor ||
1153 r_h < dec->cinfo.comp_info[0].h_samp_factor ||
1154 r_h < dec->cinfo.comp_info[1].h_samp_factor)
1155 goto invalid_yuvrgbgrayscale;
1158 g_assert_not_reached ();
1162 width = dec->cinfo.output_width;
1163 height = dec->cinfo.output_height;
1165 if (G_UNLIKELY (width < MIN_WIDTH || width > MAX_WIDTH ||
1166 height < MIN_HEIGHT || height > MAX_HEIGHT))
1169 gst_jpeg_dec_negotiate (dec, width, height, dec->cinfo.jpeg_color_space);
1171 state = gst_video_decoder_get_output_state (bdec);
1172 ret = gst_video_decoder_allocate_output_frame (bdec, frame);
1173 if (G_UNLIKELY (ret != GST_FLOW_OK))
1176 if (!gst_video_frame_map (&vframe, &state->info, frame->output_buffer,
1180 GST_LOG_OBJECT (dec, "width %d, height %d", width, height);
1182 if (dec->cinfo.jpeg_color_space == JCS_RGB) {
1183 gst_jpeg_dec_decode_rgb (dec, &vframe);
1184 } else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
1185 gst_jpeg_dec_decode_grayscale (dec, &vframe);
1187 GST_LOG_OBJECT (dec, "decompressing (reqired scanline buffer height = %u)",
1188 dec->cinfo.rec_outbuf_height);
1190 /* For some widths jpeglib requires more horizontal padding than I420
1191 * provides. In those cases we need to decode into separate buffers and then
1192 * copy over the data into our final picture buffer, otherwise jpeglib might
1193 * write over the end of a line into the beginning of the next line,
1194 * resulting in blocky artifacts on the left side of the picture. */
1195 if (G_UNLIKELY (width % (dec->cinfo.max_h_samp_factor * DCTSIZE) != 0
1196 || dec->cinfo.comp_info[0].h_samp_factor != 2
1197 || dec->cinfo.comp_info[1].h_samp_factor != 1
1198 || dec->cinfo.comp_info[2].h_samp_factor != 1)) {
1199 GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
1200 "indirect decoding using extra buffer copy");
1201 gst_jpeg_dec_decode_indirect (dec, &vframe, r_v, r_h,
1202 dec->cinfo.num_components);
1204 ret = gst_jpeg_dec_decode_direct (dec, &vframe);
1206 if (G_UNLIKELY (ret != GST_FLOW_OK))
1207 goto decode_direct_failed;
1211 gst_video_frame_unmap (&vframe);
1213 GST_LOG_OBJECT (dec, "decompressing finished");
1214 jpeg_finish_decompress (&dec->cinfo);
1216 /* reset error count on successful decode */
1217 dec->error_count = 0;
1219 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1220 ret = gst_video_decoder_finish_frame (bdec, frame);
1227 if (G_UNLIKELY (ret == GST_FLOW_ERROR)) {
1228 jpeg_abort_decompress (&dec->cinfo);
1229 ret = gst_jpeg_dec_post_error_or_warning (dec);
1233 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1236 gst_video_codec_state_unref (state);
1243 GST_LOG_OBJECT (dec, "we need more data");
1250 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
1251 "Picture is too small or too big (%ux%u)", width, height);
1252 ret = GST_FLOW_ERROR;
1257 gchar err_msg[JMSG_LENGTH_MAX];
1259 dec->jerr.pub.format_message ((j_common_ptr) (&dec->cinfo), err_msg);
1261 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
1262 "Decode error #%u: %s", code, err_msg);
1264 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1265 gst_video_decoder_drop_frame (bdec, frame);
1268 ret = GST_FLOW_ERROR;
1271 decode_direct_failed:
1273 /* already posted an error message */
1274 jpeg_abort_decompress (&dec->cinfo);
1279 const gchar *reason;
1281 reason = gst_flow_get_name (ret);
1283 GST_DEBUG_OBJECT (dec, "failed to alloc buffer, reason %s", reason);
1284 /* Reset for next time */
1285 jpeg_abort_decompress (&dec->cinfo);
1286 if (ret != GST_FLOW_EOS && ret != GST_FLOW_FLUSHING &&
1287 ret != GST_FLOW_NOT_LINKED) {
1288 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
1289 "Buffer allocation failed, reason: %s", reason);
1293 components_not_supported:
1295 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
1296 "number of components not supported: %d (max 3)",
1297 dec->cinfo.num_components);
1298 ret = GST_FLOW_ERROR;
1301 unsupported_colorspace:
1303 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
1304 "Picture has unknown or unsupported colourspace");
1305 ret = GST_FLOW_ERROR;
1308 invalid_yuvrgbgrayscale:
1310 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
1311 "Picture is corrupt or unhandled YUV/RGB/grayscale layout");
1312 ret = GST_FLOW_ERROR;
1318 gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query)
1320 GstBufferPool *pool;
1321 GstStructure *config;
1323 if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query))
1326 g_assert (gst_query_get_n_allocation_pools (query) > 0);
1327 gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
1328 g_assert (pool != NULL);
1330 config = gst_buffer_pool_get_config (pool);
1331 if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
1332 gst_buffer_pool_config_add_option (config,
1333 GST_BUFFER_POOL_OPTION_VIDEO_META);
1335 gst_buffer_pool_set_config (pool, config);
1336 gst_object_unref (pool);
1342 gst_jpeg_dec_reset (GstVideoDecoder * bdec, gboolean hard)
1344 GstJpegDec *dec = (GstJpegDec *) bdec;
1346 jpeg_abort_decompress (&dec->cinfo);
1347 dec->parse_entropy_len = 0;
1348 dec->parse_resync = FALSE;
1349 dec->saw_header = FALSE;
1355 gst_jpeg_dec_set_property (GObject * object, guint prop_id,
1356 const GValue * value, GParamSpec * pspec)
1360 dec = GST_JPEG_DEC (object);
1363 case PROP_IDCT_METHOD:
1364 dec->idct_method = g_value_get_enum (value);
1366 case PROP_MAX_ERRORS:
1367 g_atomic_int_set (&dec->max_errors, g_value_get_int (value));
1371 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1377 gst_jpeg_dec_get_property (GObject * object, guint prop_id, GValue * value,
1382 dec = GST_JPEG_DEC (object);
1385 case PROP_IDCT_METHOD:
1386 g_value_set_enum (value, dec->idct_method);
1388 case PROP_MAX_ERRORS:
1389 g_value_set_int (value, g_atomic_int_get (&dec->max_errors));
1393 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1399 gst_jpeg_dec_start (GstVideoDecoder * bdec)
1401 GstJpegDec *dec = (GstJpegDec *) bdec;
1403 dec->error_count = 0;
1404 dec->parse_entropy_len = 0;
1405 dec->parse_resync = FALSE;
1411 gst_jpeg_dec_stop (GstVideoDecoder * bdec)
1413 GstJpegDec *dec = (GstJpegDec *) bdec;
1415 gst_jpeg_dec_free_buffers (dec);