2 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
3 * Copyright (C) <2009> Tim-Philipp Müller <tim centricular net>
4 * Copyright (C) 2012 Collabora Ltd.
5 * Author : Edward Hervey <edward@collabora.com>
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Library General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Library General Public License for more details.
17 * You should have received a copy of the GNU Library General Public
18 * License along with this library; if not, write to the
19 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
20 * Boston, MA 02110-1301, USA.
24 * SECTION:element-jpegdec
26 * Decodes jpeg images.
29 * <title>Example launch line</title>
31 * gst-launch-1.0 -v filesrc location=mjpeg.avi ! avidemux ! queue ! jpegdec ! videoconvert ! videoscale ! autovideosink
32 * ]| The above pipeline decodes the mjpeg stream and renders it to the screen.
41 #include "gstjpegdec.h"
43 #include <gst/video/video.h>
44 #include <gst/video/gstvideometa.h>
45 #include <gst/video/gstvideopool.h>
46 #include "gst/gst-i18n-plugin.h"
50 #define MAX_WIDTH 65535
52 #define MAX_HEIGHT 65535
54 #define CINFO_GET_JPEGDEC(cinfo_ptr) \
55 (((struct GstJpegDecSourceMgr*)((cinfo_ptr)->src))->dec)
57 #define JPEG_DEFAULT_IDCT_METHOD JDCT_FASTEST
58 #define JPEG_DEFAULT_MAX_ERRORS 0
/* Pad templates: src outputs raw video in the listed formats; sink accepts
 * image/jpeg constrained to the MIN/MAX width and height range. */
68 static GstStaticPadTemplate gst_jpeg_dec_src_pad_template =
69 GST_STATIC_PAD_TEMPLATE ("src",
72 GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
73 ("{ I420, RGB, BGR, RGBx, xRGB, BGRx, xBGR, GRAY8 }"))
77 /* FIXME: sof-marker is for IJG libjpeg 8, should be different for 6.2 */
78 /* FIXME: add back "sof-marker = (int) { 0, 1, 2, 5, 6, 7, 9, 10, 13, 14 }"
79 * once we have a parser and/or demuxer set caps properly */
80 static GstStaticPadTemplate gst_jpeg_dec_sink_pad_template =
81 GST_STATIC_PAD_TEMPLATE ("sink",
84 GST_STATIC_CAPS ("image/jpeg, "
85 "width = (int) [ " G_STRINGIFY (MIN_WIDTH) ", " G_STRINGIFY (MAX_WIDTH)
86 " ], " "height = (int) [ " G_STRINGIFY (MIN_HEIGHT) ", "
87 G_STRINGIFY (MAX_HEIGHT) " ] ")
/* Element debug category plus the shared GST_PERFORMANCE category (looked
 * up, not created, in class_init below). */
90 GST_DEBUG_CATEGORY_STATIC (jpeg_dec_debug);
91 #define GST_CAT_DEFAULT jpeg_dec_debug
92 GST_DEBUG_CATEGORY_STATIC (GST_CAT_PERFORMANCE);
/* Forward declarations for the GObject property hooks and the
 * GstVideoDecoder vfunc implementations wired up in class_init. */
94 static void gst_jpeg_dec_set_property (GObject * object, guint prop_id,
95 const GValue * value, GParamSpec * pspec);
96 static void gst_jpeg_dec_get_property (GObject * object, guint prop_id,
97 GValue * value, GParamSpec * pspec);
99 static gboolean gst_jpeg_dec_set_format (GstVideoDecoder * dec,
100 GstVideoCodecState * state);
101 static gboolean gst_jpeg_dec_stop (GstVideoDecoder * bdec);
102 static gboolean gst_jpeg_dec_reset (GstVideoDecoder * bdec, gboolean hard);
103 static GstFlowReturn gst_jpeg_dec_parse (GstVideoDecoder * bdec,
104 GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos);
105 static GstFlowReturn gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec,
106 GstVideoCodecFrame * frame);
107 static gboolean gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec,
110 #define gst_jpeg_dec_parent_class parent_class
111 G_DEFINE_TYPE (GstJpegDec, gst_jpeg_dec, GST_TYPE_VIDEO_DECODER);
/* GObject finalize: release the libjpeg decompress state and any cached
 * input codec state, then chain up to the parent class. */
114 gst_jpeg_dec_finalize (GObject * object)
116 GstJpegDec *dec = GST_JPEG_DEC (object);
118 jpeg_destroy_decompress (&dec->cinfo);
119 if (dec->input_state)
120 gst_video_codec_state_unref (dec->input_state);
122 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Class init: install the idct-method and max-errors properties, register
 * pad templates and element metadata, and hook up the GstVideoDecoder
 * virtual functions. */
126 gst_jpeg_dec_class_init (GstJpegDecClass * klass)
128 GObjectClass *gobject_class;
129 GstElementClass *element_class;
130 GstVideoDecoderClass *vdec_class;
132 gobject_class = (GObjectClass *) klass;
133 element_class = (GstElementClass *) klass;
134 vdec_class = (GstVideoDecoderClass *) klass;
136 parent_class = g_type_class_peek_parent (klass);
138 gobject_class->finalize = gst_jpeg_dec_finalize;
139 gobject_class->set_property = gst_jpeg_dec_set_property;
140 gobject_class->get_property = gst_jpeg_dec_get_property;
142 g_object_class_install_property (gobject_class, PROP_IDCT_METHOD,
143 g_param_spec_enum ("idct-method", "IDCT Method",
144 "The IDCT algorithm to use", GST_TYPE_IDCT_METHOD,
145 JPEG_DEFAULT_IDCT_METHOD,
146 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
149 * GstJpegDec:max-errors
151 * Error out after receiving N consecutive decoding errors
152 * (-1 = never error out, 0 = automatic, 1 = fail on first error, etc.)
/* max-errors is read atomically in gst_jpeg_dec_post_error_or_warning(),
 * so it can be changed at runtime from another thread. */
156 g_object_class_install_property (gobject_class, PROP_MAX_ERRORS,
157 g_param_spec_int ("max-errors", "Maximum Consecutive Decoding Errors",
158 "Error out after receiving N consecutive decoding errors "
159 "(-1 = never fail, 0 = automatic, 1 = fail on first error)",
160 -1, G_MAXINT, JPEG_DEFAULT_MAX_ERRORS,
161 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
163 gst_element_class_add_pad_template (element_class,
164 gst_static_pad_template_get (&gst_jpeg_dec_src_pad_template));
165 gst_element_class_add_pad_template (element_class,
166 gst_static_pad_template_get (&gst_jpeg_dec_sink_pad_template));
167 gst_element_class_set_static_metadata (element_class, "JPEG image decoder",
168 "Codec/Decoder/Image",
169 "Decode images from JPEG format", "Wim Taymans <wim@fluendo.com>");
171 vdec_class->stop = gst_jpeg_dec_stop;
172 vdec_class->reset = gst_jpeg_dec_reset;
173 vdec_class->parse = gst_jpeg_dec_parse;
174 vdec_class->set_format = gst_jpeg_dec_set_format;
175 vdec_class->handle_frame = gst_jpeg_dec_handle_frame;
176 vdec_class->decide_allocation = gst_jpeg_dec_decide_allocation;
178 GST_DEBUG_CATEGORY_INIT (jpeg_dec_debug, "jpegdec", 0, "JPEG decoder");
179 GST_DEBUG_CATEGORY_GET (GST_CAT_PERFORMANCE, "GST_PERFORMANCE");
/* Drop any pending error message/location previously recorded with
 * gst_jpeg_dec_set_error(). */
183 gst_jpeg_dec_clear_error (GstJpegDec * dec)
185 g_free (dec->error_msg);
186 dec->error_msg = NULL;
188 dec->error_func = NULL;
/* Record a decoding error for later posting: log it, format and stash the
 * message plus the func/line where it occurred.  The message is only
 * posted on the bus by gst_jpeg_dec_post_error_or_warning(). */
192 gst_jpeg_dec_set_error_va (GstJpegDec * dec, const gchar * func, gint line,
193 const gchar * debug_msg_format, va_list args)
195 #ifndef GST_DISABLE_GST_DEBUG
196 gst_debug_log_valist (GST_CAT_DEFAULT, GST_LEVEL_WARNING, __FILE__, func,
197 line, (GObject *) dec, debug_msg_format, args);
/* Replace any previously stored (not yet posted) message. */
200 g_free (dec->error_msg);
201 if (debug_msg_format)
202 dec->error_msg = g_strdup_vprintf (debug_msg_format, args);
204 dec->error_msg = NULL;
206 dec->error_line = line;
207 dec->error_func = func;
/* Varargs convenience wrapper around gst_jpeg_dec_set_error_va().
 * NOTE(review): the matching va_end() is not visible in this view of the
 * file — confirm it is present after the _va call. */
211 gst_jpeg_dec_set_error (GstJpegDec * dec, const gchar * func, gint line,
212 const gchar * debug_msg_format, ...)
216 va_start (va, debug_msg_format);
217 gst_jpeg_dec_set_error_va (dec, func, line, debug_msg_format, va);
/* Decide whether the accumulated consecutive error count should produce a
 * bus WARNING (keep going, GST_FLOW_OK) or an ERROR (GST_FLOW_ERROR),
 * based on the max-errors property, then post the stored message. */
222 gst_jpeg_dec_post_error_or_warning (GstJpegDec * dec)
228 max_errors = g_atomic_int_get (&dec->max_errors);
230 if (max_errors < 0) {
232 } else if (max_errors == 0) {
233 /* FIXME: do something more clever in "automatic mode" */
234 if (gst_video_decoder_get_packetized (GST_VIDEO_DECODER (dec))) {
235 ret = (dec->error_count < 3) ? GST_FLOW_OK : GST_FLOW_ERROR;
237 ret = GST_FLOW_ERROR;
240 ret = (dec->error_count < max_errors) ? GST_FLOW_OK : GST_FLOW_ERROR;
243 GST_INFO_OBJECT (dec, "decoding error %d/%d (%s)", dec->error_count,
244 max_errors, (ret == GST_FLOW_OK) ? "ignoring error" : "erroring out");
/* gst_element_message_full() takes ownership of the message strings. */
246 gst_element_message_full (GST_ELEMENT (dec),
247 (ret == GST_FLOW_OK) ? GST_MESSAGE_WARNING : GST_MESSAGE_ERROR,
248 GST_STREAM_ERROR, GST_STREAM_ERROR_DECODE,
249 g_strdup (_("Failed to decode JPEG image")), dec->error_msg,
250 __FILE__, dec->error_func, dec->error_line);
/* error_msg ownership was transferred above; clear the rest. */
252 dec->error_msg = NULL;
253 gst_jpeg_dec_clear_error (dec);
/* libjpeg source-manager callback: (re)point the source at the currently
 * mapped input buffer.  The whole frame is handed to libjpeg at once. */
258 gst_jpeg_dec_fill_input_buffer (j_decompress_ptr cinfo)
262 dec = CINFO_GET_JPEGDEC (cinfo);
263 g_return_val_if_fail (dec != NULL, FALSE);
264 g_return_val_if_fail (dec->current_frame != NULL, FALSE);
265 g_return_val_if_fail (dec->current_frame_map.data != NULL, FALSE);
267 cinfo->src->next_input_byte = dec->current_frame_map.data;
268 cinfo->src->bytes_in_buffer = dec->current_frame_map.size;
/* Remaining libjpeg source-manager and error-manager callbacks.  Most are
 * trivial because the complete image is always in memory. */
274 gst_jpeg_dec_init_source (j_decompress_ptr cinfo)
276 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "init_source");
/* Skip bytes within the in-memory buffer; only advances when the request
 * fits in what is left. */
281 gst_jpeg_dec_skip_input_data (j_decompress_ptr cinfo, glong num_bytes)
283 GstJpegDec *dec = CINFO_GET_JPEGDEC (cinfo);
285 GST_DEBUG_OBJECT (dec, "skip %ld bytes", num_bytes);
287 if (num_bytes > 0 && cinfo->src->bytes_in_buffer >= num_bytes) {
288 cinfo->src->next_input_byte += (size_t) num_bytes;
289 cinfo->src->bytes_in_buffer -= (size_t) num_bytes;
294 gst_jpeg_dec_resync_to_restart (j_decompress_ptr cinfo, gint desired)
296 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "resync_to_start");
301 gst_jpeg_dec_term_source (j_decompress_ptr cinfo)
303 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "term_source");
/* Suppress libjpeg's default stderr output; errors are reported through
 * the GStreamer message system instead. */
308 gst_jpeg_dec_my_output_message (j_common_ptr cinfo)
310 return; /* do nothing */
314 gst_jpeg_dec_my_emit_message (j_common_ptr cinfo, int msg_level)
316 /* GST_LOG_OBJECT (CINFO_GET_JPEGDEC (&cinfo), "msg_level=%d", msg_level); */
/* Fatal-error hook: longjmp back to the setjmp in handle_frame instead of
 * letting libjpeg call exit(). */
321 gst_jpeg_dec_my_error_exit (j_common_ptr cinfo)
323 struct GstJpegDecErrorMgr *err_mgr = (struct GstJpegDecErrorMgr *) cinfo->err;
325 (*cinfo->err->output_message) (cinfo);
326 longjmp (err_mgr->setjmp_buffer, 1);
/* Instance init: set up the libjpeg decompress object with our custom
 * error handlers and memory source manager, and initialize properties. */
330 gst_jpeg_dec_init (GstJpegDec * dec)
332 GST_DEBUG ("initializing");
/* Install error callbacks BEFORE jpeg_create_decompress() so any failure
 * during creation already goes through our handlers. */
335 memset (&dec->cinfo, 0, sizeof (dec->cinfo));
336 memset (&dec->jerr, 0, sizeof (dec->jerr));
337 dec->cinfo.err = jpeg_std_error (&dec->jerr.pub);
338 dec->jerr.pub.output_message = gst_jpeg_dec_my_output_message;
339 dec->jerr.pub.emit_message = gst_jpeg_dec_my_emit_message;
340 dec->jerr.pub.error_exit = gst_jpeg_dec_my_error_exit;
342 jpeg_create_decompress (&dec->cinfo);
344 dec->cinfo.src = (struct jpeg_source_mgr *) &dec->jsrc;
345 dec->cinfo.src->init_source = gst_jpeg_dec_init_source;
346 dec->cinfo.src->fill_input_buffer = gst_jpeg_dec_fill_input_buffer;
347 dec->cinfo.src->skip_input_data = gst_jpeg_dec_skip_input_data;
348 dec->cinfo.src->resync_to_restart = gst_jpeg_dec_resync_to_restart;
349 dec->cinfo.src->term_source = gst_jpeg_dec_term_source;
352 /* init properties */
353 dec->idct_method = JPEG_DEFAULT_IDCT_METHOD;
354 dec->max_errors = JPEG_DEFAULT_MAX_ERRORS;
/* TRUE if the marker is SOS (0xda) or a restart marker (0xd0-0xd7), i.e.
 * the segment is followed by entropy-coded data of unknown length. */
357 static inline gboolean
358 gst_jpeg_dec_parse_tag_has_entropy_segment (guint8 tag)
360 if (tag == 0xda || (tag >= 0xd0 && tag <= 0xd7))
/* GstVideoDecoder::parse — scan the adapter for JPEG marker structure to
 * split the byte stream into whole images.  Finds SOI, walks the marker
 * segments (using segment lengths, and scanning for the next 0xff marker
 * after entropy-coded segments), and declares a full frame at EOI.
 * Parse state (saw_header, parse_resync, parse_entropy_len) persists
 * across calls while waiting for more data. */
366 gst_jpeg_dec_parse (GstVideoDecoder * bdec, GstVideoCodecFrame * frame,
367 GstAdapter * adapter, gboolean at_eos)
372 gint offset = 0, noffset;
373 GstJpegDec *dec = (GstJpegDec *) bdec;
375 /* FIXME : The overhead of using scan_uint32 is massive */
377 size = gst_adapter_available (adapter);
378 GST_DEBUG ("Parsing jpeg image data (%u bytes)", size);
381 GST_DEBUG ("Flushing all data out");
384 /* If we have leftover data, throw it away */
385 if (!dec->saw_header)
387 goto have_full_frame;
/* Look for the SOI start marker (0xffd8) and flush everything before it. */
393 if (!dec->saw_header) {
395 /* we expect at least 4 bytes, first of which start marker */
397 gst_adapter_masked_scan_uint32 (adapter, 0xffff0000, 0xffd80000, 0,
400 GST_DEBUG ("ret:%d", ret);
405 gst_adapter_flush (adapter, ret);
408 dec->saw_header = TRUE;
415 GST_DEBUG ("offset:%d, size:%d", offset, size);
/* Main marker loop: expect a 0xff marker byte at 'offset'. */
418 gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
419 offset, size - offset, &value);
421 /* lost sync if 0xff marker not where expected */
422 if ((resync = (noffset != offset))) {
423 GST_DEBUG ("Lost sync at 0x%08x, resyncing", offset + 2);
425 /* may have marker, but could have been resyncng */
426 resync = resync || dec->parse_resync;
427 /* Skip over extra 0xff */
428 while ((noffset >= 0) && ((value & 0xff) == 0xff)) {
431 gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
432 noffset, size - noffset, &value);
434 /* enough bytes left for marker? (we need 0xNN after the 0xff) */
436 GST_DEBUG ("at end of input and no EOI marker found, need more data");
440 /* now lock on the marker we found */
442 value = value & 0xff;
444 GST_DEBUG ("0x%08x: EOI marker", offset + 2);
445 /* clear parse state */
446 dec->saw_header = FALSE;
447 dec->parse_resync = FALSE;
449 goto have_full_frame;
452 /* Skip this frame if we found another SOI marker */
453 GST_DEBUG ("0x%08x: SOI marker before EOI, skipping", offset + 2);
454 dec->parse_resync = FALSE;
455 /* FIXME : Need to skip data */
457 goto have_full_frame;
/* Restart markers (0xd0-0xd7) carry no length field. */
461 if (value >= 0xd0 && value <= 0xd7)
464 /* peek tag and subsequent length */
465 if (offset + 2 + 4 > size)
468 gst_adapter_masked_scan_uint32_peek (adapter, 0x0, 0x0, offset + 2, 4,
470 frame_len = frame_len & 0xffff;
472 GST_DEBUG ("0x%08x: tag %02x, frame_len=%u", offset + 2, value, frame_len);
473 /* the frame length includes the 2 bytes for the length; here we want at
474 * least 2 more bytes at the end for an end marker */
475 if (offset + 2 + 2 + frame_len + 2 > size) {
479 if (gst_jpeg_dec_parse_tag_has_entropy_segment (value)) {
480 guint eseglen = dec->parse_entropy_len;
482 GST_DEBUG ("0x%08x: finding entropy segment length (eseglen:%d)",
483 offset + 2, eseglen);
484 if (size < offset + 2 + frame_len + eseglen)
486 noffset = offset + 2 + frame_len + dec->parse_entropy_len;
488 GST_DEBUG ("noffset:%d, size:%d, size - noffset:%d",
489 noffset, size, size - noffset);
490 noffset = gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00,
491 0x0000ff00, noffset, size - noffset, &value);
/* Remember how far we scanned so the next call resumes here. */
494 dec->parse_entropy_len = size - offset - 4 - frame_len - 2;
/* 0xff00 inside entropy data is a stuffed byte, not a marker. */
497 if ((value & 0xff) != 0x00) {
498 eseglen = noffset - offset - frame_len - 2;
503 dec->parse_entropy_len = 0;
504 frame_len += eseglen;
505 GST_DEBUG ("entropy segment length=%u => frame_len=%u", eseglen,
509 /* check if we will still be in sync if we interpret
510 * this as a sync point and skip this frame */
511 noffset = offset + frame_len + 2;
512 noffset = gst_adapter_masked_scan_uint32 (adapter, 0x0000ff00, 0x0000ff00,
515 /* ignore and continue resyncing until we hit the end
516 * of our data or find a sync point that looks okay */
520 GST_DEBUG ("found sync at 0x%x", offset + 2);
523 /* Add current data to output buffer */
524 toadd += frame_len + 2;
525 offset += frame_len + 2;
530 gst_video_decoder_add_to_frame (bdec, toadd);
531 return GST_VIDEO_DECODER_FLOW_NEED_DATA;
/* have_full_frame: hand the accumulated bytes to the base class. */
535 gst_video_decoder_add_to_frame (bdec, toadd);
536 return gst_video_decoder_have_frame (bdec);
/* drop: discard everything currently in the adapter. */
539 gst_adapter_flush (adapter, size);
544 /* shamelessly ripped from jpegutils.c in mjpegtools */
/* Install one Huffman table into the decoder, allocating it on first use.
 * 'bits' is the 17-entry count-per-code-length array, 'val' the symbol
 * values; counts are validated before copying to bound the memcpy. */
546 add_huff_table (j_decompress_ptr dinfo,
547 JHUFF_TBL ** htblptr, const UINT8 * bits, const UINT8 * val)
548 /* Define a Huffman table */
552 if (*htblptr == NULL)
553 *htblptr = jpeg_alloc_huff_table ((j_common_ptr) dinfo);
557 /* Copy the number-of-symbols-of-each-code-length counts */
558 memcpy ((*htblptr)->bits, bits, sizeof ((*htblptr)->bits));
560 /* Validate the counts. We do this here mainly so we can copy the right
561 * number of symbols from the val[] array, without risking marching off
562 * the end of memory. jchuff.c will do a more thorough test later.
565 for (len = 1; len <= 16; len++)
566 nsymbols += bits[len];
567 if (nsymbols < 1 || nsymbols > 256)
568 g_error ("jpegutils.c: add_huff_table failed badly. ");
570 memcpy ((*htblptr)->huffval, val, nsymbols * sizeof (UINT8));
/* Install the four standard JPEG Huffman tables (JPEG spec section K.3)
 * for streams that omit DHT segments (common in MJPEG). */
576 std_huff_tables (j_decompress_ptr dinfo)
577 /* Set up the standard Huffman tables (cf. JPEG standard section K.3) */
578 /* IMPORTANT: these are only valid for 8-bit data precision! */
580 static const UINT8 bits_dc_luminance[17] =
581 { /* 0-base */ 0, 0, 1, 5, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0 };
582 static const UINT8 val_dc_luminance[] =
583 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
585 static const UINT8 bits_dc_chrominance[17] =
586 { /* 0-base */ 0, 0, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0 };
587 static const UINT8 val_dc_chrominance[] =
588 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
590 static const UINT8 bits_ac_luminance[17] =
591 { /* 0-base */ 0, 0, 2, 1, 3, 3, 2, 4, 3, 5, 5, 4, 4, 0, 0, 1, 0x7d };
592 static const UINT8 val_ac_luminance[] =
593 { 0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12,
594 0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07,
595 0x22, 0x71, 0x14, 0x32, 0x81, 0x91, 0xa1, 0x08,
596 0x23, 0x42, 0xb1, 0xc1, 0x15, 0x52, 0xd1, 0xf0,
597 0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16,
598 0x17, 0x18, 0x19, 0x1a, 0x25, 0x26, 0x27, 0x28,
599 0x29, 0x2a, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39,
600 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49,
601 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59,
602 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69,
603 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79,
604 0x7a, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89,
605 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98,
606 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
607 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6,
608 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4, 0xc5,
609 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4,
610 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xe1, 0xe2,
611 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea,
612 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
616 static const UINT8 bits_ac_chrominance[17] =
617 { /* 0-base */ 0, 0, 2, 1, 2, 4, 4, 3, 4, 7, 5, 4, 4, 0, 1, 2, 0x77 };
618 static const UINT8 val_ac_chrominance[] =
619 { 0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21,
620 0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71,
621 0x13, 0x22, 0x32, 0x81, 0x08, 0x14, 0x42, 0x91,
622 0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33, 0x52, 0xf0,
623 0x15, 0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34,
624 0xe1, 0x25, 0xf1, 0x17, 0x18, 0x19, 0x1a, 0x26,
625 0x27, 0x28, 0x29, 0x2a, 0x35, 0x36, 0x37, 0x38,
626 0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
627 0x49, 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58,
628 0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
629 0x69, 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78,
630 0x79, 0x7a, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
631 0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96,
632 0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5,
633 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4,
634 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3,
635 0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2,
636 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda,
637 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9,
638 0xea, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
/* Slots 0 = luminance, 1 = chrominance, matching baseline JPEG usage. */
642 add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[0],
643 bits_dc_luminance, val_dc_luminance);
644 add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[0],
645 bits_ac_luminance, val_ac_luminance);
646 add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[1],
647 bits_dc_chrominance, val_dc_chrominance);
648 add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[1],
649 bits_ac_chrominance, val_ac_chrominance);
/* If the stream defined no Huffman tables at all, install the standard
 * ones; leave any stream-provided tables untouched. */
655 guarantee_huff_tables (j_decompress_ptr dinfo)
657 if ((dinfo->dc_huff_tbl_ptrs[0] == NULL) &&
658 (dinfo->dc_huff_tbl_ptrs[1] == NULL) &&
659 (dinfo->ac_huff_tbl_ptrs[0] == NULL) &&
660 (dinfo->ac_huff_tbl_ptrs[1] == NULL)) {
661 GST_DEBUG ("Generating standard Huffman tables for this frame.");
662 std_huff_tables (dinfo);
/* GstVideoDecoder::set_format — decide packetized vs. parsed mode from the
 * caps framerate and cache the new input state. */
667 gst_jpeg_dec_set_format (GstVideoDecoder * dec, GstVideoCodecState * state)
669 GstJpegDec *jpeg = GST_JPEG_DEC (dec);
670 GstVideoInfo *info = &state->info;
672 /* FIXME : previously jpegdec would handled input as packetized
673 * if the framerate was present. Here we consider it packetized if
674 * the fps is != 1/1 */
/* NOTE(review): the comment above says "fps != 1/1", but with && a common
 * rate like 25/1 (D == 1) is NOT treated as packetized.  The stated intent
 * would be (N != 1 || D != 1) — confirm which behavior is wanted before
 * changing this. */
675 if (GST_VIDEO_INFO_FPS_N (info) != 1 && GST_VIDEO_INFO_FPS_D (info) != 1)
676 gst_video_decoder_set_packetized (dec, TRUE);
678 gst_video_decoder_set_packetized (dec, FALSE);
680 if (jpeg->input_state)
681 gst_video_codec_state_unref (jpeg->input_state);
682 jpeg->input_state = gst_video_codec_state_ref (state);
/* Copy 'len' pixels while dropping every second source sample (2x
 * horizontal downsample of the source row into dest). */
690 hresamplecpy1 (guint8 * dest, const guint8 * src, guint len)
694 for (i = 0; i < len; ++i) {
695 /* equivalent to: dest[i] = src[i << 1] */
/* Free the 16 per-component temporary row buffers used by the indirect
 * decoding paths and reset the recorded allocation width. */
704 gst_jpeg_dec_free_buffers (GstJpegDec * dec)
708 for (i = 0; i < 16; i++) {
709 g_free (dec->idr_y[i]);
710 g_free (dec->idr_u[i]);
711 g_free (dec->idr_v[i]);
712 dec->idr_y[i] = NULL;
713 dec->idr_u[i] = NULL;
714 dec->idr_v[i] = NULL;
717 dec->idr_width_allocated = 0;
/* (Re)allocate the 16 temporary row buffers to 'maxrowbytes' each; no-op
 * if already at that size.  Returns FALSE on allocation failure. */
720 static inline gboolean
721 gst_jpeg_dec_ensure_buffers (GstJpegDec * dec, guint maxrowbytes)
725 if (G_LIKELY (dec->idr_width_allocated == maxrowbytes))
728 /* FIXME: maybe just alloc one or three blocks altogether? */
729 for (i = 0; i < 16; i++) {
730 dec->idr_y[i] = g_try_realloc (dec->idr_y[i], maxrowbytes);
731 dec->idr_u[i] = g_try_realloc (dec->idr_u[i], maxrowbytes);
732 dec->idr_v[i] = g_try_realloc (dec->idr_v[i], maxrowbytes);
734 if (G_UNLIKELY (!dec->idr_y[i] || !dec->idr_u[i] || !dec->idr_v[i])) {
735 GST_WARNING_OBJECT (dec, "out of memory, i=%d, bytes=%u", i, maxrowbytes);
740 dec->idr_width_allocated = maxrowbytes;
741 GST_LOG_OBJECT (dec, "allocated temp memory, %u bytes/row", maxrowbytes);
/* Decode a grayscale JPEG via the temp row buffers, then copy pixel by
 * pixel into the GRAY8 output frame honoring its pixel/row strides. */
746 gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, GstVideoFrame * frame)
749 guchar **scanarray[1] = { rows };
754 gint pstride, rstride;
756 GST_DEBUG_OBJECT (dec, "indirect decoding of grayscale");
758 width = GST_VIDEO_FRAME_WIDTH (frame);
759 height = GST_VIDEO_FRAME_HEIGHT (frame);
761 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
764 base[0] = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
765 pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
766 rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
/* Point the scanline array at the pre-allocated temp rows. */
768 memcpy (rows, dec->idr_y, 16 * sizeof (gpointer));
772 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
773 if (G_LIKELY (lines > 0)) {
774 for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
778 for (k = 0; k < width; k++) {
779 base[0][p] = rows[j][k];
785 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Decode an RGB JPEG via the temp row buffers, interleaving the three
 * planar components into the packed output frame per its strides. */
791 gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame)
793 guchar *r_rows[16], *g_rows[16], *b_rows[16];
794 guchar **scanarray[3] = { r_rows, g_rows, b_rows };
798 guint pstride, rstride;
801 GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");
803 width = GST_VIDEO_FRAME_WIDTH (frame);
804 height = GST_VIDEO_FRAME_HEIGHT (frame);
806 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
809 for (i = 0; i < 3; i++)
810 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
812 pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
813 rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
/* Reuse the Y/U/V temp rows as R/G/B scanline buffers. */
815 memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
816 memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
817 memcpy (b_rows, dec->idr_v, 16 * sizeof (gpointer));
821 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
822 if (G_LIKELY (lines > 0)) {
823 for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
827 for (k = 0; k < width; k++) {
828 base[0][p] = r_rows[j][k];
829 base[1][p] = g_rows[j][k];
830 base[2][p] = b_rows[j][k];
838 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Slow-path YUV decode for widths/sampling factors jpeglib cannot write
 * straight into the output picture: decode into the per-row temp buffers
 * (idr_y/u/v) and copy (or horizontally resample) each line into the
 * video frame, clamped so jpeglib padding never writes past the planes. */
844 gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame, gint r_v,
847 guchar *y_rows[16], *u_rows[16], *v_rows[16];
848 guchar **scanarray[3] = { y_rows, u_rows, v_rows };
851 guchar *base[3], *last[3];
855 GST_DEBUG_OBJECT (dec,
856 "unadvantageous width or r_h, taking slow route involving memcpy");
858 width = GST_VIDEO_FRAME_WIDTH (frame);
859 height = GST_VIDEO_FRAME_HEIGHT (frame);
861 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
864 for (i = 0; i < 3; i++) {
865 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
866 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
867 /* make sure we don't make jpeglib write beyond our buffer,
868 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
869 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
870 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
873 memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer));
874 memcpy (u_rows, dec->idr_u, 16 * sizeof (gpointer));
875 memcpy (v_rows, dec->idr_v, 16 * sizeof (gpointer));
877 /* fill chroma components for grayscale */
879 GST_DEBUG_OBJECT (dec, "grayscale, filling chroma");
880 for (i = 0; i < 16; i++) {
/* FIX: memset() is (dest, fill-byte, count) — the value and count
 * arguments were swapped, which filled only 0x80 bytes with the rounded
 * width instead of filling the whole row with neutral chroma 0x80. */
881 memset (u_rows[i], 0x80, GST_ROUND_UP_32 (width));
882 memset (v_rows[i], 0x80, GST_ROUND_UP_32 (width));
886 for (i = 0; i < height; i += r_v * DCTSIZE) {
887 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, r_v * DCTSIZE);
888 if (G_LIKELY (lines > 0)) {
889 for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) {
890 if (G_LIKELY (base[0] <= last[0])) {
891 memcpy (base[0], y_rows[j], stride[0]);
892 base[0] += stride[0];
895 if (G_LIKELY (base[0] <= last[0])) {
896 memcpy (base[0], y_rows[j + 1], stride[0]);
897 base[0] += stride[0];
900 if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) {
902 memcpy (base[1], u_rows[k], stride[1]);
903 memcpy (base[2], v_rows[k], stride[2]);
904 } else if (r_h == 1) {
905 hresamplecpy1 (base[1], u_rows[k], stride[1]);
906 hresamplecpy1 (base[2], v_rows[k], stride[2]);
908 /* FIXME: implement (at least we avoid crashing by doing nothing) */
912 if (r_v == 2 || (k & 1) != 0) {
913 base[1] += stride[1];
914 base[2] += stride[2];
918 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Fast path: build scanline pointer arrays that point directly into the
 * output frame planes so jpeg_read_raw_data() decodes in place, clamping
 * each pointer to the last valid row of its plane.  Fails for vertical
 * sampling factors > 2. */
924 gst_jpeg_dec_decode_direct (GstJpegDec * dec, GstVideoFrame * frame)
926 guchar **line[3]; /* the jpeg line buffer */
927 guchar *y[4 * DCTSIZE] = { NULL, }; /* alloc enough for the lines */
928 guchar *u[4 * DCTSIZE] = { NULL, }; /* r_v will be <4 */
929 guchar *v[4 * DCTSIZE] = { NULL, };
931 gint lines, v_samp[3];
932 guchar *base[3], *last[3];
940 v_samp[0] = dec->cinfo.comp_info[0].v_samp_factor;
941 v_samp[1] = dec->cinfo.comp_info[1].v_samp_factor;
942 v_samp[2] = dec->cinfo.comp_info[2].v_samp_factor;
944 if (G_UNLIKELY (v_samp[0] > 2 || v_samp[1] > 2 || v_samp[2] > 2))
945 goto format_not_supported;
947 height = GST_VIDEO_FRAME_HEIGHT (frame);
949 for (i = 0; i < 3; i++) {
950 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
951 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
952 /* make sure we don't make jpeglib write beyond our buffer,
953 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
954 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
955 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
958 /* let jpeglib decode directly into our final buffer */
959 GST_DEBUG_OBJECT (dec, "decoding directly into output buffer");
961 for (i = 0; i < height; i += v_samp[0] * DCTSIZE) {
962 for (j = 0; j < (v_samp[0] * DCTSIZE); ++j) {
/* Luma rows map 1:1; chroma rows depend on the subsampling ratio. */
964 line[0][j] = base[0] + (i + j) * stride[0];
965 if (G_UNLIKELY (line[0][j] > last[0]))
966 line[0][j] = last[0];
968 if (v_samp[1] == v_samp[0]) {
969 line[1][j] = base[1] + ((i + j) / 2) * stride[1];
970 } else if (j < (v_samp[1] * DCTSIZE)) {
971 line[1][j] = base[1] + ((i / 2) + j) * stride[1];
973 if (G_UNLIKELY (line[1][j] > last[1]))
974 line[1][j] = last[1];
976 if (v_samp[2] == v_samp[0]) {
977 line[2][j] = base[2] + ((i + j) / 2) * stride[2];
978 } else if (j < (v_samp[2] * DCTSIZE)) {
979 line[2][j] = base[2] + ((i / 2) + j) * stride[2];
981 if (G_UNLIKELY (line[2][j] > last[2]))
982 line[2][j] = last[2];
985 lines = jpeg_read_raw_data (&dec->cinfo, line, v_samp[0] * DCTSIZE);
986 if (G_UNLIKELY (!lines)) {
987 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
992 format_not_supported:
994 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
995 "Unsupported subsampling schema: v_samp factors: %u %u %u",
996 v_samp[0], v_samp[1], v_samp[2]);
997 return GST_FLOW_ERROR;
/* Map the libjpeg colorspace to a GstVideoFormat and (re)configure the
 * output state if width/height/format changed; sets BT.601 full-range
 * colorimetry for the YUV output and renegotiates downstream. */
1002 gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc)
1004 GstVideoCodecState *outstate;
1006 GstVideoFormat format;
1010 format = GST_VIDEO_FORMAT_RGB;
1013 format = GST_VIDEO_FORMAT_GRAY8;
1016 format = GST_VIDEO_FORMAT_I420;
1020 /* Compare to currently configured output state */
1021 outstate = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (dec));
1023 info = &outstate->info;
1025 if (width == GST_VIDEO_INFO_WIDTH (info) &&
1026 height == GST_VIDEO_INFO_HEIGHT (info) &&
1027 format == GST_VIDEO_INFO_FORMAT (info)) {
1028 gst_video_codec_state_unref (outstate);
1031 gst_video_codec_state_unref (outstate);
1035 gst_video_decoder_set_output_state (GST_VIDEO_DECODER (dec), format,
1036 width, height, dec->input_state);
/* JPEG is full-range BT.601; transfer/primaries are left unspecified. */
1043 outstate->info.colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;
1044 outstate->info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
1045 outstate->info.colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN;
1046 outstate->info.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
1050 gst_video_codec_state_unref (outstate);
1052 gst_video_decoder_negotiate (GST_VIDEO_DECODER (dec));
1054 GST_DEBUG_OBJECT (dec, "max_v_samp_factor=%d", dec->cinfo.max_v_samp_factor);
1055 GST_DEBUG_OBJECT (dec, "max_h_samp_factor=%d", dec->cinfo.max_h_samp_factor);
/* GstVideoDecoder::handle_frame — decode one JPEG image:
 * map the input, read the header, validate colorspace/sampling, start
 * decompression, negotiate output, allocate and map the output frame,
 * pick a decode path (RGB / grayscale / indirect / direct), finish and
 * push the frame.  libjpeg fatal errors longjmp back to the setjmp here.
 * NOTE(review): this view ends mid-function at the error-count handling;
 * the remaining cleanup/error labels are outside this chunk. */
1058 static GstFlowReturn
1059 gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec, GstVideoCodecFrame * frame)
1061 GstFlowReturn ret = GST_FLOW_OK;
1062 GstJpegDec *dec = (GstJpegDec *) bdec;
1063 GstVideoFrame vframe;
1067 gboolean need_unmap = TRUE;
1068 GstVideoCodecState *state = NULL;
1070 dec->current_frame = frame;
1071 gst_buffer_map (frame->input_buffer, &dec->current_frame_map, GST_MAP_READ);
1072 gst_jpeg_dec_fill_input_buffer (&dec->cinfo);
/* Recovery point for gst_jpeg_dec_my_error_exit()'s longjmp. */
1074 if (setjmp (dec->jerr.setjmp_buffer)) {
1075 code = dec->jerr.pub.msg_code;
1077 if (code == JERR_INPUT_EOF) {
1078 GST_DEBUG ("jpeg input EOF error, we probably need more data");
1079 goto need_more_data;
1085 hdr_ok = jpeg_read_header (&dec->cinfo, TRUE);
1086 if (G_UNLIKELY (hdr_ok != JPEG_HEADER_OK)) {
1087 GST_WARNING_OBJECT (dec, "reading the header failed, %d", hdr_ok);
1090 GST_LOG_OBJECT (dec, "num_components=%d", dec->cinfo.num_components);
1091 GST_LOG_OBJECT (dec, "jpeg_color_space=%d", dec->cinfo.jpeg_color_space);
1093 if (!dec->cinfo.num_components || !dec->cinfo.comp_info)
1094 goto components_not_supported;
1096 r_h = dec->cinfo.comp_info[0].h_samp_factor;
1097 r_v = dec->cinfo.comp_info[0].v_samp_factor;
1099 GST_LOG_OBJECT (dec, "r_h = %d, r_v = %d", r_h, r_v);
1101 if (dec->cinfo.num_components > 3)
1102 goto components_not_supported;
1104 /* verify color space expectation to avoid going *boom* or bogus output */
1105 if (dec->cinfo.jpeg_color_space != JCS_YCbCr &&
1106 dec->cinfo.jpeg_color_space != JCS_GRAYSCALE &&
1107 dec->cinfo.jpeg_color_space != JCS_RGB)
1108 goto unsupported_colorspace;
1110 #ifndef GST_DISABLE_GST_DEBUG
1114 for (i = 0; i < dec->cinfo.num_components; ++i) {
1115 GST_LOG_OBJECT (dec, "[%d] h_samp_factor=%d, v_samp_factor=%d, cid=%d",
1116 i, dec->cinfo.comp_info[i].h_samp_factor,
1117 dec->cinfo.comp_info[i].v_samp_factor,
1118 dec->cinfo.comp_info[i].component_id);
1123 /* prepare for raw output */
1124 dec->cinfo.do_fancy_upsampling = FALSE;
1125 dec->cinfo.do_block_smoothing = FALSE;
1126 dec->cinfo.out_color_space = dec->cinfo.jpeg_color_space;
1127 dec->cinfo.dct_method = dec->idct_method;
1128 dec->cinfo.raw_data_out = TRUE;
1130 GST_LOG_OBJECT (dec, "starting decompress");
1131 guarantee_huff_tables (&dec->cinfo);
1132 if (!jpeg_start_decompress (&dec->cinfo)) {
1133 GST_WARNING_OBJECT (dec, "failed to start decompression cycle");
1136 /* sanity checks to get safe and reasonable output */
1137 switch (dec->cinfo.jpeg_color_space) {
1139 if (dec->cinfo.num_components != 1)
1140 goto invalid_yuvrgbgrayscale;
1143 if (dec->cinfo.num_components != 3 || dec->cinfo.max_v_samp_factor > 1 ||
1144 dec->cinfo.max_h_samp_factor > 1)
1145 goto invalid_yuvrgbgrayscale;
1148 if (dec->cinfo.num_components != 3 ||
1149 r_v > 2 || r_v < dec->cinfo.comp_info[0].v_samp_factor ||
1150 r_v < dec->cinfo.comp_info[1].v_samp_factor ||
1151 r_h < dec->cinfo.comp_info[0].h_samp_factor ||
1152 r_h < dec->cinfo.comp_info[1].h_samp_factor)
1153 goto invalid_yuvrgbgrayscale;
1156 g_assert_not_reached ();
1160 width = dec->cinfo.output_width;
1161 height = dec->cinfo.output_height;
1163 if (G_UNLIKELY (width < MIN_WIDTH || width > MAX_WIDTH ||
1164 height < MIN_HEIGHT || height > MAX_HEIGHT))
1167 gst_jpeg_dec_negotiate (dec, width, height, dec->cinfo.jpeg_color_space);
1169 state = gst_video_decoder_get_output_state (bdec);
1170 ret = gst_video_decoder_allocate_output_frame (bdec, frame);
1171 if (G_UNLIKELY (ret != GST_FLOW_OK))
1174 if (!gst_video_frame_map (&vframe, &state->info, frame->output_buffer,
1178 GST_LOG_OBJECT (dec, "width %d, height %d", width, height);
1180 if (dec->cinfo.jpeg_color_space == JCS_RGB) {
1181 gst_jpeg_dec_decode_rgb (dec, &vframe);
1182 } else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
1183 gst_jpeg_dec_decode_grayscale (dec, &vframe);
1185 GST_LOG_OBJECT (dec, "decompressing (reqired scanline buffer height = %u)",
1186 dec->cinfo.rec_outbuf_height);
1188 /* For some widths jpeglib requires more horizontal padding than I420
1189 * provides. In those cases we need to decode into separate buffers and then
1190 * copy over the data into our final picture buffer, otherwise jpeglib might
1191 * write over the end of a line into the beginning of the next line,
1192 * resulting in blocky artifacts on the left side of the picture. */
1193 if (G_UNLIKELY (width % (dec->cinfo.max_h_samp_factor * DCTSIZE) != 0
1194 || dec->cinfo.comp_info[0].h_samp_factor != 2
1195 || dec->cinfo.comp_info[1].h_samp_factor != 1
1196 || dec->cinfo.comp_info[2].h_samp_factor != 1)) {
1197 GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
1198 "indirect decoding using extra buffer copy");
1199 gst_jpeg_dec_decode_indirect (dec, &vframe, r_v, r_h,
1200 dec->cinfo.num_components);
1202 ret = gst_jpeg_dec_decode_direct (dec, &vframe);
1204 if (G_UNLIKELY (ret != GST_FLOW_OK))
1205 goto decode_direct_failed;
1209 gst_video_frame_unmap (&vframe);
1211 GST_LOG_OBJECT (dec, "decompressing finished");
1212 jpeg_finish_decompress (&dec->cinfo);
1214 /* reset error count on successful decode */
1215 dec->error_count = 0;
1217 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1218 ret = gst_video_decoder_finish_frame (bdec, frame);
/* Error path: abort the decompress cycle and decide warn-vs-error based
 * on the consecutive error count. */
1225 if (G_UNLIKELY (ret == GST_FLOW_ERROR)) {
1226 jpeg_abort_decompress (&dec->cinfo);
1227 ret = gst_jpeg_dec_post_error_or_warning (dec);
1231 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1234 gst_video_codec_state_unref (state);
1241 GST_LOG_OBJECT (dec, "we need more data");
1248 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
1249 "Picture is too small or too big (%ux%u)", width, height);
1250 ret = GST_FLOW_ERROR;
1255 gchar err_msg[JMSG_LENGTH_MAX];
1257 dec->jerr.pub.format_message ((j_common_ptr) (&dec->cinfo), err_msg);
1259 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
1260 "Decode error #%u: %s", code, err_msg);
1262 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1263 gst_video_decoder_drop_frame (bdec, frame);
1266 ret = GST_FLOW_ERROR;
1269 decode_direct_failed:
1271 /* already posted an error message */
1272 jpeg_abort_decompress (&dec->cinfo);
1277 const gchar *reason;
1279 reason = gst_flow_get_name (ret);
1281 GST_DEBUG_OBJECT (dec, "failed to alloc buffer, reason %s", reason);
1282 /* Reset for next time */
1283 jpeg_abort_decompress (&dec->cinfo);
1284 if (ret != GST_FLOW_EOS && ret != GST_FLOW_FLUSHING &&
1285 ret != GST_FLOW_NOT_LINKED) {
1286 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
1287 "Buffer allocation failed, reason: %s", reason);
1291 components_not_supported:
1293 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
1294 "number of components not supported: %d (max 3)",
1295 dec->cinfo.num_components);
1296 ret = GST_FLOW_ERROR;
1299 unsupported_colorspace:
1301 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
1302 "Picture has unknown or unsupported colourspace");
1303 ret = GST_FLOW_ERROR;
1306 invalid_yuvrgbgrayscale:
1308 gst_jpeg_dec_set_error (dec, GST_FUNCTION, __LINE__,
1309 "Picture is corrupt or unhandled YUV/RGB/grayscale layout");
1310 ret = GST_FLOW_ERROR;
/* Negotiate the downstream buffer pool: chain up to the base
 * GstVideoDecoder implementation first, then, if downstream advertised
 * support for GstVideoMeta, enable that option on the chosen pool so
 * buffers can carry per-plane stride/offset metadata.
 * NOTE(review): this excerpt elides some lines (error return after the
 * chained-up call, the pool NULL check, return statement) — confirm
 * against the full file. */
1316 gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query)
1318   GstBufferPool *pool = NULL;
1319   GstStructure *config;
/* Let the base class fill in the default allocation decision first. */
1321   if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query))
/* Take the first pool proposed in the allocation query, if any. */
1324   if (gst_query_get_n_allocation_pools (query) > 0)
1325     gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
1330   config = gst_buffer_pool_get_config (pool);
/* Downstream supports GstVideoMeta: ask the pool to attach it so
 * non-default strides/offsets can be communicated per buffer. */
1331   if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
1332     gst_buffer_pool_config_add_option (config,
1333         GST_BUFFER_POOL_OPTION_VIDEO_META);
1335   gst_buffer_pool_set_config (pool, config);
/* Drop the reference obtained from gst_query_parse_nth_allocation_pool(). */
1336   gst_object_unref (pool);
/* Reset decoder state between segments/flushes: abort any in-progress
 * libjpeg decompression and clear the internal parse-state tracking.
 * NOTE(review): this excerpt elides lines between the first and second
 * group of parse-state resets — presumably an `if (hard)` branch guards
 * the second group; confirm against the full file. */
1342 gst_jpeg_dec_reset (GstVideoDecoder * bdec, gboolean hard)
1344   GstJpegDec *dec = (GstJpegDec *) bdec;
/* Abandon any partially decoded image held by libjpeg. */
1346   jpeg_abort_decompress (&dec->cinfo);
1347   dec->parse_entropy_len = 0;
1348   dec->parse_resync = FALSE;
1349   dec->saw_header = FALSE;
1352   dec->parse_entropy_len = 0;
1353   dec->parse_resync = FALSE;
/* Fall back to unpacketized (parsed) input mode after a reset. */
1355   gst_video_decoder_set_packetized (bdec, FALSE);
/* GObject property setter for jpegdec.
 * Handles PROP_IDCT_METHOD (plain enum store) and PROP_MAX_ERRORS
 * (stored atomically, since it may be read from the streaming thread).
 * NOTE(review): the switch scaffolding and break statements are elided
 * in this excerpt — confirm against the full file. */
1362 gst_jpeg_dec_set_property (GObject * object, guint prop_id,
1363     const GValue * value, GParamSpec * pspec)
1367   dec = GST_JPEG_DEC (object);
1370     case PROP_IDCT_METHOD:
1371       dec->idct_method = g_value_get_enum (value);
1373     case PROP_MAX_ERRORS:
/* Atomic store: max_errors is consulted concurrently during decoding. */
1374       g_atomic_int_set (&dec->max_errors, g_value_get_int (value));
1378       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GObject property getter for jpegdec; mirrors the setter above.
 * PROP_MAX_ERRORS is read atomically to pair with the atomic store in
 * the setter. NOTE(review): switch scaffolding/breaks are elided in this
 * excerpt — confirm against the full file. */
1384 gst_jpeg_dec_get_property (GObject * object, guint prop_id, GValue * value,
1389   dec = GST_JPEG_DEC (object);
1392     case PROP_IDCT_METHOD:
1393       g_value_set_enum (value, dec->idct_method);
1395     case PROP_MAX_ERRORS:
/* Atomic read: paired with g_atomic_int_set() in the setter. */
1396       g_value_set_int (value, g_atomic_int_get (&dec->max_errors));
1400       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1406 gst_jpeg_dec_stop (GstVideoDecoder * bdec)
1408 GstJpegDec *dec = (GstJpegDec *) bdec;
1410 gst_jpeg_dec_free_buffers (dec);