2 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
3 * Copyright (C) <2009> Tim-Philipp Müller <tim centricular net>
4 * Copyright (C) 2012 Collabora Ltd.
5 * Author : Edward Hervey <edward@collabora.com>
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Library General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Library General Public License for more details.
17 * You should have received a copy of the GNU Library General Public
18 * License along with this library; if not, write to the
19 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
20 * Boston, MA 02110-1301, USA.
24 * SECTION:element-jpegdec
26 * Decodes jpeg images.
29 * <title>Example launch line</title>
31 * gst-launch-1.0 -v filesrc location=mjpeg.avi ! avidemux ! queue ! jpegdec ! videoconvert ! videoscale ! autovideosink
32 * ]| The above pipeline decodes the mjpeg stream and renders it to the screen.
41 #include "gstjpegdec.h"
43 #include <gst/video/video.h>
44 #include <gst/video/gstvideometa.h>
45 #include <gst/video/gstvideopool.h>
46 #include "gst/gst-i18n-plugin.h"
50 #define MAX_WIDTH 65535
52 #define MAX_HEIGHT 65535
54 #define CINFO_GET_JPEGDEC(cinfo_ptr) \
55 (((struct GstJpegDecSourceMgr*)((cinfo_ptr)->src))->dec)
57 #define JPEG_DEFAULT_IDCT_METHOD JDCT_FASTEST
58 #define JPEG_DEFAULT_MAX_ERRORS 0
/* Source pad template: the decoder can output planar I420, the packed RGB
 * variants listed, or 8-bit grayscale (see gst_jpeg_dec_negotiate below). */
68 static GstStaticPadTemplate gst_jpeg_dec_src_pad_template =
69 GST_STATIC_PAD_TEMPLATE ("src",
72 GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
73 ("{ I420, RGB, BGR, RGBx, xRGB, BGRx, xBGR, GRAY8 }"))
77 /* FIXME: sof-marker is for IJG libjpeg 8, should be different for 6.2 */
78 /* FIXME: add back "sof-marker = (int) { 0, 1, 2, 5, 6, 7, 9, 10, 13, 14 }"
79 * once we have a parser and/or demuxer set caps properly */
/* Sink pad template: accepts any image/jpeg stream. */
80 static GstStaticPadTemplate gst_jpeg_dec_sink_pad_template =
81 GST_STATIC_PAD_TEMPLATE ("sink",
84 GST_STATIC_CAPS ("image/jpeg")
87 GST_DEBUG_CATEGORY_STATIC (jpeg_dec_debug);
88 #define GST_CAT_DEFAULT jpeg_dec_debug
89 GST_DEBUG_CATEGORY_STATIC (GST_CAT_PERFORMANCE);
91 static void gst_jpeg_dec_set_property (GObject * object, guint prop_id,
92 const GValue * value, GParamSpec * pspec);
93 static void gst_jpeg_dec_get_property (GObject * object, guint prop_id,
94 GValue * value, GParamSpec * pspec);
96 static gboolean gst_jpeg_dec_set_format (GstVideoDecoder * dec,
97 GstVideoCodecState * state);
98 static gboolean gst_jpeg_dec_start (GstVideoDecoder * bdec);
99 static gboolean gst_jpeg_dec_stop (GstVideoDecoder * bdec);
100 static gboolean gst_jpeg_dec_flush (GstVideoDecoder * bdec);
101 static GstFlowReturn gst_jpeg_dec_parse (GstVideoDecoder * bdec,
102 GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos);
103 static GstFlowReturn gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec,
104 GstVideoCodecFrame * frame);
105 static gboolean gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec,
108 #define gst_jpeg_dec_parent_class parent_class
109 G_DEFINE_TYPE (GstJpegDec, gst_jpeg_dec, GST_TYPE_VIDEO_DECODER);
112 gst_jpeg_dec_finalize (GObject * object)
114 GstJpegDec *dec = GST_JPEG_DEC (object);
116 jpeg_destroy_decompress (&dec->cinfo);
117 if (dec->input_state)
118 gst_video_codec_state_unref (dec->input_state);
120 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Class init: installs the GObject properties, pad templates, element
 * metadata and the GstVideoDecoder virtual method implementations. */
124 gst_jpeg_dec_class_init (GstJpegDecClass * klass)
126 GObjectClass *gobject_class;
127 GstElementClass *element_class;
128 GstVideoDecoderClass *vdec_class;
130 gobject_class = (GObjectClass *) klass;
131 element_class = (GstElementClass *) klass;
132 vdec_class = (GstVideoDecoderClass *) klass;
134 parent_class = g_type_class_peek_parent (klass);
136 gobject_class->finalize = gst_jpeg_dec_finalize;
137 gobject_class->set_property = gst_jpeg_dec_set_property;
138 gobject_class->get_property = gst_jpeg_dec_get_property;
/* idct-method lets applications trade decoding accuracy for speed */
140 g_object_class_install_property (gobject_class, PROP_IDCT_METHOD,
141 g_param_spec_enum ("idct-method", "IDCT Method",
142 "The IDCT algorithm to use", GST_TYPE_IDCT_METHOD,
143 JPEG_DEFAULT_IDCT_METHOD,
144 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
147 * GstJpegDec:max-errors:
149 * Error out after receiving N consecutive decoding errors
150 * (-1 = never error out, 0 = automatic, 1 = fail on first error, etc.)
152 * Deprecated: 1.3.1: Property wasn't used internally
154 #ifndef GST_REMOVE_DEPRECATED
155 g_object_class_install_property (gobject_class, PROP_MAX_ERRORS,
156 g_param_spec_int ("max-errors", "Maximum Consecutive Decoding Errors",
157 "(Deprecated) Error out after receiving N consecutive decoding errors"
158 " (-1 = never fail, 0 = automatic, 1 = fail on first error)",
159 -1, G_MAXINT, JPEG_DEFAULT_MAX_ERRORS,
160 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED));
163 gst_element_class_add_pad_template (element_class,
164 gst_static_pad_template_get (&gst_jpeg_dec_src_pad_template));
165 gst_element_class_add_pad_template (element_class,
166 gst_static_pad_template_get (&gst_jpeg_dec_sink_pad_template));
167 gst_element_class_set_static_metadata (element_class, "JPEG image decoder",
168 "Codec/Decoder/Image",
169 "Decode images from JPEG format", "Wim Taymans <wim@fluendo.com>");
/* hook up the GstVideoDecoder vfuncs implemented in this file */
171 vdec_class->start = gst_jpeg_dec_start;
172 vdec_class->stop = gst_jpeg_dec_stop;
173 vdec_class->flush = gst_jpeg_dec_flush;
174 vdec_class->parse = gst_jpeg_dec_parse;
175 vdec_class->set_format = gst_jpeg_dec_set_format;
176 vdec_class->handle_frame = gst_jpeg_dec_handle_frame;
177 vdec_class->decide_allocation = gst_jpeg_dec_decide_allocation;
179 GST_DEBUG_CATEGORY_INIT (jpeg_dec_debug, "jpegdec", 0, "JPEG decoder");
180 GST_DEBUG_CATEGORY_GET (GST_CAT_PERFORMANCE, "GST_PERFORMANCE");
184 gst_jpeg_dec_fill_input_buffer (j_decompress_ptr cinfo)
188 dec = CINFO_GET_JPEGDEC (cinfo);
189 g_return_val_if_fail (dec != NULL, FALSE);
190 g_return_val_if_fail (dec->current_frame != NULL, FALSE);
191 g_return_val_if_fail (dec->current_frame_map.data != NULL, FALSE);
193 cinfo->src->next_input_byte = dec->current_frame_map.data;
194 cinfo->src->bytes_in_buffer = dec->current_frame_map.size;
200 gst_jpeg_dec_init_source (j_decompress_ptr cinfo)
202 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "init_source");
207 gst_jpeg_dec_skip_input_data (j_decompress_ptr cinfo, glong num_bytes)
209 GstJpegDec *dec = CINFO_GET_JPEGDEC (cinfo);
211 GST_DEBUG_OBJECT (dec, "skip %ld bytes", num_bytes);
213 if (num_bytes > 0 && cinfo->src->bytes_in_buffer >= num_bytes) {
214 cinfo->src->next_input_byte += (size_t) num_bytes;
215 cinfo->src->bytes_in_buffer -= (size_t) num_bytes;
220 gst_jpeg_dec_resync_to_restart (j_decompress_ptr cinfo, gint desired)
222 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "resync_to_start");
227 gst_jpeg_dec_term_source (j_decompress_ptr cinfo)
229 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "term_source");
234 gst_jpeg_dec_my_output_message (j_common_ptr cinfo)
236 return; /* do nothing */
240 gst_jpeg_dec_my_emit_message (j_common_ptr cinfo, int msg_level)
242 /* GST_LOG_OBJECT (CINFO_GET_JPEGDEC (&cinfo), "msg_level=%d", msg_level); */
247 gst_jpeg_dec_my_error_exit (j_common_ptr cinfo)
249 struct GstJpegDecErrorMgr *err_mgr = (struct GstJpegDecErrorMgr *) cinfo->err;
251 (*cinfo->err->output_message) (cinfo);
252 longjmp (err_mgr->setjmp_buffer, 1);
256 gst_jpeg_dec_init (GstJpegDec * dec)
258 GST_DEBUG ("initializing");
261 memset (&dec->cinfo, 0, sizeof (dec->cinfo));
262 memset (&dec->jerr, 0, sizeof (dec->jerr));
263 dec->cinfo.err = jpeg_std_error (&dec->jerr.pub);
264 dec->jerr.pub.output_message = gst_jpeg_dec_my_output_message;
265 dec->jerr.pub.emit_message = gst_jpeg_dec_my_emit_message;
266 dec->jerr.pub.error_exit = gst_jpeg_dec_my_error_exit;
268 jpeg_create_decompress (&dec->cinfo);
270 dec->cinfo.src = (struct jpeg_source_mgr *) &dec->jsrc;
271 dec->cinfo.src->init_source = gst_jpeg_dec_init_source;
272 dec->cinfo.src->fill_input_buffer = gst_jpeg_dec_fill_input_buffer;
273 dec->cinfo.src->skip_input_data = gst_jpeg_dec_skip_input_data;
274 dec->cinfo.src->resync_to_restart = gst_jpeg_dec_resync_to_restart;
275 dec->cinfo.src->term_source = gst_jpeg_dec_term_source;
278 /* init properties */
279 dec->idct_method = JPEG_DEFAULT_IDCT_METHOD;
280 dec->max_errors = JPEG_DEFAULT_MAX_ERRORS;
283 static inline gboolean
284 gst_jpeg_dec_parse_tag_has_entropy_segment (guint8 tag)
286 if (tag == 0xda || (tag >= 0xd0 && tag <= 0xd7))
/* GstVideoDecoder::parse vfunc: scans the adapter for JPEG marker segments
 * to delimit complete images (SOI .. EOI).  Parsing state survives across
 * calls in dec->saw_header, dec->parse_resync and dec->parse_entropy_len so
 * it can resume when more data arrives. */
292 gst_jpeg_dec_parse (GstVideoDecoder * bdec, GstVideoCodecFrame * frame,
293 GstAdapter * adapter, gboolean at_eos)
298 gint offset = 0, noffset;
299 GstJpegDec *dec = (GstJpegDec *) bdec;
301 GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
303 /* FIXME : The overhead of using scan_uint32 is massive */
305 size = gst_adapter_available (adapter);
306 GST_DEBUG ("Parsing jpeg image data (%u bytes)", size);
309 GST_DEBUG ("Flushing all data out");
312 /* If we have leftover data, throw it away */
313 if (!dec->saw_header)
315 goto have_full_frame;
/* first locate the SOI start marker (0xffd8) */
321 if (!dec->saw_header) {
323 /* we expect at least 4 bytes, first of which start marker */
325 gst_adapter_masked_scan_uint32 (adapter, 0xffff0000, 0xffd80000, 0,
328 GST_DEBUG ("ret:%d", ret);
333 gst_adapter_flush (adapter, ret);
336 dec->saw_header = TRUE;
343 GST_DEBUG ("offset:%d, size:%d", offset, size);
/* look for the next 0xff marker byte at the expected segment boundary */
346 gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
347 offset, size - offset, &value);
349 /* lost sync if 0xff marker not where expected */
350 if ((resync = (noffset != offset))) {
351 GST_DEBUG ("Lost sync at 0x%08x, resyncing", offset + 2);
353 /* may have marker, but could have been resyncng */
354 resync = resync || dec->parse_resync;
355 /* Skip over extra 0xff */
356 while ((noffset >= 0) && ((value & 0xff) == 0xff)) {
359 gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
360 noffset, size - noffset, &value);
362 /* enough bytes left for marker? (we need 0xNN after the 0xff) */
364 GST_DEBUG ("at end of input and no EOI marker found, need more data");
368 /* now lock on the marker we found */
370 value = value & 0xff;
372 GST_DEBUG ("0x%08x: EOI marker", offset + 2);
373 /* clear parse state */
374 dec->saw_header = FALSE;
375 dec->parse_resync = FALSE;
377 goto have_full_frame;
380 /* Skip this frame if we found another SOI marker */
381 GST_DEBUG ("0x%08x: SOI marker before EOI, skipping", offset + 2);
382 dec->parse_resync = FALSE;
/* restart markers (0xd0-0xd7) have no length field */
388 if (value >= 0xd0 && value <= 0xd7)
391 /* peek tag and subsequent length */
392 if (offset + 2 + 4 > size)
395 gst_adapter_masked_scan_uint32_peek (adapter, 0x0, 0x0, offset + 2, 4,
397 frame_len = frame_len & 0xffff;
399 GST_DEBUG ("0x%08x: tag %02x, frame_len=%u", offset + 2, value, frame_len);
400 /* the frame length includes the 2 bytes for the length; here we want at
401 * least 2 more bytes at the end for an end marker */
402 if (offset + 2 + 2 + frame_len + 2 > size) {
406 if (gst_jpeg_dec_parse_tag_has_entropy_segment (value)) {
407 guint eseglen = dec->parse_entropy_len;
409 GST_DEBUG ("0x%08x: finding entropy segment length (eseglen:%d)",
410 offset + 2, eseglen);
411 if (size < offset + 2 + frame_len + eseglen)
413 noffset = offset + 2 + frame_len + dec->parse_entropy_len;
415 GST_DEBUG ("noffset:%d, size:%d, size - noffset:%d",
416 noffset, size, size - noffset);
417 noffset = gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00,
418 0x0000ff00, noffset, size - noffset, &value);
/* remember how far we scanned so the next call can resume */
421 dec->parse_entropy_len = size - offset - 4 - frame_len - 2;
424 if ((value & 0xff) != 0x00) {
425 eseglen = noffset - offset - frame_len - 2;
430 dec->parse_entropy_len = 0;
431 frame_len += eseglen;
432 GST_DEBUG ("entropy segment length=%u => frame_len=%u", eseglen,
436 /* check if we will still be in sync if we interpret
437 * this as a sync point and skip this frame */
438 noffset = offset + frame_len + 2;
439 noffset = gst_adapter_masked_scan_uint32 (adapter, 0x0000ff00, 0x0000ff00,
442 /* ignore and continue resyncing until we hit the end
443 * of our data or find a sync point that looks okay */
447 GST_DEBUG ("found sync at 0x%x", offset + 2);
450 /* Add current data to output buffer */
451 toadd += frame_len + 2;
452 offset += frame_len + 2;
457 gst_video_decoder_add_to_frame (bdec, toadd);
458 return GST_VIDEO_DECODER_FLOW_NEED_DATA;
462 gst_video_decoder_add_to_frame (bdec, toadd);
463 GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
464 return gst_video_decoder_have_frame (bdec);
467 gst_adapter_flush (adapter, size);
472 /* shamelessly ripped from jpegutils.c in mjpegtools */
474 add_huff_table (j_decompress_ptr dinfo,
475 JHUFF_TBL ** htblptr, const UINT8 * bits, const UINT8 * val)
476 /* Define a Huffman table */
480 if (*htblptr == NULL)
481 *htblptr = jpeg_alloc_huff_table ((j_common_ptr) dinfo);
485 /* Copy the number-of-symbols-of-each-code-length counts */
486 memcpy ((*htblptr)->bits, bits, sizeof ((*htblptr)->bits));
488 /* Validate the counts. We do this here mainly so we can copy the right
489 * number of symbols from the val[] array, without risking marching off
490 * the end of memory. jchuff.c will do a more thorough test later.
493 for (len = 1; len <= 16; len++)
494 nsymbols += bits[len];
495 if (nsymbols < 1 || nsymbols > 256)
496 g_error ("jpegutils.c: add_huff_table failed badly. ");
498 memcpy ((*htblptr)->huffval, val, nsymbols * sizeof (UINT8));
/* Install the standard JPEG Huffman tables (cf. JPEG standard section K.3)
 * for streams that omit their own DHT segments (common in MJPEG). */
504 std_huff_tables (j_decompress_ptr dinfo)
505 /* Set up the standard Huffman tables (cf. JPEG standard section K.3) */
506 /* IMPORTANT: these are only valid for 8-bit data precision! */
508 static const UINT8 bits_dc_luminance[17] =
509 { /* 0-base */ 0, 0, 1, 5, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0 };
510 static const UINT8 val_dc_luminance[] =
511 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
513 static const UINT8 bits_dc_chrominance[17] =
514 { /* 0-base */ 0, 0, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0 };
515 static const UINT8 val_dc_chrominance[] =
516 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
518 static const UINT8 bits_ac_luminance[17] =
519 { /* 0-base */ 0, 0, 2, 1, 3, 3, 2, 4, 3, 5, 5, 4, 4, 0, 0, 1, 0x7d };
520 static const UINT8 val_ac_luminance[] =
521 { 0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12,
522 0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07,
523 0x22, 0x71, 0x14, 0x32, 0x81, 0x91, 0xa1, 0x08,
524 0x23, 0x42, 0xb1, 0xc1, 0x15, 0x52, 0xd1, 0xf0,
525 0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16,
526 0x17, 0x18, 0x19, 0x1a, 0x25, 0x26, 0x27, 0x28,
527 0x29, 0x2a, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39,
528 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49,
529 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59,
530 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69,
531 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79,
532 0x7a, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89,
533 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98,
534 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
535 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6,
536 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4, 0xc5,
537 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4,
538 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xe1, 0xe2,
539 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea,
540 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
544 static const UINT8 bits_ac_chrominance[17] =
545 { /* 0-base */ 0, 0, 2, 1, 2, 4, 4, 3, 4, 7, 5, 4, 4, 0, 1, 2, 0x77 };
546 static const UINT8 val_ac_chrominance[] =
547 { 0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21,
548 0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71,
549 0x13, 0x22, 0x32, 0x81, 0x08, 0x14, 0x42, 0x91,
550 0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33, 0x52, 0xf0,
551 0x15, 0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34,
552 0xe1, 0x25, 0xf1, 0x17, 0x18, 0x19, 0x1a, 0x26,
553 0x27, 0x28, 0x29, 0x2a, 0x35, 0x36, 0x37, 0x38,
554 0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
555 0x49, 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58,
556 0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
557 0x69, 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78,
558 0x79, 0x7a, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
559 0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96,
560 0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5,
561 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4,
562 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3,
563 0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2,
564 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda,
565 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9,
566 0xea, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
/* table slot 0 = luminance, slot 1 = chrominance */
570 add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[0],
571 bits_dc_luminance, val_dc_luminance);
572 add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[0],
573 bits_ac_luminance, val_ac_luminance);
574 add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[1],
575 bits_dc_chrominance, val_dc_chrominance);
576 add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[1],
577 bits_ac_chrominance, val_ac_chrominance);
583 guarantee_huff_tables (j_decompress_ptr dinfo)
585 if ((dinfo->dc_huff_tbl_ptrs[0] == NULL) &&
586 (dinfo->dc_huff_tbl_ptrs[1] == NULL) &&
587 (dinfo->ac_huff_tbl_ptrs[0] == NULL) &&
588 (dinfo->ac_huff_tbl_ptrs[1] == NULL)) {
589 GST_DEBUG ("Generating standard Huffman tables for this frame.");
590 std_huff_tables (dinfo);
595 gst_jpeg_dec_set_format (GstVideoDecoder * dec, GstVideoCodecState * state)
597 GstJpegDec *jpeg = GST_JPEG_DEC (dec);
599 if (dec->input_segment.format == GST_FORMAT_TIME)
600 gst_video_decoder_set_packetized (dec, TRUE);
602 gst_video_decoder_set_packetized (dec, FALSE);
604 if (jpeg->input_state)
605 gst_video_codec_state_unref (jpeg->input_state);
606 jpeg->input_state = gst_video_codec_state_ref (state);
614 hresamplecpy1 (guint8 * dest, const guint8 * src, guint len)
618 for (i = 0; i < len; ++i) {
619 /* equivalent to: dest[i] = src[i << 1] */
628 gst_jpeg_dec_free_buffers (GstJpegDec * dec)
632 for (i = 0; i < 16; i++) {
633 g_free (dec->idr_y[i]);
634 g_free (dec->idr_u[i]);
635 g_free (dec->idr_v[i]);
636 dec->idr_y[i] = NULL;
637 dec->idr_u[i] = NULL;
638 dec->idr_v[i] = NULL;
641 dec->idr_width_allocated = 0;
644 static inline gboolean
645 gst_jpeg_dec_ensure_buffers (GstJpegDec * dec, guint maxrowbytes)
649 if (G_LIKELY (dec->idr_width_allocated == maxrowbytes))
652 /* FIXME: maybe just alloc one or three blocks altogether? */
653 for (i = 0; i < 16; i++) {
654 dec->idr_y[i] = g_try_realloc (dec->idr_y[i], maxrowbytes);
655 dec->idr_u[i] = g_try_realloc (dec->idr_u[i], maxrowbytes);
656 dec->idr_v[i] = g_try_realloc (dec->idr_v[i], maxrowbytes);
658 if (G_UNLIKELY (!dec->idr_y[i] || !dec->idr_u[i] || !dec->idr_v[i])) {
659 GST_WARNING_OBJECT (dec, "out of memory, i=%d, bytes=%u", i, maxrowbytes);
664 dec->idr_width_allocated = maxrowbytes;
665 GST_LOG_OBJECT (dec, "allocated temp memory, %u bytes/row", maxrowbytes);
/* Decode a grayscale JPEG via jpeg_read_raw_data() into the temporary row
 * buffers, then copy pixel-by-pixel into the output plane honouring its
 * pixel stride and row stride. */
670 gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, GstVideoFrame * frame)
673 guchar **scanarray[1] = { rows };
678 gint pstride, rstride;
680 GST_DEBUG_OBJECT (dec, "indirect decoding of grayscale");
682 width = GST_VIDEO_FRAME_WIDTH (frame);
683 height = GST_VIDEO_FRAME_HEIGHT (frame);
/* temp rows are allocated rounded up to a multiple of 32 bytes */
685 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
688 base[0] = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
689 pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
690 rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
692 memcpy (rows, dec->idr_y, 16 * sizeof (gpointer));
/* read DCTSIZE scanlines per iteration until the whole image is decoded */
696 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
697 if (G_LIKELY (lines > 0)) {
698 for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
702 for (k = 0; k < width; k++) {
703 base[0][p] = rows[j][k];
709 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Decode an RGB JPEG via jpeg_read_raw_data() into per-component temporary
 * row buffers, then interleave R/G/B pixel-by-pixel into the output frame
 * using its pixel stride and row stride. */
715 gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame)
717 guchar *r_rows[16], *g_rows[16], *b_rows[16];
718 guchar **scanarray[3] = { r_rows, g_rows, b_rows };
722 guint pstride, rstride;
725 GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");
727 width = GST_VIDEO_FRAME_WIDTH (frame);
728 height = GST_VIDEO_FRAME_HEIGHT (frame);
730 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
733 for (i = 0; i < 3; i++)
734 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
736 pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
737 rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
/* the shared idr_* scratch rows double as R/G/B rows here */
739 memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
740 memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
741 memcpy (b_rows, dec->idr_v, 16 * sizeof (gpointer));
745 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
746 if (G_LIKELY (lines > 0)) {
747 for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
751 for (k = 0; k < width; k++) {
752 base[0][p] = r_rows[j][k];
753 base[1][p] = g_rows[j][k];
754 base[2][p] = b_rows[j][k];
762 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
768 gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame, gint r_v,
771 guchar *y_rows[16], *u_rows[16], *v_rows[16];
772 guchar **scanarray[3] = { y_rows, u_rows, v_rows };
775 guchar *base[3], *last[3];
779 GST_DEBUG_OBJECT (dec,
780 "unadvantageous width or r_h, taking slow route involving memcpy");
782 width = GST_VIDEO_FRAME_WIDTH (frame);
783 height = GST_VIDEO_FRAME_HEIGHT (frame);
785 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
788 for (i = 0; i < 3; i++) {
789 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
790 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
791 /* make sure we don't make jpeglib write beyond our buffer,
792 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
793 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
794 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
797 memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer));
798 memcpy (u_rows, dec->idr_u, 16 * sizeof (gpointer));
799 memcpy (v_rows, dec->idr_v, 16 * sizeof (gpointer));
801 /* fill chroma components for grayscale */
803 GST_DEBUG_OBJECT (dec, "grayscale, filling chroma");
804 for (i = 0; i < 16; i++) {
805 memset (u_rows[i], GST_ROUND_UP_32 (width), 0x80);
806 memset (v_rows[i], GST_ROUND_UP_32 (width), 0x80);
810 for (i = 0; i < height; i += r_v * DCTSIZE) {
811 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, r_v * DCTSIZE);
812 if (G_LIKELY (lines > 0)) {
813 for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) {
814 if (G_LIKELY (base[0] <= last[0])) {
815 memcpy (base[0], y_rows[j], stride[0]);
816 base[0] += stride[0];
819 if (G_LIKELY (base[0] <= last[0])) {
820 memcpy (base[0], y_rows[j + 1], stride[0]);
821 base[0] += stride[0];
824 if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) {
826 memcpy (base[1], u_rows[k], stride[1]);
827 memcpy (base[2], v_rows[k], stride[2]);
828 } else if (r_h == 1) {
829 hresamplecpy1 (base[1], u_rows[k], stride[1]);
830 hresamplecpy1 (base[2], v_rows[k], stride[2]);
832 /* FIXME: implement (at least we avoid crashing by doing nothing) */
836 if (r_v == 2 || (k & 1) != 0) {
837 base[1] += stride[1];
838 base[2] += stride[2];
842 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Fast YUV decode path: build per-component scanline pointer tables into the
 * output frame and let jpeg_read_raw_data() write there directly.  Pointers
 * are clamped to the last valid row so jpeglib can never write past the end
 * of a plane. */
848 gst_jpeg_dec_decode_direct (GstJpegDec * dec, GstVideoFrame * frame)
850 guchar **line[3]; /* the jpeg line buffer */
851 guchar *y[4 * DCTSIZE] = { NULL, }; /* alloc enough for the lines */
852 guchar *u[4 * DCTSIZE] = { NULL, }; /* r_v will be <4 */
853 guchar *v[4 * DCTSIZE] = { NULL, };
855 gint lines, v_samp[3];
856 guchar *base[3], *last[3];
864 v_samp[0] = dec->cinfo.comp_info[0].v_samp_factor;
865 v_samp[1] = dec->cinfo.comp_info[1].v_samp_factor;
866 v_samp[2] = dec->cinfo.comp_info[2].v_samp_factor;
868 if (G_UNLIKELY (v_samp[0] > 2 || v_samp[1] > 2 || v_samp[2] > 2))
869 goto format_not_supported;
871 height = GST_VIDEO_FRAME_HEIGHT (frame);
873 for (i = 0; i < 3; i++) {
874 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
875 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
876 /* make sure we don't make jpeglib write beyond our buffer,
877 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
878 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
879 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
882 /* let jpeglib decode directly into our final buffer */
883 GST_DEBUG_OBJECT (dec, "decoding directly into output buffer");
885 for (i = 0; i < height; i += v_samp[0] * DCTSIZE) {
886 for (j = 0; j < (v_samp[0] * DCTSIZE); ++j) {
888 line[0][j] = base[0] + (i + j) * stride[0];
889 if (G_UNLIKELY (line[0][j] > last[0]))
890 line[0][j] = last[0];
/* chroma row addresses use half the luma rate when vertically subsampled */
892 if (v_samp[1] == v_samp[0]) {
893 line[1][j] = base[1] + ((i + j) / 2) * stride[1];
894 } else if (j < (v_samp[1] * DCTSIZE)) {
895 line[1][j] = base[1] + ((i / 2) + j) * stride[1];
897 if (G_UNLIKELY (line[1][j] > last[1]))
898 line[1][j] = last[1];
900 if (v_samp[2] == v_samp[0]) {
901 line[2][j] = base[2] + ((i + j) / 2) * stride[2];
902 } else if (j < (v_samp[2] * DCTSIZE)) {
903 line[2][j] = base[2] + ((i / 2) + j) * stride[2];
905 if (G_UNLIKELY (line[2][j] > last[2]))
906 line[2][j] = last[2];
909 lines = jpeg_read_raw_data (&dec->cinfo, line, v_samp[0] * DCTSIZE);
910 if (G_UNLIKELY (!lines)) {
911 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* ERRORS */
916 format_not_supported:
918 gboolean ret = GST_FLOW_OK;
920 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
921 (_("Failed to decode JPEG image")),
922 ("Unsupported subsampling schema: v_samp factors: %u %u %u", v_samp[0],
923 v_samp[1], v_samp[2]), ret);
/* Choose an output video format matching the JPEG colourspace and
 * (re)configure the base class output state, but only when width/height/
 * format actually changed compared to the current output state. */
930 gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc)
932 GstVideoCodecState *outstate;
934 GstVideoFormat format;
938 format = GST_VIDEO_FORMAT_RGB;
941 format = GST_VIDEO_FORMAT_GRAY8;
944 format = GST_VIDEO_FORMAT_I420;
948 /* Compare to currently configured output state */
949 outstate = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (dec));
951 info = &outstate->info;
953 if (width == GST_VIDEO_INFO_WIDTH (info) &&
954 height == GST_VIDEO_INFO_HEIGHT (info) &&
955 format == GST_VIDEO_INFO_FORMAT (info)) {
956 gst_video_codec_state_unref (outstate);
959 gst_video_codec_state_unref (outstate);
963 gst_video_decoder_set_output_state (GST_VIDEO_DECODER (dec), format,
964 width, height, dec->input_state);
/* JPEG output is full-range with BT.601 matrix */
971 outstate->info.colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;
972 outstate->info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
973 outstate->info.colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN;
974 outstate->info.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
978 gst_video_codec_state_unref (outstate);
980 gst_video_decoder_negotiate (GST_VIDEO_DECODER (dec));
982 GST_DEBUG_OBJECT (dec, "max_v_samp_factor=%d", dec->cinfo.max_v_samp_factor);
983 GST_DEBUG_OBJECT (dec, "max_h_samp_factor=%d", dec->cinfo.max_h_samp_factor);
/* GstVideoDecoder::handle_frame vfunc: decode one complete JPEG image.
 * Fatal libjpeg errors longjmp back to the setjmp below (see
 * gst_jpeg_dec_my_error_exit), which is why the input buffer stays mapped
 * for the whole decode and is unmapped on every exit path. */
987 gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec, GstVideoCodecFrame * frame)
989 GstFlowReturn ret = GST_FLOW_OK;
990 GstJpegDec *dec = (GstJpegDec *) bdec;
991 GstVideoFrame vframe;
995 gboolean need_unmap = TRUE;
996 GstVideoCodecState *state = NULL;
998 dec->current_frame = frame;
999 gst_buffer_map (frame->input_buffer, &dec->current_frame_map, GST_MAP_READ);
1000 gst_jpeg_dec_fill_input_buffer (&dec->cinfo);
/* landing point for fatal libjpeg errors raised via longjmp */
1002 if (setjmp (dec->jerr.setjmp_buffer)) {
1003 code = dec->jerr.pub.msg_code;
1005 if (code == JERR_INPUT_EOF) {
1006 GST_DEBUG ("jpeg input EOF error, we probably need more data");
1007 goto need_more_data;
1013 hdr_ok = jpeg_read_header (&dec->cinfo, TRUE);
1014 if (G_UNLIKELY (hdr_ok != JPEG_HEADER_OK)) {
1015 GST_WARNING_OBJECT (dec, "reading the header failed, %d", hdr_ok);
1018 GST_LOG_OBJECT (dec, "num_components=%d", dec->cinfo.num_components);
1019 GST_LOG_OBJECT (dec, "jpeg_color_space=%d", dec->cinfo.jpeg_color_space);
1021 if (!dec->cinfo.num_components || !dec->cinfo.comp_info)
1022 goto components_not_supported;
1024 r_h = dec->cinfo.comp_info[0].h_samp_factor;
1025 r_v = dec->cinfo.comp_info[0].v_samp_factor;
1027 GST_LOG_OBJECT (dec, "r_h = %d, r_v = %d", r_h, r_v);
1029 if (dec->cinfo.num_components > 3)
1030 goto components_not_supported;
1032 /* verify color space expectation to avoid going *boom* or bogus output */
1033 if (dec->cinfo.jpeg_color_space != JCS_YCbCr &&
1034 dec->cinfo.jpeg_color_space != JCS_GRAYSCALE &&
1035 dec->cinfo.jpeg_color_space != JCS_RGB)
1036 goto unsupported_colorspace;
1038 #ifndef GST_DISABLE_GST_DEBUG
1042 for (i = 0; i < dec->cinfo.num_components; ++i) {
1043 GST_LOG_OBJECT (dec, "[%d] h_samp_factor=%d, v_samp_factor=%d, cid=%d",
1044 i, dec->cinfo.comp_info[i].h_samp_factor,
1045 dec->cinfo.comp_info[i].v_samp_factor,
1046 dec->cinfo.comp_info[i].component_id);
1051 /* prepare for raw output */
1052 dec->cinfo.do_fancy_upsampling = FALSE;
1053 dec->cinfo.do_block_smoothing = FALSE;
1054 dec->cinfo.out_color_space = dec->cinfo.jpeg_color_space;
1055 dec->cinfo.dct_method = dec->idct_method;
1056 dec->cinfo.raw_data_out = TRUE;
1058 GST_LOG_OBJECT (dec, "starting decompress");
1059 guarantee_huff_tables (&dec->cinfo);
1060 if (!jpeg_start_decompress (&dec->cinfo)) {
1061 GST_WARNING_OBJECT (dec, "failed to start decompression cycle");
1064 /* sanity checks to get safe and reasonable output */
1065 switch (dec->cinfo.jpeg_color_space) {
1067 if (dec->cinfo.num_components != 1)
1068 goto invalid_yuvrgbgrayscale;
1071 if (dec->cinfo.num_components != 3 || dec->cinfo.max_v_samp_factor > 1 ||
1072 dec->cinfo.max_h_samp_factor > 1)
1073 goto invalid_yuvrgbgrayscale;
1076 if (dec->cinfo.num_components != 3 ||
1077 r_v > 2 || r_v < dec->cinfo.comp_info[0].v_samp_factor ||
1078 r_v < dec->cinfo.comp_info[1].v_samp_factor ||
1079 r_h < dec->cinfo.comp_info[0].h_samp_factor ||
1080 r_h < dec->cinfo.comp_info[1].h_samp_factor)
1081 goto invalid_yuvrgbgrayscale;
1084 g_assert_not_reached ();
1088 width = dec->cinfo.output_width;
1089 height = dec->cinfo.output_height;
1091 if (G_UNLIKELY (width < MIN_WIDTH || width > MAX_WIDTH ||
1092 height < MIN_HEIGHT || height > MAX_HEIGHT))
/* negotiate output caps, then get an output buffer from downstream */
1095 gst_jpeg_dec_negotiate (dec, width, height, dec->cinfo.jpeg_color_space);
1097 state = gst_video_decoder_get_output_state (bdec);
1098 ret = gst_video_decoder_allocate_output_frame (bdec, frame);
1099 if (G_UNLIKELY (ret != GST_FLOW_OK))
1102 if (!gst_video_frame_map (&vframe, &state->info, frame->output_buffer,
1106 GST_LOG_OBJECT (dec, "width %d, height %d", width, height);
/* dispatch to the appropriate decode path for the colourspace */
1108 if (dec->cinfo.jpeg_color_space == JCS_RGB) {
1109 gst_jpeg_dec_decode_rgb (dec, &vframe);
1110 } else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
1111 gst_jpeg_dec_decode_grayscale (dec, &vframe);
1113 GST_LOG_OBJECT (dec, "decompressing (reqired scanline buffer height = %u)",
1114 dec->cinfo.rec_outbuf_height);
1116 /* For some widths jpeglib requires more horizontal padding than I420
1117 * provides. In those cases we need to decode into separate buffers and then
1118 * copy over the data into our final picture buffer, otherwise jpeglib might
1119 * write over the end of a line into the beginning of the next line,
1120 * resulting in blocky artifacts on the left side of the picture. */
1121 if (G_UNLIKELY (width % (dec->cinfo.max_h_samp_factor * DCTSIZE) != 0
1122 || dec->cinfo.comp_info[0].h_samp_factor != 2
1123 || dec->cinfo.comp_info[1].h_samp_factor != 1
1124 || dec->cinfo.comp_info[2].h_samp_factor != 1)) {
1125 GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
1126 "indirect decoding using extra buffer copy");
1127 gst_jpeg_dec_decode_indirect (dec, &vframe, r_v, r_h,
1128 dec->cinfo.num_components);
1130 ret = gst_jpeg_dec_decode_direct (dec, &vframe);
1132 if (G_UNLIKELY (ret != GST_FLOW_OK))
1133 goto decode_direct_failed;
1137 gst_video_frame_unmap (&vframe);
1139 GST_LOG_OBJECT (dec, "decompressing finished");
1140 jpeg_finish_decompress (&dec->cinfo);
1142 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1143 ret = gst_video_decoder_finish_frame (bdec, frame);
1151 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1154 gst_video_codec_state_unref (state);
/* ERRORS: every label below resets libjpeg state with
 * jpeg_abort_decompress() so the next frame starts clean */
1161 GST_LOG_OBJECT (dec, "we need more data");
1168 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1169 (_("Failed to decode JPEG image")),
1170 ("Picture is too small or too big (%ux%u)", width, height), ret);
1171 ret = GST_FLOW_ERROR;
1176 gchar err_msg[JMSG_LENGTH_MAX];
1178 dec->jerr.pub.format_message ((j_common_ptr) (&dec->cinfo), err_msg);
1180 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1181 (_("Failed to decode JPEG image")), ("Decode error #%u: %s", code,
1184 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1185 gst_video_decoder_drop_frame (bdec, frame);
1187 jpeg_abort_decompress (&dec->cinfo);
1191 decode_direct_failed:
1193 /* already posted an error message */
1194 jpeg_abort_decompress (&dec->cinfo);
1199 const gchar *reason;
1201 reason = gst_flow_get_name (ret);
1203 GST_DEBUG_OBJECT (dec, "failed to alloc buffer, reason %s", reason);
1204 /* Reset for next time */
1205 jpeg_abort_decompress (&dec->cinfo);
1206 if (ret != GST_FLOW_EOS && ret != GST_FLOW_FLUSHING &&
1207 ret != GST_FLOW_NOT_LINKED) {
1208 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1209 (_("Failed to decode JPEG image")),
1210 ("Buffer allocation failed, reason: %s", reason), ret);
1211 jpeg_abort_decompress (&dec->cinfo);
1215 components_not_supported:
1217 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1218 (_("Failed to decode JPEG image")),
1219 ("number of components not supported: %d (max 3)",
1220 dec->cinfo.num_components), ret);
1221 jpeg_abort_decompress (&dec->cinfo);
1224 unsupported_colorspace:
1226 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1227 (_("Failed to decode JPEG image")),
1228 ("Picture has unknown or unsupported colourspace"), ret);
1229 jpeg_abort_decompress (&dec->cinfo);
1232 invalid_yuvrgbgrayscale:
1234 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1235 (_("Failed to decode JPEG image")),
1236 ("Picture is corrupt or unhandled YUV/RGB/grayscale layout"), ret);
1237 jpeg_abort_decompress (&dec->cinfo);
/* GstVideoDecoder::decide_allocation vfunc: chain up to the parent class to
 * negotiate the allocation query, then make sure the chosen buffer pool
 * advertises GstVideoMeta when downstream supports it.
 * NOTE(review): this capture is elided — the return-type line, braces and
 * return statements between the embedded original line numbers are missing;
 * only the visible statements are documented below. */
1243 gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query)
/* pool defaults to NULL in case the query proposes no pool. */
1245 GstBufferPool *pool = NULL;
1246 GstStructure *config;
/* Let the base class process the allocation query first; the elided branch
 * presumably returns FALSE on failure — TODO confirm against upstream. */
1248 if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query))
/* Grab the first proposed pool from the query, if any was offered. */
1251 if (gst_query_get_n_allocation_pools (query) > 0)
1252 gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
/* If downstream accepts GstVideoMeta, enable it on the pool so decoded
 * frames can carry per-plane stride/offset information. */
1257 config = gst_buffer_pool_get_config (pool);
1258 if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
1259 gst_buffer_pool_config_add_option (config,
1260 GST_BUFFER_POOL_OPTION_VIDEO_META);
/* Commit the (possibly updated) config and drop our pool reference taken
 * by gst_query_parse_nth_allocation_pool() above. */
1262 gst_buffer_pool_set_config (pool, config);
1263 gst_object_unref (pool);
/* GstVideoDecoder::start vfunc: reset per-stream parse state for a fresh
 * stream. (Capture is elided: return-type line, braces and the return
 * statement are missing between the embedded original line numbers.) */
1269 gst_jpeg_dec_start (GstVideoDecoder * bdec)
1271 GstJpegDec *dec = (GstJpegDec *) bdec;
/* No JPEG header has been seen yet in the new stream. */
1273 dec->saw_header = FALSE;
/* Reset the entropy-segment scanning bookkeeping used while locating
 * image boundaries in the byte stream. */
1274 dec->parse_entropy_len = 0;
1275 dec->parse_resync = FALSE;
/* Input is an unframed byte stream: tell the base class to invoke our
 * parsing to delimit individual JPEG images. */
1277 gst_video_decoder_set_packetized (bdec, FALSE);
/* GstVideoDecoder::flush vfunc: abort any in-progress libjpeg decompression
 * and clear parser state so decoding restarts cleanly (e.g. after a seek).
 * (Capture is elided: return-type line, braces and return statement are
 * missing between the embedded original line numbers.) */
1283 gst_jpeg_dec_flush (GstVideoDecoder * bdec)
1285 GstJpegDec *dec = (GstJpegDec *) bdec;
/* Discard partially decoded image state held inside libjpeg. */
1287 jpeg_abort_decompress (&dec->cinfo);
/* Same parse-state reset as in gst_jpeg_dec_start(). */
1288 dec->parse_entropy_len = 0;
1289 dec->parse_resync = FALSE;
1290 dec->saw_header = FALSE;
/* GObject::set_property: writes the "idct-method" and (deprecated)
 * "max-errors" properties. (Capture is elided: the switch statement's
 * scaffolding, break statements, #endif and braces are missing between the
 * embedded original line numbers.) */
1296 gst_jpeg_dec_set_property (GObject * object, guint prop_id,
1297 const GValue * value, GParamSpec * pspec)
1301 dec = GST_JPEG_DEC (object);
/* Select the libjpeg inverse-DCT algorithm to use for decoding. */
1304 case PROP_IDCT_METHOD:
1305 dec->idct_method = g_value_get_enum (value);
1307 #ifndef GST_REMOVE_DEPRECATED
1308 case PROP_MAX_ERRORS:
/* Atomic write: presumably the streaming thread reads max_errors
 * concurrently with property changes — matches the atomic read in
 * gst_jpeg_dec_get_property(). */
1309 g_atomic_int_set (&dec->max_errors, g_value_get_int (value));
/* Unknown property id: warn per standard GObject convention. */
1313 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GObject::get_property: reads the "idct-method" and (deprecated)
 * "max-errors" properties. (Capture is elided: the switch statement's
 * scaffolding, break statements, #endif and braces are missing between the
 * embedded original line numbers.) */
1319 gst_jpeg_dec_get_property (GObject * object, guint prop_id, GValue * value,
1324 dec = GST_JPEG_DEC (object);
1327 case PROP_IDCT_METHOD:
1328 g_value_set_enum (value, dec->idct_method);
1330 #ifndef GST_REMOVE_DEPRECATED
1331 case PROP_MAX_ERRORS:
/* Atomic read, pairing with the atomic write in
 * gst_jpeg_dec_set_property(). */
1332 g_value_set_int (value, g_atomic_int_get (&dec->max_errors));
/* Unknown property id: warn per standard GObject convention. */
1336 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1342 gst_jpeg_dec_stop (GstVideoDecoder * bdec)
1344 GstJpegDec *dec = (GstJpegDec *) bdec;
1346 gst_jpeg_dec_free_buffers (dec);