2 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
3 * Copyright (C) <2009> Tim-Philipp Müller <tim centricular net>
4 * Copyright (C) 2012 Collabora Ltd.
5 * Author : Edward Hervey <edward@collabora.com>
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Library General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Library General Public License for more details.
17 * You should have received a copy of the GNU Library General Public
18 * License along with this library; if not, write to the
19 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
20 * Boston, MA 02110-1301, USA.
24 * SECTION:element-jpegdec
26 * Decodes jpeg images.
29 * <title>Example launch line</title>
31 * gst-launch-1.0 -v filesrc location=mjpeg.avi ! avidemux ! queue ! jpegdec ! videoconvert ! videoscale ! autovideosink
32 * ]| The above pipeline decodes the mjpeg stream and renders it to the screen.
41 #include "gstjpegdec.h"
43 #include <gst/video/video.h>
44 #include <gst/video/gstvideometa.h>
45 #include <gst/video/gstvideopool.h>
46 #include "gst/gst-i18n-plugin.h"
50 #define MAX_WIDTH 65535
52 #define MAX_HEIGHT 65535
54 #define CINFO_GET_JPEGDEC(cinfo_ptr) \
55 (((struct GstJpegDecSourceMgr*)((cinfo_ptr)->src))->dec)
57 #define JPEG_DEFAULT_IDCT_METHOD JDCT_FASTEST
58 #define JPEG_DEFAULT_MAX_ERRORS 0
68 static GstStaticPadTemplate gst_jpeg_dec_src_pad_template =
69 GST_STATIC_PAD_TEMPLATE ("src",
72 GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
73 ("{ I420, RGB, BGR, RGBx, xRGB, BGRx, xBGR, GRAY8 }"))
77 /* FIXME: sof-marker is for IJG libjpeg 8, should be different for 6.2 */
78 /* FIXME: add back "sof-marker = (int) { 0, 1, 2, 5, 6, 7, 9, 10, 13, 14 }"
79 * once we have a parser and/or demuxer set caps properly */
80 static GstStaticPadTemplate gst_jpeg_dec_sink_pad_template =
81 GST_STATIC_PAD_TEMPLATE ("sink",
84 GST_STATIC_CAPS ("image/jpeg")
87 GST_DEBUG_CATEGORY_STATIC (jpeg_dec_debug);
88 #define GST_CAT_DEFAULT jpeg_dec_debug
89 GST_DEBUG_CATEGORY_STATIC (GST_CAT_PERFORMANCE);
91 static void gst_jpeg_dec_set_property (GObject * object, guint prop_id,
92 const GValue * value, GParamSpec * pspec);
93 static void gst_jpeg_dec_get_property (GObject * object, guint prop_id,
94 GValue * value, GParamSpec * pspec);
96 static gboolean gst_jpeg_dec_set_format (GstVideoDecoder * dec,
97 GstVideoCodecState * state);
98 static gboolean gst_jpeg_dec_start (GstVideoDecoder * bdec);
99 static gboolean gst_jpeg_dec_stop (GstVideoDecoder * bdec);
100 static gboolean gst_jpeg_dec_flush (GstVideoDecoder * bdec);
101 static GstFlowReturn gst_jpeg_dec_parse (GstVideoDecoder * bdec,
102 GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos);
103 static GstFlowReturn gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec,
104 GstVideoCodecFrame * frame);
105 static gboolean gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec,
108 #define gst_jpeg_dec_parent_class parent_class
109 G_DEFINE_TYPE (GstJpegDec, gst_jpeg_dec, GST_TYPE_VIDEO_DECODER);
/* GObject finalize: tear down the libjpeg decompress context and drop the
 * cached input codec state, then chain up to the parent class. */
112 gst_jpeg_dec_finalize (GObject * object)
114 GstJpegDec *dec = GST_JPEG_DEC (object);
116 jpeg_destroy_decompress (&dec->cinfo);
117 if (dec->input_state)
118 gst_video_codec_state_unref (dec->input_state);
120 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Class initializer: wires up GObject property handling, installs the
 * element's properties and pad templates, and hooks the GstVideoDecoder
 * virtual methods to this element's implementations. */
124 gst_jpeg_dec_class_init (GstJpegDecClass * klass)
126 GObjectClass *gobject_class;
127 GstElementClass *element_class;
128 GstVideoDecoderClass *vdec_class;
130 gobject_class = (GObjectClass *) klass;
131 element_class = (GstElementClass *) klass;
132 vdec_class = (GstVideoDecoderClass *) klass;
134 parent_class = g_type_class_peek_parent (klass);
136 gobject_class->finalize = gst_jpeg_dec_finalize;
137 gobject_class->set_property = gst_jpeg_dec_set_property;
138 gobject_class->get_property = gst_jpeg_dec_get_property;
/* idct-method: lets the user trade decode speed for accuracy (libjpeg DCT
 * method selection). */
140 g_object_class_install_property (gobject_class, PROP_IDCT_METHOD,
141 g_param_spec_enum ("idct-method", "IDCT Method",
142 "The IDCT algorithm to use", GST_TYPE_IDCT_METHOD,
143 JPEG_DEFAULT_IDCT_METHOD,
144 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
147 * GstJpegDec:max-errors:
149 * Error out after receiving N consecutive decoding errors
150 * (-1 = never error out, 0 = automatic, 1 = fail on first error, etc.)
152 * Deprecated: 1.3.1: Property wasn't used internally
/* Kept only for ABI/API compatibility; marked G_PARAM_DEPRECATED below. */
154 #ifndef GST_REMOVE_DEPRECATED
155 g_object_class_install_property (gobject_class, PROP_MAX_ERRORS,
156 g_param_spec_int ("max-errors", "Maximum Consecutive Decoding Errors",
157 "(Deprecated) Error out after receiving N consecutive decoding errors"
158 " (-1 = never fail, 0 = automatic, 1 = fail on first error)",
159 -1, G_MAXINT, JPEG_DEFAULT_MAX_ERRORS,
160 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED));
163 gst_element_class_add_static_pad_template (element_class,
164 &gst_jpeg_dec_src_pad_template);
165 gst_element_class_add_static_pad_template (element_class,
166 &gst_jpeg_dec_sink_pad_template);
167 gst_element_class_set_static_metadata (element_class, "JPEG image decoder",
168 "Codec/Decoder/Image", "Decode images from JPEG format",
169 "Wim Taymans <wim@fluendo.com>");
/* GstVideoDecoder virtual method table. */
171 vdec_class->start = gst_jpeg_dec_start;
172 vdec_class->stop = gst_jpeg_dec_stop;
173 vdec_class->flush = gst_jpeg_dec_flush;
174 vdec_class->parse = gst_jpeg_dec_parse;
175 vdec_class->set_format = gst_jpeg_dec_set_format;
176 vdec_class->handle_frame = gst_jpeg_dec_handle_frame;
177 vdec_class->decide_allocation = gst_jpeg_dec_decide_allocation;
179 GST_DEBUG_CATEGORY_INIT (jpeg_dec_debug, "jpegdec", 0, "JPEG decoder");
180 GST_DEBUG_CATEGORY_GET (GST_CAT_PERFORMANCE, "GST_PERFORMANCE");
/* libjpeg source-manager callback: invoked when the decoder exhausts the
 * input buffer. Whole frames are handed to libjpeg up front (see
 * handle_frame), so reaching this callback means the frame data ran short. */
184 gst_jpeg_dec_fill_input_buffer (j_decompress_ptr cinfo)
186 /* We pass in full frame initially, if this get called, the frame is most likely
/* libjpeg source-manager callback: called before any data is read.
 * Nothing to set up here; just trace the call. */
192 gst_jpeg_dec_init_source (j_decompress_ptr cinfo)
194 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "init_source");
/* libjpeg source-manager callback: advance the read position by num_bytes.
 * Only skips when the request fits inside the bytes still available in the
 * current buffer; the whole frame is provided up front. */
199 gst_jpeg_dec_skip_input_data (j_decompress_ptr cinfo, glong num_bytes)
201 GstJpegDec *dec = CINFO_GET_JPEGDEC (cinfo);
203 GST_DEBUG_OBJECT (dec, "skip %ld bytes", num_bytes);
205 if (num_bytes > 0 && cinfo->src->bytes_in_buffer >= num_bytes) {
206 cinfo->src->next_input_byte += (size_t) num_bytes;
207 cinfo->src->bytes_in_buffer -= (size_t) num_bytes;
/* libjpeg source-manager callback: called when the decoder needs to resync
 * to a restart marker. Only logs; libjpeg's default resync logic applies. */
212 gst_jpeg_dec_resync_to_restart (j_decompress_ptr cinfo, gint desired)
214 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "resync_to_start");
/* libjpeg source-manager callback: called when decompression finishes.
 * No cleanup needed — the input buffer is owned by the GstBuffer mapping. */
219 gst_jpeg_dec_term_source (j_decompress_ptr cinfo)
221 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "term_source");
/* libjpeg error-manager callback: suppress libjpeg's stderr messages;
 * errors are reported through GStreamer's own mechanisms instead. */
226 gst_jpeg_dec_my_output_message (j_common_ptr cinfo)
228 return; /* do nothing */
/* libjpeg error-manager callback for trace/warning messages: intentionally
 * silent (logging left commented out to avoid per-scanline log spam). */
232 gst_jpeg_dec_my_emit_message (j_common_ptr cinfo, int msg_level)
234 /* GST_LOG_OBJECT (CINFO_GET_JPEGDEC (&cinfo), "msg_level=%d", msg_level); */
/* libjpeg error-manager callback for fatal errors: instead of letting
 * libjpeg call exit(), print the message and longjmp back to the
 * setjmp point established in handle_frame, where the error is turned
 * into a GstFlowReturn. Never returns. */
239 gst_jpeg_dec_my_error_exit (j_common_ptr cinfo)
241 struct GstJpegDecErrorMgr *err_mgr = (struct GstJpegDecErrorMgr *) cinfo->err;
243 (*cinfo->err->output_message) (cinfo);
244 longjmp (err_mgr->setjmp_buffer, 1);
/* Instance initializer: sets up the libjpeg decompress context with our
 * custom error and source managers, initializes property defaults, and
 * configures sink-pad accept-caps behavior. */
248 gst_jpeg_dec_init (GstJpegDec * dec)
250 GST_DEBUG ("initializing");
/* Install our error manager BEFORE jpeg_create_decompress so that any
 * failure during creation already goes through our error_exit handler. */
253 memset (&dec->cinfo, 0, sizeof (dec->cinfo));
254 memset (&dec->jerr, 0, sizeof (dec->jerr));
255 dec->cinfo.err = jpeg_std_error (&dec->jerr.pub);
256 dec->jerr.pub.output_message = gst_jpeg_dec_my_output_message;
257 dec->jerr.pub.emit_message = gst_jpeg_dec_my_emit_message;
258 dec->jerr.pub.error_exit = gst_jpeg_dec_my_error_exit;
260 jpeg_create_decompress (&dec->cinfo);
/* Hook up our custom source manager; data is fed per-frame from mapped
 * GstBuffers rather than from a stdio stream. */
262 dec->cinfo.src = (struct jpeg_source_mgr *) &dec->jsrc;
263 dec->cinfo.src->init_source = gst_jpeg_dec_init_source;
264 dec->cinfo.src->fill_input_buffer = gst_jpeg_dec_fill_input_buffer;
265 dec->cinfo.src->skip_input_data = gst_jpeg_dec_skip_input_data;
266 dec->cinfo.src->resync_to_restart = gst_jpeg_dec_resync_to_restart;
267 dec->cinfo.src->term_source = gst_jpeg_dec_term_source;
270 /* init properties */
271 dec->idct_method = JPEG_DEFAULT_IDCT_METHOD;
272 dec->max_errors = JPEG_DEFAULT_MAX_ERRORS;
274 gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
276 GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (dec));
/* Returns whether the given JPEG marker tag is followed by entropy-coded
 * data: SOS (0xda) and the restart markers RST0-RST7 (0xd0-0xd7). Used by
 * the parser to know it must scan past the entropy segment to find the
 * next marker. */
279 static inline gboolean
280 gst_jpeg_dec_parse_tag_has_entropy_segment (guint8 tag)
282 if (tag == 0xda || (tag >= 0xd0 && tag <= 0xd7))
/* GstVideoDecoder::parse implementation: scans the adapter for complete
 * JPEG images (SOI..EOI), walking marker segments one at a time. Partial
 * data returns GST_VIDEO_DECODER_FLOW_NEED_DATA; a complete image is
 * committed via gst_video_decoder_add_to_frame()/have_frame(). Parse
 * state (saw_header, parse_resync, parse_entropy_len) persists across
 * calls so scanning resumes where it left off. */
288 gst_jpeg_dec_parse (GstVideoDecoder * bdec, GstVideoCodecFrame * frame,
289 GstAdapter * adapter, gboolean at_eos)
294 gint offset = 0, noffset;
295 GstJpegDec *dec = (GstJpegDec *) bdec;
297 GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
299 /* FIXME : The overhead of using scan_uint32 is massive */
301 size = gst_adapter_available (adapter);
302 GST_DEBUG ("Parsing jpeg image data (%u bytes)", size);
305 GST_DEBUG ("Flushing all data out");
308 /* If we have leftover data, throw it away */
309 if (!dec->saw_header)
311 goto have_full_frame;
/* Not yet locked onto a frame: look for the SOI start marker (0xffd8). */
317 if (!dec->saw_header) {
319 /* we expect at least 4 bytes, first of which start marker */
321 gst_adapter_masked_scan_uint32 (adapter, 0xffff0000, 0xffd80000, 0,
324 GST_DEBUG ("ret:%d", ret);
/* Discard any garbage preceding the SOI marker. */
329 gst_adapter_flush (adapter, ret);
332 dec->saw_header = TRUE;
339 GST_DEBUG ("offset:%d, size:%d", offset, size);
/* Locate the next 0xff marker prefix at (or after) the expected offset. */
342 gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
343 offset, size - offset, &value);
345 /* lost sync if 0xff marker not where expected */
346 if ((resync = (noffset != offset))) {
347 GST_DEBUG ("Lost sync at 0x%08x, resyncing", offset + 2);
349 /* may have marker, but could have been resyncing */
350 resync = resync || dec->parse_resync;
351 /* Skip over extra 0xff */
352 while ((noffset >= 0) && ((value & 0xff) == 0xff)) {
355 gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
356 noffset, size - noffset, &value);
358 /* enough bytes left for marker? (we need 0xNN after the 0xff) */
360 GST_DEBUG ("at end of input and no EOI marker found, need more data");
364 /* now lock on the marker we found */
366 value = value & 0xff;
/* EOI (0xd9): a full image is assembled — reset state and emit it. */
368 GST_DEBUG ("0x%08x: EOI marker", offset + 2);
369 /* clear parse state */
370 dec->saw_header = FALSE;
371 dec->parse_resync = FALSE;
373 goto have_full_frame;
376 /* Skip this frame if we found another SOI marker */
377 GST_DEBUG ("0x%08x: SOI marker before EOI, skipping", offset + 2);
378 dec->parse_resync = FALSE;
/* Restart markers (RST0-RST7) are standalone: no length field follows. */
384 if (value >= 0xd0 && value <= 0xd7)
387 /* peek tag and subsequent length */
388 if (offset + 2 + 4 > size)
391 gst_adapter_masked_scan_uint32_peek (adapter, 0x0, 0x0, offset + 2, 4,
/* Segment length is the low 16 bits of the peeked 32-bit word. */
393 frame_len = frame_len & 0xffff;
395 GST_DEBUG ("0x%08x: tag %02x, frame_len=%u", offset + 2, value, frame_len);
396 /* the frame length includes the 2 bytes for the length; here we want at
397 * least 2 more bytes at the end for an end marker */
398 if (offset + 2 + 2 + frame_len + 2 > size) {
/* SOS/RST: entropy-coded data follows the segment; scan forward for the
 * next real marker, remembering progress in parse_entropy_len so a later
 * call can resume without rescanning. */
402 if (gst_jpeg_dec_parse_tag_has_entropy_segment (value)) {
403 guint eseglen = dec->parse_entropy_len;
405 GST_DEBUG ("0x%08x: finding entropy segment length (eseglen:%d)",
406 offset + 2, eseglen);
407 if (size < offset + 2 + frame_len + eseglen)
409 noffset = offset + 2 + frame_len + dec->parse_entropy_len;
411 GST_DEBUG ("noffset:%d, size:%d, size - noffset:%d",
412 noffset, size, size - noffset);
413 noffset = gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00,
414 0x0000ff00, noffset, size - noffset, &value);
417 dec->parse_entropy_len = size - offset - 4 - frame_len - 2;
/* 0xff00 inside entropy data is a stuffed byte, not a marker. */
420 if ((value & 0xff) != 0x00) {
421 eseglen = noffset - offset - frame_len - 2;
426 dec->parse_entropy_len = 0;
427 frame_len += eseglen;
428 GST_DEBUG ("entropy segment length=%u => frame_len=%u", eseglen,
432 /* check if we will still be in sync if we interpret
433 * this as a sync point and skip this frame */
434 noffset = offset + frame_len + 2;
435 noffset = gst_adapter_masked_scan_uint32 (adapter, 0x0000ff00, 0x0000ff00,
438 /* ignore and continue resyncing until we hit the end
439 * of our data or find a sync point that looks okay */
443 GST_DEBUG ("found sync at 0x%x", offset + 2);
446 /* Add current data to output buffer */
447 toadd += frame_len + 2;
448 offset += frame_len + 2;
/* Parsed data so far is valid but incomplete: bank it and wait for more. */
453 gst_video_decoder_add_to_frame (bdec, toadd);
454 return GST_VIDEO_DECODER_FLOW_NEED_DATA;
/* have_full_frame: hand the completed image downstream for decoding. */
458 gst_video_decoder_add_to_frame (bdec, toadd);
459 GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
460 return gst_video_decoder_have_frame (bdec);
/* drop: discard everything currently buffered. */
463 gst_adapter_flush (adapter, size);
468 /* shamelessly ripped from jpegutils.c in mjpegtools */
/* Install one Huffman table into the decompress struct: allocates the
 * table if needed, copies the code-length counts, validates the total
 * symbol count (1..256), then copies exactly that many symbol values. */
470 add_huff_table (j_decompress_ptr dinfo,
471 JHUFF_TBL ** htblptr, const UINT8 * bits, const UINT8 * val)
472 /* Define a Huffman table */
476 if (*htblptr == NULL)
477 *htblptr = jpeg_alloc_huff_table ((j_common_ptr) dinfo);
481 /* Copy the number-of-symbols-of-each-code-length counts */
482 memcpy ((*htblptr)->bits, bits, sizeof ((*htblptr)->bits));
484 /* Validate the counts. We do this here mainly so we can copy the right
485 * number of symbols from the val[] array, without risking marching off
486 * the end of memory. jchuff.c will do a more thorough test later.
489 for (len = 1; len <= 16; len++)
490 nsymbols += bits[len];
491 if (nsymbols < 1 || nsymbols > 256)
492 g_error ("jpegutils.c: add_huff_table failed badly. ");
494 memcpy ((*htblptr)->huffval, val, nsymbols * sizeof (UINT8));
/* Install the four standard JPEG Huffman tables (DC/AC, luma/chroma) from
 * JPEG standard Annex K.3. Needed for streams (e.g. some MJPEG) that omit
 * DHT segments and rely on the default tables. */
500 std_huff_tables (j_decompress_ptr dinfo)
501 /* Set up the standard Huffman tables (cf. JPEG standard section K.3) */
502 /* IMPORTANT: these are only valid for 8-bit data precision! */
504 static const UINT8 bits_dc_luminance[17] =
505 { /* 0-base */ 0, 0, 1, 5, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0 };
506 static const UINT8 val_dc_luminance[] =
507 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
509 static const UINT8 bits_dc_chrominance[17] =
510 { /* 0-base */ 0, 0, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0 };
511 static const UINT8 val_dc_chrominance[] =
512 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
514 static const UINT8 bits_ac_luminance[17] =
515 { /* 0-base */ 0, 0, 2, 1, 3, 3, 2, 4, 3, 5, 5, 4, 4, 0, 0, 1, 0x7d };
516 static const UINT8 val_ac_luminance[] =
517 { 0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12,
518 0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07,
519 0x22, 0x71, 0x14, 0x32, 0x81, 0x91, 0xa1, 0x08,
520 0x23, 0x42, 0xb1, 0xc1, 0x15, 0x52, 0xd1, 0xf0,
521 0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16,
522 0x17, 0x18, 0x19, 0x1a, 0x25, 0x26, 0x27, 0x28,
523 0x29, 0x2a, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39,
524 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49,
525 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59,
526 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69,
527 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79,
528 0x7a, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89,
529 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98,
530 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
531 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6,
532 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4, 0xc5,
533 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4,
534 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xe1, 0xe2,
535 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea,
536 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
540 static const UINT8 bits_ac_chrominance[17] =
541 { /* 0-base */ 0, 0, 2, 1, 2, 4, 4, 3, 4, 7, 5, 4, 4, 0, 1, 2, 0x77 };
542 static const UINT8 val_ac_chrominance[] =
543 { 0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21,
544 0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71,
545 0x13, 0x22, 0x32, 0x81, 0x08, 0x14, 0x42, 0x91,
546 0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33, 0x52, 0xf0,
547 0x15, 0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34,
548 0xe1, 0x25, 0xf1, 0x17, 0x18, 0x19, 0x1a, 0x26,
549 0x27, 0x28, 0x29, 0x2a, 0x35, 0x36, 0x37, 0x38,
550 0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
551 0x49, 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58,
552 0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
553 0x69, 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78,
554 0x79, 0x7a, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
555 0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96,
556 0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5,
557 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4,
558 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3,
559 0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2,
560 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda,
561 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9,
562 0xea, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
/* Slot 0 = luminance tables, slot 1 = chrominance tables. */
566 add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[0],
567 bits_dc_luminance, val_dc_luminance);
568 add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[0],
569 bits_ac_luminance, val_ac_luminance);
570 add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[1],
571 bits_dc_chrominance, val_dc_chrominance);
572 add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[1],
573 bits_ac_chrominance, val_ac_chrominance);
/* If the stream defined no Huffman tables at all, install the standard
 * ones so decompression can proceed (common with abbreviated MJPEG).
 * Only fills in when all four slots are empty, so streams that supply
 * their own tables are left untouched. */
579 guarantee_huff_tables (j_decompress_ptr dinfo)
581 if ((dinfo->dc_huff_tbl_ptrs[0] == NULL) &&
582 (dinfo->dc_huff_tbl_ptrs[1] == NULL) &&
583 (dinfo->ac_huff_tbl_ptrs[0] == NULL) &&
584 (dinfo->ac_huff_tbl_ptrs[1] == NULL)) {
585 GST_DEBUG ("Generating standard Huffman tables for this frame.");
586 std_huff_tables (dinfo);
/* GstVideoDecoder::set_format: choose packetized vs. parsed mode based on
 * the input segment format (TIME segments imply framed input, so no
 * internal parsing needed) and remember the new input state. */
591 gst_jpeg_dec_set_format (GstVideoDecoder * dec, GstVideoCodecState * state)
593 GstJpegDec *jpeg = GST_JPEG_DEC (dec);
595 if (dec->input_segment.format == GST_FORMAT_TIME)
596 gst_video_decoder_set_packetized (dec, TRUE);
598 gst_video_decoder_set_packetized (dec, FALSE);
600 if (jpeg->input_state)
601 gst_video_codec_state_unref (jpeg->input_state);
602 jpeg->input_state = gst_video_codec_state_ref (state);
/* Copy `len` pixels from src to dest while horizontally downsampling by 2
 * (takes every other source sample). Used by the indirect decode path for
 * chroma rows when r_h == 1. */
610 hresamplecpy1 (guint8 * dest, const guint8 * src, guint len)
614 for (i = 0; i < len; ++i) {
615 /* equivalent to: dest[i] = src[i << 1] */
/* Free the 16 per-component scratch row buffers used by the indirect
 * decode paths and reset the recorded allocation width so they will be
 * re-allocated on next use. */
624 gst_jpeg_dec_free_buffers (GstJpegDec * dec)
628 for (i = 0; i < 16; i++) {
629 g_free (dec->idr_y[i]);
630 g_free (dec->idr_u[i]);
631 g_free (dec->idr_v[i]);
632 dec->idr_y[i] = NULL;
633 dec->idr_u[i] = NULL;
634 dec->idr_v[i] = NULL;
637 dec->idr_width_allocated = 0;
/* Make sure the 16 scratch rows per component are allocated with at least
 * maxrowbytes each; no-op when the cached width already matches. Returns
 * FALSE (via the missing tail of this view) on allocation failure. */
640 static inline gboolean
641 gst_jpeg_dec_ensure_buffers (GstJpegDec * dec, guint maxrowbytes)
645 if (G_LIKELY (dec->idr_width_allocated == maxrowbytes))
648 /* FIXME: maybe just alloc one or three blocks altogether? */
649 for (i = 0; i < 16; i++) {
650 dec->idr_y[i] = g_try_realloc (dec->idr_y[i], maxrowbytes);
651 dec->idr_u[i] = g_try_realloc (dec->idr_u[i], maxrowbytes);
652 dec->idr_v[i] = g_try_realloc (dec->idr_v[i], maxrowbytes);
654 if (G_UNLIKELY (!dec->idr_y[i] || !dec->idr_u[i] || !dec->idr_v[i])) {
655 GST_WARNING_OBJECT (dec, "out of memory, i=%d, bytes=%u", i, maxrowbytes);
660 dec->idr_width_allocated = maxrowbytes;
661 GST_LOG_OBJECT (dec, "allocated temp memory, %u bytes/row", maxrowbytes);
/* Decode a grayscale JPEG into the mapped video frame: libjpeg writes raw
 * scanlines into scratch rows, which are then copied pixel-by-pixel into
 * component 0 honoring the frame's pixel and row strides. */
666 gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, GstVideoFrame * frame)
669 guchar **scanarray[1] = { rows };
674 gint pstride, rstride;
676 GST_DEBUG_OBJECT (dec, "indirect decoding of grayscale");
678 width = GST_VIDEO_FRAME_WIDTH (frame);
679 height = GST_VIDEO_FRAME_HEIGHT (frame);
681 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
684 base[0] = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
685 pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
686 rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
/* Point the local row array at the pre-allocated scratch rows. */
688 memcpy (rows, dec->idr_y, 16 * sizeof (gpointer));
692 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
693 if (G_LIKELY (lines > 0)) {
694 for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
698 for (k = 0; k < width; k++) {
699 base[0][p] = rows[j][k];
705 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Decode an RGB JPEG into the mapped video frame: libjpeg fills separate
 * R/G/B scratch rows, which are interleaved into the packed output one
 * pixel at a time using the frame's pixel stride. */
711 gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame)
713 guchar *r_rows[16], *g_rows[16], *b_rows[16];
714 guchar **scanarray[3] = { r_rows, g_rows, b_rows };
718 guint pstride, rstride;
721 GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");
723 width = GST_VIDEO_FRAME_WIDTH (frame);
724 height = GST_VIDEO_FRAME_HEIGHT (frame);
726 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
729 for (i = 0; i < 3; i++)
730 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
732 pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
733 rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
/* Point the local row arrays at the pre-allocated scratch rows; the y/u/v
 * scratch buffers are reused here for R/G/B. */
735 memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
736 memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
737 memcpy (b_rows, dec->idr_v, 16 * sizeof (gpointer));
741 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
742 if (G_LIKELY (lines > 0)) {
743 for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
747 for (k = 0; k < width; k++) {
748 base[0][p] = r_rows[j][k];
749 base[1][p] = g_rows[j][k];
750 base[2][p] = b_rows[j][k];
758 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Slow-path YUV decode: libjpeg writes into padded scratch rows which are
 * then memcpy'd into the output frame. Used when the width/subsampling
 * would otherwise let jpeglib write past the end of an output line.
 * The `last[]` pointers clamp writes so jpeglib padding rows beyond the
 * frame height are dropped. */
764 gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame, gint r_v,
767 guchar *y_rows[16], *u_rows[16], *v_rows[16];
768 guchar **scanarray[3] = { y_rows, u_rows, v_rows };
771 guchar *base[3], *last[3];
775 GST_DEBUG_OBJECT (dec,
776 "unadvantageous width or r_h, taking slow route involving memcpy");
778 width = GST_VIDEO_FRAME_WIDTH (frame);
779 height = GST_VIDEO_FRAME_HEIGHT (frame);
781 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
784 for (i = 0; i < 3; i++) {
785 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
786 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
787 /* make sure we don't make jpeglib write beyond our buffer,
788 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
789 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
790 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
793 memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer));
794 memcpy (u_rows, dec->idr_u, 16 * sizeof (gpointer));
795 memcpy (v_rows, dec->idr_v, 16 * sizeof (gpointer));
797 /* fill chroma components for grayscale */
799 GST_DEBUG_OBJECT (dec, "grayscale, filling chroma");
800 for (i = 0; i < 16; i++) {
/* NOTE(review): memset's signature is (ptr, value, size) — these calls
 * pass the row width as the VALUE and 0x80 as the SIZE, so only 0x80
 * bytes get filled (with width&0xff, not 0x80). The intent is clearly
 * memset (u_rows[i], 0x80, GST_ROUND_UP_32 (width)); confirm against
 * upstream before fixing, as the full function body is not visible here. */
801 memset (u_rows[i], GST_ROUND_UP_32 (width), 0x80);
802 memset (v_rows[i], GST_ROUND_UP_32 (width), 0x80);
806 for (i = 0; i < height; i += r_v * DCTSIZE) {
807 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, r_v * DCTSIZE);
808 if (G_LIKELY (lines > 0)) {
809 for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) {
810 if (G_LIKELY (base[0] <= last[0])) {
811 memcpy (base[0], y_rows[j], stride[0]);
812 base[0] += stride[0];
815 if (G_LIKELY (base[0] <= last[0])) {
816 memcpy (base[0], y_rows[j + 1], stride[0]);
817 base[0] += stride[0];
820 if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) {
822 memcpy (base[1], u_rows[k], stride[1]);
823 memcpy (base[2], v_rows[k], stride[2]);
/* r_h == 1: chroma is not horizontally subsampled in the source,
 * so take every other sample to fit the output stride. */
824 } else if (r_h == 1) {
825 hresamplecpy1 (base[1], u_rows[k], stride[1]);
826 hresamplecpy1 (base[2], v_rows[k], stride[2]);
828 /* FIXME: implement (at least we avoid crashing by doing nothing) */
/* Advance chroma rows every iteration for 4:2:0 (r_v==2), or every
 * second luma row otherwise. */
832 if (r_v == 2 || (k & 1) != 0) {
833 base[1] += stride[1];
834 base[2] += stride[2];
838 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Fast-path YUV decode: builds per-component scanline pointer tables that
 * point straight into the output frame so jpeg_read_raw_data() writes
 * into the final buffer with no intermediate copy. Rejects vertical
 * sampling factors > 2. Out-of-range rows are clamped to `last[]` so
 * jpeglib's padding rows overwrite the final line instead of running
 * past the buffer. */
844 gst_jpeg_dec_decode_direct (GstJpegDec * dec, GstVideoFrame * frame)
846 guchar **line[3]; /* the jpeg line buffer */
847 guchar *y[4 * DCTSIZE] = { NULL, }; /* alloc enough for the lines */
848 guchar *u[4 * DCTSIZE] = { NULL, }; /* r_v will be <4 */
849 guchar *v[4 * DCTSIZE] = { NULL, };
851 gint lines, v_samp[3];
852 guchar *base[3], *last[3];
860 v_samp[0] = dec->cinfo.comp_info[0].v_samp_factor;
861 v_samp[1] = dec->cinfo.comp_info[1].v_samp_factor;
862 v_samp[2] = dec->cinfo.comp_info[2].v_samp_factor;
864 if (G_UNLIKELY (v_samp[0] > 2 || v_samp[1] > 2 || v_samp[2] > 2))
865 goto format_not_supported;
867 height = GST_VIDEO_FRAME_HEIGHT (frame);
869 for (i = 0; i < 3; i++) {
870 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
871 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
872 /* make sure we don't make jpeglib write beyond our buffer,
873 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
874 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
875 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
878 /* let jpeglib decode directly into our final buffer */
879 GST_DEBUG_OBJECT (dec, "decoding directly into output buffer");
881 for (i = 0; i < height; i += v_samp[0] * DCTSIZE) {
/* Rebuild the scanline pointer tables for this MCU-row batch. */
882 for (j = 0; j < (v_samp[0] * DCTSIZE); ++j) {
884 line[0][j] = base[0] + (i + j) * stride[0];
885 if (G_UNLIKELY (line[0][j] > last[0]))
886 line[0][j] = last[0];
/* Chroma mapping depends on whether the plane shares luma's vertical
 * sampling (same rate: halve the row index) or is subsampled. */
888 if (v_samp[1] == v_samp[0]) {
889 line[1][j] = base[1] + ((i + j) / 2) * stride[1];
890 } else if (j < (v_samp[1] * DCTSIZE)) {
891 line[1][j] = base[1] + ((i / 2) + j) * stride[1];
893 if (G_UNLIKELY (line[1][j] > last[1]))
894 line[1][j] = last[1];
896 if (v_samp[2] == v_samp[0]) {
897 line[2][j] = base[2] + ((i + j) / 2) * stride[2];
898 } else if (j < (v_samp[2] * DCTSIZE)) {
899 line[2][j] = base[2] + ((i / 2) + j) * stride[2];
901 if (G_UNLIKELY (line[2][j] > last[2]))
902 line[2][j] = last[2];
905 lines = jpeg_read_raw_data (&dec->cinfo, line, v_samp[0] * DCTSIZE);
906 if (G_UNLIKELY (!lines)) {
907 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
912 format_not_supported:
/* NOTE(review): `ret` receives a GstFlowReturn from the
 * GST_VIDEO_DECODER_ERROR macro but is declared gboolean — consider
 * declaring it GstFlowReturn for type correctness. */
914 gboolean ret = GST_FLOW_OK;
916 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
917 (_("Failed to decode JPEG image")),
918 ("Unsupported subsampling schema: v_samp factors: %u %u %u", v_samp[0],
919 v_samp[1], v_samp[2]), ret);
/* Configure (or re-use) the decoder's output state for the given picture
 * dimensions and libjpeg colorspace: maps the JPEG colorspace to a
 * GstVideoFormat, skips renegotiation when nothing changed, sets BT.601
 * full-range colorimetry on the new state, then negotiates downstream. */
926 gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc)
928 GstVideoCodecState *outstate;
930 GstVideoFormat format;
934 format = GST_VIDEO_FORMAT_RGB;
937 format = GST_VIDEO_FORMAT_GRAY8;
940 format = GST_VIDEO_FORMAT_I420;
944 /* Compare to currently configured output state */
945 outstate = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (dec));
947 info = &outstate->info;
/* Fast path: identical caps — nothing to renegotiate. */
949 if (width == GST_VIDEO_INFO_WIDTH (info) &&
950 height == GST_VIDEO_INFO_HEIGHT (info) &&
951 format == GST_VIDEO_INFO_FORMAT (info)) {
952 gst_video_codec_state_unref (outstate);
955 gst_video_codec_state_unref (outstate);
959 gst_video_decoder_set_output_state (GST_VIDEO_DECODER (dec), format,
960 width, height, dec->input_state);
/* JPEG is full-range BT.601; transfer/primaries left unknown. */
967 outstate->info.colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;
968 outstate->info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
969 outstate->info.colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN;
970 outstate->info.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
974 gst_video_codec_state_unref (outstate);
976 gst_video_decoder_negotiate (GST_VIDEO_DECODER (dec));
978 GST_DEBUG_OBJECT (dec, "max_v_samp_factor=%d", dec->cinfo.max_v_samp_factor);
979 GST_DEBUG_OBJECT (dec, "max_h_samp_factor=%d", dec->cinfo.max_h_samp_factor);
983 gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec, GstVideoCodecFrame * frame)
985 GstFlowReturn ret = GST_FLOW_OK;
986 GstJpegDec *dec = (GstJpegDec *) bdec;
987 GstVideoFrame vframe;
991 gboolean need_unmap = TRUE;
992 GstVideoCodecState *state = NULL;
993 gboolean release_frame = TRUE;
995 dec->current_frame = frame;
996 gst_buffer_map (frame->input_buffer, &dec->current_frame_map, GST_MAP_READ);
998 dec->cinfo.src->next_input_byte = dec->current_frame_map.data;
999 dec->cinfo.src->bytes_in_buffer = dec->current_frame_map.size;
1001 if (setjmp (dec->jerr.setjmp_buffer)) {
1002 code = dec->jerr.pub.msg_code;
1004 if (code == JERR_INPUT_EOF) {
1005 GST_DEBUG ("jpeg input EOF error, we probably need more data");
1006 goto need_more_data;
1012 hdr_ok = jpeg_read_header (&dec->cinfo, TRUE);
1013 if (G_UNLIKELY (hdr_ok != JPEG_HEADER_OK)) {
1014 GST_WARNING_OBJECT (dec, "reading the header failed, %d", hdr_ok);
1017 GST_LOG_OBJECT (dec, "num_components=%d", dec->cinfo.num_components);
1018 GST_LOG_OBJECT (dec, "jpeg_color_space=%d", dec->cinfo.jpeg_color_space);
1020 if (!dec->cinfo.num_components || !dec->cinfo.comp_info)
1021 goto components_not_supported;
1023 r_h = dec->cinfo.comp_info[0].h_samp_factor;
1024 r_v = dec->cinfo.comp_info[0].v_samp_factor;
1026 GST_LOG_OBJECT (dec, "r_h = %d, r_v = %d", r_h, r_v);
1028 if (dec->cinfo.num_components > 3)
1029 goto components_not_supported;
1031 /* verify color space expectation to avoid going *boom* or bogus output */
1032 if (dec->cinfo.jpeg_color_space != JCS_YCbCr &&
1033 dec->cinfo.jpeg_color_space != JCS_GRAYSCALE &&
1034 dec->cinfo.jpeg_color_space != JCS_RGB)
1035 goto unsupported_colorspace;
1037 #ifndef GST_DISABLE_GST_DEBUG
1041 for (i = 0; i < dec->cinfo.num_components; ++i) {
1042 GST_LOG_OBJECT (dec, "[%d] h_samp_factor=%d, v_samp_factor=%d, cid=%d",
1043 i, dec->cinfo.comp_info[i].h_samp_factor,
1044 dec->cinfo.comp_info[i].v_samp_factor,
1045 dec->cinfo.comp_info[i].component_id);
1050 /* prepare for raw output */
1051 dec->cinfo.do_fancy_upsampling = FALSE;
1052 dec->cinfo.do_block_smoothing = FALSE;
1053 dec->cinfo.out_color_space = dec->cinfo.jpeg_color_space;
1054 dec->cinfo.dct_method = dec->idct_method;
1055 dec->cinfo.raw_data_out = TRUE;
1057 GST_LOG_OBJECT (dec, "starting decompress");
1058 guarantee_huff_tables (&dec->cinfo);
1059 if (!jpeg_start_decompress (&dec->cinfo)) {
1060 GST_WARNING_OBJECT (dec, "failed to start decompression cycle");
1063 /* sanity checks to get safe and reasonable output */
1064 switch (dec->cinfo.jpeg_color_space) {
1066 if (dec->cinfo.num_components != 1)
1067 goto invalid_yuvrgbgrayscale;
1070 if (dec->cinfo.num_components != 3 || dec->cinfo.max_v_samp_factor > 1 ||
1071 dec->cinfo.max_h_samp_factor > 1)
1072 goto invalid_yuvrgbgrayscale;
1075 if (dec->cinfo.num_components != 3 ||
1076 r_v > 2 || r_v < dec->cinfo.comp_info[0].v_samp_factor ||
1077 r_v < dec->cinfo.comp_info[1].v_samp_factor ||
1078 r_h < dec->cinfo.comp_info[0].h_samp_factor ||
1079 r_h < dec->cinfo.comp_info[1].h_samp_factor)
1080 goto invalid_yuvrgbgrayscale;
1083 g_assert_not_reached ();
1087 width = dec->cinfo.output_width;
1088 height = dec->cinfo.output_height;
1090 if (G_UNLIKELY (width < MIN_WIDTH || width > MAX_WIDTH ||
1091 height < MIN_HEIGHT || height > MAX_HEIGHT))
1094 gst_jpeg_dec_negotiate (dec, width, height, dec->cinfo.jpeg_color_space);
1096 state = gst_video_decoder_get_output_state (bdec);
1097 ret = gst_video_decoder_allocate_output_frame (bdec, frame);
1098 if (G_UNLIKELY (ret != GST_FLOW_OK))
1101 if (!gst_video_frame_map (&vframe, &state->info, frame->output_buffer,
1105 if (setjmp (dec->jerr.setjmp_buffer)) {
1106 code = dec->jerr.pub.msg_code;
1107 gst_video_frame_unmap (&vframe);
1111 GST_LOG_OBJECT (dec, "width %d, height %d", width, height);
1113 if (dec->cinfo.jpeg_color_space == JCS_RGB) {
1114 gst_jpeg_dec_decode_rgb (dec, &vframe);
1115 } else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
1116 gst_jpeg_dec_decode_grayscale (dec, &vframe);
1118 GST_LOG_OBJECT (dec, "decompressing (reqired scanline buffer height = %u)",
1119 dec->cinfo.rec_outbuf_height);
1121 /* For some widths jpeglib requires more horizontal padding than I420
1122 * provides. In those cases we need to decode into separate buffers and then
1123 * copy over the data into our final picture buffer, otherwise jpeglib might
1124 * write over the end of a line into the beginning of the next line,
1125 * resulting in blocky artifacts on the left side of the picture. */
1126 if (G_UNLIKELY (width % (dec->cinfo.max_h_samp_factor * DCTSIZE) != 0
1127 || dec->cinfo.comp_info[0].h_samp_factor != 2
1128 || dec->cinfo.comp_info[1].h_samp_factor != 1
1129 || dec->cinfo.comp_info[2].h_samp_factor != 1)) {
1130 GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
1131 "indirect decoding using extra buffer copy");
1132 gst_jpeg_dec_decode_indirect (dec, &vframe, r_v, r_h,
1133 dec->cinfo.num_components);
1135 ret = gst_jpeg_dec_decode_direct (dec, &vframe);
1137 if (G_UNLIKELY (ret != GST_FLOW_OK))
1138 goto decode_direct_failed;
1142 gst_video_frame_unmap (&vframe);
1144 if (setjmp (dec->jerr.setjmp_buffer)) {
1145 code = dec->jerr.pub.msg_code;
1149 GST_LOG_OBJECT (dec, "decompressing finished");
1150 jpeg_finish_decompress (&dec->cinfo);
1152 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1153 ret = gst_video_decoder_finish_frame (bdec, frame);
1154 release_frame = FALSE;
1162 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1165 gst_video_decoder_release_frame (bdec, frame);
1168 gst_video_codec_state_unref (state);
1175 GST_LOG_OBJECT (dec, "we need more data");
1182 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1183 (_("Failed to decode JPEG image")),
1184 ("Picture is too small or too big (%ux%u)", width, height), ret);
1185 ret = GST_FLOW_ERROR;
1190 gchar err_msg[JMSG_LENGTH_MAX];
1192 dec->jerr.pub.format_message ((j_common_ptr) (&dec->cinfo), err_msg);
1194 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1195 (_("Failed to decode JPEG image")), ("Decode error #%u: %s", code,
1198 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1199 gst_video_decoder_drop_frame (bdec, frame);
1200 release_frame = FALSE;
1202 jpeg_abort_decompress (&dec->cinfo);
1206 decode_direct_failed:
1208 /* already posted an error message */
1209 jpeg_abort_decompress (&dec->cinfo);
1214 const gchar *reason;
1216 reason = gst_flow_get_name (ret);
1218 GST_DEBUG_OBJECT (dec, "failed to alloc buffer, reason %s", reason);
1219 /* Reset for next time */
1220 jpeg_abort_decompress (&dec->cinfo);
1221 if (ret != GST_FLOW_EOS && ret != GST_FLOW_FLUSHING &&
1222 ret != GST_FLOW_NOT_LINKED) {
1223 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1224 (_("Failed to decode JPEG image")),
1225 ("Buffer allocation failed, reason: %s", reason), ret);
1226 jpeg_abort_decompress (&dec->cinfo);
1230 components_not_supported:
1232 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1233 (_("Failed to decode JPEG image")),
1234 ("number of components not supported: %d (max 3)",
1235 dec->cinfo.num_components), ret);
1236 jpeg_abort_decompress (&dec->cinfo);
1239 unsupported_colorspace:
1241 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1242 (_("Failed to decode JPEG image")),
1243 ("Picture has unknown or unsupported colourspace"), ret);
1244 jpeg_abort_decompress (&dec->cinfo);
1247 invalid_yuvrgbgrayscale:
1249 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1250 (_("Failed to decode JPEG image")),
1251 ("Picture is corrupt or unhandled YUV/RGB/grayscale layout"), ret);
1252 jpeg_abort_decompress (&dec->cinfo);
1258 gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query)
1260 GstBufferPool *pool = NULL;
1261 GstStructure *config;
1263 if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query))
1266 if (gst_query_get_n_allocation_pools (query) > 0)
1267 gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
1272 config = gst_buffer_pool_get_config (pool);
1273 if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
1274 gst_buffer_pool_config_add_option (config,
1275 GST_BUFFER_POOL_OPTION_VIDEO_META);
1277 gst_buffer_pool_set_config (pool, config);
1278 gst_object_unref (pool);
1284 gst_jpeg_dec_start (GstVideoDecoder * bdec)
1286 GstJpegDec *dec = (GstJpegDec *) bdec;
1288 dec->saw_header = FALSE;
1289 dec->parse_entropy_len = 0;
1290 dec->parse_resync = FALSE;
1292 gst_video_decoder_set_packetized (bdec, FALSE);
1298 gst_jpeg_dec_flush (GstVideoDecoder * bdec)
1300 GstJpegDec *dec = (GstJpegDec *) bdec;
1302 jpeg_abort_decompress (&dec->cinfo);
1303 dec->parse_entropy_len = 0;
1304 dec->parse_resync = FALSE;
1305 dec->saw_header = FALSE;
1311 gst_jpeg_dec_set_property (GObject * object, guint prop_id,
1312 const GValue * value, GParamSpec * pspec)
1316 dec = GST_JPEG_DEC (object);
1319 case PROP_IDCT_METHOD:
1320 dec->idct_method = g_value_get_enum (value);
1322 #ifndef GST_REMOVE_DEPRECATED
1323 case PROP_MAX_ERRORS:
1324 g_atomic_int_set (&dec->max_errors, g_value_get_int (value));
1328 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1334 gst_jpeg_dec_get_property (GObject * object, guint prop_id, GValue * value,
1339 dec = GST_JPEG_DEC (object);
1342 case PROP_IDCT_METHOD:
1343 g_value_set_enum (value, dec->idct_method);
1345 #ifndef GST_REMOVE_DEPRECATED
1346 case PROP_MAX_ERRORS:
1347 g_value_set_int (value, g_atomic_int_get (&dec->max_errors));
1351 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1357 gst_jpeg_dec_stop (GstVideoDecoder * bdec)
1359 GstJpegDec *dec = (GstJpegDec *) bdec;
1361 gst_jpeg_dec_free_buffers (dec);