2 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
3 * Copyright (C) <2009> Tim-Philipp Müller <tim centricular net>
4 * Copyright (C) 2012 Collabora Ltd.
5 * Author : Edward Hervey <edward@collabora.com>
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Library General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Library General Public License for more details.
17 * You should have received a copy of the GNU Library General Public
18 * License along with this library; if not, write to the
19 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
20 * Boston, MA 02110-1301, USA.
24 * SECTION:element-jpegdec
26 * Decodes jpeg images.
29 * <title>Example launch line</title>
31 * gst-launch-1.0 -v filesrc location=mjpeg.avi ! avidemux ! queue ! jpegdec ! videoconvert ! videoscale ! autovideosink
 * ]| The above pipeline decodes the mjpeg stream and renders it to the screen.
41 #include "gstjpegdec.h"
43 #include <gst/video/video.h>
44 #include <gst/video/gstvideometa.h>
45 #include <gst/video/gstvideopool.h>
46 #include "gst/gst-i18n-plugin.h"
/* JPEG stores image dimensions in 16-bit header fields, so this is the
 * hard upper bound on what a conforming stream can describe */
#define MAX_WIDTH 65535
#define MAX_HEIGHT 65535

/* Recover the GstJpegDec instance from a libjpeg cinfo pointer: our source
 * manager struct embeds a back-pointer to the owning decoder. */
#define CINFO_GET_JPEGDEC(cinfo_ptr) \
  (((struct GstJpegDecSourceMgr*)((cinfo_ptr)->src))->dec)

#define JPEG_DEFAULT_IDCT_METHOD JDCT_FASTEST
#define JPEG_DEFAULT_MAX_ERRORS 0
/* Source pad template: raw video in the formats we can produce directly
 * (planar I420, the packed RGB variants, or 8-bit grayscale). */
static GstStaticPadTemplate gst_jpeg_dec_src_pad_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
        ("{ I420, RGB, BGR, RGBx, xRGB, BGRx, xBGR, GRAY8 }"))
/* FIXME: sof-marker is for IJG libjpeg 8, should be different for 6.2 */
/* FIXME: add back "sof-marker = (int) { 0, 1, 2, 5, 6, 7, 9, 10, 13, 14 }"
 * once we have a parser and/or demuxer set caps properly */
/* Sink pad template: accepts any image/jpeg stream. */
static GstStaticPadTemplate gst_jpeg_dec_sink_pad_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_STATIC_CAPS ("image/jpeg")
GST_DEBUG_CATEGORY_STATIC (jpeg_dec_debug);
#define GST_CAT_DEFAULT jpeg_dec_debug
GST_DEBUG_CATEGORY_STATIC (GST_CAT_PERFORMANCE);

/* GObject property accessors */
static void gst_jpeg_dec_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_jpeg_dec_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);

/* GstVideoDecoder virtual method implementations */
static gboolean gst_jpeg_dec_set_format (GstVideoDecoder * dec,
    GstVideoCodecState * state);
static gboolean gst_jpeg_dec_start (GstVideoDecoder * bdec);
static gboolean gst_jpeg_dec_stop (GstVideoDecoder * bdec);
static gboolean gst_jpeg_dec_flush (GstVideoDecoder * bdec);
static GstFlowReturn gst_jpeg_dec_parse (GstVideoDecoder * bdec,
    GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos);
static GstFlowReturn gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec,
    GstVideoCodecFrame * frame);
static gboolean gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec,

#define gst_jpeg_dec_parent_class parent_class
G_DEFINE_TYPE (GstJpegDec, gst_jpeg_dec, GST_TYPE_VIDEO_DECODER);
/* GObject finalize: tear down the libjpeg decompressor state and drop the
 * cached input codec state before chaining up. */
gst_jpeg_dec_finalize (GObject * object)
  GstJpegDec *dec = GST_JPEG_DEC (object);

  jpeg_destroy_decompress (&dec->cinfo);
  if (dec->input_state)
    gst_video_codec_state_unref (dec->input_state);

  G_OBJECT_CLASS (parent_class)->finalize (object);
/* Class initialisation: install properties, pad templates, element
 * metadata and the GstVideoDecoder virtual methods. */
gst_jpeg_dec_class_init (GstJpegDecClass * klass)
  GObjectClass *gobject_class;
  GstElementClass *element_class;
  GstVideoDecoderClass *vdec_class;

  gobject_class = (GObjectClass *) klass;
  element_class = (GstElementClass *) klass;
  vdec_class = (GstVideoDecoderClass *) klass;

  parent_class = g_type_class_peek_parent (klass);

  gobject_class->finalize = gst_jpeg_dec_finalize;
  gobject_class->set_property = gst_jpeg_dec_set_property;
  gobject_class->get_property = gst_jpeg_dec_get_property;

  g_object_class_install_property (gobject_class, PROP_IDCT_METHOD,
      g_param_spec_enum ("idct-method", "IDCT Method",
          "The IDCT algorithm to use", GST_TYPE_IDCT_METHOD,
          JPEG_DEFAULT_IDCT_METHOD,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /**
   * GstJpegDec:max-errors:
   *
   * Error out after receiving N consecutive decoding errors
   * (-1 = never error out, 0 = automatic, 1 = fail on first error, etc.)
   *
   * Deprecated: 1.3.1: Property wasn't used internally
   */
#ifndef GST_REMOVE_DEPRECATED
  g_object_class_install_property (gobject_class, PROP_MAX_ERRORS,
      g_param_spec_int ("max-errors", "Maximum Consecutive Decoding Errors",
          "(Deprecated) Error out after receiving N consecutive decoding errors"
          " (-1 = never fail, 0 = automatic, 1 = fail on first error)",
          -1, G_MAXINT, JPEG_DEFAULT_MAX_ERRORS,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED));

  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_jpeg_dec_src_pad_template));
  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_jpeg_dec_sink_pad_template));
  gst_element_class_set_static_metadata (element_class, "JPEG image decoder",
      "Codec/Decoder/Image",
      "Decode images from JPEG format", "Wim Taymans <wim@fluendo.com>");

  vdec_class->start = gst_jpeg_dec_start;
  vdec_class->stop = gst_jpeg_dec_stop;
  vdec_class->flush = gst_jpeg_dec_flush;
  vdec_class->parse = gst_jpeg_dec_parse;
  vdec_class->set_format = gst_jpeg_dec_set_format;
  vdec_class->handle_frame = gst_jpeg_dec_handle_frame;
  vdec_class->decide_allocation = gst_jpeg_dec_decide_allocation;

  GST_DEBUG_CATEGORY_INIT (jpeg_dec_debug, "jpegdec", 0, "JPEG decoder");
  GST_DEBUG_CATEGORY_GET (GST_CAT_PERFORMANCE, "GST_PERFORMANCE");
/* libjpeg source manager callback: (re)point the source at the mapped
 * input buffer of the frame currently being decoded. */
gst_jpeg_dec_fill_input_buffer (j_decompress_ptr cinfo)
  dec = CINFO_GET_JPEGDEC (cinfo);
  g_return_val_if_fail (dec != NULL, FALSE);
  g_return_val_if_fail (dec->current_frame != NULL, FALSE);
  g_return_val_if_fail (dec->current_frame_map.data != NULL, FALSE);

  cinfo->src->next_input_byte = dec->current_frame_map.data;
  cinfo->src->bytes_in_buffer = dec->current_frame_map.size;
/* libjpeg source manager callback: nothing to do, the buffer is supplied
 * per-frame by fill_input_buffer. */
gst_jpeg_dec_init_source (j_decompress_ptr cinfo)
  GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "init_source");
/* libjpeg source manager callback: advance the read pointer within the
 * current in-memory buffer. */
gst_jpeg_dec_skip_input_data (j_decompress_ptr cinfo, glong num_bytes)
  GstJpegDec *dec = CINFO_GET_JPEGDEC (cinfo);

  GST_DEBUG_OBJECT (dec, "skip %ld bytes", num_bytes);

  /* num_bytes > 0 is checked first so the signed/unsigned comparison
   * against bytes_in_buffer (a size_t) is safe */
  if (num_bytes > 0 && cinfo->src->bytes_in_buffer >= num_bytes) {
    cinfo->src->next_input_byte += (size_t) num_bytes;
    cinfo->src->bytes_in_buffer -= (size_t) num_bytes;
/* libjpeg source manager callback invoked when the decoder tries to
 * resynchronise after a corrupt restart marker. */
gst_jpeg_dec_resync_to_restart (j_decompress_ptr cinfo, gint desired)
  GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "resync_to_start");
/* libjpeg source manager callback: nothing to release, the mapped buffer
 * is owned by the frame being decoded. */
gst_jpeg_dec_term_source (j_decompress_ptr cinfo)
  GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "term_source");
/* libjpeg error manager override: suppress libjpeg's default stderr
 * output; messages are surfaced through GStreamer instead. */
gst_jpeg_dec_my_output_message (j_common_ptr cinfo)
  return;                       /* do nothing */
/* libjpeg error manager override: swallow warning/trace messages
 * (logging left disabled, see commented call below). */
gst_jpeg_dec_my_emit_message (j_common_ptr cinfo, int msg_level)
  /* GST_LOG_OBJECT (CINFO_GET_JPEGDEC (&cinfo), "msg_level=%d", msg_level); */
/* libjpeg error manager override: instead of calling exit(), longjmp back
 * to the setjmp point in handle_frame so fatal errors become recoverable. */
gst_jpeg_dec_my_error_exit (j_common_ptr cinfo)
  struct GstJpegDecErrorMgr *err_mgr = (struct GstJpegDecErrorMgr *) cinfo->err;

  (*cinfo->err->output_message) (cinfo);
  longjmp (err_mgr->setjmp_buffer, 1);
/* Instance init: set up the libjpeg decompressor with our custom error
 * handling and buffer-backed source manager, and initialise properties. */
gst_jpeg_dec_init (GstJpegDec * dec)
  GST_DEBUG ("initializing");

  /* set up jpeglib with our error routing (no-op output, longjmp on fatal) */
  memset (&dec->cinfo, 0, sizeof (dec->cinfo));
  memset (&dec->jerr, 0, sizeof (dec->jerr));
  dec->cinfo.err = jpeg_std_error (&dec->jerr.pub);
  dec->jerr.pub.output_message = gst_jpeg_dec_my_output_message;
  dec->jerr.pub.emit_message = gst_jpeg_dec_my_emit_message;
  dec->jerr.pub.error_exit = gst_jpeg_dec_my_error_exit;

  jpeg_create_decompress (&dec->cinfo);

  /* input is fed from the mapped GstBuffer, not a stdio stream */
  dec->cinfo.src = (struct jpeg_source_mgr *) &dec->jsrc;
  dec->cinfo.src->init_source = gst_jpeg_dec_init_source;
  dec->cinfo.src->fill_input_buffer = gst_jpeg_dec_fill_input_buffer;
  dec->cinfo.src->skip_input_data = gst_jpeg_dec_skip_input_data;
  dec->cinfo.src->resync_to_restart = gst_jpeg_dec_resync_to_restart;
  dec->cinfo.src->term_source = gst_jpeg_dec_term_source;

  /* init properties */
  dec->idct_method = JPEG_DEFAULT_IDCT_METHOD;
  dec->max_errors = JPEG_DEFAULT_MAX_ERRORS;

  gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (dec));
/* TRUE if the given JPEG marker (SOS 0xda, or restart markers RST0-RST7
 * 0xd0..0xd7) is followed by entropy-coded data with no length field. */
static inline gboolean
gst_jpeg_dec_parse_tag_has_entropy_segment (guint8 tag)
  if (tag == 0xda || (tag >= 0xd0 && tag <= 0xd7))
/* GstVideoDecoder::parse — scan the adapter for JPEG marker segments,
 * accumulating bytes until a complete SOI..EOI image is available.
 * Returns NEED_DATA until a full frame can be handed to the base class. */
gst_jpeg_dec_parse (GstVideoDecoder * bdec, GstVideoCodecFrame * frame,
    GstAdapter * adapter, gboolean at_eos)
  gint offset = 0, noffset;
  GstJpegDec *dec = (GstJpegDec *) bdec;

  /* every JPEG image is independently decodable */
  GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);

  /* FIXME : The overhead of using scan_uint32 is massive */

  size = gst_adapter_available (adapter);
  GST_DEBUG ("Parsing jpeg image data (%u bytes)", size);

    GST_DEBUG ("Flushing all data out");

    /* If we have leftover data, throw it away */
    if (!dec->saw_header)
    goto have_full_frame;

  if (!dec->saw_header) {
    /* we expect at least 4 bytes, first of which start marker */
        gst_adapter_masked_scan_uint32 (adapter, 0xffff0000, 0xffd80000, 0,

    GST_DEBUG ("ret:%d", ret);
      /* drop garbage before the SOI marker */
      gst_adapter_flush (adapter, ret);
    dec->saw_header = TRUE;

    GST_DEBUG ("offset:%d, size:%d", offset, size);
        gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
        offset, size - offset, &value);
    /* lost sync if 0xff marker not where expected */
    if ((resync = (noffset != offset))) {
      GST_DEBUG ("Lost sync at 0x%08x, resyncing", offset + 2);
    /* may have marker, but could have been resyncing */
    resync = resync || dec->parse_resync;
    /* Skip over extra 0xff */
    while ((noffset >= 0) && ((value & 0xff) == 0xff)) {
          gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
          noffset, size - noffset, &value);
    /* enough bytes left for marker? (we need 0xNN after the 0xff) */
      GST_DEBUG ("at end of input and no EOI marker found, need more data");
    /* now lock on the marker we found */
    value = value & 0xff;
      GST_DEBUG ("0x%08x: EOI marker", offset + 2);
      /* clear parse state */
      dec->saw_header = FALSE;
      dec->parse_resync = FALSE;
      goto have_full_frame;
      /* Skip this frame if we found another SOI marker */
      GST_DEBUG ("0x%08x: SOI marker before EOI, skipping", offset + 2);
      dec->parse_resync = FALSE;
    /* restart markers (RST0..RST7) carry no length field */
    if (value >= 0xd0 && value <= 0xd7)
    /* peek tag and subsequent length */
    if (offset + 2 + 4 > size)
      gst_adapter_masked_scan_uint32_peek (adapter, 0x0, 0x0, offset + 2, 4,
    /* segment length is a 16-bit big-endian field */
    frame_len = frame_len & 0xffff;
    GST_DEBUG ("0x%08x: tag %02x, frame_len=%u", offset + 2, value, frame_len);
    /* the frame length includes the 2 bytes for the length; here we want at
     * least 2 more bytes at the end for an end marker */
    if (offset + 2 + 2 + frame_len + 2 > size) {
    if (gst_jpeg_dec_parse_tag_has_entropy_segment (value)) {
      guint eseglen = dec->parse_entropy_len;

      GST_DEBUG ("0x%08x: finding entropy segment length (eseglen:%d)",
          offset + 2, eseglen);
      if (size < offset + 2 + frame_len + eseglen)
      noffset = offset + 2 + frame_len + dec->parse_entropy_len;
        GST_DEBUG ("noffset:%d, size:%d, size - noffset:%d",
            noffset, size, size - noffset);
        noffset = gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00,
            0x0000ff00, noffset, size - noffset, &value);
          /* remember how far we scanned so the next call resumes here */
          dec->parse_entropy_len = size - offset - 4 - frame_len - 2;
        /* a 0xff00 sequence inside entropy data is a stuffed byte, skip it */
        if ((value & 0xff) != 0x00) {
          eseglen = noffset - offset - frame_len - 2;
      dec->parse_entropy_len = 0;
      frame_len += eseglen;
      GST_DEBUG ("entropy segment length=%u => frame_len=%u", eseglen,
      /* check if we will still be in sync if we interpret
       * this as a sync point and skip this frame */
      noffset = offset + frame_len + 2;
      noffset = gst_adapter_masked_scan_uint32 (adapter, 0x0000ff00, 0x0000ff00,
        /* ignore and continue resyncing until we hit the end
         * of our data or find a sync point that looks okay */
      GST_DEBUG ("found sync at 0x%x", offset + 2);
    /* Add current data to output buffer */
    toadd += frame_len + 2;
    offset += frame_len + 2;

    gst_video_decoder_add_to_frame (bdec, toadd);
  return GST_VIDEO_DECODER_FLOW_NEED_DATA;

    /* complete image accumulated: hand it to the base class */
    gst_video_decoder_add_to_frame (bdec, toadd);
  GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
  return gst_video_decoder_have_frame (bdec);

  gst_adapter_flush (adapter, size);
/* shamelessly ripped from jpegutils.c in mjpegtools */
/* Install a Huffman table into the decompressor from raw bits/val arrays,
 * allocating the table slot if needed. */
add_huff_table (j_decompress_ptr dinfo,
    JHUFF_TBL ** htblptr, const UINT8 * bits, const UINT8 * val)
/* Define a Huffman table */
  if (*htblptr == NULL)
    *htblptr = jpeg_alloc_huff_table ((j_common_ptr) dinfo);

  /* Copy the number-of-symbols-of-each-code-length counts */
  memcpy ((*htblptr)->bits, bits, sizeof ((*htblptr)->bits));

  /* Validate the counts. We do this here mainly so we can copy the right
   * number of symbols from the val[] array, without risking marching off
   * the end of memory. jchuff.c will do a more thorough test later.
   */
  for (len = 1; len <= 16; len++)
    nsymbols += bits[len];
  if (nsymbols < 1 || nsymbols > 256)
    g_error ("jpegutils.c: add_huff_table failed badly. ");

  memcpy ((*htblptr)->huffval, val, nsymbols * sizeof (UINT8));
/* Install the four standard Huffman tables (luma/chroma x DC/AC) for
 * streams that did not define any of their own. */
std_huff_tables (j_decompress_ptr dinfo)
/* Set up the standard Huffman tables (cf. JPEG standard section K.3) */
/* IMPORTANT: these are only valid for 8-bit data precision! */
  static const UINT8 bits_dc_luminance[17] =
      { /* 0-base */ 0, 0, 1, 5, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0 };
  static const UINT8 val_dc_luminance[] =
      { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };

  static const UINT8 bits_dc_chrominance[17] =
      { /* 0-base */ 0, 0, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0 };
  static const UINT8 val_dc_chrominance[] =
      { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };

  static const UINT8 bits_ac_luminance[17] =
      { /* 0-base */ 0, 0, 2, 1, 3, 3, 2, 4, 3, 5, 5, 4, 4, 0, 0, 1, 0x7d };
  static const UINT8 val_ac_luminance[] =
      { 0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12,
    0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07,
    0x22, 0x71, 0x14, 0x32, 0x81, 0x91, 0xa1, 0x08,
    0x23, 0x42, 0xb1, 0xc1, 0x15, 0x52, 0xd1, 0xf0,
    0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16,
    0x17, 0x18, 0x19, 0x1a, 0x25, 0x26, 0x27, 0x28,
    0x29, 0x2a, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39,
    0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49,
    0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59,
    0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69,
    0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79,
    0x7a, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89,
    0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98,
    0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
    0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6,
    0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4, 0xc5,
    0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4,
    0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xe1, 0xe2,
    0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea,
    0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,

  static const UINT8 bits_ac_chrominance[17] =
      { /* 0-base */ 0, 0, 2, 1, 2, 4, 4, 3, 4, 7, 5, 4, 4, 0, 1, 2, 0x77 };
  static const UINT8 val_ac_chrominance[] =
      { 0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21,
    0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71,
    0x13, 0x22, 0x32, 0x81, 0x08, 0x14, 0x42, 0x91,
    0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33, 0x52, 0xf0,
    0x15, 0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34,
    0xe1, 0x25, 0xf1, 0x17, 0x18, 0x19, 0x1a, 0x26,
    0x27, 0x28, 0x29, 0x2a, 0x35, 0x36, 0x37, 0x38,
    0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
    0x49, 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58,
    0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
    0x69, 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78,
    0x79, 0x7a, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
    0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96,
    0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5,
    0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4,
    0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3,
    0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2,
    0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda,
    0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9,
    0xea, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,

  add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[0],
      bits_dc_luminance, val_dc_luminance);
  add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[0],
      bits_ac_luminance, val_ac_luminance);
  add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[1],
      bits_dc_chrominance, val_dc_chrominance);
  add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[1],
      bits_ac_chrominance, val_ac_chrominance);
/* Some streams (notably MJPEG) omit Huffman tables entirely; install the
 * standard tables if none at all were defined. */
guarantee_huff_tables (j_decompress_ptr dinfo)
  if ((dinfo->dc_huff_tbl_ptrs[0] == NULL) &&
      (dinfo->dc_huff_tbl_ptrs[1] == NULL) &&
      (dinfo->ac_huff_tbl_ptrs[0] == NULL) &&
      (dinfo->ac_huff_tbl_ptrs[1] == NULL)) {
    GST_DEBUG ("Generating standard Huffman tables for this frame.");
    std_huff_tables (dinfo);
/* GstVideoDecoder::set_format — choose packetized mode based on the input
 * segment format and remember the new input codec state. */
gst_jpeg_dec_set_format (GstVideoDecoder * dec, GstVideoCodecState * state)
  GstJpegDec *jpeg = GST_JPEG_DEC (dec);

  /* in TIME format upstream delivers parsed/whole images per buffer */
  if (dec->input_segment.format == GST_FORMAT_TIME)
    gst_video_decoder_set_packetized (dec, TRUE);
    gst_video_decoder_set_packetized (dec, FALSE);

  if (jpeg->input_state)
    gst_video_codec_state_unref (jpeg->input_state);
  jpeg->input_state = gst_video_codec_state_ref (state);
/* Copy 'len' pixels while downsampling horizontally by 2: takes every
 * second source pixel. */
hresamplecpy1 (guint8 * dest, const guint8 * src, guint len)
  for (i = 0; i < len; ++i) {
    /* equivalent to: dest[i] = src[i << 1] */
/* Free the 16 per-component temporary row buffers and reset the
 * allocated-width bookkeeping. */
gst_jpeg_dec_free_buffers (GstJpegDec * dec)
  for (i = 0; i < 16; i++) {
    g_free (dec->idr_y[i]);
    g_free (dec->idr_u[i]);
    g_free (dec->idr_v[i]);
    dec->idr_y[i] = NULL;
    dec->idr_u[i] = NULL;
    dec->idr_v[i] = NULL;

  dec->idr_width_allocated = 0;
/* (Re)allocate the 16 temporary row buffers to maxrowbytes per row.
 * No-op when the currently allocated width already matches; returns
 * FALSE on allocation failure. */
static inline gboolean
gst_jpeg_dec_ensure_buffers (GstJpegDec * dec, guint maxrowbytes)
  if (G_LIKELY (dec->idr_width_allocated == maxrowbytes))

  /* FIXME: maybe just alloc one or three blocks altogether? */
  for (i = 0; i < 16; i++) {
    dec->idr_y[i] = g_try_realloc (dec->idr_y[i], maxrowbytes);
    dec->idr_u[i] = g_try_realloc (dec->idr_u[i], maxrowbytes);
    dec->idr_v[i] = g_try_realloc (dec->idr_v[i], maxrowbytes);

    if (G_UNLIKELY (!dec->idr_y[i] || !dec->idr_u[i] || !dec->idr_v[i])) {
      GST_WARNING_OBJECT (dec, "out of memory, i=%d, bytes=%u", i, maxrowbytes);

  dec->idr_width_allocated = maxrowbytes;
  GST_LOG_OBJECT (dec, "allocated temp memory, %u bytes/row", maxrowbytes);
/* Decode a grayscale JPEG through intermediate row buffers, then copy
 * pixel-by-pixel into the output frame honouring its pixel/row strides. */
gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, GstVideoFrame * frame)
  guchar **scanarray[1] = { rows };
  gint pstride, rstride;

  GST_DEBUG_OBJECT (dec, "indirect decoding of grayscale");

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))

  base[0] = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);

  /* point the scanline array at our temp rows */
  memcpy (rows, dec->idr_y, 16 * sizeof (gpointer));

    lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
    if (G_LIKELY (lines > 0)) {
      for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
        for (k = 0; k < width; k++) {
          base[0][p] = rows[j][k];
      GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Decode an RGB JPEG through intermediate per-component row buffers and
 * interleave R/G/B into the packed output frame. */
gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame)
  guchar *r_rows[16], *g_rows[16], *b_rows[16];
  guchar **scanarray[3] = { r_rows, g_rows, b_rows };
  guint pstride, rstride;

  GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))

  for (i = 0; i < 3; i++)
    base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);

  pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);

  /* point the scanline arrays at our temp rows */
  memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
  memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
  memcpy (b_rows, dec->idr_v, 16 * sizeof (gpointer));

    lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
    if (G_LIKELY (lines > 0)) {
      for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
        for (k = 0; k < width; k++) {
          base[0][p] = r_rows[j][k];
          base[1][p] = g_rows[j][k];
          base[2][p] = b_rows[j][k];
      GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
772 gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame, gint r_v,
775 guchar *y_rows[16], *u_rows[16], *v_rows[16];
776 guchar **scanarray[3] = { y_rows, u_rows, v_rows };
779 guchar *base[3], *last[3];
783 GST_DEBUG_OBJECT (dec,
784 "unadvantageous width or r_h, taking slow route involving memcpy");
786 width = GST_VIDEO_FRAME_WIDTH (frame);
787 height = GST_VIDEO_FRAME_HEIGHT (frame);
789 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
792 for (i = 0; i < 3; i++) {
793 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
794 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
795 /* make sure we don't make jpeglib write beyond our buffer,
796 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
797 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
798 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
801 memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer));
802 memcpy (u_rows, dec->idr_u, 16 * sizeof (gpointer));
803 memcpy (v_rows, dec->idr_v, 16 * sizeof (gpointer));
805 /* fill chroma components for grayscale */
807 GST_DEBUG_OBJECT (dec, "grayscale, filling chroma");
808 for (i = 0; i < 16; i++) {
809 memset (u_rows[i], GST_ROUND_UP_32 (width), 0x80);
810 memset (v_rows[i], GST_ROUND_UP_32 (width), 0x80);
814 for (i = 0; i < height; i += r_v * DCTSIZE) {
815 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, r_v * DCTSIZE);
816 if (G_LIKELY (lines > 0)) {
817 for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) {
818 if (G_LIKELY (base[0] <= last[0])) {
819 memcpy (base[0], y_rows[j], stride[0]);
820 base[0] += stride[0];
823 if (G_LIKELY (base[0] <= last[0])) {
824 memcpy (base[0], y_rows[j + 1], stride[0]);
825 base[0] += stride[0];
828 if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) {
830 memcpy (base[1], u_rows[k], stride[1]);
831 memcpy (base[2], v_rows[k], stride[2]);
832 } else if (r_h == 1) {
833 hresamplecpy1 (base[1], u_rows[k], stride[1]);
834 hresamplecpy1 (base[2], v_rows[k], stride[2]);
836 /* FIXME: implement (at least we avoid crashing by doing nothing) */
840 if (r_v == 2 || (k & 1) != 0) {
841 base[1] += stride[1];
842 base[2] += stride[2];
846 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Let jpeglib write decoded scanlines straight into the output frame by
 * building per-component scanline pointer arrays — no intermediate copy. */
gst_jpeg_dec_decode_direct (GstJpegDec * dec, GstVideoFrame * frame)
  guchar **line[3];             /* the jpeg line buffer */
  guchar *y[4 * DCTSIZE] = { NULL, };   /* alloc enough for the lines */
  guchar *u[4 * DCTSIZE] = { NULL, };   /* r_v will be <4 */
  guchar *v[4 * DCTSIZE] = { NULL, };
  gint lines, v_samp[3];
  guchar *base[3], *last[3];

  v_samp[0] = dec->cinfo.comp_info[0].v_samp_factor;
  v_samp[1] = dec->cinfo.comp_info[1].v_samp_factor;
  v_samp[2] = dec->cinfo.comp_info[2].v_samp_factor;

  /* only vertical sampling factors up to 2 are handled below */
  if (G_UNLIKELY (v_samp[0] > 2 || v_samp[1] > 2 || v_samp[2] > 2))
    goto format_not_supported;

  height = GST_VIDEO_FRAME_HEIGHT (frame);

  for (i = 0; i < 3; i++) {
    base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
    stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
    /* make sure we don't make jpeglib write beyond our buffer,
     * which might happen if (height % (r_v*DCTSIZE)) != 0 */
    last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
        (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));

  /* let jpeglib decode directly into our final buffer */
  GST_DEBUG_OBJECT (dec, "decoding directly into output buffer");

  for (i = 0; i < height; i += v_samp[0] * DCTSIZE) {
    for (j = 0; j < (v_samp[0] * DCTSIZE); ++j) {
      line[0][j] = base[0] + (i + j) * stride[0];
      /* clamp to the last valid line so partial MCUs can't write past
       * the end of the plane */
      if (G_UNLIKELY (line[0][j] > last[0]))
        line[0][j] = last[0];
      if (v_samp[1] == v_samp[0]) {
        line[1][j] = base[1] + ((i + j) / 2) * stride[1];
      } else if (j < (v_samp[1] * DCTSIZE)) {
        line[1][j] = base[1] + ((i / 2) + j) * stride[1];
      if (G_UNLIKELY (line[1][j] > last[1]))
        line[1][j] = last[1];
      if (v_samp[2] == v_samp[0]) {
        line[2][j] = base[2] + ((i + j) / 2) * stride[2];
      } else if (j < (v_samp[2] * DCTSIZE)) {
        line[2][j] = base[2] + ((i / 2) + j) * stride[2];
      if (G_UNLIKELY (line[2][j] > last[2]))
        line[2][j] = last[2];

    lines = jpeg_read_raw_data (&dec->cinfo, line, v_samp[0] * DCTSIZE);
    if (G_UNLIKELY (!lines)) {
      GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");

format_not_supported:
  gboolean ret = GST_FLOW_OK;

  GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
      (_("Failed to decode JPEG image")),
      ("Unsupported subsampling schema: v_samp factors: %u %u %u", v_samp[0],
          v_samp[1], v_samp[2]), ret);
/* Configure/refresh the downstream output state: pick the raw video
 * format matching the JPEG colourspace and (re)negotiate if dimensions
 * or format changed. */
gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc)
  GstVideoCodecState *outstate;
  GstVideoFormat format;

      format = GST_VIDEO_FORMAT_RGB;
      format = GST_VIDEO_FORMAT_GRAY8;
      format = GST_VIDEO_FORMAT_I420;

  /* Compare to currently configured output state */
  outstate = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (dec));
    info = &outstate->info;

    /* nothing changed — keep the existing output state */
    if (width == GST_VIDEO_INFO_WIDTH (info) &&
        height == GST_VIDEO_INFO_HEIGHT (info) &&
        format == GST_VIDEO_INFO_FORMAT (info)) {
      gst_video_codec_state_unref (outstate);
    gst_video_codec_state_unref (outstate);

      gst_video_decoder_set_output_state (GST_VIDEO_DECODER (dec), format,
      width, height, dec->input_state);

    /* JPEG data is full-range BT.601 */
    outstate->info.colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;
    outstate->info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
    outstate->info.colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN;
    outstate->info.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;

  gst_video_codec_state_unref (outstate);

  gst_video_decoder_negotiate (GST_VIDEO_DECODER (dec));

  GST_DEBUG_OBJECT (dec, "max_v_samp_factor=%d", dec->cinfo.max_v_samp_factor);
  GST_DEBUG_OBJECT (dec, "max_h_samp_factor=%d", dec->cinfo.max_h_samp_factor);
991 gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec, GstVideoCodecFrame * frame)
993 GstFlowReturn ret = GST_FLOW_OK;
994 GstJpegDec *dec = (GstJpegDec *) bdec;
995 GstVideoFrame vframe;
999 gboolean need_unmap = TRUE;
1000 GstVideoCodecState *state = NULL;
1001 gboolean release_frame = TRUE;
1003 dec->current_frame = frame;
1004 gst_buffer_map (frame->input_buffer, &dec->current_frame_map, GST_MAP_READ);
1005 gst_jpeg_dec_fill_input_buffer (&dec->cinfo);
1007 if (setjmp (dec->jerr.setjmp_buffer)) {
1008 code = dec->jerr.pub.msg_code;
1010 if (code == JERR_INPUT_EOF) {
1011 GST_DEBUG ("jpeg input EOF error, we probably need more data");
1012 goto need_more_data;
1018 hdr_ok = jpeg_read_header (&dec->cinfo, TRUE);
1019 if (G_UNLIKELY (hdr_ok != JPEG_HEADER_OK)) {
1020 GST_WARNING_OBJECT (dec, "reading the header failed, %d", hdr_ok);
1023 GST_LOG_OBJECT (dec, "num_components=%d", dec->cinfo.num_components);
1024 GST_LOG_OBJECT (dec, "jpeg_color_space=%d", dec->cinfo.jpeg_color_space);
1026 if (!dec->cinfo.num_components || !dec->cinfo.comp_info)
1027 goto components_not_supported;
1029 r_h = dec->cinfo.comp_info[0].h_samp_factor;
1030 r_v = dec->cinfo.comp_info[0].v_samp_factor;
1032 GST_LOG_OBJECT (dec, "r_h = %d, r_v = %d", r_h, r_v);
1034 if (dec->cinfo.num_components > 3)
1035 goto components_not_supported;
1037 /* verify color space expectation to avoid going *boom* or bogus output */
1038 if (dec->cinfo.jpeg_color_space != JCS_YCbCr &&
1039 dec->cinfo.jpeg_color_space != JCS_GRAYSCALE &&
1040 dec->cinfo.jpeg_color_space != JCS_RGB)
1041 goto unsupported_colorspace;
1043 #ifndef GST_DISABLE_GST_DEBUG
1047 for (i = 0; i < dec->cinfo.num_components; ++i) {
1048 GST_LOG_OBJECT (dec, "[%d] h_samp_factor=%d, v_samp_factor=%d, cid=%d",
1049 i, dec->cinfo.comp_info[i].h_samp_factor,
1050 dec->cinfo.comp_info[i].v_samp_factor,
1051 dec->cinfo.comp_info[i].component_id);
1056 /* prepare for raw output */
1057 dec->cinfo.do_fancy_upsampling = FALSE;
1058 dec->cinfo.do_block_smoothing = FALSE;
1059 dec->cinfo.out_color_space = dec->cinfo.jpeg_color_space;
1060 dec->cinfo.dct_method = dec->idct_method;
1061 dec->cinfo.raw_data_out = TRUE;
1063 GST_LOG_OBJECT (dec, "starting decompress");
1064 guarantee_huff_tables (&dec->cinfo);
1065 if (!jpeg_start_decompress (&dec->cinfo)) {
1066 GST_WARNING_OBJECT (dec, "failed to start decompression cycle");
1069 /* sanity checks to get safe and reasonable output */
1070 switch (dec->cinfo.jpeg_color_space) {
1072 if (dec->cinfo.num_components != 1)
1073 goto invalid_yuvrgbgrayscale;
1076 if (dec->cinfo.num_components != 3 || dec->cinfo.max_v_samp_factor > 1 ||
1077 dec->cinfo.max_h_samp_factor > 1)
1078 goto invalid_yuvrgbgrayscale;
1081 if (dec->cinfo.num_components != 3 ||
1082 r_v > 2 || r_v < dec->cinfo.comp_info[0].v_samp_factor ||
1083 r_v < dec->cinfo.comp_info[1].v_samp_factor ||
1084 r_h < dec->cinfo.comp_info[0].h_samp_factor ||
1085 r_h < dec->cinfo.comp_info[1].h_samp_factor)
1086 goto invalid_yuvrgbgrayscale;
1089 g_assert_not_reached ();
1093 width = dec->cinfo.output_width;
1094 height = dec->cinfo.output_height;
1096 if (G_UNLIKELY (width < MIN_WIDTH || width > MAX_WIDTH ||
1097 height < MIN_HEIGHT || height > MAX_HEIGHT))
1100 gst_jpeg_dec_negotiate (dec, width, height, dec->cinfo.jpeg_color_space);
1102 state = gst_video_decoder_get_output_state (bdec);
1103 ret = gst_video_decoder_allocate_output_frame (bdec, frame);
1104 if (G_UNLIKELY (ret != GST_FLOW_OK))
1107 if (!gst_video_frame_map (&vframe, &state->info, frame->output_buffer,
1111 GST_LOG_OBJECT (dec, "width %d, height %d", width, height);
1113 if (dec->cinfo.jpeg_color_space == JCS_RGB) {
1114 gst_jpeg_dec_decode_rgb (dec, &vframe);
1115 } else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
1116 gst_jpeg_dec_decode_grayscale (dec, &vframe);
1118 GST_LOG_OBJECT (dec, "decompressing (reqired scanline buffer height = %u)",
1119 dec->cinfo.rec_outbuf_height);
1121 /* For some widths jpeglib requires more horizontal padding than I420
1122 * provides. In those cases we need to decode into separate buffers and then
1123 * copy over the data into our final picture buffer, otherwise jpeglib might
1124 * write over the end of a line into the beginning of the next line,
1125 * resulting in blocky artifacts on the left side of the picture. */
1126 if (G_UNLIKELY (width % (dec->cinfo.max_h_samp_factor * DCTSIZE) != 0
1127 || dec->cinfo.comp_info[0].h_samp_factor != 2
1128 || dec->cinfo.comp_info[1].h_samp_factor != 1
1129 || dec->cinfo.comp_info[2].h_samp_factor != 1)) {
1130 GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
1131 "indirect decoding using extra buffer copy");
1132 gst_jpeg_dec_decode_indirect (dec, &vframe, r_v, r_h,
1133 dec->cinfo.num_components);
1135 ret = gst_jpeg_dec_decode_direct (dec, &vframe);
1137 if (G_UNLIKELY (ret != GST_FLOW_OK))
1138 goto decode_direct_failed;
1142 gst_video_frame_unmap (&vframe);
1144 GST_LOG_OBJECT (dec, "decompressing finished");
1145 jpeg_finish_decompress (&dec->cinfo);
1147 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1148 ret = gst_video_decoder_finish_frame (bdec, frame);
1149 release_frame = FALSE;
1157 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1160 gst_video_decoder_release_frame (bdec, frame);
1163 gst_video_codec_state_unref (state);
1170 GST_LOG_OBJECT (dec, "we need more data");
1177 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1178 (_("Failed to decode JPEG image")),
1179 ("Picture is too small or too big (%ux%u)", width, height), ret);
1180 ret = GST_FLOW_ERROR;
1185 gchar err_msg[JMSG_LENGTH_MAX];
1187 dec->jerr.pub.format_message ((j_common_ptr) (&dec->cinfo), err_msg);
1189 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1190 (_("Failed to decode JPEG image")), ("Decode error #%u: %s", code,
1193 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1194 gst_video_decoder_drop_frame (bdec, frame);
1195 release_frame = FALSE;
1197 jpeg_abort_decompress (&dec->cinfo);
1201 decode_direct_failed:
1203 /* already posted an error message */
1204 jpeg_abort_decompress (&dec->cinfo);
1209 const gchar *reason;
1211 reason = gst_flow_get_name (ret);
1213 GST_DEBUG_OBJECT (dec, "failed to alloc buffer, reason %s", reason);
1214 /* Reset for next time */
1215 jpeg_abort_decompress (&dec->cinfo);
1216 if (ret != GST_FLOW_EOS && ret != GST_FLOW_FLUSHING &&
1217 ret != GST_FLOW_NOT_LINKED) {
1218 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1219 (_("Failed to decode JPEG image")),
1220 ("Buffer allocation failed, reason: %s", reason), ret);
1221 jpeg_abort_decompress (&dec->cinfo);
1225 components_not_supported:
1227 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1228 (_("Failed to decode JPEG image")),
1229 ("number of components not supported: %d (max 3)",
1230 dec->cinfo.num_components), ret);
1231 jpeg_abort_decompress (&dec->cinfo);
1234 unsupported_colorspace:
1236 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1237 (_("Failed to decode JPEG image")),
1238 ("Picture has unknown or unsupported colourspace"), ret);
1239 jpeg_abort_decompress (&dec->cinfo);
1242 invalid_yuvrgbgrayscale:
1244 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1245 (_("Failed to decode JPEG image")),
1246 ("Picture is corrupt or unhandled YUV/RGB/grayscale layout"), ret);
1247 jpeg_abort_decompress (&dec->cinfo);
/* GstVideoDecoder::decide_allocation vmethod: pick the downstream buffer
 * pool for decoded frames.  Chains up to the base class first, then enables
 * GST_BUFFER_POOL_OPTION_VIDEO_META on the chosen pool when the allocation
 * query advertises GstVideoMeta support.
 * NOTE(review): several original lines are elided in this view (the failure
 * return after the chain-up, braces, and presumably a NULL-pool guard
 * between the query parse and gst_buffer_pool_get_config) -- confirm
 * against the full file before modifying. */
1253 gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query)
1255   GstBufferPool *pool = NULL;
1256   GstStructure *config;
  /* Let the base class make the default allocation decision first. */
1258   if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query))
  /* Take the first pool proposed in the query, if any (returns a ref). */
1261   if (gst_query_get_n_allocation_pools (query) > 0)
1262     gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
1267   config = gst_buffer_pool_get_config (pool);
  /* When downstream supports GstVideoMeta, request it on the pool so
   * buffers can carry per-buffer stride/offset information. */
1268   if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
1269     gst_buffer_pool_config_add_option (config,
1270         GST_BUFFER_POOL_OPTION_VIDEO_META);
1272   gst_buffer_pool_set_config (pool, config);
  /* Drop the ref taken by gst_query_parse_nth_allocation_pool(). */
1273   gst_object_unref (pool);
/* GstVideoDecoder::start vmethod: reset parser state for a new stream.
 * NOTE(review): the `return TRUE;` and surrounding braces are elided in
 * this view. */
1279 gst_jpeg_dec_start (GstVideoDecoder * bdec)
1281   GstJpegDec *dec = (GstJpegDec *) bdec;
  /* No SOI/header has been seen yet for the upcoming stream. */
1283   dec->saw_header = FALSE;
1284   dec->parse_entropy_len = 0;
1285   dec->parse_resync = FALSE;
  /* Input is an unframed byte stream: the base class must call our parse()
   * to delimit individual JPEG images before handle_frame() runs. */
1287   gst_video_decoder_set_packetized (bdec, FALSE);
/* GstVideoDecoder::flush vmethod: abort any in-progress decode and reset
 * the stream-parsing state so decoding can restart cleanly (e.g. after a
 * seek or discontinuity). */
1293 gst_jpeg_dec_flush (GstVideoDecoder * bdec)
1295   GstJpegDec *dec = (GstJpegDec *) bdec;
  /* Terminate the current libjpeg decompression cycle without finishing
   * it; leaves dec->cinfo reusable for the next image. */
1297   jpeg_abort_decompress (&dec->cinfo);
  /* Same parser-state reset as in gst_jpeg_dec_start(). */
1298   dec->parse_entropy_len = 0;
1299   dec->parse_resync = FALSE;
1300   dec->saw_header = FALSE;
/* GObject property setter for GstJpegDec (idct-method, max-errors).
 * NOTE(review): the `switch (prop_id)`, `break;` statements, `#endif` and
 * braces are elided in this view. */
1306 gst_jpeg_dec_set_property (GObject * object, guint prop_id,
1307     const GValue * value, GParamSpec * pspec)
1311   dec = GST_JPEG_DEC (object);
1314     case PROP_IDCT_METHOD:
      /* Plain (non-atomic) write, unlike max-errors below -- presumably
       * only consulted when (re)configuring the decompressor; confirm. */
1315       dec->idct_method = g_value_get_enum (value);
  /* Deprecated property, compiled out when GST_REMOVE_DEPRECATED is set. */
1317 #ifndef GST_REMOVE_DEPRECATED
1318     case PROP_MAX_ERRORS:
      /* Atomic write: value may be read concurrently from the streaming
       * thread (see the matching g_atomic_int_get in the getter). */
1319       g_atomic_int_set (&dec->max_errors, g_value_get_int (value));
  /* default: unknown property id. */
1323       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GObject property getter for GstJpegDec (idct-method, max-errors).
 * Mirrors gst_jpeg_dec_set_property().
 * NOTE(review): the `switch (prop_id)`, `break;` statements, `#endif` and
 * braces are elided in this view. */
1329 gst_jpeg_dec_get_property (GObject * object, guint prop_id, GValue * value,
1334   dec = GST_JPEG_DEC (object);
1337     case PROP_IDCT_METHOD:
1338       g_value_set_enum (value, dec->idct_method);
  /* Deprecated property, compiled out when GST_REMOVE_DEPRECATED is set. */
1340 #ifndef GST_REMOVE_DEPRECATED
1341     case PROP_MAX_ERRORS:
      /* Atomic read to pair with the atomic write in the setter. */
1342       g_value_set_int (value, g_atomic_int_get (&dec->max_errors));
  /* default: unknown property id. */
1346       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1352 gst_jpeg_dec_stop (GstVideoDecoder * bdec)
1354 GstJpegDec *dec = (GstJpegDec *) bdec;
1356 gst_jpeg_dec_free_buffers (dec);