2 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
3 * Copyright (C) <2009> Tim-Philipp Müller <tim centricular net>
4 * Copyright (C) 2012 Collabora Ltd.
5 * Author : Edward Hervey <edward@collabora.com>
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Library General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Library General Public License for more details.
17 * You should have received a copy of the GNU Library General Public
18 * License along with this library; if not, write to the
19 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
20 * Boston, MA 02110-1301, USA.
24 * SECTION:element-jpegdec
26 * Decodes jpeg images.
29 * <title>Example launch line</title>
31 * gst-launch-1.0 -v filesrc location=mjpeg.avi ! avidemux ! queue ! jpegdec ! videoconvert ! videoscale ! autovideosink
 * ]| The above pipeline decodes the mjpeg stream and renders it to the screen.
41 #include "gstjpegdec.h"
43 #include <gst/video/video.h>
44 #include <gst/video/gstvideometa.h>
45 #include <gst/video/gstvideopool.h>
46 #include "gst/gst-i18n-plugin.h"
/* Hard limits on decodable image size; JPEG itself caps dimensions at
 * 65535 (16-bit fields in the SOF header). */
#define MAX_WIDTH  65535
#define MAX_HEIGHT 65535

/* Recover the owning GstJpegDec from a libjpeg decompress struct: the
 * source manager installed in cinfo->src is really a GstJpegDecSourceMgr
 * which carries a back-pointer to the element. */
#define CINFO_GET_JPEGDEC(cinfo_ptr) \
        (((struct GstJpegDecSourceMgr*)((cinfo_ptr)->src))->dec)

/* property defaults */
#define JPEG_DEFAULT_IDCT_METHOD	JDCT_FASTEST
#define JPEG_DEFAULT_MAX_ERRORS 	0
/* Source pad: raw video in the formats this decoder can produce
 * (I420 for YCbCr input, RGB/GRAY8 for the respective colorspaces;
 * the packed RGB variants are produced via videoconvert-compatible caps). */
static GstStaticPadTemplate gst_jpeg_dec_src_pad_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
        ("{ I420, RGB, BGR, RGBx, xRGB, BGRx, xBGR, GRAY8 }"))
/* FIXME: sof-marker is for IJG libjpeg 8, should be different for 6.2 */
/* FIXME: add back "sof-marker = (int) { 0, 1, 2, 5, 6, 7, 9, 10, 13, 14 }"
 * once we have a parser and/or demuxer set caps properly */
/* Sink pad: accepts any image/jpeg stream */
static GstStaticPadTemplate gst_jpeg_dec_sink_pad_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_STATIC_CAPS ("image/jpeg")
GST_DEBUG_CATEGORY_STATIC (jpeg_dec_debug);
#define GST_CAT_DEFAULT jpeg_dec_debug
GST_DEBUG_CATEGORY_STATIC (GST_CAT_PERFORMANCE);

/* GObject property vfuncs */
static void gst_jpeg_dec_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_jpeg_dec_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);

/* GstVideoDecoder vfuncs implemented by this element */
static gboolean gst_jpeg_dec_set_format (GstVideoDecoder * dec,
    GstVideoCodecState * state);
static gboolean gst_jpeg_dec_start (GstVideoDecoder * bdec);
static gboolean gst_jpeg_dec_stop (GstVideoDecoder * bdec);
static gboolean gst_jpeg_dec_flush (GstVideoDecoder * bdec);
static GstFlowReturn gst_jpeg_dec_parse (GstVideoDecoder * bdec,
    GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos);
static GstFlowReturn gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec,
    GstVideoCodecFrame * frame);
static gboolean gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec,

#define gst_jpeg_dec_parent_class parent_class
G_DEFINE_TYPE (GstJpegDec, gst_jpeg_dec, GST_TYPE_VIDEO_DECODER);
112 gst_jpeg_dec_finalize (GObject * object)
114 GstJpegDec *dec = GST_JPEG_DEC (object);
116 jpeg_destroy_decompress (&dec->cinfo);
117 if (dec->input_state)
118 gst_video_codec_state_unref (dec->input_state);
120 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Class initialisation: installs the GObject property vfuncs and the
 * idct-method / max-errors properties, registers the static pad templates
 * and element metadata, and wires up the GstVideoDecoder vfuncs. */
gst_jpeg_dec_class_init (GstJpegDecClass * klass)
  GObjectClass *gobject_class;
  GstElementClass *element_class;
  GstVideoDecoderClass *vdec_class;

  gobject_class = (GObjectClass *) klass;
  element_class = (GstElementClass *) klass;
  vdec_class = (GstVideoDecoderClass *) klass;

  parent_class = g_type_class_peek_parent (klass);

  gobject_class->finalize = gst_jpeg_dec_finalize;
  gobject_class->set_property = gst_jpeg_dec_set_property;
  gobject_class->get_property = gst_jpeg_dec_get_property;

  /* which libjpeg IDCT algorithm to use (speed/quality trade-off) */
  g_object_class_install_property (gobject_class, PROP_IDCT_METHOD,
      g_param_spec_enum ("idct-method", "IDCT Method",
          "The IDCT algorithm to use", GST_TYPE_IDCT_METHOD,
          JPEG_DEFAULT_IDCT_METHOD,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /**
   * GstJpegDec:max-errors:
   *
   * Error out after receiving N consecutive decoding errors
   * (-1 = never error out, 0 = automatic, 1 = fail on first error, etc.)
   */
  g_object_class_install_property (gobject_class, PROP_MAX_ERRORS,
      g_param_spec_int ("max-errors", "Maximum Consecutive Decoding Errors",
          "Error out after receiving N consecutive decoding errors "
          "(-1 = never fail, 0 = automatic, 1 = fail on first error)",
          -1, G_MAXINT, JPEG_DEFAULT_MAX_ERRORS,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_jpeg_dec_src_pad_template));
  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_jpeg_dec_sink_pad_template));
  gst_element_class_set_static_metadata (element_class, "JPEG image decoder",
      "Codec/Decoder/Image",
      "Decode images from JPEG format", "Wim Taymans <wim@fluendo.com>");

  /* base-class decode machinery callbacks */
  vdec_class->start = gst_jpeg_dec_start;
  vdec_class->stop = gst_jpeg_dec_stop;
  vdec_class->flush = gst_jpeg_dec_flush;
  vdec_class->parse = gst_jpeg_dec_parse;
  vdec_class->set_format = gst_jpeg_dec_set_format;
  vdec_class->handle_frame = gst_jpeg_dec_handle_frame;
  vdec_class->decide_allocation = gst_jpeg_dec_decide_allocation;

  GST_DEBUG_CATEGORY_INIT (jpeg_dec_debug, "jpegdec", 0, "JPEG decoder");
  GST_DEBUG_CATEGORY_GET (GST_CAT_PERFORMANCE, "GST_PERFORMANCE");
180 gst_jpeg_dec_fill_input_buffer (j_decompress_ptr cinfo)
184 dec = CINFO_GET_JPEGDEC (cinfo);
185 g_return_val_if_fail (dec != NULL, FALSE);
186 g_return_val_if_fail (dec->current_frame != NULL, FALSE);
187 g_return_val_if_fail (dec->current_frame_map.data != NULL, FALSE);
189 cinfo->src->next_input_byte = dec->current_frame_map.data;
190 cinfo->src->bytes_in_buffer = dec->current_frame_map.size;
196 gst_jpeg_dec_init_source (j_decompress_ptr cinfo)
198 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "init_source");
203 gst_jpeg_dec_skip_input_data (j_decompress_ptr cinfo, glong num_bytes)
205 GstJpegDec *dec = CINFO_GET_JPEGDEC (cinfo);
207 GST_DEBUG_OBJECT (dec, "skip %ld bytes", num_bytes);
209 if (num_bytes > 0 && cinfo->src->bytes_in_buffer >= num_bytes) {
210 cinfo->src->next_input_byte += (size_t) num_bytes;
211 cinfo->src->bytes_in_buffer -= (size_t) num_bytes;
216 gst_jpeg_dec_resync_to_restart (j_decompress_ptr cinfo, gint desired)
218 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "resync_to_start");
223 gst_jpeg_dec_term_source (j_decompress_ptr cinfo)
225 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "term_source");
230 gst_jpeg_dec_my_output_message (j_common_ptr cinfo)
232 return; /* do nothing */
236 gst_jpeg_dec_my_emit_message (j_common_ptr cinfo, int msg_level)
238 /* GST_LOG_OBJECT (CINFO_GET_JPEGDEC (&cinfo), "msg_level=%d", msg_level); */
243 gst_jpeg_dec_my_error_exit (j_common_ptr cinfo)
245 struct GstJpegDecErrorMgr *err_mgr = (struct GstJpegDecErrorMgr *) cinfo->err;
247 (*cinfo->err->output_message) (cinfo);
248 longjmp (err_mgr->setjmp_buffer, 1);
252 gst_jpeg_dec_init (GstJpegDec * dec)
254 GST_DEBUG ("initializing");
257 memset (&dec->cinfo, 0, sizeof (dec->cinfo));
258 memset (&dec->jerr, 0, sizeof (dec->jerr));
259 dec->cinfo.err = jpeg_std_error (&dec->jerr.pub);
260 dec->jerr.pub.output_message = gst_jpeg_dec_my_output_message;
261 dec->jerr.pub.emit_message = gst_jpeg_dec_my_emit_message;
262 dec->jerr.pub.error_exit = gst_jpeg_dec_my_error_exit;
264 jpeg_create_decompress (&dec->cinfo);
266 dec->cinfo.src = (struct jpeg_source_mgr *) &dec->jsrc;
267 dec->cinfo.src->init_source = gst_jpeg_dec_init_source;
268 dec->cinfo.src->fill_input_buffer = gst_jpeg_dec_fill_input_buffer;
269 dec->cinfo.src->skip_input_data = gst_jpeg_dec_skip_input_data;
270 dec->cinfo.src->resync_to_restart = gst_jpeg_dec_resync_to_restart;
271 dec->cinfo.src->term_source = gst_jpeg_dec_term_source;
274 /* init properties */
275 dec->idct_method = JPEG_DEFAULT_IDCT_METHOD;
276 dec->max_errors = JPEG_DEFAULT_MAX_ERRORS;
279 static inline gboolean
280 gst_jpeg_dec_parse_tag_has_entropy_segment (guint8 tag)
282 if (tag == 0xda || (tag >= 0xd0 && tag <= 0xd7))
/* GstVideoDecoder::parse vfunc: scan the adapter for a complete JPEG image
 * (SOI .. EOI), accumulating bytes into the current frame with
 * gst_video_decoder_add_to_frame() and completing it with
 * gst_video_decoder_have_frame(). Keeps resync/entropy-scan state in the
 * decoder (saw_header, parse_resync, parse_entropy_len) across calls.
 * Returns GST_VIDEO_DECODER_FLOW_NEED_DATA when more input is required. */
gst_jpeg_dec_parse (GstVideoDecoder * bdec, GstVideoCodecFrame * frame,
    GstAdapter * adapter, gboolean at_eos)
  gint offset = 0, noffset;
  GstJpegDec *dec = (GstJpegDec *) bdec;

  /* FIXME : The overhead of using scan_uint32 is massive */

  size = gst_adapter_available (adapter);
  GST_DEBUG ("Parsing jpeg image data (%u bytes)", size);

    GST_DEBUG ("Flushing all data out");
    /* If we have leftover data, throw it away */
    if (!dec->saw_header)
    goto have_full_frame;

  if (!dec->saw_header) {
    /* we expect at least 4 bytes, first of which start marker */
    /* scan for the SOI marker 0xffd8 */
        gst_adapter_masked_scan_uint32 (adapter, 0xffff0000, 0xffd80000, 0,
    GST_DEBUG ("ret:%d", ret);
    /* drop any bytes before the start-of-image marker */
    gst_adapter_flush (adapter, ret);
    dec->saw_header = TRUE;

    GST_DEBUG ("offset:%d, size:%d", offset, size);

    /* look for the next 0xff marker byte at the expected position */
        gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
        offset, size - offset, &value);
    /* lost sync if 0xff marker not where expected */
    if ((resync = (noffset != offset))) {
      GST_DEBUG ("Lost sync at 0x%08x, resyncing", offset + 2);
    /* may have marker, but could have been resyncing */
    resync = resync || dec->parse_resync;
    /* Skip over extra 0xff */
    while ((noffset >= 0) && ((value & 0xff) == 0xff)) {
          gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
          noffset, size - noffset, &value);
    /* enough bytes left for marker? (we need 0xNN after the 0xff) */
      GST_DEBUG ("at end of input and no EOI marker found, need more data");
    /* now lock on the marker we found */
    value = value & 0xff;
      GST_DEBUG ("0x%08x: EOI marker", offset + 2);
      /* clear parse state */
      dec->saw_header = FALSE;
      dec->parse_resync = FALSE;
      goto have_full_frame;
      /* Skip this frame if we found another SOI marker */
      GST_DEBUG ("0x%08x: SOI marker before EOI, skipping", offset + 2);
      dec->parse_resync = FALSE;
      /* FIXME : Need to skip data */
      goto have_full_frame;

    /* restart markers (RST0-RST7) carry no length field */
    if (value >= 0xd0 && value <= 0xd7)
      /* peek tag and subsequent length */
      if (offset + 2 + 4 > size)
        gst_adapter_masked_scan_uint32_peek (adapter, 0x0, 0x0, offset + 2, 4,
      /* segment length is the low 16 bits of the peeked word */
      frame_len = frame_len & 0xffff;
      GST_DEBUG ("0x%08x: tag %02x, frame_len=%u", offset + 2, value, frame_len);
      /* the frame length includes the 2 bytes for the length; here we want at
       * least 2 more bytes at the end for an end marker */
      if (offset + 2 + 2 + frame_len + 2 > size) {
      if (gst_jpeg_dec_parse_tag_has_entropy_segment (value)) {
        guint eseglen = dec->parse_entropy_len;

        GST_DEBUG ("0x%08x: finding entropy segment length (eseglen:%d)",
            offset + 2, eseglen);
        if (size < offset + 2 + frame_len + eseglen)
        noffset = offset + 2 + frame_len + dec->parse_entropy_len;
        GST_DEBUG ("noffset:%d, size:%d, size - noffset:%d",
            noffset, size, size - noffset);
        /* scan forward for the 0xff that terminates the entropy data */
        noffset = gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00,
            0x0000ff00, noffset, size - noffset, &value);
          /* not found yet: remember how far we scanned for the next call */
          dec->parse_entropy_len = size - offset - 4 - frame_len - 2;
        /* 0xff00 is a stuffed byte, not a marker; anything else ends the
         * entropy segment */
        if ((value & 0xff) != 0x00) {
          eseglen = noffset - offset - frame_len - 2;
        dec->parse_entropy_len = 0;
        frame_len += eseglen;
        GST_DEBUG ("entropy segment length=%u => frame_len=%u", eseglen,
      /* check if we will still be in sync if we interpret
       * this as a sync point and skip this frame */
      noffset = offset + frame_len + 2;
      noffset = gst_adapter_masked_scan_uint32 (adapter, 0x0000ff00, 0x0000ff00,
      /* ignore and continue resyncing until we hit the end
       * of our data or find a sync point that looks okay */
      GST_DEBUG ("found sync at 0x%x", offset + 2);

    /* Add current data to output buffer */
    toadd += frame_len + 2;
    offset += frame_len + 2;

  /* ran out of data mid-image: keep what we have, ask for more */
  gst_video_decoder_add_to_frame (bdec, toadd);
  return GST_VIDEO_DECODER_FLOW_NEED_DATA;

  /* complete image collected */
  gst_video_decoder_add_to_frame (bdec, toadd);
  return gst_video_decoder_have_frame (bdec);

  /* drop everything (flush path) */
  gst_adapter_flush (adapter, size);
/* shamelessly ripped from jpegutils.c in mjpegtools */
/* Copy a Huffman table definition (bits + symbol values) into the
 * decompressor, allocating the table slot on first use. Aborts via
 * g_error() if the symbol counts are out of the valid 1..256 range. */
add_huff_table (j_decompress_ptr dinfo,
    JHUFF_TBL ** htblptr, const UINT8 * bits, const UINT8 * val)
/* Define a Huffman table */
  if (*htblptr == NULL)
    *htblptr = jpeg_alloc_huff_table ((j_common_ptr) dinfo);

  /* Copy the number-of-symbols-of-each-code-length counts */
  memcpy ((*htblptr)->bits, bits, sizeof ((*htblptr)->bits));

  /* Validate the counts. We do this here mainly so we can copy the right
   * number of symbols from the val[] array, without risking marching off
   * the end of memory. jchuff.c will do a more thorough test later.
   */
  for (len = 1; len <= 16; len++)
    nsymbols += bits[len];
  if (nsymbols < 1 || nsymbols > 256)
    g_error ("jpegutils.c: add_huff_table failed badly. ");

  memcpy ((*htblptr)->huffval, val, nsymbols * sizeof (UINT8));
/* Install the four standard baseline Huffman tables for streams that omit
 * DHT segments (common in MJPEG). Tables are from JPEG standard section K.3. */
std_huff_tables (j_decompress_ptr dinfo)
/* Set up the standard Huffman tables (cf. JPEG standard section K.3) */
/* IMPORTANT: these are only valid for 8-bit data precision! */
  static const UINT8 bits_dc_luminance[17] =
  { /* 0-base */ 0, 0, 1, 5, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0 };
  static const UINT8 val_dc_luminance[] =
  { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };

  static const UINT8 bits_dc_chrominance[17] =
  { /* 0-base */ 0, 0, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0 };
  static const UINT8 val_dc_chrominance[] =
  { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };

  static const UINT8 bits_ac_luminance[17] =
  { /* 0-base */ 0, 0, 2, 1, 3, 3, 2, 4, 3, 5, 5, 4, 4, 0, 0, 1, 0x7d };
  /* NOTE(review): table appears truncated in this copy — verify the final
   * entries against JPEG standard table K.3.3.2 */
  static const UINT8 val_ac_luminance[] =
  { 0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12,
    0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07,
    0x22, 0x71, 0x14, 0x32, 0x81, 0x91, 0xa1, 0x08,
    0x23, 0x42, 0xb1, 0xc1, 0x15, 0x52, 0xd1, 0xf0,
    0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16,
    0x17, 0x18, 0x19, 0x1a, 0x25, 0x26, 0x27, 0x28,
    0x29, 0x2a, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39,
    0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49,
    0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59,
    0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69,
    0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79,
    0x7a, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89,
    0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98,
    0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
    0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6,
    0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4, 0xc5,
    0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4,
    0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xe1, 0xe2,
    0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea,
    0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,

  static const UINT8 bits_ac_chrominance[17] =
  { /* 0-base */ 0, 0, 2, 1, 2, 4, 4, 3, 4, 7, 5, 4, 4, 0, 1, 2, 0x77 };
  /* NOTE(review): table appears truncated in this copy — verify the final
   * entries against JPEG standard table K.3.3.2 */
  static const UINT8 val_ac_chrominance[] =
  { 0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21,
    0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71,
    0x13, 0x22, 0x32, 0x81, 0x08, 0x14, 0x42, 0x91,
    0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33, 0x52, 0xf0,
    0x15, 0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34,
    0xe1, 0x25, 0xf1, 0x17, 0x18, 0x19, 0x1a, 0x26,
    0x27, 0x28, 0x29, 0x2a, 0x35, 0x36, 0x37, 0x38,
    0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
    0x49, 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58,
    0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
    0x69, 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78,
    0x79, 0x7a, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
    0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96,
    0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5,
    0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4,
    0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3,
    0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2,
    0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda,
    0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9,
    0xea, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,

  /* slots 0 = luminance, 1 = chrominance */
  add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[0],
      bits_dc_luminance, val_dc_luminance);
  add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[0],
      bits_ac_luminance, val_ac_luminance);
  add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[1],
      bits_dc_chrominance, val_dc_chrominance);
  add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[1],
      bits_ac_chrominance, val_ac_chrominance);
577 guarantee_huff_tables (j_decompress_ptr dinfo)
579 if ((dinfo->dc_huff_tbl_ptrs[0] == NULL) &&
580 (dinfo->dc_huff_tbl_ptrs[1] == NULL) &&
581 (dinfo->ac_huff_tbl_ptrs[0] == NULL) &&
582 (dinfo->ac_huff_tbl_ptrs[1] == NULL)) {
583 GST_DEBUG ("Generating standard Huffman tables for this frame.");
584 std_huff_tables (dinfo);
589 gst_jpeg_dec_set_format (GstVideoDecoder * dec, GstVideoCodecState * state)
591 GstJpegDec *jpeg = GST_JPEG_DEC (dec);
592 GstVideoInfo *info = &state->info;
594 /* FIXME : previously jpegdec would handled input as packetized
595 * if the framerate was present. Here we consider it packetized if
596 * the fps is != 1/1 */
597 if (GST_VIDEO_INFO_FPS_N (info) != 1 && GST_VIDEO_INFO_FPS_D (info) != 1)
598 gst_video_decoder_set_packetized (dec, TRUE);
600 gst_video_decoder_set_packetized (dec, FALSE);
602 if (jpeg->input_state)
603 gst_video_codec_state_unref (jpeg->input_state);
604 jpeg->input_state = gst_video_codec_state_ref (state);
612 hresamplecpy1 (guint8 * dest, const guint8 * src, guint len)
616 for (i = 0; i < len; ++i) {
617 /* equivalent to: dest[i] = src[i << 1] */
626 gst_jpeg_dec_free_buffers (GstJpegDec * dec)
630 for (i = 0; i < 16; i++) {
631 g_free (dec->idr_y[i]);
632 g_free (dec->idr_u[i]);
633 g_free (dec->idr_v[i]);
634 dec->idr_y[i] = NULL;
635 dec->idr_u[i] = NULL;
636 dec->idr_v[i] = NULL;
639 dec->idr_width_allocated = 0;
642 static inline gboolean
643 gst_jpeg_dec_ensure_buffers (GstJpegDec * dec, guint maxrowbytes)
647 if (G_LIKELY (dec->idr_width_allocated == maxrowbytes))
650 /* FIXME: maybe just alloc one or three blocks altogether? */
651 for (i = 0; i < 16; i++) {
652 dec->idr_y[i] = g_try_realloc (dec->idr_y[i], maxrowbytes);
653 dec->idr_u[i] = g_try_realloc (dec->idr_u[i], maxrowbytes);
654 dec->idr_v[i] = g_try_realloc (dec->idr_v[i], maxrowbytes);
656 if (G_UNLIKELY (!dec->idr_y[i] || !dec->idr_u[i] || !dec->idr_v[i])) {
657 GST_WARNING_OBJECT (dec, "out of memory, i=%d, bytes=%u", i, maxrowbytes);
662 dec->idr_width_allocated = maxrowbytes;
663 GST_LOG_OBJECT (dec, "allocated temp memory, %u bytes/row", maxrowbytes);
/* Decode a grayscale JPEG into the (single-component) output frame by
 * reading raw DCT rows into the temporary row buffers and copying them,
 * pixel by pixel, honouring the destination pixel/row strides. */
gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, GstVideoFrame * frame)
  guchar **scanarray[1] = { rows };
  gint pstride, rstride;

  GST_DEBUG_OBJECT (dec, "indirect decoding of grayscale");

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  /* temp rows rounded up to 32 bytes for jpeglib's padding needs */
  if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))

  base[0] = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);

  /* point the scan rows at the pre-allocated temp buffers */
  memcpy (rows, dec->idr_y, 16 * sizeof (gpointer));

    lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
    if (G_LIKELY (lines > 0)) {
      for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
        /* copy one decoded row into the output, sample by sample */
        for (k = 0; k < width; k++) {
          base[0][p] = rows[j][k];
      GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Decode an RGB JPEG into the three-component output frame: read raw rows
 * for R, G and B into temp buffers, then interleave them into the packed
 * destination using the frame's pixel/row strides. */
gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame)
  guchar *r_rows[16], *g_rows[16], *b_rows[16];
  guchar **scanarray[3] = { r_rows, g_rows, b_rows };
  guint pstride, rstride;

  GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  /* temp rows rounded up to 32 bytes for jpeglib's padding needs */
  if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))

  for (i = 0; i < 3; i++)
    base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);

  pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);

  /* point the scan rows at the pre-allocated temp buffers */
  memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
  memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
  memcpy (b_rows, dec->idr_v, 16 * sizeof (gpointer));

    lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
    if (G_LIKELY (lines > 0)) {
      for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
        /* interleave R/G/B samples into the packed destination row */
        for (k = 0; k < width; k++) {
          base[0][p] = r_rows[j][k];
          base[1][p] = g_rows[j][k];
          base[2][p] = b_rows[j][k];
      GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
766 gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame, gint r_v,
769 guchar *y_rows[16], *u_rows[16], *v_rows[16];
770 guchar **scanarray[3] = { y_rows, u_rows, v_rows };
773 guchar *base[3], *last[3];
777 GST_DEBUG_OBJECT (dec,
778 "unadvantageous width or r_h, taking slow route involving memcpy");
780 width = GST_VIDEO_FRAME_WIDTH (frame);
781 height = GST_VIDEO_FRAME_HEIGHT (frame);
783 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
786 for (i = 0; i < 3; i++) {
787 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
788 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
789 /* make sure we don't make jpeglib write beyond our buffer,
790 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
791 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
792 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
795 memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer));
796 memcpy (u_rows, dec->idr_u, 16 * sizeof (gpointer));
797 memcpy (v_rows, dec->idr_v, 16 * sizeof (gpointer));
799 /* fill chroma components for grayscale */
801 GST_DEBUG_OBJECT (dec, "grayscale, filling chroma");
802 for (i = 0; i < 16; i++) {
803 memset (u_rows[i], GST_ROUND_UP_32 (width), 0x80);
804 memset (v_rows[i], GST_ROUND_UP_32 (width), 0x80);
808 for (i = 0; i < height; i += r_v * DCTSIZE) {
809 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, r_v * DCTSIZE);
810 if (G_LIKELY (lines > 0)) {
811 for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) {
812 if (G_LIKELY (base[0] <= last[0])) {
813 memcpy (base[0], y_rows[j], stride[0]);
814 base[0] += stride[0];
817 if (G_LIKELY (base[0] <= last[0])) {
818 memcpy (base[0], y_rows[j + 1], stride[0]);
819 base[0] += stride[0];
822 if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) {
824 memcpy (base[1], u_rows[k], stride[1]);
825 memcpy (base[2], v_rows[k], stride[2]);
826 } else if (r_h == 1) {
827 hresamplecpy1 (base[1], u_rows[k], stride[1]);
828 hresamplecpy1 (base[2], v_rows[k], stride[2]);
830 /* FIXME: implement (at least we avoid crashing by doing nothing) */
834 if (r_v == 2 || (k & 1) != 0) {
835 base[1] += stride[1];
836 base[2] += stride[2];
840 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Decode YCbCr directly into the output frame: build per-component arrays
 * of row pointers into the frame data (clamped to 'last[i]' so jpeglib's
 * padding rows can't overrun the buffer) and let jpeg_read_raw_data()
 * write in place. Bails out with a decoder error for vertical sampling
 * factors > 2. */
gst_jpeg_dec_decode_direct (GstJpegDec * dec, GstVideoFrame * frame)
  guchar **line[3];             /* the jpeg line buffer */
  guchar *y[4 * DCTSIZE] = { NULL, };   /* alloc enough for the lines */
  guchar *u[4 * DCTSIZE] = { NULL, };   /* r_v will be <4 */
  guchar *v[4 * DCTSIZE] = { NULL, };
  gint lines, v_samp[3];
  guchar *base[3], *last[3];

  v_samp[0] = dec->cinfo.comp_info[0].v_samp_factor;
  v_samp[1] = dec->cinfo.comp_info[1].v_samp_factor;
  v_samp[2] = dec->cinfo.comp_info[2].v_samp_factor;

  if (G_UNLIKELY (v_samp[0] > 2 || v_samp[1] > 2 || v_samp[2] > 2))
    goto format_not_supported;

  height = GST_VIDEO_FRAME_HEIGHT (frame);

  for (i = 0; i < 3; i++) {
    base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
    stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
    /* make sure we don't make jpeglib write beyond our buffer,
     * which might happen if (height % (r_v*DCTSIZE)) != 0 */
    last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
        (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
  }

  /* let jpeglib decode directly into our final buffer */
  GST_DEBUG_OBJECT (dec, "decoding directly into output buffer");

  for (i = 0; i < height; i += v_samp[0] * DCTSIZE) {
    for (j = 0; j < (v_samp[0] * DCTSIZE); ++j) {
      /* luma row pointer, clamped to the last valid row */
      line[0][j] = base[0] + (i + j) * stride[0];
      if (G_UNLIKELY (line[0][j] > last[0]))
        line[0][j] = last[0];
      /* chroma rows: same rate as luma, or half rate when subsampled */
      if (v_samp[1] == v_samp[0]) {
        line[1][j] = base[1] + ((i + j) / 2) * stride[1];
      } else if (j < (v_samp[1] * DCTSIZE)) {
        line[1][j] = base[1] + ((i / 2) + j) * stride[1];
      if (G_UNLIKELY (line[1][j] > last[1]))
        line[1][j] = last[1];
      if (v_samp[2] == v_samp[0]) {
        line[2][j] = base[2] + ((i + j) / 2) * stride[2];
      } else if (j < (v_samp[2] * DCTSIZE)) {
        line[2][j] = base[2] + ((i / 2) + j) * stride[2];
      if (G_UNLIKELY (line[2][j] > last[2]))
        line[2][j] = last[2];

    lines = jpeg_read_raw_data (&dec->cinfo, line, v_samp[0] * DCTSIZE);
    if (G_UNLIKELY (!lines)) {
      GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");

format_not_supported:
  {
    gboolean ret = GST_FLOW_OK;

    GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
        (_("Failed to decode JPEG image")),
        ("Unsupported subsampling schema: v_samp factors: %u %u %u", v_samp[0],
            v_samp[1], v_samp[2]), ret);
/* Configure/renegotiate the output state for the given dimensions and
 * libjpeg colorspace (RGB -> RGB, GRAYSCALE -> GRAY8, otherwise I420).
 * Skips the work if the current output state already matches. */
gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc)
  GstVideoCodecState *outstate;
  GstVideoFormat format;

      format = GST_VIDEO_FORMAT_RGB;
      format = GST_VIDEO_FORMAT_GRAY8;
      format = GST_VIDEO_FORMAT_I420;

  /* Compare to currently configured output state */
  outstate = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (dec));
    info = &outstate->info;

    /* nothing changed: keep the existing output state */
    if (width == GST_VIDEO_INFO_WIDTH (info) &&
        height == GST_VIDEO_INFO_HEIGHT (info) &&
        format == GST_VIDEO_INFO_FORMAT (info)) {
      gst_video_codec_state_unref (outstate);
    gst_video_codec_state_unref (outstate);

      gst_video_decoder_set_output_state (GST_VIDEO_DECODER (dec), format,
      width, height, dec->input_state);

    /* JPEG uses full-range BT.601 */
    outstate->info.colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;
    outstate->info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
    outstate->info.colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN;
    outstate->info.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;

  gst_video_codec_state_unref (outstate);

  gst_video_decoder_negotiate (GST_VIDEO_DECODER (dec));

  GST_DEBUG_OBJECT (dec, "max_v_samp_factor=%d", dec->cinfo.max_v_samp_factor);
  GST_DEBUG_OBJECT (dec, "max_h_samp_factor=%d", dec->cinfo.max_h_samp_factor);
/* GstVideoDecoder::handle_frame vfunc: decode one complete JPEG image.
 * Maps the input buffer, feeds it to libjpeg, validates header/colorspace/
 * sampling, negotiates and allocates the output frame, then dispatches to
 * the RGB / grayscale / direct / indirect YCbCr decode paths.
 * Fatal libjpeg errors longjmp back to the setjmp() below (see
 * gst_jpeg_dec_my_error_exit) and are turned into decoder errors. */
gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec, GstVideoCodecFrame * frame)
  GstFlowReturn ret = GST_FLOW_OK;
  GstJpegDec *dec = (GstJpegDec *) bdec;
  GstVideoFrame vframe;
  gboolean need_unmap = TRUE;
  GstVideoCodecState *state = NULL;

  dec->current_frame = frame;
  gst_buffer_map (frame->input_buffer, &dec->current_frame_map, GST_MAP_READ);
  gst_jpeg_dec_fill_input_buffer (&dec->cinfo);

  /* error_exit longjmps here on any fatal libjpeg error */
  if (setjmp (dec->jerr.setjmp_buffer)) {
    code = dec->jerr.pub.msg_code;

    if (code == JERR_INPUT_EOF) {
      GST_DEBUG ("jpeg input EOF error, we probably need more data");
      goto need_more_data;

  /* read header */
  hdr_ok = jpeg_read_header (&dec->cinfo, TRUE);
  if (G_UNLIKELY (hdr_ok != JPEG_HEADER_OK)) {
    GST_WARNING_OBJECT (dec, "reading the header failed, %d", hdr_ok);

  GST_LOG_OBJECT (dec, "num_components=%d", dec->cinfo.num_components);
  GST_LOG_OBJECT (dec, "jpeg_color_space=%d", dec->cinfo.jpeg_color_space);

  if (!dec->cinfo.num_components || !dec->cinfo.comp_info)
    goto components_not_supported;

  /* luma sampling factors drive the YCbCr decode path selection */
  r_h = dec->cinfo.comp_info[0].h_samp_factor;
  r_v = dec->cinfo.comp_info[0].v_samp_factor;

  GST_LOG_OBJECT (dec, "r_h = %d, r_v = %d", r_h, r_v);

  if (dec->cinfo.num_components > 3)
    goto components_not_supported;

  /* verify color space expectation to avoid going *boom* or bogus output */
  if (dec->cinfo.jpeg_color_space != JCS_YCbCr &&
      dec->cinfo.jpeg_color_space != JCS_GRAYSCALE &&
      dec->cinfo.jpeg_color_space != JCS_RGB)
    goto unsupported_colorspace;

#ifndef GST_DISABLE_GST_DEBUG
    for (i = 0; i < dec->cinfo.num_components; ++i) {
      GST_LOG_OBJECT (dec, "[%d] h_samp_factor=%d, v_samp_factor=%d, cid=%d",
          i, dec->cinfo.comp_info[i].h_samp_factor,
          dec->cinfo.comp_info[i].v_samp_factor,
          dec->cinfo.comp_info[i].component_id);

  /* prepare for raw output */
  dec->cinfo.do_fancy_upsampling = FALSE;
  dec->cinfo.do_block_smoothing = FALSE;
  dec->cinfo.out_color_space = dec->cinfo.jpeg_color_space;
  dec->cinfo.dct_method = dec->idct_method;
  dec->cinfo.raw_data_out = TRUE;

  GST_LOG_OBJECT (dec, "starting decompress");
  guarantee_huff_tables (&dec->cinfo);
  if (!jpeg_start_decompress (&dec->cinfo)) {
    GST_WARNING_OBJECT (dec, "failed to start decompression cycle");

  /* sanity checks to get safe and reasonable output */
  switch (dec->cinfo.jpeg_color_space) {
      if (dec->cinfo.num_components != 1)
        goto invalid_yuvrgbgrayscale;
      if (dec->cinfo.num_components != 3 || dec->cinfo.max_v_samp_factor > 1 ||
          dec->cinfo.max_h_samp_factor > 1)
        goto invalid_yuvrgbgrayscale;
      if (dec->cinfo.num_components != 3 ||
          r_v > 2 || r_v < dec->cinfo.comp_info[0].v_samp_factor ||
          r_v < dec->cinfo.comp_info[1].v_samp_factor ||
          r_h < dec->cinfo.comp_info[0].h_samp_factor ||
          r_h < dec->cinfo.comp_info[1].h_samp_factor)
        goto invalid_yuvrgbgrayscale;
      g_assert_not_reached ();

  width = dec->cinfo.output_width;
  height = dec->cinfo.output_height;

  if (G_UNLIKELY (width < MIN_WIDTH || width > MAX_WIDTH ||
          height < MIN_HEIGHT || height > MAX_HEIGHT))

  gst_jpeg_dec_negotiate (dec, width, height, dec->cinfo.jpeg_color_space);

  state = gst_video_decoder_get_output_state (bdec);
  ret = gst_video_decoder_allocate_output_frame (bdec, frame);
  if (G_UNLIKELY (ret != GST_FLOW_OK))

  if (!gst_video_frame_map (&vframe, &state->info, frame->output_buffer,

  GST_LOG_OBJECT (dec, "width %d, height %d", width, height);

  /* dispatch to the decode path matching the stream colorspace */
  if (dec->cinfo.jpeg_color_space == JCS_RGB) {
    gst_jpeg_dec_decode_rgb (dec, &vframe);
  } else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
    gst_jpeg_dec_decode_grayscale (dec, &vframe);
    GST_LOG_OBJECT (dec, "decompressing (reqired scanline buffer height = %u)",
        dec->cinfo.rec_outbuf_height);

    /* For some widths jpeglib requires more horizontal padding than I420
     * provides. In those cases we need to decode into separate buffers and then
     * copy over the data into our final picture buffer, otherwise jpeglib might
     * write over the end of a line into the beginning of the next line,
     * resulting in blocky artifacts on the left side of the picture. */
    if (G_UNLIKELY (width % (dec->cinfo.max_h_samp_factor * DCTSIZE) != 0
            || dec->cinfo.comp_info[0].h_samp_factor != 2
            || dec->cinfo.comp_info[1].h_samp_factor != 1
            || dec->cinfo.comp_info[2].h_samp_factor != 1)) {
      GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
          "indirect decoding using extra buffer copy");
      gst_jpeg_dec_decode_indirect (dec, &vframe, r_v, r_h,
          dec->cinfo.num_components);
      ret = gst_jpeg_dec_decode_direct (dec, &vframe);
      if (G_UNLIKELY (ret != GST_FLOW_OK))
        goto decode_direct_failed;

  gst_video_frame_unmap (&vframe);

  GST_LOG_OBJECT (dec, "decompressing finished");
  jpeg_finish_decompress (&dec->cinfo);

  /* success path: release input map and push the frame downstream */
  gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
  ret = gst_video_decoder_finish_frame (bdec, frame);

  gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
    gst_video_codec_state_unref (state);

  /* ERRORS */
    GST_LOG_OBJECT (dec, "we need more data");

    GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
        (_("Failed to decode JPEG image")),
        ("Picture is too small or too big (%ux%u)", width, height), ret);
    ret = GST_FLOW_ERROR;

    gchar err_msg[JMSG_LENGTH_MAX];

    dec->jerr.pub.format_message ((j_common_ptr) (&dec->cinfo), err_msg);

    GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
        (_("Failed to decode JPEG image")), ("Decode error #%u: %s", code,

    gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
    gst_video_decoder_drop_frame (bdec, frame);
    /* reset libjpeg state so the next frame starts clean */
    jpeg_abort_decompress (&dec->cinfo);
    ret = GST_FLOW_ERROR;

decode_direct_failed:
    /* already posted an error message */
    jpeg_abort_decompress (&dec->cinfo);

    const gchar *reason;

    reason = gst_flow_get_name (ret);

    GST_DEBUG_OBJECT (dec, "failed to alloc buffer, reason %s", reason);
    /* Reset for next time */
    jpeg_abort_decompress (&dec->cinfo);
    if (ret != GST_FLOW_EOS && ret != GST_FLOW_FLUSHING &&
        ret != GST_FLOW_NOT_LINKED) {
      GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
          (_("Failed to decode JPEG image")),
          ("Buffer allocation failed, reason: %s", reason), ret);
      jpeg_abort_decompress (&dec->cinfo);

components_not_supported:
    GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
        (_("Failed to decode JPEG image")),
        ("number of components not supported: %d (max 3)",
            dec->cinfo.num_components), ret);
    jpeg_abort_decompress (&dec->cinfo);

unsupported_colorspace:
    GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
        (_("Failed to decode JPEG image")),
        ("Picture has unknown or unsupported colourspace"), ret);
    jpeg_abort_decompress (&dec->cinfo);

invalid_yuvrgbgrayscale:
    GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
        (_("Failed to decode JPEG image")),
        ("Picture is corrupt or unhandled YUV/RGB/grayscale layout"), ret);
1236 jpeg_abort_decompress (&dec->cinfo);
1242 gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query)
1244 GstBufferPool *pool = NULL;
1245 GstStructure *config;
1247 if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query))
1250 if (gst_query_get_n_allocation_pools (query) > 0)
1251 gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
1256 config = gst_buffer_pool_get_config (pool);
1257 if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
1258 gst_buffer_pool_config_add_option (config,
1259 GST_BUFFER_POOL_OPTION_VIDEO_META);
1261 gst_buffer_pool_set_config (pool, config);
1262 gst_object_unref (pool);
1268 gst_jpeg_dec_start (GstVideoDecoder * bdec)
1270 GstJpegDec *dec = (GstJpegDec *) bdec;
1272 dec->saw_header = FALSE;
1273 dec->parse_entropy_len = 0;
1274 dec->parse_resync = FALSE;
1276 gst_video_decoder_set_packetized (bdec, FALSE);
1282 gst_jpeg_dec_flush (GstVideoDecoder * bdec)
1284 GstJpegDec *dec = (GstJpegDec *) bdec;
1286 jpeg_abort_decompress (&dec->cinfo);
1287 dec->parse_entropy_len = 0;
1288 dec->parse_resync = FALSE;
1289 dec->saw_header = FALSE;
1295 gst_jpeg_dec_set_property (GObject * object, guint prop_id,
1296 const GValue * value, GParamSpec * pspec)
1300 dec = GST_JPEG_DEC (object);
1303 case PROP_IDCT_METHOD:
1304 dec->idct_method = g_value_get_enum (value);
1306 case PROP_MAX_ERRORS:
1307 g_atomic_int_set (&dec->max_errors, g_value_get_int (value));
1311 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1317 gst_jpeg_dec_get_property (GObject * object, guint prop_id, GValue * value,
1322 dec = GST_JPEG_DEC (object);
1325 case PROP_IDCT_METHOD:
1326 g_value_set_enum (value, dec->idct_method);
1328 case PROP_MAX_ERRORS:
1329 g_value_set_int (value, g_atomic_int_get (&dec->max_errors));
1333 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1339 gst_jpeg_dec_stop (GstVideoDecoder * bdec)
1341 GstJpegDec *dec = (GstJpegDec *) bdec;
1343 gst_jpeg_dec_free_buffers (dec);