2 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
3 * Copyright (C) <2009> Tim-Philipp Müller <tim centricular net>
4 * Copyright (C) 2012 Collabora Ltd.
5 * Author : Edward Hervey <edward@collabora.com>
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Library General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Library General Public License for more details.
17 * You should have received a copy of the GNU Library General Public
18 * License along with this library; if not, write to the
19 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
20 * Boston, MA 02110-1301, USA.
24 * SECTION:element-jpegdec
26 * Decodes jpeg images.
29 * <title>Example launch line</title>
31 * gst-launch-1.0 -v filesrc location=mjpeg.avi ! avidemux ! queue ! jpegdec ! videoconvert ! videoscale ! autovideosink
32 * ]| The above pipeline decodes the mjpeg stream and renders it to the screen.
41 #include "gstjpegdec.h"
43 #include <gst/video/video.h>
44 #include <gst/video/gstvideometa.h>
45 #include <gst/video/gstvideopool.h>
46 #include "gst/gst-i18n-plugin.h"
/* Hard limits on the picture size we will accept (JPEG dimensions are
 * 16-bit, so 65535 is the format maximum). */
50 #define MAX_WIDTH 65535
52 #define MAX_HEIGHT 65535
/* Recover the GstJpegDec instance from a libjpeg decompress struct: the
 * src pointer is our own GstJpegDecSourceMgr which embeds a back-pointer. */
54 #define CINFO_GET_JPEGDEC(cinfo_ptr) \
55 (((struct GstJpegDecSourceMgr*)((cinfo_ptr)->src))->dec)
/* Property defaults: fastest IDCT, and 0 max-errors (= automatic). */
57 #define JPEG_DEFAULT_IDCT_METHOD JDCT_FASTEST
58 #define JPEG_DEFAULT_MAX_ERRORS 0
/* Source pad: raw video in the planar/packed formats we can output. */
68 static GstStaticPadTemplate gst_jpeg_dec_src_pad_template =
69 GST_STATIC_PAD_TEMPLATE ("src",
72 GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
73 ("{ I420, RGB, BGR, RGBx, xRGB, BGRx, xBGR, GRAY8 }"))
77 /* FIXME: sof-marker is for IJG libjpeg 8, should be different for 6.2 */
78 /* FIXME: add back "sof-marker = (int) { 0, 1, 2, 5, 6, 7, 9, 10, 13, 14 }"
79 * once we have a parser and/or demuxer set caps properly */
/* Sink pad: accepts any image/jpeg stream. */
80 static GstStaticPadTemplate gst_jpeg_dec_sink_pad_template =
81 GST_STATIC_PAD_TEMPLATE ("sink",
84 GST_STATIC_CAPS ("image/jpeg")
/* Debug categories and forward declarations of the GstVideoDecoder vfuncs
 * and GObject property handlers implemented below. */
87 GST_DEBUG_CATEGORY_STATIC (jpeg_dec_debug);
88 #define GST_CAT_DEFAULT jpeg_dec_debug
89 GST_DEBUG_CATEGORY_STATIC (GST_CAT_PERFORMANCE);
91 static void gst_jpeg_dec_set_property (GObject * object, guint prop_id,
92 const GValue * value, GParamSpec * pspec);
93 static void gst_jpeg_dec_get_property (GObject * object, guint prop_id,
94 GValue * value, GParamSpec * pspec);
96 static gboolean gst_jpeg_dec_set_format (GstVideoDecoder * dec,
97 GstVideoCodecState * state);
98 static gboolean gst_jpeg_dec_start (GstVideoDecoder * bdec);
99 static gboolean gst_jpeg_dec_stop (GstVideoDecoder * bdec);
100 static gboolean gst_jpeg_dec_flush (GstVideoDecoder * bdec);
101 static GstFlowReturn gst_jpeg_dec_parse (GstVideoDecoder * bdec,
102 GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos);
103 static GstFlowReturn gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec,
104 GstVideoCodecFrame * frame);
105 static gboolean gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec,
108 #define gst_jpeg_dec_parent_class parent_class
109 G_DEFINE_TYPE (GstJpegDec, gst_jpeg_dec, GST_TYPE_VIDEO_DECODER);
/* GObject finalize: tear down the libjpeg decompressor and release the
 * cached input codec state, then chain up to the parent class. */
112 gst_jpeg_dec_finalize (GObject * object)
114 GstJpegDec *dec = GST_JPEG_DEC (object);
116 jpeg_destroy_decompress (&dec->cinfo);
117 if (dec->input_state)
118 gst_video_codec_state_unref (dec->input_state);
120 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Class init: install properties, pad templates, element metadata and the
 * GstVideoDecoder virtual method table. */
124 gst_jpeg_dec_class_init (GstJpegDecClass * klass)
126 GObjectClass *gobject_class;
127 GstElementClass *element_class;
128 GstVideoDecoderClass *vdec_class;
130 gobject_class = (GObjectClass *) klass;
131 element_class = (GstElementClass *) klass;
132 vdec_class = (GstVideoDecoderClass *) klass;
134 parent_class = g_type_class_peek_parent (klass);
136 gobject_class->finalize = gst_jpeg_dec_finalize;
137 gobject_class->set_property = gst_jpeg_dec_set_property;
138 gobject_class->get_property = gst_jpeg_dec_get_property;
140 g_object_class_install_property (gobject_class, PROP_IDCT_METHOD,
141 g_param_spec_enum ("idct-method", "IDCT Method",
142 "The IDCT algorithm to use", GST_TYPE_IDCT_METHOD,
143 JPEG_DEFAULT_IDCT_METHOD,
144 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
147 * GstJpegDec:max-errors:
149 * Error out after receiving N consecutive decoding errors
150 * (-1 = never error out, 0 = automatic, 1 = fail on first error, etc.)
152 g_object_class_install_property (gobject_class, PROP_MAX_ERRORS,
153 g_param_spec_int ("max-errors", "Maximum Consecutive Decoding Errors",
154 "Error out after receiving N consecutive decoding errors "
155 "(-1 = never fail, 0 = automatic, 1 = fail on first error)",
156 -1, G_MAXINT, JPEG_DEFAULT_MAX_ERRORS,
157 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
159 gst_element_class_add_pad_template (element_class,
160 gst_static_pad_template_get (&gst_jpeg_dec_src_pad_template));
161 gst_element_class_add_pad_template (element_class,
162 gst_static_pad_template_get (&gst_jpeg_dec_sink_pad_template));
163 gst_element_class_set_static_metadata (element_class, "JPEG image decoder",
164 "Codec/Decoder/Image",
165 "Decode images from JPEG format", "Wim Taymans <wim@fluendo.com>");
/* Wire up the GstVideoDecoder vfuncs implemented in this file. */
167 vdec_class->start = gst_jpeg_dec_start;
168 vdec_class->stop = gst_jpeg_dec_stop;
169 vdec_class->flush = gst_jpeg_dec_flush;
170 vdec_class->parse = gst_jpeg_dec_parse;
171 vdec_class->set_format = gst_jpeg_dec_set_format;
172 vdec_class->handle_frame = gst_jpeg_dec_handle_frame;
173 vdec_class->decide_allocation = gst_jpeg_dec_decide_allocation;
175 GST_DEBUG_CATEGORY_INIT (jpeg_dec_debug, "jpegdec", 0, "JPEG decoder");
176 GST_DEBUG_CATEGORY_GET (GST_CAT_PERFORMANCE, "GST_PERFORMANCE");
/* libjpeg source-manager callback: (re)point libjpeg at the mapped input
 * buffer of the frame currently being decoded. We feed whole frames, so
 * the entire mapped region is handed over in one go. */
180 gst_jpeg_dec_fill_input_buffer (j_decompress_ptr cinfo)
184 dec = CINFO_GET_JPEGDEC (cinfo);
185 g_return_val_if_fail (dec != NULL, FALSE);
186 g_return_val_if_fail (dec->current_frame != NULL, FALSE);
187 g_return_val_if_fail (dec->current_frame_map.data != NULL, FALSE);
189 cinfo->src->next_input_byte = dec->current_frame_map.data;
190 cinfo->src->bytes_in_buffer = dec->current_frame_map.size;
/* libjpeg source-manager callback: nothing to initialize, just log. */
196 gst_jpeg_dec_init_source (j_decompress_ptr cinfo)
198 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "init_source");
/* libjpeg source-manager callback: advance the read position by num_bytes,
 * but only when that many bytes are actually available in the buffer. */
203 gst_jpeg_dec_skip_input_data (j_decompress_ptr cinfo, glong num_bytes)
205 GstJpegDec *dec = CINFO_GET_JPEGDEC (cinfo);
207 GST_DEBUG_OBJECT (dec, "skip %ld bytes", num_bytes);
209 if (num_bytes > 0 && cinfo->src->bytes_in_buffer >= num_bytes) {
210 cinfo->src->next_input_byte += (size_t) num_bytes;
211 cinfo->src->bytes_in_buffer -= (size_t) num_bytes;
/* libjpeg source-manager callback: restart-marker resync; we only log and
 * let libjpeg's default behavior apply. */
216 gst_jpeg_dec_resync_to_restart (j_decompress_ptr cinfo, gint desired)
218 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "resync_to_start");
/* libjpeg source-manager callback: no cleanup needed for our source. */
223 gst_jpeg_dec_term_source (j_decompress_ptr cinfo)
225 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "term_source");
/* libjpeg error-manager callback: suppress libjpeg's stderr messages;
 * errors are reported through GStreamer instead (see my_error_exit). */
230 gst_jpeg_dec_my_output_message (j_common_ptr cinfo)
232 return; /* do nothing */
/* libjpeg error-manager callback: swallow warnings/trace messages. */
236 gst_jpeg_dec_my_emit_message (j_common_ptr cinfo, int msg_level)
238 /* GST_LOG_OBJECT (CINFO_GET_JPEGDEC (&cinfo), "msg_level=%d", msg_level); */
/* libjpeg error-manager callback for fatal errors: instead of letting
 * libjpeg call exit(), jump back to the setjmp() point in handle_frame
 * so the element can recover and report a GStreamer error. */
243 gst_jpeg_dec_my_error_exit (j_common_ptr cinfo)
245 struct GstJpegDecErrorMgr *err_mgr = (struct GstJpegDecErrorMgr *) cinfo->err;
247 (*cinfo->err->output_message) (cinfo);
248 longjmp (err_mgr->setjmp_buffer, 1);
/* Instance init: set up the libjpeg decompressor with our custom error
 * manager and source manager, and initialize property defaults. */
252 gst_jpeg_dec_init (GstJpegDec * dec)
254 GST_DEBUG ("initializing");
/* Install our error handlers BEFORE jpeg_create_decompress(), as
 * required by the libjpeg API. */
257 memset (&dec->cinfo, 0, sizeof (dec->cinfo));
258 memset (&dec->jerr, 0, sizeof (dec->jerr));
259 dec->cinfo.err = jpeg_std_error (&dec->jerr.pub);
260 dec->jerr.pub.output_message = gst_jpeg_dec_my_output_message;
261 dec->jerr.pub.emit_message = gst_jpeg_dec_my_emit_message;
262 dec->jerr.pub.error_exit = gst_jpeg_dec_my_error_exit;
264 jpeg_create_decompress (&dec->cinfo);
/* Hook up our custom source manager that reads from mapped GstBuffers. */
266 dec->cinfo.src = (struct jpeg_source_mgr *) &dec->jsrc;
267 dec->cinfo.src->init_source = gst_jpeg_dec_init_source;
268 dec->cinfo.src->fill_input_buffer = gst_jpeg_dec_fill_input_buffer;
269 dec->cinfo.src->skip_input_data = gst_jpeg_dec_skip_input_data;
270 dec->cinfo.src->resync_to_restart = gst_jpeg_dec_resync_to_restart;
271 dec->cinfo.src->term_source = gst_jpeg_dec_term_source;
274 /* init properties */
275 dec->idct_method = JPEG_DEFAULT_IDCT_METHOD;
276 dec->max_errors = JPEG_DEFAULT_MAX_ERRORS;
/* Returns TRUE for markers that are followed by entropy-coded data:
 * SOS (0xda) and the restart markers RST0-RST7 (0xd0-0xd7). */
279 static inline gboolean
280 gst_jpeg_dec_parse_tag_has_entropy_segment (guint8 tag)
282 if (tag == 0xda || (tag >= 0xd0 && tag <= 0xd7))
/* GstVideoDecoder::parse vfunc — scan the adapter for complete JPEG
 * images when upstream does not deliver packetized input. Walks the
 * marker stream (SOI .. EOI), skipping over entropy-coded segments, and
 * hands complete images to the base class via add_to_frame()/have_frame().
 * Parse state (saw_header, parse_resync, parse_entropy_len) persists
 * across calls so scanning can resume where it left off. */
288 gst_jpeg_dec_parse (GstVideoDecoder * bdec, GstVideoCodecFrame * frame,
289 GstAdapter * adapter, gboolean at_eos)
294 gint offset = 0, noffset;
295 GstJpegDec *dec = (GstJpegDec *) bdec;
297 /* FIXME : The overhead of using scan_uint32 is massive */
299 size = gst_adapter_available (adapter);
300 GST_DEBUG ("Parsing jpeg image data (%u bytes)", size);
303 GST_DEBUG ("Flushing all data out");
306 /* If we have leftover data, throw it away */
307 if (!dec->saw_header)
309 goto have_full_frame;
/* Not seen a start-of-image yet: scan for the 0xffd8 SOI marker and
 * drop everything before it. */
315 if (!dec->saw_header) {
317 /* we expect at least 4 bytes, first of which is the start marker */
319 gst_adapter_masked_scan_uint32 (adapter, 0xffff0000, 0xffd80000, 0,
322 GST_DEBUG ("ret:%d", ret);
327 gst_adapter_flush (adapter, ret);
330 dec->saw_header = TRUE;
337 GST_DEBUG ("offset:%d, size:%d", offset, size);
/* Find the next 0xff marker byte from the current offset. */
340 gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
341 offset, size - offset, &value);
343 /* lost sync if 0xff marker not where expected */
344 if ((resync = (noffset != offset))) {
345 GST_DEBUG ("Lost sync at 0x%08x, resyncing", offset + 2);
347 /* may have marker, but could have been resyncing */
348 resync = resync || dec->parse_resync;
349 /* Skip over extra 0xff */
350 while ((noffset >= 0) && ((value & 0xff) == 0xff)) {
353 gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
354 noffset, size - noffset, &value);
356 /* enough bytes left for marker? (we need 0xNN after the 0xff) */
358 GST_DEBUG ("at end of input and no EOI marker found, need more data");
362 /* now lock on the marker we found */
364 value = value & 0xff;
/* EOI: the image is complete — reset the parse state and emit it. */
366 GST_DEBUG ("0x%08x: EOI marker", offset + 2);
367 /* clear parse state */
368 dec->saw_header = FALSE;
369 dec->parse_resync = FALSE;
371 goto have_full_frame;
374 /* Skip this frame if we found another SOI marker */
375 GST_DEBUG ("0x%08x: SOI marker before EOI, skipping", offset + 2);
376 dec->parse_resync = FALSE;
/* Restart markers RST0-RST7 are stand-alone (no length field). */
382 if (value >= 0xd0 && value <= 0xd7)
385 /* peek tag and subsequent length */
386 if (offset + 2 + 4 > size)
389 gst_adapter_masked_scan_uint32_peek (adapter, 0x0, 0x0, offset + 2, 4,
391 frame_len = frame_len & 0xffff;
393 GST_DEBUG ("0x%08x: tag %02x, frame_len=%u", offset + 2, value, frame_len);
394 /* the frame length includes the 2 bytes for the length; here we want at
395 * least 2 more bytes at the end for an end marker */
396 if (offset + 2 + 2 + frame_len + 2 > size) {
/* SOS/RSTn segments are followed by entropy-coded data of unknown
 * length: scan forward for the next real marker (0xff not followed
 * by 0x00 stuffing) to find where the segment ends. */
400 if (gst_jpeg_dec_parse_tag_has_entropy_segment (value)) {
401 guint eseglen = dec->parse_entropy_len;
403 GST_DEBUG ("0x%08x: finding entropy segment length (eseglen:%d)",
404 offset + 2, eseglen);
405 if (size < offset + 2 + frame_len + eseglen)
407 noffset = offset + 2 + frame_len + dec->parse_entropy_len;
409 GST_DEBUG ("noffset:%d, size:%d, size - noffset:%d",
410 noffset, size, size - noffset);
411 noffset = gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00,
412 0x0000ff00, noffset, size - noffset, &value);
/* Remember how far we already scanned so the next call resumes here. */
415 dec->parse_entropy_len = size - offset - 4 - frame_len - 2;
418 if ((value & 0xff) != 0x00) {
419 eseglen = noffset - offset - frame_len - 2;
424 dec->parse_entropy_len = 0;
425 frame_len += eseglen;
426 GST_DEBUG ("entropy segment length=%u => frame_len=%u", eseglen,
430 /* check if we will still be in sync if we interpret
431 * this as a sync point and skip this frame */
432 noffset = offset + frame_len + 2;
433 noffset = gst_adapter_masked_scan_uint32 (adapter, 0x0000ff00, 0x0000ff00,
436 /* ignore and continue resyncing until we hit the end
437 * of our data or find a sync point that looks okay */
441 GST_DEBUG ("found sync at 0x%x", offset + 2);
444 /* Add current data to output buffer */
445 toadd += frame_len + 2;
446 offset += frame_len + 2;
/* Exit paths: push what we consumed and ask for more, ... */
451 gst_video_decoder_add_to_frame (bdec, toadd);
452 return GST_VIDEO_DECODER_FLOW_NEED_DATA;
/* ... or a full image was found — hand it to the base class. */
456 gst_video_decoder_add_to_frame (bdec, toadd);
457 return gst_video_decoder_have_frame (bdec);
460 gst_adapter_flush (adapter, size);
465 /* shamelessly ripped from jpegutils.c in mjpegtools */
/* Install one Huffman table into the decompressor, allocating the table
 * slot on first use. 'bits' has 17 entries (index 0 unused) giving the
 * number of codes of each length; 'val' lists the symbols in code order. */
467 add_huff_table (j_decompress_ptr dinfo,
468 JHUFF_TBL ** htblptr, const UINT8 * bits, const UINT8 * val)
469 /* Define a Huffman table */
473 if (*htblptr == NULL)
474 *htblptr = jpeg_alloc_huff_table ((j_common_ptr) dinfo);
478 /* Copy the number-of-symbols-of-each-code-length counts */
479 memcpy ((*htblptr)->bits, bits, sizeof ((*htblptr)->bits));
481 /* Validate the counts. We do this here mainly so we can copy the right
482 * number of symbols from the val[] array, without risking marching off
483 * the end of memory. jchuff.c will do a more thorough test later.
486 for (len = 1; len <= 16; len++)
487 nsymbols += bits[len];
488 if (nsymbols < 1 || nsymbols > 256)
489 g_error ("jpegutils.c: add_huff_table failed badly. ");
491 memcpy ((*htblptr)->huffval, val, nsymbols * sizeof (UINT8));
/* Install the four standard JPEG Huffman tables (ITU-T T.81 Annex K.3)
 * for streams (typically MJPEG) that omit the DHT segment. */
497 std_huff_tables (j_decompress_ptr dinfo)
498 /* Set up the standard Huffman tables (cf. JPEG standard section K.3) */
499 /* IMPORTANT: these are only valid for 8-bit data precision! */
501 static const UINT8 bits_dc_luminance[17] =
502 { /* 0-base */ 0, 0, 1, 5, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0 };
503 static const UINT8 val_dc_luminance[] =
504 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
506 static const UINT8 bits_dc_chrominance[17] =
507 { /* 0-base */ 0, 0, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0 };
508 static const UINT8 val_dc_chrominance[] =
509 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
511 static const UINT8 bits_ac_luminance[17] =
512 { /* 0-base */ 0, 0, 2, 1, 3, 3, 2, 4, 3, 5, 5, 4, 4, 0, 0, 1, 0x7d };
513 static const UINT8 val_ac_luminance[] =
514 { 0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12,
515 0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07,
516 0x22, 0x71, 0x14, 0x32, 0x81, 0x91, 0xa1, 0x08,
517 0x23, 0x42, 0xb1, 0xc1, 0x15, 0x52, 0xd1, 0xf0,
518 0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16,
519 0x17, 0x18, 0x19, 0x1a, 0x25, 0x26, 0x27, 0x28,
520 0x29, 0x2a, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39,
521 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49,
522 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59,
523 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69,
524 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79,
525 0x7a, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89,
526 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98,
527 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
528 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6,
529 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4, 0xc5,
530 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4,
531 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xe1, 0xe2,
532 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea,
533 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
537 static const UINT8 bits_ac_chrominance[17] =
538 { /* 0-base */ 0, 0, 2, 1, 2, 4, 4, 3, 4, 7, 5, 4, 4, 0, 1, 2, 0x77 };
539 static const UINT8 val_ac_chrominance[] =
540 { 0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21,
541 0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71,
542 0x13, 0x22, 0x32, 0x81, 0x08, 0x14, 0x42, 0x91,
543 0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33, 0x52, 0xf0,
544 0x15, 0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34,
545 0xe1, 0x25, 0xf1, 0x17, 0x18, 0x19, 0x1a, 0x26,
546 0x27, 0x28, 0x29, 0x2a, 0x35, 0x36, 0x37, 0x38,
547 0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
548 0x49, 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58,
549 0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
550 0x69, 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78,
551 0x79, 0x7a, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
552 0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96,
553 0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5,
554 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4,
555 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3,
556 0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2,
557 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda,
558 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9,
559 0xea, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
/* Slot 0 = luminance, slot 1 = chrominance, for both DC and AC. */
563 add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[0],
564 bits_dc_luminance, val_dc_luminance);
565 add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[0],
566 bits_ac_luminance, val_ac_luminance);
567 add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[1],
568 bits_dc_chrominance, val_dc_chrominance);
569 add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[1],
570 bits_ac_chrominance, val_ac_chrominance);
/* If the stream carried no Huffman tables at all (common in MJPEG),
 * install the standard ones so decompression can proceed. */
576 guarantee_huff_tables (j_decompress_ptr dinfo)
578 if ((dinfo->dc_huff_tbl_ptrs[0] == NULL) &&
579 (dinfo->dc_huff_tbl_ptrs[1] == NULL) &&
580 (dinfo->ac_huff_tbl_ptrs[0] == NULL) &&
581 (dinfo->ac_huff_tbl_ptrs[1] == NULL)) {
582 GST_DEBUG ("Generating standard Huffman tables for this frame.");
583 std_huff_tables (dinfo);
/* GstVideoDecoder::set_format vfunc — cache the input state and decide
 * whether input is packetized (one image per buffer) based on framerate. */
588 gst_jpeg_dec_set_format (GstVideoDecoder * dec, GstVideoCodecState * state)
590 GstJpegDec *jpeg = GST_JPEG_DEC (dec);
591 GstVideoInfo *info = &state->info;
593 /* FIXME : previously jpegdec would handle input as packetized
594 * if the framerate was present. Here we consider it packetized if
595 * the fps is != 1/1 */
/* NOTE(review): with &&, a framerate like 30/1 (N!=1 but D==1) is NOT
 * treated as packetized even though it is != 1/1 — looks like this may
 * have been intended as || or a N/D comparison; confirm before changing. */
596 if (GST_VIDEO_INFO_FPS_N (info) != 1 && GST_VIDEO_INFO_FPS_D (info) != 1)
597 gst_video_decoder_set_packetized (dec, TRUE);
599 gst_video_decoder_set_packetized (dec, FALSE);
601 if (jpeg->input_state)
602 gst_video_codec_state_unref (jpeg->input_state);
603 jpeg->input_state = gst_video_codec_state_ref (state);
/* Copy one row while horizontally downsampling by 2: takes every second
 * source sample. Used for chroma when r_h == 1. */
611 hresamplecpy1 (guint8 * dest, const guint8 * src, guint len)
615 for (i = 0; i < len; ++i) {
616 /* equivalent to: dest[i] = src[i << 1] */
/* Free the 16 per-plane temporary row buffers used by the indirect
 * decoding paths and reset the bookkeeping width. */
625 gst_jpeg_dec_free_buffers (GstJpegDec * dec)
629 for (i = 0; i < 16; i++) {
630 g_free (dec->idr_y[i]);
631 g_free (dec->idr_u[i]);
632 g_free (dec->idr_v[i]);
633 dec->idr_y[i] = NULL;
634 dec->idr_u[i] = NULL;
635 dec->idr_v[i] = NULL;
638 dec->idr_width_allocated = 0;
/* Make sure the 16 temporary row buffers per plane are allocated with at
 * least maxrowbytes each; returns FALSE on allocation failure. Buffers
 * are reused (and only reallocated) when the row size changes. */
641 static inline gboolean
642 gst_jpeg_dec_ensure_buffers (GstJpegDec * dec, guint maxrowbytes)
646 if (G_LIKELY (dec->idr_width_allocated == maxrowbytes))
649 /* FIXME: maybe just alloc one or three blocks altogether? */
650 for (i = 0; i < 16; i++) {
651 dec->idr_y[i] = g_try_realloc (dec->idr_y[i], maxrowbytes);
652 dec->idr_u[i] = g_try_realloc (dec->idr_u[i], maxrowbytes);
653 dec->idr_v[i] = g_try_realloc (dec->idr_v[i], maxrowbytes);
655 if (G_UNLIKELY (!dec->idr_y[i] || !dec->idr_u[i] || !dec->idr_v[i])) {
656 GST_WARNING_OBJECT (dec, "out of memory, i=%d, bytes=%u", i, maxrowbytes);
661 dec->idr_width_allocated = maxrowbytes;
662 GST_LOG_OBJECT (dec, "allocated temp memory, %u bytes/row", maxrowbytes);
/* Decode a grayscale JPEG into the GRAY8 output frame via the temporary
 * row buffers, copying sample-by-sample to honor the output pixel and
 * row strides. */
667 gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, GstVideoFrame * frame)
670 guchar **scanarray[1] = { rows };
675 gint pstride, rstride;
677 GST_DEBUG_OBJECT (dec, "indirect decoding of grayscale");
679 width = GST_VIDEO_FRAME_WIDTH (frame);
680 height = GST_VIDEO_FRAME_HEIGHT (frame);
682 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
685 base[0] = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
686 pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
687 rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
/* Point the scanline array at the preallocated temp rows. */
689 memcpy (rows, dec->idr_y, 16 * sizeof (gpointer));
693 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
694 if (G_LIKELY (lines > 0)) {
695 for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
699 for (k = 0; k < width; k++) {
700 base[0][p] = rows[j][k];
706 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Decode an RGB JPEG into the output frame via temporary row buffers,
 * interleaving the three planar components sample-by-sample according to
 * the output format's pixel stride. */
712 gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame)
714 guchar *r_rows[16], *g_rows[16], *b_rows[16];
715 guchar **scanarray[3] = { r_rows, g_rows, b_rows };
719 guint pstride, rstride;
722 GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");
724 width = GST_VIDEO_FRAME_WIDTH (frame);
725 height = GST_VIDEO_FRAME_HEIGHT (frame);
727 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
730 for (i = 0; i < 3; i++)
731 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
733 pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
734 rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
/* Point the scanline arrays at the preallocated temp rows. */
736 memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
737 memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
738 memcpy (b_rows, dec->idr_v, 16 * sizeof (gpointer));
742 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
743 if (G_LIKELY (lines > 0)) {
744 for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
748 for (k = 0; k < width; k++) {
749 base[0][p] = r_rows[j][k];
750 base[1][p] = g_rows[j][k];
751 base[2][p] = b_rows[j][k];
759 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
765 gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame, gint r_v,
768 guchar *y_rows[16], *u_rows[16], *v_rows[16];
769 guchar **scanarray[3] = { y_rows, u_rows, v_rows };
772 guchar *base[3], *last[3];
776 GST_DEBUG_OBJECT (dec,
777 "unadvantageous width or r_h, taking slow route involving memcpy");
779 width = GST_VIDEO_FRAME_WIDTH (frame);
780 height = GST_VIDEO_FRAME_HEIGHT (frame);
782 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
785 for (i = 0; i < 3; i++) {
786 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
787 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
788 /* make sure we don't make jpeglib write beyond our buffer,
789 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
790 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
791 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
794 memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer));
795 memcpy (u_rows, dec->idr_u, 16 * sizeof (gpointer));
796 memcpy (v_rows, dec->idr_v, 16 * sizeof (gpointer));
798 /* fill chroma components for grayscale */
800 GST_DEBUG_OBJECT (dec, "grayscale, filling chroma");
801 for (i = 0; i < 16; i++) {
802 memset (u_rows[i], GST_ROUND_UP_32 (width), 0x80);
803 memset (v_rows[i], GST_ROUND_UP_32 (width), 0x80);
807 for (i = 0; i < height; i += r_v * DCTSIZE) {
808 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, r_v * DCTSIZE);
809 if (G_LIKELY (lines > 0)) {
810 for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) {
811 if (G_LIKELY (base[0] <= last[0])) {
812 memcpy (base[0], y_rows[j], stride[0]);
813 base[0] += stride[0];
816 if (G_LIKELY (base[0] <= last[0])) {
817 memcpy (base[0], y_rows[j + 1], stride[0]);
818 base[0] += stride[0];
821 if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) {
823 memcpy (base[1], u_rows[k], stride[1]);
824 memcpy (base[2], v_rows[k], stride[2]);
825 } else if (r_h == 1) {
826 hresamplecpy1 (base[1], u_rows[k], stride[1]);
827 hresamplecpy1 (base[2], v_rows[k], stride[2]);
829 /* FIXME: implement (at least we avoid crashing by doing nothing) */
833 if (r_v == 2 || (k & 1) != 0) {
834 base[1] += stride[1];
835 base[2] += stride[2];
839 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Fast path: let jpeglib write decoded scanlines directly into the output
 * frame's planes by building per-plane scanline pointer tables. Only used
 * when sampling factors and width allow it; otherwise the indirect path
 * with temp buffers is taken. */
845 gst_jpeg_dec_decode_direct (GstJpegDec * dec, GstVideoFrame * frame)
847 guchar **line[3]; /* the jpeg line buffer */
848 guchar *y[4 * DCTSIZE] = { NULL, }; /* alloc enough for the lines */
849 guchar *u[4 * DCTSIZE] = { NULL, }; /* r_v will be <4 */
850 guchar *v[4 * DCTSIZE] = { NULL, };
852 gint lines, v_samp[3];
853 guchar *base[3], *last[3];
861 v_samp[0] = dec->cinfo.comp_info[0].v_samp_factor;
862 v_samp[1] = dec->cinfo.comp_info[1].v_samp_factor;
863 v_samp[2] = dec->cinfo.comp_info[2].v_samp_factor;
865 if (G_UNLIKELY (v_samp[0] > 2 || v_samp[1] > 2 || v_samp[2] > 2))
866 goto format_not_supported;
868 height = GST_VIDEO_FRAME_HEIGHT (frame);
870 for (i = 0; i < 3; i++) {
871 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
872 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
873 /* make sure we don't make jpeglib write beyond our buffer,
874 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
875 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
876 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
879 /* let jpeglib decode directly into our final buffer */
880 GST_DEBUG_OBJECT (dec, "decoding directly into output buffer");
882 for (i = 0; i < height; i += v_samp[0] * DCTSIZE) {
883 for (j = 0; j < (v_samp[0] * DCTSIZE); ++j) {
/* Luma: one scanline pointer per output row, clamped to last row. */
885 line[0][j] = base[0] + (i + j) * stride[0];
886 if (G_UNLIKELY (line[0][j] > last[0]))
887 line[0][j] = last[0];
/* Chroma U/V: choose addressing depending on whether the plane is
 * subsampled vertically relative to luma. */
889 if (v_samp[1] == v_samp[0]) {
890 line[1][j] = base[1] + ((i + j) / 2) * stride[1];
891 } else if (j < (v_samp[1] * DCTSIZE)) {
892 line[1][j] = base[1] + ((i / 2) + j) * stride[1];
894 if (G_UNLIKELY (line[1][j] > last[1]))
895 line[1][j] = last[1];
897 if (v_samp[2] == v_samp[0]) {
898 line[2][j] = base[2] + ((i + j) / 2) * stride[2];
899 } else if (j < (v_samp[2] * DCTSIZE)) {
900 line[2][j] = base[2] + ((i / 2) + j) * stride[2];
902 if (G_UNLIKELY (line[2][j] > last[2]))
903 line[2][j] = last[2];
906 lines = jpeg_read_raw_data (&dec->cinfo, line, v_samp[0] * DCTSIZE);
907 if (G_UNLIKELY (!lines)) {
908 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Error path: report unsupported subsampling via the element. */
913 format_not_supported:
915 gboolean ret = GST_FLOW_OK;
917 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
918 (_("Failed to decode JPEG image")),
919 ("Unsupported subsampling schema: v_samp factors: %u %u %u", v_samp[0],
920 v_samp[1], v_samp[2]), ret);
/* Pick the output video format from the JPEG colorspace, and (re)negotiate
 * the output state only when dimensions or format actually changed. */
927 gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc)
929 GstVideoCodecState *outstate;
931 GstVideoFormat format;
935 format = GST_VIDEO_FORMAT_RGB;
938 format = GST_VIDEO_FORMAT_GRAY8;
941 format = GST_VIDEO_FORMAT_I420;
945 /* Compare to currently configured output state */
946 outstate = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (dec));
948 info = &outstate->info;
950 if (width == GST_VIDEO_INFO_WIDTH (info) &&
951 height == GST_VIDEO_INFO_HEIGHT (info) &&
952 format == GST_VIDEO_INFO_FORMAT (info)) {
953 gst_video_codec_state_unref (outstate);
956 gst_video_codec_state_unref (outstate);
960 gst_video_decoder_set_output_state (GST_VIDEO_DECODER (dec), format,
961 width, height, dec->input_state);
/* JPEG is full-range BT.601 by definition. */
968 outstate->info.colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;
969 outstate->info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
970 outstate->info.colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN;
971 outstate->info.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
975 gst_video_codec_state_unref (outstate);
977 gst_video_decoder_negotiate (GST_VIDEO_DECODER (dec));
979 GST_DEBUG_OBJECT (dec, "max_v_samp_factor=%d", dec->cinfo.max_v_samp_factor);
980 GST_DEBUG_OBJECT (dec, "max_h_samp_factor=%d", dec->cinfo.max_h_samp_factor);
/* GstVideoDecoder::handle_frame vfunc — decode one complete JPEG image.
 * Maps the input buffer, reads the header, validates colorspace/sampling,
 * negotiates and allocates the output, then dispatches to the RGB /
 * grayscale / indirect / direct decoding path. libjpeg fatal errors
 * longjmp() back to the setjmp() below and are turned into element
 * errors (or dropped frames, subject to max-errors). */
984 gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec, GstVideoCodecFrame * frame)
986 GstFlowReturn ret = GST_FLOW_OK;
987 GstJpegDec *dec = (GstJpegDec *) bdec;
988 GstVideoFrame vframe;
992 gboolean need_unmap = TRUE;
993 GstVideoCodecState *state = NULL;
995 dec->current_frame = frame;
996 gst_buffer_map (frame->input_buffer, &dec->current_frame_map, GST_MAP_READ);
997 gst_jpeg_dec_fill_input_buffer (&dec->cinfo);
/* Recovery point for gst_jpeg_dec_my_error_exit()'s longjmp(). */
999 if (setjmp (dec->jerr.setjmp_buffer)) {
1000 code = dec->jerr.pub.msg_code;
1002 if (code == JERR_INPUT_EOF) {
1003 GST_DEBUG ("jpeg input EOF error, we probably need more data");
1004 goto need_more_data;
1010 hdr_ok = jpeg_read_header (&dec->cinfo, TRUE);
1011 if (G_UNLIKELY (hdr_ok != JPEG_HEADER_OK)) {
1012 GST_WARNING_OBJECT (dec, "reading the header failed, %d", hdr_ok);
1015 GST_LOG_OBJECT (dec, "num_components=%d", dec->cinfo.num_components);
1016 GST_LOG_OBJECT (dec, "jpeg_color_space=%d", dec->cinfo.jpeg_color_space);
1018 if (!dec->cinfo.num_components || !dec->cinfo.comp_info)
1019 goto components_not_supported;
1021 r_h = dec->cinfo.comp_info[0].h_samp_factor;
1022 r_v = dec->cinfo.comp_info[0].v_samp_factor;
1024 GST_LOG_OBJECT (dec, "r_h = %d, r_v = %d", r_h, r_v);
1026 if (dec->cinfo.num_components > 3)
1027 goto components_not_supported;
1029 /* verify color space expectation to avoid going *boom* or bogus output */
1030 if (dec->cinfo.jpeg_color_space != JCS_YCbCr &&
1031 dec->cinfo.jpeg_color_space != JCS_GRAYSCALE &&
1032 dec->cinfo.jpeg_color_space != JCS_RGB)
1033 goto unsupported_colorspace;
1035 #ifndef GST_DISABLE_GST_DEBUG
1039 for (i = 0; i < dec->cinfo.num_components; ++i) {
1040 GST_LOG_OBJECT (dec, "[%d] h_samp_factor=%d, v_samp_factor=%d, cid=%d",
1041 i, dec->cinfo.comp_info[i].h_samp_factor,
1042 dec->cinfo.comp_info[i].v_samp_factor,
1043 dec->cinfo.comp_info[i].component_id);
1048 /* prepare for raw output */
1049 dec->cinfo.do_fancy_upsampling = FALSE;
1050 dec->cinfo.do_block_smoothing = FALSE;
1051 dec->cinfo.out_color_space = dec->cinfo.jpeg_color_space;
1052 dec->cinfo.dct_method = dec->idct_method;
1053 dec->cinfo.raw_data_out = TRUE;
1055 GST_LOG_OBJECT (dec, "starting decompress");
1056 guarantee_huff_tables (&dec->cinfo);
1057 if (!jpeg_start_decompress (&dec->cinfo)) {
1058 GST_WARNING_OBJECT (dec, "failed to start decompression cycle");
1061 /* sanity checks to get safe and reasonable output */
1062 switch (dec->cinfo.jpeg_color_space) {
1064 if (dec->cinfo.num_components != 1)
1065 goto invalid_yuvrgbgrayscale;
1068 if (dec->cinfo.num_components != 3 || dec->cinfo.max_v_samp_factor > 1 ||
1069 dec->cinfo.max_h_samp_factor > 1)
1070 goto invalid_yuvrgbgrayscale;
1073 if (dec->cinfo.num_components != 3 ||
1074 r_v > 2 || r_v < dec->cinfo.comp_info[0].v_samp_factor ||
1075 r_v < dec->cinfo.comp_info[1].v_samp_factor ||
1076 r_h < dec->cinfo.comp_info[0].h_samp_factor ||
1077 r_h < dec->cinfo.comp_info[1].h_samp_factor)
1078 goto invalid_yuvrgbgrayscale;
1081 g_assert_not_reached ();
1085 width = dec->cinfo.output_width;
1086 height = dec->cinfo.output_height;
1088 if (G_UNLIKELY (width < MIN_WIDTH || width > MAX_WIDTH ||
1089 height < MIN_HEIGHT || height > MAX_HEIGHT))
1092 gst_jpeg_dec_negotiate (dec, width, height, dec->cinfo.jpeg_color_space);
1094 state = gst_video_decoder_get_output_state (bdec);
1095 ret = gst_video_decoder_allocate_output_frame (bdec, frame);
1096 if (G_UNLIKELY (ret != GST_FLOW_OK))
1099 if (!gst_video_frame_map (&vframe, &state->info, frame->output_buffer,
1103 GST_LOG_OBJECT (dec, "width %d, height %d", width, height);
/* Dispatch to the decoding path matching the colorspace/sampling. */
1105 if (dec->cinfo.jpeg_color_space == JCS_RGB) {
1106 gst_jpeg_dec_decode_rgb (dec, &vframe);
1107 } else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
1108 gst_jpeg_dec_decode_grayscale (dec, &vframe);
1110 GST_LOG_OBJECT (dec, "decompressing (reqired scanline buffer height = %u)",
1111 dec->cinfo.rec_outbuf_height);
1113 /* For some widths jpeglib requires more horizontal padding than I420
1114 * provides. In those cases we need to decode into separate buffers and then
1115 * copy over the data into our final picture buffer, otherwise jpeglib might
1116 * write over the end of a line into the beginning of the next line,
1117 * resulting in blocky artifacts on the left side of the picture. */
1118 if (G_UNLIKELY (width % (dec->cinfo.max_h_samp_factor * DCTSIZE) != 0
1119 || dec->cinfo.comp_info[0].h_samp_factor != 2
1120 || dec->cinfo.comp_info[1].h_samp_factor != 1
1121 || dec->cinfo.comp_info[2].h_samp_factor != 1)) {
1122 GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
1123 "indirect decoding using extra buffer copy");
1124 gst_jpeg_dec_decode_indirect (dec, &vframe, r_v, r_h,
1125 dec->cinfo.num_components);
1127 ret = gst_jpeg_dec_decode_direct (dec, &vframe);
1129 if (G_UNLIKELY (ret != GST_FLOW_OK))
1130 goto decode_direct_failed;
1134 gst_video_frame_unmap (&vframe);
1136 GST_LOG_OBJECT (dec, "decompressing finished");
1137 jpeg_finish_decompress (&dec->cinfo);
1139 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1140 ret = gst_video_decoder_finish_frame (bdec, frame);
/* Common exit: release input map and output state reference. */
1148 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1151 gst_video_codec_state_unref (state);
/* ERROR / special-case exit paths below. */
1158 GST_LOG_OBJECT (dec, "we need more data");
1165 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1166 (_("Failed to decode JPEG image")),
1167 ("Picture is too small or too big (%ux%u)", width, height), ret);
1168 ret = GST_FLOW_ERROR;
1173 gchar err_msg[JMSG_LENGTH_MAX];
1175 dec->jerr.pub.format_message ((j_common_ptr) (&dec->cinfo), err_msg);
1177 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1178 (_("Failed to decode JPEG image")), ("Decode error #%u: %s", code,
1181 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1182 gst_video_decoder_drop_frame (bdec, frame);
1184 jpeg_abort_decompress (&dec->cinfo);
1186 ret = GST_FLOW_ERROR;
1189 decode_direct_failed:
1191 /* already posted an error message */
1192 jpeg_abort_decompress (&dec->cinfo);
1197 const gchar *reason;
1199 reason = gst_flow_get_name (ret);
1201 GST_DEBUG_OBJECT (dec, "failed to alloc buffer, reason %s", reason);
1202 /* Reset for next time */
1203 jpeg_abort_decompress (&dec->cinfo);
1204 if (ret != GST_FLOW_EOS && ret != GST_FLOW_FLUSHING &&
1205 ret != GST_FLOW_NOT_LINKED) {
1206 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1207 (_("Failed to decode JPEG image")),
1208 ("Buffer allocation failed, reason: %s", reason), ret);
1209 jpeg_abort_decompress (&dec->cinfo);
1213 components_not_supported:
1215 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1216 (_("Failed to decode JPEG image")),
1217 ("number of components not supported: %d (max 3)",
1218 dec->cinfo.num_components), ret);
1219 jpeg_abort_decompress (&dec->cinfo);
1222 unsupported_colorspace:
1224 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1225 (_("Failed to decode JPEG image")),
1226 ("Picture has unknown or unsupported colourspace"), ret);
1227 jpeg_abort_decompress (&dec->cinfo);
1230 invalid_yuvrgbgrayscale:
1232 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1233 (_("Failed to decode JPEG image")),
1234 ("Picture is corrupt or unhandled YUV/RGB/grayscale layout"), ret);
1235 jpeg_abort_decompress (&dec->cinfo);
/* GstVideoDecoder::decide_allocation vfunc — chain up, then enable
 * GstVideoMeta on the negotiated buffer pool when downstream supports it
 * (lets us decode into non-default strides without an extra copy). */
1241 gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query)
1243 GstBufferPool *pool = NULL;
1244 GstStructure *config;
1246 if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query))
1249 if (gst_query_get_n_allocation_pools (query) > 0)
1250 gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
1255 config = gst_buffer_pool_get_config (pool);
1256 if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
1257 gst_buffer_pool_config_add_option (config,
1258 GST_BUFFER_POOL_OPTION_VIDEO_META);
1260 gst_buffer_pool_set_config (pool, config);
1261 gst_object_unref (pool);
/* GstVideoDecoder::start vfunc — reset parse state; default to
 * non-packetized until set_format says otherwise. */
1267 gst_jpeg_dec_start (GstVideoDecoder * bdec)
1269 GstJpegDec *dec = (GstJpegDec *) bdec;
1271 dec->saw_header = FALSE;
1272 dec->parse_entropy_len = 0;
1273 dec->parse_resync = FALSE;
1275 gst_video_decoder_set_packetized (bdec, FALSE);
/* GstVideoDecoder::flush vfunc — abort any in-progress decompression and
 * reset the stream parsing state. */
1281 gst_jpeg_dec_flush (GstVideoDecoder * bdec)
1283 GstJpegDec *dec = (GstJpegDec *) bdec;
1285 jpeg_abort_decompress (&dec->cinfo);
1286 dec->parse_entropy_len = 0;
1287 dec->parse_resync = FALSE;
1288 dec->saw_header = FALSE;
/* GObject set_property. max-errors uses an atomic store because it is
 * read from the streaming thread. */
1294 gst_jpeg_dec_set_property (GObject * object, guint prop_id,
1295 const GValue * value, GParamSpec * pspec)
1299 dec = GST_JPEG_DEC (object);
1302 case PROP_IDCT_METHOD:
1303 dec->idct_method = g_value_get_enum (value);
1305 case PROP_MAX_ERRORS:
1306 g_atomic_int_set (&dec->max_errors, g_value_get_int (value));
1310 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GObject get_property — mirrors set_property, atomic read for max-errors. */
1316 gst_jpeg_dec_get_property (GObject * object, guint prop_id, GValue * value,
1321 dec = GST_JPEG_DEC (object);
1324 case PROP_IDCT_METHOD:
1325 g_value_set_enum (value, dec->idct_method);
1327 case PROP_MAX_ERRORS:
1328 g_value_set_int (value, g_atomic_int_get (&dec->max_errors));
1332 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GstVideoDecoder::stop vfunc — release the temporary decode buffers. */
1338 gst_jpeg_dec_stop (GstVideoDecoder * bdec)
1340 GstJpegDec *dec = (GstJpegDec *) bdec;
1342 gst_jpeg_dec_free_buffers (dec);