2 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
3 * Copyright (C) <2009> Tim-Philipp Müller <tim centricular net>
4 * Copyright (C) 2012 Collabora Ltd.
5 * Author : Edward Hervey <edward@collabora.com>
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Library General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Library General Public License for more details.
17 * You should have received a copy of the GNU Library General Public
18 * License along with this library; if not, write to the
19 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
20 * Boston, MA 02110-1301, USA.
24 * SECTION:element-jpegdec
26 * Decodes jpeg images.
29 * <title>Example launch line</title>
31 * gst-launch-1.0 -v filesrc location=mjpeg.avi ! avidemux ! queue ! jpegdec ! videoconvert ! videoscale ! autovideosink
 * ]| The above pipeline decodes the mjpeg stream and renders it to the screen.
41 #include "gstjpegdec.h"
43 #include <gst/video/video.h>
44 #include <gst/video/gstvideometa.h>
45 #include <gst/video/gstvideopool.h>
46 #include "gst/gst-i18n-plugin.h"
50 #define MAX_WIDTH 65535
52 #define MAX_HEIGHT 65535
54 #define CINFO_GET_JPEGDEC(cinfo_ptr) \
55 (((struct GstJpegDecSourceMgr*)((cinfo_ptr)->src))->dec)
57 #define JPEG_DEFAULT_IDCT_METHOD JDCT_FASTEST
58 #define JPEG_DEFAULT_MAX_ERRORS 0
/* Source pad template: the raw video formats this decoder can output
 * (planar I420, packed RGB variants, and 8-bit grayscale). */
static GstStaticPadTemplate gst_jpeg_dec_src_pad_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
        ("{ I420, RGB, BGR, RGBx, xRGB, BGRx, xBGR, GRAY8 }"))
77 /* FIXME: sof-marker is for IJG libjpeg 8, should be different for 6.2 */
78 /* FIXME: add back "sof-marker = (int) { 0, 1, 2, 5, 6, 7, 9, 10, 13, 14 }"
79 * once we have a parser and/or demuxer set caps properly */
/* Sink pad template: accepts any image/jpeg stream; image framing is
 * recovered by the parse vfunc when upstream does not deliver whole
 * pictures per buffer. */
static GstStaticPadTemplate gst_jpeg_dec_sink_pad_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_STATIC_CAPS ("image/jpeg")
87 GST_DEBUG_CATEGORY_STATIC (jpeg_dec_debug);
88 #define GST_CAT_DEFAULT jpeg_dec_debug
89 GST_DEBUG_CATEGORY_STATIC (GST_CAT_PERFORMANCE);
/* GObject property accessors */
static void gst_jpeg_dec_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_jpeg_dec_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);
/* GstVideoDecoder virtual-method implementations (wired up in class_init) */
static gboolean gst_jpeg_dec_set_format (GstVideoDecoder * dec,
    GstVideoCodecState * state);
static gboolean gst_jpeg_dec_start (GstVideoDecoder * bdec);
static gboolean gst_jpeg_dec_stop (GstVideoDecoder * bdec);
static gboolean gst_jpeg_dec_flush (GstVideoDecoder * bdec);
static GstFlowReturn gst_jpeg_dec_parse (GstVideoDecoder * bdec,
    GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos);
static GstFlowReturn gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec,
    GstVideoCodecFrame * frame);
static gboolean gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec,
108 #define gst_jpeg_dec_parent_class parent_class
109 G_DEFINE_TYPE (GstJpegDec, gst_jpeg_dec, GST_TYPE_VIDEO_DECODER);
/* GObject finalize: releases the libjpeg decompress context and the cached
 * input codec state, then chains up to the parent class. */
gst_jpeg_dec_finalize (GObject * object)
  GstJpegDec *dec = GST_JPEG_DEC (object);
  /* frees all libjpeg-side allocations tied to cinfo */
  jpeg_destroy_decompress (&dec->cinfo);
  if (dec->input_state)
    gst_video_codec_state_unref (dec->input_state);
  G_OBJECT_CLASS (parent_class)->finalize (object);
/* Class init: installs properties, pad templates, element metadata, and
 * hooks up all GstVideoDecoder virtual methods. */
gst_jpeg_dec_class_init (GstJpegDecClass * klass)
  GObjectClass *gobject_class;
  GstElementClass *element_class;
  GstVideoDecoderClass *vdec_class;
  gobject_class = (GObjectClass *) klass;
  element_class = (GstElementClass *) klass;
  vdec_class = (GstVideoDecoderClass *) klass;
  parent_class = g_type_class_peek_parent (klass);
  gobject_class->finalize = gst_jpeg_dec_finalize;
  gobject_class->set_property = gst_jpeg_dec_set_property;
  gobject_class->get_property = gst_jpeg_dec_get_property;
  /* idct-method: selects the libjpeg IDCT algorithm (speed/quality trade) */
  g_object_class_install_property (gobject_class, PROP_IDCT_METHOD,
      g_param_spec_enum ("idct-method", "IDCT Method",
          "The IDCT algorithm to use", GST_TYPE_IDCT_METHOD,
          JPEG_DEFAULT_IDCT_METHOD,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  /**
   * GstJpegDec:max-errors:
   *
   * Error out after receiving N consecutive decoding errors
   * (-1 = never error out, 0 = automatic, 1 = fail on first error, etc.)
   *
   * Deprecated: 1.3.1: Property wasn't used internally
   */
  g_object_class_install_property (gobject_class, PROP_MAX_ERRORS,
      g_param_spec_int ("max-errors", "Maximum Consecutive Decoding Errors",
          "(Deprecated) Error out after receiving N consecutive decoding errors"
          " (-1 = never fail, 0 = automatic, 1 = fail on first error)",
          -1, G_MAXINT, JPEG_DEFAULT_MAX_ERRORS,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_jpeg_dec_src_pad_template));
  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_jpeg_dec_sink_pad_template));
  gst_element_class_set_static_metadata (element_class, "JPEG image decoder",
      "Codec/Decoder/Image",
      "Decode images from JPEG format", "Wim Taymans <wim@fluendo.com>");
  /* base-class vfuncs */
  vdec_class->start = gst_jpeg_dec_start;
  vdec_class->stop = gst_jpeg_dec_stop;
  vdec_class->flush = gst_jpeg_dec_flush;
  vdec_class->parse = gst_jpeg_dec_parse;
  vdec_class->set_format = gst_jpeg_dec_set_format;
  vdec_class->handle_frame = gst_jpeg_dec_handle_frame;
  vdec_class->decide_allocation = gst_jpeg_dec_decide_allocation;
  GST_DEBUG_CATEGORY_INIT (jpeg_dec_debug, "jpegdec", 0, "JPEG decoder");
  GST_DEBUG_CATEGORY_GET (GST_CAT_PERFORMANCE, "GST_PERFORMANCE");
/* libjpeg source-manager callback: (re)points the source at the currently
 * mapped input buffer.  The whole frame is handed over in one go, so
 * libjpeg should never need to refill mid-image. */
gst_jpeg_dec_fill_input_buffer (j_decompress_ptr cinfo)
  dec = CINFO_GET_JPEGDEC (cinfo);
  g_return_val_if_fail (dec != NULL, FALSE);
  g_return_val_if_fail (dec->current_frame != NULL, FALSE);
  g_return_val_if_fail (dec->current_frame_map.data != NULL, FALSE);
  cinfo->src->next_input_byte = dec->current_frame_map.data;
  cinfo->src->bytes_in_buffer = dec->current_frame_map.size;
/* libjpeg source-manager callback: nothing to set up, only logged. */
gst_jpeg_dec_init_source (j_decompress_ptr cinfo)
  GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "init_source");
/* libjpeg source-manager callback: advances the read pointer within the
 * in-memory buffer; only skips when enough bytes remain. */
gst_jpeg_dec_skip_input_data (j_decompress_ptr cinfo, glong num_bytes)
  GstJpegDec *dec = CINFO_GET_JPEGDEC (cinfo);
  GST_DEBUG_OBJECT (dec, "skip %ld bytes", num_bytes);
  if (num_bytes > 0 && cinfo->src->bytes_in_buffer >= num_bytes) {
    cinfo->src->next_input_byte += (size_t) num_bytes;
    cinfo->src->bytes_in_buffer -= (size_t) num_bytes;
/* libjpeg source-manager callback invoked on marker resync; only logged. */
gst_jpeg_dec_resync_to_restart (j_decompress_ptr cinfo, gint desired)
  GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "resync_to_start");
/* libjpeg source-manager callback: nothing to tear down, only logged. */
gst_jpeg_dec_term_source (j_decompress_ptr cinfo)
  GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "term_source");
/* Override of libjpeg's output_message: suppresses stderr spew; errors are
 * reported through the GStreamer error path instead. */
gst_jpeg_dec_my_output_message (j_common_ptr cinfo)
  return;                       /* do nothing */
/* Override of libjpeg's emit_message: drops warnings/trace messages. */
gst_jpeg_dec_my_emit_message (j_common_ptr cinfo, int msg_level)
  /* GST_LOG_OBJECT (CINFO_GET_JPEGDEC (&cinfo), "msg_level=%d", msg_level); */
/* Override of libjpeg's error_exit: instead of calling exit(), longjmp back
 * into handle_frame's setjmp point so fatal decode errors become normal
 * GStreamer error handling. */
gst_jpeg_dec_my_error_exit (j_common_ptr cinfo)
  struct GstJpegDecErrorMgr *err_mgr = (struct GstJpegDecErrorMgr *) cinfo->err;
  (*cinfo->err->output_message) (cinfo);
  longjmp (err_mgr->setjmp_buffer, 1);
/* Instance init: sets up the libjpeg decompress context with our custom
 * error handlers and in-memory source manager, and applies property
 * defaults. */
gst_jpeg_dec_init (GstJpegDec * dec)
  GST_DEBUG ("initializing");
  /* setup jpeglib: zero the structs, then install our error callbacks
   * before jpeg_create_decompress so early failures are caught too */
  memset (&dec->cinfo, 0, sizeof (dec->cinfo));
  memset (&dec->jerr, 0, sizeof (dec->jerr));
  dec->cinfo.err = jpeg_std_error (&dec->jerr.pub);
  dec->jerr.pub.output_message = gst_jpeg_dec_my_output_message;
  dec->jerr.pub.emit_message = gst_jpeg_dec_my_emit_message;
  dec->jerr.pub.error_exit = gst_jpeg_dec_my_error_exit;
  jpeg_create_decompress (&dec->cinfo);
  /* custom source manager that feeds from the mapped input GstBuffer */
  dec->cinfo.src = (struct jpeg_source_mgr *) &dec->jsrc;
  dec->cinfo.src->init_source = gst_jpeg_dec_init_source;
  dec->cinfo.src->fill_input_buffer = gst_jpeg_dec_fill_input_buffer;
  dec->cinfo.src->skip_input_data = gst_jpeg_dec_skip_input_data;
  dec->cinfo.src->resync_to_restart = gst_jpeg_dec_resync_to_restart;
  dec->cinfo.src->term_source = gst_jpeg_dec_term_source;
  /* init properties */
  dec->idct_method = JPEG_DEFAULT_IDCT_METHOD;
  dec->max_errors = JPEG_DEFAULT_MAX_ERRORS;
281 static inline gboolean
282 gst_jpeg_dec_parse_tag_has_entropy_segment (guint8 tag)
284 if (tag == 0xda || (tag >= 0xd0 && tag <= 0xd7))
/* GstVideoDecoder::parse vfunc: scans the adapter for JPEG marker structure
 * (SOI ... segments ... EOI) and carves complete images out of the byte
 * stream.  Parsing state survives across calls in dec->saw_header,
 * dec->parse_resync and dec->parse_entropy_len, so partial images can be
 * resumed when more data arrives.
 * NOTE(review): several lines of this function are missing from the visible
 * text; the remaining code lines are preserved untouched. */
gst_jpeg_dec_parse (GstVideoDecoder * bdec, GstVideoCodecFrame * frame,
    GstAdapter * adapter, gboolean at_eos)
  gint offset = 0, noffset;
  GstJpegDec *dec = (GstJpegDec *) bdec;
  /* FIXME : The overhead of using scan_uint32 is massive */
  size = gst_adapter_available (adapter);
  GST_DEBUG ("Parsing jpeg image data (%u bytes)", size);
  GST_DEBUG ("Flushing all data out");
  /* If we have leftover data, throw it away */
  if (!dec->saw_header)
    goto have_full_frame;
  if (!dec->saw_header) {
    /* we expect at least 4 bytes, first of which start marker */
    gst_adapter_masked_scan_uint32 (adapter, 0xffff0000, 0xffd80000, 0,
    GST_DEBUG ("ret:%d", ret);
    /* drop any garbage before the SOI marker */
    gst_adapter_flush (adapter, ret);
    dec->saw_header = TRUE;
  GST_DEBUG ("offset:%d, size:%d", offset, size);
  /* look for the next 0xff marker byte from the current offset */
  gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
      offset, size - offset, &value);
  /* lost sync if 0xff marker not where expected */
  if ((resync = (noffset != offset))) {
    GST_DEBUG ("Lost sync at 0x%08x, resyncing", offset + 2);
  /* may have marker, but could have been resyncing */
  resync = resync || dec->parse_resync;
  /* Skip over extra 0xff */
  while ((noffset >= 0) && ((value & 0xff) == 0xff)) {
    gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
        noffset, size - noffset, &value);
  /* enough bytes left for marker? (we need 0xNN after the 0xff) */
    GST_DEBUG ("at end of input and no EOI marker found, need more data");
  /* now lock on the marker we found */
  value = value & 0xff;
    GST_DEBUG ("0x%08x: EOI marker", offset + 2);
    /* clear parse state */
    dec->saw_header = FALSE;
    dec->parse_resync = FALSE;
    goto have_full_frame;
    /* Skip this frame if we found another SOI marker */
    GST_DEBUG ("0x%08x: SOI marker before EOI, skipping", offset + 2);
    dec->parse_resync = FALSE;
  /* restart markers 0xd0-0xd7 carry no length field */
  if (value >= 0xd0 && value <= 0xd7)
  /* peek tag and subsequent length */
  if (offset + 2 + 4 > size)
  gst_adapter_masked_scan_uint32_peek (adapter, 0x0, 0x0, offset + 2, 4,
  /* segment length field is only 16 bits wide */
  frame_len = frame_len & 0xffff;
  GST_DEBUG ("0x%08x: tag %02x, frame_len=%u", offset + 2, value, frame_len);
  /* the frame length includes the 2 bytes for the length; here we want at
   * least 2 more bytes at the end for an end marker */
  if (offset + 2 + 2 + frame_len + 2 > size) {
  if (gst_jpeg_dec_parse_tag_has_entropy_segment (value)) {
    guint eseglen = dec->parse_entropy_len;
    GST_DEBUG ("0x%08x: finding entropy segment length (eseglen:%d)",
        offset + 2, eseglen);
    if (size < offset + 2 + frame_len + eseglen)
    noffset = offset + 2 + frame_len + dec->parse_entropy_len;
    GST_DEBUG ("noffset:%d, size:%d, size - noffset:%d",
        noffset, size, size - noffset);
    noffset = gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00,
        0x0000ff00, noffset, size - noffset, &value);
    /* remember how far into the entropy data we already searched so the
     * next call can resume instead of rescanning from the start */
    dec->parse_entropy_len = size - offset - 4 - frame_len - 2;
    if ((value & 0xff) != 0x00) {
      eseglen = noffset - offset - frame_len - 2;
    dec->parse_entropy_len = 0;
    frame_len += eseglen;
    GST_DEBUG ("entropy segment length=%u => frame_len=%u", eseglen,
  /* check if we will still be in sync if we interpret
   * this as a sync point and skip this frame */
  noffset = offset + frame_len + 2;
  noffset = gst_adapter_masked_scan_uint32 (adapter, 0x0000ff00, 0x0000ff00,
  /* ignore and continue resyncing until we hit the end
   * of our data or find a sync point that looks okay */
  GST_DEBUG ("found sync at 0x%x", offset + 2);
  /* Add current data to output buffer */
  toadd += frame_len + 2;
  offset += frame_len + 2;
  /* not enough data yet: hand what we have to the base class and wait */
  gst_video_decoder_add_to_frame (bdec, toadd);
  return GST_VIDEO_DECODER_FLOW_NEED_DATA;
  /* a complete image is in the adapter: submit it for decoding */
  gst_video_decoder_add_to_frame (bdec, toadd);
  return gst_video_decoder_have_frame (bdec);
  gst_adapter_flush (adapter, size);
467 /* shamelessly ripped from jpegutils.c in mjpegtools */
469 add_huff_table (j_decompress_ptr dinfo,
470 JHUFF_TBL ** htblptr, const UINT8 * bits, const UINT8 * val)
471 /* Define a Huffman table */
475 if (*htblptr == NULL)
476 *htblptr = jpeg_alloc_huff_table ((j_common_ptr) dinfo);
480 /* Copy the number-of-symbols-of-each-code-length counts */
481 memcpy ((*htblptr)->bits, bits, sizeof ((*htblptr)->bits));
483 /* Validate the counts. We do this here mainly so we can copy the right
484 * number of symbols from the val[] array, without risking marching off
485 * the end of memory. jchuff.c will do a more thorough test later.
488 for (len = 1; len <= 16; len++)
489 nsymbols += bits[len];
490 if (nsymbols < 1 || nsymbols > 256)
491 g_error ("jpegutils.c: add_huff_table failed badly. ");
493 memcpy ((*htblptr)->huffval, val, nsymbols * sizeof (UINT8));
/* Installs the four standard Huffman tables from JPEG spec Annex K.3, for
 * streams (e.g. some MJPEG) that omit DHT segments.  Valid only for 8-bit
 * data precision. */
std_huff_tables (j_decompress_ptr dinfo)
/* Set up the standard Huffman tables (cf. JPEG standard section K.3) */
/* IMPORTANT: these are only valid for 8-bit data precision! */
  static const UINT8 bits_dc_luminance[17] =
      { /* 0-base */ 0, 0, 1, 5, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0 };
  static const UINT8 val_dc_luminance[] =
      { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
  static const UINT8 bits_dc_chrominance[17] =
      { /* 0-base */ 0, 0, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0 };
  static const UINT8 val_dc_chrominance[] =
      { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
  static const UINT8 bits_ac_luminance[17] =
      { /* 0-base */ 0, 0, 2, 1, 3, 3, 2, 4, 3, 5, 5, 4, 4, 0, 0, 1, 0x7d };
  static const UINT8 val_ac_luminance[] =
      { 0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12,
    0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07,
    0x22, 0x71, 0x14, 0x32, 0x81, 0x91, 0xa1, 0x08,
    0x23, 0x42, 0xb1, 0xc1, 0x15, 0x52, 0xd1, 0xf0,
    0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16,
    0x17, 0x18, 0x19, 0x1a, 0x25, 0x26, 0x27, 0x28,
    0x29, 0x2a, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39,
    0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49,
    0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59,
    0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69,
    0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79,
    0x7a, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89,
    0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98,
    0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
    0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6,
    0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4, 0xc5,
    0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4,
    0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xe1, 0xe2,
    0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea,
    0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
  static const UINT8 bits_ac_chrominance[17] =
      { /* 0-base */ 0, 0, 2, 1, 2, 4, 4, 3, 4, 7, 5, 4, 4, 0, 1, 2, 0x77 };
  static const UINT8 val_ac_chrominance[] =
      { 0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21,
    0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71,
    0x13, 0x22, 0x32, 0x81, 0x08, 0x14, 0x42, 0x91,
    0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33, 0x52, 0xf0,
    0x15, 0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34,
    0xe1, 0x25, 0xf1, 0x17, 0x18, 0x19, 0x1a, 0x26,
    0x27, 0x28, 0x29, 0x2a, 0x35, 0x36, 0x37, 0x38,
    0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
    0x49, 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58,
    0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
    0x69, 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78,
    0x79, 0x7a, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
    0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96,
    0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5,
    0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4,
    0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3,
    0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2,
    0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda,
    0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9,
    0xea, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
  /* slots 0 = luminance, 1 = chrominance */
  add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[0],
      bits_dc_luminance, val_dc_luminance);
  add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[0],
      bits_ac_luminance, val_ac_luminance);
  add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[1],
      bits_dc_chrominance, val_dc_chrominance);
  add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[1],
      bits_ac_chrominance, val_ac_chrominance);
/* If the stream defined no Huffman tables at all, fall back to the standard
 * Annex K tables; mixed streams (some tables defined) are left alone. */
guarantee_huff_tables (j_decompress_ptr dinfo)
  if ((dinfo->dc_huff_tbl_ptrs[0] == NULL) &&
      (dinfo->dc_huff_tbl_ptrs[1] == NULL) &&
      (dinfo->ac_huff_tbl_ptrs[0] == NULL) &&
      (dinfo->ac_huff_tbl_ptrs[1] == NULL)) {
    GST_DEBUG ("Generating standard Huffman tables for this frame.");
    std_huff_tables (dinfo);
/* GstVideoDecoder::set_format vfunc: decides packetized mode from the
 * caps framerate and caches the input state. */
gst_jpeg_dec_set_format (GstVideoDecoder * dec, GstVideoCodecState * state)
  GstJpegDec *jpeg = GST_JPEG_DEC (dec);
  GstVideoInfo *info = &state->info;
  /* FIXME : previously jpegdec would handled input as packetized
   * if the framerate was present. Here we consider it packetized if
   * the fps is != 1/1 */
  /* NOTE(review): the && below means a framerate like 25/1 (D == 1) is NOT
   * treated as packetized, which contradicts the "fps != 1/1" comment;
   * "||" would match the stated intent — confirm before changing */
  if (GST_VIDEO_INFO_FPS_N (info) != 1 && GST_VIDEO_INFO_FPS_D (info) != 1)
    gst_video_decoder_set_packetized (dec, TRUE);
    gst_video_decoder_set_packetized (dec, FALSE);
  if (jpeg->input_state)
    gst_video_codec_state_unref (jpeg->input_state);
  jpeg->input_state = gst_video_codec_state_ref (state);
/* Copies a row while dropping every second source byte — horizontal 2:1
 * downsample used when chroma rows come out of libjpeg at full width. */
hresamplecpy1 (guint8 * dest, const guint8 * src, guint len)
  for (i = 0; i < len; ++i) {
    /* equivalent to: dest[i] = src[i << 1] */
/* Frees the 16 per-component scratch rows used for indirect decoding and
 * resets the recorded allocation width. */
gst_jpeg_dec_free_buffers (GstJpegDec * dec)
  for (i = 0; i < 16; i++) {
    g_free (dec->idr_y[i]);
    g_free (dec->idr_u[i]);
    g_free (dec->idr_v[i]);
    dec->idr_y[i] = NULL;
    dec->idr_u[i] = NULL;
    dec->idr_v[i] = NULL;
  dec->idr_width_allocated = 0;
/* Ensures the 16 scratch rows per component are allocated at maxrowbytes
 * width; no-op when already sized correctly.  Returns FALSE on OOM. */
static inline gboolean
gst_jpeg_dec_ensure_buffers (GstJpegDec * dec, guint maxrowbytes)
  if (G_LIKELY (dec->idr_width_allocated == maxrowbytes))
  /* FIXME: maybe just alloc one or three blocks altogether? */
  for (i = 0; i < 16; i++) {
    dec->idr_y[i] = g_try_realloc (dec->idr_y[i], maxrowbytes);
    dec->idr_u[i] = g_try_realloc (dec->idr_u[i], maxrowbytes);
    dec->idr_v[i] = g_try_realloc (dec->idr_v[i], maxrowbytes);
    if (G_UNLIKELY (!dec->idr_y[i] || !dec->idr_u[i] || !dec->idr_v[i])) {
      GST_WARNING_OBJECT (dec, "out of memory, i=%d, bytes=%u", i, maxrowbytes);
  dec->idr_width_allocated = maxrowbytes;
  GST_LOG_OBJECT (dec, "allocated temp memory, %u bytes/row", maxrowbytes);
/* Decodes a grayscale JPEG via scratch rows, then copies pixel-by-pixel
 * into the GRAY8 output frame honouring its pixel/row strides. */
gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, GstVideoFrame * frame)
  guchar **scanarray[1] = { rows };
  gint pstride, rstride;
  GST_DEBUG_OBJECT (dec, "indirect decoding of grayscale");
  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);
  if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
  base[0] = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
  /* point the row array at the preallocated scratch rows */
  memcpy (rows, dec->idr_y, 16 * sizeof (gpointer));
  lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
  if (G_LIKELY (lines > 0)) {
    for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
      for (k = 0; k < width; k++) {
        base[0][p] = rows[j][k];
  GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Decodes an RGB JPEG via scratch rows and interleaves the three planes
 * into the packed output frame pixel-by-pixel. */
gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame)
  guchar *r_rows[16], *g_rows[16], *b_rows[16];
  guchar **scanarray[3] = { r_rows, g_rows, b_rows };
  guint pstride, rstride;
  GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");
  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);
  if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
  for (i = 0; i < 3; i++)
    base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
  /* all components share the pixel/row stride of component 0 here */
  pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
  memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
  memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
  memcpy (b_rows, dec->idr_v, 16 * sizeof (gpointer));
  lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
  if (G_LIKELY (lines > 0)) {
    for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
      for (k = 0; k < width; k++) {
        base[0][p] = r_rows[j][k];
        base[1][p] = g_rows[j][k];
        base[2][p] = b_rows[j][k];
  GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
767 gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame, gint r_v,
770 guchar *y_rows[16], *u_rows[16], *v_rows[16];
771 guchar **scanarray[3] = { y_rows, u_rows, v_rows };
774 guchar *base[3], *last[3];
778 GST_DEBUG_OBJECT (dec,
779 "unadvantageous width or r_h, taking slow route involving memcpy");
781 width = GST_VIDEO_FRAME_WIDTH (frame);
782 height = GST_VIDEO_FRAME_HEIGHT (frame);
784 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
787 for (i = 0; i < 3; i++) {
788 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
789 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
790 /* make sure we don't make jpeglib write beyond our buffer,
791 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
792 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
793 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
796 memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer));
797 memcpy (u_rows, dec->idr_u, 16 * sizeof (gpointer));
798 memcpy (v_rows, dec->idr_v, 16 * sizeof (gpointer));
800 /* fill chroma components for grayscale */
802 GST_DEBUG_OBJECT (dec, "grayscale, filling chroma");
803 for (i = 0; i < 16; i++) {
804 memset (u_rows[i], GST_ROUND_UP_32 (width), 0x80);
805 memset (v_rows[i], GST_ROUND_UP_32 (width), 0x80);
809 for (i = 0; i < height; i += r_v * DCTSIZE) {
810 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, r_v * DCTSIZE);
811 if (G_LIKELY (lines > 0)) {
812 for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) {
813 if (G_LIKELY (base[0] <= last[0])) {
814 memcpy (base[0], y_rows[j], stride[0]);
815 base[0] += stride[0];
818 if (G_LIKELY (base[0] <= last[0])) {
819 memcpy (base[0], y_rows[j + 1], stride[0]);
820 base[0] += stride[0];
823 if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) {
825 memcpy (base[1], u_rows[k], stride[1]);
826 memcpy (base[2], v_rows[k], stride[2]);
827 } else if (r_h == 1) {
828 hresamplecpy1 (base[1], u_rows[k], stride[1]);
829 hresamplecpy1 (base[2], v_rows[k], stride[2]);
831 /* FIXME: implement (at least we avoid crashing by doing nothing) */
835 if (r_v == 2 || (k & 1) != 0) {
836 base[1] += stride[1];
837 base[2] += stride[2];
841 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Fast path: builds per-component line-pointer arrays aiming straight into
 * the output frame so libjpeg decodes without an intermediate copy.
 * Clamps every pointer to the last valid line so libjpeg cannot write
 * past the buffer when height is not a multiple of (v_samp * DCTSIZE).
 * NOTE(review): parts of this function are missing from the visible text;
 * the remaining code lines are preserved untouched. */
gst_jpeg_dec_decode_direct (GstJpegDec * dec, GstVideoFrame * frame)
  guchar **line[3];             /* the jpeg line buffer */
  guchar *y[4 * DCTSIZE] = { NULL, };   /* alloc enough for the lines */
  guchar *u[4 * DCTSIZE] = { NULL, };   /* r_v will be <4 */
  guchar *v[4 * DCTSIZE] = { NULL, };
  gint lines, v_samp[3];
  guchar *base[3], *last[3];
  v_samp[0] = dec->cinfo.comp_info[0].v_samp_factor;
  v_samp[1] = dec->cinfo.comp_info[1].v_samp_factor;
  v_samp[2] = dec->cinfo.comp_info[2].v_samp_factor;
  if (G_UNLIKELY (v_samp[0] > 2 || v_samp[1] > 2 || v_samp[2] > 2))
    goto format_not_supported;
  height = GST_VIDEO_FRAME_HEIGHT (frame);
  for (i = 0; i < 3; i++) {
    base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
    stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
    /* make sure we don't make jpeglib write beyond our buffer,
     * which might happen if (height % (r_v*DCTSIZE)) != 0 */
    last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
        (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
  /* let jpeglib decode directly into our final buffer */
  GST_DEBUG_OBJECT (dec, "decoding directly into output buffer");
  for (i = 0; i < height; i += v_samp[0] * DCTSIZE) {
    for (j = 0; j < (v_samp[0] * DCTSIZE); ++j) {
      line[0][j] = base[0] + (i + j) * stride[0];
      if (G_UNLIKELY (line[0][j] > last[0]))
        line[0][j] = last[0];
      /* chroma line mapping depends on whether chroma is subsampled
       * vertically relative to luma */
      if (v_samp[1] == v_samp[0]) {
        line[1][j] = base[1] + ((i + j) / 2) * stride[1];
      } else if (j < (v_samp[1] * DCTSIZE)) {
        line[1][j] = base[1] + ((i / 2) + j) * stride[1];
      if (G_UNLIKELY (line[1][j] > last[1]))
        line[1][j] = last[1];
      if (v_samp[2] == v_samp[0]) {
        line[2][j] = base[2] + ((i + j) / 2) * stride[2];
      } else if (j < (v_samp[2] * DCTSIZE)) {
        line[2][j] = base[2] + ((i / 2) + j) * stride[2];
      if (G_UNLIKELY (line[2][j] > last[2]))
        line[2][j] = last[2];
    lines = jpeg_read_raw_data (&dec->cinfo, line, v_samp[0] * DCTSIZE);
    if (G_UNLIKELY (!lines)) {
      GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
format_not_supported:
  /* NOTE(review): declared gboolean but passed to GST_VIDEO_DECODER_ERROR
   * and apparently returned as a flow value — should likely be
   * GstFlowReturn; confirm against the elided return statements */
  gboolean ret = GST_FLOW_OK;
  GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
      (_("Failed to decode JPEG image")),
      ("Unsupported subsampling schema: v_samp factors: %u %u %u", v_samp[0],
          v_samp[1], v_samp[2]), ret);
/* Configures (or re-confirms) the downstream output state for the given
 * dimensions and libjpeg colourspace; skips renegotiation when nothing
 * changed.  For YUV output, JPEG full-range BT.601 colorimetry is set. */
gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc)
  GstVideoCodecState *outstate;
  GstVideoFormat format;
  format = GST_VIDEO_FORMAT_RGB;
  format = GST_VIDEO_FORMAT_GRAY8;
  format = GST_VIDEO_FORMAT_I420;
  /* Compare to currently configured output state */
  outstate = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (dec));
  info = &outstate->info;
  if (width == GST_VIDEO_INFO_WIDTH (info) &&
      height == GST_VIDEO_INFO_HEIGHT (info) &&
      format == GST_VIDEO_INFO_FORMAT (info)) {
    gst_video_codec_state_unref (outstate);
  gst_video_codec_state_unref (outstate);
  gst_video_decoder_set_output_state (GST_VIDEO_DECODER (dec), format,
      width, height, dec->input_state);
  /* JPEG is full-range BT.601 */
  outstate->info.colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;
  outstate->info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
  outstate->info.colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN;
  outstate->info.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
  gst_video_codec_state_unref (outstate);
  gst_video_decoder_negotiate (GST_VIDEO_DECODER (dec));
  GST_DEBUG_OBJECT (dec, "max_v_samp_factor=%d", dec->cinfo.max_v_samp_factor);
  GST_DEBUG_OBJECT (dec, "max_h_samp_factor=%d", dec->cinfo.max_h_samp_factor);
/* GstVideoDecoder::handle_frame vfunc: maps the input buffer, reads the
 * JPEG header, validates colourspace/sampling, negotiates output, then
 * dispatches to the RGB / grayscale / indirect / direct decode paths.
 * Fatal libjpeg errors longjmp back to the setjmp below and are turned
 * into GStreamer error flow.  Error labels follow the function body.
 * NOTE(review): many lines of this function are missing from the visible
 * text; the remaining code lines are preserved untouched. */
gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec, GstVideoCodecFrame * frame)
  GstFlowReturn ret = GST_FLOW_OK;
  GstJpegDec *dec = (GstJpegDec *) bdec;
  GstVideoFrame vframe;
  gboolean need_unmap = TRUE;
  GstVideoCodecState *state = NULL;
  /* hand the mapped input to the libjpeg source manager */
  dec->current_frame = frame;
  gst_buffer_map (frame->input_buffer, &dec->current_frame_map, GST_MAP_READ);
  gst_jpeg_dec_fill_input_buffer (&dec->cinfo);
  /* landing point for gst_jpeg_dec_my_error_exit's longjmp */
  if (setjmp (dec->jerr.setjmp_buffer)) {
    code = dec->jerr.pub.msg_code;
    if (code == JERR_INPUT_EOF) {
      GST_DEBUG ("jpeg input EOF error, we probably need more data");
      goto need_more_data;
  hdr_ok = jpeg_read_header (&dec->cinfo, TRUE);
  if (G_UNLIKELY (hdr_ok != JPEG_HEADER_OK)) {
    GST_WARNING_OBJECT (dec, "reading the header failed, %d", hdr_ok);
  GST_LOG_OBJECT (dec, "num_components=%d", dec->cinfo.num_components);
  GST_LOG_OBJECT (dec, "jpeg_color_space=%d", dec->cinfo.jpeg_color_space);
  if (!dec->cinfo.num_components || !dec->cinfo.comp_info)
    goto components_not_supported;
  r_h = dec->cinfo.comp_info[0].h_samp_factor;
  r_v = dec->cinfo.comp_info[0].v_samp_factor;
  GST_LOG_OBJECT (dec, "r_h = %d, r_v = %d", r_h, r_v);
  if (dec->cinfo.num_components > 3)
    goto components_not_supported;
  /* verify color space expectation to avoid going *boom* or bogus output */
  if (dec->cinfo.jpeg_color_space != JCS_YCbCr &&
      dec->cinfo.jpeg_color_space != JCS_GRAYSCALE &&
      dec->cinfo.jpeg_color_space != JCS_RGB)
    goto unsupported_colorspace;
#ifndef GST_DISABLE_GST_DEBUG
  for (i = 0; i < dec->cinfo.num_components; ++i) {
    GST_LOG_OBJECT (dec, "[%d] h_samp_factor=%d, v_samp_factor=%d, cid=%d",
        i, dec->cinfo.comp_info[i].h_samp_factor,
        dec->cinfo.comp_info[i].v_samp_factor,
        dec->cinfo.comp_info[i].component_id);
  /* prepare for raw output */
  dec->cinfo.do_fancy_upsampling = FALSE;
  dec->cinfo.do_block_smoothing = FALSE;
  dec->cinfo.out_color_space = dec->cinfo.jpeg_color_space;
  dec->cinfo.dct_method = dec->idct_method;
  dec->cinfo.raw_data_out = TRUE;
  GST_LOG_OBJECT (dec, "starting decompress");
  guarantee_huff_tables (&dec->cinfo);
  if (!jpeg_start_decompress (&dec->cinfo)) {
    GST_WARNING_OBJECT (dec, "failed to start decompression cycle");
  /* sanity checks to get safe and reasonable output */
  switch (dec->cinfo.jpeg_color_space) {
      if (dec->cinfo.num_components != 1)
        goto invalid_yuvrgbgrayscale;
      if (dec->cinfo.num_components != 3 || dec->cinfo.max_v_samp_factor > 1 ||
          dec->cinfo.max_h_samp_factor > 1)
        goto invalid_yuvrgbgrayscale;
      if (dec->cinfo.num_components != 3 ||
          r_v > 2 || r_v < dec->cinfo.comp_info[0].v_samp_factor ||
          r_v < dec->cinfo.comp_info[1].v_samp_factor ||
          r_h < dec->cinfo.comp_info[0].h_samp_factor ||
          r_h < dec->cinfo.comp_info[1].h_samp_factor)
        goto invalid_yuvrgbgrayscale;
      g_assert_not_reached ();
  width = dec->cinfo.output_width;
  height = dec->cinfo.output_height;
  if (G_UNLIKELY (width < MIN_WIDTH || width > MAX_WIDTH ||
          height < MIN_HEIGHT || height > MAX_HEIGHT))
  gst_jpeg_dec_negotiate (dec, width, height, dec->cinfo.jpeg_color_space);
  state = gst_video_decoder_get_output_state (bdec);
  ret = gst_video_decoder_allocate_output_frame (bdec, frame);
  if (G_UNLIKELY (ret != GST_FLOW_OK))
  if (!gst_video_frame_map (&vframe, &state->info, frame->output_buffer,
  GST_LOG_OBJECT (dec, "width %d, height %d", width, height);
  if (dec->cinfo.jpeg_color_space == JCS_RGB) {
    gst_jpeg_dec_decode_rgb (dec, &vframe);
  } else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
    gst_jpeg_dec_decode_grayscale (dec, &vframe);
    GST_LOG_OBJECT (dec, "decompressing (reqired scanline buffer height = %u)",
        dec->cinfo.rec_outbuf_height);
    /* For some widths jpeglib requires more horizontal padding than I420
     * provides. In those cases we need to decode into separate buffers and then
     * copy over the data into our final picture buffer, otherwise jpeglib might
     * write over the end of a line into the beginning of the next line,
     * resulting in blocky artifacts on the left side of the picture. */
    if (G_UNLIKELY (width % (dec->cinfo.max_h_samp_factor * DCTSIZE) != 0
            || dec->cinfo.comp_info[0].h_samp_factor != 2
            || dec->cinfo.comp_info[1].h_samp_factor != 1
            || dec->cinfo.comp_info[2].h_samp_factor != 1)) {
      GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
          "indirect decoding using extra buffer copy");
      gst_jpeg_dec_decode_indirect (dec, &vframe, r_v, r_h,
          dec->cinfo.num_components);
      ret = gst_jpeg_dec_decode_direct (dec, &vframe);
      if (G_UNLIKELY (ret != GST_FLOW_OK))
        goto decode_direct_failed;
  gst_video_frame_unmap (&vframe);
  GST_LOG_OBJECT (dec, "decompressing finished");
  jpeg_finish_decompress (&dec->cinfo);
  gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
  ret = gst_video_decoder_finish_frame (bdec, frame);
  /* common exit: unmap input and drop the output-state ref */
  gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
  gst_video_codec_state_unref (state);
  /* ERROR labels */
  GST_LOG_OBJECT (dec, "we need more data");
  GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
      (_("Failed to decode JPEG image")),
      ("Picture is too small or too big (%ux%u)", width, height), ret);
  ret = GST_FLOW_ERROR;
  gchar err_msg[JMSG_LENGTH_MAX];
  dec->jerr.pub.format_message ((j_common_ptr) (&dec->cinfo), err_msg);
  GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
      (_("Failed to decode JPEG image")), ("Decode error #%u: %s", code,
  gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
  gst_video_decoder_drop_frame (bdec, frame);
  /* reset libjpeg state so the next frame starts clean */
  jpeg_abort_decompress (&dec->cinfo);
decode_direct_failed:
  /* already posted an error message */
  jpeg_abort_decompress (&dec->cinfo);
  const gchar *reason;
  reason = gst_flow_get_name (ret);
  GST_DEBUG_OBJECT (dec, "failed to alloc buffer, reason %s", reason);
  /* Reset for next time */
  jpeg_abort_decompress (&dec->cinfo);
  if (ret != GST_FLOW_EOS && ret != GST_FLOW_FLUSHING &&
      ret != GST_FLOW_NOT_LINKED) {
    GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
        (_("Failed to decode JPEG image")),
        ("Buffer allocation failed, reason: %s", reason), ret);
    jpeg_abort_decompress (&dec->cinfo);
components_not_supported:
  GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
      (_("Failed to decode JPEG image")),
      ("number of components not supported: %d (max 3)",
          dec->cinfo.num_components), ret);
  jpeg_abort_decompress (&dec->cinfo);
unsupported_colorspace:
  GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
      (_("Failed to decode JPEG image")),
      ("Picture has unknown or unsupported colourspace"), ret);
  jpeg_abort_decompress (&dec->cinfo);
invalid_yuvrgbgrayscale:
  GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
      (_("Failed to decode JPEG image")),
      ("Picture is corrupt or unhandled YUV/RGB/grayscale layout"), ret);
  jpeg_abort_decompress (&dec->cinfo);
1242 gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query)
1244 GstBufferPool *pool = NULL;
1245 GstStructure *config;
1247 if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query))
1250 if (gst_query_get_n_allocation_pools (query) > 0)
1251 gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
1256 config = gst_buffer_pool_get_config (pool);
1257 if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
1258 gst_buffer_pool_config_add_option (config,
1259 GST_BUFFER_POOL_OPTION_VIDEO_META);
1261 gst_buffer_pool_set_config (pool, config);
1262 gst_object_unref (pool);
1268 gst_jpeg_dec_start (GstVideoDecoder * bdec)
1270 GstJpegDec *dec = (GstJpegDec *) bdec;
1272 dec->saw_header = FALSE;
1273 dec->parse_entropy_len = 0;
1274 dec->parse_resync = FALSE;
1276 gst_video_decoder_set_packetized (bdec, FALSE);
1282 gst_jpeg_dec_flush (GstVideoDecoder * bdec)
1284 GstJpegDec *dec = (GstJpegDec *) bdec;
1286 jpeg_abort_decompress (&dec->cinfo);
1287 dec->parse_entropy_len = 0;
1288 dec->parse_resync = FALSE;
1289 dec->saw_header = FALSE;
1295 gst_jpeg_dec_set_property (GObject * object, guint prop_id,
1296 const GValue * value, GParamSpec * pspec)
1300 dec = GST_JPEG_DEC (object);
1303 case PROP_IDCT_METHOD:
1304 dec->idct_method = g_value_get_enum (value);
1306 case PROP_MAX_ERRORS:
1307 g_atomic_int_set (&dec->max_errors, g_value_get_int (value));
1311 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1317 gst_jpeg_dec_get_property (GObject * object, guint prop_id, GValue * value,
1322 dec = GST_JPEG_DEC (object);
1325 case PROP_IDCT_METHOD:
1326 g_value_set_enum (value, dec->idct_method);
1328 case PROP_MAX_ERRORS:
1329 g_value_set_int (value, g_atomic_int_get (&dec->max_errors));
1333 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1339 gst_jpeg_dec_stop (GstVideoDecoder * bdec)
1341 GstJpegDec *dec = (GstJpegDec *) bdec;
1343 gst_jpeg_dec_free_buffers (dec);