2 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
3 * Copyright (C) <2009> Tim-Philipp Müller <tim centricular net>
4 * Copyright (C) 2012 Collabora Ltd.
5 * Author : Edward Hervey <edward@collabora.com>
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Library General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Library General Public License for more details.
17 * You should have received a copy of the GNU Library General Public
18 * License along with this library; if not, write to the
19 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
20 * Boston, MA 02110-1301, USA.
24 * SECTION:element-jpegdec
26 * Decodes jpeg images.
29 * <title>Example launch line</title>
31 * gst-launch-1.0 -v filesrc location=mjpeg.avi ! avidemux ! queue ! jpegdec ! videoconvert ! videoscale ! autovideosink
 * ]| The above pipeline decodes the mjpeg stream and renders it to the screen.
41 #include "gstjpegdec.h"
43 #include <gst/video/video.h>
44 #include <gst/video/gstvideometa.h>
45 #include <gst/video/gstvideopool.h>
46 #include "gst/gst-i18n-plugin.h"
/* Maximum image dimensions we accept (libjpeg dimensions are 16-bit) */
#define MAX_WIDTH 65535
#define MAX_HEIGHT 65535

/* Recover the GstJpegDec instance from a libjpeg decompress context via
 * the custom source manager installed in gst_jpeg_dec_init() */
#define CINFO_GET_JPEGDEC(cinfo_ptr) \
  (((struct GstJpegDecSourceMgr*)((cinfo_ptr)->src))->dec)

/* Property defaults */
#define JPEG_DEFAULT_IDCT_METHOD JDCT_FASTEST
#define JPEG_DEFAULT_MAX_ERRORS 0
/* Source pad: the raw video formats this decoder can produce */
static GstStaticPadTemplate gst_jpeg_dec_src_pad_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
        ("{ I420, RGB, BGR, RGBx, xRGB, BGRx, xBGR, GRAY8 }"))

/* FIXME: sof-marker is for IJG libjpeg 8, should be different for 6.2 */
/* FIXME: add back "sof-marker = (int) { 0, 1, 2, 5, 6, 7, 9, 10, 13, 14 }"
 * once we have a parser and/or demuxer set caps properly */
/* Sink pad: accepts any image/jpeg stream */
static GstStaticPadTemplate gst_jpeg_dec_sink_pad_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_STATIC_CAPS ("image/jpeg")

GST_DEBUG_CATEGORY_STATIC (jpeg_dec_debug);
#define GST_CAT_DEFAULT jpeg_dec_debug
GST_DEBUG_CATEGORY_STATIC (GST_CAT_PERFORMANCE);
/* GObject property accessors */
static void gst_jpeg_dec_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_jpeg_dec_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);

/* GstVideoDecoder virtual method implementations */
static gboolean gst_jpeg_dec_set_format (GstVideoDecoder * dec,
    GstVideoCodecState * state);
static gboolean gst_jpeg_dec_start (GstVideoDecoder * bdec);
static gboolean gst_jpeg_dec_stop (GstVideoDecoder * bdec);
static gboolean gst_jpeg_dec_flush (GstVideoDecoder * bdec);
static GstFlowReturn gst_jpeg_dec_parse (GstVideoDecoder * bdec,
    GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos);
static GstFlowReturn gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec,
    GstVideoCodecFrame * frame);
static gboolean gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec,

#define gst_jpeg_dec_parent_class parent_class
G_DEFINE_TYPE (GstJpegDec, gst_jpeg_dec, GST_TYPE_VIDEO_DECODER);
/* GObject finalize: tear down the libjpeg decompress context and drop the
 * cached input state, then chain up. */
gst_jpeg_dec_finalize (GObject * object)
  GstJpegDec *dec = GST_JPEG_DEC (object);

  jpeg_destroy_decompress (&dec->cinfo);
  if (dec->input_state)
    gst_video_codec_state_unref (dec->input_state);

  G_OBJECT_CLASS (parent_class)->finalize (object);
/* Class init: install properties, pad templates, element metadata and the
 * GstVideoDecoder virtual methods. */
gst_jpeg_dec_class_init (GstJpegDecClass * klass)
  GObjectClass *gobject_class;
  GstElementClass *element_class;
  GstVideoDecoderClass *vdec_class;

  gobject_class = (GObjectClass *) klass;
  element_class = (GstElementClass *) klass;
  vdec_class = (GstVideoDecoderClass *) klass;

  parent_class = g_type_class_peek_parent (klass);

  gobject_class->finalize = gst_jpeg_dec_finalize;
  gobject_class->set_property = gst_jpeg_dec_set_property;
  gobject_class->get_property = gst_jpeg_dec_get_property;

  g_object_class_install_property (gobject_class, PROP_IDCT_METHOD,
      g_param_spec_enum ("idct-method", "IDCT Method",
          "The IDCT algorithm to use", GST_TYPE_IDCT_METHOD,
          JPEG_DEFAULT_IDCT_METHOD,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

   * GstJpegDec:max-errors:
   * Error out after receiving N consecutive decoding errors
   * (-1 = never error out, 0 = automatic, 1 = fail on first error, etc.)
   * Deprecated: 1.3.1: Property wasn't used internally
  g_object_class_install_property (gobject_class, PROP_MAX_ERRORS,
      g_param_spec_int ("max-errors", "Maximum Consecutive Decoding Errors",
          "(Deprecated) Error out after receiving N consecutive decoding errors"
          " (-1 = never fail, 0 = automatic, 1 = fail on first error)",
          -1, G_MAXINT, JPEG_DEFAULT_MAX_ERRORS,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_jpeg_dec_src_pad_template));
  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_jpeg_dec_sink_pad_template));
  gst_element_class_set_static_metadata (element_class, "JPEG image decoder",
      "Codec/Decoder/Image",
      "Decode images from JPEG format", "Wim Taymans <wim@fluendo.com>");

  /* GstVideoDecoder vfuncs */
  vdec_class->start = gst_jpeg_dec_start;
  vdec_class->stop = gst_jpeg_dec_stop;
  vdec_class->flush = gst_jpeg_dec_flush;
  vdec_class->parse = gst_jpeg_dec_parse;
  vdec_class->set_format = gst_jpeg_dec_set_format;
  vdec_class->handle_frame = gst_jpeg_dec_handle_frame;
  vdec_class->decide_allocation = gst_jpeg_dec_decide_allocation;

  GST_DEBUG_CATEGORY_INIT (jpeg_dec_debug, "jpegdec", 0, "JPEG decoder");
  GST_DEBUG_CATEGORY_GET (GST_CAT_PERFORMANCE, "GST_PERFORMANCE");
/* libjpeg source-manager callback: (re)point libjpeg at the currently
 * mapped input buffer; the whole frame is handed over in one go. */
gst_jpeg_dec_fill_input_buffer (j_decompress_ptr cinfo)
  dec = CINFO_GET_JPEGDEC (cinfo);
  g_return_val_if_fail (dec != NULL, FALSE);
  g_return_val_if_fail (dec->current_frame != NULL, FALSE);
  g_return_val_if_fail (dec->current_frame_map.data != NULL, FALSE);

  cinfo->src->next_input_byte = dec->current_frame_map.data;
  cinfo->src->bytes_in_buffer = dec->current_frame_map.size;
/* libjpeg source-manager callback: nothing to set up here, the input is
 * supplied up-front by fill_input_buffer. */
gst_jpeg_dec_init_source (j_decompress_ptr cinfo)
  GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "init_source");
/* libjpeg source-manager callback: advance the read position by num_bytes,
 * but only if that many bytes are actually available. */
gst_jpeg_dec_skip_input_data (j_decompress_ptr cinfo, glong num_bytes)
  GstJpegDec *dec = CINFO_GET_JPEGDEC (cinfo);

  GST_DEBUG_OBJECT (dec, "skip %ld bytes", num_bytes);

  /* NOTE(review): bytes_in_buffer (size_t) vs signed num_bytes comparison
   * promotes to unsigned; safe here because of the num_bytes > 0 guard */
  if (num_bytes > 0 && cinfo->src->bytes_in_buffer >= num_bytes) {
    cinfo->src->next_input_byte += (size_t) num_bytes;
    cinfo->src->bytes_in_buffer -= (size_t) num_bytes;
/* libjpeg source-manager callback: restart-marker resync; only logs. */
gst_jpeg_dec_resync_to_restart (j_decompress_ptr cinfo, gint desired)
  GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "resync_to_start");
/* libjpeg source-manager callback: no cleanup needed, just log. */
gst_jpeg_dec_term_source (j_decompress_ptr cinfo)
  GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "term_source");
232 gst_jpeg_dec_my_output_message (j_common_ptr cinfo)
234 return; /* do nothing */
/* libjpeg error-manager callback: trace messages are dropped (the disabled
 * log line below was considered too noisy). */
gst_jpeg_dec_my_emit_message (j_common_ptr cinfo, int msg_level)
  /* GST_LOG_OBJECT (CINFO_GET_JPEGDEC (&cinfo), "msg_level=%d", msg_level); */
/* libjpeg error-manager callback for fatal errors: emit the message, then
 * longjmp back to the setjmp point instead of letting libjpeg call exit(). */
gst_jpeg_dec_my_error_exit (j_common_ptr cinfo)
  struct GstJpegDecErrorMgr *err_mgr = (struct GstJpegDecErrorMgr *) cinfo->err;

  (*cinfo->err->output_message) (cinfo);
  longjmp (err_mgr->setjmp_buffer, 1);
/* Instance init: set up the libjpeg decompress context with our custom
 * error handlers and memory-based source manager, and apply property
 * defaults. */
gst_jpeg_dec_init (GstJpegDec * dec)
  GST_DEBUG ("initializing");

  /* setup jpeglib with our non-exiting, non-printing error handlers */
  memset (&dec->cinfo, 0, sizeof (dec->cinfo));
  memset (&dec->jerr, 0, sizeof (dec->jerr));
  dec->cinfo.err = jpeg_std_error (&dec->jerr.pub);
  dec->jerr.pub.output_message = gst_jpeg_dec_my_output_message;
  dec->jerr.pub.emit_message = gst_jpeg_dec_my_emit_message;
  dec->jerr.pub.error_exit = gst_jpeg_dec_my_error_exit;

  jpeg_create_decompress (&dec->cinfo);

  /* hook up our in-memory source manager callbacks */
  dec->cinfo.src = (struct jpeg_source_mgr *) &dec->jsrc;
  dec->cinfo.src->init_source = gst_jpeg_dec_init_source;
  dec->cinfo.src->fill_input_buffer = gst_jpeg_dec_fill_input_buffer;
  dec->cinfo.src->skip_input_data = gst_jpeg_dec_skip_input_data;
  dec->cinfo.src->resync_to_restart = gst_jpeg_dec_resync_to_restart;
  dec->cinfo.src->term_source = gst_jpeg_dec_term_source;

  /* init properties */
  dec->idct_method = JPEG_DEFAULT_IDCT_METHOD;
  dec->max_errors = JPEG_DEFAULT_MAX_ERRORS;
281 static inline gboolean
282 gst_jpeg_dec_parse_tag_has_entropy_segment (guint8 tag)
284 if (tag == 0xda || (tag >= 0xd0 && tag <= 0xd7))
/* GstVideoDecoder::parse: scan the adapter for JPEG markers and carve the
 * byte stream into complete images (SOI .. EOI).  Parse state carried
 * across calls: saw_header, parse_resync, parse_entropy_len.  Returns
 * NEED_DATA until a complete frame has been accumulated. */
gst_jpeg_dec_parse (GstVideoDecoder * bdec, GstVideoCodecFrame * frame,
    GstAdapter * adapter, gboolean at_eos)
  gint offset = 0, noffset;
  GstJpegDec *dec = (GstJpegDec *) bdec;

  GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);

  /* FIXME : The overhead of using scan_uint32 is massive */

  size = gst_adapter_available (adapter);
  GST_DEBUG ("Parsing jpeg image data (%u bytes)", size);

    GST_DEBUG ("Flushing all data out");

    /* If we have leftover data, throw it away */
    if (!dec->saw_header)
      goto have_full_frame;

  /* first locate the SOI start marker (0xffd8) */
  if (!dec->saw_header) {
    /* we expect at least 4 bytes, first of which start marker */
        gst_adapter_masked_scan_uint32 (adapter, 0xffff0000, 0xffd80000, 0,
    GST_DEBUG ("ret:%d", ret);
      /* drop any bytes before the start marker */
      gst_adapter_flush (adapter, ret);
    dec->saw_header = TRUE;

  GST_DEBUG ("offset:%d, size:%d", offset, size);

  /* look for the next 0xff marker byte at the expected position */
      gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
      offset, size - offset, &value);
    /* lost sync if 0xff marker not where expected */
    if ((resync = (noffset != offset))) {
      GST_DEBUG ("Lost sync at 0x%08x, resyncing", offset + 2);
      /* may have marker, but could have been resyncng */
      resync = resync || dec->parse_resync;
      /* Skip over extra 0xff */
      while ((noffset >= 0) && ((value & 0xff) == 0xff)) {
            gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
            noffset, size - noffset, &value);
      /* enough bytes left for marker? (we need 0xNN after the 0xff) */
        GST_DEBUG ("at end of input and no EOI marker found, need more data");
    /* now lock on the marker we found */
    value = value & 0xff;
      GST_DEBUG ("0x%08x: EOI marker", offset + 2);
      /* clear parse state */
      dec->saw_header = FALSE;
      dec->parse_resync = FALSE;
      goto have_full_frame;
      /* Skip this frame if we found another SOI marker */
      GST_DEBUG ("0x%08x: SOI marker before EOI, skipping", offset + 2);
      dec->parse_resync = FALSE;
    /* restart markers (RST0-RST7) carry no length field */
    if (value >= 0xd0 && value <= 0xd7)
    /* peek tag and subsequent length */
    if (offset + 2 + 4 > size)
    gst_adapter_masked_scan_uint32_peek (adapter, 0x0, 0x0, offset + 2, 4,
    frame_len = frame_len & 0xffff;
    GST_DEBUG ("0x%08x: tag %02x, frame_len=%u", offset + 2, value, frame_len);
    /* the frame length includes the 2 bytes for the length; here we want at
     * least 2 more bytes at the end for an end marker */
    if (offset + 2 + 2 + frame_len + 2 > size) {
    if (gst_jpeg_dec_parse_tag_has_entropy_segment (value)) {
      guint eseglen = dec->parse_entropy_len;
      GST_DEBUG ("0x%08x: finding entropy segment length (eseglen:%d)",
          offset + 2, eseglen);
      if (size < offset + 2 + frame_len + eseglen)
      noffset = offset + 2 + frame_len + dec->parse_entropy_len;
        GST_DEBUG ("noffset:%d, size:%d, size - noffset:%d",
            noffset, size, size - noffset);
        noffset = gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00,
            0x0000ff00, noffset, size - noffset, &value);
          /* remember how far we already scanned for the next attempt */
          dec->parse_entropy_len = size - offset - 4 - frame_len - 2;
        if ((value & 0xff) != 0x00) {
          eseglen = noffset - offset - frame_len - 2;
      dec->parse_entropy_len = 0;
      frame_len += eseglen;
      GST_DEBUG ("entropy segment length=%u => frame_len=%u", eseglen,
    /* check if we will still be in sync if we interpret
     * this as a sync point and skip this frame */
    noffset = offset + frame_len + 2;
    noffset = gst_adapter_masked_scan_uint32 (adapter, 0x0000ff00, 0x0000ff00,
      /* ignore and continue resyncing until we hit the end
       * of our data or find a sync point that looks okay */
    GST_DEBUG ("found sync at 0x%x", offset + 2);
    /* Add current data to output buffer */
    toadd += frame_len + 2;
    offset += frame_len + 2;

    gst_video_decoder_add_to_frame (bdec, toadd);
  return GST_VIDEO_DECODER_FLOW_NEED_DATA;

  gst_video_decoder_add_to_frame (bdec, toadd);
  GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
  return gst_video_decoder_have_frame (bdec);

  gst_adapter_flush (adapter, size);
/* shamelessly ripped from jpegutils.c in mjpegtools */
/* Install one Huffman table into the decompress context, allocating it on
 * first use.  bits[] holds per-code-length symbol counts, val[] the symbol
 * values; counts are validated before copying to bound the val[] read. */
add_huff_table (j_decompress_ptr dinfo,
    JHUFF_TBL ** htblptr, const UINT8 * bits, const UINT8 * val)
/* Define a Huffman table */
  if (*htblptr == NULL)
    *htblptr = jpeg_alloc_huff_table ((j_common_ptr) dinfo);

  /* Copy the number-of-symbols-of-each-code-length counts */
  memcpy ((*htblptr)->bits, bits, sizeof ((*htblptr)->bits));

  /* Validate the counts. We do this here mainly so we can copy the right
   * number of symbols from the val[] array, without risking marching off
   * the end of memory. jchuff.c will do a more thorough test later.
  for (len = 1; len <= 16; len++)
    nsymbols += bits[len];
  if (nsymbols < 1 || nsymbols > 256)
    g_error ("jpegutils.c: add_huff_table failed badly. ");

  memcpy ((*htblptr)->huffval, val, nsymbols * sizeof (UINT8));
/* Install the four standard Huffman tables (DC/AC, luma/chroma) into the
 * decompress context via add_huff_table(). */
std_huff_tables (j_decompress_ptr dinfo)
/* Set up the standard Huffman tables (cf. JPEG standard section K.3) */
/* IMPORTANT: these are only valid for 8-bit data precision! */
  static const UINT8 bits_dc_luminance[17] =
      { /* 0-base */ 0, 0, 1, 5, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0 };
  static const UINT8 val_dc_luminance[] =
      { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };

  static const UINT8 bits_dc_chrominance[17] =
      { /* 0-base */ 0, 0, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0 };
  static const UINT8 val_dc_chrominance[] =
      { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };

  static const UINT8 bits_ac_luminance[17] =
      { /* 0-base */ 0, 0, 2, 1, 3, 3, 2, 4, 3, 5, 5, 4, 4, 0, 0, 1, 0x7d };
  static const UINT8 val_ac_luminance[] =
      { 0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12,
    0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07,
    0x22, 0x71, 0x14, 0x32, 0x81, 0x91, 0xa1, 0x08,
    0x23, 0x42, 0xb1, 0xc1, 0x15, 0x52, 0xd1, 0xf0,
    0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16,
    0x17, 0x18, 0x19, 0x1a, 0x25, 0x26, 0x27, 0x28,
    0x29, 0x2a, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39,
    0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49,
    0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59,
    0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69,
    0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79,
    0x7a, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89,
    0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98,
    0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
    0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6,
    0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4, 0xc5,
    0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4,
    0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xe1, 0xe2,
    0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea,
    0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,

  static const UINT8 bits_ac_chrominance[17] =
      { /* 0-base */ 0, 0, 2, 1, 2, 4, 4, 3, 4, 7, 5, 4, 4, 0, 1, 2, 0x77 };
  static const UINT8 val_ac_chrominance[] =
      { 0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21,
    0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71,
    0x13, 0x22, 0x32, 0x81, 0x08, 0x14, 0x42, 0x91,
    0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33, 0x52, 0xf0,
    0x15, 0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34,
    0xe1, 0x25, 0xf1, 0x17, 0x18, 0x19, 0x1a, 0x26,
    0x27, 0x28, 0x29, 0x2a, 0x35, 0x36, 0x37, 0x38,
    0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
    0x49, 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58,
    0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
    0x69, 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78,
    0x79, 0x7a, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
    0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96,
    0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5,
    0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4,
    0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3,
    0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2,
    0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda,
    0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9,
    0xea, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,

  add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[0],
      bits_dc_luminance, val_dc_luminance);
  add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[0],
      bits_ac_luminance, val_ac_luminance);
  add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[1],
      bits_dc_chrominance, val_dc_chrominance);
  add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[1],
      bits_ac_chrominance, val_ac_chrominance);
/* If the stream provided no Huffman tables at all (all four slots empty),
 * install the standard tables so decoding can still proceed. */
guarantee_huff_tables (j_decompress_ptr dinfo)
  if ((dinfo->dc_huff_tbl_ptrs[0] == NULL) &&
      (dinfo->dc_huff_tbl_ptrs[1] == NULL) &&
      (dinfo->ac_huff_tbl_ptrs[0] == NULL) &&
      (dinfo->ac_huff_tbl_ptrs[1] == NULL)) {
    GST_DEBUG ("Generating standard Huffman tables for this frame.");
    std_huff_tables (dinfo);
/* GstVideoDecoder::set_format: cache the new input state and decide whether
 * upstream delivers complete frames (packetized) or a byte stream that we
 * must parse ourselves. */
gst_jpeg_dec_set_format (GstVideoDecoder * dec, GstVideoCodecState * state)
  GstJpegDec *jpeg = GST_JPEG_DEC (dec);
  GstVideoInfo *info = &state->info;

  /* FIXME : previously jpegdec would handled input as packetized
   * if the framerate was present. Here we consider it packetized if
   * the fps is != 1/1 */
  /* NOTE(review): with &&, a framerate such as 30/1 (D == 1) is treated as
   * NOT packetized, which contradicts the comment above ("fps != 1/1"
   * would be N != 1 || D != 1) — confirm intended behavior before changing */
  if (GST_VIDEO_INFO_FPS_N (info) != 1 && GST_VIDEO_INFO_FPS_D (info) != 1)
    gst_video_decoder_set_packetized (dec, TRUE);
    gst_video_decoder_set_packetized (dec, FALSE);

  if (jpeg->input_state)
    gst_video_codec_state_unref (jpeg->input_state);
  jpeg->input_state = gst_video_codec_state_ref (state);
616 hresamplecpy1 (guint8 * dest, const guint8 * src, guint len)
620 for (i = 0; i < len; ++i) {
621 /* equivalent to: dest[i] = src[i << 1] */
630 gst_jpeg_dec_free_buffers (GstJpegDec * dec)
634 for (i = 0; i < 16; i++) {
635 g_free (dec->idr_y[i]);
636 g_free (dec->idr_u[i]);
637 g_free (dec->idr_v[i]);
638 dec->idr_y[i] = NULL;
639 dec->idr_u[i] = NULL;
640 dec->idr_v[i] = NULL;
643 dec->idr_width_allocated = 0;
646 static inline gboolean
647 gst_jpeg_dec_ensure_buffers (GstJpegDec * dec, guint maxrowbytes)
651 if (G_LIKELY (dec->idr_width_allocated == maxrowbytes))
654 /* FIXME: maybe just alloc one or three blocks altogether? */
655 for (i = 0; i < 16; i++) {
656 dec->idr_y[i] = g_try_realloc (dec->idr_y[i], maxrowbytes);
657 dec->idr_u[i] = g_try_realloc (dec->idr_u[i], maxrowbytes);
658 dec->idr_v[i] = g_try_realloc (dec->idr_v[i], maxrowbytes);
660 if (G_UNLIKELY (!dec->idr_y[i] || !dec->idr_u[i] || !dec->idr_v[i])) {
661 GST_WARNING_OBJECT (dec, "out of memory, i=%d, bytes=%u", i, maxrowbytes);
666 dec->idr_width_allocated = maxrowbytes;
667 GST_LOG_OBJECT (dec, "allocated temp memory, %u bytes/row", maxrowbytes);
/* Decode a grayscale image into temporary row buffers and copy the pixels
 * into the output frame's single component, honouring its pixel and row
 * strides. */
gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, GstVideoFrame * frame)
  guchar **scanarray[1] = { rows };
  gint pstride, rstride;

  GST_DEBUG_OBJECT (dec, "indirect decoding of grayscale");

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))

  base[0] = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);

  /* point the scan array at the pre-allocated temp rows */
  memcpy (rows, dec->idr_y, 16 * sizeof (gpointer));

    lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
    if (G_LIKELY (lines > 0)) {
      for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
        for (k = 0; k < width; k++) {
          base[0][p] = rows[j][k];
      GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Decode an RGB image into temporary row buffers and interleave the three
 * components into the output frame using its pixel and row strides. */
gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame)
  guchar *r_rows[16], *g_rows[16], *b_rows[16];
  guchar **scanarray[3] = { r_rows, g_rows, b_rows };
  guint pstride, rstride;

  GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");

  width = GST_VIDEO_FRAME_WIDTH (frame);
  height = GST_VIDEO_FRAME_HEIGHT (frame);

  if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))

  for (i = 0; i < 3; i++)
    base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);

  pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
  rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);

  /* point the scan arrays at the pre-allocated temp rows */
  memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
  memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
  memcpy (b_rows, dec->idr_v, 16 * sizeof (gpointer));

    lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
    if (G_LIKELY (lines > 0)) {
      for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
        for (k = 0; k < width; k++) {
          base[0][p] = r_rows[j][k];
          base[1][p] = g_rows[j][k];
          base[2][p] = b_rows[j][k];
      GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
770 gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame, gint r_v,
773 guchar *y_rows[16], *u_rows[16], *v_rows[16];
774 guchar **scanarray[3] = { y_rows, u_rows, v_rows };
777 guchar *base[3], *last[3];
781 GST_DEBUG_OBJECT (dec,
782 "unadvantageous width or r_h, taking slow route involving memcpy");
784 width = GST_VIDEO_FRAME_WIDTH (frame);
785 height = GST_VIDEO_FRAME_HEIGHT (frame);
787 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
790 for (i = 0; i < 3; i++) {
791 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
792 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
793 /* make sure we don't make jpeglib write beyond our buffer,
794 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
795 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
796 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
799 memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer));
800 memcpy (u_rows, dec->idr_u, 16 * sizeof (gpointer));
801 memcpy (v_rows, dec->idr_v, 16 * sizeof (gpointer));
803 /* fill chroma components for grayscale */
805 GST_DEBUG_OBJECT (dec, "grayscale, filling chroma");
806 for (i = 0; i < 16; i++) {
807 memset (u_rows[i], GST_ROUND_UP_32 (width), 0x80);
808 memset (v_rows[i], GST_ROUND_UP_32 (width), 0x80);
812 for (i = 0; i < height; i += r_v * DCTSIZE) {
813 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, r_v * DCTSIZE);
814 if (G_LIKELY (lines > 0)) {
815 for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) {
816 if (G_LIKELY (base[0] <= last[0])) {
817 memcpy (base[0], y_rows[j], stride[0]);
818 base[0] += stride[0];
821 if (G_LIKELY (base[0] <= last[0])) {
822 memcpy (base[0], y_rows[j + 1], stride[0]);
823 base[0] += stride[0];
826 if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) {
828 memcpy (base[1], u_rows[k], stride[1]);
829 memcpy (base[2], v_rows[k], stride[2]);
830 } else if (r_h == 1) {
831 hresamplecpy1 (base[1], u_rows[k], stride[1]);
832 hresamplecpy1 (base[2], v_rows[k], stride[2]);
834 /* FIXME: implement (at least we avoid crashing by doing nothing) */
838 if (r_v == 2 || (k & 1) != 0) {
839 base[1] += stride[1];
840 base[2] += stride[2];
844 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Fast path: build per-row pointer tables into the output frame and let
 * jpeglib decode raw data directly into it, clamping row pointers to the
 * last valid line so jpeglib's padding rows cannot write out of bounds.
 * Returns a GstFlowReturn; bails out for v_samp factors > 2. */
gst_jpeg_dec_decode_direct (GstJpegDec * dec, GstVideoFrame * frame)
  guchar **line[3];             /* the jpeg line buffer */
  guchar *y[4 * DCTSIZE] = { NULL, };   /* alloc enough for the lines */
  guchar *u[4 * DCTSIZE] = { NULL, };   /* r_v will be <4 */
  guchar *v[4 * DCTSIZE] = { NULL, };
  gint lines, v_samp[3];
  guchar *base[3], *last[3];

  v_samp[0] = dec->cinfo.comp_info[0].v_samp_factor;
  v_samp[1] = dec->cinfo.comp_info[1].v_samp_factor;
  v_samp[2] = dec->cinfo.comp_info[2].v_samp_factor;

  if (G_UNLIKELY (v_samp[0] > 2 || v_samp[1] > 2 || v_samp[2] > 2))
    goto format_not_supported;

  height = GST_VIDEO_FRAME_HEIGHT (frame);

  for (i = 0; i < 3; i++) {
    base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
    stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
    /* make sure we don't make jpeglib write beyond our buffer,
     * which might happen if (height % (r_v*DCTSIZE)) != 0 */
    last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
        (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));

  /* let jpeglib decode directly into our final buffer */
  GST_DEBUG_OBJECT (dec, "decoding directly into output buffer");

  for (i = 0; i < height; i += v_samp[0] * DCTSIZE) {
    for (j = 0; j < (v_samp[0] * DCTSIZE); ++j) {
      /* luma row pointer, clamped to the last valid output line */
      line[0][j] = base[0] + (i + j) * stride[0];
      if (G_UNLIKELY (line[0][j] > last[0]))
        line[0][j] = last[0];
      /* chroma row pointers depend on the chroma/luma v_samp relation */
      if (v_samp[1] == v_samp[0]) {
        line[1][j] = base[1] + ((i + j) / 2) * stride[1];
      } else if (j < (v_samp[1] * DCTSIZE)) {
        line[1][j] = base[1] + ((i / 2) + j) * stride[1];
      if (G_UNLIKELY (line[1][j] > last[1]))
        line[1][j] = last[1];
      if (v_samp[2] == v_samp[0]) {
        line[2][j] = base[2] + ((i + j) / 2) * stride[2];
      } else if (j < (v_samp[2] * DCTSIZE)) {
        line[2][j] = base[2] + ((i / 2) + j) * stride[2];
      if (G_UNLIKELY (line[2][j] > last[2]))
        line[2][j] = last[2];

    lines = jpeg_read_raw_data (&dec->cinfo, line, v_samp[0] * DCTSIZE);
    if (G_UNLIKELY (!lines)) {
      GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");

format_not_supported:
  /* NOTE(review): ret is declared gboolean but receives a GstFlowReturn
   * via GST_VIDEO_DECODER_ERROR and is returned as one — works because
   * both are int-sized, but the type should arguably be GstFlowReturn */
  gboolean ret = GST_FLOW_OK;

  GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
      (_("Failed to decode JPEG image")),
      ("Unsupported subsampling schema: v_samp factors: %u %u %u", v_samp[0],
          v_samp[1], v_samp[2]), ret);
/* Map the libjpeg colourspace to a GstVideoFormat and (re)configure the
 * output state if width/height/format changed; I420 output is tagged with
 * full-range BT.601 colorimetry. */
gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc)
  GstVideoCodecState *outstate;
  GstVideoFormat format;

      format = GST_VIDEO_FORMAT_RGB;
      format = GST_VIDEO_FORMAT_GRAY8;
      format = GST_VIDEO_FORMAT_I420;

  /* Compare to currently configured output state */
  outstate = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (dec));
    info = &outstate->info;

    /* nothing changed: keep the existing output state */
    if (width == GST_VIDEO_INFO_WIDTH (info) &&
        height == GST_VIDEO_INFO_HEIGHT (info) &&
        format == GST_VIDEO_INFO_FORMAT (info)) {
      gst_video_codec_state_unref (outstate);
    gst_video_codec_state_unref (outstate);

      gst_video_decoder_set_output_state (GST_VIDEO_DECODER (dec), format,
      width, height, dec->input_state);

    /* JPEG is full-range BT.601 */
    outstate->info.colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;
    outstate->info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
    outstate->info.colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN;
    outstate->info.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;

  gst_video_codec_state_unref (outstate);

  gst_video_decoder_negotiate (GST_VIDEO_DECODER (dec));

  GST_DEBUG_OBJECT (dec, "max_v_samp_factor=%d", dec->cinfo.max_v_samp_factor);
  GST_DEBUG_OBJECT (dec, "max_h_samp_factor=%d", dec->cinfo.max_h_samp_factor);
989 gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec, GstVideoCodecFrame * frame)
991 GstFlowReturn ret = GST_FLOW_OK;
992 GstJpegDec *dec = (GstJpegDec *) bdec;
993 GstVideoFrame vframe;
997 gboolean need_unmap = TRUE;
998 GstVideoCodecState *state = NULL;
1000 dec->current_frame = frame;
1001 gst_buffer_map (frame->input_buffer, &dec->current_frame_map, GST_MAP_READ);
1002 gst_jpeg_dec_fill_input_buffer (&dec->cinfo);
1004 if (setjmp (dec->jerr.setjmp_buffer)) {
1005 code = dec->jerr.pub.msg_code;
1007 if (code == JERR_INPUT_EOF) {
1008 GST_DEBUG ("jpeg input EOF error, we probably need more data");
1009 goto need_more_data;
1015 hdr_ok = jpeg_read_header (&dec->cinfo, TRUE);
1016 if (G_UNLIKELY (hdr_ok != JPEG_HEADER_OK)) {
1017 GST_WARNING_OBJECT (dec, "reading the header failed, %d", hdr_ok);
1020 GST_LOG_OBJECT (dec, "num_components=%d", dec->cinfo.num_components);
1021 GST_LOG_OBJECT (dec, "jpeg_color_space=%d", dec->cinfo.jpeg_color_space);
1023 if (!dec->cinfo.num_components || !dec->cinfo.comp_info)
1024 goto components_not_supported;
1026 r_h = dec->cinfo.comp_info[0].h_samp_factor;
1027 r_v = dec->cinfo.comp_info[0].v_samp_factor;
1029 GST_LOG_OBJECT (dec, "r_h = %d, r_v = %d", r_h, r_v);
1031 if (dec->cinfo.num_components > 3)
1032 goto components_not_supported;
1034 /* verify color space expectation to avoid going *boom* or bogus output */
1035 if (dec->cinfo.jpeg_color_space != JCS_YCbCr &&
1036 dec->cinfo.jpeg_color_space != JCS_GRAYSCALE &&
1037 dec->cinfo.jpeg_color_space != JCS_RGB)
1038 goto unsupported_colorspace;
1040 #ifndef GST_DISABLE_GST_DEBUG
1044 for (i = 0; i < dec->cinfo.num_components; ++i) {
1045 GST_LOG_OBJECT (dec, "[%d] h_samp_factor=%d, v_samp_factor=%d, cid=%d",
1046 i, dec->cinfo.comp_info[i].h_samp_factor,
1047 dec->cinfo.comp_info[i].v_samp_factor,
1048 dec->cinfo.comp_info[i].component_id);
1053 /* prepare for raw output */
1054 dec->cinfo.do_fancy_upsampling = FALSE;
1055 dec->cinfo.do_block_smoothing = FALSE;
1056 dec->cinfo.out_color_space = dec->cinfo.jpeg_color_space;
1057 dec->cinfo.dct_method = dec->idct_method;
1058 dec->cinfo.raw_data_out = TRUE;
1060 GST_LOG_OBJECT (dec, "starting decompress");
1061 guarantee_huff_tables (&dec->cinfo);
1062 if (!jpeg_start_decompress (&dec->cinfo)) {
1063 GST_WARNING_OBJECT (dec, "failed to start decompression cycle");
1066 /* sanity checks to get safe and reasonable output */
1067 switch (dec->cinfo.jpeg_color_space) {
1069 if (dec->cinfo.num_components != 1)
1070 goto invalid_yuvrgbgrayscale;
1073 if (dec->cinfo.num_components != 3 || dec->cinfo.max_v_samp_factor > 1 ||
1074 dec->cinfo.max_h_samp_factor > 1)
1075 goto invalid_yuvrgbgrayscale;
1078 if (dec->cinfo.num_components != 3 ||
1079 r_v > 2 || r_v < dec->cinfo.comp_info[0].v_samp_factor ||
1080 r_v < dec->cinfo.comp_info[1].v_samp_factor ||
1081 r_h < dec->cinfo.comp_info[0].h_samp_factor ||
1082 r_h < dec->cinfo.comp_info[1].h_samp_factor)
1083 goto invalid_yuvrgbgrayscale;
1086 g_assert_not_reached ();
1090 width = dec->cinfo.output_width;
1091 height = dec->cinfo.output_height;
1093 if (G_UNLIKELY (width < MIN_WIDTH || width > MAX_WIDTH ||
1094 height < MIN_HEIGHT || height > MAX_HEIGHT))
1097 gst_jpeg_dec_negotiate (dec, width, height, dec->cinfo.jpeg_color_space);
1099 state = gst_video_decoder_get_output_state (bdec);
1100 ret = gst_video_decoder_allocate_output_frame (bdec, frame);
1101 if (G_UNLIKELY (ret != GST_FLOW_OK))
1104 if (!gst_video_frame_map (&vframe, &state->info, frame->output_buffer,
1108 GST_LOG_OBJECT (dec, "width %d, height %d", width, height);
1110 if (dec->cinfo.jpeg_color_space == JCS_RGB) {
1111 gst_jpeg_dec_decode_rgb (dec, &vframe);
1112 } else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
1113 gst_jpeg_dec_decode_grayscale (dec, &vframe);
1115 GST_LOG_OBJECT (dec, "decompressing (reqired scanline buffer height = %u)",
1116 dec->cinfo.rec_outbuf_height);
1118 /* For some widths jpeglib requires more horizontal padding than I420
1119 * provides. In those cases we need to decode into separate buffers and then
1120 * copy over the data into our final picture buffer, otherwise jpeglib might
1121 * write over the end of a line into the beginning of the next line,
1122 * resulting in blocky artifacts on the left side of the picture. */
1123 if (G_UNLIKELY (width % (dec->cinfo.max_h_samp_factor * DCTSIZE) != 0
1124 || dec->cinfo.comp_info[0].h_samp_factor != 2
1125 || dec->cinfo.comp_info[1].h_samp_factor != 1
1126 || dec->cinfo.comp_info[2].h_samp_factor != 1)) {
1127 GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
1128 "indirect decoding using extra buffer copy");
1129 gst_jpeg_dec_decode_indirect (dec, &vframe, r_v, r_h,
1130 dec->cinfo.num_components);
1132 ret = gst_jpeg_dec_decode_direct (dec, &vframe);
1134 if (G_UNLIKELY (ret != GST_FLOW_OK))
1135 goto decode_direct_failed;
1139 gst_video_frame_unmap (&vframe);
1141 GST_LOG_OBJECT (dec, "decompressing finished");
1142 jpeg_finish_decompress (&dec->cinfo);
1144 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1145 ret = gst_video_decoder_finish_frame (bdec, frame);
1153 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1156 gst_video_codec_state_unref (state);
1163 GST_LOG_OBJECT (dec, "we need more data");
1170 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1171 (_("Failed to decode JPEG image")),
1172 ("Picture is too small or too big (%ux%u)", width, height), ret);
1173 ret = GST_FLOW_ERROR;
1178 gchar err_msg[JMSG_LENGTH_MAX];
1180 dec->jerr.pub.format_message ((j_common_ptr) (&dec->cinfo), err_msg);
1182 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1183 (_("Failed to decode JPEG image")), ("Decode error #%u: %s", code,
1186 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1187 gst_video_decoder_drop_frame (bdec, frame);
1189 jpeg_abort_decompress (&dec->cinfo);
1193 decode_direct_failed:
1195 /* already posted an error message */
1196 jpeg_abort_decompress (&dec->cinfo);
1201 const gchar *reason;
1203 reason = gst_flow_get_name (ret);
1205 GST_DEBUG_OBJECT (dec, "failed to alloc buffer, reason %s", reason);
1206 /* Reset for next time */
1207 jpeg_abort_decompress (&dec->cinfo);
1208 if (ret != GST_FLOW_EOS && ret != GST_FLOW_FLUSHING &&
1209 ret != GST_FLOW_NOT_LINKED) {
1210 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1211 (_("Failed to decode JPEG image")),
1212 ("Buffer allocation failed, reason: %s", reason), ret);
1213 jpeg_abort_decompress (&dec->cinfo);
1217 components_not_supported:
1219 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1220 (_("Failed to decode JPEG image")),
1221 ("number of components not supported: %d (max 3)",
1222 dec->cinfo.num_components), ret);
1223 jpeg_abort_decompress (&dec->cinfo);
1226 unsupported_colorspace:
1228 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1229 (_("Failed to decode JPEG image")),
1230 ("Picture has unknown or unsupported colourspace"), ret);
1231 jpeg_abort_decompress (&dec->cinfo);
1234 invalid_yuvrgbgrayscale:
1236 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1237 (_("Failed to decode JPEG image")),
1238 ("Picture is corrupt or unhandled YUV/RGB/grayscale layout"), ret);
1239 jpeg_abort_decompress (&dec->cinfo);
1245 gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query)
1247 GstBufferPool *pool = NULL;
1248 GstStructure *config;
1250 if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query))
1253 if (gst_query_get_n_allocation_pools (query) > 0)
1254 gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
1259 config = gst_buffer_pool_get_config (pool);
1260 if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
1261 gst_buffer_pool_config_add_option (config,
1262 GST_BUFFER_POOL_OPTION_VIDEO_META);
1264 gst_buffer_pool_set_config (pool, config);
1265 gst_object_unref (pool);
1271 gst_jpeg_dec_start (GstVideoDecoder * bdec)
1273 GstJpegDec *dec = (GstJpegDec *) bdec;
1275 dec->saw_header = FALSE;
1276 dec->parse_entropy_len = 0;
1277 dec->parse_resync = FALSE;
1279 gst_video_decoder_set_packetized (bdec, FALSE);
1285 gst_jpeg_dec_flush (GstVideoDecoder * bdec)
1287 GstJpegDec *dec = (GstJpegDec *) bdec;
1289 jpeg_abort_decompress (&dec->cinfo);
1290 dec->parse_entropy_len = 0;
1291 dec->parse_resync = FALSE;
1292 dec->saw_header = FALSE;
1298 gst_jpeg_dec_set_property (GObject * object, guint prop_id,
1299 const GValue * value, GParamSpec * pspec)
1303 dec = GST_JPEG_DEC (object);
1306 case PROP_IDCT_METHOD:
1307 dec->idct_method = g_value_get_enum (value);
1309 case PROP_MAX_ERRORS:
1310 g_atomic_int_set (&dec->max_errors, g_value_get_int (value));
1314 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1320 gst_jpeg_dec_get_property (GObject * object, guint prop_id, GValue * value,
1325 dec = GST_JPEG_DEC (object);
1328 case PROP_IDCT_METHOD:
1329 g_value_set_enum (value, dec->idct_method);
1331 case PROP_MAX_ERRORS:
1332 g_value_set_int (value, g_atomic_int_get (&dec->max_errors));
1336 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1342 gst_jpeg_dec_stop (GstVideoDecoder * bdec)
1344 GstJpegDec *dec = (GstJpegDec *) bdec;
1346 gst_jpeg_dec_free_buffers (dec);