2 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
3 * Copyright (C) <2009> Tim-Philipp Müller <tim centricular net>
4 * Copyright (C) 2012 Collabora Ltd.
5 * Author : Edward Hervey <edward@collabora.com>
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Library General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Library General Public License for more details.
17 * You should have received a copy of the GNU Library General Public
18 * License along with this library; if not, write to the
19 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
20 * Boston, MA 02110-1301, USA.
24 * SECTION:element-jpegdec
26 * Decodes jpeg images.
29 * <title>Example launch line</title>
31 * gst-launch-1.0 -v filesrc location=mjpeg.avi ! avidemux ! queue ! jpegdec ! videoconvert ! videoscale ! autovideosink
32 * ]| The above pipeline decodes the mjpeg stream and renders it to the screen.
41 #include "gstjpegdec.h"
43 #include <gst/video/video.h>
44 #include <gst/video/gstvideometa.h>
45 #include <gst/video/gstvideopool.h>
46 #include "gst/gst-i18n-plugin.h"
/* Hard limits on the decodable picture size (libjpeg itself caps at 65535). */
50 #define MAX_WIDTH 65535
52 #define MAX_HEIGHT 65535
/* Recover the GstJpegDec instance from a libjpeg decompress pointer: the
 * source manager installed in gst_jpeg_dec_init() is a GstJpegDecSourceMgr
 * whose 'dec' member points back at the element. */
54 #define CINFO_GET_JPEGDEC(cinfo_ptr) \
55 (((struct GstJpegDecSourceMgr*)((cinfo_ptr)->src))->dec)
/* Property defaults: fastest IDCT, and max-errors 0 ("automatic"). */
57 #define JPEG_DEFAULT_IDCT_METHOD JDCT_FASTEST
58 #define JPEG_DEFAULT_MAX_ERRORS 0
/* Source pad template: the raw video formats this decoder can output. */
68 static GstStaticPadTemplate gst_jpeg_dec_src_pad_template =
69 GST_STATIC_PAD_TEMPLATE ("src",
72 GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
73 ("{ I420, RGB, BGR, RGBx, xRGB, BGRx, xBGR, GRAY8 }"))
77 /* FIXME: sof-marker is for IJG libjpeg 8, should be different for 6.2 */
78 /* FIXME: add back "sof-marker = (int) { 0, 1, 2, 5, 6, 7, 9, 10, 13, 14 }"
79 * once we have a parser and/or demuxer set caps properly */
/* Sink pad template: accepts image/jpeg within the supported size range. */
80 static GstStaticPadTemplate gst_jpeg_dec_sink_pad_template =
81 GST_STATIC_PAD_TEMPLATE ("sink",
84 GST_STATIC_CAPS ("image/jpeg, "
85 "width = (int) [ " G_STRINGIFY (MIN_WIDTH) ", " G_STRINGIFY (MAX_WIDTH)
86 " ], " "height = (int) [ " G_STRINGIFY (MIN_HEIGHT) ", "
87 G_STRINGIFY (MAX_HEIGHT) " ] ")
/* Element-specific debug category, plus the shared performance category
 * (fetched by name in class_init below). */
90 GST_DEBUG_CATEGORY_STATIC (jpeg_dec_debug);
91 #define GST_CAT_DEFAULT jpeg_dec_debug
92 GST_DEBUG_CATEGORY_STATIC (GST_CAT_PERFORMANCE);
/* GObject property accessors. */
94 static void gst_jpeg_dec_set_property (GObject * object, guint prop_id,
95 const GValue * value, GParamSpec * pspec);
96 static void gst_jpeg_dec_get_property (GObject * object, guint prop_id,
97 GValue * value, GParamSpec * pspec);
/* GstVideoDecoder virtual method implementations (wired up in class_init). */
99 static gboolean gst_jpeg_dec_set_format (GstVideoDecoder * dec,
100 GstVideoCodecState * state);
101 static gboolean gst_jpeg_dec_start (GstVideoDecoder * bdec);
102 static gboolean gst_jpeg_dec_stop (GstVideoDecoder * bdec);
103 static gboolean gst_jpeg_dec_flush (GstVideoDecoder * bdec);
104 static GstFlowReturn gst_jpeg_dec_parse (GstVideoDecoder * bdec,
105 GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos);
106 static GstFlowReturn gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec,
107 GstVideoCodecFrame * frame);
108 static gboolean gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec,
/* Register GstJpegDec as a GstVideoDecoder subclass. */
111 #define gst_jpeg_dec_parent_class parent_class
112 G_DEFINE_TYPE (GstJpegDec, gst_jpeg_dec, GST_TYPE_VIDEO_DECODER);
/* GObject finalize: tears down the libjpeg decompress state and drops any
 * cached input state before chaining up to the parent class. */
115 gst_jpeg_dec_finalize (GObject * object)
117 GstJpegDec *dec = GST_JPEG_DEC (object);
119 jpeg_destroy_decompress (&dec->cinfo);
120 if (dec->input_state)
121 gst_video_codec_state_unref (dec->input_state);
123 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Class initializer: installs properties, pad templates, element metadata
 * and the GstVideoDecoder virtual method table. */
127 gst_jpeg_dec_class_init (GstJpegDecClass * klass)
129 GObjectClass *gobject_class;
130 GstElementClass *element_class;
131 GstVideoDecoderClass *vdec_class;
133 gobject_class = (GObjectClass *) klass;
134 element_class = (GstElementClass *) klass;
135 vdec_class = (GstVideoDecoderClass *) klass;
/* NOTE(review): with G_DEFINE_TYPE, parent_class is already maintained by
 * the gst_jpeg_dec_parent_class define; this assignment looks redundant. */
137 parent_class = g_type_class_peek_parent (klass);
139 gobject_class->finalize = gst_jpeg_dec_finalize;
140 gobject_class->set_property = gst_jpeg_dec_set_property;
141 gobject_class->get_property = gst_jpeg_dec_get_property;
143 g_object_class_install_property (gobject_class, PROP_IDCT_METHOD,
144 g_param_spec_enum ("idct-method", "IDCT Method",
145 "The IDCT algorithm to use", GST_TYPE_IDCT_METHOD,
146 JPEG_DEFAULT_IDCT_METHOD,
147 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
150 * GstJpegDec:max-errors
152 * Error out after receiving N consecutive decoding errors
153 * (-1 = never error out, 0 = automatic, 1 = fail on first error, etc.)
157 g_object_class_install_property (gobject_class, PROP_MAX_ERRORS,
158 g_param_spec_int ("max-errors", "Maximum Consecutive Decoding Errors",
159 "Error out after receiving N consecutive decoding errors "
160 "(-1 = never fail, 0 = automatic, 1 = fail on first error)",
161 -1, G_MAXINT, JPEG_DEFAULT_MAX_ERRORS,
162 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
164 gst_element_class_add_pad_template (element_class,
165 gst_static_pad_template_get (&gst_jpeg_dec_src_pad_template));
166 gst_element_class_add_pad_template (element_class,
167 gst_static_pad_template_get (&gst_jpeg_dec_sink_pad_template));
168 gst_element_class_set_static_metadata (element_class, "JPEG image decoder",
169 "Codec/Decoder/Image",
170 "Decode images from JPEG format", "Wim Taymans <wim@fluendo.com>");
172 vdec_class->start = gst_jpeg_dec_start;
173 vdec_class->stop = gst_jpeg_dec_stop;
174 vdec_class->flush = gst_jpeg_dec_flush;
175 vdec_class->parse = gst_jpeg_dec_parse;
176 vdec_class->set_format = gst_jpeg_dec_set_format;
177 vdec_class->handle_frame = gst_jpeg_dec_handle_frame;
178 vdec_class->decide_allocation = gst_jpeg_dec_decide_allocation;
180 GST_DEBUG_CATEGORY_INIT (jpeg_dec_debug, "jpegdec", 0, "JPEG decoder");
181 GST_DEBUG_CATEGORY_GET (GST_CAT_PERFORMANCE, "GST_PERFORMANCE");
/* libjpeg source-manager callback: (re)points the decompressor at the
 * mapped input buffer of the frame currently being decoded. */
185 gst_jpeg_dec_fill_input_buffer (j_decompress_ptr cinfo)
189 dec = CINFO_GET_JPEGDEC (cinfo);
190 g_return_val_if_fail (dec != NULL, FALSE);
191 g_return_val_if_fail (dec->current_frame != NULL, FALSE);
192 g_return_val_if_fail (dec->current_frame_map.data != NULL, FALSE);
/* Hand the whole mapped input buffer to libjpeg in one go. */
194 cinfo->src->next_input_byte = dec->current_frame_map.data;
195 cinfo->src->bytes_in_buffer = dec->current_frame_map.size;
/* libjpeg source-manager callback: nothing to initialize, log only. */
201 gst_jpeg_dec_init_source (j_decompress_ptr cinfo)
203 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "init_source");
/* libjpeg source-manager callback: advance the read pointer by num_bytes,
 * but only if that many bytes remain in the mapped buffer.
 * NOTE(review): when num_bytes exceeds bytes_in_buffer the request is
 * silently ignored — confirm libjpeg copes with the unskipped data. */
208 gst_jpeg_dec_skip_input_data (j_decompress_ptr cinfo, glong num_bytes)
210 GstJpegDec *dec = CINFO_GET_JPEGDEC (cinfo);
212 GST_DEBUG_OBJECT (dec, "skip %ld bytes", num_bytes);
214 if (num_bytes > 0 && cinfo->src->bytes_in_buffer >= num_bytes) {
215 cinfo->src->next_input_byte += (size_t) num_bytes;
216 cinfo->src->bytes_in_buffer -= (size_t) num_bytes;
/* libjpeg source-manager callback invoked on marker resync; log only. */
221 gst_jpeg_dec_resync_to_restart (j_decompress_ptr cinfo, gint desired)
223 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "resync_to_start");
/* libjpeg source-manager callback: no cleanup needed, log only. */
228 gst_jpeg_dec_term_source (j_decompress_ptr cinfo)
230 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "term_source");
/* libjpeg error-manager callback: suppress libjpeg's stderr messages;
 * errors are reported through GStreamer instead. */
235 gst_jpeg_dec_my_output_message (j_common_ptr cinfo)
237 return; /* do nothing */
/* libjpeg error-manager callback for trace/warning messages: ignored
 * (the logging call is intentionally commented out). */
241 gst_jpeg_dec_my_emit_message (j_common_ptr cinfo, int msg_level)
243 /* GST_LOG_OBJECT (CINFO_GET_JPEGDEC (&cinfo), "msg_level=%d", msg_level); */
/* libjpeg fatal-error callback: instead of letting libjpeg call exit(),
 * longjmp back to the setjmp point in handle_frame so the element can
 * recover and report the error through GStreamer. */
248 gst_jpeg_dec_my_error_exit (j_common_ptr cinfo)
250 struct GstJpegDecErrorMgr *err_mgr = (struct GstJpegDecErrorMgr *) cinfo->err;
252 (*cinfo->err->output_message) (cinfo);
253 longjmp (err_mgr->setjmp_buffer, 1);
/* Instance initializer: sets up the libjpeg decompressor with our custom
 * error manager (setjmp-based) and custom source manager callbacks, then
 * applies the property defaults. */
257 gst_jpeg_dec_init (GstJpegDec * dec)
259 GST_DEBUG ("initializing");
/* Install our error handlers before jpeg_create_decompress so any
 * creation-time error already goes through them. */
262 memset (&dec->cinfo, 0, sizeof (dec->cinfo));
263 memset (&dec->jerr, 0, sizeof (dec->jerr));
264 dec->cinfo.err = jpeg_std_error (&dec->jerr.pub);
265 dec->jerr.pub.output_message = gst_jpeg_dec_my_output_message;
266 dec->jerr.pub.emit_message = gst_jpeg_dec_my_emit_message;
267 dec->jerr.pub.error_exit = gst_jpeg_dec_my_error_exit;
269 jpeg_create_decompress (&dec->cinfo);
/* Feed input from our own source manager rather than a stdio source. */
271 dec->cinfo.src = (struct jpeg_source_mgr *) &dec->jsrc;
272 dec->cinfo.src->init_source = gst_jpeg_dec_init_source;
273 dec->cinfo.src->fill_input_buffer = gst_jpeg_dec_fill_input_buffer;
274 dec->cinfo.src->skip_input_data = gst_jpeg_dec_skip_input_data;
275 dec->cinfo.src->resync_to_restart = gst_jpeg_dec_resync_to_restart;
276 dec->cinfo.src->term_source = gst_jpeg_dec_term_source;
279 /* init properties */
280 dec->idct_method = JPEG_DEFAULT_IDCT_METHOD;
281 dec->max_errors = JPEG_DEFAULT_MAX_ERRORS;
/* Returns TRUE for markers followed by entropy-coded data:
 * SOS (0xda) and the restart markers RST0-RST7 (0xd0-0xd7). */
284 static inline gboolean
285 gst_jpeg_dec_parse_tag_has_entropy_segment (guint8 tag)
287 if (tag == 0xda || (tag >= 0xd0 && tag <= 0xd7))
/* GstVideoDecoder::parse — scans the adapter for one complete JPEG image
 * (SOI .. EOI), walking marker segments and entropy-coded data, and hands
 * the accumulated bytes to the base class.  Returns NEED_DATA until a full
 * frame is available. */
293 gst_jpeg_dec_parse (GstVideoDecoder * bdec, GstVideoCodecFrame * frame,
294 GstAdapter * adapter, gboolean at_eos)
299 gint offset = 0, noffset;
300 GstJpegDec *dec = (GstJpegDec *) bdec;
302 /* FIXME : The overhead of using scan_uint32 is massive */
304 size = gst_adapter_available (adapter);
305 GST_DEBUG ("Parsing jpeg image data (%u bytes)", size);
308 GST_DEBUG ("Flushing all data out");
311 /* If we have leftover data, throw it away */
312 if (!dec->saw_header)
314 goto have_full_frame;
/* Locate the SOI marker (0xffd8) before collecting segment data. */
320 if (!dec->saw_header) {
322 /* we expect at least 4 bytes, first of which start marker */
324 gst_adapter_masked_scan_uint32 (adapter, 0xffff0000, 0xffd80000, 0,
327 GST_DEBUG ("ret:%d", ret);
332 gst_adapter_flush (adapter, ret);
335 dec->saw_header = TRUE;
342 GST_DEBUG ("offset:%d, size:%d", offset, size);
/* Look for the next 0xff marker byte at the expected position. */
345 gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
346 offset, size - offset, &value);
348 /* lost sync if 0xff marker not where expected */
349 if ((resync = (noffset != offset))) {
350 GST_DEBUG ("Lost sync at 0x%08x, resyncing", offset + 2);
352 /* may have marker, but could have been resyncing */
353 resync = resync || dec->parse_resync;
354 /* Skip over extra 0xff */
355 while ((noffset >= 0) && ((value & 0xff) == 0xff)) {
358 gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
359 noffset, size - noffset, &value);
361 /* enough bytes left for marker? (we need 0xNN after the 0xff) */
363 GST_DEBUG ("at end of input and no EOI marker found, need more data");
367 /* now lock on the marker we found */
369 value = value & 0xff;
371 GST_DEBUG ("0x%08x: EOI marker", offset + 2);
372 /* clear parse state */
373 dec->saw_header = FALSE;
374 dec->parse_resync = FALSE;
376 goto have_full_frame;
379 /* Skip this frame if we found another SOI marker */
380 GST_DEBUG ("0x%08x: SOI marker before EOI, skipping", offset + 2);
381 dec->parse_resync = FALSE;
382 /* FIXME : Need to skip data */
384 goto have_full_frame;
/* Restart markers carry no length field of their own. */
388 if (value >= 0xd0 && value <= 0xd7)
391 /* peek tag and subsequent length */
392 if (offset + 2 + 4 > size)
395 gst_adapter_masked_scan_uint32_peek (adapter, 0x0, 0x0, offset + 2, 4,
397 frame_len = frame_len & 0xffff;
399 GST_DEBUG ("0x%08x: tag %02x, frame_len=%u", offset + 2, value, frame_len);
400 /* the frame length includes the 2 bytes for the length; here we want at
401 * least 2 more bytes at the end for an end marker */
402 if (offset + 2 + 2 + frame_len + 2 > size) {
/* SOS / RSTn: scan past the entropy-coded data for the next real marker
 * (0xff followed by a non-zero, non-fill byte). */
406 if (gst_jpeg_dec_parse_tag_has_entropy_segment (value)) {
407 guint eseglen = dec->parse_entropy_len;
409 GST_DEBUG ("0x%08x: finding entropy segment length (eseglen:%d)",
410 offset + 2, eseglen);
411 if (size < offset + 2 + frame_len + eseglen)
413 noffset = offset + 2 + frame_len + dec->parse_entropy_len;
415 GST_DEBUG ("noffset:%d, size:%d, size - noffset:%d",
416 noffset, size, size - noffset);
417 noffset = gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00,
418 0x0000ff00, noffset, size - noffset, &value);
421 dec->parse_entropy_len = size - offset - 4 - frame_len - 2;
424 if ((value & 0xff) != 0x00) {
425 eseglen = noffset - offset - frame_len - 2;
430 dec->parse_entropy_len = 0;
431 frame_len += eseglen;
432 GST_DEBUG ("entropy segment length=%u => frame_len=%u", eseglen,
436 /* check if we will still be in sync if we interpret
437 * this as a sync point and skip this frame */
438 noffset = offset + frame_len + 2;
439 noffset = gst_adapter_masked_scan_uint32 (adapter, 0x0000ff00, 0x0000ff00,
442 /* ignore and continue resyncing until we hit the end
443 * of our data or find a sync point that looks okay */
447 GST_DEBUG ("found sync at 0x%x", offset + 2);
450 /* Add current data to output buffer */
451 toadd += frame_len + 2;
452 offset += frame_len + 2;
457 gst_video_decoder_add_to_frame (bdec, toadd);
458 return GST_VIDEO_DECODER_FLOW_NEED_DATA;
/* have_full_frame: push everything gathered so far to the base class. */
462 gst_video_decoder_add_to_frame (bdec, toadd);
463 return gst_video_decoder_have_frame (bdec);
466 gst_adapter_flush (adapter, size);
471 /* shamelessly ripped from jpegutils.c in mjpegtools */
/* Allocates (if needed) and fills one libjpeg Huffman table from the given
 * code-length counts and symbol values. */
473 add_huff_table (j_decompress_ptr dinfo,
474 JHUFF_TBL ** htblptr, const UINT8 * bits, const UINT8 * val)
475 /* Define a Huffman table */
479 if (*htblptr == NULL)
480 *htblptr = jpeg_alloc_huff_table ((j_common_ptr) dinfo);
484 /* Copy the number-of-symbols-of-each-code-length counts */
485 memcpy ((*htblptr)->bits, bits, sizeof ((*htblptr)->bits));
487 /* Validate the counts. We do this here mainly so we can copy the right
488 * number of symbols from the val[] array, without risking marching off
489 * the end of memory. jchuff.c will do a more thorough test later.
492 for (len = 1; len <= 16; len++)
493 nsymbols += bits[len];
494 if (nsymbols < 1 || nsymbols > 256)
495 g_error ("jpegutils.c: add_huff_table failed badly. ");
497 memcpy ((*htblptr)->huffval, val, nsymbols * sizeof (UINT8));
/* Installs the four standard baseline Huffman tables (JPEG spec, section
 * K.3) into the decompressor — used for streams that omit DHT segments. */
503 std_huff_tables (j_decompress_ptr dinfo)
504 /* Set up the standard Huffman tables (cf. JPEG standard section K.3) */
505 /* IMPORTANT: these are only valid for 8-bit data precision! */
507 static const UINT8 bits_dc_luminance[17] =
508 { /* 0-base */ 0, 0, 1, 5, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0 };
509 static const UINT8 val_dc_luminance[] =
510 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
512 static const UINT8 bits_dc_chrominance[17] =
513 { /* 0-base */ 0, 0, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0 };
514 static const UINT8 val_dc_chrominance[] =
515 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
517 static const UINT8 bits_ac_luminance[17] =
518 { /* 0-base */ 0, 0, 2, 1, 3, 3, 2, 4, 3, 5, 5, 4, 4, 0, 0, 1, 0x7d };
519 static const UINT8 val_ac_luminance[] =
520 { 0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12,
521 0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07,
522 0x22, 0x71, 0x14, 0x32, 0x81, 0x91, 0xa1, 0x08,
523 0x23, 0x42, 0xb1, 0xc1, 0x15, 0x52, 0xd1, 0xf0,
524 0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16,
525 0x17, 0x18, 0x19, 0x1a, 0x25, 0x26, 0x27, 0x28,
526 0x29, 0x2a, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39,
527 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49,
528 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59,
529 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69,
530 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79,
531 0x7a, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89,
532 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98,
533 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
534 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6,
535 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4, 0xc5,
536 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4,
537 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xe1, 0xe2,
538 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea,
539 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
543 static const UINT8 bits_ac_chrominance[17] =
544 { /* 0-base */ 0, 0, 2, 1, 2, 4, 4, 3, 4, 7, 5, 4, 4, 0, 1, 2, 0x77 };
545 static const UINT8 val_ac_chrominance[] =
546 { 0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21,
547 0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71,
548 0x13, 0x22, 0x32, 0x81, 0x08, 0x14, 0x42, 0x91,
549 0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33, 0x52, 0xf0,
550 0x15, 0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34,
551 0xe1, 0x25, 0xf1, 0x17, 0x18, 0x19, 0x1a, 0x26,
552 0x27, 0x28, 0x29, 0x2a, 0x35, 0x36, 0x37, 0x38,
553 0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
554 0x49, 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58,
555 0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
556 0x69, 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78,
557 0x79, 0x7a, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
558 0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96,
559 0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5,
560 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4,
561 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3,
562 0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2,
563 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda,
564 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9,
565 0xea, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
/* Table slot 0 = luminance, slot 1 = chrominance. */
569 add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[0],
570 bits_dc_luminance, val_dc_luminance);
571 add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[0],
572 bits_ac_luminance, val_ac_luminance);
573 add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[1],
574 bits_dc_chrominance, val_dc_chrominance);
575 add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[1],
576 bits_ac_chrominance, val_ac_chrominance);
/* If the stream defined no Huffman tables at all, fall back to the
 * standard baseline tables so decoding can still proceed. */
582 guarantee_huff_tables (j_decompress_ptr dinfo)
584 if ((dinfo->dc_huff_tbl_ptrs[0] == NULL) &&
585 (dinfo->dc_huff_tbl_ptrs[1] == NULL) &&
586 (dinfo->ac_huff_tbl_ptrs[0] == NULL) &&
587 (dinfo->ac_huff_tbl_ptrs[1] == NULL)) {
588 GST_DEBUG ("Generating standard Huffman tables for this frame.");
589 std_huff_tables (dinfo);
/* GstVideoDecoder::set_format — decides packetized vs. parsed input from
 * the upstream framerate and caches the input state. */
594 gst_jpeg_dec_set_format (GstVideoDecoder * dec, GstVideoCodecState * state)
596 GstJpegDec *jpeg = GST_JPEG_DEC (dec);
597 GstVideoInfo *info = &state->info;
599 /* FIXME : previously jpegdec would handle input as packetized
600 * if the framerate was present. Here we consider it packetized if
601 * the fps is != 1/1 */
/* NOTE(review): with &&, any fps with denominator 1 (e.g. 25/1) takes the
 * non-packetized path, contradicting the "fps != 1/1" comment — confirm
 * whether this condition was meant to use ||. */
602 if (GST_VIDEO_INFO_FPS_N (info) != 1 && GST_VIDEO_INFO_FPS_D (info) != 1)
603 gst_video_decoder_set_packetized (dec, TRUE);
605 gst_video_decoder_set_packetized (dec, FALSE);
/* Replace any previously cached input state. */
607 if (jpeg->input_state)
608 gst_video_codec_state_unref (jpeg->input_state);
609 jpeg->input_state = gst_video_codec_state_ref (state);
/* Copies len pixels while horizontally downsampling by 2: takes every
 * second byte of src. */
617 hresamplecpy1 (guint8 * dest, const guint8 * src, guint len)
621 for (i = 0; i < len; ++i) {
622 /* equivalent to: dest[i] = src[i << 1] */
/* Frees the 16 per-component scratch row buffers used for indirect
 * decoding and resets the bookkeeping width. g_free(NULL) is a no-op. */
631 gst_jpeg_dec_free_buffers (GstJpegDec * dec)
635 for (i = 0; i < 16; i++) {
636 g_free (dec->idr_y[i]);
637 g_free (dec->idr_u[i]);
638 g_free (dec->idr_v[i]);
639 dec->idr_y[i] = NULL;
640 dec->idr_u[i] = NULL;
641 dec->idr_v[i] = NULL;
644 dec->idr_width_allocated = 0;
/* (Re)allocates the 16 scratch rows per component to maxrowbytes each,
 * skipping the work when the current allocation already matches. */
647 static inline gboolean
648 gst_jpeg_dec_ensure_buffers (GstJpegDec * dec, guint maxrowbytes)
652 if (G_LIKELY (dec->idr_width_allocated == maxrowbytes))
655 /* FIXME: maybe just alloc one or three blocks altogether? */
/* NOTE(review): assigning g_try_realloc's result straight back into the
 * slot leaks the old allocation when it returns NULL — the classic
 * realloc-overwrite pattern; consider a temporary. */
656 for (i = 0; i < 16; i++) {
657 dec->idr_y[i] = g_try_realloc (dec->idr_y[i], maxrowbytes);
658 dec->idr_u[i] = g_try_realloc (dec->idr_u[i], maxrowbytes);
659 dec->idr_v[i] = g_try_realloc (dec->idr_v[i], maxrowbytes);
661 if (G_UNLIKELY (!dec->idr_y[i] || !dec->idr_u[i] || !dec->idr_v[i])) {
662 GST_WARNING_OBJECT (dec, "out of memory, i=%d, bytes=%u", i, maxrowbytes);
667 dec->idr_width_allocated = maxrowbytes;
668 GST_LOG_OBJECT (dec, "allocated temp memory, %u bytes/row", maxrowbytes);
/* Decodes a grayscale JPEG into scratch rows and copies pixel-by-pixel
 * into the single output component, honouring its pixel/row strides. */
673 gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, GstVideoFrame * frame)
676 guchar **scanarray[1] = { rows };
681 gint pstride, rstride;
683 GST_DEBUG_OBJECT (dec, "indirect decoding of grayscale");
685 width = GST_VIDEO_FRAME_WIDTH (frame);
686 height = GST_VIDEO_FRAME_HEIGHT (frame);
688 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
691 base[0] = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
692 pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
693 rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
/* Point the row pointers at the preallocated scratch rows. */
695 memcpy (rows, dec->idr_y, 16 * sizeof (gpointer));
/* Pull DCTSIZE rows per iteration from libjpeg, then scatter them out. */
699 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
700 if (G_LIKELY (lines > 0)) {
701 for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
705 for (k = 0; k < width; k++) {
706 base[0][p] = rows[j][k];
712 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Decodes an RGB JPEG (no subsampling) into per-component scratch rows and
 * interleaves them into the output frame at its pixel/row strides. */
718 gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame)
720 guchar *r_rows[16], *g_rows[16], *b_rows[16];
721 guchar **scanarray[3] = { r_rows, g_rows, b_rows };
725 guint pstride, rstride;
728 GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");
730 width = GST_VIDEO_FRAME_WIDTH (frame);
731 height = GST_VIDEO_FRAME_HEIGHT (frame);
733 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
736 for (i = 0; i < 3; i++)
737 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
/* All three components share the frame's stride layout. */
739 pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
740 rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
/* Point the row pointers at the preallocated scratch rows. */
742 memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
743 memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
744 memcpy (b_rows, dec->idr_v, 16 * sizeof (gpointer));
748 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
749 if (G_LIKELY (lines > 0)) {
750 for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
754 for (k = 0; k < width; k++) {
755 base[0][p] = r_rows[j][k];
756 base[1][p] = g_rows[j][k];
757 base[2][p] = b_rows[j][k];
765 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Indirect YUV decode path: libjpeg writes into scratch rows which are then
 * memcpy'd into the output frame.  Used when the picture width/sampling
 * would otherwise let libjpeg write past the end of an output line.
 * BUGFIX: the grayscale chroma-fill memsets had the value and size
 * arguments swapped — memset(ptr, value, size) — so only 0x80 bytes were
 * written, with the fill value taken from the low byte of the width.  Now
 * the full scratch row is filled with 0x80 (neutral chroma). */
771 gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame, gint r_v,
774 guchar *y_rows[16], *u_rows[16], *v_rows[16];
775 guchar **scanarray[3] = { y_rows, u_rows, v_rows };
778 guchar *base[3], *last[3];
782 GST_DEBUG_OBJECT (dec,
783 "unadvantageous width or r_h, taking slow route involving memcpy");
785 width = GST_VIDEO_FRAME_WIDTH (frame);
786 height = GST_VIDEO_FRAME_HEIGHT (frame);
788 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
791 for (i = 0; i < 3; i++) {
792 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
793 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
794 /* make sure we don't make jpeglib write beyond our buffer,
795 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
796 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
797 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
/* Point the row pointers at the preallocated scratch rows. */
800 memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer));
801 memcpy (u_rows, dec->idr_u, 16 * sizeof (gpointer));
802 memcpy (v_rows, dec->idr_v, 16 * sizeof (gpointer));
804 /* fill chroma components for grayscale */
806 GST_DEBUG_OBJECT (dec, "grayscale, filling chroma");
807 for (i = 0; i < 16; i++) {
808 memset (u_rows[i], 0x80, GST_ROUND_UP_32 (width));
809 memset (v_rows[i], 0x80, GST_ROUND_UP_32 (width));
/* Decode r_v*DCTSIZE scanlines at a time and copy them out, clamping to
 * 'last' so partial final MCU rows never write past the frame. */
813 for (i = 0; i < height; i += r_v * DCTSIZE) {
814 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, r_v * DCTSIZE);
815 if (G_LIKELY (lines > 0)) {
816 for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) {
817 if (G_LIKELY (base[0] <= last[0])) {
818 memcpy (base[0], y_rows[j], stride[0]);
819 base[0] += stride[0];
822 if (G_LIKELY (base[0] <= last[0])) {
823 memcpy (base[0], y_rows[j + 1], stride[0]);
824 base[0] += stride[0];
827 if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) {
829 memcpy (base[1], u_rows[k], stride[1]);
830 memcpy (base[2], v_rows[k], stride[2]);
831 } else if (r_h == 1) {
/* 4:4:0-style input into 4:2:0 output: drop every other chroma sample. */
832 hresamplecpy1 (base[1], u_rows[k], stride[1]);
833 hresamplecpy1 (base[2], v_rows[k], stride[2]);
835 /* FIXME: implement (at least we avoid crashing by doing nothing) */
/* Advance chroma only once per pair of luma rows when r_v == 1. */
839 if (r_v == 2 || (k & 1) != 0) {
840 base[1] += stride[1];
841 base[2] += stride[2];
845 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Fast YUV decode path: builds per-component scanline pointer tables that
 * point directly into the output frame so libjpeg writes into the final
 * buffer with no intermediate copy.  Pointers are clamped to 'last' so a
 * partial final MCU row cannot write past the frame. */
851 gst_jpeg_dec_decode_direct (GstJpegDec * dec, GstVideoFrame * frame)
853 guchar **line[3]; /* the jpeg line buffer */
854 guchar *y[4 * DCTSIZE] = { NULL, }; /* alloc enough for the lines */
855 guchar *u[4 * DCTSIZE] = { NULL, }; /* r_v will be <4 */
856 guchar *v[4 * DCTSIZE] = { NULL, };
858 gint lines, v_samp[3];
859 guchar *base[3], *last[3];
867 v_samp[0] = dec->cinfo.comp_info[0].v_samp_factor;
868 v_samp[1] = dec->cinfo.comp_info[1].v_samp_factor;
869 v_samp[2] = dec->cinfo.comp_info[2].v_samp_factor;
871 if (G_UNLIKELY (v_samp[0] > 2 || v_samp[1] > 2 || v_samp[2] > 2))
872 goto format_not_supported;
874 height = GST_VIDEO_FRAME_HEIGHT (frame);
876 for (i = 0; i < 3; i++) {
877 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
878 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
879 /* make sure we don't make jpeglib write beyond our buffer,
880 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
881 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
882 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
885 /* let jpeglib decode directly into our final buffer */
886 GST_DEBUG_OBJECT (dec, "decoding directly into output buffer");
888 for (i = 0; i < height; i += v_samp[0] * DCTSIZE) {
889 for (j = 0; j < (v_samp[0] * DCTSIZE); ++j) {
/* Luma: one output line per decoded line, clamped to the last row. */
891 line[0][j] = base[0] + (i + j) * stride[0];
892 if (G_UNLIKELY (line[0][j] > last[0]))
893 line[0][j] = last[0];
/* Chroma U: mapping depends on whether it is subsampled vs. luma. */
895 if (v_samp[1] == v_samp[0]) {
896 line[1][j] = base[1] + ((i + j) / 2) * stride[1];
897 } else if (j < (v_samp[1] * DCTSIZE)) {
898 line[1][j] = base[1] + ((i / 2) + j) * stride[1];
900 if (G_UNLIKELY (line[1][j] > last[1]))
901 line[1][j] = last[1];
/* Chroma V: same mapping as U. */
903 if (v_samp[2] == v_samp[0]) {
904 line[2][j] = base[2] + ((i + j) / 2) * stride[2];
905 } else if (j < (v_samp[2] * DCTSIZE)) {
906 line[2][j] = base[2] + ((i / 2) + j) * stride[2];
908 if (G_UNLIKELY (line[2][j] > last[2]))
909 line[2][j] = last[2];
912 lines = jpeg_read_raw_data (&dec->cinfo, line, v_samp[0] * DCTSIZE);
913 if (G_UNLIKELY (!lines)) {
914 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
919 format_not_supported:
/* NOTE(review): ret is declared gboolean but assigned GST_FLOW_OK and
 * passed to GST_VIDEO_DECODER_ERROR — looks like it should be
 * GstFlowReturn; confirm against the full function. */
921 gboolean ret = GST_FLOW_OK;
923 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
924 (_("Failed to decode JPEG image")),
925 ("Unsupported subsampling schema: v_samp factors: %u %u %u", v_samp[0],
926 v_samp[1], v_samp[2]), ret);
/* Maps the libjpeg colorspace to an output GstVideoFormat and (re)sets the
 * decoder output state, skipping renegotiation when nothing changed.
 * For I420 output the colorimetry is pinned to full-range BT.601. */
933 gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc)
935 GstVideoCodecState *outstate;
937 GstVideoFormat format;
941 format = GST_VIDEO_FORMAT_RGB;
944 format = GST_VIDEO_FORMAT_GRAY8;
947 format = GST_VIDEO_FORMAT_I420;
951 /* Compare to currently configured output state */
952 outstate = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (dec));
954 info = &outstate->info;
956 if (width == GST_VIDEO_INFO_WIDTH (info) &&
957 height == GST_VIDEO_INFO_HEIGHT (info) &&
958 format == GST_VIDEO_INFO_FORMAT (info)) {
/* Same caps as before: nothing to renegotiate. */
959 gst_video_codec_state_unref (outstate);
962 gst_video_codec_state_unref (outstate);
966 gst_video_decoder_set_output_state (GST_VIDEO_DECODER (dec), format,
967 width, height, dec->input_state);
/* JPEG YCbCr is full range with BT.601 matrix. */
974 outstate->info.colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;
975 outstate->info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
976 outstate->info.colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN;
977 outstate->info.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
981 gst_video_codec_state_unref (outstate);
983 gst_video_decoder_negotiate (GST_VIDEO_DECODER (dec));
985 GST_DEBUG_OBJECT (dec, "max_v_samp_factor=%d", dec->cinfo.max_v_samp_factor);
986 GST_DEBUG_OBJECT (dec, "max_h_samp_factor=%d", dec->cinfo.max_h_samp_factor);
/* GstVideoDecoder::handle_frame — decodes one complete JPEG image:
 * maps the input, reads the header, validates colorspace/components,
 * negotiates output, decodes via the direct or indirect path, and pushes
 * the finished frame.  libjpeg fatal errors longjmp back to the setjmp
 * below and are routed to the error labels at the end. */
990 gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec, GstVideoCodecFrame * frame)
992 GstFlowReturn ret = GST_FLOW_OK;
993 GstJpegDec *dec = (GstJpegDec *) bdec;
994 GstVideoFrame vframe;
998 gboolean need_unmap = TRUE;
999 GstVideoCodecState *state = NULL;
1001 dec->current_frame = frame;
1002 gst_buffer_map (frame->input_buffer, &dec->current_frame_map, GST_MAP_READ);
1003 gst_jpeg_dec_fill_input_buffer (&dec->cinfo);
/* Recovery point for gst_jpeg_dec_my_error_exit's longjmp. */
1005 if (setjmp (dec->jerr.setjmp_buffer)) {
1006 code = dec->jerr.pub.msg_code;
1008 if (code == JERR_INPUT_EOF) {
1009 GST_DEBUG ("jpeg input EOF error, we probably need more data");
1010 goto need_more_data;
1016 hdr_ok = jpeg_read_header (&dec->cinfo, TRUE);
1017 if (G_UNLIKELY (hdr_ok != JPEG_HEADER_OK)) {
1018 GST_WARNING_OBJECT (dec, "reading the header failed, %d", hdr_ok);
1021 GST_LOG_OBJECT (dec, "num_components=%d", dec->cinfo.num_components);
1022 GST_LOG_OBJECT (dec, "jpeg_color_space=%d", dec->cinfo.jpeg_color_space);
1024 if (!dec->cinfo.num_components || !dec->cinfo.comp_info)
1025 goto components_not_supported;
1027 r_h = dec->cinfo.comp_info[0].h_samp_factor;
1028 r_v = dec->cinfo.comp_info[0].v_samp_factor;
1030 GST_LOG_OBJECT (dec, "r_h = %d, r_v = %d", r_h, r_v);
1032 if (dec->cinfo.num_components > 3)
1033 goto components_not_supported;
1035 /* verify color space expectation to avoid going *boom* or bogus output */
1036 if (dec->cinfo.jpeg_color_space != JCS_YCbCr &&
1037 dec->cinfo.jpeg_color_space != JCS_GRAYSCALE &&
1038 dec->cinfo.jpeg_color_space != JCS_RGB)
1039 goto unsupported_colorspace;
1041 #ifndef GST_DISABLE_GST_DEBUG
1045 for (i = 0; i < dec->cinfo.num_components; ++i) {
1046 GST_LOG_OBJECT (dec, "[%d] h_samp_factor=%d, v_samp_factor=%d, cid=%d",
1047 i, dec->cinfo.comp_info[i].h_samp_factor,
1048 dec->cinfo.comp_info[i].v_samp_factor,
1049 dec->cinfo.comp_info[i].component_id);
1054 /* prepare for raw output */
1055 dec->cinfo.do_fancy_upsampling = FALSE;
1056 dec->cinfo.do_block_smoothing = FALSE;
1057 dec->cinfo.out_color_space = dec->cinfo.jpeg_color_space;
1058 dec->cinfo.dct_method = dec->idct_method;
1059 dec->cinfo.raw_data_out = TRUE;
1061 GST_LOG_OBJECT (dec, "starting decompress");
1062 guarantee_huff_tables (&dec->cinfo);
1063 if (!jpeg_start_decompress (&dec->cinfo)) {
1064 GST_WARNING_OBJECT (dec, "failed to start decompression cycle");
1067 /* sanity checks to get safe and reasonable output */
1068 switch (dec->cinfo.jpeg_color_space) {
1070 if (dec->cinfo.num_components != 1)
1071 goto invalid_yuvrgbgrayscale;
1074 if (dec->cinfo.num_components != 3 || dec->cinfo.max_v_samp_factor > 1 ||
1075 dec->cinfo.max_h_samp_factor > 1)
1076 goto invalid_yuvrgbgrayscale;
1079 if (dec->cinfo.num_components != 3 ||
1080 r_v > 2 || r_v < dec->cinfo.comp_info[0].v_samp_factor ||
1081 r_v < dec->cinfo.comp_info[1].v_samp_factor ||
1082 r_h < dec->cinfo.comp_info[0].h_samp_factor ||
1083 r_h < dec->cinfo.comp_info[1].h_samp_factor)
1084 goto invalid_yuvrgbgrayscale;
1087 g_assert_not_reached ();
1091 width = dec->cinfo.output_width;
1092 height = dec->cinfo.output_height;
1094 if (G_UNLIKELY (width < MIN_WIDTH || width > MAX_WIDTH ||
1095 height < MIN_HEIGHT || height > MAX_HEIGHT))
1098 gst_jpeg_dec_negotiate (dec, width, height, dec->cinfo.jpeg_color_space);
1100 state = gst_video_decoder_get_output_state (bdec);
1101 ret = gst_video_decoder_allocate_output_frame (bdec, frame);
1102 if (G_UNLIKELY (ret != GST_FLOW_OK))
1105 if (!gst_video_frame_map (&vframe, &state->info, frame->output_buffer,
1109 GST_LOG_OBJECT (dec, "width %d, height %d", width, height);
/* Pick the decode path by colorspace; YCbCr additionally chooses between
 * the direct (in-place) and indirect (copy) variants below. */
1111 if (dec->cinfo.jpeg_color_space == JCS_RGB) {
1112 gst_jpeg_dec_decode_rgb (dec, &vframe);
1113 } else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
1114 gst_jpeg_dec_decode_grayscale (dec, &vframe);
1116 GST_LOG_OBJECT (dec, "decompressing (reqired scanline buffer height = %u)",
1117 dec->cinfo.rec_outbuf_height);
1119 /* For some widths jpeglib requires more horizontal padding than I420
1120 * provides. In those cases we need to decode into separate buffers and then
1121 * copy over the data into our final picture buffer, otherwise jpeglib might
1122 * write over the end of a line into the beginning of the next line,
1123 * resulting in blocky artifacts on the left side of the picture. */
1124 if (G_UNLIKELY (width % (dec->cinfo.max_h_samp_factor * DCTSIZE) != 0
1125 || dec->cinfo.comp_info[0].h_samp_factor != 2
1126 || dec->cinfo.comp_info[1].h_samp_factor != 1
1127 || dec->cinfo.comp_info[2].h_samp_factor != 1)) {
1128 GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
1129 "indirect decoding using extra buffer copy");
1130 gst_jpeg_dec_decode_indirect (dec, &vframe, r_v, r_h,
1131 dec->cinfo.num_components);
1133 ret = gst_jpeg_dec_decode_direct (dec, &vframe);
1135 if (G_UNLIKELY (ret != GST_FLOW_OK))
1136 goto decode_direct_failed;
1140 gst_video_frame_unmap (&vframe);
1142 GST_LOG_OBJECT (dec, "decompressing finished");
1143 jpeg_finish_decompress (&dec->cinfo);
1145 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1146 ret = gst_video_decoder_finish_frame (bdec, frame);
/* Common exit: release the input map and cached output state. */
1154 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1157 gst_video_codec_state_unref (state);
/* ----- error / special-case labels ----- */
1164 GST_LOG_OBJECT (dec, "we need more data");
1171 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1172 (_("Failed to decode JPEG image")),
1173 ("Picture is too small or too big (%ux%u)", width, height), ret);
1174 ret = GST_FLOW_ERROR;
1179 gchar err_msg[JMSG_LENGTH_MAX];
1181 dec->jerr.pub.format_message ((j_common_ptr) (&dec->cinfo), err_msg);
1183 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1184 (_("Failed to decode JPEG image")), ("Decode error #%u: %s", code,
1187 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1188 gst_video_decoder_drop_frame (bdec, frame);
1190 jpeg_abort_decompress (&dec->cinfo);
1192 ret = GST_FLOW_ERROR;
1195 decode_direct_failed:
1197 /* already posted an error message */
1198 jpeg_abort_decompress (&dec->cinfo);
1203 const gchar *reason;
1205 reason = gst_flow_get_name (ret);
1207 GST_DEBUG_OBJECT (dec, "failed to alloc buffer, reason %s", reason);
1208 /* Reset for next time */
1209 jpeg_abort_decompress (&dec->cinfo);
/* Downstream shutdown-style flows are not decoding errors. */
1210 if (ret != GST_FLOW_EOS && ret != GST_FLOW_FLUSHING &&
1211 ret != GST_FLOW_NOT_LINKED) {
1212 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1213 (_("Failed to decode JPEG image")),
1214 ("Buffer allocation failed, reason: %s", reason), ret);
1215 jpeg_abort_decompress (&dec->cinfo);
1219 components_not_supported:
1221 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1222 (_("Failed to decode JPEG image")),
1223 ("number of components not supported: %d (max 3)",
1224 dec->cinfo.num_components), ret);
1225 jpeg_abort_decompress (&dec->cinfo);
1228 unsupported_colorspace:
1230 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1231 (_("Failed to decode JPEG image")),
1232 ("Picture has unknown or unsupported colourspace"), ret);
1233 jpeg_abort_decompress (&dec->cinfo);
1236 invalid_yuvrgbgrayscale:
1238 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1239 (_("Failed to decode JPEG image")),
1240 ("Picture is corrupt or unhandled YUV/RGB/grayscale layout"), ret);
1241 jpeg_abort_decompress (&dec->cinfo);
/* decide_allocation: choose and configure the downstream buffer pool.
 * Chains up to the base GstVideoDecoder implementation first, then — if
 * downstream advertised GstVideoMeta support in the allocation query —
 * enables the VIDEO_META option on the first proposed pool so buffers
 * with non-default strides/offsets can be used. */
1247 gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query)
1249 GstBufferPool *pool = NULL;
1250 GstStructure *config;
/* let the parent class fill/adjust the query with its defaults first */
1252 if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query))
/* take the first pool proposed in the (possibly updated) query */
1255 if (gst_query_get_n_allocation_pools (query) > 0)
1256 gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
1261 config = gst_buffer_pool_get_config (pool);
/* downstream understands GstVideoMeta: allow the pool to attach it */
1262 if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
1263 gst_buffer_pool_config_add_option (config,
1264 GST_BUFFER_POOL_OPTION_VIDEO_META);
1266 gst_buffer_pool_set_config (pool, config);
/* gst_query_parse_nth_allocation_pool returned a ref; release it */
1267 gst_object_unref (pool);
/* start: GstVideoDecoder vfunc, called when the decoder becomes active.
 * Resets the stream-parsing state and declares the input as a raw byte
 * stream (packetized = FALSE), i.e. this element must locate JPEG frame
 * boundaries itself via its parse vfunc. */
1273 gst_jpeg_dec_start (GstVideoDecoder * bdec)
1275 GstJpegDec *dec = (GstJpegDec *) bdec;
/* no SOI/header seen yet; parser starts from scratch */
1277 dec->saw_header = FALSE;
1278 dec->parse_entropy_len = 0;
1279 dec->parse_resync = FALSE;
1281 gst_video_decoder_set_packetized (bdec, FALSE);
/* flush: GstVideoDecoder vfunc, called on flush/seek.
 * Aborts any libjpeg decompression in progress and resets the
 * frame-parsing state so the next buffer is treated as a fresh start. */
1287 gst_jpeg_dec_flush (GstVideoDecoder * bdec)
1289 GstJpegDec *dec = (GstJpegDec *) bdec;
/* discard partial decode state inside libjpeg without destroying cinfo */
1291 jpeg_abort_decompress (&dec->cinfo);
1292 dec->parse_entropy_len = 0;
1293 dec->parse_resync = FALSE;
1294 dec->saw_header = FALSE;
/* set_property: standard GObject property setter for jpegdec.
 * Supported properties: idct-method (libjpeg IDCT algorithm) and
 * max-errors (tolerated decode errors before erroring out). */
1300 gst_jpeg_dec_set_property (GObject * object, guint prop_id,
1301     const GValue * value, GParamSpec * pspec)
1305 dec = GST_JPEG_DEC (object);
1308 case PROP_IDCT_METHOD:
1309 dec->idct_method = g_value_get_enum (value);
1311 case PROP_MAX_ERRORS:
/* atomic store — presumably max_errors is also read from the streaming
 * thread without a lock; TODO(review) confirm against its readers */
1312 g_atomic_int_set (&dec->max_errors, g_value_get_int (value));
1316 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* get_property: standard GObject property getter, mirroring
 * gst_jpeg_dec_set_property for idct-method and max-errors. */
1322 gst_jpeg_dec_get_property (GObject * object, guint prop_id, GValue * value,
1327 dec = GST_JPEG_DEC (object);
1330 case PROP_IDCT_METHOD:
1331 g_value_set_enum (value, dec->idct_method);
1333 case PROP_MAX_ERRORS:
/* atomic load to pair with the g_atomic_int_set in the setter */
1334 g_value_set_int (value, g_atomic_int_get (&dec->max_errors));
1338 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1344 gst_jpeg_dec_stop (GstVideoDecoder * bdec)
1346 GstJpegDec *dec = (GstJpegDec *) bdec;
1348 gst_jpeg_dec_free_buffers (dec);