2 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
3 * Copyright (C) <2009> Tim-Philipp Müller <tim centricular net>
4 * Copyright (C) 2012 Collabora Ltd.
5 * Author : Edward Hervey <edward@collabora.com>
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Library General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Library General Public License for more details.
17 * You should have received a copy of the GNU Library General Public
18 * License along with this library; if not, write to the
19 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
20 * Boston, MA 02110-1301, USA.
24 * SECTION:element-jpegdec
27 * Decodes jpeg images.
29 * ## Example launch line
31 * gst-launch-1.0 -v filesrc location=mjpeg.avi ! avidemux ! queue ! jpegdec ! videoconvert ! videoscale ! autovideosink
32 * ]| The above pipeline decodes the mjpeg stream and renders it to the screen.
42 #include "gstjpegdec.h"
43 #include "gstjpegelements.h"
44 #include <gst/video/video.h>
45 #include <gst/video/gstvideometa.h>
46 #include <gst/video/gstvideopool.h>
47 #include <glib/gi18n-lib.h>
51 #define MAX_WIDTH 65535
53 #define MAX_HEIGHT 65535
55 #define CINFO_GET_JPEGDEC(cinfo_ptr) \
56 (((struct GstJpegDecSourceMgr*)((cinfo_ptr)->src))->dec)
58 #define JPEG_DEFAULT_IDCT_METHOD JDCT_FASTEST
59 #define JPEG_DEFAULT_MAX_ERRORS 0
/* Source pad template: the raw video formats jpegdec can output.
 * NOTE(review): some template argument lines (pad direction/presence)
 * are not visible in this view — confirm against upstream. */
69 static GstStaticPadTemplate gst_jpeg_dec_src_pad_template =
70 GST_STATIC_PAD_TEMPLATE ("src",
73 GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
74 ("{ I420, RGB, BGR, RGBx, xRGB, BGRx, xBGR, GRAY8 }"))
78 /* FIXME: sof-marker is for IJG libjpeg 8, should be different for 6.2 */
79 /* FIXME: add back "sof-marker = (int) { 0, 1, 2, 5, 6, 7, 9, 10, 13, 14 }"
80 * once we have a parser and/or demuxer set caps properly */
/* Sink pad template: accepts any image/jpeg stream. */
81 static GstStaticPadTemplate gst_jpeg_dec_sink_pad_template =
82 GST_STATIC_PAD_TEMPLATE ("sink",
85 GST_STATIC_CAPS ("image/jpeg")
88 GST_DEBUG_CATEGORY_STATIC (jpeg_dec_debug);
89 #define GST_CAT_DEFAULT jpeg_dec_debug
90 GST_DEBUG_CATEGORY_STATIC (GST_CAT_PERFORMANCE);
92 static void gst_jpeg_dec_set_property (GObject * object, guint prop_id,
93 const GValue * value, GParamSpec * pspec);
94 static void gst_jpeg_dec_get_property (GObject * object, guint prop_id,
95 GValue * value, GParamSpec * pspec);
97 static gboolean gst_jpeg_dec_set_format (GstVideoDecoder * dec,
98 GstVideoCodecState * state);
99 static gboolean gst_jpeg_dec_start (GstVideoDecoder * bdec);
100 static gboolean gst_jpeg_dec_stop (GstVideoDecoder * bdec);
101 static gboolean gst_jpeg_dec_flush (GstVideoDecoder * bdec);
102 static GstFlowReturn gst_jpeg_dec_parse (GstVideoDecoder * bdec,
103 GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos);
104 static GstFlowReturn gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec,
105 GstVideoCodecFrame * frame);
106 static gboolean gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec,
108 static gboolean gst_jpeg_dec_sink_event (GstVideoDecoder * bdec,
111 #define gst_jpeg_dec_parent_class parent_class
112 G_DEFINE_TYPE (GstJpegDec, gst_jpeg_dec, GST_TYPE_VIDEO_DECODER);
113 GST_ELEMENT_REGISTER_DEFINE (jpegdec, "jpegdec", GST_RANK_PRIMARY,
/* GObject finalize: tears down the libjpeg decompress context and drops
 * the cached input codec state, then chains up to the parent class. */
117 gst_jpeg_dec_finalize (GObject * object)
119 GstJpegDec *dec = GST_JPEG_DEC (object);
121 jpeg_destroy_decompress (&dec->cinfo);
122 if (dec->input_state)
123 gst_video_codec_state_unref (dec->input_state);
125 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Class init: installs properties (idct-method, deprecated max-errors),
 * registers pad templates and element metadata, and wires up the
 * GstVideoDecoder virtual methods. */
129 gst_jpeg_dec_class_init (GstJpegDecClass * klass)
131 GObjectClass *gobject_class;
132 GstElementClass *element_class;
133 GstVideoDecoderClass *vdec_class;
135 gobject_class = (GObjectClass *) klass;
136 element_class = (GstElementClass *) klass;
137 vdec_class = (GstVideoDecoderClass *) klass;
139 parent_class = g_type_class_peek_parent (klass);
141 gobject_class->finalize = gst_jpeg_dec_finalize;
142 gobject_class->set_property = gst_jpeg_dec_set_property;
143 gobject_class->get_property = gst_jpeg_dec_get_property;
145 g_object_class_install_property (gobject_class, PROP_IDCT_METHOD,
146 g_param_spec_enum ("idct-method", "IDCT Method",
147 "The IDCT algorithm to use", GST_TYPE_IDCT_METHOD,
148 JPEG_DEFAULT_IDCT_METHOD,
149 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
152 * GstJpegDec:max-errors:
154 * Error out after receiving N consecutive decoding errors
155 * (-1 = never error out, 0 = automatic, 1 = fail on first error, etc.)
157 * Deprecated: 1.3.1: Property wasn't used internally
159 #ifndef GST_REMOVE_DEPRECATED
160 g_object_class_install_property (gobject_class, PROP_MAX_ERRORS,
161 g_param_spec_int ("max-errors", "Maximum Consecutive Decoding Errors",
162 "(Deprecated) Error out after receiving N consecutive decoding errors"
163 " (-1 = never fail, 0 = automatic, 1 = fail on first error)",
164 -1, G_MAXINT, JPEG_DEFAULT_MAX_ERRORS,
165 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED));
168 gst_element_class_add_static_pad_template (element_class,
169 &gst_jpeg_dec_src_pad_template);
170 gst_element_class_add_static_pad_template (element_class,
171 &gst_jpeg_dec_sink_pad_template);
172 gst_element_class_set_static_metadata (element_class, "JPEG image decoder",
173 "Codec/Decoder/Image", "Decode images from JPEG format",
174 "Wim Taymans <wim@fluendo.com>");
176 vdec_class->start = gst_jpeg_dec_start;
177 vdec_class->stop = gst_jpeg_dec_stop;
178 vdec_class->flush = gst_jpeg_dec_flush;
179 vdec_class->parse = gst_jpeg_dec_parse;
180 vdec_class->set_format = gst_jpeg_dec_set_format;
181 vdec_class->handle_frame = gst_jpeg_dec_handle_frame;
182 vdec_class->decide_allocation = gst_jpeg_dec_decide_allocation;
183 vdec_class->sink_event = gst_jpeg_dec_sink_event;
185 GST_DEBUG_CATEGORY_INIT (jpeg_dec_debug, "jpegdec", 0, "JPEG decoder");
186 GST_DEBUG_CATEGORY_GET (GST_CAT_PERFORMANCE, "GST_PERFORMANCE");
/* Expose the IDCT method enum to the plugin API introspection. */
188 gst_type_mark_as_plugin_api (GST_TYPE_IDCT_METHOD, 0);
/* libjpeg source-manager callback: a whole frame is handed to libjpeg up
 * front, so being called here means libjpeg ran out of data mid-frame. */
192 gst_jpeg_dec_fill_input_buffer (j_decompress_ptr cinfo)
194 /* We pass in full frame initially, if this get called, the frame is most likely
/* libjpeg source-manager callback: nothing to set up, just log. */
200 gst_jpeg_dec_init_source (j_decompress_ptr cinfo)
202 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "init_source");
/* libjpeg source-manager callback: advance the read pointer by num_bytes,
 * but only when the request fits inside the remaining buffer. */
207 gst_jpeg_dec_skip_input_data (j_decompress_ptr cinfo, glong num_bytes)
209 GstJpegDec *dec = CINFO_GET_JPEGDEC (cinfo);
211 GST_DEBUG_OBJECT (dec, "skip %ld bytes", num_bytes);
213 if (num_bytes > 0 && cinfo->src->bytes_in_buffer >= num_bytes) {
214 cinfo->src->next_input_byte += (size_t) num_bytes;
215 cinfo->src->bytes_in_buffer -= (size_t) num_bytes;
/* libjpeg source-manager callback: restart-marker resync request; only logs. */
220 gst_jpeg_dec_resync_to_restart (j_decompress_ptr cinfo, gint desired)
222 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "resync_to_start");
/* libjpeg source-manager callback: no cleanup needed, just log. */
227 gst_jpeg_dec_term_source (j_decompress_ptr cinfo)
229 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "term_source");
/* libjpeg error-manager hook: suppress libjpeg's stderr messages. */
234 gst_jpeg_dec_my_output_message (j_common_ptr cinfo)
236 return; /* do nothing */
/* libjpeg error-manager hook: swallow warnings/trace messages silently. */
240 gst_jpeg_dec_my_emit_message (j_common_ptr cinfo, int msg_level)
242 /* GST_LOG_OBJECT (CINFO_GET_JPEGDEC (&cinfo), "msg_level=%d", msg_level); */
/* libjpeg error-manager hook: fatal decode errors longjmp back to the
 * setjmp point in the decode path instead of calling exit() as the
 * libjpeg default error_exit would. */
247 gst_jpeg_dec_my_error_exit (j_common_ptr cinfo)
249 struct GstJpegDecErrorMgr *err_mgr = (struct GstJpegDecErrorMgr *) cinfo->err;
251 (*cinfo->err->output_message) (cinfo);
252 longjmp (err_mgr->setjmp_buffer, 1);
/* Instance init: sets up the libjpeg decompress context with our custom
 * error manager and source manager callbacks, then initializes property
 * defaults and sink pad accept-caps behaviour. */
256 gst_jpeg_dec_init (GstJpegDec * dec)
258 GST_DEBUG ("initializing");
/* Zero the libjpeg structs before handing them to jpeg_std_error /
 * jpeg_create_decompress so no stale state leaks in. */
261 memset (&dec->cinfo, 0, sizeof (dec->cinfo));
262 memset (&dec->jerr, 0, sizeof (dec->jerr));
263 dec->cinfo.err = jpeg_std_error (&dec->jerr.pub);
264 dec->jerr.pub.output_message = gst_jpeg_dec_my_output_message;
265 dec->jerr.pub.emit_message = gst_jpeg_dec_my_emit_message;
266 dec->jerr.pub.error_exit = gst_jpeg_dec_my_error_exit;
268 jpeg_create_decompress (&dec->cinfo);
/* Install our own source manager so we feed libjpeg from GstAdapter data. */
270 dec->cinfo.src = (struct jpeg_source_mgr *) &dec->jsrc;
271 dec->cinfo.src->init_source = gst_jpeg_dec_init_source;
272 dec->cinfo.src->fill_input_buffer = gst_jpeg_dec_fill_input_buffer;
273 dec->cinfo.src->skip_input_data = gst_jpeg_dec_skip_input_data;
274 dec->cinfo.src->resync_to_restart = gst_jpeg_dec_resync_to_restart;
275 dec->cinfo.src->term_source = gst_jpeg_dec_term_source;
278 /* init properties */
279 dec->idct_method = JPEG_DEFAULT_IDCT_METHOD;
280 dec->max_errors = JPEG_DEFAULT_MAX_ERRORS;
282 gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
284 GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (dec));
287 static inline gboolean
/* Returns TRUE for markers followed by an entropy-coded segment:
 * SOS (0xda) and restart markers RST0-RST7 (0xd0-0xd7). */
288 gst_jpeg_dec_parse_tag_has_entropy_segment (guint8 tag)
290 if (tag == 0xda || (tag >= 0xd0 && tag <= 0xd7))
/* GstVideoDecoder::parse vfunc: scans the adapter for JPEG marker
 * structure (SOI ... markers/segments ... EOI) and tells the base class
 * how many bytes form a complete frame. Keeps resync state in
 * dec->saw_header / dec->parse_resync / dec->parse_entropy_len across
 * calls so partial input can be resumed. */
296 gst_jpeg_dec_parse (GstVideoDecoder * bdec, GstVideoCodecFrame * frame,
297 GstAdapter * adapter, gboolean at_eos)
302 gint offset = 0, noffset;
303 GstJpegDec *dec = (GstJpegDec *) bdec;
305 GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
307 /* FIXME : The overhead of using scan_uint32 is massive */
309 size = gst_adapter_available (adapter);
310 GST_DEBUG ("Parsing jpeg image data (%u bytes)", size);
313 GST_DEBUG ("Flushing all data out");
316 /* If we have leftover data, throw it away */
317 if (!dec->saw_header)
319 goto have_full_frame;
/* First locate the SOI (0xffd8) start marker before parsing segments. */
325 if (!dec->saw_header) {
327 /* we expect at least 4 bytes, first of which start marker */
329 gst_adapter_masked_scan_uint32 (adapter, 0xffff0000, 0xffd80000, 0,
332 GST_DEBUG ("ret:%d", ret);
337 gst_adapter_flush (adapter, ret);
340 dec->saw_header = TRUE;
347 GST_DEBUG ("offset:%d, size:%d", offset, size);
/* Look for the next 0xff marker byte at the expected position. */
350 gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
351 offset, size - offset, &value);
353 /* lost sync if 0xff marker not where expected */
354 if ((resync = (noffset != offset))) {
355 GST_DEBUG ("Lost sync at 0x%08x, resyncing", offset + 2);
357 /* may have marker, but could have been resyncing */
358 resync = resync || dec->parse_resync;
359 /* Skip over extra 0xff */
360 while ((noffset >= 0) && ((value & 0xff) == 0xff)) {
363 gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
364 noffset, size - noffset, &value);
366 /* enough bytes left for marker? (we need 0xNN after the 0xff) */
368 GST_DEBUG ("at end of input and no EOI marker found, need more data");
372 /* now lock on the marker we found */
374 value = value & 0xff;
376 GST_DEBUG ("0x%08x: EOI marker", offset + 2);
377 /* clear parse state */
378 dec->saw_header = FALSE;
379 dec->parse_resync = FALSE;
381 goto have_full_frame;
/* A second SOI without an EOI means a truncated previous image;
 * hand out what we have as a frame anyway. */
384 GST_DEBUG ("0x%08x: SOI marker before EOI marker", offset + 2);
386 /* clear parse state */
387 dec->saw_header = FALSE;
388 dec->parse_resync = FALSE;
390 goto have_full_frame;
/* Restart markers (RST0-RST7) carry no length field. */
394 if (value >= 0xd0 && value <= 0xd7)
397 /* peek tag and subsequent length */
398 if (offset + 2 + 4 > size)
401 gst_adapter_masked_scan_uint32_peek (adapter, 0x0, 0x0, offset + 2, 4,
403 frame_len = frame_len & 0xffff;
405 GST_DEBUG ("0x%08x: tag %02x, frame_len=%u", offset + 2, value, frame_len);
406 /* the frame length includes the 2 bytes for the length; here we want at
407 * least 2 more bytes at the end for an end marker */
408 if (offset + 2 + 2 + frame_len + 2 > size) {
412 if (gst_jpeg_dec_parse_tag_has_entropy_segment (value)) {
413 guint eseglen = dec->parse_entropy_len;
415 GST_DEBUG ("0x%08x: finding entropy segment length (eseglen:%d)",
416 offset + 2, eseglen);
417 if (size < offset + 2 + frame_len + eseglen)
419 noffset = offset + 2 + frame_len + dec->parse_entropy_len;
421 GST_DEBUG ("noffset:%d, size:%d, size - noffset:%d",
422 noffset, size, size - noffset);
/* Scan past the entropy-coded data for the next real marker
 * (an 0xff not followed by a stuffed 0x00 byte). */
423 noffset = gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00,
424 0x0000ff00, noffset, size - noffset, &value);
427 dec->parse_entropy_len = size - offset - 4 - frame_len - 2;
430 if ((value & 0xff) != 0x00) {
431 eseglen = noffset - offset - frame_len - 2;
436 dec->parse_entropy_len = 0;
437 frame_len += eseglen;
438 GST_DEBUG ("entropy segment length=%u => frame_len=%u", eseglen,
442 /* check if we will still be in sync if we interpret
443 * this as a sync point and skip this frame */
444 noffset = offset + frame_len + 2;
445 noffset = gst_adapter_masked_scan_uint32 (adapter, 0x0000ff00, 0x0000ff00,
448 /* ignore and continue resyncing until we hit the end
449 * of our data or find a sync point that looks okay */
453 GST_DEBUG ("found sync at 0x%x", offset + 2);
456 /* Add current data to output buffer */
457 toadd += frame_len + 2;
458 offset += frame_len + 2;
/* Not enough data yet: commit what we consumed and ask for more. */
463 gst_video_decoder_add_to_frame (bdec, toadd);
464 return GST_VIDEO_DECODER_FLOW_NEED_DATA;
/* Complete image found: commit the bytes and emit the frame. */
468 gst_video_decoder_add_to_frame (bdec, toadd);
469 GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
470 return gst_video_decoder_have_frame (bdec);
473 gst_adapter_flush (adapter, size);
478 /* shamelessly ripped from jpegutils.c in mjpegtools */
/* Installs one Huffman table into the decompress struct, allocating it
 * on first use. `bits` is the 17-entry count-per-code-length array,
 * `val` the symbol values. Aborts via g_error() on an invalid table. */
480 add_huff_table (j_decompress_ptr dinfo,
481 JHUFF_TBL ** htblptr, const UINT8 * bits, const UINT8 * val)
482 /* Define a Huffman table */
486 if (*htblptr == NULL)
487 *htblptr = jpeg_alloc_huff_table ((j_common_ptr) dinfo);
491 /* Copy the number-of-symbols-of-each-code-length counts */
492 memcpy ((*htblptr)->bits, bits, sizeof ((*htblptr)->bits));
494 /* Validate the counts. We do this here mainly so we can copy the right
495 * number of symbols from the val[] array, without risking marching off
496 * the end of memory. jchuff.c will do a more thorough test later.
499 for (len = 1; len <= 16; len++)
500 nsymbols += bits[len];
501 if (nsymbols < 1 || nsymbols > 256)
502 g_error ("jpegutils.c: add_huff_table failed badly. ");
504 memcpy ((*htblptr)->huffval, val, nsymbols * sizeof (UINT8));
/* Installs the four standard JPEG Huffman tables (DC/AC x luma/chroma)
 * from Annex K.3 of the JPEG standard, for streams that omit DHT. */
510 std_huff_tables (j_decompress_ptr dinfo)
511 /* Set up the standard Huffman tables (cf. JPEG standard section K.3) */
512 /* IMPORTANT: these are only valid for 8-bit data precision! */
514 static const UINT8 bits_dc_luminance[17] =
515 { /* 0-base */ 0, 0, 1, 5, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0 };
516 static const UINT8 val_dc_luminance[] =
517 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
519 static const UINT8 bits_dc_chrominance[17] =
520 { /* 0-base */ 0, 0, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0 };
521 static const UINT8 val_dc_chrominance[] =
522 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
524 static const UINT8 bits_ac_luminance[17] =
525 { /* 0-base */ 0, 0, 2, 1, 3, 3, 2, 4, 3, 5, 5, 4, 4, 0, 0, 1, 0x7d };
526 static const UINT8 val_ac_luminance[] =
527 { 0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12,
528 0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07,
529 0x22, 0x71, 0x14, 0x32, 0x81, 0x91, 0xa1, 0x08,
530 0x23, 0x42, 0xb1, 0xc1, 0x15, 0x52, 0xd1, 0xf0,
531 0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16,
532 0x17, 0x18, 0x19, 0x1a, 0x25, 0x26, 0x27, 0x28,
533 0x29, 0x2a, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39,
534 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49,
535 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59,
536 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69,
537 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79,
538 0x7a, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89,
539 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98,
540 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
541 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6,
542 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4, 0xc5,
543 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4,
544 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xe1, 0xe2,
545 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea,
546 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
550 static const UINT8 bits_ac_chrominance[17] =
551 { /* 0-base */ 0, 0, 2, 1, 2, 4, 4, 3, 4, 7, 5, 4, 4, 0, 1, 2, 0x77 };
552 static const UINT8 val_ac_chrominance[] =
553 { 0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21,
554 0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71,
555 0x13, 0x22, 0x32, 0x81, 0x08, 0x14, 0x42, 0x91,
556 0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33, 0x52, 0xf0,
557 0x15, 0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34,
558 0xe1, 0x25, 0xf1, 0x17, 0x18, 0x19, 0x1a, 0x26,
559 0x27, 0x28, 0x29, 0x2a, 0x35, 0x36, 0x37, 0x38,
560 0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
561 0x49, 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58,
562 0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
563 0x69, 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78,
564 0x79, 0x7a, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
565 0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96,
566 0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5,
567 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4,
568 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3,
569 0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2,
570 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda,
571 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9,
572 0xea, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
576 add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[0],
577 bits_dc_luminance, val_dc_luminance);
578 add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[0],
579 bits_ac_luminance, val_ac_luminance);
580 add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[1],
581 bits_dc_chrominance, val_dc_chrominance);
582 add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[1],
583 bits_ac_chrominance, val_ac_chrominance);
/* Some (notably MJPEG) streams omit DHT segments entirely; if no Huffman
 * tables were defined at all, install the standard ones. */
589 guarantee_huff_tables (j_decompress_ptr dinfo)
591 if ((dinfo->dc_huff_tbl_ptrs[0] == NULL) &&
592 (dinfo->dc_huff_tbl_ptrs[1] == NULL) &&
593 (dinfo->ac_huff_tbl_ptrs[0] == NULL) &&
594 (dinfo->ac_huff_tbl_ptrs[1] == NULL)) {
595 GST_DEBUG ("Generating standard Huffman tables for this frame.");
596 std_huff_tables (dinfo);
/* GstVideoDecoder::set_format vfunc: cache the new input codec state,
 * releasing the previous one. */
601 gst_jpeg_dec_set_format (GstVideoDecoder * dec, GstVideoCodecState * state)
603 GstJpegDec *jpeg = GST_JPEG_DEC (dec);
605 if (jpeg->input_state)
606 gst_video_codec_state_unref (jpeg->input_state);
607 jpeg->input_state = gst_video_codec_state_ref (state);
/* Horizontally downsample by 2 while copying: takes every second source
 * sample (len destination bytes written). */
615 hresamplecpy1 (guint8 * dest, const guint8 * src, guint len)
619 for (i = 0; i < len; ++i) {
620 /* equivalent to: dest[i] = src[i << 1] */
/* Frees the 16 per-component intermediate row buffers used by the
 * indirect decode paths and resets the allocated-width bookkeeping. */
629 gst_jpeg_dec_free_buffers (GstJpegDec * dec)
633 for (i = 0; i < 16; i++) {
634 g_free (dec->idr_y[i]);
635 g_free (dec->idr_u[i]);
636 g_free (dec->idr_v[i]);
637 dec->idr_y[i] = NULL;
638 dec->idr_u[i] = NULL;
639 dec->idr_v[i] = NULL;
642 dec->idr_width_allocated = 0;
645 static inline gboolean
/* (Re)allocates the 16 intermediate row buffers to maxrowbytes each,
 * reusing them if the width hasn't changed. Returns FALSE on OOM. */
646 gst_jpeg_dec_ensure_buffers (GstJpegDec * dec, guint maxrowbytes)
650 if (G_LIKELY (dec->idr_width_allocated == maxrowbytes))
653 /* FIXME: maybe just alloc one or three blocks altogether? */
654 for (i = 0; i < 16; i++) {
655 dec->idr_y[i] = g_try_realloc (dec->idr_y[i], maxrowbytes);
656 dec->idr_u[i] = g_try_realloc (dec->idr_u[i], maxrowbytes);
657 dec->idr_v[i] = g_try_realloc (dec->idr_v[i], maxrowbytes);
659 if (G_UNLIKELY (!dec->idr_y[i] || !dec->idr_u[i] || !dec->idr_v[i])) {
660 GST_WARNING_OBJECT (dec, "out of memory, i=%d, bytes=%u", i, maxrowbytes);
665 dec->idr_width_allocated = maxrowbytes;
666 GST_LOG_OBJECT (dec, "allocated temp memory, %u bytes/row", maxrowbytes);
/* Grayscale decode path: reads raw DCT rows into the intermediate
 * buffers, then copies each pixel into the output frame honouring the
 * pixel/row strides (supports interlaced output via field/num_fields). */
671 gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, GstVideoFrame * frame,
672 guint field, guint num_fields)
675 guchar **scanarray[1] = { rows };
680 gint pstride, rstride;
682 GST_DEBUG_OBJECT (dec, "indirect decoding of grayscale");
684 width = GST_VIDEO_FRAME_WIDTH (frame);
685 height = GST_VIDEO_FRAME_HEIGHT (frame) / num_fields;
687 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
690 base[0] = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
/* Bottom field starts one row down. */
692 base[0] += GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
695 pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
696 rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0) * num_fields;
698 memcpy (rows, dec->idr_y, 16 * sizeof (gpointer));
702 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
703 if (G_LIKELY (lines > 0)) {
704 for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
708 for (k = 0; k < width; k++) {
709 base[0][p] = rows[j][k];
715 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* RGB decode path: reads raw component rows (R, G, B planes from
 * libjpeg) into intermediate buffers, then interleaves them into the
 * packed output frame using the component pixel stride. */
721 gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame,
722 guint field, guint num_fields)
724 guchar *r_rows[16], *g_rows[16], *b_rows[16];
725 guchar **scanarray[3] = { r_rows, g_rows, b_rows };
729 guint pstride, rstride;
732 GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");
734 width = GST_VIDEO_FRAME_WIDTH (frame);
735 height = GST_VIDEO_FRAME_HEIGHT (frame) / num_fields;
737 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
740 for (i = 0; i < 3; i++) {
741 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
/* Bottom field starts one row down. */
743 base[i] += GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
746 pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
747 rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0) * num_fields;
749 memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
750 memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
751 memcpy (b_rows, dec->idr_v, 16 * sizeof (gpointer));
755 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
756 if (G_LIKELY (lines > 0)) {
757 for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
761 for (k = 0; k < width; k++) {
762 base[0][p] = r_rows[j][k];
763 base[1][p] = g_rows[j][k];
764 base[2][p] = b_rows[j][k];
772 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Slow YUV decode path for widths/sampling ratios that don't allow
 * decoding straight into the output buffer: libjpeg writes into the
 * intermediate row buffers and rows are memcpy'd (or horizontally
 * resampled) into the frame, clamped to the last valid row. */
778 gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame, gint r_v,
779 gint r_h, gint comp, guint field, guint num_fields)
781 guchar *y_rows[16], *u_rows[16], *v_rows[16];
782 guchar **scanarray[3] = { y_rows, u_rows, v_rows };
785 guchar *base[3], *last[3];
786 gint rowsize[3], stride[3];
789 GST_DEBUG_OBJECT (dec,
790 "unadvantageous width or r_h, taking slow route involving memcpy");
792 width = GST_VIDEO_FRAME_WIDTH (frame);
793 height = GST_VIDEO_FRAME_HEIGHT (frame);
795 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
798 for (i = 0; i < 3; i++) {
799 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
800 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i) * num_fields;
801 rowsize[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
802 /* make sure we don't make jpeglib write beyond our buffer,
803 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
804 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
805 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
808 base[i] += GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
812 memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer));
813 memcpy (u_rows, dec->idr_u, 16 * sizeof (gpointer));
814 memcpy (v_rows, dec->idr_v, 16 * sizeof (gpointer));
816 /* fill chroma components for grayscale */
818 GST_DEBUG_OBJECT (dec, "grayscale, filling chroma");
819 for (i = 0; i < 16; i++) {
/* NOTE(review): memset's value and size arguments appear swapped here.
 * As written this fills 0x80 (128) bytes with the value
 * (GST_ROUND_UP_32 (width) & 0xff); the intent is almost certainly
 * memset (u_rows[i], 0x80, GST_ROUND_UP_32 (width)) — i.e. fill the
 * whole row with the neutral chroma value 0x80. Confirm against
 * upstream and fix. */
820 memset (u_rows[i], GST_ROUND_UP_32 (width), 0x80);
821 memset (v_rows[i], GST_ROUND_UP_32 (width), 0x80);
825 for (i = 0; i < height; i += r_v * DCTSIZE) {
826 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, r_v * DCTSIZE);
827 if (G_LIKELY (lines > 0)) {
828 for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) {
829 if (G_LIKELY (base[0] <= last[0])) {
830 memcpy (base[0], y_rows[j], rowsize[0]);
831 base[0] += stride[0];
834 if (G_LIKELY (base[0] <= last[0])) {
835 memcpy (base[0], y_rows[j + 1], rowsize[0]);
836 base[0] += stride[0];
839 if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) {
841 memcpy (base[1], u_rows[k], rowsize[1]);
842 memcpy (base[2], v_rows[k], rowsize[2]);
843 } else if (r_h == 1) {
/* Chroma is horizontally oversampled: copy every 2nd sample. */
844 hresamplecpy1 (base[1], u_rows[k], rowsize[1]);
845 hresamplecpy1 (base[2], v_rows[k], rowsize[2]);
847 /* FIXME: implement (at least we avoid crashing by doing nothing) */
/* Advance chroma rows every line for 4:2:0 (r_v==2) or every
 * other line otherwise. */
851 if (r_v == 2 || (k & 1) != 0) {
852 base[1] += stride[1];
853 base[2] += stride[2];
857 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Fast decode path: builds per-component row-pointer tables that point
 * directly into the output frame so libjpeg writes into it without an
 * intermediate copy. Rows past the buffer end are redirected to a
 * scratch row so libjpeg can't overrun when height isn't a multiple of
 * r_v*DCTSIZE. With libjpeg-turbo colorspace extensions enabled it can
 * also let libjpeg convert straight to the negotiated packed format. */
863 gst_jpeg_dec_decode_direct (GstJpegDec * dec, GstVideoFrame * frame,
864 guint field, guint num_fields)
866 guchar **line[3]; /* the jpeg line buffer */
867 guchar *y[4 * DCTSIZE] = { NULL, }; /* alloc enough for the lines */
868 guchar *u[4 * DCTSIZE] = { NULL, }; /* r_v will be <4 */
869 guchar *v[4 * DCTSIZE] = { NULL, };
871 gint lines, v_samp[3];
872 guchar *base[3], *last[3];
874 guint height, field_height;
880 v_samp[0] = dec->cinfo.comp_info[0].v_samp_factor;
881 v_samp[1] = dec->cinfo.comp_info[1].v_samp_factor;
882 v_samp[2] = dec->cinfo.comp_info[2].v_samp_factor;
884 if (G_UNLIKELY (v_samp[0] > 2 || v_samp[1] > 2 || v_samp[2] > 2))
885 goto format_not_supported;
887 height = field_height = GST_VIDEO_FRAME_HEIGHT (frame);
889 /* XXX: division by 2 here might not be a good idea yes. But we are doing this
890 * already in gst_jpeg_dec_handle_frame() for interlaced jpeg */
894 for (i = 0; i < 3; i++) {
895 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
896 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i) * num_fields;
897 /* make sure we don't make jpeglib write beyond our buffer,
898 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
899 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
900 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
/* Bottom field starts one row down. */
903 base[i] += GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
/* Allocate (or grow) the scratch row used to absorb overshoot rows. */
907 if (field_height % (v_samp[0] * DCTSIZE) && (dec->scratch_size < stride[0])) {
908 g_free (dec->scratch);
909 dec->scratch = g_malloc (stride[0]);
910 dec->scratch_size = stride[0];
913 /* let jpeglib decode directly into our final buffer */
914 GST_DEBUG_OBJECT (dec, "decoding directly into output buffer");
916 #ifdef JCS_EXTENSIONS
917 if (dec->format_convert) {
918 gint row_stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
919 guchar *bufbase = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
921 if (num_fields == 2) {
926 bufbase += GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
/* libjpeg-turbo does the colorspace conversion: read converted
 * scanlines one at a time straight into the packed output plane. */
929 while (dec->cinfo.output_scanline < dec->cinfo.output_height) {
930 JSAMPARRAY buffer = { &bufbase, };
931 jpeg_read_scanlines (&dec->cinfo, buffer, 1);
932 bufbase += row_stride;
937 for (i = 0; i < height; i += v_samp[0] * DCTSIZE) {
938 for (j = 0; j < (v_samp[0] * DCTSIZE); ++j) {
940 line[0][j] = base[0] + (i + j) * stride[0];
941 if (G_UNLIKELY (line[0][j] > last[0]))
942 line[0][j] = dec->scratch;
944 if (v_samp[1] == v_samp[0]) {
945 line[1][j] = base[1] + ((i + j) / 2) * stride[1];
946 } else if (j < (v_samp[1] * DCTSIZE)) {
947 line[1][j] = base[1] + ((i / 2) + j) * stride[1];
949 if (G_UNLIKELY (line[1][j] > last[1]))
950 line[1][j] = dec->scratch;
952 if (v_samp[2] == v_samp[0]) {
953 line[2][j] = base[2] + ((i + j) / 2) * stride[2];
954 } else if (j < (v_samp[2] * DCTSIZE)) {
955 line[2][j] = base[2] + ((i / 2) + j) * stride[2];
957 if (G_UNLIKELY (line[2][j] > last[2]))
958 line[2][j] = dec->scratch;
961 lines = jpeg_read_raw_data (&dec->cinfo, line, v_samp[0] * DCTSIZE);
962 if (G_UNLIKELY (!lines)) {
963 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
969 format_not_supported:
971 gboolean ret = GST_FLOW_OK;
973 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
974 (_("Failed to decode JPEG image")),
975 ("Unsupported subsampling schema: v_samp factors: %u %u %u", v_samp[0],
976 v_samp[1], v_samp[2]), ret);
982 #ifdef JCS_EXTENSIONS
/* Maps a GstVideoFormat to the corresponding libjpeg-turbo extended
 * colorspace constant (JCS_EXT_*) for direct format conversion. */
984 gst_fmt_to_jpeg_turbo_ext_fmt (GstVideoFormat gstfmt)
987 case GST_VIDEO_FORMAT_RGB:
989 case GST_VIDEO_FORMAT_RGBx:
991 case GST_VIDEO_FORMAT_xRGB:
993 case GST_VIDEO_FORMAT_RGBA:
995 case GST_VIDEO_FORMAT_ARGB:
997 case GST_VIDEO_FORMAT_BGR:
999 case GST_VIDEO_FORMAT_BGRx:
1000 return JCS_EXT_BGRX;
1001 case GST_VIDEO_FORMAT_xBGR:
1002 return JCS_EXT_XBGR;
1003 case GST_VIDEO_FORMAT_BGRA:
1004 return JCS_EXT_BGRA;
1005 case GST_VIDEO_FORMAT_ABGR:
1006 return JCS_EXT_ABGR;
/* Checks the downstream peer's caps for a packed RGB format that
 * libjpeg-turbo can convert to directly; if found, records it in
 * dec->format / dec->libjpeg_ext_format and sets dec->format_convert. */
1013 gst_jpeg_turbo_parse_ext_fmt_convert (GstJpegDec * dec, gint * clrspc)
1015 GstCaps *peer_caps, *dec_caps;
1017 dec_caps = gst_static_caps_get (&gst_jpeg_dec_src_pad_template.static_caps);
1019 gst_pad_peer_query_caps (GST_VIDEO_DECODER_SRC_PAD (dec), dec_caps);
1020 gst_caps_unref (dec_caps);
1022 GST_DEBUG ("Received caps from peer: %" GST_PTR_FORMAT, peer_caps);
1023 dec->format_convert = FALSE;
1024 if (!gst_caps_is_empty (peer_caps)) {
1025 GstStructure *peerstruct;
1026 const gchar *peerformat;
1027 GstVideoFormat peerfmt;
1029 if (!gst_caps_is_fixed (peer_caps))
1030 peer_caps = gst_caps_fixate (peer_caps);
1032 peerstruct = gst_caps_get_structure (peer_caps, 0);
1033 peerformat = gst_structure_get_string (peerstruct, "format");
1034 peerfmt = gst_video_format_from_string (peerformat);
1037 case GST_VIDEO_FORMAT_RGB:
1038 case GST_VIDEO_FORMAT_RGBx:
1039 case GST_VIDEO_FORMAT_xRGB:
1040 case GST_VIDEO_FORMAT_RGBA:
1041 case GST_VIDEO_FORMAT_ARGB:
1042 case GST_VIDEO_FORMAT_BGR:
1043 case GST_VIDEO_FORMAT_BGRx:
1044 case GST_VIDEO_FORMAT_xBGR:
1045 case GST_VIDEO_FORMAT_BGRA:
1046 case GST_VIDEO_FORMAT_ABGR:
1049 dec->format = peerfmt;
1050 dec->format_convert = TRUE;
1051 dec->libjpeg_ext_format = gst_fmt_to_jpeg_turbo_ext_fmt (peerfmt);
1057 gst_caps_unref (peer_caps);
1058 GST_DEBUG_OBJECT (dec, "format_convert=%d", dec->format_convert);
/* (Re)negotiates the output state: picks the output format from the
 * JPEG colorspace (or the turbo-converted format), skips renegotiation
 * when nothing changed, sets JPEG colorimetry/chroma-siting, and marks
 * interlaced streams as interleaved top-field-first. */
1063 gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc,
1064 gboolean interlaced)
1066 GstVideoCodecState *outstate;
1068 GstVideoFormat format;
1070 #ifdef JCS_EXTENSIONS
1071 if (dec->format_convert) {
1072 format = dec->format;
1078 format = GST_VIDEO_FORMAT_RGB;
1081 format = GST_VIDEO_FORMAT_GRAY8;
1084 format = GST_VIDEO_FORMAT_I420;
1089 /* Compare to currently configured output state */
1090 outstate = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (dec));
1092 info = &outstate->info;
1094 if (width == GST_VIDEO_INFO_WIDTH (info) &&
1095 height == GST_VIDEO_INFO_HEIGHT (info) &&
1096 format == GST_VIDEO_INFO_FORMAT (info)) {
1097 gst_video_codec_state_unref (outstate);
1100 gst_video_codec_state_unref (outstate);
1102 #ifdef JCS_EXTENSIONS
1103 /* Determine if libjpeg-turbo direct format conversion can be used
1104 * with current caps and if so, adjust $dec to enable it and $clrspc
1106 gst_jpeg_turbo_parse_ext_fmt_convert (dec, &clrspc);
1110 gst_video_decoder_set_output_state (GST_VIDEO_DECODER (dec), format,
1111 width, height, dec->input_state);
1118 /* aka JPEG chroma siting */
1119 outstate->info.chroma_site = GST_VIDEO_CHROMA_SITE_NONE;
/* JPEG is full-range BT.601. */
1121 outstate->info.colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;
1122 outstate->info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
1123 outstate->info.colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN;
1124 outstate->info.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
1129 outstate->info.interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
1130 GST_VIDEO_INFO_FIELD_ORDER (&outstate->info) =
1131 GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST;
1134 gst_video_codec_state_unref (outstate);
1136 gst_video_decoder_negotiate (GST_VIDEO_DECODER (dec));
1138 GST_DEBUG_OBJECT (dec, "max_v_samp_factor=%d", dec->cinfo.max_v_samp_factor);
1139 GST_DEBUG_OBJECT (dec, "max_h_samp_factor=%d", dec->cinfo.max_h_samp_factor);
/* gst_jpeg_dec_prepare_decode:
 * @dec: the jpeg decoder instance
 *
 * Reads and validates the JPEG header of the current input via libjpeg,
 * configures the decompressor for raw (or libjpeg-turbo extended-format)
 * output and starts the decompression cycle.  On any unsupported or corrupt
 * header it posts a decoder error and returns GST_FLOW_ERROR; on the error
 * paths below it also aborts the pending decompress so the cinfo state is
 * reusable.  NOTE(review): libjpeg errors can also longjmp out of here into
 * the caller's setjmp handler — confirm against the error-mgr setup.
 */
1142 static GstFlowReturn
1143 gst_jpeg_dec_prepare_decode (GstJpegDec * dec)
/* ret only feeds GST_VIDEO_DECODER_ERROR bookkeeping, hence G_GNUC_UNUSED */
1145 G_GNUC_UNUSED GstFlowReturn ret;
1146 guint r_h, r_v, hdr_ok;
/* TRUE = require a full image header (tables-only streams are an error) */
1149 hdr_ok = jpeg_read_header (&dec->cinfo, TRUE);
1150 if (G_UNLIKELY (hdr_ok != JPEG_HEADER_OK)) {
1151 GST_WARNING_OBJECT (dec, "reading the header failed, %d", hdr_ok);
1154 GST_LOG_OBJECT (dec, "num_components=%d", dec->cinfo.num_components);
1155 GST_LOG_OBJECT (dec, "jpeg_color_space=%d", dec->cinfo.jpeg_color_space);
/* guard against a header with no components before indexing comp_info */
1157 if (!dec->cinfo.num_components || !dec->cinfo.comp_info)
1158 goto components_not_supported;
/* sampling factors of component 0 (luma for YCbCr) used as the reference */
1160 r_h = dec->cinfo.comp_info[0].h_samp_factor;
1161 r_v = dec->cinfo.comp_info[0].v_samp_factor;
1163 GST_LOG_OBJECT (dec, "r_h = %d, r_v = %d", r_h, r_v);
1165 if (dec->cinfo.num_components > 3)
1166 goto components_not_supported;
1168 /* verify color space expectation to avoid going *boom* or bogus output */
1169 if (dec->cinfo.jpeg_color_space != JCS_YCbCr &&
1170 dec->cinfo.jpeg_color_space != JCS_GRAYSCALE &&
1171 dec->cinfo.jpeg_color_space != JCS_RGB)
1172 goto unsupported_colorspace;
/* debug-only dump of per-component sampling factors and ids */
1174 #ifndef GST_DISABLE_GST_DEBUG
1178 for (i = 0; i < dec->cinfo.num_components; ++i) {
1179 GST_LOG_OBJECT (dec, "[%d] h_samp_factor=%d, v_samp_factor=%d, cid=%d",
1180 i, dec->cinfo.comp_info[i].h_samp_factor,
1181 dec->cinfo.comp_info[i].v_samp_factor,
1182 dec->cinfo.comp_info[i].component_id);
1187 /* prepare for raw output */
1188 dec->cinfo.do_fancy_upsampling = FALSE;
1189 dec->cinfo.do_block_smoothing = FALSE;
/* honour the idct-method property (default JDCT_FASTEST, see top of file) */
1190 dec->cinfo.dct_method = dec->idct_method;
/* with libjpeg-turbo extensions we can let libjpeg convert straight to a
 * packed RGB-style format; otherwise we take raw planar data ourselves */
1191 #ifdef JCS_EXTENSIONS
1192 gst_jpeg_turbo_parse_ext_fmt_convert (dec, NULL);
1193 if (dec->format_convert) {
1194 dec->cinfo.out_color_space = dec->libjpeg_ext_format;
1195 dec->cinfo.raw_data_out = FALSE;
1199 dec->cinfo.out_color_space = dec->cinfo.jpeg_color_space;
1200 dec->cinfo.raw_data_out = TRUE;
1203 GST_LOG_OBJECT (dec, "starting decompress");
/* some streams omit Huffman tables; install default ones if missing */
1204 guarantee_huff_tables (&dec->cinfo);
1205 if (!jpeg_start_decompress (&dec->cinfo)) {
1206 GST_WARNING_OBJECT (dec, "failed to start decompression cycle");
1209 /* sanity checks to get safe and reasonable output */
1210 switch (dec->cinfo.jpeg_color_space) {
/* grayscale must be exactly one component */
1212 if (dec->cinfo.num_components != 1)
1213 goto invalid_yuvrgbgrayscale;
/* RGB must be 3 components with no subsampling at all */
1216 if (dec->cinfo.num_components != 3 || dec->cinfo.max_v_samp_factor > 1 ||
1217 dec->cinfo.max_h_samp_factor > 1)
1218 goto invalid_yuvrgbgrayscale;
/* YCbCr: 3 components, luma sampling must dominate chroma sampling and
 * vertical sampling is capped at 2 (what the I420 output path can carry) */
1221 if (dec->cinfo.num_components != 3 ||
1222 r_v > 2 || r_v < dec->cinfo.comp_info[0].v_samp_factor ||
1223 r_v < dec->cinfo.comp_info[1].v_samp_factor ||
1224 r_h < dec->cinfo.comp_info[0].h_samp_factor ||
1225 r_h < dec->cinfo.comp_info[1].h_samp_factor)
1226 goto invalid_yuvrgbgrayscale;
/* colorspace was validated above, so no other value can reach this switch */
1229 g_assert_not_reached ();
/* reject sizes outside MIN_*/MAX_* bounds (MAX is 65535, the JPEG limit) */
1233 if (G_UNLIKELY (dec->cinfo.output_width < MIN_WIDTH ||
1234 dec->cinfo.output_width > MAX_WIDTH ||
1235 dec->cinfo.output_height < MIN_HEIGHT ||
1236 dec->cinfo.output_height > MAX_HEIGHT))
/* --- error labels: post a decoder error, clean up, bail out --- */
1244 ret = GST_FLOW_ERROR;
1245 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1246 (_("Failed to decode JPEG image")),
1247 ("Picture is too small or too big (%ux%u)", dec->cinfo.output_width,
1248 dec->cinfo.output_height), ret);
1249 return GST_FLOW_ERROR;
1251 components_not_supported:
1253 ret = GST_FLOW_ERROR;
1254 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1255 (_("Failed to decode JPEG image")),
1256 ("number of components not supported: %d (max 3)",
1257 dec->cinfo.num_components), ret);
/* reset libjpeg state so the next buffer starts from a clean decompressor */
1258 jpeg_abort_decompress (&dec->cinfo);
1259 return GST_FLOW_ERROR;
1261 unsupported_colorspace:
1263 ret = GST_FLOW_ERROR;
1264 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1265 (_("Failed to decode JPEG image")),
1266 ("Picture has unknown or unsupported colourspace"), ret);
1267 jpeg_abort_decompress (&dec->cinfo);
1268 return GST_FLOW_ERROR;
1270 invalid_yuvrgbgrayscale:
1272 ret = GST_FLOW_ERROR;
1273 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1274 (_("Failed to decode JPEG image")),
1275 ("Picture is corrupt or unhandled YUV/RGB/grayscale layout"), ret);
1276 jpeg_abort_decompress (&dec->cinfo);
1277 return GST_FLOW_ERROR;
/* gst_jpeg_dec_decode:
 * @dec: the jpeg decoder instance
 * @vframe: mapped output video frame to write decoded pixels into
 * @width/@height: dimensions of the image (or of one field when interlaced)
 * @field: which field is being decoded (1-based)
 * @num_fields: 1 for progressive, 2 for interlaced content
 *
 * Dispatches to the RGB, grayscale or YUV decode helper depending on the
 * stream's colorspace, then finishes (or on failure aborts) the libjpeg
 * decompression cycle.  Returns the flow result of the decode step.
 */
1281 static GstFlowReturn
1282 gst_jpeg_dec_decode (GstJpegDec * dec, GstVideoFrame * vframe, guint width,
1283 guint height, guint field, guint num_fields)
1285 GstFlowReturn ret = GST_FLOW_OK;
1287 if (dec->cinfo.jpeg_color_space == JCS_RGB) {
1288 gst_jpeg_dec_decode_rgb (dec, vframe, field, num_fields);
1289 } else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
1290 gst_jpeg_dec_decode_grayscale (dec, vframe, field, num_fields);
/* remaining path: YCbCr into I420 output */
1292 GST_LOG_OBJECT (dec, "decompressing (required scanline buffer height = %u)",
1293 dec->cinfo.rec_outbuf_height);
1295 /* For some widths jpeglib requires more horizontal padding than I420
1296 * provides. In those cases we need to decode into separate buffers and then
1297 * copy over the data into our final picture buffer, otherwise jpeglib might
1298 * write over the end of a line into the beginning of the next line,
1299 * resulting in blocky artifacts on the left side of the picture. */
/* indirect path also covers any sampling layout other than plain 4:2:0 */
1300 if (G_UNLIKELY (width % (dec->cinfo.max_h_samp_factor * DCTSIZE) != 0
1301 || dec->cinfo.comp_info[0].h_samp_factor != 2
1302 || dec->cinfo.comp_info[1].h_samp_factor != 1
1303 || dec->cinfo.comp_info[2].h_samp_factor != 1)) {
1304 GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
1305 "indirect decoding using extra buffer copy");
1306 gst_jpeg_dec_decode_indirect (dec, vframe,
1307 dec->cinfo.comp_info[0].v_samp_factor,
1308 dec->cinfo.comp_info[0].h_samp_factor, dec->cinfo.num_components,
/* fast path: decode straight into the output frame's planes */
1311 ret = gst_jpeg_dec_decode_direct (dec, vframe, field, num_fields);
1315 GST_LOG_OBJECT (dec, "decompressing finished: %s", gst_flow_get_name (ret));
/* on failure abort the cycle (discard state); on success finish it cleanly */
1317 if (G_UNLIKELY (ret != GST_FLOW_OK)) {
1318 jpeg_abort_decompress (&dec->cinfo);
1320 jpeg_finish_decompress (&dec->cinfo);
/* gst_jpeg_dec_handle_frame:
 * @bdec: base video decoder (actually a GstJpegDec)
 * @frame: the codec frame whose input_buffer holds one complete JPEG image
 *         (or two fields' worth for interlaced MJPEG)
 *
 * GstVideoDecoder::handle_frame vfunc.  Maps the input, fixes up a missing
 * EOI marker, decodes one or two fields via gst_jpeg_dec_decode() and pushes
 * the finished frame downstream.  libjpeg errors longjmp into the setjmp
 * handlers below, which route to the shared error labels at the bottom.
 */
1326 static GstFlowReturn
1327 gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec, GstVideoCodecFrame * frame)
1329 GstFlowReturn ret = GST_FLOW_OK;
1330 GstJpegDec *dec = (GstJpegDec *) bdec;
1331 GstVideoFrame vframe;
1332 gint num_fields; /* number of fields (1 or 2) */
1333 gint output_height; /* height of output image (one or two fields) */
1334 gint height; /* height of current frame (whole image or a field) */
1337 gboolean need_unmap = TRUE;
1338 GstVideoCodecState *state = NULL;
1339 gboolean release_frame = TRUE;
/* keep the input mapped for the whole decode; libjpeg reads from it
 * directly via cinfo.src below */
1344 if (!gst_buffer_map (frame->input_buffer, &dec->current_frame_map,
1348 data = dec->current_frame_map.data;
1349 nbytes = dec->current_frame_map.size;
1351 goto need_more_data;
/* check for the EOI marker (0xffd9) at the very end of the buffer */
1352 has_eoi = ((data[nbytes - 2] == 0xff) && (data[nbytes - 1] == 0xd9));
1354 /* some cameras fail to send an end-of-image marker (EOI),
1355 * add it if that is the case. */
1358 GstBuffer *eoibuf = gst_buffer_new_and_alloc (2);
1360 /* unmap, will add EOI and remap at the end */
1361 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1363 gst_buffer_map (eoibuf, &map, GST_MAP_WRITE);
1366 gst_buffer_unmap (eoibuf, &map);
1368 /* append to input buffer, and remap */
1369 frame->input_buffer = gst_buffer_append (frame->input_buffer, eoibuf);
1371 gst_buffer_map (frame->input_buffer, &dec->current_frame_map, GST_MAP_READ);
1372 GST_DEBUG ("fixup EOI marker added");
/* hand the mapped input straight to the libjpeg source manager */
1375 dec->current_frame = frame;
1376 dec->cinfo.src->next_input_byte = dec->current_frame_map.data;
1377 dec->cinfo.src->bytes_in_buffer = dec->current_frame_map.size;
/* libjpeg error handler longjmps back here for errors during header/setup */
1379 if (setjmp (dec->jerr.setjmp_buffer)) {
1380 code = dec->jerr.pub.msg_code;
/* premature EOF just means the (non-packetized) input was incomplete */
1382 if (code == JERR_INPUT_EOF) {
1383 GST_DEBUG ("jpeg input EOF error, we probably need more data");
1384 goto need_more_data;
1389 /* read header and check values */
1390 ret = gst_jpeg_dec_prepare_decode (dec);
1391 if (G_UNLIKELY (ret == GST_FLOW_ERROR))
1394 width = dec->cinfo.output_width;
1395 height = dec->cinfo.output_height;
1397 /* is it interlaced MJPEG? (we really don't want to scan the jpeg data
1398 * to see if there are two SOF markers in the packet to detect this) */
/* heuristic: upstream caps report a height between 1x and 2x the JPEG
 * frame height at the same width => each buffer carries two fields */
1399 if (gst_video_decoder_get_packetized (bdec) &&
1401 dec->input_state->info.height > height &&
1402 dec->input_state->info.height <= (height * 2)
1403 && dec->input_state->info.width == width) {
1404 GST_LOG_OBJECT (dec,
1405 "looks like an interlaced image: "
1406 "input width/height of %dx%d with JPEG frame width/height of %dx%d",
1407 dec->input_state->info.width, dec->input_state->info.height, width,
1409 output_height = dec->input_state->info.height;
1410 height = dec->input_state->info.height / 2;
1412 GST_LOG_OBJECT (dec, "field height=%d", height);
1414 output_height = height;
/* (re)negotiate output caps before allocating the output frame */
1418 gst_jpeg_dec_negotiate (dec, width, output_height,
1419 dec->cinfo.jpeg_color_space, num_fields == 2);
1421 state = gst_video_decoder_get_output_state (bdec);
1422 ret = gst_video_decoder_allocate_output_frame (bdec, frame);
1423 if (G_UNLIKELY (ret != GST_FLOW_OK))
1426 if (!gst_video_frame_map (&vframe, &state->info, frame->output_buffer,
/* re-arm the longjmp target: from here on we must also unmap vframe */
1430 if (setjmp (dec->jerr.setjmp_buffer)) {
1431 code = dec->jerr.pub.msg_code;
1432 gst_video_frame_unmap (&vframe);
1436 GST_LOG_OBJECT (dec, "width %d, height %d, fields %d", width, output_height,
/* decode the whole image, or the first field of an interlaced pair */
1439 ret = gst_jpeg_dec_decode (dec, &vframe, width, height, 1, num_fields);
1440 if (G_UNLIKELY (ret != GST_FLOW_OK)) {
1441 gst_video_frame_unmap (&vframe);
1445 if (setjmp (dec->jerr.setjmp_buffer)) {
1446 code = dec->jerr.pub.msg_code;
1447 gst_video_frame_unmap (&vframe);
1451 /* decode second field if there is one */
1452 if (num_fields == 2) {
1453 GstVideoFormat field2_format;
1455 /* Checked above before setting num_fields to 2 */
1456 g_assert (dec->input_state != NULL);
1458 /* skip any chunk or padding bytes before the next SOI marker; both fields
1459 * are in one single buffer here, so direct access should be fine here */
1460 while (dec->jsrc.pub.bytes_in_buffer > 2 &&
1461 GST_READ_UINT16_BE (dec->jsrc.pub.next_input_byte) != 0xffd8) {
1462 --dec->jsrc.pub.bytes_in_buffer;
1463 ++dec->jsrc.pub.next_input_byte;
1466 if (gst_jpeg_dec_prepare_decode (dec) != GST_FLOW_OK) {
1467 GST_WARNING_OBJECT (dec, "problem reading jpeg header of 2nd field");
1468 /* FIXME: post a warning message here? */
1469 gst_video_frame_unmap (&vframe);
1473 /* check if format has changed for the second field */
1474 #ifdef JCS_EXTENSIONS
1475 if (dec->format_convert) {
1476 field2_format = dec->format;
/* map the second field's libjpeg colorspace to the GstVideoFormat it
 * would produce, so we can compare against the negotiated output */
1480 switch (dec->cinfo.jpeg_color_space) {
1482 field2_format = GST_VIDEO_FORMAT_RGB;
1485 field2_format = GST_VIDEO_FORMAT_GRAY8;
1488 field2_format = GST_VIDEO_FORMAT_I420;
1493 GST_LOG_OBJECT (dec,
1494 "got for second field of interlaced image: "
1495 "input width/height of %dx%d with JPEG frame width/height of %dx%d",
1496 dec->input_state->info.width, dec->input_state->info.height,
1497 dec->cinfo.output_width, dec->cinfo.output_height);
/* second field must match the first in size and format, else bail */
1499 if (dec->cinfo.output_width != GST_VIDEO_INFO_WIDTH (&state->info) ||
1500 GST_VIDEO_INFO_HEIGHT (&state->info) <= dec->cinfo.output_height ||
1501 GST_VIDEO_INFO_HEIGHT (&state->info) > (dec->cinfo.output_height * 2) ||
1502 field2_format != GST_VIDEO_INFO_FORMAT (&state->info)) {
1503 GST_WARNING_OBJECT (dec, "second field has different format than first");
1504 gst_video_frame_unmap (&vframe);
1508 ret = gst_jpeg_dec_decode (dec, &vframe, width, height, 2, 2);
1509 if (G_UNLIKELY (ret != GST_FLOW_OK)) {
1510 gst_video_frame_unmap (&vframe);
/* success path: unmap everything and push the decoded frame downstream */
1514 gst_video_frame_unmap (&vframe);
1516 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1517 ret = gst_video_decoder_finish_frame (bdec, frame);
1518 release_frame = FALSE;
/* --- shared exit / error labels below --- */
1526 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1529 gst_video_decoder_release_frame (bdec, frame);
1532 gst_video_codec_state_unref (state);
1539 GST_LOG_OBJECT (dec, "we need more data");
/* map_failed: could not map the input buffer for reading */
1546 GST_ELEMENT_ERROR (dec, RESOURCE, READ, (_("Failed to read memory")),
1547 ("gst_buffer_map() failed for READ access"));
1548 ret = GST_FLOW_ERROR;
/* decode_error: reached via longjmp from the libjpeg error handler;
 * format the libjpeg message and drop the frame */
1553 gchar err_msg[JMSG_LENGTH_MAX];
1555 dec->jerr.pub.format_message ((j_common_ptr) (&dec->cinfo), err_msg);
1557 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1558 (_("Failed to decode JPEG image")), ("Decode error #%u: %s", code,
1561 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1562 gst_video_decoder_drop_frame (bdec, frame);
1563 release_frame = FALSE;
1565 jpeg_abort_decompress (&dec->cinfo);
1571 /* already posted an error message */
/* alloc_failed: output-buffer allocation did not succeed */
1576 const gchar *reason;
1578 reason = gst_flow_get_name (ret);
1580 GST_DEBUG_OBJECT (dec, "failed to alloc buffer, reason %s", reason);
1581 /* Reset for next time */
1582 jpeg_abort_decompress (&dec->cinfo);
/* only post an element error for "real" failures, not shutdown/reconfig */
1583 if (ret != GST_FLOW_EOS && ret != GST_FLOW_FLUSHING &&
1584 ret != GST_FLOW_NOT_LINKED) {
1585 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1586 (_("Failed to decode JPEG image")),
1587 ("Buffer allocation failed, reason: %s", reason), ret);
1588 jpeg_abort_decompress (&dec->cinfo);
/* gst_jpeg_dec_decide_allocation:
 * GstVideoDecoder::decide_allocation vfunc.  Chains up to the parent class
 * first, then — if downstream proposed a buffer pool and supports
 * GstVideoMeta — enables the video-meta option on that pool's config so
 * buffers can carry per-plane stride/offset information.
 */
1595 gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query)
1597 GstBufferPool *pool = NULL;
1598 GstStructure *config;
/* let the base class fill in defaults / validate the query first */
1600 if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query))
1603 if (gst_query_get_n_allocation_pools (query) > 0)
1604 gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
1609 config = gst_buffer_pool_get_config (pool);
1610 if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
1611 gst_buffer_pool_config_add_option (config,
1612 GST_BUFFER_POOL_OPTION_VIDEO_META);
1614 gst_buffer_pool_set_config (pool, config);
/* drop the ref taken by gst_query_parse_nth_allocation_pool() */
1615 gst_object_unref (pool);
/* gst_jpeg_dec_sink_event:
 * GstVideoDecoder::sink_event vfunc.  On SEGMENT events, switches the
 * decoder between packetized mode (TIME segments: each input buffer is a
 * complete image) and parse mode (other formats, e.g. BYTES: the decoder
 * must find image boundaries itself).  All events are then chained up.
 */
1621 gst_jpeg_dec_sink_event (GstVideoDecoder * bdec, GstEvent * event)
1623 const GstSegment *segment;
/* only SEGMENT events affect the packetized setting */
1625 if (GST_EVENT_TYPE (event) != GST_EVENT_SEGMENT)
1628 gst_event_parse_segment (event, &segment);
1630 if (segment->format == GST_FORMAT_TIME)
1631 gst_video_decoder_set_packetized (bdec, TRUE);
1633 gst_video_decoder_set_packetized (bdec, FALSE);
1636 return GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (bdec, event);
/* gst_jpeg_dec_start:
 * GstVideoDecoder::start vfunc.  Resets per-stream parsing state and
 * defaults to non-packetized (parse) mode until a TIME segment arrives
 * (see gst_jpeg_dec_sink_event above).
 */
1640 gst_jpeg_dec_start (GstVideoDecoder * bdec)
1642 GstJpegDec *dec = (GstJpegDec *) bdec;
/* libjpeg-turbo extended-format conversion is re-detected per stream */
1644 #ifdef JCS_EXTENSIONS
1645 dec->format_convert = FALSE;
1647 dec->saw_header = FALSE;
1648 dec->parse_entropy_len = 0;
1649 dec->parse_resync = FALSE;
1651 gst_video_decoder_set_packetized (bdec, FALSE);
/* gst_jpeg_dec_flush:
 * GstVideoDecoder::flush vfunc.  Aborts any in-progress libjpeg
 * decompression cycle and clears the parser state so decoding can
 * restart cleanly after a seek/flush.
 */
1657 gst_jpeg_dec_flush (GstVideoDecoder * bdec)
1659 GstJpegDec *dec = (GstJpegDec *) bdec;
1661 jpeg_abort_decompress (&dec->cinfo);
1662 dec->parse_entropy_len = 0;
1663 dec->parse_resync = FALSE;
1664 dec->saw_header = FALSE;
1665 #ifdef JCS_EXTENSIONS
1666 dec->format_convert = FALSE;
/* gst_jpeg_dec_set_property:
 * GObject::set_property vfunc for "idct-method" and the deprecated
 * "max-errors" property.  max-errors is written atomically because it
 * may be read from the streaming thread.
 */
1673 gst_jpeg_dec_set_property (GObject * object, guint prop_id,
1674 const GValue * value, GParamSpec * pspec)
1678 dec = GST_JPEG_DEC (object);
1681 case PROP_IDCT_METHOD:
1682 dec->idct_method = g_value_get_enum (value);
/* deprecated property, kept for backwards compatibility */
1684 #ifndef GST_REMOVE_DEPRECATED
1685 case PROP_MAX_ERRORS:
1686 g_atomic_int_set (&dec->max_errors, g_value_get_int (value));
1690 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* gst_jpeg_dec_get_property:
 * GObject::get_property vfunc — mirror of set_property above; reads
 * max-errors atomically for the same cross-thread reason.
 */
1696 gst_jpeg_dec_get_property (GObject * object, guint prop_id, GValue * value,
1701 dec = GST_JPEG_DEC (object);
1704 case PROP_IDCT_METHOD:
1705 g_value_set_enum (value, dec->idct_method);
/* deprecated property, kept for backwards compatibility */
1707 #ifndef GST_REMOVE_DEPRECATED
1708 case PROP_MAX_ERRORS:
1709 g_value_set_int (value, g_atomic_int_get (&dec->max_errors));
1713 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1719 gst_jpeg_dec_stop (GstVideoDecoder * bdec)
1721 GstJpegDec *dec = (GstJpegDec *) bdec;
1723 gst_jpeg_dec_free_buffers (dec);
1725 g_free (dec->scratch);
1726 dec->scratch = NULL;
1727 dec->scratch_size = 0;