2 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
3 * Copyright (C) <2009> Tim-Philipp Müller <tim centricular net>
4 * Copyright (C) 2012 Collabora Ltd.
5 * Author : Edward Hervey <edward@collabora.com>
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Library General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Library General Public License for more details.
17 * You should have received a copy of the GNU Library General Public
18 * License along with this library; if not, write to the
19 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
20 * Boston, MA 02110-1301, USA.
24 * SECTION:element-jpegdec
27 * Decodes jpeg images.
29 * ## Example launch line
31 * gst-launch-1.0 -v filesrc location=mjpeg.avi ! avidemux ! queue ! jpegdec ! videoconvert ! videoscale ! autovideosink
32 * ]| The above pipeline decodes the mjpeg stream and renders it to the screen.
42 #include "gstjpegdec.h"
43 #include "gstjpegelements.h"
44 #include <gst/video/video.h>
45 #include <gst/video/gstvideometa.h>
46 #include <gst/video/gstvideopool.h>
47 #include <glib/gi18n-lib.h>
51 #define MAX_WIDTH 65535
53 #define MAX_HEIGHT 65535
55 #define CINFO_GET_JPEGDEC(cinfo_ptr) \
56 (((struct GstJpegDecSourceMgr*)((cinfo_ptr)->src))->dec)
58 #define JPEG_DEFAULT_IDCT_METHOD JDCT_FASTEST
59 #define JPEG_DEFAULT_MAX_ERRORS 0
/* Source pad template: raw video formats this decoder can output.
 * NOTE(review): fragmentary dump — pad direction/presence arguments of
 * GST_STATIC_PAD_TEMPLATE are missing from this view. */
69 static GstStaticPadTemplate gst_jpeg_dec_src_pad_template =
70 GST_STATIC_PAD_TEMPLATE ("src",
73 GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
74 ("{ I420, RGB, BGR, RGBx, xRGB, BGRx, xBGR, GRAY8 }"))
78 /* FIXME: sof-marker is for IJG libjpeg 8, should be different for 6.2 */
79 /* FIXME: add back "sof-marker = (int) { 0, 1, 2, 5, 6, 7, 9, 10, 13, 14 }"
80 * once we have a parser and/or demuxer set caps properly */
/* Sink pad template: accepts any image/jpeg input. */
81 static GstStaticPadTemplate gst_jpeg_dec_sink_pad_template =
82 GST_STATIC_PAD_TEMPLATE ("sink",
85 GST_STATIC_CAPS ("image/jpeg")
/* Debug categories: one private to this element, plus the shared
 * GST_PERFORMANCE category fetched in class_init below. */
88 GST_DEBUG_CATEGORY_STATIC (jpeg_dec_debug);
89 #define GST_CAT_DEFAULT jpeg_dec_debug
90 GST_DEBUG_CATEGORY_STATIC (GST_CAT_PERFORMANCE);
/* Forward declarations for the GObject property hooks and the
 * GstVideoDecoder virtual methods implemented in this file. */
92 static void gst_jpeg_dec_set_property (GObject * object, guint prop_id,
93 const GValue * value, GParamSpec * pspec);
94 static void gst_jpeg_dec_get_property (GObject * object, guint prop_id,
95 GValue * value, GParamSpec * pspec);
97 static gboolean gst_jpeg_dec_set_format (GstVideoDecoder * dec,
98 GstVideoCodecState * state);
99 static gboolean gst_jpeg_dec_start (GstVideoDecoder * bdec);
100 static gboolean gst_jpeg_dec_stop (GstVideoDecoder * bdec);
101 static gboolean gst_jpeg_dec_flush (GstVideoDecoder * bdec);
102 static GstFlowReturn gst_jpeg_dec_parse (GstVideoDecoder * bdec,
103 GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos);
104 static GstFlowReturn gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec,
105 GstVideoCodecFrame * frame);
106 static gboolean gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec,
108 static gboolean gst_jpeg_dec_sink_event (GstVideoDecoder * bdec,
/* GType boilerplate: GstJpegDec subclasses GstVideoDecoder; the element
 * registers under the name "jpegdec" with primary rank. */
111 #define gst_jpeg_dec_parent_class parent_class
112 G_DEFINE_TYPE (GstJpegDec, gst_jpeg_dec, GST_TYPE_VIDEO_DECODER);
113 GST_ELEMENT_REGISTER_DEFINE (jpegdec, "jpegdec", GST_RANK_PRIMARY,
/* GObject finalize: tear down the libjpeg decompress state and drop any
 * cached input codec state, then chain up to the parent class. */
117 gst_jpeg_dec_finalize (GObject * object)
119 GstJpegDec *dec = GST_JPEG_DEC (object);
121 jpeg_destroy_decompress (&dec->cinfo);
122 if (dec->input_state)
123 gst_video_codec_state_unref (dec->input_state);
125 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Class initializer: wires GObject property handlers, installs the
 * idct-method and (deprecated) max-errors properties, registers pad
 * templates and element metadata, and hooks up all GstVideoDecoder
 * virtual methods. */
129 gst_jpeg_dec_class_init (GstJpegDecClass * klass)
131 GObjectClass *gobject_class;
132 GstElementClass *element_class;
133 GstVideoDecoderClass *vdec_class;
135 gobject_class = (GObjectClass *) klass;
136 element_class = (GstElementClass *) klass;
137 vdec_class = (GstVideoDecoderClass *) klass;
139 parent_class = g_type_class_peek_parent (klass);
141 gobject_class->finalize = gst_jpeg_dec_finalize;
142 gobject_class->set_property = gst_jpeg_dec_set_property;
143 gobject_class->get_property = gst_jpeg_dec_get_property;
/* idct-method: lets the application select the libjpeg IDCT algorithm. */
145 g_object_class_install_property (gobject_class, PROP_IDCT_METHOD,
146 g_param_spec_enum ("idct-method", "IDCT Method",
147 "The IDCT algorithm to use", GST_TYPE_IDCT_METHOD,
148 JPEG_DEFAULT_IDCT_METHOD,
149 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
152 * GstJpegDec:max-errors:
154 * Error out after receiving N consecutive decoding errors
155 * (-1 = never error out, 0 = automatic, 1 = fail on first error, etc.)
157 * Deprecated: 1.3.1: Property wasn't used internally
/* Kept only for ABI/API compatibility; guarded so it can be compiled out. */
159 #ifndef GST_REMOVE_DEPRECATED
160 g_object_class_install_property (gobject_class, PROP_MAX_ERRORS,
161 g_param_spec_int ("max-errors", "Maximum Consecutive Decoding Errors",
162 "(Deprecated) Error out after receiving N consecutive decoding errors"
163 " (-1 = never fail, 0 = automatic, 1 = fail on first error)",
164 -1, G_MAXINT, JPEG_DEFAULT_MAX_ERRORS,
165 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED));
168 gst_element_class_add_static_pad_template (element_class,
169 &gst_jpeg_dec_src_pad_template);
170 gst_element_class_add_static_pad_template (element_class,
171 &gst_jpeg_dec_sink_pad_template);
172 gst_element_class_set_static_metadata (element_class, "JPEG image decoder",
173 "Codec/Decoder/Image", "Decode images from JPEG format",
174 "Wim Taymans <wim@fluendo.com>");
/* GstVideoDecoder virtual method table. */
176 vdec_class->start = gst_jpeg_dec_start;
177 vdec_class->stop = gst_jpeg_dec_stop;
178 vdec_class->flush = gst_jpeg_dec_flush;
179 vdec_class->parse = gst_jpeg_dec_parse;
180 vdec_class->set_format = gst_jpeg_dec_set_format;
181 vdec_class->handle_frame = gst_jpeg_dec_handle_frame;
182 vdec_class->decide_allocation = gst_jpeg_dec_decide_allocation;
183 vdec_class->sink_event = gst_jpeg_dec_sink_event;
185 GST_DEBUG_CATEGORY_INIT (jpeg_dec_debug, "jpegdec", 0, "JPEG decoder");
186 GST_DEBUG_CATEGORY_GET (GST_CAT_PERFORMANCE, "GST_PERFORMANCE");
/* Expose the IDCT enum to introspection/documentation tooling. */
188 gst_type_mark_as_plugin_api (GST_TYPE_IDCT_METHOD, 0);
/* libjpeg source-manager callback: called when libjpeg exhausts its input.
 * The whole frame is handed to libjpeg up-front, so reaching this callback
 * means the input is truncated (see comment below). */
192 gst_jpeg_dec_fill_input_buffer (j_decompress_ptr cinfo)
194 /* We pass in full frame initially, if this get called, the frame is most likely
/* libjpeg source-manager callback: nothing to set up, just trace. */
200 gst_jpeg_dec_init_source (j_decompress_ptr cinfo)
202 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "init_source");
/* libjpeg source-manager callback: advance the input pointer by num_bytes,
 * but only when the skip fits entirely in the current buffer. */
207 gst_jpeg_dec_skip_input_data (j_decompress_ptr cinfo, glong num_bytes)
209 GstJpegDec *dec = CINFO_GET_JPEGDEC (cinfo);
211 GST_DEBUG_OBJECT (dec, "skip %ld bytes", num_bytes);
213 if (num_bytes > 0 && cinfo->src->bytes_in_buffer >= num_bytes) {
214 cinfo->src->next_input_byte += (size_t) num_bytes;
215 cinfo->src->bytes_in_buffer -= (size_t) num_bytes;
/* libjpeg source-manager callback: restart-marker resync hook (trace only). */
220 gst_jpeg_dec_resync_to_restart (j_decompress_ptr cinfo, gint desired)
222 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "resync_to_start");
/* libjpeg source-manager callback: no per-image teardown needed. */
227 gst_jpeg_dec_term_source (j_decompress_ptr cinfo)
229 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "term_source");
/* libjpeg error-manager hook: suppress libjpeg's stderr messages. */
234 gst_jpeg_dec_my_output_message (j_common_ptr cinfo)
236 return; /* do nothing */
/* libjpeg error-manager hook: warnings are intentionally silenced. */
240 gst_jpeg_dec_my_emit_message (j_common_ptr cinfo, int msg_level)
242 /* GST_LOG_OBJECT (CINFO_GET_JPEGDEC (&cinfo), "msg_level=%d", msg_level); */
/* libjpeg error-manager hook: fatal errors longjmp back to the decode
 * call site instead of calling exit() as libjpeg's default would. */
247 gst_jpeg_dec_my_error_exit (j_common_ptr cinfo)
249 struct GstJpegDecErrorMgr *err_mgr = (struct GstJpegDecErrorMgr *) cinfo->err;
251 (*cinfo->err->output_message) (cinfo);
252 longjmp (err_mgr->setjmp_buffer, 1);
/* Instance initializer: set up the libjpeg decompressor with our custom
 * error manager (longjmp-based) and custom source manager callbacks,
 * initialize property defaults, and configure sink-pad caps acceptance. */
256 gst_jpeg_dec_init (GstJpegDec * dec)
258 GST_DEBUG ("initializing");
/* Route libjpeg errors/warnings through our handlers so a corrupt stream
 * cannot terminate the process. */
261 memset (&dec->cinfo, 0, sizeof (dec->cinfo));
262 memset (&dec->jerr, 0, sizeof (dec->jerr));
263 dec->cinfo.err = jpeg_std_error (&dec->jerr.pub);
264 dec->jerr.pub.output_message = gst_jpeg_dec_my_output_message;
265 dec->jerr.pub.emit_message = gst_jpeg_dec_my_emit_message;
266 dec->jerr.pub.error_exit = gst_jpeg_dec_my_error_exit;
268 jpeg_create_decompress (&dec->cinfo);
/* Install our own source manager so we feed libjpeg from GstBuffers. */
270 dec->cinfo.src = (struct jpeg_source_mgr *) &dec->jsrc;
271 dec->cinfo.src->init_source = gst_jpeg_dec_init_source;
272 dec->cinfo.src->fill_input_buffer = gst_jpeg_dec_fill_input_buffer;
273 dec->cinfo.src->skip_input_data = gst_jpeg_dec_skip_input_data;
274 dec->cinfo.src->resync_to_restart = gst_jpeg_dec_resync_to_restart;
275 dec->cinfo.src->term_source = gst_jpeg_dec_term_source;
278 /* init properties */
279 dec->idct_method = JPEG_DEFAULT_IDCT_METHOD;
280 dec->max_errors = JPEG_DEFAULT_MAX_ERRORS;
282 gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
284 GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (dec));
/* Returns TRUE for JPEG markers that are followed by entropy-coded data:
 * SOS (0xda) and the restart markers RST0..RST7 (0xd0-0xd7). */
287 static inline gboolean
288 gst_jpeg_dec_parse_tag_has_entropy_segment (guint8 tag)
290 if (tag == 0xda || (tag >= 0xd0 && tag <= 0xd7))
/* GstVideoDecoder::parse vfunc: scans the adapter for a complete JPEG
 * image (SOI .. EOI), walking marker segments and entropy-coded data,
 * and hands complete frames to the base class via
 * gst_video_decoder_add_to_frame() / gst_video_decoder_have_frame().
 * Returns NEED_DATA until a full image is available. */
296 gst_jpeg_dec_parse (GstVideoDecoder * bdec, GstVideoCodecFrame * frame,
297 GstAdapter * adapter, gboolean at_eos)
302 gint offset = 0, noffset;
303 GstJpegDec *dec = (GstJpegDec *) bdec;
/* Every JPEG image is independently decodable. */
305 GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
307 /* FIXME : The overhead of using scan_uint32 is massive */
309 size = gst_adapter_available (adapter);
310 GST_DEBUG ("Parsing jpeg image data (%u bytes)", size);
313 GST_DEBUG ("Flushing all data out");
316 /* If we have leftover data, throw it away */
317 if (!dec->saw_header)
319 goto have_full_frame;
/* First locate an SOI marker (0xffd8) and sync the adapter to it. */
325 if (!dec->saw_header) {
327 /* we expect at least 4 bytes, first of which start marker */
329 gst_adapter_masked_scan_uint32 (adapter, 0xffff0000, 0xffd80000, 0,
332 GST_DEBUG ("ret:%d", ret);
337 gst_adapter_flush (adapter, ret);
340 dec->saw_header = TRUE;
347 GST_DEBUG ("offset:%d, size:%d", offset, size);
/* Look for the next 0xff marker byte at the expected position. */
350 gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
351 offset, size - offset, &value);
353 /* lost sync if 0xff marker not where expected */
354 if ((resync = (noffset != offset))) {
355 GST_DEBUG ("Lost sync at 0x%08x, resyncing", offset + 2);
357 * may have marker, but could have been resyncing */
358 resync = resync || dec->parse_resync;
359 /* Skip over extra 0xff */
360 while ((noffset >= 0) && ((value & 0xff) == 0xff)) {
363 gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
364 noffset, size - noffset, &value);
366 /* enough bytes left for marker? (we need 0xNN after the 0xff) */
368 GST_DEBUG ("at end of input and no EOI marker found, need more data");
372 /* now lock on the marker we found */
374 value = value & 0xff;
/* EOI (0xd9): the image is complete. */
376 GST_DEBUG ("0x%08x: EOI marker", offset + 2);
377 /* clear parse state */
378 dec->saw_header = FALSE;
379 dec->parse_resync = FALSE;
381 goto have_full_frame;
/* A new SOI before EOI: previous image was truncated; emit what we have. */
384 GST_DEBUG ("0x%08x: SOI marker before EOI marker", offset + 2);
386 /* clear parse state */
387 dec->saw_header = FALSE;
388 dec->parse_resync = FALSE;
390 goto have_full_frame;
/* Restart markers (RST0..RST7) have no length field. */
394 if (value >= 0xd0 && value <= 0xd7)
397 /* peek tag and subsequent length */
398 if (offset + 2 + 4 > size)
401 gst_adapter_masked_scan_uint32_peek (adapter, 0x0, 0x0, offset + 2, 4,
403 frame_len = frame_len & 0xffff;
405 GST_DEBUG ("0x%08x: tag %02x, frame_len=%u", offset + 2, value, frame_len);
406 /* the frame length includes the 2 bytes for the length; here we want at
407 * least 2 more bytes at the end for an end marker */
408 if (offset + 2 + 2 + frame_len + 2 > size) {
/* SOS/RSTn: entropy-coded data follows the segment; find its end by
 * scanning for the next real marker (0xff followed by non-zero). */
412 if (gst_jpeg_dec_parse_tag_has_entropy_segment (value)) {
413 guint eseglen = dec->parse_entropy_len;
415 GST_DEBUG ("0x%08x: finding entropy segment length (eseglen:%d)",
416 offset + 2, eseglen);
417 if (size < offset + 2 + frame_len + eseglen)
419 noffset = offset + 2 + frame_len + dec->parse_entropy_len;
421 GST_DEBUG ("noffset:%d, size:%d, size - noffset:%d",
422 noffset, size, size - noffset);
423 noffset = gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00,
424 0x0000ff00, noffset, size - noffset, &value);
/* Remember how far we scanned so the next call resumes there. */
427 dec->parse_entropy_len = size - offset - 4 - frame_len - 2;
/* 0xff00 is a stuffed byte inside entropy data, not a marker. */
430 if ((value & 0xff) != 0x00) {
431 eseglen = noffset - offset - frame_len - 2;
436 dec->parse_entropy_len = 0;
437 frame_len += eseglen;
438 GST_DEBUG ("entropy segment length=%u => frame_len=%u", eseglen,
442 /* check if we will still be in sync if we interpret
443 * this as a sync point and skip this frame */
444 noffset = offset + frame_len + 2;
445 noffset = gst_adapter_masked_scan_uint32 (adapter, 0x0000ff00, 0x0000ff00,
448 /* ignore and continue resyncing until we hit the end
449 * of our data or find a sync point that looks okay */
453 GST_DEBUG ("found sync at 0x%x", offset + 2);
456 /* Add current data to output buffer */
457 toadd += frame_len + 2;
458 offset += frame_len + 2;
/* Not enough data yet: stash what we have and ask for more. */
463 gst_video_decoder_add_to_frame (bdec, toadd);
464 return GST_VIDEO_DECODER_FLOW_NEED_DATA;
/* Full image collected: push it to the base class for decoding. */
468 gst_video_decoder_add_to_frame (bdec, toadd);
469 GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
470 return gst_video_decoder_have_frame (bdec);
473 gst_adapter_flush (adapter, size);
478 /* shamelessly ripped from jpegutils.c in mjpegtools */
/* Install one Huffman table into the decompressor, allocating it on the
 * libjpeg memory pool if not present yet. `bits` is the 17-entry
 * codes-per-length array, `val` the symbol values. */
480 add_huff_table (j_decompress_ptr dinfo,
481 JHUFF_TBL ** htblptr, const UINT8 * bits, const UINT8 * val)
482 /* Define a Huffman table */
486 if (*htblptr == NULL)
487 *htblptr = jpeg_alloc_huff_table ((j_common_ptr) dinfo);
491 /* Copy the number-of-symbols-of-each-code-length counts */
492 memcpy ((*htblptr)->bits, bits, sizeof ((*htblptr)->bits));
494 /* Validate the counts. We do this here mainly so we can copy the right
495 * number of symbols from the val[] array, without risking marching off
496 * the end of memory. jchuff.c will do a more thorough test later.
499 for (len = 1; len <= 16; len++)
500 nsymbols += bits[len];
501 if (nsymbols < 1 || nsymbols > 256)
502 g_error ("jpegutils.c: add_huff_table failed badly. ");
504 memcpy ((*htblptr)->huffval, val, nsymbols * sizeof (UINT8));
/* Install the four standard JPEG Huffman tables (DC/AC x luma/chroma)
 * from Annex K.3 of the JPEG spec. Needed because some MJPEG streams
 * omit DHT segments and rely on these defaults. */
510 std_huff_tables (j_decompress_ptr dinfo)
511 /* Set up the standard Huffman tables (cf. JPEG standard section K.3) */
512 /* IMPORTANT: these are only valid for 8-bit data precision! */
514 static const UINT8 bits_dc_luminance[17] =
515 { /* 0-base */ 0, 0, 1, 5, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0 };
516 static const UINT8 val_dc_luminance[] =
517 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
519 static const UINT8 bits_dc_chrominance[17] =
520 { /* 0-base */ 0, 0, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0 };
521 static const UINT8 val_dc_chrominance[] =
522 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
524 static const UINT8 bits_ac_luminance[17] =
525 { /* 0-base */ 0, 0, 2, 1, 3, 3, 2, 4, 3, 5, 5, 4, 4, 0, 0, 1, 0x7d };
526 static const UINT8 val_ac_luminance[] =
527 { 0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12,
528 0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07,
529 0x22, 0x71, 0x14, 0x32, 0x81, 0x91, 0xa1, 0x08,
530 0x23, 0x42, 0xb1, 0xc1, 0x15, 0x52, 0xd1, 0xf0,
531 0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16,
532 0x17, 0x18, 0x19, 0x1a, 0x25, 0x26, 0x27, 0x28,
533 0x29, 0x2a, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39,
534 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49,
535 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59,
536 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69,
537 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79,
538 0x7a, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89,
539 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98,
540 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
541 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6,
542 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4, 0xc5,
543 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4,
544 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xe1, 0xe2,
545 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea,
546 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
550 static const UINT8 bits_ac_chrominance[17] =
551 { /* 0-base */ 0, 0, 2, 1, 2, 4, 4, 3, 4, 7, 5, 4, 4, 0, 1, 2, 0x77 };
552 static const UINT8 val_ac_chrominance[] =
553 { 0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21,
554 0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71,
555 0x13, 0x22, 0x32, 0x81, 0x08, 0x14, 0x42, 0x91,
556 0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33, 0x52, 0xf0,
557 0x15, 0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34,
558 0xe1, 0x25, 0xf1, 0x17, 0x18, 0x19, 0x1a, 0x26,
559 0x27, 0x28, 0x29, 0x2a, 0x35, 0x36, 0x37, 0x38,
560 0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
561 0x49, 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58,
562 0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
563 0x69, 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78,
564 0x79, 0x7a, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
565 0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96,
566 0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5,
567 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4,
568 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3,
569 0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2,
570 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda,
571 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9,
572 0xea, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
576 add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[0],
577 bits_dc_luminance, val_dc_luminance);
578 add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[0],
579 bits_ac_luminance, val_ac_luminance);
580 add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[1],
581 bits_dc_chrominance, val_dc_chrominance);
582 add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[1],
583 bits_ac_chrominance, val_ac_chrominance);
/* If the stream defined no Huffman tables at all, fall back to the
 * standard Annex K.3 tables; does nothing when any table is present. */
589 guarantee_huff_tables (j_decompress_ptr dinfo)
591 if ((dinfo->dc_huff_tbl_ptrs[0] == NULL) &&
592 (dinfo->dc_huff_tbl_ptrs[1] == NULL) &&
593 (dinfo->ac_huff_tbl_ptrs[0] == NULL) &&
594 (dinfo->ac_huff_tbl_ptrs[1] == NULL)) {
595 GST_DEBUG ("Generating standard Huffman tables for this frame.");
596 std_huff_tables (dinfo);
/* GstVideoDecoder::set_format vfunc: remember the new input state and
 * switch to packetized mode when upstream says the stream is parsed
 * (one buffer == one complete image, so our parse vfunc is skipped). */
601 gst_jpeg_dec_set_format (GstVideoDecoder * dec, GstVideoCodecState * state)
603 GstJpegDec *jpeg = GST_JPEG_DEC (dec);
604 GstStructure *structure;
605 gboolean parsed = FALSE;
607 if (jpeg->input_state)
608 gst_video_codec_state_unref (jpeg->input_state);
609 jpeg->input_state = gst_video_codec_state_ref (state);
611 structure = gst_caps_get_structure (state->caps, 0);
612 gst_structure_get_boolean (structure, "parsed", &parsed);
613 gst_video_decoder_set_packetized (dec, parsed);
/* Horizontally downsample by 2 while copying: takes every second byte
 * of `src` into `dest` (dest[i] = src[i*2], `len` output bytes). */
621 hresamplecpy1 (guint8 * dest, const guint8 * src, guint len)
625 for (i = 0; i < len; ++i) {
626 /* equivalent to: dest[i] = src[i << 1] */
/* Release the 16 per-component intermediate row buffers used by the
 * indirect decode paths and reset the recorded allocation width. */
635 gst_jpeg_dec_free_buffers (GstJpegDec * dec)
639 for (i = 0; i < 16; i++) {
640 g_free (dec->idr_y[i]);
641 g_free (dec->idr_u[i]);
642 g_free (dec->idr_v[i]);
643 dec->idr_y[i] = NULL;
644 dec->idr_u[i] = NULL;
645 dec->idr_v[i] = NULL;
648 dec->idr_width_allocated = 0;
/* (Re)allocate the 16 intermediate rows per component to `maxrowbytes`
 * bytes each; no-op when the current allocation already matches.
 * Returns FALSE on allocation failure. */
651 static inline gboolean
652 gst_jpeg_dec_ensure_buffers (GstJpegDec * dec, guint maxrowbytes)
656 if (G_LIKELY (dec->idr_width_allocated == maxrowbytes))
659 /* FIXME: maybe just alloc one or three blocks altogether? */
660 for (i = 0; i < 16; i++) {
661 dec->idr_y[i] = g_try_realloc (dec->idr_y[i], maxrowbytes);
662 dec->idr_u[i] = g_try_realloc (dec->idr_u[i], maxrowbytes);
663 dec->idr_v[i] = g_try_realloc (dec->idr_v[i], maxrowbytes);
665 if (G_UNLIKELY (!dec->idr_y[i] || !dec->idr_u[i] || !dec->idr_v[i])) {
666 GST_WARNING_OBJECT (dec, "out of memory, i=%d, bytes=%u", i, maxrowbytes);
671 dec->idr_width_allocated = maxrowbytes;
672 GST_LOG_OBJECT (dec, "allocated temp memory, %u bytes/row", maxrowbytes);
/* Indirect grayscale decode: libjpeg writes raw rows into our temp
 * buffers (idr_y) and we copy pixel-by-pixel into the output frame,
 * honouring pixel stride and interlaced field offsets. */
677 gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, GstVideoFrame * frame,
678 guint field, guint num_fields)
681 guchar **scanarray[1] = { rows };
686 gint pstride, rstride;
688 GST_DEBUG_OBJECT (dec, "indirect decoding of grayscale");
690 width = GST_VIDEO_FRAME_WIDTH (frame);
691 height = GST_VIDEO_FRAME_HEIGHT (frame) / num_fields;
693 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
/* For the second field, start one row down. */
696 base[0] = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
698 base[0] += GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
/* Row stride is doubled for interlaced (two-field) output. */
701 pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
702 rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0) * num_fields;
704 memcpy (rows, dec->idr_y, 16 * sizeof (gpointer));
708 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
709 if (G_LIKELY (lines > 0)) {
710 for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
714 for (k = 0; k < width; k++) {
715 base[0][p] = rows[j][k];
721 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Indirect RGB decode: libjpeg delivers planar R/G/B rows into temp
 * buffers; we interleave them pixel-by-pixel into the packed output
 * frame, honouring pixel stride and interlaced field offsets. */
727 gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame,
728 guint field, guint num_fields)
730 guchar *r_rows[16], *g_rows[16], *b_rows[16];
731 guchar **scanarray[3] = { r_rows, g_rows, b_rows };
735 guint pstride, rstride;
738 GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");
740 width = GST_VIDEO_FRAME_WIDTH (frame);
741 height = GST_VIDEO_FRAME_HEIGHT (frame) / num_fields;
743 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
746 for (i = 0; i < 3; i++) {
747 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
/* For the second field, start one row down. */
749 base[i] += GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
752 pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
753 rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0) * num_fields;
/* Point libjpeg's scan arrays at our preallocated temp rows. */
755 memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
756 memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
757 memcpy (b_rows, dec->idr_v, 16 * sizeof (gpointer));
761 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
762 if (G_LIKELY (lines > 0)) {
763 for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
767 for (k = 0; k < width; k++) {
768 base[0][p] = r_rows[j][k];
769 base[1][p] = g_rows[j][k];
770 base[2][p] = b_rows[j][k];
778 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Indirect YUV decode path for subsamplings/strides that cannot be fed
 * to libjpeg directly: decode into temp rows, then memcpy (or
 * horizontally resample) into the output frame, clamping against the
 * last valid row so libjpeg's block padding never overruns the buffer. */
784 gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame, gint r_v,
785 gint r_h, gint comp, guint field, guint num_fields)
787 guchar *y_rows[16], *u_rows[16], *v_rows[16];
788 guchar **scanarray[3] = { y_rows, u_rows, v_rows };
791 guchar *base[3], *last[3];
792 gint rowsize[3], stride[3];
795 GST_DEBUG_OBJECT (dec,
796 "unadvantageous width or r_h, taking slow route involving memcpy");
798 width = GST_VIDEO_FRAME_WIDTH (frame);
799 height = GST_VIDEO_FRAME_HEIGHT (frame);
801 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
804 for (i = 0; i < 3; i++) {
805 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
806 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i) * num_fields;
807 rowsize[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
808 /* make sure we don't make jpeglib write beyond our buffer,
809 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
810 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
811 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
/* For the second field, start one row down. */
814 base[i] += GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
818 memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer));
819 memcpy (u_rows, dec->idr_u, 16 * sizeof (gpointer));
820 memcpy (v_rows, dec->idr_v, 16 * sizeof (gpointer));
822 /* fill chroma components for grayscale */
824 GST_DEBUG_OBJECT (dec, "grayscale, filling chroma");
825 for (i = 0; i < 16; i++) {
/* NOTE(review): memset() arguments look swapped — memset(ptr, fill, size)
 * would be memset (u_rows[i], 0x80, GST_ROUND_UP_32 (width)); as written
 * this fills only 0x80 bytes with (width & 0xff). Verify against upstream
 * and fix. */
826 memset (u_rows[i], GST_ROUND_UP_32 (width), 0x80);
827 memset (v_rows[i], GST_ROUND_UP_32 (width), 0x80);
831 for (i = 0; i < height; i += r_v * DCTSIZE) {
832 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, r_v * DCTSIZE);
833 if (G_LIKELY (lines > 0)) {
834 for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) {
835 if (G_LIKELY (base[0] <= last[0])) {
836 memcpy (base[0], y_rows[j], rowsize[0]);
837 base[0] += stride[0];
/* r_v == 2: a second luma row belongs to the same chroma row. */
840 if (G_LIKELY (base[0] <= last[0])) {
841 memcpy (base[0], y_rows[j + 1], rowsize[0]);
842 base[0] += stride[0];
845 if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) {
847 memcpy (base[1], u_rows[k], rowsize[1]);
848 memcpy (base[2], v_rows[k], rowsize[2]);
849 } else if (r_h == 1) {
/* Chroma is full-width in the stream: downsample by 2 while copying. */
850 hresamplecpy1 (base[1], u_rows[k], rowsize[1]);
851 hresamplecpy1 (base[2], v_rows[k], rowsize[2]);
853 /* FIXME: implement (at least we avoid crashing by doing nothing) */
857 if (r_v == 2 || (k & 1) != 0) {
858 base[1] += stride[1];
859 base[2] += stride[2];
863 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Fast YUV decode path: build per-row pointer tables into the output
 * frame so libjpeg writes scanlines directly, redirecting overflow rows
 * (from block padding) into a scratch row. With libjpeg-turbo colorspace
 * extensions enabled, uses jpeg_read_scanlines() into the packed buffer
 * instead. */
869 gst_jpeg_dec_decode_direct (GstJpegDec * dec, GstVideoFrame * frame,
870 guint field, guint num_fields)
872 guchar **line[3]; /* the jpeg line buffer */
873 guchar *y[4 * DCTSIZE] = { NULL, }; /* alloc enough for the lines */
874 guchar *u[4 * DCTSIZE] = { NULL, }; /* r_v will be <4 */
875 guchar *v[4 * DCTSIZE] = { NULL, };
877 gint lines, v_samp[3];
878 guchar *base[3], *last[3];
880 guint height, field_height;
886 v_samp[0] = dec->cinfo.comp_info[0].v_samp_factor;
887 v_samp[1] = dec->cinfo.comp_info[1].v_samp_factor;
888 v_samp[2] = dec->cinfo.comp_info[2].v_samp_factor;
890 if (G_UNLIKELY (v_samp[0] > 2 || v_samp[1] > 2 || v_samp[2] > 2))
891 goto format_not_supported;
893 height = field_height = GST_VIDEO_FRAME_HEIGHT (frame);
895 /* XXX: division by 2 here might not be a good idea. But we are doing this
896 * already in gst_jpeg_dec_handle_frame() for interlaced jpeg */
900 for (i = 0; i < 3; i++) {
901 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
902 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i) * num_fields;
903 /* make sure we don't make jpeglib write beyond our buffer,
904 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
905 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
906 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
/* For the second field, start one row down. */
909 base[i] += GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
/* Scratch row absorbs writes past the last valid output row. */
913 if (field_height % (v_samp[0] * DCTSIZE) && (dec->scratch_size < stride[0])) {
914 g_free (dec->scratch);
915 dec->scratch = g_malloc (stride[0]);
916 dec->scratch_size = stride[0];
919 /* let jpeglib decode directly into our final buffer */
920 GST_DEBUG_OBJECT (dec, "decoding directly into output buffer");
922 #ifdef JCS_EXTENSIONS
923 if (dec->format_convert) {
924 gint row_stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
925 guchar *bufbase = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
927 if (num_fields == 2) {
/* For the second field, start one row down. */
932 bufbase += GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
935 while (dec->cinfo.output_scanline < dec->cinfo.output_height) {
936 JSAMPARRAY buffer = { &bufbase, };
937 jpeg_read_scanlines (&dec->cinfo, buffer, 1);
938 bufbase += row_stride;
/* Raw (non-turbo) path: assemble row-pointer tables one MCU band at a
 * time and let jpeg_read_raw_data() fill them. */
943 for (i = 0; i < height; i += v_samp[0] * DCTSIZE) {
944 for (j = 0; j < (v_samp[0] * DCTSIZE); ++j) {
946 line[0][j] = base[0] + (i + j) * stride[0];
947 if (G_UNLIKELY (line[0][j] > last[0]))
948 line[0][j] = dec->scratch;
950 if (v_samp[1] == v_samp[0]) {
951 line[1][j] = base[1] + ((i + j) / 2) * stride[1];
952 } else if (j < (v_samp[1] * DCTSIZE)) {
953 line[1][j] = base[1] + ((i / 2) + j) * stride[1];
955 if (G_UNLIKELY (line[1][j] > last[1]))
956 line[1][j] = dec->scratch;
958 if (v_samp[2] == v_samp[0]) {
959 line[2][j] = base[2] + ((i + j) / 2) * stride[2];
960 } else if (j < (v_samp[2] * DCTSIZE)) {
961 line[2][j] = base[2] + ((i / 2) + j) * stride[2];
963 if (G_UNLIKELY (line[2][j] > last[2]))
964 line[2][j] = dec->scratch;
967 lines = jpeg_read_raw_data (&dec->cinfo, line, v_samp[0] * DCTSIZE);
968 if (G_UNLIKELY (!lines)) {
969 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Error path: report unsupported subsampling via element error, but keep
 * the flow return non-fatal handling in GST_VIDEO_DECODER_ERROR. */
975 format_not_supported:
977 gboolean ret = GST_FLOW_OK;
979 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
980 (_("Failed to decode JPEG image")),
981 ("Unsupported subsampling schema: v_samp factors: %u %u %u", v_samp[0],
982 v_samp[1], v_samp[2]), ret);
988 #ifdef JCS_EXTENSIONS
/* Map a GStreamer RGB-family video format to the matching libjpeg-turbo
 * JCS_EXT_* output colorspace for direct format conversion. */
990 gst_fmt_to_jpeg_turbo_ext_fmt (GstVideoFormat gstfmt)
993 case GST_VIDEO_FORMAT_RGB:
995 case GST_VIDEO_FORMAT_RGBx:
997 case GST_VIDEO_FORMAT_xRGB:
999 case GST_VIDEO_FORMAT_RGBA:
1000 return JCS_EXT_RGBA;
1001 case GST_VIDEO_FORMAT_ARGB:
1002 return JCS_EXT_ARGB;
1003 case GST_VIDEO_FORMAT_BGR:
1005 case GST_VIDEO_FORMAT_BGRx:
1006 return JCS_EXT_BGRX;
1007 case GST_VIDEO_FORMAT_xBGR:
1008 return JCS_EXT_XBGR;
1009 case GST_VIDEO_FORMAT_BGRA:
1010 return JCS_EXT_BGRA;
1011 case GST_VIDEO_FORMAT_ABGR:
1012 return JCS_EXT_ABGR;
/* Query downstream caps and, if the peer prefers an RGB-family format,
 * enable libjpeg-turbo direct colorspace conversion by recording the
 * chosen GstVideoFormat and its JCS_EXT_* equivalent on the decoder. */
1019 gst_jpeg_turbo_parse_ext_fmt_convert (GstJpegDec * dec, gint * clrspc)
1021 GstCaps *peer_caps, *dec_caps;
/* Intersect peer caps with our own src template caps. */
1023 dec_caps = gst_static_caps_get (&gst_jpeg_dec_src_pad_template.static_caps);
1025 gst_pad_peer_query_caps (GST_VIDEO_DECODER_SRC_PAD (dec), dec_caps);
1026 gst_caps_unref (dec_caps);
1028 GST_DEBUG ("Received caps from peer: %" GST_PTR_FORMAT, peer_caps);
1029 dec->format_convert = FALSE;
1030 if (!gst_caps_is_empty (peer_caps)) {
1031 GstStructure *peerstruct;
1032 const gchar *peerformat;
1033 GstVideoFormat peerfmt;
1035 if (!gst_caps_is_fixed (peer_caps))
1036 peer_caps = gst_caps_fixate (peer_caps);
1038 peerstruct = gst_caps_get_structure (peer_caps, 0);
1039 peerformat = gst_structure_get_string (peerstruct, "format");
1040 peerfmt = gst_video_format_from_string (peerformat);
/* Only RGB-family formats can use turbo's extended colorspaces. */
1043 case GST_VIDEO_FORMAT_RGB:
1044 case GST_VIDEO_FORMAT_RGBx:
1045 case GST_VIDEO_FORMAT_xRGB:
1046 case GST_VIDEO_FORMAT_RGBA:
1047 case GST_VIDEO_FORMAT_ARGB:
1048 case GST_VIDEO_FORMAT_BGR:
1049 case GST_VIDEO_FORMAT_BGRx:
1050 case GST_VIDEO_FORMAT_xBGR:
1051 case GST_VIDEO_FORMAT_BGRA:
1052 case GST_VIDEO_FORMAT_ABGR:
1055 dec->format = peerfmt;
1056 dec->format_convert = TRUE;
1057 dec->libjpeg_ext_format = gst_fmt_to_jpeg_turbo_ext_fmt (peerfmt);
1063 gst_caps_unref (peer_caps);
1064 GST_DEBUG_OBJECT (dec, "format_convert=%d", dec->format_convert);
/* (Re)configure the output state: pick the output GstVideoFormat from the
 * JPEG colorspace (or the turbo-converted format), skip renegotiation if
 * nothing changed, apply JPEG colorimetry/chroma-siting and interlacing,
 * then negotiate with downstream. */
1069 gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc,
1070 gboolean interlaced)
1072 GstVideoCodecState *outstate;
1074 GstVideoFormat format;
1076 #ifdef JCS_EXTENSIONS
1077 if (dec->format_convert) {
1078 format = dec->format;
1084 format = GST_VIDEO_FORMAT_RGB;
1087 format = GST_VIDEO_FORMAT_GRAY8;
1090 format = GST_VIDEO_FORMAT_I420;
1095 /* Compare to currently configured output state */
1096 outstate = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (dec));
1098 info = &outstate->info;
/* No change in dimensions or format: keep the existing output state. */
1100 if (width == GST_VIDEO_INFO_WIDTH (info) &&
1101 height == GST_VIDEO_INFO_HEIGHT (info) &&
1102 format == GST_VIDEO_INFO_FORMAT (info)) {
1103 gst_video_codec_state_unref (outstate);
1106 gst_video_codec_state_unref (outstate);
1108 #ifdef JCS_EXTENSIONS
1109 /* Determine if libjpeg-turbo direct format conversion can be used
1110 * with current caps and if so, adjust $dec to enable it and $clrspc
1112 gst_jpeg_turbo_parse_ext_fmt_convert (dec, &clrspc);
1116 gst_video_decoder_set_output_state (GST_VIDEO_DECODER (dec), format,
1117 width, height, dec->input_state);
1124 /* aka JPEG chroma siting */
1125 outstate->info.chroma_site = GST_VIDEO_CHROMA_SITE_NONE;
/* JPEG is full-range BT.601. */
1127 outstate->info.colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;
1128 outstate->info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
1129 outstate->info.colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN;
1130 outstate->info.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
/* Interlaced MJPEG carries both fields interleaved, top field first. */
1135 outstate->info.interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
1136 GST_VIDEO_INFO_FIELD_ORDER (&outstate->info) =
1137 GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST;
1140 gst_video_codec_state_unref (outstate);
1142 gst_video_decoder_negotiate (GST_VIDEO_DECODER (dec));
1144 GST_DEBUG_OBJECT (dec, "max_v_samp_factor=%d", dec->cinfo.max_v_samp_factor);
1145 GST_DEBUG_OBJECT (dec, "max_h_samp_factor=%d", dec->cinfo.max_h_samp_factor);
1148 static GstFlowReturn
/* Parse the JPEG header from the current libjpeg source buffer, validate
 * the colour space and component/sampling layout, configure the
 * decompression parameters and start the decompression cycle.
 * Returns GST_FLOW_OK on success; on failure posts a decoder error,
 * aborts the decompression state and returns GST_FLOW_ERROR. */
1149 gst_jpeg_dec_prepare_decode (GstJpegDec * dec)
1151 G_GNUC_UNUSED GstFlowReturn ret;
1152 guint r_h, r_v, hdr_ok;
/* require_image=TRUE: a tables-only datastream is not acceptable here */
1155 hdr_ok = jpeg_read_header (&dec->cinfo, TRUE);
1156 if (G_UNLIKELY (hdr_ok != JPEG_HEADER_OK)) {
1157 GST_WARNING_OBJECT (dec, "reading the header failed, %d", hdr_ok);
1160 GST_LOG_OBJECT (dec, "num_components=%d", dec->cinfo.num_components);
1161 GST_LOG_OBJECT (dec, "jpeg_color_space=%d", dec->cinfo.jpeg_color_space);
1163 if (!dec->cinfo.num_components || !dec->cinfo.comp_info)
1164 goto components_not_supported;
/* sampling factors of the first (luma) component serve as the reference
 * ratio that the other components are validated against below */
1166 r_h = dec->cinfo.comp_info[0].h_samp_factor;
1167 r_v = dec->cinfo.comp_info[0].v_samp_factor;
1169 GST_LOG_OBJECT (dec, "r_h = %d, r_v = %d", r_h, r_v);
1171 if (dec->cinfo.num_components > 3)
1172 goto components_not_supported;
1174 /* verify color space expectation to avoid going *boom* or bogus output */
1175 if (dec->cinfo.jpeg_color_space != JCS_YCbCr &&
1176 dec->cinfo.jpeg_color_space != JCS_GRAYSCALE &&
1177 dec->cinfo.jpeg_color_space != JCS_RGB)
1178 goto unsupported_colorspace;
/* debug-only dump of the per-component sampling factors and ids */
1180 #ifndef GST_DISABLE_GST_DEBUG
1184 for (i = 0; i < dec->cinfo.num_components; ++i) {
1185 GST_LOG_OBJECT (dec, "[%d] h_samp_factor=%d, v_samp_factor=%d, cid=%d",
1186 i, dec->cinfo.comp_info[i].h_samp_factor,
1187 dec->cinfo.comp_info[i].v_samp_factor,
1188 dec->cinfo.comp_info[i].component_id);
1193 /* prepare for raw output */
1194 dec->cinfo.do_fancy_upsampling = FALSE;
1195 dec->cinfo.do_block_smoothing = FALSE;
1196 dec->cinfo.dct_method = dec->idct_method;
/* with libjpeg-turbo colour space extensions, libjpeg can convert
 * directly to the negotiated packed output format; otherwise we request
 * raw (planar) output and do the layout handling ourselves */
1197 #ifdef JCS_EXTENSIONS
1198 gst_jpeg_turbo_parse_ext_fmt_convert (dec, NULL);
1199 if (dec->format_convert) {
1200 dec->cinfo.out_color_space = dec->libjpeg_ext_format;
1201 dec->cinfo.raw_data_out = FALSE;
1205 dec->cinfo.out_color_space = dec->cinfo.jpeg_color_space;
1206 dec->cinfo.raw_data_out = TRUE;
1209 GST_LOG_OBJECT (dec, "starting decompress");
1210 guarantee_huff_tables (&dec->cinfo);
1211 if (!jpeg_start_decompress (&dec->cinfo)) {
1212 GST_WARNING_OBJECT (dec, "failed to start decompression cycle");
1215 /* sanity checks to get safe and reasonable output */
1216 switch (dec->cinfo.jpeg_color_space) {
/* grayscale: exactly one component expected */
1218 if (dec->cinfo.num_components != 1)
1219 goto invalid_yuvrgbgrayscale;
/* RGB: three components, no chroma subsampling */
1222 if (dec->cinfo.num_components != 3 || dec->cinfo.max_v_samp_factor > 1 ||
1223 dec->cinfo.max_h_samp_factor > 1)
1224 goto invalid_yuvrgbgrayscale;
/* YCbCr: chroma sampling factors must not exceed the luma reference */
1227 if (dec->cinfo.num_components != 3 ||
1228 r_v > 2 || r_v < dec->cinfo.comp_info[0].v_samp_factor ||
1229 r_v < dec->cinfo.comp_info[1].v_samp_factor ||
1230 r_h < dec->cinfo.comp_info[0].h_samp_factor ||
1231 r_h < dec->cinfo.comp_info[1].h_samp_factor)
1232 goto invalid_yuvrgbgrayscale;
1235 g_assert_not_reached ();
/* enforce the MIN/MAX width and height limits advertised on the caps */
1239 if (G_UNLIKELY (dec->cinfo.output_width < MIN_WIDTH ||
1240 dec->cinfo.output_width > MAX_WIDTH ||
1241 dec->cinfo.output_height < MIN_HEIGHT ||
1242 dec->cinfo.output_height > MAX_HEIGHT))
/* ERRORS: each path posts a decoder error and returns GST_FLOW_ERROR */
1250 ret = GST_FLOW_ERROR;
1251 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1252 (_("Failed to decode JPEG image")),
1253 ("Picture is too small or too big (%ux%u)", dec->cinfo.output_width,
1254 dec->cinfo.output_height), ret);
1255 return GST_FLOW_ERROR;
1257 components_not_supported:
1259 ret = GST_FLOW_ERROR;
1260 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1261 (_("Failed to decode JPEG image")),
1262 ("number of components not supported: %d (max 3)",
1263 dec->cinfo.num_components), ret);
1264 jpeg_abort_decompress (&dec->cinfo);
1265 return GST_FLOW_ERROR;
1267 unsupported_colorspace:
1269 ret = GST_FLOW_ERROR;
1270 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1271 (_("Failed to decode JPEG image")),
1272 ("Picture has unknown or unsupported colourspace"), ret);
1273 jpeg_abort_decompress (&dec->cinfo);
1274 return GST_FLOW_ERROR;
1276 invalid_yuvrgbgrayscale:
1278 ret = GST_FLOW_ERROR;
1279 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1280 (_("Failed to decode JPEG image")),
1281 ("Picture is corrupt or unhandled YUV/RGB/grayscale layout"), ret);
1282 jpeg_abort_decompress (&dec->cinfo);
1283 return GST_FLOW_ERROR;
1287 static GstFlowReturn
/* Decode one image (or one field of an interlaced image) from the already
 * started libjpeg decompression cycle into @vframe.  @field is 1 or 2 and
 * @num_fields 1 or 2.  Dispatches on the JPEG colour space; for YCbCr
 * either a direct raw decode into the frame or an indirect decode via
 * intermediate line buffers is used, depending on width alignment and the
 * component sampling factors. */
1288 gst_jpeg_dec_decode (GstJpegDec * dec, GstVideoFrame * vframe, guint width,
1289 guint height, guint field, guint num_fields)
1291 GstFlowReturn ret = GST_FLOW_OK;
1293 if (dec->cinfo.jpeg_color_space == JCS_RGB) {
1294 gst_jpeg_dec_decode_rgb (dec, vframe, field, num_fields);
1295 } else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
1296 gst_jpeg_dec_decode_grayscale (dec, vframe, field, num_fields);
1298 GST_LOG_OBJECT (dec, "decompressing (required scanline buffer height = %u)",
1299 dec->cinfo.rec_outbuf_height);
1301 /* For some widths jpeglib requires more horizontal padding than I420
1302 * provides. In those cases we need to decode into separate buffers and then
1303 * copy over the data into our final picture buffer, otherwise jpeglib might
1304 * write over the end of a line into the beginning of the next line,
1305 * resulting in blocky artifacts on the left side of the picture. */
1306 if (G_UNLIKELY (width % (dec->cinfo.max_h_samp_factor * DCTSIZE) != 0
1307 || dec->cinfo.comp_info[0].h_samp_factor != 2
1308 || dec->cinfo.comp_info[1].h_samp_factor != 1
1309 || dec->cinfo.comp_info[2].h_samp_factor != 1)) {
1310 GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
1311 "indirect decoding using extra buffer copy");
1312 gst_jpeg_dec_decode_indirect (dec, vframe,
1313 dec->cinfo.comp_info[0].v_samp_factor,
1314 dec->cinfo.comp_info[0].h_samp_factor, dec->cinfo.num_components,
/* well-aligned 2x1x1-sampled case: decode straight into the frame */
1317 ret = gst_jpeg_dec_decode_direct (dec, vframe, field, num_fields);
1321 GST_LOG_OBJECT (dec, "decompressing finished: %s", gst_flow_get_name (ret));
/* abort on error to reset libjpeg state; finish the cycle normally
 * otherwise */
1323 if (G_UNLIKELY (ret != GST_FLOW_OK)) {
1324 jpeg_abort_decompress (&dec->cinfo);
1326 jpeg_finish_decompress (&dec->cinfo);
1332 static GstFlowReturn
/* GstVideoDecoder::handle_frame vfunc: decode one input buffer into one
 * output frame.  Handles missing-EOI fixup, interlaced MJPEG carried as
 * two JPEG fields inside a single buffer, and libjpeg fatal-error
 * recovery through the setjmp/longjmp based error manager. */
1333 gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec, GstVideoCodecFrame * frame)
1335 GstFlowReturn ret = GST_FLOW_OK;
1336 GstJpegDec *dec = (GstJpegDec *) bdec;
1337 GstVideoFrame vframe;
1338 gint num_fields; /* number of fields (1 or 2) */
1339 gint output_height; /* height of output image (one or two fields) */
1340 gint height; /* height of current frame (whole image or a field) */
1343 gboolean need_unmap = TRUE;
1344 GstVideoCodecState *state = NULL;
1345 gboolean release_frame = TRUE;
1350 if (!gst_buffer_map (frame->input_buffer, &dec->current_frame_map,
1354 data = dec->current_frame_map.data;
1355 nbytes = dec->current_frame_map.size;
1357 goto need_more_data;
/* check for the EOI marker (0xff 0xd9) at the very end of the buffer */
1358 has_eoi = ((data[nbytes - 2] == 0xff) && (data[nbytes - 1] == 0xd9));
1360 /* some cameras fail to send an end-of-image marker (EOI),
1361 * add it if that is the case. */
1364 GstBuffer *eoibuf = gst_buffer_new_and_alloc (2);
1366 /* unmap, will add EOI and remap at the end */
1367 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1369 gst_buffer_map (eoibuf, &map, GST_MAP_WRITE);
1372 gst_buffer_unmap (eoibuf, &map);
1374 /* append to input buffer, and remap */
1375 frame->input_buffer = gst_buffer_append (frame->input_buffer, eoibuf);
1377 gst_buffer_map (frame->input_buffer, &dec->current_frame_map, GST_MAP_READ);
1378 GST_DEBUG ("fixup EOI marker added");
/* hand the mapped input data to the libjpeg source manager */
1381 dec->current_frame = frame;
1382 dec->cinfo.src->next_input_byte = dec->current_frame_map.data;
1383 dec->cinfo.src->bytes_in_buffer = dec->current_frame_map.size;
/* recovery point: the error manager longjmp()s back here on fatal
 * libjpeg errors while reading the header */
1385 if (setjmp (dec->jerr.setjmp_buffer)) {
1386 code = dec->jerr.pub.msg_code;
1388 if (code == JERR_INPUT_EOF) {
1389 GST_DEBUG ("jpeg input EOF error, we probably need more data");
1390 goto need_more_data;
1395 /* read header and check values */
1396 ret = gst_jpeg_dec_prepare_decode (dec);
1397 if (G_UNLIKELY (ret == GST_FLOW_ERROR))
1400 width = dec->cinfo.output_width;
1401 height = dec->cinfo.output_height;
1403 /* is it interlaced MJPEG? (we really don't want to scan the jpeg data
1404 * to see if there are two SOF markers in the packet to detect this) */
1405 if (gst_video_decoder_get_packetized (bdec) &&
1407 dec->input_state->info.height > height &&
1408 dec->input_state->info.height <= (height * 2)
1409 && dec->input_state->info.width == width) {
1410 GST_LOG_OBJECT (dec,
1411 "looks like an interlaced image: "
1412 "input width/height of %dx%d with JPEG frame width/height of %dx%d",
1413 dec->input_state->info.width, dec->input_state->info.height, width,
/* two fields: the output picture is the full input height, each
 * decoded field covers half of it */
1415 output_height = dec->input_state->info.height;
1416 height = dec->input_state->info.height / 2;
1418 GST_LOG_OBJECT (dec, "field height=%d", height);
1420 output_height = height;
1424 gst_jpeg_dec_negotiate (dec, width, output_height,
1425 dec->cinfo.jpeg_color_space, num_fields == 2);
1427 state = gst_video_decoder_get_output_state (bdec);
1428 ret = gst_video_decoder_allocate_output_frame (bdec, frame);
1429 if (G_UNLIKELY (ret != GST_FLOW_OK))
1432 if (!gst_video_frame_map (&vframe, &state->info, frame->output_buffer,
/* re-arm the recovery point: errors from here on must also unmap the
 * output frame before bailing out */
1436 if (setjmp (dec->jerr.setjmp_buffer)) {
1437 code = dec->jerr.pub.msg_code;
1438 gst_video_frame_unmap (&vframe);
1442 GST_LOG_OBJECT (dec, "width %d, height %d, fields %d", width, output_height,
/* decode the first (or only) field */
1445 ret = gst_jpeg_dec_decode (dec, &vframe, width, height, 1, num_fields);
1446 if (G_UNLIKELY (ret != GST_FLOW_OK)) {
1447 gst_video_frame_unmap (&vframe);
1451 if (setjmp (dec->jerr.setjmp_buffer)) {
1452 code = dec->jerr.pub.msg_code;
1453 gst_video_frame_unmap (&vframe);
1457 /* decode second field if there is one */
1458 if (num_fields == 2) {
1459 GstVideoFormat field2_format;
1461 /* Checked above before setting num_fields to 2 */
1462 g_assert (dec->input_state != NULL);
1464 /* skip any chunk or padding bytes before the next SOI marker; both fields
1465 * are in one single buffer here, so direct access should be fine here */
1466 while (dec->jsrc.pub.bytes_in_buffer > 2 &&
1467 GST_READ_UINT16_BE (dec->jsrc.pub.next_input_byte) != 0xffd8) {
1468 --dec->jsrc.pub.bytes_in_buffer;
1469 ++dec->jsrc.pub.next_input_byte;
1472 if (gst_jpeg_dec_prepare_decode (dec) != GST_FLOW_OK) {
1473 GST_WARNING_OBJECT (dec, "problem reading jpeg header of 2nd field");
1474 /* FIXME: post a warning message here? */
1475 gst_video_frame_unmap (&vframe);
1479 /* check if format has changed for the second field */
1480 #ifdef JCS_EXTENSIONS
1481 if (dec->format_convert) {
1482 field2_format = dec->format;
1486 switch (dec->cinfo.jpeg_color_space) {
1488 field2_format = GST_VIDEO_FORMAT_RGB;
1491 field2_format = GST_VIDEO_FORMAT_GRAY8;
1494 field2_format = GST_VIDEO_FORMAT_I420;
1499 GST_LOG_OBJECT (dec,
1500 "got for second field of interlaced image: "
1501 "input width/height of %dx%d with JPEG frame width/height of %dx%d",
1502 dec->input_state->info.width, dec->input_state->info.height,
1503 dec->cinfo.output_width, dec->cinfo.output_height);
/* the second field must match the negotiated output state in width,
 * height range and format, otherwise the two fields cannot be combined
 * into one frame */
1505 if (dec->cinfo.output_width != GST_VIDEO_INFO_WIDTH (&state->info) ||
1506 GST_VIDEO_INFO_HEIGHT (&state->info) <= dec->cinfo.output_height ||
1507 GST_VIDEO_INFO_HEIGHT (&state->info) > (dec->cinfo.output_height * 2) ||
1508 field2_format != GST_VIDEO_INFO_FORMAT (&state->info)) {
1509 GST_WARNING_OBJECT (dec, "second field has different format than first");
1510 gst_video_frame_unmap (&vframe);
1514 ret = gst_jpeg_dec_decode (dec, &vframe, width, height, 2, 2);
1515 if (G_UNLIKELY (ret != GST_FLOW_OK)) {
1516 gst_video_frame_unmap (&vframe);
1520 gst_video_frame_unmap (&vframe);
1522 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1523 ret = gst_video_decoder_finish_frame (bdec, frame);
1524 release_frame = FALSE;
/* common exit path: unmap the input, release the frame if still owned,
 * drop the output state reference */
1532 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1535 gst_video_decoder_release_frame (bdec, frame);
1538 gst_video_codec_state_unref (state);
/* ERRORS */
1545 GST_LOG_OBJECT (dec, "we need more data");
1552 GST_ELEMENT_ERROR (dec, RESOURCE, READ, (_("Failed to read memory")),
1553 ("gst_buffer_map() failed for READ access"));
1554 ret = GST_FLOW_ERROR;
/* fatal libjpeg decode error: report it, drop the frame, reset libjpeg */
1559 gchar err_msg[JMSG_LENGTH_MAX];
1561 dec->jerr.pub.format_message ((j_common_ptr) (&dec->cinfo), err_msg);
1563 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1564 (_("Failed to decode JPEG image")), ("Decode error #%u: %s", code,
1567 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1568 gst_video_decoder_drop_frame (bdec, frame);
1569 release_frame = FALSE;
1571 jpeg_abort_decompress (&dec->cinfo);
1577 /* already posted an error message */
/* output frame allocation failed: only report an error for unexpected
 * flow returns (EOS/FLUSHING/NOT_LINKED are normal shutdown cases) */
1582 const gchar *reason;
1584 reason = gst_flow_get_name (ret);
1586 GST_DEBUG_OBJECT (dec, "failed to alloc buffer, reason %s", reason);
1587 /* Reset for next time */
1588 jpeg_abort_decompress (&dec->cinfo);
1589 if (ret != GST_FLOW_EOS && ret != GST_FLOW_FLUSHING &&
1590 ret != GST_FLOW_NOT_LINKED) {
1591 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1592 (_("Failed to decode JPEG image")),
1593 ("Buffer allocation failed, reason: %s", reason), ret);
1594 jpeg_abort_decompress (&dec->cinfo);
/* GstVideoDecoder::decide_allocation vfunc: chain up to the parent class,
 * then enable the video-meta option on the negotiated buffer pool when
 * downstream supports GstVideoMeta. */
1601 gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query)
1603 GstBufferPool *pool = NULL;
1604 GstStructure *config;
1606 if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query))
1609 if (gst_query_get_n_allocation_pools (query) > 0)
1610 gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
1615 config = gst_buffer_pool_get_config (pool);
1616 if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
1617 gst_buffer_pool_config_add_option (config,
1618 GST_BUFFER_POOL_OPTION_VIDEO_META);
1620 gst_buffer_pool_set_config (pool, config);
1621 gst_object_unref (pool);
/* GstVideoDecoder::sink_event vfunc: on SEGMENT events, use the segment
 * format to decide whether upstream delivers whole frames (TIME =>
 * packetized) or a byte stream that needs parsing; then chain up. */
1627 gst_jpeg_dec_sink_event (GstVideoDecoder * bdec, GstEvent * event)
1629 const GstSegment *segment;
1631 if (GST_EVENT_TYPE (event) != GST_EVENT_SEGMENT)
1634 gst_event_parse_segment (event, &segment);
1636 if (segment->format == GST_FORMAT_TIME)
1637 gst_video_decoder_set_packetized (bdec, TRUE);
1639 gst_video_decoder_set_packetized (bdec, FALSE);
1642 return GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (bdec, event);
/* GstVideoDecoder::start vfunc: reset per-stream parse/convert state and
 * default to non-packetized mode until a TIME segment says otherwise
 * (see gst_jpeg_dec_sink_event). */
1646 gst_jpeg_dec_start (GstVideoDecoder * bdec)
1648 GstJpegDec *dec = (GstJpegDec *) bdec;
1650 #ifdef JCS_EXTENSIONS
1651 dec->format_convert = FALSE;
1653 dec->saw_header = FALSE;
1654 dec->parse_entropy_len = 0;
1655 dec->parse_resync = FALSE;
1657 gst_video_decoder_set_packetized (bdec, FALSE);
/* GstVideoDecoder::flush vfunc: abort any decompression in progress and
 * reset the parse/convert state so the next buffer starts clean. */
1663 gst_jpeg_dec_flush (GstVideoDecoder * bdec)
1665 GstJpegDec *dec = (GstJpegDec *) bdec;
1667 jpeg_abort_decompress (&dec->cinfo);
1668 dec->parse_entropy_len = 0;
1669 dec->parse_resync = FALSE;
1670 dec->saw_header = FALSE;
1671 #ifdef JCS_EXTENSIONS
1672 dec->format_convert = FALSE;
/* GObject::set_property implementation for idct-method and the
 * deprecated max-errors property. */
1679 gst_jpeg_dec_set_property (GObject * object, guint prop_id,
1680 const GValue * value, GParamSpec * pspec)
1684 dec = GST_JPEG_DEC (object);
1687 case PROP_IDCT_METHOD:
1688 dec->idct_method = g_value_get_enum (value);
1690 #ifndef GST_REMOVE_DEPRECATED
/* deprecated property; written atomically (read via g_atomic_int_get in
 * get_property, presumably also from the streaming thread) */
1691 case PROP_MAX_ERRORS:
1692 g_atomic_int_set (&dec->max_errors, g_value_get_int (value));
1696 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GObject::get_property implementation, mirroring set_property. */
1702 gst_jpeg_dec_get_property (GObject * object, guint prop_id, GValue * value,
1707 dec = GST_JPEG_DEC (object);
1710 case PROP_IDCT_METHOD:
1711 g_value_set_enum (value, dec->idct_method);
1713 #ifndef GST_REMOVE_DEPRECATED
/* deprecated property; read atomically to pair with the atomic write in
 * set_property */
1714 case PROP_MAX_ERRORS:
1715 g_value_set_int (value, g_atomic_int_get (&dec->max_errors));
1719 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GstVideoDecoder::stop vfunc: free the per-stream line buffers and the
 * scratch buffer (g_free(NULL) is a no-op). */
1725 gst_jpeg_dec_stop (GstVideoDecoder * bdec)
1727 GstJpegDec *dec = (GstJpegDec *) bdec;
1729 gst_jpeg_dec_free_buffers (dec);
1731 g_free (dec->scratch);
1732 dec->scratch = NULL;
1733 dec->scratch_size = 0;