2 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
3 * Copyright (C) <2009> Tim-Philipp Müller <tim centricular net>
4 * Copyright (C) 2012 Collabora Ltd.
5 * Author : Edward Hervey <edward@collabora.com>
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Library General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Library General Public License for more details.
17 * You should have received a copy of the GNU Library General Public
18 * License along with this library; if not, write to the
19 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
20 * Boston, MA 02110-1301, USA.
24 * SECTION:element-jpegdec
26 * Decodes jpeg images.
29 * <title>Example launch line</title>
31 * gst-launch-1.0 -v filesrc location=mjpeg.avi ! avidemux ! queue ! jpegdec ! videoconvert ! videoscale ! autovideosink
32 * ]| The above pipeline decodes the mjpeg stream and renders it to the screen.
41 #include "gstjpegdec.h"
43 #include <gst/video/video.h>
44 #include <gst/video/gstvideometa.h>
45 #include <gst/video/gstvideopool.h>
46 #include "gst/gst-i18n-plugin.h"
/* Maximum supported image dimensions (JPEG stores dimensions in 16 bits) */
50 #define MAX_WIDTH 65535
52 #define MAX_HEIGHT 65535
/* Recover the GstJpegDec instance from a libjpeg decompress struct via the
 * custom source manager (GstJpegDecSourceMgr) installed in cinfo->src */
54 #define CINFO_GET_JPEGDEC(cinfo_ptr) \
55 (((struct GstJpegDecSourceMgr*)((cinfo_ptr)->src))->dec)
/* Property defaults */
57 #define JPEG_DEFAULT_IDCT_METHOD JDCT_FASTEST
58 #define JPEG_DEFAULT_MAX_ERRORS 0
/* Source pad: raw video in the formats the decoder can output */
68 static GstStaticPadTemplate gst_jpeg_dec_src_pad_template =
69 GST_STATIC_PAD_TEMPLATE ("src",
72 GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
73 ("{ I420, RGB, BGR, RGBx, xRGB, BGRx, xBGR, GRAY8 }"))
77 /* FIXME: sof-marker is for IJG libjpeg 8, should be different for 6.2 */
78 /* FIXME: add back "sof-marker = (int) { 0, 1, 2, 5, 6, 7, 9, 10, 13, 14 }"
79 * once we have a parser and/or demuxer set caps properly */
/* Sink pad: accepts raw JPEG image data */
80 static GstStaticPadTemplate gst_jpeg_dec_sink_pad_template =
81 GST_STATIC_PAD_TEMPLATE ("sink",
84 GST_STATIC_CAPS ("image/jpeg")
87 GST_DEBUG_CATEGORY_STATIC (jpeg_dec_debug);
88 #define GST_CAT_DEFAULT jpeg_dec_debug
89 GST_DEBUG_CATEGORY_STATIC (GST_CAT_PERFORMANCE);
/* GObject property accessors */
91 static void gst_jpeg_dec_set_property (GObject * object, guint prop_id,
92 const GValue * value, GParamSpec * pspec);
93 static void gst_jpeg_dec_get_property (GObject * object, guint prop_id,
94 GValue * value, GParamSpec * pspec);
/* GstVideoDecoder vfunc implementations */
96 static gboolean gst_jpeg_dec_set_format (GstVideoDecoder * dec,
97 GstVideoCodecState * state);
98 static gboolean gst_jpeg_dec_start (GstVideoDecoder * bdec);
99 static gboolean gst_jpeg_dec_stop (GstVideoDecoder * bdec);
100 static gboolean gst_jpeg_dec_flush (GstVideoDecoder * bdec);
101 static GstFlowReturn gst_jpeg_dec_parse (GstVideoDecoder * bdec,
102 GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos);
103 static GstFlowReturn gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec,
104 GstVideoCodecFrame * frame);
105 static gboolean gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec,
107 static gboolean gst_jpeg_dec_sink_event (GstVideoDecoder * bdec,
110 #define gst_jpeg_dec_parent_class parent_class
111 G_DEFINE_TYPE (GstJpegDec, gst_jpeg_dec, GST_TYPE_VIDEO_DECODER);
/* GObject finalize: release the libjpeg decompress state and any cached
 * input codec state, then chain up to the parent class. */
114 gst_jpeg_dec_finalize (GObject * object)
116 GstJpegDec *dec = GST_JPEG_DEC (object);
118 jpeg_destroy_decompress (&dec->cinfo);
119 if (dec->input_state)
120 gst_video_codec_state_unref (dec->input_state);
122 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Class init: install properties, pad templates, element metadata and the
 * GstVideoDecoder virtual method implementations. */
126 gst_jpeg_dec_class_init (GstJpegDecClass * klass)
128 GObjectClass *gobject_class;
129 GstElementClass *element_class;
130 GstVideoDecoderClass *vdec_class;
132 gobject_class = (GObjectClass *) klass;
133 element_class = (GstElementClass *) klass;
134 vdec_class = (GstVideoDecoderClass *) klass;
136 parent_class = g_type_class_peek_parent (klass);
138 gobject_class->finalize = gst_jpeg_dec_finalize;
139 gobject_class->set_property = gst_jpeg_dec_set_property;
140 gobject_class->get_property = gst_jpeg_dec_get_property;
142 g_object_class_install_property (gobject_class, PROP_IDCT_METHOD,
143 g_param_spec_enum ("idct-method", "IDCT Method",
144 "The IDCT algorithm to use", GST_TYPE_IDCT_METHOD,
145 JPEG_DEFAULT_IDCT_METHOD,
146 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
149 * GstJpegDec:max-errors:
151 * Error out after receiving N consecutive decoding errors
152 * (-1 = never error out, 0 = automatic, 1 = fail on first error, etc.)
154 * Deprecated: 1.3.1: Property wasn't used internally
/* Deprecated property kept for ABI/API compatibility unless the build
 * explicitly removes deprecated symbols. */
156 #ifndef GST_REMOVE_DEPRECATED
157 g_object_class_install_property (gobject_class, PROP_MAX_ERRORS,
158 g_param_spec_int ("max-errors", "Maximum Consecutive Decoding Errors",
159 "(Deprecated) Error out after receiving N consecutive decoding errors"
160 " (-1 = never fail, 0 = automatic, 1 = fail on first error)",
161 -1, G_MAXINT, JPEG_DEFAULT_MAX_ERRORS,
162 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED));
165 gst_element_class_add_static_pad_template (element_class,
166 &gst_jpeg_dec_src_pad_template);
167 gst_element_class_add_static_pad_template (element_class,
168 &gst_jpeg_dec_sink_pad_template);
169 gst_element_class_set_static_metadata (element_class, "JPEG image decoder",
170 "Codec/Decoder/Image", "Decode images from JPEG format",
171 "Wim Taymans <wim@fluendo.com>");
173 vdec_class->start = gst_jpeg_dec_start;
174 vdec_class->stop = gst_jpeg_dec_stop;
175 vdec_class->flush = gst_jpeg_dec_flush;
176 vdec_class->parse = gst_jpeg_dec_parse;
177 vdec_class->set_format = gst_jpeg_dec_set_format;
178 vdec_class->handle_frame = gst_jpeg_dec_handle_frame;
179 vdec_class->decide_allocation = gst_jpeg_dec_decide_allocation;
180 vdec_class->sink_event = gst_jpeg_dec_sink_event;
182 GST_DEBUG_CATEGORY_INIT (jpeg_dec_debug, "jpegdec", 0, "JPEG decoder");
183 GST_DEBUG_CATEGORY_GET (GST_CAT_PERFORMANCE, "GST_PERFORMANCE");
/* libjpeg source-manager callback: invoked when libjpeg exhausts the input
 * buffer. The whole frame is handed to libjpeg up front, so being called
 * here indicates the frame data is incomplete/truncated. */
187 gst_jpeg_dec_fill_input_buffer (j_decompress_ptr cinfo)
189 /* We pass in full frame initially, if this get called, the frame is most likely
/* libjpeg source-manager callback: called when decompression starts;
 * nothing to set up beyond logging. */
195 gst_jpeg_dec_init_source (j_decompress_ptr cinfo)
197 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "init_source");
/* libjpeg source-manager callback: advance the read pointer by num_bytes,
 * but only when that many bytes are actually available in the buffer. */
202 gst_jpeg_dec_skip_input_data (j_decompress_ptr cinfo, glong num_bytes)
204 GstJpegDec *dec = CINFO_GET_JPEGDEC (cinfo);
206 GST_DEBUG_OBJECT (dec, "skip %ld bytes", num_bytes);
208 if (num_bytes > 0 && cinfo->src->bytes_in_buffer >= num_bytes) {
209 cinfo->src->next_input_byte += (size_t) num_bytes;
210 cinfo->src->bytes_in_buffer -= (size_t) num_bytes;
/* libjpeg source-manager callback: resynchronise after a corrupt restart
 * marker; only logs here (return value handled on a line outside this view). */
215 gst_jpeg_dec_resync_to_restart (j_decompress_ptr cinfo, gint desired)
217 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "resync_to_start");
/* libjpeg source-manager callback: called when decompression finishes;
 * buffer ownership stays with the element, so only log. */
222 gst_jpeg_dec_term_source (j_decompress_ptr cinfo)
224 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "term_source");
/* libjpeg error-manager callback: suppress libjpeg's default message output
 * (errors are reported through GStreamer instead). */
229 gst_jpeg_dec_my_output_message (j_common_ptr cinfo)
231 return; /* do nothing */
/* libjpeg error-manager callback: warnings/trace messages are ignored. */
235 gst_jpeg_dec_my_emit_message (j_common_ptr cinfo, int msg_level)
237 /* GST_LOG_OBJECT (CINFO_GET_JPEGDEC (&cinfo), "msg_level=%d", msg_level); */
/* libjpeg error-manager callback for fatal errors: print the message via the
 * (overridden) output_message hook, then longjmp back to the setjmp point
 * established in gst_jpeg_dec_handle_frame(). Never returns. */
242 gst_jpeg_dec_my_error_exit (j_common_ptr cinfo)
244 struct GstJpegDecErrorMgr *err_mgr = (struct GstJpegDecErrorMgr *) cinfo->err;
246 (*cinfo->err->output_message) (cinfo);
247 longjmp (err_mgr->setjmp_buffer, 1);
/* Instance init: set up the libjpeg decompress object with our custom error
 * and source managers, and initialise property defaults. */
251 gst_jpeg_dec_init (GstJpegDec * dec)
253 GST_DEBUG ("initializing");
/* Zero both structs before handing them to libjpeg, then install our
 * error hooks on top of jpeg_std_error() defaults. */
256 memset (&dec->cinfo, 0, sizeof (dec->cinfo));
257 memset (&dec->jerr, 0, sizeof (dec->jerr));
258 dec->cinfo.err = jpeg_std_error (&dec->jerr.pub);
259 dec->jerr.pub.output_message = gst_jpeg_dec_my_output_message;
260 dec->jerr.pub.emit_message = gst_jpeg_dec_my_emit_message;
261 dec->jerr.pub.error_exit = gst_jpeg_dec_my_error_exit;
263 jpeg_create_decompress (&dec->cinfo);
/* Install the custom source manager that feeds libjpeg from our buffers. */
265 dec->cinfo.src = (struct jpeg_source_mgr *) &dec->jsrc;
266 dec->cinfo.src->init_source = gst_jpeg_dec_init_source;
267 dec->cinfo.src->fill_input_buffer = gst_jpeg_dec_fill_input_buffer;
268 dec->cinfo.src->skip_input_data = gst_jpeg_dec_skip_input_data;
269 dec->cinfo.src->resync_to_restart = gst_jpeg_dec_resync_to_restart;
270 dec->cinfo.src->term_source = gst_jpeg_dec_term_source;
273 /* init properties */
274 dec->idct_method = JPEG_DEFAULT_IDCT_METHOD;
275 dec->max_errors = JPEG_DEFAULT_MAX_ERRORS;
277 gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
279 GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (dec));
/* Returns TRUE for markers that are followed by entropy-coded data:
 * SOS (0xda) and the restart markers RST0..RST7 (0xd0-0xd7). */
282 static inline gboolean
283 gst_jpeg_dec_parse_tag_has_entropy_segment (guint8 tag)
285 if (tag == 0xda || (tag >= 0xd0 && tag <= 0xd7))
/* GstVideoDecoder::parse vfunc: scan the adapter for JPEG marker structure
 * (SOI ... EOI) and hand complete images to the base class. Maintains
 * saw_header / parse_resync / parse_entropy_len across calls so parsing can
 * resume when more data arrives. */
291 gst_jpeg_dec_parse (GstVideoDecoder * bdec, GstVideoCodecFrame * frame,
292 GstAdapter * adapter, gboolean at_eos)
297 gint offset = 0, noffset;
298 GstJpegDec *dec = (GstJpegDec *) bdec;
300 GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
302 /* FIXME : The overhead of using scan_uint32 is massive */
304 size = gst_adapter_available (adapter);
305 GST_DEBUG ("Parsing jpeg image data (%u bytes)", size);
308 GST_DEBUG ("Flushing all data out");
311 /* If we have leftover data, throw it away */
312 if (!dec->saw_header)
314 goto have_full_frame;
/* Look for the SOI start marker (0xffd8) before anything else. */
320 if (!dec->saw_header) {
322 /* we expect at least 4 bytes, first of which start marker */
324 gst_adapter_masked_scan_uint32 (adapter, 0xffff0000, 0xffd80000, 0,
327 GST_DEBUG ("ret:%d", ret);
/* Drop any garbage before the SOI marker. */
332 gst_adapter_flush (adapter, ret);
335 dec->saw_header = TRUE;
342 GST_DEBUG ("offset:%d, size:%d", offset, size);
/* Scan for the next 0xff marker byte from the current offset. */
345 gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
346 offset, size - offset, &value);
348 /* lost sync if 0xff marker not where expected */
349 if ((resync = (noffset != offset))) {
350 GST_DEBUG ("Lost sync at 0x%08x, resyncing", offset + 2);
352 /* may have marker, but could have been resyncing */
353 resync = resync || dec->parse_resync;
354 /* Skip over extra 0xff */
355 while ((noffset >= 0) && ((value & 0xff) == 0xff)) {
358 gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
359 noffset, size - noffset, &value);
361 /* enough bytes left for marker? (we need 0xNN after the 0xff) */
363 GST_DEBUG ("at end of input and no EOI marker found, need more data");
367 /* now lock on the marker we found */
369 value = value & 0xff;
/* EOI: the image is complete — reset parse state and emit the frame. */
371 GST_DEBUG ("0x%08x: EOI marker", offset + 2);
372 /* clear parse state */
373 dec->saw_header = FALSE;
374 dec->parse_resync = FALSE;
376 goto have_full_frame;
379 /* Skip this frame if we found another SOI marker */
380 GST_DEBUG ("0x%08x: SOI marker before EOI, skipping", offset + 2);
381 dec->parse_resync = FALSE;
/* RST0-RST7 restart markers carry no length field. */
387 if (value >= 0xd0 && value <= 0xd7)
390 /* peek tag and subsequent length */
391 if (offset + 2 + 4 > size)
394 gst_adapter_masked_scan_uint32_peek (adapter, 0x0, 0x0, offset + 2, 4,
396 frame_len = frame_len & 0xffff;
398 GST_DEBUG ("0x%08x: tag %02x, frame_len=%u", offset + 2, value, frame_len);
399 /* the frame length includes the 2 bytes for the length; here we want at
400 * least 2 more bytes at the end for an end marker */
401 if (offset + 2 + 2 + frame_len + 2 > size) {
/* SOS/RSTn are followed by entropy-coded data of unknown length: scan
 * forward for the next real marker (0xff not followed by 0x00 stuffing). */
405 if (gst_jpeg_dec_parse_tag_has_entropy_segment (value)) {
406 guint eseglen = dec->parse_entropy_len;
408 GST_DEBUG ("0x%08x: finding entropy segment length (eseglen:%d)",
409 offset + 2, eseglen);
410 if (size < offset + 2 + frame_len + eseglen)
412 noffset = offset + 2 + frame_len + dec->parse_entropy_len;
414 GST_DEBUG ("noffset:%d, size:%d, size - noffset:%d",
415 noffset, size, size - noffset);
416 noffset = gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00,
417 0x0000ff00, noffset, size - noffset, &value);
/* Not found yet: remember how far we scanned so the next call resumes here. */
420 dec->parse_entropy_len = size - offset - 4 - frame_len - 2;
423 if ((value & 0xff) != 0x00) {
424 eseglen = noffset - offset - frame_len - 2;
429 dec->parse_entropy_len = 0;
430 frame_len += eseglen;
431 GST_DEBUG ("entropy segment length=%u => frame_len=%u", eseglen,
435 /* check if we will still be in sync if we interpret
436 * this as a sync point and skip this frame */
437 noffset = offset + frame_len + 2;
438 noffset = gst_adapter_masked_scan_uint32 (adapter, 0x0000ff00, 0x0000ff00,
441 /* ignore and continue resyncing until we hit the end
442 * of our data or find a sync point that looks okay */
446 GST_DEBUG ("found sync at 0x%x", offset + 2);
449 /* Add current data to output buffer */
450 toadd += frame_len + 2;
451 offset += frame_len + 2;
/* Ran out of data mid-image: stash what we have and ask for more. */
456 gst_video_decoder_add_to_frame (bdec, toadd);
457 return GST_VIDEO_DECODER_FLOW_NEED_DATA;
/* Complete image collected: push it to the base class as one frame. */
461 gst_video_decoder_add_to_frame (bdec, toadd);
462 GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
463 return gst_video_decoder_have_frame (bdec);
466 gst_adapter_flush (adapter, size);
471 /* shamelessly ripped from jpegutils.c in mjpegtools */
/* Install one Huffman table into the decompressor, allocating the table
 * if not already present. bits[] holds the per-code-length symbol counts
 * (1-based, 16 entries), val[] the symbol values. */
473 add_huff_table (j_decompress_ptr dinfo,
474 JHUFF_TBL ** htblptr, const UINT8 * bits, const UINT8 * val)
475 /* Define a Huffman table */
479 if (*htblptr == NULL)
480 *htblptr = jpeg_alloc_huff_table ((j_common_ptr) dinfo);
484 /* Copy the number-of-symbols-of-each-code-length counts */
485 memcpy ((*htblptr)->bits, bits, sizeof ((*htblptr)->bits));
487 /* Validate the counts. We do this here mainly so we can copy the right
488 * number of symbols from the val[] array, without risking marching off
489 * the end of memory. jchuff.c will do a more thorough test later.
492 for (len = 1; len <= 16; len++)
493 nsymbols += bits[len];
494 if (nsymbols < 1 || nsymbols > 256)
495 g_error ("jpegutils.c: add_huff_table failed badly. ");
497 memcpy ((*htblptr)->huffval, val, nsymbols * sizeof (UINT8));
/* Install the four standard JPEG Huffman tables (DC/AC x luma/chroma) from
 * JPEG standard Annex K.3, for streams that omit DHT segments (e.g. some
 * MJPEG). 8-bit precision only. */
503 std_huff_tables (j_decompress_ptr dinfo)
504 /* Set up the standard Huffman tables (cf. JPEG standard section K.3) */
505 /* IMPORTANT: these are only valid for 8-bit data precision! */
507 static const UINT8 bits_dc_luminance[17] =
508 { /* 0-base */ 0, 0, 1, 5, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0 };
509 static const UINT8 val_dc_luminance[] =
510 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
512 static const UINT8 bits_dc_chrominance[17] =
513 { /* 0-base */ 0, 0, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0 };
514 static const UINT8 val_dc_chrominance[] =
515 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
517 static const UINT8 bits_ac_luminance[17] =
518 { /* 0-base */ 0, 0, 2, 1, 3, 3, 2, 4, 3, 5, 5, 4, 4, 0, 0, 1, 0x7d };
519 static const UINT8 val_ac_luminance[] =
520 { 0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12,
521 0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07,
522 0x22, 0x71, 0x14, 0x32, 0x81, 0x91, 0xa1, 0x08,
523 0x23, 0x42, 0xb1, 0xc1, 0x15, 0x52, 0xd1, 0xf0,
524 0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16,
525 0x17, 0x18, 0x19, 0x1a, 0x25, 0x26, 0x27, 0x28,
526 0x29, 0x2a, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39,
527 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49,
528 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59,
529 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69,
530 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79,
531 0x7a, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89,
532 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98,
533 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
534 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6,
535 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4, 0xc5,
536 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4,
537 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xe1, 0xe2,
538 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea,
539 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
543 static const UINT8 bits_ac_chrominance[17] =
544 { /* 0-base */ 0, 0, 2, 1, 2, 4, 4, 3, 4, 7, 5, 4, 4, 0, 1, 2, 0x77 };
545 static const UINT8 val_ac_chrominance[] =
546 { 0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21,
547 0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71,
548 0x13, 0x22, 0x32, 0x81, 0x08, 0x14, 0x42, 0x91,
549 0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33, 0x52, 0xf0,
550 0x15, 0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34,
551 0xe1, 0x25, 0xf1, 0x17, 0x18, 0x19, 0x1a, 0x26,
552 0x27, 0x28, 0x29, 0x2a, 0x35, 0x36, 0x37, 0x38,
553 0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
554 0x49, 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58,
555 0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
556 0x69, 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78,
557 0x79, 0x7a, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
558 0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96,
559 0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5,
560 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4,
561 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3,
562 0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2,
563 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda,
564 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9,
565 0xea, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
569 add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[0],
570 bits_dc_luminance, val_dc_luminance);
571 add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[0],
572 bits_ac_luminance, val_ac_luminance);
573 add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[1],
574 bits_dc_chrominance, val_dc_chrominance);
575 add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[1],
576 bits_ac_chrominance, val_ac_chrominance);
/* If the stream defined no Huffman tables at all, fall back to the standard
 * Annex K.3 tables; if any table is present, leave everything untouched. */
582 guarantee_huff_tables (j_decompress_ptr dinfo)
584 if ((dinfo->dc_huff_tbl_ptrs[0] == NULL) &&
585 (dinfo->dc_huff_tbl_ptrs[1] == NULL) &&
586 (dinfo->ac_huff_tbl_ptrs[0] == NULL) &&
587 (dinfo->ac_huff_tbl_ptrs[1] == NULL)) {
588 GST_DEBUG ("Generating standard Huffman tables for this frame.");
589 std_huff_tables (dinfo);
/* GstVideoDecoder::set_format vfunc: remember the new input state,
 * dropping any previously cached one. */
594 gst_jpeg_dec_set_format (GstVideoDecoder * dec, GstVideoCodecState * state)
596 GstJpegDec *jpeg = GST_JPEG_DEC (dec);
598 if (jpeg->input_state)
599 gst_video_codec_state_unref (jpeg->input_state);
600 jpeg->input_state = gst_video_codec_state_ref (state);
/* Copy a chroma row while horizontally downsampling by 2: takes every
 * second source sample (src[i << 1]) into dest[i]. */
608 hresamplecpy1 (guint8 * dest, const guint8 * src, guint len)
612 for (i = 0; i < len; ++i) {
613 /* equivalent to: dest[i] = src[i << 1] */
/* Free the 16 per-plane intermediate row buffers used for indirect decoding
 * and reset the recorded allocation width. */
622 gst_jpeg_dec_free_buffers (GstJpegDec * dec)
626 for (i = 0; i < 16; i++) {
627 g_free (dec->idr_y[i]);
628 g_free (dec->idr_u[i]);
629 g_free (dec->idr_v[i]);
630 dec->idr_y[i] = NULL;
631 dec->idr_u[i] = NULL;
632 dec->idr_v[i] = NULL;
635 dec->idr_width_allocated = 0;
/* (Re)allocate the 16 intermediate row buffers per plane so each holds
 * maxrowbytes bytes; no-op when already sized correctly. */
638 static inline gboolean
639 gst_jpeg_dec_ensure_buffers (GstJpegDec * dec, guint maxrowbytes)
643 if (G_LIKELY (dec->idr_width_allocated == maxrowbytes))
646 /* FIXME: maybe just alloc one or three blocks altogether? */
647 for (i = 0; i < 16; i++) {
/* NOTE(review): assigning g_try_realloc() back to the same pointer leaks
 * the old allocation if realloc fails (pointer becomes NULL); a temporary
 * pointer would avoid that — confirm before changing. */
648 dec->idr_y[i] = g_try_realloc (dec->idr_y[i], maxrowbytes);
649 dec->idr_u[i] = g_try_realloc (dec->idr_u[i], maxrowbytes);
650 dec->idr_v[i] = g_try_realloc (dec->idr_v[i], maxrowbytes);
652 if (G_UNLIKELY (!dec->idr_y[i] || !dec->idr_u[i] || !dec->idr_v[i])) {
653 GST_WARNING_OBJECT (dec, "out of memory, i=%d, bytes=%u", i, maxrowbytes);
658 dec->idr_width_allocated = maxrowbytes;
659 GST_LOG_OBJECT (dec, "allocated temp memory, %u bytes/row", maxrowbytes);
/* Decode a grayscale JPEG via intermediate row buffers, then copy pixel by
 * pixel into the output frame (honouring pixel and row strides, and the
 * field offset for interlaced content). */
664 gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, GstVideoFrame * frame,
665 guint field, guint num_fields)
668 guchar **scanarray[1] = { rows };
673 gint pstride, rstride;
675 GST_DEBUG_OBJECT (dec, "indirect decoding of grayscale");
677 width = GST_VIDEO_FRAME_WIDTH (frame);
678 height = GST_VIDEO_FRAME_HEIGHT (frame) / num_fields;
680 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
683 base[0] = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
/* Bottom field starts one row down. */
685 base[0] += GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
688 pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
/* Skip the other field's rows when writing one field of an interlaced pair. */
689 rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0) * num_fields;
691 memcpy (rows, dec->idr_y, 16 * sizeof (gpointer));
695 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
696 if (G_LIKELY (lines > 0)) {
697 for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
701 for (k = 0; k < width; k++) {
702 base[0][p] = rows[j][k];
708 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Decode an RGB JPEG via intermediate row buffers, then interleave the
 * three planes pixel by pixel into the (packed) output frame. */
714 gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame,
715 guint field, guint num_fields)
717 guchar *r_rows[16], *g_rows[16], *b_rows[16];
718 guchar **scanarray[3] = { r_rows, g_rows, b_rows };
722 guint pstride, rstride;
725 GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");
727 width = GST_VIDEO_FRAME_WIDTH (frame);
728 height = GST_VIDEO_FRAME_HEIGHT (frame) / num_fields;
730 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
733 for (i = 0; i < 3; i++) {
734 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
/* Bottom field starts one row down. */
736 base[i] += GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
739 pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
/* Skip the other field's rows when writing one field of an interlaced pair. */
740 rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0) * num_fields;
/* Reuse the idr_* scratch rows as R/G/B row pointers. */
742 memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
743 memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
744 memcpy (b_rows, dec->idr_v, 16 * sizeof (gpointer));
748 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
749 if (G_LIKELY (lines > 0)) {
750 for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
754 for (k = 0; k < width; k++) {
755 base[0][p] = r_rows[j][k];
756 base[1][p] = g_rows[j][k];
757 base[2][p] = b_rows[j][k];
765 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Decode a YUV JPEG via intermediate row buffers ("indirect" path), used
 * when the image width/sampling doesn't let jpeglib write straight into the
 * output frame. Rows are decoded into scratch buffers and memcpy'd (or
 * horizontally resampled) into the output planes, clamped to 'last[i]' so
 * jpeglib padding can never write past the frame. */
771 gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame, gint r_v,
772 gint r_h, gint comp, guint field, guint num_fields)
774 guchar *y_rows[16], *u_rows[16], *v_rows[16];
775 guchar **scanarray[3] = { y_rows, u_rows, v_rows };
778 guchar *base[3], *last[3];
779 gint rowsize[3], stride[3];
782 GST_DEBUG_OBJECT (dec,
783 "unadvantageous width or r_h, taking slow route involving memcpy");
785 width = GST_VIDEO_FRAME_WIDTH (frame);
786 height = GST_VIDEO_FRAME_HEIGHT (frame);
788 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
791 for (i = 0; i < 3; i++) {
792 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
793 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i) * num_fields;
794 rowsize[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
795 /* make sure we don't make jpeglib write beyond our buffer,
796 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
797 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
798 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
801 base[i] += GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
805 memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer));
806 memcpy (u_rows, dec->idr_u, 16 * sizeof (gpointer));
807 memcpy (v_rows, dec->idr_v, 16 * sizeof (gpointer));
809 /* fill chroma components for grayscale */
811 GST_DEBUG_OBJECT (dec, "grayscale, filling chroma");
812 for (i = 0; i < 16; i++) {
/* FIX: memset is (dest, value, count) — the value/size arguments were
 * swapped, which filled only 0x80 bytes with (width & 0xff) instead of
 * filling the whole padded row with the neutral chroma value 0x80. */
813 memset (u_rows[i], 0x80, GST_ROUND_UP_32 (width));
814 memset (v_rows[i], 0x80, GST_ROUND_UP_32 (width));
818 for (i = 0; i < height; i += r_v * DCTSIZE) {
819 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, r_v * DCTSIZE);
820 if (G_LIKELY (lines > 0)) {
821 for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) {
822 if (G_LIKELY (base[0] <= last[0])) {
823 memcpy (base[0], y_rows[j], rowsize[0]);
824 base[0] += stride[0];
827 if (G_LIKELY (base[0] <= last[0])) {
828 memcpy (base[0], y_rows[j + 1], rowsize[0]);
829 base[0] += stride[0];
832 if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) {
834 memcpy (base[1], u_rows[k], rowsize[1]);
835 memcpy (base[2], v_rows[k], rowsize[2]);
836 } else if (r_h == 1) {
/* Horizontally downsample chroma by 2 while copying. */
837 hresamplecpy1 (base[1], u_rows[k], rowsize[1]);
838 hresamplecpy1 (base[2], v_rows[k], rowsize[2]);
840 /* FIXME: implement (at least we avoid crashing by doing nothing) */
/* Advance chroma rows every line for 4:2:0 (r_v==2) handled above,
 * or every other luma line otherwise. */
844 if (r_v == 2 || (k & 1) != 0) {
845 base[1] += stride[1];
846 base[2] += stride[2];
850 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Fast path: build per-plane row-pointer tables into the output frame and
 * let jpeglib decode directly into it (no intermediate copy). Row pointers
 * are clamped to 'last[i]' so jpeglib's block padding cannot write past the
 * end of a plane. Only vertical sampling factors <= 2 are handled. */
856 gst_jpeg_dec_decode_direct (GstJpegDec * dec, GstVideoFrame * frame,
857 guint field, guint num_fields)
859 guchar **line[3]; /* the jpeg line buffer */
860 guchar *y[4 * DCTSIZE] = { NULL, }; /* alloc enough for the lines */
861 guchar *u[4 * DCTSIZE] = { NULL, }; /* r_v will be <4 */
862 guchar *v[4 * DCTSIZE] = { NULL, };
864 gint lines, v_samp[3];
865 guchar *base[3], *last[3];
873 v_samp[0] = dec->cinfo.comp_info[0].v_samp_factor;
874 v_samp[1] = dec->cinfo.comp_info[1].v_samp_factor;
875 v_samp[2] = dec->cinfo.comp_info[2].v_samp_factor;
877 if (G_UNLIKELY (v_samp[0] > 2 || v_samp[1] > 2 || v_samp[2] > 2))
878 goto format_not_supported;
880 height = GST_VIDEO_FRAME_HEIGHT (frame);
882 for (i = 0; i < 3; i++) {
883 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
884 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i) * num_fields;
885 /* make sure we don't make jpeglib write beyond our buffer,
886 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
887 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
888 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
/* Bottom field starts one row down. */
891 base[i] += GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
895 /* let jpeglib decode directly into our final buffer */
896 GST_DEBUG_OBJECT (dec, "decoding directly into output buffer");
898 for (i = 0; i < height; i += v_samp[0] * DCTSIZE) {
899 for (j = 0; j < (v_samp[0] * DCTSIZE); ++j) {
901 line[0][j] = base[0] + (i + j) * stride[0];
902 if (G_UNLIKELY (line[0][j] > last[0]))
903 line[0][j] = last[0];
905 if (v_samp[1] == v_samp[0]) {
906 line[1][j] = base[1] + ((i + j) / 2) * stride[1];
907 } else if (j < (v_samp[1] * DCTSIZE)) {
908 line[1][j] = base[1] + ((i / 2) + j) * stride[1];
910 if (G_UNLIKELY (line[1][j] > last[1]))
911 line[1][j] = last[1];
913 if (v_samp[2] == v_samp[0]) {
914 line[2][j] = base[2] + ((i + j) / 2) * stride[2];
915 } else if (j < (v_samp[2] * DCTSIZE)) {
916 line[2][j] = base[2] + ((i / 2) + j) * stride[2];
918 if (G_UNLIKELY (line[2][j] > last[2]))
919 line[2][j] = last[2];
922 lines = jpeg_read_raw_data (&dec->cinfo, line, v_samp[0] * DCTSIZE);
923 if (G_UNLIKELY (!lines)) {
924 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
929 format_not_supported:
/* NOTE(review): 'ret' holds a GstFlowReturn but is declared gboolean; both
 * are gint so behavior is unchanged, yet GstFlowReturn would be the
 * accurate type — confirm before changing. */
931 gboolean ret = GST_FLOW_OK;
933 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
934 (_("Failed to decode JPEG image")),
935 ("Unsupported subsampling schema: v_samp factors: %u %u %u", v_samp[0],
936 v_samp[1], v_samp[2]), ret);
/* Configure/refresh the downstream output state: pick the GstVideoFormat
 * for the JPEG colourspace, skip renegotiation when nothing changed, set
 * JPEG colorimetry (full-range BT.601) and interlacing, then negotiate. */
943 gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc,
946 GstVideoCodecState *outstate;
948 GstVideoFormat format;
952 format = GST_VIDEO_FORMAT_RGB;
955 format = GST_VIDEO_FORMAT_GRAY8;
958 format = GST_VIDEO_FORMAT_I420;
962 /* Compare to currently configured output state */
963 outstate = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (dec));
965 info = &outstate->info;
967 if (width == GST_VIDEO_INFO_WIDTH (info) &&
968 height == GST_VIDEO_INFO_HEIGHT (info) &&
969 format == GST_VIDEO_INFO_FORMAT (info)) {
/* Nothing changed; keep the existing output state. */
970 gst_video_codec_state_unref (outstate);
973 gst_video_codec_state_unref (outstate);
977 gst_video_decoder_set_output_state (GST_VIDEO_DECODER (dec), format,
978 width, height, dec->input_state);
/* JPEG is full-range BT.601 with unspecified transfer/primaries. */
985 outstate->info.colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;
986 outstate->info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
987 outstate->info.colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN;
988 outstate->info.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
993 outstate->info.interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
994 GST_VIDEO_INFO_FIELD_ORDER (&outstate->info) =
995 GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST;
998 gst_video_codec_state_unref (outstate);
1000 gst_video_decoder_negotiate (GST_VIDEO_DECODER (dec));
1002 GST_DEBUG_OBJECT (dec, "max_v_samp_factor=%d", dec->cinfo.max_v_samp_factor);
1003 GST_DEBUG_OBJECT (dec, "max_h_samp_factor=%d", dec->cinfo.max_h_samp_factor);
/* Read and validate the JPEG header, configure libjpeg for raw (planar)
 * output and start the decompress cycle. Rejects unsupported component
 * counts, colourspaces, sampling layouts and out-of-range dimensions,
 * reporting each via GST_VIDEO_DECODER_ERROR and aborting decompression. */
1006 static GstFlowReturn
1007 gst_jpeg_dec_prepare_decode (GstJpegDec * dec)
1009 G_GNUC_UNUSED GstFlowReturn ret;
1010 guint r_h, r_v, hdr_ok;
1013 hdr_ok = jpeg_read_header (&dec->cinfo, TRUE);
1014 if (G_UNLIKELY (hdr_ok != JPEG_HEADER_OK)) {
1015 GST_WARNING_OBJECT (dec, "reading the header failed, %d", hdr_ok);
1018 GST_LOG_OBJECT (dec, "num_components=%d", dec->cinfo.num_components);
1019 GST_LOG_OBJECT (dec, "jpeg_color_space=%d", dec->cinfo.jpeg_color_space);
1021 if (!dec->cinfo.num_components || !dec->cinfo.comp_info)
1022 goto components_not_supported;
/* Sampling factors of the first (luma) component. */
1024 r_h = dec->cinfo.comp_info[0].h_samp_factor;
1025 r_v = dec->cinfo.comp_info[0].v_samp_factor;
1027 GST_LOG_OBJECT (dec, "r_h = %d, r_v = %d", r_h, r_v);
1029 if (dec->cinfo.num_components > 3)
1030 goto components_not_supported;
1032 /* verify color space expectation to avoid going *boom* or bogus output */
1033 if (dec->cinfo.jpeg_color_space != JCS_YCbCr &&
1034 dec->cinfo.jpeg_color_space != JCS_GRAYSCALE &&
1035 dec->cinfo.jpeg_color_space != JCS_RGB)
1036 goto unsupported_colorspace;
1038 #ifndef GST_DISABLE_GST_DEBUG
1042 for (i = 0; i < dec->cinfo.num_components; ++i) {
1043 GST_LOG_OBJECT (dec, "[%d] h_samp_factor=%d, v_samp_factor=%d, cid=%d",
1044 i, dec->cinfo.comp_info[i].h_samp_factor,
1045 dec->cinfo.comp_info[i].v_samp_factor,
1046 dec->cinfo.comp_info[i].component_id);
1051 /* prepare for raw output */
1052 dec->cinfo.do_fancy_upsampling = FALSE;
1053 dec->cinfo.do_block_smoothing = FALSE;
1054 dec->cinfo.out_color_space = dec->cinfo.jpeg_color_space;
1055 dec->cinfo.dct_method = dec->idct_method;
1056 dec->cinfo.raw_data_out = TRUE;
1058 GST_LOG_OBJECT (dec, "starting decompress");
1059 guarantee_huff_tables (&dec->cinfo);
1060 if (!jpeg_start_decompress (&dec->cinfo)) {
1061 GST_WARNING_OBJECT (dec, "failed to start decompression cycle");
1064 /* sanity checks to get safe and reasonable output */
1065 switch (dec->cinfo.jpeg_color_space) {
1067 if (dec->cinfo.num_components != 1)
1068 goto invalid_yuvrgbgrayscale;
/* RGB must be 3 unsubsampled components. */
1071 if (dec->cinfo.num_components != 3 || dec->cinfo.max_v_samp_factor > 1 ||
1072 dec->cinfo.max_h_samp_factor > 1)
1073 goto invalid_yuvrgbgrayscale;
/* YCbCr: luma sampling must dominate chroma, with v_samp <= 2. */
1076 if (dec->cinfo.num_components != 3 ||
1077 r_v > 2 || r_v < dec->cinfo.comp_info[0].v_samp_factor ||
1078 r_v < dec->cinfo.comp_info[1].v_samp_factor ||
1079 r_h < dec->cinfo.comp_info[0].h_samp_factor ||
1080 r_h < dec->cinfo.comp_info[1].h_samp_factor)
1081 goto invalid_yuvrgbgrayscale;
1084 g_assert_not_reached ();
1088 if (G_UNLIKELY (dec->cinfo.output_width < MIN_WIDTH ||
1089 dec->cinfo.output_width > MAX_WIDTH ||
1090 dec->cinfo.output_height < MIN_HEIGHT ||
1091 dec->cinfo.output_height > MAX_HEIGHT))
/* Error exits below: report a decoder error, abort the libjpeg cycle
 * where one was started, and bail out with GST_FLOW_ERROR. */
1099 ret = GST_FLOW_ERROR;
1100 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1101 (_("Failed to decode JPEG image")),
1102 ("Picture is too small or too big (%ux%u)", dec->cinfo.output_width,
1103 dec->cinfo.output_height), ret);
1104 return GST_FLOW_ERROR;
1106 components_not_supported:
1108 ret = GST_FLOW_ERROR;
1109 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1110 (_("Failed to decode JPEG image")),
1111 ("number of components not supported: %d (max 3)",
1112 dec->cinfo.num_components), ret);
1113 jpeg_abort_decompress (&dec->cinfo);
1114 return GST_FLOW_ERROR;
1116 unsupported_colorspace:
1118 ret = GST_FLOW_ERROR;
1119 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1120 (_("Failed to decode JPEG image")),
1121 ("Picture has unknown or unsupported colourspace"), ret);
1122 jpeg_abort_decompress (&dec->cinfo);
1123 return GST_FLOW_ERROR;
1125 invalid_yuvrgbgrayscale:
1127 ret = GST_FLOW_ERROR;
1128 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1129 (_("Failed to decode JPEG image")),
1130 ("Picture is corrupt or unhandled YUV/RGB/grayscale layout"), ret);
1131 jpeg_abort_decompress (&dec->cinfo);
1132 return GST_FLOW_ERROR;
/* Dispatch decoding of one image/field into vframe: RGB and grayscale go
 * through their dedicated indirect paths; YCbCr uses the direct path unless
 * the width/sampling layout forces the slower indirect copy. Finishes or
 * aborts the libjpeg decompress cycle depending on the result. */
1136 static GstFlowReturn
1137 gst_jpeg_dec_decode (GstJpegDec * dec, GstVideoFrame * vframe, guint width,
1138 guint height, guint field, guint num_fields)
1140 GstFlowReturn ret = GST_FLOW_OK;
1142 if (dec->cinfo.jpeg_color_space == JCS_RGB) {
1143 gst_jpeg_dec_decode_rgb (dec, vframe, field, num_fields);
1144 } else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
1145 gst_jpeg_dec_decode_grayscale (dec, vframe, field, num_fields);
1147 GST_LOG_OBJECT (dec, "decompressing (required scanline buffer height = %u)",
1148 dec->cinfo.rec_outbuf_height);
1150 /* For some widths jpeglib requires more horizontal padding than I420
1151 * provides. In those cases we need to decode into separate buffers and then
1152 * copy over the data into our final picture buffer, otherwise jpeglib might
1153 * write over the end of a line into the beginning of the next line,
1154 * resulting in blocky artifacts on the left side of the picture. */
1155 if (G_UNLIKELY (width % (dec->cinfo.max_h_samp_factor * DCTSIZE) != 0
1156 || dec->cinfo.comp_info[0].h_samp_factor != 2
1157 || dec->cinfo.comp_info[1].h_samp_factor != 1
1158 || dec->cinfo.comp_info[2].h_samp_factor != 1)) {
1159 GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
1160 "indirect decoding using extra buffer copy");
1161 gst_jpeg_dec_decode_indirect (dec, vframe,
1162 dec->cinfo.comp_info[0].v_samp_factor,
1163 dec->cinfo.comp_info[0].h_samp_factor, dec->cinfo.num_components,
/* Standard 4:2:0 layout: decode straight into the output buffer. */
1166 ret = gst_jpeg_dec_decode_direct (dec, vframe, field, num_fields);
1170 GST_LOG_OBJECT (dec, "decompressing finished: %s", gst_flow_get_name (ret));
1172 if (G_UNLIKELY (ret != GST_FLOW_OK)) {
1173 jpeg_abort_decompress (&dec->cinfo);
1175 jpeg_finish_decompress (&dec->cinfo);
/* gst_jpeg_dec_handle_frame:
 * GstVideoDecoder::handle_frame implementation.  Decodes one JPEG image
 * (possibly two fields of an interlaced MJPEG picture packed in one buffer)
 * from frame->input_buffer into a freshly allocated output frame.
 * libjpeg reports fatal errors via longjmp(), so setjmp() is (re-)armed at
 * several points below to unwind to the matching error path.
 */
1181 static GstFlowReturn
1182 gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec, GstVideoCodecFrame * frame)
1184 GstFlowReturn ret = GST_FLOW_OK;
1185 GstJpegDec *dec = (GstJpegDec *) bdec;
1186 GstVideoFrame vframe;
1187 gint num_fields; /* number of fields (1 or 2) */
1188 gint output_height; /* height of output image (one or two fields) */
1189 gint height; /* height of current frame (whole image or a field) */
1192 gboolean need_unmap = TRUE;
1193 GstVideoCodecState *state = NULL;
1194 gboolean release_frame = TRUE;
1196 dec->current_frame = frame;
1197 gst_buffer_map (frame->input_buffer, &dec->current_frame_map, GST_MAP_READ);
/* point the libjpeg source manager directly at the mapped input data */
1199 dec->cinfo.src->next_input_byte = dec->current_frame_map.data;
1200 dec->cinfo.src->bytes_in_buffer = dec->current_frame_map.size;
/* error trampoline #1: longjmp target for errors during header parsing */
1202 if (setjmp (dec->jerr.setjmp_buffer)) {
1203 code = dec->jerr.pub.msg_code;
1205 if (code == JERR_INPUT_EOF) {
1206 GST_DEBUG ("jpeg input EOF error, we probably need more data");
1207 goto need_more_data;
1212 /* read header and check values */
1213 ret = gst_jpeg_dec_prepare_decode (dec);
1214 if (G_UNLIKELY (ret == GST_FLOW_ERROR))
1217 width = dec->cinfo.output_width;
1218 height = dec->cinfo.output_height;
1220 /* is it interlaced MJPEG? (we really don't want to scan the jpeg data
1221 * to see if there are two SOF markers in the packet to detect this) */
/* heuristic: caps say the picture is (up to) twice as tall as one JPEG
 * frame with matching width -> assume two fields per input buffer */
1222 if (gst_video_decoder_get_packetized (bdec) &&
1223 dec->input_state->info.height <= (height * 2)
1224 && dec->input_state->info.width == width) {
1225 GST_LOG_OBJECT (dec,
1226 "looks like an interlaced image: "
1227 "input width/height of %dx%d with JPEG frame width/height of %dx%d",
1228 dec->input_state->info.height, dec->input_state->info.width, width,
1230 output_height = dec->input_state->info.height;
1231 height = dec->input_state->info.height / 2;
1233 GST_LOG_OBJECT (dec, "field height=%d", height);
1235 output_height = height;
1239 gst_jpeg_dec_negotiate (dec, width, output_height,
1240 dec->cinfo.jpeg_color_space, num_fields == 2);
1242 state = gst_video_decoder_get_output_state (bdec);
1243 ret = gst_video_decoder_allocate_output_frame (bdec, frame);
1244 if (G_UNLIKELY (ret != GST_FLOW_OK))
1247 if (!gst_video_frame_map (&vframe, &state->info, frame->output_buffer,
/* error trampoline #2: re-arm so errors while decoding the first image
 * also unmap the output frame before unwinding */
1251 if (setjmp (dec->jerr.setjmp_buffer)) {
1252 code = dec->jerr.pub.msg_code;
1253 gst_video_frame_unmap (&vframe);
1257 GST_LOG_OBJECT (dec, "width %d, height %d, fields %d", width, output_height,
1260 ret = gst_jpeg_dec_decode (dec, &vframe, width, height, 1, num_fields);
1261 if (G_UNLIKELY (ret != GST_FLOW_OK)) {
1262 gst_video_frame_unmap (&vframe);
/* error trampoline #3: same unwind semantics for the second field */
1266 if (setjmp (dec->jerr.setjmp_buffer)) {
1267 code = dec->jerr.pub.msg_code;
1268 gst_video_frame_unmap (&vframe);
1272 /* decode second field if there is one */
1273 if (num_fields == 2) {
1274 GstVideoFormat field2_format;
1276 /* skip any chunk or padding bytes before the next SOI marker; both fields
1277 * are in one single buffer here, so direct access should be fine here */
1278 while (dec->jsrc.pub.bytes_in_buffer > 2 &&
1279 GST_READ_UINT16_BE (dec->jsrc.pub.next_input_byte) != 0xffd8) {
1280 --dec->jsrc.pub.bytes_in_buffer;
1281 ++dec->jsrc.pub.next_input_byte;
1284 if (gst_jpeg_dec_prepare_decode (dec) != GST_FLOW_OK) {
1285 GST_WARNING_OBJECT (dec, "problem reading jpeg header of 2nd field");
1286 /* FIXME: post a warning message here? */
1287 gst_video_frame_unmap (&vframe);
1291 /* check if format has changed for the second field */
1292 switch (dec->cinfo.jpeg_color_space) {
1294 field2_format = GST_VIDEO_FORMAT_RGB;
1297 field2_format = GST_VIDEO_FORMAT_GRAY8;
1300 field2_format = GST_VIDEO_FORMAT_I420;
/* bail out if the second field's size or format mismatches the
 * negotiated output state */
1304 if (dec->cinfo.output_width != GST_VIDEO_INFO_WIDTH (&state->info) ||
1305 dec->cinfo.output_height * 2 > GST_VIDEO_INFO_HEIGHT (&state->info) ||
1306 field2_format != GST_VIDEO_INFO_FORMAT (&state->info)) {
1307 GST_WARNING_OBJECT (dec, "second field has different format than first");
1308 gst_video_frame_unmap (&vframe);
1312 ret = gst_jpeg_dec_decode (dec, &vframe, width, height, 2, 2);
1313 if (G_UNLIKELY (ret != GST_FLOW_OK)) {
1314 gst_video_frame_unmap (&vframe);
/* success: unmap everything and push the decoded frame downstream */
1318 gst_video_frame_unmap (&vframe);
1320 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1321 ret = gst_video_decoder_finish_frame (bdec, frame);
1322 release_frame = FALSE;
/* common exit/cleanup path: unmap input, release unconsumed frame, unref
 * output state (the goto labels themselves are elided in this excerpt --
 * NOTE(review): confirm label names against the full source) */
1330 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1333 gst_video_decoder_release_frame (bdec, frame);
1336 gst_video_codec_state_unref (state);
/* need_more_data path (non-packetized input): keep waiting for more bytes */
1343 GST_LOG_OBJECT (dec, "we need more data");
/* decode-error path: post a (rate-limited) element error and drop the frame */
1350 gchar err_msg[JMSG_LENGTH_MAX];
1352 dec->jerr.pub.format_message ((j_common_ptr) (&dec->cinfo), err_msg);
1354 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1355 (_("Failed to decode JPEG image")), ("Decode error #%u: %s", code,
1358 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1359 gst_video_decoder_drop_frame (bdec, frame);
1360 release_frame = FALSE;
1362 jpeg_abort_decompress (&dec->cinfo);
1368 /* already posted an error message */
/* alloc-failed path: only raise an element error for flow returns that are
 * not ordinary shutdown/flush conditions */
1373 const gchar *reason;
1375 reason = gst_flow_get_name (ret);
1377 GST_DEBUG_OBJECT (dec, "failed to alloc buffer, reason %s", reason);
1378 /* Reset for next time */
1379 jpeg_abort_decompress (&dec->cinfo);
1380 if (ret != GST_FLOW_EOS && ret != GST_FLOW_FLUSHING &&
1381 ret != GST_FLOW_NOT_LINKED) {
1382 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1383 (_("Failed to decode JPEG image")),
1384 ("Buffer allocation failed, reason: %s", reason), ret);
1385 jpeg_abort_decompress (&dec->cinfo);
/* gst_jpeg_dec_decide_allocation:
 * GstVideoDecoder::decide_allocation implementation.  Chains up to the
 * parent class first, then, if downstream proposed a buffer pool and
 * supports GstVideoMeta, enables the video-meta option on that pool so
 * buffers can carry per-plane stride/offset information.
 */
1392 gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query)
1394 GstBufferPool *pool = NULL;
1395 GstStructure *config;
/* let the base class do the default allocation negotiation first */
1397 if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query))
1400 if (gst_query_get_n_allocation_pools (query) > 0)
1401 gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
1406 config = gst_buffer_pool_get_config (pool);
/* only add the option when downstream advertised GstVideoMeta support */
1407 if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
1408 gst_buffer_pool_config_add_option (config,
1409 GST_BUFFER_POOL_OPTION_VIDEO_META);
1411 gst_buffer_pool_set_config (pool, config);
1412 gst_object_unref (pool);
/* gst_jpeg_dec_sink_event:
 * GstVideoDecoder::sink_event implementation.  Intercepts SEGMENT events to
 * toggle packetized mode: a TIME-format segment implies each input buffer is
 * a complete (timestamped) JPEG frame; any other format requires parsing.
 * All events are then forwarded to the parent class.
 */
1418 gst_jpeg_dec_sink_event (GstVideoDecoder * bdec, GstEvent * event)
1420 const GstSegment *segment;
/* only SEGMENT events are of interest here */
1422 if (GST_EVENT_TYPE (event) != GST_EVENT_SEGMENT)
1425 gst_event_parse_segment (event, &segment);
1427 if (segment->format == GST_FORMAT_TIME)
1428 gst_video_decoder_set_packetized (bdec, TRUE);
1430 gst_video_decoder_set_packetized (bdec, FALSE);
1433 return GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (bdec, event);
/* gst_jpeg_dec_start:
 * GstVideoDecoder::start implementation.  Resets the parse state and
 * defaults to non-packetized mode (a later TIME-format segment may switch
 * this, see gst_jpeg_dec_sink_event).
 */
1437 gst_jpeg_dec_start (GstVideoDecoder * bdec)
1439 GstJpegDec *dec = (GstJpegDec *) bdec;
1441 dec->saw_header = FALSE;
1442 dec->parse_entropy_len = 0;
1443 dec->parse_resync = FALSE;
1445 gst_video_decoder_set_packetized (bdec, FALSE);
/* gst_jpeg_dec_flush:
 * GstVideoDecoder::flush implementation.  Aborts any in-progress libjpeg
 * decompress cycle and resets the parse state so decoding can restart
 * cleanly after a seek/flush.
 */
1451 gst_jpeg_dec_flush (GstVideoDecoder * bdec)
1453 GstJpegDec *dec = (GstJpegDec *) bdec;
1455 jpeg_abort_decompress (&dec->cinfo);
1456 dec->parse_entropy_len = 0;
1457 dec->parse_resync = FALSE;
1458 dec->saw_header = FALSE;
/* gst_jpeg_dec_set_property:
 * GObject property setter for the "idct-method" and (deprecated)
 * "max-errors" properties.  max-errors is written atomically because it is
 * read from the streaming thread.
 */
1464 gst_jpeg_dec_set_property (GObject * object, guint prop_id,
1465 const GValue * value, GParamSpec * pspec)
1469 dec = GST_JPEG_DEC (object);
1472 case PROP_IDCT_METHOD:
1473 dec->idct_method = g_value_get_enum (value);
1475 #ifndef GST_REMOVE_DEPRECATED
1476 case PROP_MAX_ERRORS:
1477 g_atomic_int_set (&dec->max_errors, g_value_get_int (value));
1481 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* gst_jpeg_dec_get_property:
 * GObject property getter, mirroring gst_jpeg_dec_set_property.  max-errors
 * is read atomically to pair with the atomic write in the setter.
 */
1487 gst_jpeg_dec_get_property (GObject * object, guint prop_id, GValue * value,
1492 dec = GST_JPEG_DEC (object);
1495 case PROP_IDCT_METHOD:
1496 g_value_set_enum (value, dec->idct_method);
1498 #ifndef GST_REMOVE_DEPRECATED
1499 case PROP_MAX_ERRORS:
1500 g_value_set_int (value, g_atomic_int_get (&dec->max_errors));
1504 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1510 gst_jpeg_dec_stop (GstVideoDecoder * bdec)
1512 GstJpegDec *dec = (GstJpegDec *) bdec;
1514 gst_jpeg_dec_free_buffers (dec);