2 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
3 * Copyright (C) <2009> Tim-Philipp Müller <tim centricular net>
4 * Copyright (C) 2012 Collabora Ltd.
5 * Author : Edward Hervey <edward@collabora.com>
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Library General Public
9 * License as published by the Free Software Foundation; either
10 * version 2 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Library General Public License for more details.
17 * You should have received a copy of the GNU Library General Public
18 * License along with this library; if not, write to the
19 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
20 * Boston, MA 02110-1301, USA.
24 * SECTION:element-jpegdec
26 * Decodes jpeg images.
29 * <title>Example launch line</title>
31 * gst-launch-1.0 -v filesrc location=mjpeg.avi ! avidemux ! queue ! jpegdec ! videoconvert ! videoscale ! autovideosink
32 * ]| The above pipeline decodes the mjpeg stream and renders it to the screen.
41 #include "gstjpegdec.h"
43 #include <gst/video/video.h>
44 #include <gst/video/gstvideometa.h>
45 #include <gst/video/gstvideopool.h>
46 #include "gst/gst-i18n-plugin.h"
50 #define MAX_WIDTH 65535
52 #define MAX_HEIGHT 65535
54 #define CINFO_GET_JPEGDEC(cinfo_ptr) \
55 (((struct GstJpegDecSourceMgr*)((cinfo_ptr)->src))->dec)
57 #define JPEG_DEFAULT_IDCT_METHOD JDCT_FASTEST
58 #define JPEG_DEFAULT_MAX_ERRORS 0
/* Source pad template: raw video output in I420, packed RGB variants or
 * GRAY8.  NOTE(review): the direction/presence macro arguments appear to
 * be missing from this dump of the file. */
68 static GstStaticPadTemplate gst_jpeg_dec_src_pad_template =
69 GST_STATIC_PAD_TEMPLATE ("src",
72 GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE
73 ("{ I420, RGB, BGR, RGBx, xRGB, BGRx, xBGR, GRAY8 }"))
/* Sink pad template: accepts any image/jpeg stream; sof-marker caps are
 * intentionally left out until upstream parsers set them reliably. */
77 /* FIXME: sof-marker is for IJG libjpeg 8, should be different for 6.2 */
78 /* FIXME: add back "sof-marker = (int) { 0, 1, 2, 5, 6, 7, 9, 10, 13, 14 }"
79 * once we have a parser and/or demuxer set caps properly */
80 static GstStaticPadTemplate gst_jpeg_dec_sink_pad_template =
81 GST_STATIC_PAD_TEMPLATE ("sink",
84 GST_STATIC_CAPS ("image/jpeg")
/* Debug categories and forward declarations for the GstVideoDecoder
 * vfunc implementations wired up in class_init below. */
87 GST_DEBUG_CATEGORY_STATIC (jpeg_dec_debug);
88 #define GST_CAT_DEFAULT jpeg_dec_debug
89 GST_DEBUG_CATEGORY_STATIC (GST_CAT_PERFORMANCE);
91 static void gst_jpeg_dec_set_property (GObject * object, guint prop_id,
92 const GValue * value, GParamSpec * pspec);
93 static void gst_jpeg_dec_get_property (GObject * object, guint prop_id,
94 GValue * value, GParamSpec * pspec);
96 static gboolean gst_jpeg_dec_set_format (GstVideoDecoder * dec,
97 GstVideoCodecState * state);
98 static gboolean gst_jpeg_dec_start (GstVideoDecoder * bdec);
99 static gboolean gst_jpeg_dec_stop (GstVideoDecoder * bdec);
100 static gboolean gst_jpeg_dec_flush (GstVideoDecoder * bdec);
101 static GstFlowReturn gst_jpeg_dec_parse (GstVideoDecoder * bdec,
102 GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos);
103 static GstFlowReturn gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec,
104 GstVideoCodecFrame * frame);
105 static gboolean gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec,
107 static gboolean gst_jpeg_dec_sink_event (GstVideoDecoder * bdec,
110 #define gst_jpeg_dec_parent_class parent_class
111 G_DEFINE_TYPE (GstJpegDec, gst_jpeg_dec, GST_TYPE_VIDEO_DECODER);
/* GObject finalize: tears down the libjpeg decompress context and drops
 * the cached input state, then chains up to the parent class. */
114 gst_jpeg_dec_finalize (GObject * object)
116 GstJpegDec *dec = GST_JPEG_DEC (object);
118 jpeg_destroy_decompress (&dec->cinfo);
119 if (dec->input_state)
120 gst_video_codec_state_unref (dec->input_state);
122 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Class init: installs properties (idct-method, deprecated max-errors),
 * registers the pad templates/metadata, and wires up all the
 * GstVideoDecoder virtual methods. */
126 gst_jpeg_dec_class_init (GstJpegDecClass * klass)
128 GObjectClass *gobject_class;
129 GstElementClass *element_class;
130 GstVideoDecoderClass *vdec_class;
132 gobject_class = (GObjectClass *) klass;
133 element_class = (GstElementClass *) klass;
134 vdec_class = (GstVideoDecoderClass *) klass;
136 parent_class = g_type_class_peek_parent (klass);
138 gobject_class->finalize = gst_jpeg_dec_finalize;
139 gobject_class->set_property = gst_jpeg_dec_set_property;
140 gobject_class->get_property = gst_jpeg_dec_get_property;
142 g_object_class_install_property (gobject_class, PROP_IDCT_METHOD,
143 g_param_spec_enum ("idct-method", "IDCT Method",
144 "The IDCT algorithm to use", GST_TYPE_IDCT_METHOD,
145 JPEG_DEFAULT_IDCT_METHOD,
146 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
149 * GstJpegDec:max-errors:
151 * Error out after receiving N consecutive decoding errors
152 * (-1 = never error out, 0 = automatic, 1 = fail on first error, etc.)
154 * Deprecated: 1.3.1: Property wasn't used internally
156 #ifndef GST_REMOVE_DEPRECATED
157 g_object_class_install_property (gobject_class, PROP_MAX_ERRORS,
158 g_param_spec_int ("max-errors", "Maximum Consecutive Decoding Errors",
159 "(Deprecated) Error out after receiving N consecutive decoding errors"
160 " (-1 = never fail, 0 = automatic, 1 = fail on first error)",
161 -1, G_MAXINT, JPEG_DEFAULT_MAX_ERRORS,
162 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED));
165 gst_element_class_add_static_pad_template (element_class,
166 &gst_jpeg_dec_src_pad_template);
167 gst_element_class_add_static_pad_template (element_class,
168 &gst_jpeg_dec_sink_pad_template);
169 gst_element_class_set_static_metadata (element_class, "JPEG image decoder",
170 "Codec/Decoder/Image", "Decode images from JPEG format",
171 "Wim Taymans <wim@fluendo.com>");
173 vdec_class->start = gst_jpeg_dec_start;
174 vdec_class->stop = gst_jpeg_dec_stop;
175 vdec_class->flush = gst_jpeg_dec_flush;
176 vdec_class->parse = gst_jpeg_dec_parse;
177 vdec_class->set_format = gst_jpeg_dec_set_format;
178 vdec_class->handle_frame = gst_jpeg_dec_handle_frame;
179 vdec_class->decide_allocation = gst_jpeg_dec_decide_allocation;
180 vdec_class->sink_event = gst_jpeg_dec_sink_event;
182 GST_DEBUG_CATEGORY_INIT (jpeg_dec_debug, "jpegdec", 0, "JPEG decoder");
183 GST_DEBUG_CATEGORY_GET (GST_CAT_PERFORMANCE, "GST_PERFORMANCE");
/* libjpeg source-manager callback: we always hand libjpeg the whole frame
 * up front, so being asked for more input means the frame is truncated. */
187 gst_jpeg_dec_fill_input_buffer (j_decompress_ptr cinfo)
189 /* We pass in full frame initially; if this gets called, the frame is most likely
/* libjpeg source-manager callback: nothing to set up, just trace. */
195 gst_jpeg_dec_init_source (j_decompress_ptr cinfo)
197 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "init_source");
/* libjpeg source-manager callback: advance the read pointer by num_bytes,
 * but only when that many bytes are actually available (num_bytes > 0
 * guards the signed/unsigned comparison against bytes_in_buffer). */
202 gst_jpeg_dec_skip_input_data (j_decompress_ptr cinfo, glong num_bytes)
204 GstJpegDec *dec = CINFO_GET_JPEGDEC (cinfo);
206 GST_DEBUG_OBJECT (dec, "skip %ld bytes", num_bytes);
208 if (num_bytes > 0 && cinfo->src->bytes_in_buffer >= num_bytes) {
209 cinfo->src->next_input_byte += (size_t) num_bytes;
210 cinfo->src->bytes_in_buffer -= (size_t) num_bytes;
/* libjpeg source-manager callback for restart-marker resync; trace only.
 * NOTE(review): the log string says "resync_to_start" (typo for
 * "resync_to_restart") — runtime string, left untouched here. */
215 gst_jpeg_dec_resync_to_restart (j_decompress_ptr cinfo, gint desired)
217 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "resync_to_start");
/* libjpeg source-manager callback: nothing to tear down, just trace. */
222 gst_jpeg_dec_term_source (j_decompress_ptr cinfo)
224 GST_LOG_OBJECT (CINFO_GET_JPEGDEC (cinfo), "term_source");
/* Override of libjpeg's output_message: silence libjpeg's stderr output. */
229 gst_jpeg_dec_my_output_message (j_common_ptr cinfo)
231 return; /* do nothing */
/* Override of libjpeg's emit_message: suppress warning chatter. */
235 gst_jpeg_dec_my_emit_message (j_common_ptr cinfo, int msg_level)
237 /* GST_LOG_OBJECT (CINFO_GET_JPEGDEC (&cinfo), "msg_level=%d", msg_level); */
/* Override of libjpeg's error_exit: instead of calling exit(), report the
 * message and longjmp back to the decode call site, which must have armed
 * err_mgr->setjmp_buffer beforehand. */
242 gst_jpeg_dec_my_error_exit (j_common_ptr cinfo)
244 struct GstJpegDecErrorMgr *err_mgr = (struct GstJpegDecErrorMgr *) cinfo->err;
246 (*cinfo->err->output_message) (cinfo);
247 longjmp (err_mgr->setjmp_buffer, 1);
/* Instance init: sets up the libjpeg decompress object with our custom
 * error manager (setjmp-based) and custom source manager callbacks, then
 * initializes property defaults and pad accept-caps behaviour. */
251 gst_jpeg_dec_init (GstJpegDec * dec)
253 GST_DEBUG ("initializing");
256 memset (&dec->cinfo, 0, sizeof (dec->cinfo));
257 memset (&dec->jerr, 0, sizeof (dec->jerr));
258 dec->cinfo.err = jpeg_std_error (&dec->jerr.pub);
259 dec->jerr.pub.output_message = gst_jpeg_dec_my_output_message;
260 dec->jerr.pub.emit_message = gst_jpeg_dec_my_emit_message;
261 dec->jerr.pub.error_exit = gst_jpeg_dec_my_error_exit;
263 jpeg_create_decompress (&dec->cinfo);
265 dec->cinfo.src = (struct jpeg_source_mgr *) &dec->jsrc;
266 dec->cinfo.src->init_source = gst_jpeg_dec_init_source;
267 dec->cinfo.src->fill_input_buffer = gst_jpeg_dec_fill_input_buffer;
268 dec->cinfo.src->skip_input_data = gst_jpeg_dec_skip_input_data;
269 dec->cinfo.src->resync_to_restart = gst_jpeg_dec_resync_to_restart;
270 dec->cinfo.src->term_source = gst_jpeg_dec_term_source;
273 /* init properties */
274 dec->idct_method = JPEG_DEFAULT_IDCT_METHOD;
275 dec->max_errors = JPEG_DEFAULT_MAX_ERRORS;
277 gst_video_decoder_set_use_default_pad_acceptcaps (GST_VIDEO_DECODER_CAST
279 GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_DECODER_SINK_PAD (dec));
/* Returns whether a marker tag is followed by entropy-coded data:
 * SOS (0xda) or the restart markers RST0-RST7 (0xd0-0xd7). */
282 static inline gboolean
283 gst_jpeg_dec_parse_tag_has_entropy_segment (guint8 tag)
285 if (tag == 0xda || (tag >= 0xd0 && tag <= 0xd7))
/* GstVideoDecoder::parse — scan the adapter for a complete JPEG image:
 * find the SOI marker, walk the marker segments (skipping entropy-coded
 * data after SOS/RSTn), and hand a full frame downstream when EOI (or a
 * new SOI indicating a missing EOI) is found.  Returns NEED_DATA until a
 * whole image is available.  State across calls lives in dec->saw_header,
 * dec->parse_resync and dec->parse_entropy_len. */
291 gst_jpeg_dec_parse (GstVideoDecoder * bdec, GstVideoCodecFrame * frame,
292 GstAdapter * adapter, gboolean at_eos)
297 gint offset = 0, noffset;
298 GstJpegDec *dec = (GstJpegDec *) bdec;
300 GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
302 /* FIXME : The overhead of using scan_uint32 is massive */
304 size = gst_adapter_available (adapter);
305 GST_DEBUG ("Parsing jpeg image data (%u bytes)", size);
308 GST_DEBUG ("Flushing all data out");
311 /* If we have leftover data, throw it away */
312 if (!dec->saw_header)
314 goto have_full_frame;
320 if (!dec->saw_header) {
322 /* we expect at least 4 bytes, first of which start marker */
324 gst_adapter_masked_scan_uint32 (adapter, 0xffff0000, 0xffd80000, 0,
327 GST_DEBUG ("ret:%d", ret);
332 gst_adapter_flush (adapter, ret);
335 dec->saw_header = TRUE;
342 GST_DEBUG ("offset:%d, size:%d", offset, size);
345 gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
346 offset, size - offset, &value);
348 /* lost sync if 0xff marker not where expected */
349 if ((resync = (noffset != offset))) {
350 GST_DEBUG ("Lost sync at 0x%08x, resyncing", offset + 2);
352 /* may have marker, but could have been resyncing */
353 resync = resync || dec->parse_resync;
354 /* Skip over extra 0xff */
355 while ((noffset >= 0) && ((value & 0xff) == 0xff)) {
358 gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00, 0x0000ff00,
359 noffset, size - noffset, &value);
361 /* enough bytes left for marker? (we need 0xNN after the 0xff) */
363 GST_DEBUG ("at end of input and no EOI marker found, need more data");
367 /* now lock on the marker we found */
369 value = value & 0xff;
371 GST_DEBUG ("0x%08x: EOI marker", offset + 2);
372 /* clear parse state */
373 dec->saw_header = FALSE;
374 dec->parse_resync = FALSE;
376 goto have_full_frame;
379 GST_DEBUG ("0x%08x: SOI marker before EOI marker", offset + 2);
381 /* clear parse state */
382 dec->saw_header = FALSE;
383 dec->parse_resync = FALSE;
385 goto have_full_frame;
389 if (value >= 0xd0 && value <= 0xd7)
392 /* peek tag and subsequent length */
393 if (offset + 2 + 4 > size)
396 gst_adapter_masked_scan_uint32_peek (adapter, 0x0, 0x0, offset + 2, 4,
398 frame_len = frame_len & 0xffff;
400 GST_DEBUG ("0x%08x: tag %02x, frame_len=%u", offset + 2, value, frame_len);
401 /* the frame length includes the 2 bytes for the length; here we want at
402 * least 2 more bytes at the end for an end marker */
403 if (offset + 2 + 2 + frame_len + 2 > size) {
407 if (gst_jpeg_dec_parse_tag_has_entropy_segment (value)) {
408 guint eseglen = dec->parse_entropy_len;
410 GST_DEBUG ("0x%08x: finding entropy segment length (eseglen:%d)",
411 offset + 2, eseglen);
412 if (size < offset + 2 + frame_len + eseglen)
414 noffset = offset + 2 + frame_len + dec->parse_entropy_len;
416 GST_DEBUG ("noffset:%d, size:%d, size - noffset:%d",
417 noffset, size, size - noffset);
418 noffset = gst_adapter_masked_scan_uint32_peek (adapter, 0x0000ff00,
419 0x0000ff00, noffset, size - noffset, &value);
422 dec->parse_entropy_len = size - offset - 4 - frame_len - 2;
425 if ((value & 0xff) != 0x00) {
426 eseglen = noffset - offset - frame_len - 2;
431 dec->parse_entropy_len = 0;
432 frame_len += eseglen;
433 GST_DEBUG ("entropy segment length=%u => frame_len=%u", eseglen,
437 /* check if we will still be in sync if we interpret
438 * this as a sync point and skip this frame */
439 noffset = offset + frame_len + 2;
440 noffset = gst_adapter_masked_scan_uint32 (adapter, 0x0000ff00, 0x0000ff00,
443 /* ignore and continue resyncing until we hit the end
444 * of our data or find a sync point that looks okay */
448 GST_DEBUG ("found sync at 0x%x", offset + 2);
451 /* Add current data to output buffer */
452 toadd += frame_len + 2;
453 offset += frame_len + 2;
458 gst_video_decoder_add_to_frame (bdec, toadd);
459 return GST_VIDEO_DECODER_FLOW_NEED_DATA;
463 gst_video_decoder_add_to_frame (bdec, toadd);
464 GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
465 return gst_video_decoder_have_frame (bdec);
468 gst_adapter_flush (adapter, size);
/* Install one Huffman table into the decompress object, allocating it on
 * first use.  bits[1..16] hold the per-code-length symbol counts; the
 * count sum is validated (1..256) before copying that many symbols from
 * val[], so we never read past the caller's array. */
473 /* shamelessly ripped from jpegutils.c in mjpegtools */
475 add_huff_table (j_decompress_ptr dinfo,
476 JHUFF_TBL ** htblptr, const UINT8 * bits, const UINT8 * val)
477 /* Define a Huffman table */
481 if (*htblptr == NULL)
482 *htblptr = jpeg_alloc_huff_table ((j_common_ptr) dinfo);
486 /* Copy the number-of-symbols-of-each-code-length counts */
487 memcpy ((*htblptr)->bits, bits, sizeof ((*htblptr)->bits));
489 /* Validate the counts. We do this here mainly so we can copy the right
490 * number of symbols from the val[] array, without risking marching off
491 * the end of memory. jchuff.c will do a more thorough test later.
494 for (len = 1; len <= 16; len++)
495 nsymbols += bits[len];
496 if (nsymbols < 1 || nsymbols > 256)
497 g_error ("jpegutils.c: add_huff_table failed badly. ");
499 memcpy ((*htblptr)->huffval, val, nsymbols * sizeof (UINT8));
/* Install the default Huffman tables from JPEG Annex K.3 (DC/AC,
 * luminance/chrominance) for streams that omit their own DHT segments,
 * e.g. abbreviated MJPEG.  Valid for 8-bit precision only. */
505 std_huff_tables (j_decompress_ptr dinfo)
506 /* Set up the standard Huffman tables (cf. JPEG standard section K.3) */
507 /* IMPORTANT: these are only valid for 8-bit data precision! */
509 static const UINT8 bits_dc_luminance[17] =
510 { /* 0-base */ 0, 0, 1, 5, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0 };
511 static const UINT8 val_dc_luminance[] =
512 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
514 static const UINT8 bits_dc_chrominance[17] =
515 { /* 0-base */ 0, 0, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0 };
516 static const UINT8 val_dc_chrominance[] =
517 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 };
519 static const UINT8 bits_ac_luminance[17] =
520 { /* 0-base */ 0, 0, 2, 1, 3, 3, 2, 4, 3, 5, 5, 4, 4, 0, 0, 1, 0x7d };
521 static const UINT8 val_ac_luminance[] =
522 { 0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12,
523 0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07,
524 0x22, 0x71, 0x14, 0x32, 0x81, 0x91, 0xa1, 0x08,
525 0x23, 0x42, 0xb1, 0xc1, 0x15, 0x52, 0xd1, 0xf0,
526 0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16,
527 0x17, 0x18, 0x19, 0x1a, 0x25, 0x26, 0x27, 0x28,
528 0x29, 0x2a, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39,
529 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49,
530 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59,
531 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69,
532 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79,
533 0x7a, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89,
534 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98,
535 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
536 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6,
537 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4, 0xc5,
538 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4,
539 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xe1, 0xe2,
540 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea,
541 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
545 static const UINT8 bits_ac_chrominance[17] =
546 { /* 0-base */ 0, 0, 2, 1, 2, 4, 4, 3, 4, 7, 5, 4, 4, 0, 1, 2, 0x77 };
547 static const UINT8 val_ac_chrominance[] =
548 { 0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21,
549 0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71,
550 0x13, 0x22, 0x32, 0x81, 0x08, 0x14, 0x42, 0x91,
551 0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33, 0x52, 0xf0,
552 0x15, 0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34,
553 0xe1, 0x25, 0xf1, 0x17, 0x18, 0x19, 0x1a, 0x26,
554 0x27, 0x28, 0x29, 0x2a, 0x35, 0x36, 0x37, 0x38,
555 0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
556 0x49, 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58,
557 0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
558 0x69, 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78,
559 0x79, 0x7a, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
560 0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96,
561 0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5,
562 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4,
563 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3,
564 0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2,
565 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda,
566 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9,
567 0xea, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
571 add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[0],
572 bits_dc_luminance, val_dc_luminance);
573 add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[0],
574 bits_ac_luminance, val_ac_luminance);
575 add_huff_table (dinfo, &dinfo->dc_huff_tbl_ptrs[1],
576 bits_dc_chrominance, val_dc_chrominance);
577 add_huff_table (dinfo, &dinfo->ac_huff_tbl_ptrs[1],
578 bits_ac_chrominance, val_ac_chrominance);
/* If the stream defined no Huffman tables at all, fall back to the
 * standard Annex K tables; mixed cases (some tables present) are left to
 * libjpeg to reject. */
584 guarantee_huff_tables (j_decompress_ptr dinfo)
586 if ((dinfo->dc_huff_tbl_ptrs[0] == NULL) &&
587 (dinfo->dc_huff_tbl_ptrs[1] == NULL) &&
588 (dinfo->ac_huff_tbl_ptrs[0] == NULL) &&
589 (dinfo->ac_huff_tbl_ptrs[1] == NULL)) {
590 GST_DEBUG ("Generating standard Huffman tables for this frame.");
591 std_huff_tables (dinfo);
/* GstVideoDecoder::set_format — cache the negotiated input state,
 * replacing (and unreffing) any previous one. */
596 gst_jpeg_dec_set_format (GstVideoDecoder * dec, GstVideoCodecState * state)
598 GstJpegDec *jpeg = GST_JPEG_DEC (dec);
600 if (jpeg->input_state)
601 gst_video_codec_state_unref (jpeg->input_state);
602 jpeg->input_state = gst_video_codec_state_ref (state);
/* Copy `len` bytes while horizontally downsampling by 2: each output byte
 * takes every second input byte (dest[i] = src[i*2]). */
610 hresamplecpy1 (guint8 * dest, const guint8 * src, guint len)
614 for (i = 0; i < len; ++i) {
615 /* equivalent to: dest[i] = src[i << 1] */
/* Release the 16 per-plane temporary row buffers used by the indirect
 * decode paths and reset the bookkeeping width. */
624 gst_jpeg_dec_free_buffers (GstJpegDec * dec)
628 for (i = 0; i < 16; i++) {
629 g_free (dec->idr_y[i]);
630 g_free (dec->idr_u[i]);
631 g_free (dec->idr_v[i]);
632 dec->idr_y[i] = NULL;
633 dec->idr_u[i] = NULL;
634 dec->idr_v[i] = NULL;
637 dec->idr_width_allocated = 0;
/* (Re)allocate the 16 temporary rows per plane to `maxrowbytes` each;
 * no-op when already sized correctly.  Returns FALSE on OOM (uses
 * g_try_realloc so allocation failure is reported, not fatal). */
640 static inline gboolean
641 gst_jpeg_dec_ensure_buffers (GstJpegDec * dec, guint maxrowbytes)
645 if (G_LIKELY (dec->idr_width_allocated == maxrowbytes))
648 /* FIXME: maybe just alloc one or three blocks altogether? */
649 for (i = 0; i < 16; i++) {
650 dec->idr_y[i] = g_try_realloc (dec->idr_y[i], maxrowbytes);
651 dec->idr_u[i] = g_try_realloc (dec->idr_u[i], maxrowbytes);
652 dec->idr_v[i] = g_try_realloc (dec->idr_v[i], maxrowbytes);
654 if (G_UNLIKELY (!dec->idr_y[i] || !dec->idr_u[i] || !dec->idr_v[i])) {
655 GST_WARNING_OBJECT (dec, "out of memory, i=%d, bytes=%u", i, maxrowbytes);
660 dec->idr_width_allocated = maxrowbytes;
661 GST_LOG_OBJECT (dec, "allocated temp memory, %u bytes/row", maxrowbytes);
/* Indirect grayscale decode: read raw DCTSIZE-line bands into temp rows,
 * then copy pixel-by-pixel into component 0 of the output frame,
 * honouring the pixel stride.  field/num_fields handle interlaced
 * content (every num_fields-th output row, offset for the bottom field). */
666 gst_jpeg_dec_decode_grayscale (GstJpegDec * dec, GstVideoFrame * frame,
667 guint field, guint num_fields)
670 guchar **scanarray[1] = { rows };
675 gint pstride, rstride;
677 GST_DEBUG_OBJECT (dec, "indirect decoding of grayscale");
679 width = GST_VIDEO_FRAME_WIDTH (frame);
680 height = GST_VIDEO_FRAME_HEIGHT (frame) / num_fields;
682 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
685 base[0] = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
687 base[0] += GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
690 pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
691 rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0) * num_fields;
693 memcpy (rows, dec->idr_y, 16 * sizeof (gpointer));
697 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
698 if (G_LIKELY (lines > 0)) {
699 for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
703 for (k = 0; k < width; k++) {
704 base[0][p] = rows[j][k];
710 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Indirect RGB decode: like the grayscale path but with three component
 * planes; raw R/G/B rows are read into temp buffers and interleaved into
 * the output frame using the frame's pixel stride. */
716 gst_jpeg_dec_decode_rgb (GstJpegDec * dec, GstVideoFrame * frame,
717 guint field, guint num_fields)
719 guchar *r_rows[16], *g_rows[16], *b_rows[16];
720 guchar **scanarray[3] = { r_rows, g_rows, b_rows };
724 guint pstride, rstride;
727 GST_DEBUG_OBJECT (dec, "indirect decoding of RGB");
729 width = GST_VIDEO_FRAME_WIDTH (frame);
730 height = GST_VIDEO_FRAME_HEIGHT (frame) / num_fields;
732 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
735 for (i = 0; i < 3; i++) {
736 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
738 base[i] += GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
741 pstride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
742 rstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0) * num_fields;
744 memcpy (r_rows, dec->idr_y, 16 * sizeof (gpointer));
745 memcpy (g_rows, dec->idr_u, 16 * sizeof (gpointer));
746 memcpy (b_rows, dec->idr_v, 16 * sizeof (gpointer));
750 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, DCTSIZE);
751 if (G_LIKELY (lines > 0)) {
752 for (j = 0; (j < DCTSIZE) && (i < height); j++, i++) {
756 for (k = 0; k < width; k++) {
757 base[0][p] = r_rows[j][k];
758 base[1][p] = g_rows[j][k];
759 base[2][p] = b_rows[j][k];
767 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Indirect YUV decode: jpeg_read_raw_data() into temporary row buffers,
 * then memcpy (or 2x horizontal downsample) into the frame's planes.
 * Used when the width/sampling layout makes direct decoding unsafe.
 * r_v/r_h are the luma sampling factors, comp the component count;
 * field/num_fields handle interlaced streams.  last[] caps writes so
 * libjpeg padding rows never run past the plane.
 *
 * FIX: the two memset() calls below had value and size swapped —
 * memset(ptr, value, size) — so the neutral 0x80 chroma fill for
 * grayscale input wrote only 0x80 bytes of (width & 0xff) instead of
 * filling the whole row with 0x80. */
773 gst_jpeg_dec_decode_indirect (GstJpegDec * dec, GstVideoFrame * frame, gint r_v,
774 gint r_h, gint comp, guint field, guint num_fields)
776 guchar *y_rows[16], *u_rows[16], *v_rows[16];
777 guchar **scanarray[3] = { y_rows, u_rows, v_rows };
780 guchar *base[3], *last[3];
781 gint rowsize[3], stride[3];
784 GST_DEBUG_OBJECT (dec,
785 "unadvantageous width or r_h, taking slow route involving memcpy");
787 width = GST_VIDEO_FRAME_WIDTH (frame);
788 height = GST_VIDEO_FRAME_HEIGHT (frame);
790 if (G_UNLIKELY (!gst_jpeg_dec_ensure_buffers (dec, GST_ROUND_UP_32 (width))))
793 for (i = 0; i < 3; i++) {
794 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
795 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i) * num_fields;
796 rowsize[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
797 /* make sure we don't make jpeglib write beyond our buffer,
798 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
799 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
800 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
803 base[i] += GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
807 memcpy (y_rows, dec->idr_y, 16 * sizeof (gpointer));
808 memcpy (u_rows, dec->idr_u, 16 * sizeof (gpointer));
809 memcpy (v_rows, dec->idr_v, 16 * sizeof (gpointer));
811 /* fill chroma components for grayscale */
813 GST_DEBUG_OBJECT (dec, "grayscale, filling chroma");
814 for (i = 0; i < 16; i++) {
815 memset (u_rows[i], 0x80, GST_ROUND_UP_32 (width));
816 memset (v_rows[i], 0x80, GST_ROUND_UP_32 (width));
820 for (i = 0; i < height; i += r_v * DCTSIZE) {
821 lines = jpeg_read_raw_data (&dec->cinfo, scanarray, r_v * DCTSIZE);
822 if (G_LIKELY (lines > 0)) {
823 for (j = 0, k = 0; j < (r_v * DCTSIZE); j += r_v, k++) {
824 if (G_LIKELY (base[0] <= last[0])) {
825 memcpy (base[0], y_rows[j], rowsize[0]);
826 base[0] += stride[0];
829 if (G_LIKELY (base[0] <= last[0])) {
830 memcpy (base[0], y_rows[j + 1], rowsize[0]);
831 base[0] += stride[0];
834 if (G_LIKELY (base[1] <= last[1] && base[2] <= last[2])) {
836 memcpy (base[1], u_rows[k], rowsize[1]);
837 memcpy (base[2], v_rows[k], rowsize[2]);
838 } else if (r_h == 1) {
839 hresamplecpy1 (base[1], u_rows[k], rowsize[1]);
840 hresamplecpy1 (base[2], v_rows[k], rowsize[2]);
842 /* FIXME: implement (at least we avoid crashing by doing nothing) */
846 if (r_v == 2 || (k & 1) != 0) {
847 base[1] += stride[1];
848 base[2] += stride[2];
852 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
/* Direct YUV decode: build scanline pointer tables that point straight
 * into the output frame's planes so jpeg_read_raw_data() writes with no
 * intermediate copy.  Rows past the plane end are redirected into a
 * scratch row so libjpeg's DCT-block padding cannot overflow the buffer.
 * Rejects v_samp factors > 2 with a decoder error. */
858 gst_jpeg_dec_decode_direct (GstJpegDec * dec, GstVideoFrame * frame,
859 guint field, guint num_fields)
861 guchar **line[3]; /* the jpeg line buffer */
862 guchar *y[4 * DCTSIZE] = { NULL, }; /* alloc enough for the lines */
863 guchar *u[4 * DCTSIZE] = { NULL, }; /* r_v will be <4 */
864 guchar *v[4 * DCTSIZE] = { NULL, };
866 gint lines, v_samp[3];
867 guchar *base[3], *last[3];
875 v_samp[0] = dec->cinfo.comp_info[0].v_samp_factor;
876 v_samp[1] = dec->cinfo.comp_info[1].v_samp_factor;
877 v_samp[2] = dec->cinfo.comp_info[2].v_samp_factor;
879 if (G_UNLIKELY (v_samp[0] > 2 || v_samp[1] > 2 || v_samp[2] > 2))
880 goto format_not_supported;
882 height = GST_VIDEO_FRAME_HEIGHT (frame);
884 for (i = 0; i < 3; i++) {
885 base[i] = GST_VIDEO_FRAME_COMP_DATA (frame, i);
886 stride[i] = GST_VIDEO_FRAME_COMP_STRIDE (frame, i) * num_fields;
887 /* make sure we don't make jpeglib write beyond our buffer,
888 * which might happen if (height % (r_v*DCTSIZE)) != 0 */
889 last[i] = base[i] + (GST_VIDEO_FRAME_COMP_STRIDE (frame, i) *
890 (GST_VIDEO_FRAME_COMP_HEIGHT (frame, i) - 1));
893 base[i] += GST_VIDEO_FRAME_COMP_STRIDE (frame, i);
897 if (height % (v_samp[0] * DCTSIZE) && (dec->scratch_size < stride[0])) {
898 g_free (dec->scratch);
899 dec->scratch = g_malloc (stride[0]);
900 dec->scratch_size = stride[0];
903 /* let jpeglib decode directly into our final buffer */
904 GST_DEBUG_OBJECT (dec, "decoding directly into output buffer");
906 for (i = 0; i < height; i += v_samp[0] * DCTSIZE) {
907 for (j = 0; j < (v_samp[0] * DCTSIZE); ++j) {
909 line[0][j] = base[0] + (i + j) * stride[0];
910 if (G_UNLIKELY (line[0][j] > last[0]))
911 line[0][j] = dec->scratch;
913 if (v_samp[1] == v_samp[0]) {
914 line[1][j] = base[1] + ((i + j) / 2) * stride[1];
915 } else if (j < (v_samp[1] * DCTSIZE)) {
916 line[1][j] = base[1] + ((i / 2) + j) * stride[1];
918 if (G_UNLIKELY (line[1][j] > last[1]))
919 line[1][j] = dec->scratch;
921 if (v_samp[2] == v_samp[0]) {
922 line[2][j] = base[2] + ((i + j) / 2) * stride[2];
923 } else if (j < (v_samp[2] * DCTSIZE)) {
924 line[2][j] = base[2] + ((i / 2) + j) * stride[2];
926 if (G_UNLIKELY (line[2][j] > last[2]))
927 line[2][j] = dec->scratch;
930 lines = jpeg_read_raw_data (&dec->cinfo, line, v_samp[0] * DCTSIZE);
931 if (G_UNLIKELY (!lines)) {
932 GST_INFO_OBJECT (dec, "jpeg_read_raw_data() returned 0");
937 format_not_supported:
939 gboolean ret = GST_FLOW_OK;
941 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
942 (_("Failed to decode JPEG image")),
943 ("Unsupported subsampling schema: v_samp factors: %u %u %u", v_samp[0],
944 v_samp[1], v_samp[2]), ret);
/* (Re)negotiate the output format from the decoded JPEG colourspace:
 * picks RGB/GRAY8/I420, skips renegotiation when nothing changed, then
 * sets full-range BT.601 colorimetry (standard for JPEG) and, for
 * interlaced input, interleaved mode with top-field-first order. */
951 gst_jpeg_dec_negotiate (GstJpegDec * dec, gint width, gint height, gint clrspc,
954 GstVideoCodecState *outstate;
956 GstVideoFormat format;
960 format = GST_VIDEO_FORMAT_RGB;
963 format = GST_VIDEO_FORMAT_GRAY8;
966 format = GST_VIDEO_FORMAT_I420;
970 /* Compare to currently configured output state */
971 outstate = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (dec));
973 info = &outstate->info;
975 if (width == GST_VIDEO_INFO_WIDTH (info) &&
976 height == GST_VIDEO_INFO_HEIGHT (info) &&
977 format == GST_VIDEO_INFO_FORMAT (info)) {
978 gst_video_codec_state_unref (outstate);
981 gst_video_codec_state_unref (outstate);
985 gst_video_decoder_set_output_state (GST_VIDEO_DECODER (dec), format,
986 width, height, dec->input_state);
993 outstate->info.colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;
994 outstate->info.colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_BT601;
995 outstate->info.colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN;
996 outstate->info.colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
1001 outstate->info.interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
1002 GST_VIDEO_INFO_FIELD_ORDER (&outstate->info) =
1003 GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST;
1006 gst_video_codec_state_unref (outstate);
1008 gst_video_decoder_negotiate (GST_VIDEO_DECODER (dec));
1010 GST_DEBUG_OBJECT (dec, "max_v_samp_factor=%d", dec->cinfo.max_v_samp_factor);
1011 GST_DEBUG_OBJECT (dec, "max_h_samp_factor=%d", dec->cinfo.max_h_samp_factor);
/* Read and validate the JPEG header, then start the decompress cycle in
 * raw (planar) output mode.  Rejects: >3 components, colourspaces other
 * than YCbCr/grayscale/RGB, inconsistent sampling factors, and image
 * dimensions outside [MIN,MAX]_{WIDTH,HEIGHT}.  All error paths report
 * via GST_VIDEO_DECODER_ERROR, abort the decompress and return
 * GST_FLOW_ERROR. */
1014 static GstFlowReturn
1015 gst_jpeg_dec_prepare_decode (GstJpegDec * dec)
1017 G_GNUC_UNUSED GstFlowReturn ret;
1018 guint r_h, r_v, hdr_ok;
1021 hdr_ok = jpeg_read_header (&dec->cinfo, TRUE);
1022 if (G_UNLIKELY (hdr_ok != JPEG_HEADER_OK)) {
1023 GST_WARNING_OBJECT (dec, "reading the header failed, %d", hdr_ok);
1026 GST_LOG_OBJECT (dec, "num_components=%d", dec->cinfo.num_components);
1027 GST_LOG_OBJECT (dec, "jpeg_color_space=%d", dec->cinfo.jpeg_color_space);
1029 if (!dec->cinfo.num_components || !dec->cinfo.comp_info)
1030 goto components_not_supported;
1032 r_h = dec->cinfo.comp_info[0].h_samp_factor;
1033 r_v = dec->cinfo.comp_info[0].v_samp_factor;
1035 GST_LOG_OBJECT (dec, "r_h = %d, r_v = %d", r_h, r_v);
1037 if (dec->cinfo.num_components > 3)
1038 goto components_not_supported;
1040 /* verify color space expectation to avoid going *boom* or bogus output */
1041 if (dec->cinfo.jpeg_color_space != JCS_YCbCr &&
1042 dec->cinfo.jpeg_color_space != JCS_GRAYSCALE &&
1043 dec->cinfo.jpeg_color_space != JCS_RGB)
1044 goto unsupported_colorspace;
1046 #ifndef GST_DISABLE_GST_DEBUG
1050 for (i = 0; i < dec->cinfo.num_components; ++i) {
1051 GST_LOG_OBJECT (dec, "[%d] h_samp_factor=%d, v_samp_factor=%d, cid=%d",
1052 i, dec->cinfo.comp_info[i].h_samp_factor,
1053 dec->cinfo.comp_info[i].v_samp_factor,
1054 dec->cinfo.comp_info[i].component_id);
1059 /* prepare for raw output */
1060 dec->cinfo.do_fancy_upsampling = FALSE;
1061 dec->cinfo.do_block_smoothing = FALSE;
1062 dec->cinfo.out_color_space = dec->cinfo.jpeg_color_space;
1063 dec->cinfo.dct_method = dec->idct_method;
1064 dec->cinfo.raw_data_out = TRUE;
1066 GST_LOG_OBJECT (dec, "starting decompress");
1067 guarantee_huff_tables (&dec->cinfo);
1068 if (!jpeg_start_decompress (&dec->cinfo)) {
1069 GST_WARNING_OBJECT (dec, "failed to start decompression cycle");
1072 /* sanity checks to get safe and reasonable output */
1073 switch (dec->cinfo.jpeg_color_space) {
1075 if (dec->cinfo.num_components != 1)
1076 goto invalid_yuvrgbgrayscale;
1079 if (dec->cinfo.num_components != 3 || dec->cinfo.max_v_samp_factor > 1 ||
1080 dec->cinfo.max_h_samp_factor > 1)
1081 goto invalid_yuvrgbgrayscale;
1084 if (dec->cinfo.num_components != 3 ||
1085 r_v > 2 || r_v < dec->cinfo.comp_info[0].v_samp_factor ||
1086 r_v < dec->cinfo.comp_info[1].v_samp_factor ||
1087 r_h < dec->cinfo.comp_info[0].h_samp_factor ||
1088 r_h < dec->cinfo.comp_info[1].h_samp_factor)
1089 goto invalid_yuvrgbgrayscale;
1092 g_assert_not_reached ();
1096 if (G_UNLIKELY (dec->cinfo.output_width < MIN_WIDTH ||
1097 dec->cinfo.output_width > MAX_WIDTH ||
1098 dec->cinfo.output_height < MIN_HEIGHT ||
1099 dec->cinfo.output_height > MAX_HEIGHT))
1107 ret = GST_FLOW_ERROR;
1108 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1109 (_("Failed to decode JPEG image")),
1110 ("Picture is too small or too big (%ux%u)", dec->cinfo.output_width,
1111 dec->cinfo.output_height), ret);
1112 return GST_FLOW_ERROR;
1114 components_not_supported:
1116 ret = GST_FLOW_ERROR;
1117 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1118 (_("Failed to decode JPEG image")),
1119 ("number of components not supported: %d (max 3)",
1120 dec->cinfo.num_components), ret);
1121 jpeg_abort_decompress (&dec->cinfo);
1122 return GST_FLOW_ERROR;
1124 unsupported_colorspace:
1126 ret = GST_FLOW_ERROR;
1127 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1128 (_("Failed to decode JPEG image")),
1129 ("Picture has unknown or unsupported colourspace"), ret);
1130 jpeg_abort_decompress (&dec->cinfo);
1131 return GST_FLOW_ERROR;
1133 invalid_yuvrgbgrayscale:
1135 ret = GST_FLOW_ERROR;
1136 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1137 (_("Failed to decode JPEG image")),
1138 ("Picture is corrupt or unhandled YUV/RGB/grayscale layout"), ret);
1139 jpeg_abort_decompress (&dec->cinfo);
1140 return GST_FLOW_ERROR;
/* Dispatch one image to the right decode path: RGB and grayscale always
 * go indirect; YUV goes direct unless the width/sampling layout would
 * let jpeglib write past line ends, in which case the memcpy-based
 * indirect path is used.  Finishes or aborts the decompress cycle
 * depending on the result. */
1144 static GstFlowReturn
1145 gst_jpeg_dec_decode (GstJpegDec * dec, GstVideoFrame * vframe, guint width,
1146 guint height, guint field, guint num_fields)
1148 GstFlowReturn ret = GST_FLOW_OK;
1150 if (dec->cinfo.jpeg_color_space == JCS_RGB) {
1151 gst_jpeg_dec_decode_rgb (dec, vframe, field, num_fields);
1152 } else if (dec->cinfo.jpeg_color_space == JCS_GRAYSCALE) {
1153 gst_jpeg_dec_decode_grayscale (dec, vframe, field, num_fields);
1155 GST_LOG_OBJECT (dec, "decompressing (required scanline buffer height = %u)",
1156 dec->cinfo.rec_outbuf_height);
1158 /* For some widths jpeglib requires more horizontal padding than I420
1159 * provides. In those cases we need to decode into separate buffers and then
1160 * copy over the data into our final picture buffer, otherwise jpeglib might
1161 * write over the end of a line into the beginning of the next line,
1162 * resulting in blocky artifacts on the left side of the picture. */
1163 if (G_UNLIKELY (width % (dec->cinfo.max_h_samp_factor * DCTSIZE) != 0
1164 || dec->cinfo.comp_info[0].h_samp_factor != 2
1165 || dec->cinfo.comp_info[1].h_samp_factor != 1
1166 || dec->cinfo.comp_info[2].h_samp_factor != 1)) {
1167 GST_CAT_LOG_OBJECT (GST_CAT_PERFORMANCE, dec,
1168 "indirect decoding using extra buffer copy");
1169 gst_jpeg_dec_decode_indirect (dec, vframe,
1170 dec->cinfo.comp_info[0].v_samp_factor,
1171 dec->cinfo.comp_info[0].h_samp_factor, dec->cinfo.num_components,
1174 ret = gst_jpeg_dec_decode_direct (dec, vframe, field, num_fields);
1178 GST_LOG_OBJECT (dec, "decompressing finished: %s", gst_flow_get_name (ret));
1180 if (G_UNLIKELY (ret != GST_FLOW_OK)) {
1181 jpeg_abort_decompress (&dec->cinfo);
1183 jpeg_finish_decompress (&dec->cinfo);
/* gst_jpeg_dec_handle_frame:
 * GstVideoDecoder::handle_frame vfunc. Maps the input buffer, works around
 * cameras that omit the EOI marker, parses the JPEG header(s), detects
 * field-interlaced MJPEG (two JPEG images per buffer), negotiates output
 * caps, decodes one or two fields into the allocated output frame and
 * pushes it downstream. Decode errors from libjpeg arrive via longjmp into
 * the setjmp points below.
 */
1189 static GstFlowReturn
1190 gst_jpeg_dec_handle_frame (GstVideoDecoder * bdec, GstVideoCodecFrame * frame)
1192 GstFlowReturn ret = GST_FLOW_OK;
1193 GstJpegDec *dec = (GstJpegDec *) bdec;
1194 GstVideoFrame vframe;
1195 gint num_fields; /* number of fields (1 or 2) */
1196 gint output_height; /* height of output image (one or two fields) */
1197 gint height; /* height of current frame (whole image or a field) */
1200 gboolean need_unmap = TRUE;
1201 GstVideoCodecState *state = NULL;
1202 gboolean release_frame = TRUE;
1207 gst_buffer_map (frame->input_buffer, &dec->current_frame_map, GST_MAP_READ);
1209 data = dec->current_frame_map.data;
1210 nbytes = dec->current_frame_map.size;
/* FIX: an end-of-image marker is present iff the buffer ends in the two
 * bytes 0xff 0xd9. The previous expression
 *   ((data[nbytes - 2] != 0xff) || (data[nbytes - 1] != 0xd9))
 * was the De Morgan inverse, i.e. "EOI is missing", so the fixup below
 * appended an EOI exactly when one was already there and skipped broken
 * buffers that actually needed it. */
1211 has_eoi = ((data[nbytes - 2] == 0xff) && (data[nbytes - 1] == 0xd9));
1213 /* some cameras fail to send an end-of-image marker (EOI),
1214 * add it if that is the case. */
1217 GstBuffer *eoibuf = gst_buffer_new_and_alloc (2);
1219 /* unmap, will add EOI and remap at the end */
1220 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1222 gst_buffer_map (eoibuf, &map, GST_MAP_WRITE);
1225 gst_buffer_unmap (eoibuf, &map);
1227 /* append to input buffer, and remap */
1228 frame->input_buffer = gst_buffer_append (frame->input_buffer, eoibuf);
1230 gst_buffer_map (frame->input_buffer, &dec->current_frame_map, GST_MAP_READ);
1231 GST_DEBUG ("fixup EOI marker added");
/* Point libjpeg's source manager at the (possibly fixed-up) input data. */
1234 dec->current_frame = frame;
1235 dec->cinfo.src->next_input_byte = dec->current_frame_map.data;
1236 dec->cinfo.src->bytes_in_buffer = dec->current_frame_map.size;
/* libjpeg error handler longjmps back here on fatal decode errors. */
1238 if (setjmp (dec->jerr.setjmp_buffer)) {
1239 code = dec->jerr.pub.msg_code;
1241 if (code == JERR_INPUT_EOF) {
1242 GST_DEBUG ("jpeg input EOF error, we probably need more data");
1243 goto need_more_data;
1248 /* read header and check values */
1249 ret = gst_jpeg_dec_prepare_decode (dec);
1250 if (G_UNLIKELY (ret == GST_FLOW_ERROR))
1253 width = dec->cinfo.output_width;
1254 height = dec->cinfo.output_height;
1256 /* is it interlaced MJPEG? (we really don't want to scan the jpeg data
1257 * to see if there are two SOF markers in the packet to detect this) */
/* Heuristic: caps advertise a frame taller than a single JPEG image but no
 * more than twice as tall -> each JPEG is one field of an interlaced frame. */
1258 if (gst_video_decoder_get_packetized (bdec) &&
1260 dec->input_state->info.height > height &&
1261 dec->input_state->info.height <= (height * 2)
1262 && dec->input_state->info.width == width) {
1263 GST_LOG_OBJECT (dec,
1264 "looks like an interlaced image: "
1265 "input width/height of %dx%d with JPEG frame width/height of %dx%d",
1266 dec->input_state->info.width, dec->input_state->info.height, width,
1268 output_height = dec->input_state->info.height;
1269 height = dec->input_state->info.height / 2;
1271 GST_LOG_OBJECT (dec, "field height=%d", height);
1273 output_height = height;
1277 gst_jpeg_dec_negotiate (dec, width, output_height,
1278 dec->cinfo.jpeg_color_space, num_fields == 2);
1280 state = gst_video_decoder_get_output_state (bdec);
1281 ret = gst_video_decoder_allocate_output_frame (bdec, frame);
1282 if (G_UNLIKELY (ret != GST_FLOW_OK))
1285 if (!gst_video_frame_map (&vframe, &state->info, frame->output_buffer,
/* Re-arm the longjmp target now that the output frame is mapped, so the
 * error path can unmap it before bailing out. */
1289 if (setjmp (dec->jerr.setjmp_buffer)) {
1290 code = dec->jerr.pub.msg_code;
1291 gst_video_frame_unmap (&vframe);
1295 GST_LOG_OBJECT (dec, "width %d, height %d, fields %d", width, output_height,
/* Decode the first (or only) field. */
1298 ret = gst_jpeg_dec_decode (dec, &vframe, width, height, 1, num_fields);
1299 if (G_UNLIKELY (ret != GST_FLOW_OK)) {
1300 gst_video_frame_unmap (&vframe);
1304 if (setjmp (dec->jerr.setjmp_buffer)) {
1305 code = dec->jerr.pub.msg_code;
1306 gst_video_frame_unmap (&vframe);
1310 /* decode second field if there is one */
1311 if (num_fields == 2) {
1312 GstVideoFormat field2_format;
1314 /* Checked above before setting num_fields to 2 */
1315 g_assert (dec->input_state != NULL);
1317 /* skip any chunk or padding bytes before the next SOI marker; both fields
1318 * are in one single buffer here, so direct access should be fine here */
1319 while (dec->jsrc.pub.bytes_in_buffer > 2 &&
1320 GST_READ_UINT16_BE (dec->jsrc.pub.next_input_byte) != 0xffd8) {
1321 --dec->jsrc.pub.bytes_in_buffer;
1322 ++dec->jsrc.pub.next_input_byte;
1325 if (gst_jpeg_dec_prepare_decode (dec) != GST_FLOW_OK) {
1326 GST_WARNING_OBJECT (dec, "problem reading jpeg header of 2nd field");
1327 /* FIXME: post a warning message here? */
1328 gst_video_frame_unmap (&vframe);
1332 /* check if format has changed for the second field */
1333 switch (dec->cinfo.jpeg_color_space) {
1335 field2_format = GST_VIDEO_FORMAT_RGB;
1338 field2_format = GST_VIDEO_FORMAT_GRAY8;
1341 field2_format = GST_VIDEO_FORMAT_I420;
1345 GST_LOG_OBJECT (dec,
1346 "got for second field of interlaced image: "
1347 "input width/height of %dx%d with JPEG frame width/height of %dx%d",
1348 dec->input_state->info.width, dec->input_state->info.height,
1349 dec->cinfo.output_width, dec->cinfo.output_height);
/* Both fields must agree with the negotiated output: same width and
 * format, and the output height must lie in (field_height, 2*field_height]. */
1351 if (dec->cinfo.output_width != GST_VIDEO_INFO_WIDTH (&state->info) ||
1352 GST_VIDEO_INFO_HEIGHT (&state->info) <= dec->cinfo.output_height ||
1353 GST_VIDEO_INFO_HEIGHT (&state->info) > (dec->cinfo.output_height * 2) ||
1354 field2_format != GST_VIDEO_INFO_FORMAT (&state->info)) {
1355 GST_WARNING_OBJECT (dec, "second field has different format than first");
1356 gst_video_frame_unmap (&vframe);
1360 ret = gst_jpeg_dec_decode (dec, &vframe, width, height, 2, 2);
1361 if (G_UNLIKELY (ret != GST_FLOW_OK)) {
1362 gst_video_frame_unmap (&vframe);
/* Success: unmap everything and push the finished frame downstream. */
1366 gst_video_frame_unmap (&vframe);
1368 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1369 ret = gst_video_decoder_finish_frame (bdec, frame);
1370 release_frame = FALSE;
1378 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1381 gst_video_decoder_release_frame (bdec, frame);
1384 gst_video_codec_state_unref (state);
/* need_more_data: non-packetized input, not enough bytes for a full image. */
1391 GST_LOG_OBJECT (dec, "we need more data");
/* decode_error: report the libjpeg error and drop the frame. */
1398 gchar err_msg[JMSG_LENGTH_MAX];
1400 dec->jerr.pub.format_message ((j_common_ptr) (&dec->cinfo), err_msg);
1402 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1403 (_("Failed to decode JPEG image")), ("Decode error #%u: %s", code,
1406 gst_buffer_unmap (frame->input_buffer, &dec->current_frame_map);
1407 gst_video_decoder_drop_frame (bdec, frame);
1408 release_frame = FALSE;
1410 jpeg_abort_decompress (&dec->cinfo);
1416 /* already posted an error message */
/* alloc_failed: output buffer allocation failed; only treat it as a real
 * error if it wasn't a normal shutdown/reconfigure flow return. */
1421 const gchar *reason;
1423 reason = gst_flow_get_name (ret);
1425 GST_DEBUG_OBJECT (dec, "failed to alloc buffer, reason %s", reason);
1426 /* Reset for next time */
1427 jpeg_abort_decompress (&dec->cinfo);
1428 if (ret != GST_FLOW_EOS && ret != GST_FLOW_FLUSHING &&
1429 ret != GST_FLOW_NOT_LINKED) {
1430 GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
1431 (_("Failed to decode JPEG image")),
1432 ("Buffer allocation failed, reason: %s", reason), ret);
1433 jpeg_abort_decompress (&dec->cinfo);
/* gst_jpeg_dec_decide_allocation:
 * GstVideoDecoder::decide_allocation vfunc. Chains up to the base class
 * first, then, if a buffer pool was negotiated, enables GstVideoMeta on it
 * when downstream supports that meta.
 */
1440 gst_jpeg_dec_decide_allocation (GstVideoDecoder * bdec, GstQuery * query)
1442 GstBufferPool *pool = NULL;
1443 GstStructure *config;
/* Let the base class perform the default allocation negotiation. */
1445 if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (bdec, query))
1448 if (gst_query_get_n_allocation_pools (query) > 0)
1449 gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
1454 config = gst_buffer_pool_get_config (pool);
/* Only request video meta if downstream advertised support for it. */
1455 if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
1456 gst_buffer_pool_config_add_option (config,
1457 GST_BUFFER_POOL_OPTION_VIDEO_META);
1459 gst_buffer_pool_set_config (pool, config);
1460 gst_object_unref (pool);
/* gst_jpeg_dec_sink_event:
 * GstVideoDecoder::sink_event vfunc. On SEGMENT events, switch to
 * packetized mode for TIME segments (upstream delivers whole images per
 * buffer, e.g. from a demuxer) and to parsing mode otherwise (e.g. byte
 * streams from filesrc). All events are then chained to the base class.
 */
1466 gst_jpeg_dec_sink_event (GstVideoDecoder * bdec, GstEvent * event)
1468 const GstSegment *segment;
1470 if (GST_EVENT_TYPE (event) != GST_EVENT_SEGMENT)
1473 gst_event_parse_segment (event, &segment);
1475 if (segment->format == GST_FORMAT_TIME)
1476 gst_video_decoder_set_packetized (bdec, TRUE);
1478 gst_video_decoder_set_packetized (bdec, FALSE);
1481 return GST_VIDEO_DECODER_CLASS (parent_class)->sink_event (bdec, event);
/* gst_jpeg_dec_start:
 * GstVideoDecoder::start vfunc. Resets the header/parse state and defaults
 * to non-packetized (parsing) mode until a TIME segment says otherwise.
 */
1485 gst_jpeg_dec_start (GstVideoDecoder * bdec)
1487 GstJpegDec *dec = (GstJpegDec *) bdec;
1489 dec->saw_header = FALSE;
1490 dec->parse_entropy_len = 0;
1491 dec->parse_resync = FALSE;
1493 gst_video_decoder_set_packetized (bdec, FALSE);
/* gst_jpeg_dec_flush:
 * GstVideoDecoder::flush vfunc. Aborts any in-progress libjpeg decompress
 * and clears the parse state so decoding restarts cleanly after a seek.
 */
1499 gst_jpeg_dec_flush (GstVideoDecoder * bdec)
1501 GstJpegDec *dec = (GstJpegDec *) bdec;
1503 jpeg_abort_decompress (&dec->cinfo);
1504 dec->parse_entropy_len = 0;
1505 dec->parse_resync = FALSE;
1506 dec->saw_header = FALSE;
/* gst_jpeg_dec_set_property:
 * Standard GObject property setter for the "idct-method" and (deprecated)
 * "max-errors" properties. max_errors is set atomically because it may be
 * read from the streaming thread.
 */
1512 gst_jpeg_dec_set_property (GObject * object, guint prop_id,
1513     const GValue * value, GParamSpec * pspec)
1517 dec = GST_JPEG_DEC (object);
1520     case PROP_IDCT_METHOD:
1521       dec->idct_method = g_value_get_enum (value);
1523 #ifndef GST_REMOVE_DEPRECATED
1524     case PROP_MAX_ERRORS:
1525       g_atomic_int_set (&dec->max_errors, g_value_get_int (value));
1529       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* gst_jpeg_dec_get_property:
 * Standard GObject property getter, mirroring set_property above.
 * max_errors is read atomically to pair with the atomic set.
 */
1535 gst_jpeg_dec_get_property (GObject * object, guint prop_id, GValue * value,
1540 dec = GST_JPEG_DEC (object);
1543     case PROP_IDCT_METHOD:
1544       g_value_set_enum (value, dec->idct_method);
1546 #ifndef GST_REMOVE_DEPRECATED
1547     case PROP_MAX_ERRORS:
1548       g_value_set_int (value, g_atomic_int_get (&dec->max_errors));
1552       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1558 gst_jpeg_dec_stop (GstVideoDecoder * bdec)
1560 GstJpegDec *dec = (GstJpegDec *) bdec;
1562 gst_jpeg_dec_free_buffers (dec);
1564 g_free (dec->scratch);
1565 dec->scratch = NULL;
1566 dec->scratch_size = 0;