2 * gstvaapidecoder_jpeg.c - JPEG decoder
4 * Copyright (C) 2011-2013 Intel Corporation
5 * Author: Wind Yuan <feng.yuan@intel.com>
6 * Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
8 * This library is free software; you can redistribute it and/or
9 * modify it under the terms of the GNU Lesser General Public License
10 * as published by the Free Software Foundation; either version 2.1
11 * of the License, or (at your option) any later version.
13 * This library is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 * Lesser General Public License for more details.
18 * You should have received a copy of the GNU Lesser General Public
19 * License along with this library; if not, write to the Free
20 * Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
21 * Boston, MA 02110-1301 USA
25 * SECTION:gstvaapidecoder_jpeg
26 * @short_description: JPEG decoder
31 #include <gst/codecparsers/gstjpegparser.h>
32 #include "gstvaapicompat.h"
33 #include "gstvaapidecoder_jpeg.h"
34 #include "gstvaapidecoder_objects.h"
35 #include "gstvaapidecoder_priv.h"
36 #include "gstvaapidisplay_priv.h"
37 #include "gstvaapiobject_priv.h"
39 #ifdef HAVE_VA_VA_DEC_JPEG_H
40 # include <va/va_dec_jpeg.h>
44 #include "gstvaapidebug.h"
/* Downcast helper: plain pointer cast, no runtime type check. */
#define GST_VAAPI_DECODER_JPEG_CAST(decoder) \
    ((GstVaapiDecoderJpeg *)(decoder))

typedef struct _GstVaapiDecoderJpegPrivate GstVaapiDecoderJpegPrivate;
typedef struct _GstVaapiDecoderJpegClass GstVaapiDecoderJpegClass;
/* Bitmask of JPEG segments seen so far; tracked independently for the
 * parse side (parser_state) and the decode side (decoder_state). */
typedef enum {
    GST_JPEG_VIDEO_STATE_GOT_SOI       = 1 << 0,
    GST_JPEG_VIDEO_STATE_GOT_SOF       = 1 << 1,
    GST_JPEG_VIDEO_STATE_GOT_SOS       = 1 << 2,
    GST_JPEG_VIDEO_STATE_GOT_HUF_TABLE = 1 << 3,
    GST_JPEG_VIDEO_STATE_GOT_IQ_TABLE  = 1 << 4,

    /* Minimal set of segments required to decode one picture. */
    GST_JPEG_VIDEO_STATE_VALID_PICTURE = (
        GST_JPEG_VIDEO_STATE_GOT_SOI |
        GST_JPEG_VIDEO_STATE_GOT_SOF |
        GST_JPEG_VIDEO_STATE_GOT_SOS),
} GstJpegVideoState;
65 struct _GstVaapiDecoderJpegPrivate {
66 GstVaapiProfile profile;
69 GstVaapiPicture *current_picture;
70 GstJpegFrameHdr frame_hdr;
71 GstJpegHuffmanTables huf_tables;
72 GstJpegQuantTables quant_tables;
77 guint profile_changed : 1;
81 * GstVaapiDecoderJpeg:
83 * A decoder based on Jpeg.
85 struct _GstVaapiDecoderJpeg {
87 GstVaapiDecoder parent_instance;
88 GstVaapiDecoderJpegPrivate priv;
92 * GstVaapiDecoderJpegClass:
94 * A decoder class based on Jpeg.
96 struct _GstVaapiDecoderJpegClass {
98 GstVaapiDecoderClass parent_class;
102 unit_set_marker_code(GstVaapiDecoderUnit *unit, GstJpegMarkerCode marker)
104 unit->parsed_info = GSIZE_TO_POINTER(marker);
107 static inline GstJpegMarkerCode
108 unit_get_marker_code(GstVaapiDecoderUnit *unit)
110 return GPOINTER_TO_SIZE(unit->parsed_info);
114 gst_vaapi_decoder_jpeg_close(GstVaapiDecoderJpeg *decoder)
116 GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
118 gst_vaapi_picture_replace(&priv->current_picture, NULL);
121 priv->profile = GST_VAAPI_PROFILE_JPEG_BASELINE;
124 priv->is_opened = FALSE;
125 priv->profile_changed = TRUE;
129 gst_vaapi_decoder_jpeg_open(GstVaapiDecoderJpeg *decoder)
131 GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
133 gst_vaapi_decoder_jpeg_close(decoder);
135 priv->parser_state = 0;
136 priv->decoder_state = 0;
141 gst_vaapi_decoder_jpeg_destroy(GstVaapiDecoder *base_decoder)
143 GstVaapiDecoderJpeg * const decoder =
144 GST_VAAPI_DECODER_JPEG_CAST(base_decoder);
146 gst_vaapi_decoder_jpeg_close(decoder);
150 gst_vaapi_decoder_jpeg_create(GstVaapiDecoder *base_decoder)
152 GstVaapiDecoderJpeg * const decoder =
153 GST_VAAPI_DECODER_JPEG_CAST(base_decoder);
154 GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
156 priv->profile = GST_VAAPI_PROFILE_JPEG_BASELINE;
157 priv->profile_changed = TRUE;
161 static GstVaapiDecoderStatus
162 ensure_context(GstVaapiDecoderJpeg *decoder)
164 GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
165 GstVaapiProfile profiles[2];
166 GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
167 guint i, n_profiles = 0;
168 gboolean reset_context = FALSE;
170 if (priv->profile_changed) {
171 GST_DEBUG("profile changed");
172 priv->profile_changed = FALSE;
173 reset_context = TRUE;
175 profiles[n_profiles++] = priv->profile;
176 //if (priv->profile == GST_VAAPI_PROFILE_JPEG_EXTENDED)
177 // profiles[n_profiles++] = GST_VAAPI_PROFILE_JPEG_BASELINE;
179 for (i = 0; i < n_profiles; i++) {
180 if (gst_vaapi_display_has_decoder(GST_VAAPI_DECODER_DISPLAY(decoder),
181 profiles[i], entrypoint))
185 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
186 priv->profile = profiles[i];
190 GstVaapiContextInfo info;
192 info.profile = priv->profile;
193 info.entrypoint = entrypoint;
194 info.chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
195 info.width = priv->width;
196 info.height = priv->height;
198 reset_context = gst_vaapi_decoder_ensure_context(
199 GST_VAAPI_DECODER(decoder),
203 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
205 return GST_VAAPI_DECODER_STATUS_SUCCESS;
208 static inline gboolean
209 is_valid_state(guint state, guint ref_state)
211 return (state & ref_state) == ref_state;
/* Shorthand: VALID_STATE(decoder, GOT_SOF) checks priv->decoder_state
 * against GST_JPEG_VIDEO_STATE_GOT_SOF (requires `priv` in scope). */
#define VALID_STATE(TYPE, STATE)                         \
    is_valid_state(priv->G_PASTE(TYPE,_state),           \
        G_PASTE(GST_JPEG_VIDEO_STATE_,STATE))
218 static GstVaapiDecoderStatus
219 decode_current_picture(GstVaapiDecoderJpeg *decoder)
221 GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
222 GstVaapiPicture * const picture = priv->current_picture;
224 if (!VALID_STATE(decoder, VALID_PICTURE))
226 priv->decoder_state = 0;
229 return GST_VAAPI_DECODER_STATUS_SUCCESS;
231 if (!gst_vaapi_picture_decode(picture))
233 if (!gst_vaapi_picture_output(picture))
235 gst_vaapi_picture_replace(&priv->current_picture, NULL);
236 return GST_VAAPI_DECODER_STATUS_SUCCESS;
239 gst_vaapi_picture_replace(&priv->current_picture, NULL);
240 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
243 priv->decoder_state = 0;
244 return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
249 GstVaapiDecoderJpeg *decoder,
250 GstVaapiPicture *picture,
251 GstJpegFrameHdr *frame_hdr
254 VAPictureParameterBufferJPEGBaseline * const pic_param = picture->param;
257 memset(pic_param, 0, sizeof(VAPictureParameterBufferJPEGBaseline));
258 pic_param->picture_width = frame_hdr->width;
259 pic_param->picture_height = frame_hdr->height;
261 pic_param->num_components = frame_hdr->num_components;
262 if (frame_hdr->num_components > 4)
264 for (i = 0; i < pic_param->num_components; i++) {
265 pic_param->components[i].component_id =
266 frame_hdr->components[i].identifier;
267 pic_param->components[i].h_sampling_factor =
268 frame_hdr->components[i].horizontal_factor;
269 pic_param->components[i].v_sampling_factor =
270 frame_hdr->components[i].vertical_factor;
271 pic_param->components[i].quantiser_table_selector =
272 frame_hdr->components[i].quant_table_selector;
277 static GstVaapiDecoderStatus
278 fill_quantization_table(GstVaapiDecoderJpeg *decoder, GstVaapiPicture *picture)
280 GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
281 VAIQMatrixBufferJPEGBaseline *iq_matrix;
282 guint i, j, num_tables;
284 if (!VALID_STATE(decoder, GOT_IQ_TABLE))
285 gst_jpeg_get_default_quantization_tables(&priv->quant_tables);
287 picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(JPEGBaseline, decoder);
288 if (!picture->iq_matrix) {
289 GST_ERROR("failed to allocate quantiser table");
290 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
292 iq_matrix = picture->iq_matrix->param;
294 num_tables = MIN(G_N_ELEMENTS(iq_matrix->quantiser_table),
295 GST_JPEG_MAX_QUANT_ELEMENTS);
297 for (i = 0; i < num_tables; i++) {
298 GstJpegQuantTable * const quant_table =
299 &priv->quant_tables.quant_tables[i];
301 iq_matrix->load_quantiser_table[i] = quant_table->valid;
302 if (!iq_matrix->load_quantiser_table[i])
305 if (quant_table->quant_precision != 0) {
306 // Only Baseline profile is supported, thus 8-bit Qk values
307 GST_ERROR("unsupported quantization table element precision");
308 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
311 for (j = 0; j < GST_JPEG_MAX_QUANT_ELEMENTS; j++)
312 iq_matrix->quantiser_table[i][j] = quant_table->quant_table[j];
313 iq_matrix->load_quantiser_table[i] = 1;
314 quant_table->valid = FALSE;
316 return GST_VAAPI_DECODER_STATUS_SUCCESS;
320 huffman_tables_updated(const GstJpegHuffmanTables *huf_tables)
324 for (i = 0; i < G_N_ELEMENTS(huf_tables->dc_tables); i++)
325 if (huf_tables->dc_tables[i].valid)
327 for (i = 0; i < G_N_ELEMENTS(huf_tables->ac_tables); i++)
328 if (huf_tables->ac_tables[i].valid)
334 huffman_tables_reset(GstJpegHuffmanTables *huf_tables)
338 for (i = 0; i < G_N_ELEMENTS(huf_tables->dc_tables); i++)
339 huf_tables->dc_tables[i].valid = FALSE;
340 for (i = 0; i < G_N_ELEMENTS(huf_tables->ac_tables); i++)
341 huf_tables->ac_tables[i].valid = FALSE;
345 fill_huffman_table(GstVaapiHuffmanTable *huf_table,
346 const GstJpegHuffmanTables *huf_tables)
348 VAHuffmanTableBufferJPEGBaseline * const huffman_table = huf_table->param;
351 num_tables = MIN(G_N_ELEMENTS(huffman_table->huffman_table),
352 GST_JPEG_MAX_SCAN_COMPONENTS);
354 for (i = 0; i < num_tables; i++) {
355 huffman_table->load_huffman_table[i] =
356 huf_tables->dc_tables[i].valid && huf_tables->ac_tables[i].valid;
357 if (!huffman_table->load_huffman_table[i])
360 memcpy(huffman_table->huffman_table[i].num_dc_codes,
361 huf_tables->dc_tables[i].huf_bits,
362 sizeof(huffman_table->huffman_table[i].num_dc_codes));
363 memcpy(huffman_table->huffman_table[i].dc_values,
364 huf_tables->dc_tables[i].huf_values,
365 sizeof(huffman_table->huffman_table[i].dc_values));
366 memcpy(huffman_table->huffman_table[i].num_ac_codes,
367 huf_tables->ac_tables[i].huf_bits,
368 sizeof(huffman_table->huffman_table[i].num_ac_codes));
369 memcpy(huffman_table->huffman_table[i].ac_values,
370 huf_tables->ac_tables[i].huf_values,
371 sizeof(huffman_table->huffman_table[i].ac_values));
372 memset(huffman_table->huffman_table[i].pad,
374 sizeof(huffman_table->huffman_table[i].pad));
379 get_max_sampling_factors(const GstJpegFrameHdr *frame_hdr,
380 guint *h_max_ptr, guint *v_max_ptr)
382 guint h_max = frame_hdr->components[0].horizontal_factor;
383 guint v_max = frame_hdr->components[0].vertical_factor;
386 for (i = 1; i < frame_hdr->num_components; i++) {
387 const GstJpegFrameComponent * const fcp = &frame_hdr->components[i];
388 if (h_max < fcp->horizontal_factor)
389 h_max = fcp->horizontal_factor;
390 if (v_max < fcp->vertical_factor)
391 v_max = fcp->vertical_factor;
400 static const GstJpegFrameComponent *
401 get_component(const GstJpegFrameHdr *frame_hdr, guint selector)
405 for (i = 0; i < frame_hdr->num_components; i++) {
406 const GstJpegFrameComponent * const fcp = &frame_hdr->components[i];
407 if (fcp->identifier == selector)
413 static GstVaapiDecoderStatus
414 decode_picture(GstVaapiDecoderJpeg *decoder, GstJpegMarkerSegment *seg,
417 GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
418 GstJpegFrameHdr * const frame_hdr = &priv->frame_hdr;
420 if (!VALID_STATE(decoder, GOT_SOI))
421 return GST_VAAPI_DECODER_STATUS_SUCCESS;
423 switch (seg->marker) {
424 case GST_JPEG_MARKER_SOF_MIN:
425 priv->profile = GST_VAAPI_PROFILE_JPEG_BASELINE;
428 GST_ERROR("unsupported profile %d", seg->marker);
429 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
432 memset(frame_hdr, 0, sizeof(*frame_hdr));
433 if (!gst_jpeg_parse_frame_hdr(frame_hdr, buf + seg->offset, seg->size, 0)) {
434 GST_ERROR("failed to parse image");
435 return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
437 priv->height = frame_hdr->height;
438 priv->width = frame_hdr->width;
440 priv->decoder_state |= GST_JPEG_VIDEO_STATE_GOT_SOF;
441 return GST_VAAPI_DECODER_STATUS_SUCCESS;
444 static GstVaapiDecoderStatus
445 decode_huffman_table(
446 GstVaapiDecoderJpeg *decoder,
451 GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
453 if (!VALID_STATE(decoder, GOT_SOI))
454 return GST_VAAPI_DECODER_STATUS_SUCCESS;
456 if (!gst_jpeg_parse_huffman_table(&priv->huf_tables, buf, buf_size, 0)) {
457 GST_ERROR("failed to parse Huffman table");
458 return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
461 priv->decoder_state |= GST_JPEG_VIDEO_STATE_GOT_HUF_TABLE;
462 return GST_VAAPI_DECODER_STATUS_SUCCESS;
465 static GstVaapiDecoderStatus
467 GstVaapiDecoderJpeg *decoder,
472 GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
474 if (!VALID_STATE(decoder, GOT_SOI))
475 return GST_VAAPI_DECODER_STATUS_SUCCESS;
477 if (!gst_jpeg_parse_quant_table(&priv->quant_tables, buf, buf_size, 0)) {
478 GST_ERROR("failed to parse quantization table");
479 return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
482 priv->decoder_state |= GST_JPEG_VIDEO_STATE_GOT_IQ_TABLE;
483 return GST_VAAPI_DECODER_STATUS_SUCCESS;
486 static GstVaapiDecoderStatus
487 decode_restart_interval(
488 GstVaapiDecoderJpeg *decoder,
493 GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
495 if (!VALID_STATE(decoder, GOT_SOI))
496 return GST_VAAPI_DECODER_STATUS_SUCCESS;
498 if (!gst_jpeg_parse_restart_interval(&priv->mcu_restart, buf, buf_size, 0)) {
499 GST_ERROR("failed to parse restart interval");
500 return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
502 return GST_VAAPI_DECODER_STATUS_SUCCESS;
505 static GstVaapiDecoderStatus
506 decode_scan(GstVaapiDecoderJpeg *decoder, GstJpegMarkerSegment *seg,
509 GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
510 GstVaapiPicture * const picture = priv->current_picture;
511 GstVaapiSlice *slice;
512 VASliceParameterBufferJPEGBaseline *slice_param;
513 GstJpegScanHdr scan_hdr;
514 guint scan_hdr_size, scan_data_size;
515 guint i, h_max, v_max, mcu_width, mcu_height;
517 if (!VALID_STATE(decoder, GOT_SOF))
518 return GST_VAAPI_DECODER_STATUS_SUCCESS;
520 scan_hdr_size = (buf[seg->offset] << 8) | buf[seg->offset + 1];
521 scan_data_size = seg->size - scan_hdr_size;
523 memset(&scan_hdr, 0, sizeof(scan_hdr));
524 if (!gst_jpeg_parse_scan_hdr(&scan_hdr, buf + seg->offset, seg->size, 0)) {
525 GST_ERROR("failed to parse scan header");
526 return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
529 slice = GST_VAAPI_SLICE_NEW(JPEGBaseline, decoder,
530 buf + seg->offset + scan_hdr_size, scan_data_size);
532 GST_ERROR("failed to allocate slice");
533 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
535 gst_vaapi_picture_add_slice(picture, slice);
537 if (!VALID_STATE(decoder, GOT_HUF_TABLE))
538 gst_jpeg_get_default_huffman_tables(&priv->huf_tables);
540 // Update VA Huffman table if it changed for this scan
541 if (huffman_tables_updated(&priv->huf_tables)) {
542 slice->huf_table = GST_VAAPI_HUFFMAN_TABLE_NEW(JPEGBaseline, decoder);
543 if (!slice->huf_table) {
544 GST_ERROR("failed to allocate Huffman tables");
545 huffman_tables_reset(&priv->huf_tables);
546 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
548 fill_huffman_table(slice->huf_table, &priv->huf_tables);
549 huffman_tables_reset(&priv->huf_tables);
552 slice_param = slice->param;
553 slice_param->num_components = scan_hdr.num_components;
554 for (i = 0; i < scan_hdr.num_components; i++) {
555 slice_param->components[i].component_selector =
556 scan_hdr.components[i].component_selector;
557 slice_param->components[i].dc_table_selector =
558 scan_hdr.components[i].dc_selector;
559 slice_param->components[i].ac_table_selector =
560 scan_hdr.components[i].ac_selector;
562 slice_param->restart_interval = priv->mcu_restart;
563 slice_param->slice_horizontal_position = 0;
564 slice_param->slice_vertical_position = 0;
566 get_max_sampling_factors(&priv->frame_hdr, &h_max, &v_max);
567 mcu_width = 8 * h_max;
568 mcu_height = 8 * v_max;
570 if (scan_hdr.num_components == 1) { // Non-interleaved
571 const guint Csj = slice_param->components[0].component_selector;
572 const GstJpegFrameComponent * const fcp =
573 get_component(&priv->frame_hdr, Csj);
575 if (!fcp || fcp->horizontal_factor == 0 || fcp->vertical_factor == 0) {
576 GST_ERROR("failed to validate image component %u", Csj);
577 return GST_VAAPI_DECODER_STATUS_ERROR_INVALID_PARAMETER;
579 mcu_width /= fcp->horizontal_factor;
580 mcu_height /= fcp->vertical_factor;
582 slice_param->num_mcus =
583 ((priv->frame_hdr.width + mcu_width - 1) / mcu_width) *
584 ((priv->frame_hdr.height + mcu_height - 1) / mcu_height);
586 priv->decoder_state |= GST_JPEG_VIDEO_STATE_GOT_SOS;
587 return GST_VAAPI_DECODER_STATUS_SUCCESS;
590 static GstVaapiDecoderStatus
591 decode_segment(GstVaapiDecoderJpeg *decoder, GstJpegMarkerSegment *seg,
594 GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
595 GstVaapiDecoderStatus status;
598 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
599 switch (seg->marker) {
600 case GST_JPEG_MARKER_SOI:
601 priv->mcu_restart = 0;
602 priv->decoder_state |= GST_JPEG_VIDEO_STATE_GOT_SOI;
604 case GST_JPEG_MARKER_EOI:
605 priv->decoder_state = 0;
607 case GST_JPEG_MARKER_DAC:
608 GST_ERROR("unsupported arithmetic coding mode");
609 status = GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
611 case GST_JPEG_MARKER_DHT:
612 status = decode_huffman_table(decoder, buf + seg->offset, seg->size);
614 case GST_JPEG_MARKER_DQT:
615 status = decode_quant_table(decoder, buf + seg->offset, seg->size);
617 case GST_JPEG_MARKER_DRI:
618 status = decode_restart_interval(decoder, buf + seg->offset, seg->size);
620 case GST_JPEG_MARKER_SOS:
621 status = decode_scan(decoder, seg, buf);
625 if (seg->marker >= GST_JPEG_MARKER_SOF_MIN &&
626 seg->marker <= GST_JPEG_MARKER_SOF_MAX)
627 status = decode_picture(decoder, seg, buf);
633 static GstVaapiDecoderStatus
634 ensure_decoder(GstVaapiDecoderJpeg *decoder)
636 GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
638 if (!priv->is_opened) {
639 priv->is_opened = gst_vaapi_decoder_jpeg_open(decoder);
640 if (!priv->is_opened)
641 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
643 return GST_VAAPI_DECODER_STATUS_SUCCESS;
647 is_scan_complete(GstJpegMarkerCode marker)
649 // Scan is assumed to be complete when the new segment is not RSTi
650 return marker < GST_JPEG_MARKER_RST_MIN || marker > GST_JPEG_MARKER_RST_MAX;
653 static GstVaapiDecoderStatus
654 gst_vaapi_decoder_jpeg_parse(GstVaapiDecoder *base_decoder,
655 GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
657 GstVaapiDecoderJpeg * const decoder =
658 GST_VAAPI_DECODER_JPEG_CAST(base_decoder);
659 GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
660 GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
661 GstVaapiDecoderStatus status;
662 GstJpegMarkerCode marker;
663 GstJpegMarkerSegment seg;
665 guint buf_size, flags;
668 status = ensure_decoder(decoder);
669 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
672 /* Expect at least 2 bytes for the marker */
673 buf_size = gst_adapter_available(adapter);
675 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
677 buf = gst_adapter_map(adapter, buf_size);
679 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
681 ofs1 = ps->input_offset1 - 2;
686 // Skip any garbage until we reach SOI, if needed
687 if (!gst_jpeg_parse(&seg, buf, buf_size, ofs1)) {
688 gst_adapter_unmap(adapter);
689 ps->input_offset1 = buf_size;
690 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
695 if (!VALID_STATE(parser, GOT_SOI) && marker != GST_JPEG_MARKER_SOI)
697 if (marker == GST_JPEG_MARKER_SOS) {
698 ofs2 = ps->input_offset2 - 2;
699 if (ofs2 < ofs1 + seg.size)
700 ofs2 = ofs1 + seg.size;
702 // Parse the whole scan + ECSs, including RSTi
704 if (!gst_jpeg_parse(&seg, buf, buf_size, ofs2)) {
705 gst_adapter_unmap(adapter);
706 ps->input_offset1 = ofs1;
707 ps->input_offset2 = buf_size;
708 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
711 if (is_scan_complete(seg.marker))
713 ofs2 = seg.offset + seg.size;
715 ofs2 = seg.offset - 2;
718 // Check that the whole segment is actually available (in buffer)
719 ofs2 = ofs1 + seg.size;
720 if (ofs2 > buf_size) {
721 gst_adapter_unmap(adapter);
722 ps->input_offset1 = ofs1;
723 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
728 gst_adapter_unmap(adapter);
730 unit->size = ofs2 - ofs1;
731 unit_set_marker_code(unit, marker);
732 gst_adapter_flush(adapter, ofs1);
733 ps->input_offset1 = 2;
734 ps->input_offset2 = 2;
738 case GST_JPEG_MARKER_SOI:
739 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
740 priv->parser_state |= GST_JPEG_VIDEO_STATE_GOT_SOI;
742 case GST_JPEG_MARKER_EOI:
743 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
744 priv->parser_state = 0;
746 case GST_JPEG_MARKER_SOS:
747 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
748 priv->parser_state |= GST_JPEG_VIDEO_STATE_GOT_SOS;
750 case GST_JPEG_MARKER_DAC:
751 case GST_JPEG_MARKER_DHT:
752 case GST_JPEG_MARKER_DQT:
753 if (priv->parser_state & GST_JPEG_VIDEO_STATE_GOT_SOF)
754 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
756 case GST_JPEG_MARKER_DRI:
757 if (priv->parser_state & GST_JPEG_VIDEO_STATE_GOT_SOS)
758 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
760 case GST_JPEG_MARKER_DNL:
761 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
763 case GST_JPEG_MARKER_COM:
764 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
768 if (marker >= GST_JPEG_MARKER_SOF_MIN &&
769 marker <= GST_JPEG_MARKER_SOF_MAX)
770 priv->parser_state |= GST_JPEG_VIDEO_STATE_GOT_SOF;
772 /* Application segments */
773 else if (marker >= GST_JPEG_MARKER_APP_MIN &&
774 marker <= GST_JPEG_MARKER_APP_MAX)
775 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
778 else if (marker >= 0x02 && marker <= 0xbf)
779 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
782 GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
783 return GST_VAAPI_DECODER_STATUS_SUCCESS;
786 static GstVaapiDecoderStatus
787 gst_vaapi_decoder_jpeg_decode(GstVaapiDecoder *base_decoder,
788 GstVaapiDecoderUnit *unit)
790 GstVaapiDecoderJpeg * const decoder =
791 GST_VAAPI_DECODER_JPEG_CAST(base_decoder);
792 GstVaapiDecoderStatus status;
793 GstJpegMarkerSegment seg;
794 GstBuffer * const buffer =
795 GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
798 status = ensure_decoder(decoder);
799 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
802 if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
803 GST_ERROR("failed to map buffer");
804 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
807 seg.marker = unit_get_marker_code(unit);
808 seg.offset = unit->offset;
809 seg.size = unit->size;
811 status = decode_segment(decoder, &seg, map_info.data);
812 gst_buffer_unmap(buffer, &map_info);
813 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
815 return GST_VAAPI_DECODER_STATUS_SUCCESS;
818 static GstVaapiDecoderStatus
819 gst_vaapi_decoder_jpeg_start_frame(GstVaapiDecoder *base_decoder,
820 GstVaapiDecoderUnit *base_unit)
822 GstVaapiDecoderJpeg * const decoder =
823 GST_VAAPI_DECODER_JPEG_CAST(base_decoder);
824 GstVaapiDecoderJpegPrivate * const priv = &decoder->priv;
825 GstVaapiPicture *picture;
826 GstVaapiDecoderStatus status;
828 if (!VALID_STATE(decoder, GOT_SOF))
829 return GST_VAAPI_DECODER_STATUS_SUCCESS;
831 status = ensure_context(decoder);
832 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
833 GST_ERROR("failed to reset context");
837 picture = GST_VAAPI_PICTURE_NEW(JPEGBaseline, decoder);
839 GST_ERROR("failed to allocate picture");
840 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
842 gst_vaapi_picture_replace(&priv->current_picture, picture);
843 gst_vaapi_picture_unref(picture);
845 if (!fill_picture(decoder, picture, &priv->frame_hdr))
846 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
848 status = fill_quantization_table(decoder, picture);
849 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
852 /* Update presentation time */
853 picture->pts = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
854 return GST_VAAPI_DECODER_STATUS_SUCCESS;
857 static GstVaapiDecoderStatus
858 gst_vaapi_decoder_jpeg_end_frame(GstVaapiDecoder *base_decoder)
860 GstVaapiDecoderJpeg * const decoder =
861 GST_VAAPI_DECODER_JPEG_CAST(base_decoder);
863 return decode_current_picture(decoder);
867 gst_vaapi_decoder_jpeg_class_init(GstVaapiDecoderJpegClass *klass)
869 GstVaapiMiniObjectClass * const object_class =
870 GST_VAAPI_MINI_OBJECT_CLASS(klass);
871 GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
873 object_class->size = sizeof(GstVaapiDecoderJpeg);
874 object_class->finalize = (GDestroyNotify)gst_vaapi_decoder_finalize;
876 decoder_class->create = gst_vaapi_decoder_jpeg_create;
877 decoder_class->destroy = gst_vaapi_decoder_jpeg_destroy;
878 decoder_class->parse = gst_vaapi_decoder_jpeg_parse;
879 decoder_class->decode = gst_vaapi_decoder_jpeg_decode;
880 decoder_class->start_frame = gst_vaapi_decoder_jpeg_start_frame;
881 decoder_class->end_frame = gst_vaapi_decoder_jpeg_end_frame;
884 static inline const GstVaapiDecoderClass *
885 gst_vaapi_decoder_jpeg_class(void)
887 static GstVaapiDecoderJpegClass g_class;
888 static gsize g_class_init = FALSE;
890 if (g_once_init_enter(&g_class_init)) {
891 gst_vaapi_decoder_jpeg_class_init(&g_class);
892 g_once_init_leave(&g_class_init, TRUE);
894 return GST_VAAPI_DECODER_CLASS(&g_class);
898 * gst_vaapi_decoder_jpeg_new:
899 * @display: a #GstVaapiDisplay
900 * @caps: a #GstCaps holding codec information
902 * Creates a new #GstVaapiDecoder for JPEG decoding. The @caps can
903 * hold extra information like codec-data and pictured coded size.
905 * Return value: the newly allocated #GstVaapiDecoder object
908 gst_vaapi_decoder_jpeg_new(GstVaapiDisplay *display, GstCaps *caps)
910 return gst_vaapi_decoder_new(gst_vaapi_decoder_jpeg_class(), display, caps);