2 * gstvaapidecoder_jpeg.c - JPEG decoder
4 * Copyright (C) 2011-2012 Intel Corporation
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public License
8 * as published by the Free Software Foundation; either version 2.1
9 * of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free
18 * Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
19 * Boston, MA 02110-1301 USA
23 * SECTION:gstvaapidecoder_jpeg
24 * @short_description: JPEG decoder
29 #include <gst/codecparsers/gstjpegparser.h>
30 #include "gstvaapicompat.h"
31 #include "gstvaapidecoder_jpeg.h"
32 #include "gstvaapidecoder_objects.h"
33 #include "gstvaapidecoder_priv.h"
34 #include "gstvaapidisplay_priv.h"
35 #include "gstvaapiobject_priv.h"
38 #include "gstvaapidebug.h"
/* Register GstVaapiDecoderJpeg as a GObject type derived from the
 * generic GstVaapiDecoder base class. */
40 G_DEFINE_TYPE(GstVaapiDecoderJpeg,
41 gst_vaapi_decoder_jpeg,
42 GST_VAAPI_TYPE_DECODER);
/* Accessor for the GObject instance-private data area of a
 * GstVaapiDecoderJpeg (allocated via g_type_class_add_private()). */
44 #define GST_VAAPI_DECODER_JPEG_GET_PRIVATE(obj) \
45 (G_TYPE_INSTANCE_GET_PRIVATE((obj), \
46 GST_VAAPI_TYPE_DECODER_JPEG, \
47 GstVaapiDecoderJpegPrivate))
/* Per-instance decoder state.
 * NOTE(review): this chunk is missing intermediate lines; members used
 * elsewhere in the file (width, height, mcu_restart, is_opened) are not
 * visible here -- confirm against the full source. */
49 struct _GstVaapiDecoderJpegPrivate {
50 GstVaapiProfile profile;
/* Picture currently being filled/decoded; owned via picture_replace() */
53 GstVaapiPicture *current_picture;
54 GstJpegFrameHdr frame_hdr;
55 GstJpegHuffmanTables huf_tables;
56 GstJpegQuantTables quant_tables;
/* Whether DHT/DQT segments were seen; otherwise defaults are loaded */
57 gboolean has_huf_table;
58 gboolean has_quant_table;
/* Set when profile changes; forces ensure_context() to reset the context */
61 guint profile_changed : 1;
62 guint is_constructed : 1;
/* Bookkeeping for one SOS header + its entropy-coded data span inside
 * the input buffer. Fields used by decode_buffer(): header_offset,
 * header_size, data_offset, data_size, is_valid.
 * NOTE(review): the struct body is not visible in this chunk. */
65 typedef struct _GstJpegScanSegment GstJpegScanSegment;
66 struct _GstJpegScanSegment {
/* Drops per-stream state: releases any pending picture and resets the
 * profile to baseline so the next open() starts from a clean slate. */
75 gst_vaapi_decoder_jpeg_close(GstVaapiDecoderJpeg *decoder)
77 GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
79 gst_vaapi_picture_replace(&priv->current_picture, NULL);
82 priv->profile = GST_VAAPI_PROFILE_JPEG_BASELINE;
85 priv->is_opened = FALSE;
86 priv->profile_changed = TRUE;
/* (Re)opens the decoder for a new stream; closes first to discard any
 * leftover state. NOTE(review): return statement not visible in chunk. */
90 gst_vaapi_decoder_jpeg_open(GstVaapiDecoderJpeg *decoder, GstBuffer *buffer)
92 gst_vaapi_decoder_jpeg_close(decoder);
/* Tears down the decoder; currently equivalent to close(). */
98 gst_vaapi_decoder_jpeg_destroy(GstVaapiDecoderJpeg *decoder)
100 gst_vaapi_decoder_jpeg_close(decoder);
/* Creation check: fails unless the base class resolved a codec from the
 * caps. NOTE(review): return statements not visible in this chunk. */
104 gst_vaapi_decoder_jpeg_create(GstVaapiDecoderJpeg *decoder)
106 if (!GST_VAAPI_DECODER_CODEC(decoder))
/* Makes sure a VA context matching the current profile/size exists.
 * On a profile change, probes the display for a VLD entrypoint for each
 * candidate profile (currently only the detected one; extended-profile
 * fallback is commented out) and recreates the context if needed. */
111 static GstVaapiDecoderStatus
112 ensure_context(GstVaapiDecoderJpeg *decoder)
114 GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
115 GstVaapiProfile profiles[2];
116 GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
117 guint i, n_profiles = 0;
118 gboolean reset_context = FALSE;
120 if (priv->profile_changed) {
121 GST_DEBUG("profile changed");
122 priv->profile_changed = FALSE;
123 reset_context = TRUE;
125 profiles[n_profiles++] = priv->profile;
126 //if (priv->profile == GST_VAAPI_PROFILE_JPEG_EXTENDED)
127 //    profiles[n_profiles++] = GST_VAAPI_PROFILE_JPEG_BASELINE;
/* Pick the first candidate the display can actually decode */
129 for (i = 0; i < n_profiles; i++) {
130 if (gst_vaapi_display_has_decoder(GST_VAAPI_DECODER_DISPLAY(decoder),
131 profiles[i], entrypoint))
135 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
136 priv->profile = profiles[i];
140 reset_context = gst_vaapi_decoder_ensure_context(
141 GST_VAAPI_DECODER(decoder),
146 GST_DECODER_DEFAULT_SURFACES_COUNT
149 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
151 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Submits the pending picture to the hardware and pushes it to the
 * output queue, then releases the decoder's reference to it.
 * NOTE(review): `success` assignments in the failure branches are not
 * visible in this chunk. */
155 decode_current_picture(GstVaapiDecoderJpeg *decoder)
157 GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
158 GstVaapiPicture * const picture = priv->current_picture;
159 gboolean success = TRUE;
162 if (!gst_vaapi_picture_decode(picture))
164 else if (!gst_vaapi_picture_output(picture))
166 gst_vaapi_picture_replace(&priv->current_picture, NULL);
/* fill_picture: populates the VAPictureParameterBufferJPEGBaseline from
 * the parsed SOF frame header (dimensions plus per-component id and
 * sampling/quantizer selectors). Rejects frames with more than 4
 * components, matching the size of the VA components array. */
173 GstVaapiDecoderJpeg *decoder,
174 GstVaapiPicture *picture,
175 GstJpegFrameHdr *jpeg_frame_hdr
178 VAPictureParameterBufferJPEGBaseline *pic_param = picture->param;
183 memset(pic_param, 0, sizeof(VAPictureParameterBufferJPEGBaseline));
184 pic_param->picture_width = jpeg_frame_hdr->width;
185 pic_param->picture_height = jpeg_frame_hdr->height;
187 pic_param->num_components = jpeg_frame_hdr->num_components;
188 if (jpeg_frame_hdr->num_components > 4)
190 for (i = 0; i < pic_param->num_components; i++) {
191 pic_param->components[i].component_id =
192 jpeg_frame_hdr->components[i].identifier;
193 pic_param->components[i].h_sampling_factor =
194 jpeg_frame_hdr->components[i].horizontal_factor;
195 pic_param->components[i].v_sampling_factor =
196 jpeg_frame_hdr->components[i].vertical_factor;
197 pic_param->components[i].quantiser_table_selector =
198 jpeg_frame_hdr->components[i].quant_table_selector;
/* Builds the VA IQ-matrix buffer from the parsed (or default) DQT
 * tables. Only 8-bit precision tables are supported (asserted below).
 * Each table consumed here is marked invalid so it is not re-sent. */
204 fill_quantization_table(
205 GstVaapiDecoderJpeg *decoder,
206 GstVaapiPicture *picture
209 GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
210 VAIQMatrixBufferJPEGBaseline *iq_matrix;
211 guint i, j, num_tables;
/* No DQT seen yet: fall back to the spec's default tables */
213 if (!priv->has_quant_table)
214 gst_jpeg_get_default_quantization_tables(&priv->quant_tables);
216 picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(JPEGBaseline, decoder);
217 g_assert(picture->iq_matrix);
218 iq_matrix = picture->iq_matrix->param;
/* NOTE(review): GST_JPEG_MAX_QUANT_ELEMENTS (64, elements per table)
 * looks like the wrong bound for a table *count*; this only works
 * because G_N_ELEMENTS(quantiser_table) (4) is the smaller value.
 * A table-count constant would be the semantically correct one. */
220 num_tables = MIN(G_N_ELEMENTS(iq_matrix->quantiser_table),
221 GST_JPEG_MAX_QUANT_ELEMENTS);
223 for (i = 0; i < num_tables; i++) {
224 GstJpegQuantTable * const quant_table =
225 &priv->quant_tables.quant_tables[i];
227 iq_matrix->load_quantiser_table[i] = quant_table->valid;
228 if (!iq_matrix->load_quantiser_table[i])
/* Only 8-bit quantizer precision is valid for baseline JPEG */
231 g_assert(quant_table->quant_precision == 0);
232 for (j = 0; j < GST_JPEG_MAX_QUANT_ELEMENTS; j++)
233 iq_matrix->quantiser_table[i][j] = quant_table->quant_table[j];
234 iq_matrix->load_quantiser_table[i] = 1;
235 quant_table->valid = FALSE;
/* fill_huffman_table: builds the VA Huffman-table buffer from the
 * parsed (or default) DHT tables. A slot is loaded only when both its
 * DC and AC tables are valid; the pad bytes are cleared explicitly. */
242 GstVaapiDecoderJpeg *decoder,
243 GstVaapiPicture *picture
246 GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
247 GstJpegHuffmanTables * const huf_tables = &priv->huf_tables;
248 VAHuffmanTableBufferJPEGBaseline *huffman_table;
/* No DHT seen yet: fall back to the spec's default tables */
251 if (!priv->has_huf_table)
252 gst_jpeg_get_default_huffman_tables(&priv->huf_tables);
254 picture->huf_table = GST_VAAPI_HUFFMAN_TABLE_NEW(JPEGBaseline, decoder);
255 g_assert(picture->huf_table);
256 huffman_table = picture->huf_table->param;
258 num_tables = MIN(G_N_ELEMENTS(huffman_table->huffman_table),
259 GST_JPEG_MAX_SCAN_COMPONENTS);
261 for (i = 0; i < num_tables; i++) {
262 huffman_table->load_huffman_table[i] =
263 huf_tables->dc_tables[i].valid && huf_tables->ac_tables[i].valid;
264 if (!huffman_table->load_huffman_table[i])
267 memcpy(huffman_table->huffman_table[i].num_dc_codes,
268 huf_tables->dc_tables[i].huf_bits,
269 sizeof(huffman_table->huffman_table[i].num_dc_codes));
270 memcpy(huffman_table->huffman_table[i].dc_values,
271 huf_tables->dc_tables[i].huf_values,
272 sizeof(huffman_table->huffman_table[i].dc_values));
273 memcpy(huffman_table->huffman_table[i].num_ac_codes,
274 huf_tables->ac_tables[i].huf_bits,
275 sizeof(huffman_table->huffman_table[i].num_ac_codes));
276 memcpy(huffman_table->huffman_table[i].ac_values,
277 huf_tables->ac_tables[i].huf_values,
278 sizeof(huffman_table->huffman_table[i].ac_values));
279 memset(huffman_table->huffman_table[i].pad,
281 sizeof(huffman_table->huffman_table[i].pad));
/* Returns the largest horizontal sampling factor among all frame
 * components (Hmax in ITU T.81 terms), used for MCU-count math. */
287 get_max_horizontal_samples(GstJpegFrameHdr *frame_hdr)
289 guint i, max_factor = 0;
291 for (i = 0; i < frame_hdr->num_components; i++) {
292 if (frame_hdr->components[i].horizontal_factor > max_factor)
293 max_factor = frame_hdr->components[i].horizontal_factor;
/* Returns the largest vertical sampling factor among all frame
 * components (Vmax in ITU T.81 terms), used for MCU-count math. */
299 get_max_vertical_samples(GstJpegFrameHdr *frame_hdr)
301 guint i, max_factor = 0;
303 for (i = 0; i < frame_hdr->num_components; i++) {
304 if (frame_hdr->components[i].vertical_factor > max_factor)
305 max_factor = frame_hdr->components[i].vertical_factor;
/* decode_picture: handles an SOF segment. Maps the SOF marker to a VA
 * profile (only baseline supported), parses the frame header, ensures
 * the VA context, flushes any previous picture, allocates the new
 * VA picture and fills its parameter buffer. */
310 static GstVaapiDecoderStatus
312 GstVaapiDecoderJpeg *decoder,
319 GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
320 GstJpegFrameHdr * const frame_hdr = &priv->frame_hdr;
321 GstVaapiPicture *picture;
322 GstVaapiDecoderStatus status;
325 case GST_JPEG_MARKER_SOF_MIN:
326 priv->profile = GST_VAAPI_PROFILE_JPEG_BASELINE;
/* Any other SOF variant (progressive, lossless, ...) is rejected */
329 GST_ERROR("unsupported profile %d", profile);
330 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
333 memset(frame_hdr, 0, sizeof(*frame_hdr));
334 if (!gst_jpeg_parse_frame_hdr(frame_hdr, buf, buf_size, 0)) {
335 GST_ERROR("failed to parse image");
336 return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
338 priv->height = frame_hdr->height;
339 priv->width = frame_hdr->width;
341 status = ensure_context(decoder);
342 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
343 GST_ERROR("failed to reset context");
/* Finish the previous picture before starting a new frame */
347 if (priv->current_picture && !decode_current_picture(decoder))
348 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
350 picture = GST_VAAPI_PICTURE_NEW(JPEGBaseline, decoder);
352 GST_ERROR("failed to allocate picture");
353 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
/* priv->current_picture takes ownership; drop the local ref */
355 gst_vaapi_picture_replace(&priv->current_picture, picture);
356 gst_vaapi_picture_unref(picture);
358 if (!fill_picture(decoder, picture, frame_hdr))
359 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
361 /* Update presentation time */
363 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses a DHT segment into priv->huf_tables and records that
 * stream-supplied Huffman tables are available. */
366 static GstVaapiDecoderStatus
367 decode_huffman_table(
368 GstVaapiDecoderJpeg *decoder,
373 GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
375 if (!gst_jpeg_parse_huffman_table(&priv->huf_tables, buf, buf_size, 0)) {
376 GST_DEBUG("failed to parse Huffman table");
377 return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
379 priv->has_huf_table = TRUE;
380 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* decode_quant_table: parses a DQT segment into priv->quant_tables and
 * records that stream-supplied quantization tables are available. */
383 static GstVaapiDecoderStatus
385 GstVaapiDecoderJpeg *decoder,
390 GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
392 if (!gst_jpeg_parse_quant_table(&priv->quant_tables, buf, buf_size, 0)) {
393 GST_DEBUG("failed to parse quantization table");
394 return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
396 priv->has_quant_table = TRUE;
397 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses a DRI segment into priv->mcu_restart (restart interval in
 * MCUs), later copied into the VA slice parameters. */
400 static GstVaapiDecoderStatus
401 decode_restart_interval(
402 GstVaapiDecoderJpeg *decoder,
407 GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
409 if (!gst_jpeg_parse_restart_interval(&priv->mcu_restart, buf, buf_size, 0)) {
410 GST_DEBUG("failed to parse restart interval");
411 return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
413 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* decode_scan: handles one SOS header + its entropy-coded data.
 * Loads quantization and Huffman tables into the current picture,
 * parses the scan header, and appends a VA slice whose parameters
 * (component/table selectors, restart interval, MCU count) are derived
 * from the scan and frame headers. */
416 static GstVaapiDecoderStatus
418 GstVaapiDecoderJpeg *decoder,
420 guint scan_header_size,
422 guint scan_data_size)
424 GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
425 GstVaapiPicture *picture = priv->current_picture;
426 VASliceParameterBufferJPEGBaseline *slice_param;
427 GstVaapiSlice *gst_slice;
428 guint total_h_samples, total_v_samples;
429 GstJpegScanHdr scan_hdr;
/* A scan without a preceding SOF/picture is a stream error */
433 GST_ERROR("There is no VAPicture before decoding scan.");
434 return GST_VAAPI_DECODER_STATUS_ERROR_INVALID_SURFACE;
437 if (!fill_quantization_table(decoder, picture)) {
438 GST_ERROR("failed to fill in quantization table");
439 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
442 if (!fill_huffman_table(decoder, picture)) {
443 GST_ERROR("failed to fill in huffman table");
444 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
447 memset(&scan_hdr, 0, sizeof(scan_hdr));
448 if (!gst_jpeg_parse_scan_hdr(&scan_hdr, scan_header, scan_header_size, 0)) {
449 GST_DEBUG("Jpeg parsed scan failed.");
450 return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
453 gst_slice = GST_VAAPI_SLICE_NEW(JPEGBaseline, decoder, scan_data, scan_data_size);
/* Picture takes ownership of the slice */
454 gst_vaapi_picture_add_slice(picture, gst_slice);
456 slice_param = gst_slice->param;
457 slice_param->num_components = scan_hdr.num_components;
458 for (i = 0; i < scan_hdr.num_components; i++) {
459 slice_param->components[i].component_selector =
460 scan_hdr.components[i].component_selector;
461 slice_param->components[i].dc_table_selector =
462 scan_hdr.components[i].dc_selector;
463 slice_param->components[i].ac_table_selector =
464 scan_hdr.components[i].ac_selector;
466 slice_param->restart_interval = priv->mcu_restart;
467 if (scan_hdr.num_components == 1) { /*non-interleaved*/
468 slice_param->slice_horizontal_position = 0;
469 slice_param->slice_vertical_position = 0;
/* NOTE(review): the /8 and /16 divisions truncate instead of
 * rounding up, which undercounts MCUs for dimensions that are not
 * multiples of the MCU size; the /16 path also hard-codes 2x2
 * chroma subsampling rather than using the parsed factors. */
471 if (slice_param->components[0].component_selector == priv->frame_hdr.components[0].identifier) {
472 slice_param->num_mcus = (priv->frame_hdr.width/8)*(priv->frame_hdr.height/8);
473 } else { /*Cr, Cb mcu numbers*/
474 slice_param->num_mcus = (priv->frame_hdr.width/16)*(priv->frame_hdr.height/16);
476 } else { /* interleaved */
477 slice_param->slice_horizontal_position = 0;
478 slice_param->slice_vertical_position = 0;
479 total_v_samples = get_max_vertical_samples(&priv->frame_hdr);
480 total_h_samples = get_max_horizontal_samples(&priv->frame_hdr);
/* ceil(width / (Hmax*8)) * ceil(height / (Vmax*8)) */
481 slice_param->num_mcus = ((priv->frame_hdr.width + total_h_samples*8 - 1)/(total_h_samples*8)) *
482 ((priv->frame_hdr.height + total_v_samples*8 -1)/(total_v_samples*8));
485 if (picture->slices && picture->slices->len)
486 return GST_VAAPI_DECODER_STATUS_SUCCESS;
487 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
/* Walks all JPEG marker segments in the buffer, dispatching each to the
 * appropriate decode_* helper. A pending SOS scan is flushed either
 * when the next marker is reached or at EOI; EOI also submits the
 * completed picture. */
490 static GstVaapiDecoderStatus
491 decode_buffer(GstVaapiDecoderJpeg *decoder, GstBuffer *buffer)
493 GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
494 GstVaapiDecoderStatus status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
495 GstJpegMarkerSegment seg;
496 GstJpegScanSegment scan_seg;
502 buf = GST_BUFFER_DATA(buffer);
503 buf_size = GST_BUFFER_SIZE(buffer);
/* NOTE(review): `&&` looks suspect -- a NULL buf with non-zero size,
 * or non-NULL buf with zero size, slips through; `||` seems intended. */
504 if (!buf && buf_size == 0)
505 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
507 memset(&scan_seg, 0, sizeof(scan_seg));
509 pts = GST_BUFFER_TIMESTAMP(buffer);
511 while (gst_jpeg_parse(&seg, buf, buf_size, ofs)) {
/* NOTE(review): typo in debug string ("to short" -> "too short") */
513 GST_DEBUG("buffer to short for parsing");
514 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
518 /* Decode scan, if complete */
519 if (seg.marker == GST_JPEG_MARKER_EOI && scan_seg.header_size > 0) {
520 scan_seg.data_size = seg.offset - scan_seg.data_offset;
521 scan_seg.is_valid = TRUE;
523 if (scan_seg.is_valid) {
524 status = decode_scan(
526 buf + scan_seg.header_offset,
527 scan_seg.header_size,
528 buf + scan_seg.data_offset,
531 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
533 memset(&scan_seg, 0, sizeof(scan_seg));
537 switch (seg.marker) {
538 case GST_JPEG_MARKER_SOI:
/* New image: forget tables/restart interval from previous one */
539 priv->has_quant_table = FALSE;
540 priv->has_huf_table = FALSE;
541 priv->mcu_restart = 0;
542 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
544 case GST_JPEG_MARKER_EOI:
545 if (decode_current_picture(decoder)) {
546 /* Get out of the loop, trailing data is not needed */
547 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
550 status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
552 case GST_JPEG_MARKER_DHT:
553 status = decode_huffman_table(decoder, buf + seg.offset, seg.size);
555 case GST_JPEG_MARKER_DQT:
556 status = decode_quant_table(decoder, buf + seg.offset, seg.size);
558 case GST_JPEG_MARKER_DRI:
559 status = decode_restart_interval(decoder, buf + seg.offset, seg.size);
561 case GST_JPEG_MARKER_DAC:
562 GST_ERROR("unsupported arithmetic coding mode");
563 status = GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
565 case GST_JPEG_MARKER_SOS:
/* Remember where the scan header/data live; decoded later */
566 scan_seg.header_offset = seg.offset;
567 scan_seg.header_size = seg.size;
568 scan_seg.data_offset = seg.offset + seg.size;
569 scan_seg.data_size = 0;
/* Restart markers (RSTn) live inside entropy-coded data */
574 if (seg.marker >= GST_JPEG_MARKER_RST_MIN &&
575 seg.marker <= GST_JPEG_MARKER_RST_MAX) {
/* Frame headers (SOFn) start a new picture */
581 if (seg.marker >= GST_JPEG_MARKER_SOF_MIN &&
582 seg.marker <= GST_JPEG_MARKER_SOF_MAX) {
583 status = decode_picture(
586 buf + seg.offset, seg.size,
592 /* Application segments */
593 if (seg.marker >= GST_JPEG_MARKER_APP_MIN &&
594 seg.marker <= GST_JPEG_MARKER_APP_MAX) {
595 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
599 GST_WARNING("unsupported marker (0x%02x)", seg.marker);
600 status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
604 /* Append entropy coded segments */
606 scan_seg.data_size = seg.offset - scan_seg.data_offset;
608 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* GstVaapiDecoder::decode vfunc entry point: lazily opens the decoder
 * on the first buffer, then delegates to decode_buffer(). */
615 GstVaapiDecoderStatus
616 gst_vaapi_decoder_jpeg_decode(GstVaapiDecoder *base, GstBuffer *buffer)
618 GstVaapiDecoderJpeg * const decoder = GST_VAAPI_DECODER_JPEG(base);
619 GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
621 if (!priv->is_opened) {
622 priv->is_opened = gst_vaapi_decoder_jpeg_open(decoder, buffer);
623 if (!priv->is_opened)
624 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
626 return decode_buffer(decoder, buffer);
/* GObject finalize: tears down decoder state, then chains up. */
630 gst_vaapi_decoder_jpeg_finalize(GObject *object)
632 GstVaapiDecoderJpeg * const decoder = GST_VAAPI_DECODER_JPEG(object);
634 gst_vaapi_decoder_jpeg_destroy(decoder);
636 G_OBJECT_CLASS(gst_vaapi_decoder_jpeg_parent_class)->finalize(object);
/* GObject constructed: chains up, then runs the decoder's own creation
 * check; the result gates gst_vaapi_decoder_jpeg_new(). */
640 gst_vaapi_decoder_jpeg_constructed(GObject *object)
642 GstVaapiDecoderJpeg * const decoder = GST_VAAPI_DECODER_JPEG(object);
643 GstVaapiDecoderJpegPrivate * const priv = decoder->priv;
644 GObjectClass *parent_class;
646 parent_class = G_OBJECT_CLASS(gst_vaapi_decoder_jpeg_parent_class);
647 if (parent_class->constructed)
648 parent_class->constructed(object);
650 priv->is_constructed = gst_vaapi_decoder_jpeg_create(decoder);
/* Class init: registers the private struct and wires the GObject and
 * GstVaapiDecoder virtual functions. */
654 gst_vaapi_decoder_jpeg_class_init(GstVaapiDecoderJpegClass *klass)
656 GObjectClass * const object_class = G_OBJECT_CLASS(klass);
657 GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
659 g_type_class_add_private(klass, sizeof(GstVaapiDecoderJpegPrivate));
661 object_class->finalize = gst_vaapi_decoder_jpeg_finalize;
662 object_class->constructed = gst_vaapi_decoder_jpeg_constructed;
664 decoder_class->decode = gst_vaapi_decoder_jpeg_decode;
/* Instance init: attaches the private data area and resets every field
 * to its pre-open default (baseline profile, no tables, no picture). */
668 gst_vaapi_decoder_jpeg_init(GstVaapiDecoderJpeg *decoder)
670 GstVaapiDecoderJpegPrivate *priv;
672 priv = GST_VAAPI_DECODER_JPEG_GET_PRIVATE(decoder);
673 decoder->priv = priv;
674 priv->profile = GST_VAAPI_PROFILE_JPEG_BASELINE;
677 priv->current_picture = NULL;
678 priv->has_huf_table = FALSE;
679 priv->has_quant_table = FALSE;
680 priv->mcu_restart = 0;
681 priv->is_opened = FALSE;
682 priv->profile_changed = TRUE;
683 priv->is_constructed = FALSE;
684 memset(&priv->frame_hdr, 0, sizeof(priv->frame_hdr));
685 memset(&priv->huf_tables, 0, sizeof(priv->huf_tables));
686 memset(&priv->quant_tables, 0, sizeof(priv->quant_tables));
690 * gst_vaapi_decoder_jpeg_new:
691 * @display: a #GstVaapiDisplay
692 * @caps: a #GstCaps holding codec information
694 * Creates a new #GstVaapiDecoder for JPEG decoding. The @caps can
695 * hold extra information like codec-data and picture coded size.
697 * Return value: the newly allocated #GstVaapiDecoder object
/* Public constructor: validates arguments, allocates the GObject, and
 * fails (returning NULL after unref) if constructed() did not succeed.
 * Caller owns the returned reference. */
700 gst_vaapi_decoder_jpeg_new(GstVaapiDisplay *display, GstCaps *caps)
702 GstVaapiDecoderJpeg *decoder;
704 g_return_val_if_fail(GST_VAAPI_IS_DISPLAY(display), NULL);
705 g_return_val_if_fail(GST_IS_CAPS(caps), NULL);
707 decoder = g_object_new(
708 GST_VAAPI_TYPE_DECODER_JPEG,
713 if (!decoder->priv->is_constructed) {
714 g_object_unref(decoder);
717 return GST_VAAPI_DECODER_CAST(decoder);