2 * Copyright (C) 2022 Igalia, S.L.
3 * Author: Víctor Jáquez <vjaquez@igalia.com>
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
15 * You should have received a copy of the GNU Library General Public
17 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
22 * SECTION:element-vajpegdec
24 * @short_description: A VA-API based JPEG video decoder
26 * vajpegdec decodes JPEG images to VA surfaces using the installed
27 * and chosen [VA-API](https://01.org/linuxmedia/vaapi) driver.
29 * The decoding surfaces can be mapped onto main memory as video
32 * ## Example launch line
34 * gst-launch-1.0 filesrc location=sample.mjpg ! parsebin ! vajpegdec ! autovideosink
45 #include "gstvajpegdec.h"
47 #include <gst/va/gstvavideoformat.h>
49 #include "gstvabasedec.h"
GST_DEBUG_CATEGORY_STATIC (gst_va_jpegdec_debug);
#ifndef GST_DISABLE_GST_DEBUG
#define GST_CAT_DEFAULT gst_va_jpegdec_debug
#else
#define GST_CAT_DEFAULT NULL
#endif

/* Plain-cast accessors: GstVaJpegDec types are registered dynamically
 * (per render device), so the usual G_DECLARE/G_DEFINE macros are not
 * used here. */
#define GST_VA_JPEG_DEC(obj) ((GstVaJpegDec *) obj)
/* Fixed typo: class type is GstVaJpegDecClass (matches the typedef below),
 * not GstVaJPEGDecClass. */
#define GST_VA_JPEG_DEC_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), G_TYPE_FROM_INSTANCE (obj), GstVaJpegDecClass))
#define GST_VA_JPEG_DEC_CLASS(klass) ((GstVaJpegDecClass *) klass)
62 typedef struct _GstVaJpegDec GstVaJpegDec;
63 typedef struct _GstVaJpegDecClass GstVaJpegDecClass;
65 struct _GstVaJpegDecClass
67 GstVaBaseDecClass parent_class;
74 GstVaDecodePicture *pic;
77 static GstElementClass *parent_class = NULL;
80 static const gchar *src_caps_str =
81 GST_VIDEO_CAPS_MAKE_WITH_FEATURES (GST_CAPS_FEATURE_MEMORY_VA,
83 GST_VIDEO_CAPS_MAKE ("{ NV12 }");
86 static const gchar *sink_caps_str = "image/jpeg";
89 _get_profile (GstJpegMarker marker)
92 case GST_JPEG_MARKER_SOF0:
93 return VAProfileJPEGBaseline;
101 /* taken from MediaSDK */
102 #define RT_FORMAT_RGB (VA_RT_FORMAT_RGB16 | VA_RT_FORMAT_RGB32)
105 static const struct sampling_rtformat {
106 const gchar *sampling;
108 } sampling_rtformat_map[] = {
109 { "RGB", RT_FORMAT_RGB },
110 { "YCbCr-4:4:4", VA_RT_FORMAT_YUV444 },
111 { "YCbCr-4:2:2", VA_RT_FORMAT_YUV422 },
112 { "YCbCr-4:2:0", VA_RT_FORMAT_YUV420 },
113 { "GRAYSCALE", VA_RT_FORMAT_YUV400 },
114 { "YCbCr-4:1:1", VA_RT_FORMAT_YUV411 },
119 _get_rt_format (GstCaps * caps)
121 GstStructure *structure;
122 const gchar *sampling;
125 structure = gst_caps_get_structure (caps, 0);
126 sampling = gst_structure_get_string (structure, "sampling");
128 for (i = 0; i < G_N_ELEMENTS (sampling_rtformat_map); i++) {
129 if (g_strcmp0 (sampling, sampling_rtformat_map[i].sampling) == 0)
130 return sampling_rtformat_map[i].rt_format;
137 gst_va_jpeg_dec_new_picture (GstJpegDecoder * decoder,
138 GstVideoCodecFrame * frame, GstJpegMarker marker,
139 GstJpegFrameHdr * frame_hdr)
141 GstVaJpegDec *self = GST_VA_JPEG_DEC (decoder);
142 GstVaBaseDec *base = GST_VA_BASE_DEC (decoder);
145 VAPictureParameterBufferJPEGBaseline pic_param;
146 guint32 i, rt_format;
148 GST_LOG_OBJECT (self, "new picture");
150 g_clear_pointer (&self->pic, gst_va_decode_picture_free);
152 profile = _get_profile (marker);
153 if (profile == VAProfileNone)
154 return GST_FLOW_NOT_NEGOTIATED;
156 /* use caps to avoid re-parsing app14 */
157 rt_format = _get_rt_format (decoder->input_state->caps);
159 return GST_FLOW_NOT_NEGOTIATED;
161 if (!gst_va_decoder_config_is_equal (base->decoder, profile, rt_format,
162 frame_hdr->width, frame_hdr->height)) {
163 base->profile = profile;
164 base->rt_format = rt_format;
165 base->width = frame_hdr->width;
166 base->height = frame_hdr->height;
168 base->need_negotiation = TRUE;
169 GST_INFO_OBJECT (self, "Format changed to %s [%x] (%dx%d)",
170 gst_va_profile_name (profile), rt_format, base->width, base->height);
173 ret = gst_va_base_dec_prepare_output_frame (base, frame);
174 if (ret != GST_FLOW_OK) {
175 GST_ERROR_OBJECT (self, "Failed to allocate output buffer: %s",
176 gst_flow_get_name (ret));
180 self->pic = gst_va_decode_picture_new (base->decoder, frame->output_buffer);
183 pic_param = (VAPictureParameterBufferJPEGBaseline) {
184 .picture_width = frame_hdr->width,
185 .picture_height = frame_hdr->height,
187 .num_components = frame_hdr->num_components,
188 .color_space = (rt_format == RT_FORMAT_RGB) ? 1 : 0, /* TODO: BGR */
189 .rotation = VA_ROTATION_NONE,
193 for (i = 0; i < frame_hdr->num_components; i++) {
194 pic_param.components[i].component_id = frame_hdr->components[i].identifier;
195 pic_param.components[i].h_sampling_factor =
196 frame_hdr->components[i].horizontal_factor;
197 pic_param.components[i].v_sampling_factor =
198 frame_hdr->components[i].vertical_factor;
199 pic_param.components[i].quantiser_table_selector =
200 frame_hdr->components[i].quant_table_selector;
203 if (!gst_va_decoder_add_param_buffer (base->decoder, self->pic,
204 VAPictureParameterBufferType, &pic_param, sizeof (pic_param)))
205 return GST_FLOW_ERROR;
211 gst_va_jpeg_dec_decode_scan (GstJpegDecoder * decoder,
212 GstJpegDecoderScan * scan, const guint8 * buffer, guint32 size)
214 GstVaJpegDec *self = GST_VA_JPEG_DEC (decoder);
215 GstVaBaseDec *base = GST_VA_BASE_DEC (decoder);
216 VAHuffmanTableBufferJPEGBaseline huff = { 0, };
217 VAIQMatrixBufferJPEGBaseline quant = { 0, };
218 VASliceParameterBufferJPEGBaseline slice_param;
221 GST_LOG_OBJECT (self, "decoding slice");
223 for (i = 0; i < G_N_ELEMENTS (quant.load_quantiser_table); i++) {
224 quant.load_quantiser_table[i] =
225 scan->quantization_tables->quant_tables[i].valid;
226 if (!scan->quantization_tables->quant_tables[i].valid)
228 for (j = 0; j < GST_JPEG_MAX_QUANT_ELEMENTS; j++) {
229 quant.quantiser_table[i][j] =
230 scan->quantization_tables->quant_tables[i].quant_table[j];
233 /* invalidate table */
234 scan->quantization_tables->quant_tables[i].valid = FALSE;
237 if (!gst_va_decoder_add_param_buffer (base->decoder, self->pic,
238 VAIQMatrixBufferType, &quant, sizeof (quant)))
239 return GST_FLOW_ERROR;
241 for (i = 0; i < G_N_ELEMENTS (huff.huffman_table); i++) {
242 huff.load_huffman_table[i] = scan->huffman_tables->dc_tables[i].valid
243 && scan->huffman_tables->ac_tables[i].valid;
245 if (!huff.load_huffman_table[i])
248 memcpy (huff.huffman_table[i].num_dc_codes,
249 scan->huffman_tables->dc_tables[i].huf_bits,
250 sizeof (huff.huffman_table[i].num_dc_codes));
252 memcpy (huff.huffman_table[i].dc_values,
253 scan->huffman_tables->dc_tables[i].huf_values,
254 sizeof (huff.huffman_table[i].dc_values));
256 memcpy (huff.huffman_table[i].num_ac_codes,
257 scan->huffman_tables->ac_tables[i].huf_bits,
258 sizeof (huff.huffman_table[i].num_ac_codes));
260 memcpy (huff.huffman_table[i].ac_values,
261 scan->huffman_tables->ac_tables[i].huf_values,
262 sizeof (huff.huffman_table[i].ac_values));
266 /* invalidate table */
267 for (i = 0; i < G_N_ELEMENTS (scan->huffman_tables->dc_tables); i++)
268 scan->huffman_tables->dc_tables[i].valid = FALSE;
269 for (i = 0; i < G_N_ELEMENTS (scan->huffman_tables->ac_tables); i++)
270 scan->huffman_tables->ac_tables[i].valid = FALSE;
272 if (!gst_va_decoder_add_param_buffer (base->decoder, self->pic,
273 VAHuffmanTableBufferType, &huff, sizeof (huff)))
274 return GST_FLOW_ERROR;
277 slice_param = (VASliceParameterBufferJPEGBaseline) {
278 .slice_data_size = size,
279 .slice_data_offset = 0,
280 .slice_data_flag = VA_SLICE_DATA_FLAG_ALL,
281 .slice_horizontal_position = 0,
282 .slice_vertical_position = 0,
283 .restart_interval = scan->restart_interval,
284 .num_mcus = scan->mcu_rows_in_scan * scan->mcus_per_row,
285 .num_components = scan->scan_hdr->num_components,
289 for (i = 0; i < scan->scan_hdr->num_components; i++) {
290 slice_param.components[i].component_selector =
291 scan->scan_hdr->components[i].component_selector;
292 slice_param.components[i].dc_table_selector =
293 scan->scan_hdr->components[i].dc_selector;
294 slice_param.components[i].ac_table_selector =
295 scan->scan_hdr->components[i].ac_selector;
298 if (!gst_va_decoder_add_slice_buffer (base->decoder, self->pic, &slice_param,
299 sizeof (slice_param), (void *) buffer, size))
300 return GST_FLOW_ERROR;
306 gst_va_jpeg_dec_end_picture (GstJpegDecoder * decoder)
308 GstVaJpegDec *self = GST_VA_JPEG_DEC (decoder);
309 GstVaBaseDec *base = GST_VA_BASE_DEC (decoder);
311 GST_LOG_OBJECT (self, "end picture");
313 if (!gst_va_decoder_decode (base->decoder, self->pic))
314 return GST_FLOW_ERROR;
319 gst_va_jpeg_dec_output_picture (GstJpegDecoder * decoder,
320 GstVideoCodecFrame * frame)
322 GstVaBaseDec *base = GST_VA_BASE_DEC (decoder);
323 GstVideoDecoder *vdec = GST_VIDEO_DECODER (decoder);
325 if (gst_va_base_dec_process_output (base, frame, 0))
326 return gst_video_decoder_finish_frame (vdec, frame);
327 return GST_FLOW_ERROR;
330 /* @XXX: Checks for drivers that can do color convertion to nv12
331 * regardless the input chroma, while it's YUV. */
333 has_internal_nv12_color_convertion (GstVaBaseDec * base, GstVideoFormat format)
335 if (!GST_VA_DISPLAY_IS_IMPLEMENTATION (base->display, INTEL_I965)
336 && !GST_VA_DISPLAY_IS_IMPLEMENTATION (base->display, INTEL_IHD))
339 if (base->rt_format != VA_RT_FORMAT_YUV420
340 && base->rt_format != VA_RT_FORMAT_YUV422)
343 if (format != GST_VIDEO_FORMAT_NV12)
350 gst_va_jpeg_dec_negotiate (GstVideoDecoder * decoder)
352 GstVaBaseDec *base = GST_VA_BASE_DEC (decoder);
353 GstVaJpegDec *self = GST_VA_JPEG_DEC (decoder);
354 GstJpegDecoder *jpegdec = GST_JPEG_DECODER (decoder);
355 GstVideoFormat format;
356 GstCapsFeatures *capsfeatures = NULL;
358 /* Ignore downstream renegotiation request. */
359 if (!base->need_negotiation)
362 base->need_negotiation = FALSE;
364 if (GST_VA_DISPLAY_IS_IMPLEMENTATION (base->display, INTEL_I965))
365 base->hacks = GST_VA_HACK_SURFACE_NO_FOURCC;
367 if (gst_va_decoder_is_open (base->decoder)
368 && !gst_va_decoder_close (base->decoder))
371 if (!gst_va_decoder_open (base->decoder, base->profile, base->rt_format))
374 if (!gst_va_decoder_set_frame_size (base->decoder, base->width, base->height))
377 if (base->output_state)
378 gst_video_codec_state_unref (base->output_state);
380 /* hack for RGBP rt_format, because only RGBP is exposed as pixel
382 if (base->rt_format == RT_FORMAT_RGB)
383 base->rt_format = VA_RT_FORMAT_RGBP;
385 gst_va_base_dec_get_preferred_format_and_caps_features (base, &format,
387 if (format == GST_VIDEO_FORMAT_UNKNOWN)
390 if (!has_internal_nv12_color_convertion (base, format)
391 && (gst_va_chroma_from_video_format (format) != base->rt_format))
394 /* hack for RGBP rt_format */
395 if (base->rt_format == VA_RT_FORMAT_RGBP)
396 base->rt_format = RT_FORMAT_RGB;
399 gst_video_decoder_set_output_state (decoder, format,
400 base->width, base->height, jpegdec->input_state);
402 base->output_state->caps = gst_video_info_to_caps (&base->output_state->info);
404 gst_caps_set_features_simple (base->output_state->caps, capsfeatures);
406 GST_INFO_OBJECT (self, "Negotiated caps %" GST_PTR_FORMAT,
407 base->output_state->caps);
409 return GST_VIDEO_DECODER_CLASS (parent_class)->negotiate (decoder);
413 gst_va_jpeg_dec_dispose (GObject * object)
415 GstVaJpegDec *self = GST_VA_JPEG_DEC (object);
417 gst_va_base_dec_close (GST_VIDEO_DECODER (object));
418 g_clear_pointer (&self->pic, gst_va_decode_picture_free);
420 G_OBJECT_CLASS (parent_class)->dispose (object);
424 gst_va_jpeg_dec_class_init (gpointer g_class, gpointer class_data)
426 GstCaps *src_doc_caps, *sink_doc_caps;
427 GObjectClass *gobject_class = G_OBJECT_CLASS (g_class);
428 GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
429 GstJpegDecoderClass *jpegdecoder_class = GST_JPEG_DECODER_CLASS (g_class);
430 GstVideoDecoderClass *decoder_class = GST_VIDEO_DECODER_CLASS (g_class);
431 struct CData *cdata = class_data;
434 if (cdata->description) {
435 long_name = g_strdup_printf ("VA-API JPEG Decoder in %s",
438 long_name = g_strdup ("VA-API JPEG Decoder");
441 gst_element_class_set_metadata (element_class, long_name,
442 "Codec/Decoder/Image/Hardware",
443 "VA-API based JPEG image decoder",
444 "Víctor Jáquez <vjaquez@igalia.com>");
446 sink_doc_caps = gst_caps_from_string (sink_caps_str);
447 src_doc_caps = gst_caps_from_string (src_caps_str);
449 parent_class = g_type_class_peek_parent (g_class);
451 gobject_class->dispose = gst_va_jpeg_dec_dispose;
453 decoder_class->negotiate = GST_DEBUG_FUNCPTR (gst_va_jpeg_dec_negotiate);
455 jpegdecoder_class->decode_scan =
456 GST_DEBUG_FUNCPTR (gst_va_jpeg_dec_decode_scan);
457 jpegdecoder_class->new_picture =
458 GST_DEBUG_FUNCPTR (gst_va_jpeg_dec_new_picture);
459 jpegdecoder_class->end_picture =
460 GST_DEBUG_FUNCPTR (gst_va_jpeg_dec_end_picture);
461 jpegdecoder_class->output_picture =
462 GST_DEBUG_FUNCPTR (gst_va_jpeg_dec_output_picture);
465 * GstVaJpegDec:device-path:
467 * It shows the DRM device path used for the VA operation, if any.
469 gst_va_base_dec_class_init (GST_VA_BASE_DEC_CLASS (g_class), JPEG,
470 cdata->render_device_path, cdata->sink_caps, cdata->src_caps,
471 src_doc_caps, sink_doc_caps);
474 g_free (cdata->description);
475 g_free (cdata->render_device_path);
476 gst_caps_unref (cdata->src_caps);
477 gst_caps_unref (cdata->sink_caps);
482 gst_va_jpeg_dec_init (GTypeInstance * instance, gpointer g_class)
484 GstVaBaseDec *base = GST_VA_BASE_DEC (instance);
486 gst_va_base_dec_init (base, GST_CAT_DEFAULT);
487 base->min_buffers = 1;
491 _register_debug_category (gpointer data)
493 GST_DEBUG_CATEGORY_INIT (gst_va_jpegdec_debug, "vajpegdec", 0,
500 _fixup_sink_caps (GstVaDisplay * display, GstCaps * caps)
502 if (GST_VA_DISPLAY_IS_IMPLEMENTATION (display, INTEL_I965)) {
505 GValue sampling = G_VALUE_INIT;
506 const char *sampling_list[] = { "YCbCr-4:2:0", "YCbCr-4:2:2" };
508 ret = gst_caps_copy (caps);
509 gst_caps_set_simple (ret, "colorspace", G_TYPE_STRING, "sYUV", NULL);
511 gst_value_list_init (&sampling, G_N_ELEMENTS (sampling_list));
512 for (i = 0; i < G_N_ELEMENTS (sampling_list); i++) {
513 GValue samp = G_VALUE_INIT;
514 g_value_init (&samp, G_TYPE_STRING);
515 g_value_set_string (&samp, sampling_list[i]);
516 gst_value_list_append_value (&sampling, &samp);
517 g_value_unset (&samp);
520 gst_caps_set_value (ret, "sampling", &sampling);
521 g_value_unset (&sampling);
524 return gst_caps_ref (caps);
528 _fixup_src_caps (GstVaDisplay * display, GstCaps * caps)
530 if (GST_VA_DISPLAY_IS_IMPLEMENTATION (display, INTEL_IHD)) {
536 ret = gst_caps_copy (caps);
538 len = gst_caps_get_size (ret);
539 for (i = 0; i < len; i++) {
540 s = gst_caps_get_structure (ret, i);
541 f = gst_caps_get_features (ret, i);
542 if (gst_caps_features_is_equal (f,
543 GST_CAPS_FEATURES_MEMORY_SYSTEM_MEMORY)) {
544 /* rgbp is not correctly mapped into memory */
546 GValue out = G_VALUE_INIT;
547 const GValue *in = gst_structure_get_value (s, "format");
549 size = gst_value_list_get_size (in);
550 gst_value_list_init (&out, size);
551 for (i = 0; i < size; i++) {
552 const GValue *fmt = gst_value_list_get_value (in, i);
553 if (g_strcmp0 (g_value_get_string (fmt), "RGBP") != 0)
554 gst_value_list_append_value (&out, fmt);
556 gst_structure_set_value (s, "format", &out);
557 g_value_unset (&out);
558 } else if (gst_caps_features_contains (f, GST_CAPS_FEATURE_MEMORY_DMABUF)) {
559 /* dmabuf exportation only handles NV12 */
560 gst_structure_set (s, "format", G_TYPE_STRING, "NV12", NULL);
565 } else if (GST_VA_DISPLAY_IS_IMPLEMENTATION (display, INTEL_I965)) {
570 ret = gst_caps_copy (caps);
572 len = gst_caps_get_size (ret);
573 for (i = 0; i < len; i++) {
574 s = gst_caps_get_structure (ret, i);
575 /* only NV12 works in this nigthmare */
576 gst_structure_set (s, "format", G_TYPE_STRING, "NV12", NULL);
581 return gst_caps_ref (caps);
585 gst_va_jpeg_dec_register (GstPlugin * plugin, GstVaDevice * device,
586 GstCaps * sink_caps, GstCaps * src_caps, guint rank)
588 static GOnce debug_once = G_ONCE_INIT;
590 GTypeInfo type_info = {
591 .class_size = sizeof (GstVaJpegDecClass),
592 .class_init = gst_va_jpeg_dec_class_init,
593 .instance_size = sizeof (GstVaJpegDec),
594 .instance_init = gst_va_jpeg_dec_init,
598 gchar *type_name, *feature_name;
600 g_return_val_if_fail (GST_IS_PLUGIN (plugin), FALSE);
601 g_return_val_if_fail (GST_IS_VA_DEVICE (device), FALSE);
602 g_return_val_if_fail (GST_IS_CAPS (sink_caps), FALSE);
603 g_return_val_if_fail (GST_IS_CAPS (src_caps), FALSE);
605 cdata = g_new (struct CData, 1);
606 cdata->description = NULL;
607 cdata->render_device_path = g_strdup (device->render_device_path);
608 cdata->sink_caps = _fixup_sink_caps (device->display, sink_caps);
609 cdata->src_caps = _fixup_src_caps (device->display, src_caps);
611 /* class data will be leaked if the element never gets instantiated */
612 GST_MINI_OBJECT_FLAG_SET (cdata->sink_caps,
613 GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
614 GST_MINI_OBJECT_FLAG_SET (src_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
616 type_info.class_data = cdata;
619 /* The first decoder to be registered should use a constant name,
620 * like vajpegdec, for any additional decoders, we create unique
621 * names, using inserting the render device name. */
622 if (device->index == 0) {
623 type_name = g_strdup ("GstVaJpegDec");
624 feature_name = g_strdup ("vajpegdec");
626 gchar *basename = g_path_get_basename (device->render_device_path);
627 type_name = g_strdup_printf ("GstVa%sJpegDec", basename);
628 feature_name = g_strdup_printf ("va%sjpegdec", basename);
629 cdata->description = basename;
631 /* lower rank for non-first device */
636 g_once (&debug_once, _register_debug_category, NULL);
638 type = g_type_register_static (GST_TYPE_JPEG_DECODER,
639 type_name, &type_info, 0);
641 ret = gst_element_register (plugin, feature_name, rank, type);
644 g_free (feature_name);