/*
 * gstvaapidecoder_h264.c - H.264 decoder
 *
 * Copyright (C) 2011-2014 Intel Corporation
 *   Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation; either version 2.1
 * of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free
 * Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301 USA
 */

/**
 * SECTION:gstvaapidecoder_h264
 * @short_description: H.264 decoder
 */
#include <gst/base/gstadapter.h>
#include <gst/codecparsers/gsth264parser.h>
#include "gstvaapidecoder_h264.h"
#include "gstvaapidecoder_objects.h"
#include "gstvaapidecoder_priv.h"
#include "gstvaapidisplay_priv.h"
#include "gstvaapiobject_priv.h"
#include "gstvaapiutils_h264_priv.h"

#define DEBUG 1
#include "gstvaapidebug.h"
/* Defined to 1 if strict ordering of DPB is needed. Only useful for debug */
#define USE_STRICT_DPB_ORDERING 0

typedef struct _GstVaapiDecoderH264Private    GstVaapiDecoderH264Private;
typedef struct _GstVaapiDecoderH264Class      GstVaapiDecoderH264Class;
typedef struct _GstVaapiFrameStore            GstVaapiFrameStore;
typedef struct _GstVaapiFrameStoreClass       GstVaapiFrameStoreClass;
typedef struct _GstVaapiParserInfoH264        GstVaapiParserInfoH264;
typedef struct _GstVaapiPictureH264           GstVaapiPictureH264;

// Indices into field_poc[]: 0 = TopFieldOrderCnt, 1 = BottomFieldOrderCnt
#define TOP_FIELD       0
#define BOTTOM_FIELD    1
56 /* ------------------------------------------------------------------------- */
57 /* --- H.264 Parser Info --- */
58 /* ------------------------------------------------------------------------- */
60 #define GST_VAAPI_PARSER_INFO_H264(obj) \
61 ((GstVaapiParserInfoH264 *)(obj))
63 struct _GstVaapiParserInfoH264 {
64 GstVaapiMiniObject parent_instance;
70 GstH264SliceHdr slice_hdr;
76 gst_vaapi_parser_info_h264_finalize(GstVaapiParserInfoH264 *pi)
78 switch (pi->nalu.type) {
79 case GST_H264_NAL_SEI:
81 g_array_unref(pi->data.sei);
88 static inline const GstVaapiMiniObjectClass *
89 gst_vaapi_parser_info_h264_class(void)
91 static const GstVaapiMiniObjectClass GstVaapiParserInfoH264Class = {
92 .size = sizeof(GstVaapiParserInfoH264),
93 .finalize = (GDestroyNotify)gst_vaapi_parser_info_h264_finalize
95 return &GstVaapiParserInfoH264Class;
98 static inline GstVaapiParserInfoH264 *
99 gst_vaapi_parser_info_h264_new(void)
101 return (GstVaapiParserInfoH264 *)
102 gst_vaapi_mini_object_new(gst_vaapi_parser_info_h264_class());
105 #define gst_vaapi_parser_info_h264_ref(pi) \
106 gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(pi))
108 #define gst_vaapi_parser_info_h264_unref(pi) \
109 gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(pi))
111 #define gst_vaapi_parser_info_h264_replace(old_pi_ptr, new_pi) \
112 gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_pi_ptr), \
113 (GstVaapiMiniObject *)(new_pi))
115 /* ------------------------------------------------------------------------- */
116 /* --- H.264 Pictures --- */
117 /* ------------------------------------------------------------------------- */
120 * Extended picture flags:
122 * @GST_VAAPI_PICTURE_FLAG_IDR: flag that specifies an IDR picture
123 * @GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE: flag that specifies
124 * "used for short-term reference"
125 * @GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE: flag that specifies
126 * "used for long-term reference"
127 * @GST_VAAPI_PICTURE_FLAGS_REFERENCE: mask covering any kind of
128 * reference picture (short-term reference or long-term reference)
131 GST_VAAPI_PICTURE_FLAG_IDR = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
133 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE = (
134 GST_VAAPI_PICTURE_FLAG_REFERENCE),
135 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE = (
136 GST_VAAPI_PICTURE_FLAG_REFERENCE | (GST_VAAPI_PICTURE_FLAG_LAST << 1)),
137 GST_VAAPI_PICTURE_FLAGS_REFERENCE = (
138 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
139 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
142 #define GST_VAAPI_PICTURE_IS_IDR(picture) \
143 (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR))
145 #define GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture) \
146 ((GST_VAAPI_PICTURE_FLAGS(picture) & \
147 GST_VAAPI_PICTURE_FLAGS_REFERENCE) == \
148 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE)
150 #define GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture) \
151 ((GST_VAAPI_PICTURE_FLAGS(picture) & \
152 GST_VAAPI_PICTURE_FLAGS_REFERENCE) == \
153 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE)
155 struct _GstVaapiPictureH264 {
156 GstVaapiPicture base;
158 GstH264SliceHdr *last_slice_hdr;
161 gint32 frame_num; // Original frame_num from slice_header()
162 gint32 frame_num_wrap; // Temporary for ref pic marking: FrameNumWrap
163 gint32 long_term_frame_idx; // Temporary for ref pic marking: LongTermFrameIdx
164 gint32 pic_num; // Temporary for ref pic marking: PicNum
165 gint32 long_term_pic_num; // Temporary for ref pic marking: LongTermPicNum
166 GstVaapiPictureH264 *other_field; // Temporary for ref pic marking: other field in the same frame store
167 guint output_flag : 1;
168 guint output_needed : 1;
171 GST_VAAPI_CODEC_DEFINE_TYPE(GstVaapiPictureH264, gst_vaapi_picture_h264);
174 gst_vaapi_picture_h264_destroy(GstVaapiPictureH264 *picture)
176 gst_vaapi_picture_destroy(GST_VAAPI_PICTURE(picture));
180 gst_vaapi_picture_h264_create(
181 GstVaapiPictureH264 *picture,
182 const GstVaapiCodecObjectConstructorArgs *args
185 if (!gst_vaapi_picture_create(GST_VAAPI_PICTURE(picture), args))
188 picture->field_poc[0] = G_MAXINT32;
189 picture->field_poc[1] = G_MAXINT32;
190 picture->output_needed = FALSE;
194 static inline GstVaapiPictureH264 *
195 gst_vaapi_picture_h264_new(GstVaapiDecoderH264 *decoder)
197 return (GstVaapiPictureH264 *)gst_vaapi_codec_object_new(
198 &GstVaapiPictureH264Class,
199 GST_VAAPI_CODEC_BASE(decoder),
200 NULL, sizeof(VAPictureParameterBufferH264),
206 gst_vaapi_picture_h264_set_reference(
207 GstVaapiPictureH264 *picture,
208 guint reference_flags,
214 GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
215 GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
217 if (!other_field || !(picture = picture->other_field))
219 GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
220 GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
223 static inline GstVaapiPictureH264 *
224 gst_vaapi_picture_h264_new_field(GstVaapiPictureH264 *picture)
226 g_return_val_if_fail(picture, NULL);
228 return (GstVaapiPictureH264 *)gst_vaapi_picture_new_field(&picture->base);
231 /* ------------------------------------------------------------------------- */
232 /* --- Frame Buffers (DPB) --- */
233 /* ------------------------------------------------------------------------- */
235 struct _GstVaapiFrameStore {
237 GstVaapiMiniObject parent_instance;
240 GstVaapiPictureH264 *buffers[2];
246 gst_vaapi_frame_store_finalize(gpointer object)
248 GstVaapiFrameStore * const fs = object;
251 for (i = 0; i < fs->num_buffers; i++)
252 gst_vaapi_picture_replace(&fs->buffers[i], NULL);
255 static GstVaapiFrameStore *
256 gst_vaapi_frame_store_new(GstVaapiPictureH264 *picture)
258 GstVaapiFrameStore *fs;
260 static const GstVaapiMiniObjectClass GstVaapiFrameStoreClass = {
261 sizeof(GstVaapiFrameStore),
262 gst_vaapi_frame_store_finalize
265 fs = (GstVaapiFrameStore *)
266 gst_vaapi_mini_object_new(&GstVaapiFrameStoreClass);
270 fs->structure = picture->structure;
271 fs->buffers[0] = gst_vaapi_picture_ref(picture);
272 fs->buffers[1] = NULL;
274 fs->output_needed = picture->output_needed;
279 gst_vaapi_frame_store_add(GstVaapiFrameStore *fs, GstVaapiPictureH264 *picture)
283 g_return_val_if_fail(fs->num_buffers == 1, FALSE);
284 g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FRAME(picture), FALSE);
285 g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture), FALSE);
287 gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], picture);
288 if (picture->output_flag) {
289 picture->output_needed = TRUE;
293 fs->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
295 field = picture->structure == GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD ?
296 TOP_FIELD : BOTTOM_FIELD;
297 g_return_val_if_fail(fs->buffers[0]->field_poc[field] == G_MAXINT32, FALSE);
298 fs->buffers[0]->field_poc[field] = picture->field_poc[field];
299 g_return_val_if_fail(picture->field_poc[!field] == G_MAXINT32, FALSE);
300 picture->field_poc[!field] = fs->buffers[0]->field_poc[!field];
305 gst_vaapi_frame_store_split_fields(GstVaapiFrameStore *fs)
307 GstVaapiPictureH264 * const first_field = fs->buffers[0];
308 GstVaapiPictureH264 *second_field;
310 g_return_val_if_fail(fs->num_buffers == 1, FALSE);
312 first_field->base.structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
313 GST_VAAPI_PICTURE_FLAG_SET(first_field, GST_VAAPI_PICTURE_FLAG_INTERLACED);
315 second_field = gst_vaapi_picture_h264_new_field(first_field);
318 gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], second_field);
319 gst_vaapi_picture_unref(second_field);
321 second_field->frame_num = first_field->frame_num;
322 second_field->field_poc[0] = first_field->field_poc[0];
323 second_field->field_poc[1] = first_field->field_poc[1];
324 second_field->output_flag = first_field->output_flag;
325 if (second_field->output_flag) {
326 second_field->output_needed = TRUE;
332 static inline gboolean
333 gst_vaapi_frame_store_has_frame(GstVaapiFrameStore *fs)
335 return fs->structure == GST_VAAPI_PICTURE_STRUCTURE_FRAME;
338 static inline gboolean
339 gst_vaapi_frame_store_has_reference(GstVaapiFrameStore *fs)
343 for (i = 0; i < fs->num_buffers; i++) {
344 if (GST_VAAPI_PICTURE_IS_REFERENCE(fs->buffers[i]))
350 #define gst_vaapi_frame_store_ref(fs) \
351 gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(fs))
353 #define gst_vaapi_frame_store_unref(fs) \
354 gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(fs))
356 #define gst_vaapi_frame_store_replace(old_fs_p, new_fs) \
357 gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_fs_p), \
358 (GstVaapiMiniObject *)(new_fs))
360 /* ------------------------------------------------------------------------- */
361 /* --- H.264 Decoder --- */
362 /* ------------------------------------------------------------------------- */
#define GST_VAAPI_DECODER_H264_CAST(decoder) \
    ((GstVaapiDecoderH264 *)(decoder))

/* Bitmask tracking which headers have been seen so far; a picture is
   decodable only once SPS + PPS + at least one slice are in. */
enum {
    GST_H264_VIDEO_STATE_GOT_SPS        = 1 << 0,
    GST_H264_VIDEO_STATE_GOT_PPS        = 1 << 1,
    GST_H264_VIDEO_STATE_GOT_SLICE      = 1 << 2,

    GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS = (
        GST_H264_VIDEO_STATE_GOT_SPS |
        GST_H264_VIDEO_STATE_GOT_PPS),
    GST_H264_VIDEO_STATE_VALID_PICTURE = (
        GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS |
        GST_H264_VIDEO_STATE_GOT_SLICE)
};
380 struct _GstVaapiDecoderH264Private {
381 GstH264NalParser *parser;
384 GstVaapiPictureH264 *current_picture;
385 GstVaapiParserInfoH264 *prev_slice_pi;
386 GstVaapiFrameStore *prev_frame;
387 GstVaapiFrameStore *dpb[16];
390 GstVaapiProfile profile;
391 GstVaapiEntrypoint entrypoint;
392 GstVaapiChromaType chroma_type;
393 GstVaapiPictureH264 *short_ref[32];
394 guint short_ref_count;
395 GstVaapiPictureH264 *long_ref[32];
396 guint long_ref_count;
397 GstVaapiPictureH264 *RefPicList0[32];
398 guint RefPicList0_count;
399 GstVaapiPictureH264 *RefPicList1[32];
400 guint RefPicList1_count;
401 guint nal_length_size;
404 gint32 field_poc[2]; // 0:TopFieldOrderCnt / 1:BottomFieldOrderCnt
405 gint32 poc_msb; // PicOrderCntMsb
406 gint32 poc_lsb; // pic_order_cnt_lsb (from slice_header())
407 gint32 prev_poc_msb; // prevPicOrderCntMsb
408 gint32 prev_poc_lsb; // prevPicOrderCntLsb
409 gint32 frame_num_offset; // FrameNumOffset
410 gint32 frame_num; // frame_num (from slice_header())
411 gint32 prev_frame_num; // prevFrameNum
412 gboolean prev_pic_has_mmco5; // prevMmco5Pic
413 gboolean prev_pic_structure; // previous picture structure
416 guint has_context : 1;
417 guint progressive_sequence : 1;
421 * GstVaapiDecoderH264:
423 * A decoder based on H264.
425 struct _GstVaapiDecoderH264 {
427 GstVaapiDecoder parent_instance;
428 GstVaapiDecoderH264Private priv;
432 * GstVaapiDecoderH264Class:
434 * A decoder class based on H264.
436 struct _GstVaapiDecoderH264Class {
438 GstVaapiDecoderClass parent_class;
442 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
444 /* Get number of reference frames to use */
446 get_max_dec_frame_buffering(GstH264SPS *sps)
448 guint max_dec_frame_buffering, PicSizeMbs;
449 GstVaapiLevelH264 level;
450 const GstVaapiH264LevelLimits *level_limits;
452 /* Table A-1 - Level limits */
453 if (G_UNLIKELY(sps->level_idc == 11 && sps->constraint_set3_flag))
454 level = GST_VAAPI_LEVEL_H264_L1b;
456 level = gst_vaapi_utils_h264_get_level(sps->level_idc);
457 level_limits = gst_vaapi_utils_h264_get_level_limits(level);
461 PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
462 (sps->pic_height_in_map_units_minus1 + 1) *
463 (sps->frame_mbs_only_flag ? 1 : 2));
464 max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;
467 if (sps->vui_parameters_present_flag) {
468 GstH264VUIParams * const vui_params = &sps->vui_parameters;
469 if (vui_params->bitstream_restriction_flag)
470 max_dec_frame_buffering = vui_params->max_dec_frame_buffering;
472 switch (sps->profile_idc) {
473 case 44: // CAVLC 4:4:4 Intra profile
474 case GST_H264_PROFILE_SCALABLE_HIGH:
475 case GST_H264_PROFILE_HIGH:
476 case GST_H264_PROFILE_HIGH10:
477 case GST_H264_PROFILE_HIGH_422:
478 case GST_H264_PROFILE_HIGH_444:
479 if (sps->constraint_set3_flag)
480 max_dec_frame_buffering = 0;
486 if (max_dec_frame_buffering > 16)
487 max_dec_frame_buffering = 16;
488 else if (max_dec_frame_buffering < sps->num_ref_frames)
489 max_dec_frame_buffering = sps->num_ref_frames;
490 return MAX(1, max_dec_frame_buffering);
494 array_remove_index_fast(void *array, guint *array_length_ptr, guint index)
496 gpointer * const entries = array;
497 guint num_entries = *array_length_ptr;
499 g_return_if_fail(index < num_entries);
501 if (index != --num_entries)
502 entries[index] = entries[num_entries];
503 entries[num_entries] = NULL;
504 *array_length_ptr = num_entries;
509 array_remove_index(void *array, guint *array_length_ptr, guint index)
511 array_remove_index_fast(array, array_length_ptr, index);
515 array_remove_index(void *array, guint *array_length_ptr, guint index)
517 gpointer * const entries = array;
518 const guint num_entries = *array_length_ptr - 1;
521 g_return_if_fail(index <= num_entries);
523 for (i = index; i < num_entries; i++)
524 entries[i] = entries[i + 1];
525 entries[num_entries] = NULL;
526 *array_length_ptr = num_entries;
530 #define ARRAY_REMOVE_INDEX(array, index) \
531 array_remove_index(array, &array##_count, index)
534 dpb_remove_index(GstVaapiDecoderH264 *decoder, guint index)
536 GstVaapiDecoderH264Private * const priv = &decoder->priv;
537 guint i, num_frames = --priv->dpb_count;
539 if (USE_STRICT_DPB_ORDERING) {
540 for (i = index; i < num_frames; i++)
541 gst_vaapi_frame_store_replace(&priv->dpb[i], priv->dpb[i + 1]);
543 else if (index != num_frames)
544 gst_vaapi_frame_store_replace(&priv->dpb[index], priv->dpb[num_frames]);
545 gst_vaapi_frame_store_replace(&priv->dpb[num_frames], NULL);
550 GstVaapiDecoderH264 *decoder,
551 GstVaapiFrameStore *fs,
552 GstVaapiPictureH264 *picture
555 picture->output_needed = FALSE;
558 if (--fs->output_needed > 0)
560 picture = fs->buffers[0];
562 return gst_vaapi_picture_output(GST_VAAPI_PICTURE_CAST(picture));
566 dpb_evict(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture, guint i)
568 GstVaapiDecoderH264Private * const priv = &decoder->priv;
569 GstVaapiFrameStore * const fs = priv->dpb[i];
571 if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
572 dpb_remove_index(decoder, i);
576 dpb_bump(GstVaapiDecoderH264 *decoder)
578 GstVaapiDecoderH264Private * const priv = &decoder->priv;
579 GstVaapiPictureH264 *found_picture = NULL;
580 guint i, j, found_index;
583 for (i = 0; i < priv->dpb_count; i++) {
584 GstVaapiFrameStore * const fs = priv->dpb[i];
585 if (!fs->output_needed)
587 for (j = 0; j < fs->num_buffers; j++) {
588 GstVaapiPictureH264 * const picture = fs->buffers[j];
589 if (!picture->output_needed)
591 if (!found_picture || found_picture->base.poc > picture->base.poc)
592 found_picture = picture, found_index = i;
598 success = dpb_output(decoder, priv->dpb[found_index], found_picture);
599 dpb_evict(decoder, found_picture, found_index);
604 dpb_clear(GstVaapiDecoderH264 *decoder)
606 GstVaapiDecoderH264Private * const priv = &decoder->priv;
609 for (i = 0; i < priv->dpb_count; i++)
610 gst_vaapi_frame_store_replace(&priv->dpb[i], NULL);
613 gst_vaapi_frame_store_replace(&priv->prev_frame, NULL);
617 dpb_flush(GstVaapiDecoderH264 *decoder)
619 while (dpb_bump(decoder))
625 dpb_add(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
627 GstVaapiDecoderH264Private * const priv = &decoder->priv;
628 GstVaapiFrameStore *fs;
631 // Remove all unused pictures
632 if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
634 while (i < priv->dpb_count) {
635 GstVaapiFrameStore * const fs = priv->dpb[i];
636 if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
637 dpb_remove_index(decoder, i);
643 // Check if picture is the second field and the first field is still in DPB
644 fs = priv->prev_frame;
645 if (fs && !gst_vaapi_frame_store_has_frame(fs))
646 return gst_vaapi_frame_store_add(fs, picture);
648 // Create new frame store, and split fields if necessary
649 fs = gst_vaapi_frame_store_new(picture);
652 gst_vaapi_frame_store_replace(&priv->prev_frame, fs);
653 gst_vaapi_frame_store_unref(fs);
655 if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame(fs)) {
656 if (!gst_vaapi_frame_store_split_fields(fs))
660 // C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
661 if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
662 while (priv->dpb_count == priv->dpb_size) {
663 if (!dpb_bump(decoder))
666 gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
667 if (picture->output_flag) {
668 picture->output_needed = TRUE;
673 // C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
675 if (!picture->output_flag)
677 while (priv->dpb_count == priv->dpb_size) {
678 gboolean found_picture = FALSE;
679 for (i = 0; !found_picture && i < priv->dpb_count; i++) {
680 GstVaapiFrameStore * const fs = priv->dpb[i];
681 if (!fs->output_needed)
683 for (j = 0; !found_picture && j < fs->num_buffers; j++)
684 found_picture = fs->buffers[j]->output_needed &&
685 fs->buffers[j]->base.poc < picture->base.poc;
688 return dpb_output(decoder, NULL, picture);
689 if (!dpb_bump(decoder))
692 gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
693 picture->output_needed = TRUE;
700 dpb_reset(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
702 GstVaapiDecoderH264Private * const priv = &decoder->priv;
704 priv->dpb_size = get_max_dec_frame_buffering(sps);
705 GST_DEBUG("DPB size %u", priv->dpb_size);
708 static GstVaapiDecoderStatus
709 get_status(GstH264ParserResult result)
711 GstVaapiDecoderStatus status;
714 case GST_H264_PARSER_OK:
715 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
717 case GST_H264_PARSER_NO_NAL_END:
718 status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
720 case GST_H264_PARSER_ERROR:
721 status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
724 status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
731 gst_vaapi_decoder_h264_close(GstVaapiDecoderH264 *decoder)
733 GstVaapiDecoderH264Private * const priv = &decoder->priv;
735 gst_vaapi_picture_replace(&priv->current_picture, NULL);
736 gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, NULL);
741 gst_h264_nal_parser_free(priv->parser);
747 gst_vaapi_decoder_h264_open(GstVaapiDecoderH264 *decoder)
749 GstVaapiDecoderH264Private * const priv = &decoder->priv;
751 gst_vaapi_decoder_h264_close(decoder);
753 priv->parser = gst_h264_nal_parser_new();
760 gst_vaapi_decoder_h264_destroy(GstVaapiDecoder *base_decoder)
762 GstVaapiDecoderH264 * const decoder =
763 GST_VAAPI_DECODER_H264_CAST(base_decoder);
765 gst_vaapi_decoder_h264_close(decoder);
769 gst_vaapi_decoder_h264_create(GstVaapiDecoder *base_decoder)
771 GstVaapiDecoderH264 * const decoder =
772 GST_VAAPI_DECODER_H264_CAST(base_decoder);
773 GstVaapiDecoderH264Private * const priv = &decoder->priv;
775 priv->profile = GST_VAAPI_PROFILE_UNKNOWN;
776 priv->entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
777 priv->chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
778 priv->prev_pic_structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
779 priv->progressive_sequence = TRUE;
784 fill_profiles(GstVaapiProfile profiles[16], guint *n_profiles_ptr,
785 GstVaapiProfile profile)
787 guint n_profiles = *n_profiles_ptr;
789 profiles[n_profiles++] = profile;
791 case GST_VAAPI_PROFILE_H264_MAIN:
792 profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
797 *n_profiles_ptr = n_profiles;
800 static GstVaapiProfile
801 get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
803 GstVaapiDecoderH264Private * const priv = &decoder->priv;
804 GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(decoder);
805 GstVaapiProfile profile, profiles[4];
806 guint i, n_profiles = 0;
808 profile = gst_vaapi_utils_h264_get_profile(sps->profile_idc);
810 return GST_VAAPI_PROFILE_UNKNOWN;
812 fill_profiles(profiles, &n_profiles, profile);
814 case GST_VAAPI_PROFILE_H264_BASELINE:
815 if (sps->constraint_set1_flag) { // A.2.2 (main profile)
816 fill_profiles(profiles, &n_profiles,
817 GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE);
818 fill_profiles(profiles, &n_profiles,
819 GST_VAAPI_PROFILE_H264_MAIN);
822 case GST_VAAPI_PROFILE_H264_EXTENDED:
823 if (sps->constraint_set1_flag) { // A.2.2 (main profile)
824 fill_profiles(profiles, &n_profiles,
825 GST_VAAPI_PROFILE_H264_MAIN);
832 /* If the preferred profile (profiles[0]) matches one that we already
833 found, then just return it now instead of searching for it again */
834 if (profiles[0] == priv->profile)
835 return priv->profile;
837 for (i = 0; i < n_profiles; i++) {
838 if (gst_vaapi_display_has_decoder(display, profiles[i], priv->entrypoint))
841 return GST_VAAPI_PROFILE_UNKNOWN;
844 static GstVaapiDecoderStatus
845 ensure_context(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
847 GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
848 GstVaapiDecoderH264Private * const priv = &decoder->priv;
849 GstVaapiContextInfo info;
850 GstVaapiProfile profile;
851 GstVaapiChromaType chroma_type;
852 gboolean reset_context = FALSE;
853 guint mb_width, mb_height;
855 profile = get_profile(decoder, sps);
857 GST_ERROR("unsupported profile_idc %u", sps->profile_idc);
858 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
861 if (priv->profile != profile) {
862 GST_DEBUG("profile changed");
863 reset_context = TRUE;
864 priv->profile = profile;
867 chroma_type = gst_vaapi_utils_h264_get_chroma_type(sps->chroma_format_idc);
869 GST_ERROR("unsupported chroma_format_idc %u", sps->chroma_format_idc);
870 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
873 if (priv->chroma_type != chroma_type) {
874 GST_DEBUG("chroma format changed");
875 reset_context = TRUE;
876 priv->chroma_type = chroma_type;
879 mb_width = sps->pic_width_in_mbs_minus1 + 1;
880 mb_height = (sps->pic_height_in_map_units_minus1 + 1) <<
881 !sps->frame_mbs_only_flag;
882 if (priv->mb_width != mb_width || priv->mb_height != mb_height) {
883 GST_DEBUG("size changed");
884 reset_context = TRUE;
885 priv->mb_width = mb_width;
886 priv->mb_height = mb_height;
889 priv->progressive_sequence = sps->frame_mbs_only_flag;
891 /* XXX: we only output complete frames for now */
892 gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
895 gst_vaapi_decoder_set_pixel_aspect_ratio(
897 sps->vui_parameters.par_n,
898 sps->vui_parameters.par_d
901 if (!reset_context && priv->has_context)
902 return GST_VAAPI_DECODER_STATUS_SUCCESS;
904 /* XXX: fix surface size when cropping is implemented */
905 info.profile = priv->profile;
906 info.entrypoint = priv->entrypoint;
907 info.chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
908 info.width = sps->width;
909 info.height = sps->height;
910 info.ref_frames = get_max_dec_frame_buffering(sps);
912 if (!gst_vaapi_decoder_ensure_context(GST_VAAPI_DECODER(decoder), &info))
913 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
914 priv->has_context = TRUE;
917 dpb_reset(decoder, sps);
918 return GST_VAAPI_DECODER_STATUS_SUCCESS;
922 fill_iq_matrix_4x4(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps)
926 /* There are always 6 4x4 scaling lists */
927 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4) == 6);
928 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4[0]) == 16);
930 for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++)
931 gst_h264_video_quant_matrix_4x4_get_raster_from_zigzag(
932 iq_matrix->ScalingList4x4[i], pps->scaling_lists_4x4[i]);
936 fill_iq_matrix_8x8(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps)
938 const GstH264SPS * const sps = pps->sequence;
941 /* If chroma_format_idc != 3, there are up to 2 8x8 scaling lists */
942 if (!pps->transform_8x8_mode_flag)
945 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8) >= 2);
946 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8[0]) == 64);
948 n = (sps->chroma_format_idc != 3) ? 2 : 6;
949 for (i = 0; i < n; i++) {
950 gst_h264_video_quant_matrix_8x8_get_raster_from_zigzag(
951 iq_matrix->ScalingList8x8[i], pps->scaling_lists_8x8[i]);
955 static GstVaapiDecoderStatus
956 ensure_quant_matrix(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
958 GstVaapiPicture * const base_picture = &picture->base;
959 GstH264PPS * const pps = picture->pps;
960 GstH264SPS * const sps = pps->sequence;
961 VAIQMatrixBufferH264 *iq_matrix;
963 base_picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(H264, decoder);
964 if (!base_picture->iq_matrix) {
965 GST_ERROR("failed to allocate IQ matrix");
966 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
968 iq_matrix = base_picture->iq_matrix->param;
970 /* XXX: we can only support 4:2:0 or 4:2:2 since ScalingLists8x8[]
971 is not large enough to hold lists for 4:4:4 */
972 if (sps->chroma_format_idc == 3)
973 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
975 fill_iq_matrix_4x4(iq_matrix, pps);
976 fill_iq_matrix_8x8(iq_matrix, pps);
978 return GST_VAAPI_DECODER_STATUS_SUCCESS;
981 static inline gboolean
982 is_valid_state(guint state, guint ref_state)
984 return (state & ref_state) == ref_state;
987 static GstVaapiDecoderStatus
988 decode_current_picture(GstVaapiDecoderH264 *decoder)
990 GstVaapiDecoderH264Private * const priv = &decoder->priv;
991 GstVaapiPictureH264 * const picture = priv->current_picture;
993 if (!is_valid_state(priv->decoder_state, GST_H264_VIDEO_STATE_VALID_PICTURE))
995 priv->decoder_state = 0;
998 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1000 if (!exec_ref_pic_marking(decoder, picture))
1002 if (!dpb_add(decoder, picture))
1004 if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
1006 if (priv->prev_frame && gst_vaapi_frame_store_has_frame(priv->prev_frame))
1007 gst_vaapi_picture_replace(&priv->current_picture, NULL);
1008 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1011 /* XXX: fix for cases where first field failed to be decoded */
1012 gst_vaapi_picture_replace(&priv->current_picture, NULL);
1013 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1016 priv->decoder_state = 0;
1017 return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
1020 static GstVaapiDecoderStatus
1021 parse_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1023 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1024 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1025 GstH264SPS * const sps = &pi->data.sps;
1026 GstH264ParserResult result;
1028 GST_DEBUG("parse SPS");
1030 priv->parser_state = 0;
1032 /* Variables that don't have inferred values per the H.264
1033 standard but that should get a default value anyway */
1034 sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1036 result = gst_h264_parser_parse_sps(priv->parser, &pi->nalu, sps, TRUE);
1037 if (result != GST_H264_PARSER_OK)
1038 return get_status(result);
1040 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1041 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1044 static GstVaapiDecoderStatus
1045 parse_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1047 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1048 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1049 GstH264PPS * const pps = &pi->data.pps;
1050 GstH264ParserResult result;
1052 GST_DEBUG("parse PPS");
1054 priv->parser_state &= GST_H264_VIDEO_STATE_GOT_SPS;
1056 /* Variables that don't have inferred values per the H.264
1057 standard but that should get a default value anyway */
1058 pps->slice_group_map_type = 0;
1059 pps->slice_group_change_rate_minus1 = 0;
1061 result = gst_h264_parser_parse_pps(priv->parser, &pi->nalu, pps);
1062 if (result != GST_H264_PARSER_OK)
1063 return get_status(result);
1065 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_PPS;
1066 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1069 static GstVaapiDecoderStatus
1070 parse_sei(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1072 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1073 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1074 GArray ** const sei_ptr = &pi->data.sei;
1075 GstH264ParserResult result;
1077 GST_DEBUG("parse SEI");
1079 result = gst_h264_parser_parse_sei(priv->parser, &pi->nalu, sei_ptr);
1080 if (result != GST_H264_PARSER_OK) {
1081 GST_WARNING("failed to parse SEI messages");
1082 return get_status(result);
1084 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1087 static GstVaapiDecoderStatus
1088 parse_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1090 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1091 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1092 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
1093 GstH264ParserResult result;
1095 GST_DEBUG("parse slice");
1097 priv->parser_state &= (GST_H264_VIDEO_STATE_GOT_SPS|
1098 GST_H264_VIDEO_STATE_GOT_PPS);
1100 /* Variables that don't have inferred values per the H.264
1101 standard but that should get a default value anyway */
1102 slice_hdr->cabac_init_idc = 0;
1103 slice_hdr->direct_spatial_mv_pred_flag = 0;
1105 result = gst_h264_parser_parse_slice_hdr(priv->parser, &pi->nalu,
1106 slice_hdr, TRUE, TRUE);
1107 if (result != GST_H264_PARSER_OK)
1108 return get_status(result);
1110 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
1111 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1114 static GstVaapiDecoderStatus
1115 decode_sequence_end(GstVaapiDecoderH264 *decoder)
1117 GstVaapiDecoderStatus status;
1119 GST_DEBUG("decode sequence-end");
1121 status = decode_current_picture(decoder);
1122 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
1126 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1129 /* 8.2.1.1 - Decoding process for picture order count type 0 */
1132 GstVaapiDecoderH264 *decoder,
1133 GstVaapiPictureH264 *picture,
1134 GstH264SliceHdr *slice_hdr
1137 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1138 GstH264PPS * const pps = slice_hdr->pps;
1139 GstH264SPS * const sps = pps->sequence;
1140 const gint32 MaxPicOrderCntLsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
1143 GST_DEBUG("decode picture order count type 0");
1145 if (GST_VAAPI_PICTURE_IS_IDR(picture)) {
1146 priv->prev_poc_msb = 0;
1147 priv->prev_poc_lsb = 0;
1149 else if (priv->prev_pic_has_mmco5) {
1150 priv->prev_poc_msb = 0;
1151 priv->prev_poc_lsb =
1152 (priv->prev_pic_structure == GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD ?
1153 0 : priv->field_poc[TOP_FIELD]);
1156 priv->prev_poc_msb = priv->poc_msb;
1157 priv->prev_poc_lsb = priv->poc_lsb;
1161 priv->poc_lsb = slice_hdr->pic_order_cnt_lsb;
1162 if (priv->poc_lsb < priv->prev_poc_lsb &&
1163 (priv->prev_poc_lsb - priv->poc_lsb) >= (MaxPicOrderCntLsb / 2))
1164 priv->poc_msb = priv->prev_poc_msb + MaxPicOrderCntLsb;
1165 else if (priv->poc_lsb > priv->prev_poc_lsb &&
1166 (priv->poc_lsb - priv->prev_poc_lsb) > (MaxPicOrderCntLsb / 2))
1167 priv->poc_msb = priv->prev_poc_msb - MaxPicOrderCntLsb;
1169 priv->poc_msb = priv->prev_poc_msb;
1171 temp_poc = priv->poc_msb + priv->poc_lsb;
1172 switch (picture->structure) {
1173 case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1175 priv->field_poc[TOP_FIELD] = temp_poc;
1176 priv->field_poc[BOTTOM_FIELD] = temp_poc +
1177 slice_hdr->delta_pic_order_cnt_bottom;
1179 case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1181 priv->field_poc[TOP_FIELD] = temp_poc;
1183 case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1185 priv->field_poc[BOTTOM_FIELD] = temp_poc;
1190 /* 8.2.1.2 - Decoding process for picture order count type 1 */
1193 GstVaapiDecoderH264 *decoder,
1194 GstVaapiPictureH264 *picture,
1195 GstH264SliceHdr *slice_hdr
1198 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1199 GstH264PPS * const pps = slice_hdr->pps;
1200 GstH264SPS * const sps = pps->sequence;
1201 const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1202 gint32 prev_frame_num_offset, abs_frame_num, expected_poc;
1205 GST_DEBUG("decode picture order count type 1");
1207 if (priv->prev_pic_has_mmco5)
1208 prev_frame_num_offset = 0;
1210 prev_frame_num_offset = priv->frame_num_offset;
1213 if (GST_VAAPI_PICTURE_IS_IDR(picture))
1214 priv->frame_num_offset = 0;
1215 else if (priv->prev_frame_num > priv->frame_num)
1216 priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1218 priv->frame_num_offset = prev_frame_num_offset;
1221 if (sps->num_ref_frames_in_pic_order_cnt_cycle != 0)
1222 abs_frame_num = priv->frame_num_offset + priv->frame_num;
1225 if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture) && abs_frame_num > 0)
1226 abs_frame_num = abs_frame_num - 1;
1228 if (abs_frame_num > 0) {
1229 gint32 expected_delta_per_poc_cycle;
1230 gint32 poc_cycle_cnt, frame_num_in_poc_cycle;
1232 expected_delta_per_poc_cycle = 0;
1233 for (i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++)
1234 expected_delta_per_poc_cycle += sps->offset_for_ref_frame[i];
1237 poc_cycle_cnt = (abs_frame_num - 1) /
1238 sps->num_ref_frames_in_pic_order_cnt_cycle;
1239 frame_num_in_poc_cycle = (abs_frame_num - 1) %
1240 sps->num_ref_frames_in_pic_order_cnt_cycle;
1243 expected_poc = poc_cycle_cnt * expected_delta_per_poc_cycle;
1244 for (i = 0; i <= frame_num_in_poc_cycle; i++)
1245 expected_poc += sps->offset_for_ref_frame[i];
1249 if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1250 expected_poc += sps->offset_for_non_ref_pic;
1253 switch (picture->structure) {
1254 case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1255 priv->field_poc[TOP_FIELD] = expected_poc +
1256 slice_hdr->delta_pic_order_cnt[0];
1257 priv->field_poc[BOTTOM_FIELD] = priv->field_poc[TOP_FIELD] +
1258 sps->offset_for_top_to_bottom_field +
1259 slice_hdr->delta_pic_order_cnt[1];
1261 case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1262 priv->field_poc[TOP_FIELD] = expected_poc +
1263 slice_hdr->delta_pic_order_cnt[0];
1265 case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1266 priv->field_poc[BOTTOM_FIELD] = expected_poc +
1267 sps->offset_for_top_to_bottom_field +
1268 slice_hdr->delta_pic_order_cnt[0];
1273 /* 8.2.1.3 - Decoding process for picture order count type 2 */
1276 GstVaapiDecoderH264 *decoder,
1277 GstVaapiPictureH264 *picture,
1278 GstH264SliceHdr *slice_hdr
1281 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1282 GstH264PPS * const pps = slice_hdr->pps;
1283 GstH264SPS * const sps = pps->sequence;
1284 const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1285 gint32 prev_frame_num_offset, temp_poc;
1287 GST_DEBUG("decode picture order count type 2");
1289 if (priv->prev_pic_has_mmco5)
1290 prev_frame_num_offset = 0;
1292 prev_frame_num_offset = priv->frame_num_offset;
1295 if (GST_VAAPI_PICTURE_IS_IDR(picture))
1296 priv->frame_num_offset = 0;
1297 else if (priv->prev_frame_num > priv->frame_num)
1298 priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1300 priv->frame_num_offset = prev_frame_num_offset;
1303 if (GST_VAAPI_PICTURE_IS_IDR(picture))
1305 else if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1306 temp_poc = 2 * (priv->frame_num_offset + priv->frame_num) - 1;
1308 temp_poc = 2 * (priv->frame_num_offset + priv->frame_num);
1311 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1312 priv->field_poc[TOP_FIELD] = temp_poc;
1313 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1314 priv->field_poc[BOTTOM_FIELD] = temp_poc;
1317 /* 8.2.1 - Decoding process for picture order count */
1320 GstVaapiDecoderH264 *decoder,
1321 GstVaapiPictureH264 *picture,
1322 GstH264SliceHdr *slice_hdr
1325 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1326 GstH264PPS * const pps = slice_hdr->pps;
1327 GstH264SPS * const sps = pps->sequence;
1329 switch (sps->pic_order_cnt_type) {
1331 init_picture_poc_0(decoder, picture, slice_hdr);
1334 init_picture_poc_1(decoder, picture, slice_hdr);
1337 init_picture_poc_2(decoder, picture, slice_hdr);
1341 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1342 picture->field_poc[TOP_FIELD] = priv->field_poc[TOP_FIELD];
1343 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1344 picture->field_poc[BOTTOM_FIELD] = priv->field_poc[BOTTOM_FIELD];
1345 picture->base.poc = MIN(picture->field_poc[0], picture->field_poc[1]);
1349 compare_picture_pic_num_dec(const void *a, const void *b)
1351 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1352 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1354 return picB->pic_num - picA->pic_num;
1358 compare_picture_long_term_pic_num_inc(const void *a, const void *b)
1360 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1361 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1363 return picA->long_term_pic_num - picB->long_term_pic_num;
1367 compare_picture_poc_dec(const void *a, const void *b)
1369 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1370 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1372 return picB->base.poc - picA->base.poc;
1376 compare_picture_poc_inc(const void *a, const void *b)
1378 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1379 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1381 return picA->base.poc - picB->base.poc;
1385 compare_picture_frame_num_wrap_dec(const void *a, const void *b)
1387 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1388 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1390 return picB->frame_num_wrap - picA->frame_num_wrap;
1394 compare_picture_long_term_frame_idx_inc(const void *a, const void *b)
1396 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1397 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1399 return picA->long_term_frame_idx - picB->long_term_frame_idx;
1402 /* 8.2.4.1 - Decoding process for picture numbers */
1404 init_picture_refs_pic_num(
1405 GstVaapiDecoderH264 *decoder,
1406 GstVaapiPictureH264 *picture,
1407 GstH264SliceHdr *slice_hdr
1410 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1411 GstH264PPS * const pps = slice_hdr->pps;
1412 GstH264SPS * const sps = pps->sequence;
1413 const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1416 GST_DEBUG("decode picture numbers");
1418 for (i = 0; i < priv->short_ref_count; i++) {
1419 GstVaapiPictureH264 * const pic = priv->short_ref[i];
1422 if (pic->frame_num > priv->frame_num)
1423 pic->frame_num_wrap = pic->frame_num - MaxFrameNum;
1425 pic->frame_num_wrap = pic->frame_num;
1427 // (8-28, 8-30, 8-31)
1428 if (GST_VAAPI_PICTURE_IS_FRAME(picture))
1429 pic->pic_num = pic->frame_num_wrap;
1431 if (pic->structure == picture->structure)
1432 pic->pic_num = 2 * pic->frame_num_wrap + 1;
1434 pic->pic_num = 2 * pic->frame_num_wrap;
1438 for (i = 0; i < priv->long_ref_count; i++) {
1439 GstVaapiPictureH264 * const pic = priv->long_ref[i];
1441 // (8-29, 8-32, 8-33)
1442 if (GST_VAAPI_PICTURE_IS_FRAME(picture))
1443 pic->long_term_pic_num = pic->long_term_frame_idx;
1445 if (pic->structure == picture->structure)
1446 pic->long_term_pic_num = 2 * pic->long_term_frame_idx + 1;
1448 pic->long_term_pic_num = 2 * pic->long_term_frame_idx;
/* Sorts the first |n| entries of |list| in place, using the ordering
 * implemented by compare_picture_<compare_func>() */
#define SORT_REF_LIST(list, n, compare_func) \
    qsort(list, n, sizeof(*(list)), compare_picture_##compare_func)
1457 init_picture_refs_fields_1(
1458 guint picture_structure,
1459 GstVaapiPictureH264 *RefPicList[32],
1460 guint *RefPicList_count,
1461 GstVaapiPictureH264 *ref_list[32],
1462 guint ref_list_count
1469 n = *RefPicList_count;
1472 for (; i < ref_list_count; i++) {
1473 if (ref_list[i]->structure == picture_structure) {
1474 RefPicList[n++] = ref_list[i++];
1478 for (; j < ref_list_count; j++) {
1479 if (ref_list[j]->structure != picture_structure) {
1480 RefPicList[n++] = ref_list[j++];
1484 } while (i < ref_list_count || j < ref_list_count);
1485 *RefPicList_count = n;
1489 init_picture_refs_fields(
1490 GstVaapiPictureH264 *picture,
1491 GstVaapiPictureH264 *RefPicList[32],
1492 guint *RefPicList_count,
1493 GstVaapiPictureH264 *short_ref[32],
1494 guint short_ref_count,
1495 GstVaapiPictureH264 *long_ref[32],
1496 guint long_ref_count
1501 /* 8.2.4.2.5 - reference picture lists in fields */
1502 init_picture_refs_fields_1(picture->structure, RefPicList, &n,
1503 short_ref, short_ref_count);
1504 init_picture_refs_fields_1(picture->structure, RefPicList, &n,
1505 long_ref, long_ref_count);
1506 *RefPicList_count = n;
1510 init_picture_refs_p_slice(
1511 GstVaapiDecoderH264 *decoder,
1512 GstVaapiPictureH264 *picture,
1513 GstH264SliceHdr *slice_hdr
1516 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1517 GstVaapiPictureH264 **ref_list;
1520 GST_DEBUG("decode reference picture list for P and SP slices");
1522 if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
1523 /* 8.2.4.2.1 - P and SP slices in frames */
1524 if (priv->short_ref_count > 0) {
1525 ref_list = priv->RefPicList0;
1526 for (i = 0; i < priv->short_ref_count; i++)
1527 ref_list[i] = priv->short_ref[i];
1528 SORT_REF_LIST(ref_list, i, pic_num_dec);
1529 priv->RefPicList0_count += i;
1532 if (priv->long_ref_count > 0) {
1533 ref_list = &priv->RefPicList0[priv->RefPicList0_count];
1534 for (i = 0; i < priv->long_ref_count; i++)
1535 ref_list[i] = priv->long_ref[i];
1536 SORT_REF_LIST(ref_list, i, long_term_pic_num_inc);
1537 priv->RefPicList0_count += i;
1541 /* 8.2.4.2.2 - P and SP slices in fields */
1542 GstVaapiPictureH264 *short_ref[32];
1543 guint short_ref_count = 0;
1544 GstVaapiPictureH264 *long_ref[32];
1545 guint long_ref_count = 0;
1547 if (priv->short_ref_count > 0) {
1548 for (i = 0; i < priv->short_ref_count; i++)
1549 short_ref[i] = priv->short_ref[i];
1550 SORT_REF_LIST(short_ref, i, frame_num_wrap_dec);
1551 short_ref_count = i;
1554 if (priv->long_ref_count > 0) {
1555 for (i = 0; i < priv->long_ref_count; i++)
1556 long_ref[i] = priv->long_ref[i];
1557 SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
1561 init_picture_refs_fields(
1563 priv->RefPicList0, &priv->RefPicList0_count,
1564 short_ref, short_ref_count,
1565 long_ref, long_ref_count
1571 init_picture_refs_b_slice(
1572 GstVaapiDecoderH264 *decoder,
1573 GstVaapiPictureH264 *picture,
1574 GstH264SliceHdr *slice_hdr
1577 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1578 GstVaapiPictureH264 **ref_list;
1581 GST_DEBUG("decode reference picture list for B slices");
1583 if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
1584 /* 8.2.4.2.3 - B slices in frames */
1587 if (priv->short_ref_count > 0) {
1588 // 1. Short-term references
1589 ref_list = priv->RefPicList0;
1590 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
1591 if (priv->short_ref[i]->base.poc < picture->base.poc)
1592 ref_list[n++] = priv->short_ref[i];
1594 SORT_REF_LIST(ref_list, n, poc_dec);
1595 priv->RefPicList0_count += n;
1597 ref_list = &priv->RefPicList0[priv->RefPicList0_count];
1598 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
1599 if (priv->short_ref[i]->base.poc >= picture->base.poc)
1600 ref_list[n++] = priv->short_ref[i];
1602 SORT_REF_LIST(ref_list, n, poc_inc);
1603 priv->RefPicList0_count += n;
1606 if (priv->long_ref_count > 0) {
1607 // 2. Long-term references
1608 ref_list = &priv->RefPicList0[priv->RefPicList0_count];
1609 for (n = 0, i = 0; i < priv->long_ref_count; i++)
1610 ref_list[n++] = priv->long_ref[i];
1611 SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
1612 priv->RefPicList0_count += n;
1616 if (priv->short_ref_count > 0) {
1617 // 1. Short-term references
1618 ref_list = priv->RefPicList1;
1619 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
1620 if (priv->short_ref[i]->base.poc > picture->base.poc)
1621 ref_list[n++] = priv->short_ref[i];
1623 SORT_REF_LIST(ref_list, n, poc_inc);
1624 priv->RefPicList1_count += n;
1626 ref_list = &priv->RefPicList1[priv->RefPicList1_count];
1627 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
1628 if (priv->short_ref[i]->base.poc <= picture->base.poc)
1629 ref_list[n++] = priv->short_ref[i];
1631 SORT_REF_LIST(ref_list, n, poc_dec);
1632 priv->RefPicList1_count += n;
1635 if (priv->long_ref_count > 0) {
1636 // 2. Long-term references
1637 ref_list = &priv->RefPicList1[priv->RefPicList1_count];
1638 for (n = 0, i = 0; i < priv->long_ref_count; i++)
1639 ref_list[n++] = priv->long_ref[i];
1640 SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
1641 priv->RefPicList1_count += n;
1645 /* 8.2.4.2.4 - B slices in fields */
1646 GstVaapiPictureH264 *short_ref0[32];
1647 guint short_ref0_count = 0;
1648 GstVaapiPictureH264 *short_ref1[32];
1649 guint short_ref1_count = 0;
1650 GstVaapiPictureH264 *long_ref[32];
1651 guint long_ref_count = 0;
1653 /* refFrameList0ShortTerm */
1654 if (priv->short_ref_count > 0) {
1655 ref_list = short_ref0;
1656 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
1657 if (priv->short_ref[i]->base.poc <= picture->base.poc)
1658 ref_list[n++] = priv->short_ref[i];
1660 SORT_REF_LIST(ref_list, n, poc_dec);
1661 short_ref0_count += n;
1663 ref_list = &short_ref0[short_ref0_count];
1664 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
1665 if (priv->short_ref[i]->base.poc > picture->base.poc)
1666 ref_list[n++] = priv->short_ref[i];
1668 SORT_REF_LIST(ref_list, n, poc_inc);
1669 short_ref0_count += n;
1672 /* refFrameList1ShortTerm */
1673 if (priv->short_ref_count > 0) {
1674 ref_list = short_ref1;
1675 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
1676 if (priv->short_ref[i]->base.poc > picture->base.poc)
1677 ref_list[n++] = priv->short_ref[i];
1679 SORT_REF_LIST(ref_list, n, poc_inc);
1680 short_ref1_count += n;
1682 ref_list = &short_ref1[short_ref1_count];
1683 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
1684 if (priv->short_ref[i]->base.poc <= picture->base.poc)
1685 ref_list[n++] = priv->short_ref[i];
1687 SORT_REF_LIST(ref_list, n, poc_dec);
1688 short_ref1_count += n;
1691 /* refFrameListLongTerm */
1692 if (priv->long_ref_count > 0) {
1693 for (i = 0; i < priv->long_ref_count; i++)
1694 long_ref[i] = priv->long_ref[i];
1695 SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
1699 init_picture_refs_fields(
1701 priv->RefPicList0, &priv->RefPicList0_count,
1702 short_ref0, short_ref0_count,
1703 long_ref, long_ref_count
1706 init_picture_refs_fields(
1708 priv->RefPicList1, &priv->RefPicList1_count,
1709 short_ref1, short_ref1_count,
1710 long_ref, long_ref_count
1714 /* Check whether RefPicList1 is identical to RefPicList0, then
1715 swap if necessary */
1716 if (priv->RefPicList1_count > 1 &&
1717 priv->RefPicList1_count == priv->RefPicList0_count &&
1718 memcmp(priv->RefPicList0, priv->RefPicList1,
1719 priv->RefPicList0_count * sizeof(priv->RefPicList0[0])) == 0) {
1720 GstVaapiPictureH264 * const tmp = priv->RefPicList1[0];
1721 priv->RefPicList1[0] = priv->RefPicList1[1];
1722 priv->RefPicList1[1] = tmp;
1726 #undef SORT_REF_LIST
1729 find_short_term_reference(GstVaapiDecoderH264 *decoder, gint32 pic_num)
1731 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1734 for (i = 0; i < priv->short_ref_count; i++) {
1735 if (priv->short_ref[i]->pic_num == pic_num)
1738 GST_ERROR("found no short-term reference picture with PicNum = %d",
1744 find_long_term_reference(GstVaapiDecoderH264 *decoder, gint32 long_term_pic_num)
1746 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1749 for (i = 0; i < priv->long_ref_count; i++) {
1750 if (priv->long_ref[i]->long_term_pic_num == long_term_pic_num)
1753 GST_ERROR("found no long-term reference picture with LongTermPicNum = %d",
1759 exec_picture_refs_modification_1(
1760 GstVaapiDecoderH264 *decoder,
1761 GstVaapiPictureH264 *picture,
1762 GstH264SliceHdr *slice_hdr,
1766 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1767 GstH264PPS * const pps = slice_hdr->pps;
1768 GstH264SPS * const sps = pps->sequence;
1769 GstH264RefPicListModification *ref_pic_list_modification;
1770 guint num_ref_pic_list_modifications;
1771 GstVaapiPictureH264 **ref_list;
1772 guint *ref_list_count_ptr, ref_list_count, ref_list_idx = 0;
1773 guint i, j, n, num_refs;
1775 gint32 MaxPicNum, CurrPicNum, picNumPred;
1777 GST_DEBUG("modification process of reference picture list %u", list);
1780 ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l0;
1781 num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l0;
1782 ref_list = priv->RefPicList0;
1783 ref_list_count_ptr = &priv->RefPicList0_count;
1784 num_refs = slice_hdr->num_ref_idx_l0_active_minus1 + 1;
1787 ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l1;
1788 num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l1;
1789 ref_list = priv->RefPicList1;
1790 ref_list_count_ptr = &priv->RefPicList1_count;
1791 num_refs = slice_hdr->num_ref_idx_l1_active_minus1 + 1;
1793 ref_list_count = *ref_list_count_ptr;
1795 if (!GST_VAAPI_PICTURE_IS_FRAME(picture)) {
1796 MaxPicNum = 1 << (sps->log2_max_frame_num_minus4 + 5); // 2 * MaxFrameNum
1797 CurrPicNum = 2 * slice_hdr->frame_num + 1; // 2 * frame_num + 1
1800 MaxPicNum = 1 << (sps->log2_max_frame_num_minus4 + 4); // MaxFrameNum
1801 CurrPicNum = slice_hdr->frame_num; // frame_num
1804 picNumPred = CurrPicNum;
1806 for (i = 0; i < num_ref_pic_list_modifications; i++) {
1807 GstH264RefPicListModification * const l = &ref_pic_list_modification[i];
1808 if (l->modification_of_pic_nums_idc == 3)
1811 /* 8.2.4.3.1 - Short-term reference pictures */
1812 if (l->modification_of_pic_nums_idc == 0 || l->modification_of_pic_nums_idc == 1) {
1813 gint32 abs_diff_pic_num = l->value.abs_diff_pic_num_minus1 + 1;
1814 gint32 picNum, picNumNoWrap;
1817 if (l->modification_of_pic_nums_idc == 0) {
1818 picNumNoWrap = picNumPred - abs_diff_pic_num;
1819 if (picNumNoWrap < 0)
1820 picNumNoWrap += MaxPicNum;
1825 picNumNoWrap = picNumPred + abs_diff_pic_num;
1826 if (picNumNoWrap >= MaxPicNum)
1827 picNumNoWrap -= MaxPicNum;
1829 picNumPred = picNumNoWrap;
1832 picNum = picNumNoWrap;
1833 if (picNum > CurrPicNum)
1834 picNum -= MaxPicNum;
1837 for (j = num_refs; j > ref_list_idx; j--)
1838 ref_list[j] = ref_list[j - 1];
1839 found_ref_idx = find_short_term_reference(decoder, picNum);
1840 ref_list[ref_list_idx++] =
1841 found_ref_idx >= 0 ? priv->short_ref[found_ref_idx] : NULL;
1843 for (j = ref_list_idx; j <= num_refs; j++) {
1848 GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(ref_list[j]) ?
1849 ref_list[j]->pic_num : MaxPicNum;
1850 if (PicNumF != picNum)
1851 ref_list[n++] = ref_list[j];
1855 /* 8.2.4.3.2 - Long-term reference pictures */
1858 for (j = num_refs; j > ref_list_idx; j--)
1859 ref_list[j] = ref_list[j - 1];
1861 find_long_term_reference(decoder, l->value.long_term_pic_num);
1862 ref_list[ref_list_idx++] =
1863 found_ref_idx >= 0 ? priv->long_ref[found_ref_idx] : NULL;
1865 for (j = ref_list_idx; j <= num_refs; j++) {
1866 gint32 LongTermPicNumF;
1870 GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(ref_list[j]) ?
1871 ref_list[j]->long_term_pic_num : INT_MAX;
1872 if (LongTermPicNumF != l->value.long_term_pic_num)
1873 ref_list[n++] = ref_list[j];
1879 for (i = 0; i < num_refs; i++)
1881 GST_ERROR("list %u entry %u is empty", list, i);
1883 *ref_list_count_ptr = num_refs;
1886 /* 8.2.4.3 - Modification process for reference picture lists */
1888 exec_picture_refs_modification(
1889 GstVaapiDecoderH264 *decoder,
1890 GstVaapiPictureH264 *picture,
1891 GstH264SliceHdr *slice_hdr
1894 GST_DEBUG("execute ref_pic_list_modification()");
1897 if (!GST_H264_IS_I_SLICE(slice_hdr) && !GST_H264_IS_SI_SLICE(slice_hdr) &&
1898 slice_hdr->ref_pic_list_modification_flag_l0)
1899 exec_picture_refs_modification_1(decoder, picture, slice_hdr, 0);
1902 if (GST_H264_IS_B_SLICE(slice_hdr) &&
1903 slice_hdr->ref_pic_list_modification_flag_l1)
1904 exec_picture_refs_modification_1(decoder, picture, slice_hdr, 1);
1908 init_picture_ref_lists(GstVaapiDecoderH264 *decoder)
1910 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1911 guint i, j, short_ref_count, long_ref_count;
1913 short_ref_count = 0;
1915 if (GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture)) {
1916 for (i = 0; i < priv->dpb_count; i++) {
1917 GstVaapiFrameStore * const fs = priv->dpb[i];
1918 GstVaapiPictureH264 *picture;
1919 if (!gst_vaapi_frame_store_has_frame(fs))
1921 picture = fs->buffers[0];
1922 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
1923 priv->short_ref[short_ref_count++] = picture;
1924 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture))
1925 priv->long_ref[long_ref_count++] = picture;
1926 picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
1927 picture->other_field = fs->buffers[1];
1931 for (i = 0; i < priv->dpb_count; i++) {
1932 GstVaapiFrameStore * const fs = priv->dpb[i];
1933 for (j = 0; j < fs->num_buffers; j++) {
1934 GstVaapiPictureH264 * const picture = fs->buffers[j];
1935 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
1936 priv->short_ref[short_ref_count++] = picture;
1937 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture))
1938 priv->long_ref[long_ref_count++] = picture;
1939 picture->structure = picture->base.structure;
1940 picture->other_field = fs->buffers[j ^ 1];
1945 for (i = short_ref_count; i < priv->short_ref_count; i++)
1946 priv->short_ref[i] = NULL;
1947 priv->short_ref_count = short_ref_count;
1949 for (i = long_ref_count; i < priv->long_ref_count; i++)
1950 priv->long_ref[i] = NULL;
1951 priv->long_ref_count = long_ref_count;
1956 GstVaapiDecoderH264 *decoder,
1957 GstVaapiPictureH264 *picture,
1958 GstH264SliceHdr *slice_hdr
1961 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1962 GstVaapiPicture * const base_picture = &picture->base;
1965 init_picture_ref_lists(decoder);
1966 init_picture_refs_pic_num(decoder, picture, slice_hdr);
1968 priv->RefPicList0_count = 0;
1969 priv->RefPicList1_count = 0;
1971 switch (base_picture->type) {
1972 case GST_VAAPI_PICTURE_TYPE_P:
1973 case GST_VAAPI_PICTURE_TYPE_SP:
1974 init_picture_refs_p_slice(decoder, picture, slice_hdr);
1976 case GST_VAAPI_PICTURE_TYPE_B:
1977 init_picture_refs_b_slice(decoder, picture, slice_hdr);
1983 exec_picture_refs_modification(decoder, picture, slice_hdr);
1985 switch (base_picture->type) {
1986 case GST_VAAPI_PICTURE_TYPE_B:
1987 num_refs = 1 + slice_hdr->num_ref_idx_l1_active_minus1;
1988 for (i = priv->RefPicList1_count; i < num_refs; i++)
1989 priv->RefPicList1[i] = NULL;
1990 priv->RefPicList1_count = num_refs;
1993 case GST_VAAPI_PICTURE_TYPE_P:
1994 case GST_VAAPI_PICTURE_TYPE_SP:
1995 num_refs = 1 + slice_hdr->num_ref_idx_l0_active_minus1;
1996 for (i = priv->RefPicList0_count; i < num_refs; i++)
1997 priv->RefPicList0[i] = NULL;
1998 priv->RefPicList0_count = num_refs;
2007 GstVaapiDecoderH264 *decoder,
2008 GstVaapiPictureH264 *picture, GstVaapiParserInfoH264 *pi)
2010 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2011 GstVaapiPicture * const base_picture = &picture->base;
2012 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2014 priv->prev_frame_num = priv->frame_num;
2015 priv->frame_num = slice_hdr->frame_num;
2016 picture->frame_num = priv->frame_num;
2017 picture->frame_num_wrap = priv->frame_num;
2018 picture->output_flag = TRUE; /* XXX: conformant to Annex A only */
2019 base_picture->pts = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
2021 /* Reset decoder state for IDR pictures */
2022 if (pi->nalu.type == GST_H264_NAL_SLICE_IDR) {
2024 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR);
2028 /* Initialize slice type */
2029 switch (slice_hdr->type % 5) {
2030 case GST_H264_P_SLICE:
2031 base_picture->type = GST_VAAPI_PICTURE_TYPE_P;
2033 case GST_H264_B_SLICE:
2034 base_picture->type = GST_VAAPI_PICTURE_TYPE_B;
2036 case GST_H264_I_SLICE:
2037 base_picture->type = GST_VAAPI_PICTURE_TYPE_I;
2039 case GST_H264_SP_SLICE:
2040 base_picture->type = GST_VAAPI_PICTURE_TYPE_SP;
2042 case GST_H264_SI_SLICE:
2043 base_picture->type = GST_VAAPI_PICTURE_TYPE_SI;
2047 /* Initialize picture structure */
2048 if (!slice_hdr->field_pic_flag)
2049 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2051 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
2052 if (!slice_hdr->bottom_field_flag)
2053 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
2055 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
2057 picture->structure = base_picture->structure;
2059 /* Initialize reference flags */
2060 if (pi->nalu.ref_idc) {
2061 GstH264DecRefPicMarking * const dec_ref_pic_marking =
2062 &slice_hdr->dec_ref_pic_marking;
2064 if (GST_VAAPI_PICTURE_IS_IDR(picture) &&
2065 dec_ref_pic_marking->long_term_reference_flag)
2066 GST_VAAPI_PICTURE_FLAG_SET(picture,
2067 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE);
2069 GST_VAAPI_PICTURE_FLAG_SET(picture,
2070 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE);
2073 init_picture_poc(decoder, picture, slice_hdr);
2074 init_picture_refs(decoder, picture, slice_hdr);
2078 /* 8.2.5.3 - Sliding window decoded reference picture marking process */
2080 exec_ref_pic_marking_sliding_window(GstVaapiDecoderH264 *decoder)
2082 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2083 GstH264PPS * const pps = priv->current_picture->pps;
2084 GstH264SPS * const sps = pps->sequence;
2085 GstVaapiPictureH264 *ref_picture;
2086 guint i, m, max_num_ref_frames;
2088 GST_DEBUG("reference picture marking process (sliding window)");
2090 if (!GST_VAAPI_PICTURE_IS_FIRST_FIELD(priv->current_picture))
2093 max_num_ref_frames = sps->num_ref_frames;
2094 if (max_num_ref_frames == 0)
2095 max_num_ref_frames = 1;
2096 if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture))
2097 max_num_ref_frames <<= 1;
2099 if (priv->short_ref_count + priv->long_ref_count < max_num_ref_frames)
2101 if (priv->short_ref_count < 1)
2104 for (m = 0, i = 1; i < priv->short_ref_count; i++) {
2105 GstVaapiPictureH264 * const picture = priv->short_ref[i];
2106 if (picture->frame_num_wrap < priv->short_ref[m]->frame_num_wrap)
2110 ref_picture = priv->short_ref[m];
2111 gst_vaapi_picture_h264_set_reference(ref_picture, 0, TRUE);
2112 ARRAY_REMOVE_INDEX(priv->short_ref, m);
2114 /* Both fields need to be marked as "unused for reference", so
2115 remove the other field from the short_ref[] list as well */
2116 if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture) && ref_picture->other_field) {
2117 for (i = 0; i < priv->short_ref_count; i++) {
2118 if (priv->short_ref[i] == ref_picture->other_field) {
2119 ARRAY_REMOVE_INDEX(priv->short_ref, i);
2127 static inline gint32
2128 get_picNumX(GstVaapiPictureH264 *picture, GstH264RefPicMarking *ref_pic_marking)
2132 if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2133 pic_num = picture->frame_num_wrap;
2135 pic_num = 2 * picture->frame_num_wrap + 1;
2136 pic_num -= ref_pic_marking->difference_of_pic_nums_minus1 + 1;
2140 /* 8.2.5.4.1. Mark short-term reference picture as "unused for reference" */
2142 exec_ref_pic_marking_adaptive_mmco_1(
2143 GstVaapiDecoderH264 *decoder,
2144 GstVaapiPictureH264 *picture,
2145 GstH264RefPicMarking *ref_pic_marking
2148 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2151 picNumX = get_picNumX(picture, ref_pic_marking);
2152 i = find_short_term_reference(decoder, picNumX);
2156 gst_vaapi_picture_h264_set_reference(priv->short_ref[i], 0,
2157 GST_VAAPI_PICTURE_IS_FRAME(picture));
2158 ARRAY_REMOVE_INDEX(priv->short_ref, i);
2161 /* 8.2.5.4.2. Mark long-term reference picture as "unused for reference" */
2163 exec_ref_pic_marking_adaptive_mmco_2(
2164 GstVaapiDecoderH264 *decoder,
2165 GstVaapiPictureH264 *picture,
2166 GstH264RefPicMarking *ref_pic_marking
2169 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2172 i = find_long_term_reference(decoder, ref_pic_marking->long_term_pic_num);
2176 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0,
2177 GST_VAAPI_PICTURE_IS_FRAME(picture));
2178 ARRAY_REMOVE_INDEX(priv->long_ref, i);
2181 /* 8.2.5.4.3. Assign LongTermFrameIdx to a short-term reference picture */
2183 exec_ref_pic_marking_adaptive_mmco_3(
2184 GstVaapiDecoderH264 *decoder,
2185 GstVaapiPictureH264 *picture,
2186 GstH264RefPicMarking *ref_pic_marking
2189 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2190 GstVaapiPictureH264 *ref_picture;
2193 for (i = 0; i < priv->long_ref_count; i++) {
2194 if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
2197 if (i != priv->long_ref_count) {
2198 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
2199 ARRAY_REMOVE_INDEX(priv->long_ref, i);
2202 picNumX = get_picNumX(picture, ref_pic_marking);
2203 i = find_short_term_reference(decoder, picNumX);
2207 ref_picture = priv->short_ref[i];
2208 ARRAY_REMOVE_INDEX(priv->short_ref, i);
2209 priv->long_ref[priv->long_ref_count++] = ref_picture;
2211 ref_picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
2212 gst_vaapi_picture_h264_set_reference(ref_picture,
2213 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
2214 GST_VAAPI_PICTURE_IS_FRAME(picture));
2217 /* 8.2.5.4.4. Mark pictures with LongTermFramIdx > max_long_term_frame_idx
2218 * as "unused for reference" */
2220 exec_ref_pic_marking_adaptive_mmco_4(
2221 GstVaapiDecoderH264 *decoder,
2222 GstVaapiPictureH264 *picture,
2223 GstH264RefPicMarking *ref_pic_marking
2226 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2227 gint32 i, long_term_frame_idx;
2229 long_term_frame_idx = ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
2231 for (i = 0; i < priv->long_ref_count; i++) {
2232 if (priv->long_ref[i]->long_term_frame_idx <= long_term_frame_idx)
2234 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, FALSE);
2235 ARRAY_REMOVE_INDEX(priv->long_ref, i);
2240 /* 8.2.5.4.5. Mark all reference pictures as "unused for reference" */
2242 exec_ref_pic_marking_adaptive_mmco_5(
2243 GstVaapiDecoderH264 *decoder,
2244 GstVaapiPictureH264 *picture,
2245 GstH264RefPicMarking *ref_pic_marking
2248 GstVaapiDecoderH264Private * const priv = &decoder->priv;
/* remember that an MMCO=5 occurred so that subsequent POC / frame_num
 * computations can account for the DPB flush */
2252 priv->prev_pic_has_mmco5 = TRUE;
2254 /* The picture shall be inferred to have had frame_num equal to 0 (7.4.3) */
2255 priv->frame_num = 0;
2256 priv->frame_num_offset = 0;
2257 picture->frame_num = 0;
2259 /* Update TopFieldOrderCnt and BottomFieldOrderCnt (8.2.1) */
/* rebase the field POCs so that the smaller one becomes 0: each field POC
 * present in this picture structure is shifted down by the picture POC */
2260 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
2261 picture->field_poc[TOP_FIELD] -= picture->base.poc;
2262 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
2263 picture->field_poc[BOTTOM_FIELD] -= picture->base.poc;
2264 picture->base.poc = 0;
2267 /* 8.2.5.4.6. Assign a long-term frame index to the current picture */
2269 exec_ref_pic_marking_adaptive_mmco_6(
2270 GstVaapiDecoderH264 *decoder,
2271 GstVaapiPictureH264 *picture,
2272 GstH264RefPicMarking *ref_pic_marking
/* mark the *current* picture itself as a long-term reference; the third
 * argument FALSE only flags this picture, not its other field */
2275 picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
2276 gst_vaapi_picture_h264_set_reference(picture,
2277 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE, FALSE);
2280 /* 8.2.5.4. Adaptive memory control decoded reference picture marking process */
2282 exec_ref_pic_marking_adaptive(
2283 GstVaapiDecoderH264 *decoder,
2284 GstVaapiPictureH264 *picture,
2285 GstH264DecRefPicMarking *dec_ref_pic_marking
2290 GST_DEBUG("reference picture marking process (adaptive memory control)");
/* dispatch table indexed by memory_management_control_operation (MMCO);
 * slot 0 is unused (MMCO=0 terminates the list per the spec), hence the
 * NULL check before calling through the table */
2292 typedef void (*exec_ref_pic_marking_adaptive_mmco_func)(
2293 GstVaapiDecoderH264 *decoder,
2294 GstVaapiPictureH264 *picture,
2295 GstH264RefPicMarking *ref_pic_marking
2298 static const exec_ref_pic_marking_adaptive_mmco_func mmco_funcs[] = {
2300 exec_ref_pic_marking_adaptive_mmco_1,
2301 exec_ref_pic_marking_adaptive_mmco_2,
2302 exec_ref_pic_marking_adaptive_mmco_3,
2303 exec_ref_pic_marking_adaptive_mmco_4,
2304 exec_ref_pic_marking_adaptive_mmco_5,
2305 exec_ref_pic_marking_adaptive_mmco_6,
2308 for (i = 0; i < dec_ref_pic_marking->n_ref_pic_marking; i++) {
2309 GstH264RefPicMarking * const ref_pic_marking =
2310 &dec_ref_pic_marking->ref_pic_marking[i];
2312 const guint mmco = ref_pic_marking->memory_management_control_operation;
2313 if (mmco < G_N_ELEMENTS(mmco_funcs) && mmco_funcs[mmco])
2314 mmco_funcs[mmco](decoder, picture, ref_pic_marking);
/* out-of-range or unimplemented MMCO: report it (error path elided here) */
2316 GST_ERROR("unhandled MMCO %u", mmco);
2323 /* 8.2.5 - Execute reference picture marking process */
2325 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
2327 GstVaapiDecoderH264Private * const priv = &decoder->priv;
/* reset per-picture marking state before running the process */
2329 priv->prev_pic_has_mmco5 = FALSE;
2330 priv->prev_pic_structure = picture->structure;
/* non-reference pictures take no part in reference marking */
2332 if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
/* for non-IDR reference pictures, choose between the adaptive process
 * (explicit MMCO list from the last slice header) and the default
 * sliding-window process (8.2.5.3) */
2335 if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
2336 GstH264DecRefPicMarking * const dec_ref_pic_marking =
2337 &picture->last_slice_hdr->dec_ref_pic_marking;
2338 if (dec_ref_pic_marking->adaptive_ref_pic_marking_mode_flag) {
2339 if (!exec_ref_pic_marking_adaptive(decoder, picture, dec_ref_pic_marking))
2343 if (!exec_ref_pic_marking_sliding_window(decoder))
/* Reset a VAPictureH264 entry to the "invalid / unused" state expected by
 * VA-API for unfilled ReferenceFrames/RefPicList slots. */
2351 vaapi_init_picture(VAPictureH264 *pic)
2353 pic->picture_id = VA_INVALID_ID;
2355 pic->flags = VA_PICTURE_H264_INVALID;
2356 pic->TopFieldOrderCnt = 0;
2357 pic->BottomFieldOrderCnt = 0;
/* Translate a decoder-side GstVaapiPictureH264 into the VAPictureH264
 * representation consumed by the VA-API driver.  @picture_structure may be
 * 0 to mean "use the picture's own structure". */
2361 vaapi_fill_picture(VAPictureH264 *pic, GstVaapiPictureH264 *picture,
2362 guint picture_structure)
2364 if (!picture_structure)
2365 picture_structure = picture->structure;
2367 pic->picture_id = picture->base.surface_id;
/* reference kind: long-term references carry long_term_frame_idx in
 * frame_idx, short-term references carry frame_num */
2370 if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)) {
2371 pic->flags |= VA_PICTURE_H264_LONG_TERM_REFERENCE;
2372 pic->frame_idx = picture->long_term_frame_idx;
2375 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
2376 pic->flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
2377 pic->frame_idx = picture->frame_num;
/* POC fields: a frame provides both field POCs, a single field provides
 * only its own and zeroes the other */
2380 switch (picture_structure) {
2381 case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
2382 pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
2383 pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
2385 case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
2386 pic->flags |= VA_PICTURE_H264_TOP_FIELD;
2387 pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
2388 pic->BottomFieldOrderCnt = 0;
2390 case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
2391 pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD;
2392 pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
2393 pic->TopFieldOrderCnt = 0;
/* Populate the VAPictureParameterBufferH264 for the current picture from
 * the active SPS/PPS, the slice header and the DPB reference frames. */
2399 fill_picture(GstVaapiDecoderH264 *decoder,
2400 GstVaapiPictureH264 *picture, GstVaapiParserInfoH264 *pi)
2402 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2403 GstVaapiPicture * const base_picture = &picture->base;
2404 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2405 GstH264PPS * const pps = picture->pps;
2406 GstH264SPS * const sps = pps->sequence;
2407 VAPictureParameterBufferH264 * const pic_param = base_picture->param;
2410 /* Fill in VAPictureParameterBufferH264 */
2411 vaapi_fill_picture(&pic_param->CurrPic, picture, 0);
/* copy every DPB frame store that still holds a reference into the
 * ReferenceFrames array, then mark the remaining slots invalid */
2413 for (i = 0, n = 0; i < priv->dpb_count; i++) {
2414 GstVaapiFrameStore * const fs = priv->dpb[i];
2415 if (gst_vaapi_frame_store_has_reference(fs))
2416 vaapi_fill_picture(&pic_param->ReferenceFrames[n++],
2417 fs->buffers[0], fs->structure);
2419 for (; n < G_N_ELEMENTS(pic_param->ReferenceFrames); n++)
2420 vaapi_init_picture(&pic_param->ReferenceFrames[n]);
/* COPY_FIELD: copy an identically-named field from a parsed header struct;
 * COPY_BFM: same, but into a named bitfield member of pic_param */
2422 #define COPY_FIELD(s, f) \
2423 pic_param->f = (s)->f
2425 #define COPY_BFM(a, s, f) \
2426 pic_param->a.bits.f = (s)->f
2428 pic_param->picture_width_in_mbs_minus1 = priv->mb_width - 1;
2429 pic_param->picture_height_in_mbs_minus1 = priv->mb_height - 1;
2430 pic_param->frame_num = priv->frame_num;
2432 COPY_FIELD(sps, bit_depth_luma_minus8);
2433 COPY_FIELD(sps, bit_depth_chroma_minus8);
2434 COPY_FIELD(sps, num_ref_frames);
2435 COPY_FIELD(pps, num_slice_groups_minus1);
2436 COPY_FIELD(pps, slice_group_map_type);
2437 COPY_FIELD(pps, slice_group_change_rate_minus1);
2438 COPY_FIELD(pps, pic_init_qp_minus26);
2439 COPY_FIELD(pps, pic_init_qs_minus26);
2440 COPY_FIELD(pps, chroma_qp_index_offset);
2441 COPY_FIELD(pps, second_chroma_qp_index_offset);
2443 pic_param->seq_fields.value = 0; /* reset all bits */
2444 pic_param->seq_fields.bits.residual_colour_transform_flag = sps->separate_colour_plane_flag;
2445 pic_param->seq_fields.bits.MinLumaBiPredSize8x8 = sps->level_idc >= 31; /* A.3.3.2 */
2447 COPY_BFM(seq_fields, sps, chroma_format_idc);
2448 COPY_BFM(seq_fields, sps, gaps_in_frame_num_value_allowed_flag);
2449 COPY_BFM(seq_fields, sps, frame_mbs_only_flag);
2450 COPY_BFM(seq_fields, sps, mb_adaptive_frame_field_flag);
2451 COPY_BFM(seq_fields, sps, direct_8x8_inference_flag);
2452 COPY_BFM(seq_fields, sps, log2_max_frame_num_minus4);
2453 COPY_BFM(seq_fields, sps, pic_order_cnt_type);
2454 COPY_BFM(seq_fields, sps, log2_max_pic_order_cnt_lsb_minus4);
2455 COPY_BFM(seq_fields, sps, delta_pic_order_always_zero_flag);
2457 pic_param->pic_fields.value = 0; /* reset all bits */
2458 pic_param->pic_fields.bits.field_pic_flag = slice_hdr->field_pic_flag;
2459 pic_param->pic_fields.bits.reference_pic_flag = GST_VAAPI_PICTURE_IS_REFERENCE(picture);
2461 COPY_BFM(pic_fields, pps, entropy_coding_mode_flag);
2462 COPY_BFM(pic_fields, pps, weighted_pred_flag);
2463 COPY_BFM(pic_fields, pps, weighted_bipred_idc);
2464 COPY_BFM(pic_fields, pps, transform_8x8_mode_flag);
2465 COPY_BFM(pic_fields, pps, constrained_intra_pred_flag);
2466 COPY_BFM(pic_fields, pps, pic_order_present_flag);
2467 COPY_BFM(pic_fields, pps, deblocking_filter_control_present_flag);
2468 COPY_BFM(pic_fields, pps, redundant_pic_cnt_present_flag);
2472 /* Detection of the first VCL NAL unit of a primary coded picture (7.4.1.2.4) */
/* Returns whether @pi starts a new primary coded picture relative to the
 * previous slice's parser info @prev_pi, by comparing the slice-header
 * fields enumerated in the spec.  Each CHECK_* macro bails out (result
 * handling elided in this extract) as soon as a difference is found. */
2474 is_new_picture(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
2476 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2477 GstH264PPS * const pps = slice_hdr->pps;
2478 GstH264SPS * const sps = pps->sequence;
2479 GstH264SliceHdr *prev_slice_hdr;
2483 prev_slice_hdr = &prev_pi->data.slice_hdr;
2485 #define CHECK_EXPR(expr, field_name) do { \
2487 GST_DEBUG(field_name " differs in value"); \
2492 #define CHECK_VALUE(new_slice_hdr, old_slice_hdr, field) \
2493 CHECK_EXPR(((new_slice_hdr)->field == (old_slice_hdr)->field), #field)
2495 /* frame_num differs in value, regardless of inferred values to 0 */
2496 CHECK_VALUE(slice_hdr, prev_slice_hdr, frame_num);
2498 /* pic_parameter_set_id differs in value */
2499 CHECK_VALUE(slice_hdr, prev_slice_hdr, pps);
2501 /* field_pic_flag differs in value */
2502 CHECK_VALUE(slice_hdr, prev_slice_hdr, field_pic_flag);
2504 /* bottom_field_flag is present in both and differs in value */
2505 if (slice_hdr->field_pic_flag && prev_slice_hdr->field_pic_flag)
2506 CHECK_VALUE(slice_hdr, prev_slice_hdr, bottom_field_flag);
2508 /* nal_ref_idc differs in value with one of the nal_ref_idc values is 0 */
2509 CHECK_EXPR((pi->nalu.ref_idc != 0) ==
2510 (prev_pi->nalu.ref_idc != 0), "nal_ref_idc");
2512 /* POC type is 0 for both and either pic_order_cnt_lsb differs in
2513 value or delta_pic_order_cnt_bottom differs in value */
2514 if (sps->pic_order_cnt_type == 0) {
2515 CHECK_VALUE(slice_hdr, prev_slice_hdr, pic_order_cnt_lsb);
2516 if (pps->pic_order_present_flag && !slice_hdr->field_pic_flag)
2517 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt_bottom);
2520 /* POC type is 1 for both and either delta_pic_order_cnt[0]
2521 differs in value or delta_pic_order_cnt[1] differs in value */
2522 else if (sps->pic_order_cnt_type == 1) {
2523 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[0]);
2524 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[1]);
2527 /* IdrPicFlag differs in value */
2528 CHECK_VALUE(&pi->nalu, &prev_pi->nalu, idr_pic_flag);
2530 /* IdrPicFlag is equal to 1 for both and idr_pic_id differs in value */
2531 if (pi->nalu.idr_pic_flag)
2532 CHECK_VALUE(slice_hdr, prev_slice_hdr, idr_pic_id);
/* Start decoding a new picture (or the second field of the current one):
 * allocates the GstVaapiPictureH264, applies cropping, the quantization
 * matrices and fills the VA picture parameters. */
2539 static GstVaapiDecoderStatus
2540 decode_picture(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
2542 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2543 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
2544 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2545 GstH264PPS * const pps = slice_hdr->pps;
2546 GstH264SPS * const sps = pps->sequence;
2547 GstVaapiPictureH264 *picture;
2548 GstVaapiDecoderStatus status;
2550 status = ensure_context(decoder, sps);
2551 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
2554 priv->decoder_state = 0;
2556 if (priv->current_picture) {
2557 /* Re-use current picture where the first field was decoded */
2558 picture = gst_vaapi_picture_h264_new_field(priv->current_picture);
2560 GST_ERROR("failed to allocate field picture");
2561 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
2565 /* Create new picture */
2566 picture = gst_vaapi_picture_h264_new(decoder);
2568 GST_ERROR("failed to allocate picture");
2569 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
/* priv->current_picture takes its own reference; drop the local one */
2572 gst_vaapi_picture_replace(&priv->current_picture, picture);
2573 gst_vaapi_picture_unref(picture);
2575 /* Update cropping rectangle */
2576 if (sps->frame_cropping_flag) {
2577 GstVaapiRectangle crop_rect;
2578 crop_rect.x = sps->crop_rect_x;
2579 crop_rect.y = sps->crop_rect_y;
2580 crop_rect.width = sps->crop_rect_width;
2581 crop_rect.height = sps->crop_rect_height;
2582 gst_vaapi_picture_set_crop_rect(&picture->base, &crop_rect);
2587 status = ensure_quant_matrix(decoder, picture);
2588 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
2589 GST_ERROR("failed to reset quantizer matrix");
2593 if (!init_picture(decoder, picture, pi))
2594 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
2595 if (!fill_picture(decoder, picture, pi))
2596 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
2598 priv->decoder_state = pi->state;
2599 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Bit offset of slice_data() from the start of the NAL unit, with
 * emulation-prevention bytes (EPB) subtracted out, as VA-API expects. */
2603 get_slice_data_bit_offset(GstH264SliceHdr *slice_hdr)
2607 epb_count = slice_hdr->n_emulation_prevention_bytes;
2608 return 8 /* nal_unit_type */ + slice_hdr->header_size - epb_count * 8;
/* Copy the slice header's prediction weight table into the VA slice
 * parameters.  Weights apply to list 0 for weighted P/SP slices and to
 * both lists for explicitly weighted B slices (weighted_bipred_idc == 1). */
2612 fill_pred_weight_table(GstVaapiDecoderH264 *decoder,
2613 GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
2615 VASliceParameterBufferH264 * const slice_param = slice->param;
2616 GstH264PPS * const pps = slice_hdr->pps;
2617 GstH264SPS * const sps = pps->sequence;
2618 GstH264PredWeightTable * const w = &slice_hdr->pred_weight_table;
2619 guint num_weight_tables = 0;
2622 if (pps->weighted_pred_flag &&
2623 (GST_H264_IS_P_SLICE(slice_hdr) || GST_H264_IS_SP_SLICE(slice_hdr)))
2624 num_weight_tables = 1;
2625 else if (pps->weighted_bipred_idc == 1 && GST_H264_IS_B_SLICE(slice_hdr))
2626 num_weight_tables = 2;
2628 num_weight_tables = 0;
/* always reset the weight-table fields first so unweighted slices hand
 * the driver a clean structure */
2630 slice_param->luma_log2_weight_denom = 0;
2631 slice_param->chroma_log2_weight_denom = 0;
2632 slice_param->luma_weight_l0_flag = 0;
2633 slice_param->chroma_weight_l0_flag = 0;
2634 slice_param->luma_weight_l1_flag = 0;
2635 slice_param->chroma_weight_l1_flag = 0;
2637 if (num_weight_tables < 1)
2640 slice_param->luma_log2_weight_denom = w->luma_log2_weight_denom;
2641 slice_param->chroma_log2_weight_denom = w->chroma_log2_weight_denom;
/* list 0 weights (P/SP and B slices) */
2643 slice_param->luma_weight_l0_flag = 1;
2644 for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
2645 slice_param->luma_weight_l0[i] = w->luma_weight_l0[i];
2646 slice_param->luma_offset_l0[i] = w->luma_offset_l0[i];
/* chroma weights only exist when ChromaArrayType != 0 (i.e. not monochrome
 * or separate colour planes) */
2649 slice_param->chroma_weight_l0_flag = sps->chroma_array_type != 0;
2650 if (slice_param->chroma_weight_l0_flag) {
2651 for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
2652 for (j = 0; j < 2; j++) {
2653 slice_param->chroma_weight_l0[i][j] = w->chroma_weight_l0[i][j];
2654 slice_param->chroma_offset_l0[i][j] = w->chroma_offset_l0[i][j];
2659 if (num_weight_tables < 2)
/* list 1 weights (B slices only) */
2662 slice_param->luma_weight_l1_flag = 1;
2663 for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
2664 slice_param->luma_weight_l1[i] = w->luma_weight_l1[i];
2665 slice_param->luma_offset_l1[i] = w->luma_offset_l1[i];
2668 slice_param->chroma_weight_l1_flag = sps->chroma_array_type != 0;
2669 if (slice_param->chroma_weight_l1_flag) {
2670 for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
2671 for (j = 0; j < 2; j++) {
2672 slice_param->chroma_weight_l1[i][j] = w->chroma_weight_l1[i][j];
2673 slice_param->chroma_offset_l1[i][j] = w->chroma_offset_l1[i][j];
/* Copy the decoder's modified reference picture lists (RefPicList0/1) into
 * the VA slice parameters.  I slices use no list, P/SP slices use list 0,
 * B slices use both. */
2681 fill_RefPicList(GstVaapiDecoderH264 *decoder,
2682 GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
2684 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2685 VASliceParameterBufferH264 * const slice_param = slice->param;
2686 guint i, num_ref_lists = 0;
2688 slice_param->num_ref_idx_l0_active_minus1 = 0;
2689 slice_param->num_ref_idx_l1_active_minus1 = 0;
2691 if (GST_H264_IS_B_SLICE(slice_hdr))
2693 else if (GST_H264_IS_I_SLICE(slice_hdr))
2698 if (num_ref_lists < 1)
2701 slice_param->num_ref_idx_l0_active_minus1 =
2702 slice_hdr->num_ref_idx_l0_active_minus1;
/* fill used entries from priv->RefPicList0, mark the rest invalid */
2704 for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
2705 vaapi_fill_picture(&slice_param->RefPicList0[i], priv->RefPicList0[i], 0);
2706 for (; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
2707 vaapi_init_picture(&slice_param->RefPicList0[i]);
2709 if (num_ref_lists < 2)
2712 slice_param->num_ref_idx_l1_active_minus1 =
2713 slice_hdr->num_ref_idx_l1_active_minus1;
2715 for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
2716 vaapi_fill_picture(&slice_param->RefPicList1[i], priv->RefPicList1[i], 0);
2717 for (; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
2718 vaapi_init_picture(&slice_param->RefPicList1[i]);
/* Populate a VASliceParameterBufferH264 from the parsed slice header, then
 * delegate the reference lists and prediction weight table. */
2723 fill_slice(GstVaapiDecoderH264 *decoder,
2724 GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
2726 VASliceParameterBufferH264 * const slice_param = slice->param;
2728 /* Fill in VASliceParameterBufferH264 */
2729 slice_param->slice_data_bit_offset = get_slice_data_bit_offset(slice_hdr);
2730 slice_param->first_mb_in_slice = slice_hdr->first_mb_in_slice;
2731 slice_param->slice_type = slice_hdr->type % 5; /* map 5..9 onto 0..4 (7.4.3) */
2732 slice_param->direct_spatial_mv_pred_flag = slice_hdr->direct_spatial_mv_pred_flag;
2733 slice_param->cabac_init_idc = slice_hdr->cabac_init_idc;
2734 slice_param->slice_qp_delta = slice_hdr->slice_qp_delta;
2735 slice_param->disable_deblocking_filter_idc = slice_hdr->disable_deblocking_filter_idc;
2736 slice_param->slice_alpha_c0_offset_div2 = slice_hdr->slice_alpha_c0_offset_div2;
2737 slice_param->slice_beta_offset_div2 = slice_hdr->slice_beta_offset_div2;
2739 if (!fill_RefPicList(decoder, slice, slice_hdr))
2741 if (!fill_pred_weight_table(decoder, slice, slice_hdr))
/* Decode one slice NAL unit: map the input buffer, wrap the slice payload
 * in a GstVaapiSlice, fill its VA parameters and attach it to the current
 * picture. */
2746 static GstVaapiDecoderStatus
2747 decode_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
2749 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2750 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
2751 GstVaapiPictureH264 * const picture = priv->current_picture;
2752 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2753 GstVaapiSlice *slice;
2754 GstBuffer * const buffer =
2755 GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
2756 GstMapInfo map_info;
2758 GST_DEBUG("slice (%u bytes)", pi->nalu.size);
/* without valid SPS/PPS headers the slice cannot be decoded; skip it
 * instead of failing the whole stream */
2760 if (!is_valid_state(pi->state,
2761 GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS)) {
2762 GST_WARNING("failed to receive enough headers to decode slice");
2763 return GST_VAAPI_DECODER_STATUS_SUCCESS;
2766 if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
2767 GST_ERROR("failed to map buffer");
2768 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
/* GST_VAAPI_SLICE_NEW copies the payload, so the buffer can be unmapped
 * immediately afterwards */
2771 slice = GST_VAAPI_SLICE_NEW(H264, decoder,
2772 (map_info.data + unit->offset + pi->nalu.offset), pi->nalu.size);
2773 gst_buffer_unmap(buffer, &map_info);
2775 GST_ERROR("failed to allocate slice");
2776 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
2779 if (!fill_slice(decoder, slice, slice_hdr)) {
2780 gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(slice));
2781 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
/* the picture takes ownership of the slice; remember the slice header for
 * the reference-marking process at end of picture */
2784 gst_vaapi_picture_add_slice(GST_VAAPI_PICTURE_CAST(picture), slice);
2785 picture->last_slice_hdr = slice_hdr;
2786 priv->decoder_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
2787 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Scan the adapter for a 00 00 01 start code; returns its offset, or a
 * negative value when none is found in [ofs, ofs+size). */
2791 scan_for_start_code(GstAdapter *adapter, guint ofs, guint size, guint32 *scp)
2793 return (gint)gst_adapter_masked_scan_uint32_peek(adapter,
2794 0xffffff00, 0x00000100,
/* Dispatch an already-parsed NAL unit to the matching decode routine.
 * SPS/PPS/SEI were consumed at parse time, so SEI is a no-op here. */
2799 static GstVaapiDecoderStatus
2800 decode_unit(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
2802 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2803 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
2804 GstVaapiDecoderStatus status;
2806 priv->decoder_state |= pi->state;
2807 switch (pi->nalu.type) {
2808 case GST_H264_NAL_SLICE_IDR:
2809 /* fall-through. IDR specifics are handled in init_picture() */
2810 case GST_H264_NAL_SLICE:
2811 status = decode_slice(decoder, unit);
2813 case GST_H264_NAL_SEQ_END:
2814 case GST_H264_NAL_STREAM_END:
2815 status = decode_sequence_end(decoder);
2817 case GST_H264_NAL_SEI:
2818 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
2821 GST_WARNING("unsupported NAL unit type %d", pi->nalu.type);
2822 status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
/* Parse avcC-formatted codec-data: read the NAL length size, then the
 * embedded SPS and PPS NAL units (each prefixed by a 2-byte length).
 * On success the decoder switches to AVC (length-prefixed) input mode. */
2828 static GstVaapiDecoderStatus
2829 gst_vaapi_decoder_h264_decode_codec_data(GstVaapiDecoder *base_decoder,
2830 const guchar *buf, guint buf_size)
2832 GstVaapiDecoderH264 * const decoder =
2833 GST_VAAPI_DECODER_H264_CAST(base_decoder);
2834 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2835 GstVaapiDecoderStatus status;
2836 GstVaapiDecoderUnit unit;
2837 GstVaapiParserInfoH264 pi;
2838 GstH264ParserResult result;
2839 guint i, ofs, num_sps, num_pps;
/* stack-allocated parser info: parse_sps()/parse_pps() read it via the
 * unit, no heap allocation needed for codec-data */
2841 unit.parsed_info = &pi;
2844 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
2847 GST_ERROR("failed to decode codec-data, not in avcC format");
2848 return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
/* lengthSizeMinusOne is in the low 2 bits of byte 4 of avcC */
2851 priv->nal_length_size = (buf[4] & 0x03) + 1;
2853 num_sps = buf[5] & 0x1f;
2856 for (i = 0; i < num_sps; i++) {
2857 result = gst_h264_parser_identify_nalu_avc(
2859 buf, ofs, buf_size, 2,
2862 if (result != GST_H264_PARSER_OK)
2863 return get_status(result);
2865 status = parse_sps(decoder, &unit);
2866 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
2868 ofs = pi.nalu.offset + pi.nalu.size;
2874 for (i = 0; i < num_pps; i++) {
2875 result = gst_h264_parser_identify_nalu_avc(
2877 buf, ofs, buf_size, 2,
2880 if (result != GST_H264_PARSER_OK)
2881 return get_status(result);
2883 status = parse_pps(decoder, &unit);
2884 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
2886 ofs = pi.nalu.offset + pi.nalu.size;
2889 priv->is_avcC = TRUE;
2890 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Lazily open the decoder and process any pending codec-data; idempotent
 * once priv->is_opened is set. */
2893 static GstVaapiDecoderStatus
2894 ensure_decoder(GstVaapiDecoderH264 *decoder)
2896 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2897 GstVaapiDecoderStatus status;
2899 if (!priv->is_opened) {
2900 priv->is_opened = gst_vaapi_decoder_h264_open(decoder);
2901 if (!priv->is_opened)
2902 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
2904 status = gst_vaapi_decoder_decode_codec_data(
2905 GST_VAAPI_DECODER_CAST(decoder));
2906 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
2909 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Split the input adapter into one NAL unit per GstVaapiDecoderUnit.
 * Handles both AVC (length-prefixed, priv->is_avcC) and Annex-B
 * (start-code delimited) input, parses the unit's headers, and sets the
 * FRAME_START/FRAME_END/SLICE/SKIP flags used by the frame splitter. */
2912 static GstVaapiDecoderStatus
2913 gst_vaapi_decoder_h264_parse(GstVaapiDecoder *base_decoder,
2914 GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
2916 GstVaapiDecoderH264 * const decoder =
2917 GST_VAAPI_DECODER_H264_CAST(base_decoder);
2918 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2919 GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
2920 GstVaapiParserInfoH264 *pi;
2921 GstVaapiDecoderStatus status;
2922 GstH264ParserResult result;
2924 guint i, size, buf_size, nalu_size, flags;
2928 status = ensure_decoder(decoder);
2929 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
2932 size = gst_adapter_available(adapter);
2934 if (priv->is_avcC) {
/* AVC mode: read the big-endian length prefix (1..4 bytes) to size the
 * unit, then wait until the whole NAL unit is available */
2935 if (size < priv->nal_length_size)
2936 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
2938 buf = (guchar *)&start_code;
2939 g_assert(priv->nal_length_size <= sizeof(start_code));
2940 gst_adapter_copy(adapter, buf, 0, priv->nal_length_size);
2943 for (i = 0; i < priv->nal_length_size; i++)
2944 nalu_size = (nalu_size << 8) | buf[i];
2946 buf_size = priv->nal_length_size + nalu_size;
2947 if (size < buf_size)
2948 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
/* Annex-B mode: locate the start code of this unit, flush any leading
 * garbage, then find the next start code to delimit the unit.
 * ps->input_offset2 caches the resume point between calls. */
2952 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
2954 ofs = scan_for_start_code(adapter, 0, size, NULL);
2956 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
2959 gst_adapter_flush(adapter, ofs);
2963 ofs2 = ps->input_offset2 - ofs - 4;
2967 ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
2968 scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
2970 // Assume the whole NAL unit is present if end-of-stream
2972 ps->input_offset2 = size;
2973 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
2979 ps->input_offset2 = 0;
2981 buf = (guchar *)gst_adapter_map(adapter, buf_size);
2983 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
2985 unit->size = buf_size;
2987 pi = gst_vaapi_parser_info_h264_new();
2989 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
/* the unit owns the parser info from here on */
2991 gst_vaapi_decoder_unit_set_parsed_info(unit,
2992 pi, (GDestroyNotify)gst_vaapi_mini_object_unref);
2995 result = gst_h264_parser_identify_nalu_avc(priv->parser,
2996 buf, 0, buf_size, priv->nal_length_size, &pi->nalu);
2998 result = gst_h264_parser_identify_nalu_unchecked(priv->parser,
2999 buf, 0, buf_size, &pi->nalu);
3000 status = get_status(result);
3001 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* parse headers eagerly so decoder_state is up to date before decode */
3004 switch (pi->nalu.type) {
3005 case GST_H264_NAL_SPS:
3006 status = parse_sps(decoder, unit);
3008 case GST_H264_NAL_PPS:
3009 status = parse_pps(decoder, unit);
3011 case GST_H264_NAL_SEI:
3012 status = parse_sei(decoder, unit);
3014 case GST_H264_NAL_SLICE_IDR:
3015 case GST_H264_NAL_SLICE:
3016 status = parse_slice(decoder, unit);
3019 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3022 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* frame boundary detection: mark where access units start and end */
3026 switch (pi->nalu.type) {
3027 case GST_H264_NAL_AU_DELIMITER:
3028 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
3030 case GST_H264_NAL_FILLER_DATA:
3031 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
3033 case GST_H264_NAL_STREAM_END:
3034 flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
3036 case GST_H264_NAL_SEQ_END:
3037 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
3039 case GST_H264_NAL_SPS:
3040 case GST_H264_NAL_PPS:
3041 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
3043 case GST_H264_NAL_SEI:
3044 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
3046 case GST_H264_NAL_SLICE_IDR:
3047 case GST_H264_NAL_SLICE:
3048 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
3049 if (is_new_picture(pi, priv->prev_slice_pi))
3050 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
3051 gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, pi);
/* NAL types 14..18 (prefix / subset SPS / DPS / reserved) also delimit
 * a new frame per 7.4.1.2.3 */
3054 if (pi->nalu.type >= 14 && pi->nalu.type <= 18)
3055 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
3058 GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
/* the mapped adapter data is not valid past this call */
3060 pi->nalu.data = NULL;
3061 pi->state = priv->parser_state;
3062 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* GstVaapiDecoderClass::decode — decode one parsed unit. */
3065 static GstVaapiDecoderStatus
3066 gst_vaapi_decoder_h264_decode(GstVaapiDecoder *base_decoder,
3067 GstVaapiDecoderUnit *unit)
3069 GstVaapiDecoderH264 * const decoder =
3070 GST_VAAPI_DECODER_H264_CAST(base_decoder);
3071 GstVaapiDecoderStatus status;
3073 status = ensure_decoder(decoder);
3074 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3076 return decode_unit(decoder, unit);
/* GstVaapiDecoderClass::start_frame — begin a new coded picture. */
3079 static GstVaapiDecoderStatus
3080 gst_vaapi_decoder_h264_start_frame(GstVaapiDecoder *base_decoder,
3081 GstVaapiDecoderUnit *unit)
3083 GstVaapiDecoderH264 * const decoder =
3084 GST_VAAPI_DECODER_H264_CAST(base_decoder);
3086 return decode_picture(decoder, unit);
/* GstVaapiDecoderClass::end_frame — submit the current picture. */
3089 static GstVaapiDecoderStatus
3090 gst_vaapi_decoder_h264_end_frame(GstVaapiDecoder *base_decoder)
3092 GstVaapiDecoderH264 * const decoder =
3093 GST_VAAPI_DECODER_H264_CAST(base_decoder);
3095 return decode_current_picture(decoder);
/* GstVaapiDecoderClass::flush — drain decoder state (DPB flush elided in
 * this extract) and report success. */
3098 static GstVaapiDecoderStatus
3099 gst_vaapi_decoder_h264_flush(GstVaapiDecoder *base_decoder)
3101 GstVaapiDecoderH264 * const decoder =
3102 GST_VAAPI_DECODER_H264_CAST(base_decoder);
3105 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* One-time class initialization: wire up the GstVaapiDecoder vtable. */
3109 gst_vaapi_decoder_h264_class_init(GstVaapiDecoderH264Class *klass)
3111 GstVaapiMiniObjectClass * const object_class =
3112 GST_VAAPI_MINI_OBJECT_CLASS(klass);
3113 GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
3115 object_class->size = sizeof(GstVaapiDecoderH264);
3116 object_class->finalize = (GDestroyNotify)gst_vaapi_decoder_finalize;
3118 decoder_class->create = gst_vaapi_decoder_h264_create;
3119 decoder_class->destroy = gst_vaapi_decoder_h264_destroy;
3120 decoder_class->parse = gst_vaapi_decoder_h264_parse;
3121 decoder_class->decode = gst_vaapi_decoder_h264_decode;
3122 decoder_class->start_frame = gst_vaapi_decoder_h264_start_frame;
3123 decoder_class->end_frame = gst_vaapi_decoder_h264_end_frame;
3124 decoder_class->flush = gst_vaapi_decoder_h264_flush;
3126 decoder_class->decode_codec_data =
3127 gst_vaapi_decoder_h264_decode_codec_data;
/* Return the lazily-initialized singleton class, thread-safe via
 * g_once_init_enter/leave. */
3130 static inline const GstVaapiDecoderClass *
3131 gst_vaapi_decoder_h264_class(void)
3133 static GstVaapiDecoderH264Class g_class;
3134 static gsize g_class_init = FALSE;
3136 if (g_once_init_enter(&g_class_init)) {
3137 gst_vaapi_decoder_h264_class_init(&g_class);
3138 g_once_init_leave(&g_class_init, TRUE);
3140 return GST_VAAPI_DECODER_CLASS(&g_class);
3144 * gst_vaapi_decoder_h264_new:
3145 * @display: a #GstVaapiDisplay
3146 * @caps: a #GstCaps holding codec information
3148 * Creates a new #GstVaapiDecoder for H.264 decoding. The @caps can
3149 * hold extra information like codec-data and picture coded size.
3151 * Return value: the newly allocated #GstVaapiDecoder object
3154 gst_vaapi_decoder_h264_new(GstVaapiDisplay *display, GstCaps *caps)
3156 return gst_vaapi_decoder_new(gst_vaapi_decoder_h264_class(), display, caps);