2 * gstvaapidecoder_h264.c - H.264 decoder
4 * Copyright (C) 2011-2014 Intel Corporation
5 * Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public License
9 * as published by the Free Software Foundation; either version 2.1
10 * of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, write to the Free
19 * Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
20 * Boston, MA 02110-1301 USA
24 * SECTION:gstvaapidecoder_h264
25 * @short_description: H.264 decoder
30 #include <gst/base/gstadapter.h>
31 #include <gst/codecparsers/gsth264parser.h>
32 #include "gstvaapidecoder_h264.h"
33 #include "gstvaapidecoder_objects.h"
34 #include "gstvaapidecoder_priv.h"
35 #include "gstvaapidisplay_priv.h"
36 #include "gstvaapiobject_priv.h"
37 #include "gstvaapiutils_h264_priv.h"
40 #include "gstvaapidebug.h"
42 /* Defined to 1 if strict ordering of DPB is needed. Only useful for debug */
43 #define USE_STRICT_DPB_ORDERING 0
45 typedef struct _GstVaapiDecoderH264Private GstVaapiDecoderH264Private;
46 typedef struct _GstVaapiDecoderH264Class GstVaapiDecoderH264Class;
47 typedef struct _GstVaapiFrameStore GstVaapiFrameStore;
48 typedef struct _GstVaapiFrameStoreClass GstVaapiFrameStoreClass;
49 typedef struct _GstVaapiParserInfoH264 GstVaapiParserInfoH264;
50 typedef struct _GstVaapiPictureH264 GstVaapiPictureH264;
52 // Used for field_poc[]
54 #define BOTTOM_FIELD 1
56 /* ------------------------------------------------------------------------- */
57 /* --- H.264 Parser Info --- */
58 /* ------------------------------------------------------------------------- */
61 * Extended decoder unit flags:
63 * @GST_VAAPI_DECODER_UNIT_AU_START: marks the start of an access unit.
64 * @GST_VAAPI_DECODER_UNIT_AU_END: marks the end of an access unit.
67 /* This flag does not strictly follow the definitions (7.4.1.2.3)
68 for detecting the start of an access unit as we are only
69 interested in knowing if the current slice is the first one or
70 the last one in the current access unit */
71 GST_VAAPI_DECODER_UNIT_FLAG_AU_START = (
72 GST_VAAPI_DECODER_UNIT_FLAG_LAST << 0),
73 GST_VAAPI_DECODER_UNIT_FLAG_AU_END = (
74 GST_VAAPI_DECODER_UNIT_FLAG_LAST << 1),
76 GST_VAAPI_DECODER_UNIT_FLAGS_AU = (
77 GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
78 GST_VAAPI_DECODER_UNIT_FLAG_AU_END),
81 #define GST_VAAPI_PARSER_INFO_H264(obj) \
82 ((GstVaapiParserInfoH264 *)(obj))
84 struct _GstVaapiParserInfoH264 {
85 GstVaapiMiniObject parent_instance;
91 GstH264SliceHdr slice_hdr;
94 guint flags; // Same as decoder unit flags (persistent)
95 guint view_id; // View ID of slice
96 guint voc; // View order index (VOIdx) of slice
/* Finalizer for GstVaapiParserInfoH264: releases NAL-type-specific payload
 * data owned by the parser info before the mini-object is freed.
 * NOTE(review): the switch braces/break statements are not visible in this
 * view of the file — confirm fall-through behavior against the full source. */
gst_vaapi_parser_info_h264_finalize(GstVaapiParserInfoH264 *pi)
    /* Dispatch on the NAL unit type this parser info was parsed from */
    switch (pi->nalu.type) {
    case GST_H264_NAL_SPS:
    case GST_H264_NAL_SUBSET_SPS:
        /* SPS payloads carry parser-allocated data that must be cleared */
        gst_h264_sps_clear(&pi->data.sps);
    case GST_H264_NAL_SEI:
        /* SEI payload is a GArray of messages; drop our reference */
        g_array_unref(pi->data.sei);
116 static inline const GstVaapiMiniObjectClass *
117 gst_vaapi_parser_info_h264_class(void)
119 static const GstVaapiMiniObjectClass GstVaapiParserInfoH264Class = {
120 .size = sizeof(GstVaapiParserInfoH264),
121 .finalize = (GDestroyNotify)gst_vaapi_parser_info_h264_finalize
123 return &GstVaapiParserInfoH264Class;
126 static inline GstVaapiParserInfoH264 *
127 gst_vaapi_parser_info_h264_new(void)
129 return (GstVaapiParserInfoH264 *)
130 gst_vaapi_mini_object_new(gst_vaapi_parser_info_h264_class());
133 #define gst_vaapi_parser_info_h264_ref(pi) \
134 gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(pi))
136 #define gst_vaapi_parser_info_h264_unref(pi) \
137 gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(pi))
139 #define gst_vaapi_parser_info_h264_replace(old_pi_ptr, new_pi) \
140 gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_pi_ptr), \
141 (GstVaapiMiniObject *)(new_pi))
143 /* ------------------------------------------------------------------------- */
144 /* --- H.264 Pictures --- */
145 /* ------------------------------------------------------------------------- */
148 * Extended picture flags:
150 * @GST_VAAPI_PICTURE_FLAG_IDR: flag that specifies an IDR picture
151 * @GST_VAAPI_PICTURE_FLAG_INTER_VIEW: flag that indicates the picture
152 * may be used for inter-view prediction
153 * @GST_VAAPI_PICTURE_FLAG_ANCHOR: flag that specifies an anchor picture,
154 * i.e. a picture that is decoded with only inter-view prediction,
155 * and not inter prediction
156 * @GST_VAAPI_PICTURE_FLAG_AU_START: flag that marks the start of an
158 * @GST_VAAPI_PICTURE_FLAG_AU_END: flag that marks the end of an
160 * @GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE: flag that specifies
161 * "used for short-term reference"
162 * @GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE: flag that specifies
163 * "used for long-term reference"
164 * @GST_VAAPI_PICTURE_FLAGS_REFERENCE: mask covering any kind of
165 * reference picture (short-term reference or long-term reference)
168 GST_VAAPI_PICTURE_FLAG_IDR = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
169 GST_VAAPI_PICTURE_FLAG_REFERENCE2 = (GST_VAAPI_PICTURE_FLAG_LAST << 1),
170 GST_VAAPI_PICTURE_FLAG_INTER_VIEW = (GST_VAAPI_PICTURE_FLAG_LAST << 2),
171 GST_VAAPI_PICTURE_FLAG_ANCHOR = (GST_VAAPI_PICTURE_FLAG_LAST << 3),
172 GST_VAAPI_PICTURE_FLAG_AU_START = (GST_VAAPI_PICTURE_FLAG_LAST << 4),
173 GST_VAAPI_PICTURE_FLAG_AU_END = (GST_VAAPI_PICTURE_FLAG_LAST << 5),
175 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE = (
176 GST_VAAPI_PICTURE_FLAG_REFERENCE),
177 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE = (
178 GST_VAAPI_PICTURE_FLAG_REFERENCE | GST_VAAPI_PICTURE_FLAG_REFERENCE2),
179 GST_VAAPI_PICTURE_FLAGS_REFERENCE = (
180 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
181 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
184 #define GST_VAAPI_PICTURE_IS_IDR(picture) \
185 (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR))
187 #define GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture) \
188 ((GST_VAAPI_PICTURE_FLAGS(picture) & \
189 GST_VAAPI_PICTURE_FLAGS_REFERENCE) == \
190 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE)
192 #define GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture) \
193 ((GST_VAAPI_PICTURE_FLAGS(picture) & \
194 GST_VAAPI_PICTURE_FLAGS_REFERENCE) == \
195 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE)
197 #define GST_VAAPI_PICTURE_IS_INTER_VIEW(picture) \
198 (GST_VAAPI_PICTURE_FLAGS(picture) & GST_VAAPI_PICTURE_FLAG_INTER_VIEW)
200 #define GST_VAAPI_PICTURE_IS_ANCHOR(picture) \
201 (GST_VAAPI_PICTURE_FLAGS(picture) & GST_VAAPI_PICTURE_FLAG_ANCHOR)
203 #define GST_VAAPI_PICTURE_H264(picture) \
204 ((GstVaapiPictureH264 *)(picture))
206 struct _GstVaapiPictureH264 {
207 GstVaapiPicture base;
208 GstH264SliceHdr *last_slice_hdr;
211 gint32 frame_num; // Original frame_num from slice_header()
212 gint32 frame_num_wrap; // Temporary for ref pic marking: FrameNumWrap
213 gint32 long_term_frame_idx; // Temporary for ref pic marking: LongTermFrameIdx
214 gint32 pic_num; // Temporary for ref pic marking: PicNum
215 gint32 long_term_pic_num; // Temporary for ref pic marking: LongTermPicNum
216 GstVaapiPictureH264 *other_field; // Temporary for ref pic marking: other field in the same frame store
217 guint output_flag : 1;
218 guint output_needed : 1;
221 GST_VAAPI_CODEC_DEFINE_TYPE(GstVaapiPictureH264, gst_vaapi_picture_h264);
224 gst_vaapi_picture_h264_destroy(GstVaapiPictureH264 *picture)
226 gst_vaapi_picture_destroy(GST_VAAPI_PICTURE(picture));
/* Codec-object constructor hook: creates the base picture, then resets the
 * H.264-specific fields. Both field POCs start at G_MAXINT32, the sentinel
 * meaning "not yet computed" (checked in gst_vaapi_frame_store_add()).
 * NOTE(review): the return statements are not visible in this view —
 * presumably FALSE after a failed base create, TRUE at the end; confirm. */
gst_vaapi_picture_h264_create(
    GstVaapiPictureH264 *picture,
    const GstVaapiCodecObjectConstructorArgs *args
    if (!gst_vaapi_picture_create(GST_VAAPI_PICTURE(picture), args))
    picture->field_poc[0] = G_MAXINT32;   // TopFieldOrderCnt unset
    picture->field_poc[1] = G_MAXINT32;   // BottomFieldOrderCnt unset
    picture->output_needed = FALSE;
/* Allocates a new H.264 picture codec object bound to @decoder, sized for a
 * VAPictureParameterBufferH264 parameter buffer.
 * NOTE(review): trailing constructor arguments are not visible in this view
 * of the file — confirm the full argument list against the source. */
static inline GstVaapiPictureH264 *
gst_vaapi_picture_h264_new(GstVaapiDecoderH264 *decoder)
    return (GstVaapiPictureH264 *)gst_vaapi_codec_object_new(
        &GstVaapiPictureH264Class,
        GST_VAAPI_CODEC_BASE(decoder),
        NULL, sizeof(VAPictureParameterBufferH264),
256 gst_vaapi_picture_h264_set_reference(
257 GstVaapiPictureH264 *picture,
258 guint reference_flags,
264 GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
265 GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
267 if (!other_field || !(picture = picture->other_field))
269 GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
270 GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
273 static inline GstVaapiPictureH264 *
274 gst_vaapi_picture_h264_new_field(GstVaapiPictureH264 *picture)
276 g_return_val_if_fail(picture, NULL);
278 return (GstVaapiPictureH264 *)gst_vaapi_picture_new_field(&picture->base);
281 /* ------------------------------------------------------------------------- */
282 /* --- Frame Buffers (DPB) --- */
283 /* ------------------------------------------------------------------------- */
285 struct _GstVaapiFrameStore {
287 GstVaapiMiniObject parent_instance;
291 GstVaapiPictureH264 *buffers[2];
297 gst_vaapi_frame_store_finalize(gpointer object)
299 GstVaapiFrameStore * const fs = object;
302 for (i = 0; i < fs->num_buffers; i++)
303 gst_vaapi_picture_replace(&fs->buffers[i], NULL);
306 static GstVaapiFrameStore *
307 gst_vaapi_frame_store_new(GstVaapiPictureH264 *picture)
309 GstVaapiFrameStore *fs;
311 static const GstVaapiMiniObjectClass GstVaapiFrameStoreClass = {
312 sizeof(GstVaapiFrameStore),
313 gst_vaapi_frame_store_finalize
316 fs = (GstVaapiFrameStore *)
317 gst_vaapi_mini_object_new(&GstVaapiFrameStoreClass);
321 fs->view_id = picture->base.view_id;
322 fs->structure = picture->structure;
323 fs->buffers[0] = gst_vaapi_picture_ref(picture);
324 fs->buffers[1] = NULL;
326 fs->output_needed = picture->output_needed;
/* Adds the second field @picture to a single-field frame store @fs, merging
 * the two fields into one frame: the store becomes FRAME-structured and the
 * missing field POC of each picture is completed from the other.
 * NOTE(review): some control-flow lines (braces, returns) are not visible
 * in this view — confirm against the full source. */
gst_vaapi_frame_store_add(GstVaapiFrameStore *fs, GstVaapiPictureH264 *picture)
    /* Only valid for a store holding exactly one field, and @picture must
       be the second field of an interlaced frame */
    g_return_val_if_fail(fs->num_buffers == 1, FALSE);
    g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FRAME(picture), FALSE);
    g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture), FALSE);
    gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], picture);
    if (picture->output_flag) {
        picture->output_needed = TRUE;
    /* Both fields present: the store now represents a whole frame */
    fs->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
    /* Index into field_poc[]: 0 = top field, 1 = bottom field */
    field = picture->structure == GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD ?
        TOP_FIELD : BOTTOM_FIELD;
    /* The first field must not already have a POC for this parity */
    g_return_val_if_fail(fs->buffers[0]->field_poc[field] == G_MAXINT32, FALSE);
    fs->buffers[0]->field_poc[field] = picture->field_poc[field];
    /* Complete the second field's opposite-parity POC from the first field */
    g_return_val_if_fail(picture->field_poc[!field] == G_MAXINT32, FALSE);
    picture->field_poc[!field] = fs->buffers[0]->field_poc[!field];
/* Splits a frame-structured store into two field pictures: the existing
 * picture becomes the top field and a new second-field picture is created
 * sharing its frame_num, POCs and output state.
 * NOTE(review): error-path lines (e.g. NULL check on the new field) are not
 * visible in this view — confirm against the full source. */
gst_vaapi_frame_store_split_fields(GstVaapiFrameStore *fs)
    GstVaapiPictureH264 * const first_field = fs->buffers[0];
    GstVaapiPictureH264 *second_field;
    /* Only a store holding a single (frame) picture can be split */
    g_return_val_if_fail(fs->num_buffers == 1, FALSE);
    first_field->base.structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
    GST_VAAPI_PICTURE_FLAG_SET(first_field, GST_VAAPI_PICTURE_FLAG_INTERLACED);
    second_field = gst_vaapi_picture_h264_new_field(first_field);
    /* The store takes ownership; drop the local reference */
    gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], second_field);
    gst_vaapi_picture_unref(second_field);
    /* Second field inherits the frame-level state of the first field */
    second_field->frame_num = first_field->frame_num;
    second_field->field_poc[0] = first_field->field_poc[0];
    second_field->field_poc[1] = first_field->field_poc[1];
    second_field->output_flag = first_field->output_flag;
    if (second_field->output_flag) {
        second_field->output_needed = TRUE;
384 static inline gboolean
385 gst_vaapi_frame_store_has_frame(GstVaapiFrameStore *fs)
387 return fs->structure == GST_VAAPI_PICTURE_STRUCTURE_FRAME;
390 static inline gboolean
391 gst_vaapi_frame_store_has_reference(GstVaapiFrameStore *fs)
395 for (i = 0; i < fs->num_buffers; i++) {
396 if (GST_VAAPI_PICTURE_IS_REFERENCE(fs->buffers[i]))
403 gst_vaapi_frame_store_has_inter_view(GstVaapiFrameStore *fs)
407 for (i = 0; i < fs->num_buffers; i++) {
408 if (GST_VAAPI_PICTURE_IS_INTER_VIEW(fs->buffers[i]))
414 #define gst_vaapi_frame_store_ref(fs) \
415 gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(fs))
417 #define gst_vaapi_frame_store_unref(fs) \
418 gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(fs))
420 #define gst_vaapi_frame_store_replace(old_fs_p, new_fs) \
421 gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_fs_p), \
422 (GstVaapiMiniObject *)(new_fs))
424 /* ------------------------------------------------------------------------- */
425 /* --- H.264 Decoder --- */
426 /* ------------------------------------------------------------------------- */
428 #define GST_VAAPI_DECODER_H264_CAST(decoder) \
429 ((GstVaapiDecoderH264 *)(decoder))
432 GST_H264_VIDEO_STATE_GOT_SPS = 1 << 0,
433 GST_H264_VIDEO_STATE_GOT_PPS = 1 << 1,
434 GST_H264_VIDEO_STATE_GOT_SLICE = 1 << 2,
436 GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS = (
437 GST_H264_VIDEO_STATE_GOT_SPS |
438 GST_H264_VIDEO_STATE_GOT_PPS),
439 GST_H264_VIDEO_STATE_VALID_PICTURE = (
440 GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS |
441 GST_H264_VIDEO_STATE_GOT_SLICE)
444 struct _GstVaapiDecoderH264Private {
445 GstH264NalParser *parser;
448 GstVaapiPictureH264 *current_picture;
449 GstVaapiParserInfoH264 *sps[GST_H264_MAX_SPS_COUNT];
450 GstVaapiParserInfoH264 *active_sps;
451 GstVaapiParserInfoH264 *pps[GST_H264_MAX_PPS_COUNT];
452 GstVaapiParserInfoH264 *active_pps;
453 GstVaapiParserInfoH264 *prev_pi;
454 GstVaapiParserInfoH264 *prev_slice_pi;
455 GstVaapiFrameStore **prev_frames;
456 guint prev_frames_alloc;
457 GstVaapiFrameStore **dpb;
462 GstVaapiProfile profile;
463 GstVaapiEntrypoint entrypoint;
464 GstVaapiChromaType chroma_type;
465 GPtrArray *inter_views;
466 GstVaapiPictureH264 *short_ref[32];
467 guint short_ref_count;
468 GstVaapiPictureH264 *long_ref[32];
469 guint long_ref_count;
470 GstVaapiPictureH264 *RefPicList0[32];
471 guint RefPicList0_count;
472 GstVaapiPictureH264 *RefPicList1[32];
473 guint RefPicList1_count;
474 guint nal_length_size;
477 gint32 field_poc[2]; // 0:TopFieldOrderCnt / 1:BottomFieldOrderCnt
478 gint32 poc_msb; // PicOrderCntMsb
479 gint32 poc_lsb; // pic_order_cnt_lsb (from slice_header())
480 gint32 prev_poc_msb; // prevPicOrderCntMsb
481 gint32 prev_poc_lsb; // prevPicOrderCntLsb
482 gint32 frame_num_offset; // FrameNumOffset
483 gint32 frame_num; // frame_num (from slice_header())
484 gint32 prev_frame_num; // prevFrameNum
485 gboolean prev_pic_has_mmco5; // prevMmco5Pic
486 gboolean prev_pic_structure; // previous picture structure
489 guint has_context : 1;
490 guint progressive_sequence : 1;
494 * GstVaapiDecoderH264:
496 * A decoder based on H264.
498 struct _GstVaapiDecoderH264 {
500 GstVaapiDecoder parent_instance;
501 GstVaapiDecoderH264Private priv;
505 * GstVaapiDecoderH264Class:
507 * A decoder class based on H264.
509 struct _GstVaapiDecoderH264Class {
511 GstVaapiDecoderClass parent_class;
515 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
517 /* Determines if the supplied profile is one of the MVC set */
519 is_mvc_profile(GstH264Profile profile)
521 return profile == GST_H264_PROFILE_MULTIVIEW_HIGH ||
522 profile == GST_H264_PROFILE_STEREO_HIGH;
525 /* Determines the view_id from the supplied NAL unit */
527 get_view_id(GstH264NalUnit *nalu)
529 return GST_H264_IS_MVC_NALU(nalu) ? nalu->extension.mvc.view_id : 0;
532 /* Determines the view order index (VOIdx) from the supplied view_id */
534 get_view_order_index(GstH264SPS *sps, guint16 view_id)
536 GstH264SPSExtMVC *mvc;
539 if (!sps || sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
542 mvc = &sps->extension.mvc;
543 for (i = 0; i <= mvc->num_views_minus1; i++) {
544 if (mvc->view[i].view_id == view_id)
547 GST_ERROR("failed to find VOIdx from view_id (%d)", view_id);
551 /* Determines NumViews */
553 get_num_views(GstH264SPS *sps)
555 return 1 + (sps->extension_type == GST_H264_NAL_EXTENSION_MVC ?
556 sps->extension.mvc.num_views_minus1 : 0);
/* Get number of reference frames to use */
/* Derives max_dec_frame_buffering (DPB size in frames) from the SPS:
 * level limits (Table A-1), doubled for MVC, optionally overridden by VUI
 * bitstream restrictions, then clamped to [num_ref_frames, max_dpb_frames]
 * with a minimum of 1.
 * NOTE(review): else-branches and switch break/closing lines are not
 * visible in this view — confirm structure against the full source. */
get_max_dec_frame_buffering(GstH264SPS *sps)
    guint num_views, max_dpb_frames;
    guint max_dec_frame_buffering, PicSizeMbs;
    GstVaapiLevelH264 level;
    const GstVaapiH264LevelLimits *level_limits;
    /* Table A-1 - Level limits */
    // level_idc == 11 with constraint_set3_flag signals Level 1b (A.3.1)
    if (G_UNLIKELY(sps->level_idc == 11 && sps->constraint_set3_flag))
        level = GST_VAAPI_LEVEL_H264_L1b;
        level = gst_vaapi_utils_h264_get_level(sps->level_idc);
    level_limits = gst_vaapi_utils_h264_get_level_limits(level);
    if (G_UNLIKELY(!level_limits)) {
        GST_FIXME("unsupported level_idc value (%d)", sps->level_idc);
        // Fall back to the absolute maximum DPB size
        max_dec_frame_buffering = 16;
    // Otherwise: DPB frames = MaxDpbMbs / picture size in macroblocks
    PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
        (sps->pic_height_in_map_units_minus1 + 1) *
        (sps->frame_mbs_only_flag ? 1 : 2));
    max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;
    // MVC doubles the DPB budget (H.10.2)
    if (is_mvc_profile(sps->profile_idc))
        max_dec_frame_buffering <<= 1;
    /* VUI bitstream restrictions may specify the exact value */
    if (sps->vui_parameters_present_flag) {
        GstH264VUIParams * const vui_params = &sps->vui_parameters;
        if (vui_params->bitstream_restriction_flag)
            max_dec_frame_buffering = vui_params->max_dec_frame_buffering;
        // Intra-only profiles (constraint_set3_flag) need no DPB frames
        switch (sps->profile_idc) {
        case 44: // CAVLC 4:4:4 Intra profile
        case GST_H264_PROFILE_SCALABLE_HIGH:
        case GST_H264_PROFILE_HIGH:
        case GST_H264_PROFILE_HIGH10:
        case GST_H264_PROFILE_HIGH_422:
        case GST_H264_PROFILE_HIGH_444:
            if (sps->constraint_set3_flag)
                max_dec_frame_buffering = 0;
    /* Clamp to the per-view DPB maximum and to num_ref_frames */
    num_views = get_num_views(sps);
    max_dpb_frames = 16 * (num_views > 1 ? g_bit_storage(num_views - 1) : 1);
    if (max_dec_frame_buffering > max_dpb_frames)
        max_dec_frame_buffering = max_dpb_frames;
    else if (max_dec_frame_buffering < sps->num_ref_frames)
        max_dec_frame_buffering = sps->num_ref_frames;
    return MAX(1, max_dec_frame_buffering);
617 array_remove_index_fast(void *array, guint *array_length_ptr, guint index)
619 gpointer * const entries = array;
620 guint num_entries = *array_length_ptr;
622 g_return_if_fail(index < num_entries);
624 if (index != --num_entries)
625 entries[index] = entries[num_entries];
626 entries[num_entries] = NULL;
627 *array_length_ptr = num_entries;
/* Removal that keeps array order: fast variant when strict DPB ordering is
 * off, shifting variant when it is on.
 * NOTE(review): the #if/#else/#endif lines selecting between these two
 * definitions are not visible in this view — confirm which preprocessor
 * condition (likely !USE_STRICT_DPB_ORDERING) guards each one. */
array_remove_index(void *array, guint *array_length_ptr, guint index)
    // Ordering does not matter: delegate to the O(1) swap-with-last variant
    array_remove_index_fast(array, array_length_ptr, index);
/* Order-preserving variant: shifts all subsequent entries down by one */
array_remove_index(void *array, guint *array_length_ptr, guint index)
    gpointer * const entries = array;
    const guint num_entries = *array_length_ptr - 1;
    g_return_if_fail(index <= num_entries);
    for (i = index; i < num_entries; i++)
        entries[i] = entries[i + 1];
    entries[num_entries] = NULL;
    *array_length_ptr = num_entries;
653 #define ARRAY_REMOVE_INDEX(array, index) \
654 array_remove_index(array, &array##_count, index)
657 dpb_remove_index(GstVaapiDecoderH264 *decoder, guint index)
659 GstVaapiDecoderH264Private * const priv = &decoder->priv;
660 guint i, num_frames = --priv->dpb_count;
662 if (USE_STRICT_DPB_ORDERING) {
663 for (i = index; i < num_frames; i++)
664 gst_vaapi_frame_store_replace(&priv->dpb[i], priv->dpb[i + 1]);
666 else if (index != num_frames)
667 gst_vaapi_frame_store_replace(&priv->dpb[index], priv->dpb[num_frames]);
668 gst_vaapi_frame_store_replace(&priv->dpb[num_frames], NULL);
673 GstVaapiDecoderH264 *decoder,
674 GstVaapiFrameStore *fs,
675 GstVaapiPictureH264 *picture
678 picture->output_needed = FALSE;
681 if (--fs->output_needed > 0)
683 picture = fs->buffers[0];
685 return gst_vaapi_picture_output(GST_VAAPI_PICTURE_CAST(picture));
689 dpb_evict(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture, guint i)
691 GstVaapiDecoderH264Private * const priv = &decoder->priv;
692 GstVaapiFrameStore * const fs = priv->dpb[i];
694 if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
695 dpb_remove_index(decoder, i);
698 /* Finds the frame store holding the supplied picture */
700 dpb_find_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
702 GstVaapiDecoderH264Private * const priv = &decoder->priv;
705 for (i = 0; i < priv->dpb_count; i++) {
706 GstVaapiFrameStore * const fs = priv->dpb[i];
707 for (j = 0; j < fs->num_buffers; j++) {
708 if (fs->buffers[j] == picture)
715 /* Finds the picture with the lowest POC that needs to be output */
717 dpb_find_lowest_poc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
718 GstVaapiPictureH264 **found_picture_ptr)
720 GstVaapiDecoderH264Private * const priv = &decoder->priv;
721 GstVaapiPictureH264 *found_picture = NULL;
722 guint i, j, found_index;
724 for (i = 0; i < priv->dpb_count; i++) {
725 GstVaapiFrameStore * const fs = priv->dpb[i];
726 if (!fs->output_needed)
728 if (picture && picture->base.view_id != fs->view_id)
730 for (j = 0; j < fs->num_buffers; j++) {
731 GstVaapiPictureH264 * const pic = fs->buffers[j];
732 if (!pic->output_needed)
734 if (!found_picture || found_picture->base.poc > pic->base.poc ||
735 (found_picture->base.poc == pic->base.poc &&
736 found_picture->base.voc > pic->base.voc))
737 found_picture = pic, found_index = i;
741 if (found_picture_ptr)
742 *found_picture_ptr = found_picture;
743 return found_picture ? found_index : -1;
746 /* Finds the picture with the lowest VOC that needs to be output */
748 dpb_find_lowest_voc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
749 GstVaapiPictureH264 **found_picture_ptr)
751 GstVaapiDecoderH264Private * const priv = &decoder->priv;
752 GstVaapiPictureH264 *found_picture = NULL;
753 guint i, j, found_index;
755 for (i = 0; i < priv->dpb_count; i++) {
756 GstVaapiFrameStore * const fs = priv->dpb[i];
757 if (!fs->output_needed || fs->view_id == picture->base.view_id)
759 for (j = 0; j < fs->num_buffers; j++) {
760 GstVaapiPictureH264 * const pic = fs->buffers[j];
761 if (!pic->output_needed || pic->base.poc != picture->base.poc)
763 if (!found_picture || found_picture->base.voc > pic->base.voc)
764 found_picture = pic, found_index = i;
768 if (found_picture_ptr)
769 *found_picture_ptr = found_picture;
770 return found_picture ? found_index : -1;
/* "Bumps" the DPB: outputs the pending picture with the lowest POC and
 * evicts its store when unused. For MVC, also emits the remaining view
 * components of the same access unit (by ascending VOC).
 * NOTE(review): early-return / loop lines are not visible in this view —
 * presumably returns FALSE when nothing is pending and loops over
 * dpb_find_lowest_voc() until it fails; confirm against the full source. */
dpb_bump(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiPictureH264 *found_picture;
    found_index = dpb_find_lowest_poc(decoder, picture, &found_picture);
    success = dpb_output(decoder, priv->dpb[found_index], found_picture);
    dpb_evict(decoder, found_picture, found_index);
    // Single-view streams are done after the one output
    if (priv->max_views == 1)
    /* Emit all other view components that were in the same access
       unit than the picture we have just found */
    found_index = dpb_find_lowest_voc(decoder, found_picture,
    dpb_output(decoder, priv->dpb[found_index], found_picture);
    dpb_evict(decoder, found_picture, found_index);
/* Clears DPB entries: all of them when @picture is NULL (flush-all), or
 * only those of @picture's view otherwise, then compacts the array.
 * NOTE(review): the `continue` in the first loop, the non-NULL test in the
 * compaction loop and the dpb_count update are not visible in this view —
 * confirm against the full source. */
dpb_clear(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    /* Drop matching stores (view-selective when @picture is supplied) */
    for (i = 0; i < priv->dpb_count; i++) {
        if (picture && picture->base.view_id != priv->dpb[i]->view_id)
        gst_vaapi_frame_store_replace(&priv->dpb[i], NULL);
    /* Compact the DPB array over the NULLed slots */
    for (i = 0, n = 0; i < priv->dpb_count; i++) {
        priv->dpb[n++] = priv->dpb[i];
    /* Clear previous frame buffers only if this is a "flush-all" operation,
       or if the picture is the first one in the access unit */
    if (!picture || GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
            GST_VAAPI_PICTURE_FLAG_AU_START)) {
        for (i = 0; i < priv->max_views; i++)
            gst_vaapi_picture_replace(&priv->prev_frames[i], NULL);
831 dpb_flush(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
833 while (dpb_bump(decoder, picture))
835 dpb_clear(decoder, picture);
/* MVC housekeeping: at the end of an access unit, drops other-view frame
 * stores that are fully output and hold no reference pictures.
 * NOTE(review): loop initialization and the else-increment of `i` after a
 * non-removal are not visible in this view — confirm against the source. */
dpb_prune_mvc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    // Remove all unused inter-view pictures
    if (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END)) {
        while (i < priv->dpb_count) {
            GstVaapiFrameStore * const fs = priv->dpb[i];
            if (fs->view_id != picture->base.view_id &&
                !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
                dpb_remove_index(decoder, i);
/* Adds @picture to the DPB, implementing C.4.5.1/C.4.5.2: prunes unused
 * stores, pairs second fields with their first field, creates a new frame
 * store otherwise, and bumps the DPB until a slot is free.
 * NOTE(review): several control-flow lines (loop increments, returns,
 * else branches, closing braces) are not visible in this view — confirm
 * the exact flow against the full source. */
dpb_add(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiFrameStore *fs;
    // MVC streams first drop exhausted other-view stores
    if (priv->max_views > 1)
        dpb_prune_mvc(decoder, picture);
    // Remove all unused pictures
    if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
        while (i < priv->dpb_count) {
            GstVaapiFrameStore * const fs = priv->dpb[i];
            if (fs->view_id == picture->base.view_id &&
                !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
                dpb_remove_index(decoder, i);
    // Check if picture is the second field and the first field is still in DPB
    if (GST_VAAPI_PICTURE_IS_INTERLACED(picture) &&
        !GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture)) {
        const gint found_index = dpb_find_picture(decoder,
            GST_VAAPI_PICTURE_H264(picture->base.parent_picture));
        if (found_index >= 0)
            return gst_vaapi_frame_store_add(priv->dpb[found_index], picture);
    // Create new frame store, and split fields if necessary
    fs = gst_vaapi_frame_store_new(picture);
    // Remember this store as the previous frame of its view (VOIdx-indexed)
    gst_vaapi_frame_store_replace(&priv->prev_frames[picture->base.voc], fs);
    gst_vaapi_frame_store_unref(fs);
    if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame(fs)) {
        if (!gst_vaapi_frame_store_split_fields(fs))
    // C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
    if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
        // Bump until a DPB slot is available
        while (priv->dpb_count == priv->dpb_size) {
            if (!dpb_bump(decoder, picture))
    // C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
        const gboolean StoreInterViewOnlyRefFlag =
            !GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
                GST_VAAPI_PICTURE_FLAG_AU_END) &&
            GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
                GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
        if (!picture->output_flag && !StoreInterViewOnlyRefFlag)
        while (priv->dpb_count == priv->dpb_size) {
            if (!StoreInterViewOnlyRefFlag) {
                // Nothing pending with lower POC: output directly, skip DPB
                if (dpb_find_lowest_poc(decoder, picture, NULL) < 0)
                    return dpb_output(decoder, NULL, picture);
            if (!dpb_bump(decoder, picture))
    gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
    if (picture->output_flag) {
        picture->output_needed = TRUE;
/* Resizes the DPB to @dpb_size frame stores, growing the backing array as
 * needed and zeroing the newly added slots. The DPB never shrinks below
 * the number of stores currently in use.
 * NOTE(review): failure handling for g_try_realloc_n and the bodies of the
 * shrink branches are not visible in this view — confirm against the full
 * source. Also note the realloc result overwrites priv->dpb directly; on
 * failure the original array would leak unless handled in elided lines. */
dpb_reset(GstVaapiDecoderH264 *decoder, guint dpb_size)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    // Cannot shrink below the number of occupied slots
    if (dpb_size < priv->dpb_count)
    if (dpb_size > priv->dpb_size_max) {
        priv->dpb = g_try_realloc_n(priv->dpb, dpb_size, sizeof(*priv->dpb));
        // Zero the newly allocated tail slots
        memset(&priv->dpb[priv->dpb_size_max], 0,
            (dpb_size - priv->dpb_size_max) * sizeof(*priv->dpb));
        priv->dpb_size_max = dpb_size;
    if (priv->dpb_size < dpb_size)
        priv->dpb_size = dpb_size;
    else if (dpb_size < priv->dpb_count)
    GST_DEBUG("DPB size %u", priv->dpb_size);
964 unref_inter_view(GstVaapiPictureH264 *picture)
968 GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
969 gst_vaapi_picture_unref(picture);
/* Resets MVC resources */
/* (Re)sizes the per-view MVC state: the inter-view reference array and the
 * prev_frames array, both sized to priv->max_views.
 * NOTE(review): failure returns after the allocations are not visible in
 * this view — confirm against the full source. Also note the realloc
 * result overwrites priv->prev_frames directly; on failure the original
 * array would leak unless handled in elided lines. */
mvc_reset(GstVaapiDecoderH264 *decoder)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    // Resize array of inter-view references
    if (!priv->inter_views) {
        priv->inter_views = g_ptr_array_new_full(priv->max_views,
            (GDestroyNotify)unref_inter_view);
        if (!priv->inter_views)
    // Resize array of previous frame buffers
    // Drop entries beyond the new view count before shrinking
    for (i = priv->max_views; i < priv->prev_frames_alloc; i++)
        gst_vaapi_picture_replace(&priv->prev_frames[i], NULL);
    priv->prev_frames = g_try_realloc_n(priv->prev_frames, priv->max_views,
        sizeof(*priv->prev_frames));
    if (!priv->prev_frames) {
        priv->prev_frames_alloc = 0;
    // NULL-initialize any newly grown slots
    for (i = priv->prev_frames_alloc; i < priv->max_views; i++)
        priv->prev_frames[i] = NULL;
    priv->prev_frames_alloc = priv->max_views;
1003 static GstVaapiDecoderStatus
1004 get_status(GstH264ParserResult result)
1006 GstVaapiDecoderStatus status;
1009 case GST_H264_PARSER_OK:
1010 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
1012 case GST_H264_PARSER_NO_NAL_END:
1013 status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
1015 case GST_H264_PARSER_ERROR:
1016 status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
1019 status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1026 gst_vaapi_decoder_h264_close(GstVaapiDecoderH264 *decoder)
1028 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1030 gst_vaapi_picture_replace(&priv->current_picture, NULL);
1031 gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, NULL);
1032 gst_vaapi_parser_info_h264_replace(&priv->prev_pi, NULL);
1034 dpb_clear(decoder, NULL);
1036 if (priv->inter_views) {
1037 g_ptr_array_unref(priv->inter_views);
1038 priv->inter_views = NULL;
1042 gst_h264_nal_parser_free(priv->parser);
1043 priv->parser = NULL;
1048 gst_vaapi_decoder_h264_open(GstVaapiDecoderH264 *decoder)
1050 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1052 gst_vaapi_decoder_h264_close(decoder);
1054 priv->parser = gst_h264_nal_parser_new();
/* Final destructor: closes the stream state, frees the prev_frames array
 * and releases every cached SPS/PPS parser info (plus the active ones).
 * NOTE(review): the lines freeing the DPB array itself (priv->dpb) are not
 * visible in this view — confirm against the full source. */
gst_vaapi_decoder_h264_destroy(GstVaapiDecoder *base_decoder)
    GstVaapiDecoderH264 * const decoder =
        GST_VAAPI_DECODER_H264_CAST(base_decoder);
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    // Releases current picture, parser infos, DPB contents and NAL parser
    gst_vaapi_decoder_h264_close(decoder);
    g_free(priv->prev_frames);
    priv->prev_frames = NULL;
    priv->prev_frames_alloc = 0;
    // Drop every cached PPS and the active PPS reference
    for (i = 0; i < G_N_ELEMENTS(priv->pps); i++)
        gst_vaapi_parser_info_h264_replace(&priv->pps[i], NULL);
    gst_vaapi_parser_info_h264_replace(&priv->active_pps, NULL);
    // Drop every cached SPS and the active SPS reference
    for (i = 0; i < G_N_ELEMENTS(priv->sps); i++)
        gst_vaapi_parser_info_h264_replace(&priv->sps[i], NULL);
    gst_vaapi_parser_info_h264_replace(&priv->active_sps, NULL);
1088 gst_vaapi_decoder_h264_create(GstVaapiDecoder *base_decoder)
1090 GstVaapiDecoderH264 * const decoder =
1091 GST_VAAPI_DECODER_H264_CAST(base_decoder);
1092 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1094 priv->profile = GST_VAAPI_PROFILE_UNKNOWN;
1095 priv->entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
1096 priv->chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
1097 priv->prev_pic_structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
1098 priv->progressive_sequence = TRUE;
1102 /* Activates the supplied PPS */
1104 ensure_pps(GstVaapiDecoderH264 *decoder, GstH264PPS *pps)
1106 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1107 GstVaapiParserInfoH264 * const pi = priv->pps[pps->id];
1109 gst_vaapi_parser_info_h264_replace(&priv->active_pps, pi);
1110 return pi ? &pi->data.pps : NULL;
1113 /* Returns the active PPS */
1114 static inline GstH264PPS *
1115 get_pps(GstVaapiDecoderH264 *decoder)
1117 GstVaapiParserInfoH264 * const pi = decoder->priv.active_pps;
1119 return pi ? &pi->data.pps : NULL;
1122 /* Activate the supplied SPS */
1124 ensure_sps(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1126 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1127 GstVaapiParserInfoH264 * const pi = priv->sps[sps->id];
1129 gst_vaapi_parser_info_h264_replace(&priv->active_sps, pi);
1130 return pi ? &pi->data.sps : NULL;
1133 /* Returns the active SPS */
1134 static inline GstH264SPS *
1135 get_sps(GstVaapiDecoderH264 *decoder)
1137 GstVaapiParserInfoH264 * const pi = decoder->priv.active_sps;
1139 return pi ? &pi->data.sps : NULL;
/* Appends @profile to the candidate @profiles list and bumps the count.
 * A MAIN profile additionally implies HIGH as a compatible superset.
 * NOTE(review): no bounds check is visible here — callers are responsible
 * for keeping the list within its declared capacity. */
1143 fill_profiles(GstVaapiProfile profiles[16], guint *n_profiles_ptr,
1144 GstVaapiProfile profile)
1146 guint n_profiles = *n_profiles_ptr;
1148 profiles[n_profiles++] = profile;
1150 case GST_VAAPI_PROFILE_H264_MAIN:
1151 profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
1156 *n_profiles_ptr = n_profiles;
1159 /* Fills in compatible profiles for MVC decoding */
/* Adds HIGH as a fallback for MVC streams, but only for drivers known to
 * support slice-level decoding (matched by VA vendor-string prefix) and
 * only when the DPB is small enough (<= 16). */
1161 fill_profiles_mvc(GstVaapiDecoderH264 *decoder, GstVaapiProfile profiles[16],
1162 guint *n_profiles_ptr, guint dpb_size)
1164 const gchar * const vendor_string =
1165 gst_vaapi_display_get_vendor_string(GST_VAAPI_DECODER_DISPLAY(decoder));
1167 gboolean add_high_profile = FALSE;
1172 const struct map *m;
1174 // Drivers that support slice level decoding
1175 if (vendor_string && dpb_size <= 16) {
/* str_len is the prefix length compared below (17 == strlen of the name) */
1176 static const struct map drv_names[] = {
1177 { "Intel i965 driver", 17 },
/* Case-insensitive prefix match against the driver vendor string */
1180 for (m = drv_names; m->str != NULL && !add_high_profile; m++) {
1181 if (g_ascii_strncasecmp(vendor_string, m->str, m->str_len) == 0)
1182 add_high_profile = TRUE;
1186 if (add_high_profile)
1187 fill_profiles(profiles, n_profiles_ptr, GST_VAAPI_PROFILE_H264_HIGH);
/* Maps the SPS profile_idc to a VA-API profile and builds a list of
 * compatible fallback profiles (per Annex A constraint flags and the MVC
 * rules), then returns the first one the display can actually decode.
 * Returns GST_VAAPI_PROFILE_UNKNOWN if nothing is supported. */
1190 static GstVaapiProfile
1191 get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps, guint dpb_size)
1193 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1194 GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(decoder);
/* NOTE(review): local list is [4] while fill_profiles() declares [16];
 * the worst visible chain (BASELINE -> CBP -> MAIN -> HIGH) fills exactly
 * 4 entries — keep capacity in sync if new fallbacks are added */
1195 GstVaapiProfile profile, profiles[4];
1196 guint i, n_profiles = 0;
1198 profile = gst_vaapi_utils_h264_get_profile(sps->profile_idc);
1200 return GST_VAAPI_PROFILE_UNKNOWN;
1202 fill_profiles(profiles, &n_profiles, profile);
1204 case GST_VAAPI_PROFILE_H264_BASELINE:
1205 if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1206 fill_profiles(profiles, &n_profiles,
1207 GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE);
1208 fill_profiles(profiles, &n_profiles,
1209 GST_VAAPI_PROFILE_H264_MAIN);
1212 case GST_VAAPI_PROFILE_H264_EXTENDED:
1213 if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1214 fill_profiles(profiles, &n_profiles,
1215 GST_VAAPI_PROFILE_H264_MAIN);
/* Two-view MULTIVIEW_HIGH streams can also be decoded as STEREO_HIGH */
1218 case GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH:
1219 if (priv->max_views == 2) {
1220 fill_profiles(profiles, &n_profiles,
1221 GST_VAAPI_PROFILE_H264_STEREO_HIGH);
1223 fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
/* Progressive STEREO_HIGH streams can fall back to MULTIVIEW_HIGH */
1225 case GST_VAAPI_PROFILE_H264_STEREO_HIGH:
1226 if (sps->frame_mbs_only_flag) {
1227 fill_profiles(profiles, &n_profiles,
1228 GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH);
1230 fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1236 /* If the preferred profile (profiles[0]) matches one that we already
1237 found, then just return it now instead of searching for it again */
1238 if (profiles[0] == priv->profile)
1239 return priv->profile;
/* Otherwise probe the display for the first supported candidate */
1241 for (i = 0; i < n_profiles; i++) {
1242 if (gst_vaapi_display_has_decoder(display, profiles[i], priv->entrypoint))
1245 return GST_VAAPI_PROFILE_UNKNOWN;
/* (Re)creates the VA decode context from the active SPS. Detects changes
 * that require a context reset (DPB growth, profile change, chroma format
 * change, coded size change), updates decoder-level output properties
 * (interlacing, pixel aspect ratio), and resets the DPB and MVC state when
 * a new context is created. */
1248 static GstVaapiDecoderStatus
1249 ensure_context(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1251 GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
1252 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1253 GstVaapiContextInfo info;
1254 GstVaapiProfile profile;
1255 GstVaapiChromaType chroma_type;
1256 gboolean reset_context = FALSE;
1257 guint mb_width, mb_height, dpb_size;
/* A larger DPB requirement forces a new context; shrinking does not */
1259 dpb_size = get_max_dec_frame_buffering(sps);
1260 if (priv->dpb_size < dpb_size) {
1261 GST_DEBUG("DPB size increased");
1262 reset_context = TRUE;
1265 profile = get_profile(decoder, sps, dpb_size);
1267 GST_ERROR("unsupported profile_idc %u", sps->profile_idc);
1268 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
/* Profile changes only force a reset for single-view streams
 * (max_views == 1); multiview streams keep the existing context */
1271 if (!priv->profile || (priv->profile != profile && priv->max_views == 1)) {
1272 GST_DEBUG("profile changed");
1273 reset_context = TRUE;
1274 priv->profile = profile;
1277 chroma_type = gst_vaapi_utils_h264_get_chroma_type(sps->chroma_format_idc);
1279 GST_ERROR("unsupported chroma_format_idc %u", sps->chroma_format_idc);
1280 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1283 if (priv->chroma_type != chroma_type) {
1284 GST_DEBUG("chroma format changed");
1285 reset_context = TRUE;
1286 priv->chroma_type = chroma_type;
/* For interlaced content (frame_mbs_only_flag == 0) the map-unit height
 * covers a field pair, so it is doubled to get the frame height in MBs */
1289 mb_width = sps->pic_width_in_mbs_minus1 + 1;
1290 mb_height = (sps->pic_height_in_map_units_minus1 + 1) <<
1291 !sps->frame_mbs_only_flag;
1292 if (priv->mb_width != mb_width || priv->mb_height != mb_height) {
1293 GST_DEBUG("size changed");
1294 reset_context = TRUE;
1295 priv->mb_width = mb_width;
1296 priv->mb_height = mb_height;
1299 priv->progressive_sequence = sps->frame_mbs_only_flag;
1301 /* XXX: we only output complete frames for now */
1302 gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
1305 gst_vaapi_decoder_set_pixel_aspect_ratio(
1307 sps->vui_parameters.par_n,
1308 sps->vui_parameters.par_d
/* Nothing changed and a context already exists: keep using it */
1311 if (!reset_context && priv->has_context)
1312 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1314 /* XXX: fix surface size when cropping is implemented */
1315 info.profile = priv->profile;
1316 info.entrypoint = priv->entrypoint;
1317 info.chroma_type = priv->chroma_type;
1318 info.width = sps->width;
1319 info.height = sps->height;
1320 info.ref_frames = dpb_size;
1322 if (!gst_vaapi_decoder_ensure_context(GST_VAAPI_DECODER(decoder), &info))
1323 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1324 priv->has_context = TRUE;
/* A fresh context invalidates all reference pictures: rebuild the DPB */
1327 if (!dpb_reset(decoder, dpb_size))
1328 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1330 /* Reset MVC data */
1331 if (!mvc_reset(decoder))
1332 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1333 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Copies the 6 PPS 4x4 scaling lists into the VA IQ matrix buffer,
 * converting from the bitstream zig-zag order to raster order. */
1337 fill_iq_matrix_4x4(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1338 const GstH264SPS *sps)
1342 /* There are always 6 4x4 scaling lists */
1343 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4) == 6);
1344 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4[0]) == 16);
1346 for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++)
1347 gst_h264_video_quant_matrix_4x4_get_raster_from_zigzag(
1348 iq_matrix->ScalingList4x4[i], pps->scaling_lists_4x4[i]);
/* Copies the 8x8 scaling lists (2 for 4:2:0/4:2:2, 6 for 4:4:4) into the
 * VA IQ matrix buffer, zig-zag -> raster. No-op unless the PPS enables
 * the 8x8 transform. */
1352 fill_iq_matrix_8x8(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1353 const GstH264SPS *sps)
1357 /* If chroma_format_idc != 3, there are up to 2 8x8 scaling lists */
1358 if (!pps->transform_8x8_mode_flag)
1361 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8) >= 2);
1362 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8[0]) == 64);
1364 n = (sps->chroma_format_idc != 3) ? 2 : 6;
1365 for (i = 0; i < n; i++) {
1366 gst_h264_video_quant_matrix_8x8_get_raster_from_zigzag(
1367 iq_matrix->ScalingList8x8[i], pps->scaling_lists_8x8[i]);
/* Allocates the picture's VA IQ matrix and fills it from the active
 * PPS/SPS scaling lists. 4:4:4 streams are rejected because the VA
 * ScalingList8x8 array cannot hold all 6 lists. */
1371 static GstVaapiDecoderStatus
1372 ensure_quant_matrix(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
1374 GstVaapiPicture * const base_picture = &picture->base;
1375 GstH264PPS * const pps = get_pps(decoder);
1376 GstH264SPS * const sps = get_sps(decoder);
1377 VAIQMatrixBufferH264 *iq_matrix;
1379 base_picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(H264, decoder);
1380 if (!base_picture->iq_matrix) {
1381 GST_ERROR("failed to allocate IQ matrix");
1382 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1384 iq_matrix = base_picture->iq_matrix->param;
1386 /* XXX: we can only support 4:2:0 or 4:2:2 since ScalingLists8x8[]
1387 is not large enough to hold lists for 4:4:4 */
/* NOTE(review): the rejection happens after the IQ matrix was attached to
 * the picture — presumably released with the picture object; confirm */
1388 if (sps->chroma_format_idc == 3)
1389 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1391 fill_iq_matrix_4x4(iq_matrix, pps, sps);
1392 fill_iq_matrix_8x8(iq_matrix, pps, sps);
1394 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Returns TRUE iff all bits of @ref_state are set in @state. */
1397 static inline gboolean
1398 is_valid_state(guint state, guint ref_state)
1400 return (state & ref_state) == ref_state;
/* Finalizes the current picture: performs reference picture marking,
 * stores it into the DPB and submits it for hardware decoding. Incomplete
 * pictures (decoder_state missing VALID_PICTURE bits) are dropped.
 * NOTE(review): error/drop paths use control flow elided from this
 * excerpt (likely goto labels) — verify against the full file. */
1403 static GstVaapiDecoderStatus
1404 decode_current_picture(GstVaapiDecoderH264 *decoder)
1406 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1407 GstVaapiPictureH264 * const picture = priv->current_picture;
1409 if (!is_valid_state(priv->decoder_state, GST_H264_VIDEO_STATE_VALID_PICTURE))
/* decoder_state accumulates per-picture; cleared before the next one */
1411 priv->decoder_state = 0;
1414 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1416 if (!exec_ref_pic_marking(decoder, picture))
1418 if (!dpb_add(decoder, picture))
1420 if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
1422 gst_vaapi_picture_replace(&priv->current_picture, NULL);
1423 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1426 /* XXX: fix for cases where first field failed to be decoded */
1427 gst_vaapi_picture_replace(&priv->current_picture, NULL);
1428 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1431 priv->decoder_state = 0;
1432 return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
/* Parses an SPS NAL unit into the unit's parser info and marks the
 * parser state as having seen an SPS. */
1435 static GstVaapiDecoderStatus
1436 parse_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1438 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1439 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1440 GstH264SPS * const sps = &pi->data.sps;
1441 GstH264ParserResult result;
1443 GST_DEBUG("parse SPS");
/* A new SPS invalidates everything parsed so far */
1445 priv->parser_state = 0;
1447 /* Variables that don't have inferred values per the H.264
1448 standard but that should get a default value anyway */
1449 sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1451 result = gst_h264_parser_parse_sps(priv->parser, &pi->nalu, sps, TRUE);
1452 if (result != GST_H264_PARSER_OK)
1453 return get_status(result);
1455 /* Reset defaults */
1456 priv->max_views = 1;
1458 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1459 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses a subset SPS NAL unit (MVC extension of the SPS). Unlike
 * parse_sps(), this does not reset parser_state or max_views. */
1462 static GstVaapiDecoderStatus
1463 parse_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1465 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1466 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1467 GstH264SPS * const sps = &pi->data.sps;
1468 GstH264ParserResult result;
1470 GST_DEBUG("parse subset SPS");
1472 /* Variables that don't have inferred values per the H.264
1473 standard but that should get a default value anyway */
1474 sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1476 result = gst_h264_parser_parse_subset_sps(priv->parser, &pi->nalu, sps,
1478 if (result != GST_H264_PARSER_OK)
1479 return get_status(result);
1481 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1482 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses a PPS NAL unit into the unit's parser info and marks the parser
 * state accordingly. */
1485 static GstVaapiDecoderStatus
1486 parse_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1488 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1489 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1490 GstH264PPS * const pps = &pi->data.pps;
1491 GstH264ParserResult result;
1493 GST_DEBUG("parse PPS");
/* A new PPS invalidates prior PPS/slice state but keeps the GOT_SPS bit */
1495 priv->parser_state &= GST_H264_VIDEO_STATE_GOT_SPS;
1497 /* Variables that don't have inferred values per the H.264
1498 standard but that should get a default value anyway */
1499 pps->slice_group_map_type = 0;
1500 pps->slice_group_change_rate_minus1 = 0;
1502 result = gst_h264_parser_parse_pps(priv->parser, &pi->nalu, pps);
1503 if (result != GST_H264_PARSER_OK)
1504 return get_status(result);
1506 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_PPS;
1507 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses SEI messages into the unit's parser info (an array of SEI
 * payloads). Parse failures are logged and propagated. */
1510 static GstVaapiDecoderStatus
1511 parse_sei(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1513 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1514 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1515 GArray ** const sei_ptr = &pi->data.sei;
1516 GstH264ParserResult result;
1518 GST_DEBUG("parse SEI");
1520 result = gst_h264_parser_parse_sei(priv->parser, &pi->nalu, sei_ptr);
1521 if (result != GST_H264_PARSER_OK) {
1522 GST_WARNING("failed to parse SEI messages");
1523 return get_status(result);
1525 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses a slice header NAL unit. For MVC, the NAL extension info is
 * propagated from the immediately preceding Prefix NAL unit, or inferred
 * per H.7.4.1.1 when no prefix is present. Also refreshes the per-slice
 * MVC bookkeeping (max_views, view_id, view order count). */
1528 static GstVaapiDecoderStatus
1529 parse_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1531 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1532 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1533 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
1534 GstH264NalUnit * const nalu = &pi->nalu;
1536 GstH264ParserResult result;
1539 GST_DEBUG("parse slice");
/* A slice only keeps SPS/PPS knowledge; previous slice state is cleared */
1541 priv->parser_state &= (GST_H264_VIDEO_STATE_GOT_SPS|
1542 GST_H264_VIDEO_STATE_GOT_PPS);
1544 /* Propagate Prefix NAL unit info, if necessary */
1545 switch (nalu->type) {
1546 case GST_H264_NAL_SLICE:
1547 case GST_H264_NAL_SLICE_IDR: {
1548 GstVaapiParserInfoH264 * const prev_pi = priv->prev_pi;
1549 if (prev_pi && prev_pi->nalu.type == GST_H264_NAL_PREFIX_UNIT) {
1550 /* MVC sequences shall have a Prefix NAL unit immediately
1551 preceding this NAL unit */
1552 pi->nalu.extension_type = prev_pi->nalu.extension_type;
1553 pi->nalu.extension = prev_pi->nalu.extension;
1556 /* In the very unlikely case there is no Prefix NAL unit
1557 immediately preceding this NAL unit, try to infer some
1558 defaults (H.7.4.1.1) */
1559 GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
1560 mvc->non_idr_flag = !(nalu->type == GST_H264_NAL_SLICE_IDR);
1561 nalu->idr_pic_flag = !mvc->non_idr_flag;
1562 mvc->priority_id = 0;
1564 mvc->temporal_id = 0;
1565 mvc->anchor_pic_flag = 0;
1566 mvc->inter_view_flag = 1;
1572 /* Variables that don't have inferred values per the H.264
1573 standard but that should get a default value anyway */
1574 slice_hdr->cabac_init_idc = 0;
1575 slice_hdr->direct_spatial_mv_pred_flag = 0;
1577 result = gst_h264_parser_parse_slice_hdr(priv->parser, &pi->nalu,
1578 slice_hdr, TRUE, TRUE);
1579 if (result != GST_H264_PARSER_OK)
1580 return get_status(result);
/* The parser resolved the PPS, which links back to its SPS */
1582 sps = slice_hdr->pps->sequence;
1584 /* Update MVC data */
1585 num_views = get_num_views(sps);
1586 if (priv->max_views < num_views) {
1587 priv->max_views = num_views;
1588 GST_DEBUG("maximum number of views changed to %u", num_views);
1590 pi->view_id = get_view_id(&pi->nalu);
1591 pi->voc = get_view_order_index(sps, pi->view_id);
1593 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
1594 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Caches the parsed SPS under its id so ensure_sps() can activate it. */
1597 static GstVaapiDecoderStatus
1598 decode_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1600 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1601 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1602 GstH264SPS * const sps = &pi->data.sps;
1604 GST_DEBUG("decode SPS");
1606 gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1607 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Caches the parsed subset SPS; shares the same sps[] table as plain SPS,
 * keyed by sps->id. */
1610 static GstVaapiDecoderStatus
1611 decode_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1613 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1614 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1615 GstH264SPS * const sps = &pi->data.sps;
1617 GST_DEBUG("decode subset SPS");
1619 gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1620 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Caches the parsed PPS under its id so ensure_pps() can activate it. */
1623 static GstVaapiDecoderStatus
1624 decode_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1626 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1627 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1628 GstH264PPS * const pps = &pi->data.pps;
1630 GST_DEBUG("decode PPS");
1632 gst_vaapi_parser_info_h264_replace(&priv->pps[pps->id], pi);
1633 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Handles an end-of-sequence NAL: finishes the current picture then
 * flushes the whole DPB so all pending frames are output. */
1636 static GstVaapiDecoderStatus
1637 decode_sequence_end(GstVaapiDecoderH264 *decoder)
1639 GstVaapiDecoderStatus status;
1641 GST_DEBUG("decode sequence-end");
1643 status = decode_current_picture(decoder);
1644 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
1647 dpb_flush(decoder, NULL);
1648 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1651 /* 8.2.1.1 - Decoding process for picture order count type 0 */
/* Computes field_poc[] from pic_order_cnt_lsb with MSB wraparound
 * tracking, per spec equations 8-3..8-5. Updates priv->poc_msb/poc_lsb
 * and priv->field_poc[]. */
1654 GstVaapiDecoderH264 *decoder,
1655 GstVaapiPictureH264 *picture,
1656 GstH264SliceHdr *slice_hdr
1659 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1660 GstH264SPS * const sps = get_sps(decoder);
1661 const gint32 MaxPicOrderCntLsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
1664 GST_DEBUG("decode picture order count type 0");
/* prevPicOrderCntMsb/Lsb: reset at IDR; after an MMCO5 the previous
 * picture's POC was reset, so derive from its top-field POC */
1666 if (GST_VAAPI_PICTURE_IS_IDR(picture)) {
1667 priv->prev_poc_msb = 0;
1668 priv->prev_poc_lsb = 0;
1670 else if (priv->prev_pic_has_mmco5) {
1671 priv->prev_poc_msb = 0;
1672 priv->prev_poc_lsb =
1673 (priv->prev_pic_structure == GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD ?
1674 0 : priv->field_poc[TOP_FIELD]);
1677 priv->prev_poc_msb = priv->poc_msb;
1678 priv->prev_poc_lsb = priv->poc_lsb;
/* (8-3): detect lsb wraparound in either direction and adjust the msb */
1682 priv->poc_lsb = slice_hdr->pic_order_cnt_lsb;
1683 if (priv->poc_lsb < priv->prev_poc_lsb &&
1684 (priv->prev_poc_lsb - priv->poc_lsb) >= (MaxPicOrderCntLsb / 2))
1685 priv->poc_msb = priv->prev_poc_msb + MaxPicOrderCntLsb;
1686 else if (priv->poc_lsb > priv->prev_poc_lsb &&
1687 (priv->poc_lsb - priv->prev_poc_lsb) > (MaxPicOrderCntLsb / 2))
1688 priv->poc_msb = priv->prev_poc_msb - MaxPicOrderCntLsb;
1690 priv->poc_msb = priv->prev_poc_msb;
1692 temp_poc = priv->poc_msb + priv->poc_lsb;
1693 switch (picture->structure) {
1694 case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
/* Frames carry both fields; bottom is offset by delta_pic_order_cnt_bottom */
1696 priv->field_poc[TOP_FIELD] = temp_poc;
1697 priv->field_poc[BOTTOM_FIELD] = temp_poc +
1698 slice_hdr->delta_pic_order_cnt_bottom;
1700 case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1702 priv->field_poc[TOP_FIELD] = temp_poc;
1704 case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1706 priv->field_poc[BOTTOM_FIELD] = temp_poc;
1711 /* 8.2.1.2 - Decoding process for picture order count type 1 */
/* Derives the POC from frame_num and the SPS-coded expected-delta cycle
 * (offset_for_ref_frame[]), per spec equations 8-6..8-10. */
1714 GstVaapiDecoderH264 *decoder,
1715 GstVaapiPictureH264 *picture,
1716 GstH264SliceHdr *slice_hdr
1719 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1720 GstH264SPS * const sps = get_sps(decoder);
1721 const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1722 gint32 prev_frame_num_offset, abs_frame_num, expected_poc;
1725 GST_DEBUG("decode picture order count type 1");
/* An MMCO5 on the previous picture reset its frame_num_offset */
1727 if (priv->prev_pic_has_mmco5)
1728 prev_frame_num_offset = 0;
1730 prev_frame_num_offset = priv->frame_num_offset;
/* (8-6): bump the offset by MaxFrameNum on each frame_num wraparound */
1733 if (GST_VAAPI_PICTURE_IS_IDR(picture))
1734 priv->frame_num_offset = 0;
1735 else if (priv->prev_frame_num > priv->frame_num)
1736 priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1738 priv->frame_num_offset = prev_frame_num_offset;
/* (8-7): absFrameNum; non-reference pictures count one step earlier */
1741 if (sps->num_ref_frames_in_pic_order_cnt_cycle != 0)
1742 abs_frame_num = priv->frame_num_offset + priv->frame_num;
1745 if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture) && abs_frame_num > 0)
1746 abs_frame_num = abs_frame_num - 1;
1748 if (abs_frame_num > 0) {
1749 gint32 expected_delta_per_poc_cycle;
1750 gint32 poc_cycle_cnt, frame_num_in_poc_cycle;
/* Sum of all per-cycle offsets coded in the SPS */
1752 expected_delta_per_poc_cycle = 0;
1753 for (i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++)
1754 expected_delta_per_poc_cycle += sps->offset_for_ref_frame[i];
/* (8-8): position within the POC cycle */
1757 poc_cycle_cnt = (abs_frame_num - 1) /
1758 sps->num_ref_frames_in_pic_order_cnt_cycle;
1759 frame_num_in_poc_cycle = (abs_frame_num - 1) %
1760 sps->num_ref_frames_in_pic_order_cnt_cycle;
/* (8-9): expectedPicOrderCnt */
1763 expected_poc = poc_cycle_cnt * expected_delta_per_poc_cycle;
1764 for (i = 0; i <= frame_num_in_poc_cycle; i++)
1765 expected_poc += sps->offset_for_ref_frame[i];
1769 if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1770 expected_poc += sps->offset_for_non_ref_pic;
/* (8-10): apply the slice-coded deltas per picture structure */
1773 switch (picture->structure) {
1774 case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1775 priv->field_poc[TOP_FIELD] = expected_poc +
1776 slice_hdr->delta_pic_order_cnt[0];
1777 priv->field_poc[BOTTOM_FIELD] = priv->field_poc[TOP_FIELD] +
1778 sps->offset_for_top_to_bottom_field +
1779 slice_hdr->delta_pic_order_cnt[1];
1781 case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1782 priv->field_poc[TOP_FIELD] = expected_poc +
1783 slice_hdr->delta_pic_order_cnt[0];
1785 case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1786 priv->field_poc[BOTTOM_FIELD] = expected_poc +
1787 sps->offset_for_top_to_bottom_field +
1788 slice_hdr->delta_pic_order_cnt[0];
1793 /* 8.2.1.3 - Decoding process for picture order count type 2 */
/* POC follows decoding order: 2 * (frame_num_offset + frame_num), minus 1
 * for non-reference pictures. Per spec equations 8-11/8-12. */
1796 GstVaapiDecoderH264 *decoder,
1797 GstVaapiPictureH264 *picture,
1798 GstH264SliceHdr *slice_hdr
1801 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1802 GstH264SPS * const sps = get_sps(decoder);
1803 const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1804 gint32 prev_frame_num_offset, temp_poc;
1806 GST_DEBUG("decode picture order count type 2");
/* An MMCO5 on the previous picture reset its frame_num_offset */
1808 if (priv->prev_pic_has_mmco5)
1809 prev_frame_num_offset = 0;
1811 prev_frame_num_offset = priv->frame_num_offset;
/* (8-11): bump the offset by MaxFrameNum on each frame_num wraparound */
1814 if (GST_VAAPI_PICTURE_IS_IDR(picture))
1815 priv->frame_num_offset = 0;
1816 else if (priv->prev_frame_num > priv->frame_num)
1817 priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1819 priv->frame_num_offset = prev_frame_num_offset;
/* (8-12): tempPicOrderCnt (IDR branch value elided in this excerpt) */
1822 if (GST_VAAPI_PICTURE_IS_IDR(picture))
1824 else if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1825 temp_poc = 2 * (priv->frame_num_offset + priv->frame_num) - 1;
1827 temp_poc = 2 * (priv->frame_num_offset + priv->frame_num);
/* Both fields of a frame share the same POC for type 2 */
1830 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1831 priv->field_poc[TOP_FIELD] = temp_poc;
1832 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1833 priv->field_poc[BOTTOM_FIELD] = temp_poc;
1836 /* 8.2.1 - Decoding process for picture order count */
/* Dispatches to the POC derivation matching sps->pic_order_cnt_type,
 * then copies the computed field POCs into the picture. The picture's
 * overall POC is the smaller of its field POCs. */
1839 GstVaapiDecoderH264 *decoder,
1840 GstVaapiPictureH264 *picture,
1841 GstH264SliceHdr *slice_hdr
1844 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1845 GstH264SPS * const sps = get_sps(decoder);
1847 switch (sps->pic_order_cnt_type) {
1849 init_picture_poc_0(decoder, picture, slice_hdr);
1852 init_picture_poc_1(decoder, picture, slice_hdr);
1855 init_picture_poc_2(decoder, picture, slice_hdr);
/* Only the field(s) actually present in this picture are assigned */
1859 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1860 picture->field_poc[TOP_FIELD] = priv->field_poc[TOP_FIELD];
1861 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1862 picture->field_poc[BOTTOM_FIELD] = priv->field_poc[BOTTOM_FIELD];
1863 picture->base.poc = MIN(picture->field_poc[0], picture->field_poc[1]);
/* qsort() comparators used by SORT_REF_LIST() to order reference picture
 * lists. The "_dec" variants sort in decreasing order of the named field,
 * the "_inc" variants in increasing order. Each takes pointers to
 * GstVaapiPictureH264* elements. */
1867 compare_picture_pic_num_dec(const void *a, const void *b)
1869 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1870 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1872 return picB->pic_num - picA->pic_num;
1876 compare_picture_long_term_pic_num_inc(const void *a, const void *b)
1878 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1879 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1881 return picA->long_term_pic_num - picB->long_term_pic_num;
1885 compare_picture_poc_dec(const void *a, const void *b)
1887 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1888 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1890 return picB->base.poc - picA->base.poc;
1894 compare_picture_poc_inc(const void *a, const void *b)
1896 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1897 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1899 return picA->base.poc - picB->base.poc;
1903 compare_picture_frame_num_wrap_dec(const void *a, const void *b)
1905 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1906 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1908 return picB->frame_num_wrap - picA->frame_num_wrap;
1912 compare_picture_long_term_frame_idx_inc(const void *a, const void *b)
1914 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1915 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1917 return picA->long_term_frame_idx - picB->long_term_frame_idx;
1920 /* 8.2.4.1 - Decoding process for picture numbers */
/* Computes FrameNumWrap/PicNum for short-term references and
 * LongTermPicNum for long-term references, relative to the current
 * picture (spec equations 8-27..8-33). References from other MVC views
 * are skipped. */
1922 init_picture_refs_pic_num(
1923 GstVaapiDecoderH264 *decoder,
1924 GstVaapiPictureH264 *picture,
1925 GstH264SliceHdr *slice_hdr
1928 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1929 GstH264SPS * const sps = get_sps(decoder);
1930 const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1933 GST_DEBUG("decode picture numbers");
1935 for (i = 0; i < priv->short_ref_count; i++) {
1936 GstVaapiPictureH264 * const pic = priv->short_ref[i];
/* Picture numbers only apply within the current view */
1939 if (pic->base.view_id != picture->base.view_id)
/* (8-27/8-28): unwrap frame_num across the MaxFrameNum boundary */
1943 if (pic->frame_num > priv->frame_num)
1944 pic->frame_num_wrap = pic->frame_num - MaxFrameNum;
1946 pic->frame_num_wrap = pic->frame_num;
1948 // (8-28, 8-30, 8-31)
1949 if (GST_VAAPI_PICTURE_IS_FRAME(picture))
1950 pic->pic_num = pic->frame_num_wrap;
/* Field decoding: same-parity fields get 2n+1, opposite parity 2n */
1952 if (pic->structure == picture->structure)
1953 pic->pic_num = 2 * pic->frame_num_wrap + 1;
1955 pic->pic_num = 2 * pic->frame_num_wrap;
1959 for (i = 0; i < priv->long_ref_count; i++) {
1960 GstVaapiPictureH264 * const pic = priv->long_ref[i];
1963 if (pic->base.view_id != picture->base.view_id)
1966 // (8-29, 8-32, 8-33)
1967 if (GST_VAAPI_PICTURE_IS_FRAME(picture))
1968 pic->long_term_pic_num = pic->long_term_frame_idx;
1970 if (pic->structure == picture->structure)
1971 pic->long_term_pic_num = 2 * pic->long_term_frame_idx + 1;
1973 pic->long_term_pic_num = 2 * pic->long_term_frame_idx;
/* Sorts an array of GstVaapiPictureH264* with one of the
 * compare_picture_* comparators above (name is token-pasted). */
1978 #define SORT_REF_LIST(list, n, compare_func) \
1979 qsort(list, n, sizeof(*(list)), compare_picture_##compare_func)
/* Helper for 8.2.4.2.5: appends fields from @ref_list to @RefPicList,
 * alternating between fields of the same parity as @picture_structure and
 * fields of the opposite parity, starting with same parity. @i and @j
 * scan the two parities independently (declarations elided here). */
1982 init_picture_refs_fields_1(
1983 guint picture_structure,
1984 GstVaapiPictureH264 *RefPicList[32],
1985 guint *RefPicList_count,
1986 GstVaapiPictureH264 *ref_list[32],
1987 guint ref_list_count
1994 n = *RefPicList_count;
1997 for (; i < ref_list_count; i++) {
1998 if (ref_list[i]->structure == picture_structure) {
1999 RefPicList[n++] = ref_list[i++];
2003 for (; j < ref_list_count; j++) {
2004 if (ref_list[j]->structure != picture_structure) {
2005 RefPicList[n++] = ref_list[j++];
2009 } while (i < ref_list_count || j < ref_list_count);
2010 *RefPicList_count = n;
/* Builds a field reference list: short-term fields first, then long-term
 * fields, each parity-interleaved via init_picture_refs_fields_1(). */
2014 init_picture_refs_fields(
2015 GstVaapiPictureH264 *picture,
2016 GstVaapiPictureH264 *RefPicList[32],
2017 guint *RefPicList_count,
2018 GstVaapiPictureH264 *short_ref[32],
2019 guint short_ref_count,
2020 GstVaapiPictureH264 *long_ref[32],
2021 guint long_ref_count
2026 /* 8.2.4.2.5 - reference picture lists in fields */
2027 init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2028 short_ref, short_ref_count);
2029 init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2030 long_ref, long_ref_count);
2031 *RefPicList_count = n;
2034 /* Finds the inter-view reference picture with the supplied view id */
/* Linear scan of priv->inter_views; logs a warning when no picture with
 * @view_id is present (failure return value elided in this excerpt). */
2035 static GstVaapiPictureH264 *
2036 find_inter_view_reference(GstVaapiDecoderH264 *decoder, guint16 view_id)
2038 GPtrArray * const inter_views = decoder->priv.inter_views;
2041 for (i = 0; i < inter_views->len; i++) {
2042 GstVaapiPictureH264 * const picture = g_ptr_array_index(inter_views, i);
2043 if (picture->base.view_id == view_id)
2047 GST_WARNING("failed to find inter-view reference picture for view_id: %d",
2052 /* Checks whether the view id exists in the supplied list of view ids */
2054 find_view_id(guint16 view_id, const guint16 *view_ids, guint num_view_ids)
2058 for (i = 0; i < num_view_ids; i++) {
2059 if (view_ids[i] == view_id)
2065 /* Checks whether the inter-view reference picture with the supplied
2066 view id is used for decoding the current view component picture */
/* Consults the SPS MVC view-dependency lists for @picture's view order
 * index: anchor pictures use the anchor_ref l0/l1 lists, others the
 * non_anchor_ref lists. */
2068 is_inter_view_reference_for_picture(GstVaapiDecoderH264 *decoder,
2069 guint16 view_id, GstVaapiPictureH264 *picture)
2071 const GstH264SPS * const sps = get_sps(decoder);
2072 const GstH264SPSExtMVCView *view;
2074 if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2075 sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2078 view = &sps->extension.mvc.view[picture->base.voc];
2079 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2080 return (find_view_id(view_id, view->anchor_ref_l0,
2081 view->num_anchor_refs_l0) ||
2082 find_view_id(view_id, view->anchor_ref_l1,
2083 view->num_anchor_refs_l1));
2085 return (find_view_id(view_id, view->non_anchor_ref_l0,
2086 view->num_non_anchor_refs_l0) ||
2087 find_view_id(view_id, view->non_anchor_ref_l1,
2088 view->num_non_anchor_refs_l1));
2091 /* H.8.2.1 - Initialization process for inter-view prediction references */
/* Appends inter-view reference pictures (looked up by view id) to
 * @ref_list, stopping at @num_refs entries. Missing inter-view pictures
 * are simply skipped. */
2093 init_picture_refs_mvc_1(GstVaapiDecoderH264 *decoder,
2094 GstVaapiPictureH264 **ref_list, guint *ref_list_count_ptr, guint num_refs,
2095 const guint16 *view_ids, guint num_view_ids)
2099 n = *ref_list_count_ptr;
2100 for (j = 0; j < num_view_ids && n < num_refs; j++) {
2101 GstVaapiPictureH264 * const pic =
2102 find_inter_view_reference(decoder, view_ids[j]);
2104 ref_list[n++] = pic;
2106 *ref_list_count_ptr = n;
/* Appends the SPS-declared inter-view references for @picture's view to
 * RefPicList0 or RefPicList1 (@list selects which). Anchor pictures use
 * the anchor_ref dependency lists, others the non_anchor_ref lists. */
2110 init_picture_refs_mvc(GstVaapiDecoderH264 *decoder,
2111 GstVaapiPictureH264 *picture, GstH264SliceHdr *slice_hdr, guint list)
2113 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2114 const GstH264SPS * const sps = get_sps(decoder);
2115 const GstH264SPSExtMVCView *view;
2117 GST_DEBUG("initialize reference picture list for inter-view prediction");
2119 if (sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2121 view = &sps->extension.mvc.view[picture->base.voc];
/* Token-pastes the list index (0/1) and the view-dependency list name
 * into the matching priv->RefPicListN / slice header / SPS fields */
2123 #define INVOKE_INIT_PICTURE_REFS_MVC(ref_list, view_list) do { \
2124 init_picture_refs_mvc_1(decoder, \
2125 priv->RefPicList##ref_list, \
2126 &priv->RefPicList##ref_list##_count, \
2127 slice_hdr->num_ref_idx_l##ref_list##_active_minus1 + 1, \
2128 view->view_list##_l##ref_list, \
2129 view->num_##view_list##s_l##ref_list); \
2133 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2134 INVOKE_INIT_PICTURE_REFS_MVC(0, anchor_ref);
2136 INVOKE_INIT_PICTURE_REFS_MVC(0, non_anchor_ref);
2139 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2140 INVOKE_INIT_PICTURE_REFS_MVC(1, anchor_ref);
2142 INVOKE_INIT_PICTURE_REFS_MVC(1, non_anchor_ref);
2145 #undef INVOKE_INIT_PICTURE_REFS_MVC
/* Builds the initial RefPicList0 for P/SP slices (8.2.4.2.1/8.2.4.2.2):
 * short-term references sorted by decreasing PicNum (frames) or
 * decreasing FrameNumWrap (fields), followed by long-term references in
 * increasing LongTermPicNum / LongTermFrameIdx order; field lists are
 * then parity-interleaved. Inter-view references are appended for MVC. */
2149 init_picture_refs_p_slice(
2150 GstVaapiDecoderH264 *decoder,
2151 GstVaapiPictureH264 *picture,
2152 GstH264SliceHdr *slice_hdr
2155 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2156 GstVaapiPictureH264 **ref_list;
2159 GST_DEBUG("decode reference picture list for P and SP slices");
2161 if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2162 /* 8.2.4.2.1 - P and SP slices in frames */
2163 if (priv->short_ref_count > 0) {
2164 ref_list = priv->RefPicList0;
2165 for (i = 0; i < priv->short_ref_count; i++)
2166 ref_list[i] = priv->short_ref[i];
2167 SORT_REF_LIST(ref_list, i, pic_num_dec);
2168 priv->RefPicList0_count += i;
/* Long-term references go after all short-term ones */
2171 if (priv->long_ref_count > 0) {
2172 ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2173 for (i = 0; i < priv->long_ref_count; i++)
2174 ref_list[i] = priv->long_ref[i];
2175 SORT_REF_LIST(ref_list, i, long_term_pic_num_inc);
2176 priv->RefPicList0_count += i;
2180 /* 8.2.4.2.2 - P and SP slices in fields */
/* Sorted scratch lists; the parity interleave below builds RefPicList0 */
2181 GstVaapiPictureH264 *short_ref[32];
2182 guint short_ref_count = 0;
2183 GstVaapiPictureH264 *long_ref[32];
2184 guint long_ref_count = 0;
2186 if (priv->short_ref_count > 0) {
2187 for (i = 0; i < priv->short_ref_count; i++)
2188 short_ref[i] = priv->short_ref[i];
2189 SORT_REF_LIST(short_ref, i, frame_num_wrap_dec);
2190 short_ref_count = i;
2193 if (priv->long_ref_count > 0) {
2194 for (i = 0; i < priv->long_ref_count; i++)
2195 long_ref[i] = priv->long_ref[i];
2196 SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
2200 init_picture_refs_fields(
2202 priv->RefPicList0, &priv->RefPicList0_count,
2203 short_ref, short_ref_count,
2204 long_ref, long_ref_count
/* H.8.2.1: append inter-view references for MVC streams */
2208 if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2210 init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
/* Builds the initial reference picture lists RefPicList0/RefPicList1 for
 * B slices, per H.264 8.2.4.2.3 (frames) and 8.2.4.2.4 (fields): short-term
 * references ordered by POC relative to the current picture, followed by
 * long-term references ordered by LongTermPicNum/LongTermFrameIdx.
 * NOTE(review): this listing has gaps in its line numbering — closing braces,
 * `else` lines and some statements were dropped by the extraction; do not
 * treat the visible text as compilable on its own. */
2215 init_picture_refs_b_slice(
2216 GstVaapiDecoderH264 *decoder,
2217 GstVaapiPictureH264 *picture,
2218 GstH264SliceHdr *slice_hdr
2221 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2222 GstVaapiPictureH264 **ref_list;
2225 GST_DEBUG("decode reference picture list for B slices");
2227 if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2228 /* 8.2.4.2.3 - B slices in frames */
2231 if (priv->short_ref_count > 0) {
2232 // 1. Short-term references
/* RefPicList0: first the short-term refs with POC < current, descending POC */
2233 ref_list = priv->RefPicList0;
2234 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2235 if (priv->short_ref[i]->base.poc < picture->base.poc)
2236 ref_list[n++] = priv->short_ref[i];
2238 SORT_REF_LIST(ref_list, n, poc_dec);
2239 priv->RefPicList0_count += n;
/* ...then the short-term refs with POC >= current, ascending POC */
2241 ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2242 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2243 if (priv->short_ref[i]->base.poc >= picture->base.poc)
2244 ref_list[n++] = priv->short_ref[i];
2246 SORT_REF_LIST(ref_list, n, poc_inc);
2247 priv->RefPicList0_count += n;
2250 if (priv->long_ref_count > 0) {
2251 // 2. Long-term references
2252 ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2253 for (n = 0, i = 0; i < priv->long_ref_count; i++)
2254 ref_list[n++] = priv->long_ref[i];
2255 SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2256 priv->RefPicList0_count += n;
/* RefPicList1 mirrors RefPicList0: future pictures (POC > current) first */
2260 if (priv->short_ref_count > 0) {
2261 // 1. Short-term references
2262 ref_list = priv->RefPicList1;
2263 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2264 if (priv->short_ref[i]->base.poc > picture->base.poc)
2265 ref_list[n++] = priv->short_ref[i];
2267 SORT_REF_LIST(ref_list, n, poc_inc);
2268 priv->RefPicList1_count += n;
2270 ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2271 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2272 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2273 ref_list[n++] = priv->short_ref[i];
2275 SORT_REF_LIST(ref_list, n, poc_dec);
2276 priv->RefPicList1_count += n;
2279 if (priv->long_ref_count > 0) {
2280 // 2. Long-term references
2281 ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2282 for (n = 0, i = 0; i < priv->long_ref_count; i++)
2283 ref_list[n++] = priv->long_ref[i];
2284 SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2285 priv->RefPicList1_count += n;
2289 /* 8.2.4.2.4 - B slices in fields */
2290 GstVaapiPictureH264 *short_ref0[32];
2291 guint short_ref0_count = 0;
2292 GstVaapiPictureH264 *short_ref1[32];
2293 guint short_ref1_count = 0;
2294 GstVaapiPictureH264 *long_ref[32];
2295 guint long_ref_count = 0;
2297 /* refFrameList0ShortTerm */
2298 if (priv->short_ref_count > 0) {
2299 ref_list = short_ref0;
2300 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2301 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2302 ref_list[n++] = priv->short_ref[i];
2304 SORT_REF_LIST(ref_list, n, poc_dec);
2305 short_ref0_count += n;
2307 ref_list = &short_ref0[short_ref0_count];
2308 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2309 if (priv->short_ref[i]->base.poc > picture->base.poc)
2310 ref_list[n++] = priv->short_ref[i];
2312 SORT_REF_LIST(ref_list, n, poc_inc);
2313 short_ref0_count += n;
2316 /* refFrameList1ShortTerm */
2317 if (priv->short_ref_count > 0) {
2318 ref_list = short_ref1;
2319 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2320 if (priv->short_ref[i]->base.poc > picture->base.poc)
2321 ref_list[n++] = priv->short_ref[i];
2323 SORT_REF_LIST(ref_list, n, poc_inc);
2324 short_ref1_count += n;
2326 ref_list = &short_ref1[short_ref1_count];
2327 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2328 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2329 ref_list[n++] = priv->short_ref[i];
2331 SORT_REF_LIST(ref_list, n, poc_dec);
2332 short_ref1_count += n;
2335 /* refFrameListLongTerm */
2336 if (priv->long_ref_count > 0) {
2337 for (i = 0; i < priv->long_ref_count; i++)
2338 long_ref[i] = priv->long_ref[i];
2339 SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
/* Interleave fields of the frame lists into the final field ref lists */
2343 init_picture_refs_fields(
2345 priv->RefPicList0, &priv->RefPicList0_count,
2346 short_ref0, short_ref0_count,
2347 long_ref, long_ref_count
2350 init_picture_refs_fields(
2352 priv->RefPicList1, &priv->RefPicList1_count,
2353 short_ref1, short_ref1_count,
2354 long_ref, long_ref_count
2358 /* Check whether RefPicList1 is identical to RefPicList0, then
2359 swap if necessary (8.2.4.2.3, final step: swap the first two entries) */
2360 if (priv->RefPicList1_count > 1 &&
2361 priv->RefPicList1_count == priv->RefPicList0_count &&
2362 memcmp(priv->RefPicList0, priv->RefPicList1,
2363 priv->RefPicList0_count * sizeof(priv->RefPicList0[0])) == 0) {
2364 GstVaapiPictureH264 * const tmp = priv->RefPicList1[0];
2365 priv->RefPicList1[0] = priv->RefPicList1[1];
2366 priv->RefPicList1[1] = tmp;
/* MVC: append inter-view references to both lists (H.8.2.1) */
2369 if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2371 init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
2374 init_picture_refs_mvc(decoder, picture, slice_hdr, 1);
2378 #undef SORT_REF_LIST
/* Returns the index into priv->short_ref[] of the short-term reference
 * picture whose PicNum equals @pic_num, or a negative value (with an error
 * log) when none matches. The `return` lines were dropped by the listing. */
2381 find_short_term_reference(GstVaapiDecoderH264 *decoder, gint32 pic_num)
2383 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2386 for (i = 0; i < priv->short_ref_count; i++) {
2387 if (priv->short_ref[i]->pic_num == pic_num)
2390 GST_ERROR("found no short-term reference picture with PicNum = %d",
/* Returns the index into priv->long_ref[] of the long-term reference picture
 * whose LongTermPicNum equals @long_term_pic_num, or a negative value (with
 * an error log) when none matches. The `return` lines were dropped here. */
2396 find_long_term_reference(GstVaapiDecoderH264 *decoder, gint32 long_term_pic_num)
2398 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2401 for (i = 0; i < priv->long_ref_count; i++) {
2402 if (priv->long_ref[i]->long_term_pic_num == long_term_pic_num)
2405 GST_ERROR("found no long-term reference picture with LongTermPicNum = %d",
/* Applies the ref_pic_list_modification() syntax to one reference list
 * (@list: 0 => RefPicList0, 1 => RefPicList1), per 8.2.4.3 and, for MVC,
 * H.8.2.2.3. Each modification inserts the designated picture at
 * ref_list_idx and removes any duplicate entry further down the list. */
2411 exec_picture_refs_modification_1(
2412 GstVaapiDecoderH264 *decoder,
2413 GstVaapiPictureH264 *picture,
2414 GstH264SliceHdr *slice_hdr,
2418 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2419 GstH264SPS * const sps = get_sps(decoder);
2420 GstH264RefPicListModification *ref_pic_list_modification;
2421 guint num_ref_pic_list_modifications;
2422 GstVaapiPictureH264 **ref_list;
2423 guint *ref_list_count_ptr, ref_list_count, ref_list_idx = 0;
2424 const guint16 *view_ids = NULL;
2425 guint i, j, n, num_refs, num_view_ids = 0;
2427 gint32 MaxPicNum, CurrPicNum, picNumPred, picViewIdxPred;
2429 GST_DEBUG("modification process of reference picture list %u", list);
/* Select l0 or l1 state depending on @list; for MVC streams also pick the
 * per-view inter-view reference id table (anchor vs non-anchor) */
2432 ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l0;
2433 num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l0;
2434 ref_list = priv->RefPicList0;
2435 ref_list_count_ptr = &priv->RefPicList0_count;
2436 num_refs = slice_hdr->num_ref_idx_l0_active_minus1 + 1;
2438 if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2439 sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2440 const GstH264SPSExtMVCView * const view =
2441 &sps->extension.mvc.view[picture->base.voc];
2442 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2443 view_ids = view->anchor_ref_l0;
2444 num_view_ids = view->num_anchor_refs_l0;
2447 view_ids = view->non_anchor_ref_l0;
2448 num_view_ids = view->num_non_anchor_refs_l0;
2453 ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l1;
2454 num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l1;
2455 ref_list = priv->RefPicList1;
2456 ref_list_count_ptr = &priv->RefPicList1_count;
2457 num_refs = slice_hdr->num_ref_idx_l1_active_minus1 + 1;
2459 if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2460 sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2461 const GstH264SPSExtMVCView * const view =
2462 &sps->extension.mvc.view[picture->base.voc];
2463 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2464 view_ids = view->anchor_ref_l1;
2465 num_view_ids = view->num_anchor_refs_l1;
2468 view_ids = view->non_anchor_ref_l1;
2469 num_view_ids = view->num_non_anchor_refs_l1;
2473 ref_list_count = *ref_list_count_ptr;
/* Derive MaxPicNum/CurrPicNum (8.2.4.1): doubled (+1) for field pictures */
2475 if (!GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2476 MaxPicNum = 1 << (sps->log2_max_frame_num_minus4 + 5); // 2 * MaxFrameNum
2477 CurrPicNum = 2 * slice_hdr->frame_num + 1; // 2 * frame_num + 1
2480 MaxPicNum = 1 << (sps->log2_max_frame_num_minus4 + 4); // MaxFrameNum
2481 CurrPicNum = slice_hdr->frame_num; // frame_num
2484 picNumPred = CurrPicNum;
2485 picViewIdxPred = -1;
2487 for (i = 0; i < num_ref_pic_list_modifications; i++) {
2488 GstH264RefPicListModification * const l = &ref_pic_list_modification[i];
2489 if (l->modification_of_pic_nums_idc == 3)
2492 /* 8.2.4.3.1 - Short-term reference pictures */
2493 if (l->modification_of_pic_nums_idc == 0 || l->modification_of_pic_nums_idc == 1) {
2494 gint32 abs_diff_pic_num = l->value.abs_diff_pic_num_minus1 + 1;
2495 gint32 picNum, picNumNoWrap;
/* idc 0 subtracts, idc 1 adds; both wrap modulo MaxPicNum */
2498 if (l->modification_of_pic_nums_idc == 0) {
2499 picNumNoWrap = picNumPred - abs_diff_pic_num;
2500 if (picNumNoWrap < 0)
2501 picNumNoWrap += MaxPicNum;
2506 picNumNoWrap = picNumPred + abs_diff_pic_num;
2507 if (picNumNoWrap >= MaxPicNum)
2508 picNumNoWrap -= MaxPicNum;
2510 picNumPred = picNumNoWrap;
2513 picNum = picNumNoWrap;
2514 if (picNum > CurrPicNum)
2515 picNum -= MaxPicNum;
/* Shift remaining entries up, insert the target, then compact out any
 * duplicate occurrence (8-28 in the spec) */
2518 for (j = num_refs; j > ref_list_idx; j--)
2519 ref_list[j] = ref_list[j - 1];
2520 found_ref_idx = find_short_term_reference(decoder, picNum);
2521 ref_list[ref_list_idx++] =
2522 found_ref_idx >= 0 ? priv->short_ref[found_ref_idx] : NULL;
2524 for (j = ref_list_idx; j <= num_refs; j++) {
2529 GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(ref_list[j]) ?
2530 ref_list[j]->pic_num : MaxPicNum;
2531 if (PicNumF != picNum ||
2532 ref_list[j]->base.view_id != picture->base.view_id)
2533 ref_list[n++] = ref_list[j];
2537 /* 8.2.4.3.2 - Long-term reference pictures */
2538 else if (l->modification_of_pic_nums_idc == 2) {
2540 for (j = num_refs; j > ref_list_idx; j--)
2541 ref_list[j] = ref_list[j - 1];
2543 find_long_term_reference(decoder, l->value.long_term_pic_num);
2544 ref_list[ref_list_idx++] =
2545 found_ref_idx >= 0 ? priv->long_ref[found_ref_idx] : NULL;
2547 for (j = ref_list_idx; j <= num_refs; j++) {
2548 gint32 LongTermPicNumF;
2552 GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(ref_list[j]) ?
2553 ref_list[j]->long_term_pic_num : INT_MAX;
2554 if (LongTermPicNumF != l->value.long_term_pic_num ||
2555 ref_list[j]->base.view_id != picture->base.view_id)
2556 ref_list[n++] = ref_list[j];
2560 /* H.8.2.2.3 - Inter-view prediction reference pictures */
2561 else if ((GST_VAAPI_PICTURE_IS_MVC(picture) &&
2562 sps->extension_type == GST_H264_NAL_EXTENSION_MVC) &&
2563 (l->modification_of_pic_nums_idc == 4 ||
2564 l->modification_of_pic_nums_idc == 5)) {
2565 gint32 abs_diff_view_idx = l->value.abs_diff_view_idx_minus1 + 1;
2566 gint32 picViewIdx, targetViewId;
/* idc 4 subtracts, idc 5 adds; wrap modulo num_view_ids */
2569 if (l->modification_of_pic_nums_idc == 4) {
2570 picViewIdx = picViewIdxPred - abs_diff_view_idx;
2572 picViewIdx += num_view_ids;
2577 picViewIdx = picViewIdxPred + abs_diff_view_idx;
2578 if (picViewIdx >= num_view_ids)
2579 picViewIdx -= num_view_ids;
2581 picViewIdxPred = picViewIdx;
2584 targetViewId = view_ids[picViewIdx];
2587 for (j = num_refs; j > ref_list_idx; j--)
2588 ref_list[j] = ref_list[j - 1];
2589 ref_list[ref_list_idx++] =
2590 find_inter_view_reference(decoder, targetViewId);
2592 for (j = ref_list_idx; j <= num_refs; j++) {
2595 if (ref_list[j]->base.view_id != targetViewId ||
2596 ref_list[j]->base.poc != picture->base.poc)
2597 ref_list[n++] = ref_list[j];
/* Sanity check: every active entry must have been resolved */
2603 for (i = 0; i < num_refs; i++)
2605 GST_ERROR("list %u entry %u is empty", list, i);
2607 *ref_list_count_ptr = num_refs;
2610 /* 8.2.4.3 - Modification process for reference picture lists */
/* Dispatches the per-list modification: l0 for any non-I/non-SI slice that
 * signals ref_pic_list_modification_flag_l0, l1 additionally for B slices. */
2612 exec_picture_refs_modification(
2613 GstVaapiDecoderH264 *decoder,
2614 GstVaapiPictureH264 *picture,
2615 GstH264SliceHdr *slice_hdr
2618 GST_DEBUG("execute ref_pic_list_modification()");
2621 if (!GST_H264_IS_I_SLICE(slice_hdr) && !GST_H264_IS_SI_SLICE(slice_hdr) &&
2622 slice_hdr->ref_pic_list_modification_flag_l0)
2623 exec_picture_refs_modification_1(decoder, picture, slice_hdr, 0);
2626 if (GST_H264_IS_B_SLICE(slice_hdr) &&
2627 slice_hdr->ref_pic_list_modification_flag_l1)
2628 exec_picture_refs_modification_1(decoder, picture, slice_hdr, 1);
/* Rebuilds priv->short_ref[] / priv->long_ref[] from the DPB for the view of
 * @picture: whole frames when decoding a frame, individual fields otherwise.
 * Also refreshes each reference's structure and other_field back-pointer,
 * then NULLs out and shrinks the stale tail of both arrays. */
2632 init_picture_ref_lists(GstVaapiDecoderH264 *decoder,
2633 GstVaapiPictureH264 *picture)
2635 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2636 guint i, j, short_ref_count, long_ref_count;
2638 short_ref_count = 0;
2640 if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2641 for (i = 0; i < priv->dpb_count; i++) {
2642 GstVaapiFrameStore * const fs = priv->dpb[i];
2643 GstVaapiPictureH264 *pic;
/* only complete frames participate when the current picture is a frame */
2644 if (!gst_vaapi_frame_store_has_frame(fs))
2646 pic = fs->buffers[0];
2647 if (pic->base.view_id != picture->base.view_id)
2649 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2650 priv->short_ref[short_ref_count++] = pic;
2651 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2652 priv->long_ref[long_ref_count++] = pic;
2653 pic->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2654 pic->other_field = fs->buffers[1];
2658 for (i = 0; i < priv->dpb_count; i++) {
2659 GstVaapiFrameStore * const fs = priv->dpb[i];
2660 for (j = 0; j < fs->num_buffers; j++) {
2661 GstVaapiPictureH264 * const pic = fs->buffers[j];
2662 if (pic->base.view_id != picture->base.view_id)
2664 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2665 priv->short_ref[short_ref_count++] = pic;
2666 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2667 priv->long_ref[long_ref_count++] = pic;
2668 pic->structure = pic->base.structure;
/* j ^ 1 selects the sibling field within the same frame store */
2669 pic->other_field = fs->buffers[j ^ 1];
/* Clear any leftover entries from the previous pass before shrinking */
2674 for (i = short_ref_count; i < priv->short_ref_count; i++)
2675 priv->short_ref[i] = NULL;
2676 priv->short_ref_count = short_ref_count;
2678 for (i = long_ref_count; i < priv->long_ref_count; i++)
2679 priv->long_ref[i] = NULL;
2680 priv->long_ref_count = long_ref_count;
/* init_picture_refs(): top-level construction of the reference picture lists
 * for the current slice — rebuilds the short/long-term arrays, derives
 * PicNum values, builds the initial RefPicListX per slice type, applies the
 * bitstream modifications, then pads each active list with NULL up to
 * num_ref_idx_lX_active_minus1 + 1 entries.
 * (The function signature line was dropped by this listing.) */
2685 GstVaapiDecoderH264 *decoder,
2686 GstVaapiPictureH264 *picture,
2687 GstH264SliceHdr *slice_hdr
2690 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2693 init_picture_ref_lists(decoder, picture);
2694 init_picture_refs_pic_num(decoder, picture, slice_hdr);
2696 priv->RefPicList0_count = 0;
2697 priv->RefPicList1_count = 0;
/* slice_hdr->type % 5 folds the "extended" slice type range onto the base one */
2699 switch (slice_hdr->type % 5) {
2700 case GST_H264_P_SLICE:
2701 case GST_H264_SP_SLICE:
2702 init_picture_refs_p_slice(decoder, picture, slice_hdr);
2704 case GST_H264_B_SLICE:
2705 init_picture_refs_b_slice(decoder, picture, slice_hdr);
2711 exec_picture_refs_modification(decoder, picture, slice_hdr);
2713 switch (slice_hdr->type % 5) {
2714 case GST_H264_B_SLICE:
2715 num_refs = 1 + slice_hdr->num_ref_idx_l1_active_minus1;
2716 for (i = priv->RefPicList1_count; i < num_refs; i++)
2717 priv->RefPicList1[i] = NULL;
2718 priv->RefPicList1_count = num_refs;
/* fallthrough: B slices also need the l0 padding below */
2721 case GST_H264_P_SLICE:
2722 case GST_H264_SP_SLICE:
2723 num_refs = 1 + slice_hdr->num_ref_idx_l0_active_minus1;
2724 for (i = priv->RefPicList0_count; i < num_refs; i++)
2725 priv->RefPicList0[i] = NULL;
2726 priv->RefPicList0_count = num_refs;
/* init_picture(): initializes the decoder picture from the parsed slice
 * header and NAL info — frame_num bookkeeping, timestamps, view id/order,
 * MVC flags, IDR handling (DPB flush), picture structure (frame / top /
 * bottom field), reference flags, and finally POC computation.
 * (The function signature line was dropped by this listing.) */
2735 GstVaapiDecoderH264 *decoder,
2736 GstVaapiPictureH264 *picture, GstVaapiParserInfoH264 *pi
2738 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2739 GstVaapiPicture * const base_picture = &picture->base;
2740 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2742 priv->prev_frame_num = priv->frame_num;
2743 priv->frame_num = slice_hdr->frame_num;
2744 picture->frame_num = priv->frame_num;
/* frame_num_wrap starts equal to frame_num; adjusted later during marking */
2745 picture->frame_num_wrap = priv->frame_num;
2746 picture->output_flag = TRUE; /* XXX: conformant to Annex A only */
2747 base_picture->pts = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
2748 base_picture->type = GST_VAAPI_PICTURE_TYPE_NONE;
2749 base_picture->view_id = pi->view_id;
2750 base_picture->voc = pi->voc;
2752 /* Initialize extensions */
2753 switch (pi->nalu.extension_type) {
2754 case GST_H264_NAL_EXTENSION_MVC: {
2755 GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
2757 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_MVC);
2758 if (mvc->inter_view_flag)
2759 GST_VAAPI_PICTURE_FLAG_SET(picture,
2760 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
2761 if (mvc->anchor_pic_flag)
2762 GST_VAAPI_PICTURE_FLAG_SET(picture,
2763 GST_VAAPI_PICTURE_FLAG_ANCHOR);
2768 /* Reset decoder state for IDR pictures */
2769 if (pi->nalu.idr_pic_flag) {
2771 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR);
2772 dpb_flush(decoder, picture);
2775 /* Initialize picture structure */
2776 if (!slice_hdr->field_pic_flag)
2777 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2779 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
2780 if (!slice_hdr->bottom_field_flag)
2781 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
2783 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
2785 picture->structure = base_picture->structure;
2787 /* Initialize reference flags */
2788 if (pi->nalu.ref_idc) {
2789 GstH264DecRefPicMarking * const dec_ref_pic_marking =
2790 &slice_hdr->dec_ref_pic_marking;
/* IDR pictures may be marked long-term directly via the slice header */
2792 if (GST_VAAPI_PICTURE_IS_IDR(picture) &&
2793 dec_ref_pic_marking->long_term_reference_flag)
2794 GST_VAAPI_PICTURE_FLAG_SET(picture,
2795 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE);
2797 GST_VAAPI_PICTURE_FLAG_SET(picture,
2798 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE);
2801 init_picture_poc(decoder, picture, slice_hdr);
2805 /* 8.2.5.3 - Sliding window decoded reference picture marking process */
/* When the reference store is full (>= max_num_ref_frames), unmarks the
 * short-term reference with the smallest FrameNumWrap; for field pictures,
 * the paired field is unmarked as well. Runs only on the first field. */
2807 exec_ref_pic_marking_sliding_window(GstVaapiDecoderH264 *decoder)
2809 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2810 GstH264SPS * const sps = get_sps(decoder);
2811 GstVaapiPictureH264 *ref_picture;
2812 guint i, m, max_num_ref_frames;
2814 GST_DEBUG("reference picture marking process (sliding window)");
2816 if (!GST_VAAPI_PICTURE_IS_FIRST_FIELD(priv->current_picture))
2819 max_num_ref_frames = sps->num_ref_frames;
2820 if (max_num_ref_frames == 0)
2821 max_num_ref_frames = 1;
/* Field decoding doubles the capacity, since entries are fields not frames */
2822 if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture))
2823 max_num_ref_frames <<= 1;
2825 if (priv->short_ref_count + priv->long_ref_count < max_num_ref_frames)
2827 if (priv->short_ref_count < 1)
/* Find the short-term reference with the smallest FrameNumWrap */
2830 for (m = 0, i = 1; i < priv->short_ref_count; i++) {
2831 GstVaapiPictureH264 * const picture = priv->short_ref[i];
2832 if (picture->frame_num_wrap < priv->short_ref[m]->frame_num_wrap)
2836 ref_picture = priv->short_ref[m];
2837 gst_vaapi_picture_h264_set_reference(ref_picture, 0, TRUE);
2838 ARRAY_REMOVE_INDEX(priv->short_ref, m);
2840 /* Both fields need to be marked as "unused for reference", so
2841 remove the other field from the short_ref[] list as well */
2842 if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture) && ref_picture->other_field) {
2843 for (i = 0; i < priv->short_ref_count; i++) {
2844 if (priv->short_ref[i] == ref_picture->other_field) {
2845 ARRAY_REMOVE_INDEX(priv->short_ref, i);
/* Derives picNumX for MMCO 1/3 (8.2.5.4): CurrPicNum minus
 * (difference_of_pic_nums_minus1 + 1), where CurrPicNum is FrameNumWrap for
 * frames and 2 * FrameNumWrap + 1 for fields. The final `return pic_num;`
 * line was dropped by this listing. */
2853 static inline gint32
2854 get_picNumX(GstVaapiPictureH264 *picture, GstH264RefPicMarking *ref_pic_marking)
2858 if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2859 pic_num = picture->frame_num_wrap;
2861 pic_num = 2 * picture->frame_num_wrap + 1;
2862 pic_num -= ref_pic_marking->difference_of_pic_nums_minus1 + 1;
2866 /* 8.2.5.4.1. Mark short-term reference picture as "unused for reference" */
/* MMCO 1: locates the short-term reference whose PicNum == picNumX and
 * unmarks it (both fields if the current picture is a frame). */
2868 exec_ref_pic_marking_adaptive_mmco_1(
2869 GstVaapiDecoderH264 *decoder,
2870 GstVaapiPictureH264 *picture,
2871 GstH264RefPicMarking *ref_pic_marking
2874 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2877 picNumX = get_picNumX(picture, ref_pic_marking);
2878 i = find_short_term_reference(decoder, picNumX);
2882 gst_vaapi_picture_h264_set_reference(priv->short_ref[i], 0,
2883 GST_VAAPI_PICTURE_IS_FRAME(picture));
2884 ARRAY_REMOVE_INDEX(priv->short_ref, i);
2887 /* 8.2.5.4.2. Mark long-term reference picture as "unused for reference" */
/* MMCO 2: locates the long-term reference whose LongTermPicNum matches and
 * unmarks it (both fields if the current picture is a frame). */
2889 exec_ref_pic_marking_adaptive_mmco_2(
2890 GstVaapiDecoderH264 *decoder,
2891 GstVaapiPictureH264 *picture,
2892 GstH264RefPicMarking *ref_pic_marking
2895 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2898 i = find_long_term_reference(decoder, ref_pic_marking->long_term_pic_num);
2902 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0,
2903 GST_VAAPI_PICTURE_IS_FRAME(picture));
2904 ARRAY_REMOVE_INDEX(priv->long_ref, i);
2907 /* 8.2.5.4.3. Assign LongTermFrameIdx to a short-term reference picture */
/* MMCO 3: first evicts any long-term reference already holding the target
 * LongTermFrameIdx, then moves the short-term reference with PicNum ==
 * picNumX into long_ref[] and marks it long-term. */
2909 exec_ref_pic_marking_adaptive_mmco_3(
2910 GstVaapiDecoderH264 *decoder,
2911 GstVaapiPictureH264 *picture,
2912 GstH264RefPicMarking *ref_pic_marking
2915 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2916 GstVaapiPictureH264 *ref_picture, *other_field;
2919 for (i = 0; i < priv->long_ref_count; i++) {
2920 if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
2923 if (i != priv->long_ref_count) {
2924 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
2925 ARRAY_REMOVE_INDEX(priv->long_ref, i);
2928 picNumX = get_picNumX(picture, ref_pic_marking);
2929 i = find_short_term_reference(decoder, picNumX);
2933 ref_picture = priv->short_ref[i];
2934 ARRAY_REMOVE_INDEX(priv->short_ref, i);
2935 priv->long_ref[priv->long_ref_count++] = ref_picture;
2937 ref_picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
2938 gst_vaapi_picture_h264_set_reference(ref_picture,
2939 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
2940 GST_VAAPI_PICTURE_IS_COMPLETE(picture));
2942 /* Assign LongTermFrameIdx to the other field if it was also
2943 marked as "used for long-term reference" */
2944 other_field = ref_picture->other_field;
2945 if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
2946 other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
2949 /* 8.2.5.4.4. Mark pictures with LongTermFramIdx > max_long_term_frame_idx
2950 * as "unused for reference" */
/* MMCO 4: MaxLongTermFrameIdx = max_long_term_frame_idx_plus1 - 1 (-1 means
 * "no long-term references allowed"); any long-term reference above it is
 * unmarked. Note i is signed since removal re-checks the same slot. */
2952 exec_ref_pic_marking_adaptive_mmco_4(
2953 GstVaapiDecoderH264 *decoder,
2954 GstVaapiPictureH264 *picture,
2955 GstH264RefPicMarking *ref_pic_marking
2958 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2959 gint32 i, long_term_frame_idx;
2961 long_term_frame_idx = ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
2963 for (i = 0; i < priv->long_ref_count; i++) {
2964 if (priv->long_ref[i]->long_term_frame_idx <= long_term_frame_idx)
2966 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, FALSE);
2967 ARRAY_REMOVE_INDEX(priv->long_ref, i);
2972 /* 8.2.5.4.5. Mark all reference pictures as "unused for reference" */
/* MMCO 5: flushes the DPB, resets frame_num state to 0 (7.4.3), and rebases
 * the current picture's POC so TopFieldOrderCnt/BottomFieldOrderCnt become
 * relative to zero (8.2.1). */
2974 exec_ref_pic_marking_adaptive_mmco_5(
2975 GstVaapiDecoderH264 *decoder,
2976 GstVaapiPictureH264 *picture,
2977 GstH264RefPicMarking *ref_pic_marking
2980 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2982 dpb_flush(decoder, picture);
2984 priv->prev_pic_has_mmco5 = TRUE;
2986 /* The picture shall be inferred to have had frame_num equal to 0 (7.4.3) */
2987 priv->frame_num = 0;
2988 priv->frame_num_offset = 0;
2989 picture->frame_num = 0;
2991 /* Update TopFieldOrderCnt and BottomFieldOrderCnt (8.2.1) */
2992 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
2993 picture->field_poc[TOP_FIELD] -= picture->base.poc;
2994 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
2995 picture->field_poc[BOTTOM_FIELD] -= picture->base.poc;
2996 picture->base.poc = 0;
2999 /* 8.2.5.4.6. Assign a long-term frame index to the current picture */
/* MMCO 6: evicts any long-term reference already holding the target
 * LongTermFrameIdx, then marks the CURRENT picture long-term with that
 * index; the paired field (via parent_picture) inherits the index too. */
3001 exec_ref_pic_marking_adaptive_mmco_6(
3002 GstVaapiDecoderH264 *decoder,
3003 GstVaapiPictureH264 *picture,
3004 GstH264RefPicMarking *ref_pic_marking
3007 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3008 GstVaapiPictureH264 *other_field;
3011 for (i = 0; i < priv->long_ref_count; i++) {
3012 if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
3015 if (i != priv->long_ref_count) {
3016 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
3017 ARRAY_REMOVE_INDEX(priv->long_ref, i);
3020 picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3021 gst_vaapi_picture_h264_set_reference(picture,
3022 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3023 GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3025 /* Assign LongTermFrameIdx to the other field if it was also
3026 marked as "used for long-term reference" */
3027 other_field = GST_VAAPI_PICTURE_H264(picture->base.parent_picture);
3028 if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3029 other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3032 /* 8.2.5.4. Adaptive memory control decoded reference picture marking process */
/* Walks the slice header's MMCO list and dispatches each operation through a
 * function table indexed by memory_management_control_operation (entry 0 is
 * unused/NULL); unknown MMCO values are logged as errors. */
3034 exec_ref_pic_marking_adaptive(
3035 GstVaapiDecoderH264 *decoder,
3036 GstVaapiPictureH264 *picture,
3037 GstH264DecRefPicMarking *dec_ref_pic_marking
3042 GST_DEBUG("reference picture marking process (adaptive memory control)");
3044 typedef void (*exec_ref_pic_marking_adaptive_mmco_func)(
3045 GstVaapiDecoderH264 *decoder,
3046 GstVaapiPictureH264 *picture,
3047 GstH264RefPicMarking *ref_pic_marking
3050 static const exec_ref_pic_marking_adaptive_mmco_func mmco_funcs[] = {
3052 exec_ref_pic_marking_adaptive_mmco_1,
3053 exec_ref_pic_marking_adaptive_mmco_2,
3054 exec_ref_pic_marking_adaptive_mmco_3,
3055 exec_ref_pic_marking_adaptive_mmco_4,
3056 exec_ref_pic_marking_adaptive_mmco_5,
3057 exec_ref_pic_marking_adaptive_mmco_6,
3060 for (i = 0; i < dec_ref_pic_marking->n_ref_pic_marking; i++) {
3061 GstH264RefPicMarking * const ref_pic_marking =
3062 &dec_ref_pic_marking->ref_pic_marking[i];
3064 const guint mmco = ref_pic_marking->memory_management_control_operation;
3065 if (mmco < G_N_ELEMENTS(mmco_funcs) && mmco_funcs[mmco])
3066 mmco_funcs[mmco](decoder, picture, ref_pic_marking);
3068 GST_ERROR("unhandled MMCO %u", mmco);
3075 /* 8.2.5 - Execute reference picture marking process */
/* Entry point for reference marking: records MMCO-5/structure state, tracks
 * inter-view pictures for MVC, and for reference pictures runs either the
 * adaptive (MMCO) or the sliding-window marking process. Non-IDR only for
 * the adaptive path; IDR handling happens in init_picture(). */
3077 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3079 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3081 priv->prev_pic_has_mmco5 = FALSE;
3082 priv->prev_pic_structure = picture->structure;
3084 if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture))
3085 g_ptr_array_add(priv->inter_views, gst_vaapi_picture_ref(picture));
3087 if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
3090 if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
3091 GstH264DecRefPicMarking * const dec_ref_pic_marking =
3092 &picture->last_slice_hdr->dec_ref_pic_marking;
3093 if (dec_ref_pic_marking->adaptive_ref_pic_marking_mode_flag) {
3094 if (!exec_ref_pic_marking_adaptive(decoder, picture, dec_ref_pic_marking))
3098 if (!exec_ref_pic_marking_sliding_window(decoder))
/* Resets a VAPictureH264 entry to the "invalid / unused" state expected by
 * VA-API for empty ReferenceFrames[] slots. */
3106 vaapi_init_picture(VAPictureH264 *pic)
3108 pic->picture_id = VA_INVALID_ID;
3110 pic->flags = VA_PICTURE_H264_INVALID;
3111 pic->TopFieldOrderCnt = 0;
3112 pic->BottomFieldOrderCnt = 0;
/* Translates a decoder picture into a VAPictureH264: surface id, long/short
 * term reference flags with the matching frame_idx, and per-structure field
 * order counts. @picture_structure of 0 means "use the picture's own". */
3116 vaapi_fill_picture(VAPictureH264 *pic, GstVaapiPictureH264 *picture,
3117 guint picture_structure)
3119 if (!picture_structure)
3120 picture_structure = picture->structure;
3122 pic->picture_id = picture->base.surface_id;
3125 if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)) {
3126 pic->flags |= VA_PICTURE_H264_LONG_TERM_REFERENCE;
3127 pic->frame_idx = picture->long_term_frame_idx;
3130 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
3131 pic->flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
3132 pic->frame_idx = picture->frame_num;
3135 switch (picture_structure) {
3136 case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
3137 pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3138 pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3140 case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
3141 pic->flags |= VA_PICTURE_H264_TOP_FIELD;
3142 pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3143 pic->BottomFieldOrderCnt = 0;
3145 case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
3146 pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD;
3147 pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3148 pic->TopFieldOrderCnt = 0;
/* Like vaapi_fill_picture(), but for entries of RefPicList0/1: inter-view
 * references additionally have their short/long-term flags stripped. */
3154 vaapi_fill_picture_for_RefPicListX(VAPictureH264 *pic,
3155 GstVaapiPictureH264 *picture)
3157 vaapi_fill_picture(pic, picture, 0);
3159 /* H.8.4 - MVC inter prediction and inter-view prediction process */
3160 if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture)) {
3161 /* The inter-view reference components and inter-view only
3162 reference components that are included in the reference
3163 picture lists are considered as not being marked as "used for
3164 short-term reference" or "used for long-term reference" */
3165 pic->flags &= ~(VA_PICTURE_H264_SHORT_TERM_REFERENCE|
3166 VA_PICTURE_H264_LONG_TERM_REFERENCE);
/* Populates the VAPictureParameterBufferH264 for the current picture:
 * CurrPic, the ReferenceFrames[] array (same-view references plus usable
 * inter-view references from the DPB, remaining slots invalidated), and the
 * SPS/PPS-derived scalar and bitfield parameters. */
3171 fill_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3173 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3174 GstVaapiPicture * const base_picture = &picture->base;
3175 GstH264PPS * const pps = get_pps(decoder);
3176 GstH264SPS * const sps = get_sps(decoder);
3177 VAPictureParameterBufferH264 * const pic_param = base_picture->param;
3180 /* Fill in VAPictureParameterBufferH264 */
3181 vaapi_fill_picture(&pic_param->CurrPic, picture, 0);
3183 for (i = 0, n = 0; i < priv->dpb_count; i++) {
3184 GstVaapiFrameStore * const fs = priv->dpb[i];
3185 if ((gst_vaapi_frame_store_has_reference(fs) &&
3186 fs->view_id == picture->base.view_id) ||
3187 (gst_vaapi_frame_store_has_inter_view(fs) &&
3188 is_inter_view_reference_for_picture(decoder, fs->view_id, picture)))
3189 vaapi_fill_picture(&pic_param->ReferenceFrames[n++],
3190 fs->buffers[0], fs->structure);
/* VA-API caps ReferenceFrames[] at a fixed size; stop when full */
3191 if (n >= G_N_ELEMENTS(pic_param->ReferenceFrames))
3194 for (; n < G_N_ELEMENTS(pic_param->ReferenceFrames); n++)
3195 vaapi_init_picture(&pic_param->ReferenceFrames[n]);
3197 #define COPY_FIELD(s, f) \
3198 pic_param->f = (s)->f
3200 #define COPY_BFM(a, s, f) \
3201 pic_param->a.bits.f = (s)->f
3203 pic_param->picture_width_in_mbs_minus1 = priv->mb_width - 1;
3204 pic_param->picture_height_in_mbs_minus1 = priv->mb_height - 1;
3205 pic_param->frame_num = priv->frame_num;
3207 COPY_FIELD(sps, bit_depth_luma_minus8);
3208 COPY_FIELD(sps, bit_depth_chroma_minus8);
3209 COPY_FIELD(sps, num_ref_frames);
3210 COPY_FIELD(pps, num_slice_groups_minus1);
3211 COPY_FIELD(pps, slice_group_map_type);
3212 COPY_FIELD(pps, slice_group_change_rate_minus1);
3213 COPY_FIELD(pps, pic_init_qp_minus26);
3214 COPY_FIELD(pps, pic_init_qs_minus26);
3215 COPY_FIELD(pps, chroma_qp_index_offset);
3216 COPY_FIELD(pps, second_chroma_qp_index_offset);
3218 pic_param->seq_fields.value = 0; /* reset all bits */
3219 pic_param->seq_fields.bits.residual_colour_transform_flag = sps->separate_colour_plane_flag;
3220 pic_param->seq_fields.bits.MinLumaBiPredSize8x8 = sps->level_idc >= 31; /* A.3.3.2 */
3222 COPY_BFM(seq_fields, sps, chroma_format_idc);
3223 COPY_BFM(seq_fields, sps, gaps_in_frame_num_value_allowed_flag);
3224 COPY_BFM(seq_fields, sps, frame_mbs_only_flag);
3225 COPY_BFM(seq_fields, sps, mb_adaptive_frame_field_flag);
3226 COPY_BFM(seq_fields, sps, direct_8x8_inference_flag);
3227 COPY_BFM(seq_fields, sps, log2_max_frame_num_minus4);
3228 COPY_BFM(seq_fields, sps, pic_order_cnt_type);
3229 COPY_BFM(seq_fields, sps, log2_max_pic_order_cnt_lsb_minus4);
3230 COPY_BFM(seq_fields, sps, delta_pic_order_always_zero_flag);
3232 pic_param->pic_fields.value = 0; /* reset all bits */
3233 pic_param->pic_fields.bits.field_pic_flag = GST_VAAPI_PICTURE_IS_INTERLACED(picture);
3234 pic_param->pic_fields.bits.reference_pic_flag = GST_VAAPI_PICTURE_IS_REFERENCE(picture);
3236 COPY_BFM(pic_fields, pps, entropy_coding_mode_flag);
3237 COPY_BFM(pic_fields, pps, weighted_pred_flag);
3238 COPY_BFM(pic_fields, pps, weighted_bipred_idc);
3239 COPY_BFM(pic_fields, pps, transform_8x8_mode_flag);
3240 COPY_BFM(pic_fields, pps, constrained_intra_pred_flag);
3241 COPY_BFM(pic_fields, pps, pic_order_present_flag);
3242 COPY_BFM(pic_fields, pps, deblocking_filter_control_present_flag);
3243 COPY_BFM(pic_fields, pps, redundant_pic_cnt_present_flag);
3247 /* Detection of the first VCL NAL unit of a primary coded picture (7.4.1.2.4) */
/* Returns TRUE when the slice in @pi starts a new primary coded picture
 * relative to @prev_pi, by comparing the header fields enumerated in
 * 7.4.1.2.4; each CHECK_* macro bails out with TRUE on a mismatch. */
3249 is_new_picture(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3251 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3252 GstH264PPS * const pps = slice_hdr->pps;
3253 GstH264SPS * const sps = pps->sequence;
3254 GstH264SliceHdr *prev_slice_hdr;
3258 prev_slice_hdr = &prev_pi->data.slice_hdr;
3260 #define CHECK_EXPR(expr, field_name) do { \
3262 GST_DEBUG(field_name " differs in value"); \
3267 #define CHECK_VALUE(new_slice_hdr, old_slice_hdr, field) \
3268 CHECK_EXPR(((new_slice_hdr)->field == (old_slice_hdr)->field), #field)
3270 /* view_id differs in value and VOIdx of current slice_hdr is less
3271 than the VOIdx of the prev_slice_hdr */
3272 CHECK_VALUE(pi, prev_pi, view_id);
3274 /* frame_num differs in value, regardless of inferred values to 0 */
3275 CHECK_VALUE(slice_hdr, prev_slice_hdr, frame_num);
3277 /* pic_parameter_set_id differs in value */
3278 CHECK_VALUE(slice_hdr, prev_slice_hdr, pps);
3280 /* field_pic_flag differs in value */
3281 CHECK_VALUE(slice_hdr, prev_slice_hdr, field_pic_flag);
3283 /* bottom_field_flag is present in both and differs in value */
3284 if (slice_hdr->field_pic_flag && prev_slice_hdr->field_pic_flag)
3285 CHECK_VALUE(slice_hdr, prev_slice_hdr, bottom_field_flag);
3287 /* nal_ref_idc differs in value with one of the nal_ref_idc values is 0 */
3288 CHECK_EXPR((pi->nalu.ref_idc != 0) ==
3289 (prev_pi->nalu.ref_idc != 0), "nal_ref_idc");
3291 /* POC type is 0 for both and either pic_order_cnt_lsb differs in
3292 value or delta_pic_order_cnt_bottom differs in value */
3293 if (sps->pic_order_cnt_type == 0) {
3294 CHECK_VALUE(slice_hdr, prev_slice_hdr, pic_order_cnt_lsb);
3295 if (pps->pic_order_present_flag && !slice_hdr->field_pic_flag)
3296 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt_bottom);
3299 /* POC type is 1 for both and either delta_pic_order_cnt[0]
3300 differs in value or delta_pic_order_cnt[1] differs in value */
3301 else if (sps->pic_order_cnt_type == 1) {
3302 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[0]);
3303 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[1]);
3306 /* IdrPicFlag differs in value */
3307 CHECK_VALUE(&pi->nalu, &prev_pi->nalu, idr_pic_flag);
3309 /* IdrPicFlag is equal to 1 for both and idr_pic_id differs in value */
3310 if (pi->nalu.idr_pic_flag)
3311 CHECK_VALUE(slice_hdr, prev_slice_hdr, idr_pic_id);
3318 /* Detection of a new access unit, assuming we are already in presence
/* NOTE(review): the continuation of the comment above (original line
 * 3319) is not visible in this extract. */
3320 static inline gboolean
3321 is_new_access_unit(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
/* No previous slice, or both slices belong to the same view: the
 * (elided) branch body handles this case directly. */
3323 if (!prev_pi || prev_pi->view_id == pi->view_id)
/* Different views: a new access unit starts when the view order count
 * (voc) goes backwards, i.e. the stream returned to the first coded
 * view of an access unit. */
3325 return pi->voc < prev_pi->voc;
3328 /* Finds the first field picture corresponding to the supplied picture */
/* find_first_field:
 * @decoder: the decoder instance
 * @pi: parser info for the current slice
 *
 * Looks up, in the per-view array of previous frame stores, the field
 * picture that the field slice described by @pi would complete.
 * NOTE(review): the early-return lines (presumably returning NULL when
 * no pairing field exists) are elided in this extract.
 */
3329 static GstVaapiPictureH264 *
3330 find_first_field(GstVaapiDecoderH264 *decoder, GstVaapiParserInfoH264 *pi)
3332 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3333 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3334 GstVaapiFrameStore *fs;
/* A frame-coded slice has no complementary first field to find. */
3336 if (!slice_hdr->field_pic_flag)
/* Only an existing frame store that does not yet hold a complete frame
 * (i.e. holds a single field) can pair with this slice. */
3339 fs = priv->prev_frames[pi->voc];
3340 if (!fs || gst_vaapi_frame_store_has_frame(fs))
/* Fields pair only when they share the same frame_num. */
3343 if (fs->buffers[0]->frame_num == slice_hdr->frame_num)
3344 return fs->buffers[0];
/* decode_picture:
 * Starts decoding of a new picture: activates PPS/SPS, (re)creates the
 * VA context if needed, allocates either a second-field picture or a
 * brand new picture, sets crop rectangle and quantization matrices,
 * then initializes and fills the picture parameters.
 * NOTE(review): several guard lines (e.g. the `if (first_field)` /
 * `if (!picture)` conditionals and error-path returns) are elided in
 * this extract; the control flow annotated below is partially inferred.
 */
3348 static GstVaapiDecoderStatus
3349 decode_picture(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3351 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3352 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3353 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3354 GstH264PPS * const pps = ensure_pps(decoder, slice_hdr->pps);
3355 GstH264SPS * const sps = ensure_sps(decoder, slice_hdr->pps->sequence);
3356 GstVaapiPictureH264 *picture, *first_field;
3357 GstVaapiDecoderStatus status;
3359 g_return_val_if_fail(pps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3360 g_return_val_if_fail(sps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3362 /* Only decode base stream for MVC */
3363 switch (sps->profile_idc) {
3364 case GST_H264_PROFILE_MULTIVIEW_HIGH:
3365 case GST_H264_PROFILE_STEREO_HIGH:
/* Pictures of a non-base MVC substream are dropped, not decoded. */
3367 GST_DEBUG("drop picture from substream");
3368 return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
3373 status = ensure_context(decoder, sps);
3374 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* Reset per-picture decoder state before accumulating slice state. */
3377 priv->decoder_state = 0;
3379 first_field = find_first_field(decoder, pi);
3381 /* Re-use current picture where the first field was decoded */
3382 picture = gst_vaapi_picture_h264_new_field(first_field);
3384 GST_ERROR("failed to allocate field picture");
3385 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3389 /* Create new picture */
3390 picture = gst_vaapi_picture_h264_new(decoder);
3392 GST_ERROR("failed to allocate picture");
3393 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
/* Transfer ownership of the new picture to priv->current_picture
 * (replace() presumably takes its own reference — the local one is
 * dropped right after). */
3396 gst_vaapi_picture_replace(&priv->current_picture, picture);
3397 gst_vaapi_picture_unref(picture);
3399 /* Clear inter-view references list if this is the primary coded
3400 picture of the current access unit */
3401 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3402 g_ptr_array_set_size(priv->inter_views, 0);
3404 /* Update cropping rectangle */
3405 if (sps->frame_cropping_flag) {
3406 GstVaapiRectangle crop_rect;
3407 crop_rect.x = sps->crop_rect_x;
3408 crop_rect.y = sps->crop_rect_y;
3409 crop_rect.width = sps->crop_rect_width;
3410 crop_rect.height = sps->crop_rect_height;
3411 gst_vaapi_picture_set_crop_rect(&picture->base, &crop_rect);
3414 status = ensure_quant_matrix(decoder, picture);
3415 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
3416 GST_ERROR("failed to reset quantizer matrix");
3420 if (!init_picture(decoder, picture, pi))
3421 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3422 if (!fill_picture(decoder, picture))
3423 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
/* Remember the parser state that was valid for this picture. */
3425 priv->decoder_state = pi->state;
3426 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* get_slice_data_bit_offset:
 * @slice_hdr: the parsed slice header
 * @nal_header_bytes: number of bytes occupied by the NAL unit header
 *
 * Returns the bit offset to the first bit of slice data, counted from
 * the start of the NAL unit: NAL header bits plus the parsed header
 * size, minus the bits of the emulation prevention bytes the parser
 * counted inside the header.
 * NOTE(review): the return-type line is not visible in this extract.
 */
3430 get_slice_data_bit_offset(GstH264SliceHdr *slice_hdr, guint nal_header_bytes)
3434 epb_count = slice_hdr->n_emulation_prevention_bytes;
3435 return 8 * nal_header_bytes + slice_hdr->header_size - epb_count * 8;
/* fill_pred_weight_table:
 * Copies the slice header's explicit prediction weight table into the
 * VASliceParameterBufferH264, for list 0 (P/SP slices with
 * weighted_pred_flag) and additionally list 1 (B slices with
 * weighted_bipred_idc == 1).  When no explicit weights apply, all
 * weight fields are zeroed.
 * NOTE(review): the return-type line and the early-return after the
 * `num_weight_tables < 1` check are elided in this extract.
 */
3439 fill_pred_weight_table(GstVaapiDecoderH264 *decoder,
3440 GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3442 VASliceParameterBufferH264 * const slice_param = slice->param;
3443 GstH264PPS * const pps = get_pps(decoder);
3444 GstH264SPS * const sps = get_sps(decoder);
3445 GstH264PredWeightTable * const w = &slice_hdr->pred_weight_table;
3446 guint num_weight_tables = 0;
/* Decide how many reference lists carry explicit weights. */
3449 if (pps->weighted_pred_flag &&
3450 (GST_H264_IS_P_SLICE(slice_hdr) || GST_H264_IS_SP_SLICE(slice_hdr)))
3451 num_weight_tables = 1;
3452 else if (pps->weighted_bipred_idc == 1 && GST_H264_IS_B_SLICE(slice_hdr))
3453 num_weight_tables = 2;
3455 num_weight_tables = 0;
/* Default: no weighted prediction. */
3457 slice_param->luma_log2_weight_denom = 0;
3458 slice_param->chroma_log2_weight_denom = 0;
3459 slice_param->luma_weight_l0_flag = 0;
3460 slice_param->chroma_weight_l0_flag = 0;
3461 slice_param->luma_weight_l1_flag = 0;
3462 slice_param->chroma_weight_l1_flag = 0;
3464 if (num_weight_tables < 1)
3467 slice_param->luma_log2_weight_denom = w->luma_log2_weight_denom;
3468 slice_param->chroma_log2_weight_denom = w->chroma_log2_weight_denom;
/* List 0 weights (P/SP/B slices). */
3470 slice_param->luma_weight_l0_flag = 1;
3471 for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3472 slice_param->luma_weight_l0[i] = w->luma_weight_l0[i];
3473 slice_param->luma_offset_l0[i] = w->luma_offset_l0[i];
/* chroma_array_type == 0 means monochrome: no chroma weights exist. */
3476 slice_param->chroma_weight_l0_flag = sps->chroma_array_type != 0;
3477 if (slice_param->chroma_weight_l0_flag) {
3478 for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3479 for (j = 0; j < 2; j++) {
3480 slice_param->chroma_weight_l0[i][j] = w->chroma_weight_l0[i][j];
3481 slice_param->chroma_offset_l0[i][j] = w->chroma_offset_l0[i][j];
3486 if (num_weight_tables < 2)
/* List 1 weights (B slices only). */
3489 slice_param->luma_weight_l1_flag = 1;
3490 for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3491 slice_param->luma_weight_l1[i] = w->luma_weight_l1[i];
3492 slice_param->luma_offset_l1[i] = w->luma_offset_l1[i];
3495 slice_param->chroma_weight_l1_flag = sps->chroma_array_type != 0;
3496 if (slice_param->chroma_weight_l1_flag) {
3497 for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3498 for (j = 0; j < 2; j++) {
3499 slice_param->chroma_weight_l1[i][j] = w->chroma_weight_l1[i][j];
3500 slice_param->chroma_offset_l1[i][j] = w->chroma_offset_l1[i][j];
/* fill_RefPicList:
 * Copies the decoder's prepared reference picture lists (RefPicList0/1)
 * into the VASliceParameterBufferH264, then pads the remaining entries
 * up to num_ref_idx_lX_active_minus1 with "invalid" placeholder
 * pictures via vaapi_init_picture().
 * NOTE(review): the assignments to num_ref_lists after the slice-type
 * checks, the early returns, and the return-type line are elided in
 * this extract; B slices presumably use two lists, I slices none,
 * P/SP slices one.
 */
3508 fill_RefPicList(GstVaapiDecoderH264 *decoder,
3509 GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3511 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3512 VASliceParameterBufferH264 * const slice_param = slice->param;
3513 guint i, num_ref_lists = 0;
3515 slice_param->num_ref_idx_l0_active_minus1 = 0;
3516 slice_param->num_ref_idx_l1_active_minus1 = 0;
3518 if (GST_H264_IS_B_SLICE(slice_hdr))
3520 else if (GST_H264_IS_I_SLICE(slice_hdr))
3525 if (num_ref_lists < 1)
3528 slice_param->num_ref_idx_l0_active_minus1 =
3529 slice_hdr->num_ref_idx_l0_active_minus1;
/* Copy valid list-0 entries, then invalidate the rest of the array. */
3531 for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
3532 vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList0[i],
3533 priv->RefPicList0[i]);
3534 for (; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
3535 vaapi_init_picture(&slice_param->RefPicList0[i]);
3537 if (num_ref_lists < 2)
3540 slice_param->num_ref_idx_l1_active_minus1 =
3541 slice_hdr->num_ref_idx_l1_active_minus1;
/* Same for list 1 (B slices). */
3543 for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
3544 vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList1[i],
3545 priv->RefPicList1[i]);
3546 for (; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
3547 vaapi_init_picture(&slice_param->RefPicList1[i]);
/* fill_slice:
 * Populates the VASliceParameterBufferH264 of @slice from the parsed
 * slice header: data bit offset, slice-level fields, reference picture
 * lists and prediction weight table.
 * NOTE(review): the return-type line and the return statements are
 * elided in this extract.
 */
3552 fill_slice(GstVaapiDecoderH264 *decoder,
3553 GstVaapiSlice *slice, GstVaapiParserInfoH264 *pi)
3555 VASliceParameterBufferH264 * const slice_param = slice->param;
3556 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3558 /* Fill in VASliceParameterBufferH264 */
3559 slice_param->slice_data_bit_offset =
3560 get_slice_data_bit_offset(slice_hdr, pi->nalu.header_bytes);
3561 slice_param->first_mb_in_slice = slice_hdr->first_mb_in_slice;
/* Slice types 5..9 are aliases of 0..4 in H.264, hence the modulo. */
3562 slice_param->slice_type = slice_hdr->type % 5;
3563 slice_param->direct_spatial_mv_pred_flag = slice_hdr->direct_spatial_mv_pred_flag;
3564 slice_param->cabac_init_idc = slice_hdr->cabac_init_idc;
3565 slice_param->slice_qp_delta = slice_hdr->slice_qp_delta;
3566 slice_param->disable_deblocking_filter_idc = slice_hdr->disable_deblocking_filter_idc;
3567 slice_param->slice_alpha_c0_offset_div2 = slice_hdr->slice_alpha_c0_offset_div2;
3568 slice_param->slice_beta_offset_div2 = slice_hdr->slice_beta_offset_div2;
3570 if (!fill_RefPicList(decoder, slice, slice_hdr))
3572 if (!fill_pred_weight_table(decoder, slice, slice_hdr))
/* decode_slice:
 * Decodes one slice NAL unit into the current picture: validates that
 * enough headers were received, activates PPS/SPS, maps the input
 * buffer to create a GstVaapiSlice over the slice payload, initializes
 * the reference lists and slice parameters, and attaches the slice to
 * the picture.
 * NOTE(review): some guard lines (e.g. the `if (!slice)` check before
 * the allocation-failure path) are elided in this extract.
 */
3577 static GstVaapiDecoderStatus
3578 decode_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3580 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3581 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3582 GstVaapiPictureH264 * const picture = priv->current_picture;
3583 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3584 GstVaapiSlice *slice;
3585 GstBuffer * const buffer =
3586 GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
3587 GstMapInfo map_info;
3589 GST_DEBUG("slice (%u bytes)", pi->nalu.size);
/* Without valid picture headers the slice is silently skipped:
 * returning SUCCESS keeps the decoder running on broken streams. */
3591 if (!is_valid_state(pi->state,
3592 GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS)) {
3593 GST_WARNING("failed to receive enough headers to decode slice");
3594 return GST_VAAPI_DECODER_STATUS_SUCCESS;
3597 if (!ensure_pps(decoder, slice_hdr->pps)) {
3598 GST_ERROR("failed to activate PPS");
3599 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3602 if (!ensure_sps(decoder, slice_hdr->pps->sequence)) {
3603 GST_ERROR("failed to activate SPS");
3604 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3607 if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
3608 GST_ERROR("failed to map buffer");
3609 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3612 /* Check whether this is the first/last slice in the current access unit */
3613 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3614 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_START);
3615 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)
3616 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
/* The buffer is unmapped right after slice creation; the slice
 * presumably keeps its own copy of the payload. */
3618 slice = GST_VAAPI_SLICE_NEW(H264, decoder,
3619 (map_info.data + unit->offset + pi->nalu.offset), pi->nalu.size);
3620 gst_buffer_unmap(buffer, &map_info);
3622 GST_ERROR("failed to allocate slice");
3623 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3626 init_picture_refs(decoder, picture, slice_hdr);
3627 if (!fill_slice(decoder, slice, pi)) {
3628 gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(slice));
3629 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3632 gst_vaapi_picture_add_slice(GST_VAAPI_PICTURE_CAST(picture), slice);
3633 picture->last_slice_hdr = slice_hdr;
3634 priv->decoder_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
3635 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* scan_for_start_code:
 * Thin wrapper around gst_adapter_masked_scan_uint32_peek() looking for
 * the 3-byte Annex-B start code 00 00 01: mask 0xffffff00 against value
 * 0x00000100 leaves the fourth byte (start of the NAL unit) unmatched.
 * NOTE(review): the trailing argument lines (passing @ofs, @size and
 * @scp through) and the return-type line are elided in this extract.
 */
3639 scan_for_start_code(GstAdapter *adapter, guint ofs, guint size, guint32 *scp)
3641 return (gint)gst_adapter_masked_scan_uint32_peek(adapter,
3642 0xffffff00, 0x00000100,
/* decode_unit:
 * Dispatches one parsed decode unit to the handler matching its NAL
 * unit type (SPS/PPS/slice/sequence-end/...).  SEI units are accepted
 * but not acted upon here; unknown types are reported as bitstream
 * errors.
 * NOTE(review): the `break;` lines between cases and the final
 * `return status;` are elided in this extract.
 */
3647 static GstVaapiDecoderStatus
3648 decode_unit(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3650 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3651 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3652 GstVaapiDecoderStatus status;
/* Accumulate the parser state observed so far for this picture. */
3654 priv->decoder_state |= pi->state;
3655 switch (pi->nalu.type) {
3656 case GST_H264_NAL_SPS:
3657 status = decode_sps(decoder, unit);
3659 case GST_H264_NAL_SUBSET_SPS:
3660 status = decode_subset_sps(decoder, unit);
3662 case GST_H264_NAL_PPS:
3663 status = decode_pps(decoder, unit);
3665 case GST_H264_NAL_SLICE_EXT:
3666 case GST_H264_NAL_SLICE_IDR:
3667 /* fall-through. IDR specifics are handled in init_picture() */
3668 case GST_H264_NAL_SLICE:
3669 status = decode_slice(decoder, unit);
3671 case GST_H264_NAL_SEQ_END:
3672 case GST_H264_NAL_STREAM_END:
3673 status = decode_sequence_end(decoder);
3675 case GST_H264_NAL_SEI:
3676 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3679 GST_WARNING("unsupported NAL unit type %d", pi->nalu.type);
3680 status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
/* gst_vaapi_decoder_h264_decode_codec_data:
 * Parses avcC-formatted codec-data: reads the NAL length size from
 * buf[4] and the SPS count from buf[5], then parses and decodes every
 * embedded SPS and PPS NAL unit.  On success the decoder is switched to
 * length-prefixed (is_avcC) mode.
 * NOTE(review): several lines are elided in this extract, including the
 * buf_size/format checks before line 3705, the read of num_pps, the
 * `cleanup:` label jumped to by the error paths, and the final return.
 */
3686 static GstVaapiDecoderStatus
3687 gst_vaapi_decoder_h264_decode_codec_data(GstVaapiDecoder *base_decoder,
3688 const guchar *buf, guint buf_size)
3690 GstVaapiDecoderH264 * const decoder =
3691 GST_VAAPI_DECODER_H264_CAST(base_decoder);
3692 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3693 GstVaapiDecoderStatus status;
3694 GstVaapiDecoderUnit unit;
3695 GstVaapiParserInfoH264 *pi = NULL;
3696 GstH264ParserResult result;
3697 guint i, ofs, num_sps, num_pps;
3699 unit.parsed_info = NULL;
3702 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3705 GST_ERROR("failed to decode codec-data, not in avcC format");
3706 return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
/* lengthSizeMinusOne field: NAL length prefixes are 1..4 bytes. */
3709 priv->nal_length_size = (buf[4] & 0x03) + 1;
3711 num_sps = buf[5] & 0x1f;
3714 for (i = 0; i < num_sps; i++) {
3715 pi = gst_vaapi_parser_info_h264_new();
3717 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3718 unit.parsed_info = pi;
/* avcC embeds each parameter set with a 2-byte length prefix. */
3720 result = gst_h264_parser_identify_nalu_avc(
3722 buf, ofs, buf_size, 2,
3725 if (result != GST_H264_PARSER_OK) {
3726 status = get_status(result);
3730 status = parse_sps(decoder, &unit);
3731 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3733 ofs = pi->nalu.offset + pi->nalu.size;
3735 status = decode_sps(decoder, &unit);
3736 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3738 gst_vaapi_parser_info_h264_replace(&pi, NULL);
3744 for (i = 0; i < num_pps; i++) {
3745 pi = gst_vaapi_parser_info_h264_new();
3747 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3748 unit.parsed_info = pi;
3750 result = gst_h264_parser_identify_nalu_avc(
3752 buf, ofs, buf_size, 2,
3755 if (result != GST_H264_PARSER_OK) {
3756 status = get_status(result);
3760 status = parse_pps(decoder, &unit);
3761 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3763 ofs = pi->nalu.offset + pi->nalu.size;
3765 status = decode_pps(decoder, &unit);
3766 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3768 gst_vaapi_parser_info_h264_replace(&pi, NULL);
3771 priv->is_avcC = TRUE;
3772 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Common cleanup: drop any parser info still held on error paths. */
3775 gst_vaapi_parser_info_h264_replace(&pi, NULL);
/* ensure_decoder:
 * Lazily opens the decoder on first use and, once opened, decodes any
 * pending codec-data (avcC headers) before normal parsing proceeds.
 * NOTE(review): the `return status;` line of the codec-data error path
 * is elided in this extract.
 */
3779 static GstVaapiDecoderStatus
3780 ensure_decoder(GstVaapiDecoderH264 *decoder)
3782 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3783 GstVaapiDecoderStatus status;
3785 if (!priv->is_opened) {
3786 priv->is_opened = gst_vaapi_decoder_h264_open(decoder);
3787 if (!priv->is_opened)
3788 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
3790 status = gst_vaapi_decoder_decode_codec_data(
3791 GST_VAAPI_DECODER_CAST(decoder));
3792 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3795 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* gst_vaapi_decoder_h264_parse:
 * GstVaapiDecoderClass::parse hook: extracts exactly one NAL unit from
 * @adapter into @unit, parses its headers, and computes the unit flags
 * (AU/frame start/end, slice, skip) used by the frame-splitting logic.
 * Handles both avcC length-prefixed streams and Annex-B byte-streams
 * (start-code scanning, resumable via ps->input_offset2).
 * NOTE(review): many lines are elided in this extract: the declarations
 * of `buf`, `ofs`, `ofs2` and `start_code`, `break;` lines, several
 * `if (...)` guards, `unit->offset`, and `flags = 0`.  Annotations
 * below describe only what is visible.
 */
3798 static GstVaapiDecoderStatus
3799 gst_vaapi_decoder_h264_parse(GstVaapiDecoder *base_decoder,
3800 GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
3802 GstVaapiDecoderH264 * const decoder =
3803 GST_VAAPI_DECODER_H264_CAST(base_decoder);
3804 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3805 GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
3806 GstVaapiParserInfoH264 *pi;
3807 GstVaapiDecoderStatus status;
3808 GstH264ParserResult result;
3810 guint i, size, buf_size, nalu_size, flags;
3814 status = ensure_decoder(decoder);
3815 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3818 size = gst_adapter_available(adapter);
/* avcC mode: the NAL unit size is read from the length prefix. */
3820 if (priv->is_avcC) {
3821 if (size < priv->nal_length_size)
3822 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
/* Peek the length prefix into the local start_code storage. */
3824 buf = (guchar *)&start_code;
3825 g_assert(priv->nal_length_size <= sizeof(start_code));
3826 gst_adapter_copy(adapter, buf, 0, priv->nal_length_size);
/* Big-endian length prefix of 1..4 bytes. */
3829 for (i = 0; i < priv->nal_length_size; i++)
3830 nalu_size = (nalu_size << 8) | buf[i];
3832 buf_size = priv->nal_length_size + nalu_size;
3833 if (size < buf_size)
3834 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
/* Annex-B mode: locate this NAL's start code and the next one. */
3838 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3840 ofs = scan_for_start_code(adapter, 0, size, NULL);
3842 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
/* Discard garbage before the first start code. */
3845 gst_adapter_flush(adapter, ofs);
/* Resume a previous partial scan after the 4-byte start code. */
3849 ofs2 = ps->input_offset2 - ofs - 4;
3853 ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
3854 scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
3856 // Assume the whole NAL unit is present if end-of-stream
/* Not at EOS: remember how far we scanned and wait for more data. */
3858 ps->input_offset2 = size;
3859 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3865 ps->input_offset2 = 0;
3867 buf = (guchar *)gst_adapter_map(adapter, buf_size);
3869 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3871 unit->size = buf_size;
3873 pi = gst_vaapi_parser_info_h264_new();
3875 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
/* The unit owns one reference on the parser info from now on. */
3877 gst_vaapi_decoder_unit_set_parsed_info(unit,
3878 pi, (GDestroyNotify)gst_vaapi_mini_object_unref);
3881 result = gst_h264_parser_identify_nalu_avc(priv->parser,
3882 buf, 0, buf_size, priv->nal_length_size, &pi->nalu);
3884 result = gst_h264_parser_identify_nalu_unchecked(priv->parser,
3885 buf, 0, buf_size, &pi->nalu);
3886 status = get_status(result);
3887 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* First pass: parse the headers of this NAL unit. */
3890 switch (pi->nalu.type) {
3891 case GST_H264_NAL_SPS:
3892 status = parse_sps(decoder, unit);
3894 case GST_H264_NAL_SUBSET_SPS:
3895 status = parse_subset_sps(decoder, unit);
3897 case GST_H264_NAL_PPS:
3898 status = parse_pps(decoder, unit);
3900 case GST_H264_NAL_SEI:
3901 status = parse_sei(decoder, unit);
3903 case GST_H264_NAL_SLICE_EXT:
/* Non-MVC slice extensions are accepted but not parsed further. */
3904 if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
3905 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3909 case GST_H264_NAL_SLICE_IDR:
3910 case GST_H264_NAL_SLICE:
3911 status = parse_slice(decoder, unit);
3914 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3917 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* Second pass: derive unit flags for frame/AU splitting. */
3921 switch (pi->nalu.type) {
3922 case GST_H264_NAL_AU_DELIMITER:
3923 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
3924 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
3926 case GST_H264_NAL_FILLER_DATA:
3927 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
3929 case GST_H264_NAL_STREAM_END:
3930 flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
3932 case GST_H264_NAL_SEQ_END:
3933 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
3934 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
3936 case GST_H264_NAL_SPS:
3937 case GST_H264_NAL_SUBSET_SPS:
3938 case GST_H264_NAL_PPS:
3939 case GST_H264_NAL_SEI:
3940 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
3941 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
3943 case GST_H264_NAL_SLICE_EXT:
3944 if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
3945 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
3949 case GST_H264_NAL_SLICE_IDR:
3950 case GST_H264_NAL_SLICE:
3951 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
3952 if (is_new_picture(pi, priv->prev_slice_pi)) {
3953 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
3954 if (is_new_access_unit(pi, priv->prev_slice_pi))
3955 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
3957 gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, pi);
3959 case GST_H264_NAL_SPS_EXT:
3960 case GST_H264_NAL_SLICE_AUX:
3961 /* skip SPS extension and auxiliary slice for now */
3962 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
3964 case GST_H264_NAL_PREFIX_UNIT:
3965 /* skip Prefix NAL units for now */
3966 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP |
3967 GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
3968 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
/* NAL types 14..18 start a new AU when present (7.4.1.2.3 ordering);
 * NOTE(review): confirm the intended range against the spec. */
3971 if (pi->nalu.type >= 14 && pi->nalu.type <= 18)
3972 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
3973 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
/* Starting a new AU retroactively closes the previous slice's AU. */
3976 if ((flags & GST_VAAPI_DECODER_UNIT_FLAGS_AU) && priv->prev_slice_pi)
3977 priv->prev_slice_pi->flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
3978 GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
/* The nalu data pointer references the adapter's mapped buffer, which
 * is not owned here — clear it so it cannot dangle. */
3980 pi->nalu.data = NULL;
3981 pi->state = priv->parser_state;
3983 gst_vaapi_parser_info_h264_replace(&priv->prev_pi, pi);
3984 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* gst_vaapi_decoder_h264_decode:
 * GstVaapiDecoderClass::decode hook: makes sure the decoder is opened,
 * then dispatches the parsed unit to decode_unit().
 * NOTE(review): the early-return line after the status check is elided
 * in this extract.
 */
3987 static GstVaapiDecoderStatus
3988 gst_vaapi_decoder_h264_decode(GstVaapiDecoder *base_decoder,
3989 GstVaapiDecoderUnit *unit)
3991 GstVaapiDecoderH264 * const decoder =
3992 GST_VAAPI_DECODER_H264_CAST(base_decoder);
3993 GstVaapiDecoderStatus status;
3995 status = ensure_decoder(decoder);
3996 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3998 return decode_unit(decoder, unit);
4001 static GstVaapiDecoderStatus
4002 gst_vaapi_decoder_h264_start_frame(GstVaapiDecoder *base_decoder,
4003 GstVaapiDecoderUnit *unit)
4005 GstVaapiDecoderH264 * const decoder =
4006 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4008 return decode_picture(decoder, unit);
4011 static GstVaapiDecoderStatus
4012 gst_vaapi_decoder_h264_end_frame(GstVaapiDecoder *base_decoder)
4014 GstVaapiDecoderH264 * const decoder =
4015 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4017 return decode_current_picture(decoder);
4020 static GstVaapiDecoderStatus
4021 gst_vaapi_decoder_h264_flush(GstVaapiDecoder *base_decoder)
4023 GstVaapiDecoderH264 * const decoder =
4024 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4026 dpb_flush(decoder, NULL);
4027 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* gst_vaapi_decoder_h264_class_init:
 * Initializes the class vtable: instance size and finalizer on the
 * mini-object class, and all decoder virtual functions (create/destroy,
 * parse, decode, frame boundaries, flush, codec-data).
 * NOTE(review): the return-type line of this function is elided in
 * this extract.
 */
4031 gst_vaapi_decoder_h264_class_init(GstVaapiDecoderH264Class *klass)
4033 GstVaapiMiniObjectClass * const object_class =
4034 GST_VAAPI_MINI_OBJECT_CLASS(klass);
4035 GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
4037 object_class->size = sizeof(GstVaapiDecoderH264);
4038 object_class->finalize = (GDestroyNotify)gst_vaapi_decoder_finalize;
4040 decoder_class->create = gst_vaapi_decoder_h264_create;
4041 decoder_class->destroy = gst_vaapi_decoder_h264_destroy;
4042 decoder_class->parse = gst_vaapi_decoder_h264_parse;
4043 decoder_class->decode = gst_vaapi_decoder_h264_decode;
4044 decoder_class->start_frame = gst_vaapi_decoder_h264_start_frame;
4045 decoder_class->end_frame = gst_vaapi_decoder_h264_end_frame;
4046 decoder_class->flush = gst_vaapi_decoder_h264_flush;
4048 decoder_class->decode_codec_data =
4049 gst_vaapi_decoder_h264_decode_codec_data;
/* gst_vaapi_decoder_h264_class:
 * Returns the singleton decoder class, lazily initializing it exactly
 * once in a thread-safe way via the g_once_init_enter/leave pair.
 */
4052 static inline const GstVaapiDecoderClass *
4053 gst_vaapi_decoder_h264_class(void)
4055 static GstVaapiDecoderH264Class g_class;
4056 static gsize g_class_init = FALSE;
4058 if (g_once_init_enter(&g_class_init)) {
4059 gst_vaapi_decoder_h264_class_init(&g_class);
4060 g_once_init_leave(&g_class_init, TRUE);
4062 return GST_VAAPI_DECODER_CLASS(&g_class);
4066 * gst_vaapi_decoder_h264_new:
4067 * @display: a #GstVaapiDisplay
4068 * @caps: a #GstCaps holding codec information
4070 * Creates a new #GstVaapiDecoder for H.264 decoding. The @caps can
4071 * hold extra information like codec-data and picture coded size.
4073 * Return value: the newly allocated #GstVaapiDecoder object
4076 gst_vaapi_decoder_h264_new(GstVaapiDisplay *display, GstCaps *caps)
4078 return gst_vaapi_decoder_new(gst_vaapi_decoder_h264_class(), display, caps);