2 * gstvaapidecoder_h264.c - H.264 decoder
4 * Copyright (C) 2011-2014 Intel Corporation
5 * Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public License
9 * as published by the Free Software Foundation; either version 2.1
10 * of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, write to the Free
19 * Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
20 * Boston, MA 02110-1301 USA
24 * SECTION:gstvaapidecoder_h264
25 * @short_description: H.264 decoder
30 #include <gst/base/gstadapter.h>
31 #include <gst/codecparsers/gsth264parser.h>
32 #include "gstvaapidecoder_h264.h"
33 #include "gstvaapidecoder_objects.h"
34 #include "gstvaapidecoder_priv.h"
35 #include "gstvaapidisplay_priv.h"
36 #include "gstvaapiobject_priv.h"
37 #include "gstvaapiutils_h264_priv.h"
40 #include "gstvaapidebug.h"
/* Defined to 1 if strict ordering of DPB is needed. Only useful for debug */
#define USE_STRICT_DPB_ORDERING 0

typedef struct _GstVaapiDecoderH264Private GstVaapiDecoderH264Private;
typedef struct _GstVaapiDecoderH264Class GstVaapiDecoderH264Class;
typedef struct _GstVaapiFrameStore GstVaapiFrameStore;
typedef struct _GstVaapiFrameStoreClass GstVaapiFrameStoreClass;
typedef struct _GstVaapiParserInfoH264 GstVaapiParserInfoH264;
typedef struct _GstVaapiPictureH264 GstVaapiPictureH264;

// Used for field_poc[]: index 0 is the top field POC, index 1 the bottom
// field POC (see the `field_poc` member comment in the private struct).
#define TOP_FIELD 0
#define BOTTOM_FIELD 1
56 /* ------------------------------------------------------------------------- */
57 /* --- H.264 Parser Info --- */
58 /* ------------------------------------------------------------------------- */
61 * Extended decoder unit flags:
63 * @GST_VAAPI_DECODER_UNIT_AU_START: marks the start of an access unit.
64 * @GST_VAAPI_DECODER_UNIT_AU_END: marks the end of an access unit.
67 /* This flag does not strictly follow the definitions (7.4.1.2.3)
68 for detecting the start of an access unit as we are only
69 interested in knowing if the current slice is the first one or
70 the last one in the current access unit */
71 GST_VAAPI_DECODER_UNIT_FLAG_AU_START = (
72 GST_VAAPI_DECODER_UNIT_FLAG_LAST << 0),
73 GST_VAAPI_DECODER_UNIT_FLAG_AU_END = (
74 GST_VAAPI_DECODER_UNIT_FLAG_LAST << 1),
76 GST_VAAPI_DECODER_UNIT_FLAGS_AU = (
77 GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
78 GST_VAAPI_DECODER_UNIT_FLAG_AU_END),
/* Cast helper to GstVaapiParserInfoH264. */
#define GST_VAAPI_PARSER_INFO_H264(obj) \
    ((GstVaapiParserInfoH264 *)(obj))

/* Parser state attached to each decoder unit: the parsed NAL payload plus
 * bookkeeping used to group slices into access units.
 * NOTE(review): members appear to be missing from this excerpt (at least a
 * `nalu` field and a `data` union holding sps/pps/sei, judging by the uses
 * in gst_vaapi_parser_info_h264_finalize below), as is the closing `};` —
 * confirm against the complete file. */
struct _GstVaapiParserInfoH264 {
    GstVaapiMiniObject parent_instance; // base mini-object (refcounting)
    GstH264SliceHdr slice_hdr; // parsed slice header, for slice NALs
    guint flags; // Same as decoder unit flags (persistent)
    guint view_id; // View ID of slice
    guint voc; // View order index (VOIdx) of slice
100 gst_vaapi_parser_info_h264_finalize(GstVaapiParserInfoH264 *pi)
102 switch (pi->nalu.type) {
103 case GST_H264_NAL_SPS:
104 case GST_H264_NAL_SUBSET_SPS:
105 gst_h264_sps_clear(&pi->data.sps);
107 case GST_H264_NAL_PPS:
108 gst_h264_pps_clear(&pi->data.pps);
110 case GST_H264_NAL_SEI:
112 g_array_unref(pi->data.sei);
119 static inline const GstVaapiMiniObjectClass *
120 gst_vaapi_parser_info_h264_class(void)
122 static const GstVaapiMiniObjectClass GstVaapiParserInfoH264Class = {
123 .size = sizeof(GstVaapiParserInfoH264),
124 .finalize = (GDestroyNotify)gst_vaapi_parser_info_h264_finalize
126 return &GstVaapiParserInfoH264Class;
129 static inline GstVaapiParserInfoH264 *
130 gst_vaapi_parser_info_h264_new(void)
132 return (GstVaapiParserInfoH264 *)
133 gst_vaapi_mini_object_new(gst_vaapi_parser_info_h264_class());
/* Takes a new reference to the parser info. */
#define gst_vaapi_parser_info_h264_ref(pi) \
    gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(pi))

/* Drops a reference to the parser info (finalized on last unref). */
#define gst_vaapi_parser_info_h264_unref(pi) \
    gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(pi))

/* Replaces *old_pi_ptr with new_pi, adjusting reference counts. */
#define gst_vaapi_parser_info_h264_replace(old_pi_ptr, new_pi) \
    gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_pi_ptr), \
        (GstVaapiMiniObject *)(new_pi))
146 /* ------------------------------------------------------------------------- */
147 /* --- H.264 Pictures --- */
148 /* ------------------------------------------------------------------------- */
151 * Extended picture flags:
153 * @GST_VAAPI_PICTURE_FLAG_IDR: flag that specifies an IDR picture
154 * @GST_VAAPI_PICTURE_FLAG_INTER_VIEW: flag that indicates the picture
155 * may be used for inter-view prediction
156 * @GST_VAAPI_PICTURE_FLAG_ANCHOR: flag that specifies an anchor picture,
157 * i.e. a picture that is decoded with only inter-view prediction,
158 * and not inter prediction
159 * @GST_VAAPI_PICTURE_FLAG_AU_START: flag that marks the start of an
161 * @GST_VAAPI_PICTURE_FLAG_AU_END: flag that marks the end of an
163 * @GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE: flag that specifies
164 * "used for short-term reference"
165 * @GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE: flag that specifies
166 * "used for long-term reference"
167 * @GST_VAAPI_PICTURE_FLAGS_REFERENCE: mask covering any kind of
168 * reference picture (short-term reference or long-term reference)
171 GST_VAAPI_PICTURE_FLAG_IDR = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
172 GST_VAAPI_PICTURE_FLAG_REFERENCE2 = (GST_VAAPI_PICTURE_FLAG_LAST << 1),
173 GST_VAAPI_PICTURE_FLAG_INTER_VIEW = (GST_VAAPI_PICTURE_FLAG_LAST << 2),
174 GST_VAAPI_PICTURE_FLAG_ANCHOR = (GST_VAAPI_PICTURE_FLAG_LAST << 3),
175 GST_VAAPI_PICTURE_FLAG_AU_START = (GST_VAAPI_PICTURE_FLAG_LAST << 4),
176 GST_VAAPI_PICTURE_FLAG_AU_END = (GST_VAAPI_PICTURE_FLAG_LAST << 5),
178 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE = (
179 GST_VAAPI_PICTURE_FLAG_REFERENCE),
180 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE = (
181 GST_VAAPI_PICTURE_FLAG_REFERENCE | GST_VAAPI_PICTURE_FLAG_REFERENCE2),
182 GST_VAAPI_PICTURE_FLAGS_REFERENCE = (
183 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
184 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
/* TRUE if the picture is an IDR picture. */
#define GST_VAAPI_PICTURE_IS_IDR(picture) \
    (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR))

/* TRUE if marked "used for short-term reference" (exactly the base
 * reference bit, without the long-term REFERENCE2 bit). */
#define GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture) \
    ((GST_VAAPI_PICTURE_FLAGS(picture) & \
        GST_VAAPI_PICTURE_FLAGS_REFERENCE) == \
        GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE)

/* TRUE if marked "used for long-term reference" (both reference bits). */
#define GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture) \
    ((GST_VAAPI_PICTURE_FLAGS(picture) & \
        GST_VAAPI_PICTURE_FLAGS_REFERENCE) == \
        GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE)

/* TRUE if the picture may be used for inter-view prediction (MVC). */
#define GST_VAAPI_PICTURE_IS_INTER_VIEW(picture) \
    (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW))

/* TRUE if the picture is an MVC anchor picture. */
#define GST_VAAPI_PICTURE_IS_ANCHOR(picture) \
    (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_ANCHOR))

/* Cast helper to GstVaapiPictureH264. */
#define GST_VAAPI_PICTURE_H264(picture) \
    ((GstVaapiPictureH264 *)(picture))
/* H.264-specific picture: a GstVaapiPicture plus POC and reference-marking
 * bookkeeping.
 * NOTE(review): members appear to be missing from this excerpt — at least
 * `structure` and `field_poc[2]` (both referenced by the helpers below)
 * and the closing `};` — confirm against the complete file. */
struct _GstVaapiPictureH264 {
    GstVaapiPicture base; // parent picture object
    GstH264SliceHdr *last_slice_hdr; // header of the last slice seen for this picture
    gint32 frame_num; // Original frame_num from slice_header()
    gint32 frame_num_wrap; // Temporary for ref pic marking: FrameNumWrap
    gint32 long_term_frame_idx; // Temporary for ref pic marking: LongTermFrameIdx
    gint32 pic_num; // Temporary for ref pic marking: PicNum
    gint32 long_term_pic_num; // Temporary for ref pic marking: LongTermPicNum
    GstVaapiPictureH264 *other_field; // Temporary for ref pic marking: other field in the same frame store
    guint output_flag : 1; // picture is intended for output
    guint output_needed : 1; // picture has not been output yet
224 GST_VAAPI_CODEC_DEFINE_TYPE(GstVaapiPictureH264, gst_vaapi_picture_h264);
227 gst_vaapi_picture_h264_destroy(GstVaapiPictureH264 *picture)
229 gst_vaapi_picture_destroy(GST_VAAPI_PICTURE(picture));
233 gst_vaapi_picture_h264_create(
234 GstVaapiPictureH264 *picture,
235 const GstVaapiCodecObjectConstructorArgs *args
238 if (!gst_vaapi_picture_create(GST_VAAPI_PICTURE(picture), args))
241 picture->field_poc[0] = G_MAXINT32;
242 picture->field_poc[1] = G_MAXINT32;
243 picture->output_needed = FALSE;
247 static inline GstVaapiPictureH264 *
248 gst_vaapi_picture_h264_new(GstVaapiDecoderH264 *decoder)
250 return (GstVaapiPictureH264 *)gst_vaapi_codec_object_new(
251 &GstVaapiPictureH264Class,
252 GST_VAAPI_CODEC_BASE(decoder),
253 NULL, sizeof(VAPictureParameterBufferH264),
259 gst_vaapi_picture_h264_set_reference(
260 GstVaapiPictureH264 *picture,
261 guint reference_flags,
267 GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
268 GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
270 if (!other_field || !(picture = picture->other_field))
272 GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
273 GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
276 static inline GstVaapiPictureH264 *
277 gst_vaapi_picture_h264_new_field(GstVaapiPictureH264 *picture)
279 g_return_val_if_fail(picture, NULL);
281 return (GstVaapiPictureH264 *)gst_vaapi_picture_new_field(&picture->base);
/* ------------------------------------------------------------------------- */
/* --- Frame Buffers (DPB)                                               --- */
/* ------------------------------------------------------------------------- */

/* One DPB entry: holds a decoded frame, or up to two complementary fields.
 * NOTE(review): this excerpt shows only two members; `view_id`,
 * `structure`, `num_buffers` and `output_needed` are referenced by the
 * frame-store helpers below but are not visible here, nor is the closing
 * `};` — confirm against the complete file. */
struct _GstVaapiFrameStore {
    /*< private >*/
    GstVaapiMiniObject parent_instance; // base mini-object (refcounting)
    GstVaapiPictureH264 *buffers[2]; // one frame, or top/bottom field pair
300 gst_vaapi_frame_store_finalize(gpointer object)
302 GstVaapiFrameStore * const fs = object;
305 for (i = 0; i < fs->num_buffers; i++)
306 gst_vaapi_picture_replace(&fs->buffers[i], NULL);
/* Creates a new frame store holding @picture as its first buffer.
 * NOTE(review): lines were dropped from this excerpt — at least the
 * braces, a NULL check on the allocation, the `num_buffers`
 * initialization, the output_needed accounting inside the final `if`, and
 * the trailing `return fs;` — restore from the complete file. */
static GstVaapiFrameStore *
gst_vaapi_frame_store_new(GstVaapiPictureH264 *picture)
    GstVaapiFrameStore *fs;

    /* Local mini-object class: instance size + finalizer */
    static const GstVaapiMiniObjectClass GstVaapiFrameStoreClass = {
        sizeof(GstVaapiFrameStore),
        gst_vaapi_frame_store_finalize

    fs = (GstVaapiFrameStore *)
        gst_vaapi_mini_object_new(&GstVaapiFrameStoreClass);

    fs->view_id = picture->base.view_id;
    fs->structure = picture->structure;
    fs->buffers[0] = gst_vaapi_picture_ref(picture);
    fs->buffers[1] = NULL;
    fs->output_needed = 0;

    if (picture->output_flag) {
        picture->output_needed = TRUE;
/* Adds the second field @picture into the single-buffer frame store @fs
 * and exchanges field POC values so both fields know the full frame POC.
 * NOTE(review): dropped lines in this excerpt — the `static gboolean`
 * declarator, braces, the `guint field;` declaration, the output_needed
 * accounting inside the `if`, and the final `return TRUE;`. */
gst_vaapi_frame_store_add(GstVaapiFrameStore *fs, GstVaapiPictureH264 *picture)
    g_return_val_if_fail(fs->num_buffers == 1, FALSE);
    g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FRAME(picture), FALSE);
    g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture), FALSE);

    gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], picture);
    if (picture->output_flag) {
        picture->output_needed = TRUE;

    /* With both fields present the store now represents a whole frame */
    fs->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;

    /* Copy this field's POC into the first field, and vice versa; a POC of
       G_MAXINT32 means "not set yet" (see gst_vaapi_picture_h264_create) */
    field = picture->structure == GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD ?
        TOP_FIELD : BOTTOM_FIELD;
    g_return_val_if_fail(fs->buffers[0]->field_poc[field] == G_MAXINT32, FALSE);
    fs->buffers[0]->field_poc[field] = picture->field_poc[field];
    g_return_val_if_fail(picture->field_poc[!field] == G_MAXINT32, FALSE);
    picture->field_poc[!field] = fs->buffers[0]->field_poc[!field];
/* Splits a frame store holding one decoded frame into two field pictures
 * (the existing picture becomes the top field; a second field picture is
 * created), used for interlaced streams.
 * NOTE(review): dropped lines — the `static gboolean` declarator, braces,
 * a NULL check on the new field, and the final `return TRUE;`. */
gst_vaapi_frame_store_split_fields(GstVaapiFrameStore *fs)
    GstVaapiPictureH264 * const first_field = fs->buffers[0];
    GstVaapiPictureH264 *second_field;

    g_return_val_if_fail(fs->num_buffers == 1, FALSE);

    first_field->base.structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
    GST_VAAPI_PICTURE_FLAG_SET(first_field, GST_VAAPI_PICTURE_FLAG_INTERLACED);

    second_field = gst_vaapi_picture_h264_new_field(first_field);
    gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], second_field);
    gst_vaapi_picture_unref(second_field);

    /* The second field inherits frame_num, POCs and output state */
    second_field->frame_num = first_field->frame_num;
    second_field->field_poc[0] = first_field->field_poc[0];
    second_field->field_poc[1] = first_field->field_poc[1];
    second_field->output_flag = first_field->output_flag;
    if (second_field->output_flag) {
        second_field->output_needed = TRUE;
392 static inline gboolean
393 gst_vaapi_frame_store_has_frame(GstVaapiFrameStore *fs)
395 return fs->structure == GST_VAAPI_PICTURE_STRUCTURE_FRAME;
398 static inline gboolean
399 gst_vaapi_frame_store_is_complete(GstVaapiFrameStore *fs)
401 return gst_vaapi_frame_store_has_frame(fs) ||
402 GST_VAAPI_PICTURE_IS_ONEFIELD(fs->buffers[0]);
405 static inline gboolean
406 gst_vaapi_frame_store_has_reference(GstVaapiFrameStore *fs)
410 for (i = 0; i < fs->num_buffers; i++) {
411 if (GST_VAAPI_PICTURE_IS_REFERENCE(fs->buffers[i]))
418 gst_vaapi_frame_store_has_inter_view(GstVaapiFrameStore *fs)
422 for (i = 0; i < fs->num_buffers; i++) {
423 if (GST_VAAPI_PICTURE_IS_INTER_VIEW(fs->buffers[i]))
/* Takes a new reference to the frame store. */
#define gst_vaapi_frame_store_ref(fs) \
    gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(fs))

/* Drops a reference to the frame store (finalized on last unref). */
#define gst_vaapi_frame_store_unref(fs) \
    gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(fs))

/* Replaces *old_fs_p with new_fs, adjusting reference counts. */
#define gst_vaapi_frame_store_replace(old_fs_p, new_fs) \
    gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_fs_p), \
        (GstVaapiMiniObject *)(new_fs))
/* ------------------------------------------------------------------------- */
/* --- H.264 Decoder                                                     --- */
/* ------------------------------------------------------------------------- */

/* Cast helper to GstVaapiDecoderH264. */
#define GST_VAAPI_DECODER_H264_CAST(decoder) \
    ((GstVaapiDecoderH264 *)(decoder))

/* Bitmask tracking how much of the stream state has been seen; a picture
 * is decodable once SPS, PPS and at least one slice were parsed. */
enum {
    GST_H264_VIDEO_STATE_GOT_SPS = 1 << 0,
    GST_H264_VIDEO_STATE_GOT_PPS = 1 << 1,
    GST_H264_VIDEO_STATE_GOT_SLICE = 1 << 2,

    GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS = (
        GST_H264_VIDEO_STATE_GOT_SPS |
        GST_H264_VIDEO_STATE_GOT_PPS),
    GST_H264_VIDEO_STATE_VALID_PICTURE = (
        GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS |
        GST_H264_VIDEO_STATE_GOT_SLICE)
};
/* Private decoder state: parser, active parameter sets, DPB, reference
 * lists and POC/frame_num tracking.
 * NOTE(review): members appear to be missing from this excerpt — at least
 * `dpb_count`, `dpb_size`, `dpb_size_max` and `max_views` (all referenced
 * by the DPB helpers below) and the closing `};`. */
struct _GstVaapiDecoderH264Private {
    GstH264NalParser *parser; // NAL unit parser (codecparsers)
    GstVaapiStreamAlignH264 stream_alignment; // input alignment (NALU/AU)
    GstVaapiPictureH264 *current_picture; // picture being decoded
    GstVaapiParserInfoH264 *sps[GST_H264_MAX_SPS_COUNT]; // parsed SPS, by id
    GstVaapiParserInfoH264 *active_sps; // SPS in use for current picture
    GstVaapiParserInfoH264 *pps[GST_H264_MAX_PPS_COUNT]; // parsed PPS, by id
    GstVaapiParserInfoH264 *active_pps; // PPS in use for current picture
    GstVaapiParserInfoH264 *prev_pi; // previously parsed unit
    GstVaapiParserInfoH264 *prev_slice_pi; // previously parsed slice
    GstVaapiFrameStore **prev_frames; // last output frame store, per view
    guint prev_frames_alloc; // allocated length of prev_frames
    GstVaapiFrameStore **dpb; // decoded picture buffer
    GstVaapiProfile profile; // negotiated VA profile
    GstVaapiEntrypoint entrypoint; // VA entrypoint (VLD)
    GstVaapiChromaType chroma_type; // stream chroma format
    GPtrArray *inter_views; // inter-view reference pictures (MVC)
    GstVaapiPictureH264 *short_ref[32]; // short-term reference pictures
    guint short_ref_count;
    GstVaapiPictureH264 *long_ref[32]; // long-term reference pictures
    guint long_ref_count;
    GstVaapiPictureH264 *RefPicList0[32]; // reference list 0 (P/B slices)
    guint RefPicList0_count;
    GstVaapiPictureH264 *RefPicList1[32]; // reference list 1 (B slices)
    guint RefPicList1_count;
    guint nal_length_size; // NALU length field size (avcC mode)
    gint32 field_poc[2]; // 0:TopFieldOrderCnt / 1:BottomFieldOrderCnt
    gint32 poc_msb; // PicOrderCntMsb
    gint32 poc_lsb; // pic_order_cnt_lsb (from slice_header())
    gint32 prev_poc_msb; // prevPicOrderCntMsb
    gint32 prev_poc_lsb; // prevPicOrderCntLsb
    gint32 frame_num_offset; // FrameNumOffset
    gint32 frame_num; // frame_num (from slice_header())
    gint32 prev_frame_num; // prevFrameNum
    gboolean prev_pic_has_mmco5; // prevMmco5Pic
    gboolean prev_pic_structure; // previous picture structure
    guint has_context : 1; // VA context was created
    guint progressive_sequence : 1; // SPS frame_mbs_only-style progressive stream
510 * GstVaapiDecoderH264:
512 * A decoder based on H264.
514 struct _GstVaapiDecoderH264 {
516 GstVaapiDecoder parent_instance;
517 GstVaapiDecoderH264Private priv;
521 * GstVaapiDecoderH264Class:
523 * A decoder class based on H264.
525 struct _GstVaapiDecoderH264Class {
527 GstVaapiDecoderClass parent_class;
531 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
534 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
535 GstVaapiPictureH264 *picture);
537 static inline gboolean
538 is_inter_view_reference_for_next_frames(GstVaapiDecoderH264 *decoder,
539 GstVaapiFrameStore *fs)
541 return is_inter_view_reference_for_next_pictures(decoder, fs->buffers[0]);
544 /* Determines if the supplied profile is one of the MVC set */
546 is_mvc_profile(GstH264Profile profile)
548 return profile == GST_H264_PROFILE_MULTIVIEW_HIGH ||
549 profile == GST_H264_PROFILE_STEREO_HIGH;
552 /* Determines the view_id from the supplied NAL unit */
554 get_view_id(GstH264NalUnit *nalu)
556 return GST_H264_IS_MVC_NALU(nalu) ? nalu->extension.mvc.view_id : 0;
559 /* Determines the view order index (VOIdx) from the supplied view_id */
561 get_view_order_index(GstH264SPS *sps, guint16 view_id)
563 GstH264SPSExtMVC *mvc;
566 if (!sps || sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
569 mvc = &sps->extension.mvc;
570 for (i = 0; i <= mvc->num_views_minus1; i++) {
571 if (mvc->view[i].view_id == view_id)
574 GST_ERROR("failed to find VOIdx from view_id (%d)", view_id);
578 /* Determines NumViews */
580 get_num_views(GstH264SPS *sps)
582 return 1 + (sps->extension_type == GST_H264_NAL_EXTENSION_MVC ?
583 sps->extension.mvc.num_views_minus1 : 0);
/* Get number of reference frames to use: derives max_dec_frame_buffering
 * from the level limits (Table A-1), the VUI bitstream restriction if
 * present, and the MVC view count, then clamps to [num_ref_frames, 16 *
 * views-scale] with a minimum of 1.
 * NOTE(review): lines were dropped from this excerpt — the declarator
 * line, braces, the `else` before the non-L1b level lookup, a `goto`/exit
 * path after the unsupported-level fallback, and the closing braces of the
 * VUI `if`/`switch` — restore from the complete file before compiling. */
get_max_dec_frame_buffering(GstH264SPS *sps)
    guint num_views, max_dpb_frames;
    guint max_dec_frame_buffering, PicSizeMbs;
    GstVaapiLevelH264 level;
    const GstVaapiH264LevelLimits *level_limits;

    /* Table A-1 - Level limits */
    if (G_UNLIKELY(sps->level_idc == 11 && sps->constraint_set3_flag))
        level = GST_VAAPI_LEVEL_H264_L1b;
        level = gst_vaapi_utils_h264_get_level(sps->level_idc);
    level_limits = gst_vaapi_utils_h264_get_level_limits(level);
    if (G_UNLIKELY(!level_limits)) {
        GST_FIXME("unsupported level_idc value (%d)", sps->level_idc);
        max_dec_frame_buffering = 16;

    /* MaxDpbFrames = MaxDpbMbs / picture size in MBs (A.3.1) */
    PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
        (sps->pic_height_in_map_units_minus1 + 1) *
        (sps->frame_mbs_only_flag ? 1 : 2));
    max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;

    /* MVC stereo/multiview streams get twice the DPB budget */
    if (is_mvc_profile(sps->profile_idc))
        max_dec_frame_buffering <<= 1;

    /* VUI parameters may restrict the DPB size further (E.2.1) */
    if (sps->vui_parameters_present_flag) {
        GstH264VUIParams * const vui_params = &sps->vui_parameters;
        if (vui_params->bitstream_restriction_flag)
            max_dec_frame_buffering = vui_params->max_dec_frame_buffering;

        /* Intra-only profiles (constraint_set3) need no reference storage */
        switch (sps->profile_idc) {
        case 44: // CAVLC 4:4:4 Intra profile
        case GST_H264_PROFILE_SCALABLE_HIGH:
        case GST_H264_PROFILE_HIGH:
        case GST_H264_PROFILE_HIGH10:
        case GST_H264_PROFILE_HIGH_422:
        case GST_H264_PROFILE_HIGH_444:
            if (sps->constraint_set3_flag)
                max_dec_frame_buffering = 0;

    /* Clamp to the per-view maximum and to num_ref_frames at minimum */
    num_views = get_num_views(sps);
    max_dpb_frames = 16 * (num_views > 1 ? g_bit_storage(num_views - 1) : 1);
    if (max_dec_frame_buffering > max_dpb_frames)
        max_dec_frame_buffering = max_dpb_frames;
    else if (max_dec_frame_buffering < sps->num_ref_frames)
        max_dec_frame_buffering = sps->num_ref_frames;
    return MAX(1, max_dec_frame_buffering);
644 array_remove_index_fast(void *array, guint *array_length_ptr, guint index)
646 gpointer * const entries = array;
647 guint num_entries = *array_length_ptr;
649 g_return_if_fail(index < num_entries);
651 if (index != --num_entries)
652 entries[index] = entries[num_entries];
653 entries[num_entries] = NULL;
654 *array_length_ptr = num_entries;
/* Order-agnostic removal wrapper (fast path).
 * NOTE(review): the `#if`/`#else`/`#endif` preprocessor lines selecting
 * between this wrapper and the order-preserving variant below were dropped
 * from this excerpt — restore them, otherwise array_remove_index is
 * defined twice. Declarator lines and braces were dropped too. */
array_remove_index(void *array, guint *array_length_ptr, guint index)
    array_remove_index_fast(array, array_length_ptr, index);

/* Order-preserving removal: shifts the tail of the array down by one.
 * NOTE(review): the `guint i;` declaration is missing from this excerpt. */
array_remove_index(void *array, guint *array_length_ptr, guint index)
    gpointer * const entries = array;
    const guint num_entries = *array_length_ptr - 1;

    g_return_if_fail(index <= num_entries);

    for (i = index; i < num_entries; i++)
        entries[i] = entries[i + 1];
    entries[num_entries] = NULL;
    *array_length_ptr = num_entries;

/* Removes element `index` from `array`, updating the paired
 * `array`_count variable in place. */
#define ARRAY_REMOVE_INDEX(array, index) \
    array_remove_index(array, &array##_count, index)
/* Removes the frame store at `index` from the DPB: shifts entries down in
 * strict-ordering (debug) mode, otherwise swaps in the last entry.
 * NOTE(review): the `static void` declarator line and the closing brace of
 * the strict-ordering branch were dropped from this excerpt. */
dpb_remove_index(GstVaapiDecoderH264 *decoder, guint index)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    guint i, num_frames = --priv->dpb_count;

    if (USE_STRICT_DPB_ORDERING) {
        for (i = index; i < num_frames; i++)
            gst_vaapi_frame_store_replace(&priv->dpb[i], priv->dpb[i + 1]);
    else if (index != num_frames)
        gst_vaapi_frame_store_replace(&priv->dpb[index], priv->dpb[num_frames]);
    gst_vaapi_frame_store_replace(&priv->dpb[num_frames], NULL);
/* Pushes a complete frame store downstream for display; clears all
 * output_needed flags so it will not be output again.
 * NOTE(review): the `static gboolean` declarator, braces and the
 * `return FALSE;` on the incomplete-store path were dropped from this
 * excerpt. */
dpb_output(GstVaapiDecoderH264 *decoder, GstVaapiFrameStore *fs)
    GstVaapiPictureH264 *picture;

    g_return_val_if_fail(fs != NULL, FALSE);

    /* An incomplete store (missing second field) cannot be output yet */
    if (!gst_vaapi_frame_store_is_complete(fs))

    picture = fs->buffers[0];
    g_return_val_if_fail(picture != NULL, FALSE);
    picture->output_needed = FALSE;

    if (fs->num_buffers > 1) {
        picture = fs->buffers[1];
        g_return_val_if_fail(picture != NULL, FALSE);
        picture->output_needed = FALSE;

    fs->output_needed = 0;
    /* Outputting either field outputs the underlying frame surface */
    return gst_vaapi_picture_output(GST_VAAPI_PICTURE_CAST(picture));
723 dpb_evict(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture, guint i)
725 GstVaapiDecoderH264Private * const priv = &decoder->priv;
726 GstVaapiFrameStore * const fs = priv->dpb[i];
728 if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
729 dpb_remove_index(decoder, i);
/* Finds the frame store holding the supplied picture; returns its DPB
 * index, or -1 when not found.
 * NOTE(review): dropped lines — the `static gint` declarator, braces, the
 * `guint i, j;` declarations, the `return i;` on match and the trailing
 * `return -1;`. */
dpb_find_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;

    for (i = 0; i < priv->dpb_count; i++) {
        GstVaapiFrameStore * const fs = priv->dpb[i];
        for (j = 0; j < fs->num_buffers; j++) {
            if (fs->buffers[j] == picture)
/* Finds the picture with the lowest POC that needs to be output (C.4.5.3
 * "bumping" candidate); ties on POC are broken by the lowest VOC. When
 * @picture is non-NULL, only stores of the same view are considered.
 * Returns the DPB index (and the picture via @found_picture_ptr), or -1.
 * NOTE(review): the `static gint` declarator, braces and the `continue;`
 * statements of the two skip conditions were dropped from this excerpt. */
dpb_find_lowest_poc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
    GstVaapiPictureH264 **found_picture_ptr)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiPictureH264 *found_picture = NULL;
    guint i, j, found_index;

    for (i = 0; i < priv->dpb_count; i++) {
        GstVaapiFrameStore * const fs = priv->dpb[i];
        if (!fs->output_needed)
        if (picture && picture->base.view_id != fs->view_id)
        for (j = 0; j < fs->num_buffers; j++) {
            GstVaapiPictureH264 * const pic = fs->buffers[j];
            if (!pic->output_needed)
            if (!found_picture || found_picture->base.poc > pic->base.poc ||
                (found_picture->base.poc == pic->base.poc &&
                 found_picture->base.voc > pic->base.voc))
                found_picture = pic, found_index = i;

    if (found_picture_ptr)
        *found_picture_ptr = found_picture;
    return found_picture ? found_index : -1;
/* Finds the picture with the lowest VOC (view order index) that needs to
 * be output and belongs to the same access unit (same POC) as @picture,
 * excluding @picture's own view. Returns the DPB index, or -1.
 * NOTE(review): the `static gint` declarator, braces and the `continue;`
 * statements of the skip conditions were dropped from this excerpt. */
dpb_find_lowest_voc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
    GstVaapiPictureH264 **found_picture_ptr)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiPictureH264 *found_picture = NULL;
    guint i, j, found_index;

    for (i = 0; i < priv->dpb_count; i++) {
        GstVaapiFrameStore * const fs = priv->dpb[i];
        if (!fs->output_needed || fs->view_id == picture->base.view_id)
        for (j = 0; j < fs->num_buffers; j++) {
            GstVaapiPictureH264 * const pic = fs->buffers[j];
            if (!pic->output_needed || pic->base.poc != picture->base.poc)
            if (!found_picture || found_picture->base.voc > pic->base.voc)
                found_picture = pic, found_index = i;

    if (found_picture_ptr)
        *found_picture_ptr = found_picture;
    return found_picture ? found_index : -1;
/* Outputs, in VOC order, the view components of the same access unit as
 * @picture whose VOC is below @voc. No-op for single-view streams.
 * NOTE(review): dropped lines — the `static gboolean` declarator, braces,
 * the local `found_index`/`success` declarations, the loop construct
 * around the find/output/evict sequence, and the return statements. */
dpb_output_other_views(GstVaapiDecoderH264 *decoder,
    GstVaapiPictureH264 *picture, guint voc)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiPictureH264 *found_picture;

    if (priv->max_views == 1)

    /* Emit all other view components that were in the same access
       unit than the picture we have just found */
    found_picture = picture;
        found_index = dpb_find_lowest_voc(decoder, found_picture,
        if (found_index < 0 || found_picture->base.voc >= voc)
        success = dpb_output(decoder, priv->dpb[found_index]);
        dpb_evict(decoder, found_picture, found_index);
/* "Bumps" the DPB: outputs the lowest-POC picture awaiting output (and
 * its access-unit companions from other views), then evicts stores no
 * longer needed. Returns whether a picture was output.
 * NOTE(review): dropped lines — the `static gboolean` declarator, braces,
 * the `found_index`/`success` declarations, the `found_index < 0` early
 * return, and the final `return success;`. */
dpb_bump(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiPictureH264 *found_picture;

    found_index = dpb_find_lowest_poc(decoder, picture, &found_picture);

    /* Output complete access units first (lower-VOC views) */
    if (picture && picture->base.poc != found_picture->base.poc)
        dpb_output_other_views(decoder, found_picture, found_picture->base.voc);

    success = dpb_output(decoder, priv->dpb[found_index]);
    dpb_evict(decoder, found_picture, found_index);
    if (priv->max_views == 1)

    /* Then the remaining (higher-VOC) views of the same access unit */
    if (picture && picture->base.poc != found_picture->base.poc)
        dpb_output_other_views(decoder, found_picture, G_MAXUINT32);
/* Clears the DPB: drops every frame store (or only those matching
 * @picture's view when @picture is non-NULL), compacts the remaining
 * entries, and resets the per-view previous-frame cache.
 * NOTE(review): dropped lines — the `static void` declarator, braces, the
 * `guint i, n;` declarations, `continue;` statements, the hole test inside
 * the compaction loop and the `priv->dpb_count = n;` update. */
dpb_clear(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;

    for (i = 0; i < priv->dpb_count; i++) {
        if (picture && picture->base.view_id != priv->dpb[i]->view_id)
        gst_vaapi_frame_store_replace(&priv->dpb[i], NULL);

    /* Compact the resulting DPB, i.e. remove holes */
    for (i = 0, n = 0; i < priv->dpb_count; i++) {
            priv->dpb[n] = priv->dpb[i];

    /* Clear previous frame buffers only if this is a "flush-all" operation,
       or if the picture is the first one in the access unit */
    if (priv->prev_frames && (!picture ||
            GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
                GST_VAAPI_PICTURE_FLAG_AU_START))) {
        for (i = 0; i < priv->max_views; i++)
            gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
/* Flushes the DPB: marks broken (half-decoded) frames as single-field so
 * they become outputtable, bumps everything out, then clears the DPB.
 * NOTE(review): dropped lines — the `static void` declarator, braces, the
 * `guint i;` declaration, the tail of the "single field" comment, a
 * `continue;` in the loop, and the empty body (`;`) of the while loop. */
dpb_flush(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;

    /* Detect broken frames and mark them as having a single field if
       needed, so they can still be output below */
    for (i = 0; i < priv->dpb_count; i++) {
        GstVaapiFrameStore * const fs = priv->dpb[i];
        if (!fs->output_needed || gst_vaapi_frame_store_is_complete(fs))
        GST_VAAPI_PICTURE_FLAG_SET(fs->buffers[0],
            GST_VAAPI_PICTURE_FLAG_ONEFIELD);

    /* Output any frame remaining in DPB */
    while (dpb_bump(decoder, picture))
    dpb_clear(decoder, picture);
/* Removes inter-view-only reference components of the current access unit
 * that are no longer needed (other views, not awaiting output, not
 * reference, and — unless this is the last picture of the AU — not needed
 * as inter-view reference for upcoming frames).
 * NOTE(review): dropped lines — the `static void` declarator, braces, the
 * `guint i = 0;` initialization, part of the is_last_picture condition,
 * and the `else i++;` advancing the loop when nothing is removed. */
dpb_prune_mvc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    const gboolean is_last_picture = /* in the access unit */
        GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);

    // Remove all unused inter-view only reference components of the current AU
    while (i < priv->dpb_count) {
        GstVaapiFrameStore * const fs = priv->dpb[i];
        if (fs->view_id != picture->base.view_id &&
            !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs) &&
            !is_inter_view_reference_for_next_frames(decoder, fs)))
            dpb_remove_index(decoder, i);
/* Stores the decoded @picture into the DPB, implementing C.4.5.1 (for
 * reference pictures) and C.4.5.2 (for non-reference pictures): pairs the
 * second field with its first field when possible, otherwise creates a
 * new frame store, bumping the DPB when full. Returns TRUE on success.
 * NOTE(review): many short lines were dropped from this excerpt — the
 * `static gboolean` declarator, braces, `guint i;`, several `return
 * FALSE;`/`return TRUE;` statements, the `else i++;` of the pruning loop
 * and the `else { ... }` wrapper around the C.4.5.2 branch — restore from
 * the complete file before compiling. */
dpb_add(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiFrameStore *fs;

    if (priv->max_views > 1)
        dpb_prune_mvc(decoder, picture);

    // Remove all unused pictures
    if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
        while (i < priv->dpb_count) {
            GstVaapiFrameStore * const fs = priv->dpb[i];
            if (fs->view_id == picture->base.view_id &&
                !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
                dpb_remove_index(decoder, i);

    // Check if picture is the second field and the first field is still in DPB
    if (GST_VAAPI_PICTURE_IS_INTERLACED(picture) &&
        !GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture)) {
        const gint found_index = dpb_find_picture(decoder,
            GST_VAAPI_PICTURE_H264(picture->base.parent_picture));
        if (found_index >= 0)
            return gst_vaapi_frame_store_add(priv->dpb[found_index], picture);

        // ... also check the previous picture that was immediately output
        fs = priv->prev_frames[picture->base.voc];
        if (fs && &fs->buffers[0]->base == picture->base.parent_picture) {
            if (!gst_vaapi_frame_store_add(fs, picture))
            return dpb_output(decoder, fs);

    // Create new frame store, and split fields if necessary
    fs = gst_vaapi_frame_store_new(picture);
    gst_vaapi_frame_store_replace(&priv->prev_frames[picture->base.voc], fs);
    gst_vaapi_frame_store_unref(fs);

    if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame(fs)) {
        if (!gst_vaapi_frame_store_split_fields(fs))

    // C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
    if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
        while (priv->dpb_count == priv->dpb_size) {
            if (!dpb_bump(decoder, picture))

    // C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
        const gboolean StoreInterViewOnlyRefFlag =
            !GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
                GST_VAAPI_PICTURE_FLAG_AU_END) &&
            GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
                GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
        if (!picture->output_flag && !StoreInterViewOnlyRefFlag)
        while (priv->dpb_count == priv->dpb_size) {
            GstVaapiPictureH264 *found_picture;
            if (!StoreInterViewOnlyRefFlag) {
                /* Output immediately if nothing older awaits output */
                if (dpb_find_lowest_poc(decoder, picture, &found_picture) < 0 ||
                    found_picture->base.poc > picture->base.poc)
                    return dpb_output(decoder, fs);
            if (!dpb_bump(decoder, picture))

    gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
/* Grows (never shrinks) the DPB array to hold @dpb_size frame stores,
 * zeroing the newly added slots.
 * NOTE(review): dropped lines — the declarator line, braces, the NULL
 * check on the g_try_realloc_n result and the return statement(s). */
dpb_reset(GstVaapiDecoderH264 *decoder, guint dpb_size)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;

    if (dpb_size > priv->dpb_size_max) {
        priv->dpb = g_try_realloc_n(priv->dpb, dpb_size, sizeof(*priv->dpb));
        memset(&priv->dpb[priv->dpb_size_max], 0,
            (dpb_size - priv->dpb_size_max) * sizeof(*priv->dpb));
        priv->dpb_size_max = dpb_size;
    priv->dpb_size = dpb_size;

    GST_DEBUG("DPB size %u", priv->dpb_size);
1042 unref_inter_view(GstVaapiPictureH264 *picture)
1046 GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
1047 gst_vaapi_picture_unref(picture);
/* Resets MVC resources: (re)creates the inter-view reference array and
 * resizes the per-view previous-frame cache to priv->max_views entries.
 * NOTE(review): dropped lines — the declarator line, braces, the `guint
 * i;` declaration, `return FALSE;` on allocation failure and the final
 * `return TRUE;`. */
mvc_reset(GstVaapiDecoderH264 *decoder)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;

    // Resize array of inter-view references
    if (!priv->inter_views) {
        priv->inter_views = g_ptr_array_new_full(priv->max_views,
            (GDestroyNotify)unref_inter_view);
        if (!priv->inter_views)

    // Resize array of previous frame buffers
    /* Release stores for views beyond the new count before shrinking */
    for (i = priv->max_views; i < priv->prev_frames_alloc; i++)
        gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);

    priv->prev_frames = g_try_realloc_n(priv->prev_frames, priv->max_views,
        sizeof(*priv->prev_frames));
    if (!priv->prev_frames) {
        priv->prev_frames_alloc = 0;
    /* Zero any newly grown slots */
    for (i = priv->prev_frames_alloc; i < priv->max_views; i++)
        priv->prev_frames[i] = NULL;
    priv->prev_frames_alloc = priv->max_views;
1081 static GstVaapiDecoderStatus
1082 get_status(GstH264ParserResult result)
1084 GstVaapiDecoderStatus status;
1087 case GST_H264_PARSER_OK:
1088 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
1090 case GST_H264_PARSER_NO_NAL_END:
1091 status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
1093 case GST_H264_PARSER_ERROR:
1094 status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
1097 status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
/* Releases per-stream state: current picture, cached parser infos, the
 * DPB contents, MVC inter-view references and the NAL parser. The SPS/PPS
 * caches are kept (released in _destroy).
 * NOTE(review): the `static void` declarator and braces were dropped from
 * this excerpt. */
gst_vaapi_decoder_h264_close(GstVaapiDecoderH264 *decoder)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;

    gst_vaapi_picture_replace(&priv->current_picture, NULL);
    gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, NULL);
    gst_vaapi_parser_info_h264_replace(&priv->prev_pi, NULL);

    dpb_clear(decoder, NULL);

    if (priv->inter_views) {
        g_ptr_array_unref(priv->inter_views);
        priv->inter_views = NULL;

    /* gst_h264_nal_parser_free() handles NULL? — it is only reached with a
       live parser in upstream; kept as-is */
    gst_h264_nal_parser_free(priv->parser);
    priv->parser = NULL;
1126 gst_vaapi_decoder_h264_open(GstVaapiDecoderH264 *decoder)
1128 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1130 gst_vaapi_decoder_h264_close(decoder);
1132 priv->parser = gst_h264_nal_parser_new();
/* Destructor: closes the decoder, then frees the DPB array, the per-view
 * previous-frame cache and all cached SPS/PPS parser infos.
 * NOTE(review): dropped lines — the `static void` declarator, braces, the
 * `guint i;` declaration, and the g_free/NULL-reset of `priv->dpb` with
 * its dpb_size counters (only prev_frames cleanup is visible). */
gst_vaapi_decoder_h264_destroy(GstVaapiDecoder *base_decoder)
    GstVaapiDecoderH264 * const decoder =
        GST_VAAPI_DECODER_H264_CAST(base_decoder);
    GstVaapiDecoderH264Private * const priv = &decoder->priv;

    gst_vaapi_decoder_h264_close(decoder);

    g_free(priv->prev_frames);
    priv->prev_frames = NULL;
    priv->prev_frames_alloc = 0;

    for (i = 0; i < G_N_ELEMENTS(priv->pps); i++)
        gst_vaapi_parser_info_h264_replace(&priv->pps[i], NULL);
    gst_vaapi_parser_info_h264_replace(&priv->active_pps, NULL);

    for (i = 0; i < G_N_ELEMENTS(priv->sps); i++)
        gst_vaapi_parser_info_h264_replace(&priv->sps[i], NULL);
    gst_vaapi_parser_info_h264_replace(&priv->active_sps, NULL);
/* GstVaapiDecoder vfunc: one-time initialization of default private
   state before any bitstream data has been seen. */
1166 gst_vaapi_decoder_h264_create(GstVaapiDecoder *base_decoder)
1168 GstVaapiDecoderH264 * const decoder =
1169 GST_VAAPI_DECODER_H264_CAST(base_decoder);
1170 GstVaapiDecoderH264Private * const priv = &decoder->priv;
/* No profile known yet; slice-level (VLD) decoding entrypoint;
   4:2:0 chroma and progressive content assumed until an SPS says
   otherwise */
1172 priv->profile = GST_VAAPI_PROFILE_UNKNOWN;
1173 priv->entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
1174 priv->chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
1175 priv->prev_pic_structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
1176 priv->progressive_sequence = TRUE;
1180 /* Activates the supplied PPS */
1182 ensure_pps(GstVaapiDecoderH264 *decoder, GstH264PPS *pps)
1184 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1185 GstVaapiParserInfoH264 * const pi = priv->pps[pps->id];
1187 gst_vaapi_parser_info_h264_replace(&priv->active_pps, pi);
1188 return pi ? &pi->data.pps : NULL;
1191 /* Returns the active PPS */
1192 static inline GstH264PPS *
1193 get_pps(GstVaapiDecoderH264 *decoder)
1195 GstVaapiParserInfoH264 * const pi = decoder->priv.active_pps;
1197 return pi ? &pi->data.pps : NULL;
1200 /* Activate the supplied SPS */
1202 ensure_sps(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1204 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1205 GstVaapiParserInfoH264 * const pi = priv->sps[sps->id];
1207 gst_vaapi_parser_info_h264_replace(&priv->active_sps, pi);
1208 return pi ? &pi->data.sps : NULL;
1211 /* Returns the active SPS */
1212 static inline GstH264SPS *
1213 get_sps(GstVaapiDecoderH264 *decoder)
1215 GstVaapiParserInfoH264 * const pi = decoder->priv.active_sps;
1217 return pi ? &pi->data.sps : NULL;
1221 fill_profiles(GstVaapiProfile profiles[16], guint *n_profiles_ptr,
1222 GstVaapiProfile profile)
1224 guint n_profiles = *n_profiles_ptr;
1226 profiles[n_profiles++] = profile;
1228 case GST_VAAPI_PROFILE_H264_MAIN:
1229 profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
1234 *n_profiles_ptr = n_profiles;
1237 /* Fills in compatible profiles for MVC decoding */
/* If the driver is known to decode MVC streams via plain High profile
   (slice-level decoding), add High to the candidate list. The list of
   such drivers is matched against the VA display vendor string.
   NOTE(review): the declaration of `struct map` (lines ~1246-1249) is
   elided here; from usage it has `str` and `str_len` members -- confirm
   against the full file. */
1239 fill_profiles_mvc(GstVaapiDecoderH264 *decoder, GstVaapiProfile profiles[16],
1240 guint *n_profiles_ptr, guint dpb_size)
1242 const gchar * const vendor_string =
1243 gst_vaapi_display_get_vendor_string(GST_VAAPI_DECODER_DISPLAY(decoder));
1245 gboolean add_high_profile = FALSE;
1250 const struct map *m;
1252 // Drivers that support slice level decoding
1253 if (vendor_string && dpb_size <= 16) {
1254 static const struct map drv_names[] = {
1255 { "Intel i965 driver", 17 },
/* Case-insensitive prefix match of the vendor string; stop at the
   first hit */
1258 for (m = drv_names; m->str != NULL && !add_high_profile; m++) {
1259 if (g_ascii_strncasecmp(vendor_string, m->str, m->str_len) == 0)
1260 add_high_profile = TRUE;
1264 if (add_high_profile)
1265 fill_profiles(profiles, n_profiles_ptr, GST_VAAPI_PROFILE_H264_HIGH);
/* Derives the VA profile to use from sps->profile_idc, builds a list of
   compatible fallback profiles (per the A.2.x compatibility rules and
   the MVC special cases), and returns the first one the display can
   actually decode -- or GST_VAAPI_PROFILE_UNKNOWN.
   NOTE(review): break statements and some guards are elided from this
   extraction; code left untouched. */
1268 static GstVaapiProfile
1269 get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps, guint dpb_size)
1271 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1272 GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(decoder);
1273 GstVaapiProfile profile, profiles[4];
1274 guint i, n_profiles = 0;
1276 profile = gst_vaapi_utils_h264_get_profile(sps->profile_idc);
1278 return GST_VAAPI_PROFILE_UNKNOWN;
/* The preferred profile always goes first; compatible fallbacks are
   appended per-profile below */
1280 fill_profiles(profiles, &n_profiles, profile);
1282 case GST_VAAPI_PROFILE_H264_BASELINE:
1283 if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1284 fill_profiles(profiles, &n_profiles,
1285 GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE);
1286 fill_profiles(profiles, &n_profiles,
1287 GST_VAAPI_PROFILE_H264_MAIN);
1290 case GST_VAAPI_PROFILE_H264_EXTENDED:
1291 if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1292 fill_profiles(profiles, &n_profiles,
1293 GST_VAAPI_PROFILE_H264_MAIN);
1296 case GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH:
/* Two views can also be decoded with the Stereo High profile */
1297 if (priv->max_views == 2) {
1298 fill_profiles(profiles, &n_profiles,
1299 GST_VAAPI_PROFILE_H264_STEREO_HIGH);
1301 fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1303 case GST_VAAPI_PROFILE_H264_STEREO_HIGH:
1304 if (sps->frame_mbs_only_flag) {
1305 fill_profiles(profiles, &n_profiles,
1306 GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH);
1308 fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1314 /* If the preferred profile (profiles[0]) matches one that we already
1315 found, then just return it now instead of searching for it again */
1316 if (profiles[0] == priv->profile)
1317 return priv->profile;
/* Otherwise pick the first candidate the display supports for our
   entrypoint */
1319 for (i = 0; i < n_profiles; i++) {
1320 if (gst_vaapi_display_has_decoder(display, profiles[i], priv->entrypoint))
1323 return GST_VAAPI_PROFILE_UNKNOWN;
/* (Re)creates the VA decode context when sequence parameters change:
   number of views, DPB size, profile, chroma format or coded size.
   Also (re)initializes the DPB and MVC state when the context is
   recreated. Returns a decoder status code. */
1326 static GstVaapiDecoderStatus
1327 ensure_context(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1329 GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
1330 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1331 GstVaapiContextInfo info;
1332 GstVaapiProfile profile;
1333 GstVaapiChromaType chroma_type;
1334 gboolean reset_context = FALSE;
1335 guint mb_width, mb_height, dpb_size, num_views;
/* Track the maximum number of views seen so far (MVC) */
1337 num_views = get_num_views(sps);
1338 if (priv->max_views < num_views) {
1339 priv->max_views = num_views;
1340 GST_DEBUG("maximum number of views changed to %u", num_views);
/* A larger DPB requirement forces a context reset */
1343 dpb_size = get_max_dec_frame_buffering(sps);
1344 if (priv->dpb_size < dpb_size) {
1345 GST_DEBUG("DPB size increased");
1346 reset_context = TRUE;
1349 profile = get_profile(decoder, sps, dpb_size);
1351 GST_ERROR("unsupported profile_idc %u", sps->profile_idc);
1352 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
/* For MVC (max_views > 1) the profile may legitimately differ per
   view, so only a single-view profile change resets the context */
1355 if (!priv->profile || (priv->profile != profile && priv->max_views == 1)) {
1356 GST_DEBUG("profile changed");
1357 reset_context = TRUE;
1358 priv->profile = profile;
1361 chroma_type = gst_vaapi_utils_h264_get_chroma_type(sps->chroma_format_idc);
1363 GST_ERROR("unsupported chroma_format_idc %u", sps->chroma_format_idc);
1364 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1367 if (priv->chroma_type != chroma_type) {
1368 GST_DEBUG("chroma format changed");
1369 reset_context = TRUE;
1370 priv->chroma_type = chroma_type;
/* Coded size in macroblocks; map units are doubled for interlaced
   (field-coded) sequences */
1373 mb_width = sps->pic_width_in_mbs_minus1 + 1;
1374 mb_height = (sps->pic_height_in_map_units_minus1 + 1) <<
1375 !sps->frame_mbs_only_flag;
1376 if (priv->mb_width != mb_width || priv->mb_height != mb_height) {
1377 GST_DEBUG("size changed");
1378 reset_context = TRUE;
1379 priv->mb_width = mb_width;
1380 priv->mb_height = mb_height;
1383 priv->progressive_sequence = sps->frame_mbs_only_flag;
1384 gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
1386 gst_vaapi_decoder_set_pixel_aspect_ratio(
1388 sps->vui_parameters.par_n,
1389 sps->vui_parameters.par_d
/* Nothing relevant changed and we already have a context: done */
1392 if (!reset_context && priv->has_context)
1393 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1395 /* XXX: fix surface size when cropping is implemented */
1396 info.profile = priv->profile;
1397 info.entrypoint = priv->entrypoint;
1398 info.chroma_type = priv->chroma_type;
1399 info.width = sps->width;
1400 info.height = sps->height;
1401 info.ref_frames = dpb_size;
1403 if (!gst_vaapi_decoder_ensure_context(GST_VAAPI_DECODER(decoder), &info))
1404 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1405 priv->has_context = TRUE;
/* Fresh context implies a fresh DPB ... */
1408 if (!dpb_reset(decoder, dpb_size))
1409 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1411 /* Reset MVC data */
1412 if (!mvc_reset(decoder))
1413 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1414 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1418 fill_iq_matrix_4x4(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1419 const GstH264SPS *sps)
1423 /* There are always 6 4x4 scaling lists */
1424 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4) == 6);
1425 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4[0]) == 16);
1427 for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++)
1428 gst_h264_quant_matrix_4x4_get_raster_from_zigzag(
1429 iq_matrix->ScalingList4x4[i], pps->scaling_lists_4x4[i]);
1433 fill_iq_matrix_8x8(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1434 const GstH264SPS *sps)
1438 /* If chroma_format_idc != 3, there are up to 2 8x8 scaling lists */
1439 if (!pps->transform_8x8_mode_flag)
1442 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8) >= 2);
1443 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8[0]) == 64);
1445 n = (sps->chroma_format_idc != 3) ? 2 : 6;
1446 for (i = 0; i < n; i++) {
1447 gst_h264_quant_matrix_8x8_get_raster_from_zigzag(
1448 iq_matrix->ScalingList8x8[i], pps->scaling_lists_8x8[i]);
1452 static GstVaapiDecoderStatus
1453 ensure_quant_matrix(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
1455 GstVaapiPicture * const base_picture = &picture->base;
1456 GstH264PPS * const pps = get_pps(decoder);
1457 GstH264SPS * const sps = get_sps(decoder);
1458 VAIQMatrixBufferH264 *iq_matrix;
1460 base_picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(H264, decoder);
1461 if (!base_picture->iq_matrix) {
1462 GST_ERROR("failed to allocate IQ matrix");
1463 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1465 iq_matrix = base_picture->iq_matrix->param;
1467 /* XXX: we can only support 4:2:0 or 4:2:2 since ScalingLists8x8[]
1468 is not large enough to hold lists for 4:4:4 */
1469 if (sps->chroma_format_idc == 3)
1470 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1472 fill_iq_matrix_4x4(iq_matrix, pps, sps);
1473 fill_iq_matrix_8x8(iq_matrix, pps, sps);
1475 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1478 static inline gboolean
1479 is_valid_state(guint state, guint ref_state)
1481 return (state & ref_state) == ref_state;
/* Finishes the picture currently being decoded: submits it to VA,
   applies reference picture marking (8.2.5) and stores it into the DPB.
   The decoder_state bitmask is reset in all exits. NOTE(review): the
   goto/error labels connecting the failure paths (lines ~1496-1511) are
   elided in this extraction; code left untouched. */
1484 static GstVaapiDecoderStatus
1485 decode_current_picture(GstVaapiDecoderH264 *decoder)
1487 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1488 GstVaapiPictureH264 * const picture = priv->current_picture;
/* Nothing to do unless we accumulated a complete, valid picture */
1490 if (!is_valid_state(priv->decoder_state, GST_H264_VIDEO_STATE_VALID_PICTURE))
1492 priv->decoder_state = 0;
1495 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Submit, mark references, store in DPB -- each step may fail */
1497 if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
1499 if (!exec_ref_pic_marking(decoder, picture))
1501 if (!dpb_add(decoder, picture))
1503 gst_vaapi_picture_replace(&priv->current_picture, NULL);
1504 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1507 /* XXX: fix for cases where first field failed to be decoded */
1508 gst_vaapi_picture_replace(&priv->current_picture, NULL);
1509 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1512 priv->decoder_state = 0;
1513 return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
1516 static GstVaapiDecoderStatus
1517 parse_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1519 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1520 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1521 GstH264SPS * const sps = &pi->data.sps;
1522 GstH264ParserResult result;
1524 GST_DEBUG("parse SPS");
1526 priv->parser_state = 0;
1528 /* Variables that don't have inferred values per the H.264
1529 standard but that should get a default value anyway */
1530 sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1532 result = gst_h264_parser_parse_sps(priv->parser, &pi->nalu, sps, TRUE);
1533 if (result != GST_H264_PARSER_OK)
1534 return get_status(result);
1536 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1537 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1540 static GstVaapiDecoderStatus
1541 parse_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1543 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1544 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1545 GstH264SPS * const sps = &pi->data.sps;
1546 GstH264ParserResult result;
1548 GST_DEBUG("parse subset SPS");
1550 /* Variables that don't have inferred values per the H.264
1551 standard but that should get a default value anyway */
1552 sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1554 result = gst_h264_parser_parse_subset_sps(priv->parser, &pi->nalu, sps,
1556 if (result != GST_H264_PARSER_OK)
1557 return get_status(result);
1559 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1560 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1563 static GstVaapiDecoderStatus
1564 parse_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1566 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1567 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1568 GstH264PPS * const pps = &pi->data.pps;
1569 GstH264ParserResult result;
1571 GST_DEBUG("parse PPS");
1573 priv->parser_state &= GST_H264_VIDEO_STATE_GOT_SPS;
1575 /* Variables that don't have inferred values per the H.264
1576 standard but that should get a default value anyway */
1577 pps->slice_group_map_type = 0;
1578 pps->slice_group_change_rate_minus1 = 0;
1580 result = gst_h264_parser_parse_pps(priv->parser, &pi->nalu, pps);
1581 if (result != GST_H264_PARSER_OK)
1582 return get_status(result);
1584 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_PPS;
1585 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1588 static GstVaapiDecoderStatus
1589 parse_sei(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1591 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1592 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1593 GArray ** const sei_ptr = &pi->data.sei;
1594 GstH264ParserResult result;
1596 GST_DEBUG("parse SEI");
1598 result = gst_h264_parser_parse_sei(priv->parser, &pi->nalu, sei_ptr);
1599 if (result != GST_H264_PARSER_OK) {
1600 GST_WARNING("failed to parse SEI messages");
1601 return get_status(result);
1603 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses a slice header NAL unit. For MVC, the NAL extension of an
   immediately preceding Prefix NAL unit is propagated onto this slice,
   or sane defaults are inferred (H.7.4.1.1) when the prefix is absent.
   On success the slice's view_id/voc are derived and the GOT_SLICE bit
   is set. NOTE(review): the declaration of `sps` (around line 1613) and
   some block-closing lines are elided in this extraction. */
1606 static GstVaapiDecoderStatus
1607 parse_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1609 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1610 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1611 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
1612 GstH264NalUnit * const nalu = &pi->nalu;
1614 GstH264ParserResult result;
1616 GST_DEBUG("parse slice");
/* A new slice only keeps the SPS/PPS knowledge */
1618 priv->parser_state &= (GST_H264_VIDEO_STATE_GOT_SPS|
1619 GST_H264_VIDEO_STATE_GOT_PPS);
1621 /* Propagate Prefix NAL unit info, if necessary */
1622 switch (nalu->type) {
1623 case GST_H264_NAL_SLICE:
1624 case GST_H264_NAL_SLICE_IDR: {
1625 GstVaapiParserInfoH264 * const prev_pi = priv->prev_pi;
1626 if (prev_pi && prev_pi->nalu.type == GST_H264_NAL_PREFIX_UNIT) {
1627 /* MVC sequences shall have a Prefix NAL unit immediately
1628 preceding this NAL unit */
1629 pi->nalu.extension_type = prev_pi->nalu.extension_type;
1630 pi->nalu.extension = prev_pi->nalu.extension;
1633 /* In the very unlikely case there is no Prefix NAL unit
1634 immediately preceding this NAL unit, try to infer some
1635 defaults (H.7.4.1.1) */
1636 GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
1637 mvc->non_idr_flag = !(nalu->type == GST_H264_NAL_SLICE_IDR);
1638 nalu->idr_pic_flag = !mvc->non_idr_flag;
1639 mvc->priority_id = 0;
1641 mvc->temporal_id = 0;
1642 mvc->anchor_pic_flag = 0;
1643 mvc->inter_view_flag = 1;
1649 /* Variables that don't have inferred values per the H.264
1650 standard but that should get a default value anyway */
1651 slice_hdr->cabac_init_idc = 0;
1652 slice_hdr->direct_spatial_mv_pred_flag = 0;
1654 result = gst_h264_parser_parse_slice_hdr(priv->parser, &pi->nalu,
1655 slice_hdr, TRUE, TRUE);
1656 if (result != GST_H264_PARSER_OK)
1657 return get_status(result);
1659 sps = slice_hdr->pps->sequence;
1661 /* Update MVC data */
1662 pi->view_id = get_view_id(&pi->nalu);
1663 pi->voc = get_view_order_index(sps, pi->view_id);
1665 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
1666 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1669 static GstVaapiDecoderStatus
1670 decode_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1672 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1673 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1674 GstH264SPS * const sps = &pi->data.sps;
1676 GST_DEBUG("decode SPS");
1678 gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1679 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1682 static GstVaapiDecoderStatus
1683 decode_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1685 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1686 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1687 GstH264SPS * const sps = &pi->data.sps;
1689 GST_DEBUG("decode subset SPS");
1691 gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1692 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1695 static GstVaapiDecoderStatus
1696 decode_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1698 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1699 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1700 GstH264PPS * const pps = &pi->data.pps;
1702 GST_DEBUG("decode PPS");
1704 gst_vaapi_parser_info_h264_replace(&priv->pps[pps->id], pi);
1705 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1708 static GstVaapiDecoderStatus
1709 decode_sequence_end(GstVaapiDecoderH264 *decoder)
1711 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1712 GstVaapiDecoderStatus status;
1714 GST_DEBUG("decode sequence-end");
1716 status = decode_current_picture(decoder);
1717 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
1720 dpb_flush(decoder, NULL);
1722 /* Reset defaults, should there be a new sequence available next */
1723 priv->max_views = 1;
1724 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1727 /* 8.2.1.1 - Decoding process for picture order count type 0 */
/* Computes field_poc[] for POC type 0 from pic_order_cnt_lsb and the
   running MSB state kept in priv. NOTE(review): the function name line
   and some closing braces/breaks are elided in this extraction. */
1730 GstVaapiDecoderH264 *decoder,
1731 GstVaapiPictureH264 *picture,
1732 GstH264SliceHdr *slice_hdr
1735 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1736 GstH264SPS * const sps = get_sps(decoder);
1737 const gint32 MaxPicOrderCntLsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
1740 GST_DEBUG("decode picture order count type 0");
/* Establish prevPicOrderCntMsb/Lsb: zero for IDR; after an mmco5 the
   previous top-field POC is used unless the previous picture was a
   bottom field (8.2.1.1) */
1742 if (GST_VAAPI_PICTURE_IS_IDR(picture)) {
1743 priv->prev_poc_msb = 0;
1744 priv->prev_poc_lsb = 0;
1746 else if (priv->prev_pic_has_mmco5) {
1747 priv->prev_poc_msb = 0;
1748 priv->prev_poc_lsb =
1749 (priv->prev_pic_structure == GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD ?
1750 0 : priv->field_poc[TOP_FIELD]);
1753 priv->prev_poc_msb = priv->poc_msb;
1754 priv->prev_poc_lsb = priv->poc_lsb;
/* (8-3): derive PicOrderCntMsb with wrap-around detection */
1758 priv->poc_lsb = slice_hdr->pic_order_cnt_lsb;
1759 if (priv->poc_lsb < priv->prev_poc_lsb &&
1760 (priv->prev_poc_lsb - priv->poc_lsb) >= (MaxPicOrderCntLsb / 2))
1761 priv->poc_msb = priv->prev_poc_msb + MaxPicOrderCntLsb;
1762 else if (priv->poc_lsb > priv->prev_poc_lsb &&
1763 (priv->poc_lsb - priv->prev_poc_lsb) > (MaxPicOrderCntLsb / 2))
1764 priv->poc_msb = priv->prev_poc_msb - MaxPicOrderCntLsb;
1766 priv->poc_msb = priv->prev_poc_msb;
/* Distribute the POC onto the fields of this picture structure */
1768 temp_poc = priv->poc_msb + priv->poc_lsb;
1769 switch (picture->structure) {
1770 case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1772 priv->field_poc[TOP_FIELD] = temp_poc;
1773 priv->field_poc[BOTTOM_FIELD] = temp_poc +
1774 slice_hdr->delta_pic_order_cnt_bottom;
1776 case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1778 priv->field_poc[TOP_FIELD] = temp_poc;
1780 case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1782 priv->field_poc[BOTTOM_FIELD] = temp_poc;
1787 /* 8.2.1.2 - Decoding process for picture order count type 1 */
/* Computes field_poc[] for POC type 1 from frame_num and the SPS's
   offset_for_ref_frame[] cycle. NOTE(review): the function name line
   and some breaks/braces are elided in this extraction. */
1790 GstVaapiDecoderH264 *decoder,
1791 GstVaapiPictureH264 *picture,
1792 GstH264SliceHdr *slice_hdr
1795 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1796 GstH264SPS * const sps = get_sps(decoder);
1797 const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1798 gint32 prev_frame_num_offset, abs_frame_num, expected_poc;
1801 GST_DEBUG("decode picture order count type 1");
/* (8-6): frame_num_offset of the previous picture, zeroed after mmco5 */
1803 if (priv->prev_pic_has_mmco5)
1804 prev_frame_num_offset = 0;
1806 prev_frame_num_offset = priv->frame_num_offset;
/* (8-7): accumulate MaxFrameNum on frame_num wrap-around */
1809 if (GST_VAAPI_PICTURE_IS_IDR(picture))
1810 priv->frame_num_offset = 0;
1811 else if (priv->prev_frame_num > priv->frame_num)
1812 priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1814 priv->frame_num_offset = prev_frame_num_offset;
/* (8-8): absFrameNum; non-reference pictures use the previous slot */
1817 if (sps->num_ref_frames_in_pic_order_cnt_cycle != 0)
1818 abs_frame_num = priv->frame_num_offset + priv->frame_num;
1821 if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture) && abs_frame_num > 0)
1822 abs_frame_num = abs_frame_num - 1;
/* (8-9): expectedPicOrderCnt from the POC cycle table */
1824 if (abs_frame_num > 0) {
1825 gint32 expected_delta_per_poc_cycle;
1826 gint32 poc_cycle_cnt, frame_num_in_poc_cycle;
1828 expected_delta_per_poc_cycle = 0;
1829 for (i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++)
1830 expected_delta_per_poc_cycle += sps->offset_for_ref_frame[i];
1833 poc_cycle_cnt = (abs_frame_num - 1) /
1834 sps->num_ref_frames_in_pic_order_cnt_cycle;
1835 frame_num_in_poc_cycle = (abs_frame_num - 1) %
1836 sps->num_ref_frames_in_pic_order_cnt_cycle;
1839 expected_poc = poc_cycle_cnt * expected_delta_per_poc_cycle;
1840 for (i = 0; i <= frame_num_in_poc_cycle; i++)
1841 expected_poc += sps->offset_for_ref_frame[i];
1845 if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1846 expected_poc += sps->offset_for_non_ref_pic;
/* (8-10): final per-field POCs */
1849 switch (picture->structure) {
1850 case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1851 priv->field_poc[TOP_FIELD] = expected_poc +
1852 slice_hdr->delta_pic_order_cnt[0];
1853 priv->field_poc[BOTTOM_FIELD] = priv->field_poc[TOP_FIELD] +
1854 sps->offset_for_top_to_bottom_field +
1855 slice_hdr->delta_pic_order_cnt[1];
1857 case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1858 priv->field_poc[TOP_FIELD] = expected_poc +
1859 slice_hdr->delta_pic_order_cnt[0];
1861 case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1862 priv->field_poc[BOTTOM_FIELD] = expected_poc +
1863 sps->offset_for_top_to_bottom_field +
1864 slice_hdr->delta_pic_order_cnt[0];
1869 /* 8.2.1.3 - Decoding process for picture order count type 2 */
1872 GstVaapiDecoderH264 *decoder,
1873 GstVaapiPictureH264 *picture,
1874 GstH264SliceHdr *slice_hdr
1877 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1878 GstH264SPS * const sps = get_sps(decoder);
1879 const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1880 gint32 prev_frame_num_offset, temp_poc;
1882 GST_DEBUG("decode picture order count type 2");
1884 if (priv->prev_pic_has_mmco5)
1885 prev_frame_num_offset = 0;
1887 prev_frame_num_offset = priv->frame_num_offset;
1890 if (GST_VAAPI_PICTURE_IS_IDR(picture))
1891 priv->frame_num_offset = 0;
1892 else if (priv->prev_frame_num > priv->frame_num)
1893 priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1895 priv->frame_num_offset = prev_frame_num_offset;
1898 if (GST_VAAPI_PICTURE_IS_IDR(picture))
1900 else if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1901 temp_poc = 2 * (priv->frame_num_offset + priv->frame_num) - 1;
1903 temp_poc = 2 * (priv->frame_num_offset + priv->frame_num);
1906 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1907 priv->field_poc[TOP_FIELD] = temp_poc;
1908 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1909 priv->field_poc[BOTTOM_FIELD] = temp_poc;
1912 /* 8.2.1 - Decoding process for picture order count */
/* Dispatches to the per-type POC derivation, then copies the resulting
   field POCs onto the picture and sets base.poc to the smaller of the
   two. NOTE(review): the function name line, case labels and breaks are
   elided in this extraction. */
1915 GstVaapiDecoderH264 *decoder,
1916 GstVaapiPictureH264 *picture,
1917 GstH264SliceHdr *slice_hdr
1920 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1921 GstH264SPS * const sps = get_sps(decoder);
1923 switch (sps->pic_order_cnt_type) {
1925 init_picture_poc_0(decoder, picture, slice_hdr);
1928 init_picture_poc_1(decoder, picture, slice_hdr);
1931 init_picture_poc_2(decoder, picture, slice_hdr);
/* Only the fields this picture actually covers get their POC */
1935 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1936 picture->field_poc[TOP_FIELD] = priv->field_poc[TOP_FIELD];
1937 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1938 picture->field_poc[BOTTOM_FIELD] = priv->field_poc[BOTTOM_FIELD];
1939 picture->base.poc = MIN(picture->field_poc[0], picture->field_poc[1]);
1943 compare_picture_pic_num_dec(const void *a, const void *b)
1945 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1946 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1948 return picB->pic_num - picA->pic_num;
1952 compare_picture_long_term_pic_num_inc(const void *a, const void *b)
1954 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1955 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1957 return picA->long_term_pic_num - picB->long_term_pic_num;
1961 compare_picture_poc_dec(const void *a, const void *b)
1963 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1964 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1966 return picB->base.poc - picA->base.poc;
1970 compare_picture_poc_inc(const void *a, const void *b)
1972 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1973 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1975 return picA->base.poc - picB->base.poc;
1979 compare_picture_frame_num_wrap_dec(const void *a, const void *b)
1981 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1982 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1984 return picB->frame_num_wrap - picA->frame_num_wrap;
1988 compare_picture_long_term_frame_idx_inc(const void *a, const void *b)
1990 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1991 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1993 return picA->long_term_frame_idx - picB->long_term_frame_idx;
1996 /* 8.2.4.1 - Decoding process for picture numbers */
/* Derives frame_num_wrap/pic_num for short-term references and
   long_term_pic_num for long-term references, relative to the current
   picture. References from other MVC views are skipped. NOTE(review):
   `continue` statements and closing braces are elided here. */
1998 init_picture_refs_pic_num(
1999 GstVaapiDecoderH264 *decoder,
2000 GstVaapiPictureH264 *picture,
2001 GstH264SliceHdr *slice_hdr
2004 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2005 GstH264SPS * const sps = get_sps(decoder);
2006 const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
2009 GST_DEBUG("decode picture numbers");
2011 for (i = 0; i < priv->short_ref_count; i++) {
2012 GstVaapiPictureH264 * const pic = priv->short_ref[i];
/* Only same-view references participate (MVC) */
2015 if (pic->base.view_id != picture->base.view_id)
/* (8-27): wrap frame_num of references that precede the wrap point */
2019 if (pic->frame_num > priv->frame_num)
2020 pic->frame_num_wrap = pic->frame_num - MaxFrameNum;
2022 pic->frame_num_wrap = pic->frame_num;
2024 // (8-28, 8-30, 8-31)
2025 if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2026 pic->pic_num = pic->frame_num_wrap;
2028 if (pic->structure == picture->structure)
2029 pic->pic_num = 2 * pic->frame_num_wrap + 1;
2031 pic->pic_num = 2 * pic->frame_num_wrap;
2035 for (i = 0; i < priv->long_ref_count; i++) {
2036 GstVaapiPictureH264 * const pic = priv->long_ref[i];
/* Only same-view references participate (MVC) */
2039 if (pic->base.view_id != picture->base.view_id)
2042 // (8-29, 8-32, 8-33)
2043 if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2044 pic->long_term_pic_num = pic->long_term_frame_idx;
2046 if (pic->structure == picture->structure)
2047 pic->long_term_pic_num = 2 * pic->long_term_frame_idx + 1;
2049 pic->long_term_pic_num = 2 * pic->long_term_frame_idx;
/* Sorts the first @n entries of a GstVaapiPictureH264* array in place
   with qsort(), using one of the compare_picture_* comparators above
   (the token is pasted onto the compare_picture_ prefix). */
2054 #define SORT_REF_LIST(list, n, compare_func) \
2055 qsort(list, n, sizeof(*(list)), compare_picture_##compare_func)
/* 8.2.4.2.5 helper: appends @ref_list entries to RefPicList by
   alternating parity -- fields matching @picture_structure first, then
   opposite-parity fields, repeating until the source list is consumed.
   NOTE(review): the loop-variable initializations and the `do {` that
   pairs with the `} while` (around lines 2064-2072) are elided here. */
2058 init_picture_refs_fields_1(
2059 guint picture_structure,
2060 GstVaapiPictureH264 *RefPicList[32],
2061 guint *RefPicList_count,
2062 GstVaapiPictureH264 *ref_list[32],
2063 guint ref_list_count
2070 n = *RefPicList_count;
/* Take the next field with the same parity as the current picture */
2073 for (; i < ref_list_count; i++) {
2074 if (ref_list[i]->structure == picture_structure) {
2075 RefPicList[n++] = ref_list[i++];
/* ... then the next field with the opposite parity */
2079 for (; j < ref_list_count; j++) {
2080 if (ref_list[j]->structure != picture_structure) {
2081 RefPicList[n++] = ref_list[j++];
2085 } while (i < ref_list_count || j < ref_list_count);
2086 *RefPicList_count = n;
/* 8.2.4.2.5 - Builds a field reference picture list: interleaved
   short-term references first, then interleaved long-term references.
   NOTE(review): the declaration/initialization of `n` (around line
   2100) is elided in this extraction. */
2090 init_picture_refs_fields(
2091 GstVaapiPictureH264 *picture,
2092 GstVaapiPictureH264 *RefPicList[32],
2093 guint *RefPicList_count,
2094 GstVaapiPictureH264 *short_ref[32],
2095 guint short_ref_count,
2096 GstVaapiPictureH264 *long_ref[32],
2097 guint long_ref_count
2102 /* 8.2.4.2.5 - reference picture lists in fields */
2103 init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2104 short_ref, short_ref_count);
2105 init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2106 long_ref, long_ref_count);
2107 *RefPicList_count = n;
2110 /* Finds the inter-view reference picture with the supplied view id */
2111 static GstVaapiPictureH264 *
2112 find_inter_view_reference(GstVaapiDecoderH264 *decoder, guint16 view_id)
2114 GPtrArray * const inter_views = decoder->priv.inter_views;
2117 for (i = 0; i < inter_views->len; i++) {
2118 GstVaapiPictureH264 * const picture = g_ptr_array_index(inter_views, i);
2119 if (picture->base.view_id == view_id)
2123 GST_WARNING("failed to find inter-view reference picture for view_id: %d",
2128 /* Checks whether the view id exists in the supplied list of view ids */
2130 find_view_id(guint16 view_id, const guint16 *view_ids, guint num_view_ids)
2134 for (i = 0; i < num_view_ids; i++) {
2135 if (view_ids[i] == view_id)
2142 find_view_id_in_view(guint16 view_id, const GstH264SPSExtMVCView *view,
2146 return (find_view_id(view_id, view->anchor_ref_l0,
2147 view->num_anchor_refs_l0) ||
2148 find_view_id(view_id, view->anchor_ref_l1,
2149 view->num_anchor_refs_l1));
2151 return (find_view_id(view_id, view->non_anchor_ref_l0,
2152 view->num_non_anchor_refs_l0) ||
2153 find_view_id(view_id, view->non_anchor_ref_l1,
2154 view->num_non_anchor_refs_l1));
2157 /* Checks whether the inter-view reference picture with the supplied
2158 view id is used for decoding the current view component picture */
2160 is_inter_view_reference_for_picture(GstVaapiDecoderH264 *decoder,
2161 guint16 view_id, GstVaapiPictureH264 *picture)
2163 const GstH264SPS * const sps = get_sps(decoder);
2166 if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2167 sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2170 is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2171 return find_view_id_in_view(view_id,
2172 &sps->extension.mvc.view[picture->base.voc], is_anchor);
2175 /* Checks whether the supplied inter-view reference picture is used
2176 for decoding the next view component pictures */
2178 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
2179 GstVaapiPictureH264 *picture)
2181 const GstH264SPS * const sps = get_sps(decoder);
2185 if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2186 sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2189 is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2190 num_views = sps->extension.mvc.num_views_minus1 + 1;
2191 for (i = picture->base.voc + 1; i < num_views; i++) {
2192 const GstH264SPSExtMVCView * const view = &sps->extension.mvc.view[i];
2193 if (find_view_id_in_view(picture->base.view_id, view, is_anchor))
2199 /* H.8.2.1 - Initialization process for inter-view prediction references */
2201 init_picture_refs_mvc_1(GstVaapiDecoderH264 *decoder,
2202 GstVaapiPictureH264 **ref_list, guint *ref_list_count_ptr, guint num_refs,
2203 const guint16 *view_ids, guint num_view_ids)
2207 n = *ref_list_count_ptr;
2208 for (j = 0; j < num_view_ids && n < num_refs; j++) {
2209 GstVaapiPictureH264 * const pic =
2210 find_inter_view_reference(decoder, view_ids[j]);
2212 ref_list[n++] = pic;
2214 *ref_list_count_ptr = n;
/* Appends inter-view prediction references to RefPicList0/1 for the
   requested @list, choosing the anchor or non-anchor view-id arrays of
   the picture's view depending on its anchor flag. NOTE(review): the
   if/else that selects between list 0 and list 1 (lines ~2239-2252) is
   partially elided in this extraction. */
2218 init_picture_refs_mvc(GstVaapiDecoderH264 *decoder,
2219 GstVaapiPictureH264 *picture, GstH264SliceHdr *slice_hdr, guint list)
2221 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2222 const GstH264SPS * const sps = get_sps(decoder);
2223 const GstH264SPSExtMVCView *view;
2225 GST_DEBUG("initialize reference picture list for inter-view prediction");
2227 if (sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2229 view = &sps->extension.mvc.view[picture->base.voc];
/* Token-pasting helper: expands to the RefPicList0/1 state, the active
   reference count from the slice header, and the chosen view-id array */
2231 #define INVOKE_INIT_PICTURE_REFS_MVC(ref_list, view_list) do { \
2232 init_picture_refs_mvc_1(decoder, \
2233 priv->RefPicList##ref_list, \
2234 &priv->RefPicList##ref_list##_count, \
2235 slice_hdr->num_ref_idx_l##ref_list##_active_minus1 + 1, \
2236 view->view_list##_l##ref_list, \
2237 view->num_##view_list##s_l##ref_list); \
2241 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2242 INVOKE_INIT_PICTURE_REFS_MVC(0, anchor_ref);
2244 INVOKE_INIT_PICTURE_REFS_MVC(0, non_anchor_ref);
2247 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2248 INVOKE_INIT_PICTURE_REFS_MVC(1, anchor_ref);
2250 INVOKE_INIT_PICTURE_REFS_MVC(1, non_anchor_ref);
2253 #undef INVOKE_INIT_PICTURE_REFS_MVC
2257 init_picture_refs_p_slice(
2258 GstVaapiDecoderH264 *decoder,
2259 GstVaapiPictureH264 *picture,
2260 GstH264SliceHdr *slice_hdr
2263 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2264 GstVaapiPictureH264 **ref_list;
2267 GST_DEBUG("decode reference picture list for P and SP slices");
2269 if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2270 /* 8.2.4.2.1 - P and SP slices in frames */
2271 if (priv->short_ref_count > 0) {
2272 ref_list = priv->RefPicList0;
2273 for (i = 0; i < priv->short_ref_count; i++)
2274 ref_list[i] = priv->short_ref[i];
2275 SORT_REF_LIST(ref_list, i, pic_num_dec);
2276 priv->RefPicList0_count += i;
2279 if (priv->long_ref_count > 0) {
2280 ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2281 for (i = 0; i < priv->long_ref_count; i++)
2282 ref_list[i] = priv->long_ref[i];
2283 SORT_REF_LIST(ref_list, i, long_term_pic_num_inc);
2284 priv->RefPicList0_count += i;
2288 /* 8.2.4.2.2 - P and SP slices in fields */
2289 GstVaapiPictureH264 *short_ref[32];
2290 guint short_ref_count = 0;
2291 GstVaapiPictureH264 *long_ref[32];
2292 guint long_ref_count = 0;
2294 if (priv->short_ref_count > 0) {
2295 for (i = 0; i < priv->short_ref_count; i++)
2296 short_ref[i] = priv->short_ref[i];
2297 SORT_REF_LIST(short_ref, i, frame_num_wrap_dec);
2298 short_ref_count = i;
2301 if (priv->long_ref_count > 0) {
2302 for (i = 0; i < priv->long_ref_count; i++)
2303 long_ref[i] = priv->long_ref[i];
2304 SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
2308 init_picture_refs_fields(
2310 priv->RefPicList0, &priv->RefPicList0_count,
2311 short_ref, short_ref_count,
2312 long_ref, long_ref_count
2316 if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2318 init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
2323 init_picture_refs_b_slice(
2324 GstVaapiDecoderH264 *decoder,
2325 GstVaapiPictureH264 *picture,
2326 GstH264SliceHdr *slice_hdr
2329 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2330 GstVaapiPictureH264 **ref_list;
2333 GST_DEBUG("decode reference picture list for B slices");
2335 if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2336 /* 8.2.4.2.3 - B slices in frames */
2339 if (priv->short_ref_count > 0) {
2340 // 1. Short-term references
2341 ref_list = priv->RefPicList0;
2342 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2343 if (priv->short_ref[i]->base.poc < picture->base.poc)
2344 ref_list[n++] = priv->short_ref[i];
2346 SORT_REF_LIST(ref_list, n, poc_dec);
2347 priv->RefPicList0_count += n;
2349 ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2350 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2351 if (priv->short_ref[i]->base.poc >= picture->base.poc)
2352 ref_list[n++] = priv->short_ref[i];
2354 SORT_REF_LIST(ref_list, n, poc_inc);
2355 priv->RefPicList0_count += n;
2358 if (priv->long_ref_count > 0) {
2359 // 2. Long-term references
2360 ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2361 for (n = 0, i = 0; i < priv->long_ref_count; i++)
2362 ref_list[n++] = priv->long_ref[i];
2363 SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2364 priv->RefPicList0_count += n;
2368 if (priv->short_ref_count > 0) {
2369 // 1. Short-term references
2370 ref_list = priv->RefPicList1;
2371 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2372 if (priv->short_ref[i]->base.poc > picture->base.poc)
2373 ref_list[n++] = priv->short_ref[i];
2375 SORT_REF_LIST(ref_list, n, poc_inc);
2376 priv->RefPicList1_count += n;
2378 ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2379 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2380 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2381 ref_list[n++] = priv->short_ref[i];
2383 SORT_REF_LIST(ref_list, n, poc_dec);
2384 priv->RefPicList1_count += n;
2387 if (priv->long_ref_count > 0) {
2388 // 2. Long-term references
2389 ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2390 for (n = 0, i = 0; i < priv->long_ref_count; i++)
2391 ref_list[n++] = priv->long_ref[i];
2392 SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2393 priv->RefPicList1_count += n;
2397 /* 8.2.4.2.4 - B slices in fields */
2398 GstVaapiPictureH264 *short_ref0[32];
2399 guint short_ref0_count = 0;
2400 GstVaapiPictureH264 *short_ref1[32];
2401 guint short_ref1_count = 0;
2402 GstVaapiPictureH264 *long_ref[32];
2403 guint long_ref_count = 0;
2405 /* refFrameList0ShortTerm */
2406 if (priv->short_ref_count > 0) {
2407 ref_list = short_ref0;
2408 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2409 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2410 ref_list[n++] = priv->short_ref[i];
2412 SORT_REF_LIST(ref_list, n, poc_dec);
2413 short_ref0_count += n;
2415 ref_list = &short_ref0[short_ref0_count];
2416 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2417 if (priv->short_ref[i]->base.poc > picture->base.poc)
2418 ref_list[n++] = priv->short_ref[i];
2420 SORT_REF_LIST(ref_list, n, poc_inc);
2421 short_ref0_count += n;
2424 /* refFrameList1ShortTerm */
2425 if (priv->short_ref_count > 0) {
2426 ref_list = short_ref1;
2427 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2428 if (priv->short_ref[i]->base.poc > picture->base.poc)
2429 ref_list[n++] = priv->short_ref[i];
2431 SORT_REF_LIST(ref_list, n, poc_inc);
2432 short_ref1_count += n;
2434 ref_list = &short_ref1[short_ref1_count];
2435 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2436 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2437 ref_list[n++] = priv->short_ref[i];
2439 SORT_REF_LIST(ref_list, n, poc_dec);
2440 short_ref1_count += n;
2443 /* refFrameListLongTerm */
2444 if (priv->long_ref_count > 0) {
2445 for (i = 0; i < priv->long_ref_count; i++)
2446 long_ref[i] = priv->long_ref[i];
2447 SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
2451 init_picture_refs_fields(
2453 priv->RefPicList0, &priv->RefPicList0_count,
2454 short_ref0, short_ref0_count,
2455 long_ref, long_ref_count
2458 init_picture_refs_fields(
2460 priv->RefPicList1, &priv->RefPicList1_count,
2461 short_ref1, short_ref1_count,
2462 long_ref, long_ref_count
2466 /* Check whether RefPicList1 is identical to RefPicList0, then
2467 swap if necessary */
2468 if (priv->RefPicList1_count > 1 &&
2469 priv->RefPicList1_count == priv->RefPicList0_count &&
2470 memcmp(priv->RefPicList0, priv->RefPicList1,
2471 priv->RefPicList0_count * sizeof(priv->RefPicList0[0])) == 0) {
2472 GstVaapiPictureH264 * const tmp = priv->RefPicList1[0];
2473 priv->RefPicList1[0] = priv->RefPicList1[1];
2474 priv->RefPicList1[1] = tmp;
2477 if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2479 init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
2482 init_picture_refs_mvc(decoder, picture, slice_hdr, 1);
2486 #undef SORT_REF_LIST
2489 find_short_term_reference(GstVaapiDecoderH264 *decoder, gint32 pic_num)
2491 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2494 for (i = 0; i < priv->short_ref_count; i++) {
2495 if (priv->short_ref[i]->pic_num == pic_num)
2498 GST_ERROR("found no short-term reference picture with PicNum = %d",
2504 find_long_term_reference(GstVaapiDecoderH264 *decoder, gint32 long_term_pic_num)
2506 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2509 for (i = 0; i < priv->long_ref_count; i++) {
2510 if (priv->long_ref[i]->long_term_pic_num == long_term_pic_num)
2513 GST_ERROR("found no long-term reference picture with LongTermPicNum = %d",
2519 exec_picture_refs_modification_1(
2520 GstVaapiDecoderH264 *decoder,
2521 GstVaapiPictureH264 *picture,
2522 GstH264SliceHdr *slice_hdr,
2526 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2527 GstH264SPS * const sps = get_sps(decoder);
2528 GstH264RefPicListModification *ref_pic_list_modification;
2529 guint num_ref_pic_list_modifications;
2530 GstVaapiPictureH264 **ref_list;
2531 guint *ref_list_count_ptr, ref_list_count, ref_list_idx = 0;
2532 const guint16 *view_ids = NULL;
2533 guint i, j, n, num_refs, num_view_ids = 0;
2535 gint32 MaxPicNum, CurrPicNum, picNumPred, picViewIdxPred;
2537 GST_DEBUG("modification process of reference picture list %u", list);
2540 ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l0;
2541 num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l0;
2542 ref_list = priv->RefPicList0;
2543 ref_list_count_ptr = &priv->RefPicList0_count;
2544 num_refs = slice_hdr->num_ref_idx_l0_active_minus1 + 1;
2546 if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2547 sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2548 const GstH264SPSExtMVCView * const view =
2549 &sps->extension.mvc.view[picture->base.voc];
2550 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2551 view_ids = view->anchor_ref_l0;
2552 num_view_ids = view->num_anchor_refs_l0;
2555 view_ids = view->non_anchor_ref_l0;
2556 num_view_ids = view->num_non_anchor_refs_l0;
2561 ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l1;
2562 num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l1;
2563 ref_list = priv->RefPicList1;
2564 ref_list_count_ptr = &priv->RefPicList1_count;
2565 num_refs = slice_hdr->num_ref_idx_l1_active_minus1 + 1;
2567 if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2568 sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2569 const GstH264SPSExtMVCView * const view =
2570 &sps->extension.mvc.view[picture->base.voc];
2571 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2572 view_ids = view->anchor_ref_l1;
2573 num_view_ids = view->num_anchor_refs_l1;
2576 view_ids = view->non_anchor_ref_l1;
2577 num_view_ids = view->num_non_anchor_refs_l1;
2581 ref_list_count = *ref_list_count_ptr;
2583 if (!GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2584 MaxPicNum = 1 << (sps->log2_max_frame_num_minus4 + 5); // 2 * MaxFrameNum
2585 CurrPicNum = 2 * slice_hdr->frame_num + 1; // 2 * frame_num + 1
2588 MaxPicNum = 1 << (sps->log2_max_frame_num_minus4 + 4); // MaxFrameNum
2589 CurrPicNum = slice_hdr->frame_num; // frame_num
2592 picNumPred = CurrPicNum;
2593 picViewIdxPred = -1;
2595 for (i = 0; i < num_ref_pic_list_modifications; i++) {
2596 GstH264RefPicListModification * const l = &ref_pic_list_modification[i];
2597 if (l->modification_of_pic_nums_idc == 3)
2600 /* 8.2.4.3.1 - Short-term reference pictures */
2601 if (l->modification_of_pic_nums_idc == 0 || l->modification_of_pic_nums_idc == 1) {
2602 gint32 abs_diff_pic_num = l->value.abs_diff_pic_num_minus1 + 1;
2603 gint32 picNum, picNumNoWrap;
2606 if (l->modification_of_pic_nums_idc == 0) {
2607 picNumNoWrap = picNumPred - abs_diff_pic_num;
2608 if (picNumNoWrap < 0)
2609 picNumNoWrap += MaxPicNum;
2614 picNumNoWrap = picNumPred + abs_diff_pic_num;
2615 if (picNumNoWrap >= MaxPicNum)
2616 picNumNoWrap -= MaxPicNum;
2618 picNumPred = picNumNoWrap;
2621 picNum = picNumNoWrap;
2622 if (picNum > CurrPicNum)
2623 picNum -= MaxPicNum;
2626 for (j = num_refs; j > ref_list_idx; j--)
2627 ref_list[j] = ref_list[j - 1];
2628 found_ref_idx = find_short_term_reference(decoder, picNum);
2629 ref_list[ref_list_idx++] =
2630 found_ref_idx >= 0 ? priv->short_ref[found_ref_idx] : NULL;
2632 for (j = ref_list_idx; j <= num_refs; j++) {
2637 GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(ref_list[j]) ?
2638 ref_list[j]->pic_num : MaxPicNum;
2639 if (PicNumF != picNum ||
2640 ref_list[j]->base.view_id != picture->base.view_id)
2641 ref_list[n++] = ref_list[j];
2645 /* 8.2.4.3.2 - Long-term reference pictures */
2646 else if (l->modification_of_pic_nums_idc == 2) {
2648 for (j = num_refs; j > ref_list_idx; j--)
2649 ref_list[j] = ref_list[j - 1];
2651 find_long_term_reference(decoder, l->value.long_term_pic_num);
2652 ref_list[ref_list_idx++] =
2653 found_ref_idx >= 0 ? priv->long_ref[found_ref_idx] : NULL;
2655 for (j = ref_list_idx; j <= num_refs; j++) {
2656 gint32 LongTermPicNumF;
2660 GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(ref_list[j]) ?
2661 ref_list[j]->long_term_pic_num : INT_MAX;
2662 if (LongTermPicNumF != l->value.long_term_pic_num ||
2663 ref_list[j]->base.view_id != picture->base.view_id)
2664 ref_list[n++] = ref_list[j];
2668 /* H.8.2.2.3 - Inter-view prediction reference pictures */
2669 else if ((GST_VAAPI_PICTURE_IS_MVC(picture) &&
2670 sps->extension_type == GST_H264_NAL_EXTENSION_MVC) &&
2671 (l->modification_of_pic_nums_idc == 4 ||
2672 l->modification_of_pic_nums_idc == 5)) {
2673 gint32 abs_diff_view_idx = l->value.abs_diff_view_idx_minus1 + 1;
2674 gint32 picViewIdx, targetViewId;
2677 if (l->modification_of_pic_nums_idc == 4) {
2678 picViewIdx = picViewIdxPred - abs_diff_view_idx;
2680 picViewIdx += num_view_ids;
2685 picViewIdx = picViewIdxPred + abs_diff_view_idx;
2686 if (picViewIdx >= num_view_ids)
2687 picViewIdx -= num_view_ids;
2689 picViewIdxPred = picViewIdx;
2692 targetViewId = view_ids[picViewIdx];
2695 for (j = num_refs; j > ref_list_idx; j--)
2696 ref_list[j] = ref_list[j - 1];
2697 ref_list[ref_list_idx++] =
2698 find_inter_view_reference(decoder, targetViewId);
2700 for (j = ref_list_idx; j <= num_refs; j++) {
2703 if (ref_list[j]->base.view_id != targetViewId ||
2704 ref_list[j]->base.poc != picture->base.poc)
2705 ref_list[n++] = ref_list[j];
2711 for (i = 0; i < num_refs; i++)
2713 GST_ERROR("list %u entry %u is empty", list, i);
2715 *ref_list_count_ptr = num_refs;
2718 /* 8.2.4.3 - Modification process for reference picture lists */
2720 exec_picture_refs_modification(
2721 GstVaapiDecoderH264 *decoder,
2722 GstVaapiPictureH264 *picture,
2723 GstH264SliceHdr *slice_hdr
2726 GST_DEBUG("execute ref_pic_list_modification()");
2729 if (!GST_H264_IS_I_SLICE(slice_hdr) && !GST_H264_IS_SI_SLICE(slice_hdr) &&
2730 slice_hdr->ref_pic_list_modification_flag_l0)
2731 exec_picture_refs_modification_1(decoder, picture, slice_hdr, 0);
2734 if (GST_H264_IS_B_SLICE(slice_hdr) &&
2735 slice_hdr->ref_pic_list_modification_flag_l1)
2736 exec_picture_refs_modification_1(decoder, picture, slice_hdr, 1);
2740 init_picture_ref_lists(GstVaapiDecoderH264 *decoder,
2741 GstVaapiPictureH264 *picture)
2743 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2744 guint i, j, short_ref_count, long_ref_count;
2746 short_ref_count = 0;
2748 if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2749 for (i = 0; i < priv->dpb_count; i++) {
2750 GstVaapiFrameStore * const fs = priv->dpb[i];
2751 GstVaapiPictureH264 *pic;
2752 if (!gst_vaapi_frame_store_has_frame(fs))
2754 pic = fs->buffers[0];
2755 if (pic->base.view_id != picture->base.view_id)
2757 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2758 priv->short_ref[short_ref_count++] = pic;
2759 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2760 priv->long_ref[long_ref_count++] = pic;
2761 pic->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2762 pic->other_field = fs->buffers[1];
2766 for (i = 0; i < priv->dpb_count; i++) {
2767 GstVaapiFrameStore * const fs = priv->dpb[i];
2768 for (j = 0; j < fs->num_buffers; j++) {
2769 GstVaapiPictureH264 * const pic = fs->buffers[j];
2770 if (pic->base.view_id != picture->base.view_id)
2772 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2773 priv->short_ref[short_ref_count++] = pic;
2774 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2775 priv->long_ref[long_ref_count++] = pic;
2776 pic->structure = pic->base.structure;
2777 pic->other_field = fs->buffers[j ^ 1];
2782 for (i = short_ref_count; i < priv->short_ref_count; i++)
2783 priv->short_ref[i] = NULL;
2784 priv->short_ref_count = short_ref_count;
2786 for (i = long_ref_count; i < priv->long_ref_count; i++)
2787 priv->long_ref[i] = NULL;
2788 priv->long_ref_count = long_ref_count;
2793 GstVaapiDecoderH264 *decoder,
2794 GstVaapiPictureH264 *picture,
2795 GstH264SliceHdr *slice_hdr
2798 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2801 init_picture_ref_lists(decoder, picture);
2802 init_picture_refs_pic_num(decoder, picture, slice_hdr);
2804 priv->RefPicList0_count = 0;
2805 priv->RefPicList1_count = 0;
2807 switch (slice_hdr->type % 5) {
2808 case GST_H264_P_SLICE:
2809 case GST_H264_SP_SLICE:
2810 init_picture_refs_p_slice(decoder, picture, slice_hdr);
2812 case GST_H264_B_SLICE:
2813 init_picture_refs_b_slice(decoder, picture, slice_hdr);
2819 exec_picture_refs_modification(decoder, picture, slice_hdr);
2821 switch (slice_hdr->type % 5) {
2822 case GST_H264_B_SLICE:
2823 num_refs = 1 + slice_hdr->num_ref_idx_l1_active_minus1;
2824 for (i = priv->RefPicList1_count; i < num_refs; i++)
2825 priv->RefPicList1[i] = NULL;
2826 priv->RefPicList1_count = num_refs;
2829 case GST_H264_P_SLICE:
2830 case GST_H264_SP_SLICE:
2831 num_refs = 1 + slice_hdr->num_ref_idx_l0_active_minus1;
2832 for (i = priv->RefPicList0_count; i < num_refs; i++)
2833 priv->RefPicList0[i] = NULL;
2834 priv->RefPicList0_count = num_refs;
2843 GstVaapiDecoderH264 *decoder,
2844 GstVaapiPictureH264 *picture, GstVaapiParserInfoH264 *pi)
2846 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2847 GstVaapiPicture * const base_picture = &picture->base;
2848 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2850 priv->prev_frame_num = priv->frame_num;
2851 priv->frame_num = slice_hdr->frame_num;
2852 picture->frame_num = priv->frame_num;
2853 picture->frame_num_wrap = priv->frame_num;
2854 picture->output_flag = TRUE; /* XXX: conformant to Annex A only */
2855 base_picture->pts = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
2856 base_picture->type = GST_VAAPI_PICTURE_TYPE_NONE;
2857 base_picture->view_id = pi->view_id;
2858 base_picture->voc = pi->voc;
2860 /* Initialize extensions */
2861 switch (pi->nalu.extension_type) {
2862 case GST_H264_NAL_EXTENSION_MVC: {
2863 GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
2865 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_MVC);
2866 if (mvc->inter_view_flag)
2867 GST_VAAPI_PICTURE_FLAG_SET(picture,
2868 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
2869 if (mvc->anchor_pic_flag)
2870 GST_VAAPI_PICTURE_FLAG_SET(picture,
2871 GST_VAAPI_PICTURE_FLAG_ANCHOR);
2876 /* Reset decoder state for IDR pictures */
2877 if (pi->nalu.idr_pic_flag) {
2879 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR);
2880 dpb_flush(decoder, picture);
2883 /* Initialize picture structure */
2884 if (!slice_hdr->field_pic_flag)
2885 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2887 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
2888 if (!slice_hdr->bottom_field_flag)
2889 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
2891 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
2893 picture->structure = base_picture->structure;
2895 /* Initialize reference flags */
2896 if (pi->nalu.ref_idc) {
2897 GstH264DecRefPicMarking * const dec_ref_pic_marking =
2898 &slice_hdr->dec_ref_pic_marking;
2900 if (GST_VAAPI_PICTURE_IS_IDR(picture) &&
2901 dec_ref_pic_marking->long_term_reference_flag)
2902 GST_VAAPI_PICTURE_FLAG_SET(picture,
2903 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE);
2905 GST_VAAPI_PICTURE_FLAG_SET(picture,
2906 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE);
2909 init_picture_poc(decoder, picture, slice_hdr);
2913 /* 8.2.5.3 - Sliding window decoded reference picture marking process */
2915 exec_ref_pic_marking_sliding_window(GstVaapiDecoderH264 *decoder)
2917 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2918 GstH264SPS * const sps = get_sps(decoder);
2919 GstVaapiPictureH264 *ref_picture;
2920 guint i, m, max_num_ref_frames;
2922 GST_DEBUG("reference picture marking process (sliding window)");
2924 if (!GST_VAAPI_PICTURE_IS_FIRST_FIELD(priv->current_picture))
2927 max_num_ref_frames = sps->num_ref_frames;
2928 if (max_num_ref_frames == 0)
2929 max_num_ref_frames = 1;
2930 if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture))
2931 max_num_ref_frames <<= 1;
2933 if (priv->short_ref_count + priv->long_ref_count < max_num_ref_frames)
2935 if (priv->short_ref_count < 1)
2938 for (m = 0, i = 1; i < priv->short_ref_count; i++) {
2939 GstVaapiPictureH264 * const picture = priv->short_ref[i];
2940 if (picture->frame_num_wrap < priv->short_ref[m]->frame_num_wrap)
2944 ref_picture = priv->short_ref[m];
2945 gst_vaapi_picture_h264_set_reference(ref_picture, 0, TRUE);
2946 ARRAY_REMOVE_INDEX(priv->short_ref, m);
2948 /* Both fields need to be marked as "unused for reference", so
2949 remove the other field from the short_ref[] list as well */
2950 if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture) && ref_picture->other_field) {
2951 for (i = 0; i < priv->short_ref_count; i++) {
2952 if (priv->short_ref[i] == ref_picture->other_field) {
2953 ARRAY_REMOVE_INDEX(priv->short_ref, i);
2961 static inline gint32
2962 get_picNumX(GstVaapiPictureH264 *picture, GstH264RefPicMarking *ref_pic_marking)
2966 if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2967 pic_num = picture->frame_num_wrap;
2969 pic_num = 2 * picture->frame_num_wrap + 1;
2970 pic_num -= ref_pic_marking->difference_of_pic_nums_minus1 + 1;
2974 /* 8.2.5.4.1. Mark short-term reference picture as "unused for reference" */
2976 exec_ref_pic_marking_adaptive_mmco_1(
2977 GstVaapiDecoderH264 *decoder,
2978 GstVaapiPictureH264 *picture,
2979 GstH264RefPicMarking *ref_pic_marking
2982 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2985 picNumX = get_picNumX(picture, ref_pic_marking);
2986 i = find_short_term_reference(decoder, picNumX);
2990 gst_vaapi_picture_h264_set_reference(priv->short_ref[i], 0,
2991 GST_VAAPI_PICTURE_IS_FRAME(picture));
2992 ARRAY_REMOVE_INDEX(priv->short_ref, i);
2995 /* 8.2.5.4.2. Mark long-term reference picture as "unused for reference" */
2997 exec_ref_pic_marking_adaptive_mmco_2(
2998 GstVaapiDecoderH264 *decoder,
2999 GstVaapiPictureH264 *picture,
3000 GstH264RefPicMarking *ref_pic_marking
3003 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3006 i = find_long_term_reference(decoder, ref_pic_marking->long_term_pic_num);
3010 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0,
3011 GST_VAAPI_PICTURE_IS_FRAME(picture));
3012 ARRAY_REMOVE_INDEX(priv->long_ref, i);
3015 /* 8.2.5.4.3. Assign LongTermFrameIdx to a short-term reference picture */
3017 exec_ref_pic_marking_adaptive_mmco_3(
3018 GstVaapiDecoderH264 *decoder,
3019 GstVaapiPictureH264 *picture,
3020 GstH264RefPicMarking *ref_pic_marking
3023 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3024 GstVaapiPictureH264 *ref_picture, *other_field;
3027 for (i = 0; i < priv->long_ref_count; i++) {
3028 if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
3031 if (i != priv->long_ref_count) {
3032 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
3033 ARRAY_REMOVE_INDEX(priv->long_ref, i);
3036 picNumX = get_picNumX(picture, ref_pic_marking);
3037 i = find_short_term_reference(decoder, picNumX);
3041 ref_picture = priv->short_ref[i];
3042 ARRAY_REMOVE_INDEX(priv->short_ref, i);
3043 priv->long_ref[priv->long_ref_count++] = ref_picture;
3045 ref_picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3046 gst_vaapi_picture_h264_set_reference(ref_picture,
3047 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3048 GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3050 /* Assign LongTermFrameIdx to the other field if it was also
3051 marked as "used for long-term reference */
3052 other_field = ref_picture->other_field;
3053 if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3054 other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3057 /* 8.2.5.4.4. Mark pictures with LongTermFramIdx > max_long_term_frame_idx
3058 * as "unused for reference" */
3060 exec_ref_pic_marking_adaptive_mmco_4(
3061 GstVaapiDecoderH264 *decoder,
3062 GstVaapiPictureH264 *picture,
3063 GstH264RefPicMarking *ref_pic_marking
3066 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3067 gint32 i, long_term_frame_idx;
3069 long_term_frame_idx = ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
3071 for (i = 0; i < priv->long_ref_count; i++) {
3072 if (priv->long_ref[i]->long_term_frame_idx <= long_term_frame_idx)
3074 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, FALSE);
3075 ARRAY_REMOVE_INDEX(priv->long_ref, i);
3080 /* 8.2.5.4.5. Mark all reference pictures as "unused for reference" */
3082 exec_ref_pic_marking_adaptive_mmco_5(
3083 GstVaapiDecoderH264 *decoder,
3084 GstVaapiPictureH264 *picture,
3085 GstH264RefPicMarking *ref_pic_marking
3088 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3090 dpb_flush(decoder, picture);
3092 priv->prev_pic_has_mmco5 = TRUE;
3094 /* The picture shall be inferred to have had frame_num equal to 0 (7.4.3) */
3095 priv->frame_num = 0;
3096 priv->frame_num_offset = 0;
3097 picture->frame_num = 0;
3099 /* Update TopFieldOrderCnt and BottomFieldOrderCnt (8.2.1) */
3100 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
3101 picture->field_poc[TOP_FIELD] -= picture->base.poc;
3102 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
3103 picture->field_poc[BOTTOM_FIELD] -= picture->base.poc;
3104 picture->base.poc = 0;
3107 /* 8.2.5.4.6. Assign a long-term frame index to the current picture */
3109 exec_ref_pic_marking_adaptive_mmco_6(
3110 GstVaapiDecoderH264 *decoder,
3111 GstVaapiPictureH264 *picture,
3112 GstH264RefPicMarking *ref_pic_marking
3115 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3116 GstVaapiPictureH264 *other_field;
3119 for (i = 0; i < priv->long_ref_count; i++) {
3120 if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
3123 if (i != priv->long_ref_count) {
3124 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
3125 ARRAY_REMOVE_INDEX(priv->long_ref, i);
3128 picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3129 gst_vaapi_picture_h264_set_reference(picture,
3130 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3131 GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3133 /* Assign LongTermFrameIdx to the other field if it was also
3134 marked as "used for long-term reference */
3135 other_field = GST_VAAPI_PICTURE_H264(picture->base.parent_picture);
3136 if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3137 other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3140 /* 8.2.5.4. Adaptive memory control decoded reference picture marking process */
3142 exec_ref_pic_marking_adaptive(
3143 GstVaapiDecoderH264 *decoder,
3144 GstVaapiPictureH264 *picture,
3145 GstH264DecRefPicMarking *dec_ref_pic_marking
3150 GST_DEBUG("reference picture marking process (adaptive memory control)");
3152 typedef void (*exec_ref_pic_marking_adaptive_mmco_func)(
3153 GstVaapiDecoderH264 *decoder,
3154 GstVaapiPictureH264 *picture,
3155 GstH264RefPicMarking *ref_pic_marking
3158 static const exec_ref_pic_marking_adaptive_mmco_func mmco_funcs[] = {
3160 exec_ref_pic_marking_adaptive_mmco_1,
3161 exec_ref_pic_marking_adaptive_mmco_2,
3162 exec_ref_pic_marking_adaptive_mmco_3,
3163 exec_ref_pic_marking_adaptive_mmco_4,
3164 exec_ref_pic_marking_adaptive_mmco_5,
3165 exec_ref_pic_marking_adaptive_mmco_6,
3168 for (i = 0; i < dec_ref_pic_marking->n_ref_pic_marking; i++) {
3169 GstH264RefPicMarking * const ref_pic_marking =
3170 &dec_ref_pic_marking->ref_pic_marking[i];
3172 const guint mmco = ref_pic_marking->memory_management_control_operation;
3173 if (mmco < G_N_ELEMENTS(mmco_funcs) && mmco_funcs[mmco])
3174 mmco_funcs[mmco](decoder, picture, ref_pic_marking);
3176 GST_ERROR("unhandled MMCO %u", mmco);
3183 /* 8.2.5 - Execute reference picture marking process */
3185 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3187 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3189 priv->prev_pic_has_mmco5 = FALSE;
3190 priv->prev_pic_structure = picture->structure;
3192 if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture))
3193 g_ptr_array_add(priv->inter_views, gst_vaapi_picture_ref(picture));
3195 if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
3198 if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
3199 GstH264DecRefPicMarking * const dec_ref_pic_marking =
3200 &picture->last_slice_hdr->dec_ref_pic_marking;
3201 if (dec_ref_pic_marking->adaptive_ref_pic_marking_mode_flag) {
3202 if (!exec_ref_pic_marking_adaptive(decoder, picture, dec_ref_pic_marking))
3206 if (!exec_ref_pic_marking_sliding_window(decoder))
3214 vaapi_init_picture(VAPictureH264 *pic)
3216 pic->picture_id = VA_INVALID_ID;
3218 pic->flags = VA_PICTURE_H264_INVALID;
3219 pic->TopFieldOrderCnt = 0;
3220 pic->BottomFieldOrderCnt = 0;
3224 vaapi_fill_picture(VAPictureH264 *pic, GstVaapiPictureH264 *picture,
3225 guint picture_structure)
3227 if (!picture_structure)
3228 picture_structure = picture->structure;
3230 pic->picture_id = picture->base.surface_id;
3233 if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)) {
3234 pic->flags |= VA_PICTURE_H264_LONG_TERM_REFERENCE;
3235 pic->frame_idx = picture->long_term_frame_idx;
3238 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
3239 pic->flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
3240 pic->frame_idx = picture->frame_num;
3243 switch (picture_structure) {
3244 case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
3245 pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3246 pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3248 case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
3249 pic->flags |= VA_PICTURE_H264_TOP_FIELD;
3250 pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3251 pic->BottomFieldOrderCnt = 0;
3253 case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
3254 pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD;
3255 pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3256 pic->TopFieldOrderCnt = 0;
3262 vaapi_fill_picture_for_RefPicListX(VAPictureH264 *pic,
3263 GstVaapiPictureH264 *picture)
3265 vaapi_fill_picture(pic, picture, 0);
3267 /* H.8.4 - MVC inter prediction and inter-view prediction process */
3268 if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture)) {
3269 /* The inter-view reference components and inter-view only
3270 reference components that are included in the reference
3271 picture lists are considered as not being marked as "used for
3272 short-term reference" or "used for long-term reference" */
3273 pic->flags &= ~(VA_PICTURE_H264_SHORT_TERM_REFERENCE|
3274 VA_PICTURE_H264_LONG_TERM_REFERENCE);
/* Fills in the VAPictureParameterBufferH264 for @picture: the current
 * picture (CurrPic), the ReferenceFrames[] array gathered from the DPB
 * (same-view references plus MVC inter-view references), and the
 * SPS/PPS-derived sequence and picture fields. */
3279 fill_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3281 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3282 GstVaapiPicture * const base_picture = &picture->base;
3283 GstH264PPS * const pps = get_pps(decoder);
3284 GstH264SPS * const sps = get_sps(decoder);
3285 VAPictureParameterBufferH264 * const pic_param = base_picture->param;
3288 /* Fill in VAPictureParameterBufferH264 */
3289 vaapi_fill_picture(&pic_param->CurrPic, picture, 0);
/* Collect reference frames from the DPB: regular references of the same
   view, and frame stores usable as inter-view references for @picture. */
3291 for (i = 0, n = 0; i < priv->dpb_count; i++) {
3292 GstVaapiFrameStore * const fs = priv->dpb[i];
3293 if ((gst_vaapi_frame_store_has_reference(fs) &&
3294 fs->view_id == picture->base.view_id) ||
3295 (gst_vaapi_frame_store_has_inter_view(fs) &&
3296 is_inter_view_reference_for_picture(decoder, fs->view_id, picture)))
3297 vaapi_fill_picture(&pic_param->ReferenceFrames[n++],
3298 fs->buffers[0], fs->structure);
/* Stop once the fixed-size VA ReferenceFrames[] array is full. */
3299 if (n >= G_N_ELEMENTS(pic_param->ReferenceFrames))
/* Mark the remaining ReferenceFrames[] entries as invalid/unused. */
3302 for (; n < G_N_ELEMENTS(pic_param->ReferenceFrames); n++)
3303 vaapi_init_picture(&pic_param->ReferenceFrames[n]);
/* Helper macros: copy a plain field / a bitfield member from SPS or PPS
   into the VA picture parameter buffer. */
3305 #define COPY_FIELD(s, f) \
3306 pic_param->f = (s)->f
3308 #define COPY_BFM(a, s, f) \
3309 pic_param->a.bits.f = (s)->f
3311 pic_param->picture_width_in_mbs_minus1 = priv->mb_width - 1;
3312 pic_param->picture_height_in_mbs_minus1 = priv->mb_height - 1;
3313 pic_param->frame_num = priv->frame_num;
3315 COPY_FIELD(sps, bit_depth_luma_minus8);
3316 COPY_FIELD(sps, bit_depth_chroma_minus8);
3317 COPY_FIELD(sps, num_ref_frames);
3318 COPY_FIELD(pps, num_slice_groups_minus1);
3319 COPY_FIELD(pps, slice_group_map_type);
3320 COPY_FIELD(pps, slice_group_change_rate_minus1);
3321 COPY_FIELD(pps, pic_init_qp_minus26);
3322 COPY_FIELD(pps, pic_init_qs_minus26);
3323 COPY_FIELD(pps, chroma_qp_index_offset);
3324 COPY_FIELD(pps, second_chroma_qp_index_offset);
3326 pic_param->seq_fields.value = 0; /* reset all bits */
3327 pic_param->seq_fields.bits.residual_colour_transform_flag = sps->separate_colour_plane_flag;
3328 pic_param->seq_fields.bits.MinLumaBiPredSize8x8 = sps->level_idc >= 31; /* A.3.3.2 */
3330 COPY_BFM(seq_fields, sps, chroma_format_idc);
3331 COPY_BFM(seq_fields, sps, gaps_in_frame_num_value_allowed_flag);
3332 COPY_BFM(seq_fields, sps, frame_mbs_only_flag);
3333 COPY_BFM(seq_fields, sps, mb_adaptive_frame_field_flag);
3334 COPY_BFM(seq_fields, sps, direct_8x8_inference_flag);
3335 COPY_BFM(seq_fields, sps, log2_max_frame_num_minus4);
3336 COPY_BFM(seq_fields, sps, pic_order_cnt_type);
3337 COPY_BFM(seq_fields, sps, log2_max_pic_order_cnt_lsb_minus4);
3338 COPY_BFM(seq_fields, sps, delta_pic_order_always_zero_flag);
3340 pic_param->pic_fields.value = 0; /* reset all bits */
3341 pic_param->pic_fields.bits.field_pic_flag = GST_VAAPI_PICTURE_IS_INTERLACED(picture);
3342 pic_param->pic_fields.bits.reference_pic_flag = GST_VAAPI_PICTURE_IS_REFERENCE(picture);
3344 COPY_BFM(pic_fields, pps, entropy_coding_mode_flag);
3345 COPY_BFM(pic_fields, pps, weighted_pred_flag);
3346 COPY_BFM(pic_fields, pps, weighted_bipred_idc);
3347 COPY_BFM(pic_fields, pps, transform_8x8_mode_flag);
3348 COPY_BFM(pic_fields, pps, constrained_intra_pred_flag);
3349 COPY_BFM(pic_fields, pps, pic_order_present_flag);
3350 COPY_BFM(pic_fields, pps, deblocking_filter_control_present_flag);
3351 COPY_BFM(pic_fields, pps, redundant_pic_cnt_present_flag);
3355 /* Detection of the first VCL NAL unit of a primary coded picture (7.4.1.2.4) */
/* Compares the current parser info @pi against the previous slice's
 * @prev_pi field by field, following the conditions listed in H.264
 * subclause 7.4.1.2.4. The CHECK_EXPR/CHECK_VALUE macros log the
 * differing field and — presumably — return TRUE (new picture) when a
 * condition triggers; macro body not fully visible here, verify. */
3357 is_new_picture(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3359 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3360 GstH264PPS * const pps = slice_hdr->pps;
3361 GstH264SPS * const sps = pps->sequence;
3362 GstH264SliceHdr *prev_slice_hdr;
3366 prev_slice_hdr = &prev_pi->data.slice_hdr;
3368 #define CHECK_EXPR(expr, field_name) do { \
3370 GST_DEBUG(field_name " differs in value"); \
3375 #define CHECK_VALUE(new_slice_hdr, old_slice_hdr, field) \
3376 CHECK_EXPR(((new_slice_hdr)->field == (old_slice_hdr)->field), #field)
3378 /* view_id differs in value and VOIdx of current slice_hdr is less
3379 than the VOIdx of the prev_slice_hdr */
3380 CHECK_VALUE(pi, prev_pi, view_id);
3382 /* frame_num differs in value, regardless of inferred values to 0 */
3383 CHECK_VALUE(slice_hdr, prev_slice_hdr, frame_num);
3385 /* pic_parameter_set_id differs in value */
3386 CHECK_VALUE(slice_hdr, prev_slice_hdr, pps);
3388 /* field_pic_flag differs in value */
3389 CHECK_VALUE(slice_hdr, prev_slice_hdr, field_pic_flag);
3391 /* bottom_field_flag is present in both and differs in value */
3392 if (slice_hdr->field_pic_flag && prev_slice_hdr->field_pic_flag)
3393 CHECK_VALUE(slice_hdr, prev_slice_hdr, bottom_field_flag);
3395 /* nal_ref_idc differs in value with one of the nal_ref_idc values is 0 */
3396 CHECK_EXPR((pi->nalu.ref_idc != 0) ==
3397 (prev_pi->nalu.ref_idc != 0), "nal_ref_idc");
3399 /* POC type is 0 for both and either pic_order_cnt_lsb differs in
3400 value or delta_pic_order_cnt_bottom differs in value */
3401 if (sps->pic_order_cnt_type == 0) {
3402 CHECK_VALUE(slice_hdr, prev_slice_hdr, pic_order_cnt_lsb);
3403 if (pps->pic_order_present_flag && !slice_hdr->field_pic_flag)
3404 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt_bottom);
3407 /* POC type is 1 for both and either delta_pic_order_cnt[0]
3408 differs in value or delta_pic_order_cnt[1] differs in value */
3409 else if (sps->pic_order_cnt_type == 1) {
3410 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[0]);
3411 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[1]);
3414 /* IdrPicFlag differs in value */
3415 CHECK_VALUE(&pi->nalu, &prev_pi->nalu, idr_pic_flag);
3417 /* IdrPicFlag is equal to 1 for both and idr_pic_id differs in value */
3418 if (pi->nalu.idr_pic_flag)
3419 CHECK_VALUE(slice_hdr, prev_slice_hdr, idr_pic_id);
3426 /* Detection of a new access unit, assuming we are already in presence
/* of a new picture (i.e. is_new_picture() already returned TRUE). A new
 * access unit starts when the view order index (voc) decreases, or when
 * there is no previous slice / same view_id. */
3428 static inline gboolean
3429 is_new_access_unit(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3431 if (!prev_pi || prev_pi->view_id == pi->view_id)
3433 return pi->voc < prev_pi->voc;
3436 /* Finds the first field picture corresponding to the supplied picture */
/* Returns the previously decoded first field (from priv->prev_frames for
 * this view order index) when the current slice is the second field of a
 * complementary field pair with matching frame_num; NULL-ish paths (no
 * field_pic_flag, no frame store, complete frame) are not fully visible
 * here — presumably they return NULL. */
3437 static GstVaapiPictureH264 *
3438 find_first_field(GstVaapiDecoderH264 *decoder, GstVaapiParserInfoH264 *pi)
3440 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3441 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3442 GstVaapiFrameStore *fs;
3444 if (!slice_hdr->field_pic_flag)
3447 fs = priv->prev_frames[pi->voc];
3448 if (!fs || gst_vaapi_frame_store_has_frame(fs))
3451 if (fs->buffers[0]->frame_num == slice_hdr->frame_num)
3452 return fs->buffers[0];
/* Starts decoding of a new picture: activates PPS/SPS, ensures the VA
 * context, allocates (or re-uses, for the second field) the picture
 * object, applies cropping, quantization matrices and picture
 * initialization, then fills the VA picture parameters. */
3456 static GstVaapiDecoderStatus
3457 decode_picture(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3459 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3460 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3461 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3462 GstH264PPS * const pps = ensure_pps(decoder, slice_hdr->pps);
3463 GstH264SPS * const sps = ensure_sps(decoder, slice_hdr->pps->sequence);
3464 GstVaapiPictureH264 *picture, *first_field;
3465 GstVaapiDecoderStatus status;
3467 g_return_val_if_fail(pps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3468 g_return_val_if_fail(sps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3470 /* Only decode base stream for MVC */
3471 switch (sps->profile_idc) {
3472 case GST_H264_PROFILE_MULTIVIEW_HIGH:
3473 case GST_H264_PROFILE_STEREO_HIGH:
3475 GST_DEBUG("drop picture from substream");
3476 return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
3481 status = ensure_context(decoder, sps);
3482 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3485 priv->decoder_state = 0;
3487 first_field = find_first_field(decoder, pi);
3489 /* Re-use current picture where the first field was decoded */
3490 picture = gst_vaapi_picture_h264_new_field(first_field);
3492 GST_ERROR("failed to allocate field picture");
3493 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3497 /* Create new picture */
3498 picture = gst_vaapi_picture_h264_new(decoder);
3500 GST_ERROR("failed to allocate picture");
3501 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
/* priv->current_picture takes its own ref; drop the local one. */
3504 gst_vaapi_picture_replace(&priv->current_picture, picture);
3505 gst_vaapi_picture_unref(picture);
3507 /* Clear inter-view references list if this is the primary coded
3508 picture of the current access unit */
3509 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3510 g_ptr_array_set_size(priv->inter_views, 0);
3512 /* Update cropping rectangle */
3513 if (sps->frame_cropping_flag) {
3514 GstVaapiRectangle crop_rect;
3515 crop_rect.x = sps->crop_rect_x;
3516 crop_rect.y = sps->crop_rect_y;
3517 crop_rect.width = sps->crop_rect_width;
3518 crop_rect.height = sps->crop_rect_height;
3519 gst_vaapi_picture_set_crop_rect(&picture->base, &crop_rect);
3522 status = ensure_quant_matrix(decoder, picture);
3523 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
3524 GST_ERROR("failed to reset quantizer matrix");
3528 if (!init_picture(decoder, picture, pi))
3529 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3530 if (!fill_picture(decoder, picture))
3531 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3533 priv->decoder_state = pi->state;
3534 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Returns the bit offset of the slice data within the NAL unit payload:
 * NAL header bytes plus the parsed slice header size, minus the bits
 * occupied by emulation prevention bytes (already counted by the parser). */
3538 get_slice_data_bit_offset(GstH264SliceHdr *slice_hdr, guint nal_header_bytes)
3542 epb_count = slice_hdr->n_emulation_prevention_bytes;
3543 return 8 * nal_header_bytes + slice_hdr->header_size - epb_count * 8;
/* Fills the prediction weight tables of the VASliceParameterBufferH264
 * from the parsed pred_weight_table: one table (L0) for weighted P/SP
 * slices, two tables (L0+L1) for B slices with explicit weighted
 * bi-prediction (weighted_bipred_idc == 1). All weight/offset flags are
 * cleared first so unused lists stay zeroed. */
3547 fill_pred_weight_table(GstVaapiDecoderH264 *decoder,
3548 GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3550 VASliceParameterBufferH264 * const slice_param = slice->param;
3551 GstH264PPS * const pps = get_pps(decoder);
3552 GstH264SPS * const sps = get_sps(decoder);
3553 GstH264PredWeightTable * const w = &slice_hdr->pred_weight_table;
3554 guint num_weight_tables = 0;
3557 if (pps->weighted_pred_flag &&
3558 (GST_H264_IS_P_SLICE(slice_hdr) || GST_H264_IS_SP_SLICE(slice_hdr)))
3559 num_weight_tables = 1;
3560 else if (pps->weighted_bipred_idc == 1 && GST_H264_IS_B_SLICE(slice_hdr))
3561 num_weight_tables = 2;
3563 num_weight_tables = 0;
3565 slice_param->luma_log2_weight_denom = 0;
3566 slice_param->chroma_log2_weight_denom = 0;
3567 slice_param->luma_weight_l0_flag = 0;
3568 slice_param->chroma_weight_l0_flag = 0;
3569 slice_param->luma_weight_l1_flag = 0;
3570 slice_param->chroma_weight_l1_flag = 0;
/* No explicit weights needed: leave everything zeroed. */
3572 if (num_weight_tables < 1)
3575 slice_param->luma_log2_weight_denom = w->luma_log2_weight_denom;
3576 slice_param->chroma_log2_weight_denom = w->chroma_log2_weight_denom;
/* List 0 weights (P/SP/B slices). */
3578 slice_param->luma_weight_l0_flag = 1;
3579 for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3580 slice_param->luma_weight_l0[i] = w->luma_weight_l0[i];
3581 slice_param->luma_offset_l0[i] = w->luma_offset_l0[i];
/* Chroma weights only exist when ChromaArrayType != 0 (i.e. not
   monochrome / separate colour planes). */
3584 slice_param->chroma_weight_l0_flag = sps->chroma_array_type != 0;
3585 if (slice_param->chroma_weight_l0_flag) {
3586 for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3587 for (j = 0; j < 2; j++) {
3588 slice_param->chroma_weight_l0[i][j] = w->chroma_weight_l0[i][j];
3589 slice_param->chroma_offset_l0[i][j] = w->chroma_offset_l0[i][j];
/* List 1 weights (B slices only). */
3594 if (num_weight_tables < 2)
3597 slice_param->luma_weight_l1_flag = 1;
3598 for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3599 slice_param->luma_weight_l1[i] = w->luma_weight_l1[i];
3600 slice_param->luma_offset_l1[i] = w->luma_offset_l1[i];
3603 slice_param->chroma_weight_l1_flag = sps->chroma_array_type != 0;
3604 if (slice_param->chroma_weight_l1_flag) {
3605 for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3606 for (j = 0; j < 2; j++) {
3607 slice_param->chroma_weight_l1[i][j] = w->chroma_weight_l1[i][j];
3608 slice_param->chroma_offset_l1[i][j] = w->chroma_offset_l1[i][j];
/* Fills RefPicList0/RefPicList1 of the VASliceParameterBufferH264 from
 * the decoder's reference picture lists: zero lists for I slices, one
 * list (L0) for P/SP slices, two lists for B slices. Entries beyond the
 * populated references are reset to "invalid" VA pictures. */
3616 fill_RefPicList(GstVaapiDecoderH264 *decoder,
3617 GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3619 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3620 VASliceParameterBufferH264 * const slice_param = slice->param;
3621 guint i, num_ref_lists = 0;
3623 slice_param->num_ref_idx_l0_active_minus1 = 0;
3624 slice_param->num_ref_idx_l1_active_minus1 = 0;
3626 if (GST_H264_IS_B_SLICE(slice_hdr))
3628 else if (GST_H264_IS_I_SLICE(slice_hdr))
/* I slice: no reference lists to fill. */
3633 if (num_ref_lists < 1)
3636 slice_param->num_ref_idx_l0_active_minus1 =
3637 slice_hdr->num_ref_idx_l0_active_minus1;
3639 for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
3640 vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList0[i],
3641 priv->RefPicList0[i]);
3642 for (; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
3643 vaapi_init_picture(&slice_param->RefPicList0[i]);
/* P/SP slice: list 1 is not used. */
3645 if (num_ref_lists < 2)
3648 slice_param->num_ref_idx_l1_active_minus1 =
3649 slice_hdr->num_ref_idx_l1_active_minus1;
3651 for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
3652 vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList1[i],
3653 priv->RefPicList1[i]);
3654 for (; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
3655 vaapi_init_picture(&slice_param->RefPicList1[i]);
/* Fills in the VASliceParameterBufferH264 for one slice: bit offset of
 * the slice data, scalar slice-header fields, then the reference picture
 * lists and prediction weight tables via the dedicated helpers. */
3660 fill_slice(GstVaapiDecoderH264 *decoder,
3661 GstVaapiSlice *slice, GstVaapiParserInfoH264 *pi)
3663 VASliceParameterBufferH264 * const slice_param = slice->param;
3664 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3666 /* Fill in VASliceParameterBufferH264 */
3667 slice_param->slice_data_bit_offset =
3668 get_slice_data_bit_offset(slice_hdr, pi->nalu.header_bytes);
3669 slice_param->first_mb_in_slice = slice_hdr->first_mb_in_slice;
/* Slice types repeat with period 5 (e.g. 5..9 mirror 0..4); VA expects
   the canonical 0..4 value. */
3670 slice_param->slice_type = slice_hdr->type % 5;
3671 slice_param->direct_spatial_mv_pred_flag = slice_hdr->direct_spatial_mv_pred_flag;
3672 slice_param->cabac_init_idc = slice_hdr->cabac_init_idc;
3673 slice_param->slice_qp_delta = slice_hdr->slice_qp_delta;
3674 slice_param->disable_deblocking_filter_idc = slice_hdr->disable_deblocking_filter_idc;
3675 slice_param->slice_alpha_c0_offset_div2 = slice_hdr->slice_alpha_c0_offset_div2;
3676 slice_param->slice_beta_offset_div2 = slice_hdr->slice_beta_offset_div2;
3678 if (!fill_RefPicList(decoder, slice, slice_hdr))
3680 if (!fill_pred_weight_table(decoder, slice, slice_hdr))
/* Decodes one slice NAL unit: verifies enough headers were received,
 * activates PPS/SPS, maps the input buffer, allocates a GstVaapiSlice
 * over the slice payload, initializes the reference lists and slice
 * parameters, then appends the slice to the current picture. */
3685 static GstVaapiDecoderStatus
3686 decode_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3688 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3689 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3690 GstVaapiPictureH264 * const picture = priv->current_picture;
3691 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3692 GstVaapiSlice *slice;
3693 GstBuffer * const buffer =
3694 GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
3695 GstMapInfo map_info;
3697 GST_DEBUG("slice (%u bytes)", pi->nalu.size);
/* Without valid picture headers, skip the slice instead of erroring out
   so the stream can recover once headers arrive. */
3699 if (!is_valid_state(pi->state,
3700 GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS)) {
3701 GST_WARNING("failed to receive enough headers to decode slice");
3702 return GST_VAAPI_DECODER_STATUS_SUCCESS;
3705 if (!ensure_pps(decoder, slice_hdr->pps)) {
3706 GST_ERROR("failed to activate PPS");
3707 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3710 if (!ensure_sps(decoder, slice_hdr->pps->sequence)) {
3711 GST_ERROR("failed to activate SPS");
3712 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3715 if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
3716 GST_ERROR("failed to map buffer");
3717 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3720 /* Check whether this is the first/last slice in the current access unit */
3721 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3722 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_START)
3723 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)
3724 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
3726 slice = GST_VAAPI_SLICE_NEW(H264, decoder,
3727 (map_info.data + unit->offset + pi->nalu.offset), pi->nalu.size);
/* The slice object owns a copy of the data; unmap before error checks. */
3728 gst_buffer_unmap(buffer, &map_info);
3730 GST_ERROR("failed to allocate slice");
3731 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3734 init_picture_refs(decoder, picture, slice_hdr);
3735 if (!fill_slice(decoder, slice, pi)) {
3736 gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(slice));
3737 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3740 gst_vaapi_picture_add_slice(GST_VAAPI_PICTURE_CAST(picture), slice);
3741 picture->last_slice_hdr = slice_hdr;
3742 priv->decoder_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
3743 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Scans the adapter for an H.264 start code (00 00 01, matched through
 * the 0xffffff00/0x00000100 mask pair); returns its offset, or a
 * negative value when not found. */
3747 scan_for_start_code(GstAdapter *adapter, guint ofs, guint size, guint32 *scp)
3749 return (gint)gst_adapter_masked_scan_uint32_peek(adapter,
3750 0xffffff00, 0x00000100,
/* Dispatches one parsed decoder unit to the NAL-type-specific decode
 * function (SPS/subset SPS/PPS/slice/sequence-end); SEI units are
 * accepted without further processing, unknown types are rejected as
 * bitstream errors. */
3755 static GstVaapiDecoderStatus
3756 decode_unit(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3758 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3759 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3760 GstVaapiDecoderStatus status;
3762 priv->decoder_state |= pi->state;
3763 switch (pi->nalu.type) {
3764 case GST_H264_NAL_SPS:
3765 status = decode_sps(decoder, unit);
3767 case GST_H264_NAL_SUBSET_SPS:
3768 status = decode_subset_sps(decoder, unit);
3770 case GST_H264_NAL_PPS:
3771 status = decode_pps(decoder, unit);
3773 case GST_H264_NAL_SLICE_EXT:
3774 case GST_H264_NAL_SLICE_IDR:
3775 /* fall-through. IDR specifics are handled in init_picture() */
3776 case GST_H264_NAL_SLICE:
3777 status = decode_slice(decoder, unit);
3779 case GST_H264_NAL_SEQ_END:
3780 case GST_H264_NAL_STREAM_END:
3781 status = decode_sequence_end(decoder);
3783 case GST_H264_NAL_SEI:
3784 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3787 GST_WARNING("unsupported NAL unit type %d", pi->nalu.type);
3788 status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
/* Decodes avcC-formatted codec-data: reads the NAL length size and the
 * embedded SPS and PPS NAL units, parsing and decoding each in turn.
 * On success, marks the stream as avcC (length-prefixed NAL units).
 * Any temporary parser info is released on all exit paths. */
3794 static GstVaapiDecoderStatus
3795 gst_vaapi_decoder_h264_decode_codec_data(GstVaapiDecoder *base_decoder,
3796 const guchar *buf, guint buf_size)
3798 GstVaapiDecoderH264 * const decoder =
3799 GST_VAAPI_DECODER_H264_CAST(base_decoder);
3800 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3801 GstVaapiDecoderStatus status;
3802 GstVaapiDecoderUnit unit;
3803 GstVaapiParserInfoH264 *pi = NULL;
3804 GstH264ParserResult result;
3805 guint i, ofs, num_sps, num_pps;
3807 unit.parsed_info = NULL;
3810 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3813 GST_ERROR("failed to decode codec-data, not in avcC format");
3814 return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
/* avcC byte 4, low 2 bits: lengthSizeMinusOne. */
3817 priv->nal_length_size = (buf[4] & 0x03) + 1;
/* avcC byte 5, low 5 bits: numOfSequenceParameterSets. */
3819 num_sps = buf[5] & 0x1f;
3822 for (i = 0; i < num_sps; i++) {
3823 pi = gst_vaapi_parser_info_h264_new();
3825 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3826 unit.parsed_info = pi;
3828 result = gst_h264_parser_identify_nalu_avc(
3830 buf, ofs, buf_size, 2,
3833 if (result != GST_H264_PARSER_OK) {
3834 status = get_status(result);
3838 status = parse_sps(decoder, &unit);
3839 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3841 ofs = pi->nalu.offset + pi->nalu.size;
3843 status = decode_sps(decoder, &unit);
3844 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3846 gst_vaapi_parser_info_h264_replace(&pi, NULL);
3852 for (i = 0; i < num_pps; i++) {
3853 pi = gst_vaapi_parser_info_h264_new();
3855 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3856 unit.parsed_info = pi;
3858 result = gst_h264_parser_identify_nalu_avc(
3860 buf, ofs, buf_size, 2,
3863 if (result != GST_H264_PARSER_OK) {
3864 status = get_status(result);
3868 status = parse_pps(decoder, &unit);
3869 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3871 ofs = pi->nalu.offset + pi->nalu.size;
3873 status = decode_pps(decoder, &unit);
3874 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3876 gst_vaapi_parser_info_h264_replace(&pi, NULL);
3879 priv->is_avcC = TRUE;
3880 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Common cleanup: drop any pending parser info before returning. */
3883 gst_vaapi_parser_info_h264_replace(&pi, NULL);
/* Lazily opens the decoder on first use and decodes any pending
 * codec-data (e.g. avcC from caps) before regular decoding starts. */
3887 static GstVaapiDecoderStatus
3888 ensure_decoder(GstVaapiDecoderH264 *decoder)
3890 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3891 GstVaapiDecoderStatus status;
3893 if (!priv->is_opened) {
3894 priv->is_opened = gst_vaapi_decoder_h264_open(decoder);
3895 if (!priv->is_opened)
3896 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
3898 status = gst_vaapi_decoder_decode_codec_data(
3899 GST_VAAPI_DECODER_CAST(decoder));
3900 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3903 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parser vfunc: extracts the next NAL unit from @adapter (handling both
 * avcC length-prefixed and byte-stream start-code formats), identifies
 * and pre-parses it, then sets the decoder-unit flags that delimit
 * frames and access units (AU_START/AU_END, FRAME_START/FRAME_END,
 * SKIP, SLICE). */
3906 static GstVaapiDecoderStatus
3907 gst_vaapi_decoder_h264_parse(GstVaapiDecoder *base_decoder,
3908 GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
3910 GstVaapiDecoderH264 * const decoder =
3911 GST_VAAPI_DECODER_H264_CAST(base_decoder);
3912 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3913 GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
3914 GstVaapiParserInfoH264 *pi;
3915 GstVaapiDecoderStatus status;
3916 GstH264ParserResult result;
3918 guint i, size, buf_size, nalu_size, flags;
3921 gboolean at_au_end = FALSE;
3923 status = ensure_decoder(decoder);
3924 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* With NALU/AU alignment, each input buffer is self-contained, so only
   the fast (contiguous) portion of the adapter needs to be considered. */
3927 switch (priv->stream_alignment) {
3928 case GST_VAAPI_STREAM_ALIGN_H264_NALU:
3929 case GST_VAAPI_STREAM_ALIGN_H264_AU:
3930 size = gst_adapter_available_fast(adapter);
3933 size = gst_adapter_available(adapter);
/* avcC mode: the next NAL unit is prefixed by its size encoded on
   nal_length_size big-endian bytes. */
3937 if (priv->is_avcC) {
3938 if (size < priv->nal_length_size)
3939 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3941 buf = (guchar *)&start_code;
3942 g_assert(priv->nal_length_size <= sizeof(start_code));
3943 gst_adapter_copy(adapter, buf, 0, priv->nal_length_size);
3946 for (i = 0; i < priv->nal_length_size; i++)
3947 nalu_size = (nalu_size << 8) | buf[i];
3949 buf_size = priv->nal_length_size + nalu_size;
3950 if (size < buf_size)
3951 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3952 else if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_AU)
3953 at_au_end = (buf_size == size);
3957 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3959 if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_NALU)
/* Byte-stream mode: locate this NAL's start code, then scan for the
   next one to determine the unit size. input_offset2 caches the scan
   position across calls to avoid rescanning from the start. */
3962 ofs = scan_for_start_code(adapter, 0, size, NULL);
3964 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3967 gst_adapter_flush(adapter, ofs);
3971 ofs2 = ps->input_offset2 - ofs - 4;
3975 ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
3976 scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
3978 // Assume the whole NAL unit is present if end-of-stream
3979 // or stream buffers aligned on access unit boundaries
3980 if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_AU)
3983 ps->input_offset2 = size;
3984 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3991 ps->input_offset2 = 0;
3993 buf = (guchar *)gst_adapter_map(adapter, buf_size);
3995 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3997 unit->size = buf_size;
3999 pi = gst_vaapi_parser_info_h264_new();
4001 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
/* The unit takes ownership of the parser info (unref on destroy). */
4003 gst_vaapi_decoder_unit_set_parsed_info(unit,
4004 pi, (GDestroyNotify)gst_vaapi_mini_object_unref);
4007 result = gst_h264_parser_identify_nalu_avc(priv->parser,
4008 buf, 0, buf_size, priv->nal_length_size, &pi->nalu);
4010 result = gst_h264_parser_identify_nalu_unchecked(priv->parser,
4011 buf, 0, buf_size, &pi->nalu);
4012 status = get_status(result);
4013 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* Pre-parse headers so AU/picture boundary detection below has the
   slice header and parser state available. */
4016 switch (pi->nalu.type) {
4017 case GST_H264_NAL_SPS:
4018 status = parse_sps(decoder, unit);
4020 case GST_H264_NAL_SUBSET_SPS:
4021 status = parse_subset_sps(decoder, unit);
4023 case GST_H264_NAL_PPS:
4024 status = parse_pps(decoder, unit);
4026 case GST_H264_NAL_SEI:
4027 status = parse_sei(decoder, unit);
4029 case GST_H264_NAL_SLICE_EXT:
4030 if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
4031 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
4035 case GST_H264_NAL_SLICE_IDR:
4036 case GST_H264_NAL_SLICE:
4037 status = parse_slice(decoder, unit);
4040 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
4043 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* Flag computation: decide where frames and access units begin/end. */
4048 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END |
4049 GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4051 switch (pi->nalu.type) {
4052 case GST_H264_NAL_AU_DELIMITER:
4053 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4054 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4056 case GST_H264_NAL_FILLER_DATA:
4057 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4059 case GST_H264_NAL_STREAM_END:
4060 flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
4062 case GST_H264_NAL_SEQ_END:
4063 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
4064 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4066 case GST_H264_NAL_SPS:
4067 case GST_H264_NAL_SUBSET_SPS:
4068 case GST_H264_NAL_PPS:
4069 case GST_H264_NAL_SEI:
4070 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4071 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4073 case GST_H264_NAL_SLICE_EXT:
4074 if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
4075 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4079 case GST_H264_NAL_SLICE_IDR:
4080 case GST_H264_NAL_SLICE:
4081 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
4082 if (priv->prev_pi &&
4083 (priv->prev_pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)) {
4084 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4085 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4087 else if (is_new_picture(pi, priv->prev_slice_pi)) {
4088 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4089 if (is_new_access_unit(pi, priv->prev_slice_pi))
4090 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4092 gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, pi);
4094 case GST_H264_NAL_SPS_EXT:
4095 case GST_H264_NAL_SLICE_AUX:
4096 /* skip SPS extension and auxiliary slice for now */
4097 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4099 case GST_H264_NAL_PREFIX_UNIT:
4100 /* skip Prefix NAL units for now */
4101 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP |
4102 GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4103 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
/* NAL types 14..18 (reserved / prefix range) also start a new AU
   per 7.4.1.2.3. */
4106 if (pi->nalu.type >= 14 && pi->nalu.type <= 18)
4107 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4108 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
/* Starting a new AU implies the previous slice ended the prior AU. */
4111 if ((flags & GST_VAAPI_DECODER_UNIT_FLAGS_AU) && priv->prev_slice_pi)
4112 priv->prev_slice_pi->flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4113 GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
/* The mapped adapter data is transient: drop the dangling pointer. */
4115 pi->nalu.data = NULL;
4116 pi->state = priv->parser_state;
4118 gst_vaapi_parser_info_h264_replace(&priv->prev_pi, pi);
4119 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Decode vfunc: ensures the decoder is opened/initialized, then
 * dispatches the parsed unit to decode_unit(). */
4122 static GstVaapiDecoderStatus
4123 gst_vaapi_decoder_h264_decode(GstVaapiDecoder *base_decoder,
4124 GstVaapiDecoderUnit *unit)
4126 GstVaapiDecoderH264 * const decoder =
4127 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4128 GstVaapiDecoderStatus status;
4130 status = ensure_decoder(decoder);
4131 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
4133 return decode_unit(decoder, unit);
/* start_frame vfunc: begins a new coded picture for the frame. */
4136 static GstVaapiDecoderStatus
4137 gst_vaapi_decoder_h264_start_frame(GstVaapiDecoder *base_decoder,
4138 GstVaapiDecoderUnit *unit)
4140 GstVaapiDecoderH264 * const decoder =
4141 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4143 return decode_picture(decoder, unit);
/* end_frame vfunc: finishes decoding of the current picture. */
4146 static GstVaapiDecoderStatus
4147 gst_vaapi_decoder_h264_end_frame(GstVaapiDecoder *base_decoder)
4149 GstVaapiDecoderH264 * const decoder =
4150 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4152 return decode_current_picture(decoder);
/* flush vfunc: drains/outputs all pictures held in the DPB. */
4155 static GstVaapiDecoderStatus
4156 gst_vaapi_decoder_h264_flush(GstVaapiDecoder *base_decoder)
4158 GstVaapiDecoderH264 * const decoder =
4159 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4161 dpb_flush(decoder, NULL);
4162 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Initializes the class vtable: object size/finalizer and all decoder
 * virtual functions (create/destroy/parse/decode/frame/flush/codec-data). */
4166 gst_vaapi_decoder_h264_class_init(GstVaapiDecoderH264Class *klass)
4168 GstVaapiMiniObjectClass * const object_class =
4169 GST_VAAPI_MINI_OBJECT_CLASS(klass);
4170 GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
4172 object_class->size = sizeof(GstVaapiDecoderH264);
4173 object_class->finalize = (GDestroyNotify)gst_vaapi_decoder_finalize;
4175 decoder_class->create = gst_vaapi_decoder_h264_create;
4176 decoder_class->destroy = gst_vaapi_decoder_h264_destroy;
4177 decoder_class->parse = gst_vaapi_decoder_h264_parse;
4178 decoder_class->decode = gst_vaapi_decoder_h264_decode;
4179 decoder_class->start_frame = gst_vaapi_decoder_h264_start_frame;
4180 decoder_class->end_frame = gst_vaapi_decoder_h264_end_frame;
4181 decoder_class->flush = gst_vaapi_decoder_h264_flush;
4183 decoder_class->decode_codec_data =
4184 gst_vaapi_decoder_h264_decode_codec_data;
/* Returns the singleton decoder class, lazily initialized in a
 * thread-safe way via g_once_init_enter/leave. */
4187 static inline const GstVaapiDecoderClass *
4188 gst_vaapi_decoder_h264_class(void)
4190 static GstVaapiDecoderH264Class g_class;
4191 static gsize g_class_init = FALSE;
4193 if (g_once_init_enter(&g_class_init)) {
4194 gst_vaapi_decoder_h264_class_init(&g_class);
4195 g_once_init_leave(&g_class_init, TRUE);
4197 return GST_VAAPI_DECODER_CLASS(&g_class);
4201 * gst_vaapi_decoder_h264_set_alignment:
4202 * @decoder: a #GstVaapiDecoderH264
4203 * @alignment: the #GstVaapiStreamAlignH264
4205 * Specifies how stream buffers are aligned / fed, i.e. the boundaries
4206 * of each buffer that is supplied to the decoder. This could be no
4207 * specific alignment, NAL unit boundaries, or access unit boundaries.
4210 gst_vaapi_decoder_h264_set_alignment(GstVaapiDecoderH264 *decoder,
4211 GstVaapiStreamAlignH264 alignment)
4213 g_return_if_fail(decoder != NULL);
4215 decoder->priv.stream_alignment = alignment;
4219 * gst_vaapi_decoder_h264_new:
4220 * @display: a #GstVaapiDisplay
4221 * @caps: a #GstCaps holding codec information
4223 * Creates a new #GstVaapiDecoder for H.264 decoding. The @caps can
4224 * hold extra information like codec-data and picture coded size.
4226 * Return value: the newly allocated #GstVaapiDecoder object
4229 gst_vaapi_decoder_h264_new(GstVaapiDisplay *display, GstCaps *caps)
4231 return gst_vaapi_decoder_new(gst_vaapi_decoder_h264_class(), display, caps);