2 * gstvaapidecoder_h264.c - H.264 decoder
4 * Copyright (C) 2011-2014 Intel Corporation
5 * Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public License
9 * as published by the Free Software Foundation; either version 2.1
10 * of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, write to the Free
19 * Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
20 * Boston, MA 02110-1301 USA
24 * SECTION:gstvaapidecoder_h264
25 * @short_description: H.264 decoder
30 #include <gst/base/gstadapter.h>
31 #include <gst/codecparsers/gsth264parser.h>
32 #include "gstvaapidecoder_h264.h"
33 #include "gstvaapidecoder_objects.h"
34 #include "gstvaapidecoder_priv.h"
35 #include "gstvaapidisplay_priv.h"
36 #include "gstvaapiobject_priv.h"
37 #include "gstvaapiutils_h264_priv.h"
40 #include "gstvaapidebug.h"
42 /* Defined to 1 if strict ordering of DPB is needed. Only useful for debug */
43 #define USE_STRICT_DPB_ORDERING 0
/* Forward typedefs for the decoder's private types (definitions below). */
45 typedef struct _GstVaapiDecoderH264Private GstVaapiDecoderH264Private;
46 typedef struct _GstVaapiDecoderH264Class GstVaapiDecoderH264Class;
47 typedef struct _GstVaapiFrameStore GstVaapiFrameStore;
48 typedef struct _GstVaapiFrameStoreClass GstVaapiFrameStoreClass;
49 typedef struct _GstVaapiParserInfoH264 GstVaapiParserInfoH264;
50 typedef struct _GstVaapiPictureH264 GstVaapiPictureH264;
/* Indices into GstVaapiPictureH264::field_poc[].
 * NOTE(review): the matching TOP_FIELD (presumably 0) definition is not
 * visible in this chunk — confirm against the full file. */
52 // Used for field_poc[]
54 #define BOTTOM_FIELD 1
56 /* ------------------------------------------------------------------------- */
57 /* --- H.264 Parser Info --- */
58 /* ------------------------------------------------------------------------- */
61 * Extended decoder unit flags:
63 * @GST_VAAPI_DECODER_UNIT_AU_START: marks the start of an access unit.
64 * @GST_VAAPI_DECODER_UNIT_AU_END: marks the end of an access unit.
/* Extra per-unit flags layered on top of the base decoder-unit flag space,
 * starting at GST_VAAPI_DECODER_UNIT_FLAG_LAST. */
67 /* This flag does not strictly follow the definitions (7.4.1.2.3)
68 for detecting the start of an access unit as we are only
69 interested in knowing if the current slice is the first one or
70 the last one in the current access unit */
71 GST_VAAPI_DECODER_UNIT_FLAG_AU_START = (
72 GST_VAAPI_DECODER_UNIT_FLAG_LAST << 0),
73 GST_VAAPI_DECODER_UNIT_FLAG_AU_END = (
74 GST_VAAPI_DECODER_UNIT_FLAG_LAST << 1),
/* Convenience mask covering both AU boundary flags. */
76 GST_VAAPI_DECODER_UNIT_FLAGS_AU = (
77 GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
78 GST_VAAPI_DECODER_UNIT_FLAG_AU_END),
/* Cast helper for GstVaapiParserInfoH264 pointers. */
81 #define GST_VAAPI_PARSER_INFO_H264(obj) \
82 ((GstVaapiParserInfoH264 *)(obj))
/* Refcounted container holding the parse result for one NAL unit.
 * NOTE(review): the nalu field and the data union (sps/pps/sei members
 * referenced by the finalizer below) are not visible in this chunk. */
84 struct _GstVaapiParserInfoH264 {
85 GstVaapiMiniObject parent_instance;
91 GstH264SliceHdr slice_hdr;
94 guint flags; // Same as decoder unit flags (persistent)
95 guint view_id; // View ID of slice
96 guint voc; // View order index (VOIdx) of slice
/* Mini-object finalizer: releases the payload owned by the parser info,
 * dispatching on the NAL unit type — SPS/subset-SPS contents are cleared,
 * the SEI message array is unreffed. */
100 gst_vaapi_parser_info_h264_finalize(GstVaapiParserInfoH264 *pi)
102 switch (pi->nalu.type) {
103 case GST_H264_NAL_SPS:
104 case GST_H264_NAL_SUBSET_SPS:
105 gst_h264_sps_clear(&pi->data.sps);
107 case GST_H264_NAL_SEI:
109 g_array_unref(pi->data.sei);
/* Returns the singleton mini-object class describing the parser info
 * type (size + finalizer). */
116 static inline const GstVaapiMiniObjectClass *
117 gst_vaapi_parser_info_h264_class(void)
119 static const GstVaapiMiniObjectClass GstVaapiParserInfoH264Class = {
120 .size = sizeof(GstVaapiParserInfoH264),
121 .finalize = (GDestroyNotify)gst_vaapi_parser_info_h264_finalize
123 return &GstVaapiParserInfoH264Class;
/* Allocates a new, zero-initialized parser info object. */
126 static inline GstVaapiParserInfoH264 *
127 gst_vaapi_parser_info_h264_new(void)
129 return (GstVaapiParserInfoH264 *)
130 gst_vaapi_mini_object_new(gst_vaapi_parser_info_h264_class());
/* Refcounting helpers — thin wrappers over the mini-object API. */
133 #define gst_vaapi_parser_info_h264_ref(pi) \
134 gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(pi))
136 #define gst_vaapi_parser_info_h264_unref(pi) \
137 gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(pi))
/* Atomically replaces *old_pi_ptr with new_pi, adjusting refcounts. */
139 #define gst_vaapi_parser_info_h264_replace(old_pi_ptr, new_pi) \
140 gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_pi_ptr), \
141 (GstVaapiMiniObject *)(new_pi))
143 /* ------------------------------------------------------------------------- */
144 /* --- H.264 Pictures --- */
145 /* ------------------------------------------------------------------------- */
148 * Extended picture flags:
150 * @GST_VAAPI_PICTURE_FLAG_IDR: flag that specifies an IDR picture
151 * @GST_VAAPI_PICTURE_FLAG_INTER_VIEW: flag that indicates the picture
152 * may be used for inter-view prediction
153 * @GST_VAAPI_PICTURE_FLAG_ANCHOR: flag that specifies an anchor picture,
154 * i.e. a picture that is decoded with only inter-view prediction,
155 * and not inter prediction
156 * @GST_VAAPI_PICTURE_FLAG_AU_START: flag that marks the start of an
158 * @GST_VAAPI_PICTURE_FLAG_AU_END: flag that marks the end of an
160 * @GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE: flag that specifies
161 * "used for short-term reference"
162 * @GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE: flag that specifies
163 * "used for long-term reference"
164 * @GST_VAAPI_PICTURE_FLAGS_REFERENCE: mask covering any kind of
165 * reference picture (short-term reference or long-term reference)
/* Extra picture flags layered above GST_VAAPI_PICTURE_FLAG_LAST. */
168 GST_VAAPI_PICTURE_FLAG_IDR = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
169 GST_VAAPI_PICTURE_FLAG_REFERENCE2 = (GST_VAAPI_PICTURE_FLAG_LAST << 1),
170 GST_VAAPI_PICTURE_FLAG_INTER_VIEW = (GST_VAAPI_PICTURE_FLAG_LAST << 2),
171 GST_VAAPI_PICTURE_FLAG_ANCHOR = (GST_VAAPI_PICTURE_FLAG_LAST << 3),
172 GST_VAAPI_PICTURE_FLAG_AU_START = (GST_VAAPI_PICTURE_FLAG_LAST << 4),
173 GST_VAAPI_PICTURE_FLAG_AU_END = (GST_VAAPI_PICTURE_FLAG_LAST << 5),
/* Short-term = base REFERENCE bit alone; long-term = REFERENCE plus the
 * REFERENCE2 marker bit, so the two can be distinguished by masking. */
175 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE = (
176 GST_VAAPI_PICTURE_FLAG_REFERENCE),
177 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE = (
178 GST_VAAPI_PICTURE_FLAG_REFERENCE | GST_VAAPI_PICTURE_FLAG_REFERENCE2),
179 GST_VAAPI_PICTURE_FLAGS_REFERENCE = (
180 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
181 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
/* Predicate macros over the flags above. The *_TERM_REFERENCE checks mask
 * with FLAGS_REFERENCE and compare for equality so that short-term does
 * not match a long-term picture and vice versa. */
184 #define GST_VAAPI_PICTURE_IS_IDR(picture) \
185 (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR))
187 #define GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture) \
188 ((GST_VAAPI_PICTURE_FLAGS(picture) & \
189 GST_VAAPI_PICTURE_FLAGS_REFERENCE) == \
190 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE)
192 #define GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture) \
193 ((GST_VAAPI_PICTURE_FLAGS(picture) & \
194 GST_VAAPI_PICTURE_FLAGS_REFERENCE) == \
195 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE)
197 #define GST_VAAPI_PICTURE_IS_INTER_VIEW(picture) \
198 (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW))
200 #define GST_VAAPI_PICTURE_IS_ANCHOR(picture) \
201 (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_ANCHOR))
/* Cast helper for GstVaapiPictureH264 pointers. */
203 #define GST_VAAPI_PICTURE_H264(picture) \
204 ((GstVaapiPictureH264 *)(picture))
/* H.264-specific picture, extending the base GstVaapiPicture with
 * reference-marking state and field/POC bookkeeping.
 * NOTE(review): the field_poc[2] and structure members used elsewhere in
 * this file are not visible in this chunk of the struct. */
206 struct _GstVaapiPictureH264 {
207 GstVaapiPicture base;
208 GstH264SliceHdr *last_slice_hdr;
211 gint32 frame_num; // Original frame_num from slice_header()
212 gint32 frame_num_wrap; // Temporary for ref pic marking: FrameNumWrap
213 gint32 long_term_frame_idx; // Temporary for ref pic marking: LongTermFrameIdx
214 gint32 pic_num; // Temporary for ref pic marking: PicNum
215 gint32 long_term_pic_num; // Temporary for ref pic marking: LongTermPicNum
216 GstVaapiPictureH264 *other_field; // Temporary for ref pic marking: other field in the same frame store
217 guint output_flag : 1; // Picture is intended for display output
218 guint output_needed : 1; // Picture still awaits output (DPB bumping)
/* Declares the codec-object vtable for GstVaapiPictureH264. */
221 GST_VAAPI_CODEC_DEFINE_TYPE(GstVaapiPictureH264, gst_vaapi_picture_h264);
/* Destructor: delegates to the base picture destroy. */
224 gst_vaapi_picture_h264_destroy(GstVaapiPictureH264 *picture)
226 gst_vaapi_picture_destroy(GST_VAAPI_PICTURE(picture));
/* Constructor: creates the base picture, then marks both field POCs as
 * "unset" (G_MAXINT32 sentinel) and clears the output-needed state. */
230 gst_vaapi_picture_h264_create(
231 GstVaapiPictureH264 *picture,
232 const GstVaapiCodecObjectConstructorArgs *args
235 if (!gst_vaapi_picture_create(GST_VAAPI_PICTURE(picture), args))
238 picture->field_poc[0] = G_MAXINT32;
239 picture->field_poc[1] = G_MAXINT32;
240 picture->output_needed = FALSE;
/* Allocates a new H.264 picture bound to the decoder, sized for a
 * VAPictureParameterBufferH264 parameter buffer. */
244 static inline GstVaapiPictureH264 *
245 gst_vaapi_picture_h264_new(GstVaapiDecoderH264 *decoder)
247 return (GstVaapiPictureH264 *)gst_vaapi_codec_object_new(
248 &GstVaapiPictureH264Class,
249 GST_VAAPI_CODEC_BASE(decoder),
250 NULL, sizeof(VAPictureParameterBufferH264),
/* Replaces the reference-marking flags on a picture: clears all reference
 * bits, then sets the supplied ones. When other_field is requested, the
 * same marking is applied to the picture's other field, if any (note the
 * local `picture` is re-pointed at the other field for the second pass). */
256 gst_vaapi_picture_h264_set_reference(
257 GstVaapiPictureH264 *picture,
258 guint reference_flags,
264 GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
265 GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
267 if (!other_field || !(picture = picture->other_field))
269 GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
270 GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
/* Creates the complementary field picture sharing the surface of the
 * supplied picture. Returns NULL if picture is NULL. */
273 static inline GstVaapiPictureH264 *
274 gst_vaapi_picture_h264_new_field(GstVaapiPictureH264 *picture)
276 g_return_val_if_fail(picture, NULL);
278 return (GstVaapiPictureH264 *)gst_vaapi_picture_new_field(&picture->base);
281 /* ------------------------------------------------------------------------- */
282 /* --- Frame Buffers (DPB) --- */
283 /* ------------------------------------------------------------------------- */
/* A DPB frame store: holds one frame or up to two complementary fields.
 * NOTE(review): view_id, structure, num_buffers and output_needed members
 * used throughout this file are not visible in this chunk of the struct. */
285 struct _GstVaapiFrameStore {
287 GstVaapiMiniObject parent_instance;
291 GstVaapiPictureH264 *buffers[2];
/* Finalizer: drops the references held on each stored picture. */
297 gst_vaapi_frame_store_finalize(gpointer object)
299 GstVaapiFrameStore * const fs = object;
302 for (i = 0; i < fs->num_buffers; i++)
303 gst_vaapi_picture_replace(&fs->buffers[i], NULL);
/* Creates a frame store seeded with a single picture (the second field,
 * if any, is attached later via gst_vaapi_frame_store_add()). The store
 * inherits the picture's view_id, structure and output_needed state. */
306 static GstVaapiFrameStore *
307 gst_vaapi_frame_store_new(GstVaapiPictureH264 *picture)
309 GstVaapiFrameStore *fs;
311 static const GstVaapiMiniObjectClass GstVaapiFrameStoreClass = {
312 sizeof(GstVaapiFrameStore),
313 gst_vaapi_frame_store_finalize
316 fs = (GstVaapiFrameStore *)
317 gst_vaapi_mini_object_new(&GstVaapiFrameStoreClass);
321 fs->view_id = picture->base.view_id;
322 fs->structure = picture->structure;
323 fs->buffers[0] = gst_vaapi_picture_ref(picture);
324 fs->buffers[1] = NULL;
326 fs->output_needed = picture->output_needed;
/* Attaches the second field to a frame store that already holds the first
 * field, turning it into a complete frame: the store's structure becomes
 * FRAME, and the field POCs are cross-propagated so that both field
 * pictures end up with a fully populated field_poc[] pair. The incoming
 * picture must be a non-first field (enforced by the g_return checks). */
331 gst_vaapi_frame_store_add(GstVaapiFrameStore *fs, GstVaapiPictureH264 *picture)
335 g_return_val_if_fail(fs->num_buffers == 1, FALSE);
336 g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FRAME(picture), FALSE);
337 g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture), FALSE);
339 gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], picture);
340 if (picture->output_flag) {
341 picture->output_needed = TRUE;
345 fs->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
/* field = index (TOP/BOTTOM) of the newly added field's POC slot. */
347 field = picture->structure == GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD ?
348 TOP_FIELD : BOTTOM_FIELD;
349 g_return_val_if_fail(fs->buffers[0]->field_poc[field] == G_MAXINT32, FALSE);
350 fs->buffers[0]->field_poc[field] = picture->field_poc[field];
351 g_return_val_if_fail(picture->field_poc[!field] == G_MAXINT32, FALSE);
352 picture->field_poc[!field] = fs->buffers[0]->field_poc[!field];
/* Splits a single-frame store into two field pictures for interlaced
 * display: the existing picture becomes the top field, and a new second
 * field is created sharing the same surface, frame_num, POCs and output
 * state. Requires the store to currently hold exactly one buffer. */
357 gst_vaapi_frame_store_split_fields(GstVaapiFrameStore *fs)
359 GstVaapiPictureH264 * const first_field = fs->buffers[0];
360 GstVaapiPictureH264 *second_field;
362 g_return_val_if_fail(fs->num_buffers == 1, FALSE);
364 first_field->base.structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
365 GST_VAAPI_PICTURE_FLAG_SET(first_field, GST_VAAPI_PICTURE_FLAG_INTERLACED);
367 second_field = gst_vaapi_picture_h264_new_field(first_field);
/* The store keeps the only long-lived ref on the second field. */
370 gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], second_field);
371 gst_vaapi_picture_unref(second_field);
373 second_field->frame_num = first_field->frame_num;
374 second_field->field_poc[0] = first_field->field_poc[0];
375 second_field->field_poc[1] = first_field->field_poc[1];
376 second_field->output_flag = first_field->output_flag;
377 if (second_field->output_flag) {
378 second_field->output_needed = TRUE;
/* TRUE if the store holds a complete (progressive or paired) frame. */
384 static inline gboolean
385 gst_vaapi_frame_store_has_frame(GstVaapiFrameStore *fs)
387 return fs->structure == GST_VAAPI_PICTURE_STRUCTURE_FRAME;
/* TRUE if any stored picture is marked as a reference. */
390 static inline gboolean
391 gst_vaapi_frame_store_has_reference(GstVaapiFrameStore *fs)
395 for (i = 0; i < fs->num_buffers; i++) {
396 if (GST_VAAPI_PICTURE_IS_REFERENCE(fs->buffers[i]))
/* TRUE if any stored picture may be used for inter-view prediction. */
403 gst_vaapi_frame_store_has_inter_view(GstVaapiFrameStore *fs)
407 for (i = 0; i < fs->num_buffers; i++) {
408 if (GST_VAAPI_PICTURE_IS_INTER_VIEW(fs->buffers[i]))
/* Refcounting helpers for frame stores, mirroring the parser-info ones. */
414 #define gst_vaapi_frame_store_ref(fs) \
415 gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(fs))
417 #define gst_vaapi_frame_store_unref(fs) \
418 gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(fs))
420 #define gst_vaapi_frame_store_replace(old_fs_p, new_fs) \
421 gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_fs_p), \
422 (GstVaapiMiniObject *)(new_fs))
424 /* ------------------------------------------------------------------------- */
425 /* --- H.264 Decoder --- */
426 /* ------------------------------------------------------------------------- */
/* Cast helper for GstVaapiDecoderH264 pointers. */
428 #define GST_VAAPI_DECODER_H264_CAST(decoder) \
429 ((GstVaapiDecoderH264 *)(decoder))
/* Bitmask tracking which headers have been seen; a picture is decodable
 * once SPS, PPS and at least one slice have all been received. */
432 GST_H264_VIDEO_STATE_GOT_SPS = 1 << 0,
433 GST_H264_VIDEO_STATE_GOT_PPS = 1 << 1,
434 GST_H264_VIDEO_STATE_GOT_SLICE = 1 << 2,
436 GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS = (
437 GST_H264_VIDEO_STATE_GOT_SPS |
438 GST_H264_VIDEO_STATE_GOT_PPS),
439 GST_H264_VIDEO_STATE_VALID_PICTURE = (
440 GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS |
441 GST_H264_VIDEO_STATE_GOT_SLICE)
/* Private decoder state: parser handles, active parameter sets, the DPB,
 * reference picture lists, and POC/frame_num derivation state.
 * NOTE(review): several members referenced elsewhere (dpb_count, dpb_size,
 * dpb_size_max, max_views) fall in gaps of this chunk and are not shown. */
444 struct _GstVaapiDecoderH264Private {
445 GstH264NalParser *parser;
448 GstVaapiStreamAlignH264 stream_alignment;
449 GstVaapiPictureH264 *current_picture;
450 GstVaapiParserInfoH264 *sps[GST_H264_MAX_SPS_COUNT]; // all parsed SPS, by id
451 GstVaapiParserInfoH264 *active_sps;
452 GstVaapiParserInfoH264 *pps[GST_H264_MAX_PPS_COUNT]; // all parsed PPS, by id
453 GstVaapiParserInfoH264 *active_pps;
454 GstVaapiParserInfoH264 *prev_pi;
455 GstVaapiParserInfoH264 *prev_slice_pi;
456 GstVaapiFrameStore **prev_frames; // last output frame store, per VOIdx
457 guint prev_frames_alloc;
458 GstVaapiFrameStore **dpb; // decoded picture buffer (array of frame stores)
463 GstVaapiProfile profile;
464 GstVaapiEntrypoint entrypoint;
465 GstVaapiChromaType chroma_type;
466 GPtrArray *inter_views; // pictures usable for inter-view prediction
467 GstVaapiPictureH264 *short_ref[32]; // short-term reference pictures
468 guint short_ref_count;
469 GstVaapiPictureH264 *long_ref[32]; // long-term reference pictures
470 guint long_ref_count;
471 GstVaapiPictureH264 *RefPicList0[32]; // reference list 0 (P/B slices)
472 guint RefPicList0_count;
473 GstVaapiPictureH264 *RefPicList1[32]; // reference list 1 (B slices)
474 guint RefPicList1_count;
475 guint nal_length_size;
478 gint32 field_poc[2]; // 0:TopFieldOrderCnt / 1:BottomFieldOrderCnt
479 gint32 poc_msb; // PicOrderCntMsb
480 gint32 poc_lsb; // pic_order_cnt_lsb (from slice_header())
481 gint32 prev_poc_msb; // prevPicOrderCntMsb
482 gint32 prev_poc_lsb; // prevPicOrderCntLsb
483 gint32 frame_num_offset; // FrameNumOffset
484 gint32 frame_num; // frame_num (from slice_header())
485 gint32 prev_frame_num; // prevFrameNum
486 gboolean prev_pic_has_mmco5; // prevMmco5Pic
487 gboolean prev_pic_structure; // previous picture structure
490 guint has_context : 1; // VA context has been created
491 guint progressive_sequence : 1;
495 * GstVaapiDecoderH264:
497 * A decoder based on H264.
/* Public instance struct: base decoder + the private state above. */
499 struct _GstVaapiDecoderH264 {
501 GstVaapiDecoder parent_instance;
502 GstVaapiDecoderH264Private priv;
506 * GstVaapiDecoderH264Class:
508 * A decoder class based on H264.
/* Class struct: no H.264-specific class members, only the parent class. */
510 struct _GstVaapiDecoderH264Class {
512 GstVaapiDecoderClass parent_class;
/* Forward declarations for functions defined later in the file. */
516 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
519 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
520 GstVaapiPictureH264 *picture);
/* Frame-store level wrapper: delegates the inter-view reference check to
 * the store's first picture. */
522 static inline gboolean
523 is_inter_view_reference_for_next_frames(GstVaapiDecoderH264 *decoder,
524 GstVaapiFrameStore *fs)
526 return is_inter_view_reference_for_next_pictures(decoder, fs->buffers[0]);
529 /* Determines if the supplied profile is one of the MVC set */
531 is_mvc_profile(GstH264Profile profile)
533 return profile == GST_H264_PROFILE_MULTIVIEW_HIGH ||
534 profile == GST_H264_PROFILE_STEREO_HIGH;
537 /* Determines the view_id from the supplied NAL unit */
/* Non-MVC NAL units belong to the base view (view_id 0). */
539 get_view_id(GstH264NalUnit *nalu)
541 return GST_H264_IS_MVC_NALU(nalu) ? nalu->extension.mvc.view_id : 0;
544 /* Determines the view order index (VOIdx) from the supplied view_id */
/* Scans the SPS MVC extension's view list for a matching view_id; logs an
 * error if not found. Non-MVC SPS are handled by the early bail-out. */
546 get_view_order_index(GstH264SPS *sps, guint16 view_id)
548 GstH264SPSExtMVC *mvc;
551 if (!sps || sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
554 mvc = &sps->extension.mvc;
555 for (i = 0; i <= mvc->num_views_minus1; i++) {
556 if (mvc->view[i].view_id == view_id)
559 GST_ERROR("failed to find VOIdx from view_id (%d)", view_id);
563 /* Determines NumViews */
/* 1 for non-MVC streams; num_views_minus1 + 1 for MVC. */
565 get_num_views(GstH264SPS *sps)
567 return 1 + (sps->extension_type == GST_H264_NAL_EXTENSION_MVC ?
568 sps->extension.mvc.num_views_minus1 : 0);
571 /* Get number of reference frames to use */
/* Derives max_dec_frame_buffering per H.264 Annex A/E: start from the
 * level's MaxDpbMbs limit (Table A-1), allow VUI bitstream restrictions
 * to override it, force 0 for constrained intra-only profiles, then clamp
 * between num_ref_frames and the (MVC-scaled) DPB frame cap. Always
 * returns at least 1. */
573 get_max_dec_frame_buffering(GstH264SPS *sps)
575 guint num_views, max_dpb_frames;
576 guint max_dec_frame_buffering, PicSizeMbs;
577 GstVaapiLevelH264 level;
578 const GstVaapiH264LevelLimits *level_limits;
580 /* Table A-1 - Level limits */
/* level_idc 11 with constraint_set3_flag signals level 1b (A.3.1). */
581 if (G_UNLIKELY(sps->level_idc == 11 && sps->constraint_set3_flag))
582 level = GST_VAAPI_LEVEL_H264_L1b;
584 level = gst_vaapi_utils_h264_get_level(sps->level_idc);
585 level_limits = gst_vaapi_utils_h264_get_level_limits(level);
586 if (G_UNLIKELY(!level_limits)) {
587 GST_FIXME("unsupported level_idc value (%d)", sps->level_idc);
588 max_dec_frame_buffering = 16;
/* PicSizeMbs per 7-26/7-27; fields double the map-unit height. */
591 PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
592 (sps->pic_height_in_map_units_minus1 + 1) *
593 (sps->frame_mbs_only_flag ? 1 : 2));
594 max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;
/* MVC doubles the DPB budget (two views share it). */
596 if (is_mvc_profile(sps->profile_idc))
597 max_dec_frame_buffering <<= 1;
600 if (sps->vui_parameters_present_flag) {
601 GstH264VUIParams * const vui_params = &sps->vui_parameters;
602 if (vui_params->bitstream_restriction_flag)
603 max_dec_frame_buffering = vui_params->max_dec_frame_buffering;
/* constraint_set3_flag on these profiles means intra-only: no DPB. */
605 switch (sps->profile_idc) {
606 case 44: // CAVLC 4:4:4 Intra profile
607 case GST_H264_PROFILE_SCALABLE_HIGH:
608 case GST_H264_PROFILE_HIGH:
609 case GST_H264_PROFILE_HIGH10:
610 case GST_H264_PROFILE_HIGH_422:
611 case GST_H264_PROFILE_HIGH_444:
612 if (sps->constraint_set3_flag)
613 max_dec_frame_buffering = 0;
/* Clamp to [num_ref_frames, 16 * ceil(log2-ish view scaling)]. */
619 num_views = get_num_views(sps);
620 max_dpb_frames = 16 * (num_views > 1 ? g_bit_storage(num_views - 1) : 1);
621 if (max_dec_frame_buffering > max_dpb_frames)
622 max_dec_frame_buffering = max_dpb_frames;
623 else if (max_dec_frame_buffering < sps->num_ref_frames)
624 max_dec_frame_buffering = sps->num_ref_frames;
625 return MAX(1, max_dec_frame_buffering);
/* Removes entries[index] in O(1) by moving the last entry into its slot
 * (does not preserve order). */
629 array_remove_index_fast(void *array, guint *array_length_ptr, guint index)
631 gpointer * const entries = array;
632 guint num_entries = *array_length_ptr;
634 g_return_if_fail(index < num_entries);
636 if (index != --num_entries)
637 entries[index] = entries[num_entries];
638 entries[num_entries] = NULL;
639 *array_length_ptr = num_entries;
/* NOTE(review): the two array_remove_index definitions below are
 * alternative implementations — presumably selected by a
 * USE_STRICT_DPB_ORDERING preprocessor conditional whose #if/#else lines
 * are not visible in this chunk; confirm against the full file. The
 * second variant preserves element order by shifting entries down. */
644 array_remove_index(void *array, guint *array_length_ptr, guint index)
646 array_remove_index_fast(array, array_length_ptr, index);
650 array_remove_index(void *array, guint *array_length_ptr, guint index)
652 gpointer * const entries = array;
653 const guint num_entries = *array_length_ptr - 1;
656 g_return_if_fail(index <= num_entries);
658 for (i = index; i < num_entries; i++)
659 entries[i] = entries[i + 1];
660 entries[num_entries] = NULL;
661 *array_length_ptr = num_entries;
/* Removes array[index] and decrements the paired <array>_count variable. */
665 #define ARRAY_REMOVE_INDEX(array, index) \
666 array_remove_index(array, &array##_count, index)
/* Removes the frame store at `index` from the DPB. With strict ordering
 * the remaining entries are shifted down to preserve order; otherwise the
 * last entry is moved into the freed slot (O(1)). */
669 dpb_remove_index(GstVaapiDecoderH264 *decoder, guint index)
671 GstVaapiDecoderH264Private * const priv = &decoder->priv;
672 guint i, num_frames = --priv->dpb_count;
674 if (USE_STRICT_DPB_ORDERING) {
675 for (i = index; i < num_frames; i++)
676 gst_vaapi_frame_store_replace(&priv->dpb[i], priv->dpb[i + 1]);
678 else if (index != num_frames)
679 gst_vaapi_frame_store_replace(&priv->dpb[index], priv->dpb[num_frames]);
680 gst_vaapi_frame_store_replace(&priv->dpb[num_frames], NULL);
/* NOTE(review): the function name line for this parameter list is not
 * visible in this chunk — from the call sites (dpb_bump, dpb_add) this is
 * dpb_output(): it clears the picture's output_needed flag, decrements
 * the store's pending-output count, and pushes the picture downstream
 * once it is complete. */
685 GstVaapiDecoderH264 *decoder,
686 GstVaapiFrameStore *fs,
687 GstVaapiPictureH264 *picture
690 picture->output_needed = FALSE;
/* Other field of the store still awaits output: nothing to emit yet. */
692 if (--fs->output_needed > 0)
695 if (!GST_VAAPI_PICTURE_IS_COMPLETE(picture))
697 return gst_vaapi_picture_output(GST_VAAPI_PICTURE_CAST(picture));
/* Drops DPB entry i once it is neither awaiting output nor a reference. */
701 dpb_evict(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture, guint i)
703 GstVaapiDecoderH264Private * const priv = &decoder->priv;
704 GstVaapiFrameStore * const fs = priv->dpb[i];
706 if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
707 dpb_remove_index(decoder, i);
710 /* Finds the frame store holding the supplied picture */
/* Linear scan over every buffer of every DPB entry; returns the DPB
 * index, or falls through when the picture is not present. */
712 dpb_find_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
714 GstVaapiDecoderH264Private * const priv = &decoder->priv;
717 for (i = 0; i < priv->dpb_count; i++) {
718 GstVaapiFrameStore * const fs = priv->dpb[i];
719 for (j = 0; j < fs->num_buffers; j++) {
720 if (fs->buffers[j] == picture)
727 /* Finds the picture with the lowest POC that needs to be output */
/* Restricted to the supplied picture's view when one is given. Ties on
 * POC are broken by the lower view order index (VOC). Returns the DPB
 * index and, optionally, the picture itself; -1 when nothing pending. */
729 dpb_find_lowest_poc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
730 GstVaapiPictureH264 **found_picture_ptr)
732 GstVaapiDecoderH264Private * const priv = &decoder->priv;
733 GstVaapiPictureH264 *found_picture = NULL;
734 guint i, j, found_index;
736 for (i = 0; i < priv->dpb_count; i++) {
737 GstVaapiFrameStore * const fs = priv->dpb[i];
738 if (!fs->output_needed)
740 if (picture && picture->base.view_id != fs->view_id)
742 for (j = 0; j < fs->num_buffers; j++) {
743 GstVaapiPictureH264 * const pic = fs->buffers[j];
744 if (!pic->output_needed)
746 if (!found_picture || found_picture->base.poc > pic->base.poc ||
747 (found_picture->base.poc == pic->base.poc &&
748 found_picture->base.voc > pic->base.voc))
749 found_picture = pic, found_index = i;
753 if (found_picture_ptr)
754 *found_picture_ptr = found_picture;
755 return found_picture ? found_index : -1;
758 /* Finds the picture with the lowest VOC that needs to be output */
/* Companion to dpb_find_lowest_poc for MVC: among other views' pictures
 * sharing the supplied picture's POC (i.e. the same access unit), picks
 * the one with the lowest view order index. Returns -1 if none. */
760 dpb_find_lowest_voc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
761 GstVaapiPictureH264 **found_picture_ptr)
763 GstVaapiDecoderH264Private * const priv = &decoder->priv;
764 GstVaapiPictureH264 *found_picture = NULL;
765 guint i, j, found_index;
767 for (i = 0; i < priv->dpb_count; i++) {
768 GstVaapiFrameStore * const fs = priv->dpb[i];
769 if (!fs->output_needed || fs->view_id == picture->base.view_id)
771 for (j = 0; j < fs->num_buffers; j++) {
772 GstVaapiPictureH264 * const pic = fs->buffers[j];
773 if (!pic->output_needed || pic->base.poc != picture->base.poc)
775 if (!found_picture || found_picture->base.voc > pic->base.voc)
776 found_picture = pic, found_index = i;
780 if (found_picture_ptr)
781 *found_picture_ptr = found_picture;
782 return found_picture ? found_index : -1;
/* Outputs, in VOC order, all pending view components from the same access
 * unit as `picture` whose VOC is below `voc`. No-op for single-view
 * streams. Each emitted store is also considered for eviction. */
786 dpb_output_other_views(GstVaapiDecoderH264 *decoder,
787 GstVaapiPictureH264 *picture, guint voc)
789 GstVaapiDecoderH264Private * const priv = &decoder->priv;
790 GstVaapiPictureH264 *found_picture;
794 if (priv->max_views == 1)
797 /* Emit all other view components that were in the same access
798 unit than the picture we have just found */
799 found_picture = picture;
801 found_index = dpb_find_lowest_voc(decoder, found_picture,
803 if (found_index < 0 || found_picture->base.voc >= voc)
805 success = dpb_output(decoder, priv->dpb[found_index], found_picture);
806 dpb_evict(decoder, found_picture, found_index);
/* "Bumping" process (C.4.5.3): outputs the pending picture with the
 * lowest POC, then evicts its store if possible. For MVC, sibling view
 * components of the same access unit are flushed around it so output
 * stays in view order. Returns FALSE when nothing could be output. */
814 dpb_bump(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
816 GstVaapiDecoderH264Private * const priv = &decoder->priv;
817 GstVaapiPictureH264 *found_picture;
821 found_index = dpb_find_lowest_poc(decoder, picture, &found_picture);
/* Emit lower-VOC views of the found AU before the found picture... */
825 if (picture && picture->base.poc != found_picture->base.poc)
826 dpb_output_other_views(decoder, found_picture, found_picture->base.voc);
828 success = dpb_output(decoder, priv->dpb[found_index], found_picture);
829 dpb_evict(decoder, found_picture, found_index);
830 if (priv->max_views == 1)
/* ...and the remaining (higher-VOC) views afterwards. */
833 if (picture && picture->base.poc != found_picture->base.poc)
834 dpb_output_other_views(decoder, found_picture, G_MAXUINT32)
/* Clears DPB entries — all of them when picture is NULL (flush-all), or
 * only those belonging to the picture's view — then compacts the array
 * and resets the per-view previous-frame cache at AU boundaries. */
839 dpb_clear(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
841 GstVaapiDecoderH264Private * const priv = &decoder->priv;
844 for (i = 0; i < priv->dpb_count; i++) {
845 if (picture && picture->base.view_id != priv->dpb[i]->view_id)
847 gst_vaapi_frame_store_replace(&priv->dpb[i], NULL);
850 /* Compact the resulting DPB, i.e. remove holes */
851 for (i = 0, n = 0; i < priv->dpb_count; i++) {
854 priv->dpb[n] = priv->dpb[i];
862 /* Clear previous frame buffers only if this is a "flush-all" operation,
863 or if the picture is the first one in the access unit */
864 if (priv->prev_frames && (!picture ||
865 GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
866 GST_VAAPI_PICTURE_FLAG_AU_START))) {
867 for (i = 0; i < priv->max_views; i++)
868 gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
/* Flush: bump out every pending picture, then clear what remains. */
873 dpb_flush(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
875 while (dpb_bump(decoder, picture))
877 dpb_clear(decoder, picture);
/* MVC housekeeping: removes from the DPB any other-view store that is no
 * longer needed — not pending output, not a reference, and (unless this
 * is the last picture of the AU) not an inter-view reference for
 * upcoming frames. */
881 dpb_prune_mvc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
883 GstVaapiDecoderH264Private * const priv = &decoder->priv;
884 const gboolean is_last_picture = /* in the access unit */
885 GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
888 // Remove all unused inter-view only reference components of the current AU
890 while (i < priv->dpb_count) {
891 GstVaapiFrameStore * const fs = priv->dpb[i];
892 if (fs->view_id != picture->base.view_id &&
893 !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs) &&
895 !is_inter_view_reference_for_next_frames(decoder, fs)))
896 dpb_remove_index(decoder, i);
/* Adds a decoded picture to the DPB, implementing C.4.5.1/C.4.5.2:
 *  - prunes unused MVC stores and (for non-IDR) unused same-view stores;
 *  - pairs a second field with its first field, whether that field is
 *    still in the DPB or was the previously output frame;
 *  - otherwise creates a new frame store, splitting it into fields for
 *    interlaced sequences;
 *  - for reference pictures, bumps until a DPB slot is free; for
 *    non-reference pictures, outputs immediately when the picture would
 *    be the next one out anyway, else bumps. */
903 dpb_add(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
905 GstVaapiDecoderH264Private * const priv = &decoder->priv;
906 GstVaapiFrameStore *fs;
909 if (priv->max_views > 1)
910 dpb_prune_mvc(decoder, picture);
912 // Remove all unused pictures
913 if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
915 while (i < priv->dpb_count) {
916 GstVaapiFrameStore * const fs = priv->dpb[i];
917 if (fs->view_id == picture->base.view_id &&
918 !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
919 dpb_remove_index(decoder, i);
925 // Check if picture is the second field and the first field is still in DPB
926 if (GST_VAAPI_PICTURE_IS_INTERLACED(picture) &&
927 !GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture)) {
928 const gint found_index = dpb_find_picture(decoder,
929 GST_VAAPI_PICTURE_H264(picture->base.parent_picture));
930 if (found_index >= 0)
931 return gst_vaapi_frame_store_add(priv->dpb[found_index], picture);
933 // ... also check the previous picture that was immediately output
934 fs = priv->prev_frames[picture->base.voc];
935 if (fs && &fs->buffers[0]->base == picture->base.parent_picture) {
936 if (!gst_vaapi_frame_store_add(fs, picture))
938 return dpb_output(decoder, fs, picture);
942 // Create new frame store, and split fields if necessary
943 fs = gst_vaapi_frame_store_new(picture);
946 gst_vaapi_frame_store_replace(&priv->prev_frames[picture->base.voc], fs);
947 gst_vaapi_frame_store_unref(fs);
949 if (picture->output_flag) {
950 picture->output_needed = TRUE;
954 if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame(fs)) {
955 if (!gst_vaapi_frame_store_split_fields(fs))
959 // C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
960 if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
961 while (priv->dpb_count == priv->dpb_size) {
962 if (!dpb_bump(decoder, picture))
967 // C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
969 const gboolean StoreInterViewOnlyRefFlag =
970 !GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
971 GST_VAAPI_PICTURE_FLAG_AU_END) &&
972 GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
973 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
974 if (!picture->output_flag && !StoreInterViewOnlyRefFlag)
976 while (priv->dpb_count == priv->dpb_size) {
977 GstVaapiPictureH264 *found_picture;
978 if (!StoreInterViewOnlyRefFlag) {
979 if (dpb_find_lowest_poc(decoder, picture, &found_picture) < 0 ||
980 found_picture->base.poc > picture->base.poc)
981 return dpb_output(decoder, fs, picture);
983 if (!dpb_bump(decoder, picture))
987 gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
/* (Re)sizes the DPB array. Growth reallocates and zero-fills the new
 * tail; shrinking only updates dpb_size, keeping the larger allocation. */
992 dpb_reset(GstVaapiDecoderH264 *decoder, guint dpb_size)
994 GstVaapiDecoderH264Private * const priv = &decoder->priv;
996 if (dpb_size > priv->dpb_size_max) {
997 priv->dpb = g_try_realloc_n(priv->dpb, dpb_size, sizeof(*priv->dpb));
1000 memset(&priv->dpb[priv->dpb_size_max], 0,
1001 (dpb_size - priv->dpb_size_max) * sizeof(*priv->dpb));
1002 priv->dpb_size_max = dpb_size;
1004 priv->dpb_size = dpb_size;
1006 GST_DEBUG("DPB size %u", priv->dpb_size);
/* GPtrArray free-func for inter_views entries: clears the inter-view
 * flag before dropping the reference. */
1011 unref_inter_view(GstVaapiPictureH264 *picture)
1015 GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
1016 gst_vaapi_picture_unref(picture);
1019 /* Resets MVC resources */
/* Lazily creates the inter-view array and resizes prev_frames to exactly
 * max_views entries, releasing stores beyond the new size and
 * NULL-initializing newly added slots. */
1021 mvc_reset(GstVaapiDecoderH264 *decoder)
1023 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1026 // Resize array of inter-view references
1027 if (!priv->inter_views) {
1028 priv->inter_views = g_ptr_array_new_full(priv->max_views,
1029 (GDestroyNotify)unref_inter_view);
1030 if (!priv->inter_views)
1034 // Resize array of previous frame buffers
1035 for (i = priv->max_views; i < priv->prev_frames_alloc; i++)
1036 gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
1038 priv->prev_frames = g_try_realloc_n(priv->prev_frames, priv->max_views,
1039 sizeof(*priv->prev_frames));
1040 if (!priv->prev_frames) {
1041 priv->prev_frames_alloc = 0;
1044 for (i = priv->prev_frames_alloc; i < priv->max_views; i++)
1045 priv->prev_frames[i] = NULL;
1046 priv->prev_frames_alloc = priv->max_views;
/* Maps a GstH264ParserResult to the corresponding decoder status code. */
1050 static GstVaapiDecoderStatus
1051 get_status(GstH264ParserResult result)
1053 GstVaapiDecoderStatus status;
1056 case GST_H264_PARSER_OK:
1057 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
1059 case GST_H264_PARSER_NO_NAL_END:
1060 status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
1062 case GST_H264_PARSER_ERROR:
1063 status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
1066 status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
/* Releases per-stream decoding state: current picture, cached parser
 * infos, the whole DPB, inter-view array and the NAL parser. Active
 * SPS/PPS tables are kept (they are freed in _destroy). */
1073 gst_vaapi_decoder_h264_close(GstVaapiDecoderH264 *decoder)
1075 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1077 gst_vaapi_picture_replace(&priv->current_picture, NULL);
1078 gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, NULL);
1079 gst_vaapi_parser_info_h264_replace(&priv->prev_pi, NULL);
1081 dpb_clear(decoder, NULL);
1083 if (priv->inter_views) {
1084 g_ptr_array_unref(priv->inter_views);
1085 priv->inter_views = NULL;
1089 gst_h264_nal_parser_free(priv->parser);
1090 priv->parser = NULL;
/* (Re)opens the decoder: closes any previous state, then allocates a
 * fresh NAL parser. */
1095 gst_vaapi_decoder_h264_open(GstVaapiDecoderH264 *decoder)
1097 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1099 gst_vaapi_decoder_h264_close(decoder);
1101 priv->parser = gst_h264_nal_parser_new();
/* Full teardown: closes the decoder, frees the prev_frames array, and
 * releases every cached and active SPS/PPS parser info. */
1108 gst_vaapi_decoder_h264_destroy(GstVaapiDecoder *base_decoder)
1110 GstVaapiDecoderH264 * const decoder =
1111 GST_VAAPI_DECODER_H264_CAST(base_decoder);
1112 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1115 gst_vaapi_decoder_h264_close(decoder);
1121 g_free(priv->prev_frames);
1122 priv->prev_frames = NULL;
1123 priv->prev_frames_alloc = 0;
1125 for (i = 0; i < G_N_ELEMENTS(priv->pps); i++)
1126 gst_vaapi_parser_info_h264_replace(&priv->pps[i], NULL);
1127 gst_vaapi_parser_info_h264_replace(&priv->active_pps, NULL);
1129 for (i = 0; i < G_N_ELEMENTS(priv->sps); i++)
1130 gst_vaapi_parser_info_h264_replace(&priv->sps[i], NULL);
1131 gst_vaapi_parser_info_h264_replace(&priv->active_sps, NULL);
/* Instance initialization: sets the defaults used until the first SPS
 * is parsed (unknown profile, VLD entrypoint, 4:2:0, progressive). */
1135 gst_vaapi_decoder_h264_create(GstVaapiDecoder *base_decoder)
1137 GstVaapiDecoderH264 * const decoder =
1138 GST_VAAPI_DECODER_H264_CAST(base_decoder);
1139 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1141 priv->profile = GST_VAAPI_PROFILE_UNKNOWN;
1142 priv->entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
1143 priv->chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
1144 priv->prev_pic_structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
1145 priv->progressive_sequence = TRUE;
1149 /* Activates the supplied PPS */
1151 ensure_pps(GstVaapiDecoderH264 *decoder, GstH264PPS *pps)
1153 GstVaapiDecoderH264Private * const priv = &decoder->priv;
/* Look up the cached parser info by PPS id and make it the active one */
1154 GstVaapiParserInfoH264 * const pi = priv->pps[pps->id];
1156 gst_vaapi_parser_info_h264_replace(&priv->active_pps, pi);
/* NULL when no PPS with this id was previously decoded */
1157 return pi ? &pi->data.pps : NULL;
1160 /* Returns the active PPS */
1161 static inline GstH264PPS *
1162 get_pps(GstVaapiDecoderH264 *decoder)
1164 GstVaapiParserInfoH264 * const pi = decoder->priv.active_pps;
1166 return pi ? &pi->data.pps : NULL;
1169 /* Activate the supplied SPS */
1171 ensure_sps(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1173 GstVaapiDecoderH264Private * const priv = &decoder->priv;
/* Look up the cached parser info by SPS id and make it the active one */
1174 GstVaapiParserInfoH264 * const pi = priv->sps[sps->id];
1176 gst_vaapi_parser_info_h264_replace(&priv->active_sps, pi);
/* NULL when no SPS with this id was previously decoded */
1177 return pi ? &pi->data.sps : NULL;
1180 /* Returns the active SPS */
1181 static inline GstH264SPS *
1182 get_sps(GstVaapiDecoderH264 *decoder)
1184 GstVaapiParserInfoH264 * const pi = decoder->priv.active_sps;
1186 return pi ? &pi->data.sps : NULL;
/* Appends profile to profiles[] and bumps *n_profiles_ptr. A Main
 * profile entry additionally appends High, since High-capable decoders
 * can also decode Main streams. */
1190 fill_profiles(GstVaapiProfile profiles[16], guint *n_profiles_ptr,
1191 GstVaapiProfile profile)
1193 guint n_profiles = *n_profiles_ptr;
1195 profiles[n_profiles++] = profile;
1197 case GST_VAAPI_PROFILE_H264_MAIN:
1198 profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
1203 *n_profiles_ptr = n_profiles;
1206 /* Fills in compatible profiles for MVC decoding */
1208 fill_profiles_mvc(GstVaapiDecoderH264 *decoder, GstVaapiProfile profiles[16],
1209 guint *n_profiles_ptr, guint dpb_size)
1211 const gchar * const vendor_string =
1212 gst_vaapi_display_get_vendor_string(GST_VAAPI_DECODER_DISPLAY(decoder));
1214 gboolean add_high_profile = FALSE;
1219 const struct map *m;
1221 // Drivers that support slice level decoding
/* Match the driver vendor string against a known-good list; only do so
   when the whole DPB fits the non-MVC limit of 16 frames */
1222 if (vendor_string && dpb_size <= 16) {
1223 static const struct map drv_names[] = {
1224 { "Intel i965 driver", 17 },
/* Case-insensitive prefix match (str_len covers the fixed prefix) */
1227 for (m = drv_names; m->str != NULL && !add_high_profile; m++) {
1228 if (g_ascii_strncasecmp(vendor_string, m->str, m->str_len) == 0)
1229 add_high_profile = TRUE;
/* Fall back to plain High profile decoding for those drivers */
1233 if (add_high_profile)
1234 fill_profiles(profiles, n_profiles_ptr, GST_VAAPI_PROFILE_H264_HIGH);
/* Maps the SPS profile_idc to a VA-API profile actually supported by
 * the display, trying spec-compatible fallbacks in order. Returns
 * GST_VAAPI_PROFILE_UNKNOWN when nothing usable is found. */
1237 static GstVaapiProfile
1238 get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps, guint dpb_size)
1240 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1241 GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(decoder);
1242 GstVaapiProfile profile, profiles[4];
1243 guint i, n_profiles = 0;
1245 profile = gst_vaapi_utils_h264_get_profile(sps->profile_idc);
1247 return GST_VAAPI_PROFILE_UNKNOWN;
/* profiles[0] is always the stream's own profile */
1249 fill_profiles(profiles, &n_profiles, profile);
1251 case GST_VAAPI_PROFILE_H264_BASELINE:
1252 if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1253 fill_profiles(profiles, &n_profiles,
1254 GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE);
1255 fill_profiles(profiles, &n_profiles,
1256 GST_VAAPI_PROFILE_H264_MAIN);
1259 case GST_VAAPI_PROFILE_H264_EXTENDED:
1260 if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1261 fill_profiles(profiles, &n_profiles,
1262 GST_VAAPI_PROFILE_H264_MAIN);
1265 case GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH:
/* Two-view Multiview High streams are decodable as Stereo High */
1266 if (priv->max_views == 2) {
1267 fill_profiles(profiles, &n_profiles,
1268 GST_VAAPI_PROFILE_H264_STEREO_HIGH);
1270 fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1272 case GST_VAAPI_PROFILE_H264_STEREO_HIGH:
1273 if (sps->frame_mbs_only_flag) {
1274 fill_profiles(profiles, &n_profiles,
1275 GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH);
1277 fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1283 /* If the preferred profile (profiles[0]) matches one that we already
1284 found, then just return it now instead of searching for it again */
1285 if (profiles[0] == priv->profile)
1286 return priv->profile;
/* Probe the display for the first candidate it can actually decode */
1288 for (i = 0; i < n_profiles; i++) {
1289 if (gst_vaapi_display_has_decoder(display, profiles[i], priv->entrypoint))
1292 return GST_VAAPI_PROFILE_UNKNOWN;
/* (Re)creates the VA decoding context whenever stream parameters
 * derived from the SPS changed: DPB size, profile, chroma format or
 * picture dimensions. Also updates interlacing/PAR on the base decoder
 * and resets the DPB and MVC state when a new context is created. */
1295 static GstVaapiDecoderStatus
1296 ensure_context(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1298 GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
1299 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1300 GstVaapiContextInfo info;
1301 GstVaapiProfile profile;
1302 GstVaapiChromaType chroma_type;
1303 gboolean reset_context = FALSE;
1304 guint mb_width, mb_height, dpb_size, num_views;
/* Track the maximum number of views seen so far (MVC) */
1306 num_views = get_num_views(sps);
1307 if (priv->max_views < num_views) {
1308 priv->max_views = num_views;
1309 GST_DEBUG("maximum number of views changed to %u", num_views);
/* Growing the DPB requires a new context; shrinking does not */
1312 dpb_size = get_max_dec_frame_buffering(sps);
1313 if (priv->dpb_size < dpb_size) {
1314 GST_DEBUG("DPB size increased");
1315 reset_context = TRUE;
1318 profile = get_profile(decoder, sps, dpb_size);
1320 GST_ERROR("unsupported profile_idc %u", sps->profile_idc);
1321 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
/* Keep the existing context on profile changes while decoding MVC
   (max_views > 1); only reset for single-view streams */
1324 if (!priv->profile || (priv->profile != profile && priv->max_views == 1)) {
1325 GST_DEBUG("profile changed");
1326 reset_context = TRUE;
1327 priv->profile = profile;
1330 chroma_type = gst_vaapi_utils_h264_get_chroma_type(sps->chroma_format_idc);
1332 GST_ERROR("unsupported chroma_format_idc %u", sps->chroma_format_idc);
1333 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1336 if (priv->chroma_type != chroma_type) {
1337 GST_DEBUG("chroma format changed");
1338 reset_context = TRUE;
1339 priv->chroma_type = chroma_type;
/* Interlaced content codes a frame as two map units, hence the shift */
1342 mb_width = sps->pic_width_in_mbs_minus1 + 1;
1343 mb_height = (sps->pic_height_in_map_units_minus1 + 1) <<
1344 !sps->frame_mbs_only_flag;
1345 if (priv->mb_width != mb_width || priv->mb_height != mb_height) {
1346 GST_DEBUG("size changed");
1347 reset_context = TRUE;
1348 priv->mb_width = mb_width;
1349 priv->mb_height = mb_height;
1352 priv->progressive_sequence = sps->frame_mbs_only_flag;
1353 gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
1355 gst_vaapi_decoder_set_pixel_aspect_ratio(
1357 sps->vui_parameters.par_n,
1358 sps->vui_parameters.par_d
1361 if (!reset_context && priv->has_context)
1362 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1364 /* XXX: fix surface size when cropping is implemented */
1365 info.profile = priv->profile;
1366 info.entrypoint = priv->entrypoint;
1367 info.chroma_type = priv->chroma_type;
1368 info.width = sps->width;
1369 info.height = sps->height;
1370 info.ref_frames = dpb_size;
1372 if (!gst_vaapi_decoder_ensure_context(GST_VAAPI_DECODER(decoder), &info))
1373 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1374 priv->has_context = TRUE;
/* A new context invalidates the DPB and any inter-view state */
1377 if (!dpb_reset(decoder, dpb_size))
1378 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1380 /* Reset MVC data */
1381 if (!mvc_reset(decoder))
1382 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1383 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Copies the six 4x4 scaling lists from the PPS into the VA IQ matrix
 * buffer, converting from zig-zag scan order to raster order. */
1387 fill_iq_matrix_4x4(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1388 const GstH264SPS *sps)
1392 /* There are always 6 4x4 scaling lists */
1393 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4) == 6);
1394 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4[0]) == 16);
1396 for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++)
1397 gst_h264_quant_matrix_4x4_get_raster_from_zigzag(
1398 iq_matrix->ScalingList4x4[i], pps->scaling_lists_4x4[i]);
/* Copies the 8x8 scaling lists from the PPS into the VA IQ matrix
 * buffer (zig-zag to raster). No-op unless 8x8 transforms are enabled;
 * 2 lists for 4:2:0/4:2:2 streams, 6 for 4:4:4. */
1402 fill_iq_matrix_8x8(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1403 const GstH264SPS *sps)
1407 /* If chroma_format_idc != 3, there are up to 2 8x8 scaling lists */
1408 if (!pps->transform_8x8_mode_flag)
1411 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8) >= 2);
1412 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8[0]) == 64);
1414 n = (sps->chroma_format_idc != 3) ? 2 : 6;
1415 for (i = 0; i < n; i++) {
1416 gst_h264_quant_matrix_8x8_get_raster_from_zigzag(
1417 iq_matrix->ScalingList8x8[i], pps->scaling_lists_8x8[i]);
/* Allocates and fills the VA IQ matrix object for the picture from the
 * active PPS/SPS scaling lists. Fails for 4:4:4 streams (see XXX). */
1421 static GstVaapiDecoderStatus
1422 ensure_quant_matrix(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
1424 GstVaapiPicture * const base_picture = &picture->base;
1425 GstH264PPS * const pps = get_pps(decoder);
1426 GstH264SPS * const sps = get_sps(decoder);
1427 VAIQMatrixBufferH264 *iq_matrix;
/* The IQ matrix object is owned by (and released with) the picture */
1429 base_picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(H264, decoder);
1430 if (!base_picture->iq_matrix) {
1431 GST_ERROR("failed to allocate IQ matrix");
1432 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1434 iq_matrix = base_picture->iq_matrix->param;
1436 /* XXX: we can only support 4:2:0 or 4:2:2 since ScalingLists8x8[]
1437 is not large enough to hold lists for 4:4:4 */
1438 if (sps->chroma_format_idc == 3)
1439 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1441 fill_iq_matrix_4x4(iq_matrix, pps, sps);
1442 fill_iq_matrix_8x8(iq_matrix, pps, sps);
1444 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1447 static inline gboolean
1448 is_valid_state(guint state, guint ref_state)
1450 return (state & ref_state) == ref_state;
/* Finishes the picture currently being decoded: runs reference picture
 * marking, stores the picture into the DPB and submits it to the
 * hardware, then drops the decoder's reference to it. */
1453 static GstVaapiDecoderStatus
1454 decode_current_picture(GstVaapiDecoderH264 *decoder)
1456 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1457 GstVaapiPictureH264 * const picture = priv->current_picture;
/* Only proceed when a complete, valid picture was assembled */
1459 if (!is_valid_state(priv->decoder_state, GST_H264_VIDEO_STATE_VALID_PICTURE))
1461 priv->decoder_state = 0;
1464 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1466 if (!exec_ref_pic_marking(decoder, picture))
1468 if (!dpb_add(decoder, picture))
1470 if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
1472 gst_vaapi_picture_replace(&priv->current_picture, NULL);
1473 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Error path: release the failed picture before returning */
1476 /* XXX: fix for cases where first field failed to be decoded */
1477 gst_vaapi_picture_replace(&priv->current_picture, NULL);
1478 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
/* Drop-frame path: no valid picture, just reset the state machine */
1481 priv->decoder_state = 0;
1482 return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
/* Parses an SPS NAL unit into the unit's parser info and marks the
 * parser state as having seen an SPS. */
1485 static GstVaapiDecoderStatus
1486 parse_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1488 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1489 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1490 GstH264SPS * const sps = &pi->data.sps;
1491 GstH264ParserResult result;
1493 GST_DEBUG("parse SPS");
/* A new SPS invalidates all previously parsed header state */
1495 priv->parser_state = 0;
1497 /* Variables that don't have inferred values per the H.264
1498 standard but that should get a default value anyway */
1499 sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1501 result = gst_h264_parser_parse_sps(priv->parser, &pi->nalu, sps, TRUE);
1502 if (result != GST_H264_PARSER_OK)
1503 return get_status(result);
1505 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1506 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses a subset SPS NAL unit (MVC) into the unit's parser info and
 * marks the parser state as having seen an SPS. */
1509 static GstVaapiDecoderStatus
1510 parse_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1512 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1513 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1514 GstH264SPS * const sps = &pi->data.sps;
1515 GstH264ParserResult result;
1517 GST_DEBUG("parse subset SPS");
1519 /* Variables that don't have inferred values per the H.264
1520 standard but that should get a default value anyway */
1521 sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1523 result = gst_h264_parser_parse_subset_sps(priv->parser, &pi->nalu, sps,
1525 if (result != GST_H264_PARSER_OK)
1526 return get_status(result);
1528 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1529 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses a PPS NAL unit into the unit's parser info and marks the
 * parser state as having seen a PPS. */
1532 static GstVaapiDecoderStatus
1533 parse_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1535 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1536 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1537 GstH264PPS * const pps = &pi->data.pps;
1538 GstH264ParserResult result;
1540 GST_DEBUG("parse PPS");
/* Keep only the GOT_SPS bit: a new PPS invalidates slice-level state */
1542 priv->parser_state &= GST_H264_VIDEO_STATE_GOT_SPS;
1544 /* Variables that don't have inferred values per the H.264
1545 standard but that should get a default value anyway */
1546 pps->slice_group_map_type = 0;
1547 pps->slice_group_change_rate_minus1 = 0;
1549 result = gst_h264_parser_parse_pps(priv->parser, &pi->nalu, pps);
1550 if (result != GST_H264_PARSER_OK)
1551 return get_status(result);
1553 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_PPS;
1554 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses an SEI NAL unit into an array of SEI messages stored in the
 * unit's parser info. */
1557 static GstVaapiDecoderStatus
1558 parse_sei(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1560 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1561 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1562 GArray ** const sei_ptr = &pi->data.sei;
1563 GstH264ParserResult result;
1565 GST_DEBUG("parse SEI");
1567 result = gst_h264_parser_parse_sei(priv->parser, &pi->nalu, sei_ptr);
1568 if (result != GST_H264_PARSER_OK) {
1569 GST_WARNING("failed to parse SEI messages");
1570 return get_status(result);
1572 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses a slice header NAL unit. For MVC, first inherits the NAL
 * extension from a preceding Prefix NAL unit or infers defaults per
 * H.7.4.1.1, then records the view id/order index for the slice. */
1575 static GstVaapiDecoderStatus
1576 parse_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1578 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1579 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1580 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
1581 GstH264NalUnit * const nalu = &pi->nalu;
1583 GstH264ParserResult result;
1585 GST_DEBUG("parse slice");
/* Keep SPS/PPS knowledge; a new slice invalidates slice-level state */
1587 priv->parser_state &= (GST_H264_VIDEO_STATE_GOT_SPS|
1588 GST_H264_VIDEO_STATE_GOT_PPS);
1590 /* Propagate Prefix NAL unit info, if necessary */
1591 switch (nalu->type) {
1592 case GST_H264_NAL_SLICE:
1593 case GST_H264_NAL_SLICE_IDR: {
1594 GstVaapiParserInfoH264 * const prev_pi = priv->prev_pi;
1595 if (prev_pi && prev_pi->nalu.type == GST_H264_NAL_PREFIX_UNIT) {
1596 /* MVC sequences shall have a Prefix NAL unit immediately
1597 preceding this NAL unit */
1598 pi->nalu.extension_type = prev_pi->nalu.extension_type;
1599 pi->nalu.extension = prev_pi->nalu.extension;
1602 /* In the very unlikely case there is no Prefix NAL unit
1603 immediately preceding this NAL unit, try to infer some
1604 defaults (H.7.4.1.1) */
1605 GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
1606 mvc->non_idr_flag = !(nalu->type == GST_H264_NAL_SLICE_IDR);
1607 nalu->idr_pic_flag = !mvc->non_idr_flag;
1608 mvc->priority_id = 0;
1610 mvc->temporal_id = 0;
1611 mvc->anchor_pic_flag = 0;
1612 mvc->inter_view_flag = 1;
1618 /* Variables that don't have inferred values per the H.264
1619 standard but that should get a default value anyway */
1620 slice_hdr->cabac_init_idc = 0;
1621 slice_hdr->direct_spatial_mv_pred_flag = 0;
1623 result = gst_h264_parser_parse_slice_hdr(priv->parser, &pi->nalu,
1624 slice_hdr, TRUE, TRUE);
1625 if (result != GST_H264_PARSER_OK)
1626 return get_status(result);
1628 sps = slice_hdr->pps->sequence;
1630 /* Update MVC data */
1631 pi->view_id = get_view_id(&pi->nalu);
1632 pi->voc = get_view_order_index(sps, pi->view_id);
1634 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
1635 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Stores the parsed SPS into the per-id cache, replacing any previous
 * SPS with the same id. */
1638 static GstVaapiDecoderStatus
1639 decode_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1641 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1642 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1643 GstH264SPS * const sps = &pi->data.sps;
1645 GST_DEBUG("decode SPS");
1647 gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1648 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Stores the parsed subset SPS (MVC) into the per-id SPS cache,
 * replacing any previous entry with the same id. */
1651 static GstVaapiDecoderStatus
1652 decode_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1654 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1655 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1656 GstH264SPS * const sps = &pi->data.sps;
1658 GST_DEBUG("decode subset SPS");
1660 gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1661 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Stores the parsed PPS into the per-id cache, replacing any previous
 * PPS with the same id. */
1664 static GstVaapiDecoderStatus
1665 decode_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1667 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1668 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1669 GstH264PPS * const pps = &pi->data.pps;
1671 GST_DEBUG("decode PPS");
1673 gst_vaapi_parser_info_h264_replace(&priv->pps[pps->id], pi);
1674 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Handles an end-of-sequence NAL unit: finishes the current picture,
 * flushes the DPB and resets sequence-level defaults. */
1677 static GstVaapiDecoderStatus
1678 decode_sequence_end(GstVaapiDecoderH264 *decoder)
1680 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1681 GstVaapiDecoderStatus status;
1683 GST_DEBUG("decode sequence-end");
1685 status = decode_current_picture(decoder);
1686 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* Output every remaining picture held in the DPB */
1689 dpb_flush(decoder, NULL);
1691 /* Reset defaults, should there be a new sequence available next */
1692 priv->max_views = 1;
1693 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1696 /* 8.2.1.1 - Decoding process for picture order count type 0 */
1699 GstVaapiDecoderH264 *decoder,
1700 GstVaapiPictureH264 *picture,
1701 GstH264SliceHdr *slice_hdr
1704 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1705 GstH264SPS * const sps = get_sps(decoder);
1706 const gint32 MaxPicOrderCntLsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
1709 GST_DEBUG("decode picture order count type 0");
/* Derive prevPicOrderCntMsb/Lsb: reset on IDR, special-case a
   preceding picture with memory_management_control_operation == 5 */
1711 if (GST_VAAPI_PICTURE_IS_IDR(picture)) {
1712 priv->prev_poc_msb = 0;
1713 priv->prev_poc_lsb = 0;
1715 else if (priv->prev_pic_has_mmco5) {
1716 priv->prev_poc_msb = 0;
1717 priv->prev_poc_lsb =
1718 (priv->prev_pic_structure == GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD ?
1719 0 : priv->field_poc[TOP_FIELD]);
1722 priv->prev_poc_msb = priv->poc_msb;
1723 priv->prev_poc_lsb = priv->poc_lsb;
/* Compute PicOrderCntMsb, detecting lsb wrap-around in either direction */
1727 priv->poc_lsb = slice_hdr->pic_order_cnt_lsb;
1728 if (priv->poc_lsb < priv->prev_poc_lsb &&
1729 (priv->prev_poc_lsb - priv->poc_lsb) >= (MaxPicOrderCntLsb / 2))
1730 priv->poc_msb = priv->prev_poc_msb + MaxPicOrderCntLsb;
1731 else if (priv->poc_lsb > priv->prev_poc_lsb &&
1732 (priv->poc_lsb - priv->prev_poc_lsb) > (MaxPicOrderCntLsb / 2))
1733 priv->poc_msb = priv->prev_poc_msb - MaxPicOrderCntLsb;
1735 priv->poc_msb = priv->prev_poc_msb;
1737 temp_poc = priv->poc_msb + priv->poc_lsb;
1738 switch (picture->structure) {
1739 case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
/* A frame carries both field POCs; bottom adds the signalled delta */
1741 priv->field_poc[TOP_FIELD] = temp_poc;
1742 priv->field_poc[BOTTOM_FIELD] = temp_poc +
1743 slice_hdr->delta_pic_order_cnt_bottom;
1745 case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1747 priv->field_poc[TOP_FIELD] = temp_poc;
1749 case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1751 priv->field_poc[BOTTOM_FIELD] = temp_poc;
1756 /* 8.2.1.2 - Decoding process for picture order count type 1 */
1759 GstVaapiDecoderH264 *decoder,
1760 GstVaapiPictureH264 *picture,
1761 GstH264SliceHdr *slice_hdr
1764 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1765 GstH264SPS * const sps = get_sps(decoder);
1766 const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1767 gint32 prev_frame_num_offset, abs_frame_num, expected_poc;
1770 GST_DEBUG("decode picture order count type 1");
/* An mmco==5 on the previous picture resets the frame_num offset */
1772 if (priv->prev_pic_has_mmco5)
1773 prev_frame_num_offset = 0;
1775 prev_frame_num_offset = priv->frame_num_offset;
/* FrameNumOffset: grows by MaxFrameNum whenever frame_num wraps */
1778 if (GST_VAAPI_PICTURE_IS_IDR(picture))
1779 priv->frame_num_offset = 0;
1780 else if (priv->prev_frame_num > priv->frame_num)
1781 priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1783 priv->frame_num_offset = prev_frame_num_offset;
1786 if (sps->num_ref_frames_in_pic_order_cnt_cycle != 0)
1787 abs_frame_num = priv->frame_num_offset + priv->frame_num;
/* Non-reference pictures count one step behind in the POC cycle */
1790 if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture) && abs_frame_num > 0)
1791 abs_frame_num = abs_frame_num - 1;
1793 if (abs_frame_num > 0) {
1794 gint32 expected_delta_per_poc_cycle;
1795 gint32 poc_cycle_cnt, frame_num_in_poc_cycle;
1797 expected_delta_per_poc_cycle = 0;
1798 for (i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++)
1799 expected_delta_per_poc_cycle += sps->offset_for_ref_frame[i];
/* Position of this picture within the repeating POC cycle */
1802 poc_cycle_cnt = (abs_frame_num - 1) /
1803 sps->num_ref_frames_in_pic_order_cnt_cycle;
1804 frame_num_in_poc_cycle = (abs_frame_num - 1) %
1805 sps->num_ref_frames_in_pic_order_cnt_cycle;
1808 expected_poc = poc_cycle_cnt * expected_delta_per_poc_cycle;
1809 for (i = 0; i <= frame_num_in_poc_cycle; i++)
1810 expected_poc += sps->offset_for_ref_frame[i];
1814 if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1815 expected_poc += sps->offset_for_non_ref_pic;
1818 switch (picture->structure) {
1819 case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1820 priv->field_poc[TOP_FIELD] = expected_poc +
1821 slice_hdr->delta_pic_order_cnt[0];
1822 priv->field_poc[BOTTOM_FIELD] = priv->field_poc[TOP_FIELD] +
1823 sps->offset_for_top_to_bottom_field +
1824 slice_hdr->delta_pic_order_cnt[1];
1826 case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1827 priv->field_poc[TOP_FIELD] = expected_poc +
1828 slice_hdr->delta_pic_order_cnt[0];
1830 case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1831 priv->field_poc[BOTTOM_FIELD] = expected_poc +
1832 sps->offset_for_top_to_bottom_field +
1833 slice_hdr->delta_pic_order_cnt[0];
1838 /* 8.2.1.3 - Decoding process for picture order count type 2 */
1841 GstVaapiDecoderH264 *decoder,
1842 GstVaapiPictureH264 *picture,
1843 GstH264SliceHdr *slice_hdr
1846 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1847 GstH264SPS * const sps = get_sps(decoder);
1848 const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1849 gint32 prev_frame_num_offset, temp_poc;
1851 GST_DEBUG("decode picture order count type 2");
/* An mmco==5 on the previous picture resets the frame_num offset */
1853 if (priv->prev_pic_has_mmco5)
1854 prev_frame_num_offset = 0;
1856 prev_frame_num_offset = priv->frame_num_offset;
/* FrameNumOffset: grows by MaxFrameNum whenever frame_num wraps */
1859 if (GST_VAAPI_PICTURE_IS_IDR(picture))
1860 priv->frame_num_offset = 0;
1861 else if (priv->prev_frame_num > priv->frame_num)
1862 priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1864 priv->frame_num_offset = prev_frame_num_offset;
/* POC is twice the display order; non-reference pictures sit between */
1867 if (GST_VAAPI_PICTURE_IS_IDR(picture))
1869 else if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1870 temp_poc = 2 * (priv->frame_num_offset + priv->frame_num) - 1;
1872 temp_poc = 2 * (priv->frame_num_offset + priv->frame_num);
/* Both fields of a frame share the same POC for type 2 */
1875 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1876 priv->field_poc[TOP_FIELD] = temp_poc;
1877 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1878 priv->field_poc[BOTTOM_FIELD] = temp_poc;
1881 /* 8.2.1 - Decoding process for picture order count */
1884 GstVaapiDecoderH264 *decoder,
1885 GstVaapiPictureH264 *picture,
1886 GstH264SliceHdr *slice_hdr
1889 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1890 GstH264SPS * const sps = get_sps(decoder);
/* Dispatch to the derivation process selected by the SPS */
1892 switch (sps->pic_order_cnt_type) {
1894 init_picture_poc_0(decoder, picture, slice_hdr);
1897 init_picture_poc_1(decoder, picture, slice_hdr);
1900 init_picture_poc_2(decoder, picture, slice_hdr);
/* Copy the field POCs this picture actually carries; the picture POC
   is the smaller of the two (8-1) */
1904 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1905 picture->field_poc[TOP_FIELD] = priv->field_poc[TOP_FIELD];
1906 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1907 picture->field_poc[BOTTOM_FIELD] = priv->field_poc[BOTTOM_FIELD];
1908 picture->base.poc = MIN(picture->field_poc[0], picture->field_poc[1]);
1912 compare_picture_pic_num_dec(const void *a, const void *b)
1914 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1915 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1917 return picB->pic_num - picA->pic_num;
1921 compare_picture_long_term_pic_num_inc(const void *a, const void *b)
1923 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1924 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1926 return picA->long_term_pic_num - picB->long_term_pic_num;
1930 compare_picture_poc_dec(const void *a, const void *b)
1932 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1933 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1935 return picB->base.poc - picA->base.poc;
1939 compare_picture_poc_inc(const void *a, const void *b)
1941 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1942 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1944 return picA->base.poc - picB->base.poc;
1948 compare_picture_frame_num_wrap_dec(const void *a, const void *b)
1950 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1951 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1953 return picB->frame_num_wrap - picA->frame_num_wrap;
1957 compare_picture_long_term_frame_idx_inc(const void *a, const void *b)
1959 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1960 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1962 return picA->long_term_frame_idx - picB->long_term_frame_idx;
1965 /* 8.2.4.1 - Decoding process for picture numbers */
1967 init_picture_refs_pic_num(
1968 GstVaapiDecoderH264 *decoder,
1969 GstVaapiPictureH264 *picture,
1970 GstH264SliceHdr *slice_hdr
1973 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1974 GstH264SPS * const sps = get_sps(decoder);
1975 const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1978 GST_DEBUG("decode picture numbers");
/* Short-term references: derive FrameNumWrap and pic_num */
1980 for (i = 0; i < priv->short_ref_count; i++) {
1981 GstVaapiPictureH264 * const pic = priv->short_ref[i];
/* Only pictures from the same view take part (MVC) */
1984 if (pic->base.view_id != picture->base.view_id)
/* (8-27): unwrap frame_num relative to the current picture */
1988 if (pic->frame_num > priv->frame_num)
1989 pic->frame_num_wrap = pic->frame_num - MaxFrameNum;
1991 pic->frame_num_wrap = pic->frame_num;
1993 // (8-28, 8-30, 8-31)
1994 if (GST_VAAPI_PICTURE_IS_FRAME(picture))
1995 pic->pic_num = pic->frame_num_wrap;
1997 if (pic->structure == picture->structure)
1998 pic->pic_num = 2 * pic->frame_num_wrap + 1;
2000 pic->pic_num = 2 * pic->frame_num_wrap;
/* Long-term references: derive long_term_pic_num */
2004 for (i = 0; i < priv->long_ref_count; i++) {
2005 GstVaapiPictureH264 * const pic = priv->long_ref[i];
2008 if (pic->base.view_id != picture->base.view_id)
2011 // (8-29, 8-32, 8-33)
2012 if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2013 pic->long_term_pic_num = pic->long_term_frame_idx;
2015 if (pic->structure == picture->structure)
2016 pic->long_term_pic_num = 2 * pic->long_term_frame_idx + 1;
2018 pic->long_term_pic_num = 2 * pic->long_term_frame_idx;
/* Sorts the n picture pointers of list in place; compare_func names the
 * suffix of one of the compare_picture_* helpers above. */
2023 #define SORT_REF_LIST(list, n, compare_func) \
2024 qsort(list, n, sizeof(*(list)), compare_picture_##compare_func)
/* 8.2.4.2.5 helper: appends ref_list to RefPicList while alternating
 * fields of the current parity (picture_structure) with fields of the
 * opposite parity, keeping each parity's relative order. */
2027 init_picture_refs_fields_1(
2028 guint picture_structure,
2029 GstVaapiPictureH264 *RefPicList[32],
2030 guint *RefPicList_count,
2031 GstVaapiPictureH264 *ref_list[32],
2032 guint ref_list_count
2039 n = *RefPicList_count;
/* i scans same-parity fields, j scans opposite-parity fields; each
   pass of the outer loop appends at most one of each */
2042 for (; i < ref_list_count; i++) {
2043 if (ref_list[i]->structure == picture_structure) {
2044 RefPicList[n++] = ref_list[i++];
2048 for (; j < ref_list_count; j++) {
2049 if (ref_list[j]->structure != picture_structure) {
2050 RefPicList[n++] = ref_list[j++];
2054 } while (i < ref_list_count || j < ref_list_count);
2055 *RefPicList_count = n;
/* Builds a field reference picture list: short-term references first,
 * then long-term ones, each interleaved by field parity (8.2.4.2.5). */
2059 init_picture_refs_fields(
2060 GstVaapiPictureH264 *picture,
2061 GstVaapiPictureH264 *RefPicList[32],
2062 guint *RefPicList_count,
2063 GstVaapiPictureH264 *short_ref[32],
2064 guint short_ref_count,
2065 GstVaapiPictureH264 *long_ref[32],
2066 guint long_ref_count
2071 /* 8.2.4.2.5 - reference picture lists in fields */
2072 init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2073 short_ref, short_ref_count);
2074 init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2075 long_ref, long_ref_count);
2076 *RefPicList_count = n;
2079 /* Finds the inter-view reference picture with the supplied view id */
2080 static GstVaapiPictureH264 *
2081 find_inter_view_reference(GstVaapiDecoderH264 *decoder, guint16 view_id)
2083 GPtrArray * const inter_views = decoder->priv.inter_views;
/* Linear scan of the current access unit's inter-view pictures */
2086 for (i = 0; i < inter_views->len; i++) {
2087 GstVaapiPictureH264 * const picture = g_ptr_array_index(inter_views, i);
2088 if (picture->base.view_id == view_id)
2092 GST_WARNING("failed to find inter-view reference picture for view_id: %d",
2097 /* Checks whether the view id exists in the supplied list of view ids */
2099 find_view_id(guint16 view_id, const guint16 *view_ids, guint num_view_ids)
2103 for (i = 0; i < num_view_ids; i++) {
2104 if (view_ids[i] == view_id)
/* Checks whether view_id appears in the view's L0/L1 reference lists;
 * anchor pictures use the anchor lists, others the non-anchor lists. */
2111 find_view_id_in_view(guint16 view_id, const GstH264SPSExtMVCView *view,
2115 return (find_view_id(view_id, view->anchor_ref_l0,
2116 view->num_anchor_refs_l0) ||
2117 find_view_id(view_id, view->anchor_ref_l1,
2118 view->num_anchor_refs_l1));
2120 return (find_view_id(view_id, view->non_anchor_ref_l0,
2121 view->num_non_anchor_refs_l0) ||
2122 find_view_id(view_id, view->non_anchor_ref_l1,
2123 view->num_non_anchor_refs_l1));
2126 /* Checks whether the inter-view reference picture with the supplied
2127 view id is used for decoding the current view component picture */
2129 is_inter_view_reference_for_picture(GstVaapiDecoderH264 *decoder,
2130 guint16 view_id, GstVaapiPictureH264 *picture)
2132 const GstH264SPS * const sps = get_sps(decoder);
/* Only meaningful for MVC pictures decoded against an MVC subset SPS */
2135 if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2136 sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2139 is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2140 return find_view_id_in_view(view_id,
2141 &sps->extension.mvc.view[picture->base.voc], is_anchor);
2144 /* Checks whether the supplied inter-view reference picture is used
2145 for decoding the next view component pictures */
2147 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
2148 GstVaapiPictureH264 *picture)
2150 const GstH264SPS * const sps = get_sps(decoder);
2154 if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2155 sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
/* Scan views with a higher view order index: does any of them list
   this picture's view_id as an inter-view reference? */
2158 is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2159 num_views = sps->extension.mvc.num_views_minus1 + 1;
2160 for (i = picture->base.voc + 1; i < num_views; i++) {
2161 const GstH264SPSExtMVCView * const view = &sps->extension.mvc.view[i];
2162 if (find_view_id_in_view(picture->base.view_id, view, is_anchor))
2168 /* H.8.2.1 - Initialization process for inter-view prediction references */
2170 init_picture_refs_mvc_1(GstVaapiDecoderH264 *decoder,
2171 GstVaapiPictureH264 **ref_list, guint *ref_list_count_ptr, guint num_refs,
2172 const guint16 *view_ids, guint num_view_ids)
2176 n = *ref_list_count_ptr;
/* Append matching inter-view pictures, bounded by num_refs entries */
2177 for (j = 0; j < num_view_ids && n < num_refs; j++) {
2178 GstVaapiPictureH264 * const pic =
2179 find_inter_view_reference(decoder, view_ids[j]);
2181 ref_list[n++] = pic;
2183 *ref_list_count_ptr = n;
/* Appends the inter-view prediction references for the given list
 * (0 or 1) to RefPicListX, choosing the SPS MVC anchor or non-anchor
 * view-id tables depending on the picture type. */
2187 init_picture_refs_mvc(GstVaapiDecoderH264 *decoder,
2188 GstVaapiPictureH264 *picture, GstH264SliceHdr *slice_hdr, guint list)
2190 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2191 const GstH264SPS * const sps = get_sps(decoder);
2192 const GstH264SPSExtMVCView *view;
2194 GST_DEBUG("initialize reference picture list for inter-view prediction");
2196 if (sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2198 view = &sps->extension.mvc.view[picture->base.voc];
/* Token-pasting helper: expands to an init_picture_refs_mvc_1() call
   on priv->RefPicList{0,1} with the matching view-id table */
2200 #define INVOKE_INIT_PICTURE_REFS_MVC(ref_list, view_list) do { \
2201 init_picture_refs_mvc_1(decoder, \
2202 priv->RefPicList##ref_list, \
2203 &priv->RefPicList##ref_list##_count, \
2204 slice_hdr->num_ref_idx_l##ref_list##_active_minus1 + 1, \
2205 view->view_list##_l##ref_list, \
2206 view->num_##view_list##s_l##ref_list); \
2210 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2211 INVOKE_INIT_PICTURE_REFS_MVC(0, anchor_ref);
2213 INVOKE_INIT_PICTURE_REFS_MVC(0, non_anchor_ref);
2216 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2217 INVOKE_INIT_PICTURE_REFS_MVC(1, anchor_ref);
2219 INVOKE_INIT_PICTURE_REFS_MVC(1, non_anchor_ref);
2222 #undef INVOKE_INIT_PICTURE_REFS_MVC
/* 8.2.4.2.1 / 8.2.4.2.2 - Builds RefPicList0 for P and SP slices:
 * short-term references sorted by decreasing pic_num (frames) or
 * FrameNumWrap (fields), followed by long-term references sorted by
 * increasing number, then MVC inter-view references if applicable. */
2226 init_picture_refs_p_slice(
2227 GstVaapiDecoderH264 *decoder,
2228 GstVaapiPictureH264 *picture,
2229 GstH264SliceHdr *slice_hdr
2232 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2233 GstVaapiPictureH264 **ref_list;
2236 GST_DEBUG("decode reference picture list for P and SP slices");
2238 if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2239 /* 8.2.4.2.1 - P and SP slices in frames */
2240 if (priv->short_ref_count > 0) {
2241 ref_list = priv->RefPicList0;
2242 for (i = 0; i < priv->short_ref_count; i++)
2243 ref_list[i] = priv->short_ref[i];
2244 SORT_REF_LIST(ref_list, i, pic_num_dec);
2245 priv->RefPicList0_count += i;
/* Long-term references go after the short-term ones */
2248 if (priv->long_ref_count > 0) {
2249 ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2250 for (i = 0; i < priv->long_ref_count; i++)
2251 ref_list[i] = priv->long_ref[i];
2252 SORT_REF_LIST(ref_list, i, long_term_pic_num_inc);
2253 priv->RefPicList0_count += i;
2257 /* 8.2.4.2.2 - P and SP slices in fields */
2258 GstVaapiPictureH264 *short_ref[32];
2259 guint short_ref_count = 0;
2260 GstVaapiPictureH264 *long_ref[32];
2261 guint long_ref_count = 0;
/* Sort each kind separately, then interleave by field parity */
2263 if (priv->short_ref_count > 0) {
2264 for (i = 0; i < priv->short_ref_count; i++)
2265 short_ref[i] = priv->short_ref[i];
2266 SORT_REF_LIST(short_ref, i, frame_num_wrap_dec);
2267 short_ref_count = i;
2270 if (priv->long_ref_count > 0) {
2271 for (i = 0; i < priv->long_ref_count; i++)
2272 long_ref[i] = priv->long_ref[i];
2273 SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
2277 init_picture_refs_fields(
2279 priv->RefPicList0, &priv->RefPicList0_count,
2280 short_ref, short_ref_count,
2281 long_ref, long_ref_count
/* H.8.2.1: append inter-view references for MVC streams */
2285 if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2287 init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
/* Builds the initial RefPicList0 and RefPicList1 for B slices (8.2.4.2.3,
 * 8.2.4.2.4): short-term references partitioned by POC relative to the
 * current picture (past-first for list 0, future-first for list 1), followed
 * by long-term references by ascending LongTermPicNum. If both lists come
 * out identical with more than one entry, the first two entries of list 1
 * are swapped (8.2.4.2.3). MVC pictures get inter-view entries appended. */
2292 init_picture_refs_b_slice(
2293 GstVaapiDecoderH264 *decoder,
2294 GstVaapiPictureH264 *picture,
2295 GstH264SliceHdr *slice_hdr
2298 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2299 GstVaapiPictureH264 **ref_list;
2302 GST_DEBUG("decode reference picture list for B slices");
2304 if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2305 /* 8.2.4.2.3 - B slices in frames */
2308 if (priv->short_ref_count > 0) {
2309 // 1. Short-term references
/* List 0: refs with POC < current, by decreasing POC ... */
2310 ref_list = priv->RefPicList0;
2311 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2312 if (priv->short_ref[i]->base.poc < picture->base.poc)
2313 ref_list[n++] = priv->short_ref[i];
2315 SORT_REF_LIST(ref_list, n, poc_dec);
2316 priv->RefPicList0_count += n;
/* ... then refs with POC >= current, by increasing POC */
2318 ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2319 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2320 if (priv->short_ref[i]->base.poc >= picture->base.poc)
2321 ref_list[n++] = priv->short_ref[i];
2323 SORT_REF_LIST(ref_list, n, poc_inc);
2324 priv->RefPicList0_count += n;
2327 if (priv->long_ref_count > 0) {
2328 // 2. Long-term references
2329 ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2330 for (n = 0, i = 0; i < priv->long_ref_count; i++)
2331 ref_list[n++] = priv->long_ref[i];
2332 SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2333 priv->RefPicList0_count += n;
2337 if (priv->short_ref_count > 0) {
2338 // 1. Short-term references
/* List 1: mirror ordering — refs with POC > current first */
2339 ref_list = priv->RefPicList1;
2340 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2341 if (priv->short_ref[i]->base.poc > picture->base.poc)
2342 ref_list[n++] = priv->short_ref[i];
2344 SORT_REF_LIST(ref_list, n, poc_inc);
2345 priv->RefPicList1_count += n;
2347 ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2348 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2349 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2350 ref_list[n++] = priv->short_ref[i];
2352 SORT_REF_LIST(ref_list, n, poc_dec);
2353 priv->RefPicList1_count += n;
2356 if (priv->long_ref_count > 0) {
2357 // 2. Long-term references
2358 ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2359 for (n = 0, i = 0; i < priv->long_ref_count; i++)
2360 ref_list[n++] = priv->long_ref[i];
2361 SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2362 priv->RefPicList1_count += n;
2366 /* 8.2.4.2.4 - B slices in fields */
2367 GstVaapiPictureH264 *short_ref0[32];
2368 guint short_ref0_count = 0;
2369 GstVaapiPictureH264 *short_ref1[32];
2370 guint short_ref1_count = 0;
2371 GstVaapiPictureH264 *long_ref[32];
2372 guint long_ref_count = 0;
2374 /* refFrameList0ShortTerm */
2375 if (priv->short_ref_count > 0) {
2376 ref_list = short_ref0;
2377 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2378 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2379 ref_list[n++] = priv->short_ref[i];
2381 SORT_REF_LIST(ref_list, n, poc_dec);
2382 short_ref0_count += n;
2384 ref_list = &short_ref0[short_ref0_count];
2385 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2386 if (priv->short_ref[i]->base.poc > picture->base.poc)
2387 ref_list[n++] = priv->short_ref[i];
2389 SORT_REF_LIST(ref_list, n, poc_inc);
2390 short_ref0_count += n;
2393 /* refFrameList1ShortTerm */
2394 if (priv->short_ref_count > 0) {
2395 ref_list = short_ref1;
2396 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2397 if (priv->short_ref[i]->base.poc > picture->base.poc)
2398 ref_list[n++] = priv->short_ref[i];
2400 SORT_REF_LIST(ref_list, n, poc_inc);
2401 short_ref1_count += n;
2403 ref_list = &short_ref1[short_ref1_count];
2404 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2405 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2406 ref_list[n++] = priv->short_ref[i];
2408 SORT_REF_LIST(ref_list, n, poc_dec);
2409 short_ref1_count += n;
2412 /* refFrameListLongTerm */
2413 if (priv->long_ref_count > 0) {
2414 for (i = 0; i < priv->long_ref_count; i++)
2415 long_ref[i] = priv->long_ref[i];
2416 SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
/* 8.2.4.2.5 - interleave fields into the final lists */
2420 init_picture_refs_fields(
2422 priv->RefPicList0, &priv->RefPicList0_count,
2423 short_ref0, short_ref0_count,
2424 long_ref, long_ref_count
2427 init_picture_refs_fields(
2429 priv->RefPicList1, &priv->RefPicList1_count,
2430 short_ref1, short_ref1_count,
2431 long_ref, long_ref_count
2435 /* Check whether RefPicList1 is identical to RefPicList0, then
2436 swap if necessary */
2437 if (priv->RefPicList1_count > 1 &&
2438 priv->RefPicList1_count == priv->RefPicList0_count &&
2439 memcmp(priv->RefPicList0, priv->RefPicList1,
2440 priv->RefPicList0_count * sizeof(priv->RefPicList0[0])) == 0) {
2441 GstVaapiPictureH264 * const tmp = priv->RefPicList1[0];
2442 priv->RefPicList1[0] = priv->RefPicList1[1];
2443 priv->RefPicList1[1] = tmp;
/* H.8.2.1 - append inter-view references for MVC, both lists */
2446 if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2448 init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
2451 init_picture_refs_mvc(decoder, picture, slice_hdr, 1);
2455 #undef SORT_REF_LIST
/* Searches priv->short_ref[] for the short-term reference whose PicNum
 * matches pic_num. Callers use the result as an index into short_ref[]
 * and treat a negative value as "not found" (see the MMCO handlers). */
2458 find_short_term_reference(GstVaapiDecoderH264 *decoder, gint32 pic_num)
2460 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2463 for (i = 0; i < priv->short_ref_count; i++) {
2464 if (priv->short_ref[i]->pic_num == pic_num)
2467 GST_ERROR("found no short-term reference picture with PicNum = %d",
/* Searches priv->long_ref[] for the long-term reference whose
 * LongTermPicNum matches long_term_pic_num. Callers use the result as an
 * index into long_ref[] and treat a negative value as "not found". */
2473 find_long_term_reference(GstVaapiDecoderH264 *decoder, gint32 long_term_pic_num)
2475 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2478 for (i = 0; i < priv->long_ref_count; i++) {
2479 if (priv->long_ref[i]->long_term_pic_num == long_term_pic_num)
2482 GST_ERROR("found no long-term reference picture with LongTermPicNum = %d",
/* Applies the ref_pic_list_modification() syntax elements to one reference
 * picture list (8.2.4.3 for short/long-term entries, H.8.2.2.3 for MVC
 * inter-view entries). `list` selects RefPicList0 (0) or RefPicList1 (1).
 * Each modification inserts the targeted picture at the current insertion
 * point and compacts the remaining entries behind it. */
2488 exec_picture_refs_modification_1(
2489 GstVaapiDecoderH264 *decoder,
2490 GstVaapiPictureH264 *picture,
2491 GstH264SliceHdr *slice_hdr,
2495 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2496 GstH264SPS * const sps = get_sps(decoder);
2497 GstH264RefPicListModification *ref_pic_list_modification;
2498 guint num_ref_pic_list_modifications;
2499 GstVaapiPictureH264 **ref_list;
2500 guint *ref_list_count_ptr, ref_list_count, ref_list_idx = 0;
2501 const guint16 *view_ids = NULL;
2502 guint i, j, n, num_refs, num_view_ids = 0;
2504 gint32 MaxPicNum, CurrPicNum, picNumPred, picViewIdxPred;
2506 GST_DEBUG("modification process of reference picture list %u", list);
/* Bind the list-specific state (modifications, target list, MVC
 * view-dependency table) for list 0 ... */
2509 ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l0;
2510 num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l0;
2511 ref_list = priv->RefPicList0;
2512 ref_list_count_ptr = &priv->RefPicList0_count;
2513 num_refs = slice_hdr->num_ref_idx_l0_active_minus1 + 1;
2515 if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2516 sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2517 const GstH264SPSExtMVCView * const view =
2518 &sps->extension.mvc.view[picture->base.voc];
2519 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2520 view_ids = view->anchor_ref_l0;
2521 num_view_ids = view->num_anchor_refs_l0;
2524 view_ids = view->non_anchor_ref_l0;
2525 num_view_ids = view->num_non_anchor_refs_l0;
/* ... or for list 1 */
2530 ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l1;
2531 num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l1;
2532 ref_list = priv->RefPicList1;
2533 ref_list_count_ptr = &priv->RefPicList1_count;
2534 num_refs = slice_hdr->num_ref_idx_l1_active_minus1 + 1;
2536 if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2537 sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2538 const GstH264SPSExtMVCView * const view =
2539 &sps->extension.mvc.view[picture->base.voc];
2540 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2541 view_ids = view->anchor_ref_l1;
2542 num_view_ids = view->num_anchor_refs_l1;
2545 view_ids = view->non_anchor_ref_l1;
2546 num_view_ids = view->num_non_anchor_refs_l1;
2550 ref_list_count = *ref_list_count_ptr;
/* 8.2.4.1 - derive MaxPicNum / CurrPicNum (field pictures use the
 * doubled frame_num domain) */
2552 if (!GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2553 MaxPicNum = 1 << (sps->log2_max_frame_num_minus4 + 5); // 2 * MaxFrameNum
2554 CurrPicNum = 2 * slice_hdr->frame_num + 1; // 2 * frame_num + 1
2557 MaxPicNum = 1 << (sps->log2_max_frame_num_minus4 + 4); // MaxFrameNum
2558 CurrPicNum = slice_hdr->frame_num; // frame_num
2561 picNumPred = CurrPicNum;
2562 picViewIdxPred = -1;
2564 for (i = 0; i < num_ref_pic_list_modifications; i++) {
2565 GstH264RefPicListModification * const l = &ref_pic_list_modification[i];
/* idc == 3 terminates the modification loop */
2566 if (l->modification_of_pic_nums_idc == 3)
2569 /* 8.2.4.3.1 - Short-term reference pictures */
2570 if (l->modification_of_pic_nums_idc == 0 || l->modification_of_pic_nums_idc == 1) {
2571 gint32 abs_diff_pic_num = l->value.abs_diff_pic_num_minus1 + 1;
2572 gint32 picNum, picNumNoWrap;
/* idc == 0 subtracts the delta, idc == 1 adds it, with modular wrap
 * around MaxPicNum */
2575 if (l->modification_of_pic_nums_idc == 0) {
2576 picNumNoWrap = picNumPred - abs_diff_pic_num;
2577 if (picNumNoWrap < 0)
2578 picNumNoWrap += MaxPicNum;
2583 picNumNoWrap = picNumPred + abs_diff_pic_num;
2584 if (picNumNoWrap >= MaxPicNum)
2585 picNumNoWrap -= MaxPicNum;
2587 picNumPred = picNumNoWrap;
2590 picNum = picNumNoWrap;
2591 if (picNum > CurrPicNum)
2592 picNum -= MaxPicNum;
/* Shift entries right to open a slot, insert the target picture,
 * then compact out the displaced duplicate */
2595 for (j = num_refs; j > ref_list_idx; j--)
2596 ref_list[j] = ref_list[j - 1];
2597 found_ref_idx = find_short_term_reference(decoder, picNum);
2598 ref_list[ref_list_idx++] =
2599 found_ref_idx >= 0 ? priv->short_ref[found_ref_idx] : NULL;
2601 for (j = ref_list_idx; j <= num_refs; j++) {
2606 GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(ref_list[j]) ?
2607 ref_list[j]->pic_num : MaxPicNum;
2608 if (PicNumF != picNum ||
2609 ref_list[j]->base.view_id != picture->base.view_id)
2610 ref_list[n++] = ref_list[j];
2614 /* 8.2.4.3.2 - Long-term reference pictures */
2615 else if (l->modification_of_pic_nums_idc == 2) {
2617 for (j = num_refs; j > ref_list_idx; j--)
2618 ref_list[j] = ref_list[j - 1];
2620 find_long_term_reference(decoder, l->value.long_term_pic_num);
2621 ref_list[ref_list_idx++] =
2622 found_ref_idx >= 0 ? priv->long_ref[found_ref_idx] : NULL;
2624 for (j = ref_list_idx; j <= num_refs; j++) {
2625 gint32 LongTermPicNumF;
2629 GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(ref_list[j]) ?
2630 ref_list[j]->long_term_pic_num : INT_MAX;
2631 if (LongTermPicNumF != l->value.long_term_pic_num ||
2632 ref_list[j]->base.view_id != picture->base.view_id)
2633 ref_list[n++] = ref_list[j];
2637 /* H.8.2.2.3 - Inter-view prediction reference pictures */
2638 else if ((GST_VAAPI_PICTURE_IS_MVC(picture) &&
2639 sps->extension_type == GST_H264_NAL_EXTENSION_MVC) &&
2640 (l->modification_of_pic_nums_idc == 4 ||
2641 l->modification_of_pic_nums_idc == 5)) {
2642 gint32 abs_diff_view_idx = l->value.abs_diff_view_idx_minus1 + 1;
2643 gint32 picViewIdx, targetViewId;
/* idc == 4 subtracts the view-index delta, idc == 5 adds it, with
 * modular wrap around num_view_ids */
2646 if (l->modification_of_pic_nums_idc == 4) {
2647 picViewIdx = picViewIdxPred - abs_diff_view_idx;
2649 picViewIdx += num_view_ids;
2654 picViewIdx = picViewIdxPred + abs_diff_view_idx;
2655 if (picViewIdx >= num_view_ids)
2656 picViewIdx -= num_view_ids;
2658 picViewIdxPred = picViewIdx;
2661 targetViewId = view_ids[picViewIdx];
2664 for (j = num_refs; j > ref_list_idx; j--)
2665 ref_list[j] = ref_list[j - 1];
2666 ref_list[ref_list_idx++] =
2667 find_inter_view_reference(decoder, targetViewId);
2669 for (j = ref_list_idx; j <= num_refs; j++) {
2672 if (ref_list[j]->base.view_id != targetViewId ||
2673 ref_list[j]->base.poc != picture->base.poc)
2674 ref_list[n++] = ref_list[j];
/* Diagnostic sweep: flag any hole left by an unresolved reference */
2680 for (i = 0; i < num_refs; i++)
2682 GST_ERROR("list %u entry %u is empty", list, i);
2684 *ref_list_count_ptr = num_refs;
2687 /* 8.2.4.3 - Modification process for reference picture lists */
/* Runs exec_picture_refs_modification_1() on RefPicList0 for P/SP/B slices
 * and on RefPicList1 for B slices, but only when the slice header signals
 * the corresponding ref_pic_list_modification_flag. */
2689 exec_picture_refs_modification(
2690 GstVaapiDecoderH264 *decoder,
2691 GstVaapiPictureH264 *picture,
2692 GstH264SliceHdr *slice_hdr
2695 GST_DEBUG("execute ref_pic_list_modification()");
/* RefPicList0 applies to all non-I/non-SI slices */
2698 if (!GST_H264_IS_I_SLICE(slice_hdr) && !GST_H264_IS_SI_SLICE(slice_hdr) &&
2699 slice_hdr->ref_pic_list_modification_flag_l0)
2700 exec_picture_refs_modification_1(decoder, picture, slice_hdr, 0);
/* RefPicList1 only exists for B slices */
2703 if (GST_H264_IS_B_SLICE(slice_hdr) &&
2704 slice_hdr->ref_pic_list_modification_flag_l1)
2705 exec_picture_refs_modification_1(decoder, picture, slice_hdr, 1);
/* Rebuilds priv->short_ref[] and priv->long_ref[] from the DPB for the
 * current picture's view: whole frames when decoding a frame picture,
 * individual fields when decoding a field picture. Also refreshes each
 * reference's `structure` and `other_field` links, and clears any stale
 * tail entries from the previous lists. */
2709 init_picture_ref_lists(GstVaapiDecoderH264 *decoder,
2710 GstVaapiPictureH264 *picture)
2712 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2713 guint i, j, short_ref_count, long_ref_count;
2715 short_ref_count = 0;
2717 if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
/* Frame decoding: only complete frames in the same view qualify */
2718 for (i = 0; i < priv->dpb_count; i++) {
2719 GstVaapiFrameStore * const fs = priv->dpb[i];
2720 GstVaapiPictureH264 *pic;
2721 if (!gst_vaapi_frame_store_has_frame(fs))
2723 pic = fs->buffers[0];
2724 if (pic->base.view_id != picture->base.view_id)
2726 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2727 priv->short_ref[short_ref_count++] = pic;
2728 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2729 priv->long_ref[long_ref_count++] = pic;
2730 pic->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2731 pic->other_field = fs->buffers[1];
/* Field decoding: every buffered field in the same view qualifies */
2735 for (i = 0; i < priv->dpb_count; i++) {
2736 GstVaapiFrameStore * const fs = priv->dpb[i];
2737 for (j = 0; j < fs->num_buffers; j++) {
2738 GstVaapiPictureH264 * const pic = fs->buffers[j];
2739 if (pic->base.view_id != picture->base.view_id)
2741 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2742 priv->short_ref[short_ref_count++] = pic;
2743 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2744 priv->long_ref[long_ref_count++] = pic;
2745 pic->structure = pic->base.structure;
/* j ^ 1 selects the complementary field in the frame store */
2746 pic->other_field = fs->buffers[j ^ 1];
/* NULL out the unused tails so stale pointers cannot leak through */
2751 for (i = short_ref_count; i < priv->short_ref_count; i++)
2752 priv->short_ref[i] = NULL;
2753 priv->short_ref_count = short_ref_count;
2755 for (i = long_ref_count; i < priv->long_ref_count; i++)
2756 priv->long_ref[i] = NULL;
2757 priv->long_ref_count = long_ref_count;
/* Top-level reference list construction for the current slice: rebuilds the
 * short/long-term arrays, derives PicNum values, builds the initial
 * RefPicList0/1 per slice type, applies the modification process, then pads
 * each active list with NULLs up to num_ref_idx_lX_active_minus1 + 1.
 * NOTE(review): the line carrying this function's name is elided in this
 * extract; identified as init_picture_refs from the call structure. */
2762 GstVaapiDecoderH264 *decoder,
2763 GstVaapiPictureH264 *picture,
2764 GstH264SliceHdr *slice_hdr
2767 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2770 init_picture_ref_lists(decoder, picture);
2771 init_picture_refs_pic_num(decoder, picture, slice_hdr);
2773 priv->RefPicList0_count = 0;
2774 priv->RefPicList1_count = 0;
/* slice_hdr->type % 5 folds the "all slices of this type" variants
 * (types 5..9) onto the base slice types 0..4 */
2776 switch (slice_hdr->type % 5) {
2777 case GST_H264_P_SLICE:
2778 case GST_H264_SP_SLICE:
2779 init_picture_refs_p_slice(decoder, picture, slice_hdr);
2781 case GST_H264_B_SLICE:
2782 init_picture_refs_b_slice(decoder, picture, slice_hdr);
2788 exec_picture_refs_modification(decoder, picture, slice_hdr);
/* Pad the lists to their active size; B slices pad both lists */
2790 switch (slice_hdr->type % 5) {
2791 case GST_H264_B_SLICE:
2792 num_refs = 1 + slice_hdr->num_ref_idx_l1_active_minus1;
2793 for (i = priv->RefPicList1_count; i < num_refs; i++)
2794 priv->RefPicList1[i] = NULL;
2795 priv->RefPicList1_count = num_refs;
2798 case GST_H264_P_SLICE:
2799 case GST_H264_SP_SLICE:
2800 num_refs = 1 + slice_hdr->num_ref_idx_l0_active_minus1;
2801 for (i = priv->RefPicList0_count; i < num_refs; i++)
2802 priv->RefPicList0[i] = NULL;
2803 priv->RefPicList0_count = num_refs;
/* Initializes the decoder/picture state for a newly started picture from its
 * parser info: frame_num tracking, timestamps, MVC flags, IDR handling
 * (DPB flush), picture structure (frame/top/bottom field), reference flags,
 * and POC computation.
 * NOTE(review): the line carrying this function's name is elided in this
 * extract; identified as init_picture from the surrounding structure. */
2812 GstVaapiDecoderH264 *decoder,
2813 GstVaapiPictureH264 *picture, GstVaapiParserInfoH264 *pi)
2815 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2816 GstVaapiPicture * const base_picture = &picture->base;
2817 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2819 priv->prev_frame_num = priv->frame_num;
2820 priv->frame_num = slice_hdr->frame_num;
2821 picture->frame_num = priv->frame_num;
/* frame_num_wrap is refined later in init_picture_refs_pic_num() */
2822 picture->frame_num_wrap = priv->frame_num;
2823 picture->output_flag = TRUE; /* XXX: conformant to Annex A only */
2824 base_picture->pts = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
2825 base_picture->type = GST_VAAPI_PICTURE_TYPE_NONE;
2826 base_picture->view_id = pi->view_id;
2827 base_picture->voc = pi->voc;
2829 /* Initialize extensions */
2830 switch (pi->nalu.extension_type) {
2831 case GST_H264_NAL_EXTENSION_MVC: {
2832 GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
2834 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_MVC);
2835 if (mvc->inter_view_flag)
2836 GST_VAAPI_PICTURE_FLAG_SET(picture,
2837 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
2838 if (mvc->anchor_pic_flag)
2839 GST_VAAPI_PICTURE_FLAG_SET(picture,
2840 GST_VAAPI_PICTURE_FLAG_ANCHOR);
2845 /* Reset decoder state for IDR pictures */
2846 if (pi->nalu.idr_pic_flag) {
2848 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR);
2849 dpb_flush(decoder, picture);
2852 /* Initialize picture structure */
2853 if (!slice_hdr->field_pic_flag)
2854 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2856 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
2857 if (!slice_hdr->bottom_field_flag)
2858 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
2860 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
2862 picture->structure = base_picture->structure;
2864 /* Initialize reference flags */
2865 if (pi->nalu.ref_idc) {
2866 GstH264DecRefPicMarking * const dec_ref_pic_marking =
2867 &slice_hdr->dec_ref_pic_marking;
/* IDR pictures may be marked long-term directly via
 * long_term_reference_flag (7.4.3.3) */
2869 if (GST_VAAPI_PICTURE_IS_IDR(picture) &&
2870 dec_ref_pic_marking->long_term_reference_flag)
2871 GST_VAAPI_PICTURE_FLAG_SET(picture,
2872 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE);
2874 GST_VAAPI_PICTURE_FLAG_SET(picture,
2875 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE);
/* 8.2.1 - derive picture order counts */
2878 init_picture_poc(decoder, picture, slice_hdr);
2882 /* 8.2.5.3 - Sliding window decoded reference picture marking process */
/* When the DPB holds max_num_ref_frames references, evicts the short-term
 * reference with the smallest FrameNumWrap (and, for field decoding, its
 * complementary field as well). Runs only on the first field of a pair. */
2884 exec_ref_pic_marking_sliding_window(GstVaapiDecoderH264 *decoder)
2886 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2887 GstH264SPS * const sps = get_sps(decoder);
2888 GstVaapiPictureH264 *ref_picture;
2889 guint i, m, max_num_ref_frames;
2891 GST_DEBUG("reference picture marking process (sliding window)");
2893 if (!GST_VAAPI_PICTURE_IS_FIRST_FIELD(priv->current_picture))
/* num_ref_frames == 0 is treated as 1; field decoding doubles the cap
 * because each frame contributes two field references */
2896 max_num_ref_frames = sps->num_ref_frames;
2897 if (max_num_ref_frames == 0)
2898 max_num_ref_frames = 1;
2899 if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture))
2900 max_num_ref_frames <<= 1;
2902 if (priv->short_ref_count + priv->long_ref_count < max_num_ref_frames)
2904 if (priv->short_ref_count < 1)
/* Select the short-term reference with the minimum FrameNumWrap */
2907 for (m = 0, i = 1; i < priv->short_ref_count; i++) {
2908 GstVaapiPictureH264 * const picture = priv->short_ref[i];
2909 if (picture->frame_num_wrap < priv->short_ref[m]->frame_num_wrap)
2913 ref_picture = priv->short_ref[m];
2914 gst_vaapi_picture_h264_set_reference(ref_picture, 0, TRUE);
2915 ARRAY_REMOVE_INDEX(priv->short_ref, m);
2917 /* Both fields need to be marked as "unused for reference", so
2918 remove the other field from the short_ref[] list as well */
2919 if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture) && ref_picture->other_field) {
2920 for (i = 0; i < priv->short_ref_count; i++) {
2921 if (priv->short_ref[i] == ref_picture->other_field) {
2922 ARRAY_REMOVE_INDEX(priv->short_ref, i);
/* 8.2.5.4.1 - derives picNumX for an MMCO operation: the target PicNum is
 * CurrPicNum (FrameNumWrap for frames, 2*FrameNumWrap+1 for fields) minus
 * (difference_of_pic_nums_minus1 + 1). */
2930 static inline gint32
2931 get_picNumX(GstVaapiPictureH264 *picture, GstH264RefPicMarking *ref_pic_marking)
2935 if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2936 pic_num = picture->frame_num_wrap;
2938 pic_num = 2 * picture->frame_num_wrap + 1;
2939 pic_num -= ref_pic_marking->difference_of_pic_nums_minus1 + 1;
2943 /* 8.2.5.4.1. Mark short-term reference picture as "unused for reference" */
/* MMCO 1: resolves picNumX, then unmarks and removes the matching
 * short-term reference from short_ref[]. */
2945 exec_ref_pic_marking_adaptive_mmco_1(
2946 GstVaapiDecoderH264 *decoder,
2947 GstVaapiPictureH264 *picture,
2948 GstH264RefPicMarking *ref_pic_marking
2951 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2954 picNumX = get_picNumX(picture, ref_pic_marking);
2955 i = find_short_term_reference(decoder, picNumX);
/* For frames both fields are unmarked; for a field picture only the
 * targeted field loses its reference marking */
2959 gst_vaapi_picture_h264_set_reference(priv->short_ref[i], 0,
2960 GST_VAAPI_PICTURE_IS_FRAME(picture));
2961 ARRAY_REMOVE_INDEX(priv->short_ref, i);
2964 /* 8.2.5.4.2. Mark long-term reference picture as "unused for reference" */
/* MMCO 2: unmarks and removes the long-term reference matching
 * long_term_pic_num from long_ref[]. */
2966 exec_ref_pic_marking_adaptive_mmco_2(
2967 GstVaapiDecoderH264 *decoder,
2968 GstVaapiPictureH264 *picture,
2969 GstH264RefPicMarking *ref_pic_marking
2972 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2975 i = find_long_term_reference(decoder, ref_pic_marking->long_term_pic_num);
2979 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0,
2980 GST_VAAPI_PICTURE_IS_FRAME(picture));
2981 ARRAY_REMOVE_INDEX(priv->long_ref, i);
2984 /* 8.2.5.4.3. Assign LongTermFrameIdx to a short-term reference picture */
/* MMCO 3: evicts any long-term reference already holding the requested
 * LongTermFrameIdx, then moves the short-term reference identified by
 * picNumX into long_ref[] and marks it long-term. The complementary field,
 * if already long-term, inherits the same index. */
2986 exec_ref_pic_marking_adaptive_mmco_3(
2987 GstVaapiDecoderH264 *decoder,
2988 GstVaapiPictureH264 *picture,
2989 GstH264RefPicMarking *ref_pic_marking
2992 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2993 GstVaapiPictureH264 *ref_picture, *other_field;
/* Free the index if another long-term picture occupies it */
2996 for (i = 0; i < priv->long_ref_count; i++) {
2997 if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
3000 if (i != priv->long_ref_count) {
3001 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
3002 ARRAY_REMOVE_INDEX(priv->long_ref, i);
3005 picNumX = get_picNumX(picture, ref_pic_marking);
3006 i = find_short_term_reference(decoder, picNumX);
/* Promote the short-term reference to long-term */
3010 ref_picture = priv->short_ref[i];
3011 ARRAY_REMOVE_INDEX(priv->short_ref, i);
3012 priv->long_ref[priv->long_ref_count++] = ref_picture;
3014 ref_picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3015 gst_vaapi_picture_h264_set_reference(ref_picture,
3016 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3017 GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3019 /* Assign LongTermFrameIdx to the other field if it was also
3020 marked as "used for long-term reference */
3021 other_field = ref_picture->other_field;
3022 if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3023 other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3026 /* 8.2.5.4.4. Mark pictures with LongTermFramIdx > max_long_term_frame_idx
3027 * as "unused for reference" */
/* MMCO 4: derives MaxLongTermFrameIdx (max_long_term_frame_idx_plus1 - 1;
 * -1 means "no long-term references allowed") and unmarks every long-term
 * reference whose index exceeds it. */
3029 exec_ref_pic_marking_adaptive_mmco_4(
3030 GstVaapiDecoderH264 *decoder,
3031 GstVaapiPictureH264 *picture,
3032 GstH264RefPicMarking *ref_pic_marking
3035 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3036 gint32 i, long_term_frame_idx;
3038 long_term_frame_idx = ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
3040 for (i = 0; i < priv->long_ref_count; i++) {
3041 if (priv->long_ref[i]->long_term_frame_idx <= long_term_frame_idx)
3043 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, FALSE);
3044 ARRAY_REMOVE_INDEX(priv->long_ref, i);
3049 /* 8.2.5.4.5. Mark all reference pictures as "unused for reference" */
/* MMCO 5: flushes the DPB, resets frame_num bookkeeping to 0 (7.4.3), and
 * rebases the current picture's POC values so its smallest field POC
 * becomes 0 (8.2.1). Sets prev_pic_has_mmco5 for later POC derivation. */
3051 exec_ref_pic_marking_adaptive_mmco_5(
3052 GstVaapiDecoderH264 *decoder,
3053 GstVaapiPictureH264 *picture,
3054 GstH264RefPicMarking *ref_pic_marking
3057 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3059 dpb_flush(decoder, picture);
3061 priv->prev_pic_has_mmco5 = TRUE;
3063 /* The picture shall be inferred to have had frame_num equal to 0 (7.4.3) */
3064 priv->frame_num = 0;
3065 priv->frame_num_offset = 0;
3066 picture->frame_num = 0;
3068 /* Update TopFieldOrderCnt and BottomFieldOrderCnt (8.2.1) */
3069 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
3070 picture->field_poc[TOP_FIELD] -= picture->base.poc;
3071 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
3072 picture->field_poc[BOTTOM_FIELD] -= picture->base.poc;
3073 picture->base.poc = 0;
3076 /* 8.2.5.4.6. Assign a long-term frame index to the current picture */
/* MMCO 6: evicts any long-term reference already holding the requested
 * LongTermFrameIdx, then marks the current picture itself as a long-term
 * reference with that index. The complementary field, if already long-term,
 * inherits the same index. */
3078 exec_ref_pic_marking_adaptive_mmco_6(
3079 GstVaapiDecoderH264 *decoder,
3080 GstVaapiPictureH264 *picture,
3081 GstH264RefPicMarking *ref_pic_marking
3084 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3085 GstVaapiPictureH264 *other_field;
/* Free the index if another long-term picture occupies it */
3088 for (i = 0; i < priv->long_ref_count; i++) {
3089 if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
3092 if (i != priv->long_ref_count) {
3093 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
3094 ARRAY_REMOVE_INDEX(priv->long_ref, i);
3097 picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3098 gst_vaapi_picture_h264_set_reference(picture,
3099 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3100 GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3102 /* Assign LongTermFrameIdx to the other field if it was also
3103 marked as "used for long-term reference */
3104 other_field = GST_VAAPI_PICTURE_H264(picture->base.parent_picture);
3105 if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3106 other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3109 /* 8.2.5.4. Adaptive memory control decoded reference picture marking process */
/* Dispatches each memory_management_control_operation in the slice header's
 * dec_ref_pic_marking() to its MMCO handler via a function table indexed by
 * the operation number; unhandled operation numbers are reported. */
3111 exec_ref_pic_marking_adaptive(
3112 GstVaapiDecoderH264 *decoder,
3113 GstVaapiPictureH264 *picture,
3114 GstH264DecRefPicMarking *dec_ref_pic_marking
3119 GST_DEBUG("reference picture marking process (adaptive memory control)");
3121 typedef void (*exec_ref_pic_marking_adaptive_mmco_func)(
3122 GstVaapiDecoderH264 *decoder,
3123 GstVaapiPictureH264 *picture,
3124 GstH264RefPicMarking *ref_pic_marking
/* Table slot N handles MMCO N (slot 0 is unused: MMCO 0 ends the loop
 * at the syntax level) */
3127 static const exec_ref_pic_marking_adaptive_mmco_func mmco_funcs[] = {
3129 exec_ref_pic_marking_adaptive_mmco_1,
3130 exec_ref_pic_marking_adaptive_mmco_2,
3131 exec_ref_pic_marking_adaptive_mmco_3,
3132 exec_ref_pic_marking_adaptive_mmco_4,
3133 exec_ref_pic_marking_adaptive_mmco_5,
3134 exec_ref_pic_marking_adaptive_mmco_6,
3137 for (i = 0; i < dec_ref_pic_marking->n_ref_pic_marking; i++) {
3138 GstH264RefPicMarking * const ref_pic_marking =
3139 &dec_ref_pic_marking->ref_pic_marking[i];
3141 const guint mmco = ref_pic_marking->memory_management_control_operation;
3142 if (mmco < G_N_ELEMENTS(mmco_funcs) && mmco_funcs[mmco])
3143 mmco_funcs[mmco](decoder, picture, ref_pic_marking);
3145 GST_ERROR("unhandled MMCO %u", mmco);
3152 /* 8.2.5 - Execute reference picture marking process */
/* Entry point for DPB marking after a picture is decoded: records
 * prev-picture state, registers inter-view pictures, then runs either the
 * adaptive (MMCO) or the sliding-window marking process for reference
 * pictures. Non-IDR only; IDR handling happens at picture init. */
3154 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3156 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3158 priv->prev_pic_has_mmco5 = FALSE;
3159 priv->prev_pic_structure = picture->structure;
/* Keep inter-view pictures alive for same-access-unit prediction */
3161 if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture))
3162 g_ptr_array_add(priv->inter_views, gst_vaapi_picture_ref(picture));
3164 if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
3167 if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
3168 GstH264DecRefPicMarking * const dec_ref_pic_marking =
3169 &picture->last_slice_hdr->dec_ref_pic_marking;
3170 if (dec_ref_pic_marking->adaptive_ref_pic_marking_mode_flag) {
3171 if (!exec_ref_pic_marking_adaptive(decoder, picture, dec_ref_pic_marking))
3175 if (!exec_ref_pic_marking_sliding_window(decoder))
/* Resets a VAPictureH264 entry to the "invalid/unused" state expected by
 * VA-API for unused ReferenceFrames slots. */
3183 vaapi_init_picture(VAPictureH264 *pic)
3185 pic->picture_id = VA_INVALID_ID;
3187 pic->flags = VA_PICTURE_H264_INVALID;
3188 pic->TopFieldOrderCnt = 0;
3189 pic->BottomFieldOrderCnt = 0;
/* Translates a decoder picture into a VAPictureH264: surface id, long/short
 * term reference flags with the matching frame_idx, and the field order
 * counts appropriate for the given picture structure (0 selects the
 * picture's own structure). */
3193 vaapi_fill_picture(VAPictureH264 *pic, GstVaapiPictureH264 *picture,
3194 guint picture_structure)
3196 if (!picture_structure)
3197 picture_structure = picture->structure;
3199 pic->picture_id = picture->base.surface_id;
/* Long-term refs carry LongTermFrameIdx; short-term refs carry frame_num */
3202 if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)) {
3203 pic->flags |= VA_PICTURE_H264_LONG_TERM_REFERENCE;
3204 pic->frame_idx = picture->long_term_frame_idx;
3207 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
3208 pic->flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
3209 pic->frame_idx = picture->frame_num;
3212 switch (picture_structure) {
3213 case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
3214 pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3215 pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3217 case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
3218 pic->flags |= VA_PICTURE_H264_TOP_FIELD;
3219 pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3220 pic->BottomFieldOrderCnt = 0;
3222 case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
3223 pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD;
3224 pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3225 pic->TopFieldOrderCnt = 0;
/* Fills a VAPictureH264 entry destined for a RefPicListX array. For
 * inter-view references the short/long-term flags are cleared, per H.8.4:
 * such entries are considered not marked as short- or long-term. */
3231 vaapi_fill_picture_for_RefPicListX(VAPictureH264 *pic,
3232 GstVaapiPictureH264 *picture)
3234 vaapi_fill_picture(pic, picture, 0);
3236 /* H.8.4 - MVC inter prediction and inter-view prediction process */
3237 if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture)) {
3238 /* The inter-view reference components and inter-view only
3239 reference components that are included in the reference
3240 picture lists are considered as not being marked as "used for
3241 short-term reference" or "used for long-term reference" */
3242 pic->flags &= ~(VA_PICTURE_H264_SHORT_TERM_REFERENCE|
3243 VA_PICTURE_H264_LONG_TERM_REFERENCE);
/* Populates the VAPictureParameterBufferH264 for the current picture:
 * CurrPic, the ReferenceFrames array (same-view references plus applicable
 * inter-view references from the DPB, remaining slots invalidated), and the
 * SPS/PPS-derived fields and bitfields the VA driver needs. */
3248 fill_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3250 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3251 GstVaapiPicture * const base_picture = &picture->base;
3252 GstH264PPS * const pps = get_pps(decoder);
3253 GstH264SPS * const sps = get_sps(decoder);
3254 VAPictureParameterBufferH264 * const pic_param = base_picture->param;
3257 /* Fill in VAPictureParameterBufferH264 */
3258 vaapi_fill_picture(&pic_param->CurrPic, picture, 0);
/* Collect DPB references: same-view reference frames, plus inter-view
 * frame stores usable by this picture (H.8.2) */
3260 for (i = 0, n = 0; i < priv->dpb_count; i++) {
3261 GstVaapiFrameStore * const fs = priv->dpb[i];
3262 if ((gst_vaapi_frame_store_has_reference(fs) &&
3263 fs->view_id == picture->base.view_id) ||
3264 (gst_vaapi_frame_store_has_inter_view(fs) &&
3265 is_inter_view_reference_for_picture(decoder, fs->view_id, picture)))
3266 vaapi_fill_picture(&pic_param->ReferenceFrames[n++],
3267 fs->buffers[0], fs->structure);
3268 if (n >= G_N_ELEMENTS(pic_param->ReferenceFrames))
/* Invalidate the unused tail of ReferenceFrames[] */
3271 for (; n < G_N_ELEMENTS(pic_param->ReferenceFrames); n++)
3272 vaapi_init_picture(&pic_param->ReferenceFrames[n]);
3274 #define COPY_FIELD(s, f) \
3275 pic_param->f = (s)->f
3277 #define COPY_BFM(a, s, f) \
3278 pic_param->a.bits.f = (s)->f
3280 pic_param->picture_width_in_mbs_minus1 = priv->mb_width - 1;
3281 pic_param->picture_height_in_mbs_minus1 = priv->mb_height - 1;
3282 pic_param->frame_num = priv->frame_num;
3284 COPY_FIELD(sps, bit_depth_luma_minus8);
3285 COPY_FIELD(sps, bit_depth_chroma_minus8);
3286 COPY_FIELD(sps, num_ref_frames);
3287 COPY_FIELD(pps, num_slice_groups_minus1);
3288 COPY_FIELD(pps, slice_group_map_type);
3289 COPY_FIELD(pps, slice_group_change_rate_minus1);
3290 COPY_FIELD(pps, pic_init_qp_minus26);
3291 COPY_FIELD(pps, pic_init_qs_minus26);
3292 COPY_FIELD(pps, chroma_qp_index_offset);
3293 COPY_FIELD(pps, second_chroma_qp_index_offset);
3295 pic_param->seq_fields.value = 0; /* reset all bits */
3296 pic_param->seq_fields.bits.residual_colour_transform_flag = sps->separate_colour_plane_flag;
3297 pic_param->seq_fields.bits.MinLumaBiPredSize8x8 = sps->level_idc >= 31; /* A.3.3.2 */
3299 COPY_BFM(seq_fields, sps, chroma_format_idc);
3300 COPY_BFM(seq_fields, sps, gaps_in_frame_num_value_allowed_flag);
3301 COPY_BFM(seq_fields, sps, frame_mbs_only_flag);
3302 COPY_BFM(seq_fields, sps, mb_adaptive_frame_field_flag);
3303 COPY_BFM(seq_fields, sps, direct_8x8_inference_flag);
3304 COPY_BFM(seq_fields, sps, log2_max_frame_num_minus4);
3305 COPY_BFM(seq_fields, sps, pic_order_cnt_type);
3306 COPY_BFM(seq_fields, sps, log2_max_pic_order_cnt_lsb_minus4);
3307 COPY_BFM(seq_fields, sps, delta_pic_order_always_zero_flag);
3309 pic_param->pic_fields.value = 0; /* reset all bits */
3310 pic_param->pic_fields.bits.field_pic_flag = GST_VAAPI_PICTURE_IS_INTERLACED(picture);
3311 pic_param->pic_fields.bits.reference_pic_flag = GST_VAAPI_PICTURE_IS_REFERENCE(picture);
3313 COPY_BFM(pic_fields, pps, entropy_coding_mode_flag);
3314 COPY_BFM(pic_fields, pps, weighted_pred_flag);
3315 COPY_BFM(pic_fields, pps, weighted_bipred_idc);
3316 COPY_BFM(pic_fields, pps, transform_8x8_mode_flag);
3317 COPY_BFM(pic_fields, pps, constrained_intra_pred_flag);
3318 COPY_BFM(pic_fields, pps, pic_order_present_flag);
3319 COPY_BFM(pic_fields, pps, deblocking_filter_control_present_flag);
3320 COPY_BFM(pic_fields, pps, redundant_pic_cnt_present_flag);
3324 /* Detection of the first VCL NAL unit of a primary coded picture (7.4.1.2.4) */
/* Compares the current slice header (@pi) against the previous one
 * (@prev_pi) using the condition list from H.264 spec subclause 7.4.1.2.4;
 * any field that "differs in value" marks the start of a new primary coded
 * picture.  NOTE(review): this excerpt elides several lines of the
 * function (storage-class/return-type line, the early return when
 * prev_pi is NULL, the final return and #undef lines) — confirm against
 * the full file. */
3326 is_new_picture(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3328 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3329 GstH264PPS * const pps = slice_hdr->pps;
3330 GstH264SPS * const sps = pps->sequence;
3331 GstH264SliceHdr *prev_slice_hdr;
3335 prev_slice_hdr = &prev_pi->data.slice_hdr;
/* CHECK_EXPR() logs the differing field name and aborts the comparison;
 * CHECK_VALUE() applies it to one struct field of both slice headers. */
3337 #define CHECK_EXPR(expr, field_name) do { \
3339 GST_DEBUG(field_name " differs in value"); \
3344 #define CHECK_VALUE(new_slice_hdr, old_slice_hdr, field) \
3345 CHECK_EXPR(((new_slice_hdr)->field == (old_slice_hdr)->field), #field)
3347 /* view_id differs in value and VOIdx of current slice_hdr is less
3348 than the VOIdx of the prev_slice_hdr */
3349 CHECK_VALUE(pi, prev_pi, view_id);
3351 /* frame_num differs in value, regardless of inferred values to 0 */
3352 CHECK_VALUE(slice_hdr, prev_slice_hdr, frame_num);
3354 /* pic_parameter_set_id differs in value */
/* Note: compares the PPS pointers, not the ids themselves; equal ids
 * always map to the same parsed PPS object here. */
3355 CHECK_VALUE(slice_hdr, prev_slice_hdr, pps);
3357 /* field_pic_flag differs in value */
3358 CHECK_VALUE(slice_hdr, prev_slice_hdr, field_pic_flag);
3360 /* bottom_field_flag is present in both and differs in value */
3361 if (slice_hdr->field_pic_flag && prev_slice_hdr->field_pic_flag)
3362 CHECK_VALUE(slice_hdr, prev_slice_hdr, bottom_field_flag);
3364 /* nal_ref_idc differs in value with one of the nal_ref_idc values is 0 */
3365 CHECK_EXPR((pi->nalu.ref_idc != 0) ==
3366 (prev_pi->nalu.ref_idc != 0), "nal_ref_idc");
3368 /* POC type is 0 for both and either pic_order_cnt_lsb differs in
3369 value or delta_pic_order_cnt_bottom differs in value */
3370 if (sps->pic_order_cnt_type == 0) {
3371 CHECK_VALUE(slice_hdr, prev_slice_hdr, pic_order_cnt_lsb);
3372 if (pps->pic_order_present_flag && !slice_hdr->field_pic_flag)
3373 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt_bottom);
3376 /* POC type is 1 for both and either delta_pic_order_cnt[0]
3377 differs in value or delta_pic_order_cnt[1] differs in value */
3378 else if (sps->pic_order_cnt_type == 1) {
3379 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[0]);
3380 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[1]);
3383 /* IdrPicFlag differs in value */
3384 CHECK_VALUE(&pi->nalu, &prev_pi->nalu, idr_pic_flag);
3386 /* IdrPicFlag is equal to 1 for both and idr_pic_id differs in value */
3387 if (pi->nalu.idr_pic_flag)
3388 CHECK_VALUE(slice_hdr, prev_slice_hdr, idr_pic_id);
3395 /* Detection of a new access unit, assuming we are already in presence
3397 static inline gboolean
3398 is_new_access_unit(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
/* No previous slice info, or same view as before: treat as an access-unit
 * boundary.  NOTE(review): the TRUE return for this guard is elided in
 * this excerpt.  Otherwise (MVC), views of one access unit appear in
 * increasing view-order-count, so a decreasing voc starts a new AU. */
3400 if (!prev_pi || prev_pi->view_id == pi->view_id)
3402 return pi->voc < prev_pi->voc;
3405 /* Finds the first field picture corresponding to the supplied picture */
/* Returns the previously decoded first field that the slice described by
 * @pi completes, or an implicit NULL (elided lines) when the slice codes
 * a full frame, no previous frame store exists for this view, or the
 * frame_num does not match. */
3406 static GstVaapiPictureH264 *
3407 find_first_field(GstVaapiDecoderH264 *decoder, GstVaapiParserInfoH264 *pi)
3409 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3410 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3411 GstVaapiFrameStore *fs;
/* Frame-coded picture: there is no "first field" to pair with. */
3413 if (!slice_hdr->field_pic_flag)
/* Look up the last frame store of this view (indexed by voc); a store
 * that already holds a complete frame cannot accept a second field. */
3416 fs = priv->prev_frames[pi->voc];
3417 if (!fs || gst_vaapi_frame_store_has_frame(fs))
/* Both fields of a complementary pair must share the same frame_num. */
3420 if (fs->buffers[0]->frame_num == slice_hdr->frame_num)
3421 return fs->buffers[0];
/* Starts decoding of a new picture: activates SPS/PPS, allocates (or
 * re-uses, for the second field) the picture object, applies the SPS
 * cropping rectangle, and fills the VA picture parameters.  Called from
 * the start_frame() hook with the first slice unit of the picture. */
3425 static GstVaapiDecoderStatus
3426 decode_picture(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3428 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3429 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3430 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3431 GstH264PPS * const pps = ensure_pps(decoder, slice_hdr->pps);
3432 GstH264SPS * const sps = ensure_sps(decoder, slice_hdr->pps->sequence);
3433 GstVaapiPictureH264 *picture, *first_field;
3434 GstVaapiDecoderStatus status;
3436 g_return_val_if_fail(pps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3437 g_return_val_if_fail(sps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3439 /* Only decode base stream for MVC */
/* NOTE(review): the condition guarding this drop (elided here) decides
 * when MVC substream pictures are skipped — confirm against full file. */
3440 switch (sps->profile_idc) {
3441 case GST_H264_PROFILE_MULTIVIEW_HIGH:
3442 case GST_H264_PROFILE_STEREO_HIGH:
3444 GST_DEBUG("drop picture from substream");
3445 return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
3450 status = ensure_context(decoder, sps);
3451 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3454 priv->decoder_state = 0;
3456 first_field = find_first_field(decoder, pi);
3458 /* Re-use current picture where the first field was decoded */
3459 picture = gst_vaapi_picture_h264_new_field(first_field);
3461 GST_ERROR("failed to allocate field picture");
3462 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3466 /* Create new picture */
3467 picture = gst_vaapi_picture_h264_new(decoder);
3469 GST_ERROR("failed to allocate picture");
3470 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
/* priv->current_picture holds the only long-lived ref; drop the local one. */
3473 gst_vaapi_picture_replace(&priv->current_picture, picture);
3474 gst_vaapi_picture_unref(picture);
3476 /* Clear inter-view references list if this is the primary coded
3477 picture of the current access unit */
3478 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3479 g_ptr_array_set_size(priv->inter_views, 0);
3481 /* Update cropping rectangle */
3482 if (sps->frame_cropping_flag) {
3483 GstVaapiRectangle crop_rect;
3484 crop_rect.x = sps->crop_rect_x;
3485 crop_rect.y = sps->crop_rect_y;
3486 crop_rect.width = sps->crop_rect_width;
3487 crop_rect.height = sps->crop_rect_height;
3488 gst_vaapi_picture_set_crop_rect(&picture->base, &crop_rect);
3491 status = ensure_quant_matrix(decoder, picture);
3492 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
3493 GST_ERROR("failed to reset quantizer matrix");
3497 if (!init_picture(decoder, picture, pi))
3498 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3499 if (!fill_picture(decoder, picture))
3500 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3502 priv->decoder_state = pi->state;
3503 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Returns the bit offset of the slice data inside the NAL unit: the NAL
 * header bytes plus the parsed slice-header size, minus 8 bits for each
 * emulation-prevention byte encountered while parsing the header. */
3507 get_slice_data_bit_offset(GstH264SliceHdr *slice_hdr, guint nal_header_bytes)
3511 epb_count = slice_hdr->n_emulation_prevention_bytes;
3512 return 8 * nal_header_bytes + slice_hdr->header_size - epb_count * 8;
/* Fills the prediction weight table of the VA slice parameters from the
 * parsed slice header.  One table (list 0) is needed for weighted P/SP
 * slices, two (lists 0 and 1) for B slices with weighted_bipred_idc == 1;
 * otherwise all weight fields are left zeroed. */
3516 fill_pred_weight_table(GstVaapiDecoderH264 *decoder,
3517 GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3519 VASliceParameterBufferH264 * const slice_param = slice->param;
3520 GstH264PPS * const pps = get_pps(decoder);
3521 GstH264SPS * const sps = get_sps(decoder);
3522 GstH264PredWeightTable * const w = &slice_hdr->pred_weight_table;
3523 guint num_weight_tables = 0;
3526 if (pps->weighted_pred_flag &&
3527 (GST_H264_IS_P_SLICE(slice_hdr) || GST_H264_IS_SP_SLICE(slice_hdr)))
3528 num_weight_tables = 1;
3529 else if (pps->weighted_bipred_idc == 1 && GST_H264_IS_B_SLICE(slice_hdr))
3530 num_weight_tables = 2;
3532 num_weight_tables = 0;
/* Reset all weight-table fields first so unused lists stay deterministic. */
3534 slice_param->luma_log2_weight_denom = 0;
3535 slice_param->chroma_log2_weight_denom = 0;
3536 slice_param->luma_weight_l0_flag = 0;
3537 slice_param->chroma_weight_l0_flag = 0;
3538 slice_param->luma_weight_l1_flag = 0;
3539 slice_param->chroma_weight_l1_flag = 0;
3541 if (num_weight_tables < 1)
3544 slice_param->luma_log2_weight_denom = w->luma_log2_weight_denom;
3545 slice_param->chroma_log2_weight_denom = w->chroma_log2_weight_denom;
/* Copy list-0 luma weights/offsets for every active reference. */
3547 slice_param->luma_weight_l0_flag = 1;
3548 for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3549 slice_param->luma_weight_l0[i] = w->luma_weight_l0[i];
3550 slice_param->luma_offset_l0[i] = w->luma_offset_l0[i];
/* Chroma weights only exist when ChromaArrayType != 0 (i.e. not mono). */
3553 slice_param->chroma_weight_l0_flag = sps->chroma_array_type != 0;
3554 if (slice_param->chroma_weight_l0_flag) {
3555 for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3556 for (j = 0; j < 2; j++) {
3557 slice_param->chroma_weight_l0[i][j] = w->chroma_weight_l0[i][j];
3558 slice_param->chroma_offset_l0[i][j] = w->chroma_offset_l0[i][j];
3563 if (num_weight_tables < 2)
/* Same copy for list 1 (B slices only). */
3566 slice_param->luma_weight_l1_flag = 1;
3567 for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3568 slice_param->luma_weight_l1[i] = w->luma_weight_l1[i];
3569 slice_param->luma_offset_l1[i] = w->luma_offset_l1[i];
3572 slice_param->chroma_weight_l1_flag = sps->chroma_array_type != 0;
3573 if (slice_param->chroma_weight_l1_flag) {
3574 for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3575 for (j = 0; j < 2; j++) {
3576 slice_param->chroma_weight_l1[i][j] = w->chroma_weight_l1[i][j];
3577 slice_param->chroma_offset_l1[i][j] = w->chroma_offset_l1[i][j];
/* Fills RefPicList0/RefPicList1 of the VA slice parameters from the
 * decoder's modified reference picture lists.  I slices need no list,
 * P/SP slices need list 0, B slices need both; unused entries beyond the
 * populated references are reset to the "invalid" VA picture. */
3585 fill_RefPicList(GstVaapiDecoderH264 *decoder,
3586 GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3588 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3589 VASliceParameterBufferH264 * const slice_param = slice->param;
3590 guint i, num_ref_lists = 0;
3592 slice_param->num_ref_idx_l0_active_minus1 = 0;
3593 slice_param->num_ref_idx_l1_active_minus1 = 0;
/* NOTE(review): the assignments of num_ref_lists (2 for B, 0 for I,
 * 1 otherwise) are elided in this excerpt. */
3595 if (GST_H264_IS_B_SLICE(slice_hdr))
3597 else if (GST_H264_IS_I_SLICE(slice_hdr))
3602 if (num_ref_lists < 1)
3605 slice_param->num_ref_idx_l0_active_minus1 =
3606 slice_hdr->num_ref_idx_l0_active_minus1;
/* Copy the valid RefPicList0 entries, then pad the remainder. */
3608 for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
3609 vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList0[i],
3610 priv->RefPicList0[i]);
3611 for (; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
3612 vaapi_init_picture(&slice_param->RefPicList0[i]);
3614 if (num_ref_lists < 2)
3617 slice_param->num_ref_idx_l1_active_minus1 =
3618 slice_hdr->num_ref_idx_l1_active_minus1;
/* Same for RefPicList1 (B slices only). */
3620 for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
3621 vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList1[i],
3622 priv->RefPicList1[i]);
3623 for (; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
3624 vaapi_init_picture(&slice_param->RefPicList1[i]);
/* Fills the VASliceParameterBufferH264 for @slice from the parsed slice
 * header, then delegates to fill_RefPicList() and
 * fill_pred_weight_table() for the reference lists and weights. */
3629 fill_slice(GstVaapiDecoderH264 *decoder,
3630 GstVaapiSlice *slice, GstVaapiParserInfoH264 *pi)
3632 VASliceParameterBufferH264 * const slice_param = slice->param;
3633 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3635 /* Fill in VASliceParameterBufferH264 */
3636 slice_param->slice_data_bit_offset =
3637 get_slice_data_bit_offset(slice_hdr, pi->nalu.header_bytes);
3638 slice_param->first_mb_in_slice = slice_hdr->first_mb_in_slice;
/* slice_type values 5..9 alias 0..4 (spec Table 7-6), hence the % 5. */
3639 slice_param->slice_type = slice_hdr->type % 5;
3640 slice_param->direct_spatial_mv_pred_flag = slice_hdr->direct_spatial_mv_pred_flag;
3641 slice_param->cabac_init_idc = slice_hdr->cabac_init_idc;
3642 slice_param->slice_qp_delta = slice_hdr->slice_qp_delta;
3643 slice_param->disable_deblocking_filter_idc = slice_hdr->disable_deblocking_filter_idc;
3644 slice_param->slice_alpha_c0_offset_div2 = slice_hdr->slice_alpha_c0_offset_div2;
3645 slice_param->slice_beta_offset_div2 = slice_hdr->slice_beta_offset_div2;
3647 if (!fill_RefPicList(decoder, slice, slice_hdr))
3649 if (!fill_pred_weight_table(decoder, slice, slice_hdr))
/* Decodes one slice NAL unit into the current picture: validates parser
 * state and active SPS/PPS, maps the input buffer to extract the slice
 * payload, builds the GstVaapiSlice and appends it to the picture. */
3654 static GstVaapiDecoderStatus
3655 decode_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3657 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3658 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3659 GstVaapiPictureH264 * const picture = priv->current_picture;
3660 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3661 GstVaapiSlice *slice;
3662 GstBuffer * const buffer =
3663 GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
3664 GstMapInfo map_info;
3666 GST_DEBUG("slice (%u bytes)", pi->nalu.size);
/* Without valid picture headers the slice is silently skipped (SUCCESS),
 * so a broken stream start does not abort the whole decode. */
3668 if (!is_valid_state(pi->state,
3669 GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS)) {
3670 GST_WARNING("failed to receive enough headers to decode slice");
3671 return GST_VAAPI_DECODER_STATUS_SUCCESS;
3674 if (!ensure_pps(decoder, slice_hdr->pps)) {
3675 GST_ERROR("failed to activate PPS");
3676 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3679 if (!ensure_sps(decoder, slice_hdr->pps->sequence)) {
3680 GST_ERROR("failed to activate SPS");
3681 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3684 if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
3685 GST_ERROR("failed to map buffer");
3686 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3689 /* Check whether this is the first/last slice in the current access unit */
3690 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3691 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_START);
3692 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)
3693 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
/* The slice object copies the payload, so the buffer can be unmapped
 * immediately after creation. */
3695 slice = GST_VAAPI_SLICE_NEW(H264, decoder,
3696 (map_info.data + unit->offset + pi->nalu.offset), pi->nalu.size);
3697 gst_buffer_unmap(buffer, &map_info);
3699 GST_ERROR("failed to allocate slice");
3700 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
/* Reference lists must be (re)initialized before filling the slice params. */
3703 init_picture_refs(decoder, picture, slice_hdr);
3704 if (!fill_slice(decoder, slice, pi)) {
3705 gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(slice));
3706 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3709 gst_vaapi_picture_add_slice(GST_VAAPI_PICTURE_CAST(picture), slice);
3710 picture->last_slice_hdr = slice_hdr;
3711 priv->decoder_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
3712 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Scans @adapter for an Annex-B start code (0x000001, matched via the
 * masked 32-bit pattern 0x00000100); returns its offset or -1. */
3716 scan_for_start_code(GstAdapter *adapter, guint ofs, guint size, guint32 *scp)
3718 return (gint)gst_adapter_masked_scan_uint32_peek(adapter,
3719 0xffffff00, 0x00000100,
/* Dispatches one parsed NAL unit to the matching decode_*() handler,
 * accumulating the parser state into the decoder state. */
3724 static GstVaapiDecoderStatus
3725 decode_unit(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3727 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3728 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3729 GstVaapiDecoderStatus status;
3731 priv->decoder_state |= pi->state;
3732 switch (pi->nalu.type) {
3733 case GST_H264_NAL_SPS:
3734 status = decode_sps(decoder, unit);
3736 case GST_H264_NAL_SUBSET_SPS:
3737 status = decode_subset_sps(decoder, unit);
3739 case GST_H264_NAL_PPS:
3740 status = decode_pps(decoder, unit);
3742 case GST_H264_NAL_SLICE_EXT:
3743 case GST_H264_NAL_SLICE_IDR:
3744 /* fall-through. IDR specifics are handled in init_picture() */
3745 case GST_H264_NAL_SLICE:
3746 status = decode_slice(decoder, unit);
3748 case GST_H264_NAL_SEQ_END:
3749 case GST_H264_NAL_STREAM_END:
3750 status = decode_sequence_end(decoder);
/* SEI payloads were already handled at parse time; nothing to decode. */
3752 case GST_H264_NAL_SEI:
3753 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3756 GST_WARNING("unsupported NAL unit type %d", pi->nalu.type);
3757 status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
/* Decodes avcC-format codec-data: reads the NAL length size, then parses
 * and decodes each embedded SPS and PPS NAL unit in sequence.  On success
 * the decoder is switched to AVC (length-prefixed) mode via priv->is_avcC. */
3763 static GstVaapiDecoderStatus
3764 gst_vaapi_decoder_h264_decode_codec_data(GstVaapiDecoder *base_decoder,
3765 const guchar *buf, guint buf_size)
3767 GstVaapiDecoderH264 * const decoder =
3768 GST_VAAPI_DECODER_H264_CAST(base_decoder);
3769 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3770 GstVaapiDecoderStatus status;
3771 GstVaapiDecoderUnit unit;
3772 GstVaapiParserInfoH264 *pi = NULL;
3773 GstH264ParserResult result;
3774 guint i, ofs, num_sps, num_pps;
3776 unit.parsed_info = NULL;
3779 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3782 GST_ERROR("failed to decode codec-data, not in avcC format");
3783 return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
/* avcC byte 4, low 2 bits: lengthSizeMinusOne -> NAL length prefix size. */
3786 priv->nal_length_size = (buf[4] & 0x03) + 1;
/* avcC byte 5, low 5 bits: number of SPS entries that follow. */
3788 num_sps = buf[5] & 0x1f;
3791 for (i = 0; i < num_sps; i++) {
3792 pi = gst_vaapi_parser_info_h264_new();
3794 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3795 unit.parsed_info = pi;
/* Each parameter set is prefixed by a 2-byte length field in avcC. */
3797 result = gst_h264_parser_identify_nalu_avc(
3799 buf, ofs, buf_size, 2,
3802 if (result != GST_H264_PARSER_OK) {
3803 status = get_status(result);
3807 status = parse_sps(decoder, &unit);
3808 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3810 ofs = pi->nalu.offset + pi->nalu.size;
3812 status = decode_sps(decoder, &unit);
3813 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3815 gst_vaapi_parser_info_h264_replace(&pi, NULL);
3821 for (i = 0; i < num_pps; i++) {
3822 pi = gst_vaapi_parser_info_h264_new();
3824 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3825 unit.parsed_info = pi;
3827 result = gst_h264_parser_identify_nalu_avc(
3829 buf, ofs, buf_size, 2,
3832 if (result != GST_H264_PARSER_OK) {
3833 status = get_status(result);
3837 status = parse_pps(decoder, &unit);
3838 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3840 ofs = pi->nalu.offset + pi->nalu.size;
3842 status = decode_pps(decoder, &unit);
3843 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3845 gst_vaapi_parser_info_h264_replace(&pi, NULL);
3848 priv->is_avcC = TRUE;
3849 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Common cleanup path: releases any parser info still held on error. */
3852 gst_vaapi_parser_info_h264_replace(&pi, NULL);
/* Lazily opens the decoder on first use and processes any codec-data
 * (avcC) supplied via caps.  Subsequent calls are cheap no-ops. */
3856 static GstVaapiDecoderStatus
3857 ensure_decoder(GstVaapiDecoderH264 *decoder)
3859 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3860 GstVaapiDecoderStatus status;
3862 if (!priv->is_opened) {
3863 priv->is_opened = gst_vaapi_decoder_h264_open(decoder);
3864 if (!priv->is_opened)
3865 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
3867 status = gst_vaapi_decoder_decode_codec_data(
3868 GST_VAAPI_DECODER_CAST(decoder));
3869 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3872 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* parse() hook: extracts exactly one NAL unit from the adapter (either
 * length-prefixed AVC or Annex-B start-code delimited), parses its
 * headers, and computes the unit flags (FRAME/AU start/end, SKIP, SLICE)
 * that drive the frame assembly in the base decoder. */
3875 static GstVaapiDecoderStatus
3876 gst_vaapi_decoder_h264_parse(GstVaapiDecoder *base_decoder,
3877 GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
3879 GstVaapiDecoderH264 * const decoder =
3880 GST_VAAPI_DECODER_H264_CAST(base_decoder);
3881 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3882 GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
3883 GstVaapiParserInfoH264 *pi;
3884 GstVaapiDecoderStatus status;
3885 GstH264ParserResult result;
3887 guint i, size, buf_size, nalu_size, flags;
3890 gboolean at_au_end = FALSE;
3892 status = ensure_decoder(decoder);
3893 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* NALU/AU-aligned input: the fast (contiguous) region is one whole unit,
 * so only that much needs to be considered. */
3896 switch (priv->stream_alignment) {
3897 case GST_VAAPI_STREAM_ALIGN_H264_NALU:
3898 case GST_VAAPI_STREAM_ALIGN_H264_AU:
3899 size = gst_adapter_available_fast(adapter);
3902 size = gst_adapter_available(adapter);
/* AVC (avcC) mode: NAL units are length-prefixed, no start codes. */
3906 if (priv->is_avcC) {
3907 if (size < priv->nal_length_size)
3908 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3910 buf = (guchar *)&start_code;
3911 g_assert(priv->nal_length_size <= sizeof(start_code));
3912 gst_adapter_copy(adapter, buf, 0, priv->nal_length_size);
/* Big-endian length prefix, priv->nal_length_size bytes wide. */
3915 for (i = 0; i < priv->nal_length_size; i++)
3916 nalu_size = (nalu_size << 8) | buf[i];
3918 buf_size = priv->nal_length_size + nalu_size;
3919 if (size < buf_size)
3920 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3921 else if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_AU)
3922 at_au_end = (buf_size == size);
3926 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3928 if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_NALU)
/* Annex-B mode: locate this unit's start code, flush any garbage before
 * it, then find the next start code to delimit the unit. */
3931 ofs = scan_for_start_code(adapter, 0, size, NULL);
3933 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3936 gst_adapter_flush(adapter, ofs);
/* input_offset2 caches the scan position across calls to avoid
 * rescanning bytes already known to contain no start code. */
3940 ofs2 = ps->input_offset2 - ofs - 4;
3944 ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
3945 scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
3947 // Assume the whole NAL unit is present if end-of-stream
3948 // or stream buffers aligned on access unit boundaries
3949 if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_AU)
3952 ps->input_offset2 = size;
3953 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3960 ps->input_offset2 = 0;
3962 buf = (guchar *)gst_adapter_map(adapter, buf_size);
3964 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3966 unit->size = buf_size;
3968 pi = gst_vaapi_parser_info_h264_new();
3970 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
/* The unit owns one reference on pi; priv->prev_pi below keeps another. */
3972 gst_vaapi_decoder_unit_set_parsed_info(unit,
3973 pi, (GDestroyNotify)gst_vaapi_mini_object_unref);
3976 result = gst_h264_parser_identify_nalu_avc(priv->parser,
3977 buf, 0, buf_size, priv->nal_length_size, &pi->nalu);
3979 result = gst_h264_parser_identify_nalu_unchecked(priv->parser,
3980 buf, 0, buf_size, &pi->nalu);
3981 status = get_status(result);
3982 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* Parse the headers now so decode_unit() only has to apply them. */
3985 switch (pi->nalu.type) {
3986 case GST_H264_NAL_SPS:
3987 status = parse_sps(decoder, unit);
3989 case GST_H264_NAL_SUBSET_SPS:
3990 status = parse_subset_sps(decoder, unit);
3992 case GST_H264_NAL_PPS:
3993 status = parse_pps(decoder, unit);
3995 case GST_H264_NAL_SEI:
3996 status = parse_sei(decoder, unit);
3998 case GST_H264_NAL_SLICE_EXT:
/* Non-MVC slice extensions are not parsed, just accepted. */
3999 if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
4000 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
4004 case GST_H264_NAL_SLICE_IDR:
4005 case GST_H264_NAL_SLICE:
4006 status = parse_slice(decoder, unit);
4009 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
4012 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* AU-aligned input ending here implies the frame and AU end with it. */
4017 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END |
4018 GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4020 switch (pi->nalu.type) {
4021 case GST_H264_NAL_AU_DELIMITER:
4022 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4023 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4025 case GST_H264_NAL_FILLER_DATA:
4026 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4028 case GST_H264_NAL_STREAM_END:
4029 flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
4031 case GST_H264_NAL_SEQ_END:
4032 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
4033 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4035 case GST_H264_NAL_SPS:
4036 case GST_H264_NAL_SUBSET_SPS:
4037 case GST_H264_NAL_PPS:
4038 case GST_H264_NAL_SEI:
4039 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4040 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4042 case GST_H264_NAL_SLICE_EXT:
4043 if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
4044 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4048 case GST_H264_NAL_SLICE_IDR:
4049 case GST_H264_NAL_SLICE:
4050 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
/* Boundary detection: a previous unit already marked AU_END, or the
 * slice-header comparison (7.4.1.2.4) reports a new picture. */
4051 if (priv->prev_pi &&
4052 (priv->prev_pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)) {
4053 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4054 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4056 else if (is_new_picture(pi, priv->prev_slice_pi)) {
4057 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4058 if (is_new_access_unit(pi, priv->prev_slice_pi))
4059 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4061 gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, pi);
4063 case GST_H264_NAL_SPS_EXT:
4064 case GST_H264_NAL_SLICE_AUX:
4065 /* skip SPS extension and auxiliary slice for now */
4066 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4068 case GST_H264_NAL_PREFIX_UNIT:
4069 /* skip Prefix NAL units for now */
4070 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP |
4071 GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4072 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
/* NAL types 14..18 are reserved/extension types that start an AU. */
4075 if (pi->nalu.type >= 14 && pi->nalu.type <= 18)
4076 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4077 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
/* Retroactively close the previous AU when this unit starts a new one. */
4080 if ((flags & GST_VAAPI_DECODER_UNIT_FLAGS_AU) && priv->prev_slice_pi)
4081 priv->prev_slice_pi->flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4082 GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
/* nalu.data pointed into the (now unmapped) adapter; drop the pointer. */
4084 pi->nalu.data = NULL;
4085 pi->state = priv->parser_state;
4087 gst_vaapi_parser_info_h264_replace(&priv->prev_pi, pi);
4088 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* decode() hook: makes sure the decoder is opened, then decodes the
 * single parsed unit. */
4091 static GstVaapiDecoderStatus
4092 gst_vaapi_decoder_h264_decode(GstVaapiDecoder *base_decoder,
4093 GstVaapiDecoderUnit *unit)
4095 GstVaapiDecoderH264 * const decoder =
4096 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4097 GstVaapiDecoderStatus status;
4099 status = ensure_decoder(decoder);
4100 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
4102 return decode_unit(decoder, unit);
/* start_frame() hook: begins a new picture from the first slice unit. */
4105 static GstVaapiDecoderStatus
4106 gst_vaapi_decoder_h264_start_frame(GstVaapiDecoder *base_decoder,
4107 GstVaapiDecoderUnit *unit)
4109 GstVaapiDecoderH264 * const decoder =
4110 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4112 return decode_picture(decoder, unit);
/* end_frame() hook: submits the accumulated picture for decoding. */
4115 static GstVaapiDecoderStatus
4116 gst_vaapi_decoder_h264_end_frame(GstVaapiDecoder *base_decoder)
4118 GstVaapiDecoderH264 * const decoder =
4119 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4121 return decode_current_picture(decoder);
/* flush() hook: drains the whole DPB (all views, hence the NULL arg). */
4124 static GstVaapiDecoderStatus
4125 gst_vaapi_decoder_h264_flush(GstVaapiDecoder *base_decoder)
4127 GstVaapiDecoderH264 * const decoder =
4128 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4130 dpb_flush(decoder, NULL);
4131 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Initializes the H.264 decoder class: wires the vfunc table of the base
 * GstVaapiDecoder class to this decoder's implementations. */
4135 gst_vaapi_decoder_h264_class_init(GstVaapiDecoderH264Class *klass)
4137 GstVaapiMiniObjectClass * const object_class =
4138 GST_VAAPI_MINI_OBJECT_CLASS(klass);
4139 GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
4141 object_class->size = sizeof(GstVaapiDecoderH264);
4142 object_class->finalize = (GDestroyNotify)gst_vaapi_decoder_finalize;
4144 decoder_class->create = gst_vaapi_decoder_h264_create;
4145 decoder_class->destroy = gst_vaapi_decoder_h264_destroy;
4146 decoder_class->parse = gst_vaapi_decoder_h264_parse;
4147 decoder_class->decode = gst_vaapi_decoder_h264_decode;
4148 decoder_class->start_frame = gst_vaapi_decoder_h264_start_frame;
4149 decoder_class->end_frame = gst_vaapi_decoder_h264_end_frame;
4150 decoder_class->flush = gst_vaapi_decoder_h264_flush;
4152 decoder_class->decode_codec_data =
4153 gst_vaapi_decoder_h264_decode_codec_data;
/* Returns the singleton decoder class, initializing it exactly once in a
 * thread-safe way via g_once_init_enter/leave. */
4156 static inline const GstVaapiDecoderClass *
4157 gst_vaapi_decoder_h264_class(void)
4159 static GstVaapiDecoderH264Class g_class;
4160 static gsize g_class_init = FALSE;
4162 if (g_once_init_enter(&g_class_init)) {
4163 gst_vaapi_decoder_h264_class_init(&g_class);
4164 g_once_init_leave(&g_class_init, TRUE);
4166 return GST_VAAPI_DECODER_CLASS(&g_class);
4170 * gst_vaapi_decoder_h264_set_alignment:
4171 * @decoder: a #GstVaapiDecoderH264
4172 * @alignment: the #GstVaapiStreamAlignH264
4174 * Specifies how stream buffers are aligned / fed, i.e. the boundaries
4175 * of each buffer that is supplied to the decoder. This could be no
4176 * specific alignment, NAL unit boundaries, or access unit boundaries.
4179 gst_vaapi_decoder_h264_set_alignment(GstVaapiDecoderH264 *decoder,
4180 GstVaapiStreamAlignH264 alignment)
4182 g_return_if_fail(decoder != NULL);
/* Read by the parse() hook to choose the NAL unit extraction strategy. */
4184 decoder->priv.stream_alignment = alignment;
4188 * gst_vaapi_decoder_h264_new:
4189 * @display: a #GstVaapiDisplay
4190 * @caps: a #GstCaps holding codec information
4192 * Creates a new #GstVaapiDecoder for H.264 decoding. The @caps can
4193 * hold extra information like codec-data and pictured coded size.
4195 * Return value: the newly allocated #GstVaapiDecoder object
4198 gst_vaapi_decoder_h264_new(GstVaapiDisplay *display, GstCaps *caps)
4200 return gst_vaapi_decoder_new(gst_vaapi_decoder_h264_class(), display, caps);