2 * gstvaapidecoder_h264.c - H.264 decoder
4 * Copyright (C) 2011-2014 Intel Corporation
5 * Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public License
9 * as published by the Free Software Foundation; either version 2.1
10 * of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, write to the Free
19 * Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
20 * Boston, MA 02110-1301 USA
24 * SECTION:gstvaapidecoder_h264
25 * @short_description: H.264 decoder
30 #include <gst/base/gstadapter.h>
31 #include <gst/codecparsers/gsth264parser.h>
32 #include "gstvaapidecoder_h264.h"
33 #include "gstvaapidecoder_objects.h"
34 #include "gstvaapidecoder_priv.h"
35 #include "gstvaapidisplay_priv.h"
36 #include "gstvaapiobject_priv.h"
37 #include "gstvaapiutils_h264_priv.h"
40 #include "gstvaapidebug.h"
42 /* Defined to 1 if strict ordering of DPB is needed. Only useful for debug */
43 #define USE_STRICT_DPB_ORDERING 0
45 typedef struct _GstVaapiDecoderH264Private GstVaapiDecoderH264Private;
46 typedef struct _GstVaapiDecoderH264Class GstVaapiDecoderH264Class;
47 typedef struct _GstVaapiFrameStore GstVaapiFrameStore;
48 typedef struct _GstVaapiFrameStoreClass GstVaapiFrameStoreClass;
49 typedef struct _GstVaapiParserInfoH264 GstVaapiParserInfoH264;
50 typedef struct _GstVaapiPictureH264 GstVaapiPictureH264;
52 // Used for field_poc[]
54 #define BOTTOM_FIELD 1
56 /* ------------------------------------------------------------------------- */
57 /* --- H.264 Parser Info --- */
58 /* ------------------------------------------------------------------------- */
61 * Extended decoder unit flags:
63 * @GST_VAAPI_DECODER_UNIT_AU_START: marks the start of an access unit.
64 * @GST_VAAPI_DECODER_UNIT_AU_END: marks the end of an access unit.
67 /* This flag does not strictly follow the definitions (7.4.1.2.3)
68 for detecting the start of an access unit as we are only
69 interested in knowing if the current slice is the first one or
70 the last one in the current access unit */
71 GST_VAAPI_DECODER_UNIT_FLAG_AU_START = (
72 GST_VAAPI_DECODER_UNIT_FLAG_LAST << 0),
73 GST_VAAPI_DECODER_UNIT_FLAG_AU_END = (
74 GST_VAAPI_DECODER_UNIT_FLAG_LAST << 1),
76 GST_VAAPI_DECODER_UNIT_FLAGS_AU = (
77 GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
78 GST_VAAPI_DECODER_UNIT_FLAG_AU_END),
81 #define GST_VAAPI_PARSER_INFO_H264(obj) \
82 ((GstVaapiParserInfoH264 *)(obj))
/* Per-NAL parser state carried from the parse step to the decode step.
 * NOTE(review): this dump elides several original members (at least the
 * GstH264NalUnit and the sps/pps/sei data union referenced by
 * gst_vaapi_parser_info_h264_finalize()) — confirm against upstream. */
84 struct _GstVaapiParserInfoH264 {
85     GstVaapiMiniObject parent_instance;
91     GstH264SliceHdr slice_hdr;
94     guint flags; // Same as decoder unit flags (persistent)
95     guint view_id; // View ID of slice
96     guint voc; // View order index (VOIdx) of slice
100 gst_vaapi_parser_info_h264_finalize(GstVaapiParserInfoH264 *pi)
102 switch (pi->nalu.type) {
103 case GST_H264_NAL_SPS:
104 case GST_H264_NAL_SUBSET_SPS:
105 gst_h264_sps_clear(&pi->data.sps);
107 case GST_H264_NAL_SEI:
109 g_array_unref(pi->data.sei);
116 static inline const GstVaapiMiniObjectClass *
117 gst_vaapi_parser_info_h264_class(void)
119 static const GstVaapiMiniObjectClass GstVaapiParserInfoH264Class = {
120 .size = sizeof(GstVaapiParserInfoH264),
121 .finalize = (GDestroyNotify)gst_vaapi_parser_info_h264_finalize
123 return &GstVaapiParserInfoH264Class;
126 static inline GstVaapiParserInfoH264 *
127 gst_vaapi_parser_info_h264_new(void)
129 return (GstVaapiParserInfoH264 *)
130 gst_vaapi_mini_object_new(gst_vaapi_parser_info_h264_class());
133 #define gst_vaapi_parser_info_h264_ref(pi) \
134 gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(pi))
136 #define gst_vaapi_parser_info_h264_unref(pi) \
137 gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(pi))
139 #define gst_vaapi_parser_info_h264_replace(old_pi_ptr, new_pi) \
140 gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_pi_ptr), \
141 (GstVaapiMiniObject *)(new_pi))
143 /* ------------------------------------------------------------------------- */
144 /* --- H.264 Pictures --- */
145 /* ------------------------------------------------------------------------- */
148 * Extended picture flags:
150 * @GST_VAAPI_PICTURE_FLAG_IDR: flag that specifies an IDR picture
151 * @GST_VAAPI_PICTURE_FLAG_INTER_VIEW: flag that indicates the picture
152 * may be used for inter-view prediction
153 * @GST_VAAPI_PICTURE_FLAG_ANCHOR: flag that specifies an anchor picture,
154 * i.e. a picture that is decoded with only inter-view prediction,
155 * and not inter prediction
156 * @GST_VAAPI_PICTURE_FLAG_AU_START: flag that marks the start of an
158 * @GST_VAAPI_PICTURE_FLAG_AU_END: flag that marks the end of an
160 * @GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE: flag that specifies
161 * "used for short-term reference"
162 * @GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE: flag that specifies
163 * "used for long-term reference"
164 * @GST_VAAPI_PICTURE_FLAGS_REFERENCE: mask covering any kind of
165 * reference picture (short-term reference or long-term reference)
168 GST_VAAPI_PICTURE_FLAG_IDR = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
169 GST_VAAPI_PICTURE_FLAG_REFERENCE2 = (GST_VAAPI_PICTURE_FLAG_LAST << 1),
170 GST_VAAPI_PICTURE_FLAG_INTER_VIEW = (GST_VAAPI_PICTURE_FLAG_LAST << 2),
171 GST_VAAPI_PICTURE_FLAG_ANCHOR = (GST_VAAPI_PICTURE_FLAG_LAST << 3),
172 GST_VAAPI_PICTURE_FLAG_AU_START = (GST_VAAPI_PICTURE_FLAG_LAST << 4),
173 GST_VAAPI_PICTURE_FLAG_AU_END = (GST_VAAPI_PICTURE_FLAG_LAST << 5),
175 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE = (
176 GST_VAAPI_PICTURE_FLAG_REFERENCE),
177 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE = (
178 GST_VAAPI_PICTURE_FLAG_REFERENCE | GST_VAAPI_PICTURE_FLAG_REFERENCE2),
179 GST_VAAPI_PICTURE_FLAGS_REFERENCE = (
180 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
181 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
184 #define GST_VAAPI_PICTURE_IS_IDR(picture) \
185 (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR))
187 #define GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture) \
188 ((GST_VAAPI_PICTURE_FLAGS(picture) & \
189 GST_VAAPI_PICTURE_FLAGS_REFERENCE) == \
190 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE)
192 #define GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture) \
193 ((GST_VAAPI_PICTURE_FLAGS(picture) & \
194 GST_VAAPI_PICTURE_FLAGS_REFERENCE) == \
195 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE)
197 #define GST_VAAPI_PICTURE_IS_INTER_VIEW(picture) \
198 (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW))
200 #define GST_VAAPI_PICTURE_IS_ANCHOR(picture) \
201 (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_ANCHOR))
203 #define GST_VAAPI_PICTURE_H264(picture) \
204 ((GstVaapiPictureH264 *)(picture))
/* H.264-specific picture wrapping the codec-independent GstVaapiPicture.
 * NOTE(review): the dump elides some original members (at least the
 * `structure` and `field_poc[2]` fields that gst_vaapi_picture_h264_create()
 * and gst_vaapi_frame_store_add() reference) — confirm against upstream. */
206 struct _GstVaapiPictureH264 {
207     GstVaapiPicture base;
208     GstH264SliceHdr *last_slice_hdr;
211     gint32 frame_num; // Original frame_num from slice_header()
212     gint32 frame_num_wrap; // Temporary for ref pic marking: FrameNumWrap
213     gint32 long_term_frame_idx; // Temporary for ref pic marking: LongTermFrameIdx
214     gint32 pic_num; // Temporary for ref pic marking: PicNum
215     gint32 long_term_pic_num; // Temporary for ref pic marking: LongTermPicNum
216     GstVaapiPictureH264 *other_field; // Temporary for ref pic marking: other field in the same frame store
217     guint output_flag : 1;
218     guint output_needed : 1;
221 GST_VAAPI_CODEC_DEFINE_TYPE(GstVaapiPictureH264, gst_vaapi_picture_h264);
224 gst_vaapi_picture_h264_destroy(GstVaapiPictureH264 *picture)
226 gst_vaapi_picture_destroy(GST_VAAPI_PICTURE(picture));
230 gst_vaapi_picture_h264_create(
231 GstVaapiPictureH264 *picture,
232 const GstVaapiCodecObjectConstructorArgs *args
235 if (!gst_vaapi_picture_create(GST_VAAPI_PICTURE(picture), args))
238 picture->field_poc[0] = G_MAXINT32;
239 picture->field_poc[1] = G_MAXINT32;
240 picture->output_needed = FALSE;
244 static inline GstVaapiPictureH264 *
245 gst_vaapi_picture_h264_new(GstVaapiDecoderH264 *decoder)
247 return (GstVaapiPictureH264 *)gst_vaapi_codec_object_new(
248 &GstVaapiPictureH264Class,
249 GST_VAAPI_CODEC_BASE(decoder),
250 NULL, sizeof(VAPictureParameterBufferH264),
256 gst_vaapi_picture_h264_set_reference(
257 GstVaapiPictureH264 *picture,
258 guint reference_flags,
264 GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
265 GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
267 if (!other_field || !(picture = picture->other_field))
269 GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
270 GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
273 static inline GstVaapiPictureH264 *
274 gst_vaapi_picture_h264_new_field(GstVaapiPictureH264 *picture)
276 g_return_val_if_fail(picture, NULL);
278 return (GstVaapiPictureH264 *)gst_vaapi_picture_new_field(&picture->base);
281 /* ------------------------------------------------------------------------- */
282 /* --- Frame Buffers (DPB) --- */
283 /* ------------------------------------------------------------------------- */
/* DPB entry: holds one frame (one buffer) or a complementary field pair
 * (two buffers). NOTE(review): this dump elides other members referenced
 * elsewhere in the file (view_id, structure, num_buffers, output_needed)
 * — confirm against upstream. */
285 struct _GstVaapiFrameStore {
287     GstVaapiMiniObject parent_instance;
291     GstVaapiPictureH264 *buffers[2];
297 gst_vaapi_frame_store_finalize(gpointer object)
299 GstVaapiFrameStore * const fs = object;
302 for (i = 0; i < fs->num_buffers; i++)
303 gst_vaapi_picture_replace(&fs->buffers[i], NULL);
306 static GstVaapiFrameStore *
307 gst_vaapi_frame_store_new(GstVaapiPictureH264 *picture)
309 GstVaapiFrameStore *fs;
311 static const GstVaapiMiniObjectClass GstVaapiFrameStoreClass = {
312 sizeof(GstVaapiFrameStore),
313 gst_vaapi_frame_store_finalize
316 fs = (GstVaapiFrameStore *)
317 gst_vaapi_mini_object_new(&GstVaapiFrameStoreClass);
321 fs->view_id = picture->base.view_id;
322 fs->structure = picture->structure;
323 fs->buffers[0] = gst_vaapi_picture_ref(picture);
324 fs->buffers[1] = NULL;
326 fs->output_needed = picture->output_needed;
331 gst_vaapi_frame_store_add(GstVaapiFrameStore *fs, GstVaapiPictureH264 *picture)
335 g_return_val_if_fail(fs->num_buffers == 1, FALSE);
336 g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FRAME(picture), FALSE);
337 g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture), FALSE);
339 gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], picture);
340 if (picture->output_flag) {
341 picture->output_needed = TRUE;
345 fs->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
347 field = picture->structure == GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD ?
348 TOP_FIELD : BOTTOM_FIELD;
349 g_return_val_if_fail(fs->buffers[0]->field_poc[field] == G_MAXINT32, FALSE);
350 fs->buffers[0]->field_poc[field] = picture->field_poc[field];
351 g_return_val_if_fail(picture->field_poc[!field] == G_MAXINT32, FALSE);
352 picture->field_poc[!field] = fs->buffers[0]->field_poc[!field];
357 gst_vaapi_frame_store_split_fields(GstVaapiFrameStore *fs)
359 GstVaapiPictureH264 * const first_field = fs->buffers[0];
360 GstVaapiPictureH264 *second_field;
362 g_return_val_if_fail(fs->num_buffers == 1, FALSE);
364 first_field->base.structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
365 GST_VAAPI_PICTURE_FLAG_SET(first_field, GST_VAAPI_PICTURE_FLAG_INTERLACED);
367 second_field = gst_vaapi_picture_h264_new_field(first_field);
370 gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], second_field);
371 gst_vaapi_picture_unref(second_field);
373 second_field->frame_num = first_field->frame_num;
374 second_field->field_poc[0] = first_field->field_poc[0];
375 second_field->field_poc[1] = first_field->field_poc[1];
376 second_field->output_flag = first_field->output_flag;
377 if (second_field->output_flag) {
378 second_field->output_needed = TRUE;
384 static inline gboolean
385 gst_vaapi_frame_store_has_frame(GstVaapiFrameStore *fs)
387 return fs->structure == GST_VAAPI_PICTURE_STRUCTURE_FRAME;
390 static inline gboolean
391 gst_vaapi_frame_store_has_reference(GstVaapiFrameStore *fs)
395 for (i = 0; i < fs->num_buffers; i++) {
396 if (GST_VAAPI_PICTURE_IS_REFERENCE(fs->buffers[i]))
403 gst_vaapi_frame_store_has_inter_view(GstVaapiFrameStore *fs)
407 for (i = 0; i < fs->num_buffers; i++) {
408 if (GST_VAAPI_PICTURE_IS_INTER_VIEW(fs->buffers[i]))
414 #define gst_vaapi_frame_store_ref(fs) \
415 gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(fs))
417 #define gst_vaapi_frame_store_unref(fs) \
418 gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(fs))
420 #define gst_vaapi_frame_store_replace(old_fs_p, new_fs) \
421 gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_fs_p), \
422 (GstVaapiMiniObject *)(new_fs))
424 /* ------------------------------------------------------------------------- */
425 /* --- H.264 Decoder --- */
426 /* ------------------------------------------------------------------------- */
428 #define GST_VAAPI_DECODER_H264_CAST(decoder) \
429 ((GstVaapiDecoderH264 *)(decoder))
432 GST_H264_VIDEO_STATE_GOT_SPS = 1 << 0,
433 GST_H264_VIDEO_STATE_GOT_PPS = 1 << 1,
434 GST_H264_VIDEO_STATE_GOT_SLICE = 1 << 2,
436 GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS = (
437 GST_H264_VIDEO_STATE_GOT_SPS |
438 GST_H264_VIDEO_STATE_GOT_PPS),
439 GST_H264_VIDEO_STATE_VALID_PICTURE = (
440 GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS |
441 GST_H264_VIDEO_STATE_GOT_SLICE)
/* Decoder instance state: parser, active parameter sets, DPB, reference
 * lists, and POC / frame_num bookkeeping (8.2.1).
 * NOTE(review): several members are elided in this dump (at least
 * dpb_count/dpb_size/dpb_size_max/max_views used throughout this file)
 * — confirm against upstream. */
444 struct _GstVaapiDecoderH264Private {
445     GstH264NalParser *parser;
448     GstVaapiStreamAlignH264 stream_alignment;
449     GstVaapiPictureH264 *current_picture;
450     GstVaapiParserInfoH264 *sps[GST_H264_MAX_SPS_COUNT];
451     GstVaapiParserInfoH264 *active_sps;
452     GstVaapiParserInfoH264 *pps[GST_H264_MAX_PPS_COUNT];
453     GstVaapiParserInfoH264 *active_pps;
454     GstVaapiParserInfoH264 *prev_pi;
455     GstVaapiParserInfoH264 *prev_slice_pi;
456     GstVaapiFrameStore **prev_frames;
457     guint prev_frames_alloc;
458     GstVaapiFrameStore **dpb;
463     GstVaapiProfile profile;
464     GstVaapiEntrypoint entrypoint;
465     GstVaapiChromaType chroma_type;
466     GPtrArray *inter_views;
467     GstVaapiPictureH264 *short_ref[32];
468     guint short_ref_count;
469     GstVaapiPictureH264 *long_ref[32];
470     guint long_ref_count;
471     GstVaapiPictureH264 *RefPicList0[32];
472     guint RefPicList0_count;
473     GstVaapiPictureH264 *RefPicList1[32];
474     guint RefPicList1_count;
475     guint nal_length_size;
478     gint32 field_poc[2]; // 0:TopFieldOrderCnt / 1:BottomFieldOrderCnt
479     gint32 poc_msb; // PicOrderCntMsb
480     gint32 poc_lsb; // pic_order_cnt_lsb (from slice_header())
481     gint32 prev_poc_msb; // prevPicOrderCntMsb
482     gint32 prev_poc_lsb; // prevPicOrderCntLsb
483     gint32 frame_num_offset; // FrameNumOffset
484     gint32 frame_num; // frame_num (from slice_header())
485     gint32 prev_frame_num; // prevFrameNum
486     gboolean prev_pic_has_mmco5; // prevMmco5Pic
487     gboolean prev_pic_structure; // previous picture structure
490     guint has_context : 1;
491     guint progressive_sequence : 1;
495 * GstVaapiDecoderH264:
497 * A decoder based on H264.
499 struct _GstVaapiDecoderH264 {
501 GstVaapiDecoder parent_instance;
502 GstVaapiDecoderH264Private priv;
506 * GstVaapiDecoderH264Class:
508 * A decoder class based on H264.
510 struct _GstVaapiDecoderH264Class {
512 GstVaapiDecoderClass parent_class;
516 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
519 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
520 GstVaapiPictureH264 *picture);
522 static inline gboolean
523 is_inter_view_reference_for_next_frames(GstVaapiDecoderH264 *decoder,
524 GstVaapiFrameStore *fs)
526 return is_inter_view_reference_for_next_pictures(decoder, fs->buffers[0]);
529 /* Determines if the supplied profile is one of the MVC set */
531 is_mvc_profile(GstH264Profile profile)
533 return profile == GST_H264_PROFILE_MULTIVIEW_HIGH ||
534 profile == GST_H264_PROFILE_STEREO_HIGH;
537 /* Determines the view_id from the supplied NAL unit */
539 get_view_id(GstH264NalUnit *nalu)
541 return GST_H264_IS_MVC_NALU(nalu) ? nalu->extension.mvc.view_id : 0;
544 /* Determines the view order index (VOIdx) from the supplied view_id */
546 get_view_order_index(GstH264SPS *sps, guint16 view_id)
548 GstH264SPSExtMVC *mvc;
551 if (!sps || sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
554 mvc = &sps->extension.mvc;
555 for (i = 0; i <= mvc->num_views_minus1; i++) {
556 if (mvc->view[i].view_id == view_id)
559 GST_ERROR("failed to find VOIdx from view_id (%d)", view_id);
563 /* Determines NumViews */
565 get_num_views(GstH264SPS *sps)
567 return 1 + (sps->extension_type == GST_H264_NAL_EXTENSION_MVC ?
568 sps->extension.mvc.num_views_minus1 : 0);
571 /* Get number of reference frames to use */
573 get_max_dec_frame_buffering(GstH264SPS *sps)
575 guint num_views, max_dpb_frames;
576 guint max_dec_frame_buffering, PicSizeMbs;
577 GstVaapiLevelH264 level;
578 const GstVaapiH264LevelLimits *level_limits;
580 /* Table A-1 - Level limits */
581 if (G_UNLIKELY(sps->level_idc == 11 && sps->constraint_set3_flag))
582 level = GST_VAAPI_LEVEL_H264_L1b;
584 level = gst_vaapi_utils_h264_get_level(sps->level_idc);
585 level_limits = gst_vaapi_utils_h264_get_level_limits(level);
586 if (G_UNLIKELY(!level_limits)) {
587 GST_FIXME("unsupported level_idc value (%d)", sps->level_idc);
588 max_dec_frame_buffering = 16;
591 PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
592 (sps->pic_height_in_map_units_minus1 + 1) *
593 (sps->frame_mbs_only_flag ? 1 : 2));
594 max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;
596 if (is_mvc_profile(sps->profile_idc))
597 max_dec_frame_buffering <<= 1;
600 if (sps->vui_parameters_present_flag) {
601 GstH264VUIParams * const vui_params = &sps->vui_parameters;
602 if (vui_params->bitstream_restriction_flag)
603 max_dec_frame_buffering = vui_params->max_dec_frame_buffering;
605 switch (sps->profile_idc) {
606 case 44: // CAVLC 4:4:4 Intra profile
607 case GST_H264_PROFILE_SCALABLE_HIGH:
608 case GST_H264_PROFILE_HIGH:
609 case GST_H264_PROFILE_HIGH10:
610 case GST_H264_PROFILE_HIGH_422:
611 case GST_H264_PROFILE_HIGH_444:
612 if (sps->constraint_set3_flag)
613 max_dec_frame_buffering = 0;
619 num_views = get_num_views(sps);
620 max_dpb_frames = 16 * (num_views > 1 ? g_bit_storage(num_views - 1) : 1);
621 if (max_dec_frame_buffering > max_dpb_frames)
622 max_dec_frame_buffering = max_dpb_frames;
623 else if (max_dec_frame_buffering < sps->num_ref_frames)
624 max_dec_frame_buffering = sps->num_ref_frames;
625 return MAX(1, max_dec_frame_buffering);
629 array_remove_index_fast(void *array, guint *array_length_ptr, guint index)
631 gpointer * const entries = array;
632 guint num_entries = *array_length_ptr;
634 g_return_if_fail(index < num_entries);
636 if (index != --num_entries)
637 entries[index] = entries[num_entries];
638 entries[num_entries] = NULL;
639 *array_length_ptr = num_entries;
644 array_remove_index(void *array, guint *array_length_ptr, guint index)
646 array_remove_index_fast(array, array_length_ptr, index);
650 array_remove_index(void *array, guint *array_length_ptr, guint index)
652 gpointer * const entries = array;
653 const guint num_entries = *array_length_ptr - 1;
656 g_return_if_fail(index <= num_entries);
658 for (i = index; i < num_entries; i++)
659 entries[i] = entries[i + 1];
660 entries[num_entries] = NULL;
661 *array_length_ptr = num_entries;
665 #define ARRAY_REMOVE_INDEX(array, index) \
666 array_remove_index(array, &array##_count, index)
669 dpb_remove_index(GstVaapiDecoderH264 *decoder, guint index)
671 GstVaapiDecoderH264Private * const priv = &decoder->priv;
672 guint i, num_frames = --priv->dpb_count;
674 if (USE_STRICT_DPB_ORDERING) {
675 for (i = index; i < num_frames; i++)
676 gst_vaapi_frame_store_replace(&priv->dpb[i], priv->dpb[i + 1]);
678 else if (index != num_frames)
679 gst_vaapi_frame_store_replace(&priv->dpb[index], priv->dpb[num_frames]);
680 gst_vaapi_frame_store_replace(&priv->dpb[num_frames], NULL);
685 GstVaapiDecoderH264 *decoder,
686 GstVaapiFrameStore *fs,
687 GstVaapiPictureH264 *picture
690 picture->output_needed = FALSE;
693 if (--fs->output_needed > 0)
695 picture = fs->buffers[0];
697 return gst_vaapi_picture_output(GST_VAAPI_PICTURE_CAST(picture));
701 dpb_evict(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture, guint i)
703 GstVaapiDecoderH264Private * const priv = &decoder->priv;
704 GstVaapiFrameStore * const fs = priv->dpb[i];
706 if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
707 dpb_remove_index(decoder, i);
710 /* Finds the frame store holding the supplied picture */
712 dpb_find_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
714 GstVaapiDecoderH264Private * const priv = &decoder->priv;
717 for (i = 0; i < priv->dpb_count; i++) {
718 GstVaapiFrameStore * const fs = priv->dpb[i];
719 for (j = 0; j < fs->num_buffers; j++) {
720 if (fs->buffers[j] == picture)
727 /* Finds the picture with the lowest POC that needs to be output */
729 dpb_find_lowest_poc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
730 GstVaapiPictureH264 **found_picture_ptr)
732 GstVaapiDecoderH264Private * const priv = &decoder->priv;
733 GstVaapiPictureH264 *found_picture = NULL;
734 guint i, j, found_index;
736 for (i = 0; i < priv->dpb_count; i++) {
737 GstVaapiFrameStore * const fs = priv->dpb[i];
738 if (!fs->output_needed)
740 if (picture && picture->base.view_id != fs->view_id)
742 for (j = 0; j < fs->num_buffers; j++) {
743 GstVaapiPictureH264 * const pic = fs->buffers[j];
744 if (!pic->output_needed)
746 if (!found_picture || found_picture->base.poc > pic->base.poc ||
747 (found_picture->base.poc == pic->base.poc &&
748 found_picture->base.voc > pic->base.voc))
749 found_picture = pic, found_index = i;
753 if (found_picture_ptr)
754 *found_picture_ptr = found_picture;
755 return found_picture ? found_index : -1;
758 /* Finds the picture with the lowest VOC that needs to be output */
760 dpb_find_lowest_voc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
761 GstVaapiPictureH264 **found_picture_ptr)
763 GstVaapiDecoderH264Private * const priv = &decoder->priv;
764 GstVaapiPictureH264 *found_picture = NULL;
765 guint i, j, found_index;
767 for (i = 0; i < priv->dpb_count; i++) {
768 GstVaapiFrameStore * const fs = priv->dpb[i];
769 if (!fs->output_needed || fs->view_id == picture->base.view_id)
771 for (j = 0; j < fs->num_buffers; j++) {
772 GstVaapiPictureH264 * const pic = fs->buffers[j];
773 if (!pic->output_needed || pic->base.poc != picture->base.poc)
775 if (!found_picture || found_picture->base.voc > pic->base.voc)
776 found_picture = pic, found_index = i;
780 if (found_picture_ptr)
781 *found_picture_ptr = found_picture;
782 return found_picture ? found_index : -1;
786 dpb_output_other_views(GstVaapiDecoderH264 *decoder,
787 GstVaapiPictureH264 *picture, guint voc)
789 GstVaapiDecoderH264Private * const priv = &decoder->priv;
790 GstVaapiPictureH264 *found_picture;
794 if (priv->max_views == 1)
797 /* Emit all other view components that were in the same access
798 unit than the picture we have just found */
799 found_picture = picture;
801 found_index = dpb_find_lowest_voc(decoder, found_picture,
803 if (found_index < 0 || found_picture->base.voc >= voc)
805 success = dpb_output(decoder, priv->dpb[found_index], found_picture);
806 dpb_evict(decoder, found_picture, found_index);
814 dpb_bump(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
816 GstVaapiDecoderH264Private * const priv = &decoder->priv;
817 GstVaapiPictureH264 *found_picture;
821 found_index = dpb_find_lowest_poc(decoder, picture, &found_picture);
825 if (picture && picture->base.poc != found_picture->base.poc)
826 dpb_output_other_views(decoder, found_picture, found_picture->base.voc);
828 success = dpb_output(decoder, priv->dpb[found_index], found_picture);
829 dpb_evict(decoder, found_picture, found_index);
830 if (priv->max_views == 1)
833 if (picture && picture->base.poc != found_picture->base.poc)
834 dpb_output_other_views(decoder, found_picture, G_MAXUINT32);
839 dpb_clear(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
841 GstVaapiDecoderH264Private * const priv = &decoder->priv;
844 for (i = 0; i < priv->dpb_count; i++) {
845 if (picture && picture->base.view_id != priv->dpb[i]->view_id)
847 gst_vaapi_frame_store_replace(&priv->dpb[i], NULL);
850 /* Compact the resulting DPB, i.e. remove holes */
851 for (i = 0, n = 0; i < priv->dpb_count; i++) {
854 priv->dpb[n] = priv->dpb[i];
862 /* Clear previous frame buffers only if this is a "flush-all" operation,
863 or if the picture is the first one in the access unit */
864 if (priv->prev_frames && (!picture ||
865 GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
866 GST_VAAPI_PICTURE_FLAG_AU_START))) {
867 for (i = 0; i < priv->max_views; i++)
868 gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
873 dpb_flush(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
875 while (dpb_bump(decoder, picture))
877 dpb_clear(decoder, picture);
881 dpb_prune_mvc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
883 GstVaapiDecoderH264Private * const priv = &decoder->priv;
884 const gboolean is_last_picture = /* in the access unit */
885 GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
888 // Remove all unused inter-view only reference components of the current AU
890 while (i < priv->dpb_count) {
891 GstVaapiFrameStore * const fs = priv->dpb[i];
892 if (fs->view_id != picture->base.view_id &&
893 !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs) &&
895 !is_inter_view_reference_for_next_frames(decoder, fs)))
896 dpb_remove_index(decoder, i);
903 dpb_add(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
905 GstVaapiDecoderH264Private * const priv = &decoder->priv;
906 GstVaapiFrameStore *fs;
909 if (priv->max_views > 1)
910 dpb_prune_mvc(decoder, picture);
912 // Remove all unused pictures
913 if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
915 while (i < priv->dpb_count) {
916 GstVaapiFrameStore * const fs = priv->dpb[i];
917 if (fs->view_id == picture->base.view_id &&
918 !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
919 dpb_remove_index(decoder, i);
925 // Check if picture is the second field and the first field is still in DPB
926 if (GST_VAAPI_PICTURE_IS_INTERLACED(picture) &&
927 !GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture)) {
928 const gint found_index = dpb_find_picture(decoder,
929 GST_VAAPI_PICTURE_H264(picture->base.parent_picture));
930 if (found_index >= 0)
931 return gst_vaapi_frame_store_add(priv->dpb[found_index], picture);
934 // Create new frame store, and split fields if necessary
935 fs = gst_vaapi_frame_store_new(picture);
938 gst_vaapi_frame_store_replace(&priv->prev_frames[picture->base.voc], fs);
939 gst_vaapi_frame_store_unref(fs);
941 if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame(fs)) {
942 if (!gst_vaapi_frame_store_split_fields(fs))
946 // C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
947 if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
948 while (priv->dpb_count == priv->dpb_size) {
949 if (!dpb_bump(decoder, picture))
954 // C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
956 const gboolean StoreInterViewOnlyRefFlag =
957 !GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
958 GST_VAAPI_PICTURE_FLAG_AU_END) &&
959 GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
960 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
961 if (!picture->output_flag && !StoreInterViewOnlyRefFlag)
963 while (priv->dpb_count == priv->dpb_size) {
964 GstVaapiPictureH264 *found_picture;
965 if (!StoreInterViewOnlyRefFlag) {
966 if (dpb_find_lowest_poc(decoder, picture, &found_picture) < 0 ||
967 found_picture->base.poc > picture->base.poc)
968 return dpb_output(decoder, NULL, picture);
970 if (!dpb_bump(decoder, picture))
975 gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
976 if (picture->output_flag) {
977 picture->output_needed = TRUE;
984 dpb_reset(GstVaapiDecoderH264 *decoder, guint dpb_size)
986 GstVaapiDecoderH264Private * const priv = &decoder->priv;
988 if (dpb_size < priv->dpb_count)
991 if (dpb_size > priv->dpb_size_max) {
992 priv->dpb = g_try_realloc_n(priv->dpb, dpb_size, sizeof(*priv->dpb));
995 memset(&priv->dpb[priv->dpb_size_max], 0,
996 (dpb_size - priv->dpb_size_max) * sizeof(*priv->dpb));
997 priv->dpb_size_max = dpb_size;
1000 if (priv->dpb_size < dpb_size)
1001 priv->dpb_size = dpb_size;
1002 else if (dpb_size < priv->dpb_count)
1005 GST_DEBUG("DPB size %u", priv->dpb_size);
1010 unref_inter_view(GstVaapiPictureH264 *picture)
1014 GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
1015 gst_vaapi_picture_unref(picture);
1018 /* Resets MVC resources */
1020 mvc_reset(GstVaapiDecoderH264 *decoder)
1022 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1025 // Resize array of inter-view references
1026 if (!priv->inter_views) {
1027 priv->inter_views = g_ptr_array_new_full(priv->max_views,
1028 (GDestroyNotify)unref_inter_view);
1029 if (!priv->inter_views)
1033 // Resize array of previous frame buffers
1034 for (i = priv->max_views; i < priv->prev_frames_alloc; i++)
1035 gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
1037 priv->prev_frames = g_try_realloc_n(priv->prev_frames, priv->max_views,
1038 sizeof(*priv->prev_frames));
1039 if (!priv->prev_frames) {
1040 priv->prev_frames_alloc = 0;
1043 for (i = priv->prev_frames_alloc; i < priv->max_views; i++)
1044 priv->prev_frames[i] = NULL;
1045 priv->prev_frames_alloc = priv->max_views;
1049 static GstVaapiDecoderStatus
1050 get_status(GstH264ParserResult result)
1052 GstVaapiDecoderStatus status;
1055 case GST_H264_PARSER_OK:
1056 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
1058 case GST_H264_PARSER_NO_NAL_END:
1059 status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
1061 case GST_H264_PARSER_ERROR:
1062 status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
1065 status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1072 gst_vaapi_decoder_h264_close(GstVaapiDecoderH264 *decoder)
1074 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1076 gst_vaapi_picture_replace(&priv->current_picture, NULL);
1077 gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, NULL);
1078 gst_vaapi_parser_info_h264_replace(&priv->prev_pi, NULL);
1080 dpb_clear(decoder, NULL);
1082 if (priv->inter_views) {
1083 g_ptr_array_unref(priv->inter_views);
1084 priv->inter_views = NULL;
1088 gst_h264_nal_parser_free(priv->parser);
1089 priv->parser = NULL;
1094 gst_vaapi_decoder_h264_open(GstVaapiDecoderH264 *decoder)
1096 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1098 gst_vaapi_decoder_h264_close(decoder);
1100 priv->parser = gst_h264_nal_parser_new();
1107 gst_vaapi_decoder_h264_destroy(GstVaapiDecoder *base_decoder)
1109 GstVaapiDecoderH264 * const decoder =
1110 GST_VAAPI_DECODER_H264_CAST(base_decoder);
1111 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1114 gst_vaapi_decoder_h264_close(decoder);
1120 g_free(priv->prev_frames);
1121 priv->prev_frames = NULL;
1122 priv->prev_frames_alloc = 0;
1124 for (i = 0; i < G_N_ELEMENTS(priv->pps); i++)
1125 gst_vaapi_parser_info_h264_replace(&priv->pps[i], NULL);
1126 gst_vaapi_parser_info_h264_replace(&priv->active_pps, NULL);
1128 for (i = 0; i < G_N_ELEMENTS(priv->sps); i++)
1129 gst_vaapi_parser_info_h264_replace(&priv->sps[i], NULL);
1130 gst_vaapi_parser_info_h264_replace(&priv->active_sps, NULL);
1134 gst_vaapi_decoder_h264_create(GstVaapiDecoder *base_decoder)
1136 GstVaapiDecoderH264 * const decoder =
1137 GST_VAAPI_DECODER_H264_CAST(base_decoder);
1138 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1140 priv->profile = GST_VAAPI_PROFILE_UNKNOWN;
1141 priv->entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
1142 priv->chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
1143 priv->prev_pic_structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
1144 priv->progressive_sequence = TRUE;
1148 /* Activates the supplied PPS */
1150 ensure_pps(GstVaapiDecoderH264 *decoder, GstH264PPS *pps)
1152 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1153 GstVaapiParserInfoH264 * const pi = priv->pps[pps->id];
1155 gst_vaapi_parser_info_h264_replace(&priv->active_pps, pi);
1156 return pi ? &pi->data.pps : NULL;
1159 /* Returns the active PPS */
1160 static inline GstH264PPS *
1161 get_pps(GstVaapiDecoderH264 *decoder)
1163 GstVaapiParserInfoH264 * const pi = decoder->priv.active_pps;
1165 return pi ? &pi->data.pps : NULL;
1168 /* Activate the supplied SPS */
1170 ensure_sps(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1172 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1173 GstVaapiParserInfoH264 * const pi = priv->sps[sps->id];
1175 gst_vaapi_parser_info_h264_replace(&priv->active_sps, pi);
1176 return pi ? &pi->data.sps : NULL;
1179 /* Returns the active SPS */
1180 static inline GstH264SPS *
1181 get_sps(GstVaapiDecoderH264 *decoder)
1183 GstVaapiParserInfoH264 * const pi = decoder->priv.active_sps;
1185 return pi ? &pi->data.sps : NULL;
1189 fill_profiles(GstVaapiProfile profiles[16], guint *n_profiles_ptr,
1190 GstVaapiProfile profile)
1192 guint n_profiles = *n_profiles_ptr;
1194 profiles[n_profiles++] = profile;
1196 case GST_VAAPI_PROFILE_H264_MAIN:
1197 profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
1202 *n_profiles_ptr = n_profiles;
1205 /* Fills in compatible profiles for MVC decoding */
1207 fill_profiles_mvc(GstVaapiDecoderH264 *decoder, GstVaapiProfile profiles[16],
1208 guint *n_profiles_ptr, guint dpb_size)
1210 const gchar * const vendor_string =
1211 gst_vaapi_display_get_vendor_string(GST_VAAPI_DECODER_DISPLAY(decoder));
1213 gboolean add_high_profile = FALSE;
1218 const struct map *m;
1220 // Drivers that support slice level decoding
1221 if (vendor_string && dpb_size <= 16) {
1222 static const struct map drv_names[] = {
1223 { "Intel i965 driver", 17 },
1226 for (m = drv_names; m->str != NULL && !add_high_profile; m++) {
1227 if (g_ascii_strncasecmp(vendor_string, m->str, m->str_len) == 0)
1228 add_high_profile = TRUE;
1232 if (add_high_profile)
1233 fill_profiles(profiles, n_profiles_ptr, GST_VAAPI_PROFILE_H264_HIGH);
1236 static GstVaapiProfile
1237 get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps, guint dpb_size)
1239 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1240 GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(decoder);
1241 GstVaapiProfile profile, profiles[4];
1242 guint i, n_profiles = 0;
1244 profile = gst_vaapi_utils_h264_get_profile(sps->profile_idc);
1246 return GST_VAAPI_PROFILE_UNKNOWN;
1248 fill_profiles(profiles, &n_profiles, profile);
1250 case GST_VAAPI_PROFILE_H264_BASELINE:
1251 if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1252 fill_profiles(profiles, &n_profiles,
1253 GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE);
1254 fill_profiles(profiles, &n_profiles,
1255 GST_VAAPI_PROFILE_H264_MAIN);
1258 case GST_VAAPI_PROFILE_H264_EXTENDED:
1259 if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1260 fill_profiles(profiles, &n_profiles,
1261 GST_VAAPI_PROFILE_H264_MAIN);
1264 case GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH:
1265 if (priv->max_views == 2) {
1266 fill_profiles(profiles, &n_profiles,
1267 GST_VAAPI_PROFILE_H264_STEREO_HIGH);
1269 fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1271 case GST_VAAPI_PROFILE_H264_STEREO_HIGH:
1272 if (sps->frame_mbs_only_flag) {
1273 fill_profiles(profiles, &n_profiles,
1274 GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH);
1276 fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1282 /* If the preferred profile (profiles[0]) matches one that we already
1283 found, then just return it now instead of searching for it again */
1284 if (profiles[0] == priv->profile)
1285 return priv->profile;
1287 for (i = 0; i < n_profiles; i++) {
1288 if (gst_vaapi_display_has_decoder(display, profiles[i], priv->entrypoint))
1291 return GST_VAAPI_PROFILE_UNKNOWN;
1294 static GstVaapiDecoderStatus
1295 ensure_context(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1297 GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
1298 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1299 GstVaapiContextInfo info;
1300 GstVaapiProfile profile;
1301 GstVaapiChromaType chroma_type;
1302 gboolean reset_context = FALSE;
1303 guint mb_width, mb_height, dpb_size;
1305 dpb_size = get_max_dec_frame_buffering(sps);
1306 if (priv->dpb_size < dpb_size) {
1307 GST_DEBUG("DPB size increased");
1308 reset_context = TRUE;
1311 profile = get_profile(decoder, sps, dpb_size);
1313 GST_ERROR("unsupported profile_idc %u", sps->profile_idc);
1314 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
1317 if (!priv->profile || (priv->profile != profile && priv->max_views == 1)) {
1318 GST_DEBUG("profile changed");
1319 reset_context = TRUE;
1320 priv->profile = profile;
1323 chroma_type = gst_vaapi_utils_h264_get_chroma_type(sps->chroma_format_idc);
1325 GST_ERROR("unsupported chroma_format_idc %u", sps->chroma_format_idc);
1326 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1329 if (priv->chroma_type != chroma_type) {
1330 GST_DEBUG("chroma format changed");
1331 reset_context = TRUE;
1332 priv->chroma_type = chroma_type;
1335 mb_width = sps->pic_width_in_mbs_minus1 + 1;
1336 mb_height = (sps->pic_height_in_map_units_minus1 + 1) <<
1337 !sps->frame_mbs_only_flag;
1338 if (priv->mb_width != mb_width || priv->mb_height != mb_height) {
1339 GST_DEBUG("size changed");
1340 reset_context = TRUE;
1341 priv->mb_width = mb_width;
1342 priv->mb_height = mb_height;
1345 priv->progressive_sequence = sps->frame_mbs_only_flag;
1346 gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
1348 gst_vaapi_decoder_set_pixel_aspect_ratio(
1350 sps->vui_parameters.par_n,
1351 sps->vui_parameters.par_d
1354 if (!reset_context && priv->has_context)
1355 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1357 /* XXX: fix surface size when cropping is implemented */
1358 info.profile = priv->profile;
1359 info.entrypoint = priv->entrypoint;
1360 info.chroma_type = priv->chroma_type;
1361 info.width = sps->width;
1362 info.height = sps->height;
1363 info.ref_frames = dpb_size;
1365 if (!gst_vaapi_decoder_ensure_context(GST_VAAPI_DECODER(decoder), &info))
1366 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1367 priv->has_context = TRUE;
1370 if (!dpb_reset(decoder, dpb_size))
1371 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1373 /* Reset MVC data */
1374 if (!mvc_reset(decoder))
1375 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1376 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1380 fill_iq_matrix_4x4(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1381 const GstH264SPS *sps)
1385 /* There are always 6 4x4 scaling lists */
1386 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4) == 6);
1387 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4[0]) == 16);
1389 for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++)
1390 gst_h264_quant_matrix_4x4_get_raster_from_zigzag(
1391 iq_matrix->ScalingList4x4[i], pps->scaling_lists_4x4[i]);
1395 fill_iq_matrix_8x8(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1396 const GstH264SPS *sps)
1400 /* If chroma_format_idc != 3, there are up to 2 8x8 scaling lists */
1401 if (!pps->transform_8x8_mode_flag)
1404 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8) >= 2);
1405 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8[0]) == 64);
1407 n = (sps->chroma_format_idc != 3) ? 2 : 6;
1408 for (i = 0; i < n; i++) {
1409 gst_h264_quant_matrix_8x8_get_raster_from_zigzag(
1410 iq_matrix->ScalingList8x8[i], pps->scaling_lists_8x8[i]);
1414 static GstVaapiDecoderStatus
1415 ensure_quant_matrix(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
1417 GstVaapiPicture * const base_picture = &picture->base;
1418 GstH264PPS * const pps = get_pps(decoder);
1419 GstH264SPS * const sps = get_sps(decoder);
1420 VAIQMatrixBufferH264 *iq_matrix;
1422 base_picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(H264, decoder);
1423 if (!base_picture->iq_matrix) {
1424 GST_ERROR("failed to allocate IQ matrix");
1425 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1427 iq_matrix = base_picture->iq_matrix->param;
1429 /* XXX: we can only support 4:2:0 or 4:2:2 since ScalingLists8x8[]
1430 is not large enough to hold lists for 4:4:4 */
1431 if (sps->chroma_format_idc == 3)
1432 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1434 fill_iq_matrix_4x4(iq_matrix, pps, sps);
1435 fill_iq_matrix_8x8(iq_matrix, pps, sps);
1437 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1440 static inline gboolean
1441 is_valid_state(guint state, guint ref_state)
1443 return (state & ref_state) == ref_state;
1446 static GstVaapiDecoderStatus
1447 decode_current_picture(GstVaapiDecoderH264 *decoder)
1449 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1450 GstVaapiPictureH264 * const picture = priv->current_picture;
1452 if (!is_valid_state(priv->decoder_state, GST_H264_VIDEO_STATE_VALID_PICTURE))
1454 priv->decoder_state = 0;
1457 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1459 if (!exec_ref_pic_marking(decoder, picture))
1461 if (!dpb_add(decoder, picture))
1463 if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
1465 gst_vaapi_picture_replace(&priv->current_picture, NULL);
1466 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1469 /* XXX: fix for cases where first field failed to be decoded */
1470 gst_vaapi_picture_replace(&priv->current_picture, NULL);
1471 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1474 priv->decoder_state = 0;
1475 return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
1478 static GstVaapiDecoderStatus
1479 parse_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1481 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1482 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1483 GstH264SPS * const sps = &pi->data.sps;
1484 GstH264ParserResult result;
1486 GST_DEBUG("parse SPS");
1488 priv->parser_state = 0;
1490 /* Variables that don't have inferred values per the H.264
1491 standard but that should get a default value anyway */
1492 sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1494 result = gst_h264_parser_parse_sps(priv->parser, &pi->nalu, sps, TRUE);
1495 if (result != GST_H264_PARSER_OK)
1496 return get_status(result);
1498 /* Reset defaults */
1499 priv->max_views = 1;
1501 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1502 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1505 static GstVaapiDecoderStatus
1506 parse_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1508 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1509 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1510 GstH264SPS * const sps = &pi->data.sps;
1511 GstH264ParserResult result;
1513 GST_DEBUG("parse subset SPS");
1515 /* Variables that don't have inferred values per the H.264
1516 standard but that should get a default value anyway */
1517 sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1519 result = gst_h264_parser_parse_subset_sps(priv->parser, &pi->nalu, sps,
1521 if (result != GST_H264_PARSER_OK)
1522 return get_status(result);
1524 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1525 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1528 static GstVaapiDecoderStatus
1529 parse_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1531 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1532 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1533 GstH264PPS * const pps = &pi->data.pps;
1534 GstH264ParserResult result;
1536 GST_DEBUG("parse PPS");
1538 priv->parser_state &= GST_H264_VIDEO_STATE_GOT_SPS;
1540 /* Variables that don't have inferred values per the H.264
1541 standard but that should get a default value anyway */
1542 pps->slice_group_map_type = 0;
1543 pps->slice_group_change_rate_minus1 = 0;
1545 result = gst_h264_parser_parse_pps(priv->parser, &pi->nalu, pps);
1546 if (result != GST_H264_PARSER_OK)
1547 return get_status(result);
1549 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_PPS;
1550 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1553 static GstVaapiDecoderStatus
1554 parse_sei(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1556 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1557 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1558 GArray ** const sei_ptr = &pi->data.sei;
1559 GstH264ParserResult result;
1561 GST_DEBUG("parse SEI");
1563 result = gst_h264_parser_parse_sei(priv->parser, &pi->nalu, sei_ptr);
1564 if (result != GST_H264_PARSER_OK) {
1565 GST_WARNING("failed to parse SEI messages");
1566 return get_status(result);
1568 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1571 static GstVaapiDecoderStatus
1572 parse_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1574 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1575 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1576 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
1577 GstH264NalUnit * const nalu = &pi->nalu;
1579 GstH264ParserResult result;
1582 GST_DEBUG("parse slice");
1584 priv->parser_state &= (GST_H264_VIDEO_STATE_GOT_SPS|
1585 GST_H264_VIDEO_STATE_GOT_PPS);
1587 /* Propagate Prefix NAL unit info, if necessary */
1588 switch (nalu->type) {
1589 case GST_H264_NAL_SLICE:
1590 case GST_H264_NAL_SLICE_IDR: {
1591 GstVaapiParserInfoH264 * const prev_pi = priv->prev_pi;
1592 if (prev_pi && prev_pi->nalu.type == GST_H264_NAL_PREFIX_UNIT) {
1593 /* MVC sequences shall have a Prefix NAL unit immediately
1594 preceding this NAL unit */
1595 pi->nalu.extension_type = prev_pi->nalu.extension_type;
1596 pi->nalu.extension = prev_pi->nalu.extension;
1599 /* In the very unlikely case there is no Prefix NAL unit
1600 immediately preceding this NAL unit, try to infer some
1601 defaults (H.7.4.1.1) */
1602 GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
1603 mvc->non_idr_flag = !(nalu->type == GST_H264_NAL_SLICE_IDR);
1604 nalu->idr_pic_flag = !mvc->non_idr_flag;
1605 mvc->priority_id = 0;
1607 mvc->temporal_id = 0;
1608 mvc->anchor_pic_flag = 0;
1609 mvc->inter_view_flag = 1;
1615 /* Variables that don't have inferred values per the H.264
1616 standard but that should get a default value anyway */
1617 slice_hdr->cabac_init_idc = 0;
1618 slice_hdr->direct_spatial_mv_pred_flag = 0;
1620 result = gst_h264_parser_parse_slice_hdr(priv->parser, &pi->nalu,
1621 slice_hdr, TRUE, TRUE);
1622 if (result != GST_H264_PARSER_OK)
1623 return get_status(result);
1625 sps = slice_hdr->pps->sequence;
1627 /* Update MVC data */
1628 num_views = get_num_views(sps);
1629 if (priv->max_views < num_views) {
1630 priv->max_views = num_views;
1631 GST_DEBUG("maximum number of views changed to %u", num_views);
1633 pi->view_id = get_view_id(&pi->nalu);
1634 pi->voc = get_view_order_index(sps, pi->view_id);
1636 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
1637 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1640 static GstVaapiDecoderStatus
1641 decode_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1643 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1644 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1645 GstH264SPS * const sps = &pi->data.sps;
1647 GST_DEBUG("decode SPS");
1649 gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1650 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1653 static GstVaapiDecoderStatus
1654 decode_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1656 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1657 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1658 GstH264SPS * const sps = &pi->data.sps;
1660 GST_DEBUG("decode subset SPS");
1662 gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1663 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1666 static GstVaapiDecoderStatus
1667 decode_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1669 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1670 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1671 GstH264PPS * const pps = &pi->data.pps;
1673 GST_DEBUG("decode PPS");
1675 gst_vaapi_parser_info_h264_replace(&priv->pps[pps->id], pi);
1676 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1679 static GstVaapiDecoderStatus
1680 decode_sequence_end(GstVaapiDecoderH264 *decoder)
1682 GstVaapiDecoderStatus status;
1684 GST_DEBUG("decode sequence-end");
1686 status = decode_current_picture(decoder);
1687 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
1690 dpb_flush(decoder, NULL);
1691 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1694 /* 8.2.1.1 - Decoding process for picture order count type 0 */
1697 GstVaapiDecoderH264 *decoder,
1698 GstVaapiPictureH264 *picture,
1699 GstH264SliceHdr *slice_hdr
1702 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1703 GstH264SPS * const sps = get_sps(decoder);
1704 const gint32 MaxPicOrderCntLsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
1707 GST_DEBUG("decode picture order count type 0");
1709 if (GST_VAAPI_PICTURE_IS_IDR(picture)) {
1710 priv->prev_poc_msb = 0;
1711 priv->prev_poc_lsb = 0;
1713 else if (priv->prev_pic_has_mmco5) {
1714 priv->prev_poc_msb = 0;
1715 priv->prev_poc_lsb =
1716 (priv->prev_pic_structure == GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD ?
1717 0 : priv->field_poc[TOP_FIELD]);
1720 priv->prev_poc_msb = priv->poc_msb;
1721 priv->prev_poc_lsb = priv->poc_lsb;
1725 priv->poc_lsb = slice_hdr->pic_order_cnt_lsb;
1726 if (priv->poc_lsb < priv->prev_poc_lsb &&
1727 (priv->prev_poc_lsb - priv->poc_lsb) >= (MaxPicOrderCntLsb / 2))
1728 priv->poc_msb = priv->prev_poc_msb + MaxPicOrderCntLsb;
1729 else if (priv->poc_lsb > priv->prev_poc_lsb &&
1730 (priv->poc_lsb - priv->prev_poc_lsb) > (MaxPicOrderCntLsb / 2))
1731 priv->poc_msb = priv->prev_poc_msb - MaxPicOrderCntLsb;
1733 priv->poc_msb = priv->prev_poc_msb;
1735 temp_poc = priv->poc_msb + priv->poc_lsb;
1736 switch (picture->structure) {
1737 case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1739 priv->field_poc[TOP_FIELD] = temp_poc;
1740 priv->field_poc[BOTTOM_FIELD] = temp_poc +
1741 slice_hdr->delta_pic_order_cnt_bottom;
1743 case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1745 priv->field_poc[TOP_FIELD] = temp_poc;
1747 case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1749 priv->field_poc[BOTTOM_FIELD] = temp_poc;
1754 /* 8.2.1.2 - Decoding process for picture order count type 1 */
1757 GstVaapiDecoderH264 *decoder,
1758 GstVaapiPictureH264 *picture,
1759 GstH264SliceHdr *slice_hdr
1762 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1763 GstH264SPS * const sps = get_sps(decoder);
1764 const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1765 gint32 prev_frame_num_offset, abs_frame_num, expected_poc;
1768 GST_DEBUG("decode picture order count type 1");
1770 if (priv->prev_pic_has_mmco5)
1771 prev_frame_num_offset = 0;
1773 prev_frame_num_offset = priv->frame_num_offset;
1776 if (GST_VAAPI_PICTURE_IS_IDR(picture))
1777 priv->frame_num_offset = 0;
1778 else if (priv->prev_frame_num > priv->frame_num)
1779 priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1781 priv->frame_num_offset = prev_frame_num_offset;
1784 if (sps->num_ref_frames_in_pic_order_cnt_cycle != 0)
1785 abs_frame_num = priv->frame_num_offset + priv->frame_num;
1788 if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture) && abs_frame_num > 0)
1789 abs_frame_num = abs_frame_num - 1;
1791 if (abs_frame_num > 0) {
1792 gint32 expected_delta_per_poc_cycle;
1793 gint32 poc_cycle_cnt, frame_num_in_poc_cycle;
1795 expected_delta_per_poc_cycle = 0;
1796 for (i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++)
1797 expected_delta_per_poc_cycle += sps->offset_for_ref_frame[i];
1800 poc_cycle_cnt = (abs_frame_num - 1) /
1801 sps->num_ref_frames_in_pic_order_cnt_cycle;
1802 frame_num_in_poc_cycle = (abs_frame_num - 1) %
1803 sps->num_ref_frames_in_pic_order_cnt_cycle;
1806 expected_poc = poc_cycle_cnt * expected_delta_per_poc_cycle;
1807 for (i = 0; i <= frame_num_in_poc_cycle; i++)
1808 expected_poc += sps->offset_for_ref_frame[i];
1812 if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1813 expected_poc += sps->offset_for_non_ref_pic;
1816 switch (picture->structure) {
1817 case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1818 priv->field_poc[TOP_FIELD] = expected_poc +
1819 slice_hdr->delta_pic_order_cnt[0];
1820 priv->field_poc[BOTTOM_FIELD] = priv->field_poc[TOP_FIELD] +
1821 sps->offset_for_top_to_bottom_field +
1822 slice_hdr->delta_pic_order_cnt[1];
1824 case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1825 priv->field_poc[TOP_FIELD] = expected_poc +
1826 slice_hdr->delta_pic_order_cnt[0];
1828 case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1829 priv->field_poc[BOTTOM_FIELD] = expected_poc +
1830 sps->offset_for_top_to_bottom_field +
1831 slice_hdr->delta_pic_order_cnt[0];
1836 /* 8.2.1.3 - Decoding process for picture order count type 2 */
1839 GstVaapiDecoderH264 *decoder,
1840 GstVaapiPictureH264 *picture,
1841 GstH264SliceHdr *slice_hdr
1844 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1845 GstH264SPS * const sps = get_sps(decoder);
1846 const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1847 gint32 prev_frame_num_offset, temp_poc;
1849 GST_DEBUG("decode picture order count type 2");
1851 if (priv->prev_pic_has_mmco5)
1852 prev_frame_num_offset = 0;
1854 prev_frame_num_offset = priv->frame_num_offset;
1857 if (GST_VAAPI_PICTURE_IS_IDR(picture))
1858 priv->frame_num_offset = 0;
1859 else if (priv->prev_frame_num > priv->frame_num)
1860 priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1862 priv->frame_num_offset = prev_frame_num_offset;
1865 if (GST_VAAPI_PICTURE_IS_IDR(picture))
1867 else if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1868 temp_poc = 2 * (priv->frame_num_offset + priv->frame_num) - 1;
1870 temp_poc = 2 * (priv->frame_num_offset + priv->frame_num);
1873 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1874 priv->field_poc[TOP_FIELD] = temp_poc;
1875 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1876 priv->field_poc[BOTTOM_FIELD] = temp_poc;
1879 /* 8.2.1 - Decoding process for picture order count */
1882 GstVaapiDecoderH264 *decoder,
1883 GstVaapiPictureH264 *picture,
1884 GstH264SliceHdr *slice_hdr
1887 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1888 GstH264SPS * const sps = get_sps(decoder);
1890 switch (sps->pic_order_cnt_type) {
1892 init_picture_poc_0(decoder, picture, slice_hdr);
1895 init_picture_poc_1(decoder, picture, slice_hdr);
1898 init_picture_poc_2(decoder, picture, slice_hdr);
1902 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1903 picture->field_poc[TOP_FIELD] = priv->field_poc[TOP_FIELD];
1904 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1905 picture->field_poc[BOTTOM_FIELD] = priv->field_poc[BOTTOM_FIELD];
1906 picture->base.poc = MIN(picture->field_poc[0], picture->field_poc[1]);
1910 compare_picture_pic_num_dec(const void *a, const void *b)
1912 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1913 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1915 return picB->pic_num - picA->pic_num;
1919 compare_picture_long_term_pic_num_inc(const void *a, const void *b)
1921 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1922 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1924 return picA->long_term_pic_num - picB->long_term_pic_num;
1928 compare_picture_poc_dec(const void *a, const void *b)
1930 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1931 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1933 return picB->base.poc - picA->base.poc;
1937 compare_picture_poc_inc(const void *a, const void *b)
1939 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1940 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1942 return picA->base.poc - picB->base.poc;
1946 compare_picture_frame_num_wrap_dec(const void *a, const void *b)
1948 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1949 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1951 return picB->frame_num_wrap - picA->frame_num_wrap;
1955 compare_picture_long_term_frame_idx_inc(const void *a, const void *b)
1957 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1958 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1960 return picA->long_term_frame_idx - picB->long_term_frame_idx;
1963 /* 8.2.4.1 - Decoding process for picture numbers */
1965 init_picture_refs_pic_num(
1966 GstVaapiDecoderH264 *decoder,
1967 GstVaapiPictureH264 *picture,
1968 GstH264SliceHdr *slice_hdr
1971 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1972 GstH264SPS * const sps = get_sps(decoder);
1973 const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1976 GST_DEBUG("decode picture numbers");
1978 for (i = 0; i < priv->short_ref_count; i++) {
1979 GstVaapiPictureH264 * const pic = priv->short_ref[i];
1982 if (pic->base.view_id != picture->base.view_id)
1986 if (pic->frame_num > priv->frame_num)
1987 pic->frame_num_wrap = pic->frame_num - MaxFrameNum;
1989 pic->frame_num_wrap = pic->frame_num;
1991 // (8-28, 8-30, 8-31)
1992 if (GST_VAAPI_PICTURE_IS_FRAME(picture))
1993 pic->pic_num = pic->frame_num_wrap;
1995 if (pic->structure == picture->structure)
1996 pic->pic_num = 2 * pic->frame_num_wrap + 1;
1998 pic->pic_num = 2 * pic->frame_num_wrap;
2002 for (i = 0; i < priv->long_ref_count; i++) {
2003 GstVaapiPictureH264 * const pic = priv->long_ref[i];
2006 if (pic->base.view_id != picture->base.view_id)
2009 // (8-29, 8-32, 8-33)
2010 if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2011 pic->long_term_pic_num = pic->long_term_frame_idx;
2013 if (pic->structure == picture->structure)
2014 pic->long_term_pic_num = 2 * pic->long_term_frame_idx + 1;
2016 pic->long_term_pic_num = 2 * pic->long_term_frame_idx;
/* Sorts the first @n entries of @list in place using the
   compare_picture_<compare_func> qsort comparator defined above */
#define SORT_REF_LIST(list, n, compare_func) \
    qsort(list, n, sizeof(*(list)), compare_picture_##compare_func)
2025 init_picture_refs_fields_1(
2026 guint picture_structure,
2027 GstVaapiPictureH264 *RefPicList[32],
2028 guint *RefPicList_count,
2029 GstVaapiPictureH264 *ref_list[32],
2030 guint ref_list_count
2037 n = *RefPicList_count;
2040 for (; i < ref_list_count; i++) {
2041 if (ref_list[i]->structure == picture_structure) {
2042 RefPicList[n++] = ref_list[i++];
2046 for (; j < ref_list_count; j++) {
2047 if (ref_list[j]->structure != picture_structure) {
2048 RefPicList[n++] = ref_list[j++];
2052 } while (i < ref_list_count || j < ref_list_count);
2053 *RefPicList_count = n;
2057 init_picture_refs_fields(
2058 GstVaapiPictureH264 *picture,
2059 GstVaapiPictureH264 *RefPicList[32],
2060 guint *RefPicList_count,
2061 GstVaapiPictureH264 *short_ref[32],
2062 guint short_ref_count,
2063 GstVaapiPictureH264 *long_ref[32],
2064 guint long_ref_count
2069 /* 8.2.4.2.5 - reference picture lists in fields */
2070 init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2071 short_ref, short_ref_count);
2072 init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2073 long_ref, long_ref_count);
2074 *RefPicList_count = n;
2077 /* Finds the inter-view reference picture with the supplied view id */
2078 static GstVaapiPictureH264 *
2079 find_inter_view_reference(GstVaapiDecoderH264 *decoder, guint16 view_id)
2081 GPtrArray * const inter_views = decoder->priv.inter_views;
2084 for (i = 0; i < inter_views->len; i++) {
2085 GstVaapiPictureH264 * const picture = g_ptr_array_index(inter_views, i);
2086 if (picture->base.view_id == view_id)
2090 GST_WARNING("failed to find inter-view reference picture for view_id: %d",
2095 /* Checks whether the view id exists in the supplied list of view ids */
2097 find_view_id(guint16 view_id, const guint16 *view_ids, guint num_view_ids)
2101 for (i = 0; i < num_view_ids; i++) {
2102 if (view_ids[i] == view_id)
2109 find_view_id_in_view(guint16 view_id, const GstH264SPSExtMVCView *view,
2113 return (find_view_id(view_id, view->anchor_ref_l0,
2114 view->num_anchor_refs_l0) ||
2115 find_view_id(view_id, view->anchor_ref_l1,
2116 view->num_anchor_refs_l1));
2118 return (find_view_id(view_id, view->non_anchor_ref_l0,
2119 view->num_non_anchor_refs_l0) ||
2120 find_view_id(view_id, view->non_anchor_ref_l1,
2121 view->num_non_anchor_refs_l1));
2124 /* Checks whether the inter-view reference picture with the supplied
2125 view id is used for decoding the current view component picture */
2127 is_inter_view_reference_for_picture(GstVaapiDecoderH264 *decoder,
2128 guint16 view_id, GstVaapiPictureH264 *picture)
2130 const GstH264SPS * const sps = get_sps(decoder);
2133 if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2134 sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2137 is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2138 return find_view_id_in_view(view_id,
2139 &sps->extension.mvc.view[picture->base.voc], is_anchor);
2142 /* Checks whether the supplied inter-view reference picture is used
2143 for decoding the next view component pictures */
2145 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
2146 GstVaapiPictureH264 *picture)
2148 const GstH264SPS * const sps = get_sps(decoder);
2152 if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2153 sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2156 is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2157 num_views = sps->extension.mvc.num_views_minus1 + 1;
2158 for (i = picture->base.voc + 1; i < num_views; i++) {
2159 const GstH264SPSExtMVCView * const view = &sps->extension.mvc.view[i];
2160 if (find_view_id_in_view(picture->base.view_id, view, is_anchor))
2166 /* H.8.2.1 - Initialization process for inter-view prediction references */
2168 init_picture_refs_mvc_1(GstVaapiDecoderH264 *decoder,
2169 GstVaapiPictureH264 **ref_list, guint *ref_list_count_ptr, guint num_refs,
2170 const guint16 *view_ids, guint num_view_ids)
2174 n = *ref_list_count_ptr;
2175 for (j = 0; j < num_view_ids && n < num_refs; j++) {
2176 GstVaapiPictureH264 * const pic =
2177 find_inter_view_reference(decoder, view_ids[j]);
2179 ref_list[n++] = pic;
2181 *ref_list_count_ptr = n;
2185 init_picture_refs_mvc(GstVaapiDecoderH264 *decoder,
2186 GstVaapiPictureH264 *picture, GstH264SliceHdr *slice_hdr, guint list)
2188 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2189 const GstH264SPS * const sps = get_sps(decoder);
2190 const GstH264SPSExtMVCView *view;
2192 GST_DEBUG("initialize reference picture list for inter-view prediction");
2194 if (sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2196 view = &sps->extension.mvc.view[picture->base.voc];
2198 #define INVOKE_INIT_PICTURE_REFS_MVC(ref_list, view_list) do { \
2199 init_picture_refs_mvc_1(decoder, \
2200 priv->RefPicList##ref_list, \
2201 &priv->RefPicList##ref_list##_count, \
2202 slice_hdr->num_ref_idx_l##ref_list##_active_minus1 + 1, \
2203 view->view_list##_l##ref_list, \
2204 view->num_##view_list##s_l##ref_list); \
2208 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2209 INVOKE_INIT_PICTURE_REFS_MVC(0, anchor_ref);
2211 INVOKE_INIT_PICTURE_REFS_MVC(0, non_anchor_ref);
2214 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2215 INVOKE_INIT_PICTURE_REFS_MVC(1, anchor_ref);
2217 INVOKE_INIT_PICTURE_REFS_MVC(1, non_anchor_ref);
2220 #undef INVOKE_INIT_PICTURE_REFS_MVC
/* Builds the initial RefPicList0 for P and SP slices (spec 8.2.4.2.1 for
 * frames, 8.2.4.2.2 for fields): short-term references ordered by
 * decreasing PicNum (frames) / decreasing FrameNumWrap (fields), followed
 * by long-term references ordered by increasing LongTermPicNum /
 * LongTermFrameIdx.  For MVC pictures, inter-view references are then
 * appended via init_picture_refs_mvc(). */
2224 init_picture_refs_p_slice(
2225     GstVaapiDecoderH264 *decoder,
2226     GstVaapiPictureH264 *picture,
2227     GstH264SliceHdr *slice_hdr
2230     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2231     GstVaapiPictureH264 **ref_list;
2234     GST_DEBUG("decode reference picture list for P and SP slices");
2236     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2237         /* 8.2.4.2.1 - P and SP slices in frames */
2238         if (priv->short_ref_count > 0) {
2239             ref_list = priv->RefPicList0;
2240             for (i = 0; i < priv->short_ref_count; i++)
2241                 ref_list[i] = priv->short_ref[i];
2242             SORT_REF_LIST(ref_list, i, pic_num_dec);
2243             priv->RefPicList0_count += i;
         /* Long-term refs are appended after the short-term block */
2246         if (priv->long_ref_count > 0) {
2247             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2248             for (i = 0; i < priv->long_ref_count; i++)
2249                 ref_list[i] = priv->long_ref[i];
2250             SORT_REF_LIST(ref_list, i, long_term_pic_num_inc);
2251             priv->RefPicList0_count += i;
2255         /* 8.2.4.2.2 - P and SP slices in fields */
2256         GstVaapiPictureH264 *short_ref[32];
2257         guint short_ref_count = 0;
2258         GstVaapiPictureH264 *long_ref[32];
2259         guint long_ref_count = 0;
2261         if (priv->short_ref_count > 0) {
2262             for (i = 0; i < priv->short_ref_count; i++)
2263                 short_ref[i] = priv->short_ref[i];
2264             SORT_REF_LIST(short_ref, i, frame_num_wrap_dec);
2265             short_ref_count = i;
2268         if (priv->long_ref_count > 0) {
2269             for (i = 0; i < priv->long_ref_count; i++)
2270                 long_ref[i] = priv->long_ref[i];
2271             SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
         /* Interleave fields of same/opposite parity per 8.2.4.2.5 */
2275         init_picture_refs_fields(
2277             priv->RefPicList0, &priv->RefPicList0_count,
2278             short_ref, short_ref_count,
2279             long_ref, long_ref_count
2283     if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2285         init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
/* Builds the initial RefPicList0 and RefPicList1 for B slices (spec
 * 8.2.4.2.3 for frames, 8.2.4.2.4 for fields).  List0 favors references
 * with POC below the current picture (closest first), List1 favors those
 * above; long-term references follow in LongTermPicNum order.  If the two
 * resulting lists are identical and hold more than one entry, the first
 * two entries of List1 are swapped, as required by 8.2.4.2.3/8.2.4.2.4.
 * For MVC pictures, inter-view references are appended to both lists. */
2290 init_picture_refs_b_slice(
2291     GstVaapiDecoderH264 *decoder,
2292     GstVaapiPictureH264 *picture,
2293     GstH264SliceHdr *slice_hdr
2296     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2297     GstVaapiPictureH264 **ref_list;
2300     GST_DEBUG("decode reference picture list for B slices");
2302     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2303         /* 8.2.4.2.3 - B slices in frames */
         /* --- RefPicList0: POC < current (descending), then POC >=
            current (ascending), then long-term --- */
2306         if (priv->short_ref_count > 0) {
2307             // 1. Short-term references
2308             ref_list = priv->RefPicList0;
2309             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2310                 if (priv->short_ref[i]->base.poc < picture->base.poc)
2311                     ref_list[n++] = priv->short_ref[i];
2313             SORT_REF_LIST(ref_list, n, poc_dec);
2314             priv->RefPicList0_count += n;
2316             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2317             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2318                 if (priv->short_ref[i]->base.poc >= picture->base.poc)
2319                     ref_list[n++] = priv->short_ref[i];
2321             SORT_REF_LIST(ref_list, n, poc_inc);
2322             priv->RefPicList0_count += n;
2325         if (priv->long_ref_count > 0) {
2326             // 2. Long-term references
2327             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2328             for (n = 0, i = 0; i < priv->long_ref_count; i++)
2329                 ref_list[n++] = priv->long_ref[i];
2330             SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2331             priv->RefPicList0_count += n;
         /* --- RefPicList1: mirror ordering of List0 --- */
2335         if (priv->short_ref_count > 0) {
2336             // 1. Short-term references
2337             ref_list = priv->RefPicList1;
2338             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2339                 if (priv->short_ref[i]->base.poc > picture->base.poc)
2340                     ref_list[n++] = priv->short_ref[i];
2342             SORT_REF_LIST(ref_list, n, poc_inc);
2343             priv->RefPicList1_count += n;
2345             ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2346             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2347                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2348                     ref_list[n++] = priv->short_ref[i];
2350             SORT_REF_LIST(ref_list, n, poc_dec);
2351             priv->RefPicList1_count += n;
2354         if (priv->long_ref_count > 0) {
2355             // 2. Long-term references
2356             ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2357             for (n = 0, i = 0; i < priv->long_ref_count; i++)
2358                 ref_list[n++] = priv->long_ref[i];
2359             SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2360             priv->RefPicList1_count += n;
2364         /* 8.2.4.2.4 - B slices in fields */
2365         GstVaapiPictureH264 *short_ref0[32];
2366         guint short_ref0_count = 0;
2367         GstVaapiPictureH264 *short_ref1[32];
2368         guint short_ref1_count = 0;
2369         GstVaapiPictureH264 *long_ref[32];
2370         guint long_ref_count = 0;
2372         /* refFrameList0ShortTerm */
2373         if (priv->short_ref_count > 0) {
2374             ref_list = short_ref0;
2375             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2376                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2377                     ref_list[n++] = priv->short_ref[i];
2379             SORT_REF_LIST(ref_list, n, poc_dec);
2380             short_ref0_count += n;
2382             ref_list = &short_ref0[short_ref0_count];
2383             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2384                 if (priv->short_ref[i]->base.poc > picture->base.poc)
2385                     ref_list[n++] = priv->short_ref[i];
2387             SORT_REF_LIST(ref_list, n, poc_inc);
2388             short_ref0_count += n;
2391         /* refFrameList1ShortTerm */
2392         if (priv->short_ref_count > 0) {
2393             ref_list = short_ref1;
2394             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2395                 if (priv->short_ref[i]->base.poc > picture->base.poc)
2396                     ref_list[n++] = priv->short_ref[i];
2398             SORT_REF_LIST(ref_list, n, poc_inc);
2399             short_ref1_count += n;
2401             ref_list = &short_ref1[short_ref1_count];
2402             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2403                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2404                     ref_list[n++] = priv->short_ref[i];
2406             SORT_REF_LIST(ref_list, n, poc_dec);
2407             short_ref1_count += n;
2410         /* refFrameListLongTerm */
2411         if (priv->long_ref_count > 0) {
2412             for (i = 0; i < priv->long_ref_count; i++)
2413                 long_ref[i] = priv->long_ref[i];
2414             SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
         /* Interleave fields per 8.2.4.2.5 into both final lists */
2418         init_picture_refs_fields(
2420             priv->RefPicList0, &priv->RefPicList0_count,
2421             short_ref0, short_ref0_count,
2422             long_ref, long_ref_count
2425         init_picture_refs_fields(
2427             priv->RefPicList1, &priv->RefPicList1_count,
2428             short_ref1, short_ref1_count,
2429             long_ref, long_ref_count
2433     /* Check whether RefPicList1 is identical to RefPicList0, then
2434        swap if necessary */
2435     if (priv->RefPicList1_count > 1 &&
2436         priv->RefPicList1_count == priv->RefPicList0_count &&
2437         memcmp(priv->RefPicList0, priv->RefPicList1,
2438                priv->RefPicList0_count * sizeof(priv->RefPicList0[0])) == 0) {
2439         GstVaapiPictureH264 * const tmp = priv->RefPicList1[0];
2440         priv->RefPicList1[0] = priv->RefPicList1[1];
2441         priv->RefPicList1[1] = tmp;
2444     if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2446         init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
2449         init_picture_refs_mvc(decoder, picture, slice_hdr, 1);
2453 #undef SORT_REF_LIST
/* Looks up the short-term reference picture with PicNum == @pic_num in
 * priv->short_ref[].  Callers treat the result as an index into
 * short_ref[] with negative meaning "not found" (see the
 * "found_ref_idx >= 0" checks at the call sites); the return statements
 * themselves fall on lines elided from this extract. */
2456 find_short_term_reference(GstVaapiDecoderH264 *decoder, gint32 pic_num)
2458     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2461     for (i = 0; i < priv->short_ref_count; i++) {
2462         if (priv->short_ref[i]->pic_num == pic_num)
2465     GST_ERROR("found no short-term reference picture with PicNum = %d",
/* Looks up the long-term reference picture with LongTermPicNum ==
 * @long_term_pic_num in priv->long_ref[].  As with
 * find_short_term_reference(), callers use the result as an index with
 * negative meaning "not found"; the return statements are on lines
 * elided from this extract. */
2471 find_long_term_reference(GstVaapiDecoderH264 *decoder, gint32 long_term_pic_num)
2473     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2476     for (i = 0; i < priv->long_ref_count; i++) {
2477         if (priv->long_ref[i]->long_term_pic_num == long_term_pic_num)
2480     GST_ERROR("found no long-term reference picture with LongTermPicNum = %d",
/* Applies the ref_pic_list_modification() syntax of @slice_hdr to one
 * reference picture list (spec 8.2.4.3.1 for short-term, 8.2.4.3.2 for
 * long-term, H.8.2.2.3 for MVC inter-view references).
 * @list selects RefPicList0 (0) or RefPicList1 (1); the matching
 * modification array, list pointer, count pointer and active-reference
 * count are picked up from the slice header / decoder state below. */
2486 exec_picture_refs_modification_1(
2487     GstVaapiDecoderH264 *decoder,
2488     GstVaapiPictureH264 *picture,
2489     GstH264SliceHdr *slice_hdr,
2493     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2494     GstH264SPS * const sps = get_sps(decoder);
2495     GstH264RefPicListModification *ref_pic_list_modification;
2496     guint num_ref_pic_list_modifications;
2497     GstVaapiPictureH264 **ref_list;
2498     guint *ref_list_count_ptr, ref_list_count, ref_list_idx = 0;
2499     const guint16 *view_ids = NULL;
2500     guint i, j, n, num_refs, num_view_ids = 0;
2502     gint32 MaxPicNum, CurrPicNum, picNumPred, picViewIdxPred;
2504     GST_DEBUG("modification process of reference picture list %u", list);
     /* Select the l0 or l1 variants of the slice-header fields */
2507         ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l0;
2508         num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l0;
2509         ref_list = priv->RefPicList0;
2510         ref_list_count_ptr = &priv->RefPicList0_count;
2511         num_refs = slice_hdr->num_ref_idx_l0_active_minus1 + 1;
2513         if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2514             sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2515             const GstH264SPSExtMVCView * const view =
2516                 &sps->extension.mvc.view[picture->base.voc];
2517             if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2518                 view_ids = view->anchor_ref_l0;
2519                 num_view_ids = view->num_anchor_refs_l0;
2522                 view_ids = view->non_anchor_ref_l0;
2523                 num_view_ids = view->num_non_anchor_refs_l0;
2528         ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l1;
2529         num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l1;
2530         ref_list = priv->RefPicList1;
2531         ref_list_count_ptr = &priv->RefPicList1_count;
2532         num_refs = slice_hdr->num_ref_idx_l1_active_minus1 + 1;
2534         if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2535             sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2536             const GstH264SPSExtMVCView * const view =
2537                 &sps->extension.mvc.view[picture->base.voc];
2538             if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2539                 view_ids = view->anchor_ref_l1;
2540                 num_view_ids = view->num_anchor_refs_l1;
2543                 view_ids = view->non_anchor_ref_l1;
2544                 num_view_ids = view->num_non_anchor_refs_l1;
2548     ref_list_count = *ref_list_count_ptr;
     /* 8.2.4.1: MaxPicNum/CurrPicNum differ between field and frame
        decoding (fields use 2*MaxFrameNum and 2*frame_num + 1) */
2550     if (!GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2551         MaxPicNum  = 1 << (sps->log2_max_frame_num_minus4 + 5); // 2 * MaxFrameNum
2552         CurrPicNum = 2 * slice_hdr->frame_num + 1;              // 2 * frame_num + 1
2555         MaxPicNum  = 1 << (sps->log2_max_frame_num_minus4 + 4); // MaxFrameNum
2556         CurrPicNum = slice_hdr->frame_num;                      // frame_num
2559     picNumPred = CurrPicNum;
2560     picViewIdxPred = -1;
2562     for (i = 0; i < num_ref_pic_list_modifications; i++) {
2563         GstH264RefPicListModification * const l = &ref_pic_list_modification[i];
         /* idc == 3 terminates the modification loop */
2564         if (l->modification_of_pic_nums_idc == 3)
2567         /* 8.2.4.3.1 - Short-term reference pictures */
2568         if (l->modification_of_pic_nums_idc == 0 || l->modification_of_pic_nums_idc == 1) {
2569             gint32 abs_diff_pic_num = l->value.abs_diff_pic_num_minus1 + 1;
2570             gint32 picNum, picNumNoWrap;
             /* idc == 0 subtracts, idc == 1 adds, both modulo MaxPicNum */
2573             if (l->modification_of_pic_nums_idc == 0) {
2574                 picNumNoWrap = picNumPred - abs_diff_pic_num;
2575                 if (picNumNoWrap < 0)
2576                     picNumNoWrap += MaxPicNum;
2581                 picNumNoWrap = picNumPred + abs_diff_pic_num;
2582                 if (picNumNoWrap >= MaxPicNum)
2583                     picNumNoWrap -= MaxPicNum;
2585             picNumPred = picNumNoWrap;
2588             picNum = picNumNoWrap;
2589             if (picNum > CurrPicNum)
2590                 picNum -= MaxPicNum;
             /* Shift the tail up, insert the target, then compact out
                duplicates per the spec's PicNumF test */
2593             for (j = num_refs; j > ref_list_idx; j--)
2594                 ref_list[j] = ref_list[j - 1];
2595             found_ref_idx = find_short_term_reference(decoder, picNum);
2596             ref_list[ref_list_idx++] =
2597                 found_ref_idx >= 0 ? priv->short_ref[found_ref_idx] : NULL;
2599             for (j = ref_list_idx; j <= num_refs; j++) {
2604                     GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(ref_list[j]) ?
2605                     ref_list[j]->pic_num : MaxPicNum;
2606                 if (PicNumF != picNum ||
2607                     ref_list[j]->base.view_id != picture->base.view_id)
2608                     ref_list[n++] = ref_list[j];
2612         /* 8.2.4.3.2 - Long-term reference pictures */
2613         else if (l->modification_of_pic_nums_idc == 2) {
2615             for (j = num_refs; j > ref_list_idx; j--)
2616                 ref_list[j] = ref_list[j - 1];
2618                 find_long_term_reference(decoder, l->value.long_term_pic_num);
2619             ref_list[ref_list_idx++] =
2620                 found_ref_idx >= 0 ? priv->long_ref[found_ref_idx] : NULL;
2622             for (j = ref_list_idx; j <= num_refs; j++) {
2623                 gint32 LongTermPicNumF;
2627                     GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(ref_list[j]) ?
2628                     ref_list[j]->long_term_pic_num : INT_MAX;
2629                 if (LongTermPicNumF != l->value.long_term_pic_num ||
2630                     ref_list[j]->base.view_id != picture->base.view_id)
2631                     ref_list[n++] = ref_list[j];
2635         /* H.8.2.2.3 - Inter-view prediction reference pictures */
2636         else if ((GST_VAAPI_PICTURE_IS_MVC(picture) &&
2637                   sps->extension_type == GST_H264_NAL_EXTENSION_MVC) &&
2638                  (l->modification_of_pic_nums_idc == 4 ||
2639                   l->modification_of_pic_nums_idc == 5)) {
2640             gint32 abs_diff_view_idx = l->value.abs_diff_view_idx_minus1 + 1;
2641             gint32 picViewIdx, targetViewId;
             /* idc == 4 subtracts, idc == 5 adds, modulo num_view_ids */
2644             if (l->modification_of_pic_nums_idc == 4) {
2645                 picViewIdx = picViewIdxPred - abs_diff_view_idx;
2647                     picViewIdx += num_view_ids;
2652                 picViewIdx = picViewIdxPred + abs_diff_view_idx;
2653                 if (picViewIdx >= num_view_ids)
2654                     picViewIdx -= num_view_ids;
2656             picViewIdxPred = picViewIdx;
2659             targetViewId = view_ids[picViewIdx];
2662             for (j = num_refs; j > ref_list_idx; j--)
2663                 ref_list[j] = ref_list[j - 1];
2664             ref_list[ref_list_idx++] =
2665                 find_inter_view_reference(decoder, targetViewId);
2667             for (j = ref_list_idx; j <= num_refs; j++) {
2670                 if (ref_list[j]->base.view_id != targetViewId ||
2671                     ref_list[j]->base.poc != picture->base.poc)
2672                     ref_list[n++] = ref_list[j];
     /* Sanity pass: a NULL slot here means a modification referenced a
        picture not present in the DPB */
2678     for (i = 0; i < num_refs; i++)
2680             GST_ERROR("list %u entry %u is empty", list, i);
2682     *ref_list_count_ptr = num_refs;
2685 /* 8.2.4.3 - Modification process for reference picture lists */
/* Dispatches the per-list modification process: list 0 for all non-I/SI
 * slices whose slice header sets ref_pic_list_modification_flag_l0, and
 * additionally list 1 for B slices with the corresponding l1 flag. */
2687 exec_picture_refs_modification(
2688     GstVaapiDecoderH264 *decoder,
2689     GstVaapiPictureH264 *picture,
2690     GstH264SliceHdr *slice_hdr
2693     GST_DEBUG("execute ref_pic_list_modification()");
2696     if (!GST_H264_IS_I_SLICE(slice_hdr) && !GST_H264_IS_SI_SLICE(slice_hdr) &&
2697         slice_hdr->ref_pic_list_modification_flag_l0)
2698         exec_picture_refs_modification_1(decoder, picture, slice_hdr, 0);
2701     if (GST_H264_IS_B_SLICE(slice_hdr) &&
2702         slice_hdr->ref_pic_list_modification_flag_l1)
2703         exec_picture_refs_modification_1(decoder, picture, slice_hdr, 1);
/* Rebuilds priv->short_ref[] / priv->long_ref[] from the DPB for the
 * current picture's view: frame decoding considers only complete frames
 * (fs->buffers[0]), field decoding considers every stored field.  Stale
 * tail entries of both arrays are cleared and the counts refreshed.
 * NOTE(review): only "short_ref_count = 0;" is visible in this extract;
 * the matching "long_ref_count = 0;" initialization appears to fall on
 * an elided line — confirm against the full file. */
2707 init_picture_ref_lists(GstVaapiDecoderH264 *decoder,
2708     GstVaapiPictureH264 *picture)
2710     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2711     guint i, j, short_ref_count, long_ref_count;
2713     short_ref_count = 0;
2715     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2716         for (i = 0; i < priv->dpb_count; i++) {
2717             GstVaapiFrameStore * const fs = priv->dpb[i];
2718             GstVaapiPictureH264 *pic;
2719             if (!gst_vaapi_frame_store_has_frame(fs))
2721             pic = fs->buffers[0];
2722             if (pic->base.view_id != picture->base.view_id)
2724             if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2725                 priv->short_ref[short_ref_count++] = pic;
2726             else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2727                 priv->long_ref[long_ref_count++] = pic;
2728             pic->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2729             pic->other_field = fs->buffers[1];
2733         for (i = 0; i < priv->dpb_count; i++) {
2734             GstVaapiFrameStore * const fs = priv->dpb[i];
2735             for (j = 0; j < fs->num_buffers; j++) {
2736                 GstVaapiPictureH264 * const pic = fs->buffers[j];
2737                 if (pic->base.view_id != picture->base.view_id)
2739                 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2740                     priv->short_ref[short_ref_count++] = pic;
2741                 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2742                     priv->long_ref[long_ref_count++] = pic;
2743                 pic->structure = pic->base.structure;
                 /* j ^ 1 pairs each field with its sibling in the store */
2744                 pic->other_field = fs->buffers[j ^ 1];
     /* Clear entries beyond the freshly counted references */
2749     for (i = short_ref_count; i < priv->short_ref_count; i++)
2750         priv->short_ref[i] = NULL;
2751     priv->short_ref_count = short_ref_count;
2753     for (i = long_ref_count; i < priv->long_ref_count; i++)
2754         priv->long_ref[i] = NULL;
2755     priv->long_ref_count = long_ref_count;
/* Top-level reference list construction for the current slice
 * (presumably init_picture_refs(); the function-name line is elided in
 * this extract): rebuilds the short/long reference arrays, derives
 * PicNum values, builds the initial RefPicList0/1 per slice type,
 * applies ref_pic_list_modification(), then pads both lists with NULL up
 * to num_ref_idx_lX_active as required by the slice header. */
2760     GstVaapiDecoderH264 *decoder,
2761     GstVaapiPictureH264 *picture,
2762     GstH264SliceHdr *slice_hdr
2765     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2768     init_picture_ref_lists(decoder, picture);
2769     init_picture_refs_pic_num(decoder, picture, slice_hdr);
2771     priv->RefPicList0_count = 0;
2772     priv->RefPicList1_count = 0;
     /* type % 5 folds the "same type in all slices" variants (5..9)
        onto the base slice types (0..4) */
2774     switch (slice_hdr->type % 5) {
2775     case GST_H264_P_SLICE:
2776     case GST_H264_SP_SLICE:
2777         init_picture_refs_p_slice(decoder, picture, slice_hdr);
2779     case GST_H264_B_SLICE:
2780         init_picture_refs_b_slice(decoder, picture, slice_hdr);
2786     exec_picture_refs_modification(decoder, picture, slice_hdr);
2788     switch (slice_hdr->type % 5) {
2789     case GST_H264_B_SLICE:
2790         num_refs = 1 + slice_hdr->num_ref_idx_l1_active_minus1;
2791         for (i = priv->RefPicList1_count; i < num_refs; i++)
2792             priv->RefPicList1[i] = NULL;
2793         priv->RefPicList1_count = num_refs;
2796     case GST_H264_P_SLICE:
2797     case GST_H264_SP_SLICE:
2798         num_refs = 1 + slice_hdr->num_ref_idx_l0_active_minus1;
2799         for (i = priv->RefPicList0_count; i < num_refs; i++)
2800             priv->RefPicList0[i] = NULL;
2801         priv->RefPicList0_count = num_refs;
/* Per-picture initialization from the first slice header (presumably
 * init_picture(); the function-name line is elided in this extract):
 * records frame_num, pts, view id/voc, sets MVC flags from the NAL
 * extension, flushes the DPB on IDR, derives the picture structure from
 * field_pic_flag/bottom_field_flag, marks reference type from
 * nal_ref_idc, and finally computes POC values. */
2810     GstVaapiDecoderH264 *decoder,
2811     GstVaapiPictureH264 *picture, GstVaapiParserInfoH264 *pi)
2813     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2814     GstVaapiPicture * const base_picture = &picture->base;
2815     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2817     priv->prev_frame_num        = priv->frame_num;
2818     priv->frame_num             = slice_hdr->frame_num;
2819     picture->frame_num          = priv->frame_num;
2820     picture->frame_num_wrap     = priv->frame_num;
2821     picture->output_flag        = TRUE; /* XXX: conformant to Annex A only */
2822     base_picture->pts           = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
2823     base_picture->type          = GST_VAAPI_PICTURE_TYPE_NONE;
2824     base_picture->view_id       = pi->view_id;
2825     base_picture->voc           = pi->voc;
2827     /* Initialize extensions */
2828     switch (pi->nalu.extension_type) {
2829     case GST_H264_NAL_EXTENSION_MVC: {
2830         GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
2832         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_MVC);
2833         if (mvc->inter_view_flag)
2834             GST_VAAPI_PICTURE_FLAG_SET(picture,
2835                 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
2836         if (mvc->anchor_pic_flag)
2837             GST_VAAPI_PICTURE_FLAG_SET(picture,
2838                 GST_VAAPI_PICTURE_FLAG_ANCHOR);
2843     /* Reset decoder state for IDR pictures */
2844     if (pi->nalu.idr_pic_flag) {
2846         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR);
2847         dpb_flush(decoder, picture);
2850     /* Initialize picture structure */
2851     if (!slice_hdr->field_pic_flag)
2852         base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2854         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
2855         if (!slice_hdr->bottom_field_flag)
2856             base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
2858             base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
2860     picture->structure = base_picture->structure;
2862     /* Initialize reference flags */
2863     if (pi->nalu.ref_idc) {
2864         GstH264DecRefPicMarking * const dec_ref_pic_marking =
2865             &slice_hdr->dec_ref_pic_marking;
         /* IDR + long_term_reference_flag => long-term, else short-term */
2867         if (GST_VAAPI_PICTURE_IS_IDR(picture) &&
2868             dec_ref_pic_marking->long_term_reference_flag)
2869             GST_VAAPI_PICTURE_FLAG_SET(picture,
2870                 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE);
2872             GST_VAAPI_PICTURE_FLAG_SET(picture,
2873                 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE);
2876     init_picture_poc(decoder, picture, slice_hdr);
2880 /* 8.2.5.3 - Sliding window decoded reference picture marking process */
/* When the reference count reaches the SPS limit (doubled for field
 * decoding), unmarks the short-term reference with the smallest
 * FrameNumWrap — and, in field mode, its paired other field as well. */
2882 exec_ref_pic_marking_sliding_window(GstVaapiDecoderH264 *decoder)
2884     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2885     GstH264SPS * const sps = get_sps(decoder);
2886     GstVaapiPictureH264 *ref_picture;
2887     guint i, m, max_num_ref_frames;
2889     GST_DEBUG("reference picture marking process (sliding window)");
     /* Only run once per complementary field pair */
2891     if (!GST_VAAPI_PICTURE_IS_FIRST_FIELD(priv->current_picture))
2894     max_num_ref_frames = sps->num_ref_frames;
2895     if (max_num_ref_frames == 0)
2896         max_num_ref_frames = 1;
2897     if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture))
2898         max_num_ref_frames <<= 1;
2900     if (priv->short_ref_count + priv->long_ref_count < max_num_ref_frames)
2902     if (priv->short_ref_count < 1)
     /* Find the short-term reference with the smallest FrameNumWrap */
2905     for (m = 0, i = 1; i < priv->short_ref_count; i++) {
2906         GstVaapiPictureH264 * const picture = priv->short_ref[i];
2907         if (picture->frame_num_wrap < priv->short_ref[m]->frame_num_wrap)
2911     ref_picture = priv->short_ref[m];
2912     gst_vaapi_picture_h264_set_reference(ref_picture, 0, TRUE);
2913     ARRAY_REMOVE_INDEX(priv->short_ref, m);
2915     /* Both fields need to be marked as "unused for reference", so
2916        remove the other field from the short_ref[] list as well */
2917     if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture) && ref_picture->other_field) {
2918         for (i = 0; i < priv->short_ref_count; i++) {
2919             if (priv->short_ref[i] == ref_picture->other_field) {
2920                 ARRAY_REMOVE_INDEX(priv->short_ref, i);
/* Derives picNumX (spec 8.2.5.4.1) for an MMCO operation: the current
 * picture's PicNum (FrameNumWrap for frames, 2*FrameNumWrap + 1 for
 * fields) minus (difference_of_pic_nums_minus1 + 1).  The declaration
 * and return of the pic_num accumulator fall on elided lines. */
2928 static inline gint32
2929 get_picNumX(GstVaapiPictureH264 *picture, GstH264RefPicMarking *ref_pic_marking)
2933     if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2934         pic_num = picture->frame_num_wrap;
2936         pic_num = 2 * picture->frame_num_wrap + 1;
2937     pic_num -= ref_pic_marking->difference_of_pic_nums_minus1 + 1;
2941 /* 8.2.5.4.1. Mark short-term reference picture as "unused for reference" */
/* MMCO 1: removes the short-term reference matching picNumX from
 * short_ref[]; for frame pictures both fields lose reference status
 * (third argument of set_reference). */
2943 exec_ref_pic_marking_adaptive_mmco_1(
2944     GstVaapiDecoderH264 *decoder,
2945     GstVaapiPictureH264 *picture,
2946     GstH264RefPicMarking *ref_pic_marking
2949     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2952     picNumX = get_picNumX(picture, ref_pic_marking);
2953     i = find_short_term_reference(decoder, picNumX);
2957     gst_vaapi_picture_h264_set_reference(priv->short_ref[i], 0,
2958         GST_VAAPI_PICTURE_IS_FRAME(picture));
2959     ARRAY_REMOVE_INDEX(priv->short_ref, i);
2962 /* 8.2.5.4.2. Mark long-term reference picture as "unused for reference" */
/* MMCO 2: removes the long-term reference matching long_term_pic_num
 * from long_ref[], mirroring the short-term case above. */
2964 exec_ref_pic_marking_adaptive_mmco_2(
2965     GstVaapiDecoderH264 *decoder,
2966     GstVaapiPictureH264 *picture,
2967     GstH264RefPicMarking *ref_pic_marking
2970     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2973     i = find_long_term_reference(decoder, ref_pic_marking->long_term_pic_num);
2977     gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0,
2978         GST_VAAPI_PICTURE_IS_FRAME(picture));
2979     ARRAY_REMOVE_INDEX(priv->long_ref, i);
2982 /* 8.2.5.4.3. Assign LongTermFrameIdx to a short-term reference picture */
/* MMCO 3: first evicts any long-term reference already holding the
 * target LongTermFrameIdx, then converts the short-term reference with
 * PicNum == picNumX into a long-term one, propagating the index to the
 * paired field when that field is also long-term. */
2984 exec_ref_pic_marking_adaptive_mmco_3(
2985     GstVaapiDecoderH264 *decoder,
2986     GstVaapiPictureH264 *picture,
2987     GstH264RefPicMarking *ref_pic_marking
2990     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2991     GstVaapiPictureH264 *ref_picture, *other_field;
2994     for (i = 0; i < priv->long_ref_count; i++) {
2995         if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
2998     if (i != priv->long_ref_count) {
2999         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
3000         ARRAY_REMOVE_INDEX(priv->long_ref, i);
3003     picNumX = get_picNumX(picture, ref_pic_marking);
3004     i = find_short_term_reference(decoder, picNumX);
3008     ref_picture = priv->short_ref[i];
3009     ARRAY_REMOVE_INDEX(priv->short_ref, i);
3010     priv->long_ref[priv->long_ref_count++] = ref_picture;
3012     ref_picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3013     gst_vaapi_picture_h264_set_reference(ref_picture,
3014         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3015         GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3017     /* Assign LongTermFrameIdx to the other field if it was also
3018        marked as "used for long-term reference */
3019     other_field = ref_picture->other_field;
3020     if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3021         other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3024 /* 8.2.5.4.4. Mark pictures with LongTermFramIdx > max_long_term_frame_idx
3025  * as "unused for reference" */
/* MMCO 4: lowers MaxLongTermFrameIdx and drops every long-term
 * reference whose index now exceeds it.  The visible condition keeps
 * entries with idx <= the new limit; the removal path for the others
 * includes lines elided from this extract. */
3027 exec_ref_pic_marking_adaptive_mmco_4(
3028     GstVaapiDecoderH264 *decoder,
3029     GstVaapiPictureH264 *picture,
3030     GstH264RefPicMarking *ref_pic_marking
3033     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3034     gint32 i, long_term_frame_idx;
3036     long_term_frame_idx = ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
3038     for (i = 0; i < priv->long_ref_count; i++) {
3039         if (priv->long_ref[i]->long_term_frame_idx <= long_term_frame_idx)
3041         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, FALSE);
3042         ARRAY_REMOVE_INDEX(priv->long_ref, i);
3047 /* 8.2.5.4.5. Mark all reference pictures as "unused for reference" */
/* MMCO 5: flushes the DPB, resets frame_num bookkeeping to 0 (7.4.3),
 * and rebases the current picture's POC values so its own poc becomes 0
 * (8.2.1).  prev_pic_has_mmco5 lets the POC code detect this later. */
3049 exec_ref_pic_marking_adaptive_mmco_5(
3050     GstVaapiDecoderH264 *decoder,
3051     GstVaapiPictureH264 *picture,
3052     GstH264RefPicMarking *ref_pic_marking
3055     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3057     dpb_flush(decoder, picture);
3059     priv->prev_pic_has_mmco5 = TRUE;
3061     /* The picture shall be inferred to have had frame_num equal to 0 (7.4.3) */
3062     priv->frame_num = 0;
3063     priv->frame_num_offset = 0;
3064     picture->frame_num = 0;
3066     /* Update TopFieldOrderCnt and BottomFieldOrderCnt (8.2.1) */
3067     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
3068         picture->field_poc[TOP_FIELD] -= picture->base.poc;
3069     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
3070         picture->field_poc[BOTTOM_FIELD] -= picture->base.poc;
3071     picture->base.poc = 0;
3074 /* 8.2.5.4.6. Assign a long-term frame index to the current picture */
/* MMCO 6: evicts any long-term reference already holding the target
 * LongTermFrameIdx, then marks the *current* picture as long-term with
 * that index; the paired field (via parent_picture) inherits the index
 * when it is also a long-term reference. */
3076 exec_ref_pic_marking_adaptive_mmco_6(
3077     GstVaapiDecoderH264 *decoder,
3078     GstVaapiPictureH264 *picture,
3079     GstH264RefPicMarking *ref_pic_marking
3082     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3083     GstVaapiPictureH264 *other_field;
3086     for (i = 0; i < priv->long_ref_count; i++) {
3087         if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
3090     if (i != priv->long_ref_count) {
3091         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
3092         ARRAY_REMOVE_INDEX(priv->long_ref, i);
3095     picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3096     gst_vaapi_picture_h264_set_reference(picture,
3097         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3098         GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3100     /* Assign LongTermFrameIdx to the other field if it was also
3101        marked as "used for long-term reference */
3102     other_field = GST_VAAPI_PICTURE_H264(picture->base.parent_picture);
3103     if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3104         other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3107 /* 8.2.5.4. Adaptive memory control decoded reference picture marking process */
/* Iterates the slice header's ref_pic_marking[] operations and
 * dispatches each MMCO (1..6) through a function table; unknown MMCO
 * values are reported with GST_ERROR. */
3109 exec_ref_pic_marking_adaptive(
3110     GstVaapiDecoderH264 *decoder,
3111     GstVaapiPictureH264 *picture,
3112     GstH264DecRefPicMarking *dec_ref_pic_marking
3117     GST_DEBUG("reference picture marking process (adaptive memory control)");
3119     typedef void (*exec_ref_pic_marking_adaptive_mmco_func)(
3120         GstVaapiDecoderH264 *decoder,
3121         GstVaapiPictureH264 *picture,
3122         GstH264RefPicMarking *ref_pic_marking
     /* Index 0 is unused (MMCO 0 means "end of operations"); the elided
        first initializer presumably holds NULL for that slot */
3125     static const exec_ref_pic_marking_adaptive_mmco_func mmco_funcs[] = {
3127         exec_ref_pic_marking_adaptive_mmco_1,
3128         exec_ref_pic_marking_adaptive_mmco_2,
3129         exec_ref_pic_marking_adaptive_mmco_3,
3130         exec_ref_pic_marking_adaptive_mmco_4,
3131         exec_ref_pic_marking_adaptive_mmco_5,
3132         exec_ref_pic_marking_adaptive_mmco_6,
3135     for (i = 0; i < dec_ref_pic_marking->n_ref_pic_marking; i++) {
3136         GstH264RefPicMarking * const ref_pic_marking =
3137             &dec_ref_pic_marking->ref_pic_marking[i];
3139         const guint mmco = ref_pic_marking->memory_management_control_operation;
3140         if (mmco < G_N_ELEMENTS(mmco_funcs) && mmco_funcs[mmco])
3141             mmco_funcs[mmco](decoder, picture, ref_pic_marking);
3143             GST_ERROR("unhandled MMCO %u", mmco);
3150 /* 8.2.5 - Execute reference picture marking process */
/* Entry point for reference marking after a picture is decoded: records
 * inter-view pictures, then — for reference pictures that are not IDR —
 * runs either the adaptive (MMCO) process or the sliding-window process,
 * as selected by adaptive_ref_pic_marking_mode_flag. */
3152 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3154     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3156     priv->prev_pic_has_mmco5 = FALSE;
3157     priv->prev_pic_structure = picture->structure;
3159     if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture))
3160         g_ptr_array_add(priv->inter_views, gst_vaapi_picture_ref(picture));
3162     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
3165     if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
3166         GstH264DecRefPicMarking * const dec_ref_pic_marking =
3167             &picture->last_slice_hdr->dec_ref_pic_marking;
3168         if (dec_ref_pic_marking->adaptive_ref_pic_marking_mode_flag) {
3169             if (!exec_ref_pic_marking_adaptive(decoder, picture, dec_ref_pic_marking))
3173             if (!exec_ref_pic_marking_sliding_window(decoder))
/* Resets a VAPictureH264 entry to the "invalid / unused" state expected
 * by VA-API for empty ReferenceFrames[] slots. */
3181 vaapi_init_picture(VAPictureH264 *pic)
3183     pic->picture_id          = VA_INVALID_ID;
3185     pic->flags               = VA_PICTURE_H264_INVALID;
3186     pic->TopFieldOrderCnt    = 0;
3187     pic->BottomFieldOrderCnt = 0;
/* Translates a decoder picture into a VAPictureH264: surface id,
 * short/long-term reference flags, frame_idx (LongTermFrameIdx for
 * long-term refs, frame_num otherwise), and the field order counts
 * appropriate to @picture_structure (0 means "use picture->structure"). */
3191 vaapi_fill_picture(VAPictureH264 *pic, GstVaapiPictureH264 *picture,
3192     guint picture_structure)
3194     if (!picture_structure)
3195         picture_structure = picture->structure;
3197     pic->picture_id = picture->base.surface_id;
3200     if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)) {
3201         pic->flags |= VA_PICTURE_H264_LONG_TERM_REFERENCE;
3202         pic->frame_idx = picture->long_term_frame_idx;
3205         if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
3206             pic->flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
3207         pic->frame_idx = picture->frame_num;
     /* Only the field(s) actually present get a meaningful order count */
3210     switch (picture_structure) {
3211     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
3212         pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3213         pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3215     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
3216         pic->flags |= VA_PICTURE_H264_TOP_FIELD;
3217         pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3218         pic->BottomFieldOrderCnt = 0;
3220     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
3221         pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD;
3222         pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3223         pic->TopFieldOrderCnt = 0;
/* Fills a RefPicListX entry like vaapi_fill_picture(), then strips the
 * short/long-term reference flags for inter-view references, as H.8.4
 * requires they not be treated as temporally marked references. */
3229 vaapi_fill_picture_for_RefPicListX(VAPictureH264 *pic,
3230     GstVaapiPictureH264 *picture)
3232     vaapi_fill_picture(pic, picture, 0);
3234     /* H.8.4 - MVC inter prediction and inter-view prediction process */
3235     if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture)) {
3236         /* The inter-view reference components and inter-view only
3237            reference components that are included in the reference
3238            picture lists are considered as not being marked as "used for
3239            short-term reference" or "used for long-term reference" */
3240         pic->flags &= ~(VA_PICTURE_H264_SHORT_TERM_REFERENCE|
3241                         VA_PICTURE_H264_LONG_TERM_REFERENCE);
/* Populates the VAPictureParameterBufferH264 for @picture: CurrPic, the
 * ReferenceFrames[] array (same-view references plus applicable
 * inter-view references from the DPB, remainder marked invalid), and the
 * SPS/PPS-derived fields and bitfields the VA driver needs. */
3246 fill_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3248     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3249     GstVaapiPicture * const base_picture = &picture->base;
3250     GstH264PPS * const pps = get_pps(decoder);
3251     GstH264SPS * const sps = get_sps(decoder);
3252     VAPictureParameterBufferH264 * const pic_param = base_picture->param;
3255     /* Fill in VAPictureParameterBufferH264 */
3256     vaapi_fill_picture(&pic_param->CurrPic, picture, 0);
3258     for (i = 0, n = 0; i < priv->dpb_count; i++) {
3259         GstVaapiFrameStore * const fs = priv->dpb[i];
3260         if ((gst_vaapi_frame_store_has_reference(fs) &&
3261              fs->view_id == picture->base.view_id) ||
3262             (gst_vaapi_frame_store_has_inter_view(fs) &&
3263              is_inter_view_reference_for_picture(decoder, fs->view_id, picture)))
3264             vaapi_fill_picture(&pic_param->ReferenceFrames[n++],
3265                 fs->buffers[0], fs->structure);
3266         if (n >= G_N_ELEMENTS(pic_param->ReferenceFrames))
     /* Mark unused ReferenceFrames[] slots as invalid */
3269     for (; n < G_N_ELEMENTS(pic_param->ReferenceFrames); n++)
3270         vaapi_init_picture(&pic_param->ReferenceFrames[n]);
3272 #define COPY_FIELD(s, f) \
3273     pic_param->f = (s)->f
3275 #define COPY_BFM(a, s, f) \
3276     pic_param->a.bits.f = (s)->f
3278     pic_param->picture_width_in_mbs_minus1  = priv->mb_width - 1;
3279     pic_param->picture_height_in_mbs_minus1 = priv->mb_height - 1;
3280     pic_param->frame_num                    = priv->frame_num;
3282     COPY_FIELD(sps, bit_depth_luma_minus8);
3283     COPY_FIELD(sps, bit_depth_chroma_minus8);
3284     COPY_FIELD(sps, num_ref_frames);
3285     COPY_FIELD(pps, num_slice_groups_minus1);
3286     COPY_FIELD(pps, slice_group_map_type);
3287     COPY_FIELD(pps, slice_group_change_rate_minus1);
3288     COPY_FIELD(pps, pic_init_qp_minus26);
3289     COPY_FIELD(pps, pic_init_qs_minus26);
3290     COPY_FIELD(pps, chroma_qp_index_offset);
3291     COPY_FIELD(pps, second_chroma_qp_index_offset);
3293     pic_param->seq_fields.value                                         = 0; /* reset all bits */
3294     pic_param->seq_fields.bits.residual_colour_transform_flag           = sps->separate_colour_plane_flag;
3295     pic_param->seq_fields.bits.MinLumaBiPredSize8x8                     = sps->level_idc >= 31; /* A.3.3.2 */
3297     COPY_BFM(seq_fields, sps, chroma_format_idc);
3298     COPY_BFM(seq_fields, sps, gaps_in_frame_num_value_allowed_flag);
3299     COPY_BFM(seq_fields, sps, frame_mbs_only_flag);
3300     COPY_BFM(seq_fields, sps, mb_adaptive_frame_field_flag);
3301     COPY_BFM(seq_fields, sps, direct_8x8_inference_flag);
3302     COPY_BFM(seq_fields, sps, log2_max_frame_num_minus4);
3303     COPY_BFM(seq_fields, sps, pic_order_cnt_type);
3304     COPY_BFM(seq_fields, sps, log2_max_pic_order_cnt_lsb_minus4);
3305     COPY_BFM(seq_fields, sps, delta_pic_order_always_zero_flag);
3307     pic_param->pic_fields.value                                         = 0; /* reset all bits */
3308     pic_param->pic_fields.bits.field_pic_flag                           = GST_VAAPI_PICTURE_IS_INTERLACED(picture);
3309     pic_param->pic_fields.bits.reference_pic_flag                       = GST_VAAPI_PICTURE_IS_REFERENCE(picture);
3311     COPY_BFM(pic_fields, pps, entropy_coding_mode_flag);
3312     COPY_BFM(pic_fields, pps, weighted_pred_flag);
3313     COPY_BFM(pic_fields, pps, weighted_bipred_idc);
3314     COPY_BFM(pic_fields, pps, transform_8x8_mode_flag);
3315     COPY_BFM(pic_fields, pps, constrained_intra_pred_flag);
3316     COPY_BFM(pic_fields, pps, pic_order_present_flag);
3317     COPY_BFM(pic_fields, pps, deblocking_filter_control_present_flag);
3318     COPY_BFM(pic_fields, pps, redundant_pic_cnt_present_flag);
/* is_new_picture:
 * Returns TRUE when the current slice (pi) starts a new primary coded
 * picture relative to the previous slice (prev_pi), per the first-VCL-NAL
 * detection conditions of H.264 spec clause 7.4.1.2.4.
 * NOTE(review): this extraction is missing several physical lines of the
 * original (the function's return type, the body of CHECK_EXPR, the
 * trailing return and #undef's) — do not treat this text as compilable. */
3322 /* Detection of the first VCL NAL unit of a primary coded picture (7.4.1.2.4) */
3324 is_new_picture(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3326 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3327 GstH264PPS * const pps = slice_hdr->pps;
3328 GstH264SPS * const sps = pps->sequence;
3329 GstH264SliceHdr *prev_slice_hdr;
3333 prev_slice_hdr = &prev_pi->data.slice_hdr;
/* CHECK_EXPR: log and bail out (new picture) when the expression fails.
 * CHECK_VALUE: field-by-field equality check built on CHECK_EXPR. */
3335 #define CHECK_EXPR(expr, field_name) do { \
3337 GST_DEBUG(field_name " differs in value"); \
3342 #define CHECK_VALUE(new_slice_hdr, old_slice_hdr, field) \
3343 CHECK_EXPR(((new_slice_hdr)->field == (old_slice_hdr)->field), #field)
3345 /* view_id differs in value and VOIdx of current slice_hdr is less
3346 than the VOIdx of the prev_slice_hdr */
3347 CHECK_VALUE(pi, prev_pi, view_id);
3349 /* frame_num differs in value, regardless of inferred values to 0 */
3350 CHECK_VALUE(slice_hdr, prev_slice_hdr, frame_num);
/* Comparing the pps pointers is equivalent to comparing
 * pic_parameter_set_id here, since parsed PPS objects are interned. */
3352 /* pic_parameter_set_id differs in value */
3353 CHECK_VALUE(slice_hdr, prev_slice_hdr, pps);
3355 /* field_pic_flag differs in value */
3356 CHECK_VALUE(slice_hdr, prev_slice_hdr, field_pic_flag);
3358 /* bottom_field_flag is present in both and differs in value */
3359 if (slice_hdr->field_pic_flag && prev_slice_hdr->field_pic_flag)
3360 CHECK_VALUE(slice_hdr, prev_slice_hdr, bottom_field_flag);
3362 /* nal_ref_idc differs in value with one of the nal_ref_idc values is 0 */
3363 CHECK_EXPR((pi->nalu.ref_idc != 0) ==
3364 (prev_pi->nalu.ref_idc != 0), "nal_ref_idc");
3366 /* POC type is 0 for both and either pic_order_cnt_lsb differs in
3367 value or delta_pic_order_cnt_bottom differs in value */
3368 if (sps->pic_order_cnt_type == 0) {
3369 CHECK_VALUE(slice_hdr, prev_slice_hdr, pic_order_cnt_lsb);
3370 if (pps->pic_order_present_flag && !slice_hdr->field_pic_flag)
3371 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt_bottom);
3374 /* POC type is 1 for both and either delta_pic_order_cnt[0]
3375 differs in value or delta_pic_order_cnt[1] differs in value */
3376 else if (sps->pic_order_cnt_type == 1) {
3377 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[0]);
3378 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[1]);
3381 /* IdrPicFlag differs in value */
3382 CHECK_VALUE(&pi->nalu, &prev_pi->nalu, idr_pic_flag);
3384 /* IdrPicFlag is equal to 1 for both and idr_pic_id differs in value */
3385 if (pi->nalu.idr_pic_flag)
3386 CHECK_VALUE(slice_hdr, prev_slice_hdr, idr_pic_id);
/* is_new_access_unit:
 * MVC helper: given that a new picture was already detected, decides
 * whether it also opens a new access unit. A picture with a view order
 * count (voc) lower than the previous picture's implies a wrap to a new
 * access unit. NOTE(review): the comment's second line and the early
 * `return TRUE` branch appear to be missing from this extraction. */
3393 /* Detection of a new access unit, assuming we are already in presence
3395 static inline gboolean
3396 is_new_access_unit(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
/* No previous slice, or same view id: treated by the (elided) branch. */
3398 if (!prev_pi || prev_pi->view_id == pi->view_id)
3400 return pi->voc < prev_pi->voc;
/* find_first_field:
 * Looks up the previously decoded picture in the same view (indexed by
 * pi->voc) and returns its first field when the incoming slice is the
 * complementary field of the same frame (matching frame_num); the
 * early-return paths (not a field, no store / complete frame, frame_num
 * mismatch) are partially elided in this extraction. */
3403 /* Finds the first field picture corresponding to the supplied picture */
3404 static GstVaapiPictureH264 *
3405 find_first_field(GstVaapiDecoderH264 *decoder, GstVaapiParserInfoH264 *pi)
3407 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3408 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3409 GstVaapiFrameStore *fs;
/* Frame pictures have no "first field" to pair with. */
3411 if (!slice_hdr->field_pic_flag)
3414 fs = priv->prev_frames[pi->voc];
3415 if (!fs || gst_vaapi_frame_store_has_frame(fs))
/* Same frame_num => this slice belongs to the second field of the
 * same frame; reuse the stored first field. */
3418 if (fs->buffers[0]->frame_num == slice_hdr->frame_num)
3419 return fs->buffers[0];
/* decode_picture:
 * Starts decoding of a new picture from the first slice unit: activates
 * PPS/SPS, (re)creates the VA context, allocates either a new picture or
 * a second-field picture, applies the cropping rectangle, loads the
 * quantization matrices and fills the VA picture parameters.
 * Returns a GstVaapiDecoderStatus. NOTE(review): several lines (braces,
 * `if (!picture)` checks, switch default) are elided in this extraction. */
3423 static GstVaapiDecoderStatus
3424 decode_picture(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3426 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3427 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3428 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3429 GstH264PPS * const pps = ensure_pps(decoder, slice_hdr->pps);
3430 GstH264SPS * const sps = ensure_sps(decoder, slice_hdr->pps->sequence);
3431 GstVaapiPictureH264 *picture, *first_field;
3432 GstVaapiDecoderStatus status;
3434 g_return_val_if_fail(pps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3435 g_return_val_if_fail(sps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
/* MVC substream pictures are dropped here; the condition guarding the
 * GST_DEBUG below (presumably on view/base-layer state) is elided. */
3437 /* Only decode base stream for MVC */
3438 switch (sps->profile_idc) {
3439 case GST_H264_PROFILE_MULTIVIEW_HIGH:
3440 case GST_H264_PROFILE_STEREO_HIGH:
3442 GST_DEBUG("drop picture from substream");
3443 return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
3448 status = ensure_context(decoder, sps);
3449 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3452 priv->decoder_state = 0;
3454 first_field = find_first_field(decoder, pi);
/* Second field: pair with the already-decoded first field. */
3456 /* Re-use current picture where the first field was decoded */
3457 picture = gst_vaapi_picture_h264_new_field(first_field);
3459 GST_ERROR("failed to allocate field picture");
3460 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3464 /* Create new picture */
3465 picture = gst_vaapi_picture_h264_new(decoder);
3467 GST_ERROR("failed to allocate picture");
3468 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
/* priv->current_picture takes its own ref; drop the local one. */
3471 gst_vaapi_picture_replace(&priv->current_picture, picture);
3472 gst_vaapi_picture_unref(picture);
3474 /* Clear inter-view references list if this is the primary coded
3475 picture of the current access unit */
3476 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3477 g_ptr_array_set_size(priv->inter_views, 0);
3479 /* Update cropping rectangle */
3480 if (sps->frame_cropping_flag) {
3481 GstVaapiRectangle crop_rect;
3482 crop_rect.x = sps->crop_rect_x;
3483 crop_rect.y = sps->crop_rect_y;
3484 crop_rect.width = sps->crop_rect_width;
3485 crop_rect.height = sps->crop_rect_height;
3486 gst_vaapi_picture_set_crop_rect(&picture->base, &crop_rect);
3489 status = ensure_quant_matrix(decoder, picture);
3490 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
3491 GST_ERROR("failed to reset quantizer matrix");
3495 if (!init_picture(decoder, picture, pi))
3496 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3497 if (!fill_picture(decoder, picture))
3498 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3500 priv->decoder_state = pi->state;
3501 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* get_slice_data_bit_offset:
 * Computes the bit offset of the slice data within the NAL unit, as
 * expected by VASliceParameterBufferH264.slice_data_bit_offset: header
 * bits (NAL header bytes + parsed slice-header size) minus the bits
 * occupied by emulation-prevention bytes counted by the parser.
 * NOTE(review): the return-type line and the epb_count declaration are
 * elided in this extraction. */
3505 get_slice_data_bit_offset(GstH264SliceHdr *slice_hdr, guint nal_header_bytes)
3509 epb_count = slice_hdr->n_emulation_prevention_bytes;
3510 return 8 * nal_header_bytes + slice_hdr->header_size - epb_count * 8;
/* fill_pred_weight_table:
 * Copies the explicit prediction weight table from the parsed slice
 * header into the VA slice parameters. One table (L0) is needed for
 * weighted P/SP slices, two (L0+L1) for explicit weighted-bipred B
 * slices; chroma weights are copied only when chroma_array_type != 0.
 * NOTE(review): return type, some braces and early returns are elided
 * in this extraction. */
3514 fill_pred_weight_table(GstVaapiDecoderH264 *decoder,
3515 GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3517 VASliceParameterBufferH264 * const slice_param = slice->param;
3518 GstH264PPS * const pps = get_pps(decoder);
3519 GstH264SPS * const sps = get_sps(decoder);
3520 GstH264PredWeightTable * const w = &slice_hdr->pred_weight_table;
3521 guint num_weight_tables = 0;
3524 if (pps->weighted_pred_flag &&
3525 (GST_H264_IS_P_SLICE(slice_hdr) || GST_H264_IS_SP_SLICE(slice_hdr)))
3526 num_weight_tables = 1;
3527 else if (pps->weighted_bipred_idc == 1 && GST_H264_IS_B_SLICE(slice_hdr))
3528 num_weight_tables = 2;
3530 num_weight_tables = 0;
/* Defaults: no explicit weighting. */
3532 slice_param->luma_log2_weight_denom = 0;
3533 slice_param->chroma_log2_weight_denom = 0;
3534 slice_param->luma_weight_l0_flag = 0;
3535 slice_param->chroma_weight_l0_flag = 0;
3536 slice_param->luma_weight_l1_flag = 0;
3537 slice_param->chroma_weight_l1_flag = 0;
3539 if (num_weight_tables < 1)
3542 slice_param->luma_log2_weight_denom = w->luma_log2_weight_denom;
3543 slice_param->chroma_log2_weight_denom = w->chroma_log2_weight_denom;
/* List 0 weights/offsets, bounded by num_ref_idx_l0_active_minus1. */
3545 slice_param->luma_weight_l0_flag = 1;
3546 for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3547 slice_param->luma_weight_l0[i] = w->luma_weight_l0[i];
3548 slice_param->luma_offset_l0[i] = w->luma_offset_l0[i];
3551 slice_param->chroma_weight_l0_flag = sps->chroma_array_type != 0;
3552 if (slice_param->chroma_weight_l0_flag) {
3553 for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3554 for (j = 0; j < 2; j++) {
3555 slice_param->chroma_weight_l0[i][j] = w->chroma_weight_l0[i][j];
3556 slice_param->chroma_offset_l0[i][j] = w->chroma_offset_l0[i][j];
3561 if (num_weight_tables < 2)
/* List 1 weights/offsets (B slices with explicit weighting only). */
3564 slice_param->luma_weight_l1_flag = 1;
3565 for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3566 slice_param->luma_weight_l1[i] = w->luma_weight_l1[i];
3567 slice_param->luma_offset_l1[i] = w->luma_offset_l1[i];
3570 slice_param->chroma_weight_l1_flag = sps->chroma_array_type != 0;
3571 if (slice_param->chroma_weight_l1_flag) {
3572 for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3573 for (j = 0; j < 2; j++) {
3574 slice_param->chroma_weight_l1[i][j] = w->chroma_weight_l1[i][j];
3575 slice_param->chroma_offset_l1[i][j] = w->chroma_offset_l1[i][j];
/* fill_RefPicList:
 * Populates RefPicList0/RefPicList1 in the VA slice parameters from the
 * decoder's modified reference picture lists. I slices use no list,
 * P/SP one list, B slices two; unused trailing entries up to the active
 * count are reset with vaapi_init_picture().
 * NOTE(review): return type, the num_ref_lists assignments for the
 * B/I/other cases, and the returns are elided in this extraction. */
3583 fill_RefPicList(GstVaapiDecoderH264 *decoder,
3584 GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3586 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3587 VASliceParameterBufferH264 * const slice_param = slice->param;
3588 guint i, num_ref_lists = 0;
3590 slice_param->num_ref_idx_l0_active_minus1 = 0;
3591 slice_param->num_ref_idx_l1_active_minus1 = 0;
3593 if (GST_H264_IS_B_SLICE(slice_hdr))
3595 else if (GST_H264_IS_I_SLICE(slice_hdr))
3600 if (num_ref_lists < 1)
3603 slice_param->num_ref_idx_l0_active_minus1 =
3604 slice_hdr->num_ref_idx_l0_active_minus1;
/* Copy valid RefPicList0 entries, then clear the remainder. */
3606 for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
3607 vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList0[i],
3608 priv->RefPicList0[i]);
3609 for (; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
3610 vaapi_init_picture(&slice_param->RefPicList0[i]);
3612 if (num_ref_lists < 2)
3615 slice_param->num_ref_idx_l1_active_minus1 =
3616 slice_hdr->num_ref_idx_l1_active_minus1;
/* Same for RefPicList1 (B slices only). */
3618 for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
3619 vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList1[i],
3620 priv->RefPicList1[i]);
3621 for (; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
3622 vaapi_init_picture(&slice_param->RefPicList1[i]);
/* fill_slice:
 * Fills the VASliceParameterBufferH264 for one slice from the parsed
 * slice header, then delegates reference lists and prediction weights
 * to fill_RefPicList() and fill_pred_weight_table().
 * NOTE(review): return type and the failure-return lines after the two
 * trailing `if (!...)` checks are elided in this extraction. */
3627 fill_slice(GstVaapiDecoderH264 *decoder,
3628 GstVaapiSlice *slice, GstVaapiParserInfoH264 *pi)
3630 VASliceParameterBufferH264 * const slice_param = slice->param;
3631 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3633 /* Fill in VASliceParameterBufferH264 */
3634 slice_param->slice_data_bit_offset =
3635 get_slice_data_bit_offset(slice_hdr, pi->nalu.header_bytes);
3636 slice_param->first_mb_in_slice = slice_hdr->first_mb_in_slice;
/* Slice types 5..9 are aliases of 0..4 (spec 7.4.3); fold them. */
3637 slice_param->slice_type = slice_hdr->type % 5;
3638 slice_param->direct_spatial_mv_pred_flag = slice_hdr->direct_spatial_mv_pred_flag;
3639 slice_param->cabac_init_idc = slice_hdr->cabac_init_idc;
3640 slice_param->slice_qp_delta = slice_hdr->slice_qp_delta;
3641 slice_param->disable_deblocking_filter_idc = slice_hdr->disable_deblocking_filter_idc;
3642 slice_param->slice_alpha_c0_offset_div2 = slice_hdr->slice_alpha_c0_offset_div2;
3643 slice_param->slice_beta_offset_div2 = slice_hdr->slice_beta_offset_div2;
3645 if (!fill_RefPicList(decoder, slice, slice_hdr))
3647 if (!fill_pred_weight_table(decoder, slice, slice_hdr))
/* decode_slice:
 * Decodes one slice unit into the current picture: validates parser
 * state, activates PPS/SPS, maps the input buffer, creates a GstVaapiSlice
 * over the slice payload, initializes the reference lists and VA slice
 * parameters, and appends the slice to the picture.
 * NOTE(review): some closing braces and the `if (!slice)` check before
 * the allocation-failure message are elided in this extraction. */
3652 static GstVaapiDecoderStatus
3653 decode_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3655 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3656 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3657 GstVaapiPictureH264 * const picture = priv->current_picture;
3658 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3659 GstVaapiSlice *slice;
3660 GstBuffer * const buffer =
3661 GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
3662 GstMapInfo map_info;
3664 GST_DEBUG("slice (%u bytes)", pi->nalu.size);
/* A slice without complete picture headers is silently skipped
 * (SUCCESS) rather than treated as a hard error. */
3666 if (!is_valid_state(pi->state,
3667 GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS)) {
3668 GST_WARNING("failed to receive enough headers to decode slice");
3669 return GST_VAAPI_DECODER_STATUS_SUCCESS;
3672 if (!ensure_pps(decoder, slice_hdr->pps)) {
3673 GST_ERROR("failed to activate PPS");
3674 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3677 if (!ensure_sps(decoder, slice_hdr->pps->sequence)) {
3678 GST_ERROR("failed to activate SPS");
3679 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3682 if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
3683 GST_ERROR("failed to map buffer");
3684 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3687 /* Check whether this is the first/last slice in the current access unit */
3688 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3689 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_START);
3690 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)
3691 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
/* The slice copies the payload, so the buffer can be unmapped
 * immediately after creation. */
3693 slice = GST_VAAPI_SLICE_NEW(H264, decoder,
3694 (map_info.data + unit->offset + pi->nalu.offset), pi->nalu.size);
3695 gst_buffer_unmap(buffer, &map_info);
3697 GST_ERROR("failed to allocate slice");
3698 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3701 init_picture_refs(decoder, picture, slice_hdr);
3702 if (!fill_slice(decoder, slice, pi)) {
3703 gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(slice));
3704 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
/* Picture takes ownership of the slice. */
3707 gst_vaapi_picture_add_slice(GST_VAAPI_PICTURE_CAST(picture), slice);
3708 picture->last_slice_hdr = slice_hdr;
3709 priv->decoder_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
3710 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* scan_for_start_code:
 * Scans the adapter for a 3-byte Annex-B start code (00 00 01), using a
 * 32-bit masked scan (mask 0xffffff00 / pattern 0x00000100). Returns the
 * offset as gint (-1 when not found, per the gst_adapter API).
 * NOTE(review): the return-type line and the trailing arguments
 * (ofs, size, scp) of the call are elided in this extraction. */
3714 scan_for_start_code(GstAdapter *adapter, guint ofs, guint size, guint32 *scp)
3716 return (gint)gst_adapter_masked_scan_uint32_peek(adapter,
3717 0xffffff00, 0x00000100,
/* decode_unit:
 * Dispatches one parsed NAL unit to the matching decode_* handler based
 * on pi->nalu.type, accumulating the unit's parser state into the
 * decoder state. SEI units are accepted as no-ops; unknown types yield a
 * bitstream-parser error. NOTE(review): the per-case `break;` lines, the
 * `default:` label and the final `return status;` are elided in this
 * extraction. */
3722 static GstVaapiDecoderStatus
3723 decode_unit(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3725 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3726 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3727 GstVaapiDecoderStatus status;
3729 priv->decoder_state |= pi->state;
3730 switch (pi->nalu.type) {
3731 case GST_H264_NAL_SPS:
3732 status = decode_sps(decoder, unit);
3734 case GST_H264_NAL_SUBSET_SPS:
3735 status = decode_subset_sps(decoder, unit);
3737 case GST_H264_NAL_PPS:
3738 status = decode_pps(decoder, unit);
3740 case GST_H264_NAL_SLICE_EXT:
3741 case GST_H264_NAL_SLICE_IDR:
3742 /* fall-through. IDR specifics are handled in init_picture() */
3743 case GST_H264_NAL_SLICE:
3744 status = decode_slice(decoder, unit);
3746 case GST_H264_NAL_SEQ_END:
3747 case GST_H264_NAL_STREAM_END:
3748 status = decode_sequence_end(decoder);
3750 case GST_H264_NAL_SEI:
3751 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3754 GST_WARNING("unsupported NAL unit type %d", pi->nalu.type);
3755 status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
/* gst_vaapi_decoder_h264_decode_codec_data:
 * Parses avcC-formatted codec_data: reads nal_length_size from byte 4,
 * the SPS count from byte 5, then parses and decodes each SPS and PPS
 * NAL (length-prefixed with 2-byte sizes). On success the decoder is
 * switched to avcC (length-prefixed) mode via priv->is_avcC.
 * NOTE(review): buf-size/format validation conditions, the num_pps read,
 * some gotos/labels and the `cleanup:`-style epilogue guard are elided
 * in this extraction. */
3761 static GstVaapiDecoderStatus
3762 gst_vaapi_decoder_h264_decode_codec_data(GstVaapiDecoder *base_decoder,
3763 const guchar *buf, guint buf_size)
3765 GstVaapiDecoderH264 * const decoder =
3766 GST_VAAPI_DECODER_H264_CAST(base_decoder);
3767 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3768 GstVaapiDecoderStatus status;
3769 GstVaapiDecoderUnit unit;
3770 GstVaapiParserInfoH264 *pi = NULL;
3771 GstH264ParserResult result;
3772 guint i, ofs, num_sps, num_pps;
3774 unit.parsed_info = NULL;
3777 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3780 GST_ERROR("failed to decode codec-data, not in avcC format");
3781 return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
/* lengthSizeMinusOne (2 low bits of byte 4) + 1 => 1..4 byte NAL sizes. */
3784 priv->nal_length_size = (buf[4] & 0x03) + 1;
3786 num_sps = buf[5] & 0x1f;
/* Parse + decode each SPS; ofs advances past each identified NAL. */
3789 for (i = 0; i < num_sps; i++) {
3790 pi = gst_vaapi_parser_info_h264_new();
3792 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3793 unit.parsed_info = pi;
3795 result = gst_h264_parser_identify_nalu_avc(
3797 buf, ofs, buf_size, 2,
3800 if (result != GST_H264_PARSER_OK) {
3801 status = get_status(result);
3805 status = parse_sps(decoder, &unit);
3806 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3808 ofs = pi->nalu.offset + pi->nalu.size;
3810 status = decode_sps(decoder, &unit);
3811 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3813 gst_vaapi_parser_info_h264_replace(&pi, NULL);
/* Same loop for the PPS NALs that follow the SPS block. */
3819 for (i = 0; i < num_pps; i++) {
3820 pi = gst_vaapi_parser_info_h264_new();
3822 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3823 unit.parsed_info = pi;
3825 result = gst_h264_parser_identify_nalu_avc(
3827 buf, ofs, buf_size, 2,
3830 if (result != GST_H264_PARSER_OK) {
3831 status = get_status(result);
3835 status = parse_pps(decoder, &unit);
3836 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3838 ofs = pi->nalu.offset + pi->nalu.size;
3840 status = decode_pps(decoder, &unit);
3841 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3843 gst_vaapi_parser_info_h264_replace(&pi, NULL);
3846 priv->is_avcC = TRUE;
3847 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Shared exit path: release any pending parser info. */
3850 gst_vaapi_parser_info_h264_replace(&pi, NULL);
/* ensure_decoder:
 * Lazily opens the decoder on first use and feeds any pending
 * codec-data (avcC) through the base class. Returns SUCCESS when the
 * decoder is ready. NOTE(review): the `return status;` for the
 * codec-data failure path and closing braces are elided here. */
3854 static GstVaapiDecoderStatus
3855 ensure_decoder(GstVaapiDecoderH264 *decoder)
3857 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3858 GstVaapiDecoderStatus status;
3860 if (!priv->is_opened) {
3861 priv->is_opened = gst_vaapi_decoder_h264_open(decoder);
3862 if (!priv->is_opened)
3863 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
3865 status = gst_vaapi_decoder_decode_codec_data(
3866 GST_VAAPI_DECODER_CAST(decoder));
3867 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3870 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* gst_vaapi_decoder_h264_parse:
 * Parse vfunc: extracts exactly one NAL unit from the adapter (either a
 * length-prefixed avcC NAL or an Annex-B start-code delimited NAL),
 * identifies and parses it into a fresh GstVaapiParserInfoH264 attached
 * to @unit, then derives the unit flags (AU/frame start & end, slice,
 * skip) from the NAL type. Returns ERROR_NO_DATA until a full NAL is
 * buffered. NOTE(review): numerous lines (declarations of buf/ofs/ofs2/
 * start_code, braces, else branches, nalu_size init, the AU-boundary
 * bookkeeping around input_offset2) are elided in this extraction. */
3873 static GstVaapiDecoderStatus
3874 gst_vaapi_decoder_h264_parse(GstVaapiDecoder *base_decoder,
3875 GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
3877 GstVaapiDecoderH264 * const decoder =
3878 GST_VAAPI_DECODER_H264_CAST(base_decoder);
3879 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3880 GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
3881 GstVaapiParserInfoH264 *pi;
3882 GstVaapiDecoderStatus status;
3883 GstH264ParserResult result;
3885 guint i, size, buf_size, nalu_size, flags;
3889 status = ensure_decoder(decoder);
3890 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* NALU-aligned input lets us take the cheap fast-size path. */
3893 switch (priv->stream_alignment) {
3894 case GST_VAAPI_STREAM_ALIGN_H264_NALU:
3895 size = gst_adapter_available_fast(adapter);
3898 size = gst_adapter_available(adapter);
/* avcC mode: read the big-endian length prefix to size the NAL. */
3902 if (priv->is_avcC) {
3903 if (size < priv->nal_length_size)
3904 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3906 buf = (guchar *)&start_code;
3907 g_assert(priv->nal_length_size <= sizeof(start_code));
3908 gst_adapter_copy(adapter, buf, 0, priv->nal_length_size);
3911 for (i = 0; i < priv->nal_length_size; i++)
3912 nalu_size = (nalu_size << 8) | buf[i];
3914 buf_size = priv->nal_length_size + nalu_size;
3915 if (size < buf_size)
3916 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
/* Annex-B mode: locate this start code and the next one to bound
 * the NAL; elided lines handle alignment and offset bookkeeping. */
3920 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3922 if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_NALU)
3925 ofs = scan_for_start_code(adapter, 0, size, NULL);
3927 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3930 gst_adapter_flush(adapter, ofs);
3934 ofs2 = ps->input_offset2 - ofs - 4;
3938 ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
3939 scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
3941 // Assume the whole NAL unit is present if end-of-stream
3943 ps->input_offset2 = size;
3944 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3951 ps->input_offset2 = 0;
3953 buf = (guchar *)gst_adapter_map(adapter, buf_size);
3955 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3957 unit->size = buf_size;
3959 pi = gst_vaapi_parser_info_h264_new();
3961 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
/* The unit owns the parser info and releases it with the unit. */
3963 gst_vaapi_decoder_unit_set_parsed_info(unit,
3964 pi, (GDestroyNotify)gst_vaapi_mini_object_unref);
3967 result = gst_h264_parser_identify_nalu_avc(priv->parser,
3968 buf, 0, buf_size, priv->nal_length_size, &pi->nalu);
3970 result = gst_h264_parser_identify_nalu_unchecked(priv->parser,
3971 buf, 0, buf_size, &pi->nalu);
3972 status = get_status(result);
3973 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* First pass: parse the NAL payload by type. */
3976 switch (pi->nalu.type) {
3977 case GST_H264_NAL_SPS:
3978 status = parse_sps(decoder, unit);
3980 case GST_H264_NAL_SUBSET_SPS:
3981 status = parse_subset_sps(decoder, unit);
3983 case GST_H264_NAL_PPS:
3984 status = parse_pps(decoder, unit);
3986 case GST_H264_NAL_SEI:
3987 status = parse_sei(decoder, unit);
3989 case GST_H264_NAL_SLICE_EXT:
3990 if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
3991 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3995 case GST_H264_NAL_SLICE_IDR:
3996 case GST_H264_NAL_SLICE:
3997 status = parse_slice(decoder, unit);
4000 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
4003 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* Second pass: derive unit flags (AU/frame boundaries, skip, slice). */
4007 switch (pi->nalu.type) {
4008 case GST_H264_NAL_AU_DELIMITER:
4009 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4010 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4012 case GST_H264_NAL_FILLER_DATA:
4013 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4015 case GST_H264_NAL_STREAM_END:
4016 flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
4018 case GST_H264_NAL_SEQ_END:
4019 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
4020 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4022 case GST_H264_NAL_SPS:
4023 case GST_H264_NAL_SUBSET_SPS:
4024 case GST_H264_NAL_PPS:
4025 case GST_H264_NAL_SEI:
4026 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4027 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4029 case GST_H264_NAL_SLICE_EXT:
4030 if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
4031 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4035 case GST_H264_NAL_SLICE_IDR:
4036 case GST_H264_NAL_SLICE:
4037 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
4038 if (is_new_picture(pi, priv->prev_slice_pi)) {
4039 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4040 if (is_new_access_unit(pi, priv->prev_slice_pi))
4041 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4043 gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, pi);
4045 case GST_H264_NAL_SPS_EXT:
4046 case GST_H264_NAL_SLICE_AUX:
4047 /* skip SPS extension and auxiliary slice for now */
4048 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4050 case GST_H264_NAL_PREFIX_UNIT:
4051 /* skip Prefix NAL units for now */
4052 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP |
4053 GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4054 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
/* Reserved NAL types 14..18 also open an AU/frame (spec 7.4.1.2.3). */
4057 if (pi->nalu.type >= 14 && pi->nalu.type <= 18)
4058 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4059 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
/* A new AU retroactively marks the previous slice as AU_END. */
4062 if ((flags & GST_VAAPI_DECODER_UNIT_FLAGS_AU) && priv->prev_slice_pi)
4063 priv->prev_slice_pi->flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4064 GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
/* The adapter buffer gets unmapped; drop the dangling data pointer. */
4066 pi->nalu.data = NULL;
4067 pi->state = priv->parser_state;
4069 gst_vaapi_parser_info_h264_replace(&priv->prev_pi, pi);
4070 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* gst_vaapi_decoder_h264_decode:
 * Decode vfunc: makes sure the decoder is opened, then forwards the
 * parsed unit to decode_unit(). NOTE(review): the `return status;` for
 * the ensure_decoder failure path is elided in this extraction. */
4073 static GstVaapiDecoderStatus
4074 gst_vaapi_decoder_h264_decode(GstVaapiDecoder *base_decoder,
4075 GstVaapiDecoderUnit *unit)
4077 GstVaapiDecoderH264 * const decoder =
4078 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4079 GstVaapiDecoderStatus status;
4081 status = ensure_decoder(decoder);
4082 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
4084 return decode_unit(decoder, unit);
/* gst_vaapi_decoder_h264_start_frame:
 * start_frame vfunc: begins a new picture from the first slice unit of
 * the frame by delegating to decode_picture(). */
4087 static GstVaapiDecoderStatus
4088 gst_vaapi_decoder_h264_start_frame(GstVaapiDecoder *base_decoder,
4089 GstVaapiDecoderUnit *unit)
4091 GstVaapiDecoderH264 * const decoder =
4092 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4094 return decode_picture(decoder, unit);
/* gst_vaapi_decoder_h264_end_frame:
 * end_frame vfunc: finishes and submits the current picture via
 * decode_current_picture(). */
4097 static GstVaapiDecoderStatus
4098 gst_vaapi_decoder_h264_end_frame(GstVaapiDecoder *base_decoder)
4100 GstVaapiDecoderH264 * const decoder =
4101 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4103 return decode_current_picture(decoder);
/* gst_vaapi_decoder_h264_flush:
 * flush vfunc: drains/outputs all pictures still held in the DPB. */
4106 static GstVaapiDecoderStatus
4107 gst_vaapi_decoder_h264_flush(GstVaapiDecoder *base_decoder)
4109 GstVaapiDecoderH264 * const decoder =
4110 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4112 dpb_flush(decoder, NULL);
4113 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* gst_vaapi_decoder_h264_class_init:
 * Initializes the class vtable: object size/finalizer plus all the
 * decoder virtual functions implemented above.
 * NOTE(review): the `static void` return-type line is elided here. */
4117 gst_vaapi_decoder_h264_class_init(GstVaapiDecoderH264Class *klass)
4119 GstVaapiMiniObjectClass * const object_class =
4120 GST_VAAPI_MINI_OBJECT_CLASS(klass);
4121 GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
4123 object_class->size = sizeof(GstVaapiDecoderH264);
4124 object_class->finalize = (GDestroyNotify)gst_vaapi_decoder_finalize;
4126 decoder_class->create = gst_vaapi_decoder_h264_create;
4127 decoder_class->destroy = gst_vaapi_decoder_h264_destroy;
4128 decoder_class->parse = gst_vaapi_decoder_h264_parse;
4129 decoder_class->decode = gst_vaapi_decoder_h264_decode;
4130 decoder_class->start_frame = gst_vaapi_decoder_h264_start_frame;
4131 decoder_class->end_frame = gst_vaapi_decoder_h264_end_frame;
4132 decoder_class->flush = gst_vaapi_decoder_h264_flush;
4134 decoder_class->decode_codec_data =
4135 gst_vaapi_decoder_h264_decode_codec_data;
/* gst_vaapi_decoder_h264_class:
 * Returns the singleton class, lazily initialized exactly once in a
 * thread-safe manner via g_once_init_enter/leave. */
4138 static inline const GstVaapiDecoderClass *
4139 gst_vaapi_decoder_h264_class(void)
4141 static GstVaapiDecoderH264Class g_class;
4142 static gsize g_class_init = FALSE;
4144 if (g_once_init_enter(&g_class_init)) {
4145 gst_vaapi_decoder_h264_class_init(&g_class);
4146 g_once_init_leave(&g_class_init, TRUE);
4148 return GST_VAAPI_DECODER_CLASS(&g_class);
/**
4152 * gst_vaapi_decoder_h264_set_alignment:
4153 * @decoder: a #GstVaapiDecoderH264
4154 * @alignment: the #GstVaapiStreamAlignH264
4156 * Specifies how stream buffers are aligned / fed, i.e. the boundaries
4157 * of each buffer that is supplied to the decoder. This could be no
4158 * specific alignment, NAL unit boundaries, or access unit boundaries.
 */
4161 gst_vaapi_decoder_h264_set_alignment(GstVaapiDecoderH264 *decoder,
4162 GstVaapiStreamAlignH264 alignment)
4164 g_return_if_fail(decoder != NULL);
4166 decoder->priv.stream_alignment = alignment;
/**
4170 * gst_vaapi_decoder_h264_new:
4171 * @display: a #GstVaapiDisplay
4172 * @caps: a #GstCaps holding codec information
 *
 * Creates a new #GstVaapiDecoder for H.264 decoding. The @caps can
 * hold extra information like codec-data and picture coded size.
 *
4177 * Return value: the newly allocated #GstVaapiDecoder object
 */
4180 gst_vaapi_decoder_h264_new(GstVaapiDisplay *display, GstCaps *caps)
4182 return gst_vaapi_decoder_new(gst_vaapi_decoder_h264_class(), display, caps);